text stringlengths 1 1.05M |
|---|
package cyclops.container.ordering;
import cyclops.function.higherkinded.Higher;
/**
 * Trait for higher-kinded types (witness {@code W}) whose values can be ordered
 * relative to another value {@code T2} of the same kind via a supplied
 * {@code Ord} type-class instance.
 *
 * @param <W>  witness type of the higher-kinded encoding
 * @param <T1> element type
 * @param <T2> concrete type this value is compared against (itself OrderedBy)
 */
public interface OrderedBy<W, T1, T2 extends OrderedBy<W, T1, ?>> extends Higher<W, T1> {

    /**
     * Compare this value with {@code other} using the supplied ordering.
     *
     * @param ord   type-class instance that performs the comparison
     * @param other value to compare this one against
     * @return the relative ordering of {@code this} and {@code other}
     */
    default Ord.Ordering order(Ord<W, T1> ord,
                               T2 other) {
        return ord.compare(this,
                           other);
    }
}
|
#!/bin/bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Runs the e2e image inside docker, forwarding the caller's kube/minikube
# configuration and the local source tree.

set -o errexit
set -o nounset
set -o pipefail

# Environment variables the caller must provide.
declare -a mandatory
mandatory=(
  PKG
  ARCH
  GIT_COMMIT
  REPO_INFO
  TAG
  HOME
)

missing=false
for var in "${mandatory[@]}"; do
  # ${!var+x} expands to "x" only when the named variable is set.
  if [[ -z "${!var+x}" ]]; then
    echo "Environment variable $var must be set"
    missing=true
  fi
done

if [ "$missing" = true ]; then
  exit 1
fi

E2E_IMAGE=quay.io/kubernetes-ingress-controller/e2e:v10292018-240c7274b

DOCKER_OPTS=${DOCKER_OPTS:-""}
# All positional arguments are passed through as the container entrypoint.
FLAGS=$@

# Under errexit a failing `docker run` would skip a trailing `rm .env`;
# clean up the generated env file on any exit instead.
trap 'rm -f .env' EXIT

tee .env << EOF
PKG=${PKG:-""}
ARCH=${ARCH:-""}
GIT_COMMIT=${GIT_COMMIT:-""}
E2E_NODES=${E2E_NODES:-4}
FOCUS=${FOCUS:-.*}
TAG=${TAG:-"0.0"}
HOME=${HOME:-/root}
KUBECONFIG=${HOME}/.kube/config
GOARCH=${GOARCH}
GOBUILD_FLAGS=${GOBUILD_FLAGS:-"-v"}
PWD=${PWD}
BUSTED_ARGS=${BUSTED_ARGS:-""}
REPO_INFO=${REPO_INFO:-local}
NODE_IP=${NODE_IP:-127.0.0.1}
SLOW_E2E_THRESHOLD=${SLOW_E2E_THRESHOLD:-40}
EOF

MINIKUBE_PATH=${HOME}/.minikube
MINIKUBE_VOLUME="-v ${MINIKUBE_PATH}:${MINIKUBE_PATH}"
if [ ! -d "${MINIKUBE_PATH}" ]; then
  echo "Minikube directory not found! Volume will be excluded from docker build."
  MINIKUBE_VOLUME=""
fi

# DOCKER_OPTS, MINIKUBE_VOLUME and FLAGS are intentionally unquoted: they may
# expand to zero or multiple words.
docker run \
  --tty \
  --rm \
  ${DOCKER_OPTS} \
  -v "${HOME}/.kube:/${HOME}/.kube" \
  -v "${PWD}:/go/src/${PKG}" \
  -v "${PWD}/.gocache:${HOME}/.cache/go-build" \
  -v "${PWD}/bin/${ARCH}:/go/bin/linux_${ARCH}" \
  ${MINIKUBE_VOLUME} \
  -w "/go/src/${PKG}" \
  --env-file .env \
  --entrypoint ${FLAGS} \
  "${E2E_IMAGE}"
|
#!/bin/bash
# Download pretrained weight files into the directory given as the only
# argument (created if missing).

if [ "$#" -ne 1 ]; then
  echo "Illegal number of parameters: Enter the path for saving weights"
  exit 1
fi

out_dir=$1
mkdir -p "$out_dir"

# Use ?dl=1 so Dropbox serves the file directly; ?dl=0 returns the HTML
# preview page instead of the weights. -c resumes partial downloads.
wget -c -O "$out_dir/crossview_weights.pth" "https://www.dropbox.com/s/dlhyo4bsyquvk55/crossview_weights.pth?dl=1"
wget -c -O "$out_dir/crosssubject_weights.pth" "https://www.dropbox.com/s/3n2rxosp78vdtj1/crosssubject_weights.pth?dl=1"
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Regenerates client-go clientsets, listers and informers from the API types.
# Can be invoked with --verify-only so the same script backs both the
# update- and verify- entry points.

set -o errexit
set -o nounset
set -o pipefail
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${KUBE_ROOT}/hack/lib/init.sh"
kube::golang::setup_env

# Build the three generators from the vendored code-generator module.
BUILD_TARGETS=(
  vendor/k8s.io/code-generator/cmd/client-gen
  vendor/k8s.io/code-generator/cmd/lister-gen
  vendor/k8s.io/code-generator/cmd/informer-gen
)
make -C "${KUBE_ROOT}" WHAT="${BUILD_TARGETS[*]}"
clientgen=$(kube::util::find-binary "client-gen")
listergen=$(kube::util::find-binary "lister-gen")
informergen=$(kube::util::find-binary "informer-gen")

# Please do not add any logic to this shell script. Add logic to the go code
# that generates the set-gen program.
#
GROUP_VERSIONS=(${KUBE_AVAILABLE_GROUP_VERSIONS})
GV_DIRS=()
for gv in "${GROUP_VERSIONS[@]}"; do
  # add items, but strip off any leading apis/ you find to match command expectations
  api_dir=$(kube::util::group-version-to-pkg-path "${gv}")
  nopkg_dir=${api_dir#pkg/}
  nopkg_dir=${nopkg_dir#vendor/k8s.io/api/}
  pkg_dir=${nopkg_dir#apis/}
  # skip groups that aren't being served, clients for these don't matter
  if [[ " ${KUBE_NONSERVER_GROUP_VERSIONS} " == *" ${gv} "* ]]; then
    continue
  fi
  GV_DIRS+=("${pkg_dir}")
done
# delimit by commas for the command
GV_DIRS_CSV=$(IFS=',';echo "${GV_DIRS[*]// /,}";IFS=$)

# This can be called with one flag, --verify-only, so it works for both the
# update- and verify- scripts.
${clientgen} "$@"
${clientgen} --output-base "${KUBE_ROOT}/vendor" --clientset-path="k8s.io/client-go" --clientset-name="kubernetes" --input-base="k8s.io/kubernetes/vendor/k8s.io/api" --input="${GV_DIRS_CSV}" "$@"

# Internal listers: every package under pkg/apis with a types.go, plus pkg/api.
listergen_internal_apis=(
  pkg/api
  $(
    cd ${KUBE_ROOT}
    find pkg/apis -maxdepth 2 -name types.go | xargs -n1 dirname | sort
  )
)
listergen_internal_apis=(${listergen_internal_apis[@]/#/k8s.io/kubernetes/})
listergen_internal_apis_csv=$(IFS=,; echo "${listergen_internal_apis[*]}")
${listergen} --input-dirs "${listergen_internal_apis_csv}" "$@"

# External listers from the staging k8s.io/api tree.
listergen_external_apis=(
  $(
    cd ${KUBE_ROOT}/staging/src
    # because client-gen doesn't do policy/v1alpha1, we have to skip it too
    find k8s.io/api -name types.go | xargs -n1 dirname | sort | grep -v pkg.apis.policy.v1alpha1
  )
)
listergen_external_apis_csv=$(IFS=,; echo "${listergen_external_apis[*]}")
${listergen} --output-base "${KUBE_ROOT}/vendor" --output-package "k8s.io/client-go/listers" --input-dirs "${listergen_external_apis_csv}" "$@"

# Internal informers (mirrors the internal lister inputs above).
informergen_internal_apis=(
  pkg/api
  $(
    cd ${KUBE_ROOT}
    find pkg/apis -maxdepth 2 -name types.go | xargs -n1 dirname | sort
  )
)
informergen_internal_apis=(${informergen_internal_apis[@]/#/k8s.io/kubernetes/})
informergen_internal_apis_csv=$(IFS=,; echo "${informergen_internal_apis[*]}")
${informergen} \
  --input-dirs "${informergen_internal_apis_csv}" \
  --internal-clientset-package k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset \
  --listers-package k8s.io/kubernetes/pkg/client/listers \
  "$@"

# External informers (mirrors the external lister inputs above).
informergen_external_apis=(
  $(
    cd ${KUBE_ROOT}/staging/src
    # because client-gen doesn't do policy/v1alpha1, we have to skip it too
    find k8s.io/api -name types.go | xargs -n1 dirname | sort | grep -v pkg.apis.policy.v1alpha1
  )
)
informergen_external_apis_csv=$(IFS=,; echo "${informergen_external_apis[*]}")
${informergen} \
  --output-base "${KUBE_ROOT}/vendor" \
  --output-package "k8s.io/client-go/informers" \
  --single-directory \
  --input-dirs "${informergen_external_apis_csv}" \
  --versioned-clientset-package k8s.io/client-go/kubernetes \
  --listers-package k8s.io/client-go/listers \
  "$@"

# You may add additional calls of code generators like set-gen above.

# call generation on sub-project for now
CODEGEN_PKG=./vendor/k8s.io/code-generator vendor/k8s.io/code-generator/hack/update-codegen.sh
CODEGEN_PKG=./vendor/k8s.io/code-generator vendor/k8s.io/kube-aggregator/hack/update-codegen.sh
CODEGEN_PKG=./vendor/k8s.io/code-generator vendor/k8s.io/sample-apiserver/hack/update-codegen.sh
CODEGEN_PKG=./vendor/k8s.io/code-generator vendor/k8s.io/sample-controller/hack/update-codegen.sh
CODEGEN_PKG=./vendor/k8s.io/code-generator vendor/k8s.io/apiextensions-apiserver/hack/update-codegen.sh
CODEGEN_PKG=./vendor/k8s.io/code-generator vendor/k8s.io/metrics/hack/update-codegen.sh
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.streampipes.manager.util;
import org.apache.streampipes.commons.exceptions.NoSepaInPipelineException;
import org.apache.streampipes.model.base.InvocableStreamPipesEntity;
import org.apache.streampipes.model.pipeline.Pipeline;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
public class PipelineVerificationUtils {

    /**
     * Returns the root node of a partial pipeline (a pipeline without an action):
     * the single element (sepa or action) that is not yet configured.
     *
     * @param pipeline the pipeline to inspect
     * @return {@link org.apache.streampipes.model.base.InvocableStreamPipesEntity}
     * @throws NoSepaInPipelineException when not exactly one unconfigured element exists
     */
    public static InvocableStreamPipesEntity getRootNode(Pipeline pipeline) throws NoSepaInPipelineException {
        // Consider both processing elements (sepas) and sinks (actions).
        List<InvocableStreamPipesEntity> candidates = new ArrayList<>(pipeline.getSepas());
        candidates.addAll(pipeline.getActions());

        List<InvocableStreamPipesEntity> unconfigured = new ArrayList<>();
        for (InvocableStreamPipesEntity candidate : candidates) {
            if (!candidate.isConfigured()) {
                unconfigured.add(candidate);
            }
        }

        // Exactly one unconfigured element identifies the root of the partial pipeline.
        if (unconfigured.size() == 1) {
            return unconfigured.get(0);
        }
        throw new NoSepaInPipelineException();
    }
}
|
package com.slimgears.rxrepo.query;
import com.slimgears.rxrepo.expressions.Aggregator;
import com.slimgears.rxrepo.expressions.PropertyExpression;
import com.slimgears.rxrepo.expressions.UnaryOperationExpression;
import io.reactivex.Maybe;
import io.reactivex.Observable;
import io.reactivex.Single;
import java.util.Arrays;
import java.util.Collection;
import java.util.function.Function;
/**
 * Base class for repository queries selecting entities of type {@code T}.
 * Back-ends implement {@link #first()}, {@link #aggregate(Aggregator)} and the
 * protected {@link #retrieve(Collection)}; the public methods are conveniences
 * built on those primitives.
 *
 * @param <T> entity type returned by the query
 */
public abstract class SelectQuery<T> {

    /** Emits the first matching entity, or completes empty when there is none. */
    public abstract Maybe<T> first();

    /** Computes a single aggregated value (e.g. a count) over the selected entities. */
    public abstract <R, E extends UnaryOperationExpression<T, Collection<T>, R>> Single<R> aggregate(Aggregator<T, T, R, E> aggregator);

    /**
     * Retrieves the selected entities restricted to the given properties
     * (presumably a projection — exact semantics are back-end specific).
     */
    @SafeVarargs
    public final Observable<T> retrieve(PropertyExpression<T, ?, ?>... properties) {
        return retrieve(Arrays.asList(properties));
    }

    /** Applies {@code mapper} to this query; enables fluent composition. */
    public <R> R apply(Function<SelectQuery<T>, R> mapper) {
        return mapper.apply(this);
    }

    /** Retrieves the selected entities with no explicit property restriction. */
    public Observable<T> retrieve() {
        //noinspection unchecked
        return retrieve(new PropertyExpression[0]);
    }

    /** Counts the selected entities via {@link Aggregator#count()}. */
    public Single<Long> count() {
        return aggregate(Aggregator.count());
    }

    /** Back-end specific retrieval restricted to the given properties. */
    protected abstract Observable<T> retrieve(Collection<PropertyExpression<T, ?, ?>> properties);
}
|
/*
* Copyright 2006-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.brte.sample.example.support;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import javax.sql.DataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ReaderNotOpenException;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.util.Assert;
import org.springframework.util.SerializationUtils;
/**
 * Reader that SELECTs staged rows from the BATCH_STAGING table to obtain the
 * data required for a job run. Keys are loaded once before the step starts;
 * {@link #read()} then fetches and deserializes one staged value per call.
 *
 * @author Batch Execution Development Team
 * @since 2012. 07.25
 * @version 1.0
 * @see <pre>
 * Modification Information
 *
 * Date         Author                              Description
 * -------      --------                            ---------------------------
 * 2012. 07.25  Batch Execution Development Team    Initial creation
 * </pre>
 */
@SuppressWarnings("deprecation")
public class EgovStagingItemReader<T> implements ItemReader<EgovProcessIndicatorItemWrapper<T>>, StepExecutionListener, InitializingBean, DisposableBean {

    private static Log logger = LogFactory.getLog(EgovStagingItemReader.class);

    // Step execution, captured in beforeStep(); its job id scopes the staged rows.
    private StepExecution stepExecution;

    private final Object lock = new Object();

    // Initialization flag: true once the key list has been loaded.
    private volatile boolean initialized = false;

    // Iterator over the staged row ids still to be read.
    private volatile Iterator<Long> keys;

    // JdbcTemplate used for all BATCH_STAGING access.
    private JdbcTemplate jdbcTemplate;

    /**
     * Sets the dataSource (wrapped in a JdbcTemplate).
     * @param dataSource
     */
    public void setDataSource(DataSource dataSource) {
        jdbcTemplate = new JdbcTemplate(dataSource);
    }

    /**
     * Resets the reader state on bean destruction.
     */
    @Override
    public void destroy() throws Exception {
        initialized = false;
        keys = null;
    }

    /**
     * Verifies configuration: a DataSource must have been provided.
     */
    @Override
    public final void afterPropertiesSet() throws Exception {
        Assert.notNull(jdbcTemplate, "You must provide a DataSource.");
    }

    /**
     * Fetches the ID values of unprocessed BATCH_STAGING rows for this job.
     * @return List<Long> : ids of the BATCH_STAGING rows
     */
    @SuppressWarnings("unchecked")
    private List<Long> retrieveKeys() {
        synchronized (lock) {
            return jdbcTemplate.query("SELECT ID FROM BATCH_STAGING WHERE JOB_ID=? AND PROCESSED=? ORDER BY ID",
                                      new Object[] {stepExecution.getJobExecution().getJobId(), EgovStagingItemWriter.NEW},
                                      new RowMapper() {
                                          @Override
                                          public Object mapRow(ResultSet rs, int rowNum) throws SQLException {
                                              return rs.getLong(1);
                                          }
                                      }
            );
        }
    }

    /**
     * Reads the VALUE column of the next staged row and deserializes it.
     * @return EgovProcessIndicatorItemWrapper : wrapper of id and deserialized value,
     *         or null when all keys are exhausted
     */
    @Override
    public EgovProcessIndicatorItemWrapper<T> read() throws DataAccessException {
        if (!initialized) {
            throw new ReaderNotOpenException("Reader must be open before it can be used.");
        }
        Long id = null;
        // Only key consumption is synchronized; the DB fetch below runs outside the lock.
        synchronized (lock) {
            if (keys.hasNext()) {
                id = keys.next();
            }
        }
        logger.debug("Retrieved key from list: " + id);
        if (id == null) {
            return null;
        }
        @SuppressWarnings("unchecked")
        T result = (T) jdbcTemplate.queryForObject("SELECT VALUE FROM BATCH_STAGING WHERE ID=?"
            , new Object[] {id}
            , new RowMapper<Object>() {
                  @Override
                  public Object mapRow(ResultSet rs, int rowNum) throws SQLException {
                      byte[] blob = rs.getBytes(1);
                      return SerializationUtils.deserialize(blob);
                  }
              }
        );
        return new EgovProcessIndicatorItemWrapper<T>(id, result);
    }

    /**
     * Called after the batch step completes; this reader reports no exit status.
     */
    @Override
    public ExitStatus afterStep(StepExecution stepExecution) {
        return null;
    }

    /**
     * Called before the batch step: loads the staged keys once and marks the
     * reader initialized.
     */
    @Override
    public void beforeStep(StepExecution stepExecution) {
        this.stepExecution = stepExecution;
        synchronized (lock) {
            if (keys == null) {
                keys = retrieveKeys().iterator();
                logger.info("Keys obtained for staging.");
                initialized = true;
            }
        }
    }
}
|
-- Authors of blog posts, most prolific first.
SELECT author
FROM (
    SELECT author, COUNT(*) AS post_count
    FROM blog_posts
    GROUP BY author
) AS per_author
ORDER BY post_count DESC;
<reponame>anticipasean/girakkafunc
package cyclops.container.immutable.impl;
import cyclops.container.immutable.ImmutableList;
import cyclops.container.persistent.PersistentCollection;
import cyclops.container.persistent.PersistentIndexed;
import cyclops.function.cacheable.Memoize;
import cyclops.function.combiner.Monoid;
import cyclops.reactive.ReactiveSeq;
import cyclops.reactive.companion.Spouts;
import cyclops.stream.spliterator.IteratableSpliterator;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Random;
import java.util.Spliterator;
import java.util.function.BiFunction;
import java.util.function.BiPredicate;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
import org.reactivestreams.Publisher;
/**
 * A {@link Chain} that is guaranteed to contain at least one element.
 * Most operations delegate to {@link Chain} and narrow the returned type
 * back to {@code NonEmptyChain}, since no chain operation other than
 * filtering can empty a non-empty chain.
 *
 * @param <T> element type
 */
public abstract class NonEmptyChain<T> extends Chain<T> implements ImmutableList.Some<T> {

    // Lazily-created memoized hash-code supplier; see hashCode() below.
    private Supplier<Integer> hash;

    @Override
    public NonEmptyChain<T> appendAll(Iterable<? extends T> value) {
        Chain<? extends T> w = wrap(value);
        // Appending an empty chain is a no-op; avoid allocating a new node.
        return w.isEmpty() ? this : append(this,
                                           w);
    }

    @Override
    public boolean isEmpty() {
        // By construction a NonEmptyChain always holds at least one element.
        return false;
    }

    @Override
    public <R> Chain<R> flatMap(Function<? super T, ? extends ImmutableList<? extends R>> fn) {
        return wrap(ReactiveSeq.fromIterable(this)
                               .concatMap(fn));
    }

    /** flatMap variant that stays non-empty because every mapped value is non-empty. */
    public <R> NonEmptyChain<R> flatMapNEC(Function<? super T, ? extends NonEmptyChain<? extends R>> fn) {
        return new Chain.Wrap<R>(ReactiveSeq.fromIterable(this)
                                            .concatMap(fn));
    }

    @Override
    public NonEmptyChain<T> prepend(T value) {
        return append(singleton(value),
                      this);
    }

    @Override
    public NonEmptyChain<T> prependAll(Iterable<? extends T> value) {
        return append(wrap(value),
                      this);
    }

    @Override
    public NonEmptyChain<T> append(T value) {
        return append(this,
                      singleton(value));
    }

    @Override
    public <R> NonEmptyChain<R> map(Function<? super T, ? extends R> fn) {
        return new Chain.Wrap<R>(ReactiveSeq.fromIterable(this)
                                            .map(fn));
    }

    @Override
    public NonEmptyChain<T> concat(Chain<T> b) {
        return append(this,
                      b);
    }

    // The overrides below simply narrow the return type of the Chain
    // implementation from Chain<T> to NonEmptyChain<T>.

    @Override
    public NonEmptyChain<T> insertStreamAt(int pos,
                                           Stream<T> stream) {
        return (NonEmptyChain<T>) super.insertStreamAt(pos,
                                                       stream);
    }

    @Override
    public NonEmptyChain<T> prependStream(Stream<? extends T> stream) {
        return (NonEmptyChain<T>) super.prependStream(stream);
    }

    @Override
    public NonEmptyChain<T> peek(Consumer<? super T> c) {
        return (NonEmptyChain<T>) super.peek(c);
    }

    @Override
    public NonEmptyChain<ReactiveSeq<T>> permutations() {
        return (NonEmptyChain<ReactiveSeq<T>>) super.permutations();
    }

    @Override
    public NonEmptyChain<ReactiveSeq<T>> combinations(int size) {
        return (NonEmptyChain<ReactiveSeq<T>>) super.combinations(size);
    }

    @Override
    public NonEmptyChain<ReactiveSeq<T>> combinations() {
        return (NonEmptyChain<ReactiveSeq<T>>) super.combinations();
    }

    @Override
    public NonEmptyChain<T> combine(BiPredicate<? super T, ? super T> predicate,
                                    BinaryOperator<T> op) {
        return (NonEmptyChain<T>) super.combine(predicate,
                                                op);
    }

    @Override
    public NonEmptyChain<T> combine(Monoid<T> op,
                                    BiPredicate<? super T, ? super T> predicate) {
        return (NonEmptyChain<T>) super.combine(op,
                                                predicate);
    }

    @Override
    public NonEmptyChain<T> cycle(long times) {
        return (NonEmptyChain<T>) super.cycle(times);
    }

    @Override
    public NonEmptyChain<T> cycle(Monoid<T> m,
                                  long times) {
        return (NonEmptyChain<T>) super.cycle(m,
                                              times);
    }

    @Override
    public NonEmptyChain<Seq<T>> sliding(int windowSize) {
        return (NonEmptyChain<Seq<T>>) super.sliding(windowSize);
    }

    @Override
    public NonEmptyChain<Seq<T>> sliding(int windowSize,
                                         int increment) {
        return (NonEmptyChain<Seq<T>>) super.sliding(windowSize,
                                                     increment);
    }

    @Override
    public <C extends PersistentCollection<? super T>> NonEmptyChain<C> grouped(int size,
                                                                                Supplier<C> supplier) {
        return (NonEmptyChain<C>) super.grouped(size,
                                                supplier);
    }

    @Override
    public NonEmptyChain<Vector<T>> grouped(int groupSize) {
        return (NonEmptyChain<Vector<T>>) super.grouped(groupSize);
    }

    @Override
    public NonEmptyChain<T> distinct() {
        return (NonEmptyChain<T>) super.distinct();
    }

    @Override
    public NonEmptyChain<T> scanLeft(Monoid<T> monoid) {
        return (NonEmptyChain<T>) super.scanLeft(monoid);
    }

    @Override
    public <U> NonEmptyChain<U> scanLeft(U seed,
                                         BiFunction<? super U, ? super T, ? extends U> function) {
        return (NonEmptyChain<U>) super.scanLeft(seed,
                                                 function);
    }

    @Override
    public NonEmptyChain<T> scanRight(Monoid<T> monoid) {
        return (NonEmptyChain<T>) super.scanRight(monoid);
    }

    @Override
    public <U> NonEmptyChain<U> scanRight(U identity,
                                          BiFunction<? super T, ? super U, ? extends U> combiner) {
        return (NonEmptyChain<U>) super.scanRight(identity,
                                                  combiner);
    }

    @Override
    public NonEmptyChain<T> sorted() {
        return (NonEmptyChain<T>) super.sorted();
    }

    @Override
    public NonEmptyChain<T> sorted(Comparator<? super T> c) {
        return (NonEmptyChain<T>) super.sorted(c);
    }

    @Override
    public NonEmptyChain<T> intersperse(T value) {
        return (NonEmptyChain<T>) super.intersperse(value);
    }

    @Override
    public NonEmptyChain<T> shuffle() {
        return (NonEmptyChain<T>) super.shuffle();
    }

    @Override
    public NonEmptyChain<T> shuffle(Random random) {
        return (NonEmptyChain<T>) super.shuffle(random);
    }

    @Override
    public <U extends Comparable<? super U>> NonEmptyChain<T> sorted(Function<? super T, ? extends U> function) {
        return (NonEmptyChain<T>) super.sorted(function);
    }

    @Override
    public NonEmptyChain<T> prependAll(T... values) {
        return super.prependAll(values);
    }

    @Override
    public NonEmptyChain<T> insertAt(int pos,
                                     T... values) {
        return (NonEmptyChain<T>) super.insertAt(pos,
                                                 values);
    }

    @Override
    public NonEmptyChain<T> plusAll(Iterable<? extends T> list) {
        return (NonEmptyChain<T>) super.plusAll(list);
    }

    @Override
    public NonEmptyChain<T> plus(T value) {
        return super.plus(value);
    }

    @Override
    public NonEmptyChain<T> insertAt(int pos,
                                     Iterable<? extends T> values) {
        return (NonEmptyChain<T>) super.insertAt(pos,
                                                 values);
    }

    @Override
    public NonEmptyChain<T> insertAt(int i,
                                     T value) {
        return (NonEmptyChain<T>) super.insertAt(i,
                                                 value);
    }

    @Override
    public <R> R fold(Function<? super Some<T>, ? extends R> fn1,
                      Function<? super None<T>, ? extends R> fn2) {
        // Always the Some case: this chain is never empty.
        return fn1.apply(this);
    }

    @Override
    public NonEmptyChain<T> onEmpty(T value) {
        return this;
    }

    @Override
    public NonEmptyChain<T> onEmptyGet(Supplier<? extends T> supplier) {
        return this;
    }

    @Override
    public int hashCode() {
        // Cache the hash lazily via a memoized supplier; racy publication of
        // `hash` is benign because every supplier computes the same value.
        if (hash == null) {
            Supplier<Integer> local = Memoize.memoizeSupplier(() -> {
                int hashCode = 1;
                for (T e : this) {
                    hashCode = 31 * hashCode + (e == null ? 0 : e.hashCode());
                }
                return hashCode;
            });
            hash = local;
            return local.get();
        }
        return hash.get();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (obj == this) {
            return true;
        }
        // Cheap size check first when the other side is also a Chain.
        if (obj instanceof Chain) {
            Chain<T> seq1 = this;
            Chain seq2 = (Chain) obj;
            if (seq1.size() != seq2.size()) {
                return false;
            }
        }
        // Element-wise comparison against any indexed persistent collection.
        if (obj instanceof PersistentIndexed) {
            return equalToIteration((Iterable) obj);
        }
        return false;
    }

    @Override
    public NonEmptyChain<T> reverse() {
        return new Wrap(this::reverseIterator);
    }

    @Override
    public String toString() {
        // it.next() is safe unconditionally: the chain is never empty.
        // StringBuilder replaces the original StringBuffer; no synchronization
        // is needed for this method-local accumulator.
        Iterator<T> it = iterator();
        StringBuilder b = new StringBuilder("[" + it.next());
        while (it.hasNext()) {
            b.append(", " + it.next());
        }
        b.append("]");
        return b.toString();
    }

    @Override
    public Iterator<T> iterator() {
        return new ChainIterator<T>(this);
    }

    @Override
    public <T2, R> Chain<R> zip(BiFunction<? super T, ? super T2, ? extends R> fn,
                                Publisher<? extends T2> publisher) {
        return wrap(Spouts.from(this)
                          .zip(fn,
                               publisher));
    }

    @Override
    public Spliterator<T> spliterator() {
        return new IteratableSpliterator<>(this);
    }
}
|
class SolrManager:
    """Minimal in-memory registry of Solr collections.

    Tracks collection names and their (optional) config-set names in a dict;
    no actual Solr instance is contacted.
    """

    def __init__(self):
        # Maps collection name -> config-set name (or None).
        self.collections = {}

    def create_collection(self, collection_name, config_name=None):
        """Register a collection and return a human-readable status message."""
        if collection_name in self.collections:
            return f"Collection '{collection_name}' already exists"
        self.collections[collection_name] = config_name
        if config_name:
            return f"Created collection '{collection_name}' with config-set '{config_name}'"
        return f"Created collection '{collection_name}'"
# Usage: demonstrate duplicate detection and config-set handling.
solr_manager = SolrManager()
print(solr_manager.create_collection("COLL_NAME"))  # Created collection 'COLL_NAME'
print(solr_manager.create_collection("COLL_NAME", "other_conf_name"))  # already exists
print(solr_manager.create_collection("COLL_NAME_1", "shared_config")) |
<reponame>codelight-house/tech-radar<filename>src/data/rings-codelight.js
// Tech-radar ring definitions, ordered from innermost to outermost.
export const RINGS = [
  { radius: 180, name: 'ADOPT', color: '#93c47d' },
  { radius: 260, name: 'TRIAL', color: '#93d2c2' },
  { radius: 330, name: 'ASSESS', color: '#fbdb84' },
  { radius: 390, name: 'HOLD', color: '#efafa9' },
  // { radius: 440, name: 'DEAD', color: '#efafa9' },
];

// Ring names in radar order, derived from RINGS.
export const RING_NAMES = RINGS.map((ring) => ring.name);

// Optional named filters mapping a label to the ring names it includes.
export const CUSTOM_RING_FILTERS = {
  // 'I know it quite well': ['ADOPT', 'TRIAL', 'HOLD'],
  // 'I would like to know it better': ['TRIAL', 'ASSESS'],
};
|
<reponame>m-nakagawa/sample
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jena.cmd;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.atlas.lib.Version ;
/**
 * Command-line module handling the {@code --version} flag: registers the flag,
 * and on request prints version information for the registered classes,
 * optionally exiting the JVM afterwards.
 */
public class ModVersion extends ModBase
{
    // Declaration of the no-value --version argument.
    protected final ArgDecl versionDecl = new ArgDecl(ArgDecl.NoValue, "version") ;
    // True once --version has been seen on the command line.
    protected boolean version = false ;
    // When true, processArgs prints the version and terminates the JVM.
    protected boolean printAndExit = false ;
    private Version versionMgr = new Version() ;

    public ModVersion(boolean printAndExit)
    {
        this.printAndExit = printAndExit ;
    }

    /** Register a class whose version information should be reported. */
    public void addClass(Class<?> c) { versionMgr.addClass(c) ; }

    @Override
    public void registerWith(CmdGeneral cmdLine)
    {
        cmdLine.add(versionDecl, "--version", "Version information") ;
    }

    @Override
    public void processArgs(CmdArgModule cmdLine)
    {
        if ( cmdLine.contains(versionDecl) )
            version = true ;
        // The --version flag causes us to print and exit.
        if ( version && printAndExit )
            printVersionAndExit() ;
    }

    /** Whether --version was present on the command line. */
    public boolean getVersionFlag() { return version ; }

    /** Print version information for all registered classes to stdout. */
    public void printVersion()
    {
        versionMgr.print(IndentedWriter.stdout);
    }

    /** Print version information, then terminate the JVM with status 0. */
    public void printVersionAndExit()
    {
        printVersion() ;
        System.exit(0) ;
    }
}
|
echo "Start building for [linux]..."
# Set the cross-compile environment per-invocation instead of `go env -w`,
# which permanently rewrites the user's global Go configuration file.
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o ../fyi ../cmd/main.go
echo "Done!"
<reponame>reconbot/subscriptionless
import {
ConnectionInitMessage,
SubscribeMessage,
CompleteMessage,
PingMessage,
PongMessage,
} from 'graphql-ws';
import { DataMapper } from '@aws/dynamodb-data-mapper';
import { APIGatewayEvent } from 'aws-lambda';
import { GraphQLSchema } from 'graphql';
import { DynamoDB, StepFunctions } from 'aws-sdk';
import { Subscription, Connection } from './model';
export type ServerArgs = {
  /** GraphQL schema containing subscriptions. */
  schema: GraphQLSchema;
  /** Constructor function for GraphQL context. */
  context?: ((arg: { connectionParams: any }) => object) | object;
  /** Options for server->client ping/pong (recommended). */
  ping?: {
    /** Rate at which pings are sent. */
    interval: number;
    /** Time for pong response before closing socket. */
    timeout: number;
    /** State machine resource for dispatching pings. */
    machineArn: string;
  };
  /** Override default table names. */
  tableNames?: Partial<TableNames>;
  /** Override default DynamoDB instance. */
  dynamodb?: DynamoDB;
  /** Called on incoming API Gateway `$connect` event. */
  onConnect?: (e: { event: APIGatewayEvent }) => MaybePromise<void>;
  /** Called on incoming API Gateway `$disconnect` event. */
  onDisconnect?: (e: { event: APIGatewayEvent }) => MaybePromise<void>;
  /**
   * Called on incoming graphql-ws `connection_init` message.
   * Returned value is persisted and provided at context creation on publish events.
   **/
  onConnectionInit?: (e: {
    event: APIGatewayEvent;
    message: ConnectionInitMessage;
  }) => MaybePromise<object>;
  /** Called on incoming graphql-ws `subscribe` message. */
  onSubscribe?: (e: {
    event: APIGatewayEvent;
    message: SubscribeMessage;
  }) => MaybePromise<void>;
  /** Called on graphql-ws `complete` message. */
  onComplete?: (e: {
    event: APIGatewayEvent;
    message: CompleteMessage;
  }) => MaybePromise<void>;
  /** Called on incoming graphql-ws `ping` message. */
  onPing?: (e: {
    event: APIGatewayEvent;
    message: PingMessage;
  }) => MaybePromise<void>;
  /** Called on incoming graphql-ws `pong` message. */
  onPong?: (e: {
    event: APIGatewayEvent;
    message: PongMessage;
  }) => MaybePromise<void>;
  /** Called on unexpected errors during resolution of API Gateway or graphql-ws events. */
  onError?: (error: any, context: any) => void;
};

/** A value that may be returned directly or wrapped in a Promise. */
type MaybePromise<T> = T | Promise<T>;

/** Resolved server state: user args plus the data mapper and persistence models. */
export type ServerClosure = {
  mapper: DataMapper;
  model: {
    Subscription: typeof Subscription;
    Connection: typeof Connection;
  };
} & Omit<ServerArgs, 'tableNames' | 'dynamodb'>;

/** Names of the DynamoDB tables backing connections and subscriptions. */
type TableNames = {
  connections: string;
  subscriptions: string;
};

/** Shape of an API Gateway websocket handler response. */
export type WebsocketResponse = {
  statusCode: number;
  headers?: Record<string, string>;
  body: string;
};

/** A subscription topic plus an optional message filter. */
export type SubscriptionDefinition = {
  topic: string;
  filter?: object | (() => void);
};

// NOTE(review): "Psuedo" is a typo of "Pseudo", but the name is exported and
// renaming it would break importers.
export type SubscribeHandler = (...args: any[]) => SubscribePsuedoIterable;

/** Callable placeholder carrying the subscription definitions it represents. */
export type SubscribePsuedoIterable = {
  (): void;
  definitions: SubscriptionDefinition[];
};

/** Arguments forwarded to a subscribe handler. */
export type SubscribeArgs = any[];

/** Any constructible class. */
export type Class = { new (...args: any[]): any };

/** Input payload for the ping-dispatching Step Functions state machine. */
export type StateFunctionInput = {
  connectionId: string;
  domainName: string;
  stage: string;
  state: 'PING' | 'REVIEW' | 'ABORT';
  seconds: number;
};
|
<gh_stars>10-100
// Appears to be Doxygen-generated search-index data (generated file — do not
// edit by hand). Each entry maps a lower-cased search key to its display name
// and target page.
var searchData=
[
  ['basic_20concepts',['Basic concepts',['../basic_concepts.html',1,'']]]
];
|
def count_letters(text):
    """Count vowels and consonants in ``text`` (case-insensitive).

    Returns a ``(num_vowels, num_consonants)`` tuple. Non-alphabetic
    characters are ignored; 'y' counts as a consonant.
    """
    lowered = text.lower()
    num_vowels = sum(1 for ch in lowered if ch in "aeiou")
    num_consonants = sum(1 for ch in lowered if ch.isalpha() and ch not in "aeiou")
    return num_vowels, num_consonants
# Import necessary libraries and constants
from scipy.constants import boltzmann
from pint import UnitRegistry
# Initialize unit registry
unit_registry = UnitRegistry()
# Define the function to convert error value based on final type
def convert_error_value(final_type, error_value, in_unit, out_unit, value=None):
    """Convert an experimental error value between units, per affinity type.

    Fixes two defects in the original: the "kd" branch referenced an
    undefined name ``value`` (guaranteed NameError), and both branches called
    ``.to()`` on ``error_value`` even when a plain number was passed.

    Args:
        final_type: affinity type: "kd", "ki" or "ic50".
        error_value: numeric error magnitude (or a pint quantity).
        in_unit: unit string the error value is expressed in.
        out_unit: unit string to convert the result to.
        value: reference measurement the "kd" conversion divides by
            (required for "kd"; previously an undefined global).

    Returns:
        A pint quantity in ``out_unit``, or None for unknown ``final_type``
        (matching the original fall-through behavior).
    """
    # Example temperature value in Kelvin.
    temperature = 298
    # Attach the input unit so plain numbers work as well as pint quantities.
    error_quantity = unit_registry.Quantity(error_value, in_unit)
    if final_type == "kd":
        if value is None:
            raise ValueError("'value' is required for 'kd' error conversion")
        # NOTE(review): boltzmann (J/K) * K / value * error — preserved from the
        # original logic; confirm dimensional intent against the caller.
        error = (
            boltzmann
            * temperature
            * unit_registry.kelvin
            / value
            * error_quantity
        )
        return error.to(out_unit).round(2)
    elif final_type == "ki" or final_type == "ic50":
        # Plain unit conversion for inhibition constants / IC50 errors.
        return error_quantity.to(out_unit)
# Example usage
# NOTE(review): the claimed output "5.0 cm" is wrong — 500 m expressed in cm
# would be 50000 cm, and as written the "ki" branch calls .to() on a plain
# int, which raises AttributeError. Confirm intended input type/units.
print(convert_error_value("ki", 500, "m", "cm"))
const jwt = require('jsonwebtoken');
// NOTE(review): hard-coded signing secret — load from configuration or an
// environment variable before any real deployment.
const privateKey = 'my-private-key';
// Users store in the database
// NOTE(review): demo data only — passwords are stored in plain text here;
// real credential storage must use a salted hash (e.g. bcrypt).
const users = [
  { id: 1, username: 'joe', password: 'password' },
  { id: 2, username: 'sam', password: '123456' }
];
// Authenticate a username/password pair against the in-memory user store.
// Returns a signed JWT whose payload carries { id, username } on success,
// or false when no matching user exists.
const authenticate = (username, password) => {
  const match = users.find(
    (candidate) => candidate.username === username && candidate.password === password
  );
  if (!match) {
    return false;
  }
  return jwt.sign({ id: match.id, username: match.username }, privateKey);
};
// Verify token function
// Returns the decoded payload of a valid token. NOTE(review): jwt.verify
// throws on an invalid or expired token rather than returning a falsy value —
// callers must wrap this in try/catch.
const verifyToken = (token) => {
  return jwt.verify(token, privateKey);
};
module.exports = {
authenticate,
verifyToken
}; |
#! /bin/sh
#
# %CopyrightBegin%
#
# Copyright Ericsson AB 2002-2011. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# %CopyrightEnd%
#
# Icky cl wrapper that does its best to behave like a Unixish cc.
# Made to work for Erlang builds and to make configure happy, not really
# general I suspect.
# set -x
# Save the command line for debug outputs
SAVE="$@"
# Constants
COMMON_CFLAGS="-nologo -D__WIN32__ -DWIN32 -DWINDOWS -D_WIN32 -DNT -D_CRT_SECURE_NO_DEPRECATE"
# Variables
# The stdout and stderr for the compiler
MSG_FILE=/tmp/cl.exe.$$.1
ERR_FILE=/tmp/cl.exe.$$.2
# "Booleans" determined during "command line parsing"
# If the stdlib option is explicitly passed to this program
MD_FORCED=false
# If we're preprocessing (only), i.e. -E
PREPROCESSING=false
# If we're generating dependencies (implies preprocessing)
DEPENDENCIES=false
# If this is supposed to be a debug build
DEBUG_BUILD=false
# If this is supposed to be an optimized build (there can only be one...)
OPTIMIZED_BUILD=false
# If we're linking or only compiling
LINKING=true
# This data is accumulated during command line "parsing"
# The stdlibrary option, default multithreaded dynamic
MD=-MD
# Flags for debug compilation
DEBUG_FLAGS=""
# Flags for optimization
OPTIMIZE_FLAGS=""
# The specified output filename (if any), may be either object or exe.
OUTFILE=""
# Unspecified command line options for the compiler
CMD=""
# All the c source files, in unix style
SOURCES=""
# All the options to pass to the linker, kept in Unix style
LINKCMD=""
# Loop through the parameters and set the above variables accordingly
# Also convert some cygwin filenames to "mixed style" ditto (understood by the
# compiler very well), except for anything passed to the linker, that script
# handles those and the sources, which are also kept unixish for now
while test -n "$1" ; do
x="$1"
case "$x" in
# GCC-style warning flag: silently dropped, no cl.exe equivalent is passed
-Wall)
;;
-c)
LINKING=false;;
#CMD="$CMD -c";;
# Dependency generation: preprocess only and post-process the output later
-MM)
PREPROCESSING=true;
LINKING=false;
DEPENDENCIES=true;;
-E)
PREPROCESSING=true;
LINKING=false;; # Obviously...
#CMD="$CMD -E";;
-Owx)
# Optimization hardcoded of wxErlang, needs to disable debugging too
OPTIMIZE_FLAGS="-Ob2ity -Gs -Zi";
DEBUG_FLAGS="";
DEBUG_BUILD=false;
if [ $MD_FORCED = false ]; then
MD=-MD;
fi
OPTIMIZED_BUILD=true;;
-O*)
# Optimization hardcoded, needs to disable debugging too
OPTIMIZE_FLAGS="-Ox -Zi";
DEBUG_FLAGS="";
DEBUG_BUILD=false;
if [ $MD_FORCED = false ]; then
MD=-MD;
fi
OPTIMIZED_BUILD=true;;
-g|-ggdb)
# Debug only wins if no -O* option was seen earlier on the command line
if [ $OPTIMIZED_BUILD = false ];then
# Hardcoded windows debug flags
DEBUG_FLAGS="-Z7";
if [ $MD_FORCED = false ]; then
MD=-MDd;
fi
LINKCMD="$LINKCMD -g";
DEBUG_BUILD=true;
fi;;
# Allow forcing of stdlib
-mt|-MT)
MD="-MT";
MD_FORCED=true;;
-md|-MD)
MD="-MD";
MD_FORCED=true;;
-ml|-ML)
MD="-ML";
MD_FORCED=true;;
-mdd|-MDD|-MDd)
MD="-MDd";
MD_FORCED=true;;
-mtd|-MTD|-MTd)
MD="-MTd";
MD_FORCED=true;;
-mld|-MLD|-MLd)
MD="-MLd";
MD_FORCED=true;;
# Output file: either "-o name" or "-oname"
-o)
shift;
OUTFILE="$1";;
-o*)
y=`echo $x | sed 's,^-[Io]\(.*\),\1,g'`;
OUTFILE="$y";;
# Absolute include path: quote it for cl.exe
-I/*)
y=`echo $x | sed 's,^-[Io]\(/.*\),\1,g'`;
z=`echo $x | sed 's,^-\([Io]\)\(/.*\),\1,g'`;
MPATH=`echo $y`;
CMD="$CMD -$z\"$MPATH\"";;
-I*)
y=`echo $x | sed 's,",\\\",g'`;
CMD="$CMD $y";;
-D*)
y=`echo $x | sed 's,",\\\",g'`;
CMD="$CMD $y";;
-EH*)
y=`echo $x | sed 's,",\\\",g'`;
CMD="$CMD $y";;
-TP|-Tp)
y=`echo $x | sed 's,",\\\",g'`;
CMD="$CMD $y";;
# Libraries are deferred to the linker script untouched
-l*)
y=`echo $x | sed 's,^-l\(.*\),\1,g'`;
LINKCMD="$LINKCMD $x";;
# Source files are collected for the compile loop below
/*.c)
SOURCES="$SOURCES $x";;
*.c)
SOURCES="$SOURCES $x";;
/*.cc)
SOURCES="$SOURCES $x";;
*.cc)
SOURCES="$SOURCES $x";;
/*.cpp)
SOURCES="$SOURCES $x";;
*.cpp)
SOURCES="$SOURCES $x";;
# Pre-built objects go straight to the linker
/*.o)
LINKCMD="$LINKCMD $x";;
*.o)
LINKCMD="$LINKCMD $x";;
*)
# Try to quote uninterpreted options
y=`echo $x | sed 's,",\\\",g'`;
LINKCMD="$LINKCMD $y";;
esac
shift
done
#Return code from compiler, linker.sh and finally this script...
RES=0
# Accumulated object names
ACCUM_OBJECTS=""
# A temporary object file location
TMPOBJDIR=/tmp/tmpobj$$
mkdir $TMPOBJDIR
# Compile each collected source file with cl.exe, routing output according
# to whether we're compiling only, preprocessing, or generating dependencies
for x in $SOURCES; do
start_time=`date '+%s'`
# Compile each source
if [ $LINKING = false ]; then
# We should have an output defined, which is a directory
# or an object file
case $OUTFILE in
/*.o)
# Simple output, SOURCES should be one single
n=`echo $SOURCES | wc -w`;
if [ $n -gt 1 ]; then
echo "cc.sh:Error, multiple sources, one object output.";
exit 1;
else
output_filename=`echo $OUTFILE`;
fi;;
*.o)
# Relative path needs no translation
n=`echo $SOURCES | wc -w`
if [ $n -gt 1 ]; then
echo "cc.sh:Error, multiple sources, one object output."
exit 1
else
output_filename=$OUTFILE
fi;;
/*)
# Absolute directory
o=`echo $x | sed 's,.*/,,' | sed 's,\.c$,.o,'`
output_filename=`echo $OUTFILE`
output_filename="$output_filename/${o}";;
*)
# Relative_directory or empty string (.//x.o is valid)
o=`echo $x | sed 's,.*/,,' | sed 's,\.cp*$,.o,'`
output_filename="./${OUTFILE}/${o}";;
esac
else
# We are linking, which means we build objects in a temporary
# directory and link from there. We should retain the basename
# of each source to make examining the exe easier...
o=`echo $x | sed 's,.*/,,' | sed 's,\.c$,.o,'`
output_filename=$TMPOBJDIR/$o
ACCUM_OBJECTS="$ACCUM_OBJECTS $output_filename"
fi
# Now we know enough, lets try a compilation...
MPATH=`echo $x`
if [ $PREPROCESSING = true ]; then
output_flag="-E"
else
output_flag="-FS -c -Fo`cmd //C echo ${output_filename}`"
fi
params="$COMMON_CFLAGS $MD $DEBUG_FLAGS $OPTIMIZE_FLAGS \
$CMD ${output_flag} $MPATH"
if [ "X$CC_SH_DEBUG_LOG" != "X" ]; then
echo cc.sh "$SAVE" >>$CC_SH_DEBUG_LOG
echo cl.exe $params >>$CC_SH_DEBUG_LOG
fi
# MSYS2 (currently) converts the paths wrong, avoid it
export MSYS2_ARG_CONV_EXCL=-FoC
eval cl.exe $params >$MSG_FILE 2>$ERR_FILE
RES=$?
if test $PREPROCESSING = false; then
# Normal compile: forward diagnostics, dropping cl.exe's banner line
cat $ERR_FILE >&2
tail -n +2 $MSG_FILE
else
tail -n +2 $ERR_FILE >&2
if test $DEPENDENCIES = true; then
# Turn the preprocessor's #line records into a make dependency rule
if test `grep -v $x $MSG_FILE | grep -c '#line'` != "0"; then
o=`echo $x | sed 's,.*/,,' | sed 's,\.cp*$,.o,'`
echo -n $o':'
# cat $MSG_FILE | grep '#line' | grep -v $x | awk -F\" '{printf("%s\n",$2)}' | sort -u | grep -v " " | xargs -n 1 win2msys_path.sh | awk '{printf("\\\n %s ",$0)}'
cat $MSG_FILE | grep '#line' | grep -v $x | awk -F\" '{printf("%s\n",$2)}' | sort -u | grep -v " " | sed 's,^\([A-Za-z]\):[\\/]*,/\1/,;s,\\\\*,/,g'| awk '{printf("\\\n %s ",$0)}'
echo
echo
after_sed=`date '+%s'`
# Bugfix: message said "dependencises"
echo Made dependencies for $x':' `expr $after_sed '-' $start_time` 's' >&2
fi
else
cat $MSG_FILE
fi
fi
rm -f $ERR_FILE $MSG_FILE
if [ $RES != 0 ]; then
echo Failed: cl.exe $params
rm -rf $TMPOBJDIR
exit $RES
fi
done
# If we got here, we succeeded in compiling (if there were anything to compile)
# The output filename should name an executable if we're linking
if [ $LINKING = true ]; then
case $OUTFILE in
"")
# Use the first source name to name the executable
first_source=""
for x in $SOURCES; do first_source=$x; break; done;
if [ -n "$first_source" ]; then
# Derive the .exe name from the first source file's basename
e=`echo $first_source | sed 's,.*/,,' | sed 's,\.c$,.exe,'`;
out_spec="-o $e";
else
out_spec="";
fi;;
*)
out_spec="-o $OUTFILE";;
esac
# Decide which standard library to link against, matching the /M* flag
# chosen during argument parsing
case $MD in
-ML)
stdlib="-lLIBC";;
-MLd)
stdlib="-lLIBCD";;
-MD)
stdlib="-lMSVCRT";;
-MDd)
stdlib="-lMSVCRTD";;
-MT)
stdlib="-lLIBCMT";;
-MTd)
# Bugfix: the static multithreaded debug CRT is LIBCMTD.lib, not LIBMTD
stdlib="-lLIBCMTD";;
esac
# And finally call the next script to do the linking...
params="$out_spec $LINKCMD $stdlib"
if [ "X$CC_SH_DEBUG_LOG" != "X" ]; then
echo ld.sh $ACCUM_OBJECTS $params
fi
eval ld.sh $ACCUM_OBJECTS $params
RES=$?
fi
rm -rf $TMPOBJDIR
exit $RES
|
#!/bin/bash
# Run the image-classification demo either on CPU or on the FPGA build,
# selected by the first argument ("cpu" or "fpga"); anything else is a no-op.
case "$1" in
  cpu)
    ./build/examples/cpp_classification/classification.bin ./models/vgg_model/VGG_ILSVRC_16_layers_deploy.prototxt ./models/vgg_model/VGG_ILSVRC_16_layers.caffemodel data/ilsvrc12/imagenet_mean.binaryproto data/ilsvrc12/synset_words.txt list
    ;;
  fpga)
    ./build/examples/cpp_classification_driverTest/classification_fpga.bin models/vgg_model/VGG_ILSVRC_16_layers_deploy.prototxt models/vgg_model/VGG_ILSVRC_16_layers.caffemodel data/ilsvrc12/imagenet_mean.binaryproto data/ilsvrc12/synset_words.txt list /home/cdscdemo/Workspace/7v3/myproj/impl/vgg16.xclbin
    ;;
esac
|
<gh_stars>0
import Container from '@material-ui/core/Container'
import CssBaseline from '@material-ui/core/CssBaseline'
import Divider from '@material-ui/core/Divider'
import ExpansionPanel from '@material-ui/core/ExpansionPanel'
import ExpansionPanelDetails from '@material-ui/core/ExpansionPanelDetails'
import ExpansionPanelSummary from '@material-ui/core/ExpansionPanelSummary'
import Grid from '@material-ui/core/Grid'
import IconButton from '@material-ui/core/IconButton'
import { makeStyles, ThemeProvider, useTheme } from '@material-ui/core/styles'
import Tooltip from '@material-ui/core/Tooltip'
import Typography from '@material-ui/core/Typography'
import EditIcon from '@material-ui/icons/Edit'
import ExpandMoreIcon from '@material-ui/icons/ExpandMore'
import React, { useState } from 'react'
import { Helmet } from 'react-helmet'
import Loadable from 'react-loadable'
import useDarkMode from '../lib/useDarkMode'
import scrollbar from '../styles/scrollbar'
import { CustomCssBaseline, darkTheme, lightTheme } from '../theme'
import BackTop from './BackTop'
import EditWarn from './EditWarn'
import Footer from './Footer'
import Meta from './Meta'
import NavAndDrawer from './NavAndDrawer'
import ToC from './Toc'
// JSS styles for the page layout. The 250px left margins on lg-and-up leave
// room for the permanent navigation drawer rendered by NavAndDrawer.
const useStyles = makeStyles((theme) => ({
  // Spacer matching the app-bar height (taller bar below the md breakpoint)
  toolbar: {
    [theme.breakpoints.down('md')]: {
      minHeight: 64,
    },
    minHeight: 48 + 64,
    alignItems: 'flex-start',
  },
  content: {
    flexGrow: 1,
    width: '100%',
  },
  // Main article area; tighter padding on small screens, custom scrollbars
  // for wide math blocks
  main: {
    padding: theme.spacing(5),
    [theme.breakpoints.down('md')]: {
      padding: theme.spacing(2),
    },
    minHeight: '100vh',
    '& .math-display': scrollbar(theme, {
      overflow: 'auto',
    }),
  },
  divider: {
    marginTop: theme.spacing(2),
    marginBottom: theme.spacing(2),
  },
  footer: {
    background: theme.palette.footer.background,
    color: theme.palette.footer.text,
    padding: theme.spacing(3),
    [theme.breakpoints.up('lg')]: {
      marginLeft: 250,
    },
  },
  container: {
    [theme.breakpoints.up('lg')]: {
      marginLeft: 250,
    },
    overflowY: 'hidden',
  },
  // Edit button floated to the right of the page title
  iconButton: {
    float: 'right',
  },
}))
// Code-split the comment widget so it is only fetched when rendered,
// keeping it off the critical path of the initial page load.
const LazyComment = Loadable({
  loader: () => import('./Comment'),
  // eslint-disable-next-line react/display-name
  loading: () => <div />,
})
/**
 * Page layout: app bar + drawer, article body, optional table of contents,
 * optional comments panel, and footer.
 *
 * NOTE(review): the `noMeta`/`noComment`/`noEdit`/`noToC`/`overflow` props are
 * compared against the STRINGS 'true'/'false' below, so they are expected to
 * arrive as strings (presumably from page frontmatter) — not booleans.
 */
function MyLayout ({
  children,
  location,
  authors,
  title,
  description,
  tags,
  toc,
  relativePath,
  modifiedTime,
  noMeta,
  noComment,
  noEdit,
  noToC,
  overflow,
}) {
  const theme = useTheme()
  const classes = useStyles()
  // const pageTitle = title === 'OI Wiki' ? title : `${title} - OI Wiki`
  // Show the ToC column only when one exists and the page didn't opt out
  const displayToC = toc && toc.items && noToC !== 'true'
  // Overflowing pages take the full 12-column width; otherwise leave 2
  // columns for the ToC
  const gridWidthMdUp = overflow === 'true' ? 12 : 10
  const [dialogOpen, setDialogOpen] = useState(false)
  // Fallback site description for the <meta> tag when the page has none
  const descriptionRes = description || 'OI Wiki ๆฏไธไธช็ผ็จ็ซ่ต็ฅ่ฏๆดๅ็ซ็น๏ผๆไพๆ่ถฃๅๅฎ็จ็็ผ็จ็ซ่ต็ฅ่ฏไปฅๅๅ
ถไปๆๅธฎๅฉ็ๅ
ๅฎน๏ผๅธฎๅฉๅนฟๅคง็ผ็จ็ซ่ต็ฑๅฅฝ่
ๆดๅฟซๆดๆทฑๅ
ฅๅฐๅญฆไน ็ผ็จ็ซ่ต'
  return (
    <>
      <Helmet>
        <title>{`${title === 'OI Wiki' ? '' : title + ' - '}OI Wiki`}</title>
        <meta name="description" content={descriptionRes} />
      </Helmet>
      <EditWarn relativePath={relativePath} dialogOpen={dialogOpen} setDialogOpen={setDialogOpen} />
      <NavAndDrawer pathname={location.pathname} />
      <Grid container>
        <Grid
          item
          xs={12}
          sm={12}
          md={gridWidthMdUp}
          lg={gridWidthMdUp}
          xl={gridWidthMdUp}
        >
          <div className={classes.toolbar} />
          <div className={classes.container}>
            <main className={classes.content}>
              <div className={classes.main}>
                <Grid container spacing={2}>
                  <Grid item xs>
                    <Typography variant="h4" component="h1">
                      {title}
                    </Typography>
                  </Grid>
                  {noEdit === 'false' && (
                    <Grid item xs={1}>
                      <Tooltip title="็ผ่พ้กต้ข" placement="left" arrow>
                        <IconButton
                          onClick={() => setDialogOpen(true)}
                          className={classes.iconButton}
                        >
                          <EditIcon fontSize="small" />
                        </IconButton>
                      </Tooltip>
                    </Grid>
                  )}
                </Grid>
                <Divider className={classes.divider} />
                <Typography variant="body1" component="div">
                  {children}
                </Typography>
                <Meta
                  authors={authors}
                  tags={tags}
                  relativePath={relativePath}
                  modifiedTime={modifiedTime}
                  noMeta={noMeta}
                />
                {noComment === 'false' && (
                  <div style={{ width: '100%', marginTop: theme.spacing(2) }}>
                    <ExpansionPanel variant="outlined">
                      <ExpansionPanelSummary
                        expandIcon={<ExpandMoreIcon />}
                        aria-controls="comment"
                      >
                        <Typography className={classes.heading}>
                          ่ฏ่ฎบ
                        </Typography>
                      </ExpansionPanelSummary>
                      <ExpansionPanelDetails>
                        <Container>
                          <LazyComment title={title} />
                        </Container>
                      </ExpansionPanelDetails>
                    </ExpansionPanel>
                  </div>
                )}
              </div>
            </main>
          </div>
        </Grid>
        {displayToC && (
          <Grid item xs>
            <ToC toc={toc} pathname={location.pathname} />
          </Grid>
        )}
      </Grid>
      <Divider />
      <div className={classes.footer}>
        <Footer />
      </div>
      <BackTop />
    </>
  )
}
function StyledLayout (props) {
const enableDark = useDarkMode()
return (
<ThemeProvider theme={enableDark ? darkTheme : lightTheme}>
<CssBaseline/>
<CustomCssBaseline/>
<MyLayout {...props}/>
</ThemeProvider>
)
}
export default StyledLayout
|
#include <iostream>
using namespace std;
// Returns the sum of the two given integers.
int add_int(int x, int y) {
    int sum = x;
    sum += y;
    return sum;
}
// Demo driver: adds two fixed integers and prints the result.
int main()
{
    const int x = 3;
    const int y = 4;
    std::cout << "Sum = " << add_int(x, y) << std::endl;
    return 0;
}
<gh_stars>0
package com.twitter.inject.thrift
import com.twitter.inject.Test
import com.twitter.inject.thrift.utils.ThriftMethodUtils
import com.twitter.inject.utils.ExceptionUtils
import com.twitter.scrooge.{ThriftStructCodec3, ThriftMethod}
// Unit test for ThriftClientException's toString formatting.
class ThriftClientExceptionTest extends Test {
  // Minimal ThriftMethod stand-in: only name/serviceName/toString are real;
  // the remaining members are left unimplemented (???) because the test only
  // formats the method, it never invokes it.
  val FakeThriftMethod = new ThriftMethod {
    override val name = "fakeThriftMethod"
    /** Thrift service name. A thrift service is a list of methods. */
    override def serviceName: String = "FakeService"
    /** Convert a service implementation of this method into a function implementation */
    override def serviceToFunction(svc: ServiceType): FunctionType = ???
    /** True for oneway thrift methods */
    override def oneway: Boolean = ???
    /** Codec for the request args */
    override def argsCodec: ThriftStructCodec3[Args] = ???
    /** Codec for the response */
    override def responseCodec: ThriftStructCodec3[Result] = ???
    /** Convert a function implementation of this method into a service implementation */
    override def functionToService(f: FunctionType): ServiceType = ???
    override def toString: String = name
  }
  // The message must combine client label, pretty-printed method, and the
  // newline-stripped cause.
  "toString" in {
    val cause = new Exception("ThriftClientException")
    val thriftClientException = new ThriftClientException(
      "my-client",
      FakeThriftMethod,
      cause)
    thriftClientException.toString should equal(
      s"ThriftClientException: my-client/${ThriftMethodUtils.prettyStr(FakeThriftMethod)} = ${ExceptionUtils.stripNewlines(cause)}")
  }
}
|
from typing import Literal
import streamlit as st
import sqlite3
from .provider.sqlite.settings import SQLITE_SETTINGS
def _sqlite_hash_func(allow_db_create=False, if_table_exists: Literal['ignore', 'recreate'] = 'ignore'):
    """Build a deterministic st.cache hash key for a SQLite connection.

    Combines the configured DB path/name with the provider arguments so that
    distinct configurations produce distinct cache entries.
    """
    parts = (SQLITE_SETTINGS.DB_PATH, SQLITE_SETTINGS.DB, allow_db_create, if_table_exists)
    return '|'.join(str(part) for part in parts)
class StorageFactory():
    """Factory for storage providers; providers are memoized via st.cache so
    each one effectively behaves as a process-wide singleton."""

    def __init__(self):
        pass

    @staticmethod
    @st.cache(hash_funcs={sqlite3.Connection: _sqlite_hash_func}) # @st.singleton << not yet released
    def _sqlite_provider(allow_db_create=False, if_table_exists: Literal['ignore', 'recreate'] = 'ignore'):
        # sqlite3.Connection is not hashable by st.cache, so _sqlite_hash_func
        # supplies a stable key; the import is deferred to keep module load light.
        print(f'_sqlite_provider(allow_db_create={allow_db_create}, if_table_exists={if_table_exists})')
        from .provider.sqlite.implementation import SQLiteProvider
        provider = SQLiteProvider(allow_db_create=allow_db_create, if_table_exists=if_table_exists)
        return provider

    @staticmethod
    @st.cache(allow_output_mutation=True) # @st.singleton << not yet released
    def _airtable_provider():
        # Deferred import as above; allow_output_mutation keeps st.cache from
        # re-hashing the provider object on every access.
        print(f'_airtable_provider()')
        from .provider.airtable.implementation import AirtableProvider
        provider = AirtableProvider()
        return provider

    def get_provider(self, storage, allow_db_create=False, if_table_exists: Literal['ignore', 'recreate'] = 'ignore'):
        """Return the cached provider for `storage` ('SQLITE' or 'AIRTABLE').

        Note: any other value trips the assert (AssertionError) before the
        `else` branch is reached, so the ValueError below is unreachable as
        written.
        """
        assert(storage in ['SQLITE', 'AIRTABLE'])
        if storage == 'SQLITE':
            provider = StorageFactory._sqlite_provider(allow_db_create=allow_db_create, if_table_exists=if_table_exists)
        elif storage == 'AIRTABLE':
            provider = StorageFactory._airtable_provider()
        else:
            raise ValueError(storage)
        return provider
|
import { DataHandlingModule } from './data-handling.module';
// Smoke test: the module class can be instantiated.
describe( 'DataHandlingModule', () => {
  it( 'should work', () => {
    const moduleInstance = new DataHandlingModule()
    expect( moduleInstance ).toBeDefined()
  } )
} )
|
<reponame>paujim/cloneBucket
package services
import (
"errors"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/iam"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// MockS3 is a testify mock of the subset of the S3 API used by Cloner.
type MockS3 struct {
	mock.Mock
}
// PutBucketPolicy records the call and returns the configured mock result.
func (m *MockS3) PutBucketPolicy(input *s3.PutBucketPolicyInput) (*s3.PutBucketPolicyOutput, error) {
	args := m.Called(input)
	if out := args.Get(0); out != nil {
		return out.(*s3.PutBucketPolicyOutput), args.Error(1)
	}
	return nil, args.Error(1)
}
// DeleteBucketPolicy records the call and returns the configured mock result.
func (m *MockS3) DeleteBucketPolicy(input *s3.DeleteBucketPolicyInput) (*s3.DeleteBucketPolicyOutput, error) {
	args := m.Called(input)
	if out := args.Get(0); out != nil {
		return out.(*s3.DeleteBucketPolicyOutput), args.Error(1)
	}
	return nil, args.Error(1)
}
// ListObjectsV2 records the call and returns the configured mock result.
func (m *MockS3) ListObjectsV2(input *s3.ListObjectsV2Input) (*s3.ListObjectsV2Output, error) {
	args := m.Called(input)
	if out := args.Get(0); out != nil {
		return out.(*s3.ListObjectsV2Output), args.Error(1)
	}
	return nil, args.Error(1)
}
// CopyObject records the call and returns the configured mock result.
func (m *MockS3) CopyObject(input *s3.CopyObjectInput) (*s3.CopyObjectOutput, error) {
	args := m.Called(input)
	if out := args.Get(0); out != nil {
		return out.(*s3.CopyObjectOutput), args.Error(1)
	}
	return nil, args.Error(1)
}
// MockIAM is a testify mock of the subset of the IAM API used by Cloner.
type MockIAM struct {
	mock.Mock
}
// GetUser records the call and returns the configured mock result.
func (m *MockIAM) GetUser(input *iam.GetUserInput) (*iam.GetUserOutput, error) {
	args := m.Called(input)
	if out := args.Get(0); out != nil {
		return out.(*iam.GetUserOutput), args.Error(1)
	}
	return nil, args.Error(1)
}
// TestClone exercises Cloner.Clone against mocked S3/IAM clients: the happy
// path plus failures at each stage (policy update, listing, IAM lookup).
func TestClone(t *testing.T) {
	// Happy path: policy set, objects listed and copied, policy removed.
	t.Run("Valid clone", func(t *testing.T) {
		assert := assert.New(t)
		mockIAM := &MockIAM{}
		mockIAM.On("GetUser", mock.Anything).Return(&iam.GetUserOutput{User: &iam.User{Arn: aws.String("arn")}}, nil)
		sourceS3 := &MockS3{}
		sourceS3.On("PutBucketPolicy", mock.Anything).Return(&s3.PutBucketPolicyOutput{}, nil)
		sourceS3.On("ListObjectsV2", mock.Anything).Return(&s3.ListObjectsV2Output{Contents: []*s3.Object{{Key: aws.String("key")}}}, nil)
		sourceS3.On("DeleteBucketPolicy", mock.Anything).Return(&s3.DeleteBucketPolicyOutput{}, nil)
		destinationS3 := &MockS3{}
		destinationS3.On("CopyObject", mock.Anything).Return(&s3.CopyObjectOutput{}, nil)
		err := NewCloner(sourceS3, destinationS3, mockIAM, aws.String("src-bucket"), aws.String("dst-bucket")).Clone()
		assert.NoError(err)
		mockIAM.AssertExpectations(t)
		sourceS3.AssertExpectations(t)
		destinationS3.AssertExpectations(t)
	})
	// Policy update failure propagates and no copy is attempted.
	t.Run("Fail to update bucket policy", func(t *testing.T) {
		assert := assert.New(t)
		mockIAM := &MockIAM{}
		mockIAM.On("GetUser", mock.Anything).Return(&iam.GetUserOutput{User: &iam.User{Arn: aws.String("arn")}}, nil)
		sourceS3 := &MockS3{}
		sourceS3.On("PutBucketPolicy", mock.Anything).Return(nil, errors.New("Fail to update policy"))
		destinationS3 := &MockS3{}
		err := NewCloner(sourceS3, destinationS3, mockIAM, aws.String("src-bucket"), aws.String("dst-bucket")).Clone()
		assert.EqualError(err, "Fail to update policy")
		mockIAM.AssertExpectations(t)
		sourceS3.AssertExpectations(t)
		destinationS3.AssertExpectations(t)
	})
	// Listing failure propagates; the policy is still cleaned up.
	t.Run("Fail to list source bucket", func(t *testing.T) {
		assert := assert.New(t)
		mockIAM := &MockIAM{}
		mockIAM.On("GetUser", mock.Anything).Return(&iam.GetUserOutput{User: &iam.User{Arn: aws.String("arn")}}, nil)
		sourceS3 := &MockS3{}
		sourceS3.On("PutBucketPolicy", mock.Anything).Return(&s3.PutBucketPolicyOutput{}, nil)
		sourceS3.On("ListObjectsV2", mock.Anything).Return(nil, errors.New("Fail to list objects"))
		sourceS3.On("DeleteBucketPolicy", mock.Anything).Return(&s3.DeleteBucketPolicyOutput{}, nil)
		destinationS3 := &MockS3{}
		err := NewCloner(sourceS3, destinationS3, mockIAM, aws.String("src-bucket"), aws.String("dst-bucket")).Clone()
		assert.EqualError(err, "Fail to list objects")
		mockIAM.AssertExpectations(t)
		sourceS3.AssertExpectations(t)
		destinationS3.AssertExpectations(t)
	})
	// IAM lookup failure short-circuits before any S3 call.
	t.Run("Fail to get iam/user", func(t *testing.T) {
		assert := assert.New(t)
		mockIAM := &MockIAM{}
		mockIAM.On("GetUser", mock.Anything).Return(nil, errors.New("Fail to get iam/user"))
		sourceS3 := &MockS3{}
		destinationS3 := &MockS3{}
		err := NewCloner(sourceS3, destinationS3, mockIAM, aws.String("src-bucket"), aws.String("dst-bucket")).Clone()
		assert.EqualError(err, "Fail to get iam/user")
		mockIAM.AssertExpectations(t)
		sourceS3.AssertExpectations(t)
		destinationS3.AssertExpectations(t)
	})
}
|
<gh_stars>10-100
// Package entry point: re-export the opentracing middleware, lifecycle
// hooks, and error utility under a single module.
const opentracingMiddleware = require('./middleware');
const { opentracingBegin, opentracingEnd, opentracingError } = require('./hooks');
const { setOpentracingError } = require('./utils');
module.exports = {
  opentracingMiddleware,
  opentracingBegin,
  opentracingEnd,
  opentracingError,
  setOpentracingError
};
|
<filename>src/language/language.repository.ts
import { EntityRepository, Repository } from 'typeorm';
import { CreateLanguageDto } from './dto/create-language.dto';
import { Language } from './entities/language.entity';
@EntityRepository(Language)
export class LanguageRepository extends Repository<Language> {
  /**
   * Persist name/locale onto a language entity.
   *
   * @param saveLanguageDto payload carrying the language name and locale
   * @param language entity to update; defaults to a new Language (create)
   * @returns the saved Language entity
   */
  async saveLanguage(
    // Bugfix: the parameter type was the redundant union
    // `CreateLanguageDto | CreateLanguageDto` (likely a copy/paste typo);
    // collapsed to the single type — type-identical for all callers.
    saveLanguageDto: CreateLanguageDto,
    language = new Language(),
  ) {
    const { name, locale } = saveLanguageDto;
    language.name = name;
    language.locale = locale;
    await language.save();
    return language;
  }
}
|
import { AppComponent } from './app.component';
import { PageFooterComponent } from './page-footer/page-footer.component';
import { PageHeaderComponent } from './page-header/page-header.component';
// Central list of components declared by the app module, so the
// declarations array is maintained in one place.
export const Components = [
  AppComponent,
  PageFooterComponent,
  PageHeaderComponent
];
|
<filename>src/main/java/net/blay09/mods/cookingforblockheads/compat/VanillaAddon.java
package net.blay09.mods.cookingforblockheads.compat;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import net.blay09.mods.cookingforblockheads.CookingForBlockheads;
import net.blay09.mods.cookingforblockheads.api.CookingForBlockheadsAPI;
import net.blay09.mods.cookingforblockheads.api.FoodStatsProvider;
import net.blay09.mods.cookingforblockheads.api.ToastHandler;
import net.blay09.mods.cookingforblockheads.api.event.FoodRegistryInitEvent;
import net.blay09.mods.cookingforblockheads.api.SinkHandler;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemFood;
import net.minecraft.item.ItemStack;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.oredict.OreDictionary;
/**
 * Vanilla-Minecraft integration: registers sink handlers (items the kitchen
 * sink can "clean"), a toaster handler for bread, non-food recipes, and acts
 * as the food-stats provider for vanilla ItemFood items.
 */
public class VanillaAddon implements FoodStatsProvider {
    public VanillaAddon() {
        // Sink resets damage/meta to 0 (e.g. washes dyed wool/carpet white).
        SinkHandler simpleHandler = new SinkHandler() {
            @Override
            public ItemStack getSinkOutput(ItemStack itemStack) {
                ItemStack result = itemStack.copy();
                result.setItemDamage(0);
                return result;
            }
        };
        CookingForBlockheadsAPI.addSinkHandler(new ItemStack(Blocks.wool, 1, OreDictionary.WILDCARD_VALUE), simpleHandler);
        CookingForBlockheadsAPI.addSinkHandler(new ItemStack(Blocks.carpet, 1, OreDictionary.WILDCARD_VALUE), simpleHandler);
        // Sink strips dye from leather armor.
        SinkHandler armorHandler = new SinkHandler() {
            @Override
            public ItemStack getSinkOutput(ItemStack itemStack) {
                if(itemStack.getItem() instanceof ItemArmor) {
                    ((ItemArmor) itemStack.getItem()).removeColor(itemStack);
                }
                return itemStack;
            }
        };
        CookingForBlockheadsAPI.addSinkHandler(new ItemStack(Items.leather_boots, 1, OreDictionary.WILDCARD_VALUE), armorHandler);
        CookingForBlockheadsAPI.addSinkHandler(new ItemStack(Items.leather_chestplate, 1, OreDictionary.WILDCARD_VALUE), armorHandler);
        CookingForBlockheadsAPI.addSinkHandler(new ItemStack(Items.leather_helmet, 1, OreDictionary.WILDCARD_VALUE), armorHandler);
        CookingForBlockheadsAPI.addSinkHandler(new ItemStack(Items.leather_leggings, 1, OreDictionary.WILDCARD_VALUE), armorHandler);
        // Rinsing a milk bucket yields back the empty bucket.
        CookingForBlockheadsAPI.addSinkHandler(new ItemStack(Items.milk_bucket), new SinkHandler() {
            @Override
            public ItemStack getSinkOutput(ItemStack itemStack) {
                return new ItemStack(Items.bucket, 1);
            }
        });
        // Rinsing any potion yields an empty glass bottle.
        CookingForBlockheadsAPI.addSinkHandler(new ItemStack(Items.potionitem, 1, OreDictionary.WILDCARD_VALUE), new SinkHandler() {
            @Override
            public ItemStack getSinkOutput(ItemStack itemStack) {
                return new ItemStack(Items.glass_bottle, 1);
            }
        });
        // Bread toasts into the mod's toast item.
        CookingForBlockheadsAPI.addToastHandler(new ItemStack(Items.bread), new ToastHandler() {
            @Override
            public ItemStack getToasterOutput(ItemStack itemStack) {
                return new ItemStack(CookingForBlockheads.itemToast);
            }
        });
        MinecraftForge.EVENT_BUS.register(this);
        CookingForBlockheadsAPI.setFoodStatsProvider(this);
    }

    @SubscribeEvent
    public void onFoodRegistryInit(FoodRegistryInitEvent event) {
        // Cake and sugar are craftable here but should not count as foods.
        event.registerNonFoodRecipe(new ItemStack(Items.cake));
        event.registerNonFoodRecipe(new ItemStack(Items.sugar));
    }

    @Override
    public float getSaturation(ItemStack itemStack, EntityPlayer entityPlayer) {
        // NOTE(review): assumes the stack's item is an ItemFood (unchecked cast);
        // func_150906_h is an obfuscated 1.7.10 name — presumably the saturation
        // modifier getter, confirm against the MCP mappings.
        ItemFood item = (ItemFood) itemStack.getItem();
        return item.func_150906_h(itemStack);
    }

    @Override
    public int getFoodLevel(ItemStack itemStack, EntityPlayer entityPlayer) {
        // NOTE(review): same unchecked cast; func_150905_g is presumably the
        // hunger-restored getter — confirm against the MCP mappings.
        ItemFood item = (ItemFood) itemStack.getItem();
        return item.func_150905_g(itemStack);
    }
}
|
<gh_stars>1-10
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {TEXT, LIST, DATE, TIME, DATETIME, getTransformType} from 'constants/DataTypes';
/**
 * Maps a convert-data-type form submission to the field-transform API payload.
 *
 * @param form form values: columnName, newFieldName, dropSourceField,
 *             columnType, toType, plus transform-specific extras
 * @returns a `{ type: 'field', ... }` transform descriptor
 */
export default function mapConvertDataType(form) {
  const { columnName, newFieldName, dropSourceField, columnType, toType, ...data } = form;
  const transformType = getTransformType(data, columnType, toType);
  // The temporal types share the convertType/desiredType parameters below;
  // hoisted so the membership test isn't rebuilt four times.
  const TEMPORAL_TYPES = [DATE, TIME, DATETIME];
  let fieldTransformation = {
    ...data,
    type: transformType
  };
  // hacky tweaking of params for different transform types
  if (columnType === LIST && toType === TEXT) {
    delete fieldTransformation.format;
  }
  if (fieldTransformation.type === 'SimpleConvertToType') {
    fieldTransformation.dataType = toType;
  } else {
    if (TEMPORAL_TYPES.includes(toType)) {
      fieldTransformation.desiredType = toType;
    }
    if (TEMPORAL_TYPES.includes(columnType) && toType !== TEXT) {
      fieldTransformation.convertType = columnType;
      fieldTransformation.desiredType = toType;
    }
    if (TEMPORAL_TYPES.includes(columnType) && toType === TEXT) {
      fieldTransformation.convertType = columnType;
    }
  }
  // remove other arguments for JSON — ConvertToJSON takes no parameters
  if (fieldTransformation.type === 'ConvertToJSON') {
    fieldTransformation = {type: 'ConvertToJSON'};
  }
  return {
    type: 'field',
    sourceColumnName: columnName,
    newColumnName: newFieldName,
    dropSourceColumn: dropSourceField,
    fieldTransformation
  };
}
|
/**
 * Options for the type generator.
 * NOTE(review): declaration file only — field semantics inferred from names;
 * confirm against the implementation.
 */
declare type GeneratorOptions = {
    /** Presumably the path the generated types are written to. */
    outputFile?: string;
    /** Presumably the namespace the generated declarations are wrapped in. */
    namespace?: string;
};
/** Runs type generation with the given (optional) options. */
export declare function GenerateTypes(options?: GeneratorOptions): Promise<void>;
export {};
|
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns้็จAPACHE LICENSE 2.0ๅผๆบๅ่ฎฎ๏ผๆจๅจไฝฟ็จ่ฟ็จไธญ๏ผ้่ฆๆณจๆไปฅไธๅ ็น๏ผ
*
* 1.่ฏทไธ่ฆๅ ้คๅไฟฎๆนๆ น็ฎๅฝไธ็LICENSEๆไปถใ
* 2.่ฏทไธ่ฆๅ ้คๅไฟฎๆนGunsๆบ็ ๅคด้จ็็ๆๅฃฐๆใ
* 3.่ฏทไฟ็ๆบ็ ๅ็ธๅ
ณๆ่ฟฐๆไปถ็้กน็ฎๅบๅค๏ผไฝ่
ๅฃฐๆ็ญใ
* 4.ๅๅๆบ็ ๆถๅ๏ผ่ฏทๆณจๆ่ฝฏไปถๅบๅค https://gitee.com/stylefeng/guns
* 5.ๅจไฟฎๆนๅ
ๅ๏ผๆจกๅๅ็งฐ๏ผ้กน็ฎไปฃ็ ็ญๆถ๏ผ่ฏทๆณจๆ่ฝฏไปถๅบๅค https://gitee.com/stylefeng/guns
* 6.่ฅๆจ็้กน็ฎๆ ๆณๆปก่ถณไปฅไธๅ ็น๏ผๅฏ็ณ่ฏทๅไธๆๆ
*/
package cn.stylefeng.roses.kernel.db.api.sqladapter.table;
import cn.stylefeng.roses.kernel.db.api.sqladapter.AbstractSql;
import lombok.Getter;
/**
 * SQL for fetching all fields (column name and column comment) of a given
 * table, with a dialect-specific statement per supported database.
 * (Javadoc translated from the original Chinese.)
 *
 * @author fengshuonan
 * @date 2019-07-16-13:06
 */
@Getter
public class TableFieldListSql extends AbstractSql {

    // MySQL: bind parameters are (table_name, table_schema).
    @Override
    protected String mysql() {
        return "select COLUMN_NAME as columnName,COLUMN_COMMENT as columnComment from information_schema.COLUMNS where table_name = ? and table_schema = ?";
    }

    // SQL Server: joins syscolumns with extended properties to recover the
    // column comments; bind parameter is the table name.
    @Override
    protected String sqlServer() {
        return "SELECT A.name as columnName,\n" +
                " CONVERT(varchar(200), isnull(G.[value], '')) as columnComment\n" +
                " FROM syscolumns A\n" +
                " Left Join systypes B On A.xusertype= B.xusertype\n" +
                " Inner Join sysobjects D On A.id= D.id\n" +
                " and D.xtype= 'U'\n" +
                " and D.name<> 'dtproperties'\n" +
                " Left Join syscomments E on A.cdefault= E.id\n" +
                " Left Join sys.extended_properties G on A.id= G.major_id\n" +
                " and A.colid= G.minor_id\n" +
                " Left Join sys.extended_properties F On D.id= F.major_id\n" +
                " and F.minor_id= 0\n" +
                " where d.name= ? \n" +
                " Order By A.id,\n" +
                " A.colorder";
    }

    // PostgreSQL: comments come from col_description; bind parameter is the
    // relation (table) name.
    @Override
    protected String pgSql() {
        return "SELECT a.attname as \"columnName\" , col_description(a.attrelid,a.attnum) as \"columnComment\"\n" +
                "FROM pg_class as c,pg_attribute as a " +
                "where c.relname = ? and a.attrelid = c.oid and a.attnum>0";
    }

    // Oracle: reads the current user's column comments; bind parameter is the
    // table name.
    @Override
    protected String oracle() {
        return "select column_name as columnName, comments as columnComment from user_col_comments where Table_Name= ?";
    }
}
|
<reponame>zs5460/my<gh_stars>1-10
package my
import (
"testing"
)
// TestTest is a table-driven test of Test(str, pattern): each row pairs an
// input string with either a named built-in pattern (idcard, email, ...) or
// a raw regular expression, plus the expected match result.
func TestTest(t *testing.T) {
	var tests = []struct {
		str     string // input to validate
		pattern string // named pattern or raw regex
		want    bool   // expected result of Test
	}{
		{"430001001005001", "idcard", false},
		{"430001200010050012", "idcard", true},
		{"43000120001005001x", "idcard", true},
		{"43000120001005001X", "idcard", true},
		{"4300012000100500123456", "idcard", false},
		{"zhousong", "english", true},
		{"zs5460", "english", false},
		{"ไธญๅไบบๆฐๅ
ฑๅๅฝ", "chinese", true},
		{"ๅ
ฌๅ
2000ๅนด", "chinese", false},
		{"zs5460", "username", true},
		{"Zs5460", "username", false},
		{"<EMAIL>", "email", true},
		{"<EMAIL>", "email", false},
		{"<EMAIL>", "email", false},
		{"<EMAIL>", "email", true},
		{"123456", "zip", true},
		{"012345", "zip", false},
		{"1234567", "zip", false},
		{"274619", "qq", true},
		{"0123", "qq", false},
		{"12345678900", "qq", false},
		{"731-12345678", "phone", true},
		{"0731-12345678", "phone", true},
		{"0731-12345678-01", "phone", true},
		{"0731-123456789", "phone", false},
		{"13073112345", "mobile", true},
		{"19973112345", "mobile", true},
		{"12073112345", "mobile", false},
		{"http://www.54600.net/index.htm", "url", true},
		{"https://www.google.com/?q=test", "url", true},
		{"ftp://54600.net/", "url", true},
		{"123456", "url", false},
		{"1.2.4.8", "ip", true},
		{"172.16.17.32", "ip", true},
		{"123.456", "ip", false},
		{"Zs123456", "password", true},
		{"zs5460", "password", false},
		{"Zs5460", "password", false},
		{"zs12345678", "password", false},
		// Raw regexes are also accepted as patterns.
		{"5460", `^\d{4}$`, true},
		{"54600", `^\d{4}$`, false},
	}
	for _, test := range tests {
		if got := Test(test.str, test.pattern); got != test.want {
			t.Errorf("Test(%q,%q) = %v, want %v",
				test.str,
				test.pattern,
				got,
				test.want)
		}
	}
}
|
<reponame>tarasowski/vscode-tailwindcss
const {
workspace,
languages,
CompletionItem,
CompletionItemKind,
Position,
Range
} = require('vscode')
const _ = require('lodash')
const generateClasses = require('./tailwind')
const fileTypes = require('./filetypes')
// Cache of generated Tailwind class names; refreshed by the tailwind.js
// file watcher registered in activate().
let classes
// Characters that trigger the completion provider inside class attributes.
const triggerCharacters = ['"', "'", ' ', '.']
/**
 * Extension entry point: generates the Tailwind class list, watches
 * tailwind.js to keep it fresh, and registers a completion provider per
 * supported file type / class-attribute pattern.
 *
 * @param context extension context used to register disposables for cleanup
 */
async function activate(context) {
  // Generate classes and set them on activation
  classes = await generateClasses()
  const fileSystemWatcher = workspace.createFileSystemWatcher('**/tailwind.js')
  // Changes to tailwind.js should invalidate above cache
  fileSystemWatcher.onDidChange(async () => {
    classes = await generateClasses()
  })
  // This handles the case where the project didn't have tailwind.js
  // but was created after VS Code was initialized
  fileSystemWatcher.onDidCreate(async () => {
    classes = await generateClasses()
  })
  // If the config is deleted, it is safe to say that the user isn't going to
  // use tailwind and we should remove autocomplete suggestions
  fileSystemWatcher.onDidDelete(() => {
    classes = []
  })
  const disposables = _.flatMap(fileTypes, ({ extension, patterns }) => {
    return _.map(patterns, pattern =>
      languages.registerCompletionItemProvider(
        extension,
        {
          provideCompletionItems: (document, position) => {
            // Get range including all characters in the current line
            // till the current position
            const range = new Range(new Position(position.line, 0), position)
            // Get text in current line
            const textInCurrentLine = document.getText(range)
            // Bugfix: String.prototype.match returns null when the line
            // doesn't match; the original dereferenced [1] unconditionally
            // and threw a TypeError on any non-matching line.
            const lineMatch = textInCurrentLine.match(pattern.regex)
            if (!lineMatch) {
              return []
            }
            const classesInCurrentLine = lineMatch[1].split(pattern.splitCharacter)
            return _.chain(classes)
              .difference(classesInCurrentLine)
              .map(classItem => {
                return new CompletionItem(
                  classItem,
                  CompletionItemKind.Variable
                )
              })
              .value()
          }
        },
        ...triggerCharacters
      )
    )
  })
  // Bugfix: the original pushed the raw array (a single non-disposable item)
  // and never registered the watcher — spread the providers and include the
  // watcher so everything is disposed with the extension.
  context.subscriptions.push(fileSystemWatcher, ...disposables)
}
// No explicit cleanup needed on deactivation.
function deactivate() {}
exports.activate = activate
exports.deactivate = deactivate
|
<filename>app/src/main/java/com/example/ridorianto/bangun_datar/Persegi.java
package com.example.ridorianto.bangun_datar;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.content.Intent;
import android.widget.TextView;
import android.widget.EditText;
import android.widget.Button;
import android.widget.Toast;
// Activity that computes the area from a user-entered length and width.
// NOTE(review): despite the class name ("Persegi" = square), the calculation
// is length * width — i.e. a rectangle area. Comments translated from
// Indonesian; user-facing strings left untouched.
public class Persegi extends AppCompatActivity {
    // Declaration of the UI component fields
    EditText p, l;       // length ("panjang") and width ("lebar") inputs
    Button cek;          // "check"/calculate button
    TextView hasil;      // result display
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Load the XML layout
        setContentView(R.layout.activity_persegi);
        // Bind the fields to their component IDs
        p = (EditText)findViewById(R.id.panjang);
        l = (EditText)findViewById(R.id.lebar);
        cek = (Button) findViewById(R.id.button_cek);
        hasil = (TextView)findViewById(R.id.hasil);
        cek.setOnClickListener(new View.OnClickListener(){
            @Override
            public void onClick(View v){
                // If neither length nor width is filled in, show a notification
                if(p.length()==0 && l.length()==0){
                    Toast.makeText(getApplication(),"Panjang dan Lebar masih Kosong, Gan!",Toast.LENGTH_LONG).show();
                }
                // Notification shown when the length is missing
                else if (p.length()==0){
                    Toast.makeText(getApplication(),"Panjangnya lupa di isi ya...", Toast.LENGTH_LONG).show();
                }
                // Notification shown when the width is missing
                else if (l.length()==0){
                    Toast.makeText(getApplication(),"Lebarnya Kosong tuh, Gan...",Toast.LENGTH_LONG).show();
                }
                else{
                    // Parse the inputs and compute the area.
                    // NOTE(review): the local doubles p and l shadow the
                    // EditText fields of the same names.
                    String isipanjang = p.getText().toString();
                    String isilebar = l.getText().toString();
                    double p = Double.parseDouble(isipanjang);
                    double l = Double.parseDouble(isilebar);
                    double hs = Persegi(p,l);
                    String output = String.valueOf(hs);
                    hasil.setText(output.toString());
                }
            }
        });
    }
    // Area calculation: length * width.
    public double Persegi(double p, double l){return p*l;}
}
<filename>snail/src/test/java/com/acgist/snail/gui/event/ExitEvent.java
package com.acgist.snail.gui.event;
import com.acgist.snail.gui.event.adapter.ExitEventAdapter;
/**
* <p>GUI้ๅบ็ชๅฃไบไปถ</p>
*
* @author acgist
*/
public final class ExitEvent extends ExitEventAdapter {

    /** Singleton instance, created eagerly at class-load time. */
    private static final ExitEvent INSTANCE = new ExitEvent();

    /**
     * Returns the shared exit-event instance.
     * The redundant {@code final} modifier was removed: static methods
     * cannot be overridden, so it had no effect.
     *
     * @return singleton instance
     */
    public static GuiEvent getInstance() {
        return INSTANCE;
    }

    /** Private constructor: instances are only obtainable via {@link #getInstance()}. */
    private ExitEvent() {
    }

}
|
<reponame>LostPlayer99/node-bluez<gh_stars>10-100
const Agent = require("./Agent");
/**
 * Pairing agent that always answers requests with a fixed, preconfigured PIN.
 */
class StaticKeyAgent extends Agent {
    /**
     * @param bluez       Bluez instance, passed through to the base Agent.
     * @param DbusObject  D-Bus object, passed through to the base Agent.
     * @param {string|number} pin  Static PIN used for every pairing request.
     */
    constructor(bluez, DbusObject, pin) {
        super(bluez, DbusObject);
        this.pin = pin;
    }

    // Answers a PIN-code request with the static pin rendered as a string.
    RequestPinCode(device, callback) {
        callback(null, this.pin.toString());
    }

    // Answers a numeric passkey request with the pin parsed as a base-10 integer.
    // NOTE(review): a non-numeric pin yields NaN here — confirm pins are numeric.
    RequestPasskey(device, callback) {
        callback(null, parseInt(this.pin, 10));
    }
}
module.exports = StaticKeyAgent;
<filename>1-2/2020 ComputingChallenge/CX.cpp<gh_stars>1-10
// Print n asterisks to stdout (no trailing newline); prints nothing for n <= 0.
void f(int n){
    while (n-- > 0) {
        putchar('*');
    }
}
|
require_relative './generator/generator'
|
#!/bin/bash
# Activate the project's virtual environment so run_ensemble.py sees its dependencies.
source ./venv/bin/activate
# Launch the ensemble run with key=value arguments: run timestamp, CV fold,
# and experiment identifier.
python run_ensemble.py \
    exec_time=2021-1031-2310 \
    fold=4 \
    exp=exp33_ensemble_lgbm_1
def _get_optimizers(self):
optimizers = []
for param in self.parameters():
optimizer = torch.optim.Adam(param, lr=self.config['learning_rate'])
optimizers.append(optimizer)
return optimizers |
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
//const UserSchema = require('./user.js');

// Schema for a note document: free-form title and content plus the author's name.
// Timestamp fields are present but currently disabled.
const noteSchema = new Schema({
    title: { type: String },
    content: { type: String },
    author: { type: String },
    // created: { type: Date },
    // lastUpdated: { type : Date, default: new Date() },
});

// Compile the schema into the 'Note' model and export it.
const Note = mongoose.model('Note', noteSchema);
module.exports = Note;
|
#!/bin/sh
# Sync tmux and vim configuration from this repository into the home
# directory, then reload both. Abort on the first failing command.
set -e
echo "pulling my recent config"
git pull

echo "Updating tmux config"
cat tmux_config.conf > ~/.tmux.conf
echo "done with Tmux!!"

echo "Updating vim config"
echo "putting custom/extra config file to my_configs.vim"
cat vim_config.vim > ~/.vim_runtime/my_configs.vim

echo "Installing all plugins using Vundle"
vim +PluginInstall +qall

echo "changing directory to ~/.vim_runtime"
cd ~/.vim_runtime

echo "Git pull with rebase - awesome vimrc"
git pull --rebase

echo "source vimrc file"
# FIX: was `vim vim -c ~/.vimrc`, which opened a file literally named "vim"
# and treated the vimrc path as an ex command. Source the vimrc, then quit.
vim -c "source ~/.vimrc" -c "qall"

echo "source tmux file"
tmux source ~/.tmux.conf

echo "Successfully updated vim config!"
|
package util
import (
"crypto/md5"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"github.com/astaxie/beego"
"io/ioutil"
"log"
"net/http"
)
// GetMD5 returns the hex-encoded MD5 digest of password.
//
// NOTE(review): the digest is written to the log below; consider dropping
// these debug lines if password hashes must never appear in logs.
func GetMD5(password string) string {
	Md5Inst := md5.New()
	Md5Inst.Write([]byte(password))
	Result := Md5Inst.Sum(nil)
	// Both log lines print the same hex encoding of the digest.
	log.Printf("ๆ ผๅผๅ>>>>>>>%x\n", Result)
	log.Print("hex่งฃ็ >>>>>>>", hex.EncodeToString(Result), "\n")
	// hex.EncodeToString yields the same string fmt.Sprintf("%x", ...) did,
	// without re-parsing a format string.
	return hex.EncodeToString(Result)
}
// GeneratePassword builds a default password from a fixed prefix plus the
// tail of the mobile number (everything after the first 7 characters) and
// returns its MD5 hex digest.
//
// NOTE(review): "<PASSWORD>" looks like a redacted literal — restore the
// real prefix before shipping.
func GeneratePassword(mobile string) string {
	b := []byte(mobile)
	// FIX: b[7:] panicked for inputs shorter than 7 bytes; guard the slice.
	suffix := ""
	if len(b) >= 7 {
		suffix = string(b[7:])
	}
	password := "<PASSWORD>" + suffix
	return GetMD5(password)
}
// ๆทๅฎapi
// type IPBody struct {
// Code int
// Data struct {
// Ip string
// Country string
// Area string
// Region string
// City string
// County string
// Isp string
// Country_id string
// area_id string
// Region_id string
// City_id string
// County_id string
// Isp_id string
// }
// }
// ็พๅบฆapi
// IPBody mirrors the JSON response of the IP-geolocation service.
//
// NOTE(review): the lowercase fields (province, y, x) are unexported, so
// encoding/json cannot populate them during Unmarshal — they always stay
// zero-valued. Dtreet/Atreet_number look like typos of Street/Street_number.
// They are left unchanged here because renaming exported/unexported fields
// would break same-package users; export them and add json tags if the
// values are actually needed.
type IPBody struct {
	Address string
	Content struct {
		Address_detail struct {
			province string // unexported: never filled by json.Unmarshal
			City     string
			District string
			Dtreet        string // likely a typo of "Street"
			Atreet_number string // likely a typo of "Street_number"
			City_code     int
		}
		Address string
		Point struct {
			y float32 // unexported: never filled by json.Unmarshal
			x float32 // unexported: never filled by json.Unmarshal
		}
	}
	Status int
}
// GetIpGeography resolves the geographic location of ip via the configured
// "ipService" endpoint and unmarshals the JSON response into objBody.
// It returns a non-nil error when the request fails, the service answers
// with a non-200 status, or the body cannot be read or parsed.
func GetIpGeography(ip string, objBody *IPBody) error {
	ipService := beego.AppConfig.String("ipService")
	res, err := http.Get(fmt.Sprintf(ipService, ip))
	if err != nil {
		beego.Error(err)
		// FIX: execution previously continued here, so the deferred
		// res.Body.Close() below dereferenced a nil response. Fail fast.
		return err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		beego.Error("่ฏทๆฑๅฐ็ไฝ็ฝฎ้่ฏฏ: ", res.Status)
		return errors.New("่ฏทๆฑๅฐ็ไฝ็ฝฎ้่ฏฏ๏ผ" + res.Status)
	}
	bodyByte, err := ioutil.ReadAll(res.Body)
	if err != nil {
		beego.Error("ๅฐ็ไฝ็ฝฎ่งฃๆๅคฑ่ดฅ๏ผ", err)
		return err
	}
	// objBody is already a pointer; taking its address again (&objBody)
	// was unnecessary. Also surface Unmarshal errors instead of dropping them.
	if err := json.Unmarshal(bodyByte, objBody); err != nil {
		beego.Error(err)
		return err
	}
	return nil
}
|
#!/usr/bin/env bash
# Detect which `ls` flavor is in use: GNU ls accepts --color, BSD/macOS ls
# does not, and each reads a differently formatted color variable.
if ls --color > /dev/null 2>&1; then
	# GNU `ls` reads LS_COLORS (colon-separated key=value pairs).
	export LS_COLORS='no=00:fi=00:di=01;31:ln=01;36:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.gz=01;31:*.bz2=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.avi=01;35:*.fli=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.ogg=01;35:*.mp3=01;35:*.wav=01;35:';
else
	# macOS `ls` reads LSCOLORS (fixed-position fg/bg color code pairs).
	export LSCOLORS='BxBxhxDxfxhxhxhxhxcxcx';
fi;
|
export { default } from "./ProvidersBrowse";
|
<gh_stars>1-10
#ifndef INCLUDED_CORE_ARMOR_COMPONENT_H
#define INCLUDED_CORE_ARMOR_COMPONENT_H
#include "i_armor_component.h"
#include "core/property_loader.h"
#include "platform/export.h"
// Component storing a single armor value for an entity.
class ArmorComponent : public IArmorComponent
{
public:
    ArmorComponent();
    // Accessors for the current armor amount.
    virtual void SetCurrentArmor( int32_t currentArmor );
    virtual int32_t GetCurrentArmor()const;
protected:
    // The factory is allowed to construct/populate components directly.
    friend class ComponentFactory;
    int32_t mCurrentArmor;
private:
public:
    friend class ::boost::serialization::access;
    template<class Archive>
    void serialize( Archive& ar, const unsigned int version );
};

// Boost.Serialization: archive the base-interface part, then the armor value.
template<class Archive>
void ArmorComponent::serialize( Archive& ar, const unsigned int version )
{
    //NOTE: generated archive for this class
    ar& boost::serialization::base_object<IArmorComponent>( *this );
    ar& mCurrentArmor;
}
// Loader that binds serialized property values onto an ArmorComponent.
class ArmorComponentLoader : public ComponentLoader<ArmorComponent>
{
public:
    DEFINE_COMPONENT_LOADER_BASE( ArmorComponentLoader )
private:
    virtual void BindValues();
protected:
    ArmorComponentLoader();
    friend class ComponentLoaderFactory;
};

// Register the component with the export machinery under the key "armor_component".
REAPING2_CLASS_EXPORT_KEY2( ArmorComponent, ArmorComponent, "armor_component" );
#endif//INCLUDED_CORE_ARMOR_COMPONENT_H
//command: "classgenerator.exe" -g "component" -c "armor_component" -m "int32_t-currentArmor"
|
<reponame>Donkey-Tao/TFKit
//
// UIView+TFExtension.h
// UIView็ๅ็ฑป,ไธบไบ็ฎๅviewไธญๅฐบๅฏธไน็ฑป็่ฎพ็ฝฎ
//
// Created by Donkey-Tao on 15/2/29.
// Copyright ยฉ 2015ๅนด http://taofei.me All rights reserved.
//
#import <UIKit/UIKit.h>
// Category adding convenience frame accessors to UIView.
@interface UIView (TFExtension)

/** Size */
@property (nonatomic,assign) CGSize size;
/** Width */
@property (nonatomic,assign) CGFloat width;
/** Height */
@property (nonatomic,assign) CGFloat height;
/** x */
@property (nonatomic,assign) CGFloat x;
/** y */
@property (nonatomic,assign) CGFloat y;
/** centerX */
@property (nonatomic,assign) CGFloat centerX;
/** centerY */
@property (nonatomic,assign) CGFloat centerY;

/** Declaring @property in a category only generates the accessor method
    declarations — no implementations and no underscore-prefixed ivars. */

/**
 * Returns whether this view is genuinely visible on the key window.
 */
-(BOOL)tf_isShowingOnKeyWindow;

@end
|
<gh_stars>0
const AWS = require("aws-sdk");
const codeDeploy = new AWS.CodeDeploy({ apiVersion: "2014-10-06" });
const lambda = new AWS.Lambda();
exports.handler = (event, context, callback) => {
console.log("Entering PostTraffic Hook!");
const deploymentId = event.DeploymentId;
const lifecycleEventHookExecutionId = event.LifecycleEventHookExecutionId;
const functionToTest = process.env.NewVersion;
console.log(`Testing new function version: ${functionToTest}`);
const lambdaParams = {
FunctionName: functionToTest,
InvocationType: "RequestResponse"
};
let status = "Failed";
lambda.invoke(lambdaParams, (error, data) => {
console.log(`Lambda Data ${JSON.stringify(data)}`);
const response = JSON.parse(data.Payload);
if (response.body === "Hello World v5") {
status = "Succeeded";
};
const params = {
deploymentId,
lifecycleEventHookExecutionId,
status // status can be "Succeeded" or "Failed"
};
return codeDeploy.putLifecycleEventHookExecutionStatus(params).promise()
.then(data => callback(null, "Validation test succeeded"))
.catch(err => callback("Validation test failed"));
});
};
|
#!/bin/sh
# Entry point: run the Node application.
node index.js
/**
 * An alias for {}
 *
 * NOTE(review): this ambient declaration shadows TypeScript's built-in
 * `object` type (which excludes primitives), while `{}` accepts any
 * non-null/undefined value — confirm the widening is intentional.
 */
declare type object = {}
|
#!/bin/bash
# Copyright (c) 2013 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Require the data directory as the only argument and work inside it.
if [ -d "$1" ]; then
  cd "$1"
else
  echo "Usage: $0 <datadir>" >&2
  echo "Removes obsolete AsiaDevelopCoin database files" >&2
  exit 1
fi

# Detect the datadir layout generation from which marker files are present.
# Later (newer-layout) checks overwrite LEVEL, so the newest match wins.
LEVEL=0
if [ -f wallet.dat -a -f addr.dat -a -f blkindex.dat -a -f blk0001.dat ]; then LEVEL=1; fi
if [ -f wallet.dat -a -f peers.dat -a -f blkindex.dat -a -f blk0001.dat ]; then LEVEL=2; fi
if [ -f wallet.dat -a -f peers.dat -a -f coins/CURRENT -a -f blktree/CURRENT -a -f blocks/blk00000.dat ]; then LEVEL=3; fi
if [ -f wallet.dat -a -f peers.dat -a -f chainstate/CURRENT -a -f blocks/index/CURRENT -a -f blocks/blk00000.dat ]; then LEVEL=4; fi

case $LEVEL in
  0)
    echo "Error: no AsiaDevelopCoin datadir detected."
    exit 1
    ;;
  1)
    echo "Detected old AsiaDevelopCoin datadir (before 0.7)."
    echo "Nothing to do."
    exit 0
    ;;
  2)
    echo "Detected AsiaDevelopCoin 0.7 datadir."
    ;;
  3)
    echo "Detected AsiaDevelopCoin pre-0.8 datadir."
    ;;
  4)
    echo "Detected AsiaDevelopCoin 0.8 datadir."
    ;;
esac

# Build the lists of files/dirs obsolete for the detected level.
# NOTE: $FILES is intentionally unquoted below so the blk????.dat glob and
# word splitting expand the list — do not quote these expansions.
FILES=""
DIRS=""
if [ $LEVEL -ge 3 ]; then FILES=$(echo $FILES blk????.dat blkindex.dat); fi
if [ $LEVEL -ge 2 ]; then FILES=$(echo $FILES addr.dat); fi
if [ $LEVEL -ge 4 ]; then DIRS=$(echo $DIRS coins blktree); fi

for FILE in $FILES; do
  if [ -f $FILE ]; then
    echo "Deleting: $FILE"
    rm -f $FILE
  fi
done

for DIR in $DIRS; do
  if [ -d $DIR ]; then
    echo "Deleting: $DIR/"
    rm -rf $DIR
  fi
done

echo "Done."
package de.uni_stuttgart.visus.etfuse.misc;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.File;
import javax.swing.JPanel;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.Videoio;
import de.uni_stuttgart.visus.etfuse.gui.surface.HeatMapImagePanel;
import de.uni_stuttgart.visus.etfuse.gui.surface.VideoSurfacePanel;
/**
 * Static helpers: file-extension handling, OpenCV/AWT image conversion,
 * image-similarity metrics, coordinate transforms, and panel resizing.
 */
public class Utils {

    // Supported file extensions (lower case).
    public final static String jpeg = "jpeg";
    public final static String jpg = "jpg";
    public final static String gif = "gif";
    public final static String tiff = "tiff";
    public final static String tif = "tif";
    public final static String png = "png";
    public final static String tsv = "tsv";

    /*
     * Get the extension of a file (lower-cased, without the dot), or null
     * when the name has no usable extension.
     */
    public static String getExtension(File f) {
        String ext = null;
        String s = f.getName();
        int i = s.lastIndexOf('.');
        if (i > 0 && i < s.length() - 1) {
            ext = s.substring(i + 1).toLowerCase();
        }
        return ext;
    }

    /**
     * Converts an OpenCV Mat to a BufferedImage; the image type is chosen
     * from the Mat's channel count (1 = gray, 3 = BGR, 4 = ABGR).
     */
    public static BufferedImage Mat2BufferedImage(Mat m) {
        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (m.channels() > 1)
            type = BufferedImage.TYPE_3BYTE_BGR;
        if (m.channels() > 3)
            type = BufferedImage.TYPE_4BYTE_ABGR;
        int bufferSize = m.channels() * m.cols() * m.rows();
        byte[] b = new byte[bufferSize];
        m.get(0, 0, b); // get all the pixels
        BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
        final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
        System.arraycopy(b, 0, targetPixels, 0, b.length);
        return image;
    }

    /**
     * Computes the mean structural similarity (MSSIM) between two images,
     * one scalar value per channel. Ported from:
     * https://docs.opencv.org/2.4/doc/tutorials/highgui/video-input-psnr-ssim/video-input-psnr-ssim.html#videoinputpsnrmssim
     */
    public static Scalar computeMSSIM(Mat i1, Mat i2) {
        double C1 = 6.5025, C2 = 58.5225;
        /***************************** INITS **********************************/
        int d = CvType.CV_32F;
        Mat I1 = new Mat();
        Mat I2 = new Mat();
        i1.convertTo(I1, d); // cannot calculate on one byte large values
        i2.convertTo(I2, d);
        Mat I2_2 = I2.mul(I2); // I2^2
        Mat I1_2 = I1.mul(I1); // I1^2
        Mat I1_I2 = I1.mul(I2); // I1 * I2
        /*************************** END INITS **********************************/
        Mat mu1 = new Mat();
        Mat mu2 = new Mat(); // PRELIMINARY COMPUTING
        Imgproc.GaussianBlur(I1, mu1, new Size(11, 11), 1.5);
        Imgproc.GaussianBlur(I2, mu2, new Size(11, 11), 1.5);
        Mat mu1_2 = mu1.mul(mu1);
        Mat mu2_2 = mu2.mul(mu2);
        Mat mu1_mu2 = mu1.mul(mu2);
        Mat sigma1_2 = new Mat();
        Mat sigma2_2 = new Mat();
        Mat sigma12 = new Mat();
        Imgproc.GaussianBlur(I1_2, sigma1_2, new Size(11, 11), 1.5);
        Core.subtract(sigma1_2, mu1_2, sigma1_2);
        Imgproc.GaussianBlur(I2_2, sigma2_2, new Size(11, 11), 1.5);
        Core.subtract(sigma2_2, mu2_2, sigma2_2);
        Imgproc.GaussianBlur(I1_I2, sigma12, new Size(11, 11), 1.5);
        Core.subtract(sigma12, mu1_mu2, sigma12);
        ///////////////////////////////// FORMULA ////////////////////////////////
        Mat t1 = new Mat();
        Mat t2 = new Mat();
        Mat t3 = new Mat();
        t1 = mu1_mu2.mul(new MatOfDouble(2));
        Core.add(t1, new MatOfDouble(C1), t1); // t1 = 2*mu1_mu2 + C1
        t2 = sigma12.mul(new MatOfDouble(2));
        // FIX: was Core.add(t1, new MatOfDouble(C2), t2), which overwrote t2
        // with (2*mu1_mu2 + C1 + C2) and discarded sigma12 entirely.
        // Per the reference implementation: t2 = 2*sigma12 + C2.
        Core.add(t2, new MatOfDouble(C2), t2);
        t3 = t1.mul(t2); // t3 = ((2*mu1_mu2 + C1).*(2*sigma12 + C2))
        Core.add(mu1_2, mu2_2, t1);
        Core.add(t1, new MatOfDouble(C1), t1);
        Core.add(sigma1_2, sigma2_2, t2);
        Core.add(t2, new MatOfDouble(C2), t2);
        t1 = t1.mul(t2); // t1 =((mu1_2 + mu2_2 + C1).*(sigma1_2 + sigma2_2 + C2))
        Mat ssim_map = new Mat();
        Core.divide(t3, t1, ssim_map); // ssim_map = t3./t1;
        Scalar mssim = Core.mean(ssim_map); // mssim = average of ssim map
        System.gc();
        return mssim;
    }

    /**
     * Maps p from the coordinate system spanned by sourceFrame into the one
     * spanned by targetFrame, preserving the relative position.
     * NOTE: mutates p in place and also returns it.
     */
    public static Point transformCoordinate(Point p, Line2D sourceFrame, Line2D targetFrame) {
        Rectangle2D sourceRect = new Rectangle2D.Double(sourceFrame.getX1(), sourceFrame.getY1(),
                sourceFrame.getX2() - sourceFrame.getX1(), sourceFrame.getY2() - sourceFrame.getY1());
        Rectangle2D targetRect = new Rectangle2D.Double(targetFrame.getX1(), targetFrame.getY1(),
                targetFrame.getX2() - targetFrame.getX1(), targetFrame.getY2() - targetFrame.getY1());
        double fractionX = (p.x - sourceRect.getCenterX()) / (sourceRect.getWidth() / 2);
        double fractionY = (p.y - sourceRect.getCenterY()) / (sourceRect.getHeight() / 2);
        p.x = (int) Math.round(targetRect.getCenterX() + (fractionX * (targetRect.getWidth() / 2)));
        p.y = (int) Math.round(targetRect.getCenterY() + (fractionY * (targetRect.getHeight() / 2)));
        return p;
    }

    /**
     * Splits matrix into sideNum x sideNum equally sized cells, returned in
     * row-major order. The cells are submat views that share the input's data.
     */
    public static Mat[] splitMatrixIntoCells(Mat matrix, int sideNum) {
        int cellWidth = matrix.cols() / sideNum;
        int cellHeight = matrix.rows() / sideNum;
        Mat[] matrices = new Mat[sideNum * sideNum];
        for (int y = 0; y < sideNum; y++) {
            for (int x = 0; x < sideNum; x++) {
                int cellIndex = (y * sideNum) + x;
                matrices[cellIndex] = matrix.submat(y * cellHeight, (y + 1) * cellHeight,
                        x * cellWidth, (x + 1) * cellWidth);
            }
        }
        return matrices;
    }

    /** Returns the sum of all values in array (0.0 for an empty array). */
    public static double getArraySum(double[] array) {
        double sum = 0.0;
        for (double val : array) {
            sum += val;
        }
        return sum;
    }

    /**
     * Perceptual (CIEDE2000) distance between two packed sRGB color ints.
     */
    public static double computeSRGBDistance(int color1, int color2) {
        Color rgb1 = new Color(color1);
        Color rgb2 = new Color(color2);
        LAB lab1 = LAB.fromRGB(rgb1.getRed(), rgb1.getGreen(), rgb1.getBlue(), 0);
        LAB lab2 = LAB.fromRGB(rgb2.getRed(), rgb2.getGreen(), rgb2.getBlue(), 0);
        return LAB.ciede2000_n(lab1, lab2);
    }

    /** Resizes a video panel so it keeps the camera frame's aspect ratio. */
    public static void resizePanelToRetainAspectRatio(VideoSurfacePanel panel, JPanel panelContainer) {
        double mediaW = panel.getCamera().get(Videoio.CAP_PROP_FRAME_WIDTH);
        double mediaH = panel.getCamera().get(Videoio.CAP_PROP_FRAME_HEIGHT);
        resizePanelToRetainAspectRatio(mediaW, mediaH, panel, panelContainer);
    }

    /** Resizes a heat-map panel so it keeps the heat map's aspect ratio. */
    public static void resizePanelToRetainAspectRatio(HeatMapImagePanel panel, JPanel panelContainer) {
        double mediaW = panel.getHeatMap().cols();
        double mediaH = panel.getHeatMap().rows();
        resizePanelToRetainAspectRatio(mediaW, mediaH, panel, panelContainer);
    }

    /**
     * Shrinks the panel within its container so the given natural
     * width/height ratio is preserved (letterbox/pillarbox as needed).
     */
    private static void resizePanelToRetainAspectRatio(double normalWidth, double normalHeight,
            JPanel panel, JPanel panelContainer) {
        double mediaW = normalWidth;
        double mediaH = normalHeight;
        double mediaWidthPerHeight = mediaW / mediaH;
        int w = panelContainer.getWidth();
        int h = panelContainer.getHeight();
        double panelW = w;
        double panelH = h;
        double panelWidthPerHeight = panelW / panelH;
        // Media wider than the container: fix width, shrink height; otherwise
        // fix height, shrink width.
        if (mediaWidthPerHeight > panelWidthPerHeight)
            h = (int) Math.floor(w * Math.pow(mediaWidthPerHeight, -1));
        else
            w = (int) Math.floor(h * mediaWidthPerHeight);
        panel.setPreferredSize(new Dimension(w, h));
        panelContainer.revalidate();
    }
}
|
import '/sass/main.scss';
// // preloader
// const preloaderTL = gsap.timeline();
// preloaderTL.to('#logo', {yPercent: -20, opacity: 0, delay: 4})
// preloaderTL.to('.preloader', {transform: 'scaleY(0)', transformOrigin: 'top', delay: 1})
// custom cursor
// const cursor = document.querySelector('.cursor');
// window.onmousemove = (e) => {
// cursor.setAttribute('style', `top: ${e.pageY}px; left: ${e.pageX}px; z-index: 2;`)
// }
// navigation
// GSAP timeline for the full-screen navigation; starts paused and reversed
// so the menu is closed on load.
const tl = gsap.timeline({paused:true, reversed: true});
tl.to('.box', {height: '100vh', duration: .5, transformOrigin: 'bottom', stagger: .3})
tl.to('.mobile-logo', {opacity: '1'})
tl.to('.nav-main__content', {opacity: '1', visibility: 'visible', yPercent: -5, duration: .5, transformOrigin: 'bottom', stagger: .3})

const navIcon = document.querySelector('.nav-icon');
// Toggle the menu open/closed and lock page scrolling while it is open.
navIcon.onclick = function() {
    if (tl.reversed()) {
        this.classList.add('nav-anim')
        tl.play()
        document.body.classList.add('noScroll');
    } else {
        this.classList.remove('nav-anim')
        tl.reverse()
        document.body.classList.remove('noScroll');
    }
}

// Close the menu whenever one of its navigation links is clicked.
const allLinks = document.querySelectorAll('.list__item a');
allLinks.forEach(link => {
    link.addEventListener('click', () => {
        document.body.classList.remove('noScroll');
        tl.reverse()
        navIcon.classList.remove('nav-anim')
    })
})

// Project cards: clicking a card toggles its expanded view (see viewProj).
const projs = document.querySelectorAll('#projects .project-box')
const contents = document.querySelectorAll("#projects .project-box__content")
const links = document.querySelectorAll("#projects .project-box__link")
projs.forEach( proj => {
    proj.onclick = () => viewProj(proj)
})
// Collapse every project card back to its default, unexpanded state.
function resetProjs() {
    for (const projectBox of projs) {
        projectBox.style.height = "35rem"
    }
    for (const boxContent of contents) {
        boxContent.style.opacity = "1"
    }
    for (const boxLink of links) {
        boxLink.classList.remove("shaky")
    }
}
// Toggle one project card between its collapsed and expanded state.
// Expanding a card first collapses all others so only one is open at a time.
function viewProj(proj) {
    const content = proj.querySelector("#projects .project-box__content")
    const link = proj.querySelector("#projects .project-box__link")
    const isExpanded = proj.style.height == "60rem"

    if (isExpanded) {
        // Collapse this card back to the default state.
        proj.style.height = "35rem"
        content.style.opacity = "1"
        link.classList.remove("shaky")
    } else {
        // Collapse everything, then expand the clicked card.
        resetProjs()
        proj.style.height = "60rem"
        content.style.opacity = "0"
        link.classList.add("shaky")
    }
}
const quote = document.querySelector('q')
// Rotate to a new quote every 5 seconds.
const timer = 5000

// Fetch the quote list (a JSON gist) once, then start the rotation interval.
function getQuotes () {
    fetch('https://gist.githubusercontent.com/tiapnn/ca5f70fc803eef6c02ded745ad624c71/raw/9b2c6f5440785d7b62ee04953d5a779c3ed8b166/programming-quotes.json')
        .then(response => response.json())
        .then(data => setInterval(() => shuffleQuotes(data.data), timer))
}
getQuotes()
// Pick a random quote and swap it in behind the fade transition.
function shuffleQuotes(allQuotes) {
    const nextQuote = getRandomQuote(allQuotes)
    toggleTransition()
    changeQuote(nextQuote)
}
// Return a uniformly random element of allQuotes.
function getRandomQuote(allQuotes) {
    const index = Math.floor(Math.random() * allQuotes.length)
    return allQuotes[index]
}
// Replace the displayed quote text/citation after a short delay so the
// swap happens while the element is faded out (see toggleTransition).
function changeQuote(q) {
    setTimeout(() => {
        quote.innerHTML = q.quote
        quote.cite = q.source
    }, 300);
}
// Re-arm the fade effect: drop the "fade" class so the CSS transition can
// replay, and listen for its end to re-apply the class.
function toggleTransition() {
    // trigger toggleTransition method
    quote.removeEventListener("transitionend", onTransitionEnd);
    quote.addEventListener("transitionend", onTransitionEnd);
    // removing the class for leave state
    quote.classList.remove("fade");
}

// One-shot transition-end handler that restores the "fade" class.
function onTransitionEnd() {
    // removing the listener again so that it is triggered only once
    quote.removeEventListener("transitionend", onTransitionEnd)
    quote.classList.add("fade")
}
//scroll anchor links fixed
// Scroll the viewport to the element referenced by `hash`, offset upward by
// `adjust` pixels so the fixed header does not cover it. On any failure
// (bad selector, missing element) the hash is cleared.
let switchView = function (hash = location.hash, adjust = 350) {
    try {
        // Narrow (mobile) viewports need a smaller offset.
        let mobileView = window.matchMedia("(max-width: 426px)")
        if (mobileView.matches) adjust = 250
        let elem = document.querySelector(hash);
        let top = elem.offsetTop
        window.scrollTo(0, top - adjust)
    } catch (err) {
        // FIX: `catch (DOMException)` did not filter by exception type — it
        // merely bound the thrown value to a variable named DOMException,
        // shadowing the global. All errors were (and still are) caught here.
        location.hash = "";
    }
}
// Apply the scroll fix on initial load and on every hash change.
document.addEventListener('DOMContentLoaded', () => { switchView() });
window.onhashchange = () => { switchView() }

// Intercept in-page anchor clicks so scrolling goes through switchView
// instead of the browser's default jump.
document.querySelectorAll('a[href^="#"]').forEach(anchor => {
    anchor.onclick = (e) => {
        let target = e.target;
        e.preventDefault()
        switchView(target.attributes.href.value)
    }
})

//message to all the developers out there
const message = "Nice to see you around here! Send me a message if you want to reuse my portfolio template or you want explanations of any features! Cheers!"
console.group("Hey developer!")
console.log(message)
console.log("%c<NAME> - <EMAIL>", 'font-size:20px');
console.groupEnd()
|
#!/bin/sh
channels_url="http://omahaproxy.appspot.com/all?csv=1";
history_url="http://omahaproxy.appspot.com/history";
bucket_url="http://commondatastorage.googleapis.com/chromium-browser-official/";
base_path="$(cd "$(dirname "$0")" && pwd)/source";
source "$(nix-build --no-out-link "$base_path/update.nix" -A updateHelpers)";
### poor mans key/value-store :-) ###
ver_sha_table=""; # list of version:sha256

# sha_lookup <version>: print the cached sha256 for <version>;
# returns non-zero when the version is not in the table.
sha_lookup()
{
  version="$1";
  for ver_sha in $ver_sha_table;
  do
    if [ "x${ver_sha%:*}" = "x$version" ];
    then
      echo "${ver_sha##*:}";
      return 0;
    fi;
  done;
  return 1;
}

# sha_insert <version> <sha256>: append a version:sha256 pair to the table.
sha_insert()
{
  version="$1";
  sha256="$2";
  ver_sha_table="$ver_sha_table $version:$sha256";
}
# get_newest_ver <versions...>: print the highest version number. Prefers
# `sort -V`; falls back to a per-field numeric sort when unsupported.
get_newest_ver()
{
  versions="$(for v in $@; do echo "$v"; done)";
  if oldest="$(echo "$versions" | sort -V 2> /dev/null | tail -n1)";
  then
    echo "$oldest";
  else
    echo "$versions" | sort -t. -k 1,1n -k 2,2n -k 3,3n -k 4,4n | tail -n1;
  fi;
}

# fetch_filtered_history <channel>: list released Linux versions for the
# given channel from the omahaproxy history feed.
fetch_filtered_history()
{
  curl -s "$history_url" | sed -nr 's/^'"linux,$1"',([^,]+).*$/\1/p';
}

# get_prev_sha256 <channel> <current_version>: walk the channel history and
# print "sha256:version" for the most recent version other than the current
# one whose tarball can still be fetched.
get_prev_sha256()
{
  channel="$1";
  current_version="$2";
  for version in $(fetch_filtered_history "$channel");
  do
    [ "x$version" = "x$current_version" ] && continue;
    sha256="$(get_sha256 "$channel" "$version")" || continue;
    echo "$sha256:$version";
    return 0;
  done;
}
# get_channel_exprs <channel,version lines>: for every "channel,version"
# pair, resolve the tarball sha256 (cache first, then fetch, then fall back
# to an older version) and print a Nix attribute-set fragment per channel.
get_channel_exprs()
{
  for chline in $1;
  do
    channel="${chline%%,*}";
    version="${chline##*,}";

    echo -n "Checking if sha256 of version $version is cached..." >&2;
    if sha256="$(sha_lookup "$version")";
    then
      echo " yes: $sha256" >&2;
    else
      echo " no." >&2;
      sha256="$(get_sha256 "$channel" "$version")";
      if [ $? -ne 0 ];
      then
        echo "Whoops, failed to fetch $version, trying previous" \
             "versions:" >&2;
        sha_ver="$(get_prev_sha256 "$channel" "$version")";
        sha256="${sha_ver%:*}";
        version="${sha_ver#*:}";
      fi;
    fi;

    sha_insert "$version" "$sha256";
    # The combined value may encode "main.deb32.deb64" checksums;
    # split it into its dot-separated parts.
    main="${sha256%%.*}";
    deb="${sha256#*.}";
    deb32="${deb%.*}";
    deb64="${deb#*.}";

    echo " $channel = {";
    echo " version = \"$version\";";
    echo " sha256 = \"$main\";";
    if [ "x${deb#*[a-z0-9].[a-z0-9]}" != "x$deb" ];
    then
      echo " sha256bin32 = \"$deb32\";";
      echo " sha256bin64 = \"$deb64\";";
    fi;
    echo " };";
  done;
}
# Main: fetch the channel/version table, render a Nix attribute set for each
# channel, and regenerate sources.nix.
cd "$(dirname "$0")";
omaha="$(curl -s "$channels_url")";
versions="$(echo "$omaha" | sed -nr -e 's/^linux,([^,]+,[^,]+).*$/\1/p')";
channel_exprs="$(get_channel_exprs "$versions")";
cat > "$base_path/sources.nix" <<-EOF
# This file is autogenerated from update.sh in the parent directory.
{
$channel_exprs
}
EOF
|
#!/bin/bash
set -euo pipefail

# You can pass a specific falco rule name and only yaml files matching
# that rule will be considered. The default is "all", meaning all yaml
# files will be applied.
RULE=${1:-all}

# Replace any '/' in RULES with a '.' and any space with a dash. (K8s
# label values can not contain slashes/spaces)
RULE=$(echo "$RULE" | tr '/ ' '.-')

echo "***Testing kubectl configuration..."
kubectl version --short

# Loop forever: recreate the namespace, apply every matching event-generator
# manifest, wait, repeat.
while true; do
  RET=$(kubectl get namespaces --output=name | grep falco-event-generator || true)
  if [[ "$RET" == *falco-event-generator* ]]; then
    echo "***Deleting existing falco-event-generator namespace..."
    kubectl delete namespace falco-event-generator
  fi

  echo "***Creating falco-event-generator namespace..."
  kubectl create namespace falco-event-generator

  for file in yaml/*.yaml; do
    # Decide whether this manifest matches the requested rule.
    MATCH=0
    if [[ "${RULE}" == "all" ]]; then
      MATCH=1
    else
      RET=$(grep -E "falco.rules:.*${RULE}" $file || true)
      if [[ "$RET" != "" ]]; then
        MATCH=1
      fi
    fi
    if [[ $MATCH == 1 ]]; then
      # Pull the message(s) and rule name(s) out of the manifest for logging.
      MESSAGES=$(grep -E 'message' $file | cut -d: -f2 | tr '\n' ',')
      RULES=$(grep -E 'falco.rules' $file | cut -d: -f2 | tr '\n' ',')
      # The message uses dashes in place of spaces, convert them back to spaces
      MESSAGES=$(echo "$MESSAGES" | tr '-' ' ' | sed -e 's/ *//' | sed -e 's/,$//')
      RULES=$(echo "$RULES" | tr '-' ' '| tr '.' '/' | sed -e 's/ *//' | sed -e 's/,$//')
      echo "***$MESSAGES (Rule(s) $RULES)..."
      kubectl apply -f $file
      sleep 2
    fi
  done
  sleep 10
done
|
#!/bin/sh
# syntax: guess <word>
# Runs <word> through ispell (Lithuanian dictionary) and, for each token
# ispell emits, prints the token plus its expansion — presumably derivation
# suggestions; confirm the exact -c/-e semantics against ispell(1).
ISPELL=ispell
DICT="-d lietuviu"
for i in `echo $1 | $ISPELL $DICT -c`; do
  echo "** $i"
  echo $i | $ISPELL $DICT -e | fmt | head
done
|
#!/usr/bin/env node

/** @format */

// CLI for querying a movie/person API, with optional save-to / read-from
// local JSON cache (see utils/getMethods.js for the actual requests).
const { Command } = require("commander");
const get = require("./utils/getMethods.js");
const dotenv = require("dotenv").config();

const program = new Command();
program.version("0.0.1");

// get-persons: fetch a page of popular persons; --save writes the page to a
// JSON file, --local reads from that file instead of the network.
program
  .command("get-persons")
  .description("Make a network request to fetch most popular persons")
  .requiredOption("--page <number>", "The page of data results to fetch")
  .requiredOption("-p, --popular", "Fetch the popular persons")
  .option("-s, --save", "The page of data to JSON file")
  .option("-l, --local", "Read data from local JSON")
  .action(function handleAction(opt) {
    if (!opt.local) get.PersonsByPage(opt);
    if (opt.local) {
      // With --save, refresh the cache first, then read it back.
      if (opt.save) {
        get.PersonsByPage(opt, (page) => get.JsonPersonByPage(page));
        return;
      }
      get.JsonPersonByPage(opt.page);
    }
  });

// get-person: fetch a single person by id.
program
  .command("get-person")
  .description("Make a network request to fetch the data of a single person")
  .requiredOption("-i, --id <id>", "The id of the person")
  .action(function handleAction(option) {
    get.PersonById(option.id);
  });

// get-movies: fetch a page of movies (popular or now-playing), with the
// same save/local caching options as get-persons.
program
  .command("get-movies")
  .description("Make a network request to fetch movies")
  .requiredOption("--page <number>", "The page of movies data results to fetch")
  .option("-p, --popular", "Fetch the popular movies")
  .option("-n, --now-playing", "Fetch the movies that are playing now")
  .option("-s, --save", "The page of data to JSON file")
  .option("-l, --local", "Read data from local JSON")
  .action(function handleAction(opt) {
    if (!opt.local) get.MoviesByPage(opt.page, opt.nowPlaying, opt.save);
    if (opt.local) {
      // NOTE(review): unlike get-persons, the local+save path here does not
      // wait for the refresh before reading — confirm whether that ordering
      // is intentional.
      if (opt.save) get.MoviesByPage(opt.page, opt.nowPlaying, opt.save);
      get.JsonMoviesByPage(opt.page, opt.nowPlaying);
    }
  });

// get-movie: fetch a single movie by id, optionally with its reviews.
program
  .command("get-movie")
  .description("Make a network request to fetch the data of a single person")
  .requiredOption("-i,--id <number>", "The id of the movie")
  .option("-r, --reviews", "Fetch the reviews of the movie")
  .action(function handleAction(opt) {
    get.SingleMovie(opt.id, opt.reviews);
  });

// error on unknown commands
program.parse(process.argv);
|
/**
* Classes that implement the ServerProvider registry.
*/
package io.opensphere.core.server.impl;
|
<gh_stars>0
import React, { Component } from 'react'
// Top navigation shell for the micro-frontends container. Highlights the
// link matching the current URL path and navigates via the History API so
// no full page load occurs.
export default class App extends Component {
    render(){
        // The active link is derived from the URL on every render.
        let activeLink = location.pathname;
        // Push a new history entry without reloading the page.
        const navigateTo = url => window.history.pushState(null, null, url);
        // NOTE(review): the setState calls below reference state that is
        // never initialized or read; they appear to exist only to force a
        // re-render after pushState — confirm before refactoring them away.
        return (
            <nav className="navbar navbar-expand-lg navbar-dark bg-dark">
                <a className="navbar-brand text-white">Micro-Frontends</a>
                <button className="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarSupportedContent" aria-controls="navbarSupportedContent" aria-expanded="false" aria-label="Toggle navigation">
                    <span className="navbar-toggler-icon"></span>
                </button>
                <div className="collapse navbar-collapse" id="navbarSupportedContent">
                    <ul className="navbar-nav mr-auto">
                        <li className={activeLink.indexOf('/home') === 0 ? 'nav-item app-link active' : 'nav-item app-link'}>
                            <a style={{ cursor: 'pointer' }}
                                className="nav-link"
                                onClick={() => {
                                    this.setState({activeLink: 'home'})
                                    navigateTo("/home")
                                }}
                            >
                                Home
                            </a>
                        </li>
                        <li className={activeLink.indexOf('/contacts') === 0 ? 'nav-item app-link active' : 'nav-item app-link'}>
                            <a style={{ cursor: 'pointer' }}
                                className="nav-link"
                                onClick={() => {
                                    this.setState({activeLink: 'contacts'})
                                    navigateTo("/contacts")
                                }}
                            >
                                Contatos
                            </a>
                        </li>
                    </ul>
                </div>
            </nav>
        )
    }
}
#!/bin/sh
# DEB and SYSV distros: Debian 7 'Wheezy'
# Post-install step: apply the selected run mode (ROOT/PRIVILEGED/UNPRIVILEGED)
# to the agent's directories and init script, then (re)start the service.
serviceFile=/etc/init.d/newrelic-infra

# check the run mode
userMode=$(cat /tmp/nria_mode 2>/dev/null)

# check usermode is set
if [ -z "$userMode" ]; then
  userMode="ROOT"
fi

# check the user mode
if [ "$userMode" != "ROOT" ] && [ "$userMode" != "PRIVILEGED" ] && [ "$userMode" != "UNPRIVILEGED" ]; then
  # user mode is not valid so we set it by default: ROOT
  userMode="ROOT"
fi

if [ "$userMode" = "PRIVILEGED" ] || [ "$userMode" = "UNPRIVILEGED" ]; then
  runDir=/var/run/newrelic-infra
  installDir=/var/db/newrelic-infra
  logDir=/var/log/newrelic-infra
  configDir=/etc/newrelic-infra
  tmpDir=/tmp/nr-integrations

  # Give nri-agent ownership over it's folder
  chown -R nri-agent:nri-agent ${runDir}
  chown -R nri-agent:nri-agent ${installDir}
  chown -R nri-agent:nri-agent ${logDir}
  chown -R nri-agent:nri-agent ${configDir}
  # tmpDir may not exist yet; ignore failures.
  chown -R nri-agent:nri-agent ${tmpDir} 2>/dev/null || true

  if [ "$userMode" = "PRIVILEGED" ]; then
    failFlag=0
    # Give the Agent kernel capabilities if setcap command exists
    setCap=$(command -v setcap) || setCap="/sbin/setcap" && [ -f $setCap ] || setCap=""
    if [ ! -z $setCap ]; then
      eval "$setCap CAP_SYS_PTRACE,CAP_DAC_READ_SEARCH=+ep /usr/bin/newrelic-infra" || failFlag=1
    else
      failFlag=1
    fi

    if [ $failFlag -eq 1 ]; then
      (>&2 echo "Error setting PRIVILEGED mode. Fallbacking to UNPRIVILEGED mode")
    fi
  fi

  if [ -e "$serviceFile" ]; then
    # If the user or group is set to root, change it to nri-agent
    # If no user or group is set, set it to nri-agent
    if grep 'USER=root' $serviceFile >/dev/null ; then
      sed -i 's/USER=root/USER=nri-agent/g' "$serviceFile"
    elif ! grep 'USER=' $serviceFile >/dev/null ; then
      sed -i '/### END INIT INFO/aUSER=nri-agent' "$serviceFile"
    fi
  fi
fi

# Previous versions had an incorrect `prerm` that didn't stop the service
# because it couldn't detect it was running, for that reason we have to make
# sure that there is not an older version running.
oldPid=/var/run/newrelic-infra.pid
if [ -e "$oldPid" ] ; then
  . /lib/lsb/init-functions
  killproc -p $oldPid /usr/bin/newrelic-infra
  rm $oldPid
fi

# Register the init script and start the agent.
if [ -e "$serviceFile" ]; then
  insserv newrelic-infra || exit $?
  ${serviceFile} start || exit $?
fi
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# Writes the page into the default Apache docroot. PLACEHOLDER, WIDTH,
# HEIGHT and PREFIX are expected to be set in the environment; they are
# interpolated into the generated HTML below.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome to ${PREFIX}'s app. HERE WE GOOOOO.
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
// This file is auto-generated and should not be modified directly.
#pragma once
#include "Texture.hh"
#include <glow/common/gltypeinfo.hh>
#include <glow/common/nodiscard.hh>
#include <glow/common/log.hh>
#include <glow/data/ColorSpace.hh>
#include <vector>
#include <typed-geometry/tg-lean.hh>
namespace glow
{
GLOW_SHARED(class, Texture2D);
GLOW_SHARED(class, TextureData);
/// Defines a 2D texture in OpenGL
class Texture2D final : public Texture
{
public:
    struct BoundTexture2D;

    using SizeT = tg::isize2;

    /// Value type fully describing the storage layout of a Texture2D
    /// (used e.g. to compare / recreate textures with identical storage).
    struct Shape
    {
        GLenum format;
        SizeT size;
        int immutableMipmaps; ///< Amount of fixed mipmaps, -1 if the texture is not immutable. This parameter is only respected in ::createStorageImmutable

        inline bool operator==(Shape const& rhs) const
        {
            return (format == rhs.format) && (size == rhs.size) && (immutableMipmaps == rhs.immutableMipmaps);
        }
    };

private:
    /// Minification filter
    GLenum mMinFilter = GL_NEAREST_MIPMAP_LINEAR;
    /// Magnification filter
    GLenum mMagFilter = GL_LINEAR;
    /// Border color
    tg::color4 mBorderColor = {0.0f, 0.0f, 0.0f, 0.0f};
    /// Wrapping in S
    GLenum mWrapS = GL_REPEAT;
    /// Wrapping in T
    GLenum mWrapT = GL_REPEAT;
    /// Comparison mode
    GLenum mCompareMode = GL_NONE;
    /// Comparison function
    GLenum mCompareFunc = GL_LESS;
    /// Depth/Stencil read mode
    GLenum mDepthStencilMode = GL_DEPTH_COMPONENT;
    /// Level of anisotropic filtering (>= 1.f, which is isotropic)
    /// Max number of samples basically
    GLfloat mAnisotropicFiltering = 1.0f;
    /// Texture size: Width (fixed: previously initialized with unsigned literal 0u)
    int mWidth = 0;
    /// Texture size: Height (fixed: previously initialized with unsigned literal 0u)
    int mHeight = 0;
    /// True iff mipmaps are generated since last data upload
    bool mMipmapsGenerated = false;
    /// LOD bias
    float mLodBias = 0.0f;
    /// Mipmap base level
    int mBaseLevel = 0;
    /// Mipmap max level
    int mMaxLevel = 1000;
    /// Mipmap min lod
    float mMinLod = -1000.0f;
    /// Mipmap max lod
    float mMaxLod = 1000.0f;
    /// if true, this texture got immutable storage by glTexStorage2D
    bool mStorageImmutable = false;
    /// Amount of fixed mipmaps, -1 if this texture does not have immutable storage
    int mImmutableMipmaps = -1;

public: // getter
    /// Gets the currently bound texture (nullptr if none)
    static BoundTexture2D* getCurrentTexture();

    GLenum getMinFilter() const { return mMinFilter; }
    GLenum getMagFilter() const { return mMagFilter; }
    tg::color4 getBorderColor() const { return mBorderColor; }
    GLenum getWrapS() const { return mWrapS; }
    GLenum getWrapT() const { return mWrapT; }
    GLenum getCompareMode() const { return mCompareMode; }
    GLenum getCompareFunc() const { return mCompareFunc; }
    GLenum getDepthStencilMode() const { return mDepthStencilMode; }
    int getWidth() const { return mWidth; }
    int getHeight() const { return mHeight; }
    tg::isize3 getDimensions() const override { return { mWidth, mHeight, 1 }; }
    SizeT getSize() const { return { mWidth, mHeight }; }
    Shape getShape() const { return { mInternalFormat, getSize(), mImmutableMipmaps }; }

    bool isStorageImmutable() const override { return mStorageImmutable; }

    /// returns true iff mipmaps are used (based on min filter)
    bool hasMipmapsEnabled() const override;
    /// returns true iff mipmaps were generated via bind().generateMipmaps() (and are still valid)
    bool areMipmapsGenerated() const override { return mMipmapsGenerated; }
    /// Manually sets the internal flag if mipmaps were generated
    /// CAUTION: this should only be used if you modified the mipmap status manually (e.g. via glGenerateMipmaps)
    void setMipmapsGenerated(bool areGenerated) override { mMipmapsGenerated = areGenerated; }

    float getLodBias() const { return mLodBias; }
    int getBaseLevel() const { return mBaseLevel; }
    int getMaxLevel() const { return mMaxLevel; }
    float getMinLod() const { return mMinLod; }
    float getMaxLod() const { return mMaxLod; }

    /// returns the uniform type that should be used for sampling this texture
    GLenum getUniformType() const override;

public:
    /// RAII-object that defines a "bind"-scope for a 2D texture
    /// All functions that operate on the currently bound tex are accessed here
    struct BoundTexture2D
    {
        GLOW_RAII_CLASS(BoundTexture2D);

        /// Backreference to the texture
        Texture2D* const texture;

        /// Makes the storage of this texture immutable
        /// It is an error to call this more than once
        /// It is an error to upload data with a different internal format at a later point
        /// It is an error to resize after storage was made immutable (unless it's the same size)
        /// Invalidates previously uploaded data
        /// If mipmapLevels is <= 0, log2(max(width, height)) + 1 is used
        void makeStorageImmutable(int width, int height, GLenum internalFormat, int mipmapLevels = 0);

        /// Sets minification filter (GL_NEAREST, GL_LINEAR, GL_NEAREST_MIPMAP_NEAREST, ..., GL_LINEAR_MIPMAP_LINEAR)
        void setMinFilter(GLenum filter);
        /// Sets magnification filter (GL_NEAREST, GL_LINEAR)
        void setMagFilter(GLenum filter);
        /// Sets mag and min filter
        void setFilter(GLenum magFilter, GLenum minFilter);
        /// Sets the number of anisotropic samples (>= 1)
        void setAnisotropicFiltering(GLfloat samples);
        /// Sets the border color
        void setBorderColor(tg::color4 const& color);
        /// Sets texture wrapping in S
        void setWrapS(GLenum wrap);
        /// Sets texture wrapping in T
        void setWrapT(GLenum wrap);
        /// Sets texture wrapping in all directions
        void setWrap(GLenum wrapS, GLenum wrapT);
        /// Sets the texture compare mode (must be enabled for shadow samplers)
        /// Valid values: GL_COMPARE_REF_TO_TEXTURE and GL_NONE
        void setCompareMode(GLenum mode);
        /// Sets the function for comparison (LESS, LEQUAL, ...)
        void setCompareFunc(GLenum func);
        /// Sets the depth/stencil texture mode (GL_DEPTH_COMPONENT or GL_STENCIL_COMPONENT)
        void setDepthStencilMode(GLenum mode);
        /// Generates mipmaps for this texture
        void generateMipmaps();
        /// Sets the bias (offset) for LOD
        void setLodBias(float bias);
        /// Sets the finest uploaded mip level
        void setBaseLevel(int lvl);
        /// Sets the coarsest uploaded mip level
        void setMaxLevel(int lvl);
        /// Sets the smallest lod value that should be used by texture(...)
        void setMinLod(float lvl);
        /// Sets the largest lod value that should be used by texture(...)
        void setMaxLod(float lvl);

        /// Resizes the texture
        /// invalidates the data
        void resize(int width, int height);
        void resize(SizeT size) { resize(size.width, size.height); }

        /// Generic data uploads
        /// Changes internal format, width, height, and data
        void setData(GLenum internalFormat, int width, int height, GLenum format, GLenum type, const GLvoid* data, int mipmapLevel = 0);
        /// Data upload via glm, tg, or c++ type (see gltypeinfo)
        template <typename DataT>
        void setData(GLenum internalFormat, int width, int height, std::vector<DataT> const& data, int mipmapLevel = 0)
        {
            if ((int)data.size() != width * height)
            {
                error() << "Texture size is " << width << " x " << height << " = " << width * height << " but " << data.size()
                        << " pixels are provided. " << to_string(texture);
                return;
            }
            setData(internalFormat, width, height, glTypeOf<DataT>::format, glTypeOf<DataT>::type, data.data(), mipmapLevel);
        }
        /// Same as above
        /// Usage:
        ///   tg::vec3 texData[] = { ... }
        ///   setData(iFormat, width, height, texData);
        template <typename DataT, std::size_t N>
        void setData(GLenum internalFormat, int width, int height, const DataT(&data)[N], int mipmapLevel = 0)
        {
            // fixed: cast to std::size_t avoids a signed/unsigned comparison
            // (and overflow of the int product for large textures)
            if (N != static_cast<std::size_t>(width) * static_cast<std::size_t>(height))
            {
                error() << "Texture size is " << width << " x " << height << " = " << width * height << " but " << N
                        << " pixels are provided. " << to_string(texture);
                return;
            }
            setData(internalFormat, width, height, glTypeOf<DataT>::format, glTypeOf<DataT>::type, data, mipmapLevel);
        }
        /// Same as above
        /// Usage:
        ///   tg::vec3 texData[][] = { ... }
        ///   // it's [height][width]
        ///   setData(iFormat, texData);
        template <typename DataT, int width, int height>
        void setData(GLenum internalFormat, const DataT(&data)[height][width] , int mipmapLevel = 0)
        {
            setData(internalFormat, width, height, glTypeOf<DataT>::format, glTypeOf<DataT>::type, data, mipmapLevel);
        }

        /// Generic partial data uploads
        /// Only changes data. Offset and size must be inside original bounds.
        void setSubData(int x, int y, int width, int height, GLenum format, GLenum type, const GLvoid* data, int mipmapLevel = 0);
        /// Partial data upload via glm, tg, or c++ type (see gltypeinfo)
        template <typename DataT>
        void setSubData(int x, int y, int width, int height, std::vector<DataT> const& data, int mipmapLevel = 0)
        {
            if ((int)data.size() != width * height)
            {
                error() << "Texture size is " << width << " x " << height << " = " << width * height << " but " << data.size()
                        << " pixels are provided. " << to_string(texture);
                return;
            }
            setSubData(x, y, width, height, glTypeOf<DataT>::format, glTypeOf<DataT>::type, data.data(), mipmapLevel);
        }
        /// Same as above
        /// Usage:
        ///   tg::vec3 texData[] = { ... }
        ///   setSubData(x, y, width, height, texData);
        template <typename DataT, std::size_t N>
        void setSubData(int x, int y, int width, int height, const DataT(&data)[N], int mipmapLevel = 0)
        {
            // fixed: cast to std::size_t avoids a signed/unsigned comparison
            if (N != static_cast<std::size_t>(width) * static_cast<std::size_t>(height))
            {
                error() << "Texture size is " << width << " x " << height << " = " << width * height << " but " << N
                        << " pixels are provided. " << to_string(texture);
                return;
            }
            setSubData(x, y, width, height, glTypeOf<DataT>::format, glTypeOf<DataT>::type, data, mipmapLevel);
        }
        /// Same as above
        /// Usage:
        ///   tg::vec3 texData[][] = { ... }
        ///   // it's [height][width]
        ///   setSubData(x, y, texData);
        template <typename DataT, int width, int height>
        void setSubData(int x, int y, const DataT(&data)[height][width] , int mipmapLevel = 0)
        {
            setSubData(x, y, width, height, glTypeOf<DataT>::format, glTypeOf<DataT>::type, data, mipmapLevel);
        }

        /// Sets texture data from surface data
        /// May set multiple levels at once
        /// May modify texture parameter
        void setData(GLenum internalFormat, SharedTextureData const& data);

        /// Generic data download
        std::vector<char> getData(GLenum format, GLenum type, int mipmapLevel = 0);
        /// Generic data download
        void getData(GLenum format, GLenum type, size_t bufferSize, void* buffer, int mipmapLevel = 0);
        /// Data download via glm, tg, or c++ type (see gltypeinfo)
        template <typename DataT>
        std::vector<DataT> getData(int mipmapLevel = 0)
        {
            std::vector<DataT> data;
            data.resize(texture->mWidth * texture->mHeight);
            getData(glTypeOf<DataT>::format, glTypeOf<DataT>::type, data.size() * sizeof(DataT), data.data(), mipmapLevel);
            // fixed: returning the local directly enables NRVO;
            // `return std::move(data)` pessimized copy elision
            return data;
        }

        /* TODO: OpenGL <4.5 does not support subimage retrieval (in 4.5, https://www.opengl.org/sdk/docs/man/html/glGetTextureSubImage.xhtml can be used)
        /// Generic partial data download
        std::vector<char> getSubData(GLenum format, GLenum type, int x, int y, int width, int height, int mipmapLevel = 0);
        /// Generic partial data download
        void getSubData(GLenum format, GLenum type, int x, int y, int width, int height, size_t bufferSize, void* buffer, int mipmapLevel = 0);
        /// Partial data download via glm, tg, or c++ type (see gltypeinfo)
        template <typename DataT>
        std::vector<DataT> getSubData(int x, int y, int width, int height, int mipmapLevel = 0)
        {
            std::vector<DataT> data;
            data.resize(width * height);
            getSubData(glTypeOf<DataT>::format, glTypeOf<DataT>::type, x, y, width, height, data.size() * sizeof(DataT), data.data(), mipmapLevel);
            return std::move(data);
        }
        */

        /// Extracts all stored surface data up to a given max mipmap level (inclusive)
        /// This is useful for saving the texture to a file
        SharedTextureData getTextureData(int maxMipLevel = 1000);
        /// Same as getTextureData()->writeToFile(filename)
        void writeToFile(std::string const& filename);

    private:
        GLint previousTexture;              ///< previously bound tex
        BoundTexture2D* previousTexturePtr; ///< previously bound tex
        BoundTexture2D (Texture2D* buffer);
        friend class Texture2D;

        /// returns true iff it's safe to use this bound class
        /// otherwise, runtime error
        bool isCurrent() const;

    public:
        BoundTexture2D (BoundTexture2D &&); // allow move
        ~BoundTexture2D ();
    };

public:
    /// Fills the specific mipmap level (default 0) with the given data
    /// Requires OpenGL 4.4 (for now) and will throw a run-time error otherwise
    void clear(GLenum format, GLenum type, const GLvoid* data, int mipmapLevel = 0);
    /// Clear via glm, tg, or c++ type (see gltypeinfo)
    /// CAREFUL: pointers do not work!
    template <typename DataT>
    void clear(DataT const& data, int mipmapLevel = 0)
    {
        clear(glTypeOf<DataT>::format, glTypeOf<DataT>::type, (const GLvoid*)&data, mipmapLevel);
    }

public:
    Texture2D(GLenum internalFormat = GL_RGBA);
    Texture2D(Shape const& shape);

    /// Binds this texture.
    /// Unbinding is done when the returned object runs out of scope.
    GLOW_NODISCARD BoundTexture2D bind() { return {this}; }

public: // static construction
    /// Creates a 2D texture with given width and height
    static SharedTexture2D create(int width = 1, int height = 1, GLenum internalFormat = GL_RGBA);
    /// Creates a 2D texture from Shape
    static SharedTexture2D create(Shape const& shape);
    static SharedTexture2D create(SizeT size, GLenum internalFormat = GL_RGBA) { return create(size.width, size.height, internalFormat); }
    /// Creates a 2D texture with given width and height which is storage immutable
    /// If mipmapLevels is <= 0, log2(max(width, height)) + 1 is used
    static SharedTexture2D createStorageImmutable(int width, int height, GLenum internalFormat, int mipmapLevels = 0);
    static SharedTexture2D createStorageImmutable(SizeT size, GLenum internalFormat, int mipmapLevels = 0) { return createStorageImmutable(size.width, size.height, internalFormat, mipmapLevels); }
    static SharedTexture2D createStorageImmutable(Shape const& shape);
    /// Creates a 2D texture from file
    /// See TextureData::createFromFile for format documentation
    /// Uses preferred internal format
    static SharedTexture2D createFromFile(std::string const& filename, ColorSpace colorSpace);
    /// same as createFromFile but with custom internal format
    static SharedTexture2D createFromFile(std::string const& filename, GLenum internalFormat, ColorSpace colorSpace);
    /// Creates a 2D texture from given data
    /// Uses preferred internal format
    static SharedTexture2D createFromData(SharedTextureData const& data);
    /// same as createFromData but with custom internal format
    static SharedTexture2D createFromData(SharedTextureData const& data, GLenum internalFormat);

    friend class Framebuffer;
};
}
|
<reponame>c4milo/mcrouter
/*
* Copyright (c) 2014-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the LICENSE
* file in the root directory of this source tree.
*
*/
#include "AsyncMcClientImpl.h"
#include <netinet/tcp.h>
#include <memory>
#include <folly/SingletonThreadLocal.h>
#include <folly/container/EvictingCacheMap.h>
#include <folly/io/async/AsyncSSLSocket.h>
#include <folly/io/async/EventBase.h>
#include "mcrouter/lib/debug/FifoManager.h"
#include "mcrouter/lib/fbi/cpp/LogFailure.h"
#include "mcrouter/lib/network/ThreadLocalSSLContextProvider.h"
namespace facebook {
namespace memcache {
constexpr size_t kReadBufferSizeMin = 256;
constexpr size_t kReadBufferSizeMax = 4096;
constexpr size_t kStackIovecs = 128;
constexpr size_t kMaxBatchSize = 24576 /* 24KB */;
namespace {
class OnEventBaseDestructionCallback : public folly::EventBase::LoopCallback {
public:
explicit OnEventBaseDestructionCallback(AsyncMcClientImpl& client)
: client_(client) {}
~OnEventBaseDestructionCallback() override {}
void runLoopCallback() noexcept final {
client_.closeNow();
}
private:
AsyncMcClientImpl& client_;
};
/// Write callback used for sending a GoAway acknowledgement.  The context
/// owns itself through `selfPtr`; once the socket reports success or failure
/// it drops that ownership, which destroys the context.
struct GoAwayContext : public folly::AsyncTransportWrapper::WriteCallback {
  GoAwayAcknowledgement message;
  McSerializedRequest data;
  std::unique_ptr<GoAwayContext> selfPtr;

  explicit GoAwayContext(const CodecIdRange& supportedCodecs)
      : data(message, 0, mc_caret_protocol, supportedCodecs) {}

  /// The ack was written; release self-ownership (destroys *this).
  void writeSuccess() noexcept final {
    selfPtr.reset();
  }

  /// The write failed; GoAway acks are best-effort, just clean up.
  void writeErr(size_t, const folly::AsyncSocketException&) noexcept final {
    selfPtr.reset();
  }
};
/// Returns the sum of iov_len over the first `num` entries of `iovecs`.
inline size_t calculateIovecsTotalSize(const struct iovec* iovecs, size_t num) {
  size_t total = 0;
  for (size_t i = 0; i < num; ++i) {
    total += iovecs[i].iov_len;
  }
  return total;
}
/// Identity used for SSL session caching: the configured sslServiceIdentity
/// when non-empty, otherwise the access point's host:port string.
std::string getServiceIdentity(const ConnectionOptions& opts) {
  if (!opts.sslServiceIdentity.empty()) {
    return opts.sslServiceIdentity;
  }
  return opts.accessPoint->toHostPortString();
}
} // anonymous
// Writer loop entry point.  When no external flush list is installed, the
// actual write is postponed exactly once to the end of the current event-loop
// iteration (i.e. after callbacks such as runActiveFibers()) — this improves
// batching without affecting latency.
void AsyncMcClientImpl::WriterLoop::runLoopCallback() noexcept {
  const bool delayOnce = !client_.flushList_ && !rescheduled_;
  if (delayOnce) {
    rescheduled_ = true;
    client_.eventBase_.runInLoop(this, /* thisIteration */ true);
  } else {
    rescheduled_ = false;
    client_.pushMessages();
  }
}
// Constructs a client bound to the given event base.
// Note: both queue_ and outOfOrder_ are derived from the protocol — the
// ASCII protocol is the only one requiring strictly in-order replies.
AsyncMcClientImpl::AsyncMcClientImpl(
    folly::VirtualEventBase& eventBase,
    ConnectionOptions options)
    : eventBase_(eventBase.getEventBase()),
      queue_(options.accessPoint->getProtocol() != mc_ascii_protocol),
      outOfOrder_(options.accessPoint->getProtocol() != mc_ascii_protocol),
      writer_(*this),
      connectionOptions_(std::move(options)),
      eventBaseDestructionCallback_(
          std::make_unique<OnEventBaseDestructionCallback>(*this)) {
  // Ensure the client is torn down before its event base goes away.
  eventBase.runOnDestruction(eventBaseDestructionCallback_.get());
  if (connectionOptions_.compressionCodecMap) {
    supportedCompressionCodecs_ =
        connectionOptions_.compressionCodecMap->getIdRange();
  }
}
// Factory method: instances are always heap-allocated and managed through a
// shared_ptr with a custom Destructor (the constructor is never used
// directly by callers).
std::shared_ptr<AsyncMcClientImpl> AsyncMcClientImpl::create(
    folly::VirtualEventBase& eventBase,
    ConnectionOptions options) {
  std::shared_ptr<AsyncMcClientImpl> result(
      new AsyncMcClientImpl(eventBase, std::move(options)), Destructor());
  // Record a self-reference used internally (see selfPtr_ in the header).
  result->selfPtr_ = result;
  return result;
}
// Immediately closes the socket (if any), flagging the client as aborting so
// that requests failed during the close are reported as aborted.
void AsyncMcClientImpl::closeNow() {
  DestructorGuard dg(this);

  if (!socket_) {
    return;
  }
  isAborting_ = true;
  // The socket must be destroyed right away, not lazily.
  socket_->closeNow();
  socket_.reset();
  isAborting_ = false;
}
// Installs connection up/down callbacks.  If the connection is already UP,
// the "up" callback fires immediately so the caller never misses the
// current state.
void AsyncMcClientImpl::setStatusCallbacks(
    std::function<void(const folly::AsyncSocket&)> onUp,
    std::function<void(ConnectionDownReason)> onDown) {
  DestructorGuard dg(this);

  ConnectionStatusCallbacks callbacks{std::move(onUp), std::move(onDown)};
  statusCallbacks_ = std::move(callbacks);

  if (connectionState_ == ConnectionState::UP && statusCallbacks_.onUp) {
    statusCallbacks_.onUp(*socket_);
  }
}
void AsyncMcClientImpl::setRequestStatusCallbacks(
std::function<void(int pendingDiff, int inflightDiff)> onStateChange,
std::function<void(int numToSend)> onWrite) {
DestructorGuard dg(this);
requestStatusCallbacks_ =
RequestStatusCallbacks{std::move(onStateChange), std::move(onWrite)};
}
// Destructor.  May only run once no requests are outstanding — presumably
// guaranteed by the custom Destructor/selfPtr_ machinery (see create());
// the asserts below enforce that invariant.
AsyncMcClientImpl::~AsyncMcClientImpl() {
  assert(getPendingRequestCount() == 0);
  assert(getInflightRequestCount() == 0);
  if (socket_) {
    // Close the socket immediately. We need to process all callbacks, such as
    // readEOF and connectError, before we exit destructor.
    socket_->closeNow();
  }
  eventBaseDestructionCallback_.reset();
}
// Number of requests queued locally but not yet written to the socket.
size_t AsyncMcClientImpl::getPendingRequestCount() const {
  return queue_.getPendingRequestCount();
}

// Number of requests written to the socket and still awaiting replies.
size_t AsyncMcClientImpl::getInflightRequestCount() const {
  return queue_.getInflightRequestCount();
}

// Sets throttling limits.  maxInflight == 0 disables the inflight cap (see
// getNumToSend()); maxPending_ is consumed elsewhere — not visible here.
void AsyncMcClientImpl::setThrottle(size_t maxInflight, size_t maxPending) {
  maxInflight_ = maxInflight;
  maxPending_ = maxPending;
}
// Common send path: enqueue an already-serialized request, schedule the
// writer, and lazily (re)establish the connection.  Serialization failures
// are reported back to the request immediately.
void AsyncMcClientImpl::sendCommon(McClientRequestContextBase& req) {
  const auto result = req.reqContext.serializationResult();
  if (result == McSerializedRequest::Result::OK) {
    incMsgId(nextMsgId_);
    queue_.markAsPending(req);
    scheduleNextWriterLoop();
    if (connectionState_ == ConnectionState::DOWN) {
      // Connection establishment is lazy — kicked off by the first send.
      attemptConnection();
    }
  } else if (result == McSerializedRequest::Result::BAD_KEY) {
    req.replyError(mc_res_bad_key, "The key provided is invalid");
  } else if (result == McSerializedRequest::Result::ERROR) {
    req.replyError(mc_res_local_error, "Error when serializing the request.");
  }
}
size_t AsyncMcClientImpl::getNumToSend() const {
size_t numToSend = queue_.getPendingRequestCount();
if (maxInflight_ != 0) {
if (maxInflight_ <= getInflightRequestCount()) {
numToSend = 0;
} else {
numToSend = std::min(numToSend, maxInflight_ - getInflightRequestCount());
}
}
return numToSend;
}
// Schedules the writer loop callback if the connection is up, the writer is
// not already scheduled, and there is anything to flush (request data or a
// pending GoAway reply).
void AsyncMcClientImpl::scheduleNextWriterLoop() {
  if (connectionState_ != ConnectionState::UP) {
    return;
  }
  if (writer_.isLoopCallbackScheduled()) {
    return;
  }
  if (getNumToSend() == 0 && !pendingGoAwayReply_) {
    return;
  }
  if (flushList_) {
    flushList_->push_back(writer_);
  } else {
    eventBase_.runInLoop(&writer_);
  }
}
// Cancels a scheduled writer loop callback — used when entering the ERROR
// state (see processShutdown()) so no writes are attempted during teardown.
void AsyncMcClientImpl::cancelWriterCallback() {
  writer_.cancelLoopCallback();
}
// Writer-loop body: drains up to getNumToSend() pending requests to the
// socket, batching small requests into a single writev() call (bounded by
// kStackIovecs iovec entries and kMaxBatchSize bytes), then flushes a
// pending GoAway reply if one was requested.
void AsyncMcClientImpl::pushMessages() {
  DestructorGuard dg(this);

  assert(connectionState_ == ConnectionState::UP);
  auto numToSend = getNumToSend();

  // Call batch status callback
  if (requestStatusCallbacks_.onWrite && numToSend > 0) {
    requestStatusCallbacks_.onWrite(numToSend);
  }

  // Stack-allocated scratch space used to coalesce iovecs across requests.
  std::array<struct iovec, kStackIovecs> iovecs;
  size_t iovsUsed = 0;
  size_t batchSize = 0;
  McClientRequestContextBase* tail = nullptr;

  // Writes one batch: marks its last request as the batch tail, CORKs the
  // socket unless this is the final write, and returns false when the
  // connection dropped as a side effect of writev().
  auto sendBatchFun = [this](
      McClientRequestContextBase* tailReq,
      const struct iovec* iov,
      size_t iovCnt,
      bool last) {
    tailReq->isBatchTail = true;
    socket_->writev(
        this,
        iov,
        iovCnt,
        last ? folly::WriteFlags::NONE : folly::WriteFlags::CORK);
    return connectionState_ == ConnectionState::UP;
  };

  while (getPendingRequestCount() != 0 && numToSend > 0 &&
         /* we might be already not UP, because of failed writev */
         connectionState_ == ConnectionState::UP) {
    auto& req = queue_.peekNextPending();

    auto iov = req.reqContext.getIovs();
    auto iovcnt = req.reqContext.getIovsCount();
    // Mirror outgoing bytes to the debug fifo when attached.
    if (debugFifo_.isConnected()) {
      debugFifo_.startMessage(MessageDirection::Sent, req.reqContext.typeId());
      debugFifo_.writeData(iov, iovcnt);
    }

    if (iovsUsed + iovcnt > kStackIovecs && iovsUsed) {
      // We're out of inline iovecs, flush what we batched.
      if (!sendBatchFun(tail, iovecs.data(), iovsUsed, false)) {
        break;
      }
      iovsUsed = 0;
      batchSize = 0;
    }

    if (iovcnt >= kStackIovecs || (iovsUsed == 0 && numToSend == 1)) {
      // Req is either too big to batch or it's the last one, so just send it
      // alone.
      queue_.markNextAsSending();
      sendBatchFun(&req, iov, iovcnt, numToSend == 1);
    } else {
      auto size = calculateIovecsTotalSize(iov, iovcnt);

      if (size + batchSize > kMaxBatchSize && iovsUsed) {
        // We already accumulated too much data, flush what we have.
        if (!sendBatchFun(tail, iovecs.data(), iovsUsed, false)) {
          break;
        }
        iovsUsed = 0;
        batchSize = 0;
      }

      queue_.markNextAsSending();
      if (size >= kMaxBatchSize || (iovsUsed == 0 && numToSend == 1)) {
        // Req is either too big to batch or it's the last one, so just send it
        // alone.
        sendBatchFun(&req, iov, iovcnt, numToSend == 1);
      } else {
        // Append this request's iovecs to the current batch.
        memcpy(iovecs.data() + iovsUsed, iov, sizeof(struct iovec) * iovcnt);
        iovsUsed += iovcnt;
        batchSize += size;
        tail = &req;

        if (numToSend == 1) {
          // This was the last request flush everything.
          sendBatchFun(tail, iovecs.data(), iovsUsed, true);
        }
      }
    }

    --numToSend;
  }

  if (connectionState_ == ConnectionState::UP && pendingGoAwayReply_) {
    // Note: we're not waiting for all requests to be sent, since that may take
    // a while and if we didn't succeed in one loop, this means that we're
    // already backlogged.
    sendGoAwayReply();
  }
  pendingGoAwayReply_ = false;
  scheduleNextWriterLoop();
}
// Sends a GoAway acknowledgement to the server.  The GoAwayContext owns
// itself via selfPtr and is destroyed by its own write callback; any
// serialization error is silently ignored (the ack is best-effort).
void AsyncMcClientImpl::sendGoAwayReply() {
  auto ctxPtr = std::make_unique<GoAwayContext>(supportedCompressionCodecs_);
  auto& ctx = *ctxPtr;

  if (ctx.data.serializationResult() != McSerializedRequest::Result::OK) {
    // Ignore errors on GoAway.
    return;
  }

  auto iov = ctx.data.getIovs();
  auto iovcnt = ctx.data.getIovsCount();
  // Pass context ownership of itself, writev will call a callback that
  // will destroy the context.
  ctx.selfPtr = std::move(ctxPtr);
  socket_->writev(&ctx, iov, iovcnt);
}
namespace {
// Fills `options` with TCP keep-alive socket options.
// `cnt` is the probe count (0 disables keep-alive entirely); `idle` and
// `interval` configure TCP_KEEPIDLE / TCP_KEEPINTVL respectively.  Each
// option is guarded by #ifdef since not all platforms define them.
void createTCPKeepAliveOptions(
    folly::AsyncSocket::OptionMap& options,
    int cnt,
    int idle,
    int interval) {
  // 0 means KeepAlive is disabled.
  if (cnt != 0) {
#ifdef SO_KEEPALIVE
    folly::AsyncSocket::OptionMap::key_type key;
    key.level = SOL_SOCKET;
    key.optname = SO_KEEPALIVE;
    options[key] = 1;

    // The remaining options live at the TCP level, not the socket level.
    key.level = IPPROTO_TCP;

#ifdef TCP_KEEPCNT
    key.optname = TCP_KEEPCNT;
    options[key] = cnt;
#endif // TCP_KEEPCNT

#ifdef TCP_KEEPIDLE
    key.optname = TCP_KEEPIDLE;
    options[key] = idle;
#endif // TCP_KEEPIDLE

#ifdef TCP_KEEPINTVL
    key.optname = TCP_KEEPINTVL;
    options[key] = interval;
#endif // TCP_KEEPINTVL
#endif // SO_KEEPALIVE
  }
}
/// Picks the socket option used to carry the QoS value for the given
/// address family: IP_TOS for IPv4, IPV6_TCLASS for IPv6.
const folly::AsyncSocket::OptionKey getQoSOptionKey(sa_family_t addressFamily) {
  static const folly::AsyncSocket::OptionKey kIpv4OptKey = {IPPROTO_IP, IP_TOS};
  static const folly::AsyncSocket::OptionKey kIpv6OptKey = {IPPROTO_IPV6,
                                                            IPV6_TCLASS};
  if (addressFamily == AF_INET) {
    return kIpv4OptKey;
  }
  return kIpv6OptKey;
}
// Encodes a (class level, path level) pair into the TOS/TCLASS byte used by
// the network.  Out-of-range levels are logged and fall back to 0.
uint64_t getQoS(uint64_t qosClassLvl, uint64_t qosPathLvl) {
  // Traffic-class bits, indexed by class level 0..4.
  static const uint64_t kDefaultClass = 0x00;
  static const uint64_t kLowestClass = 0x20;
  static const uint64_t kMediumClass = 0x40;
  static const uint64_t kHighClass = 0x60;
  static const uint64_t kHighestClass = 0x80;
  static const uint64_t kQoSClasses[] = {
      kDefaultClass, kLowestClass, kMediumClass, kHighClass, kHighestClass};

  // Path-selection bits, indexed by path level 0..3.
  static const uint64_t kAnyPathNoProtection = 0x00;
  static const uint64_t kAnyPathProtection = 0x04;
  static const uint64_t kShortestPathNoProtection = 0x08;
  static const uint64_t kShortestPathProtection = 0x0c;
  static const uint64_t kQoSPaths[] = {kAnyPathNoProtection,
                                       kAnyPathProtection,
                                       kShortestPathNoProtection,
                                       kShortestPathProtection};

  if (qosClassLvl >= 5) {
    qosClassLvl = 0;
    LOG_FAILURE(
        "AsyncMcClient",
        failure::Category::kSystemError,
        "Invalid QoS class value in AsyncMcClient");
  }

  if (qosPathLvl >= 4) {
    qosPathLvl = 0;
    LOG_FAILURE(
        "AsyncMcClient",
        failure::Category::kSystemError,
        "Invalid QoS path value in AsyncMcClient");
  }

  return kQoSClasses[qosClassLvl] | kQoSPaths[qosPathLvl];
}
// Adds the QoS (TOS/TCLASS) socket option for the given address family to
// `options`, encoding the class and path levels via getQoS().
void createQoSClassOption(
    folly::AsyncSocket::OptionMap& options,
    const sa_family_t addressFamily,
    uint64_t qosClass,
    uint64_t qosPath) {
  const auto& optkey = getQoSOptionKey(addressFamily);
  options[optkey] = getQoS(qosClass, qosPath);
}
// Best-effort verification that the requested QoS/TOS value was actually
// applied to the freshly-created socket; logs a failure if the kernel
// reports a different value (or getsockopt itself fails).
void checkWhetherQoSIsApplied(
    const folly::SocketAddress& address,
    int socketFd,
    const ConnectionOptions& connectionOptions) {
  const auto& optkey = getQoSOptionKey(address.getFamily());

  const uint64_t expectedValue =
      getQoS(connectionOptions.qosClass, connectionOptions.qosPath);
  uint64_t val = 0;
  socklen_t len = sizeof(expectedValue);
  int rv = getsockopt(socketFd, optkey.level, optkey.optname, &val, &len);

  // Zero out the last 2 bits as they are not used for the QoS value.
  // Fix: the previous mask (0xFFFFFFFc) also cleared the upper 32 bits of
  // this 64-bit value, contradicting its name; use a proper 64-bit mask.
  constexpr uint64_t kMaskTwoLeastSignificantBits = ~static_cast<uint64_t>(0x3);
  val = val & kMaskTwoLeastSignificantBits;

  if (rv != 0 || val != expectedValue) {
    LOG_FAILURE(
        "AsyncMcClient",
        failure::Category::kSystemError,
        "Failed to apply QoS! "
        "Return Value: {} (expected: {}). "
        "QoS Value: {} (expected: {}).",
        rv,
        0,
        val,
        expectedValue);
  }
}
/// Builds the complete socket-option map for a new connection: TCP
/// keep-alive settings plus, when enabled, the QoS class option.
folly::AsyncSocket::OptionMap createSocketOptions(
    const folly::SocketAddress& address,
    const ConnectionOptions& connectionOptions) {
  folly::AsyncSocket::OptionMap opts;

  createTCPKeepAliveOptions(
      opts,
      connectionOptions.tcpKeepAliveCount,
      connectionOptions.tcpKeepAliveIdle,
      connectionOptions.tcpKeepAliveInterval);

  if (connectionOptions.enableQoS) {
    createQoSClassOption(
        opts,
        address.getFamily(),
        connectionOptions.qosClass,
        connectionOptions.qosPath);
  }

  return opts;
}
} // anonymous namespace
// Starts an asynchronous connect to the configured access point.  Creates a
// plain or SSL socket depending on the options and registers `this` as the
// connect callback (see connectSuccess() / connectErr()).
void AsyncMcClientImpl::attemptConnection() {
  // We may use a lot of stack memory (e.g. hostname resolution) or some
  // expensive SSL code. This should be always executed on main context.
  folly::fibers::runInMainContext([this] {
    assert(connectionState_ == ConnectionState::DOWN);

    connectionState_ = ConnectionState::CONNECTING;
    pendingGoAwayReply_ = false;

    if (connectionOptions_.sslContextProvider) {
      // Unix Domain Sockets do not work with SSL because
      // the protocol is not implemented for Unix Domain
      // Sockets. Trying to use SSL with Unix Domain Sockets
      // will result in protocol error.
      if (connectionOptions_.accessPoint->isUnixDomainSocket()) {
        connectErr(folly::AsyncSocketException(
            folly::AsyncSocketException::BAD_ARGS,
            "SSL protocol is not applicable for Unix Domain Sockets"));
        return;
      }
      auto sslContext = connectionOptions_.sslContextProvider();
      if (!sslContext) {
        connectErr(folly::AsyncSocketException(
            folly::AsyncSocketException::SSL_ERROR,
            "SSLContext provider returned nullptr, "
            "check SSL certificates"));
        return;
      }
      auto sslSocket = new folly::AsyncSSLSocket(sslContext, &eventBase_);
      if (connectionOptions_.sessionCachingEnabled) {
        // Attempt to resume a cached TLS session keyed by service identity.
        auto clientCtx =
            std::dynamic_pointer_cast<ClientSSLContext>(sslContext);
        if (clientCtx) {
          const auto& serviceId = getServiceIdentity(connectionOptions_);
          sslSocket->setSessionKey(serviceId);
          auto session = clientCtx->getCache().getSSLSession(serviceId);
          if (session) {
            sslSocket->setSSLSession(session.release(), true);
          }
        }
      }
      if (connectionOptions_.tfoEnabledForSsl) {
        sslSocket->enableTFO();
      }
      socket_.reset(sslSocket);
    } else {
      socket_.reset(new folly::AsyncSocket(&eventBase_));
    }

    // Resolve the destination address (may perform a DNS lookup).
    folly::SocketAddress address;
    try {
      if (connectionOptions_.accessPoint->isUnixDomainSocket()) {
        address.setFromPath(connectionOptions_.accessPoint->getHost());
      } else {
        address = folly::SocketAddress(
            connectionOptions_.accessPoint->getHost(),
            connectionOptions_.accessPoint->getPort(),
            /* allowNameLookup */ true);
      }
    } catch (const std::system_error& e) {
      LOG_FAILURE(
          "AsyncMcClient", failure::Category::kBadEnvironment, "{}", e.what());
      connectErr(folly::AsyncSocketException(
          folly::AsyncSocketException::NOT_OPEN, ""));
      return;
    }
    auto socketOptions = createSocketOptions(address, connectionOptions_);
    socket_->setSendTimeout(connectionOptions_.writeTimeout.count());
    socket_->connect(
        this, address, connectionOptions_.writeTimeout.count(), socketOptions);

    // If AsyncSocket::connect() fails, socket_ may have been reset
    if (socket_ && connectionOptions_.enableQoS) {
      checkWhetherQoSIsApplied(address, socket_->getFd(), connectionOptions_);
    }
  });
}
// AsyncSocket callback: the TCP (or SSL) handshake completed successfully.
// Transitions the client to UP, attaches the debug fifo and reply parser,
// and kicks off the writer loop for any queued requests.
void AsyncMcClientImpl::connectSuccess() noexcept {
  assert(connectionState_ == ConnectionState::CONNECTING);
  DestructorGuard dg(this);
  connectionState_ = ConnectionState::UP;

  if (statusCallbacks_.onUp) {
    statusCallbacks_.onUp(*socket_);
  }

  // Attach a per-thread debug fifo if one was configured.
  if (!connectionOptions_.debugFifoPath.empty()) {
    if (auto fifoManager = FifoManager::getInstance()) {
      if (auto fifo =
              fifoManager->fetchThreadLocal(connectionOptions_.debugFifoPath)) {
        debugFifo_ = ConnectionFifo(
            std::move(fifo), socket_.get(), connectionOptions_.routerInfoName);
      }
    }
  }

  // NOTE(review): this block only asserts that the transport really is an
  // SSL socket when session caching is enabled — it has no other effect.
  // Presumably session persistence is handled elsewhere; confirm intent.
  if (connectionOptions_.sslContextProvider &&
      connectionOptions_.sessionCachingEnabled) {
    auto* sslSocket = socket_->getUnderlyingTransport<folly::AsyncSSLSocket>();
    assert(sslSocket != nullptr);
  }

  assert(getInflightRequestCount() == 0);
  assert(queue_.getParserInitializer() == nullptr);

  scheduleNextWriterLoop();
  parser_ = std::make_unique<ParserT>(
      *this,
      kReadBufferSizeMin,
      kReadBufferSizeMax,
      connectionOptions_.useJemallocNodumpAllocator,
      connectionOptions_.compressionCodecMap,
      &debugFifo_);
  socket_->setReadCB(this);
}
// AsyncSocket callback: the connect attempt failed.  Classifies the failure,
// fails all pending requests with a matching error code, and transitions
// the client back to DOWN.
void AsyncMcClientImpl::connectErr(
    const folly::AsyncSocketException& ex) noexcept {
  assert(connectionState_ == ConnectionState::CONNECTING);
  DestructorGuard dg(this);

  std::string errorMessage;
  // SSL failures are logged eagerly — they usually indicate a bad
  // certificate/environment rather than a transient network problem.
  if (ex.getType() == folly::AsyncSocketException::SSL_ERROR) {
    errorMessage = folly::sformat(
        "SSLError: {}. Connect to {} failed.",
        ex.what(),
        connectionOptions_.accessPoint->toHostPortString());
    LOG_FAILURE(
        "AsyncMcClient", failure::Category::kBadEnvironment, errorMessage);
  }

  mc_res_t error = mc_res_connect_error;
  ConnectionDownReason reason = ConnectionDownReason::CONNECT_ERROR;
  if (ex.getType() == folly::AsyncSocketException::TIMED_OUT) {
    error = mc_res_connect_timeout;
    reason = ConnectionDownReason::CONNECT_TIMEOUT;
    errorMessage = "Timed out when trying to connect to server";
  } else if (isAborting_) {
    // closeNow() was invoked while the connect was still in flight.
    error = mc_res_aborted;
    reason = ConnectionDownReason::ABORTED;
    errorMessage = "Connection aborted";
  }

  assert(getInflightRequestCount() == 0);
  queue_.failAllPending(error, errorMessage);
  connectionState_ = ConnectionState::DOWN;
  // We don't need it anymore, so let it perform complete cleanup.
  socket_.reset();
  if (statusCallbacks_.onDown) {
    statusCallbacks_.onDown(reason);
  }
}
// Central error/teardown path, invoked when the connection hits an error or
// is closed by the peer.  Can run multiple times for one failure; only the
// final invocation (no more inflight requests) transitions to DOWN.
void AsyncMcClientImpl::processShutdown(folly::StringPiece errorMessage) {
  DestructorGuard dg(this);
  switch (connectionState_) {
    case ConnectionState::UP: // on error, UP always transitions to ERROR state
      // Cancel loop callback, or otherwise we might attempt to write
      // something while processing error state.
      cancelWriterCallback();
      connectionState_ = ConnectionState::ERROR;
      // We're already in ERROR state, no need to listen for reads.
      socket_->setReadCB(nullptr);
      // We can safely close connection, it will stop all writes.
      socket_->close();

      /* fallthrough */

    case ConnectionState::ERROR:
      queue_.failAllSent(
          isAborting_ ? mc_res_aborted : mc_res_remote_error, errorMessage);
      if (queue_.getInflightRequestCount() == 0) {
        // No need to send any of remaining requests if we're aborting.
        if (isAborting_) {
          queue_.failAllPending(mc_res_aborted, errorMessage);
        }

        // This is a last processShutdown() for this error and it is safe
        // to go DOWN.
        if (statusCallbacks_.onDown) {
          statusCallbacks_.onDown(
              isAborting_ ? ConnectionDownReason::ABORTED
                          : ConnectionDownReason::ERROR);
        }
        connectionState_ = ConnectionState::DOWN;
        // We don't need it anymore, so let it perform complete cleanup.
        socket_.reset();

        // In case we still have some pending requests, then try reconnecting
        // immediately.
        if (getPendingRequestCount() != 0) {
          attemptConnection();
        }
      }
      return;
    case ConnectionState::CONNECTING:
      // connectError is not a remote error, it's processed in connectError.
    case ConnectionState::DOWN:
      // We shouldn't have any errors while not connected.
      CHECK(false);
  }
}
// AsyncTransport read callback: hand the transport a buffer owned by the
// parser. The (ptr, len) pair is cached in curBuffer_ so that
// readDataAvailable() can validate the subsequent read against it.
void AsyncMcClientImpl::getReadBuffer(void** bufReturn, size_t* lenReturn) {
  const auto buffer = parser_->getReadBuffer();
  curBuffer_ = buffer;
  *bufReturn = buffer.first;
  *lenReturn = buffer.second;
}
// AsyncTransport read callback: |len| bytes were written into the buffer
// previously handed out by getReadBuffer().
void AsyncMcClientImpl::readDataAvailable(size_t len) noexcept {
  assert(curBuffer_.first != nullptr && curBuffer_.second >= len);
  // Parser callbacks may complete requests; keep |this| alive meanwhile.
  DestructorGuard dg(this);
  parser_->readDataAvailable(len);
}
// The remote side closed the connection while we were UP; run the common
// shutdown path.
void AsyncMcClientImpl::readEOF() noexcept {
  assert(connectionState_ == ConnectionState::UP);
  processShutdown("Connection closed by the server.");
}
// Transport-level read failure: log at verbose level and run the common
// shutdown path with a descriptive message.
void AsyncMcClientImpl::readErr(
    const folly::AsyncSocketException& ex) noexcept {
  assert(connectionState_ == ConnectionState::UP);
  std::string errorMessage = folly::sformat(
      "Failed to read from socket with remote endpoint \"{}\". Exception: {}",
      connectionOptions_.accessPoint->toString(),
      ex.what());
  VLOG(1) << errorMessage;
  processShutdown(errorMessage);
}
// One socket write (covering a batch of requests up to the batch tail)
// completed. Marks each request in the batch as sent and arms its timeout.
void AsyncMcClientImpl::writeSuccess() noexcept {
  assert(
      connectionState_ == ConnectionState::UP ||
      connectionState_ == ConnectionState::ERROR);
  DestructorGuard dg(this);
  bool last;
  do {
    auto& req = queue_.markNextAsSent();
    last = req.isBatchTail;
    req.scheduleTimeout();
  } while (!last);
  // It is possible that we're already processing an error, but still have a
  // successful write.
  if (connectionState_ == ConnectionState::ERROR) {
    processShutdown("Connection was in ERROR state.");
  }
}
// A socket write failed. The batch being written is still marked as sent so
// the queue stays consistent; processShutdown() then fails those requests.
void AsyncMcClientImpl::writeErr(
    size_t bytesWritten,
    const folly::AsyncSocketException& ex) noexcept {
  assert(
      connectionState_ == ConnectionState::UP ||
      connectionState_ == ConnectionState::ERROR);
  std::string errorMessage = folly::sformat(
      "Failed to write into socket with remote endpoint \"{}\", "
      "wrote {} bytes. Exception: {}",
      connectionOptions_.accessPoint->toString(),
      bytesWritten,
      ex.what());
  VLOG(1) << errorMessage;
  // We're already in an error state, so all requests in pendingReplyQueue_
  // will be replied with an error.
  bool last;
  do {
    auto& req = queue_.markNextAsSent();
    last = req.isBatchTail;
  } while (!last);
  processShutdown(errorMessage);
}
// Human-readable name of the current connection state, for diagnostics.
folly::StringPiece AsyncMcClientImpl::clientStateToStr() const {
  if (connectionState_ == ConnectionState::UP) {
    return "UP";
  }
  if (connectionState_ == ConnectionState::DOWN) {
    return "DOWN";
  }
  if (connectionState_ == ConnectionState::CONNECTING) {
    return "CONNECTING";
  }
  if (connectionState_ == ConnectionState::ERROR) {
    return "ERROR";
  }
  return "state is incorrect";
}
// Log |reason| together with enough client state (connection state, remote
// endpoint, total ids issued, queue internals) to debug the failure post-hoc.
void AsyncMcClientImpl::logErrorWithContext(folly::StringPiece reason) {
  LOG_FAILURE(
      "AsyncMcClient",
      failure::Category::kOther,
      "Error: \"{}\", client state: {}, remote endpoint: {}, "
      "number of requests sent through this client: {}, "
      "McClientRequestContextQueue info: {}",
      reason,
      clientStateToStr(),
      connectionOptions_.accessPoint->toString(),
      nextMsgId_,
      queue_.debugInfo());
}
// Handle a connection control message (currently only GoAway) received from
// the server.
void AsyncMcClientImpl::handleConnectionControlMessage(
    const UmbrellaMessageInfo& headerInfo) {
  DestructorGuard dg(this);
  // Handle go away request.
  switch (headerInfo.typeId) {
    case GoAwayRequest::typeId: {
      // No need to process GoAway if the connection is already closing.
      if (connectionState_ != ConnectionState::UP) {
        break;
      }
      // Notify the owner so it can drain traffic off this connection.
      if (statusCallbacks_.onDown) {
        statusCallbacks_.onDown(ConnectionDownReason::SERVER_GONE_AWAY);
      }
      // Acknowledge the GoAway on the next writer loop iteration.
      pendingGoAwayReply_ = true;
      scheduleNextWriterLoop();
      break;
    }
    default:
      // Ignore unknown control messages.
      break;
  }
}
// Called by the parser on protocol-level errors. Logged on every call for
// context, but shutdown is triggered only by the first call while still UP.
void AsyncMcClientImpl::parseError(
    mc_res_t /* result */,
    folly::StringPiece reason) {
  logErrorWithContext(reason);
  // mc_parser can call the parseError multiple times, process only first.
  if (connectionState_ != ConnectionState::UP) {
    return;
  }
  DestructorGuard dg(this);
  processShutdown(reason);
}
// Called by the parser when the header of the next reply has been decoded.
// Returns true iff a request with |reqId| is awaiting a reply; in that case
// the stored initializer prepares the parser for that reply's type.
bool AsyncMcClientImpl::nextReplyAvailable(uint64_t reqId) {
  assert(connectionState_ == ConnectionState::UP);
  const auto initializer = queue_.getParserInitializer(reqId);
  if (!initializer) {
    return false;
  }
  (*initializer)(*parser_);
  return true;
}
// Advance a message id to the next usable value. Ids are bumped by two, so
// this client stays within one parity class (odd/even) of the id space —
// presumably the other parity is reserved; TODO confirm against the protocol.
void AsyncMcClientImpl::incMsgId(uint32_t& msgId) {
  msgId += 2;
}
// Tighten the socket send timeout to |timeout| if it is stricter than the
// currently configured one. Hops onto the client's event base and holds only
// a weak pointer, so a client destroyed in the meantime is simply skipped.
void AsyncMcClientImpl::updateWriteTimeout(std::chrono::milliseconds timeout) {
  if (!timeout.count()) {
    // Zero means "no timeout requested"; nothing to update.
    return;
  }
  auto selfWeak = selfPtr_;
  eventBase_.runInEventBaseThread([selfWeak, timeout]() {
    if (auto self = selfWeak.lock()) {
      // Only ever shrink the timeout (or set it if previously unset).
      if (!self->connectionOptions_.writeTimeout.count() ||
          self->connectionOptions_.writeTimeout > timeout) {
        self->connectionOptions_.writeTimeout = timeout;
      }
      if (self->socket_) {
        self->socket_->setSendTimeout(
            self->connectionOptions_.writeTimeout.count());
      }
    }
  });
}
// Returns TCP retransmits per kilobyte written since the previous call,
// 0.0 when no new kilobytes have been written, or -1.0 when the information
// is unavailable (no socket, or getsockopt(TCP_INFO) failed).
double AsyncMcClientImpl::getRetransmissionInfo() {
  if (socket_ != nullptr) {
    struct tcp_info tcpinfo;
    socklen_t len = sizeof(struct tcp_info);
    auto& socket = dynamic_cast<folly::AsyncSocket&>(*socket_);
    if (socket.getSockOpt(IPPROTO_TCP, TCP_INFO, &tcpinfo, &len) == 0) {
      const uint64_t totalKBytes = socket.getRawBytesWritten() / 1000;
      if (totalKBytes == lastKBytes_) {
        // Nothing written since last call; also avoids division by zero.
        return 0.0;
      }
      const auto retransPerKByte = (tcpinfo.tcpi_total_retrans - lastRetrans_) /
          (double)(totalKBytes - lastKBytes_);
      // Remember counters so the next call reports a delta, not a total.
      lastKBytes_ = totalKBytes;
      lastRetrans_ = tcpinfo.tcpi_total_retrans;
      return retransPerKByte;
    }
  }
  return -1.0;
}
} // memcache
} // facebook
|
# Placeholder entrypoint action.
echo "Hello world!"
# Celery workers are currently disabled; uncomment to start them.
#celery multi start w1 -A main.celery -l info --pidfile=/var/run/%n.pid --logfile=/data/logs/w1.log
#celery multi start w2 -A main.celery -l info --pidfile=/var/run/%n.pid --logfile=/data/logs/w2.log
<reponame>laishujie/MvpMTC
package com.lai.mtc.bean;
import com.lai.mtc.mvp.utlis.IPopMenu;
import java.io.Serializable;
import java.util.List;
/**
* @author Lai
* @time 2018/1/20 22:57
* @describe describe
*/
/**
 * Detail model of a comic, including its chapter lists and mirror sources.
 * <p>
 * Field names intentionally use snake_case to match the wire format, so do
 * not rename them without updating (de)serialization.
 *
 * @author Lai
 * @time 2018/1/20 22:57
 */
public class ComicListDetail implements Serializable, IPopMenu {

    // --- basic info (snake_case mirrors the JSON payload) ---
    private int id;
    private String cover;            // cover image URL
    private String name;
    private String author;
    private String category;
    private String updated_date;
    private String area;
    private String status;
    private String description;
    private String track_url;
    private int hot;
    private String source;
    private int long_updated_date;
    private int chapters_count;
    private java.util.List<String> tag_list;

    // --- chapter lists ---
    // Raw chapter list as delivered by the server.
    private java.util.List<ChaptersBean> chapters;
    // Ascending-order view. setChapters() assigns the SAME list reference
    // here (not a sorted copy); callers are expected to reorder it.
    private java.util.List<ChaptersBean> ascendingOrderChapters;
    // Chapters currently shown in the UI.
    private java.util.List<ChaptersBean> mShowChapters;
    // Most recently shown chapters.
    private java.util.List<ChaptersBean> mLastChapters;

    // --- mirrors (alternative sources) ---
    private java.util.List<MirrorsBean> mirrors;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getCover() {
        return cover;
    }

    public void setCover(String cover) {
        this.cover = cover;
    }

    /** Comic title; also serves as the IPopMenu display name. */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getAuthor() {
        return author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public String getCategory() {
        return category;
    }

    public void setCategory(String category) {
        this.category = category;
    }

    public String getUpdated_date() {
        return updated_date;
    }

    public void setUpdated_date(String updated_date) {
        this.updated_date = updated_date;
    }

    public String getArea() {
        return area;
    }

    public void setArea(String area) {
        this.area = area;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getTrack_url() {
        return track_url;
    }

    public void setTrack_url(String track_url) {
        this.track_url = track_url;
    }

    public int getHot() {
        return hot;
    }

    public void setHot(int hot) {
        this.hot = hot;
    }

    public String getSource() {
        return source;
    }

    public void setSource(String source) {
        this.source = source;
    }

    public int getLong_updated_date() {
        return long_updated_date;
    }

    public void setLong_updated_date(int long_updated_date) {
        this.long_updated_date = long_updated_date;
    }

    public int getChapters_count() {
        return chapters_count;
    }

    public void setChapters_count(int chapters_count) {
        this.chapters_count = chapters_count;
    }

    public List<String> getTag_list() {
        return tag_list;
    }

    public void setTag_list(List<String> tag_list) {
        this.tag_list = tag_list;
    }

    public List<ChaptersBean> getChapters() {
        return chapters;
    }

    /**
     * Sets the raw chapter list and aliases it as the ascending-order list.
     * Note: both fields end up referencing the same List instance.
     */
    public void setChapters(List<ChaptersBean> chapters) {
        this.chapters = chapters;
        this.ascendingOrderChapters = chapters;
    }

    public List<ChaptersBean> getAscendingOrderChapters() {
        return ascendingOrderChapters;
    }

    public void setAscendingOrderChapters(List<ChaptersBean> ascendingOrderChapters) {
        this.ascendingOrderChapters = ascendingOrderChapters;
    }

    public List<ChaptersBean> getShowChapters() {
        return mShowChapters;
    }

    public void setShowChapters(List<ChaptersBean> showChapters) {
        mShowChapters = showChapters;
    }

    public List<ChaptersBean> getLastChapters() {
        return mLastChapters;
    }

    public void setLastChapters(List<ChaptersBean> lastChapters) {
        mLastChapters = lastChapters;
    }

    public List<MirrorsBean> getMirrors() {
        return mirrors;
    }

    public void setMirrors(List<MirrorsBean> mirrors) {
        this.mirrors = mirrors;
    }

    /** A single chapter entry; also used as an adapter list item. */
    public static class ChaptersBean implements Serializable {
        /** Adapter item type: a regular chapter row. */
        public static final int NORMAL = 1;
        /** Adapter item type: the bottom/footer row. */
        public static final int BOTTOM = 2;

        private int comic_id;
        private int index;
        private String name;
        private String track_url;
        private int itemType = NORMAL;

        public int getItemType() {
            return itemType;
        }

        /**
         * Sets the adapter item type ({@link #NORMAL} or {@link #BOTTOM}).
         * Added because the type constants were previously unreachable:
         * there was no way to mark a row as BOTTOM.
         */
        public void setItemType(int itemType) {
            this.itemType = itemType;
        }

        public int getComic_id() {
            return comic_id;
        }

        public void setComic_id(int comic_id) {
            this.comic_id = comic_id;
        }

        public int getIndex() {
            return index;
        }

        public void setIndex(int index) {
            this.index = index;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getTrack_url() {
            return track_url;
        }

        public void setTrack_url(String track_url) {
            this.track_url = track_url;
        }
    }

    /** A mirror source; its IPopMenu display name is the source string. */
    public static class MirrorsBean implements IPopMenu, Serializable {
        private int id;
        private String source;

        @Override
        public String getName() {
            return source;
        }

        public int getId() {
            return id;
        }

        public void setId(int id) {
            this.id = id;
        }

        public String getSource() {
            return source;
        }

        public void setSource(String source) {
            this.source = source;
        }
    }
}
|
import React, { Component } from 'react';
import { BrowserRouter, Route, Redirect, Switch } from 'react-router-dom';
import { connect } from 'react-redux';
import './Assets/groceryGetterTheme/theme.css';
import 'primereact/resources/primereact.min.css';
import 'primeicons/primeicons.css';
import './App.css';
import AuthPage from './Pages/Auth';
import MenuPage from './Pages/Menu';
import GroceryPage from './Pages/Grocery';
import RecipePage from './Pages/Recipe';
class App extends Component {
render() {
return (
<BrowserRouter>
<React.Fragment>
<Switch>
{this.props.token && <Redirect from="/auth" to="/menu" exact />}
{!this.props.token && <Route path="/auth" component={AuthPage} />}
{!this.props.token && <Redirect to="/auth" exact />}
{<Route path="/menu" component={MenuPage} />}
{<Route path="/grocery" component={GroceryPage} />}
{<Route path="/recipe" component={RecipePage}/>}
</Switch>
</React.Fragment>
</BrowserRouter>
);
}
}
const mapStateToProps = state => ({
token: state.auth.token
})
export default connect(mapStateToProps)(App) |
<gh_stars>0
#!/usr/bin/env ruby
# DSL mixin for Telemetry daemon scripts: configuration helpers, per-widget
# flow registration, and the scheduler that runs due tasks each interval.
#
# Fixes in this revision:
# * grid() previously registered variant :gid (typo) — now :grid.
# * run_scheduled_flow_updates initialized @@tasks as a Hash although tasks
#   are stored in an Array everywhere else — now an Array.
# * log_level() computed a normalized string but compared the raw argument,
#   so a :debug symbol was silently ignored — now compares the normalized one.
module TelemetryDaemon
  # ---- configuration DSL ------------------------------------------------

  # Point the client at a specific Telemetry API host.
  def api_host(api_host)
    Telemetry.api_host = api_host
  end

  def api_token(token)
    Telemetry.token = token
  end

  # Enable debug logging when the configured level is 'debug' (string or
  # symbol; anything else leaves the logger untouched).
  def log_level(log_level)
    level = log_level.to_s
    if level == 'debug'
      Telemetry.logger.level = Logger::DEBUG
      Telemetry.logger.debug "Starting Debug Logging"
    end
  end

  # Seconds between scheduler loops (see wait_for_interval_from).
  def interval(interval)
    @@interval = interval
  end

  @@flows_expire_in = nil
  # Global default for how long produced flows remain valid, in seconds.
  def flows_expire_in(i)
    @@flows_expire_in = i
  end

  # Sleep until the next interval boundary measured from +timestamp+.
  # Ensures a minimum of 1 second between loops.
  def wait_for_interval_from(timestamp)
    @@interval ||= 60
    @@interval = 1.0 if @@interval < 1
    sleep_length = timestamp + @@interval.to_f - Time.now.to_f
    sleep sleep_length if sleep_length > 0
  end

  # Merge JSON-encoded values into the current flow's value hash.
  def set_json(x)
    x.each { |k, v| @@h[k.to_sym] = MultiJson.load(v) }
  end

  # Merge plain values into the current flow's value hash.
  def set(x)
    x.each { |k, v| @@h[k.to_sym] = v }
  end

  # Code blocks users can call at the beginning and end of each interval.
  @@begin_interval = nil
  @@end_interval = nil

  def begin_interval(&block)
    @@begin_interval = block
  end

  def run_begin_interval
    @@begin_interval.yield if @@begin_interval
  end

  def end_interval(&block)
    @@end_interval = block
  end

  def run_end_interval
    @@end_interval.yield if @@end_interval
  end

  # ---- widget DSL -------------------------------------------------------
  # Each helper registers a task as [variant, tag, frequency, offset, block].
  # Several helpers deliberately map to a shared variant (e.g. clock/weather
  # reuse :countdown, status reuses :servers, video reuses :value) — this
  # mirrors the original hand-written methods; NOTE(review): confirm against
  # the Telemetry API if a dedicated variant is expected for any of them.
  WIDGET_VARIANTS = {
    barchart:    :barchart,
    bulletchart: :value,
    box:         :countdown,
    clock:       :countdown,
    countdown:   :countdown,
    compass:     :countdown,
    custom:      :custom,
    funnelchart: :funnelchart,
    gauge:       :gauge,
    graph:       :graph,
    grid:        :grid, # was :gid — fixed obvious typo
    histogram:   :histogram,
    icon:        :icon,
    image:       :image,
    iframe:      :iframe,
    log:         :log,
    map:         :map,
    multigauge:  :multigauge,
    multivalue:  :multivalue,
    piechart:    :piechart,
    scatterplot: :scatterplot,
    servers:     :servers,
    status:      :servers,
    table:       :table,
    text:        :text,
    tickertape:  :tickertape,
    timechart:   :timechart,
    timeline:    :timeline,
    timeseries:  :timeseries,
    upstatus:    :upstatus,
    value:       :value,
    video:       :value,
    waterfall:   :waterfall,
    weather:     :countdown,
  }.freeze

  WIDGET_VARIANTS.each do |method_name, variant|
    define_method(method_name) do |tag, frequency = 0, offset = nil, &block|
      @@tasks ||= []
      @@tasks << [variant, tag, frequency, offset, block]
    end
  end

  # Run every task whose schedule is due and collect the produced flow
  # values into a buffer keyed by tag. Returns the buffer.
  def run_scheduled_flow_updates
    @@buffer = {}
    @@tasks ||= []
    @@next_run_at ||= {}
    @@tasks.each do |task|
      @@h = {}
      variant, tag, frequency, offset, block = task
      now = Time.now
      # Check whether we should wait an interval before running
      if frequency > 0
        Telemetry::logger.debug "Task #{task[0]} #{task[1]} (every #{task[2]}s)"
        if @@next_run_at[tag] && @@next_run_at[tag] > now.to_i
          Telemetry::logger.debug " - Not scheduled yet (waiting #{-(now.to_i - @@next_run_at[tag])}s)"
          next
        end
        @@next_run_at[tag] = now.to_i + frequency
        Telemetry::logger.debug " - Running intermittent task at #{now}"
        # If an offset is defined then align runtimes to the offset.
        # How close you can get to the desired offset depends on the global
        # interval, so set it relatively small when using this feature.
        if offset and offset >= 0 and offset <= 86400
          this_morning = Time.new(now.year, now.month, now.day).to_i
          time_since_offset = now.to_i - this_morning - offset
          time_since_offset += 86400 if time_since_offset < 0
          @@next_run_at[tag] -= time_since_offset % frequency
        end
      else
        Telemetry::logger.debug " - Task #{task[0]} #{task[1]}"
      end
      # Execute the flow
      Telemetry.logger.debug " + Executing task #{task[0]} #{task[1]}"
      block.yield
      if @@h == {}
        Telemetry.logger.debug " - Skipping empty task values #{task[0]} #{task[1]}"
        next
      end
      # Use global default to set expires_at field if necessary
      if @@flows_expire_in and not @@h[:expires_at]
        set expires_at: Time.now.to_i + @@flows_expire_in
      end
      @@buffer[tag] = @@h
    end
    @@buffer
  end
end
|
# DO NOT EDIT -- GENERATED BY: `generate-tool.py --generate`
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# Allowed URL keys. This file is machine-generated (see header); keep changes
# in `generate-tool.py`, not here. Presumably each literal names a known
# telemetry/documentation URL — verify against the generator's source.
URLS = Literal[
    "cli_launch",
    "doc_run",
    "doc_require",
    "doc_start_err",
    "upgrade_local",
    "wandb_init",
]
|
#!/bin/bash
# SLURM batch job: run the "sin" activation experiment, instance 1.
#SBATCH -J Act_sin_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional arguments: activation=sin, run=1, optimizer=Adam, then numeric
# hyperparameters and init/embedding names — presumably depth, dropout,
# hidden size, learning rate, initializer, embedding; confirm against
# meta.py's argument parsing before editing.
python3 /home/se55gyhe/Act_func/progs/meta.py sin 1 Adam 4 0.7087279355875828 493 0.0010470492554084177 lecun_uniform PE-infersent
|
import SwiftUI
// Expense model.
// Conforms to Identifiable so lists can diff rows by a stable identity even
// when two expenses share a name (keying on \.name breaks with duplicates).
// `id` has a default value, so the memberwise initializer remains
// ExpenseItem(name:amount:) — existing call sites are unaffected.
struct ExpenseItem: Identifiable {
    let id = UUID()
    let name: String
    let amount: Double
}
// ViewModel to manage expenses; views observe it via the @Published list.
class ExpensesViewModel: ObservableObject {
    // All recorded expenses, in insertion order (newest appended last).
    @Published var expenses: [ExpenseItem] = []
}
// AddExpenseView to add new expenses.
// Collects a name and an amount; the parsed ExpenseItem is handed to the
// owner through `onAddExpense`. The entry is accepted only when the amount
// parses as a Double; the name is not validated (may be empty).
struct AddExpenseView: View {
    // Callback invoked with each newly created expense.
    let onAddExpense: (ExpenseItem) -> Void

    @State private var expenseName = ""
    @State private var expenseAmount = ""

    var body: some View {
        VStack {
            TextField("Expense Name", text: $expenseName)
                .textFieldStyle(RoundedBorderTextFieldStyle())
                .padding()
            TextField("Amount", text: $expenseAmount)
                .textFieldStyle(RoundedBorderTextFieldStyle())
                .padding()
            Button("Add Expense") {
                // Only submit on a numeric amount, then reset the form.
                if let amount = Double(expenseAmount) {
                    let newExpense = ExpenseItem(name: expenseName, amount: amount)
                    onAddExpense(newExpense)
                    expenseName = ""
                    expenseAmount = ""
                }
            }
            .padding()
        }
    }
}
// ExpensesListView to display the list of expenses.
struct ExpensesListView: View {
    @ObservedObject var viewModel: ExpensesViewModel

    var body: some View {
        List {
            // NOTE(review): rows are keyed by name — duplicate names will
            // confuse list diffing; consider keying by a unique id.
            ForEach(viewModel.expenses, id: \.name) { expense in
                HStack {
                    Text(expense.name)
                    Spacer()
                    // Amount rendered as a two-decimal dollar value.
                    Text("$\(String(format: "%.2f", expense.amount))")
                }
            }
        }
    }
}
// Main ContentView.
// Fixes two defects: (1) the previous code passed an `onAddExpense:`
// argument to ExpensesListView, which declares no such parameter — a
// compile error; (2) the add-expense sheet existed but nothing ever set
// `isShowingAddView`, so it could never be presented. A "+" navigation bar
// button now triggers it.
struct ContentView: View {
    // Controls presentation of the AddExpenseView sheet.
    @State private var isShowingAddView = false
    @ObservedObject var viewModel = ExpensesViewModel()

    var body: some View {
        NavigationView {
            ExpensesListView(viewModel: viewModel)
                .navigationBarTitle("Spendy")
                .navigationBarItems(trailing: Button(action: {
                    self.isShowingAddView = true
                }) {
                    Image(systemName: "plus")
                })
        }
        .sheet(isPresented: $isShowingAddView) {
            AddExpenseView { expenseItem in
                self.viewModel.expenses.append(expenseItem)
            }
        }
    }
}
#!/bin/bash
# Cleanup script: removes WeCube plugin/platform containers and images.
#
# $1 (optional): an image tag to KEEP — it is excluded when deleting
# platform images. Previously `grep -v $1` ran with no pattern when the
# argument was omitted, silently consuming the next argument; an empty
# value is now handled explicitly.

keep="${1:-}"

# Remove plugin/service containers, if any exist.
containers=$(docker ps -a | grep -E 'wecube-plugins|service-mgmt|monitor|artifacts' | awk '{print $1}')
if [[ -n "$containers" ]]; then
  # Intentionally unquoted: one id per word.
  docker rm -f $containers
fi

# Remove plugin/service images, if any exist.
images=$(docker images | grep -E 'wecube-plugins|wecmdb|monitor|service-mgmt|artifacts' | awk '{print $1":"$2}')
if [[ -n "$images" ]]; then
  docker rmi $images
fi

# Stop the compose stacks (core services and the core MySQL instance).
docker-compose -f docker-compose.yml down
docker-compose -f wecube_core_mysql.yml down

# Remove platform images, keeping the tag passed as $1 (if any).
if [[ -n "$keep" ]]; then
  platform_images=$(docker images | grep -v "$keep" | grep -E 'platform-core|platform-gateway|wecube-portal|platform-auth-server|wecube-db' | awk '{print $1":"$2}')
else
  platform_images=$(docker images | grep -E 'platform-core|platform-gateway|wecube-portal|platform-auth-server|wecube-db' | awk '{print $1":"$2}')
fi
if [[ -n "$platform_images" ]]; then
  docker rmi $platform_images
fi

# Remove dangling (<none>) images.
dangling=$(docker images | grep none | awk '{print $3}')
if [[ -n "$dangling" ]]; then
  docker rmi $dangling
fi
import { Player, Team, uuid } from "@cph-scorer/model";
/** Persistence contract for teams. */
export interface TeamProvider {
  /** Create a team from the given players; resolves with the stored Team. */
  insert: (players: Player[]) => Promise<Team>;
  /** Set a team's score by id; resolves with the updated Team. */
  update: (id: uuid, score: number) => Promise<Team>;
}
|
#!/usr/bin/env bash
# Launch training with the ground-truth layout model on the SHAPES dataset.
python train.py \
  --model_type gt_layout \
  --data_path data/shapes_dataset
<gh_stars>0
myApp.controller('UserController', ['UserService', 'ProjectService', function (UserService, ProjectService) {
    // Controller-as view model: everything the template uses hangs off
    // `vm` (an alias for `this`).
    var vm = this;

    // Delegated project actions.
    vm.newProject = ProjectService.newProject;
    vm.deleteProject = ProjectService.deleteProject;
    vm.createProject = ProjectService.createProject;
    vm.addCollaborator = ProjectService.addCollaborator;
    vm.removeSelf = ProjectService.removeSelf;

    // Shared state references.
    vm.currentProject = ProjectService.currentProject;
    vm.projects = ProjectService.projects;
    vm.collaboratorList = ProjectService.collaboratorList;
    vm.project = ProjectService.project;
    vm.userService = UserService;
    vm.userObject = UserService.userObject;

    // Initial data loads.
    ProjectService.getProjects();
    ProjectService.getCollaboratorProjects();

    // Select a project, then fetch its track data.
    vm.selectProject = function (name) {
        vm.currentProject.name = name;
        ProjectService.getTrack();
    };

    // Render the collaborator list as "a, b, c". A lone null entry (no
    // collaborators) coerces to the string 'null' and renders as 'None'.
    vm.listCollaborators = function (list) {
        var rendered = '';
        list.forEach(function (entry) {
            rendered += entry + ', ';
        });
        rendered = rendered.slice(0, -2);
        if (rendered === 'null') {
            rendered = 'None';
        }
        return rendered;
    };
}]);
|
<filename>OR-M Data Entities/OR-M Data Entities.Tests.Database/ts/Tables/TestUpdateNewSchema.sql
-- Test fixture table: verifies that schema updates work for a table created
-- in the non-default [ts] schema.
-- (A mis-encoded UTF-8 BOM rendered as garbage characters before CREATE was
-- removed; it would break parsing when the file is read as plain text.)
CREATE TABLE [ts].[TestUpdateNewSchema]
(
    [Id] INT NOT NULL PRIMARY KEY IDENTITY,
    [Name] VARCHAR(50) NULL
)
|
// Export the best available "defer to the next turn" primitive:
// process.nextTick (Node) > setImmediate > setTimeout fallback.
var defer;
if (typeof process !== "undefined" && typeof process.nextTick === "function") {
    defer = process.nextTick;
} else if (typeof setImmediate === "function") {
    defer = setImmediate;
} else {
    defer = setTimeout;
}
module.exports = defer;
<filename>src/main/java/com/sinergise/sentinel/byoctool/sentinelhub/models/ByocTile.java
package com.sinergise.sentinel.byoctool.sentinelhub.models;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.Getter;
import lombok.Setter;
import org.geojson.GeoJsonObject;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
/**
 * A BYOC tile as exchanged with the Sentinel Hub API.
 * Known fields are mapped explicitly; any other JSON properties are kept in
 * {@link #other} so round-tripping does not lose data.
 */
@Data
public class ByocTile implements NoJsonAutoDetect {

    // Placeholder token used inside tile paths; substituted per band —
    // NOTE(review): substitution happens outside this class, confirm usage.
    public static final String BAND_PLACEHOLDER = "(BAND)";

    @JsonProperty("id")
    private String id;

    @JsonProperty("path")
    private String path;

    // Ingestion status string as reported by the service (values not
    // enumerated here — see the Sentinel Hub BYOC API documentation).
    @JsonProperty("status")
    private String status;

    @JsonProperty("sensingTime")
    private Instant sensingTime;

    @JsonProperty("coverGeometry")
    private GeoJsonObject coverGeometry;

    @JsonProperty("additionalData")
    private AdditionalData additionalData;

    // Catch-all for JSON properties without a dedicated field above.
    private Map<String, Object> other = new HashMap<>();

    /** Jackson fallback setter: stores unknown properties in {@link #other}. */
    @JsonAnySetter
    public void set(String name, Object value) {
        other.put(name, value);
    }

    /** Jackson fallback getter: serializes unknown properties back out. */
    @JsonAnyGetter
    public Map<String, Object> any() {
        return other;
    }

    /** Extra service-populated data about a tile. */
    @Getter
    @Setter
    public static class AdditionalData {
        // Populated by the service when ingestion failed — NOTE(review):
        // nullability when ingestion succeeds inferred; confirm with API docs.
        @JsonProperty("failedIngestionCause")
        private String failedIngestionCause;
    }
}
|
// Vuex-style store module factory. `ref` is accepted for API compatibility
// but is not used by this module.
export default function (ref) {
    const initialState = {
        platform: window.utils.platform,
        environment: process.env.NODE_ENV || 'development',
        currPath: 'upload',
        downloadLocation: window.utils.defaultDownloadDir,
        testdata: '',
        infoTipText: '',
    };

    return {
        state: initialState,
        getters: {
            platform: (state) => state.platform,
            environment: (state) => state.environment,
            currPath: (state) => state.currPath,
            downloadLocation: (state) => state.downloadLocation,
            testdata: (state) => state.testdata,
            infoTipText: (state) => state.infoTipText,
        },
        mutations: {
            // Generic setter: copies every enumerable key of `obj` onto
            // state. Useful for simple value redeclarations when no logic
            // is needed in the mutation.
            byKey(state, obj) {
                for (const key in obj) {
                    state[key] = obj[key];
                }
            },
            setCurrPath(state, path) {
                state.currPath = path;
            },
            setDownloadLocation(state, location) {
                state.downloadLocation = location;
            },
            setInfoTipText(state, text) {
                state.infoTipText = text;
            },
            setTestdata(state, str) {
                state.testdata = str;
            },
        },
        actions: {},
    };
}
|
<reponame>ShaolinDeng/SDK-Android
/*
* Copyright (C) 2018 iFLYTEK CO.,LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iflytek.cyber.resolver.xftemplateruntime.model;
import android.os.Parcel;
import android.os.Parcelable;
/**
 * Parcelable payload of an XF template directive: a titled card carrying
 * either an image or a video, plus the skill's icon.
 */
public class XfTemplatePayload implements Parcelable {
    public String token;     // directive token, used for correlation upstream
    public String title;
    public Image skillIcon;
    // Discriminator for which media field below is populated —
    // NOTE(review): exact values not visible here; confirm with the protocol.
    public String mediaType;
    public Image imageMedia;
    public Video videoMedia;

    @Override
    public int describeContents() {
        // No file descriptors are parceled.
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // Write order is the contract: it must match the read order in
        // XfTemplatePayload(Parcel) below.
        dest.writeString(this.token);
        dest.writeString(this.title);
        dest.writeParcelable(this.skillIcon, flags);
        dest.writeString(this.mediaType);
        dest.writeParcelable(this.imageMedia, flags);
        dest.writeParcelable(this.videoMedia, flags);
    }

    public XfTemplatePayload() {
    }

    /** Reconstructs the payload; field order mirrors writeToParcel(). */
    protected XfTemplatePayload(Parcel in) {
        this.token = in.readString();
        this.title = in.readString();
        this.skillIcon = in.readParcelable(Image.class.getClassLoader());
        this.mediaType = in.readString();
        this.imageMedia = in.readParcelable(Image.class.getClassLoader());
        this.videoMedia = in.readParcelable(Video.class.getClassLoader());
    }

    public static final Parcelable.Creator<XfTemplatePayload> CREATOR = new Parcelable.Creator<XfTemplatePayload>() {
        @Override
        public XfTemplatePayload createFromParcel(Parcel source) {
            return new XfTemplatePayload(source);
        }

        @Override
        public XfTemplatePayload[] newArray(int size) {
            return new XfTemplatePayload[size];
        }
    };
}
|
import pygame
import sys

# Initialize Pygame
pygame.init()

# Window / color constants
WIDTH = 800
HEIGHT = 600
WHITE = (255, 255, 255)
RED = (255, 0, 0)
BLUE = (0, 0, 255)
FPS = 60  # the original loop ran unthrottled, pegging a CPU core

# Set up the game window
window = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Maze Game")
clock = pygame.time.Clock()

# Player character
player_width = 40
player_height = 40
player_x = 50
player_y = 50
player_speed = 5

# Maze design: axis-aligned wall rectangles
maze_walls = [
    pygame.Rect(200, 100, 20, 400),
    pygame.Rect(200, 100, 400, 20),
    pygame.Rect(600, 100, 20, 400),
    pygame.Rect(200, 480, 420, 20),
]

# Game loop
running = True
while running:
    # Process events first so a quit takes effect before drawing.
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False

    # Handle player movement. Remember the pre-move position so a collision
    # can undo the whole move; the original re-inspected the key state to
    # reverse each axis, which double-moves when opposing keys are held.
    prev_x, prev_y = player_x, player_y
    keys = pygame.key.get_pressed()
    if keys[pygame.K_LEFT]:
        player_x -= player_speed
    if keys[pygame.K_RIGHT]:
        player_x += player_speed
    if keys[pygame.K_UP]:
        player_y -= player_speed
    if keys[pygame.K_DOWN]:
        player_y += player_speed

    # Keep the player inside the window (previously it could walk off-screen).
    player_x = max(0, min(player_x, WIDTH - player_width))
    player_y = max(0, min(player_y, HEIGHT - player_height))

    # Ensure player cannot move through walls: revert the move on collision.
    player_rect = pygame.Rect(player_x, player_y, player_width, player_height)
    if any(player_rect.colliderect(wall) for wall in maze_walls):
        player_x, player_y = prev_x, prev_y

    # Draw the frame: background, walls, then the player.
    window.fill(WHITE)
    for wall in maze_walls:
        pygame.draw.rect(window, RED, wall)
    pygame.draw.rect(window, BLUE, (player_x, player_y, player_width, player_height))

    # Update display, capped to FPS frames per second.
    pygame.display.update()
    clock.tick(FPS)

# Quit Pygame
pygame.quit()
sys.exit()
<gh_stars>1-10
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vertx.java.addons.old.amqp;
/**
 * AMQP channel wrapper — placeholder.
 *
 * The previous RabbitMQ-backed draft (publish/subscribe, queue declaration,
 * request-response helpers) was left fully commented out inside this class,
 * i.e. dead code. It has been removed here; the draft remains retrievable
 * from version-control history. The empty class is kept so any existing
 * references to the type continue to compile.
 */
public class Channel {
}
|
package com.jinke.kanbox;
/**
 * Background thread intended to download an index from Kanbox and receive
 * results through the {@link RequestListener} callbacks.
 *
 * NOTE(review): every method below is still an auto-generated stub — the
 * class currently does nothing and must be implemented before use.
 */
public class DownloadIndexThread extends Thread implements RequestListener {

    /** Thread body — download logic not yet implemented. */
    @Override
    public void run() {
        // TODO Auto-generated method stub
    }

    /** Success callback for a finished request — not yet implemented. */
    @Override
    public void onComplete(String response, int operationType) {
        // TODO Auto-generated method stub
    }

    /** Failure callback — not yet implemented. */
    @Override
    public void onError(KanboxException error, int operationType) {
        // TODO Auto-generated method stub
    }

    /** Failure callback for path-based operations — not yet implemented. */
    @Override
    public void onError(KanboxException error, int operationType, String path,
            String destPath) {
        // TODO Auto-generated method stub
    }

    /** Progress callback (currSize is presumably bytes downloaded — confirm against RequestListener). */
    @Override
    public void downloadProgress(long currSize) {
        // TODO Auto-generated method stub
    }
}
|
#!/bin/bash
# Regenerate the CWL workflow for the svcall test project, then rewrite the
# absolute .../testdata prefix into a relative path so the generated sample
# JSON is portable between machines.
set -eu -o pipefail

PNAME=svcall

# Start from a clean slate; a stale workflow directory could mask generation errors.
rm -rf "$PNAME-workflow"
bcbio_vm.py cwl --systemconfig=../bcbio_system.yaml "$PNAME.yaml"

# $(...) instead of legacy backticks; quote expansions against paths with spaces.
BASEDIR=$(cd .. && pwd)
sed -i.bak "s#$BASEDIR/testdata#../../testdata#" "$PNAME-workflow/main-$PNAME-samples.json"
|
package acousticfield3d.utils;
/**
*
* @author am14010
*/
public class VarConv {

    /**
     * Interprets a (signed) Java byte as an unsigned value.
     *
     * @param value raw byte
     * @return the unsigned value in the range 0..255
     */
    public static int uByteToInt(final byte value) {
        return 0xFF & value;
    }
}
|
<filename>src/example.py
# Importing cef
from cef_wrapper import cef, Browser
# Example with the 'Browser' object:
# 'Browser' can be used for type hints
# https://github.com/cztomczak/cefpython/blob/master/api/Browser.md
def foo(browser: Browser) -> None:
    """Demonstrate using `Browser` as a type hint.

    With the annotation, editors resolve methods such as `CanGoBack` for
    completion and inline docs.
    """
    browser.CanGoBack()  # Syntax highlighting and a docstring ;)
# Example with the class 'WindowInfo':
# https://github.com/cztomczak/cefpython/blob/master/api/WindowInfo.md
def bar() -> None:
    """Demonstrate instantiating `cef.WindowInfo` through the wrapper."""
    window_info = cef.WindowInfo()  # Like with cefpython3, WindowInfo() needs to be initialized
    # NOTE(review): the None placeholders are illustration only; real callers
    # must pass a valid parent window handle and rect.
    window_info.SetAsChild(parentWindowHandle=None, windowRect=None)
# Using the hello_world example from:
# https://github.com/cztomczak/cefpython/blob/master/examples/hello_world.py
#
# Hello world example. Doesn't depend on any third party GUI framework.
# Tested with CEF Python v57.0+.
#
# ==== High DPI support on Windows ====
# To enable DPI awareness on Windows you have to either embed DPI aware manifest
# in your executable created with pyinstaller or change python.exe properties manually:
# Compatibility > High DPI scaling override > Application.
# Setting DPI awareness programmatically via a call to cef.DpiAware.EnableHighDpiSupport
# is problematic in Python, may not work and can cause display glitches.
import platform
import sys
def main() -> None:
    """Run the hello-world browser: init CEF, open a window, block on the loop."""
    check_versions()
    sys.excepthook = cef.ExceptHook  # To shutdown all CEF processes on error
    cef.Initialize()
    cef.CreateBrowserSync(url="https://www.google.com/",
                          window_title="Hello World!")
    cef.MessageLoop()  # blocks until the last browser window is closed
    cef.Shutdown()
def check_versions() -> None:
    """Print CEF/Chromium/Python versions and enforce the minimum CEF Python version.

    Bug fix: the original compared version strings lexicographically
    (``cef.__version__ >= "57.0"``), which breaks once the major version gains
    a digit (e.g. ``"100.0" < "57.0"`` as strings). Compare numeric version
    components instead.
    """
    ver = cef.GetVersion()
    print("[hello_world.py] CEF Python {ver}".format(ver=ver["version"]))
    print("[hello_world.py] Chromium {ver}".format(ver=ver["chrome_version"]))
    print("[hello_world.py] CEF {ver}".format(ver=ver["cef_version"]))
    print("[hello_world.py] Python {ver} {arch}".format(
        ver=platform.python_version(),
        arch=platform.architecture()[0]))

    def _as_tuple(version):
        # Keep only the leading numeric components: "57.0" -> (57, 0).
        parts = []
        for piece in version.split("."):
            if not piece.isdigit():
                break
            parts.append(int(piece))
        return tuple(parts)

    assert _as_tuple(cef.__version__) >= (57, 0), \
        "CEF Python v57.0+ required to run this"
if __name__ == '__main__':
    # Launch the demo only when executed directly, not on import.
    main()
package vkdumper
import java.util.concurrent.ConcurrentLinkedQueue
import com.typesafe.scalalogging.LazyLogging
import vkdumper.ApiData._
import scala.collection.concurrent.TrieMap
import scala.util.Random
import MockingCommon._
import monix.eval.Task
import scala.collection.JavaConverters._
import Utils._
import akka.actor.ActorRefFactory
/** Shared type aliases and helpers for the API/DB mocks below. */
object MockingCommon {
  // Key of keyed response maps: (peer id, Long discriminator).
  // NOTE(review): the exact meaning of the Long component is not visible here.
  type RespMapK = (Int, Long)
  type RespMapV = List[Task[Result[ApiMessagesResponse]]]
  // Short alias for the lock-free queue used throughout the mocks.
  type CLQ[T] = ConcurrentLinkedQueue[T]

  /** Fails loudly when a test touches an endpoint that was not mocked. */
  def ns: Nothing = throw new UnsupportedOperationException("not mocked")
}
/** Options for constructing the mocks; defaults to a config with an empty token. */
case class MockingOpts(
    cfg: Cfg = Cfg(token = "")
)
/**
 * In-memory mock of [[Api]].
 *
 * Tests stage canned responses through the push* methods; every incoming
 * request is also recorded into the matching *Reqs queue so assertions can
 * be made about what was asked for. Endpoints that are not mocked fail fast
 * via [[MockingCommon.ns]].
 */
class ApiMock(opts: MockingOpts)
    extends Api(opts.cfg)
    with LazyLogging {

  val rnd = new Random

  /** One recorded getHistory request. */
  case class MsgR(peer: Int, offset: Int, count: Int)
  val msgReqs = new CLQ[MsgR]

  /** One recorded getConversations request. */
  case class ConvR(offset: Int, count: Int)
  val convReqs = new CLQ[ConvR]

  // Staged responses: message histories keyed by peer; conversations and
  // users are shared collections returned to every request.
  val msgResps = new TrieMap[Int, List[ApiMessage]]
  val convResps = new CLQ[ApiMessageItem]
  val userResps = new CLQ[ApiUser]

  // Staged failures, consumed one per call: either a ResErr to return or a
  // Throwable to raise (see the pattern matches in getHistory/getConversations).
  val historyErrors = new TrieMap[(Int, Int), CLQ[Any]]
  val convErrors = new TrieMap[Int, CLQ[Any]]

  /** Drop all staged responses, staged errors and recorded requests. */
  def clearAll(): Unit = {
    msgReqs.clear()
    convReqs.clear()
    msgResps.clear()
    convResps.clear()
    userResps.clear()
    historyErrors.clear()
    convErrors.clear()
  }

  // Replace the queue's contents with the given list, preserving order.
  private def pushC[T](c: CLQ[T], o: List[T]): Unit = {
    c.clear()
    c.addAll(o.asJava)
  }

  /** Stage the conversation items returned by getConversations. */
  def pushCv(c: List[ApiMessageItem]): Unit =
    pushC(convResps, c)

  /** Stage the user profiles attached to every messages response. */
  def pushUsers(u: List[ApiUser]): Unit =
    pushC(userResps, u)

  /** Stage the full message history for one peer. */
  def pushMsg(peer: Int, m: List[ApiMessage]): Unit =
    msgResps.put(peer, m)

  /** Stage failures for getHistory(peer, offset); consumed one per call. */
  def pushMsgErr(peer: Int, offset: Int, err: List[Any]): Unit =
    historyErrors.put(peer -> offset, new CLQ(err.asJava))

  /** Stage failures for getConversations(offset); consumed one per call. */
  def pushCvErr(offset: Int, err: List[Any]): Unit =
    convErrors.put(offset, new CLQ(err.asJava))

  // Endpoints below are not mocked and must not be hit by tests.
  override def get(method: String, args: (String, String)*) = ns
  override def usersGet(uid: Int*) = ns
  override def getMe = ns
  override def getMsgByConvId(peer: Int, cids: Seq[Long]) = ns

  override def getHistory(peer: Int, offset: Int, count: Int, rev: Boolean) = Task {
    // Pop a staged error for this (peer, offset), if any was pushed.
    val pe = Option(historyErrors.getOrElse(peer -> offset, new CLQ).poll())
    pe.getOrElse(unit) match {
      case e: ResErr => e
      case t: Throwable => throw t
      case _ =>
        val u = userResps.asScala.toList
        val rq = MsgR(peer, offset, count)
        msgReqs.add(rq)
        // Slice the staged history like the real paginated API would.
        val rs = msgResps.get(peer) match {
          case None => ApiMessagesResponse(0, Nil, u)
          case Some(m) =>
            val storedCount = m.length
            val nl = m.slice(offset, offset + count)
            ApiMessagesResponse(storedCount, nl, u)
        }
        Res(rs)
    }
  }

  override def getConversations(offset: Int, count: Int) = Task {
    // Pop a staged error for this offset, if any was pushed.
    val pe = Option(convErrors.getOrElse(offset, new CLQ).poll())
    pe.getOrElse(unit) match {
      case e: ResErr => e
      case t: Throwable => throw t
      case _ =>
        val rq = ConvR(offset, count)
        convReqs.add(rq)
        // Slice the staged conversation list like the paginated API would.
        val resps = convResps.asScala.toList
        val storedCount = resps.length
        val rs = resps.slice(offset, offset + count)
        Res(ApiConvList(storedCount, rs))
    }
  }
}
/**
 * In-memory [[DB]] that records everything written to it (for assertions)
 * while still delegating to the real in-memory implementation.
 */
class DBMock(opts: MockingOpts)(implicit fac: ActorRefFactory) extends DB(InMem) {

  // Every profile / message ever passed to the add* methods, in call order.
  val usersM = new CLQ[ApiUser]
  val msgM = new CLQ[ApiMessage]

  override def addProfiles(in: Iterable[ApiUser]) = {
    usersM.addAll(in.toList.asJava)
    super.addProfiles(in)
  }

  override def addMessages(msgs: Iterable[ApiMessage]) = {
    msgM.addAll(msgs.toList.asJava)
    super.addMessages(msgs)
  }
}
|
module.exports = {
exportPathMap: () => ({
'/': { page: '/Home' },
}),
}
|
module.exports = class {
constructor() {
this.round = 0;
this.maxRound = 10;
this.users = [];
this.isPlaying = false;
this.order = [];
this.set = 0;
this.answer = '';
this.solver = new Set();
this.maxTime = 60;
this.time = 60;
this.timeClock;
}
joinUser(id, nickname) {
const user = {
id: id,
nickname: nickname,
score: 0,
};
this.users.push(user);
}
findUser(id) {
return this.users.findIndex(i => i.id == id);
}
leaveUser(id) {
const idx = this.findUser(id);
this.users.splice(idx, 1);
}
solve(id, score) {
const idx = this.findUser(id);
this.users[idx].score += score;
this.solver.add(id);
}
isSolved(id) {
return this.solver.has(id);
}
startGame(maxRound, maxTime) {
this.maxRound = maxRound;
this.maxTime = this.time = maxTime;
this.isPlaying = true;
this.round = 0;
this.solver.clear();
for (let i = 0; i < this.users.length; i++) {
this.users[i].score = 0;
}
this.finishRound();
}
finishGame() {
this.isPlaying = false;
}
finishRound() {
this.round++;
this.set = 0;
this.order = [...Array(this.userCount)].map((v, i) => i);
this.order.sort(() => Math.random() - 0.5);
}
finishSet() {
this.set++;
this.solver.clear();
}
get owner() {
return this.users[0].id;
}
get userCount() {
return this.users.length;
}
get questioner() {
console.log(`order = ${this.order}`);
const idx = this.order[this.set];
console.log(`idx = ${idx}`);
return this.users[idx];
}
get isFinishSet() {
return this.solver.size >= this.userCount;
}
get isFinishRound() {
return this.set >= this.userCount;
}
get isFinishGame() {
return this.round > this.maxRound;
}
};
|
<reponame>awslabs/clencli<filename>cobra/model/configure.go
/*
Copyright ยฉ 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package model
// Credentials is the root model of the credentials YAML file: a list of
// named credential profiles.
type Credentials struct {
	Profiles []CredentialProfile `yaml:"profiles"`
}
// CredentialProfile groups one or more credentials under a profile name.
type CredentialProfile struct {
	Name        string       `yaml:"name"`
	Description string       `yaml:"description,omitempty"` // optional free-form text
	Credentials []Credential `yaml:"credentials"`
}
// Credential holds one provider credential set.
// NOTE(review): the field names suggest AWS-style access/secret/session
// values, but no provider-specific handling is visible here — confirm.
type Credential struct {
	Name         string `yaml:"name,omitempty"` // optional label for this entry
	Provider     string `yaml:"provider"`
	AccessKey    string `yaml:"accessKey"`
	SecretKey    string `yaml:"secretKey"`
	SessionToken string `yaml:"sessionToken"`
}
// Configurations is the root model of the configurations YAML file: a list
// of named configuration profiles.
type Configurations struct {
	Profiles []ConfigurationProfile `yaml:"profiles"`
}
// ConfigurationProfile groups one or more configurations under a profile name.
type ConfigurationProfile struct {
	Name           string          `yaml:"name"`
	Description    string          `yaml:"description,omitempty"` // optional free-form text
	Configurations []Configuration `yaml:"configurations"`
}
// Configuration is one named configuration entry. Initialization and
// Unsplash are embedded, so their fields are promoted onto this struct.
type Configuration struct {
	Name           string `yaml:"name,omitempty"`
	Initialization `yaml:"initialization,omitempty"`
	Unsplash       `yaml:"unsplash,omitempty"`
}
|
def sortList(numbers):
    """Sort the list in place by the length of each element's string form.

    For integers this orders by digit count; the sort is stable, so elements
    with the same number of digits keep their relative order. Returns the
    same (now sorted) list.

    Bug fix: the original used ``key=len``, which raises ``TypeError`` for
    integers (ints have no ``len``). The claimed output below implies the
    intent was digit-count ordering.
    """
    numbers.sort(key=lambda item: len(str(item)))
    return numbers


# Example usage (the original snippet referenced an undefined ``numbers``).
numbers = sortList([16, 4, 8, 1, 7, 9])
print(numbers)
# Output: [4, 8, 1, 7, 9, 16]
# -*- coding: utf-8 -*-
# Django:
from django.forms import ModelForm
from django.forms import TextInput
from django.forms import Select
from django.forms import Textarea
from django.forms import ImageField
# Modelos
from .models import Contrato
from .models import Empresa
# ----------------- CONTRATO ----------------- #
class ContratoFiltersForm(ModelForm):
    """Filter form for Contrato list views (subset of fields, Bootstrap widgets)."""

    class Meta:
        model = Contrato
        # Bug fix: `fields` was declared as a set literal ({...}), which has
        # no defined iteration order, so the rendered field order was
        # nondeterministic. A list preserves the intended order.
        fields = [
            'clave',
            'nombre',
            'cliente',
            'numero',
            'region',
            'estado',
        ]
        widgets = {
            'clave': TextInput(attrs={'class': 'form-control input-sm'}),
            'nombre': TextInput(attrs={'class': 'form-control input-sm'}),
            'cliente': TextInput(attrs={'class': 'form-control input-sm'}),
            'numero': TextInput(attrs={'class': 'form-control input-sm'}),
            'region': TextInput(attrs={'class': 'form-control input-sm'}),
            'estado': Select(attrs={'class': 'form-control input-sm select2'}),
        }
class ContratoForm(ModelForm):
    """Full create/update form for Contrato with Bootstrap-styled widgets.

    Audit columns (created/updated date and user) are excluded — presumably
    they are set by the view/model layer rather than the user; TODO confirm.
    """

    class Meta:
        model = Contrato
        fields = '__all__'
        exclude = [
            'created_date',
            'created_by',
            'updated_date',
            'updated_by',
        ]
        widgets = {
            'clave': TextInput(attrs={'class': 'form-control input-sm'}),
            'nombre': TextInput(attrs={'class': 'form-control input-sm'}),
            'descripcion': Textarea(attrs={'class': 'form-control input-sm'}),
            'cliente': TextInput(attrs={'class': 'form-control input-sm'}),
            'numero': TextInput(attrs={'class': 'form-control input-sm'}),
            'region': TextInput(attrs={'class': 'form-control input-sm'}),
            'estado': Select(attrs={'class': 'form-control input-sm select2'}),
        }
class EmpresaForm(ModelForm):
    """Create/update form for Empresa (key, logo image and description)."""

    class Meta:
        model = Empresa
        fields = [
            'clave',
            'logo',
            'descripcion'
        ]
        # 'logo' has no widget override, so it keeps the model field's default input.
        widgets = {
            'clave': TextInput(attrs={'class': 'form-control input-sm'}),
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
        }
|
/*-------------------------------------------------------------------------------------*/
/**
 * Top-level controller for the smart-house UI.
 *
 * Fetches rooms and their controllable objects from the server (via the
 * global `Requester`), builds DOM through the global `DOMFactory`, and keeps
 * Map bindings between DOM nodes and their JS models.
 *
 * Bug fix: the `for (i in ...)` loops declared no loop variable, leaking `i`
 * as an implicit global (and breaking under strict mode); loop variables are
 * now declared. No other behavior changed.
 */
House = {
    html_container: null,
    room_bindings: new Map(), // (HTML Node -> JS Model)

    // START! All work starts here.
    init: function () {
        House.html_container = document.getElementById("rooms");
    },

    // Fetch every room from the server and render it.
    createAllRooms: function () {
        var data = Requester.getData(Requester.roomsURI);
        for (var i in data.rooms) {
            House.createRoom(data.rooms[i]);
        }
    },

    // Build the model and DOM node for one room and bind them together.
    createRoom: function (room) {
        var NewRoom = new Room(room.id, room.description, room.objects);
        var htmlNode = DOMFactory.createRoom(room.id, room.description);
        House.html_container.appendChild(htmlNode);
        House.bindModelWithDOM(htmlNode, NewRoom);
        htmlNode.onclick = function () {
            House.loadRoomObjects(this);
        }
    },

    // Toggle a room open/closed; lazily create or refresh its objects panel.
    loadRoomObjects: function (this_room) {
        this_room.classList.toggle("active");
        var room_model = House.room_bindings.get(this_room);
        // Check if the room is currently viewing
        if (room_model.isActive() === false) {
            // Check if html nodes have appropriate JS models
            if (!room_model.hasMappedObjects()) {
                House.appendNewObjects(this_room, room_model);
            } else {
                House.updateObjectsByRoom(room_model);
            }
        }
        room_model.toggleActivity();
        this_room.nextSibling.classList.toggle("show");
    },

    // First-time population: fetch each object, build its model, controller
    // and DOM node, then insert the whole panel after the room node.
    appendNewObjects: function (this_room, room_model) {
        var room_obj_ids = room_model._object_ids;
        var received_objects = [];
        // Getting the data from server successively
        for (var i in room_obj_ids) {
            var objectData = Requester.getData(Requester.objectsURI + room_obj_ids[i]);
            received_objects.push(objectData);
        }
        var objects_nodes = [];
        // Create all mappings
        for (var j in received_objects) {
            // Step 1: Create Objects Model.
            var object_model = new ControllableObject(received_objects[j].id
                , received_objects[j].actions
                , received_objects[j].status
                , received_objects[j].type
                , received_objects[j].description
            );
            // Step 2: create DOM controller
            var controller = DOMFactory.createController(object_model._status);
            // Step 3: bind click listener
            controller.onclick = function () {
                House.toggleObject(room_model, this);
            }
            // Step 4: Bind the input controller to the model of controllable object
            // (class Room)
            room_model.bindControllerWithModel(controller, object_model);
            // Step 5: Add design and decorator for main controller (input)
            // TODO: In future make it possible to have several controllers for one object
            var pretty_controller = DOMFactory.decorateController(controller);
            // Step 6: TODO: so, put them in the array
            var controllers = [];
            controllers.push(pretty_controller);
            // Step 7: At last - create the whole html node with id and description views
            var ViewedObject = DOMFactory.createObjectNode(controllers
                , object_model._id
                , object_model._type
                , object_model._description
            );
            objects_nodes.push(ViewedObject);
        }
        // Step 8: Add objects to the panel
        var objects_panel = DOMFactory.createObjectPanel(objects_nodes);
        this_room.parentNode.insertBefore(objects_panel, this_room.nextSibling);
    },

    // Refresh the status of every already-mapped object in the room.
    updateObjectsByRoom: function (room_model) {
        var object_models = room_model._object_bindings.keys();
        for (let key_val of object_models) {
            let model_obj = room_model._object_bindings.get(key_val); // Controllable Object
            let model_id = model_obj._id;
            let objectData = Requester.getData(Requester.objectsURI + model_id);
            model_obj.updateStatus(key_val, objectData.status);
        }
    },

    // Flip an object's state locally and notify the server.
    toggleObject: function (room_model, this_object) {
        var model_obj = room_model._object_bindings.get(this_object);
        model_obj.toggleStatus();
        Requester.postCommands(model_obj._id, "toggle");
    },

    bindModelWithDOM: function (htmlNode, roomModel) {
        House.room_bindings.set(htmlNode, roomModel);
    },
};
|
#!/usr/bin/env bash
# Upload built Tarantool documentation artifacts (json, images, pdf,
# singlehtml + assets) for the current branch to S3, then ping the docs
# site to re-read the branch.
#
# Required env: BRANCH_NAME, S3_ENDPOINT_URL, S3_UPLOAD_PATH,
# TARANTOOL_UPDATE_KEY, TARANTOOL_UPDATE_URL.
#
# Robustness fix: all variable expansions are quoted so paths/URLs with
# unexpected characters cannot be word-split or glob-expanded.
BRANCH="$BRANCH_NAME"
ENDPOINT_URL="$S3_ENDPOINT_URL"
S3_PATH="$S3_UPLOAD_PATH"

# don't upload the '*.pickle' files
find output -name '*.pickle' -delete

# upload json and images (image formats are excluded from the json sync)
aws s3 sync output/json "$S3_PATH/$BRANCH/json" --endpoint-url="$ENDPOINT_URL" --delete --include "*" --exclude "*.jpg" --exclude "*.png" --exclude "*.svg"
aws s3 sync output/json/_build_en/json/_images "$S3_PATH/$BRANCH/images_en" --endpoint-url="$ENDPOINT_URL" --delete --size-only
aws s3 sync output/json/_build_ru/json/_images "$S3_PATH/$BRANCH/images_ru" --endpoint-url="$ENDPOINT_URL" --delete --size-only

# upload pdf files
aws s3 cp --acl public-read output/_latex_en/Tarantool.pdf "$S3_PATH/$BRANCH/Tarantool-en.pdf" --endpoint-url="$ENDPOINT_URL"
aws s3 cp --acl public-read output/_latex_ru/Tarantool.pdf "$S3_PATH/$BRANCH/Tarantool-ru.pdf" --endpoint-url="$ENDPOINT_URL"

# upload singlehtml and assets
aws s3 sync --acl public-read output/html/en/_static "$S3_PATH/$BRANCH/en/_static" --endpoint-url="$ENDPOINT_URL" --delete --size-only
aws s3 sync --acl public-read output/html/en/_images "$S3_PATH/$BRANCH/en/_images" --endpoint-url="$ENDPOINT_URL" --delete --size-only
aws s3 cp --acl public-read output/html/en/singlehtml.html "$S3_PATH/$BRANCH/en/singlehtml.html" --endpoint-url="$ENDPOINT_URL"
aws s3 sync --acl public-read output/html/ru/_static "$S3_PATH/$BRANCH/ru/_static" --endpoint-url="$ENDPOINT_URL" --delete --size-only
aws s3 sync --acl public-read output/html/ru/_images "$S3_PATH/$BRANCH/ru/_images" --endpoint-url="$ENDPOINT_URL" --delete --size-only
aws s3 cp --acl public-read output/html/ru/singlehtml.html "$S3_PATH/$BRANCH/ru/singlehtml.html" --endpoint-url="$ENDPOINT_URL"

# notify the docs site that this branch has new content
curl --data '{"update_key":"'"$TARANTOOL_UPDATE_KEY"'"}' --header "Content-Type: application/json" --request POST "$TARANTOOL_UPDATE_URL""$BRANCH"/
|
<gh_stars>1000+
// Copyright 2017-2021 @polkadot/api-contract authors & contributors
// SPDX-License-Identifier: Apache-2.0
import { v0ToLatest, v1ToLatest } from '@polkadot/api-contract/Abi/toLatest';
import { TypeRegistry } from '@polkadot/types';
import abis from '../test/contracts';
// Converting a v0 ERC-20 contract ABI to the latest metadata format: every
// spec section (constructors, messages, events and their args) must survive
// the upgrade with its labels intact.
describe('v0ToLatest', (): void => {
  const registry = new TypeRegistry();
  const contract = registry.createType('ContractMetadata', { V0: abis.ink_v0_erc20 });
  const latest = v0ToLatest(registry, contract.asV0);

  it('has the correct constructors', (): void => {
    expect(
      latest.spec.constructors.map(({ label }) => label.toString())
    ).toEqual(['new']);
  });

  it('has the correct messages', (): void => {
    expect(
      latest.spec.messages.map(({ label }) => label.toString())
    ).toEqual(['total_supply', 'balance_of', 'allowance', 'transfer', 'approve', 'transfer_from']);
  });

  it('has the correct events', (): void => {
    expect(
      latest.spec.events.map(({ label }) => label.toString())
    ).toEqual(['Transfer', 'Approval']);
  });

  it('has the correct constructor arguments', (): void => {
    expect(
      latest.spec.constructors[0].args.map(({ label }) => label.toString())
    ).toEqual(['initial_supply']);
  });

  // messages[1] is balance_of, whose single argument is the owner account.
  it('has the correct message arguments', (): void => {
    expect(
      latest.spec.messages[1].args.map(({ label }) => label.toString())
    ).toEqual(['owner']);
  });

  it('has the correct event arguments', (): void => {
    expect(
      latest.spec.events[0].args.map(({ label }) => label.toString())
    ).toEqual(['from', 'to', 'value']);
  });
});
// Same upgrade checks for a v1 flipper contract ABI (smaller surface:
// no events are asserted for this fixture).
describe('v1ToLatest', (): void => {
  const registry = new TypeRegistry();
  const contract = registry.createType('ContractMetadata', { V1: abis.ink_v1_flipper.V1 });
  const latest = v1ToLatest(registry, contract.asV1);

  it('has the correct constructors', (): void => {
    expect(
      latest.spec.constructors.map(({ label }) => label.toString())
    ).toEqual(['new', 'default']);
  });

  it('has the correct messages', (): void => {
    expect(
      latest.spec.messages.map(({ label }) => label.toString())
    ).toEqual(['flip', 'get']);
  });

  it('has the correct constructor arguments', (): void => {
    expect(
      latest.spec.constructors[0].args.map(({ label }) => label.toString())
    ).toEqual(['init_value']);
  });
});
|
import time
class PokemonTrainer:
    """Thin wrapper around a game session that can bulk-release Pokémon.

    Bug fix: the user-facing strings contained mojibake ("Pokรฉmon", a UTF-8
    "é" decoded as another codepage); restored the intended "Pokémon".
    """

    def __init__(self, session):
        # Session object providing checkInventory() / releasePokemon();
        # its exact type is defined elsewhere.
        self.session = session

    def releaseAllPokemon(self):
        """Release every Pokémon in the party after an interactive confirmation.

        Prompts on stdin; anything other than "yes" (case-insensitive) aborts.
        Sleeps 1s between releases — presumably to avoid hammering the
        server's rate limit; TODO confirm.
        """
        inventory = self.session.checkInventory()
        confirmation = input("Are you sure you want to release all Pokémon? Enter 'yes' to confirm: ")
        if confirmation.lower() == "yes":
            # assumes inventory["party"] is an iterable of releasable Pokémon -- TODO confirm
            for pokemon in inventory["party"]:
                self.session.releasePokemon(pokemon)
                time.sleep(1)
            print("All Pokémon have been released.")
        else:
            print("Release operation aborted.")
# Just in case you didn't want any revives |
# Arguments accumulated here are passed to wrapProgram for every executable
# that gets wrapped (see wrapGAppsHook below).
gappsWrapperArgs=()

# If the given prefix ships GIO modules, expose them via GIO_EXTRA_MODULES.
find_gio_modules() {
    if [ -d "$1"/lib/gio/modules ] && [ -n "$(ls -A $1/lib/gio/modules)" ] ; then
        gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$1/lib/gio/modules")
    fi
}

# Run for every dependency as it enters the build environment.
envHooks+=(find_gio_modules)
# Note: $gappsWrapperArgs still gets defined even if $dontWrapGApps is set.
wrapGAppsHook() {
    # guard against running multiple times (e.g. due to propagation)
    [ -z "$wrapGAppsHookHasRun" ] || return 0
    wrapGAppsHookHasRun=1

    # GDK-Pixbuf image loader cache, if one was set up during the build.
    if [ -n "$GDK_PIXBUF_MODULE_FILE" ]; then
        gappsWrapperArgs+=(--set GDK_PIXBUF_MODULE_FILE "$GDK_PIXBUF_MODULE_FILE")
    fi

    # Icon themes collected from the build inputs.
    if [ -n "$XDG_ICON_DIRS" ]; then
        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$XDG_ICON_DIRS")
    fi

    # Compiled GSettings schemas from the build inputs.
    if [ -n "$GSETTINGS_SCHEMAS_PATH" ]; then
        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$GSETTINGS_SCHEMAS_PATH")
    fi

    # The package's own data directory.
    if [ -d "$prefix/share" ]; then
        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$prefix/share")
    fi

    # Propagate a fixed set of plugin/typelib path variables (plus any extras
    # the package requested via $wrapPrefixVariables) into the wrappers.
    for v in $wrapPrefixVariables GST_PLUGIN_SYSTEM_PATH_1_0 GI_TYPELIB_PATH GRL_PLUGIN_PATH; do
        eval local dummy="\$$v"
        gappsWrapperArgs+=(--prefix $v : "$dummy")
    done

    # Unless the package opts out, wrap every executable under bin/ and
    # libexec/ with the collected environment arguments.
    if [[ -z "$dontWrapGApps" ]]; then
        targetDirs=( "${prefix}/bin" "${prefix}/libexec" )
        for targetDir in "${targetDirs[@]}"; do
            if [[ -d "${targetDir}" ]]; then
                find -L "${targetDir}" -type f -executable -print0 \
                    | while IFS= read -r -d '' file; do
                    echo "Wrapping program ${file}"
                    wrapProgram "${file}" "${gappsWrapperArgs[@]}"
                done
            fi
        done
    fi
}

# Runs during the fixup phase, after installation.
fixupOutputHooks+=(wrapGAppsHook)
|
import numpy as np
import matplotlib.pyplot as plt

# Toy 2-D dataset: 100 points uniform in [-1, 1] x [-1, 1].
X = np.random.uniform(-1, 1, 100)
Y = np.random.uniform(-1, 1, 100)

# Class labels: 1 for points with X > 0, 0 for X < 0.
y = np.zeros(100)
y[X > 0] = 1
y[X < 0] = 0

# Roughly balanced dataset
plt.scatter(X, Y, c=y)
plt.show()

# Unbalanced dataset: reduce the number of 0s by flipping a random half of
# the X < 0 points to class 1.
# Bug fix: the original did
#   y[X<0] = np.random.choice(np.arange(0,2), int(0.50*len(y[X<0])), replace=False)
# which raises ValueError twice over (sampling ~50 values from {0, 1}
# without replacement, and assigning a wrong-length array to the mask).
zero_idx = np.flatnonzero(X < 0)
flip = np.random.choice(zero_idx, size=zero_idx.size // 2, replace=False)
y[flip] = 1

plt.scatter(X, Y, c=y)
plt.show()
#!/bin/sh
# Bring up the go-starters stack in the background, show the matching
# containers, then dump the service logs after giving it time to boot.
docker-compose up -d

echo
echo

# Show only containers whose name matches 'go-starters'.
docker ps --format 'table {{.ID}}\t{{.Names}}\t{{.Image}}\t{{.Status}}\t{{.Ports}}' -f name=go-starters

echo
echo

# Give the service a few seconds to start before reading its logs.
sleep 5
docker logs go-starters
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.