text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Generates the AUTHORS file at the repository root from three sources:
# a hand-maintained list of early contributors, git commit authors, and
# "Co-authored-by:" commit trailers.  Safe to run from any directory.
set -e
# Resolve the script's real location (readlink -f follows symlinks) and cd to
# the repository root, one level above the scripts directory.
cd "$(dirname "$(readlink -f "$BASH_SOURCE")")/.."
# see also ".mailmap" for how email addresses and names are deduplicated
{
# Fixed file header explaining provenance.
cat <<-'EOF'
# This file lists all individuals having contributed content to the repository.
# For how it is generated, see `scripts/generate-authors.sh`.
EOF
# old manual entries
# read -d '' returns non-zero when it hits EOF, so "|| true" keeps set -e
# from aborting the script here.
read -d '' authors <<-"EOF" || true
Michel Baylac
Cyrille d'Haese
Ellen Reitmayr
Michael Beckmann
Oliver Beckmann
Fedor Bezrukov
Fabian Bieker
Aaron Chen
Fabrice Dessaint
Nathan Dunn
Alexis Gallagher
David Gleich
Behrouz Javanmardi
Bernd Kalbfuss
Martin Kähmer
Ervin Kolenovic
Krzysztof A. Kościuszkiewicz
Christian Kopf
Jeffrey Kuhn
Uwe Kuehn
Felix Langner
Stephan Lau
Alex Montgomery
Saverio Mori
Ambrogio Oliva
Stephan Rave
John Relph
Hannes Restel
Moritz Ringler
Rudolf Seemann
Toralf Senger
Manuel Siebeneicher
Mike Smoot
Ulrich Stärk
Martin Stolle
David Weitzman
John Zedlewski
Samin Muhammad Ridwanul Karim
Stefan Robert
Bernhard Tempel
EOF
# authors %aN = author name
# co-authors
# Extract Co-authored-by trailers case-insensitively, strip the trailing
# "<email>" part, and drop one known unwanted account.
coauthors=$(git log -i --grep=co-authored-by | grep -i "co-authored-by" | sed "s/.*co-authored-by: \(.*\)/\1/I" | grep -v "luis-valdez" | sed "s/ <.*//")
# Merge all three sources, filter out bots and maintainer aliases, then sort
# case-insensitively under a fixed locale so output is stable across machines.
echo -e "$authors\n$(git log --format='%aN')\n$coauthors" | grep -v "\[bot\]" | grep -v "JabRef" | grep -v "Siedlerchr" | grep -v "^Christoph$" | grep -v "^Mootez$" | grep -v "oscargus" | grep -v "dependabot" | grep -v "github actions" | grep -v "igorsteinmacher" | grep -v "halirutan" | grep -v "matthiasgeiger" | grep -v "Gitter Badger" | grep -v "gdstewart" | grep -v "m-mauersberger" | grep -v "chenyuheng" | LC_ALL=C.UTF-8 sort --unique --ignore-case
} > AUTHORS
|
#!/bin/bash
# Copyright 2019 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# performance-tests.sh is added to manage all clusters that run the performance
# benchmarks in eventing repo, it is ONLY intended to be run by Prow, users
# should NOT run it manually.
# Setup env vars to override the default settings
# These must be exported BEFORE sourcing the shared script below, which reads
# them to pick the GCP project and the benchmark source tree.
export PROJECT_NAME="knative-eventing-performance"
export BENCHMARK_ROOT_PATH="$GOPATH/src/knative.dev/eventing/test/performance/benchmarks"
# Shared test-infra driver; defines main(), abort(), and calls back into the
# update_knative()/update_benchmark() hooks defined in this file.
# NOTE(review): assumes the script is invoked from the repo root so this
# relative path resolves — confirm with the Prow job config.
source vendor/knative.dev/test-infra/scripts/performance-tests.sh
# Vars used in this script
export TEST_CONFIG_VARIANT="continuous"
export TEST_NAMESPACE="default"
function update_knative() {
  # Re-deploys eventing core, the InMemoryChannel, and the Broker from the
  # local config trees.  For each component the CRDs are applied first
  # (selector knative.dev/crd-install=true) so the follow-up full apply can
  # resolve the custom types; abort() (from the sourced driver) stops the
  # run on any failure.
  echo ">> Update eventing core"
  ko apply --selector knative.dev/crd-install=true \
    -f config/ || abort "Failed to apply eventing CRDs"
  ko apply \
    -f config/ || abort "Failed to apply eventing resources"
  echo ">> Update InMemoryChannel"
  ko apply --selector knative.dev/crd-install=true \
    -f config/channels/in-memory-channel/ || abort "Failed to apply InMemoryChannel CRDs"
  ko apply \
    -f config/channels/in-memory-channel/ || abort "Failed to apply InMemoryChannel resources"
  echo ">> Update Broker"
  ko apply --selector knative.dev/crd-install=true \
    -f config/brokers/channel-broker || abort "Failed to apply Broker CRD"
  ko apply \
    -f config/brokers/channel-broker || abort "Failed to apply Broker resources"
}
function update_benchmark() {
  # Refreshes the mako config and redeploys one benchmark.
  # Arguments:
  #   $1 - benchmark name (a directory under BENCHMARK_ROOT_PATH)
  local benchmark_path="${BENCHMARK_ROOT_PATH}/$1"
  # TODO(chizhg): add update_environment function in test-infra/scripts/performance-tests.sh and move the below code there
  echo ">> Updating configmap"
  kubectl delete configmap config-mako -n "${TEST_NAMESPACE}" --ignore-not-found=true
  kubectl create configmap config-mako -n "${TEST_NAMESPACE}" --from-file="${benchmark_path}/prod.config" || abort "failed to create config-mako configmap"
  kubectl patch configmap config-mako -n "${TEST_NAMESPACE}" -p '{"data":{"environment":"prod"}}' || abort "failed to patch config-mako configmap"
  echo ">> Updating benchmark $1"
  # TODO(chizhg): remove --wait=false once https://github.com/knative/eventing/issues/2633 is fixed
  # FIX: quote the whole path — ${TEST_CONFIG_VARIANT} was left outside the
  # quotes, so it would be word-split/globbed if it ever contained spaces.
  ko delete -f "${benchmark_path}/${TEST_CONFIG_VARIANT}" --ignore-not-found=true --wait=false
  ko apply -f "${benchmark_path}/${TEST_CONFIG_VARIANT}" || abort "failed to apply benchmark $1"
}
main $@
|
/*
* This file is generated by jOOQ.
*/
package jooq.generated.entities.static_.tables.records;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.UUID;
import javax.annotation.Generated;
import jooq.generated.entities.static_.tables.Schedule;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record7;
import org.jooq.Row7;
import org.jooq.impl.UpdatableRecordImpl;
/**
 * This class is generated by jOOQ.
 */
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.9.2"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class ScheduleRecord extends UpdatableRecordImpl<ScheduleRecord> implements Record7<UUID, String, Time, Time, UUID, UUID, Timestamp> {

    private static final long serialVersionUID = -199503847;

    // NOTE: generated code — do not edit by hand; regenerate with jOOQ instead.
    // Record index -> column mapping used throughout this class:
    //   0=id, 1=platform, 2=planned_arrival, 3=planned_departure,
    //   4=stop, 5=journey, 6=update

    /**
     * Setter for <code>public.schedule.id</code>.
     */
    public ScheduleRecord setId(UUID value) {
        set(0, value);
        return this;
    }

    /**
     * Getter for <code>public.schedule.id</code>.
     */
    public UUID getId() {
        return (UUID) get(0);
    }

    /**
     * Setter for <code>public.schedule.platform</code>.
     */
    public ScheduleRecord setPlatform(String value) {
        set(1, value);
        return this;
    }

    /**
     * Getter for <code>public.schedule.platform</code>.
     */
    public String getPlatform() {
        return (String) get(1);
    }

    /**
     * Setter for <code>public.schedule.planned_arrival</code>.
     */
    public ScheduleRecord setPlannedArrival(Time value) {
        set(2, value);
        return this;
    }

    /**
     * Getter for <code>public.schedule.planned_arrival</code>.
     */
    public Time getPlannedArrival() {
        return (Time) get(2);
    }

    /**
     * Setter for <code>public.schedule.planned_departure</code>.
     */
    public ScheduleRecord setPlannedDeparture(Time value) {
        set(3, value);
        return this;
    }

    /**
     * Getter for <code>public.schedule.planned_departure</code>.
     */
    public Time getPlannedDeparture() {
        return (Time) get(3);
    }

    /**
     * Setter for <code>public.schedule.stop</code>.
     */
    public ScheduleRecord setStop(UUID value) {
        set(4, value);
        return this;
    }

    /**
     * Getter for <code>public.schedule.stop</code>.
     */
    public UUID getStop() {
        return (UUID) get(4);
    }

    /**
     * Setter for <code>public.schedule.journey</code>.
     */
    public ScheduleRecord setJourney(UUID value) {
        set(5, value);
        return this;
    }

    /**
     * Getter for <code>public.schedule.journey</code>.
     */
    public UUID getJourney() {
        return (UUID) get(5);
    }

    /**
     * Setter for <code>public.schedule.update</code>.
     */
    public ScheduleRecord setUpdate(Timestamp value) {
        set(6, value);
        return this;
    }

    /**
     * Getter for <code>public.schedule.update</code>.
     */
    public Timestamp getUpdate() {
        return (Timestamp) get(6);
    }

    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Record1<UUID> key() {
        return (Record1) super.key();
    }

    // -------------------------------------------------------------------------
    // Record7 type implementation
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Row7<UUID, String, Time, Time, UUID, UUID, Timestamp> fieldsRow() {
        return (Row7) super.fieldsRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Row7<UUID, String, Time, Time, UUID, UUID, Timestamp> valuesRow() {
        return (Row7) super.valuesRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<UUID> field1() {
        return Schedule.SCHEDULE.ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field2() {
        return Schedule.SCHEDULE.PLATFORM;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Time> field3() {
        return Schedule.SCHEDULE.PLANNED_ARRIVAL;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Time> field4() {
        return Schedule.SCHEDULE.PLANNED_DEPARTURE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<UUID> field5() {
        return Schedule.SCHEDULE.STOP;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<UUID> field6() {
        return Schedule.SCHEDULE.JOURNEY;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Timestamp> field7() {
        return Schedule.SCHEDULE.UPDATE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UUID value1() {
        return getId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value2() {
        return getPlatform();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Time value3() {
        return getPlannedArrival();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Time value4() {
        return getPlannedDeparture();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UUID value5() {
        return getStop();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UUID value6() {
        return getJourney();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Timestamp value7() {
        return getUpdate();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScheduleRecord value1(UUID value) {
        setId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScheduleRecord value2(String value) {
        setPlatform(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScheduleRecord value3(Time value) {
        setPlannedArrival(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScheduleRecord value4(Time value) {
        setPlannedDeparture(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScheduleRecord value5(UUID value) {
        setStop(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScheduleRecord value6(UUID value) {
        setJourney(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScheduleRecord value7(Timestamp value) {
        setUpdate(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScheduleRecord values(UUID value1, String value2, Time value3, Time value4, UUID value5, UUID value6, Timestamp value7) {
        value1(value1);
        value2(value2);
        value3(value3);
        value4(value4);
        value5(value5);
        value6(value6);
        value7(value7);
        return this;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached ScheduleRecord
     */
    public ScheduleRecord() {
        super(Schedule.SCHEDULE);
    }

    /**
     * Create a detached, initialised ScheduleRecord
     */
    public ScheduleRecord(UUID id, String platform, Time plannedArrival, Time plannedDeparture, UUID stop, UUID journey, Timestamp update) {
        super(Schedule.SCHEDULE);

        set(0, id);
        set(1, platform);
        set(2, plannedArrival);
        set(3, plannedDeparture);
        set(4, stop);
        set(5, journey);
        set(6, update);
    }
}
|
#
# File: run.sh
# License: Part of the PIRA project. Licensed under BSD 3 clause license. See LICENSE.txt file at https://github.com/jplehr/pira/LICENSE.txt
# Description: Runs the game of life integration test
#
testDir=$PWD
export TEST_DIR="$testDir"
export PATH=$PWD/../bear/install/bin:$PATH
echo "$PATH"
# Export all the Pira tools for the integration test
cd "$testDir/../../../resources"
. setup_paths.sh
cd "$testDir"
echo "$PATH"
echo -e "\n------ PATH -----"
echo "$PATH"
echo -e "\n------ LD_LIBRARY_PATH -----"
echo "$LD_LIBRARY_PATH"
echo -e "\n------ Which tools -----"
which pgis_pira
which cgcollector
which scorep
which wrap.py
# Download the target application only if the tarball is not already cached.
# FIX: replaced a bare "stat" followed by an "[ $? -ne 0 ]" check with an
# explicit "if !" (clearer, and still correct if set -e is ever enabled).
if ! stat PIRA-testing.tar.gz > /dev/null 2>&1; then
  wget https://github.com/jplehr/GameOfLife/archive/PIRA-testing.tar.gz
fi
tar xzf PIRA-testing.tar.gz
mv GameOfLife-PIRA-testing gol
echo -e "\n----- Build GameOfLife / build call graph -----"
cd gol/serial_non_template
# FIX: the original used "cmd 2>&1 > /dev/null", which redirects stderr to the
# terminal and only discards stdout.  "> /dev/null 2>&1" discards both, which
# is what a quiet build step intends.
bear make gol > /dev/null 2>&1
cgc main.cpp > /dev/null 2>&1
cgc SerialGoL.cpp > /dev/null 2>&1
# Seed the merged call-graph file, then merge the per-TU graphs into it.
echo "null" > gol.ipcg
cgmerge gol.ipcg main.ipcg SerialGoL.ipcg > /dev/null 2>&1
cp gol.ipcg "$PWD/../../../../../extern/install/pgis/bin/gol_ct.mcg"
cd ../..
cd gol
echo -e "\n----- Running Pira -----\n"
# use runtime folder for extrap files
if [[ -z "${XDG_DATA_HOME}" ]]; then
  pira_dir=$HOME/.local/share/pira
else
  pira_dir=$XDG_DATA_HOME/pira
fi
echo -e "Using ${pira_dir} for runtime files\n"
sed -i "s|CUBES_FOLDER|${pira_dir}/gol_cubes|g" "$testDir/gol_config.json"
python3 ../../../../pira.py --config-version 2 --extrap-dir "${pira_dir}/piraII" --extrap-prefix t --tape ../gol.tp "$testDir/gol_config.json"
pirafailed=$?
# Clean up runtime artifacts; the glob after the quoted prefix must stay
# unquoted so it expands.
rm -rf "${pira_dir}/piraII"
rm -rf "${pira_dir}"/gol_cubes-*
cd "$testDir"
rm -rf gol
exit $pirafailed
|
#!/bin/sh
# SLURM batch script: loads the toolchain and runs the gem5 MLC_CARE helper.
#SBATCH --job-name=serial_job_test # Job name
#SBATCH --mail-type=ALL # Mail events (NONE, BEGIN, END, FAIL, ALL)
#SBATCH --nodes=1
#SBATCH --mail-user=<email_address> # Where to send mail
#SBATCH --ntasks=2 # Number of tasks
#SBATCH --mem=4080mb # Memory limit
#SBATCH --time=02:05:00 # Time limit hrs:min:sec
#SBATCH --output=serial_test_%j.out # Standard output and error log
# Log where/when the job ran, for the output file.
pwd; hostname; date
module load gcc python libz
export LIBRARY_PATH=/apps/gcc/5.2.0/python/2.7.10/lib:$LIBRARY_PATH
#echo "Running plot script on a single CPU core"
sh /home/rakeshjha/MLC_CARE/gem5/tt.sh
|
<gh_stars>1-10
from netdumplings import DumplingChef
class ChefOneFromFile(DumplingChef):
    """First chef defined in this module; adds no behavior beyond DumplingChef."""
    pass
class ChefTwoFromFile(DumplingChef):
    """Second chef defined in this module; adds no behavior beyond DumplingChef."""
    pass
|
#!/bin/bash
# Copyright (c) 2020, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
# Verifies that the generated code under pkg/ is up to date: snapshots pkg/,
# regenerates it with hack/update-codegen.sh, diffs against the snapshot, and
# fails (after restoring the snapshot) if anything changed.
set -o errexit
set -o nounset
set -o pipefail

# FIX: index BASH_SOURCE explicitly — bare ${BASH_SOURCE} relies on implicit
# element-0 expansion and can trip "unbound variable" under set -u on some
# bash versions.
SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
DIFFROOT="${SCRIPT_ROOT}/pkg"
TMP_DIFFROOT="${SCRIPT_ROOT}/_tmp/pkg"
_tmp="${SCRIPT_ROOT}/_tmp"

# Remove the scratch directory on any exit path, including Ctrl-C.
cleanup() {
  rm -rf "${_tmp}"
}
trap "cleanup" EXIT SIGINT

cleanup
mkdir -p "${TMP_DIFFROOT}"
cp -a "${DIFFROOT}"/* "${TMP_DIFFROOT}"

# Regenerates in place; may rewrite files under ${DIFFROOT}.
"${SCRIPT_ROOT}/hack/update-codegen.sh"
echo "diffing ${DIFFROOT} against freshly generated codegen"
ret=0
diff -Naupr "${DIFFROOT}" "${TMP_DIFFROOT}" || ret=$?
# Restore the original tree so a failed verification leaves no local changes.
cp -a "${TMP_DIFFROOT}"/* "${DIFFROOT}"
if [[ $ret -eq 0 ]]
then
  echo "${DIFFROOT} up to date."
else
  echo "${DIFFROOT} is out of date. Please run hack/update-codegen.sh"
  exit 1
fi
|
<gh_stars>0
package de.schwedt.weightlifting.app;
/**
 * Marker base class for items that can be updated; declares no members here —
 * subclasses supply all state and behavior.
 */
public abstract class UpdateableItem {
}
|
Rscript ../functions/main_localisation_measure_function_nodist.R ../cache/ ../data/network_edgelist_additional ../data/table_disease_gene_assoc_orphanet_genetic.tsv Orphageneset_rare_additionalNetworks 15 2000 FALSE FALSE
|
<reponame>WeiSmart/EasyAnnotation<filename>annotation-processor/src/main/java/com/linwei/buriedpointlibrary/logic/object/ObjectFactoryClasses.java
package com.linwei.buriedpointlibrary.logic.object;
import com.linwei.annotation.ObjectFactory;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.MirroredTypeException;
/**
 * Wrapper around a {@link TypeElement} annotated with {@link ObjectFactory}.
 * Extracts and caches the factory key plus the qualified and simple names of
 * the type configured in the annotation.
 *
 * @author WS (2020/5/12)
 */
public class ObjectFactoryClasses {
    // The annotated element itself.
    private TypeElement mTypeElement;
    // Canonical name of the class referenced by @ObjectFactory's type value.
    private String mQualifiedName;
    // Simple name of that same class.
    private String mSimpleName;
    // Non-empty identifier from @ObjectFactory.key().
    private String mKey;

    public ObjectFactoryClasses(TypeElement typeElement) {
        this.mTypeElement = typeElement;
        ObjectFactory factory = mTypeElement.getAnnotation(ObjectFactory.class);
        this.mKey = factory.key();
        // key() is required; fail fast with the offending class in the message.
        if ("".equals(mKey)) {
            throw new IllegalArgumentException(
                String.format("key() in @%s for class %s is null or empty! that's not allowed",
                    ObjectFactory.class.getSimpleName(), mTypeElement.getQualifiedName().toString()));
        }
        try {
            Class<?> clazz = factory.type();
            mQualifiedName = clazz.getCanonicalName();
            mSimpleName = clazz.getSimpleName();
        } catch (MirroredTypeException mte) {
            // During annotation processing, reading a Class-valued annotation
            // member throws MirroredTypeException; recover the names from the
            // mirrored TypeMirror instead.
            DeclaredType classTypeMirror = (DeclaredType) mte.getTypeMirror();
            TypeElement classTypeElement = (TypeElement) classTypeMirror.asElement();
            mQualifiedName = classTypeElement.getQualifiedName().toString();
            mSimpleName = classTypeElement.getSimpleName().toString();
        }
    }

    /**
     * Returns the canonical (fully qualified) name of the type configured in
     * the {@link ObjectFactory} annotation.
     *
     * @return qualified name
     */
    public String getQualifiedName() {
        return mQualifiedName;
    }

    /**
     * Returns the simple name of the type configured in the
     * {@link ObjectFactory} annotation.
     *
     * @return simple name
     */
    public String getSimpleName() {
        return mSimpleName;
    }

    /**
     * Returns the wrapped {@link TypeElement}.
     *
     * @return the annotated element
     */
    public TypeElement getTypeElement() {
        return mTypeElement;
    }

    /**
     * Returns the object identifier taken from {@link ObjectFactory#key()}.
     *
     * @return non-empty key
     */
    public String getKey() {
        return mKey;
    }
}
|
#!/bin/bash
set -e
if [ -z "$KUBECONFIG" ]; then
echo "Must set KUBECONFIG"
exit 1
fi
if [ -z "$TEMPLATE_SCRIPT" ]; then
echo "Must set TEMPLATE_SCRIPT"
exit 1
fi
if [ -z "$FROM_SERVER" ] || [ -z "$FROM_VERSION" ] || [ -z "$TO_SERVER" ] || [ -z "$TO_VERSION" ]; then
echo "Must set FROM_SERVER FROM_VERSION TO_SERVER and TO_VERSION"
exit 1
fi
if [ -z "$ACTOR" ]; then
echo "Must set ACTOR"
exit 1
fi
if [ -z "$MODE" ]; then
echo "Must set MODE"
exit 1
fi
echo "Setting from $FROM_SERVER: $FROM_VERSION"
echo "Setting to $TO_SERVER: $TO_VERSION"
# use first 8 characters of TO_VERSION to differentiate
# jobs
short=${TO_VERSION:0:8}
lowered=$(echo "$ACTOR" | tr '[:upper:]' '[:lower:]')
actorShort="$lowered-$short"
jobname="$actorShort"
timeprefix=$(date +%Y/%m/%d)
actorprefix="$MODE/$ACTOR/$actorShort"
format="markdown"
if [ "$MODE" = "release" ]; then
format="html"
fi
# set value to ISSUE_NUMBER environment variable
# or default to -1
issuenumber=${ISSUE_NUMBER:-"-1"}
source "$TEMPLATE_SCRIPT" "$jobname" "$FROM_SERVER" "$FROM_VERSION" "$TO_SERVER" "$TO_VERSION" "$timeprefix" "$actorprefix" "$format" "$issuenumber" > job.json
out=$(KUBECONFIG="$KUBECONFIG" kubectl apply -f job.json || true)
if [ "$out" != "job.batch/$jobname created" ]; then
echo "something went wrong creating job... this job likely already exists in the cluster"
echo "$out"
exit 1
else
echo "$out"
fi
exit 0
|
<reponame>omshivaprakash/inspirational-quotes<gh_stars>10-100
# Migration: creates the people table holding quote authors.
class CreatePeople < ActiveRecord::Migration[5.0]
  def change
    create_table :people do |t|
      t.string :name, null: false, index: true # required; indexed for lookups
      t.datetime :born_on
      t.integer :profession, default: 0       # presumably an enum ordinal — confirm against the model
      t.datetime :died_on
      t.string :country
      t.timestamps                            # created_at / updated_at
    end
  end
end
|
<filename>resource_references.go
// +build !lambdabinary
package sparta
import (
"encoding/json"
"reflect"
"strings"
gocf "github.com/mweagle/go-cloudformation"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
// resourceRefType enumerates the ways a dynamic ARN reference can be
// expressed in the template.
type resourceRefType int

const (
	// resourceLiteral is a plain string ARN.
	resourceLiteral resourceRefType = iota
	// resourceRefFunc is a {"Ref": ...} intrinsic.
	resourceRefFunc
	// resourceGetAttrFunc is a {"Fn::GetAtt": ...} intrinsic.
	resourceGetAttrFunc
	// resourceStringFunc is a gocf string expression literal.
	resourceStringFunc
)

// resourceRef is the resolved form of a dynamic ARN reference: how it was
// expressed plus the resource name (or literal value) it points at.
type resourceRef struct {
	RefType      resourceRefType
	ResourceName string
}

// resolvedResourceVisitor represents the signature of a function that
// visits one resolved resourceRef for a given lambda's EventSourceMapping
// entry (mappingIndex is the entry's position in EventSourceMappings).
type resolvedResourceVisitor func(lambdaAWSInfo *LambdaAWSInfo,
	eventSourceMapping *EventSourceMapping,
	mappingIndex int,
	resource *resourceRef) error
// resolveResourceRef takes an interface representing a dynamic ARN
// and tries to determine the CloudFormation resource name it resolves to.
// It returns (nil, nil) when expr matches none of the known shapes:
// plain string, Ref, Fn::GetAtt, or gocf string expression.
func resolveResourceRef(expr interface{}) (*resourceRef, error) {
	// Is there any chance it's just a string?
	typedString, typedStringOk := expr.(string)
	if typedStringOk {
		return &resourceRef{
			RefType:      resourceLiteral,
			ResourceName: typedString,
		}, nil
	}
	// Some type of intrinsic function? Round-trip through JSON so we can
	// probe the concrete gocf intrinsic types below.
	marshalled, marshalledErr := json.Marshal(expr)
	if marshalledErr != nil {
		// FIX: this is a marshalling failure — the message previously
		// said "unmarshal".
		return nil, errors.Errorf("Failed to marshal dynamic resource ref %v", expr)
	}
	var refFunc gocf.RefFunc
	if json.Unmarshal(marshalled, &refFunc) == nil &&
		len(refFunc.Name) != 0 {
		return &resourceRef{
			RefType:      resourceRefFunc,
			ResourceName: refFunc.Name,
		}, nil
	}
	var getAttFunc gocf.GetAttFunc
	if json.Unmarshal(marshalled, &getAttFunc) == nil && len(getAttFunc.Resource) != 0 {
		return &resourceRef{
			RefType:      resourceGetAttrFunc,
			ResourceName: getAttFunc.Resource,
		}, nil
	}
	// Any chance it's a string expression literal?
	var stringExprFunc gocf.StringExpr
	if json.Unmarshal(marshalled, &stringExprFunc) == nil && len(stringExprFunc.Literal) != 0 {
		return &resourceRef{
			RefType:      resourceStringFunc,
			ResourceName: stringExprFunc.Literal,
		}, nil
	}
	// Nope
	return nil, nil
}
// isResolvedResourceType is a utility function to determine if a resolved
// reference is a given type. If it is a literal, the literalTokenIndicator
// substring match is used for the predicate. If it is a resource provisioned
// by this template, the &gocf.RESOURCE_TYPE{} will be used via reflection
// Example:
// isResolvedResourceType(resourceRef, template, ":dynamodb:", &gocf.DynamoDBTable{}) {
//
func isResolvedResourceType(resource *resourceRef,
	template *gocf.Template,
	literalTokenIndicator string,
	templateType gocf.ResourceProperties) bool {
	// Literal / string-expression refs carry an ARN-ish string: match on the
	// service token substring (e.g. ":dynamodb:").
	if resource.RefType == resourceLiteral ||
		resource.RefType == resourceStringFunc {
		return strings.Contains(resource.ResourceName, literalTokenIndicator)
	}
	// Dynamically provisioned resource included in the template definition?
	// Compare the concrete Properties type against the expected one.
	existingResource, existingResourceExists := template.Resources[resource.ResourceName]
	if existingResourceExists {
		if reflect.TypeOf(existingResource.Properties) == reflect.TypeOf(templateType) {
			return true
		}
	}
	return false
}
// visitResolvedEventSourceMapping is a utility function that visits all the EventSourceMapping
// entries for the given lambdaAWSInfo struct, resolving each EventSourceArn
// and invoking the supplied visitor for every reference that resolves.
func visitResolvedEventSourceMapping(visitor resolvedResourceVisitor,
	lambdaAWSInfos []*LambdaAWSInfo,
	template *gocf.Template,
	logger *logrus.Logger) error {

	//
	// BEGIN
	// Inline closure to wrap the visitor function so that we can provide
	// specific error messages
	visitEventSourceMappingRef := func(lambdaAWSInfo *LambdaAWSInfo,
		eventSourceMapping *EventSourceMapping,
		mappingIndex int,
		resource *resourceRef) error {

		annotateStatementsErr := visitor(lambdaAWSInfo,
			eventSourceMapping,
			mappingIndex,
			resource)
		// Early exit?
		if annotateStatementsErr != nil {
			return errors.Wrapf(annotateStatementsErr,
				"Visiting event source mapping: %#v",
				eventSourceMapping)
		}
		return nil
	}
	//
	// END
	// Iterate through every lambda function. If there is an EventSourceMapping
	// that points to a piece of infrastructure provisioned by this stack,
	// find the referred resource and supply it to the visitor
	for _, eachLambda := range lambdaAWSInfos {
		for eachIndex, eachEventSource := range eachLambda.EventSourceMappings {
			resourceRef, resourceRefErr := resolveResourceRef(eachEventSource.EventSourceArn)
			if resourceRefErr != nil {
				return errors.Wrapf(resourceRefErr,
					"Failed to resolve EventSourceArn: %#v", eachEventSource)
			}
			// At this point everything is a string, so we need to unmarshall
			// and see if the Arn is supplied by either a Ref or a GetAttr
			// function. In those cases, we need to look around in the template
			// to go from: EventMapping -> Type -> Lambda -> LambdaIAMRole
			// so that we can add the permissions
			if resourceRef != nil {
				annotationErr := visitEventSourceMappingRef(eachLambda,
					eachEventSource,
					eachIndex,
					resourceRef)
				// Anything go wrong?
				if annotationErr != nil {
					return errors.Wrapf(annotationErr,
						"Failed to annotate template for EventSourceMapping: %#v",
						eachEventSource)
				}
			}
		}
	}
	return nil
}
|
#!/usr/bin/env bash
# Emits, for each line of stdin, only the characters at positions 2 and 7.
cut -c 2,7
|
//
// Boost.Pointer Container
//
// Copyright <NAME> 2003-2005. Use, modification and
// distribution is subject to the Boost Software License, Version
// 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// For more information, see http://www.boost.org/libs/ptr_container/
//
#ifndef BOOST_PTR_CONTAINER_PTR_ARRAY_HPP
#define BOOST_PTR_CONTAINER_PTR_ARRAY_HPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif
#include <boost/array.hpp>
#include <boost/static_assert.hpp>
#include <boost/ptr_container/ptr_sequence_adapter.hpp>
namespace boost
{
namespace ptr_container_detail
{
template
<
class T,
size_t N,
class Allocator = int // dummy
>
class ptr_array_impl : public boost::array<T,N>
{
public:
typedef Allocator allocator_type;
ptr_array_impl( Allocator a = Allocator() )
{
this->assign( 0 );
}
ptr_array_impl( size_t, T*, Allocator a = Allocator() )
{
this->assing( 0 );
}
};
}
template
<
class T,
size_t N,
class CloneAllocator = heap_clone_allocator
>
class ptr_array : public
ptr_sequence_adapter< T,
ptr_container_detail::ptr_array_impl<void*,N>,
CloneAllocator >
{
private:
typedef ptr_sequence_adapter< T,
ptr_container_detail::ptr_array_impl<void*,N>,
CloneAllocator >
base_class;
typedef BOOST_DEDUCED_TYPENAME remove_nullable<T>::type U;
typedef ptr_array<T,N,CloneAllocator>
this_type;
ptr_array( const this_type& );
void operator=( const this_type& );
public:
typedef U* value_type;
typedef U* pointer;
typedef U& reference;
typedef const U& const_reference;
typedef BOOST_DEDUCED_TYPENAME base_class::auto_type
auto_type;
public: // constructors
ptr_array() : base_class()
{ }
ptr_array( std::auto_ptr<this_type> r )
: base_class( r ) { }
void operator=( std::auto_ptr<this_type> r )
{
base_class::operator=(r);
}
std::auto_ptr<this_type> release()
{
std::auto_ptr<this_type> ptr( new this_type );
this->swap( *ptr );
return ptr;
}
std::auto_ptr<this_type> clone() const
{
std::auto_ptr<this_type> pa( new this_type );
for( size_t i = 0; i != N; ++i )
{
if( ! is_null(i) )
pa->replace( i, CloneAllocator::allocate_clone( (*this)[i] ) );
}
return pa;
}
private: // hide some members
using base_class::insert;
using base_class::erase;
using base_class::push_back;
using base_class::push_front;
using base_class::pop_front;
using base_class::pop_back;
using base_class::transfer;
using base_class::get_allocator;
public: // compile-time interface
template< size_t idx >
auto_type replace( U* r ) // strong
{
BOOST_STATIC_ASSERT( idx < N );
this->enforce_null_policy( r, "Null pointer in 'ptr_array::replace()'" );
auto_type res( static_cast<U*>( this->c_private()[idx] ) ); // nothrow
this->c_private()[idx] = r; // nothrow
return move(res); // nothrow
}
auto_type replace( size_t idx, U* r ) // strong
{
this->enforce_null_policy( r, "Null pointer in 'ptr_array::replace()'" );
auto_type ptr( r );
if( idx >= N )
throw bad_index( "'replace()' aout of bounds" );
auto_type res( static_cast<U*>( this->c_private()[idx] ) ); // nothrow
this->c_private()[idx] = ptr.release(); // nothrow
return move(res); // nothrow
}
using base_class::at;
template< size_t idx >
T& at()
{
BOOST_STATIC_ASSERT( idx < N );
return (*this)[idx];
}
template< size_t idx >
const T& at() const
{
BOOST_STATIC_ASSERT( idx < N );
return (*this)[idx];
}
bool is_null( size_t idx ) const
{
return base_class::is_null(idx);
}
template< size_t idx >
bool is_null() const
{
BOOST_STATIC_ASSERT( idx < N );
return this->c_private()[idx] == 0;
}
};
    //////////////////////////////////////////////////////////////////////////////
    // clonability

    // Deep-copies every element via the array's clone allocator; the caller
    // takes ownership of the returned pointer.
    template< typename T, size_t size, typename CA >
    inline ptr_array<T,size,CA>* new_clone( const ptr_array<T,size,CA>& r )
    {
        return r.clone().release();
    }

    /////////////////////////////////////////////////////////////////////////
    // swap

    // Non-member swap (found via ADL); constant time, no element copies.
    template< typename T, size_t size, typename CA >
    inline void swap( ptr_array<T,size,CA>& l, ptr_array<T,size,CA>& r )
    {
        l.swap(r);
    }
}
#endif
|
import pandas as pd

# Multiplication-table rows: row i holds i, 2i, ..., 5i for i = 1..3.
data = [[row * col for col in range(1, 6)] for row in range(1, 4)]

# Same five generically named columns as before.
df = pd.DataFrame(data, columns=[f'Column{i}' for i in range(1, 6)])
/*
* Copyright 2013 The Polymer Authors. All rights reserved.
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file.
*/
// Tests that the Custom Elements polyfill automatically upgrades elements
// inserted after registration, both directly and inside subtrees/ShadowDOM.
// Upgrades happen asynchronously (mutation-observer based), hence the
// setTimeout(..., 0) hops before asserting.
suite('observe', function() {
  var work;
  var assert = chai.assert;

  // Fresh scratch container per test, removed again on teardown.
  setup(function() {
    work = document.createElement('div');
    document.body.appendChild(work);
  });

  teardown(function() {
    document.body.removeChild(work);
  });

  // Registers a custom element whose prototype carries a marker `value`
  // property, so an upgraded instance is distinguishable from a raw one.
  function registerTestComponent(inName, inValue) {
    var proto = Object.create(HTMLElement.prototype);
    proto.value = inValue || 'value';
    document.register(inName, {
      prototype: proto
    });
  }

  // Asserts every element matching `selector` under `node` has the marker.
  function testElements(node, selector, value) {
    Array.prototype.forEach.call(node.querySelectorAll(selector), function(n) {
      assert.equal(n.value, value);
    });
  }

  test('custom element automatically upgrades', function(done) {
    registerTestComponent('x-auto', 'auto');
    work.innerHTML = '<x-auto></x-auto>';
    var x = work.firstChild;
    // Not yet upgraded synchronously...
    assert.isUndefined(x.value);
    setTimeout(function() {
      // ...but upgraded after a task turn.
      assert.equal(x.value, 'auto');
      done();
    }, 0);
  });

  test('custom element automatically upgrades in subtree', function(done) {
    registerTestComponent('x-auto-sub', 'auto-sub');
    work.innerHTML = '<div></div>';
    var target = work.firstChild;
    setTimeout(function() {
      target.innerHTML = '<x-auto-sub></x-auto-sub>';
      var x = target.firstChild;
      assert.isUndefined(x.value);
      setTimeout(function() {
        assert.equal(x.value, 'auto-sub');
        done();
      }, 0);
    }, 0);
  });

  test('custom elements automatically upgrade', function(done) {
    registerTestComponent('x-auto1', 'auto1');
    registerTestComponent('x-auto2', 'auto2');
    // Mix of nested and sibling instances, including one type inside another.
    work.innerHTML = '<div><div><x-auto1></x-auto1><x-auto1></x-auto1>' +
        '</div></div><div><x-auto2><x-auto1></x-auto1></x-auto2>' +
        '<x-auto2><x-auto1></x-auto1></x-auto2></div>';
    setTimeout(function() {
      testElements(work, 'x-auto1', 'auto1');
      testElements(work, 'x-auto2', 'auto2');
      done();
    }, 0);
  });

  test('custom elements automatically upgrade in subtree', function(done) {
    registerTestComponent('x-auto-sub1', 'auto-sub1');
    registerTestComponent('x-auto-sub2', 'auto-sub2');
    work.innerHTML = '<div></div>';
    var target = work.firstChild;
    setTimeout(function() {
      target.innerHTML = '<div><div><x-auto-sub1></x-auto-sub1><x-auto-sub1></x-auto-sub1>' +
          '</div></div><div><x-auto-sub2><x-auto-sub1></x-auto-sub1></x-auto-sub2>' +
          '<x-auto-sub2><x-auto-sub1></x-auto-sub1></x-auto-sub2></div>';
      setTimeout(function() {
        testElements(target, 'x-auto-sub1', 'auto-sub1');
        testElements(target, 'x-auto-sub2', 'auto-sub2');
        done();
      }, 0);
    }, 0);
  });

  // test ShadowDOM only in webkit for now...
  if (HTMLElement.prototype.webkitCreateShadowRoot) {
    test('custom element automatically upgrades in ShadowDOM', function(done) {
      registerTestComponent('x-auto-shadow', 'auto-shadow');
      work.innerHTML = '<div></div>';
      var div = work.firstChild;
      var root = div.webkitCreateShadowRoot();
      // Shadow roots are not observed by default; opt this one in.
      CustomElements.watchShadow(root);
      root.innerHTML = '<x-auto-shadow></x-auto-shadow>';
      var x = root.firstChild;
      assert.isUndefined(x.value);
      setTimeout(function() {
        assert.equal(x.value, 'auto-shadow');
        done();
      }, 0);
    });

    test('custom element automatically upgrades in ShadowDOM subtree', function(done) {
      registerTestComponent('x-sub', 'sub');
      work.innerHTML = '<div></div>';
      var div = work.firstChild;
      var root = div.webkitCreateShadowRoot();
      root.innerHTML = '<div></div>';
      CustomElements.watchShadow(root);
      var target = root.firstChild;
      target.innerHTML = '<x-sub></x-sub>';
      var x = target.firstChild;
      assert.isUndefined(x.value);
      setTimeout(function() {
        assert.equal(x.value, 'sub');
        done();
      }, 0);
    });
  }
});
|
#!/bin/bash
# Launches a throwaway interactive dnstools pod for in-cluster DNS debugging.
# --rm with --restart=Never removes the pod again when the shell exits.
# ERR trap prints script/line/status context for any failing command.
trap 'echo "${BASH_SOURCE[0]}: line ${LINENO}: status ${?}: user ${USER}: func ${FUNCNAME[0]}"' ERR
set -o errexit
set -o errtrace
kubectl run --rm -it --restart=Never --image=infoblox/dnstools:latest dnstools
|
#!/bin/bash
# Copyright (c) 2020, Mathias Lüdtke
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This is the entrypoint for GitHub Actions only.

# 2016/05/18 http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in
DIR_THIS="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Map GitHub-provided context into the variables industrial_ci expects.
export TARGET_REPO_PATH=$GITHUB_WORKSPACE
export TARGET_REPO_NAME=${GITHUB_REPOSITORY##*/}
export _FOLDING_TYPE=github_actions

if [ -n "$INPUT_CONFIG" ]; then
  # Turn each key of the JSON "config" action input into an "export KEY=value"
  # line (values JSON-encoded; "$$" unescaped back to "$"), then keep only
  # lines exporting UPPERCASE names — presumably to restrict which variables
  # the input may set (TODO confirm intent).
  vars=$(jq -r 'keys[] as $k | "export \($k)=\(.[$k]|tojson)" | gsub("\\$\\$";"\\$")' <<< "$INPUT_CONFIG" | grep "^export [A-Z][A-Z_]*=")
  echo "$vars"
  eval "$vars"
fi

# Hand the (possibly augmented) environment plus action args to the CI driver.
env "$@" bash "$DIR_THIS/../industrial_ci/src/ci_main.sh"
|
class TemperatureConverter:
    """Convert temperatures between the Celsius and Fahrenheit scales."""

    def celsius_to_fahrenheit(self, celsius):
        """Return `celsius` (degrees C) expressed in degrees Fahrenheit."""
        return celsius * 9 / 5 + 32

    def fahrenheit_to_celsius(self, fahrenheit):
        """Return `fahrenheit` (degrees F) expressed in degrees Celsius."""
        return (fahrenheit - 32) * 5 / 9
<filename>client/src/Pages/Signup.js<gh_stars>0
import React, { useState } from "react";
import { ToastContainer, toast } from "react-toastify";
import "react-toastify/dist/ReactToastify.css";
import "./css/signup.css";
import { signup } from "./helper/authhelper";
import { Redirect } from "react-router-dom";
const Signup = () => {
// Form state plus submission outcome flags, kept in a single object.
const [values, setValues] = useState({
name: "",
email: "",
password: "",
error: "",
success: false,
});
const { name, email, password, error, success } = values;
// Curried change handler: clears any previous error and updates one field.
const handleChange = (name) => (event) => {
setValues({ ...values, error: false, [name]: event.target.value });
};
// Submit the signup form to the backend; on success, clear sensitive fields
// but keep `email` since the redirect URL below needs it.
const onSubmit = (event) => {
event.preventDefault();
setValues({ ...values, error: false });
signup({ name, email, password }).then((data) => {
if (data.error) {
setValues({ ...values, error: data.error, success: false });
// NOTE(review): errorMessage() reads `error` from this render's closure,
// so the toast may show the PREVIOUS error, not data.error — confirm and
// consider passing data.error directly.
errorMessage();
} else {
setValues({
...values,
name: "",
password: "",
error: "",
success: true,
});
}
});
};
// Show the current error value in a toast notification.
const errorMessage = () => {
toast(error);
};
// After a successful signup, redirect to OTP verification for this email;
// otherwise redirect back to /signup (effectively a no-op on this route).
const performRedirect = () => {
if (success === false) {
return <Redirect to="/signup" />;
} else {
var url = `/otpverification/${email}`;
console.log(url);
return <Redirect to={url} />;
}
};
// NOTE(review): SignupPage is re-defined on every render and invoked as a
// plain function below; defining components inside render can cause inputs
// to lose focus if it were mounted as <SignupPage /> — confirm intent.
const SignupPage = () => {
return (
<div className="signup">
<div className="signupcard">
<h1>Create new account</h1>
<div className="signupform">
<div className="signupname">
<input
type="text"
placeholder="Enter your Name"
value={name}
onChange={handleChange("name")}
/>
</div>
<div className="signupemail">
<input
type="email"
placeholder="Enter email address"
value={email}
onChange={handleChange("email")}
/>
</div>
<div className="signuppassword">
<input
type="password"
placeholder="Enter password"
value={password}
onChange={handleChange("password")}
/>
</div>
<div className="signupbutton">
<button onClick={onSubmit}>Signup</button>
</div>
<div className="existingaccount">
<a href="/signin">Signin|Login to existing account</a>
</div>
</div>
</div>
</div>
);
};
return (
<div>
{SignupPage()}
<ToastContainer
position="top-right"
autoClose={5000}
hideProgressBar={false}
newestOnTop={false}
closeOnClick
rtl={false}
pauseOnFocusLoss
draggable
pauseOnHover
/>
{performRedirect()}
</div>
);
};
|
#!/bin/bash
# Build the ITK/OpenCV tutorial exercise1 target, then open a terminal.
# Abort on any failure — in particular, if the cd fails we must NOT run
# `make` in whatever directory we happened to start in.
set -e
cd /home/tutorial/bin/ITK-OpenCV-Bridge-Tutorial/Exercises/ITKIntroduction/exercise1
make
gnome-terminal
|
package main
import (
"encoding/json"
"os"
"github.com/spf13/cobra"
"github.com/invopop/gobl.cli/internal"
"github.com/invopop/gobl/dsig"
)
// verifyOpts holds the flag values for the "verify" subcommand.
type verifyOpts struct {
// publicKeyFile is the path (possibly ~-prefixed) to the JWK public key
// used to validate the document signature.
publicKeyFile string
}
// verify returns a fresh, zero-valued option set for the verify subcommand.
func verify() *verifyOpts {
opts := new(verifyOpts)
return opts
}
// cmd builds the cobra command that validates a GOBL document's signature
// against a public key, reading the document from [infile] or stdin.
func (v *verifyOpts) cmd() *cobra.Command {
command := &cobra.Command{
Use: "verify [infile]",
Args: cobra.MaximumNArgs(1),
RunE: v.runE,
}
flags := command.Flags()
flags.StringVarP(&v.publicKeyFile, "key", "k", "~/.gobl/id_es256.pub.jwk", "Public key file for signature validation")
return command
}
// runE opens the input document, loads the JWK public key from disk, and
// delegates signature verification to the internal package.
func (v *verifyOpts) runE(cmd *cobra.Command, args []string) error {
input, err := openInput(cmd, args)
if err != nil {
return err
}
defer input.Close() // nolint:errcheck
// Expand a leading "~" in the key path to the user's home directory.
pbFilename, err := expandHome(v.publicKeyFile)
if err != nil {
return err
}
keyFile, err := os.Open(pbFilename)
if err != nil {
return err
}
defer keyFile.Close() // nolint:errcheck
// Decode the JWK JSON directly into a dsig public key.
key := new(dsig.PublicKey)
if err = json.NewDecoder(keyFile).Decode(key); err != nil {
return err
}
return internal.Verify(cmdContext(cmd), input, key)
}
|
# Compute exon read coverage for one ENCODE alignment and upload the results.
# Fail on errors, unset variables, and failures anywhere in a pipeline.
set -euo pipefail

# MAPQ (minimum mapping quality) must come from the environment; fail fast
# with a clear message instead of silently producing "<id>_mq" file names.
: "${MAPQ:?environment variable MAPQ must be set}"

# Set file name prefixes
readonly ENCODE_ID="ENCSR706ANY"
readonly ALIGN_PREFIX="${ENCODE_ID}_mq${MAPQ}"
readonly OUT_PREFIX="${ALIGN_PREFIX}_exon_cov"

# Download alignments (BAM plus index)
aws s3 cp "s3://vg-k8s/users/jsibbesen/vgrna/benchmark/whole_genome/data/alignments/${ENCODE_ID}/${ALIGN_PREFIX}.bam" . --no-progress
aws s3 cp "s3://vg-k8s/users/jsibbesen/vgrna/benchmark/whole_genome/data/alignments/${ENCODE_ID}/${ALIGN_PREFIX}.bam.bai" . --no-progress

# Download alignment exons
aws s3 cp "s3://vg-k8s/users/jsibbesen/vgrna/benchmark/whole_genome/data/alignments/${ENCODE_ID}/${ALIGN_PREFIX}.bed" . --no-progress

# Download gencode exons
aws s3 cp "s3://vg-k8s/users/jsibbesen/vgrna/data/transcripts/gencode29/gencode.v29.primary_assembly.annotation_renamed_full_exons.bed" exons_gc.bed --no-progress

# Calculate exon read coverage against the alignment exon set
/usr/bin/time -v bash -c "calc_exon_read_coverage ${ALIGN_PREFIX}.bam ${ALIGN_PREFIX}.bed > ${OUT_PREFIX}_bam.txt; gzip ${OUT_PREFIX}_bam.txt"

# Calculate exon read coverage against the gencode exon set
/usr/bin/time -v bash -c "calc_exon_read_coverage ${ALIGN_PREFIX}.bam exons_gc.bed > ${OUT_PREFIX}_gc.txt; gzip ${OUT_PREFIX}_gc.txt"

# Upload exon coverage results
aws s3 sync . "s3://vg-k8s/users/jsibbesen/vgrna/benchmark/whole_genome/data/alignments/${ENCODE_ID}/" --exclude "*" --include "${OUT_PREFIX}*" --no-progress
|
<filename>website/src/components/HomepageFeatures.tsx
import React from 'react';
import clsx from 'clsx';
import styles from './HomepageFeatures.module.css';
// Content for the three feature cards shown on the landing page; each entry
// supplies a card title and a JSX description blurb consumed by <Feature>.
const FeatureList = [
{
title: 'Offline',
description: <>No need for internet to use any of the plugins functionality.</>,
},
{
title: 'Creativity',
description: <>Create anything on your mind. Most things are possible with this plugin.</>,
},
{
title: 'Metric sensitive',
description: <>This plugin was made to visualize metrics.</>,
},
];
function Feature({ title, description }) {
return (
<div className={clsx('col col--4')}>
<div className="text--center padding-horiz--md">
<h3>{title}</h3>
<p>{description}</p>
</div>
</div>
);
}
export default function HomepageFeatures() {
return (
<section className={styles.features}>
<div className="container">
<div className="row">
{FeatureList.map((props, idx) => (
<Feature key={idx} {...props} />
))}
</div>
</div>
</section>
);
}
|
<reponame>pureport/pureport-python-client
# -*- coding: utf-8 -*_
#
# Copyright (c) 2020, Pureport, Inc.
# All Rights Reserved
from __future__ import absolute_import
from click import argument
from pureport_client.commands import (
CommandBase,
AccountsMixin
)
from pureport_client.util import JSON
from pureport import models
# NOTE: the docstrings below double as click help text (text before the \f
# marker is shown to CLI users), so they are left untouched; commentary is
# added as # comments only.
class Command(AccountsMixin, CommandBase):
"""Manage Pureport account API keys
"""
def list(self):
"""Get a list of all API keys for an account.
\f
:returns: list of account objects
:rtype: list
"""
# Delegates directly to the underlying API client.
return self.client.find_api_keys()
@argument('api_key')
def get(self, api_key):
"""Get an account's API Key with the provided API Key key.
\f
:param api_key: the key associated with the API Key to return
:type api_key: str
:returns: an APIKey object
:rtype: models.ApiKey
"""
return self.client.get_api_key(api_key)
@argument('api_key', type=JSON)
def create(self, api_key):
"""Create an API Key for the provided account.
\f
:param api_key: the key associated with the API Key to return
:type api_key: str
"""
# api_key arrives as parsed JSON; load it into a typed model and pin it
# to the account this command is scoped to before creating.
model = models.load("ApiKey", api_key)
model.account_id = self.account_id
return self.client.create_api_key(model=model)
@argument('api_key', type=JSON)
def update(self, api_key):
"""Update an API Key for the provided account.
\f
:param api_key: the key associated with the API Key to return
:type api_key: str
"""
# Same model-loading flow as create(), but issues an update instead.
model = models.load("ApiKey", api_key)
model.account_id = self.account_id
return self.client.update_api_key(model=model)
@argument('api_key')
def delete(self, api_key):
"""Delete an API Key from the provided account.
\f
:param api_key: the key associated with the API Key to return
:type api_key: str
"""
# No return value: deletion is fire-and-forget from the CLI's view.
self.client.delete_api_key(api_key)
|
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package models
import (
"fmt"
"testing"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/references"
"github.com/stretchr/testify/assert"
)
// TestXRef_AddCrossReferences verifies that creating issues/PRs/comments whose
// text references other issues produces the expected Comment rows (type,
// repo, pull flag, and close/reopen/mention action), and that references made
// without permission produce no row.
func TestXRef_AddCrossReferences(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
// Issue #1 to test against
itarget := testCreateIssue(t, 1, 2, "title1", "content1", false)
// PR to close issue #1
content := fmt.Sprintf("content2, closes #%d", itarget.Index)
pr := testCreateIssue(t, 1, 2, "title2", content, true)
ref := unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: 0}).(*Comment)
assert.Equal(t, CommentTypePullRef, ref.Type)
assert.Equal(t, pr.RepoID, ref.RefRepoID)
assert.True(t, ref.RefIsPull)
assert.Equal(t, references.XRefActionCloses, ref.RefAction)
// Comment on PR to reopen issue #1
content = fmt.Sprintf("content2, reopens #%d", itarget.Index)
c := testCreateComment(t, 1, 2, pr.ID, content)
ref = unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: c.ID}).(*Comment)
assert.Equal(t, CommentTypeCommentRef, ref.Type)
assert.Equal(t, pr.RepoID, ref.RefRepoID)
assert.True(t, ref.RefIsPull)
assert.Equal(t, references.XRefActionReopens, ref.RefAction)
// Issue mentioning issue #1
content = fmt.Sprintf("content3, mentions #%d", itarget.Index)
i := testCreateIssue(t, 1, 2, "title3", content, false)
ref = unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment)
assert.Equal(t, CommentTypeIssueRef, ref.Type)
assert.Equal(t, pr.RepoID, ref.RefRepoID)
assert.False(t, ref.RefIsPull)
assert.Equal(t, references.XRefActionNone, ref.RefAction)
// Issue #4 to test against
itarget = testCreateIssue(t, 3, 3, "title4", "content4", false)
// Cross-reference to issue #4 by admin
content = fmt.Sprintf("content5, mentions user3/repo3#%d", itarget.Index)
i = testCreateIssue(t, 2, 1, "title5", content, false)
ref = unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment)
assert.Equal(t, CommentTypeIssueRef, ref.Type)
assert.Equal(t, i.RepoID, ref.RefRepoID)
assert.False(t, ref.RefIsPull)
assert.Equal(t, references.XRefActionNone, ref.RefAction)
// Cross-reference to issue #4 with no permission
content = fmt.Sprintf("content6, mentions user3/repo3#%d", itarget.Index)
i = testCreateIssue(t, 4, 5, "title6", content, false)
unittest.AssertNotExistsBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0})
}
// TestXRef_NeuterCrossReferences verifies that editing a title to remove a
// mention flips the existing reference's action to XRefActionNeutered rather
// than deleting the Comment row.
func TestXRef_NeuterCrossReferences(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
// Issue #1 to test against
itarget := testCreateIssue(t, 1, 2, "title1", "content1", false)
// Issue mentioning issue #1
title := fmt.Sprintf("title2, mentions #%d", itarget.Index)
i := testCreateIssue(t, 1, 2, title, "content2", false)
ref := unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment)
assert.Equal(t, CommentTypeIssueRef, ref.Type)
assert.Equal(t, references.XRefActionNone, ref.RefAction)
d := unittest.AssertExistsAndLoadBean(t, &User{ID: 2}).(*User)
// Change the title to one without a mention; the reference must be neutered.
i.Title = "title2, no mentions"
assert.NoError(t, i.ChangeTitle(d, title))
ref = unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment)
assert.Equal(t, CommentTypeIssueRef, ref.Type)
assert.Equal(t, references.XRefActionNeutered, ref.RefAction)
}
// TestXRef_ResolveCrossReferences verifies that ResolveCrossReferences returns
// only actionable (close/reopen) references, in order, with later comments on
// the same target superseding earlier ones and plain mentions ignored.
func TestXRef_ResolveCrossReferences(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
d := unittest.AssertExistsAndLoadBean(t, &User{ID: 2}).(*User)
i1 := testCreateIssue(t, 1, 2, "title1", "content1", false)
i2 := testCreateIssue(t, 1, 2, "title2", "content2", false)
i3 := testCreateIssue(t, 1, 2, "title3", "content3", false)
_, err := i3.ChangeStatus(d, true)
assert.NoError(t, err)
pr := testCreatePR(t, 1, 2, "titlepr", fmt.Sprintf("closes #%d", i1.Index))
rp := unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: i1.ID, RefIssueID: pr.Issue.ID, RefCommentID: 0}).(*Comment)
c1 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i2.Index))
r1 := unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c1.ID}).(*Comment)
// Must be ignored
c2 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("mentions #%d", i2.Index))
unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c2.ID})
// Must be superseded by c4/r4
c3 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("reopens #%d", i3.Index))
unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c3.ID})
c4 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index))
r4 := unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c4.ID}).(*Comment)
refs, err := pr.ResolveCrossReferences()
assert.NoError(t, err)
assert.Len(t, refs, 3)
assert.Equal(t, rp.ID, refs[0].ID, "bad ref rp: %+v", refs[0])
assert.Equal(t, r1.ID, refs[1].ID, "bad ref r1: %+v", refs[1])
assert.Equal(t, r4.ID, refs[2].ID, "bad ref r4: %+v", refs[2])
}
// testCreateIssue creates (and returns) an issue or PR-issue in repo `repo`
// authored by user `doer`, inside a transaction, and runs cross-reference
// extraction on its content before committing.
func testCreateIssue(t *testing.T, repo, doer int64, title, content string, ispull bool) *Issue {
r := unittest.AssertExistsAndLoadBean(t, &Repository{ID: repo}).(*Repository)
d := unittest.AssertExistsAndLoadBean(t, &User{ID: doer}).(*User)
// Reserve the next per-repo issue index up front.
idx, err := db.GetNextResourceIndex("issue_index", r.ID)
assert.NoError(t, err)
i := &Issue{
RepoID: r.ID,
PosterID: d.ID,
Poster: d,
Title: title,
Content: content,
IsPull: ispull,
Index: idx,
}
ctx, committer, err := db.TxContext()
assert.NoError(t, err)
defer committer.Close()
err = newIssue(ctx, d, NewIssueOptions{
Repo: r,
Issue: i,
})
assert.NoError(t, err)
// Reload to pick up DB-populated fields before extracting references.
i, err = getIssueByID(db.GetEngine(ctx), i.ID)
assert.NoError(t, err)
assert.NoError(t, i.addCrossReferences(ctx, d, false))
assert.NoError(t, committer.Commit())
return i
}
// testCreatePR creates a mergeable pull request (head/base in the same repo)
// with the given title/content and returns it with its Issue attached.
func testCreatePR(t *testing.T, repo, doer int64, title, content string) *PullRequest {
r := unittest.AssertExistsAndLoadBean(t, &Repository{ID: repo}).(*Repository)
d := unittest.AssertExistsAndLoadBean(t, &User{ID: doer}).(*User)
i := &Issue{RepoID: r.ID, PosterID: d.ID, Poster: d, Title: title, Content: content, IsPull: true}
pr := &PullRequest{HeadRepoID: repo, BaseRepoID: repo, HeadBranch: "head", BaseBranch: "base", Status: PullRequestStatusMergeable}
assert.NoError(t, NewPullRequest(r, i, nil, nil, pr))
pr.Issue = i
return pr
}
// testCreateComment inserts a plain comment on `issue` by `doer` inside a
// transaction and runs cross-reference extraction on its content.
func testCreateComment(t *testing.T, repo, doer, issue int64, content string) *Comment {
d := unittest.AssertExistsAndLoadBean(t, &User{ID: doer}).(*User)
i := unittest.AssertExistsAndLoadBean(t, &Issue{ID: issue}).(*Issue)
c := &Comment{Type: CommentTypeComment, PosterID: doer, Poster: d, IssueID: issue, Issue: i, Content: content}
ctx, committer, err := db.TxContext()
assert.NoError(t, err)
defer committer.Close()
err = db.Insert(ctx, c)
assert.NoError(t, err)
assert.NoError(t, c.addCrossReferences(ctx, d, false))
assert.NoError(t, committer.Commit())
return c
}
|
<filename>instrumentation-test-harness/src/main/java/org/glowroot/instrumentation/test/harness/impl/JavaagentContainer.java<gh_stars>1-10
/*
* Copyright 2011-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.instrumentation.test.harness.impl;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.base.StandardSystemProperty;
import com.google.common.base.Stopwatch;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.glowroot.instrumentation.test.harness.AppUnderTest;
import org.glowroot.instrumentation.test.harness.Container;
import org.glowroot.instrumentation.test.harness.IncomingSpan;
import org.glowroot.instrumentation.test.harness.agent.Premain;
import org.glowroot.instrumentation.test.harness.util.ConsoleOutputPipe;
import org.glowroot.instrumentation.test.harness.util.TempDirs;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
 * Test-harness container that runs the application under test in a SEPARATE
 * JVM with the instrumentation javaagent attached. Manages the child process
 * lifecycle: heartbeat socket, trace collection, console piping, and a JVM
 * shutdown hook that kills the child.
 */
public class JavaagentContainer implements Container {
private static final boolean XDEBUG = Boolean.getBoolean("test.harness.xdebug");
private static final Logger logger = LoggerFactory.getLogger(JavaagentContainer.class);
private final HeartbeatListener heartbeatListener;
private final ExecutorService heartbeatListenerExecutor;
private final File tmpDir;
private final TraceCollector traceCollector;
private final JavaagentClient javaagentClient;
private final ExecutorService consolePipeExecutor;
private final Future<?> consolePipeFuture;
private final Process process;
private final ConsoleOutputPipe consoleOutputPipe;
private final Thread shutdownHook;
// Factory: container with no extra JVM args.
public static JavaagentContainer create() throws Exception {
return new JavaagentContainer(ImmutableList.<String>of());
}
// Factory: container whose child JVM gets the given extra args.
public static JavaagentContainer createWithExtraJvmArgs(List<String> extraJvmArgs)
throws Exception {
return new JavaagentContainer(extraJvmArgs);
}
// Spawns the child JVM and blocks until its javaagent server is reachable.
private JavaagentContainer(List<String> extraJvmArgs) throws Exception {
// need to start heartbeat socket listener before spawning process
heartbeatListener = new HeartbeatListener();
heartbeatListenerExecutor = Executors.newSingleThreadExecutor();
heartbeatListenerExecutor.execute(heartbeatListener);
traceCollector = new TraceCollector();
traceCollector.start();
tmpDir = TempDirs.createTempDir("harness-dir");
List<String> command = buildCommand(heartbeatListener.serverSocket.getLocalPort(),
traceCollector.getPort(), tmpDir, extraJvmArgs);
ProcessBuilder processBuilder = new ProcessBuilder(command);
processBuilder.redirectErrorStream(true);
Process process = processBuilder.start();
consolePipeExecutor = Executors.newSingleThreadExecutor();
InputStream in = process.getInputStream();
// process.getInputStream() only returns null if ProcessBuilder.redirectOutput() is used
// to redirect output to a file
checkNotNull(in);
consoleOutputPipe = new ConsoleOutputPipe(in, System.out);
consolePipeFuture = consolePipeExecutor.submit(consoleOutputPipe);
this.process = process;
Stopwatch stopwatch = Stopwatch.createStarted();
javaagentClient = connectToJavaagent(heartbeatListener.getJavaagentServerPort(), stopwatch);
javaagentClient.resetInstrumentationProperties();
shutdownHook = new ShutdownHookThread(javaagentClient);
// unfortunately, ctrl-c during maven test will kill the maven process, but won't kill the
// forked surefire jvm where the tests are being run
// (http://jira.codehaus.org/browse/SUREFIRE-413), and so this hook won't get triggered by
// ctrl-c while running tests under maven
Runtime.getRuntime().addShutdownHook(shutdownHook);
}
// Retries the client connection for up to 30s, then rethrows the last error.
private static JavaagentClient connectToJavaagent(int javaagentServerPort, Stopwatch stopwatch)
throws Exception {
// this can take a while on slow travis ci build machines
Exception lastException = null;
while (stopwatch.elapsed(SECONDS) < 30) {
try {
return new JavaagentClient(javaagentServerPort);
} catch (Exception e) {
logger.debug(e.getMessage(), e);
lastException = e;
}
MILLISECONDS.sleep(10);
}
throw checkNotNull(lastException);
}
@Override
public void setInstrumentationProperty(String instrumentationId, String propertyName,
boolean propertyValue) throws Exception {
javaagentClient.setInstrumentationProperty(instrumentationId, propertyName, propertyValue);
}
@Override
public void setInstrumentationProperty(String instrumentationId, String propertyName,
Double propertyValue) throws Exception {
javaagentClient.setInstrumentationProperty(instrumentationId, propertyName, propertyValue);
}
@Override
public void setInstrumentationProperty(String instrumentationId, String propertyName,
String propertyValue) throws Exception {
javaagentClient.setInstrumentationProperty(instrumentationId, propertyName, propertyValue);
}
@Override
public void setInstrumentationProperty(String instrumentationId, String propertyName,
List<String> propertyValue) throws Exception {
// converting to ArrayList since guava ImmutableList may be shaded in javaagent and so would
// error on de-serialization
javaagentClient.setInstrumentationProperty(instrumentationId, propertyName,
new ArrayList<String>(propertyValue));
}
@Override
public IncomingSpan execute(Class<? extends AppUnderTest> appClass, Serializable... args)
throws Exception {
return executeInternal(appClass, null, null, args);
}
@Override
public IncomingSpan executeForType(Class<? extends AppUnderTest> appClass,
String transactionType, Serializable... args) throws Exception {
return executeInternal(appClass, transactionType, null, args);
}
@Override
public IncomingSpan executeForTypeAndName(Class<? extends AppUnderTest> appClass,
String transactionType, String transactionName, Serializable... args) throws Exception {
return executeInternal(appClass, transactionType, transactionName, args);
}
// Runs the app and asserts that NO trace is produced.
@Override
public void executeNoExpectedTrace(Class<? extends AppUnderTest> appClass, Serializable... args)
throws Exception {
executeInternal(appClass, args);
// give a short time to see if trace gets collected
MILLISECONDS.sleep(10);
if (traceCollector != null && traceCollector.hasIncomingSpan()) {
throw new IllegalStateException("Trace was collected when none was expected");
}
}
@Override
public void resetAfterEachTest() throws Exception {
javaagentClient.resetInstrumentationProperties();
}
// Tears everything down in dependency order: child process first, then the
// piping/heartbeat executors, socket, shutdown hook, and temp dir.
@Override
public void close() throws Exception {
heartbeatListener.closed = true;
javaagentClient.kill();
traceCollector.close();
process.waitFor();
consolePipeFuture.get();
consolePipeExecutor.shutdown();
if (!consolePipeExecutor.awaitTermination(10, SECONDS)) {
throw new IllegalStateException("Could not terminate executor");
}
heartbeatListenerExecutor.shutdown();
if (!heartbeatListenerExecutor.awaitTermination(10, SECONDS)) {
throw new IllegalStateException("Could not terminate executor");
}
heartbeatListener.serverSocket.close();
Runtime.getRuntime().removeShutdownHook(shutdownHook);
TempDirs.deleteRecursively(tmpDir);
}
// Runs the app in the child JVM and waits for its completed incoming span.
private IncomingSpan executeInternal(Class<? extends AppUnderTest> appClass,
@Nullable String transactionType, @Nullable String transactionName,
Serializable... args) throws Exception {
checkNotNull(traceCollector);
executeInternal(appClass, args);
// extra long wait time is needed for StackOverflowOOMIT on slow travis ci machines since it
// can sometimes take a long time for that large trace to be serialized and transferred
IncomingSpan incomingSpan =
traceCollector.getCompletedIncomingSpan(transactionType, transactionName, 20,
SECONDS);
traceCollector.clearIncomingSpans();
return incomingSpan;
}
private void executeInternal(Class<? extends AppUnderTest> appUnderTestClass,
Serializable[] args) throws Exception {
javaagentClient.executeApp(appUnderTestClass.getName(), args);
}
// Builds the child JVM's command line: classpath is partitioned so that the
// instrumentation under test and its engine land on the boot classpath,
// test classes on the regular classpath.
private static List<String> buildCommand(int heartbeatPort, int collectorPort, File tmpDir,
List<String> extraJvmArgs) throws Exception {
List<String> command = Lists.newArrayList();
String javaExecutable = StandardSystemProperty.JAVA_HOME.value() + File.separator + "bin"
+ File.separator + "java";
command.add(javaExecutable);
boolean hasXmx = false;
for (String extraJvmArg : extraJvmArgs) {
command.add(extraJvmArg);
if (extraJvmArg.startsWith("-Xmx")) {
hasXmx = true;
}
}
// it is important for jacoco javaagent to be prior to the test harness javaagent so that
// jacoco will use original class bytes to form its class id at runtime which will then
// match up with the class id at analysis time
command.addAll(getJacocoArgsFromCurrentJvm());
String classpath = Strings.nullToEmpty(StandardSystemProperty.JAVA_CLASS_PATH.value())!;
List<String> bootPaths = Lists.newArrayList();
List<String> paths = Lists.newArrayList();
List<String> maybeBootPaths = Lists.newArrayList();
File delegatingJavaagentJarFile = null;
File javaagentJarFile = null;
for (String path : Splitter.on(File.pathSeparatorChar).split(classpath)) {
File file = new File(path);
String name = file.getName();
String targetClasses = File.separator + "target" + File.separator + "classes";
if (name.matches("delegating-javaagent-[0-9.]+(-SNAPSHOT)?.jar")) {
delegatingJavaagentJarFile = file;
} else if (name.matches("instrumentation-test-harness-[0-9.]+(-SNAPSHOT)?.jar")) {
javaagentJarFile = file;
} else if (name.matches("instrumentation-api-[0-9.]+(-SNAPSHOT)?.jar")
|| name.matches("instrumentation-engine-[0-9.]+(-SNAPSHOT)?.jar")) {
// these are instrumentation-test-harness transitive dependencies
maybeBootPaths.add(path);
} else if (file.getAbsolutePath()
.endsWith(File.separator + "instrumentation-api" + targetClasses)
|| file.getAbsolutePath().endsWith(File.separator + "engine" + targetClasses)) {
// these are instrumentation-test-harness transitive dependencies
maybeBootPaths.add(path);
} else if (name.matches("asm-.*\\.jar")
|| name.matches("guava-.*\\.jar")
|| name.matches("gson-.*\\.jar")
|| name.matches("logback-.*\\.jar")
// javax.servlet-api is needed because logback-classic has
// META-INF/services/javax.servlet.ServletContainerInitializer
|| name.matches("javax.servlet-api-.*\\.jar")
|| name.matches("slf4j-api-.*\\.jar")
|| name.matches("value-.*\\.jar")
// this is needed for now to support reusable ExecuteHttpBase
|| name.matches("nanohttpd-.*\\.jar")
|| name.matches("error_prone_annotations-.*\\.jar")
|| name.matches("jsr305-.*\\.jar")) {
// these are instrumentation-test-harness transitive dependencies
maybeBootPaths.add(path);
} else if (name.endsWith(".jar") && file.getAbsolutePath()
.endsWith(File.separator + "target" + File.separator + name)) {
// this is the instrumentation under test
bootPaths.add(path);
} else if (name.matches("instrumentation-[a-z0-9-]+-[0-9.]+(-SNAPSHOT)?.jar")) {
// this another (core) instrumentation that it depends on, e.g. the executor
// instrumentation
bootPaths.add(path);
} else if (file.getAbsolutePath().endsWith(targetClasses)) {
// this is the instrumentation under test
bootPaths.add(path);
} else if (file.getAbsolutePath()
.endsWith(File.separator + "target" + File.separator + "test-classes")) {
// this is the instrumentation test classes
paths.add(path);
} else {
// these are instrumentation test dependencies
paths.add(path);
}
}
if (javaagentJarFile == null) {
bootPaths.addAll(maybeBootPaths);
} else {
// if the harness jar is shaded, its transitive deps must stay off the
// boot classpath to avoid clashing with the shaded copies
boolean shaded = false;
JarInputStream jarIn = new JarInputStream(new FileInputStream(javaagentJarFile));
try {
JarEntry jarEntry;
while ((jarEntry = jarIn.getNextJarEntry()) != null) {
if (jarEntry.getName()
.startsWith("org/glowroot/instrumentation/test/harness/shaded/")) {
shaded = true;
break;
}
}
} finally {
jarIn.close();
}
if (shaded) {
paths.addAll(maybeBootPaths);
} else {
bootPaths.addAll(maybeBootPaths);
}
}
command.add("-Xbootclasspath/a:" + Joiner.on(File.pathSeparatorChar).join(bootPaths));
command.add("-classpath");
command.add(Joiner.on(File.pathSeparatorChar).join(paths));
if (XDEBUG) {
// the -agentlib arg needs to come before the -javaagent arg
command.add("-Xdebug");
command.add("-agentlib:jdwp=transport=dt_socket,address=8000,server=y,suspend=y");
}
if (javaagentJarFile == null) {
javaagentJarFile = checkNotNull(delegatingJavaagentJarFile);
}
command.add("-javaagent:" + javaagentJarFile + "=" + Premain.class.getName());
command.add("-Dtest.harness.tmpDir=" + tmpDir.getAbsolutePath());
command.add("-Dtest.harness.collectorPort=" + collectorPort);
// this is used inside low-entropy docker containers
String sourceOfRandomness = System.getProperty("java.security.egd");
if (sourceOfRandomness != null) {
command.add("-Djava.security.egd=" + sourceOfRandomness);
}
if (!hasXmx) {
command.add("-Xmx" + Runtime.getRuntime().maxMemory());
}
// leave as much memory as possible to old gen
command.add("-XX:NewRatio=20");
command.add(JavaagentMain.class.getName());
command.add(Integer.toString(heartbeatPort));
return command;
}
// Re-uses this JVM's jacoco -javaagent arg (if any) so the child JVM is
// also covered, restricted to org.glowroot.instrumentation.* classes.
private static List<String> getJacocoArgsFromCurrentJvm() {
RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
List<String> arguments = runtimeMXBean.getInputArguments();
List<String> jacocoArgs = Lists.newArrayList();
for (String argument : arguments) {
if (argument.startsWith("-javaagent:") && argument.contains("jacoco")) {
jacocoArgs.add(argument + ",includes=org.glowroot.instrumentation.*");
break;
}
}
return jacocoArgs;
}
// Accepts one connection from the child JVM; the first int read is the
// port of the child's javaagent server, published via getJavaagentServerPort.
private static class HeartbeatListener implements Runnable {
private final ServerSocket serverSocket;
private final AtomicInteger javaagentServerPort = new AtomicInteger();
private volatile boolean closed;
private HeartbeatListener() throws IOException {
serverSocket = new ServerSocket(0);
}
@Override
public void run() {
try {
Socket socket = serverSocket.accept();
ObjectInputStream socketIn = new ObjectInputStream(socket.getInputStream());
synchronized (javaagentServerPort) {
javaagentServerPort.set(socketIn.readInt());
javaagentServerPort.notifyAll();
}
// drain the heartbeat stream until the child closes it
ByteStreams.exhaust(socketIn);
} catch (IOException e) {
if (!closed) {
logger.error(e.getMessage(), e);
}
}
}
// Blocks until the child has reported its server port (non-zero).
private int getJavaagentServerPort() throws InterruptedException {
synchronized (javaagentServerPort) {
while (javaagentServerPort.get() == 0) {
javaagentServerPort.wait();
}
}
return javaagentServerPort.get();
}
}
// JVM shutdown hook that kills the child process via its client.
private static class ShutdownHookThread extends Thread {
private final JavaagentClient javaagentClient;
private ShutdownHookThread(JavaagentClient javaagentClient) {
this.javaagentClient = javaagentClient;
}
@Override
public void run() {
try {
javaagentClient.kill();
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
}
}
|
/* 1: */ package com.four.common.utils;
/* 2: */
/* 3: */ import java.io.File;
/* 4: */ import java.io.FileInputStream;
/* 5: */ import java.io.InputStream;
/* 6: */ import java.util.Collection;
/* 7: */ import java.util.Properties;
/* 8: */
public class Utils
{
    /** Returns true when {@code str} is null or has length zero. */
    public static boolean isEmpty(String str)
    {
        return (str == null) || (str.length() == 0);
    }

    /** Returns true when {@code str} is null, empty, or whitespace-only. */
    public static boolean isBlank(String str)
    {
        if ((str == null) || (str.length() == 0)) {
            return true;
        }
        for (int i = 0; i < str.length(); i++) {
            if (!Character.isWhitespace(str.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /** Returns true when {@code list} is null or contains no elements. */
    public static boolean isEmpty(Collection list)
    {
        return (list == null) || (list.size() == 0);
    }

    /**
     * Loads {@code file} as a {@link Properties} object.
     *
     * @param file properties file to read
     * @return the loaded properties, or {@code null} on any error
     *         (behavior kept from the original; callers must null-check)
     */
    public static Properties loadProperties(File file)
    {
        Properties properties = new Properties();
        InputStream inputStream = null;
        try
        {
            inputStream = new FileInputStream(file);
            properties.load(inputStream);
            return properties;
        }
        catch (Exception e)
        {
            // NOTE(review): errors are deliberately swallowed and null is
            // returned, matching the original decompiled contract.
            return null;
        }
        finally
        {
            if (inputStream != null) {
                try
                {
                    inputStream.close();
                }
                catch (Exception ignored) {}
            }
        }
    }
}
/* Location: E:\Henuo\public\public\bin\convertdata-1.0.jar
* Qualified Name: com.four.common.utils.Utils
* JD-Core Version: 0.7.0.1
*/ |
/**
 * Writes an AJAX response into one or more DOM nodes.
 *
 * @param {Object} ajCall   AJAX call wrapper; only ajCall.xhr.responseText is read.
 * @param {string} resParam Comma-separated list of element ids to update.
 *
 * NOTE(review): the response is injected via innerHTML, so it must be
 * trusted markup — untrusted content here would be an XSS vector.
 * A missing element id still throws (original behavior preserved).
 */
function StdInnerHTML(ajCall, resParam) {
    // Hoist the loop-invariant response text out of the loop.
    var html = ajCall.xhr.responseText;
    var ids = resParam.split(",");
    for (var i = 0; i < ids.length; i++) {
        document.getElementById(ids[i]).innerHTML = html;
    }
}
|
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*************************************************************************************
*/
package com.normation.ldap.ldif
import com.unboundid.ldif._
import com.unboundid.ldap.sdk.{DN,Entry}
import com.normation.ldap.sdk.LDAPTree
import org.slf4j.{Logger, LoggerFactory}
import java.io.File
/**
 * A service that allows to log LDAP objects into
 * files if a trace log level is enable for the
 * log unit "loggerName".
 *
 * Be careful, enabling that logger may be
 * extremely heavy in log size.
 */
trait LDIFFileLogger {
/**
 * The name of the log unit to set to trace
 * to enable LDIF output.
 */
def loggerName : String
/**
 * Root directory for LDIF traces.
 */
def ldifTraceRootDir : String
/**
 * Write the given tree as a set of LDIF records in
 * the trace directory.
 */
def tree(tree:LDAPTree) : Unit
/**
 * Write the given record in the trace directory.
 * Note: the record is passed by-name so implementations can avoid
 * evaluating it entirely when tracing is disabled.
 */
def record(LDIFRecord: => LDIFRecord,comment:Option[String] = None) : Unit
/** Write several records (by-name, same rationale as `record`). */
def records(LDIFRecords: => Seq[LDIFRecord]) : Unit
}
/**
 * No-op LDIFFileLogger: every trace call is silently discarded.
 * Useful as a default when LDIF tracing is not wanted.
 */
class DummyLDIFFileLogger extends LDIFFileLogger {
  val loggerName = "dummy logger - no output"
  val ldifTraceRootDir = "no a real ldifTraceRootDir"
  def tree(tree: LDAPTree): Unit = {}
  def record(LDIFRecord: => LDIFRecord, comment: Option[String] = None): Unit = {}
  def records(LDIFRecords: => Seq[LDIFRecord]): Unit = {}
}
/**
 * A simple logger that just print LDIF in a
 * configured directory.
 * This class should never raise an exception, as it's
 * only use to log.
 */
object DefaultLDIFFileLogger {
// Default trace directory: <java.io.tmpdir>/ldifTrace
val defaultTraceDir = System.getProperty("java.io.tmpdir") +
System.getProperty("file.separator") + "ldifTrace"
val defaultLoggerName = "trace.ldif.in.file"
}
/**
 * Slf4jLDIFLogger bound to the TRACE level: LDIF files are only written
 * when TRACE is enabled for `loggerName`.
 */
class DefaultLDIFFileLogger(
override val loggerName:String = DefaultLDIFFileLogger.defaultLoggerName,
override val ldifTraceRootDir:String = DefaultLDIFFileLogger.defaultTraceDir
) extends Slf4jLDIFLogger {
override def isLogLevel : Boolean = logger.isTraceEnabled
override def log(s:String) : Unit = logger.trace(s)
override def logE(s:String,e:Exception): Unit = logger.trace(s,e)
}
/**
 * LDIFFileLogger backed by an SLF4J logger.
 * Concrete subclasses decide the effective level through
 * isLogLevel/log/logE (DefaultLDIFFileLogger binds them to TRACE).
 * All I/O errors are swallowed after being reported via logE.
 */
trait Slf4jLDIFLogger extends LDIFFileLogger {

  def isLogLevel : Boolean
  def log(s:String) : Unit
  def logE(s:String,e:Exception): Unit

  val logger = LoggerFactory.getLogger(loggerName)

  /** Trace root directory, created on demand. */
  def rootDir() = {
    val dir = new File(ldifTraceRootDir)
    if(!dir.exists()) dir.mkdirs
    dir
  }

  /**
   * Build a trace file name from the entry DN (file separators inside
   * RDNs are escaped to '|'), a timestamp and the operation type.
   */
  protected def traceFileName(dn:DN, opType:String) : String = {
    val fileName = dn.getRDNStrings().map( _.replaceAll(File.separator, "|")).reverse.mkString("/")
    fileName + "-" + System.currentTimeMillis.toString + "-" + opType + ".ldif"
  }

  protected def createTraceFile(fileName:String) : File = {
    new File(rootDir, fileName)
  }

  // FIX: the message previously contained "$(unknown)" — not a valid
  // s-interpolation — and never referenced the filename parameter;
  // also fixed the "loggin" typo.
  private def errorMessage(e:Exception,filename:String) : Unit =
    logE(s"Exception when logging LDIF trace in $filename (ignored)",e)

  private def writeRecord(ldifWriter:LDIFWriter,LDIFRecord:LDIFRecord,comment:Option[String] = None) {
    comment match {
      case None => ldifWriter.writeLDIFRecord(LDIFRecord)
      case Some(c) => ldifWriter.writeLDIFRecord(LDIFRecord,c)
    }
  }

  override def tree(tree:LDAPTree) {
    if(isLogLevel) {
      val filename = traceFileName(tree.root.dn, "CONTENT")
      try {
        val ldif = createTraceFile(filename)
        log("Printing LDIF trace of Entity Tree : " + ldif.getAbsolutePath)
        val writer = new LDIFWriter(ldif)
        try {
          tree.foreach { e =>
            writeRecord(writer,e.backed)
          }
        } finally {
          writer.close
        }
      } catch {
        case e:Exception => errorMessage(e,filename)
      }
    }
  }

  override def record(record: => LDIFRecord,comment:Option[String] = None) {
    if(isLogLevel) {
      var writer:LDIFWriter = null
      val opType = record match {
        case _:Entry => "CONTENT"
        case _:LDIFAddChangeRecord => "ADD"
        case _:LDIFDeleteChangeRecord => "DELETE"
        case _:LDIFModifyChangeRecord => "MODIFY"
        case _:LDIFModifyDNChangeRecord => "MODIFY_DN"
        case _ => "UNKNOWN_OP"
      }
      val filename = traceFileName(record.getParsedDN,opType)
      try {
        val ldif = createTraceFile(filename)
        log("Printing LDIF trace of unitary operation on record in : " + ldif.getAbsolutePath)
        writer = new LDIFWriter(ldif)
        writeRecord(writer,record,comment)
      } catch {
        case e:Exception => errorMessage(e,filename)
      } finally {
        if(null != writer) writer.close
      }
    }
  }

  override def records(records: => Seq[LDIFRecord]) {
    if(isLogLevel) {
      // Evaluate the by-name parameter exactly once.
      val rs = records
      if(rs.isEmpty) {
        log("Nothing to print as record list is empty")
      } else {
        // FIX: the filename was previously derived from records.head BEFORE
        // the emptiness check and OUTSIDE the try block, so an empty
        // sequence threw NoSuchElementException instead of logging the
        // "nothing to print" message.
        var writer:LDIFWriter = null
        val filename = traceFileName(rs.head.getParsedDN, "RECORDS")
        try {
          val ldif = createTraceFile(filename)
          //create parent directory if it does not exists
          ldif.getParentFile().mkdirs()
          //save ldif
          log("Printing LDIF trace of operations on records in : " + ldif.getAbsolutePath)
          writer = new LDIFWriter(ldif)
          rs.foreach { record => writeRecord(writer,record) }
        } catch {
          case e:Exception => errorMessage(e,filename)
        } finally {
          if(null != writer) writer.close
        }
      }
    }
  }
}
// Shape.java
/** Base type for the Factory-pattern demo: every shape can draw itself. */
public abstract class Shape {
    /** Render this shape (demo implementations print to stdout). */
    public abstract void draw();
}
// Circle.java
/** Concrete Shape representing a circle. */
public class Circle extends Shape {
    @Override
    public void draw() {
        final String message = "Drawing Circle";
        System.out.println(message);
    }
}
// Square.java
/** Concrete Shape representing a square. */
public class Square extends Shape {
    @Override
    public void draw() {
        final String message = "Drawing Square";
        System.out.println(message);
    }
}
// Triangle.java
/** Concrete Shape representing a triangle. */
public class Triangle extends Shape {
    @Override
    public void draw() {
        final String message = "Drawing Triangle";
        System.out.println(message);
    }
}
// ShapeFactory.java
public class ShapeFactory {
public Shape getShape(String shapeType) {
if (shapeType == null) {
return null;
}
if (shapeType.equalsIgnoreCase("CIRCLE")) {
return new Circle();
} else if (shapeType.equalsIgnoreCase("SQUARE")) {
return new Square();
} else if (shapeType.equalsIgnoreCase("TRIANGLE")) {
return new Triangle();
}
return null;
}
} |
# -*- coding: utf-8 -*-
# FOGLAMP_BEGIN
# See: http://foglamp.readthedocs.io/
# FOGLAMP_END
"""Unit test for foglamp.plugins.south.http_south.http_south"""
import copy
import json
from unittest import mock
from unittest.mock import call, patch
import pytest
import aiohttp.web_exceptions
from aiohttp.test_utils import make_mocked_request
from aiohttp.streams import StreamReader
from multidict import CIMultiDict
from python.foglamp.plugins.south.http_south import http_south
from python.foglamp.plugins.south.http_south.http_south import HttpSouthIngest, async_ingest, c_callback, c_ingest_ref, _DEFAULT_CONFIG as config
# Module metadata (placeholder name/version come from the source template).
__author__ = "<NAME>"
__copyright__ = "Copyright (c) 2017 Dianomic Systems"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
# Configuration category this plugin registers under.
_CONFIG_CATEGORY_NAME = 'HTTP_SOUTH'
_CONFIG_CATEGORY_DESCRIPTION = 'South Plugin HTTP Listener'
# Alternate configuration handed to plugin_reconfigure() in tests; the
# fixed port (1234) differs from the default config so the reconfigure
# is observable.
_NEW_CONFIG = {
    'plugin': {
        'description': 'South Plugin HTTP Listener',
        'type': 'string',
        'default': 'http_south'
    },
    'port': {
        'description': 'Port to listen on',
        'type': 'integer',
        'default': '1234',
    },
    'host': {
        'description': 'Address to accept data on',
        'type': 'string',
        'default': 'localhost',
    },
    'uri': {
        'description': 'URI to accept data on',
        'type': 'string',
        'default': 'sensor-reading',
    }
}
def test_plugin_contract():
    """The plugin module must expose the full FogLAMP south-plugin API."""
    # Evaluates if the plugin has all the required methods
    assert callable(getattr(http_south, 'plugin_info'))
    assert callable(getattr(http_south, 'plugin_init'))
    assert callable(getattr(http_south, 'plugin_start'))
    assert callable(getattr(http_south, 'plugin_shutdown'))
    assert callable(getattr(http_south, 'plugin_reconfigure'))
def mock_request(data, loop):
    """Build a mocked aiohttp POST to /sensor-reading carrying ``data``.

    The payload goes through a real StreamReader so request.json() works
    inside the handler under test.
    """
    payload = StreamReader(loop=loop)
    payload.feed_data(data.encode())
    payload.feed_eof()
    protocol = mock.Mock()
    app = mock.Mock()
    headers = CIMultiDict([('CONTENT-TYPE', 'application/json')])
    req = make_mocked_request('POST', '/sensor-reading', headers=headers,
                              protocol=protocol, payload=payload, app=app)
    return req
@pytest.allure.feature("unit")
@pytest.allure.story("plugin", "south", "http")
def test_plugin_info():
    """plugin_info() must report the exact name/version/mode contract."""
    assert http_south.plugin_info() == {
        'name': 'HTTP South Listener',
        'version': '1.5.0',
        'mode': 'async',
        'type': 'south',
        'interface': '1.0',
        'config': config
    }
@pytest.allure.feature("unit")
@pytest.allure.story("plugin", "south", "http")
def test_plugin_init():
    """plugin_init() is expected to return the handed-in config unchanged."""
    assert http_south.plugin_init(config) == config
@pytest.allure.feature("unit")
@pytest.allure.story("plugin", "south", "http")
def test_plugin_start(mocker, unused_port):
    """Starting the plugin must attach an aiohttp app and handler to the config."""
    # GIVEN: a config whose 'value' keys are populated and whose port is free
    port = {
        'description': 'Port to listen on',
        'type': 'integer',
        'default': str(unused_port()),
    }
    config_data = copy.deepcopy(config)
    mocker.patch.dict(config_data, {'port': port})
    config_data['port']['value'] = config_data['port']['default']
    config_data['host']['value'] = config_data['host']['default']
    config_data['uri']['value'] = config_data['uri']['default']
    config_data['enableHttp']['value'] = config_data['enableHttp']['default']
    # WHEN
    http_south.plugin_start(config_data)
    # THEN: plugin_start stores its runtime objects back into the config dict
    assert isinstance(config_data['app'], aiohttp.web.Application)
    assert isinstance(config_data['handler'], aiohttp.web_server.Server)
    # assert isinstance(config_data['server'], asyncio.base_events.Server)
    # NOTE(review): http_south.t appears to be the plugin's server thread and
    # _delete its teardown hook — confirm against the plugin module.
    http_south.loop.stop()
    http_south.t._delete()
@pytest.allure.feature("unit")
@pytest.allure.story("plugin", "south", "http")
def test_plugin_start_exception(unused_port, mocker):
    """Starting without 'value' keys must log the KeyError, not raise."""
    # GIVEN: config values intentionally left unpopulated
    port = {
        'description': 'Port to listen on',
        'type': 'integer',
        'default': str(unused_port()),
    }
    config_data = copy.deepcopy(config)
    mocker.patch.dict(config_data, {'port': port})
    log_exception = mocker.patch.object(http_south._LOGGER, "exception")
    # WHEN
    http_south.plugin_start(config_data)
    # THEN: the missing 'value' key surfaces as a logged KeyError message
    assert 1 == log_exception.call_count
    log_exception.assert_called_with("'value'")
@pytest.allure.feature("unit")
@pytest.allure.story("plugin", "south", "http")
def test_plugin_reconfigure(mocker, unused_port):
    """Reconfigure must stop the running plugin and return the new config as-is."""
    # GIVEN: a fully populated running config
    port = {
        'description': 'Port to listen on',
        'type': 'integer',
        'default': str(unused_port()),
    }
    config_data = copy.deepcopy(config)
    mocker.patch.dict(config_data, {'port': port})
    config_data['port']['value'] = config_data['port']['default']
    config_data['host']['value'] = config_data['host']['default']
    config_data['uri']['value'] = config_data['uri']['default']
    config_data['enableHttp']['value'] = config_data['enableHttp']['default']
    pstop = mocker.patch.object(http_south, '_plugin_stop', return_value=True)
    log_info = mocker.patch.object(http_south._LOGGER, "info")
    # WHEN
    new_config = http_south.plugin_reconfigure(config_data, _NEW_CONFIG)
    # THEN: the new config is returned unchanged, plugin stopped exactly once
    assert _NEW_CONFIG == new_config
    assert 3 == log_info.call_count
    assert 1 == pstop.call_count
@pytest.allure.feature("unit")
@pytest.allure.story("plugin", "south", "http")
def test_plugin__stop(mocker, unused_port, loop):
    """_plugin_stop must log the stop message and swallow no exceptions."""
    # GIVEN
    port = {
        'description': 'Port to listen on',
        'type': 'integer',
        'default': str(unused_port()),
    }
    config_data = copy.deepcopy(config)
    mocker.patch.dict(config_data, {'port': port})
    config_data['port']['value'] = config_data['port']['default']
    config_data['host']['value'] = config_data['host']['default']
    config_data['uri']['value'] = config_data['uri']['default']
    config_data['enableHttp']['value'] = config_data['enableHttp']['default']
    log_exception = mocker.patch.object(http_south._LOGGER, "exception")
    log_info = mocker.patch.object(http_south._LOGGER, "info")
    # WHEN
    http_south.plugin_start(config_data)
    http_south._plugin_stop(config_data)
    # THEN
    assert 2 == log_info.call_count
    calls = [call('Stopping South HTTP plugin.')]
    log_info.assert_has_calls(calls, any_order=True)
    assert 0 == log_exception.call_count
@pytest.allure.feature("unit")
@pytest.allure.story("plugin", "south", "http")
def test_plugin_shutdown(mocker, unused_port):
    """Shutdown must stop the plugin and log both lifecycle messages."""
    # GIVEN
    port = {
        'description': 'Port to listen on',
        'type': 'integer',
        'default': str(unused_port()),
    }
    config_data = copy.deepcopy(config)
    mocker.patch.dict(config_data, {'port': port})
    config_data['port']['value'] = config_data['port']['default']
    config_data['host']['value'] = config_data['host']['default']
    config_data['uri']['value'] = config_data['uri']['default']
    config_data['enableHttp']['value'] = config_data['enableHttp']['default']
    log_exception = mocker.patch.object(http_south._LOGGER, "exception")
    log_info = mocker.patch.object(http_south._LOGGER, "info")
    # WHEN
    http_south.plugin_start(config_data)
    http_south.plugin_shutdown(config_data)
    # THEN
    assert 3 == log_info.call_count
    calls = [call('Stopping South HTTP plugin.'),
             call('South HTTP plugin shut down.')]
    log_info.assert_has_calls(calls, any_order=True)
    assert 0 == log_exception.call_count
@pytest.allure.feature("unit")
@pytest.allure.story("plugin", "south", "http")
@pytest.mark.skip(reason="server object is None in tests. To be investigated.")
def test_plugin_shutdown_error(mocker, unused_port, loop):
    """A failing server.wait_closed() must be logged and re-raised."""
    # GIVEN
    port = {
        'description': 'Port to listen on',
        'type': 'integer',
        'default': str(unused_port()),
    }
    config_data = copy.deepcopy(config)
    mocker.patch.dict(config_data, {'port': port})
    config_data['port']['value'] = config_data['port']['default']
    config_data['host']['value'] = config_data['host']['default']
    config_data['uri']['value'] = config_data['uri']['default']
    config_data['enableHttp']['value'] = config_data['enableHttp']['default']
    log_exception = mocker.patch.object(http_south._LOGGER, "exception")
    log_info = mocker.patch.object(http_south._LOGGER, "info")
    # WHEN: force the close to fail
    http_south.plugin_start(config_data)
    server = config_data['server']
    mocker.patch.object(server, 'wait_closed', side_effect=Exception)
    with pytest.raises(Exception):
        http_south.plugin_shutdown(config_data)
    # THEN
    assert 2 == log_info.call_count
    calls = [call('Stopping South HTTP plugin.')]
    log_info.assert_has_calls(calls, any_order=True)
    assert 1 == log_exception.call_count
@pytest.allure.feature("unit")
@pytest.allure.story("services", "south", "ingest")
class TestHttpSouthIngest(object):
    """Unit tests foglamp.plugins.south.http_south.http_south.HttpSouthIngest
    """
    @pytest.mark.asyncio
    async def test_render_post_reading_ok(self, loop):
        """A valid 'readings' payload yields HTTP 200 and one ingest call."""
        data = """[{
        "timestamp": "2017-01-02T01:02:03.23232Z-05:00",
        "asset": "sensor1",
        "key": "80a43623-ebe5-40d6-8d80-3f892da9b3b4",
        "readings": {
            "velocity": "500",
            "temperature": {
                "value": "32",
                "unit": "kelvin"
            }
        }
    }]"""
        with patch.object(async_ingest, 'ingest_callback') as ingest_add_readings:
            request = mock_request(data, loop)
            config_data = copy.deepcopy(config)
            config_data['assetNamePrefix']['value'] = config_data['assetNamePrefix']['default']
            r = await HttpSouthIngest(config_data).render_post(request)
            retval = json.loads(r.body.decode())
            # Assert the POST request response
            assert 200 == r.status
            assert 'success' == retval['result']
            assert 1 == ingest_add_readings.call_count
    @pytest.mark.asyncio
    async def test_render_post_sensor_values_ok(self, loop):
        """The legacy 'sensor_values' key is accepted like 'readings'."""
        data = """[{
        "timestamp": "2017-01-02T01:02:03.23232Z-05:00",
        "asset": "sensor1",
        "key": "80a43623-ebe5-40d6-8d80-3f892da9b3b4",
        "sensor_values": {
            "velocity": "500",
            "temperature": {
                "value": "32",
                "unit": "kelvin"
            }
        }
    }]"""
        with patch.object(async_ingest, 'ingest_callback') as ingest_add_readings:
            request = mock_request(data, loop)
            config_data = copy.deepcopy(config)
            config_data['assetNamePrefix']['value'] = config_data['assetNamePrefix']['default']
            r = await HttpSouthIngest(config_data).render_post(request)
            retval = json.loads(r.body.decode())
            # Assert the POST request response
            assert 200 == r.status
            assert 'success' == retval['result']
            assert 1 == ingest_add_readings.call_count
    @pytest.mark.asyncio
    async def test_render_post_invalid_payload(self, loop):
        """Non-JSON body must raise HTTPBadRequest and log the reason."""
        data = "blah"
        msg = 'Payload block must be a valid json'
        with patch.object(async_ingest, 'ingest_callback') as ingest_add_readings:
            with patch.object(http_south._LOGGER, 'exception') as log_exc:
                with pytest.raises(aiohttp.web_exceptions.HTTPBadRequest) as ex:
                    request = mock_request(data, loop)
                    config_data = copy.deepcopy(config)
                    config_data['assetNamePrefix']['value'] = config_data['assetNamePrefix']['default']
                    r = await HttpSouthIngest(config_data).render_post(request)
                    assert 400 == r.status
                    assert str(ex).endswith(msg)
            assert 1 == log_exc.call_count
            log_exc.assert_called_once_with('%d: %s', 400, msg)
    @pytest.mark.asyncio
    async def test_render_post_reading_missing_delimiter(self, loop):
        """Truncated JSON (missing closing brackets) is rejected as invalid."""
        data = """{
        "timestamp": "2017-01-02T01:02:03.23232Z-05:00",
        "asset": "sensor1",
        "key": "80a43623-ebe5-40d6-8d80-3f892da9b3b4",
        "readings": {
            "velocity": "500",
            "temperature": {
                "value": "32",
                "unit": "kelvin"
            }
        }"""
        msg = 'Payload block must be a valid json'
        with patch.object(async_ingest, 'ingest_callback') as ingest_add_readings:
            with patch.object(http_south._LOGGER, 'exception') as log_exc:
                with pytest.raises(aiohttp.web_exceptions.HTTPBadRequest) as ex:
                    request = mock_request(data, loop)
                    config_data = copy.deepcopy(config)
                    config_data['assetNamePrefix']['value'] = config_data['assetNamePrefix']['default']
                    r = await HttpSouthIngest(config_data).render_post(request)
                    assert 400 == r.status
                    assert str(ex).endswith(msg)
            assert 1 == log_exc.call_count
            log_exc.assert_called_once_with('%d: %s', 400, msg)
    @pytest.mark.asyncio
    async def test_render_post_reading_not_dict(self, loop):
        """A non-dict 'readings' value must be rejected with HTTP 400."""
        data = """[{
        "timestamp": "2017-01-02T01:02:03.23232Z-05:00",
        "asset": "sensor2",
        "key": "80a43623-ebe5-40d6-8d80-3f892da9b3b4",
        "readings": "500"
    }]"""
        msg = 'readings must be a dictionary'
        with patch.object(async_ingest, 'ingest_callback') as ingest_add_readings:
            with patch.object(http_south._LOGGER, 'exception') as log_exc:
                with pytest.raises(aiohttp.web_exceptions.HTTPBadRequest) as ex:
                    request = mock_request(data, loop)
                    config_data = copy.deepcopy(config)
                    config_data['assetNamePrefix']['value'] = config_data['assetNamePrefix']['default']
                    r = await HttpSouthIngest(config_data).render_post(request)
                    assert 400 == r.status
                    assert str(ex).endswith(msg)
            assert 1 == log_exc.call_count
            log_exc.assert_called_once_with('%d: %s', 400, msg)
|
<filename>facade/src/main/java/study/facade/address/internal/AddressFacadeImpl.java
package study.facade.address.internal;
import study.business.application.service.AddressService;
import study.business.application.service.BusinessException;
import study.business.domain.model.address.Address;
import study.facade.address.AddressFacade;
import study.facade.address.dto.AddressDTO;
import study.facade.address.dto.EditAddressCommandDTO;
import study.facade.address.dto.NewAddressCommandDTO;
import study.facade.address.mapper.AddressMapper;
import study.facade.exception.FacadeBusinessException;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.inject.Inject;
import java.util.List;
@Stateless
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
// Facade over AddressService: maps DTOs to/from domain commands and wraps
// the checked/domain BusinessException into FacadeBusinessException so
// facade clients depend only on facade types. Transactions are suppressed
// here; the underlying service manages its own.
public class AddressFacadeImpl implements AddressFacade {
@EJB
private AddressService addressService;
@Inject
private AddressMapper addressMapper;
@Override
public void save(NewAddressCommandDTO command) {
try {
AddressService.NewAddressCommand cmd = addressMapper.fromDTO(command);
addressService.save(cmd);
} catch (BusinessException exception) {
throw new FacadeBusinessException(exception);
}
}
@Override
public void edit(EditAddressCommandDTO command) {
try {
AddressService.EditAddressCommand cmd = addressMapper.fromDTO(command);
addressService.edit(cmd);
} catch (BusinessException exception) {
throw new FacadeBusinessException(exception);
}
}
@Override
public void delete(Long id) {
try {
addressService.delete(id);
} catch (BusinessException exception) {
throw new FacadeBusinessException(exception);
}
}
@Override
public List<AddressDTO> list() {
try {
List<Address> list = addressService.list();
return addressMapper.toDTOList(list);
} catch (BusinessException exception) {
throw new FacadeBusinessException(exception);
}
}
}
|
/** One entry of the application's route table. */
export type AppRoute = {
  /** One or more URL patterns this route matches. */
  path: Array<string> | string;
  /** Component rendered when the route matches. */
  component: React.ElementType;
  /** Stable React list key for this route. */
  key: string;
  /** Whether the path must match exactly. */
  exact: boolean;
};
|
# Start a Celery worker for the "website" app: single worker process (-c1),
# info-level logging.
# NOTE(review): this worker runs in the foreground, so the beat scheduler
# below only starts after the worker exits. If both are meant to run
# concurrently, background the worker or run them as separate processes —
# confirm the intended usage.
celery -A website worker -c1 -l info
celery -A website beat -l info
<filename>packages/s3-access-metrics/configHelpers.js<gh_stars>0
'use strict';
const {
isNil,
isNotArrayOfStrings,
isNotNil,
isNotString
} = require('./utils');
// CloudFormation "no value" sentinel, used to omit optional properties.
const noValue = () => ({ Ref: 'AWS::NoValue' });

// Throw unless `value` is an array of strings.
const validateIsArrayOfStrings = (name, value) => {
  if (!isNotArrayOfStrings(value)) return;
  throw new Error(`${name} must be an array of strings`);
};

// Throw when `value` is null or undefined.
const validateIsSet = (name, value) => {
  if (!isNil(value)) return;
  throw new Error(`${name} must be set`);
};

// Throw unless `value` is a string.
const validateIsString = (name, value) => {
  if (!isNotString(value)) return;
  throw new Error(`${name} must be a string`);
};

// A required string must be both present and a string.
const validateRequiredString = (name, value) => {
  validateIsSet(name, value);
  validateIsString(name, value);
};

// An optional string may be absent, but must be a string when present.
const validateOptionalString = (name, value) => {
  if (isNil(value)) return;
  validateIsString(name, value);
};
// subnetIds and vpcId must be configured together.
const validateSubnetIds = ({ subnetIds, vpcId }) => {
  if (isNil(subnetIds)) return;
  if (isNil(vpcId)) {
    throw new Error('Both vpcId and subnetIds must be set');
  }
  validateIsArrayOfStrings('subnetIds', subnetIds);
};

const validateVpcId = ({ subnetIds, vpcId }) => {
  if (isNil(vpcId)) return;
  if (isNil(subnetIds)) {
    throw new Error('Both vpcId and subnetIds must be set');
  }
  validateIsString('vpcId', vpcId);
};

// Validate the whole plugin configuration object. Check order matches the
// original so the first error reported is unchanged.
const validateConfig = (config) => {
  const checks = [
    [validateRequiredString, 'logsBucket'],
    [validateOptionalString, 'logsPrefix'],
    [validateOptionalString, 'permissionsBoundary'],
    [validateRequiredString, 'prefix'],
    [validateRequiredString, 'stack']
  ];
  checks.forEach(([check, name]) => check(name, config[name]));
  validateSubnetIds(config);
  validateVpcId(config);
};
// Build an accessor that validates the config, then returns config[key]
// or defaultValue when the key is nil.
const configFetcher = (key, defaultValue) => (config) => {
  validateConfig(config);
  if (isNil(config[key])) {
    return defaultValue;
  }
  return config[key];
};

// The Lambda goes into a VPC only when both vpcId and subnetIds are set.
const deployToVpc = (config) => {
  return isNotNil(config.vpcId) && isNotNil(config.subnetIds);
};

// VPC settings for the function, or AWS::NoValue when not using a VPC.
const vpcConfig = (config) => {
  validateConfig(config);
  // Looked up through module.exports so tests can stub deployToVpc.
  if (!module.exports.deployToVpc(config)) {
    return noValue();
  }
  return {
    securityGroupIds: [
      { 'Fn::GetAtt': ['LambdaSecurityGroup', 'GroupId'] }
    ],
    subnetIds: config.subnetIds
  };
};
// Public surface: raw helpers plus pre-bound config accessors.
// subnetIds/vpcId default to `false` (not undefined) so callers can
// distinguish "not configured" explicitly.
module.exports = {
  deployToVpc,
  vpcConfig,
  logsBucket: configFetcher('logsBucket'),
  logsPrefix: configFetcher('logsPrefix', ''),
  permissionsBoundary: configFetcher('permissionsBoundary', noValue()),
  prefix: configFetcher('prefix'),
  stack: configFetcher('stack'),
  subnetIds: configFetcher('subnetIds', false),
  vpcId: configFetcher('vpcId', false)
};
|
<reponame>ttiurani/extendedmind
/* Copyright 2013-2016 Extended Mind Technologies Oy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
/**
 * reminder-picker attribute directive: constrains a numeric input bound via
 * ngModel to a bounded, at-most-two-digit value (e.g. an hour/minute field).
 * Options come from the `reminderPicker` expression:
 *   limit        — inclusive upper bound,
 *   bottomLimit  — inclusive lower bound,
 *   padOneDigit  — when true, single digits are zero-padded instead of
 *                  being range-checked against bottomLimit.
 * Each $parser either passes the view value through or rejects it by
 * restoring the previous model value and re-rendering.
 */
function reminderPickerDirective() {
  return {
    restrict: 'A',
    require: 'ngModel', // get a hold of NgModelController
    scope: {
      getOptions: '&reminderPicker'
    },
    link: function(scope, element, attrs, ngModel) {
      if (ngModel.$formatters && ngModel.$formatters.length) {
        // Clear formatters. Otherwise Angular will throw 'numfmt' exception.
        // https://docs.angularjs.org/error/ngModel/numfmt
        ngModel.$formatters = [];
      }
      // Options are read once at link time; later option changes are ignored.
      var options = scope.getOptions();
      var maxValue = options.limit;
      var minValue = options.bottomLimit;
      // Parser: map null to undefined (marks the input invalid).
      function isNull(viewValue) {
        if (viewValue === null) {
          return;
        }
        return viewValue;
      }
      // Parser: reject non-integer input by restoring the previous value.
      function isInteger(viewValue) {
        var currentValue;
        // http://stackoverflow.com/a/3886106
        if (viewValue % 1 !== 0) {
          currentValue = ngModel.$modelValue;
          ngModel.$setViewValue(currentValue);
          ngModel.$render();
          return currentValue;
        }
        return viewValue;
      }
      // Parser: cap the input length, restoring the previous value
      // (re-applying a leading zero when one was shown) on overflow.
      function isTooLong(viewValue) {
        var currentValue;
        if (!options.padOneDigit && viewValue < 10 && viewValue.length > 1) {
          currentValue = ngModel.$modelValue;
          ngModel.$setViewValue(currentValue);
          ngModel.$render();
          return currentValue;
        } else if (viewValue.length > 2) {
          currentValue = ngModel.$modelValue;
          if (currentValue !== undefined && currentValue !== null) {
            if (viewValue.charAt(0) === '0') {
              currentValue = '0' + currentValue.toString();
            }
          }
          ngModel.$setViewValue(currentValue);
          ngModel.$render();
          return currentValue;
        }
        return viewValue;
      }
      // Parser: reject values above options.limit.
      function isTooBig(viewValue) {
        var currentValue;
        var viewValueInt = parseInt(viewValue);
        if (viewValueInt > maxValue) {
          currentValue = ngModel.$modelValue;
          ngModel.$setViewValue(currentValue);
          ngModel.$render();
          return currentValue;
        }
        return viewValue;
      }
      // Parser: reject values below options.bottomLimit.
      function isTooSmall(viewValue) {
        var currentValue;
        var viewValueInt = parseInt(viewValue);
        if (viewValueInt < minValue) {
          currentValue = ngModel.$modelValue;
          ngModel.$setViewValue(currentValue);
          ngModel.$render();
          return currentValue;
        }
        return viewValue;
      }
      // Parser: schedule delayed zero-padding for a single typed digit.
      function hasOneDigit(viewValue) {
        if (viewValue.length === 1) {
          padOneDigitInput();
        }
        return viewValue;
      }
      /*
       * Pad one digit to two digits debounced.
       */
      // NOTE(review): .debounce() is not a standard Function.prototype
      // method — presumably added elsewhere in this codebase; confirm.
      var padOneDigitInput = function() {
        if (document.activeElement === element[0]) {
          var currentValue = ngModel.$modelValue;
          if (currentValue !== undefined && currentValue !== null && currentValue.toString().length === 1) {
            var newValue = '0' + currentValue.toString();
            ngModel.$setViewValue(newValue);
            ngModel.$render();
            return newValue;
          }
        }
      }.debounce(3000);
      // On blur, normalize an empty or one-digit field to its padded form.
      function padValidInput() {
        if (ngModel.$valid) {
          var value = element[0].value;
          if (value === '') {
            element[0].value = options.padOneDigit ? '00' : 0;
          } else if (value && value.length === 1 && options.padOneDigit) {
            element[0].value = '0' + value.toString();
          }
        }
      }
      // Padded mode relies on padding instead of the bottom-limit check.
      if (options.padOneDigit) {
        ngModel.$parsers.unshift(isNull, isInteger, isTooLong, isTooBig, hasOneDigit);
      } else {
        ngModel.$parsers.unshift(isNull, isInteger, isTooLong, isTooBig, isTooSmall);
      }
      element[0].addEventListener('blur', padValidInput);
    }
  };
}
angular.module('common').directive('reminderPicker', reminderPickerDirective);
|
import string
import secrets


def generate_password(length=8):
    """Return a random password of *length* characters drawn from ASCII
    letters, digits and punctuation.

    FIX: uses the ``secrets`` module instead of ``random`` — ``random``
    is a deterministic PRNG and is not suitable for passwords or other
    security-sensitive values.
    """
    characters = string.ascii_letters + string.digits + string.punctuation
    return "".join(secrets.choice(characters) for _ in range(length))


if __name__ == "__main__":
    # Preserve the original script behavior: print one 8-char password.
    print(generate_password())
class CommentsController < ApplicationController
  # Intentionally empty: the show view renders without controller data.
  def show
  end
  def new
    @comment = Comment.new
  end
  # Creates a comment on a post; redirects to the posts index on success,
  # re-renders the form on validation failure.
  def create
    @post = Post.find(params[:post_id])
    @comment = Comment.new(comment_params)
    # NOTE(review): the author id is taken from session[:id] — confirm the
    # login code stores the user id under :id (apps commonly use :user_id).
    @comment.user_id = session[:id]
    @comment.post_id = @post.id
    if @comment.save
      redirect_to posts_path
    else
      render 'new'
    end
  end
  private
  # Strong parameters: only the comment body may be mass-assigned.
  def comment_params
    params.require(:comment).permit(:body)
  end
end
|
# Creates the media_gallery_galleries table.
class CreateMediaGalleryGalleries < ActiveRecord::Migration[5.1]
  def change
    create_table :media_gallery_galleries do |t|
      # Required display name; optional description capped at 1024 chars.
      t.string :name, null: false
      t.string :description, limit: 1024
      # Polymorphic owner (ownable_type/ownable_id) so any model can own
      # a gallery; indexed for lookup by owner.
      t.references :ownable, polymorphic: true, index: true
      t.timestamps
    end
  end
end
|
package com.yuan.myproject.webui.dao;
import com.yuan.myproject.entity.ContentComment;
import com.yuan.myproject.persistence.BaseDao;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface ContentCommentDao extends BaseDao<ContentComment> {
// Number of comments attached to the given content id.
Long getCount(@Param("contentId")Long contentId);
// NOTE(review): the name suggests a delete keyed by user id + id, yet it
// returns an entity — confirm intent against the MyBatis mapper XML.
ContentComment deleteUserIdAndId(ContentComment contentComment);
}
|
<gh_stars>1-10
"use strict"
// Get some tools.
const Verb = require("./index")
const Joi = require("@hapi/joi")
class PATCH extends Verb {
  /**
   * Configure a PATCH /{model}/{id} route for updating a single document.
   *
   * FIX(review): the previous doc block described a
   * (model, headers, auth_strategy_name) signature that did not match the
   * actual (server, model) constructor.
   *
   * @param {Object} server to register the route on.
   * @param {Waterline.Collection} model to generate the route for.
   * @return {Verb} Verb for chaining.
   */
  constructor(server, model) {
    // Construct.
    super("PATCH", model, server)

    // Strings for route description in Swagger.
    this._path = `/${this._name}/{id}`

    // Prefer per-model metadata, falling back to a generic description.
    // Collapses the previous duplicated if/else chains; behavior is
    // identical because falsy (missing or empty) metadata also fell
    // through to the default before.
    const meta = (model.metadata && model.metadata.patch) || {}
    this._description = meta.description || `Update a "${this._name}".`
    this._notes = meta.notes || `Update a "${this._name}".`

    this._class_name = `Update ${this._name}s`

    // Properties pertaining to the route.
    this._model = model
    this._tags = [model.model_name, "write"]
    this._params = Joi.object({
      id: Joi.alternatives().try(Joi.string(), Joi.number())
        .required()
        .description(`ID of particular ${this._name} to update.`)
    })
    // NOTE: the constructor implicitly returns the instance; the previous
    // explicit `return this` was redundant and has been removed.
  }

  /**
   * Swagger response-code map for this route.
   * @param {Joi} model schema describing the response body.
   * @param {Joi} headers schema describing the response headers.
   * @return {Object} map of HTTP status code to response descriptor.
   */
  get_response_codes(model, headers) {
    // All entries share the same schema/headers; only the text differs.
    const describe = description => ({ description, schema: model, headers })
    return {
      200: describe("OK"),
      202: describe("Accepted/Updated"),
      400: describe("Bad Request"),
      404: describe("Not Found"),
      500: describe("Server Error")
    }
  }
}

// Export the class.
module.exports = PATCH
|
<filename>src/extractor/extractColors.ts<gh_stars>0
import extractorInterface from '@typings/extractorInterface'
import { colorPropertyInterface, fillValuesType, gradientValuesType } from '@typings/propertyObject'
import { PaintStyleObject } from '@typings/styles'
import { GradientType, PropertyType } from '@typings/valueTypes'
import { tokenTypes } from '@config/tokenTypes'
import { convertPaintToRgba, roundRgba } from '../utilities/convertColor'
import roundWithDecimals from '../utilities/roundWithDecimals'
import { tokenCategoryType } from '@typings/tokenCategory'
import { tokenExportKeyType } from '@typings/tokenExportKey'
import config from '@config/config'
/**
 * Split a style description into an alias (a line starting with one of the
 * configured prefixes, "Ref:" by default) and the remaining description.
 * The matched alias line is removed from the returned description.
 */
const parseDescription = (description: string = '', aliasArray: string[]) => {
  // Fall back to the "Ref:" prefix when no alias prefixes are configured.
  aliasArray = !aliasArray || aliasArray.filter(i => i).length === 0 ? ['Ref:'] : aliasArray
  const regex = new RegExp('(' + aliasArray.join('|').toLowerCase() + ')' + ':?\\s')
  // split description in lines
  let alias: string
  const descriptionLines = description.split(/\r?\n/)
  // find match
    .filter(line => {
      if (line.toLowerCase().match(regex)) {
        // Lowercased remainder of the matching line becomes the alias;
        // the line itself is dropped from the description.
        alias = line.toLowerCase().replace(regex, '').trim()
        return false
      }
      return true
    })
  // return object
  return {
    alias: alias,
    description: descriptionLines.join('\n')
  }
}
// Wrap an alias into the extension payload, or produce nothing when absent.
const addAlias = (alias: string) => alias ? ({ [config.key.extensionAlias]: alias }) : ({})
// Figma paint type -> design-token gradient type name.
const gradientType = {
  GRADIENT_LINEAR: 'linear',
  GRADIENT_RADIAL: 'radial',
  GRADIENT_ANGULAR: 'angular',
  GRADIENT_DIAMOND: 'diamond'
}
const isGradient = (paint): boolean => ['GRADIENT_LINEAR', 'GRADIENT_RADIAL', 'GRADIENT_ANGULAR', 'GRADIENT_DIAMOND'].includes(paint.type)
// Derive a rotation angle in degrees from the first two columns of the
// gradient transform rows. NOTE(review): assumes the destructured pairs
// describe the gradient axis; confirm against Figma's gradientTransform
// (a 2x3 affine matrix) — only the first two entries of each row are used.
const rotationFromMatrix = ([[x1, y1], [x2, y2]]) => {
  // https://stackoverflow.com/questions/24909586/find-rotation-angle-for-affine-transform
  const angle = Math.atan2(y2 - y1, x2 - x1) * (180.0 / Math.PI) + 315
  // Normalize the +315 offset back into [0, 360).
  return angle > 360 ? angle - 360 : angle
}
/**
 * Convert a single Figma paint into token values: a solid fill becomes a
 * color property; a gradient becomes type/rotation/stops/opacity.
 * Returns null for unsupported paint types.
 */
const extractFills = (paint): fillValuesType | gradientValuesType => {
  if (paint.type === 'SOLID') {
    return {
      fill: {
        value: convertPaintToRgba(paint),
        type: 'color' as PropertyType
      }
    }
  }
  if (['GRADIENT_LINEAR', 'GRADIENT_RADIAL', 'GRADIENT_ANGULAR', 'GRADIENT_DIAMOND'].includes(paint.type)) {
    return {
      gradientType: {
        value: gradientType[paint.type] as GradientType,
        type: 'string' as PropertyType
      },
      rotation: {
        // https://stackoverflow.com/questions/24909586/find-rotation-angle-for-affine-transform
        value: rotationFromMatrix(paint.gradientTransform),
        type: 'number' as PropertyType,
        unit: 'degree'
      },
      stops: paint.gradientStops.map(stop => ({
        position: {
          value: roundWithDecimals(stop.position),
          type: 'number' as PropertyType
        },
        color: {
          value: roundRgba(stop.color),
          type: 'color' as PropertyType
        }
      })),
      opacity: {
        value: roundWithDecimals(paint.opacity),
        type: 'number' as PropertyType
      }
    }
  }
  // return null if no matching type
  /* istanbul ignore next */
  return null
}
/**
 * Transforms Figma paint styles into color/gradient design tokens.
 * Image fills are stripped; styles left with no fills are skipped entirely.
 * The first remaining paint decides whether the token is filed under the
 * gradient or the color prefix/category/export key.
 * NOTE(review): the first .map mutates node.paints on the incoming objects
 * instead of copying — confirm callers do not reuse the originals.
 */
const extractColors: extractorInterface = (tokenNodes: PaintStyleObject[], prefixArray: {color: string[], gradient: string[], alias: string[]}): colorPropertyInterface[] => {
  // get all paint styles
  return tokenNodes
    // remove images fills from tokens
    .map(node => {
      node.paints = node.paints.filter(paint => paint.type !== 'IMAGE')
      return node
    })
    // remove tokens with no fill
    .filter(node => node.paints.length > 0)
    // transform style
    .map(node => {
      // alias is parsed out of the style description (e.g. "Ref: some/token")
      const { alias, description } = parseDescription(node.description, prefixArray.alias)
      return {
        name: `${isGradient(node.paints[0]) ? prefixArray.gradient[0] : prefixArray.color[0]}/${node.name}`,
        category: isGradient(node.paints[0]) ? 'gradient' : 'color' as tokenCategoryType,
        exportKey: (isGradient(node.paints[0]) ? tokenTypes.gradient.key : tokenTypes.color.key) as tokenExportKeyType,
        description: description,
        values: node.paints.map(paint => extractFills(paint)),
        extensions: {
          [config.key.extensionPluginData]: {
            [config.key.extensionFigmaStyleId]: node.id,
            exportKey: (isGradient(node.paints[0]) ? tokenTypes.gradient.key : tokenTypes.color.key) as tokenExportKeyType,
            ...(addAlias(alias))
          }
        }
      }
    })
}
export default extractColors
|
(function () {
    'use strict';

    // Dashboard controller: starts push-notification retrieval on load.
    angular
        .module('ubongoApp')
        .controller('DashboardController', [
            'ubongoNotificationsService',
            function (ubongoNotificationsService) {
                ubongoNotificationsService.getPushNotifications();
            }
        ]);
})();
#!/bin/bash
# Build the bottom-up-attention VQA feature extractor and its dependencies,
# then fetch the pretrained Faster R-CNN weights.
#
# Fixes: the original shebang was `#!bin/bash` (missing leading slash), which
# is not a valid interpreter path; `set -e` added because every step depends
# on the previous one (a failed `cd` must not let later commands run in the
# wrong directory); the download URL is quoted because it contains `?`.
set -e

cd vqa/bottom_up_attention_pytorch/detectron2
pip install -e . -I
cd ..
git clone https://github.com/NVIDIA/apex.git
cd apex
python setup.py install
cd ..
python setup.py build develop
wget "https://awma1-my.sharepoint.com/:u:/g/personal/yuz_l0_tn/EaXvCC3WjtlLvvEfLr3oa8UBLA21tcLh4L8YLbYXl6jgjg?download=1" -O bua-caffe-frcn-r101_with_attributes.pth
cd ..
bash download.sh
<filename>database/API/topping.js
const db = require('../mainDB')
const { Topping } = require('../custom_pizzaDB')
/**
 * Express route handlers for the Topping resource.
 *
 * Every handler uses the (request, response, next) signature and forwards
 * database errors to the error-handling middleware via next(err).
 */
const topping = {
    // GET all toppings.
    getAll: (request, response, next) => {
        Topping.getAll()
            .then(data => {
                response.status(200)
                    .json({
                        status: 'success',
                        data: data,
                        message: 'Retrieved ALL toppings'
                    })
            })
            .catch(err => {
                return next(err)
            })
    },
    // POST a new topping; expects { name, price } in the request body.
    add: (request, response, next) => {
        const { name, price } = request.body
        Topping.add(name, price)
            .then(() => {
                response.status(200)
                    .json({
                        status: 'success',
                        message: 'Added topping'
                    })
            })
            .catch(err => {
                return next(err)
            })
    },
    // GET a single topping by id.
    getOne: (request, response, next) => {
        const { id } = request.params
        Topping.getById(id)
            .then(data => {
                response.status(200)
                    .json({
                        status: 'success',
                        data: data,
                        message: 'Retrieved single topping'
                    })
            })
            // Fix: this chain previously had no .catch, so a database failure
            // became an unhandled promise rejection instead of reaching the
            // error middleware.
            .catch(err => {
                return next(err)
            })
    },
    // PUT updated { name, price } for a topping by id.
    update: (request, response, next) => {
        const { id } = request.params
        const { name, price } = request.body
        Topping.api_update(id, name, price)
            .then(() => {
                response.status(200)
                    .json({
                        // Fix: was misspelled 'succes', which breaks clients
                        // comparing the status field against 'success'.
                        status: 'success',
                        message: 'Updated Topping entry.'
                    })
            })
            .catch(error => next(error))
    },
    // DELETE a topping by id.
    delete: (request, response, next) => {
        const { id } = request.params
        Topping.delete(id)
            .then(() => {
                response.status(200)
                    .json({
                        status: 'success',
                        message: 'Deleted topping.'
                    })
            })
            .catch(error => next(error))
    }
}

module.exports = topping
|
<gh_stars>10-100
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.testgrid.reporting.renderer;
import com.github.mustachejava.DefaultMustacheFactory;
import com.github.mustachejava.Mustache;
import org.wso2.testgrid.reporting.ReportingException;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Map;
/**
* This class is responsible for handling rendering of mustache templates.
*
* @since 1.0.0
*/
/**
 * Renders mustache templates located in the "templates" resource directory.
 */
public class MustacheTemplateRenderer implements Renderable {

    private static final String TEMPLATE_DIR = "templates";

    /**
     * Compiles the given view and renders it against the supplied model.
     *
     * @param view  template name to compile
     * @param model values made available to the template
     * @return the rendered output
     * @throws ReportingException when writing the rendered output fails
     */
    @Override
    public String render(String view, Map<String, Object> model) throws ReportingException {
        final Mustache template = new DefaultMustacheFactory(TEMPLATE_DIR).compile(view);
        final StringWriter output = new StringWriter();
        try {
            template.execute(output, model).close();
        } catch (IOException e) {
            throw new ReportingException(e);
        }
        return output.toString();
    }
}
|
var mongoose = require("mongoose");

// One settings document per user, keyed by a unique username; `settings` is
// an arbitrary string-to-string map.
var userSettingsSchema = new mongoose.Schema({
    username: { type: String, required: true, unique: true },
    settings: { type: Map, of: String }
});

// Compile the schema into the UserSettings model and export it.
var UserSettings = mongoose.model("UserSettings", userSettingsSchema);

module.exports = UserSettings;
package com.javatechie.spring.orm.api.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
 * JPA entity for an organisational division with an auto-generated id and a name.
 * NOTE(review): the class name `division` breaks Java's UpperCamelCase
 * convention, but renaming would change the entity/table mapping and all
 * call sites, so it is left as-is.
 */
@Entity
public class division {

    // Auto-incremented primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private int division_id;

    private String division_name;

    public int getDivision_id() {
        return division_id;
    }

    public void setDivision_id(int division_id) {
        this.division_id = division_id;
    }

    public String getDivision_name() {
        return division_name;
    }

    public void setDivision_name(String division_name) {
        this.division_name = division_name;
    }
}
|
#!/bin/bash
# Auto-generated grid-engine job wrapper: runs one nnet combine/diagnostic
# step (iteration 55/56) for the supermarket-product Kaldi recipe, logging
# the host, timestamps and accounting info around the command.
cd /home/nlpserver/zzilong/kaldi/egs/supermarket-product
. ./path.sh
# Write a log preamble (host, start time, the exact command) before running.
( echo '#' Running on `hostname`
echo '#' Started at `date`
echo -n '# '; cat <<EOF
nnet-subset-egs --n=2000 --randomize-order=true --srand=55 ark:exp/nnet4a/egs/train_diagnostic.egs ark:- | nnet-combine-fast --use-gpu=no --num-threads=8 --verbose=3 --minibatch-size=250 exp/nnet4a/56.mdl ark:- exp/nnet4a/56.mdl
EOF
) >exp/nnet4a/log/shrink.55.log
time1=`date +"%s"`
# Both stdout and stderr are appended to the same log file.
( nnet-subset-egs --n=2000 --randomize-order=true --srand=55 ark:exp/nnet4a/egs/train_diagnostic.egs ark:- | nnet-combine-fast --use-gpu=no --num-threads=8 --verbose=3 --minibatch-size=250 exp/nnet4a/56.mdl ark:- exp/nnet4a/56.mdl ) 2>>exp/nnet4a/log/shrink.55.log >>exp/nnet4a/log/shrink.55.log
ret=$?
time2=`date +"%s"`
echo '#' Accounting: time=$(($time2-$time1)) threads=1 >>exp/nnet4a/log/shrink.55.log
echo '#' Finished at `date` with status $ret >>exp/nnet4a/log/shrink.55.log
# 137 = killed (128+SIGKILL); exit 100 tells the scheduler to retry the job.
[ $ret -eq 137 ] && exit 100;
touch exp/nnet4a/q/done.14845
exit $[$ret ? 1 : 0]
## submitted with:
# qsub -v PATH -cwd -S /bin/bash -j y -l arch=*64* -o exp/nnet4a/q/shrink.55.log -l mem_free=10G,ram_free=2G,arch=*64 -l mem_free=1G,ram_free=1G -pe smp 4 /home/nlpserver/zzilong/kaldi/egs/supermarket-product/exp/nnet4a/q/shrink.55.sh >>exp/nnet4a/q/shrink.55.log 2>&1
|
#!/usr/bin/env bash
# Runs `terraform plan`, or an alternate command from the BitOps config when
# TERRAFORM_PLAN_ALTERNATE_COMMAND=true.
set -e

echo "Running terraform plan..."

TF_ARGS=$1

if [[ "${TERRAFORM_PLAN_ALTERNATE_COMMAND}" == "true" ]]; then
    # Fix: the message was passed as the printf FORMAT string (SC2059); use
    # %b so the escape sequences in the color variables are still interpreted.
    printf '%b' "${WARN}Running Alternate Terraform command.${NC}"
    TERRAFORM_COMMAND=$(shyaml get-value terraform_options.terraform_plan.command < "$TERRAFORM_BITOPS_CONFIG" || true)
    bash "$SCRIPTS_DIR/util/run-text-as-script.sh" "$TERRAFORM_ROOT" "$TERRAFORM_COMMAND"
else
    # Intentionally unquoted so multiple plan arguments word-split.
    terraform plan $TF_ARGS
fi
|
echo "Stopping Ferret..."
./stop_ferret.sh
echo "Stopping Molten Mongrels..."
monit stop all
echo "Sleeping 60 secs..."
sleep 60
echo "Starting Ferret..."
./start_ferret.sh
echo "Starting Molten Mongrels..."
monit start all
|
<reponame>baiqizhang/galileomidi2<filename>app/node_modules/midi-node/sequence.js
'use strict';
var Message = require('./message');
var Track = require('./track');
var vlv = require('./vlv');
// Magic number that begins every standard MIDI file.
var constants = {
  START_OF_FILE: 0x4d546864 // MThd
};

// Standard MIDI file format types from the header chunk.
var fileTypes = {
  TYPE_0: 0x0, // single track
  TYPE_1: 0x1 // multi track
};
/**
 * A parsed MIDI sequence: the file header plus its tracks.
 * @param header object carrying fileType, ticks and noTracks fields
 */
function Sequence(header) {
  this.tracks = [];
  this.header = header;
}
/**
 * Appends a track, warning (but not failing) when more tracks are added
 * than the header declares.
 */
Sequence.prototype.addTrack = function (track) {
  var overCapacity = this.tracks.length >= this.header.noTracks;
  if (overCapacity) {
    console.warn('Tracks exceed specified number of tracks in header field.');
  }
  this.tracks.push(track);
};
// Simple accessors over the parsed track list and header fields.
Sequence.prototype.getTracks = function () { return this.tracks; };
Sequence.prototype.getFileType = function () { return this.header.fileType; };
Sequence.prototype.getTicks = function () { return this.header.ticks; };
/**
 * Parses a complete MIDI file (header chunk plus tracks) from a buffer.
 *
 * @param buffer
 * @returns {Sequence}
 */
Sequence.fromBuffer = function (buffer) {
  var offset = 0;

  if (buffer.readUInt32BE(offset, false) !== constants.START_OF_FILE) {
    throw new Error("Expected start of file marker 'MThd'.");
  }
  offset += 4;

  // The header chunk length is fixed at 6 bytes for standard MIDI files.
  if (buffer.readUInt32BE(offset) !== 0x6) {
    throw new Error('Invalid header size (expected 6 bytes).');
  }
  offset += 4;

  var fileType = buffer.readUInt16BE(offset);
  offset += 2;
  var noTracks = buffer.readUInt16BE(offset);
  offset += 2;

  if (fileType === fileTypes.TYPE_0 && noTracks !== 1) {
    throw new Error('Number of tracks mismatch file type (expected 1 track).');
  }

  var ticks = buffer.readUInt16BE(offset);
  offset += 2;

  var sequence = new Sequence({
    fileType: fileType,
    ticks: ticks,
    noTracks: noTracks
  });

  for (var i = 0; i < noTracks; i++) {
    var track = Track.fromBuffer(buffer.slice(offset));
    sequence.addTrack(track);
    // Skip the 8-byte track chunk header (marker + length), then re-read the
    // events of that chunk into the track below.
    offset += 8;

    var runningStatus = null;
    // NOTE(review): buffer.length never changes inside this loop, so the
    // condition only guards the degenerate empty-buffer case; the loop is
    // normally left via the end-of-track break below — confirm intended.
    while (buffer.length > 0) {
      var delta = vlv.fromBuffer(buffer.slice(offset));
      // Variable-length delta values above 0x7F occupy two bytes, else one.
      if (delta > 0x7F) {
        offset += 2;
      } else {
        offset += 1;
      }

      // runningStatus lets consecutive events reuse the previous status byte.
      var message = Message.fromBuffer(buffer.slice(offset), runningStatus);
      if (!message) {
        throw new Error("Unexpected end of buffer.");
      }
      track.addEvent(delta, message);
      offset += message.length;
      runningStatus = message.statusByte;

      if (message.isEndOfTrack()) {
        break;
      }
    }
  }

  return sequence;
};
/**
 * Returns a promise of a midi sequence read directly from a stream.
 *
 * Fix: the previous implementation resolved on the FIRST 'data' chunk, so
 * any file delivered in several chunks was truncated, and stream errors were
 * never surfaced. All chunks are now collected and parsed once the stream
 * ends, and 'error' events reject the promise.
 * @param stream
 * @see https://nodejs.org/api/stream.html
 * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
 */
Sequence.fromStream = function (stream) {
  return new Promise(function (resolve, reject) {
    var chunks = [];
    stream.on('data', function (chunk) {
      // Streams opened with a string encoding deliver strings; normalize to
      // Buffer so fromBuffer's readUInt*BE accessors work.
      chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, 'binary'));
    });
    stream.on('end', function () {
      try {
        resolve(Sequence.fromBuffer(Buffer.concat(chunks)));
      } catch (error) {
        reject(error);
      }
    });
    stream.on('error', reject);
  });
};
/**
 * Returns a promise of a midi sequence read directly from a file.
 *
 * Fix: the stream is opened WITHOUT an encoding so chunks arrive as Buffers;
 * the old 'binary' encoding argument made chunks strings, which lack the
 * readUInt*BE accessors Sequence.fromBuffer relies on.
 * @param filename
 * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
 */
Sequence.fromFile = function (filename) {
  return Sequence.fromStream(require('fs').createReadStream(filename));
};

module.exports = Sequence;
|
<gh_stars>10-100
"use strict";
const csrf = require('lusca').csrf();
/**
 * Wraps lusca's CSRF middleware so that paths on opts.whitelist bypass the
 * CSRF check entirely.
 */
module.exports.csrf = function (opts) {
  return function (req, res, next) {
    var isWhitelisted = opts.whitelist.indexOf(req.path) !== -1;
    if (isWhitelisted) {
      return next();
    }
    csrf(req, res, next);
  };
};
|
<gh_stars>100-1000
# -*- coding: utf-8 -*-
"""
ui driver
"""
import flybirds.core.global_resource as gr
from flybirds.core.plugin.plugins.default.ui_driver.poco.poco_manage import \
poco_init
from flybirds.core.plugin.plugins.default.ui_driver.poco.poco_screen import \
air_bdd_screen_size
__open__ = ["UIDriver"]
class UIDriver:
    """
    iOS UI driver plugin: thin static facade over the poco driver helpers.
    """

    # Plugin registration name. NOTE(review): the module docstring only says
    # "ui driver"; confirm this file is indeed the iOS variant.
    name = "ios_ui_driver"

    @staticmethod
    def init_driver():
        # Create and return a poco driver instance.
        return poco_init()

    @staticmethod
    def air_bdd_screen_size(dr_instance):
        # Delegate to the poco screen helper of the same name.
        return air_bdd_screen_size(dr_instance)

    @staticmethod
    def close_driver():
        # Tear down any active screen recording before the driver goes away.
        screen_record = gr.get_value("screenRecord")
        if screen_record is not None and hasattr(screen_record, "destroy"):
            screen_record.destroy()
|
def process_string(input_string: str) -> str:
    """Transform a string's casing by profile.

    - all-lowercase input -> uppercased
    - all-uppercase input -> lowercased
    - mixed case -> alternating case: even indices lowered, odd indices uppered

    Raises AssertionError for empty or non-string input.
    """
    assert input_string and isinstance(input_string, str), "Input must be a non-empty string"
    if input_string.islower():
        return input_string.upper()
    if input_string.isupper():
        return input_string.lower()
    return "".join(
        ch.lower() if idx % 2 == 0 else ch.upper()
        for idx, ch in enumerate(input_string)
    )
# Download and launch v2ray ${VER} in /usr/bin/v2ray.
# Fixes: the unchecked `cd` could let the later `mv`/`rm -rf` run in the
# wrong directory; all ${VER} expansions are now quoted.
cd /usr/bin/v2ray || exit 1
wget -O v2ray.zip "https://github.com/v2ray/v2ray-core/releases/download/v${VER}/v2ray-linux-64.zip"
unzip v2ray.zip
mv "/usr/bin/v2ray/v2ray-v${VER}-linux-64/v2ray" /usr/bin/v2ray/
mv "/usr/bin/v2ray/v2ray-v${VER}-linux-64/v2ctl" /usr/bin/v2ray/
mv "/usr/bin/v2ray/v2ray-v${VER}-linux-64/geoip.dat" /usr/bin/v2ray/
mv "/usr/bin/v2ray/v2ray-v${VER}-linux-64/geosite.dat" /usr/bin/v2ray/
chmod +x /usr/bin/v2ray/v2ray
chmod +x /usr/bin/v2ray/v2ctl
# Group 0 with group rwX keeps the tree usable under arbitrary-UID runtimes.
chgrp -R 0 /usr/bin/v2ray/
chmod -R g+rwX /usr/bin/v2ray/
rm -rf v2ray.zip
rm -rf "v2ray-v${VER}-linux-64"
# Config and optional TLS material are supplied via the environment.
echo -e "$CONFIG_JSON" > config.json
if [ "$CERT_PEM" != "$KEY_PEM" ]; then
    echo -e "$CERT_PEM" > cert.pem
    echo -e "$KEY_PEM" > key.pem
fi
./v2ray
|
import { h } from "preact";
import * as css from "./style.css";
const NoDataMessage = () => <div></div>;
export default NoDataMessage;
|
#!/usr/bin/env bash
# Installer/updater for RustDesk. Relies on helper functions (colorEcho,
# checkPackageNeedInstall, Git_Clone_Update_Branch, version_le, ...) sourced
# from the user's custom_functions.sh.

# Remove the scratch directory on any exit path.
trap 'rm -rf "${WORKDIR}"' EXIT

# Only trust a caller-provided WORKDIR when it lives under /tmp and exists.
[[ -z "${WORKDIR}" || "${WORKDIR}" != "/tmp/"* || ! -d "${WORKDIR}" ]] && WORKDIR="$(mktemp -d)"
[[ -z "${CURRENT_DIR}" || ! -d "${CURRENT_DIR}" ]] && CURRENT_DIR=$(pwd)

# Load custom functions
if type 'colorEcho' 2>/dev/null | grep -q 'function'; then
    :
else
    if [[ -s "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/custom_functions.sh" ]]; then
        source "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/custom_functions.sh"
    else
        echo "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/custom_functions.sh does not exist!"
        exit 0
    fi
fi

# Populate the curl options array if the caller has not done so already.
[[ -z "${CURL_CHECK_OPTS[*]}" ]] && Get_Installer_CURL_Options
# RustDesk: Open source virtual/remote desktop infrastructure for everyone!
# https://github.com/rustdesk/rustdesk
APP_INSTALL_NAME="rustdesk"
GITHUB_REPO_NAME="rustdesk/rustdesk"

EXEC_INSTALL_NAME="rustdesk"

IS_INSTALL="yes"
IS_UPDATE="no"

CURRENT_VERSION="0.0.0"
REMOTE_VERSION=""

## self-host relay server
## https://rustdesk.com/docs/en/self-host/install/
# sudo docker image pull rustdesk/rustdesk-server
# sudo docker run --name hbbs \
#   --net=host \
#   -p 21115:21115 -p 21116:21116 -p 21116:21116/udp -p 21118:21118 \
#   -v `pwd`:/root \
#   rustdesk/rustdesk-server hbbs -r <relay-server-ip[:port]>
# sudo docker run --name hbbr \
#   --net=host \
#   -p 21117:21117 -p 21119:21119 \
#   -v `pwd`:/root rustdesk/rustdesk-server hbbr

# Build prerequisites are auto-installed only on pacman-based distros.
if [[ -x "$(command -v pacman)" ]]; then
    # Pre-requisite packages
    PackagesList=(
        clang
        gcc
        cmake
        make
        pkg-config
        nasm
        yasm
        alsa-lib
        xdotool
        libxcb
        libxfixes
        pulseaudio
        gtk3
        vcpkg
    )
    for TargetPackage in "${PackagesList[@]}"; do
        if checkPackageNeedInstall "${TargetPackage}"; then
            colorEcho "${BLUE} Installing ${FUCHSIA}${TargetPackage}${BLUE}..."
            sudo pacman --noconfirm -S "${TargetPackage}"
        fi
    done
fi
# rust toolchain (required for building from source)
if [[ ! -x "$(command -v rustc)" ]]; then
    [[ -s "${MY_SHELL_SCRIPTS}/installer/cargo_rust_installer.sh" ]] && \
        source "${MY_SHELL_SCRIPTS}/installer/cargo_rust_installer.sh"
fi

# vcpkg: prefer an existing system install, then ~/vcpkg, else bootstrap one.
if [[ -z "${VCPKG_ROOT}" && -x "$(command -v vcpkg)" ]]; then
    [[ -z "${VCPKG_ROOT}" ]] && VCPKG_ROOT=$(dirname "$(readlink -f "$(command -v vcpkg)")")
    [[ -z "${VCPKG_DOWNLOADS}" && -d "/var/cache/vcpkg" ]] && VCPKG_DOWNLOADS="/var/cache/vcpkg"
    export VCPKG_ROOT
    export VCPKG_DOWNLOADS
elif [[ -z "${VCPKG_ROOT}" && -d "$HOME/vcpkg" ]]; then
    export VCPKG_ROOT="$HOME/vcpkg"
fi

if [[ -z "${VCPKG_ROOT}" ]]; then
    Git_Clone_Update_Branch "microsoft/vcpkg" "$HOME/vcpkg"
    if [[ -d "$HOME/vcpkg" ]]; then
        "$HOME/vcpkg/bootstrap-vcpkg.sh"
        export VCPKG_ROOT="$HOME/vcpkg"
        mkdir -p "$HOME/.cache/vcpkg/"
        export VCPKG_DOWNLOADS="$HOME/.cache/vcpkg/"
    fi
fi

# Install codec dependencies; sudo only for a system-wide vcpkg tree.
if [[ "${VCPKG_ROOT}" == *"$HOME"* ]]; then
    if ! "${VCPKG_ROOT}/vcpkg" list 2>/dev/null | grep -q 'libvpx\|libyuv\|opus'; then
        colorEcho "${BLUE} Installing ${FUCHSIA}libvpx libyuv opus${BLUE}..."
        "${VCPKG_ROOT}/vcpkg" install libvpx libyuv opus
    fi
else
    if ! vcpkg list 2>/dev/null | grep -q 'libvpx\|libyuv\|opus'; then
        colorEcho "${BLUE} Installing ${FUCHSIA}libvpx libyuv opus${BLUE}..."
        sudo vcpkg install libvpx libyuv opus
    fi
fi
# Install the prebuilt package (Manjaro) or build from source (anything else).
OS_RELEASE_ID="$(grep -E '^ID=([a-zA-Z]*)' /etc/os-release 2>/dev/null | cut -d '=' -f2)"
if [[ "${OS_RELEASE_ID}" == "manjaro" ]]; then
    if [[ -x "$(command -v ${EXEC_INSTALL_NAME})" ]]; then
        IS_UPDATE="yes"
        CURRENT_VERSION=$(${EXEC_INSTALL_NAME} --version 2>&1 | grep -Eo '([0-9]{1,}\.)+[0-9]{1,}' | head -n1)
    else
        # In update-only mode, skip when rustdesk is not installed yet.
        [[ "${IS_UPDATE_ONLY}" == "yes" ]] && IS_INSTALL="no"
    fi

    if [[ "${IS_INSTALL}" == "yes" ]]; then
        colorEcho "${BLUE}Checking latest version for ${FUCHSIA}${APP_INSTALL_NAME}${BLUE}..."
        # CHECK_URL="https://api.github.com/repos/${GITHUB_REPO_NAME}/releases"
        # REMOTE_VERSION=$(curl "${CURL_CHECK_OPTS[@]}" "${CHECK_URL}" | jq -r 'map(select(.prerelease)) | first | .tag_name' | cut -d'v' -f2)
        if [[ -z "${REMOTE_VERSION}" ]]; then
            CHECK_URL="https://api.github.com/repos/${GITHUB_REPO_NAME}/releases/latest"
            REMOTE_VERSION=$(curl "${CURL_CHECK_OPTS[@]}" "${CHECK_URL}" | jq -r '.tag_name//empty' 2>/dev/null | cut -d'v' -f2)
        fi

        # Nothing to do when the installed version is already current.
        if version_le "${REMOTE_VERSION}" "${CURRENT_VERSION}"; then
            IS_INSTALL="no"
        fi
    fi

    if [[ "${IS_INSTALL}" == "yes" ]]; then
        colorEcho "${BLUE} Installing ${FUCHSIA}${APP_INSTALL_NAME} ${YELLOW}${REMOTE_VERSION}${BLUE}..."
        REMOTE_FILENAME="rustdesk-${REMOTE_VERSION}-manjaro-arch.pkg.tar.zst"
        DOWNLOAD_FILENAME="${WORKDIR}/${REMOTE_FILENAME}"
        DOWNLOAD_URL="${GITHUB_DOWNLOAD_URL:-https://github.com}/${GITHUB_REPO_NAME}/releases/download/${REMOTE_VERSION}/${REMOTE_FILENAME}"

        colorEcho "${BLUE} From ${ORANGE}${DOWNLOAD_URL}"
        curl "${CURL_DOWNLOAD_OPTS[@]}" -o "${DOWNLOAD_FILENAME}" "${DOWNLOAD_URL}"

        curl_download_status=$?
        # Retry once against github.com when a download mirror failed.
        if [[ ${curl_download_status} -gt 0 && -n "${GITHUB_DOWNLOAD_URL}" ]]; then
            DOWNLOAD_URL="${DOWNLOAD_URL//${GITHUB_DOWNLOAD_URL}/https://github.com}"
            colorEcho "${BLUE} From ${ORANGE}${DOWNLOAD_URL}"
            curl "${CURL_DOWNLOAD_OPTS[@]}" -o "${DOWNLOAD_FILENAME}" "${DOWNLOAD_URL}"
            curl_download_status=$?
        fi

        if [[ ${curl_download_status} -eq 0 ]]; then
            sudo pacman --noconfirm -U "${DOWNLOAD_FILENAME}"
        fi

        # ignoring package upgrade with pacman or yay
        PacmanConf=$(yay -Pg 2>/dev/null | jq -r '.pacmanconf//empty')
        [[ -z "${PacmanConf}" ]] && PacmanConf="/etc/pacman.conf"
        # Ensure an uncommented IgnorePkg line exists, then append rustdesk.
        if ! grep -q '^IgnorePkg.*' "${PacmanConf}"; then
            sudo sed -i '0,/^#\s*IgnorePkg/{s/^#\s*IgnorePkg.*/IgnorePkg=/}' "${PacmanConf}"
        fi
        if grep -q '^IgnorePkg.*' "${PacmanConf}"; then
            IgnorePkg=$(grep '^IgnorePkg' "${PacmanConf}" | cut -d"=" -f2)
            if [[ -z "${IgnorePkg}" ]]; then
                sudo sed -i "s/^IgnorePkg.*/IgnorePkg=rustdesk/" "${PacmanConf}"
            elif [[ "${IgnorePkg}" != *"rustdesk"* ]]; then
                sudo sed -i "s/^IgnorePkg.*/IgnorePkg=${IgnorePkg},rustdesk/" "${PacmanConf}"
            fi
        fi
    fi
else
    # Build from source on non-Manjaro systems.
    Git_Clone_Update_Branch "${GITHUB_REPO_NAME}" "$HOME/rustdesk"
    if [[ -d "$HOME/rustdesk" ]]; then
        cd "$HOME/rustdesk" && \
            mkdir -p target/debug && \
            wget "https://raw.githubusercontent.com/c-smile/sciter-sdk/master/bin.lnx/x64/libsciter-gtk.so" && \
            mv libsciter-gtk.so target/debug && \
            sudo cp target/debug/libsciter-gtk.so /usr/lib && \
            cargo install --path .
        # cargo run
    fi
fi
|
#pragma once
#include <typed-geometry/types/scalars/default.hh>
#include "../array.hh"
#include "../pos.hh"
#include "../vec.hh"
#include "traits.hh"
namespace tg
{
// Forward declaration; the definition follows below.
template <int D, class ScalarT>
struct quad;

// Common quad types
using quad2 = quad<2, f32>;
using quad3 = quad<3, f32>;
using quad4 = quad<4, f32>;

using fquad2 = quad<2, f32>;
using fquad3 = quad<3, f32>;
using fquad4 = quad<4, f32>;

using dquad2 = quad<2, f64>;
using dquad3 = quad<3, f64>;
using dquad4 = quad<4, f64>;

using iquad2 = quad<2, i32>;
using iquad3 = quad<3, i32>;
using iquad4 = quad<4, i32>;

using uquad2 = quad<2, u32>;
using uquad3 = quad<3, u32>;
using uquad4 = quad<4, u32>;

// ======== IMPLEMENTATION ========

/// A quadrilateral in D dimensions given by its four corner positions.
/// Corners are stored in the order pos00, pos10, pos11, pos01 (also the
/// order consumed by the range constructor and visited by introspect).
template <int D, class ScalarT>
struct quad
{
    using scalar_t = ScalarT;
    using vec_t = vec<D, ScalarT>;
    using pos_t = pos<D, ScalarT>;

    pos_t pos00;
    pos_t pos10;
    pos_t pos11;
    pos_t pos01;

    constexpr quad() = default;
    constexpr quad(pos_t p00, pos_t p10, pos_t p11, pos_t p01) : pos00(p00), pos10(p10), pos11(p11), pos01(p01) {}

    // Explicit conversion between quads of different scalar types.
    template <class OtherT>
    explicit constexpr quad(quad<D, OtherT> const& v) : pos00(v.pos00), pos10(v.pos10), pos11(v.pos11), pos01(v.pos01)
    {
    }

    // Construct from any range that yields exactly four positions; the
    // contracts enforce the element count at both ends.
    template <class Range, class = std::enable_if_t<tg::is_range<Range, pos_t>>>
    explicit constexpr quad(Range&& r)
    {
        auto it = tg::begin(r);
        auto end = tg::end(r);
        TG_CONTRACT(it != end);
        pos00 = pos_t(*it++);
        TG_CONTRACT(it != end);
        pos10 = pos_t(*it++);
        TG_CONTRACT(it != end);
        pos11 = pos_t(*it++);
        TG_CONTRACT(it != end);
        pos01 = pos_t(*it++);
        TG_CONTRACT(!(it != end));
    }

    // Evaluate the quad at 2D coordinates; declaration only —
    // NOTE(review): presumably bilinear interpolation, confirm in the
    // out-of-line definition.
    [[nodiscard]] constexpr pos_t operator[](comp<2, ScalarT> const& barycoords) const;

    [[nodiscard]] bool operator==(quad const& rhs) const
    {
        return pos00 == rhs.pos00 && pos10 == rhs.pos10 && pos11 == rhs.pos11 && pos01 == rhs.pos01;
    }
    [[nodiscard]] bool operator!=(quad const& rhs) const { return !operator==(rhs); }
};

// Reflection hook: visits each corner together with its field name.
template <class I, int D, class ScalarT>
constexpr void introspect(I&& i, quad<D, ScalarT>& v)
{
    i(v.pos00, "pos00");
    i(v.pos10, "pos10");
    i(v.pos11, "pos11");
    i(v.pos01, "pos01");
}

template <int D, class ScalarT>
struct object_traits<quad<D, ScalarT>> : detail::finite_object_traits<2, ScalarT, D, default_object_tag>
{
};
} // namespace tg
|
package com.kqp.tcrafting.screen;
import com.kqp.tcrafting.init.TCrafting;
import com.kqp.tcrafting.network.init.TCraftingNetwork;
import com.kqp.tcrafting.screen.inventory.TCraftingRecipeLookUpInventory;
import com.kqp.tcrafting.screen.inventory.TCraftingResultInventory;
import com.kqp.tcrafting.screen.slot.TRecipeSlot;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.entity.player.PlayerInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.screen.ScreenHandler;
import net.minecraft.screen.ScreenHandlerContext;
import net.minecraft.screen.slot.Slot;
import net.minecraft.server.network.ServerPlayerEntity;
/**
* Screen handler for the crafting GUI.
*/
/**
 * Screen handler for the crafting GUI.
 *
 * Slot index layout (relied upon by {@link #transferSlot}):
 *   0-23   crafting result slots (3 rows x 8 columns)
 *   24-50  player main inventory (3 rows x 9 columns)
 *   51-59  player hotbar
 *   60     recipe look-up query slot
 *   61-78  recipe look-up result slots (6 rows x 3 columns)
 */
public class TCraftingScreenHandler extends ScreenHandler {
    public final TCraftingResultInventory resultInventory;
    public final TCraftingRecipeLookUpInventory lookUpInventory;

    private final ScreenHandlerContext context;
    private final PlayerEntity player;

    public final CraftingSession craftingSession;

    /**
     * Client-side constructor.
     *
     * @param syncId Sync ID
     */
    public TCraftingScreenHandler(int syncId, PlayerInventory playerInventory) {
        this(syncId, playerInventory, ScreenHandlerContext.EMPTY);
    }

    /**
     * Server-side constructor.
     *
     * @param syncId          Sync ID
     * @param playerInventory Player's screen
     * @param context         Context for executing server-side tasks
     */
    public TCraftingScreenHandler(int syncId, PlayerInventory playerInventory, ScreenHandlerContext context) {
        super(TCrafting.TCRAFTING_SCREEN_HANDLER, syncId);

        // Init fields
        this.context = context;
        this.player = playerInventory.player;

        // Init inventories
        resultInventory = new TCraftingResultInventory();
        lookUpInventory = new TCraftingRecipeLookUpInventory();

        // Crafting results screen (24 output)
        int counter = 0;
        for (int i = 0; i < 3; i++) {
            for (int j = 0; j < 8; j++) {
                // Result slots refresh the crafting results whenever their contents change.
                this.addSlot(new TRecipeSlot(this, resultInventory, playerInventory, counter++, 8 + j * 18, 18 + i * 18, View.CRAFTING) {
                    @Override
                    public void onStackChanged(ItemStack originalItem, ItemStack itemStack) {
                        super.onStackChanged(originalItem, itemStack);
                        updateCraftingResults();
                    }
                });
            }
        }

        // Player Inventory (27 storage + 9 hotbar)
        {
            // Every player slot re-computes craftable results on any change.
            for (int i = 0; i < 3; i++) {
                for (int j = 0; j < 9; j++) {
                    this.addSlot(new Slot(playerInventory, i * 9 + j + 9, 8 + j * 18, 84 + i * 18) {
                        @Override
                        public void markDirty() {
                            super.markDirty();
                            updateCraftingResults();
                        }

                        @Override
                        public void onStackChanged(ItemStack originalItem, ItemStack itemStack) {
                            super.onStackChanged(originalItem, itemStack);
                            updateCraftingResults();
                        }
                    });
                }
            }

            for (int j = 0; j < 9; j++) {
                this.addSlot(new Slot(playerInventory, j, 8 + j * 18, 142) {
                    @Override
                    public void markDirty() {
                        super.markDirty();
                        updateCraftingResults();
                    }

                    @Override
                    public void onStackChanged(ItemStack originalItem, ItemStack itemStack) {
                        super.onStackChanged(originalItem, itemStack);
                        updateCraftingResults();
                    }
                });
            }
        }

        // Look-up screen (1 query slot + 18 result slots)
        {
            counter = 0;

            // Query slot: look-up results are only refreshed on the client.
            this.addSlot(new Slot(lookUpInventory, counter++, 209, 22) {
                @Override
                public void markDirty() {
                    super.markDirty();

                    if (player.world.isClient) {
                        updateRecipeLookUpResults();
                    }
                }
            });

            for (int i = 0; i < 6; i++) {
                for (int j = 0; j < 3; j++) {
                    this.addSlot(new TRecipeSlot(this, lookUpInventory, playerInventory, counter++, 186 + j * 18, 48 + i * 18, View.LOOK_UP));
                }
            }
        }

        this.craftingSession = new CraftingSession(this, player);
        craftingSession.refreshCraftingResults(false);
    }

    /** Recomputes craftable items; on the server, asks the client to resend its scroll position. */
    public void updateCraftingResults() {
        craftingSession.refreshCraftingResults(true);

        // If on server, notify the client that something has changed so the client can reply with the scroll bar position.
        if (!player.world.isClient) {
            TCraftingNetwork.REQUEST_SCROLL_POSITION_S2C.sendToPlayer((ServerPlayerEntity) player, View.CRAFTING);
        }
    }

    public void updateRecipeLookUpResults() {
        craftingSession.refreshLookUpResults();
    }

    @Override
    public void close(PlayerEntity player) {
        super.close(player);

        // Drop look up slot
        player.inventory.offerOrDrop(player.world, lookUpInventory.removeStack(0));
    }

    /**
     * Handles the shift clicking.
     *
     * @param player  The player
     * @param invSlot The slot that is being shift clicked (I think)
     * @return The ItemStack (I don't know)
     */
    @Override
    public ItemStack transferSlot(PlayerEntity player, int invSlot) {
        ItemStack itemStack = ItemStack.EMPTY;
        Slot slot = this.slots.get(invSlot);

        if (slot != null && slot.hasStack()) {
            ItemStack itemStack2 = slot.getStack();
            itemStack = itemStack2.copy();

            if (invSlot < 24) {
                // Shift click inside result slots: try hotbar first, then main inventory.
                if (!this.insertItem(itemStack2, 51, 60, false)) {
                    if (!this.insertItem(itemStack2, 24, 51, false)) {
                        return ItemStack.EMPTY;
                    }
                }

                slot.onStackChanged(itemStack2, itemStack);
            } else if (invSlot >= 24 && invSlot < 51) {
                // Shift click inside main screen: try query slot, then hotbar.
                if (!this.insertItem(itemStack2, 60, 61, false)) {
                    if (!this.insertItem(itemStack2, 51, 60, false)) {
                        return ItemStack.EMPTY;
                    }
                }
            } else if (invSlot >= 51 && invSlot < 60) {
                // Shift click inside hot-bar slots: try query slot, then main inventory.
                if (!this.insertItem(itemStack2, 60, 61, false)) {
                    if (!this.insertItem(itemStack2, 24, 51, false)) {
                        return ItemStack.EMPTY;
                    }
                }
            } else if (invSlot == 60) {
                // Shift click inside recipe look-up slot: back into hotbar or inventory.
                if (!this.insertItem(itemStack2, 51, 60, false)) {
                    if (!this.insertItem(itemStack2, 24, 51, false)) {
                        slot.markDirty();
                        return ItemStack.EMPTY;
                    }
                }
            }

            if (itemStack2.isEmpty()) {
                slot.setStack(ItemStack.EMPTY);
            } else {
                slot.markDirty();
            }

            if (itemStack2.getCount() == itemStack.getCount()) {
                return ItemStack.EMPTY;
            }

            ItemStack itemStack3 = slot.onTakeItem(player, itemStack2);
            // NOTE(review): only index 0 drops the taken stack even though
            // 0-23 are all result slots — confirm this special case is intended.
            if (invSlot == 0) {
                player.dropItem(itemStack3, false);
            }
        }

        return itemStack;
    }

    @Override
    public boolean canInsertIntoSlot(ItemStack stack, Slot slot) {
        // Recipe slots are output-only; nothing may be placed into them.
        return !(slot instanceof TRecipeSlot);
    }

    @Override
    public boolean canUse(PlayerEntity player) {
        return true;
    }

    public boolean shouldShowCraftingScrollbar() {
        return this.craftingSession.craftingItemStacks.size() > 24;
    }

    public boolean shouldShowLookUpScrollbar() {
        return this.craftingSession.lookUpItemStacks.size() > 18;
    }

    /** The two panes of the GUI; {@link #from} maps a wire ordinal back to a value. */
    public static enum View {
        CRAFTING,
        LOOK_UP;

        public static View from(int cardinal) {
            return values()[cardinal];
        }
    }
}
|
<reponame>DeagleGross/vscode-apimanagement
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.md in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { ApiManagementModels } from "@azure/arm-apimanagement";
import { AzureTreeItem } from "vscode-azureextensionui";
import { IApiTreeRoot } from "../../IApiTreeRoot";
import { BaseArmResourceEditor } from "./BaseArmResourceEditor";
// tslint:disable-next-line:no-any
export class ApiResourceEditor extends BaseArmResourceEditor<IApiTreeRoot> {
    public entityType: string = "API";

    constructor() {
        super();
    }

    // Fetch the current API contract for this tree node.
    public async getDataInternal(context: AzureTreeItem<IApiTreeRoot>): Promise<ApiManagementModels.ApiContract> {
        const root = context.root;
        return await root.client.api.get(root.resourceGroupName, root.serviceName, root.apiName);
    }

    // Push an updated API contract back to the service.
    public async updateDataInternal(context: AzureTreeItem<IApiTreeRoot>, payload: ApiManagementModels.ApiCreateOrUpdateParameter): Promise<ApiManagementModels.ApiContract> {
        const root = context.root;
        return await root.client.api.createOrUpdate(root.resourceGroupName, root.serviceName, root.apiName, payload);
    }
}
|
#!/bin/sh
set -xe

# Detect the host IP from the default route.
# Fix: declaration and export are separated so a failure of the pipeline is
# not masked by the `export` builtin's own exit status (ShellCheck SC2155).
DOCKER_BRIDGE_IP=$(ip ro | grep default | cut -d' ' -f 3)
export DOCKER_BRIDGE_IP

if [ "$SYMFONY_ENV" = 'prod' ]; then
    composer install --prefer-dist --no-dev --no-progress --no-suggest --optimize-autoloader --classmap-authoritative
else
    composer install --prefer-dist --no-progress --no-suggest
fi

# Start Apache with the right permissions after removing pre-existing PID file
rm -f /var/run/apache2/apache2.pid
exec docker/apache/start_safe_perms -DFOREGROUND
|
<reponame>jimbobhickville/sqlalchemy-jsonapi
"""Test for serializer's delete_relationship."""
from sqlalchemy_jsonapi import errors
from sqlalchemy_jsonapi.unittests.utils import testcases
from sqlalchemy_jsonapi.unittests import models
class DeleteRelationship(testcases.SqlalchemyJsonapiTestCase):
"""Tests for serializer.delete_relationship."""
def test_delete_one_to_many_relationship_successs(self):
    """Delete a relationship from a resource is successful.

    Ensure the comment is no longer associated with the post in the
    database after the relationship has been deleted.
    """
    user = models.User(
        first='Sally', last='Smith',
        password='password', username='SallySmith1')
    self.session.add(user)
    blog_post = models.Post(
        title='This Is A Title', content='This is the content',
        author_id=user.id, author=user)
    self.session.add(blog_post)
    comment = models.Comment(
        content='This is a comment', author_id=user.id,
        post_id=blog_post.id, author=user, post=blog_post)
    self.session.add(comment)
    self.session.commit()
    payload = {
        'data': [{
            'type': 'comments',
            'id': comment.id
        }]
    }
    models.serializer.delete_relationship(
        self.session, payload, 'posts', blog_post.id, 'comments')
    updated_comment = self.session.query(models.Comment).get(comment.id)
    # Fix: assertEquals is a long-deprecated alias removed in Python 3.12;
    # use assertEqual.
    self.assertEqual(updated_comment.post_id, None)
    self.assertEqual(updated_comment.post, None)
def test_delete_one_to_many_relationship_response_success(self):
    """Deleting a relationship successfully returns 200 with empty data."""
    user = models.User(
        first='Sally', last='Smith',
        password='password', username='SallySmith1')
    self.session.add(user)
    blog_post = models.Post(
        title='This Is A Title', content='This is the content',
        author_id=user.id, author=user)
    self.session.add(blog_post)
    comment = models.Comment(
        content='This is a comment', author_id=user.id,
        post_id=blog_post.id, author=user, post=blog_post)
    self.session.add(comment)
    self.session.commit()
    payload = {
        'data': [{
            'type': 'comments',
            'id': comment.id
        }]
    }
    response = models.serializer.delete_relationship(
        self.session, payload, 'posts', blog_post.id, 'comments')
    expected = {'data': []}
    actual = response.data
    self.assertEqual(expected, actual)
    self.assertEqual(200, response.status_code)
def test_delete_one_to_many_relationship_with_invalid_data(self):
"""Delete a relationship from a resource is successful returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is a comment', author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
payload = {
'data': {
'type': 'comments',
'id': comment.id
}
}
with self.assertRaises(errors.ValidationError) as error:
models.serializer.delete_relationship(
self.session, payload, 'posts', blog_post.id, 'comments')
self.assertEquals(
error.exception.detail, 'Provided data must be an array.')
self.assertEquals(error.exception.status_code, 409)
def test_delete_many_to_one_relationship_response(self):
"""Delete a many-to-one relationship returns a 409.
A ToManyExpectedError is returned.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is a comment', author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
payload = {
'data': [{
'type': 'users',
'id': user.id
}]
}
response = models.serializer.delete_relationship(
self.session, payload, 'posts', blog_post.id, 'author')
expected_detail = 'posts.1.author is not a to-many relationship'
self.assertEqual(expected_detail, response.detail)
self.assertEqual(409, response.status_code)
def test_delete_one_to_many_relationship_model_not_found(self):
"""Delete a one-to-many relationship whose api-type does not exist returns 404.
A ResourceTypeNotFoundError is raised.
"""
with self.assertRaises(errors.ResourceTypeNotFoundError) as error:
models.serializer.delete_relationship(
self.session, {}, 'not-existant', 1, 'author')
self.assertEquals(error.exception.status_code, 404)
def test_delete_one_to_many_relationship_of_nonexistant_resource(self):
"""Delete a one-to-many relationship of nonexistant resource returns 404.
A ResourceNotFoundError is raised.
"""
with self.assertRaises(errors.ResourceNotFoundError) as error:
models.serializer.delete_relationship(
self.session, {}, 'posts', 1, 'author')
self.assertEquals(error.exception.status_code, 404)
def test_delete_one_to_many_relationship_with_unknown_relationship(self):
"""Delete a one-to-many relationship with unknown relationship returns 404.
A RelationshipNotFoundError is raised.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
self.session.commit()
with self.assertRaises(errors.RelationshipNotFoundError) as error:
models.serializer.delete_relationship(
self.session, {}, 'posts', 1, 'logs')
self.assertEquals(error.exception.status_code, 404)
|
// Error-logging middleware: print the stack trace, then delegate to the next
// error handler so a response is still produced downstream.
function logErrors(err, req, res, next) {
  console.error(err.stack);
  next(err);
}
// Final error-handling middleware: render the error page with a 500 status.
// BUG FIX: if headers were already sent, delegate to Express's default
// handler — calling res.status()/res.render() after that point throws.
function errorHandler(err, req, res, next) {
  if (res.headersSent) {
    return next(err);
  }
  res.status(500);
  res.render('error', { error: err });
}
// Public middleware API of this module.
module.exports = { logErrors, errorHandler };
// Simulate rolling a fair die with `numSides` faces.
// Returns a pseudo-random value in [1, numSides]. Uses rand(), so the
// sequence depends on the current srand() seed.
int rollDice(int numSides) {
    return 1 + rand() % numSides;
}
// Driver code
int main()
{
    // Roll a 4-sided die once and print the result.
    // NOTE(review): relies on <iostream>/<cstdlib> and a `using namespace std`
    // (or equivalent) from earlier in the file; no srand() call here, so the
    // output is the implementation's default-seed sequence — confirm intended.
    cout << rollDice(4) << endl;
    return 0;
}
<reponame>murphybytes/saml
package main
import (
"net/http"
"time"
"github.com/murphybytes/saml"
)
// logoutHandler initiates a SAML single-logout flow for a user.
type logoutHandler struct {
	// logoutProfile builds and validates SAML single-logout messages.
	logoutProfile *saml.SingleLogOutProfile
}
// newLogoutHandler wires a single-logout profile for the given service
// provider and IdP metadata into an http.Handler.
func newLogoutHandler(sp saml.ServiceProvider, metadata *saml.EntityDescriptor) http.Handler {
	profile := saml.NewSingleLogOutProfile(&sp, metadata)
	return &logoutHandler{logoutProfile: profile}
}
// ServeHTTP starts a SAML single logout for the user identified by the form
// value keyUID and redirects the browser to the IdP's logout URL.
func (h *logoutHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	err := r.ParseForm()
	if err != nil {
		writeServerError(w, err, "parsing form logout redirect")
		return
	}
	email := r.FormValue(keyUID)
	redirectURL, err := h.logoutProfile.RedirectBinding(email)
	if err != nil {
		writeServerError(w, err, "building redirect binding")
		// BUG FIX: without this return the handler fell through and issued a
		// 302 redirect to an empty Location after already writing a 500.
		return
	}
	// Trigger redirect in browser
	w.Header().Set("Location", redirectURL)
	w.WriteHeader(http.StatusFound)
}
// logoutCallbackHandler processes the IdP's POSTed single-logout response.
type logoutCallbackHandler struct {
	// logoutProfile validates incoming SAML logout messages.
	logoutProfile *saml.SingleLogOutProfile
}
// newLogoutCallbackHandler builds the handler that consumes the IdP's
// logout response POST-back for the given service provider and metadata.
func newLogoutCallbackHandler(sp saml.ServiceProvider, metadata *saml.EntityDescriptor) http.Handler {
	profile := saml.NewSingleLogOutProfile(&sp, metadata)
	return &logoutCallbackHandler{logoutProfile: profile}
}
// ServeHTTP completes a SAML single logout: it validates the POSTed logout
// message and redirects the browser to the appropriate post-logout location.
func (h *logoutCallbackHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	cb, err := h.logoutProfile.HandlePostResponse(r, time.Now())
	if err != nil {
		writeServerError(w, err, "logout callback")
		// BUG FIX: without this return, the nil cb was dereferenced below.
		return
	}
	var location string
	if cb.SelfInitiatedLogout != nil {
		location = cb.SelfInitiatedLogout.RelayURL
	}
	if cb.ExternallyInitiatedLogout != nil {
		location = cb.ExternallyInitiatedLogout.RedirectURL
	}
	// redirect to homepage (or relay URL)
	w.Header().Set("Location", location)
	w.WriteHeader(http.StatusFound)
}
|
#!/bin/sh
# Download the OPSIN 2.3.0 fat jar into ./opsin.jar.
# Abort on failure or on use of an unset variable rather than leaving a
# truncated/absent jar behind silently.
set -eu
wget "https://bitbucket.org/dan2097/opsin/downloads/opsin-2.3.0-jar-with-dependencies.jar" -O "opsin.jar"
|
<gh_stars>0
interface CaliforniaStore { CaliforniaStoreId: string; }
import {CaliforniaProject} from "./CaliforniaProject";
export class SharedProjectInfo {
SharedProjectInfoId: number;
SharedWithCaliforniaStoreId: string;
SharedWithCaliforniaStore: CaliforniaStore;
OwnerCaliforniaStoreId: string;
OwnerCaliforniaStore: CaliforniaStore;
CaliforniaProjectId: number;
CaliforniaProject: CaliforniaProject;
Name: string;
ShareEnabledTime: Date | string
IsReshareAllowed: boolean;
IsEditAllowed: boolean;
} |
#!/bin/sh
# Compare checksums of all files under $srcdir and $dstdir (set in vars.sh),
# skipping the Lightroom previews bundle and .DS_Store files.
# `.` instead of `source`: this script runs under /bin/sh, not bash.
. ./vars.sh

# BUG FIX: the -path patterns were single-quoted, so $srcdir/$dstdir were
# matched literally and the -prune never fired; double quotes expand them.
find "$srcdir" \
    -path "$srcdir/Lightroom/Lightroom 4 Catalog Previews.lrdata" -prune \
    -or -not -name '.DS_Store' \
    -type f \
    -exec cksum "{}" \; >> src_checksums.txt
find "$dstdir" \
    -path "$dstdir/Lightroom/Lightroom 4 Catalog Previews.lrdata" -prune \
    -or -not -name '.DS_Store' \
    -type f \
    -exec cksum "{}" \; >> dst_checksums.txt
diff src_checksums.txt dst_checksums.txt
|
package cn.crabapples.common;
import lombok.Getter;
/**
 * Application-level runtime exception that can carry a numeric error code.
 * (Header translated from Chinese: "application exception class".)
 *
 * @author Mr.He
 * 2019/9/21 20:54
 * e-mail <EMAIL>
 * qq 294046317
 * pc-name 29404
 */
public class ApplicationException extends RuntimeException{
    // Optional machine-readable error code; stays 0 unless set via the
    // (message, code) constructor. Exposed through Lombok's @Getter.
    @Getter
    private int code;

    /** Exception with no detail message. */
    public ApplicationException() {
    }

    /** Exception with a human-readable message only. */
    public ApplicationException(String message) {
        super(message);
    }

    /** Exception with a message and a numeric error code. */
    public ApplicationException(String message,int code) {
        super(message);
        this.code = code;
    }

    /** Exception wrapping a cause with an explanatory message. */
    public ApplicationException(String message, Throwable cause) {
        super(message, cause);
    }

    /** Exception wrapping only a cause. */
    public ApplicationException(Throwable cause) {
        super(cause);
    }

    /** Full-control constructor mirroring RuntimeException's protected one. */
    public ApplicationException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
|
/**
* Usage of node-telnet with node's REPL, in full-featured "terminal" mode.
* (Requires node >= v0.7.7)
*/
var telnet = require('./')
  , repl = require('repl')
  , port = Number(process.argv[2]) || 1337

// One REPL session per telnet connection.
var server = telnet.createServer(function (client) {
  client.on('window size', function (e) {
    if (e.command === 'sb') {
      // a real "resize" event; 'readline' listens for this
      client.columns = e.columns
      client.rows = e.rows
      client.emit('resize')
    }
  })
  // Log the other negotiated telnet options for debugging.
  client.on('suppress go ahead', console.log)
  client.on('echo', console.log)
  client.on('window size', console.log)
  client.on('x display location', console.log)
  client.on('terminal speed', console.log)
  client.on('environment variables', console.log)
  client.on('transmit binary', console.log)
  client.on('status', console.log)
  client.on('linemode', console.log)
  client.on('authentication', console.log)
  // 'readline' will call `setRawMode` when it is a function
  client.setRawMode = setRawMode
  // make unicode characters work properly
  client.do.transmit_binary()
  // emit 'window size' events
  client.do.window_size()
  // create the REPL
  var r = repl.start({
      input: client
    , output: client
    , prompt: 'telnet repl> '
    , terminal: true
    , useGlobal: false
  }).on('exit', function () {
    client.end()
  })
  // Expose handles for poking around from inside the REPL session.
  r.context.r = r
  r.context.client = client
  r.context.socket = client
})

server.on('error', function (err) {
  if (err.code == 'EACCES') {
    // Binding a privileged port (< 1024) requires root.
    console.error('%s: You must be "root" to bind to port %d', err.code, port)
  } else {
    throw err
  }
})

server.on('listening', function () {
  console.log('node repl telnet(1) server listening on port %d', this.address().port)
  console.log(' $ telnet localhost' + (port != 23 ? ' ' + port : ''))
})

server.listen(port)
/**
 * The equivalent of "raw mode" via telnet option commands.
 * Set this function on a telnet `client` instance; `this` is the client.
 */
function setRawMode (mode) {
  if (!mode) {
    // Leave raw mode: retract the negotiations made below.
    this.dont.suppress_go_ahead()
    this.wont.suppress_go_ahead()
    this.wont.echo()
    return
  }
  // Enter raw mode: suppress go-ahead in both directions and take over echo.
  this.do.suppress_go_ahead()
  this.will.suppress_go_ahead()
  this.will.echo()
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
import { AbortSignalLike } from "@azure/abort-controller";
import { HttpHeaders, URLBuilder } from "@azure/core-http";
import { HeaderConstants, URLConstants, DevelopmentConnectionString } from "./constants";
import { StorageClientContext } from "../generated/src/storageClientContext";
import { Pipeline } from "../Pipeline";
/**
 * Append a string to a URL's path, avoiding a doubled "/" when the current
 * path already ends with one.
 *
 * @param url Source URL string
 * @param name Segment to append
 * @returns The updated URL string
 */
export function appendToURLPath(url: string, name: string): string {
  const parsed = URLBuilder.parse(url);
  const currentPath = parsed.getPath();
  let newPath: string;
  if (!currentPath) {
    newPath = name;
  } else if (currentPath.endsWith("/")) {
    newPath = `${currentPath}${name}`;
  } else {
    newPath = `${currentPath}/${name}`;
  }
  parsed.setPath(newPath);
  return parsed.toString();
}
/**
* Set URL parameter name and value. If name exists in URL parameters, old value
* will be replaced by name key. If not provide value, the parameter will be deleted.
*
* @export
* @param {string} url Source URL string
* @param {string} name Parameter name
* @param {string} [value] Parameter value
* @returns {string} An updated URL string
*/
export function setURLParameter(url: string, name: string, value?: string): string {
const urlParsed = URLBuilder.parse(url);
urlParsed.setQueryParameter(name, value);
return urlParsed.toString();
}
/**
* Get URL parameter by name.
*
* @export
* @param {string} url URL string
* @param {string} name Parameter name
* @returns {(string | string[] | undefined)} Parameter value(s) for the given parameter name.
*/
export function getURLParameter(url: string, name: string): string | string[] | undefined {
const urlParsed = URLBuilder.parse(url);
return urlParsed.getQueryParameterValue(name);
}
/**
* Set URL host.
*
* @export
* @param {string} url Source URL string
* @param {string} host New host string
* @returns An updated URL string
*/
export function setURLHost(url: string, host: string): string {
const urlParsed = URLBuilder.parse(url);
urlParsed.setHost(host);
return urlParsed.toString();
}
/**
 * Extract the path component from a URL string.
 *
 * @param url Source URL string
 * @returns The path part, or undefined when there is none
 */
export function getURLPath(url: string): string | undefined {
  return URLBuilder.parse(url).getPath();
}
/**
 * Parse a URL's query string into key/value pairs. Malformed entries (no "=",
 * empty key, empty value, or multiple "=") are silently dropped.
 *
 * @param url Source URL string
 * @returns Map of query keys to their (string) values
 */
export function getURLQueries(url: string): { [key: string]: string } {
  const rawQuery = URLBuilder.parse(url).getQuery();
  if (!rawQuery) {
    return {};
  }

  let trimmed = rawQuery.trim();
  if (trimmed.startsWith("?")) {
    trimmed = trimmed.substr(1);
  }

  const result: { [key: string]: string } = {};
  for (const pair of trimmed.split("&")) {
    const firstEq = pair.indexOf("=");
    const lastEq = pair.lastIndexOf("=");
    // Keep only "key=value" with exactly one '=' and non-empty key/value.
    const wellFormed =
      firstEq > 0 && firstEq === lastEq && lastEq < pair.length - 1;
    if (!wellFormed) {
      continue;
    }
    const [key, value] = pair.split("=");
    result[key] = value;
  }
  return result;
}
// Parsed representation of an Azure Storage connection string.
export interface ConnectionString {
  // Discriminator: account-name/key credentials vs. SAS-token credentials.
  kind: "AccountConnString" | "SASConnString";
  // Queue service endpoint (no trailing slash).
  url: string;
  accountName: string;
  // Present for AccountConnString; the base64-decoded account key.
  accountKey?: any;
  // Present for SASConnString.
  accountSas?: string;
  proxyUri?: string; // Development Connection String may contain proxyUri
}
// Extract the optional proxy URI from a Development Connection String, e.g.
// "UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri".
// Returns "" when no proxy is configured.
// https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key
function getProxyUriFromDevConnString(connectionString: string): string {
  let proxyUri = "";
  if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) {
    for (const segment of connectionString.split(";")) {
      const trimmed = segment.trim();
      if (trimmed.startsWith("DevelopmentStorageProxyUri=")) {
        proxyUri = trimmed.match("DevelopmentStorageProxyUri=(.*)")![1];
      }
    }
  }
  return proxyUri;
}
// Return the value of `argument` from a ';'-separated connection string, or
// "" when the key is absent. Matches on prefix, value is everything after '='.
function getValueInConnString(
  connectionString: string,
  argument:
    | "QueueEndpoint"
    | "AccountName"
    | "AccountKey"
    | "DefaultEndpointsProtocol"
    | "EndpointSuffix"
    | "SharedAccessSignature"
) {
  for (const element of connectionString.split(";")) {
    const trimmed = element.trim();
    if (trimmed.startsWith(argument)) {
      return trimmed.match(argument + "=(.*)")![1];
    }
  }
  return "";
}
/**
 * Extracts the parts of an Azure Storage account connection string.
 *
 * Handles three shapes: the development connection string
 * ("UseDevelopmentStorage=true..."), an account connection string
 * (DefaultEndpointsProtocol + AccountKey), and a SAS connection string.
 *
 * @param connectionString Connection string.
 * @returns String key value pairs of the storage account's url and credentials.
 */
export function extractConnectionStringParts(connectionString: string): ConnectionString {
  let proxyUri = "";
  if (connectionString.startsWith("UseDevelopmentStorage=true")) {
    // Development connection string: swap in the well-known emulator string.
    proxyUri = getProxyUriFromDevConnString(connectionString);
    connectionString = DevelopmentConnectionString;
  }

  // Matching QueueEndpoint in the Account connection string.
  let queueEndpoint = getValueInConnString(connectionString, "QueueEndpoint");
  // Slicing off '/' at the end if exists
  // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)
  queueEndpoint = queueEndpoint.endsWith("/") ? queueEndpoint.slice(0, -1) : queueEndpoint;

  if (
    connectionString.search("DefaultEndpointsProtocol=") !== -1 &&
    connectionString.search("AccountKey=") !== -1
  ) {
    // Account connection string.
    // (Cleanup: previous code initialized accountKey with a meaningless
    // Buffer.from("accountKey") placeholder that was immediately overwritten.)
    const accountName = getValueInConnString(connectionString, "AccountName");
    const accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64");

    if (!queueEndpoint) {
      // QueueEndpoint is not present in the Account connection string;
      // derive it: `${defaultEndpointsProtocol}://${accountName}.queue.${endpointSuffix}`
      const defaultEndpointsProtocol = getValueInConnString(
        connectionString,
        "DefaultEndpointsProtocol"
      );
      const protocol = defaultEndpointsProtocol.toLowerCase();
      if (protocol !== "https" && protocol !== "http") {
        throw new Error(
          "Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"
        );
      }
      const endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix");
      if (!endpointSuffix) {
        throw new Error("Invalid EndpointSuffix in the provided Connection String");
      }
      queueEndpoint = `${defaultEndpointsProtocol}://${accountName}.queue.${endpointSuffix}`;
    }

    if (!accountName) {
      throw new Error("Invalid AccountName in the provided Connection String");
    } else if (accountKey.length === 0) {
      throw new Error("Invalid AccountKey in the provided Connection String");
    }

    return {
      kind: "AccountConnString",
      url: queueEndpoint,
      accountName,
      accountKey,
      proxyUri
    };
  } else {
    // SAS connection string.
    const accountSas = getValueInConnString(connectionString, "SharedAccessSignature");
    const accountName = getAccountNameFromUrl(queueEndpoint);
    if (!queueEndpoint) {
      throw new Error("Invalid QueueEndpoint in the provided SAS Connection String");
    } else if (!accountSas) {
      throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String");
    }

    return { kind: "SASConnString", url: queueEndpoint, accountName, accountSas };
  }
}
/**
 * Format a date in ISO8601, with the millisecond field either padded out to
 * seven digits or dropped entirely.
 *
 * @param date Date to format
 * @param withMilliseconds When true (default), returns
 *        YYYY-MM-DDThh:mm:ss.fffffffZ; otherwise YYYY-MM-DDThh:mm:ssZ.
 * @returns Date string in ISO8061 format
 */
export function truncatedISO8061Date(date: Date, withMilliseconds: boolean = true): string {
  // toISOString() yields e.g. "2018-10-29T06:34:36.139Z"
  const iso = date.toISOString();
  if (withMilliseconds) {
    // Pad the 3-digit millisecond field out to 7 digits.
    return `${iso.slice(0, -1)}0000Z`;
  }
  // Drop ".fffZ" and re-append the "Z".
  return `${iso.slice(0, -5)}Z`;
}
/**
 * Delay specified time interval. Resolves after `timeInMs`; rejects with
 * `abortError` if `aborter` fires first.
 *
 * @export
 * @param {number} timeInMs
 * @param {AbortSignalLike} [aborter]
 * @param {Error} [abortError] value rejected with on abort (may be undefined)
 */
export async function delay(timeInMs: number, aborter?: AbortSignalLike, abortError?: Error) {
  return new Promise((resolve, reject) => {
    let timeout: any;

    // Abort path: cancel the pending timer (if any) and reject.
    const abortHandler = () => {
      if (timeout !== undefined) {
        clearTimeout(timeout);
      }
      reject(abortError);
    };

    // Timer path: detach the abort listener before resolving so the handler
    // cannot fire afterwards.
    const resolveHandler = () => {
      if (aborter !== undefined) {
        aborter.removeEventListener("abort", abortHandler);
      }
      resolve();
    };

    // Start the timer before subscribing; order matters only for an already-
    // aborted signal, which addEventListener handles per AbortSignal semantics.
    timeout = setTimeout(resolveHandler, timeInMs);
    if (aborter !== undefined) {
      aborter.addEventListener("abort", abortHandler);
    }
  });
}
/**
 * Left-pad `currentString` with `padString` up to `targetLength`, matching
 * String.prototype.padStart() semantics (used as a fallback when the native
 * method is unavailable).
 *
 * @param currentString String to pad
 * @param targetLength Desired final length
 * @param padString Fill string (default " ")
 * @returns The padded string, or the original when already long enough
 */
export function padStart(
  currentString: string,
  targetLength: number,
  padString: string = " "
): string {
  // Prefer the native implementation when the runtime provides it.
  if (String.prototype.padStart) {
    return currentString.padStart(targetLength, padString);
  }

  padString = padString || " ";
  if (currentString.length > targetLength) {
    return currentString;
  }
  const padLength = targetLength - currentString.length;
  let pad = padString;
  if (padLength > pad.length) {
    // Repeat the fill until it covers the deficit; slice trims the excess.
    pad += pad.repeat(padLength / pad.length);
  }
  return pad.slice(0, padLength) + currentString;
}
// Mask the SAS signature query parameter (if present) so the URL is safe to
// log without leaking credentials.
export function sanitizeURL(url: string): string {
  const hasSignature = getURLParameter(url, URLConstants.Parameters.SIGNATURE);
  if (hasSignature) {
    return setURLParameter(url, URLConstants.Parameters.SIGNATURE, "*****");
  }
  return url;
}
export function sanitizeHeaders(originalHeader: HttpHeaders): HttpHeaders {
const headers: HttpHeaders = new HttpHeaders();
for (const header of originalHeader.headersArray()) {
if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION) {
headers.set(header.name, "*****");
} else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) {
headers.set(header.name, sanitizeURL(header.value));
} else {
headers.set(header.name, header.value);
}
}
return headers;
}
// Derive the storage account name from a queue endpoint URL of the form
// `${protocol}://${accountName}.queue.${endpointSuffix}`. The local emulator
// endpoint is special-cased via the development connection string.
export function getAccountNameFromUrl(url: string): string {
  if (url.startsWith("http://127.0.0.1:10000")) {
    // Dev Conn String
    return getValueInConnString(DevelopmentConnectionString, "AccountName");
  }
  try {
    // Slicing off '/' at the end if exists
    const normalized = url.endsWith("/") ? url.slice(0, -1) : url;
    const start = normalized.lastIndexOf("://") + 3;
    const end = normalized.lastIndexOf(".queue.");
    const accountName = normalized.substring(start, end);
    if (!accountName) {
      throw new Error("Provided accountName is invalid.");
    }
    return accountName;
  } catch (error) {
    throw new Error("Unable to extract accountName with provided information.");
  }
}
// Build the generated protocol-layer client for `url` using the pipeline's
// service-client options.
export function getStorageClientContext(url: string, pipeline: Pipeline): StorageClientContext {
  const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());

  // Override protocol layer's default content-type
  (storageClientContext as any).requestContentType = undefined;

  return storageClientContext;
}
|
-- Top 3 products by number of orders. LEFT JOIN keeps zero-order products;
-- count(o.product_id) ignores the NULLs those unmatched rows produce.
-- NOTE(review): selecting p.name while grouping by p.product_id relies on
-- functional-dependency grouping (works in MySQL/Postgres) — confirm target DB.
SELECT p.name, count(o.product_id) as count
FROM products p
LEFT JOIN orders o ON p.product_id = o.product_id
GROUP BY p.product_id
ORDER BY count DESC
LIMIT 3;
#define VERSION_NUMBER (45.0)
|
SELECT name, score FROM Students ORDER BY score DESC; |
#!/bin/python3
from hackerrank_problems import HackerrankProblems
# Entry point: build the test harness, generate inputs, and run the suite.
if __name__ == '__main__':
    hp = HackerrankProblems()
    # presumably seeds/creates random test parameters — confirm in
    # HackerrankProblems before relying on reproducibility
    hp.createRandomParams()
    hp.launchTests()
#!/bin/bash
# Build a URDF from a model file (xacro or plain URDF) and, in dev mode,
# (re)load it into a running RViz visualization.
source "/opt/ros/$ROS_DISTRO/setup.bash"
set -euo pipefail
source "/opt/urdfdev/lib/log.sh"

model_path=$1
urdf_path=$2
if [ "$URDFDEV_MODE" == "dev" ]; then
  run_status_path=$3
fi

status "Building..."
if [ -v URDFDEV_CUSTOM_BUILD ]; then
  build_exec eval "$URDFDEV_CUSTOM_BUILD"
elif [[ "$model_path" = *.xacro ]]; then
  # shellcheck disable=SC2086 -- the extra options are intentionally word-split
  build_exec rosrun xacro xacro ${URDFDEV_XACRO_ADDITIONAL_OPTIONS:-} "$model_path" -o "$urdf_path"
else
  build_exec cp "$model_path" "$urdf_path"
fi
if [ "$urdfdev_build_exit" != "0" ]; then
  error "Build failed. Check your files."
  # BUG FIX: bare `exit` propagated the status of the (successful) error()
  # call, so failures exited 0.
  exit 1
fi
build_exec check_urdf "$urdf_path"
if [ "$urdfdev_build_exit" != "0" ]; then
  error "URDF check failed. Check your files."
  exit 1
fi

if [ "$URDFDEV_MODE" == "dev" ]; then
  if [ -s "$run_status_path" ]; then
    exec_log xdotool search --name RViz key ctrl+s
    # Wait until saving is done
    info "Waiting RViz to save changes..."
    exec_log xdotool search --sync --name '^[^*]*RViz$'
    info "Restarting visualization components..."
    exec_log eval "rosnode list | grep -e rviz -e joint_state_publisher -e robot_state_publisher | xargs -r rosnode kill"
    # Kill leftover joint_state_publisher processes that survive `rosnode kill`
    ps ax | grep "[j]oint_state_publisher" | awk '{print $1}' | xargs -r kill -9
  fi
  exec_log rosparam set robot_description -t "$urdf_path"
  # Quoting fix: the rospack-derived path may contain spaces
  exec_log rosrun rviz rviz -d "$(rospack find urdf_tutorial)/rviz/urdf.rviz" &
  exec_log rosrun joint_state_publisher joint_state_publisher &
  exec_log rosrun robot_state_publisher state_publisher &
  echo "started" > "$run_status_path"
  # Maximize rviz window
  exec_log xdotool search --sync --name RViz windowsize 100% 100%
  status "Built and started rviz"
else
  status "Built"
fi
|
# Parse a '-'-separated date string into numeric components.
def parse_date(date):
    """Parse a date string into ``(month, day, year)``.

    Accepts either ``MM-DD-YYYY`` (the original format) or ISO-style
    ``YYYY-MM-DD``; a 4-digit first field is treated as the year. This fixes
    the in-file caller, which passes ``'2020-11-03'`` and previously got back
    month=2020, day=11, year=3.

    Args:
        date: Date string such as ``'11-03-2020'`` or ``'2020-11-03'``.

    Returns:
        Tuple of ints ``(month, day, year)``.

    Raises:
        ValueError: if the string does not have exactly three '-' fields.
    """
    parts = date.split('-')
    if len(parts) != 3:
        raise ValueError('expected MM-DD-YYYY or YYYY-MM-DD')
    if len(parts[0]) == 4:
        # ISO order: year first.
        year, month, day = (int(p) for p in parts)
    else:
        month, day, year = (int(p) for p in parts)
    return month, day, year
if __name__ == '__main__':
    date_obj = '2020-11-03'  # ISO-formatted date string (YYYY-MM-DD)
    # NOTE(review): parse_date labels its fields month-day-year; with this
    # ISO input the labels do not match the values — confirm intended format.
    print(parse_date(date_obj))
#!/usr/bin/env bash
# Subsample roughly a fraction `perc` of reads (by read id, column 5) from a
# nanopolish-style TSV, preserving the header line.
shopt -s extglob

abspath_script="$(readlink -f -e "$0")"
script_absdir="$(dirname "$abspath_script")"
script_name="$(basename "$0" .sh)"

# Parse options; abort on malformed input.
TEMP=$(getopt -o hp: -l help,perc: -n "$script_name.sh" -- "$@")
if [ $? -ne 0 ]
then
  echo "Terminating..." >&2
  # BUG FIX: `exit -1` is invalid/non-portable (wraps to 255); use 1.
  exit 1
fi
eval set -- "$TEMP"

# Defaults
perc=0.5

while true
do
  case "$1" in
    -h|--help)
      cat "$script_absdir/${script_name}_help.txt"
      exit
      ;;
    -p|--perc)
      perc="$2"
      shift 2
      ;;
    --)
      shift
      break
      ;;
    *)
      echo "$script_name.sh:Internal error!" >&2
      exit 1
      ;;
  esac
done

# Vars (quoted: input paths may contain spaces)
nanofile="$1"
outdir="$(dirname "$(realpath "$nanofile")")"
# NOTE(review): if $nanofile includes a directory component the directory is
# duplicated in $outfile — preserved as original behavior; confirm intended.
outfile="${outdir}/${nanofile%.tsv}_perc_${perc}.tsv"

# Cat header
head -n 1 "$nanofile" > "$outfile"

# Add reads: sort/uniq the read ids, keep each with probability $perc, then
# pull all matching rows from the input.
cut -f 5 "$nanofile" | awk 'NR>1{print $0| "sort -r"}' | uniq | awk -v PERC="$perc" 'BEGIN {srand()} !/^$/ { if (rand() <= PERC) print $0}' | grep -f /dev/stdin "$nanofile" >> "$outfile"
|
const express = require('express');
const WebSocket = require('ws');
const uuidv1 = require('uuid/v1');
// Set the port to 3001
const PORT = 3001;

// Create a new express server.
// BUG FIX: listen's signature is (port[, host[, backlog]][, callback]);
// the old call passed 'localhost' in the backlog position, which is silently
// ignored. Bind to 0.0.0.0 and pass the callback last.
const server = express()
  // Make the express server serve static assets (html, javascript, css) from the /public folder
  .use(express.static('public'))
  .listen(PORT, '0.0.0.0', () => console.log(`Listening on ${ PORT }`));
// Create the WebSockets server attached to the HTTP server above.
const wss = new WebSocket.Server({ server });

// Send `data` to every client whose socket is still open.
wss.broadcast = function broadcast(data) {
  for (const client of wss.clients) {
    if (client.readyState === WebSocket.OPEN) {
      client.send(data);
    }
  }
};
// Set up a callback that will run when a client connects to the server.
// When a client connects they are assigned a socket, represented by
// the ws parameter in the callback.
wss.on('connection', (ws, req) => {
  console.log('Client connected');

  // Tell every connected client (including the new one) the current count.
  wss.clients.forEach(client => {
    const clientCount = {
      type: 'clientCount',
      payload: {
        count: wss.clients.size
      }
    };
    client.send(JSON.stringify(clientCount));
  });

  ws.on('message', function incoming(e) {
    // Cleanup: the payload was parsed twice into two objects (and an unused
    // `id` was computed); parse once — the branches are exclusive on `type`.
    const message = JSON.parse(e);
    if (message.type === 'postNotification') {
      message.type = 'incomingNotification';
      wss.broadcast(JSON.stringify(message));
    }
    if (message.type === 'postMessage') {
      message.type = 'incomingMessage';
      message.id = uuidv1();
      wss.broadcast(JSON.stringify(message));
    }
  });

  // Set up a callback for when a client closes the socket. This usually means they closed their browser.
  ws.on('close', () => console.log('Client disconnected'));
});
<filename>webpack/entry.js
import React, { Component } from 'react';
import { render } from 'react-dom';
import { createStore, applyMiddleware } from 'redux';
import { Provider } from 'react-redux';
import createSagaMiddleware from 'redux-saga';
import { all, fork } from 'redux-saga/effects';
import App from './components/App.jsx';
import rootReducer from './reducers/index.js';
import loginSaga from './actions/sagas.js';
import driversListSaga from './actions/sagaDriversList.js';
import ridersListSaga from './actions/sagaRidersList.js';
import matchListSaga from './actions/sagaMatchList.js';
import matchOtherDriverListSaga from './actions/sagaMatchOtherDriverList.js';
import uploadSaga from './actions/sagaUpload.js'
console.log('entry.js loaded');
// Wire redux-saga middleware into the store.
const sagaMiddleware = createSagaMiddleware();
const store = createStore(rootReducer, applyMiddleware(sagaMiddleware));
// Root saga: fork every feature saga so they all run concurrently.
function* allSagas() {
  yield all([
    fork(loginSaga),
    fork(driversListSaga),
    fork(ridersListSaga),
    fork(matchListSaga),
    fork(matchOtherDriverListSaga),
    fork(uploadSaga)
  ]);
}
sagaMiddleware.run(allSagas);
// Mount the app under the redux Provider.
render(
  <Provider store={store}>
    <App />
  </Provider>,
  document.getElementById('root')
);
|
// NOTE(review): fragment — relies on `questionIndex`, `answer`, and `result`
// from an enclosing scope that is not visible here. It appears to map a
// survey answer to a CO2e value using other answers' multipliers; confirm
// the answers[] indices against the questionnaire schema.
switch (questionIndex) {
  case 5: {
    answer.co2e = result.answers[2].answer.multiplier * answer.multiplier;
    return answer;
  }
  case 7: {
    answer.co2e = result.answers[3].answer.multiplier * answer.multiplier;
    return answer;
  }
  case 9: {
    // index 125: zero-emission option; 126: 10% of answer 4's multiplier.
    if (answer.index === 125) {
      answer.co2e = 0;
    } else if (answer.index === 126) {
      answer.co2e = result.answers[4].answer.multiplier * 0.1;
    }
    return answer;
  }
  case 10: {
    // NOTE(review): division here, unlike the multiplications above — confirm.
    answer.co2e = result.answers[8].answer.multiplier / answer.multiplier;
    return answer;
  }
  case 13: {
    if (answer.index === 219) {
      // Handle specific condition for index 219
    }
    // Handle other conditions for case 13
    return answer;
  }
  // Handle other cases as needed
}
<gh_stars>0
package bootcamp.mercado.produto.caracteristica;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Read-model (DTO) exposing a product characteristic's name and description
 * for the product-detail response.
 */
public class ProdutoDetalheCaracteristicaResponse {
    private String nome;
    private String descricao;

    // Copies the exposed fields from the domain entity.
    public ProdutoDetalheCaracteristicaResponse(Caracteristica caracteristica) {
        this.nome = caracteristica.getNome();
        this.descricao = caracteristica.getDescricao();
    }

    public String getNome() {
        return nome;
    }

    public String getDescricao() {
        return descricao;
    }

    /** Maps a list of entities to response DTOs, preserving order. */
    public static List<ProdutoDetalheCaracteristicaResponse> fromList(
            List<Caracteristica> caracteristicas) {
        return caracteristicas.stream()
                .map(ProdutoDetalheCaracteristicaResponse::new)
                .collect(Collectors.toList());
    }
}
|
#!/bin/bash
# Build a minimal Ubuntu 18.04 (bionic) "debugfs" rootfs with debootstrap and
# pack it into a tar.gz flist. Requires root and network access.
set -e

ROOTBUNTU="/tmp/ubuntu-bionic"
TARGET="/tmp/ubuntu-debugfs.tar.gz"

# preparing target (the :? guard aborts instead of expanding to `rm -rf /*`)
mkdir -p "${ROOTBUNTU}"
rm -rf "${ROOTBUNTU:?}"/*
rm -rf "${TARGET}"

echo "Installing image into: ${ROOTBUNTU}"
echo "Exporting flist to: ${TARGET}"
echo ""
echo "Bootstrapping the base image..."

# installing system
# BUG FIX: --include takes ONE comma-separated argument. A stray space before
# a line continuation previously split the list, shifting the suite/target
# positional arguments. The continuation lines below start at column 0 so each
# joins the same word.
debootstrap \
    --arch=amd64 \
    --components=main,restricted,universe,multiverse \
    --include curl,ca-certificates,tcpdump,ethtool,pciutils,strace,lsof,htop,\
binutils,bzip2,coreutils,cpio,curl,e2fsprogs,file,findutils,htop,iproute2,\
net-tools,netcat-openbsd,procps,strace,tcpdump,vim,lsscsi,btrfs-tools,bmon,\
unzip,alien,libncurses5 \
    bionic "${ROOTBUNTU}" \
    http://archive.ubuntu.com/ubuntu/

echo "Debugfs base system installed"

files=$(find "${ROOTBUNTU}" | wc -l)
rootsize=$(du -sh "${ROOTBUNTU}")
echo "${rootsize}, ${files} files installed"

echo "Customizing settings..."
touch "${ROOTBUNTU}/root/.sudo_as_admin_successful"
echo 'export PS1="(debugfs) $PS1"' >> "${ROOTBUNTU}/root/.bashrc"
echo 'cat /root/.debugfs' >> "${ROOTBUNTU}/root/.bashrc"
echo "You are now on debugfs environment" > "${ROOTBUNTU}/root/.debugfs"
echo "Don't forget to 'apt-get update' before installing new packages" >> "${ROOTBUNTU}/root/.debugfs"
echo "" >> "${ROOTBUNTU}/root/.debugfs"

echo "Cleaning installation..."
# cleaning documentation and not needed files (copyright kept for licensing)
find "${ROOTBUNTU}/usr/share/doc" -type f ! -name 'copyright' | xargs rm -f
find "${ROOTBUNTU}/usr/share/locale" -mindepth 1 -maxdepth 1 ! -name 'en' | xargs rm -rf
rm -rf "${ROOTBUNTU}/usr/share/info"
rm -rf "${ROOTBUNTU}/usr/share/man"
rm -rf "${ROOTBUNTU}/usr/share/lintian"
rm -rf "${ROOTBUNTU}/var/cache/apt/archives/"*deb
rm -rf "${ROOTBUNTU}/var/lib/apt/lists/"*_Packages

files=$(find "${ROOTBUNTU}" | wc -l)
rootsize=$(du -sh "${ROOTBUNTU}")
echo "${rootsize}, ${files} files installed"

echo "Archiving..."
pushd "${ROOTBUNTU}"
tar -czf "${TARGET}" *
popd

ls -alh "${TARGET}"
echo "Debugfs flist is ready."
|
<filename>src/index.js
// Per-type JSON-schema generators. Each receives the sample data and an
// options object carrying the schema `$id` (and optionally a `title`).
export const specFor = {
  object: function (data, { $id, title }) {
    const keys = Object.keys(data || {});
    const schema = {
      title: title || 'An object value',
      description: '',
      // Every observed key is marked required — inferred from one sample.
      required: keys,
      properties: {}
    };
    keys.forEach(key => {
      // Recurse into each property via the sibling `spec` function.
      schema.properties[key] = spec(data[key], { $id: `${$id}/properties/${key}` });
    });
    return schema;
  },
  array: function (data, { $id }) {
    const schema = {
      title: 'An array of items',
      description: ''
    };
    if (data.length) {
      // Only the first element is inspected to infer the item schema.
      schema.items = spec(data[0], { $id: `${$id}/items` });
    }
    return schema;
  },
  boolean: function (data) {
    return {
      title: 'A boolean value',
      description: '',
      default: false,
      examples: [data]
    };
  },
  integer: function (data) {
    return {
      title: 'An integer value',
      description: '',
      default: 0,
      examples: [data]
    };
  },
  number: function (data) {
    return {
      title: 'A number value',
      description: '',
      default: 0,
      examples: [data]
    };
  },
  string: function (data) {
    return {
      title: 'A string value',
      description: '',
      default: '',
      pattern: '^(.*)$',
      examples: [data]
    };
  }
};
// Infer a full JSON schema document from a sample value, attaching the
// standard top-level metadata ($schema, $id, empty definitions).
export function infer (data, options = {}) {
  const rootSchema = spec(data, { $id: '#', title: options.title });
  rootSchema.definitions = {};
  rootSchema.$schema = options.$schema || 'http://json-schema.org/draft-07/schema#';
  rootSchema.$id = options.$id || 'http://example.org/root.json#';
  return rootSchema;
}
// Build the schema node for a single value by dispatching on its inferred
// type; throws when no generator exists for that type.
export function spec (data, options = {}) {
  const { $id } = options;
  const type = typeOf(data);
  const generate = specFor[type];
  if (!generate) throw new Error(`implementation for ${type} not found`);
  const schema = generate(data, options);
  return Object.assign(schema, { $id, type });
}
// Classify a JS value into a JSON-schema type name: 'array', 'string',
// 'integer', or whatever `typeof` reports ('number', 'boolean',
// 'object', ...). Integral numbers are reported as 'integer'.
export function typeOf (obj) {
  if (obj instanceof Array) return 'array';
  const primitive = typeof obj;
  if (primitive === 'string' || obj instanceof String) return 'string';
  return Number.isInteger(obj) ? 'integer' : primitive;
}
|
#!/bin/bash
### USAGE: dump_tsv.sh [-h|--help] [--dry-run] [--focus[=PROGRAM]]
### [-o|--output[=]OUTPUT_PATH] INPUT_PATH
###
### dump a tsv of information_schema columns to OUTPUT_PATH
###
### ARGS:
### -h|--help print this message and exit
### -o=OUTPUT| where to write the TSV (default: /dev/stdout)
### --output=OUTPUT
### --focus[=PROGRAM] run `sh -c "$PROGRAM '$OUTPUT_PATH'"`. (PROGRAM
### defaults to ${EDITOR} if set, else code/vi/ed)
### --dry-run validate and print input options
# shellcheck source=./common.sh
. "${BASH_SOURCE[0]%/*}/common.sh"
# Print the user's preferred editor command on stdout.
# Precedence: $EDITOR when set and non-empty, otherwise the first of
# editor/vi/ed found on PATH (exit status follows `command -v` then).
lookup_preferred_editor() {
  if [ -n "${EDITOR:-}" ]; then
    echo "$EDITOR"
    return 0
  fi
  command -v editor || command -v vi || command -v ed
}
# sqlite3 dot-command/SQL script piped into sqlite3 by main(): emits the
# `cols` view as tab-separated values with a header row.
# NOTE(review): `.mode csv` followed by a tab `.separator` relies on
# sqlite3 honoring a separator override after the mode switch; newer
# sqlite3 builds provide `.mode tabs` for this — confirm against the
# sqlite3 version this targets.
cmd="
-- configure output to be tsvs wth headers
.headers on
.mode csv
.separator '$(printf '\t')'
-- ensure the desired view exists
drop view if exists cols;
.read ./pkg/schema/views.sql
-- write to stdout
select * from cols;
"
# Parse arguments, dump the `cols` view of INPUT_PATH as a TSV to the
# chosen output, and optionally open the result in an editor.
# Relies on usage()/fail() from common.sh and the $cmd script above.
main() {
  set -euo pipefail
  local input_path=""
  local output_path=""
  local should_focus=""
  local _editor=""
  _editor="$(lookup_preferred_editor)"
  local dry_run=false
  # Use $# so an empty-string argument doesn't silently end parsing.
  while test "$#" -gt 0; do
    case "$1" in
      --dry-run)
        shift
        dry_run=true
        ;;
      -h | --help) usage && exit 0 ;;
      -o=* | --output=*)
        output_path="${1##*=}"
        shift
        ;;
      -o | --output)
        # Remember the option name before shifting so the error message
        # can report which option is missing its value.
        local opt="$1"
        shift
        if test -z "${1:-}"; then fail "option ${opt} requires a value"; fi
        case "$1" in
          -*) fail "invalid argument: $1" ;;
          *) output_path="$1" ;;
        esac
        shift
        ;;
      --focus=*)
        should_focus=true
        _editor="${1##*=}"
        shift
        ;;
      --focus)
        should_focus=true
        shift
        ;;
      -*) fail "invalid argument: $1" ;;
      *)
        if test -n "$input_path"; then
          # A positional argument was already captured; only one is allowed.
          fail "unexpected extra input-path argument: $1"
        else
          input_path="$1"
          shift
        fi
        ;;
    esac
  done
  # validate input
  if ! test -f "$input_path"; then
    fail "input path '$input_path' is not a file"
  elif ! (head -1 "$input_path" &>/dev/null); then
    fail "input path '$input_path' can't be read"
  fi
  # validate output: --focus needs a real file to reopen
  if test -z "$output_path"; then
    if test "$should_focus" = true; then
      fail "can't focus stdout"
    else
      output_path="/dev/stdout"
    fi
  fi
  if test "$dry_run" = "true"; then
    echo "    output_path=$output_path"
    echo "    input_path=$input_path"
    echo "    should_focus=$should_focus"
    echo "    _editor=$_editor"
    exit 0
  fi
  echo "$cmd" | sqlite3 "$input_path" >"$output_path"
  if test "$should_focus" = true; then
    # Quote the path as documented in the usage header:
    #   sh -c "$PROGRAM '$OUTPUT_PATH'"
    sh -c "$_editor '$output_path'"
  fi
}
# Run main only when this script is executed directly, not when sourced.
if test "${BASH_SOURCE[0]}" = "$0"; then main "$@"; fi
|
def inches_to_centimeters(inch):
    """Convert a length in inches to centimeters (1 in = 2.54 cm)."""
    return inch * 2.54
# Simple CLI driver: read a length in inches from stdin, convert, and
# print the result. float() raises ValueError on non-numeric input.
inch = float(input("Enter length in inches: "))
centimeter = inches_to_centimeters(inch)
print(f"{inch} inches is equal to {centimeter} centimeters")
# (C) Datadog, Inc. 2018
# (C) <NAME> <<EMAIL>> 2013
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
# stdlib
import time
import re
from collections import defaultdict
# 3rd party
import requests
# project
from datadog_checks.checks import AgentCheck
from datadog_checks.utils.headers import headers
from datadog_checks.utils.containers import hash_mutable
# Constants
COUCHBASE_STATS_PATH = '/pools/default'   # REST entry point for cluster/bucket/node stats
COUCHBASE_VITALS_PATH = '/admin/vitals'   # query-service vitals endpoint (appended to query_monitoring_url)
DEFAULT_TIMEOUT = 10                      # HTTP request timeout in seconds
class Couchbase(AgentCheck):
    """
    Extracts stats from Couchbase via its REST API
    http://docs.couchbase.com/couchbase-manual-2.0/#using-the-rest-api
    """

    # Service Checks
    SERVICE_CHECK_NAME = 'couchbase.can_connect'
    NODE_CLUSTER_SERVICE_CHECK_NAME = 'couchbase.by_node.cluster_membership'
    NODE_HEALTH_SERVICE_CHECK_NAME = 'couchbase.by_node.health'

    # Maps the REST API's clusterMembership values to service-check
    # statuses; the None key covers nodes that report no membership field.
    NODE_MEMBERSHIP_TRANSLATION = {
        'active': AgentCheck.OK,
        'inactiveAdded': AgentCheck.WARNING,
        'activeFailed': AgentCheck.CRITICAL,
        None: AgentCheck.UNKNOWN,
    }

    # Maps the REST API's node 'status' values to service-check statuses.
    NODE_HEALTH_TRANSLATION = {
        'healthy': AgentCheck.OK,
        'warmup': AgentCheck.OK,
        'unhealthy': AgentCheck.CRITICAL,
        None: AgentCheck.UNKNOWN
    }

    # Events
    SOURCE_TYPE_NAME = 'couchbase'

    # Selected metrics to send amongst all the bucket stats, after name normalization
    BUCKET_STATS = set([
        "avg_bg_wait_time",
        "avg_disk_commit_time",
        "avg_disk_update_time",
        "bg_wait_total",
        "bytes_read",
        "bytes_written",
        "cas_badval",
        "cas_hits",
        "cas_misses",
        "cmd_get",
        "cmd_set",
        "couch_docs_actual_disk_size",
        "couch_docs_data_size",
        "couch_docs_disk_size",
        "couch_docs_fragmentation",
        "couch_spatial_data_size",
        "couch_spatial_disk_size",
        "couch_spatial_ops",
        "couch_total_disk_size",
        "couch_views_data_size",
        "couch_views_disk_size",
        "couch_views_fragmentation",
        "couch_views_ops",
        "cpu_idle_ms",
        "cpu_utilization_rate",
        "curr_connections",
        "curr_items_tot",
        "curr_items",
        "decr_hits",
        "decr_misses",
        "delete_hits",
        "delete_misses",
        "disk_commit_count",
        "disk_update_count",
        "disk_write_queue",
        "ep_bg_fetched",
        "ep_cache_miss_rate",
        "ep_cache_miss_ratio",
        "ep_dcp_fts_backoff",
        "ep_dcp_fts_count",
        "ep_dcp_fts_items_remaining",
        "ep_dcp_fts_items_sent",
        "ep_dcp_fts_producer_count",
        "ep_dcp_fts_total_bytes",
        "ep_dcp_2i_backoff",
        "ep_dcp_2i_count",
        "ep_dcp_2i_items_remaining",
        "ep_dcp_2i_items_sent",
        "ep_dcp_2i_producer_count",
        "ep_dcp_2i_total_bytes",
        "ep_dcp_other_backoff",
        "ep_dcp_other_count",
        "ep_dcp_other_items_remaining",
        "ep_dcp_other_items_sent",
        "ep_dcp_other_producer_count",
        "ep_dcp_other_total_bytes",
        "ep_dcp_replica_backoff",
        "ep_dcp_replica_count",
        "ep_dcp_replica_items_remaining",
        "ep_dcp_replica_items_sent",
        "ep_dcp_replica_producer_count",
        "ep_dcp_replica_total_bytes",
        "ep_dcp_views_backoff",
        "ep_dcp_views_count",
        "ep_dcp_views_items_remaining",
        "ep_dcp_views_items_sent",
        "ep_dcp_views_producer_count",
        "ep_dcp_views_total_bytes",
        "ep_dcp_xdcr_backoff",
        "ep_dcp_xdcr_count",
        "ep_dcp_xdcr_items_remaining",
        "ep_dcp_xdcr_items_sent",
        "ep_dcp_xdcr_producer_count",
        "ep_dcp_xdcr_total_bytes",
        "ep_diskqueue_drain",
        "ep_diskqueue_fill",
        "ep_diskqueue_items",
        "ep_flusher_todo",
        "ep_item_commit_failed",
        "ep_kv_size",
        "ep_max_size",
        "ep_mem_high_wat",
        "ep_mem_low_wat",
        "ep_meta_data_memory",
        "ep_num_non_resident",
        "ep_num_ops_del_meta",
        "ep_num_ops_del_ret_meta",
        "ep_num_ops_get_meta",
        "ep_num_ops_set_meta",
        "ep_num_ops_set_ret_meta",
        "ep_num_value_ejects",
        "ep_oom_errors",
        "ep_ops_create",
        "ep_ops_update",
        "ep_overhead",
        "ep_queue_size",
        "ep_resident_items_rate",
        "ep_tap_replica_queue_drain",
        "ep_tap_total_queue_drain",
        "ep_tap_total_queue_fill",
        "ep_tap_total_total_backlog_size",
        "ep_tmp_oom_errors",
        "ep_vb_total",
        "evictions",
        "get_hits",
        "get_misses",
        "hibernated_requests",
        "hibernated_waked",
        "hit_ratio",
        "incr_hits",
        "incr_misses",
        "mem_actual_free",
        "mem_actual_used",
        "mem_free",
        "mem_total",
        "mem_used",
        "mem_used_sys",
        "misses",
        "ops",
        "page_faults",
        "replication_docs_rep_queue",
        "replication_meta_latency_aggr",
        "rest_requests",
        "swap_total",
        "swap_used",
        "vb_active_eject",
        "vb_active_itm_memory",
        "vb_active_meta_data_memory",
        "vb_active_num_non_resident",
        "vb_active_num",
        "vb_active_ops_create",
        "vb_active_ops_update",
        "vb_active_queue_age",
        "vb_active_queue_drain",
        "vb_active_queue_fill",
        "vb_active_queue_size",
        "vb_active_resident_items_ratio",
        "vb_avg_active_queue_age",
        "vb_avg_pending_queue_age",
        "vb_avg_replica_queue_age",
        "vb_avg_total_queue_age",
        "vb_pending_curr_items",
        "vb_pending_eject",
        "vb_pending_itm_memory",
        "vb_pending_meta_data_memory",
        "vb_pending_num_non_resident",
        "vb_pending_num",
        "vb_pending_ops_create",
        "vb_pending_ops_update",
        "vb_pending_queue_age",
        "vb_pending_queue_drain",
        "vb_pending_queue_fill",
        "vb_pending_queue_size",
        "vb_pending_resident_items_ratio",
        "vb_replica_curr_items",
        "vb_replica_eject",
        "vb_replica_itm_memory",
        "vb_replica_meta_data_memory",
        "vb_replica_num_non_resident",
        "vb_replica_num",
        "vb_replica_ops_create",
        "vb_replica_ops_update",
        "vb_replica_queue_age",
        "vb_replica_queue_drain",
        "vb_replica_queue_fill",
        "vb_replica_queue_size",
        "vb_replica_resident_items_ratio",
        "vb_total_queue_age",
        "xdc_ops",
    ])

    # Selected metrics of the query monitoring API
    # See https://developer.couchbase.com/documentation/server/4.5/tools/query-monitoring.html
    QUERY_STATS = set([
        'cores',
        'cpu_sys_percent',
        'cpu_user_percent',
        'gc_num',
        'gc_pause_percent',
        'gc_pause_time',
        'memory_system',
        'memory_total',
        'memory_usage',
        'request_active_count',
        'request_completed_count',
        'request_per_sec_15min',
        'request_per_sec_1min',
        'request_per_sec_5min',
        'request_prepared_percent',
        'request_time_80percentile',
        'request_time_95percentile',
        'request_time_99percentile',
        'request_time_mean',
        'request_time_median',
        'total_threads',
    ])

    # Divisors converting a duration in the given unit to seconds.
    TO_SECONDS = {
        'ns': 1e9,
        'us': 1e6,
        'ms': 1e3,
        's': 1,
    }

    # Matches '<number><unit>' duration strings such as '3.45ms'.
    # NOTE(review): non-raw string literal; '\d'/'\D' work but are invalid
    # escape sequences in newer Pythons -- consider a raw string.
    seconds_value_pattern = re.compile('(\d+(\.\d+)?)(\D+)')

    class CouchbaseInstanceState(object):
        # Per-instance memory of the last observed rebalance status, used
        # by _create_metrics to emit events only on status transitions.
        def __init__(self):
            self.previous_status = None

    def __init__(self, name, init_config, agentConfig, instances=None):
        AgentCheck.__init__(self, name, init_config, agentConfig, instances)
        # Keep track of all instances
        self._instance_states = defaultdict(lambda: self.CouchbaseInstanceState())

    def _create_metrics(self, data, instance_state, server, tags=None):
        """Turn the dict built by get_data() into gauges, service checks
        and (for rebalance status transitions) events."""
        # Get storage metrics
        storage_totals = data['stats']['storageTotals']
        for key, storage_type in storage_totals.items():
            for metric_name, val in storage_type.items():
                if val is not None:
                    metric_name = 'couchbase.{}.{}'.format(key, self.camel_case_to_joined_lower(metric_name))
                    self.gauge(metric_name, val, tags=tags)
        # Get bucket metrics
        for bucket_name, bucket_stats in data['buckets'].items():
            metric_tags = list(tags)
            metric_tags.append('bucket:{}'.format(bucket_name))
            for metric_name, val in bucket_stats.items():
                if val is not None:
                    norm_metric_name = self.camel_case_to_joined_lower(metric_name)
                    if norm_metric_name in self.BUCKET_STATS:
                        full_metric_name = 'couchbase.by_bucket.{}'.format(norm_metric_name)
                        # Bucket samples are lists; only the first sample is reported.
                        self.gauge(full_metric_name, val[0], tags=metric_tags, device_name=bucket_name)
        # Get node metrics
        for node_name, node_stats in data['nodes'].items():
            metric_tags = list(tags)
            metric_tags.append('node:{}'.format(node_name))
            for metric_name, val in node_stats['interestingStats'].items():
                if val is not None:
                    metric_name = 'couchbase.by_node.{}'.format(self.camel_case_to_joined_lower(metric_name))
                    self.gauge(metric_name, val, tags=metric_tags, device_name=node_name)
            # Get cluster health data
            self._process_cluster_health_data(node_name, node_stats, tags)
        # Get query metrics
        for metric_name, val in data['query'].items():
            if val is not None:
                norm_metric_name = self.camel_case_to_joined_lower(metric_name)
                if norm_metric_name in self.QUERY_STATS:
                    # for query times, the unit is part of the value, we need to extract it
                    # NOTE(review): basestring exists only on Python 2; this
                    # line raises NameError on Python 3 -- confirm runtime.
                    if isinstance(val, basestring):
                        val = self.extract_seconds_value(val)
                    full_metric_name = 'couchbase.query.{}'.format(self.camel_case_to_joined_lower(norm_metric_name))
                    self.gauge(full_metric_name, val, tags=tags)
        # Get tasks, we currently only care about 'rebalance' tasks
        rebalance_status, rebalance_msg = data['tasks'].get('rebalance', (None, None))
        # Only fire an event when the state has changed
        if rebalance_status is not None and instance_state.previous_status != rebalance_status:
            rebalance_event = None
            # If we get an error, we create an error event with the msg we receive
            if rebalance_status == 'error':
                msg_title = 'Encountered an error while rebalancing'
                msg = rebalance_msg
                rebalance_event = self._create_event('error', msg_title, msg, server, tags=tags)
            # We only want to fire a 'completion' of a rebalance so make sure we're not firing an event on first run
            elif rebalance_status == 'notRunning' and instance_state.previous_status is not None:
                msg_title = 'Stopped rebalancing'
                msg = 'stopped rebalancing.'
                rebalance_event = self._create_event('info', msg_title, msg, server, tags=tags)
            # If a rebalance task is running, fire an event. This will also fire an event if a rebalance task was
            # already running when the check first runs.
            elif rebalance_status == 'gracefulFailover':
                msg_title = 'Failing over gracefully'
                msg = 'is failing over gracefully.'
                rebalance_event = self._create_event('info', msg_title, msg, server, tags=tags)
            elif rebalance_status == 'rebalance':
                msg_title = 'Rebalancing'
                msg = 'is rebalancing.'
                rebalance_event = self._create_event('info', msg_title, msg, server, tags=tags)
            # Send the event
            if rebalance_event is not None:
                self.event(rebalance_event)
            # Update the status of this instance
            instance_state.previous_status = rebalance_status

    def _process_cluster_health_data(self, node_name, node_stats, tags):
        """
        Process and send cluster health data (i.e. cluster membership status and node health)
        """
        # Tags for service check
        cluster_health_tags = list(tags) + ['node:{}'.format(node_name)]
        # Get the membership status of the node
        cluster_membership = node_stats.get('clusterMembership', None)
        membership_status = self.NODE_MEMBERSHIP_TRANSLATION.get(cluster_membership, AgentCheck.UNKNOWN)
        self.service_check(self.NODE_CLUSTER_SERVICE_CHECK_NAME, membership_status, tags=cluster_health_tags)
        # Get the health status of the node
        health = node_stats.get('status', None)
        health_status = self.NODE_HEALTH_TRANSLATION.get(health, AgentCheck.UNKNOWN)
        self.service_check(self.NODE_HEALTH_SERVICE_CHECK_NAME, health_status, tags=cluster_health_tags)

    def _create_event(self, alert_type, msg_title, msg, server, tags=None):
        """
        Create an event object
        """
        msg_title = 'Couchbase {}: {}'.format(server, msg_title)
        msg = 'Couchbase instance {} {}'.format(server, msg)
        return {
            'timestamp': int(time.time()),
            'event_type': 'couchbase_rebalance',
            'msg_text': msg,
            'msg_title': msg_title,
            'alert_type': alert_type,
            'source_type_name': self.SOURCE_TYPE_NAME,
            'aggregation_key': server,
            'tags': tags
        }

    def _get_stats(self, url, instance):
        """
        Hit a given URL and return the parsed json.
        """
        self.log.debug('Fetching Couchbase stats at url: {}'.format(url))
        timeout = float(instance.get('timeout', DEFAULT_TIMEOUT))
        # Basic auth is used only when both user and password are configured.
        auth = None
        if 'user' in instance and 'password' in instance:
            auth = (instance['user'], instance['password'])
        r = requests.get(url, auth=auth, headers=headers(self.agentConfig), timeout=timeout)
        r.raise_for_status()
        return r.json()

    def check(self, instance):
        """Agent entry point: fetch data for one configured instance and
        convert it into metrics/events/service checks."""
        instance_state = self._instance_states[hash_mutable(instance)]
        server = instance.get('server', None)
        if server is None:
            raise Exception("The server must be specified")
        tags = instance.get('tags', [])
        # Clean up tags in case there was a None entry in the instance
        # e.g. if the yaml contains tags: but no actual tags
        if tags is None:
            tags = []
        else:
            tags = list(set(tags))
        tags.append('instance:{}'.format(server))
        data = self.get_data(server, instance)
        self._create_metrics(data, instance_state, server, tags=list(set(tags)))

    def get_data(self, server, instance):
        """Collect the raw stats, buckets, nodes, query vitals and tasks
        from the Couchbase REST API into a single dict consumed by
        _create_metrics(). Raises on failure to reach the main endpoint."""
        # The dictionary to be returned.
        couchbase = {
            'stats': None,
            'buckets': {},
            'nodes': {},
            'query': {},
            'tasks': {}
        }
        # build couchbase stats entry point
        url = '{}{}'.format(server, COUCHBASE_STATS_PATH)
        # Fetch initial stats and capture a service check based on response.
        service_check_tags = instance.get('tags', [])
        if service_check_tags is None:
            service_check_tags = []
        else:
            service_check_tags = list(set(service_check_tags))
        service_check_tags.append('instance:{}'.format(server))
        try:
            overall_stats = self._get_stats(url, instance)
            # No overall stats? bail out now
            if overall_stats is None:
                raise Exception("No data returned from couchbase endpoint: {}".format(url))
        except requests.exceptions.HTTPError as e:
            # NOTE(review): e.message is Python 2 only; on Python 3 this
            # raises AttributeError -- confirm the targeted runtime.
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                               tags=service_check_tags, message=str(e.message))
            raise
        except Exception as e:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, tags=service_check_tags, message=str(e))
            raise
        else:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=service_check_tags)
        couchbase['stats'] = overall_stats
        nodes = overall_stats['nodes']
        # Next, get all the nodes
        if nodes is not None:
            for node in nodes:
                couchbase['nodes'][node['hostname']] = node
        # Next, get all buckets .
        endpoint = overall_stats['buckets']['uri']
        url = '{}{}'.format(server, endpoint)
        buckets = self._get_stats(url, instance)
        if buckets is not None:
            for bucket in buckets:
                bucket_name = bucket['name']
                # Fetch URI for the stats bucket
                endpoint = bucket['stats']['uri']
                url = '{}{}'.format(server, endpoint)
                try:
                    bucket_stats = self._get_stats(url, instance)
                except requests.exceptions.HTTPError:
                    # Fall back to the per-node bucket stats path.
                    url_backup = '{}/pools/nodes/buckets/{}/stats'.format(server, bucket_name)
                    bucket_stats = self._get_stats(url_backup, instance)
                bucket_samples = bucket_stats['op']['samples']
                if bucket_samples is not None:
                    couchbase['buckets'][bucket['name']] = bucket_samples
        # Next, get the query monitoring data
        query_monitoring_url = instance.get('query_monitoring_url', None)
        if query_monitoring_url is not None:
            try:
                url = '{}{}'.format(query_monitoring_url, COUCHBASE_VITALS_PATH)
                query = self._get_stats(url, instance)
                if query is not None:
                    couchbase['query'] = query
            except requests.exceptions.HTTPError:
                self.log.error("Error accessing the endpoint {}, make sure you're running at least "
                               "couchbase 4.5 to collect the query monitoring metrics".format(url))
        # Next, get all the tasks
        tasks_url = '{}{}/tasks'.format(server, COUCHBASE_STATS_PATH)
        try:
            tasks = self._get_stats(tasks_url, instance)
            for task in tasks:
                task_type = task['type']
                # We only care about rebalance statuses
                if task_type != 'rebalance':
                    continue
                # Format the status so it's easier to understand
                if 'errorMessage' in task:
                    couchbase['tasks'][task_type] = ('error', task['errorMessage'])
                elif task['status'] == 'notRunning':
                    couchbase['tasks'][task_type] = (task['status'], None)
                # If the status is 'running', we want to differentiate between a regular rebalance and a graceful
                # failover rebalance, so we use the subtype
                elif task['status'] == 'running':
                    couchbase['tasks'][task_type] = (task['subtype'], None)
                # Should only be 1 rebalance
                break
        except requests.exceptions.HTTPError:
            # NOTE(review): this logs the stale `url` variable; the endpoint
            # that actually failed here is tasks_url.
            self.log.error("Error accessing the endpoint {}".format(url))
        return couchbase

    # Takes a camelCased variable and returns a joined_lower equivalent.
    # Returns input if non-camelCase variable is detected.
    def camel_case_to_joined_lower(self, variable):
        # replace non-word with _
        converted_variable = re.sub('\W+', '_', variable)
        # insert _ in front of capital letters and lowercase the string
        converted_variable = re.sub('([A-Z])', '_\g<1>', converted_variable).lower()
        # remove duplicate _
        converted_variable = re.sub('_+', '_', converted_variable)
        # handle special case of starting/ending underscores
        converted_variable = re.sub('^_|_$', '', converted_variable)
        return converted_variable

    # Takes a string with a time and a unit (e.g '3.45ms') and returns the value in seconds
    def extract_seconds_value(self, value):
        # When couchbase is set up, most of values are equal to 0 and are exposed as "0" and not "0s"
        # This statement is preventing values to be searched by the pattern (and break things)
        if value == '0':
            return 0
        match = self.seconds_value_pattern.search(value)
        val, unit = match.group(1, 3)
        # They use the 'micro' symbol for microseconds so there is an encoding problem
        # so let's assume it's microseconds if we don't find the key in unit
        if unit not in self.TO_SECONDS:
            unit = 'us'
        return float(val) / self.TO_SECONDS[unit]
|
require 'rails_helper'
RSpec.describe ApplicationChoice, type: :model do
  describe 'auditing', with_audited: true do
    it 'creates audit entries' do
      application_choice = create :application_choice, status: 'unsubmitted'
      expect(application_choice.audits.count).to eq 1
      application_choice.update!(status: 'awaiting_provider_decision')
      expect(application_choice.audits.count).to eq 2
    end

    it 'creates an associated object in each audit record' do
      application_choice = create :application_choice
      expect(application_choice.audits.last.associated).to eq application_choice.application_form
    end

    it 'audit record can be attributed to a candidate' do
      candidate = create :candidate
      application_choice = Audited.audit_class.as_user(candidate) do
        create :application_choice
      end
      expect(application_choice.audits.last.user).to eq candidate
    end
  end

  describe '#course_full?' do
    context 'with 3 options all full' do
      it 'returns true' do
        course = create(:course)
        create_list(:course_option, 3, vacancy_status: :no_vacancies, course: course)
        application_choice = create(:application_choice, course_option: course.course_options.first)
        expect(application_choice.course_full?).to be true
      end
    end

    context 'with 2 options only 1 full' do
      it 'returns false' do
        course = create(:course)
        course_option_without_vacancies = create(:course_option, vacancy_status: :no_vacancies, course: course)
        create(:course_option, vacancy_status: :vacancies, course: course)
        application_choice = create :application_choice, course_option: course_option_without_vacancies
        expect(application_choice.course_full?).to be false
      end
    end
  end

  describe '#site_full?' do
    context 'with 3 options all full' do
      it 'returns true' do
        course = create(:course)
        create_list(:course_option, 3, vacancy_status: :no_vacancies, course: course)
        application_choice = create(:application_choice, course_option: course.course_options.first)
        expect(application_choice.site_full?).to be true
      end
    end

    context 'with 2 options only 1 full' do
      it 'returns true' do
        course = create(:course)
        course_option_without_vacancies = create(:course_option, vacancy_status: :no_vacancies, course: course)
        create(:course_option, vacancy_status: :vacancies, course: course)
        application_choice = create :application_choice, course_option: course_option_without_vacancies
        expect(application_choice.site_full?).to be true
      end
    end

    context 'with 2 options for same site only 1 full' do
      # Description fixed: the expectation below asserts `be false`.
      it 'returns false' do
        course = create(:course)
        site = create(:site, provider: course.provider)
        course_option_without_vacancies = create(:course_option, vacancy_status: :no_vacancies, course: course, site: site, study_mode: 'full_time')
        create(:course_option, vacancy_status: :vacancies, course: course, site: site, study_mode: 'part_time')
        application_choice = create :application_choice, course_option: course_option_without_vacancies
        expect(application_choice.site_full?).to be false
      end
    end
  end

  # Renamed from '#course_study_mode_full?' to match the method under test.
  describe '#study_mode_full?' do
    context 'with option that has vacancies' do
      it 'returns false' do
        course = create(:course)
        create(:course_option, vacancy_status: :vacancies, course: course)
        application_choice = create(:application_choice, course_option: course.course_options.first)
        expect(application_choice.study_mode_full?).to be false
      end
    end

    context 'with 2 options only 1 full' do
      it 'returns true' do
        course = create(:course)
        course_option_without_vacancies = create(:course_option, vacancy_status: :no_vacancies, course: course)
        create(:course_option, vacancy_status: :vacancies, course: course)
        application_choice = create :application_choice, course_option: course_option_without_vacancies
        expect(application_choice.study_mode_full?).to be true
      end
    end

    context 'with 2 options for same site only 1 full' do
      it 'returns true' do
        course = create(:course)
        site = create(:site, provider: course.provider)
        course_option_without_vacancies = create(:course_option, vacancy_status: :no_vacancies, course: course, site: site, study_mode: 'full_time')
        create(:course_option, vacancy_status: :vacancies, course: course, site: site, study_mode: 'part_time')
        application_choice = create :application_choice, course_option: course_option_without_vacancies
        expect(application_choice.study_mode_full?).to be true
      end
    end
  end

  describe '#no_feedback?' do
    it 'returns false if simple rejection reason is provided' do
      application_choice = build(:application_choice, :with_rejection)
      expect(application_choice.no_feedback?).to be false
    end

    it 'returns false if structured rejection reasons are provided' do
      application_choice = build(:application_choice, :with_structured_rejection_reasons)
      expect(application_choice.no_feedback?).to be false
    end

    it 'returns true if no feedback for the candidate is provided' do
      application_choice = build(:application_choice)
      expect(application_choice.no_feedback?).to be true
    end
  end

  describe 'validations' do
    subject(:application_choice) { create(:application_choice) }

    it { is_expected.to validate_uniqueness_of(:course_option).scoped_to(:application_form_id) }
  end

  describe '#structured_rejection_reasons' do
    it 'are serialized and rehydrateable' do
      reasons = ReasonsForRejection.new(
        candidate_behaviour_y_n: 'Yes',
        candidate_behaviour_what_did_the_candidate_do: %w[other],
        candidate_behaviour_other: 'Used the wrong spoon for soup',
      )
      application_choice = create(:application_choice)
      application_choice.update!(structured_rejection_reasons: reasons)
      rehydrated_reasons = ReasonsForRejection.new(application_choice.reload.structured_rejection_reasons)
      expect(rehydrated_reasons.candidate_behaviour_y_n).to eq('Yes')
      expect(rehydrated_reasons.candidate_behaviour_what_did_the_candidate_do).to eq(%w[other])
      expect(rehydrated_reasons.candidate_behaviour_other).to eq('Used the wrong spoon for soup')
    end
  end
end
|
<reponame>janCstoffregen/raeber-mit-generischer-suche
/**
* Created by <NAME> (<EMAIL>) on 6/7/17.
*/
import { Component } from '@angular/core';

/**
 * Navigation bar component. Template-only: all markup lives in
 * navigationsleiste.component.html; the class carries no behaviour.
 */
@Component({
  moduleId: module.id,
  selector: 'rae-navigationsleiste',
  templateUrl: './navigationsleiste.component.html'
})
export class NavigationsleisteComponent {
}
|
/// Returns `true` when both certificate files exist, are readable, and
/// contain byte-identical data; returns `false` otherwise (including
/// when either file cannot be read).
func compareCertificates(localCertPath: String, remoteCertPath: String) -> Bool {
    let localURL = URL(fileURLWithPath: localCertPath)
    let remoteURL = URL(fileURLWithPath: remoteCertPath)
    guard let localBytes = try? Data(contentsOf: localURL),
          let remoteBytes = try? Data(contentsOf: remoteURL) else {
        return false
    }
    return localBytes == remoteBytes
}
-- Count how many books each author has in the books table.
SELECT author, COUNT(*) AS "Number of Books"
FROM books
GROUP BY author;
# Editing
# Prefer a GUI editor on a local macOS session; fall back to vim over
# SSH or on other platforms. is_osx is a helper defined elsewhere in
# these dotfiles.
if [[ ! "$SSH_TTY" ]] && is_osx; then
  export EDITOR='atom'
  # Let `less` hand the current file/line off to the GUI editor.
  export LESSEDIT='atom ?lm+%lm -- %f'
else
  export EDITOR='vim'
fi
export VISUAL="$EDITOR"
# Shorthand aliases: q opens the editor; qv edits the (g)vimrc files,
# qs opens the dotfiles repo. $DOTFILES is set by the dotfiles loader.
alias q="$EDITOR"
alias qv="q $DOTFILES/link/.{,g}vimrc +'cd $DOTFILES'"
alias qs="q +'cd $DOTFILES'"
|
import React from 'react';
import styled from 'styled-components';
// Styled <button>: dimmed to half opacity when the `disabled` prop is set.
const Button = styled.button`
  opacity: ${({ disabled }) => (disabled ? 0.5 : 1)};
  /* Add additional styling for the button component if needed */
`;
// Base text style: size and color come from the styled-components
// `theme` prop (theme.metrics.largeSize, theme.colors.buttonText).
const Text = styled.span`
  font-size: ${({ theme }) => theme.metrics.largeSize}px;
  color: ${({ theme }) => theme.colors.buttonText};
  font-family: CircularStd-Black;
  text-transform: uppercase;
  text-align: center;
  letter-spacing: 1px;
`;

// Inherits every rule from Text; extend here when button text needs
// to diverge from the base style.
const ButtonText = styled(Text)`
  /* Additional styling for the button text component if needed */
`;
// Example usage of the Button and ButtonText components
const CustomButton = ({ disabled, theme, children }) => {
return (
<Button disabled={disabled}>
<ButtonText theme={theme}>{children}</ButtonText>
</Button>
);
};
export default CustomButton; |
import nltk
from nltk.sentiment.vader import SentimentIntensityAnalyzer
def sentiment_analysis(text):
    """Return VADER polarity scores for ``text`` as produced by nltk's
    SentimentIntensityAnalyzer (a dict of sentiment scores)."""
    # Download language models
    # NOTE(review): this runs on every call and may hit the network each
    # time; consider downloading once at startup -- confirm intent.
    nltk.download('vader_lexicon')
    # Initialize a sentiment analyzer
    sid = SentimentIntensityAnalyzer()
    # Calculate the sentiment score
    sentiment = sid.polarity_scores(text)
    # Return the sentiment score
    return sentiment
#!/bin/sh
set -eu
#if test -z "$SLACK_BOT_TOKEN"; then
# echo "Set the SLACK_BOT_TOKEN secret."
# exit 1
#fi
# NOTE(review): the bare "$" below is printed literally ("Hello $"); it
# looks like a templated variable name was lost here -- confirm the
# intended placeholder before re-enabling the Slack call below.
echo "Hello $"
#curl -v -X "POST" "https://slack.com/api/chat.postMessage" \
# -H "Content-Type: application/json; charset: utf-8" \
# -H "Authorization: Bearer $SLACK_BOT_TOKEN" \
# -d "{\"channel\":\"${SLACK_CHANNEL_ID}\",\"blocks\":\"${SLACK_MESSAGE_BLOCKS}\"}"
def genetic_algorithm(population, fitness_fn, mutation_prob, num_generations):
    """Evolve ``population`` for ``num_generations`` generations.

    Each generation, every individual is independently replaced by
    ``mutation(individual)`` with probability ``mutation_prob`` (the
    ``mutation`` function is supplied externally, at module level).
    Returns ``(best_individual, best_fitness)`` over all individuals
    scored in any post-mutation generation.
    """
    best_individual = None
    best_fitness = -float('inf')
    for _generation in range(num_generations):
        # Build the next generation: each member mutates independently.
        next_population = []
        for member in population:
            candidate = mutation(member) if random.random() < mutation_prob else member
            next_population.append(candidate)
        population = next_population
        # Score the freshly built generation and track the best so far.
        for member in population:
            score = fitness_fn(member)
            if score > best_fitness:
                best_fitness = score
                best_individual = member
    return best_individual, best_fitness
<reponame>forwardemail/free-email-forwarding-service<filename>helpers/index.js
// Barrel module: aggregates the helper modules so consumers can
// `require('./helpers')` instead of importing each file individually.
const CustomError = require('./custom-error');
const MessageSplitter = require('./message-splitter');
const env = require('./env');
const logger = require('./logger');

module.exports = {
  CustomError,
  MessageSplitter,
  env,
  logger
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.