text stringlengths 1 1.05M |
|---|
import logging
class KmIOFilter(logging.Filter):
    """Logging filter that admits only records at WARNING severity or above.

    Attach to a logger via ``logger.addFilter(KmIOFilter())``.
    """

    def filter(self, record):
        """Return a truthy value to emit *record*, falsy to drop it."""
        return record.levelno >= logging.WARNING
# Create (or fetch) the named logger instance.
logger = logging.getLogger('custom_logger')
# Attach the KmIOFilter so records below WARNING are dropped at the logger.
logger.addFilter(KmIOFilter())
# Console handler: writes records to stderr by default.
ch = logging.StreamHandler()
# Register the console handler on the logger.
logger.addHandler(ch)
# Example usage: WARNING and ERROR pass the filter and are emitted.
logger.warning('This is a warning message')
logger.error('This is an error message')
logger.info('This is an info message')  # INFO < WARNING, so the filter drops it
import type { BuiltInProviderType } from "next-auth/providers";
import type { ClientSafeProvider, LiteralUnion } from "next-auth/react";

/**
 * Map of NextAuth provider ids (built-in or custom string ids) to their
 * client-safe provider descriptions, or `null` when no providers are loaded.
 */
export type TNextAuthProviders = Record<LiteralUnion<BuiltInProviderType, string>, ClientSafeProvider> | null;
|
/**
*
*/
package jframe.core.msg;
/**
* Poison Message
*
* @author dzh
* @date Nov 18, 2013 1:20:06 PM
* @since 1.0
*/
public class PoisonMsg extends TextMsg {
public static final PoisonMsg PM = new PoisonMsg();
public String toString() {
return "PoisonMsg";
}
}
|
<gh_stars>1-10
package org.softuni.realestate.domain.models.view;
/**
 * Marker interface for view models parameterized by the entity type {@code E}.
 * NOTE(review): declares no members; presumably used for generic type binding
 * of view-model mappings — confirm against call sites.
 */
public interface Viewable<E> {
}
|
#!/bin/bash

# Uninstall the vim package when /bin/vim exists as a regular file.
if test -f /bin/vim; then
    pikaur -R vim --noconfirm
fi

# Uninstall the vi package when /bin/vi exists and is not a symlink
# (a symlinked /bin/vi usually points at another editor and is left alone).
if test -f /bin/vi && test ! -L /bin/vi; then
    pikaur -R vi --noconfirm
fi
|
//
// RCTMqtt.h
// RCTMqtt
//
// Created by <NAME> on 2/2/16.
// Copyright © 2016 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <React/RCTEventEmitter.h>

// React Native native module exposing MQTT functionality to JavaScript.
// Subclasses RCTEventEmitter so it can push MQTT events to the JS side.
// NOTE(review): exported methods are presumably declared via
// RCT_EXPORT_METHOD in the .m implementation — confirm there.
@interface RCTMqtt : RCTEventEmitter
@end
|
import type { IApplication } from './IApplication';
/**
 * Application Client interface. Used to register
 * middleware and other effects with the main
 * Application instance.
 *
 * @export
 * @interface IClient
 */
export interface IClient {
/**
 * Sets up Client middleware to serve an autogenerated TS
 * client from the specified endpoint. The client has all
 * the controllers registered to the application available
 * as generated IClientAdapter implementations.
 *
 * @param {IApplication} app the Application instance to attach to
 * @param {string} [endpoint] optional serving endpoint; implementation default applies when omitted
 * @returns {this} the client, for call chaining
 * @memberof IClient
 */
applyMiddleware(app: IApplication, endpoint?: string): this
}
|
<gh_stars>1-10
// 15784. 질투진서
// 2020.11.14
// 구현
#include<iostream>
#include<queue>
using namespace std;
// Score table, 1-indexed; sized for n <= 1000 (BOJ 15784).
int x[1001][1001];

int main()
{
    int n, a, b;
    cin >> n >> a >> b;

    // Read the full n x n score table.
    for (int row = 1; row <= n; row++) {
        for (int col = 1; col <= n; col++) {
            cin >> x[row][col];
        }
    }

    // Angry if anyone other than person a scored higher on problem b.
    for (int row = 1; row <= n; row++) {
        if (row != a && x[row][b] > x[a][b]) {
            cout << "ANGRY" << endl;
            return 0;
        }
    }

    // Angry if person a scored higher on some other problem than on b.
    for (int col = 1; col <= n; col++) {
        if (col != b && x[a][col] > x[a][b]) {
            cout << "ANGRY" << endl;
            return 0;
        }
    }

    cout << "HAPPY" << endl;
    return 0;
}
|
#!/bin/sh
# Bootstrap the Transmission autotools build: run gettextize/libtoolize/
# autoreconf/intltoolize as available, then (optionally) configure.
srcdir=`dirname $0`
test -z "$srcdir" && srcdir=.
ORIGDIR=`pwd`
cd "$srcdir"
PROJECT=Transmission
# Probe for glib-gettextize; disable the gettext steps if it is missing.
GETTEXTIZE="glib-gettextize"
$GETTEXTIZE --version < /dev/null > /dev/null 2>&1
if test $? -ne 0; then
GETTEXTIZE=""
fi
# Prefer libtoolize; fall back to glibtoolize (macOS naming).
LIBTOOLIZE=libtoolize
if libtoolize --help >/dev/null 2>&1
then
:
elif glibtoolize --help >/dev/null 2>&1
then
LIBTOOLIZE=glibtoolize
fi
export LIBTOOLIZE
./update-version-h.sh
autoreconf -fi || exit 1;
if test "$GETTEXTIZE"; then
echo "Creating aclocal.m4 ..."
# gettextize requires aclocal.m4 to exist before it runs.
test -r aclocal.m4 || touch aclocal.m4
echo "Running $GETTEXTIZE... Ignore non-fatal messages."
# Pipe "no" to decline gettextize's interactive ChangeLog prompt.
echo "no" | $GETTEXTIZE --force --copy
echo "Making aclocal.m4 writable ..."
test -r aclocal.m4 && chmod u+w aclocal.m4
echo "Running intltoolize..."
intltoolize --copy --force --automake
fi
cd "$ORIGDIR" || exit $?
# Unless invoked as a subdirectory bootstrap, run configure immediately.
if test -z "$AUTOGEN_SUBDIR_MODE"; then
echo Running $srcdir/configure "$@"
$srcdir/configure "$@"
echo
echo "Now type 'make' to compile $PROJECT."
fi
|
# Put the Arcanist CLI (arc) on PATH for this shell session.
export PATH="$HOME/phacility/arcanist/bin:$PATH"
|
#!/bin/sh -e
# Installs the project's helm chart, pointing image references at either
# GitHub's registry or a private registry depending on the git remote.
DIRECTORY=$(dirname "${0}")
SCRIPT_DIRECTORY=$(
cd "${DIRECTORY}" || exit 1
pwd
)
# shellcheck source=/dev/null
. "${SCRIPT_DIRECTORY}/../../configuration/project.sh"
# shellcheck source=/dev/null
. "${HOME}/.virtualization-tools.sh"
# A github.com origin selects the GitHub container registry.
git config --get remote.origin.url | grep --quiet github.com && IS_GITHUB=true || IS_GITHUB=false
if [ "${IS_GITHUB}" = 'true' ]; then
REGISTRY_SERVER='ghcr.io'
else
# NOTE(review): the server is assigned from PRIVATE_REGISTRY_PASSWORD —
# this looks like the wrong variable (a password, not a hostname);
# confirm against configuration/project.sh.
REGISTRY_SERVER="${PRIVATE_REGISTRY_PASSWORD}"
fi
# Picks the directory helm-chart in the project root.
helm install --set "ImagePrefix=${REGISTRY_SERVER}/${VENDOR_NAME_LOWER}" "${PROJECT_NAME_DASH}" helm-chart
|
#!/usr/bin/env bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Start in scripts/ even if run from root directory
cd "$(dirname "$0")"
# Get release version from tfjs-core's package.json file.
# Prints the first "version" value found, or an empty string if none.
function getReleaseVersion {
  local version=""
  local regex="\"version\": \"(.*)\""
  # read -r: do not mangle backslashes in input lines (shellcheck SC2162).
  while read -r line
  do
    if [[ $line =~ $regex ]]; then
      version="${BASH_REMATCH[1]}"
      break
    fi
  done < "tfjs-core/package.json"
  echo "$version"
}
# Exit the script on any command with non 0 return code
set -e
# Echo every command being executed
set -x
# Go to root
cd ../../
# Yarn in the top-level
yarn
RELEASE_VERSION=`getReleaseVersion`
if [[ -z ${RELEASE_VERSION} ]]; then
echo "Expect a valid release version, but got ${RELEASE_VERSION}"
exit 1
else
echo "Publishing version ${RELEASE_VERSION}"
fi
# All packages to publish. This includes Bazel packages.
# Order matters: later packages depend on earlier ones being published.
PACKAGES=("tfjs-core" "tfjs-backend-cpu" "tfjs-backend-webgl" \
"tfjs-backend-wasm" "tfjs-layers" "tfjs-converter" "tfjs-data" "tfjs" \
"tfjs-node" "tfjs-node-gpu")
# Packages that build with Bazel
BAZEL_PACKAGES=("tfjs-core" "tfjs-backend-cpu" "tfjs-tflite" "tfjs-converter"
"tfjs-backend-webgl" "tfjs-backend-webgpu" "tfjs-layers" "tfjs-data")
for package in "${PACKAGES[@]}"
do
cd $package
# tfjs-node-gpu needs to get some files from tfjs-node.
if [[ $package == "tfjs-node-gpu" ]]; then
yarn prep-gpu
fi
# Install dependencies.
yarn
# Membership test: spaces around both sides make the substring match exact.
if [[ " ${BAZEL_PACKAGES[@]} " =~ " ${package} " ]]; then
# Build and publish to local npm.
echo "Publishing $package using Bazel"
yarn publish-npm
else
echo "Publishing $package using npm"
# Build npm.
yarn build-npm for-publish
# Publish to local npm.
npm publish
fi
echo "Published ${package}@${RELEASE_VERSION}"
cd ..
done
# Update e2e's package.json's all tfjs related packages to locally published
# version.
cd e2e
npx ts-node ./scripts/update-dependency.ts --version=$RELEASE_VERSION
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
# Load the data.
# NOTE(review): assumes the CSV has a numeric "sales" column and
# numeric-only feature columns — confirm against the actual file.
data = pd.read_csv("bicycle_sales_data.csv")
# Extract the input features (everything except "sales") and the target.
X = data.drop("sales", axis=1).values
y = data["sales"].values
# Split the data: 80% train / 20% test, seeded for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Train the linear regression model on the training split.
model = LinearRegression()
model.fit(X_train, y_train)
# Make predictions on the held-out test split.
y_pred = model.predict(X_test)
// Copyright 2021 Google LLC. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"fmt"
"sort"
"strings"
"unicode"
"github.com/golang/glog"
"github.com/nasa9084/go-openapi"
)
// Property is the representation of a TPG resource property in tpgtools.
type Property struct {
// title is the snake_cased name of a property.
title string
// PackageName is the title-cased shortname of a field as it appears as a
// property in the DCL. For example, "MachineType".
PackageName string
// Type is the type of a property.
Type
// Settable indicates that a field is settable in desired specs provided to
// Apply.
// Settable fields may be Computed - fields are sometimes Optional + Computed
// to indicate that they have complex default values in the API.
Settable bool
// Only applies to nested object properties.
// Indicates this property should be excluded and its
// subproperties should be brought up a level.
Collapsed bool
// Elem is the value to insert into the Elem field. If empty, none will be
// inserted.
// In most cases, this should be a function call to a schema function. For
// TypeMaps, this will be a one-liner adding a primitive elem.
Elem *string
// ElemIsBasicType indicates Elem is a primitive schema rather than a resource.
ElemIsBasicType bool
// ConflictsWith is the list of fields that this field conflicts with
ConflictsWith ConflictsWith
// JSONCaseConflictsWith is the list of fields that this field conflicts with
// in JSONCase. For example, "machineType"
JSONCaseConflictsWith []string
// Default is the default for the field.
Default *string
// raw schema values
Required bool
Optional bool
Computed bool
Sensitive bool
ForceNew bool
Description string
DiffSuppressFunc *string
ValidateFunc *string
SetHashFunc *string
MaxItems *int64
MinItems *int64
ConfigMode *string
Removed *string
Deprecated *string
// end raw schema values
// StateGetter is the line of code to retrieve a field from the `d`
// ResourceData or (TODO:) from a map[string]interface{}
StateGetter *string
// StateSetter is the line of code to set a field in the `d` ResourceData
// or (TODO:) a map[string]interface{}
StateSetter *string
// If this field is a three-state boolean in DCL which is represented as a
// string in terraform. This is done so that the DCL can distinguish between
// the field being unset and being set to false.
EnumBool bool
// An IdentityGetter is a function to retrieve the value of an "identity" field
// from state. Identity fields will sometimes allow retrieval from multiple
// fields or from the user's environment variables.
// In the most common case, project/region/zone will use special resource-level
// properties instead of IdentityGetters. However, if they have atypical
// behaviour, such as sourcing a region from a zone, an IdentityGetter will be
// used instead.
IdentityGetter *string
// Sub-properties of nested objects or arrays with nested objects
Properties []Property
// Reference to the parent resource.
// note: "Properties" will not be available.
resource *Resource
// Reference to the parent property of a sub-property. If the property is
// top-level, this will be unset.
parent *Property
// customName is the Terraform-specific name that overrides title.
customName string
// Ref is the name of the shared reference type.
ref string
// If this property allows forward slashes in its value (only important for
// properties sent in the URL)
forwardSlashAllowed bool
}
// An IdentityGetter is a function to retrieve the value of an "identity" field
// from state.
type IdentityGetter struct {
// If HasError is set to true, the function called by FunctionCall will
// return (v, error) rather than just v.
HasError bool
// Rendered function call to insert into the template. For example,
// d.Get("name").(string)
FunctionCall string
}
// Name returns the Terraform shortname of the field, e.g. "machine_type".
// A custom name override takes precedence over the schema title.
func (p Property) Name() string {
	if p.customName != "" {
		return p.customName
	}
	return p.title
}
// overridePath is the dotted path of a property used in override names,
// e.g. "node_config.machine_type".
func (p Property) overridePath() string {
	if p.parent == nil {
		return p.title
	}
	return p.parent.overridePath() + "." + p.title
}
// PackageJSONName is the camel-cased shortname of a field as it appears in the
// DCL's json serialization. For example, "machineType".
func (p Property) PackageJSONName() string {
	runes := []rune(p.PackageName)
	if len(runes) == 0 {
		return ""
	}
	// Lowercase only the first rune; the rest of the name is unchanged.
	runes[0] = unicode.ToLower(runes[0])
	return string(runes)
}
// PackagePath is the title-cased path of a type (relative to the resource) for
// use in naming functions. For example, "MachineType" or "NodeConfigPreemptible".
// A shared reference type short-circuits to its reference name.
func (p Property) PackagePath() string {
	switch {
	case p.ref != "":
		return p.ref
	case p.parent != nil:
		return p.parent.PackagePath() + p.PackageName
	default:
		return p.PackageName
	}
}
// ObjectType renders the Go type name for this property's object. Properties
// inside a shared reference type are named by their own package path; all
// others are prefixed with the parent resource's DCL title.
func (p Property) ObjectType() string {
	// Walk up the ancestor chain looking for a shared reference type.
	for cur := &p; ; cur = cur.parent {
		if cur.ref != "" {
			return p.PackagePath()
		}
		if cur.parent == nil {
			break
		}
	}
	return p.resource.DCLTitle() + p.PackagePath()
}
// IsArray reports whether the property is a list/set of non-object elements.
func (p Property) IsArray() bool {
	isListOrSet := p.Type.String() == SchemaTypeList || p.Type.String() == SchemaTypeSet
	return isListOrSet && !p.Type.IsObject()
}
// IsSet reports whether the type renders as a Terraform schema.TypeSet.
func (t Type) IsSet() bool {
	return SchemaTypeSet == t.String()
}
// ShouldGenerateNestedSchema returns true if an object's nested schema function should be generated.
// Collapsed objects have their subproperties hoisted, so no schema is emitted.
func (p Property) ShouldGenerateNestedSchema() bool {
	if p.Collapsed {
		return false
	}
	return len(p.Properties) > 0
}
// IsServerGeneratedName reports whether this field is read from state but not
// settable by the user (i.e. a server-generated identity field).
func (p Property) IsServerGeneratedName() bool {
	if p.Settable {
		return false
	}
	return p.StateGetter != nil
}
// DefaultStateGetter returns the line of code to retrieve a field from the `d`
// ResourceData or (TODO:) from a map[string]interface{}
func (p Property) DefaultStateGetter() string {
	return buildGetter(p, fmt.Sprintf("d.Get(%q)", p.Name()))
}
// ChangeStateGetter is like DefaultStateGetter but reads the field's previous
// value via d.GetChange, wrapped by the oldValue helper.
func (p Property) ChangeStateGetter() string {
	raw := fmt.Sprintf("oldValue(d.GetChange(%q))", p.Name())
	return buildGetter(p, raw)
}
// StateGetterForDestroyTest builds a getter for constructing a shallow
// version of the object for destroy purposes, reading raw string values
// out of the test state's primary attributes.
func (p Property) StateGetterForDestroyTest() string {
	fromState := fmt.Sprintf(`rs.Primary.Attributes["%s"]`, p.Name())
	switch p.Type.String() {
	case SchemaTypeBool:
		return fmt.Sprintf(`dcl.Bool(%s == "true")`, fromState)
	case SchemaTypeString:
		switch {
		case p.Type.IsEnum():
			return fmt.Sprintf("%s.%sEnumRef(%s)", p.resource.Package(), p.ObjectType(), fromState)
		case p.Computed:
			return fmt.Sprintf("dcl.StringOrNil(%s)", fromState)
		default:
			return fmt.Sprintf("dcl.String(%s)", fromState)
		}
	}
	// Other types are not supported by destroy tests.
	return ""
}
// Builds a Getter for a property with given raw value.
// rawGetter is a rendered expression yielding the raw interface{} value
// (e.g. `d.Get("name")` or `obj["name"]`); the returned snippet wraps it
// in the appropriate DCL conversion for the property's schema type.
func buildGetter(p Property, rawGetter string) string {
switch p.Type.String() {
case SchemaTypeBool:
return fmt.Sprintf("dcl.Bool(%s.(bool))", rawGetter)
case SchemaTypeString:
if p.Type.IsEnum() {
return fmt.Sprintf("%s.%sEnumRef(%s.(string))", p.resource.Package(), p.ObjectType(), rawGetter)
}
if p.EnumBool {
return fmt.Sprintf("expandEnumBool(%s.(string))", rawGetter)
}
// Computed strings use OrNil so unset values stay nil rather than "".
if p.Computed {
return fmt.Sprintf("dcl.StringOrNil(%s.(string))", rawGetter)
}
return fmt.Sprintf("dcl.String(%s.(string))", rawGetter)
case SchemaTypeFloat:
if p.Computed {
return fmt.Sprintf("dcl.Float64OrNil(%s.(float64))", rawGetter)
}
return fmt.Sprintf("dcl.Float64(%s.(float64))", rawGetter)
case SchemaTypeInt:
if p.Computed {
return fmt.Sprintf("dcl.Int64OrNil(int64(%s.(int)))", rawGetter)
}
return fmt.Sprintf("dcl.Int64(int64(%s.(int)))", rawGetter)
case SchemaTypeMap:
return fmt.Sprintf("checkStringMap(%s)", rawGetter)
case SchemaTypeList, SchemaTypeSet:
if p.Type.IsEnumArray() {
return fmt.Sprintf("expand%s%sArray(%s)", p.resource.PathType(), p.PackagePath(), rawGetter)
}
if p.Type.typ.Items != nil && p.Type.typ.Items.Type == "string" {
return fmt.Sprintf("expandStringArray(%s)", rawGetter)
}
if p.Type.typ.Items != nil && p.Type.typ.Items.Type == "integer" {
return fmt.Sprintf("expandIntegerArray(%s)", rawGetter)
}
// Arrays of objects expand through a generated per-type helper.
if p.Type.typ.Items != nil && len(p.Properties) > 0 {
return fmt.Sprintf("expand%s%sArray(%s)", p.resource.PathType(), p.PackagePath(), rawGetter)
}
}
// Nested objects (non-array) also expand through a generated helper.
if p.typ.Type == "object" {
return fmt.Sprintf("expand%s%s(%s)", p.resource.PathType(), p.PackagePath(), rawGetter)
}
return "<unknown>"
}
// DefaultStateSetter returns the line of code to set a field in the `d`
// ResourceData or (TODO:) a map[string]interface{}
func (p Property) DefaultStateSetter() string {
	switch p.Type.String() {
	case SchemaTypeBool, SchemaTypeString, SchemaTypeInt, SchemaTypeFloat, SchemaTypeMap:
		// Scalars and string maps are set directly from the DCL struct field.
		return fmt.Sprintf("d.Set(%q, res.%s)", p.Name(), p.PackageName)
	case SchemaTypeList, SchemaTypeSet:
		items := p.Type.typ.Items
		if items != nil && (items.Type == "string" || items.Type == "integer") {
			return fmt.Sprintf("d.Set(%q, res.%s)", p.Name(), p.PackageName)
		}
		if items != nil && len(p.Properties) > 0 {
			// Arrays of objects flatten through a generated per-type helper.
			return fmt.Sprintf("d.Set(%q, flatten%s%sArray(res.%s))", p.Name(), p.resource.PathType(), p.PackagePath(), p.PackageName)
		}
	}
	if p.typ.Type == "object" {
		return fmt.Sprintf("d.Set(%q, flatten%s%s(res.%s))", p.Name(), p.resource.PathType(), p.PackagePath(), p.PackageName)
	}
	return "<unknown>"
}
// ExpandGetter needs to return a snippet of code that produces the DCL-type
// for the field from a map[string]interface{} named obj that represents the
// parent object in Terraform.
func (p Property) ExpandGetter() string {
	return buildGetter(p, fmt.Sprintf("obj[%q]", p.Name()))
}
// FlattenGetter needs to return a snippet of code that returns an interface{}
// which can be used in the d.Set() call, given a DCL-type for the parent
// object named `obj`.
func (p Property) FlattenGetter() string {
	const parentVar = "obj"
	return p.flattenGetterWithParent(parentVar)
}
// flattenGetterWithParent returns a snippet of code producing an interface{}
// value for this property, reading from the named DCL-typed parent variable
// (normally "obj"; "res" for top-level collapsed objects).
func (p Property) flattenGetterWithParent(parent string) string {
	switch p.Type.String() {
	case SchemaTypeBool:
		fallthrough
	case SchemaTypeString:
		fallthrough
	case SchemaTypeInt:
		fallthrough
	case SchemaTypeFloat:
		fallthrough
	case SchemaTypeMap:
		if p.EnumBool {
			return fmt.Sprintf("flattenEnumBool(%s.%s)", parent, p.PackageName)
		}
		return fmt.Sprintf("%s.%s", parent, p.PackageName)
	case SchemaTypeList, SchemaTypeSet:
		if p.Type.IsEnumArray() {
			// Fix: previously hardcoded "obj" here instead of using the parent
			// variable, which rendered wrong code when called with "res"
			// (top-level collapsed objects). All branches now use parent.
			return fmt.Sprintf("flatten%s%sArray(%s.%s)", p.resource.PathType(), p.PackagePath(), parent, p.PackageName)
		}
		if p.Type.typ.Items != nil && p.Type.typ.Items.Type == "integer" {
			return fmt.Sprintf("%s.%s", parent, p.PackageName)
		}
		if p.Type.typ.Items != nil && p.Type.typ.Items.Type == "string" {
			return fmt.Sprintf("%s.%s", parent, p.PackageName)
		}
		if p.Type.typ.Items != nil && len(p.Properties) > 0 {
			return fmt.Sprintf("flatten%s%sArray(%s.%s)", p.resource.PathType(), p.PackagePath(), parent, p.PackageName)
		}
	}
	if p.typ.Type == "object" {
		return fmt.Sprintf("flatten%s%s(%s.%s)", p.resource.PathType(), p.PackagePath(), parent, p.PackageName)
	}
	return "<unknown>"
}
// getSchemaExtensionMap extracts the first extension map from an OpenAPI
// extension value, which is expected to be a non-empty []interface{} whose
// first element is a map[interface{}]interface{}. Returns nil for nil input
// or any other shape.
func getSchemaExtensionMap(v interface{}) map[interface{}]interface{} {
	// Fix: the guard was inverted (`v != nil`), which made the function
	// return nil for every non-nil input and fall through on nil input —
	// i.e. it always returned nil.
	if v == nil {
		return nil
	}
	ls, ok := v.([]interface{})
	if ok && len(ls) > 0 {
		return ls[0].(map[interface{}]interface{})
	}
	return nil
}
// DefaultDiffSuppress returns the default diff-suppress function name for the
// property, or nil when none applies. Only string reference fields get one.
func (p Property) DefaultDiffSuppress() *string {
	if p.Type.String() != SchemaTypeString {
		return nil
	}
	// Field is a reference to another resource: compare as self link or name.
	if _, isRef := p.typ.Extension["x-dcl-references"]; isRef {
		dsf := "compareSelfLinkOrResourceName"
		return &dsf
	}
	return nil
}
// GetRequiredFileImports returns extra file-level imports this property's
// generated code needs (currently only the SDK validation package).
func (p Property) GetRequiredFileImports() []string {
	var imports []string
	if p.ValidateFunc != nil && strings.Contains(*p.ValidateFunc, "validation.") {
		imports = append(imports, GoPkgTerraformSdkValidation)
	}
	return imports
}
// DefaultSetHashFunc returns the hash function expression for a TypeSet
// property. Calling it on a non-set property is a fatal error.
func (p Property) DefaultSetHashFunc() *string {
	if p.Type.String() == SchemaTypeSet {
		var shf string
		if p.ElemIsBasicType {
			shf = "schema.HashString"
		} else {
			shf = fmt.Sprintf("schema.HashResource(%s)", *p.Elem)
		}
		return &shf
	}
	glog.Fatalf("Failed to find valid hash func")
	return nil
}
// Objects returns a flatmap of the sub-properties within a Property which are
// objects (eg: have sub-properties themselves). Shared reference types are
// excluded.
func (p Property) Objects() []Property {
	var objects []Property
	for _, child := range p.Properties {
		// Skip non-objects and shared reference types.
		if len(child.Properties) == 0 || child.ref != "" {
			continue
		}
		objects = append(objects, child)
		objects = append(objects, child.Objects()...)
	}
	return objects
}
// collapsedProperties returns the input list of properties with nested objects
// collapsed if needed: collapsed objects are replaced by their (recursively
// collapsed) sub-properties.
func collapsedProperties(props []Property) []Property {
	var result []Property
	for _, prop := range props {
		if prop.Collapsed && len(prop.Properties) != 0 {
			result = append(result, collapsedProperties(prop.Properties)...)
			continue
		}
		result = append(result, prop)
	}
	return result
}
// ConflictsWith aliases []string so that append etc. still work, while
// attaching a rendering function for use in templates.
type ConflictsWith []string

// String renders the list as a Go string-slice literal, e.g. []string{"a","b"}.
func (c ConflictsWith) String() string {
	quoted := make([]string, 0, len(c))
	for _, s := range c {
		quoted = append(quoted, fmt.Sprintf("%q", s))
	}
	return fmt.Sprintf("[]string{%s}", strings.Join(quoted, ","))
}
// Builds a list of properties from an OpenAPI schema.
// For each schema property it resolves references, applies the many override
// kinds (custom names, defaults, state getters/setters, validation, etc.),
// recurses into nested objects and arrays, and finally resolves declared
// conflict fields and sorts the result. parent is nil for top-level calls.
func createPropertiesFromSchema(schema *openapi.Schema, typeFetcher *TypeFetcher, overrides Overrides, resource *Resource, parent *Property, location string) (props []Property, err error) {
identityFields := []string{} // always empty if parent != nil
if parent == nil {
identityFields = idParts(resource.ID)
}
// Maps PackageJSONName back to property Name
// for conflict fields
conflictsMap := make(map[string]string)
for k, v := range schema.Properties {
ref := ""
packageName := ""
if pName, ok := v.Extension["x-dcl-go-name"].(string); ok {
packageName = pName
}
// Resolve $ref schemas to their concrete definition; record the shared
// reference's package path so the property can reuse the shared type.
if v.Ref != "" {
ref = v.Ref
v, err = typeFetcher.ResolveSchema(v.Ref)
if err != nil {
return nil, err
}
ref = typeFetcher.PackagePathForReference(ref, v.Extension["x-dcl-go-type"].(string))
}
// Sub-properties are referenced by name, and the explicit title value
// won't be set initially.
v.Title = k
if parent == nil && v.Title == "id" {
// If top-level field is named `id`, rename to avoid collision with Terraform id
v.Title = fmt.Sprintf("%s%s", resource.Name(), "Id")
}
p := Property{
title: jsonToSnakeCase(v.Title).snakecase(),
Type: Type{typ: v},
PackageName: packageName,
Description: v.Description,
resource: resource,
parent: parent,
ref: ref,
}
if overrides.PropertyOverride(Exclude, p, location) {
continue
}
// Apply a custom default override, or fall back to the schema default.
do := CustomDefaultDetails{}
doOk, err := overrides.PropertyOverrideWithDetails(CustomDefault, p, &do, location)
if err != nil {
// NOTE(review): message says "list size" but this is the custom
// default override — likely copy-pasted.
return nil, fmt.Errorf("failed to decode custom list size details")
}
if v.Default != "" || doOk {
def := v.Default
if doOk {
def = do.Default
}
d, err := renderDefault(p.Type, def)
if err != nil {
return nil, fmt.Errorf("failed to render default: %v", err)
}
p.Default = &d
}
cn := CustomNameDetails{}
cnOk, err := overrides.PropertyOverrideWithDetails(CustomName, p, &cn, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom name details: %v", err)
}
if cnOk {
p.customName = cn.Name
}
// String maps always carry a primitive string elem.
if p.Type.String() == SchemaTypeMap {
e := "&schema.Schema{Type: schema.TypeString}"
p.Elem = &e
p.ElemIsBasicType = true
}
if sens, ok := v.Extension["x-dcl-sensitive"].(bool); ok {
p.Sensitive = sens
}
if v, ok := v.Extension["x-dcl-conflicts"].([]interface{}); ok {
// NOTE: DCL not label x-dcl-conflicts for reused types
// TODO(shuya): handle nested field when b/213503595 got fixed
if parent == nil {
for _, ci := range v {
p.JSONCaseConflictsWith = append(p.JSONCaseConflictsWith, ci.(string))
}
conflictsMap[p.PackageJSONName()] = p.Name()
}
}
// Do this before handling properties so we can check if the parent is readOnly
isSGP := false
if sgp, ok := v.Extension["x-dcl-server-generated-parameter"].(bool); ok {
isSGP = sgp
}
if v.ReadOnly || isSGP || (parent != nil && parent.Computed) {
p.Computed = true
// Server-generated identity fields still need a state getter.
if stringInSlice(p.Name(), identityFields) {
sg := p.DefaultStateGetter()
p.StateGetter = &sg
}
}
// Handle object properties
if len(v.Properties) > 0 {
props, err := createPropertiesFromSchema(v, typeFetcher, overrides, resource, &p, location)
if err != nil {
return nil, err
}
p.Properties = props
if !p.Computed {
// Computed fields cannot specify MaxItems
mi := int64(1)
p.MaxItems = &mi
}
e := fmt.Sprintf("%s%sSchema()", resource.PathType(), p.PackagePath())
p.Elem = &e
p.ElemIsBasicType = false
}
// Handle array properties
if v.Items != nil {
ls := CustomListSizeConstraintDetails{}
lsOk, err := overrides.PropertyOverrideWithDetails(CustomListSize, p, &ls, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom list size details")
}
if lsOk {
if ls.Max > 0 {
p.MaxItems = &ls.Max
}
if ls.Min > 0 {
p.MinItems = &ls.Min
}
}
// We end up handling arrays of objects very similarly to nested objects
// themselves
if len(v.Items.Properties) > 0 {
props, err := createPropertiesFromSchema(v.Items, typeFetcher, overrides, resource, &p, location)
if err != nil {
return nil, err
}
p.Properties = props
e := fmt.Sprintf("%s%sSchema()", resource.PathType(), p.PackagePath())
p.Elem = &e
p.ElemIsBasicType = false
} else {
// Primitive array elements get an inline schema elem; reference
// arrays additionally compare as self links / resource names.
i := Type{typ: v.Items}
e := fmt.Sprintf("&schema.Schema{Type: schema.%s}", i.String())
if _, ok := v.Extension["x-dcl-references"]; ok {
e = fmt.Sprintf("&schema.Schema{Type: schema.%s, DiffSuppressFunc: compareSelfLinkOrResourceName, }", i.String())
}
p.Elem = &e
p.ElemIsBasicType = true
}
}
// Non-computed fields are either required (listed in schema.Required)
// or optional.
if !p.Computed {
glog.Infof("Looking for %q in %v.", v.Title, schema.Required)
if stringInSlice(v.Title, schema.Required) {
p.Required = true
} else {
p.Optional = true
}
}
cr := CustomSchemaValuesDetails{}
crOk, err := overrides.PropertyOverrideWithDetails(CustomSchemaValues, p, &cr, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom required details")
}
if crOk {
p.Required = cr.Required
p.Optional = cr.Optional
p.Computed = cr.Computed
}
// Handle settable fields. If the field is computed it's not settable but
// if it's also optional (O+C), it is.
if !p.Computed || (p.Optional) {
p.Settable = true
// NOTE: x-kubernetes-immmutable implies that all children of a field
// are actually immutable. However, in practice, DCL specs will label
// every immutable subfield.
if isImmutable, ok := v.Extension["x-kubernetes-immutable"].(bool); ok && isImmutable {
p.ForceNew = true
}
if serverDefault, ok := v.Extension["x-dcl-server-default"].(bool); ok && serverDefault {
p.Computed = true
}
if forwardSlashAllowed, ok := v.Extension["x-dcl-forward-slash-allowed"].(bool); ok && forwardSlashAllowed {
p.forwardSlashAllowed = true
}
// special handling for project/region/zone/other fields with
// provider defaults
if stringInSlice(p.title, []string{"project", "region", "zone"}) || stringInSlice(p.customName, []string{"region", "project", "zone"}) {
p.Optional = true
p.Required = false
p.Computed = true
sg := fmt.Sprintf("dcl.String(%v)", p.Name())
p.StateGetter = &sg
cig := &CustomIdentityGetterDetails{}
cigOk, err := overrides.PropertyOverrideWithDetails(CustomIdentityGetter, p, cig, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom identity getter details")
}
propertyName := p.title
if p.customName != "" {
propertyName = p.customName
}
ig := fmt.Sprintf("get%s(d, config)", renderSnakeAsTitle(miscellaneousNameSnakeCase(propertyName)))
if cigOk {
ig = fmt.Sprintf("%s(d, config)", cig.Function)
}
p.IdentityGetter = &ig
} else {
sg := p.DefaultStateGetter()
p.StateGetter = &sg
}
}
// State setter: default, suppressed for sensitive fields, then possibly
// replaced by a custom override or removed for ignore-read fields.
ss := p.DefaultStateSetter()
p.StateSetter = &ss
if p.Sensitive {
p.StateSetter = nil
}
css := CustomStateSetterDetails{}
cssOk, err := overrides.PropertyOverrideWithDetails(CustomStateSetter, p, &css, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom stateSetter func: %v", err)
}
if cssOk {
p.StateSetter = &css.Function
}
irOk := overrides.PropertyOverride(IgnoreRead, p, location)
if irOk {
p.StateSetter = nil
}
cd := CustomDescriptionDetails{}
cdOk, err := overrides.PropertyOverrideWithDetails(CustomDescription, p, &cd, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom description details: %v", err)
}
if cdOk {
p.Description = cd.Description
}
// Diff suppression: custom override wins; otherwise apply the default
// unless the field is purely computed (output-only).
dsf := CustomDiffSuppressFuncDetails{}
dsfOk, err := overrides.PropertyOverrideWithDetails(DiffSuppressFunc, p, &dsf, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom diff suppress func: %v", err)
}
if dsfOk {
p.DiffSuppressFunc = &dsf.DiffSuppressFunc
} else if !(p.Computed && !p.Optional) {
p.DiffSuppressFunc = p.DefaultDiffSuppress()
}
vf := CustomValidationDetails{}
vfOk, err := overrides.PropertyOverrideWithDetails(CustomValidation, p, &vf, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom validation func: %v", err)
}
if vfOk {
p.ValidateFunc = &vf.Function
}
// Sets need a hash function: custom override or the type default.
if p.Type.String() == SchemaTypeSet {
shf := SetHashFuncDetails{}
shfOk, err := overrides.PropertyOverrideWithDetails(SetHashFunc, p, &shf, location)
if err != nil {
return nil, fmt.Errorf("failed to decode set hash func: %v", err)
}
if shfOk {
p.SetHashFunc = &shf.Function
} else {
p.SetHashFunc = p.DefaultSetHashFunc()
}
}
cm := CustomConfigModeDetails{}
cmOk, err := overrides.PropertyOverrideWithDetails(CustomConfigMode, p, &cm, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom config mode func: %v", err)
}
if cmOk {
p.ConfigMode = &cm.Mode
}
rd := &RemovedDetails{}
rdOk, err := overrides.PropertyOverrideWithDetails(Removed, p, rd, location)
if err != nil {
return nil, fmt.Errorf("failed to decode removed details")
}
if rdOk {
p.Removed = &rd.Message
}
dd := &DeprecatedDetails{}
ddOk, err := overrides.PropertyOverrideWithDetails(Deprecated, p, dd, location)
if err != nil {
return nil, fmt.Errorf("failed to decode deprecated details")
}
if ddOk {
p.Deprecated = &dd.Message
}
// Collapsed objects route state access through the collapsed helpers;
// top-level collapsed objects set state directly from `res`.
if overrides.PropertyOverride(CollapsedObject, p, location) {
p.Collapsed = true
if p.parent == nil {
collapseSS := fmt.Sprintf("setStateForCollapsedObject(d, %s)", p.flattenGetterWithParent("res"))
p.StateSetter = &collapseSS
}
collapseSG := fmt.Sprintf("expand%s%sCollapsed(d)", p.resource.PathType(), p.PackagePath())
p.StateGetter = &collapseSG
}
// Add any new imports as needed
if ls := p.GetRequiredFileImports(); len(ls) > 0 {
resource.additionalFileImportSet.Add(ls...)
}
csgd := CustomStateGetterDetails{}
csgdOk, err := overrides.PropertyOverrideWithDetails(CustomStateGetter, p, &csgd, location)
if err != nil {
return nil, fmt.Errorf("failed to decode custom state getter details with err %v", err)
}
if csgdOk {
p.StateGetter = &csgd.Function
}
// Enum-bool fields are strings in Terraform so "unset" and "false" can
// be distinguished; state access goes through the enum-bool helpers.
if overrides.PropertyOverride(EnumBool, p, location) {
p.EnumBool = true
p.Type.typ.Type = "string"
var parent string
if p.parent == nil {
parent = "res"
} else {
parent = "obj"
}
enumBoolSS := fmt.Sprintf("d.Set(%q, flattenEnumBool(%s.%s))", p.Name(), parent, p.PackageName)
p.StateSetter = &enumBoolSS
enumBoolSG := fmt.Sprintf("expandEnumBool(d.Get(%q))", p.Name())
p.StateGetter = &enumBoolSG
}
if overrides.PropertyOverride(GenerateIfNotSet, p, location) {
p.Computed = true
p.Required = false
p.Optional = true
ig := fmt.Sprintf("generateIfNotSet(d, %q, %q)", p.Name(), "tfgen")
p.IdentityGetter = &ig
n := fmt.Sprintf("&%s", p.Name())
p.StateGetter = &n
}
if overrides.PropertyOverride(NamePrefix, p, location) {
p.Computed = true
p.Required = false
p.Optional = true
ig := fmt.Sprintf("generateIfNotSet(d, %q, d.Get(%q).(string))", p.Name(), "name_prefix")
p.IdentityGetter = &ig
n := fmt.Sprintf("&%s", p.Name())
p.StateGetter = &n
// plus, add the "name_prefix" property.
props = append(props, Property{
title: "name_prefix",
Type: p.Type,
resource: resource,
parent: parent,
Optional: true,
Computed: true,
ForceNew: true,
})
}
if p.ref != "" {
resource.ReusedTypes = resource.RegisterReusedType(p)
}
props = append(props, p)
}
// handle conflict fields: translate each declared JSON-case conflict into
// the Terraform-side property name recorded in conflictsMap.
for i, _ := range props {
p := &props[i]
if p.JSONCaseConflictsWith != nil {
for _, cf := range p.JSONCaseConflictsWith {
if val, ok := conflictsMap[cf]; ok {
p.ConflictsWith = append(p.ConflictsWith, val)
} else {
return nil, fmt.Errorf("Error generating conflict fields. %s is not labeled as a conflict field in DCL", cf)
}
}
}
}
// sort the properties so they're in a nice order
sort.SliceStable(props, propComparator(props))
return props, nil
}
|
<gh_stars>0
#include <iostream>
using namespace std;

// Read weight (pounds) and height (inches), convert to metric units,
// and print the BMI rounded to two decimal places.
int main() {
    float weightLbs, heightIn;
    cout << "Please enter weight in pounds:" << endl;
    cin >> weightLbs;
    cout << "Please enter height in inches:" << endl;
    cin >> heightIn;

    // Convert to kilograms and meters.
    float weightKg = weightLbs * 0.453592;
    float heightM = heightIn * 0.0254;

    // BMI = kg / m^2, rounded to two decimal places.
    float bmi = weightKg / (heightM * heightM);
    bmi = (float)((int)(bmi * 100 + .5)) / 100;

    cout << "BMI is: " << bmi << endl;
    return 0;
}
# Assumptions: NVM installed

# Add a reference to nvm
export NVM_DIR=~/.nvm
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"

# Pin the Node version used by this project.
# NOTE(review): the original comment said "Node LTS", but 4.2.6 is a fixed
# historical release, not the current LTS — bump deliberately, not blindly.
nvm install 4.2.6

# Global prerequisites
npm install -g webpack webpack-dev-server

# Local NPM install
npm install

# Build the bundle with Webpack
webpack

# Start the dev server, serving static assets from dist/
webpack-dev-server --content-base dist/
def strip_whitespace_and_punctuations(string):
    """Return ``string`` with every whitespace and punctuation character removed.

    Bug fix: the original ``import string`` inside the body rebound the
    local name ``string`` (the parameter) to the stdlib module, so the
    following ``string.translate(...)`` raised ``AttributeError``.
    Importing the module under an alias keeps the parameter intact.
    """
    import string as string_constants  # stdlib whitespace/punctuation tables
    removal_table = str.maketrans('', '', string_constants.whitespace + string_constants.punctuation)
    return string.translate(removal_table)
<filename>chapter_001/src/test/java/ru/job4j/loop/TestPrimeNumber.java
package ru.job4j.loop;
import org.junit.Test;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.*;
/**
 * Tests for PrimeNumber.
 *
 * @author <NAME> ( https://vk.com/id428714363)
 * @version 1.0
 * @since 23.07.2019
 */
public class TestPrimeNumber {

    /** Expect 3 primes in [1, 5]: 2, 3, 5. */
    @Test
    public void when5() {
        PrimeNumber prime = new PrimeNumber();
        int count = prime.calc(5);
        assertThat(count, is(3));
    }

    /** Expect 5 primes in [1, 11]: 2, 3, 5, 7, 11. */
    @Test
    public void when11() {
        PrimeNumber prime = new PrimeNumber();
        int count = prime.calc(11);
        assertThat(count, is(5));
    }

    /**
     * Expect 1 prime in [1, 2].
     * Renamed from when1: the method actually exercises calc(2).
     */
    @Test
    public void when2() {
        PrimeNumber prime = new PrimeNumber();
        int count = prime.calc(2);
        assertThat(count, is(1));
    }
}
|
<reponame>beigirad/CompilerWorkshop
// Sample input for the compiler workshop: exercises fields, static and
// instance members, arithmetic, conditionals, and a while loop.
public class HelloWorld {
    static int age = 10;
    private static double height = 1.74;
    public double weight = (((74.02 - 75.549)));

    /** Returns a + b when sum is true, otherwise a - b. */
    public static int action(int a, int b, boolean sum) {
        if (sum)
            return a + b; //sum
        else
            return a - b;
    }

    /**
     * Arithmetic/branching exercise: adjusts b, then divides by c.
     * NOTE(review): double division by zero yields Infinity/NaN, it
     * does not throw — confirm that is the intent of the fixture.
     */
    private static double myFunc2(double b, int c) {
        if (true) {
            b = b + (2 - 6);
        } else {
        }
        if (c < 2 * b) {
            b = 8 * 2;
        } else {
            c = 0;
        }
        return b / c;
    }

    public static void doSth() {
        Integer a = 0;
        while (a < 200) {
            a = a + 5;
        }
        // call methods
        // Bug fix: the original called Test.myFunc2, but no class named
        // Test exists here; the method is defined on this class.
        double result = myFunc2(5, 10);
    }
}
|
import * as constants from './constants'
const initialState = {
  user: null,
  error: null
};

/**
 * Reducer for authentication state ({user, error}).
 *
 * Bug fix: LOGIN_USER_SUCCESS previously stored the whole action object
 * (including its `type` field) as `user`; it now stores `action.user`,
 * consistent with how SET_USER reads its payload.
 */
const userReducer = (state = initialState, action) => {
  switch (action.type) {
    case (constants.SET_USER):
      return Object.assign({}, state, {user: action.user, error: null});
    case (constants.LOGIN_USER_WITH_GOOGLE):
      return Object.assign({}, state, {error: null});
    case (constants.LOGIN_USER_WITH_EMAIL):
      return Object.assign({}, state, {error: null});
    case (constants.LOGIN_USER_SUCCESS):
      return Object.assign({}, state, {user: action.user, error: null});
    case (constants.LOGIN_USER_ERROR):
      return Object.assign({}, state, {error: action.error});
    case (constants.LOGOUT_USER):
      return Object.assign({}, state, {user: null});
    default:
      return state;
  }
};

export default userReducer;
<filename>public/uploads/files/arcedu.sql.sql
INSERT INTO `environments` (`env_id`, `type`, `area`, `flat`, `code`, `busy`, `delete`, `created_at`, `updated_at`) VALUES
(1, 'Departamento', 45.50, 1, 'A-1', 0, 0, '2016-02-09 04:08:58', '2016-02-09 07:41:36'),
(2, 'Departamento', 67.50, 1, 'A-2', 1, 0, '2016-02-09 04:13:53', '2016-02-09 08:13:53'),
(3, 'Departamento', 95.50, 1, 'A-3', 0, 0, '2016-02-06 02:56:51', '2016-02-06 06:53:43'),
(4, 'Departamento', 45.50, 1, 'A-4', 0, 0, '2016-02-09 03:28:27', '2016-02-06 06:58:24'),
(5, 'Departamento', 94.50, 1, 'A-5', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(6, 'Oficina', 45.50, 2, 'B-1', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(7, 'Oficina', 60.50, 2, 'B-2', 0, 0, '2016-02-09 03:28:24', '2016-02-06 06:59:14'),
(8, 'Oficina', 75.50, 2, 'B-3', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(9, 'Oficina', 55.50, 2, 'B-4', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(10, 'Tienda', 45.50, 0, 'P-1', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(11, 'Tienda', 60.50, 0, 'P-2', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(12, 'Tienda', 78.50, 0, 'P-3', 0, 0, '2016-02-09 03:28:31', '0000-00-00 00:00:00'),
(13, 'Tienda', 45.50, 0, 'P-4', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(14, 'Deposito', 30.50, 0, 'P-5', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(15, 'Deposito', 30.50, -1, 'S-1', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(16, 'Deposito', 40.50, -1, 'S-2', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(17, 'Deposito', 35.50, -1, 'S-2', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(18, 'Area Social', 100.50, 7, 'I-1', 0, 0, '2016-01-22 09:40:10', '0000-00-00 00:00:00'),
(19, 'Area Social', 100.50, 7, 'I-2', 0, 0, '2016-01-22 09:40:11', '0000-00-00 00:00:00'),
(20, 'Area Social', 100.50, 7, 'I-3', 0, 0, '2016-01-22 09:40:11', '0000-00-00 00:00:00');
--
-- Dumping data for table `persons`
--
INSERT INTO `persons` (`id`, `user_id`, `ci`, `expedido`, `names`, `last_name_f`, `last_name_m`, `phone`, `phone_cel`, `email`, `delete`, `created_at`, `updated_at`) VALUES
(1, NULL, 5208822, 'CBA', '<NAME>', 'Murillo', 'Nava', '4739913', '79346585', '<EMAIL>', 0, '2016-02-05 19:52:22', '2016-02-05 23:52:22'),
(2, NULL, 789582, 'ORU', 'Juan', 'Capriles', '', '', '', NULL, 0, '2016-02-03 20:45:33', '0000-00-00 00:00:00'),
(3, NULL, 5208821, 'LPZ', 'Romulo', 'Capriles', 'Orellana', '4739918', '79346588', '<EMAIL>', 0, '2016-02-05 19:58:35', '2016-02-05 23:57:51'),
(4, NULL, 5208820, 'LPZ', 'Carmilo', 'Peres', 'Soria', '', '70755485', '<EMAIL>', 0, '2016-02-05 22:48:12', '2016-02-06 02:48:12');
INSERT INTO `rental_month` (`rm_id`, `date_admission`, `date_end`, `warranty`, `payment`, `larder`, `penalty_fee`, `delete`, `created_at`, `updated_at`) VALUES
(4, '2016-02-01', '2017-02-01', 1500.00, 250.00, 85.00, 5.00, 0, '2016-02-09 08:13:53', '2016-02-09 08:13:53');
-- Seed users. Password/token values were anonymized ("<PASSWORD>") when this
-- dump was published; the anonymization dropped the opening quote, which made
-- the statement syntactically invalid. Restored the quoting here.
INSERT INTO `users` (`id`, `name`, `email`, `password`, `type`, `remember_token`, `created_at`, `updated_at`) VALUES
(1, 'angel', '<EMAIL>', '<PASSWORD>', 'admin', '<PASSWORD>', '2016-02-05 03:16:26', '2016-02-05 07:16:26'),
(2, 'pedro', '<EMAIL>', '<PASSWORD>', 'user', 'DfJlsy6gdQTkqeErZ28JRlDMiFKfWkxf8Ge2j3A3757E04Ch1mz1UJxAP8Xy', '2016-01-27 04:29:18', '2016-01-27 08:29:18');
|
<gh_stars>0
// SPDX-License-Identifier: BSD-2-Clause-Patent
#ifndef SHIM_STR_H
#define SHIM_STR_H
#ifdef SHIM_UNIT_TEST
#pragma GCC diagnostic error "-Wnonnull-compare"
#else
#pragma GCC diagnostic ignored "-Wnonnull-compare"
#endif
/* Length of s, capped at n: never reads past s[n-1]. */
static inline UNUSED NONNULL(1) unsigned long
strnlena(const CHAR8 *s, unsigned long n)
{
	unsigned long len = 0;

	while (len < n && s[len] != '\0')
		len++;
	return len;
}
/* strncpy() semantics: copy up to n bytes of src, NUL-padding the rest. */
static inline UNUSED RETURNS_NONNULL NONNULL(1, 2) CHAR8 *
strncpya(CHAR8 *dest, const CHAR8 *src, unsigned long n)
{
	unsigned long i = 0;

	while (i < n && src[i] != '\0') {
		dest[i] = src[i];
		i++;
	}
	while (i < n)
		dest[i++] = '\0';
	return dest;
}
/* Append src (including its terminator) to dest; dest must have room. */
static inline UNUSED RETURNS_NONNULL NONNULL(1, 2) CHAR8 *
strcata(CHAR8 *dest, const CHAR8 *src)
{
	CHAR8 *tail = dest + strlena(dest);
	unsigned long i = 0;

	while (src[i] != '\0') {
		tail[i] = src[i];
		i++;
	}
	tail[i] = '\0';
	return dest;
}
/* Pool-allocated copy of src; NULL when allocation fails. */
static inline UNUSED NONNULL(1) CHAR8 *
strdup(const CHAR8 * const src)
{
	UINTN len = strlena(src);
	CHAR8 *copy = AllocateZeroPool(len + 1);

	if (copy)
		strncpya(copy, src, len);
	return copy;
}
/* Like strdup(), but copies at most srcmax bytes of src. */
static inline UNUSED NONNULL(1) CHAR8 *
strndupa(const CHAR8 * const src, const UINTN srcmax)
{
	UINTN len = strnlena(src, srcmax);
	CHAR8 *copy = AllocateZeroPool(len + 1);

	if (copy)
		strncpya(copy, src, len);
	return copy;
}
/* Copy src into dest; returns a pointer to dest's new terminator. */
static inline UNUSED RETURNS_NONNULL NONNULL(1, 2) char *
stpcpy(char *dest, const char * const src)
{
	size_t i = 0;

	while (src[i] != '\0') {
		dest[i] = src[i];
		i++;
	}
	dest[i] = '\000';
	return &dest[i];
}
/*
 * Copy str into out, converting backslashes to forward slashes.
 * A doubled backslash ("\\") collapses to a single '/'.
 * Returns out, or NULL when either argument is NULL.
 * out must be at least as large as str (the result never grows).
 */
static inline UNUSED CHAR8 *
translate_slashes(CHAR8 *out, const char *str)
{
	int i;
	int j;

	if (str == NULL || out == NULL)
		return NULL;

	for (i = 0, j = 0; str[i] != '\0'; i++, j++) {
		if (str[i] == '\\') {
			out[j] = '/';
			/* skip the second byte of an escaped backslash */
			if (str[i+1] == '\\')
				i++;
		} else
			out[j] = str[i];
	}
	out[j] = '\0';
	return out;
}
/* strchrnul() for CHAR8: first occurrence of c, or the terminator. */
static inline UNUSED RETURNS_NONNULL NONNULL(1) CHAR8 *
strchrnula(const CHAR8 *s, int c)
{
	const CHAR8 *p = s;

	while (*p != '\000' && *p != c)
		p++;
	return (CHAR8 *)p;
}
/* strchr() for CHAR8: NULL when c is absent (or is the terminator). */
static inline UNUSED NONNULL(1) CHAR8 *
strchra(const CHAR8 *s, int c)
{
	const CHAR8 *hit = strchrnula(s, c);

	if (!hit || hit[0] == '\000')
		return NULL;
	return (CHAR8 *)hit;
}
/*
 * Bounded strchrnul(): scan at most max bytes of s for c.
 * Returns a pointer to the first occurrence of c or of the NUL
 * terminator; if neither occurs within max bytes, the result is
 * clamped to &s[max-1] (the last byte examined), which may be
 * neither c nor NUL.  A NULL s or zero max returns s unchanged.
 */
static inline UNUSED RETURNS_NONNULL NONNULL(1) char *
strnchrnul(const char *s, size_t max, int c)
{
	unsigned int i;

	if (!s || !max)
		return (char *)s;

	for (i = 0; i < max && s[i] != '\0' && s[i] != c; i++)
		;
	/* clamp to the last byte inside the limit */
	if (i == max)
		i--;
	return (char *)&s[i];
}
/**
 * strntoken: tokenize a string, with a limit
 * str: your string (will be modified)
 * max: maximum number of bytes to ever touch
 * delims: string of one character delimiters, any of which will tokenize
 * *token: the token we're passing back (must be a pointer to NULL initially)
 * state: a pointer to one char of state for between calls
 *
 * Ensure that both token and state are preserved across calls. Do:
 *   char state = 0;
 *   char *token = NULL;
 *   for (...) {
 *        valid = strntoken(...)
 * not:
 *   char state = 0;
 *   for (...) {
 *        char *token = NULL;
 *        valid = strntoken(...)
 *
 * - it will not test bytes beyond str[max-1]
 * - it will not set *token to an address beyond &str[max-1]
 * - it will set *token to &str[max-1] without testing &str[max-2] for
 *   &str[max-1] == str
 * - sequences of multiple delimiters will result in empty (pointer to '\0')
 *   tokens.
 * - it expects you to update str and max on successive calls.
 *
 * return:
 *   true means it hasn't tested str[max-1] yet and token is valid
 *   false means it got to a NUL or str[max-1] and token is invalid
 */
static inline UNUSED NONNULL(1, 3, 4) int
strntoken(char *str, size_t max, const char *delims, char **token, char *state)
{
	char *tokend;
	const char *delim;
	int isdelim = 0;
	int state_is_delim = 0;

	/*
	 * Validate everything up front.  (A second, redundant copy of
	 * this guard used to sit after the tokend assignment below; it
	 * was unreachable dead code and has been removed.)
	 */
	if (!str || !max || !delims || !token || !state)
		return 0;

	tokend = &str[max-1];

	/*
	 * the very special case of "" with max=1, where we have no prior
	 * state to let us know this is the same as right after a delim
	 */
	if (*token == NULL && max == 1 && *str == '\0') {
		state_is_delim = 1;
	}

	for (delim = delims; *delim; delim++) {
		char *tmp = NULL;

		if (*token && *delim == *state)
			state_is_delim = 1;

		/* earliest occurrence of this delimiter, clamped to max */
		tmp = strnchrnul(str, max, *delim);
		if (tmp < tokend)
			tokend = tmp;
		if (*tokend == *delim)
			isdelim = 1;
	}

	*token = str;
	if (isdelim) {
		/* remember which delimiter ended the token, then cut it */
		*state = *tokend;
		*tokend = '\0';
		return 1;
	}
	return state_is_delim;
}
#define UTF8_BOM { 0xef, 0xbb, 0xbf }
#define UTF8_BOM_SIZE 3
/*
 * True when buf begins with the UTF-8 byte-order mark.
 * NOTE(review): when bufsize < UTF8_BOM_SIZE only the first bufsize
 * bytes are compared, so a short buffer that merely starts with a BOM
 * prefix also reports true — confirm callers pass bufsize >= 3.
 */
static inline UNUSED NONNULL(1) BOOLEAN
is_utf8_bom(CHAR8 *buf, size_t bufsize)
{
	unsigned char bom[] = UTF8_BOM;

	return CompareMem(buf, bom, MIN(UTF8_BOM_SIZE, bufsize)) == 0;
}
/**
* parse CSV data from data to end.
* *data points to the first byte of the data
* end points to a NUL byte at the end of the data
* n_columns number of columns per entry
* list the list head we're adding to
*
 * On success, list will be populated with individually allocated
 * struct csv_row objects, with one column per entry of the "columns" array,
 * filled left to right with up to n_columns elements, or NULL when a csv line
 * does not have enough elements.
*
* Note that the data will be modified; all comma, linefeed, and newline
* characters will be set to '\000'. Additionally, consecutive linefeed and
* newline characters will not result in rows in the results.
*
* On failure, list will be empty and all entries on it will have been freed,
* using free_csv_list(), whether they were there before calling
* parse_csv_data or not.
*/
/* One parsed CSV row; rows are allocated individually by parse_csv_data(). */
struct csv_row {
	list_t list; /* this is a linked list */
	size_t n_columns; /* this is how many columns are actually populated */
	char *columns[0]; /* these are pointers to columns */
};

EFI_STATUS parse_csv_data(char *data, char *end, size_t n_columns,
			  list_t *list);
/* Free every csv_row on list (see parse_csv_data() docs above). */
void free_csv_list(list_t *list);
#ifdef SHIM_UNIT_TEST
void NONNULL(1, 3, 4)
parse_csv_line(char * line, size_t max, size_t *n_columns, const char *columns[]);
#endif
#endif /* SHIM_STR_H */
|
<filename>examples/query.js
/**
* Created with JetBrains WebStorm.
* User: kamol
* Date: 3/27/15
* Time: 10:29 AM
* To change this template use File | Settings | File Templates.
*/
var DataSource = require('loopback-datasource-juggler').DataSource;
var config = require('rc')('loopback', {dev: {firebase: {}}}).dev.firebase;
var FBConnector = require('../index.js');

// Create new Datasource with FBConnector
var ds = new DataSource(FBConnector, config);

// Sample records used by the query examples below.
var user1 = {
  name: '<NAME>',
  age: 30,
  title: 'MyPost',
  contents: 'My First Post'
};
var user2 = {
  name: '<NAME>',
  age: 30,
  title: 'MyPost',
  contents: 'Updated Contents'
};
var user3 = {
  name: 'John',
  age: 30,
  title: 'Post',
  contents: 'Current affairs'
};
var user4 = {
  name: 'John',
  age: 35,
  title: 'Post2',
  contents: 'The making of USA'
};
var user5 = {
  name: 'David',
  age: 20,
  title: 'World as seen'
};

// Define our model.
// Bug fix: `User` was assigned without `var`, leaking an implicit global
// (and throwing outright under strict mode). It stays visible to the
// helper functions below.
var User = ds.define('users', {
  id: { type: String },
  name: { type: String},
  dob: { type: String}
});

// Connect to Datasource, then seed the sample users.
ds.connect(function(err, authData){
  if (err) {
    console.log("connection failed");
  } else {
    console.log("connection successfull");
    createUser(user1);
    createUser(user2);
    createUser(user3);
    createUser(user4);
    createUser(user5);
  }
});
// Add user
function createUser (user) {
User.create(user, function (err, user) {
if (err) {
console.log("creation failed");
} else {
console.log("user created");
if (user.name === 'David') {
FindAll();
FindWithName();
FindWithAge();
FindLimit();
FindOrdered();
// Not Supported Yet
//FindAgeAndName();
//FindProperty();
//FindPosts();
//FindOwnerPosts();
}
}
});
}
// Return All Data
function FindAll() {
User.find ({},function(err, data){
console.log('FindAll Data: ', data);
});
}
// Return data for given User
function FindWithName() {
User.find ({where: {name: '<NAME>'}},function(err, data){
console.log('FindWithName Data: ', data);
});
}
// Return data witch matches age criteria
function FindWithAge() {
User.find ({where: {age: {gt: 25}}},function(err, data){
console.log('FindWithAge Data: ', data);
});
}
// Return data with matching Name and Age
function FindAgeAndName() {
User.find ({where: {name: 'John', age: {gt: 30}}},function(err, data){
console.log('FindAgeAndName Data: ', data);
});
}
// Return only x data
function FindLimit() {
User.find ({limit: 3}, function(err, data){
console.log('FindLimit Data: ', data);
});
}
// Return ordered data
function FindOrdered() {
User.find ({order: 'age DESC', limit: 3}, function(err, data){
console.log('FindOrdered Data: ', data);
});
}
// Return only title and contents
function FindProperty() {
User.find ({fields: {title: true, contents: true}}, function(err, data){
console.log('FindProperty Data: ', data);
});
}
function FindPosts() {
User.find ({include: 'contents'}, function(err, data){
console.log('FindPosts Data: ', data);
});
}
function FindOwnerPosts() {
User.find ({include: {name: 'title'}}, function(err, data){
console.log('FindOwnerPosts Data: ', data);
});
} |
// Cypress end-to-end checks for saving an API as a datasource.
describe("Datasource form related tests", function() {
  it("Check whether the delete button has the right color", function() {
    cy.NavigateToAPI_Panel();
    cy.CreateAPI("Testapi");
    cy.enterDatasourceAndPath("https://reqres.in/api/", "users");
    // Save the API as a datasource, then add a key/value row so the
    // delete-field control is rendered before asserting its color.
    cy.get(".t--store-as-datasource-menu").click();
    cy.get(".t--store-as-datasource").click();
    cy.get(".t--form-control-KEY_VAL_INPUT .t--add-field").click();
    cy.get(".t--form-control-KEY_VAL_INPUT .t--delete-field").should(
      "attr",
      "color",
      "#A3B3BF",
    );
  });
  it("Check if save button is disabled", function() {
    // NOTE(review): no navigation here — this test appears to rely on
    // the state left behind by the previous test; confirm run order.
    cy.testDatasource();
    cy.get(".t--save-datasource").should("not.be.disabled");
  });
  it("Check if saved api as a datasource does not fail on cloning", function() {
    cy.NavigateToAPI_Panel();
    cy.GlobalSearchEntity("Testapi");
    // The context-menu icon is hidden until hovered; force it visible
    // before clicking.
    cy.xpath('//*[local-name()="g" and @id="Icon/Outline/more-vertical"]')
      .last()
      .should("be.hidden")
      .invoke("show")
      .click({ force: true });
    cy.get('.single-select:contains("Copy to page")').click();
    cy.get('.single-select:contains("Page1")').click();
    cy.validateToastMessage("Testapi Action copied");
  });
});
|
#!/bin/sh
# Abort on the first failing step so gunicorn never starts on a broken app.
set -e

echo "------ Create database tables ------"
python manage.py migrate --noinput

echo "------ create default admin user ------"
# Security fix: credentials are read from the environment; the literals
# below are development-only defaults (previously hard-coded in source).
# Creation is skipped when the user exists so the script is re-runnable.
ADMIN_USER="${DJANGO_ADMIN_USER:-admin}"
ADMIN_EMAIL="${DJANGO_ADMIN_EMAIL:-nkmurli99@gmail.com}"
ADMIN_PASSWORD="${DJANGO_ADMIN_PASSWORD:-Passw0rd}"
echo "from django.contrib.auth.models import User; User.objects.filter(username='$ADMIN_USER').exists() or User.objects.create_superuser('$ADMIN_USER', '$ADMIN_EMAIL', '$ADMIN_PASSWORD')" | python manage.py shell

echo "------ starting gunicorn ------"
gunicorn vasuapp.wsgi --workers 2
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2018 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.util.ltl.formulas;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
/**
 * Base class for n-ary propositional LTL connectives; holds the
 * immutable list of operand sub-formulas.
 */
public abstract class PropositionalFormula implements LtlFormula {

  private final ImmutableList<? extends LtlFormula> children;

  PropositionalFormula(Iterable<? extends LtlFormula> pChildren) {
    children = ImmutableList.copyOf(pChildren);
  }

  PropositionalFormula(LtlFormula... pChildren) {
    children = ImmutableList.copyOf(pChildren);
  }

  public ImmutableList<? extends LtlFormula> getChildren() {
    return children;
  }

  @Override
  public final int hashCode() {
    // 31 is the conventional small prime used for hash mixing.
    return 31 + children.hashCode();
  }

  @Override
  public final boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof PropositionalFormula)) {
      return false;
    }
    PropositionalFormula that = (PropositionalFormula) obj;
    return getSymbol().equals(that.getSymbol()) && children.equals(that.children);
  }

  /** The operator symbol joining this formula's operands in toString(). */
  public abstract String getSymbol();

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("(");
    for (int i = 0; i < children.size(); i++) {
      if (i > 0) {
        sb.append(" ").append(getSymbol()).append(" ");
      }
      sb.append(children.get(i));
    }
    return sb.append(")").toString();
  }
}
|
import torch
from torch import nn
from torch.autograd import Variable
from typing import Tuple
class ReinforcementLearningAgent:
    """Worker-side agent holding a reference to the shared master config."""

    def __init__(self, master, env, process_id):
        # master: shared configuration/parameter holder (provides
        # enable_continuous, hidden_dim, dtype).
        self.master = master
        self.env = env
        self.process_id = process_id

    def update_lstm_hidden_state(self, training: bool) -> Tuple[torch.Tensor, torch.Tensor]:
        """Build a fresh, zeroed (h, c) hidden-state pair for the LSTM.

        Bug fix: the original signature omitted ``self`` while the body
        read ``self.master``, so every call raised.  The deprecated
        ``Variable(..., volatile=...)`` wrapper (removed in torch >= 0.4)
        is replaced by plain zero tensors, which do not track gradients
        by default.

        Args:
            training: kept for interface compatibility; it only selected
                the obsolete ``volatile`` flag.

        Returns:
            A (h, c) tuple of zero tensors, with 2 rows in the
            continuous-action configuration, else 1.
        """
        num_rows = 2 if self.master.enable_continuous else 1

        def _zeros() -> torch.Tensor:
            return torch.zeros(num_rows, self.master.hidden_dim).type(self.master.dtype)

        return (_zeros(), _zeros())
<filename>src/listeners/MainListener.js
const { EventHandler, CommandContext, MayfiEmbed } = require('../')
const DatabaseCheck = require("../utils/DatabaseCheck.js")
module.exports = class ClientOnMessage extends EventHandler {
constructor(client) {
super(client, 'message')
}
async run(message) {
if (message.author.bot) return
const user = await this.client.database.users.findOne({ _id: message.author.id })
const guild = await this.client.database.guilds.findOne({ _id: message.guild.id })
let prefix = guild ? guild.prefix : "m!"
const botMention = this.client.user.toString()
const mc = (...m) => m.some(st => message.content.toLowerCase().startsWith(st))
const usedPrefix = mc(botMention, `<@!${this.client.user.id}>`) ? `${botMention} ` : mc(prefix) ? prefix : null
if(!usedPrefix) return
const fullCmd = message.content.substring(usedPrefix.length).split(/[ \t]+/).filter(a => a)
const args = fullCmd.slice(1)
if (!fullCmd.length) return
DatabaseCheck.checkGuild(this.client, guild, message.guild.id)
DatabaseCheck.checkUser(this.client, user, message.author)
if(user && user.blacklisted) return
const language = guild ? guild.language : "en-US"
const cmd = fullCmd[0].toLowerCase().trim()
const command = this.client.commands.get(cmd) || this.client.commands.get(this.client.aliases.get(cmd))
if(!command) return
if (!message.channel.permissionsFor(message.guild.me).has("SEND_MESSAGES")) {
if (!message.guild.owner || guild.noPermissions === true) return
message.guild.owner.send(
new MayfiEmbed(message.guild.owner)
.setTitle("An error occurred")
.setDescription(`:flag_us: Take note that i need the permission **"SEND_MESSAGES"** in order to send messages, please consider adding me this permission.`)
)
await this.client.database.guilds.updateOne({_id: message.guild.id}, { noPermissions: true })
}
const context = new CommandContext({
client: this.client,
message,
language,
command,
prefix
})
console.log(`[Commands] "${message.content}" (${command.constructor.name}) ran by "${message.author.tag}" (${message.author.id}) on guild "${message.guild.name}" (${message.guild.id}) channel "#${message.channel.name}" (${message.channel.id})`)
this.client.runCommand(command, context, args, language)
}
}
|
import React from 'react';
export interface IconCopyProps extends React.SVGAttributes<SVGElement> {
  color?: string;
  size?: string | number;
  className?: string;
  style?: React.CSSProperties;
}

/**
 * Feather "copy" icon.
 *
 * Modernization: `React.SFC` and `defaultProps` on function components
 * are both deprecated; defaults now live in the destructuring pattern,
 * which yields identical rendered output.
 */
export const IconCopy: React.FC<IconCopyProps> = (
  props: IconCopyProps
): React.ReactElement => {
  const { color = 'currentColor', size = '1em', style, ...restProps } = props;
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width={size}
      height={size}
      viewBox="0 0 24 24"
      fill="none"
      stroke={color}
      className="feather feather-copy"
      strokeWidth="2"
      strokeLinecap="round"
      strokeLinejoin="round"
      style={{ verticalAlign: 'middle', ...style }}
      {...restProps}
    >
      <rect x="9" y="9" width="13" height="13" rx="2" ry="2" />
      <path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1" />
    </svg>
  );
};

export default IconCopy;
|
<reponame>zypeh/utf8rewind<filename>tools/gyp/test/dependencies/sharedlib-linksettings/staticlib.c
/*
* Copyright (c) 2013 Google Inc. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
/*
* This will fail to compile if TEST_DEFINE was propagated from sharedlib to
* staticlib.
*/
/* The build must NOT have propagated TEST_DEFINE from sharedlib. */
#ifdef TEST_DEFINE
#error TEST_DEFINE is defined!
#endif

/* sharedLibFunc lives in the shared library; import it appropriately. */
#ifdef _WIN32
__declspec(dllimport)
#else
extern
#endif
int sharedLibFunc();

/* Thin wrapper proving the static lib can call into the shared lib. */
int staticLibFunc() {
  return sharedLibFunc();
}
|
import pandas as pd
SEP = '[SEP]'


def process_data(data_fn, output_fn):
    """Write one line per CSV row as ``Name[SEP]Description_Visual``.

    Args:
        data_fn: path to a CSV file with at least the ``Name`` and
            ``Description_Visual`` columns.
        output_fn: path of the text file to (over)write, one row per line.
    """
    data = pd.read_csv(data_fn)
    with open(output_fn, "w") as f:
        for _, row in data.iterrows():  # the row index is unused
            f.write(f"{row['Name']}{SEP}{row['Description_Visual']}\n")
<reponame>Trevonte/network
# A person in the network (student, parent, educator, resident, or
# community partner) with their school, organization, and activity links.
class Member < ApplicationRecord
  # Alphabetical ordering by default.
  default_scope { order(:last_name, :first_name) }

  include PgSearch
  # Prefix-matching full-text search over first and last name.
  pg_search_scope :search_by_full_name,
    :against => [:first_name, :last_name],
    :using => { :tsearch => {:prefix => true} }

  validates :first_name, presence: true
  validates :last_name, presence: true

  belongs_to :user
  belongs_to :graduating_class, class_name: 'GraduatingClass', foreign_key: :graduating_class_id
  belongs_to :school
  belongs_to :identity

  # Join rows are bulk-deleted (no callbacks) when the member goes away.
  has_many :affiliations, dependent: :delete_all
  has_many :organizations, through: :affiliations
  has_many :talent_assignments, dependent: :delete_all
  has_many :talents, through: :talent_assignments
  has_many :extracurricular_activity_assignments, dependent: :delete_all
  has_many :extracurricular_activities, through: :extracurricular_activity_assignments
  has_many :residences, dependent: :delete_all
  has_many :neighborhoods, through: :residences
  has_many :participations, dependent: :delete_all
  has_many :events, through: :participations, source: :network_event
  has_many :cohortians, dependent: :delete_all
  has_many :cohorts, through: :cohortians
  has_many :matches, dependent: :delete_all
  has_many :matched_actions, through: :matches, source: :network_action
  # These run destroy callbacks, unlike the delete_all associations above.
  has_many :communications, dependent: :destroy
  has_many :network_actions, dependent: :destroy, foreign_key: :actor_id

  # Fixed option lists used by forms/validation.
  def self.ethnicities
    %w{
      Hispanic\ or\ Latino\ or\ Spanish\ Origin
      Not\ Hispanic\ or\ Latino\ or\ Spanish\ Origin
    }
  end

  def self.races
    %w{
      American\ Indian\ or\ Alaska\ Native
      Asian
      Black\ or\ African\ American
      Native\ Hawaiian\ or\ Other\ Pacific\ Islander
      White
    }
  end

  def self.sexes
    %w{ Female Male }
  end

  def self.shirt_sizes
    %w{ S M L XL 2XL 3XL }
  end

  def self.identities
    %w{
      Student
      Parent
      Educator
      Resident
      Community\ Partner
    }
  end

  # Full display name, e.g. "Jane Doe".
  def name
    [first_name, last_name].compact.join(' ')
  end

  # Text representation used by select widgets.
  def text
    name
  end

  # Graduation year, or '' when no graduating class is assigned.
  def graduation_year
    if graduating_class
      graduating_class.year
    else
      ''
    end
  end

  # Comma-separated, sorted lists for display.
  def talent_list
    talents.map(&:name).compact.sort.join(', ')
  end

  def extracurricular_activities_list
    extracurricular_activities.map(&:name).compact.sort.join(', ')
  end

  def affiliation_list
    organizations.map(&:name).compact.sort.join(', ')
  end
end
|
public int getCount(int[] arr, int n) {
int count = 0;
for (int i : arr) {
if (i == n) {
count++;
}
}
return count;
} |
package sri.mobile.examples.navigationx.stackintabs
import sri.navigation.NavigationScreenComponentNoPS
/** Navigation screen that renders a MyNavScreen titled "Home Screen". */
class HomeScreen extends NavigationScreenComponentNoPS {
  def render() = MyNavScreen("Home Screen")
}
|
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
# Bag-of-words + multinomial naive Bayes sentiment demo.
corpus = np.array(['The customer service was excellent.', 'The customer service was terrible.'])
labels = np.array([1, 0])  # 1 = positive, 0 = negative

# Fit the vectorizer and the classifier on the two example sentences.
bag_of_words = CountVectorizer()
features = bag_of_words.fit_transform(corpus)

classifier = MultinomialNB()
classifier.fit(features, labels)

# Classify a new sentence with the trained model.
sample = 'The customer service was excellent.'
sample_features = bag_of_words.transform([sample])
label = classifier.predict(sample_features)[0]

print('positive sentiment' if label == 1 else 'negative sentiment')
<filename>src/lab/Grid/index.tsx
import { useMemo, useState } from "react";
import Row from "./Row";
import { makeGrid } from "./utils";
import "./styles.scss";
const Grid = () => {
  const [size, setSize] = useState(10);

  // Recompute the cell matrix only when `size` changes.
  const grid = useMemo(() => makeGrid(size), [size]);

  const lastRowIndex = grid.length - 1;

  return (
    <div className="Container">
      <input
        type="range"
        min="1"
        max="20"
        value={size}
        onChange={(event) => setSize(Number(event.target.value))}
      />
      <span>{size}</span>
      <div className="Grid">
        {grid.map((row, rowIndex) => (
          <Row
            key={rowIndex}
            index={rowIndex}
            items={row}
            last={rowIndex === lastRowIndex}
          />
        ))}
      </div>
    </div>
  );
};

export default Grid;
#!/bin/bash
# This script will be executed inside pre_test_hook function in devstack gate
set -ex

# Resolve the directory containing this script, then load the shared
# helpers, forwarding all hook arguments.
# Bug fix: $DIR and $@ were unquoted, which breaks on paths containing
# spaces and re-splits the forwarded arguments.
DIR=${BASH_SOURCE%/*}
source "$DIR/commons" "$@"
#!/bin/bash

# Compiler flags shared by both platform builds.
cflags="-Wall -O3 -g -std=gnu11 -fno-strict-aliasing"

# Any argument containing "windows" selects the MinGW cross build;
# everything else builds natively.
if [[ $* == *windows* ]]; then
  platform="windows"
  outfile="mkfontmap.exe"
  compiler="x86_64-w64-mingw32-gcc"
else
  platform="unix"
  outfile="mkfontmap"
  compiler="gcc"
fi

# Prefix the compiler with ccache when it is available.
if command -v ccache >/dev/null; then
  compiler="ccache $compiler"
fi

echo "Compiling ($platform)..."
# NOTE: $compiler and $cflags are expanded unquoted on purpose so that
# "ccache gcc" and the flag list word-split into separate arguments.
$compiler $cflags mkfontmap.c -lm -o $outfile
package CS351_A1;
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* A utility class for reading and writing PGM images. Methods use integers to represent unsigned bytes.
*
* Does not fully conform to the PGM specification because currently there is no support for:
* <ul>
* <li>More than one image per file</li>
* <li>Images with more than 256 shades of gray</li>
* <li>Comments within the raster</li>
* </ul>
*
* @author <NAME>
*/
public final class PGMIO {
/**
* Magic number representing the binary PGM file type.
*/
private static final String MAGIC = "P5";
/**
* Character indicating a comment.
*/
private static final char COMMENT = '#';
/**
* The maximum gray value.
*/
private static final int MAXVAL = 255;
private PGMIO() {}
/**
* Reads a grayscale image from a file in PGM format.
* @param file the PGM file read from
* @return two-dimensional byte array representation of the image
* @throws IOException
*/
public static int[][] read(final File file) throws IOException {
final BufferedInputStream stream = new BufferedInputStream(new FileInputStream(file));
try {
if (!next(stream).equals(MAGIC))
throw new IOException("File " + file + " is not a binary PGM image.");
final int col = Integer.parseInt(next(stream));
final int row = Integer.parseInt(next(stream));
final int max = Integer.parseInt(next(stream));
if (max < 0 || max > MAXVAL)
throw new IOException("The image's maximum gray value must be in range [0, " + MAXVAL + "].");
final int[][] image = new int[row][col];
for (int i = 0; i < row; ++i) {
for (int j = 0; j < col; ++j) {
final int p = stream.read();
if (p == -1)
throw new IOException("Reached end-of-file prematurely.");
else if (p < 0 || p > max)
throw new IOException("Pixel value " + p + " outside of range [0, " + max + "].");
image[i][j] = p;
}
}
return image;
} finally {
stream.close();
}
}
/**
* Finds the next whitespace-delimited string in a stream, ignoring any comments.
* @param stream the stream read from
* @return the next whitespace-delimited string
* @throws IOException
*/
private static String next(final InputStream stream) throws IOException {
final List<Byte> bytes = new ArrayList<Byte>();
while (true) {
final int b = stream.read();
if (b != -1) {
final char c = (char) b;
if (c == COMMENT) {
int d;
do {
d = stream.read();
} while (d != -1 && d != '\n' && d != '\r');
} else if (!Character.isWhitespace(c)) {
bytes.add((byte) b);
} else if (bytes.size() > 0) {
break;
}
} else {
break;
}
}
final byte[] bytesArray = new byte[bytes.size()];
for (int i = 0; i < bytesArray.length; ++i)
bytesArray[i] = bytes.get(i);
return new String(bytesArray);
}
/**
 * Writes a grayscale image to a file in PGM format, using the maximum
 * supported gray value (MAXVAL). Delegates to the three-argument overload.
 * @param image a two-dimensional byte array representation of the image, indexed [row][column]
 * @param file the file to write to
 * @throws IllegalArgumentException propagated from the delegate
 * @throws IOException if a pixel value is out of range or the file cannot be written
 */
public static void write(final int[][] image, final File file) throws IOException {
    write(image, file, MAXVAL);
}
/**
 * Writes a grayscale image to a file in PGM format.
 * @param image a two-dimensional byte array representation of the image, indexed [row][column]
 * @param file the file to write to
 * @param maxval the maximum gray value
 * @throws IllegalArgumentException if {@code maxval} exceeds MAXVAL or the image is empty
 * @throws IOException if a pixel value is out of range or the file cannot be written
 */
public static void write(final int[][] image, final File file, final int maxval) throws IOException {
    if (maxval > MAXVAL)
        throw new IllegalArgumentException("The maximum gray value cannot exceed " + MAXVAL + ".");
    // Previously an empty image surfaced as a raw ArrayIndexOutOfBoundsException
    // when reading image[0]; fail explicitly instead.
    if (image.length == 0 || image[0].length == 0)
        throw new IllegalArgumentException("The image must contain at least one pixel.");
    final BufferedOutputStream stream = new BufferedOutputStream(new FileOutputStream(file));
    try {
        // Header: magic number, dimensions (width before height), maximum gray value.
        stream.write(MAGIC.getBytes());
        stream.write("\n".getBytes());
        stream.write(Integer.toString(image[0].length).getBytes());
        stream.write(" ".getBytes());
        stream.write(Integer.toString(image.length).getBytes());
        stream.write("\n".getBytes());
        stream.write(Integer.toString(maxval).getBytes());
        stream.write("\n".getBytes());
        // Raster: one byte per pixel, row by row.
        for (int i = 0; i < image.length; ++i) {
            for (int j = 0; j < image[0].length; ++j) {
                final int p = image[i][j];
                if (p < 0 || p > maxval)
                    throw new IOException("Pixel value " + p + " outside of range [0, " + maxval + "].");
                stream.write(p);
            }
        }
    } finally {
        stream.close();
    }
}
}
|
import java.util.Scanner;
/**
 * Reads a medicine table and, for each patient, prints the medicine ids that
 * treat the patient's symptoms, or "YOU DIED" when any symptom has no medicine.
 *
 * Input: n, then n pairs of (symptom type, medicine id); then r, then r
 * patients, each given as a symptom count followed by that many symptom types.
 */
public class Main {
    // Symptom types are 1-based in the input; at most 100 distinct types.
    private static final int MAX_SYMPTOMS = 100;

    public static void main(String[] args) {
        Scanner scan = new Scanner(System.in);
        try {
            int n = scan.nextInt();
            int[] medicine = new int[MAX_SYMPTOMS];
            for (int i = 0; i < n; i++) {
                int type = scan.nextInt() - 1; // convert to 0-based index
                medicine[type] = scan.nextInt();
            }
            int r = scan.nextInt();
            for (int i = 0; i < r; i++) {
                int numOfSymptom = scan.nextInt();
                // StringBuilder avoids the O(n^2) cost of repeated String
                // concatenation in the original loop.
                StringBuilder result = new StringBuilder();
                boolean died = false;
                for (int j = 0; j < numOfSymptom; j++) {
                    int symptom = scan.nextInt() - 1;
                    if (medicine[symptom] != 0) {
                        if (result.length() > 0) {
                            result.append(' ');
                        }
                        result.append(medicine[symptom]);
                    } else {
                        // No medicine for this symptom; keep consuming the
                        // remaining symptoms so the input stays aligned.
                        died = true;
                    }
                }
                System.out.println(died ? "YOU DIED" : result.toString());
            }
        } finally {
            scan.close();
        }
    }
}
|
import { Injectable, HttpException, HttpStatus, UnauthorizedException, ForbiddenException } from '@nestjs/common';
import { UserService } from '../user/user.service';
import { JwtService } from '@nestjs/jwt';
import { UserDTO } from '../user/user.dto';
import * as bcrypt from 'bcryptjs';
// import { redisClient } from '../shared/redis.db';
import * as jwt_decode from 'jwt-decode';
import { UserInfoService } from '../userinfo/userinfo.service';
import { AuthDTO } from './auth.dto';
import { PasswordChangeDTO } from './passwordchange.dto';
import { UserInfoDTO } from '../userinfo/userinfo.dto';
@Injectable()
export class AuthService {
constructor(
private readonly userService: UserService,
private readonly jwtService: JwtService,
private userInfoService: UserInfoService
) {}
private async validate(userData: any): Promise<UserDTO> {
return await this.userService.getUserPass(userData.username);
}
public async login(user: AuthDTO): Promise<any> {
return this.validate(user).then(async (userData: any) => {
if (!userData) {
throw new HttpException('Not Found', HttpStatus.NOT_FOUND);
} else if (!await this.comparePassword(user.password, userData.password)) {
throw new UnauthorizedException();
} else {
// delete the expired token associated with this user
let expiredToken = [];
let userInfos: any = await this.userInfoService.getUserInfo(userData.id, null);
if (userInfos) {
userInfos.forEach((element) => {
let decodedtoken = jwt_decode(element.token);
let exp_date = decodedtoken.exp;
let d = new Date();
let now_sec = (d.getTime() - d.getMilliseconds()) / 1000;
if (now_sec > exp_date) {
expiredToken.push(element.id);
}
});
if(expiredToken.length > 0) await this.userInfoService.deleteUserInfo(expiredToken);
}
const payload = { username: userData.username, sub: userData.id };
const accessToken = this.jwtService.sign(payload, {
expiresIn: '60m'
});
return {
expires_in: 3600,
access_token: accessToken,
userid: payload.sub
};
}
});
}
public async passwordChange(passwords: PasswordChangeDTO, id: string): Promise<any> {
let user = await this.userService.getUserPassById(id);
if (!await this.comparePassword(passwords.oldpassword, user.password)) {
throw new ForbiddenException();
} else {
let newpass = await bcrypt.hash(passwords.newpassword || '<PASSWORD>', 10);
return this.userService.updateUser(id, {
password: <PASSWORD>
});
}
}
async logout(id: string, token) {
let decodedtoken = jwt_decode(token);
let exp_date = decodedtoken.exp;
let d = new Date();
let now_sec = (d.getTime() - d.getMilliseconds()) / 1000;
//set to redis
// if(exp_date-now_sec > 0){
// redisClient.setex(id, (exp_date-now_sec) , token);
// }
// return { logout: true };
// save to postgres db
if (exp_date - now_sec > 0) {
await this.userInfoService.saveUserInfo({
userid: id,
token: token
});
return { logout: true };
}
}
async comparePassword(attempt: string, password: string): Promise<boolean> {
return await bcrypt.compare(attempt, password);
}
}
|
#!/bin/bash
# Download one year (2018) of ASOS observations for every station listed in
# the first space-delimited column of stations.txt, saving each response as
# raw/<station>.txt.

# Make sure the output directory exists before curl tries to write into it.
mkdir -p raw

for station in $(cut -d' ' -f1 < stations.txt); do
  echo "Getting ${station}"
  # Build the full request URL; quoting it below keeps the '&' separators
  # from being interpreted by the shell.
  url="https://mesonet.agron.iastate.edu/cgi-bin/request/asos.py?station=${station}&data=all&year1=2018&month1=1&day1=1&year2=2018&month2=12&day2=31&tz=Etc%2FUTC&format=onlycomma&latlon=yes&direct=yes&report_type=1&report_type=2"
  curl "$url" -o "raw/${station}.txt"
done
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# NOTE(review): the values below are machine-specific absolute paths —
# presumably regenerated by the Flutter tooling; confirm before reusing.
export "FLUTTER_ROOT=/Users/xuhao/Documents/51NB/Flutter/flutter"
export "FLUTTER_APPLICATION_PATH=/Users/xuhao/Documents/mine/FlutterProject/flutterapp"
export "FLUTTER_TARGET=lib/main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build/ios"
export "FLUTTER_FRAMEWORK_DIR=/Users/xuhao/Documents/51NB/Flutter/flutter/bin/cache/artifacts/engine/ios"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
|
<filename>musiclibrary/src/main/java/com/cyl/musiclake/api/baidu/BaiduMusicInfo.java
package com.cyl.musiclake.api.baidu;//package com.cyl.musiclake.api.baidu;
//
///**
// */
//public class BaiduMusicInfo {
// private String pic_big;
// private String pic_radio;
// private String pic_small;
// private String lrclink;
// private String song_id;
// private String title;
// private String ting_uid;
// private String album_title;
// private long album_id;
// private String artist_name;
// private long artist_id;
//
//
// public String getPic_big() {
// return pic_big;
// }
//
// public void setPic_big(String pic_big) {
// this.pic_big = pic_big;
// }
//
// public String getPic_small() {
// return pic_small;
// }
//
// public void setPic_small(String pic_small) {
// this.pic_small = pic_small;
// }
//
// public String getLrclink() {
// return lrclink;
// }
//
// public void setLrclink(String lrclink) {
// this.lrclink = lrclink;
// }
//
// public String getSong_id() {
// return song_id;
// }
//
// public void setSong_id(String song_id) {
// this.song_id = song_id;
// }
//
// public String getTitle() {
// return title;
// }
//
// public void setTitle(String title) {
// this.title = title;
// }
//
// public String getTing_uid() {
// return ting_uid;
// }
//
// public void setTing_uid(String ting_uid) {
// this.ting_uid = ting_uid;
// }
//
// public String getAlbum_title() {
// return album_title;
// }
//
// public void setAlbum_title(String album_title) {
// this.album_title = album_title;
// }
//
// public String getArtist_name() {
// return artist_name;
// }
//
// public void setArtist_name(String artist_name) {
// this.artist_name = artist_name;
// }
//
// public String getPic_radio() {
// return pic_radio;
// }
//
// public void setPic_radio(String pic_radio) {
// this.pic_radio = pic_radio;
// }
//
// public long getAlbum_id() {
// return album_id;
// }
//
// public void setAlbum_id(long album_id) {
// this.album_id = album_id;
// }
//
// public long getArtist_id() {
// return artist_id;
// }
//
// public void setArtist_id(long artist_id) {
// this.artist_id = artist_id;
// }
//}
|
#!/usr/bin/env node

// Command-line wrapper: formats the timestamp given as the first argument.
var ts = require('../')

// A missing argument is also rejected here: isNaN(undefined) is true.
if (isNaN(process.argv[2])) {
  // Misuse: print usage to stderr and exit non-zero so callers and shell
  // scripts can detect the failure (previously this went to stdout with
  // exit status 0).
  console.error('Usage: ts [TIMESTAMP]')
  process.exit(1)
}

console.log(ts(process.argv[2]))
require 'spec_helper'
require 'direct_execution_helper'
# Integration scenarios for the `rhc` membership commands
# (add-member / remove-member / leave-domain / show-domain)
# run against a live broker via the direct-execution helpers.
describe "rhc member scenarios" do
  context "with an existing domain" do
    before(:all) do
      standard_config
      @domain = has_a_domain
    end
    let(:domain){ @domain }

    context "with no users" do
      # Each example starts from a domain with the owner as the only member.
      before{ no_members(domain) }

      it "should not show members in the domain" do
        r = rhc 'show-domain', domain.name
        r.status.should == 0
        r.stdout.should_not match "Members:"
        r.stdout.should match "owned by #{domain.owner.name}"
      end

      it "should prevent leaving the domain for the owner" do
        r = rhc 'leave-domain', domain.name
        # NOTE(review): asserts only that the status differs from 1;
        # confirm whether a specific non-zero status was intended.
        r.status.should_not == 1
        r.stdout.should match "Leaving domain.*You are the owner of this domain and cannot leave"
      end

      it "should add and remove a member" do
        user = other_users.keys.take(1).first
        r = rhc 'add-member', user, '-n', domain.name
        r.status.should == 0
        r.stdout.should match "Adding 1 editor to domain"
        r.stdout.should match "done"
        # Default role for add-member is editor.
        client.find_domain(domain.name).members.any?{ |m| m.id == other_users[user].id && m.editor? }.should be_true
        r = rhc 'show-domain', domain.name
        r.status.should == 0
        r.stdout.should match "Members:"
        r.stdout.should match "#{user} \\(edit\\)"
        r = rhc 'remove-member', user, '-n', domain.name
        r.status.should == 0
        r.stdout.should match "Removing 1 member from domain"
        client.find_domain(domain.name).members.none?{ |m| m.id == other_users[user].id }.should be_true
      end

      it "should add and remove two members" do
        user1, user2 = other_users.keys.take(2)
        r = rhc 'add-member', user1, user2, '-n', domain.name
        r.status.should == 0
        r.stdout.should match "Adding 2 editors to domain"
        r.stdout.should match "done"
        members = client.find_domain(domain.name).members
        members.any?{ |m| m.id == other_users[user1].id && m.editor? }.should be_true
        members.any?{ |m| m.id == other_users[user2].id && m.editor? }.should be_true
        r = rhc 'show-domain', domain.name
        r.status.should == 0
        r.stdout.should match "Members:"
        r.stdout.should match "#{user1} \\(edit\\)"
        r.stdout.should match "#{user2} \\(edit\\)"
        r = rhc 'remove-member', user1, user2, '-n', domain.name
        r.status.should == 0
        r.stdout.should match "Removing 2 members from domain"
        client.find_domain(domain.name).members.none?{ |m| m.id == other_users[user1].id }.should be_true
        client.find_domain(domain.name).members.none?{ |m| m.id == other_users[user2].id }.should be_true
      end

      it "should add a view and an admin member. and allow users to leave the domain" do
        user1, user2 = other_users.keys.take(2)
        r = rhc 'add-member', user1, '--role', 'admin', '-n', domain.name
        r.status.should == 0
        r.stdout.should match "Adding 1 administrator to domain"
        r.stdout.should match "done"
        client.find_domain(domain.name).members.any?{ |m| m.id == other_users[user1].id && m.admin? }.should be_true
        r = rhc 'add-member', user2, '--role', 'view', '-n', domain.name
        r.status.should == 0
        r.stdout.should match "Adding 1 viewer to domain"
        r.stdout.should match "done"
        client.find_domain(domain.name).members.any?{ |m| m.id == other_users[user2].id && m.viewer? }.should be_true
        r = rhc 'show-domain', domain.name
        r.status.should == 0
        r.stdout.should match "Members:"
        r.stdout.should match "#{user1} \\(admin\\)"
        r.stdout.should match "#{user2} \\(view\\)"
        # A non-owner member may leave the domain.
        r = rhc 'leave-domain', domain.name, :as => other_users[user2]
        r.status.should == 0
        r.stdout.should match "Leaving domain.*done"
      end

      it "should remove all non owners" do
        user1, user2 = other_users.keys.take(2)
        r = rhc 'add-member', user1, user2, '-n', domain.name
        r.status.should == 0
        r.stdout.should match "Adding 2 editors to domain"
        r.stdout.should match "done"
        members = client.find_domain(domain.name).members
        members.any?{ |m| m.id == other_users[user1].id && m.editor? }.should be_true
        members.any?{ |m| m.id == other_users[user2].id && m.editor? }.should be_true
        r = rhc 'remove-member', '-n', domain.name, '--all'
        r.status.should == 0
        r.stdout.should match "Removing all members from domain.*done"
        # Only owner members remain after --all.
        members = client.find_domain(domain.name).members
        members.select(&:owner).should == members
      end

      it "should reject a non-existent user" do
        r = rhc 'add-member', 'not-a-user', '-n', domain.name
        r.status.to_i.should == 256
        r.stdout.should match "There is no account with login not-a-user."
        client.find_domain(domain.name).members.length.should == 1
      end

      it "should add a user by id" do
        user = other_users.values.take(1).first
        r = rhc 'add-member', user.id, '--ids', '-n', domain.name
        r.status.should == 0
        r.stdout.should match "Adding 1 editor to domain"
        r.stdout.should match "done"
        client.find_domain(domain.name).members.any?{ |m| m.id == user.id && m.editor? }.should be_true
      end
    end

    context "with an application" do
      let(:other_user){ other_users.values.first }
      before{ has_an_application }
      before{ has_local_ssh_key(other_user) }

      it "should allow SSH only for admin and edit roles" do
        user = other_user.login
        name = @domain.applications.first.name
        r = rhc 'add-member', user, '--role', 'admin', '-n', domain.name
        r.status.should == 0
        with_environment(other_user) do
          r = rhc 'ssh', name, '-n', domain.name, '--ssh', ssh_exec_for_env
          r.status.should == 0
        end
        # Downgrading to the view role must revoke SSH access.
        r = rhc 'add-member', user, '--role', 'view', '-n', domain.name
        r.status.should == 0
        with_environment(other_user) do
          r = rhc 'ssh', name, '-n', domain.name, '--ssh', ssh_exec_for_env
          r.status.to_i.should_not == 0
        end
        r = rhc 'add-member', user, '--role', 'edit', '-n', domain.name
        r.status.should == 0
        with_environment(other_user) do
          r = rhc 'ssh', name, '-n', domain.name, '--ssh', ssh_exec_for_env
          r.status.should == 0
        end
      end

      it "should filter applications by owner" do
        user = other_user.login
        name = @domain.applications.first.name
        r = rhc 'add-member', user, '--role', 'admin', '-n', domain.name
        r.status.should == 0
        with_environment(other_user) do
          # The member owns no applications, but has access to this one.
          r = rhc 'apps', '--mine'
          #r.status.should == 0
          r.stdout.should match "No applications"
          r = rhc 'apps'
          r.status.should == 0
          r.stdout.should match /You have access to \d+ applications?/
        end
      end

      after { @domain.applications.first.destroy }
    end
  end
end
var Dictionary = require('./Dictionary'),
QuickSortSet = require('./QuickSortSet'),
AnalyzeService = require('./AnalyzeService'),
SortedSetService = AnalyzeService.SortedSetService,
LexemeService = AnalyzeService.LexemeService;
var Lexeme = require('./Lexeme'),
consts = require('./consts'),
utils = require('./utils'),
CharType = consts.CharType,
LexemeType = consts.LexemeType;
var CharacterUtil = require('./CharacterUtil');
var BUFF_EXHAUST_CRITICAL = 100;
/**
 * Segmentation context shared by the sub-segmenters: holds the text buffer,
 * per-character type information, intermediate lexeme paths and results.
 * @param {Object} opts currently unused; reserved for future options
 */
var AnalyzeContext = function(opts){
    this.segmentBuff = '';      // text currently being segmented
    this.charTypes = [];        // CharType of each character in segmentBuff
    this.buffLocker = {};       // segmenter name -> lock flag (see lockBuffer)
    this.orgLexemes = new QuickSortSet(); // raw lexemes produced by segmenters
    this.pathMap = {};          // path begin index -> LexemePath
    this.results = [];          // final lexemes, in output order
    this.totalReadCount = 0;    // total number of characters read so far
    // Initialise cursor state up-front as well; previously these fields were
    // only assigned by reset()/fillBuffer(), so markBufferOffset() produced
    // NaN (undefined += number) when invoked before a reset.
    this.cursor = 0;            // index of the character being processed
    this.available = 0;         // number of valid characters in segmentBuff
    this.buffOffset = 0;        // offset of segmentBuff relative to the whole input
};

module.exports = AnalyzeContext;
/** Returns the character at the current cursor position. */
AnalyzeContext.prototype.getCurrentChar = function(){
    return this.segmentBuff[this.cursor];
};

/** Returns the CharType of the character at the current cursor position. */
AnalyzeContext.prototype.getCurrentCharType = function(){
    return this.charTypes[this.cursor];
};

/**
 * Fills segmentBuff with the text to analyze and resets the cursor.
 * @param txt the text to segment
 * @return the number of valid (analyzable) characters
 */
AnalyzeContext.prototype.fillBuffer = function(txt){
    this.segmentBuff = txt;
    this.cursor = 0;
    this.available = this.segmentBuff.length;
    return this.available;
};

/**
 * Initializes the buffer cursor and classifies the first character.
 */
AnalyzeContext.prototype.initCursor = function(){
    this.cursor = 0;
    // Strings are immutable in JS, so the regularized character is only used
    // for type classification, not written back into segmentBuff.
    //this.segmentBuff[this.cursor] = CharacterUtil.regularize(this.segmentBuff[this.cursor]);
    //this.charTypes[this.cursor] = CharacterUtil.identifyCharType(this.segmentBuff[this.cursor]);
    var chr = CharacterUtil.regularize(this.segmentBuff[this.cursor]);
    this.charTypes[this.cursor] = CharacterUtil.identifyCharType(chr);
};

/**
 * Advances the cursor by one and classifies the new current character.
 * @return true on success; false when the cursor is already at the end of
 *         the buffer and cannot advance
 */
AnalyzeContext.prototype.moveCursor = function(){
    if (this.cursor < this.available - 1){
        this.cursor += 1;
        // As in initCursor: classify the regularized character without
        // mutating the (immutable) buffer string.
        //this.segmentBuff[this.cursor] = CharacterUtil.regularize(this.segmentBuff[this.cursor]);
        //this.charTypes[this.cursor] = CharacterUtil.identifyCharType(this.segmentBuff[this.cursor]);
        var chr = CharacterUtil.regularize(this.segmentBuff[this.cursor]);
        this.charTypes[this.cursor] = CharacterUtil.identifyCharType(chr);
        return true;
    }
    else{
        return false;
    }
};

/**
 * Marks segmentBuff as locked by recording the name of the sub-segmenter
 * that is currently occupying it.
 * @param segmenterName name of the locking sub-segmenter
 */
AnalyzeContext.prototype.lockBuffer = function(segmenterName){
    this.buffLocker[segmenterName] = 1;
};

/**
 * Releases the named sub-segmenter's hold on segmentBuff.
 * @param segmenterName name of the sub-segmenter releasing the lock
 */
AnalyzeContext.prototype.unlockBuffer = function(segmenterName){
    this.buffLocker[segmenterName] = 0;
};

/**
 * The buffer counts as locked while any sub-segmenter entry in buffLocker
 * is still set.
 * @return boolean whether the buffer is locked
 */
AnalyzeContext.prototype.isBufferLocked = function(){
    for(var k in this.buffLocker){
        if (this.buffLocker[k]) {
            return true;
        }
    }
    return false;
};

/**
 * (Disabled) Whether segmentBuff has been fully consumed, i.e. the cursor
 * has reached the last position (this.available - 1).
 */
/*AnalyzeContext.prototype.isBufferConsumed = function(){
return this.cursor === this.available - 1;
};*/

/**
 * (Disabled) Whether segmentBuff needs to be refilled with new data.
 *
 * The buffer should be refilled when all of the following hold:
 * 1. available == BUFF_SIZE: the buffer is full
 * 2. buffIndex < available - 1 && buffIndex > available - BUFF_EXHAUST_CRITICAL:
 *    the cursor is inside the critical tail region
 * 3. !context.isBufferLocked(): no segmenter is occupying the buffer
 * Then the current loop should be interrupted (the buffer is shifted and
 * more data is read).
 */
/*AnalyzeContext.prototype.needRefillBuffer = function(){
return (this.cursor < this.available - 1 &&
this.cursor > this.available - BUFF_EXHAUST_CRITICAL &&
!this.isBufferLocked());
};*/

/**
 * Accumulates the offset of the current segmentBuff relative to the start
 * of the overall input.
 */
AnalyzeContext.prototype.markBufferOffset = function(){
    this.buffOffset += this.cursor;
};

/**
 * (Disabled) Adds a lexeme to the raw result set.
 */
/*AnalyzeContext.prototype.addLexeme = function(lexeme){
this.orgLexemes.addLexeme(lexeme);
};*/

/**
 * Records a segmentation result path, keyed by its begin position
 * (path begin index -> LexemePath).
 * @param crossPath the path to record; ignored when falsy
 */
AnalyzeContext.prototype.addLexemePath = function(crossPath){
    if (crossPath){
        this.pathMap[crossPath.pathBegin] = crossPath;
    }
};
/**
 * Pushes the segmentation results into the result list and renders them:
 * 1. walk from the head of the buffer up to the processed position (cursor);
 * 2. emit lexemes found via pathMap into this.results;
 * 3. emit CJK characters not covered by any path as single-character lexemes.
 * @return the segmented text, tokens joined by single spaces
 */
AnalyzeContext.prototype.outputToResult = function(){
    var lexeme;
    for(var index = 0;index <= this.cursor;){
        // Skip characters classified as useless.
        if (CharType.CHAR_USELESS === this.charTypes[index]){
            index ++;
            continue;
        }
        // Look up the LexemePath beginning at this index.
        var crosspath = this.pathMap[index];
        if (crosspath){
            // Emit the path's lexemes into the results collection.
            lexeme = crosspath.pollFirst();
            while (lexeme){
                this.results.push(lexeme);
                // Move index past this lexeme.
                index = lexeme.begin + lexeme.len;
                lexeme = crosspath.pollFirst();
                if (lexeme){
                    // Emit single characters that fall in gaps between
                    // lexemes inside the path.
                    for(;index < lexeme.begin; index++){
                        this.outputSingleCJK(index);
                    }
                }
            }
        }
        else{// No LexemePath starts at this index in pathMap.
            // Emit as a single character.
            this.outputSingleCJK(index);
            index++;
        }
    }
    // Clear the path map for the next round.
    this.pathMap = {};
    var result = [];
    lexeme = this.results.shift();
    while(lexeme){
        // Merge numeral/classifier lexemes, then render the token text.
        this.compound(lexeme);
        result.push(this.segmentBuff.substr(lexeme.begin, lexeme.len));
        lexeme = this.results.shift();
    }
    /* this.results.forEach(function(v){
    result.push(segmentBuff.substr(v.begin, v.len));
    });*/
    return result.join(' ');
};
/**
 * Emits the character at the given buffer index as a one-character lexeme,
 * but only for CJK characters; anything else is ignored.
 * @param index position in segmentBuff
 */
AnalyzeContext.prototype.outputSingleCJK = function(index){
    var charType = this.charTypes[index];
    var lexemeType = null;
    if (charType === CharType.CHAR_CHINESE) {
        lexemeType = LexemeType.TYPE_CNCHAR;
    } else if (charType === CharType.CHAR_OTHER_CJK) {
        lexemeType = LexemeType.TYPE_OTHER_CJK;
    }
    if (lexemeType !== null) {
        this.results.push(new Lexeme(this.buffOffset, index, 1, lexemeType));
    }
};
/**
 * Resets the segmentation context to its initial state.
 */
AnalyzeContext.prototype.reset = function(){
    this.buffLocker = {};
    this.orgLexemes = new QuickSortSet();
    this.available = 0;
    this.buffOffset = 0;
    this.charTypes = [];
    this.cursor = 0;
    this.results = [];
    // Use an empty string, matching the constructor and fillBuffer();
    // this was previously reset to [], inconsistent with the string-based
    // buffer used everywhere else (e.g. substr in outputToResult).
    this.segmentBuff = '';
    this.pathMap = {};
};
/**
 * Merges the given lexeme with following numeral/classifier lexemes
 * (e.g. arabic digits + Chinese numerals or measure words), consuming
 * merged lexemes from the head of this.results.
 * @param lexeme the lexeme to extend in place
 */
AnalyzeContext.prototype.compound = function(lexeme){
    // Numeral / classifier merging.
    var nextLexeme, appendOk = true;
    while (appendOk && this.results.length > 0){
        appendOk = false;
        if (LexemeType.TYPE_ARABIC === lexeme.lexemeType){
            nextLexeme = this.results[0];
            if (LexemeType.TYPE_CNUM === nextLexeme.lexemeType){
                // Merge arabic numeral + Chinese numeral.
                appendOk = LexemeService.append(lexeme, nextLexeme, LexemeType.TYPE_CNUM);
            }
            else if (LexemeType.TYPE_COUNT === nextLexeme.lexemeType){
                // Merge arabic numeral + Chinese measure word.
                appendOk = LexemeService.append(lexeme, nextLexeme, LexemeType.TYPE_CQUAN);
            }
            if (appendOk){
                // Consume the merged lexeme.
                this.results.shift();
            }
        }
        // A second round of merging may apply.
        if (LexemeType.TYPE_CNUM === lexeme.lexemeType && this.results.length > 0){
            nextLexeme = this.results[0];
            appendOk = false;
            if (LexemeType.TYPE_COUNT == nextLexeme.lexemeType){
                // Merge Chinese numeral + Chinese measure word.
                appendOk = LexemeService.append(lexeme, nextLexeme, LexemeType.TYPE_CQUAN);
            }
            if (appendOk){
                // Consume the merged lexeme.
                this.results.shift();
            }
        }
    }
};
|
<gh_stars>0
// Re-export the StoryCard component as this module's default export.
export { default } from './StoryCard'
import org.json.JSONObject;
public class Main {
public static void main(String[] args) {
String jsonString = "{\"name\": \"John Smith\",\"age\": 30,\"position\": \"Software Engineer\"}";
JSONObject json = new JSONObject(jsonString);
String name = json.getString("name");
int age = json.getInt("age");
String position = json.getString("position");
System.out.println("Name: " + name);
System.out.println("Age: " + age);
System.out.println("Position: " + position);
}
} |
import {Component, Inject, ViewChild} from '@angular/core';
import {FormBuilder, FormGroup, Validators} from '@angular/forms';
import {MAT_DIALOG_DATA, MatDialogRef} from '@angular/material/dialog';
import {ExecutionContext} from 'projects/runtime/src/lib/entities/execution-context';
import {HostsSelectorComponent} from 'projects/runtime/src/lib/runtime-host/hosts-selector/hosts-selector.component';
/** Data passed into the dialog: the remote path of the HAR file to import. */
export interface ImportHarDialogComponentData {
  harPath: string;
}

/**
 * Dialog that asks for a Gatling simulation package and class name and, on
 * import, closes itself with a 'RECORD' ExecutionContext describing the
 * HAR import task.
 */
@Component({
  selector: 'app-record-har-dialog',
  templateUrl: './import-har-dialog.component.html',
})
export class ImportHarDialogComponent {

  // Form holding the simulation class and package fields (both required).
  recordForm: FormGroup;

  // Selector for the host the import task should run on.
  @ViewChild('hostsSelector', {static: true})
  hostsSelector: HostsSelectorComponent;

  constructor(public dialogRef: MatDialogRef<ImportHarDialogComponent>,
              @Inject(MAT_DIALOG_DATA) public data: ImportHarDialogComponentData,
              private fb: FormBuilder) {
    this.recordForm = this.fb.group({
      // Simple identifier: letters, digits and underscores only.
      simulationClass: ['', [
        Validators.required,
        Validators.pattern(/^[\w]+$/)
      ]],
      // Dot-separated package path, e.g. com.example.load.
      simulationPackage: ['', [
        Validators.required,
        Validators.pattern(/^(\w+\.)*\w+$/)
      ]],
    });
  }

  /** Convenience accessor for the simulationClass form control. */
  get simulationClass() {
    return this.recordForm.get('simulationClass');
  }

  /** Convenience accessor for the simulationPackage form control. */
  get simulationPackage() {
    return this.recordForm.get('simulationPackage');
  }

  /**
   * Builds the RECORD execution context from the form values and the
   * selected host, then closes the dialog with it as the result.
   */
  import() {
    const hostId: string = this.hostsSelector.hostId;
    const hosts = {};
    hosts[hostId] = {};
    const context = new ExecutionContext(
      'RECORD',
      `Import har ${this.simulationPackage.value}.${this.simulationClass.value}`,
      {
        KRAKEN_GATLING_SIMULATION_CLASS: this.simulationClass.value,
        KRAKEN_GATLING_SIMULATION_PACKAGE: this.simulationPackage.value,
        KRAKEN_GATLING_HAR_PATH_REMOTE: this.data.harPath
      },
      hosts
    );
    this.dialogRef.close(context);
  }
}
|
/// Returns the average of `scores`, rounded to the nearest whole number.
///
/// - Parameter scores: the individual integer scores
/// - Returns: the rounded mean, or 0 for an empty input
func calculateAverageScore(_ scores: [Int]) -> Int {
    guard !scores.isEmpty else { return 0 } // avoid division by zero on empty input
    let totalScore = scores.reduce(0, +)
    // Use floating-point division so the result is rounded to the nearest
    // whole number. (The previous version also computed an unused
    // `averageScore` with truncating integer division — dead code removed.)
    return Int((Double(totalScore) / Double(scores.count)).rounded())
}

// Example usage.
let testScores = [85, 90, 92, 88, 95]
let average = calculateAverageScore(testScores)
print("Average score: \(average)") // Output: Average score: 90
// Application bootstrap: loads environment configuration, connects to
// MongoDB Atlas, wires up Express middleware, the Handlebars view engine
// and the route modules. Exports the configured Express app.
require("dotenv").config();

const bodyParser = require("body-parser");
const cookieParser = require("cookie-parser");
const express = require("express");
const favicon = require("serve-favicon");
const hbs = require("hbs");
const mongoose = require("mongoose");
const logger = require("morgan");
const path = require("path");

// Handlebars helper for strict equality comparisons in templates.
hbs.registerHelper("isEqual", function (a, b) {
  return a === b;
});

// Connect to the "dontwaste" database; credentials come from the
// MONGO_USER / MONGO_PASS environment variables.
mongoose
  .connect(
    `mongodb+srv://${process.env.MONGO_USER}:${process.env.MONGO_PASS}@cluster0.xbhyn.mongodb.net/dontwaste?retryWrites=true&w=majority`,
    {
      useNewUrlParser: true,
      useCreateIndex: true,
      useUnifiedTopology: true,
    }
  )
  .then((x) => {
    console.log(
      `Connected to Mongo! Database name: "${x.connections[0].name}"`
    );
  })
  .catch((err) => {
    console.error("Error connecting to mongo", err);
  });

const app_name = require("./package.json").name;
const debug = require("debug")(
  `${app_name}:${path.basename(__filename).split(".")[0]}`
);

const app = express();

// Middleware Setup: request logging, body parsing, cookies, then sessions.
app.use(logger("dev"));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
require("./configs/session.config")(app);

// Express View engine setup: compile SCSS from /public on the fly,
// then register Handlebars and the static assets directory.
app.use(
  require("node-sass-middleware")({
    src: path.join(__dirname, "public"),
    dest: path.join(__dirname, "public"),
    sourceMap: true,
  })
);

app.set("views", path.join(__dirname, "views"));
app.set("view engine", "hbs");
app.use(express.static(path.join(__dirname, "public")));
app.use(favicon(path.join(__dirname, "public", "images", "favicon.ico")));

// default value for title local
app.locals.title = "Express - Generated with IronGenerator";

// IMPORT ROUTES — each router handles its own URL prefix; the catch-all
// index router is mounted last.
const companyRouter = require("./routes/company.routes");
app.use("/company", companyRouter);
const institutionRouter = require("./routes/institution.routes");
app.use("/institution", institutionRouter);
const personRouter = require("./routes/person.routes");
app.use("/person", personRouter);
const index = require("./routes/index");
app.use("/", index);

module.exports = app;
|
import { Pipe, PipeTransform } from '@angular/core';
import { PeopleResult } from '../model/people-result.class';
@Pipe({
name: 'percentageFridays'
})
export class PercentageFridaysPipe implements PipeTransform {
transform(person: PeopleResult): string {
let num = 0;
if (person.num_coffee!==0) {
num = Math.floor((person.num_pay / person.num_coffee) * 100);
}
return `${num}% (${person.num_pay}/${person.num_coffee})`;
}
} |
<gh_stars>1-10
/**
 * Encodes a byte sequence as a lower-case hexadecimal string.
 * @param {Iterable<number>} bytes byte values (only the low 8 bits are used)
 * @returns {string} hex string, two characters per byte
 */
export const encodeBase16 = bytes =>
  // padStart takes a string pad; the original passed the number 0 and
  // relied on implicit coercion.
  Array.from(bytes).map(x => (x & 0xff).toString(16).padStart(2, '0')).join('')

/**
 * Decodes a hexadecimal string (with an optional 0x prefix) into bytes.
 * A trailing unpaired hex digit is silently dropped, matching the original
 * reduce-based implementation; invalid pairs decode to 0 via NaN coercion.
 * @param {string} hexStr
 * @returns {Uint8Array}
 */
export const decodeBase16 = hexStr => {
  const digits = hexStr.replace(/^0x/, '')
  const bytes = []
  // Walk complete two-digit pairs; an odd trailing digit is ignored.
  for (let i = 0; i + 1 < digits.length; i += 2) {
    bytes.push(parseInt(digits.slice(i, i + 2), 16))
  }
  return Uint8Array.from(bytes)
}

/**
 * Maps each character of a string to its UTF-16 char code.
 * @param {string} [str='']
 * @returns {number[]}
 */
export const decodeAscii = (str = '') =>
  Array.from(str).map(x => `${x}`.charCodeAt(0))
|
package com.myproject.framework.mvp.ui.about;
import com.myproject.framework.mvp.data.DataManager;
import com.myproject.framework.mvp.ui.base.BasePresenter;
import com.myproject.framework.mvp.utils.rx.SchedulerProvider;
import javax.inject.Inject;
import io.reactivex.disposables.CompositeDisposable;
/**
 * Presenter for the About screen.
 *
 * Holds no screen-specific logic of its own; it simply wires the shared
 * DataManager, scheduler provider and disposable container into
 * BasePresenter via constructor injection.
 *
 * Created by Nhat on 12/13/17.
 */
public class AboutPresenter<V extends AboutBaseView> extends BasePresenter<V>
implements AboutMvpPresenter<V> {

    @Inject
    public AboutPresenter(DataManager dataManager,
                          SchedulerProvider schedulerProvider,
                          CompositeDisposable compositeDisposable) {
        // Delegate all state handling to the base presenter.
        super(dataManager, schedulerProvider, compositeDisposable);
    }
}
|
#!/bin/bash

# STIG module template for V0050739 (OL6-00-000339): validation is not yet
# implemented — the module currently always reports failure.

# Module specific variables go here
# Files: file=/path/to/file
# Arrays: declare -a array_name
# Strings: foo="bar"
# Integers: x=9

###############################################
# Bootstrapping environment setup
###############################################

# Get our working directory
cwd="$(pwd)"

# Define our bootstrapper location
bootstrap="${cwd}/tools/bootstrap.sh"

# Bail if it cannot be found (path is quoted so spaces in ${cwd} are safe)
if [ ! -f "${bootstrap}" ]; then
  echo "Unable to locate bootstrap; ${bootstrap}" && exit 1
fi

# Load our bootstrap (expected to define restore/verbose/log/report/etc.)
source "${bootstrap}"

###############################################
# Metrics start
###############################################

# Get EPOCH
s_epoch="$(gen_epoch)"

# Create a timestamp
timestamp="$(gen_date)"

# Who is calling? 0 = singular, 1 is as group
caller=$(ps "$PPID" | grep -c stigadm)

###############################################
# Perform restoration
###############################################

# If ${restore} = 1 go to restoration mode (default to 0 when the
# bootstrap did not set it, so the test does not error on an empty value)
if [ "${restore:-0}" -eq 1 ]; then
  report "Not yet implemented" && exit 1
fi

###############################################
# STIG validation/remediation
###############################################

# Module specific validation code should go here
# Errors should go in ${errors[@]} array (which on remediation get handled)
# All inspected items should go in ${inspected[@]} array
errors=("${stigid}")

# If ${change} = 1
#if [ ${change} -eq 1 ]; then

  # Create the backup env
  #backup_setup_env "${backup_path}"

  # Create a backup (configuration output, file/folder permissions output etc)
  #bu_configuration "${backup_path}" "${author}" "${stigid}" "$(echo "${array_values[@]}" | tr ' ' '\n')"
  #bu_file "${backup_path}" "${author}" "${stigid}" "${file}"
  #if [ $? -ne 0 ]; then

    # Stop, we require a backup
    #report "Unable to create backup" && exit 1
  #fi

  # Iterate ${errors[@]}
  #for error in ${errors[@]}; do
    # Work to remediate ${error} should go here
  #done
#fi

# Remove dupes
#inspected=( $(remove_duplicates "${inspected[@]}") )

###############################################
# Results for printable report
###############################################

# If ${#errors[@]} > 0
if [ ${#errors[@]} -gt 0 ]; then

  # Set ${results} error message
  #results="Failed validation" UNCOMMENT ONCE WORK COMPLETE!
  results="Not yet implemented!"
fi

# Set ${results} passed message
[ ${#errors[@]} -eq 0 ] && results="Passed validation"

###############################################
# Report generation specifics
###############################################

# Apply some values expected for report footer
[ ${#errors[@]} -eq 0 ] && passed=1 || passed=0
[ ${#errors[@]} -gt 0 ] && failed=1 || failed=0

# Calculate a percentage from applied modules & errors incurred
percentage=$(percent ${passed} ${failed})

# If the caller was only independent
if [ "${caller}" -eq 0 ]; then

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array "${log}" "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ "${verbose:-0}" -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array "${log}" "validated" "${inspected[@]}"
  fi

  # Generate the report
  report "${results}"

  # Display the report
  cat "${log}"
else

  # Since we were called from stigadm
  module_header "${results}"

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array "${log}" "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ "${verbose:-0}" -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array "${log}" "validated" "${inspected[@]}"
  fi

  # Finish up the module specific report
  module_footer
fi

###############################################
# Return code for larger report
###############################################

# Return an error/success code (0/1)
exit ${#errors[@]}

# Date: 2018-09-19
#
# Severity: CAT-III
# Classification: UNCLASSIFIED
# STIG_ID: V0050739
# STIG_Version: SV-64945r1
# Rule_ID: OL6-00-000339
#
# OS: Oracle_Linux
# Version: 6
# Architecture:
#
# Title: The FTP daemon must be configured for logging or verbose mode.
# Description: To trace malicious activity facilitated by the FTP service, it must be configured to ensure that all commands sent to the ftp server are logged using the verbose vsftpd log format. The default vsftpd log file is /var/log/vsftpd.log.
|
#!/bin/bash
set -e

# Entrypoint: when asked to run the "web" role, replace this shell with the
# Node server (forwarding all arguments); otherwise run the given command.
case "$1" in
  web)
    exec node /var/service/bin/www "$@"
    ;;
  *)
    exec "$@"
    ;;
esac
|
<reponame>xrr2016/egg
'use strict';
const assert = require('assert');
const request = require('supertest');
const coffee = require('coffee');
const path = require('path');
const utils = require('../utils');
describe('test/ts/index.test.js', () => {
describe('compiler code', () => {
let app;
before(async () => {
await coffee.fork(
require.resolve('typescript/bin/tsc'),
[ '-p', path.resolve(__dirname, '../fixtures/apps/app-ts/tsconfig.json') ]
)
.debug()
.expect('code', 0)
.end();
app = utils.app('apps/app-ts');
await app.ready();
});
after(async () => {
await app.close();
assert.deepStrictEqual(app._app.stages, [
'configWillLoad',
'configDidLoad',
'didLoad',
'willReady',
'didReady',
'serverDidReady',
'beforeClose',
]);
});
it('controller run ok', done => {
request(app.callback())
.get('/foo')
.expect(200)
.expect({ env: 'unittest' })
.end(done);
});
it('controller of app.router run ok', done => {
request(app.callback())
.get('/test')
.expect(200)
.expect({ env: 'unittest' })
.end(done);
});
});
describe('type check', () => {
it('should compile with esModuleInterop without error', async () => {
await coffee.fork(
require.resolve('typescript/bin/tsc'),
[ '-p', path.resolve(__dirname, '../fixtures/apps/app-ts-esm/tsconfig.json') ]
)
.debug()
.expect('code', 0)
.end();
});
it('should compile type-check ts without error', async () => {
await coffee.fork(
require.resolve('typescript/bin/tsc'),
[ '-p', path.resolve(__dirname, '../fixtures/apps/app-ts-type-check/tsconfig.json') ]
)
.debug()
.expect('code', 0)
.end();
});
it('should throw error with type-check-error ts', async () => {
await coffee.fork(
require.resolve('typescript/bin/tsc'),
[ '-p', path.resolve(__dirname, '../fixtures/apps/app-ts-type-check/tsconfig-error.json') ]
)
// .debug()
.expect('stdout', /Property 'ctx' is protected/)
.expect('stdout', /Property 'localsCheckAny' does not exist on type 'string'/)
.expect('stdout', /Property 'configKeysCheckAny' does not exist on type 'string'/)
.expect('stdout', /Property 'appCheckAny' does not exist on type 'Application'/)
.expect('stdout', /Property 'serviceLocalCheckAny' does not exist on type 'string'/)
.expect('stdout', /Property 'serviceConfigCheckAny' does not exist on type 'string'/)
.expect('stdout', /Property 'serviceAppCheckAny' does not exist on type 'Application'/)
.expect('stdout', /Property 'checkSingleTon' does not exist/)
.expect('stdout', /Property 'directory' is missing in type '{}' but required in type 'CustomLoaderConfig'/)
.notExpect('stdout', /Cannot find module 'yadan'/)
.expect('stdout', /Object is possibly 'undefined'\./)
.expect('stdout', /Expected 1 arguments, but got 0\./)
.expect('stdout', /Expected 0-1 arguments, but got 2\./)
.expect('code', 2)
.end();
});
});
});
|
package install_test
import (
"encoding/json"
"fmt"
"os"
"strings"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/epinio/epinio/acceptance/helpers/catalog"
"github.com/epinio/epinio/acceptance/helpers/epinio"
"github.com/epinio/epinio/acceptance/helpers/proc"
"github.com/epinio/epinio/acceptance/testenv"
)
var _ = Describe("<Scenario3> RKE, Private CA, Service, on External Registry", func() {
var (
flags []string
epinioHelper epinio.Epinio
serviceName = catalog.NewServiceName()
appName string
loadbalancer string
registryUsername string
registryPassword string
rangeIP string
domainIP []string
// testenv.New is not needed for VerifyAppServiceBound helper :shrug:
env testenv.EpinioEnv
localpathURL = "https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml"
)
BeforeEach(func() {
epinioHelper = epinio.NewEpinioHelper(testenv.EpinioBinaryPath())
// Clean previous installed helm repos
// Done at the beginning because we don't know the runner's state
out, err := proc.Run(testenv.Root(), false, "bash", "./scripts/remove-helm-repos.sh")
Expect(err).NotTo(HaveOccurred(), out)
// Get a free IP address on server's network
rangeIP, _ = proc.Run(testenv.Root(), false, "bash", "./scripts/get-free-ip.sh")
domainIP = strings.Split(rangeIP, "-")
appName = "externalregtest"
registryUsername = os.Getenv("REGISTRY_USERNAME")
Expect(registryUsername).ToNot(BeEmpty())
registryPassword = os.Getenv("REGISTRY_PASSWORD")
Expect(registryPassword).ToNot(BeEmpty())
flags = []string{
"--set", "skipCertManager=true",
"--set", "domain=" + fmt.Sprintf("%s.omg.howdoi.website", domainIP[0]),
"--set", "tlsIssuer=private-ca",
"--set", "externalRegistryURL=registry.hub.docker.com",
"--set", "externalRegistryUsername=" + registryUsername,
"--set", "externalRegistryPassword=" + registryPassword,
"--set", "externalRegistryNamespace=splatform",
}
})
AfterEach(func() {
out, err := epinioHelper.Uninstall()
Expect(err).NotTo(HaveOccurred(), out)
out, err = proc.RunW("helm", "uninstall", "-n", "metallb", "metallb")
Expect(err).NotTo(HaveOccurred(), out)
})
It("installs with private CA and pushes an app with service", func() {
By("Installing MetalLB", func() {
out, err := proc.RunW("sed", "-i", fmt.Sprintf("s/@IP_RANGE@/%s/", rangeIP),
testenv.TestAssetPath("values-metallb-rke.yml"))
Expect(err).NotTo(HaveOccurred(), out)
out, err = proc.RunW("helm", "repo", "add", "metallb", "https://metallb.github.io/metallb")
Expect(err).NotTo(HaveOccurred(), out)
out, err = proc.RunW("helm", "upgrade", "--install", "-n", "metallb",
"--create-namespace", "metallb", "metallb/metallb", "-f",
testenv.TestAssetPath("values-metallb-rke.yml"))
Expect(err).NotTo(HaveOccurred(), out)
})
By("Configuring local-path storage", func() {
out, err := proc.RunW("kubectl", "apply", "-f", localpathURL)
Expect(err).NotTo(HaveOccurred(), out)
value := `{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}`
out, err = proc.RunW("kubectl", "patch", "storageclass", "local-path", "-p", value)
Expect(err).NotTo(HaveOccurred(), out)
})
By("Installing CertManager", func() {
out, err := proc.RunW("helm", "repo", "add", "jetstack", "https://charts.jetstack.io")
Expect(err).NotTo(HaveOccurred(), out)
out, err = proc.RunW("helm", "repo", "update")
Expect(err).NotTo(HaveOccurred(), out)
out, err = proc.RunW("helm", "upgrade", "--install", "cert-manager", "jetstack/cert-manager",
"-n", "cert-manager",
"--create-namespace",
"--set", "installCRDs=true",
"--set", "extraArgs[0]=--enable-certificate-owner-ref=true",
)
Expect(err).NotTo(HaveOccurred(), out)
// Create certificate secret and cluster_issuer
out, err = proc.RunW("kubectl", "apply", "-f", testenv.TestAssetPath("cluster-issuer-private-ca.yml"))
Expect(err).NotTo(HaveOccurred(), out)
})
By("Installing Epinio", func() {
out, err := epinioHelper.Install(flags...)
Expect(err).NotTo(HaveOccurred(), out)
Expect(out).To(ContainSubstring("STATUS: deployed"))
out, err = testenv.PatchEpinio()
Expect(err).ToNot(HaveOccurred(), out)
})
By("Checking Loadbalancer IP", func() {
out, err := proc.RunW("kubectl", "get", "service", "-n", "traefik", "traefik", "-o", "json")
Expect(err).NotTo(HaveOccurred(), out)
status := &testenv.LoadBalancerHostname{}
err = json.Unmarshal([]byte(out), status)
Expect(err).NotTo(HaveOccurred())
Expect(status.Status.LoadBalancer.Ingress).To(HaveLen(1))
loadbalancer = status.Status.LoadBalancer.Ingress[0].IP
Expect(loadbalancer).ToNot(BeEmpty())
// We need to be sure that the specified IP is really used
Expect(loadbalancer).To(Equal(domainIP[0]))
})
By("Updating Epinio config", func() {
out, err := epinioHelper.Run("config", "update")
Expect(err).NotTo(HaveOccurred(), out)
Expect(out).To(ContainSubstring("Ok"))
})
By("Checking Epinio info command", func() {
Eventually(func() string {
out, _ := epinioHelper.Run("info")
return out
}, "2m", "2s").Should(ContainSubstring("Epinio Version:"))
})
By("Creating a service and pushing an app", func() {
out, err := epinioHelper.Run("service", "create", serviceName, "mariadb", "10-3-22")
Expect(err).NotTo(HaveOccurred(), out)
out, err = epinioHelper.Run("push",
"--name", appName,
"--path", testenv.AssetPath("sample-app"),
"--bind", serviceName)
Expect(err).NotTo(HaveOccurred(), out)
env.VerifyAppServiceBound(appName, serviceName, testenv.DefaultWorkspace, 1)
// Verify cluster_issuer is used
out, err = proc.RunW("kubectl", "get", "certificate",
"-n", testenv.DefaultWorkspace,
"--selector", "app.kubernetes.io/name="+appName,
"-o", "jsonpath='{.items[*].spec.issuerRef.name}'")
Expect(err).NotTo(HaveOccurred(), out)
Expect(out).To(Equal("'private-ca'"))
})
})
})
|
package frc.team3388.robot.actions;
import com.flash3388.flashlib.hid.Joystick;
import com.flash3388.flashlib.hid.JoystickAxis;
import com.flash3388.flashlib.scheduling.actions.ActionBase;
import frc.team3388.robot.subsystems.CustomTankDrive;
public class CustomTankDriveAction extends ActionBase {
// This action, is a simple recreation of the TankDriveAction.
// To implement an action, we first extend ActionBase.
private final CustomTankDrive mDrive;
private final Joystick mStickRight;
private final Joystick mStickLeft;
// In the constructor, we receive the systems and parameters we need to perform the action.
// Here we need the drive system, and the joysticks.
// We will store those values in instance variables.
public CustomTankDriveAction(CustomTankDrive drive, Joystick stickRight, Joystick stickLeft) {
mDrive = drive;
mStickRight = stickRight;
mStickLeft = stickLeft;
// Now we need to declare drive as a requirement for this action,
// reporting to the Scheduler that it is used here in order
// to prevent 2 actions from using this system at the same time.
requires(drive);
}
// Now we can start implementing the action's lifecycle.
// This lifecycle will define what the action should do when:
//
// initialize: runs once, each time the action is started. We would normally use it to initialize
// states/variables/dependencies, in preparation for the execute phase. In our case, we have no need.
//
// execute: the main phase for an action. Runs periodically (timing differs but will likely be around 25ms).
// In it we will perform the main logic of the action. In this case, moving the drive system.
//
// isFinished: also a part of the execute phase. It defines when the action should stop. If it returns true,
// the action will stop running. In our case, we don't really want to stop the action, from our end, so we will return
// false. If someone wants to stop this action, they can cancel it, or overwrite it with another one.
//
// end: the end phase runs after the execution phase. In this phase we stop and deinitialize anything used during the
// action. In our case, that would mean stopping the drive system.
// Note the wasInterrupted parameter. If the action finished normally, i.e. isFinished returned true, wasInterrupted
// would be false. If someone canceled the action, it reached a timeout, or was overwritten by another action;
// wasInterrupted would be true.
@Override
public void initialize() {
}
@Override
public void execute() {
// We grab the values from the joysticks.
// - right: right stick axis Y
// - left: left stick axis Y
double right = mStickRight.getAxis(JoystickAxis.Y).getAsDouble();
double left = mStickLeft.getAxis(JoystickAxis.Y).getAsDouble();
// Move the drive system with the values from the joysticks.
mDrive.tankDrive(right, left);
}
@Override
public boolean isFinished() {
return false;
}
@Override
public void end(boolean wasInterrupted) {
// When the action is done, we should stop the drive system.
mDrive.stop();
}
}
|
#!/bin/bash
#SBATCH --job-name="wind-M"
#SBATCH --output="log.slurm.%j.out"
#SBATCH --partition=normal
#SBATCH --nodes=8
#SBATCH --ntasks-per-node=64
#SBATCH -t 00:15:00
export SUBMITDIR=$SLURM_SUBMIT_DIR
export BROKER_ENDPOINT_FILE="$SUBMITDIR/../../cloud-components/endpoints_512.ini"
cd $SUBMITDIR
export data_str=$(date +"%Y%m%d-%H%M")
export RUNDIR=$SCRATCH/wind/$data_str
export RESULTDIR=$SUBMITDIR/saved_logs/$data_str
mkdir -pv ${RESULTDIR}
mkdir -pv $RUNDIR
module list
source ./AllrunParallel
## Wait for the entire workflow to finish
|
from collections import OrderedDict
import hashlib
from django.core.cache import CacheHandler, DEFAULT_CACHE_ALIAS
from django.middleware.cache import UpdateCacheMiddleware, FetchFromCacheMiddleware
from django.utils.cache import patch_response_headers, get_max_age, has_vary_header, _generate_cache_header_key, \
_i18n_cache_key_suffix, cc_delim_re
from django.conf import settings
from django.utils.decorators import decorator_from_middleware_with_args
from django.utils.encoding import force_bytes
caches = CacheHandler()
def _generate_orderless_cache_url(request):
sorted_keys = [k for k in sorted(request.GET.keys(), key=lambda k: k)]
sorted_arr = OrderedDict()
sorted_query_string = "{path}?".format(path=request.path_info)
ampersand = ""
counter = 0
for key in sorted_keys:
sorted_arr[key] = [v for v in sorted(request.GET.getlist(key))]
for value in sorted_arr[key]:
if counter > 0:
ampersand = "&"
sorted_query_string += "{ampersand}{key}={value}".format(ampersand=ampersand, key=key, value=value)
counter += 1
return sorted_query_string
def _generate_orderless_cache_key(request, method, headerlist, key_prefix):
"""Returns a cache key from the headers given in the header list."""
ctx = hashlib.md5()
for header in headerlist:
value = request.META.get(header, None)
if value is not None:
ctx.update(force_bytes(value))
url = hashlib.md5(force_bytes(_generate_orderless_cache_url(request)))
cache_key = 'views.decorators.cache.cache_page.%s.%s.%s.%s' % (
key_prefix, method, url.hexdigest(), ctx.hexdigest())
return _i18n_cache_key_suffix(request, cache_key)
def get_orderless_cache_key(request, key_prefix=None, method='GET', cache=None):
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
headerlist = cache.get(cache_key, None)
if headerlist is not None:
return _generate_orderless_cache_key(request, method, headerlist, key_prefix)
else:
return None
class OrderlessUpdateCacheMiddleware(UpdateCacheMiddleware):
def process_response(self, request, response):
"""Sets the cache, if needed."""
if not self._should_update_cache(request, response):
# We don't need to update the cache, just return.
return response
if response.streaming or response.status_code != 200:
return response
# Don't cache responses that set a user-specific (and maybe security
# sensitive) cookie in response to a cookie-less request.
if not request.COOKIES and response.cookies and has_vary_header(response, 'Cookie'):
return response
# Try to get the timeout from the "max-age" section of the "Cache-
# Control" header before reverting to using the default cache_timeout
# length.
timeout = get_max_age(response)
if timeout is None:
timeout = self.cache_timeout
elif timeout == 0:
# max-age was set to 0, don't bother caching.
return response
patch_response_headers(response, timeout)
if timeout:
cache_key = learn_orderless_cache_key(request, response, timeout, self.key_prefix, cache=self.cache)
if hasattr(response, 'render') and callable(response.render):
response.add_post_render_callback(
lambda r: self.cache.set(cache_key, r, timeout)
)
else:
self.cache.set(cache_key, response, timeout)
return response
class OrderlessFetchFromCacheMiddleware(FetchFromCacheMiddleware):
def process_request(self, request):
"""
Checks whether the page is already cached and returns the cached
version if available.
"""
if request.method not in ('GET', 'HEAD'):
request._cache_update_cache = False
return None # Don't bother checking the cache.
# try and get the cached GET response
cache_key = get_orderless_cache_key(request, self.key_prefix, 'GET', cache=self.cache)
if cache_key is None:
request._cache_update_cache = True
return None # No cache information available, need to rebuild.
response = self.cache.get(cache_key, None)
# if it wasn't found and we are looking for a HEAD, try looking just for that
if response is None and request.method == 'HEAD':
cache_key = get_orderless_cache_key(request, self.key_prefix, 'HEAD', cache=self.cache)
response = self.cache.get(cache_key, None)
if response is None:
request._cache_update_cache = True
return None # No cache information available, need to rebuild.
# hit, return cached response
request._cache_update_cache = False
return response
class OrderlessCacheMiddleware(OrderlessUpdateCacheMiddleware, OrderlessFetchFromCacheMiddleware):
"""
Cache middleware that provides basic behavior for many simple sites.
Also used as the hook point for the cache decorator, which is generated
using the decorator-from-middleware utility.
"""
def __init__(self, cache_timeout=None, **kwargs):
# We need to differentiate between "provided, but using default value",
# and "not provided". If the value is provided using a default, then
# we fall back to system defaults. If it is not provided at all,
# we need to use middleware defaults.
try:
key_prefix = kwargs['key_prefix']
if key_prefix is None:
key_prefix = ''
except KeyError:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
self.key_prefix = key_prefix
try:
cache_alias = kwargs['cache_alias']
if cache_alias is None:
cache_alias = DEFAULT_CACHE_ALIAS
except KeyError:
cache_alias = settings.CACHE_MIDDLEWARE_ALIAS
self.cache_alias = cache_alias
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
self.cache_timeout = cache_timeout
self.cache = caches[self.cache_alias]
def orderless_cache_page(*args, **kwargs):
"""
Decorator for views that tries getting the page from the cache and
populates the cache if the page isn't in the cache yet.
The cache is keyed by the URL and some data from the headers.
Additionally there is the key prefix that is used to distinguish different
cache areas in a multi-site setup. You could use the
get_current_site().domain, for example, as that is unique across a Django
project.
Additionally, all headers from the response's Vary header will be taken
into account on caching -- just like the middleware does.
"""
# We also add some asserts to give better error messages in case people are
# using other ways to call cache_page that no longer work.
if len(args) != 1 or callable(args[0]):
raise TypeError("cache_page has a single mandatory positional argument: timeout")
cache_timeout = args[0]
cache_alias = kwargs.pop('cache', None)
key_prefix = kwargs.pop('key_prefix', None)
if kwargs:
raise TypeError("cache_page has two optional keyword arguments: cache and key_prefix")
return decorator_from_middleware_with_args(OrderlessCacheMiddleware)(
cache_timeout=cache_timeout, cache_alias=cache_alias, key_prefix=key_prefix
)
def learn_orderless_cache_key(request, response, cache_timeout=None, key_prefix=None, cache=None):
"""
Learns what headers to take into account for some request URL from the
response object. It stores those headers in a global URL registry so that
later access to that URL will know what headers to take into account
without building the response object itself. The headers are named in the
Vary header of the response, but we want to prevent response generation.
The list of headers to use for cache key generation is stored in the same
cache as the pages themselves. If the cache ages some data out of the
cache, this just means that we have to build the response once to get at
the Vary header and so at the list of headers to use for the cache key.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
if response.has_header('Vary'):
is_accept_language_redundant = settings.USE_I18N or settings.USE_L10N
# If i18n or l10n are used, the generated cache key will be suffixed
# with the current locale. Adding the raw value of Accept-Language is
# redundant in that case and would result in storing the same content
# under multiple keys in the cache. See #18191 for details.
headerlist = []
for header in cc_delim_re.split(response['Vary']):
header = header.upper().replace('-', '_')
if header == 'ACCEPT_LANGUAGE' and is_accept_language_redundant:
continue
headerlist.append('HTTP_' + header)
headerlist.sort()
cache.set(cache_key, headerlist, cache_timeout)
return _generate_orderless_cache_key(request, request.method, headerlist, key_prefix)
else:
# if there is no Vary header, we still need a cache key
# for the request.build_absolute_uri()
cache.set(cache_key, [], cache_timeout)
return _generate_orderless_cache_key(request, request.method, [], key_prefix)
|
<reponame>kal727/l5r-sandbox
const DrawCard = require('../../../drawcard.js');
class Viserion extends DrawCard {
setupCardAbilities(ability) {
this.persistentEffect({
match: (card) => card.hasTrait('Stormborn'),
effect: ability.effects.addKeyword('Stealth')
});
}
}
Viserion.code = '01166';
module.exports = Viserion;
|
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
package icecanedbpb
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// IcecaneKVClient is the client API for IcecaneKV service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type IcecaneKVClient interface {
// KV commands with txn supported.
Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*GetResponse, error)
Scan(ctx context.Context, in *ScanRequest, opts ...grpc.CallOption) (*ScanResponse, error)
Set(ctx context.Context, in *SetRequest, opts ...grpc.CallOption) (*SetResponse, error)
Delete(ctx context.Context, in *DeleteRequest, opts ...grpc.CallOption) (*DeleteResponse, error)
BeginTxn(ctx context.Context, in *BeginTxnRequest, opts ...grpc.CallOption) (*BeginTxnResponse, error)
CommitTxn(ctx context.Context, in *CommitTxnRequest, opts ...grpc.CallOption) (*CommitTxnResponse, error)
RollbackTxn(ctx context.Context, in *RollbackTxnRequest, opts ...grpc.CallOption) (*RollbackTxnResponse, error)
// Raft commands b/w two kv servers. Not to be used by the client
RequestVote(ctx context.Context, in *RequestVoteRequest, opts ...grpc.CallOption) (*RequestVoteResponse, error)
AppendEntries(ctx context.Context, in *AppendEntriesRequest, opts ...grpc.CallOption) (*AppendEntriesResponse, error)
PeerSet(ctx context.Context, in *PeerSetRequest, opts ...grpc.CallOption) (*PeerSetResponse, error)
PeerDelete(ctx context.Context, in *PeerDeleteRequest, opts ...grpc.CallOption) (*PeerDeleteResponse, error)
}
type icecaneKVClient struct {
cc grpc.ClientConnInterface
}
func NewIcecaneKVClient(cc grpc.ClientConnInterface) IcecaneKVClient {
return &icecaneKVClient{cc}
}
func (c *icecaneKVClient) Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*GetResponse, error) {
out := new(GetResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/Get", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) Scan(ctx context.Context, in *ScanRequest, opts ...grpc.CallOption) (*ScanResponse, error) {
out := new(ScanResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/Scan", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) Set(ctx context.Context, in *SetRequest, opts ...grpc.CallOption) (*SetResponse, error) {
out := new(SetResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/Set", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) Delete(ctx context.Context, in *DeleteRequest, opts ...grpc.CallOption) (*DeleteResponse, error) {
out := new(DeleteResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/Delete", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) BeginTxn(ctx context.Context, in *BeginTxnRequest, opts ...grpc.CallOption) (*BeginTxnResponse, error) {
out := new(BeginTxnResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/BeginTxn", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) CommitTxn(ctx context.Context, in *CommitTxnRequest, opts ...grpc.CallOption) (*CommitTxnResponse, error) {
out := new(CommitTxnResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/CommitTxn", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) RollbackTxn(ctx context.Context, in *RollbackTxnRequest, opts ...grpc.CallOption) (*RollbackTxnResponse, error) {
out := new(RollbackTxnResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/RollbackTxn", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) RequestVote(ctx context.Context, in *RequestVoteRequest, opts ...grpc.CallOption) (*RequestVoteResponse, error) {
out := new(RequestVoteResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/RequestVote", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) AppendEntries(ctx context.Context, in *AppendEntriesRequest, opts ...grpc.CallOption) (*AppendEntriesResponse, error) {
out := new(AppendEntriesResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/AppendEntries", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) PeerSet(ctx context.Context, in *PeerSetRequest, opts ...grpc.CallOption) (*PeerSetResponse, error) {
out := new(PeerSetResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/PeerSet", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *icecaneKVClient) PeerDelete(ctx context.Context, in *PeerDeleteRequest, opts ...grpc.CallOption) (*PeerDeleteResponse, error) {
out := new(PeerDeleteResponse)
err := c.cc.Invoke(ctx, "/icecanedbpb.IcecaneKV/PeerDelete", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// IcecaneKVServer is the server API for IcecaneKV service.
// All implementations must embed UnimplementedIcecaneKVServer
// for forward compatibility
type IcecaneKVServer interface {
// KV commands with txn supported.
Get(context.Context, *GetRequest) (*GetResponse, error)
Scan(context.Context, *ScanRequest) (*ScanResponse, error)
Set(context.Context, *SetRequest) (*SetResponse, error)
Delete(context.Context, *DeleteRequest) (*DeleteResponse, error)
BeginTxn(context.Context, *BeginTxnRequest) (*BeginTxnResponse, error)
CommitTxn(context.Context, *CommitTxnRequest) (*CommitTxnResponse, error)
RollbackTxn(context.Context, *RollbackTxnRequest) (*RollbackTxnResponse, error)
// Raft commands b/w two kv servers. Not to be used by the client
RequestVote(context.Context, *RequestVoteRequest) (*RequestVoteResponse, error)
AppendEntries(context.Context, *AppendEntriesRequest) (*AppendEntriesResponse, error)
PeerSet(context.Context, *PeerSetRequest) (*PeerSetResponse, error)
PeerDelete(context.Context, *PeerDeleteRequest) (*PeerDeleteResponse, error)
mustEmbedUnimplementedIcecaneKVServer()
}
// UnimplementedIcecaneKVServer must be embedded to have forward compatible implementations.
type UnimplementedIcecaneKVServer struct {
}
func (UnimplementedIcecaneKVServer) Get(context.Context, *GetRequest) (*GetResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Get not implemented")
}
func (UnimplementedIcecaneKVServer) Scan(context.Context, *ScanRequest) (*ScanResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Scan not implemented")
}
func (UnimplementedIcecaneKVServer) Set(context.Context, *SetRequest) (*SetResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Set not implemented")
}
func (UnimplementedIcecaneKVServer) Delete(context.Context, *DeleteRequest) (*DeleteResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Delete not implemented")
}
func (UnimplementedIcecaneKVServer) BeginTxn(context.Context, *BeginTxnRequest) (*BeginTxnResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method BeginTxn not implemented")
}
func (UnimplementedIcecaneKVServer) CommitTxn(context.Context, *CommitTxnRequest) (*CommitTxnResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method CommitTxn not implemented")
}
func (UnimplementedIcecaneKVServer) RollbackTxn(context.Context, *RollbackTxnRequest) (*RollbackTxnResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method RollbackTxn not implemented")
}
func (UnimplementedIcecaneKVServer) RequestVote(context.Context, *RequestVoteRequest) (*RequestVoteResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method RequestVote not implemented")
}
func (UnimplementedIcecaneKVServer) AppendEntries(context.Context, *AppendEntriesRequest) (*AppendEntriesResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method AppendEntries not implemented")
}
func (UnimplementedIcecaneKVServer) PeerSet(context.Context, *PeerSetRequest) (*PeerSetResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method PeerSet not implemented")
}
func (UnimplementedIcecaneKVServer) PeerDelete(context.Context, *PeerDeleteRequest) (*PeerDeleteResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method PeerDelete not implemented")
}
func (UnimplementedIcecaneKVServer) mustEmbedUnimplementedIcecaneKVServer() {}
// UnsafeIcecaneKVServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to IcecaneKVServer will
// result in compilation errors.
type UnsafeIcecaneKVServer interface {
mustEmbedUnimplementedIcecaneKVServer()
}
func RegisterIcecaneKVServer(s grpc.ServiceRegistrar, srv IcecaneKVServer) {
s.RegisterService(&IcecaneKV_ServiceDesc, srv)
}
func _IcecaneKV_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(IcecaneKVServer).Get(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/icecanedbpb.IcecaneKV/Get",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(IcecaneKVServer).Get(ctx, req.(*GetRequest))
}
return interceptor(ctx, in, info, handler)
}
func _IcecaneKV_Scan_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ScanRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(IcecaneKVServer).Scan(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/icecanedbpb.IcecaneKV/Scan",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(IcecaneKVServer).Scan(ctx, req.(*ScanRequest))
}
return interceptor(ctx, in, info, handler)
}
func _IcecaneKV_Set_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(SetRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(IcecaneKVServer).Set(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/icecanedbpb.IcecaneKV/Set",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(IcecaneKVServer).Set(ctx, req.(*SetRequest))
}
return interceptor(ctx, in, info, handler)
}
func _IcecaneKV_Delete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(DeleteRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(IcecaneKVServer).Delete(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/icecanedbpb.IcecaneKV/Delete",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(IcecaneKVServer).Delete(ctx, req.(*DeleteRequest))
}
return interceptor(ctx, in, info, handler)
}
// Generated gRPC routing shim for the BeginTxn RPC: decodes the wire request
// and dispatches to the service, through the unary interceptor when one is set.
func _IcecaneKV_BeginTxn_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(BeginTxnRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(IcecaneKVServer).BeginTxn(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/icecanedbpb.IcecaneKV/BeginTxn",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(IcecaneKVServer).BeginTxn(ctx, req.(*BeginTxnRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// Generated gRPC routing shim for the CommitTxn RPC: decodes the wire request
// and dispatches to the service, through the unary interceptor when one is set.
func _IcecaneKV_CommitTxn_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(CommitTxnRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(IcecaneKVServer).CommitTxn(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/icecanedbpb.IcecaneKV/CommitTxn",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(IcecaneKVServer).CommitTxn(ctx, req.(*CommitTxnRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// Generated gRPC routing shim for the RollbackTxn RPC: decodes the wire request
// and dispatches to the service, through the unary interceptor when one is set.
func _IcecaneKV_RollbackTxn_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(RollbackTxnRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(IcecaneKVServer).RollbackTxn(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/icecanedbpb.IcecaneKV/RollbackTxn",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(IcecaneKVServer).RollbackTxn(ctx, req.(*RollbackTxnRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// Generated gRPC routing shim for the RequestVote RPC (Raft leader election):
// decodes the wire request and dispatches to the service, through the unary
// interceptor when one is set.
func _IcecaneKV_RequestVote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(RequestVoteRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(IcecaneKVServer).RequestVote(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/icecanedbpb.IcecaneKV/RequestVote",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(IcecaneKVServer).RequestVote(ctx, req.(*RequestVoteRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// Generated gRPC routing shim for the AppendEntries RPC (Raft log replication):
// decodes the wire request and dispatches to the service, through the unary
// interceptor when one is set.
func _IcecaneKV_AppendEntries_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(AppendEntriesRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(IcecaneKVServer).AppendEntries(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/icecanedbpb.IcecaneKV/AppendEntries",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(IcecaneKVServer).AppendEntries(ctx, req.(*AppendEntriesRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// Generated gRPC routing shim for the PeerSet RPC: decodes the wire request and
// dispatches to the service, through the unary interceptor when one is set.
func _IcecaneKV_PeerSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(PeerSetRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(IcecaneKVServer).PeerSet(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/icecanedbpb.IcecaneKV/PeerSet",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(IcecaneKVServer).PeerSet(ctx, req.(*PeerSetRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// Generated gRPC routing shim for the PeerDelete RPC: decodes the wire request
// and dispatches to the service, through the unary interceptor when one is set.
func _IcecaneKV_PeerDelete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(PeerDeleteRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(IcecaneKVServer).PeerDelete(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/icecanedbpb.IcecaneKV/PeerDelete",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(IcecaneKVServer).PeerDelete(ctx, req.(*PeerDeleteRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// IcecaneKV_ServiceDesc is the grpc.ServiceDesc for IcecaneKV service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var IcecaneKV_ServiceDesc = grpc.ServiceDesc{
	ServiceName: "icecanedbpb.IcecaneKV",
	HandlerType: (*IcecaneKVServer)(nil),
	// Unary RPC name -> generated handler mapping.
	Methods: []grpc.MethodDesc{
		{
			MethodName: "Get",
			Handler:    _IcecaneKV_Get_Handler,
		},
		{
			MethodName: "Scan",
			Handler:    _IcecaneKV_Scan_Handler,
		},
		{
			MethodName: "Set",
			Handler:    _IcecaneKV_Set_Handler,
		},
		{
			MethodName: "Delete",
			Handler:    _IcecaneKV_Delete_Handler,
		},
		{
			MethodName: "BeginTxn",
			Handler:    _IcecaneKV_BeginTxn_Handler,
		},
		{
			MethodName: "CommitTxn",
			Handler:    _IcecaneKV_CommitTxn_Handler,
		},
		{
			MethodName: "RollbackTxn",
			Handler:    _IcecaneKV_RollbackTxn_Handler,
		},
		{
			MethodName: "RequestVote",
			Handler:    _IcecaneKV_RequestVote_Handler,
		},
		{
			MethodName: "AppendEntries",
			Handler:    _IcecaneKV_AppendEntries_Handler,
		},
		{
			MethodName: "PeerSet",
			Handler:    _IcecaneKV_PeerSet_Handler,
		},
		{
			MethodName: "PeerDelete",
			Handler:    _IcecaneKV_PeerDelete_Handler,
		},
	},
	// The service defines no streaming RPCs.
	Streams:  []grpc.StreamDesc{},
	Metadata: "icecanekvpb.proto",
}
|
<gh_stars>0
import React from 'react';
import ReactDOM from 'react-dom';
import {BrowserRouter as Router} from 'react-router-dom';
import {Provider} from 'react-redux';
import {createStore, applyMiddleware} from 'redux';
import thunk from 'redux-thunk';
import { logger } from 'redux-logger';
import combineReducers from './reducers/combineReducers'
import App from './App';
// Redux store wired with thunk (async action creators) and logger middleware.
const store = createStore(combineReducers, applyMiddleware(thunk, logger))
// const store = compose(window.devToolsExtension ? window.devToolsExtension() : f => f)
// (createStore(combineReducers, applyMiddleware(thunk, logger)))
// Mount the app into #root, supplying the Redux store and client-side routing.
ReactDOM.render(
<Provider store = {store}>
    <Router>
        <App />
    </Router>
</Provider>, document.getElementById('root'));
# Prompt the user until a non-empty value is entered, then store it in the
# variable named by $2.
#   $1 - label shown in the prompt
#   $2 - name of the variable to assign (in the caller's scope)
function read_param () {
    local prompt="$1" target="$2" input=""
    while [ -z "$input" ]
    do
        # -r keeps backslashes literal instead of treating them as escapes.
        read -r -p "Enter ${prompt}: " input
    done
    # printf -v assigns without the quoting pitfalls of the old eval approach.
    printf -v "$target" '%s' "$input"
}
# Replace every occurrence of $2 with $3 inside file $1 (sed patterns).
#   $1 - file to edit in place
#   $2 - pattern to replace (basic regular expression)
#   $3 - replacement text
function replace_content_file () {
    local file_name="$1"
    local old="$2"
    local new="$3"
    # Write to a temporary copy first so a failed sed never truncates the
    # original file. (The old version also ran "rm -f $FILE.bak" *after*
    # the backup had already been moved away — a dead statement, removed.)
    sed "s/${old}/${new}/g" "$file_name" > "$file_name.bak"
    mv -f "$file_name.bak" "$file_name"
}
# Template values baked into the checked-in Xcode project.
BASE_PROJECT_NAME="TemplateProject"
BASE_BUNDLE_PROJECT_ID="com.dwarvesv.TemplateProject"
# Values collected from the user below.
NEW_PROJECT_NAME="TemplateProject"
BUNDLE_PROJECT_ID="com.dwarvesv.TemplateProject"
echo "iOS template project setup script"
echo "================================="
echo "--> Input initialize values"
read_param "project name ($BASE_PROJECT_NAME)" NEW_PROJECT_NAME
echo $NEW_PROJECT_NAME
read_param "bundle id ($BASE_BUNDLE_PROJECT_ID)" BUNDLE_PROJECT_ID
echo $BUNDLE_PROJECT_ID
## Init project params
TEST_STR="Tests"
DEV_STR="Dev"
XCODEPROJ_FILE="./$BASE_PROJECT_NAME.xcodeproj"
NEW_XCODEPROJ_FILE="./$NEW_PROJECT_NAME.xcodeproj"
PBPROJECT_FILE="$XCODEPROJ_FILE/project.pbxproj"
SCHEMA_FILE="$XCODEPROJ_FILE/xcshareddata/xcschemes/$BASE_PROJECT_NAME.xcscheme"
DEV_SCHEMA_FILE="$XCODEPROJ_FILE/xcshareddata/xcschemes/$BASE_PROJECT_NAME$DEV_STR.xcscheme"
WORKSPACE_DATA_FILE="$XCODEPROJ_FILE/project.xcworkspace/contents.xcworkspacedata"
NEW_SCHEMA_FILE="$XCODEPROJ_FILE/xcshareddata/xcschemes/$NEW_PROJECT_NAME.xcscheme"
NEW_DEV_SCHEMA_FILE="$XCODEPROJ_FILE/xcshareddata/xcschemes/$NEW_PROJECT_NAME$DEV_STR.xcscheme"
echo "--> Modify $XCODEPROJ_FILE content"
### Replace bundle id
replace_content_file $PBPROJECT_FILE $BASE_BUNDLE_PROJECT_ID $BUNDLE_PROJECT_ID
### Replace project name
replace_content_file $PBPROJECT_FILE $BASE_PROJECT_NAME $NEW_PROJECT_NAME
### Replace workspace data
replace_content_file $WORKSPACE_DATA_FILE $BASE_PROJECT_NAME $NEW_PROJECT_NAME
### Replace schema file
replace_content_file $SCHEMA_FILE $BASE_PROJECT_NAME $NEW_PROJECT_NAME
### Replace dev schema file
replace_content_file $DEV_SCHEMA_FILE $BASE_PROJECT_NAME $NEW_PROJECT_NAME
### Rename schema files
mv -f $SCHEMA_FILE $NEW_SCHEMA_FILE
mv -f $DEV_SCHEMA_FILE $NEW_DEV_SCHEMA_FILE
echo "--> Modify ./$BASE_PROJECT_NAME$TEST_STR content"
### Replace test folder
OLD_TEST_FOLDER="./$BASE_PROJECT_NAME$TEST_STR"
OLD_TEST_FILE="./$BASE_PROJECT_NAME$TEST_STR/$BASE_PROJECT_NAME$TEST_STR.swift"
NEW_TEST_FOLDER="./$NEW_PROJECT_NAME$TEST_STR"
NEW_TEST_FILE_NAME="$NEW_PROJECT_NAME$TEST_STR.swift"
DEV_STR="DevInfo"
DEV_PLIST="./$BASE_PROJECT_NAME/$BASE_PROJECT_NAME$DEV_STR.plist"
NEW_DEV_PLIST="./$BASE_PROJECT_NAME/$NEW_PROJECT_NAME$DEV_STR.plist"
### Replace unit test content
replace_content_file $OLD_TEST_FILE $BASE_PROJECT_NAME $NEW_PROJECT_NAME
mv $OLD_TEST_FILE "$OLD_TEST_FOLDER/$NEW_TEST_FILE_NAME"
mv $OLD_TEST_FOLDER $NEW_TEST_FOLDER
mv $XCODEPROJ_FILE $NEW_XCODEPROJ_FILE
mv $DEV_PLIST $NEW_DEV_PLIST
mv "./$BASE_PROJECT_NAME" "./$NEW_PROJECT_NAME"
echo "--> Modify Podfile and generate xcworkspace"
### Replace podfile content
PODFILE="Podfile"
rm -rf ./Pods
rm -rf "$BASE_PROJECT_NAME.xcworkspace"
replace_content_file $PODFILE $BASE_PROJECT_NAME $NEW_PROJECT_NAME
pod install
open ./$NEW_PROJECT_NAME.xcworkspace
echo "--> Git init"
### Start a fresh repository history for the renamed project.
rm -rf .git/
git init
git add .
git commit -m "Init Project"
<reponame>WlodzimierzKorza/small_eod<gh_stars>10-100
# Default AppConfig path Django loads for this app when none is named
# explicitly in INSTALLED_APPS.
default_app_config = "small_eod.features.apps.FeatureConfig"
|
package ru.stqa.pft.homework_1;
// A point holding two coordinate values.
public class P2 {

    // The two stored coordinate values.
    public double y1;
    public double y2;

    // Build a point from its two coordinate values.
    public P2(double y1, double y2) {
        this.y1 = y1;
        this.y2 = y2;
    }

    // Squared difference of the two coordinates: (y2 - y1)^2.
    public double coord() {
        return Math.pow(this.y2 - this.y1, 2);
    }
}
|
from django.db import models
import json
class CustomJSONField(models.Field):
    """Model field that stores JSON and round-trips it through ``json``.

    Uses PostgreSQL's ``jsonb`` column type when available and plain
    ``json`` on other backends.
    """

    description = "Custom field to store and process JSON data"

    def __init__(self, *args, **kwargs):
        # JSON payloads are optional, so the column always allows blank/null.
        kwargs['blank'] = True
        kwargs['null'] = True
        super().__init__(*args, **kwargs)

    def db_type(self, connection):
        # Prefer the indexed binary JSON type on PostgreSQL.
        return 'jsonb' if connection.vendor == 'postgresql' else 'json'

    def from_db_value(self, value, expression, connection):
        # Database -> Python. Some drivers (e.g. psycopg2 for json/jsonb
        # columns) already return decoded objects; json.loads() on a dict or
        # list would raise TypeError, so only decode strings.
        if value is None or isinstance(value, (dict, list)):
            return value
        return json.loads(value)

    def to_python(self, value):
        # Deserialization/forms path: accept already-decoded objects — dicts
        # *and* lists (the old check only handled dicts, so a JSON array
        # crashed here) — as well as raw JSON strings.
        if value is None or isinstance(value, (dict, list)):
            return value
        return json.loads(value)

    def get_prep_value(self, value):
        # Python -> database: serialize everything except NULL.
        if value is None:
            return value
        return json.dumps(value)

    def validate(self, value, model_instance):
        super().validate(value, model_instance)
        # Custom validation logic based on specific requirements

    def process_json_data(self, value, model_instance):
        # Custom processing hook, dispatched on the owning model.
        # NOTE(review): PozeelementImportantAnsambluConstruit and
        # PozeetapeIstoriceVizibile are not imported in this module, so calling
        # this method as written raises NameError — confirm the intended
        # imports before relying on it.
        if isinstance(model_instance, PozeelementImportantAnsambluConstruit):
            # Process JSON data for PozeelementImportantAnsambluConstruit model
            pass
        elif isinstance(model_instance, PozeetapeIstoriceVizibile):
            # Process JSON data for PozeetapeIstoriceVizibile model
            pass
<filename>PL_Java/src/lists/ManipulateList.java<gh_stars>1-10
package lists;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import org.apache.commons.configuration.ConfigurationComparator;
// Teaching examples for arrays, generics, enums and functional interfaces.
public class ManipulateList {

	// Print each (total, final) cutoff pair accepted by the given selector.
	static void printSelectedCutoffs (TwoArgumentSelector<Integer, Integer> cutoffSelector, int[][] aTotalFinalCutoffs) {
		int anIndex = 0;
		while (anIndex < aTotalFinalCutoffs.length) {
			int[] aPair = aTotalFinalCutoffs[anIndex];
			if (cutoffSelector.select(aPair[0], aPair[1])) {
				System.out.println("Total = " + aPair[0] + " Final = " + aPair[1]);
			}
			anIndex = anIndex + 1;
		}
	}

	enum Color {RED, GREEN, BLUE};

	// Pairs of (total score, final score) cutoffs used by the demos below.
	static int[][]
	totalFinalCutoffs =
		{
			{20, 100},
			{30, 90},
			{40, 80},
			{50, 70}
		};

	// Print every (total, final) cutoff pair unconditionally.
	static void printTotalFinalCutoffs (int[][] aTotalFinalCutoffs) {
		int anIndex = 0;
		while (anIndex < aTotalFinalCutoffs.length) {
			int[] aPair = aTotalFinalCutoffs[anIndex];
			System.out.println("Total = " + aPair[0] + " Final = " + aPair[1]);
			anIndex = anIndex + 1;
		}
	}

	// Return a new array holding the elements of the input in reverse order.
	public static Object[] reverse (Object[] listToBeReversed) {
		Object[] reversedList = new Object[listToBeReversed.length];
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = listToBeReversed[index];
		}
		return reversedList;
	}

	// NOTE: stores the *index* rather than the element — kept as a contrast
	// to reverse() above.
	public static Object[] reverse2 (Object[] listToBeReversed) {
		Object[] reversedList = new Object[listToBeReversed.length];
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = index;
		}
		return reversedList;
	}

	// public static <ElementType> ElementType[] reverse3 (ElementType[] listToBeReversed) {
	// ElementType[] reversedList = new Object[listToBeReversed.length];
	// for (int index = 0; index < listToBeReversed.length; index++ ) {
	// reversedList[listToBeReversed.length - 1 - index] = listToBeReversed[index];
	// }
	// return reversedList;
	// }

	// Generic reverse: fills the caller-supplied output array, sidestepping
	// Java's inability to instantiate generic arrays.
	public static <ElementType> void reverse3 (ElementType[] listToBeReversed, ElementType[] reversedList) {
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = listToBeReversed[index];
		}
	}

	// public static <ElementType> void reverse4 (ElementType[] listToBeReversed, ElementType[] reversedList) {
	// for (int index = 0; index < listToBeReversed.length; index++ ) {
	// reversedList[listToBeReversed.length - 1 - index] = index;
	// }
	// }

	// Object[] variant of the generic reverse above.
	public static <ElementType> void reverse (Object[] listToBeReversed, Object[] reversedList) {
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = listToBeReversed[index];
		}
	}

	// NOTE: stores the *index* rather than the element (see reverse2).
	public static <ElementType> void reverse2 (Object[] listToBeReversed, Object[] reversedList) {
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = index;
		}
	}

	// Primitive int variant: fills the caller-supplied output array.
	public static void reverseInts (int[] listToBeReversed, int[] reversedList) {
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = listToBeReversed[index];
		}
	}

	// Primitive boolean variant: fills the caller-supplied output array.
	public static void reverseBools (boolean[] listToBeReversed, boolean[] reversedList) {
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = listToBeReversed[index];
		}
	}

	// public static <ElementType> ElementType[] reverse4 (ElementType[] listToBeReversed) {
	// ElementType[] reversedList = (ElementType[]) new Object[listToBeReversed.length];
	// for (int index = 0; index < listToBeReversed.length; index++ ) {
	// reversedList[listToBeReversed.length - 1 - index] = index;;
	// }
	// return reversedList;
	// }
	//

	// Primitive int variant that allocates and returns the reversed array.
	public static int[] reverseInts (int[] listToBeReversed) {
		int[] reversedList = new int[listToBeReversed.length];
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = listToBeReversed[index];
		}
		return reversedList;
	}

	// Primitive boolean variant that allocates and returns the reversed array.
	public static boolean[] reverseBools (boolean[] listToBeReversed) {
		boolean[] reversedList = new boolean[listToBeReversed.length];
		for (int index = 0; index < listToBeReversed.length; index++ ) {
			reversedList[listToBeReversed.length - 1 - index] = listToBeReversed[index];
		}
		return reversedList;
	}

	public static void main (String[] args) {
		printSelectedCutoffs(new FirstArgumentGreaterThanSecond(), totalFinalCutoffs);
		printSelectedCutoffs(new FirstArgumentLessThanSecond(), totalFinalCutoffs);
		// Bug fix: compare boxed Integers with equals(), not ==. The old
		// reference comparison only "worked" because the demo values fall in
		// the small-integer cache.
		printSelectedCutoffs((Integer argument1, Integer argument2) -> argument1.equals(argument2), totalFinalCutoffs);
		var number = 5;
		number = 6;
		// number = "6";
		var list = new ArrayList<>();
		// list = new LinkedList();
		Integer[] listToBeReversed = {1, 2, 3};
		Integer[] reversedList = new Integer[listToBeReversed.length];
		reverse3(listToBeReversed, reversedList);
		// int[] intList = {1, 2, 3};
		// int[] reversedIntList = reverse4(intList);
		System.out.println("Original:" + Arrays.toString(listToBeReversed));
		System.out.println("Reversed:" + Arrays.toString(reversedList));
		Set aSet = new HashSet();
		// NOTE(review): a HashSet is not a List — this cast throws
		// ClassCastException at runtime, so nothing below it ever runs.
		// Confirm whether the line is a deliberate demonstration.
		((List) aSet).get(0);
		System.out.println(Color.RED);
		System.out.println(Color.RED.ordinal());
		System.out.println(Color.GREEN.ordinal());
		System.out.println("RED" == "RED");
		Scanner aScanner = new Scanner(System.in);
		System.out.println("Color?");
		String aColor = aScanner.nextLine();
		System.out.println("RED" == aColor);
		System.out.println("RED".equals(aColor));
		printTotalFinalCutoffs(totalFinalCutoffs);
	}
}
|
<reponame>mkralik3/syndesis-qe
package io.syndesis.qe.utils;
import io.syndesis.qe.account.Account;
import io.syndesis.qe.account.AccountsDirectory;
import org.assertj.core.api.Assertions;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import com.github.seratch.jslack.Slack;
import com.github.seratch.jslack.api.methods.SlackApiException;
import com.github.seratch.jslack.api.methods.request.chat.ChatPostMessageRequest;
import com.github.seratch.jslack.api.methods.request.conversations.ConversationsHistoryRequest;
import com.github.seratch.jslack.api.methods.request.conversations.ConversationsListRequest;
import com.github.seratch.jslack.api.methods.response.chat.ChatPostMessageResponse;
import com.github.seratch.jslack.api.methods.response.conversations.ConversationsHistoryResponse;
import com.github.seratch.jslack.api.methods.response.conversations.ConversationsListResponse;
import com.github.seratch.jslack.api.model.Conversation;
import com.github.seratch.jslack.api.model.Message;
import java.io.IOException;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Component
@Lazy
public class SlackUtils {
    private static final Slack SLACK = Slack.getInstance();

    // API token for the workspace's "Bots" custom integration.
    private String token;

    /**
     * To create instance of slack connector, correct credentials must be set up. Class expects
     * slack connector with name "QE Slack". Syndesis connector requires "webhookUrl" with
     * web hook for desired workspace. For tests to work there must be property "Token". Token
     * is API Token for slack workspace custom integration called "Bots".
     */
    public SlackUtils() {
        Account account = AccountsDirectory.getInstance().get(Account.Name.SLACK);
        token = account.getProperty("token");
        // Bug fix: the previous code only called .as(...), which sets the
        // assertion description without asserting anything. isNotNull() makes
        // the check actually fail when the property is missing.
        Assertions.assertThat(token).as("There is no slack property \"Token\" in credentials file!").isNotNull();
    }

    /**
     * Send message to specified channel.
     * <p>
     * Note that Slack QE credentials must have "Token" property correctly set up for desired workspace.
     *
     * @param message message
     * @param channelName channel
     * @return response
     * @throws IOException when something goes wrong
     * @throws SlackApiException when something goes wrong
     */
    public ChatPostMessageResponse sendMessage(String message, String channelName) throws IOException, SlackApiException {
        // find all channels in the workspace
        ConversationsListResponse conversationsList = SLACK.methods().conversationsList(ConversationsListRequest.builder().token(token).build());
        // find channelName
        Conversation chann = conversationsList.getChannels().stream()
            .filter(c -> c.getName().equals(channelName)).findFirst().get();
        // https://slack.com/api/chat.postMessage
        return SLACK.methods().chatPostMessage(ChatPostMessageRequest.builder()
            .token(token)
            .channel(chann.getId())
            .username("syndesis-bot")
            .text(message)
            .build());
    }

    /**
     * Check if last message on channel is expectedMessage.
     * <p>
     * Note that Slack QE credentials must have "Token" property correctly set up for desired workspace.
     *
     * @param expectedMessage expected message
     * @param channelName channel
     * @throws IOException when something goes wrong
     * @throws SlackApiException when something goes wrong
     */
    public void checkLastMessageFromChannel(String expectedMessage, String channelName) throws IOException, SlackApiException {
        // find all channels in the workspace
        ConversationsListResponse conversationsList = SLACK.methods().conversationsList(ConversationsListRequest.builder().token(token).build());
        Assertions.assertThat(conversationsList.isOk()).isTrue();
        // find channelName
        Conversation chann = conversationsList.getChannels().stream()
            .filter(c -> c.getName().equals(channelName)).findFirst().get();
        // fetch history
        ConversationsHistoryResponse history = SLACK.methods().conversationsHistory(ConversationsHistoryRequest.builder()
            .token(token)
            .channel(chann.getId())
            .build());
        Assertions.assertThat(history.isOk()).isTrue();
        log.debug("Slack history fetched successfully...");
        // get last message (history is returned newest-first)
        Message lastMessage = history.getMessages().get(0);
        log.info("Last slack message in channel {} is '{}'", channelName, lastMessage.getText());
        Assertions.assertThat(lastMessage.getText()).isEqualToIgnoringCase(expectedMessage);
    }
}
|
<gh_stars>0
import { PipeTransform } from '@angular/core';
import * as ɵngcc0 from '@angular/core';
// Generated declaration for the "drop" pipe.
// NOTE(review): behaviour (dropping `quantity` leading elements of `input`)
// is inferred from the pipe name only — the implementation lives elsewhere;
// confirm before relying on it.
export declare class DropPipe implements PipeTransform {
    transform(input: any, quantity?: number): any;
    static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<DropPipe, never>;
    static ɵpipe: ɵngcc0.ɵɵPipeDeclaration<DropPipe, "drop">;
}
// Generated NgModule declaration: declares and exports DropPipe.
export declare class NgDropPipeModule {
    static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<NgDropPipeModule, never>;
    static ɵmod: ɵngcc0.ɵɵNgModuleDeclaration<NgDropPipeModule, [typeof DropPipe], never, [typeof DropPipe]>;
    static ɵinj: ɵngcc0.ɵɵInjectorDeclaration<NgDropPipeModule>;
}
//# sourceMappingURL=drop.pipe.d.ts.map |
package com.me.DataStructure.tuple;
import java.io.ObjectStreamException;
// Zero-arity tuple: carries no fields, so field access always fails and all
// instances are interchangeable.
public class Tuple0 extends Tuple {
	private static final long serialVersionUID = 1L;

	// an immutable reusable Tuple0 instance
	public static final Tuple0 INSTANCE = new Tuple0();

	// ------------------------------------------------------------------------

	// A Tuple0 has no fields.
	@Override
	public int getArity() {
		return 0;
	}

	// No position is valid for a zero-arity tuple.
	@Override
	public <T> T getField(int pos) {
		throw new IndexOutOfBoundsException(String.valueOf(pos));
	}

	// No position is valid for a zero-arity tuple.
	@Override
	public <T> void setField(T value, int pos) {
		throw new IndexOutOfBoundsException(String.valueOf(pos));
	}

	/**
	 * Shallow tuple copy.
	 * @return A new Tuple with the same fields as this.
	 */
	@Override
	@SuppressWarnings("unchecked")
	public Tuple0 copy(){
		return new Tuple0();
	}

	// -------------------------------------------------------------------------------------------------
	// standard utilities
	// -------------------------------------------------------------------------------------------------

	/**
	 * Creates a string representation of the tuple in the form "()".
	 *
	 * @return The string representation of the tuple.
	 */
	@Override
	public String toString() {
		return "()";
	}

	/**
	 * Deep equality for tuples by calling equals() on the tuple members.
	 *
	 * @param o
	 *		the object checked for equality
	 * @return true if this is equal to o.
	 */
	@Override
	public boolean equals(Object o) {
		return this == o || o instanceof Tuple0;
	}

	@Override
	public int hashCode() {
		return 0;
	}

	// singleton deserialization: every deserialized Tuple0 collapses back to
	// the shared INSTANCE.
	private Object readResolve() throws ObjectStreamException {
		return INSTANCE;
	}
}
|
<filename>test/integration/node/objects/devices/input/keyboard.js
// Keyboard input-device stub used by the integration tests.
function Keyboard() {
  // Default language/layout code.
  this.language = 'EN';
}

Object.assign(Keyboard.prototype, {
  // Describe the device by reporting its language code.
  desc: function () {
    return this.language;
  },
});

module.exports = Keyboard;
<gh_stars>1-10
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Aura component test suite exercising lazy loading inside an iteration:
// lazily-loaded items start life as aura:placeholder components and are
// swapped for the real component once the server round-trip completes.
({
	labels : [ "threadHostile" ],
	/**
	 * Verify simple lazy loading case.
	 */
	// TODO: W-2406307: remaining Halo test failure
	_testSimpleLazyLoading:{
		attributes:{start:0,end:2, slowFacet:true},
		test:function(cmp){
			var helper = cmp.getDef().getHelper();
			var items = cmp.find("lazy");
			$A.test.assertEquals(2, items.length,
					"Expected two items in iteration component.");
			$A.test.assertEquals("placeholder", items[0].getDef().getDescriptor().getName(),
					"Expected a placeholder for first lazy loading component.");
			$A.test.assertEquals("placeholder", items[1].getDef().getDescriptor().getName(),
					"Expected a placeholder for second lazy loading component.");
			//
			// Note that we have to free up the second component because they get
			// put in a single request to the server.
			//
			helper.resumeGateId(cmp, "withLazyComponents0");
			helper.resumeGateId(cmp, "withLazyComponents1");
			//First Item
			$A.test.addWaitFor("serverComponent", function(){
				return cmp.find("lazy")[0].getDef().getDescriptor().getName();
			},function(){
				$A.test.assertTrue(cmp.find("lazy")[0].isRendered());
				$A.test.assertTrue($A.test.getTextByComponent(cmp.find("lazy")[0]).indexOf("Server component")!=-1);
				//iteration component
				var iteration = cmp.find("iteration");
				var body = iteration.get("v.body");
				$A.test.assertTrue(body instanceof Array);
				$A.test.assertEquals(2, body.length);
				//Placeholder for lazy component
				var placeholder = body[0];
				$A.test.assertTrue($A.util.isObject(placeholder));
				$A.test.assertTrue($A.util.isComponent(placeholder));
				$A.test.assertEquals("markup://aura:placeholder", placeholder.getDef().getDescriptor().getQualifiedName());
				//Actual lazy loaded component
				var placeholderBody = placeholder.get("v.body");
				$A.test.assertTrue(placeholderBody instanceof Array);
				$A.test.assertEquals(1, placeholderBody.length);
				$A.test.assertTrue($A.util.isObject(placeholderBody[0]));
				$A.test.assertTrue($A.util.isComponent(placeholderBody[0]));
				$A.test.assertEquals("markup://loadLevelTest:serverComponent", placeholderBody[0].getDef().getDescriptor().getQualifiedName());
				//Verify that the placeholder's elements were associated with the iteration component
				var flag= false;
				var itrElements = iteration.getElements();
				for(var i=0;i<itrElements.length;i++){
					if(itrElements[i].className){
						flag = flag || (($A.test.isInstanceOfDivElement(itrElements[i])) &&
								(itrElements[i].className.indexOf("auraPlaceholder")!=-1 ) );
					}
				}
				$A.test.assertTrue(flag, "Placeholder elements were not associated with iteration component");
			});
			//Second Item
			$A.test.addWaitFor("serverComponent", function(){
				return cmp.find("lazy")[1].getDef().getDescriptor().getName();
			},function(){
				$A.test.assertTrue(cmp.find("lazy")[1].isRendered());
				$A.test.assertTrue($A.test.getTextByComponent(cmp.find("lazy")[1]).indexOf("Server component")!=-1);
			});
		}
	},
	/**
	 * Verify that iteration component facilitates using the outer component as value provider.
	 */
	testLazyLoadingWithAttributeValues:{
		attributes:{start:0,end:4, fastFacet:true},
		test:function(cmp){
			var helper = cmp.getDef().getHelper();
			var iteration = cmp.find("iterationWithAttributes");
			var items = cmp.find("lazyWithAttributes");
			$A.test.assertEquals(4,items.length, "Expected 4 items in iteration.");
			//Verify that all items are initially represented by placeholder
			for(var i = 0, n = items.length; i < n; i++) {
				var item = items[i];
				$A.test.assertEquals("placeholder", item.getDef().getDescriptor().getName(),
						"Expected a placeholder for lazy loading component.");
			}
			helper.resumeGateId(cmp, "withAttributes");
			//Verify first item is replaced by expected value
			$A.test.addWaitFor('markup://aura:expression', function(){
				return cmp.find('lazyWithAttributes')[0].getDef().getDescriptor().getQualifiedName();
			},
			function(){
				$A.test.assertEquals(cmp.get('m.innerData')[0],$A.test.getTextByComponent(cmp.find("lazyWithAttributes")[0]),
						"Failed to provide value for lazy component in iteration block [loc1]");
			});
			//Verify last item is replaced by expected value
			$A.test.addWaitFor('markup://aura:expression', function(){
				return cmp.find('lazyWithAttributes')[3].getDef().getDescriptor().getQualifiedName();
			},
			function(){
				$A.test.assertEquals(cmp.get('m.innerData')[3],$A.test.getTextByComponent(cmp.find("lazyWithAttributes")[3]),
						"Failed to provide value for lazy component in iteration block [loc2]");
			});
		}
	}
})
|
<gh_stars>0
import { EntityRepository, Repository } from 'typeorm';
import Gestion from '../domain/gestion.entity';
// TypeORM repository for Gestion entities; inherits the generic CRUD API
// (find, save, delete, ...) from Repository without adding custom queries.
@EntityRepository(Gestion)
export class GestionRepository extends Repository<Gestion> {}
|
<reponame>cat-in-136/redmine-slack
module RedmineSlack
  # Patch mixed into Redmine's News model: fires plugin hooks after a news
  # item is created or updated so listeners (e.g. the Slack notifier) can react.
  module NewsPatch
    def self.included(base) # :nodoc:
      base.extend(ClassMethods)
      base.send(:include, InstanceMethods)
      base.class_eval do
        unloadable # Send unloadable so it will not be unloaded in development
        # Run the hook-dispatching callbacks after the ActiveRecord events.
        after_create :create_from_news
        after_update :save_from_news
      end
    end

    module ClassMethods
    end

    module InstanceMethods
      # After-create callback; returns true so the callback chain continues.
      def create_from_news
        Redmine::Hook.call_hook(:redmine_slack_news_new_after_save, { :news => self})
        return true
      end

      # After-update callback; returns true so the callback chain continues.
      def save_from_news
        Redmine::Hook.call_hook(:redmine_slack_news_edit_after_save, { :news => self })
        return true
      end
    end
  end
end
|
#!/bin/sh
set -e

cd ~/.vim_runtime

# Generate ~/.vimrc. my_configs.vim is the user's optional override file, so
# it is sourced only inside the try/catch block. (The previous version also
# sourced it unconditionally one line earlier, which made vim error on
# startup whenever the file did not exist.)
echo 'set runtimepath+=~/.vim_runtime
source ~/.vim_runtime/vimrcs/basic.vim
source ~/.vim_runtime/vimrcs/filetypes.vim
source ~/.vim_runtime/vimrcs/plugins_config.vim
source ~/.vim_runtime/vimrcs/extended.vim
try
source ~/.vim_runtime/vimrcs/my_configs.vim
catch
endtry' > ~/.vimrc

echo "Installed the Ultimate Vim configuration successfully! Enjoy :-)"
|
#!/bin/bash
set -Eeuo pipefail
# Remember the terminal type in case it needs restoring later.
oldterm=$TERM
# Force a 256-color terminal so the color escape variables render correctly.
if [[ "$(tput colors)" -ne 256 ]]; then
    export TERM=xterm-256color
    reset
fi
# Provides the color variables (RED, CYAN, MAGENTA, NORMAL, ...) used below.
source color.sh
# Define functions
# function to set root password
# Prompts twice (silently); returns 5 on empty/mismatched input so the caller
# can retry, 0 once the root password has been set via chpasswd.
setrpass() {
    read -r -s -p "Enter the password you want to use for root: " rootpass
    echo
    read -r -s -p "Please repeat the password: " rootpass2
    echo
    if [[ -z $rootpass || -z $rootpass2 ]]; then
        echo -e "${RED}Either one or both of the passwords were empty, please try again.${NORMAL}"
        sleep 4
        return 5
    elif [[ $rootpass != "$rootpass2" ]]; then
        echo 'The passwords do not match, please try again.'
        sleep 4
        return 5
    else
        echo -n "root:$rootpass" | chpasswd
        return 0
    fi
}
# function to set user password
# Create a wheel-group user with an interactively typed password.
# Returns 0 on success, 5 on invalid input so the caller can retry.
setupass() {
    read -r -p "Enter the name of the user you would like to create: " username
    echo
    read -r -s -p "Enter the password you would like to use for $username: " userpass
    echo
    read -r -s -p "Please repeat the password: " userpass2
    echo
    if [[ -z $userpass || -z $userpass2 ]]; then
        printf "%s\n" "${RED}Either one of the passwords, or both were empty, please make sure to type something!${NORMAL}"
        sleep 4
        return 5
    elif [[ -z $username ]]; then
        printf "%s\n" "${RED}The username is empty, please make sure to type something!${NORMAL}"
        sleep 4
        return 5
    elif [[ $userpass != "$userpass2" ]]; then
        printf "%s\n" "${RED}The passwords do not match, please try again.${NORMAL}"
        sleep 4
        return 5
    fi
    # Bug fix: scan *all* existing account names before creating anything.
    # The old loop branched per /etc/passwd entry and created the user as
    # soon as the very first entry (root) failed to match.
    local existing
    while IFS= read -r existing; do
        if [[ $username = "$existing" ]]; then
            printf "%s\n" "${RED}That username already exists! Try another one.${NORMAL}"
            return 5
        fi
    done < <(awk -F':' '{print $1}' /etc/passwd)
    useradd -m "$username"
    usermod -aG wheel "$username"
    echo -n "${username}:${userpass}" | chpasswd
    return 0
}
kernel() {
    # Install the matching -headers package for every kernel image found in
    # /boot (e.g. /boot/vmlinuz-linux-lts -> linux-lts-headers).
    while read -r line; do
        line="${line##*/}" # strip the directory part
        line="${line#*-}"  # strip the "vmlinuz-" prefix, keeping the kernel name
        pacman -S --noconfirm "$line"-headers
    done < <(find /boot -type f -name 'vmlinuz*')
}
# Locale, clock and hostname setup.
# NOTE(review): timezone is hard-coded to America/Los_Angeles — confirm.
ln -sf /usr/share/zoneinfo/America/Los_Angeles /etc/localtime
hwclock --systohc
# Uncomment the en_US.UTF-8 locale (line 177 of /etc/locale.gen) and build it.
sed -i '177s/.//' /etc/locale.gen
locale-gen
echo "LANG=en_US.UTF-8" >> /etc/locale.conf
read -r -p "What hostname would you like to use: " host
echo "$host" >> /etc/hostname
printf '%s %s\n%s\t %s\n%s %s %s\n' "127.0.0.1" "localhost" "::1" "localhost" "127.0.1.1" "$host.localdomain" "$host" >> /etc/hosts
# Retry the root-password prompt until it succeeds. Bug fix: calling setrpass
# as the loop condition keeps its non-zero "retry" status from tripping
# errexit — the old "setrpass; until [[ $? -eq 0 ]]" form aborted the whole
# script (set -Ee) on the first invalid input.
until setrpass; do
    clear
done
kernel
pacman -S --noconfirm grub networkmanager network-manager-applet e2fsprogs dialog wpa_supplicant mtools dosfstools reflector base-devel avahi xdg-user-dirs xdg-utils gvfs gvfs-smb nfs-utils inetutils dnsutils alsa-utils bash-completion openssh rsync acpi acpi_call openbsd-netcat iptables ipset firewalld sof-firmware nss-mdns acpid os-prober ntfs-3g man-db man-pages texinfo
grubInstall() {
    # Install GRUB for either BIOS (dos disklabel) or UEFI (gpt disklabel)
    # systems and generate its configuration.
    local esp partTable
    # Mount point of the EFI system partition (the mounted FAT filesystem).
    esp="$(awk '/fat/ {print $2}' /proc/mounts)"
    # NOTE(review): with several disks "fdisk -l" reports one label per disk,
    # so $partTable could hold multiple words — confirm single-disk assumption.
    partTable="$(fdisk -l | awk '/Disklabel type:/ {print $3}')"
    if [[ $partTable = dos ]]; then
        # BIOS/MBR: install to the first disk reported by fdisk.
        grub-install --target=i386-pc "$(fdisk -l | awk 'NR==1 {print $2}' | tr -d ':')"
        grub-mkconfig -o /boot/grub/grub.cfg
    elif [[ $partTable = gpt ]]; then
        pacman -S --noconfirm efibootmgr
        grub-install --target=x86_64-efi --efi-directory="$esp" --bootloader-id=GRUB
        grub-mkconfig -o /boot/grub/grub.cfg
    else
        echo -e "${RED}${UNDERLINE}Something has gone wrong with installing grub${NORMAL}" && exit 1
    fi
}
grubInstall
virtGpu() {
    # Install VM guest tools when running virtualized, GPU drivers otherwise.
    local vmplat gpubrand
    # systemd-detect-virt prints "none" and exits non-zero on bare metal;
    # "|| true" keeps the assignment from aborting the script under errexit.
    vmplat="$(systemd-detect-virt || true)"
    gpubrand="$(lspci | grep VGA | cut -d ':' -f3)"
    # Bug fix: the old test was "[[ -z $vmplat ]]", which entered the VM
    # branch only when the string was EMPTY — so the vmware/oracle checks
    # inside it could never match and guest tools were never installed.
    if [[ -n $vmplat && $vmplat != none ]]; then
        if [[ $vmplat = vmware ]]; then
            pacman -S --noconfirm open-vm-tools gtkmm3 &&
            systemctl enable vmtoolsd
            systemctl enable vmware-vmblock-fuse
        elif [[ $vmplat = oracle ]]; then
            pacman -S --noconfirm virtualbox-guest-utils &&
            systemctl enable vboxservice
        else
            echo "No virtual machine platform found"
        fi
    else
        # Bare metal: pick a driver based on the VGA controller vendor.
        if [[ $gpubrand =~ Nvidia ]]; then
            echo -e "${MAGENTA}Installing Nvidia gpu drivers${NORMAL}" # fixed MAGNETA typo
            pacman -S --noconfirm nvidia nvidia-utils nvidia-settings
        elif [[ $gpubrand =~ 'Advanced Micro Devices' ]]; then
            echo -e "${MAGENTA}Installing AMD gpu drivers${NORMAL}"
            pacman -S --noconfirm xf86-video-amdgpu
        elif [[ $gpubrand =~ Intel ]]; then
            echo -e "${MAGENTA}Installing Intel gpu drivers${NORMAL}"
            pacman -S --noconfirm mesa
        else
            echo -e "${RED}${UNDERLINE}No gpu found${NORMAL}"
        fi
    fi
}
virtGpu
microcode() {
    # Install CPU microcode updates, but only on physical hardware.
    local cpu
    cpu="$(awk -F: '/vendor_id/ {print $2}' /proc/cpuinfo | tail -n1)"
    # Bug fix: systemd-detect-virt prints "none" (non-empty) on bare metal,
    # so the old "[[ -z $(systemd-detect-virt) ]]" check was never true and
    # microcode was never installed. Use the exit status instead: the command
    # exits 0 only when virtualization is detected.
    if ! systemd-detect-virt --quiet; then
        if [[ $cpu =~ GenuineIntel ]]; then
            echo -e "${CYAN}Installing intel-ucode${NORMAL}"
            sleep 1
            pacman -S --noconfirm intel-ucode
        elif [[ $cpu =~ AMDisbetter! ]] || [[ $cpu =~ AuthenticAMD ]]; then
            echo -e "${CYAN}Installing amd-ucode${NORMAL}"
            sleep 1
            pacman -S --noconfirm amd-ucode
        else
            echo -e "${RED}${UNDERLINE}Unknown cpu detected${NORMAL}"
        fi
    fi
}
microcode
serviceEnable() {
    # Enable every service and timer the freshly installed system needs at boot.
    local service
    for service in NetworkManager sshd avahi-daemon reflector.timer fstrim.timer firewalld acpid; do
        systemctl enable "$service"
    done
}
serviceEnable
# Retry user creation until it succeeds. Bug fix: running setupass as the
# loop condition keeps its non-zero "retry" status from tripping errexit;
# the old "setupass; until [[ $? -eq 0 ]]" form aborted the script (set -Ee)
# on the first invalid input.
until setupass; do
    clear
done
# Uncomment the "%wheel ALL=(ALL) ALL" line so wheel members can use sudo.
ln="$(awk '{ if ( ($2 == "%wheel" && $4 == "ALL")) print NR;}' /etc/sudoers)"
sed -i "${ln}s/^# //" /etc/sudoers
# Bug fix: %b (not %s) is required for printf to interpret the \e escapes;
# the old version printed the escape sequence literally.
printf '%b\n' "\e[1;32mDone! Type exit, umount -a and reboot.\e[0m"
|
/**
 * Print a personalised greeting for the given name.
 *
 * @param string $name Name to greet.
 */
function greetUser($name) {
    $message = "Hello, $name! Good to see you.";
    echo $message;
}
// Application-wide configuration constants used by the HTTP layer.
export class Config {
    // Base URL of the backend API (local development server).
    static apiUrl = "http://localhost:8000";
    // Application key — the "kid_" prefix suggests a Kinvey app id; TODO confirm.
    static appKey = "kid_HyHoT_REf";
    // Pre-encoded Basic auth header value for app-level requests.
    // NOTE(review): this embeds credentials in source — confirm that is intentional.
    static appUserHeader = "Basic a2lkX0h5SG9UX1JFZjo1MTkxMDJlZWFhMzQ0MzMyODFjN2MyODM3MGQ5OTIzMQ";
    // Session token; empty until filled in after authentication.
    static token = '';
}
const fs = require('fs');
const { imagePath } = require('../config/app');

/**
 * Minimal filesystem-backed image catalogue over the configured image
 * directory. Both helpers return Promises.
 */
const ImageBase = {
  /**
   * Resolve with the number of files in the image directory; reject with
   * the underlying fs error on failure.
   */
  determineNumberOfImages: () => new Promise((resolve, reject) => {
    fs.readdir(imagePath, (err, files) => {
      // Was `throw reject(err)`, which throws `undefined` after rejecting;
      // returning the rejection is the correct way to stop here.
      if (err) return reject(err);
      resolve(files.length);
    });
  }),
  /**
   * Resolve with the file name at the given index (directory order).
   * NOTE(review): an out-of-range index resolves with `undefined` —
   * confirm callers expect that rather than a rejection.
   */
  getImage: number => new Promise((resolve, reject) => {
    fs.readdir(imagePath, (err, files) => {
      if (err) return reject(err);
      resolve(files[number]);
    });
  }),
};

module.exports = ImageBase;
|
#!/bin/sh
# Launch four IOU L3 router instances (IDs 11-14) on consecutive wrapper
# ports (2011-2014), pausing between starts so each instance can
# initialise before the next one comes up.
IOU_BIN=/vagrant/Cisco-IOU-L2-L3-Collection-v4/bin/i86bi-linux-l3-adventerprisek9-15.4.1T.bin
for id in 11 12 13 14; do
    ../../scripts/wrapper.bin -m "$IOU_BIN" -p "20$id" -- -e2 -s0 "$id" &
    sleep 5
done
|
#!/usr/bin/env python
# coding: utf-8
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # patch for imports
os.chdir(os.path.dirname(__file__)) # places in corect dir
import time
from Test import *
from mep import Utilities
def main() :
    """Smoke-test the mep.Utilities helpers: similar(), remove_feat(),
    remove_the(), clean_string(), and the JSON config round-trip helpers."""
    # Artist names (several near-duplicates) used to probe similar().
    lst =["<NAME>",
    "<NAME>",
    "<NAME>",
    "<NAME> & His Comets",
    "<NAME>",
    "<NAME>",
    "Elvis Presley",
    "USA for Africa",
    "The Ink Spots",
    "Céline Dion",
    "The Beatles",
    "<NAME> et <NAME>",
    "<NAME>"]
    res = ""
    headp("Testing Utilities")
    # Concatenate every pair judged similar; the aggregate string is
    # compared against the known-good transcript below.
    for a in lst :
        for b in lst :
            if Utilities.similar(a,b) :
                res+=(f'{a} = {b} ')
    assert(res=="<NAME> = Bing Crosby <NAME> = <NAME> = <NAME> <NAME> & His Comets = <NAME> & His Comets D<NAME>no = Domenico Modugno Whitney Houston = Whitney Houston Elvis Presley = Elvis Presley USA for Africa = USA for Africa The Ink Spots = The Ink Spots Céline Dion = Céline Dion The Beatles = The Beatles <NAME> et <NAME> = <NAME> et O<NAME> <NAME> = <NAME> ")
    greenp("similar working")
    # Featuring-credit stripping.
    title = "LOVE. FEAT. Zacari"
    assert(Utilities.remove_feat(title)=="LOVE.")
    greenp("remove_feat working")
    # Leading-article stripping.
    artist = "The Strokes"
    assert(Utilities.remove_the(artist)=="Strokes")
    greenp("remove_the working")
    # Trailing-punctuation cleanup.
    title = "LOVE."
    assert(Utilities.clean_string(title)=="LOVE")
    greenp("clean_string working")
    # Config round-trip: create, update, read back, then delete the file.
    # Interface (defined below at module level) auto-confirms prompts.
    interface = Interface()
    assert(Utilities.create_config())
    greenp("create_config working")
    config={'feat_acronym': 'feat.', 'default_genre': 'Other', 'folder_name': 'music', 'get_label': True, 'get_bpm': True, 'get_lyrics': True, 'store_image_in_file': True}
    start = time.time()
    assert(Utilities.update_config(config, interface))
    end = time.time()
    greenp(f"update_config working (took {end-start}s)")
    start = time.time()
    assert(Utilities.read_config(interface)==config)
    end = time.time()
    greenp(f"read_config working (took {end-start}s)")
    assert(Utilities.rm_file('config.json'))
    greenp("rm_file working")
class Interface:
    """Minimal console front-end for the Utilities tests: warnings are
    printed, questions are printed and answered "yes" unconditionally."""

    def __init__(self):
        pass

    def warn(self, msg):
        # Surface the warning on stdout.
        print(msg)

    def ask(self, msg):
        # Echo the question, then auto-confirm.
        print("ask : " + msg)
        return True
if __name__ == '__main__':
main()
|
/**
* @license Copyright (c) 2003-2021, CKSource - <NAME>. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
/* global document */
/**
* Removes all the `.ck-body` elements available in the DOM.
*
* It is commonly used to cleanup after editors that test editor crashes.
*
* See https://github.com/ckeditor/ckeditor5/issues/6018 for more details.
*/
export function removeEditorBodyOrphans() {
	const orphans = document.querySelectorAll( '.ck-body-wrapper' );

	// Detach every leftover wrapper from the document.
	orphans.forEach( orphan => orphan.remove() );
}
|
#!/usr/bin/env bash
# Read 10 integers, then report how many (and which) are even and odd.
for ((contador = 0; contador < 10; contador++)); do
    read -rp "Digite um número: " lista[contador]
done

pares=()
impares=()
for numero in "${lista[@]}"; do
    if ((numero % 2 == 0)); then
        pares+=("$numero")
    else
        impares+=("$numero")
    fi
done

echo "${#pares[*]} número(s) par(es): ${pares[*]}"
echo "${#impares[*]} número(s) ímpar(es): ${impares[*]}"
|
#!/usr/bin/env bash
# Run Apache in the foreground with app-local runtime settings.
export APACHE_LOCK_DIR=/tmp
export APACHE_RUN_USER=app
export APACHE_RUN_GROUP=app
export APACHE_LOG_DIR=log

# mkdir -p is a no-op when the directory already exists, so the explicit
# existence test was redundant.
mkdir -p /app/.apache2/log

apache2 -DFOREGROUND
|
<gh_stars>0
package uk.joshiejack.husbandry.api.trait;

import uk.joshiejack.husbandry.api.IMobStats;

/**
 * A mob trait that wants an initialisation hook.
 * The default implementation does nothing, so implementors only override
 * {@link #initTrait} when they need setup work.
 */
public interface IInitTrait extends IMobTrait {
    /**
     * Called to initialise this trait against the carrying mob's stats.
     *
     * @param stats the stats of the mob carrying this trait
     */
    default void initTrait(IMobStats<?> stats) {}
}
/*=============================================================================
Copyright (c) 2010-2015 <NAME>
https://github.com/bolero-MURAKAMI/KTL
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/
/*
 * Shared VERSIONINFO definitions for KTL plug-in resource scripts.
 * The including .rc file must define KTL_RC_PROJECT_NAME and
 * KTL_RC_MODULE_NAME before including this header; KTL_RC_VERSION,
 * KTL_RC_TARGET, KTL_RC_COMPILER and KTL_RC_USELIB_0..9 are optional.
 */
#ifndef KTL_RC_VERSION_H
#define KTL_RC_VERSION_H
#include <WinVer.h>
/* Mandatory inputs: fail the resource compile loudly when missing. */
#ifndef KTL_RC_PROJECT_NAME
# error Requires: define KTL_RC_PROJECT_NAME
#endif // #ifndef KTL_RC_PROJECT_NAME
#ifndef KTL_RC_MODULE_NAME
# error Requires: define KTL_RC_MODULE_NAME
#endif // #ifndef KTL_RC_MODULE_NAME
#ifndef KTL_RC_VERSION
# define KTL_RC_VERSION 0,0,0,0
#endif // #ifndef KTL_RC_VERSION
/* Canonical project/module display names derived from the inputs. */
#define KTL_FULL_PROJECT_NAME "ktl_" KTL_RC_PROJECT_NAME
#define KTL_FULL_MODULE_NAME "KTL." KTL_RC_MODULE_NAME
#ifdef NDEBUG
# define KTL_COMMENTS_BUILD "(Release)"
#else
#define KTL_COMMENTS_BUILD "(Debug)"
#endif
#if defined(KTL_RC_TARGET)
# define KTL_COMMENTS_TARGET "(" KTL_RC_TARGET ")"
#else
# define KTL_COMMENTS_TARGET "(Unknown)"
#endif
#if defined(KTL_RC_COMPILER)
# define KTL_COMMENTS_COMPILER "(" KTL_RC_COMPILER ")"
#else
# define KTL_COMMENTS_COMPILER "(Unknown)"
#endif
/* Optional: up to ten "(libname)" fragments naming linked libraries;
 * each undefined slot contributes an empty string. */
#if defined(KTL_RC_USELIB_0)
# define KTL_COMMENTS_USELIB_0 "(" KTL_RC_USELIB_0 ")"
#else
# define KTL_COMMENTS_USELIB_0 ""
#endif
#if defined(KTL_RC_USELIB_1)
# define KTL_COMMENTS_USELIB_1 "(" KTL_RC_USELIB_1 ")"
#else
# define KTL_COMMENTS_USELIB_1 ""
#endif
#if defined(KTL_RC_USELIB_2)
# define KTL_COMMENTS_USELIB_2 "(" KTL_RC_USELIB_2 ")"
#else
# define KTL_COMMENTS_USELIB_2 ""
#endif
#if defined(KTL_RC_USELIB_3)
# define KTL_COMMENTS_USELIB_3 "(" KTL_RC_USELIB_3 ")"
#else
# define KTL_COMMENTS_USELIB_3 ""
#endif
#if defined(KTL_RC_USELIB_4)
# define KTL_COMMENTS_USELIB_4 "(" KTL_RC_USELIB_4 ")"
#else
# define KTL_COMMENTS_USELIB_4 ""
#endif
#if defined(KTL_RC_USELIB_5)
# define KTL_COMMENTS_USELIB_5 "(" KTL_RC_USELIB_5 ")"
#else
# define KTL_COMMENTS_USELIB_5 ""
#endif
#if defined(KTL_RC_USELIB_6)
# define KTL_COMMENTS_USELIB_6 "(" KTL_RC_USELIB_6 ")"
#else
# define KTL_COMMENTS_USELIB_6 ""
#endif
#if defined(KTL_RC_USELIB_7)
# define KTL_COMMENTS_USELIB_7 "(" KTL_RC_USELIB_7 ")"
#else
# define KTL_COMMENTS_USELIB_7 ""
#endif
#if defined(KTL_RC_USELIB_8)
# define KTL_COMMENTS_USELIB_8 "(" KTL_RC_USELIB_8 ")"
#else
# define KTL_COMMENTS_USELIB_8 ""
#endif
#if defined(KTL_RC_USELIB_9)
# define KTL_COMMENTS_USELIB_9 "(" KTL_RC_USELIB_9 ")"
#else
# define KTL_COMMENTS_USELIB_9 ""
#endif
#define KTL_COMMENTS_LIBRARIES KTL_COMMENTS_USELIB_0 \
KTL_COMMENTS_USELIB_1 \
KTL_COMMENTS_USELIB_2 \
KTL_COMMENTS_USELIB_3 \
KTL_COMMENTS_USELIB_4 \
KTL_COMMENTS_USELIB_5 \
KTL_COMMENTS_USELIB_6 \
KTL_COMMENTS_USELIB_7 \
KTL_COMMENTS_USELIB_8 \
KTL_COMMENTS_USELIB_9
/* Two-level stringize so macro arguments expand before being quoted. */
#define KTL_STRINGIZE_I(TEXT) #TEXT
#define KTL_STRINGIZE(TEXT) KTL_STRINGIZE_I(TEXT)
/* Values consumed by the VERSIONINFO resource block. */
#define KTL_FILEVERSION KTL_RC_VERSION
#define KTL_PRODUCTVERSION KTL_RC_VERSION
#define KTL_FILEFLAGSMASK VS_FFI_FILEFLAGSMASK
#ifdef NDEBUG
#define KTL_FILEFLAGS (0x0L | VS_FF_PRERELEASE)
#else
#define KTL_FILEFLAGS (0x0L | VS_FF_PRERELEASE | VS_FF_DEBUG)
#endif
#define KTL_FILEOS VOS_NT_WINDOWS32
#define KTL_FILETYPE VFT_DLL
#define KTL_FILESUBTYPE VFT2_UNKNOWN
/* Multi-line "Comments" string assembled from the fragments above. */
#define KTL_COMMENTS KTL_FULL_MODULE_NAME \
" " KTL_STRINGIZE(KTL_RC_VERSION) "\n" \
" " KTL_COMMENTS_BUILD "\n" \
" / Target: " KTL_COMMENTS_TARGET "\n" \
" / Compiler: " KTL_COMMENTS_COMPILER "\n" \
" / Libraries: " KTL_COMMENTS_LIBRARIES "\n" \
"\0"
#define KTL_COMPANY_NAME "<NAME>\0"
#define KTL_FILE_DESCRIPTION KTL_FULL_MODULE_NAME " (KiriKiri Plug-in DLL)\0"
#define KTL_FILE_VERSION KTL_STRINGIZE(KTL_RC_VERSION) "\0"
#define KTL_INTERNAL_NAME KTL_FULL_PROJECT_NAME "\0"
#define KTL_LEGAL_COPYRIGHT "Copyright (C) 2009-2015 <NAME>.\0"
#define KTL_LEGAL_TRADEMARKS "\0"
/* Debug builds ship with a "-d" suffix on the DLL file name. */
#ifdef NDEBUG
# define KTL_ORIGINAL_FILENAME KTL_FULL_PROJECT_NAME ".dll\0"
#else
# define KTL_ORIGINAL_FILENAME KTL_FULL_PROJECT_NAME "-d.dll\0"
#endif
#define KTL_PRODUCT_NAME KTL_FULL_MODULE_NAME "\0"
#define KTL_PRODUCT_VERSION KTL_STRINGIZE(KTL_RC_VERSION) "\0"
#define KTL_PRIVATE_BUILD "\0"
#define KTL_SPECIAL_BUILD "\0"
#endif // #ifndef KTL_RC_VERSION_H
|
#!/usr/bin/env sh
BASEDIR=$(dirname "$0")

# `&>` is a bashism; under plain sh it backgrounds the command instead of
# redirecting, so use the portable POSIX form.
if ! command -v gopherjs > /dev/null 2>&1; then
echo "error: gopherjs command not available."
echo "Install it from here: <https://github.com/gopherjs/gopherjs>"
exit 1
fi

# Report the pinned icza/screp version straight from go.mod (no `cat` needed).
echo "icza/screp" $(grep "^\s\+github.com/icza/screp" "$BASEDIR"/go.mod | head -n 1 | grep -o "v\(.*\)")

gopherjs build -vo "$BASEDIR"/dist/index.js "$BASEDIR"/src/main.go

# Remove the source map, since we don't intend for the Go code to be available to the end user.
rm "$BASEDIR"/dist/index.js.map
sed 's/^\/\/# sourceMappingURL=\(.*\)//g' "$BASEDIR"/dist/index.js > "$BASEDIR"/dist/_index.js
rm "$BASEDIR"/dist/index.js
mv "$BASEDIR"/dist/_index.js "$BASEDIR"/dist/index.js
|
//
// Created by bartek on 30.03.19.
//
#include "Event.h"

#include <sstream>
#include <string>

using namespace event;
Event::Event(configuration::Time &time, float_t temperature) : time(time), temperature(temperature) {}
/**
 * Serialise the event as "@environment\ntime::<time>\ntemperature::<t>".
 *
 * The previous fixed 50-byte snprintf buffer could silently truncate the
 * output once the time string plus the formatted temperature exceeded it;
 * building the string with an ostringstream removes that limit.
 * std::fixed reproduces the old printf "%f" formatting (6 decimal places).
 */
std::string Event::toString() const {
    std::ostringstream out;
    out << "@environment\ntime::" << time.toString()
        << "\ntemperature::" << std::fixed << temperature;
    return out.str();
}
// Stream the textual form of the event.
std::ostream &operator<<(std::ostream &ostream, Event &event) {
    ostream << event.toString();
    return ostream;
}
|
/*!
* @vueuse/motion v1.6.0
* (c) 2021
* @license MIT
*/
'use strict'
Object.defineProperty(exports, '__esModule', { value: true })
var vueDemi = require('vue-demi')
var core = require('@vueuse/core')
var shared = require('@vueuse/shared')
var popmotion = require('popmotion')
const motionState = {}
/*! *****************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
var __assign = function () {
__assign =
Object.assign ||
function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i]
for (var p in s)
if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]
}
return t
}
return __assign.apply(this, arguments)
}
function __rest(s, e) {
var t = {}
for (var p in s)
if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p]
if (s != null && typeof Object.getOwnPropertySymbols === 'function')
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (
e.indexOf(p[i]) < 0 &&
Object.prototype.propertyIsEnumerable.call(s, p[i])
)
t[p[i]] = s[p[i]]
}
return t
}
function __awaiter(thisArg, _arguments, P, generator) {
function adopt(value) {
return value instanceof P
? value
: new P(function (resolve) {
resolve(value)
})
}
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) {
try {
step(generator.next(value))
} catch (e) {
reject(e)
}
}
function rejected(value) {
try {
step(generator['throw'](value))
} catch (e) {
reject(e)
}
}
function step(result) {
result.done
? resolve(result.value)
: adopt(result.value).then(fulfilled, rejected)
}
step((generator = generator.apply(thisArg, _arguments || [])).next())
})
}
var defaultTimestep = (1 / 60) * 1000
var getCurrentTime =
typeof performance !== 'undefined'
? function () {
return performance.now()
}
: function () {
return Date.now()
}
var onNextFrame =
typeof window !== 'undefined'
? function (callback) {
return window.requestAnimationFrame(callback)
}
: function (callback) {
return setTimeout(function () {
return callback(getCurrentTime())
}, defaultTimestep)
}
function createRenderStep(runNextFrame) {
var toRun = []
var toRunNextFrame = []
var numToRun = 0
var isProcessing = false
var toKeepAlive = new WeakSet()
var step = {
schedule: function (callback, keepAlive, immediate) {
if (keepAlive === void 0) {
keepAlive = false
}
if (immediate === void 0) {
immediate = false
}
var addToCurrentFrame = immediate && isProcessing
var buffer = addToCurrentFrame ? toRun : toRunNextFrame
if (keepAlive) toKeepAlive.add(callback)
if (buffer.indexOf(callback) === -1) {
buffer.push(callback)
if (addToCurrentFrame && isProcessing) numToRun = toRun.length
}
return callback
},
cancel: function (callback) {
var index = toRunNextFrame.indexOf(callback)
if (index !== -1) toRunNextFrame.splice(index, 1)
toKeepAlive.delete(callback)
},
process: function (frameData) {
var _a
isProcessing = true
;(_a = [toRunNextFrame, toRun]), (toRun = _a[0]), (toRunNextFrame = _a[1])
toRunNextFrame.length = 0
numToRun = toRun.length
if (numToRun) {
for (var i = 0; i < numToRun; i++) {
var callback = toRun[i]
callback(frameData)
if (toKeepAlive.has(callback)) {
step.schedule(callback)
runNextFrame()
}
}
}
isProcessing = false
},
}
return step
}
var maxElapsed = 40
var useDefaultElapsed = true
var runNextFrame = false
var isProcessing = false
var frame = {
delta: 0,
timestamp: 0,
}
var stepsOrder = ['read', 'update', 'preRender', 'render', 'postRender']
var steps = /*#__PURE__*/ stepsOrder.reduce(function (acc, key) {
acc[key] = createRenderStep(function () {
return (runNextFrame = true)
})
return acc
}, {})
var sync = /*#__PURE__*/ stepsOrder.reduce(function (acc, key) {
var step = steps[key]
acc[key] = function (process, keepAlive, immediate) {
if (keepAlive === void 0) {
keepAlive = false
}
if (immediate === void 0) {
immediate = false
}
if (!runNextFrame) startLoop()
return step.schedule(process, keepAlive, immediate)
}
return acc
}, {})
var processStep = function (stepId) {
return steps[stepId].process(frame)
}
var processFrame = function (timestamp) {
runNextFrame = false
frame.delta = useDefaultElapsed
? defaultTimestep
: Math.max(Math.min(timestamp - frame.timestamp, maxElapsed), 1)
frame.timestamp = timestamp
isProcessing = true
stepsOrder.forEach(processStep)
isProcessing = false
if (runNextFrame) {
useDefaultElapsed = false
onNextFrame(processFrame)
}
}
var startLoop = function () {
runNextFrame = true
useDefaultElapsed = true
if (!isProcessing) onNextFrame(processFrame)
}
var getFrameData = function () {
return frame
}
/**
 * A generic subscription manager: handlers are registered with add(),
 * fired with notify(), and dropped individually or all at once.
 */
class SubscriptionManager {
  constructor() {
    this.subscriptions = new Set()
  }

  /** Register a handler; the returned function unsubscribes it. */
  add(handler) {
    this.subscriptions.add(handler)

    return () => {
      this.subscriptions.delete(handler)
    }
  }

  /**
   * Invoke every handler with up to three positional arguments.
   * (Fixed arity instead of ...args because this may run every frame.)
   */
  notify(a, b, c) {
    if (this.subscriptions.size === 0) return

    this.subscriptions.forEach(handler => handler(a, b, c))
  }

  /** Drop every registered handler. */
  clear() {
    this.subscriptions.clear()
  }
}
// True when the value parses to a numeric float (e.g. 5, "12.5px").
const isFloat = (value) => !Number.isNaN(parseFloat(value))
/**
* `MotionValue` is used to track the state and velocity of motion values.
*/
class MotionValue {
/**
* @param init - The initiating value
* @param config - Optional configuration options
*/
constructor(init) {
/**
* Duration, in milliseconds, since last updating frame.
*/
this.timeDelta = 0
/**
* Timestamp of the last time this `MotionValue` was updated.
*/
this.lastUpdated = 0
/**
* Functions to notify when the `MotionValue` updates.
*/
this.updateSubscribers = new SubscriptionManager()
/**
* Tracks whether this value can output a velocity.
*/
this.canTrackVelocity = false
/**
* Update and notify `MotionValue` subscribers.
*
* @param v
* @param render
*/
this.updateAndNotify = (v) => {
// Update values
this.prev = this.current
this.current = v
// Get frame data
const { delta, timestamp } = getFrameData()
// Update timestamp
if (this.lastUpdated !== timestamp) {
this.timeDelta = delta
this.lastUpdated = timestamp
}
// Schedule velocity check post frame render
sync.postRender(this.scheduleVelocityCheck)
// Update subscribers
this.updateSubscribers.notify(this.current)
}
/**
* Schedule a velocity check for the next frame.
*/
this.scheduleVelocityCheck = () => sync.postRender(this.velocityCheck)
/**
* Updates `prev` with `current` if the value hasn't been updated this frame.
* This ensures velocity calculations return `0`.
*/
this.velocityCheck = ({ timestamp }) => {
if (!this.canTrackVelocity) this.canTrackVelocity = isFloat(this.current)
if (timestamp !== this.lastUpdated) {
this.prev = this.current
}
}
this.prev = this.current = init
this.canTrackVelocity = isFloat(this.current)
}
/**
* Adds a function that will be notified when the `MotionValue` is updated.
*
* It returns a function that, when called, will cancel the subscription.
*/
onChange(subscription) {
return this.updateSubscribers.add(subscription)
}
clearListeners() {
this.updateSubscribers.clear()
}
/**
* Sets the state of the `MotionValue`.
*
* @param v
* @param render
*/
set(v) {
this.updateAndNotify(v)
}
/**
* Returns the latest state of `MotionValue`
*
* @returns - The latest state of `MotionValue`
*/
get() {
return this.current
}
/**
* Get previous value.
*
* @returns - The previous latest state of `MotionValue`
*/
getPrevious() {
return this.prev
}
/**
* Returns the latest velocity of `MotionValue`
*
* @returns - The latest velocity of `MotionValue`. Returns `0` if the state is non-numerical.
*/
getVelocity() {
// This could be isFloat(this.prev) && isFloat(this.current), but that would be wasteful
return this.canTrackVelocity
? // These casts could be avoided if parseFloat would be typed better
popmotion.velocityPerSecond(
parseFloat(this.current) - parseFloat(this.prev),
this.timeDelta,
)
: 0
}
/**
* Registers a new animation to control this `MotionValue`. Only one
* animation can drive a `MotionValue` at one time.
*/
start(animation) {
this.stop()
return new Promise((resolve) => {
const { stop } = animation(resolve)
this.stopAnimation = stop
}).then(() => this.clearAnimation())
}
/**
* Stop the currently active animation.
*/
stop() {
if (this.stopAnimation) this.stopAnimation()
this.clearAnimation()
}
/**
* Returns `true` if this value is currently animating.
*/
isAnimating() {
return !!this.stopAnimation
}
/**
* Clear the current animation reference.
*/
clearAnimation() {
this.stopAnimation = null
}
/**
* Destroy and clean up subscribers to this `MotionValue`.
*/
destroy() {
this.updateSubscribers.clear()
this.stop()
}
}
function getMotionValue(init) {
return new MotionValue(init)
}
const { isArray } = Array
function useMotionValues() {
const motionValues = {}
const stop = (keys) => {
// Destroy key closure
const destroyKey = (key) => {
if (!motionValues[key]) return
motionValues[key].stop()
motionValues[key].destroy()
vueDemi.del(motionValues, key)
}
// Check if keys argument is defined
if (keys) {
if (isArray(keys)) {
// If `keys` are an array, loop on specified keys and destroy them
keys.forEach(destroyKey)
} else {
// If `keys` is a string, destroy the specified one
destroyKey(keys)
}
} else {
// No keys specified, destroy all animations
Object.keys(motionValues).forEach(destroyKey)
}
}
const get = (key, from, target) => {
if (motionValues[key]) return motionValues[key]
// Create motion value
const motionValue = getMotionValue(from)
// Set motion properties mapping
motionValue.onChange((v) => {
vueDemi.set(target, key, v)
})
// Set instance motion value
vueDemi.set(motionValues, key, motionValue)
return motionValue
}
// Ensure everything is cleared on unmount
shared.tryOnUnmounted(stop)
return {
motionValues,
get,
stop,
}
}
// Build a function restricting its input to the [min, max] range.
var clamp = function (min, max) {
  return function (v) {
    if (v > max) return max
    if (v < min) return min
    return v
  }
}
// Round to at most 5 decimal places; integer values pass through untouched.
var sanitize = function (v) {
  if (v % 1 === 0) return v
  return Number(v.toFixed(5))
}
var floatRegex = /(-)?([\d]*\.?[\d])+/g
var colorRegex =
/(#[0-9a-f]{6}|#[0-9a-f]{3}|#(?:[0-9a-f]{2}){2,4}|(rgb|hsl)a?\((-?[\d\.]+%?[,\s]+){2,3}\s*\/*\s*[\d\.]+%?\))/gi
var singleColorRegex =
/^(#[0-9a-f]{3}|#(?:[0-9a-f]{2}){2,4}|(rgb|hsl)a?\((-?[\d\.]+%?[,\s]+){2,3}\s*\/*\s*[\d\.]+%?\))$/i
function isString(v) {
return typeof v === 'string'
}
var number = {
test: function (v) {
return typeof v === 'number'
},
parse: parseFloat,
transform: function (v) {
return v
},
}
var alpha = __assign(__assign({}, number), { transform: clamp(0, 1) })
var scale = __assign(__assign({}, number), { default: 1 })
var createUnitType = function (unit) {
return {
test: function (v) {
return isString(v) && v.endsWith(unit) && v.split(' ').length === 1
},
parse: parseFloat,
transform: function (v) {
return '' + v + unit
},
}
}
var degrees = createUnitType('deg')
var percent = createUnitType('%')
var px = createUnitType('px')
var progressPercentage = __assign(__assign({}, percent), {
parse: function (v) {
return percent.parse(v) / 100
},
transform: function (v) {
return percent.transform(v * 100)
},
})
var isColorString = function (type, testProp) {
return function (v) {
return Boolean(
(isString(v) && singleColorRegex.test(v) && v.startsWith(type)) ||
(testProp && Object.prototype.hasOwnProperty.call(v, testProp)),
)
}
}
var splitColor = function (aName, bName, cName) {
return function (v) {
var _a
if (!isString(v)) return v
var _b = v.match(floatRegex),
a = _b[0],
b = _b[1],
c = _b[2],
alpha = _b[3]
return (
(_a = {}),
(_a[aName] = parseFloat(a)),
(_a[bName] = parseFloat(b)),
(_a[cName] = parseFloat(c)),
(_a.alpha = alpha !== undefined ? parseFloat(alpha) : 1),
_a
)
}
}
var hsla = {
test: isColorString('hsl', 'hue'),
parse: splitColor('hue', 'saturation', 'lightness'),
transform: function (_a) {
var hue = _a.hue,
saturation = _a.saturation,
lightness = _a.lightness,
_b = _a.alpha,
alpha$1 = _b === void 0 ? 1 : _b
return (
'hsla(' +
Math.round(hue) +
', ' +
percent.transform(sanitize(saturation)) +
', ' +
percent.transform(sanitize(lightness)) +
', ' +
sanitize(alpha.transform(alpha$1)) +
')'
)
},
}
var clampRgbUnit = clamp(0, 255)
var rgbUnit = __assign(__assign({}, number), {
transform: function (v) {
return Math.round(clampRgbUnit(v))
},
})
var rgba = {
test: isColorString('rgb', 'red'),
parse: splitColor('red', 'green', 'blue'),
transform: function (_a) {
var red = _a.red,
green = _a.green,
blue = _a.blue,
_b = _a.alpha,
alpha$1 = _b === void 0 ? 1 : _b
return (
'rgba(' +
rgbUnit.transform(red) +
', ' +
rgbUnit.transform(green) +
', ' +
rgbUnit.transform(blue) +
', ' +
sanitize(alpha.transform(alpha$1)) +
')'
)
},
}
/**
 * Parse a hex colour (#RGB, #RGBA, #RRGGBB or #RRGGBBAA) into
 * { red, green, blue, alpha } with alpha normalised to [0, 1].
 */
function parseHex(v) {
  let r, g, b, a
  if (v.length > 5) {
    // Long form: two hex digits per channel.
    r = v.substr(1, 2)
    g = v.substr(3, 2)
    b = v.substr(5, 2)
    a = v.substr(7, 2)
  } else {
    // Short form: one digit per channel, doubled (e.g. "f" -> "ff").
    r = v.substr(1, 1).repeat(2)
    g = v.substr(2, 1).repeat(2)
    b = v.substr(3, 1).repeat(2)
    a = v.substr(4, 1).repeat(2)
  }
  return {
    red: parseInt(r, 16),
    green: parseInt(g, 16),
    blue: parseInt(b, 16),
    // No alpha digits means fully opaque.
    alpha: a ? parseInt(a, 16) / 255 : 1,
  }
}
var hex = {
test: isColorString('#'),
parse: parseHex,
transform: rgba.transform,
}
var color = {
test: function (v) {
return rgba.test(v) || hex.test(v) || hsla.test(v)
},
parse: function (v) {
if (rgba.test(v)) {
return rgba.parse(v)
} else if (hsla.test(v)) {
return hsla.parse(v)
} else {
return hex.parse(v)
}
},
transform: function (v) {
return isString(v)
? v
: v.hasOwnProperty('red')
? rgba.transform(v)
: hsla.transform(v)
},
}
var colorToken = '${c}'
var numberToken = '${n}'
function test(v) {
var _a, _b, _c, _d
return (
isNaN(v) &&
isString(v) &&
((_b =
(_a = v.match(floatRegex)) === null || _a === void 0
? void 0
: _a.length) !== null && _b !== void 0
? _b
: 0) +
((_d =
(_c = v.match(colorRegex)) === null || _c === void 0
? void 0
: _c.length) !== null && _d !== void 0
? _d
: 0) >
0
)
}
function analyse(v) {
var values = []
var numColors = 0
var colors = v.match(colorRegex)
if (colors) {
numColors = colors.length
v = v.replace(colorRegex, colorToken)
values.push.apply(values, colors.map(color.parse))
}
var numbers = v.match(floatRegex)
if (numbers) {
v = v.replace(floatRegex, numberToken)
values.push.apply(values, numbers.map(number.parse))
}
return { values: values, numColors: numColors, tokenised: v }
}
function parse(v) {
return analyse(v).values
}
function createTransformer(v) {
var _a = analyse(v),
values = _a.values,
numColors = _a.numColors,
tokenised = _a.tokenised
var numValues = values.length
return function (v) {
var output = tokenised
for (var i = 0; i < numValues; i++) {
output = output.replace(
i < numColors ? colorToken : numberToken,
i < numColors ? color.transform(v[i]) : sanitize(v[i]),
)
}
return output
}
}
var convertNumbersToZero = function (v) {
return typeof v === 'number' ? 0 : v
}
function getAnimatableNone$1(v) {
var parsed = parse(v)
var transformer = createTransformer(v)
return transformer(parsed.map(convertNumbersToZero))
}
var complex = {
test: test,
parse: parse,
createTransformer: createTransformer,
getAnimatableNone: getAnimatableNone$1,
}
var maxDefaults = new Set(['brightness', 'contrast', 'saturate', 'opacity'])
/**
 * Build the idle version of one CSS filter function, e.g.
 * "blur(10px)" -> "blur(0px)", "brightness(50%)" -> "brightness(100%)".
 * Filters listed in maxDefaults idle at 1 (or 100%); the rest idle at 0.
 */
function applyDefaultFilter(v) {
  const [name, value] = v.slice(0, -1).split('(')

  // drop-shadow takes a compound value we cannot meaningfully zero out.
  if (name === 'drop-shadow') return v

  const match = value.match(floatRegex)
  const amount = match ? match[0] : undefined
  if (!amount) return v

  const unit = value.replace(amount, '')
  let fallback = maxDefaults.has(name) ? 1 : 0
  // A unit suffix was present, so express the default as a percentage/unit value.
  if (amount !== value) fallback *= 100

  return name + '(' + fallback + unit + ')'
}
var functionRegex = /([a-z-]*)\(.*?\)/g
var filter = __assign(__assign({}, complex), {
getAnimatableNone: function (v) {
var functions = v.match(functionRegex)
return functions ? functions.map(applyDefaultFilter).join(' ') : v
},
})
const isKeyframesTarget = (v) => {
return Array.isArray(v)
}
const underDampedSpring = () => ({
type: 'spring',
stiffness: 500,
damping: 25,
restDelta: 0.5,
restSpeed: 10,
})
const criticallyDampedSpring = (to) => ({
type: 'spring',
stiffness: 550,
damping: to === 0 ? 2 * Math.sqrt(550) : 30,
restDelta: 0.01,
restSpeed: 10,
})
const overDampedSpring = (to) => ({
type: 'spring',
stiffness: 550,
damping: to === 0 ? 100 : 30,
restDelta: 0.01,
restSpeed: 10,
})
const linearTween = () => ({
type: 'keyframes',
ease: 'linear',
duration: 300,
})
const keyframes = (values) => ({
type: 'keyframes',
duration: 800,
values,
})
const defaultTransitions = {
default: overDampedSpring,
x: underDampedSpring,
y: underDampedSpring,
z: underDampedSpring,
rotate: underDampedSpring,
rotateX: underDampedSpring,
rotateY: underDampedSpring,
rotateZ: underDampedSpring,
scaleX: criticallyDampedSpring,
scaleY: criticallyDampedSpring,
scale: criticallyDampedSpring,
backgroundColor: linearTween,
color: linearTween,
opacity: linearTween,
}
const getDefaultTransition = (valueKey, to) => {
let transitionFactory
if (isKeyframesTarget(to)) {
transitionFactory = keyframes
} else {
transitionFactory =
defaultTransitions[valueKey] || defaultTransitions.default
}
return Object.assign({ to }, transitionFactory(to))
}
/**
* ValueType for ints
*/
const int = Object.assign(Object.assign({}, number), { transform: Math.round })
const valueTypes = {
// Color props
color,
backgroundColor: color,
outlineColor: color,
fill: color,
stroke: color,
// Border props
borderColor: color,
borderTopColor: color,
borderRightColor: color,
borderBottomColor: color,
borderLeftColor: color,
borderWidth: px,
borderTopWidth: px,
borderRightWidth: px,
borderBottomWidth: px,
borderLeftWidth: px,
borderRadius: px,
radius: px,
borderTopLeftRadius: px,
borderTopRightRadius: px,
borderBottomRightRadius: px,
borderBottomLeftRadius: px,
// Positioning props
width: px,
maxWidth: px,
height: px,
maxHeight: px,
size: px,
top: px,
right: px,
bottom: px,
left: px,
// Spacing props
padding: px,
paddingTop: px,
paddingRight: px,
paddingBottom: px,
paddingLeft: px,
margin: px,
marginTop: px,
marginRight: px,
marginBottom: px,
marginLeft: px,
// Transform props
rotate: degrees,
rotateX: degrees,
rotateY: degrees,
rotateZ: degrees,
scale,
scaleX: scale,
scaleY: scale,
scaleZ: scale,
skew: degrees,
skewX: degrees,
skewY: degrees,
distance: px,
translateX: px,
translateY: px,
translateZ: px,
x: px,
y: px,
z: px,
perspective: px,
transformPerspective: px,
opacity: alpha,
originX: progressPercentage,
originY: progressPercentage,
originZ: px,
// Misc
zIndex: int,
filter,
WebkitFilter: filter,
// SVG
fillOpacity: alpha,
strokeOpacity: alpha,
numOctaves: int,
}
/**
* Return the value type for a key.
*
* @param key
*/
const getValueType = (key) => valueTypes[key]
/**
 * Transform the value using its value type, or return the value.
 *
 * Only plain numbers are transformed; strings are assumed to already
 * carry their unit.
 *
 * @param value
 * @param type
 */
const getValueAsType = (value, type) => {
  const canTransform =
    Boolean(type) && typeof value === 'number' && Boolean(type.transform)

  return canTransform ? type.transform(value) : value
}
/**
* Get default animatable
*
* @param key
* @param value
*/
function getAnimatableNone(key, value) {
let defaultValueType = getValueType(key)
if (defaultValueType !== filter) defaultValueType = complex
// If value is not recognised as animatable, ie "none", create an animatable version origin based on the target
return defaultValueType.getAnimatableNone
? defaultValueType.getAnimatableNone(value)
: undefined
}
// Easing map from popmotion
const easingLookup = {
linear: popmotion.linear,
easeIn: popmotion.easeIn,
easeInOut: popmotion.easeInOut,
easeOut: popmotion.easeOut,
circIn: popmotion.circIn,
circInOut: popmotion.circInOut,
circOut: popmotion.circOut,
backIn: popmotion.backIn,
backInOut: popmotion.backInOut,
backOut: popmotion.backOut,
anticipate: popmotion.anticipate,
bounceIn: popmotion.bounceIn,
bounceInOut: popmotion.bounceInOut,
bounceOut: popmotion.bounceOut,
}
/**
* Transform easing definition to easing function.
*
* @param definition
*/
const easingDefinitionToFunction = (definition) => {
if (Array.isArray(definition)) {
const [x1, y1, x2, y2] = definition
return popmotion.cubicBezier(x1, y1, x2, y2)
} else if (typeof definition === 'string') {
return easingLookup[definition]
}
return definition
}
/**
 * Detect a list-of-easings definition: an array whose first element is
 * not a number (a cubic-bezier tuple starts with a number).
 *
 * @param ease
 */
const isEasingArray = (ease) => {
  if (!Array.isArray(ease)) return false

  return typeof ease[0] !== 'number'
}
/**
 * Check if a value is animatable. Examples:
 *
 * ✅: 100, "100px", "#fff"
 * ❌: "block", "url(2.jpg)"
 * @param value
 *
 * @internal
 */
const isAnimatable = (key, value) => {
  // zIndex is the one known non-animatable numeric property.
  if (key === 'zIndex') return false

  // Numbers and keyframe arrays are always animatable.
  if (typeof value === 'number' || Array.isArray(value)) return true

  // Strings are animatable when they contain numbers and/or colours
  // (per complex.test) and are not url() references.
  return (
    typeof value === 'string' &&
    complex.test(value) &&
    !value.startsWith('url(')
  )
}
/**
 * Replace a leading `null` keyframe with the explicit `from` value,
 * cloning the keyframes array so the caller's array is not mutated.
 *
 * @param options
 */
function hydrateKeyframes(options) {
  const needsHydration = Array.isArray(options.to) && options.to[0] === null

  if (needsHydration) {
    const to = options.to.slice()
    to[0] = options.from
    options.to = to
  }

  return options
}
/**
 * Convert Transition type into Popmotion-compatible options.
 *
 * Maps `times` -> `offset`, easing names -> easing functions, and
 * `delay` -> negative `elapsed`; all other keys are copied through.
 */
function convertTransitionToAnimationOptions(_a) {
  var { ease, times, delay } = _a,
    transition = __rest(_a, ['ease', 'times', 'delay'])
  const options = Object.assign({}, transition)
  if (times) options['offset'] = times
  // Map easing names to Popmotion's easing functions
  if (ease) {
    options['ease'] = isEasingArray(ease)
      ? ease.map(easingDefinitionToFunction)
      : easingDefinitionToFunction(ease)
  }
  // Map delay to elapsed from Popmotion
  if (delay) {
    options['elapsed'] = -delay
  }
  return options
}
/**
 * Get PopMotion animation options from Transition definition
 *
 * NOTE(review): mutates its arguments in place — `transition.duration`
 * (keyframes default) and `options.to` (via hydrateKeyframes).
 *
 * @param transition
 * @param options
 * @param key
 */
function getPopmotionAnimationOptions(transition, options, key) {
  // Keyframes animations default to an 800ms total duration.
  if (Array.isArray(options.to)) {
    if (!transition.duration) transition.duration = 800
  }
  hydrateKeyframes(options)
  // Get a default transition if none is determined to be defined.
  if (!isTransitionDefined(transition)) {
    transition = Object.assign(
      Object.assign({}, transition),
      getDefaultTransition(key, options.to),
    )
  }
  return Object.assign(
    Object.assign({}, options),
    convertTransitionToAnimationOptions(transition),
  )
}
/**
 * Decide whether a transition is defined on a given Transition.
 * This filters out orchestration options and returns true
 * if any options are left.
 */
function isTransitionDefined(_a) {
  // Strip orchestration-only keys; whatever remains is an actual transition
  // configuration (type, duration, stiffness, ...).
  var transition = __rest(_a, [
    'delay',
    'repeat',
    'repeatType',
    'repeatDelay',
    'from',
  ])
  return !!Object.keys(transition).length
}
/**
 * Get the transition definition for the current value.
 *
 * First search for transition nested definition (key or default),
 * then fallback on the main transition definition itself.
 *
 * @param transition
 * @param key
 */
function getValueTransition(transition, key) {
  // Per-key definition wins, then the `default` entry, then the root object.
  if (transition[key]) return transition[key]
  if (transition['default']) return transition['default']
  return transition
}
/**
* Get the animation function populated with variant values.
*/
function getAnimation(key, value, target, transition, onComplete) {
// Get key transition or fallback values
const valueTransition = getValueTransition(transition, key)
// Get origin
let origin =
valueTransition.from === null || valueTransition.from === undefined
? value.get()
: valueTransition.from
// Is target animatable
const isTargetAnimatable = isAnimatable(key, target)
// If we're trying to animate from "none", try and get an animatable version
// of the target. This could be improved to work both ways.
if (origin === 'none' && isTargetAnimatable && typeof target === 'string') {
origin = getAnimatableNone(key, target)
}
// Is origin animatable
const isOriginAnimatable = isAnimatable(key, origin)
/**
* Start the animation.
*/
function start(complete) {
const options = {
from: origin,
to: target,
velocity: transition.velocity ? transition.velocity : value.getVelocity(),
onUpdate: (v) => value.set(v),
}
return valueTransition.type === 'inertia' ||
valueTransition.type === 'decay'
? popmotion.inertia(
Object.assign(Object.assign({}, options), valueTransition),
)
: popmotion.animate(
Object.assign(
Object.assign(
{},
getPopmotionAnimationOptions(valueTransition, options, key),
),
{
onUpdate: (v) => {
options.onUpdate(v)
if (valueTransition.onUpdate) valueTransition.onUpdate(v)
},
onComplete: () => {
if (transition.onComplete) transition.onComplete()
if (onComplete) onComplete()
if (complete) complete()
},
},
),
)
}
/**
* Set value without transition.
*/
function set(complete) {
value.set(target)
if (transition.onComplete) transition.onComplete()
if (onComplete) onComplete()
if (complete) complete()
return { stop: () => {} }
}
return !isOriginAnimatable ||
!isTargetAnimatable ||
valueTransition.type === false
? set
: start
}
/**
* A Composable holding all the ongoing transitions in a local reference.
*/
function useMotionTransitions() {
const { motionValues, stop, get } = useMotionValues()
const push = (key, value, target, transition = {}, onComplete) => {
// Get the `from` key from target
const from = target[key]
// Get motion value for the target key
const motionValue = get(key, from, target)
// Sets the value immediately if specified
if (transition && transition.immediate) {
motionValue.set(value)
return
}
// Create animation
const animation = getAnimation(
key,
motionValue,
value,
transition,
onComplete,
)
// Start animation
motionValue.start(animation)
}
return { motionValues, stop, push }
}
/**
* A Composable handling motion controls, pushing resolved variant to useMotionTransitions manager.
*
* @param transform
* @param style
* @param currentVariant
*/
function useMotionControls(
motionProperties,
variants = {},
{ push, stop } = useMotionTransitions(),
) {
// Variants as ref
const _variants = vueDemi.unref(variants)
const getVariantFromKey = (variant) => {
if (!_variants || !_variants[variant]) {
throw new Error(`The variant ${variant} does not exist.`)
}
return _variants[variant]
}
const apply = (variant) => {
// If variant is a key, try to resolve it
if (typeof variant === 'string') {
variant = getVariantFromKey(variant)
}
// Return Promise chain
return Promise.all(
Object.entries(variant).map(([key, value]) => {
// Skip transition key
if (key === 'transition') return
return new Promise((resolve) => {
push(
key,
value,
motionProperties,
variant.transition || getDefaultTransition(key, variant[key]),
resolve,
)
})
}),
)
}
const set = (variant) => {
// Get variant data from parameter
let variantData = core.isObject(variant)
? variant
: getVariantFromKey(variant)
// Set in chain
Object.entries(variantData).forEach(([key, value]) => {
// Skip transition key
if (key === 'transition') return
push(key, value, motionProperties, {
immediate: true,
})
})
}
const leave = (done) =>
__awaiter(this, void 0, void 0, function* () {
let leaveVariant
if (_variants) {
if (_variants.leave) {
leaveVariant = _variants.leave
}
if (!_variants.leave && _variants.initial) {
leaveVariant = _variants.initial
}
}
if (!leaveVariant) {
done()
return
}
yield apply(leaveVariant)
done()
})
return {
apply,
set,
stopTransitions: stop,
leave,
}
}
// SSR guard: `window` only exists in a browser context.
const isBrowser = typeof window !== 'undefined'
// Event-family support detection: browsers expose the matching `on*` handler
// property (defaulting to null) only when that event family is supported.
const supportsPointerEvents = () => isBrowser && window.onpointerdown === null
const supportsTouchEvents = () => isBrowser && window.ontouchstart === null
const supportsMouseEvents = () => isBrowser && window.onmousedown === null
function registerEventListeners({ target, state, variants, apply }) {
const _variants = vueDemi.unref(variants)
// State
const hovered = vueDemi.ref(false)
const tapped = vueDemi.ref(false)
const focused = vueDemi.ref(false)
const mutableKeys = vueDemi.computed(() => {
let result = []
if (!_variants) return result
if (_variants.hovered) {
result = [...result, ...Object.keys(_variants.hovered)]
}
if (_variants.tapped) {
result = [...result, ...Object.keys(_variants.tapped)]
}
if (_variants.focused) {
result = [...result, ...Object.keys(_variants.focused)]
}
return result
})
const computedProperties = vueDemi.computed(() => {
const result = {}
Object.assign(result, state.value)
if (hovered.value && _variants.hovered) {
Object.assign(result, _variants.hovered)
}
if (tapped.value && _variants.tapped) {
Object.assign(result, _variants.tapped)
}
if (focused.value && _variants.focused) {
Object.assign(result, _variants.focused)
}
for (const key in result) {
if (!mutableKeys.value.includes(key)) delete result[key]
}
return result
})
vueDemi.watch(
() => core.unrefElement(target),
(el) => {
if (!el || !_variants) return
// Hovered
if (_variants.hovered) {
core.useEventListener(el, 'mouseenter', () => {
hovered.value = true
})
core.useEventListener(el, 'mouseleave', () => {
hovered.value = false
tapped.value = false
})
core.useEventListener(el, 'mouseout', () => {
hovered.value = false
tapped.value = false
})
}
// Tapped
if (_variants.tapped) {
// Mouse
if (supportsMouseEvents()) {
core.useEventListener(el, 'mousedown', () => {
tapped.value = true
})
core.useEventListener(el, 'mouseup', () => {
tapped.value = false
})
}
// Pointer
if (supportsPointerEvents()) {
core.useEventListener(el, 'pointerdown', () => {
tapped.value = true
})
core.useEventListener(el, 'pointerup', () => {
tapped.value = false
})
}
// Touch
if (supportsTouchEvents()) {
core.useEventListener(el, 'touchstart', () => {
tapped.value = true
})
core.useEventListener(el, 'touchend', () => {
tapped.value = false
})
}
}
// Focused
if (_variants.focused) {
core.useEventListener(el, 'focus', () => {
focused.value = true
})
core.useEventListener(el, 'blur', () => {
focused.value = false
})
}
},
{
immediate: true,
},
)
// Watch local computed variant, apply it dynamically
vueDemi.watch(computedProperties, (newVal) => {
apply(newVal)
})
}
function registerLifeCycleHooks({ target, variants, variant }) {
const _variants = vueDemi.unref(variants)
const stop = vueDemi.watch(
() => target,
() => {
// Lifecycle hooks bindings
if (_variants && _variants.enter) {
// Set initial before the element is mounted
if (_variants.initial) variant.value = 'initial'
// Set enter animation, once the element is mounted
vueDemi.nextTick(() => (variant.value = 'enter'))
}
},
{
immediate: true,
flush: 'pre',
},
)
return { stop }
}
/**
 * Apply the currently-selected variant whenever the reactive variant state
 * changes.
 *
 * @returns `stop` to dispose of the watcher
 */
function registerVariantsSync({ state, apply }) {
  // Watch for variant changes and apply the new one
  const stop = vueDemi.watch(
    state,
    (newVal) => {
      if (newVal) apply(newVal)
    },
    {
      immediate: true,
    },
  )
  return { stop }
}
function registerVisibilityHooks({ target, variants, variant }) {
const _variants = vueDemi.unref(variants)
let _stopObserver = core.noop
const _stopWatcher = vueDemi.watch(
() => core.unrefElement(target),
(el) => {
if (!el) return
// Bind intersection observer on target
_stopObserver = core.useIntersectionObserver(
target,
([{ isIntersecting }]) => {
if (_variants && _variants.visible) {
if (isIntersecting) {
variant.value = 'visible'
} else {
variant.value = 'initial'
}
}
},
).stop
},
{
immediate: true,
},
)
/**
* Stop both the watcher and the intersection observer.
*/
const stop = () => {
_stopObserver()
_stopWatcher()
}
return {
stop,
}
}
/**
* A Composable executing resolved variants features from variants declarations.
*
* Supports:
* - lifeCycleHooks: Bind the motion hooks to the component lifecycle hooks.
*
* @param variant
* @param variants
* @param options
*/
function useMotionFeatures(
instance,
options = {
syncVariants: true,
lifeCycleHooks: true,
visibilityHooks: true,
eventListeners: true,
},
) {
// Lifecycle hooks bindings
if (options.lifeCycleHooks) {
registerLifeCycleHooks(instance)
}
if (options.syncVariants) {
registerVariantsSync(instance)
}
// Visibility hooks
if (options.visibilityHooks) {
registerVisibilityHooks(instance)
}
// Event listeners
if (options.eventListeners) {
registerEventListeners(instance)
}
}
/**
* Reactive style object implementing all native CSS properties.
*
* @param props
*/
function reactiveStyle(props = {}) {
// Reactive StyleProperties object
const state = vueDemi.reactive(Object.assign({}, props))
const style = vueDemi.ref({})
// Reactive DOM Element compatible `style` object bound to state
vueDemi.watch(
state,
() => {
// Init result object
const result = {}
for (const [key, value] of Object.entries(state)) {
// Get value type for key
const valueType = getValueType(key)
// Get value as type for key
const valueAsType = getValueAsType(value, valueType)
// Append the computed style to result object
result[key] = valueAsType
}
style.value = result
},
{
immediate: true,
deep: true,
},
)
return {
state,
style,
}
}
/**
 * A list of all transformable axes. We'll use this list to generated a version
 * of each axes for each transform.
 */
const transformAxes = ['', 'X', 'Y', 'Z']
/**
 * An ordered array of each transformable value. By default, transform values
 * will be sorted to this order.
 */
const order = ['perspective', 'translate', 'scale', 'rotate', 'skew']
/**
 * Generate a list of every possible transform key: the shorthand props plus
 * every operation/axis combination (e.g. "translate", "translateX", "skewY").
 */
const transformProps = ['transformPerspective', 'x', 'y', 'z']
for (const operationKey of order) {
  for (const axesKey of transformAxes) {
    transformProps.push(operationKey + axesKey)
  }
}
/**
 * A quick lookup for transform props.
 */
const transformPropSet = new Set(transformProps)
function isTransformProp(key) {
  return transformPropSet.has(key)
}
/**
 * A quick lookup for transform origin props
 */
const transformOriginProps = new Set(['originX', 'originY', 'originZ'])
function isTransformOriginProp(key) {
  return transformOriginProps.has(key)
}
/**
* A Composable giving access to a StyleProperties object, and binding the generated style object to a target.
*
* @param target
*/
function useElementStyle(target, onInit) {
// Transform cache available before the element is mounted
let _cache
// Local target cache as we need to resolve the element from PermissiveTarget
let _target = undefined
// Create a reactive style object
const { state, style } = reactiveStyle()
// Sync existing style from supplied element
const stopInitWatch = vueDemi.watch(
() => core.unrefElement(target),
(el) => {
if (!el) return
_target = el
// Loop on style keys
for (const key of Object.keys(valueTypes)) {
if (
el.style[key] === null ||
el.style[key] === '' ||
isTransformProp(key) ||
isTransformOriginProp(key)
)
continue
// Append a defined key to the local StyleProperties state object
vueDemi.set(state, key, el.style[key])
}
// If cache is present, init the target with the current cached value
if (_cache) {
Object.entries(_cache).forEach(([key, value]) =>
vueDemi.set(el.style, key, value),
)
}
if (onInit) onInit(state)
},
{
immediate: true,
},
)
// Sync reactive style to element
const stopSyncWatch = vueDemi.watch(
style,
(newVal) => {
// Add the current value to the cache so it is set on target creation
if (!_target) {
_cache = newVal
return
}
// Append the state object to the target style properties
for (const key in newVal) vueDemi.set(_target.style, key, newVal[key])
},
{
immediate: true,
},
)
// Stop watchers
const stop = () => {
stopInitWatch()
stopSyncWatch()
}
return {
style: state,
stop,
}
}
/**
 * Aliases translate key for simpler API integration.
 *
 * Lets users write `x`/`y`/`z` while the generated CSS transform string uses
 * the full `translate*` function names.
 */
const translateAlias = {
  x: 'translateX',
  y: 'translateY',
  z: 'translateZ',
}
/**
* Reactive transform string implementing all native CSS transform properties.
*
* @param props
* @param enableHardwareAcceleration
*/
function reactiveTransform(props = {}, enableHardwareAcceleration = true) {
// Reactive TransformProperties object
const state = vueDemi.reactive(Object.assign({}, props))
const transform = vueDemi.ref('')
vueDemi.watch(
state,
(newVal) => {
// Init result
let result = ''
let hasHardwareAcceleration = false
// Use translate3d by default has a better GPU optimization
// And corrects scaling discrete behaviors
if (enableHardwareAcceleration && (newVal.x || newVal.y || newVal.z)) {
const str = [newVal.x || 0, newVal.y || 0, newVal.z || 0]
.map(px.transform)
.join(',')
result += `translate3d(${str}) `
hasHardwareAcceleration = true
}
// Loop on defined TransformProperties state keys
for (const [key, value] of Object.entries(newVal)) {
if (
enableHardwareAcceleration &&
(key === 'x' || key === 'y' || key === 'z')
)
continue
// Get value type for key
const valueType = getValueType(key)
// Get value as type for key
const valueAsType = getValueAsType(value, valueType)
// Append the computed transform key to result string
result += `${translateAlias[key] || key}(${valueAsType}) `
}
if (enableHardwareAcceleration && !hasHardwareAcceleration) {
result += `translateZ(0px) `
}
transform.value = result.trim()
},
{
immediate: true,
deep: true,
},
)
return {
state,
transform,
}
}
/**
 * Return an object from a transform string.
 *
 * e.g. "translateX(10px) scale(1.5)" -> { translateX: 10, scale: 1.5 }.
 * px/deg values and plain numeric strings become numbers; multi-value
 * functions (translate3d, ...) become arrays; other values stay strings.
 *
 * @param transform CSS transform string
 */
function parseTransform(transform) {
  // Split transform string into "name(value" chunks.
  const transforms = transform.trim().split(/\) |\)/)
  // Handle "initial", "inherit", "unset".
  if (transforms.length === 1) {
    return {}
  }
  const parseValues = (value) => {
    // If value is ending with px or deg, return it as a number
    if (value.endsWith('px') || value.endsWith('deg')) return parseFloat(value)
    // Plain numeric strings become numbers. (Bug fix: the condition was
    // inverted, returning NaN for non-numeric values and the raw string for
    // numeric ones.)
    if (!isNaN(Number(value))) return Number(value)
    // Parsing impossible, return as string
    return value
  }
  // Reduce the chunks to a { name: value } record and return it
  return transforms.reduce((acc, chunk) => {
    if (!chunk) return acc
    const [name, transformValue] = chunk.split('(')
    const valueArray = transformValue.split(',')
    const values = valueArray.map((val) => {
      return parseValues(val.endsWith(')') ? val.replace(')', '') : val.trim())
    })
    // Single-value functions unwrap to a scalar; multi-value keep the array.
    const value = values.length === 1 ? values[0] : values
    return Object.assign(Object.assign({}, acc), { [name]: value })
  }, {})
}
/**
 * Sets the state from a parsed transform string.
 *
 * Used in useElementTransform init to restore element transform string in cases it does exists.
 *
 * @param state reactive TransformProperties target
 * @param transform CSS transform string (e.g. "translate3d(10px,20px,0px) scale(2)")
 */
function stateFromTransform(state, transform) {
  Object.entries(parseTransform(transform)).forEach(([key, value]) => {
    // Axes reference for loops
    const axes = ['x', 'y', 'z']
    // Handle translate3d: parseTransform yields an array of per-axis values
    // here, so it must be unpacked before any scalar coercion happens.
    // (Bug fix: the unconditional `parseFloat(value)` below used to run
    // first, collapsing the array and making `forEach` throw.)
    if (key === 'translate3d') {
      const axisValues = Array.isArray(value) ? value : [value]
      axisValues.forEach((axisValue, index) => {
        vueDemi.set(state, axes[index], parseFloat(axisValue))
      })
      return
    }
    // Get value w/o unit, as unit is applied later on
    value = parseFloat(value)
    // NOTE(review): the original comment mentioned scale3d too, but only
    // translate3d is unpacked — a multi-value scale3d would be collapsed to
    // its first component here; confirm intended behavior.
    // Sync translateX on X
    if (key === 'translateX') {
      vueDemi.set(state, 'x', value)
      return
    }
    // Sync translateY on Y
    if (key === 'translateY') {
      vueDemi.set(state, 'y', value)
      return
    }
    // Sync translateZ on Z
    if (key === 'translateZ') {
      vueDemi.set(state, 'z', value)
      return
    }
    // Set raw value
    vueDemi.set(state, key, value)
  })
}
/**
* A Composable giving access to a TransformProperties object, and binding the generated transform string to a target.
*
* @param target
*/
function useElementTransform(target, onInit) {
// Transform cache available before the element is mounted
let _cache
// Local target cache as we need to resolve the element from PermissiveTarget
let _target = undefined
// Create a reactive transform object
const { state, transform } = reactiveTransform()
// Cache transform until the element is alive and we can bind to it
const stopInitWatch = vueDemi.watch(
() => core.unrefElement(target),
(el) => {
if (!el) return
_target = el
// Parse transform properties and applies them to the current state
if (el.style.transform) stateFromTransform(state, el.style.transform)
// If cache is present, init the target with the current cached value
if (_cache) {
el.style.transform = _cache
}
if (onInit) onInit(state)
},
{
immediate: true,
},
)
// Sync reactive transform to element
const stopSyncWatch = vueDemi.watch(
transform,
(newValue) => {
// Add the current value to the cache so it is set on target creation
if (!_target) {
_cache = newValue
return
}
// Set the transform string on the target
_target.style.transform = newValue
},
{
immediate: true,
},
)
// Stop watchers
const stop = () => {
stopInitWatch()
stopSyncWatch()
}
return {
transform: state,
stop,
}
}
/**
* A Composable giving access to both `transform` and `style`objects for a single element.
*
* @param target
*/
function useMotionProperties(target, defaultValues) {
// Local motion properties
const motionProperties = vueDemi.reactive({})
// Local mass setter
const apply = (values) => {
Object.entries(values).forEach(([key, value]) => {
vueDemi.set(motionProperties, key, value)
})
}
// Target element style object
const { style, stop: stopStyleWatchers } = useElementStyle(target, apply)
// Target element transform object
const { transform, stop: stopTransformWatchers } = useElementTransform(
target,
apply,
)
// Watch local object and apply styling accordingly
const stopPropertiesWatch = vueDemi.watch(
motionProperties,
(newVal) => {
Object.entries(newVal).forEach(([key, value]) => {
const target = isTransformProp(key) ? transform : style
if (target[key] && target[key] === value) return
vueDemi.set(target, key, value)
})
},
{
immediate: true,
deep: true,
},
)
// Apply default values once target is available
const stopInitWatch = vueDemi.watch(
() => core.unrefElement(target),
(el) => {
if (!el) return
if (defaultValues) apply(defaultValues)
},
{
immediate: true,
},
)
// Stop watchers
const stop = () => {
stopStyleWatchers()
stopTransformWatchers()
stopPropertiesWatch()
stopInitWatch()
}
return {
motionProperties,
style,
transform,
stop,
}
}
/**
 * A Composable handling variants selection and features.
 *
 * Holds the currently-selected variant name and exposes the matching
 * variant definition as a computed.
 *
 * @param variants variant definitions, as plain object or ref
 */
function useMotionVariants(variants = {}) {
  // Unref variants
  const _variants = vueDemi.unref(variants)
  // Current variant string
  const variant = vueDemi.ref()
  // Current variant state; undefined until a variant name has been selected
  const state = vueDemi.computed(() => {
    if (!variant.value) return
    return _variants[variant.value]
  })
  return {
    state,
    variant,
  }
}
/**
 * A Vue Composable that put your components in motion.
 *
 * Wires together the motion sub-systems for a single target element:
 * reactive style/transform properties, variant selection, and the controls
 * that animate between variants — then binds the requested features.
 *
 * @docs https://motion.vueuse.js.org
 *
 * @param target element (or ref/component) to animate
 * @param variants variant definitions, as plain object or ref
 * @param options feature toggles forwarded to useMotionFeatures
 */
function useMotion(target, variants = {}, options) {
  // Reactive styling and transform
  const { motionProperties } = useMotionProperties(target)
  // Variants manager
  const { variant, state } = useMotionVariants(variants)
  // Motion controls, synchronized with motion properties and variants
  const controls = useMotionControls(motionProperties, variants)
  // Create motion instance
  const instance = Object.assign(
    { target, variant, variants, state, motionProperties },
    controls,
  )
  // Bind features (lifecycle hooks, variant sync, visibility, event listeners)
  useMotionFeatures(instance, options)
  return instance
}
const directivePropsKeys = [
'initial',
'enter',
'leave',
'visible',
'hovered',
'tapped',
'focused',
'delay',
]
const resolveVariants = (node, variantsRef) => {
// This is done to achieve compat with Vue 2 & 3
// node.props = Vue 3 element props location
// node.data.attrs = Vue 2 element props location
const target = node.props
? node.props // @ts-expect-error
: node.data && node.data.attrs // @ts-expect-error
? node.data.attrs
: {}
if (target) {
if (target['variants'] && core.isObject(target['variants'])) {
// If variant are passed through a single object reference, initialize with it
variantsRef.value = Object.assign(
Object.assign({}, variantsRef.value),
target['variants'],
)
}
// Loop on directive prop keys, add them to the local variantsRef if defined
directivePropsKeys.forEach((key) => {
if (key === 'delay') {
if (target && target[key] && core.isNumber(target[key])) {
const delay = target[key]
if (variantsRef && variantsRef.value) {
if (variantsRef.value.enter) {
if (!variantsRef.value.enter.transition) {
variantsRef.value.enter.transition = {}
}
variantsRef.value.enter.transition = Object.assign(
Object.assign({}, variantsRef.value.enter.transition),
{ delay },
)
}
if (variantsRef.value.visible) {
if (!variantsRef.value.visible.transition) {
variantsRef.value.visible.transition = {}
}
variantsRef.value.visible.transition = Object.assign(
Object.assign({}, variantsRef.value.visible.transition),
{ delay },
)
}
}
}
return
}
if (target && target[key] && core.isObject(target[key])) {
variantsRef.value[key] = target[key]
}
})
}
}
const directive = (variants) => {
const register = (el, binding, node) => {
// Initialize variants with argument
const variantsRef = vueDemi.ref(variants || {})
// Resolve variants from node props
resolveVariants(node, variantsRef)
// Create motion instance
const motionInstance = useMotion(el, variantsRef)
// Set the global state reference if the name is set through v-motion="`value`"
if (binding.value) vueDemi.set(motionState, binding.value, motionInstance)
}
const unregister = (_, binding, __) => {
// Check if motion state has the current element as reference
if (binding.value && motionState[binding.value])
vueDemi.del(motionState, binding.value)
}
return {
// Vue 3 Directive Hooks
created: register,
unmounted: unregister,
// Vue 2 Directive Hooks
// For Nuxt & Vue 2 compatibility
// @ts-expect-error
bind: register,
unbind: unregister,
}
}
const fade = {
initial: {
opacity: 0,
},
enter: {
opacity: 1,
},
}
const fadeVisible = {
initial: {
opacity: 0,
},
visible: {
opacity: 1,
},
}
const pop = {
initial: {
scale: 0,
opacity: 0,
},
enter: {
scale: 1,
opacity: 1,
},
}
const popVisible = {
initial: {
scale: 0,
opacity: 0,
},
visible: {
scale: 1,
opacity: 1,
},
}
// Roll from left
const rollLeft = {
initial: {
x: -100,
rotate: 90,
opacity: 0,
},
enter: {
x: 0,
rotate: 0,
opacity: 1,
},
}
const rollVisibleLeft = {
initial: {
x: -100,
rotate: 90,
opacity: 0,
},
visible: {
x: 0,
rotate: 0,
opacity: 1,
},
}
// Roll from right
const rollRight = {
initial: {
x: 100,
rotate: -90,
opacity: 0,
},
enter: {
x: 0,
rotate: 0,
opacity: 1,
},
}
const rollVisibleRight = {
initial: {
x: 100,
rotate: -90,
opacity: 0,
},
visible: {
x: 0,
rotate: 0,
opacity: 1,
},
}
// Roll from top
const rollTop = {
initial: {
y: -100,
rotate: -90,
opacity: 0,
},
enter: {
y: 0,
rotate: 0,
opacity: 1,
},
}
const rollVisibleTop = {
initial: {
y: -100,
rotate: -90,
opacity: 0,
},
visible: {
y: 0,
rotate: 0,
opacity: 1,
},
}
// Roll from bottom
const rollBottom = {
initial: {
y: 100,
rotate: 90,
opacity: 0,
},
enter: {
y: 0,
rotate: 0,
opacity: 1,
},
}
const rollVisibleBottom = {
initial: {
y: 100,
rotate: 90,
opacity: 0,
},
visible: {
y: 0,
rotate: 0,
opacity: 1,
},
}
// Slide from left
const slideLeft = {
initial: {
x: -100,
opacity: 0,
},
enter: {
x: 0,
opacity: 1,
},
}
const slideVisibleLeft = {
initial: {
x: -100,
opacity: 0,
},
visible: {
x: 0,
opacity: 1,
},
}
// Slide from right
const slideRight = {
initial: {
x: 100,
opacity: 0,
},
enter: {
x: 0,
opacity: 1,
},
}
const slideVisibleRight = {
initial: {
x: 100,
opacity: 0,
},
visible: {
x: 0,
opacity: 1,
},
}
// Slide from top
const slideTop = {
initial: {
y: -100,
opacity: 0,
},
enter: {
y: 0,
opacity: 1,
},
}
const slideVisibleTop = {
initial: {
y: -100,
opacity: 0,
},
visible: {
y: 0,
opacity: 1,
},
}
// Slide from bottom
const slideBottom = {
initial: {
y: 100,
opacity: 0,
},
enter: {
y: 0,
opacity: 1,
},
}
const slideVisibleBottom = {
initial: {
y: 100,
opacity: 0,
},
visible: {
y: 0,
opacity: 1,
},
}
var presets = /*#__PURE__*/ Object.freeze({
__proto__: null,
fade: fade,
fadeVisible: fadeVisible,
pop: pop,
popVisible: popVisible,
rollBottom: rollBottom,
rollLeft: rollLeft,
rollRight: rollRight,
rollTop: rollTop,
rollVisibleBottom: rollVisibleBottom,
rollVisibleLeft: rollVisibleLeft,
rollVisibleRight: rollVisibleRight,
rollVisibleTop: rollVisibleTop,
slideBottom: slideBottom,
slideLeft: slideLeft,
slideRight: slideRight,
slideTop: slideTop,
slideVisibleBottom: slideVisibleBottom,
slideVisibleLeft: slideVisibleLeft,
slideVisibleRight: slideVisibleRight,
slideVisibleTop: slideVisibleTop,
})
/**
 * Convert a string to a slug.
 *
 * Source: https://gist.github.com/hagemann/382adfc57adbd5af078dc93feef01fe1
 * Credits: @hagemann
 *
 * Edited to transform camel naming to slug with `-`.
 *
 * @param string value to slugify
 */
function slugify(string) {
  const from =
    'àáâäæãåāăąçćčđďèéêëēėęěğǵḧîïíīįìłḿñńǹňôöòóœøōõőṕŕřßśšşșťțûüùúūǘůűųẃẍÿýžźż·/_,:;'
  const to =
    'aaaaaaaaaacccddeeeeeeeegghiiiiiilmnnnnoooooooooprrsssssttuuuuuuuuuwxyyzzz------'
  const specialChars = new RegExp(from.split('').join('|'), 'g')
  let slug = string.toString()
  slug = slug.replace(/[A-Z]/g, (letter) => '-' + letter) // camelCase -> -camel-Case
  slug = slug.toLowerCase()
  slug = slug.replace(/\s+/g, '-') // Replace spaces with -
  slug = slug.replace(specialChars, (char) => to.charAt(from.indexOf(char))) // Replace special characters
  slug = slug.replace(/&/g, '-and-') // Replace & with 'and'
  slug = slug.replace(/[^\w\-]+/g, '') // Remove all non-word characters
  slug = slug.replace(/\-\-+/g, '-') // Collapse consecutive dashes
  slug = slug.replace(/^-+/, '').replace(/-+$/, '') // Trim leading/trailing dashes
  return slug
}
/**
 * Vue plugin registering the base `v-motion` directive, one directive per
 * bundled preset (`v-motion-<preset-slug>`), and any user-supplied custom
 * directives from `options.directives`.
 */
const MotionPlugin = {
  install(app, options) {
    // Register default `v-motion` directive
    app.directive('motion', directive())
    // Register presets
    if (!options || (options && !options.excludePresets)) {
      for (const key in presets) {
        // Get preset variants
        const preset = presets[key]
        // Register the preset `v-motion-${key}` directive
        app.directive(`motion-${slugify(key)}`, directive(preset))
      }
    }
    // Register plugin-wise directives
    if (options && options.directives) {
      // Loop on options, create a custom directive for each definition
      for (const key in options.directives) {
        // Get directive variants
        const variants = options.directives[key]
        // Development warning, showing definitions missing `initial` key
        // NOTE(review): `&& true` looks like a compiled-away dev-mode flag.
        if (!variants.initial && true) {
          console.warn(
            `Your directive v-motion-${key} is missing initial variant!`,
          )
        }
        // Register the custom `v-motion-${key}` directive
        app.directive(`motion-${key}`, directive(variants))
      }
    }
  },
}
function useMotions() {
return motionState
}
function useSpring(values, spring) {
const { stop, get } = useMotionValues()
return {
values,
stop,
set: (properties) =>
Promise.all(
Object.entries(properties).map(([key, value]) => {
const motionValue = get(key, values[key], values)
return motionValue.start((onComplete) => {
const options = Object.assign(
{ type: 'spring' },
spring || getDefaultTransition(key, value),
)
return popmotion.animate(
Object.assign(
{
from: motionValue.get(),
to: value,
velocity: motionValue.getVelocity(),
onUpdate: (v) => motionValue.set(v),
onComplete,
},
options,
),
)
})
}),
),
}
}
/**
 * Check whether an object is a Motion Instance or not.
 *
 * Can be useful while building packages based on @vueuse/motion.
 *
 * Duck-types against the instance shape returned by `useMotion`: callable
 * `apply`/`set`/`stopTransitions` plus a ref `target`.
 *
 * @param obj
 * @returns bool
 */
function isMotionInstance(obj) {
  const _obj = obj
  return (
    _obj.apply !== undefined &&
    shared.isFunction(_obj.apply) &&
    _obj.set !== undefined &&
    shared.isFunction(_obj.set) &&
    _obj.stopTransitions !== undefined &&
    shared.isFunction(_obj.stopTransitions) &&
    _obj.target !== undefined &&
    vueDemi.isRef(_obj.target)
  )
}
/**
 * Reactive prefers-reduced-motion.
 *
 * Returns a ref tracking the `(prefers-reduced-motion: reduce)` media query.
 *
 * @param options forwarded to @vueuse/core `useMediaQuery`
 */
function useReducedMotion(options = {}) {
  return core.useMediaQuery('(prefers-reduced-motion: reduce)', options)
}
exports.MotionDirective = directive
exports.MotionPlugin = MotionPlugin
exports.fade = fade
exports.fadeVisible = fadeVisible
exports.isMotionInstance = isMotionInstance
exports.pop = pop
exports.popVisible = popVisible
exports.reactiveStyle = reactiveStyle
exports.reactiveTransform = reactiveTransform
exports.rollBottom = rollBottom
exports.rollLeft = rollLeft
exports.rollRight = rollRight
exports.rollTop = rollTop
exports.rollVisibleBottom = rollVisibleBottom
exports.rollVisibleLeft = rollVisibleLeft
exports.rollVisibleRight = rollVisibleRight
exports.rollVisibleTop = rollVisibleTop
exports.slideBottom = slideBottom
exports.slideLeft = slideLeft
exports.slideRight = slideRight
exports.slideTop = slideTop
exports.slideVisibleBottom = slideVisibleBottom
exports.slideVisibleLeft = slideVisibleLeft
exports.slideVisibleRight = slideVisibleRight
exports.slideVisibleTop = slideVisibleTop
exports.slugify = slugify
exports.useElementStyle = useElementStyle
exports.useElementTransform = useElementTransform
exports.useMotion = useMotion
exports.useMotionControls = useMotionControls
exports.useMotionProperties = useMotionProperties
exports.useMotionTransitions = useMotionTransitions
exports.useMotionVariants = useMotionVariants
exports.useMotions = useMotions
exports.useReducedMotion = useReducedMotion
exports.useSpring = useSpring
|
<filename>station/Scripts/map.js
// Initialise the Leaflet map (default zoom control disabled), centred on
// Białystok, Poland.
var map = L.map('map', { zoomControl: false }).setView([53.134699, 23.157905], 13);
// Mapbox-hosted tile layer; the access token placeholder is substituted at
// deploy time.
L.tileLayer('https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png?access_token={accessToken}', {
    attribution: 'Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors, <a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, Imagery © <a href="http://mapbox.com">Mapbox</a>',
    maxZoom: 18,
    id: 'adamgolubowski.og5h8gk5',
    accessToken: '<KEY>'
}).addTo(map);
// Sample single-marker code kept for reference:
//var locations = ["Białystok - Centrum", 53.1346, 23.1579]
//marker = new L.marker([locations[1], locations[2]])
//    .addTo(map)
//    .bindPopup(locations[0]);
// Adds a station marker to the map with a popup linking to the station's
// details page.
//   id       - station identifier used to build the details URL
//   name     - station display name shown in the popup
//   lon, lat - coordinates passed to Leaflet as [lon, lat]
//     NOTE(review): Leaflet expects [lat, lng]; the commented-out call sites
//     pass latitude first, so the parameter names look swapped relative to
//     their meaning — confirm against callers.
function addMarker(id, name, lon, lat) {
    // Declare locals with `var` so they no longer leak into global scope.
    var url = "stations/details/" + id;
    var popupContent = '<p>' + name + '</p><p><a href="' + url + '">Details and sensor readings</a></p>';
    var marker = new L.marker([lon, lat])
        .addTo(map)
        .bindPopup(popupContent);
    return marker;
}
//addMarker("Białystok - Centrum", 53.1346, 23.1579);
//@for(int i=0;i<model.)
//{
// addMarker('@item.Name',@item.LocLongitude,@item.LocLattitude);
//}
//@foreach (var item in Model)
//{
// @: addMarker('@item.Name',@(item.LocLattitude.ToString("0.0000", System.Globalization.CultureInfo.InvariantCulture)),@(item.LocLongitude.ToString("0.0000", System.Globalization.CultureInfo.InvariantCulture)));
//}
|
import java.util.Arrays;
/**
 * Finds the minimum and maximum of an int array with the pairwise-comparison
 * technique (about 3n/2 comparisons instead of the naive 2n).
 */
public class MaxMinFinder {

    /**
     * Returns the minimum and maximum of the given array.
     *
     * @param a a non-null, non-empty array of ints
     * @return a two-element array: index 0 is the minimum, index 1 the maximum
     * @throws IllegalArgumentException if {@code a} is null or empty
     */
    public static int[] findMaxMin(int[] a) {
        // The original indexed a[0] (and a[1] for even lengths) unconditionally,
        // so an empty array threw an unhelpful ArrayIndexOutOfBoundsException.
        if (a == null || a.length == 0) {
            throw new IllegalArgumentException("array must be non-null and non-empty");
        }
        int n = a.length;
        int min, max;
        if (n % 2 == 1) {
            // Odd length: seed min and max with the first element; the loop
            // below then starts at index 1.
            min = a[0];
            max = a[0];
        } else {
            // Even length: seed min and max from the first pair; the loop
            // below then starts at index 2.
            if (a[0] > a[1]) {
                max = a[0];
                min = a[1];
            } else {
                max = a[1];
                min = a[0];
            }
        }
        // Walk the remaining elements two at a time: compare the pair against
        // each other first, then only the larger against max and the smaller
        // against min (3 comparisons per 2 elements).
        for (int i = (n % 2 == 1) ? 1 : 2; i < n; i += 2) {
            if (a[i] > a[i + 1]) {
                if (a[i] > max) {
                    max = a[i];
                }
                if (a[i + 1] < min) {
                    min = a[i + 1];
                }
            } else {
                if (a[i + 1] > max) {
                    max = a[i + 1];
                }
                if (a[i] < min) {
                    min = a[i];
                }
            }
        }
        return new int[]{min, max};
    }

    public static void main(String[] args) {
        int[] a = {4, 7, 2, 9, 5};
        System.out.println(Arrays.toString(findMaxMin(a))); // Output: [2, 9]
    }
}
#!/usr/bin/env bash
# SPDK test: exercise namespace create/attach/detach/delete through the
# cuse (character-device) interface, using the system nvme-cli binary
# against an SPDK-managed controller.
testdir=$(readlink -f $(dirname $0))
rootdir=$(readlink -f $testdir/../../..)
source $rootdir/scripts/common.sh
source $rootdir/test/common/autotest_common.sh
# Use a locally built nvme-cli rather than whatever is on PATH.
NVME_CMD="/usr/local/src/nvme-cli/nvme"
rpc_py=$rootdir/scripts/rpc.py
# Bind devices to the kernel briefly so nvme-cli can query them.
$rootdir/scripts/setup.sh
sleep 1
bdfs=$(get_nvme_bdfs)
$rootdir/scripts/setup.sh reset
# Find bdf that supports Namespace Management
for bdf in $bdfs; do
	nvme_name=$(get_nvme_ctrlr_from_bdf ${bdf})
	if [[ -z "$nvme_name" ]]; then
		continue
	fi
	# Check Optional Admin Command Support for Namespace Management
	# (OACS bit 3 per the id-ctrl output).
	oacs=$($NVME_CMD id-ctrl /dev/${nvme_name} | grep oacs | cut -d: -f2)
	oacs_ns_manage=$((oacs & 0x8))
	if [[ "$oacs_ns_manage" -ne 0 ]]; then
		break
	fi
done
# Bail out (and restore SPDK bindings) if no capable controller was found.
if [[ "${nvme_name}" == "" ]] || [[ "$oacs_ns_manage" -eq 0 ]]; then
	echo "No NVMe device supporting Namespace managment found"
	$rootdir/scripts/setup.sh
	exit 1
fi
nvme_dev=/dev/${nvme_name}
# Detect supported features and configuration
# (OAES bit 0x100 — presumably the Namespace Attribute Notices async-event
# capability; see reset_nvme_if_aer_unsupported below — TODO confirm).
oaes=$($NVME_CMD id-ctrl ${nvme_dev} | grep oaes | cut -d: -f2)
aer_ns_change=$((oaes & 0x100))
# Work around controllers that do not raise an async event on namespace
# attribute changes ($aer_ns_change computed above is 0): without the event
# the controller's namespace view can go stale, so force a reset after each
# namespace operation. Arg 1: the nvme character device to reset.
function reset_nvme_if_aer_unsupported() {
	if [[ "$aer_ns_change" -eq "0" ]]; then
		sleep 1
		# "|| true": a failed reset must not abort the test (set -e elsewhere).
		$NVME_CMD reset "$1" || true
	fi
}
# Restore the controller to a single full-capacity namespace and hand the
# device back to the kernel/SPDK setup. Used both on normal exit and from
# the failure trap.
function clean_up() {
	$rootdir/scripts/setup.sh reset
	# This assumes every NVMe controller contains single namespace,
	# encompassing Total NVM Capacity and formatted as 512 block size.
	# 512 block size is needed for test/vhost/vhost_boot.sh to
	# successfully run.
	tnvmcap=$($NVME_CMD id-ctrl ${nvme_dev} | grep tnvmcap | cut -d: -f2)
	blksize=512
	size=$((tnvmcap / blksize))
	echo "Restoring $nvme_dev..."
	# Detach/delete all namespaces (nsid 0xffffffff = all); tolerate failure
	# since the device may already be empty.
	$NVME_CMD detach-ns ${nvme_dev} -n 0xffffffff -c 0 || true
	$NVME_CMD delete-ns ${nvme_dev} -n 0xffffffff || true
	$NVME_CMD create-ns ${nvme_dev} -s ${size} -c ${size} -b ${blksize}
	$NVME_CMD attach-ns ${nvme_dev} -n 1 -c 0
	$NVME_CMD reset ${nvme_dev}
	$rootdir/scripts/setup.sh
}
# Print the given message framed by "---" separator lines on stdout.
info_print() {
	printf '%s\n' "---" "$*" "---"
}
# Prepare controller
info_print "delete all namespaces"
# Detach/delete everything so the test starts from an empty controller;
# "|| true" because a fresh device may have nothing to remove.
$NVME_CMD detach-ns ${nvme_dev} -n 0xffffffff -c 0 || true
$NVME_CMD delete-ns ${nvme_dev} -n 0xffffffff || true
reset_nvme_if_aer_unsupported ${nvme_dev}
sleep 1
# Bind only the chosen device to SPDK and start the target on cores 0-1.
PCI_WHITELIST="${bdf}" $rootdir/scripts/setup.sh
$SPDK_BIN_DIR/spdk_tgt -m 0x3 &
spdk_tgt_pid=$!
# On failure, kill the target and restore the device before exiting.
trap 'kill -9 ${spdk_tgt_pid}; clean_up; exit 1' SIGINT SIGTERM EXIT
waitforlisten $spdk_tgt_pid
# Attach the controller to SPDK and expose it via the cuse character device.
$rpc_py bdev_nvme_attach_controller -b Nvme0 -t PCIe -a ${bdf}
$rpc_py bdev_nvme_cuse_register -n Nvme0
sleep 1
# Controller node must exist; no namespace nodes are expected yet.
[[ -c /dev/spdk/nvme0 ]]
for dev in /dev/spdk/nvme0n*; do
	[[ ! -c ${dev} ]]
done
info_print "create ns: nsze=10000 ncap=10000 flbias=0"
$NVME_CMD create-ns /dev/spdk/nvme0 -s 10000 -c 10000 -f 0
info_print "attach ns: nsid=1 controller=0"
$NVME_CMD attach-ns /dev/spdk/nvme0 -n 1 -c 0
reset_nvme_if_aer_unsupported /dev/spdk/nvme0
sleep 1
# Attaching nsid 1 must surface /dev/spdk/nvme0n1.
[[ -c /dev/spdk/nvme0n1 ]]
info_print "create ns: nsze=10000 ncap=10000 flbias=0"
$NVME_CMD create-ns /dev/spdk/nvme0 -s 10000 -c 10000 -f 0
info_print "attach ns: nsid=2 controller=0"
$NVME_CMD attach-ns /dev/spdk/nvme0 -n 2 -c 0
reset_nvme_if_aer_unsupported /dev/spdk/nvme0
sleep 1
[[ -c /dev/spdk/nvme0n2 ]]
info_print "detach ns: nsid=2 controller=0"
$NVME_CMD detach-ns /dev/spdk/nvme0 -n 2 -c 0 || true
info_print "delete ns: nsid=2"
$NVME_CMD delete-ns /dev/spdk/nvme0 -n 2 || true
reset_nvme_if_aer_unsupported /dev/spdk/nvme0
sleep 1
# Detaching/deleting nsid 2 must remove its device node.
[[ ! -c /dev/spdk/nvme0n2 ]]
info_print "detach ns: nsid=1 controller=0"
$NVME_CMD detach-ns /dev/spdk/nvme0 -n 1 -c 0 || true
info_print "delete ns: nsid=1"
$NVME_CMD delete-ns /dev/spdk/nvme0 -n 1 || true
reset_nvme_if_aer_unsupported /dev/spdk/nvme0
sleep 1
# Here we should not have any cuse devices
for dev in /dev/spdk/nvme0n*; do
	[[ ! -c ${dev} ]]
done
# Detaching the controller must also remove the cuse controller node.
$rpc_py bdev_nvme_detach_controller Nvme0
sleep 1
[[ ! -c /dev/spdk/nvme0 ]]
# Success path: clear the failure trap, stop the target, restore the device.
trap - SIGINT SIGTERM EXIT
killprocess $spdk_tgt_pid
clean_up
|
#!/bin/bash
# Copyright 2012 Johns Hopkins University (Author: Daniel Povey)
# Apache 2.0.
# This script prepares the lang/ directory.
#
# It builds a minimal Kaldi lang/ directory for the TIDIGITS digit task:
# a tiny phonetic lexicon, phone symbol tables and lists, L.fst, topo,
# and a uniform digit-loop grammar G.fst.
. ./path.sh
# Decided to do this using something like a real lexicon, although we
# could also have used whole-word models.
tmpdir=data/local/dict
lang=data/lang
mkdir -p $tmpdir
# Inline lexicon: word followed by its phone sequence (z = "zero", o = "oh").
cat >$tmpdir/lexicon.txt <<EOF
z z iy r ow
o ow
1 w ah n
2 t uw
3 th r iy
4 f ao r
5 f ay v
6 s ih k s
7 s eh v ah n
8 ey t
9 n ay n
EOF
# and note, we'll have a silence phone, but it won't appear
# in this form of lexicon as there's no silence word; it's an option
# in the lexicon FST that gets added by the script.
mkdir -p $lang/phones
# symbol-table for words:
cat $tmpdir/lexicon.txt | awk '{print $1}' | awk 'BEGIN {print "<eps> 0"; n=1;} { printf("%s %s\n", $1, n++); }' \
  >$lang/words.txt
# list of phones.
cat $tmpdir/lexicon.txt | awk '{for(n=2;n<=NF;n++) seen[$n]=1; } END{print "sil"; for (w in seen) { print w; }}' \
  >$tmpdir/phone.list
# symbol-table for phones:
cat $tmpdir/phone.list | awk 'BEGIN {print "<eps> 0"; n=1;} { printf("%s %s\n", $1, n++); }' \
  >$lang/phones.txt
p=$lang/phones
echo sil > $p/silence.txt
echo sil > $p/context_indep.txt
echo sil > $p/optional_silence.txt
grep -v -w sil $tmpdir/phone.list > $p/nonsilence.txt
touch $p/disambig.txt # disambiguation-symbols list, will be empty.
touch $p/extra_questions.txt # list of "extra questions"-- empty; we don't
# have things like tone or word-positions or stress markings.
cat $tmpdir/phone.list > $p/sets.txt # list of "phone sets"-- each phone is in its
# own set. Normally, each line would have a bunch of word-position-dependenent or
# stress-dependent realizations of the same phone.
# Convert the .txt phone lists to integer (.int) and colon-separated (.csl) forms.
for t in silence nonsilence context_indep optional_silence disambig; do
  utils/sym2int.pl $lang/phones.txt <$p/$t.txt >$p/$t.int
  cat $p/$t.int | awk '{printf(":%d", $1);} END{printf "\n"}' | sed s/:// > $p/$t.csl
done
for t in extra_questions sets; do
  utils/sym2int.pl $lang/phones.txt <$p/$t.txt >$p/$t.int
done
# Decision-tree roots: one shared/split root per phone.
cat $tmpdir/phone.list | awk '{printf("shared split %s\n", $1);}' >$p/roots.txt
utils/sym2int.pl -f 3- $lang/phones.txt $p/roots.txt >$p/roots.int
echo z > $lang/oov.txt # we map OOV's to this.. there are no OOVs in this setup,
# but the scripts expect this file to exist.
utils/sym2int.pl $lang/words.txt <$lang/oov.txt >$lang/oov.int
# Note: "word_boundary.{txt,int}" will not exist in this setup. This will mean it's
# not very easy to get word alignments, but it simplifies some things.
# Make the FST form of the lexicon (this includes optional silence).
utils/make_lexicon_fst.pl $tmpdir/lexicon.txt 0.5 sil | \
  fstcompile --isymbols=$lang/phones.txt --osymbols=$lang/words.txt \
  --keep_isymbols=false --keep_osymbols=false | \
  fstarcsort --sort_type=olabel > $lang/L.fst
# Note: in this setup there are no "disambiguation symbols" because the lexicon
# contains no homophones; and there is no '#0' symbol in the LM because it's
# not a backoff LM, so L_disambig.fst is the same as L.fst.
cp $lang/L.fst $lang/L_disambig.fst
# Instantiate the HMM topology prototype with the actual phone integer lists.
silphonelist=`cat $lang/phones/silence.csl | sed 's/:/ /g'`
nonsilphonelist=`cat $lang/phones/nonsilence.csl | sed 's/:/ /g'`
cat conf/topo.proto | sed "s:NONSILENCEPHONES:$nonsilphonelist:" | \
  sed "s:SILENCEPHONES:$silphonelist:" > $lang/topo
# Now we prepare a simple grammar G.fst that's a kind of loop of
# digits (no silence in this, since that's handled in L.fst)
# there are 12 options: 1-9, zero, oh, and end-of-sentence.
penalty=`perl -e '$prob = 1.0/12; print -log($prob); '` # negated log-prob,
# which becomes the cost on the FST.
( for x in `echo z o 1 2 3 4 5 6 7 8 9`; do
  echo 0 0 $x $x $penalty # format is: from-state to-state input-symbol output-symbol cost
done
echo 0 $penalty # format is: state final-cost
) | fstcompile --isymbols=$lang/words.txt --osymbols=$lang/words.txt \
  --keep_isymbols=false --keep_osymbols=false |\
  fstarcsort --sort_type=ilabel > $lang/G.fst
# NOTE(review): the script ends here — everything after this exit is dead
# code (it looks like a leftover copy of the TIDIGITS data-prep script).
exit 0;
# NOTE(review): UNREACHABLE — the `exit 0;` above terminates the script before
# any of the following data-preparation code runs. It appears to be a pasted-in
# copy of the TIDIGITS data-prep script; either delete it or move it to its
# own file.
# NOTE(review): this argument check looks inverted — it errors when an
# argument IS supplied, yet $1 is consumed below; presumably it was meant
# to be `-ne 1`. TODO confirm before reviving this code.
if [ $# -ne 0 ]; then
  echo "Argument should be the TIDIGITS directory, see ../run.sh for example."
  exit 1;
fi
tidigits=$1
tmpdir=`pwd`/data/local/data
mkdir -p $tmpdir
# Note: the .wav files are not in .wav format but "sphere" format (this was
# produced in the days before Windows).
find $tidigits/tidigits/train -name '*.wav' > $tmpdir/train.flist
n=`cat $tmpdir/train.flist | wc -l`
[ $n -eq 8623 ] || echo Unexpected number of training files $n versus 8623
find $tidigits/tidigits/test -name '*.wav' > $tmpdir/test.flist
n=`cat $tmpdir/test.flist | wc -l`
[ $n -eq 8700 ] || echo Unexpected number of test files $n versus 8700
sph2pipe=$KALDI_ROOT/tools/sph2pipe_v2.5/sph2pipe
if [ ! -x $sph2pipe ]; then
  echo "Could not find (or execute) the sph2pipe program at $sph2pipe";
  exit 1;
fi
for x in train test; do
  # get scp file that has utterance-ids and maps to the sphere file.
  cat $tmpdir/$x.flist | perl -ane 'm|/(..)/([1-9zo]+[ab])\.wav| || die "bad line $_"; print "$1_$2 $_"; ' \
    | sort > $tmpdir/${x}_sph.scp
  # turn it into one that has a valid .wav format in the modern sense (i.e. RIFF format, not sphere).
  # This file goes into its final location
  mkdir -p data/$x
  awk '{printf("%s '$sph2pipe' -f wav %s |\n", $1, $2);}' < $tmpdir/${x}_sph.scp > data/$x/wav.scp
  # Now get the "text" file that says what the transcription is.
  # NOTE(review): the `cat ... |` below is redundant — the perl command reads
  # from the `<data/$x/wav.scp` redirect, which overrides the piped stdin.
  cat data/$x/wav.scp |
  perl -ane 'm/^(.._([1-9zo]+)[ab]) / || die; $text = join(" ", split("", $2)); print "$1 $text\n";' \
    <data/$x/wav.scp >data/$x/text
  # now get the "utt2spk" file that says, for each utterance, the speaker name.
  perl -ane 'm/^((..)_\S+) / || die; print "$1 $2\n"; ' \
    <data/$x/wav.scp >data/$x/utt2spk
  # create the file that maps from speaker to utterance-list.
  utils/utt2spk_to_spk2utt.pl <data/$x/utt2spk >data/$x/spk2utt
done
echo "Data preparation succeeded"
|
<reponame>edwargix/pins
// Swap the "add picture" button for the upload form.
$('#add_pic').on('click', () => {
    $('#add_pic').fadeOut(() => {
        $('#upload').fadeIn();
    });
});

// Swap the "add location" button for the mkdir form.
$('#add_location').on('click', () => {
    $('#add_location').fadeOut(() => {
        $('#mkdir').fadeIn();
    });
});

// Clicking an image scrolls to its title, then reveals and wires up its
// action buttons. `function` (not arrow) is required so `this` is the
// clicked <img> element.
$('img').on('click', function() {
    var el = this;
    $('html, body').animate({
        scrollTop: $('[id="' + el.id + '_title"]').offset().top
    }, 400, () => {
        window.location.hash = el.id + '_title';
        $('[id="' + el.id + '_actions"]').fadeIn('fast', () => {
            bindImageActions(el);
        });
    });
});

// Wire up (or re-wire, via .off) the cancel/rename/delete controls for one image.
function bindImageActions(img) {
    $('[id="' + img.id + '_cancel"]').off('click').on('click', () => {
        $('[id="' + img.id + '_actions"]').fadeOut('fast');
    });
    $('[id="' + img.id + '_rename"]').off('click').on('click', () => {
        $('[id="' + img.id + '_rename_form"]').toggle();
    });
    $('[id="' + img.id + '_delete"]').off('click').on('click', () => {
        if (window.confirm('The pic will be deleted permanently'))
            $.post('/rm', { path: $('div#path').html(), name: img.alt }, () => { window.location.reload(); });
    });
}

// Delete the current page (and everything beneath it) after confirmation,
// then navigate to the parent page.
$('#delete_page').on('click', () => {
    if (window.confirm('This page and every page beneath it will be deleted'))
        $.post('/rm', { path: $('div#path').html(), name: '' }, () => { window.location = $('#parent').attr('href'); });
});
|
# Termux build recipe metadata for the libev package (consumed by the
# termux-packages build scripts).
TERMUX_PKG_HOMEPAGE=http://software.schmorp.de/pkg/libev.html
TERMUX_PKG_DESCRIPTION="Full-featured and high-performance event loop library"
# Upstream release to fetch and build.
TERMUX_PKG_VERSION=4.24
TERMUX_PKG_SRCURL=http://dist.schmorp.de/libev/libev-${TERMUX_PKG_VERSION}.tar.gz
# SHA-256 checksum of the source tarball above.
TERMUX_PKG_SHA256=973593d3479abdf657674a55afe5f78624b0e440614e2b8cb3a07f16d4d7f821
|
#!/bin/bash
# Environment/path configuration for a containerized Logstash setup.
# Defines source, config, and log locations; most values can be overridden
# via environment variables of the same name.
# Fail fast, including pipelines
set -e -o pipefail
# Logstash source directory
LOGSTASH_SRC_DIR=${LOGSTASH_SRC_DIR:-'/opt/logstash'}
# Logstash binary path
LOGSTASH_BINARY="${LOGSTASH_SRC_DIR}/bin/logstash"
# Logstash config file used if `LOGSTASH_CONFIG_URL` isn't defined
LOGSTASH_DEFAULT_CONFIG_URL='https://gist.githubusercontent.com/pblittle/8778567/raw/logstash.conf'
# Logstash config download URL (monolithic file or tarball)
LOGSTASH_CONFIG_URL=${LOGSTASH_CONFIG_URL:-${LOGSTASH_DEFAULT_CONFIG_URL}}
# Logstash config directory
LOGSTASH_CONFIG_DIR="${LOGSTASH_SRC_DIR}/conf.d"
# Logstash config search path (glob; requires globstar for ** to recurse)
LOGSTASH_CONFIG_PATH="${LOGSTASH_CONFIG_DIR}/**/*.conf"
# Logstash log directory
LOGSTASH_LOG_DIR='/var/log/logstash'
# Logstash log file path
LOGSTASH_LOG_FILE="${LOGSTASH_LOG_DIR}/logstash.log"
# Elasticsearch config file path
ES_CONFIG_FILE="${LOGSTASH_SRC_DIR}/elasticsearch.yml"
# Kibana config file path
KIBANA_CONFIG_FILE="${LOGSTASH_SRC_DIR}/vendor/kibana/config.js"
# Kibana proxy regular expression (matches "http" or "https", escaped for sed)
readonly PROXY_PROTOCOL_REGEX='\(http[s]\?\)'
|
package base.socket;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.ServerSocket;
import java.net.Socket;
/**
 * Minimal blocking socket server: accepts connections on port 8899 and
 * handles each one on its own thread. The per-connection protocol is
 * line-oriented text terminated by an "eof" marker.
 *
 * Based on: http://haohaoxuexi.iteye.com/blog/1979837 (socket programming example)
 */
public class Server {

    public static void main(String args[]) throws IOException {
        // Keep the example simple: let all exceptions propagate out of main.
        int port = 8899;
        // Listen on port 8899. The server socket lives for the whole process,
        // so it is intentionally never closed.
        ServerSocket server = new ServerSocket(port);
        while (true) {
            // accept() blocks until a client connects.
            Socket socket = server.accept();
            // Hand each accepted connection to a fresh handler thread.
            new Thread(new Task(socket)).start();
        }
    }

    /**
     * Handles a single client connection.
     */
    static class Task implements Runnable {

        private Socket socket;

        public Task(Socket socket) {
            this.socket = socket;
        }

        public void run() {
            try {
                handleSocket();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        /**
         * Talk to the client: read lines until an "eof" marker appears
         * (anything after the marker on that line is discarded), print the
         * accumulated text, then send a single UTF-8 reply and close.
         *
         * @throws Exception on any I/O or encoding failure
         */
        private void handleSocket() throws Exception {
            // try-with-resources guarantees the reader, writer and socket are
            // closed even when reading or writing throws — the original only
            // closed them on the success path, leaking them on any exception.
            try (Socket s = socket;
                 BufferedReader br = new BufferedReader(new InputStreamReader(
                         s.getInputStream(), "UTF-8"));
                 Writer writer = new OutputStreamWriter(s.getOutputStream(), "UTF-8")) {
                StringBuilder sb = new StringBuilder();
                int index;
                String temp;
                while ((temp = br.readLine()) != null) {
                    // Without the "eof" sentinel, readLine() would keep
                    // blocking until the client closed its output stream,
                    // and the reply below would never be sent.
                    if ((index = temp.indexOf("eof")) != -1) {
                        sb.append(temp.substring(0, index));
                        break;
                    }
                    sb.append(temp);
                }
                System.out.println("from client: " + sb);
                // Send one reply (terminated with the same "eof" marker the
                // client-side reader presumably looks for — TODO confirm).
                writer.write("服务器端发送asdsadsadassdsadsadsa。eof\n");
                writer.flush();
            }
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.