text stringlengths 1 1.05M |
|---|
/* Copyright 2010 Smartmobili SARL
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CGDATAPROVIDER_H_
#define CGDATAPROVIDER_H_
/* Opaque data-provider object; instances are created by the
 * CGDataProviderCreate* functions declared below. */
typedef struct CGDataProvider *CGDataProviderRef;
#include <CoreGraphics/CGBase.h>
#include <CoreFoundation/CFURL.h>
#include <stddef.h>
#include <unistd.h>
/* ---- Sequential-access provider callbacks ---- */
/* Copy up to 'count' bytes into 'buffer'; returns the number of bytes
 * actually provided (0 signals end of data). */
typedef size_t (*CGDataProviderGetBytesCallback)(void *info, void *buffer, size_t count);
/* Advance the read position by up to 'count' bytes; returns the number
 * of bytes actually skipped. */
typedef off_t (*CGDataProviderSkipForwardCallback)(void *info, off_t count);
/* Reset the read position back to the start of the data. */
typedef void (*CGDataProviderRewindCallback)(void *info);
/* Release the caller-supplied 'info' context when the provider is freed. */
typedef void (*CGDataProviderReleaseInfoCallback)(void *info);
/* Callback table for CGDataProviderCreateSequential. */
struct CGDataProviderSequentialCallbacks {
unsigned int version;
CGDataProviderGetBytesCallback getBytes;
CGDataProviderSkipForwardCallback skipForward;
CGDataProviderRewindCallback rewind;
CGDataProviderReleaseInfoCallback releaseInfo;
};
typedef struct CGDataProviderSequentialCallbacks
CGDataProviderSequentialCallbacks;
/* ---- Direct-access provider callbacks ---- */
/* Return a pointer to the provider's entire backing buffer, or NULL. */
typedef const void *(*CGDataProviderGetBytePointerCallback)(void *info);
/* Release a pointer previously handed out by the get-byte-pointer callback. */
typedef void (*CGDataProviderReleaseBytePointerCallback)(void *info,
const void *pointer);
/* Random-access read: copy up to 'count' bytes starting at 'position'. */
typedef size_t (*CGDataProviderGetBytesAtPositionCallback)(void *info,
void *buffer, off_t position, size_t count);
/* Callback table for CGDataProviderCreateDirect. */
struct CGDataProviderDirectCallbacks {
unsigned int version;
CGDataProviderGetBytePointerCallback getBytePointer;
CGDataProviderReleaseBytePointerCallback releaseBytePointer;
CGDataProviderGetBytesAtPositionCallback getBytesAtPosition;
CGDataProviderReleaseInfoCallback releaseInfo;
};
typedef struct CGDataProviderDirectCallbacks CGDataProviderDirectCallbacks;
/* CFTypeID of the CGDataProvider class. */
CG_EXTERN CFTypeID CGDataProviderGetTypeID(void);
/* Constructors for the various data sources. */
CG_EXTERN CGDataProviderRef CGDataProviderCreateSequential(void *info,
const CGDataProviderSequentialCallbacks *callbacks);
CG_EXTERN CGDataProviderRef CGDataProviderCreateDirect(void *info, off_t size,
const CGDataProviderDirectCallbacks *callbacks);
/* Invoked when a provider created with CGDataProviderCreateWithData is
 * destroyed, so the caller can free 'data'. */
typedef void (*CGDataProviderReleaseDataCallback)(void *info, const void *data,
size_t size);
CG_EXTERN CGDataProviderRef CGDataProviderCreateWithData(void *info,
const void *data, size_t size,
CGDataProviderReleaseDataCallback releaseData);
CG_EXTERN CGDataProviderRef CGDataProviderCreateWithCFData(CFDataRef data);
CG_EXTERN CGDataProviderRef CGDataProviderCreateWithURL(CFURLRef url);
CG_EXTERN CGDataProviderRef
CGDataProviderCreateWithFilename(const char *filename);
/* CF-style retain/release; Release accepts NULL per CF convention —
 * TODO confirm NULL handling in this implementation. */
CG_EXTERN CGDataProviderRef CGDataProviderRetain(CGDataProviderRef provider);
CG_EXTERN void CGDataProviderRelease(CGDataProviderRef provider);
CG_EXTERN CFDataRef CGDataProviderCopyData(CGDataProviderRef provider);
/* ---- Legacy (pre-10.5 style) callback tables, kept for compatibility ---- */
typedef void (*CGDataProviderSkipBytesCallback)(void *info, size_t count);
struct CGDataProviderCallbacks {
CGDataProviderGetBytesCallback getBytes;
CGDataProviderSkipBytesCallback skipBytes;
CGDataProviderRewindCallback rewind;
CGDataProviderReleaseInfoCallback releaseProvider;
};
typedef struct CGDataProviderCallbacks CGDataProviderCallbacks;
typedef size_t (*CGDataProviderGetBytesAtOffsetCallback)(void *info,
void *buffer, size_t offset, size_t count);
struct CGDataProviderDirectAccessCallbacks {
CGDataProviderGetBytePointerCallback getBytePointer;
CGDataProviderReleaseBytePointerCallback releaseBytePointer;
CGDataProviderGetBytesAtOffsetCallback getBytes;
CGDataProviderReleaseInfoCallback releaseProvider;
};
typedef struct CGDataProviderDirectAccessCallbacks
CGDataProviderDirectAccessCallbacks;
CG_EXTERN CGDataProviderRef CGDataProviderCreate(void *info,
const CGDataProviderCallbacks *callbacks);
CG_EXTERN CGDataProviderRef CGDataProviderCreateDirectAccess(void *info,
size_t size, const CGDataProviderDirectAccessCallbacks *callbacks);
#endif /* CGDATAPROVIDER_H_ */
|
<filename>app/containers/RoomPage/constants.js
// Redux action-type constants for the RoomPage container.
export const REACHED_ROOMS = 'REACHED_ROOMS';
// GET_ROOMS requests the room list; the *_SUCCESS / *_ERROR variants
// signal the outcome of that request.
export const GET_ROOMS = 'GET_ROOMS';
export const GET_ROOMS_SUCCESS = 'GET_ROOMS_SUCCESS';
export const GET_ROOMS_ERROR = 'GET_ROOMS_ERROR';
package sample.sadashiv.examplerealmmvp.presenter;
import sample.sadashiv.examplerealmmvp.model.realm.RealmService;
import sample.sadashiv.examplerealmmvp.ui.BaseView;
/**
 * Base class for MVP presenters. Holds the attached view and the shared
 * {@link RealmService} used by subclasses for persistence.
 *
 * @param <T> the concrete view type this presenter drives
 */
public class BasePresenter<T extends BaseView> {
    protected T mView;
    protected RealmService mRealmService;

    /**
     * @param view         the view to attach to this presenter
     * @param realmService the Realm persistence service to use
     */
    public BasePresenter(T view, RealmService realmService) {
        this.mView = view;
        this.mRealmService = realmService;
    }

    /** Detaches the view (sets it to null) so it is not leaked after teardown. */
    public void clearView() {
        this.mView = null;
    }

    /** Closes the underlying Realm instance via the service. */
    public void closeRealm() {
        this.mRealmService.closeRealm();
    }
}
|
/*
* @ModuleName: Cookies
* @Author: <EMAIL>
* @LastEditTime: 2022-01-12 11:49:03
*/
import Cookie from "js-cookie";

// Name of the cookie that carries the auth token.
const token_key = "pro_token";

// Token helpers — thin wrappers around js-cookie, all scoped to token_key.

/** Persist the token; returns the serialized cookie string js-cookie reports, if any. */
export const SetToken = (value: string): string | undefined => {
  return Cookie.set(token_key, value);
};

/** Read the stored token, or undefined when the cookie is absent. */
export const GetToken = (): string | undefined => {
  return Cookie.get(token_key);
};

/** Delete the token cookie. */
export const RemoveToken = (): void => {
  Cookie.remove(token_key);
};
|
def rearrange(nums, order):
    """Return a new list with the items of ``nums`` sorted.

    Args:
        nums: Iterable of mutually comparable items.
        order: ``"ascending"`` sorts low-to-high; any other value sorts
            high-to-low (matches the original keyword contract).

    Returns:
        A new sorted list. Unlike the original implementation, the
        caller's list is no longer mutated in place (``list.sort`` was
        a side effect callers could trip over); only the return value
        carries the result.
    """
    return sorted(nums, reverse=(order != "ascending"))


print(rearrange([5, 2, 6, 4, 8], "descending"))  # Output: [8, 6, 5, 4, 2]
def increasing_words(words):
    """Return the words of length >= 4 whose characters never decrease.

    A word qualifies when every character compares <= to the next one
    (ties allowed) and the word has at least four characters. Order of
    the input is preserved in the result.
    """
    def _nondecreasing(word):
        # all() over adjacent pairs; vacuously True for 0/1-char words.
        return all(a <= b for a, b in zip(word, word[1:]))

    return [w for w in words if len(w) >= 4 and _nondecreasing(w)]


words = ["barn", "caramel", "homestead", "magic", "react"]
# None of these sample words is non-decreasing, so this prints [].
print(increasing_words(words))
/// <summary>
/// Formats the new resolution as a reduced aspect-ratio string,
/// e.g. 1920x1080 becomes "16:9".
/// </summary>
/// <param name="resolutionChangedMessage">Message carrying the new resolution.</param>
/// <returns>The aspect ratio in "W:H" form, reduced by the GCD.</returns>
public string CalculateAspectRatio(ResolutionChangedMessage resolutionChangedMessage)
{
    // NOTE(review): assumes a non-degenerate resolution; a 0x0 input would
    // make the GCD zero and divide by zero — confirm upstream guarantees.
    Vector2<int> newResolution = resolutionChangedMessage._newResolution;
    int gcd = CalculateGCD(newResolution.X, newResolution.Y);
    return string.Format("{0}:{1}", newResolution.X / gcd, newResolution.Y / gcd);
}
/// <summary>
/// Greatest common divisor via the iterative Euclidean algorithm:
/// repeatedly replace (a, b) with (b, a mod b) until the remainder is zero.
/// </summary>
private int CalculateGCD(int a, int b)
{
    for (int remainder = b; remainder != 0; )
    {
        int next = a % remainder;
        a = remainder;
        remainder = next;
    }
    return a;
}
#!/bin/sh
# ffmpeg build driver for MinGW.
# Usage: build.sh [x86|x64|amd64] [quick]
#   x64/amd64 - cross-compile for x86_64 (default is 32-bit x86)
#   quick     - skip distclean/configure and just rebuild incrementally
arch=x86
archdir=Win32
clean_build=true
cross_prefix=
for opt in "$@"
do
   case "$opt" in
   x86)
      # default target; accepted so callers may pass it explicitly
      ;;
   x64 | amd64)
      arch=x86_64
      archdir=x64
      cross_prefix=x86_64-w64-mingw32-
      ;;
   quick)
      clean_build=false
      ;;
   *)
      echo "Unknown Option $opt"
      exit 1
   esac
done
make_dirs() (
   # Create the release (bin_<arch>) and debug (bin_<arch>d) output trees,
   # each with a lib/ subdirectory for import libraries.
   for d in "bin_${archdir}" "bin_${archdir}d"; do
      mkdir -p "${d}/lib"
   done
)
copy_libs() (
   # Copy freshly built *-lav-*.dll files (and their .lib import libraries)
   # into both the release and debug output trees, stripping symbols from
   # the copied DLLs. Runs from inside the ffmpeg source dir, hence ../.
   # install -s --strip-program=${cross_prefix}strip lib*/*-lav-*.dll ../bin_${archdir}
   cp lib*/*-lav-*.dll ../bin_${archdir}
   ${cross_prefix}strip ../bin_${archdir}/*-lav-*.dll
   # -u: only overwrite when the source is newer
   cp -u lib*/*.lib ../bin_${archdir}/lib
   cp lib*/*-lav-*.dll ../bin_${archdir}d
   ${cross_prefix}strip ../bin_${archdir}d/*-lav-*.dll
   cp -u lib*/*.lib ../bin_${archdir}d/lib
)
clean() (
   # Scrub any previous build configuration; output is suppressed because
   # distclean fails harmlessly on a never-configured tree.
   make distclean > /dev/null 2>&1
)
configure() (
   # Assemble the ffmpeg ./configure option set for the selected arch and run it.
   OPTIONS="
    --enable-shared \
    --disable-static \
    --enable-gpl \
    --enable-version3 \
    --enable-w32threads \
    --disable-demuxer=matroska \
    --disable-filters \
    --enable-filter=scale,yadif,w3fdif \
    --disable-protocol=async,cache,concat,httpproxy,icecast,md5,subfile \
    --disable-muxers \
    --enable-muxer=spdif \
    --disable-bsfs \
    --enable-bsf=extract_extradata,vp9_superframe_split \
    --disable-cuda \
    --disable-cuvid \
    --disable-nvenc \
    --enable-libdav1d \
    --enable-libspeex \
    --enable-libopencore-amrnb \
    --enable-libopencore-amrwb \
    --enable-avresample \
    --enable-avisynth \
    --disable-avdevice \
    --disable-postproc \
    --disable-swresample \
    --disable-encoders \
    --disable-devices \
    --disable-programs \
    --disable-debug \
    --disable-doc \
    --disable-schannel \
    --enable-gnutls \
    --enable-gmp \
    --build-suffix=-lav \
    --arch=${arch}"
   # Target Vista+ APIs; -fno-tree-vectorize presumably works around a
   # vectorizer issue — TODO confirm whether still needed on current GCC.
   EXTRA_CFLAGS="-fno-tree-vectorize -D_WIN32_WINNT=0x0600 -DWINVER=0x0600"
   EXTRA_LDFLAGS=""
   PKG_CONFIG_PREFIX_DIR=""
   # FIX: '==' is a bashism; this script runs under /bin/sh, where the POSIX
   # test/[ utility only defines '=' for string comparison.
   if [ "${arch}" = "x86_64" ]; then
      export PKG_CONFIG_PATH="$PKG_CONFIG_PATH:../thirdparty/64/lib/pkgconfig/"
      OPTIONS="${OPTIONS} --enable-cross-compile --cross-prefix=${cross_prefix} --target-os=mingw32 --pkg-config=pkg-config"
      EXTRA_CFLAGS="${EXTRA_CFLAGS} -I../thirdparty/64/include"
      EXTRA_LDFLAGS="${EXTRA_LDFLAGS} -L../thirdparty/64/lib"
      PKG_CONFIG_PREFIX_DIR="--define-variable=prefix=../thirdparty/64"
   else
      export PKG_CONFIG_PATH="$PKG_CONFIG_PATH:../thirdparty/32/lib/pkgconfig/"
      # 32-bit: assume i686 with SSE2 and realign the stack for interop.
      OPTIONS="${OPTIONS} --cpu=i686"
      EXTRA_CFLAGS="${EXTRA_CFLAGS} -I../thirdparty/32/include -mmmx -msse -msse2 -mfpmath=sse -mstackrealign"
      EXTRA_LDFLAGS="${EXTRA_LDFLAGS} -L../thirdparty/32/lib"
      PKG_CONFIG_PREFIX_DIR="--define-variable=prefix=../thirdparty/32"
   fi
   sh configure --extra-ldflags="${EXTRA_LDFLAGS}" --extra-cflags="${EXTRA_CFLAGS}" --pkg-config-flags="--static ${PKG_CONFIG_PREFIX_DIR}" ${OPTIONS}
)
build() (
   # Parallel build; NUMBER_OF_PROCESSORS is provided by the Windows/msys
   # environment.
   make -j$NUMBER_OF_PROCESSORS
)
# Main flow: create output dirs, then (unless "quick") clean + configure,
# and finally build and install the resulting libraries.
make_dirs
echo
echo Building ffmpeg in GCC ${arch} Release config...
echo
cd ffmpeg
if $clean_build ; then
   clean
   ## run configure, redirect to file because of a msys bug
   configure > ffbuild/config.out 2>&1
   CONFIGRETVAL=$?
   ## show configure output
   cat ffbuild/config.out
fi
## Only if configure succeeded, actually build
# (in quick mode CONFIGRETVAL is unset; the leading ! $clean_build
# short-circuits before it is referenced)
if ! $clean_build || [ ${CONFIGRETVAL} -eq 0 ]; then
   build &&
   copy_libs
fi
cd ..
|
package lit.litfx.core;
import java.util.ArrayList;
import javafx.geometry.Point2D;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Unit tests for {@code Algorithms}.
 *
 * @author phillsm1
 */
public class AlgorithmsTest {
    /**
     * Test of simpleBres2D method, of class Algorithms.
     * Rasterizes the steep segment (1,1)-(3,5) and checks the exact
     * point sequence Bresenham is expected to produce.
     */
    @Test
    public void testSimpleBres2D() {
        System.out.println("simpleBres2D");
        // Segment endpoints; dy (4) > dx (2), so y advances every step.
        int x1 = 1;
        int y1 = 1;
        int x2 = 3;
        int y2 = 5;
        ArrayList<Point2D> expResult = new ArrayList<>();
        expResult.add(new Point2D(1, 1));
        expResult.add(new Point2D(1, 2));
        expResult.add(new Point2D(2, 3));
        expResult.add(new Point2D(2, 4));
        expResult.add(new Point2D(3, 5));
        ArrayList<Point2D> result = Algorithms.simpleBres2D(x1, y1, x2, y2);
        assertEquals(expResult, result);
    }
}
|
package uvm
import (
"context"
"errors"
"fmt"
"strconv"
"github.com/Microsoft/hcsshim/internal/guestrequest"
"github.com/Microsoft/hcsshim/internal/log"
"github.com/Microsoft/hcsshim/internal/logfields"
"github.com/Microsoft/hcsshim/internal/requesttype"
hcsschema "github.com/Microsoft/hcsshim/internal/schema2"
"github.com/Microsoft/hcsshim/osversion"
"github.com/sirupsen/logrus"
)
// Plan9Share tracks a single Plan9 share added to a utility VM: the share
// name assigned by AddPlan9 and the mount path inside the guest.
type Plan9Share struct {
	name, uvmPath string
}

// plan9Port is the port the Plan9 (9p) file service is exposed on inside
// the utility VM.
const plan9Port = 564
// AddPlan9 adds a Plan9 share to a utility VM.
//
// hostPath is the host directory to expose and uvmPath the mount point
// inside the (Linux) guest. readOnly maps the share read-only; restrict
// limits guest access to allowedNames only (single-file mappings, which
// require Windows build 18328+).
func (uvm *UtilityVM) AddPlan9(ctx context.Context, hostPath string, uvmPath string, readOnly bool, restrict bool, allowedNames []string) (_ *Plan9Share, err error) {
	op := "uvm::AddPlan9"
	l := log.G(ctx).WithFields(logrus.Fields{
		logfields.UVMID: uvm.id,
		"host-path":     hostPath,
		"uvm-path":      uvmPath,
		"readOnly":      readOnly,
		"restrict":      restrict,
		"allowedNames":  allowedNames,
	})
	l.Debug(op + " - Begin Operation")
	defer func() {
		if err != nil {
			l.Data[logrus.ErrorKey] = err
			l.Error(op + " - End Operation - Error")
		} else {
			l.Debug(op + " - End Operation - Success")
		}
	}()
	if uvm.operatingSystem != "linux" {
		return nil, errNotSupported
	}
	if restrict && osversion.Get().Build < 18328 {
		return nil, errors.New("single-file mappings are not supported on this build of Windows")
	}
	if uvmPath == "" {
		// errors.New instead of fmt.Errorf: no format verbs are used.
		return nil, errors.New("uvmPath must be passed to AddPlan9")
	}
	// TODO: JTERRY75 - These are marked private in the schema. For now use them
	// but when there are public variants we need to switch to them.
	const (
		shareFlagsReadOnly           int32 = 0x00000001
		shareFlagsLinuxMetadata      int32 = 0x00000004
		shareFlagsCaseSensitive      int32 = 0x00000008
		shareFlagsRestrictFileAccess int32 = 0x00000080
	)
	// TODO: JTERRY75 - `shareFlagsCaseSensitive` only works if the Windows
	// `hostPath` supports case sensitivity. We need to detect this case before
	// forwarding this flag in all cases.
	flags := shareFlagsLinuxMetadata // | shareFlagsCaseSensitive
	if readOnly {
		flags |= shareFlagsReadOnly
	}
	if restrict {
		flags |= shareFlagsRestrictFileAccess
	}
	// Each share gets a unique monotonically increasing name; the counter is
	// guarded because shares may be added concurrently.
	uvm.m.Lock()
	index := uvm.plan9Counter
	uvm.plan9Counter++
	uvm.m.Unlock()
	name := strconv.FormatUint(index, 10)
	modification := &hcsschema.ModifySettingRequest{
		RequestType: requesttype.Add,
		Settings: hcsschema.Plan9Share{
			Name:         name,
			AccessName:   name,
			Path:         hostPath,
			Port:         plan9Port,
			Flags:        flags,
			AllowedFiles: allowedNames,
		},
		// Plain literal: the previous fmt.Sprintf had no arguments (S1039).
		ResourcePath: "VirtualMachine/Devices/Plan9/Shares",
		GuestRequest: guestrequest.GuestRequest{
			ResourceType: guestrequest.ResourceTypeMappedDirectory,
			RequestType:  requesttype.Add,
			Settings: guestrequest.LCOWMappedDirectory{
				MountPath: uvmPath,
				ShareName: name,
				Port:      plan9Port,
				ReadOnly:  readOnly,
			},
		},
	}
	if err := uvm.Modify(ctx, modification); err != nil {
		return nil, err
	}
	share := &Plan9Share{name: name, uvmPath: uvmPath}
	return share, nil
}
// RemovePlan9 removes a Plan9 share from a utility VM. Each Plan9 share is ref-counted
// and only actually removed when the ref-count drops to zero.
func (uvm *UtilityVM) RemovePlan9(ctx context.Context, share *Plan9Share) (err error) {
	op := "uvm::RemovePlan9"
	l := log.G(ctx).WithFields(logrus.Fields{
		logfields.UVMID: uvm.id,
		"name":          share.name,
		"uvm-path":      share.uvmPath,
	})
	l.Debug(op + " - Begin Operation")
	defer func() {
		if err != nil {
			l.Data[logrus.ErrorKey] = err
			l.Error(op + " - End Operation - Error")
		} else {
			l.Debug(op + " - End Operation - Success")
		}
	}()
	if uvm.operatingSystem != "linux" {
		return errNotSupported
	}
	modification := &hcsschema.ModifySettingRequest{
		RequestType: requesttype.Remove,
		Settings: hcsschema.Plan9Share{
			Name:       share.name,
			AccessName: share.name,
			Port:       plan9Port,
		},
		// Plain literal: the previous fmt.Sprintf had no arguments (S1039).
		ResourcePath: "VirtualMachine/Devices/Plan9/Shares",
		GuestRequest: guestrequest.GuestRequest{
			ResourceType: guestrequest.ResourceTypeMappedDirectory,
			RequestType:  requesttype.Remove,
			Settings: guestrequest.LCOWMappedDirectory{
				MountPath: share.uvmPath,
				ShareName: share.name,
				Port:      plan9Port,
			},
		},
	}
	if err := uvm.Modify(ctx, modification); err != nil {
		return fmt.Errorf("failed to remove plan9 share %s from %s: %+v: %s", share.name, uvm.id, modification, err)
	}
	return nil
}
|
function err {
   # Print all arguments to stderr, in bold red when COLORIZE=1.
   if test "${COLORIZE}" -eq 1
   then
      tput bold
      tput setaf 1
   fi
   echo "$@" 1>&2
   if test "${COLORIZE}" -eq 1
   then
      tput sgr0
   fi
}
function status {
   # Print all arguments to stdout, in bold blue when COLORIZE=1.
   if test "${COLORIZE}" -eq 1
   then
      tput bold
      tput setaf 4
   fi
   echo "$@"
   if test "${COLORIZE}" -eq 1
   then
      tput sgr0
   fi
}
function status_stage {
   # Print a build-stage banner to stdout, in bold green when COLORIZE=1.
   if test "${COLORIZE}" -eq 1
   then
      tput bold
      tput setaf 2
   fi
   echo "$@"
   if test "${COLORIZE}" -eq 1
   then
      tput sgr0
   fi
}
function debug {
   # Print all arguments (cyan when COLORIZE=1), but only when BUILD_DEBUG
   # is truthy per is_set; otherwise silent.
   if is_set "${BUILD_DEBUG}"
   then
      if test "${COLORIZE}" -eq 1
      then
         tput setaf 6
      fi
      echo "$@"
      if test "${COLORIZE}" -eq 1
      then
         tput sgr0
      fi
   fi
}
function sed_i {
   # Portable in-place sed: BSD sed (Darwin) requires an explicit backup
   # suffix after -i ('' here, meaning no backup); GNU sed must not get one.
   if test "$(uname)" == "Darwin"
   then
      sed -i '' "$@"
      return $?
   else
      sed -i "$@"
      return $?
   fi
}
function is_set {
   # Arguments:
   #   $1 - string value to check its truthiness
   #
   # Return:
   #   0 - is truthy (backwards I know but allows syntax like `if is_set <var>` to work)
   #   1 - is not truthy
   local val
   val="$(tr '[:upper:]' '[:lower:]' <<< "$1")"
   # Anything other than an accepted truthy spelling is falsey.
   case "${val}" in
      1 | t | true | y | yes)
         return 0
         ;;
   esac
   return 1
}
function have_gpg_key {
   # Arguments:
   #   $1 - GPG Key id to check if we have installed
   #
   # Return:
   #   0 - success (we can use this key for signing)
   #   * - failure (key cannot be used)
   # Probes the local secret keyring; output suppressed, only the exit
   # status matters.
   gpg --list-secret-keys $1 > /dev/null 2>&1
   return $?
}
function parse_version {
# Arguments:
# $1 - Path to the top level Consul source
# $2 - boolean value for whether the release version should be parsed from the source
# $3 - boolean whether to use GIT_DESCRIBE and GIT_COMMIT environment variables
# $4 - boolean whether to omit the version part of the version string. (optional)
#
# Return:
# 0 - success (will write the version to stdout)
# * - error (no version output)
#
# Notes:
# If the GOTAGS environment variable is present then it is used to determine which
# version file to use for parsing.
local vfile="${1}/version/version.go"
# ensure the version file exists
if ! test -f "${vfile}"
then
err "Error - File not found: ${vfile}"
return 1
fi
local include_release="$2"
local use_git_env="$3"
local omit_version="$4"
local git_version=""
local git_commit=""
if test -z "${include_release}"
then
include_release=true
fi
if test -z "${use_git_env}"
then
use_git_env=true
fi
if is_set "${use_git_env}"
then
git_version="${GIT_DESCRIBE}"
git_commit="${GIT_COMMIT}"
fi
# Get the main version out of the source file
version_main=$(awk '$1 == "Version" && $2 == "=" { gsub(/"/, "", $3); print $3 }' < ${vfile})
release_main=$(awk '$1 == "VersionPrerelease" && $2 == "=" { gsub(/"/, "", $3); print $3 }' < ${vfile})
# try to determine the version if we have build tags
for tag in "$GOTAGS"
do
for vfile in $(find "${1}/version" -name "version_*.go" 2> /dev/null| sort)
do
if grep -q "// +build $tag" "${vfile}"
then
version_main=$(awk '$1 == "Version" && $2 == "=" { gsub(/"/, "", $3); print $3 }' < ${vfile})
release_main=$(awk '$1 == "VersionPrerelease" && $2 == "=" { gsub(/"/, "", $3); print $3 }' < ${vfile})
fi
done
done
local version="${version_main}"
# override the version from source with the value of the GIT_DESCRIBE env var if present
if test -n "${git_version}"
then
version="${git_version}"
fi
local rel_ver=""
if is_set "${include_release}"
then
# Default to pre-release from the source
rel_ver="${release_main}"
# When no GIT_DESCRIBE env var is present and no release is in the source then we
# are definitely in dev mode
if test -z "${git_version}" -a -z "${rel_ver}" && is_set "${use_git_env}"
then
rel_ver="dev"
fi
# Add the release to the version
if test -n "${rel_ver}" -a -n "${git_commit}"
then
rel_ver="${rel_ver} (${git_commit})"
fi
fi
if test -n "${rel_ver}"
then
if is_set "${omit_version}"
then
echo "${rel_ver}" | tr -d "'"
else
echo "${version}-${rel_ver}" | tr -d "'"
fi
return 0
elif ! is_set "${omit_version}"
then
echo "${version}" | tr -d "'"
return 0
else
return 1
fi
}
function get_version {
# Arguments:
# $1 - Path to the top level Consul source
# $2 - Whether the release version should be parsed from source (optional)
# $3 - Whether to use GIT_DESCRIBE and GIT_COMMIT environment variables
#
# Returns:
# 0 - success (the version is also echoed to stdout)
# 1 - error
#
# Notes:
# If a VERSION environment variable is present it will override any parsing of the version from the source
# In addition to processing the main version.go, version_*.go files will be processed if they have
# a Go build tag that matches the one in the GOTAGS environment variable. This tag processing is
# primitive though and will not match complex build tags in the files with negation etc.
local vers="$VERSION"
if test -z "$vers"
then
# parse the OSS version from version.go
vers="$(parse_version ${1} ${2} ${3})"
fi
if test -z "$vers"
then
return 1
else
echo $vers
return 0
fi
}
function git_branch {
# Arguments:
# $1 - Path to the git repo (optional - assumes pwd is git repo otherwise)
#
# Returns:
# 0 - success
# * - failure
#
# Notes:
# Echos the current branch to stdout when successful
local gdir="$(pwd)"
if test -d "$1"
then
gdir="$1"
fi
pushd "${gdir}" > /dev/null
local ret=0
local head="$(git status -b --porcelain=v2 | awk '{if ($1 == "#" && $2 =="branch.head") { print $3 }}')" || ret=1
popd > /dev/null
test ${ret} -eq 0 && echo "$head"
return ${ret}
}
function git_upstream {
# Arguments:
# $1 - Path to the git repo (optional - assumes pwd is git repo otherwise)
#
# Returns:
# 0 - success
# * - failure
#
# Notes:
# Echos the current upstream branch to stdout when successful
local gdir="$(pwd)"
if test -d "$1"
then
gdir="$1"
fi
pushd "${gdir}" > /dev/null
local ret=0
local head="$(git status -b --porcelain=v2 | awk '{if ($1 == "#" && $2 =="branch.upstream") { print $3 }}')" || ret=1
popd > /dev/null
test ${ret} -eq 0 && echo "$head"
return ${ret}
}
function git_log_summary {
# Arguments:
# $1 - Path to the git repo (optional - assumes pwd is git repo otherwise)
#
# Returns:
# 0 - success
# * - failure
#
local gdir="$(pwd)"
if test -d "$1"
then
gdir="$1"
fi
pushd "${gdir}" > /dev/null
local ret=0
local head=$(git_branch) || ret=1
local upstream=$(git_upstream) || ret=1
local rev_range="${head}...${upstream}"
if test ${ret} -eq 0
then
status "Git Changes:"
git log --pretty=oneline ${rev_range} || ret=1
fi
return $ret
}
function git_diff {
   # Arguments:
   #   $1 - Path to the git repo (optional - assumes pwd is git repo otherwise)
   #   $2 .. $N - Optional path specification
   #
   # Returns:
   #   0 - success
   #   * - failure
   #
   local gdir="$(pwd)"
   if test -d "$1"
   then
      gdir="$1"
   fi
   # NOTE(review): shift drops $1 even when it was not a directory, so the
   # first pathspec is lost in that case — confirm callers always pass a dir.
   shift
   pushd "${gdir}" > /dev/null
   local ret=0
   local head=$(git_branch) || ret=1
   local upstream=$(git_upstream) || ret=1
   if test ${ret} -eq 0
   then
      status "Git Diff - Paths: $@"
      # FIX: was ${HEAD} (an undefined/environment variable) instead of the
      # local ${head} computed above, producing a wrong or empty diff base.
      git diff ${head} ${upstream} -- "$@" || ret=1
   fi
   return $ret
}
function normalize_git_url {
url="${1#https://}"
url="${url#git@}"
url="${url%.git}"
url="$(sed ${SED_EXT} -e 's/([^\/:]*)[:\/](.*)/\1:\2/' <<< "${url}")"
echo "$url"
return 0
}
function git_remote_url {
# Arguments:
# $1 - Path to the top level Consul source
# $2 - Remote name
#
# Returns:
# 0 - success
# * - error
#
# Note:
# The push url for the git remote will be echoed to stdout
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory. git_remote_url must be called with the path to the top level source as the first argument'"
return 1
fi
if test -z "$2"
then
err "ERROR: git_remote_url must be called with a second argument that is the name of the remote"
return 1
fi
local ret=0
pushd "$1" > /dev/null
local url=$(git remote get-url --push $2 2>&1) || ret=1
popd > /dev/null
if test "${ret}" -eq 0
then
echo "${url}"
return 0
fi
}
function find_git_remote {
# Arguments:
# $1 - Path to the top level Consul source
#
# Returns:
# 0 - success
# * - error
#
# Note:
# The remote name to use for publishing will be echoed to stdout upon success
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory. find_git_remote must be called with the path to the top level source as the first argument'"
return 1
fi
need_url=$(normalize_git_url "${PUBLISH_GIT_HOST}:${PUBLISH_GIT_REPO}")
debug "Required normalized remote: ${need_url}"
pushd "$1" > /dev/null
local ret=1
for remote in $(git remote)
do
url=$(git remote get-url --push ${remote}) || continue
url=$(normalize_git_url "${url}")
debug "Testing Remote: ${remote}: ${url}"
if test "${url}" == "${need_url}"
then
echo "${remote}"
ret=0
break
fi
done
popd > /dev/null
return ${ret}
}
function git_remote_not_blacklisted {
# Arguments:
# $1 - path to the repo
# $2 - the remote name
#
# Returns:
# 0 - not blacklisted
# * - blacklisted
return 0
}
function is_git_clean {
# Arguments:
# $1 - Path to git repo
# $2 - boolean whether the git status should be output when not clean
#
# Returns:
# 0 - success
# * - error
#
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory. is_git_clean must be called with the path to a git repo as the first argument'"
return 1
fi
local output_status="$2"
pushd "${1}" > /dev/null
local ret=0
test -z "$(git status --porcelain=v2 2> /dev/null)" || ret=1
if is_set "${output_status}" && test "$ret" -ne 0
then
err "Git repo is not clean"
# --porcelain=v1 is the same as --short except uncolorized
git status --porcelain=v1
fi
popd > /dev/null
return ${ret}
}
function update_git_env {
# Arguments:
# $1 - Path to git repo
#
# Returns:
# 0 - success
# * - error
#
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory. is_git_clean must be called with the path to a git repo as the first argument'"
return 1
fi
export GIT_COMMIT=$(git rev-parse --short HEAD)
export GIT_DIRTY=$(test -n "$(git status --porcelain)" && echo "+CHANGES")
export GIT_DESCRIBE=$(git describe --tags --always)
export GIT_IMPORT=github.com/hashicorp/consul/version
export GOLDFLAGS="-X ${GIT_IMPORT}.GitCommit=${GIT_COMMIT}${GIT_DIRTY} -X ${GIT_IMPORT}.GitDescribe=${GIT_DESCRIBE}"
return 0
}
function git_push_ref {
   # Arguments:
   #   $1 - Path to the top level Consul source
   #   $2 - Git ref (optional)
   #   $3 - remote (optional - if not specified we will try to determine it)
   #
   # Returns:
   #   0 - success
   #   * - error
   if ! test -d "$1"
   then
      err "ERROR: '$1' is not a directory. push_git_release must be called with the path to the top level source as the first argument'"
      return 1
   fi
   local sdir="$1"
   local ret=0
   local remote="$3"
   # find the correct remote corresponding to the desired repo (basically prevent pushing enterprise to oss or oss to enterprise)
   if test -z "${remote}"
   then
      local remote=$(find_git_remote "${sdir}") || return 1
      status "Using git remote: ${remote}"
   fi
   local ref=""
   pushd "${sdir}" > /dev/null
   if test -z "$2"
   then
      # If no git ref was provided we lookup the current local branch and its tracking branch
      # It must have a tracking upstream and it must be tracking the sanctioned git remote
      local head=$(git_branch "${sdir}") || return 1
      local upstream=$(git_upstream "${sdir}") || return 1
      # upstream branch for this branch does not track the remote we need to push to
      # basically this checks that the upstream (could be something like origin/master) references the correct remote
      # if it doesn't then the string modification won't apply and the var will remain unchanged and equal to itself.
      if test "${upstream#${remote}/}" == "${upstream}"
      then
         err "ERROR: Upstream branch '${upstream}' does not track the correct remote '${remote}' - cannot push"
         ret=1
      fi
      ref="refs/heads/${head}"
   else
      # A git ref was provided - get the full ref and make sure it isn't ambiguous and also to
      # be able to determine whether its a branch or tag we are pushing
      ref_out=$(git rev-parse --symbolic-full-name "$2" --)
      # -ne 2 because it should have the ref on one line followed by a line with '--'
      if test "$(wc -l <<< "${ref_out}")" -ne 2
      then
         err "ERROR: Git ref '$2' is ambiguous"
         debug "${ref_out}"
         ret=1
      else
         ref=$(head -n 1 <<< "${ref_out}")
      fi
   fi
   if test ${ret} -eq 0
   then
      case "${ref}" in
         refs/tags/*)
            status "Pushing tag ${ref#refs/tags/} to ${remote}"
            ;;
         refs/heads/*)
            # FIX: previously stripped the refs/tags/ prefix here, which never
            # matches a branch ref, so the full refs/heads/... string was printed.
            status "Pushing local branch ${ref#refs/heads/} to ${remote}"
            ;;
         *)
            err "ERROR: git_push_ref func is refusing to push ref that isn't a branch or tag"
            return 1
      esac
      if ! git push "${remote}" "${ref}"
      then
         err "ERROR: Failed to push ${ref} to remote: ${remote}"
         ret=1
      fi
   fi
   popd > /dev/null
   return $ret
}
function update_version {
# Arguments:
# $1 - Path to the version file
# $2 - Version string
# $3 - PreRelease version (if unset will become an empty string)
#
# Returns:
# 0 - success
# * - error
if ! test -f "$1"
then
err "ERROR: '$1' is not a regular file. update_version must be called with the path to a go version file"
return 1
fi
if test -z "$2"
then
err "ERROR: The version specified was empty"
return 1
fi
local vfile="$1"
local version="$2"
local prerelease="$3"
sed_i ${SED_EXT} -e "s/(Version[[:space:]]*=[[:space:]]*)\"[^\"]*\"/\1\"${version}\"/g" -e "s/(VersionPrerelease[[:space:]]*=[[:space:]]*)\"[^\"]*\"/\1\"${prerelease}\"/g" "${vfile}"
return $?
}
function set_changelog_version {
# Arguments:
# $1 - Path to top level Consul source
# $2 - Version to put into the Changelog
# $3 - Release Date
#
# Returns:
# 0 - success
# * - error
local changelog="${1}/CHANGELOG.md"
local version="$2"
local rel_date="$3"
if ! test -f "${changelog}"
then
err "ERROR: File not found: ${changelog}"
return 1
fi
if test -z "${version}"
then
err "ERROR: Must specify a version to put into the changelog"
return 1
fi
if test -z "${rel_date}"
then
rel_date=$(date +"%B %d, %Y")
fi
sed_i ${SED_EXT} -e "s/## UNRELEASED/## ${version} (${rel_date})/" "${changelog}"
return $?
}
function unset_changelog_version {
# Arguments:
# $1 - Path to top level Consul source
#
# Returns:
# 0 - success
# * - error
local changelog="${1}/CHANGELOG.md"
if ! test -f "${changelog}"
then
err "ERROR: File not found: ${changelog}"
return 1
fi
sed_i ${SED_EXT} -e "1 s/^## [0-9]+\.[0-9]+\.[0-9]+ \([^)]*\)/## UNRELEASED/" "${changelog}"
return $?
}
function add_unreleased_to_changelog {
# Arguments:
# $1 - Path to top level Consul source
#
# Returns:
# 0 - success
# * - error
local changelog="${1}/CHANGELOG.md"
if ! test -f "${changelog}"
then
err "ERROR: File not found: ${changelog}"
return 1
fi
# Check if we are already in unreleased mode
if head -n 1 "${changelog}" | grep -q -c UNRELEASED
then
return 0
fi
local tfile="$(mktemp) -t "CHANGELOG.md_")"
(
echo -e "## UNRELEASED\n" > "${tfile}" &&
cat "${changelog}" >> "${tfile}" &&
cp "${tfile}" "${changelog}"
)
local ret=$?
rm "${tfile}"
return $ret
}
function set_release_mode {
# Arguments:
# $1 - Path to top level Consul source
# $2 - The version of the release
# $3 - The release date
# $4 - The pre-release version
#
#
# Returns:
# 0 - success
# * - error
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory. set_release_mode must be called with the path to a git repo as the first argument"
return 1
fi
if test -z "$2"
then
err "ERROR: The version specified was empty"
return 1
fi
local sdir="$1"
local vers="$2"
local rel_date="$(date +"%B %d, %Y")"
if test -n "$3"
then
rel_date="$3"
fi
local changelog_vers="${vers}"
if test -n "$4"
then
changelog_vers="${vers}-$4"
fi
status_stage "==> Updating CHANGELOG.md with release info: ${changelog_vers} (${rel_date})"
set_changelog_version "${sdir}" "${changelog_vers}" "${rel_date}" || return 1
status_stage "==> Updating version/version.go"
if ! update_version "${sdir}/version/version.go" "${vers}" "$4"
then
unset_changelog_version "${sdir}"
return 1
fi
return 0
}
function set_dev_mode {
# Arguments:
# $1 - Path to top level Consul source
#
# Returns:
# 0 - success
# * - error
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory. set_dev_mode must be called with the path to a git repo as the first argument'"
return 1
fi
local sdir="$1"
local vers="$(parse_version "${sdir}" false false)"
status_stage "==> Setting VersionPreRelease back to 'dev'"
update_version "${sdir}/version/version.go" "${vers}" dev || return 1
status_stage "==> Adding new UNRELEASED label in CHANGELOG.md"
add_unreleased_to_changelog "${sdir}" || return 1
return 0
}
function git_staging_empty {
# Arguments:
# $1 - Path to git repo
#
# Returns:
# 0 - success (nothing staged)
# * - error (staged files)
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory. commit_dev_mode must be called with the path to a git repo as the first argument'"
return 1
fi
pushd "$1" > /dev/null
declare -i ret=0
for status in $(git status --porcelain=v2 | awk '{print $2}' | cut -b 1)
do
if test "${status}" != "."
then
ret=1
break
fi
done
popd > /dev/null
return ${ret}
}
function commit_dev_mode {
# Arguments:
# $1 - Path to top level Consul source
#
# Returns:
# 0 - success
# * - error
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory. commit_dev_mode must be called with the path to a git repo as the first argument'"
return 1
fi
status "Checking for previously staged files"
git_staging_empty "$1" || return 1
declare -i ret=0
pushd "$1" > /dev/null
status "Staging CHANGELOG.md and version_*.go files"
git add CHANGELOG.md && git add version/version*.go
ret=$?
if test ${ret} -eq 0
then
status "Adding Commit"
git commit -m "Putting source back into Dev Mode"
ret=$?
fi
popd >/dev/null
return ${ret}
}
function gpg_detach_sign {
# Arguments:
# $1 - File to sign
# $2 - Alternative GPG key to use for signing
#
# Returns:
# 0 - success
# * - failure
# determine whether the gpg key to use is being overridden
local gpg_key=${HASHICORP_GPG_KEY}
if test -n "$2"
then
gpg_key=$2
fi
gpg --default-key "${gpg_key}" --detach-sig --yes -v "$1"
return $?
}
function shasum_directory {
# Arguments:
# $1 - Path to directory containing the files to shasum
# $2 - File to output sha sums to
#
# Returns:
# 0 - success
# * - failure
if ! test -d "$1"
then
err "ERROR: '$1' is not a directory and shasum_release requires passing a directory as the first argument"
return 1
fi
if test -z "$2"
then
err "ERROR: shasum_release requires a second argument to be the filename to output the shasums to but none was given"
return 1
fi
pushd $1 > /dev/null
shasum -a256 * > "$2"
ret=$?
popd >/dev/null
return $ret
}
function ui_version {
   # Arguments:
   #   $1 - path to index.html
   #
   # Returns:
   #   0 - success
   #   * -failure
   #
   # Notes: echoes the version to stdout upon success
   #
   # Extracts the Consul UI version embedded URL-encoded in the built
   # index.html (the ...CONSUL_VERSION%22%3A%22<version>%22%2C%22... blob).
   if ! test -f "$1"
   then
      err "ERROR: No such file: '$1'"
      return 1
   fi
   # SED_EXT is expanded unquoted on purpose: it is expected to hold the
   # flag enabling extended regexes for the local sed (-E or -r) —
   # TODO confirm where it is set.
   local ui_version=$(sed -n ${SED_EXT} -e 's/.*CONSUL_VERSION%22%3A%22([^%]*)%22%2C%22.*/\1/p' < "$1") || return 1
   echo "$ui_version"
   return 0
}
|
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build phase script.
# NOTE(review): the script declares #!/bin/sh but uses bash-only features
# below (arrays, [[ ]]); it relies on /bin/sh being bash-compatible, as on
# macOS — confirm for the build host.
set -e
set -u
set -o pipefail

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1 - framework path, absolute or relative to BUILT_PRODUCTS_DIR.
# Copies the framework into the target's Frameworks folder, strips
# architectures not being built, re-signs it, and (Xcode < 7 only) embeds
# the Swift runtime dylibs it links against.
install_framework()
{
  # Resolve the source: prefer the path under BUILT_PRODUCTS_DIR, then its
  # basename there, then the literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's main binary (fall back to a bare dylib layout).
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1 - path to a .framework.dSYM bundle.
# Copies it into DERIVED_FILES_DIR, strips architectures not being built,
# then moves the result (or leaves a placeholder) in DWARF_DSYM_FOLDER_PATH.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into the target's temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    # The DWARF binary lives at a fixed path inside the dSYM bundle.
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # STRIP_BINARY_RETVAL == 1 means strip_invalid_archs processed the file.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# $1 - path to the bundle or binary to sign. Honors the Xcode code-signing
# build settings; when COCOAPODS_PARALLEL_CODE_SIGN is "true" the signer is
# launched in the background (the script's tail waits for all of them).
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Append '&' so the eval below launches the signer in the background.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1 - binary to strip in place.
# Removes every architecture not listed in ARCHS. Sets STRIP_BINARY_RETVAL
# to 1 when the binary was processed, or 0 when it shares no architectures
# with the current build (the binary is then left untouched).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Install the vendored framework for the active build configuration.
# NOTE(review): both branches install the same framework — presumably the
# generator emits one branch per configuration; other configurations get
# nothing.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/UtilitiesInSwift/UtilitiesInSwift.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/UtilitiesInSwift/UtilitiesInSwift.framework"
fi
# When signing ran in the background (parallel mode), wait for all signers.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<filename>src/js/main.js
import PlayerFactory from './components/player';
import Board from './components/board';
import Game from './components/game';
// Marker symbols for the two players.
const xSym = 'X';
const oSym = 'O';
// Root element the whole app renders into.
const container = document.querySelector('.container');
// Builds the two Player objects from the submitted names and registers
// them with the Game module.
const setPlayers = (players) => {
  const [nameForX, nameForO] = players;
  Game.setPlayers([PlayerFactory(nameForX, xSym), PlayerFactory(nameForO, oSym)]);
};
// Maps a player symbol to the Material-icons markup rendered into a cell.
// Returns an empty string for unknown symbols so callers never inject the
// text "undefined" into the DOM (also makes returns consistent, removing
// the need for the eslint-disable).
const getSymbol = (symbol) => {
  if (symbol === 'O') return '<i class="material-icons symbol">radio_button_unchecked</i>';
  if (symbol === 'X') return '<i class="material-icons symbol">clear</i>';
  return '';
};
// Detaches each of the given child nodes from `parent`.
const removeHelper = (parent, children) => {
  for (const child of children) {
    parent.removeChild(child);
  }
};
// Starts (or restarts) a round: registers the players unless this is a
// replay, wires the board into Game, and attaches click handlers to all
// cells.
const play = (players, replay = false) => {
  const cells = document.querySelectorAll('.cell');
  if (replay === false) {
    setPlayers(players);
  }
  Game.setBoard(Board);
  // eslint-disable-next-line no-use-before-define
  changeCells(cells);
};
// Builds the 3x3 board: appends nine cell <div>s (ids "0".."8") to #grid
// and attaches the grid to the page container.
const createGridDOM = () => {
  const root = document.querySelector('.container');
  // Fixed: was `const cells = 8` with a `<=` loop — the count now states
  // the real number of cells instead of relying on an off-by-one idiom.
  const totalCells = 9;
  const grid = document.querySelector('#grid');
  grid.classList.add('h-600');
  for (let index = 0; index < totalCells; index += 1) {
    const cell = document.createElement('div');
    cell.id = index;
    cell.className = 'cell col s4';
    grid.appendChild(cell);
  }
  root.classList.add('pt-4');
  root.appendChild(grid);
};
// Appends the (initially empty) turn-indicator element to the grid.
const drawPlayerTurn = (grid) => {
  const turnIndicator = document.createElement('div');
  turnIndicator.id = 'player_turn';
  grid.appendChild(turnIndicator);
};
// Restyles #grid for gameplay: adds the turn indicator, drops the
// start-screen classes, removes leftover start-screen children and tags
// the edge cells for CSS borders.
const drawGrid = (children = null) => {
  const grid = document.querySelector('#grid');
  drawPlayerTurn(grid);
  const cells = document.querySelectorAll('.cell');
  grid.classList.remove(
    'teal',
    'card-panel',
    'z-depth-2',
    'player-wrapper',
    'flex-col',
  );
  if (children) {
    removeHelper(grid, children);
  }
  // Fixed inconsistency: cells 6 and 8 were tagged with separate manual
  // statements — every left-column cell gets 'first', right-column 'last'.
  [0, 3, 6].forEach((i) => cells[i].classList.add('first'));
  [2, 5, 8].forEach((i) => cells[i].classList.add('last'));
};
// Attaches the submit handler to the name form: reads the two player
// names, swaps the form for the game grid and starts the round.
const getPlayers = (form) => {
  form.addEventListener('submit', (e) => {
    e.preventDefault();
    // The first two form elements are the name inputs for X and O.
    const players = [...form.elements].slice(0, 2).map((elem) => elem.value);
    createGridDOM();
    const createdForm = document.querySelector('form');
    const title = document.querySelector('#title');
    drawGrid([createdForm, title]);
    play(players);
  });
};
// Builds the player-name form (two text inputs plus a PLAY submit button)
// lazily, when the passed Start button is clicked. Returns the form
// element immediately so a submit handler can be attached before the
// click ever happens.
const createPlayerForm = (eventListener) => {
  const mainRow = document.querySelector('#grid');
  const title = document.querySelector('#title');
  const form = document.createElement('form');
  const subRow = document.createElement('div');
  const inputX = document.createElement('div');
  const inputO = document.createElement('div');
  const input = document.createElement('input');
  eventListener.addEventListener('click', () => {
    mainRow.classList.add('player-wrapper');
    // The Start button removes itself once clicked.
    mainRow.removeChild(eventListener);
    form.className = 'col s12';
    title.textContent = 'Enter your names:';
    subRow.className = 'row';
    inputX.className = 'input-field col s12';
    inputO.className = 'input-field col s12';
    input.type = 'submit';
    input.className = 'btn teal';
    input.id = 'submit';
    input.value = 'PLAY';
    inputX.innerHTML = '<input id="x_name" placeholder="Name of Player X" value="Player X" type="text" class="validate">';
    inputO.innerHTML = '<input id="o_name" placeholder="Name of Player O" value="Player O" type="text" class="validate">';
    subRow.appendChild(inputX);
    subRow.appendChild(inputO);
    subRow.appendChild(input);
    form.appendChild(subRow);
    mainRow.appendChild(form);
  });
  return form;
};
// Entry point for the Start button: builds the name form and hooks up the
// submit handler that launches the game.
const getStarted = (start) => {
  getPlayers(createPlayerForm(start));
};
// Builds the welcome screen (title + Start button) inside a new #grid row
// and hands the Start button to getStarted for event wiring.
const startScreen = (root) => {
  const row = document.createElement('div');
  const h2 = document.createElement('h2');
  const startBtn = document.createElement('button');
  row.id = 'grid';
  // One classList.add call instead of three consecutive ones.
  row.classList.add('row', 'card-panel', 'teal', 'lighten-4', 'z-depth-2', 'h-600', 'flex-col');
  h2.innerHTML = 'Welcome,<br><br>Click below to start';
  h2.id = 'title';
  h2.className = 'white-txt';
  startBtn.className = 'btn';
  startBtn.innerHTML = 'Start';
  startBtn.id = 'start';
  row.appendChild(h2);
  row.appendChild(startBtn);
  root.appendChild(row);
  getStarted(startBtn);
};
// Renders the initial start screen into the given root element.
const render = (root) => startScreen(root);
// Tears down the current grid and rebuilds the start screen from scratch.
// Fixed: the local `container` query shadowed the module-level constant
// that already holds the same element.
const reset = () => {
  const grid = document.querySelector('#grid');
  removeHelper(container, [grid]);
  render(container);
};
// Appends the "play again?" prompt (question plus Yes/No buttons) to the
// grid. Returns the elements a Yes answer must remove again: the result
// heading, the question and the button wrapper.
const createAskRematchDom = (grid) => {
  // The result heading rendered by displayResult just before this runs.
  const resultMsg = document.querySelector('h2');
  const question = document.createElement('h3');
  question.innerText = 'Would you like to play again?';
  const input = document.createElement('input');
  const input2 = document.createElement('input');
  const btnDiv = document.createElement('div');
  btnDiv.className = 'btn-div';
  input.type = 'submit';
  input.id = 'yes';
  input.value = 'Yes';
  input.className = 'play-btn btn';
  input2.type = 'submit';
  input2.id = 'no';
  input2.value = 'No';
  input2.className = 'play-btn btn';
  grid.appendChild(question);
  btnDiv.appendChild(input);
  btnDiv.appendChild(input2);
  grid.appendChild(btnDiv);
  return [resultMsg, question, btnDiv];
};
// Rebuilds the board DOM and starts a new round, keeping the players
// already registered with Game (play's replay flag skips setPlayers).
const rematch = () => {
  createGridDOM();
  drawGrid();
  play([], true);
};
// Shows the rematch prompt and wires its buttons: Yes clears the prompt
// and restarts with the same players, No rebuilds the start screen.
const askRematch = () => {
  const grid = document.querySelector('#grid');
  const createdElements = createAskRematchDom(grid);
  const yes = document.querySelector('#yes');
  const no = document.querySelector('#no');
  yes.addEventListener('click', () => {
    removeHelper(grid, createdElements);
    rematch();
  });
  no.addEventListener('click', () => {
    reset();
  });
};
// Shows the end-of-game banner: removes the turn indicator, restyles the
// grid and prints either the winner's name or a draw message.
const displayResult = (winner = null) => {
  const grid = document.querySelector('#grid');
  grid.removeChild(document.querySelector('#player_turn'));
  grid.classList.add('teal', 'z-depth-2', 'h-600', 'flex-col');
  const banner = document.createElement('h2');
  banner.innerHTML = winner ? `The winner is ${winner}` : "It's a draw!";
  banner.classList.add('white-txt');
  grid.appendChild(banner);
};
// Writes the "It is <name>'s turn." heading into the turn-indicator div.
const setPlayerTurn = (div, player) => {
  div.innerHTML = '<h4>It is ' + player + "'s turn.</h4>";
};
// Detaches every cell element from the grid (used before showing results).
const removeGrid = () => {
  const grid = document.querySelector('#grid');
  document.querySelectorAll('.cell').forEach((cell) => grid.removeChild(cell));
};
// Wires a click handler onto every cell. On each move the handler paints
// the current player's symbol, checks for game over, shows the result and
// rematch prompt, or passes the turn. A classic function expression is
// used so `this` is the clicked cell element.
const changeCells = (cells) => {
  const playerTurnDiv = document.querySelector('#player_turn');
  setPlayerTurn(playerTurnDiv, Game.getCurrentPlayer().name);
  cells.forEach((cell) => cell.addEventListener('click', function () {
    // Assumes setCell is truthy only for a legal move on a free cell —
    // confirm against the board module.
    if (Game.getBoard().setCell(this.id, Game.getCurrentPlayer().symbol)) {
      this.innerHTML = getSymbol(Game.getCurrentPlayer().symbol);
      const gameOver = Game.gameOver();
      if (gameOver) {
        removeGrid();
        // 'W' appears to signal a win (any other truthy value: a draw) —
        // confirm against the game module.
        if (gameOver === 'W') {
          displayResult(Game.getCurrentPlayer().name);
        } else {
          displayResult();
        }
        Game.getBoard().reset();
        askRematch();
      }
      if (!gameOver) {
        Game.switchPlayers();
        setPlayerTurn(playerTurnDiv, Game.getCurrentPlayer().name);
      }
    }
  }));
};
render(container);
|
<gh_stars>1-10
/**
* Copyright © 2016-2021 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ciat.bim.server.telemetry.sub;
import lombok.Getter;
import org.jeecg.modules.alarm.entity.Alarm;
/**
 * Payload describing a change to an alarm subscription: either an alarm
 * create/update/delete event or a subscription-level error. Exactly one
 * of {@code alarm} / {@code errorCode}+{@code errorMsg} is populated,
 * depending on which constructor was used.
 */
public class AlarmSubscriptionUpdate {
    @Getter
    private int subscriptionId;   // id of the subscription this update belongs to
    @Getter
    private int errorCode;        // code from SubscriptionErrorCode; 0 for alarm updates
    @Getter
    private String errorMsg;      // human-readable error text; null for alarm updates
    @Getter
    private Alarm alarm;          // the alarm payload; null for error updates
    @Getter
    private boolean alarmDeleted; // true when this update signals the alarm's deletion

    /** Alarm created/updated (not deleted). */
    public AlarmSubscriptionUpdate(int subscriptionId, Alarm alarm) {
        this(subscriptionId, alarm, false);
    }

    /** Alarm event; {@code alarmDeleted} marks a deletion. */
    public AlarmSubscriptionUpdate(int subscriptionId, Alarm alarm, boolean alarmDeleted) {
        super();
        this.subscriptionId = subscriptionId;
        this.alarm = alarm;
        this.alarmDeleted = alarmDeleted;
    }

    /** Error update using the error code's default message. */
    public AlarmSubscriptionUpdate(int subscriptionId, SubscriptionErrorCode errorCode) {
        this(subscriptionId, errorCode, null);
    }

    /** Error update; {@code errorMsg} overrides the code's default message. */
    public AlarmSubscriptionUpdate(int subscriptionId, SubscriptionErrorCode errorCode, String errorMsg) {
        super();
        this.subscriptionId = subscriptionId;
        this.errorCode = errorCode.getCode();
        this.errorMsg = errorMsg != null ? errorMsg : errorCode.getDefaultMsg();
    }

    // @Override
    // public String toString() {
    // return "AlarmUpdate [subscriptionId=" + subscriptionId + ", errorCode=" + errorCode + ", errorMsg=" + errorMsg + ", alarm="
    // + alarm + "]";
    // }
}
|
import argparse
import shutil
import os
import glob
def main():
    """Parse command-line options and dispatch the requested file operations.

    Most file-type options are placeholders; only ``--glob`` currently has
    behavior: it deletes everything matching the given glob expression.
    """
    parser = argparse.ArgumentParser(description='File manipulation tool')
    group = parser.add_argument_group('file types', 'Options affecting different file types.')
    group.add_argument('--fastqbam', help="Work on fastq-fastq.bam files", default=False, action="store_true")
    group.add_argument('--pileup', help="Work on pileup files", default=False, action="store_true")
    group.add_argument('--split', help="Work on *-split directories", default=False, action="store_true")
    group.add_argument('--tmp', help="Work on staging (tx) and tmp directories", default=False, action="store_true")
    group.add_argument('--txt', help="Work on txt files", default=False, action="store_true")
    group.add_argument('--glob', help="Work on freetext glob expression. CAUTION: using wildcard expressions will remove *everything* that matches.", default=None, action="store")
    transfer_group = parser.add_argument_group('file transfer', 'Options affecting file transfer operations.')
    transfer_group.add_argument('--move', help="Transfer file with move", default=False, action="store_true")
    args = parser.parse_args()
    if args.fastqbam:
        # Perform operations on fastq-fastq.bam files
        pass  # Placeholder for file operations
    if args.pileup:
        # Perform operations on pileup files
        pass  # Placeholder for file operations
    if args.split:
        # Perform operations on *-split directories
        pass  # Placeholder for file operations
    if args.tmp:
        # Perform operations on staging (tx) and tmp directories
        pass  # Placeholder for file operations
    if args.txt:
        # Perform operations on txt files
        pass  # Placeholder for file operations
    if args.glob:
        # Remove everything that matches the glob expression. Directories
        # are removed recursively: os.remove() raises on a directory, which
        # made the original crash on any matched directory.
        for path in glob.glob(args.glob):
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
    if args.move:
        # Transfer files with move
        pass  # Placeholder for file transfer operations


if __name__ == "__main__":
    main()
#!/bin/bash
# Container entrypoint: lock down /app, expose the sandboxed "zeus" binary
# over TCP and keep the container alive. Presumably a CTF challenge setup
# (connections run as the unprivileged "ctf" user) — confirm.
cd /app
chown -R root:root /app
# Read+execute only, so the service user cannot modify challenge files.
chmod -R 555 /app
# Fork one sandboxed ./zeus per incoming connection, running as user "ctf".
socat TCP4-LISTEN:8000,reuseaddr,fork,su=ctf EXEC:"./zeus_sandbox ./zeus" > /dev/null 2>&1 &
# Cron-driven health check against the local service every minute.
(crontab -l ; echo "* * * * * /bin/bash -c '/sanity.sh 127.0.0.1 8000 5'") | crontab
service cron start
# Block forever so the container does not exit.
tail -f /dev/null
|
#include <cmath>
// Returns the Euclidean norm of the gradient (sumx, sumy, sumz).
//
// When `normalize` is true, reports the magnitude the gradient would have
// after normalization: 1.0 for any non-zero gradient, 0.0 otherwise.
// (The original divided its pass-by-value copies of the components before
// returning 1.0 — dead work invisible to the caller, removed here.)
double calculateGradientNorm(double sumx, double sumy, double sumz, bool normalize) {
    const double gradientNorm = std::sqrt(sumx * sumx + sumy * sumy + sumz * sumz);
    if (normalize && gradientNorm != 0) {
        return 1.0; // magnitude of a normalized non-zero vector
    }
    return gradientNorm;
}
<gh_stars>1-10
package api
import (
"cf/configuration"
"cf/net"
)
// FakeAuthenticationRepository is a hand-rolled test double for the CLI's
// authentication repository: tests pre-load the canned fields and later
// inspect AuthenticateArgs to see what was passed in.
type FakeAuthenticationRepository struct {
	Config configuration.ReadWriter

	// AuthenticateArgs records the credentials from the last Authenticate call.
	AuthenticateArgs struct {
		Credentials map[string]string
	}

	// GetLoginPromptsReturns holds the canned values GetLoginPrompts returns.
	GetLoginPromptsReturns struct {
		ApiResponse net.ApiResponse
		Prompts     map[string]configuration.AuthPrompt
	}

	AuthError    bool   // when true, Authenticate fails with a canned error
	AccessToken  string // access token stored into Config on success
	RefreshToken string // refresh token stored into Config on success
}
// Authenticate records the credentials it was called with, then either
// returns a canned failure (when AuthError is set) or stores the
// configured access/refresh tokens into Config.
func (auth *FakeAuthenticationRepository) Authenticate(credentials map[string]string) (apiResponse net.ApiResponse) {
	auth.AuthenticateArgs.Credentials = credentials
	if auth.AuthError {
		apiResponse = net.NewApiResponseWithMessage("Error authenticating.")
		return
	}
	if auth.AccessToken == "" {
		// NOTE(review): "<PASSWORD>" looks like a redaction artifact from
		// data scrubbing — confirm the intended default token literal.
		auth.AccessToken = "<PASSWORD>access_token"
	}
	auth.Config.SetAccessToken(auth.AccessToken)
	auth.Config.SetRefreshToken(auth.RefreshToken)
	return
}
// RefreshAuthToken is a no-op on the fake: both named results are
// returned as their zero values.
func (auth *FakeAuthenticationRepository) RefreshAuthToken() (updatedToken string, apiResponse net.ApiResponse) {
	return
}
// GetLoginPrompts returns the canned prompts and API response that were
// configured on the fake via GetLoginPromptsReturns.
func (auth *FakeAuthenticationRepository) GetLoginPrompts() (prompts map[string]configuration.AuthPrompt, apiResponse net.ApiResponse) {
	canned := auth.GetLoginPromptsReturns
	return canned.Prompts, canned.ApiResponse
}
|
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Reads a Java source file and prints its package name plus the number of
 * whole-word occurrences of the keywords 'extends', 'implements' and
 * 'import'.
 */
public class SourceCodeAnalyzer {

    /** Matches a package declaration and captures the package name. */
    private static final Pattern PACKAGE_PATTERN =
            Pattern.compile("^package\\s+([\\w.]+)\\s*;");

    public static void main(String[] args) {
        String fileName = "path_to_source_file.java"; // Replace with the actual file path
        analyzeSourceCode(fileName);
    }

    /**
     * Analyzes the given source file and prints the collected statistics.
     * IOExceptions (including a missing file) are reported to stderr.
     */
    public static void analyzeSourceCode(String fileName) {
        try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
            String line;
            String packageName = "";
            int extendsCount = 0;
            int implementsCount = 0;
            int importCount = 0;
            while ((line = br.readLine()) != null) {
                // Regex parsing replaces substring(8, indexOf(';')), which
                // threw StringIndexOutOfBoundsException on a package line
                // without a ';' and assumed exactly one space after "package".
                Matcher pkg = PACKAGE_PATTERN.matcher(line.trim());
                if (pkg.find()) {
                    packageName = pkg.group(1);
                } else {
                    extendsCount += countKeywordOccurrences(line, "extends");
                    implementsCount += countKeywordOccurrences(line, "implements");
                    importCount += countKeywordOccurrences(line, "import");
                }
            }
            System.out.println("Package Name: " + packageName);
            System.out.println("Keyword 'extends' count: " + extendsCount);
            System.out.println("Keyword 'implements' count: " + implementsCount);
            System.out.println("Keyword 'import' count: " + importCount);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Counts whole-word occurrences of {@code keyword} in {@code line}.
     * Pattern.quote guards against regex metacharacters in the keyword.
     */
    public static int countKeywordOccurrences(String line, String keyword) {
        Matcher matcher = Pattern.compile("\\b" + Pattern.quote(keyword) + "\\b").matcher(line);
        int count = 0;
        while (matcher.find()) {
            count++;
        }
        return count;
    }
}
/*
* @Author: <NAME>
* @Date: 2022-01-28 13:52:44
* @Last Modified by: <NAME>
* @Last Modified time: 2022-01-28 15:31:56
*/
export const lazyUpdate: unique symbol = Symbol('lazyReadonly.update');
/**
 * Creates a readonly object whose entries are all uninitialized;
 * the value of each entry is computed the first time it is read,
 * then cached.
 * @template T
 * @template Context
 * @param {({ [key in keyof (Context & T)]: (obj: Readonly<T & Context>) => (Context & T)[key] })} getters
 * @returns {Readonly<T & {[lazyUpdate]: (keys: (keyof T)[]) => void}>}
 */
const lazyReadonly = <
  T extends Record<string | number | symbol, any>,
  Context extends Record<string | number | symbol, any> = T
>(
  getters: { [key in keyof (Context & T)]: (obj: Readonly<T & Context>) => (Context & T)[key] }
): Readonly<
  T & {
    [lazyUpdate]: (keys: (keyof T)[]) => void;
  }
> => {
  // Backing store. It starts with only the update function; entry values
  // are filled in lazily by the Proxy `get` trap below.
  const target = {
    [lazyUpdate]: (keys: (keyof T)[]) => {
      for (const key of keys) {
        if (key in getters) {
          // NOTE(review): recomputation passes the raw `target`, not the
          // proxy, so getters invoked here only see already-materialized
          // entries — confirm this is intended.
          (target as T & Context)[key as keyof T & Context] = getters[key as keyof T & Context](
            target as T & Context
          );
        }
      }
    }
  } as (T & Context & {
    [lazyUpdate]: (keys: (keyof T)[]) => void;
  });
  // Indirection so the `get` trap can hand the proxy itself to getters
  // even though the proxy is created after this point.
  let useProxy: () => Readonly<T & Context> = () => target;
  const proxy = new Proxy<
    T & Context & {
      [lazyUpdate]: (keys: (keyof T)[]) => void;
    }
  >(
    target, {
      get: (_, key) => {
        if (key === lazyUpdate) {
          return target[lazyUpdate];
        }
        if (key in getters) {
          // Materialize the entry on first access, then serve the cache.
          if (!(key in target)) {
            (target as T & Context)[key as keyof T & Context] = getters[key as keyof T & Context](
              useProxy()
            );
          }
          return target[key];
        }
        // Keys without a getter read as undefined.
        return undefined;
      }
    }
  );
  useProxy = () => proxy;
  return proxy;
};
export default lazyReadonly;
|
import React from "react"
import { NoVendorWrapper, Message } from "./noprefixes.styled"
// Fallback panel shown when the vendor-prefix database has no entry:
// `func` selects the SCSS-functions notice, otherwise the generic
// "works without <type>" message is rendered.
const NoPrefixes = ({ type, func }) => {
  return (
    <NoVendorWrapper>
      <Message>
        {func
          ? `We don't have any functions for this property. Check other for SCSS Functions.`
          : `This styling property works in all browser without ${type} or it simply
isn't in our database`}
      </Message>
    </NoVendorWrapper>
  )
}
export default NoPrefixes
|
#!/bin/sh
# One-shot MySQL/MariaDB bootstrap: on first run, initialize the system
# tables and load the pulsar-manager schema; then start the server.
if [ -d /app/mysql ]; then
  echo "[i] MySQL directory already present, skipping creation"
else
  echo "[i] MySQL data directory not found, creating initial DBs"
  # Build the initial system (grant) tables.
  mysql_install_db --user=root > /dev/null
  mkdir -p /app/mysql
  # mysqld needs /run/mysqld for its socket/pid files.
  if [ ! -d "/run/mysqld" ]; then
    mkdir -p /run/mysqld
  fi
  # Bootstrap mode executes the schema script without starting networking.
  /usr/bin/mysqld --user=root --bootstrap --verbose=0 < /pulsar-manager/init_db.sql
fi
# Start the server supervised by mysqld_safe, in the background.
mysqld_safe &
|
<reponame>Sasha7b9Work/S8-53M2
/////////////////////////////////////////////////////////////////////////////
// Name: src/qt/font.cpp
// Author: <NAME>, <NAME>, <NAME>
// Copyright: (c) 2009 wxWidgets dev team
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#include <QtGui/QFont>
#include <QtGui/QFontInfo>
#include "wx/font.h"
#include "wx/fontutil.h"
#include "wx/qt/private/utils.h"
#include "wx/qt/private/converter.h"
// Older versions of QT don't define all the QFont::Weight enum values, so just
// do it ourselves here for all case instead.
#if (QT_VERSION >= QT_VERSION_CHECK(5, 5, 0))
#define wxQFontEnumOrInt(a, b) a
#else
#define wxQFontEnumOrInt(a, b) b
#endif
// QFont weight anchors usable with any Qt version: for enumerators absent
// from older Qt headers, wxQFontEnumOrInt substitutes the numeric value.
enum
{
    wxQFont_Thin = wxQFontEnumOrInt( QFont::Thin, 0 ),
    wxQFont_ExtraLight = wxQFontEnumOrInt( QFont::ExtraLight, 12 ),
    wxQFont_Light = QFont::Light,
    wxQFont_Normal = QFont::Normal,
    wxQFont_Medium = wxQFontEnumOrInt( QFont::Medium, 57 ),
    wxQFont_DemiBold = QFont::DemiBold,
    wxQFont_Bold = QFont::Bold,
    wxQFont_ExtraBold = wxQFontEnumOrInt( QFont::ExtraBold, 81 ),
    wxQFont_Black = QFont::Black
};
// Map a wxFontFamily to the closest QFont::StyleHint; both DECORATIVE and
// SCRIPT fall back to Decorative, both MODERN and TELETYPE to TypeWriter.
static QFont::StyleHint ConvertFontFamily(wxFontFamily family)
{
    switch (family)
    {
        case wxFONTFAMILY_DECORATIVE:
        case wxFONTFAMILY_SCRIPT:
            return QFont::Decorative;

        case wxFONTFAMILY_ROMAN:
            return QFont::Serif;

        case wxFONTFAMILY_SWISS:
            return QFont::SansSerif;

        case wxFONTFAMILY_MODERN:
        case wxFONTFAMILY_TELETYPE:
            return QFont::TypeWriter;

        case wxFONTFAMILY_MAX:
            wxFAIL_MSG( "Invalid font family value" );
            break;

        case wxFONTFAMILY_DEFAULT:
            break;
    }

    return QFont::AnyStyle;
}
// Helper of ConvertFontWeight() and GetNumericWeight(): if a value lies in
// ]fromMin, fromMax] interval, then map it to [toMin, toMax] interval linearly
// and return true, otherwise return false and don't modify it.
static bool TryToMap(int& x, int fromMin, int fromMax, int toMin, int toMax)
{
    const bool inRange = fromMin < x && x <= fromMax;
    if ( inRange )
    {
        // Integer linear interpolation between the interval endpoints.
        const int span = fromMax - fromMin;
        x = (toMin * (fromMax - x) + toMax * (x - fromMin)) / span;
    }
    return inRange;
}
// Convert a wx numeric font weight to the QFont weight scale by mapping
// each interval between named wx weight anchors onto the corresponding
// interval between the wxQFont_* anchors (inverse of GetNumericWeight).
static int ConvertFontWeight(int w)
{
    // Note that wxQFont_Thin is 0, so we can't have anything lighter than it.
    if ( TryToMap(w, wxFONTWEIGHT_INVALID, wxFONTWEIGHT_THIN,
                  wxQFont_Thin, wxQFont_Thin) ||
         TryToMap(w, wxFONTWEIGHT_THIN, wxFONTWEIGHT_EXTRALIGHT,
                  wxQFont_Thin, wxQFont_ExtraLight) ||
         TryToMap(w, wxFONTWEIGHT_EXTRALIGHT, wxFONTWEIGHT_LIGHT,
                  wxQFont_ExtraLight, wxQFont_Light) ||
         TryToMap(w, wxFONTWEIGHT_LIGHT, wxFONTWEIGHT_NORMAL,
                  wxQFont_Light, wxQFont_Normal) ||
         TryToMap(w, wxFONTWEIGHT_NORMAL, wxFONTWEIGHT_MEDIUM,
                  wxQFont_Normal, wxQFont_Medium) ||
         TryToMap(w, wxFONTWEIGHT_MEDIUM, wxFONTWEIGHT_SEMIBOLD,
                  wxQFont_Medium, wxQFont_DemiBold) ||
         TryToMap(w, wxFONTWEIGHT_SEMIBOLD, wxFONTWEIGHT_BOLD,
                  wxQFont_DemiBold, wxQFont_Bold) ||
         TryToMap(w, wxFONTWEIGHT_BOLD, wxFONTWEIGHT_EXTRABOLD,
                  wxQFont_Bold, wxQFont_ExtraBold) ||
         TryToMap(w, wxFONTWEIGHT_EXTRABOLD, wxFONTWEIGHT_HEAVY,
                  wxQFont_ExtraBold, wxQFont_Black) ||
         TryToMap(w, wxFONTWEIGHT_HEAVY, wxFONTWEIGHT_EXTRAHEAVY,
                  wxQFont_Black, 99) )
    {
        return w;
    }

    // Out of the known range: fall back to the normal weight.
    wxFAIL_MSG("invalid wxFont weight");
    return wxQFont_Normal;
}
// Reference-counted font data: wraps a wxNativeFontInfo, which in turn
// wraps the underlying QFont.
class wxFontRefData: public wxGDIRefData
{
public:
    wxFontRefData() {}

    // Populate the native info from a wxFontInfo description.
    wxFontRefData(const wxFontInfo& info)
    {
        if ( info.HasFaceName() )
            m_nativeFontInfo.SetFaceName(info.GetFaceName());
        else
            m_nativeFontInfo.SetFamily(info.GetFamily());

        if ( info.IsUsingSizeInPixels() )
            m_nativeFontInfo.SetPixelSize(info.GetPixelSize());
        else
            m_nativeFontInfo.SetSizeOrDefault(info.GetFractionalPointSize());
        m_nativeFontInfo.SetStyle(info.GetStyle());
        m_nativeFontInfo.SetWeight(info.GetWeight());
        m_nativeFontInfo.SetUnderlined(info.IsUnderlined());
        m_nativeFontInfo.SetStrikethrough(info.IsStrikethrough());
    }

    // Copying the QFont is enough: it carries all of the font's state.
    wxFontRefData( const wxFontRefData& data )
        : wxGDIRefData()
    {
        m_nativeFontInfo.m_qtFont = data.m_nativeFontInfo.m_qtFont;
    }

    wxNativeFontInfo m_nativeFontInfo;
};

// Shorthand for the wxNativeFontInfo held in this font's ref data.
#define M_FONTDATA ((wxFontRefData *)m_refData)->m_nativeFontInfo
// ----------------------------------------------------------------------------
// wxFont constructors
// ----------------------------------------------------------------------------

wxFont::wxFont()
{
    m_refData = new wxFontRefData();
}

wxFont::wxFont(const wxFontInfo& info)
{
    m_refData = new wxFontRefData(info);
}

// Restore a font from a string produced by QFont::toString().
wxFont::wxFont(const wxString& nativeFontInfoString)
{
    m_refData = new wxFontRefData();
    QFont font;
    font.fromString(wxQtConvertString( nativeFontInfoString ));
    M_FONTDATA.m_qtFont = font;
}

wxFont::wxFont(const wxNativeFontInfo& info)
{
    m_refData = new wxFontRefData();
    M_FONTDATA.m_qtFont = info.m_qtFont;
}

wxFont::wxFont(const QFont& font)
{
    m_refData = new wxFontRefData();
    M_FONTDATA.m_qtFont = font;
}

// Point-size variant: width 0 tells Create the size is in points.
wxFont::wxFont(int size,
               wxFontFamily family,
               wxFontStyle style,
               wxFontWeight weight,
               bool underlined,
               const wxString& face,
               wxFontEncoding encoding)
{
    m_refData = new wxFontRefData();
    Create(wxSize(0, size), family, style, weight, underlined, face, encoding);
}

// NOTE(review): unlike the other ctors this one does not pre-allocate
// m_refData before Create(); Create() calls UnRef() first — confirm
// UnRef() tolerates a null ref data here.
wxFont::wxFont(const wxSize& pixelSize,
               wxFontFamily family,
               wxFontStyle style,
               wxFontWeight weight,
               bool underlined,
               const wxString& face,
               wxFontEncoding encoding)
{
    Create(pixelSize, family, style, weight, underlined, face, encoding);
}

// Legacy all-int overload kept for backwards compatibility.
wxFont::wxFont(int size,
               int family,
               int style,
               int weight,
               bool underlined,
               const wxString& face,
               wxFontEncoding encoding)
{
    Create(wxSize(0, size), (wxFontFamily)family, (wxFontStyle)style, (wxFontWeight)weight, underlined, face, encoding);
}

// (Re)initialize the font from the legacy parameter list; always succeeds.
bool wxFont::Create(wxSize size, wxFontFamily family, wxFontStyle style,
                    wxFontWeight weight, bool underlined, const wxString& face,
                    wxFontEncoding encoding )
{
    UnRef();

    m_refData = new wxFontRefData(InfoFromLegacyParams(size.GetHeight(), family,
                                                       style, weight, underlined,
                                                       face, encoding));
    return true;
}
// ----------------------------------------------------------------------------
// wxFont accessors: thin forwarders to the wxNativeFontInfo in M_FONTDATA
// ----------------------------------------------------------------------------

int wxFont::GetPointSize() const
{
    // NOTE(review): explicit qualification forces the wxNativeFontInfo
    // implementation — confirm why the unqualified call is avoided here.
    return M_FONTDATA.wxNativeFontInfo::GetPointSize();
}

double wxFont::GetFractionalPointSize() const
{
    return M_FONTDATA.GetFractionalPointSize();
}

wxSize wxFont::GetPixelSize() const
{
    return M_FONTDATA.GetPixelSize();
}

wxFontStyle wxFont::GetStyle() const
{
    return M_FONTDATA.GetStyle();
}

int wxFont::GetNumericWeight() const
{
    return M_FONTDATA.GetNumericWeight();
}

bool wxFont::GetUnderlined() const
{
    return M_FONTDATA.GetUnderlined();
}

wxString wxFont::GetFaceName() const
{
    return M_FONTDATA.GetFaceName();
}

wxFontEncoding wxFont::GetEncoding() const
{
    return M_FONTDATA.GetEncoding();
}

const wxNativeFontInfo *wxFont::GetNativeFontInfo() const
{
    return &M_FONTDATA;
}

bool wxFont::GetStrikethrough() const
{
    return M_FONTDATA.GetStrikethrough();
}
// ----------------------------------------------------------------------------
// wxFont mutators: each one unshares the ref data via AllocExclusive()
// before modifying it, then forwards to wxNativeFontInfo.
// ----------------------------------------------------------------------------

void wxFont::SetFractionalPointSize(double pointSize)
{
    AllocExclusive();
    M_FONTDATA.SetFractionalPointSize(pointSize);
}

void wxFont::SetPixelSize(const wxSize& pixelSize)
{
    AllocExclusive();
    M_FONTDATA.SetPixelSize(pixelSize);
}

// Returns whether the face name was accepted by the native info.
bool wxFont::SetFaceName(const wxString& facename)
{
    AllocExclusive();
    return M_FONTDATA.SetFaceName(facename);
}

void wxFont::SetFamily( wxFontFamily family )
{
    AllocExclusive();
    M_FONTDATA.SetFamily(family);
}

void wxFont::SetStyle( wxFontStyle style )
{
    AllocExclusive();
    M_FONTDATA.SetStyle(style);
}

void wxFont::SetNumericWeight(int weight)
{
    AllocExclusive();
    M_FONTDATA.SetNumericWeight(weight);
}

void wxFont::SetUnderlined( bool underlined )
{
    AllocExclusive();
    M_FONTDATA.SetUnderlined(underlined);
}

void wxFont::SetStrikethrough(bool strikethrough)
{
    AllocExclusive();
    M_FONTDATA.SetStrikethrough(strikethrough);
}

void wxFont::SetEncoding(wxFontEncoding encoding)
{
    AllocExclusive();
    M_FONTDATA.SetEncoding(encoding);
}

// Copy every attribute from the given native info via the setters above.
void wxFont::DoSetNativeFontInfo(const wxNativeFontInfo& info)
{
    SetFractionalPointSize(info.GetPointSize());
    SetFamily(info.GetFamily());
    SetStyle(info.GetStyle());
    SetNumericWeight(info.GetWeight());
    SetUnderlined(info.GetUnderlined());
    SetStrikethrough(info.GetStrikethrough());
    SetFaceName(info.GetFaceName());
    SetEncoding(info.GetEncoding());
}
// Create a fresh, empty ref data for this font.
wxGDIRefData *wxFont::CreateGDIRefData() const
{
    return new wxFontRefData;
}

// Deep-copy an existing ref data (used when unsharing a shared font).
wxGDIRefData *wxFont::CloneGDIRefData(const wxGDIRefData *data) const
{
    return new wxFontRefData(*(wxFontRefData *)data);
}

// Return a copy of the underlying QFont.
QFont wxFont::GetHandle() const
{
    return M_FONTDATA.m_qtFont;
}

wxFontFamily wxFont::DoGetFamily() const
{
    return M_FONTDATA.GetFamily();
}
// ----------------------------------------------------------------------------
// wxNativeFontInfo
// ----------------------------------------------------------------------------

// Nothing to do: m_qtFont default-constructs itself.
void wxNativeFontInfo::Init()
{
}

double wxNativeFontInfo::GetFractionalPointSize() const
{
    return m_qtFont.pointSizeF();
}

// Only the height is meaningful here; QFont stores a single pixel size.
wxSize wxNativeFontInfo::GetPixelSize() const
{
    return wxSize(0, m_qtFont.pixelSize());
}
wxFontStyle wxNativeFontInfo::GetStyle() const
{
switch (m_qtFont.style())
{
case QFont::StyleNormal:
return wxFONTSTYLE_NORMAL;
case QFont::StyleItalic:
return wxFONTSTYLE_ITALIC;
case QFont::StyleOblique:
return wxFONTSTYLE_SLANT;
}
wxFAIL_MSG( "Invalid font style value" );
return wxFontStyle();
}
int wxNativeFontInfo::GetNumericWeight() const
{
int w = m_qtFont.weight();
// Special case of wxQFont_Thin == 0.
if ( w == wxQFont_Thin )
return wxFONTWEIGHT_THIN;
if ( TryToMap(w, wxQFont_Thin, wxQFont_ExtraLight,
wxFONTWEIGHT_THIN, wxFONTWEIGHT_EXTRALIGHT) ||
TryToMap(w, wxQFont_ExtraLight, wxQFont_Light,
wxFONTWEIGHT_EXTRALIGHT, wxFONTWEIGHT_LIGHT) ||
TryToMap(w, wxQFont_Light, wxQFont_Normal,
wxFONTWEIGHT_LIGHT, wxFONTWEIGHT_NORMAL) ||
TryToMap(w, wxQFont_Normal, wxQFont_Medium,
wxFONTWEIGHT_NORMAL, wxFONTWEIGHT_MEDIUM) ||
TryToMap(w, wxQFont_Medium, wxQFont_DemiBold,
wxFONTWEIGHT_MEDIUM, wxFONTWEIGHT_SEMIBOLD) ||
TryToMap(w, wxQFont_DemiBold, wxQFont_Bold,
wxFONTWEIGHT_SEMIBOLD, wxFONTWEIGHT_BOLD) ||
TryToMap(w, wxQFont_Bold, wxQFont_ExtraBold,
wxFONTWEIGHT_BOLD, wxFONTWEIGHT_EXTRABOLD) ||
TryToMap(w, wxQFont_ExtraBold, wxQFont_Black,
wxFONTWEIGHT_EXTRABOLD, wxFONTWEIGHT_HEAVY) ||
TryToMap(w, wxQFont_Black, 99,
wxFONTWEIGHT_HEAVY, wxFONTWEIGHT_EXTRAHEAVY) )
{
return w;
}
wxFAIL_MSG( "Invalid QFont weight" );
return wxFONTWEIGHT_NORMAL;
}
bool wxNativeFontInfo::GetUnderlined() const
{
return m_qtFont.underline();
}
bool wxNativeFontInfo::GetStrikethrough() const
{
return m_qtFont.strikeOut();
}
wxString wxNativeFontInfo::GetFaceName() const
{
// use font info to get the matched face name (not the family given)
QFontInfo info = QFontInfo(m_qtFont);
return wxQtConvertString(info.family());
}
wxFontFamily wxNativeFontInfo::GetFamily() const
{
switch (m_qtFont.styleHint())
{
case QFont::System:
case QFont::AnyStyle:
return wxFONTFAMILY_DEFAULT;
case QFont::Fantasy:
case QFont::Cursive:
case QFont::Decorative:
return wxFONTFAMILY_DECORATIVE;
case QFont::Serif:
return wxFONTFAMILY_ROMAN;
case QFont::SansSerif:
return wxFONTFAMILY_SWISS;
case QFont::Monospace:
case QFont::TypeWriter:
return wxFONTFAMILY_TELETYPE;
}
return wxFONTFAMILY_UNKNOWN;
}
wxFontEncoding wxNativeFontInfo::GetEncoding() const
{
return wxFONTENCODING_UTF8;
}
void wxNativeFontInfo::SetFractionalPointSize(double pointsize)
{
    m_qtFont.setPointSizeF(pointsize);
}
// Only the height is meaningful to Qt; the width component is ignored.
void wxNativeFontInfo::SetPixelSize(const wxSize& size)
{
    m_qtFont.setPixelSize(size.GetHeight());
}
// Translate the wx style enum into QFont::Style.
void wxNativeFontInfo::SetStyle(wxFontStyle style)
{
    QFont::Style qtStyle;
    switch ( style )
    {
        case wxFONTSTYLE_ITALIC:
            qtStyle = QFont::StyleItalic;
            break;
        case wxFONTSTYLE_NORMAL:
            qtStyle = QFont::StyleNormal;
            break;
        case wxFONTSTYLE_SLANT:
            qtStyle = QFont::StyleOblique;
            break;
        case wxFONTSTYLE_MAX:
            wxFAIL_MSG("unknown font style");
            return;
    }
    m_qtFont.setStyle(qtStyle);
}
// ConvertFontWeight (defined elsewhere in this file) maps the wx numeric
// weight onto QFont's weight scale.
void wxNativeFontInfo::SetNumericWeight(int weight)
{
    m_qtFont.setWeight(ConvertFontWeight(weight));
}
void wxNativeFontInfo::SetUnderlined(bool underlined)
{
    m_qtFont.setUnderline(underlined);
}
void wxNativeFontInfo::SetStrikethrough(bool strikethrough)
{
    m_qtFont.setStrikeOut(strikethrough);
}
bool wxNativeFontInfo::SetFaceName(const wxString& facename)
{
    m_qtFont.setFamily(wxQtConvertString(facename));
    // Qt uses a "font matching algorithm" so the font will be always valid
    return true;
}
void wxNativeFontInfo::SetFamily(wxFontFamily family)
{
    m_qtFont.setStyleHint(ConvertFontFamily(family));
    // reset the face name to force qt to choose a new font
    m_qtFont.setFamily(m_qtFont.defaultFamily());
}
// Not implemented for the Qt port: encodings are handled by Qt itself.
void wxNativeFontInfo::SetEncoding(wxFontEncoding WXUNUSED(encoding))
{
    wxMISSING_IMPLEMENTATION( __FUNCTION__ );
}
// (De)serialisation piggybacks on QFont's own string format.
bool wxNativeFontInfo::FromString(const wxString& s)
{
    return m_qtFont.fromString( wxQtConvertString( s ) );
}
wxString wxNativeFontInfo::ToString() const
{
    return wxQtConvertString( m_qtFont.toString() );
}
bool wxNativeFontInfo::FromUserString(const wxString& s)
{
    return FromString(s);
}
wxString wxNativeFontInfo::ToUserString() const
{
    return ToString();
}
|
package app
import (
"google-calendar-to-notion/api/calendarapi"
"google-calendar-to-notion/api/notionapi"
)
// Config holds the full application configuration for the calendar-to-Notion
// sync. Fields tagged `required:"true"` are presumably validated by the
// configuration loader at startup -- confirm against the loading code.
type Config struct {
	// Structured-logger settings.
	Logger struct {
		ServiceName string `required:"true"`
		Level string `required:"true"`
		Development bool `required:"true"`
	}
	// Google Calendar API credentials.
	GoogleCalendar struct {
		APISecret string `required:"true"`
		Oauth2 string `required:"true"`
	}
	// Notion API credentials.
	Notion struct{
		APISecret string `required:"true"`
	}
	// Per-API settings declared in their respective packages.
	CalendarSettings calendarapi.Config
	NotionSettings notionapi.Config
}
|
<filename>Aula15/ambiente.js<gh_stars>1-10
// Demonstration of basic array operations.
let num = [5, 8, 9, 3]
num[3] = 6   // overwrites the value at index 3 (3 -> 6)
num.push(7)  // appends a value at the end
num.length   // property holding the element count
num.sort()   // sorts in ascending order -> [5, 6, 7, 8, 9]
console.log(num)
console.log(`O vetor tem ${num.length} posições.`)
console.log(`O primeiro valor do vetor é ${num[0]}`)
// Bug fix: the messages below report on the value 8, but the original code
// searched for 4 (which is never in the array), so the "não encontrado"
// branch always ran. Search for 8 to match the messages.
let pos = num.indexOf(8)
if (pos == -1) {
    console.log(`O valor não foi encontrado!`)
} else {
    console.log(`O valor 8 está na posição ${pos}`)
}
<filename>Product_Display_Cards/Product_Card_06/code/script.js
// Product card: clicking one of the option thumbnails swaps the main product
// image and recolours the buy button to match.
window.onload = function(){
    // NOTE(review): `card` is looked up but never used below -- confirm it
    // can be removed.
    const card = document.getElementsByClassName('card');
    const select = document.querySelectorAll('.opt-img');
    const btn = document.getElementById('btn');
    const productImg = document.getElementById('product-img');
    // One click handler per thumbnail; the index decides which product
    // image/colour pairing is applied.
    select.forEach((div,index)=>{
        div.addEventListener('click',function(){
            if(index==0){
                productImg.src='assets/Product1.png';
                btn.style.background='#1d9659';
            }
            else if(index==1){
                productImg.src='assets/Product2.png';
                btn.style.background='rgb(206, 43, 32)';
            }
            else if(index==2){
                productImg.src='assets/Product3.png';
                btn.style.background='#054146';
            }
        })
    })
}
package LiuOu.entity;
/**
 * Plain data holder (JavaBean) for a person record: database id plus
 * demographic and organisational fields.
 */
public class Person {
    // Database primary key.
    private int id;
    private String name;
    private String sex;
    private int age;
    private String rank;
    private String department;
    /** No-arg constructor required by bean-mapping frameworks. */
    public Person() {
    }
    /** Constructor for new records whose id is assigned by the database. */
    public Person(String name, String sex, int age, String rank, String department) {
        this.name = name;
        this.sex = sex;
        this.age = age;
        this.rank = rank;
        this.department = department;
    }
    /** Constructor for records loaded from the database (id known). */
    public Person(int id, String name, String sex, int age, String rank, String department) {
        this.id = id;
        this.name = name;
        this.sex = sex;
        this.age = age;
        this.rank = rank;
        this.department = department;
    }
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getSex() {
        return sex;
    }
    public void setSex(String sex) {
        this.sex = sex;
    }
    public int getAge() {
        return age;
    }
    public void setAge(int age) {
        this.age = age;
    }
    public String getRank() {
        return rank;
    }
    public void setRank(String rank) {
        this.rank = rank;
    }
    public String getDepartment() {
        return department;
    }
    public void setDepartment(String department) {
        this.department = department;
    }
}
|
# returns the last modified timestamp from a HTTP URL
# reads URL from arg 1 or stdin
#
# Requires: curl
#
# Print the Last-Modified timestamp of an HTTP URL.
# Reads the URL from arg 1 or, when no argument is given, from stdin.
#
# Requires: curl
#
http_last_modified() {
    # Bug fix: the original used ${1:-/dev/stdin}, which passed the literal
    # path "/dev/stdin" to curl as the URL. To honour "URL from stdin" the
    # URL text itself must be read from stdin.
    url=${1:-$(cat)}
    # curl -L      = follow redirects
    # curl -s      = no progress meter
    # curl --fail  = non-zero exit on HTTP errors
    # curl --head  = HEAD request only
    # grep -i      = header names are case-insensitive (HTTP/2 lower-cases them)
    # tail -c 31   = keep the value including trailing \r\n; head -c 29 strips them
    curl -L -s --fail --head "$url" | grep -i 'Last-Modified:' | tail -c 31 | head -c 29
}
|
<reponame>jamacanbacn/splits-io
require 'rails_helper'
# Specs for the run-file conversion endpoint: uploads a splits file and
# converts it to the requested format without persisting a run.
describe Api::V4::ConvertsController do
  describe '#create' do
    context 'when passed a Llanfair file' do
      let(:file) do
        fixture_file_upload('files/llanfair')
      end
      # Happy path: valid file + supported output format.
      context 'with a good format' do
        subject { post :create, params: {file: file, format: 'json', historic: '1'} }
        let(:body) { JSON.parse(subject.body) }
        it 'returns a 200' do
          expect(subject).to have_http_status 200
        end
        # Conversions are ephemeral, so no database id should appear.
        it 'has no id field' do
          expect(body['run']['id']).to be_nil
        end
        it 'renders a run schema' do
          expect(subject.body).to match_json_schema(:run)
        end
      end
      # Requesting an unsupported output format is a client error.
      context 'with a bad format' do
        subject { post :create, params: {file: file, format: 'llanfair'} }
        let(:body) { JSON.parse(subject.body) }
        it 'returns a 400' do
          expect(subject).to have_http_status 400
        end
        it 'returns an error body' do
          expect(body['status']).to be_truthy
          expect(body['message']).to be_truthy
        end
      end
      context 'with a missing parameter' do
        subject { post :create, params: {file: file} }
        let(:body) { JSON.parse(subject.body) }
        it 'returns a 400' do
          expect(subject).to have_http_status 400
        end
        it 'returns an error body' do
          expect(body['status']).to be_truthy
          expect(body['message']).to be_truthy
        end
      end
    end
    # Unparseable uploads are rejected with a structured error.
    context 'with a malformed file' do
      let(:file) do
        fixture_file_upload('files/malformed')
      end
      subject { post :create, params: {file: file, format: 'json'} }
      let(:body) { JSON.parse(subject.body) }
      it 'returns a 400' do
        expect(subject).to have_http_status 400
      end
      it 'returns an error body' do
        expect(body['status']).to be_truthy
        expect(body['message']).to be_truthy
      end
    end
  end
end
|
<filename>src/Lobby.cpp
//
// Created by salamander on 3/2/19.
//
#include "Lobby.h"
#include "gui_engine_defs.h"
#include "Network.h"
#include "game_state.h"
// Lobby screen: shows the list of connected players (IP + name) while
// waiting for the host to start the game.
Lobby::Lobby(GameDataRef data)
        :m_data(data)
{
}
// Load the lobby font/background and set up the static title text.
void Lobby::Init()
{
    m_data->assets.loadFont("Text Font",GAME_LOBBY_FONT_PATH);
    m_data->assets.loadTexture("Background",GAME_LOBBY_BACKGROUND_FILEPATH);
    m_background.setTexture(m_data->assets.getTexture("Background"));
    m_title.setFont(m_data->assets.getFont("Text Font"));
    m_title.setPosition((SCREEN_WIDTH*0.25),(SCREEN_HEIGHT*0.10));
    m_title.setFillColor((sf::Color::White));
    m_title.setString("Player List");
    m_title.setCharacterSize(30);
}
// Append one IP column entry and one name column entry for every peer that
// connected since the last update (existing rows are kept as-is).
void Lobby::Update(float dt)
{
    sf::Text tempText;
    // NOTE(review): `int i` is compared against size_t container sizes
    // (signed/unsigned mismatch) -- harmless for small peer counts.
    if (Network::peers.size() > 0 && m_textIP.size() != Network::peers.size())
    {
        for (int i = m_textIP.size(); i < Network::peers.size(); i++) {
            // Left column: the peer's address string.
            tempText.setFont(m_data->assets.getFont("Text Font"));
            tempText.setPosition((SCREEN_WIDTH * 0.15), (SCREEN_HEIGHT * (0.10 * (i + 2))));
            tempText.setFillColor((sf::Color::White));
            tempText.setString(Network::peers[i]);
            tempText.setCharacterSize(24);
            m_textIP.push_back(tempText);
            // Right column: the display name looked up by address.
            tempText.setFont(m_data->assets.getFont("Text Font"));
            tempText.setPosition((SCREEN_WIDTH * 0.35), (SCREEN_HEIGHT * (0.10 * (i + 2))));
            tempText.setFillColor((sf::Color::White));
            tempText.setString(Network::peersName[Network::peers[i]]);
            tempText.setCharacterSize(24);
            m_textName.push_back(tempText);
        }
    }
}
// Poll window events and transition to the game state once the host starts.
void Lobby::HandleInput()
{
    if(Network::GameStart)
    {
        // Switch to Game State
        m_data->machine.addState(StateRef(new GameState(m_data)), true);
    }
    sf::Event event;
    while (m_data->window.pollEvent(event)) {
        if (event.type == sf::Event::Closed) {
            m_data->window.close();
        }
    }
}
// Render background, title and the two player-list columns.
void Lobby::Draw(float dt)
{
    m_data->window.clear();
    m_data->window.draw(m_background);
    m_data->window.draw(m_title);
    for (int i = 0; i < m_textIP.size();i++)
    {
        m_data->window.draw(m_textIP[i]);
        m_data->window.draw(m_textName[i]);
    }
    m_data->window.display();
}
<filename>CSW/csw_unidata.py
# coding: utf-8
# # How to search the IOOS CSW catalog with Python tools
#
#
# This notebook demonstrates a how to query a [Catalog Service for the Web (CSW)](https://en.wikipedia.org/wiki/Catalog_Service_for_the_Web), like the IOOS Catalog, and to parse its results into endpoints that can be used to access the data.
# In[1]:
import os
import sys
ioos_tools = os.path.join(os.path.pardir)
sys.path.append(ioos_tools)
# Let's start by creating the search filters.
# The filter used here constraints the search on a certain geographical region (bounding box), a time span (last week), and some [CF](http://cfconventions.org/Data/cf-standard-names/37/build/cf-standard-name-table.html) variable standard names that represent sea surface temperature.
# In[2]:
from datetime import datetime, timedelta
import dateutil.parser
service_type = 'WMS'
# Bounding box over the US south-east coast (lon/lat degrees).
min_lon, min_lat = -90.0, 30.0
max_lon, max_lat = -80.0, 40.0
bbox = [min_lon, min_lat, max_lon, max_lat]
crs = 'urn:ogc:def:crs:OGC:1.3:CRS84'
# Temporal range: Last week.
now = datetime.utcnow()
start, stop = now - timedelta(days=(7)), now
# NOTE(review): the relative range above is immediately overwritten by this
# fixed March 2017 window -- confirm which one is intended.
start = dateutil.parser.parse('2017-03-01T00:00:00Z')
stop = dateutil.parser.parse('2017-04-01T00:00:00Z')
# Ocean Model Names
model_names = ['NAM', 'GFS']
# With these 3 elements it is possible to assemble a [OGC Filter Encoding (FE)](http://www.opengeospatial.org/standards/filter) using the `owslib.fes`\* module.
#
# \* OWSLib is a Python package for client programming with Open Geospatial Consortium (OGC) web service (hence OWS) interface standards, and their related content models.
# In[3]:
from owslib import fes
from ioos_tools.ioos import fes_date_filter
# Free-text match against any model name.
kw = dict(wildCard='*', escapeChar='\\',
          singleChar='?', propertyname='apiso:AnyText')
or_filt = fes.Or([fes.PropertyIsLike(literal=('*%s*' % val), **kw)
                  for val in model_names])
# Restrict to records advertising the requested service type (WMS).
kw = dict(wildCard='*', escapeChar='\\',
          singleChar='?', propertyname='apiso:ServiceType')
serviceType = fes.PropertyIsLike(literal=('*%s*' % service_type), **kw)
begin, end = fes_date_filter(start, stop)
bbox_crs = fes.BBox(bbox, crs=crs)
# All constraints must hold simultaneously.
filter_list = [
    fes.And(
        [
            bbox_crs,    # bounding box
            begin, end,  # start and end date
            or_filt,     # or conditions (CF variable names)
            serviceType  # search only for datasets that have WMS services
        ]
    )
]
# In[4]:
from owslib.csw import CatalogueServiceWeb
endpoint = 'https://data.ioos.us/csw'
csw = CatalogueServiceWeb(endpoint, timeout=60)
def get_csw_records(csw, filter_list, pagesize=10, maxrecords=1000):
    """Page through getrecords2 calls until up to `maxrecords` records are
    fetched, accumulating them into ``csw.records`` (mutated in place).

    Returns None; results are read from ``csw.records`` afterwards.
    """
    from owslib.fes import SortBy, SortProperty
    # Iterate over sorted results so paging is deterministic.
    sortby = SortBy([SortProperty('dc:title', 'ASC')])
    csw_records = {}
    startposition = 0
    # NOTE(review): `nextrecord` is never reassigned inside the loop, so the
    # loop only terminates via the two `break` statements below.
    nextrecord = getattr(csw, 'results', 1)
    while nextrecord != 0:
        csw.getrecords2(constraints=filter_list, startposition=startposition,
                        maxrecords=pagesize, sortby=sortby)
        csw_records.update(csw.records)
        if csw.results['nextrecord'] == 0:
            break
        # NOTE(review): owslib examples advance by `pagesize` only; the
        # extra +1 here ("Last one is included") may skip one record per
        # page -- confirm against the CSW server's startposition semantics.
        startposition += pagesize + 1  # Last one is included.
        if startposition >= maxrecords:
            break
    csw.records.update(csw_records)
# In[6]:
get_csw_records(csw, filter_list, pagesize=10, maxrecords=1000)
# NOTE(review): `records` is built but never used below -- confirm removable.
records = '\n'.join(csw.records.keys())
print('Found {} records.\n'.format(len(csw.records.keys())))
for key, value in list(csw.records.items()):
    print('[{}]\n{}\n'.format(value.title, key))
# In[7]:
csw.request
# In[8]:
#write to JSON for use in TerriaJS
# NOTE(review): the format string '"{}": {}"' has unbalanced quotes -- the
# output reads `"key": value"`; confirm whether TerriaJS expects this.
csw_request = '"{}": {}"'.format('getRecordsTemplate',str(csw.request,'utf-8'))
import io
import json
# Append (mode 'a') the request template to query.json, one JSON string per line.
with io.open('query.json', 'a', encoding='utf-8') as f:
    f.write(json.dumps(csw_request, ensure_ascii=False))
    f.write('\n')
|
/**
 * Minimal vending machine model: tracks a balance and a fixed item cost and
 * deducts the cost on each successful purchase.
 */
public class VendingMachine {
    private int balance;
    private int itemCost;
    public VendingMachine(int balance, int itemCost) {
        this.balance = balance;
        this.itemCost = itemCost;
    }
    public int getBalance() {
        return this.balance;
    }
    public void setBalance(int balance) {
        this.balance = balance;
    }
    public int getItemCost() {
        return this.itemCost;
    }
    public void setItemCost(int itemCost) {
        this.itemCost = itemCost;
    }
    /**
     * Deduct the item cost from the balance when funds suffice; otherwise
     * leave the balance unchanged. Outcome is reported on stdout only.
     */
    public void purchaseItem(){
        if (balance >= itemCost){
            balance -= itemCost;
            System.out.println("Item purchased successfully");
        } else {
            System.out.println("Insufficient balance. Please add funds");
        }
    }
}
// Wikipedia search widget: queries the opensearch API (JSONP) on Enter or
// on the submit button and renders result cards.
$(document).ready(function() {
  var api =
    "http://en.wikipedia.org/w/api.php?action=opensearch&format=json&limit=15&search=";
  var searchTerm = $("#search");
  var term;
  // Trigger a search when Enter is pressed inside the input.
  $("#search").keypress(function(e) {
    if (e.originalEvent.key === "Enter") {
      term = api + searchTerm[0].value;
      getWiki(term);
    }
  });
  $("#submit-button").click(function() {
    term = api + searchTerm[0].value;
    getWiki(term);
  });
  // Fetch results; opensearch returns [query, titles[], summaries[], urls[]].
  function getWiki(val) {
    $.ajax({
      method: "GET",
      url: val,
      dataType: "jsonp",
      success: function(data) {
        // NOTE(review): results are prepended without clearing #results, so
        // repeated searches stack on top of older ones -- confirm intended.
        for (var i = 0; i < data[1].length; i++) {
          $("#results").prepend(
            '<div class="col-sm-6">'
            + '<div class="card">'
            + '<div class="card-block">'
            + '<h3 class="card-title">' + data[1][i] + '</h3>'
            + '<p class="card-text">' + data[2][i] + '</p>'
            + '<a href="' + data[3][i] + '" class="btn btn-primary" target="_blank">Read More</a>'
            + '</div>'
            + '</div>'
            + '</div>'
          );
        }
      }
    });
  }
});
|
//
// DraggableAnnotationViewExample.h
// Examples
//
// Created by <NAME> on 7/11/16.
// Copyright © 2016 Mapbox. All rights reserved.
//
#import <UIKit/UIKit.h>
// View controller for the draggable annotation view example; the behaviour
// lives in the corresponding implementation (.m) file.
@interface DraggableAnnotationViewExample : UIViewController
@end
|
#!/bin/bash
# Install Terraform 1.1.7 system-wide from the official HashiCorp release.
# NOTE(review): the downloaded zip is not checksum-verified; consider
# validating it against HashiCorp's published SHA256SUMS file.
echo '### Installing terraform...'
wget https://releases.hashicorp.com/terraform/1.1.7/terraform_1.1.7_linux_amd64.zip -O /tmp/terraform.zip
unzip /tmp/terraform.zip -d /tmp
sudo mv /tmp/terraform /usr/local/bin
echo '### Testing the installation...'
terraform version
echo '### Supported commands:'
terraform
echo '### Commands help can be seen by terraform -help [command]'
<filename>src/components/grid/Grid.js<gh_stars>0
import React from 'react';
import _ from 'underscore';
import Card from '../card/Card';
// Grid: renders a 2-D matrix of card items as Bootstrap-style rows/columns.
// `itemMatrix` is an array of rows; each row is an array of card item objects.
const Grid = ({ itemMatrix }) => {
  return (
    <div className='grid'>
      <div className='container'>
        {_.map(itemMatrix, (row, index) => (
          // One equal-height row per matrix row; row index as React key.
          <div className='row row-eq-height' key={index}>
            {_.map(row, item => (
              <div className='col' key={item.id}>
                <Card
                  image={item.image}
                  title={item.title}
                  outlink={item.outlink}
                  inlink={item.inlink}
                  text={item.text}
                  footer={item.footer}
                />
              </div>
            ))}
          </div>
        ))}
      </div>
    </div>
  );
}
export default Grid;
|
/**
 * Immutable {@link Registration} value object tying a remote service id to
 * its hosting container and a property bag.
 */
public class ConcreteRegistration implements Registration {
    private String containerID;
    private IRemoteServiceID serviceID;
    private Map<String, Object> properties;
    public ConcreteRegistration(String containerID, IRemoteServiceID serviceID, Map<String, Object> properties) {
        this.containerID = containerID;
        this.serviceID = serviceID;
        this.properties = properties;
    }
    @Override
    public String getContainerID() {
        return containerID;
    }
    @Override
    public IRemoteServiceID getID() {
        return serviceID;
    }
    /** Returns the property value for {@code key}, or null when absent. */
    @Override
    public Object getProperty(String key) {
        return properties.get(key);
    }
}
import flask
from flask import request
import textblob
app = flask.Flask(__name__)
# Render the text-entry form.
@app.route('/')
def main():
    return flask.render_template('main.html')
# Run TextBlob sentiment analysis on the submitted text and show the
# polarity score (-1.0 = most negative .. 1.0 = most positive).
@app.route('/sentiment', methods=['POST'])
def result():
    input_text = request.form['input_text']
    analysis = textblob.TextBlob(input_text)
    polarity = analysis.sentiment.polarity
    return flask.render_template('sentiment.html', results=polarity)
if __name__ == '__main__':
    app.run()
#!/usr/bin/bash
# Build a wheel distribution, replacing any previous build artifacts.
# Fix: plain `rm dist/*` errors out on a fresh checkout where dist/ is
# empty or missing; -f makes the cleanup step best-effort.
rm -f dist/*
python3 setup.py bdist_wheel
|
#!/bin/bash
#========================================================================================
# hacktool_shell.sh: Run shell in the hacktools service.
#========================================================================================
CTR_NAME="hack-tools"
# -i keeps STDIN open and -t allocates a TTY -- both required for an
# interactive shell inside the running container.
docker exec -it "$CTR_NAME" /bin/bash
import { Module } from '@nestjs/common';
import { HobbyController } from './hobby.controller';
import { HobbyService } from './hobby.service';
// Feature module wiring the hobby HTTP controller to its service.
@Module({
  controllers: [HobbyController],
  providers: [HobbyService]
})
export class HobbyModule {}
|
// Count the (lower-case) vowels in `text` and print the total.
// Bug fix: the counter was declared as `numberofVowels` but incremented and
// printed as `numberOfVowels` -- C# identifiers are case-sensitive, so the
// original snippet did not compile. One consistent name is used throughout.
string text = "hello world";
int numberOfVowels = 0;
foreach (char c in text)
{
    if (c == 'a' || c == 'e' || c == 'i' || c == 'o' || c == 'u')
    {
        numberOfVowels++;
    }
}
Console.WriteLine("Number of vowels: " + numberOfVowels);
#!/bin/sh
# Convenience alias: long listing including dotfiles.
alias ll='ls -la'
<filename>Plugin~/Src/MeshSync/Utils/msNetworkUtils.cpp
#include "pch.h"
#include "msNetworkUtils.h"
#include <Poco/RegularExpression.h>
namespace ms {
// Returns true when `hostAndPort` ("host[:port]") names the local machine or
// a private IPv4 range: localhost, 127.0.0.0/8, 10.0.0.0/8, 172.16.0.0/12
// or 192.168.0.0/16.
bool NetworkUtils::IsInLocalNetwork(const std::string& hostAndPort) {
    const uint32_t MAX_TOKENS = 3;
    std::vector<std::string> tokens(MAX_TOKENS);
    std::istringstream input(hostAndPort);
    uint32_t i = 0;
    // Split on ':' into at most MAX_TOKENS pieces (host, port, ...).
    while (i < MAX_TOKENS && getline(input, tokens[i], ':')) {
        ++i;
    }
    // More separators than host:port allows -> reject outright.
    if (i >= MAX_TOKENS)
        return false;
    static const Poco::RegularExpression regex(
        "(^localhost$)"
        "|(^127\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$)"
        "|(^10\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$)"
        "|(^172\\.1[6-9]{1}[0-9]{0,1}\\.[0-9]{1,3}\\.[0-9]{1,3}$)"
        "|(^172\\.2[0-9]{1}[0-9]{0,1}\\.[0-9]{1,3}\\.[0-9]{1,3}$)"
        "|(^172\\.3[0-1]{1}[0-9]{0,1}\\.[0-9]{1,3}\\.[0-9]{1,3}$)|(^192\\.168\\.[0-9]{1,3}\\.[0-9]{1,3}$)");
    // Only the host part (first token) is matched; the port is ignored.
    return regex.match(tokens[0]);
}
} // namespace ms
|
import { Injector, ModuleWithProviders, NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { RouterModule } from '@angular/router';
import { FormsModule } from '@angular/forms';
import { HttpClientModule } from '@angular/common/http';
import { NbLayoutModule, NbCardModule, NbCheckboxModule } from '@nebular/theme';
import { NbAuthService } from './services/auth.service';
import { NbAuthProvider } from './providers/auth.provider';
import {
defaultSettings,
NB_AUTH_USER_OPTIONS_TOKEN,
NB_AUTH_OPTIONS_TOKEN,
NB_AUTH_PROVIDERS_TOKEN,
NB_AUTH_TOKEN_WRAPPER_TOKEN,
NbAuthOptions, NB_AUTH_INTERCEPTOR_HEADER,
} from './auth.options';
import { NgxAuthComponent } from './components/auth.component';
import {NbAuthToken, NbTokenService} from './services/token.service';
import { NgxAuthBlockComponent } from './components/auth-block/auth-block.component';
import { NgxLoginComponent } from './components/login/login.component';
import { NgxRegisterComponent } from './components/register/register.component';
import { NgxLogoutComponent } from './components/logout/logout.component';
import { NgxRequestPasswordComponent } from './components/request-password/request-password.component';
import { NgxResetPasswordComponent } from './components/reset-password/reset-password.component';
import { routes } from './auth.routes';
import { deepExtend } from './helpers';
// DI factory for NbAuthService: resolves each configured provider from the
// injector, pushes its per-provider config into it, then builds the service.
export function nbAuthServiceFactory(config: any, tokenService: NbTokenService, injector: Injector) {
  const providers = config.providers || {};
  for (const key in providers) {
    if (providers.hasOwnProperty(key)) {
      const provider = providers[key];
      // Each entry names an injectable service class plus its config blob.
      const object = injector.get(provider.service);
      object.setConfig(provider.config || {});
    }
  }
  return new NbAuthService(tokenService, injector, providers);
}
// Merge user-supplied auth options over the library defaults (deep merge).
export function nbOptionsFactory(options) {
  return deepExtend(defaultSettings, options);
}
// Auth feature module: declares/exports the auth UI components and, via
// forRoot(), wires the DI tokens and services the auth layer depends on.
@NgModule({
  imports: [
    CommonModule,
    NbLayoutModule,
    NbCardModule,
    NbCheckboxModule,
    RouterModule.forChild(routes),
    FormsModule,
    HttpClientModule,
  ],
  declarations: [
    NgxAuthComponent,
    NgxAuthBlockComponent,
    NgxLoginComponent,
    NgxRegisterComponent,
    NgxRequestPasswordComponent,
    NgxResetPasswordComponent,
    NgxLogoutComponent,
  ],
  exports: [
    NgxAuthComponent,
    NgxAuthBlockComponent,
    NgxLoginComponent,
    NgxRegisterComponent,
    NgxRequestPasswordComponent,
    NgxResetPasswordComponent,
    NgxLogoutComponent,
  ],
})
export class NbAuthModule {
  // Call once from the application root so the auth services are singletons;
  // user options are deep-merged over the defaults via nbOptionsFactory.
  static forRoot(nbAuthOptions?: NbAuthOptions): ModuleWithProviders {
    return <ModuleWithProviders> {
      ngModule: NbAuthModule,
      providers: [
        { provide: NB_AUTH_USER_OPTIONS_TOKEN, useValue: nbAuthOptions },
        { provide: NB_AUTH_OPTIONS_TOKEN, useFactory: nbOptionsFactory, deps: [NB_AUTH_USER_OPTIONS_TOKEN] },
        { provide: NB_AUTH_PROVIDERS_TOKEN, useValue: {} },
        { provide: NB_AUTH_TOKEN_WRAPPER_TOKEN, useClass: NbAuthToken },
        { provide: NB_AUTH_INTERCEPTOR_HEADER, useValue: 'Authorization' },
        {
          provide: NbAuthService,
          useFactory: nbAuthServiceFactory,
          deps: [NB_AUTH_OPTIONS_TOKEN, NbTokenService, Injector],
        },
        NbTokenService,
        NbAuthProvider,
      ],
    };
  }
}
|
def max_value(x, y, z):
    """Return the largest of the three arguments.

    Fix: the original implementation used a local variable named ``max``,
    shadowing the built-in; delegating to the built-in is safer and clearer
    while preserving the exact comparison semantics for three arguments.
    """
    return max(x, y, z)
import styles from './ReidVideoPlayer.css';
import PropTypes from 'prop-types'
// Reusable push button; background colour defaults to 'steelblue' via
// defaultProps, label text and click handler come from props.
const Button = ({color,text,onClick}) => {
    return <button
    onClick = {onClick}
    style={{backgroundColor: color
    }}
    className = {styles.btn}>{text}</button>
}
Button.defaultProps = {
    color: 'steelblue'
}
Button.propTypes = {
    text: PropTypes.string,
    color: PropTypes.string,
    onClick: PropTypes.func,
}
export default Button
<filename>src/main/java/com/shop/dao/solr/ItemSearchDaoImpl.java
package com.shop.dao.solr;
import com.shop.been.SolrResult;
import com.shop.model.merchant.Item;
import com.shop.model.merchant.Merchant;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* <p>Description:</p>
*
* @Author 姚洪斌
* @Date 2017/9/18 17:05
*/
/**
 * Solr-backed item search DAO: runs the given query and maps the matching
 * documents (plus any highlighting snippets) onto {@link Item} beans.
 */
@Repository
public class ItemSearchDaoImpl implements ItemSearchDao {
    @Autowired
    private SolrClient solrClient ;
    /**
     * Execute {@code solrQuery} against Solr and convert the response.
     *
     * @param solrQuery fully-built query (pagination, highlighting, ...)
     * @return SolrResult carrying the total hit count and the mapped items;
     *         never null (an empty result is returned on error)
     */
    public SolrResult searchItemByName(SolrQuery solrQuery) {
        SolrResult<Item> solrResult = new SolrResult<Item>();
        try {
            // Run the query and fetch the response.
            QueryResponse queryResponse = solrClient.query(solrQuery);
            // Result set from the response.
            SolrDocumentList solrDocuments = queryResponse.getResults();
            List<Item> items = new ArrayList<Item>();
            if (solrDocuments != null) {
                // Total number of matching records (not just this page).
                solrResult.setResultCount(solrDocuments.getNumFound());
                // Bug fix: the original called highlighting.size() (leftover
                // debug printlns) BEFORE the null check, throwing an NPE
                // whenever the response carried no highlighting section.
                // The map is also invariant per response, so fetch it once
                // outside the document loop.
                Map<String, Map<String, List<String>>> highlighting = queryResponse.getHighlighting();
                for (SolrDocument solrDocument : solrDocuments) {
                    Item item = new Item();
                    // The id field is stored as a String; it cannot be cast
                    // to int directly, so parse it.
                    item.setItemId(Integer.parseInt((String) solrDocument.get("id")));
                    if (highlighting != null) {
                        // Prefer the highlighted title snippet when present.
                        List<String> stringList = highlighting.get(solrDocument.get("id")).get("item_title");
                        if (stringList != null && stringList.size() > 0) {
                            item.setItemTitle(stringList.get(0));
                        } else {
                            item.setItemTitle((String) solrDocument.get("item_title"));
                        }
                    } else {
                        item.setItemTitle((String) solrDocument.get("item_title"));
                    }
                    item.setImages((String) solrDocument.get("item_image"));
                    item.setPrice((Double) solrDocument.get("item_price"));
                    item.setSales((Integer) solrDocument.get("item_sales"));
                    Merchant merchant = new Merchant();
                    merchant.setMerchantId((Integer) solrDocument.get("merchant_id"));
                    merchant.setMerchantName((String) solrDocument.get("item_merchant"));
                    item.setMerchant(merchant);
                    items.add(item);
                }
            }
            solrResult.setList(items);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return solrResult;
    }
}
|
package org.sonatype.nexus.repository.protop.internal;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import org.sonatype.nexus.repository.Repository;
import java.util.List;
import java.util.function.Supplier;
import static java.util.Arrays.asList;
import static org.sonatype.nexus.repository.protop.internal.ProtopMetadataUtils.*;
/**
* Simple factory class for providing handlers that are common for manipulating protop JSON Fields.
*
* @since 3.16
*/
public class ProtopFieldFactory {
  // Deserializer that drops the matched field entirely (returns null).
  public static final ProtopFieldDeserializer NULL_DESERIALIZER = new ProtopFieldDeserializer() {
    @Override
    public Object deserialize(final String fieldName,
                              final Object defaultValue,
                              final JsonParser parser,
                              final DeserializationContext context,
                              final JsonGenerator generator) {
      return null;
    }
  };
  // Matchers that strip the CouchDB-style _id/_rev bookkeeping fields.
  public static final ProtopFieldMatcher REMOVE_ID_MATCHER = removeFieldMatcher(META_ID, "/" + META_ID);
  public static final ProtopFieldMatcher REMOVE_REV_MATCHER = removeFieldMatcher(META_REV, "/" + META_REV);
  public static final List<ProtopFieldMatcher> REMOVE_DEFAULT_FIELDS_MATCHERS = asList(REMOVE_ID_MATCHER,
      REMOVE_REV_MATCHER);
  private ProtopFieldFactory() {
    // factory constructor
  }
  // Matches fieldName at pathRegex and deletes it via NULL_DESERIALIZER.
  private static ProtopFieldMatcher removeFieldMatcher(final String fieldName, final String pathRegex) {
    return new ProtopFieldMatcher(fieldName, pathRegex, NULL_DESERIALIZER);
  }
  /** Matcher that injects a supplier-provided value when the field is absent. */
  public static ProtopFieldUnmatcher missingFieldMatcher(final String fieldName,
                                                         final String pathRegex,
                                                         final Supplier<Object> supplier) {
    return new ProtopFieldUnmatcher(fieldName, pathRegex, missingFieldDeserializer(supplier));
  }
  /** Deserializer whose value is produced lazily by {@code supplier}. */
  public static ProtopFieldDeserializer missingFieldDeserializer(final Supplier<Object> supplier) {
    ProtopFieldDeserializer deserializer = new ProtopFieldDeserializer() {
      @Override
      public Object deserializeValue(final Object defaultValue) {
        return supplier.get();
      }
    };
    return deserializer;
  }
  /** Convenience for injecting a missing _rev field. */
  public static ProtopFieldUnmatcher missingRevFieldMatcher(final Supplier<Object> supplier) {
    return missingFieldMatcher(META_REV, "/" + META_REV, supplier);
  }
  public static ProtopFieldMatcher rewriteTarballUrlMatcher(final Repository repository, final String packageId) {
    return rewriteTarballUrlMatcher(repository.getName(), packageId);
  }
  /** Matcher rewriting each version's dist/tarball URL to point at this repository. */
  public static ProtopFieldMatcher rewriteTarballUrlMatcher(final String repositoryName, final String packageId) {
    return new ProtopFieldMatcher("tarball", "/versions/(.*)/dist/tarball",
        rewriteTarballUrlDeserializer(repositoryName, packageId));
  }
  public static ProtopFieldDeserializer rewriteTarballUrlDeserializer(final String repositoryName,
                                                                      final String packageId) {
    return new ProtopFieldDeserializer() {
      @Override
      public Object deserializeValue(final Object defaultValue) {
        return rewriteTarballUrl(repositoryName, packageId, super.deserializeValue(defaultValue).toString());
      }
    };
  }
}
|
#!/bin/bash
# Install Argo CD (pinned version, non-HA manifest) into the argocd namespace.
# trap 'echo "${BASH_SOURCE[0]}: line ${LINENO}: status ${?}: user ${USER}: func ${FUNCNAME[0]}"' ERR
# set -o errexit
# set -o errtrace
ARGOCD_VERSION="v2.1.2"
kubectl create namespace argocd
### Non HA
# kubectl apply --namespace argocd --filename https://raw.githubusercontent.com/argoproj/argo-cd/stable/manifests/install.yaml
kubectl apply --namespace argocd --filename https://raw.githubusercontent.com/argoproj/argo-cd/${ARGOCD_VERSION}/manifests/install.yaml
### HA
# kubectl apply --namespace argocd --filename https://raw.githubusercontent.com/argoproj/argo-cd/stable/manifests/ha/install.yaml
# kubectl apply --namespace argocd --filename https://raw.githubusercontent.com/argoproj/argo-cd/${ARGOCD_VERSION}/manifests/ha/install.yaml
// Fix: the original placed #include <cmath> AFTER the class definition, so
// `sqrt` was used before being declared inside Point::distance. The include
// now precedes the class, and the call is qualified as std::sqrt.
#include <cmath>

// Simple 2-D point with Euclidean distance.
class Point {
public:
    float x, y;
    Point(float xVal, float yVal) :x(xVal), y(yVal) {}
    // Euclidean distance from this point to `other`.
    float distance(Point &other) const {
        float dx = other.x - x;
        float dy = other.y - y;
        return std::sqrt(dx * dx + dy * dy);
    }
};
class GeneticTree():
    """Registry of genetic-tree members: a square pairwise-distance matrix,
    a node slot per index, and a pool of recycled indexes."""
    def __init__(self):
        # Square matrix of pairwise genetic distances, grown lazily.
        self.genetic_matrix = []
        # Indexes released by deceased members, available for reuse.
        self.free_indexes = []
        # Node object (or "x" placeholder) stored per matrix index.
        self.nodes = []
    def get_index(self):
        """Hand out a matrix index, recycling released slots first."""
        if not self.free_indexes:
            # No recycled slot: grow the matrix by one row and reserve a
            # placeholder node position; the new last index is returned.
            self.genetic_matrix.append([])
            self.nodes.append("x")
            return len(self.genetic_matrix) - 1
        return self.free_indexes.pop()
    def get_distance(self, index_1, index_2):
        """Pairwise genetic distance between two registered indexes."""
        return self.genetic_matrix[index_1][index_2]
class Node():
    """A member of the genetic tree. On construction it claims an index from
    the species (a GeneticTree) and fills in its row/column of the species'
    pairwise genetic-distance matrix."""
    def __init__(self, parent_1, parent_2, species):
        self.index = species.get_index()
        # References to the parents' Node objects ((None, None) for founders).
        self.parents = (parent_1, parent_2)
        self.species = species
        self.update_matrix()
        self.species.nodes[self.index] = self
        self.agent = None
    def get_parents(self):
        """Return the parents as a list; founders report an empty list."""
        if None in self.parents:
            return []
        return list(self.parents)
    def update_matrix(self):
        """Compute and store this node's distance to every registered node."""
        lineage = self.get_lineage()
        # Decide whether to append new columns or overwrite existing cells.
        is_new_colum = self.index == len(self.species.genetic_matrix)-1
        for i in range(0, len(self.species.genetic_matrix)):
            if self.index != i:
                # NOTE(review): flagged for rework in the original
                # ("VAMOS A CAMBIAR ESTO, CUIDAO").
                distance = self.get_distance(self.species.nodes[i], lineage)
            else:
                distance = 1  # distance to self
            if is_new_colum:
                if i == self.index:
                    self.species.genetic_matrix[i].append(distance)
                else:
                    self.species.genetic_matrix[i].append(distance)
                    self.species.genetic_matrix[self.index].append(distance)
            else:
                self.species.genetic_matrix[i][self.index] = distance
                self.species.genetic_matrix[self.index][i] = distance
    def get_distance(self, agent, self_lineage):
        """Genetic distance between this node and `agent`, given this node's
        ancestor generations in `self_lineage`."""
        # Bug fix: the original tested `if not self.get_parents:` -- a bound
        # method object is always truthy, so the founder short-circuit never
        # fired. The method must be *called*.
        if not self.get_parents():
            return 0
        lineage_index = agent.get_lineage()
        distances = []  # several kinship matches may occur; keep the best one
        self_distance = 1
        distance_index = 1
        # Direct ancestry: is `agent` among our ancestors?
        for i in range(0, len(self_lineage)):
            if agent in self_lineage[i]:
                return (1/(2**(i+1)))
        # Indirect kinship: count shared ancestors generation by generation.
        for i in self_lineage:
            for j in lineage_index:
                aux_distance = len([ x for x in i if x in j ])  # shared ancestors
                if aux_distance == 1:
                    distances.append(1/(2**(self_distance+distance_index)))
                elif aux_distance > 1:
                    # More than one match: assume 2 common relatives, so the
                    # distance doubles (exponent reduced by one).
                    distances.append(1/(2**(self_distance+distance_index-1)))
                distance_index +=1
            distance_index = 1
            self_distance +=1
        if distances:
            return max(distances)
        # No entry in `distances` means the two nodes are unrelated.
        return 0
    """
    Using these 2 methods we obtain a list with lists of parents,
    grandparents, great-grandparents and great-great-grandparents.
    """
    def get_lineage(self):  # -1 = self, 0 = parents, 1 = grandparents, 2 = great-grandparents, 3 = great-great-grandparents
        if self.parents[0] == None:
            return []
        generation = 0
        lineage = []
        lineage.append(self.get_parents())
        while generation < 3:
            auxlist = self.get_direct_lineage(lineage[generation])
            if auxlist:
                lineage.append(auxlist)
                generation+=1
            else:
                generation = 8000  # no further ancestors: force loop exit
        return lineage
    def get_direct_lineage(self, descendents):
        """Collect the parents of every node in `descendents`."""
        direct_lineage = []
        for i in descendents:
            direct_lineage += i.get_parents()
        return direct_lineage
    def muerteFamiliar(self):
        """Remove this (deceased) member: zero its matrix row/column and
        release its index for reuse."""
        for i in range(0, len(self.species.genetic_matrix)):
            self.species.genetic_matrix[i][self.index] = 0
            self.species.genetic_matrix[self.index][i] = 0
        self.species.nodes[self.index] = None
        self.species.free_indexes.append(self.index)
class VegetableNode:
    """Minimal node for vegetables: no lineage, only a species reference."""

    def __init__(self, species):
        # Vegetables carry no parents, so only the species is stored.
        self.species = species
#_## PRUEBAS
#_#m1= [[1, 0, 0, 0.5, 0, 0, 0.25, 0.125],
#_# [0, 1, 0, 0.5, 0.5, 0.5, 0.25, 0.25],
#_# [0, 0, 1, 0, 0.5, 0.5, 0.25, 0.25],
#_# [0.5, 0.5, 0, 1, 0.25, 0.25, 0.5, 0.25],
#_# [0, 0.5, 0.5, 0.25, 1, 0.5, 0.5, 0.25],
#_# [0, 0.5, 0.5, 0.25, 0.5, 1, 0.25, 0.5],
#_# [0.25, 0.25, 0.25, 0.5, 0.5, 0.25, 1, 0.5],
#_# [0.125, 0.25, 0.25, 0.25, 0.25, 0.5, 0.5, 1]]
#_#m2= [[1, 0, 0, 0, 0, 0, 0.25, 0.125],
#_#[0, 1, 0, 0, 0.5, 0.5, 0.25, 0.25],
#_# [0, 0, 1, 0, 0.5, 0.5, 0.25, 0.25],
#_# [0, 0, 0, 0, 0, 0, 0, 0],
#_# [0, 0.5, 0.5, 0, 1, 0.5, 0.5, 0.25],
#_# [0, 0.5, 0.5, 0, 0.5, 1, 0.25, 0.5],
#_# [0.25, 0.25, 0.25, 0, 0.5, 0.25, 1, 0.5],
#_# [0.125, 0.25, 0.25, 0, 0.25, 0.5, 0.5, 1]]
#_#m3= [[1, 0, 0, 0.125, 0, 0, 0.25, 0.125],
#_# [0, 1, 0, 0.25, 0.5, 0.5, 0.25, 0.25],
#_# [0, 0, 1, 0.25, 0.5, 0.5, 0.25, 0.25],
#_# [0.125, 0.25, 0.25, 1, 0.25, 0.5, 0.5, 0.5],
#_# [0, 0.5, 0.5, 0.25, 1, 0.5, 0.5, 0.25],
#_# [0, 0.5, 0.5, 0.5, 0.5, 1, 0.25, 0.5],
#_# [0.25, 0.25, 0.25, 0.5, 0.5, 0.25, 1, 0.5],
#_# [0.125, 0.25, 0.25, 0.5, 0.25, 0.5, 0.5, 1]]
#_#species = GeneticTree()
#_#pepe = Node(None,None,species)
#_#pepa = Node(None,None,species)
#_#pepo = Node(None,None,species)
#_#pipi = Node(pepe,pepa,species)
#_#popo = Node(pepo,pepa,species)
#_#popo2 = Node(pepo,pepa,species)
#_#pepin = Node(pipi,popo,species)
#_#pepinio = Node(pepin,popo2,species)
#_#print (species.genetic_matrix)
#_#if species.genetic_matrix == m1:
#_# print("SUCCESSFUL")
#_#pipi.muerteFamiliar()
#_#print (species.genetic_matrix)
#_#if species.genetic_matrix == m2:
#_# print("SUCCESSFUL")
#_#pepinio2 = Node(pepin,popo2,species)
#_#print (species.genetic_matrix)
#_#if species.genetic_matrix == m3:
#_# print("SUCCESSFUL")
# [0.125, 0.25, 0.25, 1, 0.25, 0.5, 0.5, 0.5, ]]
# [0.125, 0.25, 0.25, 1, 0.25, 0.5, 0.5, 0.5]
#pandoc planteamientoTFG.md --latex-engine=xelatex -o planteamientoTFG.pdf
|
#!/bin/bash
# Create the Azure resource group (if missing) and deploy main.bicep into it.
RESOURCE_GROUP="ca-kw-2"
LOCATION="northeurope"
ENVIRONMENTNAME="ca-kw-2"

# FIX: quote the command substitution — if `az` fails and prints nothing,
# the unquoted form `[ = false ]` is a test syntax error.
if [ "$(az group exists --name "$RESOURCE_GROUP")" = false ]; then
    az group create --name "$RESOURCE_GROUP" --location "$LOCATION"
fi

az deployment group create --resource-group "$RESOURCE_GROUP" \
    --template-file main.bicep \
    --parameters environmentName="$ENVIRONMENTNAME"
|
#!/usr/bin/env bash
set -euo pipefail
# Install dependencies into ./build (rebuilding outdated binaries) with the
# clang x86_64 profile for both the host (-s/-pr) and build (-s:b/-pr:b)
# contexts, forcing Ninja as the CMake generator, then build the project.
conan install . -if build --build outdated \
-s build_type=Release -pr .profile/clang_x86_64_mt \
-s:b build_type=Release -pr:b .profile/clang_x86_64_mt \
-e CONAN_CMAKE_GENERATOR=Ninja \
-c tools.cmake.cmaketoolchain:generator=Ninja
conan build . -bf build
|
#!/bin/bash
# Always sync to ../data, relative to this script.
SCRIPT_DIR="$(cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd)"
# FIX: quote expansions so paths containing spaces do not word-split.
PROJECT_DIR="$(dirname "${SCRIPT_DIR}")"
aws s3 sync s3://shwang-chai/public/data/ "${PROJECT_DIR}/data"
|
import javax.swing.*;
import java.awt.*;
/**
 * Demo frame holding a label, a text field and a button in a FlowLayout.
 */
public class JFrameWithComponents extends JFrame
{
    JLabel label = new JLabel("Name?");
    JTextField field = new JTextField(12);
    JButton button = new JButton("OK");

    /** Builds the frame and adds its three components. */
    public JFrameWithComponents()
    {
        super("Frame with Components");
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setLayout(new FlowLayout());
        add(label);
        add(field);
        add(button);
    }

    /**
     * Entry point. FIX: the original main only printed a message and never
     * constructed the frame, so the window was never shown. Swing components
     * must be created on the Event Dispatch Thread.
     */
    public static void main(String[] args) {
        System.out.println("Running...");
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                JFrameWithComponents frame = new JFrameWithComponents();
                frame.pack();
                frame.setVisible(true);
            }
        });
    }
}
<reponame>AlixThoi/Ember-Facial-Recognition<filename>app/adapters/mcs-train-group-request.js
export { default } from 'ember-facial-recognition/adapters/mcs-train-group-request';
|
# Absolute path of the directory containing this script.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"

# Build the RStudio/Shiny dev image, then run it detached on port 80 with
# this directory mounted as the rstudio user's home.
docker build -f Dockerfile.rstudio -t outlierbio/shiny-dev .
docker run \
  --rm \
  -d \
  -p 80:8787 \
  -v "$DIR:/home/rstudio" \
  outlierbio/shiny-dev
# FIX: "$DIR" is quoted in the -v mount so paths with spaces do not split.
|
<reponame>c-hive/team-contribution-calendar<gh_stars>1-10
import * as getStyledCalendarElement from "../GetStyledCalendarElement/GetStyledCalendarElement";
import elementIds from "../../resources/ElementIds/ElementIds";
// Render and position the shared tooltip for the calendar square that the
// cursor just entered.
const showTooltip = event => {
  const day = event.target;

  // Contribution metadata stamped on the hovered <rect>.
  const count = Number(day.getAttribute("data-count"));
  const date = day.getAttribute("data-date");

  // Fill the shared tooltip element with the formatted text.
  const tooltip = document.getElementById(elementIds.TOOLTIP);
  tooltip.appendChild(
    getStyledCalendarElement.contributionsWithDateText(count, date)
  );

  // Show the tooltip centered horizontally, 35px above the hovered square.
  const dayRect = day.getBoundingClientRect();
  tooltip.style.display = "block";
  tooltip.style.top = `${dayRect.top - 35}px`;
  tooltip.style.left = `${dayRect.left - tooltip.clientWidth / 2}px`;
};
// Hide the shared tooltip and remove its previously rendered content.
const hideTooltip = () => {
  const tooltipElement = document.getElementById(elementIds.TOOLTIP);

  // Nothing rendered yet (e.g. a stray mouseleave): nothing to clean up.
  if (tooltipElement.childNodes.length === 0) {
    return;
  }

  tooltipElement.style.display = "none";
  tooltipElement.style.top = "0px";
  // FIX: was "0px;" — the trailing semicolon inside the string makes the
  // value invalid CSS, so the browser silently ignored the reset.
  tooltipElement.style.left = "0px";

  tooltipElement.removeChild(tooltipElement.childNodes[0]);
};
// Wire the tooltip show/hide handlers onto every calendar day square.
export const addEventsToRectElements = () => {
  const rects = Array.from(document.getElementsByTagName("rect"));
  for (const rect of rects) {
    rect.addEventListener("mouseenter", showTooltip);
    rect.addEventListener("mouseleave", hideTooltip);
  }
};
|
<gh_stars>1-10
import settings from '../../../config';
import * as admin from 'firebase-admin';
// Load the service-account credentials from the path given in config.
const serviceAccount = require(settings.firebase.serviceAccount);

// Initialise the firebase-admin SDK once at module load.
admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  databaseURL: settings.firebase.databaseURL,
});
// Fetch the Firebase Auth user record for the given uid.
export const getUser = async (uid) => {
  return admin.auth().getUser(uid);
};
|
# Play an MP3 on loop with pygame's mixer until the user presses Enter.
from pygame import mixer
print()
print('=' * 40)
mixer.init()
# NOTE(review): absolute, machine-specific path — consider making it relative
# or configurable.
mixer.music.load("C:/Users/berse/OneDrive/Documentos/MeusProjetos/desafios-de-python/desafios resolvidos/Young Love.mp3")
mixer.music.play(-1)  # -1 = loop indefinitely
input('Aperte qualquer tecla para parar: ')
print()
print('=' * 40)
print()
|
/* Abort compilation unless the test-data directory was provided by CMake. */
#ifndef PY4DGEO_TEST_DATA_DIRECTORY
#error Test data directory needs to be set from CMake
#endif

/* Build the absolute path of a file inside the test-data directory. */
#define DATAPATH(filename) (std::string(PY4DGEO_TEST_DATA_DIRECTORY) + "/" + filename)
# Reformat a date string like "19 December 2020" into ISO "YYYY-MM-DD".
# NOTE(review): assumes `date` and the `datetime` import are defined earlier
# in the file — confirm against the surrounding code.
formatted_date = datetime.datetime.strptime(date, '%d %B %Y').strftime('%Y-%m-%d')
print(formatted_date) # 2020-12-19
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shFlags unit test suite runner.
#
# This script runs all the unit tests that can be found, and generates a nice
# report of the tests.

MY_NAME=`basename $0`
MY_PATH=`dirname $0`

PREFIX='shflags_test_'
SHELLS='/bin/sh /bin/bash /bin/dash /bin/ksh /bin/pdksh /bin/zsh'
TESTS=''
# Collect every shflags_test_*.sh file in the current directory.
for test in ${PREFIX}[a-z]*.sh; do
  TESTS="${TESTS} ${test}"
done

# load libraries
. ../lib/versions
. ./shflags_test_helpers

# Print usage information for this runner.
usage()
{
  echo "usage: ${MY_NAME} [-e key=val ...] [-s shell(s)] [-t test(s)]"
}

env=''

# process command line flags
while getopts 'e:hs:t:' opt; do
  case ${opt} in
    e)  # set an environment variable
      # Split "key=value" and require both halves to be non-empty.
      key=`expr "${OPTARG}" : '\([^=]*\)='`
      val=`expr "${OPTARG}" : '[^=]*=\(.*\)'`
      if [ -z "${key}" -o -z "${val}" ]; then
        usage
        exit 1
      fi
      eval "${key}='${val}'"
      export ${key}
      env="${env:+${env} }${key}"
      ;;
    h) usage; exit 0 ;;  # help output
    s) shells=${OPTARG} ;;  # list of shells to run
    t) tests=${OPTARG} ;;  # list of tests to run
    *) usage; exit 1 ;;
  esac
done
shift `expr ${OPTIND} - 1`

# fill shells and/or tests
shells=${shells:-${SHELLS}}
tests=${tests:-${TESTS}}

# error checking
if [ -z "${tests}" ]; then
  th_error 'no tests found to run; exiting'
  exit 1
fi

# Report header: the shells and suites about to be exercised.
cat <<EOF
#------------------------------------------------------------------------------
# System data
#
# test run info
shells="${shells}"
tests="${tests}"
EOF
# Echo any caller-supplied environment overrides so they appear in the report.
for key in ${env}; do
  eval "echo \"${key}=\$${key}\""
done
echo

# output system data
echo "# system info"
echo "$ date"
date
echo "$ uname -mprsv"
uname -mprsv

#
# run tests
#

for shell in ${shells}; do
  echo
  cat <<EOF
#------------------------------------------------------------------------------
# Running the test suite with ${shell}
#
EOF
  # check for existance of shell
  if [ ! -x ${shell} ]; then
    th_warn "unable to run tests with the ${shell} shell"
    continue
  fi
  shell_name=`basename ${shell}`
  shell_version=`versions_shellVersion "${shell}"`
  echo "shell name: ${shell_name}"
  echo "shell version: ${shell_version}"
  # execute the tests
  for suite in ${tests}; do
    suiteName=`expr "${suite}" : "${PREFIX}\(.*\).sh"`
    echo
    echo "--- Executing the '${suiteName}' test suite ---"
    # Run each suite in a subshell so a failing suite cannot kill the runner.
    ( exec ${shell} ./${suite} 2>&1; )
  done
done
|
<filename>spec/classes/server/firewall_spec.rb
require 'spec_helper'

# Testing private nfs::server::firewall class via nfs class
describe 'nfs' do
  describe 'private nfs::server::firewall' do
    on_supported_os.each do |os, os_facts|
      context "on #{os}" do
        let(:facts) {
          # to workaround service provider issues related to masking haveged
          # when tests are run on GitLab runners which are docker containers
          os_facts.merge( { :haveged__rngd_enabled => false } )
        }

        # Test matrix: stunnel {on, off} x nfsv3 {on, client-only, off}.
        context 'when stunnel enabled' do
          context 'when nfsv3 enabled' do
            let(:params) { {
              # nfs class params
              :is_server => true,
              :nfsv3 => true,
              :firewall => true,
              :stunnel => true
            }}

            it { is_expected.to compile.with_all_deps }
            it { is_expected.to create_class('nfs::server::firewall') }
            it { is_expected.to create_class('nfs::server::firewall::nfsv3and4') }
            it { is_expected.to_not create_class('nfs::server::firewall::nfsv4') }
          end

          context 'when nfsv3 only enabled for the NFS client' do
            let(:hieradata) { 'nfs_nfsv3_and_not_nfs_server_nfsd_vers3' }
            let(:params) { {
              # nfs class params
              :firewall => true,
              :stunnel => true
            }}

            it { is_expected.to compile.with_all_deps }
            it { is_expected.to create_class('nfs::server::firewall') }
            it { is_expected.to_not create_class('nfs::server::firewall::nfsv3and4') }
            it { is_expected.to_not create_class('nfs::server::firewall::nfsv4') }
          end

          context 'when nfsv3 disabled' do
            let(:params) { {
              # nfs class params
              :is_server => true,
              :nfsv3 => false,
              :firewall => true,
              :stunnel => true
            }}

            it { is_expected.to compile.with_all_deps }
            it { is_expected.to create_class('nfs::server::firewall') }
            it { is_expected.to_not create_class('nfs::server::firewall::nfsv3and4') }
            it { is_expected.to_not create_class('nfs::server::firewall::nfsv4') }
          end
        end

        context 'when stunnel disabled' do
          context 'when nfsv3 enabled' do
            let(:params) { {
              # nfs class params
              :is_server => true,
              :nfsv3 => true,
              :firewall => true,
              :stunnel => false
            }}

            it { is_expected.to compile.with_all_deps }
            it { is_expected.to create_class('nfs::server::firewall::nfsv3and4') }
            it { is_expected.to_not create_class('nfs::server::firewall::nfsv4') }
          end

          context 'when nfsv3 only enabled for the NFS client' do
            let(:hieradata) { 'nfs_nfsv3_and_not_nfs_server_nfsd_vers3' }
            let(:params) { {
              # nfs class params
              :firewall => true,
              :stunnel => false
            }}

            it { is_expected.to compile.with_all_deps }
            it { is_expected.to_not create_class('nfs::server::firewall::nfsv3and4') }
            it { is_expected.to create_class('nfs::server::firewall::nfsv4') }
          end

          context 'when nfsv3 disabled' do
            let(:params) { {
              # nfs class params
              :is_server => true,
              :nfsv3 => false,
              :firewall => true,
              :stunnel => false
            }}

            it { is_expected.to compile.with_all_deps }
            it { is_expected.to_not create_class('nfs::server::firewall::nfsv3and4') }
            it { is_expected.to create_class('nfs::server::firewall::nfsv4') }
          end
        end
      end
    end
  end
end
|
#!/bin/bash
# Fail fast on errors and on use of undefined variables.
set -eu

# Absolute path of the directory containing this script.
SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
# contains(string, substring)
#
# Exit status 0 when $1 contains $2, 1 otherwise.
contains() {
  haystack="$1"
  needle="$2"
  # Stripping the shortest "*needle" prefix changes the string iff it matched.
  if test "${haystack#*$needle}" != "$haystack"; then
    return 0  # $needle found in $haystack
  else
    return 1  # $needle not found
  fi
}
# Require the Heroku CLI.
if ! command -v heroku >/dev/null 2>&1; then
  echo "The Heroku CLI ('heroku') could not be found and must be installed." 1>&2
  exit 1
fi

# Required configuration.
# FIX: under `set -u`, referencing an unset variable aborts with an
# "unbound variable" error before the friendly message below can print;
# ${VAR:-} makes the emptiness checks safe.
if [ -z "${GIT_EMAIL:-}" ]; then
  echo "The environment variable GIT_EMAIL is required." 1>&2
  exit 1
fi

if [ -z "${GIT_NAME:-}" ]; then
  echo "The environment variable GIT_NAME is required." 1>&2
  exit 1
fi

if [ -z "${HEROKU_AUTH_EMAIL:-}" ]; then
  echo "The environment variable HEROKU_AUTH_EMAIL is required." 1>&2
  exit 1
fi

if [ -z "${HEROKU_AUTH_TOKEN:-}" ]; then
  echo "The environment variable HEROKU_AUTH_TOKEN is required." 1>&2
  exit 1
fi

if [ -z "${HEROKU_APP_NAME:-}" ]; then
  echo "The environment variable HEROKU_APP_NAME is required." 1>&2
  exit 1
fi

app_name="$HEROKU_APP_NAME"
app_buildpack="${HEROKU_APP_BUILDPACK:-heroku/nodejs}"

# login (interactively or automatically with Service Principal)
source $SCRIPT_DIR/login.sh

# Create the app only when it does not exist yet.
app_exists_output=$(heroku apps:info --app="$app_name" 2>&1 || true)
if contains "$app_exists_output" "Couldn't find"; then
  echo 'Creating a new application...'
  heroku apps:create \
    "$app_name" \
    --buildpack="$app_buildpack"
  echo 'Successfully created a new application.'
else
  echo 'Application already exists.'
fi

################################################################################
# Heroku works with git to sync changes to the cloud. For example, a production
# application might have it's own repository and the DevOps CI/CD pipeline might
# use the URL to that repository for the Heroku deployments.
#
# So, we're going to simulate a new git repo on every deploy to force Heroku
# to synchronize every time by initializing git and committing all of the
# "/app" files to it.
echo ''
echo 'Initializing empty git repository for Heroku deployment.'
echo 'See notes for why this is necessary.'
echo ''
git config --global user.email "$GIT_EMAIL"
git config --global user.name "$GIT_NAME"
git init
git add .
git commit -m 'feat: hello-cloud, heroku style'
echo ''
echo 'Successfully initialized git repository for Heroku deployment.'
################################################################################
echo 'Pushing application git repository to Heroku deployment.'
git push "https://git.heroku.com/$app_name.git" HEAD:main
|
#!/bin/bash
# Build the Home Assistant frontend panel and stage the (gzipped) artifacts
# into ../hassio/api/panel.
set -e

# install nvm
curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.2/install.sh | bash
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"

# build frontend
cd home-assistant-polymer
nvm install
nvm use
yarn
./node_modules/.bin/bower install --allow-root
npm install -g gulp-cli
gulp hassio-es5

# prepare data
# Keep both the plain file (-k) and a max-compression (-9) gzip copy.
mkdir -p ../hassio/api/panel
cp -f build-hassio/index.html ../hassio/api/panel/index.html
gzip -f -k -9 ../hassio/api/panel/index.html
cp -f build-hassio/hassio-app.html ../hassio/api/panel/hassio-app.html
gzip -f -k -9 ../hassio/api/panel/hassio-app.html
|
// AMD module bundling jquery.validate custom rules and per-form validation
// setup for the sign-in, registration and settings forms.
define(["jquery","jquery.validate","store"],function($,va,store){
    return {
        // Rule "userName": CJK characters, letters, digits and underscore only.
        userName:function(){
            $.validator.addMethod("userName", function(value, element) {
                return this.optional(element) || /^[\u0391-\uFFE5\w]+$/.test(value);
            }, "用户名只能包括中文字、英文字母、数字和下划线");
        },
        // Rule "isMobile": 11-digit Chinese mobile number (13x/15x prefixes).
        isPhone:function(){
            $.validator.addMethod("isMobile", function(value, element) {
                var length = value.length;
                var mobile = /^(((13[0-9]{1})|(15[0-9]{1}))+\d{8})$/;
                return this.optional(element) || (length == 11 && mobile.test(value));
            }, "请正确填写您的手机号码");
        },
        // Install $.format: substitutes {0}, {1}, ... placeholders; when called
        // with only a template it returns a deferred formatter function.
        format:function(){
            $.format = function (source, params) {
                if (arguments.length == 1)
                    return function () {
                        var args = $.makeArray(arguments);
                        args.unshift(source);
                        return $.format.apply(this, args);
                    };
                if (arguments.length > 2 && params.constructor != Array) {
                    params = $.makeArray(arguments).slice(1);
                }
                if (params.constructor != Array) {
                    params = [params];
                }
                $.each(params, function (i, n) {
                    source = source.replace(new RegExp("\\{" + i + "\\}", "g"), n);
                });
                return source;
            };
        },
        // Rule "isPassword": optional trim, tag stripping and minimum length.
        // NOTE: relies on $.format already being installed (validate() calls
        // this.format() first).
        isPassword:function(){
            $.validator.addMethod("isPassword", function(value, element,params) {
                // defaults: {trim:true, minLength:-1, maxLength:-1}
                params = $.extend([true,-1,-1],params); // support default parameters
                if(params[0]){ // strip leading/trailing whitespace
                    value=$.trim(value);
                }
                value = value.replace(/<(?:.)*?>/g,""); // strip tags before validating
                return this.optional(element) || ((params[1]<0 || value.length>=params[1]));
            }, jQuery.format("长度不能小于{1}个字符"));
        },
        // Register all custom rules, then attach validation (and the AJAX
        // submit handler) to the named form: signForm, regForm or settingForm.
        validate:function(form){
            this.userName();
            this.isPhone();
            this.format();
            this.isPassword();
            var formName = form;
            if(form == "signForm"){
                $("#signForm").validate({
                    errorElement:"em",
                    debug:true,
                    rules:{
                        username:{
                            required:true,
                            userName:true
                        },
                        password:{
                            required: true,
                            minlength: 6
                        }
                    },
                    messages:{
                        username:{
                            required:"请输入用户名"
                        },
                        password: {
                            required: "请输入密码",
                            minlength: $.format("密码不能小于{0}个字符")
                        }
                    },
                    // Log in via AJAX, persist the user in local storage and
                    // redirect according to user type (sick/doctor).
                    submitHandler:function(form){
                        $("#name").val($("#username").val());
                        var username = $("#username").val(),pass = $("#password").val();
                        if(username != "" && pass != ""){
                            $.ajax({
                                type:"post",
                                url: "/user/login",
                                dataType: "json",
                                data:$("#"+formName).serialize(),
                                success: function(d){
                                    if(d.success){
                                        var d = d.content || d.msg;
                                        store.set('user', { type:d.type,id: d.id,doctor_id:d.doctor_id});
                                        if(d.url){
                                            url = d.url;
                                        }else{
                                            if(d.type == "sick"){
                                                url = "/gakf/inpatientSick.html";
                                            }else if(d.type == "doctor"){
                                                url = "/gakf/sick.html";
                                            }
                                        }
                                        window.location.href = url;
                                    }else{
                                        alert(d.msg);
                                    }
                                },
                                error:function(e){
                                }
                            });
                        }
                    }
                });
            }
            if(form == "regForm"){
                $("#regForm").validate({
                    errorElement: "em",
                    rules:{
                        username:{
                            required:true,
                            userName:true
                        },
                        name:{
                            required:true,
                            userName:true
                        },
                        user_mobile:{
                            required:true,
                            isMobile:true
                        },
                        password:{
                            required: true,
                            isPassword:["true",6]
                        },
                        login_repwd:{
                            required: true,
                            isPassword:["true",6],
                            equalTo: "#password"
                        },
                        doctor_id:{
                            required: true
                        },
                        hospital:{
                            required: true
                        },
                        area:{
                            required: true
                        },
                        room:{
                            required: true
                        },
                        height:{
                            required: true,
                            maxlength:3,
                            min:1,
                            max:250
                        },
                        weight:{
                            required: true,
                            maxlength:3
                        },
                        age:{
                            required: true,
                            min:1,
                            max:150
                        },
                        gender:{
                            required: true
                        },
                        agree:{
                            required:true
                        }
                    },
                    messages:{
                        username:{
                            required:"请输入用户名"
                        },
                        name:{
                            required:"请输入真实姓名"
                        },
                        user_mobile:{
                            required:"请输入正确电话号码"
                        },
                        password: {
                            required: "请输入密码",
                            isPassword: $.format("密码不能小于{1}个字符,不能包含空格,括号,* 等特殊字符")
                        },
                        login_repwd: {
                            required: "请输入确认密码",
                            isPassword: $.format("密码不能小于{1}个字符,不能包含空格,括号,* 等特殊字符"),
                            equalTo: "两次输入密码不一致吆"
                        },
                        doctor_id:{
                            required:"请选择主治医生"
                        },
                        hospital:{
                            required: "请选择医院"
                        },
                        area:{
                            required: "请选择病区"
                        },
                        room:{
                            required: "请选择科室"
                        },
                        height:{
                            required: "请输入身高",
                            maxlength: "请输入正常身高,单位为cm",
                            min:"请输入正常身高,单位为cm",
                            max:"请输入正常身高,单位为cm"
                        },
                        weight:{
                            required: "请输入体重",
                            maxlength: "请输入正常体重,单位为kg"
                        },
                        age:{
                            required: "请输入年龄",
                            min:"请输入正常年龄,单位为岁",
                            max:"请输入正常年龄,单位为岁"
                        },
                        gender:{
                            required: "请选择性别"
                        },
                        agree:{
                            required: "请阅读协议"
                        }
                    },
                    // Checkbox-style inputs get their error after the parent
                    // element; everything else directly after the input.
                    errorPlacement: function(error, element) {
                        if (element.attr("name") == "phone_vali")
                            error.insertAfter($(element).parent());
                        else if(element.attr("name") == "agree")
                            error.insertAfter($(element).parent());
                        else
                            error.insertAfter(element);
                    },
                    // Register via AJAX, persist the user and redirect.
                    submitHandler:function(form){
                        $.ajax({
                            type:"post",
                            url: "/user/reg",
                            dataType: "json",
                            data:$("#"+formName).serialize(),
                            success: function(d){
                                if(d.success){
                                    var dd = d.content;
                                    store.set('user', { type:dd.type,id: dd.id ,doctor_id:dd.doctor_id});
                                    if(dd.url){
                                        url = dd.url;
                                    }else{
                                        if(dd.type == "sick"){
                                            //url = "/gakf/inpatientInfo.html";
                                            url = "/gakf/msg.html?"+encodeURIComponent("doctor_name="+dd.doctor_name);
                                        }
                                    }
                                    window.location.href = url;
                                }else{
                                    alert(d.msg);
                                }
                            },
                            error:function(e){
                            }
                        });
                    }
                });
            }
            //user_id, type, name, new_password, old_password,sign
            if(form == "settingForm"){
                $("#settingForm").validate({
                    errorElement: "em",
                    rules:{
                        name:{
                            required:true,
                            userName:true
                        },
                        user_mobile:{
                            required:true,
                            isMobile:true
                        },
                        old_password:{
                            //required: true,
                            isPassword:["true",6]
                        },
                        new_password:{
                            //required: true,
                            isPassword:["true",6]
                        },sign:{
                            maxlength:128
                        }
                    },
                    messages:{
                        name:{
                            required:"请输入姓名"
                        },
                        old_password: {
                            //required: "请输入密码",
                            isPassword: $.format("密码不能小于{1}个字符,不能包含空格,括号,* 等特殊字符")
                        },
                        new_password: {
                            //required: "请输入确认密码",
                            isPassword: $.format("密码不能小于{1}个字符,不能包含空格,括号,* 等特殊字符")
                        },sign: {
                            maxlength:"不能超过128个字符"
                        }
                    },
                    errorPlacement: function(error, element) {
                        error.insertAfter(element);
                    },
                    // Save the profile via AJAX and flash a success message.
                    submitHandler:function(form){
                        $.ajax({
                            type:"post",
                            url: "/user/update",
                            dataType: "json",
                            data:$("#"+formName).serialize(),
                            success: function(d){
                                if(d.success){
                                    $("#commesCom").html("保存成功").show();
                                    setTimeout(function(){
                                        $("#commesCom").html("").hide();
                                    },1500);
                                }else{
                                    alert(d.msg);
                                }
                            },
                            error:function(e){
                            }
                        });
                    }
                });
            }
        }
    }
});
<filename>router.js
const express = require('express');
const router = express.Router();
const mysql = require('mysql');
var bodyParser = require('body-parser')
const session = require('express-session');
const cors = require('cors');
// Allow cross-origin requests with credentials (cookies) from any origin.
const corsOptions = {
    origin: true,
    credentials: true
};
router.use(cors(corsOptions));
// Cookie-backed session used to track the logged-in member.
router.use(session({
    secret: 'balloon',
    resave: false,
    saveUninitialized: true
}))
router.use(bodyParser.json());
router.use(bodyParser.urlencoded({ extended: true }));
const Chat = require('./model/chats');
// const MongoClient = require('mongodb').MongoClient;
// const assert = require('assert');
// Connect through Mongoose
var mongoose = require('mongoose');
// CONNECT TO MONGODB SERVER
var db = mongoose.connection;
db.on('error', console.error);
db.once('open', function () {
    // CONNECTED TO MONGODB SERVER
    console.log("Connected to mongod server");
});
mongoose.connect(`mongodb://localhost:27017/admin`, { dbName: 'mongochat' }, (error) => {
    if (error) {
        console.log('몽고디비 연결 에러', error); // MongoDB connection error
    } else {
        console.log('몽고디비 연결 성공'); // MongoDB connection succeeded
    }
});
// Fetch the chat list for the given member from MongoDB.
router.get('/api/chats/:member', function (req, res) {
    console.log(req.params.member)
    Chat.find({ users: req.params.member }, function (err, chats) {
        if (err) return res.status(500).send({ error: 'database failure' });
        console.log(chats)
        res.json(chats);
    })
});
// Create a new chat room document (empty message list) in MongoDB.
router.post('/api/newChats', function (req, res) {
    var chat = new Chat();
    chat.room_id = req.body.room_name;
    chat.users = req.body.users;
    chat.messages = [];
    chat.save(function (err) {
        if (err) {
            console.error(err);
            res.json({ result: 0 });
            return;
        }
        res.json({ result: 1 });
    });
});
// Append one message to a chat room's message list in MongoDB.
router.put('/api/messageUpdate/:room_id', function (req, res) {
    Chat.update({ room_id: req.params.room_id }, { $push: { messages: { user: req.body.user, message: req.body.message, date: new Date() } } }, (err, output) => {
        // FIX: return on error — without it execution fell through,
        // `output` was undefined at `output.n` and a second response
        // would be attempted.
        if (err) return res.status(500).json({ error: 'database failure' });
        // FIX: message said 'book not found' (copy-paste leftover).
        if (!output.n) return res.status(404).json({ error: 'chat not found' });
        res.json({ message: 'chat updated' });
    })
});
// Health-check endpoint.
router.get('/', (req, res) => res.send('server is up and running'));
// Connect to MySQL.
var mysqlConnection = mysql.createConnection(
    {
        host: 'localhost', //'192.168.0.117'
        user: 'balloon',
        // NOTE(review): placeholder left by secret scrubbing — supply the
        // real credential via configuration, not source code.
        password: '<PASSWORD>',
        database: 'balloonDB'
    }
);
mysqlConnection.connect((err) => {
    if (!err) {
        console.log('Mysql DB connetion succeeded');
    } else {
        console.log('failed:', JSON.stringify(err, undefined, 2))
    }
})
// Fetch the member ids of a (currently hard-coded) project from MySQL.
// NOTE(review): the project id is hard-coded — presumably a leftover from
// development; confirm whether it should be a route parameter.
router.get('/getmember', (req, res) => {
    mysqlConnection.query(`SELECT member_id, project_id FROM project_members WHERE project_id ='201911031025-1'`,
        (err, rows, fields) => {
            if (err) {
                console.log(err)
            }
            else {
                // FIX: removed the unused local `address` (rows is sent directly).
                res.json(rows)
            }
        })
})
// Fetch the address book (project + member names) for one project.
router.get('/getAddress/:projectid', (req, res) => {
    // console.log(row.project_id)
    mysqlConnection.query(`select distinct pm.project_id as project_id,
    p.name as project_name,
    pm.member_id as member_id,
    m.name as member_name
    from project_members as pm
    join projects as p
    on pm.project_id = p.id
    join members as m
    on m.id=pm.member_id
    where pm.project_id=?`, [req.params.projectid], (err, rows, fields) => {
        if (err) {
            console.log(err)
        }
        else {
            console.log(rows)
            res.send(rows)
            // let address = {}
            // let members = []
            // fullList.map(member => {
            //     members.push({ member_id: member.member_id, member_name: member.member_name })
            //     address = { project_name: member.project_name, members: members }
            // })
            // console.log(address)
        }
    })
}
)
// Fetch the list of projects that the given member belongs to.
router.get('/projectList/:memberid', (req, res) => {
    mysqlConnection.query(`SELECT distinct
    pm.project_id as id, p.name as name , p.image_id as image
    from project_members as pm
    JOIN projects as p
    on pm.project_id = p.id
    where pm.member_id = ?;`
        , [req.params.memberid], (err, rows, fields) => {
            if (!err) {
                const projects = rows
                let projectList = []
                // Reshape rows into {project_id, project_name, project_image}.
                projects.map(project => {
                    projectList.push(
                        { project_id: project.id, project_name: project.name, project_image: project.image })
                })
                res.json(projectList)
                //console.log(projectList)
            }
        })
})
// Fetch the kanban cards of a project and group them by category into
// [{category, id: "list-N", cards: [{title, id: "card-N", content}]}].
router.get('/kanbanList/:project_id', (req, res) => {
    mysqlConnection.query('SELECT * FROM kanbancards where project_id=?', [req.params.project_id], (err, rows, fields) => {
        if (!err) {
            let kanbans = rows
            let kanbanList = []
            let listID = 0
            let category = []
            // Reshape the flat card rows into the JSON expected by the UI.
            kanbans.map((kanban, index) => {
                // console.log(kanban)
                if (kanbanList.length === 0) {
                    // First card: start the first list.
                    kanbanList.push({
                        category: kanban.category,
                        id: `list-${listID}`,
                        cards: [{
                            title: kanban.title,
                            id: `card-${kanban.id}`,
                            content: kanban.content
                        }]
                    })
                    listID += 1
                    category.push(kanban.category)
                } else if (kanbanList.length !== 0) {
                    for (const element of kanbanList) {
                        //console.log(element)
                        // New category: append a fresh list.
                        if (!(category.includes(kanban.category))) {
                            category.push(kanban.category)
                            kanbanList.push({
                                category: kanban.category,
                                id: `list-${listID}`,
                                cards: [{
                                    title: kanban.title,
                                    id: `card-${kanban.id}`,
                                    content: kanban.content
                                }]
                            })
                            listID += 1
                            break;
                        // Existing category: append the card to its list.
                        } else if ((category.includes(kanban.category)) && (element.category === kanban.category)) {
                            element.cards.push({
                                title: kanban.title,
                                id: `card-${kanban.id}`,
                                content: kanban.content
                            })
                        }
                    }
                    //console.log(category)
                }
            })
            res.json(kanbanList)
            // console.log(kanbanList)
        } else
            console.log(err)
    })
})
// Create a new project: insert the project row, register the creator as a
// member and, optionally, add one more member looked up by e-mail.
// NOTE(review): this handler never sends a response, so the client request
// will hang until it times out — confirm whether a res.json() is missing.
router.post("/newProject", (req, res) => {
    const body = req.body
    let memberid
    // console.log("server newProject==> body", req)
    mysqlConnection.query("insert into projects(id,name,type,image_id) values (?,?,?,1) ", [body.id, body.name, body.type], (err, result) => {
        if (!err) {
            console.log("projects 입력 성공") // project row inserted
            //res.redirect('/')
        } else {
            console.log(err)
        }
    })
    mysqlConnection.query(" insert into project_members (member_id,project_id) values (?,?) ", [body.userid, body.id], (err, result) => {
        if (!err) {
            console.log("project_members 입력 성공") // creator registered
            //res.redirect('/')
        } else {
            console.log(err)
        }
    })
    if (body.member) {
        // Resolve the invited member's id from their e-mail, then register them.
        mysqlConnection.query("select id from members where email=?", [body.member], (err, rows, fields) => {
            if (!err) {
                memberid = rows[0].id
                console.log("select", memberid)
                mysqlConnection.query(" insert into project_members (member_id,project_id) values (?,?) ", [memberid, body.id], (err, result) => {
                    if (!err) {
                        console.log("project_members 입력 성공2") // invited member registered
                        //res.redirect('/')
                    } else {
                        console.log(err)
                    }
                })
            } else {
                console.log(err)
            }
        })
        // mysqlConnection.query(" insert into project_members (member_id,project_id) values (?,?) ", [memberid, body.id], (err, result) => {
        //     if (!err) {
        //         console.log("project_members 입력 성공2")
        //         //res.redirect('/')
        //     } else {
        //         console.log(err)
        //     }
        // })
    }
})
// Insert a new kanban card for a project.
// NOTE(review): no response is sent on success or failure — the client
// request will hang; confirm whether a res.json() is missing.
router.post('/newCard', (req, res) => {
    const body = req.body
    //console.log("server newCard==> body", body)
    mysqlConnection.query("insert into kanbancards(category, content, project_id) values (?,?,?)", [body.category, body.content, body.project_id], (err, result) => {
        if (!err) {
            console.log("new card 입력 성공") // card inserted
            //res.redirect('/')
        } else {
            console.log(err)
        }
    })
})
// Sign-up: insert the member row and a default settings row.
// NOTE(review): the password is stored in plain text — it should be hashed
// (e.g. bcrypt) before persisting. Also, no response is sent to the client.
router.post('/api/customers', (req, res) => {
    const body = req.body
    //console.log("회원가입 server", body)
    mysqlConnection.query("insert into members(email, password, name) values (?, ?, ?)", [body.email, body.password, body.name], (err, result) => {
        if (!err) {
            console.log('회원가입성공') // sign-up succeeded
        } else {
            console.log(err)
        }
    })
    let settings = "lang\/theme"
    mysqlConnection.query("insert into member_settings(member_email,settings) values (?, ?)", [body.email, settings], (err, result) => {
        if (!err) {
            console.log('설정추가성공') // default settings inserted
        } else {
            console.log(err)
        }
    })
})
// Log-in: look up the member by e-mail, compare passwords and store the
// member's identity in the session.
// NOTE(review): passwords are compared in plain text, `sess` is an implicit
// global, and the failure branches never send a response (the commented-out
// res.send(false) suggests this was known) — confirm intended behavior.
router.post('/login', (req, res) => {
    const body = req.body
    // Use the session
    sess = req.session;
    // Check whether the entered email exists in the members table.
    mysqlConnection.query("SELECT * FROM members WHERE email=?", [body.email], (err, rows, fields) => {
        if (!err) {
            let member = rows[0]
            console.log(member)
            // Member record found
            if (member) {
                // Compare the stored password with the entered one.
                if (member.password === body.password) {
                    console.log('로그인성공') // login succeeded
                    // Persist identity in the session.
                    sess.email = body.email;
                    sess.name = member.name;
                    sess.userid = member.id;
                    sess.save(() => {
                        console.log("로그인", sess)
                    })
                    res.json(sess)
                } else {
                    console.log('비밀번호가 일치하지 않습니다') // password mismatch
                    //res.send(false)
                }
            } else {
                console.log('회원이 아닙니다') // not a member
                //res.send(false)
            }
        } else {
            console.log(err)
        }
    })
})
// Session check: echo the current session back to the client.
router.get('/api/checksession', (req, res) => {
    sess = req.session;
    if (sess) {
        res.json(sess)
        //console.log("session check", sess)
    }
    // res.sendFile('index.html');
});
// Log-out: destroy the session if a user is logged in.
// NOTE(review): no response is sent in any branch — the client request
// will hang; confirm whether a res.json()/res.end() is missing.
router.post('/logout', (req, res) => {
    sess = req.session;
    if (sess.email) {
        req.session.destroy(function (err) {
            if (err) {
                console.log(err);
            } else {
                console.log("logout성공") // logout succeeded
            }
        })
    }
})
// Fetch a member's settings; the stored value is a "lang/theme" string that
// is split into [lang, theme] before being returned.
router.get('/getSetting/:email', (req, res) => {
    mysqlConnection.query(`select settings from member_settings where member_email=?`, [req.params.email],
        (err, rows, fields) => {
            if (err) {
                console.log(err)
            }
            else {
                if (rows.length > 0) {
                    let setting = rows[0].settings.split('/')
                    console.log(setting)
                    res.json(setting)
                }
            }
        })
})
// Update a member's settings string when it changes.
router.put('/updateSetting/:email', (req, res) => {
    mysqlConnection.query(`update member_settings set settings = ? where member_email=?`, [req.body.settings, req.params.email],
        (err, results) => {
            if (err) {
                console.log(err)
            }
            else {
                //console.log(results)
                res.json(results)
            }
        })
})
module.exports = router; |
<html>
<head>
<!-- FIX: <title> belongs inside <head>; it previously preceded it. -->
<title>Table Spiral</title>
<style>
table, td {
  border: 1px solid #000;
}
</style>
</head>
<body>
<!-- Numbers 1-36 laid out in a serpentine (boustrophedon) order:
     odd rows left-to-right, even rows right-to-left. -->
<table>
  <!-- 1 to 6 -->
  <tr>
    <td>1</td>
    <td>2</td>
    <td>3</td>
    <td>4</td>
    <td>5</td>
    <td>6</td>
  </tr>
  <!-- 7 to 12 -->
  <tr>
    <td>12</td>
    <td>11</td>
    <td>10</td>
    <td>9</td>
    <td>8</td>
    <td>7</td>
  </tr>
  <!-- 13 to 18 -->
  <tr>
    <td>18</td>
    <td>17</td>
    <td>16</td>
    <td>15</td>
    <td>14</td>
    <td>13</td>
  </tr>
  <!-- 19 to 24 -->
  <tr>
    <td>24</td>
    <td>23</td>
    <td>22</td>
    <td>21</td>
    <td>20</td>
    <td>19</td>
  </tr>
  <!-- 25 to 30 -->
  <tr>
    <td>30</td>
    <td>29</td>
    <td>28</td>
    <td>27</td>
    <td>26</td>
    <td>25</td>
  </tr>
  <!-- 31 to 36 -->
  <tr>
    <td>36</td>
    <td>35</td>
    <td>34</td>
    <td>33</td>
    <td>32</td>
    <td>31</td>
  </tr>
</table>
</body>
</html>
import React, { useEffect, useContext } from "react";
import { Formik, Form } from "formik";
import { toast } from "react-toastify";
import { useRouter } from "next/router";
import {
Input,
TextArea,
Select,
PincodeSelect,
Vaccine,
CowinCode,
Date,
Time,
GetOTP,
} from "./Inputs";
import SubmitButton from "./SubmitButton";
import { TicketSchema, TicketValidationSchema } from "../utils/schema";
import APIService from "../utils/api";
import { AuthContext } from "./Stores/AuthContext";
// Default values for the ticket registration form.
// `date` and `time` are pre-filled with the current local date/time when
// running in the browser; during SSR (no `window`) they start empty.
const initValues: TicketSchema = {
  name: "",
  age: 0,
  gender: "",
  address: "",
  pincode: "",
  po: "",
  ps: "",
  mobile: "",
  dose: "",
  cowin: {
    registration: "",
    code: "",
    beneficiaryId: "",
    otp: "",
    validatedOtp: undefined,
  },
  vaccine: "",
  price: 0,
  // dd/mm/yyyy. `Date` is shadowed by the imported form component, so the
  // built-in constructor is reached via `window.Date`.
  date: (() => {
    if (typeof window === "undefined") {
      return "";
    }
    const now = new window.Date();
    const dd = String(now.getDate()).padStart(2, "0");
    const mm = String(now.getMonth() + 1).padStart(2, "0");
    const yyyy = String(now.getFullYear()).padStart(2, "0");
    return `${dd}/${mm}/${yyyy}`;
  })(),
  // HH:MM:SS, zero-padded.
  time: (() => {
    if (typeof window === "undefined") {
      return "";
    }
    const now = new window.Date();
    const hh = String(now.getHours()).padStart(2, "0");
    const mi = String(now.getMinutes()).padStart(2, "0");
    const ss = String(now.getSeconds()).padStart(2, "0");
    return `${hh}:${mi}:${ss}`;
  })(),
};
const Ticket = () => {
const router = useRouter();
const tabContext = useContext(AuthContext);
useEffect(() => {
const checkAuth = () => {
try {
const API = APIService();
if (!tabContext.auth) {
throw "Not Authenticated!";
}
} catch (err) {
router.push("/login");
toast.error("Session Expired! Please Login!");
}
};
checkAuth();
}, []);
const submitHandler = async (
values: TicketSchema,
hooks: {
setSubmitting: (isSubmitting: boolean) => void;
resetForm: (nextState?: any) => void;
}
) => {
const payload = { ...values };
delete payload.cowin.code;
delete payload.cowin.otp;
delete payload.cowin.validatedOtp;
try {
let API;
try {
API = APIService();
} catch (err) {
router.push("/login");
toast.error("Session Expired! Please Login!");
} finally {
const { data } = await API?.post(
"/api/v1/validate/beneficiary",
payload,
{
responseType: "blob",
}
)!;
const file = new Blob([data], { type: "application/pdf" });
const fileURL = URL.createObjectURL(file);
if (typeof window !== "undefined") {
const ticketWindow = window.open();
ticketWindow!.location.href = fileURL;
}
toast.success("Form successfully submitted!");
hooks.setSubmitting(false);
hooks.resetForm(initValues);
}
} catch (err) {
if (err.response && typeof err.response.data === "string") {
toast.error(err.response.data);
} else if (
err.response &&
typeof err.response.data.success !== "undefined"
) {
toast.error(err.response.data.error);
} else {
toast.error("Error in submitting form!");
}
}
};
const currTime = (() => {
if (typeof window !== "undefined") {
const day = new window.Date();
return `${day.getHours() < 10 ? "0" + day.getHours() : day.getHours()}:${
day.getMinutes() < 10 ? "0" + day.getMinutes() : day.getMinutes()
}:${day.getSeconds() < 10 ? "0" + day.getSeconds() : day.getSeconds()}`;
}
})();
const currDate = (() => {
if (typeof window !== "undefined") {
const day = new window.Date();
return `${day.getDate() < 10 ? "0" + day.getDate() : day.getDate()}/${
day.getMonth() + 1 < 10
? "0" + (day.getMonth() + 1)
: day.getMonth() + 1
}/${
day.getFullYear() < 10 ? "0" + day.getFullYear() : day.getFullYear()
}`;
} else {
return "";
}
})();
return (
<>
<div className="col-md-10 col-11 mb-5 mb-md-0">
<div className="row mx-auto text-center mb-0 mb-md-5">
<h1>
<strong>Register</strong>
</h1>
</div>
<Formik
initialValues={initValues}
validationSchema={TicketValidationSchema}
onSubmit={(values, { setSubmitting, resetForm }) =>
submitHandler(values, {
setSubmitting,
resetForm,
})
}
>
<Form>
<div className="row mx-auto">
<div className="col-md-6 col-12">
<div className="row mx-auto mb-2">
<div className="col-md-6 col-12">
<Input
label="Name"
name="name"
type="text"
placeholder="<NAME>"
/>
</div>
<div className="col-md-6 col-12">
<Input
label="Age"
name="age"
type="text"
placeholder="18"
/>
</div>
</div>
<div className="row mx-auto mb-2">
<TextArea
label="Address"
name="address"
type="textarea"
placeholder="14F/1A, Dum Dum Road, Kolkata"
/>
</div>
<div className="row mx-auto mb-2">
<div className="col-md-6 col-12">
<Select label="Gender" name="gender">
<option value="">Select Option</option>
<option value="Male">Male</option>
<option value="Female">Female</option>
<option value="Others">Others</option>
</Select>
</div>
<div className="col-md-6 col-12">
<Input
label="Pincode"
name="pincode"
type="text"
placeholder="700030"
/>
</div>
</div>
<div className="row mx-auto mb-2">
<div className="col-12 col-md-6">
<PincodeSelect />
</div>
<div className="col-12 col-md-6">
<Input
label="Police Station"
name="ps"
type="text"
placeholder="Chitpore P.S"
/>
</div>
</div>
<div className="row mx-auto mb-2">
<div className="col-12 col-md-6">
<Select label="Dose" name="dose">
<option value="">Select Option</option>
<option value="I">I</option>
<option value="II">II</option>
</Select>
</div>
<div className="col-12 col-md-6">
<Input
label="Mobile Number"
name="mobile"
type="text"
placeholder="9123456780"
/>
</div>
</div>
</div>
<div className="col-md-6 col-12">
<div className="row mx-auto mb-2">
<div className="col-12 col-md-6">
<Select
label="Co-WIN Registration?"
name="cowin.registration"
>
<option value="">Select Option</option>
<option value="Y">Yes</option>
<option value="N">No</option>
</Select>
</div>
<div className="col-12 col-md-6 d-flex justify-content-center align-items-end">
<GetOTP />
</div>
</div>
<div className="row mx-auto mb-2">
<CowinCode />
</div>
<div className="row mx-auto mb-2">
<div className="col-12 col-md-6">
<Select label="Vaccine Type" name="vaccine">
<option value="">Select Option</option>
<option value="Covishield">Covishield</option>
<option value="Covaxin">Covaxin</option>
<option value="Sputnik-V">Sputnik-V</option>
</Select>
</div>
<div className="col-12 col-md-6">
<Vaccine />
</div>
</div>
<div className="row mx-auto mt-5 mb-3">
<div className="col-12 col-md-6">
<Date currDate={currDate} />
</div>
<div className="col-12 col-md-6">
<Time currTime={currTime} />
</div>
</div>
<div className="row mx-auto mb-3 mt-5 text-center">
<div className="col-md-6 col-12 mx-auto">
<SubmitButton />
</div>
</div>
</div>
</div>
</Form>
</Formik>
</div>
</>
);
};
export default Ticket;
|
// Extend user session creation functionality.
// All four profile types produced an identical array apart from the 'type'
// value, so validate the type once instead of duplicating the literal.
$valid_types = array('admin', 'manager', 'employee', 'customer');
$app_user = null;
if (in_array($type, $valid_types, true)) {
    $app_user = array(
        'user' => $profile->name,
        'type' => $type,
        'user_id' => $profile->profile_id,
    );
}
// NOTE(review): previously an unknown $type left $app_user undefined (PHP
// notice) and still stored it; an explicit null is stored now — confirm
// callers treat a null 'kandora_user' as "not authenticated".
$this->session->set_userdata('kandora_user', $app_user);
// Implement function to check user access rights
/**
 * Decide whether a user type may perform the requested action.
 *
 * @param string $userType           One of admin|manager|employee|customer.
 * @param string $requiredAccessType Permission name to look up.
 * @return bool  True only when the user type exists in the matrix and the
 *               permission is listed for it; unknown types get no rights.
 */
function checkAccessRights($userType, $requiredAccessType) {
    $accessMatrix = array(
        'admin' => array('manage_users', 'manage_settings', 'view_reports'),
        'manager' => array('manage_employees', 'view_reports'),
        'employee' => array('view_reports'),
        'customer' => array('view_products', 'purchase_products'),
    );
    // Guard clause replaces the if/else: unknown types are denied outright.
    if (!array_key_exists($userType, $accessMatrix)) {
        return false;
    }
    return in_array($requiredAccessType, $accessMatrix[$userType]);
}
// Example usage of checkAccessRights: report whether an admin may manage users.
$userType = 'admin';
$requiredAccessType = 'manage_users';
echo checkAccessRights($userType, $requiredAccessType)
    ? "User has access to manage users"
    : "User does not have access to manage users";
package ca
import (
"crypto/rand"
"crypto/x509"
"fmt"
"io"
"os"
"strings"
"time"
"github.com/manifoldco/promptui"
"github.com/smallstep/certificates/cas/apiv1"
"github.com/smallstep/certificates/pki"
"github.com/smallstep/cli/command"
"github.com/smallstep/cli/crypto/pemutil"
"github.com/smallstep/cli/errs"
"github.com/smallstep/cli/ui"
"github.com/smallstep/cli/utils"
"github.com/urfave/cli"
)
// initCommand returns the cli.Command definition for `step ca init`,
// declaring every flag used to configure the new PKI or the connection
// to an external registration authority (RA).
func initCommand() cli.Command {
	return cli.Command{
		Name:   "init",
		Action: cli.ActionFunc(initAction),
		Usage:  "initialize the CA PKI",
		UsageText: `**step ca init**
[**--root**=<file>] [**--key**=<file>] [**--pki**] [**--ssh**] [**--name**=<name>]
[**--dns**=<dns>] [**--address**=<address>] [**--provisioner**=<name>]
[**--provisioner-password-file**=<file>] [**--password-file**=<file>]
[**--with-ca-url**=<url>] [**--no-db**]`,
		Description: `**step ca init** command initializes a public key infrastructure (PKI) to be
used by the Certificate Authority.`,
		Flags: []cli.Flag{
			// Flags for reusing an existing root certificate/key pair.
			cli.StringFlag{
				Name:   "root",
				Usage:  "The path of an existing PEM <file> to be used as the root certificate authority.",
				EnvVar: command.IgnoreEnvVar,
			},
			cli.StringFlag{
				Name:   "key",
				Usage:  "The path of an existing key <file> of the root certificate authority.",
				EnvVar: command.IgnoreEnvVar,
			},
			cli.BoolFlag{
				Name:  "pki",
				Usage: "Generate only the PKI without the CA configuration.",
			},
			cli.BoolFlag{
				Name:  "ssh",
				Usage: `Create keys to sign SSH certificates.`,
			},
			cli.BoolFlag{
				Name:  "helm",
				Usage: `Generates a Helm values YAML to be used with step-certificates chart.`,
			},
			cli.StringFlag{
				Name: "deployment-type",
				Usage: `The <name> of the deployment type to use. Options are:
**standalone**
: An instance of step-ca that does not connect to any cloud services. You
manage authority keys and configuration yourself.
Choose standalone if you'd like to run step-ca yourself and do not want
cloud services or commercial support.
**linked**
: An instance of step-ca with locally managed keys that connects to your
Certificate Manager account for provisioner management, alerting,
reporting, revocation, and other managed services.
Choose linked if you'd like cloud services and support, but need to
control your authority's signing keys.
**hosted**
: A highly available, fully-managed instance of step-ca run by smallstep
just for you.
Choose hosted if you'd like cloud services and support.
: More information and pricing at: https://u.step.sm/cm`,
			},
			// Basic PKI/CA configuration flags.
			cli.StringFlag{
				Name:  "name",
				Usage: "The <name> of the new PKI.",
			},
			cli.StringFlag{
				Name:  "dns",
				Usage: "The comma separated DNS <names> or IP addresses of the new CA.",
			},
			cli.StringFlag{
				Name:  "address",
				Usage: "The <address> that the new CA will listen at.",
			},
			cli.StringFlag{
				Name:  "provisioner",
				Usage: "The <name> of the first provisioner.",
			},
			cli.StringFlag{
				Name:  "password-file",
				Usage: `The path to the <file> containing the password to encrypt the keys.`,
			},
			cli.StringFlag{
				Name:  "provisioner-password-file",
				Usage: `The path to the <file> containing the password to encrypt the provisioner key.`,
			},
			cli.StringFlag{
				Name:  "with-ca-url",
				Usage: `<URI> of the Step Certificate Authority to write in defaults.json`,
			},
			// Registration-authority (StepCAS/CloudCAS) specific flags.
			cli.StringFlag{
				Name:  "ra",
				Usage: `The registration authority <name> to use. Currently "StepCAS" and "CloudCAS" are supported.`,
			},
			cli.StringFlag{
				Name: "issuer",
				Usage: `The registration authority issuer <url> to use.
: If StepCAS is used, this flag should be the URL of the CA to connect
to, e.g https://ca.smallstep.com:9000
: If CloudCAS is used, this flag should be the resource name of the
intermediate certificate to use. This has the format
'projects/\\*/locations/\\*/caPools/\\*/certificateAuthorities/\\*'.`,
			},
			cli.StringFlag{
				Name: "issuer-fingerprint",
				Usage: `The root certificate <fingerprint> of the issuer CA.
This flag is supported in "StepCAS", and it should be the result of running:
'''
$ step certificate fingerprint root_ca.crt
4fe5f5ef09e95c803fdcb80b8cf511e2a885eb86f3ce74e3e90e62fa3faf1531
'''`,
			},
			cli.StringFlag{
				Name: "issuer-provisioner",
				Usage: `The <name> of an existing provisioner in the issuer CA.
This flag is supported in "StepCAS".`,
			},
			cli.StringFlag{
				Name: "credentials-file",
				Usage: `The registration authority credentials <file> to use.
: If CloudCAS is used, this flag should be the path to a service account key.
It can also be set using the 'GOOGLE_APPLICATION_CREDENTIALS=path'
environment variable or the default service account in an instance in Google
Cloud.`,
			},
			cli.BoolFlag{
				Name:  "no-db",
				Usage: `Generate a CA configuration without the DB stanza. No persistence layer.`,
			},
		},
	}
}
// initAction implements `step ca init`: it validates the flag combinations,
// prompts interactively for any missing configuration, and then generates
// the PKI (root/intermediate certificates, optional SSH keys, CA config)
// or connects to an external registration authority (StepCAS/CloudCAS).
func initAction(ctx *cli.Context) (err error) {
	// Refuse to run at all if the system's CSPRNG is not usable.
	if err = assertCryptoRand(); err != nil {
		return err
	}
	var rootCrt *x509.Certificate
	var rootKey interface{}
	caURL := ctx.String("with-ca-url")
	root := ctx.String("root")
	key := ctx.String("key")
	ra := strings.ToLower(ctx.String("ra"))
	pkiOnly := ctx.Bool("pki")
	noDB := ctx.Bool("no-db")
	helm := ctx.Bool("helm")
	// Flag-combination validation: --root/--key must come together, the RA
	// name must be known, and --pki excludes --no-db and --helm.
	switch {
	case len(root) > 0 && len(key) == 0:
		return errs.RequiredWithFlag(ctx, "root", "key")
	case len(root) == 0 && len(key) > 0:
		return errs.RequiredWithFlag(ctx, "key", "root")
	case len(root) > 0 && len(key) > 0:
		if rootCrt, err = pemutil.ReadCertificate(root); err != nil {
			return err
		}
		if rootKey, err = pemutil.Read(key); err != nil {
			return err
		}
	case ra != "" && ra != apiv1.CloudCAS && ra != apiv1.StepCAS:
		return errs.InvalidFlagValue(ctx, "ra", ctx.String("ra"), "StepCAS or CloudCAS")
	case pkiOnly && noDB:
		return errs.IncompatibleFlagWithFlag(ctx, "pki", "no-db")
	case pkiOnly && helm:
		return errs.IncompatibleFlagWithFlag(ctx, "pki", "helm")
	}
	// Key-encryption password (prompted later if empty).
	var password string
	if passwordFile := ctx.String("password-file"); passwordFile != "" {
		password, err = utils.ReadStringPasswordFromFile(passwordFile)
		if err != nil {
			return err
		}
	}
	// Provisioner password will be equal to the certificate private keys if
	// --provisioner-password-file is not provided.
	var provisionerPassword []byte
	if passwordFile := ctx.String("provisioner-password-file"); passwordFile != "" {
		provisionerPassword, err = utils.ReadPasswordFromFile(passwordFile)
		if err != nil {
			return err
		}
	}
	// Common for both CA and RA
	var name, org, resource string
	var casOptions apiv1.Options
	var deploymentType pki.DeploymentType
	switch ra {
	case apiv1.CloudCAS:
		// CloudCAS RA: either create a brand-new PKI on Google CAS or reuse
		// an existing certificate authority resource.
		var create bool
		var project, location, caPool, caPoolTier, gcsBucket string
		caPoolTiers := []struct {
			Name  string
			Value string
		}{{"DevOps", "DEVOPS"}, {"Enterprise", "ENTERPRISE"}}
		// Prompt or get deployment type from flag
		deploymentType, err = promptDeploymentType(ctx, true)
		if err != nil {
			return err
		}
		iss := ctx.String("issuer")
		if iss == "" {
			create, err = ui.PromptYesNo("Would you like to create a new PKI (y) or use an existing one (n)?")
			if err != nil {
				return err
			}
			if create {
				ui.Println("What would you like to name your new PKI?", ui.WithValue(ctx.String("name")))
				name, err = ui.Prompt("(e.g. Smallstep)",
					ui.WithValidateNotEmpty(), ui.WithValue(ctx.String("name")))
				if err != nil {
					return err
				}
				ui.Println("What is the name of your organization?")
				org, err = ui.Prompt("(e.g. Smallstep)",
					ui.WithValidateNotEmpty())
				if err != nil {
					return err
				}
				ui.Println("What resource id do you want to use? [we will append -Root-CA or -Intermediate-CA]")
				resource, err = ui.Prompt("(e.g. Smallstep)",
					ui.WithValidateRegexp("^[a-zA-Z0-9-_]+$"))
				if err != nil {
					return err
				}
				ui.Println("What is the id of your project on Google's Cloud Platform?")
				project, err = ui.Prompt("(e.g. smallstep-ca)",
					ui.WithValidateRegexp("^[a-z][a-z0-9-]{4,28}[a-z0-9]$"))
				if err != nil {
					return err
				}
				ui.Println("What region or location do you want to use?")
				location, err = ui.Prompt("(e.g. us-west1)",
					ui.WithValidateRegexp("^[a-z0-9-]+$"))
				if err != nil {
					return err
				}
				ui.Println("What CA pool name do you want to use?")
				caPool, err = ui.Prompt("(e.g. Smallstep)",
					ui.WithValidateRegexp("^[a-zA-Z0-9_-]{1,63}"))
				if err != nil {
					return err
				}
				i, _, err := ui.Select("What CA pool tier do you want to use?", caPoolTiers, ui.WithSelectTemplates(ui.NamedSelectTemplates("Tier")))
				if err != nil {
					return err
				}
				caPoolTier = caPoolTiers[i].Value
				ui.Println("What GCS bucket do you want to use? Leave it empty to use a managed one.")
				gcsBucket, err = ui.Prompt("(e.g. my-bucket)", ui.WithValidateRegexp("(^$)|(^[a-z0-9._-]{3,222}$)"))
				if err != nil {
					return err
				}
			} else {
				ui.Println("What certificate authority would you like to use?")
				iss, err = ui.Prompt("(e.g. projects/smallstep-ca/locations/us-west1/caPools/smallstep/certificateAuthorities/intermediate-ca)",
					ui.WithValidateRegexp("^projects/[a-z][a-z0-9-]{4,28}[a-z0-9]/locations/[a-z0-9-]+/caPools/[a-zA-Z0-9-_]+/certificateAuthorities/[a-zA-Z0-9-_]+$"))
				if err != nil {
					return err
				}
			}
		}
		casOptions = apiv1.Options{
			Type:                 apiv1.CloudCAS,
			CredentialsFile:      ctx.String("credentials-file"),
			CertificateAuthority: iss,
			IsCreator:            create,
			Project:              project,
			Location:             location,
			CaPool:               caPool,
			CaPoolTier:           caPoolTier,
			GCSBucket:            gcsBucket,
		}
	case apiv1.StepCAS:
		// StepCAS RA: delegate signing to an upstream step-ca identified by
		// URL, root fingerprint and an existing JWK provisioner.
		deploymentType, err = promptDeploymentType(ctx, true)
		if err != nil {
			return err
		}
		ui.Println("What is the url of your CA?", ui.WithValue(ctx.String("issuer")))
		ca, err := ui.Prompt("(e.g. https://ca.smallstep.com:9000)",
			ui.WithValidateRegexp("(?i)^https://.+$"), ui.WithValue(ctx.String("issuer")))
		if err != nil {
			return err
		}
		ui.Println("What is the fingerprint of the CA's root file?", ui.WithValue(ctx.String("issuer-fingerprint")))
		fingerprint, err := ui.Prompt("(e.g. 4fe5f5ef09e95c803fdcb80b8cf511e2a885eb86f3ce74e3e90e62fa3faf1531)",
			ui.WithValidateRegexp("^[a-fA-F0-9]{64}$"), ui.WithValue(ctx.String("issuer-fingerprint")))
		if err != nil {
			return err
		}
		ui.Println("What is the JWK provisioner you want to use?", ui.WithValue(ctx.String("issuer-provisioner")))
		provisioner, err := ui.Prompt("(e.g. <EMAIL>)",
			ui.WithValidateNotEmpty(), ui.WithValue(ctx.String("issuer-provisioner")))
		if err != nil {
			return err
		}
		casOptions = apiv1.Options{
			Type:                            apiv1.StepCAS,
			IsCreator:                       false,
			IsCAGetter:                      true,
			CertificateAuthority:            ca,
			CertificateAuthorityFingerprint: fingerprint,
			CertificateIssuer: &apiv1.CertificateIssuer{
				Type:        "JWK",
				Provisioner: provisioner,
			},
		}
	default:
		// No RA: a local SoftCAS instance that creates its own keys.
		deploymentType, err = promptDeploymentType(ctx, false)
		if err != nil {
			return err
		}
		if deploymentType == pki.HostedDeployment {
			ui.Println()
			ui.Println("Sorry, we can't create hosted authorities from the CLI yet. To create a hosted")
			ui.Println("authority please visit:\n")
			ui.Println("    \033[1mhttps://u.step.sm/hosted\033[0m\n")
			ui.Println("To connect to an existing hosted authority run:\n")
			ui.Println("    $ step ca bootstrap --team <name> --authority <authority>")
			ui.Println()
			return nil
		}
		ui.Println("What would you like to name your new PKI?", ui.WithValue(ctx.String("name")))
		name, err = ui.Prompt("(e.g. Smallstep)", ui.WithValidateNotEmpty(), ui.WithValue(ctx.String("name")))
		if err != nil {
			return err
		}
		org = name
		casOptions = apiv1.Options{
			Type:      apiv1.SoftCAS,
			IsCreator: true,
		}
	}
	// Collect CA-configuration answers (DNS names, bind address, first
	// provisioner) unless only the PKI artifacts were requested.
	var opts []pki.Option
	if pkiOnly {
		opts = append(opts, pki.WithPKIOnly())
	} else {
		var names string
		ui.Println("What DNS names or IP addresses would you like to add to your new CA?", ui.WithValue(ctx.String("dns")))
		names, err = ui.Prompt("(e.g. ca.smallstep.com[,1.1.1.1,etc.])",
			ui.WithValidateFunc(ui.DNS()), ui.WithValue(ctx.String("dns")))
		if err != nil {
			return err
		}
		// Accept both comma- and space-separated name lists.
		names = strings.Replace(names, " ", ",", -1)
		parts := strings.Split(names, ",")
		var dnsNames []string
		for _, name := range parts {
			if len(name) == 0 {
				continue
			}
			dnsNames = append(dnsNames, strings.TrimSpace(name))
		}
		var address string
		ui.Println("What IP and port will your new CA bind to?", ui.WithValue(ctx.String("address")))
		address, err = ui.Prompt("(e.g. :443 or 127.0.0.1:4343)",
			ui.WithValidateFunc(ui.Address()), ui.WithValue(ctx.String("address")))
		if err != nil {
			return err
		}
		var provisioner string
		// Only standalone deployments will create an initial provisioner.
		// Linked or hosted deployments will use an OIDC token as the first
		// deployment.
		if deploymentType == pki.StandaloneDeployment {
			ui.Println("What would you like to name the CA's first provisioner?", ui.WithValue(ctx.String("provisioner")))
			provisioner, err = ui.Prompt("(e.g. <EMAIL>)",
				ui.WithValidateNotEmpty(), ui.WithValue(ctx.String("provisioner")))
			if err != nil {
				return err
			}
		}
		opts = []pki.Option{
			pki.WithAddress(address),
			pki.WithCaURL(caURL),
			pki.WithDNSNames(dnsNames),
			pki.WithDeploymentType(deploymentType),
		}
		if deploymentType == pki.StandaloneDeployment {
			opts = append(opts, pki.WithProvisioner(provisioner))
		}
		if deploymentType == pki.LinkedDeployment {
			opts = append(opts, pki.WithAdmin())
		} else if ctx.Bool("ssh") {
			opts = append(opts, pki.WithSSH())
		}
		if noDB {
			opts = append(opts, pki.WithNoDB())
		}
		if helm {
			opts = append(opts, pki.WithHelm())
		}
	}
	p, err := pki.New(casOptions, opts...)
	if err != nil {
		return err
	}
	// Prompt for (or generate) the key-encryption password if it was not
	// provided via --password-file.
	if ra != "" {
		// RA mode will not have encrypted keys. With the exception of SSH keys,
		// but this is not common on RA mode.
		ui.Println("Choose a password for your first provisioner.", ui.WithValue(password))
	} else {
		// Linked CAs will use OIDC as a first provisioner.
		if pkiOnly || deploymentType != pki.StandaloneDeployment {
			ui.Println("Choose a password for your CA keys.", ui.WithValue(password))
		} else {
			ui.Println("Choose a password for your CA keys and first provisioner.", ui.WithValue(password))
		}
	}
	pass, err := ui.PromptPasswordGenerate("[leave empty and we'll generate one]", ui.WithRichPrompt(), ui.WithValue(password))
	if err != nil {
		return err
	}
	if !pkiOnly && deploymentType == pki.StandaloneDeployment {
		// Generate provisioner key pairs.
		if len(provisionerPassword) > 0 {
			if err = p.GenerateKeyPairs(provisionerPassword); err != nil {
				return err
			}
		} else {
			if err = p.GenerateKeyPairs(pass); err != nil {
				return err
			}
		}
	}
	if casOptions.IsCreator {
		var root *apiv1.CreateCertificateAuthorityResponse
		ui.Println()
		// Generate root certificate if not set.
		if rootCrt == nil && rootKey == nil {
			ui.Print("Generating root certificate... ")
			root, err = p.GenerateRootCertificate(name, org, resource, pass)
			if err != nil {
				return err
			}
			ui.Println("done!")
		} else {
			ui.Printf("Copying root certificate... ")
			// Do not copy key in STEPPATH
			if err = p.WriteRootCertificate(rootCrt, nil, nil); err != nil {
				return err
			}
			root = p.CreateCertificateAuthorityResponse(rootCrt, rootKey)
			ui.Println("done!")
		}
		// Always generate the intermediate certificate
		ui.Printf("Generating intermediate certificate... ")
		time.Sleep(1 * time.Second)
		err = p.GenerateIntermediateCertificate(name, org, resource, root, pass)
		if err != nil {
			return err
		}
		ui.Println("done!")
	} else {
		// Attempt to get the root certificate from RA.
		if err := p.GetCertificateAuthority(); err != nil {
			return err
		}
	}
	if ctx.Bool("ssh") {
		ui.Printf("Generating user and host SSH certificate signing keys... ")
		if err := p.GenerateSSHSigningKeys(pass); err != nil {
			return err
		}
		ui.Println("done!")
	}
	if helm {
		return p.WriteHelmTemplate(os.Stdout)
	}
	return p.Save()
}
// isNonInteractiveInit reports whether enough flags were supplied on the
// command line to initialize the PKI without prompting the user. The set of
// required flags depends on the selected registration authority, and the
// CA-configuration flags are only required when a full CA (not --pki) is
// being generated.
func isNonInteractiveInit(ctx *cli.Context) bool {
	configFlags := []string{
		"dns", "address", "provisioner",
	}
	var pkiFlags []string
	switch strings.ToLower(ctx.String("ra")) {
	case apiv1.CloudCAS:
		pkiFlags = []string{"issuer"}
		configFlags = append(configFlags, "password-file")
	case apiv1.StepCAS:
		pkiFlags = []string{"issuer", "issuer-fingerprint", "issuer-provisioner"}
		configFlags = append(configFlags, "password-file")
	default:
		pkiFlags = []string{"name", "password-file"}
	}
	// Build the full list of required flags, then verify each one in a
	// single pass instead of two separate loops.
	required := pkiFlags
	if !ctx.Bool("pki") {
		required = append(required, configFlags...)
	}
	for _, flag := range required {
		if ctx.String(flag) == "" {
			return false
		}
	}
	return true
}
// promptDeploymentType returns the deployment type chosen via the
// --deployment-type flag or, when the flag is absent, via an interactive
// selection prompt. When isRA is true the "hosted" option is rejected and
// hidden from the menu.
func promptDeploymentType(ctx *cli.Context, isRA bool) (pki.DeploymentType, error) {
	type deployment struct {
		Name        string
		Description string
		Value       pki.DeploymentType
	}
	var deploymentTypes []deployment
	deploymentType := strings.ToLower(ctx.String("deployment-type"))
	// Assume standalone for backward compatibility if all required flags are
	// passed.
	if deploymentType == "" && isNonInteractiveInit(ctx) {
		return pki.StandaloneDeployment, nil
	}
	deploymentTypes = []deployment{
		{"Standalone", "step-ca instance you run yourself", pki.StandaloneDeployment},
		{"Linked", "standalone, plus cloud configuration, reporting & alerting", pki.LinkedDeployment},
		{"Hosted", "fully-managed step-ca cloud instance run for you by smallstep", pki.HostedDeployment},
	}
	// Validate an explicitly provided flag value; an empty value falls
	// through to the interactive menu below.
	if isRA {
		switch deploymentType {
		case "":
			// Deployment type Hosted is not supported for RAs
			deploymentTypes = deploymentTypes[:2]
		case "standalone":
			return pki.StandaloneDeployment, nil
		case "linked":
			return pki.LinkedDeployment, nil
		default:
			return 0, errs.InvalidFlagValue(ctx, "deployment-type", deploymentType, "standalone or linked")
		}
	} else {
		switch deploymentType {
		case "":
		case "standalone":
			return pki.StandaloneDeployment, nil
		case "linked":
			return pki.LinkedDeployment, nil
		case "hosted":
			return pki.HostedDeployment, nil
		default:
			return 0, errs.InvalidFlagValue(ctx, "deployment-type", deploymentType, "standalone, linked or hosted")
		}
	}
	// Interactive selection among the remaining deployment types.
	i, _, err := ui.Select("What deployment type would you like to configure?", deploymentTypes,
		ui.WithSelectTemplates(&promptui.SelectTemplates{
			Active:   fmt.Sprintf("%s {{ printf \"%%s - %%s\" .Name .Description | underline }}", ui.IconSelect),
			Inactive: " {{ .Name }} - {{ .Description }}",
			Selected: fmt.Sprintf(`{{ "%s" | green }} {{ "Deployment Type:" | bold }} {{ .Name }}`, ui.IconGood),
		}))
	if err != nil {
		return 0, err
	}
	return deploymentTypes[i].Value, nil
}
// assertCryptoRand verifies that a cryptographically secure source of
// randomness is available by reading a small probe from crypto/rand;
// it returns a descriptive error when the read fails.
func assertCryptoRand() error {
	probe := make([]byte, 64)
	if _, err := io.ReadFull(rand.Reader, probe); err != nil {
		return errs.NewError("crypto/rand is unavailable: Read() failed with %#v", err)
	}
	return nil
}
|
// source repository: YanickJair/go-exercises
package utils
import (
"fmt"
"io/ioutil"
"net/http"
)
// Req - make a http request to API: fetches the quote-of-the-day categories
// and prints the raw JSON body to stdout. Errors are logged and swallowed
// (best-effort helper with no return value).
func Req() {
	response, err := http.Get("http://quotes.rest/qod/categories.json")
	if err != nil {
		fmt.Println(err)
		return
	}
	// defer ensures the body is closed on every return path
	// (analogous to try/finally, not `final`, in JS).
	defer response.Body.Close()
	// Surface non-2xx answers instead of silently printing an error page.
	if response.StatusCode < 200 || response.StatusCode >= 300 {
		fmt.Println("unexpected status:", response.Status)
		return
	}
	contents, err := ioutil.ReadAll(response.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(contents))
}
|
// AMD module wrapper (compiler-generated from the RxJS TypeScript sources):
// re-exports the static `create` method of BoundNodeCallbackObservable as
// the public `bindNodeCallback` entry point. Do not edit by hand.
define(["require", "exports", './BoundNodeCallbackObservable'], function (require, exports, BoundNodeCallbackObservable_1) {
    "use strict";
    exports.bindNodeCallback = BoundNodeCallbackObservable_1.BoundNodeCallbackObservable.create;
});
//# sourceMappingURL=bindNodeCallback.js.map
# Release script: runs the full test matrix, builds the dist bundles,
# versions and publishes the sub-packages, then commits, tags and publishes
# the main package. Aborts on the first failing command.
set -e
# Ask for the version interactively unless it was passed as $1.
if [[ -z $1 ]]; then
  echo "Enter new version: "
  read VERSION
else
  VERSION=$1
fi
# Single-keystroke confirmation before doing anything irreversible.
read -p "Releasing $VERSION - are you sure? (y/n) " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
  echo "Releasing $VERSION ..."
  # Full local test matrix must pass before building.
  npm run lint
  npm run flow
  npm run test:cover
  npm run test:e2e
  npm run test:ssr
  # Cross-browser tests on Sauce Labs unless explicitly skipped.
  if [[ -z $SKIP_SAUCE ]]; then
    export SAUCE_BUILD_ID=$VERSION:`date +"%s"`
    npm run test:sauce
  fi
  # build
  VERSION=$VERSION npm run build
  # update packages
  # Each sub-package is versioned and published independently; an optional
  # $RELEASE_TAG publishes under an npm dist-tag instead of "latest".
  cd packages/vue-template-compiler
  npm version $VERSION
  if [[ -z $RELEASE_TAG ]]; then
    npm publish
  else
    npm publish --tag $RELEASE_TAG
  fi
  cd -
  cd packages/vue-server-renderer
  npm version $VERSION
  if [[ -z $RELEASE_TAG ]]; then
    npm publish
  else
    npm publish --tag $RELEASE_TAG
  fi
  cd -
  # commit
  # Force-add the generated build artifacts that are normally gitignored.
  git add -A
  git add -f \
    dist/*.js \
    !dist/vue.common.min.js \
    packages/vue-server-renderer/basic.js \
    packages/vue-server-renderer/build.js \
    packages/vue-server-renderer/server-plugin.js \
    packages/vue-server-renderer/client-plugin.js \
    packages/vue-template-compiler/build.js
  git commit -m "[build] $VERSION"
  npm version $VERSION --message "[release] $VERSION"
  # publish
  git push origin refs/tags/v$VERSION
  git push
  if [[ -z $RELEASE_TAG ]]; then
    npm publish
  else
    npm publish --tag $RELEASE_TAG
  fi
  # generate release note
  VERSION=$VERSION npm run release:note
fi
|
def setup_environment(DIR):
    """Return a newline-joined shell snippet that activates the virtualenv
    under ``DIR`` and installs the project's dependencies.

    :param DIR: path of a checkout containing ``venv/``, ``requirements.txt``
        and the ``dace`` sub-directory.
    :return: the shell commands as a single string, ready to be executed.
    """
    commands = [
        # Activate once up front. (The previous version repeated this exact
        # command at the end as well; sourcing activate twice is a no-op,
        # so the duplicate has been dropped.)
        f"source {DIR}/venv/bin/activate",
        # Use the venv's own pip explicitly so the installs work even if the
        # snippet is run in a shell where activation did not take effect.
        f"(cd {DIR} && {DIR}/venv/bin/pip install -r {DIR}/requirements.txt)",
        f"(cd {DIR} && {DIR}/venv/bin/pip install --editable dace)",
    ]
    return '\n'.join(commands)
#!/bin/bash
###############################################################################
# Copyright (c) 2018 Advanced Micro Devices, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###############################################################################

# Sets up the ROCm user-land stack on Fedora: yum repo, packages, an optional
# from-source HCC/HIP build, udev rules and 'video' group membership.
BASE_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
set -e
trap 'lastcmd=$curcmd; curcmd=$BASH_COMMAND' DEBUG
trap 'errno=$?; print_cmd=$lastcmd; if [ $errno -ne 0 ]; then echo "\"${print_cmd}\" command failed with exit code $errno."; fi' EXIT
source "$BASE_DIR/common/common_options.sh"
parse_args "$@"

echo "Preparing to set up ROCm requirements. You must be root/sudo for this."
sudo dnf install -y kernel-headers-`uname -r` kernel-devel-`uname -r`

# Write the ROCm yum repository definition.
sudo sh -c "echo [ROCm] > /etc/yum.repos.d/rocm.repo"
sudo sh -c "echo name=ROCm >> /etc/yum.repos.d/rocm.repo"
sudo sh -c "echo baseurl=http://repo.radeon.com/rocm/yum/rpm >> /etc/yum.repos.d/rocm.repo"
sudo sh -c "echo enabled=1 >> /etc/yum.repos.d/rocm.repo"
sudo sh -c "echo gpgcheck=0 >> /etc/yum.repos.d/rocm.repo"

# On Fedora, we can skip the kernel module because the proper KFD
# version was backported so our user-land tools can work cleanly.
# In addition, the ROCm 2.0.0 DKMS module fails to build against this
# kernel, so we must skip the driver.

# ROCm requirements
sudo dnf install -y gcc-c++

# We must build HCC from source because the RPM that ships in the AMD binary
# repo does not work here. Ask the user if they want to do this.
ROCM_BUILD_HCC_FROM_SOURCE=true
if [ "${ROCM_FORCE_YES}" = true ]; then
    ROCM_BUILD_HCC_FROM_SOURCE=true
elif [ "${ROCM_FORCE_NO}" = true ]; then
    ROCM_BUILD_HCC_FROM_SOURCE=false
else
    echo ""
    echo "This script will require you to build HCC from source."
    echo "This can take a long time."
    read -p "Do you wish to proceed to download/build HCC (y/n)? " answer
    case ${answer:0:1} in
        y|Y )
            # Fix: this previously set ROCM_RUN_NEXT_SCRIPT=true, which leaked
            # into the unrelated "run next script?" decision at the bottom
            # (answering "no" there was silently overridden).
            ROCM_BUILD_HCC_FROM_SOURCE=true
            echo 'User chose "yes". Will build HCC and install HIP etc.'
        ;;
        * )
            ROCM_BUILD_HCC_FROM_SOURCE=false
            echo 'User chose "no". Will not install HCC or HIP.'
            echo 'The ROCm libraries will thus not work either.'
        ;;
    esac
fi

sudo dnf --setopt=install_weak_deps=False install -y hsakmt-roct hsakmt-roct-dev hsa-rocr-dev hsa-ext-rocr-dev rocm-smi rocm-cmake rocminfo rocprofiler-dev rocm-opencl rocm-opencl-devel rocm-clang-ocl

if [ "${ROCM_BUILD_HCC_FROM_SOURCE}" = true ]; then
    echo "Installing HCC and HIP requires us to rebuild them from source."
    echo "This may take a while..."
    pushd ${BASE_DIR}/../src_install/component_scripts/
    HCC_TEMP_DIR=`mktemp -d`
    ./01_09_hcc.sh -s ${HCC_TEMP_DIR}/src/ -p ${HCC_TEMP_DIR}/pkg
    HIP_TEMP_DIR=`mktemp -d`
    ./01_10_hip.sh -s ${HIP_TEMP_DIR}/src/ -p ${HIP_TEMP_DIR}/pkg
    popd
    sudo dnf --setopt=install_weak_deps=False install -y rocm-device-libs atmi comgr rocr_debug_agent rocm_bandwidth_test rocm-dev rocm-utils
else
    sudo dnf --setopt=install_weak_deps=False install -y rocm-device-libs atmi comgr rocr_debug_agent rocm_bandwidth_test rocm-utils
fi

# Fix: /opt/rocm is normally root-owned, so create the directory with sudo
# (the tee that writes into it below already runs under sudo).
sudo mkdir -p /opt/rocm/.info/
echo ${ROCM_VERSION_LONG} | sudo tee /opt/rocm/.info/version

sudo mkdir -p /etc/udev/rules.d/
echo 'SUBSYSTEM=="kfd", KERNEL=="kfd", TAG+="uaccess", GROUP="video"' | sudo tee /etc/udev/rules.d/70-kfd.rules

# Detect if you are actually logged into the system or not.
# Containers, for instance, may not have you as a user with
# a meaningful value for logname
num_users=`who am i | wc -l`
if [ ${num_users} -gt 0 ]; then
    sudo usermod -a -G video `logname`
else
    echo ""
    echo "Was going to attempt to add your user to the 'video' group."
    echo "However, it appears that we cannot determine your username."
    echo "Perhaps you are running inside a container?"
    echo ""
fi

# Remove other OpenCL installations for stuff that isn't ROCm, or our OpenCL
# programs may crash with a lot of noise.
for app in pocl libclc beignet; do
    num_pkgs=`dnf list installed ${app} 2>/dev/null | wc -l`
    if [ ${num_pkgs} -gt 0 ]; then
        sudo dnf remove -y ${app}
    fi
done

if [ "${ROCM_FORCE_YES}" = true ]; then
    ROCM_RUN_NEXT_SCRIPT=true
elif [ "${ROCM_FORCE_NO}" = true ]; then
    ROCM_RUN_NEXT_SCRIPT=false
else
    echo ""
    echo "The next script will set up users on the system to have GPU access."
    read -p "Do you want to automatically run the next script now? (y/n)? " answer
    case ${answer:0:1} in
        y|Y )
            ROCM_RUN_NEXT_SCRIPT=true
            echo 'User chose "yes". Running next setup script.'
        ;;
        * )
            # Fix: record the "no" explicitly; the variable was previously
            # left unset here, which broke the quoted-test below and allowed
            # an earlier stray assignment to leak through.
            ROCM_RUN_NEXT_SCRIPT=false
            echo 'User chose "no". Not running the next script.'
        ;;
    esac
fi

# Quoted so an unset/empty variable compares as false instead of producing
# a '[: =: unary operator expected' error under set -e.
if [ "${ROCM_RUN_NEXT_SCRIPT}" = true ]; then
    ${BASE_DIR}/02_setup_rocm_users.sh "$@"
fi
|
# Terminal-title helpers: keep the window/pane title showing user@host and
# the working directory, plus the currently running command.
autoload -Uz add-zsh-hook

# Before each prompt: set the title to "user@host cwd".
# Under screen/tmux the "\e_ ... \e\\" sequence is used; elsewhere the
# xterm-style "\e]2; ... \a" window-title sequence is used.
title_precmd() {
    case $TERM in
        'screen'*|'tmux'*)
            print -Pn -- "\e_\005{g}%n\005{-}@\005{m}%m\005{-} \005{B}%~\005{-}\e\\"
        ;;
        *)
            print -Pn -- "\e]2;%n@%m > %~\a"
        ;;
    esac
}

# Before each command runs: append the shell-quoted command line ("${(q)1}")
# to the title; %(!.sudo .) prefixes "sudo " for privileged shells.
title_preexec() {
    case $TERM in
        'screen'*|'tmux'*)
            print -Pn -- '\e_\005{g}%n\005{-}@\005{m}%m\005{-} \005{B}%~\005{-} | %(!.sudo .) '
            print -n -- "${(q)1}\e\\"
        ;;
        *)
            print -Pn -- "\e]2;%n@%m > %~ | %(!.sudo .) "
            print -n -- "${(q)1}\a"
        ;;
    esac
}

# Returns 0 (true) when $1 names a terminal on the allow-list below.
is_term_in() {
    case $1 in
        'alacritty'*|'aterm'*|'Eterm'*|'gnome'*|'konsole'*|'kterm'*|'putty'*|'rxvt'*|'screen'*|'tmux'*|'xterm'*) return 0 ;;
        *) return 1
    esac
}

# Only install the hooks on terminals expected to understand title escapes.
if is_term_in "$TERM"; then
    add-zsh-hook -Uz precmd title_precmd
    add-zsh-hook -Uz preexec title_preexec
fi
|
#!/usr/bin/env bash
# Force-remove the local development containers. There is deliberately no
# 'set -e': docker's non-zero exit for a missing container is ignored.
for container in bt_mongo bt_es bt_s3 bt_smtp; do
    docker rm -f "$container"
done
|
<reponame>smagill/opensphere-desktop
package io.opensphere.core.util.javafx.input.view;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.geometry.Insets;
import javafx.scene.layout.Region;
/**
 * An invalidation listener that preserves the padding of the to-be-invalidated region.
 */
public class PaddedInvalidationListener implements InvalidationListener
{
    /** Guard flag preventing re-entrant invalidation while padding is adjusted. */
    private boolean myRounding;

    /** The region whose padding is kept rounded. */
    private final Region myTargetRegion;

    /**
     * Creates a new invalidation listener.
     *
     * @param pTargetRegion the region to invalidate.
     */
    public PaddedInvalidationListener(Region pTargetRegion)
    {
        myTargetRegion = pTargetRegion;
    }

    /**
     * {@inheritDoc}
     *
     * @see javafx.beans.InvalidationListener#invalidated(javafx.beans.Observable)
     */
    @Override
    public void invalidated(Observable observable)
    {
        if (myRounding)
        {
            // Re-entrant call triggered by our own setPadding() below; ignore.
            return;
        }

        Insets current = myTargetRegion.getPadding();
        Insets snapped = new Insets(Math.round(current.getTop()), Math.round(current.getRight()),
                Math.round(current.getBottom()), Math.round(current.getLeft()));
        if (snapped.equals(current))
        {
            return;
        }

        myRounding = true;
        myTargetRegion.setPadding(snapped);
        myRounding = false;
    }
}
|
/* Please view in Chrome for best effects! :) */

// Tunables.
var MAX_DISTANCE = 200;
var PARTICLES = 40;
var PARTICLE_SIZE = 5;

// Fixed helpers — not meant to be configured. :p
Math.Tau = Math.PI * 2;

// Random integer drawn from [min, max] (inclusive when the bounds are
// integers; callers passing fractional bounds get fractional results).
Math.rand = function rand(min, max) {
    var span = max - min + 1;
    return min + Math.floor(Math.random() * span);
};

// Linearly remap `value` from the range [imin, imax] onto [omin, omax].
Math.map = function map(value, imin, imax, omin, omax) {
    return ((value - imin) * (omax - omin) / (imax - imin) + omin);
};
// requestAnimationFrame polyfill: fall back to vendor-prefixed variants,
// then to a ~60fps setTimeout shim.
window.requestAnimFrame = (function(){
    return window.requestAnimationFrame ||
        window.webkitRequestAnimationFrame ||
        window.mozRequestAnimationFrame ||
        function( callback ){
            window.setTimeout(callback, 1000 / 60);
        };
})();
// Main entry: renders a full-screen field of bouncing particles on canvas
// #c, drawing distance-faded connecting lines between nearby pairs. One
// larger particle follows the mouse while the cursor is over the canvas.
window.addEventListener('load', function(event) {
    var canvas = document.getElementById('c');
    var context = canvas.getContext('2d');
    var width, height;

    var particleCounter = 0,
        hover = false,
        stats = new Stats(),
        mmon = new MousePositionMonitor(),
        is_firefox = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;

    stats.setMode(0); // Start off with FPS mode
    // Place the statistics at the bottom right.
    stats.domElement.style.position = 'absolute';
    stats.domElement.style.right = '5px';
    stats.domElement.style.bottom = '5px';
    document.body.appendChild(stats.domElement);

    context.lineWidth = "hairline";

    // Keep the canvas sized to the viewport.
    var resize = function(event) {
        width = canvas.width = window.innerWidth;
        height = canvas.height = window.innerHeight;
    }; resize();
    window.addEventListener('resize', resize);

    // Track whether the pointer is over the canvas (drives the mouse particle).
    canvas.addEventListener('mouseenter', function() {
        hover = true;
    });
    canvas.addEventListener('mouseleave', function() {
        hover = false;
    });

    // Simple RGBA color value; channels are stored as integers (alpha
    // defaults to 255, i.e. opaque).
    var Color = function Color(r, g, b, a) {
        this.r = Math.floor(r);
        this.g = Math.floor(g);
        this.b = Math.floor(b);
        this.a = Math.floor(a || 255);
    };
    Color.prototype.clone = function() {
        return new Color(this.r, this.g, this.b, this.a);
    };
    // CSS string form; rgb() when fully opaque, rgba() otherwise.
    Color.prototype.toString = function() {
        if(this.a === 255) {
            return 'rgb(' + this.r + ', ' + this.g + ', ' + this.b + ')';
        } else {
            return 'rgba(' + this.r + ', ' + this.g + ', ' + this.b + ', ' + (this.a / 255) + ')';
        }
    };

    // A bouncing circle with position, velocity, size, color and unique id.
    var Particle = function Particle(x, y, size, color) {
        this.x = x;
        this.y = y;
        this.s = size;
        this.r = size / 2;
        // Random speed with an independent random sign per axis.
        this.vx = (Math.random() < 0.5 ? -1 : 1) * Math.rand(0.5, 2);
        this.vy = (Math.random() < 0.5 ? -1 : 1) * Math.rand(0.5, 2);
        this.id = particleCounter++;
        if(color instanceof Color) {
            this.c = color;
        } else {
            this.c = new Color(255, 255, 255, 255);
        }
    };
    // Euclidean distance to another particle (undefined for non-particles).
    Particle.prototype.distance = function(that) {
        if(that instanceof Particle) {
            return Math.sqrt((this.x-that.x) * (this.x - that.x) + (this.y - that.y) * (this.y - that.y));
        }
    };
    // Advance one frame, reflecting the velocity off the canvas edges.
    Particle.prototype.step = function() {
        this.x = (this.x + this.vx);
        if(this.x < this.r) {
            this.x = this.r;
            this.vx *= -1;
        } else if(this.x > width - this.r) {
            this.x = width - this.r;
            this.vx *= -1;
        }
        this.y = (this.y + this.vy);
        if(this.y < this.r) {
            this.y = this.r;
            this.vy *= -1;
        } else if(this.y > height - this.r) {
            this.y = height - this.r;
            this.vy *= -1;
        }
    };
    // Draw the particle as a filled circle in its own color.
    Particle.prototype.render = function() {
        context.fillStyle = this.c.toString();
        context.beginPath();
        context.arc(this.x, this.y, Math.floor(this.s / 2), 0, Math.Tau, false);
        context.closePath();
        context.fill();
    };

    // Populate with randomly placed, randomly colored particles (one slot is
    // reserved for the mouse particle pushed below).
    var particles = [];
    for(var i = 0; i < PARTICLES - 1; i++) {
        particles.push(
            new Particle(
                Math.random() * width,
                Math.random() * height,
                PARTICLE_SIZE,
                new Color(
                    Math.random() * 255,
                    Math.random() * 255,
                    Math.random() * 255,
                    255)
            )
        );
    }

    // this one is controllable by mouse movement.
    var mouseParticle = new Particle(
        Math.random() * width,
        Math.random() * height,
        PARTICLE_SIZE * 2,
        new Color(0, 200, 100, 255)
    );
    mouseParticle.imp = true;
    particles.push(mouseParticle);

    // Draw one frame: fade the previous frame, move/draw every particle and
    // stroke a line between each pair closer than MAX_DISTANCE.
    var render = function() {
        //context.clearRect(0, 0, width, height);
        // Semi-transparent fill instead of a full clear leaves motion trails.
        context.fillStyle = 'rgba(0, 0, 0, 0.3)';
        context.fillRect(0, 0, width, height);

        // render all the particles and check distances
        var paired = {};
        var ipart = PARTICLES;
        while(ipart--) {
            var p1 = particles[ipart];
            var jpart = ipart;
            p1.step();
            if(p1.imp && hover) {
                var pos = mmon.getMousePosition();
                p1.x = pos.x;
                p1.y = pos.y;
            }
            p1.render();
            // Connect p1 to each closer-indexed particle not already paired.
            while(jpart--) {
                var p2 = particles[jpart];
                if(p1 !== p2 && !paired[p1.id + '-' + p2.id] && !paired[p2.id + '-' + p1.id]) {
                    var distance = p1.distance(p2);
                    if(distance < MAX_DISTANCE) {
                        if(!is_firefox) {
                            // Gradient stroke blending the two particle
                            // colors, with alpha fading out as distance grows.
                            var grd = context.createLinearGradient(p1.x, p1.y, p2.x, p2.y),
                                c1 = p1.c.clone(), c2 = p2.c.clone();
                            c1.a = c2.a = Math.floor(Math.map(distance, MAX_DISTANCE, 0, 0, 255));
                            grd.addColorStop(0, c1), grd.addColorStop(1, c2);
                            context.strokeStyle = grd;
                        } else {
                            // Flat distance-faded color on Firefox —
                            // presumably a gradient workaround; verify.
                            var c = p1.c.clone();
                            c.a = Math.floor(Math.map(distance, MAX_DISTANCE, 0, 0, 255));
                            context.strokeStyle = c.toString();
                        }
                        context.beginPath();
                        context.moveTo(p1.x, p1.y);
                        context.lineTo(p2.x, p2.y);
                        context.closePath();
                        context.stroke();
                        paired[p1.id + '-' + p2.id] = paired[p2.id + '-' + p1.id] = true;
                    }
                }
            }
        }
    };

    // Animation loop with FPS accounting around each frame.
    var loop = function() {
        requestAnimFrame(loop);
        stats.begin();
        render();
        stats.end();
    }; loop();
});
import { Channel } from '../../../state';
import Hls from 'hls.js';
// Attach the channel's HLS stream to the #videoPlayer element and start
// playback: use hls.js where Media Source Extensions are supported, fall
// back to native HLS (e.g. Safari), and log an error otherwise.
function initializeHLSPlayer(channel) {
    const player = document.getElementById('videoPlayer');

    if (Hls.isSupported()) {
        // MSE path: hls.js loads and demuxes the stream.
        const hls = new Hls();
        hls.loadSource(channel.videoUrl);
        hls.attachMedia(player);
        hls.on(Hls.Events.MANIFEST_PARSED, () => {
            player.play();
        });
        return;
    }

    if (player.canPlayType('application/vnd.apple.mpegurl')) {
        // Native HLS path.
        player.src = channel.videoUrl;
        player.addEventListener('loadedmetadata', () => {
            player.play();
        });
        return;
    }

    console.error('HLS is not supported');
}
// Example usage
// Minimal channel-shaped object; only `videoUrl` is read by the player.
const channel = {
    id: '123',
    name: 'Sample Channel',
    videoUrl: 'https://example.com/sample-video.m3u8'
};
initializeHLSPlayer(channel);
<filename>mediawiki/resources/src/mediawiki.action/mediawiki.action.view.dblClickEdit.js
/*!
 * Enables double-click-to-edit functionality.
 */
( function ( mw, $ ) {
    // On DOM ready, bind to double-clicks anywhere inside the content area.
    $( function () {
        mw.util.$content.dblclick( function ( e ) {
            e.preventDefault();
            // Trigger native HTMLElement click instead of opening URL (bug 43052)
            $( '#ca-edit a' ).get( 0 ).click();
        } );
    } );
}( mediaWiki, jQuery ) );
|
def compare_versions(version1: str, version2: str) -> str:
    """Compare two dotted numeric version strings.

    Fixes a syntax error in the original (unbalanced parenthesis when parsing
    ``version2``) and generalizes the comparison from exactly three
    components to any number, padding the shorter version with zeros so that
    e.g. ``"1.0"`` equals ``"1.0.0"``.

    :param version1: first version, e.g. ``"1.2.3"``.
    :param version2: second version, e.g. ``"1.2.4"``.
    :return: ``"Version 1 is greater"``, ``"Version 2 is greater"`` or
        ``"Versions are equal"``.
    :raises ValueError: if a component is not an integer.
    """
    v1_parts = [int(part) for part in version1.split('.')]
    v2_parts = [int(part) for part in version2.split('.')]

    # Pad the shorter list with zeros so trailing ".0" components are neutral.
    length = max(len(v1_parts), len(v2_parts))
    v1_parts += [0] * (length - len(v1_parts))
    v2_parts += [0] * (length - len(v2_parts))

    for a, b in zip(v1_parts, v2_parts):
        if a > b:
            return "Version 1 is greater"
        if a < b:
            return "Version 2 is greater"
    return "Versions are equal"
<filename>src/app/app.module.ts
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { AppComponent } from './app.component';
// pages
import { HomeComponent } from './pages/home/home.component';
// components
import { NavBarComponent } from './components/NavBar/nav-bar.component';
// Root Angular module: wires up the browser platform, declares the app's
// components and bootstraps AppComponent.
@NgModule({
  imports: [
    BrowserModule
  ],
  declarations: [
    AppComponent,
    HomeComponent,
    NavBarComponent
  ],
  bootstrap: [
    AppComponent
  ]
})
export class AppModule { }
|
// Tiny Express app: serves a blog-post creation form and a (stub) handler
// that receives the form submission. Listens on port 3000.
const express = require('express');
const app = express();

// GET /: serve the post-creation form.
app.get('/', (req, res)=>{
    res.send(`
<html>
<head>
<title>Create a blog post</title>
</head>
<body>
<form action="/create-post" method="POST">
<input type="text" name="title" placeholder="Post Title" />
<textarea name="content" placeholder="Post Content"></textarea>
<input type="submit" value="Create Post" />
</form>
</body>
</html>
`);
})

// POST /create-post: handler is still a stub.
// NOTE(review): reading req.body here would require body-parsing middleware
// (e.g. express.urlencoded()) to be installed first — confirm before
// implementing.
app.post('/create-post', (req, res)=>{
    //...
})

app.listen(3000);
<reponame>caikaijie/generator-ts-repo
import Generator, { Answers } from 'yeoman-generator'
import chalk from 'chalk'
import yosay from 'yosay'
import path from 'path'
import sortPackageJSON from 'sort-package-json'
import { askForPackageName } from './package'
import fse from 'fs-extra'
/**
 * Yeoman generator that scaffolds a TypeScript repository: prompts for a
 * package name, copies config files/templates into a new directory named
 * after the package, and renders README.md and package.json.
 */
export default class extends Generator {
    // Answers collected during the prompting phase (provides `repoName`).
    answers: Answers

    constructor(args: string | string[], options: {}) {
        // Calling the super constructor is important so our generator is correctly set up
        super(args, options)
        this.answers = {}
    }

    /** Prompting phase: greet the user and ask for the package name. */
    async prompting(): Promise<void> {
        // Have Yeoman greet the user.
        this.log(
            yosay(`Welcome to the ${chalk.red('generator-ts-repo')} generator!`)
        )

        // const prompts = [
        //   {
        //     type: 'input',
        //     name: 'repoName',
        //     message: 'Repo name?',
        //   },
        // ]
        // this.answers = await this.prompt(prompts)
        this.answers = await askForPackageName(this)
        this.log(`I don't have more questions, thanks. Let's Go!\n`)
    }

    /** Writing phase: copy templates and render README.md / package.json. */
    writing(): void {
        // Relocate Root Dirs.
        // Template sources live three directories above the default source
        // root; the destination is a new directory named after the repo.
        const sourceRoot = path.join(this.sourceRoot(), '../../../')
        this.sourceRoot(sourceRoot)
        const destinationRoot = path.join(
            this.destinationRoot(),
            this.answers.repoName
        )
        fse.ensureDirSync(destinationRoot)
        this.destinationRoot(destinationRoot)

        // Copy configs, dotfiles and LICENSE.
        const files = [
            '.editorconfig',
            '.eslintignore',
            '.eslintrc.js',
            '.gitattributes',
            '.gitignore',
            '.prettierrc',
            '.travis.yml',
            '.vscode',
            'commitlint.config.js',
            'jest.config.js',
            'LICENSE',
            'tsconfig.json',
        ]
        files.forEach(file => {
            this.fs.copy(this.templatePath(file), this.destinationPath(file))
        })

        // Copy static files.
        this.fs.copy(
            this.templatePath('templates/src'),
            this.destinationPath('./src')
        )

        // Render README.md.
        this.fs.copyTpl(
            this.templatePath('templates/README.md'),
            this.destinationPath('README.md'),
            { repoName: this.answers.repoName }
        )

        // Render package.json
        // Rewrite the package name, sort keys conventionally, write it out.
        const pkg: { name: string } = this.fs.readJSON(
            this.templatePath('templates/package.json')
        )
        pkg.name = this.answers.repoName
        const json = JSON.stringify(sortPackageJSON(pkg), null, 2)
        this.fs.write(this.destinationPath('package.json'), json)
    }

    /** Install phase: init a git repo and tell the user how to install deps. */
    install(): void {
        // Try to init git repo.
        const r = this.spawnCommandSync('git', ['init', '--quiet'], {
            cwd: this.destinationPath(),
        })
        if (r.error != undefined) {
            this.log(
                `${chalk.red(chalk.bold('Tip: '))} "git" not found, skipped.\n`
            )
        }

        const message = `Please install dependencies in your favorite way, thanks.`
        const hints = `(running "yarn" or "npm install")`
        this.log(
            yosay(message + '\n' + hints, {
                maxLength: message.length,
            })
        )
    }
}
|
# Renders @hypotheses as a JSON array: the core attributes of each
# hypothesis plus its canonical JSON URL.
json.array!(@hypotheses) do |hypothesis|
  json.extract! hypothesis, :id, :short_name, :state, :business_model_part, :hypotheses, :experiment, :pass_fail_criterion, :created_by
  json.url hypothesis_url(hypothesis, format: :json)
end
|
#!/bin/bash
# Template for local secrets: copy this file to .setenv.sh and replace the
# placeholder values, then source it before running the app.
# Sets secrets like connection strings.
export AZURE_STORAGE_CONNECTION_STRING="SETME"
|
// CLI helper: emits the event named by the first CLI argument while
// process-error logging is active, then restores normal error handling.
import { argv } from 'process'

// eslint-disable-next-line import/order
import logProcessErrors from 'log-process-errors'

import { EVENTS_MAP } from './events/main.js'

const stopLogging = logProcessErrors()

// NOTE(review): an unknown event name makes EVENTS_MAP[eventName] undefined,
// so .emit() throws synchronously and stopLogging never runs — confirm
// whether that is acceptable for this script.
const [, , eventName] = argv
EVENTS_MAP[eventName].emit().then(stopLogging).catch(stopLogging)
|
import { Channel, PlaylistCompact, VideoCompact } from "..";
/** Options accepted by the search API. */
export declare type SearchOptions = {
    /** Restrict results to one resource kind, or "all" for every kind. */
    type: "video" | "channel" | "playlist" | "all";
    /** Maximum number of results to return. */
    limit: number;
};

/**
 * Maps a SearchOptions-like type to its result type: "video" yields
 * VideoCompact, "channel" yields Channel, "playlist" yields PlaylistCompact;
 * any other type yields the union of all three.
 */
export declare type SearchType<T> = T extends {
    type: "video";
} ? VideoCompact : T extends {
    type: "channel";
} ? Channel : T extends {
    type: "playlist";
} ? PlaylistCompact : VideoCompact | Channel | PlaylistCompact;

/** Options accepted when fetching a playlist. */
export declare type GetPlaylistOptions = {
    // Upper bound related to playlist continuation fetching — exact
    // semantics are defined by the implementation; TODO confirm.
    continuationLimit: number;
};
|
<filename>scripts/scenes/telaInicial.js
// Title-screen scene: draws the background image, the game title and the
// start button. Relies on globals (image, textFont, fill, text, width,
// height — presumably p5.js — plus telaInicialImage, fontTelaInicial and
// managerButton); verify they are loaded before draw() is called.
class TelaInicial {
    constructor() {}

    // Renders one frame of the title screen.
    draw() {
        this._background();
        this._text();
        this._button();
    }

    // Full-canvas background image.
    _background() {
        image(telaInicialImage, 0, 0, width, height);
    }

    // Centered two-line title text.
    _text() {
        textFont(fontTelaInicial);
        textAlign(CENTER);
        textSize(50);
        fill("#fff");
        text("Run Peter,", width / 2, 100);
        text("RUN!", width / 2, 150);
    }

    // Position the shared button at 5/8 of the screen height and draw it.
    _button() {
        managerButton.y = (height / 8) * 5;
        managerButton.draw();
    }
}
|
#!/usr/bin/env bash
# ignore-tidy-linelength

# Download the prebuilt relibc toolchain for x86_64-unknown-redox and unpack
# it into /usr/local.
set -ex
curl https://static.redox-os.org/toolchain/x86_64-unknown-redox/relibc-install.tar.gz | \
tar --extract --gzip --directory /usr/local
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The mkscoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check for shellcheck warnings in shell scripts.

export LC_ALL=C

# The shellcheck binary segfault/coredumps in Travis with LC_ALL=C
# It does not do so in Ubuntu 14.04, 16.04, 18.04 in versions 0.3.3, 0.3.7, 0.4.6
# respectively. So export LC_ALL=C is set as required by lint-shell-locale.sh
# but unset here in case of running in Travis.
if [ "$TRAVIS" = "true" ]; then
    unset LC_ALL
fi

# Soft dependency: succeed (exit 0) when shellcheck is unavailable.
if ! command -v shellcheck > /dev/null; then
    echo "Skipping shell linting since shellcheck is not installed."
    exit 0
fi

# Disabled warnings:
disabled=(
    SC1087 # Use braces when expanding arrays, e.g. ${array[idx]} (or ${var}[.. to quiet).
    SC2001 # See if you can use ${variable//search/replace} instead.
    SC2004 # $/${} is unnecessary on arithmetic variables.
    SC2005 # Useless echo? Instead of 'echo $(cmd)', just use 'cmd'.
    SC2006 # Use $(..) instead of legacy `..`.
    SC2016 # Expressions don't expand in single quotes, use double quotes for that.
    SC2028 # echo won't expand escape sequences. Consider printf.
    SC2046 # Quote this to prevent word splitting.
    SC2048 # Use "$@" (with quotes) to prevent whitespace problems.
    SC2066 # Since you double quoted this, it will not word split, and the loop will only run once.
    SC2086 # Double quote to prevent globbing and word splitting.
    SC2116 # Useless echo? Instead of 'cmd $(echo foo)', just use 'cmd foo'.
    SC2162 # read without -r will mangle backslashes.
    SC2166 # Prefer [ p ] {&&,||} [ q ] as [ p -{a,o} q ] is not well defined.
    SC2181 # Check exit code directly with e.g. 'if mycmd;', not indirectly with $?.
    SC2206 # Quote to prevent word splitting, or split robustly with mapfile or read -a.
    SC2207 # Prefer mapfile or read -a to split command output (or quote to avoid splitting).
    SC2230 # which is non-standard. Use builtin 'command -v' instead.
    SC2236 # Don't force -n instead of ! -z.
)

# Run shellcheck with the exclusions joined by commas over every tracked
# *.sh file except the vendored subtrees. The command substitution below is
# intentionally unquoted so each filename becomes a separate argument
# (assumes no whitespace in tracked script paths — TODO confirm).
shellcheck -e "$(IFS=","; echo "${disabled[*]}")" \
    $(git ls-files -- "*.sh" | grep -vE 'src/(secp256k1|univalue)/')
|
import React from 'react';
import Nutriscore from '../../components/Nutriscore/Nutriscore';
const Index: React.FC = () => {
return (
<Nutriscore letter="c"></Nutriscore>
);
}
export default Index; |
<gh_stars>10-100
#include "template_handler_url.h"
#include "url_utils.h"
/* Template-handler token transformers. Each takes the raw token text and
 * returns a transformed copy; allocation behavior follows the called
 * url_*/base64_* helpers and the APR pool. */

/* URL-encode `src` (config is unused). */
char* temphand_url_encodeToken(pool* p,void* config,char* src){
    return url_encode2(p,src);
}

/* URL-decode `src` (config is unused). */
char* temphand_url_decodeToken(pool* p,void* config,char* src){
    return url_decode2(p,src);
}

// base64 encode decode
/* Base64-encode `src` into a pool-duplicated string; NULL input yields the
 * literal "(null)".
 * NOTE(review): `encoded` is a fixed 2048-byte stack buffer while base64
 * output is ~4/3 of the input length, so inputs longer than roughly 1500
 * bytes would overflow it — confirm the input bound, or size the buffer
 * from strlen(src). */
char* temphand_base64_encodeToken(pool* p,void* config,char* src){
    char decoded[2048];
    if(src==NULL) return "(null)";
    base64_encode(decoded, (char*)src, strlen(src));
    return apr_pstrdup(p, decoded);
}

/* Base64-decode `src` into a pool-duplicated string; NULL input yields the
 * literal "(null)".
 * NOTE(review): same fixed-buffer sizing concern as the encoder above. */
char* temphand_base64_decodeToken(pool* p,void* config,char* src){
    char decoded[2048];
    if(src==NULL) return "(null)";
    base64_decode(decoded, (char*)src, strlen(src));
    return apr_pstrdup(p, decoded);
}
|
import React ,{ Component } from 'react'
import { connect } from 'react-redux'
import InputForm from '../../components/InputForm'
import NumberList from '../../components/NumberList'
import { handleInputChange,
updateDuplicateAndUniqInputsFromText,
fetchNumbers,
addNumbersToArray } from '../../actions/inputFormActions'
// Container component for Problem 2: an input form that collects numbers
// plus a list of the accumulated numbers, with all state held in redux
// (the InputForm slice).
class Problem2 extends Component {
    componentDidMount() {
    }

    // Merge the newly entered unique numbers into the stored list.
    // NOTE(review): this passes ONE payload object, but the
    // mapDispatchToProps wrapper below declares (newNumbers, oldNumbers) —
    // its second parameter is always undefined here. Verify the action
    // creator's expected signature.
    handleSubmit() {
        let {newUniqNumbers, numberList} = this.props
        this.props.addNumbersToArray({
            newNumbers: newUniqNumbers,
            oldNumbers: numberList
        })
    }

    render() {
        const { handleKeyUp, inputText, numberList, duplicateNumbers, numbersByValue, isSaving, error } = this.props
        return (
            <div className="container-fluid">
                <div className="push-20 col-md-12">
                    <InputForm inputText={inputText}
                        numbers = {duplicateNumbers}
                        handleKeyUp={handleKeyUp}
                        handleSubmit={this.handleSubmit.bind(this)}
                        isSaving={isSaving}
                        error={error}
                    />
                    <NumberList numbers={numberList}
                        numbersByValue={numbersByValue}/>
                </div>
            </div>
        )
    }
}

// Expose the whole InputForm slice of the store as props.
const mapStateToProps = (state) => {
    return {
        ...state.InputForm
    }
}

// Wrap the imported action creators in dispatch.
const mapDispatchToProps = (dispatch) => {
    return {
        handleKeyUp: (payload) => {
            return dispatch(handleInputChange(payload))
        },
        updateDuplicateAndUniqInputsFromText: (text) => {
            return dispatch(updateDuplicateAndUniqInputsFromText(text))
        },
        fetchNumbers: () => {
            return dispatch(fetchNumbers())
        },
        addNumbersToArray: (newNumbers, oldNumbers) => {
            return dispatch(addNumbersToArray(newNumbers, oldNumbers))
        }
    }
}

export default connect(
    mapStateToProps,
    mapDispatchToProps
)(Problem2)
|
package org.sklsft.generator.bc.metadata.factory.impl;
import java.util.ArrayList;
import org.sklsft.generator.bc.metadata.factory.interfaces.TableFactory;
import org.sklsft.generator.bc.resolvers.DatabaseHandlerResolver;
import org.sklsft.generator.model.domain.Model;
import org.sklsft.generator.model.domain.Package;
import org.sklsft.generator.model.domain.database.Column;
import org.sklsft.generator.model.domain.database.Table;
import org.sklsft.generator.model.domain.database.UniqueConstraint;
import org.sklsft.generator.model.metadata.ColumnMetaData;
import org.sklsft.generator.model.metadata.DataType;
import org.sklsft.generator.model.metadata.RelationType;
import org.sklsft.generator.model.metadata.TableMetaData;
import org.sklsft.generator.model.metadata.UniqueConstraintMetaData;
import org.sklsft.generator.model.metadata.Visibility;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
/**
 * Default {@link TableFactory} implementation. Builds {@link Table} metadata
 * in two passes: {@link #scanTable} creates the bare table so cross-table
 * references can later be resolved, and {@link #fillTable} populates columns,
 * references and unique constraints once every table is known to the model.
 */
@Component
public class TableFactoryImpl implements TableFactory {

    private static final Logger logger = LoggerFactory.getLogger(TableFactory.class);

    /**
     * First pass: create the table shell (names, id type/generator,
     * cardinality).
     *
     * @param tableMetaData raw table description
     * @param myPackage     the package the table belongs to
     * @return the partially-initialized table
     */
    @Override
    public Table scanTable(TableMetaData tableMetaData, Package myPackage) {
        Table table = new Table();
        table.myPackage = myPackage;
        table.originalName = tableMetaData.getName();
        table.name = DatabaseHandlerResolver.getDatabaseHandler(myPackage.model.project).rename(table.originalName);

        if (tableMetaData.getIdType() != null) {
            table.idType = tableMetaData.getIdType();
        } else {
            table.idType = DataType.LONG;
        }

        // Fix: this previously tested table.idGeneratorType (always null on a
        // freshly created Table), so an explicitly configured generator was
        // silently ignored and the id-type default was always used.
        if (tableMetaData.getIdGeneratorType() != null) {
            table.idGeneratorType = tableMetaData.getIdGeneratorType();
        } else {
            table.idGeneratorType = table.idType.getDefaultGenerator();
        }

        table.cardinality = tableMetaData.getCardinality();

        logger.trace("Table found : " + tableMetaData.getName());
        return table;
    }

    /**
     * Second pass: populate the table's columns and unique constraints,
     * resolving table references against the (now complete) model.
     *
     * @param tableMetaData raw table description
     * @param model         the model containing every scanned table
     * @return the fully-populated table
     */
    @Override
    public Table fillTable(TableMetaData tableMetaData, Model model) {
        Table table = model.findTable(tableMetaData.getName());
        for (ColumnMetaData columnMetaData : tableMetaData.getColumns()) {
            Column column = new Column();
            column.originalName = columnMetaData.getName();
            column.name = DatabaseHandlerResolver.getDatabaseHandler(model.project).rename(column.originalName);
            if (columnMetaData.getDataType() != null) {
                column.dataType = columnMetaData.getDataType();
            }
            column.nullable = (columnMetaData.getNullable());
            if (columnMetaData.getReferenceTableRelation() != null) {
                column.relation = columnMetaData.getReferenceTableRelation();
            } else {
                column.relation = RelationType.PROPERTY;
            }
            // Component relations are owned by this table, so deletes cascade.
            column.deleteCascade = (column.relation.equals(RelationType.MANY_TO_ONE_COMPONENT));
            column.referenceTable = model.findTable(columnMetaData.getReferenceTableName());
            if (column.referenceTable != null) {
                // A foreign key takes the referenced table's id type.
                column.dataType = column.referenceTable.idType;
            }
            column.unique = columnMetaData.getUnique() || column.relation.isUnique();
            column.editable = columnMetaData.getEditable();
            column.filterable = columnMetaData.getFilterable();
            if (columnMetaData.getVisibility()!=null) {
                column.visibility = columnMetaData.getVisibility();
            } else {
                column.visibility = Visibility.VISIBLE;
            }
            column.rendering = columnMetaData.getRendering();
            column.annotations = columnMetaData.getAnnotations();
            table.columns.add(column);
        }
        if (tableMetaData.getUniqueConstraints()!=null && !tableMetaData.getUniqueConstraints().isEmpty()) {
            table.uniqueConstraints = new ArrayList<>();
            for (UniqueConstraintMetaData uniqueConstraintMetaData:tableMetaData.getUniqueConstraints()) {
                UniqueConstraint uniqueConstraint = new UniqueConstraint();
                uniqueConstraint.name = uniqueConstraintMetaData.getName();
                for (String columnName:uniqueConstraintMetaData.getFields()) {
                    uniqueConstraint.columns.add(table.findColumnByName(columnName));
                }
                table.uniqueConstraints.add(uniqueConstraint);
            }
        }
        return table;
    }
}
|
<filename>index.ts
import express from "express";
import privateRoute from "./src/routes/private.route";
import publicRoute from "./src/routes/public.route";
import authRoute from "./src/routes/auth.route";
import cookieParser from "cookie-parser";

// HTTP entry point: form/JSON body parsing, cookie parsing, and the three
// route groups mounted under /auth, /private and /public.
const app = express();

app.use(
  express.urlencoded({
    extended: true,
  })
);
app.use(express.json());
app.use(cookieParser());

app.use("/auth", authRoute);
app.use("/private", privateRoute);
app.use("/public", publicRoute);

// Generalization: allow the port to be overridden via $PORT, keeping the
// previously hard-coded 8000 as the default.
const PORT = Number(process.env.PORT ?? 8000);
app.listen(PORT, () => {
  console.log(`Listening to ${PORT}`);
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.