text stringlengths 1 1.05M |
|---|
<?php

declare(strict_types=1);

// Set your secret key. Remember to switch to your live secret key in production!
// See your keys here: https://dashboard.stripe.com/account/apikeys
//
// SECURITY: never commit a live secret key ("sk_live_...") to source control.
// Read it from the environment instead.
\Stripe\Stripe::setApiKey(getenv('STRIPE_SECRET_KEY') ?: '');

// Token is created using Checkout or Elements!
// Get the payment token ID submitted by the form:
$token = $_POST['stripeToken'] ?? '';
if ($token === '') {
    http_response_code(400);
    exit('Missing payment token.');
}

try {
    // Amount is in the smallest currency unit: 999 == $9.99 USD.
    $charge = \Stripe\Charge::create([
        'amount' => 999,
        'currency' => 'usd',
        'description' => 'Example charge',
        'source' => $token,
    ]);
    echo 'Payment successful!';
} catch (\Stripe\Exception\ApiErrorException $e) {
    // Declined card / API failure: report a generic error, never internals.
    http_response_code(402);
    echo 'Payment failed.';
}
?> |
<filename>app/router/test.js
import test from '../controller/test.js';
module.exports = function(router) {
router.get('/page/index', test.index);
router.get('/page/login', test.login);
router.get('/page/article', test.listArticle);
router.get('/page/article/[0-9]{1,11}', test.viewArticle);
router.get('/page/article/add', test.addArticle);
}; |
def invert_dictionary(d):
    """Invert ``d``: map each value to the list of keys that carried it.

    Keys sharing a value are grouped in insertion order of ``d``.
    """
    inverted = {}
    for key, value in d.items():
        inverted.setdefault(value, []).append(key)
    return inverted
//
//  NCIBar.h
//  NCIChart
//
//  Created by Ira on 3/11/14.
//  Copyright (c) 2014 FlowForwarding.Org. All rights reserved.
//

#import <UIKit/UIKit.h>

// A plain UIView subclass used as a bar element by NCIChart.
// No extra API is declared here; any behaviour lives in the implementation
// file (NCIBar.m), which is not visible from this header.
@interface NCIBar : UIView

@end
|
<reponame>shrtCKT/FCG-to-Vector
package util.math;
/**
 * Static helpers for basic vector algebra over {@code Double[]} and
 * {@code double[]} vectors.
 */
public class LinearAlgebra {

    /** Returns the Euclidean (L2) norm of the given vector. */
    public static double magnitude(Double[] vector) {
        double mag = 0;
        for (Double v : vector) {
            mag += v * v;
        }
        return Math.sqrt(mag);
    }

    /** Returns the Euclidean (L2) norm of the given vector. */
    public static double magnitude(double[] vector) {
        double mag = 0;
        // FIX: iterate with the primitive type; the original declared the
        // loop variable as Double, auto-boxing every element for no benefit.
        for (double v : vector) {
            mag += v * v;
        }
        return Math.sqrt(mag);
    }

    /** Returns the dot product of the two vectors (lengths must match). */
    public static double dotProduct(Double[] vector1, Double[] vector2) {
        double prod = 0;
        for (int i = 0; i < vector1.length; i++) {
            prod += vector1[i] * vector2[i];
        }
        return prod;
    }

    /** Returns the dot product of the two vectors (lengths must match). */
    public static double dotProduct(double[] vector1, double[] vector2) {
        double prod = 0;
        for (int i = 0; i < vector1.length; i++) {
            prod += vector1[i] * vector2[i];
        }
        return prod;
    }

    /**
     * Returns the cosine similarity of the two vectors, or 0 when either
     * vector has zero magnitude (avoids division by zero).
     */
    public static double cosine(Double[] vector1, Double[] vector2) {
        double dotProd = dotProduct(vector1, vector2);
        double mag1 = magnitude(vector1);
        double mag2 = magnitude(vector2);
        if (mag1 == 0 || mag2 == 0) {
            return 0;
        }
        return dotProd / (mag1 * mag2);
    }

    /**
     * Primitive overload of {@link #cosine(Double[], Double[])}, added for
     * consistency: magnitude and dotProduct both have primitive overloads.
     */
    public static double cosine(double[] vector1, double[] vector2) {
        double dotProd = dotProduct(vector1, vector2);
        double mag1 = magnitude(vector1);
        double mag2 = magnitude(vector2);
        if (mag1 == 0 || mag2 == 0) {
            return 0;
        }
        return dotProd / (mag1 * mag2);
    }
}
|
<reponame>love-adela/algorithm<filename>acmicpc/1012/1012-1.py<gh_stars>1-10
"""
배추밭의 가로길이: M(1 ≤ M ≤ 50)
세로길이 : N(1 ≤ N ≤ 50)
배추가 심어져 있는 위치의 개수 :K
"""
import sys
sys.setrecursionlimit(10**8)
def dfs(curr_x, curr_y, visited):
visited[curr_x][curr_y] = True; cnt = 1
for dx, dy in (-1, 0), (0, -1), (1, 0), (0, 1):
next_x = curr_x + dx
next_y = curr_y + dy
# 무한루프가 돌지 않게 처리 : 옆에 있는 정점끼리 왔다갔다 하는걸 막음
if 0<= next_x<M and 0<=next_y< N and not visited[next_x][next_y]:
if table[next_x][next_y] == 1:
res = dfs(next_x, next_y, visited)
cnt += res
return cnt
T = int(input())
while T:
M, N, K = map(int, input().split())
table = [[0] * N for _ in range(M)]
visited = [[False]*N for _ in range(M)]
answer = 0
for _ in range(K):
Y, X = map(int, input().split())
table[Y][X] = 1
for i in range(M):
for j in range(N):
# 연결요소가 여러개이기 때문에 visited 체크를 해줘야 함
# 다녀왔던 정점도 dfs가 가능하다고 여기지 않게 막음
if table[i][j] == 1 and not visited[i][j]:
dfs(i, j, visited)
answer += 1
print(answer)
T -= 1
|
import React, {useState} from 'react';
import styled from 'styled-components';
import { SidebarData } from './SidebarData';
import SubMenu from './SubMenu';
import { IconContext } from 'react-icons/lib';

// Fixed full-height navigation drawer. Slides in from the left when the
// `sidebar` prop is true; parked off-canvas (left: -100%) otherwise.
const SidebarNav = styled.nav`
background: #1C72BD;
width: 250px;
height: 100vh;
display: flex;
justify-content: center;
position: fixed;
left: ${({ sidebar }) => (sidebar ? '0' : '-100%')};
transition: 350ms;
z-index: 10;
`;

// Inner container that stretches menu items to the drawer's width.
const SidebarWrap = styled.div`
width: 100%;
`;

// Toggle button shown for each device mode ('V' and 'D').
const SidebarButton = styled.button`
padding: 20px;
height: 60px;
width: 125px;
color: white;
font-size: 16px;
border: white;
background-color: #1C72BD;
:hover {
background-color: #99c6f5;
}
`;

// Sidebar component: two buttons select the device mode (via the button's
// `name` attribute) and toggle the drawer open/closed; the drawer renders
// one SubMenu per SidebarData entry, passing the selected device down.
const Sidebar = () => {
  // Selected device mode; initialised to 'V'. Set from the clicked
  // button's `name`.
  const [device, setDevice] = useState('V');
  const changeDevice = (evt) => setDevice(evt.target.name);

  // Drawer open/closed state; each button click also toggles it.
  const [sidebar, setSidebar] = useState(false);
  const showSidebar = () => setSidebar(!sidebar);

  return (
    <IconContext.Provider value={{ color: 'black' }}>
      <SidebarButton name='V' onClick={(evt)=>{changeDevice(evt); showSidebar();}}>V</SidebarButton>
      <SidebarButton name='D' onClick={(evt)=>{changeDevice(evt); showSidebar();}}>D</SidebarButton>
      <SidebarNav sidebar={sidebar}>
        <SidebarWrap>
          {SidebarData.map((item, index) => {
            return <SubMenu item={item} key={index} device={device}/>;
          })}
        </SidebarWrap>
      </SidebarNav>
    </IconContext.Provider>
  );
};

export default Sidebar;
export default Sidebar; |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2012:1256
#
# Security announcement date: 2012-09-11 18:35:16 UTC
# Script generation date: 2017-01-01 21:14:10 UTC
#
# Operating System: Red Hat 5
# Architecture: i386
#
# Vulnerable packages fix on version:
#   - ghostscript.i386:8.70-14.el5_8.1
#   - ghostscript-debuginfo.i386:8.70-14.el5_8.1
#   - ghostscript-gtk.i386:8.70-14.el5_8.1
#   - ghostscript-devel.i386:8.70-14.el5_8.1
#
# Last versions recommended by security team:
#   - ghostscript.i386:8.70-14.el5_8.1
#   - ghostscript-debuginfo.i386:8.70-14.el5_8.1
#   - ghostscript-gtk.i386:8.70-14.el5_8.1
#   - ghostscript-devel.i386:8.70-14.el5_8.1
#
# CVE List:
#   - CVE-2012-4405
#
# More details:
#   - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# FIX: install the fully pinned fixed versions using yum's
# name-version-release.arch form. The original spec "ghostscript.i386-8.70"
# is not a package specification yum can resolve, and it did not pin the
# release (-14.el5_8.1) that actually contains the security fix.
sudo yum install ghostscript-8.70-14.el5_8.1.i386 -y
sudo yum install ghostscript-debuginfo-8.70-14.el5_8.1.i386 -y
sudo yum install ghostscript-gtk-8.70-14.el5_8.1.i386 -y
sudo yum install ghostscript-devel-8.70-14.el5_8.1.i386 -y
|
#!/bin/bash
#SBATCH --job-name=/data/unibas/boittier/test-neighbours2
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --partition=short
#SBATCH --output=/data/unibas/boittier/test-neighbours2_%A-%a.out

hostname

# Path to scripts and executables
cubefit=/home/unibas/boittier/fdcm_project/mdcm_bin/cubefit.x
fdcm=/home/unibas/boittier/fdcm_project/fdcm.x
ars=/home/unibas/boittier/fdcm_project/ARS.py

# Variables for the job
n_steps=2
n_charges=24
scan_name=frame_
suffix=.chk
cubes_dir=/data/unibas/boittier/fdcm/amide_graph
output_dir=/data/unibas/boittier/test-neighbours2
frames=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/frames.txt
initial_fit=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/24_charges_refined.xyz
initial_fit_cube=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/amide1.pdb.chk
prev_frame=0
start_frame=11
# NOTE(review): bash array elements are space-separated, so "(11, 30)" makes
# the first element the literal string "11," (with a trailing comma), and
# $next_frame below expands to only that first element — confirm whether
# "next_frame=(11 30)" was intended.
next_frame=(11, 30)
acd=/home/unibas/boittier/fdcm_project/0_fit.xyz.acd

start=$start_frame
next=$next_frame
dir='frame_'$next
output_name=$output_dir/$dir/$dir'-'$start'-'$next'.xyz'
# Overrides the initial_fit set above: reuse the fit from the previous frame.
initial_fit=$output_dir/"frame_"$start/"frame_"$start'-'$prev_frame'-'$start'.xyz'

# Go to the output directory
mkdir -p $output_dir
cd $output_dir
mkdir -p $dir
cd $dir

# Do Initial Fit
# for initial fit
esp1=$cubes_dir/$scan_name$start$suffix'.p.cube'
dens1=$cubes_dir/$scan_name$start$suffix'.d.cube'
esp=$cubes_dir/$scan_name$next$suffix'.p.cube'
dens=$cubes_dir/$scan_name$next$suffix'.d.cube'

# adjust reference frame
python $ars -charges $initial_fit -pcube $dens1 -pcube2 $dens -frames $frames -output $output_name -acd $acd > $output_name.ARS.log
# do gradient descent fit
$fdcm -xyz $output_name.global -dens $dens -esp $esp -stepsize 0.2 -n_steps $n_steps -learning_rate 0.5 -output $output_name > $output_name.GD.log
# adjust reference frame
python $ars -charges $output_name -pcube $esp -pcube2 $esp -frames $frames -output $output_name -acd $acd > $output_name.ARS-2.log
# make a cube file for the fit
$cubefit -v -generate -esp $esp -dens $dens -xyz refined.xyz > $output_name.cubemaking.log
# do analysis
$cubefit -v -analysis -esp $esp -esp2 $n_charges'charges.cube' -dens $dens > $output_name.analysis.log

echo $PWD
|
#!/bin/bash
# Cross-compile the Go package given in "$@" for every enabled platform,
# collecting any failed targets in FAILURES and reporting them at the end.

PLATFORMS="darwin/amd64 darwin/arm64" # amd64 only as of go1.5
PLATFORMS="$PLATFORMS windows/amd64 windows/386" # arm compilation not available for Windows
PLATFORMS="$PLATFORMS linux/amd64 linux/386"
#PLATFORMS="$PLATFORMS linux/ppc64 linux/ppc64le"
#PLATFORMS="$PLATFORMS linux/mips64 linux/mips64le" # experimental in go1.6
#PLATFORMS="$PLATFORMS freebsd/amd64"
#PLATFORMS="$PLATFORMS netbsd/amd64" # amd64 only as of go1.6
#PLATFORMS="$PLATFORMS openbsd/amd64" # amd64 only as of go1.6
#PLATFORMS="$PLATFORMS dragonfly/amd64" # amd64 only as of go1.5
#PLATFORMS="$PLATFORMS plan9/amd64 plan9/386" # as of go1.4
#PLATFORMS="$PLATFORMS solaris/amd64" # as of go1.3

# ARMBUILDS lists the platforms that are currently supported.  From this list
# we generate the following architectures:
#
#   ARM64 (aka ARMv8) <- only supported on linux and darwin builds (go1.6)
#   ARMv7
#   ARMv6
#   ARMv5
#
# Some words of caution from the master:
#
#   @dfc: you'll have to use gomobile to build for darwin/arm64 [and others]
#   @dfc: that target expects that you're bulding for a mobile phone
#   @dfc: iphone 5 and below, ARMv7, iphone 3 and below ARMv6, iphone 5s and above arm64
#
PLATFORMS_ARM="linux"
#PLATFORMS_ARM="linux freebsd netbsd"

##############################################################
# Shouldn't really need to modify anything below this line.  #
##############################################################

type setopt >/dev/null 2>&1

SCRIPT_NAME=`basename "$0"`
FAILURES=""
SOURCE_FILE=`echo $@ | sed 's/\.go//'`
CURRENT_DIRECTORY=${PWD##*/}
#OUTPUT=${SOURCE_FILE:-$CURRENT_DIRECTORY} # if no src file given, use current dir name
OUTPUT=reindexer

for PLATFORM in $PLATFORMS; do
  GOOS=${PLATFORM%/*}
  GOARCH=${PLATFORM#*/}
  BIN_FILENAME="binaries/${GOOS}-${GOARCH}/${OUTPUT}"
  if [[ "${GOOS}" == "windows" ]]; then BIN_FILENAME="${BIN_FILENAME}.exe"; fi
  CMD="GOOS=${GOOS} GOARCH=${GOARCH} go build -o ${BIN_FILENAME} $@"
  echo "${CMD}"
  eval $CMD || FAILURES="${FAILURES} ${PLATFORM}"
done

# ARM builds
if [[ $PLATFORMS_ARM == *"linux"* ]]; then
  CMD="GOOS=linux GOARCH=arm64 go build -o binaries/linux-arm64/${OUTPUT} $@"
  echo "${CMD}"
  # FIX: the original appended ${PLATFORM}, the stale value left over from
  # the previous loop, so an arm64 failure was reported under the wrong
  # platform name. Report the actual target instead.
  eval $CMD || FAILURES="${FAILURES} linux/arm64"
fi
for GOOS in $PLATFORMS_ARM; do
  GOARCH="arm"
  # build for each ARM version
  for GOARM in 7 6 5; do
    BIN_FILENAME="binaries/${GOOS}-${GOARCH}${GOARM}/${OUTPUT}"
    CMD="GOARM=${GOARM} GOOS=${GOOS} GOARCH=${GOARCH} go build -o ${BIN_FILENAME} $@"
    echo "${CMD}"
    eval "${CMD}" || FAILURES="${FAILURES} ${GOOS}/${GOARCH}${GOARM}"
  done
done

# eval errors
if [[ "${FAILURES}" != "" ]]; then
  echo ""
  echo "${SCRIPT_NAME} failed on: ${FAILURES}"
  exit 1
fi
# Habitat build plan for the cairo 2D graphics library.
pkg_name=cairo
pkg_origin=core
pkg_version="1.14.10"
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_license=(
  "LGPL-2.1"
  "MPL-1.1"
)
pkg_source="https://www.cairographics.org/releases/${pkg_name}-${pkg_version}.tar.xz"
pkg_shasum="7e87878658f2c9951a14fc64114d4958c0e65ac47530b8ac3078b2ce41b66a09"
pkg_description="Cairo is a 2D graphics library with support for multiple output devices."
pkg_upstream_url="https://www.cairographics.org"

# Runtime dependencies.
pkg_deps=(
  core/bzip2
  core/expat
  core/fontconfig
  core/freetype
  core/gcc-libs
  core/glib
  core/glibc
  core/libffi
  core/libice
  core/libiconv
  core/libpng
  core/libsm
  core/libxau
  core/libxcb
  core/libxdmcp
  core/libxext
  core/lzo
  core/pcre
  core/pixman
  core/xlib
  core/zlib
)

# Build-time-only dependencies.
pkg_build_deps=(
  core/diffutils
  core/file
  core/gcc
  core/make
  core/pkg-config
  core/xextproto
  core/xproto
)

pkg_bin_dirs=(bin)
pkg_include_dirs=(include)
pkg_lib_dirs=(
  lib
  lib/cairo
)
pkg_pconfig_dirs=(lib/pkgconfig)

do_prepare() {
  # The configure step expects /usr/bin/file; if the host lacks it, link the
  # Habitat-provided binary in and remember to remove it again in do_end.
  if [[ ! -r /usr/bin/file ]]; then
    ln -sv "$(pkg_path_for file)/bin/file" /usr/bin/file
    _clean_file=true
  fi
}

do_build() {
  # Optimise for size on top of whatever flags Habitat already provides.
  CFLAGS="-Os ${CFLAGS}"
  ./configure --prefix="${pkg_prefix}" \
    --enable-xlib
  make
}

do_check() {
  make test
}

do_end() {
  # Undo the /usr/bin/file symlink created in do_prepare, if we made one.
  if [[ -n "$_clean_file" ]]; then
    rm -fv /usr/bin/file
  fi
}
|
#include <stdio.h>
/*
 * Return the smallest element of `array`.
 *
 * array: the values to scan (not modified).
 * size:  number of elements in `array`.
 *
 * FIX: the original read array[0] unconditionally, which is undefined
 * behaviour when size <= 0; we now return 0 for an empty input. The loop
 * also starts at 1, since element 0 is already the running minimum.
 */
int findMinimum(int array[], int size)
{
    if (size <= 0)
    {
        return 0;
    }
    int min = array[0];
    int i;
    for (i = 1; i < size; ++i)
    {
        if (array[i] < min)
        {
            min = array[i];
        }
    }
    return min;
}
/* Demonstrates findMinimum on a small fixed array. */
int main(void)
{
    int values[] = {5, 6, 3, 2, 8, 4};
    int count = sizeof(values) / sizeof(values[0]);
    int smallest = findMinimum(values, count);
    printf("Minimum: %d\n", smallest);
    return 0;
}
<filename>commons-geometry-io-core/src/test/java/org/apache/commons/geometry/io/core/output/StreamGeometryOutputTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.geometry.io.core.output;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for the {@code StreamGeometryOutput} constructor overloads and
 * {@code toString()}: each overload is checked for which fields it sets and
 * which it leaves null.
 */
class StreamGeometryOutputTest {

    // Sink handed to every constructor under test. JUnit's default lifecycle
    // creates a fresh test-class instance (and thus a fresh stream) per test.
    private final ByteArrayOutputStream byteStream = new ByteArrayOutputStream();

    @Test
    void testCtor_stream() {
        // act: stream-only constructor
        final StreamGeometryOutput out = new StreamGeometryOutput(byteStream);

        // assert: file name and charset default to null
        Assertions.assertNull(out.getFileName());
        Assertions.assertNull(out.getCharset());
        Assertions.assertEquals(byteStream, out.getOutputStream());
    }

    @Test
    void testCtor_streamAndFileName() {
        // act: stream + file name constructor
        final StreamGeometryOutput out = new StreamGeometryOutput(byteStream, "test.txt");

        // assert: charset still defaults to null
        Assertions.assertEquals("test.txt", out.getFileName());
        Assertions.assertNull(out.getCharset());
        Assertions.assertEquals(byteStream, out.getOutputStream());
    }

    @Test
    void testCtor_allArgs() {
        // act: fully specified constructor
        final StreamGeometryOutput out = new StreamGeometryOutput(byteStream, "test.txt", StandardCharsets.UTF_16);

        // assert: every argument is preserved
        Assertions.assertEquals("test.txt", out.getFileName());
        Assertions.assertEquals(StandardCharsets.UTF_16, out.getCharset());
        Assertions.assertEquals(byteStream, out.getOutputStream());
    }

    @Test
    void testToString() {
        // arrange
        final StreamGeometryOutput out = new StreamGeometryOutput(byteStream, "abc.txt");

        // act
        final String result = out.toString();

        // assert: toString reports only the file name
        Assertions.assertEquals("StreamGeometryOutput[fileName= abc.txt]", result);
    }
}
|
from base import BaseGame
from packages.utils import Value
class TicTacToe(BaseGame):
    """
    The classic game of Tic Tac Toe

    The board is a 3x3 grid of the X / O / EMPTY constants; a whole position
    can be packed into a single base-3 integer via hash()/unhash().

    NOTE(review): this module uses Python 2 syntax (print statements and
    integer division with /) -- it will not run unmodified under Python 3.
    """
    # Display characters indexed by cell value: X -> 'x', O -> 'o', EMPTY -> ' '.
    PLAYER_PIECES = ['x', 'o', ' ']
    X = 0
    O = 1
    EMPTY = 2

    def __init__(self):
        """
        Initialize the play board

        All nine cells start EMPTY and player X (index 0) moves first.
        """
        self.board = [
            [self.EMPTY, self.EMPTY, self.EMPTY],
            [self.EMPTY, self.EMPTY, self.EMPTY],
            [self.EMPTY, self.EMPTY, self.EMPTY]
        ]
        self.players_turn = 0

    def hash(self):
        """
        Turn a board into a position

        Packs the nine cells into one integer in base 3, with cell index 0
        as the most significant digit. The trailing /= 3 undoes the extra
        *= 3 applied after the last cell (integer division under Python 2).
        """
        hash = 0
        for index in range(9):
            row, col = self._index_to_coords(index)
            hash += self.board[row][col]
            hash *= 3
        hash /= 3
        return hash

    @classmethod
    def unhash(cls, hash):
        """
        Turn a position (value) into a board

        Decodes base-3 digits from the least significant end (cell index 8
        down to 0), then infers whose turn it is from the piece counts.
        """
        board = cls()
        x_count = 0
        o_count = 0
        for index in range(8, -1, -1):
            row, col = cls._index_to_coords(index)
            piece_value = hash % 3
            board.board[row][col] = piece_value
            if cls.PLAYER_PIECES[piece_value] == 'x':
                x_count += 1
            elif cls.PLAYER_PIECES[piece_value] == 'o':
                o_count += 1
            hash /= 3
        # X moves first, so one extra x means it is O's turn; otherwise the
        # counts must match and the __init__ default (X's turn) stands.
        if x_count == o_count + 1:
            board.players_turn = cls.O
        else:
            assert(x_count == o_count)
        return board

    def get_moves(self):
        """
        Get supported moves

        Returns 1-based (row, column) pairs for every empty cell.
        """
        moves = []
        for row, columns in enumerate(self.board):
            for column, space in enumerate(columns):
                if space == self.EMPTY:
                    moves.append((row + 1, column + 1))
        return moves

    def do_move(self, move):
        """
        Apply the move to the current board

        `move` is a 1-based (row, column) pair; places the current player's
        piece and toggles the turn.
        """
        self.board[move[0] - 1][move[1] - 1] = self.players_turn
        self.players_turn = (self.players_turn + 1) % 2

    def undo_move(self, move):
        """
        Unapply the move that resulted in the current board

        Clears the cell back to EMPTY and toggles the turn back.
        """
        self.board[move[0] - 1][move[1] - 1] = self.EMPTY
        self.players_turn = (self.players_turn + 1) % 2

    def get_value(self):
        """
        Return if this is an ending position

        do_move has already toggled the turn, so any completed line belongs
        to `other_player` and counts as a LOSS for the player to move. Any
        remaining empty cell means the game is still UNKNOWN; a full board
        with no line is a TIE.
        """
        other_player = (self.players_turn + 1) % 2
        # rows
        for row in self.board:
            if row[0] == row[1] == row[2] == other_player:
                return Value.LOSS
        # columns
        for column in range(0, 3):
            if self.board[0][column] == \
                    self.board[1][column] == \
                    self.board[2][column] == other_player:
                return Value.LOSS
        # diagonals
        if self.board[0][0] == self.board[1][1] == self.board[2][2] == other_player:
            return Value.LOSS
        if self.board[0][2] == self.board[1][1] == self.board[2][0] == other_player:
            return Value.LOSS
        # any empty cell -> game not over yet
        for row in self.board:
            for space in row:
                if space == self.EMPTY:
                    return Value.UNKNOWN
        return Value.TIE

    def print_position(self):
        """
        Print the specified position

        Renders the board with row/column labels and announces whose turn it
        is. Python 2 print statements; trailing commas suppress the newline.
        """
        print
        print '======================='
        print 'Current game board:'
        print
        print ' columns'
        print ' 1 | 2 | 3'
        print ' --------------------'
        print ' |'
        for row_i, row in enumerate(self.board):
            if row_i == 1:
                print 'rows ', row_i + 1, '| ',
            else:
                print ' ', row_i + 1, '| ',
            for column_i, space in enumerate(row):
                print self.PLAYER_PIECES[space],
                if column_i < 2:
                    print ' | ',
                else:
                    print
            if row_i < 2:
                print ' -- | ---------------'
                print ' |'
        print
        print 'Player {}\'s turn ({})!'.format(
            str(self.players_turn + 1), self.PLAYER_PIECES[self.players_turn]
        )
        print '======================='
        print

    @staticmethod
    def _index_to_coords(index):
        """
        Converts a linear index to a row x column

        >>> TicTacToe._index_to_coords(0)
        (0, 0)
        >>> TicTacToe._index_to_coords(1)
        (0, 1)
        >>> TicTacToe._index_to_coords(3)
        (1, 0)
        >>> TicTacToe._index_to_coords(4)
        (1, 1)
        >>> TicTacToe._index_to_coords(8)
        (2, 2)
        """
        return (index / 3, index % 3)

    @staticmethod
    def _coords_to_index(row, column):
        """
        Converts a row x clumn to a linear index

        >>> TicTacToe._coords_to_index(0, 0)
        0
        >>> TicTacToe._coords_to_index(0, 1)
        1
        >>> TicTacToe._coords_to_index(1, 0)
        3
        >>> TicTacToe._coords_to_index(1, 1)
        4
        >>> TicTacToe._coords_to_index(2, 2)
        8
        """
        return (row) * 3 + column
|
#!/bin/bash
# Generate the coarse 2D and 3D meshes, then copy each result from gemlab's
# output directory into the current directory. Each copy runs only if the
# corresponding generator succeeded (&&).
go run gen2d.go && cp /tmp/gemlab/d2-coarse.msh .
go run gen3d.go && cp /tmp/gemlab/d3-coarse.msh .
|
<html>
<head>
<title>Verify User</title>
</head>
<body>
<form action="/verify" method="post">
Username: <input type="text" name="username" size="30"><br>
Password: <input type="password" name="password" size="30"><br><br>
<input type="submit" value="Submit">
</form>
<br>
<h3>Result: <%=@result%></h3>
</body>
</html>
<%
def verify
username = params[:username]
password = params[:password]
if username == "user" && password == "pass":
@result = "Valid user"
else
@result = "Invalid user"
end
end
%> |
<reponame>scala-steward/aws-sdk-scalajs-facade
package facade.amazonaws.services
import scalajs._
import scalajs.js.annotation.JSImport
import scala.scalajs.js.|
import scala.concurrent.Future
import facade.amazonaws._
// Type aliases and convenience syntax for the AWS Data Lifecycle Manager
// (DLM) Scala.js facade.
// NOTE(review): this file follows the repetitive shape of a machine-generated
// facade — if so, prefer regenerating over hand-editing.
package object dlm {
  type ActionList = js.Array[Action]
  type ActionName = String
  type AvailabilityZone = String
  type AvailabilityZoneList = js.Array[AvailabilityZone]
  type AwsAccountId = String
  type CmkArn = String
  type CopyTags = Boolean
  type CopyTagsNullable = Boolean
  type Count = Int
  type CronExpression = String
  type CrossRegionCopyActionList = js.Array[CrossRegionCopyAction]
  type CrossRegionCopyRules = js.Array[CrossRegionCopyRule]
  type DescriptionRegex = String
  type Encrypted = Boolean
  type ExcludeBootVolume = Boolean
  type ExecutionRoleArn = String
  type Interval = Int
  type LifecyclePolicySummaryList = js.Array[LifecyclePolicySummary]
  type NoReboot = Boolean
  type PolicyArn = String
  type PolicyDescription = String
  type PolicyId = String
  type PolicyIdList = js.Array[PolicyId]
  type ResourceLocationList = js.Array[ResourceLocationValues]
  type ResourceTypeValuesList = js.Array[ResourceTypeValues]
  type ScheduleList = js.Array[Schedule]
  type ScheduleName = String
  type ShareRules = js.Array[ShareRule]
  type ShareTargetAccountList = js.Array[AwsAccountId]
  type SnapshotOwnerList = js.Array[AwsAccountId]
  type StatusMessage = String
  type TagFilter = String
  type TagKey = String
  type TagKeyList = js.Array[TagKey]
  type TagMap = js.Dictionary[TagValue]
  type TagValue = String
  type TagsToAddFilterList = js.Array[TagFilter]
  type TagsToAddList = js.Array[Tag]
  type Target = String
  type TargetRegion = String
  type TargetTagList = js.Array[Tag]
  type TargetTagsFilterList = js.Array[TagFilter]
  type Time = String
  type TimesList = js.Array[Time]
  type Timestamp = js.Date
  type VariableTagsList = js.Array[Tag]

  // Future-returning wrappers over the SDK's promise-based client methods,
  // added to DLM via an allocation-free value class.
  implicit final class DLMOps(private val service: DLM) extends AnyVal {

    @inline def createLifecyclePolicyFuture(params: CreateLifecyclePolicyRequest): Future[CreateLifecyclePolicyResponse] = service.createLifecyclePolicy(params).promise().toFuture
    @inline def deleteLifecyclePolicyFuture(params: DeleteLifecyclePolicyRequest): Future[DeleteLifecyclePolicyResponse] = service.deleteLifecyclePolicy(params).promise().toFuture
    @inline def getLifecyclePoliciesFuture(params: GetLifecyclePoliciesRequest): Future[GetLifecyclePoliciesResponse] = service.getLifecyclePolicies(params).promise().toFuture
    @inline def getLifecyclePolicyFuture(params: GetLifecyclePolicyRequest): Future[GetLifecyclePolicyResponse] = service.getLifecyclePolicy(params).promise().toFuture
    @inline def listTagsForResourceFuture(params: ListTagsForResourceRequest): Future[ListTagsForResourceResponse] = service.listTagsForResource(params).promise().toFuture
    @inline def tagResourceFuture(params: TagResourceRequest): Future[TagResourceResponse] = service.tagResource(params).promise().toFuture
    @inline def untagResourceFuture(params: UntagResourceRequest): Future[UntagResourceResponse] = service.untagResource(params).promise().toFuture
    @inline def updateLifecyclePolicyFuture(params: UpdateLifecyclePolicyRequest): Future[UpdateLifecyclePolicyResponse] = service.updateLifecyclePolicy(params).promise().toFuture
  }
}
package dlm {
@js.native
@JSImport("aws-sdk/clients/dlm", JSImport.Namespace, "AWS.DLM")
class DLM() extends js.Object {
def this(config: AWSConfig) = this()
def createLifecyclePolicy(params: CreateLifecyclePolicyRequest): Request[CreateLifecyclePolicyResponse] = js.native
def deleteLifecyclePolicy(params: DeleteLifecyclePolicyRequest): Request[DeleteLifecyclePolicyResponse] = js.native
def getLifecyclePolicies(params: GetLifecyclePoliciesRequest): Request[GetLifecyclePoliciesResponse] = js.native
def getLifecyclePolicy(params: GetLifecyclePolicyRequest): Request[GetLifecyclePolicyResponse] = js.native
def listTagsForResource(params: ListTagsForResourceRequest): Request[ListTagsForResourceResponse] = js.native
def tagResource(params: TagResourceRequest): Request[TagResourceResponse] = js.native
def untagResource(params: UntagResourceRequest): Request[UntagResourceResponse] = js.native
def updateLifecyclePolicy(params: UpdateLifecyclePolicyRequest): Request[UpdateLifecyclePolicyResponse] = js.native
}
/** Specifies an action for an event-based policy.
*/
@js.native
trait Action extends js.Object {
var CrossRegionCopy: CrossRegionCopyActionList
var Name: ActionName
}
object Action {
@inline
def apply(
CrossRegionCopy: CrossRegionCopyActionList,
Name: ActionName
): Action = {
val __obj = js.Dynamic.literal(
"CrossRegionCopy" -> CrossRegionCopy.asInstanceOf[js.Any],
"Name" -> Name.asInstanceOf[js.Any]
)
__obj.asInstanceOf[Action]
}
}
@js.native
trait CreateLifecyclePolicyRequest extends js.Object {
var Description: PolicyDescription
var ExecutionRoleArn: ExecutionRoleArn
var PolicyDetails: PolicyDetails
var State: SettablePolicyStateValues
var Tags: js.UndefOr[TagMap]
}
object CreateLifecyclePolicyRequest {
@inline
def apply(
Description: PolicyDescription,
ExecutionRoleArn: ExecutionRoleArn,
PolicyDetails: PolicyDetails,
State: SettablePolicyStateValues,
Tags: js.UndefOr[TagMap] = js.undefined
): CreateLifecyclePolicyRequest = {
val __obj = js.Dynamic.literal(
"Description" -> Description.asInstanceOf[js.Any],
"ExecutionRoleArn" -> ExecutionRoleArn.asInstanceOf[js.Any],
"PolicyDetails" -> PolicyDetails.asInstanceOf[js.Any],
"State" -> State.asInstanceOf[js.Any]
)
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateLifecyclePolicyRequest]
}
}
@js.native
trait CreateLifecyclePolicyResponse extends js.Object {
var PolicyId: js.UndefOr[PolicyId]
}
object CreateLifecyclePolicyResponse {
@inline
def apply(
PolicyId: js.UndefOr[PolicyId] = js.undefined
): CreateLifecyclePolicyResponse = {
val __obj = js.Dynamic.literal()
PolicyId.foreach(__v => __obj.updateDynamic("PolicyId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateLifecyclePolicyResponse]
}
}
/** Specifies when to create snapshots of EBS volumes. You must specify either a Cron expression or an interval, interval unit, and start time. You cannot specify both.
*/
@js.native
trait CreateRule extends js.Object {
var CronExpression: js.UndefOr[CronExpression]
var Interval: js.UndefOr[Interval]
var IntervalUnit: js.UndefOr[IntervalUnitValues]
var Location: js.UndefOr[LocationValues]
var Times: js.UndefOr[TimesList]
}
object CreateRule {
@inline
def apply(
CronExpression: js.UndefOr[CronExpression] = js.undefined,
Interval: js.UndefOr[Interval] = js.undefined,
IntervalUnit: js.UndefOr[IntervalUnitValues] = js.undefined,
Location: js.UndefOr[LocationValues] = js.undefined,
Times: js.UndefOr[TimesList] = js.undefined
): CreateRule = {
val __obj = js.Dynamic.literal()
CronExpression.foreach(__v => __obj.updateDynamic("CronExpression")(__v.asInstanceOf[js.Any]))
Interval.foreach(__v => __obj.updateDynamic("Interval")(__v.asInstanceOf[js.Any]))
IntervalUnit.foreach(__v => __obj.updateDynamic("IntervalUnit")(__v.asInstanceOf[js.Any]))
Location.foreach(__v => __obj.updateDynamic("Location")(__v.asInstanceOf[js.Any]))
Times.foreach(__v => __obj.updateDynamic("Times")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateRule]
}
}
/** Specifies a rule for copying shared snapshots across Regions.
*/
@js.native
trait CrossRegionCopyAction extends js.Object {
var EncryptionConfiguration: EncryptionConfiguration
var Target: Target
var RetainRule: js.UndefOr[CrossRegionCopyRetainRule]
}
object CrossRegionCopyAction {
@inline
def apply(
EncryptionConfiguration: EncryptionConfiguration,
Target: Target,
RetainRule: js.UndefOr[CrossRegionCopyRetainRule] = js.undefined
): CrossRegionCopyAction = {
val __obj = js.Dynamic.literal(
"EncryptionConfiguration" -> EncryptionConfiguration.asInstanceOf[js.Any],
"Target" -> Target.asInstanceOf[js.Any]
)
RetainRule.foreach(__v => __obj.updateDynamic("RetainRule")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CrossRegionCopyAction]
}
}
/** Specifies the retention rule for cross-Region snapshot copies.
*/
@js.native
trait CrossRegionCopyRetainRule extends js.Object {
var Interval: js.UndefOr[Interval]
var IntervalUnit: js.UndefOr[RetentionIntervalUnitValues]
}
object CrossRegionCopyRetainRule {
@inline
def apply(
Interval: js.UndefOr[Interval] = js.undefined,
IntervalUnit: js.UndefOr[RetentionIntervalUnitValues] = js.undefined
): CrossRegionCopyRetainRule = {
val __obj = js.Dynamic.literal()
Interval.foreach(__v => __obj.updateDynamic("Interval")(__v.asInstanceOf[js.Any]))
IntervalUnit.foreach(__v => __obj.updateDynamic("IntervalUnit")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CrossRegionCopyRetainRule]
}
}
/** Specifies a rule for cross-Region snapshot copies.
*/
@js.native
trait CrossRegionCopyRule extends js.Object {
var Encrypted: Encrypted
var CmkArn: js.UndefOr[CmkArn]
var CopyTags: js.UndefOr[CopyTagsNullable]
var RetainRule: js.UndefOr[CrossRegionCopyRetainRule]
var Target: js.UndefOr[Target]
var TargetRegion: js.UndefOr[TargetRegion]
}
object CrossRegionCopyRule {
@inline
def apply(
Encrypted: Encrypted,
CmkArn: js.UndefOr[CmkArn] = js.undefined,
CopyTags: js.UndefOr[CopyTagsNullable] = js.undefined,
RetainRule: js.UndefOr[CrossRegionCopyRetainRule] = js.undefined,
Target: js.UndefOr[Target] = js.undefined,
TargetRegion: js.UndefOr[TargetRegion] = js.undefined
): CrossRegionCopyRule = {
val __obj = js.Dynamic.literal(
"Encrypted" -> Encrypted.asInstanceOf[js.Any]
)
CmkArn.foreach(__v => __obj.updateDynamic("CmkArn")(__v.asInstanceOf[js.Any]))
CopyTags.foreach(__v => __obj.updateDynamic("CopyTags")(__v.asInstanceOf[js.Any]))
RetainRule.foreach(__v => __obj.updateDynamic("RetainRule")(__v.asInstanceOf[js.Any]))
Target.foreach(__v => __obj.updateDynamic("Target")(__v.asInstanceOf[js.Any]))
TargetRegion.foreach(__v => __obj.updateDynamic("TargetRegion")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CrossRegionCopyRule]
}
}
@js.native
trait DeleteLifecyclePolicyRequest extends js.Object {
var PolicyId: PolicyId
}
object DeleteLifecyclePolicyRequest {
@inline
def apply(
PolicyId: PolicyId
): DeleteLifecyclePolicyRequest = {
val __obj = js.Dynamic.literal(
"PolicyId" -> PolicyId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DeleteLifecyclePolicyRequest]
}
}
@js.native
trait DeleteLifecyclePolicyResponse extends js.Object
object DeleteLifecyclePolicyResponse {
@inline
def apply(): DeleteLifecyclePolicyResponse = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[DeleteLifecyclePolicyResponse]
}
}
/** Specifies the encryption settings for shared snapshots that are copied across Regions.
*/
@js.native
trait EncryptionConfiguration extends js.Object {
var Encrypted: Encrypted
var CmkArn: js.UndefOr[CmkArn]
}
object EncryptionConfiguration {
@inline
def apply(
Encrypted: Encrypted,
CmkArn: js.UndefOr[CmkArn] = js.undefined
): EncryptionConfiguration = {
val __obj = js.Dynamic.literal(
"Encrypted" -> Encrypted.asInstanceOf[js.Any]
)
CmkArn.foreach(__v => __obj.updateDynamic("CmkArn")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[EncryptionConfiguration]
}
}
/** Specifies an event that triggers an event-based policy.
*/
@js.native
trait EventParameters extends js.Object {
var DescriptionRegex: DescriptionRegex
var EventType: EventTypeValues
var SnapshotOwner: SnapshotOwnerList
}
object EventParameters {
@inline
def apply(
DescriptionRegex: DescriptionRegex,
EventType: EventTypeValues,
SnapshotOwner: SnapshotOwnerList
): EventParameters = {
val __obj = js.Dynamic.literal(
"DescriptionRegex" -> DescriptionRegex.asInstanceOf[js.Any],
"EventType" -> EventType.asInstanceOf[js.Any],
"SnapshotOwner" -> SnapshotOwner.asInstanceOf[js.Any]
)
__obj.asInstanceOf[EventParameters]
}
}
/** Specifies an event that triggers an event-based policy.
*/
@js.native
trait EventSource extends js.Object {
var Type: EventSourceValues
var Parameters: js.UndefOr[EventParameters]
}
object EventSource {
@inline
def apply(
Type: EventSourceValues,
Parameters: js.UndefOr[EventParameters] = js.undefined
): EventSource = {
val __obj = js.Dynamic.literal(
"Type" -> Type.asInstanceOf[js.Any]
)
Parameters.foreach(__v => __obj.updateDynamic("Parameters")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[EventSource]
}
}
@js.native
sealed trait EventSourceValues extends js.Any
object EventSourceValues {
val MANAGED_CWE = "MANAGED_CWE".asInstanceOf[EventSourceValues]
@inline def values = js.Array(MANAGED_CWE)
}
@js.native
sealed trait EventTypeValues extends js.Any
object EventTypeValues {
val shareSnapshot = "shareSnapshot".asInstanceOf[EventTypeValues]
@inline def values = js.Array(shareSnapshot)
}
/** Specifies a rule for enabling fast snapshot restore. You can enable fast snapshot restore based on either a count or a time interval.
*/
@js.native
trait FastRestoreRule extends js.Object {
var AvailabilityZones: AvailabilityZoneList
var Count: js.UndefOr[Count]
var Interval: js.UndefOr[Interval]
var IntervalUnit: js.UndefOr[RetentionIntervalUnitValues]
}
object FastRestoreRule {
@inline
def apply(
AvailabilityZones: AvailabilityZoneList,
Count: js.UndefOr[Count] = js.undefined,
Interval: js.UndefOr[Interval] = js.undefined,
IntervalUnit: js.UndefOr[RetentionIntervalUnitValues] = js.undefined
): FastRestoreRule = {
val __obj = js.Dynamic.literal(
"AvailabilityZones" -> AvailabilityZones.asInstanceOf[js.Any]
)
Count.foreach(__v => __obj.updateDynamic("Count")(__v.asInstanceOf[js.Any]))
Interval.foreach(__v => __obj.updateDynamic("Interval")(__v.asInstanceOf[js.Any]))
IntervalUnit.foreach(__v => __obj.updateDynamic("IntervalUnit")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[FastRestoreRule]
}
}
  /** Request shape for listing lifecycle policies; every filter field is optional. */
  @js.native
  trait GetLifecyclePoliciesRequest extends js.Object {
    var PolicyIds: js.UndefOr[PolicyIdList]
    var ResourceTypes: js.UndefOr[ResourceTypeValuesList]
    var State: js.UndefOr[GettablePolicyStateValues]
    var TagsToAdd: js.UndefOr[TagsToAddFilterList]
    var TargetTags: js.UndefOr[TargetTagsFilterList]
  }
  /** Companion factory: all fields optional, written only when defined. */
  object GetLifecyclePoliciesRequest {
    @inline
    def apply(
        PolicyIds: js.UndefOr[PolicyIdList] = js.undefined,
        ResourceTypes: js.UndefOr[ResourceTypeValuesList] = js.undefined,
        State: js.UndefOr[GettablePolicyStateValues] = js.undefined,
        TagsToAdd: js.UndefOr[TagsToAddFilterList] = js.undefined,
        TargetTags: js.UndefOr[TargetTagsFilterList] = js.undefined
    ): GetLifecyclePoliciesRequest = {
      val __obj = js.Dynamic.literal()
      PolicyIds.foreach(__v => __obj.updateDynamic("PolicyIds")(__v.asInstanceOf[js.Any]))
      ResourceTypes.foreach(__v => __obj.updateDynamic("ResourceTypes")(__v.asInstanceOf[js.Any]))
      State.foreach(__v => __obj.updateDynamic("State")(__v.asInstanceOf[js.Any]))
      TagsToAdd.foreach(__v => __obj.updateDynamic("TagsToAdd")(__v.asInstanceOf[js.Any]))
      TargetTags.foreach(__v => __obj.updateDynamic("TargetTags")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[GetLifecyclePoliciesRequest]
    }
  }
  /** Response shape: an optional list of policy summaries. */
  @js.native
  trait GetLifecyclePoliciesResponse extends js.Object {
    var Policies: js.UndefOr[LifecyclePolicySummaryList]
  }
  object GetLifecyclePoliciesResponse {
    @inline
    def apply(
        Policies: js.UndefOr[LifecyclePolicySummaryList] = js.undefined
    ): GetLifecyclePoliciesResponse = {
      val __obj = js.Dynamic.literal()
      Policies.foreach(__v => __obj.updateDynamic("Policies")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[GetLifecyclePoliciesResponse]
    }
  }
  /** Request shape for fetching a single policy; PolicyId is required. */
  @js.native
  trait GetLifecyclePolicyRequest extends js.Object {
    var PolicyId: PolicyId
  }
  object GetLifecyclePolicyRequest {
    @inline
    def apply(
        PolicyId: PolicyId
    ): GetLifecyclePolicyRequest = {
      val __obj = js.Dynamic.literal(
        "PolicyId" -> PolicyId.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[GetLifecyclePolicyRequest]
    }
  }
  /** Response shape: the requested policy, when present. */
  @js.native
  trait GetLifecyclePolicyResponse extends js.Object {
    var Policy: js.UndefOr[LifecyclePolicy]
  }
  object GetLifecyclePolicyResponse {
    @inline
    def apply(
        Policy: js.UndefOr[LifecyclePolicy] = js.undefined
    ): GetLifecyclePolicyResponse = {
      val __obj = js.Dynamic.literal()
      Policy.foreach(__v => __obj.updateDynamic("Policy")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[GetLifecyclePolicyResponse]
    }
  }
  /** String-backed enum of readable policy states: ENABLED, DISABLED, ERROR. */
  @js.native
  sealed trait GettablePolicyStateValues extends js.Any
  object GettablePolicyStateValues {
    val ENABLED = "ENABLED".asInstanceOf[GettablePolicyStateValues]
    val DISABLED = "DISABLED".asInstanceOf[GettablePolicyStateValues]
    val ERROR = "ERROR".asInstanceOf[GettablePolicyStateValues]
    @inline def values = js.Array(ENABLED, DISABLED, ERROR)
  }
  /** String-backed enum of creation-interval units; HOURS is the only value defined here. */
  @js.native
  sealed trait IntervalUnitValues extends js.Any
  object IntervalUnitValues {
    val HOURS = "HOURS".asInstanceOf[IntervalUnitValues]
    @inline def values = js.Array(HOURS)
  }
  /** Detailed information about a lifecycle policy.
    */
  @js.native
  trait LifecyclePolicy extends js.Object {
    var DateCreated: js.UndefOr[Timestamp]
    var DateModified: js.UndefOr[Timestamp]
    var Description: js.UndefOr[PolicyDescription]
    var ExecutionRoleArn: js.UndefOr[ExecutionRoleArn]
    var PolicyArn: js.UndefOr[PolicyArn]
    var PolicyDetails: js.UndefOr[PolicyDetails]
    var PolicyId: js.UndefOr[PolicyId]
    var State: js.UndefOr[GettablePolicyStateValues]
    var StatusMessage: js.UndefOr[StatusMessage]
    var Tags: js.UndefOr[TagMap]
  }
  /** Companion factory: every field optional, written only when defined. */
  object LifecyclePolicy {
    @inline
    def apply(
        DateCreated: js.UndefOr[Timestamp] = js.undefined,
        DateModified: js.UndefOr[Timestamp] = js.undefined,
        Description: js.UndefOr[PolicyDescription] = js.undefined,
        ExecutionRoleArn: js.UndefOr[ExecutionRoleArn] = js.undefined,
        PolicyArn: js.UndefOr[PolicyArn] = js.undefined,
        PolicyDetails: js.UndefOr[PolicyDetails] = js.undefined,
        PolicyId: js.UndefOr[PolicyId] = js.undefined,
        State: js.UndefOr[GettablePolicyStateValues] = js.undefined,
        StatusMessage: js.UndefOr[StatusMessage] = js.undefined,
        Tags: js.UndefOr[TagMap] = js.undefined
    ): LifecyclePolicy = {
      val __obj = js.Dynamic.literal()
      DateCreated.foreach(__v => __obj.updateDynamic("DateCreated")(__v.asInstanceOf[js.Any]))
      DateModified.foreach(__v => __obj.updateDynamic("DateModified")(__v.asInstanceOf[js.Any]))
      Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
      ExecutionRoleArn.foreach(__v => __obj.updateDynamic("ExecutionRoleArn")(__v.asInstanceOf[js.Any]))
      PolicyArn.foreach(__v => __obj.updateDynamic("PolicyArn")(__v.asInstanceOf[js.Any]))
      PolicyDetails.foreach(__v => __obj.updateDynamic("PolicyDetails")(__v.asInstanceOf[js.Any]))
      PolicyId.foreach(__v => __obj.updateDynamic("PolicyId")(__v.asInstanceOf[js.Any]))
      State.foreach(__v => __obj.updateDynamic("State")(__v.asInstanceOf[js.Any]))
      StatusMessage.foreach(__v => __obj.updateDynamic("StatusMessage")(__v.asInstanceOf[js.Any]))
      Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[LifecyclePolicy]
    }
  }
  /** Summary information about a lifecycle policy.
    */
  @js.native
  trait LifecyclePolicySummary extends js.Object {
    var Description: js.UndefOr[PolicyDescription]
    var PolicyId: js.UndefOr[PolicyId]
    var PolicyType: js.UndefOr[PolicyTypeValues]
    var State: js.UndefOr[GettablePolicyStateValues]
    var Tags: js.UndefOr[TagMap]
  }
  /** Companion factory: every field optional, written only when defined. */
  object LifecyclePolicySummary {
    @inline
    def apply(
        Description: js.UndefOr[PolicyDescription] = js.undefined,
        PolicyId: js.UndefOr[PolicyId] = js.undefined,
        PolicyType: js.UndefOr[PolicyTypeValues] = js.undefined,
        State: js.UndefOr[GettablePolicyStateValues] = js.undefined,
        Tags: js.UndefOr[TagMap] = js.undefined
    ): LifecyclePolicySummary = {
      val __obj = js.Dynamic.literal()
      Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
      PolicyId.foreach(__v => __obj.updateDynamic("PolicyId")(__v.asInstanceOf[js.Any]))
      PolicyType.foreach(__v => __obj.updateDynamic("PolicyType")(__v.asInstanceOf[js.Any]))
      State.foreach(__v => __obj.updateDynamic("State")(__v.asInstanceOf[js.Any]))
      Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[LifecyclePolicySummary]
    }
  }
  /** Request shape for listing tags on a policy; ResourceArn is required. */
  @js.native
  trait ListTagsForResourceRequest extends js.Object {
    var ResourceArn: PolicyArn
  }
  object ListTagsForResourceRequest {
    @inline
    def apply(
        ResourceArn: PolicyArn
    ): ListTagsForResourceRequest = {
      val __obj = js.Dynamic.literal(
        "ResourceArn" -> ResourceArn.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[ListTagsForResourceRequest]
    }
  }
  /** Response shape: the resource's tag map, when present. */
  @js.native
  trait ListTagsForResourceResponse extends js.Object {
    var Tags: js.UndefOr[TagMap]
  }
  object ListTagsForResourceResponse {
    @inline
    def apply(
        Tags: js.UndefOr[TagMap] = js.undefined
    ): ListTagsForResourceResponse = {
      val __obj = js.Dynamic.literal()
      Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListTagsForResourceResponse]
    }
  }
  /** String-backed enum of snapshot locations: CLOUD or OUTPOST_LOCAL. */
  @js.native
  sealed trait LocationValues extends js.Any
  object LocationValues {
    val CLOUD = "CLOUD".asInstanceOf[LocationValues]
    val OUTPOST_LOCAL = "OUTPOST_LOCAL".asInstanceOf[LocationValues]
    @inline def values = js.Array(CLOUD, OUTPOST_LOCAL)
  }
  /** Specifies optional parameters to add to a policy. The set of valid parameters depends on the combination of policy type and resource type.
    */
  @js.native
  trait Parameters extends js.Object {
    var ExcludeBootVolume: js.UndefOr[ExcludeBootVolume]
    var NoReboot: js.UndefOr[NoReboot]
  }
  /** Companion factory: both fields optional, written only when defined. */
  object Parameters {
    @inline
    def apply(
        ExcludeBootVolume: js.UndefOr[ExcludeBootVolume] = js.undefined,
        NoReboot: js.UndefOr[NoReboot] = js.undefined
    ): Parameters = {
      val __obj = js.Dynamic.literal()
      ExcludeBootVolume.foreach(__v => __obj.updateDynamic("ExcludeBootVolume")(__v.asInstanceOf[js.Any]))
      NoReboot.foreach(__v => __obj.updateDynamic("NoReboot")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[Parameters]
    }
  }
  /** Specifies the configuration of a lifecycle policy.
    */
  @js.native
  trait PolicyDetails extends js.Object {
    var Actions: js.UndefOr[ActionList]
    var EventSource: js.UndefOr[EventSource]
    var Parameters: js.UndefOr[Parameters]
    var PolicyType: js.UndefOr[PolicyTypeValues]
    var ResourceLocations: js.UndefOr[ResourceLocationList]
    var ResourceTypes: js.UndefOr[ResourceTypeValuesList]
    var Schedules: js.UndefOr[ScheduleList]
    var TargetTags: js.UndefOr[TargetTagList]
  }
  /** Companion factory: every field optional, written only when defined. */
  object PolicyDetails {
    @inline
    def apply(
        Actions: js.UndefOr[ActionList] = js.undefined,
        EventSource: js.UndefOr[EventSource] = js.undefined,
        Parameters: js.UndefOr[Parameters] = js.undefined,
        PolicyType: js.UndefOr[PolicyTypeValues] = js.undefined,
        ResourceLocations: js.UndefOr[ResourceLocationList] = js.undefined,
        ResourceTypes: js.UndefOr[ResourceTypeValuesList] = js.undefined,
        Schedules: js.UndefOr[ScheduleList] = js.undefined,
        TargetTags: js.UndefOr[TargetTagList] = js.undefined
    ): PolicyDetails = {
      val __obj = js.Dynamic.literal()
      Actions.foreach(__v => __obj.updateDynamic("Actions")(__v.asInstanceOf[js.Any]))
      EventSource.foreach(__v => __obj.updateDynamic("EventSource")(__v.asInstanceOf[js.Any]))
      Parameters.foreach(__v => __obj.updateDynamic("Parameters")(__v.asInstanceOf[js.Any]))
      PolicyType.foreach(__v => __obj.updateDynamic("PolicyType")(__v.asInstanceOf[js.Any]))
      ResourceLocations.foreach(__v => __obj.updateDynamic("ResourceLocations")(__v.asInstanceOf[js.Any]))
      ResourceTypes.foreach(__v => __obj.updateDynamic("ResourceTypes")(__v.asInstanceOf[js.Any]))
      Schedules.foreach(__v => __obj.updateDynamic("Schedules")(__v.asInstanceOf[js.Any]))
      TargetTags.foreach(__v => __obj.updateDynamic("TargetTags")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[PolicyDetails]
    }
  }
  /** String-backed enum of policy types. */
  @js.native
  sealed trait PolicyTypeValues extends js.Any
  object PolicyTypeValues {
    val EBS_SNAPSHOT_MANAGEMENT = "EBS_SNAPSHOT_MANAGEMENT".asInstanceOf[PolicyTypeValues]
    val IMAGE_MANAGEMENT = "IMAGE_MANAGEMENT".asInstanceOf[PolicyTypeValues]
    val EVENT_BASED_POLICY = "EVENT_BASED_POLICY".asInstanceOf[PolicyTypeValues]
    @inline def values = js.Array(EBS_SNAPSHOT_MANAGEMENT, IMAGE_MANAGEMENT, EVENT_BASED_POLICY)
  }
  /** String-backed enum of resource locations: CLOUD or OUTPOST. */
  @js.native
  sealed trait ResourceLocationValues extends js.Any
  object ResourceLocationValues {
    val CLOUD = "CLOUD".asInstanceOf[ResourceLocationValues]
    val OUTPOST = "OUTPOST".asInstanceOf[ResourceLocationValues]
    @inline def values = js.Array(CLOUD, OUTPOST)
  }
  /** String-backed enum of target resource types: VOLUME or INSTANCE. */
  @js.native
  sealed trait ResourceTypeValues extends js.Any
  object ResourceTypeValues {
    val VOLUME = "VOLUME".asInstanceOf[ResourceTypeValues]
    val INSTANCE = "INSTANCE".asInstanceOf[ResourceTypeValues]
    @inline def values = js.Array(VOLUME, INSTANCE)
  }
  /** Specifies the retention rule for a lifecycle policy. You can retain snapshots based on either a count or a time interval.
    */
  @js.native
  trait RetainRule extends js.Object {
    var Count: js.UndefOr[Count]
    var Interval: js.UndefOr[Interval]
    var IntervalUnit: js.UndefOr[RetentionIntervalUnitValues]
  }
  /** Companion factory: every field optional, written only when defined. */
  object RetainRule {
    @inline
    def apply(
        Count: js.UndefOr[Count] = js.undefined,
        Interval: js.UndefOr[Interval] = js.undefined,
        IntervalUnit: js.UndefOr[RetentionIntervalUnitValues] = js.undefined
    ): RetainRule = {
      val __obj = js.Dynamic.literal()
      Count.foreach(__v => __obj.updateDynamic("Count")(__v.asInstanceOf[js.Any]))
      Interval.foreach(__v => __obj.updateDynamic("Interval")(__v.asInstanceOf[js.Any]))
      IntervalUnit.foreach(__v => __obj.updateDynamic("IntervalUnit")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[RetainRule]
    }
  }
  /** String-backed enum of retention-interval units: DAYS, WEEKS, MONTHS, YEARS. */
  @js.native
  sealed trait RetentionIntervalUnitValues extends js.Any
  object RetentionIntervalUnitValues {
    val DAYS = "DAYS".asInstanceOf[RetentionIntervalUnitValues]
    val WEEKS = "WEEKS".asInstanceOf[RetentionIntervalUnitValues]
    val MONTHS = "MONTHS".asInstanceOf[RetentionIntervalUnitValues]
    val YEARS = "YEARS".asInstanceOf[RetentionIntervalUnitValues]
    @inline def values = js.Array(DAYS, WEEKS, MONTHS, YEARS)
  }
  /** Specifies a backup schedule for a snapshot or AMI lifecycle policy.
    */
  @js.native
  trait Schedule extends js.Object {
    var CopyTags: js.UndefOr[CopyTags]
    var CreateRule: js.UndefOr[CreateRule]
    var CrossRegionCopyRules: js.UndefOr[CrossRegionCopyRules]
    var FastRestoreRule: js.UndefOr[FastRestoreRule]
    var Name: js.UndefOr[ScheduleName]
    var RetainRule: js.UndefOr[RetainRule]
    var ShareRules: js.UndefOr[ShareRules]
    var TagsToAdd: js.UndefOr[TagsToAddList]
    var VariableTags: js.UndefOr[VariableTagsList]
  }
  /** Companion factory: every field optional, written only when defined. */
  object Schedule {
    @inline
    def apply(
        CopyTags: js.UndefOr[CopyTags] = js.undefined,
        CreateRule: js.UndefOr[CreateRule] = js.undefined,
        CrossRegionCopyRules: js.UndefOr[CrossRegionCopyRules] = js.undefined,
        FastRestoreRule: js.UndefOr[FastRestoreRule] = js.undefined,
        Name: js.UndefOr[ScheduleName] = js.undefined,
        RetainRule: js.UndefOr[RetainRule] = js.undefined,
        ShareRules: js.UndefOr[ShareRules] = js.undefined,
        TagsToAdd: js.UndefOr[TagsToAddList] = js.undefined,
        VariableTags: js.UndefOr[VariableTagsList] = js.undefined
    ): Schedule = {
      val __obj = js.Dynamic.literal()
      CopyTags.foreach(__v => __obj.updateDynamic("CopyTags")(__v.asInstanceOf[js.Any]))
      CreateRule.foreach(__v => __obj.updateDynamic("CreateRule")(__v.asInstanceOf[js.Any]))
      CrossRegionCopyRules.foreach(__v => __obj.updateDynamic("CrossRegionCopyRules")(__v.asInstanceOf[js.Any]))
      FastRestoreRule.foreach(__v => __obj.updateDynamic("FastRestoreRule")(__v.asInstanceOf[js.Any]))
      Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
      RetainRule.foreach(__v => __obj.updateDynamic("RetainRule")(__v.asInstanceOf[js.Any]))
      ShareRules.foreach(__v => __obj.updateDynamic("ShareRules")(__v.asInstanceOf[js.Any]))
      TagsToAdd.foreach(__v => __obj.updateDynamic("TagsToAdd")(__v.asInstanceOf[js.Any]))
      VariableTags.foreach(__v => __obj.updateDynamic("VariableTags")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[Schedule]
    }
  }
  /** String-backed enum of writable policy states: ENABLED or DISABLED (no ERROR, unlike GettablePolicyStateValues). */
  @js.native
  sealed trait SettablePolicyStateValues extends js.Any
  object SettablePolicyStateValues {
    val ENABLED = "ENABLED".asInstanceOf[SettablePolicyStateValues]
    val DISABLED = "DISABLED".asInstanceOf[SettablePolicyStateValues]
    @inline def values = js.Array(ENABLED, DISABLED)
  }
  /** Specifies a rule for sharing snapshots across AWS accounts.
    */
  @js.native
  trait ShareRule extends js.Object {
    var TargetAccounts: ShareTargetAccountList
    var UnshareInterval: js.UndefOr[Interval]
    var UnshareIntervalUnit: js.UndefOr[RetentionIntervalUnitValues]
  }
  /** Companion factory: TargetAccounts is required; unshare fields are written only when defined. */
  object ShareRule {
    @inline
    def apply(
        TargetAccounts: ShareTargetAccountList,
        UnshareInterval: js.UndefOr[Interval] = js.undefined,
        UnshareIntervalUnit: js.UndefOr[RetentionIntervalUnitValues] = js.undefined
    ): ShareRule = {
      val __obj = js.Dynamic.literal(
        "TargetAccounts" -> TargetAccounts.asInstanceOf[js.Any]
      )
      UnshareInterval.foreach(__v => __obj.updateDynamic("UnshareInterval")(__v.asInstanceOf[js.Any]))
      UnshareIntervalUnit.foreach(__v => __obj.updateDynamic("UnshareIntervalUnit")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ShareRule]
    }
  }
  /** Specifies a tag for a resource.
    */
  @js.native
  trait Tag extends js.Object {
    var Key: String
    var Value: String
  }
  /** Companion factory: both Key and Value are required. */
  object Tag {
    @inline
    def apply(
        Key: String,
        Value: String
    ): Tag = {
      val __obj = js.Dynamic.literal(
        "Key" -> Key.asInstanceOf[js.Any],
        "Value" -> Value.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[Tag]
    }
  }
  /** Request shape for tagging a policy; both fields are required. */
  @js.native
  trait TagResourceRequest extends js.Object {
    var ResourceArn: PolicyArn
    var Tags: TagMap
  }
  object TagResourceRequest {
    @inline
    def apply(
        ResourceArn: PolicyArn,
        Tags: TagMap
    ): TagResourceRequest = {
      val __obj = js.Dynamic.literal(
        "ResourceArn" -> ResourceArn.asInstanceOf[js.Any],
        "Tags" -> Tags.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[TagResourceRequest]
    }
  }
  /** Empty response marker for TagResource. */
  @js.native
  trait TagResourceResponse extends js.Object
  object TagResourceResponse {
    @inline
    def apply(): TagResourceResponse = {
      val __obj = js.Dynamic.literal()
      __obj.asInstanceOf[TagResourceResponse]
    }
  }
  /** Request shape for removing tags from a policy; both fields are required. */
  @js.native
  trait UntagResourceRequest extends js.Object {
    var ResourceArn: PolicyArn
    var TagKeys: TagKeyList
  }
  object UntagResourceRequest {
    @inline
    def apply(
        ResourceArn: PolicyArn,
        TagKeys: TagKeyList
    ): UntagResourceRequest = {
      val __obj = js.Dynamic.literal(
        "ResourceArn" -> ResourceArn.asInstanceOf[js.Any],
        "TagKeys" -> TagKeys.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[UntagResourceRequest]
    }
  }
  /** Empty response marker for UntagResource. */
  @js.native
  trait UntagResourceResponse extends js.Object
  object UntagResourceResponse {
    @inline
    def apply(): UntagResourceResponse = {
      val __obj = js.Dynamic.literal()
      __obj.asInstanceOf[UntagResourceResponse]
    }
  }
  /** Request shape for updating a policy; PolicyId is required, all other fields optional. */
  @js.native
  trait UpdateLifecyclePolicyRequest extends js.Object {
    var PolicyId: PolicyId
    var Description: js.UndefOr[PolicyDescription]
    var ExecutionRoleArn: js.UndefOr[ExecutionRoleArn]
    var PolicyDetails: js.UndefOr[PolicyDetails]
    var State: js.UndefOr[SettablePolicyStateValues]
  }
  object UpdateLifecyclePolicyRequest {
    @inline
    def apply(
        PolicyId: PolicyId,
        Description: js.UndefOr[PolicyDescription] = js.undefined,
        ExecutionRoleArn: js.UndefOr[ExecutionRoleArn] = js.undefined,
        PolicyDetails: js.UndefOr[PolicyDetails] = js.undefined,
        State: js.UndefOr[SettablePolicyStateValues] = js.undefined
    ): UpdateLifecyclePolicyRequest = {
      val __obj = js.Dynamic.literal(
        "PolicyId" -> PolicyId.asInstanceOf[js.Any]
      )
      Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
      ExecutionRoleArn.foreach(__v => __obj.updateDynamic("ExecutionRoleArn")(__v.asInstanceOf[js.Any]))
      PolicyDetails.foreach(__v => __obj.updateDynamic("PolicyDetails")(__v.asInstanceOf[js.Any]))
      State.foreach(__v => __obj.updateDynamic("State")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[UpdateLifecyclePolicyRequest]
    }
  }
  /** Empty response marker for UpdateLifecyclePolicy. */
  @js.native
  trait UpdateLifecyclePolicyResponse extends js.Object
  object UpdateLifecyclePolicyResponse {
    @inline
    def apply(): UpdateLifecyclePolicyResponse = {
      val __obj = js.Dynamic.literal()
      __obj.asInstanceOf[UpdateLifecyclePolicyResponse]
    }
  }
}
|
#!/bin/bash
#
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (C) 2019 Intel Corporation
#
# devstack/plugin.sh
# Triggers stx_config specific functions to install and configure stx_config
#
# DevStack invokes this plugin repeatedly with $1 = top-level phase
# ("stack", "unstack", "clean") and $2 = sub-phase within "stack"
# ("install", "post-config", "extra", "test-config").
echo_summary "sysinv devstack plugin.sh called: $1/$2"
# check for service enabled
if is_service_enabled config; then
    if [[ "$1" == "stack" && "$2" == "install" ]]; then
        # Perform installation of source
        echo_summary "Installing stx-config"
        install_config
    elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then
        # Configure after the other layer 1 and 2 services have been configured
        echo_summary "Configure sysinv"
        configure_config
    elif [[ "$1" == "stack" && "$2" == "extra" ]]; then
        # Initialize and start the sysinv service
        echo_summary "Initialize and start sysinv "
        init_config
        start_config
    elif [[ "$1" == "stack" && "$2" == "test-config" ]]; then
        # do sanity test for sysinv
        echo_summary "do test-config"
        # check sysinv services status
        echo_summary "do check sysinv services"
        check_sysinv_services
    fi
    # "unstack" and "clean" are separate top-level phases, so they are
    # checked independently of the "stack" sub-phase chain above.
    if [[ "$1" == "unstack" ]]; then
        # Shut down sysinv services
        echo_summary "Stop Sysinv service"
        stop_config
    fi
    if [[ "$1" == "clean" ]]; then
        cleanup_config
    fi
fi
|
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
/* This file has been modified by Open Source Strategies, Inc. */
package org.ofbiz.workflow.impl;
import java.sql.Timestamp;
import java.util.Date;
import org.ofbiz.base.util.ObjectType;
import org.ofbiz.workflow.SourceNotAvailable;
import org.ofbiz.workflow.WfActivity;
import org.ofbiz.workflow.WfEventAudit;
import org.ofbiz.workflow.WfException;
import org.ofbiz.workflow.WfExecutionObject;
import org.ofbiz.workflow.WfProcess;
/**
* WfEventAuditImpl - Workflow Event Audit implementation
*/
/**
 * WfEventAuditImpl - Workflow Event Audit implementation.
 *
 * Wraps the workflow execution object that raised an event together with the
 * event type string and a timestamp captured at construction time. The
 * activity-/process-specific accessors verify the runtime type of the wrapped
 * object via ObjectType.instanceOf and throw a WfException when it does not
 * match the expected interface.
 */
public class WfEventAuditImpl implements WfEventAudit {

    private WfExecutionObject object = null;
    private String eventType = null;
    private Timestamp timeStamp = null;

    /**
     * Creates an audit record for the given execution object and event type;
     * the timestamp is fixed at construction time.
     */
    public WfEventAuditImpl(WfExecutionObject object, String eventType) {
        this.object = object;
        this.eventType = eventType;
        this.timeStamp = new Timestamp(new Date().getTime());
    }

    /**
     * Returns the execution object that raised the event.
     * @see org.ofbiz.workflow.WfEventAudit#source()
     */
    public WfExecutionObject source() throws WfException, SourceNotAvailable {
        return object;
    }

    /**
     * Returns the time this audit record was created.
     * @see org.ofbiz.workflow.WfEventAudit#timeStamp()
     */
    public Timestamp timeStamp() throws WfException {
        return timeStamp;
    }

    /**
     * Returns the event type string supplied at construction.
     * @see org.ofbiz.workflow.WfEventAudit#eventType()
     */
    public String eventType() throws WfException {
        return eventType;
    }

    /**
     * Returns the key of the source object when it is a WfActivity.
     * @throws WfException if the source is not a WfActivity
     * @see org.ofbiz.workflow.WfEventAudit#activityKey()
     */
    public String activityKey() throws WfException {
        // Consistency fix: the original caught the exception only to re-throw
        // a new WfException with the same message as the fall-through below,
        // discarding the underlying cause. All sibling accessors ignore the
        // exception and fall through to a single throw; do the same here.
        try {
            if (ObjectType.instanceOf(object, "org.ofbiz.workflow.WfActivity"))
                return object.key();
        } catch (Exception e) {}
        throw new WfException("Source is not a WfActivity object");
    }

    /**
     * Returns the name of the source object when it is a WfActivity.
     * @throws WfException if the source is not a WfActivity
     * @see org.ofbiz.workflow.WfEventAudit#activityName()
     */
    public String activityName() throws WfException {
        try {
            if (ObjectType.instanceOf(object, "org.ofbiz.workflow.WfActivity"))
                return object.name();
        } catch (Exception e) {}
        throw new WfException("Source is not a WfActivity object");
    }

    /**
     * Returns the key of the source object when it is a WfProcess.
     * @throws WfException if the source is not a WfProcess
     * @see org.ofbiz.workflow.WfEventAudit#processKey()
     */
    public String processKey() throws WfException {
        try {
            if (ObjectType.instanceOf(object, "org.ofbiz.workflow.WfProcess"))
                return object.key();
        } catch (Exception e) {}
        throw new WfException("Source is not a WfProcess object");
    }

    /**
     * Returns the name of the source object when it is a WfProcess.
     * @throws WfException if the source is not a WfProcess
     * @see org.ofbiz.workflow.WfEventAudit#processName()
     */
    public String processName() throws WfException {
        try {
            if (ObjectType.instanceOf(object, "org.ofbiz.workflow.WfProcess"))
                return object.name();
        } catch (Exception e) {}
        throw new WfException("Source is not a WfProcess object");
    }

    /**
     * Returns the manager name, resolved via the process itself or via the
     * activity's containing process.
     * @throws WfException if the source is neither a WfProcess nor a WfActivity
     * @see org.ofbiz.workflow.WfEventAudit#processMgrName()
     */
    public String processMgrName() throws WfException {
        try {
            if (ObjectType.instanceOf(object, "org.ofbiz.workflow.WfProcess"))
                return ((WfProcess) object).manager().name();
            else if (ObjectType.instanceOf(object, "org.ofbiz.workflow.WfActivity"))
                return ((WfActivity) object).container().manager().name();
        } catch (Exception e) {}
        throw new WfException("Illegal source object");
    }

    /**
     * Returns the manager version, resolved via the process itself or via the
     * activity's containing process.
     * @throws WfException if the source is neither a WfProcess nor a WfActivity
     * @see org.ofbiz.workflow.WfEventAudit#processMgrVersion()
     */
    public String processMgrVersion() throws WfException {
        try {
            if (ObjectType.instanceOf(object, "org.ofbiz.workflow.WfProcess"))
                return ((WfProcess) object).manager().version();
            else if (ObjectType.instanceOf(object, "org.ofbiz.workflow.WfActivity"))
                return ((WfActivity) object).container().manager().version();
        } catch (Exception e) {}
        throw new WfException("Illegal source object");
    }
}
|
// Per-environment endpoint configuration: `port` is the service TCP port,
// `httpPort` the HTTP listener port, `address` the host to use.
module.exports = {
    // "live" and "production" are aliases with identical settings.
    live: {
        port: 9320,
        httpPort: 8090,
        address: '10.10.10.10',
    },
    production: {
        port: 9320,
        httpPort: 8090,
        address: '10.10.10.10',
    },
    // "test" uses distinct ports so it can run beside a dev instance.
    test: {
        port: 9322,
        httpPort: 8099,
        address: '127.0.0.1',
    },
    // "dev" and "development" are aliases bound to localhost.
    dev: {
        port: 9320,
        httpPort: 8090,
        address: '127.0.0.1',
    },
    development: {
        port: 9320,
        httpPort: 8090,
        address: '127.0.0.1',
    },
};
|
const os = require("os");
const fs = require("fs");
const SeleniumStealth = require("../selenium_stealth");
const {Builder} = require('selenium-webdriver');
/**
 * Launches Chrome (optionally headless), optionally applies the stealth
 * patches, loads the local static test page, captures a full-page CDP
 * screenshot into stealthtests/, and resolves with {html, result} where
 * `html` is the page source and `result` the raw Page.captureScreenshot
 * response.
 *
 * Fixes over the original:
 *  - `__dirname.replace('\\', '/')` only replaced the FIRST backslash, so a
 *    nested Windows path produced a broken file:// URL; a global regex is
 *    used instead.
 *  - removed the dead `if (clip)` guard (clip was always a fresh object,
 *    hence always truthy).
 */
const test = (headless, stealth) => new Promise(async resolve => {
    const driver = new Builder()
        .withCapabilities({
            'goog:chromeOptions': {
                args: headless ? ["--headless"] : [],
                excludeSwitches: [
                    'enable-automation',
                    'useAutomationExtension',
                ],
            },
        })
        .forBrowser('chrome')
        .build();
    const seleniumStealth = new SeleniumStealth(driver)
    if (stealth) {
        await seleniumStealth.stealth({
            languages: ["en-US", "en"],
            vendor: "Google Inc.",
            platform: "Win32",
            webglVendor: "Intel Inc.",
            renderer: "Intel Iris OpenGL Engine",
            fixHairline: true
        })
    } else {
        // Even without stealth the CDP connection promise must be resolved
        // before executeCDPCommand calls below.
        seleniumStealth.cdpConnection = await seleniumStealth.cdpConnection
    }
    // Windows needs "file:///" (drive letter follows); POSIX uses "file://" + abs path.
    const isWindows = os.type() === "Windows_NT"
    const url = `${isWindows ? 'file:///' : 'file://'}${__dirname.replace(/\\/g, '/')}/static/test.html`
    await driver.get(url)
    // Block until the document has fully loaded before measuring the page.
    await driver.wait(() => {
        return driver
            .executeScript('return document.readyState')
            .then((readyState) => {
                return readyState === 'complete';
            });
    });
    // Resize the emulated viewport to the full content size so the
    // screenshot covers the whole page rather than the visible viewport.
    const metrics = await seleniumStealth.executeCDPCommand("Page.getLayoutMetrics")
    const width = Math.ceil(metrics['contentSize']['width'])
    const height = Math.ceil(metrics['contentSize']['height'])
    const screenOrientation = {angle: 0, type: 'portraitPrimary'}
    await seleniumStealth.executeCDPCommand("Emulation.setDeviceMetricsOverride", {
        mobile: false,
        width,
        height,
        screenOrientation,
        deviceScaleFactor: 1
    })
    const opt = {format: 'png', clip: {x: 0, y: 0, width, height, scale: 1}}
    const result = await seleniumStealth.executeCDPCommand("Page.captureScreenshot", opt)
    const html = await driver.getPageSource();
    fs.writeFile(`stealthtests/selenium_chrome_${headless ? 'headless' : 'headful' }_${stealth ? 'with' : 'without'}_stealth.png`, result.data, 'base64', async()=>{
        try{
            await driver.close()
            await driver.quit()
        }catch (e) {}
        resolve({html, result})
    });
})
// Exercise every headless x stealth combination, one browser at a time.
const tests = async () => {
    const combos = [
        [true, true],
        [true, false],
        [false, true],
        [false, false],
    ];
    for (const [headless, stealth] of combos) {
        await test(headless, stealth);
    }
};
tests();
|
import java.util.ArrayList;
public class objectTable {
public final static int N_ATTR = 4;
public int classId;
public int tupleId;
public int blockId;
public int offset;
public objectTable() {
this.classId = 0;
this.tupleId = 0;
this.blockId = 0;
this.offset = 0;
}
public objectTable(ArrayList<String> src) {
this.classId = Integer.parseInt(src.get(0));
this.tupleId = Integer.parseInt(src.get(1));
this.blockId = Integer.parseInt(src.get(2));
this.offset = Integer.parseInt(src.get(3));
}
public ArrayList<String> class2StringList() {
ArrayList<String> data = new ArrayList<String>();
data.add(Integer.toString(this.classId));
data.add(Integer.toString(this.tupleId));
data.add(Integer.toString(this.blockId));
data.add(Integer.toString(this.offset));
return data;
}
}
|
/**
 * Returns the sum of the decimal digits of a number.
 *
 * Bug fixed: the original used `n /= 10`, which keeps the fractional part
 * in JavaScript (there is no integer division), so the loop condition
 * `n > 0` stayed true for fractional values and the function accumulated
 * garbage instead of digit sums. The quotient is now floored each step.
 *
 * Generalised: negative and non-integer inputs are normalised via
 * Math.abs(Math.trunc(n)) so sumDigits(-42) === 6; behaviour for
 * non-negative integers is unchanged.
 */
function sumDigits(n) {
  // Normalize to a non-negative integer before extracting digits.
  let sum = 0;
  n = Math.abs(Math.trunc(n));
  while (n > 0) {
    sum += n % 10;              // least-significant digit
    n = Math.floor(n / 10);     // drop that digit (integer division)
  }
  return sum;
}

const num = 5635;
console.log(sumDigits(num));
// Output: 19 (5 + 6 + 3 + 5 — the original comment's "18" was wrong)
/*
*
*/
package net.community.apps.tools.xmlstruct;
import net.community.chest.CoVariantReturn;
import net.community.chest.ui.components.tree.document.BaseDocumentPanel;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Jan 6, 2009 4:30:57 PM
*/
/**
 * Document-structure panel: a {@link BaseDocumentPanel} whose document tree
 * is a {@link DocStructTree}.
 */
public class DocStructPanel extends BaseDocumentPanel {
    /**
     * Serialization version marker.
     */
    private static final long serialVersionUID = -5033348104749825219L;
    /**
     * @param autoLayout whether the superclass should lay out the panel
     * automatically (passed straight through to {@link BaseDocumentPanel}).
     */
    public DocStructPanel (boolean autoLayout)
    {
        super(autoLayout);
    }
    /** Default constructor: delegates with autoLayout=true. */
    public DocStructPanel ()
    {
        this(true);
    }
    /*
     * Covariant override returning the concrete DocStructTree type.
     * @see net.community.chest.ui.components.tree.BaseDocumentPanel#createDocumentTree()
     */
    @Override
    @CoVariantReturn
    protected DocStructTree createDocumentTree ()
    {
        return new DocStructTree();
    }
}
|
import torch.nn as nn
class CustomBlock(nn.Module):
    """Depthwise-separable branch: 3x3 depthwise conv at ``stride``, then a
    1x1 pointwise conv projecting to half of ``out_channels``, with batch
    norm after each conv and a final ReLU.
    """

    def __init__(self, in_channels, out_channels, stride):
        super().__init__()
        half_out = int(out_channels / 2)
        layers = [
            # Depthwise 3x3: one filter per input channel (groups == channels).
            nn.Conv2d(in_channels, in_channels, 3, stride=stride, padding=1, groups=in_channels),
            nn.BatchNorm2d(in_channels),
            # Pointwise 1x1 projection down to half the requested output width.
            nn.Conv2d(in_channels, half_out, 1),
            nn.BatchNorm2d(half_out),
            nn.ReLU(inplace=True),
        ]
        self.shortcut = nn.Sequential(*layers)

    def forward(self, x):
        """Run ``x`` through the shortcut branch."""
        return self.shortcut(x)
def create_custom_block(in_channels, out_channels, stride):
    """Factory wrapper: construct and return a :class:`CustomBlock`."""
    block = CustomBlock(in_channels, out_channels, stride)
    return block
# Many2one link to the related res.partner record.
partner_id = fields.Many2one('res.partner', string='Partner')
@api.multi
def action_context_demo(self):
    # Demo action: always raises a ValidationError; the message depends on
    # whether 'context_data' is present (and truthy) in the env context.
    if self.env.context.get('context_data'):
        raise ValidationError('have context data')
    else:
        raise ValidationError('hello')
class CalendarEvent:
    """Plain data holder for a calendar event."""

    def __init__(self, id, user, calendar, is_hidden):
        # Store constructor arguments verbatim on the instance.
        self.is_hidden = is_hidden
        self.calendar = calendar
        self.user = user
        self.id = id
# Assume the following global variables are available:
# active_users: a set containing the IDs of currently active users
# deferred_objects: a list of deferred CalendarEvent objects
def flush_deferred_objects():
    """Load deferred events that are visible and owned by active users,
    then clear the deferred queue.

    Reads the module-level ``deferred_objects`` list and ``active_users``
    set; rebinds ``deferred_objects`` to an empty list when done.
    """
    global deferred_objects

    ready = [
        event
        for event in deferred_objects
        if not event.is_hidden and event.user in active_users
    ]
    for event in ready:
        # Placeholder for loading the event into the calendar application,
        # e.g. calendar_app.load_event(event)
        pass
    deferred_objects = []
/* Copyright 2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.bitbrain.braingdx.context;
import box2dLight.RayHandler;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.World;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.utils.Disposable;
import com.badlogic.gdx.utils.viewport.FitViewport;
import de.bitbrain.braingdx.BrainGdxGame;
import de.bitbrain.braingdx.debug.DebugMetric;
import de.bitbrain.braingdx.debug.DebugStageRenderLayer;
import de.bitbrain.braingdx.event.GameEventRouter;
import de.bitbrain.braingdx.graphics.BatchResolver;
import de.bitbrain.braingdx.graphics.GameCamera;
import de.bitbrain.braingdx.graphics.SpriteBatchResolver;
import de.bitbrain.braingdx.graphics.VectorGameCamera;
import de.bitbrain.braingdx.graphics.lighting.LightingManager;
import de.bitbrain.braingdx.graphics.lighting.LightingManagerImpl;
import de.bitbrain.braingdx.graphics.lighting.LightingManagerRenderLayer;
import de.bitbrain.braingdx.graphics.particles.ParticleManager;
import de.bitbrain.braingdx.graphics.particles.ParticleManagerImpl;
import de.bitbrain.braingdx.graphics.particles.ParticleManagerRenderLayer;
import de.bitbrain.braingdx.graphics.pipeline.RenderLayer2D;
import de.bitbrain.braingdx.graphics.pipeline.RenderPipeline;
import de.bitbrain.braingdx.graphics.pipeline.layers.ColoredRenderLayer;
import de.bitbrain.braingdx.graphics.pipeline.layers.GameObjectRenderLayer;
import de.bitbrain.braingdx.graphics.pipeline.layers.RenderPipeIds;
import de.bitbrain.braingdx.graphics.pipeline.layers.StageRenderLayer;
import de.bitbrain.braingdx.graphics.shader.ShaderConfig;
import de.bitbrain.braingdx.physics.PhysicsManager;
import de.bitbrain.braingdx.physics.PhysicsManagerImpl;
import de.bitbrain.braingdx.screens.AbstractScreen;
import de.bitbrain.braingdx.tmx.TiledMapContextFactory;
import de.bitbrain.braingdx.tmx.TiledMapEvents.OnLoadGameObjectEvent;
import de.bitbrain.braingdx.tmx.TiledMapInfoExtractor;
import de.bitbrain.braingdx.tmx.TiledMapManager;
import de.bitbrain.braingdx.tmx.TiledMapManagerImpl;
import de.bitbrain.braingdx.tmx.events.TmxAudioConfigurer;
import de.bitbrain.braingdx.tmx.events.TmxLightingConfigurer;
import de.bitbrain.braingdx.util.ArgumentFactory;
import de.bitbrain.braingdx.util.Resizeable;
import de.bitbrain.braingdx.util.ViewportFactory;
/**
 * 2D Implementation of {@link GameContext}. Wires the shared context plumbing
 * together with the 2D-specific services: Box2D physics, lighting, particles,
 * tiled-map loading and the layered 2D render pipeline.
 *
 * @author <NAME>
 * @since 0.1.0
 */
public class GameContext2DImpl extends GameContextImpl implements GameContext2D, Disposable, Resizeable {

   private final Stage worldStage;
   private final LightingManagerImpl lightingManager;
   private final ParticleManagerImpl particleManager;
   private final World boxWorld;
   private final TiledMapManager tiledMapManager;
   private final PhysicsManagerImpl physicsManager;
   private final ColoredRenderLayer coloredRenderLayer;
   private final GameEventRouter tiledMapEventRouter;

   // Default camera factory: vector-based 2D camera wrapping an OrthographicCamera.
   private static final ArgumentFactory<GameContext, GameCamera> GAME_CAMERA_FACTORY = new ArgumentFactory<GameContext, GameCamera>() {
      @Override
      public GameCamera create(GameContext context) {
         return new VectorGameCamera(new OrthographicCamera(), context.getGameWorld());
      }
   };

   // Single sprite-batch resolver bound to the internal game camera.
   private static final ArgumentFactory<GameContext, BatchResolver<?>[]> BATCH_RESOLVER_FACTORY = new ArgumentFactory<GameContext, BatchResolver<?>[]>() {
      @Override
      public BatchResolver<?>[] create(GameContext supplier) {
         return new BatchResolver[]{
               new SpriteBatchResolver(supplier.getGameCamera().getInternalCamera())
         };
      }
   };

   public GameContext2DImpl(ViewportFactory viewportFactory, ShaderConfig shaderConfig, BrainGdxGame game, AbstractScreen<?, ?> screen) {
      super(shaderConfig, viewportFactory, GAME_CAMERA_FACTORY, game, screen, BATCH_RESOLVER_FACTORY);
      coloredRenderLayer = new ColoredRenderLayer();
      particleManager = new ParticleManagerImpl(getBehaviorManager(), getSettings().getGraphics());
      // The world stage shares the game camera, so its actors live in world space.
      worldStage = new Stage(new FitViewport(Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), getGameCamera().getInternalCamera()));
      boxWorld = new World(Vector2.Zero, true);
      physicsManager = new PhysicsManagerImpl(
            boxWorld,
            getGameWorld(),
            getBehaviorManager()
      );
      lightingManager = new LightingManagerImpl(
            new RayHandler(boxWorld),
            getBehaviorManager(),
            (OrthographicCamera) getGameCamera().getInternalCamera()
      );
      // Routes events defined inside tiled maps into the global event bus.
      tiledMapEventRouter = new GameEventRouter(
            getEventManager(),
            getGameWorld(),
            new TiledMapInfoExtractor()
      );
      tiledMapManager = new TiledMapManagerImpl(
            getGameWorld(),
            getEventManager(),
            new TiledMapContextFactory(
                  getRenderManager(),
                  getGameWorld(),
                  getEventManager(),
                  tiledMapEventRouter,
                  getBehaviorManager(),
                  physicsManager
            )
      );
      configurePipeline(getRenderPipeline(), this);
      wire();
   }

   @Override
   public Stage getWorldStage() {
      return worldStage;
   }

   @Override
   public World getBox2DWorld() {
      return boxWorld;
   }

   @Override
   public ParticleManager getParticleManager() {
      return particleManager;
   }

   @Override
   public LightingManager getLightingManager() {
      return lightingManager;
   }

   @Override
   public TiledMapManager getTiledMapManager() {
      return tiledMapManager;
   }

   @Override
   public void dispose() {
      super.dispose();
      worldStage.dispose();
      particleManager.dispose();
      physicsManager.dispose();
      lightingManager.dispose();
   }

   // FIX: annotated with @Override (this overrides the superclass method it
   // delegates to) for consistency with every other override in this class.
   @Override
   public void updateAndRender(float delta) {
      // Physics first so the render pass sees up-to-date body positions.
      physicsManager.update(delta);
      worldStage.act(delta);
      super.updateAndRender(delta);
   }

   @Override
   public void setBackgroundColor(Color color) {
      super.setBackgroundColor(color);
      // Mirror the clear color into the background layer of the pipeline.
      coloredRenderLayer.setColor(color);
      getRenderPipeline().put(RenderPipeIds.BACKGROUND, coloredRenderLayer);
   }

   @Override
   public void resize(int width, int height) {
      super.resize(width, height);
      worldStage.getViewport().update(width, height, true);
      lightingManager.resize(width, height);
   }

   @Override
   public PhysicsManager getPhysicsManager() {
      return physicsManager;
   }

   /**
    * Registers the default render layers in pipeline order: background,
    * world objects, lighting, particles, world-space UI, screen UI, debug.
    */
   private void configurePipeline(RenderPipeline pipeline, GameContext2D context) {
      pipeline.put(RenderPipeIds.BACKGROUND, new RenderLayer2D() {
         @Override
         public void render(Batch batch, float delta) {
         }
      });
      pipeline.put(RenderPipeIds.FOREGROUND, new RenderLayer2D() {
         @Override
         public void render(Batch batch, float delta) {
            // noOp
         }
      });
      pipeline.put(RenderPipeIds.WORLD, new GameObjectRenderLayer(context.getRenderManager()));
      pipeline.put(RenderPipeIds.LIGHTING, new LightingManagerRenderLayer(lightingManager));
      pipeline.put(RenderPipeIds.PARTICLES, new ParticleManagerRenderLayer(particleManager));
      pipeline.put(RenderPipeIds.WORLD_UI, new StageRenderLayer(context.getWorldStage()));
      pipeline.put(RenderPipeIds.UI, new StageRenderLayer(context.getStage()));
      pipeline.put(RenderPipeIds.DEBUG, new DebugStageRenderLayer(context));
   }

   /**
    * Hooks up input, tiled-map event routing and debug metrics
    * (light / body / particle counts).
    */
   private void wire() {
      getInputManager().register(worldStage);
      getBehaviorManager().apply(tiledMapEventRouter);
      // TiledMap features
      getEventManager().register(new TmxAudioConfigurer(getAudioManager()), OnLoadGameObjectEvent.class);
      getEventManager().register(new TmxLightingConfigurer(getLightingManager()), OnLoadGameObjectEvent.class);
      getDebugPanel().addMetric("light count", new DebugMetric() {
         @Override
         public String getCurrentValue() {
            return String.valueOf(lightingManager.size());
         }
      });
      getDebugPanel().addMetric("box2d body count", new DebugMetric() {
         @Override
         public String getCurrentValue() {
            return String.valueOf(physicsManager.getPhysicsWorld().getBodyCount());
         }
      });
      getDebugPanel().addMetric("particle effect count", new DebugMetric() {
         @Override
         public String getCurrentValue() {
            return String.valueOf(particleManager.getTotalEffectCount());
         }
      });
   }
}
|
package com.ua.nure.TestHelper.repository;
import com.ua.nure.TestHelper.domain.Test;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import javax.validation.constraints.NotNull;
import java.lang.annotation.Native;
import java.util.List;
@Repository
public interface TestRepository extends JpaRepository<Test, Long> {

    /** All rows sharing the given external test id. */
    List<Test> getAllByIdTest(String idTest);

    /**
     * All distinct tests whose template belongs to the given teacher.
     * Native query; `group by id_test` collapses duplicate joins per test.
     */
    @Query(value = "SELECT * FROM tests WHERE tests.id_template in(SELECT id_template FROM templates WHERE templates.id_teacher = ?1) group by id_test;", nativeQuery = true)
    List<Test> getAllTestTeacher(String idTeacher);

    /**
     * Tests owned by the teacher (?2) that are NOT yet assigned to the
     * group (?1) via test4groups.
     */
    @Query(value ="SELECT * FROM tests WHERE tests.id_test not In (SELECT id_test from test4groups WHERE id_group = ?1) AND tests.id_template in(SELECT id_template FROM templates WHERE templates.id_teacher = ?2) group by id_test;", nativeQuery = true)
    List<Test> getAllTestTeacherNotInTheGroup(String id_Group, String id_teacher);

    /** Single test by its external id (grouped to a single row). */
    @Query(value = "SELECT * FROM tests WHERE id_test = ?1 GROUP by id_test", nativeQuery = true)
    Test getByIdTest(@NotNull String idTest);
}
|
package com.example.plugintest.receiver;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import android.widget.Toast;
import com.example.plugintest.R;
import com.example.plugintest.vo.ParamVO;
/**
 * Statically registered plugin receiver; cannot listen to system broadcasts.
 * Logs the received payload and surfaces a toast built from a plugin resource.
 *
 * @author cailiming
 */
public class PluginTestReceiver2 extends BroadcastReceiver {

    private static final String LOG_TAG = "PluginTestReceiver2";

    @Override
    public void onReceive(Context context, Intent intent) {
        // Log the deserialized payload together with the triggering action.
        ParamVO payload = (ParamVO) intent.getSerializableExtra("paramvo");
        Log.d(LOG_TAG, payload + ", action:" + intent.getAction());

        // Resolving R.string.hello_world4 proves plugin resource access works.
        CharSequence resourceText = context.getResources().getText(R.string.hello_world4);
        Toast.makeText(context, "PluginTestReceiver2 onReceive " + resourceText, Toast.LENGTH_LONG).show();
    }
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

export LC_ALL=C.UTF-8

DOCKER_EXEC echo \> \$HOME/.buttcoin # Make sure default datadir does not exist and is never read by creating a dummy file

mkdir -p depends/SDKs depends/sdk-sources

# Download the macOS SDK once into the cacheable sdk-sources directory, then
# unpack it. FIX: the obsolescent `[ expr1 -a expr2 ]` form (undefined per
# POSIX with complex expressions) is replaced with `[[ ... && ... ]]`, which
# the script already uses below for the mingw check.
if [[ -n "$OSX_SDK" && ! -f depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz ]]; then
  curl --location --fail $SDK_URL/MacOSX${OSX_SDK}.sdk.tar.gz -o depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz
fi
if [[ -n "$OSX_SDK" && -f depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz ]]; then
  tar -C depends/SDKs -xf depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz
fi

# Select the POSIX-threads flavour of the MinGW cross compiler.
if [[ $HOST = *-mingw32 ]]; then
  DOCKER_EXEC update-alternatives --set $HOST-g++ \$\(which $HOST-g++-posix\)
fi

# Build the dependency tree unless explicitly skipped via NO_DEPENDS.
if [ -z "$NO_DEPENDS" ]; then
  DOCKER_EXEC CONFIG_SHELL= make $MAKEJOBS -C depends HOST=$HOST $DEP_OPTS
fi
|
/*
* Copyright (c) 2001-2004 Sendmail, Inc. All Rights Reserved
*/
package com.sendmail.jilter;
import java.io.IOException;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.Properties;
import org.apache.log4j.Category;
import com.sendmail.jilter.internal.JilterConstants;
import com.sendmail.jilter.internal.JilterEOMActionsImpl;
import com.sendmail.jilter.internal.JilterPacket;
import com.sendmail.jilter.internal.JilterServerPacketUtil;
/**
 * The guts of handling the filter side of the Milter protocol. If you have your own
 * way that you like to handle communicating with the MTA side of the Milter protocol,
 * you can feed an instance of this class the bytes from the MTA, and it will handle
 * calling methods in a {@link JilterHandler}, as well as sending data back to the MTA
 * via an arbitrary {@link WritableByteChannel}.
 */
public class JilterProcessor
{
    private static Category log = Category.getInstance(JilterProcessor.class.getName());

    // Application callback that receives the parsed Milter events.
    private JilterHandler handler = null;

    // Incremental packet assembler; fed raw bytes until a full packet is ready.
    private final JilterPacket packet = new JilterPacket();

    // Macros announced by the MTA in the most recent SMFIC_MACRO packet,
    // forwarded to the handler callbacks. May be null before the first macro.
    private Properties lastProperties = null;

    /**
     * Public constructor.
     *
     * @param handler
     *        the underlying handler that will receive calls based on the Milter conversation.
     */
    public JilterProcessor(final JilterHandler handler)
    {
        this.handler = handler;
    }

    /**
     * Process more data from the MTA.
     *
     * @param writeChannel
     *        the data channel for communicating back to the MTA.
     * @param dataBuffer
     *        the next chunk of data from the MTA.
     * @return <code>false</code> if processing is completed.
     */
    public boolean process(final WritableByteChannel writeChannel, final ByteBuffer dataBuffer) throws IOException
    {
        // One read may carry several packets; dispatch each complete one.
        while (packet.process(dataBuffer))
        {
            if (!processCurrentPacket(writeChannel))
            {
                return false;
            }
            packet.reset();
        }
        return true;
    }

    /** True when the flag {@code bit} is set within {@code position}. */
    private static boolean isBitSet(final int bit, final int position)
    {
        return (bit & position) != 0;
    }

    /**
     * Dispatches the fully assembled packet to the matching SMFIC_* handler.
     *
     * @return {@code false} only for SMFIC_QUIT, i.e. when the conversation ends.
     */
    private boolean processCurrentPacket(final WritableByteChannel writeChannel) throws IOException
    {
        boolean returnCode = true;
        if (log.isDebugEnabled())
        {
            // log.debug(">SMFIC command is '" + ((char) this.packet.getCommand()) + "', Raw packet data:" + Util.newline() +
            // Util.hexDumpLong(this.packet.getData()));
            log.debug(">SMFIC command is '" + (char) packet.getCommand() + "'");
        }
        switch (packet.getCommand())
        {
            case JilterConstants.SMFIC_CONNECT:
                log.debug("SMFIC_CONNECT");
                processConnectPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_MACRO:
                log.debug("SMFIC_MACRO");
                processMacroPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_HELO:
                log.debug("SMFIC_HELO");
                processHeloPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_MAIL:
                log.debug("SMFIC_MAIL");
                processMailPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_RCPT:
                log.debug("SMFIC_RCPT");
                processRcptPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_BODYEOB:
                log.debug("SMFIC_BODYEOB");
                processBodyEOBPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_HEADER:
                log.debug("SMFIC_HEADER");
                processHeaderPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_EOH:
                log.debug("SMFIC_EOH");
                processEOHPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_OPTNEG:
                log.debug("SMFIC_OPTNEG");
                processOptnegPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_QUIT:
                log.debug("SMFIC_QUIT");
                returnCode = false;
                break;
            case JilterConstants.SMFIC_BODY:
                log.debug("SMFIC_BODY");
                processBodyPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_ABORT:
                log.debug("SMFIC_ABORT");
                processAbortPacket(writeChannel);
                break;
            case JilterConstants.SMFIC_DATA:
                // Not processed; acknowledged with CONTINUE so the MTA keeps going.
                JilterServerPacketUtil.sendPacket(writeChannel, JilterConstants.SMFIR_CONTINUE, null);
                break;
            default:
                // Unknown command: log loudly but keep the conversation alive.
                log.error("Unhandled case [" + packet.getCommand() + "]", new Exception());
                JilterServerPacketUtil.sendPacket(writeChannel, JilterConstants.SMFIR_CONTINUE, null);
                break;
        }
        return returnCode;
    }

    /**
     * Option negotiation: advertises the handler's required modifications
     * (SMFIF_) and a SMFIP_ "don't send" flag for every event the handler does
     * not support, then replies with an SMFIC_OPTNEG packet (protocol version 2).
     */
    private void processOptnegPacket(final WritableByteChannel writeChannel) throws IOException
    {
        final int smfif = handler.getRequiredModifications();
        int smfip = 0;
        final int supported = handler.getSupportedProcesses();
        if (!isBitSet(supported, JilterHandler.PROCESS_CONNECT))
        {
            smfip |= JilterConstants.SMFIP_NOCONNECT;
        }
        if (!isBitSet(supported, JilterHandler.PROCESS_HEADER))
        {
            smfip |= JilterConstants.SMFIP_NOHDRS;
        }
        if (!isBitSet(supported, JilterHandler.PROCESS_HELO))
        {
            smfip |= JilterConstants.SMFIP_NOHELO;
        }
        if (!isBitSet(supported, JilterHandler.PROCESS_BODY))
        {
            smfip |= JilterConstants.SMFIP_NOBODY;
        }
        if (!isBitSet(supported, JilterHandler.PROCESS_ENVRCPT))
        {
            smfip |= JilterConstants.SMFIP_NORCPT;
        }
        if (!isBitSet(supported, JilterHandler.PROCESS_ENVFROM))
        {
            smfip |= JilterConstants.SMFIP_NOMAIL;
        }
        log.debug("Supported flags " + Integer.toHexString(supported) + " maps to SMFIP_ flags "
                + Integer.toHexString(smfip));
        // NOTE(review): only the low byte of smfif/smfip survives the (byte)
        // cast below — fine while all flags fit in 8 bits; confirm if new
        // SMFIF_/SMFIP_ constants above 0xFF are ever introduced.
        final ByteBuffer optionData = ByteBuffer.wrap(new byte[] { 0x00, 0x00, 0x00, 0x02, // version
                0x00, 0x00, 0x00, (byte) smfif, // SMFIF_
                0x00, 0x00, 0x00, (byte) smfip, // SMFIP_
        });
        JilterServerPacketUtil.sendPacket(writeChannel, JilterConstants.SMFIC_OPTNEG, optionData);
    }

    /** Passes a body chunk to the handler and relays its verdict to the MTA. */
    private void processBodyPacket(final WritableByteChannel writeChannel) throws IOException
    {
        sendReplyPacket(writeChannel, handler.body(packet.getData()));
    }

    /** End-of-headers marker; relays the handler's verdict. */
    private void processEOHPacket(final WritableByteChannel writeChannel) throws IOException
    {
        sendReplyPacket(writeChannel, handler.eoh());
    }

    /** Parses one header (name, value as zero-terminated strings) for the handler. */
    private void processHeaderPacket(final WritableByteChannel writeChannel) throws IOException
    {
        String name = null;
        String value = null;
        final ByteBuffer dataBuffer = packet.getData();
        // char name[]
        name = JilterServerPacketUtil.getZeroTerminatedString(dataBuffer);
        // char value[]
        value = JilterServerPacketUtil.getZeroTerminatedString(dataBuffer);
        sendReplyPacket(writeChannel, handler.header(name, value));
    }

    /**
     * End-of-body: hands EOM actions to the handler. If the handler returns a
     * status synchronously, it is finished here; otherwise the handler is
     * expected to call {@code finish} itself later.
     */
    private void processBodyEOBPacket(final WritableByteChannel writeChannel) throws IOException
    {
        JilterStatus status;
        JilterEOMActions eomactions;
        eomactions = new JilterEOMActionsImpl(writeChannel);
        status = handler.eom(eomactions, lastProperties);
        if (status != null)
        {
            eomactions.finish(status);
        }
    }

    /** RCPT TO: parses the argument vector and relays the handler's verdict. */
    private void processRcptPacket(final WritableByteChannel writeChannel) throws IOException
    {
        String argv[] = null;
        final ByteBuffer dataBuffer = packet.getData();
        // char args[][]
        argv = JilterServerPacketUtil.getZeroTerminatedStringArray(dataBuffer);
        log.debug("Recipient is \"" + argv[0] + "\"");
        sendReplyPacket(writeChannel, handler.envrcpt(argv, lastProperties));
    }

    /** MAIL FROM: parses the argument vector and relays the handler's verdict. */
    private void processMailPacket(final WritableByteChannel writeChannel) throws IOException
    {
        String argv[] = null;
        final ByteBuffer dataBuffer = packet.getData();
        // char args[][]
        argv = JilterServerPacketUtil.getZeroTerminatedStringArray(dataBuffer);
        log.debug("Sender is \"" + argv[0] + "\"");
        sendReplyPacket(writeChannel, handler.envfrom(argv, lastProperties));
    }

    /** HELO/EHLO: parses the client identifier and relays the handler's verdict. */
    private void processHeloPacket(final WritableByteChannel writeChannel) throws IOException
    {
        String helohost = null;
        final ByteBuffer dataBuffer = packet.getData();
        // char helo[]
        helohost = JilterServerPacketUtil.getZeroTerminatedString(dataBuffer);
        log.debug("Client identifier parsed as \"" + helohost + "\"");
        sendReplyPacket(writeChannel, handler.helo(helohost, lastProperties));
    }

    /**
     * SMFIC_MACRO: stores the MTA's name/value macro pairs into
     * {@link #lastProperties}, replacing any earlier set. No reply is sent.
     */
    private void processMacroPacket(final WritableByteChannel writeChannel)
    {
        final ByteBuffer dataBuffer = packet.getData();
        String[] propertiesStrings = null;
        // char cmdcode
        dataBuffer.get();
        // char nameval[][]
        propertiesStrings = JilterServerPacketUtil.getZeroTerminatedStringArray(dataBuffer);
        lastProperties = new Properties();
        // Entries alternate name, value, name, value ...
        for (int counter = 0; counter < propertiesStrings.length; counter += 2)
        {
            log.debug("Setting property " + propertiesStrings[counter] + " = " + propertiesStrings[counter + 1]);
            lastProperties.setProperty(propertiesStrings[counter], propertiesStrings[counter + 1]);
        }
        // No reply at all...
    }

    /**
     * SMFIC_CONNECT: parses hostname plus, for SMFIA_INET, port and dotted
     * address. The resolved address stays null for non-INET families.
     */
    private void processConnectPacket(final WritableByteChannel writeChannel) throws IOException
    {
        InetAddress address = null;
        final ByteBuffer dataBuffer = packet.getData();
        String hostname = null;
        // char hostname[]
        hostname = JilterServerPacketUtil.getZeroTerminatedString(dataBuffer);
        // char family
        if (dataBuffer.get() == JilterConstants.SMFIA_INET)
        {
            // uint16 port
            dataBuffer.getShort();
            // char address[]
            {
                String stringAddress = null;
                stringAddress = JilterServerPacketUtil.getZeroTerminatedString(dataBuffer);
                log.debug("Parsed IP address is " + stringAddress);
                address = InetAddress.getByName(stringAddress);
            }
        }
        sendReplyPacket(writeChannel, handler.connect(hostname, address, lastProperties));
    }

    /** Writes the handler's status back to the MTA. */
    private void sendReplyPacket(final WritableByteChannel writeChannel, final JilterStatus status) throws IOException
    {
        status.sendReplyPacket(writeChannel);
    }

    /** SMFIC_ABORT: notifies the handler; the protocol expects no reply. */
    private void processAbortPacket(final WritableByteChannel writeChannel)
    {
        handler.abort();
        // No reply at all...
    }

    /**
     * Closes this processor. Will do the right thing to communicate to the underlying handler
     * that processing is completed.
     */
    public void close()
    {
        packet.reset();
        handler.close();
        lastProperties = null;
    }
}
|
import com.gu.mediaservice.lib.play.GridAppLoader
// Play application loader: boots the "usage" service's components on top of
// the shared Grid loader.
class AppLoader extends GridAppLoader("usage", new UsageComponents(_))
|
#! /bin/bash
# Cross-compiles dnscontrol for every supported OS/arch and packages the
# results (.deb/.rpm/.txz via fpm where available, plus .zip for all targets)
# into .ci/build/.
if [[ "$1" == "--debug" ]]; then set -x; shift; fi

# the below is supposed to handle both tags, branches when specified as argument:
PACKAGE_VERSION="${1:-v0.0.0}"
PACKAGE_VERSION="${PACKAGE_VERSION#v}"
PACKAGE_VERSION="${PACKAGE_VERSION##*/}"

# metadata
FPM_OPTIONS=(
  --name dnscontrol
  --version "${PACKAGE_VERSION}"
  --license "The MIT License (MIT)"
  --url "https://dnscontrol.org/"
  --description "DNSControl: Infrastructure as Code for DNS Zones"
)

# list of files to be packaged and their respective locations (path names are subject to os-/archive-specific adjustment)
# TODO: maybe include additional documentation/examples?
DNSCONTROL_FILES=(
  dnscontrol=/bin/
  LICENSE=/share/doc/dnscontrol/
)

rm -Rf .ci/build/ 2>/dev/null
mkdir -p .ci/build

# taken from build/build.go
MAIN_SHA="$(git rev-parse HEAD)"
MAIN_BUILDTIME="$(date +%s)"

# TODO: check whether to include armel/armhf builds for .deb/.rpm/.txz (NB we might need to map 'arm' to 'armXX' in this case)
for BUILD_OS_ARCH in darwin/amd64 darwin/arm64 freebsd/386 freebsd/amd64 freebsd/arm64 linux/386 linux/amd64 linux/arm64 windows/amd64 windows/arm64; do
  BUILD_OS="${BUILD_OS_ARCH%%/*}"
  # Arch name variants: go (386/amd64/arm64), deb (i386), freebsd/rpm (aarch64, x86_64).
  BUILD_ARCH1="${BUILD_OS_ARCH##*/}"
  BUILD_ARCH2="${BUILD_ARCH1}"
  [[ "${BUILD_ARCH2}" == "386" ]] && BUILD_ARCH2="i386"
  BUILD_ARCH3="${BUILD_ARCH2}"
  [[ "${BUILD_ARCH3}" == "arm64" ]] && BUILD_ARCH3="aarch64"
  BUILD_ARCH4="${BUILD_ARCH3}"
  [[ "${BUILD_ARCH4}" == "amd64" ]] && BUILD_ARCH4="x86_64"
  BUILD_OPTS=""
  # NB: BUILD_OPTS carries its own leading space, hence the unquoted `env${BUILD_OPTS}` below.
  [[ "${BUILD_OS}" == "linux" ]] && BUILD_OPTS="${BUILD_OPTS} CGO_ENABLED=0"
  SUFFIX=""
  [[ "${BUILD_OS}" == "windows" ]] && SUFFIX=".exe"
  go clean
  echo "**** Executing 'env${BUILD_OPTS} GOOS=\"${BUILD_OS}\" GOARCH=\"${BUILD_ARCH1}\" go build -mod vendor -ldflags=\"-s -w -X main.SHA=\"${MAIN_SHA}\" -X main.BuildTime=${MAIN_BUILDTIME}\"'"
  # NOTE(review): the escaped quotes in -ldflags embed literal `"` characters
  # into main.SHA — confirm whether the consumer strips them or they are a bug.
  #shellcheck disable=SC2086
  env${BUILD_OPTS} GOOS="${BUILD_OS}" GOARCH="${BUILD_ARCH1}" go build -mod vendor -ldflags="-s -w -X main.SHA=\"${MAIN_SHA}\" -X main.BuildTime=${MAIN_BUILDTIME}"
  if [[ -f "dnscontrol${SUFFIX}" ]]; then
    if [[ "${BUILD_OS}" == "linux" ]]; then
      # create rpm, deb archives using fpm (if available)
      if type fpm 2>/dev/null 1>&2; then
        fpm -a "${BUILD_ARCH2}" --log error -p .ci/build --prefix /usr -s dir -t deb "${FPM_OPTIONS[@]}" "${DNSCONTROL_FILES[@]}"
        fpm -a "${BUILD_ARCH4}" --log error -p .ci/build --prefix /usr -s dir -t rpm "${FPM_OPTIONS[@]}" "${DNSCONTROL_FILES[@]}"
      fi
    elif [[ "${BUILD_OS}" == "freebsd" ]]; then
      # create txz archive using fpm (if available)
      if type fpm 2>/dev/null 1>&2; then
        rm -Rf ./*.txz 2>/dev/null
        fpm -a "${BUILD_ARCH3}" --log error --prefix /usr/local -s dir -t freebsd "${FPM_OPTIONS[@]}" "${DNSCONTROL_FILES[@]}"
        TXZNAME="$(ls ./*.txz 2>/dev/null)"
        if [[ -n "${TXZNAME}" ]]; then
          # FIXUP: fpm 3.13.1 (and older?) creates invalid txz archives lacking a leading '/' for non-metadata files
          # see https://github.com/jordansissel/fpm/issues/1832
          if tar -tf "${TXZNAME}" 2>/dev/null | grep -qE "^[a-z]"; then
            FTMPDIR="$(mktemp -d -p .)"
            if [[ -d "${FTMPDIR}" ]]; then
              # Re-pack with the temp-dir prefix stripped so entries become absolute.
              tar -C "${FTMPDIR}" -xf "${TXZNAME}"
              #shellcheck disable=SC2046
              tar -cJf "${TXZNAME}" $(find "${FTMPDIR}" -type f | sort) --transform "s|${FTMPDIR}||" --transform 's|/+|+|'
              rm -Rf "./${FTMPDIR}" 2>/dev/null
            fi
          fi
          mv "${TXZNAME}" ".ci/build/${TXZNAME/\.txz/_${BUILD_ARCH3}.txz}" 2>/dev/null
        fi
      fi
    fi
    # create zip archives containing ${DNSCONTROL_FILES[@]} with stripped paths and accounting for the executable's ${SUFFIX}
    DNSCONTROL_ZFILES=("${DNSCONTROL_FILES[@]}")
    #shellcheck disable=SC2068
    for idx in ${!DNSCONTROL_ZFILES[@]}; do
      BASENAME="${DNSCONTROL_ZFILES[$idx]}"
      BASENAME="${BASENAME%=*}"
      [[ "${BASENAME}" == "dnscontrol" ]] && BASENAME="dnscontrol${SUFFIX}"
      DNSCONTROL_ZFILES[$idx]="${BASENAME}"
    done
    zip -X -9 -o ".ci/build/dnscontrol_${PACKAGE_VERSION}_${BUILD_OS}-${BUILD_ARCH1}.zip" "${DNSCONTROL_ZFILES[@]}"
  fi
done

echo "----------"
ls -l .ci/build/*
echo "----------"
|
from typing import List
class ListNode:
    """Node of a singly linked list."""

    def __init__(self, val: int = 0, next: "ListNode | None" = None):
        # Payload stored at this node.
        self.val = val
        # Reference to the following node; None marks the tail.
        self.next = next
def middleNode(head: ListNode) -> ListNode:
    """Return the middle node of the list starting at ``head``.

    Fast/slow two-pointer technique: ``fast`` advances two links per step
    while ``slow`` advances one, so when ``fast`` runs off the end ``slow``
    sits on the middle (the second middle for even-length lists).
    O(n) time, O(1) space.

    Fixes the non-idiomatic ``!= None`` checks (PEP 8: compare to None
    with ``is``/``is not``) and names the pointers by role.
    """
    slow = fast = head
    while fast is not None and fast.next is not None:
        fast = fast.next.next
        slow = slow.next
    return slow
def create_linked_list(values: List[int]) -> ListNode:
    """Build a singly linked list from ``values``.

    Returns the head node, or None when ``values`` is empty.
    """
    head = None
    tail = None
    for item in values:
        node = ListNode(item)
        if head is None:
            head = node
        else:
            tail.next = node
        tail = node
    return head
def find_middle_node(values: List[int]) -> int:
    """Convenience wrapper: build a list from ``values`` and return the
    value stored in its middle node."""
    return middleNode(create_linked_list(values)).val
<filename>ios/OpenImSdkRn.h
@import OpenIMCore;
#import "CallbackProxy.h"
#if __has_include("RCTBridgeModule.h")
#import "RCTBridgeModule.h"
#import "RCTEventEmitter.h"
#else
#import <React/RCTBridgeModule.h>
#import <React/RCTEventEmitter.h>
#endif
// React Native bridge for the OpenIM Go SDK: adopts every SDK listener
// protocol (connection, user, message, friendship, conversation, group,
// signaling) and re-emits native callbacks as RN events.
@interface OpenIMSDKRN : RCTEventEmitter <RCTBridgeModule,Open_im_sdk_callbackOnConnListener,Open_im_sdk_callbackOnUserListener, Open_im_sdk_callbackOnAdvancedMsgListener, Open_im_sdk_callbackOnFriendshipListener, Open_im_sdk_callbackOnConversationListener, Open_im_sdk_callbackOnGroupListener,Open_im_sdk_callbackOnSignalingListener>

// Forwards one native callback to JavaScript as a single event payload.
- (void)pushEvent:(NSString *) eventName errCode:(NSNumber *) errCode errMsg:(NSString *) errMsg data:(NSString *) data;

@end
|
package ua.kata;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for ShiftArrayLeft#shift: rotates the given array in place to the
 * left by the given number of positions. The last case shows that a shift
 * amount larger than the array length wraps around (5 on length 4 == 1).
 */
class ShiftArrayLeftTest {

    @Test
    void twoItemsArray_shiftByOne() throws Exception {
        final int[] items = {1, 2};
        new ShiftArrayLeft().shift(items, 1);
        assertThat(items).isEqualTo(new int[]{2, 1});
    }

    @Test
    void fourItemsArray_shiftByTwo() throws Exception {
        final int[] items = {1, 2, 3, 4};
        new ShiftArrayLeft().shift(items, 2);
        assertThat(items).isEqualTo(new int[]{3, 4, 1, 2});
    }

    @Test
    void threeItemsArray_shiftByOne() throws Exception {
        final int[] items = {1, 2, 3};
        new ShiftArrayLeft().shift(items, 1);
        assertThat(items).isEqualTo(new int[]{2, 3, 1});
    }

    // Shift amount exceeding the length wraps around.
    @Test
    void fourItemsArray_shiftByFive() throws Exception {
        final int[] items = {1, 2, 3, 4};
        new ShiftArrayLeft().shift(items, 5);
        assertThat(items).isEqualTo(new int[]{2, 3, 4, 1});
    }
}
|
# Termux package recipe for zstd; these variables are consumed by the
# Termux build system.
TERMUX_PKG_HOMEPAGE=https://github.com/facebook/zstd
TERMUX_PKG_DESCRIPTION="Zstandard compression."
TERMUX_PKG_VERSION=1.3.6
# SHA-256 checksum of the source tarball referenced by TERMUX_PKG_SRCURL.
TERMUX_PKG_SHA256=7f5516148fb66b184cbd6e4e8202997754b6a9d3d016ee789cda87f2fa659dc7
TERMUX_PKG_SRCURL=https://github.com/facebook/zstd/archive/v$TERMUX_PKG_VERSION.tar.gz
# Build inside the source tree rather than a separate build directory.
TERMUX_PKG_BUILD_IN_SRC=yes
TERMUX_PKG_DEPENDS="liblzma"
|
<filename>src/utils/pageUtils.tsx
import React from 'react';
import { v4 as uuidv4 } from 'uuid';
import { ComponentsList } from '@components/index';
import { ApolloClient, DocumentNode } from '@apollo/client';
import {
GET_DATA_LAYOUT_COMPONENT,
GET_DATA_SERVICES_HOME_COMPONENT,
GET_DATA_VIDEO_BLOCK_COMPONENT,
GET_DATA_APP_CONFIG_CONTEXT,
GET_DATA_SERVICES_ALL_COMPONENT,
GET_DATA_APP_COMPONENT,
GET_DATA_LINK_TREE_COMPONENT,
} from '@queries/index';
/**
 * DEFINE THE COMMON QUERIES FOR ALL PAGES
 * Example:
 * - GET_DATA_LAYOUT_COMPONENT = Query for menus data, footer, etc.
 * Merged with the page-specific queries in queriesForPage().
 */
const globalPageQueries = [
  GET_DATA_APP_COMPONENT,
  GET_DATA_LAYOUT_COMPONENT,
  GET_DATA_APP_CONFIG_CONTEXT,
];

/**
 * Defines the extra queries for a specific page, keyed by page name.
 * Pages not listed here receive only the global queries.
 */
const pageQueries: PageQueriesType = {
  indexPage: [GET_DATA_SERVICES_HOME_COMPONENT, GET_DATA_VIDEO_BLOCK_COMPONENT],
  servicesPage: [GET_DATA_SERVICES_ALL_COMPONENT],
  linkTreePage: [GET_DATA_LINK_TREE_COMPONENT],
};

/**
 * Ordered component descriptors rendered per page by renderPageComponents().
 * Each `component` value must be a key of ComponentsList; unknown keys fall
 * back to the `unRendered` component.
 */
const pageComponents: any = {
  indexPage: [
    {
      component: 'servicesHome',
    },
    {
      component: 'videoHome',
    },
  ],
  servicesPage: [
    {
      component: 'servicesAll',
    },
  ],
  linkTreePage: [
    {
      component: 'LinkTree',
    },
  ],
  linkTreePage: [
    {
      component: 'LinkTree',
    },
    {
      component: 'languageSwitcher',
    },
  ],
};

/** Maps a page name to the GraphQL documents it needs. */
type PageQueriesType = {
  [pageName: string]: DocumentNode[];
};
/**
 * Returns a promise resolving all queries needed to build the given page
 * with SSR: the global queries plus any page-specific ones, each executed
 * against the Apollo client with the requested locale.
 */
export function queriesForPage(
  pageName: string,
  apolloClient: ApolloClient<any>,
  locale: string
): Promise<any> {
  const extraQueries = Object.prototype.hasOwnProperty.call(
    pageQueries,
    pageName
  )
    ? pageQueries[pageName]
    : [];
  const allQueries = [...globalPageQueries, ...extraQueries];
  return Promise.all(
    allQueries.map((pageQuery) =>
      apolloClient.query({
        variables: { locale: locale },
        query: pageQuery,
      })
    )
  );
}
/**
 * Renders the configured component list for a page into React elements.
 * Unknown component names fall back to ComponentsList['unRendered'].
 *
 * FIX: the `components` array and the `block` callback parameter were
 * implicitly `any`, which fails under TypeScript's `noImplicitAny`;
 * they now carry the explicit `{ component: string }` shape. Behavior
 * is unchanged.
 */
export function renderPageComponents(pageName: string): JSX.Element[] {
  let components: Array<{ component: string }> = [];
  if (Object.prototype.hasOwnProperty.call(pageComponents, pageName)) {
    components = pageComponents[pageName];
  }
  return components.map((block: { component: string }) => {
    let component: any = ComponentsList['unRendered'];
    if (typeof ComponentsList[block.component] !== 'undefined') {
      component = ComponentsList[block.component];
    }
    return React.createElement(component, {
      // Fresh key per render; list order is static so this is acceptable here.
      key: uuidv4(),
      componentName: block.component,
    });
  });
}
|
<gh_stars>1-10
package com.singularitycoder.folkdatabase.profile.repository;
import android.util.Log;
import androidx.lifecycle.MutableLiveData;
import com.google.firebase.firestore.DocumentSnapshot;
import com.google.firebase.firestore.FirebaseFirestore;
import com.singularitycoder.folkdatabase.auth.model.AuthUserItem;
import com.singularitycoder.folkdatabase.database.model.AllUsersItem;
import com.singularitycoder.folkdatabase.database.model.ContactItem;
import com.singularitycoder.folkdatabase.database.model.FolkGuideItem;
import com.singularitycoder.folkdatabase.database.model.TeamLeadItem;
import com.singularitycoder.folkdatabase.helper.RequestStateMediator;
import com.singularitycoder.folkdatabase.helper.HelperConstants;
import com.singularitycoder.folkdatabase.helper.UiState;
import java.util.HashMap;
import java.util.List;
import static java.lang.String.valueOf;
public class ProfileRepository {

    private static final String TAG = "ProfileRepository";

    // Lazily created shared instance; see getInstance().
    private static ProfileRepository _instance;

    public ProfileRepository() {
        // Initialize Firebase if necessary
    }

    /**
     * Returns the shared repository instance, creating it on first use.
     * NOTE(review): this lazy init is not thread-safe; fine if only ever
     * called from the main thread — confirm call sites.
     */
    public static ProfileRepository getInstance() {
        if (_instance == null) {
            _instance = new ProfileRepository();
        }
        return _instance;
    }
    // READ
    /**
     * Streams the authenticated member's basic profile (short name, full name,
     * profile image) for the given email. Emits LOADING immediately, then
     * SUCCESS per matching document, EMPTY when no document matches, or ERROR
     * on Firestore failure.
     *
     * NOTE(review): String.valueOf(docSnap.getString(...)) turns a missing
     * field into the literal string "null", so the != "" guards below never
     * filter out absent values — confirm whether that is intended.
     */
    public MutableLiveData<RequestStateMediator> getAuthUserData(String email) {
        final MutableLiveData<RequestStateMediator> authUserLiveData = new MutableLiveData<>();
        final RequestStateMediator requestStateMediator = new RequestStateMediator();
        requestStateMediator.set(null, UiState.LOADING, "Please wait...", null);
        authUserLiveData.postValue(requestStateMediator);
        FirebaseFirestore.getInstance()
                .collection(HelperConstants.COLL_AUTH_FOLK_MEMBERS)
                .whereEqualTo("email", email)
                .get()
                .addOnSuccessListener(queryDocumentSnapshots -> {
                    if (!queryDocumentSnapshots.isEmpty()) {
                        List<DocumentSnapshot> docList = queryDocumentSnapshots.getDocuments();
                        Log.d(TAG, "docList: " + docList);
                        for (DocumentSnapshot docSnap : docList) {
                            AuthUserItem authUserItem = docSnap.toObject(AuthUserItem.class);
                            if (authUserItem != null) {
                                Log.d(TAG, "AuthItem: " + authUserItem);
                                // Overlay selected raw fields on top of the mapped object.
                                if (!("").equals(valueOf(docSnap.getString("shortName")))) {
                                    authUserItem.setShortName(valueOf(docSnap.getString("shortName")));
                                    Log.d(TAG, "readAuthUserData: shortName: " + valueOf(docSnap.getString("shortName")));
                                }
                                if (!("").equals(valueOf(docSnap.getString("fullName")))) {
                                    authUserItem.setFullName(valueOf(docSnap.getString("fullName")));
                                    Log.d(TAG, "readAuthUserData: fullname: " + valueOf(docSnap.getString("fullName")));
                                }
                                if (!("").equals(valueOf(docSnap.getString("profileImageUrl")))) {
                                    authUserItem.setProfileImageUrl(valueOf(docSnap.getString("profileImageUrl")));
                                    Log.d(TAG, "readAuthUserData: profilepic: " + valueOf(docSnap.getString("profileImageUrl")));
                                }
                                requestStateMediator.set(authUserItem, UiState.SUCCESS, "Got Basic Info!", "AUTH USER");
                                authUserLiveData.postValue(requestStateMediator);
                            }
                            Log.d(TAG, "firedoc id: " + docSnap.getId());
                        }
                    } else {
                        requestStateMediator.set(null, UiState.EMPTY, "Nothing!", null);
                        authUserLiveData.postValue(requestStateMediator);
                    }
                })
                .addOnFailureListener(e -> {
                    requestStateMediator.set(null, UiState.ERROR, e.getMessage(), null);
                    authUserLiveData.postValue(requestStateMediator);
                });
        return authUserLiveData;
    }
    // READ
    /**
     * Streams a FolkGuide profile (names, image, email, phone/WhatsApp) for
     * the given email from the same auth-members collection. State sequence:
     * LOADING, then SUCCESS per matching document, EMPTY when none match,
     * ERROR on Firestore failure.
     *
     * NOTE(review): same valueOf(null) -> "null" caveat as getAuthUserData;
     * also WhatsApp is deliberately copied from the "phone" field.
     */
    public MutableLiveData<RequestStateMediator> getFolkGuideData(String email) {
        final MutableLiveData<RequestStateMediator> folkGuideLiveData = new MutableLiveData<>();
        final RequestStateMediator requestStateMediator = new RequestStateMediator();
        requestStateMediator.set(null, UiState.LOADING, "Please wait...", null);
        folkGuideLiveData.postValue(requestStateMediator);
        FirebaseFirestore.getInstance()
                .collection(HelperConstants.COLL_AUTH_FOLK_MEMBERS)
                .whereEqualTo("email", email)
                .get()
                .addOnSuccessListener(queryDocumentSnapshots -> {
                    if (!queryDocumentSnapshots.isEmpty()) {
                        List<DocumentSnapshot> docList = queryDocumentSnapshots.getDocuments();
                        Log.d(TAG, "docList: " + docList);
                        for (DocumentSnapshot docSnap : docList) {
                            FolkGuideItem folkGuideItem = docSnap.toObject(FolkGuideItem.class);
                            if (folkGuideItem != null) {
                                Log.d(TAG, "FolkGuideItem: " + folkGuideItem);
                                if (!("").equals(valueOf(docSnap.getString("shortName")))) {
                                    folkGuideItem.setStrFolkGuideShortName(valueOf(docSnap.getString("shortName")));
                                    Log.d(TAG, "readFolkGuideData: shortName: " + valueOf(docSnap.getString("shortName")));
                                }
                                if (!("").equals(valueOf(docSnap.getString("fullName")))) {
                                    folkGuideItem.setStrName(valueOf(docSnap.getString("fullName")));
                                    Log.d(TAG, "readFolkGuideData: fullname: " + valueOf(docSnap.getString("fullName")));
                                }
                                if (!("").equals(valueOf(docSnap.getString("profileImageUrl")))) {
                                    folkGuideItem.setStrProfileImage(valueOf(docSnap.getString("profileImageUrl")));
                                    Log.d(TAG, "readFolkGuideData: profilepic: " + valueOf(docSnap.getString("profileImageUrl")));
                                }
                                if (!("").equals(valueOf(docSnap.getString("email")))) {
                                    folkGuideItem.setStrEmail(valueOf(docSnap.getString("email")));
                                    Log.d(TAG, "readFolkGuideData: email: " + valueOf(docSnap.getString("email")));
                                }
                                if (!("").equals(valueOf(docSnap.getString("phone")))) {
                                    folkGuideItem.setStrPhone(valueOf(docSnap.getString("phone")));
                                    Log.d(TAG, "readFolkGuideData: phone: " + valueOf(docSnap.getString("phone")));
                                }
                                if (!("").equals(valueOf(docSnap.getString("phone")))) {
                                    folkGuideItem.setStrWhatsApp(valueOf(docSnap.getString("phone")));
                                    Log.d(TAG, "readFolkGuideData: whatsapp: " + valueOf(docSnap.getString("phone")));
                                }
                                requestStateMediator.set(folkGuideItem, UiState.SUCCESS, "Got Folk Guide Info!", "FOLK GUIDE");
                                folkGuideLiveData.postValue(requestStateMediator);
                            }
                            Log.d(TAG, "firedoc id: " + docSnap.getId());
                        }
                    } else {
                        requestStateMediator.set(null, UiState.EMPTY, "Nothing!", null);
                        folkGuideLiveData.postValue(requestStateMediator);
                    }
                })
                .addOnFailureListener(e -> {
                    requestStateMediator.set(null, UiState.ERROR, e.getMessage(), null);
                    folkGuideLiveData.postValue(requestStateMediator);
                });
        return folkGuideLiveData;
    }
// READ
/**
 * Loads the team-lead record whose "email" field equals {@code emailKey} param
 * from the COLL_AUTH_FOLK_MEMBERS collection and exposes it as LiveData.
 *
 * Emits: LOADING immediately, then SUCCESS (with the populated TeamLeadItem)
 * for each matching document, EMPTY when the query returns no documents, or
 * ERROR when the Firestore call fails.
 */
public MutableLiveData<RequestStateMediator> getTeamLeadData(String email) {
    final MutableLiveData<RequestStateMediator> teamLeadLiveData = new MutableLiveData<>();
    final RequestStateMediator requestStateMediator = new RequestStateMediator();
    // Notify observers before the async query starts.
    requestStateMediator.set(null, UiState.LOADING, "Please wait...", null);
    teamLeadLiveData.postValue(requestStateMediator);
    FirebaseFirestore.getInstance()
            .collection(HelperConstants.COLL_AUTH_FOLK_MEMBERS)
            .whereEqualTo("email", email)
            .get()
            .addOnSuccessListener(queryDocumentSnapshots -> {
                if (!queryDocumentSnapshots.isEmpty()) {
                    List<DocumentSnapshot> docList = queryDocumentSnapshots.getDocuments();
                    Log.d(TAG, "docList: " + docList);
                    for (DocumentSnapshot docSnap : docList) {
                        TeamLeadItem teamLeadItem = docSnap.toObject(TeamLeadItem.class);
                        if (teamLeadItem != null) {
                            Log.d(TAG, "TeamLeadItem: " + teamLeadItem);
                            // Overwrite mapped fields only when Firestore holds a non-empty value.
                            // NOTE(review): valueOf(null) yields the string "null", which passes
                            // this guard — confirm whether missing fields should be skipped.
                            if (!("").equals(valueOf(docSnap.getString("shortName")))) {
                                teamLeadItem.setStrTeamLeadShortName(valueOf(docSnap.getString("shortName")));
                                Log.d(TAG, "readTeamLeadData: shortName: " + valueOf(docSnap.getString("shortName")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("fullName")))) {
                                teamLeadItem.setStrName(valueOf(docSnap.getString("fullName")));
                                Log.d(TAG, "readTeamLeadData: fullname: " + valueOf(docSnap.getString("fullName")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("profileImageUrl")))) {
                                teamLeadItem.setStrProfileImage(valueOf(docSnap.getString("profileImageUrl")));
                                Log.d(TAG, "readTeamLeadData: profilepic: " + valueOf(docSnap.getString("profileImageUrl")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("email")))) {
                                teamLeadItem.setStrEmail(valueOf(docSnap.getString("email")));
                                // BUGFIX: log tag said "readFolkGuideData" (copy/paste from the folk-guide loader).
                                Log.d(TAG, "readTeamLeadData: email: " + valueOf(docSnap.getString("email")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("phone")))) {
                                teamLeadItem.setStrPhone(valueOf(docSnap.getString("phone")));
                                Log.d(TAG, "readTeamLeadData: phone: " + valueOf(docSnap.getString("phone")));
                            }
                            // WhatsApp is populated from the same "phone" field; no separate
                            // "whatsapp" field is read for this collection.
                            if (!("").equals(valueOf(docSnap.getString("phone")))) {
                                teamLeadItem.setStrWhatsApp(valueOf(docSnap.getString("phone")));
                                Log.d(TAG, "readTeamLeadData: whatsapp: " + valueOf(docSnap.getString("phone")));
                            }
                            requestStateMediator.set(teamLeadItem, UiState.SUCCESS, "Got Team Lead Info!", "TEAM LEAD");
                            teamLeadLiveData.postValue(requestStateMediator);
                        }
                        Log.d(TAG, "firedoc id: " + docSnap.getId());
                    }
                } else {
                    requestStateMediator.set(null, UiState.EMPTY, "Nothing!", null);
                    teamLeadLiveData.postValue(requestStateMediator);
                }
            })
            .addOnFailureListener(e -> {
                requestStateMediator.set(null, UiState.ERROR, e.getMessage(), null);
                teamLeadLiveData.postValue(requestStateMediator);
            });
    return teamLeadLiveData;
}
// READ
/**
 * Loads the new-member ("contact") record whose "email" field equals
 * {@code email} from COLL_FOLK_NEW_MEMBERS.
 *
 * Emits: LOADING first, then SUCCESS per matching document, EMPTY when
 * nothing matches, or ERROR on Firestore failure. Missing text fields fall
 * back to "No Data" / "0000000000" placeholders; image URLs come from the
 * nested "docs" map when present.
 */
public MutableLiveData<RequestStateMediator> getContactData(String email) {
    final MutableLiveData<RequestStateMediator> contactLiveData = new MutableLiveData<>();
    final RequestStateMediator requestStateMediator = new RequestStateMediator();
    requestStateMediator.set(null, UiState.LOADING, "Please wait...", null);
    contactLiveData.postValue(requestStateMediator);
    FirebaseFirestore.getInstance()
            .collection(HelperConstants.COLL_FOLK_NEW_MEMBERS)
            .whereEqualTo("email", email)
            .get()
            .addOnSuccessListener(queryDocumentSnapshots -> {
                if (!queryDocumentSnapshots.isEmpty()) {
                    List<DocumentSnapshot> docList = queryDocumentSnapshots.getDocuments();
                    Log.d(TAG, "docList: " + docList);
                    for (DocumentSnapshot docSnap : docList) {
                        ContactItem contactItem = docSnap.toObject(ContactItem.class);
                        if (contactItem != null) {
                            Log.d(TAG, "ContactItem: " + contactItem);
                            // NOTE(review): valueOf(null) yields the string "null", which passes
                            // the !"".equals(...) guard — confirm whether missing fields should
                            // instead take the placeholder branch.
                            if (!("").equals(valueOf(docSnap.getString("folk_guide")))) {
                                contactItem.setStrFolkGuide(valueOf(docSnap.getString("folk_guide")));
                                Log.d(TAG, "readContactData: folkGuide: " + valueOf(docSnap.getString("folk_guide")));
                            } else {
                                contactItem.setStrFolkGuide("No Data");
                            }
                            if (!("").equals(valueOf(docSnap.getString("name")))) {
                                contactItem.setStrName(valueOf(docSnap.getString("name")));
                                Log.d(TAG, "readContactData: name: " + valueOf(docSnap.getString("name")));
                            } else {
                                contactItem.setStrName("No Data");
                            }
                            if (!("").equals(valueOf(docSnap.getString("email")))) {
                                contactItem.setStrEmail(valueOf(docSnap.getString("email")));
                                // BUGFIX: log tag said "readFolkGuideData" (copy/paste).
                                Log.d(TAG, "readContactData: email: " + valueOf(docSnap.getString("email")));
                            } else {
                                contactItem.setStrEmail("No Data");
                            }
                            if (!("").equals(valueOf(docSnap.getString("mobile")))) {
                                contactItem.setStrPhone(valueOf(docSnap.getString("mobile")));
                                // BUGFIX: the log read a "phone" field, but the value stored
                                // actually comes from "mobile".
                                Log.d(TAG, "readContactData: phone: " + valueOf(docSnap.getString("mobile")));
                            } else {
                                contactItem.setStrPhone("0000000000");
                            }
                            if (!("").equals(valueOf(docSnap.getString("whatsapp")))) {
                                contactItem.setStrWhatsApp(valueOf(docSnap.getString("whatsapp")));
                                // BUGFIX: log the "whatsapp" field that was stored, not "phone".
                                Log.d(TAG, "readContactData: whatsapp: " + valueOf(docSnap.getString("whatsapp")));
                            } else {
                                contactItem.setStrWhatsApp("0000000000");
                            }
                            HashMap<String, String> imageData = new HashMap<>();
                            if (null != docSnap.get("docs")) {
                                imageData = (HashMap<String, String>) docSnap.get("docs");
                                if (!("").equals(imageData.get("doc_url"))) {
                                    contactItem.setStrDocumentImage(imageData.get("doc_url"));
                                } else {
                                    // CLARITY: was imageData.get("") — an empty-string key is never
                                    // present, so this always produced null; say so explicitly.
                                    contactItem.setStrDocumentImage(null);
                                }
                                if (!("").equals(imageData.get("photo_url"))) {
                                    contactItem.setStrProfileImage(imageData.get("photo_url"));
                                } else {
                                    contactItem.setStrProfileImage(null);
                                }
                            } else {
                                // No "docs" map on the document: no images available.
                                contactItem.setStrDocumentImage(null);
                                contactItem.setStrProfileImage(null);
                            }
                            requestStateMediator.set(contactItem, UiState.SUCCESS, "Got Contact Info!", "CONTACT");
                            contactLiveData.postValue(requestStateMediator);
                        }
                        Log.d(TAG, "firedoc id: " + docSnap.getId());
                    }
                } else {
                    requestStateMediator.set(null, UiState.EMPTY, "Nothing!", null);
                    contactLiveData.postValue(requestStateMediator);
                }
            })
            .addOnFailureListener(e -> {
                requestStateMediator.set(null, UiState.ERROR, e.getMessage(), null);
                contactLiveData.postValue(requestStateMediator);
            });
    return contactLiveData;
}
// READ
/**
 * Loads the auth-member record whose "email" field equals {@code email} from
 * COLL_AUTH_FOLK_MEMBERS and maps selected fields onto an AllUsersItem.
 *
 * Emits via the returned LiveData: LOADING first, then SUCCESS per matching
 * document, EMPTY when the query result is empty, or ERROR if the Firestore
 * call fails.
 */
public MutableLiveData<RequestStateMediator> getAllUsersData(String email) {
    final MutableLiveData<RequestStateMediator> allUsersLiveData = new MutableLiveData<>();
    final RequestStateMediator requestStateMediator = new RequestStateMediator();
    // Notify observers before the async query starts.
    requestStateMediator.set(null, UiState.LOADING, "Please wait...", null);
    allUsersLiveData.postValue(requestStateMediator);
    FirebaseFirestore.getInstance()
            .collection(HelperConstants.COLL_AUTH_FOLK_MEMBERS)
            .whereEqualTo("email", email)
            .get()
            .addOnSuccessListener(queryDocumentSnapshots -> {
                if (!queryDocumentSnapshots.isEmpty()) {
                    List<DocumentSnapshot> docList = queryDocumentSnapshots.getDocuments();
                    Log.d(TAG, "docList: " + docList);
                    for (DocumentSnapshot docSnap : docList) {
                        AllUsersItem allUsersItem = docSnap.toObject(AllUsersItem.class);
                        if (null != allUsersItem) {
                            Log.d(TAG, "AllUsersItem: " + allUsersItem);
                            // Overwrite mapped fields only when Firestore holds a non-empty value.
                            // NOTE(review): valueOf(null) is the string "null", which passes this
                            // guard — confirm whether missing fields should be skipped.
                            if (!("").equals(valueOf(docSnap.getString("email")))) {
                                allUsersItem.setStrEmail(valueOf(docSnap.getString("email")));
                                Log.d(TAG, "readAllUsersData: email: " + valueOf(docSnap.getString("email")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("phone")))) {
                                allUsersItem.setStrPhone(valueOf(docSnap.getString("phone")));
                                Log.d(TAG, "readAllUsersData: phone: " + valueOf(docSnap.getString("phone")));
                            }
                            // NOTE(review): the full name is stored into the *first name* slot —
                            // confirm AllUsersItem has no separate full-name field.
                            if (!("").equals(valueOf(docSnap.getString("fullName")))) {
                                allUsersItem.setStrFirstName(valueOf(docSnap.getString("fullName")));
                                Log.d(TAG, "readAllUsersData: fullname: " + valueOf(docSnap.getString("fullName")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("profileImageUrl")))) {
                                allUsersItem.setStrProfileImage(valueOf(docSnap.getString("profileImageUrl")));
                                Log.d(TAG, "readAllUsersData: profilepic: " + valueOf(docSnap.getString("profileImageUrl")));
                            }
                            requestStateMediator.set(allUsersItem, UiState.SUCCESS, "Got All Users Info!", "ALL USERS");
                            allUsersLiveData.postValue(requestStateMediator);
                        }
                        Log.d(TAG, "firedoc id: " + docSnap.getId());
                    }
                } else {
                    requestStateMediator.set(null, UiState.EMPTY, "Nothing!", null);
                    allUsersLiveData.postValue(requestStateMediator);
                }
            })
            .addOnFailureListener(e -> {
                requestStateMediator.set(null, UiState.ERROR, e.getMessage(), null);
                allUsersLiveData.postValue(requestStateMediator);
            });
    return allUsersLiveData;
}
// READ
/**
 * Loads the authenticated member's basic profile (names, contacts, dates)
 * whose "email" field equals {@code emailKey} from COLL_AUTH_FOLK_MEMBERS.
 *
 * Emits via the returned LiveData: LOADING first, then SUCCESS (with the
 * populated AuthUserItem) per matching document, EMPTY for no matches, or
 * ERROR on Firestore failure.
 */
public MutableLiveData<RequestStateMediator> getBasicInfo(String emailKey) {
    final MutableLiveData<RequestStateMediator> basicInfoLiveData = new MutableLiveData<>();
    final RequestStateMediator requestStateMediator = new RequestStateMediator();
    requestStateMediator.set(null, UiState.LOADING, "Please wait...", null);
    basicInfoLiveData.postValue(requestStateMediator);
    FirebaseFirestore.getInstance()
            .collection(HelperConstants.COLL_AUTH_FOLK_MEMBERS)
            .whereEqualTo("email", emailKey)
            .get()
            .addOnSuccessListener(queryDocumentSnapshots -> {
                if (!queryDocumentSnapshots.isEmpty()) {
                    List<DocumentSnapshot> docList = queryDocumentSnapshots.getDocuments();
                    Log.d(TAG, "docList: " + docList);
                    for (DocumentSnapshot docSnap : docList) {
                        AuthUserItem authUserItem = docSnap.toObject(AuthUserItem.class);
                        if (null != authUserItem) {
                            Log.d(TAG, "AuthUserItem: " + authUserItem);
                            // Overwrite mapped fields only when Firestore holds a non-empty value.
                            // NOTE(review): valueOf(null) is the string "null", which passes this
                            // guard — confirm whether missing fields should be skipped.
                            if (!("").equals(valueOf(docSnap.getString("email")))) {
                                authUserItem.setEmail(valueOf(docSnap.getString("email")));
                                Log.d(TAG, "readBasicData: email: " + valueOf(docSnap.getString("email")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("fullName")))) {
                                authUserItem.setFullName(valueOf(docSnap.getString("fullName")));
                                Log.d(TAG, "readBasicData: fullname: " + valueOf(docSnap.getString("fullName")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("gmail")))) {
                                authUserItem.setGmail(valueOf(docSnap.getString("gmail")));
                                Log.d(TAG, "readBasicData: gmail: " + valueOf(docSnap.getString("gmail")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("phone")))) {
                                authUserItem.setPhone(valueOf(docSnap.getString("phone")));
                                Log.d(TAG, "readBasicData: phone: " + valueOf(docSnap.getString("phone")));
                            }
                            // NOTE(review): unlike the others, this setter receives the raw
                            // getString(...) result (possibly null), not valueOf(...) — confirm
                            // the asymmetry is intentional.
                            if (!("").equals(valueOf(docSnap.getString("creationTimeStamp")))) {
                                authUserItem.setCreationTimeStamp(docSnap.getString("creationTimeStamp"));
                                Log.d(TAG, "readBasicData: creationTimeStamp: " + valueOf(docSnap.getString("creationTimeStamp")));
                            }
                            if (!("").equals(valueOf(docSnap.getString("hkmJoiningDate")))) {
                                authUserItem.setHkmJoiningDate(valueOf(docSnap.getString("hkmJoiningDate")));
                                Log.d(TAG, "readBasicData: hkmJoiningDate: " + valueOf(docSnap.getString("hkmJoiningDate")));
                            }
                            requestStateMediator.set(authUserItem, UiState.SUCCESS, "Got Basic Info!", "BASIC INFO");
                            basicInfoLiveData.postValue(requestStateMediator);
                        }
                        Log.d(TAG, "firedoc id: " + docSnap.getId());
                    }
                } else {
                    requestStateMediator.set(null, UiState.EMPTY, "Nothing!", null);
                    basicInfoLiveData.postValue(requestStateMediator);
                }
            })
            .addOnFailureListener(e -> {
                requestStateMediator.set(null, UiState.ERROR, e.getMessage(), null);
                basicInfoLiveData.postValue(requestStateMediator);
            });
    return basicInfoLiveData;
}
// READ
// public Observable<AllCallbacks> getBasicInfoObservable(String emailKey) {
// final MutableLiveData<AllCallbacks> basicInfoLiveData = new MutableLiveData<>();
// AllCallbacks allCallbacks = new AllCallbacks();
//
// allCallbacks.set(null, Status.LOADING, "Please wait...");
// basicInfoLiveData.postValue(allCallbacks);
//
// FirebaseFirestore.getInstance()
// .collection(HelperConstants.COLL_AUTH_FOLK_MEMBERS)
// .whereEqualTo("email", emailKey)
// .get()
// .addOnSuccessListener(queryDocumentSnapshots -> {
//
// if (!queryDocumentSnapshots.isEmpty()) {
// List<DocumentSnapshot> docList = queryDocumentSnapshots.getDocuments();
// Log.d(TAG, "docList: " + docList);
//
// for (DocumentSnapshot docSnap : docList) {
// AuthUserItem authUserItem = docSnap.toObject(AuthUserItem.class);
// if (null != authUserItem) {
// Log.d(TAG, "AuthUserItem: " + authUserItem);
//
// if (!("").equals(valueOf(docSnap.getString("email")))) {
// authUserItem.setEmail(valueOf(docSnap.getString("email")));
// Log.d(TAG, "readBasicData: email: " + valueOf(docSnap.getString("email")));
// }
//
// if (!("").equals(valueOf(docSnap.getString("fullName")))) {
// authUserItem.setFullName(valueOf(docSnap.getString("fullName")));
// Log.d(TAG, "readBasicData: fullname: " + valueOf(docSnap.getString("fullName")));
// }
//
// if (!("").equals(valueOf(docSnap.getString("gmail")))) {
// authUserItem.setGmail(valueOf(docSnap.getString("gmail")));
// Log.d(TAG, "readBasicData: gmail: " + valueOf(docSnap.getString("gmail")));
// }
//
// if (!("").equals(valueOf(docSnap.getString("phone")))) {
// authUserItem.setPhone(valueOf(docSnap.getString("phone")));
// Log.d(TAG, "readBasicData: phone: " + valueOf(docSnap.getString("phone")));
// }
//
// if (!("").equals(valueOf(docSnap.getString("creationTimeStamp")))) {
// authUserItem.setCreationTimeStamp(docSnap.getString("creationTimeStamp"));
// Log.d(TAG, "readBasicData: creationTimeStamp: " + valueOf(docSnap.getString("creationTimeStamp")));
// }
//
// if (!("").equals(valueOf(docSnap.getString("hkmJoiningDate")))) {
// authUserItem.setHkmJoiningDate(valueOf(docSnap.getString("hkmJoiningDate")));
// Log.d(TAG, "readBasicData: hkmJoiningDate: " + valueOf(docSnap.getString("hkmJoiningDate")));
// }
//
// allCallbacks.set(authUserItem, Status.SUCCESS, "Got Basic Info!");
// basicInfoLiveData.postValue(allCallbacks);
// }
// Log.d(TAG, "firedoc id: " + docSnap.getId());
// }
// } else {
// allCallbacks.set(null, Status.EMPTY, "Nothing!");
// basicInfoLiveData.postValue(allCallbacks);
// }
// })
// .addOnFailureListener(e -> {
// allCallbacks.set(null, Status.ERROR, e.getMessage());
// basicInfoLiveData.postValue(allCallbacks);
// });
// return basicInfoLiveData;
// }
}
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2021.1 (64-bit)
#
# Filename : ram.sh
# Simulator : Aldec Riviera-PRO Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Fri Jul 09 13:12:19 +0800 2021
# SW Build 3247384 on Thu Jun 10 19:36:33 MDT 2021
#
# Copyright 1986-2021 Xilinx, Inc. All Rights Reserved.
#
# usage: ram.sh [-help]
# usage: ram.sh [-lib_map_path]
# usage: ram.sh [-noclean_files]
# usage: ram.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'ram.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "ram.sh - Script generated by export_simulation (Vivado v2021.1 (64-bit)-id)\n"
# Main steps
# Top-level flow: validate CLI arguments, prepare library mappings,
# then compile and simulate. $1/$2 are the script's CLI switch and its value.
run()
{
check_args $# $1
setup $1 $2
compile
simulate
}
# RUN_STEP: <compile>
# Run the generated compilation commands, mirroring all output (stdout and
# stderr) into compile.log while still showing it on the console.
compile()
{
source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
# Launch the Riviera-PRO batch simulator, logging to simulate.log and
# executing the generated simulate.do script.
simulate()
{
runvsimsa -l simulate.log -do "do {simulate.do}"
}
# STEP: setup
# Dispatch on the first CLI switch:
#   -lib_map_path <path> : map pre-compiled simulation libraries from <path>
#                          (errors out if <path> is missing)
#   -reset_run           : delete data from the previous run and exit
#   -noclean_files       : keep previous run data untouched
#   anything else        : map libraries from the default path
setup()
{
case $1 in
"-lib_map_path" )
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./ram.sh -help\" for more information)\n"
exit 1
fi
map_setup_file $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
map_setup_file $2
esac

# Add any setup/initialization commands here:-
# <user specific commands>
}
# Map library.cfg file
# Link the pre-compiled simulation libraries into this run directory.
# $1 (optional): directory containing library.cfg; falls back to the
# machine-specific default below when empty.
map_setup_file()
{
file="library.cfg"
if [[ ($1 != "") ]]; then
lib_map_path="$1"
else
# NOTE(review): hard-coded, machine-specific default emitted by Vivado;
# use -lib_map_path to override on other machines.
lib_map_path="V:/cpu/Minisys-1_2/Minisys-1_2.cache/compile_simlib/riviera"
fi
if [[ ($lib_map_path != "") ]]; then
src_file="$lib_map_path/$file"
# Only create the mapping when library.cfg actually exists at that path.
if [[ -e $src_file ]]; then
vmap -link $lib_map_path
fi
fi
}
# Delete generated data from the previous run
# Removes the per-run logs, waveform database and work libraries; each entry
# is removed only if it exists.
reset_run()
{
  local artifacts=(compile.log elaboration.log simulate.log dataset.asdb work riviera)
  local artifact
  for artifact in "${artifacts[@]}"; do
    if [[ -e $artifact ]]; then
      rm -rf "$artifact"
    fi
  done
}
# Check command line arguments
# $1: argument count passed to the script, $2: first CLI switch.
# Unknown single switches abort with an error; -help/-h prints usage.
check_args()
{
  case "$2" in
    "-lib_map_path" | "-noclean_files" | "-reset_run" | "-help" | "-h" )
      # recognised switch: nothing to validate here
      ;;
    * )
      if [[ ($1 == 1) ]]; then
        echo -e "ERROR: Unknown option specified '$2' (type \"./ram.sh -help\" for more information)\n"
        exit 1
      fi
      ;;
  esac
  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
# Print the help text and exit with status 1.
usage()
{
msg="Usage: ram.sh [-help]\n\
Usage: ram.sh [-lib_map_path]\n\
Usage: ram.sh [-reset_run]\n\
Usage: ram.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
# BUGFIX: $msg was expanded unquoted, so word splitting collapsed the
# message's spacing (and exposed it to globbing). Quote it so echo -e
# prints the text verbatim.
echo -e "$msg"
exit 1
}
# Launch script
# NOTE: $1/$2 are intentionally left unquoted: when no CLI arguments are
# given nothing is forwarded, so check_args sees the true count via $#.
run $1 $2
|
<html>
<head>
<title>Name Form</title>
</head>
<body>
<!-- No action attribute: the form posts back to the current URL. -->
<form method="POST">
<input type="text" name="firstname" placeholder="First Name">
<input type="text" name="lastname" placeholder="Last Name">
<input type="submit" value="Submit">
</form>
</body>
</html>
def second_largest(lst):
    """Return the second-largest element of lst (duplicates counted separately).

    Args:
        lst: A sequence of mutually comparable values with at least two elements.

    Returns:
        The element that sorts second-from-last, e.g. [5, 5, 1] -> 5.

    Raises:
        ValueError: If lst has fewer than two elements (previously this
            surfaced as a bare IndexError).
    """
    ordered = sorted(lst)
    if len(ordered) < 2:
        raise ValueError("second_largest() requires at least two elements")
    return ordered[-2]


if __name__ == "__main__":
    # BUGFIX: the original module-level call referenced an undefined name
    # `lst`, raising NameError on import. Demonstrate with a concrete list.
    result = second_largest([12, 45, 2, 41, 31])
    print(result)
#! /usr/bin/env bash
# Resolve the directory containing this script so the build works from any CWD.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# Publish a framework-dependent (not self-contained) Release build into ./build.
dotnet publish --configuration Release --output "$SCRIPT_DIR/build" --verbosity normal --self-contained false
# Ship the launcher scripts alongside the published output.
cp "$SCRIPT_DIR/scripts/run-tool.sh" "$SCRIPT_DIR/build/run.sh"
cp "$SCRIPT_DIR/scripts/run-tool.ps1" "$SCRIPT_DIR/build/run.ps1"
|
const fs = require('fs');
const path = require('path');
const parser = require('../parseBnf');
// Read the named fixture grammar (<filename>.grammar, next to this test file).
// BUGFIX: the template literal said `$(unknown).grammar` — shell-style $( )
// instead of JS ${ } — so the `filename` argument was ignored and every call
// tried to read the literal file "$(unknown).grammar".
const readGrammar = filename => {
  return fs.readFileSync(path.join(__dirname, `${filename}.grammar`), 'utf8');
};
// Specs for parseBnf: each fixture is a .grammar file loaded via readGrammar;
// parser.parseGrammar returns an array of productions shaped as {lhs, rhs}.
describe('parseBnf', () => {
  test('simple definition', () => {
    const grammar = readGrammar('simple');
    const parsed = parser.parseGrammar(grammar)[0];
    expect(parsed.lhs).toBe('LHS');
    expect(parsed.rhs).toBe('RHS');
  });
  test('multiple definitions', () => {
    const grammar = readGrammar('multiple_definitions');
    const parsed = parser.parseGrammar(grammar);
    expect(parsed.length).toBe(3);
  });
  // Alternatives (A | B | C) parse to an array-valued rhs.
  test('RHS alternatives singleline', () => {
    const grammar = readGrammar('rhs_multiple_singleline');
    const parsed = parser.parseGrammar(grammar)[0];
    expect(parsed.lhs).toBe('ClassAxiom');
    expect(Array.isArray(parsed.rhs)).toBe(true);
    expect(parsed.rhs.length).toBe(4);
  });
  test('RHS alternatives multiline', () => {
    const grammar = readGrammar('rhs_multiple_multiline');
    const parsed = parser.parseGrammar(grammar)[0];
    expect(parsed.lhs).toBe('Assertion');
    expect(Array.isArray(parsed.rhs)).toBe(true);
  });
  // Function-style productions parse to {type: 'function', name, params}.
  test('RHS function with single param', () => {
    const grammar = readGrammar('rhs_function_single_param');
    const parsed = parser.parseGrammar(grammar)[0];
    expect(parsed.lhs).toBe('InverseObjectProperty');
    expect(parsed.rhs.type).toBe('function');
    expect(parsed.rhs.name).toBe('ObjectInverseOf');
    expect(parsed.rhs.params[0]).toBe('ObjectProperty');
  });
  test('RHS function with multiple params', () => {
    const grammar = readGrammar('rhs_function');
    const parsed = parser.parseGrammar(grammar)[0];
    expect(parsed.lhs).toBe('TransitiveObjectProperty');
    expect(parsed.rhs.type).toBe('function');
    expect(parsed.rhs.name).toBe('TransitiveObjectProperty');
    expect(parsed.rhs.params.length).toBe(2);
  });
  test('RHS function with mixed params', () => {
    const grammar = readGrammar('rhs_mixed_function');
    const parsed = parser.parseGrammar(grammar)[0];
    expect(parsed.lhs).toBe('DisjointDataProperties');
    expect(parsed.rhs.type).toBe('function');
    expect(parsed.rhs.name).toBe('DisjointDataProperties');
    expect(parsed.rhs.params.length).toBe(4);
  });
  // Repetition operators map to dedicated rhs types.
  test('RHS zero or more', () => {
    const grammar = readGrammar('rhs_zero_or_more');
    const parsed = parser.parseGrammar(grammar)[0];
    expect(parsed.lhs).toBe('axiomAnnotations');
    expect(parsed.rhs.type).toBe('zeroOrMore');
    expect(parsed.rhs.name).toBe('Annotation');
  });
  test('RHS zero or one', () => {
    const grammar = readGrammar('rhs_zero_or_one');
    const parsed = parser.parseGrammar(grammar)[0];
    expect(parsed.lhs).toBe('axiomAnnotations');
    expect(parsed.rhs.type).toBe('zeroOrOne');
    expect(parsed.rhs.name).toBe('Annotation');
  });
  // Smoke tests: only assert that parsing the larger fixtures does not throw.
  describe('examples', () => {
    test('section_1', () => {
      const grammar = readGrammar('section_1');
      parser.parseGrammar(grammar);
    });
    // Skipped: the full grammar is not parsed successfully yet.
    test.skip('full', () => {
      const grammar = readGrammar('full');
      parser.parseGrammar(grammar);
    });
    test('implemented', () => {
      const grammar = readGrammar('implemented');
      parser.parseGrammar(grammar);
    });
  });
});
|
package com.ind.tired.web.json.util;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.ind.tired.web.json.Encoders;
import java.util.Map;
public class JsonMaker {
    /**
     * Builds a JSON object node from the given property map: each map entry
     * becomes a field whose value is converted via Encoders.asJsonNode.
     *
     * @param properties field names mapped to arbitrary values
     * @return an ObjectNode containing one field per map entry
     */
    public static JsonNode make(Map<String, Object> properties) {
        final ObjectNode objectNode = JsonNodeFactory.instance.objectNode();
        for (Map.Entry<String, Object> property : properties.entrySet()) {
            objectNode.set(property.getKey(), Encoders.asJsonNode(property.getValue()));
        }
        return objectNode;
    }
}
|
#!/bin/sh
EXIFTOOL=exiftool
# Print usage text to stdout. The heredoc body and its EOF terminator must
# stay at column 0 because the plain << form (not <<-) is used.
show_help() {
cat << EOF
Usage: ${0##*/} [-hgd] [FILE]
Strip potentially privacy-relevant EXIF information from files while preserving
technical information.
-h display this help and exit
-g preserve GPS data
-d dry-run. Just print options and exit
EOF
}
# Abort early when exiftool is not on PATH, with distro install hints.
type $EXIFTOOL > /dev/null 2>&1 || {
echo >&2 "This script requires exiftool but it's not installed."
echo -n >&2 "exiftool is available in Debian as libimage-exiftool-perl"
echo >&2 " and perl-image-exiftool in Arch Linux."
echo >&2 "Aborting."
exit 1;
}
# Location-related tags: stripped by default, preserved only with -g.
GPS_TAGS="-GPSLatitude -GPSLatitudeRef -GPSLongitude -GPSLongitudeRef \
-GPSAltitude -GPSAltitudeRef -GPSDateStamp -GPSTimeStamp \
-GPSDateTime -GPSTrack -GPSTrackRef -GPSSpeed -GPSSpeedRef \
-GPSImgDirection -GPSImgDirectionRef -GPSPitch -GPSRoll"
# Technical tags copied back from the original after wiping everything.
IMAGE_TAGS="\
-Make -Model -LensModel -ApertureValue -FlashSetting -FlashType \
-FlashExposureComp -FlashMode -ManufacturersModelName -Orientation \
-Software -ModifyDate -Copyright -ExposureTime -FNumber \
-ExposureProgram -ISO -CreateDate -FocalLength -ISOSetting \
-FocusPosition -FocusDistance -ExifImageWidth -ExifImageHeight \
-ColorSpace -Rights -ImageWidth -ImageHeight -ImageSize \
-ExposureProgram -ExposureMode -Lens -ShutterSpeedValue \
-DepthOfField -FieldOfView -FocalLength -XResolution -YResolution"
PRESERVE_GPS=0
DRY_RUN=""
# Parse flags: -h help, -g keep GPS tags, -d dry-run (print instead of run).
while getopts "ghd" opt; do
case "$opt" in
h)
show_help
exit 0
;;
g)
PRESERVE_GPS=1
;;
d)
# Prefixing the final command with "echo" turns it into a dry run.
DRY_RUN="echo"
;;
esac
done
# delete options found
shift "$((OPTIND-1))"
# Without -g, the GPS tag list is emptied so GPS data is not copied back.
if [ $PRESERVE_GPS -ne 1 ]; then
GPS_TAGS=""
fi
# Wipe all metadata (-all=), then copy back (-tagsFromFile @) only the
# whitelisted technical tags, plus GPS tags when preserved.
$DRY_RUN $EXIFTOOL -all= -tagsFromFile @ $IMAGE_TAGS $GPS_TAGS "$@"
|
require './lib/maxon/store'

# Specs for Maxon::Store, constructed here with a SortedSet backing store and
# a result size of 4.
describe Maxon::Store do
  # Backing fixture: SortedSet keeps members ordered, duplicates excluded.
  let(:storage) { SortedSet.new([12, 23, 1, 43, 67, 2, 90, 11]) }
  let(:store) { described_class.new(storage, 4) }

  describe '#show_result' do
    it 'return specified number of elements' do
      # Expects the four largest members of the fixture, in any order.
      expect(store.show_result).to match_array([90, 67, 43, 23])
    end
  end

  describe '#clear_storage' do
    it 'return empty set' do
      expect(store.clear_storage).to be_empty
    end
  end
end
|
// App bootstrap IIFE: wires the Polymer auto-binding template, header
// animations and auth helpers. Receives `document` to keep lookups local.
(function(document) {
  'use strict';

  // Grab a reference to our auto-binding template
  // and give it some initial binding values
  // Learn more about auto-binding templates at http://goo.gl/Dx1u2g
  var app = document.querySelector('#app');

  // Sets app default base URL
  app.baseUrl = '/';

  // app.displayInstalledToast = function() {
  //   // Check to make sure caching is actually enabled—it won't be in the dev environment.
  //   if (!Polymer.dom(document).querySelector('platinum-sw-cache').disabled) {
  //     Polymer.dom(document).querySelector('#caching-complete').show();
  //   }
  // };

  // Listen for template bound event to know when bindings
  // have resolved and content has been stamped to the page
  app.addEventListener('dom-change', function() {
    // console.log('Our app is ready to rock!');
  });
// See https://github.com/Polymer/polymer/issues/1381
window.addEventListener('WebComponentsReady', function() {
var scrollHeaderPanel = document.querySelector('paper-scroll-header-panel');
var scrollThreshold = document.querySelector('#scrollThreshold');
var arrowUp = document.querySelector('#arrowUp');
var firebaseLogin = document.querySelector("#firebaseLogin");
var container = document.querySelector('.container');
/* background for toolbar when it is at its full size */
var header = document.querySelector('#headerBg');
var condensedHeader = document.querySelector('#condensedHeaderBg');
header.style.backgroundImage='url'+'('+app.baseUrl+'images/header.jpg'+')';
condensedHeader.style.backgroundImage='url'+'('+app.baseUrl+'images/header2.jpg'+')';
scrollThreshold.scrollTarget = scrollHeaderPanel.scroller;
arrowUp.hidden = true;
scrollHeaderPanel.addEventListener('content-scroll', function() {
if (scrollThreshold._scrollTop > 192) {
arrowUp.hidden = false;
} else {
arrowUp.hidden = true;
}
});
});
// Main area's paper-scroll-header-panel custom condensing transformation of
// the appName in the middle-container and the bottom title in the bottom-container.
// The appName is moved to top and shrunk on condensing. The bottom sub title
// is shrunk to nothing on condensing.
window.addEventListener('paper-header-transform', function(e) {
var appName = Polymer.dom(document).querySelector('#mainToolbar .app-name');
var middleContainer = Polymer.dom(document).querySelector('#mainToolbar .middle-container');
var bottomContainer = Polymer.dom(document).querySelector('#mainToolbar .bottom-container');
var detail = e.detail;
var heightDiff = detail.height - detail.condensedHeight;
var yRatio = Math.min(1, detail.y / heightDiff);
// appName max size when condensed. The smaller the number the smaller the condensed size.
var maxMiddleScale = 0.50;
var auxHeight = heightDiff - detail.y;
var auxScale = heightDiff / (1 - maxMiddleScale);
var scaleMiddle = Math.max(maxMiddleScale, auxHeight / auxScale + maxMiddleScale);
var scaleBottom = 1 - yRatio;
// Move/translate middleContainer
//Polymer.Base.transform('translate3d(0,' + yRatio * 100 + '%,0)', middleContainer);
Polymer.Base.transform('scale(' + scaleMiddle + ') translateZ(0)', middleContainer);
middleContainer.style.opacity=1-yRatio;
// Scale bottomContainer and bottom sub title to nothing and back
//Polymer.Base.transform('scale(' + scaleBottom + ') translateZ(0)', bottomContainer);
// Scale middleContainer appName
Polymer.Base.transform('scale(' + scaleMiddle + ') translateZ(0)', appName);
});
  // Close the navigation drawer (used after a menu selection).
  app.closeDrawer = function() {
    app.$.paperDrawerPanel.closeDrawer();
  };
  // Full page reload.
  app.refreshApp = function() {
    location.reload();
  };
  // Scroll page to top and expand header
  app.scrollPageToTop = function() {
    app.$.headerPanelMain.scrollToTop(true);
  };
  app.showArrow = function() {
    this.async(function () {
      // NOTE(review): `scrollThreshold` here is not the local variable from
      // the WebComponentsReady handler above (that one is function-scoped);
      // presumably this resolves via the browser's named-element global for
      // id="scrollThreshold" — TODO confirm.
      scrollThreshold.clearTriggers();
    });
  }
  app.openSearchDialog = function() {
    app.$.searchDialog.open();
  }
  // Polymerfire
  // `app.error` holds the last auth error; cleared before each auth attempt.
  app.error = null;
  app.signIn = function() {
    this.error = null;
    this.$.auth.signInWithPopup();
  };
  app.signOut = function() {
    this.error = null;
    this.$.auth.signOut();
  }
  app.showError = function(e) {
    this.error = e.detail;
    app.toggleAuthErrorToast();
  };
  // End Polymerfire
  // Toggle between sign-in and sign-out depending on current auth state.
  app.loginTap = function(e) {
    if (app.user) {
      app.signOut();
    } else {
      app.signIn();
    }
  };
  app.toggleAuthErrorToast = function() {
    // Constrain the toast to the pages container before toggling it.
    this.$.authErrorToast.fitInto = this.$.pagesContainer;
    this.$.authErrorToast.toggle();
  }
  // Pagination hook, currently disabled.
  app._loadMoreData = function() {
    // this.limitToFirst += 1;
    // console.log('startAt -> ', this.startAt, 'limitToFirst -> ', this.limitToFirst, 'endAt -> ', this.endAt, 'limitToLast -> ', this.limitToLast);
    // document.$.ajax.generateRequest();
  };
})(document);
|
<gh_stars>1-10
import should from 'should'
import BigNumber from 'bignumber.js'
import jibrelContractsApi from '../index'
// Fail fast: the contract test parameters JSON must be supplied via env.
if (process.env.JSON_PATH == null) {
  throw (new Error('JSON_PATH env variable not found'))
}

const testParams = require(process.env.JSON_PATH)
const erc20Named = jibrelContractsApi.contracts.erc20Named

// RPC endpoint defaults to a local node unless overridden via env.
const rpcaddr = process.env.RPCADDR || '127.0.0.1'
const rpcport = process.env.RPCPORT || 8545
const contractAddress = testParams.contracts.JNTViewERC20

// Integration specs: each call hits the contract over JSON-RPC, so only the
// shape of the result is asserted, not specific token metadata values.
describe('ERC20Named API', function() {
  describe('name', function() {
    it('returns token name', function(done) {
      erc20Named.name({
        rpcaddr,
        rpcport,
        contractAddress,
      }).then((result) => {
        // Any non-empty string is accepted.
        result.should.be.a.String()
        result.length.should.be.greaterThan(0)
        done()
      }).catch(done)
    })
  })

  describe('symbol', function() {
    it('returns token symbol', function(done) {
      erc20Named.symbol({
        rpcaddr,
        rpcport,
        contractAddress,
      }).then((result) => {
        result.should.be.a.String()
        result.length.should.be.greaterThan(0)
        done()
      }).catch(done)
    })
  })

  describe('decimals', function() {
    it('returns token decimals', function(done) {
      erc20Named.decimals({
        rpcaddr,
        rpcport,
        contractAddress,
      }).then((result) => {
        // result exposes greaterThan/toNumber — presumably a BigNumber
        // (see the bignumber.js import above); must be positive.
        result.greaterThan(0).should.be.equal(true)
        result.toNumber().should.be.greaterThan(0)
        done()
      }).catch(done)
    })
  })
})
|
<filename>src/fields/TagsInput/index.js
cm.define('Com.TagsInput', {
'modules' : [
'Params',
'Events',
'Messages',
'Structure',
'DataConfig',
'Stack'
],
'events' : [
'onRender',
'onAdd',
'onRemove',
'onChange',
'onOpen',
'onClose',
'onReset'
],
'params' : {
'node' : cm.node('input', {'type' : 'text'}),
'container' : null,
'name' : '',
'embedStructure' : 'replace',
'data' : [],
'maxSingleTagLength': 255,
'max' : 0, // Not implemented
'autocomplete' : false,
'icons' : {
'add' : 'icon default linked',
'remove' : 'icon default linked'
},
'Com.Autocomplete' : {
'clearOnEmpty' : false
}
}
},
function(params){
var that = this,
nodes = {},
tags = [],
items = {};
that.isDestructed = null;
that.value = null;
that.components = {};
that.isAutocomplete = false;
// Component bootstrap: apply params, render the DOM, register in the stack,
// then seed tags from both the source input's value and params['data'].
var init = function(){
    var sourceTags,
        splitTags;
    // Init modules
    that.setParams(params);
    that.convertEvents(that.params['events']);
    that.getDataConfig(that.params['node']);
    // Render
    validateParams();
    render();
    that.addToStack(nodes['container']);
    that.triggerEvent('onRender');
    // Set tags: merge the comma-separated value of the source input with the
    // tags supplied via params['data'], adding each one.
    splitTags = that.params['node'].value.split(',');
    sourceTags = cm.extend(that.params['data'], splitTags);
    cm.forEach(sourceTags, function(tag){
        addTag(tag);
    });
};
// Normalise params: prefer the source node's `name` attribute when present,
// and cache the autocomplete flag on the instance.
var validateParams = function(){
    if(cm.isNode(that.params['node'])){
        that.params['name'] = that.params['node'].getAttribute('name') || that.params['name'];
    }
    that.isAutocomplete = that.params['autocomplete'];
};
// Build the component DOM (hidden value input + visible tags area + adder
// input), wire the click-to-focus behaviour, and embed it into the page.
var render = function(){
    // Structure
    nodes['container'] = cm.node('div', {'class' : 'com__tags-input'},
        nodes['hidden'] = cm.node('input', {'type' : 'hidden'}),
        nodes['inner'] = cm.node('div', {'class' : 'inner input'},
            nodes['tags'] = cm.node('div', {'class' : 'tags'})
        )
    );
    renderInput();
    // Attributes: the hidden input carries the form value under params['name'].
    if(that.params['name']){
        nodes['hidden'].setAttribute('name', that.params['name']);
    }
    // Events: clicking anywhere in the container (outside the tags area)
    // focuses the adder input.
    cm.addEvent(nodes['container'], 'click', function(e){
        var target = cm.getEventTarget(e);
        if(!cm.isParent(nodes['tags'], target, true)){
            nodes['input'].focus();
        }
    });
    // Append
    that.embedStructure(nodes['container']);
};
var renderInput = function(){
// Structure
nodes['input'] = cm.node('input', {'type' : 'text', 'maxlength' : that.params['maxSingleTagLength'], 'class' : 'input adder', 'placeholder' : that.message('placeholder')});
cm.appendChild(nodes['input'], nodes['inner']);
// Autocomplete
if(that.isAutocomplete){
cm.getConstructor('Com.Autocomplete', function(classConstructor){
that.components['autocomplete'] = new classConstructor(cm.merge(that.params['Com.Autocomplete'], {
'events' : {
'onClickSelect' : function(){
addAdderTags(true);
}
}
}));
});
that.components['autocomplete'].setTarget(nodes['input']);
that.components['autocomplete'].setInput(nodes['input']);
}
// Add new tag on comma
cm.addEvent(nodes['input'], 'keypress', function(e){
if(e.charCode === 44 || e.charCode === 59){
cm.preventDefault(e);
addAdderTags(true);
that.isAutocomplete && that.components['autocomplete'].hide();
}
});
// Add new tag on enter or escape
cm.addEvent(nodes['input'], 'keydown', function(e){
if(cm.isKey(e, ['enter', 'escape'])){
cm.preventDefault(e);
addAdderTags(true);
that.isAutocomplete && that.components['autocomplete'].hide();
}
});
cm.addEvent(nodes['input'], 'focus', function(){
cm.addClass(nodes['container'], 'active');
cm.addClass(nodes['inner'], 'input-focus');
});
cm.addEvent(nodes['input'], 'blur', function(){
addAdderTags(true);
cm.removeClass(nodes['container'], 'active');
cm.removeClass(nodes['inner'], 'input-focus');
});
};
var addAdderTags = function(execute){
var sourceTags = nodes['input'].value.split(',');
cm.forEach(sourceTags, function(tag){
addTag(tag, execute);
});
nodes['input'].value = '';
that.isAutocomplete && that.components['autocomplete'].clear();
};
var addTag = function(tag, execute){
tag = tag.trim();
if(tag && tag.length && !/^[\s]*$/.test(tag) && !cm.inArray(tags, tag)){
tags.push(tag);
renderTag(tag);
setHiddenInputData();
// Execute events
if(execute){
// API onChange Event
that.triggerEvent('onChange', {'tag' : tag});
// API onAdd Event
that.triggerEvent('onAdd', {'tag' : tag});
}
}
};
var renderTag = function(tag){
var item = {
'tag' : tag
};
// Structure
item['container'] = cm.node('div', {'class' : 'item'},
cm.node('div', {'class' : 'inner'},
cm.node('div', {'class' : 'text', 'title' : tag}, tag),
item['button'] = cm.node('div', {'class' : that.params['icons']['remove'], 'title' : that.message('remove')})
)
);
item['anim'] = new cm.Animation(item['container']);
// Append
cm.appendChild(item['container'], nodes['tags']);
// Add click event on "Remove Tag" button
cm.addEvent(item['button'], 'click', function(){
removeTag(item);
});
// Push to global array
items[tag] = item;
};
var removeTag = function(item){
// Remove tag from data
tags = cm.arrayRemove(tags, item['tag']);
delete items[item['tag']];
setHiddenInputData();
// API onChange Event
that.triggerEvent('onChange', {
'tag' : item['tag']
});
// API onRemove Event
that.triggerEvent('onRemove', {
'tag' : item['tag']
});
// Hide
cm.remove(item['container']);
item = null;
};
var setHiddenInputData = function(){
that.value = tags.join(',');
nodes['hidden'].value = that.value;
};
/* ******* MAIN ******* */
that.destruct = function(){
var that = this;
if(!that.isDestructed){
that.isDestructed = true;
that.removeFromStack();
}
return that;
};
that.get = function(){
return !cm.isEmpty(that.value) ? that.value : '';
};
that.set = function(value){
that.add(value);
return that;
};
that.add = function(tag /* or tags comma separated or array */){
var sourceTags;
if(!tag){
sourceTags = [];
}else if(cm.isArray(tag)){
sourceTags = tag;
}else{
sourceTags = tag.split(',');
}
cm.forEach(sourceTags, function(tag){
addTag(tag, true);
});
return that;
};
that.remove = function(tag){
var sourceTags;
if(!tag){
sourceTags = [];
}else if(cm.isArray(tag)){
sourceTags = tag;
}else{
sourceTags = tag.split(',');
}
cm.forEach(sourceTags, function(tag){
if(cm.inArray(tags, tag)){
removeTag(items[tag]);
}
});
return that;
};
that.reset = function(){
cm.forEach(items, function(item){
removeTag(item, true);
});
that.triggerEvent('onReset');
return that;
};
that.getAutocomplete = function(){
return that.components['autocomplete'];
};
init();
});
/* ****** FORM FIELD COMPONENT ******* */
// Registers 'tags' as a form-field type so forms can declare a tags field
// and have Com.AbstractFormField instantiate a Com.TagsInput for it.
Com.FormFields.add('tags', {
    'node' : cm.node('input', {'type' : 'text', 'class' : 'input'}),
    'fieldConstructor' : 'Com.AbstractFormField',
    'constructor' : 'Com.TagsInput'
});
|
# Exception and warning types for box-intersection operations.
__all__ = ['NoIntersection', 'BxViolation']


class NoIntersection(Exception):
    """No intersection of boxes occur."""


class BxViolation(SyntaxWarning):
    """Violation of Bx properties."""
|
<filename>backup_daemon/lib/run_job.rb<gh_stars>1-10
require 'setup_job'
require 'backup_job'
require 'restore_job'
require 'maintenance_job'
require 'file_index'
require 'custom_find'
require 'stats'
#require 'sys/cpu'
#include Sys
require 'zfs'
include Zfs
# Dispatches runnable backup-system jobs (setup / maintenance / backup /
# restore) against local ZFS-backed storage, and records per-schedule and
# per-node disk statistics.
class RunJob

  # Testing if our IP address matches what we can resolve the hostname to.
  # If hostname and IP address are set in the object, resolve the hostname and
  # see if we can find an IP address that matches. If IP address is not matched
  # we'll fill it in with the first IP address we resolve.
  #
  # Returns the matched (or first resolved) IP address string, or false when
  # resolution fails or the given IP matches none of the resolved addresses.
  def self.check_hostname(hostname, ip_address=nil)
    begin
      addrinfo = Socket.getaddrinfo(hostname, 22)
    rescue SocketError
      return false
    end
    addrinfo.each do |addr|
      if ip_address && ip_address == addr[3]
        return addr[3]
      elsif ip_address
        # An IP was supplied but this resolved address doesn't match; keep looking.
        next
      else
        # No IP supplied: accept the first resolved address.
        return addr[3]
      end
    end
    return false
  end

  # Processes each job that is eligible to run (its state machine can reach
  # :finish). Each job is moved to running, dispatched by `operation`, marked
  # finished or errored, and finally has its schedule stats recorded.
  def self.run(jobs)
    jobs.each do |job|
      if job.aasm_events_for_current_state.include? :finish
        unless job.aasm_current_state == :running
          job.run
          job.save!
        end
        # Not sure this should live on. Or what ops it should be run in.
        if job.data['hostname'] && job.data['ip_address'] && job.data['hostname'][:value] && job.data['ip_address'][:value]
          unless self.check_hostname(job.data['hostname'][:value], job.data['ip_address'][:value])
            DaemonKit.logger.warn "Hostname #{job.data['hostname'][:value]} does not match IP address given. Tested by resolving hostname. SKIPPING!"
            next
          end
        end
        ##### SETUP ######
        if job.operation == 'setup'
          setupJob = SetupJob.new(:ip_address => job.data['ip_address'][:value],
                                  :hostname => job.data['hostname'][:value],
                                  :size => job.data['quota'][:value]
                                  )
          rstatus = setupJob.create_zfs_fs!
          path = setupJob.path
          if rstatus[0] == 0 && path[0] == 0
            job.finish
            # NOTE(review): Time.now_zone is unusual (vs Time.zone.now) —
            # presumably a project extension; confirm it exists.
            job.finished_at = Time.now_zone
            # Record the created filesystem path under this job's schedule id.
            if job.data['backup_dir'] && backup_dirs = job.data['backup_dir'][:value]
              backup_dirs = YAML::load(backup_dirs)
              backup_dirs[job.schedule_id] = path[1]
              job.data['backup_dir'] = { :value => backup_dirs }
            else
              job.data['backup_dir'] = { :value => {job.schedule_id => path[1]} }
            end
            job.save!
            DaemonKit.logger.info "Successfully setup host #{job.data['ip_address'][:value]}"
          else
            job.error
            job.data['error'] = {'exit_code' => rstatus[0], 'message' => rstatus[1]}
            job.save!
            DaemonKit.logger.warn "Error while trying to setup host, see job id: #{job.id} for more information."
          end
        ##### MAINTENANCE ######
        elsif job.operation == 'maintenance'
          drop_snaps = job.data['drop_snaps']
          backup_dirs = YAML::load(job.data['backup_dir'][:value])
          rstatus = MaintenanceJob.destroy_snaps(drop_snaps, backup_dirs[job.schedule_id])
          if rstatus[0] == 0
            job.finish
            DaemonKit.logger.info "Successfully ran maintenance job id: #{job.id}."
          else
            job.error
            job.data['error'] = {'exit_code' => rstatus[0], 'message' => rstatus[1]}
            DaemonKit.logger.warn "Error while running maintenance job. See job id: #{job.id} for more information."
          end
          job.save!
        ##### BACKUP ######
        elsif job.operation == 'backup'
          # Make sure we have a backup dir for this job's schedule before we go on.
          begin
            backup_dirs = YAML::load(job.data['backup_dir'][:value])
          rescue NoMethodError
            DaemonKit.logger.warn "Could not find a backup_dir for host #{job.data['hostname'][:value]}, schedule id #{job.schedule_id}, SKIPPING!"
            next
          end
          unless backup_dirs && backup_dirs[job.schedule_id]
            DaemonKit.logger.warn "Could not find a backup_dir for host #{job.data['hostname'][:value]}, schedule id #{job.schedule_id}, SKIPPING!"
            next
          end
          backupJob = BackupJob.new(:ip_address => job.data['ip_address'][:value],
                                    :hostname => job.data['hostname'][:value],
                                    :host_type => job.data['host_type'][:value],
                                    :local_backup_dir => backup_dirs[job.schedule_id],
                                    :exclusions => job.data['exclusions'][:value],
                                    :directories => job.data['backup_directories'][:value]
                                    )
          if job.data['ssh_port'] && job.data['ssh_port'][:value]
            backupJob.port = job.data['ssh_port'][:value]
          end
          # rsync pull, then snapshot, then index the snapshot's files.
          rbsync = backupJob.run(job.data)
          rstatus = rbsync.pull
          snap_status = []
          if rstatus[0] == 0
            snap_status = backupJob.do_snapshot
          end
          file_index_saved = false
          if snap_status[0] == 0
            begin
              compressed_file_index = CustomFind.find(snap_status[1], "#{backup_dirs[job.schedule_id]}/.zfs/snapshot")
              # NOTE(review): basepath uses '.zfs/snapshots' while the find
              # above uses '.zfs/snapshot' — one of these is likely a typo;
              # confirm against the FileIndex consumers before changing.
              file_index = FileIndex.new(:data => compressed_file_index,
                                         :basepath => "#{backup_dirs[job.schedule_id]}/.zfs/snapshots",
                                         :host_id => job.host_id,
                                         :schedule_id => job.schedule_id,
                                         :snapname => snap_status[1]
                                         )
              if file_index.save!
                file_index_saved = true
              end
            rescue Errno::ENOENT
            end
          end
          if rstatus[0] == 0 && snap_status[0] == 0 && file_index_saved
            job.finish
            job.data['new_snapshot'] = snap_status[1]
            job.finished_at = Time.now_zone
            job.save!
            DaemonKit.logger.info "Successfully ran backup job for #{job.data['ip_address'][:value]}"
          else
            job.error
            # Collect every failure from the three phases (rsync, snapshot,
            # file index) so none is silently dropped.
            exit_code = []
            message = []
            if rstatus[0] != 0
              exit_code << rstatus[0]
              message << rstatus[1]
            end
            if snap_status[0] != 0
              exit_code << snap_status[0]
              message << snap_status[1]
            end
            unless file_index_saved
              message << "File index did not properly save to the database."
            end
            # Fix: report the accumulated messages. Previously this stored
            # rstatus[1], which discarded snapshot and file-index errors.
            job.data['error'] = {'exit_code' => exit_code, 'message' => message}
            job.save!
            DaemonKit.logger.warn "Error while trying to run backup job for host, #{job.data['ip_address'][:value]}. See job id: #{job.id} for more information."
          end
        ##### RESTORE ######
        elsif job.operation == 'restore'
          if job.data['restore_data'] && job.data['backup_dir'] && backup_dirs = job.data['backup_dir'][:value]
            backup_dirs = YAML::load(backup_dirs)
            restoreJob = RestoreJob.new(
              :data => job.data['restore_data'],
              :backup_dir => backup_dirs[job.schedule_id]
            )
            rstatus = restoreJob.run
            if rstatus[0] == 0
              job.finish
              job.data['download_url'] = rstatus[1]
              job.save!
              DaemonKit.logger.info "Successfully ran restore job for Restore ID: #{job.data['restore_id']}"
            else
              job.error
              # Fix: `exit_code` was never defined in this branch, which
              # raised NameError on every restore failure; use the restore
              # job's exit status instead.
              job.data['error'] = {'exit_code' => rstatus[0], 'message' => rstatus[1]}
              job.save!
              DaemonKit.logger.warn "Error while trying to run restore job for Restore ID: #{job.data['restore_id']}. See job id: #{job.id} for more information."
            end
          end
        end
        # Add stats to the db.
        get_schedule_stats(job)
      end # End if
    end # End each
  end # End run method

  # Records a Stat row (disk used/available) for the schedule backing `job`,
  # resolving the backup mountpoint to a ZFS filesystem first. Returns nil
  # (and logs a warning) on any lookup failure.
  def self.get_schedule_stats(job)
    begin
      backup_dirs = YAML::load(job.data['backup_dir'][:value])
    rescue NoMethodError, TypeError
      DaemonKit.logger.warn "Could not find a backup_dir for host #{job.data['hostname'][:value]}, while trying to add stats, SKIPPING!"
      return nil
    end
    stat = nil
    if backup_dirs && backup_dirs[job.schedule_id]
      # Resolve the mountpoint to its ZFS filesystem name.
      list = zfs_list("target" => backup_dirs[job.schedule_id])
      filesystem = ""
      if list[0] == 0
        filesystem = list[1].first['name']
      else
        DaemonKit.logger.warn "Get_stats: Can't convert mountpoint into filesystem name"
        return nil
      end
      schedule_stats = zfs_get("flags" => "p", "target" => filesystem, "field" => "property,value", "properties" => "used,available")
      if schedule_stats[0] == 0
        stat = Stat.new
        stat.schedule_id = job.schedule_id
        schedule_stats[1].each do |schedule_stat|
          if schedule_stat["property"] == 'used'
            stat.disk_used = schedule_stat["value"]
          elsif schedule_stat["property"] == 'available'
            stat.disk_avail = schedule_stat["value"]
          end
        end
        if stat.disk_used && stat.disk_avail
          stat.save!
        else
          DaemonKit.logger.warn "Get_stats: zfs_get return sucessfully, but I couldn't find \"used\" or \"available\" disk space for backup_dir #{backup_dirs[job.schedule_id]}"
        end
      else
        DaemonKit.logger.warn "Get_stats: zfs_get returned with an error, #{schedule_stats[1]}"
      end
    end
  end #End get_schedule_stats

  # Records a Stat row for the whole node: CPU load average (parsed from
  # `uptime`) plus disk used/available of the configured backup zvol.
  def self.get_node_stats(node)
    settings = DaemonKit::Config.load('settings').to_h
    stat = Stat.new
    # NOTE(review): parsing `uptime` by word position is fragile across
    # platforms; sys/cpu was apparently tried and abandoned (see commented
    # requires at the top of the file).
    stat.cpu_load_avg = `uptime`.chomp.split(' ')[9,12].join(' ')
    # Sys CPU Bus Errors... :-(
    #stat.cpu_load_avg = CPU.load_avg
    #stat.cpu_load_avg = [0.1, 0.2, 0.3]
    stat.node_id = node.id
    unless backup_zvol = settings['backup_zvol']
      DaemonKit.logger.warn "backup_root not specified in settings.yml, can't add stats!"
      return nil
    end
    # args = {"flags" => "rHp", "field" => "field1,field2", "source" => "source1,source2", "properties" => "nfsshare,iscsishare",
    #         "target" => "filesystem|volume|snapshot"}
    node_stats = zfs_get("flags" => "p", "target" => settings['backup_zvol'], "field" => "property,value", "properties" => "used,available")
    if node_stats[0] == 0
      node_stats[1].each do |node_stat|
        if node_stat["property"] == 'used'
          stat.disk_used = node_stat["value"]
        elsif node_stat["property"] == 'available'
          stat.disk_avail = node_stat["value"]
        end
      end
      if stat.disk_used && stat.disk_avail
        stat.save!
      else
        DaemonKit.logger.warn "Get_stats: zfs_get return sucessfully, but I couldn't find \"used\" or \"available\" disk space for node #{node.id}"
      end
    else
      DaemonKit.logger.warn "Get_stats: zfs_get returned with an error, #{node_stats[1]}"
    end
  end # End get_node_stats
end # End Class
def lowestNum(num1, num2):
    """Return the smaller of ``num1`` and ``num2``.

    Uses the builtin ``min`` instead of a hand-rolled comparison. For equal
    values the result compares equal either way (``min`` returns the first
    argument, the original branch returned the second).
    """
    return min(num1, num2)
<gh_stars>0
import {AnyJson} from '@salesforce/ts-types';
import {expect} from 'chai';
import {validateConfig} from '../../src/lib/schema';
// Asserts that validateConfig reports an error (non-undefined) for `data`.
const testValidateInvalid = (data: AnyJson) => {
    // tslint:disable-next-line:chai-vague-errors
    return expect(validateConfig(data)).not.to.be.undefined;
};

// Asserts that validateConfig accepts `data` (returns undefined).
const testValidateValid = (data: AnyJson) => {
    // tslint:disable-next-line:chai-vague-errors
    return expect(validateConfig(data)).to.be.undefined;
};
// Exercises the config schema: `data.sObjects.import`/`export` must be
// objects; `order` is a string[]; `deleteBeforeImport` accepts false,
// 'reversedOrder', or a string[] (per the valid cases below).
describe('config validate', () => {
    // An empty config object is valid.
    it('empty', () => {
        testValidateValid({});
    });
    it('import invalid', () => {
        // deleteBeforeImport: plain object is rejected.
        testValidateInvalid({
            data: {
                sObjects: {
                    import: {
                        deleteBeforeImport: {}
                    }
                }
            }
        });
        // deleteBeforeImport: arbitrary strings ('all') are rejected.
        testValidateInvalid({
            data: {
                sObjects: {
                    import: {
                        deleteBeforeImport: 'all'
                    }
                }
            }
        });
        // import must be an object, not a boolean.
        testValidateInvalid({
            data: {
                sObjects: {
                    import: false
                }
            }
        });
    });
    it('export invalid', () => {
        // deleteBeforeImport is not a valid key under export.
        testValidateInvalid({
            data: {
                sObjects: {
                    export: {
                        deleteBeforeImport: false
                    }
                }
            }
        });
        // export must be an object, not an array.
        testValidateInvalid({
            data: {
                sObjects: {
                    export: []
                }
            }
        });
        // export must be an object, not a boolean.
        testValidateInvalid({
            data: {
                sObjects: {
                    export: false
                }
            }
        });
    });
    it('import valid', () => {
        // Empty order list.
        testValidateValid({
            data: {
                sObjects: {
                    import: {
                        order: []
                    }
                }
            }
        });
        // Order as a list of sObject names.
        testValidateValid({
            data: {
                sObjects: {
                    import: {
                        order: ['Account', 'Contact']
                    }
                }
            }
        });
        // NOTE(review): this case duplicates the first (empty order) case above.
        testValidateValid({
            data: {
                sObjects: {
                    import: {
                        order: []
                    }
                }
            }
        });
        // deleteBeforeImport: false is accepted.
        testValidateValid({
            data: {
                sObjects: {
                    import: {
                        deleteBeforeImport: false,
                        order: ['Account', 'Contact']
                    }
                }
            }
        });
        // deleteBeforeImport: the 'reversedOrder' keyword is accepted.
        testValidateValid({
            data: {
                sObjects: {
                    import: {
                        deleteBeforeImport: 'reversedOrder',
                        order: ['Account', 'Contact']
                    }
                }
            }
        });
        // deleteBeforeImport: an explicit sObject list is accepted.
        testValidateValid({
            data: {
                sObjects: {
                    import: {
                        deleteBeforeImport: ['Contact', 'Account'],
                        order: ['Account', 'Contact']
                    }
                }
            }
        });
    });
    it('export valid', () => {
        // An empty export object is valid.
        testValidateValid({
            data: {
                sObjects: {
                    export: {}
                }
            }
        });
        testValidateValid({
            data: {
                sObjects: {
                    export: {
                        order: ['Account', 'Contact']
                    }
                }
            }
        });
        testValidateValid({
            data: {
                sObjects: {
                    export: {
                        order: []
                    }
                }
            }
        });
    });
});
|
<reponame>Habens/cascade
package com.github.robindevilliers.onlinebankingexample.controller;
import com.github.robindevilliers.onlinebankingexample.BackendService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import com.github.robindevilliers.onlinebankingexample.model.User;
import java.util.HashMap;
import java.util.Map;
@Controller
@RequestMapping("/database/set-user")
public class DatabaseController {
@Autowired
private BackendService backendService;
@RequestMapping(method= RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
public @ResponseBody Map setData(@RequestBody User user) {
backendService.setUser(user);
return new HashMap<String, String>(){{put("success","ok");}};
}
} |
import React from 'react';
import {
BrowserRouter as Router,
Switch,
Route,
} from 'react-router-dom';
import NavBar from './components/NavBar';
import PrivateRoute from './components/PrivateRoute';
import Configuration from './views/Configuration';
import Overview from './views/Overview';
// Root application component: wires up client-side routing.
// NavBar is mounted on a wildcard route so it renders on every page;
// /configuration and /overview are gated behind PrivateRoute (which
// presumably redirects unauthenticated users — defined elsewhere).
function App() {
  return (
    <Router>
      <Route path="*">
        <NavBar />
      </Route>
      <Switch>
        <Route exact path="/">
          <p>hello</p>
        </Route>
        <PrivateRoute path="/configuration">
          <Configuration />
        </PrivateRoute>
        <PrivateRoute path="/overview">
          <Overview />
        </PrivateRoute>
      </Switch>
    </Router>
  );
}

export default App;
|
//===============================================================================
// @ Player.cpp
// ------------------------------------------------------------------------------
// Player
//
// Copyright (C) 2008-2015 by <NAME> and <NAME>.
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// This demo implements a simple physical simulation using only linear forces.
// The cyan sphere is being pulled towards the origin by a spring. It is also
// being affected by gravity and a slight drag force, as well as forces created
// by the user.
//
// The key commands are:
//
// I,J,K,L: generate forces in the yz directions to pull the sphere around.
// W,S: increase and decrease the spring force
// SPACE: reset
//
//===============================================================================
//-------------------------------------------------------------------------------
//-- Dependencies ---------------------------------------------------------------
//-------------------------------------------------------------------------------
#include <IvRenderer.h>
#include <IvRendererHelp.h>
#include <IvEventHandler.h>
#include <IvMatrix44.h>
#include <IvDebugger.h>
#include "Player.h"
#include "Game.h"
//-------------------------------------------------------------------------------
//-- Static Members -------------------------------------------------------------
//-------------------------------------------------------------------------------
//-------------------------------------------------------------------------------
//-- Methods --------------------------------------------------------------------
//-------------------------------------------------------------------------------
//-------------------------------------------------------------------------------
// @ Player::Player()
//-------------------------------------------------------------------------------
// Constructor
//-------------------------------------------------------------------------------
Player::Player()
{
    // Start at rest at the origin with no applied force.
    mTranslate.Zero();
    mVelocity.Zero();
    mMass = 10.0f;              // kg-ish; used by F = ma and the drag/gravity terms
    mConstantForce.Zero();      // rebuilt each frame from held keys (see Update)
    mSpringConstant = 100.0f;   // stiffness of the spring pulling toward the origin
}   // End of Player::Player()
//-------------------------------------------------------------------------------
// @ Player::~Player()
//-------------------------------------------------------------------------------
// Destructor
//-------------------------------------------------------------------------------
// Nothing to release: all members are value types.
Player::~Player()
{
}   // End of Player::~Player()
//-------------------------------------------------------------------------------
// @ Player::Update()
//-------------------------------------------------------------------------------
// Main update loop
//-------------------------------------------------------------------------------
void
Player::Update( float dt )
{
    // add up part of forces and torques which are constant over interval
    // (rebuilt from scratch every frame from the currently held keys)
    mConstantForce.Zero();
    if (IvGame::mGame->mEventHandler->IsKeyDown('k'))
    {
        mConstantForce -= 75.0f*IvVector3::zAxis;
    }
    if (IvGame::mGame->mEventHandler->IsKeyDown('i'))
    {
        mConstantForce += 75.0f*IvVector3::zAxis;
    }
    if (IvGame::mGame->mEventHandler->IsKeyDown('l'))
    {
        mConstantForce -= 75.0f*IvVector3::yAxis;
    }
    if (IvGame::mGame->mEventHandler->IsKeyDown('j'))
    {
        mConstantForce += 75.0f*IvVector3::yAxis;
    }

    // adjust spring force: W/S change the stiffness by 10 per frame while held.
    // NOTE(review): nothing clamps mSpringConstant, so holding 's' long enough
    // drives it negative and the "spring" becomes repulsive.
    if (IvGame::mGame->mEventHandler->IsKeyDown('w'))
    {
        mSpringConstant += 10.0f;
    }
    if (IvGame::mGame->mEventHandler->IsKeyDown('s'))
    {
        mSpringConstant -= 10.0f;
    }

    // reset simulation
    if (IvGame::mGame->mEventHandler->IsKeyDown(' '))
    {
        mTranslate.Zero();
        mVelocity.Zero();
    }

    // integrate (via symplectic Euler)
    // compute acceleration
    IvVector3 accel = CurrentForce( mTranslate, mVelocity ) / mMass;
    // clear small values
    accel.Clean();

    // compute new position, velocity
    // velocity is advanced first and the *new* velocity moves the position;
    // this ordering is what makes the scheme symplectic rather than explicit
    // Euler — do not reorder these statements.
    mVelocity += dt*accel;
    // clear small values
    mVelocity.Clean();
    mTranslate += dt*mVelocity;

}   // End of Player::Update()
//-------------------------------------------------------------------------------
// @ Player::CurrentForce()
//-------------------------------------------------------------------------------
// Compute force, given forces constant over interval and variable params
//-------------------------------------------------------------------------------
// Total force = user input (mConstantForce) + spring toward the origin
// (-k*x) + gravity along -z + drag opposing motion (~ -c*m*|v|*v, i.e.
// quadratic in speed).
IvVector3
Player::CurrentForce( const IvVector3& position, const IvVector3& velocity )
{
    IvVector3 totalForce = mConstantForce;

    // add in spring force (Hooke's law, rest position at the origin)
    totalForce -= mSpringConstant*position;

    // add in gravity (along -z, scaled by mass)
    totalForce -= 9.8f*mMass*IvVector3::zAxis;

    // add some drag (opposes velocity, magnitude grows with speed squared)
    float speed = velocity.Length();
    totalForce -= 0.1f*mMass*speed*velocity;

    return totalForce;

}   // End of Player::CurrentForce()
//-------------------------------------------------------------------------------
// @ Player::Render()
//-------------------------------------------------------------------------------
// Render stuff
//-------------------------------------------------------------------------------
// Draws the sphere at its current position, the applied user force as a
// line from the sphere, and the spring as a line from the origin.
void
Player::Render()
{
    // build 4x4 matrix
    IvMatrix44 transform;
    transform.Translation(mTranslate);

    IvSetWorldMatrix(transform);
    IvDrawSphere(0.5f, kCyan );

    // draw force line
    // NOTE(review): this re-sets the same world matrix as above; the second
    // call appears redundant but is kept for clarity/symmetry.
    IvSetWorldMatrix(transform);
    IvDrawLine( IvVector3::origin, mConstantForce, kOrange );

    // draw line from origin to ball, to show spring
    IvSetWorldIdentity();
    IvDrawLine( IvVector3::origin, mTranslate, kWhite );
}
|
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { User } from '../entities/user.entity';
/**
 * Read/write access to User entities. Writes go through the 'default'
 * connection; all lookups use the 'readonly' replica connection and eagerly
 * load each user's stations with their message counts.
 */
@Injectable()
export class UsersService {
    constructor(
        @InjectRepository(User, 'default') private readonly userRepository: Repository<User>,
        @InjectRepository(User, 'readonly') private readonly userReadonlyRepository: Repository<User>,
    ) {}

    /** Finds a user by username; alias of findOneByUsername, kept for existing callers. */
    async findOne(username: string): Promise<User | undefined> {
        // This was a byte-for-byte duplicate of findOneByUsername — delegate
        // so the relation list is maintained in one place.
        return this.findOneByUsername(username);
    }

    /** Finds a user by primary key, including stations and message counts. */
    async findOneById(id: string): Promise<User | undefined> {
        // Fixed: parameter was typed `String` (the boxed wrapper object),
        // which is a TypeScript anti-pattern; the primitive `string` is what
        // every caller actually passes.
        return await this.userReadonlyRepository
            .findOne({
                relations: ['stations', 'stations.stationMessageCount'],
                where: {
                    id: id
                }
            });
    }

    /** Finds a user by their email-confirmation token (no relations loaded). */
    async findOneByConfirmationToken(token: string): Promise<User | undefined> {
        return await this.userReadonlyRepository
            .findOne({
                where: {
                    confirmationToken: token
                }
            });
    }

    /** Finds a user by email address, including stations and message counts. */
    async findOneByEmail(email: string): Promise<User | undefined> {
        return await this.userReadonlyRepository
            .findOne({
                relations: ['stations', 'stations.stationMessageCount'],
                where: {
                    email: email
                }
            });
    }

    /** Finds a user by username, including stations and message counts. */
    async findOneByUsername(username: string): Promise<User | undefined> {
        return await this.userReadonlyRepository
            .findOne({
                relations: ['stations', 'stations.stationMessageCount'],
                where: {
                    username: username
                }
            });
    }

    /** Persists a new user via the writable connection. */
    async create(user: User): Promise<User | undefined> {
        return await this.userRepository
            .save(user);
    }

    /** Saves changes to an existing user via the writable connection. */
    async save(user: User): Promise<User | undefined> {
        return await this.userRepository.save(user);
    }
}
|
#!/bin/sh
# Download third-party front-end libraries from cdnjs into public/lib.
# set -e makes a failed `curl --fail` abort the script so we never leave a
# truncated/empty file in place (previously failures were silently ignored
# and an empty destination file was still created by the redirection).
set -e

mkdir -p public/lib

# fetch URL DEST — download URL into public/lib/DEST.
fetch() {
    curl --fail "$1" > "public/lib/$2"
}

fetch https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.min.js jquery.min.js
fetch https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.js jquery.js
fetch https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.min.map jquery.min.map
# Note: the minified sources below are intentionally saved under unminified
# names (coffee-script.js, less.js), matching the original script.
fetch https://cdnjs.cloudflare.com/ajax/libs/coffee-script/1.6.2/coffee-script.min.js coffee-script.js
fetch https://cdnjs.cloudflare.com/ajax/libs/less.js/1.3.3/less.min.js less.js
fetch https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.9.1/dropbox.min.js dropbox.min.js
fetch https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.9.1/dropbox.js dropbox.js
fetch https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.9.1/dropbox.min.map dropbox.min.map
fetch https://cdnjs.cloudflare.com/ajax/libs/html5shiv/3.6.2/html5shiv.js html5shiv.js
|
from typing import List
import re
def extractFunctionNames(sourceCode: str) -> List[str]:
    """Extract candidate function names from C-style source code.

    Matches any identifier followed by a parenthesized parameter list and an
    opening brace. Control-flow keywords (``if (...) {``, ``while (...) {``,
    etc.) also fit that shape, so they are filtered out — the original
    implementation reported them as function names.

    :param sourceCode: source text to scan
    :return: function names in order of appearance (may contain duplicates)
    """
    # Keywords that syntactically look like `name(...) {` but are not functions.
    control_keywords = {'if', 'for', 'while', 'switch', 'catch', 'do', 'else', 'return'}
    matches = re.findall(r'\b\w+\s*\([^)]*\)\s*{', sourceCode)
    names = [match.split('(')[0].strip() for match in matches]
    return [name for name in names if name not in control_keywords]
#!/bin/bash
# Start the log.io web server in the background, then run the file-input
# harvester in the foreground. The script lives as long as the harvester
# does; the backgrounded server is not waited on or reaped here.
node_modules/log.io/bin/log.io-server &
node_modules/log.io-file-input/bin/log.io-file-input
// Variables used by Scriptable.
// These must be at the very top of the file. Do not edit.
// icon-color: yellow; icon-glyph: clipboard-check;
// Tests to be run inside of Scriptable.
// Depending on the number of Sonos speakers that you have, you may need to alter some of the tests.
// Note that these tests aren't very smart or comprehensive. Many of them just check for the right type of output.
// There's still some human judgment required (e.g., Did the right station start playing in the right room?).
// (The easiest way to assess the outcome of many of the tests is to watch what happens in the Sonos app on a different device than the one that's running this script.)
// **** TEST DATA. CHANGE THESE VALUES. ****
// **** TEST DATA. CHANGE THESE VALUES. ****
const allRooms = ['Basement', 'Kitchen', 'Living Room', 'Office']; // All rooms in the system.
const threeRooms = ['Kitchen', 'Living Room', 'Office']; // Used to validate grouping.
const roomsToGroup = ['Kitchen', 'Living Room']; // This is used for `roomsToGroup` in the test functions.
const mainRoom = 'Office'; // This is used for `mainRoom` in the test functions.
const favorite = 'Downtempo'; // This is used for `favorite` in the test functions.
const playlist = 'Test Playlist'; // This is used for `playlist` in the test functions.
const millisecondsBetweenTests = 5000; // Delay between each test. 1000 = 1 second
// **** END TEST DATA. DO NOT CHANGE ANYTHING BELOW THIS LINE. ****

// Import the controller and settings.
const SonosController = importModule('SonosController');
const settings = importModule('SonosSettings');

// Create the controller.
const controller = new SonosController(settings.sonosServerUrl);

// Other variables
let runTests = false;
let testResults = {}; // testName -> {result: 'Passed'|'Failed', output: any}; filled by testPassed().

// Test connectivity to server. If successful, run the tests.
// (Top-level await is supported by Scriptable's script runtime.)
try {
    let connected = await controller.isConnected();
    if (connected) {
        runTests = true;
    } else {
        // Server unreachable: log it and show a native alert to the user.
        let message = `Unable to connect to the server. Verify that the server is running at ${controller.sonosBaseUrl}.`;
        console.log(message);
        let popup = new Alert();
        popup.title = 'Error';
        popup.message = message;
        let result = await popup.presentAlert();
    }
} catch (error) {
    console.log(`Error testing connectivity: ${error}`);
}

if (runTests) {
    console.log('Connected to server. Running tests...');
    // Run each suite sequentially, then show the accumulated results.
    await systemInformationTests();
    await roomTests(mainRoom, favorite, playlist, millisecondsBetweenTests);
    await groupTests(mainRoom, roomsToGroup, playlist, millisecondsBetweenTests);
    console.log(testResults);
    await displayTestReport();
}
// Test if two arrays are equal (order-insensitive).
// Returns true when both arguments are arrays of the same length containing
// the same elements with the same multiplicities. The previous version only
// checked length + membership, so e.g. [1,1,2] vs [1,2,2] compared equal.
function arraysAreEqual(arr1, arr2) {
  // `arr1` and `arr2` must be arrays.
  if (!Array.isArray(arr1) || !Array.isArray(arr2)) {
    return false;
  }
  // Arrays must be the same length.
  if (arr1.length !== arr2.length) {
    return false;
  }
  // Sort copies with the same comparator and compare position by position;
  // identical sorted sequences <=> identical multisets.
  const sorted1 = [...arr1].sort();
  const sorted2 = [...arr2].sort();
  return sorted1.every((element, i) => element === sorted2[i]);
}
// Record the outcome of one test in the global `testResults` map.
// `booleanResult` must be exactly true for a pass; any other value
// (false, undefined, truthy non-boolean) is recorded as a failure.
function testPassed(testName, testOutput, booleanResult) {
  testResults[testName] = {
    'result': booleanResult === true ? 'Passed' : 'Failed',
    'output': testOutput,
  };
}
// Build the HTML report for `testResults`. When `includeOutput` is true a
// third column with each test's raw output (JSON-stringified) is added.
// Fixes vs. the original:
//  - `testOutput` was declared with `let` inside the first
//    `if (includeOutput)` block but read in a later block, throwing a
//    ReferenceError whenever includeOutput was true;
//  - `</tr>` was only emitted when includeOutput was true, leaving rows
//    unclosed in the default report;
//  - the loop variable `r` leaked as an implicit global.
function createHTML(includeOutput=false) {
  let style = `
  <style>
    body {
      font-family: Helvetica, sans-serif;
      font-size: 1.5em;
    }
    table, tr, th, td {
      border: 1px solid black;
    }
    table {
      border-collapse: collapse;
    }
    th, td {
      padding: 0.5em;
    }
    .passed {
      color: green;
      font-weight: bold;
    }
    .failed {
      color: red;
      font-weight: bold;
    }
  </style>
  `;

  let head = `
  <head>
    <title>Test Results</title>
    <meta name="viewport" content="width=device-width, initial-scale=1">
    ${style}
  </head>
  `;

  let tableHeader;
  if (includeOutput) {
    tableHeader = '<th>Test Name</th><th>Test Result</th><th>Test Output</th>';
  } else {
    tableHeader = '<th>Test Name</th><th>Test Result</th>';
  }

  let html = `<html>${head}<body>`;
  html += `<table><tr>${tableHeader}</tr>`;

  for (const r in testResults) {
    let testName = r;
    let testResult = testResults[r]['result'];
    // CSS class names are the lowercased result ('passed'/'failed').
    let resultStyle = testResult.toLowerCase();

    html += `<tr><td>${testName}</td>`;
    html += `<td class="${resultStyle}">${testResult}</td>`;
    if (includeOutput) {
      let testOutput = JSON.stringify(testResults[r]['output']);
      html += `<td>${testOutput}</td>`;
    }
    html += '</tr>';
  }

  html += '</table></body></html>';
  return html;
}
// Synchronous delay for `ms` milliseconds via a busy-wait on the clock.
// Scriptable has no blocking sleep primitive, so this deliberately spins
// (and burns CPU) until the deadline passes.
function sleep(ms) {
  const deadline = Date.now() + ms;
  while (Date.now() < deadline);
}
// Render the test-results table (built by createHTML from the global
// testResults) in a Scriptable WebView and wait until the user dismisses it.
async function displayTestReport() {
  let wv = new WebView();
  let html = createHTML();
  wv.loadHTML(html);
  await wv.present();
}
// **** SYSTEM INFORMATION TESTS ***
// Exercises the read-only system endpoints (services, zones, rooms, groups,
// favorites, playlists) on the module-level `controller`, recording each
// outcome via testPassed(). Most checks validate shape only (right type,
// non-empty); always returns true.
async function systemInformationTests() {
  let testName = null;
  let output = null;
  let result = null;

  // Get the list of all available services.
  // NOTE(review): assumes a 'Pandora' service exists on this system — adjust
  // the check if your setup differs.
  testName = 'Get Services';
  console.log(`Testing ${testName}...`);
  output = await controller.getServices();
  result = output !== null && Object.keys(output).includes('Pandora');
  testPassed(testName, output, result);
  console.log(`Finished testing ${testName}.`);

  // Get the system zones.
  testName = 'Get Zones';
  console.log(`Testing ${testName}...`);
  output = await controller.getZones();
  result = Array.isArray(output);
  testPassed(testName, output, result);
  console.log(`Finished testing ${testName}.`);

  // Get the names of all rooms in the system.
  // Only the count is compared against the `allRooms` test data, not the names.
  testName = 'Get Rooms';
  console.log(`Testing ${testName}...`);
  output = await controller.getRooms();
  result = Array.isArray(output) && output.length === allRooms.length;
  testPassed(testName, output, result);
  console.log(`Finished testing ${testName}.`);

  testName = 'Get Groups';
  console.log(`Testing ${testName}...`);
  output = await controller.getGroups();
  result = Array.isArray(output) && output.length > 0;
  testPassed(testName, output, result);
  console.log(`Finished testing ${testName}.`);

  // Get stations, playlists, etc. that have been marked as favorites.
  testName = 'Get Favorites';
  console.log(`Testing ${testName}...`);
  output = await controller.getFavorites();
  result = Array.isArray(output) && output.includes(favorite);
  testPassed(testName, output, result);
  console.log(`Finished testing ${testName}.`);

  // Get Sonos playlists.
  testName = 'Get Playlists';
  console.log(`Testing ${testName}...`);
  output = await controller.getPlaylists();
  result = Array.isArray(output) && output.includes(playlist);
  testPassed(testName, output, result);
  console.log(`Finished testing ${testName}.`);

  return true;
}
// **** ROOM TESTS ****
// Exercises single-room playback controls (favorite playback, transport,
// volume, playlist playback) against `controller`, reporting each result
// through testPassed().
// Params:
//   mainRoom  - name of the room to drive.
//   favorite  - name of a favorite expected to be playable.
//   playlist  - name of a Sonos playlist expected to be playable.
//   millisecondsBetweenTests - settle time between commands (default 5000).
// Returns true when the whole sequence completes.
// NOTE(review): sleep() is called without `await` throughout this file; if it
// is an async delay it will not actually pause — confirm it is a synchronous
// busy-wait before relying on the spacing between commands.
async function roomTests(mainRoom, favorite, playlist, millisecondsBetweenTests=5000) {
    let testName = null;
    let output = null;
    let result = null;
    // Declared locally: previously `currentTrack` was assigned without a
    // declaration, creating an implicit global (a ReferenceError in strict
    // mode / ES modules).
    let currentTrack = null;
    // Play a favorite.
    testName = 'Play Favorite';
    console.log(`Testing ${testName}...`);
    output = await controller.playFavorite(favorite, mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Get current playback state.
    testName = 'Get Current Playback State';
    console.log(`Testing ${testName}...`);
    output = await controller.getCurrentPlaybackState(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Get the current track.
    testName = 'Get Current Track';
    console.log(`Testing ${testName}...`);
    output = await controller.getCurrentTrack(mainRoom);
    result = output !== null && typeof output === 'object';
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Get the current track's album art.
    testName = 'Get Album Art';
    console.log(`Testing ${testName}...`);
    currentTrack = await controller.getCurrentTrack(mainRoom);
    output = await controller.getAlbumArt(currentTrack.albumArtUri);
    result = output !== null && output.size.width > 0;
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Get the current track's album art as a base64 encoded string.
    testName = 'Get Album Art as base64';
    console.log(`Testing ${testName}...`);
    currentTrack = await controller.getCurrentTrack(mainRoom);
    output = await controller.getAlbumArtAsBase64(currentTrack.albumArtUri);
    result = output !== null;
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Get the next track.
    testName = 'Get Next Track';
    console.log(`Testing ${testName}...`);
    output = await controller.getNextTrack(mainRoom);
    result = output !== null && typeof output === 'object';
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Pause playback.
    testName = 'Pause';
    console.log(`Testing ${testName}...`);
    output = await controller.pause(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Start playback.
    testName = 'Play';
    console.log(`Testing ${testName}...`);
    output = await controller.play(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Toggle playback. (It should pause.)
    testName = 'Toggle (should pause)';
    console.log(`Testing ${testName}...`);
    output = await controller.toggle(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Toggle playback. (It should start playing again.)
    testName = 'Toggle (should play)';
    console.log(`Testing ${testName}...`);
    output = await controller.toggle(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Skip to next track.
    testName = 'Play Next Track';
    console.log(`Testing ${testName}...`);
    output = await controller.next(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Go back to previous track.
    testName = 'Play Previous Track';
    console.log(`Testing ${testName}...`);
    output = await controller.previous(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Set room volume to 20%.
    testName = 'Set Room Volume to 20%';
    console.log(`Testing ${testName}...`);
    output = await controller.setRoomVolume(mainRoom, 20);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Set room volume to 10%.
    testName = 'Set Room Volume to 10%';
    console.log(`Testing ${testName}...`);
    output = await controller.setRoomVolume(mainRoom, 10);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Play a playlist.
    testName = 'Play Playlist';
    console.log(`Testing ${testName}...`);
    output = await controller.playPlaylist(playlist, mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait, then leave the room paused so later tests start from silence.
    sleep(millisecondsBetweenTests);
    await controller.pause(mainRoom);
    return true;
}
// **** GROUP TESTS ****
// Exercises grouping/ungrouping and group-volume controls via `controller`,
// reporting each result through testPassed().
// Params:
//   mainRoom     - room acting as the group coordinator.
//   roomsToGroup - array of other room names to group with mainRoom.
//   playlist     - Sonos playlist used to keep music playing.
//   millisecondsBetweenTests - settle time between commands (default 5000).
// Returns true when the whole sequence completes.
// NOTE(review): this function references `favorite` and `threeRooms`, which
// are not parameters — they must be module-level globals defined elsewhere in
// this file; confirm they are in scope before calling this standalone.
// NOTE(review): sleep() is never awaited; confirm it is a synchronous delay.
async function groupTests(mainRoom, roomsToGroup, playlist, millisecondsBetweenTests=5000) {
    let testName = null;
    let output = null;
    let result = null;
    // Make sure music is playing in the main room.
    await controller.playPlaylist(playlist, mainRoom);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Group two rooms.
    // (Fix: each test previously called testPassed twice, double-counting
    // every grouping result; each result is now reported exactly once.)
    testName = 'Group 2 Rooms';
    console.log(`Testing ${testName}...`);
    output = await controller.group(mainRoom, [roomsToGroup[0]]);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Ungroup the room that was just added to the group.
    testName = 'Ungroup 1 Room';
    console.log(`Testing ${testName}...`);
    output = await controller.ungroup([roomsToGroup[0]]);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Group three rooms.
    testName = 'Group 3 Rooms';
    console.log(`Testing ${testName}...`);
    output = await controller.group(mainRoom, roomsToGroup);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // "Inclusive" = group members including the coordinator itself.
    testName = 'Get Rooms in Group Inclusive';
    console.log(`Testing ${testName}...`);
    output = await controller.getRoomsInGroupInclusive(mainRoom);
    console.log(output);
    result = output !== null && arraysAreEqual(output, threeRooms);
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // "Exclusive" = group members excluding the coordinator.
    testName = 'Get Rooms in Group Exclusive';
    console.log(`Testing ${testName}...`);
    output = await controller.getRoomsInGroupExclusive(mainRoom);
    result = output !== null && arraysAreEqual(output, roomsToGroup);
    console.log(output);
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Set group volume to 20%.
    testName = 'Set Group Volume to 20%';
    console.log(`Testing ${testName}...`);
    output = await controller.setGroupVolume(mainRoom, 20);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Set group volume to 10%.
    testName = 'Set Group Volume to 10%';
    console.log(`Testing ${testName}...`);
    output = await controller.setGroupVolume(mainRoom, 10);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Ungroup the rooms that were grouped with the main room.
    testName = 'Ungroup 2 Rooms';
    console.log(`Testing ${testName}...`);
    output = await controller.ungroup(roomsToGroup);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Group all rooms with the main room.
    testName = 'Group All Rooms With Main Room';
    console.log(`Testing ${testName}...`);
    output = await controller.groupAllRoomsWith(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Ungroup all rooms from the main room.
    testName = 'Ungroup All From Main Room';
    console.log(`Testing ${testName}...`);
    output = await controller.ungroupAllRoomsFrom(mainRoom);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Wait before running next test.
    sleep(millisecondsBetweenTests);
    // Play a favorite everywhere.
    testName = 'Play Favorite Everywhere';
    console.log(`Testing ${testName}...`);
    output = await controller.playFavoriteEverywhere(favorite);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    // Reset system state and wait before running next test.
    // NOTE(review): pause() is called with no room argument here — confirm the
    // controller treats that as "pause everywhere".
    await controller.pause();
    await controller.ungroupAllRoomsFrom(mainRoom);
    sleep(millisecondsBetweenTests);
    // Play a playlist everywhere.
    testName = 'Play Playlist Everywhere';
    console.log(`Testing ${testName}...`);
    output = await controller.playPlaylistEverywhere(playlist);
    result = output !== null && !output.startsWith('error');
    testPassed(testName, output, result);
    console.log(`Finished testing ${testName}.`);
    sleep(millisecondsBetweenTests);
    await controller.pause(mainRoom);
    return true;
}
// Doxygen-generated navigation data for tick.c: maps each documented symbol
// name to its anchor in tick_8c.html. Auto-generated — do not edit by hand.
var tick_8c =
[
    [ "TIMER_FREQUENCY_HZ", "tick_8c.html#a290a7b04bb56e02e733a35599442a915", null ],
    [ "SysTick_Handler", "tick_8c.html#ab5e09814056d617c521549e542639b7e", null ],
    [ "Tick_DelayMs", "tick_8c.html#a01864fcfa99d364250b9e3e10390ff93", null ],
    [ "Tick_DelayMs_NonBlocking", "tick_8c.html#a700a086c5fb6a2a2491026af0fda77a8", null ],
    [ "Tick_GetMs", "tick_8c.html#a5018c28a90fe9864e870a712c8e2b571", null ],
    [ "Tick_Init", "tick_8c.html#acae0a023fcbd4801e394c473890925e7", null ],
    [ "TickCounter", "tick_8c.html#a497a8709fd22682ab11917334ea779d2", null ]
];
// RTP encoding parameters. Every field is optional: `None` means "unset /
// use the transport's default".
pub struct RtpEncodingParameters {
    // Maximum bitrate cap (units not stated here — presumably bits/s; confirm).
    max_bitrate: Option<u32>,
    // Minimum bitrate floor (same unit caveat as max_bitrate).
    min_bitrate: Option<u32>,
    // Maximum framerate cap, in frames per second.
    max_framerate: Option<u32>,
    // Relative priority of this encoding (semantics defined by the caller).
    priority: Option<u32>,
}
// Implement methods for the RtpEncodingParameters struct
impl RtpEncodingParameters {
// Constructor method with default values
pub fn new() -> Self {
RtpEncodingParameters {
max_bitrate: None,
min_bitrate: None,
max_framerate: None,
priority: None,
}
}
// Method to set the maximum bitrate
pub fn set_max_bitrate(&mut self, bitrate: u32) {
self.max_bitrate = Some(bitrate);
}
// Method to set the minimum bitrate
pub fn set_min_bitrate(&mut self, bitrate: u32) {
self.min_bitrate = Some(bitrate);
}
// Method to set the maximum framerate
pub fn set_max_framerate(&mut self, framerate: u32) {
self.max_framerate = Some(framerate);
}
// Method to set the priority
pub fn set_priority(&mut self, priority: u32) {
self.priority = Some(priority);
}
}
// Implement a trait for printing the encoding parameters
pub trait PrintEncodingParameters {
fn print_parameters(&self) {
println!("Max Bitrate: {:?}", self.max_bitrate);
println!("Min Bitrate: {:?}", self.min_bitrate);
println!("Max Framerate: {:?}", self.max_framerate);
println!("Priority: {:?}", self.priority);
}
}
// Implement the trait for the RtpEncodingParameters struct
impl PrintEncodingParameters for RtpEncodingParameters {} |
#!/bin/sh
# Cross-build the crate for 32-bit Linux: enter an ad-hoc nix-shell whose
# environment is the i686 stdenv (32-bit toolchain; buildInputs intentionally
# empty), then run cargo against the i686-unknown-linux-gnu target.
nix-shell -E 'with import <nixpkgs> {}; pkgsi686Linux.stdenv.mkDerivation { name = "dummy"; buildInputs = []; }' --run "cargo build --target i686-unknown-linux-gnu"
def LCS(X, Y):
    """Print and return a longest common subsequence of strings X and Y.

    Standard O(m*n) dynamic program: L[i][j] holds the LCS length of
    X[:i] and Y[:j], followed by a backtrack to recover one LCS. Ties in
    the backtrack prefer moving left (j-1), matching the original
    implementation, so the printed string is unchanged.

    Improvement over the original: the reconstructed subsequence is now
    *returned* as well as printed (it previously returned None), which is
    backward compatible for callers that ignored the result.

    Args:
        X, Y: input strings (either may be empty).
    Returns:
        One longest common subsequence of X and Y ("" if none).
    """
    m = len(X)
    n = len(Y)
    # Fill the DP table; row/column 0 represent an empty prefix.
    L = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if X[i-1] == Y[j-1]:
                L[i][j] = L[i-1][j-1] + 1
            else:
                L[i][j] = max(L[i-1][j], L[i][j-1])
    # Backtrack from the bottom-right corner, collecting matched characters
    # in reverse order.
    chars = []
    i = m
    j = n
    while i > 0 and j > 0:
        if X[i-1] == Y[j-1]:
            # Matching characters are part of the LCS.
            chars.append(X[i-1])
            i -= 1
            j -= 1
        elif L[i-1][j] > L[i][j-1]:
            # Follow the direction of the larger subproblem.
            i -= 1
        else:
            j -= 1
    result = "".join(reversed(chars))
    print(result)
    return result
# Driver program: prints one longest common subsequence of the two sample
# strings (expected output: "BABC").
X = "ABABC"
Y = "BABCA"
LCS(X, Y)
<gh_stars>1-10
require 'chef/resource/lwrp_base'
require 'chef/provisioning'
require 'chef/provisioning/machine'
require 'chef/provisioning/driver'

class Chef
  class Resource
    # Chef Provisioning resource describing a single file on a provisioned
    # machine. Supports uploading content to the machine, downloading it
    # locally, or deleting it; the actual work is done by the matching
    # provider (not shown here).
    class MachineFile < Chef::Resource::LWRPBase
      self.resource_name = 'machine_file'

      def initialize(*args)
        super
        # Default to the Chef server currently active in the run context.
        @chef_server = run_context.cheffish.current_chef_server
      end

      # Supported actions; :upload is the default.
      actions :upload, :download, :delete, :nothing
      default_action :upload

      # Remote path on the machine; doubles as the resource's name attribute.
      attribute :path, :kind_of => String, :name_attribute => true
      # Name of the machine the file lives on.
      attribute :machine, :kind_of => String
      # Local path used as the source (upload) or destination (download).
      attribute :local_path, :kind_of => String
      # Literal file content; alternative to local_path for uploads.
      attribute :content
      # Remote ownership and permissions applied after upload.
      attribute :owner, :kind_of => String
      attribute :group, :kind_of => String
      attribute :mode, :kind_of => String
      # Chef server connection details and the provisioning driver to use.
      attribute :chef_server, :kind_of => Hash
      attribute :driver, :kind_of => Chef::Provisioning::Driver
    end
  end
end
def filter_hidden(x: dict) -> dict:
    """Return a copy of ``x`` with "hidden" entries removed.

    An entry is hidden when its key is a string beginning with ``'.'``
    (dotfile convention). Nested dicts are filtered recursively; every
    other value is copied through unchanged.

    Fix over the original: non-string keys no longer raise
    ``AttributeError`` from ``.startswith`` — they can never be hidden,
    so they are kept.

    Args:
        x: the dictionary to filter.
    Returns:
        A new dict containing only the non-hidden entries.
    Raises:
        TypeError: if ``x`` is not a dict.
    """
    if not isinstance(x, dict):
        raise TypeError("Input must be a dictionary")
    filtered_dict = {}
    for key, value in x.items():
        # Only string keys can carry the leading-dot "hidden" marker.
        if isinstance(key, str) and key.startswith('.'):
            continue
        if isinstance(value, dict):
            filtered_dict[key] = filter_hidden(value)
        else:
            filtered_dict[key] = value
    return filtered_dict
def longest_substring_unique(s):
    """Return ``(length, substring)`` for the longest substring of ``s``
    with no repeated characters.

    Fix over the original: on seeing a repeated character the old code
    reset its window to just that character, discarding the still-valid
    suffix — e.g. for "dvdf" it returned (2, "df") instead of the correct
    (3, "vdf"). This version uses the standard sliding-window technique:
    the window start only advances past the *previous occurrence* of the
    repeated character.

    Args:
        s: input string (may be empty).
    Returns:
        Tuple of (longest length, one longest unique-character substring).
    """
    longest = 0
    best_start = 0          # start index of the best window found so far
    start = 0               # start index of the current window
    last_seen = {}          # char -> index of its most recent occurrence
    for i, c in enumerate(s):
        if c in last_seen and last_seen[c] >= start:
            # c repeats inside the current window: slide the start just
            # past its previous occurrence.
            start = last_seen[c] + 1
        last_seen[c] = i
        if i - start + 1 > longest:
            longest = i - start + 1
            best_start = start
    return (longest, s[best_start:best_start + longest])
# Provisioning script (templated: [% ... %] blocks are rendered before run)
# that installs Docker + the Ansible Service Broker tooling and launches an
# OpenShift Origin cluster with the broker deployed.

# Docker daemon options: keep SELinux enforcement on and trust the in-cluster
# registry CIDR as an insecure registry.
sed -i '/OPTIONS=.*/c\OPTIONS="--selinux-enabled --insecure-registry 172.30.0.0/16"' /etc/sysconfig/docker
# Allow the 'fedora' user to use docker without sudo.
groupadd docker
usermod -aG docker fedora
systemctl start docker --ignore-dependencies
# Give the docker daemon time to come up before it is used below.
sleep 20
# Install apb from the ansible-service-broker COPR repository.
dnf -y install dnf-plugins-core libselinux-python
dnf -y copr enable @ansible-service-broker/ansible-service-broker-latest
dnf -y install apb
# Fetch the upstream script that starts a cluster with the latest broker build.
wget https://raw.githubusercontent.com/openshift/ansible-service-broker/master/scripts/run_latest_build.sh
[% if '.' in openshift_version %]
export ORIGIN_VERSION="v[[ openshift_version ]]"
[% else %]
export ORIGIN_VERSION="[[ openshift_version ]]"
# Non-release versions need a matching oc binary fetched manually.
wget https://apb-oc.s3.amazonaws.com/apb-oc/oc-linux-64bit.tar.gz
tar zxvf oc-linux-64bit.tar.gz
mv oc-linux-64bit/oc /usr/bin
chmod u+x /usr/bin/oc
[% endif %]
# NOTE(review): purpose of removing this secrets dir is not evident here —
# presumably it conflicts with the cluster's mounted secrets; confirm.
rm -rf /usr/share/rhel/secrets
[% if org is defined %]
export TEMPLATE_URL=file:///root/deploy-ansible-service-broker.template.yaml
[% endif %]
chmod +x run_latest_build.sh
# Publicly routable IP of eth0, passed to the cluster-up script.
export PUBLIC_IP=`ip a l eth0 | grep 'inet ' | cut -d' ' -f6 | awk -F'/' '{ print $1}'`
sh run_latest_build.sh
# Grant cluster-admin to the default users of the local cluster.
oc adm policy add-cluster-role-to-user cluster-admin admin --config=/var/lib/origin/openshift.local.config/master/admin.kubeconfig
oc adm policy add-cluster-role-to-user cluster-admin developer --config=/var/lib/origin/openshift.local.config/master/admin.kubeconfig
|
import React, { useEffect } from 'react';
import { ScrollSpy } from 'bootstrap/dist/js/bootstrap.esm'
import Section from './Section';
import experience from '../../data/experience';
import education from '../../data/education';
import projects from '../../data/projects';
import volunteer from '../../data/volunteer';
import softSkills from '../../data/soft-skills';
import organisations from '../../data/organisations';
export default function Sections({ filters }) {
useEffect(() => {
new ScrollSpy(document.body, {
offset: 0,
target: '#sidebar-nav'
})
})
return (
<div id="sections">
<Section filters={filters} title="Experience" id="experience" data={experience} />
<Section filters={filters} title="Education" id="education" data={education} />
<Section filters={filters} title="Projects" id="projects" data={projects} />
<Section filters={filters} title="Volunteer" id="volunteer" data={volunteer} />
<Section filters={filters} title="Soft Skills" id="soft-skills" data={softSkills} />
<Section filters={filters} title="Organisations" id="organisations" data={organisations} />
</div>
)
}
|
<reponame>LazyDuke/ts-promise
import isArrayLikeObject from 'lodash.isarraylikeobject'
// A Promises/A+-conformant promise implementation. Callbacks are scheduled
// with setTimeout (macrotasks), unlike native promises (microtasks) — see the
// comment in `all` for a consequence of that.
// (Original Chinese comments translated to English; code unchanged.)
class TsPromise {
  // The three Promises/A+ states.
  static PENDING = 'PENDING'
  static FULFILLED = 'FULFILLED'
  static REJECTED = 'REJECTED'

  // The Promise Resolution Procedure [[Resolve]](promise2, x): decides how to
  // settle `anotherPromise` given a value `x` produced by a handler.
  static resolvePromise: ResolvePromise = (
    anotherPromise,
    x,
    resolve,
    reject
  ) => {
    // If onFulfilled or onRejected returns a value x, run the Promise
    // Resolution Procedure [[Resolve]](promise2, x), which follows the
    // steps below:
    // If promise and x refer to the same object, reject promise with a
    // TypeError to prevent a self-resolution cycle.
    if (anotherPromise === x) {
      return reject(new TypeError('Chaining cycle detected for promise'))
    }
    // If x is a promise, adopt its state.
    if (x instanceof TsPromise || x instanceof Promise) {
      return x.then(
        (
          // If/when x is fulfilled, fulfill promise with the same value.
          value: any
        ) => {
          return TsPromise.resolvePromise(
            anotherPromise,
            value,
            resolve,
            reject
          )
        },
        (
          // If/when x is rejected, reject promise with the same reason.
          reason: any
        ) => {
          return reject(reason)
        }
      )
    // Otherwise, if x is an object or function:
    } else if (
      x !== null &&
      (typeof x === 'object' || typeof x === 'function')
    ) {
      let called = false
      try {
        // Let then be x.then. (Store the reference first, then test and call
        // that reference, so x.then is read only once — a getter could return
        // a different value on each access.)
        const then = x.then
        // If then is a function, call it with x as `this`, passing two
        // callback arguments:
        if (typeof then === 'function') {
          return then.call(
            x,
            (
              // the first argument is resolvePromise,
              value: any
            ) => {
              // If both resolvePromise and rejectPromise are called, or the
              // same callback is called multiple times, the first call takes
              // precedence and the rest are ignored.
              if (called) {
                return
              }
              called = true
              // If resolvePromise is called with a value y, run
              // [[Resolve]](promise, y).
              return TsPromise.resolvePromise(
                anotherPromise,
                value,
                resolve,
                reject
              )
            },
            (
              // the second argument is rejectPromise.
              reason: any
            ) => {
              // First call wins; ignore subsequent calls.
              if (called) {
                return
              }
              called = true
              // If rejectPromise is called with reason r, reject promise
              // with r.
              return reject(reason)
            }
          )
        } else {
          // If then is not a function, fulfill promise with x.
          return resolve(x)
        }
      } catch (error) {
        // If calling then threw exception e: if resolvePromise or
        // rejectPromise has already been called, ignore the exception.
        if (called) {
          return
        }
        called = true
        // If retrieving x.then threw error e, reject promise with e.
        return reject(error)
      }
    } else {
      // If x is neither an object nor a function, fulfill promise with x.
      return resolve(x)
    }
  }

  // Returns a promise resolved with `value` (run through the resolution
  // procedure, so thenables are unwrapped).
  static resolve: TsPromiseResolve = value =>
    new TsPromise((resolve, reject) => {
      return resolve(value)
    })

  // Returns a promise rejected with `reason`.
  static reject: TsPromiseReject = reason =>
    new TsPromise((resolve, reject) => {
      return reject(reason)
    })

  // Resolves when every input resolves (values collected in `result`), or
  // rejects with the first rejection reason.
  static all: All = promises => {
    if (!isArrayLikeObject(promises)) {
      throw new TypeError(
        `${
          typeof promises === 'undefined' ? '' : typeof promises
        } ${promises} is not iterable (cannot read property Symbol(Symbol.iterator))`
      )
    }
    // This implementation schedules its callbacks via setTimeout (macrotask),
    // so async/await is used to keep the completion order correct; native
    // promises are microtask-based and would not need this.
    return new TsPromise(async (resolve, reject) => {
      const result = []
      for (const promise of promises) {
        await TsPromise.resolve(promise).then(resolvePromise, rejectPromise)
      }
      return resolve(result)
      function resolvePromise(value: any) {
        if (value instanceof TsPromise || value instanceof Promise) {
          value.then(resolvePromise, rejectPromise)
        } else {
          result.push(value)
        }
      }
      function rejectPromise(reason: any) {
        return reject(reason)
      }
    })
  }

  // Settles with the outcome of whichever input settles first.
  static race: Race = promises => {
    if (!isArrayLikeObject(promises)) {
      throw new TypeError(
        `${
          typeof promises === 'undefined' ? '' : typeof promises
        } ${promises} is not iterable (cannot read property Symbol(Symbol.iterator))`
      )
    }
    return new TsPromise((resolve, reject) => {
      for (const promise of promises) {
        TsPromise.resolve(promise).then(
          value => {
            return resolve(value)
          },
          reason => {
            return reject(reason)
          }
        )
      }
    })
  }

  // Creates a { promise, resolve, reject } triple (deferred pattern; also
  // exposed as `deferred` for the Promises/A+ test suite adapter).
  static defer = () => {
    let dfd: any = {}
    dfd.promise = new TsPromise((resolve, reject) => {
      dfd.resolve = resolve
      dfd.reject = reject
    })
    return dfd
  }
  static deferred = TsPromise.defer

  // Fulfillment value, rejection reason, current state, and the callback
  // queues drained when the promise settles while handlers were attached
  // during the pending state.
  value: any
  reason: any
  status = TsPromise.PENDING
  onFulfilledCallbacks: onFulfilledCallback[] = []
  onRejectedCallbacks: onRejectedCallback[] = []

  constructor(executor: Executor) {
    // Validate the executor argument.
    if (typeof executor !== 'function') {
      throw new TypeError(`Promise resolver ${executor} is not a function`)
    }
    // Rejection path: state transition plus running queued rejection
    // callbacks.
    const reject: Reject = reason => {
      // Only a pending promise may settle.
      if (this.status === TsPromise.PENDING) {
        // After reject is called, status becomes rejected.
        this.status = TsPromise.REJECTED
        // Store the rejection reason.
        this.reason = reason
        // Once reject runs, drain the queued rejection callbacks.
        this.onRejectedCallbacks.forEach(onRejected => onRejected())
      }
    }
    // Fulfillment path: state transition plus running queued fulfillment
    // callbacks.
    const resolve: Resolve = x => {
      const __resolve = (value: any) => {
        // Only a pending promise may settle.
        if (this.status === TsPromise.PENDING) {
          // After resolve is called, status becomes fulfilled.
          this.status = TsPromise.FULFILLED
          // Store the fulfillment value.
          this.value = value
          // Once resolve runs, drain the queued fulfillment callbacks.
          this.onFulfilledCallbacks.forEach(onFulfilled => onFulfilled())
        }
      }
      return TsPromise.resolvePromise.call(this, this, x, __resolve, reject)
    }
    try {
      executor(resolve, reject)
    } catch (error) {
      // If the executor itself throws, reject directly.
      reject(error)
    }
  }

  /**
   * A promise must provide a `then` method to access its current or
   * eventual value or reason.
   */
  then: Then = (onFulfilled, onRejected) => {
    // If onFulfilled is not a function, it must be "ignored"
    // (value passes through).
    onFulfilled =
      typeof onFulfilled === 'function' ? onFulfilled : value => value
    // If onRejected is not a function, it must be "ignored"
    // (reason is rethrown).
    onRejected =
      typeof onRejected === 'function'
        ? onRejected
        : error => {
            throw error
          }
    // `then` must return a promise.
    const anotherPromise = new TsPromise((resolve, reject) => {
      // Helper that runs a handler and feeds its result through the
      // resolution procedure (shared by all three state branches).
      const handle: Handle = (fn, argv) => {
        // Ensure onFulfilled and onRejected execute asynchronously.
        setTimeout(() => {
          try {
            const x = fn(argv)
            return TsPromise.resolvePromise(anotherPromise, x, resolve, reject)
          } catch (error) {
            return reject(error)
          }
        })
      }
      // When status is fulfilled:
      if (this.status === TsPromise.FULFILLED) {
        // run onFulfilled with the value as its first argument.
        handle(onFulfilled, this.value)
      }
      // When status is rejected:
      if (this.status === TsPromise.REJECTED) {
        // run onRejected with the reason as its first argument.
        handle(onRejected, this.reason)
      }
      // When status is pending:
      if (this.status === TsPromise.PENDING) {
        // queue onFulfilled in the fulfillment callback list,
        this.onFulfilledCallbacks.push(() => {
          handle(onFulfilled, this.value)
        })
        // and queue onRejected in the rejection callback list.
        this.onRejectedCallbacks.push(() => {
          handle(onRejected, this.reason)
        })
      }
    })
    return anotherPromise
  }

  // catch(fn) is sugar for then(null, fn).
  catch: Catch = onRejected => {
    return this.then(null, onRejected)
  }

  // finally(fn) runs fn (asynchronously) on settlement either way, passing
  // the value/reason through unchanged.
  finally: Finally = fn => {
    return this.then(
      (value: any) => {
        setTimeout(fn)
        return value
      },
      (reason: any) => {
        setTimeout(fn)
        throw reason
      }
    )
  }
}
export default TsPromise

// ---- Callback and method signatures used by TsPromise ----

// Executor passed to the constructor.
interface Executor {
  (resolve: Resolve, reject?: Reject): void
}
// Fulfills a promise with a value (or adopts a thenable).
interface Resolve {
  (value?: any | PromiseLike<any>): void
}
// Rejects a promise with a reason.
interface Reject {
  (reason?: any): void
}
// Instance method signatures.
interface Then {
  (onFulfilled: OnFulfilled, onRejected: OnRejected): TsPromise
}
interface Catch {
  (onRejected: OnRejected): TsPromise
}
interface Finally {
  (fn: Function): TsPromise
}
// Internal helper in `then` that runs a handler asynchronously.
interface Handle {
  (fn: OnFulfilled | OnRejected, argv: any): void
}
// User-supplied settlement handlers.
interface OnFulfilled {
  (value?: any): any
}
interface onFulfilledCallback {
  (): void
}
interface OnRejected {
  (error: Error): void
}
interface onRejectedCallback {
  (): void
}
// The Promise Resolution Procedure (static resolvePromise).
interface ResolvePromise {
  (anotherPromise: TsPromise, x: any, resolve: Resolve, reject: Reject): void
}
// Static method signatures.
interface TsPromiseResolve {
  (value?: any | PromiseLike<any>): TsPromise
}
interface TsPromiseReject {
  (reason?: any): TsPromise
}
interface All {
  (promises: TsPromise[]): TsPromise
}
interface Race {
  (promises: TsPromise[]): TsPromise
}
|
<gh_stars>0
const { range } = require("./src/util");
const { isRightTriangle, getUniqueTriangles } = require("./src/geometry");
const { getPositiveAnswerPhrase } = require("./src/output");
// Inclusive search bounds for each triangle side length.
const lowerBound = 1;
const upperBound = 30;
// Advance an array of values to the next combination, odometer-style: the
// last position ticks fastest, rolling over from `upper` back to `lower`
// and carrying into the position to its left. Returns a new array (the
// input is not mutated), or null once the first position would exceed
// `upper` (i.e. all combinations are exhausted).
const nextCombination = (values, lower, upper) => {
  const next = values.slice();
  let carry = true;
  for (let pos = next.length - 1; pos > 0; pos--) {
    if (carry) {
      next[pos] += 1;
      carry = false;
    }
    if (next[pos] > upper) {
      next[pos] = lower;
      carry = true;
    }
  }
  // A carry out of position 1 increments position 0; overflowing it means
  // the enumeration is complete.
  if (carry) {
    next[0] += 1;
    if (next[0] > upper) {
      return null;
    }
  }
  return next;
};
// Enumerate every (a, b, c) side-length combination within the bounds, in
// odometer order, and log the first 10 combinations that form a right
// triangle (as judged by the imported isRightTriangle predicate).
let s = [lowerBound, lowerBound, lowerBound];
let found = 0;
while (s !== null) {
  // Adapt the raw triple to the named-side shape isRightTriangle expects.
  const triangle = [
    {
      sideName: "a",
      sideLength: s[0],
    },
    {
      sideName: "b",
      sideLength: s[1],
    },
    {
      sideName: "c",
      sideLength: s[2],
    },
  ];
  if (isRightTriangle(triangle)) {
    console.log(s);
    found++;
    // Stop after the first ten hits.
    if (found === 10) break;
  }
  s = nextCombination(s, lowerBound, upperBound);
}
|
#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -----------------------------------------------------------------------------
# Start Script for the CATALINA Server
# -----------------------------------------------------------------------------
# Better OS/400 detection: see Bugzilla 31132
os400=false
darwin=false
# NOTE(review): `cygwin` is set in its case arm but never initialised to
# false beforehand — on non-Cygwin systems it stays unset; confirm nothing
# downstream tests it with `$cygwin` expecting "false".
case "`uname`" in
CYGWIN*) cygwin=true;;
OS400*) os400=true;;
Darwin*) darwin=true;;
esac

# resolve links - $0 may be a softlink
# Follow the symlink chain so PRGDIR points at the real script directory.
PRG="$0"

while [ -h "$PRG" ] ; do
  ls=`ls -ld "$PRG"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    # Absolute link target: follow it directly.
    PRG="$link"
  else
    # Relative link target: resolve it against the link's directory.
    PRG=`dirname "$PRG"`/"$link"
  fi
done

PRGDIR=`dirname "$PRG"`
EXECUTABLE=catalina.sh

# Check that target executable exists
if $os400; then
  # -x will Only work on the os400 if the files are:
  # 1. owned by the user
  # 2. owned by the PRIMARY group of the user
  # this will not work if the user belongs in secondary groups
  eval
else
  if [ ! -x "$PRGDIR"/"$EXECUTABLE" ]; then
    echo "Cannot find $PRGDIR/$EXECUTABLE"
    echo "The file is absent or does not have execute permission"
    echo "This file is needed to run this program"
    exit 1
  fi
fi

# Replace this process with catalina.sh in "start" mode, forwarding all
# arguments given to this script.
exec "$PRGDIR"/"$EXECUTABLE" start "$@"
|
<gh_stars>0
/*
* Title: Article Schema.
* Description: Article schema for Markdown blog app.
* Author: <NAME> (github profile: https://github.com/Mohammad-Mesbaul-Haque )
* Date: 19/05/2021
*/
// Dependencies.
const mongoose = require('mongoose');
const marked = require('marked');
const slugify = require('slugify');
const createDomPurifier = require('dompurify');
const { JSDOM } = require('jsdom')

// DOMPurify needs a DOM implementation; on the server we give it a JSDOM
// window so markdown-rendered HTML can be sanitized outside the browser.
const dompurify = createDomPurifier(new JSDOM().window);

// Article document: the author writes `markdown`; `slug` and `sanitizedHtml`
// are derived from `title`/`markdown` in the pre-validate hook below.
const articleSchema = mongoose.Schema({
  title: {
    type: String,
    required: true
  },
  description: {
    type: String
  },
  // Raw markdown source of the article body.
  markdown: {
    type: String,
    required: true
  },
  createdAt: {
    type: Date,
    default: Date.now
  },
  // URL-friendly identifier derived from the title; unique per article.
  slug: {
    type: String,
    required: true,
    unique: true
  },
  // Rendered + DOMPurify-sanitized HTML of `markdown`, safe to serve.
  sanitizedHtml: {
    type: String,
    required: true
  }
})

// Pre-validation hook: derive `slug` from the title and `sanitizedHtml`
// from the markdown before the required/unique validators run.
// (Regular function, not an arrow — `this` must be the document.)
articleSchema.pre('validate', function (next) {
  if(this.title){
    this.slug = slugify(this.title, {lower: true, strict: true})
  }
  if (this.markdown) {
    this.sanitizedHtml = dompurify.sanitize(marked(this.markdown));
  }
  next();
})

// export the module.
module.exports = mongoose.model('Article', articleSchema);
|
<filename>src/components/Player.js<gh_stars>100-1000
import React, { useEffect, useRef } from 'react';
import { func, bool, number } from 'prop-types';
// Thin wrapper around a react-player implementation. While `playing` is
// true it hands the underlying player instance to the `play` callback after
// every render (the effect has no dependency array, so it runs each time).
// Props:
//   player - the react-player component to render (passed as `player`,
//            used locally as ReactPlayer).
//   playing - whether playback should be active.
//   play   - callback receiving the internal player instance.
//   ready  - accepted but unused here; destructuring it keeps it out of the
//            props spread onto ReactPlayer. NOTE(review): confirm that is
//            the intent before removing it.
const Player = ({ player: ReactPlayer, playing, play, ready, ...props }) => {
  // Ref to the mounted react-player instance.
  const ref = useRef(null);
  useEffect(() => {
    if (playing && ref && ref.current && ref.current.getInternalPlayer())
      play(ref.current.getInternalPlayer());
  });
  return (
    <ReactPlayer
      ref={ref}
      loop
      tabIndex="-1"
      playsinline
      playing={playing}
      {...props}
    />
  );
};

Player.propTypes = {
  player: func.isRequired,
  playing: bool,
  muted: bool,
  playbackRate: number,
  play: func.isRequired,
  ready: bool
};

Player.defaultProps = {
  playing: true,
  muted: true,
  playbackRate: 1,
  ready: true
};

export default Player;
|
// Style factory (JSS/MUI convention). The `theme` argument is accepted for
// API compatibility but not currently used; the returned object is static.
const styles = theme => {
  // Outer container: top/right margins, full width.
  const root = {
    marginTop: '2em',
    marginRight: '2em',
    width: '100%',
  };
  // Section title: heavy weight with horizontal margins.
  const titleStyle = {
    fontWeight: 900,
    fontSize: '1.5rem',
    marginLeft: '0.67em',
    marginRight: '0.67em',
  };
  // Spacing under each project grid.
  const projectGridStyle = {
    marginBottom: '2em',
  };
  return { root, titleStyle, projectGridStyle };
};

export default styles;
|
# RUN: %clang -cc1apinotes -dump %s | FileCheck %s
---
Name: UIKit
Availability: iOS
AvailabilityMsg: iOSOnly
Classes:
- Name: UIFont
Availability: iOS
AvailabilityMsg: iOSOnly
Methods:
- Selector: 'fontWithName:size:'
MethodKind: Instance
Nullability: [ N ]
NullabilityOfRet: O
Availability: iOS
AvailabilityMsg: iOSOnly
DesignatedInit: true
Properties:
- Name: familyName
Nullability: N
Availability: iOS
AvailabilityMsg: iOSOnly
- Name: fontName
Nullability: N
Availability: iOS
AvailabilityMsg: iOSOnly
Protocols:
- Name: MyProto
AuditedForNullability: true
- Name: MyProto2
AuditedForNullability: true
Functions:
- Name: 'globalFoo'
Nullability: [ N, N, O, S ]
NullabilityOfRet: O
Availability: iOS
AvailabilityMsg: iOSOnly
- Name: 'globalFoo2'
Nullability: [ N, N, O, S ]
NullabilityOfRet: O
Globals:
- Name: globalVar
Nullability: O
Availability: iOS
AvailabilityMsg: iOSOnly
- Name: globalVar2
Nullability: O
# CHECK: Name: UIKit
# CHECK: Availability: iOS
# CHECK: AvailabilityMsg: iOSOnly
# CHECK: Classes:
# CHECK: - Name: UIFont
# CHECK: Availability: iOS
# CHECK: AvailabilityMsg: iOSOnly
# CHECK: Methods:
# CHECK: - Selector: 'fontWithName:size:'
# CHECK: MethodKind: Instance
# CHECK: Nullability: [ N ]
# CHECK: NullabilityOfRet: O
# CHECK: Availability: iOS
# CHECK: AvailabilityMsg: iOSOnly
# CHECK: DesignatedInit: true
# CHECK: Properties:
# CHECK: - Name: familyName
# CHECK: Nullability: N
# CHECK: Availability: iOS
# CHECK: AvailabilityMsg: iOSOnly
# CHECK: - Name: fontName
# CHECK: Nullability: N
# CHECK: Availability: iOS
# CHECK: AvailabilityMsg: iOSOnly
# CHECK:Protocols:
# CHECK: - Name: MyProto
# CHECK: AuditedForNullability: true
# CHECK: Availability: available
# CHECK: AvailabilityMsg: ''
# CHECK: - Name: MyProto2
# CHECK: AuditedForNullability: true
# CHECK: Availability: available
# CHECK: AvailabilityMsg: ''
# CHECK:Functions:
# CHECK: - Name: globalFoo
# CHECK: Nullability: [ N, N, O, U ]
# CHECK: NullabilityOfRet: O
# CHECK: Availability: iOS
# CHECK: AvailabilityMsg: iOSOnly
# CHECK: - Name: globalFoo2
# CHECK: Nullability: [ N, N, O, U ]
# CHECK: NullabilityOfRet: O
# CHECK: Availability: available
# CHECK: AvailabilityMsg: ''
# CHECK:Globals:
# CHECK: - Name: globalVar
# CHECK: Nullability: O
# CHECK: Availability: iOS
# CHECK: AvailabilityMsg: iOSOnly
# CHECK: - Name: globalVar2
# CHECK: Nullability: O
# CHECK: Availability: available
# CHECK: AvailabilityMsg:
|
const express = require('express');
const mysql = require('mysql2/promise');
const { isLoggedIn, isNotLoggedIn, isAdmin } = require('./middlewares');
const dbconfig = require('../config/database');

// Admin router: user / request / programming-language management pages.
const router = express.Router();
// Promise-based MySQL connection pool shared by every route below.
const pool = mysql.createPool(dbconfig);
// List ordinary users (freelancers and clients); admin only.
router.get('/user', isAdmin, async (req, res, next) => {
    // Declared in the handler scope so catch/finally can see it. Fix: the
    // outer catch previously referenced `conn` declared inside the inner
    // try, a ReferenceError whenever pool.getConnection() itself failed.
    let conn;
    try {
        conn = await pool.getConnection(async conn => conn);
        const [freelancers] = await conn.query(
            'SELECT * FROM freelancer'
        );
        const [clients] = await conn.query(
            'SELECT * FROM client'
        );
        res.render('user', {
            title: 'Admin - 사용자 관리',
            freelancers: freelancers,
            clients: clients,
            user: req.user
        });
    }
    catch (err) {
        console.error('Query Error');
        next(err);
    }
    finally {
        // Return the connection to the pool exactly once, on every path.
        if (conn) conn.release();
    }
});
// List every request for the admin page, sorted by ?orderType=<column>.
router.get('/request', isAdmin, async (req, res, next) => {
    // Identifiers cannot be bound as `?` placeholders, so the sort column
    // is validated against a strict identifier pattern before being
    // interpolated — this closes the SQL injection the raw template
    // interpolation allowed. Invalid/missing values fall back to 'rqid'.
    if (!req.query.orderType || !/^[A-Za-z_][A-Za-z0-9_]*$/.test(req.query.orderType)) {
        req.query.orderType = 'rqid';
    }
    // Hoisted so the outer catch can release it (the original referenced a
    // block-scoped `conn` there, causing a ReferenceError).
    let conn;
    try {
        conn = await pool.getConnection(async conn => conn);
        try {
            const [requests] = await conn.query(
                `SELECT * FROM request
                ORDER BY ${req.query.orderType}`
            );
            conn.release();
            res.render('request', {
                title: '의뢰 관리',
                user: req.user,
                requests: requests,
                tableName: '전체 의뢰 관리',
                orderType: req.query.orderType
            });
        }
        catch (err) {
            conn.release();
            next(err);
        }
    }
    catch (err) {
        // pool.getConnection() failed; `conn` may be unassigned.
        if (conn) conn.release();
        next(err);
    }
});
// List all programming languages, sorted by name, for the admin page.
router.get('/lang', isAdmin, async (req, res, next) => {
    // Hoisted so the outer catch can release it (the original referenced a
    // block-scoped `conn` there, causing a ReferenceError).
    let conn;
    try {
        conn = await pool.getConnection(async conn => conn);
        try {
            const [langs] = await conn.query(
                'SELECT * FROM program_lang ORDER BY lang_name'
            );
            conn.release();
            res.render('lang', {
                title: 'Admin - 프로그래밍언어',
                langs: langs,
                user: req.user,
                createError: req.flash('createError')
            });
        }
        catch (err) {
            conn.release();
            console.error('Query Error');
            next(err);
        }
    }
    catch (err) {
        // pool.getConnection() failed; `conn` may be unassigned.
        if (conn) conn.release();
        console.log(err);
        next(err);
    }
});
// Add a new programming language (admin only).
// Inserts into program_lang, then seeds a 0-proficiency row for every
// freelancer (knows) and every request (requires) so later joins always
// find a row for the new language.
router.post('/lang', isAdmin, async (req, res, next) => {
    // Hoisted so the outer catch can release it (the original referenced a
    // block-scoped `conn` there, causing a ReferenceError).
    let conn;
    try {
        conn = await pool.getConnection(async conn => conn);
        try {
            // Reject duplicates up front.
            const [exLang] = await conn.query(
                'SELECT * FROM program_lang WHERE lang_name=?',
                req.body.lang_name
            );
            if (exLang.length) {
                req.flash('createError', '이미 있는 언어입니다');
                conn.release();
                return res.redirect('/admin/lang');
            }
            await conn.query(
                'INSERT INTO program_lang(lang_name) VALUES(?)',
                req.body.lang_name
            );
            const [job_seekers] = await conn.query(
                `SELECT * FROM job_seeker`
            );
            const [requests] = await conn.query(
                `SELECT rqid FROM request`
            );
            // Seed proficiency 0 for every freelancer.
            // (let instead of the original duplicated `var i` declarations)
            for (let i = 0; i < job_seekers.length; i++) {
                await conn.query(
                    `INSERT INTO knows VALUES(?,?,?)`,
                    [job_seekers[i].job_seeker_id, req.body.lang_name, 0]
                );
            }
            // Seed required proficiency 0 for every request.
            for (let i = 0; i < requests.length; i++) {
                await conn.query(
                    `INSERT INTO requires VALUES(?,?,?)`,
                    [requests[i].rqid, req.body.lang_name, 0]
                );
            }
            conn.release();
            return res.redirect('/admin/lang');
        }
        catch (err) {
            conn.release();
            console.error(err);
            return res.redirect('/admin/lang');
        }
    }
    catch (err) {
        // pool.getConnection() failed; `conn` may be unassigned.
        if (conn) conn.release();
        console.log(err);
        next(err);
    }
});
// Delete a programming language (admin only).
router.post('/lang/delete', isAdmin, async (req, res, next) => {
    // Hoisted so the outer catch can release it (the original referenced a
    // block-scoped `conn` there, causing a ReferenceError).
    let conn;
    try {
        conn = await pool.getConnection(async conn => conn);
        try {
            const [exLang] = await conn.query(
                'SELECT * FROM program_lang WHERE lang_name=?',
                req.body.lang_name
            );
            if (!exLang.length) {
                req.flash('deleteError', '없는 언어입니다');
                // Release before the early redirect — the original leaked
                // the pooled connection on this path.
                conn.release();
                return res.redirect('/admin/lang');
            }
            await conn.query(
                'DELETE FROM program_lang WHERE lang_name=?',
                req.body.lang_name
            );
            conn.release();
            return res.redirect('/admin/lang');
        }
        catch (err) {
            conn.release();
            console.error('Query Error');
            return res.redirect('/admin/lang');
        }
    }
    catch (err) {
        // pool.getConnection() failed; `conn` may be unassigned.
        if (conn) conn.release();
        console.log(err);
        next(err);
    }
});
// List every team for the admin page.
router.get('/team', isAdmin, async (req, res, next) => {
    const conn = await pool.getConnection(async conn => conn);
    try {
        const [rows] = await conn.query('SELECT * FROM team');
        res.render('admin_team_list', {
            title: '팀 관리 - 관리자 모드',
            user: req.user,
            teams: rows,
            teamError: req.flash('teamError')
        });
        conn.release();
    }
    catch (err) {
        // Always hand the connection back before delegating to Express.
        conn.release();
        console.error(err);
        next(err);
    }
});
// View a single team (admin).
// Renders the team row, its member freelancer ids, and the language
// proficiencies of its members on one page.
router.get('/team/:tname', isAdmin, async (req, res, next) => {
    const tname = req.params.tname;
    const conn = await pool.getConnection(async conn => conn);
    try {
        // Double destructuring keeps only the first matching row.
        // NOTE(review): if no team matches, `team` is undefined and
        // `team.tname` below throws (routed to next(err) by the catch).
        const [[team]] = await conn.query(
            'SELECT * FROM team WHERE tname=?', tname
        );
        const [members] = await conn.query(
            'SELECT fid FROM participates WHERE tname=?',
            tname
        );
        // NOTE(review): implicit comma join on t.job_seeker_id — confirm
        // the `team` table actually carries a job_seeker_id column.
        const [knows] = await conn.query(
            `SELECT * FROM team t, knows k
            WHERE t.job_seeker_id = k.job_seeker_id
            AND t.tname=?`, tname
        );
        conn.release();
        res.render('team_profile', {
            title: '팀 관리 - 관리자 모드',
            user: req.user,
            team: team,
            tname: team.tname,
            knows: knows,
            members: members,
            teamError: req.flash('teamError')
        });
    }
    catch (err) {
        conn.release();
        console.error(err);
        next(err);
    }
});
// List rejected completion reports (admin view).
router.get('/report', isAdmin, async (req, res, next) => {
    const conn = await pool.getConnection(async conn => conn);
    try {
        // Join report ↔ request ↔ declined so each rejected report is shown
        // together with its request name and the rejection message.
        const [rows] = await conn.query(
            `SELECT rp.rid, rq.rqid, rq.rname, rp.rfile, de.message
            FROM report rp, request rq, declined de
            WHERE rp.rid = de.drid AND rp.rqid = rq.rqid`
        );
        conn.release();
        res.render('admin_report', {
            title: '의뢰완료요청 관리',
            user: req.user,
            declineds: rows
        });
    }
    catch (err) {
        conn.release();
        console.error(err);
        next(err);
    }
});
// Delete a rejected completion report (admin).
router.post('/delete/declined', isAdmin, async (req, res, next) => {
    const conn = await pool.getConnection(async conn => conn);
    try {
        // The report file is stored by name only, so no filesystem cleanup
        // is needed here — deleting the row is sufficient.
        const { drid } = req.body;
        await conn.query(`DELETE FROM report WHERE rid=?`, [drid]);
        conn.release();
        return res.redirect('/');
    }
    catch (err) {
        conn.release();
        console.error(err);
        next(err);
    }
});
// Admin landing page.
router.get('/', isAdmin, (req, res, next) => {
    try {
        const viewModel = {
            title: 'CodingMon - DBDBDIP @ admin',
            user: req.user,
            loginError: req.flash('loginError'),
        };
        res.render('main', viewModel);
    }
    catch (err) {
        console.log(err);
        next(err);
    }
});
module.exports = router; |
package com.stylingandroid.rialto.app;
import android.graphics.Typeface;
import android.os.Bundle;
import android.text.style.CharacterStyle;
import android.text.style.StyleSpan;
import android.text.style.UnderlineSpan;
import android.widget.TextView;
import com.stylingandroid.rialto.RialtoDelegate;
import com.stylingandroid.rialto.RialtoDelegateImpl;
import com.stylingandroid.rialto.RialtoRegistry;
import com.stylingandroid.rialto.format.SpannableFormatterKt;
import org.jetbrains.annotations.NotNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import kotlin.jvm.functions.Function0;
import java.util.Set;
/**
 * Demo activity that doubles as a {@link RialtoDelegate}: every registry
 * operation is forwarded to a {@link RialtoDelegateImpl} created in
 * {@link #onCreate(Bundle)}.
 */
public class MainActivity extends AppCompatActivity implements RialtoDelegate {

    // Forwarding target for all RialtoDelegate calls. Assigned at the top of
    // onCreate(); delegate methods invoked before that would NPE.
    private RialtoDelegate delegate = null;

    // --- RialtoDelegate: pure pass-through to `delegate` ---

    @Override
    public void registerSpanFactory(@NotNull String key, @NotNull String value, @NotNull Function0<?> creator) {
        delegate.registerSpanFactory(key, value, creator);
    }

    @Override
    public CharSequence processAnnotations(CharSequence text) {
        return delegate.processAnnotations(text);
    }

    @NotNull
    @Override
    public Set<Function0<Object>> get(@NotNull String key, @NotNull String value) {
        return delegate.get(key, value);
    }

    @NotNull
    @Override
    public RialtoRegistry copy() {
        return delegate.copy();
    }

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        // Create the delegate before super.onCreate() so any processing
        // triggered during view inflation can already reach it.
        delegate = new RialtoDelegateImpl(this);
        super.onCreate(savedInstanceState);
        // Anonymous-class form (kept for comparison with the lambdas below).
        registerSpanFactory("format", "bold", new Function0<CharacterStyle>() {
            @Override
            public CharacterStyle invoke() {
                return new StyleSpan(Typeface.BOLD);
            }
        });
        registerSpanFactory("format", "italic", () -> new StyleSpan(Typeface.ITALIC));
        // Two factories registered under the same key/value: get() returns a
        // Set, so "bold_underline" yields both a bold span and an underline.
        registerSpanFactory("format", "bold_underline", () -> new StyleSpan(Typeface.BOLD));
        registerSpanFactory("format", "bold_underline", UnderlineSpan::new);
        setContentView(R.layout.activity_main);
        TextView textView = findViewById(R.id.format_string);
        textView.setText(SpannableFormatterKt.getFormattedText(getResources(), R.string.formatted_italic, "formatted"));
    }
}
|
#!/bin/sh
# Install the local `kit` binary into /bin with standard executable perms.
# Abort on the first failure (e.g. `kit` missing or sudo denied) so we
# never chmod a file that was not actually copied.
set -e
sudo cp kit /bin/kit
sudo chmod 755 /bin/kit
|
<reponame>ginuraju/nodejs-docs-samples
/**
* Copyright 2017, Google, Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Require process, so we can mock environment variables
const process = require('process');
// [START createTables]
// [START setup]
const Knex = require('knex');
const prompt = require('prompt');
// [END setup]
// [START createTable]
/**
 * Create the "visits" table.
 *
 * Resolves with the same Knex client either way (success or failure) so the
 * caller can keep chaining; creation errors are logged, not propagated.
 *
 * @param {object} knex A Knex client object.
 */
function createTable (knex) {
  const defineColumns = (table) => {
    table.increments();
    table.timestamp('timestamp');
    table.string('userIp');
  };
  return knex.schema.createTable('visits', defineColumns)
    .then(() => {
      console.log(`Successfully created 'visits' table.`);
      return knex;
    })
    .catch((err) => {
      console.error(`Failed to create 'visits' table:`, err);
      return knex;
    });
}
// [END createTable]
// [START getConnection]
/**
 * Prompt the user for connection configuration and resolve with a new Knex
 * client bound to that configuration (client type from $SQL_CLIENT).
 */
function getConnection () {
  const FIELDS = ['user', 'password', 'database'];
  return new Promise((resolve, reject) => {
    prompt.start();
    prompt.get(FIELDS, (err, config) => {
      if (err) {
        reject(err);
        return;
      }
      // Connect to the database
      resolve(Knex({
        client: process.env.SQL_CLIENT,
        connection: config
      }));
    });
  });
}
// [END getConnection]
exports.main = function () {
// [START main]
getConnection()
.then((knex) => {
return createTable(knex);
})
.then((knex) => {
return knex.destroy();
})
.catch((err, knex) => {
console.error(`Failed to create database connection:`, err);
if (knex) {
knex.destroy();
}
});
// [END main]
};
// [END createTables]
// Get type of SQL client to use; refuse to start with anything else.
const sqlClient = process.env.SQL_CLIENT;
if (sqlClient !== 'pg' && sqlClient !== 'mysql') {
  throw new Error(`The SQL_CLIENT environment variable must be set to lowercase 'pg' or 'mysql'.`);
}
exports.main();
|
#!/bin/bash
#
# Usage
# -----
# $ bash launch_experiments.sh ACTION_NAME
#
# where ACTION_NAME is either 'list' or 'submit' or 'run_here'

# Default to 'list' (a no-op below) when no action is given.
ACTION_NAME=${1:-list}

# Experiment configuration, exported for the slurm script.
export gpu_idx=0
export data_dir="$YOUR_PATH/fNIRS-mental-workload-classifiers/data/slide_window_data/size_30sec_150ts_stride_3ts/"
export window_size=150
export classification_task='binary'
export scenario='64vs4'
export bucket='TestBucket3'
export setting="64vs4_TestBucket3"
export adapt_on='train_100'
export result_save_rootdir="$YOUR_PATH/fNIRS-mental-workload-classifiers/experiments/domain_adaptation/RandomForest/binary/$adapt_on/$scenario/$bucket"

case "$ACTION_NAME" in
    submit)
        ## Submit the experiment to the batch scheduler
        sbatch < $YOUR_PATH/fNIRS-mental-workload-classifiers/domain_adaptation/runs/do_experiment_RandomForest.slurm
        ;;
    run_here)
        ## Just run interactively
        bash $YOUR_PATH/fNIRS-mental-workload-classifiers/domain_adaptation/runs/do_experiment_RandomForest.slurm
        ;;
esac
|
#!/bin/bash
# Strength-of-schedule pipeline for NCAA baseball: rebuild the ZIM model
# tables via R, normalize the factors, then derive schedule factors.
# Each step is followed by "vacuum full analyze" so subsequent queries
# plan against fresh statistics.
#psql baseball -c "drop table if exists ncaa.results;"
#psql baseball -f sos/standardized_results.sql
psql baseball -c "vacuum full verbose analyze ncaa.results;"
# Drop the model outputs so the R run below recreates them.
psql baseball -c "drop table if exists ncaa._zim_basic_factors;"
psql baseball -c "drop table if exists ncaa._zim_parameter_levels;"
R --vanilla -f sos/ncaa_zim.R
# NOTE(review): the tables vacuumed here lack the _zim_ prefix used in the
# drops above — confirm which names sos/ncaa_zim.R actually creates.
psql baseball -c "vacuum full verbose analyze ncaa._parameter_levels;"
psql baseball -c "vacuum full verbose analyze ncaa._basic_factors;"
psql baseball -f sos/zim_normalize_factors.sql
psql baseball -c "vacuum full verbose analyze ncaa._zim_factors;"
psql baseball -f sos/zim_schedule_factors.sql
psql baseball -c "vacuum full verbose analyze ncaa._zim_schedule_factors;"
#psql baseball -f sos/current_ranking.sql > sos/current_ranking.txt
#psql baseball -f sos/division_ranking.sql > sos/division_ranking.txt
#psql baseball -f sos/connectivity.sql > sos/connectivity.txt
|
/*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webauthn4j.metadata.data.statement;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import java.util.Objects;
/**
 * The CodeAccuracyDescriptor describes the relevant accuracy/complexity
 * aspects of passcode user verification methods.
 */
public class CodeAccuracyDescriptor implements Serializable {

    private final Integer base;
    private final Integer minLength;
    private final Integer maxRetries;
    private final Integer blockSlowdown;

    @JsonCreator
    public CodeAccuracyDescriptor(
            @JsonProperty("base") Integer base,
            @JsonProperty("minLength") Integer minLength,
            @JsonProperty("maxRetries") Integer maxRetries,
            @JsonProperty("blockSlowdown") Integer blockSlowdown) {
        this.base = base;
        this.minLength = minLength;
        this.maxRetries = maxRetries;
        this.blockSlowdown = blockSlowdown;
    }

    /** Numeric base of the passcode alphabet. */
    public Integer getBase() {
        return base;
    }

    /** Minimum passcode length. */
    public Integer getMinLength() {
        return minLength;
    }

    /** Maximum attempts before the authenticator blocks. */
    public Integer getMaxRetries() {
        return maxRetries;
    }

    /** Slowdown (if any) applied after the retry budget is exhausted. */
    public Integer getBlockSlowdown() {
        return blockSlowdown;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        CodeAccuracyDescriptor other = (CodeAccuracyDescriptor) o;
        return Objects.equals(base, other.base)
                && Objects.equals(minLength, other.minLength)
                && Objects.equals(maxRetries, other.maxRetries)
                && Objects.equals(blockSlowdown, other.blockSlowdown);
    }

    @Override
    public int hashCode() {
        return Objects.hash(base, minLength, maxRetries, blockSlowdown);
    }
}
|
<gh_stars>1-10
const requestPromise = require( 'request-promise' );
const $ = require( 'cheerio' );
// Root of the per-function documentation pages on mivascript.com.
const baseUrl = 'http://www.mivascript.com/item/';
// Function pages to scrape.
// NOTE(review): 'crypto_next_error' appears twice, so that page is
// fetched (and its snippet emitted) twice.
const functionList = [
'evp_pkey_derive',
'evp_pkey_load_pubkey_mem',
'evp_pkey_load_mem',
'pkcs7_free',
'pkcs7_verify',
'pkcs7_get_certs',
'pkcs7_load_mem',
'crypto_evp_encrypt_auth',
'x509_pubkey_digest',
'x509_digest',
'x509_get_extensions',
'x509_get_issuer_name',
'x509_get_subject_name',
'x509_load_mem',
'evp_pkey_load_pubkey_x509',
'crypto_evp_verify',
'crypto_evp_sign',
'crypto_clear_error',
'crypto_next_error',
'crypto_next_error',
'file_set_time'
];
// Extract the raw text of the item-description <pre> block from a page's
// HTML (cheerio selector). Returns '' when the selector matches nothing.
const parseHTML = function( html ) {
    return $( '.item-description > pre', html ).text();
};
// Split the scraped signature text into a parameter-name array.
// Returns `false` when there is nothing usable (no data, or no "(...)"
// group in the text); the original crashed with a TypeError on the
// latter because String.match() returns null when nothing matches.
const parseFunctionParameters = function( data ) {
    if ( data === undefined || data === null ) {
        return false;
    }
    // Capture everything between the first "(" and the last ")".
    const matches = data.text.match( /(?<=\()(.*)(?=\))/gi );
    if ( matches === null ) {
        return false;
    }
    const parameters = matches[ 0 ]
        .split( ',' )
        .map( ( parameter ) => parameter.trim() );
    return { ...data, parameters: parameters };
};
// Render one Sublime-Text autocomplete entry: each parameter becomes a
// numbered snippet tab stop (${1:name}, ${2:name}, ...).
const buildAutocomplete = function( fn ) {
    const lastIndex = fn.parameters.length - 1;
    let snippetParams = '';
    fn.parameters.forEach(function( parameter, index ) {
        const separator = ( index < lastIndex ) ? ', ' : '';
        snippetParams += '${' + ( index + 1 ) + ':' + parameter + '}' + separator;
    });
    return `{ "trigger": "${ fn.name }", "contents": "${ fn.name }( ${ snippetParams } )" },`;
};
// Fetch one documentation page and push the scraped <pre> text onto the
// shared `data` accumulator, then resolve with that accumulator.
//
// NOTE(review): this promise does not always settle —
//   * if requestPromise rejects, the .catch only logs; neither resolve()
//     nor reject() is called, so callers awaiting this promise hang;
//   * if parseHTML() returns '' the promise rejects, and run()'s chain
//     has no rejection handler (unhandled rejection).
const makeRequest = function( functionName, data ) {
    return new Promise(function( resolve, reject ) {
        let url = baseUrl + functionName + '.html';
        console.log( '———— loading url ... ' + url );
        return requestPromise( url )
            .then(function( html ) {
                let foundHTML = parseHTML( html );
                if ( foundHTML ) {
                    data.push( { name: functionName, text: foundHTML } );
                    resolve( data );
                }
                else {
                    reject( 'Could not find HTML fragment' );
                }
            })
            .catch(function( err ) {
                console.error( err, functionName );
            });
    });
};
// Scrape every function page strictly sequentially (one request at a
// time) by folding the list into a promise chain.
// The final promise resolves with the shared accumulator only because
// each makeRequest() resolves it; with an empty functionList the chain
// resolves with undefined instead.
const run = function( functionList ) {
    let data = [];
    return functionList.reduce(function( promise, functionName ) {
        return promise.then(function() {
            return makeRequest( functionName, data );
        });
    }, Promise.resolve());
};
// Entry point: scrape every page, then print one autocomplete entry per
// scraped function. NOTE(review): if any makeRequest() stalls (see its
// notes) this .then never fires; buildAutocomplete() also throws if
// parseFunctionParameters() returned false for an entry.
run( functionList )
    .then(function( foundFunctions ) {
        console.log( '———————————————————————————————— ' );
        for ( let data of foundFunctions ) {
            let fn = parseFunctionParameters( data );
            let builtAutocomplete = buildAutocomplete( fn );
            console.log( builtAutocomplete );
        }
    });
/*
assignAdditionalCustomfields(self, data) {
console.log('└── Getting Additional Customfield Data...');
return data.itemSets.reduce(function(promise, itemSet) {
return promise.then(function() {
return self.getAdditionalAttributes(self, data, itemSet);
});
}, Promise.resolve());
}
*/ |
<reponame>Cxgoal/optimization
from decimal import Decimal
import numpy as np
def cholesky_fraction(g, fit_decimal=False):
    """Square-root-free Cholesky (LDL^T) factorisation: g = L D L^T.

    :param g: symmetric positive-definite matrix (2-D ndarray).
    :param fit_decimal: when True, carry out the arithmetic on
        ``decimal.Decimal`` entries (object dtype) for extra precision.
        NOTE(review): with Decimal entries the empty ``np.sum([])`` below
        yields a float 0.0, and Decimal arithmetic with floats raises
        TypeError — verify the Decimal path before relying on it.
    :return: ``(l, d)`` with ``l`` unit lower-triangular, ``d`` diagonal.
    """
    l = np.eye(g.shape[0])
    d = np.zeros(g.shape)
    if fit_decimal is True:
        # np.object was removed in NumPy 1.24; the builtin is equivalent.
        l, d, g = l.astype(object), d.astype(object), g.astype(object)
        for i in range(l.shape[0]):
            for j in range(l.shape[1]):
                l[i][j], d[i][j], g[i][j] = Decimal(l[i][j]), Decimal(d[i][j]), Decimal(g[i][j])
    for j in range(g.shape[0]):
        # d_jj = g_jj - sum_k l_jk^2 d_kk
        d[j][j] = g[j][j] - np.sum([l[j][k] ** 2 * d[k][k] for k in range(j)])
        for i in range(j + 1, g.shape[0]):
            # l_ij = (g_ij - sum_k l_ik l_jk d_kk) / d_jj
            l[i][j] = 1 / d[j][j] * (g[i][j] - (np.sum([l[i][k] * l[j][k] * d[k][k] for k in range(j)])))
    return l, d
def modify_cholesky_fraction(g, fit_decimal=False, machine_err=1e-15):
    """Modified Cholesky (Gill–Murray style) factorisation g ≈ L D L^T.

    Diagonal entries of D are floored by ``delta`` and inflated by
    ``theta**2 / beta**2`` so a factorisation exists even when ``g`` is
    not positive definite.

    :param g: symmetric matrix (2-D ndarray). The off-diagonal bookkeeping
        mutates ``g`` in place on the float path — pass a copy if the
        caller still needs the original.
    :param fit_decimal: when True, run the arithmetic on ``Decimal``
        entries (object dtype) for extra precision.
    :param machine_err: machine-precision bound feeding ``delta``/``beta``.
    :return: ``(l, d)`` with ``l`` unit lower-triangular, ``d`` diagonal.
    """
    # np.float / np.object were removed in NumPy 1.24; the builtins are
    # exact replacements for the original aliases.
    l = np.eye(g.shape[0]).astype(float)
    d = np.zeros(g.shape, float)
    if fit_decimal is True:
        l, d, g = l.astype(object), d.astype(object), g.astype(object)
        for i in range(l.shape[0]):
            for j in range(l.shape[0]):
                l[i][j], d[i][j], g[i][j] = Decimal(l[i][j]), Decimal(d[i][j]), Decimal(g[i][j])
        machine_err = Decimal(machine_err)
    # Gill–Murray parameters: delta floors D, beta bounds the subdiagonals.
    epsilon = np.max(np.abs(g - np.diag(np.diag(g))))
    gamma = np.max(np.abs(np.diag(g)))
    delta = machine_err * np.max([epsilon + gamma, 1])
    condition_num = np.sqrt(g.shape[0] ** 2 - 1)
    condition_num = Decimal(condition_num) if fit_decimal is True else condition_num
    beta = np.sqrt(np.max([gamma, epsilon / condition_num, machine_err]))
    for j in range(g.shape[0]):
        d[j][j] = np.max([delta, np.abs(g[j][j] - np.sum([g[j][r] * l[j][r] for r in range(0, j)]))])
        for i in range(j + 1, g.shape[0]):
            g[i][j] = g[i][j] - np.sum([l[j][r] * g[i][r] for r in range(0, j)])
        # Largest subdiagonal element of the freshly updated column j.
        if j == g.shape[0] - 1:
            theta = 0
        else:
            theta = np.max([np.abs(g[i][j]) for i in range(j + 1, g.shape[0])])
        d[j][j] = np.max([d[j][j], theta ** 2 / beta ** 2])
        for i in range(j + 1, g.shape[0]):
            l[i][j] = g[i][j] / d[j][j]
        # (Removed a leftover per-element debug print that also mislabelled
        # l[i][j] as g[i][j].)
    return l, d
def bunch_parlett_fraction(a, magic_num=2/3):
    """Bunch–Parlett style symmetric factorisation with 1x1/2x2 pivots.

    Produces ``L`` (unit lower-triangular columns), block-diagonal ``D``
    and permutation ``P`` from the pivoting recorded in ``y``.

    :param a: symmetric matrix; it is copied, the input is not modified.
    :param magic_num: pivot threshold — a 1x1 pivot is chosen when
        ``|a_tt| > magic_num * |a_ls|``.
        NOTE(review): the classic Bunch–Parlett constant is
        (1 + sqrt(17)) / 8 ≈ 0.64; 2/3 is close but not identical —
        confirm the intended value.
    :return: ``(L, D, P)``.
    """
    A = a.copy()
    n, m, k = A.shape[0], 0, 0
    # y tracks the accumulated row/column permutation; P is built from it.
    # (np.float was removed in NumPy 1.24; builtin float is equivalent.)
    y, L, D = np.arange(n), np.zeros(A.shape, float), np.zeros(A.shape, float)
    while m < n:
        # find a_tt
        tt = np.argmax(np.abs(np.diag(A))[m:])+m
        att = A[tt][tt]
        # find a_ls
        if m == n-1:
            als = 0
        else:
            tmp = np.argmax(np.abs(A-np.diag(np.diag(A))), axis=1)
            # NOTE(review): np.array(<generator>) yields a 0-d object
            # array, so this argmax does not scan the rows as the list
            # comprehension form would — verify against the intended
            # "largest off-diagonal" search.
            l = np.argmax(np.array([np.abs(A[i+m][tmp[i+m]])] for i in range(n-m)))
            try:
                als = A[l+m][tmp[l]]
            except:
                print(A)
                print(m, l, tmp[l], tmp)
                raise ValueError("")
            l, s = np.max([l+m, tmp[l]]), np.min([l+m, tmp[l]])
        if att == 0 and als == 0:
            # Remaining block is (numerically) zero — nothing left to pivot.
            break
        if np.abs(att) > magic_num * np.abs(als):
            # 1x1 block: swap the pivot to position m, eliminate, advance 1.
            A[[m, tt], :] = A[[tt, m], :]
            A[:, [m, tt]] = A[:, [tt, m]]
            y[[tt, m]] = y[[m, tt]]
            # compute
            dmm = A[m, m]
            lm = (A[:, m] / dmm).reshape(-1, 1)
            A = A-dmm * np.dot(lm, lm.T)
            D[m][m] = dmm
            L[:, m] = lm.reshape(-1)
            L[[m, tt], :k] = L[[tt, m], :k]
            m = m+1
        else:
            # 2x2 block: bring the (s, l) pair to (m, m+1), eliminate both.
            A[[m, s, m+1, l], :] = A[[s, m, l, m+1], :]
            A[:, [m, s, m+1, l]] = A[:, [s, m, l, m+1]]
            y[[s, m, l, m+1]] = y[[m, s, m+1, l]]
            D1 = A[m: m+2, m: m+2]
            L1 = np.dot(A[:, m:m+2], np.linalg.inv(D1))
            A = A - np.dot(np.dot(L1, D1), L1.T)
            D[m:m+2, m:m+2] = D1
            L[:, m:m+2] = L1
            m = m+2
        k = k+1
    P = np.eye(n)
    P = P[:, y]
    """
    print("L is {}\n"
    "D is {}\n"
    "P is {}\n".format(L, D, P))
    """
    return L, D, P
<gh_stars>0
#pragma once
#include "EventInfo.h"
namespace Lunia {
namespace XRated {
namespace Database {
namespace Info {
// Definition of the keyword-lookup table shared by every EventUnit.
EventUnit::StringTable EventUnit::sharedStringTable;

// Default-constructs an "empty" event: no type, unknown schedule, unit
// intervals, and cleared month / day-of-week selection masks.
EventUnit::EventUnit() :
    eventType(NoEvent),
    reserveType(Unknown),
    specifiedWeek(DateTime::Unknown),
    // NOTE(review): const_cast applied to an enum rvalue — NumberOfWeek is
    // used as a "not set" sentinel; verify the cast is actually needed.
    specifiedDayOfWeek(const_cast<DateTime::Week::type>(DateTime::Week::NumberOfWeek)),
    dailyInterval(1),
    weeklyInterval(1),
    day(0),
    month(0)
{
    // NOTE(review): these memsets size the arrays as int elements, while
    // operator=() copies the same arrays with sizeof(char) — one of the
    // two sizes must be wrong; check the declarations in EventInfo.h.
    memset(selectedMonth, 0, sizeof(int) * (monthCount + 1));
    memset(selectedDayOfWeek, 0, sizeof(int) * DateTime::Week::NumberOfWeek);
    // Re-populates the shared static table on every construction;
    // map::insert ignores duplicate keys, so this is redundant work
    // (and not thread-safe) but harmless in single-threaded use.
    InitStringTable();
}
// --- Trivial accessors: schedule kind, event kind, and payload ---

EventUnit::ReserveType EventUnit::GetReserveType()
{
    return reserveType;
}

void EventUnit::SetReserveType(EventUnit::ReserveType type)
{
    reserveType = type;
}

EventUnit::EventType EventUnit::GetEventType() const
{
    return eventType;
}

void EventUnit::SetEventType(EventUnit::EventType type)
{
    eventType = type;
}

// Replaces the whole event-parameter list (free-form strings from XML).
void EventUnit::SetEventInfo(const ParamContainer& info)
{
    eventInfo = info;
}
// Member-wise copy.
// NOTE(review): declared void (no chaining) and no self-assignment check.
// NOTE(review): the memcpy element size here (sizeof(char)) disagrees with
// the constructor's memset (sizeof(int)) for the same arrays — if the
// arrays hold int, only a quarter of each is copied; verify EventInfo.h.
void EventUnit::operator =(const EventUnit& rhs)
{
    reserveType = rhs.reserveType;
    reserveTypeStr = rhs.reserveTypeStr;
    eventTime = rhs.eventTime;
    eventType = rhs.eventType;
    eventTypeStr = rhs.eventTypeStr;
    specifiedWeek = rhs.specifiedWeek;
    specifiedWeekStr = rhs.specifiedWeekStr;
    specifiedDayOfWeek = rhs.specifiedDayOfWeek;
    specifiedDayOfWeekStr = rhs.specifiedDayOfWeekStr;
    selectedMonthStr = rhs.selectedMonthStr;
    memcpy(selectedMonth, (void*)rhs.selectedMonth, sizeof(char) * (monthCount + 1));
    selectedDayOfWeekStr = rhs.selectedDayOfWeekStr;
    memcpy(selectedDayOfWeek, (void*)rhs.selectedDayOfWeek, sizeof(char) * DateTime::Week::NumberOfWeek);
    dailyInterval = rhs.dailyInterval;
    weeklyInterval = rhs.weeklyInterval;
    day = rhs.day;
    month = rhs.month;
    year = rhs.year;
    eventTerm = rhs.eventTerm;
    eventInfo = rhs.eventInfo;
}
// Populates the shared keyword table used by GetDataFromString().
// A single map serves three domains (EventType ids, ReserveType /
// week-ordinal ids, day-of-week indices); several keys are deliberate
// aliases mapping to the same id (e.g. "stageevent"/"beginstageevent").
// std::map::insert ignores duplicate keys, so repeated calls are no-ops.
void EventUnit::InitStringTable()
{
    // EventType ids
    sharedStringTable.insert(std::make_pair(L"stageevent", 1));
    sharedStringTable.insert(std::make_pair(L"beginstageevent", 1));
    sharedStringTable.insert(std::make_pair(L"endstageevent", 2));
    sharedStringTable.insert(std::make_pair(L"beginscriptevent", 3));
    sharedStringTable.insert(std::make_pair(L"endscriptevent", 4));
    sharedStringTable.insert(std::make_pair(L"beginpresentevent", 5));
    sharedStringTable.insert(std::make_pair(L"beginleveluppresentevent", 5));
    sharedStringTable.insert(std::make_pair(L"endpresentevent", 6));
    sharedStringTable.insert(std::make_pair(L"endleveluppresentevent", 6));
    sharedStringTable.insert(std::make_pair(L"beginplaytimepresentevent", 7));
    sharedStringTable.insert(std::make_pair(L"endplaytimepresentevent", 8));
    sharedStringTable.insert(std::make_pair(L"beginquestevent", 9));
    sharedStringTable.insert(std::make_pair(L"endquestevent", 10));
    sharedStringTable.insert(std::make_pair(L"begindungeonstageevent", 12));
    sharedStringTable.insert(std::make_pair(L"enddungeonstageevent", 13));
    sharedStringTable.insert(std::make_pair(L"beginnpcdropevent", 14));
    sharedStringTable.insert(std::make_pair(L"endnpcdropevent", 15));
    // ReserveType ids (1-5) and week-ordinal ("first".."last") ids (1-5)
    sharedStringTable.insert(std::make_pair(L"once", 1));
    sharedStringTable.insert(std::make_pair(L"daily", 2));
    sharedStringTable.insert(std::make_pair(L"weekly", 3));
    sharedStringTable.insert(std::make_pair(L"monthly", 4));
    sharedStringTable.insert(std::make_pair(L"monthlyspecific", 5));
    sharedStringTable.insert(std::make_pair(L"first", 1));
    sharedStringTable.insert(std::make_pair(L"second", 2));
    sharedStringTable.insert(std::make_pair(L"third", 3));
    sharedStringTable.insert(std::make_pair(L"fourth", 4));
    sharedStringTable.insert(std::make_pair(L"last", 5));
    // Day of week (0 = Sunday .. 6 = Saturday).
    // NOTE(review): the key L"wedneday" is misspelled — if XML authors
    // write "wednesday" the lookup fails; keep/fix in tandem with data.
    sharedStringTable.insert(std::make_pair(L"sun", 0));
    sharedStringTable.insert(std::make_pair(L"sunday", 0));
    sharedStringTable.insert(std::make_pair(L"mon", 1));
    sharedStringTable.insert(std::make_pair(L"monday", 1));
    sharedStringTable.insert(std::make_pair(L"tue", 2));
    sharedStringTable.insert(std::make_pair(L"tuesday", 2));
    sharedStringTable.insert(std::make_pair(L"wed", 3));
    sharedStringTable.insert(std::make_pair(L"wedneday", 3));
    sharedStringTable.insert(std::make_pair(L"thur", 4));
    sharedStringTable.insert(std::make_pair(L"thursday", 4));
    sharedStringTable.insert(std::make_pair(L"fri", 5));
    sharedStringTable.insert(std::make_pair(L"friday", 5));
    sharedStringTable.insert(std::make_pair(L"sat", 6));
    sharedStringTable.insert(std::make_pair(L"saturday", 6));
}
// Normalises the token in place (trim + lower-case), then resolves it
// through the shared keyword table. Returns false for unknown tokens and
// leaves `out` untouched in that case.
bool EventUnit::GetDataFromString(std::wstring& in, int& out)
{
    StringUtil::Trim(in);
    StringUtil::ToLowerCase(in);
    StringTable::const_iterator found = sharedStringTable.find(in);
    if (found == sharedStringTable.end())
    {
        return false;
    }
    out = found->second;
    return true;
}
// Next scheduled fire time (advanced by the Set*EventTime helpers).
const DateTime& EventUnit::GetEventTime() const
{
    return eventTime;
}

// Computes the previous occurrence relative to the currently scheduled one.
// NOTE(review): Monthly/MonthlySpecific step back a whole Year (the month
// selection recurs yearly) — confirm this granularity is intended.
void EventUnit::GetLastEventTime(DateTime& dateTime) const
{
    dateTime = GetEventTime();
    switch (reserveType)
    {
    case Daily:
        dateTime.Add(DateTime::Unit::Day, -1);
        break;
    case Weekly:
        dateTime.Add(DateTime::Unit::Day, -7);
        break;
    case Monthly:
    case MonthlySpecific:
        dateTime.Add(DateTime::Unit::Year, -1);
        break;
    }
}
// Advances eventTime to the next occurrence according to reserveType and
// returns IsValid() for the new time. Once/Immediately events never repeat.
// NOTE(review): finalTime / now / diff below are computed but never used.
bool EventUnit::SetNextEventTime()
{
    DateTime finalTime = eventTime;
    DateTime now = DateTime::Now();
    DateTime diff;
    switch (reserveType)
    {
    case Once:
    case Immediately:
    {
        // One-shot events have no next occurrence.
        return false;
    }
    case Daily:
    {
        // set next day.
        eventTerm.termUnit = DateTime::Unit::Day;
        eventTerm.term = dailyInterval;
    } break;
    case Weekly:
    {
        int nextWeek = 0;
        int thisWeek = eventTime.GetDayOfWeek();
        if (GetNextSelectedWeek(thisWeek, nextWeek))
        {
            // set next selected week.
            int dayDiff = nextWeek - eventTime.GetDayOfWeek();
            assert(dayDiff > 0);
            eventTerm.termUnit = DateTime::Unit::Day;
            eventTerm.term = dayDiff;
        }
        else
        {
            // set first selected day of week and strides interval of week.
            int dayDiff = nextWeek - eventTime.GetDayOfWeek();
            assert(dayDiff <= 0);
            eventTerm.termUnit = DateTime::Unit::Day;
            eventTerm.term = dayDiff + weeklyInterval * 7;
        }
    } break;
    case Monthly:
    case MonthlySpecific:
    {
        int thisMonth = eventTime.GetDate().GetMonth();
        if (!GetNextSelectedMonth(thisMonth, month))
        {
            // Month list wrapped: move into the next year.
            // NOTE(review): Add()'s return value is discarded here, yet the
            // code below assigns `eventTime = eventTime.Add(...)` — verify
            // whether Add() mutates in place or returns a copy.
            eventTime.Add(DateTime::Unit::Year, 1);
        }
        eventTime.Set(DateTime::Unit::Month, static_cast<uint16>(month));
        if (reserveType == MonthlySpecific)
        {
            // Resolve e.g. "second Tuesday" within the target month.
            day = DateTime::GetSpecificDayOfWeek(static_cast<DateTime::SpecificType>(specifiedWeek), specifiedDayOfWeek, eventTime);
            Logger::GetInstance().Info(L"GetSpecificDayOfWeek, Month : {0}, Day : {1}", month, day);
        }
        eventTime.Set(DateTime::Unit::Day, day);
    } break;
    }
    // it needs assignment?
    // NOTE(review): for Monthly/MonthlySpecific eventTerm was not set in
    // this call, so this Add() applies whatever term an earlier branch
    // left behind — verify that is intended.
    eventTime = eventTime.Add(eventTerm);
    Logger::GetInstance().Info(L"ReserveType : {0}, NextEventTime : {1}", GetReserveType(), eventTime.ToString().c_str());
    return IsValid();
}
// Computes the event's first occurrence from the wall clock; if that time
// has already passed, advances once via SetNextEventTime().
// NOTE(review): near-duplicate of SetFirstEventTimeAfter(), which differs
// only in taking `now` from its parameter — consider delegating to
// SetFirstEventTimeAfter(DateTime::Now()).
// NOTE(review): finalTime and diff are computed but never used.
void EventUnit::SetFirstEventTime()
{
    DateTime finalTime = eventTime;
    DateTime now = DateTime::Now();
    DateTime diff;
    switch (reserveType)
    {
    case Immediately:
    {
        // Fires right away; keep eventTime as-is.
        break;
    }
    case Once:
    {
        // Absolute date taken from the deserialized Year/Month/Day fields.
        eventTime.Set(DateTime::Unit::Year, year);
        eventTime.Set(DateTime::Unit::Month, month);
        eventTime.Set(DateTime::Unit::Day, day);
        break;
    }
    case Daily:
    {
        // First run today (at the configured StartAt time-of-day).
        eventTime.Set(DateTime::Unit::Year, now.GetDate().GetYear());
        eventTime.Set(DateTime::Unit::Month, now.GetDate().GetMonth());
        eventTime.Set(DateTime::Unit::Day, now.GetDate().GetDay());
    } break;
    case Weekly:
    {
        eventTime.Set(DateTime::Unit::Year, now.GetDate().GetYear());
        eventTime.Set(DateTime::Unit::Month, now.GetDate().GetMonth());
        eventTime.Set(DateTime::Unit::Day, now.GetDate().GetDay());
        int nextWeek = 0;
        int thisWeek = now.GetDate().GetDayOfWeek();
        // If today is already a selected weekday, start today; otherwise
        // jump to the next selected weekday (possibly striding weeks).
        if (selectedDayOfWeek[thisWeek] == 0)
        {
            if (GetNextSelectedWeek(thisWeek, nextWeek))
            {
                // set next selected week.
                int dayDiff = nextWeek - now.GetDayOfWeek();
                eventTerm.termUnit = DateTime::Unit::Day;
                eventTerm.term = dayDiff;
            }
            else
            {
                // set first selected day of week and strides interval of week.
                int dayDiff = nextWeek - thisWeek;
                assert(dayDiff <= 0);
                eventTerm.termUnit = DateTime::Unit::Day;
                eventTerm.term = dayDiff + weeklyInterval * 7;
            }
            eventTime = eventTime.Add(eventTerm);
        }
    } break;
    case Monthly:
    case MonthlySpecific:
    {
        eventTime.Set(DateTime::Unit::Year, now.GetDate().GetYear());
        eventTime.Set(DateTime::Unit::Day, 1);
        int thisMonth = now.GetDate().GetMonth();
        if (selectedMonth[thisMonth] == 0)
        {
            if (!GetNextSelectedMonth(thisMonth, month))
            {
                // Month list wrapped into the next year.
                // NOTE(review): Add()'s return value is discarded here,
                // though other call sites assign it — verify Add() mutates.
                eventTime.Add(DateTime::Unit::Year, 1);
            }
        }
        else
        {
            month = thisMonth;
        }
        eventTime.Set(DateTime::Unit::Month, static_cast<uint16>(month));
        if (reserveType == MonthlySpecific)
        {
            // Resolve e.g. "second Tuesday" within the chosen month.
            day = DateTime::GetSpecificDayOfWeek(static_cast<DateTime::SpecificType>(specifiedWeek), specifiedDayOfWeek, eventTime);
            Logger::GetInstance().Info(L"GetSpecificDayOfWeek, Month : {0}, Day : {1}", month, day);
        }
        eventTime.Set(DateTime::Unit::Day, day);
    } break;
    }
    Logger::GetInstance().Info(L"ReserveType : {0}, SetFirstEventTime : {1}", GetReserveType(), eventTime.ToString().c_str());
    if (now > eventTime)
    {
        Logger::GetInstance().Info(L"FirstTime is Already Passed !!!");
        SetNextEventTime();
    }
}
// Same computation as SetFirstEventTime(), but relative to the supplied
// server time instead of the local wall clock.
// NOTE(review): this is a copy-paste duplicate of SetFirstEventTime()
// differing only in the source of `now` — strong candidate for merging.
// NOTE(review): finalTime and diff are computed but never used.
void EventUnit::SetFirstEventTimeAfter(const DateTime& serverTime)
{
    DateTime finalTime = eventTime;
    DateTime now = serverTime;
    DateTime diff;
    switch (reserveType)
    {
    case Immediately:
    {
        // Fires right away; keep eventTime as-is.
        break;
    }
    case Once:
    {
        // Absolute date taken from the deserialized Year/Month/Day fields.
        eventTime.Set(DateTime::Unit::Year, year);
        eventTime.Set(DateTime::Unit::Month, month);
        eventTime.Set(DateTime::Unit::Day, day);
        break;
    }
    case Daily:
    {
        eventTime.Set(DateTime::Unit::Year, now.GetDate().GetYear());
        eventTime.Set(DateTime::Unit::Month, now.GetDate().GetMonth());
        eventTime.Set(DateTime::Unit::Day, now.GetDate().GetDay());
    } break;
    case Weekly:
    {
        eventTime.Set(DateTime::Unit::Year, now.GetDate().GetYear());
        eventTime.Set(DateTime::Unit::Month, now.GetDate().GetMonth());
        eventTime.Set(DateTime::Unit::Day, now.GetDate().GetDay());
        int nextWeek = 0;
        int thisWeek = now.GetDate().GetDayOfWeek();
        if (selectedDayOfWeek[thisWeek] == 0)
        {
            if (GetNextSelectedWeek(thisWeek, nextWeek))
            {
                // set next selected week.
                int dayDiff = nextWeek - now.GetDayOfWeek();
                eventTerm.termUnit = DateTime::Unit::Day;
                eventTerm.term = dayDiff;
            }
            else
            {
                // set first selected day of week and strides interval of week.
                int dayDiff = nextWeek - thisWeek;
                assert(dayDiff <= 0);
                eventTerm.termUnit = DateTime::Unit::Day;
                eventTerm.term = dayDiff + weeklyInterval * 7;
            }
            eventTime = eventTime.Add(eventTerm);
        }
    } break;
    case Monthly:
    case MonthlySpecific:
    {
        eventTime.Set(DateTime::Unit::Year, now.GetDate().GetYear());
        eventTime.Set(DateTime::Unit::Day, 1);
        int thisMonth = now.GetDate().GetMonth();
        if (selectedMonth[thisMonth] == 0)
        {
            if (!GetNextSelectedMonth(thisMonth, month))
            {
                // NOTE(review): Add()'s return value is discarded here,
                // though other call sites assign it — verify Add() mutates.
                eventTime.Add(DateTime::Unit::Year, 1);
            }
        }
        else
        {
            month = thisMonth;
        }
        eventTime.Set(DateTime::Unit::Month, static_cast<uint16>(month));
        if (reserveType == MonthlySpecific)
        {
            day = DateTime::GetSpecificDayOfWeek(static_cast<DateTime::SpecificType>(specifiedWeek), specifiedDayOfWeek, eventTime);
            Logger::GetInstance().Info(L"GetSpecificDayOfWeek, Month : {0}, Day : {1}", month, day);
        }
        eventTime.Set(DateTime::Unit::Day, day);
    } break;
    }
    Logger::GetInstance().Info(L"ReserveType : {0}, SetFirstEventTime : {1}", GetReserveType(), eventTime.ToString().c_str());
    if (now > eventTime)
    {
        Logger::GetInstance().Info(L"FirstTime is Already Passed !!!");
        SetNextEventTime();
    }
}
// Raw event parameters as parsed from the XML definition.
const EventUnit::ParamContainer& EventUnit::GetEventInfo() const
{
    return eventInfo;
}

// Writes the schedule definition back out in the shape Deserialize() reads.
// Note: StartAt persists only the time-of-day portion of eventTime; the
// date parts are re-derived on load by SetFirstEventTime().
void EventUnit::Serialize(Serializer::IStreamWriter& out) const
{
    out.Begin(L"Stage::EventUnit");
    // data reverse.
    out.Write(L"EventType", eventTypeStr);
    out.Write(L"EventInfo", eventInfo);
    out.Write(L"ReserveType", reserveTypeStr);
    std::wstring startAt = eventTime.GetTime().ToString();
    out.Write(L"StartAt", startAt);
    out.Write(L"Year", year);
    out.Write(L"Month", month);
    out.Write(L"Day", day);
    out.Write(L"SelectedMonth", selectedMonthStr);
    out.Write(L"SelectedDayOfWeek", selectedDayOfWeekStr);
    out.Write(L"SpecifiedWeek", specifiedWeekStr);
    out.Write(L"SpecifiedDayOfWeek", specifiedDayOfWeekStr);
    out.Write(L"WeeklyInterval", weeklyInterval);
    out.Write(L"DailyInterval", dailyInterval);
}
// Read the reservation settings previously written by Serialize().
// Enum fields are stored as strings and converted with GetDataFromString;
// on conversion failure they fall back to a safe default where one exists.
// NOTE(review): the reinterpret_cast<int&> of enum members assumes the
// enums have int as their underlying type — confirm on all target compilers.
void EventUnit::Deserialize(Serializer::IStreamReader& in)
{
std::wstring startTime;
in.Begin(L"Stage::EventUnit");
in.Read(L"EventType", eventTypeStr);
if (!GetDataFromString(eventTypeStr, reinterpret_cast<int&>(eventType)))
{
eventType = NoEvent;
}
// Event Infos (vector)
in.Read(L"EventInfo", eventInfo, std::vector<std::wstring>());
in.Read(L"ReserveType", reserveTypeStr);
if (!GetDataFromString(reserveTypeStr, reinterpret_cast<int&>(reserveType)))
{
reserveType = Unknown;
}
in.Read(L"StartAt", startTime); // essential
in.Read(L"Year", year, 0);
in.Read(L"Month", month, 0);
in.Read(L"Day", day, 0); // essential in most of all.
// Rebuild only the time-of-day from "StartAt"; the date parts come from
// the Year/Month/Day fields read above.
DateTime::Time time(startTime.c_str());
eventTime.SetTime(time);
in.Read(L"SelectedMonth", selectedMonthStr, std::wstring(L"")); // for monthly (optional)
ParseSelectedMonth(selectedMonthStr);
in.Read(L"SelectedDayOfWeek", selectedDayOfWeekStr, std::wstring(L"")); // for weekly (optional)
ParseSelectedWeek(selectedDayOfWeekStr);
in.Read(L"SpecifiedWeek", specifiedWeekStr, std::wstring(L"")); // for monthly specific
GetDataFromString(specifiedWeekStr, reinterpret_cast<int&>(specifiedWeek));
in.Read(L"SpecifiedDayOfWeek", specifiedDayOfWeekStr, std::wstring(L"")); // for monthly specific
GetDataFromString(specifiedDayOfWeekStr, reinterpret_cast<int&>(specifiedDayOfWeek));
in.Read(L"WeeklyInterval", weeklyInterval, 1); // for weekly
in.Read(L"DailyInterval", dailyInterval, 1); // for daily
}
// Overwrite the scheduled event time and mirror its calendar components
// into the separately-serialized year/month/day fields so both stay
// consistent.
void EventUnit::SetEventTime(const DateTime& dateTime)
{
eventTime = dateTime;
year = dateTime.GetDate().GetYear();
month = dateTime.GetDate().GetMonth();
day = dateTime.GetDate().GetDay();
}
// Validate the reservation settings for the current reserveType.
// Returns false when a required field is missing or out of range, and (for
// one-shot reservations) when the scheduled time has already passed.
bool EventUnit::IsValid()
{
// we need validation.
bool isValid = false;
switch (reserveType)
{
case Immediately:
{
// No settings required: fires right away.
isValid = true;
}
break;
case Once:
{
// eventTime.IsValid() [day, month]
isValid = eventTime.IsValid();
}
break;
case Daily:
{
// eventTime.GetTime().IsValid() [just need time]
// dailyInterval limitation(365)
isValid = eventTime.GetTime().IsValid();
if (dailyInterval <= 0 || dailyInterval >= 365)
{
Logger::GetInstance().Info(L"Invalid dailyInterval Range : {0}", dailyInterval);
isValid = false;
break;
}
}
break;
case Weekly:
{
// weeklyInterval limitation(54) => one year limit(54 weeks!)
// eventTime.GetTime().IsValid() [just need time]
// at least one selected week.
isValid = eventTime.GetTime().IsValid();
if (!isValid)
{
Logger::GetInstance().Info(L"Invalid StartTime : {0}", eventTime.GetTime().ToString());
break;
}
if (weeklyInterval <= 0 || weeklyInterval >= 54)
{
Logger::GetInstance().Info(L"Invalid weeklyInterval Range : {0}", weeklyInterval);
isValid = false;
break;
}
// At least one weekday must be selected for a weekly reservation.
bool findSelectedDayOfweek = false;
for (int index = 0; index < DateTime::Week::NumberOfWeek; ++index)
{
if (selectedDayOfWeek[index])
{
findSelectedDayOfweek = true;
isValid = true;
break;
}
}
if (!findSelectedDayOfweek)
{
Logger::GetInstance().Info(L"No Selected Day Of Week!");
isValid = false;
}
}
break;
case Monthly:
{
// eventTime.GetTime().IsValid()
// day, and at least one selected month. [day and selected months]
isValid = eventTime.GetTime().IsValid();
if (!isValid || day <= 0)
{
isValid = false;
break;
}
// At least one month must be selected (selectedMonth is 1-based).
bool findSelectedMonth = false;
for (int index = 1; index <= monthCount; ++index)
{
if (selectedMonth[index])
{
findSelectedMonth = true;
break;
}
}
if (!findSelectedMonth)
{
Logger::GetInstance().Info(L"No Selected Day Of Month!");
isValid = false;
}
}
break;
case MonthlySpecific:
{
// eventTime.GetTime().IsValid()
// specifiedWeek is default first, but have to selected.
// specifiedDayOfWeek has to selected.
isValid = eventTime.GetTime().IsValid();
if (!isValid || specifiedDayOfWeek == DateTime::Week::NumberOfWeek)
{
Logger::GetInstance().Info(L"No settings: specifiedDayOfWeek");
isValid = false;
break;
}
if (specifiedWeek == DateTime::Unknown)
{
Logger::GetInstance().Info(L"No settings: specifiedWeek");
isValid = false;
break;
}
} break;
default:
{
isValid = false;
} break;
}
// A one-shot reservation whose time is already in the past can never fire.
if (GetReserveType() == EventUnit::Once &&
DateTime::Now() > GetEventTime())
{
isValid = false;
}
return isValid;
}
// Parse a comma-separated list of month numbers (e.g. L"1,3,12") and mark
// each valid month in selectedMonth (1-based). Out-of-range tokens are
// skipped silently.
void EventUnit::ParseSelectedMonth(std::wstring selectedMonths)
{
    if (selectedMonths.empty())
    {
        return;
    }
    typedef std::vector<std::wstring> TokenContainer;
    TokenContainer tokens;
    StringUtil::ToLowerCase(selectedMonths);
    StringUtil::Trim(selectedMonths);
    StringUtil::Split(selectedMonths.begin(), selectedMonths.end(), ',', tokens);
    int index = 0;
    for (TokenContainer::iterator itr = tokens.begin();
         itr != tokens.end();
         ++itr)
    {
        // Valid range: 1~12.
        index = StringUtil::ToInt(itr->c_str());
        // BUG FIX: the original test used '&&' (index < 1 && index > monthCount),
        // which is never true, so out-of-range tokens were written past the
        // bounds of selectedMonth. '||' rejects them as intended.
        if (index < 1 || index > monthCount)
        {
            continue;
        }
        selectedMonth[index] = 1;
    }
}
// Find the next selected month strictly after 'month' (1-based).
// Returns true with nextMonth set when a later month in the same year is
// selected; otherwise wraps around, sets nextMonth to the first selected
// month (so the caller can advance the year), and returns false.
bool EventUnit::GetNextSelectedMonth(int month, int& nextMonth)
{
    if (month < 1)
    {
        return false;
    }
    bool findNextMonth = false;
    while (++month <= EventUnit::monthCount)
    {
        if (selectedMonth[month] != 0)
        {
            findNextMonth = true;
            nextMonth = month;
            break;
        }
    }
    if (!findNextMonth)
    {
        // month starts at index one.
        int index = 1;
        // BUG FIX: the bounds check must run before the array read; the
        // original condition read selectedMonth[index] first, walking past
        // the end of the array when no month is selected at all.
        while (index <= monthCount && !selectedMonth[index])
        {
            ++index;
        }
        nextMonth = index;
    }
    return findNextMonth;
}
// Parse a comma-separated list of weekday tokens and mark each valid day
// in selectedDayOfWeek (Sunday..Saturday). Unparseable or out-of-range
// tokens are logged and skipped.
void EventUnit::ParseSelectedWeek(std::wstring selectedWeeks)
{
    if (selectedWeeks.empty())
    {
        return;
    }
    typedef std::vector<std::wstring> TokenContainer;
    TokenContainer tokens;
    StringUtil::ToLowerCase(selectedWeeks);
    StringUtil::Trim(selectedWeeks);
    StringUtil::Split(selectedWeeks.begin(), selectedWeeks.end(), ',', tokens);
    int index = 0;
    for (TokenContainer::iterator itr = tokens.begin();
         itr != tokens.end();
         ++itr)
    {
        if (GetDataFromString(*itr, index))
        {
            // FIX: removed the assert() on the parsed value — this is
            // external (deserialized) data, and the assert aborted debug
            // builds on exactly the bad input that the guard below already
            // handles gracefully.
            if (index < 0 || index >= DateTime::Week::NumberOfWeek)
            {
                Logger::GetInstance().Info(L"Invalid Week Index: {0}", index);
                continue;
            }
            selectedDayOfWeek[index] = 1;
        }
    }
}
// Find the next selected weekday strictly after 'week' (0-based, Sunday
// first). Returns true with nextWeek set when a later day in the same week
// is selected; otherwise wraps around to the first selected day and
// returns false (caller advances to the next week interval).
bool EventUnit::GetNextSelectedWeek(int week, int& nextWeek)
{
    if (week < 0)
    {
        return false;
    }
    bool findNextWeek = false;
    while (++week < DateTime::Week::NumberOfWeek)
    {
        if (selectedDayOfWeek[week] != 0)
        {
            findNextWeek = true;
            nextWeek = week;
            break;
        }
    }
    if (!findNextWeek)
    {
        int index = 0;
        // BUG FIX: bound the wrap-around scan. The original loop had no
        // bounds check, so an empty selection read past the end of
        // selectedDayOfWeek (and could loop indefinitely).
        while (index < DateTime::Week::NumberOfWeek && !selectedDayOfWeek[index])
        {
            ++index;
        }
        nextWeek = index;
    }
    return findNextWeek;
}
}
}
}
}
|
<gh_stars>0
import React from 'react';
import PropTypes from 'prop-types';
import { Container, Box, Header, Title, Time, Content } from './styles';
export default function Show({ navigation }) {
const { question, answer, time } = navigation.state.params;
return (
<Container>
<Box>
<Header>
<Title>PERGUNTA</Title>
<Time>{time}</Time>
</Header>
<Content>{question}</Content>
<Header style={{ marginTop: 20 }}>
<Title>RESPOSTA</Title>
</Header>
<Content>{answer}</Content>
</Box>
</Container>
);
}
// Runtime prop validation: the screen requires a react-navigation prop
// whose state.params may carry question/answer/time strings.
Show.propTypes = {
  navigation: PropTypes.shape({
    state: PropTypes.shape({
      params: PropTypes.shape({
        question: PropTypes.string,
        answer: PropTypes.string,
        time: PropTypes.string
      })
    })
  }).isRequired
};
|
#ifndef INCLUDED_NETWORK_MESSAGE_HOLDER_H
#define INCLUDED_NETWORK_MESSAGE_HOLDER_H
#include "message.h"
#include "boost/ptr_container/ptr_list.hpp"
#include <boost/ptr_container/serialize_ptr_list.hpp>
#include "boost/static_assert.hpp"
#include <mutex>
#include <condition_variable>
namespace network {
// Single-producer message queue with a two-stage handoff: the producer
// accumulates into mMessages, then Publish() moves them to
// mPublishedMessages for a consumer to drain. Only the published list is
// serialized. Locking contract per method is documented below.
// NOTE(review): std::auto_ptr is deprecated (removed in C++17) — migrating
// to std::unique_ptr would change the public signatures, so it is flagged
// here rather than changed.
class MessageList
{
public:
typedef ::boost::ptr_list<Message> Messages_t;
// Staging list; touched only by the producer thread.
Messages_t mMessages;
// Messages handed off to the consumer; guarded by mMutex.
Messages_t mPublishedMessages;
friend class ::boost::serialization::access;
template<class Archive>
void serialize( Archive& ar, const unsigned int version )
{
ar& mPublishedMessages;
}
// no lock needed only the producer thread should access
void Add( std::auto_ptr<Message> message );
// no lock needed only the producer thread should access
void TransferFrom( Messages_t& messages );
// needs lock Publish and TransferPublishedMessagesTo can race
void Publish();
// needs lock Publish and TransferPublishedMessagesTo can race
void TransferPublishedMessagesTo( Messages_t& messages);
// needs lock. It's just a size ofc.
bool HasPublishedMessages() const;
// Expose the synchronization primitives so callers can lock/wait around
// the methods that require it.
std::mutex& GetMutex();
std::condition_variable& GetCV();
private:
std::mutex mMutex;
std::condition_variable mCV;
};
// Process-wide singleton owning the outgoing and incoming network message
// queues. AddOutgoingMessage() is a type-checked convenience wrapper over
// the outgoing list.
class MessageHolder : public platform::Singleton<MessageHolder>
{
public:
friend class platform::Singleton<MessageHolder>;
MessageList mOutgoingMessages;
MessageList mIncomingMessages;
MessageHolder();
MessageList& GetOutgoingMessages();
MessageList& GetIncomingMessages();
// Accepts any Message subclass (enforced at compile time) and queues it
// for sending; silently ignores a null pointer.
template<typename MESSAGE>
void AddOutgoingMessage( std::auto_ptr<MESSAGE> message );
private:
};
// Queue an outgoing message, transferring ownership to the outgoing list.
// Null messages are dropped; the static assert rejects non-Message types
// at compile time.
template<typename MESSAGE>
void MessageHolder::AddOutgoingMessage( std::auto_ptr<MESSAGE> message )
{
{
if ( message.get() == NULL )
{
return;
}
BOOST_STATIC_ASSERT_MSG(
( boost::is_base_of<Message, MESSAGE>::value ),
"MESSAGE must be a descendant of Message!"
);
// Upcast by releasing from the typed auto_ptr into a Message auto_ptr;
// the ptr_list takes ownership.
mOutgoingMessages.Add( std::auto_ptr<Message>(message.release()) );
}
}
} // namespace network
#endif//INCLUDED_NETWORK_MESSAGE_HOLDER_H
|
// UI override configuration grouped by area (layout, auth, icons).
// Each slot, when provided, replaces the default element for that spot.
type Configuration = {
layout: {
Layout?: OptionalElement;
TopMenu?: OptionalElement;
};
auth: {
AuthWidget?: OptionalElement;
};
icons: {
Logo?: OptionalElement;
};
};
// Placeholder shape for an overridable element; the real structure is
// still to be defined (prop1/prop2 are examples from the original author).
type OptionalElement = {
// Define the structure of an optional element here
// For example:
prop1?: string;
prop2?: number;
// Add any other properties as needed
};
<filename>src/utils/math-utils/number-capacity.ts
// Signed digit count of the integer part of n: getCapacity(123) === 3,
// getCapacity(-4567) === -4. For 0 the result is 0; NaN propagates.
export const getCapacity = (n: number): number => {
  const digits = Math.abs(Math.floor(n)).toString().length;
  return sign(n) * digits;
};

// Smallest number with the given digit count's magnitude; capacity 1 maps
// to 1, otherwise 10^capacity.
export const getCapacityBase = (capacity: number): number => {
  if (capacity === 1) {
    return 1;
  }
  return Math.pow(10, capacity);
};

// Plain power of ten for a digit count.
export const getNumberByCapacity = (capacity: number): number => {
  return Math.pow(10, capacity);
};

// 10 raised to the signed digit count of n.
export const getBaseCapacityNumber = (n: number): number => {
  return getNumberByCapacity(getCapacity(n));
};

// -1 / 0 / 1 depending on the sign of n; 0 and NaN are returned unchanged.
export const sign = (n: number): number => {
  if (n === 0 || isNaN(n)) {
    return n;
  }
  return n < 0 ? -1 : 1;
};
|
# Start the local RabbitMQ broker (Celery's message transport).
service rabbitmq-server start
# Launch a Celery worker for the portia_dashboard app with the embedded
# beat scheduler (-B) at debug log level; runs in the foreground.
celery worker -A portia_dashboard -B -l debug
|
<filename>server/controllers/sessions.js
const sessionModel = require('./../models/sessionModel');
exports.bookSession = (req, res) => {
sessionModel.addSession(req.body, (err, newSession) => {
if (err) {
console.error(err);
} else {
res.sendStatus(201);
}
});
};
exports.deleteSession = (req, res) => {
sessionModel.deleteSession(req.params, (err, result) => {
if (err) {
console.error('Error deleting session from database', err);
} else {
res.status(201).end();
}
});
};
exports.updateSession = (req, res) => {
sessionModel.updateSession(req.params.id, (err, result) => {
if (err) {
console.error('There was an error getting the session info: ', err);
} else {
res.status(201);
}
});
};
exports.getSession = (req, res) => {
var form = {
id: req.params.id,
isTutor: Number(req.query.isTutor)
};
sessionModel.getSession(form, (err, result) => {
if (err) {
console.error('There was an error getting the session info: ', err);
} else {
res.send(result);
}
});
};
|
#!/bin/bash
# System update script: re-executes itself under sudo if needed, then
# applies all pending apt upgrades and cleans package caches.
set -e

# Resolve the directory this script lives in; falls back to $PWD when
# BASH_SOURCE has no directory component (e.g. `bash update.sh`).
function getCurrentDir() {
    local current_dir="${BASH_SOURCE%/*}"
    if [[ ! -d "${current_dir}" ]]; then current_dir="$PWD"; fi
    echo "${current_dir}"
}

current_dir=$(getCurrentDir)

# if not root, run as root
# FIX: the re-exec path is now quoted so a script directory containing
# spaces no longer breaks the sudo invocation.
if (( $EUID != 0 )); then
    sudo "${current_dir}/update.sh"
    exit
fi

# Refresh package metadata, apply all upgrades (including kernel/dist
# upgrades), then remove cached packages and unused dependencies.
apt-get update
apt-get -y upgrade
apt-get -y dist-upgrade
apt-get clean
apt-get -y autoremove
let { makeExecutableSchema, addMockFunctionsToSchema } = require( 'graphql-tools');
let resolvers = require( './resolvers');
// allAppointments(pageSize: Int, page: Int): AppointmentsResult
// GraphQL SDL for a driving-school scheduling API: students, instructors,
// cars, appointments, time slots, and auth. NOTE: comments must stay
// outside this template literal — its exact text is the runtime schema.
const typeDefs = `
  type Query {
    students(pageSize: Int, page: Int, filter: FilterInput): StudentsResult
    student(id: ID): Student
    studentSearch(query: String): [Student]
    cars: [Car]
    car(id: ID): Car
    instructor(id: ID): Instructor
    allInstructors: [Instructor]
    timeSlotsByInstructor(instructorId: ID, date: String): [TimeSlot]
    appointmentById(id: ID): Appointment
    appointmentsByDate(date: String): [Appointment]
    appointmentsByStudent(studentId: ID): [Appointment]
    isAppointmentExist(instructorId: ID, time: String): Boolean
  }
  type TimeSlot {
    time: String
    isAvailable: Boolean,
    classType: String,
    instructorName: String
  }
  input FilterInput {
    filters: [Filter]
  }
  input Filter {
    id: String
    value: String
  }
  type Mutation {
    login(password: String!, username: String!): AuthPayload!
    updateStudent(studentInput: StudentInput): Student
    createStudent(studentInput: StudentInput): Student
    deleteStudent(id: ID): Student
    updateCar(carInput: CarInput): Car
    createCar(carInput: CarInput): Car
    deleteCar(id: ID): Car
    createInstructor(instructorInput: InstructorInput): Instructor
    updateInstructor(instructorInput: InstructorInput): Instructor
    deleteInstructor(id: ID): Instructor
    createAppointment(input: AppointmentInput): Appointment
    updateAppointment(input: AppointmentInput): Appointment
    deleteAppointment(id: ID): Appointment
  }
  type Student {
    address: String
    call: Boolean
    discontinue: Boolean
    dob: String
    firstDay: String
    phone: String
    id: ID
    name: String
    learnerPermitExp: String
    learnerPermitNo: String
    notes: String
    gender: String
    zip: String
  }
  extend type Student {
    lessons: Int
  }
  input StudentInput {
    address: String
    call: Boolean
    discontinue: Boolean
    dob: String
    firstDay: String
    phone: String
    id: ID
    name: String
    learnerPermitExp: String
    learnerPermitNo: String
    notes: String
    gender: String
    zip: String
  }
  type Appointment {
    id: ID
    studentId: ID
    instructorId: ID
    carId: ID
    classType: String
    note: String
    instructor: Instructor
    car: Car
    student: Student
    date: String
    time: String
  }
  input AppointmentInput {
    id: ID
    date: String!
    time: String!
    studentId: ID!
    instructorId: ID!
    carId: ID!
    classType: String!
    note: String
    timezoneOffset: String
  }
  type AppointmentsResult {
    appointments: [Appointment]
    page: Int
    pageSize: Int
    pages: Int
  }
  type StudentsResult {
    students: [Student]
    page: Int
    pageSize: Int
    pages: Int
  }
  type Car {
    id: ID
    no: String
  }
  type Instructor {
    id: ID
    name: String
  }
  input InstructorInput {
    id: ID
    name: String
  }
  input CarInput {
    id: ID
    no: String
  }
  type AuthPayload {
    token: String!
    user: User!
  }
  type User {
    id: ID
    username: String
  }
`;
// Bind the SDL above to the resolver map and export the executable schema
// as this module's sole export.
const schema = makeExecutableSchema({ typeDefs, resolvers });
// export default schema;
module.exports = schema;
require 'spec_helper'
# Feature spec for editing an existing game via the web UI.
# A single game (factory defaults: name 'Skat', author 'Author of Skat',
# etc.) is created before each scenario.
feature 'Updating games' do
  given(:any_date) { Date.today }
  given!(:game) { FactoryGirl.create :game, last_played: any_date }

  # Navigating index -> show -> Edit reaches a pre-filled form.
  scenario 'access edit game from games#show page' do
    visit '/games'
    click_on "#{game.name}"
    expect(current_path).to eq "/games/#{game.id}"
    click_on 'Edit'
    expect(page).to have_selector 'form'
    expect(page).to have_field('Name', with: 'Skat')
  end

  # Full round-trip: verify the pre-filled factory values, change every
  # field, submit, and check both the rendered page and the reloaded record.
  scenario 'submitting the update form updates the game' do
    visit '/games'
    click_on "#{game.name}"
    expect(current_path).to eq "/games/#{game.id}"
    click_on 'Edit'
    expect(page).to have_field 'Name', with: 'Skat'
    expect(page).to have_field 'Author', with: 'Author of Skat'
    expect(page).to have_field 'Category', with: 'Cardgame'
    expect(page).to have_field 'Location', with: 'Raimund'
    expect(page).to have_field 'Min players', with: '3'
    expect(page).to have_field 'Max players', with: '4'
    expect(page).to have_field 'Min playtime', with: '60'
    expect(page).to have_field 'Max playtime', with: '120'
    expect(page).to have_field 'Rating', with: '5'
    expect(page).to have_field 'Times played', with: '10'
    expect(page).to have_field 'Last played', with: any_date.to_s
    fill_in 'Name', with: 'Rommie'
    fill_in 'Author', with: 'him'
    fill_in 'Category', with: 'another Cardgame'
    fill_in 'Location', with: 'Mirko'
    fill_in 'Min players', with: '4'
    fill_in 'Max players', with: ''
    fill_in 'Min playtime', with: '10'
    fill_in 'Max playtime', with: '120'
    fill_in 'Rating', with: '3'
    fill_in 'Times played', with: '5'
    fill_in 'Last played', with: any_date.next_day
    click_on 'Update Game'
    expect(current_path).to eq "/games/#{game.id}"
    expect(page).to have_content 'Game was successfully updated.'
    expect(page).to have_content 'Rommie'
    expect(page).to have_content 'him'
    expect(page).to have_content 'another Cardgame'
    expect(page).to have_content 'Mirko'
    expect(page).to have_content '4'
    # NOTE(review): have_content '' is vacuously true (every page contains
    # the empty string); the cleared Max players field is effectively
    # asserted only by the model check below.
    expect(page).to have_content ''
    expect(page).to have_content '10'
    expect(page).to have_content '120'
    expect(page).to have_content '3'
    expect(page).to have_content '5'
    expect(page).to have_content any_date.next_day
    game.reload
    expect(game.name).to eq 'Rommie'
    expect(game.author).to eq 'him'
    expect(game.category).to eq 'another Cardgame'
    expect(game.location).to eq 'Mirko'
    expect(game.min_players).to eq 4
    expect(game.max_players).to eq nil
    expect(game.min_playtime).to eq 10
    expect(game.max_playtime).to eq 120
    expect(game.rating).to eq 3
    expect(game.times_played).to eq 5
    expect(game.last_played).to eq any_date.next_day
  end

  # Minimal update (name only) persists and redirects to show.
  scenario 'submitting the update accessed via show form updates the game' do
    visit '/games'
    click_on "#{game.name}"
    expect(current_path).to eq "/games/#{game.id}"
    click_on 'Edit'
    fill_in 'Name', with: 'Rommie'
    click_on 'Update Game'
    expect(current_path).to eq "/games/#{game.id}"
    expect(page).to have_content 'Rommie'
    expect(game.reload.name).to eq 'Rommie'
  end

  # After updating, the detail view shows the new name.
  scenario 'access game detail view after updating a new game' do
    visit '/games'
    click_on "#{game.name}"
    click_on 'Edit'
    fill_in 'Name', with: 'Rommie'
    click_on 'Update Game'
    expect(current_path).to eq "/games/#{game.id}"
    expect(page).to have_content 'Rommie'
  end
end
|
#!/bin/bash
#-------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#-------------------------------------------------------------
# error help print
# Print usage and exit with an error status.
printUsageExit()
{
cat << EOF
Usage: $0 <dml-filename> [arguments] [-help]
-help - Print this usage message and exit
Default Java options (-Xmx4g -Xms4g -Xmn400m) can be overridden by setting SYSTEMML_STANDALONE_OPTS.
EOF
  exit 1
}

# Script internally invokes 'java [SYSTEMML_STANDALONE_OPTS] -jar StandaloneSystemML.jar -f <dml-filename> -exec singlenode -config=SystemML-config.xml [arguments]'
while getopts "h:" options; do
  case $options in
    h ) echo Warning: Help requested. Will exit after usage message;
        printUsageExit
        ;;
    \? ) echo Warning: Help requested. Will exit after usage message;
         printUsageExit
         ;;
    * ) echo Error: Unexpected error while processing options;
  esac
done

# FIX: "$1" is quoted so an empty or whitespace argument cannot break the
# test expression.
if [ -z "$1" ] ; then
  echo "Wrong Usage.";
  printUsageExit;
fi

# Peel off first argument so that $@ contains arguments to DML script
SCRIPT_FILE=$1
shift

# Build up a classpath with all included libraries
# FIX: directory resolution is quoted to survive paths with spaces.
CURRENT_PATH=$( cd "$(dirname "$0")" ; pwd -P )
CLASSPATH=""
for f in "${CURRENT_PATH}"/lib/*.jar; do
  CLASSPATH=${CLASSPATH}:$f;
done

LOG4JPROP=log4j.properties

# set default java opts if none supplied
if [ -z "$SYSTEMML_STANDALONE_OPTS" ] ; then
  SYSTEMML_STANDALONE_OPTS="-Xmx4g -Xms4g -Xmn400m"
fi;

# FIX: invoke java directly with properly quoted arguments instead of
# building a flat $CMD string; the original word-split the whole command,
# breaking script paths, classpaths, and DML arguments containing spaces.
# SYSTEMML_STANDALONE_OPTS is intentionally unquoted: it holds multiple
# JVM flags that must be split into separate words.
java ${SYSTEMML_STANDALONE_OPTS} \
     -cp "${CLASSPATH}" \
     -Dlog4j.configuration=file:${LOG4JPROP} \
     org.apache.sysml.api.DMLScript \
     -f "${SCRIPT_FILE}" \
     -exec singlenode \
     -config="${CURRENT_PATH}/SystemML-config.xml" \
     "$@"

# if there was an error, display the full java command
# RETURN_CODE=$?
# if [ $RETURN_CODE -ne 0 ]
# then
#   echo "Failed to run SystemML. Exit code: $RETURN_CODE"
# fi
|
#!/bin/bash
# Copyright (c) 2020, Oracle Corporation and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
# Print usage/help and exit successfully.
usage() {
cat << EOF
Usage: buildDockerImage.sh [-t tag]
Builds a Docker Image for the Oracle WebLogic Kubernetes Operator.
Parameters:
-t: image name and tag in 'name:tag' format
Copyright (c) 2020, Oracle Corporation and/or its affiliates.
Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
EOF
exit 0
}

# WebLogic Kubernetes Operator Image Name
name=""

# Parameters
while getopts "t:" optname; do
  case ${optname} in
    t )
      name="$OPTARG"
      ;;
    \? )
      usage
      ;;
  esac
done

IMAGE_NAME=${name:-oracle/weblogic-kubernetes-operator:3.0.3}
SCRIPTPATH="$( cd "$(dirname "$0")" > /dev/null 2>&1 ; pwd -P )"

# Proxy settings: forward any host proxy environment into the build.
# NOTE: PROXY_SETTINGS stays unquoted at the docker invocation on purpose —
# it holds multiple --build-arg words that must be split.
PROXY_SETTINGS=""
if [ "${http_proxy}" != "" ]; then
  PROXY_SETTINGS="$PROXY_SETTINGS --build-arg http_proxy=${http_proxy}"
fi
if [ "${https_proxy}" != "" ]; then
  PROXY_SETTINGS="$PROXY_SETTINGS --build-arg https_proxy=${https_proxy}"
fi
if [ "${ftp_proxy}" != "" ]; then
  PROXY_SETTINGS="$PROXY_SETTINGS --build-arg ftp_proxy=${ftp_proxy}"
fi
if [ "${no_proxy}" != "" ]; then
  PROXY_SETTINGS="$PROXY_SETTINGS --build-arg no_proxy=${no_proxy}"
fi
if [ "$PROXY_SETTINGS" != "" ]; then
  echo "Proxy settings were found and will be used during build."
fi

# ################## #
# BUILDING THE IMAGE #
# ################## #
echo "Building image '$IMAGE_NAME' ..."

# BUILD THE IMAGE (replace all environment variables)
# FIX: the original checked [ $? -eq 0 ] after an intervening `echo`, so it
# always took the success branch and the failure message was dead code.
# The failure message now lives in the error handler; reaching past the
# build means it succeeded.
BUILD_START=$(date '+%s')
docker build $PROXY_SETTINGS -t "$IMAGE_NAME" -f "$SCRIPTPATH/Dockerfile" "$SCRIPTPATH" || {
  echo "There was an error building the image."
  echo "WebLogic Kubernetes Operator Docker Image was NOT successfully created. Check the output and correct any reported problems with the docker build operation."
  exit 1
}
BUILD_END=$(date '+%s')
BUILD_ELAPSED=$((BUILD_END - BUILD_START))

echo ""
cat << EOF
WebLogic Kubernetes Operator Docker Image is ready:
--> $IMAGE_NAME
Build completed in $BUILD_ELAPSED seconds.
EOF
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.