text
stringlengths 1
1.05M
|
|---|
package com.yoloho.enhanced.cache.support;
import org.springframework.data.redis.core.RedisTemplate;
/**
* Basic initialization needed under AspectJ
*
* @author jason
*
*/
public class CacheSimpleInit {
    /**
     * Create an initializer with the given namespace and no Redis backend.
     *
     * @param ns cache namespace, must be non-empty
     */
    public CacheSimpleInit(String ns) {
        this(ns, null);
    }

    /**
     * Create an initializer with the given namespace and an optional Redis template.
     *
     * @param ns            cache namespace, must be non-empty
     * @param redisTemplate Redis backend to register; ignored when {@code null}
     * @throws IllegalArgumentException if {@code ns} is null or empty
     *         (a RuntimeException subclass, so existing callers catching
     *         RuntimeException keep working)
     */
    public CacheSimpleInit(String ns, RedisTemplate<String, Object> redisTemplate) {
        if (ns == null || ns.isEmpty()) {
            throw new IllegalArgumentException("namespace for cache should not be empty");
        }
        CacheProcessor.setNamespace(ns);
        if (redisTemplate != null) {
            CacheProcessor.setRedisTemplate(redisTemplate);
        }
    }
}
|
const str1 = "Hello";
const str2 = "World";
// Join the two words with a single space via a template literal.
const combinedString = `${str1} ${str2}`;
console.log(combinedString); // Output: "Hello World"
|
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2018 Xilinx, Inc. All Rights Reserved.
#
# NOTE(review): this script was generated on Windows. The PATH values below
# use Windows-style paths with ';' separators -- in a POSIX shell ';' ends
# the command, so these lines will not work as-is. The unconditional 'exit'
# after the warning is deliberate: it stops the script until a user edits
# the PATH/LD_LIBRARY_PATH values for the local machine and removes it.
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
exit
# Prepend the Xilinx tool directories to PATH (edit these for this machine).
if [ -z "$PATH" ]; then
PATH=C:/Xilinx/SDK/2018.3/bin;C:/Xilinx/Vivado/2018.3/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2018.3/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2018.3/bin
else
PATH=C:/Xilinx/SDK/2018.3/bin;C:/Xilinx/Vivado/2018.3/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2018.3/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2018.3/bin:$PATH
fi
export PATH
# Same treatment for LD_LIBRARY_PATH (empty in this generated script).
if [ -z "$LD_LIBRARY_PATH" ]; then
LD_LIBRARY_PATH=
else
LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH
# Working directory of the synthesis run, and the run log file.
HD_PWD='C:/Users/Administrator/Desktop/FPGAUSB/CYUSB3014/FPGASource/USBLoopBack/USBLoopBack/USBLoopBack.runs/synth_1'
cd "$HD_PWD"
HD_LOG=runme.log
/bin/touch $HD_LOG
# EAStep: run one tool step via ISEWrap.sh, append its output to the log,
# and abort the whole script if the step exits nonzero.
ISEStep="./ISEWrap.sh"
EAStep()
{
$ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
if [ $? -ne 0 ]
then
exit
fi
}
# Launch Vivado in batch mode on the generated synthesis Tcl script.
EAStep vivado -log loopback.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source loopback.tcl
|
<filename>14_funcoes/a118_prog_funcional_spoiler_callable.py
#!/usr/bin/python3
'''
executar(funcao): runs the given object as a function.
- callable(funcao) checks whether the object can be called like a function.
- If it is callable, funcao() invokes it; otherwise the argument is ignored.
'''
def executar(funcao):
    """Invoke ``funcao`` if it is callable; silently ignore anything else."""
    if not callable(funcao):
        return
    funcao()
def bom_dia():
    # Prints a Portuguese greeting ("Good morning!").
    print('Bom dia!')

def boa_tarde():
    # Prints a Portuguese greeting ("Good afternoon!").
    print('Boa tarde!')
if __name__ == '__main__':
    executar(bom_dia)  # Passing the function object bom_dia itself (not calling it).
    executar(boa_tarde)
    executar(1)  # Ignored: 1 is not callable, so executar does nothing.

# Sources:
# Curso Python 3 - Curso Completo do Básico ao Avançado Udemy Aula 118
# https://github.com/cod3rcursos/curso-python/tree/master/funcoes
|
cd src
# Training
# Fine-tunes from the COCO-pretrained CenterNet hourglass checkpoint
# (../models/ctdet_coco_hg.pth); flag meanings are defined by the project's
# option parser.
python main.py tracking,polydet \
--dataset kitti_mots --exp_id kitti_mots --dataset_version train_full \
--num_epochs 120 \
--hm_disturb 0.05 --lost_disturb 0.4 --fp_disturb 0.1 \
--pre_hm --same_aug --elliptical_gt --nbr_points 32 \
--arch hourglass \
--lr_step 50,90 \
--head_conv 256 --num_head_conv_poly 3 \
--batch_size 3 \
--load_model ../models/ctdet_coco_hg.pth
# Testing
# Evaluates the checkpoint written by the training run above
# (model_last.pth under ../exp/tracking,polydet/kitti_mots/).
python test.py tracking,polydet \
--dataset kitti_mots --exp_id kitti_mots --dataset_version train_full \
--test_dataset kitti_mots --arch hourglass \
--head_conv 256 --num_head_conv_poly 3 \
--pre_hm --same_aug --elliptical_gt --nbr_points 32 \
--load_model ../exp/tracking,polydet/kitti_mots/model_last.pth \
--track_thresh 0.3 --max_age 32 --ukf --keep_res
cd ..
|
#!/bin/bash
#*************************************************************************#
# @param
# drop_rate: Decode Drop frame rate (0~1)
# src_frame_rate: frame rate for send data
# data_path: Video or image list path
# model_path: offline model path
# label_path: label path
# postproc_name: postproc class name (PostprocSsd)
# wait_time: time of one test case. When set to 0, it will automatically exit after the eos signal arrives
# rtsp = true: use rtsp
# dump_dir: dump result videos to this directory
# loop = true: loop through video
# device_id: mlu device id
#
# @notice: other flags see ./../bin/demo --help
#*************************************************************************#
# Resolve the directory containing this script so paths work from anywhere.
CURRENT_DIR=$(cd $(dirname ${BASH_SOURCE[0]});pwd)
MODEL_PATH=$CURRENT_DIR/../../data/models/MLU270/Classification/resnet50
mkdir -p $MODEL_PATH
# Download the offline resnet50 model once; skip if already cached.
pushd $MODEL_PATH
if [ ! -f "resnet50_offline.cambricon" ]; then
wget -O resnet50_offline.cambricon http://video.cambricon.com/models/MLU270/Classification/resnet50/resnet50_offline.cambricon
else
echo "resnet50 offline model exists."
fi
popd
# Environment setup, then run the demo on the local video list.
source env.sh
mkdir -p output
./../bin/demo \
--data_path ./files.list_video \
--src_frame_rate 60 \
--wait_time 0 \
--rtsp=false \
--loop=false \
--config_fname "resnet50_config_mlu270.json" \
--alsologtostderr
|
<gh_stars>1000+
//+build ignore
/*
Copyright 2021 Cesanta Software Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"fmt"
"log"
"os"
"strings"
"time"
"github.com/cooldrip/cstrftime" // strftime implemented with cgo
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing"
)
// main prints a tab-separated "<version>\t<buildId>" line describing the git
// checkout containing the current working directory. The build id has the
// form "<timestamp>/<branch-or-tag>@<short-hash><dirty-marker>".
func main() {
	dir, err := os.Getwd()
	if err != nil {
		log.Fatal(err)
	}
	// Walk upward from the working directory until a .git is found.
	r, err := git.PlainOpenWithOptions(dir, &git.PlainOpenOptions{DetectDotGit: true})
	if err != nil {
		log.Fatal(err)
	}
	t := time.Now()
	ts := cstrftime.Format("%Y%m%d-%H%M%S", t)
	head, err := r.Head()
	if err != nil {
		log.Fatal(err)
	}
	// First 8 hex characters of the HEAD commit hash.
	short := fmt.Sprintf("%s", head.Hash())[:8]
	w, err := r.Worktree()
	if err != nil {
		log.Fatal(err)
	}
	status, err := w.Status()
	if err != nil {
		log.Fatal(err)
	}
	// "+" marks a dirty worktree (any entries in the status list).
	is_dirty := ""
	if len(status) > 0 {
		is_dirty = "+"
	}
	// Prefer the branch name; "?" stands in for a detached HEAD.
	branch_or_tag := head.Name().Short()
	if branch_or_tag == "HEAD" {
		branch_or_tag = "?"
	}
	// If a tag reference's string form starts with the short hash, use the
	// tag name instead. NOTE(review): this prefix-compares ref.String()
	// (hash + name) against the commit-hash prefix -- confirm this is the
	// intended tag lookup.
	tags, _ := r.Tags()
	tags.ForEach(func(ref *plumbing.Reference) error {
		if ref.Type() != plumbing.HashReference {
			return nil
		}
		if strings.HasPrefix(ref.String(), short) {
			tag := ref.String()
			branch_or_tag = trimRef(strings.Split(tag, " ")[1])
		}
		return nil
	})
	buildId := fmt.Sprintf("%s/%s@%s%s", ts, branch_or_tag, short, is_dirty)
	// Clean checkouts get a date-based version; dirty or detached checkouts
	// fall back to the branch/tag name.
	version := cstrftime.Format("%Y%m%d%H", t)
	if is_dirty != "" || branch_or_tag == "?" {
		version = branch_or_tag
	}
	fmt.Printf("%s\t%s\n", version, buildId)
}
// trimRef strips a leading "refs/heads/" or "refs/tags/" prefix from ref,
// returning the bare branch or tag name. Unprefixed refs pass through
// unchanged.
func trimRef(ref string) string {
	for _, prefix := range []string{"refs/heads/", "refs/tags/"} {
		ref = strings.TrimPrefix(ref, prefix)
	}
	return ref
}
|
<gh_stars>100-1000
#ifndef MODULE_CALLS_H
#define MODULE_CALLS_H
#include <module/device_generic_driver.h>
#include <stddef.h>
#include <stdint.h>
// mfunc<T, Args...>: plain function pointer returning T and taking Args.
template <typename T, typename... argument>
using mfunc = T (*)(argument...);

// Encodings for io_func_exec::bit_size (width of the I/O access).
#define IO_BIT_SIZE_8 0b0
#define IO_BIT_SIZE_16 0b01
#define IO_BIT_SIZE_32 0b10
#define IO_BIT_SIZE_64 0b11

// Flag bits describing one I/O request passed to io_func.
struct io_func_exec
{
    bool write : 1;            // true = write access, false = read
    uint8_t bit_size : 2;      // one of the IO_BIT_SIZE_* encodings above
    uint8_t return_result : 1; // caller wants the read value returned
    uint8_t use_memory : 1;    // assumed: memory-mapped vs port I/O -- TODO confirm
};
// Table of kernel entry points handed to loadable modules. The struct is
// packed and consumed positionally (an ABI): do not reorder, remove, or
// insert members -- append only.
struct module_calls_list
{
    mfunc<void> null_func;                               // placeholder slot
    mfunc<void, const char *> echo_out;                  // print a string
    mfunc<size_t> get_kernel_version;
    mfunc<size_t, size_t, size_t, io_func_exec> io_func; // raw I/O dispatch (see io_func_exec)
    mfunc<int, uint32_t, general_device *> set_device_driver;
    mfunc<const general_device *, uint32_t> get_device_driver;
    mfunc<int, const char *> set_module_name;
    mfunc<bool, size_t, mfunc<void, uint32_t>> add_irq_handler;
} __attribute__((packed));
// Helpers visible only when compiling a module (not the kernel itself).
#ifdef MODULE
// Wrappers around the module_calls_list slots; call call_init() first.
void call_init();
void echo_out(const char *data);
size_t get_kern_version();
size_t io_function(size_t addr, size_t write_val, io_func_exec flags);
int set_device_driver(uint32_t id, general_device *as);
const general_device *get_device_driver(uint32_t id);
int set_module_name(const char *name);
int add_irq_handler(size_t irq, mfunc<void, uint32_t> func);

// just equivalent of cli/sti: mask/unmask maskable interrupts on this CPU.
// NOTE(review): all asm below uses Intel operand order ("out dx, al");
// presumably the build passes -masm=intel -- confirm the build flags.
static inline void enter_low_level_context()
{
    asm volatile("cli");
}
static inline void exit_low_level_context()
{
    asm volatile("sti");
}
// Port-I/O byte/word/dword writes.
inline void outb(uint16_t port, uint8_t value)
{
    asm volatile("out dx, al" ::"a"(value), "d"(port));
}
inline void outw(uint16_t port, uint16_t value)
{
    asm volatile("out dx, ax" ::"a"(value), "d"(port));
}
inline void outl(uint16_t port, uint32_t value)
{
    asm volatile("out dx, eax" ::"a"(value), "d"(port));
}
// Port-I/O byte/word/dword reads.
inline uint8_t inb(uint16_t port)
{
    uint8_t ret;
    asm volatile("in al, dx"
                 : "=a"(ret)
                 : "d"(port));
    return ret;
}
inline uint16_t inw(uint16_t port)
{
    uint16_t ret;
    asm volatile("in ax, dx"
                 : "=a"(ret)
                 : "d"(port));
    return ret;
}
inline uint32_t inl(uint16_t port)
{
    uint32_t ret;
    asm volatile("in eax, dx"
                 : "=a"(ret)
                 : "d"(port));
    return ret;
}
#endif
#endif // MODULE_CALLS_H
|
<gh_stars>0
import setuptools

# Use the repository ReadMe as the long description shown on PyPI.
with open("ReadMe.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="heraut",
    version="0.0.1dev1",
    author="<NAME>",
    author_email="<EMAIL>",
    description="The herald of python message passing between threads and processes",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/mkupperman/heraut",
    project_urls={
        "Bug Tracker": "https://github.com/mkupperman/heraut/issues",
    },
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
    ],
    # Runtime dependency: filesystem-event watching.
    install_requires=['watchdog>=2.0.0'],
    # src/ layout: packages live under src/, not the repo root.
    package_dir={"": "src"},
    packages=setuptools.find_packages(where="src"),
    python_requires=">=3.6",
)
|
# Patch the Makefile to use the conda toolchain's C++ compiler (${GXX})
# instead of the hard-coded g++.
sed -i.bak "s/CPP=g++/CPP=\${GXX}/" src/Makefile
# Link both binaries against librt (presumably for clock_gettime on older
# glibc -- TODO confirm which symbol requires it).
sed -i.bak "s/-o clan_search/-o clan_search -lrt/" src/Makefile
sed -i.bak "s/-o clan_annotate/-o clan_annotate -lrt/" src/Makefile
make
# Install the built binaries into the conda environment prefix.
mkdir -p $PREFIX/bin
cp bin/* $PREFIX/bin/
|
'use strict';
/* Memory Game Models and Business Logic */
/* A single card on the board: its title plus whether it is face up. */
function Tile(title) {
    this.title = title;
    this.flipped = false;
}

/* Toggle the tile between face-up and face-down. */
Tile.prototype.flip = function() {
    var isFaceUp = this.flipped;
    this.flipped = !isFaceUp;
}
/*
 * A memory game: a square grid of paired tiles. The player flips two tiles
 * per turn; a matching pair stays face up, a miss is flipped back at the
 * start of the next turn.
 */
function Game(tileNames) {
    var tileDeck = makeDeck(tileNames);
    this.grid = makeGrid(tileDeck);
    this.message = Game.MESSAGE_CLICK;
    // One unmatched pair per tile name.
    this.unmatchedPairs = tileNames.length;

    // Handle a click on a tile. firstPick/secondPick track the current turn:
    // secondPick is only set on a miss, so it doubles as the "undo pending"
    // flag for the next turn.
    this.flipTile = function(tile) {
        // Ignore clicks on tiles that are already face up.
        if (tile.flipped) {
            return;
        }
        tile.flip();

        if (!this.firstPick || this.secondPick) {
            // Starting a new turn: if the previous turn was a miss, flip its
            // two tiles back face down first.
            if (this.secondPick) {
                this.firstPick.flip();
                this.secondPick.flip();
                this.firstPick = this.secondPick = undefined;
            }
            this.firstPick = tile;
            this.message = Game.MESSAGE_ONE_MORE;
        } else {
            if (this.firstPick.title === tile.title) {
                // Match: both tiles stay face up.
                this.unmatchedPairs--;
                this.message = (this.unmatchedPairs > 0) ? Game.MESSAGE_MATCH : Game.MESSAGE_WON;
                this.firstPick = this.secondPick = undefined;
            } else {
                // Miss: remember the second pick so the next turn can undo it.
                this.secondPick = tile;
                this.message = Game.MESSAGE_MISS;
            }
        }
    }
}
/* User-facing status messages shown by the view. */
Game.MESSAGE_CLICK = 'Click on a tile.';
Game.MESSAGE_ONE_MORE = 'Pick one more card.'; // was missing its semicolon (relied on ASI)
Game.MESSAGE_MISS = 'Try again.';
Game.MESSAGE_MATCH = 'Good job! Keep going.';
Game.MESSAGE_WON = 'You win!';
/* Create a deck array containing two Tiles for every name, in input order. */
function makeDeck(tileNames) {
    return tileNames.reduce(function(deck, name) {
        deck.push(new Tile(name));
        deck.push(new Tile(name));
        return deck;
    }, []);
}
/* Deal the whole deck into a square 2-D grid, drawing tiles at random. */
function makeGrid(tileDeck) {
    var gridDimension = Math.sqrt(tileDeck.length);
    var grid = [];
    for (var row = 0; row < gridDimension; row++) {
        var currentRow = [];
        for (var col = 0; col < gridDimension; col++) {
            currentRow.push(removeRandomTile(tileDeck));
        }
        grid.push(currentRow);
    }
    return grid;
}
/* Remove and return one uniformly random element from the deck (mutates it). */
function removeRandomTile(tileDeck) {
    var index = Math.floor(Math.random() * tileDeck.length);
    var removed = tileDeck.splice(index, 1);
    return removed[0];
}
|
<reponame>path64/assembler<filename>frontends/ygas.cpp
//
// GNU AS-like frontend
//
// Copyright (C) 2001-2010 <NAME>
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND OTHER CONTRIBUTORS ``AS IS''
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#include "config.h"
#include <memory>
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/ManagedStatic.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/system_error.h"
#include "yasmx/Basic/Diagnostic.h"
#include "yasmx/Basic/FileManager.h"
#include "yasmx/Basic/SourceManager.h"
#include "yasmx/Frontend/DiagnosticOptions.h"
#include "yasmx/Frontend/TextDiagnosticPrinter.h"
#include "yasmx/Parse/HeaderSearch.h"
#include "yasmx/Parse/Parser.h"
#include "yasmx/Support/registry.h"
#include "yasmx/System/plugin.h"
#include "yasmx/Arch.h"
#include "yasmx/Assembler.h"
#include "yasmx/DebugFormat.h"
#include "yasmx/Expr.h"
#include "yasmx/IntNum.h"
#include "yasmx/ListFormat.h"
#include "yasmx/Module.h"
#include "yasmx/Object.h"
#include "yasmx/ObjectFormat.h"
#include "yasmx/Symbol.h"
#ifdef HAVE_LIBGEN_H
#include <libgen.h>
#endif
#include "frontends/license.cpp"
// Preprocess-only buffer size
#define PREPROC_BUF_SIZE 16384

using namespace yasm;
namespace cl = llvm::cl;

// NOTE(review): std::auto_ptr was removed in C++17; fine for the era of this
// tree, but confirm before raising the language standard.
static std::auto_ptr<raw_ostream> errfile;

// version message
static const char* full_version =
    PACKAGE_NAME " " PACKAGE_VERSION;
// Print the program name/version banner (installed via cl::SetVersionPrinter
// in main, so it runs for --version).
void
PrintVersion()
{
    llvm::outs()
        << full_version << '\n'
        << "Compiled on " __DATE__ ".\n"
        << "Copyright (c) 2009-2011 PathScale Inc. and others. All Rights Reserved.\n"
        << "You can find complete copyright, patent and legal notices in the "
        << "corresponding documentation.\n";
}
// Command-line option declarations (LLVM cl). cl::list is used for options
// that may repeat; their per-occurrence positions are replayed later in
// ApplyWarningSettings/GetBitsSetting/ConfigureObject so "last one wins".
// extra help messages
static cl::extrahelp help_tail(
    "\n"
    "Files are asm sources to be assembled.\n"
    "\n"
    "Sample invocation:\n"
    "   pathas -32 -o object.o source.s\n"
    "\n"
    "Report bugs to <EMAIL>\n");

static cl::opt<std::string> in_filename(cl::Positional,
    cl::desc("file"));

// -32
static cl::list<bool> bits_32("32",
    cl::desc("set 32-bit output"));

// -64
static cl::list<bool> bits_64("64",
    cl::desc("set 64-bit output"));

// -defsym
static cl::list<std::string> defsym("defsym",
    cl::desc("define symbol"));

// -D (ignored)
static cl::list<std::string> ignored_D("D",
    cl::desc("Ignored"),
    cl::Prefix,
    cl::Hidden);

// -dump-object
static cl::opt<Assembler::ObjectDumpTime> dump_object("dump-object",
    cl::desc("Dump object in XML after this phase:"),
    cl::values(
        clEnumValN(Assembler::DUMP_NEVER, "never", "never dump"),
        clEnumValN(Assembler::DUMP_AFTER_PARSE, "parsed",
                   "after parse phase"),
        clEnumValN(Assembler::DUMP_AFTER_FINALIZE, "finalized",
                   "after finalization"),
        clEnumValN(Assembler::DUMP_AFTER_OPTIMIZE, "optimized",
                   "after optimization"),
        clEnumValN(Assembler::DUMP_AFTER_OUTPUT, "output",
                   "after output"),
        clEnumValEnd));

// --execstack, --noexecstack
static cl::list<bool> execstack("execstack",
    cl::desc("require executable stack for this object"));
static cl::list<bool> noexecstack("noexecstack",
    cl::desc("don't require executable stack for this object"));

// -J
static cl::list<bool> no_signed_overflow("J",
    cl::desc("don't warn about signed overflow"));

// -I
static cl::list<std::string> include_paths("I",
    cl::desc("Add include path"),
    cl::value_desc("path"),
    cl::Prefix);

// --license
static cl::opt<bool> show_license("license",
    cl::desc("Show license text"));

// --plugin
#ifndef BUILD_STATIC
static cl::list<std::string> plugin_names("plugin",
    cl::desc("Load plugin module"),
    cl::value_desc("plugin"));
#endif

// -o
static cl::opt<std::string> obj_filename("o",
    cl::desc("Name of object-file output"),
    cl::value_desc("filename"),
    cl::Prefix);

// Options below are accepted for gas compatibility but have no effect.
// -w
static cl::opt<bool> ignored_w("w",
    cl::desc("Ignored"),
    cl::ZeroOrMore,
    cl::Hidden);

// -x
static cl::opt<bool> ignored_x("x",
    cl::desc("Ignored"),
    cl::ZeroOrMore,
    cl::Hidden);

// -Qy
static cl::opt<bool> ignored_qy("Qy",
    cl::desc("Ignored"),
    cl::ZeroOrMore,
    cl::Hidden);

// -Qn
static cl::opt<bool> ignored_qn("Qn",
    cl::desc("Ignored"),
    cl::ZeroOrMore,
    cl::Hidden);
// -W, --no-warn
static cl::list<bool> inhibit_warnings("W",
    cl::desc("Suppress warning messages"));
static cl::alias inhibit_warnings_long("no-warn",
    cl::desc("Alias for -W"),
    cl::aliasopt(inhibit_warnings));

// --fatal-warnings
// Help text fixed: it was a copy-paste of -W's description, but this option
// actually promotes warnings to errors (see ApplyWarningSettings).
static cl::list<bool> fatal_warnings("fatal-warnings",
    cl::desc("Treat warning messages as errors"));

// --warn
static cl::list<bool> enable_warnings("warn",
    cl::desc("Don't suppress warning messages or treat them as errors"));

// sink to warn instead of error on unrecognized options
static cl::list<std::string> unknown_options(cl::Sink);
// Apply the warning-related options (-W/--no-warn, --warn, --fatal-warnings,
// -J) to the diagnostics engine. Because the options can repeat and mix,
// their occurrences are replayed in command-line order so the last one wins.
static void
ApplyWarningSettings(DiagnosticsEngine& diags)
{
    // Disable init-nobits and uninit-contents by default.
    diags.setDiagnosticGroupMapping("init-nobits", diag::MAP_IGNORE);
    diags.setDiagnosticGroupMapping("uninit-contents", diag::MAP_IGNORE);

    // Walk through inhibit_warnings, fatal_warnings, enable_warnings, and
    // no_signed_overflow in parallel, ordering by command line argument
    // position.
    unsigned int inhibit_pos = 0, inhibit_num = 0;
    unsigned int enable_pos = 0, enable_num = 0;
    unsigned int fatal_pos = 0, fatal_num = 0;
    unsigned int signed_pos = 0, signed_num = 0;
    for (;;)
    {
        // Position 0 means "no more occurrences of this option remain".
        if (inhibit_num < inhibit_warnings.size())
            inhibit_pos = inhibit_warnings.getPosition(inhibit_num);
        else
            inhibit_pos = 0;
        if (enable_num < enable_warnings.size())
            enable_pos = enable_warnings.getPosition(enable_num);
        else
            enable_pos = 0;
        if (fatal_num < fatal_warnings.size())
            fatal_pos = fatal_warnings.getPosition(fatal_num);
        else
            fatal_pos = 0;
        if (signed_num < no_signed_overflow.size())
            signed_pos = no_signed_overflow.getPosition(signed_num);
        else
            signed_pos = 0;

        // Apply whichever option occurrence appears earliest on the line.
        if (inhibit_pos != 0 &&
            (enable_pos == 0 || inhibit_pos < enable_pos) &&
            (fatal_pos == 0 || inhibit_pos < fatal_pos) &&
            (signed_pos == 0 || inhibit_pos < signed_pos))
        {
            // Handle inhibit option
            ++inhibit_num;
            diags.setIgnoreAllWarnings(true);
        }
        else if (enable_pos != 0 &&
                 (inhibit_pos == 0 || enable_pos < inhibit_pos) &&
                 (fatal_pos == 0 || enable_pos < fatal_pos) &&
                 (signed_pos == 0 || enable_pos < signed_pos))
        {
            // Handle enable option
            ++enable_num;
            diags.setIgnoreAllWarnings(false);
            diags.setWarningsAsErrors(false);
            diags.setDiagnosticGroupMapping("signed-overflow",
                                            diag::MAP_WARNING);
        }
        else if (fatal_pos != 0 &&
                 (enable_pos == 0 || fatal_pos < enable_pos) &&
                 (inhibit_pos == 0 || fatal_pos < inhibit_pos) &&
                 (signed_pos == 0 || fatal_pos < signed_pos))
        {
            // Handle fatal option
            ++fatal_num;
            diags.setWarningsAsErrors(true);
        }
        else if (signed_pos != 0 &&
                 (enable_pos == 0 || signed_pos < enable_pos) &&
                 (fatal_pos == 0 || signed_pos < fatal_pos) &&
                 (inhibit_pos == 0 || signed_pos < inhibit_pos))
        {
            // Handle signed option
            ++signed_num;
            diags.setDiagnosticGroupMapping("signed-overflow",
                                            diag::MAP_IGNORE);
        }
        else
            break; // we're done with the list
    }
}
// Resolve the target word size from the repeatable -32/-64 options; the last
// one on the command line wins. Defaults to YGAS_OBJFMT_BITS when neither
// option is given.
static std::string
GetBitsSetting()
{
    std::string bits = YGAS_OBJFMT_BITS;
    // Walk through bits_32 and bits_64 in parallel, ordering by command line
    // argument position.
    unsigned int bits32_pos = 0, bits32_num = 0;
    unsigned int bits64_pos = 0, bits64_num = 0;
    for (;;)
    {
        // Position 0 means no more occurrences of that option remain.
        if (bits32_num < bits_32.size())
            bits32_pos = bits_32.getPosition(bits32_num);
        else
            bits32_pos = 0;
        if (bits64_num < bits_64.size())
            bits64_pos = bits_64.getPosition(bits64_num);
        else
            bits64_pos = 0;
        if (bits32_pos != 0 && (bits64_pos == 0 || bits32_pos < bits64_pos))
        {
            // Handle bits32 option
            ++bits32_num;
            bits = "32";
        }
        else if (bits64_pos != 0 &&
                 (bits32_pos == 0 || bits64_pos < bits32_pos))
        {
            // Handle bits64 option
            ++bits64_num;
            bits = "64";
        }
        else
            break; // we're done with the list
    }
    return bits;
}
// Apply the repeatable --execstack/--noexecstack options to the object's
// configuration; the last occurrence on the command line wins. When neither
// is given, both config flags keep their defaults.
static void
ConfigureObject(Object& object)
{
    Object::Config& config = object.getConfig();

    // Walk through execstack and noexecstack in parallel, ordering by command
    // line argument position.
    unsigned int exec_pos = 0, exec_num = 0;
    unsigned int noexec_pos = 0, noexec_num = 0;
    for (;;)
    {
        // Position 0 means no more occurrences of that option remain.
        if (exec_num < execstack.size())
            exec_pos = execstack.getPosition(exec_num);
        else
            exec_pos = 0;
        if (noexec_num < noexecstack.size())
            noexec_pos = noexecstack.getPosition(noexec_num);
        else
            noexec_pos = 0;
        if (exec_pos != 0 && (noexec_pos == 0 || exec_pos < noexec_pos))
        {
            // Handle exec option
            ++exec_num;
            config.ExecStack = true;
            config.NoExecStack = false;
        }
        else if (noexec_pos != 0 && (exec_pos == 0 || noexec_pos < exec_pos))
        {
            // Handle noexec option
            ++noexec_num;
            config.ExecStack = false;
            config.NoExecStack = true;
        }
        else
            break; // we're done with the list
    }
}
// Assemble in_filename (or stdin for "-") into the configured object file.
// Returns EXIT_SUCCESS/EXIT_FAILURE; on output failure the partial object
// file is removed.
static int
do_assemble(SourceManager& source_mgr, DiagnosticsEngine& diags)
{
    // Apply warning settings
    ApplyWarningSettings(diags);

    // Determine objfmt_bits based on -32 and -64 options
    std::string objfmt_bits = GetBitsSetting();

    FileManager& file_mgr = source_mgr.getFileManager();
    Assembler assembler("x86", YGAS_OBJFMT_BASE + objfmt_bits, diags,
                        dump_object);
    HeaderSearch headers(file_mgr);

    if (diags.hasFatalErrorOccurred())
        return EXIT_FAILURE;

    // Set object filename if specified.
    if (!obj_filename.empty())
        assembler.setObjectFilename(obj_filename);

    // Set parser.
    assembler.setParser("gas", diags);

    if (diags.hasFatalErrorOccurred())
        return EXIT_FAILURE;

    // Set debug format to dwarf2pass if it's legal for this object format.
    if (assembler.isOkDebugFormat("dwarf2pass"))
    {
        assembler.setDebugFormat("dwarf2pass", diags);
        if (diags.hasFatalErrorOccurred())
            return EXIT_FAILURE;
    }

    // open the input file or STDIN (for filename of "-")
    if (in_filename == "-")
    {
        OwningPtr<MemoryBuffer> my_stdin;
        if (llvm::error_code err = MemoryBuffer::getSTDIN(my_stdin))
        {
            diags.Report(SourceLocation(), diag::fatal_file_open)
                << in_filename << err.message();
            return EXIT_FAILURE;
        }
        source_mgr.createMainFileIDForMemBuffer(my_stdin.take());
    }
    else
    {
        const FileEntry* in = file_mgr.getFile(in_filename);
        if (!in)
        {
            diags.Report(SourceLocation(), diag::fatal_file_open)
                << in_filename;
            return EXIT_FAILURE;
        }
        source_mgr.createMainFileID(in);
    }

    // Initialize the object.
    if (!assembler.InitObject(source_mgr, diags))
        return EXIT_FAILURE;

    // Configure object per command line parameters.
    ConfigureObject(*assembler.getObject());

    // Predefine symbols from each -defsym NAME=VALUE occurrence.
    for (std::vector<std::string>::const_iterator i=defsym.begin(),
         end=defsym.end(); i != end; ++i)
    {
        StringRef str(*i);
        size_t equalpos = str.find('=');
        if (equalpos == StringRef::npos)
        {
            diags.Report(diag::fatal_bad_defsym) << str;
            continue;
        }
        StringRef name = str.slice(0, equalpos);
        StringRef vstr = str.slice(equalpos+1, StringRef::npos);

        IntNum value;
        if (!vstr.empty())
        {
            // determine radix: "0x" prefix = hex, leading "0" = octal,
            // otherwise decimal
            unsigned int radix;
            if (vstr[0] == '0' && vstr.size() > 1 &&
                (vstr[1] == 'x' || vstr[1] == 'X'))
            {
                vstr = vstr.substr(2);
                radix = 16;
            }
            else if (vstr[0] == '0')
            {
                vstr = vstr.substr(1);
                radix = 8;
            }
            else
                radix = 10;

            // check validity: every remaining character must fit the radix
            const char* ptr = vstr.begin();
            const char* end = vstr.end();
            if (radix == 16)
            {
                while (ptr != end && isxdigit(*ptr))
                    ++ptr;
            }
            else if (radix == 8)
            {
                while (ptr != end && (*ptr >= '0' && *ptr <= '7'))
                    ++ptr;
            }
            else
            {
                while (ptr != end && isdigit(*ptr))
                    ++ptr;
            }
            if (ptr != end)
            {
                diags.Report(diag::fatal_bad_defsym) << name;
                continue;
            }
            value.setStr(vstr, radix);
        }

        // define equ
        assembler.getObject()->getSymbol(name)->DefineEqu(Expr(value));
    }

    if (diags.hasFatalErrorOccurred())
        return EXIT_FAILURE;

    // Initialize the parser.
    assembler.InitParser(source_mgr, diags, headers);

    // Assemble the input.
    if (!assembler.Assemble(source_mgr, diags))
    {
        // An error occurred during assembly.
        return EXIT_FAILURE;
    }

    // open the object file for output
    std::string err;
    raw_fd_ostream out(assembler.getObjectFilename().str().c_str(),
                       err, raw_fd_ostream::F_Binary);
    if (!err.empty())
    {
        diags.Report(SourceLocation(), diag::err_cannot_open_file)
            << obj_filename << err;
        return EXIT_FAILURE;
    }

    if (!assembler.Output(out, diags))
    {
        // An error occurred during output.
        // If we had an error at this point, we also need to delete the output
        // object file (to make sure it's not left newer than the source).
        out.close();
        remove(assembler.getObjectFilename().str().c_str());
        return EXIT_FAILURE;
    }

    // close object file
    out.close();
    return EXIT_SUCCESS;
}
// main function
// Program entry: parse options, wire up diagnostics, load modules and
// plugins, then hand off to do_assemble().
int
main(int argc, char* argv[])
{
    llvm::llvm_shutdown_obj llvm_manager(false);

    cl::SetVersionPrinter(&PrintVersion);
    cl::ParseCommandLineOptions(argc, argv, "", true);

    // Handle special exiting options
    if (show_license)
    {
        for (std::size_t i=0; i<sizeof(license_msg)/sizeof(license_msg[0]); i++)
            llvm::outs() << license_msg[i] << '\n';
        return EXIT_SUCCESS;
    }

    // Route diagnostics through a text printer on stderr, prefixed "ygas".
    DiagnosticOptions diag_opts;
    diag_opts.ShowOptionNames = 1;
    diag_opts.ShowSourceRanges = 1;
    TextDiagnosticPrinter diag_printer(llvm::errs(), diag_opts);
    IntrusiveRefCntPtr<DiagnosticIDs> diagids(new DiagnosticIDs);
    DiagnosticsEngine diags(diagids, &diag_printer, false);
    FileSystemOptions opts;
    FileManager file_mgr(opts);
    SourceManager source_mgr(diags, file_mgr);
    diags.setSourceManager(&source_mgr);
    diag_printer.setPrefix("ygas");

    // Warn (rather than error) on options collected by the cl::Sink.
    for (std::vector<std::string>::const_iterator i=unknown_options.begin(),
         end=unknown_options.end(); i != end; ++i)
    {
        diags.Report(diag::warn_unknown_command_line_option) << *i;
    }

    // Load standard modules
    if (!LoadStandardPlugins())
    {
        diags.Report(diag::fatal_standard_modules);
        return EXIT_FAILURE;
    }

#ifndef BUILD_STATIC
    // Load plugins; a failed plugin load is a warning, not fatal.
    for (std::vector<std::string>::const_iterator i=plugin_names.begin(),
         end=plugin_names.end(); i != end; ++i)
    {
        if (!LoadPlugin(*i))
            diags.Report(diag::warn_plugin_load) << *i;
    }
#endif

    // Default to stdin if no filename specified.
    if (in_filename.empty())
        in_filename = "-";

    return do_assemble(source_mgr, diags);
}
|
import requests
from bs4 import BeautifulSoup

url = 'https://www.example.com'
# Bound the request so a stalled server cannot hang the script, and fail
# loudly on HTTP errors instead of silently parsing an error page.
page = requests.get(url, timeout=10)
page.raise_for_status()
soup = BeautifulSoup(page.content, 'html.parser')
# All <p> elements found in the document.
data = soup.find_all('p')
# Process the data
|
<filename>src/constants.js
/**
* Created by FDD on 2017/9/18.
* @desc 静态变量相关
*/
// Event type identifiers used throughout the map wrapper.
const EVENT_TYPE = {
  LOAD_MAP_SUCCESS: 'loadMapSuccess', // map finished initializing
  CLICK: 'click', // click
  DBCLICK: 'dbclick', // double click
  SINGLECLICK: 'singleclick', // single click
  MOVESTART: 'movestart', // map started moving
  MOVEEND: 'moveend', // map stopped moving
  POINTERDRAG: 'pointerdrag', // pointer drag
  POINTERMOVE: 'pointermove', // pointer move
  PRECOMPOSE: 'precompose', // before rendering starts
  POSTRENDER: 'postrender', // rendering started
  POSTCOMPOSE: 'postcompose', // rendering finished
  PROPERTYCHANGE: 'propertychange', // property changed
  CHANGE: 'change', // change
  CHANGELAYERGROUP: 'change:layerGroup', // layer group changed
  CHANGESIZE: 'change:size', // size changed
  CHANGETARGET: 'change:target', // target changed
  CHANGEVIEW: 'change:view', // view changed
  FEATUREONMOUSEOVER: 'feature:onmouseover', // pointer entered a feature
  FEATUREONMOUSEOUT: 'feature:onmouseout', // pointer left a feature
  FEATUREONMOUSEDOWN: 'feature:onmousedown', // button pressed on a feature
  FEATUREONMOUSEUP: 'feature:onmouseup', // button released on a feature
  FEATUREONMOVE: 'feature:onmove', // feature moved
  FEATUREONSELECT: 'feature:onselect', // feature selected
  FEATUREONDISSELECT: 'feature:ondisselect', // feature deselected
  OVERLAYONMOUSELEFT: 'overlay:onmouseleft', // left-button event on an overlay
  OVERLAYONMOUSERIGHT: 'overlay:onmouseright', // right-button event on an overlay
  OVERLAYCLICK: 'overlay:click', // overlay clicked
  OVERLAYONMOUSEOVER: 'overlay:onmouseover', // pointer entered an overlay
  OVERLAYONMOUSEOUT: 'overlay:onmouseout' // pointer left an overlay
};

// Internal feature property keys.
const INTERNAL_KEY = {
  SELECTABLE: 'selectable', // whether a feature can be selected
  MOVEABLE: 'moveable' // whether a feature can be moved
};

// Shared CSS class names.
const BASE_CLASS_NAME = {
  CLASS_HIDDEN: 'hmap-hidden',
  CLASS_SELECTABLE: 'hmap-selectable',
  CLASS_UNSELECTABLE: 'hmap-unselectable',
  CLASS_CONTROL: 'hmap-control'
};

// Measurement units and their meter conversion factors. Declared const to
// match the rest of this file: the binding is never reassigned, only the
// METERS_PER_UNIT object is filled in below.
const UNITS = {
  DEGREES: 'degrees',
  FEET: 'ft',
  METERS: 'm',
  PIXELS: 'pixels',
  TILE_PIXELS: 'tile-pixels',
  USFEET: 'us-ft',
  METERS_PER_UNIT: {}
};
// Pixel-based units have no entry here (no fixed meter equivalent).
UNITS.METERS_PER_UNIT[UNITS.DEGREES] = (2 * Math.PI * 6370997) / 360;
UNITS.METERS_PER_UNIT[UNITS.FEET] = 0.3048;
UNITS.METERS_PER_UNIT[UNITS.METERS] = 1;
UNITS.METERS_PER_UNIT[UNITS.USFEET] = 1200 / 3937;

// Allowed size ratios of the overview map relative to the main map.
const OVERVIEWMAP = {
  MIN_RATIO: 0.1,
  MAX_RATIO: 0.75
};

export { EVENT_TYPE, INTERNAL_KEY, BASE_CLASS_NAME, UNITS, OVERVIEWMAP };
|
-- Catalog of movies.
CREATE TABLE Movies (
name VARCHAR(30),      -- movie title
genres VARCHAR(200),   -- genre list; presumably delimiter-separated -- TODO confirm format
release_year INT       -- year of release
);
|
<reponame>ftraple/cpp-FractalCreator
#ifndef BITMAP_FILE_HEADER_HPP
#define BITMAP_FILE_HEADER_HPP
#include <cstdint>
// The on-disk BMP file header must be 2-byte aligned with no padding.
// Use push/pop so the packing does not leak into whatever the including
// translation unit declares after this header (plain `#pragma pack(2)`
// stays in effect indefinitely).
#pragma pack(push, 2)
// First 14 bytes of a .bmp file: signature, total file size, reserved
// field, and byte offset of the pixel data.
struct BitmapFileHeader {
    char header[2]{'B', 'M'};  // magic signature "BM"
    int32_t fileSize;          // total size of the file in bytes
    int32_t reserver{0};       // reserved, must be zero
    int32_t dataOffset;        // offset from file start to the pixel array
};
#pragma pack(pop)
#endif // BITMAP_FILE_HEADER_HPP
|
#!/bin/bash
# Source library
source ../../../utils/helper.sh
source ./delta_configs/env.delta

# Delete each listed topic, but only if it currently exists: the describe
# output contains "Topic:<name><TAB>" when the topic is present.
topics_to_delete="test1 test2"
for topic in $topics_to_delete
do
  if [[ $(docker-compose exec connect kafka-topics --bootstrap-server $CONNECT_BOOTSTRAP_SERVERS --command-config /tmp/ak-tools-ccloud.delta --describe --topic $topic) =~ "Topic:${topic}"$'\t' ]]; then
    echo "Deleting $topic"
    # Fixed flag spelling: kafka-topics documents --delete (double dash),
    # matching the --describe/--topic usage above.
    docker-compose exec connect kafka-topics --bootstrap-server $CONNECT_BOOTSTRAP_SERVERS --command-config /tmp/ak-tools-ccloud.delta --delete --topic $topic 2>/dev/null
  fi
done
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Release version (e.g. 0.5.1) and next development version (e.g. 0.6.0).
REL=$1
DEV=$2
# Isolated local Maven repository used for the whole release build.
REPO="-Dmaven.repo.local=/tmp/streams_release"
if [[ -z "$REL" || -z "$DEV" ]]; then
    echo "You must specify a release and new dev version"
    # Fixed typo in the user-facing message: "Useage" -> "Usage".
    echo "Usage: ./release.sh <new_version> <new_dev_version>"
    echo "Example: ./release.sh 0.5.1 0.6.0-SNAPSHOT"
    exit 1
fi
mkdir -p /tmp/streams_release
mkdir -p logs
# Print tool versions and current git state into the release log.
printInfo() {
    # Fix: in bash, `echo "\n"` prints a literal backslash-n; use empty
    # echo calls to emit the intended blank lines.
    echo ""
    mvn -v
    echo ""
    docker -v
    echo ""
    docker info
    echo ""
    echo ""
    git status
    git log | head
}
# Abort the release if the tail of the given Maven log lacks BUILD SUCCESS.
checkStatus() {
    local output=$1
    if ! tail $output | grep -qE 'BUILD SUCCESS'; then
        echo "Release failed"
        exit 1
    fi
}
#streams
git clone https://github.com/apache/streams.git ./streams-$REL
cd streams-$REL
printInfo
# License/header audit must pass before releasing.
mvn -Pcheck apache-rat:check -e -DskipTests=true -Drat.excludeSubprojects=false $REPO > ../logs/streams_ratcheck.txt
checkStatus ../logs/streams_ratcheck.txt
# Full unit-test run.
mvn clean test $REPO > ../logs/streams_unittests.txt
checkStatus ../logs/streams_unittests.txt
# Tag the release and bump to the next development version (no push yet).
mvn -Papache-release $REPO release:prepare -DpushChanges=false -DautoVersionSubmodules=true -DreleaseVersion=$REL -DdevelopmentVersion=$DEV-SNAPSHOT -Dtag=streams-project-$REL > ../logs/streams_release-prepare.txt
checkStatus ../logs/streams_release-prepare.txt
# Build and deploy the release artifacts from the tagged checkout.
mvn -Papache-release $REPO clean install release:perform -Darguments='-Dmaven.test.skip.exec=true' -Dgoals=deploy -DlocalRepoDirectory=. -DlocalCheckout=true > ../logs/streams_release-perform.txt
checkStatus ../logs/streams_release-perform.txt
git push origin master
git push origin streams-project-$REL
# Fix: "cd.." is not a command; a space is required to change directory.
cd ..
cat << EOM
##################################################################
RELEASE COMPLETE
##################################################################
EOM
|
<gh_stars>1-10
package com.singularitycoder.folkdatabase.auth.viewmodel;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import androidx.lifecycle.ViewModel;
import com.singularitycoder.folkdatabase.auth.repository.LoginAuthRepository;
import com.singularitycoder.folkdatabase.helper.RequestStateMediator;
import io.reactivex.disposables.CompositeDisposable;
public class AuthViewModel extends ViewModel {

    private static final String TAG = "AuthViewModel";

    private MutableLiveData<RequestStateMediator> mutableLiveData;
    private LoginAuthRepository loginAuthRepository;
    private CompositeDisposable compositeDisposable = new CompositeDisposable();

    /** Fetches the singleton login repository and caches it in the field. */
    private LoginAuthRepository repository() {
        loginAuthRepository = LoginAuthRepository.getInstance();
        return loginAuthRepository;
    }

    /** Attempts to log the user in; exposes the request state as LiveData. */
    public LiveData<RequestStateMediator> loginUserFromRepository(String email, String password) throws IllegalArgumentException {
        mutableLiveData = repository().loginUser(email, password);
        return mutableLiveData;
    }

    /** Reads the sign-up status for the given email address. */
    public LiveData<RequestStateMediator> getSignUpStatusFromRepository(String email) throws IllegalArgumentException {
        mutableLiveData = repository().readSignUpStatus(email);
        return mutableLiveData;
    }

    /** Triggers a password reset for the given email address. */
    public LiveData<RequestStateMediator> resetPasswordFromRepository(String email) throws IllegalArgumentException {
        mutableLiveData = repository().resetPassword(email);
        return mutableLiveData;
    }

    @Override
    protected void onCleared() {
        super.onCleared();
        // Release any pending Rx subscriptions when the ViewModel is destroyed.
        compositeDisposable.dispose();
    }
}
|
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { RouterModule} from "@angular/router";
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { MatSliderModule } from '@angular/material/slider';
import { MainNavComponent } from './Components/main-nav/main-nav.component';
import { LayoutModule } from '@angular/cdk/layout';
import { MatToolbarModule } from '@angular/material/toolbar';
import { MatButtonModule } from '@angular/material/button';
import { MatSidenavModule } from '@angular/material/sidenav';
import { MatIconModule } from '@angular/material/icon';
import { MatListModule } from '@angular/material/list';
import { HomeNavComponent } from './Components/home-nav/home-nav.component';
import { LandingComponent } from './Components/landing/landing.component';
import { HomeComponent } from './Components/home/home.component';
import { MatGridListModule } from '@angular/material/grid-list';
import { MatCardModule } from '@angular/material/card';
import { MatMenuModule } from '@angular/material/menu';
import { PharmacyComponent } from './Components/pharmacy/pharmacy.component';
import { AuthModule } from "@auth0/auth0-angular";
import { AuthButtonComponent } from './Components/auth-button/auth-button.component';
import { environment } from 'src/environments/environment';
import { HttpClientModule } from '@angular/common/http';
// Root Angular module: registers the app's components, Material UI modules,
// routing, and Auth0 authentication (configured from the environment files).
@NgModule({
  declarations: [
    AppComponent,
    MainNavComponent,
    HomeNavComponent,
    LandingComponent,
    HomeComponent,
    PharmacyComponent,
    AuthButtonComponent
  ],
  imports: [
    BrowserModule,
    AppRoutingModule,
    BrowserAnimationsModule,
    MatSliderModule,
    LayoutModule,
    MatToolbarModule,
    MatButtonModule,
    MatSidenavModule,
    MatIconModule,
    MatListModule,
    MatGridListModule,
    MatCardModule,
    MatMenuModule,
    RouterModule,
    HttpClientModule,
    // Auth0 tenant settings come from src/environments.
    AuthModule.forRoot({
      domain: environment.DOMAIN,
      clientId: environment.CLIENTID
    }),
  ],
  providers: [],
  bootstrap: [AppComponent]
})
export class AppModule { }
|
def convert_time_format(time):
    """
    Convert a time from 24-hour "HH:MM" format to 12-hour AM/PM format.

    Args:
        time (str): The time in 24-hour "HH:MM" format.

    Returns:
        str: The time formatted as e.g. "1:05 PM" (hour not zero-padded,
        minutes always two digits, space before the AM/PM marker —
        matching the original output format).
    """
    hour, minutes = (int(part) for part in time.split(':')[:2])

    # Map the 24-hour value onto the 12-hour clock:
    # 0 -> 12 AM, 1-11 -> AM, 12 -> 12 PM, 13-23 -> 1-11 PM.
    pm = hour >= 12
    hour = hour % 12
    if hour == 0:
        hour = 12

    # {:02d} zero-pads the minutes, replacing the manual `minutes < 10` branch.
    return '{}:{:02d} {}'.format(hour, minutes, 'PM' if pm else 'AM')
|
/***************************************************************************
MIT License
Copyright (c) 2018 Rohit
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*****************************************************************************/
#ifndef MOVIO_HPP
#define MOVIO_HPP
#include <iostream>
#include <string>
#include "opencv2/opencv.hpp"
#include <opencv2/core/core.hpp>
#include "opencv2/features2d.hpp"
#include "opencv2/xfeatures2d.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/calib3d/calib3d.hpp"
using namespace std;
using namespace cv;
/**
 * Visual-odometry driver (declaration only): loads an input sequence from
 * `filepath`, extracts image features and tracks them between frames.
 * NOTE(review): behavior inferred from names — confirm against Movio.cpp.
 */
class Movio {
 private:
  string filepath;  // path to the input video / image sequence
  bool debug;       // when true, presumably emits extra diagnostics — confirm
 public:
  Movio(string filepath, bool debug);
  ~Movio();
  // Runs the odometry loop; return value semantics defined in the .cpp.
  bool startOdom();
  // Detects SURF keypoints in `src` using the supplied detector.
  void getSURFFeatures(const Mat& src, Ptr<xfeatures2d::SURF> detector,
      vector<Point2f>& keypoints);
  // Detects FAST corner features in `src`.
  void getFastFeatures(const Mat& src, vector<Point2f>& keypoints);
  // Tracks `point1` from src1 into src2, filling `point2` with the matches.
  void trackFeatures(const Mat& src1, const Mat& src2, vector<Point2f>& point1,
      vector<Point2f>& point2);
};
#endif
|
// IndexStyles.js:
// ------------------
import styled from 'styled-components';
// Full-viewport banner. The cover image lives in an ::after pseudo-element
// so the grayscale/blur filter does not affect overlaid child content.
const Banner = styled.div `
  &:after {
    content: "";
    display: block;
    height: 100vh;
    width: 100%;
    background-image: url('/img/cover.jpg');
    background-size: cover;
    background-repeat: no-repeat;
    background-position: center;
    filter: grayscale(100%) blur(2px);
  }
`;
// Overlay wrapper, absolutely centered over the banner; its inner div stacks
// children vertically, centered on both axes.
const TextWrapper = styled.div`
  position: absolute;
  z-index: 1;
  left: 50%;
  top: 50%;
  transform: translate(-50%, -50%);
  color: white;
  div{
    display: flex;
    /* BUG FIX: was the garbled value "cexºnter" */
    justify-content: center;
    align-items: center;
    flex-direction: column;
  }
`;
// "More" indicator pinned near the bottom of the banner; a downward arrow
// (arrow.svg) is drawn below the text in an ::after pseudo-element.
const MoreText = styled.div`
  position: absolute;
  color: #ffffff;
  text-align: center;
  text-transform: uppercase;
  letter-spacing: 0.225em;
  font-weight: 600;
  font-size: 1.2rem;
  z-index: 1;
  left: 50%;
  bottom: 10%;
  transform: translate(-50%, -50%);
  &:after {
    content: "";
    display: block;
    height: 2rem;
    width: 2rem;
    left: 50%;
    position: absolute;
    margin: 1em 0 0 -0.75em;
    background-image: url("arrow.svg");
    background-size: cover;
    background-repeat: no-repeat;
    background-position: center;
  }
`;
// Teal content section with centered, uppercase typography; the inner div
// constrains content to 66% width.
const SectionTwo = styled.section`
  background-color: #21b2a6;
  text-align: center;
  padding: 10rem 0;
  div {
    width: 66%;
    margin: 0 auto;
  }
  h2 {
    font-size: 3rem;
    padding: 1.35em 0;
    color: #ffffff;
    border-bottom: 2px solid #1d9c91;
    text-transform: uppercase;
    letter-spacing: 0.6rem;
    margin: 0;
  }
  p {
    text-transform: uppercase; color: #c8ece9; text-align: center; letter-spacing: 0.225em; font-size: 1.5rem;
  }
  h5 {
    font-size: 1.4rem;
    line-height: 2rem;
    color: #ffffff;
    border-bottom: 2px solid #1d9c91;
    font-weight: 800;
    letter-spacing: 0.225em;
    text-transform: uppercase;
    padding-bottom: 0.5rem;
    margin-bottom: 5rem;
  }
`;
// We need to export the styles defined
// for these 3 components (Banner, TextWrapper)
export { Banner, TextWrapper, MoreText };
|
/*
* Copyright (c) 2020 Elastos Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the 'Software'), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Logger } from 'src/app/logger';
import { EssentialsWeb3Provider } from 'src/app/model/essentialsweb3provider';
import Web3 from 'web3';
import { NFTAsset } from '../model/nfts/nftasset';
import { NFTResolvedInfo } from '../model/nfts/resolvedinfo';
import { WalletNetworkService } from './network.service';
import { WalletPrefsService } from './pref.service';
/**
 * Shape of an ERC721 Transfer event log entry as returned by web3's
 * getPastEvents(). Sample values kept from real logs for reference.
 */
type ERC721Transfer = {
    address: string; // NFT contract address - "0x020c7303664bc88ae92cE3D380BF361E03B78B81"
    blockHash: string; // "0xf11791e3662ac314eee6f57eafcb3448754aa7198f2a93a505ddc5679b933894"
    blockNumber: number; // 9635607
    event: "Transfer";
    // raw: {data: '0x57919fe4ec94a175881ded015092d6cc6ec106e84ac15d0e…0000000000000000000000000000000000000000000000001', topics: Array(4)}
    returnValues: { // Matches the Transfer event signature
        [0]: string; // sender - "0x02E8AD0687D583e2F6A7e5b82144025f30e26aA0"
        [1]: string; // receiver - "0xbA1ddcB94B3F8FE5d1C0b2623cF221e099f485d1"
        [2]: string; // token ID - "39608514200588865283440841425600775513887709291921581824093434814539493127892"
    }
}
/**
 * List of popular IPFS gateways that we want to replace with our preferred
 * gateway instead (see replaceIPFSUrl()), to reduce third-party rate limiting.
 */
const IPFSGatewayPrefixesToReplace = [
    "https://gateway.pinata.cloud/ipfs"
]
@Injectable({
    providedIn: 'root'
})
export class ERC721Service {
    /** Web3 variables to call smart contracts */
    private web3: Web3;
    private erc721ABI: any;

    constructor(private prefs: WalletPrefsService, private http: HttpClient, private networkService: WalletNetworkService) {
        // Invalidate the cached web3 instance whenever the active network
        // changes, so it is rebuilt against the new network's RPC endpoint.
        this.networkService.activeNetwork.subscribe(activeNetwork => {
            if (activeNetwork) {
                this.web3 = null;
            }
        });
    }

    // Lazy web3 init for angular bundle optimization
    private getWeb3(): Web3 {
        if (this.web3)
            return this.web3;

        const trinityWeb3Provider = new EssentialsWeb3Provider(this.networkService.activeNetwork.value.getMainEvmRpcApiUrl());
        this.web3 = new Web3(trinityWeb3Provider);

        // ERC721 enumerable contract ABI (loaded once, alongside web3)
        this.erc721ABI = require('../../../assets/wallet/ethereum/Erc721EnumerableABI.json');

        return this.web3;
    }

    /**
     * Resolves basic information (currently only the name) for the ERC721
     * contract at the given address. Returns null on any error.
     */
    public async getCoinInfo(address: string): Promise<NFTResolvedInfo> {
        try {
            const erc721Contract = new (this.getWeb3()).eth.Contract(this.erc721ABI, address);
            Logger.log('wallet', 'erc721Contract', erc721Contract);

            const nftName = await erc721Contract.methods.name().call();
            Logger.log('wallet', 'NFT name:', nftName);

            return {
                name: nftName
            };
        } catch (err) {
            Logger.log('wallet', 'getCoinInfo', err);
            return null;
        }
    }

    /**
     * Finds all assets owned by a given user for a given NFT contract.
     *
     * Returns null if owner assets can't be retrieved (i.e. not a enumerable contract, non standard contract, etc)
     */
    public async fetchAllAssets(accountAddress: string, contractAddress: string): Promise<NFTAsset[]> {
        let assetsCouldBeRetrieved = false;
        const erc721Contract = new (this.getWeb3()).eth.Contract(this.erc721ABI, contractAddress, { from: accountAddress });

        // Make sure this is a enumerable NFT - If not, we can't get the assets.
        // Problem: some contracts don't even implement supportsInterface().
        /* const nftokenEnumerableInterface = await erc721Contract.methods.supportsInterface('0x780e9d63').call();
        if (!nftokenEnumerableInterface) {
            Logger.warn("wallet", "ERC721 contract is not enumerable");
            return [];
        } */

        // Retrieve how many assets are owned by this account
        const assetsNumber = await erc721Contract.methods.balanceOf(accountAddress).call();
        // CLEANUP: was a stray console.log(); use the app logger like the rest of the file.
        Logger.log('wallet', 'assetsNumber', assetsNumber);

        // Iterate over tokenOfOwnerByIndex() to get more info. If an exception occurs this probably
        // means that tokenOfOwnerByIndex() is not implemented (not an enumerable ERC721).
        let assets: NFTAsset[] = [];
        try {
            let tokenIDs: any[] = [];

            // Some contracts implement getOwnerTokens() (crypto kitties) which directly returns the
            // tokens ids without a loop. This is legacy from when ERC721Enumerable was not defined.
            try {
                tokenIDs = await erc721Contract.methods.getOwnerTokens(accountAddress).call();
                assetsCouldBeRetrieved = true;
            }
            catch (e) {
                // Method not implemented. Try the standard enumeration (ERC721Enumerable)
                try {
                    for (let i = 0; i < assetsNumber; i++) {
                        const tokenID = await erc721Contract.methods.tokenOfOwnerByIndex(accountAddress, i).call();
                        assetsCouldBeRetrieved = true;
                        tokenIDs.push(tokenID);
                    }
                }
                catch (e) {
                    // Still no such method? Try the transfer event discovery way
                    tokenIDs = await this.fetchTokenIDsFromTransferEvents(accountAddress, contractAddress);
                    if (tokenIDs)
                        assetsCouldBeRetrieved = true;
                }
            }

            // CLEANUP: was a stray console.log().
            Logger.log('wallet', 'tokenIDs', tokenIDs, assetsCouldBeRetrieved);

            for (let i = 0; i < tokenIDs.length; i++) {
                let tokenID = tokenIDs[i];

                let asset = new NFTAsset();
                asset.id = tokenID;
                asset.displayableId = asset.id;

                // Now try to get more information about this asset - ERC721Metadata / tokenURI()
                let tokenURI: string = null;
                try {
                    tokenURI = await erc721Contract.methods.tokenURI(tokenID).call();
                }
                catch (e) {
                    // Inexisting method, contract not adhering to the metadata interface?
                    // Try the legacy tokenMetadata() implemented by some contracts
                    try {
                        tokenURI = await erc721Contract.methods.tokenMetadata(tokenID).call();
                    }
                    catch (e) {
                        // Still nothing? That's ok, we'll display placeholder values.
                        // Silent catch
                    }
                }

                if (tokenURI) {
                    await this.extractAssetMetadata(asset, tokenURI);
                }

                assets.push(asset);
            }
        }
        catch (e) {
            // CLEANUP: was console.warn(e) marked "TMP"; route through the app logger.
            Logger.warn('wallet', 'fetchAllAssets failed', e);
        }

        // If assets list couldn't be fetched, return null so that the caller knows this
        // doesn't mean we have "0" asset.
        if (!assetsCouldBeRetrieved)
            return null;

        return assets;
    }

    /**
     * Method to discover ERC721 tokens owned by a user based on Transfer logs.
     */
    public async fetchTokenIDsFromTransferEvents(accountAddress: string, contractAddress: string): Promise<any[]> {
        // User's wallet address on 32 bytes
        let paddedAccountAddress = '0x' + accountAddress.substr(2).padStart(64, "0"); // 64 = 32 bytes * 2 chars per byte // 20 bytes to 32 bytes

        try {
            // Get transfer logs from the EVM node
            // More info at: https://docs.alchemy.com/alchemy/guides/eth_getlogs#what-are-event-signatures
            const erc721Contract = new (this.getWeb3()).eth.Contract(this.erc721ABI, contractAddress, { from: accountAddress });
            let transferEventTopic = this.web3.utils.sha3("Transfer(address,address,uint256)");
            let transferInEvents = await erc721Contract.getPastEvents('Transfer', {
                // All blocks
                fromBlock: 0, toBlock: 'latest',
                // transfer event signature + 2nd parameter should be the account address. (meaning "received the NFT")
                topics: [
                    transferEventTopic,
                    null,
                    paddedAccountAddress // Received by us
                ]
            }) as any as ERC721Transfer[];

            // Also get transfer out events, so we can know which tokens are still in our possession
            let transferOutEvents = await erc721Contract.getPastEvents('Transfer', {
                // All blocks
                fromBlock: 0, toBlock: 'latest',
                // transfer event signature + 1st parameter should be the account address. (meaning "sent the NFT")
                topics: [
                    transferEventTopic,
                    paddedAccountAddress // Sent by us
                ]
            }) as any as ERC721Transfer[];

            // Based on all transfers (in/out), rebuild the history of NFT ownerships until we can get
            // The list of tokens that we still own
            let allTransferEvents = [...transferInEvents, ...transferOutEvents];

            // Sort by date ASC
            // NOTE(review): sorting by blockNumber only — multiple transfers of the
            // same token inside one block could be mis-ordered; confirm acceptable.
            allTransferEvents = allTransferEvents.sort((a, b) => a.blockNumber - b.blockNumber);

            // Retrace history from old blocks to recent blocks
            let ownedTokenIds: { [tokenId: string]: boolean } = {};
            allTransferEvents.forEach(transferEvent => {
                // User account as sender? Remove the token from the list
                if (transferEvent.returnValues[0].toLowerCase() === accountAddress.toLowerCase())
                    delete ownedTokenIds[transferEvent.returnValues[2]];

                // User account as received? Add the token to the list
                if (transferEvent.returnValues[1].toLowerCase() === accountAddress.toLowerCase())
                    ownedTokenIds[transferEvent.returnValues[2]] = true;
            });

            return Object.keys(ownedTokenIds);
        }
        catch (e) {
            // BUG FIX: message previously said "ERC1155"; this service handles ERC721.
            Logger.warn("wallet", "Failed to get ERC721 events", e);
            return null;
        }
    }

    /**
     * Tries different ways to automatically extract metadata from a remote token uri, and then save
     * the information in the asset.
     *
     * Expected format (https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md):
        {
            "title": "Asset Metadata",
            "type": "object",
            "properties": {
                "name": {
                    "type": "string",
                    "description": "Identifies the asset to which this NFT represents"
                },
                "description": {
                    "type": "string",
                    "description": "Describes the asset to which this NFT represents"
                },
                "image": {
                    "type": "string",
                    "description": "A URI pointing to a resource with mime type image/* representing the asset to which this NFT represents. Consider making any images at a width between 320 and 1080 pixels and aspect ratio between 1.91:1 and 4:5 inclusive."
                }
            }
        }
     */
    private async extractAssetMetadata(asset: NFTAsset, tokenURI: string): Promise<any> {
        // Unsupported url format
        if (!tokenURI || (!tokenURI.startsWith("http") && !tokenURI.startsWith("ipfs"))) {
            return;
        }

        Logger.log("wallet", "Extracting ERC721 metadata, original token uri:", tokenURI);

        // If the url is a IPFS url, replace it with a gateway
        tokenURI = this.replaceIPFSUrl(tokenURI);

        try {
            let metadata: any = await this.http.get(tokenURI).toPromise();
            Logger.log("wallet", "Got NFT metadata", metadata);

            // Name
            if ("properties" in metadata && "name" in metadata.properties)
                asset.name = metadata.properties.name.description || null;
            else
                asset.name = metadata.name || null;

            // Description
            if ("properties" in metadata && "description" in metadata.properties)
                asset.description = metadata.properties.description.description || null;
            else
                asset.description = metadata.description || null;

            // Picture
            if ("properties" in metadata && "image" in metadata.properties)
                asset.imageURL = this.replaceIPFSUrl(metadata.properties.image.description || null);
            else
                asset.imageURL = this.replaceIPFSUrl(metadata.image || null);

            // Unset the image if not a valid url
            if (asset.imageURL && !asset.imageURL.startsWith("http"))
                asset.imageURL = null;
        }
        catch (e) {
            // Silent catch
            return;
        }
    }

    /**
     * If the url starts with ipfs, returns the gateway-accessible url.
     * Otherwise, returns the given url.
     */
    private replaceIPFSUrl(anyUrl: string): string {
        if (!anyUrl)
            return anyUrl;

        if (anyUrl.startsWith("ipfs"))
            return `https://ipfs.trinity-tech.io/ipfs/${anyUrl.replace("ipfs://", "")}`;

        // Replace IPFS gateways potentially hardcoded by NFTs, with the ipfs.io gateway, to reduce
        // rate limiting api call errors (like on pinata).
        // NOTE: not working well, maybe IPFS hashes can't be fetched (eg getting a vitrim or bunny hash through ttech.io gateway often times out)
        for (let gateway of IPFSGatewayPrefixesToReplace) {
            if (anyUrl.startsWith(gateway)) {
                anyUrl = anyUrl.replace(gateway, "https://ipfs.trinity-tech.io/ipfs");
                break; // Don't search further
            }
        }

        return anyUrl;
    }

    /*public async getERC20Coin(address: string, ethAccountAddress: string) {
        const coinInfo = await this.getCoinInfo(address, ethAccountAddress);
        const newCoin = new ERC20Coin(coinInfo.coinSymbol, coinInfo.coinName, address, this.prefs.activeNetwork, false);
        return newCoin;
    } */
}
|
import { TestCase } from './testCase';
// Simple value-object implementation of the TestCase interface.
export class TestCaseImpl<I, O> implements TestCase<I, O> {
  public input: I;
  public output: O;
  public ids?: Record<string, string>;

  constructor(input: I, output: O, ids?: Record<string, string>) {
    this.input = input;
    this.output = output;
    this.ids = ids;
  }
}
|
#!/bin/bash
SCRIPT=$(realpath "$0")
SCRIPT_PATH=$(dirname "$SCRIPT")
DATA_PATH="$SCRIPT_PATH/data"

# BUG FIX: validate arguments BEFORE using $1 — the original ran
# arguments-from-config.rb on an empty argument first, producing a
# confusing error instead of the usage message. Also: `exit -1` is not
# portable; use exit 1.
if [ $# -lt 1 ]; then
    echo "please specify config.json"
    exit 1
fi

ARGUMENTS=$("$SCRIPT_PATH/arguments-from-config.rb" "$1")

# Emit a crontab snippet: monthly/daily/hourly roll-ups plus per-minute collection.
echo "\
0 0 1 * * /bin/bash -l -c \"$SCRIPT_PATH/average-and-truncate.sh day 30 month 100\"
0 0 * * * /bin/bash -l -c \"$SCRIPT_PATH/average-and-truncate.sh hour 24 day 100\"
0 * * * * /bin/bash -l -c \"$SCRIPT_PATH/average-and-truncate.sh minute 60 hour 100\"
* * * * * /bin/bash -l -c \"$SCRIPT_PATH/collect.sh $ARGUMENTS\"
"
|
/**
Copyright 2013 <NAME> project Ardulink http://www.ardulink.org/
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author <NAME>
*/
package org.zu.ardulink.connection.bluetooth;
import static org.zu.ardulink.util.Preconditions.checkNotNull;
import static org.zu.ardulink.util.Preconditions.checkState;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Semaphore;
import javax.bluetooth.BluetoothStateException;
import javax.bluetooth.DiscoveryAgent;
import javax.bluetooth.LocalDevice;
import javax.bluetooth.RemoteDevice;
import javax.bluetooth.ServiceRecord;
import javax.bluetooth.UUID;
import javax.microedition.io.Connector;
import javax.microedition.io.StreamConnection;
import javax.microedition.io.StreamConnectionNotifier;
import org.zu.ardulink.connection.Connection;
import org.zu.ardulink.connection.serial.AbstractSerialConnection;
/**
* [ardulinktitle] [ardulinkversion]
*
* @author <NAME> project Ardulink http://www.ardulink.org/
*
* [adsense]
*/
public class BluetoothConnection extends AbstractSerialConnection implements Connection {

    // TODO should be replaced by Semaphore
    // Monitor used to block the caller until the discovery listener signals
    // that an inquiry / service search finished.
    private final Object lock = new Object();
    private final ArdulinkDiscoveryListener listener = new ArdulinkDiscoveryListener(this, lock);

    // Maps a discovered device name to the service record used to connect to it
    // (filled externally via setPorts, presumably by the discovery listener — confirm).
    private Map<String, ServiceRecord> ports = new HashMap<String, ServiceRecord>();

    private javax.microedition.io.Connection connection;
    private StreamConnectionNotifier streamConnNotifier;
    private StreamConnection streamConnection;

    //read string from spp client
    private InputStream inputStream;
    private OutputStream outputStream;

    public BluetoothConnection() {
        super();
    }

    /**
     * Runs a blocking bluetooth device inquiry, then a serial-port service
     * search on each discovered device, and returns the connectable device names.
     *
     * NOTE(review): lock.wait() is not wrapped in a condition loop, so a
     * spurious wakeup would end the wait early — confirm against the
     * listener's notify contract.
     */
    @Override
    public List<String> getPortList() {
        LocalDevice localDevice;
        try {
            localDevice = LocalDevice.getLocalDevice();
        } catch (BluetoothStateException e) {
            throw new RuntimeException(e);
        }
        DiscoveryAgent agent = localDevice.getDiscoveryAgent();
        listener.reset();
        try {
            // Wait for the listener to signal inquiry completion.
            agent.startInquiry(DiscoveryAgent.GIAC, listener);
            synchronized(lock){
                lock.wait();
            }
        }
        catch (Exception e) {
            throw new RuntimeException(e);
        }
        for (RemoteDevice device : listener.getDevices()) {
            try {
                // One blocking service search per discovered device.
                agent.searchServices(serviceName(), serialPortService(), device, listener);
                synchronized(lock){
                    lock.wait();
                }
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        return new ArrayList<String>(ports.keySet());
    }

    private static int[] serviceName() {
        return new int[] { 0x0100 }; // Service name
    }

    private static UUID[] serialPortService() {
        return new UUID[] { new UUID(0x1101) }; // Serial Port Service
    }

    /**
     * Connects to a device; accepts exactly one parameter: the String device
     * name previously returned by getPortList().
     */
    @Override
    public boolean connect(Object... params) {
        checkState(checkNotNull(params, "Params must not be null").length == 1,
                "This connection accepts exactly one String device name.");
        checkState(params[0] instanceof String,
                "This connection accepts a just a parameter with type String");
        String deviceName = (String) params[0];
        boolean retvalue = false;
        retvalue = connect(deviceName);
        return retvalue;
    }

    /**
     * Opens a stream connection to the named device, wires up the I/O streams
     * and starts the reader thread. Returns true on success, false otherwise
     * (failures are written to the log rather than thrown).
     */
    public boolean connect(String deviceName) {
        boolean retvalue = false;
        try {
            ServiceRecord serviceRecord = ports.get(deviceName);
            if(serviceRecord == null) {
                writeLog("The connection could not be made. Device not discovered");
            } else {
                String url = serviceRecord.getConnectionURL(ServiceRecord.NOAUTHENTICATE_NOENCRYPT, false);
                if(url == null) {
                    writeLog("The connection could not be made. Connection url not found");
                } else {
                    connection = Connector.open(url);
                    // Connector.open may return either a notifier (server side,
                    // accept a client) or a direct stream connection.
                    if(connection instanceof StreamConnectionNotifier) {
                        streamConnNotifier = (StreamConnectionNotifier)connection;
                        streamConnection = streamConnNotifier.acceptAndOpen();
                    } else if(connection instanceof StreamConnection) {
                        streamConnNotifier = null;
                        streamConnection = (StreamConnection)connection;
                    } else {
                        throw new Exception("Connection class not known. " + connection.getClass().getCanonicalName());
                    }

                    //read string from spp client
                    inputStream = streamConnection.openInputStream();
                    outputStream = streamConnection.openOutputStream();

                    setInputStream(inputStream);
                    setOutputStream(outputStream);
                    startReader();
                    writeLog("connection on " + deviceName + " established");
                    getContact().connected(getId(), deviceName);
                    setConnected(true);
                    retvalue = true;
                }
            }
        }
        catch(Exception e) {
            e.printStackTrace();
            writeLog("The connection could not be made." + e.getMessage());
        }
        return retvalue;
    }

    /**
     * Stops the reader and closes all streams/connections; always nulls the
     * connection fields (in finally) and notifies the contact of disconnection.
     */
    @Override
    public boolean disconnect() {
        try {
            if(isConnected()) {
                stopReader();
                inputStream.close();
                outputStream.close();
                streamConnection.close();
                if(streamConnNotifier != null) {
                    streamConnNotifier.close();
                }
                setConnected(false);
            }
            getContact().disconnected(getId());
            writeLog("connection closed");
        }
        catch(Exception e) {
            e.printStackTrace();
            writeLog("disconnection fails");
        }
        finally {
            inputStream = null;
            outputStream = null;
            streamConnection = null;
            streamConnNotifier = null;
            connection = null;
        }
        return !isConnected();
    }

    // Injects the device-name -> service-record map (used by getPortList/connect).
    public void setPorts(Map<String, ServiceRecord> ports) {
        this.ports = ports;
    }
}
|
<gh_stars>0
from __future__ import print_function
import pytest
from simtk import openmm
from simtk import unit
from simtk.openmm import app
import atomsmm
def execute(shifted, target):
    """Build a q-SPC-FW system with atomsmm's NearNonbondedForce and assert
    that its potential energy matches ``target``.

    Args:
        shifted (bool): passed through to NearNonbondedForce (shifted potential
            or not).
        target (float): expected potential energy, in the unit OpenMM returns.
    """
    rcut = 10*unit.angstroms
    rswitch = 9.5*unit.angstroms
    case = 'tests/data/q-SPC-FW'
    pdb = app.PDBFile(case + '.pdb')
    forcefield = app.ForceField(case + '.xml')
    system = forcefield.createSystem(pdb.topology, nonbondedMethod=app.CutoffPeriodic)
    # Replace the system's nonbonded force with atomsmm's near nonbonded force.
    force = atomsmm.NearNonbondedForce(rcut, rswitch, shifted)
    force.importFrom(atomsmm.hijackForce(system, atomsmm.findNonbondedForce(system))).addTo(system)
    # Zero-timestep integrator: we only need a single-point energy evaluation.
    integrator = openmm.VerletIntegrator(0.0*unit.femtoseconds)
    platform = openmm.Platform.getPlatformByName('Reference')
    simulation = app.Simulation(pdb.topology, system, integrator, platform)
    simulation.context.setPositions(pdb.positions)
    state = simulation.context.getState(getEnergy=True)
    potential = state.getPotentialEnergy()
    assert potential/potential.unit == pytest.approx(target)
def test_unshifted():
    # Reference energy for the unshifted (plainly truncated/switched) potential.
    execute(False, 11517.016971940495)
def test_shifted():
    # Reference energy for the shifted potential.
    execute(True, 3182.9377183815345)
|
<filename>apps/app/src/app/modules/administration/modules/user-management/modules/detail/guards/user-management-detail.guard.ts
import { Injectable } from '@angular/core';
import { ActivatedRouteSnapshot, CanActivate, CanDeactivate, Router, UrlTree } from '@angular/router';
import { catchError, Observable, of } from 'rxjs';
import { map } from 'rxjs/operators';
import { UserManagementDetailComponent } from '../components/user-management-detail/user-management-detail.component';
import { UserManagementDetailService } from '../services/user-management-detail.service';
@Injectable({
  providedIn: 'root',
})
export class UserManagementDetailGuard implements CanActivate, CanDeactivate<UserManagementDetailComponent> {
  constructor(private readonly userManagementDetailService: UserManagementDetailService, private router: Router) {}

  // Loads the user for the :userId route param before the route activates;
  // redirects to the user-management list page when the load errors.
  canActivate(route: ActivatedRouteSnapshot): Observable<boolean | UrlTree> {
    return this.userManagementDetailService.initialize(route.params.userId).pipe(
      map((user) => !!user),
      catchError(() => of(this.router.createUrlTree(['administration/user-management/list']))),
    );
  }

  // Always allows leaving, but lets the service tear down its detail state first.
  canDeactivate(): boolean {
    this.userManagementDetailService.deactivate();
    return true;
  }
}
|
# USAGE
# python create_dataset.py --input videos/real.mov --output Dataset/Real
# python create_dataset.py --input videos/fake.mp4 --output Dataset/Fake
import numpy as np
import argparse
from imutils.video import VideoStream
import cv2
import os

# CLI: optional input video (webcam when omitted) and the output directory
# where frames containing a confident face detection are written.
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--input", type=str, required=False,
    help="path to input video")
ap.add_argument("-o", "--output", type=str, required=True,
    help="path to output directory")
args = vars(ap.parse_args())

# Load the Caffe SSD face detector shipped in the FaceDetector directory.
protoPath = os.path.sep.join(["FaceDetector", "deploy.prototxt"])
modelPath = os.path.sep.join(["FaceDetector",
    "res10_300x300_ssd_iter_140000.caffemodel"])
net = cv2.dnn.readNetFromCaffe(protoPath, modelPath)

# No --input means capture from the default webcam.
if args["input"] is None:
    vs = VideoStream(src=0).start()
else:
    vs = cv2.VideoCapture(args["input"])

if not os.path.exists(args['output']):
    os.makedirs(args['output'])

read = 0   # frames read so far
saved = 0  # frames written to disk (also used as the output filename)

while True:
    # VideoStream.read() returns the frame directly; VideoCapture.read()
    # returns a (grabbed, frame) tuple and signals end-of-video via grabbed.
    if args["input"] is None:
        frame = vs.read()
    else:
        (grabbed, frame) = vs.read()
        if not grabbed:
            break
    # BUG FIX: the original compared `frame != []`, which is unreliable for
    # numpy arrays; a not-yet-ready webcam yields None, so skip such frames.
    if frame is None:
        continue

    read += 1

    blob = cv2.dnn.blobFromImage(cv2.resize(frame, (300, 300)), 1.0,
        (300, 300), (104.0, 177.0, 123.0))
    net.setInput(blob)
    detections = net.forward()

    # BUG FIX: len(detections) is the batch dimension (always 1); the number
    # of candidate detections is on axis 2.
    if detections.shape[2] > 0:
        # Keep only the most confident detection in the frame.
        i = np.argmax(detections[0, 0, :, 2])
        confidence = detections[0, 0, i, 2]

        if confidence > 0.5:
            p = os.path.sep.join([args["output"], "{}.png".format(saved)])
            cv2.imwrite(p, frame)
            saved += 1
            print("[INFO] saved {} to disk".format(p))

    cv2.imshow("Frame", frame)
    key = cv2.waitKey(1) & 0xFF
    if key == ord("z"):
        break

cv2.destroyAllWindows()
if args["input"] is None:
    vs.stop()
else:
    vs.release()
|
import { getHeader } from '../console';
import { revertAll } from '../git';
// Entry point for the "generi revert" command: prints the command header,
// then reverts all pending changes via the git helper.
export const setup = () => {
  getHeader('generi revert');
  revertAll();
};
|
<reponame>dogballs/battle-city
import { InputBinding, KeyboardButtonCode } from '../../core';
import { InputControl } from '../InputControl';
// Suggested for multi-player mode, first player, left side of the keyboard,
// because primary tank spawns on the left side of base
export class SecondaryKeyboardInputBinding extends InputBinding {
  constructor() {
    super();

    // Default key map: WASD movement on the left side of the keyboard with
    // the action/rewind keys on the surrounding keys.
    const defaults: [InputControl, KeyboardButtonCode][] = [
      [InputControl.Up, KeyboardButtonCode.W],
      [InputControl.Down, KeyboardButtonCode.S],
      [InputControl.Left, KeyboardButtonCode.A],
      [InputControl.Right, KeyboardButtonCode.D],
      [InputControl.Select, KeyboardButtonCode.Space],
      [InputControl.PrimaryAction, KeyboardButtonCode.F],
      [InputControl.SecondaryAction, KeyboardButtonCode.G],
      [InputControl.Rewind, KeyboardButtonCode.R],
      [InputControl.FastForward, KeyboardButtonCode.T],
    ];
    for (const [control, code] of defaults) {
      this.setDefault(control, code);
    }
  }
}
|
/// One node of an icon hierarchy (level/parentCode suggest a tree structure —
/// NOTE(review): confirm semantics against the decoding/usage site).
struct IconNode {
    let iconUrl: String     // standard-resolution icon URL
    let wikiName: String    // display name of the wiki entry
    let hdIconUrl: String   // high-resolution icon URL
    let wikiCode: String    // identifier code of the wiki entry
    let level: String       // depth level within the hierarchy (stored as text)
    let idx: String         // ordering index (stored as text)
    let parentCode: String  // wikiCode of the parent node
}
|
# Work from the orthofinder directory on the cluster storage.
cd /storage/home/users/pjt6/phy/orthofinder
# Reciprocal-best-hit BLAST between the two CDS FASTA files (2 threads),
# writing the result file into the parent directory.
python /storage/home/users/pjt6/misc_python/BLAST_output_parsing/Blast_RBH_two_fasta_file_evalue.py --threads 2 -o ../GCA_002911725.1_ASM291172v1_cds_from_genomi.fa_GCA_000365505.1_Phyt_para_P1569_V1_cds_from_genomi.fa GCA_002911725.1_ASM291172v1_cds_from_genomi.fa GCA_000365505.1_Phyt_para_P1569_V1_cds_from_genomi.fa
|
<reponame>xfyre/tapestry-5<gh_stars>10-100
package testsubjects;
/**
 * Test subject with one instance method and one static method — presumably
 * used to verify that static methods are skipped during class transformation
 * (see the testsubjects package); confirm against the corresponding test.
 */
public class StaticMethodsIgnored
{
    // Instance method: expected to be processed.
    void anInstanceMethod()
    {
    }

    // Static method: expected to be ignored.
    static void aStaticMethod()
    {
    }
}
|
<reponame>bhits-dev/try-policy
package gov.samhsa.c2s.trypolicy.service.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * DTO describing a sample document entry. Lombok generates the accessors,
 * equals/hashCode/toString, a builder and both no-arg and all-args constructors.
 */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class SampleDocDto {
    // Identifier of the document.
    private int id;
    // Whether this entry refers to a sample document.
    private boolean isSampleDocument;
    // Human-readable document name.
    private String documentName;
    // Location of the document file.
    private String filePath;
}
|
<filename>webauthn4j-core/src/test/java/com/webauthn4j/validator/RpIdHashValidatorTest.java
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webauthn4j.validator;
import com.webauthn4j.data.client.Origin;
import com.webauthn4j.server.ServerProperty;
import com.webauthn4j.util.MessageDigestUtil;
import com.webauthn4j.validator.exception.BadRpIdException;
import org.junit.jupiter.api.Test;
import java.nio.charset.StandardCharsets;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Tests for {@link RpIdHashValidator}: a candidate rpIdHash must equal the
 * SHA-256 digest of the relying-party ID carried by the {@link ServerProperty}.
 *
 * Created by ynojima on 2017/08/27.
 */
@SuppressWarnings("ConstantConditions")
class RpIdHashValidatorTest {

    private final Origin origin = Origin.create("https://example.com");
    private final RpIdHashValidator target = new RpIdHashValidator();

    // Matching rpIds: validation completes without throwing.
    @Test
    void verifyRpIdHash_test() {
        String rpIdA = "example.com";
        String rpIdB = "example.com";
        byte[] rpIdBytesA = rpIdA.getBytes(StandardCharsets.UTF_8);
        byte[] rpIdHashA = MessageDigestUtil.createSHA256().digest(rpIdBytesA);
        ServerProperty serverProperty = new ServerProperty(origin, rpIdB, null, null);
        //When
        target.validate(rpIdHashA, serverProperty);
    }

    // Hash of "sub.example.com" validated against rpId "example.com": rejected.
    @Test
    void verifyRpIdHash_test_with_different_rpIds() {
        String rpIdA = "sub.example.com";
        String rpIdB = "example.com";
        byte[] rpIdBytesA = rpIdA.getBytes(StandardCharsets.UTF_8);
        byte[] rpIdHashA = MessageDigestUtil.createSHA256().digest(rpIdBytesA);
        ServerProperty serverProperty = new ServerProperty(origin, rpIdB, null, null);
        //When
        assertThrows(BadRpIdException.class,
                () -> target.validate(rpIdHashA, serverProperty)
        );
    }

    // A null ServerProperty is an illegal argument, not a bad rpId.
    @Test
    void verifyRpIdHash_test_with_relyingParty_null() {
        String rpIdA = "example.com";
        byte[] rpIdBytesA = rpIdA.getBytes(StandardCharsets.UTF_8);
        byte[] rpIdHashA = MessageDigestUtil.createSHA256().digest(rpIdBytesA);
        //When
        assertThrows(IllegalArgumentException.class,
                () -> target.validate(rpIdHashA, null)
        );
    }
}
|
# Open an interactive beeline session against the HiveServer2-protocol endpoint
# on server01:21050 with SASL disabled, as user "hsu", with color and verbose output.
/opt/cloudera/parcels/CDH/lib/hive/bin/beeline -u "jdbc:hive2://server01:21050/;auth=noSasl" -n hsu -p hsu -d org.apache.hive.jdbc.HiveDriver --color=true --verbose=true
|
-- Median of tbl.val: PERCENTILE_CONT(0.5) computes the continuous median once
-- per row via an empty OVER() window; DISTINCT collapses duplicate
-- (val, median) pairs, and the outer AVG() of the identical median values
-- reduces the subquery to a single row.
SELECT AVG(median_val) as median_val
FROM (SELECT DISTINCT val,
             PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY val)
             OVER() median_val
      FROM tbl
     ) tmp;
|
/**
 * Generate a random alphanumeric string.
 *
 * Generalized from a fixed length of 8: callers may now pass any length,
 * while the no-argument call keeps its original behavior.
 *
 * @param {number} [length=8] number of characters to generate
 * @returns {string} string of `length` characters drawn from [A-Za-z0-9]
 */
const generateRandomString = (length = 8) => {
  const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
  let result = '';
  for (let i = 0; i < length; i++) {
    // Pick one character uniformly at random per position.
    result += characters.charAt(Math.floor(Math.random() * characters.length));
  }
  return result;
}
// Test
console.log(generateRandomString()); // e.g.: oem31hp8
|
#!/bin/bash
# Prepare the knowledge-graph datasets: unpack the raw archives and run the
# preprocessing script over each dataset.

# -p makes the script idempotent: re-running no longer fails on existing dirs.
mkdir -p data/WN18RR
mkdir -p data/FB15k-237
mkdir -p data/kinship
mkdir -p saved_models

tar -xvf data/WN18RR.tar.gz -C data/WN18RR
tar -xvf data/FB15k-237.tar.gz -C data/FB15k-237
tar -xvf data/kinship.tar.gz -C data/kinship

python wrangle_KG.py WN18RR
python wrangle_KG.py FB15k-237
python wrangle_KG.py FB15k-237-attr
python wrangle_KG.py kinship

# Add new dataset
#mkdir data/DATA_NAME
#python wrangle_KG.py DATA_NAME
|
/*
* Copyright © 2019 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*
*/
// Build a fresh channel double whose publish/once/invoke/subscribe methods
// are all independent jest mock functions.
export const createMockChannel = () => ({
	publish: jest.fn(),
	once: jest.fn(),
	invoke: jest.fn(),
	subscribe: jest.fn(),
});
// Build a bus double: registerChannel and setup both resolve to undefined.
export const createMockBus = () => {
	const bus = {
		registerChannel: jest.fn().mockResolvedValue(undefined),
		setup: jest.fn().mockResolvedValue(undefined),
	};
	return bus;
};
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { withRouter } from 'react-router-dom';
import { connect } from 'react-redux';
import { Main, PageHeader, PageHeaderTitle } from '@redhat-cloud-services/frontend-components';
import { Card, CardBody, Toolbar, ToolbarGroup, ToolbarItem, PaginationVariant } from '@patternfly/react-core';
import { ExclamationCircleIcon, LockIcon, PlusCircleIcon } from '@patternfly/react-icons';
import { baselinesTableActions } from '../BaselinesTable/redux';
import { compareActions } from '../modules';
import { historicProfilesActions } from '../HistoricalProfilesPopover/redux';
import { setHistory } from '../../Utilities/SetHistory';
import DriftTable from './DriftTable/DriftTable';
import ErrorAlert from '../ErrorAlert/ErrorAlert';
import TablePagination from '../Pagination/Pagination';
import AddSystemButton from './AddSystemButton/AddSystemButton';
import DriftToolbar from './DriftToolbar/DriftToolbar';
import EmptyStateDisplay from '../EmptyStateDisplay/EmptyStateDisplay';
import { PermissionContext } from '../../App';
/**
 * Top-level comparison ("drift") page. Renders the toolbar, comparison table
 * and pagination, or an empty/error state when there is nothing to compare,
 * gated on the compare-read permission from PermissionContext.
 */
export class DriftPage extends Component {
    constructor(props) {
        super(props);
        this.state = {
            emptyStateMessage: [
                'You currently have no system or baselines displayed. Add at least two',
                'systems or baselines to compare their facts.'
            ],
            isFirstReference: true
        };

        // Clear any baseline checkbox selection carried over from other pages.
        this.props.clearSelectedBaselines('CHECKBOX');
    }

    async componentDidMount() {
        // Ensure the user session is resolved before the page settles.
        await window.insights.chrome.auth.getUser();
    }

    // Records whether the next reference selection is the first one made.
    setIsFirstReference = (value) => {
        this.setState({
            isFirstReference: value
        });
    }

    // Invoked when the error alert is dismissed: restore the previous
    // comparison data and rewrite the URL to the previous system ids.
    onClose = () => {
        const { revertCompareData, history, previousStateSystems } = this.props;
        revertCompareData();
        setHistory(history, previousStateSystems.map(system => system.id));
    }

    // Error display when the comparison request failed; otherwise a
    // call-to-action prompting the user to add systems/baselines.
    renderEmptyState = () => {
        const { emptyStateMessage } = this.state;
        const { error } = this.props;

        if (error.status) {
            return <EmptyStateDisplay
                icon={ ExclamationCircleIcon }
                color='#c9190b'
                title={ 'Comparison cannot be displayed' }
                text={ emptyStateMessage }
                error={
                    'Error ' + error.status + ': ' + error.detail
                }
                button={ <AddSystemButton isTable={ false }/> }
            />;
        } else {
            return <EmptyStateDisplay
                icon={ PlusCircleIcon }
                color='#6a6e73'
                title={ 'Add systems or baselines to compare' }
                text={ emptyStateMessage }
                button={ <AddSystemButton isTable={ false }/> }
            />;
        }
    }

    render() {
        const { addStateFilter, clearComparison, clearComparisonFilters, clearSelectedBaselines, emptyState, error, exportToCSV, factFilter,
            filterByFact, history, loading, page, perPage, stateFilters, totalFacts, updatePagination, updateReferenceId } = this.props;
        const { isFirstReference } = this.state;

        return (
            <React.Fragment>
                <PageHeader>
                    <PageHeaderTitle title='Comparison'/>
                </PageHeader>
                <Main>
                    <PermissionContext.Consumer>
                        { value =>
                            value.permissions.compareRead === false
                                ? <EmptyStateDisplay
                                    icon={ LockIcon }
                                    color='#6a6e73'
                                    title={ 'You do not have access to Drift comparison' }
                                    text={ [ 'Contact your organization administrator(s) for more information.' ] }
                                />
                                : <React.Fragment>
                                    <ErrorAlert
                                        error={ error }
                                        onClose={ this.onClose }
                                    />
                                    { emptyState && !loading
                                        ? this.renderEmptyState()
                                        : <div></div>
                                    }
                                    <Card className='pf-t-light pf-m-opaque-100'>
                                        <CardBody>
                                            <div>
                                                { !emptyState
                                                    ? <DriftToolbar
                                                        loading={ loading }
                                                        history={ history }
                                                        page={ page }
                                                        perPage={ perPage }
                                                        totalFacts={ totalFacts }
                                                        updatePagination={ updatePagination }
                                                        clearComparison={ clearComparison }
                                                        clearComparisonFilters={ clearComparisonFilters }
                                                        exportToCSV={ exportToCSV }
                                                        updateReferenceId={ updateReferenceId }
                                                        setIsFirstReference={ this.setIsFirstReference }
                                                        clearSelectedBaselines={ clearSelectedBaselines }
                                                        factFilter={ factFilter }
                                                        filterByFact={ filterByFact }
                                                        stateFilters={ stateFilters }
                                                        addStateFilter={ addStateFilter }
                                                    />
                                                    : null
                                                }
                                                <DriftTable
                                                    updateReferenceId={ updateReferenceId }
                                                    error={ error }
                                                    isFirstReference={ isFirstReference }
                                                    setIsFirstReference={ this.setIsFirstReference }
                                                    clearComparison= { clearComparison }
                                                    hasBaselinesReadPermissions={ value.permissions.baselinesRead }
                                                    hasBaselinesWritePermissions={ value.permissions.baselinesWrite }
                                                    hasInventoryReadPermissions={ value.permissions.inventoryRead }
                                                />
                                                { !emptyState && !loading ?
                                                    <Toolbar className="drift-toolbar">
                                                        <ToolbarGroup className="pf-c-pagination">
                                                            <ToolbarItem>
                                                                <TablePagination
                                                                    page={ page }
                                                                    perPage={ perPage }
                                                                    total={ totalFacts }
                                                                    isCompact={ false }
                                                                    updatePagination={ updatePagination }
                                                                    widgetId='drift-pagination-bottom'
                                                                    variant={ PaginationVariant.bottom }
                                                                />
                                                            </ToolbarItem>
                                                        </ToolbarGroup>
                                                    </Toolbar>
                                                    : null
                                                }
                                            </div>
                                        </CardBody>
                                    </Card>
                                </React.Fragment>
                        }
                    </PermissionContext.Consumer>
                </Main>
            </React.Fragment>
        );
    }
}
// Runtime prop contracts for DriftPage; most props are injected by the
// connect()/withRouter() wrappers below.
DriftPage.propTypes = {
    perPage: PropTypes.number,
    page: PropTypes.number,
    totalFacts: PropTypes.number,
    error: PropTypes.object,
    loading: PropTypes.bool,
    clearSelectedBaselines: PropTypes.func,
    emptyState: PropTypes.bool,
    updatePagination: PropTypes.func,
    updateReferenceId: PropTypes.func,
    clearComparison: PropTypes.func,
    clearComparisonFilters: PropTypes.func,
    history: PropTypes.object,
    selectHistoricProfiles: PropTypes.func,
    selectedHSPIds: PropTypes.array,
    revertCompareData: PropTypes.func,
    previousStateSystems: PropTypes.array,
    exportToCSV: PropTypes.func,
    factFilter: PropTypes.string,
    filterByFact: PropTypes.func,
    stateFilters: PropTypes.array,
    addStateFilter: PropTypes.func
};
// Bind the comparison, baseline-table and historic-profile action creators
// to dispatch so DriftPage receives them as plain callback props.
function mapDispatchToProps(dispatch) {
    return {
        clearSelectedBaselines: (tableId) => dispatch(baselinesTableActions.clearSelectedBaselines(tableId)),
        updatePagination: (pagination) => dispatch(compareActions.updatePagination(pagination)),
        updateReferenceId: (id) => dispatch(compareActions.updateReferenceId(id)),
        clearComparison: () => dispatch(compareActions.clearComparison()),
        clearComparisonFilters: () => dispatch(compareActions.clearComparisonFilters()),
        selectHistoricProfiles: (historicProfileIds) => dispatch(historicProfilesActions.selectHistoricProfiles(historicProfileIds)),
        revertCompareData: () => dispatch(compareActions.revertCompareData()),
        exportToCSV: () => dispatch(compareActions.exportToCSV()),
        filterByFact: (filter) => dispatch(compareActions.filterByFact(filter)),
        addStateFilter: (filter) => dispatch(compareActions.addStateFilter(filter))
    };
}
// Select DriftPage's props from the compare and historic-profiles slices
// of the Redux store.
function mapStateToProps(state) {
    const { compareState, historicProfilesState } = state;
    return {
        page: compareState.page,
        perPage: compareState.perPage,
        totalFacts: compareState.totalFacts,
        error: compareState.error,
        loading: compareState.loading,
        emptyState: compareState.emptyState,
        selectedHSPIds: historicProfilesState.selectedHSPIds,
        previousStateSystems: compareState.previousStateSystems,
        factFilter: compareState.factFilter,
        stateFilters: compareState.stateFilters
    };
}
// Wire DriftPage to the Redux store and the router before exporting.
export default withRouter(connect(mapStateToProps, mapDispatchToProps)(DriftPage));
|
<gh_stars>1-10
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.2-147
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.10.15 at 09:54:44 PM EDT
//
package org.restsql.service.testcase;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for SetupOrTeardown complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="SetupOrTeardown">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="resetSequence" type="{http://restsql.org/schema}ResetSequence" maxOccurs="unbounded" minOccurs="0"/>
* <element name="sql" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "SetupOrTeardown", propOrder = {
    "resetSequence",
    "sql"
})
public class SetupOrTeardown {

    // Sequence-reset directives for the test case (lazily initialized, see getter).
    protected List<ResetSequence> resetSequence;
    // Raw SQL statements for the test case (lazily initialized, see getter).
    protected List<String> sql;

    /**
     * Gets the value of the resetSequence property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the resetSequence property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getResetSequence().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link ResetSequence }
     *
     *
     */
    public List<ResetSequence> getResetSequence() {
        // Lazy initialization keeps the returned list live and never null.
        if (resetSequence == null) {
            resetSequence = new ArrayList<ResetSequence>();
        }
        return this.resetSequence;
    }

    /**
     * Gets the value of the sql property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the sql property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getSql().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     *
     *
     */
    public List<String> getSql() {
        // Lazy initialization keeps the returned list live and never null.
        if (sql == null) {
            sql = new ArrayList<String>();
        }
        return this.sql;
    }

}
|
import IDistantVOBase from '../../IDistantVOBase';
/**
 * Persisted record of a single e-mail lifecycle event (sent, delivered,
 * opened, bounced, ...). The EVENT_* codes below index into EVENT_NAMES.
 */
export default class MailEventVO implements IDistantVOBase {

    // Translation keys for each event code, indexed by the EVENT_* constants.
    public static EVENT_NAMES: string[] = [
        'mail_event.EVENT_Initie',
        'mail_event.EVENT_Envoye', 'mail_event.EVENT_Delivre', 'mail_event.EVENT_Ouverture',
        'mail_event.EVENT_Clic', 'mail_event.EVENT_Soft_bounce', 'mail_event.EVENT_Hard_bounce',
        'mail_event.EVENT_Email_invalide', 'mail_event.EVENT_Error', 'mail_event.EVENT_Differe',
        'mail_event.EVENT_Plainte', 'mail_event.EVENT_Desinscrit', 'mail_event.EVENT_Bloque',
    ];

    // Event codes (French names): initiated, sent, delivered, opened, clicked,
    // soft bounce, hard bounce, invalid address, error, deferred, complaint,
    // unsubscribed, blocked.
    public static EVENT_Initie: number = 0;
    public static EVENT_Envoye: number = 1;
    public static EVENT_Delivre: number = 2;
    public static EVENT_Ouverture: number = 3;
    public static EVENT_Clic: number = 4;
    public static EVENT_Soft_bounce: number = 5;
    public static EVENT_Hard_bounce: number = 6;
    public static EVENT_Email_invalide: number = 7;
    public static EVENT_Error: number = 8;
    public static EVENT_Differe: number = 9;
    public static EVENT_Plainte: number = 10;
    public static EVENT_Desinscrit: number = 11;
    public static EVENT_Bloque: number = 12;

    public static API_TYPE_ID: string = "mail_event";

    public id: number;
    public _type: string = MailEventVO.API_TYPE_ID;

    // Id of the mail this event belongs to.
    public mail_id: number;
    // One of the EVENT_* codes above.
    public event: number;
    // Event timestamp (numeric) — presumably epoch-based; confirm with the producer.
    public event_date: number;
    // Free-text detail, e.g. bounce or error reason.
    public reason: string;
}
|
<gh_stars>1-10
/****************************************************************************
Copyright 2016 Apigee Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
****************************************************************************/
'use strict';

var should = require('should');
var util = require('util');
var config = require('../../config');
var proxyquire = require('proxyquire');

// Unit tests for lib/util/browser. child_process.exec is stubbed so the
// command string that would be executed is handed straight to the callback
// instead of actually launching a browser.
describe('browser', function() {

  var URL = 'abc123';

  var browserStubs = {
    'child_process': {
      exec: function(name, cb) {
        // Echo the command back so each test can assert on it.
        cb(null, name);
      }
    }
  };
  var browser = proxyquire('../../lib/util/browser', browserStubs);

  // Each test starts without a configured browser override.
  beforeEach(function() {
    config.browser = undefined;
  });

  // win32: uses `start "" "<url>"`, optionally with the configured browser.
  describe('Windows', function() {

    it('should start', function(done) {
      browser.open(URL, function(err, command) {
        command.should.equal(util.format('start "" "%s"', URL));
        done();
      }, 'win32')
    });

    it('should honor config', function(done) {
      var browserPath = config.browser = '/my/browser';
      browser.open(URL, function(err, command) {
        command.should.equal(util.format("start \"\" \"%s\" \"%s\"", browserPath, URL));
        done();
      }, 'win32')
    });
  });

  // darwin: uses `open`, with `-a <app>` when a browser is configured.
  describe('OS X', function() {

    it('should open', function(done) {
      browser.open(URL, function(err, command) {
        command.should.equal(util.format('open "%s"', URL));
        done();
      }, 'darwin')
    });

    it('should honor config', function(done) {
      var browserPath = config.browser = '/my/browser';
      browser.open(URL, function(err, command) {
        command.should.equal(util.format("open -a %s \"%s\"", browserPath, URL));
        done();
      }, 'darwin')
    });
  });

  // linux: uses xdg-open, or the configured browser binary directly.
  describe('Linux', function () {

    it('should open', function(done) {
      browser.open(URL, function(err, command) {
        command.should.equal(util.format('xdg-open "%s"', URL));
        done();
      }, 'linux');
    });

    it('should honor config', function(done) {
      var browserPath = config.browser = '/usr/bin/x-www-browser';
      browser.open(URL, function(err, command) {
        command.should.equal(util.format('%s "%s"', browserPath, URL));
        done();
      }, 'linux')
    });
  });

  // Unknown platforms: an error unless a browser is explicitly configured.
  describe('other Unix', function() {

    it('should err if not configured', function(done) {
      config.browser = undefined;
      browser.open(URL, function(err, command) {
        should.exist(err);
        done();
      }, 'foo')
    });

    it('should honor config', function(done) {
      var browserPath = config.browser = '/my/browser';
      browser.open(URL, function(err, command) {
        command.should.equal(util.format('%s "%s"', browserPath, URL));
        done();
      }, 'foo')
    });
  });
});
|
import React from 'react'
import classNames from 'classnames'
import './index.less'
// Shared no-op used as the default click handler.
const noop = ()=>{}

/**
 * Header bar for a popup: a left action area, a centered title and a right
 * action area, each with its own click handler.
 */
var PopupHeader = React.createClass({
    render(){
        // NOTE(review): showBottomBorder and ...others are destructured but never
        // used below — confirm whether the bottom border was meant to be rendered.
        const {leftText, rightText, title, showBottomBorder, onClickRight, onClickLeft, className, ...others} = this.props;
        const cls = classNames('mt-popup-header', className);
        return (
            <div className={ cls }>
                <div className="mt-popup-header-left" onClick={ onClickLeft }>
                    { leftText }
                </div>
                <div className="mt-popup-header-title">
                    { title }
                </div>
                <div className="mt-popup-header-right" onClick={ onClickRight }>
                    { rightText }
                </div>
            </div>
        );
    }
})

// Prop contracts (legacy React.PropTypes API, matching this file's React version).
PopupHeader.propTypes = {
    leftText: React.PropTypes.string,
    rightText: React.PropTypes.string,
    title: React.PropTypes.string,
    showBottomBorder: React.PropTypes.bool,
    onClickRight: React.PropTypes.func,
    onClickLeft: React.PropTypes.func,
};

// Empty texts and no-op handlers by default.
PopupHeader.defaultProps = {
    leftText: '',
    rightText: '',
    title: '',
    showBottomBorder: true,
    onClickRight: noop,
    onClickLeft: noop,
};

export default PopupHeader
|
import express from 'express';
// Express web framework
import session from 'express-session';
// Express session management
import { toExpenses, fromExpenses } from './expenses';
// Module for converting expenses from/to application data format
import { getTrends } from './trends';
// Module for retrieving/calculating spending trends

// Escape a value for safe interpolation into HTML. Expense categories come
// from user-submitted form data, so rendering them unescaped would allow
// stored XSS in the trends list.
const escapeHtml = (value) =>
  String(value)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');

// Create Express application
const app = express();

// Parse application/x-www-form-urlencoded bodies. Without this middleware
// req.body is undefined and the /expenses handler throws on every POST.
app.use(express.urlencoded({ extended: false }));

// Use Express sessions for user management
app.use(session({
  // Prefer a secret from the environment; the literal fallback keeps local
  // development working but must not be used in production.
  secret: process.env.SESSION_SECRET || 'secret-key',
  resave: false,
  saveUninitialized: false,
  cookie: { secure: false },
}));

// Main route: render trends plus the add-expense form for logged-in users.
app.get('/', (req, res) => {
  let session = req.session;
  // Check if user is logged in
  if (session.user) {
    // Retrieve user's expenses
    let expenses = toExpenses(session.user.expenses);
    // Calculate trend data
    let trends = getTrends(expenses);
    // Render page (trend keys/values are escaped — they derive from user input)
    res.send(`
      <!DOCTYPE html>
      <html>
        <head>
          <title>Personal Finance Tracking</title>
        </head>
        <body>
          <h1>Personal Finance Tracking</h1>
          <p>Trends:</p>
          <ul>
            ${Object.entries(trends).map(([k, v]) => `<li>${escapeHtml(k)}: ${escapeHtml(v)}</li>`).join('')}
          </ul>
          <form action="/expenses" method="post">
            Add expense:
            <label>
              Category
              <select name="category">
                <option value="bills">Bills</option>
                <option value="groceries">Groceries</option>
                <option value="entertainment">Entertainment</option>
              </select>
            </label>
            <label>
              Amount
              <input type="number" step="0.01" name="amount"/>
            </label>
            <input type="submit" />
          </form>
        </body>
      </html>
    `);
  } else {
    // Redirect to login page
    res.redirect('/login');
  }
});

// Route for adding/managing expenses
app.post('/expenses', (req, res) => {
  let session = req.session;
  // Check if user is logged in
  if (session.user) {
    // Retrieve user's expenses
    let expenses = toExpenses(session.user.expenses);
    // Add/update expense
    expenses[req.body.category] = Number(req.body.amount);
    // Save expenses
    session.user.expenses = fromExpenses(expenses);
    // Redirect to main page
    res.redirect('/');
  } else {
    // Redirect to login page
    res.redirect('/login');
  }
});

// Start server
app.listen(3000);
|
# Build recipe for libpng (Redox cookbook format). Each recipe_* hook sets
# skip=1 to tell the driver the default behavior has been replaced.
VERSION=1.6.36
TAR=https://github.com/glennrp/libpng/archive/v${VERSION}.tar.gz
BUILD_DEPENDS=(zlib)

# Report the pinned version instead of auto-detecting it.
function recipe_version {
    echo "$VERSION"
    skip=1
}

# Sources are pinned to $VERSION, so there is nothing to update.
function recipe_update {
    echo "skipping update"
    skip=1
}

# Configure against the staged sysroot and build a static library.
function recipe_build {
    sysroot="$(realpath ../sysroot)"
    export CPPFLAGS="-I$sysroot/include"
    export LDFLAGS="-L$sysroot/lib --static"
    chmod +w config.sub
    # Refresh config.sub so the Redox host triple is recognized.
    # NOTE(review): fetched over plain http — consider switching to https.
    wget -O config.sub http://git.savannah.gnu.org/cgit/config.git/plain/config.sub
    ./configure --build=${BUILD} --host=${HOST} --prefix='/'
    "$REDOX_MAKE" -j"$($NPROC)"
    skip=1
}

# No test suite is run for this package.
function recipe_test {
    echo "skipping test"
    skip=1
}

function recipe_clean {
    "$REDOX_MAKE" clean
    skip=1
}

# Install into the stage directory, dropping config scripts and libtool files.
function recipe_stage {
    dest="$(realpath $1)"
    "$REDOX_MAKE" DESTDIR="$dest" install
    rm -f "$dest/bin/"*-config "$dest/lib/"*.la
    skip=1
}
|
class AuthenticationSystem:
    """In-memory user registry with salted-hash password storage.

    Fixes over the previous version: passwords are no longer stored in
    plaintext (each user gets a random salt and a SHA-256 digest), and the
    comparison is constant-time. create_user/authenticate now also return
    booleans; callers that ignored the (previously None) return value are
    unaffected.
    """

    def __init__(self):
        # username -> {"role": str, "salt": bytes, "password": sha256 digest bytes}
        self.users = dict()

    @staticmethod
    def _hash_password(password, salt):
        """Return the SHA-256 digest of salt + UTF-8-encoded password."""
        import hashlib  # local import keeps the class self-contained
        return hashlib.sha256(salt + password.encode("utf-8")).digest()

    def create_user(self, username, role, password):
        """Register a new user; return True on success, False if the name is taken."""
        import os
        if username not in self.users:
            # Store only a salted hash, never the plaintext password.
            salt = os.urandom(16)
            self.users[username] = {
                "role": role,
                "salt": salt,
                "password": self._hash_password(password, salt),
            }
            return True
        else:
            print("Username already taken!")
            return False

    def authenticate(self, username, password):
        """Check credentials, print the outcome, and return True/False."""
        import hmac
        if username in self.users:
            record = self.users[username]
            candidate = self._hash_password(password, record["salt"])
            # compare_digest avoids leaking information through timing.
            if hmac.compare_digest(candidate, record["password"]):
                print("You are authenticated as '{}' with role '{}'".format(
                    username, record["role"]))
                return True
            else:
                print("Wrong password!")
                return False
        else:
            print("Username not found!")
            return False
if __name__ == '__main__':
    # Smoke test: register a user, then authenticate with the same credentials.
    system = AuthenticationSystem()
    system.create_user("username", "admin", "password")
    system.authenticate("username", "password")
|
<filename>packages/app/src/index.tsx
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
import { setMockApiDelay } from 'mock-api';
import { setupWorker } from 'msw';
import { mockFriendHandlers } from 'friends-api';
import './index.css';
// In mock mode, intercept friend-API requests with an msw service worker and
// add an artificial 750 ms latency so loading states are visible in development.
if (process.env.REACT_APP_USE_MOCK_API === 'true') {
  const worker = setupWorker(...mockFriendHandlers);
  // eslint-disable-next-line @typescript-eslint/no-floating-promises
  worker.start();
  setMockApiDelay(750);
}

// Mount the application at the #root element.
ReactDOM.render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
  document.getElementById('root')
);
|
import React, {ReactElement} from 'react';
import {Helmet} from 'react-helmet';
/** Props shared by the app's <head> helpers. */
export interface IAppCommonHeadProps {
    /** Optional URL prefix for static assets (fonts, icons); defaults to ''. */
    readonly basePath?: string;
}
/**
 * Shared document head: preloads the Roboto/Roboto Condensed woff2 fonts,
 * declares their @font-face rules, and wires up the favicon/manifest
 * metadata. All asset URLs are prefixed with basePath (default: '').
 */
export function AppHelmet({basePath}: IAppCommonHeadProps): ReactElement {
    const prefix = basePath ?? '';
    const robotoPath = `${prefix}/fonts/roboto/roboto-v20-latin-regular.woff`;
    const roboto2Path = `${prefix}/fonts/roboto/roboto-v20-latin-regular.woff2`;
    const robotoCondensedPath = `${prefix}/fonts/roboto-condensed/roboto-condensed-v18-latin-regular.woff`;
    const robotoCondensed2Path = `${prefix}/fonts/roboto-condensed/roboto-condensed-v18-latin-regular.woff2`;
    // crossOrigin is required for font preloads even on same-origin requests.
    const fontAttributes = {as: 'font', type: 'font/woff2', crossOrigin: 'anonymous'};

    return (
        <Helmet>
            {/*
             preload fonts
              */}
            <link rel="preload" href={roboto2Path} {...fontAttributes}/>
            <link rel="preload" href={robotoCondensed2Path} {...fontAttributes}/>
            <style type="text/css">{`
@font-face {
    font-family: "Roboto";
    font-style: normal;
    font-weight: 400;
    font-display: swap;
    src: local("Roboto"), local("Roboto-Regular"),
    url("${roboto2Path}") format("woff2"),
    url("${robotoPath}") format("woff");
}
@font-face {
    font-family: "Roboto Condensed";
    font-style: normal;
    font-weight: 400;
    font-display: swap;
    src: local("Roboto Condensed"), local("RobotoCondensed-Regular"),
    url("${robotoCondensed2Path}") format("woff2"),
    url("${robotoCondensedPath}") format("woff");
}
`}</style>
            {/*
             icons - based on code generated by https://realfavicongenerator.net
             (see app-common README.md)
              */}
            <link rel="apple-touch-icon" sizes="180x180" href={`${prefix}/apple-touch-icon.png`}/>
            <link rel="icon" type="image/png" sizes="32x32" href={`${prefix}/favicon-32x32.png`}/>
            <link rel="icon" type="image/png" sizes="16x16" href={`${prefix}/favicon-16x16.png`}/>
            <link rel="manifest" href={`${prefix}/site.webmanifest`}/>
            <link rel="mask-icon" href={`${prefix}/safari-pinned-tab.svg`} color="#5050ff"/>
            <link rel="shortcut icon" href={`${prefix}/favicon.ico`}/>
            <meta name="apple-mobile-web-app-title" content="AGE Online"/>
            <meta name="application-name" content="AGE Online"/>
            <meta name="msapplication-TileColor" content="#da532c"/>
            <meta name="msapplication-config" content={`${prefix}/browserconfig.xml`}/>
            <meta name="theme-color" content="#5050ff"/>
        </Helmet>
    );
}
|
package com.honyum.elevatorMan.service;
import android.app.Notification;
import android.app.PendingIntent;
import android.app.Service;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;
import android.os.RemoteException;
import android.support.annotation.Nullable;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import android.widget.Toast;
import com.honyum.elevatorMan.R;
import com.honyum.elevatorMan.activity.WelcomeActivity;
/**
 * Foreground service that keeps LocationService alive: it binds to it with
 * BIND_IMPORTANT and, when the connection drops, restarts and re-binds it.
 *
 * Created by Star on 2017/9/8.
 */
public class DaemonService extends Service{

    private MyBinder binder;
    private static final String TAG = "DaemonService";
    private MyServiceConnection conn;

    @Override
    public void onCreate() {
        super.onCreate();
        if(binder ==null){
            binder = new MyBinder();
        }
        conn = new MyServiceConnection();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        // Bind to the location service so the system treats this link as important.
        DaemonService.this.bindService(new Intent(DaemonService.this, LocationService.class), conn, Context.BIND_IMPORTANT);

        Intent it = new Intent(this, WelcomeActivity.class);
        PendingIntent contentIntent = PendingIntent.getActivity(this, 0, it, 0);// when the notification is tapped, this intent is sent to the system
        Notification.Builder builder = new Notification.Builder(this);
        builder.setContentText("定位中").setSmallIcon(R.drawable.logo).setContentTitle("梯美正在持续定位中!").setContentIntent(contentIntent);
        // NOTE(review): startForeground is documented to require a nonzero
        // notification id; id 0 may be silently ignored — confirm.
        startForeground(0,builder.build());
        // START_STICKY: ask the system to recreate this service if it is killed.
        return START_STICKY;
    }

    // Last credentials received through the binder; not read elsewhere in this class.
    private String mUserId = "";
    private String mToken = "";

    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        return binder;
    }

    // AIDL stub: stores the caller-supplied user id and token.
    class MyBinder extends RemoteConnection.Stub{
        @Override
        public String getActivityInfo(String userId, String token) throws RemoteException {
            mUserId = userId;
            mToken = token;
            return null;
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        DaemonService.this.unbindService(conn);
    }

    class MyServiceConnection implements ServiceConnection {

        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            Log.i(TAG, "建立连接成功!");
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            Log.i(TAG, "Location stop!");
            //Toast.makeText(LocalService.this, "断开连接", 0).show();
            // Restart the service that was killed, then re-bind to it.
            DaemonService.this.startService(new Intent(DaemonService.this, LocationService.class));
            DaemonService.this.bindService(new Intent(DaemonService.this, LocationService.class), conn, Context.BIND_IMPORTANT);
        }
    }
}
|
<reponame>nhood-org/nhood-engine
package com.h8.nh.nhoodengine.core.workers;
import com.h8.nh.nhoodengine.core.DataFinder;
import com.h8.nh.nhoodengine.core.DataFinderCriteria;
import com.h8.nh.nhoodengine.core.DataFinderResult;
import com.h8.nh.nhoodengine.core.DataResourceKey;
import java.util.List;
/**
 * Runnable worker that executes one DataFinder query and records whether the
 * call threw or returned fewer results than the criteria's limit.
 */
public final class ResourcesResolveWorker<K extends DataResourceKey, D> implements Runnable {

    // Finder to query and the criteria to query it with.
    private final DataFinder<K, D> dataFinder;
    private final DataFinderCriteria<K> criteria;

    // Set by run(): true when the query failed or under-delivered.
    private boolean hasErrors = false;

    private ResourcesResolveWorker(
            final DataFinder<K, D> dataFinder,
            final DataFinderCriteria<K> criteria) {
        this.dataFinder = dataFinder;
        this.criteria = criteria;
    }

    /** Factory method mirroring the private constructor. */
    public static <K extends DataResourceKey, D> ResourcesResolveWorker<K, D> of(
            final DataFinder<K, D> dataFinder,
            final DataFinderCriteria<K> criteria) {
        return new ResourcesResolveWorker<>(dataFinder, criteria);
    }

    /** @return true when the last run threw or returned an incomplete result. */
    public boolean hasErrors() {
        return hasErrors;
    }

    @Override
    public void run() {
        try {
            List<DataFinderResult<K, D>> results = dataFinder.find(criteria);
            System.out.println(Thread.currentThread().getName()
                    + " : Retrieved data of size: " + results.size());
            // Fewer results than the requested limit counts as an error.
            hasErrors = results.size() < criteria.getLimit();
        } catch (Exception e) {
            System.err.println(Thread.currentThread().getName()
                    + " : Could not find data because of"
                    + " an exception: " + e.getClass().getSimpleName() + " : " + e.getMessage());
            // Uncomment for troubleshooting purposes only
            // e.printStackTrace(System.err);
            hasErrors = true;
        }
    }
}
|
#include "stdafx.h"
#include "io.h"
#include "direct.h"
#include "RegisterSharpCOM.h"
// Default construction/destruction: the class keeps no state of its own.
CRegisterSharpCOM::CRegisterSharpCOM(void)
{
}

CRegisterSharpCOM::~CRegisterSharpCOM(void)
{
}
// Returns the directory containing the running executable, with forward
// slashes normalized to backslashes and a trailing '\' kept. Returns an
// empty string if the module path cannot be resolved.
CString CRegisterSharpCOM::GetCodeBase()
{
	CString strRunPath = "";
	char szIniPath[MAX_PATH];
	// GetModuleFileName(NULL, ...) yields the full path of this process's EXE.
	DWORD dwResult = GetModuleFileName(NULL, szIniPath, sizeof(szIniPath));
	if (0 != dwResult)
	{
		strRunPath = szIniPath;
		strRunPath.Replace("/", "\\");
		// Strip the file name, keeping everything up to and including the last '\'.
		int index = strRunPath.ReverseFind('\\');
		if (index != -1)
			strRunPath = strRunPath.Left(index+1);
	}
	return strRunPath;
}
// Enumerates the contents of 'folder' into resultList and returns TRUE when
// at least one entry was collected. With folderOnly == TRUE, sub-directory
// names (excluding "." and "..") are collected.
// Fixes: the _findfirst handle is intptr_t — storing it in a 'long' truncates
// on 64-bit Windows builds; and the close condition 'hFile > 0' could leak a
// valid handle, since -1 is the only documented failure value.
BOOL CRegisterSharpCOM::FindFolder(CString folder, BOOL folderOnly, CStringArray& resultList)
{
	resultList.RemoveAll();
	resultList.FreeExtra();

	CString strRoot = folder;
	strRoot.Replace("/", "\\");
	if (strRoot.Right(1) != "\\")
		strRoot += "\\";

	intptr_t hFile = -1;
	BOOL bFirst = TRUE;
	struct _finddata_t c_file;
	while(TRUE)
	{
		if (bFirst)
		{
			bFirst = FALSE;
			if ((hFile = _findfirst(strRoot+"*.*", &c_file)) == -1L)
				return FALSE;
		}
		else
		{
			if (_findnext(hFile, &c_file) == -1L)
				break;
		}

		if (c_file.attrib & _A_SUBDIR)
		{
			if (folderOnly && (strcmp(c_file.name, ".") != 0) && (strcmp(c_file.name, "..") != 0))
				resultList.Add(c_file.name);
		}
		else
		{
			// NOTE(review): plain files are collected regardless of folderOnly;
			// preserved as-is because the caller tolerates it — confirm intent.
			resultList.Add(c_file.name);
		}
	}

	if (hFile != -1)
		_findclose( hFile );

	return (resultList.GetCount() > 0);
}
// qsort callback: ascending lexicographic order of two CString elements.
int CRegisterSharpCOM::CompareAscending(const void *a, const void *b)
{
	CString *pA = (CString*)a;
	CString *pB = (CString*)b;
	return (pA->Compare(*pB));
}
// qsort callback: descending lexicographic order (negated ascending compare).
int CRegisterSharpCOM::CompareDescending(const void *a, const void *b)
{
	CString *pA = (CString*)a;
	CString *pB = (CString*)b;
	return (-1 * (pA->Compare(*pB)));
}
// Sorts the CStringArray in place with the CRT qsort, ascending by default.
// Fix: the element size passed to qsort must be sizeof(CString), not
// sizeof(CString*) — qsort moves whole elements. (MFC's CString happens to
// hold a single pointer, which is why the old code worked by accident.)
void CRegisterSharpCOM::SortStringArray(CStringArray& csa, BOOL bDescending)
{
	int iArraySize = csa.GetSize();
	if (iArraySize <= 0)
		return;

	size_t iCSSize = sizeof(CString);
	void* pArrayStart = (void *)&csa[0];
	if (bDescending)
		qsort (pArrayStart, iArraySize, iCSSize, CompareDescending);
	else
		qsort (pArrayStart, iArraySize, iCSSize, CompareAscending);
}
// Registers/unregisters the .NET assembly located next to this executable by
// launching RegAsm.exe from the newest installed .NET Framework version.
// op: 0 = unregister (/u), 1 = register, 2 = re-register with /codebase.
// Returns TRUE when RegAsm was launched successfully, FALSE otherwise.
BOOL CRegisterSharpCOM::RegisterDotNetAssembly(CString sDllName, int op)
{
    // Initialize to FALSE: the original left bReturn uninitialized and
    // returned it when no RegAsm.exe was found in any framework folder.
    BOOL bReturn = FALSE;
    CString sExeDir = GetCodeBase();
    TCHAR buff[MAX_PATH];
    GetWindowsDirectory(buff, MAX_PATH);
    // Build the RegAsm command line for the requested operation.
    CString strParam;
    strParam.Format(" \"%s%s\"", sExeDir, sDllName);
    if (op == 0)
        strParam.Format("/u \"%s%s\"", sExeDir, sDllName);
    else if (op == 2)
        strParam.Format("/codebase \"%s%s\"", sExeDir, sDllName);
    // Locate the framework folder; fall back to "Framework64" if needed.
    CString sFrameworkFolder;
    sFrameworkFolder.Format("%s\\Microsoft.NET\\Framework", buff);
    CStringArray frameworkVersion;
    CString strFrameworkDetected = sFrameworkFolder + "\\";
    if (!FindFolder(sFrameworkFolder + "\\", TRUE, frameworkVersion))
    {
        strFrameworkDetected = sFrameworkFolder + "64\\";
        if (!FindFolder(sFrameworkFolder + "64\\", TRUE, frameworkVersion))
            return FALSE;
    }
    // Try the newest framework version first.
    SortStringArray(frameworkVersion, TRUE);
    for (int i = 0; i < frameworkVersion.GetCount(); i++)
    {
        CString sPath;
        sPath.Format("%s%s\\RegAsm.exe", strFrameworkDetected, frameworkVersion[i]);
        if (access(sPath, 0) >= 0)
        {
            SHELLEXECUTEINFO ShExecInfo = {0};
            ShExecInfo.cbSize = sizeof(SHELLEXECUTEINFO);
            // SEE_MASK_NOCLOSEPROCESS hands us a process handle that we are
            // responsible for closing (the original leaked it).
            ShExecInfo.fMask = SEE_MASK_NOCLOSEPROCESS;
            ShExecInfo.hwnd = NULL;
            ShExecInfo.lpVerb = "open";
            ShExecInfo.lpFile = sPath;
            ShExecInfo.lpParameters = strParam;
            ShExecInfo.lpDirectory = sExeDir;
            ShExecInfo.nShow = SW_HIDE;
            ShExecInfo.hInstApp = NULL;
            bReturn = ShellExecuteEx(&ShExecInfo);
            if (bReturn && ShExecInfo.hProcess != NULL)
            {
                // Give RegAsm up to 70 s to finish, then release the handle.
                WaitForSingleObject(ShExecInfo.hProcess, 70000);
                CloseHandle(ShExecInfo.hProcess);
            }
            break;
        }
    }
    return bReturn;
}
|
<gh_stars>0
/*
* Copyright (c) 2015 IBM Corporation and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.brunel.data.modify;
import java.util.HashMap;
import java.util.Map;
/**
* Common code for Dataset transforms
*/
public abstract class DataOperation {

    /**
     * Splits the command into sections using the semi-colon as separator, then
     * splits each section into a key/value pair on the first occurrence of the
     * indicated separator.
     *
     * @param command command to be split into sections
     * @param sep     regular expression separating a key from its value
     * @return map of keys to (possibly empty) values, or null if there are no sections
     */
    static Map<String, String> map(String command, String sep) {
        String[] sections = parts(command);
        if (sections == null) return null;
        Map<String, String> result = new HashMap<String, String>();
        for (String section : sections) {
            // Limit the split to 2 so a value that itself contains the
            // separator is kept whole (previously it was silently truncated).
            String[] pair = section.split(sep, 2);
            String key = pair[0].trim();
            String value = pair.length > 1 ? pair[1].trim() : "";
            result.put(key, value);
        }
        return result;
    }

    /**
     * Splits the command into parts using the semi-colon as separator
     *
     * @param command command to be split into sections
     * @return array of trimmed parts, or null if there are none
     */
    public static String[] parts(String command) {
        return splitAndTrim(command, ";");
    }

    /**
     * Splits the items into parts using the comma as separator
     *
     * @param items items as a comma-separated list
     * @return array of trimmed parts, or null if there are none
     */
    static String[] list(String items) {
        return splitAndTrim(items, ",");
    }

    // Shared implementation for parts() and list(): split on the separator,
    // trim each piece, and collapse an empty input to null.
    private static String[] splitAndTrim(String text, String sep) {
        String[] parts = text.split(sep);
        for (int i = 0; i < parts.length; i++) parts[i] = parts[i].trim();
        return parts.length == 1 && parts[0].isEmpty() ? null : parts;
    }
}
|
#!/bin/sh
# Bootstrap a fresh checkout into a git repository. Abort on the first error.
set -e
# Rename the template ignore file into place before creating the repo.
mv _gitignore .gitignore
git init
# Run the project-specific setup (dependencies, generated files, ...).
sh scripts/bootstrap
# Stage everything and record the first commit.
git add -A .
git commit -m "Initial Commit"
|
#define MSC_CLASS "MediaStreamTrackFactory"
#include <iostream>
#include "MediaSoupClientErrors.hpp"
#include "MediaStreamTrackFactory.hpp"
#include "pc/test/fake_audio_capture_module.h"
#include "pc/test/fake_periodic_video_track_source.h"
#include "pc/test/frame_generator_capturer_video_track_source.h"
#include "system_wrappers/include/clock.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/create_peerconnection_factory.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "modules/audio_mixer/audio_mixer_impl.h"
#include "test_audio_device.h"
using namespace mediasoupclient;
// Shared PeerConnection factory, created lazily by createFactory().
static rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory;
/* MediaStreamTrack holds reference to the threads of the PeerConnectionFactory.
 * Use plain pointers in order to avoid threads being destructed before tracks.
 */
static rtc::Thread* networkThread;
static rtc::Thread* signalingThread;
static rtc::Thread* workerThread;
// Lazily builds the shared PeerConnectionFactory with dedicated network,
// signaling and worker threads, and a fake (test) audio device so that no
// real audio hardware is required. Throws on any initialization failure.
static void createFactory()
{
    // Threads are intentionally released (not owned by smart pointers): the
    // tracks hold references to them, see the comment on the statics above.
    networkThread = rtc::Thread::Create().release();
    signalingThread = rtc::Thread::Create().release();
    workerThread = rtc::Thread::Create().release();
    networkThread->SetName("network_thread", nullptr);
    signalingThread->SetName("signaling_thread", nullptr);
    workerThread->SetName("worker_thread", nullptr);
    if (!networkThread->Start() || !signalingThread->Start() || !workerThread->Start())
    {
        MSC_THROW_INVALID_STATE_ERROR("thread start errored");
    }
    // NOTE(review): `config` is never used below — candidate for removal.
    webrtc::PeerConnectionInterface::RTCConfiguration config;
    auto defaultQueueFActory=webrtc::CreateDefaultTaskQueueFactory();
    // Fake audio device: pulsed-noise capturer at 48 kHz, rendered output discarded.
    auto fakeAudioCaptureModule = webrtc::TestAudioDeviceModule::Create(defaultQueueFActory.get(),
    webrtc::TestAudioDeviceModule::CreatePulsedNoiseCapturer(10000, 48000),
    webrtc::TestAudioDeviceModule::CreateDiscardRenderer(48000), 1.f);
    if (!fakeAudioCaptureModule)
    {
        MSC_THROW_INVALID_STATE_ERROR("audio capture module creation errored");
    }
    else
    {
        //webrtc::AudioState::Config audio_state_config;
        //audio_state_config.audio_mixer = ;
        //audio_state_config.audio_processing = ;
        //audio_state_config.audio_device_module = fakeAudioCaptureModule;
        // send_call_config->audio_state = webrtc::AudioState::Create(audio_state_config);
        // recv_call_config->audio_state = webrtc::AudioState::Create(audio_state_config);
        //fakeAudioCaptureModule->Init();
        // RTC_CHECK(fakeAudioCaptureModule->RegisterAudioCallback(
        // send_call_config->audio_state->audio_transport()) == 0);
    }
    // Build the factory with the builtin codec factories; no audio mixer or
    // audio processing is installed (last two arguments are nullptr).
    factory = webrtc::CreatePeerConnectionFactory(
    networkThread,
    workerThread,
    signalingThread,
    fakeAudioCaptureModule,
    webrtc::CreateBuiltinAudioEncoderFactory(),
    webrtc::CreateBuiltinAudioDecoderFactory(),
    webrtc::CreateBuiltinVideoEncoderFactory(),
    webrtc::CreateBuiltinVideoDecoderFactory(),
    nullptr,nullptr//webrtc::AudioMixerImpl::Create() /*audio_mixer*/,
    /*webrtc::AudioProcessingBuilder().Create()*/ /*audio_processing*/);
    if (!factory)
    {
        MSC_THROW_ERROR("error ocurred creating peerconnection factory");
    }
    //fakeAudioCaptureModule->Init();
    // auto numDevice=fakeAudioCaptureModule->RecordingDevices();
    // fakeAudioCaptureModule->InitPlayout();
    // fakeAudioCaptureModule->InitRecording();
    // fakeAudioCaptureModule->InitSpeaker();
    // fakeAudioCaptureModule->SetMicrophoneVolume(100);
    // fakeAudioCaptureModule->SetSpeakerVolume(100);
    // fakeAudioCaptureModule->SetSpeakerMute(false);
    // fakeAudioCaptureModule->StartPlayout();
    // fakeAudioCaptureModule->StartRecording();
}
// Audio track creation.
// Creates an audio track backed by a fresh audio source from the shared
// factory (built lazily on first use). The highpass filter is disabled to
// keep the generated test signal unmodified.
rtc::scoped_refptr<webrtc::AudioTrackInterface> createAudioTrack(const std::string& label)
{
    if (!factory)
    {
        createFactory();
    }
    cricket::AudioOptions audioOptions;
    audioOptions.highpass_filter = false;
    auto audioSource = factory->CreateAudioSource(audioOptions);
    return factory->CreateAudioTrack(label, audioSource);
}
// Video track creation.
// Creates a video track fed by a fake periodic video source. The label
// argument is ignored: a random UUID is used as the track id.
rtc::scoped_refptr<webrtc::VideoTrackInterface> createVideoTrack(const std::string& /*label*/)
{
    if (!factory)
    {
        createFactory();
    }
    auto* periodicSource =
        new rtc::RefCountedObject<webrtc::FakePeriodicVideoTrackSource>(false /* remote */);
    return factory->CreateVideoTrack(rtc::CreateRandomUuid(), periodicSource);
}
// Creates a video track whose frames come from a frame-generator capturer
// (test pattern). The label argument is ignored; a random UUID is the id.
rtc::scoped_refptr<webrtc::VideoTrackInterface> createSquaresVideoTrack(const std::string& /*label*/)
{
    if (!factory)
    {
        createFactory();
    }
    std::cout << "[INFO] getting frame generator" << std::endl;
    auto* generatorSource = new rtc::RefCountedObject<webrtc::FrameGeneratorCapturerVideoTrackSource>(
        webrtc::FrameGeneratorCapturerVideoTrackSource::Config(), webrtc::Clock::GetRealTimeClock(), false);
    generatorSource->Start();
    std::cout << "[INFO] creating video track" << std::endl;
    return factory->CreateVideoTrack(rtc::CreateRandomUuid(), generatorSource);
}
|
<filename>scripts/index.js
import { Board } from "./classes.js";
// Population history of the most recent run: solutionDetails[g] holds the
// score-sorted population of generation g (filled by calculateResult).
let solutionDetails
// Renders a genotype into the #resultModal element as a queens x queens
// chessboard, then places one queen image per row according to `genes`
// (gene values may be strings and are parsed as integers).
function createModal(queens, genes){
    let boardHTML = `<table class="chessboard">`
    for(let row = 0; row < queens; row++){
        let rowHTML = `<tr class="chessboard">`
        for(let col = 0; col < queens; col++){
            rowHTML += `<td id=modal-${row+1}-${col+1} class="chessboard"></td>`
        }
        boardHTML += rowHTML + '</tr>'
    }
    boardHTML += `</table>`
    $('#resultModal').append(boardHTML)
    genes.forEach(function(gene, row){
        $(`#modal-${row+1}-${parseInt(gene)+1}`).append(`<img src="images/wQ.png" class="img-fluid" alt="Responsive image">`)
    })
}
// Appends one collapsible table per generation to #resultDetails, listing each
// individual's score, generation and genotype plus a "Show phenotype" trigger.
// Reads the population history from the module-level `solutionDetails`.
function displaySolutionDetails(){
    $('#resultDetails').append(`<h2>Result details</h2>
    <br>`)
    let resultDetailsDOM = $('#resultDetails') // NOTE(review): unused variable
    for(let i = 0; i < solutionDetails.length; i++){
        // Collapsible header + table shell for generation i.
        let tableHTML = ` <div class="showDetails" data-toggle="collapse" href="#details${i+1}"
            type="button" role="button" aria-expanded="false"
            aria-controls="details${i+1}">
            <i class="fa-lg fas fa-caret-down"></i>
            <h4 style="display:inline"> Generation ${i+1} </h4>
        </div>
        <div class="collapse" id="details${i+1}">
        <table class="table">
            <thead>
                <tr>
                <th scope="col">Position</th>
                <th scope="col">Score</th>
                <th scope="col">Generation</th>
                <th scope="col">Genotype</th>
                <th scope="col">Phenotype</th>
                </tr>
            </thead>
            <tbody>`
        // One row per individual of this generation (population is score-sorted).
        for(let j = 0; j < solutionDetails[i].length; j++){
            tableHTML += `<tr>
                <th scope="row">${j+1}</th>
                <td>${solutionDetails[i][j].score}</td>
                <td>${solutionDetails[i][j].generation}</td>
                <td class="genes">[${solutionDetails[i][j].genes}]</td>
                <td>
                    <p class="showModal" type="button" role="button" style="color:#007bff">
                        Show phenotype
                    </p>
                </td>
            </tr>`
        }
        tableHTML += ` </tbody>
            </table>
            </div>
            <br>`
        $('#resultDetails').append(tableHTML)
    }
}
// Renders the found solution into #results: a card with the chessboard
// phenotype, the genotype as a list, the generation count, and the container
// that later hosts the per-generation details view.
function displayResult(queens, solution, generationsNumber){
    let resultHTML = `<div class="container">
        <br>
        <div class="card text-center">
            <div class="card-header">
                Solution found!
            </div>
            <div class="card-body">
                <h5 class="card-title">Phenotype</h5>
                <table class="chessboard">`
    // Build the empty queens x queens board; each cell gets the id "row-col".
    for(let i = 0; i < queens; i++){
        resultHTML += `<tr class="chessboard">`
        for(let j = 0; j < queens; j++){
            resultHTML += `<td id=${i+1}-${j+1} class="chessboard"></td>`
        }
        resultHTML += '</tr>'
    }
    resultHTML += ` </table>
                <hr>
                <h5 class="card-title">Genotype</h5>
                <p class="card-text"><span id="genes"></span></p>
            </div>
            <div class="card-footer text-muted">
                Generations needed: <span id="numberOfGenerationsNeeded"></span>
            </div>
        </div>
        <br>
        <div id="showResultDetailsContainer" class="container d-flex justify-content-center">
            <button class="btn btn-primary" id="showResultDetailsButton">Show calculations details</button>
        </div>
        <hr>
        <div class="container" id="resultDetails">
            <div id="detailsSpinnerLoad" class=""></div>
            <div id="printingDetails" class="container d-flex justify-content-center">
            </div>
        </div>`
    $('#results').append(resultHTML)
    // Place one queen per row; gene value g means column g (0-based).
    let genes = solution.genes.slice()
    for(let i = 0; i < genes.length; i++){
        $(`#${i+1}-${genes[i]+1}`).append(`<img src="images/wQ.png" class="img-fluid" alt="Responsive image">`)
    }
    $('#numberOfGenerationsNeeded').text(generationsNumber)
    $('#genes').text(`[${genes}]`)
}
// Runs the genetic algorithm for the N-queens problem.
// Strategy per generation: sort by score (lower is better), keep the best 10%
// unchanged (elitism), and breed the remaining 90% from random parents drawn
// from the best half. Stops when a zero-score individual appears.
// Side effect: stores the whole population history in `solutionDetails`.
// Returns [bestBoard, generationCount].
function calculateResult(numberOfQueens){
    const POPULATION_SIZE = 100
    const boardWidth = numberOfQueens
    const boardHeight = numberOfQueens
    let generations = 1
    let historicalPopulation = []
    let population = []
    for(let i = 0; i < POPULATION_SIZE; i++){
        population.push(new Board(numberOfQueens, boardWidth, boardHeight, generations))
    }
    while(true){
        // Best (lowest score) individuals first.
        population.sort((a, b) => a.score - b.score)
        historicalPopulation.push(population)
        if(population[0].calculateScore() == 0){
            break
        }
        // Elitism: the top 10% survive untouched.
        const eliteCount = Math.trunc(10*POPULATION_SIZE/100)
        let newGeneration = population.slice(0, eliteCount)
        // Offspring: fill the remaining 90% from parents in the best half.
        const offspringCount = Math.trunc(90*POPULATION_SIZE/100)
        const parentPoolSize = Math.trunc(50*POPULATION_SIZE/100)
        const parentPool = population.slice(0, parentPoolSize)
        generations++
        for(let i = 0; i < offspringCount; i++){
            const genesA = parentPool[Math.floor(Math.random() * parentPool.length)].genes
            const genesB = parentPool[Math.floor(Math.random() * parentPool.length)].genes
            newGeneration.push(new Board(numberOfQueens,
                                         boardWidth,
                                         boardHeight,
                                         generations,
                                         genesA,
                                         genesB))
        }
        population = newGeneration
    }
    solutionDetails = historicalPopulation
    return [population[0], generations]
}
// Main entry point: validates the queen count, shows a loading spinner, and
// runs the (synchronous) genetic algorithm slightly deferred so the browser
// can paint the spinner before the CPU-bound search starts.
$('#calculateButton').click(function(e) {
    e.preventDefault()
    // Disable the button while a computation is in flight.
    $('#calculateButton').attr("disabled", true)
    $('#spinnerLoad').addClass('pl pl-puzzle')
    let numberOfQueens = parseInt($('#numberOfQueens').val())
    if(isNaN(numberOfQueens)){
        alert("Select a valid value")
        $('#spinnerLoad').removeClass('pl pl-puzzle')
        $('#calculateButton').attr("disabled", false)
    }else{
        $('#results').empty()
        $('#runningDetails').empty()
        $('#runningDetails').append(`<p>Calculating solution...</p>`)
        setTimeout(function () {
            // Time the search and report it below the result.
            var start = new Date().getTime();
            let results = calculateResult(numberOfQueens)
            var end = new Date().getTime()
            var time = end - start
            console.log(results[0])
            displayResult(numberOfQueens, results[0], results[1])
            $('#runningDetails').empty()
            $('#runningDetails').append(`<p>Calculation took ${time/1000} seconds </p>`)
            $('#spinnerLoad').removeClass('pl pl-puzzle')
            $('#calculateButton').attr("disabled", false)
        }, 1000);
    }
});
// Lazily renders the heavy per-generation detail tables after a short delay so
// the spinner is painted first; the trigger button is hidden afterwards.
$(document).on('click', '#showResultDetailsButton', function(){
    $('#detailsSpinnerLoad').addClass('pl pl-puzzle')
    $('#showResultDetailsButton').toggle()
    $('#printingDetails').empty()
    $('#printingDetails').append(`<p>Printing solution details...</p>`)
    setTimeout(function(){
        displaySolutionDetails()
        $('#printingDetails').empty()
        $('#detailsSpinnerLoad').removeClass('pl pl-puzzle')
    }, 1000);
});
// Toggles the caret icon (down <-> right) on a collapsible generation header.
// Font Awesome replaces the original <i> with an <svg>, so the class must be
// read and written on the svg element.
$(document).on('click', '.showDetails', function(){
    if($(this).find('svg').attr('class') == 'svg-inline--fa fa-caret-down fa-w-10 fa-lg'){
        $(this).find('svg').attr('class', 'svg-inline--fa fa-caret-right fa-w-10 fa-lg')
    }else{
        $(this).find('svg').attr('class', 'svg-inline--fa fa-caret-down fa-w-10 fa-lg')
    }
});
// Opens the phenotype modal for the genotype shown in the clicked table row:
// parses the "[g1,g2,...]" cell text back into an array of gene strings and
// renders the corresponding board into the modal.
$(document).on('click', '.showModal', function(){
    var genes = $(this).closest("tr")
        .find(".genes")
        .text()
        .replace('[', '')
        .replace(']', '')
        .split(',')
    let numberOfQueens = parseInt($('#numberOfQueens').val())
    $('#resultModal').empty()
    createModal(numberOfQueens, genes)
    $('#detailModal').modal('toggle');
});
|
#!/bin/bash

##
## Case Name: verify-firmware-presence
## Preconditions:
##    SOF firmware install at "/lib/firmware/intel/sof"
## Description:
##    check target platform firmware file
## Case step:
##    1. check if target platform firmware exists
##    2. dump fw file md5sum
## Expect result:
##    list target firmware md5sum
##

# Abort on the first failing command.
set -e

# source from the relative path of current folder
# shellcheck source=case-lib/lib.sh
source "$(dirname "${BASH_SOURCE[0]}")"/../case-lib/lib.sh

func_opt_parse_option "$@"

# Resolve the firmware directory (-P) and platform name (-p) via the helper.
path=$(sof-dump-status.py -P)
platform=$(sof-dump-status.py -p)
fw="$path/sof-$platform.ri"

dlogi "Checking file: $fw"
[ -f "$fw" ] || die "File $fw is not found!"
# Log "<path> <md5>" for the firmware that was found.
dlogi "Found file: $(md5sum "$fw"|awk '{print $2, $1;}')"
|
from os import path, makedirs
from requests.auth import HTTPBasicAuth
import requests
import hashlib
from time import time, sleep
from sortedcontainers import SortedSet
from config import Config
class RequestHandler:
def __init__(self, term, extension):
# setup configuration
self.config = Config()
# setup GitHub OAuth
self.auth = HTTPBasicAuth(self.config.github['user'], self.config.github['token'])
# configure crawler specifics
self.github_url = 'https://api.github.com/search/code?q=' # use the GitHub search API
self.query = '{}+extension:{}'.format(term, extension) # search for contract in files with extension .sol
self.sort = '&sort='
self.order = '&order='
self.size_range = SortedSet()
self.size_range.update([0, 384001]) # stick to GitHub size restrictions
self.initial_items = []
def rate_limit(self, request):
limit = requests.get('https://api.github.com/rate_limit', auth=self.auth)
limit_json = limit.json()
if request is 'search':
remaining_search = limit_json["resources"]["search"]["remaining"]
reset_time = limit_json["resources"]["search"]["reset"]
if remaining_search is 0:
# wait until we can do search requests again
sleep_time = reset_time - int(time())
print "Search limit reached. Waiting {} seconds".format(sleep_time)
sleep(sleep_time)
elif request is 'core':
remaining_download = limit_json["resources"]["core"]["remaining"]
reset_time = limit_json["resources"]["core"]["reset"]
if remaining_download is 0:
# wait until we can do search requests again
sleep_time = reset_time - int(time())
print "Core limit is reached. Waiting {} seconds".format(sleep_time)
sleep(sleep_time)
def search_github(self, lower, upper, order_state):
self.rate_limit(request='search')
if isinstance(lower, int) and isinstance(upper, int) and isinstance(order_state, int):
base_url = self.github_url + self.query + "+size:>{}+size:<{}+size:{}".format(lower, upper, upper)
if order_state is 1:
url = base_url + self.sort + "indexed" + self.order + "desc"
elif order_state is 2:
url = base_url + self.sort + "indexed" + self.order + "asc"
else:
url = base_url
print "Get contracts from {}".format(url)
response = requests.get(url, auth=self.auth)
else:
response = requests.get(self.github_url + self.query, auth=self.auth)
if response.status_code is 200:
result = response.json()
else:
print "No valid GitHub credentials found."
result = None
return result
def get_total_count(self):
incomplete_results = True
result = dict()
# Get total number of files that contain search term
while incomplete_results:
print "Get total number of contracts from {}".format(self.github_url + self.query)
try:
result = self.search_github(None, None, None)
incomplete_results = result["incomplete_results"]
except TypeError:
print "Could not search GitHub"
break
# in case we have less then 1000 results, store this to limit API calls
self.initial_items = result["items"]
total_count = result["total_count"]
return total_count
def get_items(self, lower, upper, target_count, order_state):
items = self.initial_items
this_item_count = len(items)
incomplete_items = False
try:
result = self.search_github(lower, upper, order_state)
items = result["items"]
this_item_count = len(items)
incomplete_items = True if (this_item_count < target_count) else False
except TypeError:
print "Could not search GitHub"
return items, this_item_count, incomplete_items
def get_download_url_content(self, url):
self.rate_limit(request='core')
# GitHub only gives you the download url when you request it for each file
response = requests.get(url, auth=self.auth)
if response.status_code is 200:
result = response.json()
download_url = result["download_url"]
# This is the hash for the complete file line by line
content_full = result["content"]
# We want just one hash for the whole file for faster comparison of changes
content = hashlib.md5(content_full).hexdigest()
else:
print "No valid GitHub credentials found."
download_url = None
content = None
return download_url, content
def store_locally(self, url, repository_id, remote_path):
# get download url
download_url, content = self.get_download_url_content(url)
# create folder structure
current_path = path.dirname(path.abspath(__file__))
file_path = '{}/code-folder/{}/{}'.format(current_path, repository_id, remote_path)
local_path = file_path.rpartition("/")[0]
if not path.exists(local_path):
makedirs(local_path)
return file_path, download_url, content
def download(self, file_path, download_url):
self.rate_limit(request='core')
print "Downloading {}".format(file_path)
response = requests.get(download_url, auth=self.auth)
with open(file_path, 'wb') as out_file:
out_file.write(response.content)
del response
|
from __future__ import print_function
import os
import sys
import os.path as op
from collections import Counter, namedtuple
import pickle
import json
import numpy as np
import pandas as pd
import pybedtools
from seqcluster.libs.utils import file_exists
import seqcluster.libs.logger as mylog
from seqcluster.libs import do
from seqcluster.libs.read import load_data
from seqcluster.libs.mystats import up_threshold
from seqcluster.detect.cluster import detect_clusters, clean_bam_file, peak_calling, detect_complexity
from seqcluster.detect.description import best_precursor
from seqcluster.libs.annotation import anncluster
from seqcluster.libs.inputs import parse_ma_file
from seqcluster.detect.metacluster import reduceloci, _get_seqs
from seqcluster.libs.tool import generate_position_bed
from seqcluster.libs.classes import *
import seqcluster.libs.parameters as param
from seqcluster.db import make_database
logger = mylog.getLogger(__name__)
def cluster(args):
    """
    Create clusters of small RNA sequences from an alignment.

    Pipeline: validate args, clean the bam file, parse the count matrix,
    detect clusters, resolve multi-mapping meta-clusters, annotate, and write
    the final json / count matrices. Per-step read statistics are appended to
    read_stats.tsv in the output directory.
    """
    args = _check_args(args)
    read_stats_file = op.join(args.dir_out, "read_stats.tsv")
    # Start a fresh stats file for this run.
    if file_exists(read_stats_file):
        os.remove(read_stats_file)
    bam_file, seq_obj = _clean_alignment(args)
    logger.info("Parsing matrix file")
    seqL, y, l = parse_ma_file(seq_obj, args.ffile)
    # y, l = _total_counts(seqL.keys(), seqL)
    logger.info("counts after: %s" % sum(y.values()))
    logger.info("# sequences after: %s" % l)
    # Record per-sample counts for the 'aligned' step.
    dt = pd.DataFrame({'sample': y.keys(), 'counts': y.values()})
    dt['step'] = 'aligned'
    dt.to_csv(read_stats_file, sep="\t", index=False, header=False, mode='a')
    if len(seqL.keys()) < 10:
        logger.error("It seems you have low coverage. Please check your fastq files have enough sequences.")
        raise ValueError("So few sequences.")
    logger.info("Cleaning bam file")
    y, l = _total_counts(list(seqL.keys()), seqL)
    logger.info("counts after: %s" % sum(y.values()))
    logger.info("# sequences after: %s" % l)
    dt = pd.DataFrame({'sample': y.keys(), 'counts': y.values()})
    dt['step'] = 'cleaned'
    dt.to_csv(read_stats_file, sep="\t", index=False, header=False, mode='a')
    clusL = _create_clusters(seqL, bam_file, args)
    # Only sequences that remained aligned contribute to the 'clusters' step.
    y, l = _total_counts(list(clusL.seq.keys()), clusL.seq, aligned=True)
    logger.info("counts after: %s" % sum(y.values()))
    logger.info("# sequences after: %s" % l)
    dt = pd.DataFrame({'sample': y.keys(), 'counts': y.values()})
    dt['step'] = 'clusters'
    dt.to_csv(read_stats_file, sep="\t", index=False, header=False, mode='a')
    logger.info("Solving multi-mapping events in the network of clusters")
    clusLred = _cleaning(clusL, args.dir_out)
    y, l = _total_counts(clusLred.clus, seqL)
    logger.info("counts after: %s" % sum(y.values()))
    logger.info("# sequences after: %s" % l)
    dt = pd.DataFrame({'sample': y.keys(), 'counts': y.values()})
    dt['step'] = 'meta-cluster'
    dt.to_csv(read_stats_file, sep="\t", index=False, header=False, mode='a')
    logger.info("Clusters up to %s" % (len(clusLred.clus.keys())))
    if args.show:
        logger.info("Creating sequences alignment to precursor")
        clusLred = show_seq(clusLred, args.index)
    clusLred = peak_calling(clusLred)
    clusLred = _annotate(args, clusLred)
    logger.info("Creating json and count matrix")
    json_file = _create_json(clusLred, args)
    logger.info("Output file in: %s" % args.dir_out)
    # Optionally build a browsable SQLite database from the json output.
    if args.db:
        name = args.db + ".db"
        logger.info("Create database: database/" + name)
        data = load_data(json_file)
        out_dir = op.join(args.dir_out, "database")
        make_database(data, name, out_dir)
    logger.info("Finished")
def _check_args(args):
    """
    Validate arguments and input files before starting the analysis.

    Fills in derived attributes (dir_out, samplename, list_files/type_ann),
    creates the output directory if needed, and transfers tuning options onto
    the shared `param` module. Raises SyntaxError/IOError on bad inputs.
    """
    logger.info("Checking parameters and files")
    args.dir_out = args.out
    args.samplename = "pro"
    if not os.path.isdir(args.out):
        logger.warning("the output folder doesn't exist")
        os.mkdir(args.out)
    # -b and -g are mutually exclusive annotation inputs.
    if args.bed and args.gtf:
        logger.error("cannot provide -b and -g at the same time")
        raise SyntaxError
    if args.debug:
        logger.info("DEBUG messages will be showed in file.")
    if args.bed:
        args.list_files = args.bed
        args.type_ann = "bed"
    if args.gtf:
        args.list_files = args.gtf
        args.type_ann = "gtf"
    logger.info("Output dir will be: %s" % args.dir_out)
    if not all([file_exists(args.ffile), file_exists(args.afile)]):
        logger.error("I/O error: Seqs.ma or Seqs.bam. ")
        raise IOError("Seqs.ma or/and Seqs.bam doesn't exists.")
    # Every annotation file given with -b/-g must exist.
    if hasattr(args, 'list_files'):
        for filebed in args.list_files.split(","):
            if not file_exists(filebed):
                logger.error("I/O error: {0}".format(filebed))
                raise IOError("%s annotation files doesn't exist" % filebed)
    # The dead `global decision_cluster` / `global similar` statements were
    # removed: these options live on the shared `param` module, not as globals.
    param.decision_cluster = args.method
    if args.similar:
        param.similar = float(args.similar)
    if args.min_seqs:
        param.min_seqs = int(args.min_seqs)
    return args
def _total_counts(seqs, seqL, aligned=False):
"""
Counts total seqs after each step
"""
total = Counter()
nseqs = 0
if isinstance(seqs, list):
if not aligned:
nseqs = len([total.update(seqL[s].freq) for s in seqs])
else:
nseqs = len([total.update(seqL[s].freq) for s in seqs if seqL[s].align > 0])
elif isinstance(seqs, dict):
[total.update(seqs[s].get_freq(seqL)) for s in seqs]
nseqs = sum(len(seqs[s].idmembers) for s in seqs)
return total, nseqs
def _write_size_table(data_freq, data_len, ann_valid, cluster_id):
dd = Counter()
for f, l in zip(data_freq, data_len):
dd[l] += np.mean(list(f.values()))
table = ""
for l in sorted(dd):
table += "%s\t%s\t%s\t%s\n" % (l, dd[l], ann_valid, cluster_id)
return table
def _get_annotation(c, loci):
    """get annotation of transcriptional units

    Collects, per locus of cluster `c`, the feature names of each annotation
    database, and keeps the databases that cover at least 70% of the loci
    (via up_threshold). Returns (data_ann, valid_ann).
    """
    data_ann_temp = {}
    data_ann = []
    counts = Counter()
    for lid in c.loci2seq:
        # original Py 2.7 code
        #for dbi in loci[lid].db_ann.keys():
        #    data_ann_temp[dbi] = {dbi: map(lambda (x): loci[lid].db_ann[dbi].ann[x].name, loci[lid].db_ann[dbi].ann.keys())}
        # suggestion by 2to3
        for dbi in list(loci[lid].db_ann.keys()):
            data_ann_temp[dbi] = {dbi: [loci[lid].db_ann[dbi].ann[x].name for x in list(loci[lid].db_ann[dbi].ann.keys())]}
            logger.debug("_json_: data_ann_temp %s %s" % (dbi, data_ann_temp[dbi]))
            counts[dbi] += 1
        # original Py 2.7 code
        #data_ann = data_ann + map(lambda (x): data_ann_temp[x], data_ann_temp.keys())
        # suggestion by 2to3
        # NOTE(review): data_ann_temp is accumulated across loci without being
        # reset, so databases seen at earlier loci are re-appended here for
        # every later locus — confirm whether the duplication is intentional.
        data_ann = data_ann + [data_ann_temp[x] for x in list(data_ann_temp.keys())]
        logger.debug("_json_: data_ann %s" % data_ann)
    counts = {k: v for k, v in iter(counts.items())}
    total_loci = sum([counts[db] for db in counts])
    # A database is "valid" when it annotates at least 70% of the loci.
    valid_ann = [k for k, v in iter(counts.items()) if up_threshold(v, total_loci, 0.7)]
    return data_ann, valid_ann
def _get_counts(list_seqs, seqs_obj, factor):
scaled = {}
seq = namedtuple('seq', 'freq norm_freq')
for s in list_seqs:
if s not in factor:
factor[s] = 1
samples = seqs_obj[s].norm_freq.keys()
corrected_norm = np.array(list(seqs_obj[s].norm_freq.values())) * factor[s]
corrected_raw = np.array(list(seqs_obj[s].freq.values())) * factor[s]
scaled[s] = seq(dict(zip(samples, corrected_raw)), dict(zip(samples, corrected_norm)))
return scaled
def _sum_by_samples(seqs_freq, samples_order):
"""
Sum sequences of a metacluster by samples.
"""
n = len(seqs_freq[list(seqs_freq.keys())[0]].freq.keys())
y = np.array([0] * n)
for s in seqs_freq:
x = seqs_freq[s].freq
exp = [seqs_freq[s].freq[sam] for sam in samples_order]
y = list(np.array(exp) + y)
return y
def _annotate(args, setclus):
    """annotate transcriptional units with
    gtf/bed files provided by -b/g option"""
    logger.info("Creating bed file")
    cluster_bed = pybedtools.BedTool(generate_position_bed(setclus), from_string=True)
    logger.info("Annotating clusters")
    annotation_files = args.list_files.split(",") if hasattr(args, 'list_files') else []
    for filebed in annotation_files:
        logger.info("Using %s " % filebed)
        # The annotation database is named after the file it comes from.
        db = os.path.basename(filebed)
        overlaps = cluster_bed.intersect(pybedtools.BedTool(filebed), wo=True)
        setclus = anncluster(overlaps, setclus, db, args.type_ann, args.feature_id)
    return setclus
def _clean_alignment(args):
    """
    Prepare the alignment for cluster detection: remove highly repetitive
    low-count reads and report the complexity of the remaining alignment.

    Returns (cleaned bam file path, sequence object parsed from it).
    """
    logger.info("Clean bam file with highly repetitive reads with low counts. sum(counts)/n_hits > 1%")
    bam_file, seq_obj = clean_bam_file(args.afile, args.mask)
    logger.info("Using %s file" % bam_file)
    # Writes complexity diagnostics into the output folder; the return value
    # is not needed here.
    detect_complexity(bam_file, args.ref, args.out)
    return bam_file, seq_obj
def _create_clusters(seqL, bam_file, args):
    """
    Cluster sequences and create metaclusters with multi-mappers.

    Results are cached in <out>/list_obj.pk (and the intermediate bed in
    <out>/cluster.bed) so that re-runs skip the expensive bedtools and
    cluster-detection steps.
    """
    clus_obj = []
    cluster_file = op.join(args.out, "cluster.bed")
    if not os.path.exists(op.join(args.out, 'list_obj.pk')):
        if not file_exists(cluster_file):
            logger.info("Parsing aligned file")
            logger.info("Merging sequences")
            # Prefer the bedtools binary shipped next to the Python interpreter
            # (e.g. inside a conda env), falling back to the one on PATH.
            bedtools = os.path.join(os.path.dirname(sys.executable), "bedtools")
            bedtools = bedtools if os.path.exists(bedtools) else "bedtools"
            # Tag each read with a unique id (column 5), then group same-strand
            # reads within 20 bp into candidate clusters.
            parse_cmd = "awk '{i=i+1;print $1\"\\t\"$2\"\\t\"$3\"\\t\"$4\"\\t\"i\"\\t\"$6}'"
            cmd = "{bedtools} bamtobed -i {bam_file} | {parse_cmd} | {bedtools} cluster -s -d 20 -i - > {cluster_file}"
            do.run(cmd.format(**locals()))
        c = pybedtools.BedTool(cluster_file)
        logger.info("Creating clusters")
        clus_obj = detect_clusters(c, seqL, args.min_seqs, args.non_un_gl)
        # Cache the detected clusters for subsequent runs.
        with open(op.join(args.out, 'list_obj.pk'), 'wb') as output:
            pickle.dump(clus_obj, output, pickle.HIGHEST_PROTOCOL)
    else:
        logger.info("Loading previous clusters")
        with open(op.join(args.out, 'list_obj.pk'), 'rb') as input:
            clus_obj = pickle.load(input)
    # bedfile = pybedtools.BedTool(generate_position_bed(clus_obj), from_string=True)
    # seqs_2_loci = bedfile.intersect(pybedtools.BedTool(aligned_bed, from_string=True), wo=True, s=True)
    # seqs_2_position = add_seqs_position_to_loci(seqs_2_loci, seqL)
    logger.info("%s clusters found" % (len(clus_obj.clusid)))
    return clus_obj
def _cleaning(clusL, path):
    """
    Resolve multi-mapped loci (reduceloci), caching the reduced clusters in
    <path>/list_obj_red.pk so a previous result can be reloaded directly.
    """
    backup = op.join(path, "list_obj_red.pk")
    if op.exists(backup):
        # A previous reduction exists: skip the expensive step entirely.
        logger.info("Loading previous reduced clusters")
        with open(backup, 'rb') as in_handle:
            return pickle.load(in_handle)
    clus_obj = reduceloci(clusL, path)
    with open(backup, 'wb') as output:
        pickle.dump(clus_obj, output, pickle.HIGHEST_PROTOCOL)
    return clus_obj
def _create_json(clusL, args):
    """
    Write the final outputs for the reduced cluster set: counts.tsv (one row
    per cluster), counts_sequence.tsv (one row per sequence), size_counts.tsv,
    positions.bed and seqcluster.json. Returns the json file path.
    """
    clus = clusL.clus
    seqs = clusL.seq
    loci = clusL.loci
    data_clus = {}
    out_count = os.path.join(args.dir_out, "counts.tsv")
    out_single_count = os.path.join(args.dir_out, "counts_sequence.tsv")
    out_size = os.path.join(args.dir_out, "size_counts.tsv")
    out_bed = os.path.join(args.dir_out, "positions.bed")
    # NOTE(review): assumes every sequence shares the same sample set; the
    # sample order is taken from one arbitrary sequence — confirm upstream.
    samples_order = list(seqs[list(seqs.keys())[1]].freq.keys())
    with open(out_count, 'w') as matrix, open(out_size, 'w') as size_matrix, open(out_bed, 'w') as out_bed, open(out_single_count, 'w') as matrix_single:
        matrix.write("id\tnloci\tann\t%s\n" % "\t".join(samples_order))
        matrix_single.write("id\tann\tsequence\t%s\n" % "\t".join(samples_order))
        for cid in clus:
            seqList = []
            c = clus[cid]
            seqList = _get_seqs(c)
            logger.debug("_json_: %s" % seqList)
            data_ann, valid_ann = _get_annotation(c, loci)
            # The first locus returned by best_precursor is used as the
            # representative position in the bed file.
            data_loci = best_precursor(c, loci)
            idloci, chrom, s, e, st, size = data_loci[0]
            annotation = valid_ann[0] if valid_ann else "none"
            bed_line = "%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % (chrom, s, e, annotation, cid, st, len(seqList))
            out_bed.write(bed_line)
            # original Py 2.7 code
            #data_seqs = map(lambda (x): {x: seqs[x].seq}, seqList)
            # proposal by 2to3
            data_seqs = [{x: seqs[x].seq} for x in seqList]
            # Scale counts by the cluster's per-sequence factors.
            scaled_seqs = _get_counts(seqList, seqs, c.idmembers)
            # original Py 2.7 code
            #data_freq = map(lambda (x): scaled_seqs[x].freq, seqList)
            #data_freq_w_id = map(lambda (x): {x: scaled_seqs[x].norm_freq}, seqList)
            #data_len = map(lambda (x): seqs[x].len, seqList)
            # proposal by 2to3
            data_freq = [scaled_seqs[x].freq for x in seqList]
            data_freq_w_id = [{x: scaled_seqs[x].norm_freq} for x in seqList]
            data_len = [seqs[x].len for x in seqList]
            sum_freq = _sum_by_samples(scaled_seqs, samples_order)
            data_ann_str = [["%s::%s" % (name, ",".join(features)) for name, features in iter(k.items())] for k in data_ann]
            data_valid_str = " ".join(valid_ann)
            for s in seqList:
                f = [seqs[s].freq[so] for so in samples_order]
                # Skip sequences absent in more than 10% of samples (only
                # applied when there are at least 10 samples).
                if f.count(0) > 0.1 * len(f) and len(f) > 9:
                    continue
                f = map(str, f)
                print("\t".join([str(cid), data_valid_str, seqs[s].seq, "\t".join(f)]), file=matrix_single, end="\n")
            matrix.write("%s\t%s\t%s|%s\t%s\n" % (cid, c.toomany, data_valid_str, ";".join([";".join(d) for d in data_ann_str]), "\t".join(map(str, sum_freq))))
            size_matrix.write(_write_size_table(data_freq, data_len, data_valid_str, cid))
            data_string = {'seqs': data_seqs, 'freq': data_freq_w_id,
                           'loci': data_loci, 'ann': data_ann,
                           'valid': valid_ann, 'peaks': clus[cid].peaks}
            data_clus[cid] = data_string
    out_file = os.path.join(args.dir_out, "seqcluster.json")
    # import pdb; pdb.set_trace()
    with open(out_file, 'w') as handle_out:
        # https://stackoverflow.com/a/50577730/1772223
        # json cannot serialize numpy integers; coerce them to Python ints.
        def default(o):
            if isinstance(o, np.int64): return int(o)
            raise TypeError
        handle_out.write(json.dumps([data_clus], default=default, skipkeys=True, indent=2))
    return out_file
|
<filename>django_event_tasks/admin.py<gh_stars>0
from django.contrib import admin
from django_event_tasks.models import TaskType, ReminderType, TimeType, TaskStatus, Task, TaskReminder
@admin.register(TaskType)
class TaskTypeAdmin(admin.ModelAdmin):
    # Admin list view for task types; shows only the name column.
    list_display = ('name', )
class TaskTypeReminderInline(admin.TabularInline):
    # Tabular inline for editing TaskReminder rows inside another admin page.
    model = TaskReminder
@admin.register(ReminderType)
class ReminderTypeAdmin(admin.ModelAdmin):
    # Reminder types with their related task reminders editable inline.
    list_display = ('name', )
    inlines = [TaskTypeReminderInline]
@admin.register(TimeType)
class TimeTypeAdmin(admin.ModelAdmin):
    # Time types list view, including which entry is the default.
    list_display = ('name', 'type', 'time', 'default')
@admin.register(TaskStatus)
class TaskStatusAdmin(admin.ModelAdmin):
    # Task status list view showing name and status type.
    list_display = ('name', 'type')
class TaskReminderInline(admin.TabularInline):
    # Tabular inline used by TaskAdmin to manage reminders on a task.
    model = TaskReminder
@admin.register(Task)
class TaskAdmin(admin.ModelAdmin):
    # Main task admin: reminders are edited inline on the task page.
    inlines = [TaskReminderInline]
    list_display = ('task_type', 'created_by', 'time_type', 'task_due_date', 'task_status')
|
#!/bin/sh
# Compile the given translation unit(s) with aggressive optimizations and the
# curve parameters below baked in as preprocessor definitions. Extra arguments
# (source files, output flags) are forwarded via "$@".
set -eu
# -fbracket-depth is raised for deeply nested generated code; -flto/-fuse-ld=lld
# enable link-time optimization. The -D flags define the field constants:
# modulus q = 2^165 - 25, a24 = 12345 (0x3039), 3 limbs of 64 bits each,
# 21-byte serialized field elements.
clang++ -fbracket-depth=999999 -march=native -mtune=native -std=gnu++11 -O3 -flto -fuse-ld=lld -fomit-frame-pointer -fwrapv -Wno-attributes -fno-strict-aliasing -Da24_hex='0x3039' -Da24_val='12345' -Da_minus_two_over_four_array='{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30,0x39}' -Dbitwidth='64' -Dlimb_weight_gaps_array='{64,64,64}' -Dmodulus_array='{0x1f,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xe7}' -Dmodulus_bytes_val='21' -Dmodulus_limbs='3' -Dq_mpz='(1_mpz<<165) - 25' "$@"
|
<gh_stars>0
# Gem version constant for allocation_tracer.
module ObjectSpace::AllocationTracer
  VERSION = "0.6.0"
end
|
<filename>changingDOM/changeDom.js
// Basic DOM manipulation demo: change an attribute, read innerHTML,
// and rewrite an element's contents.
var mainContent = document.getElementById("mainContent");
// NOTE(review): the "align" attribute is obsolete in HTML5; kept as-is
// since this mirrors the lesson material.
mainContent.setAttribute("align", "right");
// Fix: mainTitle was assigned without `var`, creating an implicit global
// (and a ReferenceError in strict mode). Declare it explicitly.
var mainTitle = document.getElementById("mainTitle");
console.log(mainTitle.innerHTML);
var sidebar = document.getElementById("sidebar");
console.log(sidebar.innerHTML);
// getElementsByTagName returns a live HTMLCollection; index 0 is the first <h1>.
var arrayOfH1s = mainContent.getElementsByTagName("h1");
arrayOfH1s[0].innerHTML = "This is a new title";
|
#!/bin/bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Populate and export every configuration variable the installer needs.
# All values honor pre-set environment variables and fall back to defaults.
# Also derives project number, workload pool, mesh ID, and (if already
# provisioned) the ingress IP and DNS nameserver.
set_config_params() {
    echo "📝 Setting Config Parameters (Provide your own or defaults will be applied)"

    echo "🔧 Configuring GCP Project"
    PROJECT_ID=${PROJECT_ID:-$(gcloud config get-value "project")}
    export PROJECT_ID
    gcloud config set project "$PROJECT_ID"

    export AX_REGION=${AX_REGION:-'europe-west1'}

    export REGION=${REGION:-'europe-west1'}
    gcloud config set compute/region "$REGION"

    export ZONE=${ZONE:-'europe-west1-c'}
    gcloud config set compute/zone "$ZONE"

    printf "\n🔧 Apigee hybrid Configuration:\n"
    export INGRESS_TYPE=${INGRESS_TYPE:-'external'} # internal|external
    echo "- Ingress type $INGRESS_TYPE"
    echo "- TLS Certificate ${CERT_TYPE:-let\'s encrypt}"
    export GKE_CLUSTER_NAME=${GKE_CLUSTER_NAME:-apigee-hybrid}
    export GKE_CLUSTER_MACHINE_TYPE=${GKE_CLUSTER_MACHINE_TYPE:-e2-standard-4}
    echo "- GKE Node Type $GKE_CLUSTER_MACHINE_TYPE"
    # Pinned tool versions; bump together when upgrading the stack.
    export APIGEE_CTL_VERSION='1.6.3'
    echo "- Apigeectl version $APIGEE_CTL_VERSION"
    export KPT_VERSION='v0.34.0'
    echo "- kpt version $KPT_VERSION"
    export CERT_MANAGER_VERSION='v1.2.0'
    echo "- Cert Manager version $CERT_MANAGER_VERSION"
    export ASM_VERSION='1.9'
    echo "- ASM version $ASM_VERSION"

    # Pick platform-specific binary names; only Linux and macOS are supported.
    OS_NAME=$(uname -s)

    if [[ "$OS_NAME" == "Linux" ]]; then
      echo "- 🐧 Using Linux binaries"
      export APIGEE_CTL='apigeectl_linux_64.tar.gz'
      export KPT_BINARY='kpt_linux_amd64-0.34.0.tar.gz'
      export JQ_VERSION='jq-1.6/jq-linux64'
    elif [[ "$OS_NAME" == "Darwin" ]]; then
      echo "- 🍏 Using macOS binaries"
      export APIGEE_CTL='apigeectl_mac_64.tar.gz'
      export KPT_BINARY='kpt_darwin_amd64-0.34.0.tar.gz'
      export JQ_VERSION='jq-1.6/jq-osx-amd64'
    else
      echo "💣 Only Linux and macOS are supported at this time. You seem to be running on $OS_NAME."
      exit 2
    fi

    printf "\n🔧 Derived config parameters\n"
    echo "- GCP Project $PROJECT_ID"
    PROJECT_NUMBER=$(gcloud projects describe "${PROJECT_ID}" --format="value(projectNumber)")
    export PROJECT_NUMBER
    export WORKLOAD_POOL="${PROJECT_ID}.svc.id.goog"
    echo "- Workload Pool $WORKLOAD_POOL"
    export MESH_ID="proj-${PROJECT_NUMBER}"
    echo "- Mesh ID $MESH_ID"

    # these will be set if the steps are run in order
    INGRESS_IP=$(gcloud compute addresses list --format json --filter "name=apigee-ingress-ip" --format="get(address)" || echo "")
    export INGRESS_IP
    echo "- Ingress IP ${INGRESS_IP:-N/A}"
    NAME_SERVER=$(gcloud dns managed-zones describe apigee-dns-zone --format="json" --format="get(nameServers[0])" 2>/dev/null || echo "")
    export NAME_SERVER
    echo "- Nameserver ${NAME_SERVER:-N/A}"

    # Directory layout used by all later steps.
    export QUICKSTART_ROOT="${QUICKSTART_ROOT:-$PWD}"
    export QUICKSTART_TOOLS="$QUICKSTART_ROOT/tools"
    export APIGEECTL_HOME=$QUICKSTART_TOOLS/apigeectl/apigeectl_$APIGEE_CTL_VERSION
    export HYBRID_HOME=$QUICKSTART_ROOT/hybrid-files

    echo "- Script root from: $QUICKSTART_ROOT"
}
token() { echo -n "$(gcloud config config-helper --force-auth-refresh | grep access_token | grep -o -E '[^ ]+$')" ; }
# Poll a shell command until its output equals an expected value.
#   $1 expected output, $2 command string (run via `bash -c`), $3 success message.
# Polls every 5s, up to 120 iterations (~10 min); on timeout the WHOLE script
# exits with status 1.
function wait_for_ready(){
    local expected_output=$1
    local action=$2
    local message=$3
    local max_iterations=120 # 10min
    local iterations=0
    local actual_out
    echo -e "Waiting for $action to return output $expected_output"
    echo -e "Start: $(date)\n"
    while true; do
        iterations="$((iterations+1))"
        # A failing command is mapped to the string "error code N" so the
        # comparison below never matches on errors.
        actual_out=$(bash -c "$action" || echo "error code $?")
        if [ "$expected_output" = "$actual_out" ]; then
            echo -e "\n$message"
            break
        fi
        if [ "$iterations" -ge "$max_iterations" ]; then
            echo "Wait timed out"
            exit 1
        fi
        echo -n "."
        sleep 5
    done
}
# Interactively ask the user to confirm the printed configuration.
# Skipped entirely when QUIET_INSTALL=true; default answer is Yes.
# Any answer other than y/Y aborts the script with status 1.
ask_confirm() {
  if [ ! "$QUIET_INSTALL" = "true" ]; then
    printf "\n\n"
    read -p "Do you want to continue with the config above? [Y/n]: " -n 1 -r REPLY; printf "\n"
    REPLY=${REPLY:-Y}

    if [[ "$REPLY" =~ ^[Yy]$ ]]; then
      echo "starting provisioning"
    else
      exit 1
    fi
  fi
}
# Return 0 if the Apigee resource at URI $1 already exists, 1 otherwise.
# Detection is heuristic: any response body containing "error" is treated
# as "does not exist".
check_existing_apigee_resource() {
  RESOURCE_URI=$1

  echo "🤔 Checking if the Apigee resource '$RESOURCE_URI' already exists".

  RESPONSE=$(curl -H "Authorization: Bearer $(token)" --silent "$RESOURCE_URI")

  if [[ $RESPONSE == *"error"* ]]; then
    echo "🤷♀️ Apigee resource '$RESOURCE_URI' does not exist yet"
    return 1
  else
    echo "🎉 Apigee resource '$RESOURCE_URI' already exists"
    return 0
  fi
}
# Enable every GCP service API required by Apigee hybrid, GKE, ASM and DNS
# in the current project. `gcloud services enable` is idempotent.
enable_all_apis() {

  PROJECT_ID=${PROJECT_ID:-$(gcloud config get-value "project")}

  echo "📝 Enabling all required APIs in GCP project \"$PROJECT_ID\""
  echo -n "⏳ Waiting for APIs to be enabled"

  gcloud services enable \
    anthos.googleapis.com \
    apigee.googleapis.com \
    apigeeconnect.googleapis.com \
    cloudresourcemanager.googleapis.com \
    cloudtrace.googleapis.com \
    compute.googleapis.com \
    container.googleapis.com \
    dns.googleapis.com \
    gkeconnect.googleapis.com \
    gkehub.googleapis.com \
    iamcredentials.googleapis.com \
    logging.googleapis.com \
    meshca.googleapis.com \
    meshconfig.googleapis.com \
    meshtelemetry.googleapis.com \
    monitoring.googleapis.com \
    pubsub.googleapis.com \
    stackdriver.googleapis.com \
    --project "$PROJECT_ID"
}
# Create the Apigee hybrid organization named after the project (idempotent:
# returns early if it exists) and block until the control plane reports it.
create_apigee_org() {
    echo "🚀 Create Apigee ORG - $PROJECT_ID"

    if check_existing_apigee_resource "https://apigee.googleapis.com/v1/organizations/$PROJECT_ID" ; then
      echo "(skipping org creation, already exists)"
      return 0
    fi

    curl -X POST --fail -H "Authorization: Bearer $(token)" -H "content-type:application/json" \
    -d "{
        \"name\":\"$PROJECT_ID\",
        \"displayName\":\"$PROJECT_ID\",
        \"description\":\"Apigee hybrid Org\",
        \"analyticsRegion\":\"$AX_REGION\",
        \"runtimeType\":\"HYBRID\",
        \"properties\" : {
          \"property\" : [ {
            \"name\" : \"features.hybrid.enabled\",
            \"value\" : \"true\"
          }, {
            \"name\" : \"features.mart.connect.enabled\",
            \"value\" : \"true\"
          } ]
        }
      }" \
    "https://apigee.googleapis.com/v1/organizations?parent=projects/$PROJECT_ID"

    # Org creation is async; poll until GET on the org returns subscriptionType.
    echo -n "⏳ Waiting for Apigeectl Org Creation "
    wait_for_ready "0" "curl --silent -H \"Authorization: Bearer $(token)\" -H \"Content-Type: application/json\" https://apigee.googleapis.com/v1/organizations/$PROJECT_ID | grep -q \"subscriptionType\"; echo \$?" "Organization $PROJECT_ID is created."

    echo "✅ Created Org '$PROJECT_ID'"
}
# Create the Apigee environment named $1 in the org (idempotent) and wait
# until the control plane reports it.
create_apigee_env() {
    ENV_NAME=$1

    echo "🚀 Create Apigee Env - $ENV_NAME"

    if check_existing_apigee_resource "https://apigee.googleapis.com/v1/organizations/$PROJECT_ID/environments/$ENV_NAME"; then
      echo "(skipping, env already exists)"
      return
    fi

    curl -X POST --fail -H "Authorization: Bearer $(token)" -H "content-type:application/json" \
      -d "{\"name\":\"$ENV_NAME\"}" \
      "https://apigee.googleapis.com/v1/organizations/$PROJECT_ID/environments"

    echo -n "⏳ Waiting for Apigeectl Env Creation "
    wait_for_ready "0" "curl --silent -H \"Authorization: Bearer $(token)\" -H \"Content-Type: application/json\" https://apigee.googleapis.com/v1/organizations/$PROJECT_ID/environments/$ENV_NAME | grep -q \"$ENV_NAME\"; echo \$?" "Environment $ENV_NAME of Organization $PROJECT_ID is created."

    echo "✅ Created Env '$ENV_NAME'"
}
# Create the Apigee environment group named $1 (idempotent) and wait until
# the control plane reports it. The initial hostname defaults to
# <group>.<PROJECT_ID>.apigee.com unless DNS_NAME is set; configure_network
# later patches it to the real DNS name.
create_apigee_envgroup() {
    ENV_GROUP_NAME=$1

    echo "🚀 Create Apigee Env Group - $ENV_GROUP_NAME"

    if check_existing_apigee_resource "https://apigee.googleapis.com/v1/organizations/$PROJECT_ID/envgroups/$ENV_GROUP_NAME"; then
      echo "(skipping, envgroup already exists)"
      return
    fi

    # Fix: removed the trailing comma after the "hostnames" array — strict
    # JSON parsers (RFC 8259) reject trailing commas and the request could
    # fail with a 400.
    curl -X POST --fail -H "Authorization: Bearer $(token)" -H "content-type:application/json" \
      -d "{
        \"name\":\"$ENV_GROUP_NAME\",
        \"hostnames\":[\"$ENV_GROUP_NAME.${DNS_NAME:-$PROJECT_ID.apigee.com}\"]
      }" \
      "https://apigee.googleapis.com/v1/organizations/$PROJECT_ID/envgroups"

    echo -n "⏳ Waiting for Apigeectl Env Creation "
    wait_for_ready "0" "curl --silent -H \"Authorization: Bearer $(token)\" -H \"Content-Type: application/json\" https://apigee.googleapis.com/v1/organizations/$PROJECT_ID/envgroups/$ENV_GROUP_NAME | grep -q $ENV_GROUP_NAME; echo \$?" "Environment Group $ENV_GROUP_NAME of Organization $PROJECT_ID is created."

    echo "✅ Created Env Group '$ENV_GROUP_NAME'"
}
# Attach environment $1 to environment group $2 (idempotent: skips when the
# attachment already exists).
add_env_to_envgroup() {
  ENV_NAME=$1
  ENV_GROUP_NAME=$2

  echo "🚀 Adding Env $ENV_NAME to Env Group $ENV_GROUP_NAME"

  local ENV_GROUPS_ATTACHMENT_URI
  ENV_GROUPS_ATTACHMENT_URI="https://apigee.googleapis.com/v1/organizations/$PROJECT_ID/envgroups/$ENV_GROUP_NAME/attachments"

  if curl --fail --silent -H "Authorization: Bearer $(token)" -H "content-type:application/json" "$ENV_GROUPS_ATTACHMENT_URI" | grep -q "\"environment\": \"$ENV_NAME\""; then
    echo "(skipping, envgroup assignment already exists)"
    return
  else
    curl -X POST --fail -q -H "Authorization: Bearer $(token)" -H "content-type:application/json" \
      -d '{ "environment": "'"$ENV_NAME"'" }' "$ENV_GROUPS_ATTACHMENT_URI"
  fi

  echo "✅ Added Env $ENV_NAME to Env Group $ENV_GROUP_NAME"
}
# Provision networking for env group $1: reserve a static ingress IP
# (external or internal per INGRESS_TYPE), derive DNS_NAME (defaults to a
# nip.io name based on the IP), patch the env group hostname, and create a
# Cloud DNS zone with an A record. All steps are idempotent.
configure_network() {
    echo "🌐 Setup Networking"

    ENV_GROUP_NAME="$1"

    if [ -z "$(gcloud compute addresses list --format json --filter 'name=apigee-ingress-ip' --format='get(address)')" ]; then
      if [[ "$INGRESS_TYPE" == "external" ]]; then
        gcloud compute addresses create apigee-ingress-ip --region "$REGION"
      else
        gcloud compute addresses create apigee-ingress-ip --region "$REGION" --subnet default --purpose SHARED_LOADBALANCER_VIP
      fi
    fi
    INGRESS_IP=$(gcloud compute addresses list --format json --filter "name=apigee-ingress-ip" --format="get(address)")
    export INGRESS_IP

    # nip.io resolves <dashed-ip>.nip.io to the IP, so no DNS setup is needed
    # for a quick start.
    export DNS_NAME=${DNS_NAME:="$(echo "$INGRESS_IP" | tr '.' '-').nip.io"}

    echo "setting hostname on env group to $ENV_GROUP_NAME.$DNS_NAME"

    curl -X PATCH --silent -H "Authorization: Bearer $(token)"  \
      -H "Content-Type:application/json" https://apigee.googleapis.com/v1/organizations/"$PROJECT_ID"/envgroups/"$ENV_GROUP_NAME" \
      -d "{\"hostnames\": [\"$ENV_GROUP_NAME.$DNS_NAME\"]}"

    if [ -z "$(gcloud dns managed-zones list --filter 'name=apigee-dns-zone' --format='get(name)')" ]; then
      if [[ "$INGRESS_TYPE" == "external" ]]; then
        gcloud dns managed-zones create apigee-dns-zone --dns-name="$DNS_NAME" --description=apigee-dns-zone
      else
        gcloud dns managed-zones create apigee-dns-zone --dns-name="$DNS_NAME" --description=apigee-dns-zone --visibility="private" --networks="default"
      fi

      # A stale transaction file from an aborted run would break the
      # record-set transaction below.
      rm -f transaction.yaml

      gcloud dns record-sets transaction start --zone=apigee-dns-zone
      gcloud dns record-sets transaction add "$INGRESS_IP" \
          --name="$ENV_GROUP_NAME.$DNS_NAME." --ttl=600 \
          --type=A --zone=apigee-dns-zone
      gcloud dns record-sets transaction describe --zone=apigee-dns-zone
      gcloud dns record-sets transaction execute --zone=apigee-dns-zone
    fi

    if [[ "$INGRESS_TYPE" == "external" ]]; then
      NAME_SERVER=$(gcloud dns managed-zones describe apigee-dns-zone --format="json" --format="get(nameServers[0])")
      export NAME_SERVER
      echo "👋 Add this as an NS record for $DNS_NAME: $NAME_SERVER"
    fi

    echo "✅ Networking set up"
}
# Create the GKE cluster (idempotent: skipped if a cluster with the name
# already exists), fetch kubectl credentials, and grant the current gcloud
# account cluster-admin (required for the ASM/Apigee installs that follow).
create_gke_cluster() {
    echo "🚀 Create GKE cluster"

    if [ -z "$(gcloud container clusters list --filter "name=$GKE_CLUSTER_NAME" --format='get(name)')" ]; then
      gcloud container clusters create "$GKE_CLUSTER_NAME" \
        --region "$REGION" \
        --node-locations "$ZONE" \
        --release-channel stable \
        --network default \
        --subnetwork default \
        --default-max-pods-per-node "110" \
        --enable-ip-alias \
        --machine-type "$GKE_CLUSTER_MACHINE_TYPE" \
        --num-nodes "3" \
        --enable-autoscaling \
        --min-nodes "3" \
        --max-nodes "6" \
        --labels mesh_id="$MESH_ID" \
        --workload-pool "$WORKLOAD_POOL" \
        --logging SYSTEM,WORKLOAD \
        --monitoring SYSTEM
    fi

    gcloud container clusters get-credentials "$GKE_CLUSTER_NAME" --region "$REGION"

    # `|| true`: the binding may already exist from a previous run.
    kubectl create clusterrolebinding cluster-admin-binding \
      --clusterrole cluster-admin --user "$(gcloud config get-value account)" || true

    echo "✅ GKE set up"
}
# Install cert-manager (pinned to CERT_MANAGER_VERSION) into the cluster.
# --validate=false because the manifest targets multiple k8s versions.
install_certmanager() {
    echo "👩🏽💼 Creating Cert Manager"
    kubectl apply --validate=false -f https://github.com/jetstack/cert-manager/releases/download/$CERT_MANAGER_VERSION/cert-manager.yaml
}
# Download kpt, jq and the ASM installer, patch the installer for CI use,
# write an IstioOperator overlay that pins the ingress gateway to the
# reserved static IP, and run the ASM install against the cluster.
install_asm() {

    echo "🏗️ Preparing ASM install requirements"
    mkdir -p "$QUICKSTART_TOOLS"/kpt
    curl --fail -L -o "$QUICKSTART_TOOLS/kpt/kpt.tar.gz" "https://github.com/GoogleContainerTools/kpt/releases/download/${KPT_VERSION}/${KPT_BINARY}"
    tar xzf "$QUICKSTART_TOOLS/kpt/kpt.tar.gz" -C "$QUICKSTART_TOOLS/kpt"
    export PATH=$PATH:"$QUICKSTART_TOOLS"/kpt

    mkdir -p "$QUICKSTART_TOOLS"/jq
    curl --fail -L -o "$QUICKSTART_TOOLS"/jq/jq "https://github.com/stedolan/jq/releases/download/$JQ_VERSION"
    chmod +x "$QUICKSTART_TOOLS"/jq/jq
    export PATH=$PATH:"$QUICKSTART_TOOLS"/jq

    echo "🏗️ Installing Anthos Service Mesh"
    mkdir -p "$QUICKSTART_TOOLS"/istio-asm
    curl --fail https://storage.googleapis.com/csm-artifacts/asm/install_asm_$ASM_VERSION > "$QUICKSTART_TOOLS"/istio-asm/install_asm
    chmod +x "$QUICKSTART_TOOLS"/istio-asm/install_asm

    # patch ASM installer to allow for cloud build SA
    sed -i -e 's/iam.gserviceaccount.com/gserviceaccount.com/g' "$QUICKSTART_TOOLS"/istio-asm/install_asm

    # patch ASM installer to use the new kubectl --dry-run syntax
    sed -i -e 's/--dry-run/--dry-run=client/g' "$QUICKSTART_TOOLS"/istio-asm/install_asm

    # Overlay: expose the gateway on the reserved static IP via a
    # LoadBalancer service of the configured ingress type.
    cat << EOF > "$QUICKSTART_TOOLS"/istio-asm/istio-operator-patch.yaml
apiVersion: install.istio.io/v1alpha1
kind: IstioOperator
spec:
  components:
    ingressGateways:
      - name: istio-ingressgateway
        enabled: true
        k8s:
          serviceAnnotations:
            networking.gke.io/load-balancer-type: $INGRESS_TYPE
          service:
            type: LoadBalancer
            loadBalancerIP: $INGRESS_IP
            ports:
            - name: status-port
              port: 15021 # for ASM 1.7.x and above, else 15020
              targetPort: 15021 # for ASM 1.7.x and above, else 15020
            - name: https
              port: 443
              targetPort: 8443
EOF

    rm -rf "$QUICKSTART_TOOLS"/istio-asm/install-out
    mkdir -p "$QUICKSTART_TOOLS"/istio-asm/install-out
    # The installer expects kpt next to its output dir.
    ln -s "$QUICKSTART_TOOLS/kpt/kpt" "$QUICKSTART_TOOLS"/istio-asm/install-out/kpt

    "$QUICKSTART_TOOLS"/istio-asm/install_asm \
    --project_id "$PROJECT_ID" \
    --cluster_name "$GKE_CLUSTER_NAME" \
    --cluster_location "$REGION" \
    --output_dir "$QUICKSTART_TOOLS"/istio-asm/install-out \
    --custom_overlay "$QUICKSTART_TOOLS"/istio-asm/istio-operator-patch.yaml \
    --enable_all \
    --mode install

    echo "✅ ASM installed"
}
# Download and unpack the pinned apigeectl release into APIGEECTL_HOME,
# replacing any previous installation.
download_apigee_ctl() {
    echo "📥 Setup Apigeectl"

    APIGEECTL_ROOT="$QUICKSTART_TOOLS/apigeectl"

    # Remove if it existed from an old install
    if [ -d "$APIGEECTL_ROOT" ]; then rm -rf "$APIGEECTL_ROOT"; fi
    mkdir -p "$APIGEECTL_ROOT"

    curl --fail -L \
      -o "$APIGEECTL_ROOT/apigeectl.tar.gz" \
      "https://storage.googleapis.com/apigee-release/hybrid/apigee-hybrid-setup/$APIGEE_CTL_VERSION/$APIGEE_CTL"

    tar xvzf "$APIGEECTL_ROOT/apigeectl.tar.gz" -C "$APIGEECTL_ROOT"
    rm "$APIGEECTL_ROOT/apigeectl.tar.gz"

    # The archive extracts into a platform-suffixed dir; normalize its name.
    mv "$APIGEECTL_ROOT"/apigeectl_*_64 "$APIGEECTL_HOME"

    echo "✅ Apigeectl set up in $APIGEECTL_HOME"
}
# Recreate the hybrid-files working directory and symlink apigeectl's
# tools/config/templates/plugins into it (apigeectl expects this layout).
prepare_resources() {
    echo "🛠️ Configure Apigee hybrid"

    if [ -d "$HYBRID_HOME" ]; then rm -rf "$HYBRID_HOME"; fi
    mkdir -p "$HYBRID_HOME"

    mkdir  -p "$HYBRID_HOME/overrides"
    mkdir  -p "$HYBRID_HOME/service-accounts"
    ln -s "$APIGEECTL_HOME/tools" "$HYBRID_HOME/tools"
    ln -s "$APIGEECTL_HOME/config" "$HYBRID_HOME/config"
    ln -s "$APIGEECTL_HOME/templates" "$HYBRID_HOME/templates"
    ln -s "$APIGEECTL_HOME/plugins" "$HYBRID_HOME/plugins"

    echo "✅ Hybrid Config Setup"
}
# Provision the TLS secret `tls-hybrid-ingress` for env group $1.
# Always creates a self-signed cert (reusing a local quickstart CA) so the
# gateway works immediately; unless CERT_TYPE=self-signed it then sets up a
# cert-manager Let's Encrypt issuer that overwrites the secret once issued.
# CERT_TYPE=skip disables the whole step.
create_cert() {
    ENV_GROUP_NAME=$1

    if [ "$CERT_TYPE" = "skip" ];then
      return
    fi

    echo "🙈 Creating (temporary) self-signed cert - $ENV_GROUP_NAME"

    mkdir  -p "$HYBRID_HOME/certs"

    CA_CERT_NAME="quickstart-ca"

    # create CA cert if not exist
    if [ -f "$HYBRID_HOME/certs/$CA_CERT_NAME.crt" ]; then
      echo "CA already exists! Reusing that one."
    else
      openssl req -x509 -sha256 -nodes -days 365 -newkey rsa:2048 -subj "/CN=$DNS_NAME/O=Apigee Quickstart" -keyout "$HYBRID_HOME/certs/$CA_CERT_NAME.key" -out "$HYBRID_HOME/certs/$CA_CERT_NAME.crt"
    fi

    # Issue a cert for the env group hostname, signed by the quickstart CA.
    openssl req -out "$HYBRID_HOME/certs/$ENV_GROUP_NAME.csr" -newkey rsa:2048 -nodes -keyout "$HYBRID_HOME/certs/$ENV_GROUP_NAME.key" -subj "/CN=$ENV_GROUP_NAME.$DNS_NAME/O=Apigee Quickstart"

    openssl x509 -req -days 365 -CA "$HYBRID_HOME/certs/$CA_CERT_NAME.crt" -CAkey "$HYBRID_HOME/certs/$CA_CERT_NAME.key" -set_serial 0 -in "$HYBRID_HOME/certs/$ENV_GROUP_NAME.csr" -out "$HYBRID_HOME/certs/$ENV_GROUP_NAME.crt"

    cat "$HYBRID_HOME/certs/$ENV_GROUP_NAME.crt" "$HYBRID_HOME/certs/$CA_CERT_NAME.crt" > "$HYBRID_HOME/certs/$ENV_GROUP_NAME.fullchain.crt"

    # Apply idempotently via dry-run + apply so re-runs update the secret.
    kubectl create secret tls tls-hybrid-ingress \
      --cert="$HYBRID_HOME/certs/$ENV_GROUP_NAME.fullchain.crt" \
      --key="$HYBRID_HOME/certs/$ENV_GROUP_NAME.key" \
      -n istio-system --dry-run=client -o yaml | kubectl apply -f -

    if [ "$CERT_TYPE" != "self-signed" ];then
      echo "🔒 Creating let's encrypt cert - $ENV_GROUP_NAME"

      cat <<EOF | kubectl apply -f -
apiVersion: cert-manager.io/v1
kind: Issuer
metadata:
  name: letsencrypt
  namespace: istio-system
spec:
  acme:
    email: admin@$DNS_NAME
    server: https://acme-v02.api.letsencrypt.org/directory
    privateKeySecretRef:
      name: letsencrypt-issuer-account-key
    solvers:
    - http01:
        ingress:
          class: istio
---
apiVersion: cert-manager.io/v1
kind: Certificate
metadata:
  name: tls-hybrid-ingress
  namespace: istio-system
spec:
  secretName: tls-hybrid-ingress
  issuerRef:
    name: letsencrypt
  commonName: $ENV_GROUP_NAME.$DNS_NAME
  dnsNames:
  - $ENV_GROUP_NAME.$DNS_NAME
EOF

    fi
}
# Create the Apigee hybrid service accounts + keys (prod profile) via the
# bundled helper, then authorize the synchronizer SA on the org. `yes |`
# auto-confirms the helper's interactive prompts.
create_sa() {
  yes | "$APIGEECTL_HOME"/tools/create-service-account -e prod -d "$HYBRID_HOME/service-accounts"

  echo -n "🔛 Enabling runtime synchronizer"
  curl --fail -X POST -H "Authorization: Bearer $(token)" \
    -H "Content-Type:application/json" \
    "https://apigee.googleapis.com/v1/organizations/${PROJECT_ID}:setSyncAuthorization" \
    -d "{\"identities\":[\"serviceAccount:apigee-synchronizer@${PROJECT_ID}.iam.gserviceaccount.com\"]}"
}
# Write the apigeectl overrides.yaml for env $1 / env group $2, wiring each
# hybrid component to its service-account key file and the TLS secret.
# The timestamp suffix makes instanceID unique per install.
configure_runtime() {
  ENV_NAME=$1
  ENV_GROUP_NAME=$2
  echo "Configure Overrides"

  cat << EOF > "$HYBRID_HOME"/overrides/overrides.yaml
gcp:
  projectID: $PROJECT_ID
  region: "$REGION" # Analytics Region
# Apigee org name.
org: $PROJECT_ID
# Kubernetes cluster name details
k8sCluster:
  name: $GKE_CLUSTER_NAME
  region: "$REGION"

virtualhosts:
  - name: $ENV_GROUP_NAME
    sslSecret: tls-hybrid-ingress

instanceID: "$PROJECT_ID-$(date +%s)"

envs:
  - name: $ENV_NAME
    serviceAccountPaths:
      synchronizer: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-synchronizer.json"
      udca: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-udca.json"
      runtime: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-runtime.json"

mart:
  serviceAccountPath: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-mart.json"

connectAgent:
  serviceAccountPath: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-mart.json"

udca:
  serviceAccountPath: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-udca.json"

metrics:
  enabled: true
  serviceAccountPath: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-metrics.json"

watcher:
  serviceAccountPath: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-watcher.json"

logger:
  enabled: false
  serviceAccountPath: "$HYBRID_HOME/service-accounts/$PROJECT_ID-apigee-logger.json"
EOF
}
# Run `apigeectl init` then `apigeectl apply` with the generated overrides,
# waiting for the controller and runtime pods between steps. Each apigeectl
# call is retried once after 120s because webhooks may not be ready yet.
install_runtime() {
    pushd "$HYBRID_HOME" || return # because apigeectl uses pwd-relative paths
    mkdir -p "$HYBRID_HOME"/generated
    "$APIGEECTL_HOME"/apigeectl init -f "$HYBRID_HOME"/overrides/overrides.yaml --print-yaml > "$HYBRID_HOME"/generated/apigee-init.yaml || ( sleep 120 && "$APIGEECTL_HOME"/apigeectl init -f "$HYBRID_HOME"/overrides/overrides.yaml --print-yaml > "$HYBRID_HOME"/generated/apigee-init.yaml )
    sleep 2 && echo -n "⏳ Waiting for Apigeectl init "
    wait_for_ready "Running" "kubectl get po -l app=apigee-controller -n apigee-system -o=jsonpath='{.items[0].status.phase}' 2>/dev/null" "Apigee Controller: Running"
    echo "waiting for 30s for the webhook certs to propagate" && sleep 30
    "$APIGEECTL_HOME"/apigeectl apply -f "$HYBRID_HOME"/overrides/overrides.yaml --print-yaml > "$HYBRID_HOME"/generated/apigee-runtime.yaml || ( sleep 120 && "$APIGEECTL_HOME"/apigeectl apply -f "$HYBRID_HOME"/overrides/overrides.yaml --print-yaml > "$HYBRID_HOME"/generated/apigee-runtime.yaml )
    sleep 2 && echo -n "⏳ Waiting for Apigeectl apply "
    wait_for_ready "Running" "kubectl get po -l app=apigee-runtime -n apigee -o=jsonpath='{.items[0].status.phase}' 2>/dev/null" "Apigee Runtime: Running."
    popd || return
    echo "🎉🎉🎉 Hybrid installation completed!"
}
# Grant the runtime SA the Cloud Trace agent role and enable distributed
# trace (50% probability sampling) for environment $1.
enable_trace() {
    ENV_NAME=$1
    echo -n "🕵️♀️ Turn on trace logs"
    gcloud projects add-iam-policy-binding "$PROJECT_ID" \
        --member "serviceAccount:apigee-runtime@${PROJECT_ID}.iam.gserviceaccount.com" \
        --role=roles/cloudtrace.agent --project "$PROJECT_ID"

    curl --fail -X PATCH -H "Authorization: Bearer $(token)" \
      -H "Content-Type:application/json" \
      "https://apigee.googleapis.com/v1/organizations/${PROJECT_ID}/environments/$ENV_NAME/traceConfig" \
      -d "{\"exporter\":\"CLOUD_TRACE\",\"endpoint\":\"${PROJECT_ID}\",\"sampling_config\":{\"sampler\":\"PROBABILITY\",\"sampling_rate\":0.5}}"
}
# Zip the bundled httpbin example proxy, import it as `httpbin-v0`, deploy
# the imported revision to environment $1, and print curl commands for
# trying it against env group $2.
deploy_example_proxy() {
  echo "🦄 Deploy Sample Proxy"

  ENV_NAME=$1
  ENV_GROUP_NAME=$2

  (cd "$QUICKSTART_ROOT/example-proxy" && zip -r apiproxy.zip apiproxy/*)

  # Extract the new revision number from the import response.
  PROXY_REV=$(curl -X POST \
    "https://apigee.googleapis.com/v1/organizations/${PROJECT_ID}/apis?action=import&name=httpbin-v0&validate=true" \
    -H "Authorization: Bearer $(token)" \
    -H "Content-Type: multipart/form-data" \
    -F "zipFile=@$QUICKSTART_ROOT/example-proxy/apiproxy.zip" | grep '"revision": "[^"]*' | cut -d'"' -f4)

  rm "$QUICKSTART_ROOT/example-proxy/apiproxy.zip"

  curl -X POST \
    "https://apigee.googleapis.com/v1/organizations/${PROJECT_ID}/environments/$ENV_NAME/apis/httpbin-v0/revisions/${PROXY_REV}/deployments?override=true" \
    -H "Authorization: Bearer $(token)" \
    -H "Content-Length: 0"

  echo "✅ Sample Proxy Deployed"

  echo "🤓 Try without DNS (first deployment takes a few seconds. Relax and breathe!):"

  if [ "$CERT_TYPE" = "self-signed" ];then
    echo "curl --cacert $QUICKSTART_ROOT/hybrid-files/certs/quickstart-ca.crt https://$ENV_GROUP_NAME.$DNS_NAME/httpbin/v0/anything"
  else
    echo "curl https://$ENV_GROUP_NAME.$DNS_NAME/httpbin/v0/anything (use -k while Let's encrypt is issuing your cert)"
  fi

  echo "👋 To reach your API via the FQDN: Make sure you add a DNS record for your FQDN or an NS record for $DNS_NAME: $NAME_SERVER"
  echo "👋 During development you can also use --resolve $ENV_GROUP_NAME.$DNS_NAME:443:$INGRESS_IP to resolve the hostname for your curl command"
}
# Delete all user-managed keys for every Apigee hybrid component service
# account in the project.
delete_apigee_keys() {
  for SA in mart cassandra udca metrics synchronizer logger watcher distributed-trace runtime
  do
    delete_sa_keys "apigee-${SA}"
  done
}
# Delete every USER_MANAGED key of service account $1 (short name, without
# the @<project>.iam.gserviceaccount.com suffix). System-managed keys are
# left untouched.
delete_sa_keys() {
  SA=$1
  for SA_KEY_NAME in $(gcloud iam service-accounts keys list --iam-account="${SA}@${PROJECT_ID}.iam.gserviceaccount.com" --format="get(name)" --filter="keyType=USER_MANAGED")
  do
    gcloud iam service-accounts keys delete "$SA_KEY_NAME" --iam-account="$SA@$PROJECT_ID.iam.gserviceaccount.com" -q
  done
}
|
def find_lengths(strings):
    """Return a list with the length of each element of ``strings``.

    Args:
        strings: An iterable of sized objects (typically strings).

    Returns:
        list[int]: The lengths, in the same order as the input.
    """
    return list(map(len, strings))
|
package com.oath.cyclops.anym.internal.adapters;
import com.oath.cyclops.anym.extensability.AbstractMonadAdapter;
import cyclops.stream.type.impl.ReactiveStreamX;
import cyclops.monads.AnyM;
import cyclops.monads.Witness;
import cyclops.monads.Witness.StreamWitness;
import cyclops.reactive.ReactiveSeq;
import cyclops.reactive.companion.Spouts;
import lombok.AllArgsConstructor;
import org.reactivestreams.Publisher;
import java.util.Iterator;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Stream;
import static cyclops.monads.AnyM.fromStream;
@AllArgsConstructor
public class ReactiveAdapter<W extends StreamWitness<W>> extends AbstractMonadAdapter<W> {
    // Factories for the underlying stream type; stored with raw/wildcard
    // generics and re-cast through the typed accessors below.
    private final Supplier<Stream<?>> empty;
    private final Function<?,Stream<?>> unit;
    private final Function<Iterator<?>,Stream<?>> unitIterator;
    private final W witness;

    // Shared adapter instance for reactive (push-based) ReactiveSeq streams.
    public final static ReactiveAdapter reactiveSeq =  new ReactiveAdapter(()->Spouts.of(), t->Spouts.of(t), it->(Stream)ReactiveSeq.fromIterator((Iterator)it),Witness.reactiveSeq.REACTIVE);

    // Typed views over the raw factory fields (unchecked casts by design).
    private <U> Supplier<Stream<U>> getEmpty(){
        return (Supplier)empty;
    }
    private <U> Function<U,Stream<U>>  getUnit(){
        return (Function)unit;
    }
    private <U> Function<Iterator<U>,Stream<U>>  getUnitIterator(){
        return (Function)unitIterator;
    }

    /** Expose the wrapped stream as a (re-iterable) Iterable. */
    @Override
    public <T> Iterable<T> toIterable(AnyM<W, T> t) {
        return ()->stream(t).iterator();
    }

    /** Keep only elements matching the predicate. */
    @Override
    public <T> AnyM<W, T> filter(AnyM<W, T> t, Predicate<? super T> fn) {
        return fromStream(stream(t).filter(fn),witness);
    }

    // Unwrap the AnyM back to the underlying ReactiveSeq.
    <T> ReactiveSeq<T> stream(AnyM<W,T> anyM){
        return anyM.unwrap();
    }

    /** An empty stream wrapped in AnyM. */
    @Override
    public <T> AnyM<W, T> empty() {
        return fromStream(this.<T>getEmpty().get(),witness);
    }

    /** Pairwise zip of two streams combined with {@code fn}. */
    @Override
    public <T, T2, R> AnyM<W, R> zip(AnyM<W, ? extends T> t, AnyM<W, ? extends T2> t2, BiFunction<? super T, ? super T2, ? extends R> fn) {
        return fromStream(stream(t).zip((a,b)->fn.apply(a,b),stream(t2)),witness);
    }

    /** Applicative apply: zip values with a stream of functions. */
    @Override
    public <T, R> AnyM<W, R> ap(AnyM<W,? extends Function<? super T,? extends R>> fn, AnyM<W, T> apply) {
        return fromStream(stream(apply).zip((a,b)->b.apply(a),stream(fn)),witness);
    }

    /** Monadic bind: map each element to a stream and flatten. */
    @Override
    public <T, R> AnyM<W, R> flatMap(AnyM<W, T> t,
                                     Function<? super T, ? extends AnyM<W, ? extends R>> fn) {
        return fromStream(((Stream)t.unwrap()).flatMap(fn.andThen(a-> (Stream)a.unwrap())),witness);
    }

    /**
     * Lift an Iterable into AnyM. ReactiveSeq and Publisher inputs are
     * adopted directly (a ReactiveStreamX keeps this adapter's witness,
     * any other ReactiveSeq falls back to the ITERATIVE witness);
     * everything else goes through the iterator factory.
     */
    @Override
    public <T> AnyM<W, T> unitIterable(Iterable<T> it)  {
        if(it instanceof ReactiveSeq){
            W witnessToUse = it instanceof ReactiveStreamX ? witness : (W)Witness.reactiveSeq.ITERATIVE;
            return fromStream((ReactiveSeq<T>)it,witnessToUse);
        }
        if(it instanceof Publisher){
            return fromStream(Spouts.from((Publisher)it),witness);
        }
        return fromStream(this.<T>getUnitIterator().apply(it.iterator()),witness);
    }

    /** Wrap a single value in a one-element stream. */
    @Override
    public <T> AnyM<W, T> unit(T o) {
        return fromStream(this.<T>getUnit().apply(o),witness);
    }

    /** Transform each element with {@code fn}. */
    @Override
    public <T, R> AnyM<W, R> map(AnyM<W, T> t, Function<? super T, ? extends R> fn) {
        return fromStream(((Stream)t.unwrap()).map(fn),witness);
    }
}
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-23 13:22
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Add a non-null ``pub_date`` column to the ``request`` model.

    Auto-generated migration. The hard-coded datetime below is the one-off
    default Django used to back-fill existing rows; ``preserve_default=False``
    means it is not kept on the model field afterwards.
    """

    dependencies = [
        ('practiceapp', '0018_auto_20160823_1554'),
    ]

    operations = [
        migrations.AddField(
            model_name='request',
            name='pub_date',
            field=models.DateTimeField(default=datetime.datetime(2016, 8, 23, 13, 22, 40, 103594, tzinfo=utc), verbose_name='date published'),
            preserve_default=False,
        ),
    ]
|
<reponame>Habens/cascade<gh_stars>10-100
package com.github.robindevilliers.welcometohell.steps;
import com.github.robindevilliers.cascade.annotations.*;
import com.github.robindevilliers.welcometohell.Utilities;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import static org.junit.Assert.assertEquals;
@Step(DateOfDeathQuestion.class)
@Narrative("Payment question.")
// Cascade test step: each inner class is one alternative answer to the
// payment question. The framework discovers these classes reflectively and
// injects the shared WebDriver via @Demands, so the repetitive structure is
// intentional. Only DEVILISH-DOLLARS leads to the VIP section; every other
// payment option routes to the Appeal page.
public interface SelectPayment {

    @Narrative("Enter devilish dollars as payment and go to the vip section page.")
    class DevilishDollars implements SelectPayment {

        @Demands
        private WebDriver webDriver;

        @When
        public void when() {
            // Pick the radio/input option and submit, then wait for navigation.
            Utilities.selectOption(webDriver, "input", "DEVILISH-DOLLARS");
            webDriver.findElement(By.cssSelector("button[type=submit]")).click();
            Utilities.waitForPage(webDriver);
        }

        @Then
        public void then() {
            assertEquals("Welcome to Hell | VIP Section", webDriver.getTitle());
        }
    }

    @Narrative("Enter heavenly half pennies as payment and go to the appeal page.")
    class HeavenlyHalfPennies implements SelectPayment {

        @Demands
        private WebDriver webDriver;

        @When
        public void when() {
            Utilities.selectOption(webDriver, "input", "HEAVENLY-HALF-PENNIES");
            webDriver.findElement(By.cssSelector("button[type=submit]")).click();
            Utilities.waitForPage(webDriver);
        }

        @Then
        public void then() {
            assertEquals("Welcome to Hell | Appeal", webDriver.getTitle());
        }
    }

    @Narrative("Enter earthly fiat as payment and go to the appeal page.")
    class EarthlyFiat implements SelectPayment {

        @Demands
        private WebDriver webDriver;

        @When
        public void when() {
            Utilities.selectOption(webDriver, "input", "EARTHLY-FIAT");
            webDriver.findElement(By.cssSelector("button[type=submit]")).click();
            Utilities.waitForPage(webDriver);
        }

        @Then
        public void then() {
            assertEquals("Welcome to Hell | Appeal", webDriver.getTitle());
        }
    }

    @Narrative("Enter gold as payment and go to the appeal page.")
    class Gold implements SelectPayment {

        @Demands
        private WebDriver webDriver;

        @When
        public void when() {
            Utilities.selectOption(webDriver, "input", "GOLD");
            webDriver.findElement(By.cssSelector("button[type=submit]")).click();
            Utilities.waitForPage(webDriver);
        }

        @Then
        public void then() {
            assertEquals("Welcome to Hell | Appeal", webDriver.getTitle());
        }
    }

    @Narrative("Enter nothing as payment and go to the appeal page.")
    class Nothing implements SelectPayment {

        @Demands
        private WebDriver webDriver;

        @When
        public void when() {
            Utilities.selectOption(webDriver, "input", "NOTHING");
            webDriver.findElement(By.cssSelector("button[type=submit]")).click();
            Utilities.waitForPage(webDriver);
        }

        @Then
        public void then() {
            assertEquals("Welcome to Hell | Appeal", webDriver.getTitle());
        }
    }
}
|
<reponame>amelvill-umich/libbat
#pragma once
#include <memory>
#include <mutex>
#include <ostream>
#include <unordered_map>
#include <vector>
#include <parallel_hashmap/phmap.h>
#include "abstract_array.h"
#include "attribute.h"
#include "ba_treelet.h"
#include "box.h"
#include "owned_array.h"
#include "plane.h"
#include "query_stats.h"
#include "radix_tree_node.h"
struct AggregationTree;
// Map a linear quality slider value onto a log10 curve so that e.g.
// 0.5 quality selects roughly 50% of the data: the tree doubles the amount
// of data at each level, so the inverse (logarithmic) mapping is applied to
// the requested quality. Result is clamped into [0, 1].
inline float remap_quality(const float q)
{
    const float remapped = std::log10(1.f + 9.f * q);
    return glm::clamp(remapped, 0.f, 1.f);
}
// Bounded-attribute tree over a point set: a coarse top-level radix tree
// whose leaves reference "treelets" that are loaded on demand during queries.
struct BATree {
    friend struct AggregationTree;

    // World-space bounds of the whole tree (root query volume)
    Box bounds;
    // Per-point attribute descriptions (name, data type, value range)
    std::vector<AttributeDescription> attributes;
    // Maps an attribute name to its index in `attributes`
    phmap::flat_hash_map<std::string, size_t> attrib_ids;
    // Raw tree file memory; presumably the backing store the handles below
    // and fetch_treelet decode from — TODO confirm against the loader
    ArrayHandle<uint8_t> tree_mem;
    // Nodes of the top-level coarse radix tree; leaf children index treelets
    ArrayHandle<RadixTreeNode> radix_tree;
    // Per-treelet offsets, used by fetch_treelet to locate treelet data
    ArrayHandle<uint64_t> treelet_offsets;
    // Dictionary of attribute bitmaps shared between nodes
    ArrayHandle<uint32_t> bitmap_dictionary;
    // Per-node index into `bitmap_dictionary`
    ArrayHandle<uint16_t> node_bitmap_ids;
    // May be null if not stored in file
    ArrayHandle<glm::vec2> node_attrib_ranges;
    // The treelets which have been loaded by the traversal
    phmap::flat_hash_map<uint64_t, std::shared_ptr<BATreelet>> loaded_treelets;
    // Total number of points stored in the tree
    size_t num_points = 0;
    // The number of LOD primitives stored for each node which has LOD primitives
    uint32_t num_lod_prims = 0;
    // The min/max levels of detail which can be queried from the tree. Since LOD
    // primitives aren't stored in the top-level coarse tree there's a min query
    // depth we have to do to get data (min_lod). max_lod is the max depth of any
    // path down to a leaf in the tree
    uint32_t min_lod = 0;
    uint32_t max_lod = 0;
    // Toggles for the two attribute-culling mechanisms used during traversal
    bool enable_range_filtering = true;
    bool enable_bitmap_filtering = true;

    BATree(const Box &bounds,
           const std::vector<AttributeDescription> &attributes,
           const ArrayHandle<uint8_t> &tree_mem,
           const ArrayHandle<RadixTreeNode> &radix_tree,
           const ArrayHandle<uint64_t> &treelet_offsets,
           const ArrayHandle<uint32_t> &bitmap_dictionary,
           const ArrayHandle<uint16_t> &node_bitmap_ids,
           const ArrayHandle<glm::vec2> &node_attrib_ranges,
           size_t num_points,
           uint32_t num_lod_prims,
           uint32_t min_lod,
           uint32_t max_lod);

    BATree() = default;

    // Query the particles contained in some box, retrieving just those particles
    // which are new for the selected quality level given the previous one
    template <typename Fn>
    QueryStats query_box_progressive(const Box &b,
                                     std::vector<AttributeQuery> *attrib_queries,
                                     float prev_quality,
                                     float current_quality,
                                     const Fn &callback);

    // Query all particles contained in some bounding box
    // The callback should take the point id, position, and list of attributes to read
    // the point's attributes from, if desired. The function signature should be:
    // void (const size_t id, const glm::vec3 &pos, const std::vector<Attribute> &attributes)
    template <typename Fn>
    QueryStats query_box(const Box &b,
                         std::vector<AttributeQuery> *attrib_queries,
                         float quality,
                         const Fn &callback);

    // For debugging/testing: query the splitting planes of the tree
    void get_splitting_planes(std::vector<Plane> &planes,
                              const Box &query_box,
                              std::vector<AttributeQuery> *attrib_queries,
                              float quality);

    // Utility function to iterate through the BATreelets and process them with the
    // callback. Mainly for the inspector, to get meta-data about the treelets
    template <typename Fn>
    void iterate_treelets(const Fn &fn);

private:
    // Query all particles in the bounding box, using the already provided query indices
    // The query value should have previously had the log scale applied
    template <typename Fn>
    void query_box_log(const Box &b,
                       std::vector<AttributeQuery> *attrib_queries,
                       const std::vector<size_t> &query_indices,
                       float prev_quality,
                       float current_quality,
                       const Fn &callback,
                       QueryStats &stats);

    // Get the splitting plans for the log-scaled quality level
    void get_splitting_planes_log(std::vector<Plane> &planes,
                                  const Box &query_box,
                                  std::vector<AttributeQuery> *attrib_queries,
                                  float quality);

    // True if node `n` can contain data matching the attribute queries
    // (used to cull subtrees during traversal); updates culling stats
    bool node_overlaps_query(uint32_t n,
                             const std::vector<AttributeQuery> &query,
                             const std::vector<size_t> &query_indices,
                             QueryStats &stats) const;

    // Return the treelet for `treelet_id`, loading and caching it in
    // `loaded_treelets` on first access — implementation not visible here
    std::shared_ptr<BATreelet> fetch_treelet(const size_t treelet_id);
};
// Progressive box query: resolve the requested attributes, remap the quality
// values onto the tree's log scale, and delegate the traversal to
// query_box_log. Only particles new between prev_quality and current_quality
// are reported to the callback.
template <typename Fn>
QueryStats BATree::query_box_progressive(const Box &b,
                                         std::vector<AttributeQuery> *attrib_queries,
                                         float prev_quality,
                                         float current_quality,
                                         const Fn &callback)
{
    QueryStats stats;
    if (!b.overlaps(bounds)) {
        return stats;
    }

    // Resolve each attribute query's name to its index and fill in the
    // metadata (data type, query bitmask) the traversal needs.
    std::vector<size_t> query_indices;
    if (attrib_queries) {
        for (auto &query : *attrib_queries) {
            auto it = attrib_ids.find(query.name);
            if (it == attrib_ids.end()) {
                throw std::runtime_error("Request for attribute " + query.name +
                                         " which does not exist");
            }
            const size_t attr_index = it->second;
            query_indices.push_back(attr_index);
            query.data_type = attributes[attr_index].data_type;
            query.bitmask = query.query_bitmask(attributes[attr_index].range);
        }
    }

    query_box_log(b,
                  attrib_queries,
                  query_indices,
                  remap_quality(prev_quality),
                  remap_quality(current_quality),
                  callback,
                  stats);
    return stats;
}
// Non-progressive convenience wrapper: a plain box query is just a
// progressive query starting from quality 0 (i.e., nothing previously
// fetched), so every particle up to `quality` is reported.
template <typename Fn>
QueryStats BATree::query_box(const Box &b,
                             std::vector<AttributeQuery> *attrib_queries,
                             float quality,
                             const Fn &callback)
{
    return query_box_progressive(b, attrib_queries, 0.f, quality, callback);
}
// Iterative (explicit-stack) traversal of the top-level radix tree for a box
// query whose quality values have already been remapped to the log scale.
// Internal children are culled by the splitting plane and, if attribute
// queries are given, by node_overlaps_query; leaf children name treelets,
// which are fetched on demand and queried recursively via BATreelet::query_box.
template <typename Fn>
void BATree::query_box_log(const Box &b,
                           std::vector<AttributeQuery> *attrib_queries,
                           const std::vector<size_t> &query_indices,
                           float prev_quality,
                           float current_quality,
                           const Fn &callback,
                           QueryStats &stats)
{
    if (!b.overlaps(bounds)) {
        return;
    }
    // Nothing new to report if the quality did not change (except the very
    // first query at quality 0, which must still traverse)
    if (prev_quality == current_quality && current_quality != 0.f) {
        return;
    }
    // Record how deep this quality level will descend, clamped to the valid
    // [min_lod, max_lod] range of the tree
    stats.query_depth = glm::clamp(
        uint32_t(current_quality * (max_lod - min_lod) + min_lod), min_lod, max_lod);
    const QueryQuality quality(prev_quality, current_quality, min_lod, max_lod);

    // Fixed-size traversal stack of deferred right children.
    // NOTE(review): stack_idx is not bounds-checked, so a radix tree deeper
    // than 64 levels would overflow — confirm the builder guarantees this.
    std::array<size_t, 64> node_stack = {0};
    size_t stack_idx = 0;
    size_t current_node = 0;
    while (true) {
        const RadixTreeNode &node = radix_tree->at(current_node);

        // Decide whether to descend the left child: an internal child is
        // tested against the splitting plane and the attribute filters; a
        // leaf child is a treelet, which is queried immediately instead.
        bool traverse_left = true;
        if (!node.left_leaf()) {
            traverse_left = b.lower[node.split_axis] < node.split_position;
            if (attrib_queries && traverse_left) {
                traverse_left = traverse_left &&
                                node_overlaps_query(
                                    node.left_child(), *attrib_queries, query_indices, stats);
            }
        } else {
            traverse_left = false;
            auto treelet = fetch_treelet(node.left_child());
            treelet->query_box(b, attrib_queries, query_indices, quality, callback, stats);
        }

        // Same logic for the right child, against the box's upper bound
        bool traverse_right = true;
        if (!node.right_leaf()) {
            traverse_right = b.upper[node.split_axis] > node.split_position;
            if (attrib_queries && traverse_right) {
                traverse_right =
                    traverse_right &&
                    node_overlaps_query(
                        node.right_child(), *attrib_queries, query_indices, stats);
            }
        } else {
            traverse_right = false;
            auto treelet = fetch_treelet(node.right_child());
            treelet->query_box(b, attrib_queries, query_indices, quality, callback, stats);
        }

        // If both overlap, descend both children following the left first
        if (traverse_left && traverse_right) {
            node_stack[stack_idx] = node.right_child();
            stack_idx++;
            current_node = node.left_child();
        } else if (traverse_left) {
            current_node = node.left_child();
        } else if (traverse_right) {
            current_node = node.right_child();
        } else {
            // Pop the stack to get the next node to traverse
            if (stack_idx > 0) {
                --stack_idx;
                current_node = node_stack[stack_idx];
            } else {
                break;
            }
        }
    }
}
// Visit every treelet in the tree in index order, loading each one on
// demand, and hand it to the callback. Mainly for the inspector, to gather
// per-treelet meta-data.
template <typename Fn>
void BATree::iterate_treelets(const Fn &fn)
{
    const size_t n_treelets = treelet_offsets->size();
    for (size_t tid = 0; tid < n_treelets; ++tid) {
        fn(fetch_treelet(tid));
    }
}
|
// Return a reversed copy of `list` without mutating the input.
// Bug fix: Array.prototype.reverse() reverses IN PLACE, so the previous
// implementation silently mutated the caller's array; copy first.
const reverseList = (list) => {
  return list.slice().reverse();
};

const list = [1, 2, 3, 4, 5];
console.log('Original list:', list);

const reversedList = reverseList(list);
console.log('Reversed list:', reversedList);
|
<reponame>tunderdomb/treant
// Test environment bootstrap: grab the globals exposed by the browser test
// runner (chai's assert plus mocha's it/describe) and fail fast if any of
// them is missing.
var assert = window.chai.assert
var it = window.it
var describe = window.describe
if (!it || !describe || !assert) {
  throw new Error("Invalid test environment")
}
// Fixture elements declared in the host test page; each id refers to a
// markup variant exercised by the specs below (e.g. pagination4 presumably
// contains four page-number sub components — see the array specs).
var pagination = document.getElementById("pagination")
var pagination2 = document.getElementById("pagination2")
var pagination3 = document.getElementById("pagination3")
var pagination4 = document.getElementById("pagination4")
var pagination5 = document.getElementById("pagination5")
var pagination6 = document.getElementById("pagination6")
var base = document.getElementById("base")
var extended = document.getElementById("extended")
var scope = document.getElementById("scope")
var scope2 = document.getElementById("scope2")
var customAttribute = document.getElementById("custom-attribute")
// Specs for the base treant.Component: construction, element binding,
// sub-component assignment, and event dispatching.
describe("Component()", function () {
  describe("constructor", function () {
    it("should work without arguments", function () {
      var component = new treant.Component()
      assert.isNull(component.element)
      assert.isObject(component.components)
    })
    it("should work without `new` keyword", function () {
      var component = treant.Component()
      assert.instanceOf(component, treant.Component)
    })
  })
  describe("instance.element", function () {
    it("should set the element as the component's element", function () {
      var component = new treant.Component(pagination)
      assert.equal(component.element, pagination)
    })
  })
  describe("prototype.assignSubComponents()", function () {
    it("should not run if autoAssign is disabled", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.autoAssign = false
      })
      var component = new Pagination(pagination)
      assert.isUndefined(component.components.pageNumber)
    })
  })
  describe("prototype.dispatch", function () {
    it("should dispatch a custom event", function () {
      var Pagination = treant.register("pagination")
      var component = new Pagination(pagination)
      var dispatched = false
      component.element.addEventListener("hello", function (e) {
        dispatched = true
      }, false)
      component.dispatch("hello")
      assert.isTrue(dispatched)
    })
    it("should dispatch a click event", function () {
      var Pagination = treant.register("pagination")
      var component = new Pagination(pagination)
      var dispatched = false
      component.element.addEventListener("click", function (e) {
        dispatched = true
      }, false)
      component.dispatch("click")
      assert.isTrue(dispatched)
    })
    it("should carry data", function () {
      var Pagination = treant.register("pagination")
      var component = new Pagination(pagination)
      var data = {
        hey: "ho"
      }
      var eventData = null
      // dispatch payloads are delivered via CustomEvent's `detail` field
      component.element.addEventListener("click", function (e) {
        eventData = e.detail
      }, false)
      component.dispatch("click", data)
      assert.equal(data, eventData)
    })
    it("should register an bubbling and non bubbling event definition", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.event("hey", {
          bubbles: true
        })
        internals.event("ho", {
          bubbles: false
        })
      })
      var component = new Pagination(pagination)
      var hey = false
      var ho = false
      // listening on document.body: only the bubbling event should arrive
      document.body.addEventListener("hey", function (e) {
        hey = true
      }, false)
      document.body.addEventListener("ho", function (e) {
        ho = true
      }, false)
      component.dispatch("hey")
      component.dispatch("ho")
      assert.isTrue(hey)
      assert.isFalse(ho)
    })
    it("should register a cancellable event definition", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.event("cancellable", {
          cancelable: true,
          bubbles: true
        })
        internals.event("notcancellable", {
          cancelable: false,
          bubbles: true
        })
      })
      var component = new Pagination(pagination)
      var cancellable = false
      var notcancellable = false
      // preventDefault() only sets defaultPrevented on cancelable events
      component.element.addEventListener("cancellable", function (e) {
        e.preventDefault()
      }, false)
      component.element.addEventListener("notcancellable", function (e) {
        e.preventDefault()
      }, false)
      document.body.addEventListener("cancellable", function (e) {
        cancellable = e.defaultPrevented
      }, false)
      document.body.addEventListener("notcancellable", function (e) {
        notcancellable = e.defaultPrevented
      }, false)
      component.dispatch("cancellable")
      component.dispatch("notcancellable")
      assert.isTrue(cancellable)
      assert.isFalse(notcancellable)
    })
  })
})
// Specs for registered (custom) components: custom constructors, plugins,
// options passing, inheritance from the base Component, and the shapes of
// the auto-assigned `components` map.
describe("CustomComponent", function () {
  // new CustomComponent()
  it("should call the custom constructor", function () {
    var called = false
    var Pagination = treant.register("pagination", {
      onCreate: function () {
        called = true
      }
    })
    new Pagination(pagination)
    assert.isTrue(called)
  })
  // new CustomComponent()+
  it("should work with plugins", function () {
    function testMethod () {}
    // plugins run with the prototype as `this`
    function plugin () {
      this.testMethod = testMethod
    }
    var Pagination = treant.register("pagination", plugin)
    new Pagination(pagination)
    assert.equal(Pagination.prototype.testMethod, testMethod)
  })
  // new CustomComponent()+
  it("should work with multiple plugins", function () {
    function testMethod () {}
    function testMethod2 () {}
    function plugin () {
      this.testMethod = testMethod
    }
    function plugin2 () {
      this.testMethod2 = testMethod2
    }
    var Pagination = treant.register("pagination", plugin, plugin2)
    new Pagination(pagination)
    assert.equal(Pagination.prototype.testMethod, testMethod)
    assert.equal(Pagination.prototype.testMethod2, testMethod2)
  })
  // new CustomComponent()
  it("should instantiate a custom component with the constructor", function () {
    var Pagination = treant.register("pagination")
    var component = new Pagination()
    assert.instanceOf(component, Pagination)
  })
  // new CustomComponent(Element)
  it("should set the element of the custom component to the first argument", function () {
    var Pagination = treant.register("pagination")
    var component = new Pagination(pagination)
    assert.equal(component.element, pagination)
  })
  // new CustomComponent(Element, {})
  it("should accept an options object as second argument", function () {
    var testOptions = {}
    var passedArguments = null
    var Pagination = treant.register("pagination", {
      onCreate: function (options) {
        passedArguments = options
      }
    })
    new Pagination(pagination, testOptions)
    assert.equal(testOptions, passedArguments)
  })
  // CustomComponent extend Component
  it("should be an instance of the base Component", function () {
    var Pagination = treant.register("pagination")
    var component = new Pagination(pagination)
    assert.instanceOf(component, treant.Component)
  })
  describe("instance.components", function () {
    // component.components.<subComponent>
    it("should auto assign single sub components by default", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.components.pageNumber = null
      })
      var component = new Pagination(pagination)
      assert.isDefined(component.components.pageNumber)
    })
    it("should work with dashed main component names", function () {
      var Pagination = treant.register("custom-pagination", function (internals) {
        internals.components.pageNumber = null
      })
      var component = new Pagination(customAttribute)
      assert.isDefined(component.components.pageNumber)
    })
    // component.components.<Element>
    it("should assign native dom elements for sub components", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.components.pageNumber = null
      })
      var component = new Pagination(pagination)
      assert.instanceOf(component.components.pageNumber, Element)
    })
    // component.components.[<subComponent>]
    it("should assign sub component arrays if defined", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.components.pageNumber = []
      })
      var component = new Pagination(pagination4)
      assert.isArray(component.components.pageNumber)
      assert.lengthOf(component.components.pageNumber, 4)
    })
    it("should assign default array for sub components", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.components.pageNumber = []
      })
      // pagination6 evidently has no page-number children: empty array expected
      var component = new Pagination(pagination6, {})
      assert.isArray(component.components.pageNumber)
      assert.lengthOf(component.components.pageNumber, 0)
    })
  })
})
// Specs for treant.register(): the factory that produces component
// constructors and maintains the component registry.
describe("register()", function () {
  it("should return a constructor", function () {
    var Pagination = treant.register("pagination")
    assert.isFunction(Pagination)
  })
  it("should work without a custom constructor", function () {
    var Pagination = treant.register("pagination")
    var component = new Pagination(pagination)
    // Fix: this spec previously created the component and asserted nothing,
    // so it could only fail by throwing. Assert the construction actually
    // produced a usable component bound to the fixture element.
    assert.instanceOf(component, Pagination)
    assert.equal(component.element, pagination)
  })
  it("should allow overwriting existing registry entries", function () {
    var Pagination = treant.register("pagination")
    var Pagination2 = treant.register("pagination")
    assert.notEqual(Pagination, Pagination2)
  })
})
// Specs for treant.component(): looking up component elements in the DOM
// (optionally scoped) and wrapping them in registered constructors.
describe("component()", function () {
  // .component() signatures
  // treant.component(String)
  it("should accept string as first argument and find the first component in the document with that name", function () {
    var component = treant.component("pagination")
    assert.equal(component.element, pagination)
  })
  // treant.component(String)
  it("should work dashed with component names", function () {
    var component = treant.component("custom-pagination")
    assert.equal(component.element, pagination2)
  })
  // treant.component(String)
  it("should create custom components if defined", function () {
    var Pagination = treant.register("pagination")
    var component = treant.component("pagination")
    assert.instanceOf(component, Pagination)
  })
  // treant.component(String, Element)
  it("should scope the search with the second argument", function () {
    var component = treant.component("pagination", scope)
    assert.equal(component.element, pagination3)
  })
  it("should create an array of components", function () {
    var Pagination = treant.register("pagination")
    // component.all() wraps every match under the scope element
    var components = treant.component.all("pagination", scope2)
    assert.isArray(components)
    assert.lengthOf(components, 4)
    assert.instanceOf(components[0], Pagination)
  })
})
// Specs for treant.storage: the element -> component-instance registry.
// Instances are evidently saved automatically on construction — the explicit
// save() calls below are commented out and the lookups still succeed.
describe("storage", function () {
  it("should save and retrieve the component by element", function () {
    var Pagination = treant.register("pagination")
    var p = new Pagination(pagination)
    //treant.storage.save(p)
    assert.equal(p, treant.storage.get(pagination, "pagination"))
  })
  it("should save and remove the component by element", function () {
    var Pagination = treant.register("pagination")
    var p = new Pagination(pagination2)
    //treant.storage.save(p)
    treant.storage.remove(p)
    assert.isNull(treant.storage.get(pagination2, "pagination"))
  })
})
// Specs for the Internals configuration object handed to register()
// callbacks: prototype merging, lifecycle hooks, attributes, actions,
// and inheritance. (Closed at the end of the extend specs below.)
describe("Internals", function () {
  it("should work with prototype object", function () {
    function testMethod () {}
    var Pagination = treant.register("pagination", {
      testMethod: testMethod,
      testProperty: 1
    })
    var component = new Pagination(pagination)
    assert.equal(component.testMethod, testMethod)
    assert.equal(component.testProperty, 1)
  })
  it("should create a constructor via a prototype object", function () {
    var testOptions = {}
    var passedOptions = null
    function testConstructor (options) {
      passedOptions = options
    }
    var Pagination = treant.register("pagination", {
      onCreate: testConstructor
    })
    var component = new Pagination(pagination, testOptions)
    // onCreate is consumed as the constructor, not copied to the prototype
    assert.isUndefined(component.onCreate)
    assert.equal(testOptions, passedOptions)
  })
it("should create a constructor via a internals", function () {
var testOptions = {}
var passedOptions = null
function testConstructor (options) {
passedOptions = options
}
var Pagination = treant.register("pagination", function (interals) {
interals.onCreate(testConstructor)
})
var component = new Pagination(pagination, testOptions)
assert.isUndefined(component.onCreate)
assert.equal(testOptions, passedOptions)
})
it("should be chainable", function () {
var Pagination = treant.register("pagination", function (internals) {
internals
.onCreate(function (options) {
})
.event("close", {})
.attribute("value", 2)
.method("heyHo", function () {
this.letsGo()
})
.proto({
letsGo: function () {
this.heyHo()
}
})
})
var component = new Pagination(pagination)
assert.isFunction(component.heyHo)
assert.isFunction(component.letsGo)
})
it("should be available on the constructor", function () {
var Pagination = treant
.register("pagination")
Pagination
.onCreate(function (options) {})
.event("close", {})
.attribute("value", 2)
.method("heyHo", function () {
this.letsGo()
})
.proto({
letsGo: function () {
this.heyHo()
}
})
var component = new Pagination(pagination)
assert.isFunction(component.heyHo)
assert.isFunction(component.letsGo)
})
it("should enable auto assign", function () {
var Pagination = treant.register("pagination", function (internals) {
internals.autoAssign = true
})
var component = new Pagination(pagination)
assert.isDefined(component.components.pageNumber)
})
it("should disable auto assign", function () {
var Pagination = treant.register("pagination", function (internals) {
internals.autoAssign = false
})
var component = new Pagination(pagination)
assert.isUndefined(component.components.pageNumber)
})
it("should not convert sub components", function () {
var Pagination = treant.register("pagination", function (internals) {
internals.convertSubComponents = false
})
var component = new Pagination(pagination)
assert.instanceOf(component.components.pageNumber, Element)
})
it("should collect all sub components into an array", function () {
var Pagination = treant.register("pagination", function (internals) {
internals.components.pageNumber = []
})
var component = new Pagination(pagination)
assert.isArray(component.components.pageNumber)
})
  // internals.attribute(name[, options|default]) declares a reflected DOM
  // attribute exposed as a camelCased instance property. A value already
  // present on the element wins over any declared default.
  describe("attributes", function () {
    it("should define a custom attribute", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("custom-string")
      })
      var component = new Pagination(pagination5)
      assert.isDefined(component.customString)
    })
    it("should define a string attribute", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("custom-string")
      })
      var component = new Pagination(pagination5)
      assert.isString(component.customString)
      assert.equal(component.customString, "hello")
    })
    it("should define a number attribute", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("number", {
          type: "number"
        })
      })
      var component = new Pagination(pagination5)
      assert.isDefined(component.number)
      assert.isNumber(component.number)
      assert.equal(component.number, 10)
    })
    it("should define a number attribute with a default number value", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("number", {
          'default': 20
        })
      })
      // the DOM value (evidently number="10" on pagination5) beats the default
      var component = new Pagination(pagination5)
      assert.isDefined(component.number)
      assert.isNumber(component.number)
      assert.equal(component.number, 10)
    })
it("should define a number attribute with a default number value", function () {
var Pagination = treant.register("pagination", function (internals) {
internals.attribute("number", 15)
})
var component = new Pagination(pagination5)
assert.isDefined(component.number)
assert.isNumber(component.number)
assert.equal(component.number, 10)
})
    it("should define a boolean attribute", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("boolean", {
          type: "boolean"
        })
      })
      var component = new Pagination(pagination5)
      assert.isDefined(component.boolean)
      assert.isBoolean(component.boolean)
      assert.equal(component.boolean, true)
    })
    // The next three specs run against `pagination`, which carries none of
    // these attributes, so the declared defaults apply.
    it("should have a default string value", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("custom-string", "hello")
      })
      var component = new Pagination(pagination)
      assert.isDefined(component.customString)
      assert.isString(component.customString)
      assert.equal(component.customString, "hello")
    })
    it("should have a default number value", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("number", 10)
        internals.attribute("number2", {
          'default': 20
        })
      })
      var component = new Pagination(pagination)
      // the default is also written back onto the element as a string
      assert.equal(pagination.getAttribute("number"), "10")
      assert.isDefined(component.number)
      assert.isNumber(component.number)
      assert.equal(component.number, 10)
      assert.isDefined(component.number2)
      assert.isNumber(component.number2)
      assert.equal(component.number2, 20)
    })
    it("should define a default boolean value", function () {
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("boolean", true)
        internals.attribute("boolean2", false)
        internals.attribute("boolean3", {
          'default': true
        })
        internals.attribute("boolean4", {
          'default': false
        })
      })
      var component = new Pagination(pagination)
      assert.isDefined(component.boolean)
      assert.isBoolean(component.boolean)
      assert.equal(component.boolean, true)
      assert.isDefined(component.boolean2)
      assert.isBoolean(component.boolean2)
      assert.equal(component.boolean2, false)
      assert.isDefined(component.boolean3)
      assert.isBoolean(component.boolean3)
      assert.equal(component.boolean3, true)
      assert.isDefined(component.boolean4)
      assert.isBoolean(component.boolean4)
      assert.equal(component.boolean4, false)
    })
    it("should call the onchange callback if the value changed", function () {
      var oldValue = "old"
      var newValue = "new"
      var called = false
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("test", {
          default: oldValue,
          onchange: function (old, value) {
            called = true
          }
        })
      })
      var component = new Pagination(pagination6)
      assert.isFalse(called)
      // assigning the same value must NOT fire onchange
      component.test = oldValue
      assert.isFalse(called)
      component.test = newValue
      assert.isTrue(called)
    })
    it("should call the onchange callback with proper arguments", function () {
      var oldValue = false
      var newValue = true
      var testOldValue = null
      var testNewValue = null
      var Pagination = treant.register("pagination", function (internals) {
        internals.attribute("test2", {
          default: oldValue,
          onchange: function (old, value) {
            testOldValue = old
            testNewValue = value
          }
        })
      })
      var component = new Pagination(pagination6)
      assert.equal(component.test2, oldValue)
      component.test2 = newValue
      assert.equal(component.test2, newValue)
      assert.equal(testOldValue, oldValue)
      assert.equal(testNewValue, newValue)
    })
  })
describe("action", function () {
it("should delegate an event", function () {
var called = false
var Pagination = treant.register("pagination", function (internals) {
internals.components.pageNumber = null
internals.action("click").match(":page-number", function (e, pageNumber) {
called = true
})
})
var component = new Pagination(pagination)
component.components.pageNumber
.dispatchEvent(new window.CustomEvent("click", {bubbles: true}))
assert.isTrue(called)
})
it("should call it with the instance as context", function () {
var context
var Pagination = treant.register("pagination", function (internals) {
internals.components.pageNumber = null
internals.action("click").match(":page-number", function (e, pageNumber) {
context = this
})
})
var component = new Pagination(pagination)
component.components.pageNumber
.dispatchEvent(new window.CustomEvent("click", {bubbles: true}))
assert.equal(context, component)
})
it("should pass requested component", function () {
var passedArg
var Pagination = treant.register("pagination", function (internals) {
internals.components.pageNumber = null
internals.action("click").match(":page-number", function (e, pageNumber) {
passedArg = pageNumber
})
})
var component = new Pagination(pagination)
component.components.pageNumber
.dispatchEvent(new window.CustomEvent("click", {bubbles: true}))
assert.equal(passedArg, component.components.pageNumber)
})
it("should continue with matchers", function () {
var continued = false
var Pagination = treant.register("pagination", function (internals) {
internals.components.pageNumber = null
internals
.action("click")
.match(":page-number", function (e, pageNumber) {})
.match(function (e, pageNumber) {
continued = true
})
})
var component = new Pagination(pagination)
component.components.pageNumber
.dispatchEvent(new window.CustomEvent("click", {bubbles: true}))
assert.isTrue(continued)
})
it("should stop matching if a handler returns false", function () {
var continued = false
var Pagination = treant.register("pagination", function (internals) {
internals.components.pageNumber = null
internals
.action("click")
.match(":page-number", function (e, pageNumber) {
return false
})
.match(function (e) {
continued = true
})
})
var component = new Pagination(pagination)
component.components.pageNumber
.dispatchEvent(new window.CustomEvent("click", {bubbles: true}))
assert.isFalse(continued)
})
})
describe("extend", function () {
it("should be an instance of the base component", function () {
var Base = treant.register("base")
var Extended = treant.register("extended", Base)
var base = new Base(base)
var extended = new Extended(extended)
assert.instanceOf(extended, Base)
})
    it("should be inherit components", function () {
      var Base = treant.register("base", function (Base) {
        Base.components.title = null
        Base.components.button = []
      })
      var Extended = treant.register("extended", Base, function (Extended) {
        Extended.components.extra = null
      })
      var b = new Base(base)
      var e = new Extended(extended)
      // sub-component declarations carry over to the derived constructor
      assert.isDefined(Extended.components.title)
      assert.isDefined(Extended.components.button)
      assert.isArray(Extended.components.button)
      assert.isDefined(e.components.title)
      assert.isDefined(e.components.button)
      assert.isArray(e.components.button)
    })
    it("should ignore base-named sub component", function () {
      var Base = treant.register("base", function (Base) {
        Base.components.title = null
        Base.components.button = []
      })
      var Extended = treant.register("extended", Base, function (Extended) {
        Extended.components.extra = null
      })
      var b = new Base(base)
      var e = new Extended(extended)
      // only the extended element's own button is collected
      assert.lengthOf(e.components.button, 1)
    })
    it("should inherit prototype", function () {
      function test () {}
      var Base = treant.register("base", {
        test: test
      }, function (Base) {
        // get/set declare accessor properties on the prototype
        Base.get("zero", function () {
          return 0
        })
        Base.set("number", function (number) {
          return this._number = number
        })
      })
      var Extended = treant.register("extended", Base)
      var b = new Base(base)
      var e = new Extended(extended)
      assert.equal(e.test, b.test)
      assert.equal(e.zero, 0)
      e.number = 2
      assert.equal(e._number, 2)
    })
    it("should inherit constructors", function () {
      var Base = treant.register("base", function (Base) {
        Base.onCreate(function () {
          this.hey = "ho"
        })
      })
      var Extended = treant.register("extended", Base, function (Extended) {
        Extended.onCreate(function () {
          this.lets = "go"
        })
      })
      var b = new Base(base)
      var e = new Extended(extended)
      // both the base and the derived onCreate hooks run on construction
      assert.equal(e.hey, "ho")
      assert.equal(e.lets, "go")
    })
    it("should inherit attributes", function () {
      var Base = treant.register("base", function (Base) {
        Base.attribute("base-attribute", false)
      })
      var Extended = treant.register("extended", Base)
      var b = new Base(base)
      var e = new Extended(extended)
      // true despite the false default: the #extended fixture evidently
      // carries base-attribute in its markup — see fixture page
      assert.equal(e.baseAttribute, true)
      e.baseAttribute = false
      assert.equal(e.baseAttribute, false)
    })
    it("should inherit actions", function () {
      var called = false
      var passedComponent = null
      var Base = treant.register("base", function (Base) {
        Base.action("hey").match(":button", function (e, button) {
          called = true
          passedComponent = button
        })
      })
      var Extended = treant.register("extended", Base)
      var b = new Base(base)
      var e = new Extended(extended)
      e.components.button.dispatchEvent(new window.CustomEvent("hey", {bubbles: true}))
      assert.isTrue(called)
      assert.equal(passedComponent, e.components.button)
    })
  })
})
// Tests for the low-level DOM hook helpers (attribute-based component lookup).
// They rely on fixture elements (pagination, pagination2..4, scope, scope2,
// customAttribute) created elsewhere in this file.
describe("hook", function () {
    // setHookAttribute()
    describe("setHookAttribute()", function () {
        it("should be able to change hook attribute", function () {
            // Switch lookups to the "component" attribute, verify the matching
            // fixture is found, then restore the default attribute.
            treant.hook.setHookAttribute("component")
            var element = treant.hook.findComponent("pagination")
            assert.equal(element, customAttribute)
            treant.hook.setHookAttribute("data-component")
        })
    })
    // createComponentSelector()
    //describe("createComponentSelector()", function () {
    //    it("should return a component selector that matches components", function () {})
    //})
    // findComponent()
    describe("findComponent()", function () {
        it("should find a component in the document", function () {
            var element = treant.hook.findComponent("pagination")
            assert.equal(element, pagination)
        })
        it("should find a component in the given element", function () {
            // With a scope element, lookup is restricted to its subtree.
            var element = treant.hook.findComponent("pagination", scope)
            assert.equal(element, pagination3)
        })
    })
    // findAllComponent()
    describe("findAllComponent()", function () {
        it("should find all components in the document", function () {
            var elements = treant.hook.findAllComponents("pagination", scope2)
            assert.isArray(elements)
            assert.lengthOf(elements, 4)
        })
        it("should find all components in the given element", function () {
            var elements = treant.hook.findAllComponents("pagination", scope)
            assert.isArray(elements)
            assert.lengthOf(elements, 1)
        })
    })
    // findSubComponents()
    describe("findSubComponents()", function () {
        it("should find the sub components of a given element", function () {
            var elements = treant.hook.findSubComponents("pagination", pagination)
            assert.isArray(elements)
            assert.lengthOf(elements, 1)
            elements = treant.hook.findSubComponents("pagination", pagination4)
            assert.lengthOf(elements, 4)
        })
    })
    // getComponentName()
    describe("getComponentName()", function () {
        it("should return the full component name", function () {
            var name = treant.hook.getComponentName(pagination)
            assert.equal(name, "pagination")
        })
        it("should return the full component name of a sub component camelized", function () {
            var element = treant.hook.findSubComponents("pagination", pagination)[0]
            var name = treant.hook.getComponentName(element)
            assert.equal(name, "pagination:pageNumber")
        })
        it("should return the full component name of a sub component raw", function () {
            // Passing false disables camelization of the name.
            var element = treant.hook.findSubComponents("pagination", pagination)[0]
            var name = treant.hook.getComponentName(element, false)
            assert.equal(name, "pagination:page-number")
        })
    })
    // getMainComponentName()
    describe("getMainComponentName()", function () {
        it("should return the component name of a main element", function () {
            var name = treant.hook.getMainComponentName(pagination)
            assert.equal(name, "pagination")
        })
        it("should return the main part of a sub component", function () {
            var element = treant.hook.findSubComponents("pagination", pagination)[0]
            var name = treant.hook.getMainComponentName(element)
            assert.equal(name, "pagination")
        })
        it("should return the main part of a sub component camelized", function () {
            var element = treant.hook.findSubComponents("custom-pagination", pagination2)[0]
            var name = treant.hook.getMainComponentName(element)
            assert.equal(name, "customPagination")
        })
        it("should return the main part of a sub component raw", function () {
            var element = treant.hook.findSubComponents("custom-pagination", pagination2)[0]
            var name = treant.hook.getMainComponentName(element, false)
            assert.equal(name, "custom-pagination")
        })
    })
    // getSubComponentName()
    describe("getSubComponentName()", function () {
        it("should return an empty string for a main element", function () {
            var name = treant.hook.getSubComponentName(pagination)
            assert.equal(name, "")
        })
        it("should return the sub part of a sub component", function () {
            var element = treant.hook.findSubComponents("pagination", pagination)[0]
            var name = treant.hook.getSubComponentName(element)
            assert.equal(name, "pageNumber")
        })
        it("should return the sub part of a sub component camelized", function () {
            var element = treant.hook.findSubComponents("custom-pagination", pagination2)[0]
            var name = treant.hook.getSubComponentName(element)
            assert.equal(name, "pageNumber")
        })
        it("should return the sub part of a sub component raw", function () {
            var element = treant.hook.findSubComponents("custom-pagination", pagination2)[0]
            var name = treant.hook.getSubComponentName(element, false)
            assert.equal(name, "page-number")
        })
    })
    // assignSubComponents()
    describe("assignSubComponents()", function () {
        it("should assign sub component to an object", function () {
            var object = {}
            var elements = treant.hook.findSubComponents("pagination", pagination)
            treant.hook.assignSubComponents(object, elements)
            assert.isDefined(object.pageNumber)
        })
        it("should assign raw nodes by default", function () {
            // Assigned values are plain DOM Elements, not wrapped components.
            var object = {}
            var elements = treant.hook.findSubComponents("pagination", pagination)
            treant.hook.assignSubComponents(object, elements)
            assert.isDefined(object.pageNumber)
            assert.instanceOf(object.pageNumber, Element)
        })
    })
    // filter()
    //describe("filter()", function () {
    //    it("should filter components in an array", function () {})
    //})
})
|
package main
import tgbotapi "github.com/go-telegram-bot-api/telegram-bot-api/v5"
// keyboards maps a named reply template to its Telegram inline keyboard.
// The second argument of each button is the callback data sent back to the
// bot when the button is pressed.
var keyboards = map[string]tgbotapi.InlineKeyboardMarkup{
	// Yes / No / shrug answers for sugaroid prompts.
	"sugaroid:yesno": tgbotapi.NewInlineKeyboardMarkup(
		[]tgbotapi.InlineKeyboardButton{
			tgbotapi.NewInlineKeyboardButtonData("Yes", "yes"),
			tgbotapi.NewInlineKeyboardButtonData("No", "no"),
			tgbotapi.NewInlineKeyboardButtonData("🤷", "idk"),
		},
	),
}
|
#!/bin/bash -x
# Bootstrap script for a ChromeOS swarming bot: generates ssh keys for
# chrome-bot, checks out a pinned depot_tools revision, downloads the
# swarming bot zip and runs it.

# Exit with a distinct code when asked to stop via SIGUSR1.
trap "exit 10" SIGUSR1

SWARM_DIR=/b/swarming
SWARM_ZIP=swarming_bot.zip
DEPOT_TOOLS_DIR=/b/depot_tools
DEPOT_TOOLS_URL="https://chromium.googlesource.com/chromium/tools/depot_tools.git"
DEPOT_TOOLS_REV="da3a29e13e816459234b0b08ed1059300bae46dd"

if [ -z "$CROS_SSH_ID_FILE_PATH" ] ; then
  echo "Must specify path to ssh keys via CROS_SSH_ID_FILE_PATH env var"
  exit 1
else
  # Pass an empty password via "-N ''".
  su -c "/usr/bin/ssh-keygen -f $CROS_SSH_ID_FILE_PATH -N '' -t ed25519" chrome-bot
fi

# Some chromium tests need depot tools.
mkdir -p $DEPOT_TOOLS_DIR
chown chrome-bot:chrome-bot $DEPOT_TOOLS_DIR
# Fetch only the pinned revision.  "remote add" is followed by ";" rather
# than "&&" so a re-run does not abort when the remote already exists.
su -c "cd $DEPOT_TOOLS_DIR && \
  /usr/bin/git init && \
  /usr/bin/git remote add origin $DEPOT_TOOLS_URL ; \
  /usr/bin/git fetch origin $DEPOT_TOOLS_REV && \
  /usr/bin/git reset --hard FETCH_HEAD" chrome-bot

mkdir -p $SWARM_DIR
chown chrome-bot:chrome-bot $SWARM_DIR
cd $SWARM_DIR
rm -rf swarming_bot*.zip
# NOTE(review): SWARM_URL is expected to come from the environment that
# launches this script — confirm with the provisioning setup.
su -c "/usr/bin/curl -sSLOJ $SWARM_URL" chrome-bot
echo "Starting $SWARM_ZIP"
# Run the swarming bot in the background, and immediately wait for it. This
# allows the signal trapping to actually work.
su -c "/usr/bin/python $SWARM_ZIP start_bot" chrome-bot &
wait %1
exit $?
|
import 'mocha';
import { helloWorld, helloUser } from '../src/handlers/handler';
import { StubEvent, StubContext } from '../stubs/hello';
import * as expect from 'expect.js';

// These tests exercise callback-style lambda handlers.  Each test now takes
// mocha's `done` callback: previously the assertions ran inside the handler
// callback without signalling completion, so an asynchronous handler could
// invoke the callback after the test had already passed vacuously, and a
// thrown assertion inside the callback would surface as an uncaught error
// instead of a test failure.  The try/catch routes assertion failures to
// `done`; a handler error is also forwarded via `done(error)`.

describe('helloWorld', function () {
    // Intentionally left undefined: helloWorld ignores the event payload.
    let event: StubEvent;
    let context: StubContext = new StubContext('helloWorld');

    it('should return a status code of 200', function (done) {
        helloWorld(event, context, function (error, result) {
            try {
                expect(result!.statusCode).to.eql(200);
                done(error);
            } catch (e) {
                done(e);
            }
        });
    });

    it('should return a message body with "Hello World!"', function (done) {
        helloWorld(event, context, function (error, result) {
            try {
                let responseBody = JSON.parse(result!.body);
                expect(responseBody.message).to.eql('Hello World!');
                done(error);
            } catch (e) {
                done(e);
            }
        });
    });
});

describe('helloUser', function () {
    // JSON request body carrying the name the handler should greet.
    let body: string = '{"name": "Jane"}';
    let event: StubEvent = new StubEvent(body);
    let context: StubContext = new StubContext('helloUser');

    it('should return a status code of 200', function (done) {
        helloUser(event, context, function (error, result) {
            try {
                expect(result!.statusCode).to.eql(200);
                done(error);
            } catch (e) {
                done(e);
            }
        });
    });

    it('should return a message body with "Hello {User}!"', function (done) {
        helloUser(event, context, function (error, result) {
            try {
                let responseBody = JSON.parse(result!.body);
                expect(responseBody.message).to.eql('Hello Jane!');
                done(error);
            } catch (e) {
                done(e);
            }
        });
    });
});
|
// Copyright 2015 <NAME>.
//
// Distributed under the Boost Software License, Version 1.0.
//
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
#include <boost/mp11/detail/config.hpp>
#if BOOST_MP11_MSVC
# pragma warning( disable: 4503 ) // decorated name length exceeded
#endif
#include <boost/mp11/algorithm.hpp>
#include <boost/mp11/list.hpp>
#include <boost/mp11/integral.hpp>
#include <boost/core/lightweight_test_trait.hpp>
#include <type_traits>
#include <tuple>
#include <utility>
// Marker type used as the element under test.
struct X1 {};

using boost::mp11::mp_bool;

// Metafunction: true when T::value is even.
template<class T> using is_even = mp_bool< T::value % 2 == 0 >;

// Compile-time tests for mp_count_if over mp_list, std::tuple and std::pair,
// plus a large mp_iota_c list as a stress case for template depth.
int main()
{
    using boost::mp11::mp_list;
    using boost::mp11::mp_count_if;
    using boost::mp11::mp_size_t;

    {
        // Empty list: count is zero for any predicate.
        using L1 = mp_list<>;
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L1, std::is_const>, mp_size_t<0>>));

        using L2 = mp_list<X1, X1 const, X1*, X1 const, X1*, X1*>;
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_volatile>, mp_size_t<0>>));
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_const>, mp_size_t<2>>));
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_pointer>, mp_size_t<3>>));
    }

    {
        // Same cases over std::tuple as the list type.
        using L1 = std::tuple<>;
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L1, std::is_const>, mp_size_t<0>>));

        using L2 = std::tuple<X1, X1 const, X1*, X1 const, X1*, X1*>;
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_volatile>, mp_size_t<0>>));
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_const>, mp_size_t<2>>));
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_pointer>, mp_size_t<3>>));
    }

    {
        // std::pair works as a two-element list.
        using L2 = std::pair<X1 const, X1*>;
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_volatile>, mp_size_t<0>>));
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_const>, mp_size_t<1>>));
        BOOST_TEST_TRAIT_TRUE((std::is_same<mp_count_if<L2, std::is_pointer>, mp_size_t<1>>));
    }

    {
        // Large list: 0..N-1 contains (N + 1) / 2 even values for odd N.
        using boost::mp11::mp_iota_c;

        int const N = 1089;

        using L = mp_iota_c<N>;
        using R = mp_count_if<L, is_even>;

        BOOST_TEST_TRAIT_TRUE((std::is_same<R, mp_size_t<(N + 1) / 2>>));
    }

    return boost::report_errors();
}
|
def evaluateTree(root):
    """Recursively evaluate a binary expression tree.

    Leaf nodes hold integer literals (as strings via ``root.val``);
    internal nodes hold one of the operators '+', '-', '*' or '/'.

    Returns the numeric value of the expression rooted at ``root``
    (0 for an empty tree).  Division uses Python's true division, so
    a '/' node may yield a float.
    """
    if root is None:
        return 0
    # A leaf carries an operand rather than an operator.
    if root.left is None and root.right is None:
        return int(root.val)
    # Evaluate both subtrees, then combine by operator.
    lhs = evaluateTree(root.left)
    rhs = evaluateTree(root.right)
    if root.val == '+':
        return lhs + rhs
    if root.val == '-':
        return lhs - rhs
    if root.val == '*':
        return lhs * rhs
    return lhs / rhs
|
from example.movies.models import Movie as MovieDB
from djangosolr.documents import Document, TextField
class Movie(Document):
    """Solr document definition mirroring the Movie Django model.

    NOTE(review): field and Meta semantics follow djangosolr conventions —
    confirm against the djangosolr documentation.
    """
    # Full-text field; stored=False means it is indexed but not returned
    # in query results.
    text = TextField(stored=False)
    class Meta:
        # Source Django model backing this document.
        model = MovieDB
        # Solr document "type" discriminator value.
        type = 'movie'
|
//https://developer.mozilla.org/en-US/docs/Web/Events
// Subset of standard DOM event names this module recognises.
// https://developer.mozilla.org/en-US/docs/Web/Events
export const domEventsList = [
    'focus', 'blur', 'resize', 'scroll',
    'keydown', 'keypress', 'keyup',
    'mouseenter', 'mousemove', 'mousedown', 'mouseover', 'mouseup',
    'click', 'dblclick', 'contextmenu', 'wheel',
    'mouseleave', 'mouseout',
    'select', 'change', 'input'
];

// True when `e` is one of the DOM event names listed above.
export const isDomEvent = (e) => domEventsList.indexOf(e) !== -1;
|
<reponame>kweiberth/paypal-messaging-components
const fs = require('fs');
const got = require('got');

// Development-only accounts mapped to [country, local banner fixture path].
const devAccountMap = {
    DEV00000000NI: ['US', 'ni'],
    DEV000NINONUS: ['US', 'ni_non-us'],
    DEV0000000EAZ: ['US', 'ezp_any_eqz'],
    DEV0000000EAG: ['US', 'ezp_any_gtz'],
    DEV0000000PSZ: ['US', 'pala_single_eqz'],
    DEV0000000PSG: ['US', 'pala_single_gtz'],
    DEV0000000PMZ: ['US', 'pala_multi_eqz'],
    DEV0000000PMG: ['US', 'pala_multi_gtz'],
    DEV0000000IAZ: ['DE', 'inst_any_eqz'],
    DEV0000000IAG: ['DE', 'inst_any_gtz'],
    DEV000000PQAG: ['DE', 'palaq_any_gtz'],
    DEV000000PQAZ: ['DE', 'palaq_any_eqz']
};

// Express middleware: serves local banner fixtures for dev accounts and
// transparently proxies every other request to the live imadserver.
module.exports = function proxyImadserv(app) {
    app.get('/imadserver/upstream', (req, res) => {
        const { call, currency_value: amount = 0, dimensions } = req.query;
        // Either pub_id or client_id may identify the account.
        const account = req.query.pub_id ? req.query.pub_id : req.query.client_id;

        if (devAccountMap[account]) {
            // Known dev account: read the matching local fixture
            // (the 'x199x99' dimension always maps to the ni banner).
            const banner =
                dimensions !== 'x199x99'
                    ? fs.readFileSync(`banners/${devAccountMap[account].join('/')}.json`, 'utf-8')
                    : fs.readFileSync(`banners/ni.json`, 'utf-8');
            const bannerJSON = JSON.parse(banner);

            // Template variables substituted into the fixture markup.
            const morsVars = {
                total_payments: 12,
                formattedMonthlyPayment: `$${Number(amount / 12).toFixed(2)}`
            };

            // Replace every ${CREDIT_OFFERS_DS.<var>} placeholder and strip
            // newlines so the payload is a single line.
            const populateVars = str =>
                Object.entries(morsVars)
                    .reduce(
                        (accumulator, [morsVar, val]) =>
                            accumulator.replace(new RegExp(`\\\${CREDIT_OFFERS_DS.${morsVar}}`, 'g'), val),
                        str
                    )
                    .replace(/\r\n|\r|\n/g, '');

            const populatedBanner = Object.entries(bannerJSON).reduce((accumulator, [key, value]) => {
                return {
                    ...accumulator,
                    [key]: populateVars(JSON.stringify(value))
                };
            }, {});

            // Wrap in the JSONP envelope the client-side `call` expects.
            const wrappedMarkup = JSON.stringify({
                content: {
                    json: populatedBanner
                },
                tracking_details: {
                    click_url: '',
                    impression_url: ''
                }
            });

            res.send(`${call}(${wrappedMarkup})`);
        } else {
            // Unknown account: forward the original query string upstream.
            const query = Object.entries(req.query)
                .reduce((accumulator, [key, val]) => `${accumulator}&${key}=${val}`, '')
                .slice(1);

            got(`https://www.paypal.com/imadserver/upstream?${query}`)
                .then(({ body, headers }) => {
                    // Body is already decoded; the original encoding header
                    // would corrupt the response if passed through.
                    delete headers['content-encoding']; // eslint-disable-line no-param-reassign
                    res.set(headers);
                    res.send(body);
                })
                .catch(err => console.log(err) || res.status(404).send());
        }
    });
};
|
<reponame>OswaldoRodriguez123/easy-dance-app<filename>public/assets/js/eosMenu.js
/*
 * Jquery eosMenu 1.0
 * tangpanqing
 * https://github.com/tangpanqing/eosMenu
 * released under MIT license
 * last update 2016-09-17 23:00:00
 */
(function($) {
    $.fn.extend({
        // jQuery plugin: accordion-style sidebar menu with configurable
        // colors, sizes and click callbacks.
        "eosMenu": function(options) {
            var defaluts = {
                fontSize: '14px',           // font size
                color: '#eee',              // font color
                hoverColor: '#eee',         // font color on hover
                background: '#2F4050',      // background color
                subBackground: '#263442',   // sub-menu background color
                hoverBackground: '#293744', // background color on hover
                height: '40px',             // total height of each row
                lineHeight: '40px',         // line height of each row
                borderColor: '#293744',     // border color
                hoverborderColor: '#293744',// border color on hover
                zIndex: 10,                 // z-index of the menu body
                isAutoUrl: 1,               // whether to auto-expand the default URL
                defaultUrl: '#html',        // default link
                onItemClick: null,          // callback when a menu item is clicked
                onMenuTitleClick: null,     // callback when the menu title is clicked
                onGroupTitleClick: null,    // callback when a group title is clicked
            };
            var opts = $.extend({}, defaluts, options);

            // Build a <style> block from the options and inject it once.
            var extend_style = '<style>' +
                '.eos-menu{' +
                'font-size:' + opts.fontSize + ';' +
                'color:' + opts.color + ';' +
                '}' +
                '.eos-menu .eos-item a{' +
                'color:' + opts.color + ';' +
                '}' +
                '.eos-menu .eos-menu-content{' +
                'z-index:' + opts.zIndex + ';' +
                '}' +
                '.eos-menu .eos-group-content .eos-item{' +
                'background:' + opts.subBackground + ';' +
                '}' +
                '.eos-menu .eos-menu-title, .eos-menu .eos-group-title, .eos-menu .eos-item{' +
                'height:' + opts.height + ';' +
                'line-height:' + opts.lineHeight + ';' +
                'background:' + opts.background + ';' +
                'border-color:' + opts.borderColor + ';' +
                '}' +
                '.eos-menu .eos-menu-title .fa, .eos-menu .eos-group-title .fa, .eos-menu .eos-item .fa{' +
                'line-height:' + opts.lineHeight + ';' +
                '}' +
                '.eos-menu .eos-menu-title:hover, .eos-menu .eos-group-title:hover, .eos-menu .eos-item:hover{' +
                'color:' + opts.hoverColor + ';' +
                'background:' + opts.hoverBackground + ';' +
                'border-color:' + opts.hoverborderColor + ';' +
                '}' +
                '</style>';
            $('head').append(extend_style);

            this.each(function() {
                var $this = $(this);
                // Open or close the menu panel.
                $this.find('.eos-menu-title').click(function() {
                    var next = $(this).next();
                    if(next.hasClass('eos-menu-content')) {
                        // Toggle between collapsed (0) and full height.
                        var toHeight = next.outerHeight() ? 0 : getChildrenTotalHeight(next);
                        next.outerHeight(toHeight);
                    }
                    if(typeof opts.onMenuTitleClick == 'function') opts.onMenuTitleClick($(this));
                })
                // Open or close a menu group.
                $this.find('.eos-group-title').click(function() {
                    var next = $(this).next();
                    if(next.hasClass('eos-group-content')) {
                        var toHeight = next.outerHeight() ? 0 : getChildrenTotalHeight(next);
                        var changeHeight = toHeight - next.outerHeight();
                        var menuHeight = $this.find('.eos-menu-content').outerHeight();
                        next.outerHeight(toHeight);
                        // Grow/shrink the enclosing panel by the same delta.
                        $this.find('.eos-menu-content').outerHeight(menuHeight + changeHeight);
                    }
                    if(typeof opts.onGroupTitleClick == 'function') opts.onGroupTitleClick($(this));
                })
                // Click on a specific menu item.
                $this.find('.eos-item').click(function() {
                    if(typeof opts.onItemClick == 'function') opts.onItemClick($(this));
                })
                // Auto-expand the group containing the default link if enabled.
                if(opts.isAutoUrl){
                    $this.find('[href="'+opts.defaultUrl+'"]').parents('.eos-group-content').addClass('auto-height');
                }
            });

            // Sum the outer heights of an element's direct children.
            function getChildrenTotalHeight(obj) {
                var outerHeight = 0
                obj.children().each(function() {
                    outerHeight += $(this).outerHeight();
                })
                return outerHeight;
            }
        }
    });
})(window.jQuery);
|
# Read a count N, then echo N lines with the first character upper-cased
# (the remainder of each line is left untouched, unlike str.capitalize()).
N = int(input())
while N:
    sentence = input()
    # Slicing with [:1] avoids an IndexError when the input line is empty;
    # sentence[0] on "" would crash.
    print(sentence[:1].upper() + sentence[1:])
    N -= 1
|
from ctypes import c_char_p, c_void_p, cast, cdll
class Dota2Comm:
    """Thin wrapper around the native dota2comm.dll messaging library."""

    def __init__(self, name):
        # Load the native library; requires dota2comm.dll on the DLL search path.
        self.__dota2comm = cdll.LoadLibrary("dota2comm.dll")
        self.name = name
        # The C API takes a byte string, so keep an encoded copy of the name.
        self.__name = name.encode()

        self.__receiveMessage = self.__dota2comm.receiveMessage
        self.__receiveMessage.argtypes = [c_char_p]
        # Declare the return type as a raw pointer so a NULL result maps to
        # None instead of being truncated to an int.
        self.__receiveMessage.restype = c_void_p

    def receive_and_decode_message(self):
        """Fetch the next pending message for this client.

        Returns the message decoded as UTF-8, or None when the native call
        returned NULL (no message pending).
        """
        message_pointer = self.__receiveMessage(self.__name)
        if message_pointer:
            # cast() reinterprets the void* as a C string.  Bug fix: `cast`
            # was used here without being imported, so this line previously
            # raised NameError at runtime.
            message = cast(message_pointer, c_char_p).value.decode('utf-8')
            return message
        else:
            return None
|
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2019 Xilinx, Inc. All Rights Reserved.
#
# Generated wrapper: sets up the Vivado/Vitis toolchain environment and then
# launches the synthesis run for design_1_axi_gpio_0_0 via ISEWrap.sh.

# Prepend the Xilinx tool directories to PATH (creating it if unset).
if [ -z "$PATH" ]; then
  PATH=/data3/Xilinx/Vitis/2019.2/bin:/data3/Xilinx/Vivado/2019.2/ids_lite/ISE/bin/lin64:/data3/Xilinx/Vivado/2019.2/bin
else
  PATH=/data3/Xilinx/Vitis/2019.2/bin:/data3/Xilinx/Vivado/2019.2/ids_lite/ISE/bin/lin64:/data3/Xilinx/Vivado/2019.2/bin:$PATH
fi
export PATH

# Same treatment for LD_LIBRARY_PATH (left empty when unset).
if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=
else
  LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH

# Run directory for this synthesis job.
HD_PWD='/home/trevor/dev/mylab/sandbox/project_2/project_2.runs/design_1_axi_gpio_0_0_synth_1'
cd "$HD_PWD"

HD_LOG=runme.log
/bin/touch $HD_LOG

ISEStep="./ISEWrap.sh"
# Run one tool step through ISEWrap.sh, appending output to the log and
# aborting the whole script if the step fails.
EAStep()
{
     $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
     if [ $? -ne 0 ]
     then
         exit
     fi
}

EAStep vivado -log design_1_axi_gpio_0_0.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source design_1_axi_gpio_0_0.tcl
<gh_stars>1-10
// Redux-style action type constants for the rate-exchange store slice.
export default {
    ADD: 'ADD_RATE_EXCHANGE',
    REMOVE: 'REMOVE_RATE_EXCHANGE',
    RESET: 'RESET_RATE_EXCHANGE',
    UPDATE: 'UPDATE_RATE_EXCHANGE',
};
|
# Enqueue the report mail job to run 60 seconds before next midnight.
ReportMailJob.set(wait_until: Time.now.tomorrow.midnight.ago(60)).perform_later
# Enqueue the usage-log initialization job to run at next midnight.
InitUsingLogJob.set(wait_until: Time.now.tomorrow.midnight).perform_later
# Running `rake db:migrate:reset` raises an error saying the delayed_jobs
# table cannot be found, so keep the offending line commented out.
|
// The 26 upper-case ASCII letters, one letter per array element.
const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("");
|
<filename>api/_bot/commands/math.js
import nerdamer from 'nerdamer/all'
import mathjax from 'mathjax'
import sharp from 'sharp'

// Lazily initialise a single shared MathJax instance (tex input, svg output).
const getMathJax = (() => {
    let MathJax = null
    return async () => {
        if (MathJax == null) {
            MathJax = await mathjax.init({
                loader: {
                    load: ['input/tex', 'output/svg']
                },
                startup: {
                    typeset: false
                }
            })
        }
        return MathJax
    }
})()

// Bot command handler: renders the command arguments as a math expression
// and replies with a PNG image of the typeset result.
export default async (ctx) => {
    const MathJax = await getMathJax()
    const input = ctx.state.command.args.join(' ')
    // A backslash signals raw TeX; anything else goes through nerdamer first.
    const tex = input.includes('\\')
        ? input
        : nerdamer(ctx.state.command.args.join(' ')).toTeX()
    const result = await MathJax.tex2svgPromise(tex, {
        display: true,
        em: 16,
        ex: 8,
        containerWidth: 100 * 16
    })
    if (!result.errors) {
        const scale = 4
        const adaptor = MathJax.startup.adaptor
        // change some properties of the svg to scale the svg
        const [svg] = result.children
        svg.attributes.width = `${parseFloat(svg.attributes.width) * scale}ex`
        svg.attributes.height = `${
            parseFloat(svg.attributes.height) * scale * 1.1
        }ex`
        // convert svg dom to string to png
        const data = adaptor.innerHTML(result)
        const image = sharp(Buffer.from(data, 'utf-8'))
        const pngData = await image.png().toBuffer()
        return ctx.replyWithPhoto(
            {
                source: pngData
            },
            {
                reply_to_message_id: ctx.message.message_id
            }
        )
    }
    // NOTE(review): when MathJax reports errors the handler returns
    // undefined and sends no reply — confirm whether an error message to
    // the user is intended here.
}
|
<filename>test/sdk.test.ts
import { logger } from "@sentry/utils";

import { flush } from "../src/js/sdk";

// jest.mock calls are hoisted above imports, so this mock is installed
// before ../src/js/sdk is loaded.  The mock client's flush resolves true
// by default; individual tests override it.
jest.mock("@sentry/react", () => {
  const mockClient = {
    flush: jest.fn(() => Promise.resolve(true)),
  };
  return {
    getCurrentHub: jest.fn(() => ({
      getClient: jest.fn(() => mockClient),
    })),
  };
});

jest.spyOn(logger, "error");

// Imported after jest.mock so the mocked module is returned.
import { getCurrentHub } from "@sentry/react";

describe("flush", () => {
  it("Calls flush on the client", async () => {
    const mockClient = getCurrentHub().getClient();
    expect(mockClient).toBeTruthy();
    if (mockClient) {
      const flushResult = await flush();

      // eslint-disable-next-line @typescript-eslint/unbound-method
      expect(mockClient.flush).toBeCalled();
      expect(flushResult).toBe(true);
    }
  });
  it("Returns false if flush failed and logs error", async () => {
    const mockClient = getCurrentHub().getClient();
    expect(mockClient).toBeTruthy();
    if (mockClient) {
      // Force the client's flush to reject so sdk.flush must recover.
      mockClient.flush = jest.fn(() => Promise.reject());

      const flushResult = await flush();

      // eslint-disable-next-line @typescript-eslint/unbound-method
      expect(mockClient.flush).toBeCalled();
      expect(flushResult).toBe(false);
      // eslint-disable-next-line @typescript-eslint/unbound-method
      expect(logger.error).toBeCalledWith("Failed to flush the event queue.");
    }
  });
});
|
#!/bin/bash
# Rewrite a cookie-export JSON on the macOS clipboard so the cookies apply
# to localhost and never expire (expirationDate = 2^31 - 1).
function cookie_local() {
    pbpaste | jq '(.[] | .domain) |= "localhost"' | jq '(.[] | .expirationDate ) |= 2147483647' | pbcopy
}

# Same as cookie_local, but targets the apps.test.jstor.org domain.
function cookie_c20n() {
    pbpaste | jq '(.[] | .domain) |= "apps.test.jstor.org"' | jq '(.[] | .expirationDate ) |= 2147483647' | pbcopy
}

# Pretty-print the JSON currently on the clipboard, in place.
function json_clean() {
    pbpaste | jq . | pbcopy
}

# Pretty-print the clipboard JSON with object keys sorted.
function json_clean_sorted() {
    pbpaste | jq --sort-keys . | pbcopy
}

# Flip the local service's mylists dipswitches: route reads/writes through
# the UCM client and disable the RDS DAO write path.
function flippy() {
    curl -H "Content-Type: application/json" -X POST -d '{"bit": true}' http://localhost:8181/watchable/dipswitch/mylists_write_ucm_client
    curl -H "Content-Type: application/json" -X POST -d '{"bit": true}' http://localhost:8181/watchable/dipswitch/mylists_read_ucm_client
    curl -H "Content-Type: application/json" -X POST -d '{"bit": false}' http://localhost:8181/watchable/dipswitch/mylists_write_rds_dao
}

# Point the local service's MyListsDbHost variable at the test RDS instance.
function dbit() {
    curl -H "Content-Type: application/json" -X PUT -d '{"value": "bolt-rds01.test.cirrostratus.org"}' http://localhost:8181/watchable/persistentvariable/MyListsDbHost
}
# curl wrapper: sends JSON headers, appends the HTTP status code to the
# output, pretty-prints the JSON body on success and reports the code on
# failure.  All arguments are passed straight through to curl.
function safe_curl() {
    # call this with a url argument, e.g.
    # safecurl.sh "http://eureka.test.cirrostratus.org:8080/eureka/v2/apps/"
    # separating the (verbose) curl options into an array for readability
    hash curl 2>/dev/null || { echo >&2 "I require curl but it's not installed.  Aborting."; return 1; }
    hash jq 2>/dev/null || { echo >&2 "I require jq but it's not installed.  Aborting."; return 1; }
    hash sed 2>/dev/null || { echo >&2 "I require sed but it's not installed.  Aborting."; return 1; }
    curl_args=(
        -H 'Accept:application/json'
        -H 'Content-Type:application/json'
        --write '\n%{http_code}\n'
        --fail
        --silent
    )
    #echo "${curl_args[@]}"
    # prepend some headers, but pass on whatever arguments this script was called with
    output=$(curl "${curl_args[@]}" "$@")
    return_code=$?
    if [ 0 -eq $return_code ]; then
        # remove the "http_code" line from the end of the output, and parse it
        echo "$output" | sed '$d' | jq .
    else
        # echo to stderr so further piping to jq will process empty output
        >&2 echo "Failure: code=$output"
    fi
}
# What The Issue: given an issue key (e.g. CORE-5339), fetch it from JIRA
# and print a ready-made pull-request description.  Requires JIRA_LOGIN and
# JIRA_PASSWORD to be exported.
function wti() {
    # What The Issue: Find the Jira issue from the topic
    JIRA_URL="https://jira.jstor.org"
    JIRA_API_URI="/rest/api/2/"
    # e.g. https://jira.jstor.org/rest/api/2/issue/CORE-5339
    RESULT=$(safe_curl -u "$JIRA_LOGIN:$JIRA_PASSWORD" -X GET "${JIRA_URL}${JIRA_API_URI}issue/${1}" | jq '{"key": .key, "summary": .fields.summary, "description": .fields.description}')
    KEY=$(echo $RESULT | jq -r '.key')
    SUMMARY=$(echo $RESULT | jq -r '.summary')
    DESCRIPTION=$(echo $RESULT | jq -r '.description')
    echo "$KEY - $SUMMARY"
    echo ""
    echo "Resolves [$KEY](https://jira.jstor.org/browse/$KEY)"
    echo ""
    echo "###### Description"
    echo "$DESCRIPTION"
    echo ""
    echo "###### Notify these people"
    echo "@ithaka/cypress"
}
# Formats a nice pull-request message from a JIRA ticket key ($1).
# Fixes in this revision:
#   * the password prompt previously said "Enter your login" and echoed the
#     typed password; it now says "password" and uses `read -s`.
#   * a redundant direct curl request (whose output was discarded before
#     safe_curl repeated the same call) has been removed.
function jira_pull() {
    # NOTE: requires jq
    #    brew install jq
    # NOTE: Does not currently use j2m to convert jira markdown to github
    #    npm install -g j2m
    hash curl 2>/dev/null || { echo >&2 "I require curl but it's not installed. Run 'brew install curl' and try again.  Aborting."; return 1; }
    hash jq 2>/dev/null || { echo >&2 "I require jq but it's not installed. Run 'brew install jq' and try again. Aborting."; return 1; }

    JIRA_URL="https://jira.jstor.org"
    JIRA_API_URI="/rest/api/2/"

    # Prompt for JIRA credentials when they are not already exported.
    if [[ -z $JIRA_LOGIN ]]; then
        read -p "Enter your login for JIRA: " JIRA_LOGIN
    fi
    # -s keeps the password off the terminal; the trailing echo restores the
    # newline that -s suppresses.
    if [[ -z $JIRA_PASSWORD ]]; then
        read -s -p "Enter your password for JIRA: " JIRA_PASSWORD
        echo
    fi

    curl_args=(
        -u "$JIRA_LOGIN:$JIRA_PASSWORD"
        -X GET
        --write '\n%{http_code}\n'
        --fail
        --silent
        "${JIRA_URL}${JIRA_API_URI}issue/${1}"
    )

    # Warning: Curl is weird about escaping special characters in passwords
    # Single request via safe_curl; extract just the fields we print below.
    RESULT=$(safe_curl "${curl_args[@]}" | jq '{"key": .key, "summary": .fields.summary, "description": .fields.description}')

    # Preserve whitespace in the unquoted expansions below.
    IFS='🍔'
    KEY=$(echo "${RESULT}" | jq -r '.key')
    SUMMARY=$(echo "${RESULT}" | jq -r '.summary')
    # JIRA numbered lists converted to Github numbered lists with sed
    JIRA_DESCRIPTION=$(echo "${RESULT}" | jq -r '.description' | sed -E 's/^([\t| ]*)(# )([-\w]*)?/\11. \3/p')
    if hash j2m 2>/dev/null; then
        GITHUB_DESCRIPTION=$(echo "${JIRA_DESCRIPTION}" | j2m --toM --stdin)
    else
        GITHUB_DESCRIPTION="${JIRA_DESCRIPTION}"
    fi

    echo "${KEY}: ${SUMMARY}"
    echo ""
    echo "Resolves [${KEY}](https://jira.jstor.org/browse/${KEY})"
    echo ""
    echo "###### Description"
    echo "${GITHUB_DESCRIPTION}"
    echo ""
    echo "###### Notify these people"
    echo "@ithaka/labs"
    unset IFS
}
# Open a GitHub pull request for the current topic branch, using jira_pull
# to build the PR message from the matching JIRA ticket.
function make_pull() {
    # NOTE: Your topic branch must match jira issue e.g. CORE-5339
    # Opens a pull request on GitHub for the project that the "origin"
    # The description will be lifted from JIRA
    # This command will abort operation if it detects that
    # the current topic branch has local commits that are not yet pushed
    # to its upstream branch on the remote.
    # ALSO NOTE: requires hub. Install it with:
    #    brew install hub curl jq
    hash git 2>/dev/null || { echo >&2 "I require git but it's not installed. Run 'brew install git' and try again. Aborting."; return 1; }
    hash hub 2>/dev/null || { echo >&2 "I require hub but it's not installed. Run 'brew install hub' and try again. Aborting."; return 1; }
    CURRENT_BRANCH="$(git rev-parse --abbrev-ref HEAD)"
    # Preserve whitespace by setting IFS to something unlikely
    IFS='|'
    MESSAGE="$(jira_pull "${CURRENT_BRANCH}")"
    # If when you execute the next line (hub) you get the error:
    #    Unprocessable Entity (HTTP 422) Invalid value for "head"
    # Then you forgot to push. Yeah. Non-obvious
    hub pull-request -o -m "${MESSAGE}"
    RETVAL=$?
    [ $RETVAL -ne 0 ] && echo "You forgot to push if you got Unprocessable Entity (HTTP 422)"
    unset IFS
}
# Decode a base64url string ($1): re-pad to a multiple of 4, translate the
# URL-safe alphabet (_ -) back to standard (/ +), then base64-decode.
decode_base64_url() {
  local len=$((${#1} % 4))
  local result="$1"
  if [ $len -eq 2 ]; then result="$1"'=='
  elif [ $len -eq 3 ]; then result="$1"'='
  fi
  echo "$result" | tr '_-' '/+' | openssl enc -d -base64
}

# Decode segment $1 (1=header, 2=payload) of the JWT in $2 and pretty-print.
decode_jwt(){
   decode_base64_url $(echo -n $2 | cut -d "." -f $1) | jq .
}

# Same, but when an "exp" claim is present add a human-readable expStr field.
decode_jwt_date(){
  decode_base64_url $(echo -n $2 | cut -d "." -f $1) | jq 'if .exp then (.expStr = (.exp|todate)) else . end'
}

# Decode JWT header
alias jwth="decode_jwt 1"
# Decode JWT Payload
alias jwtp="decode_jwt 2"
# Decode JWT header and pretty print the expiration field
alias jwthd="decode_jwt_date 1"
# Decode JWT Payload and pretty print the expiration field
alias jwtpd="decode_jwt_date 2"
|
package validation
import (
"context"
"fmt"
"io/ioutil"
"path/filepath"
"reflect"
"strings"
"github.com/ghodss/yaml"
"github.com/open-policy-agent/opa/rego"
)
const (
	// defaultPackage is the rego package queried when policies are loaded
	// from file globs.
	defaultPackage = "com.segment.kubeapply"
	// defaultResult is the rego rule evaluated to obtain denial reasons.
	defaultResult  = "deny"

	// warnPrefix marks a deny reason that is downgraded to a warning.
	warnPrefix = "warn:"
)

// PolicyChecker wraps a policy module and a prepared query.
type PolicyChecker struct {
	Module PolicyModule
	Query  rego.PreparedEvalQuery

	ExtraFields map[string]interface{}
}

var _ Checker = (*PolicyChecker)(nil)

// PolicyModule contains information about a policy.
type PolicyModule struct {
	Name string

	// Contents is a string that stores the policy in rego format.
	Contents string

	// Package is the name of the package in the rego contents.
	Package string

	// Result is the variable that should be accessed to get the evaluation results.
	Result string

	// ExtraFields are added into the input and usable for policy evaluation.
	ExtraFields map[string]interface{}
}
// NewPolicyChecker creates a new PolicyChecker from the given module.
// NewPolicyChecker creates a new PolicyChecker from the given module.
// It binds "result" to the module's Result rule inside its Package and
// prepares the query once so every Check call reuses it.
func NewPolicyChecker(ctx context.Context, module PolicyModule) (*PolicyChecker, error) {
	query, err := rego.New(
		rego.Query(
			fmt.Sprintf("result = data.%s.%s", module.Package, module.Result),
		),
		rego.Module(module.Package, module.Contents),
	).PrepareForEval(ctx)
	if err != nil {
		return nil, err
	}

	return &PolicyChecker{
		Module: module,
		Query:  query,
	}, nil
}
// DefaultPoliciesFromGlobs creates policy checkers from one or more file policy globs, using
// the default package and result values.
// DefaultPoliciesFromGlobs creates policy checkers from one or more file policy globs, using
// the default package and result values.  Each matched file becomes one
// PolicyChecker named after its base filename; extraFields is attached to
// every module and merged into the evaluation input.
func DefaultPoliciesFromGlobs(
	ctx context.Context,
	globs []string,
	extraFields map[string]interface{},
) ([]*PolicyChecker, error) {
	checkers := []*PolicyChecker{}

	for _, glob := range globs {
		matches, err := filepath.Glob(glob)
		if err != nil {
			return nil, err
		}

		for _, match := range matches {
			contents, err := ioutil.ReadFile(match)
			if err != nil {
				return nil, err
			}

			checker, err := NewPolicyChecker(
				ctx,
				PolicyModule{
					Name:        filepath.Base(match),
					Contents:    string(contents),
					Package:     defaultPackage,
					Result:      defaultResult,
					ExtraFields: extraFields,
				},
			)
			if err != nil {
				return nil, err
			}

			checkers = append(checkers, checker)
		}
	}

	return checkers, nil
}
// Check runs a check against the argument resource using the current policy.
// Check runs a check against the argument resource using the current policy.
//
// The bound "result" may be either a bool (allow/deny) or a list of reason
// strings; reasons prefixed with "warn:" (case-insensitive) are reported as
// warnings instead of denials.  Any other result type is an error.
func (p *PolicyChecker) Check(ctx context.Context, resource Resource) CheckResult {
	result := CheckResult{
		CheckType: CheckTypeOPA,
		CheckName: p.Module.Name,
	}

	if resource.Name == "" {
		// Skip over resources that aren't likely to have any Kubernetes-related
		// structure.  (Plain string assignment here; the previous
		// fmt.Sprintf call had no format arguments and tripped go vet.)
		result.Status = StatusEmpty
		result.Message = "No resource content"
		return result
	}

	data := map[string]interface{}{}
	if err := yaml.Unmarshal(resource.Contents, &data); err != nil {
		result.Status = StatusError
		result.Message = fmt.Sprintf("Error unmarshalling yaml: %+v", err)
		return result
	}

	// Merge caller-supplied fields into the policy input.
	for key, value := range p.Module.ExtraFields {
		data[key] = value
	}

	results, err := p.Query.Eval(ctx, rego.EvalInput(data))
	if err != nil {
		result.Status = StatusError
		result.Message = fmt.Sprintf("Error evaluating query: %+v", err)
		return result
	}

	if len(results) != 1 {
		result.Status = StatusError
		result.Message = fmt.Sprintf("Did not get exactly one result: %+v", results)
		return result
	}

	switch value := results[0].Bindings["result"].(type) {
	case bool:
		// Boolean policies express a simple allow/deny decision.
		if value {
			result.Status = StatusValid
			result.Message = "Policy returned allowed = true"
		} else {
			result.Status = StatusInvalid
			result.Message = "Policy returned allowed = false"
		}
	case []interface{}:
		// List policies return zero or more reason strings.
		if len(value) == 0 {
			result.Status = StatusValid
			result.Message = "Policy returned 0 deny reasons"
		} else {
			invalidReasons := []string{}
			warnReasons := []string{}

			// Partition reasons into warnings and denials by prefix.
			for _, subValue := range value {
				subValueStr := fmt.Sprintf("%v", subValue)

				if strings.HasPrefix(
					strings.ToLower(subValueStr),
					warnPrefix,
				) {
					// Treat this as a warning
					warnReasons = append(
						warnReasons,
						subValueStr,
					)
				} else {
					// Treat this as a denial
					invalidReasons = append(
						invalidReasons,
						subValueStr,
					)
				}
			}

			// Any denial makes the overall status invalid; warnings alone
			// yield StatusWarning.
			if len(invalidReasons) == 0 {
				result.Status = StatusWarning
				result.Message = fmt.Sprintf(
					"Policy returned %d warn reason(s)",
					len(warnReasons),
				)
				result.Reasons = warnReasons
			} else if len(warnReasons) == 0 {
				result.Status = StatusInvalid
				result.Message = fmt.Sprintf(
					"Policy returned %d deny reason(s)",
					len(invalidReasons),
				)
				result.Reasons = invalidReasons
			} else {
				result.Status = StatusInvalid
				result.Message = fmt.Sprintf(
					"Policy returned %d deny reason(s) and %d warn reason(s)",
					len(invalidReasons),
					len(warnReasons),
				)
				result.Reasons = append(invalidReasons, warnReasons...)
			}
		}
	default:
		result.Status = StatusError
		result.Message = fmt.Sprintf(
			"Got unexpected response type: %+v (%+v)",
			reflect.TypeOf(value),
			value,
		)
	}

	return result
}
|
#!/usr/bin/env bash
# Collect Django static assets; -e aborts on error, -x echoes each command.
set -ex

echo "PWD=$PWD"
# --noinput suppresses the interactive overwrite confirmation.
python manage.py collectstatic --noinput
|
from bottle import Bottle, run, route, static_file

app = Bottle()

# Directory that holds the site's static assets.
PATH = '/path/to/static/files'


# Serve any file under PATH at /static/<path>.
# Bug fix: the original code called app.mount('/static', static_file(...)),
# but mount() expects a WSGI application while static_file() returns a single
# response object (and was called with the wrong signature), so no static
# route was ever registered.  A wildcard route is the supported pattern.
@app.route('/static/<filepath:path>')
def serve_static(filepath):
    return static_file(filepath, root=PATH)


# Define a route to serve the index.html file for the root URL.
@app.route('/')
def index():
    return static_file('index.html', root=PATH)


# Run the Bottle application.
if __name__ == '__main__':
    run(app, host='localhost', port=8080)
|
using System;
using System.Collections.Generic;
/// <summary>
/// Minimal command-line dispatcher: maps literal option tokens to callbacks.
/// </summary>
public class CommandLineParser
{
    // Option token -> callback invoked when that token appears in the args.
    private Dictionary<string, Action<string>> options;

    public CommandLineParser()
    {
        options = new Dictionary<string, Action<string>>();
    }

    /// <summary>
    /// Registers an option. The description is accepted for API compatibility
    /// but is not stored or displayed anywhere.
    /// </summary>
    public void Add(string option, string description, Action<string> action)
    {
        options.Add(option, action);
    }

    /// <summary>
    /// Walks the argument list; each registered token triggers its callback.
    /// The following token is passed as the value unless it is itself a
    /// registered option, in which case the callback receives null.
    /// </summary>
    public void Parse(string[] args)
    {
        for (int index = 0; index < args.Length; index++)
        {
            Action<string> handler;
            if (!options.TryGetValue(args[index], out handler))
            {
                continue;
            }
            string value = null;
            int next = index + 1;
            if (next < args.Length && !options.ContainsKey(args[next]))
            {
                value = args[next];
            }
            handler(value);
        }
    }
}
/// <summary>
/// Holds the job-search options collected from the command line.
/// </summary>
public class JobSearchClient
{
    // Latitude/longitude pair, e.g. "37.7749,-122.4194".
    public string LatLong { get; set; }
    // When true, recruitment-agency postings are excluded.
    public bool ExcludeAgencies { get; set; }
    // Country of the job posting.
    public string Country { get; set; }
}
public class Program
{
    /// <summary>
    /// Demo entry point: registers job-search options, parses a sample
    /// argument list, and prints the captured values.
    /// </summary>
    public static void Main()
    {
        CommandLineParser parser = new CommandLineParser();
        JobSearchClient client = new JobSearchClient();

        parser.Add("latlong", "Return latitude/longitude.", v => client.LatLong = v);
        parser.Add("excludeagencies", "Exclude recruitment agencies.", v => client.ExcludeAgencies = true);
        // BUG FIX: the original registered the key "c=|country=" (Mono.Options
        // style syntax), but CommandLineParser matches tokens literally, so the
        // argument "c=US" never matched and Country was left null. Register the
        // literal token and pass the value as a separate argument instead.
        parser.Add("country", "Specify country of job.", v => client.Country = v);

        string[] args = { "latlong", "37.7749,-122.4194", "excludeagencies", "country", "US" };
        parser.Parse(args);

        Console.WriteLine("Parsed values:");
        Console.WriteLine("Latitude/Longitude: " + client.LatLong);
        Console.WriteLine("Exclude Agencies: " + client.ExcludeAgencies);
        Console.WriteLine("Country: " + client.Country);
    }
}
|
#!/bin/bash
# Build the tensorflow-datasets distribution from a specific git commit and,
# after interactive confirmation, publish it to PyPI.
#
# Usage: <script> <git-commit-sha>
set -v # print commands as they're executed
set -e # fail and exit on any command erroring

# Required first positional argument: the commit to build from.
GIT_COMMIT_ID=${1:-""}
# NOTE: a failing [[ ]] inside this && list does not trip `set -e`
# (AND-list members other than the last are exempt), so execution
# continues normally when a commit IS provided.
[[ -z $GIT_COMMIT_ID ]] && echo "Must provide a commit" && exit 1

# Work in a throwaway directory so the build never touches the caller's tree.
TMP_DIR=$(mktemp -d)
pushd $TMP_DIR

echo "Cloning tensorflow/datasets and checking out commit $GIT_COMMIT_ID"
git clone https://github.com/tensorflow/datasets.git
cd datasets
git checkout $GIT_COMMIT_ID

# Tooling needed to build (wheel) and upload (twine) the package.
pip install wheel twine pyopenssl

# Build the distribution: source tarball plus a universal (py2/py3) wheel.
echo "Building distribution"
python setup.py sdist
python setup.py bdist_wheel --universal

# Publish to PyPI only after an explicit interactive "y" answer.
read -p "Publish? (y/n) " -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]
then
  echo "Publishing to PyPI"
  twine upload dist/*
else
  echo "Skipping upload"
  exit 1
fi

# Cleanup build artifacts (still inside the datasets checkout), then
# return to the original directory and remove the temp clone entirely.
rm -rf build/ dist/ tensorflow_datasets.egg-info/
popd
rm -rf $TMP_DIR
|
#!/bin/sh
# Launch the Minecraft server jar in the background with a 512 MB initial
# (-Xms) and 1 GB maximum (-Xmx) JVM heap.
java -Xmx1024M -Xms512M -jar ~/usr/minecraft.jar &
|
/**
 * Prints the multiplication table for 7 (1 through 10).
 */
public class MultiplicationTable {

    /**
     * Formats a single table row, e.g. line(7, 3) returns "7 * 3 = 21".
     * Extracted from main so the formatting/arithmetic is unit-testable
     * and reusable for any operand, not just 7.
     *
     * @param num the table's base number
     * @param i   the multiplier
     * @return the formatted row text
     */
    static String line(int num, int i) {
        return num + " * " + i + " = " + (num * i);
    }

    public static void main(String[] args) {
        System.out.println("Multiplication Table for 7:");
        int num = 7;
        for (int i = 1; i <= 10; ++i) {
            System.out.println(line(num, i));
        }
    }
}
|
#!/usr/bin/env python
import ss
import sys
import json
import csv
import logging
import urllib
############ ENV Settings ############
# Log to a fresh file on every run (filemode='w' truncates the previous log).
logging.basicConfig(filename='mx-sync.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
############ GLOBALS ############
# policy name -> {"policyType": ..., "config.json": <policy body>, "isok": bool}
sourcePolicies = {}
# Primary-MX connection settings, loaded from config.json below.
AUTH = {}
try:
    with open('config.json', 'r') as data:
        AUTH = json.load(data)
# BUG FIX: was a bare `except:` which also swallowed SystemExit and
# KeyboardInterrupt. IOError covers a missing/unreadable file; ValueError
# covers malformed JSON from json.load.
except (IOError, ValueError):
    logging.warning("Missing \"config.json\" file, create file named config.json with the following contents:\n{\n\t\"ENDPOINT\": \"https://127.0.0.1:8083\",\n\t\"REGION\": \"us-east-1\",\n\t\"USERNAME\": \"admin\",\n\t\"PASSWORD\": \"<PASSWORD>\"\n}")
    exit()
# Schema of the dataset used on the MX to record what this script synced.
MX_SYNC_DATASET = '{"dataset-name":"mx_sync_log","columns":[{"name":"key","key":true},{"name":"type","key":false},{"name":"name","key":false},{"name":"status","key":false},{"name":"timestamp","key":false}]}'
# Accumulates one record per upserted dataset/ipGroup/policy; flushed at the end of run().
MX_SYNC_LOG_RECORDS = {"records": []}
AWSREGIONS = ["us-west-1"]
# Secondary MX instances to sync to, discovered by EC2 tag ('impv' = 'mx-sync').
MXs = ss.get_mx_instances_by_tagname('impv', 'mx-sync', AWSREGIONS)
# Normalized name -> object maps populated while scanning policies in run().
DATASETS = {}
SIGNATURES = {}
IPGROUPS = {}
ALLPOLICIES = {}
# Parse CSV into dictionary with policy type and applied to assets
logging.warning("\n\n=========== Start MX policy sync ===========\n")
logging.warning('PATH2REPORT='+sys.argv[1])
PATH2REPORT = '/opt/SecureSphere/server/SecureSphere/jakarta-tomcat-secsph/webapps/SecureSphere/'+sys.argv[1]
# Example argv[1] = /WEB-INF/reptemp/Sync_Security_Policies_Report_admin_15Apr2019_04-40-59.csv (all policies)
# Example argv[1] = /WEB-INF/reptemp/Sync_Security_Policies_Report_admin_15Apr2019_05-25-06.csv (web policies)
with open(PATH2REPORT, 'r') as f:
    reader = csv.reader(f)
    for row in reader:
        # Skip the CSV header row; keep column 0 (policy name) and 1 (type).
        if row[0] != "Policy Name":
            sourcePolicies[row[0]] = {"policyType": row[1]}
def run():
    """Synchronize security policies from the primary MX to every tagged MX.

    Steps:
      1. Pull each policy listed in the CSV report from the primary MX.
      2. While scanning each policy body, collect a normalized set of the
         datasets and IP groups it references (skipping ADC-managed ones).
      3. Load every referenced dataset / IP group from the primary MX.
      4. For each secondary MX: upsert the datasets, IP groups and policies,
         rewriting serverGroupName for the target region.
      5. POST the accumulated sync-log records back to the primary MX.
    """
    primary_session_id = ss.login(AUTH["ENDPOINT"], AUTH["USERNAME"], AUTH["PASSWORD"])
    ss.initMxSyncLog(AUTH["ENDPOINT"], primary_session_id, MX_SYNC_DATASET)
    # Iterate through each policy and pull out normalized list of datasets, ipGroups, and signatures
    for policy_name in sourcePolicies:
        policyAttr = sourcePolicies[policy_name]
        if policyAttr["policyType"] in ss.policyMapping:
            logging.warning("Retrieving policyType \""+policyAttr["policyType"]+"\" policyName \""+policy_name+"\" from primary MX - REQUEST: \nGET /conf/policies/security/"+ss.policyMapping[policyAttr["policyType"]]+"/"+policy_name)
            response = ss.makeCall(AUTH["ENDPOINT"], primary_session_id, "/conf/policies/security/"+ss.policyMapping[policyAttr["policyType"]]+"/"+urllib.quote(policy_name))
            if response.status_code==404:
                # Policy not found on the primary MX; mark it so it is skipped later.
                policyAttr["isok"] = False
            else:
                policyObj = response.json()
                ALLPOLICIES[policy_name] = policyObj
                sourcePolicies[policy_name]["config.json"] = policyObj
                sourcePolicies[policy_name]["isok"] = True
                logging.warning("RESPONSE: \n"+str(policyObj))
                # Policy bodies reference datasets/ipGroups in different places
                # depending on policy type; each shape is handled below.
                # (No API call exists for anti-scraping policies.)
                # check for rules->ipGroup in firewallPolicies
                if "rules" in policyObj:
                    for rule in policyObj["rules"]:
                        if "ipGroup" in rule:
                            if rule["ipGroup"] not in ss.ignoreADCIpGroups:
                                logging.warning("Capturing IPGroup \"" + rule["ipGroup"] + "\" for policy " + policy_name)
                                IPGROUPS[rule["ipGroup"]] = False
                            else:
                                logging.warning("Ignoring IPGroup \"" + rule["ipGroup"] + "\" for policy " + policy_name)
                # check for exceptions->predicates->ipGroups in httpProtocolPolicies, http2ProtocolPolicies, webCorrelationPolicies, snippetInjectionPolicies
                if "exceptions" in policyObj:
                    for exception in policyObj["exceptions"]:
                        if "predicates" in exception:
                            for predicate in exception["predicates"]:
                                if "ipGroups" in predicate:
                                    for ipGroup in predicate["ipGroups"]:
                                        if ipGroup not in ss.ignoreADCIpGroups:
                                            logging.warning("Capturing IPGroup \"" + ipGroup + "\" for policy " + policy_name)
                                            IPGROUPS[ipGroup] = False
                                        else:
                                            logging.warning("Ignoring IPGroup \"" + ipGroup + "\" for policy " + policy_name)
                # check matchCriteria - webApplicationCustomPolicies, webServiceCustomPolicies
                if "matchCriteria" in policyObj:
                    for mc in policyObj["matchCriteria"]:
                        # matchCriteria->lookupDatasetSearch->searchInLookupDataset
                        # matchCriteria->enrichmentData->searchInLookupDataset
                        if mc["type"] == "lookupDatasetSearch" or mc["type"] == "enrichmentData":
                            for dataset in mc["searchInLookupDataset"]:
                                logging.warning("Capturing lookupDatasetSearch dataset \"" + dataset + "\" for policy " + policy_name)
                                DATASETS[dataset] = False
                        # matchCriteria->datasetAttributeLookup[]->searchInLookupDataset
                        elif mc["type"] == "datasetAttributeLookup":
                            for dataset in mc["searchInLookupDataset"]:
                                if dataset not in ss.ignoreADCDatasets:
                                    logging.warning("Capturing searchInLookupDataset dataset \"" + dataset + "\" for policy " + policy_name)
                                    DATASETS[dataset] = False
                                else:
                                    # BUG FIX: this branch used to log "Capturing" for ignored datasets.
                                    logging.warning("Ignoring dataset \"" + dataset + "\" for policy " + policy_name)
                            # matchCriteria->datasetAttributeLookup->lookupDataset
                            # BUG FIX: this block used to sit inside the loop above
                            # and tested the loop variable `dataset` (a NameError
                            # when searchInLookupDataset is empty) instead of the
                            # lookup dataset itself.
                            lookup_dataset = mc.get("lookupDataset")
                            if lookup_dataset is not None:
                                if lookup_dataset not in ss.ignoreADCDatasets:
                                    logging.warning("Capturing lookupDataset dataset \"" + lookup_dataset + "\" for policy " + policy_name)
                                    DATASETS[lookup_dataset] = False
                                else:
                                    logging.warning("Ignoring lookupDataset dataset \"" + lookup_dataset + "\" for policy " + policy_name)
                        elif mc["type"] == "signatures":
                            # Signature-based criteria are not synced; mark the
                            # policy so it is skipped during the upsert phase.
                            sourcePolicies[policy_name]["isok"] = False
                        # matchCriteria->sourceIpAddresses[]
                        # matchCriteria->proxyIpAddresses[]
                        elif mc["type"] == "sourceIpAddresses" or mc["type"] == "proxyIpAddresses":
                            for ipGroup in mc["ipGroups"]:
                                if ipGroup not in ss.ignoreADCIpGroups:
                                    logging.warning("Capturing sourceIpAddresses ipGroup \"" + ipGroup + "\" for policy " + policy_name)
                                    IPGROUPS[ipGroup] = False
                                else:
                                    logging.warning("Ignoring sourceIpAddresses ipGroup \"" + ipGroup + "\" for policy " + policy_name)
        else:
            policyAttr["isok"] = False
            # BUG FIX: message said "skipping policy policy".
            logging.warning("Unsupported policy type \"" + policyAttr["policyType"] + "\", skipping policy \"" + policy_name + "\"")
    # load normalized list of datasets
    for dataset in DATASETS:
        logging.warning("Retrieving \"" + dataset + "\" dataset")
        DATASETS[dataset] = ss.getDataset(AUTH["ENDPOINT"], primary_session_id, dataset)
    # load normalized list of ipGroups
    for ipGroup in IPGROUPS:
        IPGROUPS[ipGroup] = ss.getIPGroup(AUTH["ENDPOINT"], primary_session_id, ipGroup)
    # Push everything to each secondary MX.
    for MX in MXs:
        cur_session_id = ss.login(MX["ENDPOINT"], AUTH["USERNAME"], AUTH["PASSWORD"])
        # Migrate datasets, then ipGroups, then the policies that reference them.
        for dataset in DATASETS:
            MX_SYNC_LOG_RECORDS["records"].append(ss.upsertDataset(MX["ENDPOINT"], cur_session_id, DATASETS[dataset]))
        for ipGroup in IPGROUPS:
            MX_SYNC_LOG_RECORDS["records"].append(ss.upsertIPGroup(MX["ENDPOINT"], cur_session_id, IPGROUPS[ipGroup]))
        for policy_name in sourcePolicies:
            policyAttr = sourcePolicies[policy_name]
            try:
                if policyAttr["policyType"] in ss.policyMapping:
                    # Rewrite server-group names from the primary's region to
                    # this MX's region before applying the policy.
                    for asset in policyAttr["config.json"]["applyTo"]:
                        asset["serverGroupName"] = asset["serverGroupName"].replace(AUTH["REGION"], MX["REGION"])
                    MX_SYNC_LOG_RECORDS["records"].append(ss.upsertWebPolicy(MX["ENDPOINT"], cur_session_id, policy_name, policyAttr))
            except KeyError as e:
                # Policies whose retrieval failed earlier have no "config.json"
                # key; log and continue with the next policy.
                logging.warning("KeyError:"+str(e))
        ss.logout(MX["ENDPOINT"], cur_session_id)
    # Flush the accumulated sync-log records back to the primary MX.
    datasetObj = json.loads(MX_SYNC_DATASET)
    ss.makeCall(AUTH["ENDPOINT"], primary_session_id, "/conf/dataSets/" + datasetObj["dataset-name"] + "/data", "POST", json.dumps(MX_SYNC_LOG_RECORDS))
    ss.logout(AUTH["ENDPOINT"], primary_session_id)

if __name__ == '__main__':
    run()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.