text stringlengths 1 1.05M |
|---|
<gh_stars>1-10
package io.ray.runtime.context;
import io.ray.api.id.ActorId;
import io.ray.api.id.JobId;
import io.ray.api.id.TaskId;
import io.ray.api.id.UniqueId;
import io.ray.runtime.generated.Common.Address;
import io.ray.runtime.generated.Common.TaskType;
/** The context of worker. */
public interface WorkerContext {

  /** ID of the current worker. */
  UniqueId getCurrentWorkerId();

  /** ID of the current job. */
  JobId getCurrentJobId();

  /** ID of the current actor. */
  ActorId getCurrentActorId();

  /** Type of the current task. */
  TaskType getCurrentTaskType();

  /** ID of the current task. */
  TaskId getCurrentTaskId();

  /** RPC address of the current worker process. */
  Address getRpcAddress();
}
|
import {Component} from 'react';
import C1 from './components/C1';
import C2 from './components/C2';
import C3Connected, {C3} from './components/C3';
export default class Container extends Component {
  // Whether C1's getWrappedInstance() and refs.WrappedComponent refer to
  // the same object; computed after mount and rendered below.
  state = {
    isEqual: false
  };

  componentDidMount() {
    // NOTE(review): per the explanatory text rendered below, C2 is
    // presumably connect()ed without { withRef: true }, so
    // this.c2.refs.WrappedComponent is expected to be undefined here --
    // confirm against ./components/C2.
    console.log(this.c2.refs.WrappedComponent);
    console.log(this.c3);
    // Compare the two ways of reaching the wrapped instance on C1.
    this.setState({
      isEqual: this.c1.getWrappedInstance() === this.c1.refs.WrappedComponent
    });
  }

  render() {
    return (
      <div>
        <p>获取组件ref</p>
        <hr />
        <C1 ref={ref => this.c1 = ref}/>
        <C2 ref={ref => this.c2 = ref}/>
        <C3 ref={ref => this.c3 = ref}/>
        <C3Connected/>
        <hr />
        <p>this.c1.getWrappedInstance() === this.c1.refs.WrappedComponent</p>
        <p>结果: {this.state.isEqual.toString()}</p>
        <hr />
        <p>如果没有使用{'{withRef: true}'},调用getWrappedInstance方法会报错:</p>
        <p>Uncaught Error: To access the wrapped instance, you need to specify {'{ withRef: true }'} as the fourth argument of the connect() call.</p>
        <p>this.c2.refs.WrappedComponent的的值是undefined</p>
      </div>
    )
  }
}
|
// Sort numerically. The default Array.prototype.sort comparator compares
// elements as strings; that happens to give the right order for 0/1
// values, but breaks for multi-digit numbers (e.g. [2, 10] -> [10, 2]),
// so an explicit numeric comparator is the correct general form.
binaryArray.sort((a, b) => a - b);
// binaryArray = [0, 0, 0, 0, 1, 1, 1, 1]
<reponame>kc1116/http-sniffer
package figlet4go
import (
"errors"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
)
// Default font if no other valid font was given or could be loaded.
const defaultFont string = "standard"

// Extension of a font file (figlet font format).
const extension string = "flf"

// Builtin fonts to load at startup from the embedded assets.
var defaultFonts []string = []string{
	"standard",
	"larry3d",
}

// fontManager holds the available fonts.
type fontManager struct {
	// The already read (parsed) fonts, keyed by font name.
	fontLib map[string]*font
	// Fonts discovered in the given paths: font name -> file path.
	// These are registered lazily and only parsed on first use.
	fontList map[string]string
}
// Create a new fontmanager.
// Initializes the fontManager, loads the builtin fonts and returns it.
func newFontManager() *fontManager {
	fm := &fontManager{
		fontLib:  make(map[string]*font),
		fontList: make(map[string]string),
	}
	// NOTE(review): the error returned by loadBuildInFont is ignored
	// here; the builtin assets are compiled in, but a failure would only
	// surface later as a missing font -- confirm this is intentional.
	fm.loadBuildInFont()
	return fm
}
// getFont returns the font registered under fontName, lazily loading it
// from disk if necessary. Falls back to the default font when the
// requested font cannot be found or loaded.
func (fm *fontManager) getFont(fontName string) *font {
	if _, cached := fm.fontLib[fontName]; !cached {
		// Not parsed yet: try a lazy load from the discovered font list.
		if loadErr := fm.loadDiskFont(fontName); loadErr != nil {
			// Could not be loaded either; fall back to the default font.
			fontName = defaultFont
		}
	}
	return fm.fontLib[fontName]
}
// loadFontList walks fontPath recursively and records every *.flf file
// in fm.fontList, keyed by font name with the file path as value.
// Fonts are only registered here, not parsed, for performance.
// Called in the AsciiRenderer.
func (fm *fontManager) loadFontList(fontPath string) error {
	suffix := "." + extension
	return filepath.Walk(fontPath, func(path string, info os.FileInfo, err error) error {
		// Propagate walk errors to the caller.
		if err != nil {
			return err
		}
		// Skip directories and anything without the font extension.
		if info.IsDir() || !strings.HasSuffix(info.Name(), suffix) {
			return nil
		}
		// Register under the bare font name (file name minus extension).
		fm.fontList[strings.TrimSuffix(info.Name(), suffix)] = path
		return nil
	})
}
// loadDiskFont reads and parses a font from disk and registers the
// result in fm.fontLib. The font must already be present in fm.fontList
// (see loadFontList).
func (fm *fontManager) loadDiskFont(fontName string) error {
	path, registered := fm.fontList[fontName]
	if !registered {
		return errors.New("Font Not Found: " + fontName)
	}
	content, err := ioutil.ReadFile(path)
	if err != nil {
		return err
	}
	parsed, err := parseFontContent(string(content))
	if err != nil {
		return err
	}
	fm.fontLib[fontName] = parsed
	return nil
}
// loadBuildInFont parses every font named in defaultFonts from the
// embedded bindata assets and registers it in fm.fontLib.
func (fm *fontManager) loadBuildInFont() error {
	for _, name := range defaultFonts {
		// Fetch the embedded font file contents.
		raw, err := Asset(name + "." + extension)
		if err != nil {
			return err
		}
		// Parse and register the font object.
		parsed, err := parseFontContent(string(raw))
		if err != nil {
			return err
		}
		fm.fontLib[name] = parsed
	}
	return nil
}
// parseFontContent parses a figlet (.flf) font from its raw content.
// Used to load fonts from disk and the builtin fonts.
//
// The first line is the font header, e.g. "flf2a$ 6 5 20 15 3 ...":
// field 0 ends with the hardblank character, field 1 is the character
// height, field 5 is the number of comment lines after the header.
// Returns an error (instead of panicking) on malformed input.
func parseFontContent(cont string) (*font, error) {
	lines := strings.Split(cont, "\n")
	// Split the header into its space-separated metadata fields.
	header := strings.Split(lines[0], " ")
	// We index header[1] and header[5] below; a short or empty header
	// would previously have caused an index-out-of-range panic.
	if len(header) < 6 || len(header[0]) == 0 {
		return nil, errors.New("Font content error: invalid header")
	}
	// Line count of the comment section following the header.
	commentEndLine, err := strconv.Atoi(header[5])
	if err != nil {
		return nil, errors.New("Font content error: invalid comment line count")
	}
	// Height of each rendered character in sub-lines.
	height, err := strconv.Atoi(header[1])
	if err != nil {
		return nil, errors.New("Font content error: invalid height")
	}
	// Guard the slice below against a bogus comment count.
	if commentEndLine < 0 || commentEndLine+1 > len(lines) {
		return nil, errors.New("Font content error: comment section out of range")
	}
	font := &font{
		hardblank: header[0][len(header[0])-1:],
		height:    height,
		fontSlice: lines[commentEndLine+1:],
	}
	return font, nil
}
|
#!/bin/bash
#
# Copyright (c) 2017-2020 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# This script will execute the Kata Containers Test Suite.
# The test set to run is selected via the CI_JOB environment variable.

set -e

cidir=$(dirname "$0")
source "${cidir}/lib.sh"

export RUNTIME="containerd-shim-kata-v2"
export CI_JOB="${CI_JOB:-default}"

case "${CI_JOB}" in
	"BAREMETAL-PMEM")
		echo "INFO: Running pmem integration test"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make pmem"
		;;
	"CRI_CONTAINERD_K8S")
		echo "INFO: Running stability test"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make stability"
		echo "INFO: Containerd checks"
		sudo -E PATH="$PATH" bash -c "make cri-containerd"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make kubernetes"
		echo "INFO: Running vcpus test"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make vcpus"
		echo "INFO: Skipping pmem test: Issue: https://github.com/kata-containers/tests/issues/3223"
		echo "INFO: Running stability test with sandbox_cgroup_only"
		export TEST_SANDBOX_CGROUP_ONLY=true
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make stability"
		# echo "INFO: Running pmem integration test"
		# sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make pmem"
		echo "INFO: Running ksm test"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make ksm"
		;;
	"CRI_CONTAINERD_K8S_COMPLETE")
		echo "INFO: Running e2e kubernetes tests"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make kubernetes-e2e"
		;;
	"CRI_CONTAINERD_K8S_MINIMAL")
		echo "INFO: Running e2e kubernetes tests"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make kubernetes-e2e"
		echo "INFO: Running tracing test"
		sudo -E PATH="$PATH" bash -c "make tracing"
		;;
	"CRIO_K8S")
		echo "INFO: Running kubernetes tests"
		sudo -E PATH="$PATH" bash -c "make kubernetes"
		;;
	"CRIO_K8S_COMPLETE")
		echo "INFO: Running kubernetes tests (minimal) with CRI-O"
		sudo -E PATH="$PATH" bash -c "make kubernetes-e2e"
		;;
	"CRIO_K8S_MINIMAL")
		echo "INFO: Running kubernetes tests (minimal) with CRI-O"
		sudo -E PATH="$PATH" bash -c "make kubernetes-e2e"
		;;
	"CLOUD-HYPERVISOR-K8S-CRIO")
		echo "INFO: Running kubernetes tests"
		sudo -E PATH="$PATH" bash -c "make kubernetes"
		;;
	"CLOUD-HYPERVISOR-K8S-CONTAINERD")
		echo "INFO: Containerd checks"
		sudo -E PATH="$PATH" bash -c "make cri-containerd"
		echo "INFO: Running kubernetes tests with containerd"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make kubernetes"
		;;
	"CLOUD-HYPERVISOR-K8S-CONTAINERD-MINIMAL")
		echo "INFO: Running e2e kubernetes tests"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make kubernetes-e2e"
		;;
	"CLOUD-HYPERVISOR-K8S-CONTAINERD-FULL")
		echo "INFO: Running complete e2e kubernetes tests"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make kubernetes-e2e"
		;;
	"FIRECRACKER")
		echo "INFO: Running Kubernetes tests with Firecracker"
		sudo -E PATH="$PATH" bash -c "make kubernetes"
		;;
	"VFIO")
		echo "INFO: Running VFIO functional tests"
		sudo -E PATH="$PATH" CRI_RUNTIME="containerd" bash -c "make vfio"
		;;
	"METRICS")
		export RUNTIME="kata-runtime"
		export CTR_RUNTIME="io.containerd.run.kata.v2"
		# Bug fix: config_path was previously only exported further down
		# (after the qemu metrics run), yet it is used by the very first
		# symlink below -- which therefore operated on an empty path.
		# Define it up front so both hypervisor configurations work.
		export config_path="/usr/share/defaults/kata-containers"
		sudo -E ln -sf "${config_path}/configuration-qemu.toml" "${config_path}/configuration.toml"
		echo "INFO: Running qemu metrics tests"
		sudo -E PATH="$PATH" ".ci/run_metrics_PR_ci.sh"
		echo "INFO: Running cloud hypervisor metrics tests"
		export KATA_HYPERVISOR="cloud-hypervisor"
		tests_repo="github.com/kata-containers/tests"
		pushd "${GOPATH}/src/${tests_repo}"
		echo "INFO: Install cloud hypervisor"
		sudo -E PATH="$PATH" ".ci/install_cloud_hypervisor.sh"
		popd
		echo "INFO: Use cloud hypervisor configuration"
		sudo -E ln -sf "${config_path}/configuration-clh.toml" "${config_path}/configuration.toml"
		echo "INFO: Running cloud hypervisor metrics tests"
		sudo -E PATH="$PATH" ".ci/run_metrics_PR_ci.sh"
		;;
	"METRICS_EXPERIMENTAL")
		sudo -E PATH="$PATH" bash -c "./integration/kubernetes/e2e_conformance/setup.sh"
		# Some k8s cli commands have extra output using DEBUG env var.
		unset DEBUG
		sudo -E PATH="$PATH" bash -c 'make -C "./metrics/storage/fio-k8s/" "test"'
		sudo -E PATH="$PATH" bash -c 'make -C "./metrics/storage/fio-k8s/" "run"'
		sudo -E PATH="$PATH" bash -c "./integration/kubernetes/cleanup_env.sh"
		;;
	"VIRTIOFS_EXPERIMENTAL")
		sudo -E PATH="$PATH" bash -c "make filesystem"
		;;
	*)
		# Default job: static checks plus the functional/integration suite.
		echo "INFO: Running checks"
		sudo -E PATH="$PATH" bash -c "make check"
		echo "INFO: Running functional and integration tests ($PWD)"
		sudo -E PATH="$PATH" bash -c "make test"
		;;
esac
|
package de.judgeman.H2SpringFx.Services;
import de.judgeman.H2SpringFx.Model.SettingEntry;
import de.judgeman.H2SpringFx.Repositories.SettingEntryRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Created by <NAME> on Mon 30/03/2020
 *
 * Simple key/value persistence for application settings, backed by the
 * SettingEntry repository.
 */
@Service
public class SettingService {

    /** Key under which the current UI language is stored. */
    public static final String LANGUAGE_ENTRY_KEY = "currentLanguage";
    /** Key under which the dialog-usage preference is stored. */
    public static final String USE_DIALOG_ENTRY_KEY = "useDialog";

    @Autowired
    private SettingEntryRepository settingEntryRepository;

    /**
     * Stores {@code value} under {@code key}, creating the entry when it
     * does not exist yet.
     */
    public void saveSetting(String key, String value) {
        SettingEntry entry = settingEntryRepository.findById(key).orElseGet(() -> {
            SettingEntry created = new SettingEntry();
            created.setKey(key);
            return created;
        });
        entry.setValue(value);
        settingEntryRepository.save(entry);
    }

    /**
     * Returns the value stored under {@code key}, or {@code null} when no
     * such setting exists.
     */
    public String loadSetting(String key) {
        return settingEntryRepository.findById(key)
                .map(SettingEntry::getValue)
                .orElse(null);
    }

    /**
     * Deletes the setting stored under {@code key}.
     *
     * @return {@code true} when an entry existed and was removed
     */
    public boolean deleteSetting(String key) {
        return settingEntryRepository.findById(key)
                .map(entry -> {
                    settingEntryRepository.delete(entry);
                    return true;
                })
                .orElse(false);
    }
}
|
const fs = require("fs");
const path = require("path");
const NodemonPlugin = require("nodemon-webpack-plugin");
const TsconfigPathsPlugin = require("tsconfig-paths-webpack-plugin");
const CleanWebpackPlugin = require("clean-webpack-plugin");

// Absolute path to the project's tsconfig, resolved from the real cwd
// (realpathSync follows symlinks, e.g. in monorepo setups).
const paths = {
  TS_CONFIG: path.resolve(fs.realpathSync(process.cwd()), "tsconfig.json")
};

module.exports = {
  entry: "./src/index.tsx",
  output: {
    path: path.resolve("dist"),
    filename: "store-locator.js",
    // Exposed as the global "StoreLocator" in UMD form, so the bundle is
    // consumable from CommonJS, AMD, or a plain <script> tag.
    library: "StoreLocator",
    libraryTarget: "umd"
  },
  resolve: {
    extensions: [".webpack.js", ".web.js", ".ts", ".tsx", ".js"],
    // Honor "paths" aliases from tsconfig.json during module resolution.
    plugins: [new TsconfigPathsPlugin({ configFile: paths.TS_CONFIG })]
  },
  node: {
    // Keep the real __dirname at runtime instead of webpack's mock.
    __dirname: true
  },
  module: {
    rules: [
      {
        test: /\.tsx?$/,
        exclude: /(node_modules)/,
        use: "ts-loader"
      },
      {
        // Inline small assets (< ~100 kB) as data URIs via url-loader.
        test: /\.(png|jpg|gif|svg|eot|ttf|woff|woff2)$/,
        use: {
          loader: "url-loader",
          options: {
            limit: 100000
          }
        }
      }
    ]
  },
  // Wipe dist/ before each build, then restart the server on rebuilds.
  // NOTE(review): the array-argument CleanWebpackPlugin API is from the
  // v1/v2 line of the plugin -- confirm against the installed version.
  plugins: [new CleanWebpackPlugin(["dist"]), new NodemonPlugin()]
};
|
module Ehonda
  class Configuration
    # Configuration for a queue's redrive (dead-lettering) policy.
    # Validates that the receive-count limit is sane and that a dead
    # letter queue is named whenever the policy is enabled.
    class RedrivePolicyConfiguration
      include Validatable

      attr_accessor :enabled,
                    :max_receive_count,
                    :dead_letter_queue

      def initialize owner
        @owner = owner
        @enabled = false
        @max_receive_count = 10
      end

      # Collects validation errors via the Validatable mixin.
      def validate
        valid_range = (1..1000)
        unless valid_range.include? max_receive_count
          errors << "#{@owner.name}.redrive_policy.max_receive_count must be in the range 1..1000"
        end
        if enabled && dead_letter_queue.blank?
          errors << "#{@owner.name}.redrive_policy.dead_letter_queue is required"
        end
      end

      # Copies this configuration's values onto another redrive policy.
      def copy_onto redrive_policy
        redrive_policy.enabled           = enabled
        redrive_policy.max_receive_count = max_receive_count
        redrive_policy.dead_letter_queue = dead_letter_queue
      end
    end
  end
end
|
#!/usr/bin/env bash
# ==============================================================================
# Home Assistant Community Add-ons: Bashio
# Bashio is an bash function library for use with Home Assistant add-ons.
#
# It contains a set of commonly used operations and can be used
# to be included in add-on scripts to reduce code duplication across add-ons.
# ==============================================================================

# Defaults
readonly __BASHIO_DEFAULT_ADDON_CONFIG="/data/options.json"
readonly __BASHIO_DEFAULT_CACHE_DIR="/dev/shm/bashio"
readonly __BASHIO_DEFAULT_HIBP_ENDPOINT="https://api.pwnedpasswords.com/range"
readonly __BASHIO_DEFAULT_LOG_FORMAT="[{TIMESTAMP}] {LEVEL}: {MESSAGE}"
readonly __BASHIO_DEFAULT_LOG_LEVEL=5 # Defaults to INFO
readonly __BASHIO_DEFAULT_LOG_TIMESTAMP="%T"
readonly __BASHIO_DEFAULT_SUPERVISOR_API="http://supervisor"
readonly __BASHIO_DEFAULT_SUPERVISOR_TOKEN=""

# Exit codes
readonly __BASHIO_EXIT_OK=0 # Successful termination
readonly __BASHIO_EXIT_NOK=1 # Termination with errors

# Log levels: higher number = more verbose (OFF=0 ... ALL=8).
readonly __BASHIO_LOG_LEVEL_ALL=8
readonly __BASHIO_LOG_LEVEL_DEBUG=6
readonly __BASHIO_LOG_LEVEL_ERROR=2
readonly __BASHIO_LOG_LEVEL_FATAL=1
readonly __BASHIO_LOG_LEVEL_INFO=5
readonly __BASHIO_LOG_LEVEL_NOTICE=4
readonly __BASHIO_LOG_LEVEL_OFF=0
readonly __BASHIO_LOG_LEVEL_TRACE=7
readonly __BASHIO_LOG_LEVEL_WARNING=3
# Maps each numeric level to its display name.
readonly -A __BASHIO_LOG_LEVELS=(
	[${__BASHIO_LOG_LEVEL_OFF}]="OFF"
	[${__BASHIO_LOG_LEVEL_FATAL}]="FATAL"
	[${__BASHIO_LOG_LEVEL_ERROR}]="ERROR"
	[${__BASHIO_LOG_LEVEL_WARNING}]="WARNING"
	[${__BASHIO_LOG_LEVEL_NOTICE}]="NOTICE"
	[${__BASHIO_LOG_LEVEL_INFO}]="INFO"
	[${__BASHIO_LOG_LEVEL_DEBUG}]="DEBUG"
	[${__BASHIO_LOG_LEVEL_TRACE}]="TRACE"
	[${__BASHIO_LOG_LEVEL_ALL}]="ALL"
)

# Colors: ANSI escape sequences for foreground (3x) / background (4x).
readonly __BASHIO_COLORS_ESCAPE="\033[";
readonly __BASHIO_COLORS_RESET="${__BASHIO_COLORS_ESCAPE}0m"
readonly __BASHIO_COLORS_DEFAULT="${__BASHIO_COLORS_ESCAPE}39m"
readonly __BASHIO_COLORS_BLACK="${__BASHIO_COLORS_ESCAPE}30m"
readonly __BASHIO_COLORS_RED="${__BASHIO_COLORS_ESCAPE}31m"
readonly __BASHIO_COLORS_GREEN="${__BASHIO_COLORS_ESCAPE}32m"
readonly __BASHIO_COLORS_YELLOW="${__BASHIO_COLORS_ESCAPE}33m"
readonly __BASHIO_COLORS_BLUE="${__BASHIO_COLORS_ESCAPE}34m"
readonly __BASHIO_COLORS_MAGENTA="${__BASHIO_COLORS_ESCAPE}35m"
readonly __BASHIO_COLORS_CYAN="${__BASHIO_COLORS_ESCAPE}36m"
readonly __BASHIO_COLORS_LIGHT_GRAY="${__BASHIO_COLORS_ESCAPE}37m"
readonly __BASHIO_COLORS_BG_DEFAULT="${__BASHIO_COLORS_ESCAPE}49m"
readonly __BASHIO_COLORS_BG_BLACK="${__BASHIO_COLORS_ESCAPE}40m"
readonly __BASHIO_COLORS_BG_RED="${__BASHIO_COLORS_ESCAPE}41m"
readonly __BASHIO_COLORS_BG_GREEN="${__BASHIO_COLORS_ESCAPE}42m"
readonly __BASHIO_COLORS_BG_YELLOW="${__BASHIO_COLORS_ESCAPE}43m"
readonly __BASHIO_COLORS_BG_BLUE="${__BASHIO_COLORS_ESCAPE}44m"
readonly __BASHIO_COLORS_BG_MAGENTA="${__BASHIO_COLORS_ESCAPE}45m"
readonly __BASHIO_COLORS_BG_CYAN="${__BASHIO_COLORS_ESCAPE}46m"
readonly __BASHIO_COLORS_BG_WHITE="${__BASHIO_COLORS_ESCAPE}47m"
#!/bin/sh
# Run pylint on the given file or package, with reports suppressed and a
# project-specific list of messages disabled.
#
# Usage: ./lint.sh TARGET
#
# NOTE(review): --include-ids was removed in pylint 1.0; drop that flag
# if a modern pylint rejects it.
#
# Bug fix: "$1" is now quoted so targets containing spaces are passed as
# a single argument.
pylint --reports=n --include-ids=y --disable=C0301,W0611,C0103,C0111,F0401,E0611,E1101,W0614,E0602,W0602,W0612,R0201,W0231,W0105,W0603,R0903,W0232,W0102,R0915,W0401,W0613,R0914,E0102,C0302,E1103,W0511,W0212,E0202,R0902,E1121,R0913,R0904,E1123,R0912,W0702,W0622,W0201 "$1"
|
<filename>app/home/db_router.py<gh_stars>0
from .models import (
# BIZ MODELS
TableCharts,
TableForecasts,
#DATA
HolidaysEventsModel,
OilModel,
SampleSubmissionModel,
StoresModel,
TestModel,
TrainModel,
TransactionsModel,
# MUS MODELS
TableSongs,
# VBT MODELS
TableBacktests,
TableCompanyInfo,
TableCurrencyuInfo,
# SYMBOL MODELS
# LOGS MODELS
# TableLogsMus,
# TableLogsBiz,
# TableLogsVbt,
)
# (Uncomment)
# SCHEMA / MODELS LIST

# Models routed to the 'biz' database.
BIZ_MODELS = [
    TableCharts,
    TableForecasts,
    HolidaysEventsModel,
    OilModel,
    SampleSubmissionModel,
    StoresModel,
    TestModel,
    TrainModel,
    TransactionsModel,
]

# Models routed to the 'mus' database.
MUS_MODELS = [
    TableSongs,
]

# Reserved for symbol models; currently empty and not consulted by the
# router class below.
SYMBOL_MODELS = [
]

# Models routed to the 'vbt' database.
VBT_MODELS = [
    TableBacktests,
    TableCompanyInfo,
    TableCurrencyuInfo,
]

# Log models (commented out until the LOGS routing is enabled).
# LOGS_MODELS = [
#     TableLogsMus,
#     TableLogsBiz,
#     TableLogsVbt,
# ]
# (Uncomment)
class MyDBRouter(object):
    """Django database router sending each model to its schema's database.

    Bug fixed: the original class defined ``db_for_read``/``db_for_write``
    three separate times (BIZ, MUS, VBT). Python keeps only the last
    definition of a method, so only the VBT pair survived and BIZ/MUS
    routing silently never ran. A single pair of methods now checks every
    model list in turn.
    """

    def db_for_read(self, model, **hints):
        """Return the database alias to read ``model`` from."""
        return self._route(model)

    def db_for_write(self, model, **hints):
        """Return the database alias to write ``model`` to."""
        return self._route(model)

    @staticmethod
    def _route(model):
        # Check each schema's model list; returning None lets Django fall
        # through to the default database.
        if model in BIZ_MODELS:
            return 'biz'
        if model in MUS_MODELS:
            return 'mus'
        if model in VBT_MODELS:
            return 'vbt'
        # LOGS routing (uncomment together with LOGS_MODELS above):
        # if model in LOGS_MODELS:
        #     return 'logs'
        return None
#-*- coding: utf-8 -*-
#2#::.. Last edit: - Fri Feb 16 13:38:13 EST 2018 - by: - steelalive - ..::## #_# - VERSION=0.0.0.7 - #_# #@#160218#@# #2#
#3#::..#####################_MAIN_#######################..::#3#
# Author: #
# Print an error and abort with a non-zero status.
# NOTE(review): `error` is not defined in this file -- presumably
# provided by the sourcing environment; confirm before running standalone.
die() {
	error "$@"
	exit 1
}

# Mounting requires root.
((EUID == 0)) || die 'This script must be run with root privileges'

# Optional first argument: target root directory (defaults to /).
[[ "$1" ]] && place="$1/"

# Ensure the standard mount points exist under the target root.
for folder in proc sys dev run tmp; do
	mkdir -p "$place$folder"
done

# Recursively bind the live system's pseudo-filesystems into the target
# (typical chroot preparation).
mount -v -o rbind /proc "${place}proc"
mount -v -o rbind /sys "${place}sys"
mount -v -o rbind /dev "${place}dev"
mount -v -o rbind /run "${place}run"
mount -v -o rbind /tmp "${place}tmp"
exit

# NOTE(review): everything below is unreachable -- the script exits
# above. It appears to be an alternative strategy (fresh mounts instead
# of rbinds) kept for reference; confirm before deleting.
mount proc "${place}proc" -t proc -o nosuid,noexec,nodev
mount sys "${place}sys" -t sysfs -o nosuid,noexec,nodev,ro
mount udev "${place}dev" -t devtmpfs -o mode=0755,nosuid
[[ -e dev/block ]] || mount -o rbind /dev "${place}dev"
mkdir "${place}dev/pts" "${place}dev/shm"
mount devpts "${place}dev/pts" -t devpts -o mode=0620,gid=5,nosuid,noexec
mount shm "${place}dev/shm" -t tmpfs -o mode=1777,nosuid,nodev
mount run "${place}run" -t tmpfs -o nosuid,nodev,mode=0755
mount tmp "${place}tmp" -t tmpfs -o mode=1777,strictatime,nodev,nosuid
#for i in sys proc dev run
#[[ -e /$i ]] || continue
#mount -o rbind /$i $i
#fi
|
#!/bin/bash
# Generate node configuration for 5 exonum cryptocurrency-advanced nodes
# on this host. The network has 25 validators in total; each host owns 5
# consecutive node ids.
#
# $1 - 1-based host index (this host's nodes get ids 5*$1-4 .. 5*$1)
# $2 - this host's IP address (peer ports 6331-6335)
cd
cd exonum/examples/cryptocurrency-advanced/backend

# Start from a clean working directory.
rm -Rf example
mkdir example

# N is the highest node id assigned to this host.
N=$((5*$1))
IP=$2

# Shared template for the 25-validator network.
exonum-cryptocurrency-advanced generate-template example/common.toml --validators-count 25

# One config per node, each listening on its own peer port.
exonum-cryptocurrency-advanced generate-config example/common.toml example/$(($N-4)) --peer-address $IP:6331 -n
exonum-cryptocurrency-advanced generate-config example/common.toml example/$(($N-3)) --peer-address $IP:6332 -n
exonum-cryptocurrency-advanced generate-config example/common.toml example/$(($N-2)) --peer-address $IP:6333 -n
exonum-cryptocurrency-advanced generate-config example/common.toml example/$(($N-1)) --peer-address $IP:6334 -n
exonum-cryptocurrency-advanced generate-config example/common.toml example/$N --peer-address $IP:6335 -n
|
import Ember from 'ember';
export default Ember.Route.extend({
  // Preload all services and instances, then resolve the model with the
  // current host name.
  model: function() {
    const host = this.modelFor('host').get('host');
    const store = this.get('store');
    const preload = [
      store.findAll('service'),
      store.findAll('instance'),
    ];
    return Ember.RSVP.all(preload).then(() => host);
  }
});
|
import io.vertx.core.http.HttpServerResponse;
import java.util.HashSet;
/**
* Created by leng on 4/8/16.
*/
public class ResponseHeader {
public static void put(HttpServerResponse response, String headerKey, HashSet<String> headerList) {
// Convert the HashSet of header values to an array of strings
String[] headerValues = headerList.toArray(new String[0]);
// Add the header key and values to the HTTP server response
response.putHeader(headerKey, headerValues);
}
} |
import datetime
import operator
import time
from walrus.containers import ConsumerGroup
from walrus.containers import ConsumerGroupStream
from walrus.utils import basestring_type
from walrus.utils import decode
from walrus.utils import decode_dict
from walrus.utils import make_python_attr
def id_to_datetime(ts):
    """Split a raw stream id (e.g. ``b'1526000000000-0'``) into a
    ``(datetime, sequence)`` pair. The millisecond timestamp portion is
    converted to a local datetime."""
    millis, seq = ts.split(b'-', 1)
    timestamp = datetime.datetime.fromtimestamp(int(millis) / 1000.)
    return timestamp, int(seq)
def datetime_to_id(dt, seq=0):
    """Convert a datetime (plus optional sequence number) into a Redis
    stream id string of the form ``'<milliseconds>-<seq>'``."""
    millis = time.mktime(dt.timetuple()) * 1000 + dt.microsecond / 1000
    return '%s-%s' % (int(millis), seq)
class Message(object):
    """
    A message stored in a Redis stream.
    When reading messages from a :py:class:`TimeSeries`, the usual 2-tuple of
    (message id, data) is unpacked into a :py:class:`Message` instance. The
    message instance provides convenient access to the message timestamp as a
    datetime. Additionally, the message data is UTF8-decoded for convenience.
    """
    __slots__ = ('stream', 'timestamp', 'sequence', 'data', 'message_id')

    def __init__(self, stream, message_id, data):
        self.stream = stream
        # Decoded (str) copy of the id for display/round-tripping; the raw
        # id is used below because id_to_datetime splits on b'-' -- it is
        # presumably always bytes here (TODO confirm against callers).
        self.message_id = decode(message_id)
        self.data = decode_dict(data)
        self.timestamp, self.sequence = id_to_datetime(message_id)

    def __repr__(self):
        return '<Message %s %s: %s>' % (self.stream, self.message_id,
                                        self.data)
def normalize_id(message_id):
    """Coerce any of the supported id representations (string, datetime,
    (datetime, seq) tuple, or Message) into a stream id. Unrecognized
    values are passed through unchanged."""
    if isinstance(message_id, basestring_type):
        return message_id
    if isinstance(message_id, datetime.datetime):
        return datetime_to_id(message_id)
    if isinstance(message_id, tuple):
        return datetime_to_id(*message_id)
    if isinstance(message_id, Message):
        return message_id.message_id
    return message_id
def normalize_ids(id_list):
    """Apply :py:func:`normalize_id` to every element of ``id_list``."""
    return list(map(normalize_id, id_list))
def xread_to_messages(resp):
    """Convert a raw XREAD response into a flat list of Message objects.

    Returns None when the read returned nothing (e.g. a block timeout).
    """
    if resp is None: return
    accum = []
    for stream, messages in resp:
        accum.extend(xrange_to_messages(stream, messages))
    # If multiple streams are present, sort them by timestamp.
    # NOTE(review): the key is the decoded id string, so this is a
    # lexicographic sort -- for ids of equal digit-length it matches
    # timestamp order; confirm that is acceptable here.
    if len(resp) > 1:
        accum.sort(key=operator.attrgetter('message_id'))
    return accum
def xrange_to_messages(stream, resp):
    """Wrap each raw (message id, data) pair from an XRANGE response in a
    :py:class:`Message` for ``stream``."""
    messages = []
    for message_id, data in resp:
        messages.append(Message(stream, message_id, data))
    return messages
class TimeSeriesStream(ConsumerGroupStream):
    """
    Helper for working with an individual stream within the context of a
    :py:class:`TimeSeries` consumer group. This object is exposed as an
    attribute on a :py:class:`TimeSeries` object using the stream key for the
    attribute name.
    This class should not be created directly. It will automatically be added
    to the ``TimeSeries`` object.
    For example::
        ts = db.time_series('events', ['stream-1', 'stream-2'])
        ts.stream_1  # TimeSeriesStream for "stream-1"
        ts.stream_2  # TimeSeriesStream for "stream-2"
    This class implements the same methods as :py:class:`ConsumerGroupStream`,
    with the following differences in behavior:
    * Anywhere an ID (or list of IDs) is accepted, this class will also accept
      a datetime, a 2-tuple of (datetime, sequence), a :py:class:`Message`, in
      addition to a regular bytestring ID.
    * Instead of returning a list of (message id, data) 2-tuples, this class
      returns a list of :py:class:`Message` objects.
    * Data is automatically UTF8 decoded when being read for convenience.
    """
    __slots__ = ('database', 'group', 'key', '_consumer')

    def ack(self, *id_list):
        # Accept any supported id representation (see normalize_id).
        return super(TimeSeriesStream, self).ack(*normalize_ids(id_list))

    def add(self, data, id='*', maxlen=None, approximate=True):
        # Add an entry and convert the returned raw id to (datetime, seq).
        db_id = super(TimeSeriesStream, self).add(data, normalize_id(id),
                                                  maxlen, approximate)
        return id_to_datetime(db_id)

    def claim(self, *id_list, **kwargs):
        # Claim pending messages and wrap the result in Message objects.
        resp = super(TimeSeriesStream, self).claim(*normalize_ids(id_list),
                                                   **kwargs)
        return xrange_to_messages(self.key, resp)

    def delete(self, *id_list):
        return super(TimeSeriesStream, self).delete(*normalize_ids(id_list))

    def get(self, id):
        # Fetch a single message by id via a one-element range query;
        # returns None implicitly when the id does not exist.
        id = normalize_id(id)
        messages = self.range(id, id, 1)
        if messages:
            return messages[0]

    def range(self, start='-', stop='+', count=None):
        # '-' / '+' are Redis' open lower/upper range bounds.
        resp = super(TimeSeriesStream, self).range(
            normalize_id(start),
            normalize_id(stop),
            count)
        return xrange_to_messages(self.key, resp)

    def pending(self, start='-', stop='+', count=1000, consumer=None):
        # Summarize pending entries as ((datetime, seq), consumer,
        # time-since-delivered, delivery-count) tuples.
        start = normalize_id(start)
        stop = normalize_id(stop)
        resp = self.database.xpending_range(self.key, self.group, start, stop,
                                            count, consumer)
        return [(id_to_datetime(msg['message_id']), decode(msg['consumer']),
                 msg['time_since_delivered'], msg['times_delivered'])
                for msg in resp]

    def read(self, count=None, block=None):
        # Returns None implicitly when the underlying read yields nothing.
        resp = super(TimeSeriesStream, self).read(count, block)
        if resp is not None:
            return xrange_to_messages(self.key, resp)

    def set_id(self, id='$'):
        # '$' means "only messages added after this point".
        return super(TimeSeriesStream, self).set_id(normalize_id(id))
class TimeSeries(ConsumerGroup):
    """
    :py:class:`TimeSeries` is a consumer-group that provides a higher level of
    abstraction, reading and writing message ids as datetimes, and returning
    messages using a convenient, lightweight :py:class:`Message` class.
    Rather than creating this class directly, use the
    :py:meth:`Database.time_series` method.
    Each registered stream within the group is exposed as a special attribute
    that provides stream-specific APIs within the context of the group. For
    more information see :py:class:`TimeSeriesStream`.
    Example::
        ts = db.time_series('groupname', ['stream-1', 'stream-2'])
        ts.stream_1  # TimeSeriesStream for "stream-1"
        ts.stream_2  # TimeSeriesStream for "stream-2"
    :param Database database: Redis client
    :param group: name of consumer group
    :param keys: stream identifier(s) to monitor. May be a single stream
        key, a list of stream keys, or a key-to-minimum id mapping. The
        minimum id for each stream should be considered an exclusive
        lower-bound. The '$' value can also be used to only read values
        added *after* our command started blocking.
    :param consumer: name for consumer within group
    :returns: a :py:class:`TimeSeries` instance
    """
    # Per-stream attributes are instances of TimeSeriesStream rather than
    # the base ConsumerGroupStream.
    stream_key_class = TimeSeriesStream

    def read(self, count=None, block=None):
        """
        Read unseen messages from all streams in the consumer group. Wrapper
        for :py:class:`Database.xreadgroup` method.
        :param int count: limit number of messages returned
        :param int block: milliseconds to block, 0 for indefinitely.
        :returns: a list of :py:class:`Message` objects
        """
        resp = super(TimeSeries, self).read(count, block)
        return xread_to_messages(resp)

    def set_id(self, id='$'):
        # Accept datetimes/tuples/Messages in addition to raw ids.
        return super(TimeSeries, self).set_id(normalize_id(id))
|
package net.romvoid95.curseforge.command.base.args;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import io.github.romvoid95.command.CommandEvent;
public class ArgumentIndex {

    /** Parsed arguments in positional order. */
    private List<IArgument<?>> idx;
    /** Matches a whole double-quoted section (a multi-word argument). */
    private final Pattern multiWord = Pattern.compile("(?>\")\\s*(?:.*?)\\s*(?>\")");

    public ArgumentIndex(CommandEvent event) {
        this.idx = new ArrayList<>();
        buildIndex(event);
    }

    /**
     * Splits the event's raw argument string into Arguments. A
     * double-quoted multi-word section, if present, is extracted first
     * and appended as a single argument after the whitespace-separated
     * ones.
     */
    private void buildIndex(CommandEvent event) {
        if (event.getArgs().length() > 0) {
            String commandArgs = event.getArgs();
            String quotedArg = "";
            Matcher matcher = multiWord.matcher(commandArgs);
            if (matcher.find()) {
                // Bug fix: strip the surrounding double quotes; the
                // original removed single quotes ("'"), which the pattern
                // never matches, so the quotes leaked into the argument.
                quotedArg = matcher.group(0).replace("\"", "");
                commandArgs = commandArgs.replace(matcher.group(0), "");
            }
            for (String raw : commandArgs.split("\\s+")) {
                idx.add(new Argument(raw));
            }
            // Bug fix: the original tracked an insertion index with
            // "c =+ 1" (which assigns 1 rather than incrementing), so the
            // quoted argument could land at the wrong position. Appending
            // at the end yields the intended ordering for any arg count.
            if (quotedArg.length() > 0) {
                idx.add(new Argument(quotedArg));
            }
        }
    }

    /** Returns the argument at the given position. */
    public Argument getArg(Integer index) {
        return (Argument) idx.get(index);
    }

    /** True when no arguments were parsed. */
    public boolean isEmpty() {
        return idx.isEmpty();
    }

    /** Number of parsed arguments. */
    public int count() {
        return idx.size();
    }

    /** All parsed arguments, in order. */
    public List<IArgument<?>> list() {
        return idx;
    }
}
|
#!/usr/bin/env bash
# Encode a source video into a DASH rendition ladder (MPD + segments)
# with ffmpeg. Renditions taller than the source, or shorter than the
# requested minimum height, are skipped.
set -e
# Usage create-dash.sh SOURCE_FILE
[[ ! "${1}" ]] && echo "Usage: create-dash.sh SOURCE_FILE " && exit 1

# comment/add lines here to control which renditions would be created
renditions=(
	# resolution  bitrate  audio-rate
	# "426x240 400k 64k"
	"3840x2160 14000k 192k"
	"2560x1440 10000k 192k"
	"1920x1080 5000k 192k"
	"1280x720 2800k 128k"
	"842x480 1400k 128k"
	"640x360 800k 96k"
)
# NOTE(review): z_renditions is never referenced below -- presumably an
# alternate (ascending) ladder kept for reference; confirm before removing.
z_renditions=(
	# resolution  bitrate  audio-rate
	# "426x240 400k 64k"
	"640x360 800k 96k"
	"842x480 1400k 128k"
	"1280x720 2800k 128k"
	"1920x1080 5000k 192k"
	"2560x1440 10000k 192k"
	"3840x2160 14000k 192k"
)

segment_target_duration=2000000       # try to create a new segment every X seconds
max_bitrate_ratio=1.07                # maximum accepted bitrate fluctuations
rate_monitor_buffer_ratio=1.5         # maximum buffer size between bitrate conformance checks

#########################################################################

source="archive/${1}"
target="cache/dash/${source##*/}" # leave only last component of path
mkdir -p ${target}

# Optional second argument: minimum rendition height (defaults to 360).
min_height="$(echo ${2} |grep -oE '^[[:digit:]]+')"
if [ -z ${2} ];then
	min_height=360
fi
echo $target

# At most (default_threshold + 1) renditions are emitted.
default_threshold=4
count=0

# Keyframe interval = 2 seconds' worth of frames, rounded to a multiple
# of 10; falls back to 50 when the frame rate cannot be probed.
key_frames_interval="$(echo `ffprobe ${source} 2>&1 | grep -oE '[[:digit:]]+(.[[:digit:]]+)? fps' | grep -oE '[[:digit:]]+(.[[:digit:]]+)?'`*2 | bc || echo '')"
key_frames_interval=${key_frames_interval:-50}
key_frames_interval=$(echo `printf "%.1f\n" $(bc -l <<<"$key_frames_interval/10")`*10 | bc) # round
key_frames_interval=${key_frames_interval%.*} # truncate to integer

# Probe the source resolution so oversized renditions can be skipped.
frame_resolution="$(ffprobe -v error -select_streams v:0 -show_entries stream=height,width -of csv=s=x:p=0 ${source})"
frame_width="$(echo ${frame_resolution} | grep -oE '^[[:digit:]]+')"
frame_height="$(echo ${frame_resolution} | grep -oE '[[:digit:]]+$')"

# static parameters that are similar for all renditions
static_params=" -c:a aac -ar 48000 -c:v libx264 -profile:v main -sc_threshold 0 -strict -2"
static_params+=" -g ${key_frames_interval} -keyint_min ${key_frames_interval}"
# NOTE(review): -min_seg_duration is deprecated in newer ffmpeg in favor
# of -seg_duration; confirm against the installed version.
static_dash=" -use_timeline 1 -use_template 1 -min_seg_duration ${segment_target_duration}"
#static_dash+=" -adaptation_sets \"id=0,streams=v id=1,streams=a\""

# misc params
misc_params="-hide_banner -y"

cmd_map=""
cmd=""
for rendition in "${renditions[@]}"; do
	# drop extraneous spaces
	# NOTE(review): this is plain pattern substitution, not extglob, so it
	# only rewrites a literal "[[:space:]]+" -- it likely does not collapse
	# whitespace as intended; harmless for the single-spaced entries above.
	rendition="${rendition/[[:space:]]+/ }"

	# rendition fields
	resolution="$(echo ${rendition} | cut -d ' ' -f 1)"
	bitrate="$(echo ${rendition} | cut -d ' ' -f 2)"
	audiorate="$(echo ${rendition} | cut -d ' ' -f 3)"

	# calculated fields
	width="$(echo ${resolution} | grep -oE '^[[:digit:]]+')"
	height="$(echo ${resolution} | grep -oE '[[:digit:]]+$')"
	maxrate="$(echo "`echo ${bitrate} | grep -oE '[[:digit:]]+'`*${max_bitrate_ratio}" | bc)"
	bufsize="$(echo "`echo ${bitrate} | grep -oE '[[:digit:]]+'`*${rate_monitor_buffer_ratio}" | bc)"
	bandwidth="$(echo ${bitrate} | grep -oE '[[:digit:]]+')000"
	name="${height}p"

	# Skip renditions taller than the source or below the minimum height.
	if [ ${frame_height} -lt ${height} ] || [ ${height} -lt ${min_height} ]; then
		continue
	fi

	# One mapped video + audio output pair per accepted rendition.
	cmd+=" -map 0:v -b:v:${count} ${bitrate} -s:v:${count} ${width}x${height} -maxrate:${count} ${maxrate%.*}k -bufsize:${count} ${bufsize%.*}k "
	cmd+=" -map 0:a -b:a:${count} ${audiorate} -strict -2"
	#cmd+=" -f dash -init_seg_name ${name}-init-\$RepresentationID\$.m4s -media_seg_name ${name}-\$RepresentationID\$-\$Number%05d\$.m4s ${target}/${name}.mpd"
	let count+=1
	if [ ${default_threshold} -lt ${count} ]; then
		break
	fi
done

# Assemble the final ffmpeg argument list.
cmd_param=${cmd_map}
cmd_param+=${static_params}
cmd_param+=${cmd}
cmd_param+=${static_dash}
cmd_param+=" -f dash ${target}/index.mpd"

# start conversion
echo -e "Executing command:\nffmpeg ${misc_params} -i ${source} ${cmd_param}"
ffmpeg ${misc_params} -i ${source} ${cmd_param}

echo "Done - encoded dash is at ${target}/"
|
#!/bin/bash
# Uninstall backported kernel modules from ${KLIB}${KMODDIR}, mirroring
# the layout found under ${BACKPORT_DIR}.
set -e
source ./scripts/mod_helpers.sh

# Detect which compression suffix the installed modules carry, using the
# "compat" module as the probe.
if test "$(mod_filename compat)" = "compat.ko.gz" ; then
	compr=".gz"
elif test "$(mod_filename compat)" = "compat.ko.xz" ; then
	compr=".xz"
else
	compr=""
fi

# Bug fix: the find pattern must be quoted -- unquoted, the shell would
# expand *.ko against the current directory before find ever sees it
# (breaking whenever a .ko file happens to exist in the cwd).
for driver in $(find ${BACKPORT_DIR} -type f -name "*.ko"); do
	# Map the source tree path to the installed module path.
	mod_name="${driver/${BACKPORT_DIR}/${KLIB}${KMODDIR}}${compr}"
	echo " uninstall" "$mod_name"
	rm -f "$mod_name"
done
|
package HxCKDMS.bows.entity;
import io.netty.buffer.ByteBuf;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.monster.EntityEnderman;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.entity.projectile.EntityArrow;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.play.server.S2BPacketChangeGameState;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.DamageSource;
import net.minecraft.util.MathHelper;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.util.Vec3;
import net.minecraft.world.World;
import cpw.mods.fml.common.network.ByteBufUtils;
import cpw.mods.fml.common.registry.IEntityAdditionalSpawnData;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Arrow entity that remembers the {@link ItemStack} it was fired from, so a
 * custom/modded arrow item can be returned to the player on pickup. The stack
 * and the shooter's name are synced to clients through
 * {@link IEntityAdditionalSpawnData}.
 *
 * The tick logic in {@link #onUpdate()} closely mirrors vanilla
 * {@link EntityArrow}, with {@link #onAirTick()} split out as a hook for
 * particle effects while airborne.
 */
public class EntityHxCArrow extends EntityArrow implements IEntityAdditionalSpawnData {
    /** The item this arrow represents; handed back to the player on pickup. */
    public ItemStack arrowStack;
    public int ticksInAir;
    public int ticksInGround;
    /** Block coordinates the arrow is stuck in (-1 while airborne). */
    public int xTile = -1;
    public int yTile = -1;
    public int zTile = -1;
    /** Block (and its metadata) the arrow is stuck in; used to detect block changes. */
    public Block inTile;
    public int inData;
    public boolean inGround;
    /** Horizontal knockback applied to an entity that is damaged by this arrow. */
    public int knockbackStrength;
    /** Changing this value does nothing **/
    public float speed;

    /** For loading **/
    public EntityHxCArrow(World worldObj) {
        super(worldObj);
    }

    /** For rendering random stuff **/
    public EntityHxCArrow(World worldObj, double x, double y, double z) {
        super(worldObj, x, y, z);
    }

    /** For players **/
    public EntityHxCArrow(World worldObj, EntityLivingBase shooter, float speed, ItemStack stack) {
        super(worldObj, shooter, speed);
        this.arrowStack = stack;
        this.speed = speed;
    }

    /** For mobs **/
    public EntityHxCArrow(World worldObj, EntityLivingBase shooter, EntityLivingBase target, float speed, float dirRandomness, ItemStack stack) {
        super(worldObj, shooter, target, speed, dirRandomness);
        this.arrowStack = stack;
    }

    @Override
    public void setThrowableHeading(double x, double y, double z, float speed, float dirRandomness) {
        super.setThrowableHeading(x, y, z, speed, dirRandomness);
        // A fresh heading restarts the in-ground despawn timer.
        this.ticksInGround = 0;
    }

    @Override
    @SideOnly(Side.CLIENT)
    public void setVelocity(double xSpeed, double ySpeed, double zSpeed) {
        super.setVelocity(xSpeed, ySpeed, zSpeed);
        if (this.prevRotationPitch == 0.0F && this.prevRotationYaw == 0.0F) this.ticksInGround = 0;
    }

    @Override
    public void readEntityFromNBT(NBTTagCompound tag) {
        super.readEntityFromNBT(tag);
        // Restore stuck-in-block state; key names mirror vanilla EntityArrow's NBT layout.
        this.xTile = tag.getShort("xTile");
        this.yTile = tag.getShort("yTile");
        this.zTile = tag.getShort("zTile");
        this.ticksInGround = tag.getShort("life");
        this.inTile = Block.getBlockById(tag.getByte("inTile") & 255);
        this.inData = tag.getByte("inData") & 255;
        this.inGround = tag.getByte("inGround") == 1;
        this.arrowStack = ItemStack.loadItemStackFromNBT(tag.getCompoundTag("arrowStack"));
    }

    @Override
    public void writeEntityToNBT(NBTTagCompound tag) {
        super.writeEntityToNBT(tag);
        tag.setShort("xTile", (short) this.xTile);
        tag.setShort("yTile", (short) this.yTile);
        tag.setShort("zTile", (short) this.zTile);
        tag.setShort("life", (short) this.ticksInGround);
        tag.setByte("inTile", (byte) Block.getIdFromBlock(this.inTile));
        tag.setByte("inData", (byte) this.inData);
        tag.setByte("inGround", (byte) (this.inGround ? 1 : 0));
        // Only persist the stack when present; absent tag loads back as null.
        if (this.arrowStack != null) tag.setTag("arrowStack", this.arrowStack.writeToNBT(new NBTTagCompound()));
    }

    /**
     * Server-side pickup: if the arrow is settled in the ground and pickup is
     * allowed, try to give {@link #arrowStack} back to the player.
     */
    @Override
    public void onCollideWithPlayer(EntityPlayer player) {
        if (!this.worldObj.isRemote && this.inGround && this.arrowShake <= 0) {
            // canBePickedUp: 1 = anyone, 2 = creative players only.
            boolean pickedUp = this.canBePickedUp == 1 || this.canBePickedUp == 2 && player.capabilities.isCreativeMode;
            // For normal pickup, the stack must actually fit in the inventory.
            if (this.canBePickedUp == 1 && !player.inventory.addItemStackToInventory(this.arrowStack)) pickedUp = false;
            if (pickedUp) {
                this.playSound("random.pop", 0.2F, ((this.rand.nextFloat() - this.rand.nextFloat()) * 0.7F + 1.0F) * 2.0F);
                player.onItemPickup(this, 1);
                this.setDead();
            }
        }
    }

    @Override
    public void setKnockbackStrength(int knockback) {
        this.knockbackStrength = knockback;
    }

    public int getKnockbackStrength() {
        return this.knockbackStrength;
    }

    @Override
    public boolean canAttackWithItem() {
        return false;
    }

    /**
     * Per-tick movement, collision detection and hit handling. Follows vanilla
     * EntityArrow: trace the motion vector against blocks and entities, apply
     * damage/knockback on an entity hit, or embed the arrow in a block.
     */
    @Override
    public void onUpdate() {
        // No-clip is a thing now (pass through blocks and hit only entities)
        //this.noClip = true;
        this.onEntityUpdate();
        if (this.prevRotationPitch == 0.0F && this.prevRotationYaw == 0.0F) {
            // First tick: derive initial yaw/pitch from the motion vector.
            float motionXZ = MathHelper.sqrt_double(this.motionX * this.motionX + this.motionZ * this.motionZ);
            this.prevRotationYaw = this.rotationYaw = (float) (Math.atan2(this.motionX, this.motionZ) * 180.0D / Math.PI);
            this.prevRotationPitch = this.rotationPitch = (float) (Math.atan2(this.motionY, motionXZ) * 180.0D / Math.PI);
        }
        // Re-check whether we are still inside the block we think we hit.
        Block inBlock = this.worldObj.getBlock(this.xTile, this.yTile, this.zTile);
        if (!this.noClip && inBlock.getMaterial() != Material.air) {
            inBlock.setBlockBoundsBasedOnState(this.worldObj, this.xTile, this.yTile, this.zTile);
            AxisAlignedBB blockBB = inBlock.getCollisionBoundingBoxFromPool(this.worldObj, this.xTile, this.yTile, this.zTile);
            if (blockBB != null && blockBB.isVecInside(Vec3.createVectorHelper(this.posX, this.posY, this.posZ))) this.inGround = true;
        }
        if (this.arrowShake > 0) --this.arrowShake;
        if (this.inGround) {
            int blockData = this.worldObj.getBlockMetadata(this.xTile, this.yTile, this.zTile);
            // If hasn't moved blocks
            if (inBlock == this.inTile && blockData == this.inData) {
                ++this.ticksInGround;
                // Despawn after 60 seconds stuck in a block (20 ticks/s).
                if (this.ticksInGround == 1200) this.setDead();
            } else { // Block removed/changed
                // Knock the arrow loose with a small random motion.
                this.inGround = false;
                this.motionX *= this.rand.nextFloat() * 0.2F;
                this.motionY *= this.rand.nextFloat() * 0.2F;
                this.motionZ *= this.rand.nextFloat() * 0.2F;
                this.ticksInGround = 0;
                this.ticksInAir = 0;
            }
        } else {
            ++this.ticksInAir;
            Vec3 posVec = Vec3.createVectorHelper(this.posX, this.posY, this.posZ);
            Vec3 nextPosVec = Vec3.createVectorHelper(this.posX + this.motionX, this.posY + this.motionY, this.posZ + this.motionZ);
            MovingObjectPosition moveChecker = null;
            if (!this.noClip) {
                // Ray-trace this tick's motion against blocks; clamp the end
                // point of the segment to the block hit (if any).
                moveChecker = this.worldObj.func_147447_a(posVec, nextPosVec, false, true, false);
                posVec = Vec3.createVectorHelper(this.posX, this.posY, this.posZ);
                nextPosVec = Vec3.createVectorHelper(this.posX + this.motionX, this.posY + this.motionY, this.posZ + this.motionZ);
                if (moveChecker != null) nextPosVec = Vec3.createVectorHelper(moveChecker.hitVec.xCoord, moveChecker.hitVec.yCoord, moveChecker.hitVec.zCoord);
            }
            // Find the closest collidable entity along the motion segment.
            Entity collEnt = null;
            List nearbyEnts = this.worldObj.getEntitiesWithinAABBExcludingEntity(this, this.boundingBox.addCoord(this.motionX, this.motionY, this.motionZ).expand(1.0D, 1.0D, 1.0D));
            double collDistance = 0.0D;
            int i;
            float checkDist;
            for (i = 0; i < nearbyEnts.size(); ++i) {
                Entity curEnt = (Entity) nearbyEnts.get(i);
                // Skip the shooter for the first 5 airborne ticks, invulnerable
                // players, and players the shooter is not allowed to attack.
                if (curEnt.canBeCollidedWith() && (curEnt != this.shootingEntity || this.ticksInAir >= 5)) if (!(curEnt instanceof EntityPlayer && (((EntityPlayer) curEnt).capabilities.disableDamage || this.shootingEntity instanceof EntityPlayer && !((EntityPlayer) this.shootingEntity).canAttackPlayer((EntityPlayer) curEnt)))) {
                    checkDist = 0.3F;
                    AxisAlignedBB checkBB = curEnt.boundingBox.expand(checkDist, checkDist, checkDist);
                    MovingObjectPosition intersection = checkBB.calculateIntercept(posVec, nextPosVec);
                    if (intersection != null) {
                        double intersectDist = posVec.distanceTo(intersection.hitVec);
                        if (intersectDist < collDistance || collDistance == 0.0D) {
                            collEnt = curEnt;
                            collDistance = intersectDist;
                        }
                    }
                }
            }
            // An entity hit takes priority over the block hit from the ray trace.
            if (collEnt != null) {
                moveChecker = new MovingObjectPosition(collEnt);
                this.setDead();
            }
            float motionXYZ;
            float motionXZ;
            if (moveChecker != null) if (moveChecker.entityHit != null) { // Hit an entity
                // Damage scales with current speed; crits add a random bonus.
                motionXYZ = MathHelper.sqrt_double(this.motionX * this.motionX + this.motionY * this.motionY + this.motionZ * this.motionZ);
                int damage = MathHelper.ceiling_double_int(motionXYZ * this.getDamage());
                if (this.getIsCritical()) damage += this.rand.nextInt(damage / 2 + 2);
                DamageSource source = null;
                if (this.shootingEntity == null) source = DamageSource.causeArrowDamage(this, this);
                else source = DamageSource.causeArrowDamage(this, this.shootingEntity);
                // Endermen can't be set on fire by arrows (vanilla behavior).
                if (this.isBurning() && !(moveChecker.entityHit instanceof EntityEnderman)) moveChecker.entityHit.setFire(5);
                if (moveChecker.entityHit.attackEntityFrom(source, damage)) { // Damaged entity
                    if (moveChecker.entityHit instanceof EntityLivingBase) {
                        EntityLivingBase collEntBase = (EntityLivingBase) moveChecker.entityHit;
                        if (!this.worldObj.isRemote) collEntBase.setArrowCountInEntity(collEntBase.getArrowCountInEntity() + 1);
                        // Knockback
                        if (this.getKnockbackStrength() > 0) {
                            motionXZ = MathHelper.sqrt_double(this.motionX * this.motionX + this.motionZ * this.motionZ);
                            if (motionXZ > 0.0F) moveChecker.entityHit.addVelocity(this.motionX * this.knockbackStrength * 0.6000000238418579D / motionXZ, 0.1D, this.motionZ * this.getKnockbackStrength() * 0.6000000238418579D / motionXZ);
                        }
                        // Enchantment effects
                        if (this.shootingEntity != null && this.shootingEntity instanceof EntityLivingBase) {
                            EnchantmentHelper.func_151384_a(collEntBase, this.shootingEntity);
                            EnchantmentHelper.func_151385_b((EntityLivingBase) this.shootingEntity, collEntBase);
                        }
                        // Packet: play the arrow-hit "ding" for the shooter on PvP hits.
                        if (this.shootingEntity != null && moveChecker.entityHit != this.shootingEntity && moveChecker.entityHit instanceof EntityPlayer && this.shootingEntity instanceof EntityPlayerMP) ((EntityPlayerMP) this.shootingEntity).playerNetServerHandler.sendPacket(new S2BPacketChangeGameState(6, 0.0F));
                    }
                    // Sound
                    this.playSound("random.bowhit", 1.0F, 1.2F / (this.rand.nextFloat() * 0.2F + 0.9F));
                    // Arrows bounce off endermen rather than despawning.
                    if (!(moveChecker.entityHit instanceof EntityEnderman)) this.setDead();
                } else { // Didn't do damage
                    // Ricochet: reverse and damp motion, flip facing, restart air timer.
                    this.motionX *= -0.10000000149011612D;
                    this.motionY *= -0.10000000149011612D;
                    this.motionZ *= -0.10000000149011612D;
                    this.rotationYaw += 180.0F;
                    this.prevRotationYaw += 180.0F;
                    this.ticksInAir = 0;
                }
            } else { // Hit a block
                // Record the struck block, snap motion to the hit point and
                // back the arrow slightly out of the surface.
                this.xTile = moveChecker.blockX;
                this.yTile = moveChecker.blockY;
                this.zTile = moveChecker.blockZ;
                this.inTile = this.worldObj.getBlock(this.xTile, this.yTile, this.zTile);
                this.inData = this.worldObj.getBlockMetadata(this.xTile, this.yTile, this.zTile);
                this.motionX = (float) (moveChecker.hitVec.xCoord - this.posX);
                this.motionY = (float) (moveChecker.hitVec.yCoord - this.posY);
                this.motionZ = (float) (moveChecker.hitVec.zCoord - this.posZ);
                motionXYZ = MathHelper.sqrt_double(this.motionX * this.motionX + this.motionY * this.motionY + this.motionZ * this.motionZ);
                this.posX -= this.motionX / motionXYZ * 0.05000000074505806D;
                this.posY -= this.motionY / motionXYZ * 0.05000000074505806D;
                this.posZ -= this.motionZ / motionXYZ * 0.05000000074505806D;
                this.playSound("random.bowhit", 1.0F, 1.2F / (this.rand.nextFloat() * 0.2F + 0.9F));
                this.inGround = true;
                this.arrowShake = 7;
                this.setIsCritical(false);
                if (this.inTile.getMaterial() != Material.air) this.inTile.onEntityCollidedWithBlock(this.worldObj, this.xTile, this.yTile, this.zTile, this);
            }
            this.onAirTick();
            // Integrate position, then smooth yaw/pitch toward the motion direction.
            this.posX += this.motionX;
            this.posY += this.motionY;
            this.posZ += this.motionZ;
            motionXYZ = MathHelper.sqrt_double(this.motionX * this.motionX + this.motionZ * this.motionZ);
            this.rotationYaw = (float) (Math.atan2(this.motionX, this.motionZ) * 180.0D / Math.PI);
            // Normalize prev rotations so the interpolation below takes the short way around.
            for (this.rotationPitch = (float) (Math.atan2(this.motionY, motionXYZ) * 180.0D / Math.PI); this.rotationPitch - this.prevRotationPitch < -180.0F; this.prevRotationPitch -= 360.0F) {}
            while (this.rotationPitch - this.prevRotationPitch >= 180.0F)
                this.prevRotationPitch += 360.0F;
            while (this.rotationYaw - this.prevRotationYaw < -180.0F)
                this.prevRotationYaw -= 360.0F;
            while (this.rotationYaw - this.prevRotationYaw >= 180.0F)
                this.prevRotationYaw += 360.0F;
            this.rotationPitch = this.prevRotationPitch + (this.rotationPitch - this.prevRotationPitch) * 0.2F;
            this.rotationYaw = this.prevRotationYaw + (this.rotationYaw - this.prevRotationYaw) * 0.2F;
            // Apply drag (stronger in water), gravity, and extinguish if wet.
            float speedModifier = 0.99F;
            checkDist = this.getGravity();
            if (this.isInWater()) speedModifier = 0.8F;
            if (this.isWet()) this.extinguish();
            this.motionX *= speedModifier;
            this.motionY *= speedModifier;
            this.motionZ *= speedModifier;
            this.motionY -= checkDist;
            this.setPosition(this.posX, this.posY, this.posZ);
            this.func_145775_I();
        }
    }

    /**
     * Airborne visual effects: crit sparks behind the arrow and bubbles in water.
     */
    protected void onAirTick() {
        // Crit particles
        if (this.getIsCritical()) for (int i = 0; i < 4; ++i)
            this.worldObj.spawnParticle("crit", this.posX + this.motionX * i / 4.0D, this.posY + this.motionY * i / 4.0D, this.posZ + this.motionZ * i / 4.0D, -this.motionX, -this.motionY + 0.2D, -this.motionZ);
        if (this.isInWater()) for (int i = 0; i < 4; ++i)
            this.worldObj.spawnParticle("bubble", this.posX - this.motionX * 0.25D, this.posY - this.motionY * 0.25D, this.posZ - this.motionZ * 0.25D, this.motionX, this.motionY, this.motionZ);
    }

    @Override
    public void writeSpawnData(ByteBuf buffer) {
        // Sync the represented stack and (for player shots) the shooter's name.
        ByteBufUtils.writeItemStack(buffer, this.arrowStack);
        if (this.shootingEntity instanceof EntityPlayer) ByteBufUtils.writeUTF8String(buffer, ((EntityPlayer) this.shootingEntity).getCommandSenderName());
    }

    @Override
    public void readSpawnData(ByteBuf buffer) {
        try {
            this.arrowStack = ByteBufUtils.readItemStack(buffer);
            // The shooter name is only present for player shots; the lookup
            // throws/returns null otherwise, hence the broad catch.
            this.shootingEntity = this.worldObj.getPlayerEntityByName(ByteBufUtils.readUTF8String(buffer));
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    /** Downward acceleration per tick while airborne. */
    public float getGravity() {
        return 0.05F;
    }
}
|
<reponame>pmcg95/g3-q-programmingchallenges
/*
* Copyright (c) 2015-present, Parse, LLC.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.parse.starter;
import android.os.Bundle;
import android.provider.Settings;
import android.support.v7.app.ActionBarActivity;
import android.view.Menu;
import android.view.MenuItem;
import com.parse.*;
import java.util.List;
import android.util.Log;
import android.widget.Button;
import android.view.View;
import android.widget.TextView;
/**
 * Queue demo activity backed by Parse. The device's ANDROID_ID identifies this
 * user; buttons add/remove the device from a shared queue named "Q" and the
 * on-screen text is refreshed with the current queue position.
 */
public class MainActivity extends ActionBarActivity {
    // Stable per-device identifier used as the queue membership key.
    private String android_id;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        android_id = Settings.Secure.getString(this.getContentResolver(),
                Settings.Secure.ANDROID_ID);
        ParseAnalytics.trackAppOpenedInBackground(getIntent());
        // "Add": join the queue, then refresh the displayed position.
        final Button button = (Button) findViewById(R.id.AddButton);
        button.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                QQueue.addSelfToQueue("Q", android_id, new SaveCallback() {
                    @Override
                    public void done(ParseException e) {
                        getAndUpdateQueueValue();
                    }
                });
            }
        });
        // "Remove": leave the queue, then refresh the displayed position.
        final Button removeButton = (Button) findViewById(R.id.RemoveButton);
        removeButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                QQueue.removeSelfFromQueue("Q", android_id, new DeleteCallback() {
                    @Override
                    public void done(ParseException e) {
                        getAndUpdateQueueValue();
                    }
                });
            }
        });
        // "Get position": refresh on demand.
        final Button getPositionButton = (Button) findViewById(R.id.GetPosition);
        getPositionButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                getAndUpdateQueueValue();
            }
        });
        // Show the initial state as soon as the activity comes up.
        getAndUpdateQueueValue();
    }

    /**
     * Asynchronously fetches this device's place in queue "Q" and its profile
     * info from Parse, updating the two TextViews when the callbacks fire.
     */
    public void getAndUpdateQueueValue() {
        QQueue.getPlaceInQueue("Q", android_id, new GetCallback<ParseObject>() {
            public void done(ParseObject object, ParseException e) {
                if (object == null) {
                    // Not enqueued (or lookup failed) — show a placeholder.
                    Log.d("getPlaceInQueue", "Doesn't exist");
                    final TextView textView = (TextView) findViewById(R.id.qText);
                    textView.setText("Not in queue");
                } else {
                    Log.d("getPlaceInQueue", object.getNumber("place").toString());
                    final TextView textView = (TextView) findViewById(R.id.qText);
                    textView.setText(object.getInt("place")+"");
                }
            }
        });
        QQueue.getInfoAboutMe(android_id, new GetCallback<ParseObject>() {
            public void done(ParseObject object, ParseException e) {
                if (object == null) {
                    Log.d("getPlaceInQueue", "Doesn't exist");
                } else {
                    final TextView textView = (TextView) findViewById(R.id.infoText);
                    textView.setText(object.getString("name") + " - " + object.getString("major") + " - " + object.getString("position"));
                }
            }
        });
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
|
#!/bin/bash
# Build every Go source file in this directory as a Go plugin (<name>.so).
# BUGFIX: the original `ls *.go | sed 's/.go//'` stripped the first match of
# the unescaped, unanchored pattern ".go" (any char + "go"), which mangles
# names like "cargo.go"; shell suffix stripping is exact and avoids parsing ls.
for src in *.go
do
	mod="${src%.go}"
	# -s -w strips symbol/debug tables to shrink the plugin binary.
	go build -ldflags="-s -w" -buildmode=plugin -o "./${mod}.so" "./${mod}.go"
done
|
<reponame>mootshell/EasyAsPLY
#!/usr/bin/env python
# NOTE: Python 2 script (print statements, raw_input) — run with python2.
# Generates platform-specific build projects for EasyAsPLY via the bundled gyp.
from subprocess import call
import os
import argparse

if __name__ == '__main__':
    # NOTE(review): the flag is spelled '--quite' (sic, presumably '--quiet');
    # kept as-is since callers may already pass it.
    parser = argparse.ArgumentParser('Generate platform specific build projects')
    parser.add_argument('-q', '--quite', action='store_true', default=False, help='Run without user interaction')
    opt = parser.parse_args()
    # All paths are resolved relative to the invocation directory, so this
    # script must be run from the repository root.
    basePath = os.getcwd()
    gypScript = os.path.realpath(basePath + "/ThirdParty/gyp/gyp_main.py")
    projFile = "EasyAsPLY.gyp"
    outputDir = "Build/Projects"
    configPath = "Config"
    # gyp reads ROOT_DIR from the environment (see the .gyp file).
    os.environ['GYP_DEFINES'] = 'ROOT_DIR=\"' + basePath +'\"'
    print "Generating %s into %s" % (projFile, outputDir)
    callArgs = ["python",
                gypScript,
                "--no-duplicate-basename-check",
                "--config-dir=" + configPath,
                #"--debug=general",
                "--depth=" + basePath,
                "--generator-output=" + outputDir,
                projFile
                ]
    #print callArgs
    call(callArgs);
    # Keep the console window open for double-click launches unless -q was given.
    if not opt.quite:
        raw_input("Press Enter to continue...")
// Redux/NgRx-style action type identifier, dispatched on successful login.
export const AUTH_USER_LOGGED_IN = '[Auth] user logged in';
|
<reponame>mentix02/bog<filename>bof/src/components/PostList.js
import React from "react";
import PropTypes from "prop-types";
import { Row, Col } from "reactstrap";
import PostListItem from "./PostListItem";
function PostList(props) {
const { posts } = props;
return (
<Row>
<Col sm={12} md={{ size: 6, offset: 3 }}>
{posts.map((post, idx) => (
<PostListItem
key={idx}
slug={post.slug}
title={post.title}
author={post.user}
CreatedAt={new Date(post.CreatedAt).toDateString()}
/>
))}
</Col>
</Row>
);
}
PostList.propTypes = {
posts: PropTypes.arrayOf(
PropTypes.objectOf(
PropTypes.oneOfType([PropTypes.string, PropTypes.number])
)
).isRequired,
};
export default PostList;
|
#!/bin/bash
# Runs the engine's RSpec suite after preparing the dev and test databases,
# propagating the rspec exit status to CI.
exit_code=0
echo "*** Running app component engine specs"
#bundle install --jobs=3 --retry=3
bundle exec rake db:create db:migrate
RAILS_ENV=test bundle exec rake db:create db:migrate
bundle exec rspec spec
# BUGFIX: `exit_code+=$?` on an undeclared variable is *string* concatenation
# ("0" + "1" -> "01"), not addition; use arithmetic expansion instead.
exit_code=$(( exit_code + $? ))
exit $exit_code
|
// Runtime configuration with environment-variable overrides and local-dev
// fallbacks.
// WARNING(review): the fallback DATABASE_URL and API_TOKEN below appear to be
// committed (partially redacted) credentials — these should be supplied via
// the environment only and the defaults removed.
module.exports = {
  PORT: process.env.PORT || 8000,
  NODE_ENV: process.env.NODE_ENV || 'development',
  DATABASE_URL: process.env.DATABASE_URL || 'postgres://mlyuurfwtyedfh:92ff09043eabd0824d9d8bb999<EMAIL>f<EMAIL>@ec<EMAIL>:5432/de8fhncugherm3&ssl=on',
  TEST_DATABASE_URL: process.env.TEST_DATABASE_URL || 'postgresql://postgres@localhost/noteful-test',
  API_TOKEN: process.env.API_TOKEN || '<PASSWORD>'
};
# Publish client-side attribute updates to ThingsBoard over CoAP.
# Requires $ACCESS_TOKEN (device credentials) and new-attributes-values.json
# in the current directory.

# for ThingsBoard Cloud
# Publish client-side attributes update
cat new-attributes-values.json | coap post coap://coap.thingsboard.cloud/api/v1/$ACCESS_TOKEN/attributes

# for local ThingsBoard
# Publish client-side attributes update
cat new-attributes-values.json | coap post coap://localhost/api/v1/$ACCESS_TOKEN/attributes
<filename>oily/divisibility_streaks.py
'''
https://projecteuler.net/problem=601
'''
import functools
import logging
import math
import pathlib
import textwrap
logger = logging.getLogger(__spec__.name)
def main(argv):
    # Configure logging first so everything below is recorded as well as printed.
    setup_logging(argv)
    # solve.__doc__ doubles as the user-facing description of the approach.
    description = textwrap.dedent(solve.__doc__)
    print(description)
    logger.info(description)
    solution = solve()
    solution_description = f'Solution: {solution}'
    print(solution_description)
    logger.info(solution_description)
def setup_logging(argv):
    # Console verbosity is opt-in via --info / --debug flags; without either,
    # no basicConfig is installed (root logger stays at its default).
    log_level = None
    if '--info' in argv:
        log_level = logging.INFO
    elif '--debug' in argv:
        log_level = logging.DEBUG
    if log_level is not None:
        logging.basicConfig(level=log_level, format='%(levelname)s %(asctime)s %(message)s')
    # Always mirror this module's log records to logs/<module>.log
    # (the logs/ directory is assumed to exist next to the package).
    log_path = pathlib.Path(__file__).parent.parent / 'logs' / f'{__spec__.name}.log'
    logger.addHandler(logging.FileHandler(log_path))
def solve() -> int:
    '''
    Sum of the number of streaks of size `i` in range `4^i` for `1 <= i <= 31`.
    '''
    # NOTE: the docstring above is printed at runtime by main() via
    # solve.__doc__, so its wording must stay exactly as written.
    total = 0
    for size in range(1, 32):
        total += number_of_streaks_in_range(streak_size=size, range_end=4 ** size)
    return total
def number_of_streaks_in_range(streak_size: int, range_end: int) -> int:
    '''
    `number_of_streaks_in_range(streak_size, range_end) = P(s, N)`
    => The number of integers `n`, where `1 <= n <= N`, for which `streak(n) = s`.
    '''
    logger.info('number_of_streaks_in_range(streak_size=%i, range_end=%i)', streak_size, range_end)
    # A streak of this size can only end at positions that are lcm(1..s) apart.
    lcm = least_common_multiple(range(1, streak_size + 1))
    logger.debug('streak_size=%i, lcm=%i', streak_size, lcm)
    # Locate the first streak ending at or after streak_size itself.
    first_ending = next(
        (
            ending
            for ending in range(streak_size, range_end + 1, lcm)
            if is_streak(ending_at=ending, streak_size=streak_size)
        ),
        None,
    )
    if first_ending is None:
        # Some streak sizes are impossible, such as 5.
        logger.debug('streak_size=%i => %i', streak_size, 0)
        return 0
    logger.debug('streak_size=%i, first_ending=%i', streak_size, first_ending)
    # Count every qualifying ending from the first one onward, in lcm steps.
    result = sum(
        1
        for ending in range(first_ending, range_end + 1, lcm)
        if is_streak(ending_at=ending, streak_size=streak_size)
    )
    logger.info('streak_size=%i => %i', streak_size, result)
    return result
def is_streak(ending_at: int, streak_size: int) -> bool:
    # The streak only *ends* here if the next number breaks it.
    if is_divisible(ending_at + 1, streak_size + 1):
        return False
    # Walk backwards through the run; the generator short-circuits in the same
    # order as the original loop, so larger (rarer) streaks bail out early.
    return all(
        is_divisible(ending_at - back_step, streak_size - back_step)
        for back_step in range(0, streak_size)
    )
def is_divisible(dividend: int, divisor: int) -> bool:
    # A zero remainder means divisor evenly divides dividend.
    return not dividend % divisor
def least_common_multiple(nums: 'List[int]') -> int:
    # Fold the pairwise identity lcm(a, b) = a*b / gcd(a, b) over the input.
    return functools.reduce(lambda acc, n: (acc * n) // math.gcd(acc, n), nums)
if __name__ == '__main__':
    # Pass the raw CLI arguments through so --info/--debug reach setup_logging.
    import sys
    main(sys.argv)
|
<reponame>esxjs/babel-plugin-esx-browser<gh_stars>1-10
// Compiler output (babel-plugin-esx-browser): the original ESX/JSX templates
// have been lowered to plain React.createElement calls. Edit the source
// templates rather than this file.
import React from 'react';

// Extra props spread into <Component> by <App> below.
const data = {
  value: 'hi'
};

// Renders a div with three paragraphs built from the `title`, `value` and
// `prop` props.
const Component = ({
  value,
  prop,
  title
}) => React.createElement('div', null, [React.createElement('p', null, [" some content: ", title]), React.createElement('p', null, [" some ", value]), React.createElement('p', null, [" some ", prop, " prop "])]);

// Root component: forwards `title`, a static prop, and the spread `data`.
const App = ({
  title
}) => {
  return React.createElement(Component, {
    prop: "static",
    ...data,
    title: title
  });
};

export default App;
|
# https://stackoverflow.com/questions/42564058/how-to-use-local-docker-images-with-minikube
# Prerequisites (run once before serving):
# 1. sh initdb-docker.sh
# 2. kubectl proxy
# Start the frontend dev server.
yarn serve
/// <summary>
/// Determines whether <paramref name="number"/> is a prime number.
/// Returns false for all values &lt;= 1 (including negatives).
/// </summary>
/// <param name="number">The integer to test.</param>
/// <returns>true if the number is prime; otherwise false.</returns>
public bool isPrimeNumber(int number) {
    if (number <= 1) return false;       // 0, 1 and negatives are not prime
    if (number == 2) return true;        // the only even prime
    if (number % 2 == 0) return false;   // all other even numbers are composite
    // Only odd candidate divisors up to sqrt(number) matter: if number = a*b
    // then min(a, b) <= sqrt(number). (long) cast guards i*i against overflow.
    // This replaces the original O(n) scan over every i < number.
    for (int i = 3; (long) i * i <= number; i += 2) {
        if (number % i == 0) return false;
    }
    return true;
}
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2020 Xilinx, Inc. All Rights Reserved.
#
# NOTE: this script was generated on Windows (the PATH values below use ';'
# separators and C:/ drive paths), so it exits immediately by design until the
# two path variables are edited for the local UNIX installation.
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
exit

if [ -z "$PATH" ]; then
  PATH=C:/APPZ/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/APPZ/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/APPZ/Xilinx/Vivado/2020.2/bin
else
  PATH=C:/APPZ/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/APPZ/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/APPZ/Xilinx/Vivado/2020.2/bin:$PATH
fi
export PATH

if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=
else
  LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH

# Working directory and log file for the synthesis run.
HD_PWD='D:/Documents/xcerny76/digital-electronics-1/labs/04-segment/display/display.runs/synth_1'
cd "$HD_PWD"

HD_LOG=runme.log
/bin/touch $HD_LOG

# EAStep runs one tool step through ISEWrap.sh, appending to the log and
# aborting the script on failure.
ISEStep="./ISEWrap.sh"
EAStep()
{
     $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
     if [ $? -ne 0 ]
     then
         exit
     fi
}

EAStep vivado -log top.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source top.tcl
# Create a 6-node GKE cluster for the Apigee docker planet demo.
# The "|| echo" makes a failed create non-fatal (e.g. when the cluster already
# exists); the get-credentials call on the next line is the real gate.
gcloud container clusters create apigee-docker-planet --machine-type n1-standard-2 --num-nodes 6 || echo "create cluster failed"
gcloud container clusters get-credentials apigee-docker-planet || exit 1
echo "deleting kube-apigee-install-scripts.zip if it was downloaded earlier"
rm kube-apigee-install-scripts.zip
# Fetch and unpack the Apigee-on-Kubernetes install scripts, then run setup.
wget https://raw.githubusercontent.com/rajanishgj/utilities/master/apigee-kube/kube-apigee-install-scripts.zip || exit 1
unzip -o kube-apigee-install-scripts.zip || exit 1
./devutils/kube-clean-setup.sh || exit 1
|
#!/bin/bash
# Builds the roothub-codegen sub-package. Originally wired as npm scripts:
# "postcompile": "cd ./app-packages/roothub-codegen && pwd && yarn build",
# "prebuild": "yarn",
# On the local machine yarn failed with: An unexpected error occurred:
# "Cannot create property '-npm-taobao-org-mirrors' on string '{\"-npm-taobao-org-mirrors\":true}'".
# Worked around by running the steps from this shell script instead.
cd ./app-packages/roothub-codegen
pwd
yarn
yarn build
echo '构建完成!'
cd ../../
# Show the generated templates as a quick sanity check.
ls -l templates/codegen
#!/bin/bash
# Script to check cdk templates against updated
# cdk. So deploy and destroy each template
# Usage:
#   Call from base dir
npm install cdk -g
mkdir -p log
# BUGFIX: removed the stray "echo off" — it is a Windows batch leftover that
# in bash simply printed the literal word "off".

# run_template_test <dir>: runs the deploy/destroy task for one template
# directory, logging stdout/stderr under log/, and prints a pass/fail marker.
run_template_test()
{
  echo Testing "$1"
  date
  cd "$1"
  task edge-of-tomorrow >"../log/$1.log" 2>"../log/$1.err"
  if [ $? -eq 0 ]
  then
    echo ✅ "$1" OK
  else
    echo ❌ "$1" NOK
  fi
  cd ..
  echo "##############################"
}

echo "Testing all without bootstrap"
for i in alb-update ec2-autostop OpsCenterRole r53 transitgateway vpc-bastion codepipeline iam-user selfEditing
do
  run_template_test "$i"
done

echo "Testing all with bootstrap"
for i in aws-custom-resource-ses ec2-instanceconnect lambda-apigw lambda-schedule lambda-simple lambda-go
do
  run_template_test "$i"
done
|
package com.boot.feign.article.fallback.impl;
import com.boot.feign.article.fallback.ArticleFallbackFeign;
import com.boot.pojo.Article;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Map;
/**
 * Feign fallback implementation for {@code ArticleFallbackFeign}. Invoked when
 * the downstream article service is unavailable; every method degrades
 * gracefully by returning an empty result (null for objects/collections,
 * 0 for counts) instead of propagating the failure.
 */
@Component
@Slf4j
public class ArticleFallbackFeignImpl implements ArticleFallbackFeign {
    /** Fallback: no paged article data available. */
    @Override
    public Map<String, Object> selectAllArticleByPage(int pageNum, int pageSize) {
        return null;
    }

    /** Fallback: no ordered article list available. */
    @Override
    public List<Article> selectAllArticleOrderByDesc() {
        return null;
    }

    /** Fallback: no paged recommended articles available. */
    @Override
    public List<Article> selectArticleByRecommendPage(int pageNum, int pageSize) {
        return null;
    }

    /** Fallback: article lookup unavailable. */
    @Override
    public Article selectArticleByArticleIdNoComment(long id) {
        return null;
    }

    /** Fallback: no recommended articles available. */
    @Override
    public List<Article> selectArticleByRecommend() {
        return null;
    }

    /** Fallback: report zero articles when the service is down. */
    @Override
    public int selectArticleCount() {
        return 0;
    }

    /** Fallback: no articles for the given category. */
    @Override
    public List<Article> queryArticleByCategoryName(String categoryName) {
        return null;
    }

    /** Fallback: no creation-date-ordered page available. */
    @Override
    public Map<String, Object> selectAllArticleByCreated(int pageNum, int pageSize) {
        return null;
    }

    /** Fallback: zero title matches when the service is down. */
    @Override
    public int queryArticleByTitleCount(String title) {
        return 0;
    }

    /** Fallback: no title search results available. */
    @Override
    public List<Article> queryArticleByTitle(int pageNum, int pageSize, String title) {
        return null;
    }

    /** Fallback: no statistics available. */
    @Override
    public List<Article> selectArticleStatistic() {
        return null;
    }
}
|
<filename>shareloc/geofunctions/localization.py
#!/usr/bin/env python
# coding: utf8
#
# Copyright (c) 2022 Centre National d'Etudes Spatiales (CNES).
#
# This file is part of Shareloc
# (see https://github.com/CNES/shareloc).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Localization class for localization functions.
"""
# Standard imports
import logging
import numbers
# Third party imports
import numpy as np
# Shareloc imports
from shareloc.proj_utils import coordinates_conversion
class Localization:
    """base class for localization function.
    Underlying model can be both multi layer localization grids or RPCs models
    """

    def __init__(self, model, elevation=None, image=None, epsg=None):
        """
        constructor

        :param model : geometric model
        :type model : shareloc.grid or shareloc.rpc
        :param elevation : dtm or default elevation over ellipsoid if None elevation is set to 0
        :type elevation : shareloc.dtm or float or np.ndarray
        :param image : image class to handle geotransform
        :type image : shareloc.image.Image
        :param epsg : coordinate system of world points, if None model coordiante system will be used
        :type epsg : int
        """
        self.use_rpc = model.type == "rpc"
        self.model = model
        self.default_elevation = 0.0
        self.dtm = None
        # A scalar/array elevation is treated as a constant height over the
        # ellipsoid; anything else is assumed to be a DTM object.
        if isinstance(elevation, (numbers.Number, list, np.ndarray)):
            self.default_elevation = elevation
        else:
            self.dtm = elevation
        self.image = image
        self.epsg = epsg

    def direct(self, row, col, h=None, using_geotransform=False):
        """
        direct localization

        :param row : sensor row
        :type row : float or 1D np.ndarray
        :param col : sensor col
        :type col : float or 1D np.ndarray
        :param h: altitude, if none DTM is used
        :type h : float or 1D np.ndarray
        :param using_geotransform: using_geotransform
        :type using_geotransform : boolean
        :return coordinates : [lon,lat,h] (2D np.array)
        :rtype np.ndarray of 2D dimension
        """
        # Convert image indices to physical sensor coordinates when requested.
        if using_geotransform and self.image is not None:
            row, col = self.image.transform_index_to_physical_point(row, col)
        # Elevation priority: explicit h > DTM > constant default elevation.
        if h is not None:
            coords = self.model.direct_loc_h(row, col, h)
            epsg = self.model.epsg
        elif self.dtm is not None:
            coords = self.model.direct_loc_dtm(row, col, self.dtm)
            epsg = self.dtm.epsg
        else:
            coords = self.model.direct_loc_h(row, col, self.default_elevation)
            epsg = self.model.epsg
        # Reproject to the requested output CRS if it differs from the source.
        if self.epsg is not None and self.epsg != epsg:
            return coordinates_conversion(coords, epsg, self.epsg)
        return coords

    def extent(self, margin=0.0):
        """
        returns model extent:
           * whole validity domains if image is not given
           * image footprint if image is set
           * epipolar footprint if right_model is set

        :param margin: footprint margin (in degrees)
        :type margin : float
        :return extent : [lon_min,lat_min,lon max,lat max] (2D np.array)
        :rtype numpy.array
        """
        footprint = np.zeros([2, 2])
        if self.image is not None:
            logging.debug("image extent")
            # Pixel-corner convention: the image spans [-0.5, size - 0.5].
            footprint[0, :] = [-0.5, -0.5]
            footprint[1, :] = [-0.5 + self.image.nb_rows, -0.5 + self.image.nb_columns]
            using_geotransform = True
        else:
            logging.debug("model extent")
            footprint[0, :] = [self.model.row0, self.model.col0]
            footprint[1, :] = [self.model.rowmax, self.model.colmax]
            using_geotransform = False
        # Project both footprint corners to the ground at altitude 0 and take
        # the axis-aligned bounding box, padded by the margin.
        on_ground_pos = self.direct(footprint[:, 0], footprint[:, 1], 0, using_geotransform=using_geotransform)
        [lon_min, lat_min, __] = np.min(on_ground_pos, 0)
        [lon_max, lat_max, __] = np.max(on_ground_pos, 0)
        return np.array([lat_min - margin, lon_min - margin, lat_max + margin, lon_max + margin])

    def inverse(self, lon, lat, h=None, using_geotransform=False):
        """
        inverse localization

        :param lat :  latitude (or y)
        :param lon : longitude (or x)
        :param h : altitude
        :param using_geotransform: using_geotransform
        :type using_geotransform : boolean
        :return coordinates : [row,col,h] (1D np.ndarray)
        :rtype Tuple(1D np.ndarray row position, 1D np.ndarray col position, 1D np.ndarray alt)
        """
        # Grid models need a predictor fitted once before inverse localization.
        if not self.use_rpc and not hasattr(self.model, "pred_ofset_scale_lon"):
            self.model.estimate_inverse_loc_predictor()
        if h is None:
            h = self.default_elevation
        # Convert input world coordinates to the model CRS when they differ.
        if self.epsg is not None and self.model.epsg != self.epsg:
            if isinstance(lon, np.ndarray) and isinstance(lat, np.ndarray):
                coords = np.full([lon.shape[0], 3], fill_value=0.0)
            else:
                coords = np.full([1, 3], fill_value=0.0)
            coords[:, 0] = lon
            coords[:, 1] = lat
            coords[:, 2] = h
            converted_coords = coordinates_conversion(coords, self.epsg, self.model.epsg)
            lon = converted_coords[:, 0]
            lat = converted_coords[:, 1]
            h = converted_coords[:, 2]
        row, col, __ = self.model.inverse_loc(lon, lat, h)
        # Convert physical sensor coordinates back to image indices if needed.
        if using_geotransform and self.image is not None:
            row, col = self.image.transform_physical_point_to_index(row, col)
        return row, col, h
def coloc(model1, model2, row, col, elevation=None, image1=None, image2=None, using_geotransform=False):
    """
    Colocalization : direct localization with model1, then inverse localization with model2

    :param model1: geometric model 1
    :type model1: shareloc.grid or shareloc.rpc
    :param model2: geometric model 2
    :type model2: shareloc.grid or shareloc.rpc
    :param row: sensor row
    :type row: int or 1D numpy array
    :param col: sensor col
    :type col: int or 1D numpy array
    :param elevation: elevation
    :type elevation: shareloc.dtm or float or 1D numpy array
    :param image1 : image class to handle geotransform
    :type image1 : shareloc.image.Image
    :param image2 : image class to handle geotransform
    :type image2 : shareloc.image.Image
    :param using_geotransform: using_geotransform
    :type using_geotransform : boolean
    :return: Corresponding sensor position [row, col, alt] in the geometric model 2
    :rtype : Tuple(1D np.array row position, 1D np.array col position, 1D np.array alt)
    """
    geometric_model1 = Localization(model1, elevation, image=image1)
    geometric_model2 = Localization(model2, elevation, image=image2)

    # Promote scalar inputs to 1-element arrays so the rest of the code
    # can work uniformly with vectorized positions.
    if not isinstance(row, (list, np.ndarray)):
        row = np.array([row])
        col = np.array([col])

    # Sensor 1 -> ground with model1.
    ground_coord = geometric_model1.direct(row, col, using_geotransform=using_geotransform)

    # Estimate sensor position (row, col, altitude) using inverse localization with model2
    sensor_coord = np.zeros((row.shape[0], 3), dtype=np.float64)
    sensor_coord[:, 0], sensor_coord[:, 1], sensor_coord[:, 2] = geometric_model2.inverse(
        ground_coord[:, 0], ground_coord[:, 1], ground_coord[:, 2], using_geotransform
    )

    return sensor_coord[:, 0], sensor_coord[:, 1], sensor_coord[:, 2]
|
-- Average view count of the 10 most recently created articles.
-- BUG FIX: the original applied ORDER BY ... LIMIT to the outer SELECT,
-- but AVG() aggregates the whole table to a single row first, so the
-- LIMIT was a no-op and the average covered ALL articles. The 10-row
-- restriction must happen in a subquery before aggregating.
SELECT AVG(views)
FROM (
    SELECT views
    FROM Articles
    ORDER BY createdAt DESC
    LIMIT 10
) AS recent_articles;
# tabtitle: set the terminal tab/window title to the joined arguments by
# emitting the xterm OSC 0 escape sequence (ESC ] 0 ; title BEL).
tabtitle() {
  printf '\033]0;%s\a' "$*"
}
|
"""
Write a code to convert a calendar date in the form of a string 'dd/mm/yyyy' or the format 'dd-mm-yyyy' to a ISO 8601 date
"""
def convert_date(date_string):
# Split the date into parts
parts = date_string.split('/')
if parts == 2:
parts = date_string.split('-')
# Get the day, month and year from the parts
day = int(parts[0])
month = int(parts[1])
year = int(parts[2])
# Create the ISO 8601 date
iso8601_date = '{0}-{1:02d}-{2:02d}'.format(year, month, day)
return iso8601_date
if __name__ == '__main__':
date_string = '10/12/2020'
iso8601_date = convert_date(date_string)
print(iso8601_date) |
<filename>src/components/AllDetails/index.js<gh_stars>1-10
import React from 'react';
import {
Container,
Table,
Row,
CellTh,
CellTd,
} from './style';
import useSwr from 'swr';
// SWR fetcher: GET the resource and unwrap the `data` field from the
// API response envelope ({ ..., data }).
const fetcher = async (...args) => {
  const response = await fetch(...args);
  const payload = await response.json();
  return payload.data;
};
export const AllDetails = () => {
const URL = 'https://www.trackcorona.live/api/provinces';
const { data, error } = useSwr(URL, { fetcher });
const usaData =
data && !error
? data.filter((i) => i.country_code === 'us')
: [];
console.log(usaData);
const rows = () =>
usaData.map((i, key) => (
<Row key={i.latitude}>
<CellTd orientation='center'>{i.country_code.toUpperCase()}</CellTd>
<CellTd orientation='left'>{i.location}</CellTd>
<CellTd orientation='right'>
{new Intl.NumberFormat('de-DE').format(i.confirmed)}
</CellTd>
<CellTd orientation='right'>
{new Intl.NumberFormat('de-DE').format(i.confirmed)}
</CellTd>
<CellTd orientation='right'>{new Intl.NumberFormat('de-DE').format(i.dead)}</CellTd>
<CellTd >
{i.recovered
? new Intl.NumberFormat('de-DE').format(i.recovered)
: 'Sin dato'}
</CellTd>
<CellTd orientation='right'>{`${(
(Date.parse(new Date()) - Date.parse(i.updated)) /
6000000
).toFixed(0)} min`}</CellTd>
<CellTd orientation='right'>{key + 1}</CellTd>
</Row>
));
return (
<Container>
<h1>All Data</h1>
<Table>
<thead>
<Row>
<CellTh orientation='center'>Country</CellTh>
<CellTh >State</CellTh>
<CellTh orientation='left'>Confirmed</CellTh>
<CellTh orientation='left'>Actives</CellTh>
<CellTh orientation='left'>Dead</CellTh>
<CellTh orientation='left'>Recovered</CellTh>
<CellTh orientation='left'>Update</CellTh>
<CellTh orientation='center'>Key</CellTh>
</Row>
</thead>
<tbody>{rows()}</tbody>
</Table>
</Container>
);
};
|
# Habitat plan for packaging the GoCD continuous-delivery server.
pkg_name=gocd-server
pkg_origin=core
pkg_version="18.12.0"
# Upstream build number; part of the download URL and archive name.
pkg_buildnumber="8222"
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_license=("Apache-2.0")
pkg_source="https://download.gocd.org/binaries/${pkg_version}-${pkg_buildnumber}/generic/go-server-${pkg_version}-${pkg_buildnumber}.zip"
pkg_shasum="5ca5b1f504ef2e47ede02df8ed0145e9cccff18106f8356bb3e6a793df24a707"
pkg_description="GoCD is an open source tool which is used in software development to help teams and organizations automate the continuous delivery (CD) of software."
pkg_upstream_url="https://www.gocd.org"
pkg_filename="go-server-${pkg_version}-${pkg_buildnumber}.zip"
pkg_dirname="go-server-${pkg_version}"
# Runtime dependencies (Corretto 8 presumably provides the JVM; git is
# bundled for repository access — confirm against service hooks).
pkg_deps=(
  core/git
  core/corretto8
)
pkg_bin_dirs=(bin)
# Ports exported to other Habitat services: plain HTTP and TLS.
pkg_exports=(
  [port]=port
  [ssl-port]=ssl-port
)
pkg_exposes=(port ssl-port)

# Upstream ships a pre-built zip, so there is nothing to compile.
do_build() {
  return 0
}

# Copy the unpacked distribution into the package's bin directory and
# move the license file up to the package root.
do_install() {
  mkdir -p "${pkg_prefix}/bin"
  cp ./* "${pkg_prefix}/bin"
  mv "${pkg_prefix}/bin/LICENSE" "${pkg_prefix}/"
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.location.basic;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import org.testng.annotations.Test;
import brooklyn.util.config.ConfigBag;
/**
 * Integration tests for {@code LocationConfigUtils} SSH key resolution:
 * explicit key data must take precedence over key files, and key files
 * (including '~'-prefixed paths and colon-separated path lists) must be
 * readable. The public key may also be inferred from the private key file.
 */
public class LocationConfigUtilsTest {

    public static final String SSH_PRIVATE_KEY_FILE = System.getProperty("sshPrivateKey", "~/.ssh/id_rsa");
    // BUG FIX: the public key path was read from the "sshPrivateKey" system
    // property (copy-paste error), so -DsshPublicKey=... was silently ignored.
    public static final String SSH_PUBLIC_KEY_FILE = System.getProperty("sshPublicKey", "~/.ssh/id_rsa.pub");

    @Test(groups="Integration")
    public void testPreferPrivateKeyDataOverFile() throws Exception {
        ConfigBag config = ConfigBag.newInstance();
        config.put(LocationConfigKeys.PRIVATE_KEY_DATA, "mydata");
        config.put(LocationConfigKeys.PRIVATE_KEY_FILE, SSH_PRIVATE_KEY_FILE);

        String data = LocationConfigUtils.getPrivateKeyData(config);
        assertEquals(data, "mydata");
    }

    // NOTE(review): method name typo ("Pubilc") kept to avoid changing the
    // public test API; safe to rename in a dedicated cleanup.
    @Test(groups="Integration")
    public void testPreferPubilcKeyDataOverFile() throws Exception {
        ConfigBag config = ConfigBag.newInstance();
        config.put(LocationConfigKeys.PUBLIC_KEY_DATA, "mydata");
        config.put(LocationConfigKeys.PUBLIC_KEY_FILE, SSH_PUBLIC_KEY_FILE);

        String data = LocationConfigUtils.getPublicKeyData(config);
        assertEquals(data, "mydata");
    }

    @Test(groups="Integration")
    public void testReadsPrivateKeyFileWithTildaPath() throws Exception {
        ConfigBag config = ConfigBag.newInstance();
        config.put(LocationConfigKeys.PRIVATE_KEY_FILE, SSH_PRIVATE_KEY_FILE);

        String data = LocationConfigUtils.getPrivateKeyData(config);
        assertTrue(data != null && data.length() > 0);
    }

    @Test(groups="Integration")
    public void testReadsPrivateKeyFileWithMultipleColonSeparatedFilesWithGoodLast() throws Exception {
        ConfigBag config = ConfigBag.newInstance();
        config.put(LocationConfigKeys.PRIVATE_KEY_FILE, "/path/does/not/exist:"+SSH_PRIVATE_KEY_FILE);

        String data = LocationConfigUtils.getPrivateKeyData(config);
        assertTrue(data != null && data.length() > 0);
    }

    @Test(groups="Integration")
    public void testReadsPrivateKeyFileWithMultipleColonSeparatedFilesWithGoodFirst() throws Exception {
        ConfigBag config = ConfigBag.newInstance();
        config.put(LocationConfigKeys.PRIVATE_KEY_FILE, SSH_PRIVATE_KEY_FILE+":/path/does/not/exist");

        String data = LocationConfigUtils.getPrivateKeyData(config);
        assertTrue(data != null && data.length() > 0);
    }

    @Test(groups="Integration")
    public void testReadsPublicKeyFileWithTildaPath() throws Exception {
        ConfigBag config = ConfigBag.newInstance();
        config.put(LocationConfigKeys.PUBLIC_KEY_FILE, SSH_PUBLIC_KEY_FILE);

        String data = LocationConfigUtils.getPublicKeyData(config);
        assertTrue(data != null && data.length() > 0);
    }

    @Test(groups="Integration")
    public void testInfersPublicKeyFileFromPrivateKeyFile() throws Exception {
        ConfigBag config = ConfigBag.newInstance();
        config.put(LocationConfigKeys.PRIVATE_KEY_FILE, SSH_PRIVATE_KEY_FILE);

        String data = LocationConfigUtils.getPublicKeyData(config);
        assertTrue(data != null && data.length() > 0);
    }
}
|
<filename>examples/app_info.js
/*
* express-mfs
*
* Copyright(c) 2020 <NAME>
* MIT Licensed
*/
"use strict";
// ==========================================================================================
// service info middleware
//
// mfs.info:
// The middleware provides a "service info" endpoint for your APIs.
//
// The middleware returns information about the platform: node version, cpus, arch,
// hostname, etc.
//
// Application level details are also returned: name, description, version,
// dependencies, upTime, pid, startDate, etc.
//
// The basic application details are read from the application's package.json file.
//
// Below is an example response:
// {
// "name": "my-microservice",
// "description": "example service",
// "version": "1.0.0",
// "dependencies": {
// "express": "^4.13.4",
// "debug": "^4.1.1",
// "express-mfs": "^1.0.0",
// },
// "nodeVersion": "v12.16.3",
// "hostname": "XXXXXXXX",
// "platform": "win32",
// "arch": "x64",
// "cpus": 12,
// "startDate": "Tue, 30 Jun 2020 16:39:14 GMT",
// "upTime": "0d:0h:0m:4s",
// "pid": 153392
// }
//
// ==========================================================================================
const express = require("express");
const mfs = require("../lib");

// Port the example API listens on.
const API_PORT = 3000;

const app = express();

// JSON-based apis: restrict the service to JSON requests/responses.
app.use(mfs.json.only);

// Expose the service-info endpoint described in the overview above.
app.get("/serviceInfo", mfs.info);

// handle unknown routes
app.use(mfs.unknown);

// handle all errors
app.use(mfs.error);

app.listen(API_PORT);
console.log(`API Service Listening On Port: ${API_PORT}`);
|
#!/usr/bin/env bash

# Install, configure and health-check a Redis server for IAM.
# Relies on helpers (iam::common::sudo, iam::log::*) and the REDIS_* /
# LINUX_PASSWORD variables provided by scripts/install/common.sh.

# The root of the build/dist directory
IAM_ROOT=$(dirname "${BASH_SOURCE[0]}")/../..
[[ -z ${COMMON_SOURCED} ]] && source ${IAM_ROOT}/scripts/install/common.sh

# Print info after install.
function iam::redis::info() {
  cat << EOF
Redis Login: redis-cli --no-auth-warning -h ${REDIS_HOST} -p ${REDIS_PORT} -a '${REDIS_PASSWORD}'
EOF
}

# Install
function iam::redis::install() {
  # 1. Install redis
  iam::common::sudo "yum install -y redis"

  # 2. Config
  # 2.1 Set daemon
  iam::common::sudo "sed -i '/^daemonize/{s/no/yes/}' /etc/redis.conf"

  # 2.2 Delete # before `bind 127.0.0.1`
  echo ${LINUX_PASSWORD} | sudo -S sed -i '/^# bind 127.0.0.1/{s/# //}' /etc/redis.conf

  # 2.3 Set password
  echo ${LINUX_PASSWORD} | sudo -S sed -i 's/^# requirepass.*$/requirepass '"${REDIS_PASSWORD}"'/' /etc/redis.conf

  # 2.4 Turn down protected-mode
  echo ${LINUX_PASSWORD} | sudo -S sed -i '/^protected-mode/{s/yes/no/}' /etc/redis.conf

  # 3. Disable firewall
  iam::common::sudo "systemctl stop firewalld.service"
  iam::common::sudo "systemctl disable firewalld.service"

  # 4. Start Redis
  iam::common::sudo "redis-server /etc/redis.conf"

  iam::redis::status || return 1
  iam::redis::info
  iam::log::info "install Redis successfully"
}

# Uninstall
function iam::redis::uninstall() {
  # Best-effort teardown: tolerate a server that is not running/installed.
  set +o errexit
  iam::common::sudo "killall redis-server"
  iam::common::sudo "yum -y remove redis"
  iam::common::sudo "rm -rf /var/lib/redis"
  set -o errexit
  iam::log::info "uninstall Redis successfully"
}

# Check redis status: process must exist and accept an authenticated command.
function iam::redis::status() {
  if [[ -z "`pgrep redis-server`" ]]; then
    iam::log::error "Redis not running, maybe not installed properly"
    return 1
  fi

  redis-cli --no-auth-warning -h ${REDIS_HOST} -p ${REDIS_PORT} -a "${REDIS_PASSWORD}" --hotkeys || {
    iam::log::error "can not login with ${REDIS_USERNAME}, redis maybe not initialized properly"
    return 1
  }
}

# Allow `script.sh iam::redis::<fn> [args]` invocation from the command line.
if [[ "$*" =~ iam::redis:: ]];then
  eval $*
fi
<reponame>lian01chen/chcj_bk
// Anti-pattern demo: methods defined inside the constructor are
// re-created for every instance (each `new` allocates three closures).
var CheckObject = function () {
    this.checkName = function () {
    }
    this.checkEmail = function () {
    }
    this.checkPassword = function () {
    }
}

// Better: share one set of methods via the prototype. Each method
// returns `this`, enabling chained calls like
// a.checkName().checkEmail().checkPassword().
var CheckObject = function () {}
CheckObject.prototype = {
    checkName: function () {
        console.log('checkName')
        return this
    },
    checkEmail: function () {
        console.log('checkEmail')
        return this
    },
    checkPassword: function () {
        console.log('checkPassword')
        return this
    }
}

let a = new CheckObject()
// Instances delegate to the constructor's prototype object.
a.__proto__ === CheckObject.prototype // true

// Extend Function.prototype so any function can register a chainable
// method on itself (demonstration only — mutating built-ins is risky).
Function.prototype.addMethod = function(name,fn){
    this[name] = fn
    return this
}
def get_prime_list(upper_limit):
    """Return all prime numbers in the inclusive range [2, upper_limit].

    Uses a sieve of Eratosthenes (O(n log log n)) instead of the original
    trial division of every number against all smaller numbers (O(n^2)).

    :param upper_limit: inclusive upper bound of the search range
    :return: list of primes in ascending order (empty if upper_limit < 2)
    """
    if upper_limit < 2:
        return []
    # sieve[i] is True while i is still a prime candidate.
    sieve = [True] * (upper_limit + 1)
    sieve[0] = sieve[1] = False
    for num in range(2, int(upper_limit ** 0.5) + 1):
        if sieve[num]:
            # Cross off multiples starting at num*num; smaller multiples
            # were already eliminated by smaller prime factors.
            sieve[num * num::num] = [False] * len(range(num * num, upper_limit + 1, num))
    return [num for num, is_prime in enumerate(sieve) if is_prime]
# Demo: print the primes up to 20.
upper_limit = 20
prime_list = get_prime_list(upper_limit)
print(f'The prime numbers in the range [2, {upper_limit}] are {prime_list}.')
from typing import List, Tuple
def calculate_average_waiting_time(processes: List[Tuple[int, int]]) -> float:
    """Compute the mean waiting time of processes scheduled first-come-first-served.

    Each process is an ``(arrival_time, burst_time)`` pair; the list is
    assumed to be ordered by arrival time.

    :param processes: list of (arrival_time, burst_time) tuples
    :return: average waiting time, or 0.0 for an empty list
    """
    if not processes:
        # Guard: the original raised ZeroDivisionError on an empty list.
        return 0.0
    total_waiting_time = 0
    current_time = 0
    for arrival_time, burst_time in processes:
        # CPU idles until the process arrives.
        if arrival_time > current_time:
            current_time = arrival_time
        # Time spent waiting between arrival and the start of execution.
        total_waiting_time += current_time - arrival_time
        current_time += burst_time
    return total_waiting_time / len(processes)
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
*/
'use strict';
var React = require('react');
var ReactDOM = require('react-dom');
var Draggable = require('./Draggable');
var assign = require('object-assign');
type Props = {
style?: {[key: string]: any},
left: () => React$Element,
right: () => React$Element,
initialWidth: number,
};
type DefaultProps = {};
type State = {
moving: boolean,
width: number,
};
/**
 * Two-pane horizontal layout with a draggable vertical divider.
 * `left`/`right` props are render callbacks; only the right pane's width
 * is stateful, initialized from `initialWidth` (the left pane flexes).
 */
class SplitPane extends React.Component {
  props: Props;
  defaultProps: DefaultProps;
  state: State;

  constructor(props: Props) {
    super(props);
    this.state = {
      moving: false, // true while the divider is being dragged
      width: props.initialWidth, // current right-pane width in px
    };
  }

  // Recompute the right pane's width from the pointer's x position:
  // the distance from the pointer to the container's right edge.
  onMove(x: number) {
    var node = ReactDOM.findDOMNode(this);
    this.setState({
      width: (node.offsetLeft + node.offsetWidth) - x,
    });
  }

  render() {
    // Merge the stateful width into the static right-pane style.
    var rightStyle = assign({}, styles.rightPane, {
      width: this.state.width,
    });
    return (
      <div style={styles.container}>
        <div style={styles.leftPane}>
          {this.props.left()}
        </div>
        <Draggable
          style={styles.dragger}
          onStart={() => this.setState({moving: true})}
          onMove={x => this.onMove(x)}
          onStop={() => this.setState({moving: false})}>
          <div style={styles.draggerInner} />
        </Draggable>
        <div style={rightStyle}>
          {this.props.right()}
        </div>
      </div>
    );
  }
}
// Inline style objects for the split-pane layout.
var styles = {
  // Flex row hosting both panes and the divider.
  container: {
    display: 'flex',
    minWidth: 0,
    flex: 1,
  },
  // Divider hit area: wider than the visible line for easier grabbing.
  dragger: {
    padding: '0 3px',
    cursor: 'ew-resize',
    position: 'relative',
    zIndex: 1,
  },
  // The 1px visible divider line.
  draggerInner: {
    backgroundColor: '#ccc',
    height: '100%',
    width: 1,
  },
  // Negative margins let the divider's padding overlap the panes.
  rightPane: {
    display: 'flex',
    marginLeft: -3,
  },
  leftPane: {
    display: 'flex',
    marginRight: -3,
    minWidth: 0,
    flex: 1,
  },
};

module.exports = SplitPane;
|
import React from 'react';
import Header from './sections/Header';
import Footer from './sections/Footer';
import AboutSection from './sections/AboutSection';
import ContactSection from './sections/ContactSection';
import ReviewSection from './sections/ReviewSection';
import BenefitSection from './sections/BenefitSection'
function About() {
return (
<>
<div class="container">
<Header />
<AboutSection>
<p>Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam.</p>
<p>Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam.</p>
</AboutSection>
</div>
<BenefitSection />
<ReviewSection />
<ContactSection />
<Footer id="sub-footer" showContact={false} />
</>
);
}
export default About;
|
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kubefed
import (
"fmt"
"io"
"net/url"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime/schema"
federationapi "k8s.io/kubernetes/federation/apis/federation"
"k8s.io/kubernetes/federation/pkg/kubefed/util"
"k8s.io/kubernetes/pkg/kubectl/cmd/templates"
cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
"k8s.io/kubernetes/pkg/kubectl/resource"
"github.com/spf13/cobra"
)
var (
unjoin_long = templates.LongDesc(`
Unjoin removes a cluster from a federation.
Current context is assumed to be a federation endpoint.
Please use the --context flag otherwise.`)
unjoin_example = templates.Examples(`
# Unjoin removes the specified cluster from a federation.
# Federation control plane's host cluster context name
# must be specified via the --host-cluster-context flag
# to properly cleanup the credentials.
kubectl unjoin foo --host-cluster-context=bar`)
)
// NewCmdUnjoin defines the `unjoin` command that removes a cluster
// from a federation. The federation API endpoint comes from the current
// (or --context) kubeconfig context; --host-cluster-context identifies
// the host cluster holding the credentials secret to clean up.
func NewCmdUnjoin(f cmdutil.Factory, cmdOut, cmdErr io.Writer, config util.AdminConfig) *cobra.Command {
	cmd := &cobra.Command{
		Use:     "unjoin CLUSTER_NAME --host-cluster-context=HOST_CONTEXT",
		Short:   "Unjoins a cluster from a federation",
		Long:    unjoin_long,
		Example: unjoin_example,
		Run: func(cmd *cobra.Command, args []string) {
			err := unjoinFederation(f, cmdOut, cmdErr, config, cmd, args)
			cmdutil.CheckErr(err)
		},
	}

	// Registers the shared kubefed subcommand flags on this command.
	util.AddSubcommandFlags(cmd)
	return cmd
}
// unjoinFederation is the implementation of the `unjoin` command.
// It removes the cluster object from the federation API server and then
// deletes the cluster's credentials secret from the federation host
// cluster. Missing cluster or secret is reported as a warning, not an error.
func unjoinFederation(f cmdutil.Factory, cmdOut, cmdErr io.Writer, config util.AdminConfig, cmd *cobra.Command, args []string) error {
	unjoinFlags, err := util.GetSubcommandFlags(cmd, args)
	if err != nil {
		return err
	}

	// Fetch-and-delete the cluster; a nil cluster means it was never
	// registered with the federation.
	cluster, err := popCluster(f, unjoinFlags.Name)
	if err != nil {
		return err
	}
	if cluster == nil {
		fmt.Fprintf(cmdErr, "WARNING: cluster %q not found in federation, so its credentials' secret couldn't be deleted", unjoinFlags.Name)
		return nil
	}

	// We want a separate client factory to communicate with the
	// federation host cluster. See join_federation.go for details.
	hostFactory := config.HostFactory(unjoinFlags.Host, unjoinFlags.Kubeconfig)
	err = deleteSecret(hostFactory, cluster.Spec.SecretRef.Name, unjoinFlags.FederationSystemNamespace)
	if isNotFound(err) {
		// A missing secret is non-fatal: the cluster itself was removed.
		fmt.Fprintf(cmdErr, "WARNING: secret %q not found in the host cluster, so it couldn't be deleted", cluster.Spec.SecretRef.Name)
	} else if err != nil {
		return err
	}

	_, err = fmt.Fprintf(cmdOut, "Successfully removed cluster %q from federation\n", unjoinFlags.Name)
	return err
}
// popCluster fetches the cluster object with the given name, deletes
// it from the federation API server and returns the deleted cluster
// object. It returns (nil, nil) when the cluster is not registered.
func popCluster(f cmdutil.Factory, name string) (*federationapi.Cluster, error) {
	// Resolve the REST mapping for the Cluster kind so a resource helper
	// can be built against the federation API server. (The original
	// comment claimed this created a secret — it does not.)
	mapper, typer := f.Object()
	gvks, _, err := typer.ObjectKinds(&federationapi.Cluster{})
	if err != nil {
		return nil, err
	}
	gvk := gvks[0]
	mapping, err := mapper.RESTMapping(schema.GroupKind{Group: gvk.Group, Kind: gvk.Kind}, gvk.Version)
	if err != nil {
		return nil, err
	}
	client, err := f.ClientForMapping(mapping)
	if err != nil {
		return nil, err
	}

	rh := resource.NewHelper(client, mapping)
	obj, err := rh.Get("", name, false)
	if isNotFound(err) {
		// Cluster isn't registered, there isn't anything to be done here.
		return nil, nil
	} else if err != nil {
		return nil, err
	}

	cluster, ok := obj.(*federationapi.Cluster)
	if !ok {
		return nil, fmt.Errorf("unexpected object type: expected \"federation/v1beta1.Cluster\", got %T: obj: %#v", obj, obj)
	}

	// Remove the cluster resource in the federation API server by
	// calling rh.Delete()
	return cluster, rh.Delete("", name)
}
// deleteSecret deletes the secret with the given name from the given
// namespace of the federation host cluster (reached via hostFactory).
func deleteSecret(hostFactory cmdutil.Factory, name, namespace string) error {
	clientset, err := hostFactory.ClientSet()
	if err != nil {
		return err
	}
	return clientset.Core().Secrets(namespace).Delete(name, &metav1.DeleteOptions{})
}
// isNotFound reports whether err is (or wraps, via *url.Error) a
// Kubernetes NotFound status error.
func isNotFound(err error) bool {
	if urlErr, ok := err.(*url.Error); ok {
		return errors.IsNotFound(urlErr.Err)
	}
	return errors.IsNotFound(err)
}
|
#! /bin/sh
# Batch runner for one simcoevolity validation batch; supports both an
# interactive run and execution inside a PBS job.

username="$USER"
# BUG FIX: '==' inside '[' is a bashism; the script's shebang is /bin/sh,
# where only '=' is specified. Same for 'source' -> '.'.
if [ "$username" = "aubjro" ]
then
    module load gcc/6.1.0
fi

# When running under PBS, restore the login environment and move to the
# submission directory before loading the toolchain.
if [ -n "$PBS_JOBNAME" ]
then
    . "${PBS_O_HOME}/.bash_profile"
    cd "$PBS_O_WORKDIR" || exit 1
    module load gcc/5.3.0
fi

simname="03pairs-dpp-root-0002-100k"
cfgpath="../configs/config-${simname}.yml"
outputdir="../simulations/validation/${simname}/batch003"
rngseed=870151476
nreps=100

mkdir -p "$outputdir"

simcoevolity --seed="$rngseed" -n "$nreps" -o "$outputdir" "$cfgpath"
#!/bin/bash
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

set -e

# OVERVIEW
# This script installs the sagemaker_run_notebook extension package in SageMaker Notebook Instance
#
# There are two parameters you need to set:
# 1. S3_LOCATION is the place in S3 where you put the extension tarball
# 2. TARBALL is the name of the tar file that you uploaded to S3. You should just need to check
#    that you have the version right.

# Everything inside the heredoc runs as the ec2-user login user.
sudo -u ec2-user -i <<'EOF'

# PARAMETERS
VERSION=0.18.0
EXTENSION_NAME=sagemaker_run_notebook

# Set up the user setting and workspace directories
mkdir -p /home/ec2-user/SageMaker/.jupyter-user/{workspaces,user-settings}

# Run in the conda environment that the Jupyter server uses so that our changes are picked up
source /home/ec2-user/anaconda3/bin/activate JupyterSystemEnv

# Install the extension and rebuild JupyterLab so it picks up the new UI
pip install https://github.com/aws-samples/sagemaker-run-notebook/releases/download/v${VERSION}/sagemaker_run_notebook-${VERSION}.tar.gz
jupyter lab build

source /home/ec2-user/anaconda3/bin/deactivate
EOF

# Tell Jupyter to use the user-settings and workspaces directory on the EBS
# volume (so they persist across notebook-instance restarts).
echo "export JUPYTERLAB_SETTINGS_DIR=/home/ec2-user/SageMaker/.jupyter-user/user-settings" >> /etc/profile.d/jupyter-env.sh
echo "export JUPYTERLAB_WORKSPACES_DIR=/home/ec2-user/SageMaker/.jupyter-user/workspaces" >> /etc/profile.d/jupyter-env.sh

# The Jupyter server needs to be restarted to pick up the server part of the
# extension. This needs to be done as root.
initctl restart jupyter-server --no-wait
"""Effect decorator for Noisemaker Composer Presets"""
EFFECTS = {}
def effect(func):
"""Function decorator for declaring composable effects."""
EFFECTS[func.__name__] = func
return func
def compose_effects(signal, *effect_names):
"""Compose and apply effects to the input signal."""
composed_signal = signal
for effect_name in effect_names:
if effect_name in EFFECTS:
composed_signal = EFFECTS[effect_name](composed_signal)
else:
raise ValueError(f"Effect '{effect_name}' not found")
return composed_signal
@effect
def echo(signal):
"""Echo effect function."""
return [s * 0.6 for s in signal] + [0] * 2000
@effect
def reverb(signal):
"""Reverb effect function."""
return [s * 0.4 for s in signal] + [0] * 3000
@effect
def chorus(signal):
"""Chorus effect function."""
return [s * 0.8 for s in signal] + [0] * 1500
# Example usage
input_signal = [0.5, 0.7, 0.3, 0.9]
composed_output = compose_effects(input_signal, 'echo', 'reverb', 'chorus')
print(composed_output) |
#!/bin/bash -ex

# Builds manylinux wheels of pyca/cryptography for each interpreter in
# $PYTHONS, repairs them for the $PLAT platform tag, sanity-checks the
# result and collects the final wheels in wheelhouse.final/.

# Compile wheels
cd /io

mkdir -p wheelhouse.final

rm -rf build
rm -rf dist

for P in ${PYTHONS}; do
    PYBIN=/opt/python/${P}/bin
    # Fresh virtualenv per interpreter to isolate build dependencies.
    "${PYBIN}"/python -m venv .venv
    .venv/bin/pip install -U pip wheel cffi setuptools-rust
    # For CPython targets, build an abi3 (stable-ABI) wheel tagged for
    # the interpreter's minor version.
    REGEX="cp3([0-9])*"
    if [[ "${PYBIN}" =~ $REGEX ]]; then
        PY_LIMITED_API="--py-limited-api=cp3${BASH_REMATCH[1]}"
    fi
    # Link against the custom OpenSSL; hide its symbols from consumers.
    LDFLAGS="-L/opt/pyca/cryptography/openssl/lib" \
        CFLAGS="-I/opt/pyca/cryptography/openssl/include -Wl,--exclude-libs,ALL" \
        .venv/bin/python setup.py bdist_wheel $PY_LIMITED_API

    auditwheel repair --plat ${PLAT} -w wheelhouse/ dist/cryptography*.whl

    # Sanity checks
    # NOTE(ianw) : no execstack on aarch64, comes from
    # prelink, which was never supported.  CentOS 8 does
    # have it separate, skip for now.
    if [[ ! "${PLAT}" =~ "aarch64" ]]; then
        # Fail the build if any bundled shared object requests an
        # executable stack.
        for f in wheelhouse/*.whl; do
            unzip $f -d execstack.check
            results=$(execstack execstack.check/cryptography/hazmat/bindings/*.so)
            count=$(echo "$results" | grep -c '^X' || true)
            if [ "$count" -ne 0 ]; then
                exit 1
            fi
            rm -rf execstack.check
        done
    fi

    # Smoke-test: the repaired wheel must import and report its linked OpenSSL.
    .venv/bin/pip install cryptography --no-index -f wheelhouse/
    .venv/bin/python -c "from cryptography.hazmat.backends.openssl.backend import backend;print('Loaded: ' + backend.openssl_version_text());print('Linked Against: ' + backend._ffi.string(backend._lib.OPENSSL_VERSION_TEXT).decode('ascii'))"

    # Cleanup
    mv wheelhouse/* wheelhouse.final
    rm -rf .venv dist wheelhouse
done
# Self-test for EXPECT_NE: differing values must leave the failure flag at
# 0, identical values must set it. Output on fd 2 and fd 105 (presumably
# the framework's logging channel — confirm) is silenced, and the flag is
# reset at the end so later tests start clean.
test::EXPECT_NE() {
  EXPECT_NE 'abc' 'def' 2>'/dev/null' 105>'/dev/null'
  ASSERT_TRUE [ "${IMOSH_TEST_IS_FAILED}" -eq 0 ]
  EXPECT_NE 'abc' 'abc' 2>'/dev/null' 105>'/dev/null'
  ASSERT_TRUE [ "${IMOSH_TEST_IS_FAILED}" -ne 0 ]
  IMOSH_TEST_IS_FAILED=0
}
|
docker build -t cybersec_project_backend ./backend && docker build -t cybersec_project_frontend ./frontend && docker-compose up -d |
#!/usr/bin/env bash

# Prints a tmux foreground-color directive based on the current GPU
# temperature status (low / medium / high), honoring user overrides set
# via tmux options.

CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CURRENT_DIR/helpers.sh"

# Resolved colors (filled in by get_fg_color_settings).
gpu_low_fg_color=""
gpu_medium_fg_color=""
gpu_high_fg_color=""

# Defaults: green / yellow / red for low / medium / high.
gpu_low_default_fg_color="#[fg=green]"
gpu_medium_default_fg_color="#[fg=yellow]"
gpu_high_default_fg_color="#[fg=red]"

# Read the tmux options, falling back to the defaults above.
get_fg_color_settings() {
  gpu_low_fg_color=$(get_tmux_option "@gpu_low_fg_color" "$gpu_low_default_fg_color")
  gpu_medium_fg_color=$(get_tmux_option "@gpu_medium_fg_color" "$gpu_medium_default_fg_color")
  gpu_high_fg_color=$(get_tmux_option "@gpu_high_fg_color" "$gpu_high_default_fg_color")
}

# Print the color matching the current GPU temperature status.
print_fg_color() {
  local gpu_temp
  # BUG FIX: quote expansions — an empty result from gpu_temperature.sh
  # previously made the unquoted `[ $var == ... ]` tests error out.
  gpu_temp=$("$CURRENT_DIR/gpu_temperature.sh" | sed -e 's/%//')
  local gpu_load_status
  gpu_load_status=$(gpu_temp_status "$gpu_temp")
  case "$gpu_load_status" in
    low) echo "$gpu_low_fg_color" ;;
    medium) echo "$gpu_medium_fg_color" ;;
    high) echo "$gpu_high_fg_color" ;;
  esac
}

main() {
  get_fg_color_settings
  print_fg_color
}
main
<filename>baselines/bert-models/transformer_bert.py
import sys
import pandas as pd
import numpy as np
import argparse
from tqdm import tqdm
from pathlib import Path
import torch
# from datasets import load_dataset # this has to be installed first using - !pip install datasets
# transformers package can be installed using "!pip install transformers"
from transformers import AutoTokenizer, pipeline, AutoModelForTokenClassification
from transformers import BertTokenizer
import re, unicodedata, os, logging, unidecode, emoji
from html.parser import HTMLParser
from transformers import RobertaForTokenClassification
from transformers import RobertaConfig
from transformers import RobertaTokenizer
# NER tag set (IOB scheme) matching the token-classification model's outputs.
label_list = [
    "O",       # Outside of a named entity
    "B-MISC",  # Beginning of a miscellaneous entity right after another miscellaneous entity
    "I-MISC",  # Miscellaneous entity
    "B-PER",   # Beginning of a person's name right after another person's name
    "I-PER",   # Person's name
    "B-ORG",   # Beginning of an organisation right after another organisation
    "I-ORG",   # Organisation
    "B-LOC",   # Beginning of a location right after another location
    "I-LOC"    # Location
]

# Character class covering the major emoji/pictograph Unicode blocks.
EMOJI_PATTERN = re.compile(
    "["
    "\U0001F1E0-\U0001F1FF"  # flags (iOS)
    "\U0001F300-\U0001F5FF"  # symbols & pictographs
    "\U0001F600-\U0001F64F"  # emoticons
    "\U0001F680-\U0001F6FF"  # transport & map symbols
    "\U0001F700-\U0001F77F"  # alchemical symbols
    "\U0001F780-\U0001F7FF"  # Geometric Shapes Extended
    "\U0001F800-\U0001F8FF"  # Supplemental Arrows-C
    "\U0001F900-\U0001F9FF"  # Supplemental Symbols and Pictographs
    "\U0001FA00-\U0001FA6F"  # Chess Symbols
    "\U0001FA70-\U0001FAFF"  # Symbols and Pictographs Extended-A
    "\U00002702-\U000027B0"  # Dingbats
    "\U000024C2-\U0001F251"
    "]+"
)

# compile regexes
# @username mentions (up to 15 word characters — presumably Twitter handles)
username_regex = re.compile(r'(^|[^@\w])@(\w{1,15})\b')
url_regex = re.compile(r'((www\.[^\s]+)|(https?://[^\s]+)|(http?://[^\s]+))')
control_char_regex = re.compile(r'[\r\n\t]+')
# translate table for punctuation: curly quotes and en-dash -> ASCII equivalents
transl_table = dict([(ord(x), ord(y)) for x, y in zip(u"‘’´“”–-", u"'''\"\"--")])
# HTML parser
html_parser = HTMLParser()
def basic_preprocess(text):
    """Minimal cleanup: drop emoji and '·', then force the text to ASCII.

    :param text: raw input; float input (pandas NaN) is mapped to ''
    :return: cleaned ASCII string
    """
    # return empty string if text is NaN (pandas encodes missing text as float NaN)
    if type(text)==float:
        return ''
    # remove emoji
    text = re.sub(EMOJI_PATTERN, r' ', text)
    text = re.sub(r'·', ' ', text)
    # NFKD-decompose, then drop any byte that is not plain ASCII
    text = unicodedata.normalize('NFKD',text).encode('ascii', 'ignore').decode('utf-8', 'ignore')
    return text
def preprocess_bert(text, do_lower_case=False):
    """Preprocesses input for NER.

    Pipeline: standardize whitespace/punctuation -> demojize -> optional
    lowercasing -> strip unicode symbols -> transliterate accents.

    :param text: input string
    :param do_lower_case: lowercase the text (for uncased models)
    :return: cleaned string
    """
    # standardize
    text = standardize_text(text)
    text = asciify_emojis(text)
    text = standardize_punctuation(text)
    if do_lower_case:
        text = text.lower()
    text = remove_unicode_symbols(text)
    text = remove_accented_characters(text)
    return text
def remove_accented_characters(text):
    # Transliterate accented/non-ASCII characters to their closest ASCII form.
    text = unidecode.unidecode(text)
    return text
def remove_unicode_symbols(text):
text = ''.join(ch for ch in text if unicodedata.category(ch)[0] != 'So')
return text
def asciify_emojis(text):
    """
    Converts emojis into text aliases. E.g. 👍 becomes :thumbs_up:
    For a full list of text aliases see: https://www.webfx.com/tools/emoji-cheat-sheet/

    :param text: input string
    :return: string with each emoji replaced by its ':alias:' form
    """
    text = emoji.demojize(text)
    return text
def standardize_text(text):
    """
    1) Unescape HTML entities
    2) Replaces some non-standard punctuation with standard versions.
    3) Replace \r, \n and \t with white spaces
    4) Removes all other control characters and the NULL byte
    5) Removes duplicate white spaces
    """
    import html

    # BUG FIX: HTMLParser.unescape() was deprecated in Python 3.4 and
    # removed in 3.9; html.unescape() is the documented replacement with
    # identical semantics.
    text = html.unescape(text)
    # standardize punctuation (curly quotes/dashes -> ASCII, see transl_table)
    text = text.translate(transl_table)
    text = text.replace('…', '...')
    # replace \t, \n and \r characters by a whitespace
    text = re.sub(control_char_regex, ' ', text)
    # remove all remaining control characters (Unicode category 'C*')
    text = ''.join(ch for ch in text if unicodedata.category(ch)[0] != 'C')
    # collapse runs of whitespace into single spaces
    text = ' '.join(text.split())
    return text.strip()
def standardize_punctuation(text):
    # Transliterate punctuation characters (Unicode category 'P*') to their
    # ASCII equivalents; all other characters pass through unchanged.
    return ''.join([unidecode.unidecode(t) if unicodedata.category(t)[0] == 'P' else t for t in text])
def get_args():
    """Parse command-line arguments for the NER baseline run.

    :return: argparse.Namespace with dataset/model/column/preprocess options
    """
    print("Getting arguments .... \n")
    args = argparse.ArgumentParser()
    args.add_argument('--dataset', default='conll', help='enter the path of the csv datafile with ads/text')
    args.add_argument('--model', default='transformer-bert', help='enter the path to the baseline model you want to use such as \
    fine-tuned-bert,ht-bert,whole-mask-bert. For transformer-bert, simply type "transformer-bert".')
    # column of the input CSV to read text from / write predictions to
    args.add_argument('--inp_column_name', default='description')
    args.add_argument('--res_column_name', default='transformer-bert')
    args.add_argument('--preprocess', default=True)
    return args.parse_args()
def detokenize(names):
    """Merge BERT WordPiece sub-tokens ("##xxx") back into whole words.

    Args:
        names: list of token lists, one per input example, in WordPiece form.
    Returns:
        list of word lists with sub-tokens joined onto the preceding word.

    Fixed: the previous implementation only merged up to three consecutive
    "##" pieces (hardcoded i+1/i+2/i+3 lookahead); longer words were silently
    truncated.  This version merges runs of any length.
    """
    print("Detokenizing ..... \n")

    def is_subtoken(word):
        # WordPiece marks word-internal pieces with a leading "##".
        return word[:2] == "##"

    new_names = []
    for tokens in names:
        restored_text = []
        for token in tokens:
            if is_subtoken(token):
                # Continuation piece: glue onto the current word.  Leading
                # sub-tokens with no preceding word are dropped, matching the
                # original behaviour.
                if restored_text:
                    restored_text[-1] += token[2:]
            else:
                restored_text.append(token)
        new_names.append(restored_text)
    return new_names
def character_ends(name):
    """Join predictions whose continuation pieces carry a '##' marker into
    whole words (BERT-style sub-word output)."""
    merged = []
    current = ""
    for piece in name:
        if '##' in piece:
            # Continuation piece: strip the two marker characters and append.
            current += piece[2:]
        else:
            # A fresh word starts here; flush the word built so far.
            if current:
                merged.append(current)
            current = piece
    if current:
        merged.append(current)
    return merged
def character_starts(name):
    """Join predictions where a chr(288) ('Ġ') marker flags the START of a new
    word (GPT/RoBERTa-style BPE output); unmarked pieces continue the word."""
    word_start_mark = chr(288)  # 'Ġ'
    words = []
    current = ""
    for piece in name:
        if word_start_mark in piece:
            # New word begins: flush the previous one, drop the marker char.
            if current:
                words.append(current)
            current = piece[1:]
        else:
            current += piece
    if current:
        words.append(current)
    return words
def post_process(names):
    """Convert per-word-piece NER predictions into clean, lowercase name strings.

    Args:
        names: list of token lists (one list per input example) as returned by
               the HuggingFace NER pipeline.
    Returns:
        list of cleaned name lists, special tokens and markers removed.

    Fixes over the previous version:
      * multi-token predictions WITHOUT sub-word markers were dropped entirely
        (``new_names`` stayed empty); they are now kept as-is,
      * ``if name != ' ' or name != '':`` was always true, so empty strings
        leaked into the output; replaced with a proper emptiness check.
    """
    print("Detokenizing ..... \n")
    # Re-join word pieces that the tokenizer split apart.
    all_new_names = []
    for name in names:
        if len(name) == 1:
            all_new_names.append(name)
            continue
        joined = ''.join(name)
        if chr(288) in joined:      # 'Ġ' marks the start of a word (BPE)
            all_new_names.append(character_starts(name))
        elif '##' in joined:        # '##' marks word-internal pieces (WordPiece)
            all_new_names.append(character_ends(name))
        else:
            # No sub-word markers: tokens are already whole words.
            all_new_names.append(name)
    # Strip special tokens / marker characters and normalise to lowercase ASCII.
    mod_names = []
    for group in all_new_names:
        tmp_names = []
        for raw in group:
            cleaned = (raw.replace("</s>", "")
                          .replace("#", "")
                          .replace("<s>", "")
                          .replace(" ", ""))
            cleaned = cleaned.encode('ascii', errors='ignore').decode()
            if cleaned:
                tmp_names.append(cleaned.lower())
        mod_names.append(tmp_names)
    return mod_names
def extract_names(text, model_name):
    """Run a token-classification (NER) pipeline over the ads and collect
    PERSON-entity tokens.

    Args:
        text: iterable of ad strings.
        model_name: 'transformer-bert' for the default pre-trained pipeline,
                    otherwise a filesystem path to a fine-tuned model.
    Returns:
        Cleaned, detokenized name lists (one per input) via post_process.
    """
    print("Extracting names ..... \n")
    if model_name == 'transformer-bert':
        # Default pre-trained HuggingFace NER pipeline.
        transformer_entity_extractor = pipeline("ner")
    else:
        # Fine-tuned model loaded from disk.
        model = AutoModelForTokenClassification.from_pretrained(Path(model_name))
        tokenizer = AutoTokenizer.from_pretrained(Path(model_name))
        transformer_entity_extractor = pipeline('ner', model=model, tokenizer=tokenizer)
    name_extracted = []
    for example in tqdm(text):
        predictions = transformer_entity_extractor(example)
        # Keep only tokens tagged as (part of) a person entity, e.g. B-PER/I-PER.
        names = [item['word'] for item in predictions
                 if item and 'PER' in item.get('entity', '')]
        name_extracted.append(names)
    return post_process(name_extracted)
def save_data(args, data_df):
    """Write the DataFrame (including the extracted-names column) back to the
    dataset path from the command line, keeping the original delimiter."""
    out_path = args.dataset
    delimiter = '\t' if out_path.strip().endswith('tsv') else ','
    data_df.to_csv(out_path, sep=delimiter, index=False)
def load_data(args):
    """Load the ads CSV/TSV file into a DataFrame and optionally pre-process
    the text column with basic_preprocess."""
    print("Loading data ..... \n")
    data_file = args.dataset
    delimiter = '\t' if data_file.strip().endswith('tsv') else ','
    data_df = pd.read_csv(data_file, sep=delimiter)
    if args.inp_column_name not in data_df.columns:
        print("Input correct text column name in dataset \n")
        exit()
    if args.preprocess:
        # Clean the raw text (lowercasing, symbol/accent removal, ...).
        data_df[args.inp_column_name] = data_df[args.inp_column_name].apply(basic_preprocess)
    return data_df
def main():
    """Entry point: load the data, extract person names, store the results."""
    args = get_args()
    data = load_data(args)
    # extract_names already detokenizes and cleans the predictions.
    extracted_names = extract_names(data[args.inp_column_name].values, args.model)
    data[args.res_column_name] = extracted_names
    save_data(args, data)
    print(data.head())


# Guard the entry point so importing this module does not trigger a full run.
if __name__ == '__main__':
    main()
#!/bin/bash
# Build the wormhole agent, kill any running fvt instance, then start a new one.
go build -o fvt/wormhole_agent main.go
chmod +x fvt/wormhole_agent
# Keep the background process alive under Jenkins.
export BUILD_ID=dontKillMe
# Find PIDs of running agents.  `grep -v grep` excludes the grep commands in
# this very pipeline, whose own argv also contains "wormhole_agent" and "fvt"
# and would otherwise be matched (and uselessly kill -9'd).
pids=`ps aux | grep "wormhole_agent" | grep "fvt" | grep -v grep | awk '{printf $2 " "}'`
if [ "$pids" = "" ] ; then
    echo "No wormhole_agent was started"
else
    for pid in $pids ; do
        echo "kill wormhole_agent " $pid
        kill -9 $pid
    done
fi
nohup fvt/wormhole_agent client $1 > agent.out 2>&1 &
#!/usr/bin/env bash
##
## This script upgrades a production VM.
##
## Required environment variables:
##
##   LOKOLE_VM_PASSWORD
##   LOKOLE_DNS_NAME
##
scriptdir="$(dirname "$0")"
scriptname="${BASH_SOURCE[0]}"
# shellcheck disable=SC1090
. "${scriptdir}/utils.sh"

#
# verify inputs
#
required_env "${scriptname}" "LOKOLE_VM_PASSWORD"
required_env "${scriptname}" "LOKOLE_DNS_NAME"

#
# upgrade production deployment
#
log "Upgrading VM ${LOKOLE_DNS_NAME}"
# Feed the local vm.sh script into a remote shell on the VM.
# NOTE(review): StrictHostKeyChecking=no skips host-key verification —
# acceptable only for disposable/test infrastructure.
exec sshpass -p "${LOKOLE_VM_PASSWORD}" ssh -o StrictHostKeyChecking=no "opwen@${LOKOLE_DNS_NAME}" <"${scriptdir}/vm.sh"
/**
 * Return the minimum element of a numeric array.
 * @param {number[]} arr - values to scan (assumed non-empty; an empty array
 *                         yields undefined, same as the original version)
 * @returns {number} the smallest value
 */
function findSmallest(arr) {
  return arr.reduce((min, value) => (value < min ? value : min), arr[0]);
}

const result = findSmallest([99, 25, 78, 64]);
console.log(result); // 25
def limit_string(string):
    """Truncate ``string`` to at most 6 characters; shorter input is unchanged."""
    max_len = 6
    return string if len(string) <= max_len else string[:max_len]
#!/bin/bash
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

# Resolve the panel's server root: this script lives four directories below it.
curPath=`pwd`
rootPath=$(dirname "$curPath")
rootPath=$(dirname "$rootPath")
rootPath=$(dirname "$rootPath")
rootPath=$(dirname "$rootPath")
serverPath=$(dirname "$rootPath")
sourcePath=${serverPath}/source/php

# PHP extension to manage and the PECL version to fetch.
LIBNAME=redis
LIBV=4.2.0

sysName=`uname`
actionType=$1  # install | uninstall
version=$2     # PHP version identifier, e.g. 72

# Locate the non-ZTS extension directory of the selected PHP build.
NON_ZTS_FILENAME=`ls $serverPath/php/${version}/lib/php/extensions | grep no-debug-non-zts`
extDir=$serverPath/php/${version}/lib/php/extensions/${NON_ZTS_FILENAME}/

# macOS (BSD) sed requires an explicit backup suffix with -i.
if [ "$sysName" == "Darwin" ];then
    BAK='_bak'
else
    BAK=''
fi
# Build (if needed) and enable the redis extension for the selected PHP version.
Install_lib()
{
    isInstall=`cat $serverPath/php/$version/etc/php.ini|grep "${LIBNAME}.so"`
    if [ "${isInstall}" != "" ];then
        echo "php-$version 已安装${LIBNAME},请选择其它版本!"
        return
    fi

    extFile=$extDir${LIBNAME}.so
    if [ ! -f "$extFile" ];then
        php_lib=$sourcePath/php_lib
        mkdir -p $php_lib
        # Download the PECL tarball only when it is not cached yet.
        # (Fixed: the old test used `-d` on the tarball path, which is never a
        # directory, so the archive was re-downloaded on every single run.)
        if [ ! -f $php_lib/${LIBNAME}-${LIBV}.tgz ];then
            wget -O $php_lib/${LIBNAME}-${LIBV}.tgz http://pecl.php.net/get/${LIBNAME}-${LIBV}.tgz
        fi
        # Always (re-)extract so a cached tarball still yields a source tree;
        # previously extraction was skipped when the tarball existed, leaving
        # the subsequent cd/configure to run in the wrong directory.
        cd $php_lib && tar xvf ${LIBNAME}-${LIBV}.tgz
        cd $php_lib/${LIBNAME}-${LIBV}
        $serverPath/php/$version/bin/phpize
        ./configure --with-php-config=$serverPath/php/$version/bin/php-config
        make && make install && make clean
    fi

    if [ ! -f "$extFile" ];then
        echo "ERROR!"
        return
    fi

    # Register the extension and reload PHP-FPM.
    echo "[${LIBNAME}]" >> $serverPath/php/$version/etc/php.ini
    echo "extension=${LIBNAME}.so" >> $serverPath/php/$version/etc/php.ini
    $serverPath/php/init.d/php$version reload
    echo '==========================================================='
    echo 'successful!'
}
# Disable and remove the redis extension for the selected PHP version.
Uninstall_lib()
{
    if [ ! -f "$serverPath/php/$version/bin/php-config" ];then
        echo "php-$version 未安装,请选择其它版本!"
        return
    fi

    extFile=$extDir${LIBNAME}.so
    if [ ! -f "$extFile" ];then
        echo "php-$version 未安装${LIBNAME},请选择其它版本!"
        # Fixed: this message previously said "memcache" (copy/paste from the
        # memcache installer) and misspelled "Please".
        echo "php-$version not install ${LIBNAME}, Please select other version!"
        return
    fi

    # Drop the extension lines from php.ini, delete the .so, reload PHP-FPM.
    sed -i $BAK "/${LIBNAME}.so/d" $serverPath/php/$version/etc/php.ini
    sed -i $BAK "/${LIBNAME}/d" $serverPath/php/$version/etc/php.ini
    rm -f $extFile
    $serverPath/php/init.d/php$version reload
    echo '==============================================='
    echo 'successful!'
}
if [ "$actionType" == 'install' ];then
Install_lib
elif [ "$actionType" == 'uninstall' ];then
Uninstall_lib
fi |
#!/bin/bash -eE
# Deploys the full TON node bundle (proxy, web root, arangodb, q-server,
# kafka, statsd, ton-node) with docker-compose.
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)
# shellcheck source=env.sh
. "${SCRIPT_DIR}/env.sh"
TMP_DIR=/tmp/$(basename "$0" .sh)_$$
rm -rf "${TMP_DIR}"
mkdir -p "${TMP_DIR}"
# Stop (or fully remove, when CLEAN_HOST=yes) every bundle component.
set +eE
for BUNDLE_COMPONENT in proxy web.root arangodb q-server kafka statsd ton-node; do
    if [ "${CLEAN_HOST}" = "yes" ]; then
        cd "${DOCKER_COMPOSE_DIR}/${BUNDLE_COMPONENT}/" && docker-compose down --volumes --remove-orphans
    else
        cd "${DOCKER_COMPOSE_DIR}/${BUNDLE_COMPONENT}/" && docker-compose stop
    fi
done
if [ "${CLEAN_HOST}" = "yes" ]; then
    docker system prune --all --force --volumes
    docker network create proxy_nw
fi
set -eE
# Substitute host-specific values into the compose configs.
sed -i "s|yourdomain.com|${HOSTNAME}|g" "${DOCKER_COMPOSE_DIR}/arangodb/docker-compose.yml"
sed -i "s|email for notification|${EMAIL_FOR_NOTIFICATIONS}|g" "${DOCKER_COMPOSE_DIR}/arangodb/docker-compose.yml"
sed -i "s|NETWORK_TYPE.*|NETWORK_TYPE=${NETWORK_TYPE}|g" "${DOCKER_COMPOSE_DIR}/arangodb/.env"
sed -i "s|host.yourdomain.com|${HOSTNAME}|g" "${DOCKER_COMPOSE_DIR}/web.root/.env"
sed -i "s|for notification|${EMAIL_FOR_NOTIFICATIONS}|g" "${DOCKER_COMPOSE_DIR}/web.root/docker-compose.yml"
# Basic-auth credentials for the arango vhost behind the proxy.
rm -f "${DOCKER_COMPOSE_DIR}/proxy/htpasswd/arango.yourdomain.com"
echo "admin:\$apr1\$d0ifqbt3\$iayulpIOP2.IS4Sy1I2zJ0" >"${DOCKER_COMPOSE_DIR}/proxy/htpasswd/arango.${HOSTNAME}"
echo "#iJJ9fWxb9Z6CS1aPagoW" >>"${DOCKER_COMPOSE_DIR}/proxy/htpasswd/arango.${HOSTNAME}"
mv "${DOCKER_COMPOSE_DIR}/proxy/vhost.d/host.yourdomain.com" "${DOCKER_COMPOSE_DIR}/proxy/vhost.d/${HOSTNAME}"
for BUNDLE_COMPONENT in proxy web.root arangodb; do
    cd "${DOCKER_COMPOSE_DIR}/${BUNDLE_COMPONENT}/" && docker-compose up -d
done
# Build and start q-server from the pinned git revision.
rm -rf "${DOCKER_COMPOSE_DIR}/q-server/build/ton-q-server"
cd "${DOCKER_COMPOSE_DIR}/q-server/build" && git clone --recursive "${TON_Q_SERVER_GITHUB_REPO}"
cd "${DOCKER_COMPOSE_DIR}/q-server/build/ton-q-server" && git checkout "${TON_Q_SERVER_GITHUB_COMMIT_ID}"
cd "${DOCKER_COMPOSE_DIR}/q-server" && docker-compose up -d
echo "INFO: Waiting for Kafka start..."
cd "${DOCKER_COMPOSE_DIR}/kafka" || exit 1
# Retry `docker-compose up` until its log contains no "ERROR:" lines.
OK=0
while [ $OK -ne 100 ]; do
    docker-compose up -d >"${TMP_DIR}/kafka.log" 2>&1
    OK=$(awk '
    BEGIN { fail = 0 }
    {
        if ($1 == "ERROR:") {
            fail = fail + 1
        }
    }
    END {
        if (fail != 0) {
            print 0
        } else {
            print 100
        }
    }
    ' "${TMP_DIR}/kafka.log")
done
rm -f "${TMP_DIR}/kafka.log"
# Wait until the check-connect container reports (healthy).
OK=0
while [ $OK -ne 100 ]; do
    OK=$(docker ps --format 'table {{.Image}} \t {{.Names}} \t {{.Status}}' | awk '
    BEGIN { OK = 0 }
    {
        if (($2 == "check-connect") && ($3 == "Up")) {
            i = NF;
            if ($i == "(healthy)") {
                OK = OK + 1
            }
        }
    }
    END {
        if (OK != 1) {
            print OK
        } else {
            print 100
        }
    }
    ')
    sleep 1s
done
# Discover the public IPv4 address (retry until the value contains 3 dots).
until [ "$(echo "${IntIP}" | grep "\." -o | wc -l)" -eq 3 ]; do
    set +e
    IntIP="$(curl -sS ipv4bot.whatismyipaddress.com)":${ADNL_PORT}
    set -e
    echo "INFO: IntIP = $IntIP"
done
sed -i "s|IntIP.*|IntIP=${IntIP}|g" "${DOCKER_COMPOSE_DIR}/statsd/.env"
cd "${DOCKER_COMPOSE_DIR}/statsd/" && docker-compose up -d
sed -i "s|ADNL_PORT.*|ADNL_PORT=${ADNL_PORT}|" "${DOCKER_COMPOSE_DIR}/ton-node/.env"
sed -i "s|NETWORK_TYPE.*|NETWORK_TYPE=${NETWORK_TYPE}|" "${DOCKER_COMPOSE_DIR}/ton-node/.env"
# Build and start the TON node itself from the pinned git revision.
rm -rf "${DOCKER_COMPOSE_DIR}/ton-node/build/ton-node"
cd "${DOCKER_COMPOSE_DIR}/ton-node/build" && git clone --recursive "${TON_NODE_GITHUB_REPO}" ton-node
cd "${DOCKER_COMPOSE_DIR}/ton-node/build/ton-node" && git checkout "${TON_NODE_GITHUB_COMMIT_ID}"
echo "==============================================================================="
echo "INFO: starting node on ${HOSTNAME}..."
cd "${DOCKER_COMPOSE_DIR}/ton-node/" && docker-compose up -d
echo "INFO: starting node on ${HOSTNAME}... DONE"
echo "==============================================================================="
docker ps -a
rm -rf "${TMP_DIR}"
#!/bin/bash -x
#
# Generated - do not edit!
# (NetBeans-generated packaging script: regenerate from the IDE project
# settings rather than editing this file by hand.)
#
# Macros
TOP=`pwd`
CND_PLATFORM=GNU-Linux
CND_CONF=Debug
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=so
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/tutorial15
OUTPUT_BASENAME=tutorial15
PACKAGE_TOP_DIR=tutorial15/
# Functions
# Abort the whole script if the previous command failed.
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/tutorial15/bin"
copyFileToTmpDir "${OUTPUT_PATH}" "${NBTMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/tutorial15.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/tutorial15.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
#!/bin/bash
set -ex
rm -f core.*
# NOTE(review): this runs a .js file with the python interpreter — it very
# likely should be `node updateSitemap.js`; confirm before changing.
python updateSitemap.js
# Build into a temporary dir so the live .next build stays usable on failure.
sudo rm -fr build_to_delete tmp_build
echo "module.exports = { distDir: 'tmp_build' }" > next.config.js
npm run build
echo "module.exports = { distDir: '.next' }" > next.config.js
# Swap the fresh build into place.
if [ -d .next ]; then
    mv .next build_to_delete
fi
mv tmp_build .next
# Restart server
sudo rm -fr build_to_delete
sudo pm2 restart next
//#####################################################################
// Copyright 2009, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Namespace INTERSECTION
//#####################################################################
#include <PhysBAM_Tools/Vectors/VECTOR.h>
#include <PhysBAM_Geometry/Basic_Geometry/RAY.h>
#include <PhysBAM_Geometry/Basic_Geometry/TETRAHEDRON.h>
#include <PhysBAM_Geometry/Basic_Geometry_Intersections/RAY_TETRAHEDRON_INTERSECTION.h>
#include <PhysBAM_Geometry/Basic_Geometry_Intersections/RAY_TRIANGLE_3D_INTERSECTION.h>
namespace PhysBAM{
namespace INTERSECTION{
//#####################################################################
// Function Intersects
//#####################################################################
// Tests the ray against all four faces of the tetrahedron.  Each successful
// face test records that face's index (1-4) in ray.aggregate_id.
// NOTE(review): presumably the triangle routine tightens the ray on a hit, so
// the id left behind corresponds to the closest hit face — behaviour preserved.
template<class T> bool Intersects(RAY<VECTOR<T,3> >& ray,const TETRAHEDRON<T>& tetrahedron, const T thickness)
{
    bool intersection=false;
    if(INTERSECTION::Intersects(ray,tetrahedron.triangle1,thickness)){intersection=true;ray.aggregate_id=1;}
    if(INTERSECTION::Intersects(ray,tetrahedron.triangle2,thickness)){intersection=true;ray.aggregate_id=2;}
    if(INTERSECTION::Intersects(ray,tetrahedron.triangle3,thickness)){intersection=true;ray.aggregate_id=3;}
    if(INTERSECTION::Intersects(ray,tetrahedron.triangle4,thickness)){intersection=true;ray.aggregate_id=4;}
    return intersection; // simplified from: if(intersection) return true; else return false;
}
//#####################################################################
template bool Intersects(RAY<VECTOR<float,3> >&,const TETRAHEDRON<float>&,const float);
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template bool Intersects(RAY<VECTOR<double,3> >&,const TETRAHEDRON<double>&,const double);
#endif
};
};
|
package com.packt.designpatterns.bp.commandpattern;
/** Client of the command pattern demo: creates commands and hands them to the waiter. */
public class Customer {

    public static void main(String[] args) {
        // Build the concrete commands: order 10 pizzas, then cancel 5.
        Order orderCommand = new OrderPizza(new Pizza("Veg Supreme Pizza", 10));
        Order cancelCommand = new CancelPizza(new Pizza("Veg Supreme Pizza", 5));

        // The waiter is the invoker: it queues commands and executes them later.
        Waiter waiter = new Waiter();
        waiter.takeOrder(orderCommand);
        waiter.takeOrder(cancelCommand);
        waiter.placeOrders();
    }
}
|
<reponame>Haufe-Lexware/octane-gocd-plugin
package com.haufelexware.gocd.service;
import com.google.gson.Gson;
import com.haufelexware.gocd.dto.GoPipelineGroup;
import com.haufelexware.gocd.dto.GoPipelineGroups;
import com.haufelexware.util.Streams;
import com.thoughtworks.go.plugin.api.logging.Logger;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.HttpGet;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
 * This class encapsulates the API call to get all configured pipeline groups from Go.
 * This API service is available since Go Version 14.3.0
 * @see <a href="https://api.gocd.org/17.9.0/#config-listing">Config listing</a>
 */
public class GoGetPipelineGroups {

	private static final Logger Log = Logger.getLoggerFor(GoGetPipelineGroups.class);

	// Pre-configured HTTP client pointing at the Go server.
	private final GoApiClient goApiClient;

	public GoGetPipelineGroups(GoApiClient goApiClient) {
		this.goApiClient = goApiClient;
	}

	/**
	 * Fetch all configured pipeline groups.
	 *
	 * @return the groups, or an empty list when the request fails or the
	 *         server answers with a non-200 status.
	 */
	public List<GoPipelineGroup> get() {
		try {
			HttpResponse response = goApiClient.execute(new HttpGet("/go/api/config/pipeline_groups"));
			if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
				String content = Streams.readAsString(response.getEntity().getContent());
				// Deserialized straight from the JSON array.
				// NOTE(review): presumably GoPipelineGroups extends/implements
				// List<GoPipelineGroup>; confirm in the dto package.
				return new Gson().fromJson(content, GoPipelineGroups.class);
			} else {
				Log.error("Request got HTTP-" + response.getStatusLine().getStatusCode());
			}
		} catch (IOException e) {
			Log.error("Could not perform request", e);
		}
		return Collections.emptyList();
	}
}
|
/**
* Created by xschen on 13/8/2017.
*/
package com.github.chen0040.leetcode.day18;
|
#!/usr/bin/env bash
# Reset and re-bootstrap the monorepo: install tooling, wipe installed
# dependencies and lockfiles everywhere, then let lerna link the packages again.
tnpm i -g lerna @ali/tyarn
rm -rf node_modules package-lock.json yarn.lock
lerna clean -y
find ./packages -type f -name "package-lock.json" -exec rm -f {} \;
lerna bootstrap
#include <stdlib.h>
#include <string.h>
#include "misc.h"
/*
 * A fixed-width register: `bits` wide, with `data` pointing at its backing
 * storage and `name` its identifier.
 */
struct reg
{
    char *data;        /* backing storage for the register value */
    const char *name;  /* register name -- NOTE(review): ownership unclear, confirm */
    int bits;          /* register width in bits */
};

/* Create a register of `bits` bits called `name` (defined elsewhere). */
struct reg *reg_create(int bits, const char *name);

/* Release a register previously returned by reg_create (defined elsewhere). */
void reg_free(struct reg *r);
<reponame>yunsean/yoga
package com.yoga.content.comment.dto;
import com.yoga.core.base.BaseDto;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/** Request parameters for listing comments (paging fields inherited from BaseDto). */
@Getter
@Setter
@NoArgsConstructor
public class ListDto extends BaseDto {
    // Free-text filter — NOTE(review): presumably matched against comment content.
    private String filter;
    // Issued-state flag; NOTE(review): null presumably means "no restriction".
    private Boolean issued;
}
|
package main

import (
	"crypto/rand"
	"fmt"
	"math/big"
)

// reactorCode holds two 3-digit code halves, each packed one decimal digit
// per nibble, so the %03X hex rendering shows the original digits 0-9.
type reactorCode struct {
	one uint16
	two uint16
}

// newReactorCode generates a fresh random code using crypto/rand.
// Each packed half is guaranteed to be non-zero (i.e. never "000").
func newReactorCode() reactorCode {
	ten := big.NewInt(10)

	digit := func() uint16 {
		d, err := rand.Int(rand.Reader, ten)
		if err != nil {
			panic(err)
		}
		return uint16(d.Int64())
	}

	half := func() uint16 {
		var packed uint16
		for packed == 0 {
			packed = digit()<<8 | digit()<<4 | digit()
		}
		return packed
	}

	return reactorCode{one: half(), two: half()}
}

// String renders the code as six decimal digits, three per half.
func (code reactorCode) String() string {
	return fmt.Sprintf("%03X%03X", code.one, code.two)
}
#!/usr/bin/env bash
#
# Dgraph Installer Script
#
# Homepage: https://dgraph.io
# Requires: bash, curl, tar or unzip
#
# Hello! This is a script that installs Dgraph
# into your PATH (which may require password authorization).
# Use it like this:
#
#	$ curl https://get.dgraph.io | bash
#
# This should work on Mac, Linux, and BSD systems.
set -e

# ANSI colour codes used by the print_* helpers below.
DIM='\033[2m'
BOLD='\033[1m'
RED='\033[91;1m'
GREEN='\033[32;1m'
RESET='\033[0m'

# Normalise the opt-in environment variables to lowercase, defaulting to "n".
acceptLower=$(echo "$ACCEPT_LICENSE" | dd conv=lcase 2> /dev/null)
systemdLower=$(echo "$INSTALL_IN_SYSTEMD" | dd conv=lcase 2> /dev/null)
ACCEPT_LICENSE=${acceptLower:-n}
INSTALL_IN_SYSTEMD=${systemdLower:-n}

sudo_cmd=""
argVersion=
myShell=$(which bash)

# Coloured output helpers.
print_instruction() {
	printf '%b\n' "$BOLD$1$RESET"
}
print_step() {
	printf '%b\n' "$DIM$1$RESET"
}
print_error() {
	printf '%b\n' "$RED$1$RESET"
}
print_good() {
	printf '%b\n' "$GREEN$1$RESET"
}
# Show the DCL license text and require agreement, interactively unless the
# -y/--accept-license flag or ACCEPT_LICENSE=y was given.  Returns non-zero
# when the user declines.
check_license_agreement() {
	curl -s https://raw.githubusercontent.com/dgraph-io/dgraph/master/licenses/DCL.txt
	cat << "EOF"
By downloading Dgraph you agree to the Dgraph Community License (DCL) terms
shown above. An open source (Apache 2.0) version of Dgraph without any
DCL-licensed enterprise features is available by building from the Dgraph
source code. See the source installation instructions for more info:
https://github.com/dgraph-io/dgraph#install-from-source
EOF
	if [ ! "$ACCEPT_LICENSE" = "y" ]; then
		# Read from the terminal directly (stdin may be the piped script).
		read -p 'Do you agree to the terms of the Dgraph Community License? [Y/n] ' response < /dev/tty
		[[ "x$response" == "x" || "$response" == [yY] || "$response" == [yY][eE][sS] ]] || return 1
	else
		echo 'Dgraph Community License terms accepted with -y/--accept-license option.'
	fi
}
# Download, checksum-verify (sha256) and install the dgraph binaries into
# /usr/local/bin, backing up any previously installed version first.
install_dgraph() {
	printf "%b" "$BOLD"
	cat << "EOF"
  _____                        _
 |  __ \                      | |
 | |  | | __ _ _ __ __ _ _ __ | |__
 | |  | |/ _` | '__/ _` | '_ \| '_ \
 | |__| | (_| | | | (_| | |_) | | | |
 |_____/ \__, |_|  \__,_| .__/|_| |_|
          __/ |         | |
         |___/          |_|
EOF
	printf "%b" "$RESET"
	install_path="/usr/local/bin"
	# Check curl is installed
	if ! hash curl 2>/dev/null; then
		print_error "Could not find curl. Please install curl and try again.";
		exit 1;
	fi
	# Check tar is installed
	if ! hash tar 2>/dev/null; then
		print_error "Could not find tar. Please install tar and try again.";
		exit 1;
	fi
	# Check sudo permissions
	if hash sudo 2>/dev/null; then
		sudo_cmd="sudo"
		echo "Requires sudo permission to install Dgraph binaries to $install_path."
		if ! $sudo_cmd -v; then
			print_error "Need sudo privileges to complete installation."
			exit 1;
		fi
	fi
	if ! check_license_agreement; then
		print_error 'You must agree to the license terms to install Dgraph.'
		exit 1
	fi
	# Abort when the requested release tag does not exist upstream.
	check_versions(){
		toCompare=$(curl -s https://get.dgraph.io/latest | grep -o "${release_version}" | head -n1)
		if [ "$release_version" == "$toCompare" ]; then
			return
		else
			print_error "This version doesn't exist or it is a typo (Tip: You need to add \"v\" eg: v2.0.0-rc1)"
			exit 1
		fi
	}
	if [ -n "${VERSION}" ] || [ -n "${argVersion}" ]; then
		# Environment variable is preferred over command-line argument
		release_version="${VERSION:-${argVersion}}"
		check_versions
		print_step "Selected release version is $release_version."
	else
		release_version=$(curl -s https://get.dgraph.io/latest | grep -o '"tag_name": *"[^"]*' | grep -o '[^"]*$')
		print_step "Latest release version is $release_version."
	fi
	platform="$(uname | tr '[:upper:]' '[:lower:]')"
	# Pick whichever sha256 utility is available on this system.
	digest_cmd=""
	if hash shasum 2>/dev/null; then
		digest_cmd="shasum -a 256"
	elif hash sha256sum 2>/dev/null; then
		digest_cmd="sha256sum"
	elif hash openssl 2>/dev/null; then
		digest_cmd="openssl dgst -sha256"
	else
		print_error "Could not find suitable hashing utility. Please install shasum or sha256sum and try again.";
		exit 1
	fi
	if [ "$1" == "" ]; then
		tag="$release_version"
	else
		print_error "Invalid argument '$1'."
		exit 1
	fi
	checksum_file="dgraph-checksum-$platform-amd64".sha256
	checksum_link=$( echo https://github.com/dgraph-io/dgraph/releases/download/"$tag"/"$checksum_file")
	print_step "Downloading checksum file for ${tag} build."
	if curl -L --progress-bar "$checksum_link" -o "/tmp/$checksum_file"; then
		print_step "Download complete."
	else
		print_error "Sorry. Binaries not available for your platform. Please compile manually: https://dgraph.io/docs/deploy/#building-from-source"
		echo
		exit 1;
	fi
	# Expected checksum of the dgraph binary for this release.
	dgraph=$(grep -m 1 /usr/local/bin/dgraph /tmp/"$checksum_file" | grep -E -o '[a-zA-Z0-9]{64}')
	if [ "$dgraph" == "" ]; then
		print_error "Sorry, we don't have binaries for this platform. Please build from source."
		exit 1;
	fi
	print_step "Comparing checksums for dgraph binaries"
	if $digest_cmd /usr/local/bin/dgraph &>/dev/null; then
		dgraphsum=$($digest_cmd /usr/local/bin/dgraph | grep -E -o '[a-zA-Z0-9]{64}')
	else
		dgraphsum=""
	fi
	if [ "$dgraph" == "$dgraphsum" ]; then
		print_good "You already have Dgraph $tag installed."
	else
		tar_file=dgraph-$platform-amd64".tar.gz"
		dgraph_link="https://github.com/dgraph-io/dgraph/releases/download/$tag/"$tar_file
		# Download and untar Dgraph binaries
		if curl --output /dev/null --silent --head --fail "$dgraph_link"; then
			print_step "Downloading $dgraph_link"
			curl -L --progress-bar "$dgraph_link" -o "/tmp/$tar_file"
			print_good "Download complete."
		else
			print_error "Sorry. Binaries not available for your platform. Please compile manually: https://dgraph.io/docs/deploy/#building-from-source"
			echo
			exit 1;
		fi
		print_step "Inflating binaries (password may be required).";
		temp_dir=$(mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir')
		tar -C "$temp_dir" -xzf /tmp/"$tar_file"
		# Verify the freshly extracted binary against the published checksum.
		dgraphsum=$($digest_cmd "$temp_dir"/dgraph | awk '{print $1;}')
		if [ "$dgraph" != "$dgraphsum" ]; then
			print_error "Downloaded binaries checksum doesn't match with latest versions checksum"
			exit 1;
		fi
		# Create /usr/local/bin directory if it doesn't exist.
		$sudo_cmd mkdir -p /usr/local/bin
		# Backup existing dgraph binaries in HOME directory
		if hash dgraph 2>/dev/null; then
			dgraph_path="$(which dgraph)"
			dgraph_backup="dgraph_backup_olderversion"
			print_step "Backing up older versions in ~/$dgraph_backup (password may be required)."
			mkdir -p ~/$dgraph_backup
			$sudo_cmd mv $dgraph_path* ~/$dgraph_backup/.
		fi
		$sudo_cmd mv "$temp_dir"/* /usr/local/bin/.
		rm "/tmp/""$tar_file";
		rm -rf "$temp_dir"
		# Check installation
		if hash dgraph 2>/dev/null; then
			print_good "Dgraph binaries $tag have been installed successfully in /usr/local/bin.";
		else
			print_error "Installation failed. Please try again.";
			exit 1;
		fi
	fi
	print_instruction "Please visit https://dgraph.io/docs/get-started for further instructions on usage."
}
# Create the system user/group "dgraph" if it does not exist yet.
addGroup() {
	if id "dgraph" &>/dev/null; then
		echo 'user found - skipping' 1>&2
	else
		echo 'user not found - creating one...' 1>&2
		$sudo_cmd groupadd --system dgraph
		$sudo_cmd useradd --system -d /var/lib/dgraph -s /bin/false -g dgraph dgraph
	fi
	true
}

# Expand shell variables inside a template file and print the result.
# NOTE(review): uses eval on file content — only safe for trusted templates.
render_template() {
	eval "echo \"$(cat $1)\""
}

# Render one systemd unit file from the downloaded template.
# $1 description, $2 Requires= line, $3 RequiredBy= line, $4 command,
# $5 After= service, $6 output path.
gen() {
	empty=''
	description=$1
	requires=$2
	requiredBy=$3
	cmd=$4
	afterService=$5
	render_template "$tmplTemp/service.tmpl"| $sudo_cmd tee $6 > /dev/null
}

# Create data/log directories and install + start the dgraph systemd units
# (zero, alpha and the ratel UI).
setup_systemD() {
	pathToTemplate="https://raw.githubusercontent.com/dgraph-io/Install-Dgraph/master/contrib"
	systemdPath="/etc/systemd/system"
	dgraphPath="/var/lib/dgraph"
	tmplTemp="/tmp/dgraph"
	$sudo_cmd mkdir -p $dgraphPath
	$sudo_cmd mkdir -p $dgraphPath/{p,w,zw}
	$sudo_cmd mkdir -p /var/log/dgraph
	$sudo_cmd chown -R dgraph:dgraph /var/{lib,log}/dgraph
	$sudo_cmd mkdir -p $tmplTemp
	_getTmpl="$pathToTemplate/service.tmpl"
	$sudo_cmd curl -LJ --progress-bar "$_getTmpl" -o "$tmplTemp/service.tmpl"
	echo "#### Creating dgraph-ui.service ..."
	gen "dgraph.io Web UI" \
	"" \
	"" \
	"dgraph-ratel" \
	"" \
	$systemdPath/dgraph-ui.service
	echo "#### Creating dgraph-alpha.service ..."
	gen "dgraph.io Alpha instance" \
	"Requires=dgraph-zero.service" \
	"" \
	"dgraph alpha --lru_mb 2048 -p /var/lib/dgraph/p -w /var/lib/dgraph/w" \
	"dgraph-zero.service" \
	$systemdPath/dgraph-alpha.service
	echo "#### Creating dgraph-zero.service ..."
	gen "dgraph.io Zero instance" \
	"" \
	"RequiredBy=dgraph-alpha.service" \
	"dgraph zero --wal /var/lib/dgraph/zw" \
	"" \
	$systemdPath/dgraph-zero.service
	$sudo_cmd rm -rf "$tmplTemp"
	$sudo_cmd systemctl daemon-reload
	$sudo_cmd systemctl enable dgraph-alpha
	$sudo_cmd systemctl start dgraph-alpha
	$sudo_cmd systemctl enable dgraph-ui
	$sudo_cmd systemctl start dgraph-ui
	true
}

# EXIT-trap handler: report any non-zero exit with a pointer to the forums.
function exit_error {
	if [ "$?" -ne 0 ]; then
		print_error "There was some problem while installing Dgraph. Please share the output of this script with us on https://discuss.dgraph.io so that we can resolve the issue for you."
	fi
}
# Detect the init system.  Refuses OpenRC; enables the systemd installation
# path when systemctl is available.
verify_system() {
	if [ -x /sbin/openrc-run ]; then
		HAS_OPENRC=true
		print_error "Sorry we don't support OpenRC for now."
		print_good "But you can install Dgraph, remove the flag and try again."
		exit 1
	fi
	if hash systemctl 2>/dev/null; then
		print_good "Habemus SYSTEMD."
		INSTALL_IN_SYSTEMD="y"
		return 0
	fi
	print_error "Systemd was not found."
	# Fixed: this previously called the non-existent `rint_good`; with `set -e`
	# active, the resulting "command not found" aborted the whole installer
	# instead of printing the hint.
	print_good "But you can install Dgraph, remove the flag and try again."
	return 1
}
# Print supported command-line options.
print_usage() {
	echo "Usage:"
	echo " -v='' | --version='v20.07.2' : Choose Dgraph's version manually."
	echo " -s | --systemd : Install Dgraph as a service."
	echo " -y | --accept-license : Automatically agree to the terms of the Dgraph Community License."
}

trap exit_error EXIT

# Parse command-line flags; every consumed flag is shifted away so that
# install_dgraph receives no leftover arguments.
for f in "$@"; do
	case $f in
	'-y'|'--accept-license')
		ACCEPT_LICENSE=y
		;;
	'-s'|'--systemd')
		verify_system
		;;
	'-v='*|'--version='*)
		argVersion=${f#*=}
		;;
	'-h'|'--help')
		print_usage
		exit 0
		;;
	*)
		print_error "unknown option $1"
		print_usage
		exit 1
		;;
	esac
	shift
done

install_dgraph "$@"

# Optionally register and start the systemd units after a successful install.
if [ "$INSTALL_IN_SYSTEMD" = "y" ]; then
	echo "Systemd installation was requested."
	addGroup && setup_systemD
fi
|
/*
* Copyright 2004-2007 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.josql.utils;
import java.util.List;
import java.util.ArrayList;
import org.josql.Query;
import org.josql.exceptions.QueryExecutionException;
import org.josql.exceptions.QueryParseException;
import org.josql.expressions.Expression;
/**
* This class can be used as a convenient way of evaluating an expression
* without having to use the Query object itself.
* <p>
* In this way you can easily evaluate JoSQL expressions against objects.
* <p>
* Usage:
* <p>
* Use the static methods to evaluate the expression in one call, for instance to
* find out details about a file:
* <p>
* String exp = "path + ', size: ' + formatNumber(length) + ', last modified: ' + formatDate(lastModified)<br />
* String details = ExpressionEvaluator.getValue (exp, new File ('/home/me/myfile.txt'));
*/
public class ExpressionEvaluator
{
// Compiled WHERE clause of the wrapper query; this is the expression that is
// evaluated against caller-supplied objects.
private Expression where = null;
// The wrapper Query ("SELECT * FROM <class> WHERE <exp>") used for parsing
// and passed as context on every evaluation.
private Query q = null;
/**
 * Create a new expression evaluator.
 *
 * @param exp The expression to be evaluated.
 * @param cl The class of the object(s) that the expression will be
 *           evaluated against.
 * @throws QueryParseException If the expression cannot be parsed.
 */
public ExpressionEvaluator (String exp,
                            Class cl)
                     throws QueryParseException
{
    // Delegate to the main constructor with no extra function handlers.
    this (exp,
          cl,
          null);
}
/**
 * Create a new expression evaluator.
 *
 * @param exp The expression to be evaluated.
 * @param cl The class of the object(s) that the expression will be
 *           evaluated against.
 * @param fhs A list of function handlers that contain functions used by the
 *            expression; may be null.
 * @throws QueryParseException If the expression cannot be parsed.
 */
public ExpressionEvaluator (String exp,
                            Class cl,
                            List fhs)
                     throws QueryParseException
{
    Query query = new Query ();

    // Register any caller-supplied function handlers before parsing so that
    // functions referenced by the expression can be resolved.
    if (fhs != null)
    {
        for (Object handler : fhs)
        {
            query.addFunctionHandler (handler);
        }
    }

    // Wrap the expression in a dummy query; only the WHERE clause is used.
    query.parse ("SELECT * FROM " + cl.getName () + " WHERE " + exp);

    this.q = query;
    this.where = query.getWhereClause ();
}
/**
 * Get the query associated with the expression, use this to setup
 * bind variables, function handlers and so on, which of course must
 * be setup prior to evaluating the expression.
 *
 * @return The Query object.
 */
public Query getQuery ()
{
    // Exposes the internal wrapper query; callers may mutate its state.
    return this.q;
}
/**
 * Evaluate the expression against the object passed in.
 *
 * @param o The object to evaluate the expression against.
 * @return The value of calling Expression.isTrue (Query, Object).
 * @throws QueryExecutionException If the expression cannot be executed.
 */
public boolean isTrue (Object o)
	           throws QueryExecutionException
{
    // Fail fast on null rather than letting the expression engine throw later.
    if (o == null)
    {
        throw new NullPointerException ("Object passed in is null.");
    }

    return this.where.isTrue (o,
                              this.q);
}
/**
 * Evaluate the expression against the list of objects passed in and
 * return the value.
 *
 * @param l The list of objects to evaluate the expression against.
 * @return The values gained when evaluating the expression against all
 *         the objects in the list, in the same order as the input.
 * @throws QueryExecutionException If the expression cannot be executed.
 */
public List getValues (List l)
	           throws QueryExecutionException
{
    if (l == null)
    {
        throw new NullPointerException ("List is null");
    }

    int s = l.size ();

    List ret = new ArrayList (s);

    // Evaluate in list order.  (Fixed: the previous implementation called
    // ret.set(i, ...) on an empty ArrayList — new ArrayList(s) only sets the
    // CAPACITY, not the size — which threw IndexOutOfBoundsException for any
    // non-empty input.)
    for (int i = 0; i < s; i++)
    {
        ret.add (this.getValue (l.get (i)));
    }

    return ret;
}
/**
 * Evaluate the expression against the object passed in and return the
 * value.
 *
 * @param o The object to evaluate the expression against.
 * @return The value gained when evaluating the expression against
 *         the object.
 * @throws QueryExecutionException If the expression cannot be executed.
 */
public Object getValue (Object o)
	            throws QueryExecutionException
{
    // Note: unlike isTrue(Object), a null argument is passed straight through.
    return this.where.getValue (o,
                                this.q);
}
/**
 * Evaluate the expression against the object passed in.
 *
 * @param exp A string representation of the expression to evaluate.
 * @param o The object to evaluate the expression against; must not be null.
 * @return The value of calling Expression.isTrue (Query, Object).
 * @throws QueryParseException If the expression cannot be parsed.
 * @throws QueryExecutionException If the expression cannot be executed.
 */
public static boolean isTrue (String exp,
                              Object o)
                              throws QueryParseException,
                                     QueryExecutionException
{

    if (o == null)
    {

        throw new NullPointerException ("Object passed in is null.");

    }

    return new ExpressionEvaluator (exp,
                                    o.getClass ()).isTrue (o);

}
/**
 * Evaluate the expression against the list of objects passed in and
 * return the values.  The class used for parsing the expression is taken
 * from the first non-null element of the list.
 *
 * @param exp A string representation of the expression to evaluate.
 * @param l The list of objects to evaluate the expression against.
 * @return The values gained when evaluating the expression against all
 *         the objects in the list.
 * @throws QueryParseException If the expression cannot be parsed.
 * @throws QueryExecutionException If the expression cannot be executed.
 */
public static List getValues (String exp,
                              List   l)
                              throws QueryParseException,
                                     QueryExecutionException
{

    if (l == null)
    {

        throw new NullPointerException ("List is null");

    }

    if (l.size () == 0)
    {

        return new ArrayList ();

    }

    // Determine the element class from the first non-null entry.
    Class c = null;

    for (int i = 0; i < l.size (); i++)
    {

        Object o = l.get (i);

        if (o != null)
        {

            c = o.getClass ();
            break;

        }

    }

    if (c == null)
    {

        throw new NullPointerException ("All objects in the list are null");

    }

    return new ExpressionEvaluator (exp,
                                    c).getValues (l);

}
/**
 * Evaluate the expression against the object passed in and return the
 * value.
 *
 * @param exp A string representation of the expression to evaluate.
 * @param o The object to evaluate the expression against; must not be null.
 * @return The value gained when evaluating the expression against
 *         the object.
 * @throws QueryParseException If the expression cannot be parsed.
 * @throws QueryExecutionException If the expression cannot be executed.
 */
public static Object getValue (String exp,
                               Object o)
                               throws QueryParseException,
                                      QueryExecutionException
{

    if (o == null)
    {

        throw new NullPointerException ("Object passed in is null.");

    }

    return new ExpressionEvaluator (exp,
                                    o.getClass ()).getValue (o);

}
} |
package main
import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"net/http"
"strings"
"time"
)
// Hangman holds the complete state of one game round.
type Hangman struct {
	Entries     map[string]bool // letters/words the player has already tried
	Placeholder []string        // one cell per letter of Word, "_" until guessed
	Chances     int             // wrong guesses left before the game is lost
	maxChances  int             // initial chance budget (set in main; not read anywhere in this file — TODO confirm intended use)
	Word        string          // the word to guess
	Duration    time.Duration   // round length; stored unitless (20) and multiplied by time.Second in main
}
var dev = flag.Bool("dev", false, "dev mode")
// setWord picks the word to guess: a fixed word in -dev mode, otherwise a
// random word fetched from the random-word API. On any failure the word is
// left unset and a message is printed.
func (h *Hangman) setWord() {
	if *dev {
		h.Word = "elephant"
		return
	}
	resp, err := http.Get("https://random-word-api.herokuapp.com/word?number=5")
	if err != nil {
		// Bug fix: the original fell through here with resp == nil, so the
		// deferred resp.Body.Close() below panicked with a nil dereference.
		fmt.Println("server down")
		return
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("server down")
		return
	}
	words := []string{}
	if err := json.Unmarshal(body, &words); err != nil {
		fmt.Println("error while parsing")
		return
	}
	if len(words) == 0 {
		// Guard against an empty payload; indexing words[0] would panic.
		fmt.Println("server down")
		return
	}
	h.Word = words[0]
}
// setPlaceholder resets the board: one "_" per letter of the hidden word,
// e.g. [ _ _ _ _ _ ].
func (h *Hangman) setPlaceholder() {
	blanks := make([]string, len(h.Word))
	for i := 0; i < len(blanks); i++ {
		blanks[i] = "_"
	}
	h.Placeholder = blanks
}
// display renders the current game state (board, remaining chances, tried
// entries, countdown) and prompts for the next guess.
func (h Hangman) display(timeRemaining float64) {
	fmt.Println()
	fmt.Println()
	// Typo fix in the user-facing label: "placehoder" -> "placeholder".
	fmt.Printf("placeholder: %v \n", h.Placeholder) // render the placeholder
	fmt.Printf("Chances left: %d", h.Chances)       // render the chances left
	fmt.Println()
	fmt.Printf("Entries: ")
	for k := range h.Entries {
		fmt.Printf("%s ", k)
	}
	fmt.Println()
	// "\033[2K\r" clears the current terminal line before rewriting the countdown.
	fmt.Printf("\033[2K\r Time Remaining %v sec", timeRemaining)
	fmt.Printf("\n Guess a letter or the word: ")
}
// play runs the main guess loop and sends true (win) or false (loss) on
// result. totalTime (seconds) is only used to render the countdown; the hard
// timeout is enforced by the timer in main.
func (h *Hangman) play(result chan bool, totalTime float64) {
	timeRemaining := totalTime
	start := 0.0
	// Countdown renderer: tick once per second and recompute the remaining
	// time shown by display.
	// NOTE(review): time.Tick is never stopped, so this goroutine lives for
	// the whole process — harmless for a one-shot game, confirm if reused.
	go func() {
		for range time.Tick(1 * time.Second) {
			start++
			timeRemaining = totalTime - start
		}
	}()
	for {
		// evaluate a loss! If user guesses a wrong letter or the wrong word, they lose a chance.
		if h.Chances == 0 {
			fmt.Println("Out of chances")
			fmt.Println("Correct word is", h.Word)
			result <- false
			return
		}
		// evaluate a win: every blank has been filled in.
		if strings.Join(h.Placeholder, "") == h.Word {
			result <- true
			return
		}
		// Console display
		h.display(timeRemaining)
		// take the input
		var str string
		fmt.Scanln(&str)
		// Guessing the whole word correctly wins immediately.
		if str == h.Word {
			result <- true
			return
		}
		if len(str) == 0 {
			fmt.Println("please enter a character")
			continue
		}
		// A multi-character guess that is not the word costs one chance.
		if len(str) > 1 {
			h.Entries[str] = true
			h.Chances--
			continue
		}
		// Wrong single letter: deduct a chance only the first time it is tried.
		if !strings.Contains(h.Word, str) {
			_, ok := h.Entries[str]
			if !ok {
				h.Chances--
			}
			h.Entries[str] = true
			continue
		}
		// Correct letter: reveal every position where it occurs.
		// NOTE(review): i is a byte index into Word while Placeholder has one
		// cell per byte — fine for ASCII words, would misalign on multi-byte
		// runes; confirm the word source is ASCII-only.
		for i, v := range h.Word {
			if strings.ContainsRune(str, v) {
				h.Placeholder[i] = str
			}
		}
		// compare and update entries, placeholder and chances.
		h.Entries[str] = true
	}
}
// main wires the round together: pick a word, build the board, run the game
// loop in a goroutine, and race its result against a hard timeout.
func main() {
	flag.Parse()
	h := Hangman{
		Entries:    make(map[string]bool),
		Chances:    8,
		maxChances: 8,
		Duration:   20, // unitless; converted to seconds two lines below
	}
	h.setWord()
	h.setPlaceholder()
	totalTime := h.Duration * time.Second
	// Hard timeout for the whole round, independent of the play loop.
	t := time.NewTimer(totalTime)
	result := make(chan bool)
	go h.play(result, totalTime.Seconds())
	// First of {game result, timeout} wins the select.
	select {
	case res := <-result:
		if res {
			fmt.Println("WON")
		} else {
			fmt.Println("Loss")
		}
	case <-t.C:
		fmt.Println("Timeout")
		fmt.Println("Correct word is", h.Word)
	}
}
|
#!/bin/bash
#SBATCH --job-name=gc
#SBATCH --gres gpu:1

hostname

# Pin BLAS/OpenMP thread counts so each run uses a predictable CPU budget.
export OMP_NUM_THREADS=4
export OPENBLAS_NUM_THREADS=${OMP_NUM_THREADS}
export MKL_NUM_THREADS=${OMP_NUM_THREADS}

# Usage:
#   <script> <config> <run-id>                 -> train from scratch
#   <script> <config> <run-id> <ckpt>          -> resume training from checkpoint <ckpt>
#   <script> <config> <run-id> <first> <last>  -> evaluate checkpoints <first>..<last>
#
# Robustness fix: all parameter expansions are quoted so config names or
# run ids containing spaces/globs cannot be word-split by the shell.
if [[ $# -lt 2 ]]
then
    echo "Expect two arguments to specify config file and run id"
elif [[ $# -eq 2 ]]
then
    python -u habitat_baselines/run.py --run-type train --exp-config "habitat_baselines/config/gibson_r3/$1.pn.yaml" --run-id "$2"
elif [[ $# -eq 3 ]]
then
    python -u habitat_baselines/run.py --run-type train --exp-config "habitat_baselines/config/gibson_r3/$1.pn.yaml" --run-id "$2" --ckpt-path ~/share/golden/"$1"/"$1.$3.pth"
else
    # "${@:3}" expands the remaining args as separate words (seq FIRST LAST).
    for i in $(seq "${@:3}")
    do
        echo "$i"
        python -u habitat_baselines/run.py --run-type eval --exp-config "habitat_baselines/config/gibson_r3/$1.pn.yaml" --run-id "$2" --ckpt-path ~/share/golden/"$1"/"$1.$i.pth"
    done
fi
#!/bin/bash
# Provision a Fedora/RHEL-style machine (dnf) with the build toolchain,
# Intel MKL, the kv verified-computation library and the VCP library, then
# compile and run the rounding-mode self-tests.

# dnf update -y
sudo dnf groupinstall "Development Tools" -y
sudo dnf install boost-devel -y
sudo dnf install gmp-devel -y
sudo dnf install mpfr-devel -y
sudo dnf install wget -y
echo
echo "######################################################"
echo "Install MKL:"
sudo dnf config-manager --add-repo https://yum.repos.intel.com/mkl/setup/intel-mkl.repo -y
sudo rpm --import https://yum.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB
sudo dnf install intel-mkl-2020.0-088 -y
# Persist the MKL environment for future shells.
echo 'export MKL_ROOT_DIR=/opt/intel/mkl' >> ~/.bashrc
echo 'export LD_LIBRARY_PATH=$MKL_ROOT_DIR/lib/intel64:/opt/intel/lib/intel64_lin:$LD_LIBRARY_PATH' >> ~/.bashrc
echo 'export LIBRARY_PATH=$MKL_ROOT_DIR/lib/intel64:$LIBRARY_PATH' >> ~/.bashrc
echo 'source /opt/intel/mkl/bin/mklvars.sh intel64' >> ~/.bashrc
echo
echo "######################################################"
echo "######################################################"
echo "Install kv library:"
echo "Create a new folder in Home directory. "
echo -n "Please input a new folder name: "
# NOTE(review): plain `read` mangles backslashes; `read -r` would be safer.
read foldername
folderpath="${HOME}/${foldername}"
echo "Check: ${folderpath}"
mkdir "${folderpath}"
cd "${folderpath}"
# Scrape the latest kv tarball path (download/kv-X.Y.Z.tar.gz) from the page.
kv=`curl http://verifiedby.me/kv/ | grep -o -E "download/(kv-[0-9]+\.[0-9]+\.[0-9]*\.tar\.gz)"`
kvver=${kv#*/}
kvver=${kvver%.tar.gz}
urlkv="http://verifiedby.me/kv/${kv}"
wget "${urlkv}"
if [ $? -ne 0 ]; then
    echo "[ERROR] Could not download..."
    exit 1
else
    # Unpack and keep only the kv/, test/ and example/ subtrees.
    tar -xvf "${kvver}.tar.gz"
    cp -r "${kvver}/kv/" ./
    cp -r "${kvver}/test/" ./
    cp -r "${kvver}/example/" ./
    rm "${kvver}.tar.gz"
    rm -r "${kvver}"
fi
echo "######################################################"
echo "Install VCP library:"
wget --no-check-certificate https://github.com/koutasekine/vcp/archive/master.tar.gz
if [ $? -ne 0 ]; then
    echo "[ERROR] Could not download..."
    exit 1
else
    tar -xvf master.tar.gz
    cp -r vcp-master/vcp/ ./
    cp -r vcp-master/test_matrix/ ./
    cp -r vcp-master/test_PDE/ ./
    rm "master.tar.gz"
    rm -r vcp-master
fi
echo
echo "######################################################"
echo "Check for BLAS rounding mode changes: Please wait..."
source /opt/intel/mkl/bin/mklvars.sh intel64
cd "${folderpath}/test_matrix/"
# Build each check against MKL (lp64 + Intel OpenMP) and run it immediately.
g++ -I.. -std=c++11 -DNDEBUG -DKV_FASTROUND -O3 -m64 Check_pdblas_rounding.cpp -L${MKLROOT}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm -ldl -lmpfr -fopenmp && ./a.out
g++ -I.. -std=c++11 -DNDEBUG -DKV_FASTROUND -O3 -m64 Check_pidblas_rounding.cpp -L${MKLROOT}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm -ldl -lmpfr -fopenmp && ./a.out
echo
echo "######################################################"
echo "Check for Open MP rounding mode changes: Please wait..."
g++ -I.. -std=c++11 -DNDEBUG -DKV_FASTROUND -O3 -m64 Check_OpenMP.cpp -L${MKLROOT}/lib/intel64 -Wl,--no-as-needed -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm -ldl -lmpfr -fopenmp && ./a.out
echo "Finish!"
|
#!/usr/bin/env bash
# Copyright 2019 The OpenShift Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Wrapper around goreleaser: arg 1 is the release tag, arg 2 the GitHub token.
source /home/nshaikh/work/src/test-goreleaser/hack/build-flags.sh
export TAG="$1"
export LD_FLAGS="$(build_flags "$(dirname "$0")"/..)"
export GITHUB_TOKEN="$2"
echo "${TAG}"
echo "${LD_FLAGS}"
# Security fix: do NOT echo GITHUB_TOKEN — printing a credential leaks it
# into CI/build logs.
goreleaser
|
from flask import Flask, request

app = Flask(__name__)


@app.route("/sum", methods=['GET'])
def get_sum():
    """Return JSON ``{'sum': num1 + num2}`` for query parameters num1 and num2."""
    num1 = request.args.get('num1', type=float)
    num2 = request.args.get('num2', type=float)
    # type=float yields None for a missing or non-numeric parameter; reject
    # that explicitly with a 400 instead of raising TypeError (HTTP 500)
    # on `None + None`.
    if num1 is None or num2 is None:
        return {'error': 'num1 and num2 must be numbers'}, 400
    return {'sum': num1 + num2}


if __name__ == '__main__':
    app.run()
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import React, { useState, useEffect, useRef }from 'react';
export default function PortfolioSidebarList (props) {
const useLoad = useRef(props.loaded.current);
useEffect(()=> {
if (props.loaded.current === true) {
console.log("called after data fetch")
}
})
if (props.isLoaded === true) {
console.log(props.apiData.current.portfolioItems)
const PortfolioList = props.apiData.current.portfolioItems.map((item) => {
return (
<div key={item.id} className="portfolio-item">
<img src={item.thumb_image_url}/>
<div className="title-card">
<h1>{item.name}</h1>
<div className="actions">
<a className="edit-icon" onClick={() => props.handleEditClick(item)}>
<FontAwesomeIcon icon="edit" />
</a>
<a className="delete-icon" onClick={() => props.handleDeleteClick(item)}>
<FontAwesomeIcon icon="trash" />
</a>
</div>
</div>
</div>
)
});
return (
<div className="portfolio-items-wrapper">
{PortfolioList}
</div>
)
}
return (
<div>
Loading...
</div>
)
} |
class RemoveDatetimesForTrackingMetricsFromVisit < ActiveRecord::Migration[4.2]
  # Drops the lifecycle timestamp columns that are no longer used for
  # tracking metrics on visits.
  def change
    %i[accepted_at rejected_at withdrawn_at cancelled_at].each do |column|
      remove_column :visits, column
    end
  end
end
|
<reponame>metrisk/approve-dependabot<filename>src/main.ts
import * as core from '@actions/core'
import * as github from '@actions/github'
import { buildApprovalRequest } from './buildApproveRequest'
import { validateAction } from './validateAction'
/**
 * Validates the triggering event and, if it is an approvable Dependabot PR,
 * submits an approving review. Marks the action as failed on any thrown
 * error or on a non-200 review response.
 */
export async function run (): Promise<void> {
  try {
    const event = validateAction()
    if (event === false) return

    const token = process.env.GITHUB_TOKEN as string
    const octo = github.getOctokit(token)

    const approved = await core.group('Approving PR', async () => {
      const approveRequest = buildApprovalRequest(event)
      return await octo.pulls.createReview(approveRequest)
    })

    if (approved.status === 200) {
      core.info('Approved')
    } else {
      core.error(JSON.stringify(approved))
      core.setFailed('Failed to approve the PR')
    }
  } catch (error) {
    // `error` is `unknown` under strict TS; normalise it so setFailed
    // always receives a string or an Error as its contract requires.
    core.setFailed(error instanceof Error ? error : String(error))
  }
}
// Entry point: fire-and-forget — run() reports its own failures via setFailed.
// eslint-disable-next-line
run()
|
import React, { Component } from 'react';
import List from './List/List';
import style from './Navigation.module.scss';
import logo from '../../asset/logo/main_logo.png';
import {Link} from 'gatsby';
class Navs extends Component {
constructor(props) {
super(props);
this.state = {
isExpanded: false
};
}
handleToggle(e) {
e.preventDefault();
this.setState({
isExpanded: !this.state.isExpanded
});
}
render(){
const{ isExpanded } = this.state;
const link = ['/', 'About-the-Author', 'About-The-Book', 'Contact'];
return (
<div className={style.Navigation}>
<div className={style.containerCustomizeFlex} id={style.navcontainerBanner}>
<div className='row' id={style.rowOffMargin}>
<div className='col l3 m3 s3'>
<div id={style.imageContainer}>
<Link to='/'><img alt="logo" src={logo}/></Link>
</div>
</div>
<div className='col l8 m8 s8'>
<div id={style.nav}>
<div>
<div className={style.burgerNav} onClick={(e) => this.handleToggle(e)}>
<div className={style.burgerLinear}></div>
<div className={style.burgerLinear}></div>
<div className={style.burgerLinear}></div>
</div>
</div>
<div className={style.desktopMode}>
<ul>
<li>
<Link to="/" activeClassName={style.active}>Home</Link>
</li>
<li>
<Link to="/about-the-author" activeClassName={style.active}>About the Author</Link>
</li>
<li>
<Link to="/about-the-book" activeClassName={style.active}>About the Book</Link>
</li>
<li>
<Link to="/contact" activeClassName={style.active}>Contact</Link>
</li>
</ul>
</div>
</div>
</div>
</div>
</div>
<div className={`${style.mobileNav} ${isExpanded ? style.expandedNav : ''}`}>
<div className={style.closeNav} onClick={(e)=> this.setState({isExpanded : false})}>X</div>
<div className={style.mobileNavList}>
<ul>
<li>
<Link to="/" activeClassName={style.active}>Home</Link>
</li>
<li>
<Link to="/about-the-author" activeClassName={style.active}>About the Author</Link>
</li>
<li>
<Link to="/about-the-book" activeClassName={style.active}>About the Book</Link>
</li>
<li>
<Link to="/contact" activeClassName={style.active}>Contact</Link>
</li>
</ul>
</div>
</div>
</div>
)
}
}
export default Navs; |
<gh_stars>100-1000
// https://www.aceptaelreto.com/problem/statement.php?id=114
#include <bits/stdc++.h>
using namespace std;
using ll = long long;
int main() {
ios::sync_with_stdio(0), cin.tie(0);
ll n, x;
cin >> n;
while (n--) {
cin >> x;
if (!x) cout << "1\n";
else if (x == 3) cout << "6\n";
else if (x < 5) cout << x << '\n';
else cout << "0\n";
}
}
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
# encoding: utf-8
# coding: utf-8
from __future__ import absolute_import, print_function, unicode_literals
###############################################################
import ruamel.yaml as yaml
from ruamel.yaml.reader import Reader
from ruamel.yaml.scanner import RoundTripScanner # Scanner
from ruamel.yaml.parser import RoundTripParser # Parser,
from ruamel.yaml.composer import Composer
from ruamel.yaml.constructor import RoundTripConstructor # Constructor, SafeConstructor,
from ruamel.yaml.resolver import VersionedResolver # Resolver,
# from ruamel.yaml.nodes import MappingNode
from ruamel.yaml.compat import PY2, PY3, text_type, string_types, ordereddict, integer_types
import semantic_version # supports partial versions
import logging
import collections
import sys
import os
import time
import re, tokenize
import tempfile # See also https://security.openstack.org/guidelines/dg_using-temporary-files-securely.html
import subprocess # See also https://pymotw.com/2/subprocess/
import shlex
import shutil
# import paramiko # TODO: use instead of simple .call('ssh ...') ? Check for Pros and Cons!
import json
import pprint as PP
from abc import *
###############################################################
# logging.basicConfig(format='%(levelname)s [%(filename)s:%(lineno)d]: %(message)s', level=logging.DEBUG)
log = logging.getLogger(__name__)
# log.setLevel(logging.DEBUG)
_pp = PP.PrettyPrinter(indent=2, width=80)
###############################################################
# NOTE: Global variables
PEDANTIC = False # NOTE: to treat invalid values/keys as errors?
INPUT_DIRNAME = './' # NOTE: base location for external resources
_SSH_CONFIG_PATH = None
_HILBERT_STATION = 'hilbert-station' # NOTE: client-side CLI tool (driver)
_HILBERT_STATION_OPTIONS = '' # -v/-q? -t/-d?
_DRY_RUN_MODE = 0 # 0 - normal mode, 1 => server & client side dry-run mode!, 2 => server-side dry mode (no remote executions!),
###############################################################
def add_HILBERT_STATION_OPTIONS(opt):
    """Append *opt* to the global client-driver option string; return the previous value."""
    global _HILBERT_STATION_OPTIONS
    previous = _HILBERT_STATION_OPTIONS
    if _HILBERT_STATION_OPTIONS:
        _HILBERT_STATION_OPTIONS = '{0} {1}'.format(_HILBERT_STATION_OPTIONS, opt)
    else:
        _HILBERT_STATION_OPTIONS = str(opt)
    return previous
def get_HILBERT_STATION_OPTIONS():
    """Return the accumulated option string passed to the client-side driver."""
    global _HILBERT_STATION_OPTIONS
    return _HILBERT_STATION_OPTIONS
###############################################################
def get_SSH_CONFIG():
    """SSH_ACCESS_DB: 'config' file located in $HILBERT_SERVER_CONFIG_SSH_PATH, default: [$HOME/.ssh/]"""
    # Resolution order:
    #   1. cached value from a previous call,
    #   2. $HILBERT_SERVER_CONFIG_SSH_PATH/config (warn or raise on a bad
    #      path, depending on PEDANTIC),
    #   3. $HOME/.ssh/config (always a hard error when missing).
    # NOTE(review): `d` is reused both as a path and, on failure, as the
    # error-message string.
    global _SSH_CONFIG_PATH
    if _SSH_CONFIG_PATH is not None:
        return _SSH_CONFIG_PATH

    _SSH_ACCESS_DB = "config"

    d = os.environ.get('HILBERT_SERVER_CONFIG_SSH_PATH', None)
    if d is not None:
        log.debug("HILBERT_SERVER_CONFIG_SSH_PATH is '%s'", d)
        d = os.path.abspath(d)
        if os.path.exists(d):
            d = os.path.join(d, _SSH_ACCESS_DB)
            if os.path.exists(d):
                log.debug("SSH_ACCESS_DB is expected under '%s'", d)
                _SSH_CONFIG_PATH = d  # cache for subsequent calls
                return d
            else:
                if not PEDANTIC:
                    log.warning('SSH_ACCESS_DB [%s] is wrong!', d)
                else:
                    d = 'SSH_ACCESS_DB [{}] is wrong!'.format(d)
                    log.error(d)
                    raise Exception(d)
        else:
            if not PEDANTIC:
                log.warning('HILBERT_SERVER_CONFIG_SSH_PATH [%s] is wrong!', d)
            else:
                d = 'HILBERT_SERVER_CONFIG_SSH_PATH [{}] is wrong!'.format(d)
                log.error(d)
                raise Exception(d)

    # Fallback: the per-user default location $HOME/.ssh/config.
    d = os.environ.get('HOME', None)
    if d is None:
        d = '[HOME] is not set! Cannot access SSH configuration file under default location ($HOME/.ssh/)!'
        log.error(d)
        raise Exception(d)

    d = os.path.abspath(d)
    if not os.path.exists(d):
        d = "HOME: [{0}] is wrong! Cannot access SSH configuration file under default location ({0}/.ssh/)!".format(d)
        log.error(d)
        raise Exception(d)

    d = os.path.join(d, ".ssh")  # default .ssh/ location
    if not os.path.exists(d):
        d = "$HOME/.ssh: [{0}] is wrong! Cannot access SSH configuration file under default location ({0}/.ssh/)!".format(d)
        log.error(d)
        raise Exception(d)

    d = os.path.join(d, _SSH_ACCESS_DB)  # default .ssh/config location
    if not os.path.exists(d):
        d = "$HOME/.ssh/{1}: [{0}] is wrong! Cannot access SSH configuration file under default location ({0}/.ssh/{1})!".\
            format(d, _SSH_ACCESS_DB)
        log.error(d)
        raise Exception(d)

    log.debug("SSH_ACCESS_DB is expected under '%s'", d)
    _SSH_CONFIG_PATH = d  # cache for subsequent calls
    return d
###############################################################
def get_INPUT_DIRNAME():
    """Return the base directory used to resolve external resources."""
    return INPUT_DIRNAME
def set_INPUT_DIRNAME(d):
    """Set the base directory for external resources; return the previous value."""
    global INPUT_DIRNAME
    previous = INPUT_DIRNAME
    if previous != d:
        INPUT_DIRNAME = d
        log.debug("INPUT_DIRNAME is '%s' now!", INPUT_DIRNAME)
    return previous
###############################################################
def start_dry_run_mode():
    """Raise the global dry-run level by one step (capped at 2).

    Level 1: remote commands are executed, but with the client driver's
    own '-d' (dry-run) flag added. Level 2: no remote executions at all.
    """
    global _DRY_RUN_MODE
    if _DRY_RUN_MODE == 0:
        _DRY_RUN_MODE = 1
        add_HILBERT_STATION_OPTIONS('-d')  # make the client driver dry-run too
        log.debug("DRY_RUN mode is ON! Will do remote execution in dry-mode")
    elif _DRY_RUN_MODE == 1:
        # NOTE: dry_run mode requested twice => escalate to server-side dry mode.
        _DRY_RUN_MODE = 2
        log.debug("DRY_RUN mode is ON+ON! No remote executions!")
    else:
        log.debug("NOTE: DRY_RUN mode is already ON+ON!")
def get_DRY_RUN_MODE():
    """Return the current dry-run level (0 = off, 1 = client dry-run, 2 = no remote exec)."""
    return _DRY_RUN_MODE


def get_NO_REMOTE_EXEC_MODE():
    """True when remote execution is fully disabled (dry-run level >= 2)."""
    return get_DRY_RUN_MODE() >= 2


def get_NO_LOCAL_EXEC_MODE():
    """True when local execution should be skipped (dry-run level >= 1)."""
    return get_DRY_RUN_MODE() >= 1
###############################################################
def start_pedantic_mode():
    """Enable PEDANTIC mode globally (idempotent): invalid values/keys become hard errors."""
    global PEDANTIC
    if not PEDANTIC:
        PEDANTIC = True
        log.debug("PEDANTIC mode is ON!")


def get_PEDANTIC():
    """Return whether pedantic (fail-fast) mode is enabled."""
    return PEDANTIC
###############################################################
# Define AbstractValidator with the ABC spelling appropriate for the running
# interpreter: Python >= 3.4 has the ABC convenience base class; older
# Python 2/3 interpreters fall back to the __metaclass__ attribute.
if PY3 and (sys.version_info[1] >= 4):
    class AbstractValidator(ABC):
        """AbstractValidator is the root Base class for any concrete implementation of entities
        appearing in the general configuration file"""

        @abstractmethod
        def validate(self, d):
            pass
elif PY2 or PY3:
    class AbstractValidator:
        """AbstractValidator is the root Base class for any concrete implementation of entities
        appearing in the general configuration file"""
        __metaclass__ = ABCMeta

        @abstractmethod
        def validate(self, d):
            pass
# elif PY3:
#     class AbstractValidator(metaclass=ABCMeta):
#         """AbstractValidator is the root Base class for any concrete implementation of entities
#         appearing in the general configuration file"""
#         @abstractmethod
#         def validate(self, d):
#             pass
else:
    raise NotImplementedError("Unsupported Python version: '{}'".format(sys.version_info))
###############################################################
if PY3:
from urllib.parse import urlparse
from urllib.request import urlopen
elif PY2:
from urlparse import urlparse
from urllib2 import urlopen
###############################################################
# is_valid_id: check that *k* is a legal Python identifier.
# str.isidentifier exists only on Python 3; Python 2 falls back to matching
# the tokenizer's Name pattern anchored at the end of the string.
if PY3:
    def is_valid_id(k):
        return k.isidentifier()
elif PY2:
    def is_valid_id(k):
        return re.match(tokenize.Name + '$', k)
###############################################################
def pprint(cfg):
    """Pretty-print *cfg* using the module-wide PrettyPrinter settings."""
    global _pp
    _pp.pprint(cfg)
###############################################################
# timeout=None,
def _execute(_cmd, shell=False, stdout=None, stderr=None, dry_run=False):  # True??? Try several times? Overall timeout?
    """Same as subprocess.call but with logging and possible dry-run mode or pedantic error handling"""
    __cmd = ' '.join(_cmd)

    # stdout = tmp, stderr = open("/dev/null", 'w')
    # stdout=open("/dev/null", 'w'), stderr=open("/dev/null", 'w'))
    log.debug("Executing shell command: '{}'...".format(__cmd))

    _shell = ''
    if shell:
        _shell = ' through the shell'

    retcode = None
    try:
        # with subprocess.Popen(_cmd, shell=shell, stdout=stdout, stderr=stderr) as p:
        # timeout=timeout,
        if dry_run:
            # Dry-run: only announce what would have been executed.
            print("[Dry-Run-Mode] Execute [{0}]{1}".format(__cmd, _shell))
            retcode = 0
        else:
            retcode = subprocess.call(_cmd, shell=shell, stdout=stdout, stderr=stderr)
    except:
        # Log the traceback, then re-raise unchanged for the caller to handle.
        log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
        raise

    assert retcode is not None
    log.debug("Exit code: '{}'".format(retcode))

    if retcode:
        if not PEDANTIC:  # non-pedantic: a bad exit code is only a warning
            log.warning("Error exit code {0}, while executing '{1}'!".format(retcode, __cmd))
        else:  # Pedantic mode: a non-zero exit code is fatal.
            log.error("Error exit code {0}, while executing '{1}'!".format(retcode, __cmd))
            raise Exception("Error exit code {0}, while executing '{1}'".format(retcode, __cmd))
    else:
        log.debug("Successful command '{}' execution!".format(__cmd))

    return retcode
###############################################################
def _get_line_col(lc):
    """Extract a ``(line, col)`` pair from *lc*.

    Accepts either a (line, col) sequence or an object exposing ``.line``
    plus ``.col``/``.column`` attributes (e.g. a YAML location mark).
    Raises (after logging) when neither spelling is available.
    """
    if isinstance(lc, (list, tuple)):
        l = lc[0]
        c = lc[1]
    else:
        try:
            l = lc.line
        except:
            log.exception("Cannot get line out of '{}': Missing .line attribute!".format(lc))
            raise
        try:
            c = lc.col
        except:
            # Some location objects spell the attribute .column instead of .col.
            try:
                c = lc.column
            except:
                log.exception("Cannot get column out of '{}': Missing .col/.column attributes!".format(lc))
                raise
    return l, c
###############################################################
class ConfigurationError(Exception):
    """Raised when configuration input fails validation.

    Bug fix: the original stored the message only in ``_msg`` and never
    called ``Exception.__init__``, so ``str(exc)`` (and any traceback
    display) showed an empty message.  ``_msg`` is kept for backward
    compatibility with existing callers.
    """

    def __init__(self, msg):
        super(ConfigurationError, self).__init__(msg)
        self._msg = msg
###############################################################
_up_arrow = '↑'
def _key_error(key, value, lc, error_message, e='K'):
    """Print a key-level problem report pointing at its location in the YAML source."""
    line, col = _get_line_col(lc)
    if key is None:
        key = '*'
    indent = ' ' * col
    print('{}[line: {}, column: {}]: {}'.format(e, line + 1, col + 1, error_message.format(key)))
    print('{}{}: {}'.format(indent, key, value))
    print('{}{}'.format(indent, _up_arrow))
    print('---')
def _key_note(key, lc, key_message, e='K'):
    """Print a note about a (possibly missing) key at its source location."""
    line, col = _get_line_col(lc)
    if key is None:
        key = '?'
    print('{}[line: {}, column: {}]: {}'.format(e, line + 1, col + 1, key_message.format(key)))
    print('---')
def _value_error(key, value, lc, error, e='E'):
    """Print a value-level problem report, pointing at the value's column."""
    line, col = _get_line_col(lc)
    if key is None:
        key = '?'
    # The value starts after "<key>: ", hence the +2 offset.
    val_col = col + len(key) + 2
    print('{}[line: {}, column: {}]: {}'.format(e, line + 1, val_col + 1, error.format(key)))
    print('{}{}: {}'.format(' ' * col, key, value))
    print('{}{}'.format(' ' * val_col, _up_arrow))
    print('---')
# Unused???
# def value_warning(key, value, lc, error):
# _value_error(key, value, lc, error, e='W')
###############################################################
def xstr(s):
    """Return ``str(s)``, substituting the default '?' when *s* is None."""
    return '?' if s is None else str(s)
###############################################################
class VerboseRoundTripConstructor(RoundTripConstructor):
    """RoundTripConstructor that additionally reports duplicated mapping keys."""

    def construct_mapping(self, node, maptyp, deep=False):
        m = RoundTripConstructor.construct_mapping(self, node, maptyp, deep=deep)  # the actual construction!

        # additionally go through all nodes in the mapping to detect overwrites:
        starts = {}  # already processed keys -> (location, value)

        for key_node, value_node in node.value:
            # keys can be list -> deep
            key = self.construct_object(key_node, deep=True)

            # lists are not hashable, but tuples are
            # NOTE(review): collections.Hashable moved to collections.abc in
            # Python 3.3 and was removed from `collections` in 3.10 — confirm
            # the supported interpreter range before upgrading.
            if not isinstance(key, collections.Hashable):
                if isinstance(key, list):
                    key = tuple(key)

            value = self.construct_object(value_node, deep=deep)
            # TODO: check the lines above in the original Constructor.construct_mapping code for any changes/updates

            if key in starts:  # Duplication detection
                old = starts[key]
                print("WARNING: Key re-definition within some mapping: ")  # mapping details?
                _key_error(key, old[1], old[0], "Previous Value: ")
                _key_error(key, value, key_node.start_mark, "New Value: ")
                print('===')

            starts[key] = (key_node.start_mark, value)  # in order to find all such problems!

        return m
###############################################################
class VerboseRoundTripLoader(Reader, RoundTripScanner, RoundTripParser, Composer,
                             VerboseRoundTripConstructor, VersionedResolver):
    """Round-trip YAML loader wired to the duplicate-key-reporting constructor."""

    def __init__(self, stream, version=None, preserve_quotes=None):
        # Each mixin is initialised explicitly, mirroring ruamel.yaml's own
        # RoundTripLoader composition.
        Reader.__init__(self, stream, loader=self)
        RoundTripScanner.__init__(self, loader=self)
        RoundTripParser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        VerboseRoundTripConstructor.__init__(self, preserve_quotes=preserve_quotes, loader=self)
        VersionedResolver.__init__(self, version, loader=self)
###############################################################
class BaseValidator(AbstractValidator):
    """Abstract Base Class for the Config entities"""

    __version = [None]  # NOTE: shared version among all Validator Classes!

    _parent = None  # NOTE: (for later) parent Validator
    _id = None  # local id; path-joined with the parents' ids in get_id()
    _data = None  # NOTE: result of valid validation
    _default_input_data = None  # NOTE: Default input to the parser instead of None

    def __init__(self, *args, **kwargs):
        # Validator-specific keywords are popped before delegating upwards.
        parent = kwargs.pop('parent', None)
        id = kwargs.pop('id', None)
        parsed_result_is_data = kwargs.pop('parsed_result_is_data', False)
        # TODO: FIXME: assure *args, **kwargs are empty!

        super(BaseValidator, self).__init__()

        assert self._parent is None
        self._parent = parent

        assert self._id is None
        self._id = id

        # parsed_result_is_data default:
        # - False => parsed result is self
        # - True => get_data()
        self._parsed_result_is_data = parsed_result_is_data

        self.__API_VERSION_ID = "$Id: d65ee1a5b8955e823caab27b6d936ba7c90b648d $"

    def get_parent(self, cls=None):
        """Return the direct parent, or — given *cls* — the nearest ancestor of that class (None if absent)."""
        if cls is None:
            return self._parent
        if self._parent is None:
            return None
        # Walk up the validator chain until a matching ancestor (or the top).
        _p = self._parent
        while isinstance(_p, BaseValidator):
            if isinstance(_p, cls):
                break
            _t = _p._parent
            if _t is None:
                break
            _p = _t
        assert _p is not None
        if isinstance(_p, cls):
            return _p
        log.error("Sorry: could not find parent of specified class ({0})!"
                  "Found top is of type: {1}".format(cls, type(_p)))
        return None

    def get_id(self):
        """Return the path-like id of this validator (parent ids joined with os.path.join)."""
        if self._parent is not None:
            return os.path.join(self._parent.get_id(), xstr(self._id))
        else:
            return xstr(self._id)

    def get_api_version(self):
        """Return the API version identifier string recorded at construction."""
        return self.__API_VERSION_ID

    def set_data(self, d):
        """Store the validated data payload for this validator."""
        # assert self._data is None
        # assert d is not None
        self._data = d

    def get_data(self):
        """Return the stored (validated) data payload, possibly None."""
        _d = self._data
        # assert _d is not None
        return _d

    @classmethod
    def set_version(cls, v):
        """To be set once only for any Validator class!"""
        assert len(cls.__version) == 1
        # assert cls.__version[0] is None  # NOTE: bad for testing!
        cls.__version[0] = v

    @classmethod
    def get_version(cls, default=None):
        """Return the shared format version, or *default* when not yet set."""
        assert len(cls.__version) == 1
        if cls.__version[0] is not None:
            return cls.__version[0]
        return default

    @abstractmethod
    def validate(self, d):  # abstract: concrete validators implement the actual checks
        pass

    @classmethod
    def parse(cls, d, *args, **kwargs):
        """
        return parsed value, throw exception if input is invalid!

        :param d: input data to be parsed/validated
        :param parent: parent/container object
        :return: constructed instance of requested class or parsed data
        """
        self = cls(*args, **kwargs)
        log.debug("{1}::parse( input type: {0}, id: {2} )".format(type(d), type(self), self.get_id()))
        if self.validate(d):  # NOTE: validate should not **explicitly** throw exceptions!!!
            if self._parsed_result_is_data:
                return self.get_data()
            return self
        # NOTE: .parse should throw exceptions in case of invalid input data!
        raise ConfigurationError(u"{}: {}".format("ERROR:", "Invalid data: '{0}' in {1} [{2}]!".format(d, type(self), self.get_id())))

    def __repr__(self):
        """Print using pretty formatter"""
        d = self.get_data()  # vars(self) # ???
        return PP.pformat(d, indent=4, width=100)

    # def __str__(self):
    #     """Convert to string"""
    #
    #     d = self.get_data()  # vars(self) # ???
    #     return str(d)

    def __eq__(self, other):
        """Compare validators by data; a non-validator is compared against this object's data dump."""
        assert isinstance(self, BaseValidator)
        if not isinstance(other, BaseValidator):
            return self.data_dump() == other
        assert isinstance(other, BaseValidator)
        # assert self.get_api_version() == other.get_api_version()
        return self.get_data() == other.get_data()

    def __ne__(self, other):
        return not (self == other)  # More general than self.value != other.value

    def data_dump(self):
        """Recursively convert the stored data into plain dicts/lists/values (validators are dumped too)."""
        _d = self.get_data()
        if _d is None:
            return _d
        assert not isinstance(_d, (tuple, set))  # TODO: is this true in general?!?
        if isinstance(_d, dict):
            _dd = {}
            for k in _d:
                v = _d[k]
                if isinstance(v, BaseValidator):
                    v = v.data_dump()
                _dd[k] = v
            return _dd
        if isinstance(_d, list):
            _dd = []
            for idx, i in enumerate(_d):
                v = i
                if isinstance(v, BaseValidator):
                    v = v.data_dump()
                _dd.insert(idx, v)
            return _dd
        # if isinstance(_d, string_types):
        return _d

    def query(self, what, _default=None):
        """
        Generic query for data subset about this object

        A/B/C/(all|keys|data|id)?

        Get object under A/B/C and return
        * it (if 'all') - default!
        * its keys (if 'keys')
        * its data dump (if 'data')
        """
        # NOTE: no data dumping here! Result may be a validator!
        log.debug("Querying '%s'", what)

        _sep = "/"
        what = what.strip(_sep)
        if (what is None) or (what == ''):
            what = 'all'

        # Terminal selectors on this object itself:
        if what == 'all':
            return self
        if what == 'id':
            return self.get_id()

        _data = self.data_dump()
        if what == 'data':
            return _data

        if what == 'keys':
            if _data is None:
                return _default
            _keys = []
            if isinstance(_data, dict):
                # _keys = list(set(_keys + [k for k in _data.keys()]))
                _keys = [text_type(k) for k in _data.keys()]
            return _keys

        s = StringValidator.parse(what)
        if s in _data:
            return _data[s]

        # Path-style lookup: split off the first component and recurse.
        ss = s.split(_sep)  # NOTE: encode using pathes!
        h = ss[0]  # top header
        t = _sep.join(ss[1:]).strip(_sep)  # tail
        assert h != ''  # due to lstrip!
        log.debug("Parsing Query object spec => head: [%s], tail: [%s]", h, t)

        _d = self.get_data()
        if h in _d:
            d = _d[h]
            if isinstance(d, BaseValidator):
                return d.query(t, _default)  # TODO: FIXME: avoid recursion...
            log.warning("Could not query an object. Ignoring the tail: %s. Returning default [%s]. Currently found data: [%s: %s]", t, _default, h, d)
            return _default

        if h in _data:  # computed data members? they are never Validators!
            d = _data[h]
            if (t == '') or (t == 'all') or (t == 'data'):
                return d
            if isinstance(d, BaseValidator):
                return d.query(t, _default)  # TODO: FIXME: avoid recursion...
            # assert not isinstance(d, BaseValidator)
            # return d.query(t, _default)  # TODO: FIXME: avoid recursion...
            # if (t == 'id') || (t == '')
            log.warning("Wrong/unknown tail: [{0}] or synthetic data ! Currently found data: [{1}: {2}]".format(t, h, d))
        # else:
        log.warning("Sorry cannot query [{0}] of current object [{2}] of type [{1}]!".format(what, type(self), self.get_id()))
        return _default
###############################################################
class BaseRecordValidator(BaseValidator):
    """Aggregation of data as a record with some fixed data members.

    Subclasses register per-type rule dictionaries in self._types; each rule
    maps a key name to a tuple (mandatory?, validator class).
    """
    # TODO: turn _default_type into a class-member (by moving it here)...?
    def __init__(self, *args, **kwargs):
        super(BaseRecordValidator, self).__init__(*args, **kwargs)
        self._default_type = None  # "default_base"
        self._types = {}
        self._create_optional = False  # Instantiate missing optional keys
        self._type = None
    def detect_type(self, d):
        """determine the type of variadic data for the format version"""
        assert not (self._default_type is None)
        assert len(self._types) > 0
        assert isinstance(d, dict)
        return self._default_type
    def detect_extra_rule(self, key, value):
        """Handling for extra un-recorded keys in the mapping"""
        # Base implementation: no extra keys are allowed.
        return None
    def validate(self, d):
        """Validate mapping `d` against the rule set selected by detect_type().

        On success stores a dict of parsed key -> parsed value and returns True;
        on any failure returns False (after reporting every problem found).
        """
        if d is None:
            d = self._default_input_data
        # ! TODO: assert that d is a mapping with lc!
        self._type = self.detect_type(d)
        assert self._type is not None
        assert self._type in self._types
        _rule = self._types[self._type]
        _ret = True
        _d = {}
        # NOTE(review): `d.lc` assumes a ruamel.yaml mapping with location info.
        _lc = d.lc  # starting location of the mapping...?
        # Pass 1 over the rule set: report missing mandatory keys and
        # (optionally) instantiate defaults for absent optional keys.
        for k in _rule.keys():
            r = _rule[k]
            if r[0] and (k not in d):
                _key_note(k, _lc, "ERROR: Missing mandatory key `{}` (type: '%s')" % (self._type))  # Raise Exception?
                _ret = False
            # NOTE: the following will add all the missing default values
            elif self._create_optional and (not r[0]):  # Optional Values should have some default values!
                # TODO: FIXME: catch exception in the following:
                _k = None
                _v = None
                try:
                    _k = StringValidator.parse(k)
                except ConfigurationError as err:
                    _key_note(k, _lc, "Error: invalid _optional_ key field '{}' (type: '%s')" % self._type)
                    pprint(err)
                    _ret = False
                try:
                    _v = (r[1]).parse(None, parent=self, id=k)  # Default Value!
                except ConfigurationError as err:
                    _key_note(k, _lc, "Error: invalid default value (for optional key: '{}') (type: '%s')" % self._type)
                    pprint(err)
                    _ret = False
                if _ret:
                    assert _k is not None
                    _d[_k] = _v
        (s, c) = _get_line_col(_lc)
        # Pass 2 over the actual input keys: parse known keys via their rule,
        # route unknown keys through detect_extra_rule().
        for offset, k in enumerate(d):
            v = d.get(k)
            k = text_type(k)
            # Approximate per-key location: mapping start line + key offset.
            l = s + offset  # ??
            lc = (l, c)
            _k = None
            _v = None
            if k in _rule:
                try:
                    _k = StringValidator.parse(k)
                except ConfigurationError as err:
                    _key_error(k, v, lc, "Error: invalid key field '{}' (type: '%s')" % self._type)
                    pprint(err)
                    _ret = False
                try:
                    _v = (_rule[k][1]).parse(v, parent=self, id=k)
                except ConfigurationError as err:
                    _value_error(k, v, lc, "Error: invalid field value (key: '{}') (type: '%s')" % self._type)
                    pprint(err)
                    _ret = False
            else:
                _extra_rule = self.detect_extra_rule(k, v)  # (KeyValidator, ValueValidator)
                if _extra_rule is None:
                    # Unknown key: fatal only in PEDANTIC mode.
                    if PEDANTIC:
                        _key_error(k, v, lc, "ERROR: Unhandled extra Key: '{}' (type: '%s')" % self._type)
                        _ret = False
                    else:
                        _key_error(k, v, lc, "WARNING: Unhandled extra Key: '{}' (type: '%s')" % self._type)
                    continue
                else:
                    try:
                        _k = (_extra_rule[0]).parse(k)
                    except ConfigurationError as err:
                        _key_error(k, v, lc, "Error: invalid key '{}' (type: '%s')" % self._type)
                        pprint(err)
                        _ret = False
                    try:
                        _v = (_extra_rule[1]).parse(v, parent=self, id=k)
                    except ConfigurationError as err:
                        # TODO: FIXME: wrong col (it was for key - not value)!
                        _value_error(k, v, lc, "Error: invalid field value '{}' value (type: '%s')" % self._type)
                        pprint(err)
                        _ret = False
            if _ret:
                assert _k is not None
                _d[_k] = _v
        if _ret:
            self.set_data(_d)
        return _ret
###############################################################
class ScalarValidator(BaseValidator):
    """Validator accepting a single YAML scalar (string, number, bool or None)."""
    def __init__(self, *args, **kwargs):
        # Scalars store the parsed value directly as data by default.
        kwargs.setdefault('parsed_result_is_data', True)
        super(ScalarValidator, self).__init__(*args, **kwargs)
    def validate(self, d):
        """Accept any non-container value; plain strings are coerced to text_type."""
        value = self._default_input_data if d is None else d
        if value is not None:
            # Reject YAML collections: only plain scalars are allowed here.
            if isinstance(value, (list, dict, tuple, set)):
                log.error("value: '{}' is not a scalar value!!".format(value))
                return False
            if isinstance(value, string_types):
                value = text_type(value)
        # NOTE: None itself also counts as a valid scalar.
        self.set_data(value)
        return True
###############################################################
class StringValidator(ScalarValidator):
    """Validator for a single YAML string (defaults to the empty string)."""
    def __init__(self, *args, **kwargs):
        super(StringValidator, self).__init__(*args, **kwargs)
        self._default_input_data = ''
    def validate(self, d):
        """Parse `d` as a scalar and require a string; stored as text_type.

        Note: should not care about format version.
        """
        if d is None:
            d = self._default_input_data
        assert d is not None
        parsed = ScalarValidator.parse(d, parent=self)
        if isinstance(parsed, string_types):
            self.set_data(text_type(d))
            return True
        log.error("value: '{}' is not a string!!".format(d))
        return False
###############################################################
class SemanticVersionValidator(BaseValidator):
    """Validator wrapping `semantic_version.Version`; stored data is the Version object."""
    def __init__(self, *args, **kwargs):
        # `partial` permits incomplete versions like '1.0' (forwarded to semantic_version).
        partial = kwargs.pop('partial', True)
        # kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', False)
        super(SemanticVersionValidator, self).__init__(*args, **kwargs)
        # self._parsed_result_is_data = False
        self._partial = partial
        self._default_input_data = '0.0.1'
    def validate(self, d):
        """check the string data to be a valid semantic version"""
        if d is None:
            d = self._default_input_data
        log.debug("{1}::validate( input type: {0} at {2} )".format(type(d), type(self), self.get_id()))
        # log.debug("SemanticVersionValidator::validate( input data: {0} )".format(str(d)))
        try:
            _t = StringValidator.parse(d, parent=self, parsed_result_is_data=True)
        except:
            log.warning("Input is not a string: {}".format(d))
            # Fallback: retry after a forced str() conversion of the input.
            try:
                _t = StringValidator.parse(str(d), parent=self, parsed_result_is_data=True)
            except:
                log.error("Input cannot be converted into a version string: {}".format(d))
                return False
        # self.get_version(None)  # ???
        _v = None
        try:
            _v = semantic_version.Version(_t, partial=self._partial)
        except:
            log.exception("Wrong version data: '{0}' (see: '{1}')".format(d, sys.exc_info()))
            return False
        self.set_data(_v)
        return True
    def data_dump(self):
        # Dump as a plain string (the Version object itself is not YAML-friendly).
        return str(self.get_data())
###############################################################
class BaseUIString(StringValidator):
    """A string visible to users: must be a valid, non-empty string."""
    def __init__(self, *args, **kwargs):
        super(BaseUIString, self).__init__(*args, **kwargs)
        self._default_input_data = None
    def validate(self, d):
        """Validate as a string and additionally reject empty values."""
        if d is None:
            d = self._default_input_data
        ok = super(BaseUIString, self).validate(d)
        # NOTE: a displayed string must be non-empty.
        if ok and bool(self.get_data()):
            return True
        self.set_data(None)
        return False
###############################################################
class BaseEnum(BaseValidator):  # TODO: Generalize to not only strings...?
    """Validator restricting a string to a fixed collection of allowed values."""
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('parsed_result_is_data', True)
        super(BaseEnum, self).__init__(*args, **kwargs)
        # Subclasses populate the allowed items (may depend on format version).
        self._enum_list = []
    def validate(self, d):
        """Parse `d` as a string and check membership in self._enum_list."""
        if d is None:
            d = self._default_input_data
        item = StringValidator.parse(d, parent=self, parsed_result_is_data=True)
        if item in self._enum_list:
            self.set_data(item)
            return True
        log.error("string value: '{}' is not among known enum items!!".format(d))
        return False
###############################################################
class ServiceType(BaseEnum):  # Q: Is 'Service::type' mandatory? default: 'compose'
    """Enumeration of supported service types (currently only 'compose')."""
    def __init__(self, *args, **kwargs):
        super(ServiceType, self).__init__(*args, **kwargs)
        _compose = text_type('compose')
        # NOTE: 'docker' and others may be possible later on
        self._enum_list = [_compose]
        self._default_input_data = _compose
###############################################################
class StationOMDTag(BaseEnum):  # Q: Is 'Station::omd_tag' mandatory? default: 'standalone'
    """Enumeration of possible OMD tags for a station."""
    def __init__(self, *args, **kwargs):
        super(StationOMDTag, self).__init__(*args, **kwargs)
        _standalone = text_type('standalone')
        # NOTE: possible values of omd_tag
        self._enum_list = [text_type('agent'), text_type('windows'), _standalone]
        self._default_input_data = _standalone
###############################################################
class StationPowerOnMethodType(BaseEnum):  # Enum: [WOL], AMTvPRO, DockerMachine
    """Enumeration of supported power-on method types."""
    def __init__(self, *args, **kwargs):
        super(StationPowerOnMethodType, self).__init__(*args, **kwargs)
        _wol = text_type('WOL')
        # NOTE: the list of possible values of PowerOnMethod::type (will depend on format version)
        # NOTE: 'AMTvPRO' and others may be possible later on
        self._enum_list = [_wol, text_type('DockerMachine')]
        self._default_input_data = _wol
###############################################################
class URI(BaseValidator):
    """Location of an external resource: a URL, or a local file/directory
    (absolute, or relative to the input config file).

    After a successful validate(), self._type is one of 'url'/'file'/'dir'
    (or stays None when the location could not be classified and PEDANTIC
    mode is off).
    """
    def __init__(self, *args, **kwargs):
        kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', True)
        super(URI, self).__init__(*args, **kwargs)
        self._type = None
    def check(self):
        """Re-check accessibility of the resource (URLs only, PEDANTIC mode only).

        :return: False when no data is set or a PEDANTIC URL probe fails; True otherwise.
        """
        global PEDANTIC
        v = self.get_data()
        if v is None:
            return False
        _ret = True
        if PEDANTIC and (self._type == text_type('url')):
            try:
                urlopen(v).close()
            except:
                log.warning("URL: '{}' is not accessible!".format(v))
                _ret = not PEDANTIC
        # TODO: FIXME: base location should be the input file's dirname???
        # elif not os.path.isabs(v):
        #     v = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), v))
        return _ret
    def validate(self, d):
        """Classify `d` as URL / existing file / existing directory.

        BUGFIX: previously `_ret` was never set to False, so the
        "missing/unsupported resource location" warning — and the PEDANTIC
        failure — were unreachable for unclassifiable locations.
        """
        if d is None:
            d = self._default_input_data
        v = StringValidator.parse(d, parent=self, parsed_result_is_data=True)
        _ret = True
        # TODO: @classmethod def check_uri(v)
        if urlparse(v).scheme != '':
            self._type = text_type('url')
        elif os.path.isfile(v):  # Check whether file exists
            self._type = text_type('file')
        elif os.path.isdir(v):  # Check whether directory exists
            self._type = text_type('dir')
        else:
            # Neither a URL nor an existing file/directory: flag the problem.
            _ret = False
        if not _ret:
            log.warning("missing/unsupported resource location: {}".format(v))
            _ret = (not PEDANTIC)
        if _ret:
            self.set_data(v)
        return _ret
###############################################################
class BaseID(BaseValidator):
    """Validator for identifier strings (checked via is_valid_id)."""
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('parsed_result_is_data', True)
        super(BaseID, self).__init__(*args, **kwargs)
    def validate(self, d):
        """Parse `d` as a string and require it to be a valid identifier."""
        if d is None:
            d = self._default_input_data
        candidate = StringValidator.parse(d, parent=self, parsed_result_is_data=True)
        if is_valid_id(candidate):
            self.set_data(candidate)
            return True
        log.error("not a valid variable identifier! Input: '{}'".format(d))
        return False
###############################################################
class ClientVariable(BaseID):  #
    """A client variable name: a valid ID, entirely lower- or upper-case."""
    def __init__(self, *args, **kwargs):
        super(ClientVariable, self).__init__(*args, **kwargs)
    def validate(self, d):
        """Validate `d` as an ID and enforce the variable naming conventions."""
        if d is None:
            d = self._default_input_data
        name = BaseID.parse(d, parent=self)
        ok = True
        # ! Variables are all lower or upper case!
        if name not in (name.lower(), name.upper()):
            log.error("a variable must be either in lower or upper case! Input: '{}'".format(d))
            ok = False
        # NOTE: starting with hilbert_ or HILBERT_ with letters, digits and '_'??
        if not re.match('^hilbert(_[a-z0-9]+)+$', name.lower()):
            log.warning("variable should start with HILBERT/hilbert and contain words separated by underscores!"
                        " Input: '{}".format(d))
            # still enable custom non-Hilbert variables!
        if ok:
            self.set_data(name)
        return ok
###############################################################
class ServiceID(BaseID):
    """Identifier of a Service record (validation inherited from BaseID)."""
    def __init__(self, *args, **kwargs):
        super(ServiceID, self).__init__(*args, **kwargs)
###############################################################
class ApplicationID(BaseID):
    """Identifier of an Application record (validation inherited from BaseID)."""
    def __init__(self, *args, **kwargs):
        super(ApplicationID, self).__init__(*args, **kwargs)
###############################################################
class GroupID(BaseID):
    """Identifier of a Group record (validation inherited from BaseID)."""
    def __init__(self, *args, **kwargs):
        super(GroupID, self).__init__(*args, **kwargs)
###############################################################
class StationID(BaseID):
    """Identifier of a Station record (validation inherited from BaseID)."""
    def __init__(self, *args, **kwargs):
        super(StationID, self).__init__(*args, **kwargs)
###############################################################
class ProfileID(BaseID):
    """Identifier of a Profile record (validation inherited from BaseID)."""
    def __init__(self, *args, **kwargs):
        super(ProfileID, self).__init__(*args, **kwargs)
###############################################################
class PresetID(BaseID):
    """Identifier of a Preset record (validation inherited from BaseID)."""
    def __init__(self, *args, **kwargs):
        super(PresetID, self).__init__(*args, **kwargs)
###############################################################
class AutoDetectionScript(StringValidator):
    """A bash snippet used as an auto-detection / pre-init hook.

    An empty script is always valid; non-empty scripts can be syntax-checked
    with `bash -n` and `shellcheck` via check().
    """
    def __init__(self, *args, **kwargs):
        super(AutoDetectionScript, self).__init__(*args, **kwargs)
        self._default_input_data = ''
    def check_script(self, script):
        """Syntax-check `script` by writing it to a temp file and running
        `bash -n` and then `shellcheck` on it.

        :return: True iff both checkers ran and exited with code 0.
        """
        assert script is not None
        # NOTE: unused local `_ret = True` removed (the method returns directly).
        log.debug('Checking auto-detection script: {}'.format(script))
        # NOTE: trying to check the BASH script: shellcheck & bash -n 'string':
        fd, path = tempfile.mkstemp()
        try:
            with os.fdopen(fd, 'w') as tmp:
                tmp.write(script)
            _cmd = ["bash", "-n", path]
            try:
                # NOTE: Check for valid bash script
                retcode = _execute(_cmd, dry_run=get_NO_LOCAL_EXEC_MODE())
            except:
                log.exception("Error while running '{}' to check auto-detection script!".format(' '.join(_cmd)))
                return False  # if PEDANTIC:  # TODO: add a special switch?
            if retcode != 0:
                log.error(
                    "Error while running '{0}' to check auto-detection script: {1}!".format(' '.join(_cmd), retcode))
                return False
            # NOTE: additionall tool: shellcheck (haskell!)
            # FIXME: what if this tool is missing!? TODO: Check for it once!
            _cmd = ["shellcheck", "-s", "bash", path]
            try:
                # NOTE: Check for valid bash script
                retcode = _execute(_cmd, dry_run=get_NO_LOCAL_EXEC_MODE())
            except:
                log.exception("Error while running '{}' to check auto-detection script!".format(' '.join(_cmd)))
                return False
            if retcode != 0:
                log.error(
                    "Error while running '{0}' to check auto-detection script: {1}!".format(' '.join(_cmd), retcode))
                return False
        finally:
            # Always remove the temporary script file.
            os.remove(path)
        return True
    def check(self):
        """Run check_script() on the stored data; a bad script is fatal only in PEDANTIC mode."""
        global PEDANTIC
        script = self.get_data()
        if script is None:
            return False
        if not self.check_script(script):
            if PEDANTIC:
                log.error("Bad script: {0}".format(script))
                return False
            else:
                log.warning("Wrong script: {0}".format(script))
        return True
    def validate(self, d):
        """check whether data is a valid script"""
        if d is None:
            d = self._default_input_data
        # An absent/empty script is valid and stored as the empty string.
        if (d is None) or (d == '') or (d == text_type('')):
            self.set_data(text_type(''))
            return True
        script = ''
        try:
            script = StringValidator.parse(d, parent=self, parsed_result_is_data=True)
            if not bool(script):  # NOTE: empty script is also fine!
                self.set_data(script)
                return True
        except:
            log.exception("Wrong input to AutoDetectionScript::validate: {}".format(d))
            return False
        self.set_data(script)
        return True
###############################################################
class DockerComposeServiceName(StringValidator):  # TODO: any special checks here?
    """Name of a service entry inside a docker-compose YAML file."""
    def __init__(self, *args, **kwargs):
        super(DockerComposeServiceName, self).__init__(*args, **kwargs)
    def validate(self, d):
        """check whether data is a valid service name in file due to DockerComposeYAMLFile"""
        if d is None:
            d = self._default_input_data
        try:
            name = StringValidator.parse(d, parent=self, parsed_result_is_data=True)
            self.set_data(name)
            return True
        except:
            log.error("Wrong input to DockerComposeServiceName::validate: '{}'".format(d))
            return False
###############################################################
class DockerComposeYAMLFile(URI):
    """Reference to a docker-compose YAML file (defaults to 'docker-compose.yml')."""
    def __init__(self, *args, **kwargs):
        super(DockerComposeYAMLFile, self).__init__(*args, **kwargs)
        self._default_input_data = text_type('docker-compose.yml')
    def validate(self, d):
        """Validate as a URI; compose-file content checks happen elsewhere for now."""
        # TODO: call docker-compose on the referenced file! Currently in DockerService!?
        target = self._default_input_data if d is None else d
        return super(DockerComposeYAMLFile, self).validate(target)
###############################################################
class Icon(URI):
    """Reference to an icon resource (validated as a URI)."""
    def __init__(self, *args, **kwargs):
        super(Icon, self).__init__(*args, **kwargs)
    def validate(self, d):
        """Validate as a URI; file contents/extension are not checked yet."""
        # TODO: FIXME: check the file contents (or extention)
        target = self._default_input_data if d is None else d
        return super(Icon, self).validate(target)
###############################################################
class HostAddress(StringValidator):
    """SSH alias of a host, plus remote-execution helpers (ssh/scp/rsync)."""
    def __init__(self, *args, **kwargs):
        kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', False)
        super(HostAddress, self).__init__(*args, **kwargs)
    def validate(self, d):
        """check whether data is a valid ssh alias?"""
        if d is None:
            d = self._default_input_data
        _h = StringValidator.parse(d, parent=self, parsed_result_is_data=True)
        # Strip one level of surrounding single, then double, quotes (if any).
        if _h.startswith("'") and _h.endswith("'"):
            _h = _h[1:-1]
        if _h.startswith('"') and _h.endswith('"'):
            _h = _h[1:-1]
        self.set_data(_h)
        return True
    def check(self):
        """In PEDANTIC mode, verify that the stored ssh alias actually works."""
        global PEDANTIC
        _h = self.get_data()
        if _h is None:
            return False
        if PEDANTIC:
            if not self.check_ssh_alias(_h, timeout=2):
                return False
        return True
    def get_ip_address(self):
        # TODO: not implemented yet.
        pass
    def get_address(self):
        """Return the validated ssh alias / host name."""
        return self.get_data()
    def recheck(self):
        """Re-verify that the ssh alias is currently functional."""
        return self.check_ssh_alias(self.get_address())
    def rsync(self, source, target, **kwargs):
        """Mirror directory `source` to `target` on this host via rsync-over-ssh.

        :return: process return code (0 on success); raises in PEDANTIC mode on failure.
        """
        if get_NO_REMOTE_EXEC_MODE():
            kwargs['dry_run'] = True
        log.debug("About to rsync/ssh %s -> %s:%s...", source, str(self.get_address()), target)
        log.debug("rsync(%s, %s, %s [%s])", self, source, target, str(kwargs))
        assert self.recheck()
        _h = str(self.get_address())
        ssh_config = get_SSH_CONFIG()
        __cmd = "rsync -crtbviuzpP -e \"ssh -q -F {3}\" \"{0}/\" \"{1}:{2}/\"".format(source, _h, target, ssh_config)
        _cmd = shlex.split(__cmd)
        # = ' '.join(_cmd)
        # "scp -q -F {3} {0} {1}:{2}"
        # client = paramiko.SSHClient()
        # client.load_system_host_keys()
        # log.debug("Rsync/SSH: [%s]...", _cmd)
        log.debug("Rsync/SSH: [%s]...", str(_cmd))
        try:
            retcode = _execute(_cmd, **kwargs)
        except:
            log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
            # Non-PEDANTIC mode degrades an execution exception to exit code 1.
            if not PEDANTIC:
                return 1
            raise
        assert retcode is not None
        if not retcode:
            log.debug("Command ({}) execution success!".format(__cmd))
            return retcode
        else:
            log.error("Could not run rsync.ssh command: '{0}'! Return code: {1}".format(__cmd, retcode))
            if PEDANTIC:
                raise Exception("Could not run rsync/ssh command: '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
        return retcode
    # TODO: check/use SSH/SCP calls!
    def scp(self, source, target, **kwargs):
        """Copy file `source` to `target` on this host via scp.

        :return: process return code (0 on success); raises in PEDANTIC mode on failure.
        """
        if get_NO_REMOTE_EXEC_MODE():
            kwargs['dry_run'] = True
        assert self.recheck()
        _h = self.get_address()  # 'jabberwocky' #
        _cmd = shlex.split(
            "scp -q -F {3} {0} {1}:{2}".format(source, _h, target, get_SSH_CONFIG()))
        __cmd = ' '.join(_cmd)
        # client = paramiko.SSHClient()
        # client.load_system_host_keys()
        try:
            retcode = _execute(_cmd, **kwargs)
        except:
            log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
            if not PEDANTIC:
                return 1
            raise
        assert retcode is not None
        if not retcode:
            log.debug("Command ({}) execution success!".format(__cmd))
            return retcode
        else:
            log.error("Could not run scp command: '{0}'! Return code: {1}".format(__cmd, retcode))
            if PEDANTIC:
                raise Exception("Could not run scp command: '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
        return retcode
    def ssh(self, cmd, **kwargs):
        """Run `cmd` (a list of arguments) on this host via ssh.

        NOTE(review): unlike rsync/scp, a non-zero exit code here always raises
        (the PEDANTIC guard is commented out), so the trailing `return retcode`
        is only reached on success.
        """
        if get_NO_REMOTE_EXEC_MODE():
            kwargs['dry_run'] = True
        assert self.recheck()
        _h = self.get_address()  # 'jabberwocky'
        # TODO: maybe respect SSH Settings for the parent station!?
        _cmd = shlex.split(
            "ssh -q -F {2} {0} {1}".format(_h, ' '.join(cmd), get_SSH_CONFIG()))
        __cmd = ' '.join(_cmd)
        # client = paramiko.SSHClient()
        # client.load_system_host_keys()
        try:
            retcode = _execute(_cmd, **kwargs)
        except:
            log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
            raise
        assert retcode is not None
        if not retcode:
            log.debug("Command ({}) execution success!".format(__cmd))
            return retcode
        else:
            log.error("Could not run remote ssh command: '{0}'! Return code: {1}".format(__cmd, retcode))
            # if PEDANTIC:
            raise Exception("Could not run remote ssh command: '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
        return retcode
    @classmethod
    def check_ssh_alias(cls, _h, **kwargs):
        """Check for ssh alias

        Probes the alias with `ssh ... exit 0` (ConnectTimeout defaults to 2s).
        :return: True iff the probe exited with code 0.
        """
        timeout = kwargs.pop('timeout', 2)
        if get_NO_REMOTE_EXEC_MODE():
            kwargs['dry_run'] = True
        log.debug("Checking ssh alias: '{0}'...".format(text_type(_h)))
        try:
            # client = paramiko.SSHClient()
            # client.load_system_host_keys()
            _cmd = ["ssh", "-q", "-F", get_SSH_CONFIG(), "-o",
                    "ConnectTimeout={}".format(timeout), _h, "exit 0"]
            retcode = _execute(_cmd, **kwargs)  # , stdout=open("/dev/null", 'w'), stderr=open("/dev/null", 'w')
            if retcode:
                log.warning("Non-functional ssh alias: '{0}' => exit code: {1}!".format(text_type(_h), retcode))
            else:
                log.debug("Ssh alias '{0}' is functional!".format(text_type(_h)))
            return retcode == 0
        except:
            log.exception("Non-functional ssh alias: '{0}'. Moreover: Unexpected error: {1}".format(text_type(_h),
                                                                                                    sys.exc_info()))
            if PEDANTIC:
                raise
            return False
###############################################################
class HostMACAddress(StringValidator):
    """MAC Address of the station"""
    def __init__(self, *args, **kwargs):
        super(HostMACAddress, self).__init__(*args, **kwargs)
    def validate(self, d):
        """Check that the input is a well-formed MAC address: six hex pairs
        joined by a consistent ':' or '-' separator (case-insensitive)."""
        if d is None:
            d = self._default_input_data
        v = StringValidator.parse(d, parent=self)
        if not re.match("[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", v.lower()):
            # NOTE(review): `d.lc` assumes a ruamel.yaml scalar carrying location
            # info — a plain str input would raise AttributeError here; confirm callers.
            _value_error(None, d, d.lc, "ERROR: Wrong MAC Address: [{}]")
            return False
        # TODO: verify the existence of that MAC address?
        self.set_data(v)
        return True
###############################################################
class BoolValidator(ScalarValidator):
    """Validator accepting only genuine boolean values."""
    def __init__(self, *args, **kwargs):
        super(BoolValidator, self).__init__(*args, **kwargs)
    def validate(self, d):
        """Require `d` (or, when None, the default input) to be a bool."""
        value = self._default_input_data if d is None else d
        if isinstance(value, bool):
            self.set_data(value)
            return True
        log.error("not a boolean value: '{}'".format(value))
        return False
class StationVisibility(BoolValidator):  ## "hidden": True / [False]
    """Whether the station is hidden; defaults to False (visible)."""
    def __init__(self, *args, **kwargs):
        super(StationVisibility, self).__init__(*args, **kwargs)
        self._default_input_data = False
class AutoTurnon(BoolValidator):  # Bool, False
    """Whether to power the station on automatically; defaults to False."""
    def __init__(self, *args, **kwargs):
        super(AutoTurnon, self).__init__(*args, **kwargs)
        self._default_input_data = False  # no powering on by default!?
###############################################################
class VariadicRecordWrapper(BaseValidator):
    """VariadicRecordWrapper record. Type is determined by the given 'type' field."""
    def __init__(self, *args, **kwargs):
        kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', True)
        super(VariadicRecordWrapper, self).__init__(*args, **kwargs)
        # Name of the discriminator key in the input mapping.
        self._type_tag = text_type('type')
        # Validator class used to parse the discriminator value (set by subclasses).
        self._type_cls = None
        # Key into self._types selecting the active dispatch table (set by subclasses).
        self._default_type = None
        # Maps rule-set name -> {discriminator value -> concrete record validator class}.
        self._types = {}
    # TODO: make sure to use its .parse instead of .validate due to substitution in Wrapper objects!
    def validate(self, d):
        """determine the type of variadic data for the format version"""
        if d is None:
            d = self._default_input_data
        _ret = True
        assert isinstance(d, dict)
        assert self._default_type is not None
        assert self._default_type in self._types
        _rule = self._types[self._default_type]  # Version dependent!
        assert _rule is not None
        assert self._type_cls is not None
        # The discriminator key must be present in the mapping.
        if self._type_tag not in d:
            _lc = d.lc  # start of the current mapping
            _key_error(self._type_tag, d, _lc, "ERROR: Missing mandatory key `{}`")
            return False
        t = None
        try:
            t = self._type_cls.parse(d[self._type_tag], parent=self, parsed_result_is_data=True, id=self._type_tag)
        except:
            log.exception("Wrong type data: {}".format(d[self._type_tag]))
            return False
        if t not in _rule:
            _lc = d.lc  # start of the current mapping
            _key_error(self._type_tag, t, _lc, "ERROR: unsupported/wrong variadic type: '{}'")
            return False
        # Delegate full validation of the mapping to the concrete record validator.
        tt = _rule[t](parent=self, id='_')
        if not tt.validate(d):
            _lc = d.lc.key(self._type_tag)
            _value_error('*', d, _lc, "ERROR: invalid mapping value '{}' for type '%s'" % t)
            return False
        self.set_data(tt)  # TODO: completely replace this with tt??? access Methods?
        return True
###############################################################
class StationPowerOnMethodWrapper(VariadicRecordWrapper):
    """StationPowerOnMethod :: Wrapper"""
    def __init__(self, *args, **kwargs):
        super(StationPowerOnMethodWrapper, self).__init__(*args, **kwargs)
        # Dispatch table: 'type' value -> concrete power-on method validator.
        T = StationPowerOnMethodType
        self._type_cls = T
        _wol_dm = {T.parse("WOL", parsed_result_is_data=True): WOL,
                   T.parse("DockerMachine", parsed_result_is_data=True): DockerMachine}
        self._default_type = "default_poweron_wrapper"
        self._types[self._default_type] = _wol_dm
###############################################################
class ServiceWrapper(VariadicRecordWrapper):
    """Service :: Wrapper"""
    def __init__(self, *args, **kwargs):
        super(ServiceWrapper, self).__init__(*args, **kwargs)
        # Dispatch table: 'type' value -> concrete service validator.
        T = ServiceType
        self._type_cls = T
        _dc = {T.parse("compose", parsed_result_is_data=True): DockerComposeService}
        self._default_type = "default_docker_compose_service_wrapper"
        self._types[self._default_type] = _dc
###############################################################
class ApplicationWrapper(ServiceWrapper):
    """Application :: Wrapper (variadic dispatch on the 'type' field)."""
    def __init__(self, *args, **kwargs):
        super(ApplicationWrapper, self).__init__(*args, **kwargs)
        # NOTE: same type enum for docker-compose Services and Applications!
        T = ServiceType
        self._type_cls = T
        _dc = {T.parse("compose", parsed_result_is_data=True): DockerComposeApplication}
        self._default_type = "default_docker_compose_application_wrapper"
        self._types[self._default_type] = _dc
###############################################################
class DockerMachine(BaseRecordValidator):
    """DockerMachine :: StationPowerOnMethod"""
    # _DM = 'docker-machine'
    def __init__(self, *args, **kwargs):
        super(DockerMachine, self).__init__(*args, **kwargs)
        self._type_tag = text_type('type')
        self._vm_host_address_tag = text_type('vm_host_address')
        self._vm_name_tag = text_type('vm_name')
        self._default_type = 'DockerMachine'
        # Record schema: key -> (mandatory?, validator class).
        DM_rule = {
            self._type_tag: (True, StationPowerOnMethodType),  # Mandatory!
            text_type('auto_turnon'): (False, AutoTurnon),
            self._vm_name_tag: (True, StringValidator),
            self._vm_host_address_tag: (True, HostAddress)
        }
        self._types = {self._default_type: DM_rule}  # ! NOTE: AMT - maybe later...
    def get_vm_name(self):
        """Return the configured VM name with one level of surrounding quotes stripped.

        :raises Exception: when 'vm_name' is missing or empty.
        """
        _d = self.get_data()
        assert _d is not None
        _a = _d.get(self._vm_name_tag, None)
        if (_a is None) or (not bool(_a)):
            log.error('Missing vm_name!')
            raise Exception('Missing vm_name!')
        # Strip one level of surrounding single, then double, quotes (if any).
        if _a.startswith("'") and _a.endswith("'"):
            _a = _a[1:-1]
        if _a.startswith('"') and _a.endswith('"'):
            _a = _a[1:-1]
        return _a
    def get_vm_host_address(self):
        """Return the host address entry of the machine hosting the VM.

        :raises Exception: when 'vm_host_address' is missing or empty.
        """
        _d = self.get_data()
        assert _d is not None
        _a = _d.get(self._vm_host_address_tag, None)
        if (_a is None) or (not bool(_a)):
            log.error('Missing vm_host_address!')
            raise Exception('Missing vm_host_address!')
        return _a
    def start(self):  # , action, action_args):
        """Power the VM on by running the 'dm_start' helper on its host via ssh.

        :return: True iff the remote command exited with code 0.
        """
        _a = self.get_vm_host_address()
        assert _a is not None
        assert isinstance(_a, HostAddress)
        _n = self.get_vm_name()
        # DISPLAY =:0
        _cmd = [_HILBERT_STATION, _HILBERT_STATION_OPTIONS, 'dm_start', _n]  # self._DM
        try:
            _ret = _a.ssh(_cmd, shell=True)  # TODO: check for that shell=True!!!???
        except:
            s = "Could not power-on virtual station {0} (at {1})".format(_n, _a)
            if not PEDANTIC:
                log.warning(s)
                return False
            else:
                log.exception(s)
                raise
        return (_ret == 0)
        # process call: ssh to vm_host + docker-machione start vm_id
        # raise NotImplementedError("Running 'docker-machine start' action is not supported yet... Sorry!")
class WOL(BaseRecordValidator):
    """WOL :: StationPowerOnMethod"""
    # External wake-on-lan tool invoked to send the magic packet.
    _WOL = 'wakeonlan'
    def __init__(self, *args, **kwargs):
        super(WOL, self).__init__(*args, **kwargs)
        self._type_tag = text_type('type')
        self._default_type = 'WOL'
        self._MAC_tag = text_type('mac')
        # Record schema: key -> (mandatory?, validator class).
        WOL_rule = {
            self._type_tag: (True, StationPowerOnMethodType),  # Mandatory!
            text_type('auto_turnon'): (False, AutoTurnon),
            self._MAC_tag: (True, HostMACAddress)
        }
        self._types = {self._default_type: WOL_rule}
    def get_MAC(self):
        """Return the validated MAC address of the target station."""
        _d = self.get_data()
        assert _d is not None
        _MAC = _d.get(self._MAC_tag, None)
        assert _MAC is not None
        assert _MAC != ''
        return _MAC
    def start(self):  # , action, action_args):
        """Wake the station up via wake-on-lan.

        Deliberately sends the magic packet (up to) twice: first without an
        explicit interface, then — when the parent Station's address is known —
        once more with '-i <address>' as a best-effort fallback.
        """
        _address = None
        _parent = self.get_parent(cls=Station)
        if _parent is not None:
            if isinstance(_parent, Station):
                _address = _parent.get_address()
                assert _address is not None
                assert isinstance(_address, HostAddress)
                _address = _address.get_address()
        _MAC = self.get_MAC()
        _cmd = [self._WOL, _MAC]  # NOTE: avoid IP for now? {"-i", _address, }
        __cmd = ' '.join(_cmd)
        try:
            retcode = _execute(_cmd, dry_run=get_NO_LOCAL_EXEC_MODE())
            assert retcode is not None
            if not retcode:
                log.debug("Command ({}) execution success!".format(__cmd))
                # return
            else:
                log.error("Could not wakeup via '{0}'! Return code: {1}".format(__cmd, retcode))
                if PEDANTIC:
                    raise Exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
        except:
            log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
            if not PEDANTIC:
                return
            raise
        # Without a station address we cannot attempt the targeted second send.
        if (_address is None) or (_address == ''):
            log.warning("Sorry: could not get station's address for this WOL MethodObject!")
            return
        # if PEDANTIC:  # NOTE: address should be present for other reasons anyway...
        #     raise Exception("Sorry: could not get station's address for this WOL MethodObject!")
        # NOTE: also try with the station address (just in case):
        # Q: any problems with this?
        _cmd = [self._WOL, "-i", _address, _MAC]
        __cmd = ' '.join(_cmd)
        try:
            retcode = _execute(_cmd, dry_run=get_NO_LOCAL_EXEC_MODE())
            assert retcode is not None
            if not retcode:
                log.debug("Command ({}) execution success!".format(__cmd))
                return
            else:
                log.error("Could not wakeup via '{0}'! Return code: {1}".format(__cmd, retcode))
                # if PEDANTIC:
                #     raise Exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
        except:
            # Second attempt is best-effort: failures are logged but never raised.
            log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
            # if not PEDANTIC:
            #     return
            # raise
            pass
###############################################################
class DockerComposeService(BaseRecordValidator):
"""DockerCompose :: Service data type"""
_DC = "docker-compose"
    def __init__(self, *args, **kwargs):
        super(DockerComposeService, self).__init__(*args, **kwargs)
        # Field tags used throughout this record type.
        self._type_tag = text_type('type')
        self._prerun_detections_hook_tag = text_type('auto_detections')
        self._preinit_hook_tag = text_type('pre_init')
        self._ref_tag = text_type('ref')
        self._file_tag = text_type('file')
        # Record schema: key -> (mandatory?, validator class).
        _compose_rule = {
            self._type_tag: (True, ServiceType),  # Mandatory
            self._prerun_detections_hook_tag: (False, AutoDetectionScript),
            self._preinit_hook_tag: (False, AutoDetectionScript),
            self._ref_tag: (True, DockerComposeServiceName),
            self._file_tag: (False, DockerComposeYAMLFile)
        }
        self._default_type = "default_dc_service"
        self._types = {self._default_type: _compose_rule}
        # Missing optional keys are instantiated with their default values.
        self._create_optional = True
def get_ref(self):
_d = self.get_data()
assert self._ref_tag in _d
return _d[self._ref_tag]
def get_file(self):
_d = self.get_data()
assert self._file_tag in _d
return _d[self._file_tag]
def copy(self, tmpdir):
f = self.get_file()
t = os.path.join(tmpdir, f)
log.info("Copying resource file: '%s' -> '%s'...", f, t)
if not os.path.exists(t):
d = os.path.dirname(t)
if not os.path.exists(d):
os.mkdir(d, 7 * 8 + 7)
shutil.copy(f, t)
else:
log.debug("Target resource '%s' already exists!", t)
    def to_bash_array(self, n):
        """Serialize the core compose fields as bash associative-array entries
        of the form ['<n>:<field>']='<value>' joined by spaces."""
        _d = self.data_dump()
        _min_compose = [self._type_tag, self._ref_tag, self._file_tag, self._prerun_detections_hook_tag, self._preinit_hook_tag]
        return ' '.join(["['{2}:{0}']='{1}'".format(k, _d[k], n) for k in _min_compose])
    @staticmethod
    def check_service(_f, _n):
        """Check that docker-compose file `_f` declares a service named `_n`.

        :param _f: path to an existing docker-compose YAML file.
        :param _n: non-empty service/application name to look for.
        :return: True iff the file parses as YAML and contains services[_n].
        """
        # TODO: Check the corresponding file for such a service -> Service in DockerService!
        # TODO: FIXME: Read docker-compose.yml directly??? instead of using docker-compose config???
        assert bool(_n)
        assert os.path.exists(_f)
        ## dc = load_yaml_file(_f)
        try:
            with open(_f, 'r') as fh:
                # NOTE: loading external Docker-Compose's YML file using a standard loader!
                dc = yaml.load(fh, Loader=yaml.RoundTripLoader, version=(1, 1), preserve_quotes=True)
        except (IOError, yaml.YAMLError) as e:
            error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__
            log.exception(u"{0}: Could not parse yaml file: '{1}' due to {2}".format(error_name, _f, e))
            return False
        assert dc is not None
        ss = dc.get('services', None)
        if ss is None:
            # A compose file without a 'services' section is malformed for our purposes.
            if PEDANTIC:
                log.error("Docker-Compose specification file '%s' contains no 'services'! Bad file format?", _f)
            else:
                log.warning("Docker-Compose specification file '%s' contains no 'services'! Bad file format?", _f)
            return False
        if _n in ss:
            log.debug("Service/Application '%s' is available in '%s'", _n, _f)
        else:
            if PEDANTIC:
                log.error("Missing service/application '%s' in file '%s'!", _n, _f)
            else:
                log.warning("Missing service/application '%s' in file '%s'!", _n, _f)
            return False
        return True
def check(self):
if not self.check_service(_f, _n):
if PEDANTIC:
return False
def validate(self, d):
if d is None:
d = self._default_input_data
if not BaseRecordValidator.validate(self, d): # TODO: use .parse?
assert self.get_data() is None
return False
_d = self.get_data()
# TODO: remove Validators (BaseString) from strings used as dict keys!
_f = _d[self._file_tag]
while isinstance(_f, BaseValidator):
_f = _f.get_data()
_n = _d[self._ref_tag]
while isinstance(_n, BaseValidator):
_n = _n.get_data()
if not os.path.exists(_f): # TODO: FIXME: use URI::check() instead??
if PEDANTIC:
log.error("Missing file with docker-compose configuration: '%s'. "
"Cannot check the service '%s'!", _f, _n)
return False
log.warning("Missing file with docker-compose configuration: '%s'. "
"Cannot check the service '%s'!", _f, _n)
return True
# if not self.check_service(_f, _n):
# if PEDANTIC:
# return False
return True # _ret
###############################################################
class DockerComposeApplication(DockerComposeService):
    """DockerCompose :: Application.

    A docker-compose service extended with UI metadata (name, description,
    icon) and the group of stations this application is compatible with.
    """

    def __init__(self, *args, **kwargs):
        super(DockerComposeApplication, self).__init__(*args, **kwargs)
        # The compatibility tag was renamed with format version 0.7.0:
        tag = text_type('compatibleStations')
        version = self.get_version(default=semantic_version.Version('0.7.0'))
        if version >= semantic_version.Version('0.7.0'):
            tag = text_type('compatible_stations')
        self._compatibleStations_tag = tag
        # Extend the inherited service rule with application-specific fields:
        rule = self._types[self._default_type].copy()
        rule.update({
            text_type('name'): (True, BaseUIString),  # NOTE: name for UI!
            text_type('description'): (True, BaseUIString),
            text_type('icon'): (False, Icon),
            tag: (True, Group)  # TODO: optional? default: all stations?
        })
        self._types[self._default_type] = rule
        self._station_list = []

    def get_group(self):
        """Group of stations compatible with this application (None if unset)."""
        data = self.get_data()
        if data is None:
            return None
        return data.get(self._compatibleStations_tag, None)
###############################################################
class Profile(BaseRecordValidator):
    """Profile: a named set of services plus the station types that support it."""
    _services_tag = text_type('services')
    _supported_types_tag = text_type('supported_types')

    def __init__(self, *args, **kwargs):
        super(Profile, self).__init__(*args, **kwargs)
        self._default_type = "default_profile"
        self._types = {
            self._default_type: {
                text_type('name'): (True, BaseUIString),
                text_type('description'): (True, BaseUIString),
                text_type('icon'): (False, Icon),
                self._services_tag: (True, ServiceList),
                self._supported_types_tag: (False, ServiceTypeList)  # Default value?
            }
        }
        self._station_list = set()

    def get_services(self):
        """List of ServiceIDs belonging to this profile."""
        data = self.get_data()
        assert data is not None
        assert self._services_tag in data
        return data[self._services_tag]

    def get_supported_types(self):
        """Optional list of supported service types (None if unspecified)."""
        data = self.get_data()
        assert data is not None
        return data.get(self._supported_types_tag, None)

    def add_station(self, sid, station):
        """Record that station ``sid`` uses this profile."""
        assert isinstance(station, Station)
        self._station_list.add(sid)  # set: re-adding an existing id is a no-op

    def get_stations(self):
        """Set of StationIDs known to use this profile."""
        return self._station_list

    def data_dump(self):
        dump = super(Profile, self).data_dump()
        stations = self.get_stations()
        if stations is not None:
            dump['station_list'] = sorted(stations)
        return dump
###############################################################
class StationSSHOptions(BaseRecordValidator):
    """Optional Station::ssh_options record: user, port, key and key_ref."""

    def __init__(self, *args, **kwargs):
        super(StationSSHOptions, self).__init__(*args, **kwargs)
        self._default_type = "default_station_ssh_options"
        rule = {
            text_type('user'): (False, StringValidator),
            text_type('key'): (False, StringValidator),
            # TODO: BaseInt?? http://stackoverflow.com/questions/4187185/how-can-i-check-if-my-python-object-is-a-number
            text_type('port'): (False, StringValidator),
            text_type('key_ref'): (False, URI),
        }
        self._types = {self._default_type: rule}

    def validate(self, d):
        """check whether data is a valid ssh connection options"""
        if d is None:
            d = self._default_input_data
        # TODO: Check for ssh connection: Use some Python SSH Wrapper (2/3)
        return super(StationSSHOptions, self).validate(d)
###############################################################
class StationType(BaseEnum):  # NOTE: to be redesigned/removed later on together with Station::type!?
    """Type of station defines the set of required data fields!"""

    def __init__(self, *args, **kwargs):
        super(StationType, self).__init__(*args, **kwargs)
        # 'hidden' requires nothing and exists for extension purposes only:
        hidden = text_type('hidden')
        self._default_input_data = hidden
        # NOTE: the list of possible values of Station::type (will depend on format version)
        self._enum_list = [
            hidden,
            text_type('standalone'),  # No remote control via SSH & Hilbert client...
            text_type('server'),      # Linux with Hilbert client part installed but no remote control!
            text_type('standard'),    # Linux with Hilbert client part installed!
        ]  # ,text_type('special')
###############################################################
class Station(BaseRecordValidator):  # Wrapper?
    """Station.

    A single station record: UI metadata, network address, power-on / SSH
    options, the profile it runs and its client-side settings.  A station may
    inherit from another station via the 'extends' tag (see extend()).
    """
    _extends_tag = text_type('extends')
    _client_settings_tag = text_type('client_settings')
    _type_tag = text_type('type')

    def __init__(self, *args, **kwargs):
        super(Station, self).__init__(*args, **kwargs)
        self._poweron_tag = text_type('poweron_settings')
        self._ssh_options_tag = text_type('ssh_options')
        self._address_tag = text_type('address')
        self._ishidden_tag = text_type('hidden')  # TODO: deprecate with "fake" station type?
        self._profile_tag = text_type('profile')
        self._default_type = "default_station"
        self._name_tag = text_type('name')
        default_rule = {
            Station._extends_tag: (False, StationID),  # TODO: NOTE: to be redesigned later on: e.g. use Profile!?
            self._name_tag: (True, BaseUIString),
            text_type('description'): (True, BaseUIString),
            text_type('icon'): (False, Icon),
            self._profile_tag: (True, ProfileID),
            self._address_tag: (True, HostAddress),
            self._poweron_tag: (False, StationPowerOnMethodWrapper),  # !! variadic, PowerOnType...
            self._ssh_options_tag: (False, StationSSHOptions),  # !!! record: user, port, key, key_ref
            text_type('omd_tag'): (True, StationOMDTag),  # ! like ServiceType: e.g. agent. Q: Is this mandatory?
            self._ishidden_tag: (False, StationVisibility),  # Q: Is this mandatory?
            Station._client_settings_tag: (False, StationClientSettings),  # IDMap : (BaseID, BaseString)
            Station._type_tag: (False, StringValidator)  # NOTE: to be redesigned later on!
        }
        self._types = {self._default_type: default_rule}
        self._compatible_applications = {}
        self._profile = None

    def add_application(self, app_id, app):
        """Register an application as compatible with this station."""
        assert isinstance(app, DockerComposeApplication)
        self._compatible_applications[app_id] = app

    def get_compatible_applications(self):
        """Mapping {ApplicationID -> DockerComposeApplication} of compatible apps."""
        return self._compatible_applications

    def data_dump(self):
        _d = super(Station, self).data_dump()
        l = self.get_compatible_applications()
        if l is not None:
            _d['compatible_applications'] = sorted(l.keys())
        return _d

    def get_name(self, _default=None):
        """UI name of the station (``_default`` if missing)."""
        _d = self.get_data()
        assert _d is not None
        return _d.get(self._name_tag, _default)

    def is_hidden(self):
        """Station visibility flag; defaults via StationVisibility.parse(None)."""
        _d = self.get_data()
        assert _d is not None
        _h = _d.get(self._ishidden_tag, None)
        if _h is None:
            _h = StationVisibility.parse(None, parent=self, parsed_result_is_data=True)
        return _h

    def get_hilbert(self):  # TODO: cache this!
        """Root Hilbert configuration object this station belongs to."""
        _root = self.get_parent(cls=Hilbert)
        assert _root is not None
        assert isinstance(_root, Hilbert)
        return _root

    def get_profile_ref(self):
        """ProfileID referenced by this station (must be present and non-empty)."""
        _d = self.get_data()
        assert _d is not None
        _profile_id = _d.get(self._profile_tag, None)
        assert _profile_id is not None
        assert _profile_id != ''
        return _profile_id

    def set_profile(self, _profile):
        """Cache the resolved Profile object (idempotent for the same profile)."""
        assert isinstance(_profile, Profile)
        if self._profile is None:
            self._profile = _profile
        else:
            assert _profile == self._profile

    def get_profile(self):
        """Resolve (and cache) the Profile object referenced by this station."""
        if self._profile is not None:
            return self._profile
        ref = self.get_profile_ref()
        _parent = self.get_hilbert()  # TODO: FIXME: move to Hilbert: _parent.get_all_*() ??
        log.debug("Querying global profile: '%s'...", ref)
        _profile = _parent.query('Profiles/{}/all'.format(ref))
        assert _profile is not None
        assert isinstance(_profile, Profile)
        self.set_profile(_profile)
        return _profile

    def get_all_services(self):  # TODO: FIXME: move to Hilbert: _parent.get_all_*()
        """All globally declared services."""
        _parent = self.get_hilbert()
        _services = _parent.query('Services/all')
        assert _services is not None
        assert isinstance(_services, GlobalServices)
        return _services

    def get_all_applications(self):
        """All globally declared applications."""
        _parent = self.get_hilbert()
        _apps = _parent.query('Applications/all')
        assert _apps is not None
        assert isinstance(_apps, GlobalApplications)
        return _apps

    def get_address(self):  # TODO: IP?
        """HostAddress of the station; raises if missing/empty."""
        _d = self.get_data()
        assert _d is not None
        _a = _d.get(self._address_tag, None)
        if (_a is None) or (not bool(_a)):
            log.error('Missing station address!')
            raise Exception('Missing station address!')
        assert isinstance(_a, HostAddress)
        log.debug('HostAddress: {}'.format(_a))
        return _a

    def shutdown(self):
        """Stop Hilbert on the station and schedule a shutdown via SSH.

        Returns True on success; in non-pedantic mode SSH failures only warn
        and yield False, in pedantic mode the exception is re-raised.
        """
        _a = self.get_address()
        assert _a is not None
        assert isinstance(_a, HostAddress)
        try:
            _ret = _a.ssh([_HILBERT_STATION, _HILBERT_STATION_OPTIONS, "stop"])
        except:
            s = "Could not stop Hilbert on the station {}".format(_a)
            if not PEDANTIC:
                log.warning(s)
                return False
            else:
                log.exception(s)
                raise
        if _ret != 0:
            return False
        try:
            _ret = _a.ssh([_HILBERT_STATION, _HILBERT_STATION_OPTIONS, "shutdown", "now"])
        except:
            s = "Could not schedule immediate shutdown on the station {}".format(_a)
            if not PEDANTIC:
                log.warning(s)
                return False
            else:
                log.exception(s)
                raise
        if _ret != 0:
            log.error("Bad attempt to immediately shutdown the station {} ".format(_a))
            return False
        try:
            _ret = _a.ssh([_HILBERT_STATION, _HILBERT_STATION_OPTIONS, "shutdown"])
        except:
            s = "Could not schedule delayed shutdown on the station {}".format(_a)
            if not PEDANTIC:
                log.warning(s)
                return False
            else:
                log.exception(s)
                raise
        return (_ret == 0)

    def deploy(self):
        """Generate a station configuration bundle, rsync it to the station and
        initialize the station with it.

        Returns True on success, False on (non-pedantic) failure.
        """
        # TODO: get_client_settings()
        _d = self.get_data()
        _a = self.get_address()
        assert _a is not None
        assert isinstance(_a, HostAddress)
        _settings = _d.get(self._client_settings_tag, None)
        if _settings is None:
            if not PEDANTIC:
                log.warning('Missing client settings for this station. Nothing to deploy!')
                # BUGFIX: previously fell through and crashed on _settings.get(...)
                return False
            log.error('Missing client settings for this station. Nothing to deploy!')
            raise Exception('Missing client settings for this station. Nothing to deploy!')
        if isinstance(_settings, BaseValidator):
            _settings = _settings.get_data()
        default_app_id = _settings.get('hilbert_station_default_application', None)
        # TODO: check default_app_id!
        # TODO: all compatible applications!?
        _profile_ref = self.get_profile_ref()
        _profile = self.get_profile()  # _profile_ref
        # TODO: FIXME: add type checking! NOTE: compatibility should be verified beforehand!
        # _supported_service_types = _profile.get_supported_types()  # no need here!
        _serviceIDs = _profile.get_services()
        assert _serviceIDs is not None
        assert isinstance(_serviceIDs, ServiceList)  # list of ServiceID
        _serviceIDs = _serviceIDs.get_data()  # Note: IDs from config file - NOT Service::ref!
        assert isinstance(_serviceIDs, list)  # list of strings (with ServiceIDs)?
        # TODO: FIXME: All supported/compatible applications??!?
        all_apps = self.get_compatible_applications()  # self.get_all_applications().get_data()
        all_services = self.get_all_services().get_data()
        # TODO: deployment should create a temporary directory + /station.cfg + /docker-compose.yml etc!?
        tmpdir = tempfile.mkdtemp()
        predictable_filename = 'station.cfg'
        remote_tmpdir = os.path.join("/tmp", "{0}_{1}_{2}_{3}".format(str(_a.get_address()), _profile_ref,
                                                                      os.path.basename(tmpdir), "%.20f" % time.time()))
        saved_umask = os.umask(7 * 8 + 7)  # Ensure the file is read/write by the creator only
        path = os.path.join(tmpdir, predictable_filename)
        try:
            with open(path, "w") as tmp:
                # TODO: FIXME: list references into docker-compose.yml???
                # TODO: use bash array to serialize all Services/Applications!
                # NOTE: Only handles (IDs) are to be used below:
                # NOTE: ATM only compose && Application/ServiceIDs == refs to the same docker-compose.yml!
                # TODO: NOTE: may differ depending on Station::type!
                # NOTE: the following variables should be set in order to generate a valid config file:
                #  hilbert_station_services_and_applications
                #  hilbert_station_profile_services
                #  hilbert_station_compatible_applications
                tmp.write('declare -Agr hilbert_station_services_and_applications=(\\\n')
                for k in _serviceIDs:
                    s = all_services.get(k, None)  # TODO: check compatibility during verification!
                    if s is None:
                        log.error("Wrong Service ID '%s' is not known globally!", k)
                    else:
                        assert s is not None
                        assert isinstance(s, DockerComposeService)
                        # TODO: s.check()
                        tmp.write(' {} \\\n'.format(s.to_bash_array(k)))
                        s.copy(tmpdir)
                # TODO: collect all **compatible** applications!
                for k in all_apps:
                    a = all_apps[k]  # TODO: check compatibility during verification!
                    assert a is not None
                    assert isinstance(a, DockerComposeApplication)
                    # TODO: a.check()
                    tmp.write(' {} \\\n'.format(a.to_bash_array(k)))
                    a.copy(tmpdir)
                tmp.write(')\n')
                tmp.write("declare -agr hilbert_station_profile_services=({})\n".format(' '.join(_serviceIDs)))
                tmp.write(
                    "declare -agr hilbert_station_compatible_applications=({})\n".format(' '.join(all_apps.keys())))
                app = _settings.get('hilbert_station_default_application', '')  # NOTE: ApplicationID!
                if app != '':
                    if app in all_apps:
                        app = all_apps[app]  # DC Reference!
                        assert app is not None
                        assert isinstance(app, DockerComposeApplication)
                        # TODO: app.check()
                        app.copy(tmpdir)
                    else:
                        log.warning('Default application %s is not in the list of compatible apps!', app)
                for k in sorted(_settings.keys(), reverse=True):
                    if k.startswith('HILBERT_'):
                        # NOTE: HILBERT_* are exports for services/applications (docker-compose.yml)
                        tmp.write("declare -xg {0}='{1}'\n".format(k, str(_settings.get(k, ''))))
                    elif k.startswith('hilbert_'):
                        # NOTE: hilbert_* are exports for client-side tool: `hilbert-station`
                        tmp.write("declare -rg {0}='{1}'\n".format(k, str(_settings.get(k, ''))))
                    else:
                        if not PEDANTIC:
                            # BUGFIX: the '%s' placeholder previously had no argument
                            log.debug("Non-hilbert station setting: [%s]!", k)
                            tmp.write("declare -xg {0}='{1}'\n".format(k, str(_settings.get(k, ''))))
                        else:
                            log.warning("Non-hilbert station setting: [%s]! Not allowed in pedantic mode!", k)
            # NOTE: tmp is now generated!
            try:
                log.debug("About to deploy %s -> %s... (%s)", tmpdir, remote_tmpdir, str(_a.get_address()))
                _a.rsync(tmpdir, remote_tmpdir)  # , dry_run=dry_run)
            except:
                log.debug("Exception during deployment!")
                s = "Could not deploy new local settings to {}".format(_a)
                if not PEDANTIC:
                    log.warning(s)
                    return False
                else:
                    log.exception(s)
                    raise
        finally:
            log.debug("Temporary Station Configuration File: {}".format(path))
            os.umask(saved_umask)
            if not get_NO_LOCAL_EXEC_MODE():
                shutil.rmtree(tmpdir)  # NOTE: tmpdir is not empty!
            else:
                print("[Dry-Run-Mode] Keeping temporary location [{0}] that was prepared for deployment to [{1}]".format(tmpdir, str(_a.get_address())))
        _cmd = [_HILBERT_STATION, _HILBERT_STATION_OPTIONS, "init", remote_tmpdir]
        try:
            _ret = _a.ssh(_cmd)
        except:
            s = "Could not initialize the station using the new configuration file with {}".format(' '.join(_cmd))
            if not PEDANTIC:
                log.warning(s)
                return False
            else:
                log.exception(s)
                raise
        if _ret != 0:
            log.error("Could not initialize the station!")
            return False
        # ### see existing deploy.sh!?
        # TODO: what about other external resources? docker-compose*.yml etc...?
        # TODO: restart hilbert-station?
        return True

    def app_change(self, app_id):
        """Switch the station's top application to ``app_id`` via SSH."""
        _a = self.get_address()
        assert _a is not None
        assert isinstance(_a, HostAddress)
        try:
            _ret = _a.ssh([_HILBERT_STATION, _HILBERT_STATION_OPTIONS, "app_change", app_id])
        except:
            s = "Could not change top application on the station '{0}' to '{1}'".format(_a, app_id)
            if not PEDANTIC:
                log.warning(s)
                return False
            else:
                log.exception(s)
                raise
        return (_ret == 0)

    def poweron(self):
        """Power the station on using its configured power-on method."""
        _d = self.get_data()
        assert _d is not None
        poweron = _d.get(self._poweron_tag, None)
        if poweron is None:
            log.error("Missing/wrong Power-On Method configuration for this station!")
            raise Exception("Missing/wrong Power-On Method configuration for this station!")
        return poweron.start()  # , action_args????

    def run_action(self, action, action_args):
        """
        Run the given action on/with this station

        :param action_args: arguments to the action
        :param action:
             start (poweron)
             stop (shutdown)
             cfg_deploy
             app_change <ApplicationID>
        :return: result of the corresponding handler (typically a bool).
        """
        if action not in ['start', 'stop', 'cfg_deploy', 'app_change']:
            raise Exception("Running action '{0}({1})' is not supported!".format(action, action_args))
        # Run 'ssh address hilbert-station action action_args'?!
        if action == 'start':
            _ret = self.poweron()  # action_args
        elif action == 'cfg_deploy':
            _ret = self.deploy()  # action_args
        elif action == 'stop':
            _ret = self.shutdown()  # action_args
        elif action == 'app_change':
            _ret = self.app_change(action_args)  # ApplicationID
        return _ret

    def get_base(self):
        """StationID of the station this one extends (None if it extends nothing)."""
        _d = self.get_data()
        assert _d is not None
        _b = _d.get(self._extends_tag, None)  # StationID (validated...)
        return _b

    def extend(delta, base):  # delta == self!
        """Apply station inheritance: fill ``delta`` (self) from ``base``.

        ``delta`` must carry the 'extends' tag, ``base`` must be fully
        resolved (no 'extends').  Client settings are merged (delta wins);
        every other key missing in delta is copied over from base.
        """
        assert delta.get_base() is not None
        assert base.get_base() is None
        # NOTE: at early stage there may be no parent data...
        if delta.get_parent() is not None:
            if delta.get_parent().get_data() is not None:
                assert delta.get_base() in delta.get_parent().get_data()
                assert delta.get_parent().get_data().get(delta.get_base(), None) == base
        _d = delta.get_data()
        _b = base.get_data()
        assert delta._extends_tag in _d
        assert delta._extends_tag not in _b
        del _d[delta._extends_tag]
        assert delta.get_base() is None
        # NOTE: Extend/merge the client settings:
        k = delta._client_settings_tag
        bb = _b.get(k, None)
        if bb is not None:
            dd = _d.get(k, None)
            if dd is None:
                dd = StationClientSettings.parse(None, parent=delta, id=k)
            assert isinstance(dd, StationClientSettings)
            assert isinstance(bb, StationClientSettings)
            dd.extend(bb)
            _d[k] = dd
        # NOTE: the following is an application of delta to base data
        for k in _b:  # NOTE: take from base only the missing parts
            assert k != delta._extends_tag
            if k == delta._client_settings_tag:
                continue
            v = _d.get(k, None)
            if v is None:  # key from base is missing or None in delta?
                _d[k] = _b[k]  # TODO: is copy() required for complicated structures?
###############################################################
class BaseIDMap(BaseValidator):
    """Mapping: SomeTypeID -> AnyType

    Validates a YAML mapping by parsing every key with an ID rule and every
    value with a value rule; the (ID rule, value rule) pair is selected via
    detect_type().  Subclasses set self._default_type and self._types.
    """
    def __init__(self, *args, **kwargs):
        super(BaseIDMap, self).__init__(*args, **kwargs)
        self._default_type = None  # subclasses must override
        self._types = {}  # type -> (TypeID, Type)
        self._default_input_data = {}  # NOTE: bears no .lc with line & col data!
    def detect_type(self, d):
        """determine the type of variadic data for the format version"""
        assert not (self._default_type is None)
        assert len(self._types) > 0
        return self._default_type
    def validate(self, d):
        """Parse every (key, value) pair; store {parsed_id: parsed_value} on
        success.  All offending entries are reported before returning False."""
        _input_data = d
        if d is None:
            d = self._default_input_data
        assert isinstance(d, dict)
        self._type = self.detect_type(d)
        assert self._type is not None
        assert self._type in self._types
        (_id_rule, _rule) = self._types[self._type]
        try:
            _lc = d.lc  # starting position? (ruamel.yaml attaches .lc to parsed mappings)
        except:
            if _input_data is not None:
                log.warning("Input data bears no ruamel.yaml line/column data!")
            _lc = (0, 0)
        (s, c) = _get_line_col(_lc)
        _d = {}
        _ret = True
        for offset, k in enumerate(d):
            v = d[k]  # TODO: d[offset]???
            l = s + offset  # approximate source line of this entry for error reports
            _lc = (l, c)
            _id = None
            _vv = None
            try:
                _id = _id_rule.parse(k)
            except ConfigurationError as err:
                _key_error(k, v, _lc, "Invalid ID: '{}' (type: '%s')" % (self._type))  # Raise Exception?
                pprint(err)
                _ret = False
            try:
                _vv = _rule.parse(v, parent=self, id=k)
            except ConfigurationError as err:
                _value_error(k, v, _lc, "invalid Value (for ID: '{}') (type: '%s')" % (self._type))  # Raise Exception?
                pprint(err)
                _ret = False
            if _ret:
                assert _id is not None
                assert _vv is not None
                # _id = _id.get_data()  # ??
                _d[_id] = _vv  # .get_data()
        if _ret:
            self.set_data(_d)
        return _ret
###############################################################
class GlobalServices(BaseIDMap):
    """Global mapping of ServiceIDs onto service records."""

    def __init__(self, *args, **kwargs):
        super(GlobalServices, self).__init__(*args, **kwargs)
        key = "default_global_services"
        self._default_type = key
        self._types = {key: (ServiceID, ServiceWrapper)}
###############################################################
# "client_settings": (False, StationClientSettings) # IDMap : (BaseID, BaseString)
class StationClientSettings(BaseIDMap):
    """Station client settings: a mapping of client variables to scalar values."""

    def __init__(self, *args, **kwargs):
        super(StationClientSettings, self).__init__(*args, **kwargs)
        key = "default_station_client_settings"
        self._default_type = key
        # ! TODO: only strings for now! More scalar types?! BaseScalar?
        # TODO: FIXME: check for default hilbert applicationId!
        self._types = {key: (ClientVariable, ScalarValidator)}

    def extend(delta, base):
        """Merge the ``base`` settings underneath this (delta) instance's own."""
        assert isinstance(base, StationClientSettings)
        inherited = base.get_data()
        if inherited is None:
            return  # nothing to inherit; keep delta's own data untouched
        assert isinstance(inherited, dict)
        merged = inherited.copy()
        own = delta.get_data()
        if own is not None:
            merged.update(own)  # delta overrides inherited base values
        delta.set_data(merged)
###############################################################
class GlobalApplications(BaseIDMap):
    """Global mapping of ApplicationIDs onto application records."""

    def __init__(self, *args, **kwargs):
        super(GlobalApplications, self).__init__(*args, **kwargs)
        key = "default_global_applications"
        self._default_type = key
        self._types = {key: (ApplicationID, ApplicationWrapper)}
###############################################################
class GlobalProfiles(BaseIDMap):
    """Global mapping of ProfileIDs onto Profile records."""

    def __init__(self, *args, **kwargs):
        super(GlobalProfiles, self).__init__(*args, **kwargs)
        key = "default_global_profiles"
        self._default_type = key
        self._types = {key: (ProfileID, Profile)}
###############################################################
class GlobalStations(BaseIDMap):
    """Global mapping of station IDs to Station's.

    On top of the usual ID-map parsing, resolves the station 'extends'
    inheritance: stations extending an already-resolved base are merged
    iteratively; anything left over indicates a cyclic dependency.
    """

    def __init__(self, *args, **kwargs):
        super(GlobalStations, self).__init__(*args, **kwargs)
        self._default_type = "default_global_stations"
        self._types = {self._default_type: (StationID, Station)}  # NOTE: {StationID -> Station}

    def validate(self, d):
        """Extension mechanism on top of the usual ID Mapping parsing"""
        if not BaseIDMap.validate(self, d):
            return False
        sts = self.get_data()  # NOTE: may be handy for postprocessing!
        _ret = True
        _processed = {}  # stations with no (remaining) base
        _todo = {}       # stations still waiting for their base to be resolved
        for k in sts:
            v = sts[k]
            _b = v.get_base()
            if _b is None:  # TODO: FIXME: add to Station API!
                _processed[k] = v
            else:
                assert _b in sts  # NOTE: any station extends some _known_ station!
                _todo[k] = v
        # Repeatedly resolve stations whose base is already processed; this
        # handles multi-level extension chains in any declaration order.
        _chg = True
        while bool(_todo) and _chg:
            _chg = False
            _rest = {}
            while bool(_todo):
                k, v = _todo.popitem()
                _b = v.get_base()
                assert k != _b  # no infinite self-recursive extensions!
                # BUGFIX: a base may itself extend another station and might not
                # be processed yet -- defer such entries instead of asserting
                # `_b in _processed` here (the old assert made the else-branch
                # unreachable and crashed on two-level extension chains).
                if _b in _processed:
                    v.extend(_processed[_b])
                    _processed[k] = v
                    assert v.get_base() is None
                    _chg = True
                else:
                    _rest[k] = v
            _todo = _rest
        if bool(_todo):
            log.error('Cyclic dependencies between stations: {}'.format(_todo))
            _ret = False
        # TODO: FIXME: check for required fields after extension only!!!
        return _ret
###############################################################
class BaseList(BaseValidator):
    """List of entities of the same type.

    Accepts either a single string (promoted to a one-element list) or a
    sequence/mapping whose items are parsed with the subclass-selected type.
    """

    def __init__(self, *args, **kwargs):
        super(BaseList, self).__init__(*args, **kwargs)
        self._default_type = None  # subclasses must override
        self._types = {}

    def validate(self, d):
        """String or a sequence of strings"""
        if d is None:
            d = self._default_input_data
        assert self._default_type is not None
        assert len(self._types) > 0
        # NOTE: determine the class of items based on the version and sample data
        self._type = self._types[self._default_type]
        assert self._type is not None
        if (not isinstance(d, (list, dict, tuple, set))) and isinstance(d, string_types):
            try:  # NOTE: Test single string item:
                _d = [self._type.parse(StringValidator.parse(d, parent=self))]
                self.set_data(_d)
                return True
            except:
                pass  # Not a single string entry...
        # NOTE: handle a collection (sequence or mapping) of given _type:
        _d = []
        _ret = True
        for idx, i in enumerate(d):
            _v = None
            try:
                _v = self._type.parse(i, parent=self, id='[' + str(idx) + ']')
                _d.insert(idx, _v)
            except ConfigurationError as err:
                # BUGFIX: plain (non-ruamel) containers bear no .lc attribute;
                # fall back to (0, 0) like BaseIDMap.validate does instead of
                # raising AttributeError inside the error handler.
                try:
                    _lc = d.lc
                except AttributeError:
                    _lc = (0, 0)
                _value_error("[%d]" % idx, d, _lc, "Wrong item in the given sequence!")
                pprint(err)
                _ret = False
        if _ret:
            self.set_data(_d)
        return _ret
###############################################################
class GroupIDList(BaseList):
    """List of GroupIDs or a single GroupID!"""

    def __init__(self, *args, **kwargs):
        super(GroupIDList, self).__init__(*args, **kwargs)
        key = "default_GroupID_list"
        self._default_type = key
        self._types = {key: GroupID}
###############################################################
class ServiceList(BaseList):
    """List of ServiceIDs or a single ServiceID!"""

    def __init__(self, *args, **kwargs):
        super(ServiceList, self).__init__(*args, **kwargs)
        key = "default_ServiceID_list"
        self._default_type = key
        self._types = {key: ServiceID}
###############################################################
class ServiceTypeList(BaseList):
    """List of ServiceType's or a single ServiceType!"""

    def __init__(self, *args, **kwargs):
        super(ServiceTypeList, self).__init__(*args, **kwargs)
        key = "default_ServiceType_list"
        self._default_type = key
        self._types = {key: ServiceType}
###############################################################
class Group(BaseRecordValidator): # ? TODO: GroupSet & its .parent?
"""Group"""
def __init__(self, *args, **kwargs):
super(Group, self).__init__(*args, **kwargs)
self._default_type = "default_group"
self._include_tag = text_type('include')
self._exclude_tag = text_type('exclude')
self._intersectWith_tag = text_type('intersectWith')
_v = self.get_version(default=semantic_version.Version('0.7.0'))
if _v >= semantic_version.Version('0.7.0'):
self._intersectWith_tag = text_type('intersect_with')
self._exclude_list = None
self._intersection_list = None
self._include_list = None
self._station_list = None
default_rule = {
self._include_tag: (False, GroupIDList),
self._exclude_tag: (False, GroupIDList),
self._intersectWith_tag: (False, GroupIDList)
}
# text_type('name'): (False, BaseUIString),
# text_type('description'): (False, BaseUIString),
# text_type('icon'): (False, Icon)
self._types = {self._default_type: default_rule}
def detect_extra_rule(self, key, value): # Any extra unlisted keys in the mapping?
if value is None: # Set item!
return GroupID, ScalarValidator
return None
def validate(self, d):
if d is None:
d = self._default_input_data
_ret = BaseRecordValidator.validate(self, d)
if not _ret:
return _ret
##############################################################
# NOTE: pre-process group definitions & get rid of shortcut notation for unions
_d = self.get_data()
_include_list = _d.get(self._include_tag, None)
if _include_list is not None:
assert isinstance(_include_list, GroupIDList)
_include_list = _include_list.get_data()
assert isinstance(_include_list, list)
# _include_list = [k for k in _include_list.keys()]
else:
_include_list = []
assert _include_list is not None
assert isinstance(_include_list, list)
for k in _d:
assert k in _d
v = _d[k]
if v is not None:
continue
_include_list.append(k)
self._include_list = set(_include_list)
# log.debug('union: [%s]', str(self._include_list))
if not self._include_list:
log.debug('Group without union: [%s]', str(d))
_exclude_list = _d.get(self._exclude_tag, None)
if _exclude_list is not None:
assert isinstance(_exclude_list, GroupIDList)
_exclude_list = _exclude_list.get_data()
assert isinstance(_exclude_list, list)
_exclude_list = set(_exclude_list)
self._exclude_list = _exclude_list
# log.debug('exclusion: [%s]', str(self._exclude_list))
_intersection_list = _d.get(self._intersectWith_tag, None)
if _intersection_list is not None:
assert isinstance(_intersection_list, GroupIDList)
_intersection_list = _intersection_list.get_data()
assert isinstance(_intersection_list, list)
_intersection_list = set(_intersection_list)
self._intersection_list = _intersection_list
# log.debug('intersection: [%s]', str(self._intersection_list))
return True
def get_stations(self):
return self._station_list
def set_stations(self, l):
assert l is not None
assert isinstance(l, (set, frozenset))
self._station_list = l
def data_dump(self):
l = self.get_stations()
if l is not None:
assert isinstance(l, (set, frozenset))
return sorted(l)
assert self._include_list is not None
assert isinstance(self._include_list, (set, frozenset))
_d = {self._include_tag: sorted(self._include_list)}
if self._exclude_list is not None:
assert isinstance(self._exclude_list, (set, frozenset))
_d[self._exclude_tag] = sorted(self._exclude_list)
if self._intersection_list is not None:
assert isinstance(self._intersection_list, (set, frozenset))
_d[self._intersectWith_tag] = sorted(self._intersection_list)
return _d
def computable(self, known_groups):
    """Try to resolve this group to a concrete station set.

    Returns the computed set when every referenced group ID is already
    present in *known_groups*; otherwise returns None so the caller can
    retry once more groups are known.
    """
    assert self._include_list is not None
    assert isinstance(self._include_list, (set, frozenset))

    # Union of all included groups.
    stations = set()
    for gid in self._include_list:
        if gid not in known_groups:
            return None
        stations |= known_groups[gid]

    # Subtract every excluded group.
    if self._exclude_list is not None:
        assert isinstance(self._exclude_list, (set, frozenset))
        for gid in self._exclude_list:
            if gid not in known_groups:
                return None
            stations -= known_groups[gid]

    # Intersect with the union of all intersection groups.
    if self._intersection_list is not None:
        assert isinstance(self._intersection_list, (set, frozenset))
        union = set()
        for gid in self._intersection_list:
            if gid not in known_groups:
                return None
            union |= known_groups[gid]
        stations &= union

    return stations
###############################################################
class GlobalGroups(BaseIDMap):
    """Top-level 'Groups' section: a mapping of GroupID -> Group."""

    def __init__(self, *args, **kwargs):
        super(GlobalGroups, self).__init__(*args, **kwargs)
        self._default_type = "default_global_groups"
        # Keys are validated as GroupID, values as Group.
        self._types = {self._default_type: (GroupID, Group)}
###############################################################
class Preset(BaseRecordValidator):
    """Preset (placeholder).

    Not implemented: the constructor always raises NotImplementedError,
    so a Preset can never actually be instantiated yet.
    """

    def __init__(self, *args, **kwargs):
        super(Preset, self).__init__(*args, **kwargs)
        self._default_type = "default_preset"
        # self.__tag = "Version"
        # Empty rule set for now; fields to be designed:
        default_rule = {
            # self.__tag: (True , ??), # Mandatory
            # self.__tag: (False, ??), # Optional
        }
        self._types = {self._default_type: default_rule}
        raise NotImplementedError("Presets are not supported yet!")
###############################################################
class GlobalPresets(BaseIDMap):  # Dummy for now!
    """Top-level 'Presets' section: mapping PresetID -> Preset.

    Presets are not supported yet; validate() accepts anything and just
    logs a warning.
    """

    def __init__(self, *args, **kwargs):
        super(GlobalPresets, self).__init__(*args, **kwargs)
        self._default_type = "default_global_presets"
        self._types = {self._default_type: (PresetID, Preset)}

    def validate(self, d):
        # Deliberately permissive: any content passes for now.
        if d is None:
            d = self._default_input_data
        log.warning("Presets are not supported yet!")
        # raise NotImplementedError("Presets are not supported yet!")
        return True
###############################################################
class Hilbert(BaseRecordValidator):
    """General Hilbert Configuration format.

    Root validator of a configuration file: wires together the global
    Services/Applications/Profiles/Stations/Groups/Presets sections and
    cross-checks them against each other in validate().
    """

    def __init__(self, *args, **kwargs):
        # By default parse() returns the validator object, not raw data.
        kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', False)
        # TODO: add an option like 'INPUT_DIRNAME'? E.g. here and to ::parse() method?
        super(Hilbert, self).__init__(*args, **kwargs)  # This is the Main Root of all Validators!

        self._default_type = "default_global"

        # Top-level section tags:
        self._version_tag = text_type('Version')
        self._applications_tag = text_type('Applications')
        self._services_tag = text_type('Services')
        self._profiles_tag = text_type('Profiles')
        self._stations_tag = text_type('Stations')
        self._groups_tag = text_type('Groups')

        ### explicit (optional) Type?
        default_rule = {
            self._version_tag: (True, SemanticVersionValidator),
            # Mandatory, specifies supported Types of Config's Entity
            self._services_tag: (False, GlobalServices),
            self._applications_tag: (False, GlobalApplications),
            self._profiles_tag: (True, GlobalProfiles),
            self._stations_tag: (True, GlobalStations),
            self._groups_tag: (False, GlobalGroups),  # Optional
            text_type('Presets'): (False, GlobalPresets),  # Optional. May be removed! default?
        }

        self._types = {self._default_type: default_rule}
        self._default_input_data = None

    @classmethod
    def parse(cls, d, *args, **kwargs):
        """Parse and validate a whole configuration mapping *d*.

        Checks the mandatory version tag first and publishes it via
        set_version() (so other validators can rely on it), then runs
        full validation. Raises ConfigurationError on failure.
        """
        self = cls(*args, **kwargs)

        if self._version_tag not in d:
            _key_note(self._version_tag, d.lc, "ERROR: Missing mandatory '{}' key field!")
            raise ConfigurationError(u"{}: {}".format("ERROR:", "Missing version tag '{0}' in the input: '{1}'!".format(
                self._version_tag, d)))

        try:
            _v = SemanticVersionValidator.parse(d[self._version_tag], parent=self, partial=True,
                                                parsed_result_is_data=True, id=self._version_tag)
        except:
            _value_error(self._version_tag, d, d.lc, "Wrong value of global '{}' specification!")
            raise

        self.set_version(_v)  # NOTE: globally available now!

        if self.validate(d):  # NOTE: validate should not **explicitly** throw exceptions!!!
            if self._parsed_result_is_data:
                return self.get_data()
            return self

        # NOTE: .parse should!
        raise ConfigurationError(u"{}: {}".format("ERROR:", "Invalid data: '{}'!".format(d)))

    def validate(self, d):
        """Cross-validate the whole configuration tree.

        After structural per-section validation, checks global invariants:
        key uniqueness across sections, station->profile wiring, explicit
        group resolution (fixed-point iteration), and application
        compatibility groups. Returns True/False instead of raising.
        """
        global PEDANTIC

        if d is None:
            d = self._default_input_data
        assert isinstance(d, dict)

        # Structural validation of all sections first:
        _ret = BaseRecordValidator.validate(self, d)

        # The version tag must be the very first key of the mapping:
        for offset, k in enumerate(d):
            if k == self._version_tag:
                if offset != 0:
                    if not PEDANTIC:
                        log.warning("'{}' specified correctly but not ahead of everything else (offset: {})!"
                                    .format(self._version_tag, offset))
                    else:
                        log.error("'{}' specified correctly but not ahead of everything else (offset: {})!"
                                  .format(self._version_tag, offset))
                        _ret = False
                break

        if not _ret:
            log.error("Wrong Hilbert configuration!")
            return _ret

        _d = self.get_data()

        # NOTE: check uniqueness of keys among (Services/Applications):
        # TODO: add get_service(s) and get_application(s)?
        # _services = self.query("{0}/{1}".format(self._services_tag, 'keys'))
        # _applications = self.query("{0}/{1}".format(self._applications_tag, 'keys')) # _d.get()
        __services = d.get(self._services_tag, {})  # Rely on the above and use get_data?
        __applications = d.get(self._applications_tag, {})

        if (__services is not None) and (__applications is not None):
            if (len(__services) > 0) and (len(__applications) > 0):
                for k in __services:
                    if k in __applications:
                        log.error("'{}' is both a ServiceID and an ApplicationID:".format(k))
                        _key_error(k, __services[k], __services.lc.key(k), "Service key: {}")
                        _key_error(k, __applications[k], __applications.lc.key(k), "Application key: {}")
                        _ret = False

        if not _ret:
            log.warning('duplicating service and application ID!')
            return _ret

        # Unwrap validator objects into plain data where necessary:
        _stations = _d.get(self._stations_tag, {})
        if isinstance(_stations, BaseValidator):
            _stations = _stations.get_data()
        _profiles = _d.get(self._profiles_tag, {})
        if isinstance(_profiles, BaseValidator):
            _profiles = _profiles.get_data()
        _groups = _d.get(self._groups_tag, {})
        if isinstance(_groups, BaseValidator):
            _groups = _groups.get_data()

        __profiles = d.get(self._profiles_tag, {})  # Rely on the above and use get_data?
        __stations = d.get(self._stations_tag, {})
        __groups = d.get(self._groups_tag, {})

        # Stations and profiles also act as implicit single-purpose groups:
        _implicit_groups = {}

        # ! NOTE: check for GroupID <-> ProfileID
        # if __profiles is not None:
        # if len(__profiles) > 0:
        for k in _profiles:
            assert k in __profiles
            if k in __groups:
                log.error("'{}' is both a ProfileID and a GroupID:".format(k))
                _key_error(k, __profiles[k], __profiles.lc.key(k), "Profile key: {}")
                _key_error(k, __groups[k], __groups.lc.key(k), "Groups key: {}")
                _ret = False

        if not _ret:
            log.error('bad configuration: duplicating profile and group ID!')
            return _ret

        # NOTE: checking uniqueness of IDs among (Stations vs Profiles&Groups)
        # if __stations is not None:
        # if len(__stations) > 0:
        for k in _stations:
            assert k in __stations
            if k in __groups:
                log.error("'{}' is both a StationID and a GroupID:".format(k))
                _key_error(k, __stations[k], __stations.lc.key(k), "Station key: {}")
                _key_error(k, __groups[k], __groups.lc.key(k), "Groups key: {}")
                _ret = False
            if k in __profiles:
                log.error("'{}' is both a StationID and a ProfileID:".format(k))
                _key_error(k, __stations[k], __stations.lc.key(k), "Station key: {}")
                _key_error(k, __profiles[k], __profiles.lc.key(k), "Profile key: {}")
                _ret = False

            # Wire each station to its profile (and vice versa):
            s = _stations[k]
            assert s is not None
            assert isinstance(s, Station)
            p_ref = s.get_profile_ref()
            if p_ref not in _profiles:
                log.error("bad configuration: station '%s' has unknown/invalid profile specification: '%s'", k, p_ref)
                return False
            p = _profiles[p_ref]
            assert p is not None
            p.add_station(k, s)
            s.set_profile(p)
            _implicit_groups[k] = set([k])  # NOTE: -> {k} ???

        if not _ret:
            log.error("bad configuration: duplicating: station IDs same as group or profile!")
            return _ret

        # Each profile is an implicit group of all its stations:
        # if __profiles is not None:
        # if len(__profiles) > 0:
        for k in _profiles:
            # assert k in __profiles
            assert k not in _implicit_groups
            _implicit_groups[k] = _profiles[k].get_stations()
            if not _implicit_groups[k]:
                log.info('Profile [%s] corresponds to no stations!', k)

        # for k in _implicit_groups:
        #     print('[[[{}]]]]'.format(k))
        #     pprint(len(_implicit_groups[k]))

        # Fixed-point iteration: explicit groups may refer to each other,
        # so keep resolving until no further group becomes computable.
        _todo = [k for k in _groups.keys()]
        n = len(_todo)
        while n > 0:
            n = 0
            if len(_todo) > 0:
                _rest = []
                for k in _todo:  # TODO: FIXME: while/pop?
                    assert k in _groups
                    assert k not in _implicit_groups
                    l = _groups[k].computable(_implicit_groups)
                    if l is not None:
                        assert isinstance(l, (set, frozenset))
                        _groups[k].set_stations(l)
                        _implicit_groups[k] = l
                        if not l:
                            log.info('Explicit Group [%s] is empty!', k)
                        n = n + 1
                        # print('Group [[[', k, ']]]: ')
                        # pprint(_groups[k].data_dump())
                    else:  # NOTE: not computable ATM - try later...
                        _rest.append(k)
                _todo = _rest

        # for k in _implicit_groups:
        #     print('[[[{}]]]]'.format(k))
        #     pprint(len(_implicit_groups[k]))

        # Anything left over could not be resolved (unknown or cyclic refs):
        if len(_todo) > 0:
            if not PEDANTIC:
                log.warning("Bad group definition(s) detected: %s", str(_todo))
            else:
                log.error("Bad group definition(s) detected: %s", str(_todo))
                return False
            # pprint(_groups[k].data_dump())
            # print('new groups: ', n)
            # pprint(_implicit_groups.keys())

        # ! NOTE: Process all applications:
        _applications = _d.get(self._applications_tag, {})
        if isinstance(_applications, BaseValidator):
            _applications = _applications.get_data()

        # if __applications is not None:
        # if len(__applications) > 0:
        for k in _applications:
            app = _applications[k]
            assert isinstance(app, DockerComposeApplication)
            g = app.get_group()
            assert isinstance(g, Group)
            l = g.computable(_implicit_groups)
            if l is not None:
                # Attach the application to every compatible station:
                g.set_stations(l)
                for sid in g.get_stations():
                    assert sid in _stations
                    s = _stations[sid]
                    assert isinstance(s, Station)
                    s.add_application(k, app)
            else:
                if PEDANTIC:
                    log.error("Bad Group-Of-Compatible-Stations definition in application [%s]: ", k)
                    _key_error(k, __applications[k], __applications.lc.key(k), "Application key: {}")
                    _ret = False
                else:  # NOTE: ignore such apps...
                    log.warning("Bad Group-Of-Compatible-Stations definition in application [%s]: ", k)
                    _key_error(k, __applications[k], __applications.lc.key(k), "Application key: {}")

        return _ret
###############################################################
def load_yaml(f, loader=VerboseRoundTripLoader, version=(1, 2), preserve_quotes=True):
    """Parse YAML from *f*, wrapping I/O and parser failures into
    ConfigurationError (prefixed with the qualified exception name)."""
    try:
        return yaml.load(f, Loader=loader, version=version, preserve_quotes=preserve_quotes)
    except (IOError, yaml.YAMLError) as exc:
        qualified = getattr(exc, '__module__', '') + '.' + exc.__class__.__name__
        raise ConfigurationError(u"{}: {}".format(qualified, exc))
def load_yaml_file(filename):
    """Open *filename* for reading and parse it with load_yaml()."""
    with open(filename, 'r') as fh:
        return load_yaml(fh)
###############################################################
def parse_hilbert(d, parent=None):
    """Parse mapping *d* as a whole Hilbert configuration.

    Returns the Hilbert validator object (not raw data); raises
    ConfigurationError on invalid input (see Hilbert.parse).
    """
    assert d is not None
    return Hilbert.parse(d, parent=parent, parsed_result_is_data=False, id="/")
###############################################################
def yaml_dump(*args, **kwargs):
    """Dump data through ruamel's round-trip dumper.

    With pretty=True (the default) emit indented block-style YAML; with
    pretty=False emit a compact flow-style form. Keyword arguments passed
    by the caller always take precedence over these presets.
    """
    pretty = kwargs.pop('pretty', True)

    # Presets shared by both modes:
    # kwargs.setdefault('allow_unicode', True)
    # kwargs.setdefault('encoding', 'utf-8')
    kwargs.setdefault('explicit_start', False)
    kwargs.setdefault('explicit_end', False)
    # kwargs.setdefault('tags', False)
    kwargs.setdefault('canonical', False)

    if pretty:
        # kwargs.setdefault('version', (1, 2))
        kwargs.setdefault('default_flow_style', False)
        kwargs.setdefault('indent', 2)
        kwargs.setdefault('width', 80)
        kwargs.setdefault('line_break', True)
        kwargs.setdefault('block_seq_indent', 2)
        # Align top-level colons only for non-empty mappings:
        if isinstance(args[0], dict) and (len(args[0]) > 0):
            kwargs.setdefault('top_level_colon_align', True)
            kwargs.setdefault('prefix_colon', '\t')
    else:
        kwargs.setdefault('default_flow_style', True)
        kwargs.setdefault('default_style', 'json-like-with-type-!!tags')
        kwargs.setdefault('indent', 0)
        kwargs.setdefault('width', 99999)
        kwargs.setdefault('line_break', False)
        kwargs.setdefault('block_seq_indent', 0)

    # See ruamel.yaml.round_trip_dump for the full keyword signature.
    return yaml.round_trip_dump(*args, **kwargs)
###############################################################
def json_dump(*args, **kwargs):
    """Serialize via json.dumps with project presets.

    pretty=True (default): 2-space indent, sorted keys. pretty=False:
    the most compact separators and no indentation. Caller-supplied
    keyword arguments always win over these presets.
    """
    pretty = kwargs.pop('pretty', True)
    kwargs.setdefault('check_circular', False)
    kwargs.setdefault('allow_nan', True)

    if pretty:
        # kwargs.setdefault('ensure_ascii', False)
        # kwargs.setdefault('encoding', 'utf-8')
        kwargs.setdefault('indent', 2)
        kwargs.setdefault('sort_keys', True)
        kwargs.setdefault('separators', (',', ': '))
    else:  # compact form
        kwargs.setdefault('indent', None)
        kwargs.setdefault('separators', (',', ':'))

    return json.dumps(*args, **kwargs)
###############################################################
#class unicode(unicode):
# def __repr__(self):
# return unicode.__repr__(self).lstrip("u")
def apply_str(d):
    """Recursively convert all text in *d* (dict keys/values, list items,
    bare strings) to native ``str``; other values pass through unchanged."""
    if isinstance(d, dict):
        return {apply_str(k): apply_str(v) for k, v in d.items()}
    if isinstance(d, list):
        return [apply_str(v) for v in d]
    if isinstance(d, string_types):
        return str(d)
    # sets/tuples and scalars are returned as-is
    return d
|
<reponame>Defkil/htwg-scala-durak
package de.htwg.se.durak.aview.gui
import de.htwg.se.durak.aview.gui.scenes.{MainMenu, PlayScene, PlayerSelectScene, WaitScene}
import de.htwg.se.durak.controller.GameDataChanged
import de.htwg.se.durak.controller.controllerComponent.ControllerInterface
import javafx.application.Platform
import scalafx.application.JFXApp
import scalafx.application.JFXApp.PrimaryStage
import scalafx.scene.Scene
import scala.swing.Reactor
/**
 * ScalaFX based GUI for Durak: listens to the controller and swaps the
 * active scene whenever the game data changes.
 */
class GUI(controller: ControllerInterface) extends JFXApp with Reactor {
  var input = ""
  listenTo(controller)

  /** Exit the app when siteID == -1; otherwise show the scene for the current siteID. */
  def updateGUI(): Unit = {
    if(controller.roundData.siteID == -1) {
      Platform.exit()
    } else {
      stage.scene = route(controller.roundData.siteID, controller.roundData.param)
    }
  }

  reactions += {
    case event: GameDataChanged => updateGUI()
  }

  stage = new PrimaryStage {
    title = "Durak"
  }
  updateGUI() // set scene

  /**
   * Map a siteID to the Scene to display.
   * NOTE(review): `param` is not used here, and an unlisted siteID raises
   * a MatchError — confirm whether a default case is wanted.
   */
  def route(siteID: Int, param: Option[List[String]]): Scene = {
    siteID match {
      case 0 => new MainMenu(controller)
      case 3 => new PlayerSelectScene(controller)
      case 10 => new WaitScene(controller)
      case 11 => new PlayScene(controller)
      case 12 => new PlayScene(controller)
    }
  }
}
|
<filename>src/pages/settings/Payments/AddDebitCardPage.js<gh_stars>100-1000
import React, {Component} from 'react';
import {withOnyx} from 'react-native-onyx';
import {
View,
ScrollView,
} from 'react-native';
import HeaderWithCloseButton from '../../../components/HeaderWithCloseButton';
import Navigation from '../../../libs/Navigation/Navigation';
import ScreenWrapper from '../../../components/ScreenWrapper';
import TextInputWithLabel from '../../../components/TextInputWithLabel';
import styles from '../../../styles/styles';
import StatePicker from '../../../components/StatePicker';
import Text from '../../../components/Text';
import TextLink from '../../../components/TextLink';
import withLocalize, {withLocalizePropTypes} from '../../../components/withLocalize';
import compose from '../../../libs/compose';
import {addBillingCard} from '../../../libs/actions/PaymentMethods';
import Button from '../../../components/Button';
import KeyboardAvoidingView from '../../../components/KeyboardAvoidingView';
import FixedFooter from '../../../components/FixedFooter';
import Growl from '../../../libs/Growl';
import {
isValidAddress, isValidExpirationDate, isValidZipCode, isValidDebitCard, isValidSecurityCode,
} from '../../../libs/ValidationUtils';
import CheckboxWithLabel from '../../../components/CheckboxWithLabel';
const propTypes = {
    /* Onyx Props */
    ...withLocalizePropTypes,
};

// No component-specific defaults yet; kept for the standard wiring below.
const defaultProps = {
};
/**
 * Form page for adding a debit card as a payment method.
 * Validates all fields client-side, then submits via addBillingCard().
 */
class DebitCardPage extends Component {
    constructor(props) {
        super(props);

        this.state = {
            nameOnCard: '',
            cardNumber: '',
            expirationDate: '',
            securityCode: '',
            billingAddress: '',
            city: '',
            selectedState: '',
            zipCode: '',
            acceptedTerms: false,
            isAddingCard: false,
        };

        this.toggleTermsOfService = this.toggleTermsOfService.bind(this);
        this.handleExpirationInput = this.handleExpirationInput.bind(this);
        this.handleCardNumberInput = this.handleCardNumberInput.bind(this);
        this.submit = this.submit.bind(this);
    }

    /**
     * Validate every form field in order, showing a Growl error for the
     * first invalid one.
     *
     * @returns {Boolean} true when all fields pass
     */
    validate() {
        if (this.state.nameOnCard === '') {
            Growl.error(this.props.translate('addDebitCardPage.error.invalidName'));
            return false;
        }

        // Spaces are stripped before the card-number check.
        if (!isValidDebitCard(this.state.cardNumber.replace(/ /g, ''))) {
            Growl.error(this.props.translate('addDebitCardPage.error.debitCardNumber'));
            return false;
        }

        if (!isValidExpirationDate(this.state.expirationDate)) {
            Growl.error(this.props.translate('addDebitCardPage.error.expirationDate'));
            return false;
        }

        if (!isValidSecurityCode(this.state.securityCode)) {
            Growl.error(this.props.translate('addDebitCardPage.error.securityCode'));
            return false;
        }

        if (!isValidAddress(this.state.billingAddress)) {
            Growl.error(this.props.translate('addDebitCardPage.error.address'));
            return false;
        }

        if (this.state.city === '') {
            Growl.error(this.props.translate('addDebitCardPage.error.addressCity'));
            return false;
        }

        if (this.state.selectedState === '') {
            Growl.error(this.props.translate('addDebitCardPage.error.addressState'));
            return false;
        }

        if (!isValidZipCode(this.state.zipCode)) {
            Growl.error(this.props.translate('addDebitCardPage.error.zipCode'));
            return false;
        }

        if (!this.state.acceptedTerms) {
            Growl.error(this.props.translate('addDebitCardPage.error.acceptedTerms'));
            return false;
        }

        return true;
    }

    // Submit the card if valid. NOTE(review): isAddingCard is never reset
    // here — presumably cleared elsewhere (e.g. via Onyx on completion);
    // confirm against addBillingCard().
    submit() {
        if (!this.validate()) {
            return;
        }
        this.setState({isAddingCard: true});
        addBillingCard(this.state);
    }

    toggleTermsOfService() {
        this.setState(prevState => ({acceptedTerms: !prevState.acceptedTerms}));
    }

    // Auto-insert the '/' after the two-digit month, except when the user
    // is erasing (so backspace is not fought by re-inserting the slash).
    handleExpirationInput(expirationDate) {
        let newExpirationDate = expirationDate;
        const isErasing = expirationDate.length < this.state.expirationDate.length;
        if (expirationDate.length === 2 && !isErasing) {
            newExpirationDate = `${expirationDate}/`;
        }
        this.setState({expirationDate: newExpirationDate});
    }

    // Accept only 0-16 digit input. NOTE(review): this rejects spaces even
    // though validate() strips spaces before checking — confirm whether
    // space-grouped entry should be allowed.
    handleCardNumberInput(newCardNumber) {
        if (/^[0-9]{0,16}$/.test(newCardNumber)) {
            this.setState({cardNumber: newCardNumber});
        }
    }

    render() {
        return (
            <ScreenWrapper>
                <KeyboardAvoidingView>
                    <HeaderWithCloseButton
                        title={this.props.translate('addDebitCardPage.addADebitCard')}
                        onCloseButtonPress={() => Navigation.dismissModal(true)}
                    />
                    <ScrollView style={styles.flex1} contentContainerStyle={styles.p5}>
                        <TextInputWithLabel
                            label={this.props.translate('addDebitCardPage.nameOnCard')}
                            placeholder={this.props.translate('addDebitCardPage.nameOnCard')}
                            containerStyles={[styles.flex1, styles.mb2]}
                            onChangeText={nameOnCard => this.setState({nameOnCard})}
                            value={this.state.nameOnCard}
                        />
                        <TextInputWithLabel
                            label={this.props.translate('addDebitCardPage.debitCardNumber')}
                            placeholder={this.props.translate('addDebitCardPage.debitCardNumber')}
                            keyboardType="number-pad"
                            containerStyles={[styles.flex1, styles.mb2]}
                            onChangeText={cardNumber => this.handleCardNumberInput(cardNumber)}
                            value={this.state.cardNumber}
                        />
                        <View style={[styles.flexRow, styles.mb2]}>
                            <TextInputWithLabel
                                label={this.props.translate('addDebitCardPage.expiration')}
                                placeholder={this.props.translate('addDebitCardPage.expirationDate')}
                                keyboardType="number-pad"
                                containerStyles={[styles.flex2, styles.mr4]}
                                onChangeText={expirationDate => this.handleExpirationInput(expirationDate)}
                                value={this.state.expirationDate}
                            />
                            <TextInputWithLabel
                                label={this.props.translate('addDebitCardPage.cvv')}
                                placeholder="123"
                                keyboardType="number-pad"
                                containerStyles={[styles.flex2]}
                                onChangeText={securityCode => this.setState({securityCode})}
                                value={this.state.securityCode}
                            />
                        </View>
                        <TextInputWithLabel
                            label={this.props.translate('addDebitCardPage.billingAddress')}
                            placeholder={this.props.translate('addDebitCardPage.streetAddress')}
                            containerStyles={[styles.flex1, styles.mb2]}
                            onChangeText={billingAddress => this.setState({billingAddress})}
                            value={this.state.billingAddress}
                        />
                        <TextInputWithLabel
                            label={this.props.translate('common.city')}
                            placeholder={this.props.translate('addDebitCardPage.cityName')}
                            containerStyles={[styles.flex1, styles.mb2]}
                            onChangeText={city => this.setState({city})}
                            value={this.state.city}
                        />
                        <View style={[styles.flexRow, styles.mb6]}>
                            <View style={[styles.flex2, styles.mr4]}>
                                <Text style={[styles.mb1, styles.formLabel]}>
                                    {this.props.translate('common.state')}
                                </Text>
                                <StatePicker
                                    onChange={state => this.setState({selectedState: state})}
                                    value={this.state.selectedState}
                                />
                            </View>
                            <TextInputWithLabel
                                label={this.props.translate('common.zip')}
                                placeholder={this.props.translate('common.zip')}
                                containerStyles={[styles.flex2]}
                                onChangeText={zipCode => this.setState({zipCode})}
                                value={this.state.zipCode}
                            />
                        </View>
                        <CheckboxWithLabel
                            isChecked={this.state.acceptedTerms}
                            onPress={this.toggleTermsOfService}
                            LabelComponent={() => (
                                <Text>
                                    {`${this.props.translate('common.iAcceptThe')} `}
                                    <TextLink href="https://use.expensify.com/terms">
                                        {`${this.props.translate('addDebitCardPage.expensifyTermsOfService')}`}
                                    </TextLink>
                                </Text>
                            )}
                        />
                    </ScrollView>
                    <FixedFooter>
                        <Button
                            success
                            onPress={this.submit}
                            style={[styles.w100]}
                            text={this.props.translate('common.save')}
                            isLoading={this.state.isAddingCard}
                            pressOnEnter
                        />
                    </FixedFooter>
                </KeyboardAvoidingView>
            </ScreenWrapper>
        );
    }
}
DebitCardPage.propTypes = propTypes;
DebitCardPage.defaultProps = defaultProps;
DebitCardPage.displayName = 'DebitCardPage';

// No Onyx keys subscribed yet; the empty withOnyx({}) keeps the HOC in place.
export default compose(
    withLocalize,
    withOnyx({
    }),
)(DebitCardPage);
|
const express = require('express');
const request = require('request');

const app = express();

/**
 * GET /convert?from=XXX&to=YYY&amount=N
 *
 * Converts `amount` from currency `from` to currency `to` using the
 * fixer.io latest-rates API. Success response (unchanged shape):
 *   { amount, from, to, convertedAmount }
 */
app.get('/convert', (req, res) => {
    let from = req.query.from;
    let to = req.query.to;
    let amount = req.query.amount;
    let url = `http://data.fixer.io/api/latest?access_key=YOUR_ACCESS_KEY&symbols=${from},${to}`;
    request(url, (err, response, body) => {
        // Never `throw` inside an async callback: it would crash the whole
        // process instead of failing just this request.
        if (err) {
            res.status(502).send({error: 'upstream request failed'});
            return;
        }
        let data;
        try {
            data = JSON.parse(body);
        } catch (parseErr) {
            res.status(502).send({error: 'invalid upstream response'});
            return;
        }
        // Guard against unknown currency codes / API error payloads, which
        // previously produced NaN results.
        if (!data.rates || !(from in data.rates) || !(to in data.rates)) {
            res.status(400).send({error: `unknown currency: ${from} or ${to}`});
            return;
        }
        // Both rates are quoted against the API's base currency, so the
        // cross rate is a simple ratio.
        let rate = data.rates[to] / data.rates[from];
        let convertedAmount = amount * rate;
        res.send({
            amount: amount,
            from: from,
            to: to,
            convertedAmount: convertedAmount
        });
    });
});

const port = process.env.PORT || 3000;
app.listen(port, () => console.log(`Currency converter app listening on port ${port}!`));
#!/usr/bin/env bash
# myShellEnv v 1.0 [aeondigital.com.br]
#
# Register the command by appending a new item to the command array,
# honoring the following fields (separated by semicolons).
#
# See /src/config/variables.sh for details on how to fill in this entry.
MSE_NOTES_RAW_COMMAND+=("DeleteLine;Completely remove the indicated line;dl;l")

#
# Execute the command: echo what is being done, drop the selected line
# from the in-memory note buffer, then refresh the note
# (via mse_notes_execCmdRefreshNote — presumably persisting the change;
# confirm in its definition).
mse_notes_execCmdDeleteLine() {
  printf ":: ${mseCmdType} ${mseCmdTargetLine}\n"
  unset MSE_NOTES_FILE_CONTENT[$mseCmdTargetIndex]
  mse_notes_execCmdRefreshNote "1"
}
|
#!/bin/bash
# Build the aceseq Docker image from the current directory.
# Optional first argument is the image tag (defaults to "latest").
docker build -t=aceseqimage:${1-latest} .
|
//===--- Lexer.cpp - Modula-2 Language Lexer --------------------*- C++ -*-===//
//
// Part of the M2Lang Project, under the Apache License v2.0 with
// LLVM Exceptions. See LICENSE file for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// Defines the lexer implementation.
///
//===----------------------------------------------------------------------===//
#include "m2lang/Lexer/Lexer.h"
using namespace m2lang;
namespace {
// Constants for TokenKinds.def
// Bit flags marking in which Modula-2 dialect(s) a keyword is active
// (matched against the LangOptions fields of the same names below).
enum {
  KEYPIM = 0x1,
  KEYISO = 0x2,
  KEYISOGS = 0x4,
  KEYISOOO = 0x8,
  KEYR10 = 0x10,
  KEYM2P = 0x20,
  KEYALL = KEYPIM | KEYISO | KEYISOGS | KEYISOOO | KEYR10 | KEYM2P
};

/// How a keyword is treated in the selected standard.
enum KeywordStatus {
  KS_Disabled, // Disabled
  KS_Enabled   // Enabled
};

/// Translates flags as specified in TokenKinds.def into keyword status
/// in the given language standard.
static KeywordStatus getKeywordStatus(const LangOptions &LangOpts,
                                      unsigned Flags) {
  // Keywords common to all dialects are always on.
  if (Flags == KEYALL)
    return KS_Enabled;
  if (LangOpts.PIM && (Flags & KEYPIM))
    return KS_Enabled;
  if (LangOpts.ISO && (Flags & KEYISO))
    return KS_Enabled;
  if (LangOpts.ISOGenerics && (Flags & KEYISOGS))
    return KS_Enabled;
  if (LangOpts.ISOObjects && (Flags & KEYISOOO))
    return KS_Enabled;
  if (LangOpts.M2R10 && (Flags & KEYR10))
    return KS_Enabled;
  if (LangOpts.M2Plus && (Flags & KEYM2P))
    return KS_Enabled;
  return KS_Disabled;
}
} // namespace
/// Register \p Keyword with token code \p TokenCode, but only when the
/// dialect flags say it is active under the current language options.
void KeywordFilter::addKeyword(llvm::StringRef Keyword,
                               tok::TokenKind TokenCode, unsigned Flags,
                               const LangOptions &LangOpts) {
  if (getKeywordStatus(LangOpts, Flags) == KS_Enabled) {
    HashTable.insert(std::make_pair(Keyword, TokenCode));
  }
}
/// Populate the keyword table from TokenKinds.def: each KEYWORD entry is
/// expanded into an addKeyword() call filtered by \p LangOpts.
void KeywordFilter::addKeywords(const LangOptions &LangOpts) {
  // Add keywords and tokens for the current language.
#define KEYWORD(NAME, FLAGS) \
  addKeyword(llvm::StringRef(#NAME), tok::kw_##NAME, FLAGS, LangOpts);
#include "m2lang/Basic/TokenKinds.def"
}
// TODO Optimize and move to separate file
namespace charinfo {
/// Character classification helpers. All predicates accept ASCII only:
/// any byte >= 128 fails the isASCII gate and thus every classification.
LLVM_READNONE inline bool isASCII(char c) {
  return static_cast<unsigned char>(c) <= 127;
}

LLVM_READNONE inline bool isVerticalWhitespace(char c) {
  return isASCII(c) && (c == '\r' || c == '\n');
}

LLVM_READNONE inline bool isHorizontalWhitespace(char c) {
  return isASCII(c) && (c == ' ' || c == '\t' || c == '\f' || c == '\v');
}

LLVM_READNONE inline bool isWhitespace(char c) {
  return isHorizontalWhitespace(c) || isVerticalWhitespace(c);
}

LLVM_READNONE inline bool isDigit(char c) {
  return isASCII(c) && c >= '0' && c <= '9';
}

LLVM_READNONE inline bool isOctalDigit(char c) {
  return isASCII(c) && c >= '0' && c <= '7';
}

/// NOTE: only uppercase 'A'..'F' count as hex digits here.
LLVM_READNONE inline bool isHexDigit(char c) {
  return isASCII(c) && (isDigit(c) || (c >= 'A' && c <= 'F'));
}

LLVM_READNONE inline bool isIdentifierHead(char c) {
  return isASCII(c) &&
         (c == '_' || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z'));
}

LLVM_READNONE inline bool isIdentifierBody(char c) {
  return isIdentifierHead(c) || isDigit(c);
}
} // namespace charinfo
/// Lex the next token from the buffer into \p token.
///
/// Skips whitespace, then dispatches on the first character: identifiers
/// and keywords, numbers, string/char literals, block comments, and all
/// punctuation (including the dialect-dependent digraphs).
void Lexer::next(Token &token) {
  // Skip all inter-token whitespace.
  while (*CurPtr && charinfo::isWhitespace(*CurPtr)) {
    ++CurPtr;
  }
  // The buffer is NUL-terminated; NUL here means end of input.
  if (!*CurPtr) {
    token.Kind = tok::eof;
    return;
  }
  if (charinfo::isIdentifierHead(*CurPtr)) {
    identifier(token);
    return;
  } else if (charinfo::isDigit(*CurPtr)) {
    number(token);
    return;
  } else if (*CurPtr == '"' || *CurPtr == '\'') {
    string(token);
    return;
  } else {
    // Punctuation, operators, and alternative spellings.
    switch (*CurPtr) {
    case '=':
      formTokenWithChars(token, CurPtr + 1, tok::equal);
      break;
    case '#':
      formTokenWithChars(token, CurPtr + 1, tok::hash);
      break;
    case '+':
      formTokenWithChars(token, CurPtr + 1, tok::plus);
      break;
    case '-':
      formTokenWithChars(token, CurPtr + 1, tok::minus);
      break;
    case '*':
      // "*>" only in ISO / R10.
      if (*(CurPtr + 1) == '>' && (LangOpts.ISO || LangOpts.M2R10))
        formTokenWithChars(token, CurPtr + 2, tok::stargreater);
      else
        formTokenWithChars(token, CurPtr + 1, tok::star);
      break;
    case '/':
      formTokenWithChars(token, CurPtr + 1, tok::slash);
      break;
    case '(':
      // "(*" starts a block comment; "(!" and "(:" are alternative
      // spellings of '[' and '{'.
      if (*(CurPtr + 1) == '*') {
        comment(token);
      }
      else if (*(CurPtr + 1) == '!')
        formTokenWithChars(token, CurPtr + 2, tok::l_square);
      else if (*(CurPtr + 1) == ':')
        formTokenWithChars(token, CurPtr + 2, tok::l_brace);
      else
        formTokenWithChars(token, CurPtr + 1, tok::l_paren);
      break;
    case '[':
      formTokenWithChars(token, CurPtr + 1, tok::l_square);
      break;
    case '{':
      formTokenWithChars(token, CurPtr + 1, tok::l_brace);
      break;
    case ')':
      formTokenWithChars(token, CurPtr + 1, tok::r_paren);
      break;
    case ']':
      formTokenWithChars(token, CurPtr + 1, tok::r_square);
      break;
    case '}':
      formTokenWithChars(token, CurPtr + 1, tok::r_brace);
      break;
    case '^':
      formTokenWithChars(token, CurPtr + 1, tok::caret);
      break;
    case '|':
      formTokenWithChars(token, CurPtr + 1, tok::pipe);
      break;
    case ',':
      formTokenWithChars(token, CurPtr + 1, tok::comma);
      break;
    case ';':
      formTokenWithChars(token, CurPtr + 1, tok::semi);
      break;
    case '.':
      // ".." is the range/ellipsis operator.
      if (*(CurPtr + 1) == '.')
        formTokenWithChars(token, CurPtr + 2, tok::ellipsis);
      else
        formTokenWithChars(token, CurPtr + 1, tok::period);
      break;
    case ':':
      // ":=" assignment; ":)" alternative spelling of '}'.
      if (*(CurPtr + 1) == '=')
        formTokenWithChars(token, CurPtr + 2, tok::colonequal);
      else if (*(CurPtr + 1) == ')')
        formTokenWithChars(token, CurPtr + 2, tok::r_brace);
      else
        formTokenWithChars(token, CurPtr + 1, tok::colon);
      break;
    case '<':
      // "<>" is an alternative '#' (not-equal) outside R10; "<*" opens a
      // pragma in ISO / R10.
      if (*(CurPtr + 1) == '=')
        formTokenWithChars(token, CurPtr + 2, tok::lessequal);
      else if (*(CurPtr + 1) == '>' && !LangOpts.M2R10)
        formTokenWithChars(token, CurPtr + 2, tok::hash);
      else if (*(CurPtr + 1) == '*' && (LangOpts.ISO || LangOpts.M2R10))
        formTokenWithChars(token, CurPtr + 2, tok::lessstar);
      else
        formTokenWithChars(token, CurPtr + 1, tok::less);
      break;
    case '>':
      if (*(CurPtr + 1) == '=')
        formTokenWithChars(token, CurPtr + 2, tok::greaterequal);
      else
        formTokenWithChars(token, CurPtr + 1, tok::greater);
      break;
    case '&':
      // '&' is the AND operator; not allowed in R10.
      if (LangOpts.M2R10)
        Diags->report(getLoc(), diag::err_not_allowed_in_r10);
      formTokenWithChars(token, CurPtr + 1, tok::kw_AND);
      break;
    case '~':
      // '~' is the NOT operator; not allowed in R10.
      if (LangOpts.M2R10)
        Diags->report(getLoc(), diag::err_not_allowed_in_r10);
      formTokenWithChars(token, CurPtr + 1, tok::kw_NOT);
      break;
    case '!':
      // ISO-only: "!)" is ']' and a lone '!' is '|'.
      if (!LangOpts.ISO)
        Diags->report(getLoc(), diag::err_requires_iso);
      if (*(CurPtr + 1) == ')')
        formTokenWithChars(token, CurPtr + 2, tok::r_square);
      else
        formTokenWithChars(token, CurPtr + 1, tok::pipe);
      break;
    case '@':
      // ISO-only alternative spelling of '^'.
      if (!LangOpts.ISO)
        Diags->report(getLoc(), diag::err_requires_iso);
      formTokenWithChars(token, CurPtr + 1, tok::caret);
      break;
    default:
      token.Kind = tok::unknown;
    }
    return;
  }
}
/// Lex an identifier or keyword: [A-Za-z_][A-Za-z0-9_]*.
/// Keywords are recognized through the KeywordFilter lookup table.
void Lexer::identifier(Token &token) {
  const char *start = CurPtr;
  const char *end = CurPtr + 1;
  while (charinfo::isIdentifierBody(*end))
    ++end;
  llvm::StringRef Name(start, end - start);
  formTokenWithChars(token, end, Keywords.getKeyword(Name, tok::identifier));
}
/// Lex a numeric literal: decimal integers, octal integers ('B' suffix),
/// hex integers ('H' suffix), octal character constants ('C' suffix),
/// and real numbers with an optional 'E' scale factor.
void Lexer::number(Token &token) {
  const char *start = CurPtr;
  const char *end = CurPtr + 1;
  tok::TokenKind kind = tok::unknown;
  // TODO Check language variant
  bool maybeOctal = charinfo::isOctalDigit(*start);
  bool isHex = false;
  while (*end) {
    // End of number reached if digit is not a hexadecimal digit
    // Hexadecimal digits B and C require a check if they are really digits
    // or format specifiers.
    if (!charinfo::isHexDigit(*end) ||
        ((*end == 'B' || *end == 'C') && *(end + 1) &&
         !charinfo::isHexDigit(*(end + 1))))
      break;
    maybeOctal &= charinfo::isOctalDigit(*end);
    if (!charinfo::isDigit(*end))
      isHex = true;
    ++end;
  }
  switch (*end) {
  case 'B': /* octal number */
    if (!maybeOctal)
      Diags->report(getLoc(), diag::err_non_octal_digit_in_number);
    LLVM_FALLTHROUGH;
  case 'H': /* hex number */
    kind = tok::integer_literal;
    ++end;
    break;
  default: /* decimal number */
    if (isHex)
      Diags->report(getLoc(), diag::err_hex_digit_in_decimal);
    kind = tok::integer_literal;
    break;
  case 'C': /* octal char const */
    if (!maybeOctal)
      Diags->report(getLoc(), diag::err_non_octal_digit_in_char);
    kind = tok::char_literal;
    ++end;
    break;
  case '.': /* real number or .. */
    if (*(end + 1) == '.') {
      // "N.." is an integer followed by the range operator; stop at '.'.
      kind = tok::integer_literal;
      break;
    }
    kind = tok::real_literal;
    ++end;
    while (charinfo::isDigit(*end))
      ++end;
    if (*end == 'E') { // scale factor
      ++end;
      if (*end == '+' || *end == '-')
        ++end;
      if (!charinfo::isDigit(*end))
        Diags->report(getLoc(), diag::err_exponent_has_no_digits);
      while (charinfo::isDigit(*end))
        ++end;
    }
    break;
  }
  formTokenWithChars(token, end, kind);
}
// Lex a string or character literal. The literal is closed by the same
// quote character that opened it and may not span a line break.
void Lexer::string(Token &token) {
  const char *start = CurPtr;
  const char *end = CurPtr + 1;
  // Advance to the closing quote, stopping at end of line or end of input.
  while (*end && *end != *start && !charinfo::isVerticalWhitespace(*end))
    ++end;
  if (charinfo::isVerticalWhitespace(*end)) {
    Diags->report(getLoc(), diag::err_unterminated_char_or_string);
  } else if (!*end) {
    // Input ended before the closing quote: previously this case was not
    // diagnosed and the token was formed with end + 1, pushing CurPtr past
    // the NUL terminator (read beyond the buffer on the next token).
    Diags->report(getLoc(), diag::err_unterminated_char_or_string);
    formTokenWithChars(token, end, tok::string_literal);
    return;
  }
  formTokenWithChars(token, end + 1, tok::string_literal);
}
// Lex a block comment "(* ... *)". Comments may nest, so a depth counter
// tracks how many openings are still waiting for their matching close.
void Lexer::comment(Token &token) {
  const char *Cur = CurPtr + 2; // skip the opening "(*"
  unsigned Depth = 1;
  while (*Cur && Depth > 0) {
    if (Cur[0] == '(' && Cur[1] == '*') {
      // Nested comment opens.
      Cur += 2;
      ++Depth;
    } else if (Cur[0] == '*' && Cur[1] == ')') {
      // One nesting level closes.
      Cur += 2;
      --Depth;
    } else {
      ++Cur;
    }
  }
  // Reaching the end of input with open levels is an error.
  if (*Cur == '\0') {
    Diags->report(getLoc(), diag::err_unterminated_block_comment);
  }
  formTokenWithChars(token, Cur, tok::comment);
}
// Fill in Result to cover the half-open range [CurPtr, TokEnd) with the
// given kind, then advance the lexer past the token just formed.
void Lexer::formTokenWithChars(Token &Result, const char *TokEnd,
                               tok::TokenKind Kind) {
  Result.Ptr = CurPtr;
  Result.Length = static_cast<size_t>(TokEnd - CurPtr);
  Result.Kind = Kind;
  CurPtr = TokEnd;
}
#!/bin/bash
# Install the locally built "extra" plugin (Linux amd64 binary) into the
# IBM Cloud CLI, forcing an overwrite of any existing install, and — only
# if that succeeds — launch it listening on port 8080.
ibmcloud plugin install src/extra/build/extra-linux-amd64 -f \
  && EXTRA_PORT=8080 ibmcloud extra
// Source repository: pjmolina/event-backend
var searchToolController = function ($scope, $timeout) {
    // Debounce promise that is pending while the user is still typing.
    var pendingDebounce = null;
    // Last query actually dispatched; used to suppress duplicate searches.
    var previousQuery = null;

    $scope.searchContext = {
        pageSize: 12,
        searchText: '',
        totalItems: 0,
        isLoadingData: false
    };
    $scope.class = '';
    $scope.innerSearchText = '';

    // Dispatch the search once typing has settled. Skips the callback when
    // the query is identical to the previous one.
    function runSearch() {
        $scope.searchContext.isUserIsTyping = false;
        pendingDebounce = null;
        var unchanged = (previousQuery == $scope.innerSearchText);
        previousQuery = $scope.innerSearchText;
        $scope.searchContext.searchText = $scope.innerSearchText;
        if ($scope.searchCallback && !unchanged) {
            $scope.searchCallback($scope.searchContext);
        }
    }

    // Clear the input box and search immediately for the empty query.
    $scope.clearSearch = function () {
        $scope.innerSearchText = '';
        runSearch();
    };

    // Debounce keystrokes by 200 ms so the server is not hit on every key.
    $scope.typedTextSearch = function () {
        if (pendingDebounce) {
            // Restart the countdown: cancel the previously scheduled search.
            $timeout.cancel(pendingDebounce);
        }
        $scope.searchContext.isUserIsTyping = true;
        pendingDebounce = $timeout(runSearch, 200, true);
    };
};
searchToolController.$inject = ['$scope', '$timeout'];
// <search-tool> element directive: renders a search box bound to a shared
// search context and a callback fired when the query changes.
angular.module('myApp').directive("searchTool", [function () {
    return {
        restrict: 'E',
        replace: true,
        controller: searchToolController,
        scope: {
            searchContext: '=ngModel',  // two-way bound search state
            searchCallback: '=search',  // invoked with the context on change
            class: '@class'             // CSS classes passed through
        },
        templateUrl: '/app/directives/searchTool.html',
        link: function (scope, elem, attr) {
        }
    };
}]);
package com.java.study.zuo.vedio.basic.chapter3;
/**
* <Description>
*
* @author hushiye
* @since 2020-08-22 18:20
*/
public class SmallerEqualBigger {
    /**
     * Stable three-way partition of a singly linked list around a pivot:
     * the result chains all nodes with value &lt; targetValue, then all
     * nodes equal to it, then all nodes greater, preserving the original
     * relative order inside each group.
     *
     * @param node        head of the input list; the list is re-linked in place
     * @param targetValue pivot value for the partition
     * @return head of the partitioned list, or {@code null} for empty input
     */
    public static Node smallerEqualBigger(Node node, int targetValue) {
        if (node == null) {
            return null;
        }
        // Head/tail pointers for the three partitions being built.
        Node smallNode = null;       // tail of the "< target" list
        Node smallHeadNode = null;   // head of the "< target" list
        Node equalsNode = null;      // tail of the "== target" list
        Node equalsHeadNode = null;  // head of the "== target" list
        Node biggerNode = null;      // tail of the "> target" list
        Node biggerHeadNode = null;  // head of the "> target" list
        while (node != null) {
            // Detach the current node, then append it to the matching group.
            Node nextNode = node.next;
            node.setNext(null);
            if (node.value < targetValue) {
                if (smallHeadNode == null) {
                    smallHeadNode = node;
                } else {
                    smallNode.setNext(node);
                }
                smallNode = node;
            } else if (node.value == targetValue) {
                if (equalsHeadNode == null) {
                    equalsHeadNode = node;
                } else {
                    equalsNode.setNext(node);
                }
                equalsNode = node;
            } else {
                if (biggerHeadNode == null) {
                    biggerHeadNode = node;
                } else {
                    biggerNode.setNext(node);
                }
                biggerNode = node;
            }
            node = nextNode;
        }
        // Splice small -> equal -> bigger. When the equal group is empty,
        // the small tail stands in as the node that must link to "bigger".
        if (smallHeadNode != null){
            smallNode.setNext(equalsHeadNode);
            equalsNode = equalsNode == null ? smallNode : equalsNode;
        }
        if (equalsNode != null){
            equalsNode.setNext(biggerHeadNode);
        }
        // Return the head of the first non-empty group.
        return smallHeadNode != null ? smallHeadNode : equalsHeadNode != null ? equalsHeadNode : biggerHeadNode;
    }
    /** Prints the list values on one line, prefixed with "Linked List: ". */
    public static void printLinkedList(Node node) {
        System.out.print("Linked List: ");
        while (node != null) {
            System.out.print(node.getValue() + " ");
            node = node.getNext();
        }
        System.out.println();
    }
    /** Demo: partitions a sample list around pivot 7 and prints the result. */
    public static void main(String[] args) {
        Node head1 = new Node(7);
        head1.next = new Node(9);
        head1.next.next = new Node(1);
        head1.next.next.next = new Node(8);
        head1.next.next.next.next = new Node(5);
        head1.next.next.next.next.next = new Node(2);
        head1.next.next.next.next.next.next = new Node(5);
        // head1 = listPartition1(head1, 4);
        head1 = smallerEqualBigger(head1, 7);
        printLinkedList(head1);
    }
}
|
/**
* Created by Moiz.Kachwala on 15-06-2016.
*/
"use strict";
var express = require("express");
var ProjectController = require("./../../controllers/ProjectController");
var router = express.Router();
var ProjectRoutes = (function () {
    // The express router is a module-level singleton (declared above);
    // guard registration so repeated accesses of the "routes" getter do
    // not stack duplicate handlers onto the same router.
    var routesBound = false;
    /**
     * Owns a ProjectController instance and exposes the configured
     * express router for the /projects CRUD endpoints.
     */
    function ProjectRoutes() {
        this._projectController = new ProjectController();
    }
    Object.defineProperty(ProjectRoutes.prototype, "routes", {
        // Returns the shared router, binding the endpoints on first use only.
        get: function () {
            if (!routesBound) {
                routesBound = true;
                var controller = this._projectController;
                router.get("/projects", controller.retrieve);
                router.post("/projects", controller.create);
                router.put("/projects/:_id", controller.update);
                router.get("/projects/:_id", controller.findById);
                router.delete("/projects/:_id", controller.delete);
            }
            return router;
        },
        enumerable: true,
        configurable: true
    });
    return ProjectRoutes;
}());
Object.seal(ProjectRoutes);
module.exports = ProjectRoutes;
//# sourceMappingURL=ProjectRoutes.js.map |
#!/usr/bin/env bash
# SELF = basename of this script, SDIR = directory part of the invocation path.
SELF=${0##*/} SDIR=${0%/*}
# -------------------------------------------------------------------------------------------------------------------- #
: '
The MIT License (MIT)
Copyright © 2021 by John Celoria.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'
# Set some defaults -------------------------------------------------------------------------------------------------- #
VERSION=0.2
# Input list of domains/addresses; overridable via environment or -l/--list.
BLOCK_LIST=${BLOCK_LIST:-list.txt}
# Output format: "xml" (Gmail filter export) or "csv"; overridable via -f.
OUTPUT_FORMAT=${OUTPUT_FORMAT:-xml}
# NOTE(review): this default is derived from OUTPUT_FORMAT *before* option
# parsing runs, so "-f csv" alone still writes mailFilters.xml unless -o or
# the OUTPUT_FILE env var is also given — confirm this is intended.
OUTPUT_FILE=${OUTPUT_FILE:-mailFilters.$OUTPUT_FORMAT}
# Functions ---------------------------------------------------------------------------------------------------------- #
# help: print the usage/option summary to stdout (callers redirect as needed).
# Option defaults are interpolated from the current variable values.
function help() {
cat << EOF
Usage: ${SELF} [OPTION]...
Generate a Gmail filter from a list of domains
-h, --help Display this help message and exit
-l, --list The list of domains/emails to read (default: ${BLOCK_LIST})
-f, --format The output format (default: ${OUTPUT_FORMAT})
-o, --outfile The output file name (default: ${OUTPUT_FILE})
EOF
return
}
# -------------------------------------------------------------------------------------------------------------------- #
# log: send a message to syslog (and stderr, via logger -s).
# Usage: log [notice|warning|crit] message...
# If the first argument is not a recognized severity, "notice" is assumed and
# the whole argument list is treated as the message. Side effect: RETVAL is
# set to the severity's index (used by die as the exit status) unless the
# caller already set it.
function log() {
    local level levels=(notice warning crit)
    # Build an extglob pattern like "+(notice|warning|crit)".
    level="+($(IFS='|';echo "${levels[*]}"))"
    shopt -s extglob; case ${1} in
        ${level}) level=${1}; shift ;;
        *) level=notice ;;
    esac; shopt -u extglob
    # Derive a default exit status from the severity index. Note the
    # required separator before "}": the original "done }" is missing it.
    [[ -z ${RETVAL} ]] && { for RETVAL in "${!levels[@]}"; do
        [[ ${levels[${RETVAL}]} = "${level}" ]] && break
    done; }
    # Quote "$@" so empty/multi-word arguments reach logger intact.
    logger -s -p ${level} -t "[${SELF}:${FUNCNAME[1]}()]" -- "$@"
}
# -------------------------------------------------------------------------------------------------------------------- #
# die: log the message(s) and exit with RETVAL (or the last command's status).
function die() { local retval=${RETVAL:-$?}; log "$@"; exit ${retval}; }
# -------------------------------------------------------------------------------------------------------------------- #
# generate-xml: write a Gmail "Mail Filters" Atom feed to stdout, with one
# trash-on-arrival filter entry per address/domain read from BLOCK_LIST.
function generate-xml() {
cat <<@@
<?xml version='1.0' encoding='UTF-8'?><feed xmlns='http://www.w3.org/2005/Atom' xmlns:apps='http://schemas.google.com/apps/2006'>
@@
# Read line-wise with no word splitting or backslash mangling (-r); redirect
# into the loop instead of piping from cat, avoiding a useless subshell.
while IFS= read -r line; do
cat <<@@
<entry>
<category term='filter'></category>
<title>Mail Filter</title>
<apps:property name='from' value='${line}'/>
<apps:property name='shouldTrash' value='true'/>
<apps:property name='sizeOperator' value='s_sl'/>
<apps:property name='sizeUnit' value='s_smb'/>
</entry>
@@
done < "${BLOCK_LIST}"
cat <<@@
</feed>
@@
}
# -------------------------------------------------------------------------------------------------------------------- #
# generate-csv: echo each entry of BLOCK_LIST on its own line.
function generate-csv() {
# Read line-wise instead of word-splitting the whole file, so entries that
# contain spaces stay intact and no glob expansion is applied to them.
while IFS= read -r line; do
echo "${line}"
done < "${BLOCK_LIST}"
}
# Sanity checks ------------------------------------------------------------------------------------------------------ #
# Option parsing: short options via getopts; long options arrive as "-" with
# "name[=value]" in OPTARG and are normalized in the if-branch. The optstring
# now declares l: and o: — the original ":hf:-:" rejected -l and -o even
# though help documents them. The invalid-option messages were also swapped:
# ??* matches multi-character (long) names, \? a single bad short option.
while getopts ":hl:f:o:-:" OPT; do
    if [[ "${OPT}" = "-" ]]; then
        OPT="${OPTARG%%=*}"
        OPTARG="${OPTARG#$OPT}"
        OPTARG="${OPTARG#=}"
    fi
    case "${OPT}" in
        h|help) help >&2; exit 1 ;;
        l|list) BLOCK_LIST=${OPTARG} ;;
        f|format) OUTPUT_FORMAT=${OPTARG} ;;
        o|outfile) OUTPUT_FILE=${OPTARG} ;;
        ??*) RETVAL=2; die "Invalid long option: --${OPT}" ;;
        \?) RETVAL=3; die "Invalid short option: -${OPTARG}" ;;
        :) RETVAL=4; die "Option -${OPTARG} requires an argument." ;;
    esac
done; shift $((OPTIND-1))
# main --------------------------------------------------------------------------------------------------------------- #
# Dispatch on the requested output format; anything other than "csv" falls
# back to the XML generator. Output is shown on stdout and appended (tee -a)
# to OUTPUT_FILE, so repeated runs accumulate in the same file.
case ${OUTPUT_FORMAT} in
csv) generate-csv | tee -a ${OUTPUT_FILE} ;;
*) generate-xml | tee -a ${OUTPUT_FILE} ;;
esac
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.