text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Build and publish the parlassets-ljubljana image to the Scaleway registry.
# Requires SCW_SECRET_TOKEN in the environment.
# Pipe the token via --password-stdin instead of -p so it is not exposed in
# `ps` output or shell history (and so an unquoted expansion cannot break it).
echo "$SCW_SECRET_TOKEN" | sudo docker login rg.fr-par.scw.cloud/djnd -u nologin --password-stdin
# BUILD AND PUBLISH parlassets-ljubljana
sudo docker build -f Dockerfile -t parlassets-ljubljana:latest .
sudo docker tag parlassets-ljubljana:latest rg.fr-par.scw.cloud/djnd/parlassets-ljubljana:latest
sudo docker push rg.fr-par.scw.cloud/djnd/parlassets-ljubljana:latest
|
"""chainsolve transmutation tests."""
import os
import nose
import warnings
from nose.tools import assert_equal, assert_not_equal, assert_raises, raises, \
assert_almost_equal, assert_true, assert_false, assert_is, with_setup, \
assert_less
from numpy.testing import dec, assert_array_equal
import numpy as np
import tables as tb
from scipy import linalg
from pyne.utils import QAWarning
warnings.simplefilter("ignore", QAWarning)
from pyne import nuc_data
from pyne import nucname as nn
from pyne import data
from pyne.material import Material
from pyne.transmute.chainsolve import Transmuter
# Module-level Transmuter shared by every test; (re)created by setup().
tm = None
def setup():
    """Create the shared Transmuter instance before the test module runs."""
    global tm
    tm = Transmuter()
def teardown():
    """Release the shared Transmuter after the test module finishes."""
    global tm
    del tm
def test_check_phi():
    """Exercise the flux (phi) validation behind the Transmuter.phi property."""
    numeaf = 175
    def assign_phi(value):
        tm.phi = value
    # A None flux must be preserved as-is.
    tm._phi = None
    assert_is(tm.phi, None)
    # A correctly sized flux round-trips unchanged.
    tm.phi = np.ones(numeaf)
    assert_array_equal(tm.phi, np.ones(numeaf))
    # A wrongly shaped flux must be rejected.
    assert_raises(ValueError, assign_phi, np.ones((50, 1)))
    # A flux with any negative entry must be rejected.
    bad = np.ones(numeaf)
    bad[123] = -1
    assert_raises(ValueError, assign_phi, bad)
def test_grow_matrix1():
    """_grow_matrix appends a production/destruction row and column to a 3x3 chain."""
    prod = 0.1848
    dest = 1.337
    start = np.array([[-0.5, 0., 0.],
                      [0.25, -0.3, 0.],
                      [0., 0.123, -1.2]])
    expected = np.array([[-0.5, 0., 0., 0.],
                         [0.25, -0.3, 0., 0.],
                         [0., 0.123, -1.2, 0.],
                         [0., 0., 0.1848, -1.337]])
    grown = tm._grow_matrix(start, prod, dest)
    assert_array_equal(expected, grown)
def test_grow_matrix2():
    """_grow_matrix grows a 1x1 decay matrix into the expected 2x2 chain."""
    prod = 0.1848
    dest = 1.337
    start = np.array([[-1.]])
    expected = np.array([[-1., 0.],
                         [0.1848, -1.337]])
    grown = tm._grow_matrix(start, prod, dest)
    assert_array_equal(expected, grown)
@with_setup(None, lambda: os.remove('log.txt') if os.path.exists('log.txt') else None)
def test_tree_log():
    """Tests correct implementation of the _log_tree() function."""
    filename = 'log.txt'
    # _log_tree() writes through tm.log, opened here in write mode.
    tm.log = open(filename, 'w')
    # Depth of each logged entry (0 = tree root).
    d0 = 0
    d1 = 1
    d2 = 2
    d11 = 1
    d20 = 0
    nuc0 = nn.id('O16')
    nuc1 = nn.id('O17')
    nuc2 = nn.id('O18')
    nuc11 = nn.id('He4')
    nuc20 = nn.id('C12')
    N0 = 123.456
    N1 = 12.3456
    N2 = 1.23456
    N11 = 1111.
    N20 = 12.
    # Expected rendering of the tree: indentation tracks depth.
    exp = ('--> O16 123.456\n'
           ' |--> O17 12.3456\n'
           ' | |--> O18 1.23456\n'
           ' |--> He4 1111.0\n'
           '--> C12 12.0\n')
    # NOTE(review): this `with` re-opens (and truncates) the same file that
    # tm.log already holds open; the `tree` handle itself is never used — all
    # writes go through tm.log inside _log_tree(). Confirm the second open is
    # intentional.
    with open(filename, 'w') as tree:
        tm._log_tree(d0, nuc0, N0)
        tm._log_tree(d1, nuc1, N1)
        tm._log_tree(d2, nuc2, N2)
        tm._log_tree(d11, nuc11, N11)
        tm._log_tree(d20, nuc20, N20)
    tm.log.close()
    tm.log = None
    with open(filename, 'r') as f:
        obs = f.read()
    assert_equal(exp, obs)
def test_zero_flux():
    """A stable nuclide under zero flux must come out of transmutation unchanged."""
    mat = Material({'FE56': 1.0}, mass=1.0)
    out = tm.transmute(mat, t=100.0, tol=1e-7)
    assert_almost_equal(out['FE56'], 1.0)
def test_root_decrease():
    """Under a strong flux, the root isotope itself must be depleted (not skipped)."""
    flux = 1e12 * np.ones(175)
    mat = Material({'FE56': 1.0}, mass=1.0)
    out = tm.transmute(mat, t=100.0, phi=flux, tol=1e-7)
    assert_less(out['FE56'], 1.0)
def test_tm171_decay():
    """Pure decay (zero flux) of TM171 must follow exp(-lambda * t) exactly."""
    t_sim = 1.2119E+8  # roughly 3.843 years, about two half-lives
    lamb = data.decay_const('TM171')
    expected = np.exp(-lamb * t_sim)
    mat = Material({'TM171': 1.0}, mass=1.0)
    out = tm.transmute(mat, t=t_sim, phi=0.0, tol=1e-7)
    assert_equal(expected, out['TM171'])
#
# Run as script
#
if __name__ == "__main__":
    # Delegate to nose's module-level test runner when executed directly.
    nose.runmodule()
|
class CodeFormatter:
    """Accumulates source text as indented, prefixed lines in ``self.output``.

    ``handle_code`` prefixes each non-empty line with ``'| '`` and
    ``handle_comment`` with ``'//- '``; both normalize CR/CRLF line endings
    first. The two handlers previously duplicated the loop — it now lives in
    the shared ``_emit`` helper.
    """

    def __init__(self):
        # Accumulated formatted text (grown by the handlers below).
        self.output = ''

    def indentation(self):
        """Append one indentation level (4 spaces) to the output."""
        self.output += '    '

    def _emit(self, data, prefix):
        """Normalize newlines in *data*, strip it, then append every
        non-empty line as '<indent><prefix><line>\\n'."""
        data = data.replace('\r\n', '\n').replace('\r', '\n').strip()
        for line in data.split('\n'):
            if line:
                self.indentation()
                self.output += prefix + line + '\n'

    def handle_code(self, data):
        """Append *data* formatted as code lines ('| ' prefix)."""
        self._emit(data, '| ')

    def handle_comment(self, data):
        """Append *data* formatted as comment lines ('//- ' prefix)."""
        self._emit(data, '//- ')
# Test the CodeFormatter class
# Smoke check: format a code snippet and a comment block, then print the
# accumulated output.
formatter = CodeFormatter()
formatter.handle_code('def greet():\n    print("Hello, world!")\n\nprint("Goodbye!")')
formatter.handle_comment('This is a comment\n\nAnother comment')
print(formatter.output)
<filename>lib/zwave.getNodeParams.js
var shared = require('./zwave.shared.js');
var Promise = require('bluebird');
/**
 * Request every configuration parameter of a Z-Wave node.
 *
 * @param {Object} options - must contain the numeric node `id`.
 * @returns {Promise} resolved once the request has been issued,
 *   rejected when no Z-Wave instance is connected.
 */
module.exports = function getNodeParams(options) {
    // Renamed from `removeNode` (copy-paste artifact): this module requests
    // node parameters, it does not remove anything. module.exports callers
    // are unaffected by the internal name.
    if (!shared.zwave) return Promise.reject(new Error('Zwave instance not connected'));
    var zwave = shared.zwave;
    shared.setupMode = true;
    shared.currentSetupId = options.id;
    // NOTE(review): `sails` is assumed to be an application global here — confirm.
    sails.log.info(`Zwave module : Request all params of node ${options.id}`);
    zwave.requestAllConfigParams(options.id);
    return Promise.resolve();
};
#!/bin/bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
# Reference:
# * https://github.com/pytorch/audio/blob/392a03c86d94d2747e1a0fc270a74c3845535173/packaging/build_wheel.sh
# * https://github.com/pytorch/audio/blob/392a03c86d94d2747e1a0fc270a74c3845535173/packaging/pkg_helpers.bash
set -ex
# Populate build version if necessary, and add version suffix
#
# Inputs:
# BUILD_VERSION (e.g., 0.2.0 or empty)
# VERSION_SUFFIX (e.g., +cpu)
#
# Outputs:
# BUILD_VERSION (e.g., 0.2.0.dev20190807+cpu)
#
# Fill BUILD_VERSION if it doesn't exist already with a nightly string
# Usage: setup_build_version 0.2.0
setup_build_version() {
    # No explicit BUILD_VERSION: derive a nightly "<base>.dev<YYYYMMDD><suffix>".
    if [[ -z "$BUILD_VERSION" ]]; then
        export BUILD_VERSION="$1.dev$(date "+%Y%m%d")$VERSION_SUFFIX"
    else
        # Explicit BUILD_VERSION: just append the (possibly empty) suffix.
        export BUILD_VERSION="$BUILD_VERSION$VERSION_SUFFIX"
    fi
}
# Set some useful variables for OS X, if applicable
setup_macos() {
    # On macOS, pin the deployment target and force clang so wheels are
    # portable across recent macOS versions; a no-op elsewhere.
    if [[ "$(uname)" == Darwin ]]; then
        export MACOSX_DEPLOYMENT_TARGET=10.15 CC=clang CXX=clang++
    fi
}
# Inputs:
# PYTHON_VERSION (3.7, 3.8, 3.9)
#
# Outputs:
# PATH modified to put correct Python version in PATH
setup_wheel_python() {
    # When PYTHON_VERSION is set, (re)create and activate a dedicated conda
    # environment named "env$PYTHON_VERSION" with that interpreter.
    if [[ -n "$PYTHON_VERSION" ]]; then
        eval "$(conda shell.bash hook)"
        # `|| true`: removal fails harmlessly when the env does not exist yet.
        conda env remove -n "env$PYTHON_VERSION" || true
        conda create -yn "env$PYTHON_VERSION" python="$PYTHON_VERSION"
        conda activate "env$PYTHON_VERSION"
    fi
}
# Read the base version, compute the full build version, select the Python
# interpreter, then build a wheel for the current platform.
version=$(cat "version.txt")
setup_build_version "$version"
setup_wheel_python
python setup.py clean
if [[ "$(uname)" == Darwin ]]; then
    setup_macos
    python setup.py bdist_wheel
elif [[ "$(uname)" == Linux ]]; then
    python setup.py bdist_wheel
else
    echo "Unsupported"
    exit 1
fi
|
#!/usr/bin/env bash
# Evaluate the Mask R-CNN (R50-FPN, GN, 2x schedule) checkpoint at epoch 18
# on 4 GPUs, dumping raw results to val.pkl and reporting bbox + segm metrics.
cd /nfs/project/libo_i/mmdetection
python3 tools/test.py configs/coco/mask_rcnn_r50_fpn_gn_2x.py work_dirs/mask_rcnn_r50_fpn_gn_2x/epoch_18.pth --out val.pkl --gpus 4 --eval bbox segm
|
#!/bin/ash
# Start the ss-merlin stack (ss-redir, optional v2ray-plugin, unbound) for any
# process that is not already running.
#
# Fixes: use POSIX `[ ]` instead of bash-only `[[ ]]` (this runs under ash),
# reuse $SHADOW_CONFIG_FILE instead of repeating the literal path, and quote
# all expansions.
SS_MERLIN_HOME=/opt/share/ss-merlin
SHADOW_CONFIG_FILE=${SS_MERLIN_HOME}/etc/shadowsocks/config.json

# Detect whether the shadowsocks config enables the v2ray plugin.
use_v2ray=0
if [ -f "${SHADOW_CONFIG_FILE}" ]; then
    use_v2ray=$(grep -w "plugin" "${SHADOW_CONFIG_FILE}" | grep "v2ray" -c)
fi

# Start ss-redir if it is not running, seeding the config from the sample on
# first run.
ss_pid=$(pidof ss-redir)
if [ -z "$ss_pid" ]; then
    if [ ! -f "${SHADOW_CONFIG_FILE}" ]; then
        cp "${SS_MERLIN_HOME}/etc/shadowsocks/config.sample.json" "${SHADOW_CONFIG_FILE}"
    fi
    ss-redir -c "${SHADOW_CONFIG_FILE}" -f /opt/var/run/ss-redir.pid
fi
sleep 3

# If the config uses v2ray but the plugin process is absent, restart ss-redir
# so the plugin is (re)spawned with it.
v2ray_pid=$(pidof v2ray-plugin)
if [ -z "$v2ray_pid" ]; then
    if [ "$use_v2ray" -ge 1 ]; then
        killall ss-redir 2>/dev/null
        ss-redir -c "${SHADOW_CONFIG_FILE}" -f /opt/var/run/ss-redir.pid
    fi
fi

# Start the unbound DNS resolver if needed.
unbound_pid=$(pidof unbound)
if [ -z "$unbound_pid" ]; then
    unbound -c "${SS_MERLIN_HOME}/etc/unbound/unbound.conf"
fi
echo "All service started."
|
'use strict';
// Minimal Hapi server exposing POST /api/audit, which acknowledges the URL
// submitted for auditing.
const Hapi = require('hapi');
const dotenv = require('dotenv').config();
const port = process.env.PORT || 4040;

const server = Hapi.server({
    port,
    host: 'localhost'
});

// Accept a URL in the request payload and echo an acknowledgement.
server.route({
    method: 'POST',
    path: '/api/audit',
    handler (request, h) {
        const siteUrl = request.payload;
        // Fixed user-facing typo: "recieved" -> "received".
        return `Url to audit received ${siteUrl}`;
    }
});

const init = async () => {
    await server.start();
    console.log(`Server running at: ${server.info.uri}`);
};

// Fail fast on unhandled promise rejections (e.g. server.start() errors).
process.on('unhandledRejection', (err) => {
    console.log(err);
    process.exit(1);
});

init();
|
#!/bin/bash -eux
# Configure GitHub credentials and Maven settings (taken from the directory
# this script lives in), then build the package with tests/docs skipped.
#
# Fix: quote "$(dirname "$0")" and "$DIR" so the script works from paths
# containing spaces.
cd "$(dirname "$0")"
DIR=$(pwd)
cd -
mvn -v
mkdir -p ~/.m2/
# WARNING(review): this stores the GitHub password in plain text in ~/.github;
# prefer a scoped access token or a credential helper.
echo "login=neilellis" > ~/.github
echo "password=${GITHUB_PASSWORD}" >> ~/.github
cp "$DIR/settings.xml" ~/.m2/settings.xml
#$DIR/set-version.sh
mvn -e -q -Drat.skip -Dsource.skip=true -DgenerateReports=false -Dmaven.javadoc.skip=true -Dmaven.test.skip package
|
# Python program
def main():
    """Read two integers from stdin and print them in ascending order."""
    first = int(input("Enter the first number: "))
    second = int(input("Enter the second number: "))
    # Sorting the pair prints the smaller value first, then the larger —
    # identical to printing min() then max().
    for value in sorted((first, second)):
        print(value)
if __name__ == "__main__":
    # Prompt interactively only when executed as a script.
    main()
<reponame>mverrilli/tdi-studio-se<gh_stars>0
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.designer.runprocess.shadow;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.talend.core.language.ECodeLanguage;
import org.talend.core.language.LanguageManager;
import org.talend.core.model.metadata.IMetadataTable;
import org.talend.core.model.process.IElementParameter;
import org.talend.core.model.runprocess.shadow.ObjectElementParameter;
import org.talend.core.model.runprocess.shadow.TextElementParameter;
import org.talend.core.model.utils.TalendTextUtils;
import org.talend.core.repository.model.preview.WSDLSchemaBean;
/**
* DOC qwei class global comment. Detailled comment
*/
public class WSDLSchemaInputNode extends FileInputNode {
/**
* DOC qwei WSDLSchemaInputNode constructor comment.
*
* @param nodeType
*/
private List<IMetadataTable> metadatas = null;
private String encoding = ""; //$NON-NLS-1$
/**
* Constructs a new WSDLSchemaInputNode.
*
* @param schemaBean
* @param string
*/
public WSDLSchemaInputNode(String fileName, String encoding, List<IMetadataTable> metadatas, WSDLSchemaBean schemaBean,
ArrayList parameters) {
super("tWebServiceInput"); //$NON-NLS-1$
this.encoding = encoding;
addParameters(schemaBean, parameters);
setMetadataList(metadatas);
}
/**
* DOC qwei Comment method "addParameters".
*
* @param schemaBean
* @param parameters
*/
private void addParameters(WSDLSchemaBean schemaBean, ArrayList parameters) {
IElementParameter param = new TextElementParameter("ENDPOINT", schemaBean.getWslUrl()); //$NON-NLS-1$
addParameter(param);
if (LanguageManager.getCurrentLanguage().equals(ECodeLanguage.JAVA)) {
addJavaParameters(schemaBean);
} else if (LanguageManager.getCurrentLanguage().equals(ECodeLanguage.PERL)) {
addPerlParameters(schemaBean);
}
param = new TextElementParameter("METHOD", schemaBean.getMethod()); //$NON-NLS-1$
addParameter(param);
// fix preview
param = new TextElementParameter("ADVANCED_USE", Boolean.toString(false)); //$NON-NLS-1$
addParameter(param);
List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
if (parameters != null) {
for (Object string : parameters) {
Map<String, Object> map = new HashMap<String, Object>();
map.put("VALUE", TalendTextUtils.addQuotes(string.toString())); //$NON-NLS-1$
list.add(map);
}
}
param = new ObjectElementParameter("PARAMS", list); //$NON-NLS-1$
param.setListItemsDisplayCodeName(new String[] { "VALUE" }); //$NON-NLS-1$
addParameter(param);
}
private void addJavaParameters(WSDLSchemaBean schemaBean) {
IElementParameter param = new TextElementParameter("NEED_AUTH", "" + schemaBean.isNeedAuth()); //$NON-NLS-1$ //$NON-NLS-2$
addParameter(param);
param = new TextElementParameter("AUTH_USERNAME", schemaBean.getUserName()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("AUTH_PASSWORD", schema<PASSWORD>.<PASSWORD>()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("UES_PROXY", "" + schemaBean.isUseProxy()); //$NON-NLS-1$ //$NON-NLS-2$
addParameter(param);
param = new TextElementParameter("PROXY_HOST", schemaBean.getProxyHost()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("PROXY_PORT", schemaBean.getProxyPort()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("PROXY_USERNAME", schemaBean.getProxyUser()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("PROXY_PASSWORD", schemaBean.getProxyPassword()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("TIMEOUT", "" + schemaBean.getTimeOut()); //$NON-NLS-1$
addParameter(param);
if (!schemaBean.getIsInputModel()) {
param = new TextElementParameter("PORT_NAME", schemaBean.getPortName()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("PORT_NS", schemaBean.getPortNS()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("SERVICE_NAME", schemaBean.getServerName()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("SERVICE_NS", "" + schemaBean.getServerNS()); //$NON-NLS-1$
addParameter(param);
}
}
private void addPerlParameters(WSDLSchemaBean schemaBean) {
IElementParameter param = new TextElementParameter("WSDL", schemaBean.getEndpointURI()); //$NON-NLS-1$
addParameter(param);
param = new TextElementParameter("ENCODING", this.encoding); //$NON-NLS-1$
addParameter(param);
}
/**
* Getter for metadatas.
*
* @return the metadatas
*/
public List<IMetadataTable> getMetadatas() {
return this.metadatas;
}
/**
* Sets the metadatas.
*
* @param metadatas the metadatas to set
*/
public void setMetadatas(List<IMetadataTable> metadatas) {
this.metadatas = metadatas;
}
}
|
#!/usr/bin/env bash

# template2html.sh - given the short name of a Distant Reader study carrel, output an HTML file containing a visualization

# Eric Lease Morgan <emorgan@nd.edu>
# August 7, 2019 - first documentation, and based on the good work of Team JAMS (Aarushi Bisht, Cheng Jial, Mel Mashiku, and Shivam Rastogi)

# configure
TEMPLATE='./etc/template.htm'

# sanity check: a carrel name is required
if [[ -z $1 ]]; then
    echo "Usage: $0 <carrel>" >&2
    # exit with a non-zero status so callers can detect the usage error
    exit 1
fi

# get input
CARREL=$1

# do the work, output, and done
# sed reads the template directly (no useless cat). Quote the expansion and
# use printf: an unquoted `echo $HTML` would word-split the template and
# collapse all of its newlines/whitespace into single spaces.
HTML=$( sed "s/##CARREL##/$CARREL/g" "$TEMPLATE" )
printf '%s\n' "$HTML"
|
#!/bin/bash
# ---------------------------------------------------------------
# dockware/play entrypoint banner. The real boot sequence (MySQL,
# mailcatcher, cron, PHP-FPM, Apache) follows below in this script.
# ---------------------------------------------------------------
echo ""
echo " _____ ____ _____ _ ____ __ _____ ______ "
echo "| __ \ / __ \ / ____| |/ /\ \ / /\ | __ \| ____|"
echo "| | | | | | | | | ' / \ \ /\ / / \ | |__) | |__ "
echo "| | | | | | | | | < \ \/ \/ / /\ \ | _ /| __| "
echo "| |__| | |__| | |____| . \ \ /\ / ____ \| | \ \| |____ "
echo "|_____/ \____/ \_____|_|\_\ \/ \/_/ \_\_| \_\______|"
echo ""
# The hex strings below are ASCII-encoded greeting / recruiting messages.
echo "68 69 20 64 65 76 65 6C 6F 70 65 72 2C 20 6E 69 63 65 20 74 6F 20 6D 65 65 74 20 79 6F 75"
echo "6c 6f 6f 6b 69 6e 67 20 66 6f 72 20 61 20 6a 6f 62 3f 20 77 72 69 74 65 20 75 73 20 61 74 20 6a 6f 62 73 40 64 61 73 69 73 74 77 65 62 2e 64 65"
echo ""
echo "*******************************************************"
echo "** DOCKWARE IMAGE: play"
echo "** Tag: 5.6.3"
echo "** Version: 1.4.2"
echo "** Built: $(cat /build-date.txt)"
echo "** Copyright 2021 dasistweb GmbH"
echo "*******************************************************"
echo ""
echo "launching dockware...please wait..."
echo ""
set -e

source /etc/apache2/envvars

# it's possible to add a custom boot script on startup.
# so we test if it exists and just execute it
file="/var/www/boot_start.sh"
if [ -f "$file" ] ; then
    # quoted so the hook also works if the path ever contains spaces
    sh "$file"
fi

echo "DOCKWARE: setting timezone to ${TZ}..."
sudo ln -sf "/usr/share/zoneinfo/${TZ}" /etc/localtime
sudo dpkg-reconfigure -f noninteractive tzdata

echo "-----------------------------------------------------------"
echo "DOCKWARE: starting MySQL...."
# somehow its necessary to set permissions, because
# sometimes they get lost :)
# make sure that it is no longer present from the last run
file="/var/run/mysqld/mysqld.sock.lock"
if [ -f "$file" ] ; then
    sudo rm -f "$file"
fi
sudo chown -R mysql:mysql /var/lib/mysql /var/run/mysqld
sudo service mysql start;

echo "-----------------------------------------------------------"
echo "DOCKWARE: starting mailcatcher...."
sudo /usr/bin/env $(which mailcatcher) --ip=0.0.0.0

echo "-----------------------------------------------------------"
echo "DOCKWARE: starting cron service...."
sudo service cron start

echo "-----------------------------------------------------------"
echo "DOCKWARE: switching to PHP ${PHP_VERSION}..."
sudo sed -i 's/__dockware_php_version__/'"${PHP_VERSION}"'/g' /etc/apache2/sites-enabled/000-default.conf
sudo service "php${PHP_VERSION}-fpm" stop > /dev/null 2>&1
sudo service "php${PHP_VERSION}-fpm" start
sudo update-alternatives --set php "/usr/bin/php${PHP_VERSION}" > /dev/null 2>&1 &

echo "-----------------------------------------------------------"
# FIX: quote $SW_CURRENCY — the unquoted test fails with
# "unary operator expected" when the variable is empty or unset.
if [ "$SW_CURRENCY" != "not-set" ]; then
    echo "DOCKWARE: Switching Shopware default currency..."
    php /var/www/scripts/shopware6/set_currency.php "$SW_CURRENCY"
    echo "-----------------------------------------------------------"
fi
# --------------------------------------------------
# APACHE
sudo sed -i 's#__dockware_apache_docroot__#'"${APACHE_DOCROOT}"'#g' /etc/apache2/sites-enabled/000-default.conf

# sometimes the internal docker structure leaves
# some pid files existing. the container will be recreated....but
# in reality it's not! thus there might be the problem
# that an older pid file exists, which leads to the following error:
# - "httpd (pid 13) already running"
# to avoid this, we simple remove an existing file
sudo rm -f /var/run/apache2/apache2.pid

# start test and start apache
echo "DOCKWARE: testing and starting Apache..."
sudo apache2ctl configtest
sudo service apache2 restart
echo "-----------------------------------------------------------"

# --------------------------------------------------
# now let's check if we have a custom boot script that
# should run after our other startup scripts.
file="/var/www/boot_end.sh"
if [ -f "$file" ] ; then
    # quoted so paths with spaces work
    sh "$file"
fi
# Final summary: print the service URLs, then block forever so the container
# keeps running (all services are daemonized above).
echo ""
echo "WOHOOO, dockware/play:5.6.3 IS READY :) - let's get started"
echo "-----------------------------------------------------"
echo "DOCKWARE CHANGELOG: /var/www/CHANGELOG.md"
echo "PHP: $(php -v | grep cli)"
echo "Apache DocRoot: ${APACHE_DOCROOT}"
echo "ADMINER URL: http://localhost/adminer.php"
echo "MAILCATCHER URL: http://localhost/mailcatcher"
echo "PIMPMYLOG URL: http://localhost/logs"
echo "SHOP URL: http://localhost"
echo "ADMIN URL: http://localhost/backend"
echo ""
echo "What's new in this version? see the changelog for further details"
echo "https://www.shopware.com/de/changelog/"
echo ""
tail -f /dev/null
|
from .io_snaps import load_halo, load_snapshot, get_com
from .gadget_reader import read_snap
|
/**
* Copyright 2021 <NAME>, Co.Ltd
* Email: <EMAIL>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pamirs.attach.plugin.rabbitmq.interceptor;
import java.net.InetAddress;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.pamirs.attach.plugin.rabbitmq.RabbitmqConstants;
import com.pamirs.attach.plugin.rabbitmq.common.ChannelHolder;
import com.pamirs.attach.plugin.rabbitmq.common.ConfigCache;
import com.pamirs.attach.plugin.rabbitmq.common.ConsumerMetaData;
import com.pamirs.attach.plugin.rabbitmq.destroy.RabbitmqDestroy;
import com.pamirs.attach.plugin.rabbitmq.utils.AdminAccessInfo;
import com.pamirs.attach.plugin.rabbitmq.utils.HttpUtils;
import com.pamirs.pradar.ErrorTypeEnum;
import com.pamirs.pradar.Pradar;
import com.pamirs.pradar.PradarService;
import com.pamirs.pradar.PradarSwitcher;
import com.pamirs.pradar.ResultCode;
import com.pamirs.pradar.exception.PradarException;
import com.pamirs.pradar.exception.PressureMeasureError;
import com.pamirs.pradar.interceptor.SpanRecord;
import com.pamirs.pradar.interceptor.TraceInterceptorAdaptor;
import com.pamirs.pradar.pressurement.ClusterTestUtils;
import com.pamirs.pradar.pressurement.agent.shared.service.ErrorReporter;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.BasicProperties;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Command;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.Consumer;
import com.rabbitmq.client.DefaultConsumer;
import com.rabbitmq.client.impl.AMQConnection;
import com.rabbitmq.client.impl.AMQImpl.Basic;
import com.rabbitmq.client.impl.AMQImpl.Basic.Deliver;
import com.rabbitmq.client.impl.ChannelN;
import com.rabbitmq.client.impl.CredentialsProvider;
import com.rabbitmq.client.impl.recovery.AutorecoveringChannel;
import com.rabbitmq.client.impl.recovery.AutorecoveringConnection;
import com.rabbitmq.client.impl.recovery.RecordedConsumer;
import com.rabbitmq.client.impl.recovery.RecoveryAwareChannelN;
import com.shulie.instrument.simulator.api.annotation.Destroyable;
import com.shulie.instrument.simulator.api.listener.ext.Advice;
import com.shulie.instrument.simulator.api.reflect.Reflect;
import com.shulie.instrument.simulator.api.reflect.ReflectException;
import com.shulie.instrument.simulator.api.resource.SimulatorConfig;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @Author: mubai<chengjiacai @ shulie.io>
* @Date: 2020-03-09 17:44
* @Description:
*/
@Destroyable(RabbitmqDestroy.class)
public class ChannelNProcessDeliveryInterceptor extends TraceInterceptorAdaptor {
private static Logger logger = LoggerFactory.getLogger(ChannelNProcessDeliveryInterceptor.class.getName());
private final SimulatorConfig simulatorConfig;
    /**
     * @param simulatorConfig simulator configuration used later to look up
     *        the rabbitmq admin host/port/credentials
     */
    public ChannelNProcessDeliveryInterceptor(SimulatorConfig simulatorConfig) {
        this.simulatorConfig = simulatorConfig;
    }
private final static Cache<String, ShadowConsumeRunner> shadowConsumeRunners = CacheBuilder.newBuilder().build();
    /**
     * Whether this span is recorded on the calling (client) side.
     *
     * @return always false — message delivery is traced on the consumer side
     */
    @Override
    public boolean isClient(Advice advice) {
        return false;
    }
    /** @return the rabbitmq plugin name used in trace records. */
    @Override
    public String getPluginName() {
        return RabbitmqConstants.PLUGIN_NAME;
    }
    /** @return the rabbitmq plugin type constant used in trace records. */
    @Override
    public int getPluginType() {
        return RabbitmqConstants.PLUGIN_TYPE;
    }
@Override
public SpanRecord beforeTrace(Advice advice) {
Object[] args = advice.getParameterArray();
Command command = (Command)args[0];
Basic.Deliver method = (Deliver)args[1];
SpanRecord record = new SpanRecord();
record.setService(method.getExchange());
record.setMethod(method.getRoutingKey());
BasicProperties contentHeader = (BasicProperties)command.getContentHeader();
Map<String, Object> headers = contentHeader.getHeaders();
if (headers != null) {
Map<String, String> rpcContext = new HashMap<String, String>();
for (String key : Pradar.getInvokeContextTransformKeys()) {
Object value = headers.get(key);
if (value != null) {
rpcContext.put(key, value.toString());
}
}
record.setContext(rpcContext);
}
byte[] body = command.getContentBody();
record.setRequestSize(body.length);
record.setRequest(body);
return record;
}
@Override
public SpanRecord afterTrace(Advice advice) {
SpanRecord record = new SpanRecord();
record.setResultCode(ResultCode.INVOKE_RESULT_SUCCESS);
return record;
}
@Override
public SpanRecord exceptionTrace(Advice advice) {
SpanRecord record = new SpanRecord();
record.setResultCode(ResultCode.INVOKE_RESULT_FAILED);
record.setResponse(advice.getThrowable());
return record;
}
    /**
     * Pre-handler: decides whether this delivery is cluster-test (pressure
     * test) traffic before the span is recorded. The flag is derived, in
     * order, from the consumer tag, a dedicated message header, and finally
     * the routing key / exchange naming convention.
     */
    @Override
    public void beforeFirst(Advice advice) {
        Object[] args = advice.getParameterArray();
        String methodName = advice.getBehavior().getName();
        if (!PradarSwitcher.isClusterTestEnabled()) {
            logger.warn("PradarSwitcher isClusterTestEnabled false, {} to start shadow {} skip it",
                advice.getTargetClass().getName(), methodName);
            return;
        }
        // Consumer tags with the cluster-test prefix mark pressure traffic.
        AMQP.Basic.Deliver m = (AMQP.Basic.Deliver)args[1];
        validatePressureMeasurement(m.getConsumerTag());
        try {
            Command command = (Command)args[0];
            BasicProperties contentHeader = (BasicProperties)command.getContentHeader();
            Map<String, Object> headers = contentHeader.getHeaders();
            // Explicit header flag set by the producer takes precedence.
            if (null != headers && headers.get(PradarService.PRADAR_CLUSTER_TEST_KEY) != null && ClusterTestUtils
                .isClusterTestRequest(headers.get(PradarService.PRADAR_CLUSTER_TEST_KEY).toString())) {
                Pradar.setClusterTest(true);
            }
            if (!Pradar.isClusterTest()) {
                // Fall back to routing-key / exchange naming conventions.
                String routingKey = m.getRoutingKey();
                if (StringUtils.isNotBlank(routingKey) && ClusterTestUtils.isClusterTestRequest(routingKey)) {
                    Pradar.setClusterTest(true);
                }
                String exchange = m.getExchange();
                if (StringUtils.isNotBlank(exchange) && ClusterTestUtils.isClusterTestRequest(exchange)) {
                    Pradar.setClusterTest(true);
                }
            }
        } catch (Throwable e) {
            // Only escalate for pressure traffic; business traffic proceeds.
            if (Pradar.isClusterTest()) {
                throw new PressureMeasureError(e);
            }
        }
    }
    /**
     * Post-handler: after a business delivery, lazily creates (at most once
     * per shadow queue, via the cache) and starts a shadow consumer that
     * subscribes to the prefixed pressure-test queue.
     */
    @Override
    public void afterLast(final Advice advice) {
        // When Spring integration handles shadow consumers, do nothing here.
        if (ConfigCache.isWorkWithSpring()) {
            return;
        }
        Object[] args = advice.getParameterArray();
        AMQP.Basic.Deliver m = (AMQP.Basic.Deliver)args[1];
        String consumerTag = m.getConsumerTag();
        // Skip deliveries that are themselves shadow traffic or already mirrored.
        if (Pradar.isClusterTestPrefix(consumerTag) || ChannelHolder.existsConsumer(consumerTag)) {
            return;
        }
        try {
            final ConsumerMetaData consumerMetaData = getConsumerMetaData((Channel)advice.getTarget(), m.getConsumerTag(),
                m.getExchange(), m.getRoutingKey());
            final String ptQueue = Pradar.addClusterTestPrefix(consumerMetaData.getQueue());
            // Cache guarantees a single ShadowConsumeRunner per shadow queue.
            shadowConsumeRunners.get(ptQueue, new Callable<ShadowConsumeRunner>() {
                @Override
                public ShadowConsumeRunner call() throws Exception {
                    return new ShadowConsumeRunner((Channel)advice.getTarget(), consumerMetaData);
                }
            }).start();
        } catch (Throwable e) {
            reporterError(e, m.getRoutingKey(), consumerTag);
        }
    }
private static Consumer getConsumerFromChannel(Object channel, String consumerTag) {
Map<String, Consumer> _consumers = Reflect.on(channel).get("_consumers");
return _consumers.get(consumerTag);
}
private void validatePressureMeasurement(String consumerTag) {
try {
Pradar.setClusterTest(false);
consumerTag = StringUtils.trimToEmpty(consumerTag);
if (Pradar.isClusterTestPrefix(consumerTag)) {
Pradar.setClusterTest(true);
}
} catch (Throwable e) {
logger.error("rabbitmq validate pressure request err!", e);
if (Pradar.isClusterTest()) {
throw new PressureMeasureError(e);
}
}
}
    /**
     * Resolves (and caches per channel+tag) the metadata describing the
     * business consumer, so an equivalent shadow consumer can be created.
     * Works through reflection because the needed fields are private and
     * differ across rabbitmq client versions / channel implementations.
     */
    private ConsumerMetaData getConsumerMetaData(Channel channel, String consumerTag, String exchange,
        String routingKey) {
        // identityHashCode: cache key per concrete channel instance.
        final int key = System.identityHashCode(channel);
        Consumer consumer = getConsumerFromChannel(channel, consumerTag);
        ConsumerMetaData consumerMetaData = ConfigCache.getConsumerMetaData(key, consumerTag);
        if (consumerMetaData == null) {
            try {
                if (channel instanceof RecoveryAwareChannelN) {
                    // Unwrap to the outer auto-recovering channel when possible.
                    if (consumer instanceof DefaultConsumer) {
                        channel = Reflect.on(consumer).get("_channel");
                    } else {
                        Map<String, Consumer> consumers = Reflect.on(channel).get("_consumers");
                        Consumer consumerFromRecovery = consumers.get(consumerTag);
                        if (consumerFromRecovery.getClass().getName().contains("AutorecoveringChannel")) {
                            // anonymous/inner consumer: its this$0 is the channel
                            channel = Reflect.on(consumerFromRecovery).get("this$0");
                        }
                    }
                }
                if (channel instanceof AutorecoveringChannel) {
                    // Auto-recovering connections record consumers explicitly.
                    Connection connection = Reflect.on(channel).get("connection");
                    if (connection instanceof AutorecoveringConnection) {
                        Map<String, RecordedConsumer> consumers = Reflect.on(connection).get("consumers");
                        RecordedConsumer recordedConsumer = consumers.get(consumerTag);
                        consumerMetaData = new ConsumerMetaData(recordedConsumer, consumer);
                    }
                }
                if (channel instanceof ChannelN) {
                    // Plain channel: the queue name is not recorded locally, so
                    // ask the management HTTP API; fall back to the routing key.
                    logger.info("[rabbitmq] channel is ChannelN, will try to get queue name from rabbitmq admin!");
                    String queue = getQueueFromWebAdmin(channel, exchange, routingKey);
                    logger.info("[rabbitmq] channel is ChannelN, get queue name is {}", queue);
                    if (StringUtils.isEmpty(queue)) {
                        logger.warn(
                            "[rabbitmq] cannot find queueName, shadow consumer will subscribe routingKey instead!");
                        queue = routingKey;
                    }
                    consumerMetaData = new ConsumerMetaData(queue, consumerTag, consumer);
                }
            } catch (ReflectException e) {
                throw new PradarException("未支持的rabbitmq版本!无法获取订阅信息", e);
            }
            ConfigCache.putConsumerMetaData(key, consumerTag, consumerMetaData);
        }
        return consumerMetaData;
    }
private String getQueueFromWebAdmin(Channel channel, String exchange, String routingKey) {
try {
Connection connection = Reflect.on(channel).get("_connection");
if (connection instanceof AMQConnection) {
AdminAccessInfo adminAccessInfo = resolveAdminAccessInfo(connection);
if (!isDirectExchange(exchange, adminAccessInfo)) {
logger.warn("[RabbitMQ] exchange : {} is not a direct exchange(only support direct exchange)", exchange);
return null;
}
return resolveQueueByAdminResponse(exchange, adminAccessInfo, routingKey);
}
} catch (Throwable e) {
logger.warn("get queue from web admin fail!", e);
}
return null;
}
    /**
     * Builds access info for the RabbitMQ management API. Credentials and
     * host/port come from simulator config when present; otherwise they are
     * reflected out of the live connection (the default management port is
     * guessed as "1" + AMQP port, e.g. 5672 -> 15672).
     */
    private AdminAccessInfo resolveAdminAccessInfo(Connection connection) {
        String username = simulatorConfig.getProperty("rabbitmq.admin.username");
        String password = simulatorConfig.getProperty("rabbitmq.admin.password");
        if (username == null || password == null) {
            logger.warn(
                "[RabbitMQ] missing rabbitmq.admin username or password config, will use server username password "
                    + "instead");
            Object object = reflectSilence(connection, "credentialsProvider");
            if (object != null) {// newer clients expose a CredentialsProvider
                CredentialsProvider credentialsProvider = (CredentialsProvider)object;
                username = credentialsProvider.getUsername();
                password = credentialsProvider.getPassword();
            } else {
                // older clients keep plain username/password fields
                username = reflectSilence(connection, "username");
                password = reflectSilence(connection, "password");
                if (username == null || password == null) {
                    throw new PradarException("未支持的rabbitmq版本!无法获取rabbit连接用户名密码");
                }
            }
        }
        InetAddress inetAddress = connection.getAddress();
        String virtualHost = Reflect.on(connection).get("_virtualHost");
        String host = simulatorConfig.getProperty("rabbitmq.admin.host");
        Integer port = simulatorConfig.getIntProperty("rabbitmq.admin.port");
        if (host == null) {
            host = inetAddress.getHostAddress();
            logger.warn("[RabbitMQ] missing rabbitmq.admin.host config, will use server host {} instead", host);
        }
        if (port == null) {
            // heuristic: management port is the AMQP port with a leading "1"
            port = Integer.parseInt("1" + connection.getPort());
            logger.warn("[RabbitMQ] missing rabbitmq.admin.port config, will use default port {} instead", port);
        }
        return new AdminAccessInfo(host, port, username, password, virtualHost);
    }
private boolean isDirectExchange(String exchange, AdminAccessInfo adminAccessInfo) {
String url = String.format("/api/exchanges/%s/%s", adminAccessInfo.getVirtualHostEncode(), exchange);
String response = HttpUtils.doGet(adminAccessInfo, url).getResult();
JSONObject jsonObject = JSON.parseObject(response);
return "direct".equals(jsonObject.get("type"));
}
private String resolveQueueByAdminResponse(String exchange, AdminAccessInfo adminAccessInfo, String routingKey) {
String url = String.format("/api/exchanges/%s/%s/bindings/source", adminAccessInfo.getVirtualHostEncode(), exchange);
String response = HttpUtils.doGet(adminAccessInfo, url).getResult();
JSONArray jsonArray = JSON.parseArray(response);
for (Object o : jsonArray) {
JSONObject jsonObject = (JSONObject)o;
String configRoutingKey = jsonObject.getString("routing_key");
if (routingKey.equals(configRoutingKey)) {
return jsonObject.getString("destination");
}
}
return null;
}
    /**
     * Starts (at most once) a background thread that subscribes a shadow
     * ("PT"/cluster-test) consumer to the prefixed shadow queue, mirroring an
     * existing business consumer's metadata.
     */
    private static class ShadowConsumeRunner implements Runnable {
        private final Channel channel;
        private final ConsumerMetaData consumerMetaData;
        private final Thread thread;
        // Cluster-test-prefixed versions of the business consumer tag / queue name.
        private final String ptConsumerTag;
        private final String ptQueue;
        // Guards against starting the thread more than once.
        private final AtomicBoolean flag = new AtomicBoolean(false);

        public ShadowConsumeRunner(Channel channel, ConsumerMetaData consumerMetaData) {
            this.channel = channel;
            this.consumerMetaData = consumerMetaData;
            this.ptConsumerTag = Pradar.addClusterTestPrefix(consumerMetaData.getConsumerTag());
            this.ptQueue = Pradar.addClusterTestPrefix(consumerMetaData.getQueue());
            thread = new Thread(this,
                String.format("ShadowConsumeRunner for %s-%s", ptQueue, ptConsumerTag));
        }

        // Idempotent: only the first call actually starts the worker thread.
        public void start() {
            if (flag.compareAndSet(false, true)) {
                thread.start();
            }
        }

        @Override
        public void run() {
            String consumerTag = consumerMetaData.getConsumerTag();
            Consumer consumer = consumerMetaData.getConsumer();
            try {
                if (logger.isDebugEnabled()) {
                    logger.debug(
                        "RabbitMQ basicConsume(ptQueue:{},autoAck:{},consumerTag:{},noLocal:{},exclusive:{},"
                            + "arguments:{},"
                            + "ptConsumer:{})",
                        ptQueue, true, ptConsumerTag, false, false, null, consumer);
                }
                // Subscribe on the shadow queue; a null tag signals the shadow
                // channel was unavailable or already closed.
                String cTag = ChannelHolder.consumeShadowQueue(channel, ptQueue, true, ptConsumerTag, false,
                    consumerMetaData.isExclusive(), consumerMetaData.getArguments(), consumer);
                if (cTag != null) {
                    ChannelHolder.addConsumerTag(channel, consumerTag, cTag, ptQueue);
                } else {
                    reporterError(null, this.ptQueue, this.ptConsumerTag, "get shadow channel is null or closed.");
                }
            } catch (Throwable e) {
                reporterError(e, this.ptQueue, this.ptConsumerTag);
            }
        }
    }
private static void reporterError(Throwable e, String queue, String consumerTag) {
reporterError(e, queue, consumerTag, e.getMessage());
}
    /**
     * Reports a shadow-consumer subscription failure to the error reporter and
     * logs it (the throwable may be null for "no channel" style failures).
     *
     * @param e           cause, possibly null
     * @param queue       shadow queue whose subscription failed
     * @param consumerTag shadow consumer tag
     * @param cases       human-readable failure detail
     */
    private static void reporterError(Throwable e, String queue, String consumerTag, String cases) {
        ErrorReporter.buildError()
            .setErrorType(ErrorTypeEnum.MQ)
            .setErrorCode("MQ-0001")
            .setMessage("RabbitMQ消费端订阅队列失败!")
            .setDetail("RabbitMqPushConsumerInterceptor:queue:[" + queue + "]," + cases)
            .report();
        logger.error("RabbitMQ PT Consumer Inject failed queue:[{}] consumerTag:{}, {}", queue, consumerTag, cases, e);
    }
    /**
     * Reads a (possibly private) field off {@code target} by reflection,
     * returning null instead of throwing when the field does not exist —
     * field layouts differ across amqp-client versions, so absence is expected.
     */
    private static <T> T reflectSilence(Object target, String name) {
        try {
            return Reflect.on(target).get(name);
        } catch (ReflectException e) {
            logger.warn("can not find field '{}' from : '{}'", name, target.getClass().getName());
            return null;
        }
    }
}
|
import {
setSliderItemsPosition,
setSliderItemsChildWidth,
setActiveclassToCurrent,
addClassToElement,
} from '../utils';
import { setPageNumberOnChild, cloneNodeGenerator } from './partial';
/**
 * SliderTrailer performs the one-time DOM setup of the slider: sizes the slide
 * children, positions the track at the starting index, numbers pages, clones
 * edge slides for infinite mode, marks the active slide, and finally tags the
 * slider element "loaded".
 */
export default class SliderTrailer {
  /**
   * @param {Object} params
   * @param {Object} params.core - shared slider core exposing config + accessors
   */
  constructor(params) {
    const { core } = params;
    this.setCore(core);
    this.initialize();
  }

  // Store the shared core facade.
  setCore(core) {
    this.core = core;
  }

  getCore() {
    return this.core;
  }

  // One-time DOM initialization; order matters (see inline notes).
  initialize() {
    const {
      config: { responsive, slider, rtl, autoWidth, freeScroll },
      getInfinite,
      getSliderItems,
      getSlidesLength,
      getSliderItemWidth,
      getPerSlide,
      getSlideSize,
      getSliderMainWidth,
      getIndex,
      setIndex,
    } = this.core;
    const infinite = getInfinite();
    const sliderItems = getSliderItems();
    const slidesLength = getSlidesLength();
    const slideSize = getSlideSize();
    const sliderItemWidth = getSliderItemWidth();
    const perSlide = getPerSlide();
    const sliderMainWidth = getSliderMainWidth();
    const index = getIndex();
    // set width per slide
    setSliderItemsChildWidth({
      sliderItems,
      slider,
      responsive,
      autoWidth,
    });
    // init slider position; setSliderItemsPosition returns the resolved index,
    // which is written back into the core via setIndex.
    setIndex(
      setSliderItemsPosition({
        indexItem: index,
        sliderItemWidth,
        sliderItems,
        rtl,
      }),
    );
    if (!autoWidth) {
      setPageNumberOnChild({ sliderItems, responsive });
      // Clone group of slide from infinite carousel
      if (infinite) {
        const cloneNodeGeneratorParams = {
          perSlide,
          sliderItems,
          wrapper: slider,
        };
        cloneNodeGenerator(cloneNodeGeneratorParams);
      }
      // getIndex() is re-read here deliberately: setIndex above may have
      // changed it from the initially captured `index`.
      setActiveclassToCurrent({
        sliderItems,
        perSlide,
        slideSize,
        sliderMainWidth,
        index: getIndex(),
        infinite,
        slidesLength,
        autoWidth,
        freeScroll,
      });
    }
    // add loaded class to main slide after init
    const classItemParams = {
      item: slider,
      className: 'loaded',
    };
    addClassToElement(classItemParams);
  }
}
|
/**
* ui/EventDetailView.js
* Event Detail View
*/
module.exports = function (o) {
var g = this;
var fontSize = g.config.load("font-size");
var outer = Ti.UI.createView();
var view = Ti.UI.createView({
layout: 'vertical'
});
outer.add(view);
var header = Ti.UI.createView({
height: g.dip(50),
backgroundColor: '#177bbd'
});
var headerLabel = Ti.UI.createLabel({
height: g.dip(50),
top: 0,
left: g.dip(5),
text: 'イベント詳細',
color: '#fff',
font: {fontSize: g.dip(18), fontWeight: 'bold'}
});
header.add(headerLabel);
view.add(header);
var menu = {
"カレンダーへ登録": {
click: function () {
g.calendar.addEvent({
title: o.title,
location: o.where,
description: o.content,
beginTime: new Date(o.when.start),
endTime: new Date(o.when.end),
allDay: o.when.allday
});
}
}
};
function createTextLine(keyStr, valStr) {
var key = Ti.UI.createLabel({
top: 0,
left: g.dip(5),
height: g.dip(32),
text: keyStr,
color: '#222',
font: {fontSize: g.dip(16 + fontSize)}
});
view.add(key);
var val = Ti.UI.createLabel({
top: g.dip(-27),
left: g.dip(50 + fontSize * 2),
text: valStr,
color: '#555',
font: {fontSize: g.dip(16 + fontSize)}
});
view.add(val);
return {
key: key,
val: val
};
}
function label(str, parent) {
var text = Ti.UI.createLabel({
left: 0,
width: Ti.UI.FILL,
text: str,
color: '#222',
font: {fontSize: g.dip(16 + fontSize)}
});
(parent ? parent : view).add(text);
return text;
}
// 2012-05-01T12:00:00.000+09:00 -> ['2012-05-01', '12:00'] OR 2012-05-01 -> ['2012-05-01', '']
function dateFormat(date_str) {
var date_arr = date_str.split('.')[0].split(':');
date_arr = date_arr.length === 1
? [date_arr, '']
: date_arr.slice(0, -1).join(':').split('T');
return date_arr;
}
var d = {
start: dateFormat(o.when.start),
end: dateFormat(o.when.end)
};
if (o.when.allday)
d.start[1] = d.end[1] = "";
var timeLabel = String(d.start[0] + ' ' + d.start[1] + ' ~ ' + d.end[0] + ' ' + d.end[1]);
var title = label(o.title);
title.top = g.dip(3);
title.font = {fontSize: g.dip(18 + fontSize)};
title.backgroundColor = '#f9f9f9';
var hr = Ti.UI.createView({
top: g.dip(3),
height: g.dip(3),
backgroundColor: '#177bbd'
});
view.add(hr);
var time = createTextLine('時間 :', timeLabel);
time.val.font = {fontSize: g.dip(15)};
time.val.color = '#555';
var place = createTextLine('場所 :', o.where || " ");
var content = createTextLine('内容 :', o.content || " ");
content.val.height = 'auto';
var link = label('Googleカレンダーへのリンク');
link.top = g.dip(15);
link.left = g.dip(5);
link.color = '#04b';
link.addEventListener('click', function () {
Ti.Platform.openURL(o.link);
});
// share message
var message = o.title + ' ' + o.link + ' #IT勉強会カレンダー';
if (Ti.Platform.Android) {
// Auto link
content.val.autoLink = Titanium.UI.Android.LINKIFY_WEB_URLS;
// 地図検索
var intent = Ti.Android.createIntent({
action: Ti.Android.ACTION_VIEW,
data: 'geo:0,0?q=' + encodeURIComponent(o.where)
});
chooser = Ti.Android.createIntentChooser(intent, 'アプリケーションを選択');
place.val.color = '#04b';
place.val.addEventListener('click', function () {
Ti.Android.currentActivity.startActivity(chooser);
});
// intent の多重発行防止
var intent_enable = true;
var intent_call = function (callback) {
if (intent_enable) {
intent_enable = false;
setTimeout(function () {
intent_enable = true;
}, 500);
callback();
}
};
// 共有
var intent_share = Ti.Android.createIntent({
action: Ti.Android.ACTION_SEND,
type: 'text/plain'
});
intent_share.putExtra(Ti.Android.EXTRA_TEXT, message);
menu["共有"] = {
click: function () {
intent_call(function () {
Ti.Android.currentActivity.startActivity(intent_share);
});
}
};
} else {
// for iOS
// URL チェック
var urls = o.content.split(/\s/).filter(function (str) {
return /^https?:\/\/.+\..+$/.test(str);
});
if (urls.length > 0) {
// リンク追加
content.val.color = '#04b';
content.val.addEventListener('click', function () {
Ti.Platform.openURL(urls[0]);
});
}
menu["Twitter 共有"] = {
click: function () {
var canOpen = Ti.Platform.openURL('twitter://post?message=' + encodeURIComponent(message));
if (! canOpen) {
var dialog = g.alert('Twitter 共有', 'Twitter 公式アプリが必要です。');
dialog.addEventListener("click", function () {
Ti.Platform.openURL('https://itunes.apple.com/ja/app/twitter/id333903271');
});
}
}
};
}
// set menu
g.createMenu(outer, menu, true);
return outer;
}; |
#if !defined(BOOST_PP_IS_ITERATING)

///// header body

// NOTE: this header #includes ITSELF via BOOST_PP_ITERATE() to generate one
// advance_forward<N> specialization per N in [0, BOOST_MPL_LIMIT_UNROLLING];
// the #elif sections below are the per-iteration bodies, not dead code.

#ifndef BOOST_MPL_AUX_ADVANCE_FORWARD_HPP_INCLUDED
#define BOOST_MPL_AUX_ADVANCE_FORWARD_HPP_INCLUDED

// Copyright <NAME> 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/mpl for documentation.

// $Source: /physbam_repository/External_Libraries/Archives/boost/boost/mpl/aux_/advance_forward.hpp,v $
// $Date: 2007/02/12 18:25:26 $
// $Revision: 1.1 $

#if !defined(BOOST_MPL_PREPROCESSING_MODE)
#   include <boost/mpl/next.hpp>
#   include <boost/mpl/apply_wrap.hpp>
#endif

#include <boost/mpl/aux_/config/use_preprocessed.hpp>

#if !defined(BOOST_MPL_CFG_NO_PREPROCESSED_HEADERS) \
    && !defined(BOOST_MPL_PREPROCESSING_MODE)

#   define BOOST_MPL_PREPROCESSED_HEADER advance_forward.hpp
#   include <boost/mpl/aux_/include_preprocessed.hpp>

#else

#   include <boost/mpl/limits/unrolling.hpp>
#   include <boost/mpl/aux_/nttp_decl.hpp>
#   include <boost/mpl/aux_/config/eti.hpp>

#   include <boost/preprocessor/iterate.hpp>
#   include <boost/preprocessor/cat.hpp>
#   include <boost/preprocessor/inc.hpp>

namespace boost { namespace mpl { namespace aux {

// forward declaration
template< BOOST_MPL_AUX_NTTP_DECL(long, N) > struct advance_forward;

#   define BOOST_PP_ITERATION_PARAMS_1 \
    (3,(0, BOOST_MPL_LIMIT_UNROLLING, <boost/mpl/aux_/advance_forward.hpp>))
#   include BOOST_PP_ITERATE()

// implementation for N that exceeds BOOST_MPL_LIMIT_UNROLLING:
// advance by one full unrolled chunk, then recurse on the remainder
// (clamped at 0 to terminate).
template< BOOST_MPL_AUX_NTTP_DECL(long, N) >
struct advance_forward
{
    template< typename Iterator > struct apply
    {
        typedef typename apply_wrap1<
              advance_forward<BOOST_MPL_LIMIT_UNROLLING>
            , Iterator
            >::type chunk_result_;

        typedef typename apply_wrap1<
              advance_forward<(
                (N - BOOST_MPL_LIMIT_UNROLLING) < 0
                    ? 0
                    : N - BOOST_MPL_LIMIT_UNROLLING
                )>
            , chunk_result_
            >::type type;
    };
};

}}}

#endif // BOOST_MPL_CFG_NO_PREPROCESSED_HEADERS
#endif // BOOST_MPL_AUX_ADVANCE_FORWARD_HPP_INCLUDED

///// iteration, depth == 1

// Generates advance_forward<i_>: applies next<> i_ times via the depth-2 loop.
#elif BOOST_PP_ITERATION_DEPTH() == 1
#define i_ BOOST_PP_FRAME_ITERATION(1)

template<>
struct advance_forward< BOOST_PP_FRAME_ITERATION(1) >
{
    template< typename Iterator > struct apply
    {
        typedef Iterator iter0;

#if i_ > 0
#   define BOOST_PP_ITERATION_PARAMS_2 \
    (3,(1, i_, <boost/mpl/aux_/advance_forward.hpp>))
#   include BOOST_PP_ITERATE()
#endif
        typedef BOOST_PP_CAT(iter,i_) type;
    };

#if defined(BOOST_MPL_CFG_MSVC_60_ETI_BUG)
    /// ETI workaround
    template<> struct apply<int>
    {
        typedef int type;
    };
#endif
};

#undef i_

///// iteration, depth == 2

// Emits: typedef typename next<iter{k-1}>::type iter{k};
#elif BOOST_PP_ITERATION_DEPTH() == 2

#   define AUX778076_ITER_0 BOOST_PP_CAT(iter,BOOST_PP_DEC(BOOST_PP_FRAME_ITERATION(2)))
#   define AUX778076_ITER_1 BOOST_PP_CAT(iter,BOOST_PP_FRAME_ITERATION(2))

    typedef typename next<AUX778076_ITER_0>::type AUX778076_ITER_1;

#   undef AUX778076_ITER_1
#   undef AUX778076_ITER_0

#endif // BOOST_PP_IS_ITERATING
|
package com.song.timer.core.support;
import com.song.timer.config.TriggerContext;
import com.song.timer.core.TimerTrigger;
import com.song.timer.support.CronParser;
import java.util.Date;
/**
* Created by song on 2017/6/25.
*/
/**
 * Cron-based {@link TimerTrigger}: derives the next fire time from a cron
 * expression via the injected {@link CronParser}.
 */
public class CronTimerTrigger implements TimerTrigger {

    private final CronParser cronParser;

    public CronTimerTrigger(CronParser cronParser) {
        this.cronParser = cronParser;
    }

    /**
     * Computes the next execution time from the trigger history:
     * first run uses "now"; afterwards the last completion time is used,
     * except that the last scheduled slot wins when it precedes the
     * completion time (original note: covers ahead-of-schedule execution).
     */
    @Override
    public Date nextExecutionTime(TriggerContext triggerContext) {
        Date reference = triggerContext.getLastCompletionTime();
        if (reference == null) {
            // No history yet: anchor the cron calculation on the current time.
            reference = new Date();
        } else {
            Date scheduled = triggerContext.getLastScheduledExecutionTime();
            if (scheduled != null && scheduled.before(reference)) {
                // Prefer the planned slot when it precedes actual completion.
                reference = scheduled;
            }
        }
        return cronParser.next(reference);
    }
}
|
// List-view config for "Customer Delighted or Disappointed": colors the
// indicator by `reason` and makes it filter the list on that same reason.
frappe.listview_settings['Customer Delighted or Disappointed'] = {
    add_fields: ["name", "reason", "customer"],
    get_indicator: function(doc) {
        if (doc.reason == "Customer Disappointed") {
            // BUGFIX: the filter string read "Customer Dissapointed" (typo), so
            // clicking the red indicator filtered on a value that never occurs.
            return [__("Customer Disappointed"), "red", "reason,=,Customer Disappointed"];
        } else {
            return [__("Customer Delighted"), "green", "reason,=,Customer Delighted"];
        }
    },
    right_column: "customer"
};
'use strict';
const { EventEmitter } = require('events');
const { Duplex, Readable, Transform, Writable } = require('stream');
exports.TypedEmitter = function TypedEmitter(_, Base = EventEmitter) {
return Base;
};
exports.TypedReadable = function TypedReadable(_, Base = Readable) {
return Base;
};
exports.TypedWritable = function TypedWritable(_, Base = Writable) {
return Base;
};
exports.TypedDuplex = function TypedDuplex(W, R, Base = Duplex) {
return Base;
};
exports.TypedTransform = function TypedTransform(W, R, Base = Transform) {
return Base;
};
|
import nltk
import spacy
import random
# NLTK basic tokenizer (Penn Treebank tokenization rules)
tokenizer = nltk.tokenize.TreebankWordTokenizer()
# load spaCy NLP model
# NOTE(review): requires `python -m spacy download en_core_web_sm` to have been
# run beforehand; spacy.load raises OSError otherwise — confirm deployment setup.
nlp = spacy.load('en_core_web_sm')
# collect the user input
def get_input():
    """Prompt the user on stdin and return the raw reply string."""
    user_input = input('Hello! How may I help you?\n')
    return user_input
# tag the user input
def tag_input(text=None):
    """Tokenize and POS-tag input text.

    Args:
        text: Text to tag. Defaults to the module-level ``user_input``
            global, preserving the original no-argument call sites.

    Returns:
        list[tuple[str, str]]: ``(token, Penn-Treebank-tag)`` pairs.
    """
    if text is None:
        # Backward-compatible fallback to the global set by the main script.
        text = user_input
    tokenized_user_input = tokenizer.tokenize(text)
    return nltk.pos_tag(tokenized_user_input)
# parse the user input using spaCy
def parse_input(text=None):
    """Run the spaCy pipeline over input text.

    Args:
        text: Text to parse. Defaults to the module-level ``user_input``
            global, preserving the original no-argument call sites.

    Returns:
        The spaCy ``Doc`` for the text.
    """
    if text is None:
        # Backward-compatible fallback to the global set by the main script.
        text = user_input
    return nlp(text)
# shape the AI-assisted response
def generate_response(tagged_user_input):
    """Pick a canned response keyed off the first token's POS tag.

    Args:
        tagged_user_input: List of ``(token, tag)`` pairs as returned by
            ``tag_input``; may be empty.

    Returns:
        str: A response sentence; for 'NN' the noun itself is interpolated.
    """
    responses = {
        'JJ': ['What do you like about it ?'],
        'VB': ['Can you tell me more about it ?'],
        'NN': ['Can you tell me more about the %s'],
        'default': ['I do not understand. Could you please explain?'],
    }
    # BUGFIX: empty input used to raise IndexError; treat it as "not understood".
    if not tagged_user_input:
        return random.choice(responses['default'])
    token, tag = tagged_user_input[0]
    if tag in responses:
        response = random.choice(responses[tag])
        if tag == 'NN':
            # 'NN' templates carry a %s placeholder for the noun.
            response = response % token
    else:
        response = random.choice(responses['default'])
    return response
# main part of the program
# NOTE(review): runs at import time; tag_input()/parse_input() read the
# module-level `user_input` assigned here, so this call order matters.
user_input = get_input()
tagged_user_input = tag_input()
parsed_user_input = parse_input()
response = generate_response(tagged_user_input)
print(response)
import re
def is_valid_email(email):
    """Return True if *email* matches a simple user@domain.tld pattern.

    Local part and domain labels are word characters optionally separated by
    single dots/hyphens; the TLD is at least two word characters (the old
    ``{2,3}`` cap wrongly rejected modern TLDs such as ``.info``).

    Args:
        email: Candidate address string.

    Returns:
        bool: True when the whole string matches the pattern.
    """
    # Raw string avoids invalid-escape warnings (Python 3.12+); fullmatch
    # replaces the redundant ^...$ anchors combined with re.search.
    pattern = r'\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,})+'
    return re.fullmatch(pattern, email) is not None
package com.mycompany.myapp2;
import java.util.*;
import java.net.*;
import java.io.*;
import java.util.regex.*;
/** Decode result: parsed tag map plus the final combined lyric text. */
class Lrc_Parser_Result
{
    // All tag name -> value pairs collected during parsing.
    HashMap<String,String> data;
    String Title,Album,Artist,Lyric;
    // NetEase music id the lyric belongs to.
    int id;
    // True when Lyric/Title/Artist were all populated and non-empty.
    boolean is_Finish_Parse;
}
/** Track metadata scraped from the song web page; any field may remain null. */
class Lrc_Parser_Info
{
    String Artist=null,Title=null,Album=null;
}
/** Which lyric variant(s) to keep: original, translation, or both. */
enum Option_Lrc_Type
{
    Raw_Lrc,
    Trans_Lrc,
    Both_Raw_And_Trans_Lrc
}
/**
 * How raw and translated lines are merged: appended as separate line groups
 * ("New_Line_*") or joined per-timestamp onto one line ("Side_By_Side_*"),
 * with either variant leading.
 */
enum Option_Lrc_Combine_Type
{
    New_Line_And_Raw_Lrc_First,
    New_Line_And_Trans_Lrc_First,
    Side_By_Side_And_Raw_Lrc_First,
    Side_By_Side_And_Trans_Lrc_First
}
/** Parser behaviour switches: network fallbacks, emitted tags, merge mode. */
class Lrc_Parser_Option
{
    String Tmp_Path="/sdcard/Lrc_Parser_Tmp/";
    // Emit extra tags ("#", "musicId") at the top of the output.
    boolean ExtraTag = true;
    // Emit standard LRC tags (ti/ar/al/...). ("Nomal" spelling kept: public field.)
    boolean NomalTag = true;
    boolean ForceGetTagFormNet = false;
    boolean ForceGetLrcFromNet=false;
    boolean NotToGetTagFromNet=false;
    boolean NotToGetLrcFromNet=false;
    Option_Lrc_Type Lrc_Type = Option_Lrc_Type.Both_Raw_And_Trans_Lrc;
    Option_Lrc_Combine_Type Lrc_Combine_Type =
        Option_Lrc_Combine_Type.New_Line_And_Raw_Lrc_First;
}
/** Regular expressions plus their capture-group indices used by the parser. */
class Lrc_Parser_Expr
{
    int id=0;
    // Splits "[mm:ss.xx]text" into timestamp and text (group 3 = text).
    String expr_split_lrc="(\\[\\d{2}\\d*\\:\\d{2}(\\.\\d*)?\\])(.*)";
    int lrc_split_id=3;
    // One timestamped lyric line, up to a literal "\n" escape in the JSON blob.
    String expr_lrc="(\\[\\d{2}\\d*\\:\\d{2}(\\.\\d*)?\\](\\s*.*?))(?=\\s*\\\\n)";
    int lrc_id = 1;
    // "[name:value]" LRC header tag.
    String expr_tag="\\[\\s*([^\\d]+?)\\s*\\:\\s*(.+?)\\s*\\]";
    int tag_name_id=1;
    int tag_value_id=2;
    // "[mm:ss.cc]" timestamp parts (minute/second/fraction).
    String expr_lrc_time="\\[(\\d{2}\\d*)\\:(\\d{2})(\\.(\\d*))?\\]";
    int lrc_time_min=1;
    int lrc_time_sec=2;
    int lrc_time_msec=4;
    // "<title>Title - Artist ...</title>" on the music web page.
    // NOTE(review): the alternation "((?=()|..." looks unbalanced/odd — verify
    // this pattern still compiles and matches the current page markup.
    String expr_online_info="(<title>)((.+?)\\s-\\s(.+?))((?=()|(?=\\s*-?\\s*网易云音乐)|(?=</title>))";
    int online_title_id=3;
    int online_artist_id=4;
    // "key":"value" pairs inside the embedded JSON blob.
    String expr_data="\"\\s*([\\w\\d\"-]+)\"\\s*\\:\\s*(((\"\")|(\"(.+?)\")|((-?[\\d\\w]+)))(?=(\\})|(\\,\"\\s*([\\w\\d\"-]+))\"\\s*\\:))";
    int sub_data_name_id = 1;
    int sub_data_value_id = 2; // or 3
    // data-res-name / data-res-author HTML attributes carrying title/artist.
    String expr_info_name="data\\-res\\-name\\=\"(.+)\"";
    int info_name_id=1;
    String expr_info_artist="data-res-author=\"(.+)\"";
    int info_artist_id=1;
    //boolean LoadExprFormFile(String absolute_path){return false;}
}
/**
 * Parses NetEase-cloud-music style lyric payloads: extracts the embedded JSON
 * key/value data, the raw and translated timestamped lyric lines (optionally
 * fetching either from the web API), merges them per {@link Lrc_Parser_Option},
 * and produces a {@link Lrc_Parser_Result}.
 */
class Lrc_Parser{
    // Last decode output (not assigned anywhere in this version).
    String last_result=null;
    // Parsed tag/JSON name -> value pairs.
    HashMap < String, String > data=new HashMap<String,String>();
    // Original-language lyric lines.
    ArrayList < String > raw_lrc=new ArrayList<String>();
    // Translated lyric lines.
    ArrayList<String>trans_lrc=new ArrayList<String>();
    // Combined output lines.
    ArrayList<String>lrc=new ArrayList<String>();
    Lrc_Parser_Option option=new Lrc_Parser_Option();
    Lrc_Parser_Expr expr=new Lrc_Parser_Expr();

    private Lrc_Parser(){}

    // Either argument may be null, in which case the defaults above are kept.
    public Lrc_Parser(Lrc_Parser_Option o,Lrc_Parser_Expr e){
        if(o!=null)
            option=o;
        if(e!=null)
            expr=e;
    }

    /**
     * Fetches the raw lyric for the given song id from the media API and feeds
     * it through ParserLrc; returns true when at least one raw line was found.
     */
    private boolean GetLrcFromNet(int id){
        String buf=new String();
        String addr="http://music.163.com/api/song/media?id="+id;
        try{
            URL url = new URL(addr);
            HttpURLConnection httpCon = (HttpURLConnection) url.openConnection();
            String response = httpCon.getResponseMessage();
            buf+=("HTTP/1.x " + httpCon.getResponseCode() + " " + response + "\n");
            InputStream in = new BufferedInputStream(httpCon.getInputStream());
            Reader r = new InputStreamReader(in);
            int c;
            // Char-at-a-time read of the whole body into buf.
            while ((c = r.read()) != -1) {
                buf+=(String.valueOf((char) c));
            }
            in.close();
        }catch(Exception e){
            e.fillInStackTrace();
            return false;
        }
        ParserLrc(buf,true);
        return ((raw_lrc.size()>0)?true:false);
    }

    /**
     * Fetches the translated lyric ("tv=-1") for the given song id from the
     * lyric API; returns true when at least one translated line was found.
     */
    private boolean GetLrcFromNet_tv(int id){
        String buf=new String();
        String addr="http://music.163.com/api/song/lyric?id="+id+"&tv=-1";
        try{
            URL url = new URL(addr);
            HttpURLConnection httpCon = (HttpURLConnection) url.openConnection();
            /*vector<pair<string,string>>({pair<string,string>("Cookie","appver=3.1.4"),
             pair<string,string>("Referer","http://music.163.com")})*/
            // The lyric API requires these headers to answer.
            httpCon.addRequestProperty("Cookie","appver=3.1.4");
            httpCon.addRequestProperty("Referer","http://music.163.com");
            String response = httpCon.getResponseMessage();
            buf+=("HTTP/1.x " + httpCon.getResponseCode() + " " + response + "\n");
            InputStream in = new BufferedInputStream(httpCon.getInputStream());
            Reader r = new InputStreamReader(in);
            int c;
            while ((c = r.read()) != -1) {
                buf+=(String.valueOf((char) c));
            }
            in.close();
        }catch(Exception e){
            e.fillInStackTrace();
            return false;
        }
        try{
            if(buf.length()==0){
                Exception e=new Exception("Cant get html from net :"+addr);
                e.printStackTrace();
                throw e;
            }
        }catch(Exception e){
            // Intentionally swallowed: an empty body only logs, it doesn't abort.
        }
        ParserLrc(buf,false);
        return ((trans_lrc.size()>0)?true:false);
    }

    /**
     * Scrapes title/artist from the song page, either by id or by a direct URL
     * (when id <= 0). Tries the data-res-* attributes first, then the page title.
     */
    private Lrc_Parser_Info GetTagFromNet(int id,String _url)throws Exception{
        String buf=new String();
        Lrc_Parser_Info info=new Lrc_Parser_Info();
        String addr=null;
        if(id>0)
            addr="http://music.163.com/m/song/"+id+"/?userid=0";
        else
            addr=_url;
        URL url = new URL(addr);
        HttpURLConnection httpCon = (HttpURLConnection) url.openConnection();
        httpCon.setConnectTimeout(30000);
        String response = httpCon.getResponseMessage();
        buf+=("HTTP/1.x " + httpCon.getResponseCode() + " " + response + "\n");
        InputStream in = new BufferedInputStream(httpCon.getInputStream());
        Reader r = new InputStreamReader(in);
        int c;
        while ((c = r.read()) != -1) {
            buf+=(String.valueOf((char) c));
        }
        in.close();
        if(buf.length()==0){
            Exception e=new Exception("Cant get html from net :"+addr);
            e.printStackTrace();
            throw e;
        }
        // First pass: data-res-name / data-res-author attributes.
        // NOTE(review): both branches call result.group() (whole match), not
        // result.group(i) — verify the captured values are what's intended.
        Pattern reg=Pattern.compile(expr.expr_info_name);
        Matcher result=reg.matcher(buf);
        int count=0;
        while(result.find()){
            for(int i=0;i<=result.groupCount();i++){
                if(i==expr.info_artist_id){
                    info.Artist=result.group();
                    continue;
                }
                if(i==expr.info_name_id){
                    info.Title=result.group();
                }
            }
            count++;
        }
        // Fallback: parse the <title> element.
        if(info.Title==null||info.Artist==null){
            reg=Pattern.compile(expr.expr_online_info);
            result=reg.matcher(buf);
            while(result.find()){
                info.Title=result.group(expr.online_title_id);
                info.Artist=result.group(expr.online_artist_id);
                count++;
            }
        }
        return info;
    }

    // Extracts timestamped lyric lines from buffer into raw_lrc or trans_lrc.
    private void ParserLrc(String buffer,boolean isRaw){
        ArrayList<String> m=(isRaw?raw_lrc:trans_lrc);
        Pattern reg=Pattern.compile(expr.expr_lrc);
        Matcher result=reg.matcher(buffer);
        int l_id=expr.lrc_id;
        while(result.find()){
            m.add(result.group(l_id));
        }
    }

    // Extracts "key":"value" pairs from the embedded JSON blob into `data`.
    private void ParserData(String buffer){
        int n_id=expr.sub_data_name_id;
        int v_id=expr.sub_data_value_id;
        Pattern reg=Pattern.compile(expr.expr_data);
        Matcher result=reg.matcher(buffer);
        while(result.find()){
            data.put(result.group(n_id),result.group(v_id));
        }
    }

    /**
     * Parses standard "[name:value]" LRC tags into `data`; when title/artist
     * ("ti"/"ar") are missing (or ForceGetTagFormNet is set) they are scraped
     * from the web using the musicId or the "#" URL tag.
     */
    private void ParserNomalTag(String buffer,Lrc_Parser_Option opt)throws Exception{
        Lrc_Parser_Info info=null;
        int n_id=expr.tag_name_id,v_id=expr.tag_value_id;
        Pattern reg=Pattern.compile(expr.expr_tag);
        Matcher result=reg.matcher(buffer);
        while(result.find()){
            data.put(result.group(n_id),result.group(v_id));
        }
        int _id=-1;
        String weburl="";
        boolean hasTag=(data.containsKey("ti")&&data.containsKey("ar"));
        if(((!(hasTag))||opt.ForceGetTagFormNet)){
            if(data.containsKey("musicId")){
                _id=Integer.parseInt(data.get("musicId"));
            }else{ if(data.containsKey("#")){
                weburl=data.get("#");
            }else{
                weburl="N/A";
            }
            }
            if(!opt.NotToGetTagFromNet)
                info=GetTagFromNet(_id,weburl);
            // NOTE(review): when NotToGetTagFromNet is set, info stays null and
            // this throws — confirm that is the intended behaviour.
            if(info==null)
                throw new Exception("-Cant get info from GetTagFromNet()");
            data.put("ti",info.Title);
            data.put("ar",info.Artist);
        }
    }

    /**
     * Converts a "[mm:ss.cc]" timestamp to milliseconds; -1 when none found.
     * NOTE(review): the fractional group is parsed unconditionally — a
     * timestamp without ".cc" would make Long.parseLong(null) throw; confirm
     * inputs always carry the fractional part.
     */
    private long CoverLrcTime(String str){
        Pattern reg=Pattern.compile(expr.expr_lrc_time);
        Matcher result=reg.matcher(str);
        while(result.find()){
            long min=Long.parseLong(result.group(expr.lrc_time_min)),sec=Long.parseLong(result.group(expr.lrc_time_sec)),msec=Long.parseLong(result.group(expr.lrc_time_msec));
            return min*60000+sec*1000+msec;
        }
        return -1;
    }

    /**
     * Merges raw_lrc and trans_lrc into `lrc` per the combine type: either
     * appended as two line groups, or joined per-timestamp side by side
     * ("first/second" separated by '/'), sorted by timestamp.
     */
    private void CombineLrc(Lrc_Parser_Option opt){
        ArrayList<String> _f=null,_a=null;
        // Only one variant present: use it verbatim.
        if((raw_lrc.size()!=0)&&(trans_lrc.size()==0)){
            lrc=raw_lrc;
            return ;
        }
        if((trans_lrc.size()!=0)&&(raw_lrc.size()==0)){
            lrc=(trans_lrc);
            return;
        }
        try{
            if(!(trans_lrc.size()!=0||raw_lrc.size()!=0))
                throw new Exception("Combine_Lrc() : No any lrc in trans_lrc or raw_lrc.");
        }catch(Exception e){
            e.fillInStackTrace();
        }
        // Both variants exist: decide ordering and layout.
        boolean isNewLine=true;
        switch(opt.Lrc_Combine_Type){
            case New_Line_And_Raw_Lrc_First:
                _f=raw_lrc;
                _a=trans_lrc;
                isNewLine=true;
                break;
            case New_Line_And_Trans_Lrc_First:
                _f=trans_lrc;
                _a=raw_lrc;
                isNewLine=true;
                break;
            case Side_By_Side_And_Raw_Lrc_First:
                _f=raw_lrc;
                _a=trans_lrc;
                isNewLine=false;
                break;
            case Side_By_Side_And_Trans_Lrc_First:
                _f=trans_lrc;
                _a=raw_lrc;
                isNewLine=false;
                break;
            default:
                //throw new Exception("Combine_Lrc() : unknown combine type");
        }
        if(isNewLine){
            // Simple concatenation: first group, then the other.
            if(_f!=null)
                for(String i :_f)
                    lrc.add(i);
            if(_a!=null)
                for(String i :_a)
                    lrc.add(i);
            System.out.println();
        }else{
            // Side-by-side: map timestamp -> "[time]first/second".
            HashMap<Long,String> r_lrc=new HashMap<Long,String>();
            /*smatch sm;
             regex reg(expr->expr_split_lrc);*/
            Pattern reg=Pattern.compile((expr.expr_split_lrc));
            Matcher result=null;
            long _t=-1;
            String _i,_u=null,_o;
            int _c=0;
            for(int i=0;i<_f.size();i++){
                _i=_f.get(i);
                _t=CoverLrcTime(_i);
                r_lrc.put(_t,_i);
                _c++;
            }
            for(int i=0;i<_a.size();i++){
                _i=_a.get(i);
                _t=CoverLrcTime(_i);
                // No matching timestamp in the first group: keep as its own line.
                if(!r_lrc.containsKey(_t)){
                    r_lrc.put(_t,_i);
                    continue;
                }
                /*
                 if(!regex_search(_i,sm,reg)){
                 // printf("\ntime %d - %s cant match",_t,_i.c_str());
                 continue; List<
                 }*/
                // Strip the timestamp off the second line, append text after '/'.
                result=reg.matcher(_i);
                while(result.find()){
                    _u=result.group(expr.lrc_split_id);
                }
                _o=r_lrc.get(_t);
                _o+="/"+_u;
                r_lrc.put(_t,_o);
            }
            // Emit in timestamp order.
            List<Map.Entry<Long,String>> list=new ArrayList<Map.Entry<Long,String>>(r_lrc.entrySet());
            Collections.sort(list,new Comparator<Map.Entry<Long,String>>(){
                @Override
                public int compare(Map.Entry<Long,String> k1,Map.Entry<Long,String> k2){
                    return (int)((Long)(k1.getKey()) - (Long)(k2.getKey()));
                }
            });
            for(Map.Entry<Long,String> _d : list){
                lrc.add(_d.getValue());
            }
            return;
        }
    }

    /**
     * Main entry point: parses the given text (embedded JSON + LRC tags),
     * obtains raw/translated lyrics locally or from the network per `opt`,
     * combines them, prepends header tags, and builds the result object.
     *
     * NOTE(review): Integer.parseInt(data.get("musicId")) throws when the
     * input carries no musicId — confirm all inputs embed one.
     */
    public Lrc_Parser_Result Decode(String text,Lrc_Parser_Option opt)throws Exception{
        opt=((opt!=null)?opt:option);
        ParserData(text);
        if(0==data.size())
            throw new Exception("Decode() : No data in map");
        ParserNomalTag(text,opt);
        int _id=Integer.parseInt(data.get("musicId"));
        // Raw lyric: local "lyric" field, with network as force/fallback.
        if(!opt.NotToGetLrcFromNet){
            if(opt.ForceGetLrcFromNet){
                GetLrcFromNet(_id);
            }else{
                if(opt.Lrc_Type==Option_Lrc_Type.Raw_Lrc){
                }
                if(data.containsKey("lyric")&&((opt.Lrc_Type==Option_Lrc_Type.Raw_Lrc)||(opt.Lrc_Type==Option_Lrc_Type.Both_Raw_And_Trans_Lrc))){
                    ParserLrc(data.get("lyric"),true);
                }
            }
        }
        else{
            if(data.containsKey("lyric")&&((opt.Lrc_Type==Option_Lrc_Type.Raw_Lrc)||(opt.Lrc_Type==Option_Lrc_Type.Both_Raw_And_Trans_Lrc))){
                ParserLrc(data.get("lyric"),true);
                if(raw_lrc.size()==0){
                    if(!opt.NotToGetLrcFromNet){
                        GetLrcFromNet( _id); }
                }
            }
        }
        // Translated lyric: local "translateLyric" field, network as force/fallback.
        if(!opt.NotToGetLrcFromNet){
            if(opt.ForceGetLrcFromNet){
                GetLrcFromNet_tv(_id);
            }else{
                if(opt.Lrc_Type==Option_Lrc_Type.Trans_Lrc){
                }
                if(data.containsKey("translateLyric")&&((opt.Lrc_Type==Option_Lrc_Type.Trans_Lrc)||(opt.Lrc_Type==Option_Lrc_Type.Both_Raw_And_Trans_Lrc))){
                    ParserLrc(data.get("translateLyric"),false);
                }
            }
        }
        else{
            if(data.containsKey("translateLyric")&&((opt.Lrc_Type==Option_Lrc_Type.Trans_Lrc)||(opt.Lrc_Type==Option_Lrc_Type.Both_Raw_And_Trans_Lrc))){
                ParserLrc(data.get("translateLyric"),false);
                if(trans_lrc.size()==0){
                    if(!opt.NotToGetLrcFromNet){
                        GetLrcFromNet_tv( _id); }
                }
            }
        }
        //ParserLrc(data.get("translateLyric"),false);
        if(!(raw_lrc.size()!=0||trans_lrc.size()!=0))
            throw new Exception("Decode() : Cant got any lrc from file");
        CombineLrc(opt);
        if(lrc.size()==0)
            throw new Exception("Decode() : output lrc_list hasnt any lrc");
        // Prepend header tags (inserted at index 0, so listed in reverse order).
        add_ex_info(opt,"#");
        add_ex_info(opt,"musicId");
        add_nm_info(opt,"by");
        add_nm_info(opt,"al");
        add_nm_info(opt,"co");
        add_nm_info(opt,"ar");
        add_nm_info(opt,"ti");
        add_nm_info(opt,"lr");
        String _lrc=new String();
        for(String it : lrc){
            if(it!=null)
                _lrc+=(it)+("\n");
        }
        if(_lrc.length()==0)
            throw new Exception("Decode() : no lrc add in.");
        Lrc_Parser_Result r=new Lrc_Parser_Result();
        //#define add_info(y,x)
        //r->Title=data["ti"];
        if(data.containsKey("ti")){r.Title=data.get("ti");}
        if(data.containsKey("ar")){r.Artist=data.get("ar");}
        if(data.containsKey("al")){r.Album=data.get("al");}
        /*add_info(Title,"ti")
         add_info(Artist,"ar")
         add_info(Album,"al")*/
        r.id=(Integer.parseInt(data.get("musicId")));
        r.Lyric=_lrc;
        if(r.Lyric==null||r.Title==null||r.Artist==null)
            throw new Exception("Decode() : Had some variables are null in Result.");
        r.is_Finish_Parse=((r.Lyric.length()!=0)&&(r.Title.length()!=0)&&(r.Artist.length()!=0));
        return r;
    }

    // Prepends an "extra" tag line when ExtraTag is enabled.
    private void add_ex_info(Lrc_Parser_Option opt,String ro){
        if(opt.ExtraTag)
            add_tag(ro);
    }

    // Prepends a normal LRC tag line when NomalTag is enabled.
    private void add_nm_info(Lrc_Parser_Option opt,String ro){
        if(opt.NomalTag)
            add_tag((ro));
    }

    // Inserts "[name:value]" at the head of the output when the tag is known.
    private void add_tag(String tag_name){
        if(data.containsKey(tag_name)){String _s=new String();_s+="["+(tag_name)+(":")+data.get(tag_name)+("]");lrc.add(0,_s);}
    }

    String GetLastDecodeResult(){return last_result;}
}
|
<gh_stars>10-100
module KubeDSL::DSL::Meta::V1
  # Kubernetes meta/v1 APIGroupList resource: a list of APIGroup objects.
  class APIGroupList < ::KubeDSL::DSLObject
    # NOTE(review): declared as array_field(:group) but read below as `groups` —
    # presumably array_field pluralizes the reader; confirm against KubeDSL::DSLObject.
    array_field(:group) { KubeDSL::DSL::Meta::V1::APIGroup.new }
    validates :groups, array: { kind_of: KubeDSL::DSL::Meta::V1::APIGroup }, presence: false

    # Builds the Kubernetes-API hash representation of this resource.
    def serialize
      {}.tap do |result|
        result[:apiVersion] = "v1"
        result[:groups] = groups.map(&:serialize)
        result[:kind] = "APIGroupList"
      end
    end

    # Snake-cased resource kind, used by KubeDSL for dispatch/registration.
    def kind_sym
      :api_group_list
    end
  end
end
|
#!/bin/bash
# BUGFIX: the shebang was written as "# !/bin/bash" — with a space it is just a
# comment, so the script ran under whichever shell invoked it. "#!" must be the
# first two bytes of the file.
# Abort on the first failing command so a broken clean doesn't mask later steps.
set -e

# Remove previous build output.
mvn clean
# Create the target/test-classes directory expected by Liquibase
mkdir -p target/test-classes
# Copy and filter resources into target/.
mvn process-resources
|
#!/bin/bash
# Downloads resources.tar.gz from Google Drive via the "large file" confirm
# flow, unpacks it into the current directory, and removes the archive.
# First request only collects the cookie carrying the confirmation token.
curl -sc /tmp/cookie "https://drive.google.com/uc?export=download&id=1cqxZ-hCQagdwYQee4U8LsaHAJA3y-Go3" > /dev/null
# Extract the confirm code from the download_warning cookie entry.
CODE="$(awk '/_warning_/ {print $NF}' /tmp/cookie)"
# Second request passes the confirm code and streams the actual archive.
curl -Lb /tmp/cookie "https://drive.google.com/uc?export=download&confirm=${CODE}&id=1cqxZ-hCQagdwYQee4U8LsaHAJA3y-Go3" -o resources.tar.gz
tar -zxvf resources.tar.gz
rm resources.tar.gz
echo Download finished.
|
<gh_stars>0
package com.thomasbonderup.manning.milestone1.api;
import avro.shaded.com.google.common.collect.ImmutableMap;
import com.thomasbonderup.manning.RawRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.time.Instant;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.APPLICATION_OCTET_STREAM;
import static org.apache.commons.io.IOUtils.toByteArray;
/**
 * REST facade in front of Kafka: raw device payloads POSTed to /send/{uuid}
 * are wrapped into a {@link RawRecord} and published to the configured topic;
 * /state reads the device's current state back via the DAO.
 */
@Path("/")
public class DeviceEndpoint {
    // Shared producer; records are keyed by device uuid. (Raw type is used here.)
    protected final KafkaProducer producer;
    private final DeviceDAO db;
    // Table the device-state lookups read from.
    private final String table;
    protected final String topic;

    public DeviceEndpoint(KafkaProducer producer, String topic, DeviceDAO db, String table) {
        this.producer = producer;
        this.topic = topic;
        this.db = db;
        this.table = table;
    }

    /**
     * Publishes the request body to Kafka, keyed and tagged by the device uuid
     * and the current epoch-millis timestamp. Blocks on metadata.get(), so a
     * 200 response means the broker acknowledged the record; the ack metadata
     * is returned as JSON.
     */
    @POST
    @Path("/send/{uuid}")
    @Consumes({APPLICATION_OCTET_STREAM, APPLICATION_JSON})
    @Produces(APPLICATION_JSON)
    public Response send(@PathParam("uuid") String uuid, @Context HttpServletRequest request)
        throws ExecutionException, InterruptedException, IOException {
        ByteBuffer body = ByteBuffer.wrap(toByteArray(request.getInputStream()));
        RawRecord payload = new RawRecord(uuid, Instant.now().toEpochMilli(), body);
        ProducerRecord record = new ProducerRecord(topic, uuid, payload);
        Future<RecordMetadata> metadata = producer.send(record);
        return Response.ok().entity(serialize(metadata.get())).build();
    }

    // Flattens broker acknowledgement metadata into a JSON-friendly map.
    protected Map<String, Object> serialize(RecordMetadata metadata) {
        return ImmutableMap.<String, Object> builder()
            .put("offset", metadata.offset())
            .put("partition", metadata.partition())
            .put("topic", metadata.topic())
            .put("timestamp", metadata.timestamp())
            .build();
    }

    /** Returns the stored state for one device, looked up by uuid. */
    @GET
    @Path("/state")
    public Response getStatus(@QueryParam("uuid") String uuid) {
        return Response.ok().entity(db.getDeviceState(table, uuid)).build();
    }
}
|
<reponame>mario-renau-alstom/atlas
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Marionette layout view rendering a glossary term's additional attributes as
// a key/value table inside the "properties" card.
define(['require',
    'backbone',
    'hbs!tmpl/glossary/TermPropertiestLayoutView_tmpl',
    'utils/Utils',
    'utils/Enums'
], function(require, Backbone, TermPropertiestLayoutView_tmpl, Utils, Enums) {
    'use strict';

    var TermPropertiestLayoutView = Backbone.Marionette.LayoutView.extend(
        /** @lends TermPropertiestLayoutView */
        {
            _viewName: 'TermPropertiestLayoutView',

            template: TermPropertiestLayoutView_tmpl,

            /** Layout sub regions */
            regions: {},

            /** ui selector cache */
            ui: {
                propertiesCard: "[data-id='properties-card']"
            },
            /** ui events hash */
            events: function() {},
            /**
             * intialize a new TermPropertiestLayoutView Layout
             * @constructs
             * @param {Object} options - merged onto the view (expects
             *     `additionalAttributes` and optionally `dataObject`)
             */
            initialize: function(options) {
                _.extend(this, options);
            },
            onRender: function() {
                this.renderStats();
            },
            bindEvents: function() {},
            // Groups a flat "group:subKey" -> value map into nested
            // {group: {subKey: value}} objects.
            genrateStatusData: function(stateObject) {
                var that = this,
                    stats = {};
                _.each(stateObject, function(val, key) {
                    var keys = key.split(":"),
                        key = keys[0],
                        subKey = keys[1];
                    if (stats[key]) {
                        stats[key][subKey] = val;
                    } else {
                        stats[key] = {};
                        stats[key][subKey] = val;
                    }
                });
                return stats;
            },
            // Formats one raw value according to its declared enum type
            // (time span, date, number, milliseconds, status badge, or as-is).
            getValue: function(options) {
                var value = options.value,
                    type = options.type;
                if (type == 'time') {
                    return Utils.millisecondsToTime(value);
                } else if (type == 'day') {
                    return Utils.formatDate({ date: value })
                } else if (type == 'number') {
                    return _.numberFormatWithComma(value);
                } else if (type == 'millisecond') {
                    return _.numberFormatWithComma(value) + " millisecond/s";
                } else if (type == "status-html") {
                    return '<span class="connection-status ' + value + '"></span>';
                } else {
                    return value;
                }
            },
            // Builds the key/value table rows from additionalAttributes and
            // injects them into the properties card.
            renderStats: function() {
                var that = this,
                    generalData = this.dataObject,
                    createTable = function(obj) {
                        var tableBody = '',
                            enums = obj.enums;
                        _.each(obj.data, function(value, key, list) {
                            tableBody += '<tr><td>' + key + '</td><td class="">' + that.getValue({
                                "value": value,
                                "type": enums[key]
                            }) + '</td></tr>';
                        });
                        return tableBody;
                    };
                if (that.options.additionalAttributes) {
                    that.ui.propertiesCard.html(
                        createTable({
                            "enums": _.extend(Enums.stats.Server, Enums.stats.ConnectionStatus, Enums.stats.generalData),
                            "data":that.options.additionalAttributes
                        })
                    );
                }
            }
        });
    return TermPropertiestLayoutView;
});
#!/bin/bash
# Change the machine's hostname and update /etc/hosts to match.
# Usage: sudo $0 NEWHOSTNAME
set -e
# Must be root: setup-hostname and editing /etc/hosts require it.
if [ "$(id -ur)" -ne 0 ]; then
    echo "$0 can only be run as root. Use sudo." >&2
    exit 1
fi
if [ -z "$1" ] ; then
    echo "Usage: $0 NEWHOSTNAME" >&2
else
    echo "Setting hostname..."
    OLDNAME=$(hostname)
    # Test the command directly: under `set -e` a failed command followed by
    # an `if [ $? -eq 0 ]` check would have exited before reaching the check.
    if /sbin/setup-hostname "$1" ; then
        # Keep a backup (.bak) and replace every occurrence of the old name.
        sed --in-place=".bak" "s/${OLDNAME}/$1/g" /etc/hosts
        echo "Hostname changed to $1. Please reboot."
    else
        # BUG FIX: was `>2`, which wrote a file literally named "2" instead
        # of redirecting to stderr.
        echo "Hostname not set." >&2
        exit 1
    fi
fi
|
#!/bin/bash
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.

# Build a tarball of the Open Distro for Elasticsearch distribution:
# download the OSS Elasticsearch release, install the Open Distro plugins,
# validate the install, then package the result with a SHA-512 checksum.
set -e

# Download open source version
ES_VERSION=$(../bin/version-info --es)
OD_VERSION=$(../bin/version-info --od)
OD_PLUGINVERSION="$OD_VERSION.0"
PACKAGE=opendistroforelasticsearch
ROOT=$(dirname "$0")
# BUG FIX: the original created ./tarfiles in the CWD but tarred into
# $ROOT/tarfiles — these differ when the script is invoked from elsewhere.
# Define the target once and use it consistently.
TARGET_DIR="$ROOT/tarfiles"

wget "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-$ES_VERSION-linux-x86_64.tar.gz"

# Untar and remove the archive
tar -xzf "elasticsearch-oss-$ES_VERSION-linux-x86_64.tar.gz"
rm -rf "elasticsearch-oss-$ES_VERSION-linux-x86_64.tar.gz"

# Install plugins (--batch auto-accepts interactive prompts)
for plugin_path in opendistro-sql/opendistro_sql-$OD_PLUGINVERSION.zip opendistro-alerting/opendistro_alerting-$OD_VERSION.1.zip opendistro-job-scheduler/opendistro-job-scheduler-$OD_PLUGINVERSION.zip opendistro-security/opendistro_security-$OD_PLUGINVERSION.zip performance-analyzer/opendistro_performance_analyzer-$OD_PLUGINVERSION.zip opendistro-index-management/opendistro_index_management-$OD_VERSION.1.zip;
do
    "elasticsearch-$ES_VERSION/bin/elasticsearch-plugin" install --batch "https://d3g5vo6xdbdb9a.cloudfront.net/downloads/elasticsearch-plugins/$plugin_path"
done

cp opendistro-tar-install.sh "elasticsearch-$ES_VERSION"
mv "elasticsearch-$ES_VERSION" "$PACKAGE-$OD_VERSION"

echo "validating that plugins has been installed"
basedir="$PWD/$PACKAGE-$OD_VERSION/plugins"
arr=("$basedir/opendistro-job-scheduler" "$basedir/opendistro_alerting" "$basedir/opendistro_performance_analyzer" "$basedir/opendistro_security" "$basedir/opendistro_sql" "$basedir/opendistro_index_management")
for d in "${arr[@]}"; do
    echo "$d"
    if [ -d "$d" ]; then
        echo "directory $d is present"
    else
        echo "ERROR: $d is not present"
        exit 1
    fi
done
echo "validated that plugins has been installed"

# Package and checksum; recreate the output dir so stale artifacts are gone.
rm -rf "$TARGET_DIR"
mkdir -p "$TARGET_DIR"
tar -vczf "$TARGET_DIR/$PACKAGE-$OD_VERSION.tar.gz" "$PACKAGE-$OD_VERSION"
sha512sum "$TARGET_DIR/$PACKAGE-$OD_VERSION.tar.gz" > "$TARGET_DIR/$PACKAGE-$OD_VERSION.tar.gz.sha512"
sha512sum -c "$TARGET_DIR/$PACKAGE-$OD_VERSION.tar.gz.sha512"
echo " CHECKSUM FILE "
cat "$TARGET_DIR/$PACKAGE-$OD_VERSION.tar.gz.sha512"
rm -rf "$PACKAGE-$OD_VERSION"
|
#include<stdio.h>
int main()
{
    int year;

    /* Read years until EOF; classify each one. */
    while (scanf("%d", &year) != EOF) {
        /* Gregorian leap-year rule. */
        int is_leap = (year % 400 == 0) || (year % 4 == 0 && year % 100 != 0);

        if (is_leap) {
            printf("This is leap year.\n");
            /* Both festivals can coincide in a leap year. */
            if (year % 15 == 0)
                printf("This is huluculu festival year.\n");
            if (year % 55 == 0)
                printf("This is bulukulu festival year.\n");
        } else if (year % 15 == 0) {
            /* Huluculu does not require a leap year. */
            printf("This is huluculu festival year.\n");
        } else {
            printf("This is an ordinary year.\n");
        }
        /* Blank line separates the output for each input year. */
        printf("\n");
    }
    return 0;
}
|
const codeRouter = require("express").Router();
const runCodeIde = require("./langs/runCodeExec");

// Supported language names mapped to their source-file extensions.
const LANG_EXTENSIONS = {
  Python: ".py",
  "C++": ".cpp",
};

/**
 * POST /coderunner — run the submitted code with the given stdin.
 * Expects form fields: code, input, lang (populated on req.fields).
 */
codeRouter.post("/coderunner", async (req, res) => {
  const code = req.fields.code;
  const input = req.fields.input;
  const lang = req.fields.lang;

  // BUG FIX: the original fell through after sending the error response,
  // which could invoke the runner and attempt a second send. Also guard
  // against a missing `code` field, which previously threw.
  if (!code || code.length === 0) {
    return res.send({
      error: "error",
      message: "Write some code",
    });
  }

  const extension = LANG_EXTENSIONS[lang];
  if (!extension) {
    // BUG FIX: an unrecognized language previously left the request
    // hanging with no response at all.
    return res.send({
      error: "error",
      message: "Unsupported language",
    });
  }

  runCodeIde(code, input, res, extension);
});

module.exports = codeRouter;
|
<filename>public/106.js
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[106],{
/***/ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/Admin/Services.vue?vue&type=script&lang=js&":
/*!***************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib??ref--4-0!./node_modules/vue-loader/lib??vue-loader-options!./resources/assets/js/views/Admin/Services.vue?vue&type=script&lang=js& ***!
\***************************************************************************************************************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var vuetable_2_src_components_Vuetable__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! vuetable-2/src/components/Vuetable */ "./node_modules/vuetable-2/src/components/Vuetable.vue");
/* harmony import */ var vuetable_2_src_components_VuetablePagination__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! vuetable-2/src/components/VuetablePagination */ "./node_modules/vuetable-2/src/components/VuetablePagination.vue");
/* harmony import */ var vuetable_2_src_components_VuetablePaginationInfo__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! vuetable-2/src/components/VuetablePaginationInfo */ "./node_modules/vuetable-2/src/components/VuetablePaginationInfo.vue");
/* harmony import */ var _mixins_vuetable_2__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../../mixins/vuetable-2 */ "./resources/assets/js/mixins/vuetable-2.js");
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
// Vue component options for the Admin "Services" listing (compiled output of
// resources/assets/js/views/Admin/Services.vue). Table behavior comes from
// the vuetable-2 mixin; this object supplies the column config and actions.
/* harmony default export */ __webpack_exports__["default"] = ({
mixins: [_mixins_vuetable_2__WEBPACK_IMPORTED_MODULE_3__["default"]],
components: {
Vuetable: vuetable_2_src_components_Vuetable__WEBPACK_IMPORTED_MODULE_0__["default"],
VuetablePagination: vuetable_2_src_components_VuetablePagination__WEBPACK_IMPORTED_MODULE_1__["default"],
VuetablePaginationInfo: vuetable_2_src_components_VuetablePaginationInfo__WEBPACK_IMPORTED_MODULE_2__["default"]
},
data: function data() {
return {
// Column definitions for the services table; `__slot:actions` renders
// the edit/delete buttons via a scoped slot.
fields: [{
title: '#',
name: 'id',
sortField: 'id',
titleClass: 'text-center sorting',
dataClass: 'text-center'
}, {
name: 'name',
sortField: 'name',
titleClass: 'sorting'
}, {
name: 'type',
sortField: 'type',
titleClass: 'text-center sorting',
dataClass: 'text-center'
}, {
name: 'unit',
title: 'Service Unit',
sortField: 'unit',
titleClass: 'text-center sorting',
dataClass: 'text-center'
}, {
name: 'rate',
title: 'Rate Per Unit',
sortField: 'rate',
titleClass: 'text-center sorting',
dataClass: 'text-center'
}, {
name: 'created_at',
title: 'Created On',
sortField: 'created_at',
titleClass: 'sorting text-center',
dataClass: 'text-center'
}, {
name: '__slot:actions',
title: 'Actions',
dataClass: 'text-center',
titleClass: 'text-center'
}],
// Initial sort: ascending by id.
sortOrder: [{
field: 'id',
sortField: 'id',
direction: 'asc'
}],
moreParams: {},
customPagination: {}
};
},
methods: {
onCellClicked: function onCellClicked(data, field, event) {
console.log('cellClicked: ', field.name);
this.$refs.vuetable.toggleDetailRow(data.id);
},
// Confirm, then DELETE /services/:id; fires global loader/refresh events.
deleteService: function deleteService(id) {
var _this = this;
this.$swal({
title: 'Are you sure?',
text: "You won't be able to revert this!",
icon: 'warning',
showCancelButton: true,
confirmButtonColor: '#3085d6',
cancelButtonColor: '#d33',
confirmButtonText: 'Yes, delete it!'
}).then(function (result) {
if (result.value) {
Event.fire('show-loader', true);
axios["delete"]('/services/' + id).then(function (response) {
Event.fire('show-loader', false);
_this.$swal('Deleted!', 'The services has been deleted.', 'success');
Event.fire('refresh-data');
})["catch"](function (error) {
Event.fire('show-loader', false);
_this.$swal('Not Deleted!', 'The services has not been deleted, an error occured', 'error');
});
}
});
},
// Navigate to the edit page for a service.
editService: function editService(id) {
window.location.href = '/services/' + id + '/edit';
}
}
});
/***/ }),
/***/ "./node_modules/moment/locale sync recursive ^\\.\\/.*$":
/*!**************************************************!*\
!*** ./node_modules/moment/locale sync ^\.\/.*$ ***!
\**************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
var map = {
"./af": "./node_modules/moment/locale/af.js",
"./af.js": "./node_modules/moment/locale/af.js",
"./ar": "./node_modules/moment/locale/ar.js",
"./ar-dz": "./node_modules/moment/locale/ar-dz.js",
"./ar-dz.js": "./node_modules/moment/locale/ar-dz.js",
"./ar-kw": "./node_modules/moment/locale/ar-kw.js",
"./ar-kw.js": "./node_modules/moment/locale/ar-kw.js",
"./ar-ly": "./node_modules/moment/locale/ar-ly.js",
"./ar-ly.js": "./node_modules/moment/locale/ar-ly.js",
"./ar-ma": "./node_modules/moment/locale/ar-ma.js",
"./ar-ma.js": "./node_modules/moment/locale/ar-ma.js",
"./ar-sa": "./node_modules/moment/locale/ar-sa.js",
"./ar-sa.js": "./node_modules/moment/locale/ar-sa.js",
"./ar-tn": "./node_modules/moment/locale/ar-tn.js",
"./ar-tn.js": "./node_modules/moment/locale/ar-tn.js",
"./ar.js": "./node_modules/moment/locale/ar.js",
"./az": "./node_modules/moment/locale/az.js",
"./az.js": "./node_modules/moment/locale/az.js",
"./be": "./node_modules/moment/locale/be.js",
"./be.js": "./node_modules/moment/locale/be.js",
"./bg": "./node_modules/moment/locale/bg.js",
"./bg.js": "./node_modules/moment/locale/bg.js",
"./bm": "./node_modules/moment/locale/bm.js",
"./bm.js": "./node_modules/moment/locale/bm.js",
"./bn": "./node_modules/moment/locale/bn.js",
"./bn.js": "./node_modules/moment/locale/bn.js",
"./bo": "./node_modules/moment/locale/bo.js",
"./bo.js": "./node_modules/moment/locale/bo.js",
"./br": "./node_modules/moment/locale/br.js",
"./br.js": "./node_modules/moment/locale/br.js",
"./bs": "./node_modules/moment/locale/bs.js",
"./bs.js": "./node_modules/moment/locale/bs.js",
"./ca": "./node_modules/moment/locale/ca.js",
"./ca.js": "./node_modules/moment/locale/ca.js",
"./cs": "./node_modules/moment/locale/cs.js",
"./cs.js": "./node_modules/moment/locale/cs.js",
"./cv": "./node_modules/moment/locale/cv.js",
"./cv.js": "./node_modules/moment/locale/cv.js",
"./cy": "./node_modules/moment/locale/cy.js",
"./cy.js": "./node_modules/moment/locale/cy.js",
"./da": "./node_modules/moment/locale/da.js",
"./da.js": "./node_modules/moment/locale/da.js",
"./de": "./node_modules/moment/locale/de.js",
"./de-at": "./node_modules/moment/locale/de-at.js",
"./de-at.js": "./node_modules/moment/locale/de-at.js",
"./de-ch": "./node_modules/moment/locale/de-ch.js",
"./de-ch.js": "./node_modules/moment/locale/de-ch.js",
"./de.js": "./node_modules/moment/locale/de.js",
"./dv": "./node_modules/moment/locale/dv.js",
"./dv.js": "./node_modules/moment/locale/dv.js",
"./el": "./node_modules/moment/locale/el.js",
"./el.js": "./node_modules/moment/locale/el.js",
"./en-SG": "./node_modules/moment/locale/en-SG.js",
"./en-SG.js": "./node_modules/moment/locale/en-SG.js",
"./en-au": "./node_modules/moment/locale/en-au.js",
"./en-au.js": "./node_modules/moment/locale/en-au.js",
"./en-ca": "./node_modules/moment/locale/en-ca.js",
"./en-ca.js": "./node_modules/moment/locale/en-ca.js",
"./en-gb": "./node_modules/moment/locale/en-gb.js",
"./en-gb.js": "./node_modules/moment/locale/en-gb.js",
"./en-ie": "./node_modules/moment/locale/en-ie.js",
"./en-ie.js": "./node_modules/moment/locale/en-ie.js",
"./en-il": "./node_modules/moment/locale/en-il.js",
"./en-il.js": "./node_modules/moment/locale/en-il.js",
"./en-nz": "./node_modules/moment/locale/en-nz.js",
"./en-nz.js": "./node_modules/moment/locale/en-nz.js",
"./eo": "./node_modules/moment/locale/eo.js",
"./eo.js": "./node_modules/moment/locale/eo.js",
"./es": "./node_modules/moment/locale/es.js",
"./es-do": "./node_modules/moment/locale/es-do.js",
"./es-do.js": "./node_modules/moment/locale/es-do.js",
"./es-us": "./node_modules/moment/locale/es-us.js",
"./es-us.js": "./node_modules/moment/locale/es-us.js",
"./es.js": "./node_modules/moment/locale/es.js",
"./et": "./node_modules/moment/locale/et.js",
"./et.js": "./node_modules/moment/locale/et.js",
"./eu": "./node_modules/moment/locale/eu.js",
"./eu.js": "./node_modules/moment/locale/eu.js",
"./fa": "./node_modules/moment/locale/fa.js",
"./fa.js": "./node_modules/moment/locale/fa.js",
"./fi": "./node_modules/moment/locale/fi.js",
"./fi.js": "./node_modules/moment/locale/fi.js",
"./fo": "./node_modules/moment/locale/fo.js",
"./fo.js": "./node_modules/moment/locale/fo.js",
"./fr": "./node_modules/moment/locale/fr.js",
"./fr-ca": "./node_modules/moment/locale/fr-ca.js",
"./fr-ca.js": "./node_modules/moment/locale/fr-ca.js",
"./fr-ch": "./node_modules/moment/locale/fr-ch.js",
"./fr-ch.js": "./node_modules/moment/locale/fr-ch.js",
"./fr.js": "./node_modules/moment/locale/fr.js",
"./fy": "./node_modules/moment/locale/fy.js",
"./fy.js": "./node_modules/moment/locale/fy.js",
"./ga": "./node_modules/moment/locale/ga.js",
"./ga.js": "./node_modules/moment/locale/ga.js",
"./gd": "./node_modules/moment/locale/gd.js",
"./gd.js": "./node_modules/moment/locale/gd.js",
"./gl": "./node_modules/moment/locale/gl.js",
"./gl.js": "./node_modules/moment/locale/gl.js",
"./gom-latn": "./node_modules/moment/locale/gom-latn.js",
"./gom-latn.js": "./node_modules/moment/locale/gom-latn.js",
"./gu": "./node_modules/moment/locale/gu.js",
"./gu.js": "./node_modules/moment/locale/gu.js",
"./he": "./node_modules/moment/locale/he.js",
"./he.js": "./node_modules/moment/locale/he.js",
"./hi": "./node_modules/moment/locale/hi.js",
"./hi.js": "./node_modules/moment/locale/hi.js",
"./hr": "./node_modules/moment/locale/hr.js",
"./hr.js": "./node_modules/moment/locale/hr.js",
"./hu": "./node_modules/moment/locale/hu.js",
"./hu.js": "./node_modules/moment/locale/hu.js",
"./hy-am": "./node_modules/moment/locale/hy-am.js",
"./hy-am.js": "./node_modules/moment/locale/hy-am.js",
"./id": "./node_modules/moment/locale/id.js",
"./id.js": "./node_modules/moment/locale/id.js",
"./is": "./node_modules/moment/locale/is.js",
"./is.js": "./node_modules/moment/locale/is.js",
"./it": "./node_modules/moment/locale/it.js",
"./it-ch": "./node_modules/moment/locale/it-ch.js",
"./it-ch.js": "./node_modules/moment/locale/it-ch.js",
"./it.js": "./node_modules/moment/locale/it.js",
"./ja": "./node_modules/moment/locale/ja.js",
"./ja.js": "./node_modules/moment/locale/ja.js",
"./jv": "./node_modules/moment/locale/jv.js",
"./jv.js": "./node_modules/moment/locale/jv.js",
"./ka": "./node_modules/moment/locale/ka.js",
"./ka.js": "./node_modules/moment/locale/ka.js",
"./kk": "./node_modules/moment/locale/kk.js",
"./kk.js": "./node_modules/moment/locale/kk.js",
"./km": "./node_modules/moment/locale/km.js",
"./km.js": "./node_modules/moment/locale/km.js",
"./kn": "./node_modules/moment/locale/kn.js",
"./kn.js": "./node_modules/moment/locale/kn.js",
"./ko": "./node_modules/moment/locale/ko.js",
"./ko.js": "./node_modules/moment/locale/ko.js",
"./ku": "./node_modules/moment/locale/ku.js",
"./ku.js": "./node_modules/moment/locale/ku.js",
"./ky": "./node_modules/moment/locale/ky.js",
"./ky.js": "./node_modules/moment/locale/ky.js",
"./lb": "./node_modules/moment/locale/lb.js",
"./lb.js": "./node_modules/moment/locale/lb.js",
"./lo": "./node_modules/moment/locale/lo.js",
"./lo.js": "./node_modules/moment/locale/lo.js",
"./lt": "./node_modules/moment/locale/lt.js",
"./lt.js": "./node_modules/moment/locale/lt.js",
"./lv": "./node_modules/moment/locale/lv.js",
"./lv.js": "./node_modules/moment/locale/lv.js",
"./me": "./node_modules/moment/locale/me.js",
"./me.js": "./node_modules/moment/locale/me.js",
"./mi": "./node_modules/moment/locale/mi.js",
"./mi.js": "./node_modules/moment/locale/mi.js",
"./mk": "./node_modules/moment/locale/mk.js",
"./mk.js": "./node_modules/moment/locale/mk.js",
"./ml": "./node_modules/moment/locale/ml.js",
"./ml.js": "./node_modules/moment/locale/ml.js",
"./mn": "./node_modules/moment/locale/mn.js",
"./mn.js": "./node_modules/moment/locale/mn.js",
"./mr": "./node_modules/moment/locale/mr.js",
"./mr.js": "./node_modules/moment/locale/mr.js",
"./ms": "./node_modules/moment/locale/ms.js",
"./ms-my": "./node_modules/moment/locale/ms-my.js",
"./ms-my.js": "./node_modules/moment/locale/ms-my.js",
"./ms.js": "./node_modules/moment/locale/ms.js",
"./mt": "./node_modules/moment/locale/mt.js",
"./mt.js": "./node_modules/moment/locale/mt.js",
"./my": "./node_modules/moment/locale/my.js",
"./my.js": "./node_modules/moment/locale/my.js",
"./nb": "./node_modules/moment/locale/nb.js",
"./nb.js": "./node_modules/moment/locale/nb.js",
"./ne": "./node_modules/moment/locale/ne.js",
"./ne.js": "./node_modules/moment/locale/ne.js",
"./nl": "./node_modules/moment/locale/nl.js",
"./nl-be": "./node_modules/moment/locale/nl-be.js",
"./nl-be.js": "./node_modules/moment/locale/nl-be.js",
"./nl.js": "./node_modules/moment/locale/nl.js",
"./nn": "./node_modules/moment/locale/nn.js",
"./nn.js": "./node_modules/moment/locale/nn.js",
"./pa-in": "./node_modules/moment/locale/pa-in.js",
"./pa-in.js": "./node_modules/moment/locale/pa-in.js",
"./pl": "./node_modules/moment/locale/pl.js",
"./pl.js": "./node_modules/moment/locale/pl.js",
"./pt": "./node_modules/moment/locale/pt.js",
"./pt-br": "./node_modules/moment/locale/pt-br.js",
"./pt-br.js": "./node_modules/moment/locale/pt-br.js",
"./pt.js": "./node_modules/moment/locale/pt.js",
"./ro": "./node_modules/moment/locale/ro.js",
"./ro.js": "./node_modules/moment/locale/ro.js",
"./ru": "./node_modules/moment/locale/ru.js",
"./ru.js": "./node_modules/moment/locale/ru.js",
"./sd": "./node_modules/moment/locale/sd.js",
"./sd.js": "./node_modules/moment/locale/sd.js",
"./se": "./node_modules/moment/locale/se.js",
"./se.js": "./node_modules/moment/locale/se.js",
"./si": "./node_modules/moment/locale/si.js",
"./si.js": "./node_modules/moment/locale/si.js",
"./sk": "./node_modules/moment/locale/sk.js",
"./sk.js": "./node_modules/moment/locale/sk.js",
"./sl": "./node_modules/moment/locale/sl.js",
"./sl.js": "./node_modules/moment/locale/sl.js",
"./sq": "./node_modules/moment/locale/sq.js",
"./sq.js": "./node_modules/moment/locale/sq.js",
"./sr": "./node_modules/moment/locale/sr.js",
"./sr-cyrl": "./node_modules/moment/locale/sr-cyrl.js",
"./sr-cyrl.js": "./node_modules/moment/locale/sr-cyrl.js",
"./sr.js": "./node_modules/moment/locale/sr.js",
"./ss": "./node_modules/moment/locale/ss.js",
"./ss.js": "./node_modules/moment/locale/ss.js",
"./sv": "./node_modules/moment/locale/sv.js",
"./sv.js": "./node_modules/moment/locale/sv.js",
"./sw": "./node_modules/moment/locale/sw.js",
"./sw.js": "./node_modules/moment/locale/sw.js",
"./ta": "./node_modules/moment/locale/ta.js",
"./ta.js": "./node_modules/moment/locale/ta.js",
"./te": "./node_modules/moment/locale/te.js",
"./te.js": "./node_modules/moment/locale/te.js",
"./tet": "./node_modules/moment/locale/tet.js",
"./tet.js": "./node_modules/moment/locale/tet.js",
"./tg": "./node_modules/moment/locale/tg.js",
"./tg.js": "./node_modules/moment/locale/tg.js",
"./th": "./node_modules/moment/locale/th.js",
"./th.js": "./node_modules/moment/locale/th.js",
"./tl-ph": "./node_modules/moment/locale/tl-ph.js",
"./tl-ph.js": "./node_modules/moment/locale/tl-ph.js",
"./tlh": "./node_modules/moment/locale/tlh.js",
"./tlh.js": "./node_modules/moment/locale/tlh.js",
"./tr": "./node_modules/moment/locale/tr.js",
"./tr.js": "./node_modules/moment/locale/tr.js",
"./tzl": "./node_modules/moment/locale/tzl.js",
"./tzl.js": "./node_modules/moment/locale/tzl.js",
"./tzm": "./node_modules/moment/locale/tzm.js",
"./tzm-latn": "./node_modules/moment/locale/tzm-latn.js",
"./tzm-latn.js": "./node_modules/moment/locale/tzm-latn.js",
"./tzm.js": "./node_modules/moment/locale/tzm.js",
"./ug-cn": "./node_modules/moment/locale/ug-cn.js",
"./ug-cn.js": "./node_modules/moment/locale/ug-cn.js",
"./uk": "./node_modules/moment/locale/uk.js",
"./uk.js": "./node_modules/moment/locale/uk.js",
"./ur": "./node_modules/moment/locale/ur.js",
"./ur.js": "./node_modules/moment/locale/ur.js",
"./uz": "./node_modules/moment/locale/uz.js",
"./uz-latn": "./node_modules/moment/locale/uz-latn.js",
"./uz-latn.js": "./node_modules/moment/locale/uz-latn.js",
"./uz.js": "./node_modules/moment/locale/uz.js",
"./vi": "./node_modules/moment/locale/vi.js",
"./vi.js": "./node_modules/moment/locale/vi.js",
"./x-pseudo": "./node_modules/moment/locale/x-pseudo.js",
"./x-pseudo.js": "./node_modules/moment/locale/x-pseudo.js",
"./yo": "./node_modules/moment/locale/yo.js",
"./yo.js": "./node_modules/moment/locale/yo.js",
"./zh-cn": "./node_modules/moment/locale/zh-cn.js",
"./zh-cn.js": "./node_modules/moment/locale/zh-cn.js",
"./zh-hk": "./node_modules/moment/locale/zh-hk.js",
"./zh-hk.js": "./node_modules/moment/locale/zh-hk.js",
"./zh-tw": "./node_modules/moment/locale/zh-tw.js",
"./zh-tw.js": "./node_modules/moment/locale/zh-tw.js"
};
// webpack require.context shim for the moment locales: resolve the request
// to a module id, then load that module through the bundle's require.
function webpackContext(req) {
var id = webpackContextResolve(req);
return __webpack_require__(id);
}
// Map a locale request (e.g. "./de" or "./de.js") to its webpack module id
// via the `map` table; throws a MODULE_NOT_FOUND error — mirroring Node's
// require — for unknown requests.
function webpackContextResolve(req) {
if(!__webpack_require__.o(map, req)) {
var e = new Error("Cannot find module '" + req + "'");
e.code = 'MODULE_NOT_FOUND';
throw e;
}
return map[req];
}
// Standard webpack context API surface: key enumeration, id resolution,
// and the context's own module id.
webpackContext.keys = function webpackContextKeys() {
return Object.keys(map);
};
webpackContext.resolve = webpackContextResolve;
module.exports = webpackContext;
webpackContext.id = "./node_modules/moment/locale sync recursive ^\\.\\/.*$";
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/Admin/Services.vue?vue&type=template&id=3d3ae4d1&":
/*!*******************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib??vue-loader-options!./resources/assets/js/views/Admin/Services.vue?vue&type=template&id=3d3ae4d1& ***!
\*******************************************************************************************************************************************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "render", function() { return render; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return staticRenderFns; });
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("div", { staticClass: "row" }, [
_c("div", { staticClass: "col-lg-12" }, [
_c("div", { staticClass: "card" }, [
_c("div", { staticClass: "card-body" }, [
_c("div", { staticClass: "row card-title fixed " }, [
_vm._m(0),
_vm._v(" "),
_c("h4", { staticClass: "col-md-4 text-center" }, [
_vm._v(" Services")
]),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-4 " },
[
_c("custom-pagination-info", {
ref: "pagination",
attrs: {
customPagination: _vm.customPagination,
css: _vm.css.pagination
},
on: { "vuetable-pagination:change-page": _vm.onChangePage }
})
],
1
)
]),
_vm._v(" "),
_c("hr", { staticClass: "mb-0 mt-0" }),
_vm._v(" "),
_c(
"div",
{ staticClass: "card card-body " },
[
_c("simple-spiner"),
_vm._v(" "),
_c("filter-bar", { attrs: { placeholder: "Name" } }),
_vm._v(" "),
_c("vuetable", {
ref: "vuetable",
attrs: {
"api-url": "/services",
fields: _vm.fields,
"pagination-path": "",
css: _vm.css.table,
"sort-order": _vm.sortOrder,
"multi-sort": true,
"append-params": _vm.moreParams
},
on: {
"vuetable:cell-clicked": _vm.onCellClicked,
"vuetable:pagination-data": _vm.onPaginationData,
"vuetable:loading": _vm.loading,
"vuetable:load-success": _vm.loaded,
"vuetable:load-error": _vm.loadError
},
scopedSlots: _vm._u([
{
key: "actions",
fn: function(props) {
return [
_c(
"button",
{
staticClass: "btn btn-sm btn-warning",
on: {
click: function($event) {
return _vm.editService(props.rowData.id)
}
}
},
[_c("i", { staticClass: "fa fa-pencil" })]
),
_vm._v(" "),
_c(
"button",
{
staticClass: "btn btn-sm btn-danger",
attrs: { type: "submit" },
on: {
click: function($event) {
return _vm.deleteService(props.rowData.id)
}
}
},
[_c("i", { staticClass: "fa fa-trash" })]
)
]
}
}
])
}),
_vm._v(" "),
_c(
"div",
{ staticClass: "vuetable-pagination" },
[
_c("vuetable-pagination-info", {
ref: "paginationInfo",
attrs: { "info-class": "pagination-info" }
}),
_vm._v(" "),
_c("vuetable-pagination", {
ref: "pagination",
attrs: { css: _vm.css.pagination },
on: { "vuetable-pagination:change-page": _vm.onChangePage }
})
],
1
)
],
1
)
])
])
])
])
}
var staticRenderFns = [
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("div", { staticClass: "col-md-4 " }, [
_c(
"a",
{
staticClass: "btn btn-primary ",
attrs: { href: "/services/create" }
},
[
_vm._v(
"\n Add Service\n "
)
]
)
])
}
]
render._withStripped = true
/***/ }),
/***/ "./resources/assets/js/mixins/alert-mixins.js":
/*!****************************************************!*\
!*** ./resources/assets/js/mixins/alert-mixins.js ***!
\****************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
// Shared SweetAlert ($swal) helpers plus global-event shortcuts (loader,
// drawer) mixed into admin views.
var alertMixins = {
methods: {
// Confirm-then-notify delete dialog; success alert only when confirmed.
deleteAlert: function deleteAlert() {
var _this = this;
this.$swal({
type: 'warning',
title: 'Are you sure?',
text: "You won't be able to revert this!",
showCancelButton: true,
confirmButtonColor: '#3085d6',
cancelButtonColor: '#d33',
confirmButtonText: 'Yes, delete it!'
}).then(function (result) {
if (result.value) {
_this.$swal('Deleted!', 'Your file has been deleted.', 'success');
}
});
},
// Dialog demonstrating custom width/padding/background styling.
deleteConfirmAlert: function deleteConfirmAlert() {
this.$swal({
type: 'warning',
title: 'Custom width, padding, background.',
width: 600,
padding: '3em',
background: '#fff url(/images/trees.png)',
backdrop: "\n rgba(0,0,123,0.4)\n url(\"/images/nyan-cat.gif\")\n center left\n no-repeat\n "
});
},
// Plain flash message: pass-through to $swal.
flash: function flash(message) {
this.$swal(message);
},
flashError: function flashError(message) {
this.$swal('Error!', message, 'error');
},
flashSucces: function flashSucces(message) {
this.$swal('success!', message, 'success');
},
// Fire global UI events consumed elsewhere in the app.
showDrawer: function showDrawer(value) {
Event.fire('show-drawer', value);
},
loading: function loading(value) {
Event.fire('show-loader', value);
}
}
};
/* harmony default export */ __webpack_exports__["default"] = (alertMixins);
/***/ }),
/***/ "./resources/assets/js/mixins/vuetable-2.js":
/*!**************************************************!*\
!*** ./resources/assets/js/mixins/vuetable-2.js ***!
\**************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var vue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! vue */ "./node_modules/vue/dist/vue.common.js");
/* harmony import */ var vue__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(vue__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var accounting__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! accounting */ "./node_modules/accounting/accounting.js");
/* harmony import */ var accounting__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(accounting__WEBPACK_IMPORTED_MODULE_1__);
/* harmony import */ var moment__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! moment */ "./node_modules/moment/moment.js");
/* harmony import */ var moment__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(moment__WEBPACK_IMPORTED_MODULE_2__);
/* harmony import */ var _alert_mixins__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./alert-mixins */ "./resources/assets/js/mixins/alert-mixins.js");
// Shared vuetable-2 configuration and handlers: table/pagination CSS classes,
// cell-value formatters, pagination wiring, loading indicators, and global
// filter/refresh event handlers.
var vueTableMixin = {
mixins: [_alert_mixins__WEBPACK_IMPORTED_MODULE_3__["default"]],
created: function created() {},
data: function data() {
return {
css: {
table: {
tableClass: ' display mx-auto table table-striped table-bordered printableArea dataTable',
ascendingIcon: 'fa fa-sort-up',
descendingIcon: 'fa fa-sort-down'
},
pagination: {
wrapperClass: 'pagination',
activeClass: 'active',
disabledClass: 'disabled',
pageClass: 'page',
linkClass: 'link',
icons: {
first: '',
prev: '',
next: '',
last: ''
}
},
icons: {
first: 'glyphicon glyphicon-step-backward',
prev: 'glyphicon glyphicon-chevron-left fa fa-angle-left',
next: 'glyphicon glyphicon-chevron-right fa fa-angle-right',
last: 'glyphicon glyphicon-step-forward'
}
},
promise: true
};
},
methods: {
allcap: function allcap(value) {
return value.toUpperCase();
},
// HTML label for a gender code: 'M' renders Male, anything else Female.
genderLabel: function genderLabel(value) {
return value === 'M' ? '<span class="label label-success"><i class="glyphicon glyphicon-star"></i> Male</span>' : '<span class="label label-danger"><i class="glyphicon glyphicon-heart"></i> Female</span>';
},
formatNumber: function formatNumber(value) {
return accounting__WEBPACK_IMPORTED_MODULE_1___default.a.formatNumber(value, 2);
},
// Format a 'YYYY-MM-DD' date string (default output 'D-MMM-YYYY');
// null/undefined becomes the empty string.
formatDate: function formatDate(value) {
var fmt = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'D-MMM-YYYY';
return value == null ? '' : moment__WEBPACK_IMPORTED_MODULE_2___default()(value, 'YYYY-MM-DD').format(fmt);
},
// Push pagination data to both the pager and the info components.
onPaginationData: function onPaginationData(paginationData) {
this.customPagination = paginationData;
this.$refs.pagination.setPaginationData(paginationData);
this.$refs.paginationInfo.setPaginationData(paginationData);
},
onChangePage: function onChangePage(page) {
this.$refs.vuetable.changePage(page);
},
onCellClicked: function onCellClicked(data, field, event) {
this.$refs.vuetable.toggleDetailRow(data.id);
},
// Progress bar + spinner around the vuetable load lifecycle.
loading: function loading() {
this.$Progress.start();
Event.fire('show-simple-spinner', true);
},
loaded: function loaded() {
this.$Progress.finish();
Event.fire('show-simple-spinner', false);
},
loadError: function loadError(error) {
this.$Progress.fail();
flash("An error occured");
this.$toastr.e(" An error occured" + error);
Event.fire('show-simple-spinner', false);
}
},
events: {
// Apply a filter, then refresh on nextTick so moreParams is in effect.
'filter-set': function filterSet(filterText) {
var _this = this;
this.moreParams = {
filter: filterText
};
vue__WEBPACK_IMPORTED_MODULE_0___default.a.nextTick(function () {
return _this.$refs.vuetable.refresh();
});
},
'filter-reset': function filterReset() {
var _this2 = this;
this.moreParams = {};
vue__WEBPACK_IMPORTED_MODULE_0___default.a.nextTick(function () {
return _this2.$refs.vuetable.refresh();
});
},
'per-page-set': function perPageSet(perPage) {
var _this3 = this;
this.moreParams = {
perPage: perPage
};
vue__WEBPACK_IMPORTED_MODULE_0___default.a.nextTick(function () {
return _this3.$refs.vuetable.refresh();
});
}
},
mounted: function mounted() {
var _this4 = this;
// Global hook fired after create/delete so the table reloads its data.
Event.listen('refresh-data', function () {
vue__WEBPACK_IMPORTED_MODULE_0___default.a.nextTick(function () {
return _this4.$refs.vuetable.refresh();
});
});
}
};
/* harmony default export */ __webpack_exports__["default"] = (vueTableMixin);
/***/ }),
/***/ "./resources/assets/js/views/Admin/Services.vue":
/*!******************************************************!*\
!*** ./resources/assets/js/views/Admin/Services.vue ***!
\******************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _Services_vue_vue_type_template_id_3d3ae4d1___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Services.vue?vue&type=template&id=3d3ae4d1& */ "./resources/assets/js/views/Admin/Services.vue?vue&type=template&id=3d3ae4d1&");
/* harmony import */ var _Services_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Services.vue?vue&type=script&lang=js& */ "./resources/assets/js/views/Admin/Services.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport *//* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ "./node_modules/vue-loader/lib/runtime/componentNormalizer.js");
/* normalize component */
var component = Object(_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__["default"])(
_Services_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_Services_vue_vue_type_template_id_3d3ae4d1___WEBPACK_IMPORTED_MODULE_0__["render"],
_Services_vue_vue_type_template_id_3d3ae4d1___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"],
false,
null,
null,
null
)
/* hot reload */
if (false) { var api; }
component.options.__file = "resources/assets/js/views/Admin/Services.vue"
/* harmony default export */ __webpack_exports__["default"] = (component.exports);
/***/ }),
/***/ "./resources/assets/js/views/Admin/Services.vue?vue&type=script&lang=js&":
/*!*******************************************************************************!*\
!*** ./resources/assets/js/views/Admin/Services.vue?vue&type=script&lang=js& ***!
\*******************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Services_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib??ref--4-0!../../../../../node_modules/vue-loader/lib??vue-loader-options!./Services.vue?vue&type=script&lang=js& */ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/Admin/Services.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport */ /* harmony default export */ __webpack_exports__["default"] = (_node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Services_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__["default"]);
/***/ }),
/***/ "./resources/assets/js/views/Admin/Services.vue?vue&type=template&id=3d3ae4d1&":
/*!*************************************************************************************!*\
!*** ./resources/assets/js/views/Admin/Services.vue?vue&type=template&id=3d3ae4d1& ***!
\*************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Services_vue_vue_type_template_id_3d3ae4d1___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../../../node_modules/vue-loader/lib??vue-loader-options!./Services.vue?vue&type=template&id=3d3ae4d1& */ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/Admin/Services.vue?vue&type=template&id=3d3ae4d1&");
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "render", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Services_vue_vue_type_template_id_3d3ae4d1___WEBPACK_IMPORTED_MODULE_0__["render"]; });
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Services_vue_vue_type_template_id_3d3ae4d1___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"]; });
/***/ })
}]); |
<reponame>wongoo/alipay-sdk-java-all
package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
 * Extension-information model for industry-scenario operations content
 * (translated from: 行业场景运营内扩展信息模型). Auto-generated Alipay SDK
 * domain object.
 *
 * @author auto create
 * @since 1.0, 2019-01-29 16:43:52
 */
public class ContentExtInfoModel extends AlipayObject {

    private static final long serialVersionUID = 6332494413353121836L;

    /**
     * Prize information (translated from: 奖品信息). Serialized as the
     * "prize_info_list" field holding "content_prize_info_model" items.
     */
    @ApiListField("prize_info_list")
    @ApiField("content_prize_info_model")
    private List<ContentPrizeInfoModel> prizeInfoList;

    /** @return the attached prize list, or null when unset. */
    public List<ContentPrizeInfoModel> getPrizeInfoList() {
        return this.prizeInfoList;
    }

    /** @param prizeInfoList prize entries to attach to this content. */
    public void setPrizeInfoList(List<ContentPrizeInfoModel> prizeInfoList) {
        this.prizeInfoList = prizeInfoList;
    }
}
|
<filename>utest/namespace/test_local_namespace.py
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from utest.resources import datafilereader
from nose.tools import assert_false, assert_true
class TestLocalNamespace(unittest.TestCase):
    """Verify local-namespace contents and variable-name suggestions for a
    keyword and a test case loaded from the SIMPLE_PROJECT data files."""

    def setUp(self):
        # A fresh project per test keeps the cases order-independent.
        # (A leftover debug ``print(self._keyword)`` was removed here;
        # it only polluted the test-runner output.)
        self._project = datafilereader.construct_project(datafilereader.SIMPLE_PROJECT)
        self._test = datafilereader.get_ctrl_by_name('Test Case', self._project.datafiles[0].tests)
        self._keyword = datafilereader.get_ctrl_by_name('Keyword', self._project.datafiles[0].keywords)

    def tearDown(self):
        self._project.close()

    def test_macro_controller_has_local_namespace(self):
        assert_true(self._test.get_local_namespace() is not None)
        assert_true(self._keyword.get_local_namespace() is not None)

    def test_keyword_argument_is_visible_in_keywords_local_namespace(self):
        assert_true(self._keyword.get_local_namespace().has_name('${argument}'))

    def test_keyword_argument_is_not_visible_in_test_cases_local_namespace(self):
        assert_false(self._test.get_local_namespace().has_name('${argument}'))

    def test_keyword_steps_local_namespace_does_not_contain_local_variables_before_definition(self):
        # ${foo}, ${bar} and ${i} are assigned on rows 3, 5 and 7 of the
        # keyword, so they must be absent from all earlier rows.
        for i in range(8):
            local_namespace = self._keyword.get_local_namespace_for_row(i)
            if i < 3:
                assert_false(local_namespace.has_name('${foo}'))
            if i < 5:
                assert_false(local_namespace.has_name('${bar}'))
            if i < 7:
                assert_false(local_namespace.has_name('${i}'))

    def test_keyword_steps_local_namespace_does_contain_local_variables_after_definition(self):
        for i in range(8):
            local_namespace = self._keyword.get_local_namespace_for_row(i)
            assert_true(local_namespace.has_name('${argument}'))
            if i >= 3:
                assert_true(local_namespace.has_name('${foo}'))
            if i >= 5:
                assert_true(local_namespace.has_name('${bar}'))
            if i >= 7:
                assert_true(local_namespace.has_name('${i}'))

    def test_keyword_steps_suggestions_with_local_variables(self):
        self._verify_suggestions_on_row(0, contains=['${argument}'], does_not_contain=['${foo}', '${bar}', '${i}'])
        self._verify_suggestions_on_row(3, contains=['${argument}', '${foo}'], does_not_contain=['${bar}', '${i}'])
        self._verify_suggestions_on_row(5, contains=['${argument}', '${foo}', '${bar}'], does_not_contain=['${i}'])
        self._verify_suggestions_on_row(7, contains=['${argument}', '${foo}', '${bar}', '${i}'])

    def test_suggestions_when_empty_text(self):
        self._verify_suggestions_on_row(4, start='', contains=['${argument}', '${foo}'], does_not_contain=['${bar}'])

    def test_suggestions_when_no_match(self):
        self._verify_suggestions_on_row(5, start='${no match}', does_not_contain=['${argument}', '${foo}', '${bar}'])

    def test_suggestions_when_only_part_matches(self):
        self._verify_suggestions_on_row(4, start='${f', contains=['${foo}'], does_not_contain=['${argument}', '${bar}'])
        self._verify_suggestions_on_row(4, start='fo', contains=['${foo}'], does_not_contain=['${argument}', '${bar}'])

    def _verify_suggestions_on_row(self, row, start='${', contains=None, does_not_contain=None):
        """Assert which names the suggestions for prefix *start* on *row*
        must and must not include; also checks there are no duplicates."""
        suggestion_names = [suggestion.name for suggestion in
                            self._keyword.get_local_namespace_for_row(row).get_suggestions(start)]
        self.assertEqual(len(suggestion_names), len(set(suggestion_names)))
        for name in contains or []:
            if name not in suggestion_names:
                raise AssertionError('Suggestions on row (%s) did not contain expected value "%s"' % (str(row), name))
        for name in does_not_contain or []:
            if name in suggestion_names:
                raise AssertionError('Suggestions on row (%s) did contain illegal value "%s"' % (str(row), name))
|
def sum_list_elements(lst):
    """Return the sum of the elements in *lst* (0 for an empty list)."""
    # Delegate to the built-in sum(); the original manual loop also
    # shadowed the built-in name ``sum`` with a local accumulator.
    return sum(lst)


print(sum_list_elements([1, 2, 3]))
#!/bin/bash
# End-of-night pipeline: post twilight data, then build keogram,
# startrails and timelapse from last night's images, upload them,
# and prune old nights.
source /home/pi/allsky/config.sh
source /home/pi/allsky/scripts/filename.sh
cd /home/pi/allsky/scripts

# Images captured after midnight belong to the previous night's folder.
LAST_NIGHT=$(date -d '12 hours ago' +'%Y%m%d')

# Post end of night data. This includes next twilight time
if [[ $POST_END_OF_NIGHT_DATA == "true" ]]; then
	echo -e "Posting next twilight time to let server know when to resume liveview\n"
	./postData.sh
	echo -e "\n"
fi

# Uncomment this to scan for, and remove corrupt images before generating
# keograms and startrails. This can take several (tens of) minutes to run
# and isn't necessary unless your system produces corrupt images which then
# generate funny colors in the summary images...
# ./removeBadImages.sh /home/pi/allsky/images/$LAST_NIGHT/

# Generate keogram from collected images
if [[ $KEOGRAM == "true" ]]; then
	echo -e "Generating Keogram\n"
	mkdir -p /home/pi/allsky/images/$LAST_NIGHT/keogram/
	../keogram /home/pi/allsky/images/$LAST_NIGHT/ $EXTENSION /home/pi/allsky/images/$LAST_NIGHT/keogram/keogram-$LAST_NIGHT.jpg
	if [[ $UPLOAD_KEOGRAM == "true" ]] ; then
		OUTPUT="/home/pi/allsky/images/$LAST_NIGHT/keogram/keogram-$LAST_NIGHT.jpg"
		# Fixed: a stray ';' previously split this into "cp $OUTPUT"
		# (missing destination operand) followed by an attempt to
		# execute the destination directory as a command.
		cp "$OUTPUT" /home/pi/allsky/images/keograms/
		lftp "$PROTOCOL"://"$USER":"$PASSWORD"@"$HOST" \
		-e "cd $KEOGRAM_DIR; set net:max-retries 1; put $OUTPUT; bye"
	fi
	echo -e "\n"
fi

# Generate startrails from collected images. Treshold set to 0.1 by default in config.sh to avoid stacking over-exposed images
if [[ $STARTRAILS == "true" ]]; then
	echo -e "Generating Startrails\n"
	mkdir -p /home/pi/allsky/images/$LAST_NIGHT/startrails/
	../startrails /home/pi/allsky/images/$LAST_NIGHT/ $EXTENSION $BRIGHTNESS_THRESHOLD /home/pi/allsky/images/$LAST_NIGHT/startrails/startrails-$LAST_NIGHT.jpg
	if [[ $UPLOAD_STARTRAILS == "true" ]] ; then
		OUTPUT="/home/pi/allsky/images/$LAST_NIGHT/startrails/startrails-$LAST_NIGHT.jpg"
		# Fixed: same stray-';' bug as the keogram copy above.
		cp "$OUTPUT" /home/pi/allsky/images/startrails/
		lftp "$PROTOCOL"://"$USER":"$PASSWORD"@"$HOST" \
		-e "cd $STARTRAILS_DIR; set net:max-retries 1; put $OUTPUT; bye"
	fi
	echo -e "\n"
fi

# Generate timelapse from collected images
if [[ $TIMELAPSE == "true" ]]; then
	echo -e "Generating Timelapse\n"
	./timelapse.sh $LAST_NIGHT
	echo -e "\n"
fi

# Automatically delete old images and videos
if [[ $AUTO_DELETE == "true" ]]; then
	del=$(date --date="$NIGHTS_TO_KEEP days ago" +%Y%m%d)
	# Night folders are named YYYYMMDD, so a numeric compare on the
	# basename is a date compare.
	for i in `find /home/pi/allsky/images/ -type d -name "2*"`; do
		(($del > $(basename $i))) && rm -rf $i
	done
fi
|
<gh_stars>0
import { transparentize } from 'color2k';
import { dequal } from 'dequal';
import React, { useContext, useState } from 'react';
import { StyleSheet, Text, View } from 'react-native';
import { themes } from '../../../../constants/colors';
import { IAttachment } from '../../../../definitions/IAttachment';
import { TGetCustomEmoji } from '../../../../definitions/IEmoji';
import { CustomIcon } from '../../../../lib/Icons';
import { useTheme } from '../../../../theme';
import sharedStyles from '../../../../views/Styles';
import Markdown from '../../../markdown';
import MessageContext from '../../Context';
import Touchable from '../../Touchable';
import { BUTTON_HIT_SLOP } from '../../utils';
// Static layout styles for the collapsible-quote attachment. Colors that
// depend on the active theme or on attachment.color are applied inline at
// render time, not here.
const styles = StyleSheet.create({
	// Outer touchable: bordered row; the thicker left border is the
	// quote accent (borderLeftWidth set inline).
	button: {
		flexDirection: 'row',
		alignItems: 'center',
		marginTop: 6,
		borderWidth: 1,
		borderRadius: 4,
		minHeight: 40
	},
	attachmentContainer: {
		flex: 1,
		borderRadius: 4,
		padding: 8
	},
	authorContainer: {
		flexDirection: 'row'
	},
	// One rendered attachment field (title + markdown value).
	fieldContainer: {
		flexDirection: 'column',
		paddingLeft: 10,
		paddingTop: 10,
		paddingBottom: 10
	},
	fieldTitle: {
		fontSize: 15,
		...sharedStyles.textBold
	},
	marginTop: {
		marginTop: 4
	},
	marginBottom: {
		marginBottom: 4
	},
	title: {
		fontSize: 16,
		...sharedStyles.textMedium
	},
	touchableContainer: {
		flexDirection: 'row'
	},
	markdownFontSize: {
		fontSize: 15
	},
	// Fixed-size slot for the expand/collapse chevron.
	iconContainer: {
		width: 20,
		height: 20,
		right: 8,
		top: 8,
		justifyContent: 'center',
		alignItems: 'center'
	}
});
// Props for the Fields list: the attachment whose `fields` are rendered,
// plus a resolver for custom emoji appearing in markdown values.
interface IMessageFields {
	attachment: IAttachment;
	getCustomEmoji: TGetCustomEmoji;
}

// Props for the collapsible quote. `index` is the attachment's position in
// the message (index > 0 adds top margin). `timeFormat` is not read by the
// components in this file.
interface IMessageReply {
	attachment: IAttachment;
	timeFormat?: string;
	index: number;
	getCustomEmoji: TGetCustomEmoji;
}
// Renders the attachment's field list (title + markdown value per field).
const Fields = React.memo(
	({ attachment, getCustomEmoji }: IMessageFields) => {
		// Hooks must be called unconditionally on every render (Rules of
		// Hooks), so they run before the early return below. Previously
		// they were called after `return null`, which breaks React's hook
		// ordering when `attachment.fields` appears/disappears.
		const { baseUrl, user } = useContext(MessageContext);
		const { theme } = useTheme();
		if (!attachment.fields) {
			return null;
		}
		return (
			<>
				{attachment.fields.map(field => (
					<View key={field.title} style={[styles.fieldContainer, { width: field.short ? '50%' : '100%' }]}>
						<Text testID='collapsibleQuoteTouchableFieldTitle' style={[styles.fieldTitle, { color: themes[theme].bodyText }]}>
							{field.title}
						</Text>
						<Markdown
							msg={field?.value || ''}
							baseUrl={baseUrl}
							username={user.username}
							getCustomEmoji={getCustomEmoji}
							theme={theme}
							style={[styles.markdownFontSize]}
						/>
					</View>
				))}
			</>
		);
	},
	// Re-render only when the field list itself changes.
	(prevProps, nextProps) => dequal(prevProps.attachment.fields, nextProps.attachment.fields)
);
// Collapsible quote attachment: a bordered, tinted block with a title row
// and an expandable field list.
const CollapsibleQuote = React.memo(
	({ attachment, index, getCustomEmoji }: IMessageReply) => {
		// Hooks must run unconditionally on every render (Rules of Hooks),
		// so call them before the early return; `attachment` may be
		// undefined, hence the optional chaining.
		const [collapsed, setCollapsed] = useState(attachment?.collapsed);
		const { theme } = useTheme();
		if (!attachment) {
			return null;
		}
		const onPress = () => {
			setCollapsed(!collapsed);
		};
		// Start from the theme defaults, then tint with attachment.color if
		// present; transparentize may throw on a malformed color string.
		let {
			borderColor,
			chatComponentBackground: backgroundColor,
			collapsibleQuoteBorder,
			collapsibleChevron,
			headerTintColor
		} = themes[theme];
		try {
			if (attachment.color) {
				backgroundColor = transparentize(attachment.color, 0.8);
				borderColor = attachment.color;
				collapsibleQuoteBorder = attachment.color;
				collapsibleChevron = attachment.color;
				// (Removed a no-op `headerTintColor = headerTintColor;`
				// self-assignment that was here.)
			}
		} catch (e) {
			// Malformed attachment.color: keep the theme defaults.
		}
		return (
			<>
				<Touchable
					testID={`collapsibleQuoteTouchable-${attachment.title}`}
					onPress={onPress}
					style={[
						styles.button,
						index > 0 && styles.marginTop,
						attachment.description && styles.marginBottom,
						{
							backgroundColor,
							borderLeftColor: collapsibleQuoteBorder,
							borderTopColor: borderColor,
							borderRightColor: borderColor,
							borderBottomColor: borderColor,
							borderLeftWidth: 2
						}
					]}
					background={Touchable.Ripple(themes[theme].bannerBackground)}
					hitSlop={BUTTON_HIT_SLOP}>
					<View style={styles.touchableContainer}>
						<View style={styles.attachmentContainer}>
							<View style={styles.authorContainer}>
								<Text style={[styles.title, { color: headerTintColor }]}>{attachment.title}</Text>
							</View>
							{!collapsed && <Fields attachment={attachment} getCustomEmoji={getCustomEmoji} />}
						</View>
						<View style={styles.iconContainer}>
							<CustomIcon name={!collapsed ? 'chevron-up' : 'chevron-down'} size={22} color={collapsibleChevron} />
						</View>
					</View>
				</Touchable>
			</>
		);
	},
	(prevProps, nextProps) => dequal(prevProps.attachment, nextProps.attachment)
);
// Explicit display names so React DevTools labels the memoized components.
CollapsibleQuote.displayName = 'CollapsibleQuote';
Fields.displayName = 'CollapsibleQuoteFields';
export default CollapsibleQuote;
|
#!/bin/bash
# Build the hanzibox LaTeX package documentation from its .dtx source.
# Standard dtx workflow: initial (xe)tex/xelatex passes, makeindex with the
# gind/gglo styles for the code index and change glossary, then repeated
# xelatex passes to settle cross-references. Each step is &&-chained so the
# build stops at the first failure.
xetex hanzibox.dtx &&\
xelatex hanzibox.dtx &&\
makeindex -s gind.ist -o hanzibox.ind hanzibox.idx &&\
makeindex -s gglo.ist -o hanzibox.gls hanzibox.glo &&\
xelatex hanzibox.dtx &&\
xelatex hanzibox.dtx &&\
xelatex hanzibox.dtx
|
package com.sewerina.myadressbook;
public class Address {
private final String country;
private final String zip;
private final String addressLine;
public Address(String country, String zip, String addressLine) {
this.country = country;
this.zip = zip;
this.addressLine = addressLine;
}
public boolean isContainsCountry(String country) {
return this.country.contains(country);
}
public boolean isContainsZip(String zip) {
return this.zip.contains(zip);
}
public boolean isContainsAddressLine(String addressLine) {
return this.addressLine.contains(addressLine);
}
public String serialize() {
return country + "," + zip + "," + addressLine;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj instanceof Address) {
Address address = (Address) obj;
return this.country.equals(address.country) &&
this.zip.equals(address.zip) &&
this.addressLine.equals(address.addressLine);
} else {
return false;
}
}
}
|
<reponame>isbabaev/telegram-bot-service
import { Module } from '@nestjs/common';
import { DomainsDiModule } from './domains/domains-di.module';
import { ConfigModule } from '@nestjs/config';
import { OnModuleInitService } from './on-module-init.service';
import { ApiModule } from './modules/api/api.module';
// Root application module: loads environment config first
// (ConfigModule.forRoot reads .env), then the domain DI bindings and the
// HTTP API module. OnModuleInitService is registered as a provider —
// presumably to run startup logic via OnModuleInit; confirm in its
// implementation.
@Module({
	imports: [
		ConfigModule.forRoot(),
		DomainsDiModule,
		ApiModule,
	],
	providers: [
		OnModuleInitService,
	],
})
export class AppModule {}
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
Project Name: python-auth
File Name: main.py
Author: sunch
Create Date: 2021/11/10 14:20
-------------------------------------------------
"""
from gui import gui_open
# Launch the GUI only when executed as a script, not on import.
if __name__ == '__main__':
    gui_open()
|
#!/bin/sh
# Search a file for a string and report whether it was found.
# Usage: checkstring <file> <string>
if [ $# -ne 2 ]
then
	echo "Example usage: checkstring mystring.txt 'this string'"
	exit 1
fi
# Quote the file argument too: previously an unquoted $1 broke on file
# names containing spaces or glob characters.
if grep -q "$2" "$1"
then
	echo "Text found"
else
	echo "Text not found"
fi
import React, {Component, PropTypes} from 'react';
import {connect} from 'react-redux';
import * as authActions from 'redux/modules/auth';
// Post-login landing page: renders only once Redux auth state has a user,
// and offers a logout button.
// NOTE(review): `PropTypes` is imported from 'react' at the top of this
// file; it was removed from the React package in v16 and should come from
// the separate 'prop-types' package — confirm the React version in use.
@connect(
  state => ({user: state.auth.user}),
  authActions)
export default
class LoginSuccess extends Component {
  static propTypes = {
    user: PropTypes.object,
    logout: PropTypes.func
  }

  render() {
    const {user, logout} = this.props;
    const styles = require('./LoginSuccess.scss');
    // `user && <jsx>` renders nothing (false) until a user is present.
    return (user &&
      <div className={styles.loginSuccessPage + ' container'}>
        <div className="row">
          <div className="col-md-4 col-md-offset-4">
            <h1>登录成功</h1>
            <div>
              <p>Hi,{user.name},你已经成功登录这里,希望你一切顺利,同时建议在现实生活中保持低调,将提高你的生存几率。</p>
              <div>
                <button className="btn btn-block btn-danger" onClick={logout}>退出登录</button>
              </div>
            </div>
          </div>
        </div>
      </div>
    );
  }
}
|
package cn.stylefeng.guns.onlineaccess.modular.controller;
import cn.stylefeng.guns.core.pojo.response.ResponseData;
import cn.stylefeng.guns.onlineaccess.modular.service.DataTypeService;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
/**
 * REST endpoints exposing data-manager ("data leader") and data-type
 * lookups by project id.
 */
@RestController
public class DataTypeController {

    /** Backing service; field-injected by the container via @Resource.
     *  Made private — the original package-private visibility exposed the
     *  field to the whole package for no reason. */
    @Resource
    private DataTypeService dataTypeService;

    /**
     * Returns the data manager for the given project.
     *
     * @param id project id
     * @return success response wrapping the service result
     */
    @GetMapping("/api/user/dataleader")
    public ResponseData getDataManagerByProjectIdResult(Long id) {
        return ResponseData.success(dataTypeService.getDataManagerByProjectIdResult(id));
    }

    /**
     * Returns the data types configured for the given project.
     *
     * @param id project id
     * @return success response wrapping the service result
     */
    @GetMapping("/api/user/getDataTypeByProjectIdResult")
    public ResponseData getDataTypeByProjectIdResult(Long id) {
        return ResponseData.success(dataTypeService.getDataTypeByProjectIdResult(id));
    }
}
|
<reponame>anticipasean/girakkafunc<filename>func-core/src/test/java/cyclops/container/immutable/impl/BQTest.java
package cyclops.container.immutable.impl;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import java.util.stream.Stream;
import org.junit.Test;
public class BQTest {

    /**
     * Building a BankersQueue directly and from a Stream must yield equal
     * queues.
     */
    @Test
    public void fromStream() {
        // Debug/smoke output only — not part of the assertion.
        System.out.println(BankersQueue.of(1,
                                           2,
                                           3));
        System.out.println(BankersQueue.fromStream(Stream.of(1,
                                                             2,
                                                             3)));
        // NOTE(review): only checks iterator() doesn't throw; the result
        // is discarded and nothing is asserted about its contents.
        BankersQueue.fromStream(Stream.of(1,
                                          2,
                                          3))
                    .iterator();
        assertThat(BankersQueue.fromStream(Stream.of(1,
                                                     2,
                                                     3)),
                   equalTo(BankersQueue.of(1,
                                           2,
                                           3)));
    }
}
|
const {Readable} = require('stream');
const login = require('facebook-chat-api');
const ZeroBroker = require('./ZeroBroker.js');
// attach it to ZeroBroker? Convert it to a class ZeroDatastore or ZeroCache or something?
const db = new Map();
// Log in and listen for the account's own messages: ZeroBroker requests
// arrive as texts the account sends to itself, hence selfListen below.
// NOTE(review): the credentials are placeholder values ('...') — this
// cannot run as-is; presumably they come from config in a real deployment.
login({ email: '...', password: '...' }, (err, api) => {
	if (err) {
		console.error('Error logging in Facebook', err);
		process.exit(1);
	}
	const myId = api.getCurrentUserID();
	api.setOptions({ selfListen: true }); // !important
	api.listen((err, message) => {
		// Only react to our own messages carrying the protocol prefix.
		if (message.senderID === myId && message.body.startsWith('ZeroBroker::')) {
			const parsed = ZeroBroker.parseRequest(message.body);
			const response = fulfillRequest(parsed);
			if (!response) return;
			// Send a response to ZeroMessenger via the same thread (i.e. texting myself)
			api.sendMessage('ZeroBroker::info::'+response, message.threadID, (err, sentMessage) => {
				if (err) {
					// retry?
					return console.error(err);
				}
			});
		}
	});
});
/**
 * Handle a parsed ZeroBroker request: accumulate 'send' file segments in
 * the in-memory db and flush the file once complete.
 * @param {object} request
 * @returns {string} Response string to ZeroMessenger, '' when silent
 */
function fulfillRequest(request) {
	if (request.action === 'send') {
		// NOTE(review): ZeroSegment and ZeroFile are not imported or
		// defined in this file — confirm they are globals or add requires.
		//@ts-ignore
		const seg = new ZeroSegment(request.fileSegment)
		if (db.has(seg.id)) {
			const zf = db.get(seg.id);
			zf.addPart(seg);
			if (zf.isComplete()) {
				sendAsNormalFile(zf);
				// Fixed: previously referenced the undefined name `ze`,
				// which threw a ReferenceError on the completion path.
				return `sent ${zf.id}`
			}
		} else {
			// @todo in case the file contains only one seg.
			db.set(seg.id, new ZeroFile(seg));
		}
		return `got ${seg.id} ${seg.partNum}`
	} else if (request.action === 'get') {
		// realfile = fetch(request.link)
		// zerofile = toZeroFile(file)
		// zerofile.segments.forEach(segmen => sendText(segment, toMyself, autoretry))
	}
	return ''
}
// Re-send a fully reassembled ZeroFile as an ordinary Facebook attachment.
// NOTE(review): two issues to confirm with the author —
//  * `api` is not in scope here: it only exists inside the login()
//    callback above, so calling this function throws a ReferenceError.
//  * the only call site (fulfillRequest) passes a ZeroFile object, but
//    the parameter is used as a db key (`db.get(zfid)`), so `info` is
//    undefined at runtime.
function sendAsNormalFile(zfid) {
	const info = db.get(zfid);
	var message = {
		body: '',
		attachment: toReadStream(info.data)
	}
	api.sendMessage(message, info.target, (err, sentMessage) => {
		if (err) {
			console.error(err);
			// retry? Inform ZeroMessenger?
		} else {
			// Remember the Facebook message id for later reference.
			info.fbid = sentMessage.id;
		}
	})
}
/**
* Convert a base64 string to a ReadStream
* Taken from https://stackoverflow.com/a/44091532/9453525
* @todo Good enough for my use-case?
* @param {string} str
* @returns {Readable}
*/
function toReadStream(str) {
const buffer = new Buffer(str, 'base64')
const readable = new Readable()
readable._read = () => {} // _read is required but you can noop it
readable.push(buffer)
readable.push(null)
return readable
}
|
// Internet-radio page: builds genre cards with station links from
// localStorage, streams the chosen station, and polls DAR.fm / Last.fm
// for now-playing metadata and album art.
$(document).ready(function() {
    // getting past CORS: route cross-domain requests through a public proxy.
    jQuery.ajaxPrefilter(function(options) {
        if (options.crossDomain && jQuery.support.cors) {
            var http = (window.location.protocol === 'http:' ? 'http:' : 'https:');
            options.url = 'https://cors-anywhere.herokuapp.com/' + options.url;
        }
    });
    // global vars
    // NOTE(review): API keys are hard-coded in client-side code and the
    // DAR.fm/Last.fm endpoints below use http:// — mixed content on an
    // https page; confirm before deploying.
    var apiKey = '4363387309';
    // last fm api key
    var lastFMKey = 'bd9e30016f70dbefbda1a9172d668e5e';
    var callsign = '';
    var stationID = '';
    var streamURL = '';
    var genres = [];
    var stations = [];
    var audioPlayer = $("#player");
    var cardInfo = $(".card-info");
    var cover = $("#cover");
    var currentStation = $("#station-info");
    var songTitle = $("#title");
    var storedGenres = JSON.parse(localStorage.getItem("genres"));
    var storedStations = JSON.parse(localStorage.getItem("stations"));
    // get info from localStorage (populated elsewhere in the app)
    if (storedGenres !== null) {
        genres = storedGenres;
    }
    if (storedStations !== null) {
        stations = storedStations;
    }
    // generate a card for each genre
    for (var i = 0; i < genres.length; i++) {
        var row = $("<div>");
        row.addClass("row");
        cardInfo.append(row);
        var col = $("<div>");
        col.addClass("col-4");
        row.append(col);
        var card = $("<div>");
        card.addClass("card");
        col.append(card);
        var cardContent = $("<div>");
        cardContent.addClass("card-content white-text");
        card.append(cardContent);
        // genre title
        var span = $("<span>");
        span.addClass("card-title");
        span.html(genres[i]);
        cardContent.append(span);
        // container for stations; data-genre is the lookup key below
        var stationLinksDiv = $("<div>");
        stationLinksDiv.addClass("card-action");
        stationLinksDiv.attr("data-genre", genres[i]);
        card.append(stationLinksDiv);
    }
    // fill in the stations found for a genre
    for (var i = 0; i < stations.length; i++) {
        // locate the card to put it in
        var whichGenre = '[data-genre="' + stations[i].genre + '"]';
        var genreCard = $(whichGenre);
        // each station has a unique callsign and station_id
        var link = $("<a>");
        link.attr("href", "#song-info");
        link.attr("class", "station")
        link.attr("data-callsign", stations[i].callsign);
        link.attr("data-stationid", stations[i].station_id);
        link.html(stations[i].callsign);
        genreCard.append(link);
    }
    // when you click on a station, it changes the src for the audio player using the callsign
    // (the .station links above are created before this binding runs)
    $(".station").on("click", function(event) {
        callsign = $(event.target).data('callsign');
        stationID = $(event.target).data('stationid');
        var playerURL = 'http://api.dar.fm/uberstationurl.php?callback=json&callsign=' + callsign + '&partner_token=' + apiKey;
        var playerURLEncoded = encodeURI(playerURL);
        $.ajax({
            url: playerURLEncoded,
            method: "GET"
        }).then(function(response) {
            streamURL = response.result[0].url;
            audioPlayer.attr("src", streamURL);
            getCurrentSong();
        });
    });
    // get current song info; reschedules itself for when the track ends
    function getCurrentSong() {
        var songURL = 'http://api.dar.fm/playlist.php?callback=json&station_id=' + stationID + '&partner_token=' + apiKey;
        $.ajax({
            url: songURL,
            method: "GET"
        }).then(function(response) {
            var artist = 'unknown';
            var title = 'unknown';
            // show the song title, artist, and current station
            if (response.result[0].artist) {
                artist = response.result[0].artist;
            }
            if (response.result[0].title) {
                title = response.result[0].title;
            }
            // display info on page
            songTitle.html(title + ' - ' + artist);
            currentStation.html('Now playing on ' + callsign);
            getAlbumArt(artist, title);
            // update if new song: poll again when this track should end
            setTimeout(getCurrentSong, (response.result[0].seconds_remaining) * 1000);
        });
    }
    // get album art using last fm
    function getAlbumArt(artist, track) {
        var albumURL = 'http://ws.audioscrobbler.com/2.0/?method=track.getInfo&api_key=' + lastFMKey + '&artist=' + artist + '&track=' + track + '&format=json';
        $.ajax({
            url: encodeURI(albumURL),
            method: "GET"
        }).then(function(response) {
            // if album art listed, then use that - otherwise, use placeholder
            // NOTE(review): image[0] is presumably the smallest size Last.fm
            // returns and its '#text' can be empty — confirm the intended size.
            if (response.track.album) {
                if (response.track.album.image) {
                    cover.attr("src", response.track.album.image[0]['#text']);
                } else {
                    cover.attr("src", 'https://via.placeholder.com/34');
                }
            } else {
                cover.attr("src", 'https://via.placeholder.com/34');
            }
        });
    }
});
<gh_stars>0
exports.up = (knex, Promise) => {
return knex.schema.table("guilds", (t) => {
t.string("welcome", 1024);
t.string("goodbye", 1024);
});
};
exports.down = (knex, Promise) => {
return knex.schema.table("guilds", (t) => {
t.dropColumn("welcome");
t.dropColumn("goodbye");
});
};
|
<reponame>hispindia/BIHAR-2.7
package org.hisp.dhis.message.hibernate;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import org.hibernate.Query;
import org.hisp.dhis.common.hibernate.HibernateIdentifiableObjectStore;
import org.hisp.dhis.jdbc.StatementBuilder;
import org.hisp.dhis.message.MessageConversation;
import org.hisp.dhis.message.MessageConversationStore;
import org.hisp.dhis.user.User;
import org.springframework.jdbc.core.RowMapper;
/**
* @author <NAME>
*/
/**
 * Hibernate/JDBC-backed store for {@link MessageConversation}.
 *
 * Read paths use raw SQL via {@code jdbcTemplate} (inherited from the base
 * store); write/maintenance paths use HQL via {@code getQuery}.
 */
public class HibernateMessageConversationStore
    extends HibernateIdentifiableObjectStore<MessageConversation> implements MessageConversationStore
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    // Used to build vendor-specific SQL fragments (paging via limitRecord).
    private StatementBuilder statementBuilder;

    public void setStatementBuilder( StatementBuilder statementBuilder )
    {
        this.statementBuilder = statementBuilder;
    }

    // -------------------------------------------------------------------------
    // Implementation methods
    // -------------------------------------------------------------------------

    /**
     * Returns conversations ordered by last message (newest first), with the
     * per-user read / follow-up flags resolved through correlated sub-selects.
     *
     * @param user  restrict to conversations of this user; null returns all
     * @param first paging offset; paging applies only when both first and max
     *              are non-null
     * @param max   page size
     */
    public List<MessageConversation> getMessageConversations( User user, Integer first, Integer max )
    {
        String sql =
            "select mc.messageconversationid, mc.uid, mc.subject, mc.lastmessage, ui.surname, ui.firstname, ( " +
                "select isread from usermessage " +
                "where usermessage.usermessageid=mu.usermessageid " +
                "and mu.messageconversationid=mc.messageconversationid ) as isread, ( " +
                "select isfollowup from usermessage " +
                "where usermessage.usermessageid=mu.usermessageid " +
                "and mu.messageconversationid=mc.messageconversationid ) as isfollowup " +
            "from messageconversation mc " +
            "left join messageconversation_usermessages mu on mc.messageconversationid=mu.messageconversationid " +
            "left join usermessage um on mu.usermessageid=um.usermessageid " +
            "left join userinfo ui on mc.lastsenderid=ui.userinfoid ";

        if ( user != null )
        {
            // NOTE(review): id is concatenated into the SQL; acceptable only
            // because getId() is numeric — revisit if the id type ever changes.
            sql += "where um.userid=" + user.getId() + " ";
        }

        sql += "order by mc.lastmessage desc ";

        if ( first != null && max != null )
        {
            sql += statementBuilder.limitRecord( first, max );
        }

        final List<MessageConversation> conversations = jdbcTemplate.query( sql, new RowMapper<MessageConversation>()
        {
            // Column order must match the select list above.
            public MessageConversation mapRow( ResultSet resultSet, int count ) throws SQLException
            {
                MessageConversation conversation = new MessageConversation();
                conversation.setId( resultSet.getInt( 1 ) );
                conversation.setUid( resultSet.getString( 2 ) );
                conversation.setSubject( resultSet.getString( 3 ) );
                conversation.setLastMessage( resultSet.getDate( 4 ) );
                conversation.setLastSenderSurname( resultSet.getString( 5 ) );
                conversation.setLastSenderFirstname( resultSet.getString( 6 ) );
                conversation.setRead( resultSet.getBoolean( 7 ) );
                conversation.setFollowUp( resultSet.getBoolean( 8 ) );
                return conversation;
            }
        } );

        return conversations;
    }

    /**
     * Counts conversations the given user participates in.
     */
    public int getMessageConversationCount( User user )
    {
        String sql =
            "select count(*) from messageconversation mc " +
            "left join messageconversation_usermessages mu on mc.messageconversationid=mu.messageconversationid " +
            "left join usermessage um on mu.usermessageid=um.usermessageid " +
            "where um.userid=" + user.getId();

        // NOTE(review): queryForInt is deprecated in newer Spring versions —
        // queryForObject(sql, Integer.class) is the modern equivalent.
        return jdbcTemplate.queryForInt( sql );
    }

    /**
     * Counts the user's unread conversations (cached HQL query).
     */
    public long getUnreadUserMessageConversationCount( User user )
    {
        String hql = "select count(*) from MessageConversation m join m.userMessages u where u.user = :user and u.read = false";
        Query query = getQuery( hql );
        query.setEntity( "user", user );
        query.setCacheable( true );
        return (Long) query.uniqueResult();
    }

    /**
     * Bulk-deletes all messages authored by the given sender.
     *
     * @return number of rows deleted
     */
    public int deleteMessages( User sender )
    {
        String hql = "delete Message m where m.sender = :sender";
        Query query = getQuery( hql );
        query.setEntity( "sender", sender );
        return query.executeUpdate();
    }

    /**
     * Bulk-deletes all user-message links belonging to the given user.
     *
     * @return number of rows deleted
     */
    public int deleteUserMessages( User user )
    {
        String hql = "delete UserMessage u where u.user = :user";
        Query query = getQuery( hql );
        query.setEntity( "user", user );
        return query.executeUpdate();
    }

    /**
     * Clears the last-sender reference from conversations where this user was
     * the last sender (used when removing a user).
     *
     * @return number of conversations updated
     */
    public int removeUserFromMessageConversations( User lastSender )
    {
        String hql = "update MessageConversation m set m.lastSender = null where m.lastSender = :lastSender";
        Query query = getQuery( hql );
        query.setEntity( "lastSender", lastSender );
        return query.executeUpdate();
    }
}
#!/usr/bin/env bash
# Create a PostGIS (1.5 layout) spatial template database that new databases
# can be cloned from.
# Fix: use $(...) and quote the share-dir path so the script survives an
# installation path containing spaces.
POSTGIS_SQL_PATH="$(pg_config --sharedir)/contrib/postgis-1.5"
createdb -E UTF8 template_postgis # Create the template spatial database.
# NOTE(review): createlang was removed in PostgreSQL 10+; on newer servers use
# "psql -d template_postgis -c 'CREATE EXTENSION plpgsql;'" instead.
createlang -d template_postgis plpgsql # Adding PLPGSQL language support.
psql -d postgres -c "UPDATE pg_database SET datistemplate='true' WHERE datname='template_postgis';"
psql -d template_postgis -f "$POSTGIS_SQL_PATH/postgis.sql" # Loading the PostGIS SQL routines
psql -d template_postgis -f "$POSTGIS_SQL_PATH/spatial_ref_sys.sql"
psql -d template_postgis -c "GRANT ALL ON geometry_columns TO PUBLIC;" # Enabling users to alter spatial tables.
psql -d template_postgis -c "GRANT ALL ON geography_columns TO PUBLIC;"
psql -d template_postgis -c "GRANT ALL ON spatial_ref_sys TO PUBLIC;"
|
#!/bin/bash -e
# Launch a throw-away EC2 instance running Apache in the default VPC, wait for
# it, print connection info, then tear everything down on keypress.
# The only thing that must be supplied is the name of the key pair to use.
KEYNAME=$1

# Look up the ID of the newest matching Ubuntu 18.04 image
AMIID=$(aws ec2 describe-images --filters "Name=name,Values=ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-????????" "Name=architecture,Values=x86_64" --query 'Images[0].[ImageId]' --output text)
echo "ID da Imagem: $AMIID"

# Look up the ID of the default VPC
VPCID=$(aws ec2 describe-vpcs --filter "Name=isDefault, Values=true" --query "Vpcs[0].VpcId" --output text)
echo "ID do VPC: $VPCID"

# Look up the ID of the VPC's first subnet
SUBNETID=$(aws ec2 describe-subnets --filter "Name=vpc-id, Values=$VPCID" --query "Subnets[0].SubnetId" --output text)
echo "ID da Subnet: $SUBNETID"

# Create a security group (variables quoted to avoid word splitting)
SGID=$(aws ec2 create-security-group --group-name grupodeseguranca_script --description "Grupo de Seguranca para teste de Scripts" --vpc-id "$VPCID" --output text)
echo "ID do Grupo de Segurança: $SGID"

# Open TCP ports 22 and 80 to the world
aws ec2 authorize-security-group-ingress --group-id "$SGID" --protocol tcp --port 22 --cidr 0.0.0.0/0
aws ec2 authorize-security-group-ingress --group-id "$SGID" --protocol tcp --port 80 --cidr 0.0.0.0/0

# User-data script run by the instance on first boot:
cat<<EOF > script.sh
#!/bin/bash
apt-get install -y apache2
systemctl enable apache2
systemctl start apache2
echo "Teste de Script." > /var/www/html/index.html
EOF

# Create the instance
INSTANCEID=$(aws ec2 run-instances --image-id "$AMIID" --key-name "$KEYNAME" --instance-type t2.micro --security-group-ids "$SGID" --subnet-id "$SUBNETID" --user-data file://script.sh --query "Instances[0].InstanceId" --output text)
rm script.sh

echo "Aguardando a criação da instância $INSTANCEID..."
aws ec2 wait instance-running --instance-ids "$INSTANCEID"

# Fetch the instance's public DNS name
PUBLICNAME=$(aws ec2 describe-instances --instance-ids "$INSTANCEID" --query "Reservations[0].Instances[0].PublicDnsName" --output text)
echo "Conexões SSH permitidas na instância $INSTANCEID no endereço $PUBLICNAME."
echo "Abra outro terminal e execute:"
echo "ssh -i $KEYNAME.pem ubuntu@$PUBLICNAME"
# Typo fix: "acess" -> "acesse"
echo "Ou acesse a página:"
echo "http://$PUBLICNAME"
read -p "Aperte [Enter] para finalizar a instância..."

# Terminate the instance and clean up the security group
aws ec2 terminate-instances --instance-ids "$INSTANCEID"
echo "Finalizando a instância $INSTANCEID."
aws ec2 wait instance-terminated --instance-ids "$INSTANCEID"
aws ec2 delete-security-group --group-id "$SGID"
|
<reponame>ShaolinDeng/SDK-Android
/*
* Copyright (C) 2018 iFLYTEK CO.,LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iflytek.cyber.platform.resolver;
import android.text.TextUtils;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
/**
 * Helpers for working with raw directive JSON.
 */
class DirectiveUtil {

    /**
     * Deserializes and validates the "header" member of a directive.
     *
     * @return the parsed header, or null when it is absent or any of the
     *         mandatory fields (namespace, name, messageId) is missing/empty
     */
    static DirectiveHeader parseHeader(Gson gson, JsonObject directive) {
        final DirectiveHeader parsed = gson.fromJson(directive.get("header"), DirectiveHeader.class);
        // Collapse the null check and the mandatory-field checks into one guard.
        if (parsed == null
                || TextUtils.isEmpty(parsed.namespace)
                || TextUtils.isEmpty(parsed.name)
                || TextUtils.isEmpty(parsed.messageId)) {
            return null;
        }
        return parsed;
    }
}
|
#! /bin/bash
# Build a single Rebus project as a netstandard1.3 package with the given version.
if [ $# -ne 2 ]; then
    # Fix: quote the usage text — unquoted, [project] and [version] are glob
    # patterns and could be expanded against files in the current directory.
    echo "$0: usage: ./build.sh [project] [version] eg. ./build.sh Rebus 1.2"
    exit 1
fi

project=$1
version=$2

command="dotnet msbuild '/p:Configuration=Release;TargetFrameworkIdentifier=.NETStandard;TargetFrameworkVersion=v1.3;DefineConstants=NETSTANDARD1_3;Version=$version' ../Rebus/$project.csproj"
# Fix: quote so the command is echoed verbatim instead of word-split/globbed.
echo "$command"
eval "$command"
# Evaluation run: scores a trained LM on WikiText-103 validation data.
# Judging from the flag names, the input is augmented (everything but nouns and
# verbs removed, first two-thirds-sixth filled) and only the last sixth is
# scored — TODO confirm against the augmentation/eval function definitions.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-SS/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-SS/7-1024+0+512-N-VB-fill-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_fill_first_two_thirds_sixth --eval_function last_sixth_eval
"use strict";

// Generated icon data (Material "escalator", two-tone variant) in
// react-icons-kit format: a viewBox plus a tree of SVG node descriptors.
// NOTE(review): every shape appears twice (each node carries an identical
// nested child) — this looks like generator output; confirm before hand-editing.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_escalator_twotone = void 0;
var ic_escalator_twotone = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "rect",
      "attribs": {
        "fill": "none",
        "height": "24",
        "width": "24"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "fill": "none",
          "height": "24",
          "width": "24"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M19,5L19,5l0,14H5V5H19 M17,6h-3.3l-5,9H7c-0.83,0-1.5,0.67-1.5,1.5S6.17,18,7,18h3.3l5-9H17 c0.83,0,1.5-0.67,1.5-1.5S17.83,6,17,6z",
        "opacity": ".3"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M19,5L19,5l0,14H5V5H19 M17,6h-3.3l-5,9H7c-0.83,0-1.5,0.67-1.5,1.5S6.17,18,7,18h3.3l5-9H17 c0.83,0,1.5-0.67,1.5-1.5S17.83,6,17,6z",
          "opacity": ".3"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M19,5L19,5l0,14H5V5H19 M19,3H5C3.9,3,3,3.9,3,5v14c0,1.1,0.9,2,2,2h14c1.1,0,2-0.9,2-2l0-14C21,3.9,20.1,3,19,3L19,3z M17,6h-3.3l-5,9H7c-0.83,0-1.5,0.67-1.5,1.5S6.17,18,7,18h3.3l5-9H17c0.83,0,1.5-0.67,1.5-1.5S17.83,6,17,6z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M19,5L19,5l0,14H5V5H19 M19,3H5C3.9,3,3,3.9,3,5v14c0,1.1,0.9,2,2,2h14c1.1,0,2-0.9,2-2l0-14C21,3.9,20.1,3,19,3L19,3z M17,6h-3.3l-5,9H7c-0.83,0-1.5,0.67-1.5,1.5S6.17,18,7,18h3.3l5-9H17c0.83,0,1.5-0.67,1.5-1.5S17.83,6,17,6z"
        },
        "children": []
      }]
    }]
  }]
};
exports.ic_escalator_twotone = ic_escalator_twotone;
import pytest
import uvicore
from typing import List
from uvicore.support.dumper import dump
# This is all failing due to my provider refactors
@pytest.mark.asyncio
async def test_package(app1):
    """Package should resolve to app1's override, with the original kept under _BASE."""
    from uvicore.package.package import Package

    fqn = f"{Package.__module__}.{Package.__name__}"
    assert fqn == 'app1.overrides.package.Package'
    assert Package.__annotations__.get('custom1') is not None

    # The pre-override class stays registered in the IoC under the _BASE suffix.
    base = uvicore.ioc.make('uvicore.package.package.Package_BASE')
    assert f"{base.__module__}.{base.__name__}" == 'uvicore.package.package.Package'
@pytest.mark.asyncio
async def test_provider(app1):
    """ServiceProvider should resolve to app1's override, original kept under _BASE."""
    from uvicore.package.provider import ServiceProvider

    fqn = f"{ServiceProvider.__module__}.{ServiceProvider.__name__}"
    assert fqn == 'app1.overrides.provider.ServiceProvider'
    assert ServiceProvider.__annotations__.get('custom1') is not None

    # The pre-override class stays registered in the IoC under the _BASE suffix.
    base = uvicore.ioc.make('uvicore.package.provider.ServiceProvider_BASE')
    assert f"{base.__module__}.{base.__name__}" == 'uvicore.package.provider.ServiceProvider'
@pytest.mark.asyncio
async def test_application(app1):
    """App packages should surface the custom field added by the override."""
    pkg = uvicore.app.package('uvicore.configuration')
    assert pkg.custom1 == 'custom1 override here!!!'

    # The pre-override Application class stays registered under the _BASE suffix.
    base = uvicore.ioc.make('uvicore.foundation.application.Application_BASE')
    assert f"{base.__module__}.{base.__name__}" == 'uvicore.foundation.application.Application'
@pytest.mark.asyncio
async def test_user_model(app1):
    """Importing the auth User model should yield app1's override class."""
    # Both imports must resolve to the same class object (not an instance,
    # not a singleton).
    from uvicore.auth.models.user import User
    from app1.models.user import User as Override
    assert User == Override

    # The pre-override class stays registered in the IoC under the _BASE suffix.
    base = uvicore.ioc.make('uvicore.auth.models.user.User_BASE')
    assert f"{base.__module__}.{base.__name__}" == 'uvicore.auth.models.user.User'
@pytest.mark.asyncio
async def test_users_table(app1):
    """The users table override should be the same singleton as the original import."""
    # These are singletons: both imports must yield the one shared instance.
    from uvicore.auth.database.tables.users import Users
    from app1.database.tables.users import Users as Override
    assert Users == Override

    # The pre-override table stays registered in the IoC under the _BASE suffix.
    base = uvicore.ioc.make('uvicore.auth.database.tables.users.Users_BASE')
    assert f"{base.__module__}.{base.__name__}" == 'uvicore.auth.database.tables.users.Users'
|
from ..fingers.max_activity import find_activity_per_finger, find_tstat_per_finger
from ..viz.finger_channels import plot_finger_chan
from ..fingers.viz import plot_fingerbars
from ..viz import to_div, to_html, plot_surf
from ..fingers.correlation import plot_heatmap, plot_finger_chan_2
from ..read import load
from numpy import corrcoef, empty, save, NaN
from scipy.stats import norm as normdist
from wonambi import Data
def pipeline_fingers(subject, run, event_type='cues'):
    """Plot per-finger activity for one run and write it to an HTML report."""
    activity, channels = find_activity_per_finger(subject, run)
    figure = plot_finger_chan(activity, channels)
    # NOTE(review): FINGERS_DIR is not among this module's visible imports —
    # confirm it is provided at module level elsewhere.
    out_file = FINGERS_DIR / event_type / f'{subject}_run-{run}_{event_type}.html'
    to_html([to_div(figure)], out_file)
def pipeline_fingerbars(subject, run, event_type='cues'):
    """Plot per-finger t-statistic bars for one run and write an HTML report."""
    tstat, event_names = find_tstat_per_finger(subject, run, event_type)
    figure = plot_fingerbars(tstat, event_names)
    # NOTE(review): FINGERBARS_DIR is not among this module's visible imports —
    # confirm it is provided at module level elsewhere.
    out_file = FINGERBARS_DIR / event_type / f'{subject}_run-{run}_{event_type}.html'
    to_html([to_div(figure)], out_file)
def pipeline_finger_correlations(subject, run, event_type, threshold=5):
    """Correlate finger t-stats across active channels and write a heatmap report.

    A channel is kept when its t-stat exceeds `threshold` for any finger.
    """
    tstat, event_names = find_tstat_per_finger(subject, run, event_type)
    values = tstat.data[0]
    active = (values > threshold).any(axis=1)
    correlations = corrcoef(values[active].T)
    figure = plot_heatmap(correlations, event_names)
    # NOTE(review): FINGERCORR_DIR is not among this module's visible imports —
    # confirm it is provided at module level elsewhere.
    out_file = FINGERCORR_DIR / event_type / f'{subject}_run-{run}_{event_type}.html'
    to_html([to_div(figure)], out_file)
def pipeline_finger_correlations_each(subject, run, event_type, pvalue=0.05):
    """Per-finger correlation report: for each finger, correlate the t-stats of
    the channels active for that finger, plot each finger's surface map, and
    save the resulting correlation matrix alongside the HTML report.

    The activity threshold is derived from a two-sided normal test at `pvalue`.
    """
    t, events = find_tstat_per_finger(subject, run, event_type)
    X = t.data[0]
    # Two-sided z threshold for the requested p-value.
    threshold = normdist.ppf(1 - (pvalue / 2))
    elec = load('electrodes', subject, run)
    # One correlation column per finger (n_events x n_events).
    C = empty((t.number_of('event')[0], t.number_of('event')[0]))
    divs = []
    # Overview figure: mask out sub-threshold values with NaN so only the
    # significant channels are drawn.
    A = X.copy()
    i_active = abs(A) < threshold
    A[i_active] = NaN
    fig1 = plot_finger_chan_2(A, events, t.chan[0])
    for i in range(t.number_of('event')[0]):
        # Channels active for finger i (two-sided test).
        i_active = abs(X[:, i]) > threshold
        X1 = X[i_active, :]
        # Surface map for finger i, with inactive channels hidden via NaN.
        d = t(event=t.event[0][i], trial=0).copy()
        d[~i_active] = NaN
        t_1 = Data(d, s_freq=1, chan=t.chan[0])
        fig = plot_surf(t_1, elec, info='tstat')
        fig = fig.update_layout(title=t.event[0][i])
        divs.append(to_div(fig))
        # Keep only the column of the correlation matrix that belongs to
        # finger i — each column is computed from that finger's active channels.
        dat = corrcoef(X1.T)
        C[:, i] = dat[:, i]
    divs.append(to_div(fig1))
    fig = plot_heatmap(C, events)
    divs.append(to_div(fig))
    # NOTE(review): FINGERCORREACH_DIR is not among this module's visible
    # imports — confirm it is provided at module level elsewhere.
    html_file = FINGERCORREACH_DIR / event_type / f'{subject}_run-{run}_{event_type}.html'
    to_html(divs, html_file)
    npy_file = html_file.with_suffix('.npy')
    save(str(npy_file), C)
|
<gh_stars>0
import { StatusCodes } from 'http-status-codes';
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { Link } from "react-router-dom";
import { bindActionCreators } from 'redux';
import RouteConstants from '../constants/routeConstants';
import IAppState from '../interfaces/IAppState';
import StoreConstants from './../constants/storeConstants';
// Slice of the redux store projected into the header component.
interface StateProps {
    isAuthenticated: boolean;
    firstName: string;
    lastName: string;
}
// NOTE(review): a class (rather than an interface) is used here so that
// `new DispatchProps()` can double as the action-creator map passed to
// bindActionCreators below — confirm this is intentional.
class DispatchProps {
    // Plain action creator: dispatching it flips the store to logged-out state.
    logout = () => {
        return { type: StoreConstants.LOGOUT };
    }
}
/**
 * Top navigation bar. Always shows Home/Contact; shows Check List and Logout
 * when the user is authenticated, Login/SignUp otherwise.
 * Exported unconnected for testing; the default export below is the
 * store-connected version.
 */
export class HeaderComponent extends Component<StateProps & DispatchProps, any>
{
    // Calls the server-side logout endpoint; only on HTTP 200 does it also
    // dispatch the store logout action, keeping client state in sync.
    logout = async () => {
        let response = await fetch("/api/authentication/logout", {
            method: "POST"
        });
        if (response.status === StatusCodes.OK)
            this.props.logout();
    }

    render() {
        return (
            <header>
                <div className="header-content">
                    <div className="left-part">
                        LOGO
                    </div>
                    <div className="right-part">
                        <Link className="menu-item" to={RouteConstants.homeRoute}>Home</Link>
                        <Link className="menu-item" to={RouteConstants.contactRoute}>Contact</Link>
                        {
                            this.props.isAuthenticated ?
                                <>
                                    <Link className="menu-item" to={RouteConstants.checkListRoute}>Check List</Link>
                                    <div className="menu-item" onClick={this.logout}>Logout {this.props.firstName + " " + this.props.lastName}</div>
                                </> :
                                <>
                                    <Link className="menu-item" to={RouteConstants.loginRoute}>Login</Link>
                                    <Link className="menu-item" to={RouteConstants.signUpRoute}>SignUp</Link>
                                </>
                        }
                    </div>
                </div>
            </header>
        );
    }
}
// Maps the user slice of the store onto the header's props, preserving any
// props passed in by the parent.
function connectStateToProps(state: IAppState, ownProps: any): StateProps {
    const { isAuthenticated, firstName, lastName } = state.user;
    return { ...ownProps, isAuthenticated, firstName, lastName };
}
// Binds every action creator declared on DispatchProps to the store dispatch.
function connectDispatchToProps(dispatch: any): DispatchProps {
    return bindActionCreators({ ...new DispatchProps() }, dispatch);
}
// Store-connected header; this is the component the rest of the app renders.
let Header = connect(connectStateToProps, connectDispatchToProps)(HeaderComponent);
export default Header;
$(function() {
    // Initial table contents: one example row describing a database column.
    // (The Chinese keys are the column identifiers used throughout the grid
    // configuration and must match the `columns`/`colHeaders` entries.)
    var dataObject = [
        {
            '字段': 'login_name',
            '说明': '登录名',
            '数据类型': 'C(9)',
            '允许为空': 'N',
            '主键': 'Y',
            '备注': ""
        }
    ];

    var e = document.getElementById('main');
    var settings = {
        data: dataObject,
        // BUG FIX: this key was misspelled "colums", so Handsontable silently
        // ignored the whole per-column configuration.
        columns: [
            { data: '字段', type: 'text' },
            { data: '说明', type: 'text' },
            { data: '数据类型', type: 'text' },
            { data: '允许为空', type: 'text' },
            { data: '主键', type: 'text' },
            { data: '备注', type: 'text' }
        ],
        width: 805,
        height: 407,
        manualRowResize: true,      // allow dragging row heights
        manualColumnResize: true,   // allow dragging column widths
        rowHeaders: true,
        colHeaders: [
            '字段', '说明', '数据类型', '允许为空', '主键', '备注'
        ],
        manualRowMove: true,        // allow dragging rows to reorder
        manualColumnMove: true,     // allow dragging columns to reorder
        contextMenu: true,          // right-click context menu
        filters: true,
        dropdownMenu: true
    };
    // The constructor attaches the grid to the element and wires up all
    // behavior configured in `settings`.
    var main = new Handsontable(e, settings);
});
// manualRowMove: whether to enable manual row moving
// bindRowsWithHeaders: row headers move together when rows are moved
// Open the app page in a new tab when the extension's toolbar icon is clicked,
// and record the click in synced storage.
chrome.browserAction.onClicked.addListener(function(tab) {
    chrome.storage.sync.set({"chrome-app-sync": true});
    // Fix: chrome.extension.getURL is deprecated (removed in Manifest V3);
    // chrome.runtime.getURL is the drop-in replacement.
    chrome.tabs.create({'url': chrome.runtime.getURL('index.html')}, function(tab) {
        // tab opened
    });
});
|
package Model;
import JavaBeans.Categoria;
import JavaBeans.Conta;
import JavaBeans.Lancamento;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
public class ConteudoDAO {
private final Connection conexao;
public boolean existe;
public ConteudoDAO() {
this.conexao = new Conexao().conectar();
}
//CRUD - CREATE
public void cria_categoria(Categoria categoria) throws SQLException {
//Define as query para criar e verificar se existe
String create = "INSERT INTO categorias (descricao) VALUES (?);";
String select = "SELECT * FROM categorias WHERE descricao = ?;";
//Try-with-resource para abrir a conexão e fechar logo depois do uso
try (PreparedStatement pst_verifica = conexao.prepareStatement(select);) {
//Prepara a variavel que vai no banco veificar se já existe uma categoria com a mesma desc.
pst_verifica.setString(1, categoria.getDescricao());
//Executa a query no banco
ResultSet rs = pst_verifica.executeQuery();
//Variavel true/false que indica se já existe ou não a categoria
existe = false;
//Se o rs.next() retornar algum dado, indica que existe uma categoria com a inserida
if (rs.next()) {
//Define a variavel de controle para true (que existe)
existe = true;
System.out.println("\n\nErro - ConteudoDAO - CREATE - Categoria: Categoria ja existente!");
} else {
//Caso não exista, prepara a query de inserir no banco
try (PreparedStatement pst = conexao.prepareStatement(create);) {
//Prepara a variavel que vai enviar ao banco o dado
pst.setString(1, categoria.getDescricao());
//Executa a query de inserir
pst.executeUpdate();
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - CREATE - Categoria: " + e.getMessage());
}
}
}
}
//CRUD - READ
public ArrayList<Categoria> lista_categoria() {
ArrayList<Categoria> lista_categoria = new ArrayList<>();
String read = "SELECT * FROM categorias";
try (PreparedStatement pst = conexao.prepareStatement(read);) {
ResultSet rs = pst.executeQuery();
while (rs.next()) {
int id = rs.getInt(1);
String descricao = rs.getString(2);
lista_categoria.add(new Categoria(id, descricao));
}
return lista_categoria;
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - READ - Categoria: " + e.getMessage());
return null;
}
}
//CRUD - READ
public void get_categoria(Categoria categoria) {
String read = "SELECT * FROM categorias WHERE id = ?";
try (PreparedStatement pst = conexao.prepareStatement(read)) {
pst.setInt(1, categoria.getId());
ResultSet rs = pst.executeQuery();
while (rs.next()) {
categoria.setId(rs.getInt(1));
categoria.setDescricao(rs.getString(2));
}
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - READ GET - Categoria: " + e.getMessage());
}
}
//CRUD - UPDATE
public void altera_categoria(Categoria categoria) throws SQLException {
String update = "UPDATE categorias SET descricao = ? WHERE id = ?;";
String select = "SELECT * FROM categorias WHERE descricao = ?;";
try (PreparedStatement pst_verifica = conexao.prepareStatement(select);) {
pst_verifica.setString(1, categoria.getDescricao());
ResultSet rs = pst_verifica.executeQuery();
existe = false;
if (rs.next()) {
existe = true;
System.out.println("\n\nErro - ConteudoDAO - CREATE - Categoria: Categoria ja existente!");
} else {
try (PreparedStatement pst = conexao.prepareStatement(update)) {
pst.setString(1, categoria.getDescricao());
pst.setInt(2, categoria.getId());
pst.executeUpdate();
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - UPDATE - Categoria: " + e.getMessage());
}
}
}
}
//CRUD - DELETE
public void apaga_categoria(Categoria categoria) {
String delete = "DELETE FROM categorias WHERE id = ?";
try (PreparedStatement pst = conexao.prepareStatement(delete)) {
pst.setInt(1, categoria.getId());
pst.executeUpdate();
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - DELETE - Categoria: " + e.getMessage());
}
}
//CRUD - CREATE
public void cria_conta(Conta conta) throws SQLException {
//Define as query para criar e verificar se existe
String create = "INSERT INTO contas (id_usuario,nome_conta,banco,agencia,conta_corrente) VALUES (?,?,?,?,?);";
String select = "SELECT * FROM contas WHERE banco=? && agencia=? && conta_corrente = ?;";
//Try-with-resource para abrir a conexão e fechar logo depois do uso
try (PreparedStatement pst_verifica = conexao.prepareStatement(select);) {
//Prepara a variavel que vai no banco veificar se já existe uma categoria com a mesma desc.
pst_verifica.setString(1, conta.getBanco());
pst_verifica.setString(2, conta.getAgencia());
pst_verifica.setString(3, conta.getConta_corrente());
//Executa a query no banco
ResultSet rs = pst_verifica.executeQuery();
//Variavel true/false que indica se já existe ou não a categoria
existe = false;
//Se o rs.next() retornar algum dado, indica que existe uma categoria com a inserida
if (rs.next()) {
//Define a variavel de controle para true (que existe)
existe = true;
System.out.println("\n\nErro - ConteudoDAO - CREATE - Conta: Conta ja existente!");
} else {
//Caso não exista, prepara a query de inserir no banco
try (PreparedStatement pst = conexao.prepareStatement(create);) {
//Prepara a variavel que vai enviar ao banco o dado
pst.setInt(1, conta.getId_usuario());
pst.setString(2, conta.getNome_conta());
pst.setString(3, conta.getBanco());
pst.setString(4, conta.getAgencia());
pst.setString(5, conta.getConta_corrente());
//Executa a query de inserir
pst.executeUpdate();
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - CREATE - Conta: " + e.getMessage());
}
}
}
}
//CRUD - READ
public ArrayList<Conta> lista_conta() {
ArrayList<Conta> lista_conta = new ArrayList<>();
String read = "SELECT * FROM contas";
try (PreparedStatement pst = conexao.prepareStatement(read);) {
ResultSet rs = pst.executeQuery();
while (rs.next()) {
int id = rs.getInt(1);
int id_usuario = rs.getInt(2);
String nome_conta = rs.getString(3);
String banco = rs.getString(4);
String agencia = rs.getString(5);
String conta_corrente = rs.getString(6);
lista_conta.add(new Conta(id, id_usuario, nome_conta, banco, agencia, conta_corrente));
}
return lista_conta;
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - READ - Conta: " + e.getMessage());
return null;
}
}
//CRUD - READ
public void get_conta(Conta conta) {
String read = "SELECT * FROM contas WHERE id = ?";
try (PreparedStatement pst = conexao.prepareStatement(read)) {
pst.setInt(1, conta.getId());
ResultSet rs = pst.executeQuery();
while (rs.next()) {
conta.setId(rs.getInt(1));
conta.setId_usuario(rs.getInt(2));
conta.setNome_conta(rs.getString(3));
conta.setBanco(rs.getString(4));
conta.setAgencia(rs.getString(5));
conta.setConta_corrente(rs.getString(6));
}
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - READ GET - Conta: " + e.getMessage());
}
}
//CRUD - UPDATE
public void altera_conta(Conta conta) throws SQLException {
String update = "UPDATE contas SET id_usuario=?,nome_conta=?,banco=?,agencia=?,conta_corrente=? WHERE id = ?;";
String select = "SELECT * FROM contas WHERE banco=? && agencia=? && conta_corrente = ?;";
try (PreparedStatement pst_verifica = conexao.prepareStatement(select);) {
pst_verifica.setString(1, conta.getBanco());
pst_verifica.setString(2, conta.getAgencia());
pst_verifica.setString(3, conta.getConta_corrente());
ResultSet rs = pst_verifica.executeQuery();
existe = false;
if (rs.next()) {
existe = true;
System.out.println("\n\nErro - ConteudoDAO - CREATE - Conta: Conta ja existente!");
} else {
try (PreparedStatement pst = conexao.prepareStatement(update)) {
pst.setInt(1, conta.getId_usuario());
pst.setString(2, conta.getNome_conta());
pst.setString(3, conta.getBanco());
pst.setString(4, conta.getAgencia());
pst.setString(5, conta.getConta_corrente());
pst.setInt(6, conta.getId());
pst.executeUpdate();
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - CREATE - Conta: " + e.getMessage());
}
}
}
}
//CRUD - DELETE
public void apaga_conta(Conta conta) {
String delete = "DELETE FROM contas WHERE id = ?";
try (PreparedStatement pst = conexao.prepareStatement(delete)) {
pst.setInt(1, conta.getId());
pst.executeUpdate();
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - DELETE - Conta: " + e.getMessage());
}
}
//CRUD - CREATE
public void cria_lancamento(Lancamento lancamento) throws SQLException {
//Define as query para criar e verificar se existe
String create = "INSERT INTO lancamentos (id_conta,id_categoria,valor,operacao,data,descricao) VALUES (?,?,?,?,?,?);";
String select = "SELECT * FROM lancamentos WHERE id_conta=? && id_categoria=? && valor=? && operacao = ?;";
//Try-with-resource para abrir a conexão e fechar logo depois do uso
try (PreparedStatement pst_verifica = conexao.prepareStatement(select);) {
//Prepara a variavel que vai no banco veificar se já existe uma categoria com a mesma desc.
pst_verifica.setInt(1, lancamento.getId_conta());
pst_verifica.setInt(2, lancamento.getId_categoria());
pst_verifica.setFloat(3, lancamento.getValor());
pst_verifica.setString(4, lancamento.getOperacao());
//Executa a query no banco
ResultSet rs = pst_verifica.executeQuery();
//Variavel true/false que indica se já existe ou não a categoria
existe = false;
//Se o rs.next() retornar algum dado, indica que existe uma categoria com a inserida
if (rs.next()) {
//Define a variavel de controle para true (que existe)
existe = true;
System.out.println("\n\nErro - ConteudoDAO - CREATE - Lançamento: Lançamento ja existente!");
} else {
//Caso não exista, prepara a query de inserir no banco
try (PreparedStatement pst = conexao.prepareStatement(create);) {
//Prepara a variavel que vai enviar ao banco o dado
pst.setInt(1, lancamento.getId_conta());
pst.setInt(2, lancamento.getId_categoria());
pst.setFloat(3, lancamento.getValor());
pst.setString(4, lancamento.getOperacao());
pst.setDate(5, lancamento.getData());
pst.setString(6, lancamento.getDescricao());
//Executa a query de inserir
pst.executeUpdate();
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - CREATE - Lancamento: " + e.getMessage());
}
}
}
}
//CRUD - READ
public ArrayList<Lancamento> lista_lancamento() {
ArrayList<Lancamento> lista_lancamento = new ArrayList<>();
String read = "SELECT * FROM lancamentos";
try (PreparedStatement pst = conexao.prepareStatement(read);) {
ResultSet rs = pst.executeQuery();
while (rs.next()) {
int id = rs.getInt(1);
int id_conta = rs.getInt(2);
int id_categoria = rs.getInt(3);
float valor = rs.getFloat(4);
String operacao = rs.getString(5);
Date data = rs.getDate(6);
String descricao = rs.getString(7);
lista_lancamento.add(new Lancamento(id, id_conta, id_categoria, valor, operacao, data, descricao));
}
return lista_lancamento;
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - READ - Lancamento: " + e.getMessage());
return null;
}
}
//CRUD - READ
public void get_lancamento(Lancamento lancamento) {
String read = "SELECT * FROM lancamentos WHERE id = ?";
try (PreparedStatement pst = conexao.prepareStatement(read)) {
pst.setInt(1, lancamento.getId());
ResultSet rs = pst.executeQuery();
while (rs.next()) {
lancamento.setId(rs.getInt(1));
lancamento.setId_conta(rs.getInt(2));
lancamento.setId_categoria(rs.getInt(3));
lancamento.setValor(rs.getFloat(4));
lancamento.setOperacao(rs.getString(5));
lancamento.setData(rs.getDate(6));
lancamento.setDescricao(rs.getString(7));
}
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - READ GET - Lancamento: " + e.getMessage());
}
}
//CRUD - UPDATE
public void altera_lancamento(Lancamento lancamento) {
String update = "UPDATE lancamentos SET id_conta=?,id_categoria=?,valor=?,operacao=?,data=?,descricao=? WHERE id = ?;";
try (PreparedStatement pst = conexao.prepareStatement(update)) {
pst.setInt(1, lancamento.getId_conta());
pst.setInt(2, lancamento.getId_categoria());
pst.setFloat(3, lancamento.getValor());
pst.setString(4, lancamento.getOperacao());
pst.setDate(5, lancamento.getData());
pst.setString(6, lancamento.getDescricao());
pst.setInt(7, lancamento.getId());
pst.executeUpdate();
} catch (Exception e) {
System.out.println("\n\nErro - ConteudoDAO - UPDATE - Lancamento: " + e.getMessage());
}
}
//CRUD - DELETE
/**
 * Deletes the "lancamento" row whose id matches the supplied object.
 * Errors are printed to stdout and swallowed, matching this DAO's style.
 */
public void apaga_lancamento(Lancamento lancamento) {
    String delete = "DELETE FROM lancamentos WHERE id = ?";
    try (PreparedStatement statement = conexao.prepareStatement(delete)) {
        statement.setInt(1, lancamento.getId());
        statement.executeUpdate();
    } catch (Exception e) {
        System.out.println("\n\nErro - ConteudoDAO - DELETE - Lancamento: " + e.getMessage());
    }
}
}//----------END----------
|
#!/bin/bash -e
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# "---------------------------------------------------------"
# "- -"
# "- Common commands for all scripts -"
# "- -"
# "---------------------------------------------------------"
set -o errexit
set -o pipefail

# Locate the root directory. Used by scripts that source this one.
# shellcheck disable=SC2034
ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
SCRIPT_ROOT="${ROOT}/scripts"

# Set input variable: which terraform build is being driven (cluster|vpc|secure).
buildtype=$1

# git is required for this tutorial
# https://git-scm.com/book/en/v2/Getting-Started-Installing-Git
command -v git >/dev/null 2>&1 || { \
  echo >&2 "git required but it's not installed. Aborting."
  echo >&2 "Refer to: https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
  exit 1
}

# gcloud is required for this tutorial
# https://cloud.google.com/sdk/install
command -v gcloud >/dev/null 2>&1 || { \
  echo >&2 "gcloud required but it's not installed. Aborting."
  echo >&2 "Refer to: https://cloud.google.com/sdk/install"
  exit 1
}

# Make sure kubectl is installed. If not, refer to:
# https://kubernetes.io/docs/tasks/tools/install-kubectl/
command -v kubectl >/dev/null 2>&1 || { \
  echo >&2 "kubectl required but it's not installed. Aborting."
  echo >&2 "Refer to: https://kubernetes.io/docs/tasks/tools/install-kubectl/"
  exit 1
}
# Simple test helpers that avoids eval and complex quoting. Note that stderr is
# redirected to stdout so we can properly handle output.
# Usage: test_des "description"
# Usage: test_des "description"
# Prints the check description without a trailing newline so the result
# printed by test_cmd ("pass"/"fail") lands on the same line.
test_des() {
  printf 'Checking that %s... ' "$1"
}
# Usage: test_cmd "$(command string 2>&1)"
# Usage: test_cmd "$(command string 2>&1)"
# NOTE: $? below observes the exit status of the command substitution in the
# caller's argument, so capturing it must be the very first statement.
test_cmd() {
  local result=$?
  local output="$1"
  # If command completes successfully, output "pass" and continue.
  if [[ $result == 0 ]]; then
    echo "pass"
  # If command fails, output the error code, command output and exit.
  else
    echo -e "fail ($result)\\n"
    cat <<<"$output"
    exit $result
  fi
}
# Load cluster_config when present and validate the required variables;
# otherwise offer to seed one from the example template and exit so the
# user can review it before re-running.
if [ -f "${ROOT}/cluster_config" ]; then
. "${ROOT}/cluster_config"
echo $'INFO: Verifying GCP Configuration'
# Verify the needed env variables.
if [[ -z "${REGION}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: This script requires Region information to deploy resources. Please update \'REGION\' with an appropriate region name, like \'us-west1\' in the \'cluster_config\' file' 1>&2
echo $''1>&2; tput sgr0
exit 1;
fi
if [[ -z "${ZONE}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: This script requires a Zone information to deploy resources. Please update \'ZONE\' with an appropriate zone name, like \'us-west1-a\' in the \'cluster_config\' file' 1>&2
echo $''1>&2; tput sgr0
exit 1;
fi
if [[ -z "${PROJECT}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: This script requires a project to deploy resources. Please update \'PROJECT\' with the project name in the \'cluster_config\' file' 1>&2
echo $''1>&2; tput sgr0
exit 1;
fi
if [[ -z "${GOVERNANCE_PROJECT}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: This script requires a project for governance resources. \nPlease update the \'GOVERNANCE_PROJECT\' in the \'cluster_config\' file' 1>&2
echo $''1>&2; tput sgr0
exit 1;
fi
else
tput setaf 7; echo "" 1>&2
# Fixed typo in the prompt: "troublshoot" -> "troubleshoot".
read -p $'INFO: A cluster_config file does not exist in the root of the directory indicating this is the first time this deployment has been run. \n\nIf this is a new deployment, please enter yes(y) to generate a new configuration file or no(n) to cancel initialization and troubleshoot: ' yn ; tput sgr0
case $yn in
[Yy]* ) tput setaf 2; echo "" 1>&2;
echo $'A cluster_config file will now be created in the root directory. Please review the inputs and update as needed before restarting the deployment. If left unmodified, the default values will deploy a Private GKE cluster with a default linux nodepool in a standalone VPC.\n\nFor guidance on the cluster_config file and your deployment options, please reference:'; tput sgr0
echo "https://github.com/GoogleCloudPlatform/gke-poc-toolkit/blob/main/docs/CLUSTERS.md"
echo ""
echo "INFO: The default cluster_config file leverages the Project, Region and Zone defaults in the current shell. Please verify these are set or enter new default values in cluster_config before proceeding."
echo ""
cp "${SCRIPT_ROOT}/cluster_config.example" "${ROOT}/cluster_config";
exit
;;
[Nn]* ) tput setaf 3; echo "" 1>&2;
echo $'WARN: Cancelling initialization, please verify your cluster_config file and restart'; tput sgr0
exit
;;
* ) tput setaf 1; echo "" 1>&2;
echo "ERROR: Incorrect input. Cancelling execution"; tput sgr0
exit 1
;;
esac
fi
# This check verifies if the PUBLIC_CLUSTER boolean value has been set to true
# - If set to true, the cluster master endpoint is exposed as a public endpoint and the bastion host is not created
# - If not set, the boolean value defaults to false and access to the cluster master endpoint is limited to a bastion host
# (Fixed duplicated "the the" in the comment and both INFO messages.)
if [[ ${PUBLIC_CLUSTER} == true ]]; then
PRIVATE="false"
CLUSTER_TYPE="public"
echo $'INFO: Setting deployment value to Public Cluster; access to the cluster master endpoint will be unrestricted public endpoint' 1>&2
else
PRIVATE="true"
CLUSTER_TYPE="private"
echo $'INFO: Setting deployment value to Private Cluster; access to the cluster master endpoint will be limited to the bastion host' 1>&2
fi
# A public cluster must restrict master access to a caller-supplied IP.
if [[ -z ${AUTH_IP} ]] && [ "${PRIVATE}" = "false" ]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: Public Endpoint GKE Cluster access will be restricted to a specified Public IP\n Please set \'AUTH_IP\''
echo "" ; tput sgr0
exit 1
fi
# This check verifies if the WINDOWS_CLUSTER boolean value has been set to true
# - If set to true, a Windows GKE cluster is created
# - If not set, the boolean value defaults to false and a linux GKE cluster is created
if [[ ${WINDOWS_CLUSTER} == true ]]; then
WINDOWS="true"
echo "INFO: Setting GKE Node Pool type to Windows" 1>&2
else
WINDOWS="false"
echo "INFO: Setting GKE Node Pool type to Linux" 1>&2
fi
# This check verifies if the PREEMPTIBLE_NODES boolean value has been set to true
# - If set to true, deploy GKE cluster with preemptible nodes
# - If not set, the boolean value defaults to false and the cluster deploys with traditional node types
# (No INFO message is printed in the default/non-preemptible case.)
if [[ ${PREEMPTIBLE_NODES} == true ]]; then
PREEMPTIBLE="true"
echo "INFO: Setting GKE Node type to preemptible nodes" 1>&2
else
PREEMPTIBLE="false"
fi
# This check verifies if the SHARED_VPC boolean value has been set to true
# - If set to true, additional variables are required to deploy to an existing shared VPC
# - If not set, the boolean value defaults to false and GKE is deployed to a standalone VPC
if [[ ${SHARED_VPC} == true ]]; then
echo "INFO: Verifying Shared VPC Configuration Information" 1>&2
if [[ -z "${SHARED_VPC_NAME}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: Deploying to a shared VPC requires the shared VPC name to be set.\n Please set \'SHARED_VPC_NAME\' in the \'cluster_config\' file' 1>&2
echo "" ; tput sgr0
exit 1;
fi
if [[ -z "${SHARED_VPC_SUBNET_NAME}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: Deploying to a shared VPC requires the shared VPC subnet name to be set.\n Please set \'SHARED_VPC_SUBNET_NAME\' in the \'cluster_config\' file' 1>&2
echo "" ; tput sgr0
exit 1;
fi
if [[ -z "${SHARED_VPC_PROJECT_ID}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: Deploying to a shared VPC requires the Shared VPC Project ID to be set.\n Please set \'SHARED_VPC_PROJECT_ID\' in the \'cluster_config\' file' 1>&2
echo "" ; tput sgr0
exit 1;
fi
# (Fixed duplicated "requires requires" in the two messages below, and the
# stray space inside 'SERVICE_IP_RANGE_NAME'.)
if [[ -z "${POD_IP_RANGE_NAME}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: Deploying to a shared VPC requires a secondary IP range be created on the subnet and configured with the pod IP range for the cluster.\n Please set \'POD_IP_RANGE_NAME\' in the \'cluster_config\' file' 1>&2
echo "" ; tput sgr0
exit 1;
fi
if [[ -z "${SERVICE_IP_RANGE_NAME}" ]]; then
tput setaf 1; echo "" 1>&2
echo $'ERROR: Deploying to a shared VPC requires a secondary IP range be created on the subnet and configured with the service IP range for the cluster.\n Please set \'SERVICE_IP_RANGE_NAME\' in the \'cluster_config\' file' 1>&2
echo "" ; tput sgr0
exit 1;
fi
# Verify if the target shared VPC subnet exists and we have access to it - If not, fail the test
# - Skip this step if creating the Shared VPC (since it will not exist yet)
# - Perform same test for both the pod and service secondary subnets
# (Variables passed to gcloud are now quoted to survive word splitting.)
if [[ "${buildtype}" != "vpc" ]]; then
if [ "$(gcloud compute networks subnets describe "$SHARED_VPC_SUBNET_NAME" --region "$REGION" --project "$SHARED_VPC_PROJECT_ID" | grep name | sed 's/^.*: //')" != "$SHARED_VPC_SUBNET_NAME" ]; then
tput setaf 1; echo "" 1>&2
echo "ERROR: Shared VPC subnet ${SHARED_VPC_SUBNET_NAME} does not exist in region ${REGION} or you do not have access." 1>&2
echo "Please resolve this issue before continuing." 1>&2
echo "" ; tput sgr0
exit 1;
elif [ "$(gcloud compute networks subnets describe "$SHARED_VPC_SUBNET_NAME" --region "$REGION" --project "$SHARED_VPC_PROJECT_ID" | grep "$POD_IP_RANGE_NAME" | sed 's/^.*: //')" != "$POD_IP_RANGE_NAME" ]; then
tput setaf 1; echo "" 1>&2
echo "ERROR: Secondary subnetwork ${POD_IP_RANGE_NAME} does not exist in shared VPC subnet ${SHARED_VPC_SUBNET_NAME} in region ${REGION} or you do not have access." 1>&2
echo "Please resolve this issue before continuing." 1>&2
echo "" ; tput sgr0
exit 1;
elif [ "$(gcloud compute networks subnets describe "$SHARED_VPC_SUBNET_NAME" --region "$REGION" --project "$SHARED_VPC_PROJECT_ID" | grep "$SERVICE_IP_RANGE_NAME" | sed 's/^.*: //')" != "$SERVICE_IP_RANGE_NAME" ]; then
tput setaf 1; echo "" 1>&2
echo "ERROR: Secondary subnetwork ${SERVICE_IP_RANGE_NAME} does not exist in shared VPC subnet ${SHARED_VPC_SUBNET_NAME} in region ${REGION} or you do not have access." 1>&2
echo "Please resolve this issue before continuing." 1>&2
echo "" ; tput sgr0
exit 1;
fi
fi
else
SHARED_VPC="false"
echo "INFO: Setting VPC type to standalone" 1>&2
fi
# Select terraform state storage: GCS backend when STATE=gcs, local otherwise.
if [[ "${STATE}" = "gcs" ]]; then
STATE="gcs"
BUCKET="${PROJECT}-${buildtype}-state"
# NOTE(review): this appends BUCKET= to cluster_config on every run, so
# repeated runs accumulate duplicate lines — confirm whether intended.
echo -e "BUCKET=${BUCKET}" >> ${ROOT}/cluster_config
# Write an empty "gcs" backend stanza for the selected build; the actual
# bucket/prefix are supplied later via `terraform init` flags.
case $buildtype in
cluster) echo "" 1>&2;
echo $'INFO: Setting values for backend configuration for Cluster Build';
TERRAFORM_ROOT="${ROOT}/terraform/cluster_build";
cat > ${TERRAFORM_ROOT}/backend.tf <<-'EOF'
terraform {
backend "gcs" {
}
}
EOF
;;
vpc) echo "" 1>&2;
echo $'INFO: Setting values for backend configuration for Shared VPC Build';
TERRAFORM_ROOT="${ROOT}/terraform/shared_vpc";
cat > ${TERRAFORM_ROOT}/backend.tf <<-'EOF'
terraform {
backend "gcs" {
}
}
EOF
;;
secure) echo "" 1>&2;
echo $'INFO: Setting values for backend configuration for Cluster Security Build';
TERRAFORM_ROOT="${ROOT}/terraform/security";
cat > ${TERRAFORM_ROOT}/backend.tf <<-'EOF'
terraform {
backend "gcs" {
}
}
EOF
;;
*) tput setaf 1; echo "" 1>&2;
echo "ERROR: Incorrect input. Cancelling execution"; tput sgr0
exit 1
;;
esac
else
STATE="local"
fi
# Generate terraform.tfvars for the selected build by sourcing the shared
# generator script with TERRAFORM_ROOT pointed at the right module.
case $buildtype in
cluster) echo "" 1>&2;
echo $'INFO: Creating terraform.tfvars file for Cluster Build';
TERRAFORM_ROOT="${ROOT}/terraform/cluster_build";
source "${SCRIPT_ROOT}/generate-tfvars.sh";
;;
vpc) echo "" 1>&2;
echo $'INFO: Creating terraform.tfvars for Shared VPC Build';
TERRAFORM_ROOT="${ROOT}/terraform/shared_vpc";
source "${SCRIPT_ROOT}/generate-tfvars.sh";
;;
secure) echo "" 1>&2;
echo $'INFO: Creating terraform.tfvars for Cluster Security Build';
TERRAFORM_ROOT="${ROOT}/terraform/security";
source "${SCRIPT_ROOT}/generate-tfvars.sh";
# The heredoc delimiter is unquoted on purpose: ${CLUSTER_TYPE} and
# ${PROJECT} expand before the manifest is piped to kubectl.
cat <<-EOF | kubectl apply -f -
apiVersion: core.cnrm.cloud.google.com/v1beta1
kind: ConfigConnector
metadata:
name: configconnector.core.cnrm.cloud.google.com
spec:
mode: cluster
googleServiceAccount: "${CLUSTER_TYPE}-endpoint-cluster-kcc@${PROJECT}.iam.gserviceaccount.com"
EOF
;;
*) tput setaf 1; echo "" 1>&2;
echo "ERROR: Incorrect input. Cancelling execution"; tput sgr0
exit 1
;;
esac
/*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kubelet
import (
"os"
"testing"
"time"
cadvisorapi "github.com/google/cadvisor/info/v1"
cadvisorapiv2 "github.com/google/cadvisor/info/v2"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/client/record"
cadvisortest "k8s.io/kubernetes/pkg/kubelet/cadvisor/testing"
"k8s.io/kubernetes/pkg/kubelet/cm"
kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
containertest "k8s.io/kubernetes/pkg/kubelet/container/testing"
"k8s.io/kubernetes/pkg/kubelet/network"
nettest "k8s.io/kubernetes/pkg/kubelet/network/testing"
kubepod "k8s.io/kubernetes/pkg/kubelet/pod"
podtest "k8s.io/kubernetes/pkg/kubelet/pod/testing"
"k8s.io/kubernetes/pkg/kubelet/status"
"k8s.io/kubernetes/pkg/util"
utiltesting "k8s.io/kubernetes/pkg/util/testing"
)
// TestRunOnce builds a Kubelet wired entirely to mocks and fakes, runs
// runOnce over a single pod, and verifies the pod starts without error.
func TestRunOnce(t *testing.T) {
	cadvisor := &cadvisortest.Mock{}
	cadvisor.On("MachineInfo").Return(&cadvisorapi.MachineInfo{}, nil)
	// NOTE(review): mb is a byte-size constant defined elsewhere in this
	// package (presumably 1<<20) — confirm against sibling files.
	cadvisor.On("DockerImagesFsInfo").Return(cadvisorapiv2.FsInfo{
		Usage:     400 * mb,
		Capacity:  1000 * mb,
		Available: 600 * mb,
	}, nil)
	cadvisor.On("RootFsInfo").Return(cadvisorapiv2.FsInfo{
		Usage:    9 * mb,
		Capacity: 10 * mb,
	}, nil)
	podManager := kubepod.NewBasicPodManager(podtest.NewFakeMirrorClient())
	diskSpaceManager, _ := newDiskSpaceManager(cadvisor, DiskSpacePolicy{})
	fakeRuntime := &containertest.FakeRuntime{}
	// Kubelet needs a writable root dir; use a throwaway temp dir.
	basePath, err := utiltesting.MkTmpdir("kubelet")
	if err != nil {
		t.Fatalf("can't make a temp rootdir %v", err)
	}
	defer os.RemoveAll(basePath)
	kb := &Kubelet{
		rootDirectory:       basePath,
		recorder:            &record.FakeRecorder{},
		cadvisor:            cadvisor,
		nodeLister:          testNodeLister{},
		nodeInfo:            testNodeInfo{},
		statusManager:       status.NewManager(nil, podManager),
		containerRefManager: kubecontainer.NewRefManager(),
		podManager:          podManager,
		os:                  containertest.FakeOS{},
		volumeManager:       newVolumeManager(),
		diskSpaceManager:    diskSpaceManager,
		containerRuntime:    fakeRuntime,
		reasonCache:         NewReasonCache(),
		clock:               util.RealClock{},
	}
	kb.containerManager = cm.NewStubContainerManager()
	kb.networkPlugin, _ = network.InitNetworkPlugin([]network.NetworkPlugin{}, "", nettest.NewFakeHost(nil))
	if err := kb.setupDataDirs(); err != nil {
		t.Errorf("Failed to init data dirs: %v", err)
	}
	// One minimal pod with a single container.
	pods := []*api.Pod{
		{
			ObjectMeta: api.ObjectMeta{
				UID:       "12345678",
				Name:      "foo",
				Namespace: "new",
			},
			Spec: api.PodSpec{
				Containers: []api.Container{
					{Name: "bar"},
				},
			},
		},
	}
	podManager.SetPods(pods)
	// The original test here is totally meaningless, because fakeruntime will always return an empty podStatus. While
	// the originial logic of isPodRunning happens to return true when podstatus is empty, so the test can always pass.
	// Now the logic in isPodRunning is changed, to let the test pass, we set the podstatus directly in fake runtime.
	// This is also a meaningless test, because the isPodRunning will also always return true after setting this. However,
	// because runonce is never used in kubernetes now, we should deprioritize the cleanup work.
	// TODO(random-liu) Fix the test, make it meaningful.
	fakeRuntime.PodStatus = kubecontainer.PodStatus{
		ContainerStatuses: []*kubecontainer.ContainerStatus{
			{
				Name:  "bar",
				State: kubecontainer.ContainerStateRunning,
			},
		},
	}
	results, err := kb.runOnce(pods, time.Millisecond)
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	if results[0].Err != nil {
		t.Errorf("unexpected run pod error: %v", results[0].Err)
	}
	if results[0].Pod.Name != "foo" {
		t.Errorf("unexpected pod: %q", results[0].Pod.Name)
	}
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package kms;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/**
 * Aggregates the KMS unit-test classes into a single JUnit 4 suite.
 *
 * @author Gault
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({kms.MenuItemTest.class, kms.AlcoholTest.class,
kms.assets.AssetsSuite.class, kms.IngredientTest.class,
kms.MenuTest.class, kms.IngredientDBTest.class})
public class KmsSuite {

    // Suite-level fixture hooks are intentionally empty; they are kept as
    // placeholders for future shared setup/teardown.
    @BeforeClass
    public static void setUpClass() throws Exception {
    }

    @AfterClass
    public static void tearDownClass() throws Exception {
    }

    @Before
    public void setUp() throws Exception {
    }

    @After
    public void tearDown() throws Exception {
    }
}
|
# Placeholder script: announce the feature is coming, pause, then exit.
printf '%s\n' "Kommt bald..."
sleep 3
exit
#!/bin/sh
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This does intentionally not run the regressions, it's primarily a "build" test
# Test if we should enable CPPAPI (only 5.0 and later for now)
enable_cppapi="--enable-cppapi"
# Jenkins job names for the 4.2.x branch contain "-4.2.x"; the parameter
# expansion differs from JOB_NAME only then, so cppapi is disabled there.
test "${JOB_NAME#*-4.2.x}" != "${JOB_NAME}" && enable_cppapi=""

cd "${WORKSPACE}/src"

# Regenerate the build system, configure with CI-friendly options, then
# build, run the check target, and clean up.
autoreconf -fi
./configure \
    --enable-ccache \
    --enable-werror \
    --enable-experimental-plugins \
    ${enable_cppapi} \
    --enable-example-plugins \
    --enable-test-tools \
    CORES=2

${ATS_MAKE} -j5 V=1
${ATS_MAKE} check VERBOSE=Y
${ATS_MAKE} clean
|
/**
 * Returns the first `n` Fibonacci numbers starting from 0.
 * Non-positive `n` yields an empty array.
 */
function fibonacciSequence(n) {
    const sequence = [];
    let prev = 0;
    let curr = 1;
    for (let remaining = n; remaining > 0; remaining--) {
        sequence.push(prev);
        [prev, curr] = [curr, prev + curr];
    }
    return sequence;
}
# Atmospherically correct each image and queue a Drive export per image.
export_list = []
coeff_list = []
for i in range(NO_OF_IMAGES):
    # Fetch metadata once per image; the atm_corr_* helpers derive the
    # atmospheric-correction inputs and coefficients from it.
    iInfo = S3.get(i).getInfo()
    iInfoProps = iInfo['properties']
    atmVars = atm_corr_image(iInfoProps)
    corrCoeffs = get_corr_coef(iInfoProps, atmVars)
    coeff_list.append(corrCoeffs)
    # Process the images and store them in export_list.
    # NOTE(review): metadata comes from S3 but pixels from S2List — confirm
    # the two collections are aligned index-for-index.
    img = atm_corr_band(ee.Image(S2List.get(i)), iInfoProps, atmVars)
    # Bug fix: the Earth Engine *Python* API takes keyword arguments here;
    # the original passed a single JS-style dict, which would have been
    # bound to the `image` parameter instead of being unpacked.
    export_task = ee.batch.Export.image.toDrive(
        image=img,
        description='Processed_Image_' + str(i),
        folder='GEE_Processed_Images',
        scale=30,  # Set the scale as per requirement
        region=iInfoProps['system:footprint'],
    )
    export_list.append(export_task)
    export_task.start()
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
/**
 * Creates the core university schema: departments, courses, users and the
 * course_user pivot linking users to courses with a per-course role.
 */
class CreateUniversitySchema extends Migration
{
    /**
     * Run the migrations. Parent tables are created before the tables that
     * reference them so the foreign keys can be attached immediately.
     */
    public function up()
    {
        Schema::create('departments', function (Blueprint $table) {
            $table->id();
            $table->string('name');
            $table->timestamps();
        });

        Schema::create('courses', function (Blueprint $table) {
            $table->id();
            $table->string('name');
            $table->unsignedBigInteger('dept_id');
            // Removing a department cascades to its courses.
            $table->foreign('dept_id')
                ->references('id')->on('departments')
                ->onDelete('cascade');
            $table->timestamps();
        });

        Schema::create('users', function (Blueprint $table) {
            $table->id();
            $table->string('name');
            // Role of the user within the institution (e.g. student/teacher).
            $table->string('role');
            $table->timestamps();
        });

        Schema::create('course_user', function (Blueprint $table) {
            $table->id();
            $table->unsignedBigInteger('course_id');
            $table->unsignedBigInteger('user_id');
            // Role of this user within this specific course.
            $table->string('role');
            $table->foreign('course_id')
                ->references('id')->on('courses')
                ->onDelete('cascade');
            // NOTE(review): no onDelete here, so deleting a user with pivot
            // rows will fail with a constraint violation — confirm intended.
            $table->foreign('user_id')
                ->references('id')->on('users');
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations. Children are dropped before parents to satisfy
     * the foreign key constraints.
     */
    public function down()
    {
        Schema::dropIfExists('course_user');
        Schema::dropIfExists('courses');
        Schema::dropIfExists('users');
        Schema::dropIfExists('departments');
    }
}
#include "ZLIBCompressionMethod.h"
// Maps a raw ZLIB CM field value to the typed enum; unknown values map to Invalid.
BF::ZLIBCompressionMethod BF::ConvertCompressionMethod(unsigned char compressionMethod)
{
    if (compressionMethod == 8u)
    {
        return BF::ZLIBCompressionMethod::Deflate;
    }

    if (compressionMethod == 15u)
    {
        return BF::ZLIBCompressionMethod::Reserved;
    }

    return BF::ZLIBCompressionMethod::Invalid;
}
// Maps the typed enum back to the raw ZLIB CM field value.
// Invalid (and any unhandled value) yields the sentinel 0xFF.
unsigned char BF::ConvertCompressionMethod(ZLIBCompressionMethod compressionMethod)
{
    switch (compressionMethod)
    {
        default:
        case BF::ZLIBCompressionMethod::Invalid:
            // The original returned -1, which implicitly narrowed to the
            // same 0xFF; spell the sentinel out to avoid the signed/unsigned
            // conversion (and the compiler warning that comes with it).
            return 0xFFu;

        case BF::ZLIBCompressionMethod::Deflate:
            return 8u;

        case BF::ZLIBCompressionMethod::Reserved:
            return 15u;
    }
}
// Human-readable name for a compression method; unknown values read "Invalid".
const char* BF::CompressionMethodToString(ZLIBCompressionMethod compressionMethod)
{
    if (compressionMethod == BF::ZLIBCompressionMethod::Deflate)
    {
        return "Deflate";
    }

    if (compressionMethod == BF::ZLIBCompressionMethod::Reserved)
    {
        return "Reserved";
    }

    return "Invalid";
}
|
<gh_stars>0
/*
* Imports
* -------
*/
import {
mergeObjects,
cascade,
toggleFocusability
} from '../../utils';
/*
* Opens and closes nav
* --------------------
*/
// Responsive navigation controller: moves overflowing menu items into an
// overflow panel, manages open/close state, scroll locking and focusability.
export default class Nav {

    /*
     * Constructor
     * -----------
     */

    constructor( args ) {

        /*
         * Public variables
         * ----------------
         */

        // Required DOM references (validated in _initialize).
        this.nav = null;
        this.list = null; // or array
        this.overflow = null;
        this.overflowList = null; // or array
        this.items = null;
        this.itemSelector = '';
        this.button = null;
        this.overlay = null;
        this.transition = null;

        // Class toggled on <html> to lock page scroll while the nav is open.
        this.overflowHiddenClass = 'u-o-h';

        // Lifecycle callbacks — no-ops by default, overridable through args.
        this.onSet = () => {};
        this.onReset = () => {};
        this.afterReset = () => {};
        this.onResize = () => {};
        this.onToggle = () => {};
        this.endToggle = () => {};
        this.done = () => {};

        // Open/close animation delays in milliseconds.
        this.delay = {
            open: 200,
            close: 200
        };

        // merge default variables with args
        mergeObjects( this, args );

        this.isOverflowing = false;

        /*
         * Internal variables ( more set in init method )
         * ---------------------------------------------
         */

        this._html = document.documentElement;
        this._viewportWidth = window.innerWidth;

        // escape key for closing nav (keyCode and KeyboardEvent.key forms)
        this._esc = [27, 'Escape'];

        // put items into groups
        this._overflowGroups = {};
        this._overflowGroupsLength = 0;
        this._listIndexes = {};

        // store groups currently overflown
        this._currentOverflowGroups = [];

        // store focusable elements outside nav
        this._focusableItems = [];

        // for throttling resize event
        this._resizeTimer;

        // store if nav is open
        this._navOpen = false;

        /*
         * Initialize
         * ----------
         */

        let init = this._initialize();

        if( !init ) {
            this.done.call( this );
            return false;
        }
    }

    /*
     * Initialize
     * ----------
     */

    _initialize() {
        // check that required variables not null
        let error = false,
            required = [
                'nav',
                'list',
                'overflow',
                'overflowList',
                'items',
                'itemSelector',
                'button'
            ];

        // NOTE: return inside forEach only skips the current entry; the
        // error flag is what actually aborts initialization below.
        required.forEach( ( r ) => {
            if( !this[r] ) {
                error = true;
                return;
            }
        } );

        if( error )
            return false;

        /* Convert list(s) and overflow list(s) to arrays */

        this.list = !Array.isArray( this.list ) ? [this.list] : this.list;
        this.overflowList = !Array.isArray( this.overflowList ) ? [this.overflowList] : this.overflowList;
        this.items = Array.from( this.items );

        if( !this.items.length )
            return false;

        /* Get focusable elements */

        // Focusable elements *outside* the nav are collected so they can be
        // made inert while the nav is open.
        let focusSelector = 'a, area, input, select, textarea, button, [tabindex], iframe',
            navFocusableItems = Array.from( this.nav.querySelectorAll( focusSelector ) );

        if( navFocusableItems.length ) {
            this._focusableItems = Array.from( document.querySelectorAll( focusSelector ) );
            this._focusableItems = this._focusableItems.filter( item => {
                if( navFocusableItems.indexOf( item ) == -1 )
                    return true;

                return false;
            } );
        }

        /* Event listeners */

        this._clickHandler = this._click.bind( this );
        this._keyDownHandler = this._keyDown.bind( this );
        this._resizeHandler = this._resize.bind( this );

        this.button.addEventListener( 'click', this._clickHandler );
        this.nav.addEventListener( 'keydown', this._keyDownHandler );

        if( this.overlay )
            this.overlay.addEventListener( 'click', this._clickHandler );

        window.addEventListener( 'resize', this._resizeHandler );

        // set up overflow groups
        // Items are bucketed by data-overflow-group (falling back to their
        // index) and tagged with the list they belong to via data-list-index.
        this.items.forEach( ( item, index ) => {
            let overflowGroupIndex = parseInt( item.getAttribute( 'data-overflow-group' ) ),
                listIndex = 0;

            if( !item.hasAttribute( 'data-list-index' ) ) {
                item.setAttribute( 'data-list-index', listIndex );
            } else {
                listIndex = parseInt( item.getAttribute( 'data-list-index' ) );
            }

            if( isNaN( overflowGroupIndex ) )
                overflowGroupIndex = index;

            if( !this._overflowGroups.hasOwnProperty( overflowGroupIndex ) ) {
                this._overflowGroups[overflowGroupIndex] = [];
                this._listIndexes[overflowGroupIndex] = [];
                this._overflowGroupsLength++;
            }

            this._overflowGroups[overflowGroupIndex].push( item );

            if( this._listIndexes[overflowGroupIndex].indexOf( listIndex ) == -1 )
                this._listIndexes[overflowGroupIndex].push( listIndex );
        } );

        // Layout measurement needs final styles/fonts, so wait for load.
        window.addEventListener( 'load', () => {
            // set up nav
            this._setNav( () => {
                this.done.call( this );
            } );
        } );

        return true;
    }

    /*
     * Helper methods for setting up nav
     * ---------------------------------
     */

    // return overflowing items to list
    _resetNav() {
        this.onReset.call( this );

        this.nav.removeAttribute( 'data-overflow' );
        this.nav.removeAttribute( 'data-overflow-all' );

        this._lastOverflowFocus = null;

        if( this._currentOverflowGroups.length > 0 ) {
            let frag = {},
                appendFrag = true,
                listIndexes = [];

            // One fragment per destination list so items can be re-inserted
            // in a single append.
            for( let overflowGroupIndex in this._listIndexes ) {
                this._listIndexes[overflowGroupIndex].forEach( ( index ) => {
                    frag[index] = document.createDocumentFragment();
                } );
            }

            this.items.forEach( ( item, i ) => {
                let listIndex = parseInt( item.getAttribute( 'data-list-index' ) );

                // insert at specific index
                if( item.hasAttribute( 'data-index' ) ) {
                    appendFrag = false;

                    let index = parseInt( item.getAttribute( 'data-index' ) ),
                        refNode = this.list[listIndex].children[index];

                    this.list[listIndex].insertBefore( item, refNode );
                } else { // insert
                    frag[listIndex].appendChild( item );
                }

                if( listIndexes.indexOf( listIndex ) === -1 )
                    listIndexes.push( listIndex );
            } );

            // append overflowing items
            if( appendFrag ) {
                listIndexes.forEach( ( listIndex ) => {
                    this.list[listIndex].appendChild( frag[listIndex] );
                } );
            }
        }

        // Clear out the overflow containers entirely.
        for( let overflowGroupIndex in this._listIndexes ) {
            this._listIndexes[overflowGroupIndex].forEach( ( index ) => {
                this.overflowList[index].innerHTML = '';
            } );
        }

        this._currentOverflowGroups = [];
    }

    // if overflowing transfer items over to overflow element
    _setNav( done ) {
        this._resetNav();
        this.afterReset.call( this );

        let overflowGroupIndex = 0,
            lastOverflowGroupIndex = 0,
            frag = {},
            overflow = this._overflowing( this._listIndexes[overflowGroupIndex] ),
            ogOverflow = overflow;

        this._listIndexes[overflowGroupIndex].forEach( ( index ) => {
            frag[index] = document.createDocumentFragment();
        } );

        this.isOverflowing = ogOverflow;

        // Button must be visible while measuring so its width is included.
        this.button.style.display = 'block';

        // Move one group at a time until the remaining items fit.
        while( overflow ) {
            let overflowGroup = this._overflowGroups[overflowGroupIndex];

            overflowGroup.forEach( ( item ) => {
                let listIndex = parseInt( item.getAttribute( 'data-list-index') );
                frag[listIndex].appendChild( item );
            } );

            this._currentOverflowGroups.push( overflowGroup );

            overflowGroupIndex++;
            overflow = this._overflowing( this._listIndexes[overflowGroupIndex] );

            if( overflow )
                lastOverflowGroupIndex = overflowGroupIndex;
        }

        this._listIndexes[lastOverflowGroupIndex].forEach( ( index ) => {
            this.overflowList[index].appendChild( frag[index] );
        } );

        if( this._currentOverflowGroups.length > 0 ) {
            if( !this.nav.hasAttribute( 'data-overflow' ) )
                this.nav.setAttribute( 'data-overflow', '' );

            if( this._currentOverflowGroups.length === this._overflowGroupsLength ) {
                if( !this.nav.hasAttribute( 'data-overflow-all' ) )
                    this.nav.setAttribute( 'data-overflow-all', '' );
            }
        } else {
            // Nothing overflows: force the nav closed.
            this._toggle( true );
        }

        this.onSet.call( this );

        this.button.style.display = '';

        if( done !== undefined )
            done.call( this );
    }

    // check if items are overflowing / wrapping into new line
    _overflowing( listIndexes = [0] ) {
        let overflow = false;

        listIndexes.forEach( ( index ) => {
            let items = this.list[index].querySelectorAll( this.itemSelector ),
                itemsLength = items.length;

            // all items are in overflow element now
            if( itemsLength === 0 ) {
                overflow = false;
                return;
            }

            // Any item sitting lower than the first one has wrapped.
            let firstItemOffset = items[0].offsetTop;

            // reverse loop to start from last item
            for( let i = itemsLength - 1; i >= 0; i-- ) {
                if( items[i].offsetTop > firstItemOffset ) {
                    overflow = true;
                    return;
                }
            }
        } );

        return overflow;
    }

    /*
     * Prevent scroll when open mobile navigation
     * ------------------------------------------
     */

    _disableScroll( disable = true ) {
        if( disable ) {
            this._html.classList.add( this.overflowHiddenClass );
        } else {
            this._html.classList.remove( this.overflowHiddenClass );
        }
    }

    /*
     * Open / close mobile navigation
     * ------------------------------
     */

    _toggle( close = true ) {
        this.onToggle.call( this );

        this._navOpen = !close;

        // While the nav is open, elements outside it are made unfocusable.
        toggleFocusability( !this._navOpen, this._focusableItems );

        if( close === false ) {
            // Opening: staged attribute changes so CSS transitions can run.
            cascade( [
                {
                    action: () => {
                        this.button.setAttribute( 'data-show', '' );
                        this._disableScroll();
                        this.button.setAttribute( 'aria-expanded', 'true' );
                        this.nav.setAttribute( 'data-open', '' );

                        if( this.transition )
                            this.transition.setAttribute( 'data-show', '' );
                    }
                },
                {
                    action: () => {
                        this.overflow.setAttribute( 'data-show', '' );
                    },
                    delay: this.delay.open
                },
                {
                    action: () => {
                        this.overflow.setAttribute( 'data-show-items', '' );
                    }
                }
            ] );
        } else {
            // Closing: reverse order of the opening cascade.
            cascade( [
                {
                    action: () => {
                        this.overflow.removeAttribute( 'data-show-items' );
                    }
                },
                {
                    action: () => {
                        this.button.removeAttribute( 'data-show' );
                        this.overflow.removeAttribute( 'data-show' );

                        if( this.transition )
                            this.transition.removeAttribute( 'data-show' );
                    },
                    delay: this.delay.close
                },
                {
                    action: () => {
                        this.nav.removeAttribute( 'data-open' );
                        this._disableScroll( false );
                        this.button.setAttribute( 'aria-expanded', 'false' );
                    }
                },
                {
                    action: () => {
                        this.endToggle.call( this );
                    }
                }
            ] );
        }
    }

    /*
     * Event Handlers
     * --------------
     */

    /* When click on button / overlay */

    _click( e ) {
        e.preventDefault();
        // Toggle: close when open, open when closed.
        this._toggle( this._navOpen );
    }

    /* If hit escape while nav open close */

    _keyDown( e ) {
        let key = e.key || e.keyCode || e.which || e.code;

        if( this._esc.indexOf( key ) !== -1 )
            this._toggle();
    }

    /* Viewport resize */

    _resize() {
        // throttles resize event
        clearTimeout( this._resizeTimer );

        this._resizeTimer = setTimeout( () => {
            let viewportWidth = window.innerWidth;

            // Only re-layout on an actual width change (ignores mobile
            // height-only changes such as the URL bar collapsing).
            if( viewportWidth != this._viewportWidth ) {
                this._viewportWidth = viewportWidth;
            } else {
                return;
            }

            this._setNav();
            this.onResize.call( this );
        }, 100 );
    }

    /*
     * Public methods
     * --------------
     */

    addFocusableItem( item ) {
        if( !item )
            return;

        this._focusableItems.push( item );
    }

} // end Nav
|
#!/usr/bin/env bash
# Launch distributed training with torch.distributed.launch.
# Usage: dist_train.sh CONFIG GPUS [extra train.py args...]
CONFIG=$1
GPUS=$2
PORT=${PORT:-29500}

# Build a comma-separated list of GPU ids 0..GPUS-1 for CUDA_VISIBLE_DEVICES.
# The original loop iterated `$(seq 0 $GPUS):`, which produced GPUS+1 ids,
# glued a literal ':' onto the last one, and left a trailing ", " separator.
GPU_IDS=$(seq -s, 0 $((GPUS - 1)))

PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
CUDA_VISIBLE_DEVICES=$GPU_IDS \
python3 -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \
    tools/train.py --no-test --launcher pytorch $CONFIG ${@:3}
|
#!/bin/bash -l
#SBATCH --nodes 1
#SBATCH --ntasks 12
#SBATCH --time=00:10:00
#SBATCH --partition=plgrid
#SBATCH --account=plgfrozentear72020a
module add plgrid/tools/openmpi
# Build the MPI exercise binary.
mpicc -o ex2_c ex2.c
echo -e "Ex2 - ntasks: 12"
# Sweep the problem size n over powers of ten: 10 .. 1e9.
for ((i=10; i<=1000000000; i *= 10)); do
echo -e "\nn = $i\n"
# NOTE(review): runs with 2 ranks although the job requests 12 tasks and the
# banner above says 12 — confirm the intended -np value.
mpiexec -np 2 ./ex2_c $i
done
package my
import (
"strings"
"testing"
)
// TestMD5 checks MD5 against a known digest of "zs5460".
func TestMD5(t *testing.T) {
	got := MD5("zs5460")
	if got != "54c163ce691b59853ce7dc7df47028d7" {
		t.Error("MD5 value is wrong")
	}
}
// TestSHA1 checks SHA1 against a known digest of "zs5460".
func TestSHA1(t *testing.T) {
	got := SHA1("zs5460")
	if got != "1fdab8d80dd4031533ddb77fbb62ffc56405f861" {
		t.Error("SHA1 value is wrong")
	}
}
// TestSHA256 checks SHA256 against a known digest of "zs5460".
func TestSHA256(t *testing.T) {
	got := SHA256("zs5460")
	if got != "9f9430e4c60941321b24893d72086d771326a51bbd3bdd29cbcad229148631b4" {
		t.Error("SHA256 value is wrong")
	}
}
// TestHMACSHA1 checks HMACSHA1 with key "123456" against a known digest.
func TestHMACSHA1(t *testing.T) {
	got := HMACSHA1("zs5460", "123456")
	if got != "9bd117811a0ff843655f0fbda7fc6fac404bffc4" {
		t.Error("HMAC_SHA1 value is wrong")
	}
}
// TestHMACSHA256 checks HMACSHA256 with key "123456" against a known digest.
func TestHMACSHA256(t *testing.T) {
	got := HMACSHA256("zs5460", "123456")
	if got != "9b3900854cd8bb04e1c52de5ddfca7cde6b8e8179d16ef7d660b056d03e3609f" {
		t.Error("HMAC_SHA256 value is wrong")
	}
}
// BenchmarkHMACSHA1 measures HMACSHA1 over a 5000-byte input.
func BenchmarkHMACSHA1(b *testing.B) {
	input := strings.Repeat("hello", 1000)
	for i := 0; i < b.N; i++ {
		HMACSHA1(input, "123456")
	}
}
|
import React, { useState } from 'react';
const LoginForm = () => {
const [username, setUsername] = useState('');
const [password, setPassword] = useState('');
const handleSubmit = (e) => {
e.preventDefault();
// check login credentials
// redirect to protected page
};
return (
<form onSubmit={handleSubmit}>
<label>Username</label>
<input type='text' value={username} onChange={e => setUsername(e.target.value)} />
<label>Password</label>
<input type='password' value={password} onChange={e => setPassword(e.target.value)} />
<input type='submit' />
</form>
);
};
export default LoginForm; |
<reponame>luissaiz/apicheck<gh_stars>1-10
import abc
class APITestStorage:
    """Base class for API-test storage backends.

    Note: this class does not derive from ``abc.ABC``, so the abstract
    property below is documentation/contract rather than an instantiation
    guard (kept as-is to preserve the existing interface).
    """

    def __init__(self,
                 storage: str):
        # Name of the selected storage backend (e.g. "mongodb").
        self.storage: str = storage

    # BUG fixed: decorators were in the wrong order
    # (``@abc.abstractmethod`` above ``@property``), which applies
    # abstractmethod to the read-only property object. ``@abstractmethod``
    # must be the innermost decorator (per the abc docs).
    @property
    @abc.abstractmethod
    def connection_string(self) -> str:
        """Connection string for the backend; subclasses must override."""
        raise NotImplementedError()
# -------------------------------------------------------------------------
# Storage
# -------------------------------------------------------------------------
class StorageMongoDB(APITestStorage):
    """MongoDB storage backend configuration."""

    __slots__ = ("mongo_host", "mongo_port", "mongo_username",
                 "mongo_password", "action")

    def __init__(self,
                 mongo_host: str,
                 mongo_port: int,
                 mongo_user: str,
                 mongo_password: str,
                 **kwargs):
        super(StorageMongoDB, self).__init__(**kwargs)
        # Fall back to sensible defaults when CLI values are missing/empty.
        self.mongo_port = mongo_port or 27017
        self.mongo_host = mongo_host or "127.0.0.1"
        self.mongo_username = mongo_user or None
        # BUG fixed: this line was corrupted (contained redaction
        # placeholders and did not parse); restored to match the
        # ``mongo_user or None`` pattern above.
        self.mongo_password = mongo_password or None

    @property
    def connection_string(self) -> str:
        """``mongodb://`` URI built from the stored credentials and host."""
        return f"mongodb://{self.mongo_username}:{self.mongo_password}@" \
               f"{self.mongo_host}:{self.mongo_port}/"

    @classmethod
    def from_cli(cls, argparser_opt):
        """Build an instance from an argparse ``Namespace``'s attributes."""
        return cls(**argparser_opt.__dict__)
# -------------------------------------------------------------------------
# Importers
# -------------------------------------------------------------------------
class APITestImporter:
    """Marker base class for API-specification importers."""
    pass
class ImporterOpenAPI3(APITestImporter):
    """Importer for OpenAPI 3 specifications stored in a YAML file."""

    def __init__(self, yaml_file: str, **kwargs):
        super(ImporterOpenAPI3, self).__init__(**kwargs)
        # Path to the OpenAPI 3 YAML document to import.
        self.yaml_file = yaml_file

    @classmethod
    def from_cli(cls, argparser_opt):
        """Build an importer from an argparse ``Namespace``'s attributes."""
        return cls(**argparser_opt.__dict__)
class RunningConfig:
    """Aggregate of the resolved action, storage backend and importer."""

    def __init__(self,
                 action: str,
                 storage: APITestStorage,
                 importer: APITestImporter):
        # The three pieces are independent; store them straight through.
        self.importer: APITestImporter = importer
        self.storage: APITestStorage = storage
        self.action: str = action
def build_config_from_argparser(action: str, argparser) -> RunningConfig:
    """Translate CLI options into a :class:`RunningConfig`.

    :param action: either ``"importer"`` or ``"exporter"``.
    :param argparser: argparse ``Namespace`` carrying the backend options.
    :raises ValueError: on unknown action, importer type or storage backend,
        or when the (not yet implemented) exporter action is requested.
    """
    if action not in ("importer", "exporter"):
        raise ValueError(f"Action '{action}' is not recognized")

    # -------------------------------------------------------------------------
    # IMPORTER model action
    # -------------------------------------------------------------------------
    importer_model = None
    if action == "importer":
        importer_type = argparser.importer_type
        if importer_type == "openapi":
            importer_model = ImporterOpenAPI3.from_cli(argparser)
        else:
            # BUG fixed: the message previously interpolated `action` instead
            # of the offending importer type.
            raise ValueError(f"Invalid importer type: '{importer_type}'")
    else:
        # action == "exporter" -- accepted by the check above, but no exporter
        # models exist yet. BUG fixed: this branch previously raised a
        # misleading "Invalid action: 'exporter'" even though the action had
        # just been validated.
        raise ValueError("Action 'exporter' is not implemented yet")

    # -------------------------------------------------------------------------
    # STORAGE
    # -------------------------------------------------------------------------
    storage_model = None
    if argparser.storage == "mongodb":
        storage_model = StorageMongoDB.from_cli(argparser)
    else:
        raise ValueError(f"Storage backend '{argparser.storage}' is unknown")

    return RunningConfig(
        action=action,
        storage=storage_model,
        importer=importer_model
    )
|
package javafx.scene.media;
/**
* @author <NAME>
*/
public class AudioClip {
    // Media source, created once from the URL given at construction.
    private final Media media;
    // Current player; dropped after each play() -- see note below.
    private MediaPlayer mediaPlayer;
    // Requested volume; negative means "use the player's default".
    private double volume = -1;

    /** Create a clip for the given media URL. */
    public AudioClip(String url) {
        media = new Media(url);
        mediaPlayer = new MediaPlayer(media);
    }

    /**
     * Play the clip, applying the requested volume (if any) first.
     * A fresh MediaPlayer is created when the previous one was released.
     */
    public void play() {
        if (mediaPlayer == null)
            mediaPlayer = new MediaPlayer(media);
        if (volume >= 0)
            mediaPlayer.setVolume(volume);
        mediaPlayer.play();
        // NOTE(review): the reference is dropped immediately, so the next
        // play() allocates a new player -- presumably to allow overlapping
        // playback of the same clip; confirm this is intentional.
        mediaPlayer = null;
    }

    /** Set the volume applied on the next play(); presumably in 0..1. */
    public void setVolume(double volume) {
        this.volume = volume;
    }
}
|
<filename>packages/babel-plugin-logical-optional-chaining/test/fixtures/object-chaining/code.js
testObj?.childProp1;
testObj.childProp1?.grandChildProp1;
testObj.childProp1.grandChildProp1?.greatGrandChildProp1;
testObj?.childProp1?.grandChildProp1?.greatGrandChildProp1;
testObj?.['childProp1'];
testObj.childProp1?.['grandChildProp1'];
testObj['childProp1']?.['grandChildProp1']?.['greatGrandChildProp1']; |
<reponame>ConduitIO/conduit-rest-js
/** Response payload of the v1 DeleteProcessor endpoint (empty object). */
export declare type v1DeleteProcessorResponse = {};
|
#!/bin/sh
set -e # To stop as soon as an error occured
# Downloads and unpacks SimpleScalar plus its cross toolchain into ./build.
# This is the only line requiring root permission
sudo apt-get install -y bison flex gzip gcc-multilib libz1 libncurses5 libbz2-1.0 make
export IDIR=$PWD"/build"
# Will install SimpleScalar in ./build
cd build/
# A simple wget commandline would not comply the licence
# SimpleScalar has to be downloaded manually
#if [ ! -f "simplesim-3v0e.tgz" ]
#then
#  TEXT="Please:\n- Accept the licence then download the archive file: http://www.simplescalar.com/agreement.php3?simplesim-3v0e.tgz\n- Place the archive in the build/ directory.\n- Launch this script again"
#  echo -e $TEXT
#  exit 1
#fi
wget http://www.simplescalar.com/downloads/simpletools-2v0.tgz
wget http://www.simplescalar.com/downloads/simpleutils-2v0.tgz
gunzip *.tgz
tar -xf simpletools-*.tar
tar -xf simpleutils-*.tar
# NOTE(review): simplesim-3v0e.tgz is never fetched by this script (licence,
# see above); the next line assumes the archive was placed in build/ manually
# and was gunzip'ed by the wildcard above -- confirm the manual step is
# documented for users (the enforcing check is commented out).
tar -xf simplesim-*.tar
### binutils Compilation ###
cd binutils-*
./configure --host=i386-pc-linux --target=sslittle-na-sstrix --with-gnu-as --with-gnu-ld --prefix=$IDIR
# The sed calls below patch the ancient binutils sources so they compile with
# a modern gcc; each cites the compiler error it works around.
# Avoiding:
# vasprintf.c:48:3: error: invalid initializer
sed -i -e "s/va_list ap = args;/va_list ap; va_copy(ap, args);/g" libiberty/vasprintf.c
# Avoiding:
# vasprintf.c:35:7: error: conflicting types for ‘malloc’
sed -i -e "s/char \*malloc ();/\/\/char \*malloc ();/g" libiberty/vasprintf.c
# Avoiding:
#In file included from /usr/include/time.h:41:0,
# from getruntime.c:27:
#getruntime.c: In function 'get_run_time':
#getruntime.c:73:5: error: missing binary operator before token "1000000"
# #if CLOCKS_PER_SEC <= 1000000
# See also: https://stackoverflow.com/questions/42132559/clocks-per-sec-missing-binary-operator-before-token-nfiq-2-0
sed -i -e "s/#if CLOCKS_PER_SEC <= 1000000/#define CLOCKS_PER_SEC_SUPPOSED ((clock)1000000)\n#if #CLOCKS_PER_SEC == #CLOCKS_PER_SEC_SUPPOSED\n#define CLOCKS_PER_SEC 1000000\n#endif\n#if CLOCKS_PER_SEC <= 1000000/g" libiberty/getruntime.c
# Avoiding:
# ./ldlex.l:477:7: error: 'yy_current_buffer' undeclared (first use in this function)
sed -i -e "s/yy_current_buffer/YY_CURRENT_BUFFER/g" ld/ldlex.l
# Avoiding errors like: /usr/lib/gcc/x86_64-linux-gnu/4.6/include/varargs.h:4:2: error: #error "GCC no longer implements <varargs.h>."
# but also: ldmisc.c:318:31: error: expected expression before ‘char’
sed -i -e "s/varargs.h/stdarg.h/g" ld/ldmisc.c
# Avoiding errors due to an outdated code
# Inspired from http://zealoct.wordpress.com/2011/04/19/install-simplescalar-on-ubuntu-10-10-with-gcc-4-4/
# avoiding: ldmisc.c:348:15: error: macro "va_start" requires 2 arguments, but only 1 given
sed -i -e "s/ va_list arg;/\/\/ va_list arg;/g" ld/ldmisc.c
sed -i -e "s/ va_start/\/\/ va_start/g" ld/ldmisc.c
sed -i -e "s/ file/\/\/ file/g" ld/ldmisc.c
sed -i -e "s/ fmt/\/\/ fmt/g" ld/ldmisc.c
sed -i -e "s/ vfinfo/\/\/ vfinfo/g" ld/ldmisc.c
sed -i -e "s/ va_end/\/\/ va_end/g" ld/ldmisc.c
# Avoiding:
# ldmisc.c:402:6: error: expected ‘=’, ‘,’, ‘;’, ‘asm’ or ‘__attribute__’ before ‘va_dcl’
# ldmisc.c:344:6: error: expected declaration specifiers before ‘va_dcl’
sed -i -e "s/(va_alist)/()/g" ld/ldmisc.c
sed -i -e "s/ va_dcl/\/\/ va_dcl/g" ld/ldmisc.c
sed -i -e "s/ FILE \*fp;/\/\/ FILE \*fp;/g" ld/ldmisc.c
sed -i -e "s/ char \*fmt;/\/\/ char \*fmt;/g" ld/ldmisc.c
sed -i -e "s/vfinfo(fp, fmt, arg)/vfinfo(FILE \*fp, char \*fmt, va_list arg)/g" ld/ldmisc.c
# Avoiding:
# follows non-static declaration:
#strerror.c:467:12: error: static declaration of ‘sys_nerr’ follows non-static declaration
# static int sys_nerr;
# ^
#In file included from /usr/include/stdio.h:853:0,
# from strerror.c:35:
#/usr/include/x86_64-linux-gnu/bits/sys_errlist.h:26:12: note: previous declaration of ‘sys_nerr’ was here
# extern int sys_nerr;
# Rename sys_nerr/sys_errlist, temporarily shielding the NEED_sys_errlist
# macro name from the rename so it is restored afterwards.
sed -i -e "s/NEED_sys_errlist/NEED_sys_errPROTECTEDlist/g" libiberty/strerror.c
sed -i -e "s/sys_nerr/sys_nerr_2/g" libiberty/strerror.c
sed -i -e "s/sys_errlist/sys_errlist_2/g" libiberty/strerror.c
sed -i -e "s/NEED_sys_errPROTECTEDlist/NEED_sys_errlist/g" libiberty/strerror.c
make all
make install
# Build the simulator itself (PISA little-endian configuration).
cd ../simplesim*
make config-pisa
make
# You can check that SimpleScalar (not the toolchain) works with the command-line:
# ./sim-safe tests-pisa/bin.little/test-math
cd ../
### gcc cross-compiler Compilation ###
cd gcc-*
./configure --host=i386-pc-linux --target=sslittle-na-sstrix --with-gnu-as --with-gnu-ld --prefix=$IDIR
# Avoiding:
# insn-output.c:676:5: error: stray ‘\’ in program
sed -i 's/return \\"FIXME\\\\n/return \\"FIXME\\\\n\\\\/g' config/ss/ss.md
#sed -i 's/return \"FIXME\\n/return \"FIXME\\n\\/g' insn-output.c
# Do not include LIBGCC2_INCLUDES leads to
# ./libgcc2.c:1384: stdio.h: No such file or directory
# make: *** [libgcc2.a] Error 1
# make has to be launched before to correct errors...
# We expect errors here
set +e
make LANGUAGES="c c++" CFLAGS="-O3" CC="gcc"
set -e
# Avoiding:
# pt.o: In function `instantiate_class_template':
# pt.c:(.text+0x2810): undefined reference to `feed_input'
# pt.o: In function `do_pending_templates':
# pt.c:(.text+0x48f8): undefined reference to `feed_input'
# parse.o: In function `yyparse':
# parse.c:(.text+0x5e7): undefined reference to `yyprint'
# collect2: error: ld returned 1 exit status
#
# See also: http://godblesstangkk.blogspot.fr/2013/01/install-simplescalar-30-on-ubuntu-1204.html
sed -i 's/^inline$//g' cp/input.c
# lex.c:795: undefined reference to `is_reserved_word'
sed -i 's/^inline$//g' cp/hash.h
# parse.c:(.text+0x5e7): undefined reference to `yyprint'
sed -i 's/^__inline$//g' cp/lex.c
# Avoiding:
# decl.c:3605:3: error: lvalue required as increment operand
sed -i -e "s/\*((void \*\*)__o->next_free)++ = ((void \*)datum);/\*((void \*\*)__o->next_free++) = ((void \*)datum);/g" obstack.h
# Avoiding:
# cxxmain.c:2978: error: conflicting types for ‘malloc’ - uniquement pour gcc 2.7
#chmod 755 cxxmain.c
#sed -i -e "s/char \* malloc ();/\/\/char \* malloc ();/g" cxxmain.c
#sed -i -e "s/char \* realloc ();/\/\/char \* realloc ();/g" cxxmain.c
# Avoiding:
# sdbout.c:57:18: fatal error: syms.h: No such file or directory
sed -i -e "s/#include <syms.h>/#include \"gsyms.h\"/g" sdbout.c
# Avoiding:
# cccp.c:194:14: error: conflicting types for ‘sys_errlist’
sed -i -e "s/extern char \*sys_errlist\[\];/\/\/extern const char \* const sys_errlist\[\];/g" cccp.c
# Avoiding:
# ./cp/g++.c:90:14: error: conflicting types for ‘sys_errlist’
sed -i -e "s/extern char \*sys_errlist\[\];/\/\/extern char \*sys_errlist\[\];/g" cp/g++.c
# Avoiding:
# gcc.c:172:14: error: conflicting types for ‘sys_errlist’
sed -i -e "s/extern char \*sys_errlist\[\];/\/\/extern char \*sys_errlist\[\];/g" gcc.c
make LANGUAGES="c c++" CFLAGS="-O3" CC="gcc"
# If you do not uncompress simpletools at the right place, You will face:
# /usr/include/_G_config.h:53: unknown machine mode `_G_int16_t'
# /usr/include/_G_config.h:54: unknown machine mode `_G_int32_t'
# /usr/include/_G_config.h:55: unknown machine mode `_G_uint16_t'
# /usr/include/_G_config.h:56: unknown machine mode `_G_uint32_t'
make install LANGUAGES="c c++" CFLAGS="-O3" CC="gcc"
echo 'PATH='$IDIR'/bin:$PATH' >> ~/.bashrc
cd ../simplesim-*
# NOTE(review): this adds $IDIR itself (not the simplesim directory just
# entered) to PATH; the sim-* binaries live in the simplesim directory --
# confirm whether this line should export that directory instead.
echo 'PATH='$IDIR':$PATH' >> ~/.bashrc
echo "This is it! Please restart your session in order to update your global variables."
echo "or execute: source ~/.bashrc"
#!/usr/bin/env bash
set -e -o pipefail
# config (each value is overridable from the environment)
export DOCKER_NAME=${DOCKER_NAME:-"alpine"}
export PHP_VERSION=${PHP_VERSION:-"7.4"}
export TEST_PHP_EXECUTABLE=${TEST_PHP_EXECUTABLE:-"/usr/local/bin/php"}
export RUN_TESTS_PHP=${RUN_TESTS_PHP:-"/usr/local/lib/php/build/run-tests.php"}
# Build the test image for the selected base image / PHP version.
function docker_build() (
    set -x
    docker build \
        -f .github/php-${DOCKER_NAME}.Dockerfile \
        -t php-request \
        --build-arg PHP_VERSION=${PHP_VERSION} \
        .
)
# Run the PHP test suite (tests/ mounted at /mnt) inside the image;
# REPORT_EXIT_STATUS makes run-tests.php propagate failures via exit code.
function docker_run() (
    set -x
    docker run \
        --env NO_INTERACTION=1 \
        --env REPORT_EXIT_STATUS=1 \
        --env TEST_PHP_EXECUTABLE=${TEST_PHP_EXECUTABLE} \
        -v "$PWD/tests:/mnt" \
        php-request \
        php ${RUN_TESTS_PHP} /mnt
)
docker_build
docker_run
|
<reponame>UNT-CSCE-Club/website
// Re-export the Officers component as this directory's default export.
export { default } from './Officers';
|
import argparse
import numpy as np
import _pickle as cPickle
from keras.layers import Conv1D, GlobalMaxPooling1D, concatenate
from keras.layers import Dense, Dropout, Input, Embedding, Flatten
from keras.models import Model
from keras.layers import LSTM, Bidirectional
import tensorflow as tf
import keras.backend.tensorflow_backend as ktf
__author__ = '<NAME>'
def prepare_interaction_pairs(XD, XT, Y, rows, cols):
    """Collect drug/target feature pairs and their affinities.

    For each index pair (rows[k], cols[k]) gathers XD[rows[k]],
    XT[cols[k]] and Y[rows[k], cols[k]].

    Returns (drug_data, target_data, affinity) where the first two are
    stacked arrays and affinity is a plain list.
    """
    drugs = [XD[r] for r in rows]
    targets = [XT[c] for c in cols]
    affinity = [Y[r, c] for r, c in zip(rows, cols)]
    return np.stack(drugs), np.stack(targets), affinity
def get_trn_dev(dataset, smiles_bert_cls, fold=0):
    """Build train/dev interaction pairs for one cross-validation fold.

    `smiles_bert_cls` replaces the dataset's raw drug features XD.
    Returns (xd_trn, xt_trn, y_trn, xd_dev, xt_dev, y_dev).
    """
    XD, XT, Y, trn_sets, dev_sets, tst_set, row_idx, col_idx = dataset
    parts = []
    # Same preparation for the training and the dev split of this fold.
    for split in (trn_sets[fold], dev_sets[fold]):
        parts.extend(prepare_interaction_pairs(smiles_bert_cls, XT, Y,
                                               row_idx[split], col_idx[split]))
    xd_trn, xt_trn, y_trn, xd_dev, xt_dev, y_dev = parts
    return xd_trn, xt_trn, y_trn, xd_dev, xt_dev, y_dev
def cindex_score(y_true, y_pred):
    """Concordance index (c-index) implemented with TF1 ops.

    Over all ordered pairs where y_true[i] > y_true[j], counts the fraction
    whose predictions are concordant; prediction ties earn 0.5 credit.
    Returns 0 when there are no comparable pairs.
    """
    g = tf.subtract(tf.expand_dims(y_pred, -1), y_pred)
    # BUG fixed: `g == 0.0` invoked Python object equality on a TF1 Tensor
    # (a constant False), so tied predictions never received their 0.5
    # credit. tf.equal performs the intended elementwise comparison.
    g = tf.cast(tf.equal(g, 0.0), tf.float32) * 0.5 + tf.cast(g > 0.0, tf.float32)
    f = tf.subtract(tf.expand_dims(y_true, -1), y_true) > 0.0
    # Lower-triangular mask keeps each comparable pair exactly once.
    f = tf.matrix_band_part(tf.cast(f, tf.float32), -1, 0)
    g = tf.reduce_sum(tf.multiply(g, f))
    f = tf.reduce_sum(f)
    return tf.where(tf.equal(g, 0), 0.0, g/f)  # select: avoid 0/0
def get_model():
    """Build and compile the drug-target affinity regression model.

    Drug branch: precomputed (100, 40) BERT features, flattened.
    Target branch: integer-encoded protein sequence -> embedding -> three
    widening Conv1D blocks -> global max pooling.
    The branches are concatenated and fed through dense layers to a single
    regression output (MSE loss, c-index metric).
    """
    max_len_t = 1000        # protein sequence length
    n_vocab_t = 25          # protein token vocabulary size
    n_filter = 32           # base number of conv filters
    t_filter_size = 12      # conv kernel width on the target branch

    xd_input = Input(shape=(100, 40), dtype='float32')
    xt_input = Input(shape=(max_len_t,), dtype='int32')

    # Drug branch.
    xd_z = Flatten()(xd_input)

    # Target branch: conv stack widens 32 -> 64 -> 96 filters.
    xt_z = Embedding(input_dim=n_vocab_t + 1, output_dim=128)(xt_input)
    for depth in (1, 2, 3):
        xt_z = Conv1D(filters=n_filter * depth, kernel_size=t_filter_size,
                      activation='relu', padding='valid', strides=1)(xt_z)
    xt_z = GlobalMaxPooling1D()(xt_z)

    # Fusion head.
    z = concatenate([xd_z, xt_z])
    for width in (1024, 1024):
        z = Dense(width, activation='relu')(z)
        z = Dropout(0.1)(z)
    z = Dense(512, activation='relu')(z)
    output = Dense(1, kernel_initializer='normal')(z)

    model = Model(inputs=[xd_input, xt_input], outputs=[output])
    model.compile(optimizer='adam', loss='mean_squared_error', metrics=[cindex_score])
    model.summary()
    return model
def get_session(gpu_fraction=1):
    """Create a TF1 session limited to the given GPU memory fraction."""
    opts = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_fraction,
                         allow_growth=True)
    return tf.Session(config=tf.ConfigProto(gpu_options=opts))
if __name__=="__main__":
    # Train the affinity model on fold 0 of the KIBA dataset.
    parser = argparse.ArgumentParser()
    parser.add_argument('--base_path', type=str, default='../../../data', help='Directory for input data.')
    args, unparsed = parser.parse_known_args()
    # Bind Keras to a session that owns the whole GPU.
    ktf.set_session(get_session())
    batch_size = 256
    epochs = 2000
    # Pickled dataset tuple; see get_trn_dev for its layout.
    dataset = cPickle.load(open('%s/kiba/kiba.cpkl' % args.base_path, 'rb'))
    # Precomputed SMILES BERT features replace the dataset's raw drug features.
    # smiles_bert_cls = cPickle.load(open('./kiba_smiles_bert.cpkl', 'rb'))
    smiles_bert_cls = cPickle.load(open('./kiba_smiles_bert_full.cpkl', 'rb'))
    trndev = get_trn_dev(dataset, smiles_bert_cls, 0)
    xd_trn, xt_trn, y_trn, xd_dev, xt_dev, y_dev = trndev
    model = get_model()
    model.fit([xd_trn, xt_trn], y_trn,
              batch_size=batch_size,
              shuffle=True,
              callbacks=[],
              epochs=epochs,
              validation_data=([xd_dev, xt_dev], y_dev))
|
import React, { useState } from "react";
import { Fade } from "react-reveal";
import Layout from "../Layout";
import PageHeader from "../ui/PageHeader";
import Veganz1 from "../../images/stores/veganz1.jpg";
import Veganz2 from "../../images/stores/veganz2.jpg";
import Biosphare from "../../images/stores/Biosphare.jpg";
import DrPogo from "../../images/stores/DrPogo.jpeg";
// Static catalogue of the Berlin stores rendered on this page.
const stores = [
  {
    img: Veganz1,
    title: "Veganz - Kreuzberg",
    address: "Marheinekeplatz 15, Berlin, Germany, 10961",
  },
  {
    img: Veganz2,
    title: "Veganz - Friedrichshain",
    address: "Warschauer Strasse 33, Berlin, Germany, 10243",
  },
  {
    img: Biosphare,
    title: "Biosphäre",
    address: "Weserstraße 212, 12047 Berlin",
  },
  {
    img: DrPogo,
    title: "Dr. Pogo",
    address: "Karl-Marx-Platz 24, Berlin, Germany, 12043",
  },
];
const renderStores = (h, setH) => {
return stores.map(({ img, title, address }, i) => {
const hovered = h === i;
return (
<div className="col-lg-6" key={i} style={{ overflow: "hidden" }}>
<Fade
left={i % 2 !== 0 ? true : false}
right={i % 2 === 0 ? true : false}
delay={i * 100}
>
<div
className="stores-container"
onMouseEnter={() => setH(i)}
onMouseLeave={() => setH(null)}
>
<div
className="stores-bg"
style={{
background: `url(${img})`,
backgroundSize: "cover",
backgroundPostion: "center center",
transform: `scale(${hovered ? "1.1" : "1"})`,
}}
/>
<div
className="stores-overlay"
style={{
opacity: hovered ? 0 : 0.3,
}}
/>
<h4 className="stores-title text-white text-center">{title}</h4>
<p className="stores-address text-white text-center d-inline mx-5 p-1">
{address}
</p>
</div>
</Fade>
</div>
);
});
};
/* Stores page: header plus the grid of store cards.
   `h` holds the index of the hovered card, shared with renderStores. */
const Stores = () => {
  const [h, setH] = useState(null);
  return (
    <Layout>
      <PageHeader title="Stores">
        <p>
          There is a few completely vegan stores in Berlin. There is also an
          abundance of organic (Bio) supermarkets.
        </p>
      </PageHeader>
      <div className="container">
        <div className="row no-gutters">{renderStores(h, setH)}</div>
      </div>
    </Layout>
  );
};
export default Stores;
|
#!/bin/bash
# inspired by: https://gist.github.com/domenic/ec8b0fc8ab45f39403dd
set -e # exit with nonzero exit code if anything fails
# Exit 0 (nothing to do) unless running on Travis, on the main repository,
# on a push (not PR) build, and on the master or develop branch.
check_if_should_deploy()
{
	if [ "${TRAVIS}" != true ]
	then
		echo "This script is only intended to be run on Travis CI; not deploying."
		exit 0
	fi
	if [ "${TRAVIS_REPO_SLUG}" != "ScottLogic/BitFlux" ]
	then
		echo "On fork; not deploying."
		exit 0
	fi
	if [ "${TRAVIS_PULL_REQUEST}" != "false" ]
	then
		echo "Pull request; not deploying."
		exit 0
	fi
	# Ignore on any branch which isn't develop or master
	if [ "${TRAVIS_BRANCH}" != "develop" ] && [ "${TRAVIS_BRANCH}" != "master" ]
	then
		echo "On branch ${TRAVIS_BRANCH}, not master or develop; not deploying."
		exit 0
	fi
}
# Produce master's dist/ under ./master: clone and build it when the current
# build is for another branch, otherwise reuse the dist/ already built for
# this commit. Sets $MASTER to the version string (used by versions.json).
prepare_and_build_master()
{
	if [ "${TRAVIS_BRANCH}" != "master" ]
	then
		echo "Cloning master..."
		git clone --branch master --depth 1 https://github.com/ScottLogic/BitFlux.git master
		cd master
		MASTER=$(git describe --tags --always --dirty 2>&1)
		echo "Building master... $MASTER"
		npm install --quiet
		grunt build --versionNumber="v$MASTER"
		cd ..
	else
		echo "Copying master..."
		rm -rf master
		mkdir -p master/dist
		cp -r ../../dist/* master/dist
		cd master
		MASTER=$(git describe --tags --always --dirty 2>&1)
		cd ..
	fi
}
# Same as prepare_and_build_master, but for the develop branch; sets $DEVELOP.
prepare_and_build_develop()
{
	if [ "${TRAVIS_BRANCH}" != "develop" ]
	then
		echo "Cloning develop..."
		git clone --branch develop --depth 1 https://github.com/ScottLogic/BitFlux.git develop
		cd develop
		DEVELOP=$(git describe --tags --always --dirty 2>&1)
		echo "Building develop... $DEVELOP"
		npm install --quiet
		grunt build --versionNumber="v$DEVELOP"
		cd ..
	else
		echo "Copying develop..."
		rm -rf develop
		mkdir -p develop/dist
		cp -r ../../dist/* develop/dist
		cd develop
		DEVELOP=$(git describe --tags --always --dirty 2>&1)
		cd ..
	fi
}
echo "Deploying..."
check_if_should_deploy
echo "Creating temp directory for build..."
cd site
rm -rf temp
mkdir temp
cd temp
prepare_and_build_master
prepare_and_build_develop
echo "Creating directories for built application..."
cd ../dist
rm -rf master
mkdir master
rm -rf develop
mkdir develop
echo "Copying built application files..."
cp -r ../temp/master/dist/* master
cp -r ../temp/develop/dist/* develop
rm -rf ../temp
# Record what was deployed; $MASTER/$DEVELOP were set by the prepare functions.
printf '{"timestamp":"%s","travis_build_number":"%s","master_version":"%s","develop_version":"%s"}\n' "$(date +%s)" "$TRAVIS_BUILD_NUMBER" "$MASTER" "$DEVELOP" > versions.json
echo "Deploying to gh-pages..."
# create a *new* Git repo
git init
# inside this git repo we'll pretend to be a new user
git config user.name "Travis CI"
git config user.email "jleftley@scottlogic.com"
# The first and only commit to this new Git repo contains all the
# files present with the commit message "Deploy to GitHub Pages".
git add .
git commit -m "Deploy to GitHub Pages"
# Force push from the current repo's master branch to the remote
# repo's gh-pages branch. (All previous history on the gh-pages branch
# will be lost, since we are overwriting it.) We redirect any output to
# /dev/null to hide any sensitive credential data that might otherwise be exposed.
git push --force --quiet "https://${GH_TOKEN}@${GH_REF}" master:gh-pages > /dev/null 2>&1
echo "Done."
|
# Run NPENAS with every predictor over NAS-Bench-201 / CIFAR-10, then
# optionally zip and upload each trial's results to S3.
optimizer=npenas
predictors=(omni_seminas bananas mlp lgb gcn bonas xgb ngb rf dngo \
bohamiann bayes_lin_reg gp seminas sparse_gp var_sparse_gp nao)

# First seed defaults to 0 when not given on the command line.
start_seed=$1
if [ -z "$start_seed" ]
then
    start_seed=0
fi

# folders:
base_file=NASLib/naslib
s3_folder=np201
out_dir=$s3_folder\_$start_seed

# search space / data:
search_space=nasbench201
dataset=cifar10
search_epochs=500

# trials / seeds:
trials=100
end_seed=$(($start_seed + $trials - 1))
save_to_s3=true

# create config files
for i in $(seq 0 $((${#predictors[@]}-1)) )
do
    predictor=${predictors[$i]}
    python $base_file/benchmarks/create_configs.py --predictor $predictor \
    --epochs $search_epochs --start_seed $start_seed --trials $trials \
    --out_dir $out_dir --dataset=$dataset --config_type nas_predictor \
    --search_space $search_space --optimizer $optimizer
done

# run experiments
for t in $(seq $start_seed $end_seed)
do
    for predictor in ${predictors[@]}
    do
        config_file=$out_dir/$dataset/configs/nas_predictors/config\_$optimizer\_$predictor\_$t.yaml
        echo ================running $predictor trial: $t =====================
        python $base_file/benchmarks/nas_predictors/runner.py --config-file $config_file
    done
    # BUG fixed: `if [ "save_to_s3" ]` tested a non-empty literal string and
    # was therefore always true, ignoring the save_to_s3 flag set above.
    if [ "$save_to_s3" = true ]
    then
        # zip and save to s3
        echo zipping and saving to s3
        zip -r $out_dir.zip $out_dir
        python $base_file/benchmarks/upload_to_s3.py --out_dir $out_dir --s3_folder $s3_folder
    fi
done
|
#!/bin/bash
# Print the longest word of a fixed sentence (first one wins on ties).
sentence="He quickly ran to the store"
# Tokenize on spaces into an array.
IFS=' ' read -ra words <<< "$sentence"
# Scan the words, keeping the longest seen so far.
longest_word=''
for word in "${words[@]}"; do
	if [ ${#word} -gt ${#longest_word} ]; then
		longest_word=$word
	fi
done
echo "Longest word: $longest_word"
class GameScene: SKScene {
    // Conway's Game of Life board: true = live cell, false = dead cell.
    // Row-major; assumed rectangular — TODO confirm all rows share one width.
    var grid: [[Bool]] = [] // Initialize the grid with appropriate dimensions and cell states

    /// Advance the automaton by one generation using Conway's rules.
    func updateGeneration() {
        // BUG fixed: indexing grid[0] below trapped on an empty grid.
        guard !grid.isEmpty, !grid[0].isEmpty else { return }
        var nextGeneration: [[Bool]] = Array(repeating: Array(repeating: false, count: grid[0].count), count: grid.count)
        for i in 0..<grid.count {
            for j in 0..<grid[i].count {
                let liveNeighbors = countLiveNeighbors(row: i, col: j)
                if grid[i][j] {
                    // Survival: exactly 2 or 3 live neighbours; otherwise the
                    // cell dies of under- or overpopulation.
                    nextGeneration[i][j] = liveNeighbors == 2 || liveNeighbors == 3
                } else {
                    // Reproduction: a dead cell with exactly 3 live neighbours revives.
                    nextGeneration[i][j] = liveNeighbors == 3
                }
            }
        }
        grid = nextGeneration // Update the grid with the next generation
    }

    /// Count live cells among the 8 neighbours of (row, col);
    /// off-grid neighbours count as dead.
    func countLiveNeighbors(row: Int, col: Int) -> Int {
        var liveCount = 0
        let directions = [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)]
        for dir in directions {
            let newRow = row + dir.0
            let newCol = col + dir.1
            if newRow >= 0 && newRow < grid.count && newCol >= 0 && newCol < grid[0].count {
                if grid[newRow][newCol] {
                    liveCount += 1
                }
            }
        }
        return liveCount
    }
}
#!/bin/sh
FAILED=no
# Detailed test output goes to $TEST_OUTPUT_FILE when set, else discarded.
if test "$TEST_OUTPUT_FILE" = ""
then
	TEST_OUTPUT_FILE=/dev/null
fi
# /bin/echo is a little more likely to support -n than sh's builtin echo.
if test -x /bin/echo
then
	ECHO=/bin/echo
else
	ECHO=echo
fi
if test "$TEST_OUTPUT_FILE" != "/dev/null"
then
	touch "$TEST_OUTPUT_FILE" || exit 1
fi
# Locate the test-ratelim binary next to this script, falling back to cwd.
TEST_DIR=.
T=`echo "$0" | sed -e 's/test-ratelim.sh$//'`
if test -x "$T/test-ratelim"
then
	TEST_DIR="$T"
fi
# Echo a line to both stdout and the output file.
announce () {
	echo $@
	echo $@ >>"$TEST_OUTPUT_FILE"
}
# Same, but without a trailing newline on stdout (for "label: RESULT" lines).
announce_n () {
	$ECHO -n $@
	echo $@ >>"$TEST_OUTPUT_FILE"
}
# Run the four rate-limit scenarios; sets FAILED=yes on any failure.
run_tests () {
	announce_n " Group limits, no connection limit:"
	if $TEST_DIR/test-ratelim -g 30000 -n 30 -t 100 --check-grouplimit 1000 --check-stddev 100 >>"$TEST_OUTPUT_FILE"
	then
		announce OKAY
	else
		announce FAILED
		FAILED=yes
	fi
	announce_n " Connection limit, no group limit:"
	if $TEST_DIR/test-ratelim -c 1000 -n 30 -t 100 --check-connlimit 50 --check-stddev 50 >>"$TEST_OUTPUT_FILE"
	then
		announce OKAY ;
	else
		announce FAILED ;
		FAILED=yes
	fi
	announce_n " Connection limit and group limit:"
	if $TEST_DIR/test-ratelim -c 1000 -g 30000 -n 30 -t 100 --check-grouplimit 1000 --check-connlimit 50 --check-stddev 50 >>"$TEST_OUTPUT_FILE"
	then
		announce OKAY ;
	else
		announce FAILED ;
		FAILED=yes
	fi
	announce_n " Connection limit and group limit with independent drain:"
	if $TEST_DIR/test-ratelim -c 1000 -g 35000 -n 30 -t 100 -G 500 --check-grouplimit 1000 --check-connlimit 50 --check-stddev 50 >>"$TEST_OUTPUT_FILE"
	then
		announce OKAY ;
	else
		announce FAILED ;
		FAILED=yes
	fi
}
announce "Running rate-limiting tests:"
run_tests
# Propagate failure to the caller (e.g. a make check harness).
if test "$FAILED" = "yes"; then
	exit 1
fi
|
<filename>dynomitemanager-core/src/main/java/com/netflix/dynomitemanager/dualAccount/AwsRoleAssumptionCredential.java<gh_stars>10-100
package com.netflix.dynomitemanager.dualAccount;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.google.inject.Inject;
import com.netflix.nfsidecar.aws.ICredential;
import com.netflix.nfsidecar.config.AWSCommonConfig;
import com.netflix.nfsidecar.identity.InstanceEnvIdentity;
public class AwsRoleAssumptionCredential implements ICredential {
    private static final String AWS_ROLE_ASSUMPTION_SESSION_NAME = "AwsRoleAssumptionSession";
    // Base credential used to authenticate the STS assume-role call.
    private ICredential cred;
    private InstanceEnvIdentity insEnvIdentity;
    // Lazily created; see the NOTE in getAwsCredentialProvider.
    private AWSCredentialsProvider stsSessionCredentialsProvider;
    private AWSCommonConfig config;

    /** Injected with the base credential, common config and environment identity. */
    @Inject
    public AwsRoleAssumptionCredential(ICredential cred, AWSCommonConfig config, InstanceEnvIdentity insEnvIdentity) {
        this.cred = cred;
        this.config = config;
        this.insEnvIdentity = insEnvIdentity;
    }

    /**
     * Lazily build (double-checked locking) an STS-backed credentials
     * provider that assumes the cross-environment IAM role.
     *
     * NOTE(review): stsSessionCredentialsProvider is not volatile, so this
     * double-checked locking is not safe under the Java memory model;
     * also, when isDualAccount() is true the provider is rebuilt on every
     * entry into the synchronized block -- confirm both are intended.
     */
    @Override
    public AWSCredentialsProvider getAwsCredentialProvider() {
        if (this.stsSessionCredentialsProvider == null) {
            synchronized (this) {
                if (this.config.isDualAccount() || this.stsSessionCredentialsProvider == null) {
                    String roleArn = null;
                    /*
                     * Pick the assumed IAM role for the *other* environment:
                     * an EC2-classic instance assumes the role created for
                     * VPC, and a VPC instance assumes the role created for
                     * EC2 classic.
                     */
                    if (this.insEnvIdentity.isClassic()) {
                        roleArn = this.config.getVpcAWSRoleAssumptionArn();
                    } else {
                        roleArn = this.config.getClassicAWSRoleAssumptionArn();
                    }
                    //
                    if (roleArn == null || roleArn.isEmpty())
                        throw new NullPointerException(
                                "Role ARN is null or empty probably due to missing config entry");
                    /*
                     * The STS provider creates temporary, short-lived session
                     * credentials and refreshes them on session/token expiry.
                     */
                    try {
                        this.stsSessionCredentialsProvider = new STSAssumeRoleSessionCredentialsProvider(
                                this.cred.getAwsCredentialProvider(), roleArn, AWS_ROLE_ASSUMPTION_SESSION_NAME);
                    } catch (Exception ex) {
                        throw new IllegalStateException(
                                "Exception in getting handle to AWS Security Token Service (STS). Msg: "
                                        + ex.getLocalizedMessage(),
                                ex);
                    }
                }
            }
        }
        return this.stsSessionCredentialsProvider;
    }
}
|
<gh_stars>0
import { Component } from '@angular/core';
import { AbstractControl, FormBuilder, FormGroup, Validators } from "@angular/forms";
import { AuthService } from "../../../@core/services/auth.service";
import { EqualPasswordsValidator } from "../../../@core/validators/equal-passwords-validator";
import {EmployeeAddRequest} from "../../../@core/model/requests/employee-add-request";
@Component({
  selector: 'add-employee',
  templateUrl: 'add-employee.component.html',
  styleUrls: ['add-employee.component.scss'],
})
export class AddEmployeeComponent {
  // Reactive form plus direct control references for template access.
  form: FormGroup;
  email: AbstractControl;
  passwords: FormGroup;
  password: AbstractControl;
  confirmPassword: AbstractControl;
  name: AbstractControl;
  surname: AbstractControl;
  role: AbstractControl;
  // Roles selectable in the role dropdown.
  roles = ['CONSULTANT', 'ADMIN'];
  // UI state: outcome of the last request and an in-flight guard.
  error: boolean = false;
  success: boolean = false;
  submitted: boolean = false;

  constructor(fb: FormBuilder, private authService: AuthService) {
    // The nested 'passwords' group carries the cross-field equality validator.
    this.form = fb.group({
      'email': ['', Validators.compose([Validators.required, Validators.email])],
      'passwords': fb.group({
        'password': ['', Validators.compose([Validators.required, Validators.minLength(5), Validators.maxLength(20)])],
        'confirmPassword': ['', Validators.compose([Validators.required, Validators.minLength(5), Validators.maxLength(20)])]
      }, {validator: EqualPasswordsValidator.validate('password', 'confirmPassword')}),
      'name': ['', Validators.compose([Validators.required])],
      'surname': ['', Validators.compose([Validators.required])],
      'role': ['', Validators.compose([Validators.required])]
    });
    this.email = this.form.controls['email'];
    this.passwords = <FormGroup> this.form.controls['passwords'];
    this.password = this.passwords.controls['password'];
    this.confirmPassword = this.passwords.controls['confirmPassword'];
    this.name = this.form.controls['name'];
    this.surname = this.form.controls['surname'];
    this.role = this.form.controls['role'];
  }

  /* True when the form may be submitted (valid and no request in flight). */
  isFormValid(): boolean {
    return this.form.valid && !this.submitted;
  }

  /* Register the employee via AuthService; on completion resets the form
     and flips the success/error flags accordingly. */
  addEmployee() {
    if(!this.submitted && this.form.valid) {
      this.submitted = true;
      let request: EmployeeAddRequest = {
        name: this.name.value,
        surname: this.surname.value,
        userRole: this.role.value
      };
      return this.authService.registerEmployee(this.email.value, this.password.value, request)
        .subscribe(
          (data) => {
            this.form.reset();
            this.error = false;
            this.success = true;
            this.submitted = false;
          },
          (error) => {
            this.form.reset();
            this.error = true;
            this.success = false;
            this.submitted = false;
          });
    }
  }
}
|
def search(sorted_list, element):
    """Binary search: return an index of *element* in *sorted_list*, or -1.

    The list must be sorted in ascending order. With duplicates, the index
    of whichever occurrence the bisection lands on is returned.
    """
    lo, hi = 0, len(sorted_list) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        candidate = sorted_list[mid]
        if candidate < element:
            lo = mid + 1          # target is in the upper half
        elif candidate > element:
            hi = mid - 1          # target is in the lower half
        else:
            return mid            # found it
    return -1                     # exhausted the range without a match
#!/bin/bash
# xset m 4/1 10
# xinput set-prop 'ELECOM ELECOM BlueLED Mouse' 'Device Accel Constant Deceleration' 4
# xinput set-prop 'ELECOM ELECOM BlueLED Mouse' 'Evdev Wheel Emulation' 1
# xinput set-prop 'ELECOM ELECOM BlueLED Mouse' 'Evdev Wheel Emulation Button' 2
# xinput set-prop 'ELECOM ELECOM BlueLED Mouse' 'Evdev Wheel Emulation Axes' 6 7 4 5
# xinput set-prop 'ELECOM ELECOM BlueLED Mouse' 'Evdev Wheel Emulation Inertia' 5
# xinput set-prop 'Logitech Unifying Device. Wireless PID:4101' 'Device Accel Constant Deceleration' 1
# xinput set-button-map 'Logitech Unifying Device. Wireless PID:4101' 1 2 3 5 4 7 6
|
<filename>src/scg_scene.ts<gh_stars>0
import { ScType } from './scg_types';
import { SCgRender } from './scg_render';
import { SCgObject, SCgNode, SCgEdge, SCgLink } from './scg_object';
import { SCgStruct } from './scg_struct';
import { ScAddr } from '@ostis/sc-core';
// Callback invoked when the scene requests a view refresh.
type UpdateCallback = () => void;
// Module-wide counter used to hand out unique SCg object ids.
let idCounter: number = 0;
export class SCgScene {
private _nodes: SCgNode[] = [];
private _edges: SCgEdge[] = [];
private _links: SCgLink[] = [];
private _render: SCgRender = null;
private _objectsByAddr: Map<number, SCgObject> = new Map<number, SCgObject>();
private _requestUpdate: number = 0;
private _requestUpdateCallback: UpdateCallback = null;
constructor() {
}
public onDestroy() {
}
private nextID() {
return idCounter++;
}
public createNode(type: ScType, text: string, addr?: ScAddr): SCgNode {
const newNode = new SCgNode(this.nextID(), text, type, this, addr);
this._nodes.push(newNode);
if (addr)
this._objectsByAddr.set(addr.value, newNode);
return newNode;
}
public createEdge(type: ScType, src: SCgObject, trg: SCgObject, text?: string, addr?: ScAddr): SCgEdge {
const newEdge = new SCgEdge(this.nextID(), text, type, src, trg, this, addr);
this._edges.push(newEdge);
if (addr)
this._objectsByAddr.set(addr.value, newEdge);
return newEdge;
}
public createLink(type = ScType.LinkConst, text?: string, addr?: ScAddr): SCgLink {
const newLink = new SCgLink(this.nextID(), text, type, this, addr);
this._links.push(newLink);
if (addr)
this._objectsByAddr.set(addr.value, newLink);
return newLink;
}
get render(): SCgRender {
return this._render;
}
set render(r: SCgRender) {
this._render = r;
}
get nodes(): SCgNode[] {
return this._nodes;
}
get edges(): SCgEdge[] {
return this._edges;
}
get links(): SCgLink[] {
return this._links;
}
public objectByAddr(addr: ScAddr) : SCgObject {
return this._objectsByAddr.get(addr.value);
}
public viewUpdate(): void {
if (this._requestUpdateCallback)
this._requestUpdateCallback();
this._requestUpdate--;
}
set updateCallback(callback: UpdateCallback) {
this._requestUpdateCallback = callback;
}
}; |
import numpy as np
def fade_orbital_paths(Sat_dic, frame, SUN_LOC, ORBITAL_RSIZE, ORBITAL_PHI, Delta):
    """Redraw each satellite's orbital ring on *frame* with a staggered fade.

    Args:
        Sat_dic: mapping of satellite id -> dict holding at least the keys
            "Sat_Radius" and "Phi_Offset".
        frame: image buffer that ``Orbiral`` draws onto.
        SUN_LOC: centre point of the orbits.
        ORBITAL_RSIZE: base ring size passed to ``Orbiral``.
        ORBITAL_PHI: base angular position of the rings.
        Delta: overall time window; each satellite gets Delta/len(Sat_dic)
            of it before its fade kicks in.

    NOTE(review): ``Orbiral`` and ``frame_time`` are not defined in this
    module — presumably supplied by the enclosing script; as written this
    raises NameError if they are absent. Also *frame* is rebound locally but
    never returned, so callers cannot observe the drawn result — confirm
    whether ``return frame`` is intended.
    """
    satsn = len(Sat_dic)  # Number of satellites
    Sat_Time_Space = Delta / satsn  # Time space for each satellite's fading effect
    for n, sat in Sat_dic.items():
        # Base ring for this satellite, drawn in blue (BGR: (0, 0, 255) is red in OpenCV — TODO confirm colour order).
        frame = Orbiral(frame, SUN_LOC, sat["Sat_Radius"], ORBITAL_RSIZE, ORBITAL_PHI - sat["Phi_Offset"], (0, 0, 255))
        for fadeSeconds in np.linspace(0, Delta // 2, int(ORBITAL_RSIZE)):
            if frame_time >= Sat_Time_Space + fadeSeconds:
                # Enough time has elapsed: shrink the ring by the faded amount.
                R_OFF = ORBITAL_RSIZE - fadeSeconds
                print("Fade:", R_OFF)
                frame = Orbiral(frame, SUN_LOC, sat["Sat_Radius"], ORBITAL_RSIZE - int(R_OFF), ORBITAL_PHI - sat["Phi_Offset"], (255, 0, 255))
            else:
                # Not yet time to fade: redraw the full-size ring.
                frame = Orbiral(frame, SUN_LOC, sat["Sat_Radius"], ORBITAL_RSIZE, ORBITAL_PHI - sat["Phi_Offset"], (0, 0, 255))
<reponame>reekoheek/yesbee<filename>lib/source.js
//jshint esnext:true
const co = require('co');
const delegate = require('delegates');
const assert = require('assert');
module.exports = (function() {
  'use strict';

  // Monotonic counter used to mint unique source ids ('source-0', 'source-1', ...).
  var sequence = 0;

  /**
   * A Source binds a component to a uri; messages are fed to the route's
   * consumer callback registered via start().
   *
   * @param {Component} component Owning component (must be truthy).
   * @param {string} uri Endpoint uri this source represents.
   * @param {Object} [options] Optional source options (defaults to {}).
   */
  function Source (component, uri, options) {
    assert(component, 'Invalid arguments, {Component} component, {string} uri');
    assert('string' === typeof uri, 'Invalid arguments, {Component} component, {string} uri');

    // All properties are non-configurable; only `consumer` is writable,
    // because start() (re)binds it while the route is running.
    Object.defineProperties(this, {
      id: { enumerable:true, writable:false, configurable:false, value: 'source-'+ sequence++ },
      uri: { enumerable:true, writable:false, configurable:false, value: uri },
      options: { enumerable:false, writable:false, configurable:false, value: options || {} },
      component: { enumerable:false, writable:false, configurable:false, value: component },
      consumer: { enumerable:false, writable:true, configurable:false, value: null },
    });
  }

  Source.prototype = {
    // Serializable snapshot of this source (enumerable identity fields only).
    dump() {
      return {
        id: this.id,
        uri: this.uri,
        options: this.options,
      };
    },

    /**
     * Deliver a message to the registered consumer.
     * Returns a promise (via co) for the consumer's result; errors are
     * logged on non-worker contexts and re-thrown.
     */
    consume(message) {
      assert(message, 'Invalid arguments, {Message} message');
      assert(this.consumer, 'Cannot consume from inactive source on stopped route');

      // NOTE(review): `this.consumer.call(message)` invokes the consumer with
      // *message* as `this` and no arguments — confirm this is intended
      // rather than `this.consumer.call(this, message)`.
      return co(function *() {
        try {
          return yield this.consumer.call(message);
        } catch(e) {
          if (!this.component.context.isWorker) {
            this.logger({$name: this.component.name, level: 'error', message: e.stack });
          }
          throw e;
        }
      }.bind(this));
    },

    // Bind the consumer and delegate startup to the component, if it supports it.
    start(consumer) {
      this.consumer = consumer;

      if ('function' === typeof this.component.start) {
        return this.component.start(this);
      }
    },

    // Delegate shutdown to the component, if it supports it.
    stop() {
      if ('function' === typeof this.component.stop) {
        return this.component.stop(this);
      }
    },
  };

  // logger() and context are borrowed from the owning component.
  delegate(Source.prototype, 'component')
    .method('logger')
    .access('context');

  return Source;
})();
def kClosestPoints(points, k):
    """Return the k points closest to the origin.

    Defect fixed: the original called ``points.sort(...)``, silently
    mutating the caller's list as a side effect. A sorted copy is used
    instead; the returned value is unchanged.

    Args:
        points: list of [x, y] pairs.
        k: number of closest points to return (k <= 0 yields []).

    Returns:
        A new list with the k points nearest the origin, ordered by
        increasing squared distance (stable sort: ties keep input order).
    """
    # Squared Euclidean distance — no sqrt needed for ordering.
    return sorted(points, key=lambda p: p[0] ** 2 + p[1] ** 2)[:k]
/**
 * Public iteration surface exposed by AsyncBroadcaster: plain
 * `for await ... of` support plus a values() variant that can be
 * cancelled with an AbortSignal.
 */
interface AsyncBroadcasterIterable<T> {
  [Symbol.asyncIterator](): AsyncIterator<T>;
  // Like the default iterator, but abortable via options.signal.
  values(options: {signal?: AbortSignal}): AsyncIterableIterator<T>;
}
/**
 * An object that broadcasts values out to multiple listeners via async
 * iterables.
 *
 * Values from the seed iterable are buffered; every active generator keeps
 * its own read index into the buffer, and the buffer prefix is trimmed once
 * all generators have consumed it.
 */
export class AsyncBroadcaster<T = unknown> {
  // Values not yet consumed by every active generator.
  private _buffer: Array<T> = [];
  private _nextGeneratorID = 0;
  // Tracks which index into the buffer each active generator is at
  private _indices = new Map<number, number>();
  // A promise resolved when a new value arrives; resolving it resumes
  // every generator's loop so it can yield the new value.
  private _nextValue?: Promise<void>;
  private _resolveNextValue?: () => void;

  // Public iteration surface; both entry points delegate to _values().
  iterable: AsyncBroadcasterIterable<T> = {
    [Symbol.asyncIterator]: () => {
      return this._values();
    },
    values: (options: {signal?: AbortSignal} = {}) => this._values(options),
  };

  constructor(iterable?: AsyncIterable<T>) {
    if (iterable !== undefined) {
      // Fire-and-forget: rejections from the seed iterable are not
      // surfaced here — NOTE(review): confirm that is acceptable.
      this._pushAll(iterable);
    }
  }

  // Drain the seed iterable into the buffer, waking all waiters per value.
  private async _pushAll(iterable: AsyncIterable<T>) {
    for await (const v of iterable) {
      this._buffer.push(v);
      // Notify every listener waiting on a value
      this._resolveNextValue?.();
      // Reset so the next wait allocates a fresh promise.
      this._nextValue = this._resolveNextValue = undefined;
    }
  }

  /**
   * Returns a new async iterable of values.
   *
   * Each generator starts at the current end of the buffer (it only sees
   * values pushed after it was created) and ends when options.signal aborts
   * or the consumer breaks out of the loop.
   */
  private async *_values(
    options: {signal?: AbortSignal} = {}
  ): AsyncGenerator<T, void, any> {
    const {signal} = options;
    const id = this._nextGeneratorID++;
    const waitOrAbort = makeWaitOrAbort(signal);
    // Start one past the end of the buffer to only read new values
    this._indices.set(id, this._buffer.length);
    try {
      while (signal?.aborted !== true) {
        let index = this._indices.get(id)!;
        // We're at the end of the buffer, wait for a value
        if (index === this._buffer.length) {
          // Share a single wake-up promise between all waiting generators.
          this._nextValue ??= new Promise<void>((res) => {
            this._resolveNextValue = res;
          });
          try {
            await waitOrAbort(this._nextValue);
          } finally {
            if (signal?.aborted) {
              this._cleanup(id);
              return;
            }
          }
          // Update index, since it might have changed while we waited.
          // We do not need to check if we're at the end again.
          index = this._indices.get(id)!;
        }
        const v = this._buffer[index];
        this._indices.set(id, ++index);
        this._trim();
        yield v!;
      }
    } finally {
      // If the generator is closed by break/throw/return from a for await/of
      // loop, the finally clause will run.
      this._cleanup(id);
      return;
    }
  }

  // Drop a finished generator's bookkeeping, then trim the shared buffer.
  private _cleanup(id: number) {
    this._indices.delete(id);
    this._trim();
  }

  /**
   * Trim the buffer and update all indices.
   * Discards the prefix that every remaining generator has already consumed.
   */
  private _trim() {
    let minIndex = Math.min(...this._indices.values());
    if (minIndex > 0) {
      this._buffer.splice(0, minIndex);
      for (const [i, n] of this._indices.entries()) {
        this._indices.set(i, n - minIndex);
      }
    }
  }
}
/**
 * Create a function that wraps a Promise to reject if an AbortSignal aborts.
 *
 * Fixes two defects in the original:
 *  - If the signal fired before the first wait started, the 'abort'
 *    listener called a still-undefined `abort` and threw inside the
 *    event handler.
 *  - A wait started on an already-aborted signal would hang until the
 *    wrapped promise settled; it now rejects immediately.
 */
const makeWaitOrAbort = (signal?: AbortSignal) => {
  // Rejector of the wait currently in flight (the caller awaits one at a time).
  let abort: ((e: unknown) => void) | undefined;
  signal?.addEventListener('abort', (e) => abort?.(e));
  return (v: unknown) =>
    new Promise((resolve, reject) => {
      abort = reject;
      if (signal?.aborted) {
        // Signal already fired: fail fast instead of waiting on `v`.
        reject(signal.reason);
        return;
      }
      // Forward settlement of `v` without the async-executor anti-pattern.
      Promise.resolve(v).then(resolve, reject);
    });
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.