text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Pocketcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check that all logs are terminated with '\n'
#
# Some logs are continued over multiple lines. They should be explicitly
# commented with \* Continued *\
#
# There are some instances of LogPrintf() in comments. Those can be
# ignored
export LC_ALL=C

# Collect every LogPrint/LogPrintf call site that neither ends in a literal
# '\n' nor carries the explicit "Continued" marker; bare, argument-less
# mentions (as found in comments) are filtered out as well.
violations=$(git grep --extended-regexp "LogPrintf?\(" -- "*.cpp" \
    | grep -v '\\n"' \
    | grep -v "/\* Continued \*/" \
    | grep -v "LogPrint()" \
    | grep -v "LogPrintf()")

if [[ -n "${violations}" ]]; then
    echo "All calls to LogPrintf() and LogPrint() should be terminated with \\n"
    echo
    echo "${violations}"
    exit 1
fi
|
#!/bin/bash
set -e
# Remember the invocation directory; the build/zip steps cd back to it.
# $(...) instead of deprecated backticks.
CWD=$(pwd)
do_build() {
    # Build the naprrql harness for the current $GOOS/$GOARCH and stage its
    # runtime assets (config, schemas, templates) into $OUTPUT.
    # Expects globals: CWD, OUTPUT, GOOS, GOARCH, LDFLAGS, HARNESS.
    # comment out line below to exclude naprrqlhp from build
    # include_hp
    echo "Building NAPRRQL..."
    mkdir -p "$OUTPUT"
    cd "$CWD/app/naprrql"
    go get
    GOOS="$GOOS" GOARCH="$GOARCH" go build -ldflags="$LDFLAGS" -o "$OUTPUT/$HARNESS"
    cd "$CWD/app"
    mkdir -p naprrql/in
    # Clear stale inputs and the key-value store left by previous builds.
    rm -rf "$OUTPUT"/in/*.xml "$OUTPUT"/in/*.zip "$OUTPUT/kvs"
    rsync -a naprrql/naprrql.toml naprrql/gql_schemas naprrql/in naprrql/public naprrql/reporting_templates "$OUTPUT/"
}
include_hp() {
    # Build the optional naprrqlhp harness into $OUTPUT.
    # Expects globals: CWD, OUTPUT, GOOS, GOARCH, LDFLAGS, HPHARNESS.
    echo "Including NAPRRQLHP..."
    cd "$CWD/app/naprrqlhp"
    go get
    GOOS="$GOOS" GOARCH="$GOARCH" go build -ldflags="$LDFLAGS" -o "$OUTPUT/$HPHARNESS"
}
do_zip() {
    # Package the staged naprrql directory as $ZIP one level above $OUTPUT,
    # then return to the invocation directory.
    cd "$OUTPUT/.."
    zip -qr "../$ZIP" naprrql
    cd "$CWD"
}
# Configure globals for a macOS amd64 build, then run the shared do_build
# step.  All assignments below are consumed by do_build/do_zip.
build_mac64() {
# MAC OS X (64 only)
echo "Building Mac binaries..."
GOOS=darwin
GOARCH=amd64
LDFLAGS="-s -w"
OUTPUT=$CWD/build/Mac/naprrql
# GNATS=nats-streaming-server
HARNESS=naprrql
AUDITDIFFHARNESS=napcomp
HPHARNESS=naprrqlhp
ZIP=naprrql-Mac.zip
do_build
#do_upx
# do_shells
# do_zip
echo "...all Mac binaries built..."
}
# Configure globals for a Windows amd64 cross-build (note the .exe binary
# names), then run the shared do_build step.
build_windows64() {
# WINDOWS 64
echo "Building Windows64 binaries..."
GOOS=windows
GOARCH=amd64
LDFLAGS="-s -w"
OUTPUT=$CWD/build/Win64/naprrql
# GNATS=nats-streaming-server.exe
HARNESS=naprrql.exe
HPHARNESS=naprrqlhp.exe
AUDITDIFFHARNESS=napcomp.exe
ZIP=naprrql-Win64.zip
do_build
#do_upx
# do_bats
# do_zip
echo "...all Windows64 binaries built..."
}
# Configure globals for a Linux amd64 cross-build, then run the shared
# do_build step.
build_linux64() {
# LINUX 64
echo "Building Linux64 binaries..."
GOOS=linux
GOARCH=amd64
LDFLAGS="-s -w"
OUTPUT=$CWD/build/Linux64/naprrql
# GNATS=nats-streaming-server
HARNESS=naprrql
HPHARNESS=naprrqlhp
AUDITDIFFHARNESS=napcomp
ZIP=naprrql-Linux64.zip
do_build
#do_goupx
# do_shells
# do_zip
echo "...all Linux64 binaries built..."
}
# TODO ARM
# GOOS=linux GOARCH=arm GOARM=7 go build -o $CWD/build/LinuxArm7/go-nias/aggregator
# Select the build target from the first argument; anything else (including
# no argument) builds all three platforms.
case "$1" in
    L64) build_linux64 ;;
    W64) build_windows64 ;;
    M64) build_mac64 ;;
    *)
        build_mac64
        build_windows64
        build_linux64
        ;;
esac
|
import fixUrl from './fixUrl'
it('fixes the darn urls', () => {
  // Each case pairs a raw Flickr URL with the canonical form it should map to.
  const cases = [
    [
      'https://www.flickr.com/photos/andreassofus/30677591587/in/explore-2018-10-29/',
      'https://www.flickr.com/photos/andreassofus/30677591587',
    ],
    [
      'https://www.flickr.com/photos/megane_wakui/with/44607625822/',
      'https://www.flickr.com/photos/megane_wakui',
    ],
    [
      'https://www.flickr.com/photos/dtt67/45588983952/in/explore-2018-10-30/',
      'https://www.flickr.com/photos/dtt67/45588983952',
    ],
    [
      'https://www.flickr.com/photos/oldroger/27963939197/in/faves-57803084@N07/',
      'https://www.flickr.com/photos/oldroger/27963939197',
    ],
  ]
  for (const [input, expected] of cases) {
    expect(fixUrl(input)).toEqual(expected)
  }
})
it('accepts link without "www"', () => {
  // Same canonicalisation cases as above, but without the "www." prefix.
  const cases = [
    [
      'https://flickr.com/photos/andreassofus/30677591587/in/explore-2018-10-29/',
      'https://flickr.com/photos/andreassofus/30677591587',
    ],
    [
      'https://flickr.com/photos/megane_wakui/with/44607625822/',
      'https://flickr.com/photos/megane_wakui',
    ],
    [
      'https://flickr.com/photos/dtt67/45588983952/in/explore-2018-10-30/',
      'https://flickr.com/photos/dtt67/45588983952',
    ],
    [
      'https://flickr.com/photos/oldroger/27963939197/in/faves-57803084@N07/',
      'https://flickr.com/photos/oldroger/27963939197',
    ],
  ]
  for (const [input, expected] of cases) {
    expect(fixUrl(input)).toEqual(expected)
  }
})
|
#!/bin/bash
# Launch the SMART Technologies "Activation Wizard 2" non-interactively to
# activate the Notebook 14 license.  Earlier invocations kept for reference:
# /Library/Application\ Support/SMART\ Technologies/Activation\ Wizard.app/Contents/MacOS/Activation\ Wizard --puid notebook_14 --uipack notebook_10 --m=4 --v=4 --pks "" --a --auto-renew
# /Library/Application\ Support/SMART\ Technologies/Activation\ Wizard\ 2.app/Contents/MacOS/Activation\ Wizard --puid=notebook_14 --uipack=notebook_10 --m=4 --v=5 --pks "" --a
# Fix: removed a stray trailing '|' which left an incomplete pipeline.
/Library/Application\ Support/SMART\ Technologies/Activation\ Wizard\ 2.app/Contents/MacOS/Activation\ Wizard\ 2 --puid=notebook_14 --uipack=notebook_10 --m=4 --v=5 --pks "" --a
def classify_sentiment(comments):
    """Label each comment as 'positive', 'negative', or 'neutral'.

    A comment containing "great" or "love" is positive (checked first);
    otherwise one containing "hate" or "bad" is negative; anything else is
    neutral.  Matching is case-sensitive substring containment.
    """
    def _label(text):
        # Positive keywords take precedence over negative ones.
        if "great" in text or "love" in text:
            return "positive"
        if "hate" in text or "bad" in text:
            return "negative"
        return "neutral"

    return [_label(comment) for comment in comments]
if __name__ == "__main__":
    # Fix: `comments` was undefined at module scope, so this line raised a
    # NameError on import.  Use an explicit sample list and guard the demo.
    print(classify_sentiment(["I love this product", "this is bad", "it arrived today"]))
<filename>src/test.java
import java.awt.EventQueue;
import javax.swing.JFrame;
import javax.swing.JPasswordField;
import java.awt.BorderLayout;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.SwingConstants;
import javax.swing.JTextField;
import javax.swing.BoxLayout;
import java.awt.GridLayout;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.LayoutStyle.ComponentPlacement;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
/**
 * Login window for the IIT Bank ticket system.  Shows username/password
 * fields plus Contact Info and Exit buttons, and routes authenticated users
 * to either the admin GUI or the user GUI.
 */
public class test {
    private JFrame frame;                 // top-level application window
    private JTextField textField;         // username input
    private JPasswordField passwordField; // password input

    /**
     * Launch the application.
     */
    public static void main(String[] args) {
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    test window = new test();
                    window.frame.setVisible(true);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Create the application.
     */
    public test() {
        initialize();
    }

    /**
     * Initialize the contents of the frame: build the widgets, wire the
     * button listeners, and lay everything out with a GroupLayout.
     */
    private void initialize() {
        frame = new JFrame();
        frame.setBounds(100, 100, 450, 300);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        textField = new JTextField();
        textField.setColumns(10);
        JLabel lblLogin = new JLabel("Login");
        passwordField = new JPasswordField();
        JLabel lblPassword = new JLabel("Password");
        JButton btnLogin = new JButton("Login");
        btnLogin.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                passwordField.selectAll();
                // NOTE(review): "<PASSWORD>" looks like a scrubbed placeholder for
                // the real admin credentials, and comparing BOTH the username and
                // password fields against the same literal is suspicious — confirm
                // the intended values against the original source.
                if(textField.getText().equals("<PASSWORD>") && passwordField.getText().equals("<PASSWORD>"))
                {
                    // Admin credentials accepted: close this window, open admin GUI.
                    frame.dispose();
                    try
                    {
                        adminGUI.main();
                        //adminGUI.main();
                    }catch(Exception e1){e1.printStackTrace();}
                }
                else if(textField.getText().equals("usr") && passwordField.getText().equals("help"))
                {
                    // Regular-user credentials accepted: open the user GUI.
                    frame.dispose();
                    try
                    {
                        userLogin.main();
                    }catch(Exception e2){e2.printStackTrace();}
                }
                else
                {
                    // Anything else: show a dialog and log to stdout.
                    JOptionPane.showMessageDialog(null, "Your username or password is incorrect please try again!", null, JOptionPane.PLAIN_MESSAGE, null);
                    System.out.println("The username or password is incorrect please try again!");
                }
            }
        });
        JButton btnContactInfo = new JButton("Contact Info");
        btnContactInfo.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent arg0)
            {
                JOptionPane.showMessageDialog(null, "Name: IIT Bank Ticket System \nLocation: test"
                    + "\nTelephone: \nContact Person: <NAME> (aka IT Guru)", null, JOptionPane.PLAIN_MESSAGE, null);
            }
        });
        JButton btnExit = new JButton("Exit");
        btnExit.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                //frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.dispose();
            }
        });
        // GUI-designer-generated GroupLayout: fixed gaps and preferred sizes
        // position the login form; prefer regenerating over hand-editing.
        GroupLayout groupLayout = new GroupLayout(frame.getContentPane());
        groupLayout.setHorizontalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
            .addGroup(groupLayout.createSequentialGroup()
            .addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
            .addGroup(groupLayout.createSequentialGroup()
            .addGap(195)
            .addComponent(lblLogin, GroupLayout.PREFERRED_SIZE, 36, GroupLayout.PREFERRED_SIZE))
            .addGroup(groupLayout.createSequentialGroup()
            .addGap(123)
            .addGroup(groupLayout.createParallelGroup(Alignment.LEADING, false)
            .addComponent(passwordField, Alignment.TRAILING)
            .addComponent(textField, Alignment.TRAILING, GroupLayout.DEFAULT_SIZE, 176, Short.MAX_VALUE)))
            .addGroup(groupLayout.createSequentialGroup()
            .addGap(186)
            .addComponent(lblPassword))
            .addGroup(Alignment.TRAILING, groupLayout.createSequentialGroup()
            .addContainerGap()
            .addComponent(btnExit)
            .addPreferredGap(ComponentPlacement.RELATED, 268, Short.MAX_VALUE)
            .addComponent(btnLogin))
            .addGroup(groupLayout.createSequentialGroup()
            .addContainerGap()
            .addComponent(btnContactInfo)))
            .addContainerGap())
        );
        groupLayout.setVerticalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
            .addGroup(groupLayout.createSequentialGroup()
            .addContainerGap()
            .addComponent(btnContactInfo)
            .addGap(34)
            .addComponent(lblLogin, GroupLayout.PREFERRED_SIZE, 14, GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(ComponentPlacement.RELATED)
            .addComponent(textField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
            .addGap(23)
            .addComponent(lblPassword)
            .addPreferredGap(ComponentPlacement.RELATED)
            .addComponent(passwordField, GroupLayout.PREFERRED_SIZE, 21, GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(ComponentPlacement.RELATED, 55, Short.MAX_VALUE)
            .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
            .addComponent(btnLogin)
            .addComponent(btnExit))
            .addContainerGap())
        );
        frame.getContentPane().setLayout(groupLayout);
    }
}
|
<filename>common/src/main/java/com/codefinity/microcontinuum/common/port/adapter/persistance/ConnectionProvider.java
package com.codefinity.microcontinuum.common.port.adapter.persistance;
import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;
/**
 * Provides one JDBC connection per thread via a {@link ThreadLocal} cache.
 */
public class ConnectionProvider {

    /** Per-thread connection cache; lazily populated by {@link #connection(DataSource)}. */
    private static final ThreadLocal<Connection> connectionHolder = new ThreadLocal<Connection>();

    /**
     * Closes the calling thread's cached connection, if any, and clears the
     * thread-local slot.
     *
     * @throws IllegalStateException if closing the underlying connection fails
     */
    public static void closeConnection() {
        try {
            Connection connection = connection();
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            throw new IllegalStateException(
                    "Cannot close connection because: "
                            + e.getMessage(),
                    e);
        } finally {
            // Fix: remove() instead of set(null) — releases the ThreadLocal map
            // entry entirely, avoiding leaks on pooled/long-lived threads.
            connectionHolder.remove();
        }
    }

    /**
     * @return the connection cached for the calling thread, or {@code null}
     *         if none has been opened yet
     */
    public static Connection connection() {
        return connectionHolder.get();
    }

    /**
     * Returns the calling thread's cached connection, opening a new one from
     * {@code aDataSource} and caching it on first use.
     *
     * @param aDataSource data source used to open the connection when absent
     * @return an open connection bound to the calling thread
     * @throws IllegalStateException if a connection cannot be obtained
     */
    public static Connection connection(DataSource aDataSource) {
        Connection connection = connection();
        try {
            if (connection == null) {
                connection = aDataSource.getConnection();
                connectionHolder.set(connection);
            }
        } catch (SQLException e) {
            throw new IllegalStateException(
                    "Connection not provided because: "
                            + e.getMessage(),
                    e);
        }
        return connection;
    }
}
|
#!/bin/bash
# atl-init-confluence: installs and configures Atlassian Confluence on an AWS
# node.  Helper functions (atl_log, atl_createRole, ...) come from the
# sourced init scripts below.
set -e
. /etc/init.d/atl-functions
. /etc/init.d/atl-confluence-common
trap 'atl_error ${LINENO}' ERR
# Data Center nodes discover peers via the EC2 API; default the Hazelcast
# host header to the regional EC2 endpoint unless one was supplied.
if [[ "x${ATL_CONFLUENCE_DATA_CENTER}" == "xtrue" ]]; then
ATL_HAZELCAST_NETWORK_AWS_HOST_HEADER="${ATL_HAZELCAST_NETWORK_AWS_HOST_HEADER:-"ec2.${ATL_HAZELCAST_NETWORK_AWS_IAM_REGION}.amazonaws.com"}"
fi
# We are using ALB so Confluence will startup without Synchrony-Proxy and using Synchrony at port 8091 of LB
# Full node bring-up: install Confluence, configure proxy/home/DB, run local
# Ansible, fix the base URL on non-prod stacks, then start the service.
function start {
atl_log "=== BEGIN: service atl-init-confluence start ==="
atl_log "Initialising ${ATL_CONFLUENCE_FULL_DISPLAY_NAME}"
installConfluence
if [[ "xtrue" == "x$(atl_toLowerCase ${ATL_NGINX_ENABLED})" ]]; then
configureNginx
fi
updateHostName "${ATL_PROXY_NAME}"
configureConfluenceHome
exportCatalinaOpts
configureConfluenceEnvironmentVariables
atl_configureThreadHeapScripts
if [[ -n "${ATL_AUTOLOGIN_COOKIE_AGE}" ]]; then
atl_autologinCookieAge "${ATL_CONFLUENCE_USER}" "${ATL_CONFLUENCE_INSTALL_DIR}/confluence/WEB-INF/classes/seraph-config.xml" "${ATL_AUTOLOGIN_COOKIE_AGE}"
fi
# Database: local postgres when enabled, otherwise a remote DB if one is named.
if [[ "x${ATL_POSTGRES_ENABLED}" == "xtrue" ]]; then
createConfluenceDbAndRole
elif [[ -n "${ATL_DB_NAME}" ]]; then
configureRemoteDb
fi
atl_log "=== BEGIN: service atl-init-confluence runLocalAnsible ==="
runLocalAnsible
atl_log "=== END: service atl-init-confluence runLocalAnsible ==="
atl_recursiveChown "root" "confluence" "/etc/atl"
# Non-prod stacks get their stored base URL rewritten to this stack's LB.
if [ "${ATL_ENVIRONMENT}" != "prod" ]; then
local baseURL="${ATL_TOMCAT_SCHEME}://${ATL_PROXY_NAME}${ATL_TOMCAT_CONTEXTPATH}"
if updateBaseUrl ${baseURL} ${ATL_DB_HOST} ${ATL_DB_PORT} ${ATL_DB_NAME}; then echo "baseUrl updated";fi
fi
goCONF
atl_log "=== END: service atl-init-confluence start ==="
}
# Rewrite Confluence's stored <baseUrl> in the bandana settings table so the
# instance comes up pointing at the supplied URL.
# Arguments: $1 base URL, $2 DB host, $3 DB port, $4 DB name.
function updateBaseUrl {
    atl_log "=== BEGIN: Updating Server URL ==="
    local BASE_URL=$1
    local DB_HOST=$2
    local DB_PORT=$3
    local DB_NAME=$4
    # Disable globbing while the SQL (which contains '*') is passed around,
    # and restore it afterwards — previously 'set -f' leaked into the rest of
    # the script.  Also dropped the unused QUERY_RESULT local.
    set -f
    (su postgres -c "psql -w -h ${DB_HOST} -p ${DB_PORT} -d ${DB_NAME} -t --command \"update bandana set bandanavalue=regexp_replace(bandanavalue, '<baseUrl>.*</baseUrl>', '<baseUrl>${BASE_URL}</baseUrl>') where bandanacontext = '_GLOBAL' and bandanakey = 'atlassian.confluence.settings';\"") >> "${ATL_LOG}" 2>&1
    set +f
    atl_log "=== END: Server baseUrl update ==="
}
# Patch JVM heap flags in setenv.sh and append the CATALINA_OPTS blocks
# (GC logging, encoding, Synchrony wiring; plus cluster opts for Data Center).
function configureConfluenceEnvironmentVariables (){
atl_log "=== BEGIN: service configureConfluenceEnvironmentVariables ==="
# Normalise heap size (append "m" when no m/M/g/G unit) and rewrite -Xmx/-Xms.
if [ -n "${ATL_JVM_HEAP}" ]; then
if [[ ! "${ATL_JVM_HEAP}" =~ ^.*[mMgG]$ ]]; then
ATL_JVM_HEAP="${ATL_JVM_HEAP}m"
fi
su "${ATL_CONFLUENCE_USER}" -c "sed -i -r 's/^(.*)Xmx(\w+) (.*)$/\1Xmx${ATL_JVM_HEAP} \3/' /opt/atlassian/confluence/bin/setenv.sh" >> "${ATL_LOG}" 2>&1
su "${ATL_CONFLUENCE_USER}" -c "sed -i -r 's/^(.*)Xms(\w+) (.*)$/\1Xms${ATL_JVM_HEAP} \3/' /opt/atlassian/confluence/bin/setenv.sh" >> "${ATL_LOG}" 2>&1
fi
atl_resolveHostNamesAndIps > /dev/null 2>&1
# Heredoc content is appended verbatim to setenv.sh — do not re-indent it.
cat <<EOT | su "${ATL_CONFLUENCE_USER}" -c "tee -a \"${ATL_CONFLUENCE_INSTALL_DIR}/bin/setenv.sh\"" > /dev/null
CATALINA_OPTS="\${CATALINA_OPTS} -XX:+PrintAdaptiveSizePolicy"
CATALINA_OPTS="\${CATALINA_OPTS} -XX:+PrintGCDetails"
CATALINA_OPTS="\${CATALINA_OPTS} -XX:NumberOfGCLogFiles=10"
CATALINA_OPTS="\${CATALINA_OPTS} -XX:GCLogFileSize=5m"
CATALINA_OPTS="\${CATALINA_OPTS} -XX:+UseGCLogFileRotation"
CATALINA_OPTS="\${CATALINA_OPTS} -XX:+PrintTenuringDistribution"
CATALINA_OPTS="\${CATALINA_OPTS} -Dfile.encoding=UTF-8"
CATALINA_OPTS="\${CATALINA_OPTS} -Dconfluence.upgrade.recovery.file.enabled=false"
CATALINA_OPTS="\${CATALINA_OPTS} -Djava.net.preferIPv4Stack=true"
CATALINA_OPTS="\${CATALINA_OPTS} -Djira.executor.threadpool.size=16"
CATALINA_OPTS="\${CATALINA_OPTS} -Datlassian.event.thread_pool_configuration.queue_size=4096"
CATALINA_OPTS="\${CATALINA_OPTS} -Dshare.group.email.mapping=atlassian-all:atlassian-all@atlassian.com,atlassian-staff:atlassian-staff@atlassian.com"
CATALINA_OPTS="\${CATALINA_OPTS} -Datlassian.plugins.enable.wait=300"
CATALINA_OPTS="\${CATALINA_OPTS} -Dsynchrony.service.url=${ATL_SYNCHRONY_SERVICE_URL} -Dsynchrony.proxy.enabled=false ${ATL_CATALINA_OPTS}"
export CATALINA_OPTS
EOT
# Data Center nodes additionally identify themselves by private IP.
if [[ "x${ATL_CONFLUENCE_DATA_CENTER}" == "xtrue" ]]; then
cat <<EOT | su "${ATL_CONFLUENCE_USER}" -c "tee -a \"${ATL_CONFLUENCE_INSTALL_DIR}/bin/setenv.sh\"" > /dev/null
CATALINA_OPTS="\${CATALINA_OPTS} -Dconfluence.cluster.hazelcast.max.no.heartbeat.seconds=60"
CATALINA_OPTS="\${CATALINA_OPTS} -Dconfluence.cluster.node.name=${_ATL_PRIVATE_IPV4}"
export CATALINA_OPTS
EOT
fi
atl_log "=== END: service configureConfluenceEnvironmentVariables ==="
}
# Create the instance-store-backed caches/ and tmp/ directories under the
# supplied root and hand ownership to the Confluence user.
# Arguments: $1 - instance store directory (required).
function createInstanceStoreDirs {
atl_log "=== BEGIN: service atl-init-confluence create-instance-store-dirs ==="
atl_log "Initialising ${ATL_CONFLUENCE_FULL_DISPLAY_NAME}"
local CONFLUENCE_DIR=${1:?"The instance store directory for ${ATL_CONFLUENCE_NAME} must be supplied"}
if [[ ! -e "${CONFLUENCE_DIR}" ]]; then
atl_log "Creating ${CONFLUENCE_DIR}"
mkdir -p "${CONFLUENCE_DIR}" >> "${ATL_LOG}" 2>&1
else
atl_log "Not creating ${CONFLUENCE_DIR} because it already exists"
fi
atl_log "Creating ${CONFLUENCE_DIR}/caches"
mkdir -p "${CONFLUENCE_DIR}/caches" >> "${ATL_LOG}" 2>&1
atl_log "Creating ${CONFLUENCE_DIR}/tmp"
mkdir -p "${CONFLUENCE_DIR}/tmp" >> "${ATL_LOG}" 2>&1
atl_log "Changing ownership of the contents of ${CONFLUENCE_DIR} to ${ATL_CONFLUENCE_USER}"
chown -R "${ATL_CONFLUENCE_USER}":"${ATL_CONFLUENCE_USER}" "${CONFLUENCE_DIR}"
atl_log "=== END: service atl-init-confluence create-instance-store-dirs ==="
}
# Link the mounted (EFS/NFS) shared-home into the expected location for
# Data Center clusters; no-op if the data mount is absent.
function configureSharedHome {
atl_log "=== BEGIN: service atl-init-confluence configureSharedHome ==="
local CONFLUENCE_SHARED="${ATL_APP_DATA_MOUNT}/${ATL_CONFLUENCE_SERVICE_NAME}/shared-home"
if mountpoint -q "${ATL_APP_DATA_MOUNT}" || mountpoint -q "${CONFLUENCE_SHARED}"; then
atl_log "Linking ${CONFLUENCE_SHARED} to ${ATL_CONFLUENCE_SHARED_HOME}"
mkdir -p "${CONFLUENCE_SHARED}"
chown -H "${ATL_CONFLUENCE_USER}":"${ATL_CONFLUENCE_USER}" "${CONFLUENCE_SHARED}" >> "${ATL_LOG}" 2>&1
# Best effort: contents may not exist yet on a brand-new cluster.
if ! chown -H "${ATL_CONFLUENCE_USER}":"${ATL_CONFLUENCE_USER}" ${CONFLUENCE_SHARED}/* >> "${ATL_LOG}" 2>&1; then
atl_log "Chown on contents of shared home failed most likley because this is a new cluster or instance and no contents yet exist, moving on"
fi
su "${ATL_CONFLUENCE_USER}" -c "ln -fs \"${CONFLUENCE_SHARED}\" \"${ATL_CONFLUENCE_SHARED_HOME}\"" >> "${ATL_LOG}" 2>&1
else
atl_log "No mountpoint for shared home exists."
fi
atl_log "=== END: service atl-init-confluence configureSharedHome ==="
}
# Create the Confluence local home (and shared home for Data Center) and
# hand ownership to the Confluence user.
function configureConfluenceHome {
atl_log "Configuring ${ATL_CONFLUENCE_HOME}"
mkdir -p "${ATL_CONFLUENCE_HOME}" >> "${ATL_LOG}" 2>&1
if [[ "x${ATL_CONFLUENCE_DATA_CENTER}" == "xtrue" ]]; then
configureSharedHome
fi
atl_log "Setting ownership of ${ATL_CONFLUENCE_HOME} to '${ATL_CONFLUENCE_USER}' user"
chown -R -H "${ATL_CONFLUENCE_USER}":"${ATL_CONFLUENCE_USER}" "${ATL_CONFLUENCE_HOME}" >> "${ATL_LOG}" 2>&1
atl_log "Done configuring ${ATL_CONFLUENCE_HOME}"
}
# Write (or update) confluence.cfg.xml with the JDBC/c3p0 settings taken
# from the ATL_JDBC_*/ATL_DB_* environment, plus cluster properties for
# Data Center nodes.
function configureDbProperties {
local LOCAL_CFG_XML="${ATL_CONFLUENCE_HOME}/confluence.cfg.xml"
# Property-name -> value map shared by both the update and create paths.
declare -A SERVER_PROPS=(
["hibernate.connection.driver_class"]="${ATL_JDBC_DRIVER}"
["hibernate.connection.url"]="${ATL_JDBC_URL}"
["hibernate.connection.password"]="${ATL_JDBC_PASSWORD}"
["hibernate.connection.username"]="${ATL_JDBC_USER}"
["hibernate.c3p0.max_size"]="${ATL_DB_POOLMAXSIZE}"
["hibernate.c3p0.min_size"]="${ATL_DB_POOLMINSIZE}"
["hibernate.c3p0.timeout"]="${ATL_DB_TIMEOUT}"
["hibernate.c3p0.idle_test_period"]="${ATL_DB_IDLETESTPERIOD}"
["hibernate.c3p0.max_statements"]="${ATL_DB_MAXSTATEMENTS}"
["hibernate.c3p0.validate"]="${ATL_DB_VALIDATE}"
["hibernate.c3p0.preferredTestQuery"]="select version();"
["hibernate.c3p0.acquire_increment"]="${ATL_DB_ACQUIREINCREMENT}"
)
if [[ "x${ATL_CONFLUENCE_DATA_CENTER}" != "xtrue" ]] && [[ -f "${LOCAL_CFG_XML}" ]] && grep "setupStep>complete" "${LOCAL_CFG_XML}" >> "${ATL_LOG}" 2>&1; then
# Confluence Server uses an additional EBS volume for local-home, but if we find a completed XML config, we'll need to update it
atl_log "Found existing Confluence Server config in local-home; editing confluence.cfg.xml with updated configuration options"
for PROP in "${!SERVER_PROPS[@]}"; do
xmlstarlet edit --inplace --update "/confluence-configuration/properties/property[@name='${PROP}']" --value "${SERVER_PROPS[${PROP}]}" "${LOCAL_CFG_XML}"
done
else
# Otherwise, consider this a new install (or Data Center node) and we'll create the configuration from scratch
atl_log "Configuring ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} DB settings"
local PRODUCT_CONFIG_NAME="confluence"
local CONFLUENCE_SETUP_STEP="setupstart"
local CONFLUENCE_SETUP_TYPE="custom"
local CONFLUENCE_BUILD_NUMBER="0"
# Heredoc content is the literal XML written to confluence.cfg.xml.
cat <<EOT | su "${ATL_CONFLUENCE_USER}" -c "tee \"${LOCAL_CFG_XML}\"" > /dev/null
<?xml version="1.0" encoding="UTF-8"?>
<${PRODUCT_CONFIG_NAME}-configuration>
<setupStep>${CONFLUENCE_SETUP_STEP}</setupStep>
<setupType>${CONFLUENCE_SETUP_TYPE}</setupType>
<buildNumber>${CONFLUENCE_BUILD_NUMBER}</buildNumber>
<properties>
<property name="confluence.database.choice">postgresql</property>
<property name="confluence.database.connection.type">database-type-standard</property>
<property name="hibernate.dialect">com.atlassian.confluence.impl.hibernate.dialect.PostgreSQLDialect</property>
<property name="webwork.multipart.saveDir">\${localHome}/temp</property>
<property name="attachments.dir">\${confluenceHome}/attachments</property>
EOT
for PROP in "${!SERVER_PROPS[@]}"; do
echo "    <property name=\"${PROP}\">${SERVER_PROPS[${PROP}]}</property>" | su "${ATL_CONFLUENCE_USER}" -c "tee -a \"${LOCAL_CFG_XML}\"" > /dev/null
done
# Data Center nodes additionally carry the Hazelcast/cluster properties.
if [[ "x${ATL_CONFLUENCE_DATA_CENTER}" == "xtrue" ]]; then
cat <<EOT | su "${ATL_CONFLUENCE_USER}" -c "tee -a \"${LOCAL_CFG_XML}\"" > /dev/null
<property name="confluence.cluster">true</property>
<property name="confluence.cluster.home">${ATL_CONFLUENCE_SHARED_HOME}</property>
<property name="confluence.cluster.aws.iam.role">${ATL_HAZELCAST_NETWORK_AWS_IAM_ROLE}</property>
<property name="confluence.cluster.aws.region">${ATL_HAZELCAST_NETWORK_AWS_IAM_REGION}</property>
<property name="confluence.cluster.aws.host.header">${ATL_HAZELCAST_NETWORK_AWS_HOST_HEADER}</property>
<property name="confluence.cluster.aws.tag.key">${ATL_HAZELCAST_NETWORK_AWS_TAG_KEY}</property>
<property name="confluence.cluster.aws.tag.value">${ATL_HAZELCAST_NETWORK_AWS_TAG_VALUE}</property>
<property name="confluence.cluster.join.type">aws</property>
<property name="confluence.cluster.name">${ATL_AWS_STACK_NAME}</property>
<property name="confluence.cluster.ttl">1</property>
<property name="shared-home">${ATL_CONFLUENCE_SHARED_HOME}</property>
EOT
fi
cat <<EOT | su "${ATL_CONFLUENCE_USER}" -c "tee -a \"${LOCAL_CFG_XML}\"" > /dev/null
</properties>
</${PRODUCT_CONFIG_NAME}-configuration>
EOT
# The file contains DB credentials — keep it readable by the owner only.
su "${ATL_CONFLUENCE_USER}" -c "chmod 600 \"${LOCAL_CFG_XML}\"" >> "${ATL_LOG}" 2>&1
atl_log "Done configuring ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} to use the ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} DB role ${ATL_CONFLUENCE_DB_USER}"
fi
}
# Create a local postgres role + database for Confluence (idempotent: skipped
# when the role already exists).
function createConfluenceDbAndRole {
if atl_roleExists ${ATL_CONFLUENCE_DB_USER}; then
atl_log "${ATL_CONFLUENCE_DB_USER} role already exists. Skipping database and role creation. Skipping dbconfig.xml update"
else
# Random UUID serves as a machine-generated password for the new role.
local PASSWORD=$(cat /proc/sys/kernel/random/uuid)
atl_createRole "${ATL_CONFLUENCE_SHORT_DISPLAY_NAME}" "${ATL_CONFLUENCE_DB_USER}" "${PASSWORD}"
atl_createDb "${ATL_CONFLUENCE_SHORT_DISPLAY_NAME}" "${ATL_CONFLUENCE_DB_NAME}" "${ATL_CONFLUENCE_DB_USER}"
# NOTE(review): configureDbProperties declares no positional parameters and
# reads ATL_JDBC_* globals instead — these arguments appear unused; verify.
configureDbProperties "org.postgresql.Driver" "jdbc:postgresql://localhost/${ATL_CONFLUENCE_DB_NAME}" "${ATL_CONFLUENCE_DB_USER}" "${PASSWORD}"
fi
}
# Point Confluence at a remote postgres: create/update the role remotely,
# create the database when needed, then write the DB properties.
function configureRemoteDb {
atl_log "Configuring remote DB for use with ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME}"
if [[ -n "${ATL_DB_PASSWORD}" ]]; then
atl_configureDbPassword "${ATL_DB_PASSWORD}" "*" "${ATL_DB_HOST}" "${ATL_DB_PORT}"
if atl_roleExists ${ATL_JDBC_USER} "postgres" ${ATL_DB_HOST} ${ATL_DB_PORT}; then
atl_log "${ATL_JDBC_USER} role already exists. Skipping role creation."
atl_log "Setting password for ${ATL_JDBC_USER}."
atl_configureDbUserPassword "${ATL_JDBC_USER}" "${ATL_JDBC_PASSWORD}" "${ATL_DB_HOST}" "${ATL_DB_PORT}"
else
atl_createRole "${ATL_CONFLUENCE_SHORT_DISPLAY_NAME}" "${ATL_JDBC_USER}" "${ATL_JDBC_PASSWORD}" "${ATL_DB_HOST}" "${ATL_DB_PORT}"
atl_createRemoteDb "${ATL_CONFLUENCE_SHORT_DISPLAY_NAME}" "${ATL_DB_NAME}" "${ATL_JDBC_USER}" "${ATL_DB_HOST}" "${ATL_DB_PORT}" "C" "C" "template0"
fi
# NOTE(review): configureDbProperties reads ATL_JDBC_* globals rather than
# positional parameters — these arguments appear unused; verify.
configureDbProperties "${ATL_JDBC_DRIVER}" "${ATL_JDBC_URL}" "${ATL_JDBC_USER}" "${ATL_JDBC_PASSWORD}"
fi
}
# Front Confluence (Tomcat on 8080) with nginx under the configured path.
function configureNginx {
updateHostName "${ATL_HOST_NAME}"
atl_addNginxProductMapping "${ATL_CONFLUENCE_NGINX_PATH}" 8080
}
# Download the Confluence installer, generate an unattended-install varfile,
# and run the installer into ${ATL_CONFLUENCE_INSTALL_DIR}.  Aborts if the
# install directory already exists or the download fails.
function installConfluence {
atl_log "Checking if ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} has already been installed"
# Optional collectd support: expose the JVM server lib and restart collectd.
if [[ "${ATL_USE_COLLECTD}" = true && -e /etc/init.d/collectd ]]; then
atl_log "Creating file /etc/ld.so.conf.d/confluence.conf"
echo /usr/lib/jvm/jre-1.7.0-openjdk.x86_64/lib/amd64/server/ > /etc/ld.so.conf.d/confluence.conf
sudo ldconfig
if [[ $ATL_STARTCOLLECTD == "true" ]]; then
service collectd restart
fi
atl_log "Creating file /etc/ld.so.conf.d/confluence.conf ==> done"
fi
if [[ -d "${ATL_CONFLUENCE_INSTALL_DIR}" ]]; then
local ERROR_MESSAGE="${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} install directory ${ATL_CONFLUENCE_INSTALL_DIR} already exists - aborting installation"
atl_log "${ERROR_MESSAGE}"
atl_fatal_error "${ERROR_MESSAGE}"
fi
atl_log "Downloading ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} ${ATL_CONFLUENCE_VERSION} from ${ATL_CONFLUENCE_INSTALLER_DOWNLOAD_URL}"
if ! curl -L -f --silent "${ATL_CONFLUENCE_INSTALLER_DOWNLOAD_URL}" -o "$(atl_tempDir)/installer" >> "${ATL_LOG}" 2>&1; then
local ERROR_MESSAGE="Could not download installer from ${ATL_CONFLUENCE_INSTALLER_DOWNLOAD_URL} - aborting installation"
atl_log "${ERROR_MESSAGE}"
atl_fatal_error "${ERROR_MESSAGE}"
fi
chmod +x "$(atl_tempDir)/installer" >> "${ATL_LOG}" 2>&1
# Unattended-install answers written verbatim for the install4j installer.
# NOTE(review): executeLauncherAction$Boolean appears twice (true, then
# false) — presumably the later value wins; confirm and drop the duplicate.
cat <<EOT >> "$(atl_tempDir)/installer.varfile"
launch.application\$Boolean=false
rmiPort\$Long=8005
app.defaultHome=${ATL_CONFLUENCE_HOME}
app.install.service\$Boolean=true
existingInstallationDir=${ATL_CONFLUENCE_INSTALL_DIR}
sys.confirmedUpdateInstallationString=false
sys.languageId=en
sys.installationDir=${ATL_CONFLUENCE_INSTALL_DIR}
executeLauncherAction\$Boolean=true
httpPort\$Long=8080
portChoice=default
executeLauncherAction\$Boolean=false
EOT
# Keep a copy of the varfile for post-mortem debugging.
cp $(atl_tempDir)/installer.varfile /tmp/installer.varfile.bkp
atl_log "Creating ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} install directory"
mkdir -p "${ATL_CONFLUENCE_INSTALL_DIR}"
atl_log "Installing ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} to ${ATL_CONFLUENCE_INSTALL_DIR}"
"$(atl_tempDir)/installer" -q -varfile "$(atl_tempDir)/installer.varfile" >> "${ATL_LOG}" 2>&1
atl_log "Installed ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} to ${ATL_CONFLUENCE_INSTALL_DIR}"
atl_log "Cleaning up"
rm -rf "$(atl_tempDir)"/installer* >> "${ATL_LOG}" 2>&1
chown -R "${ATL_CONFLUENCE_USER}":"${ATL_CONFLUENCE_USER}" "${ATL_CONFLUENCE_INSTALL_DIR}"
atl_log "${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} installation completed"
}
# Stop the Confluence service.
function noCONF {
atl_log "Stopping ${ATL_CONFLUENCE_SERVICE_NAME} service"
service "${ATL_CONFLUENCE_SERVICE_NAME}" stop >> "${ATL_LOG}" 2>&1
}
# Start the Confluence service.
function goCONF {
atl_log "Starting ${ATL_CONFLUENCE_SERVICE_NAME} service"
service "${ATL_CONFLUENCE_SERVICE_NAME}" start >> "${ATL_LOG}" 2>&1
}
# Point the Tomcat connector at the supplied host name ($1) and bounce the
# service if it is currently running so the change takes effect.
function updateHostName {
atl_configureTomcatConnector "${1}" "8080" "8081" "${ATL_CONFLUENCE_USER}" \
"${ATL_CONFLUENCE_INSTALL_DIR}/conf" \
"${ATL_CONFLUENCE_INSTALL_DIR}/confluence/WEB-INF"
STATUS="$(service "${ATL_CONFLUENCE_SERVICE_NAME}" status || true)"
if [[ "${STATUS}" =~ .*\ is\ running ]]; then
atl_log "Restarting ${ATL_CONFLUENCE_SHORT_DISPLAY_NAME} to pick up host name change"
noCONF
goCONF
fi
}
# Entry point: dispatch on the service action passed as $1.
# Fixes: RETVAL now defaults to 0 (it was unset on success, making the final
# 'exit' fall back to the last command's status); the usage text previously
# advertised 'init-instance-store-dirs' which is not an accepted command;
# "$2" is quoted.
RETVAL=0
case "$1" in
    start)
        start
        ;;
    create-instance-store-dirs)
        createInstanceStoreDirs "$2"
        ;;
    update-host-name)
        updateHostName "$2"
        ;;
    *)
        echo "Usage: $0 {start|create-instance-store-dirs|update-host-name}"
        RETVAL=1
esac
exit ${RETVAL}
|
<filename>lib/poolparty/net/init.rb
# Load the core net libraries. These are necessary for any of the
# remoter_bases to function, so they are required before registration below.
%w(remote_instance remote_bases remoter_base).each do |file|
require File.join(::File.dirname(__FILE__),file+'.rb')
end
# Register available remoter_bases: requiring each
# remoter_bases/<base>/<base>.rb lets the implementation self-register.
# Fix: dropped the unused `name` local that was computed but never read.
Dir["#{::File.dirname(__FILE__)}/remoter_bases/*/*.rb"].each do |base|
  require base
end
<reponame>thatgeeman/pybx
import unittest
import numpy as np
from pybx.sample import get_example
# Deterministic RNG so get_example() output is reproducible across runs.
np.random.seed(1)
# Inputs passed to pybx.sample.get_example in the tests below.
params = {
    "feature_szs": [(2, 2), (3, 3), (4, 4)],
    "feature_sz": (2, 2),
    "asp_ratio": 1 / 2.,
    "image_sz": (10, 10, 3),
    "data_dir": './data',
}
# Expected values derived from the seeded example annotations.
results = {
    "scaled_ans": (9.0, 6.0),
}
class SampleTestCase(unittest.TestCase):
    """Smoke tests for pybx.sample.get_example using the bundled data."""

    def test_example(self):
        # Load the example image, annotations and logits for the configured
        # image/feature sizes from the local data directory.
        im, ann, lgts, _ = get_example(image_sz=params["image_sz"], feature_sz=params["feature_sz"], logits=True,
                                       pth=params["data_dir"], load_ann=True)
        self.assertEqual(im.shape, params["image_sz"])
        # Spot-check two coordinates of the scaled annotations.
        r = ann[0]['x_max'], ann[1]['y_min']
        self.assertEqual(r, results["scaled_ans"])
        self.assertEqual(lgts.shape, params["feature_sz"])
if __name__ == '__main__':
unittest.main()
|
<reponame>viettrung9012/home-panel
// @flow
import React, { useEffect, useCallback } from 'react';
import { RouteComponentProps } from 'react-router-dom';
import io from 'socket.io-client';
import authentication from '@feathersjs/authentication-client';
import feathers from '@feathersjs/feathers';
import socketio from '@feathersjs/socketio-client';
import { createMuiTheme, responsiveFontSizes } from '@material-ui/core/styles';
import { ThemeProvider } from '@material-ui/styles';
import {
ThemeProps,
defaultPalette,
defaultTheme
} from './Configuration/Config';
import clone from './Utils/clone';
import Loading from './Utils/Loading';
import Login from './Login';
import Main from './Main';
import parseTheme from './Utils/parseTheme';
import 'typeface-roboto';
import '@mdi/font/css/materialdesignicons.min.css';
interface OnboardingProps extends RouteComponentProps {}
// Module-level singletons: one socket.io connection and one feathers client
// shared by every render of Onboarding.
let socket: SocketIOClient.Socket, client: any;
/**
 * Bootstrap component: connects to the Feathers API over socket.io, attempts
 * (re)authentication, loads the user's config and theme, then renders either
 * the Login screen or the Main app.
 */
function Onboarding(props: OnboardingProps) {
const [loginAttempted, setLoginAttempted] = React.useState(false);
const [loginCredentials, setLoggedIn] = React.useState();
const [config, setConfig] = React.useState();
const [configId, setConfigId] = React.useState();
const [theme, setTheme] = React.useState(
responsiveFontSizes(
createMuiTheme({
palette: defaultPalette
})
)
);
// Lazily create the feathers client on first mount (or route change).
useEffect(() => {
if (!client) {
client = feathers();
let path: string = clone(props.location.pathname);
// NOTE(review): operator precedence makes this "(PORT || isDev) ? '8234'
// : location.port", so a set REACT_APP_API_PORT still yields '8234' —
// confirm whether the env port was meant to be used directly.
let url: string = `${process.env.REACT_APP_API_PROTOCOL ||
window.location.protocol}//${process.env.REACT_APP_API_HOSTNAME ||
window.location.hostname}:${
process.env.REACT_APP_API_PORT || process.env.NODE_ENV === 'development'
? '8234'
: window.location.port
}`;
socket = io(url, { path: `${path}/socket.io`.replace('//', '/') });
client.configure(socketio(socket));
client.configure(authentication());
client.path = path;
}
}, [props.location]);
// Build a responsive MUI theme from a stored palette.
function handleSetTheme(palette: ThemeProps) {
setTheme(
responsiveFontSizes(createMuiTheme({ palette: parseTheme(palette) }))
);
}
// Fetch the user's config document (creating one on first login), then apply
// its theme.
const getConfig = useCallback((userId: string) => {
(async () => {
const configService = await client.service('config');
let getter = await configService.find({ userId });
if (!getter.data[0]) {
await configService.create({ createNew: true });
getConfig(userId);
return;
}
process.env.NODE_ENV === 'development' &&
console.log('Config:', getter.data[0]);
const configLcl = getter.data[0].config;
setConfig(configLcl);
setConfigId(getter.data[0]._id);
if (configLcl.theme) handleSetTheme(configLcl.theme);
})();
}, []);
// Authenticate with explicit credentials when given, otherwise try the
// stored token via reAuthenticate().
const handleLogin = useCallback(
(data?: any, callback?: (error?: string) => void) => {
(async () => {
try {
let clientData;
if (!client) {
console.warn('Feathers app is undefined');
} else if (!data) clientData = await client.reAuthenticate();
else clientData = await client.authenticate(data, callback);
console.log(clientData.user);
setLoggedIn(clientData.user);
setLoginAttempted(true);
getConfig(clientData.user._id);
} catch (error) {
console.error('Error in handleLogin:', error);
setLoginAttempted(true);
setLoggedIn(undefined);
if (callback) callback(`Login error: ${error.message}`);
}
})();
},
[getConfig]
);
// Attempt a silent re-login whenever we have no credentials.
useEffect(() => {
if (!loginCredentials) handleLogin();
}, [loginCredentials, handleLogin]);
// Create a user account, then log straight in with the same credentials.
function handleCreateAccount(data: any, callback?: (error?: string) => void) {
socket.emit('create', 'users', data, (error: any) => {
if (error) {
console.error('Error creating account:', error);
if (callback) callback(`Error creating account: ${error.message}`);
} else {
handleLogin({ strategy: 'local', ...data }, callback);
}
});
}
// Drop cached Home Assistant tokens, log out server-side, reload the app.
async function handleLogout() {
localStorage.removeItem('hass_tokens');
localStorage.removeItem('hass_url');
await client.logout();
window.location.replace(window.location.href);
}
// Persist a config change; only update local state once the server accepts.
function handleConfigChange(config: any) {
socket.emit('patch', 'config', configId, { config }, (error: any) => {
if (error) console.error('Error updating', configId, ':', error);
else {
setConfig(config);
process.env.NODE_ENV === 'development' &&
console.log('Updated config:', configId, config);
}
});
}
if (!loginAttempted)
return <Loading text="Attempting Login. Please Wait.." />;
// Global CSS override for the configured link colour.
const cssOverrides = `
a {color: ${(config && config.theme && config.theme.link_color) ||
defaultTheme.link_color};}
`;
return (
<ThemeProvider theme={theme}>
<style>{cssOverrides}</style>
{loginCredentials ? (
<Main
{...props}
config={config}
editing={0}
loginCredentials={loginCredentials}
handleConfigChange={handleConfigChange}
handleLogout={handleLogout}
handleSetTheme={handleSetTheme}
/>
) : (
<Login
{...props}
handleCreateAccount={handleCreateAccount}
handleLogin={handleLogin}
/>
)}
</ThemeProvider>
);
}
export default Onboarding;
|
<reponame>minuk8932/Algorithm_BaekJoon<gh_stars>1-10
package back_tracking;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
 *
 * @author minchoba
 * Baekjoon 15650: N and M (2) — print every strictly increasing sequence of
 * length M chosen from the numbers 1..N, in lexicographic order.
 *
 * @see https://www.acmicpc.net/problem/15650/
 *
 */
public class Boj15650 {
    private static StringBuilder sb = new StringBuilder();
    private static final String SPACE = " ";
    private static final String NEW_LINE = "\n";

    // arr[i] holds i while i is part of the current combination, 0 otherwise.
    private static int[] arr;

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());

        int N = Integer.parseInt(st.nextToken());
        int M = Integer.parseInt(st.nextToken());
        br.close();

        arr = new int[N + 1];
        backTracking(N, M, 1, 0);
        System.out.println(sb);
    }

    /**
     * Include/exclude backtracking: for each candidate number either take it
     * (count + 1) or skip it, always moving forward so results are increasing.
     *
     * @param n     largest selectable number
     * @param m     required combination length
     * @param start next candidate number to consider
     * @param count how many numbers are currently selected
     */
    private static void backTracking(int n, int m, int start, int count) {
        if(count == m) {
            for(int i = 1; i < arr.length; i++) { // found a complete selection
                if(arr[i] != 0) sb.append(arr[i]).append(SPACE);
            }
            sb.append(NEW_LINE);
            return;
        }
        if(start > n) return;

        arr[start] = start;
        backTracking(n, m, start + 1, count + 1);
        arr[start] = 0; // backtrack: exclude `start`
        backTracking(n, m, start + 1, count);
    }
}
|
package com.wczy.gulimall.member.dao;
import com.wczy.gulimall.member.entity.MemberReceiveAddressEntity;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis-Plus mapper for member shipping addresses.
 *
 * @author wczy
 * @email <EMAIL>
 * @date 2021-11-15 20:50:15
 */
@Mapper
public interface MemberReceiveAddressDao extends BaseMapper<MemberReceiveAddressEntity> {
    // All CRUD operations are inherited from BaseMapper; no custom queries yet.
}
|
#!/bin/bash
# Bioconda post-link script: download the scTHI.data experiment package from
# one of several mirrors, verify its md5sum, and install it into the conda R
# library.

FN="scTHI.data_1.6.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.14/data/experiment/src/contrib/scTHI.data_1.6.0.tar.gz"
  "https://bioarchive.galaxyproject.org/scTHI.data_1.6.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-scthi.data/bioconductor-scthi.data_1.6.0_src_all.tar.gz"
)
MD5="20f84fbf97a954050265834e23a77777"

# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"

SUCCESS=0
for URL in "${URLS[@]}"; do
  # -f: fail on HTTP errors instead of saving the server's error page;
  # -L: follow redirects (the mirrors redirect).
  curl -fL "$URL" -o "$TARBALL" || continue

  # Platform-specific md5sum checks.
  if [[ $(uname -s) == "Linux" ]]; then
    # GNU md5sum -c requires TWO spaces between checksum and filename;
    # a single space is rejected as "improperly formatted".
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done

if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi

# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
// Define the ErrorCodesEnum: symbolic error conditions that PageError maps to
// HTTP status headings (FAIL_INIT and INTERNAL_SERVER_ERROR both render 500).
enum ErrorCodesEnum {
  FORBIDDEN,
  NOT_FOUND,
  METHOD_NOT_SUPPORTED,
  INTERNAL_SERVER_ERROR,
  LOGGED_OUT,
  FAIL_INIT,
  COMING_SOON
}
// Implement the IPageError interface
class PageError implements IPageError {
  code: ErrorCodesEnum;
  title: string;
  titleKey?: string;
  text?: string;
  textKey?: string;

  constructor(code: ErrorCodesEnum, title: string, titleKey?: string, text?: string, textKey?: string) {
    this.code = code;
    this.title = title;
    this.titleKey = titleKey;
    this.text = text;
    this.textKey = textKey;
  }

  // Renders the error page for the given code; unknown codes fall back to a
  // generic "Error" heading.
  generateErrorPage(errorCode: ErrorCodesEnum): string {
    let heading: string;
    switch (errorCode) {
      case ErrorCodesEnum.FORBIDDEN:
        heading = "403 Forbidden";
        break;
      case ErrorCodesEnum.NOT_FOUND:
        heading = "404 Not Found";
        break;
      case ErrorCodesEnum.METHOD_NOT_SUPPORTED:
        heading = "405 Method Not Allowed";
        break;
      case ErrorCodesEnum.INTERNAL_SERVER_ERROR:
      case ErrorCodesEnum.FAIL_INIT:
        heading = "500 Internal Server Error";
        break;
      case ErrorCodesEnum.LOGGED_OUT:
        heading = "401 Unauthorized";
        break;
      case ErrorCodesEnum.COMING_SOON:
        heading = "503 Service Unavailable";
        break;
      default:
        heading = "Error";
    }
    return this.generatePageContent(heading, this.titleKey, this.text, this.textKey);
  }

  // Builds the HTML body: an <h1> heading plus an optional <p> paragraph.
  private generatePageContent(title: string, titleKey?: string, text?: string, textKey?: string): string {
    if (titleKey) {
      // Fetch title from localization file using titleKey
      // Example: title = localizationService.getTitle(titleKey);
    }
    const parts: string[] = [`<h1>${title}</h1>`];
    if (text || textKey) {
      if (textKey) {
        // Fetch text from localization file using textKey
        // Example: text = localizationService.getText(textKey);
      }
      parts.push(`<p>${text}</p>`);
    }
    return parts.join("");
  }
} |
package gov.cms.bfd.pipeline.rda.grpc;
import static gov.cms.bfd.pipeline.sharedutils.PipelineApplicationState.RDA_PERSISTENCE_UNIT_NAME;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.zaxxer.hikari.HikariDataSource;
import gov.cms.bfd.model.rda.Mbi;
import gov.cms.bfd.model.rif.schema.DatabaseSchemaManager;
import gov.cms.bfd.pipeline.sharedutils.DatabaseOptions;
import gov.cms.bfd.pipeline.sharedutils.PipelineApplicationState;
import java.sql.Connection;
import java.sql.DriverManager;
import java.time.Clock;
import javax.persistence.EntityManager;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
/** Shared helpers for RDA pipeline tests: metric assertions, a temporary in-memory DB, and MBI cache lookups. */
public class RdaPipelineTestUtils {
  /** Asserts the meter's event count equals {@code expected}; {@code meterName} labels the failure message. */
  public static void assertMeterReading(long expected, String meterName, Meter meter) {
    assertEquals(expected, meter.getCount(), "Meter " + meterName);
  }

  /** Asserts the gauge currently reports {@code expected}; compared as boxed Long to match the gauge's value type. */
  public static void assertGaugeReading(long expected, String gaugeName, Gauge<?> gauge) {
    assertEquals(Long.valueOf(expected), gauge.getValue(), "Gauge " + gaugeName);
  }

  /**
   * Creates a temporary in-memory HSQLDB that is destroyed when the test ends plus a
   * PipelineApplicationState and EntityManager using that db, passes them to the provided lambda
   * function, then closes them and destroys the database.
   *
   * @param testClass used to create a db name
   * @param clock used for the app state
   * @param test lambda to receive the appState and perform some testing
   */
  public static void runTestWithTemporaryDb(Class<?> testClass, Clock clock, DatabaseConsumer test)
      throws Exception {
    final String dbUrl = "jdbc:hsqldb:mem:" + testClass.getSimpleName();
    // the HSQLDB database will be destroyed when this connection is closed
    try (Connection dbLifetimeConnection =
        DriverManager.getConnection(dbUrl + ";shutdown=true", "", "")) {
      final DatabaseOptions dbOptions = new DatabaseOptions(dbUrl, "", "", 10);
      final MetricRegistry appMetrics = new MetricRegistry();
      final HikariDataSource dataSource =
          PipelineApplicationState.createPooledDataSource(dbOptions, appMetrics);
      // Schema must exist before the persistence unit is created below.
      DatabaseSchemaManager.createOrUpdateSchema(dataSource);
      try (PipelineApplicationState appState =
          new PipelineApplicationState(appMetrics, dataSource, RDA_PERSISTENCE_UNIT_NAME, clock)) {
        final EntityManager entityManager =
            appState.getEntityManagerFactory().createEntityManager();
        try {
          test.accept(appState, entityManager);
        } finally {
          // Always release the EntityManager even when the test lambda throws.
          entityManager.close();
        }
      }
    }
  }

  /**
   * Looks for a record in the MbiCache table using the given EntityManager.
   *
   * @param entityManager used to perform the query
   * @param mbi mbi string to look for
   * @return null if not cached otherwise the Mbi record from database
   */
  public static Mbi lookupCachedMbi(EntityManager entityManager, String mbi) {
    entityManager.getTransaction().begin();
    final CriteriaBuilder builder = entityManager.getCriteriaBuilder();
    final CriteriaQuery<Mbi> criteria = builder.createQuery(Mbi.class);
    final Root<Mbi> root = criteria.from(Mbi.class);
    criteria.select(root).where(builder.equal(root.get(Mbi.Fields.mbi), mbi));
    final var records = entityManager.createQuery(criteria).getResultList();
    entityManager.getTransaction().commit();
    return records.isEmpty() ? null : records.get(0);
  }

  /** Test body callback receiving the live app state and an open EntityManager. */
  @FunctionalInterface
  public interface DatabaseConsumer {
    void accept(PipelineApplicationState appState, EntityManager entityManager) throws Exception;
  }
}
|
package org.renan.android.firebase.auth;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import com.google.android.gms.auth.api.signin.GoogleSignIn;
import com.google.android.gms.auth.api.signin.GoogleSignInAccount;
import com.google.android.gms.auth.api.signin.GoogleSignInClient;
import com.google.android.gms.auth.api.signin.GoogleSignInOptions;
import com.google.android.gms.common.api.ApiException;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.Task;
import org.renan.android.dev.gsi.R;
/**
 * Created by stan on 10/03/2018.
 *
 * Static helpers bridging Google Sign-In to native (JNI) code: last-token
 * lookup, silent token refresh, building the sign-in intent, handling the
 * sign-in result, and sign-out. Async outcomes are delivered through the
 * native callbacks declared below.
 */
public class QGoogleSignIn
{
    /** Sentinel return value: the real answer will arrive asynchronously via the native callbacks. */
    public final static String WAIT_FOR_ASYNC_ANSWER = "wait_async";
    public final static int ERROR_INVALID_ACCOUNT = 0x10001;
    public final static int ERROR_INVALID_TOKEN = 0x10002;

    // TODO : return a more complex answer, if necessary
    /**
     * Returns the ID token of the account last signed in on this device,
     * or an empty string when there is no account or no token.
     */
    public static String getTokenIdFromSignedAccount(Context context)
    {
        assert (null != context);

        String token = "";
        final GoogleSignInAccount account = GoogleSignIn.getLastSignedInAccount(context);
        if (null != account)
        {
            token = account.getIdToken();
            if (null == token)
                token = "";
        }
        assert (null != token);
        return token;
    }

    // TODO : return a more complex answer, if necessary
    /**
     * Attempts a silent sign-in to refresh the ID token.
     *
     * @return the token when silentSignIn() completed immediately,
     *         WAIT_FOR_ASYNC_ANSWER when the result will be delivered later
     *         through onSuccessfulRefresh()/onFailedRefresh(), or "" on failure.
     */
    public static String refreshToken(final Context context, final boolean silently)
    {
        String token = "";

        // TODO : extend request to other things
        final GoogleSignInOptions gso = new GoogleSignInOptions.Builder(GoogleSignInOptions.DEFAULT_SIGN_IN)
                .requestEmail()
                .requestIdToken(context.getString(R.string.default_web_client_id))
                .build();
        if (null == gso)
            return "";
        final GoogleSignInClient gsiClient = GoogleSignIn.getClient(context, gso);
        if (null == gsiClient)
            return "";

        final Task<GoogleSignInAccount> task = gsiClient.silentSignIn();
        if (task.isSuccessful())
        {
            // Cached credentials were still valid: the token is available now.
            Log.i("refreshToken", "Immediate successful silentSignIn()");
            final GoogleSignInAccount account = task.getResult();
            if (null != account)
            {
                token = account.getIdToken();
                if (null == token)
                    token = "";
            }
        }
        else
        {
            task.addOnCompleteListener(new OnCompleteListener<GoogleSignInAccount>()
            {
                @Override
                public void onComplete(Task<GoogleSignInAccount> task)
                {
                    try
                    {
                        Log.i("refreshToken.onComplete", "Delayed result from silentSignIn()");
                        final GoogleSignInAccount account = task.getResult(ApiException.class);
                        if (null == account)
                            onFailedRefresh(ERROR_INVALID_ACCOUNT, silently);
                        else
                        {
                            final String token = account.getIdToken();
                            if (null == token)
                                onFailedRefresh(ERROR_INVALID_TOKEN, silently);
                            else
                                onSuccessfulRefresh(token);
                        }
                    } catch (ApiException apiException)
                    {
                        Log.w("refreshToken.onComplete", "error, statusCode=" + apiException.getStatusCode());
                        // Please refer to GoogleSignInStatusCodes Javadoc for details
                        onFailedRefresh(apiException.getStatusCode(), silently);
                    }
                }
            });
            // We inform the caller that an answer is to be received
            // Important : the answer could be received before this method
            // returns. Consequently it should be interpreted as "do nothing,
            // it is handled elsewhere".
            token = WAIT_FOR_ASYNC_ANSWER;
        }
        assert (null != token);
        return token;
    }

    // public static boolean tokenIsWaitForAnswer(String token)
    // {
    //     return 0 == WAIT_FOR_ASYNC_ANSWER.compareTo(token);
    // }
    //

    // Native (JNI) callbacks implemented on the C++ side.
    public static native void onSuccessfulRefresh(String token);
    public static native void onFailedRefresh(int statusCode, boolean silently);
    public static native void onSuccessfulSignOut();

    /**
     * Builds the Intent the Activity must launch to start the interactive
     * Google Sign-In flow, or null when the client could not be created.
     */
    public static Intent getGSIIntent(Context context)
    {
        assert (null != context);
        assert (context instanceof Activity);

        // TODO : extend request to other things
        final GoogleSignInOptions gso = new GoogleSignInOptions.Builder(GoogleSignInOptions.DEFAULT_SIGN_IN)
                .requestEmail()
                .requestIdToken(context.getString(R.string.default_web_client_id))
                .build();
        if (null == gso)
            return null;
        final GoogleSignInClient gsiClient = GoogleSignIn.getClient(context, gso);
        if (null == gsiClient)
            return null;

        final Intent signInIntent = gsiClient.getSignInIntent();
        return signInIntent;
    }

    // TODO : return a more complex answer, if necessary
    // 1- data
    // 2- error code from the exception in order to handle various cases
    // https://stackoverflow.com/questions/12325860/jni-getting-exception-info-attempted-to-read-or-write-protected-memory
    // or a big object including an error code
    /**
     * Extracts the ID token from the sign-in activity result; returns ""
     * when the sign-in failed (the failure is logged, not rethrown).
     */
    public static String handleSignInResult(Intent data)
    {
        assert (null != data);
        assert (data instanceof Intent);

        String token = "";
        // The Task returned from this call is always completed, no need to attach
        // a listener.
        // See: https://developers.google.com/identity/sign-in/android/
        final Task<GoogleSignInAccount> task = GoogleSignIn.getSignedInAccountFromIntent(data);
        try
        {
            final GoogleSignInAccount account = task.getResult(ApiException.class);
            token = account.getIdToken();
        }
        catch (ApiException e)
        {
            // The ApiException status code indicates the detailed failure reason.
            // Please refer to the GoogleSignInStatusCodes class reference for more information.
            Log.w("handleSignInResult", "signInResult:failed code=" + e.getStatusCode());
            Log.w("handleSignInResult", e);
        }
        assert (null != token);
        return token;
    }

    // return false if it did not succeed
    // return true if sign out of GSI completed
    // may return true if sign out did not succeed yet, but is still in progress
    // we do not distinguish the two cases.
    // Behaviour intended : do nothing until it returns true either directly
    // or with its callback when the Task is finished.
    public static boolean signOut(final Context context)
    {
        String token = "";

        // TODO : extend request to other things
        final GoogleSignInOptions gso = new GoogleSignInOptions.Builder(GoogleSignInOptions.DEFAULT_SIGN_IN)
                .requestEmail()
                .requestIdToken(context.getString(R.string.default_web_client_id))
                .build();
        if (null == gso)
            return false;
        final GoogleSignInClient gsiClient = GoogleSignIn.getClient(context, gso);
        if (null == gsiClient)
            return false;

        final Task<Void> task = gsiClient.signOut();
        if (task.isSuccessful())
        {
            Log.i("signOut", "Immediate successful signOut() from GSI");
            return true;
        }
        else
        {
            // Sign-out still in progress; completion is reported via the
            // native onSuccessfulSignOut() callback.
            task.addOnCompleteListener(new OnCompleteListener<Void>()
            {
                @Override
                public void onComplete(Task<Void> task)
                {
                    Log.i("signOut.onComplete", "Future signOut() completed");
                    onSuccessfulSignOut();
                }
            });
        }
        return false;
    }
}
|
/**
 * Page-management class for the DefaultController
 *
 * <pre>
 * Julien 12/02/15 Creation
 * </pre>
 * @author Julien
 * @version 1.0
 * @package Rubizz
 */
function Default()
{
    // ==== Constructor ====
} // Default
Default.prototype = {
    /**
     * Processing run once the page has finished loading: wires up the
     * open/submit/close handlers for the login and register popups.
     */
    ready: function(psLoginUrl)
    {
        // Open the login popup
        Default.prototype._manageLoginPopupOpening();
        // Submit the login popup
        Default.prototype._manageLoginPopupSubmitting(psLoginUrl);
        // Close the login popup
        Default.prototype._manageLoginPopupClosing();
        // Open the register popup
        Default.prototype._manageRegisterPopupOpening();
        // Submit the register popup
        Default.prototype._manageRegisterPopupSubmitting(psLoginUrl);
        // Close the register popup
        Default.prototype._manageRegisterPopupClosing();

        // Open the login popup from within the register popup
        $(document).on("click", "#div_RBZ_registerPopup #div_RBZ_notice a.RBZ_login", function() {
            $("#div_RBZ_registerPopup #div_RBZ_close").click();
            $("span.RBZ_default_login a").click();
            return false;
        });

        // Open the register popup from within the login popup
        $(document).on("click", "#div_RBZ_loginPopup #div_RBZ_notice a.RBZ_register", function() {
            $("#div_RBZ_loginPopup #div_RBZ_close").click();
            $("span.RBZ_default_register a").click();
            return false;
        });
    }, // ready

    _manageLoginPopupOpening: function()
    {
        $(document).on("click", "#a_RBZ_login", function() {
            // The target URL is stored in the non-standard 'hrefbis' attribute.
            var lsUrl = $(this).attr('hrefbis');
            Default.prototype.ajaxPopupLogin(lsUrl);
            scroll(0, 0);
            return false;
        });
    }, // _manageLoginPopupOpening

    _manageLoginPopupSubmitting: function(psLoginUrl)
    {
        $(document).on("click", "#input_RBZ_login_form ", function() {
            Default.prototype.ajaxPopupLoginSubmit('form_RBZ_login_form', psLoginUrl);
            return false;
        });
    }, // _manageLoginPopupSubmitting

    _manageLoginPopupClosing: function()
    {
        $(document).on("click", "#div_RBZ_loginPopup #div_RBZ_close", function() {
            $("#div_RBZ_loginPopup").detach();
            $("#div_RBZ_loginShadow").hide();
            return false;
        });
    }, // _manageLoginPopupClosing

    _manageRegisterPopupOpening: function()
    {
        $(document).on("click", "a.RBZ_register", function() {
            var lsUrl = $(this).attr('hrefbis');
            window.scrollTo(0, 0);
            Default.prototype.ajaxPopupRegister(lsUrl);
            return false;
        });
    }, // _manageRegisterPopupOpening

    _manageRegisterPopupSubmitting: function(psLoginUrl)
    {
        $(document).on("click", "#input_RBZ_register_form ", function() {
            Default.prototype.ajaxPopupRegisterSubmit('form_RBZ_register_form', psLoginUrl);
            return false;
        });
    }, // _manageRegisterPopupSubmitting

    _manageRegisterPopupClosing: function()
    {
        $(document).on("click", "#div_RBZ_registerPopup #div_RBZ_close", function() {
            $("#div_RBZ_registerPopup").detach();
            $("#div_RBZ_registerShadow").hide();
            return false;
        });
    }, // _manageRegisterPopupClosing

    /**
     * Loads the login popup via ajax
     */
    ajaxPopupLogin: function(psUrl)
    {
        // ==== Popup loading ====
        // ---- Popup definition ----
        $.get(psUrl, function(psAjax) {
            // The server answers 'refresh' when the session state changed.
            if (psAjax == 'refresh') {
                location.reload();
            }
            $("#div_RBZ_loginPopup").remove();
            // ---- Resize the overlay mask to cover the whole document ----
            $("#div_RBZ_loginShadow").css({ width : $(document).width(), height : $(document).height() });
            $("#div_RBZ_loginShadow").show();
            $("body").append(psAjax);
            // ---- Horizontal centering ----
            var liWidth = $("#div_RBZ_loginPopup").width(), liDocumentWidth = $(window).width(), liLeft = 0;
            if (liWidth < liDocumentWidth) liLeft = Math.round((liDocumentWidth - liWidth) / 2);
            $("#div_RBZ_loginPopup").css('left', liLeft);
        });
    }, // ajaxPopupLogin

    /**
     * Submits the form inside the login popup
     */
    ajaxPopupLoginSubmit: function(psFormName, psUrl)
    {
        var loForm = $("#" + psFormName);
        var loData = new FormData(loForm[0]);
        $.ajax({
            url: loForm.attr('action'),
            type: 'POST',
            data: loData,
            cache: false,
            contentType: false,
            processData: false,
            success: function(psReturn) {
                var laJson = $.parseJSON(psReturn);
                if ('OK' == laJson.status) {
                    //$("#div_RBZ_loginPopup").remove();
                    //$("#div_RBZ_loginShadow").hide();
                    // Redirect: server-provided URL wins over the fallback psUrl.
                    if (laJson.url != undefined) {
                        window.location = laJson.url;
                    } else {
                        if (psUrl != undefined) {
                            window.location = psUrl;
                        }
                    }
                } else {
                    $("#div_RBZ_error").text('');
                    $("#div_RBZ_error").append(laJson.error);
                }
            }
        });
    }, // ajaxPopupLoginSubmit

    /**
     * Loads the register popup via ajax
     */
    ajaxPopupRegister: function(psUrl)
    {
        // ==== Popup definition ====
        $.get(psUrl, function(psAjax) {
            if (psAjax == 'refresh') {
                location.reload();
            }
            $("#div_RBZ_registerPopup").remove();
            // ==== Resize the overlay mask to cover the whole document ====
            $("#div_RBZ_registerShadow").css({ width : $(document).width(), height : $(document).height() });
            $("#div_RBZ_registerShadow").show();
            $("body").append(psAjax);
            // ==== Horizontal centering ====
            var liWidth = $("#div_RBZ_registerPopup").width(), liDocumentWidth = $(window).width(), liLeft = 0;
            if (liWidth < liDocumentWidth) liLeft = Math.round((liDocumentWidth - liWidth) / 2);
            $("#div_RBZ_registerPopup").css('left', liLeft);
        });
    }, // ajaxPopupRegister

    /**
     * Submits the form inside the register popup
     */
    ajaxPopupRegisterSubmit: function(psFormName, psUrl)
    {
        var loForm = $("#" + psFormName);
        var loData = new FormData(loForm[0]);
        $.ajax({
            url: loForm.attr('action'),
            type: 'POST',
            data: loData,
            cache: false,
            contentType: false,
            processData: false,
            success: function(psReturn) {
                var laJson = $.parseJSON(psReturn);
                if ('OK' == laJson.status) {
                    //$("#div_RBZ_loginPopup").remove();
                    //$("#div_RBZ_loginShadow").hide();
                    if (laJson.url != undefined) {
                        window.location = laJson.url;
                    } else {
                        if (psUrl != undefined) {
                            window.location = psUrl;
                        }
                    }
                } else {
                    $("#div_RBZ_error").text('');
                    $("#div_RBZ_error").append(laJson.error);
                }
            }
        });
    }, // ajaxPopupRegisterSubmit

    /**
     * End token
     */
    _endPrototype : null
}; // Default.prototype
//==== Definition of the Default object goDefault ====
var goDefault = new Default();
|
// Ambient declaration describing a prediction-markets custom RPC method
// (marketOutcomeShareId) and the chain type definitions (Market, Report,
// MarketDispute, ...) to be registered with a polkadot-js style API.
declare const _default: {
    rpc: {
        predictionMarkets: {
            // RPC metadata: description, parameter list and return type name.
            marketOutcomeShareId: {
                description: string;
                params: ({
                    name: string;
                    type: string;
                    isOptional?: undefined;
                } | {
                    name: string;
                    type: string;
                    isOptional: boolean;
                })[];
                type: string;
            };
        };
    };
    types: {
        MarketCreation: {
            _enum: string[];
        };
        MarketEnd: {
            _enum: {
                Block: string;
                Timestamp: string;
            };
        };
        MarketId: string;
        MarketType: {
            _enum: string[];
        };
        MarketStatus: {
            _enum: string[];
        };
        Market: {
            creator: string;
            creation: string;
            creator_fee: string;
            oracle: string;
            end: string;
            metadata: string;
            market_type: string;
            market_status: string;
            report: string;
            categories: string;
            resolved_outcome: string;
        };
        Report: {
            at: string;
            by: string;
            outcome: string;
        };
        MarketDispute: {
            at: string;
            by: string;
            outcome: string;
        };
    };
};
export default _default;
|
#!/usr/bin/env bash
# Verify that every Go source file (excluding vendored and generated code)
# begins with the expected Apache 2.0 license header.

read -r -d '' EXPECTED <<EOF
// Copyright © DATE Banzai Cloud
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
EOF

STATUS=0

# NUL-delimited find + read: safe for file names containing spaces/newlines,
# unlike word-splitting the output of a bare $(find ...).
while IFS= read -r -d '' FILE; do
  # Replace the actual year with DATE so we can ignore the year when
  # checking for the license header.
  HEADER=$(head -n 13 "$FILE" | sed -E -e 's/Copyright © [0-9]+/Copyright © DATE/')
  if [ "$HEADER" != "$EXPECTED" ]; then
    echo "incorrect license header: $FILE"
    STATUS=1
  fi
done < <(find . -name "*.go" -not -path "./vendor/*" -not -path "*zz_generated*" -print0)

exit $STATUS
|
<reponame>radjan29901/ONG_MasterThesis_MVP<filename>web-app/server/src/app.js
const express = require('express')
const bodyParser = require('body-parser')
const cors = require('cors')
const morgan = require('morgan')
var network = require('./fabric/network.js');
const { stdout } = require('process');
const app = express()
app.use(morgan('combined'))
app.use(bodyParser.json())
app.use(cors())

// Returns authentication info from the fabric network helper.
app.get('/get_auth_info', (req, res) => {
    network.get_auth_info()
        .then((response) => {
            res.send(response)
        });
})

// Reports current authentication status.
app.get('/checkAuth', (req, res) => {
    network.checkAuth()
        .then((response) => {
            res.send(response)
        });
})

// Lists all donations visible to the current identity.
app.get('/queryAllDonations', (req, res) => {
    network.queryAllDonations()
        .then((response) => {
            var carsRecord = JSON.parse(response);
            res.send(carsRecord)
        });
})

// Lists all donations using the admin identity.
app.get('/queryAllDonationsAdmin', (req, res) => {
    network.queryAllDonationsAdmin()
        .then((response) => {
            var carsRecord = JSON.parse(response);
            res.send(carsRecord)
        });
})

// Authenticates a user.
// FIX: the original handler never responded, leaving the HTTP client hanging.
// NOTE(review): avoid logging raw credentials in production.
app.post('/auth', (req, res) => {
    var username = req.body.username;
    var password = req.body.password;
    network.auth(username, password)
        .then((response) => {
            res.send(response)
        })
})

// Enrolls a new user via the enrollAdmin.js helper script.
// FIX: the original built a shell command by concatenating request fields
// (command injection) and ended with `return stdout`, which returned the
// imported process.stdout stream and never answered the request. execFile
// passes each value as a separate argv entry, so no shell interpretation
// happens, and the response is sent from the completion callback.
app.post('/create_user', (req, res) => {
    var appAdmin = req.body.appAdmin;
    var appAdminSecret = req.body.appAdminSecret;
    var username = req.body.userName;
    var orgMSPID = req.body.orgMSPID;
    var caName = req.body.caName;
    var execFile = require('child_process').execFile;
    execFile('sudo', ['node', 'enrollAdmin.js', appAdmin, appAdminSecret, username, orgMSPID, caName],
        function (error, stdout, stderr) {
            console.log('stdout: ' + stdout);
            console.log('stderr: ' + stderr);
            if (error !== null) {
                console.log('exec error: ' + error);
                res.status(500).send('enrollment failed');
            } else {
                res.send(stdout);
            }
        });
})
// Returns today's date formatted as dd/mm/yyyy (zero-padded day and month).
function getDate() {
    const now = new Date();
    const day = String(now.getDate()).padStart(2, '0');
    const month = String(now.getMonth() + 1).padStart(2, '0'); // getMonth() is zero-based
    return day + '/' + month + '/' + now.getFullYear();
}
// Creates a new donation whose key is derived from the current donation count.
// FIX: the original chained `.then(...)` onto `console.log(...)` (which
// returns undefined), so the call threw a TypeError at runtime and the
// response was never sent. The .then now chains onto the createDonation
// promise and the log happens inside it.
app.post('/CreateDonation', (req, res) => {
    console.log(req.body);
    network.queryAllDonations()
        .then((response) => {
            console.log(response);
            var donationRecord = JSON.parse(response);
            var numDonation = donationRecord.length;
            var newKey = 'DONATION' + numDonation;
            var destinataire = 'ONG'
            network.createDonation(newKey, destinataire, req.body.amount, getDate(), '1')
                .then((response) => {
                    console.log("has been send!!!!!!!!!!!!!!")
                    res.send(response)
                })
        })
})

// Updates the type of an existing donation.
app.post('/change_type', (req, res) => {
    network.change_type(req.body.id, req.body.type)
        .then((response) => {
            res.send(response)
        })
})
app.listen(process.env.PORT || 8081) |
<reponame>cogwurx/jquery-mobile
test( "Textinput widget works correctly", function() {
	// Enhance the input into a button widget, then inspect the wrapper markup.
	var input = $( "#theButton" ).button();
	var isTextNode = function() {
		return this.nodeType == 3;
	};

	deepEqual( input.parent().is( "div.ui-btn.ui-input-btn" ), true, "The input-based button is wrapped in a div.ui-btn.ui-input-btn" );
	deepEqual( input.parent().contents().filter( isTextNode ).first().text(), "The Button", "The input-based button has a text node containing the button's value attributes." );
});
|
package com.cjy.flb.domain;
import org.scribe.builder.api.DefaultApi20;
import org.scribe.extractors.JsonRefreshTokenExtractor;
import org.scribe.extractors.JsonTokenExtractor;
import org.scribe.extractors.TokenExtractor;
import org.scribe.model.OAuthConfig;
import org.scribe.utils.OAuthEncoder;
/**
 * Scribe OAuth2 API definition for the caijiyun.cn endpoint: JSON token
 * extraction and code-grant authorization URLs.
 *
 * Created by Administrator on 2016/3/12 0012.
 */
public class Cdc12320ApiTest extends DefaultApi20 {
    private static final String URL = "https://caijiyun.cn";
    private static final String JAR_VERSION = "2.1";
    private static final String AUTHORIZE_HOST = URL;
    private static final String AUTHORIZE_URL = URL +
            "/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code";
    private static final String SCOPED_AUTHORIZE_URL = URL +
            "/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code&scope=%s";

    public Cdc12320ApiTest()
    {
    }

    /** Access-token responses from this server are JSON, not form-encoded. */
    public TokenExtractor getAccessTokenExtractor()
    {
        return new JsonTokenExtractor();
    }

    public TokenExtractor getRefreshTokenExtractor()
    {
        return new JsonRefreshTokenExtractor();
    }

    public String getAccessTokenEndpoint()
    {
        return URL + "/oauth/token";
    }

    /**
     * Builds the authorization URL for the code grant.
     *
     * Consistency fix: uses the AUTHORIZE_URL / SCOPED_AUTHORIZE_URL constants
     * instead of re-inlining the byte-identical format strings, so the
     * constants and the behavior cannot drift apart.
     */
    public String getAuthorizationUrl(OAuthConfig config)
    {
        return config.hasScope()
                ? String.format(SCOPED_AUTHORIZE_URL,
                        config.getApiKey(),
                        OAuthEncoder.encode(config.getCallback()),
                        OAuthEncoder.encode(config.getScope()))
                : String.format(AUTHORIZE_URL,
                        config.getApiKey(),
                        OAuthEncoder.encode(config.getCallback()));
    }
}
|
package com.zdy2;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanNameGenerator;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
/**
 * @Auther: weiliang
 * @Date: 2020/12/31 23:22
 * @Description: Programmatically registers a FactoryBeanLuban2 bean definition,
 *               mimicking how MyBatis registers one factory bean per mapper interface.
 */
public class ImportBeanDefinitionRegistrar2 implements ImportBeanDefinitionRegistrar {
    public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata,
                                        BeanDefinitionRegistry registry,
                                        BeanNameGenerator importBeanNameGenerator) {
        // Here one could loop over all mapper interfaces found in a package:
        // for (int i = 0; i < ; i++) {
        BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(FactoryBeanLuban2.class);
        AbstractBeanDefinition beanDefinition = builder.getBeanDefinition(); // BeanDefinitionBuilder yields the BeanDefinition to register
        // The constructor argument could come from the loop above — that is how a
        // large number of mappers would be handled by a single registrar.
        beanDefinition.getConstructorArgumentValues().addGenericArgumentValue("com.zdy2.CityMapper");
        registry.registerBeanDefinition("xxx", beanDefinition);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.config;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.configuration.AbstractConfiguration;
import com.netflix.config.ConcurrentMapConfiguration;
/**
 * Disable delimiter parsing for string values, so comma-containing property
 * values are kept intact instead of being split into lists.
 */
@SuppressWarnings("unchecked")
public class ConcurrentMapConfigurationExt extends ConcurrentMapConfiguration {
  public ConcurrentMapConfigurationExt() {
    super();
    setDelimiterParsingDisabled(true);
  }

  public ConcurrentMapConfigurationExt(Map<String, Object> mapToCopy) {
    super();
    setDelimiterParsingDisabled(true);
    // Copy into a fresh ConcurrentHashMap so later mutation of mapToCopy
    // does not leak into this configuration.
    map = new ConcurrentHashMap<>(mapToCopy);
  }

  public ConcurrentMapConfigurationExt(AbstractConfiguration config) {
    super();
    // NOTE(review): delimiter parsing is disabled on the *source* config (so
    // getProperty() below returns raw, un-split values) but — unlike the other
    // constructors — not on this instance. Confirm the asymmetry is intended.
    config.setDelimiterParsingDisabled(true);
    for (Iterator<String> i = config.getKeys(); i.hasNext(); ) {
      String name = i.next();
      Object value = config.getProperty(name);
      map.put(name, value);
    }
  }
}
|
<gh_stars>1-10
/**
*
* @creatTime 下午4:53:17
* @author Eddy
*/
package org.eddy.tiger.annotated.impl;
import java.lang.reflect.Constructor;
import javax.enterprise.inject.spi.AnnotatedConstructor;
/**
 * AnnotatedConstructor implementation wrapping a reflective constructor.
 *
 * @author Eddy
 *
 */
@SuppressWarnings("all")
public class AnnotatedConstructorImpl<X> extends AnnotatedCallableImpl<X> implements AnnotatedConstructor<X> {
	private Constructor<X> constructor;

	/**
	 * Constructor (no-arg)
	 *
	 * @creatTime 12:56:25 PM
	 * @author Eddy
	 */
	public AnnotatedConstructorImpl() {
		// TODO Auto-generated constructor stub
	}

	public AnnotatedConstructorImpl(Constructor constructor) {
		super(constructor);
		this.constructor = constructor;
		// Make non-public constructors invocable through this wrapper.
		if (!constructor.isAccessible()) constructor.setAccessible(true);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see javax.enterprise.inject.spi.AnnotatedConstructor#getJavaMember()
	 */
	@Override
	public Constructor<X> getJavaMember() {
		return this.constructor;
	}
}
|
package dao
import (
"shippo-server/internal/model"
)
// TempDao provides data access for the temporary 2022-01-08 trade table.
type TempDao struct {
	*Dao
}

// NewTempDao wraps the shared Dao with the temp-table helpers.
func NewTempDao(s *Dao) *TempDao {
	return &TempDao{s}
}
// Temp_trade_20220108_findByTradeId looks up a single order by its trade id.
func (d *TempDao) Temp_trade_20220108_findByTradeId(id string) (p model.Temp_trade_20220108, err error) {
	err = d.db.Where("trade_id", id).Limit(1).Find(&p).Error
	return
}
// Temp_trade_20220108_findByUserQQ looks up all orders placed by the given user QQ.
func (d *TempDao) Temp_trade_20220108_findByUserQQ(qq string) (p []model.Temp_trade_20220108, err error) {
	err = d.db.Where("user_qq", qq).Find(&p).Error
	return
}
// Temp_trade_20220108_save creates or updates the order record (original
// comment: "create order") and returns the possibly-mutated struct.
func (d *TempDao) Temp_trade_20220108_save(p model.Temp_trade_20220108) (model.Temp_trade_20220108, error) {
	return p, d.db.Save(&p).Error
}
// Temp_trade_20220108_findSuccess aggregates orders per user and returns
// users whose summed trade amount is >= 233, counting only orders with
// status 0 (normal). (Original comment: "query orders with amount >= 233
// and status 0 (normal)".)
// NOTE(review): the HAVING clause relies on the MySQL extension of
// referencing the SELECT alias "amount" — confirm the target database
// supports this.
func (d *TempDao) Temp_trade_20220108_findSuccess() (p []model.Temp_trade_20220108_FindSuccessResult, err error) {
	err = d.db.Model(&model.Temp_trade_20220108{}).Select("user_qq", "sum(trade_amount) as amount").
		Group("user_qq").Where("amount_status", 0).Having("amount>=233").Find(&p).Error
	return
}
|
<filename>src/test/java/reposense/parser/GroupConfigParserTest.java
package reposense.parser;
import static reposense.util.TestUtil.loadResource;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import reposense.model.FileType;
import reposense.model.GroupConfiguration;
/**
 * Unit tests for {@code GroupConfigCsvParser}: parsing group-configuration
 * CSV files with multiple/empty locations, reordered columns, and missing
 * or unknown headers.
 */
public class GroupConfigParserTest {
    // CSV fixtures loaded from the test resources directory.
    private static final Path GROUP_CONFIG_MULTI_LOCATION_FILE = loadResource(GroupConfigParserTest.class,
            "GroupConfigParserTest/groupconfig_multipleLocation_test.csv");
    private static final Path GROUP_CONFIG_EMPTY_LOCATION_FILE = loadResource(GroupConfigParserTest.class,
            "GroupConfigParserTest/groupconfig_emptyLocation_test.csv");
    private static final Path GROUP_CONFIG_DIFFERENT_COLUMN_ORDER_FILE = loadResource(GroupConfigParserTest.class,
            "GroupConfigParserTest/groupconfig_differentColumnOrder_test.csv");
    private static final Path GROUP_CONFIG_MISSING_OPTIONAL_HEADER_FILE = loadResource(GroupConfigParserTest.class,
            "GroupConfigParserTest/groupconfig_missingOptionalHeader_test.csv");
    private static final Path GROUP_CONFIG_MISSING_MANDATORY_HEADER_FILE = loadResource(GroupConfigParserTest.class,
            "GroupConfigParserTest/groupconfig_missingMandatoryHeader_test.csv");
    private static final Path GROUP_CONFIG_UNKNOWN_HEADER_FILE = loadResource(GroupConfigParserTest.class,
            "GroupConfigParserTest/groupconfig_unknownHeader_test.csv");

    // Expected parse results for the testrepo-Beta and testrepo-Delta rows
    // in the fixtures above.
    private static final String TEST_REPO_BETA_LOCATION = "https://github.com/reposense/testrepo-Beta.git";
    private static final List<FileType> TEST_REPO_BETA_GROUPS = Arrays.asList(
            new FileType("Code", Arrays.asList("**/*.java", "**/*.py")),
            new FileType("Docs", Collections.singletonList("docs/**")));
    private static final String TEST_REPO_DELTA_LOCATION = "https://github.com/reposense/testrepo-Delta.git";
    private static final List<FileType> TEST_REPO_DELTA_GROUPS = Arrays.asList(
            new FileType("Main", Collections.singletonList("src/main/**")),
            new FileType("Test", Arrays.asList("src/test/**", "src/systest/**")));

    /** Rows with an empty location column still produce a configuration. */
    @Test
    public void groupConfig_emptyLocation_success() throws Exception {
        GroupConfigCsvParser groupConfigCsvParser = new GroupConfigCsvParser(GROUP_CONFIG_EMPTY_LOCATION_FILE);
        List<GroupConfiguration> groupConfigs = groupConfigCsvParser.parse();
        Assertions.assertEquals(2, groupConfigs.size());
        GroupConfiguration actualReposenseConfig = groupConfigs.get(0);
        Assertions.assertEquals(2, actualReposenseConfig.getGroupsList().size());
        GroupConfiguration actualEmptyLocationConfig = groupConfigs.get(1);
        Assertions.assertEquals(1, actualEmptyLocationConfig.getGroupsList().size());
    }

    /** Multiple repo locations each get their own configuration with the right groups. */
    @Test
    public void groupConfig_multipleLocations_success() throws Exception {
        GroupConfigCsvParser groupConfigCsvParser = new GroupConfigCsvParser(GROUP_CONFIG_MULTI_LOCATION_FILE);
        List<GroupConfiguration> groupConfigs = groupConfigCsvParser.parse();
        Assertions.assertEquals(2, groupConfigs.size());
        GroupConfiguration actualBetaConfig = groupConfigs.get(0);
        Assertions.assertEquals(TEST_REPO_BETA_LOCATION, actualBetaConfig.getLocation().toString());
        Assertions.assertEquals(TEST_REPO_BETA_GROUPS, actualBetaConfig.getGroupsList());
        GroupConfiguration actualDeltaConfig = groupConfigs.get(1);
        Assertions.assertEquals(TEST_REPO_DELTA_LOCATION, actualDeltaConfig.getLocation().toString());
        Assertions.assertEquals(TEST_REPO_DELTA_GROUPS, actualDeltaConfig.getGroupsList());
    }

    /** Column order in the CSV must not affect the parse result. */
    @Test
    public void groupConfig_differentColumnOrder_success() throws Exception {
        GroupConfigCsvParser groupConfigCsvParser = new GroupConfigCsvParser(GROUP_CONFIG_DIFFERENT_COLUMN_ORDER_FILE);
        List<GroupConfiguration> groupConfigs = groupConfigCsvParser.parse();
        Assertions.assertEquals(2, groupConfigs.size());
        GroupConfiguration actualBetaConfig = groupConfigs.get(0);
        Assertions.assertEquals(TEST_REPO_BETA_LOCATION, actualBetaConfig.getLocation().toString());
        Assertions.assertEquals(TEST_REPO_BETA_GROUPS, actualBetaConfig.getGroupsList());
        GroupConfiguration actualDeltaConfig = groupConfigs.get(1);
        Assertions.assertEquals(TEST_REPO_DELTA_LOCATION, actualDeltaConfig.getLocation().toString());
        Assertions.assertEquals(TEST_REPO_DELTA_GROUPS, actualDeltaConfig.getGroupsList());
    }

    /** A missing optional header is tolerated. */
    @Test
    public void groupConfig_missingOptionalHeader_success() throws Exception {
        GroupConfigCsvParser groupConfigCsvParser = new GroupConfigCsvParser(GROUP_CONFIG_MISSING_OPTIONAL_HEADER_FILE);
        List<GroupConfiguration> groupConfigs = groupConfigCsvParser.parse();
        Assertions.assertEquals(1, groupConfigs.size());
        Assertions.assertEquals(3, groupConfigs.get(0).getGroupsList().size());
    }

    /** A missing mandatory header aborts the parse. */
    @Test
    public void groupConfig_missingMandatoryHeader_throwsInvalidCsvException() throws Exception {
        GroupConfigCsvParser groupConfigCsvParser = new GroupConfigCsvParser(
                GROUP_CONFIG_MISSING_MANDATORY_HEADER_FILE);
        Assertions.assertThrows(InvalidCsvException.class, () -> groupConfigCsvParser.parse());
    }

    /** An unrecognized header aborts the parse. */
    @Test
    public void groupConfig_unknownHeader_throwsInvalidHeaderException() throws Exception {
        GroupConfigCsvParser groupConfigCsvParser = new GroupConfigCsvParser(
                GROUP_CONFIG_UNKNOWN_HEADER_FILE);
        Assertions.assertThrows(InvalidHeaderException.class, () -> groupConfigCsvParser.parse());
    }
}
|
<reponame>dxzhan/RuntimeAgent<gh_stars>0
package com.resms.lightsentinel.manager.listener;
import com.resms.lightsentinel.manager.LightSentinelManagerBootstrap;
import com.resms.lightsentinel.common.listener.AbstractLightSentinelListener;
import com.resms.lightsentinel.common.handler.LightSentinelEventRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* LightSentinel事件监听器
*
* @author sam
*/
@Component
public class LightSentinelManagerListener extends AbstractLightSentinelListener {
private static final Logger logger = LoggerFactory.getLogger(LightSentinelManagerListener.class);
@Autowired
public LightSentinelManagerListener(LightSentinelEventRegistry factory, LightSentinelManagerBootstrap bootstrap) {
super(factory,bootstrap);
}
} |
<filename>mcosm/src/main/java/bleach/mcosm/command/OSMCommand.java<gh_stars>10-100
package bleach.mcosm.command;
import java.math.BigDecimal;
import java.math.RoundingMode;
import bleach.mcosm.McOSM;
import bleach.mcosm.gui.GuiOSM;
import bleach.mcosm.utils.GeoPos;
import net.minecraft.client.Minecraft;
import net.minecraft.command.CommandBase;
import net.minecraft.command.CommandException;
import net.minecraft.command.ICommandSender;
import net.minecraft.command.WrongUsageException;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.text.TextComponentString;
/**
 * The /osm chat command: opens the OSM import GUI for the player's current
 * position, for an explicit bounding box, or stops all running imports.
 */
public class OSMCommand extends CommandBase {

    @Override
    public String getName() {
        return "osm";
    }

    @Override
    public String getUsage(ICommandSender sender) {
        return "/osm | /osm <lat1> <lon1> <lat2> <lon2> | /osm stop";
    }

    /**
     * Dispatches the command.
     *
     * <p>No args: open the GUI around the player's position (+/-0.001 deg).
     * Four args: open the GUI for the given lat/lon bounding box.
     * {@code stop}: stop all running OSM instances.
     *
     * @throws CommandException on invalid position, bad coordinates, or
     *         wrong usage
     */
    @Override
    public void execute(MinecraftServer server, ICommandSender sender, String[] args) throws CommandException {
        if (args.length == 0) {
            double[] d = GeoPos.toLatLonBTE(Minecraft.getMinecraft().player.getPosition());
            // BUG FIX: the original condition tested d[0] twice and never
            // NaN-checked d[1]. Double.isFinite() is false for NaN and both
            // infinities, so one check per coordinate covers every bad case.
            if (!Double.isFinite(d[0]) || !Double.isFinite(d[1])) {
                throw new CommandException("Error: Invalid Current GeoPos", new Object[0]);
            }
            // Round the +/-0.001 degree bounding box to 6 decimal places.
            McOSM.guiQueue.add(new GuiOSM(
                    new BigDecimal(d[0] - 0.001).setScale(6, RoundingMode.HALF_UP).doubleValue(),
                    new BigDecimal(d[1] - 0.001).setScale(6, RoundingMode.HALF_UP).doubleValue(),
                    new BigDecimal(d[0] + 0.001).setScale(6, RoundingMode.HALF_UP).doubleValue(),
                    new BigDecimal(d[1] + 0.001).setScale(6, RoundingMode.HALF_UP).doubleValue()));
        } else if (args.length == 4) {
            try {
                McOSM.guiQueue.add(new GuiOSM(Double.parseDouble(args[0]), Double.parseDouble(args[1]),
                        Double.parseDouble(args[2]), Double.parseDouble(args[3])));
            } catch (NumberFormatException e) {
                e.printStackTrace();
                throw new CommandException("Invalid Coordinate [Arg ?]", new Object[0]);
            }
        } else if (args.length == 1 && args[0].equalsIgnoreCase("stop")) {
            McOSM.osmInst.stop();
            sender.sendMessage(new TextComponentString("Stopped all running OSM instances!"));
        } else {
            throw new WrongUsageException(getUsage(sender), new Object[0]);
        }
    }
}
|
#!/bin/sh
#
# Copyright (c) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0
#
# Write a Wind River Linux image and an octeontx u-boot image to a block
# device, then copy the u-boot boot files into the target boot partition.

wrimg=$1
uboot=$2
dev=$3

print_help() {
    # Quote the example pattern so a matching file in the CWD cannot be
    # glob-expanded into the usage text.
    echo "Usage:"
    echo "$0 wrlinux-*.ustart.img.gz octeontx-bootfs-uboot-t96.img /dev/sdX"
    echo
    exit 1
}

# All three arguments are required and the target device must exist.
if [ $# -ne 3 ] || [ ! -e "$dev" ]; then
    print_help
fi

# The wrlinux image must be a .ustart.img.gz or a .wic file that exists.
echo "$wrimg" | grep -q -e "ustart.img.gz$" -e ".wic$"
if [ $? -ne 0 ] || [ ! -e "$wrimg" ]; then
    echo "The wrlinux image $wrimg is invalid or doesn't exist"
    exit 1
fi

if [ ! -e "$uboot" ]; then
    # (typo fix: "imaage" -> "image")
    echo "The u-boot image $uboot doesn't exist"
    exit 1
fi

echo "Writing $wrimg and $uboot to $dev"
# printf instead of the non-portable 'echo -n' under #!/bin/sh.
printf "Are you sure to destroy all data on %s?(Y/N) " "$dev"
read confirm
if [ "$confirm" != "y" ] && [ "$confirm" != "Y" ]; then
    echo Aborted
    exit 1
fi

# Unmount any auto-mounted partitions of the target (glob intentional).
echo "umount $dev*"
sudo umount "$dev"*

if [ "${wrimg%.wic}" = "${wrimg}" ]; then
    # Not a .wic, so it is the gzip'd ustart image: decompress while writing.
    zcat "$wrimg" | sudo dd of="$dev" bs=1M status=progress conv=fsync
else
    sudo dd if="$wrimg" of="$dev" bs=1M status=progress conv=fsync
fi

# Write u-boot, skipping the first 128 sectors (64 KiB) on both sides to
# preserve the partition table written by the image above.
sudo dd if="$uboot" of="$dev" bs=512 seek=128 skip=128 conv=fsync

# Attach the u-boot image to a loop device so its boot partition can be
# mounted. 'losetup -j' runs under sudo too: the loop device was created
# with elevated privileges and listing may otherwise fail.
sudo losetup -f -P "$uboot"
uboot_dev=$(sudo losetup -j "$uboot" | sed -e '1q' | sed -e 's/:.*//')
sudo partprobe "$uboot_dev"
sudo partprobe "$dev"

mkdir -p uboot_mnt && sudo mount "${uboot_dev}p1" ./uboot_mnt

# mmcblk/nbd/nvme/loop devices name partitions with a 'p' separator
# (e.g. /dev/nvme0n1p1) while sd devices do not (e.g. /dev/sda1).
case "$dev" in
    /dev/mmcblk*|/dev/nbd*|/dev/nvme*|/dev/loop*)
        dev="${dev}p"
        ;;
esac

# Copy the u-boot boot files onto the freshly written boot partition.
mkdir -p wrboot && sudo mount "${dev}1" ./wrboot && sudo cp -r ./uboot_mnt/* ./wrboot/

# Clean up mounts, the loop device, and the temporary mount points.
sudo umount "${uboot_dev}p1" wrboot
sudo losetup -d "$uboot_dev"
rm -fr uboot_mnt wrboot
|
from django.db import models
class Message(models.Model):
    """Contact-form message submitted by a site visitor."""

    name = models.CharField(max_length=140)
    email = models.EmailField()
    # Optional contact/location details.
    phone = models.CharField(max_length=55, blank=True, null=True)
    country = models.CharField(max_length=74, blank=True, null=True)
    city = models.CharField(max_length=55, blank=True, null=True)
    subject = models.CharField(max_length=255)
    message = models.TextField()
    # Flipped once the message has been handled; drives default ordering.
    is_read = models.BooleanField(default=False)
    # Day (date only, no time) the message arrived; set on insert.
    date_created = models.DateField(auto_now_add=True)

    class Meta:
        # Unread first, then newest first, ties broken by sender email.
        ordering = [
            "is_read",
            "-date_created",
            "email",
        ]
|
package io.opensphere.core.event;
import java.util.concurrent.Executor;
import io.opensphere.core.util.Service;
/**
 * Listener handle for EventManager events: a {@link Service} that subscribes
 * the listener on {@link #open()} and unsubscribes it on {@link #close()}.
 *
 * @param <T> The event type.
 */
public class EventManagerListenerHandle<T extends Event> implements Service
{
    /** Manager used for subscribe/unsubscribe calls. */
    private final EventManager myEventManager;

    /** The event type being listened for. */
    private Class<T> myType;

    /** The listener registered with the manager. */
    private EventListener<? super T> mySubscriber;

    /**
     * Constructor notifying the subscriber inline (no executor).
     *
     * @param eventManager the event manager.
     * @param type The event type.
     * @param subscriber The listener instance.
     */
    public EventManagerListenerHandle(EventManager eventManager, Class<T> type, EventListener<? super T> subscriber)
    {
        this(eventManager, type, subscriber, null);
    }

    /**
     * Constructor.
     *
     * @param eventManager the event manager.
     * @param type The event type.
     * @param subscriber The listener instance.
     * @param executor The executor on which to notify the subscriber, or
     *            {@code null} to notify inline.
     */
    public EventManagerListenerHandle(EventManager eventManager, Class<T> type, EventListener<? super T> subscriber,
            Executor executor)
    {
        myEventManager = eventManager;
        myType = type;
        if (executor == null)
        {
            mySubscriber = subscriber;
        }
        else
        {
            // Wrap so every notification is dispatched on the executor.
            mySubscriber = event -> executor.execute(() -> subscriber.notify(event));
        }
    }

    /**
     * Constructor for subclasses that set the subscriber later via
     * {@link #setSubscriber(Class, EventListener)}.
     *
     * @param eventManager the event manager.
     */
    protected EventManagerListenerHandle(EventManager eventManager)
    {
        this(eventManager, null, null);
    }

    @Override
    public void open()
    {
        myEventManager.subscribe(myType, mySubscriber);
    }

    @Override
    public void close()
    {
        myEventManager.unsubscribe(myType, mySubscriber);
    }

    /**
     * Sets the subscriber.
     *
     * @param type The event type.
     * @param subscriber The listener instance.
     */
    protected void setSubscriber(Class<T> type, EventListener<? super T> subscriber)
    {
        myType = type;
        mySubscriber = subscriber;
    }
}
|
<reponame>BarelyAliveMau5/JogoDoLixo
package com.lixo;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.math.RandomXS128;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.Array;
import com.lixo.Lixeira.Cor;
import com.lixo.Logica.EstadoJogo;
import com.lixo.Projetil.TipoProjetil;
/**
 * Game world: owns the trash bins, the projectiles in flight, the launch
 * vector and the collision bounds, and forwards hits/misses to the game
 * logic.
 */
public class Mundo
{
    /** Gravity applied to projectiles (slight leftward drift, strong downward pull). */
    public static final Vector2 gravidade = new Vector2(-1f, -30f);
    /** Minimum time in seconds between two throws. */
    public static final float INTERVALO_LANCAMENTO = 3f;

    /** Game rules and score/chance state. */
    public Logica logica_do_jogo;
    /** Launch vector: magnitude is the force, direction is the throw angle. */
    public Vector2 aceleracao;
    // Collision bounds for the floor and the right wall.
    Rectangle limite_chao;
    Rectangle limite_parede;
    Lixo jogo;
    Array<Lixeira> lixeiras;
    Projetil projetil;
    Array<Projetil> projeteis;
    // Seconds elapsed since the last throw.
    float ultimo_lance;

    /**
     * Builds the world: collision bounds, game logic, and four trash bins
     * placed in shuffled positions along the bottom of the screen.
     */
    public Mundo(Lixo jogo)
    {
        projetil = new Projetil(TipoProjetil.INVISIVEL, 0,0);
        projeteis = new Array<Projetil>();
        aceleracao = new Vector2(1,1);
        this.jogo = jogo;
        // Start beyond INTERVALO_LANCAMENTO so the first throw is allowed at once.
        ultimo_lance = 5f;
        logica_do_jogo = new Logica();
        limite_chao = new Rectangle(0,-60,Assets.TELA_LARGURA+100,100);
        limite_parede = new Rectangle(Assets.TELA_LARGURA -10,0 ,100,Assets.TELA_ALTURA*2);
        lixeiras = new Array<Lixeira>();
        Array<Vector2> posicoes = new Array<Vector2>();
        for (float i=0.54f;i<0.91f; i+=0.12f)
            posicoes.add(new Vector2(Assets.TELA_LARGURA * i - 50, Assets.TELA_ALTURA * 0.1f));
        // always generate the bins in random positions ("extra credit for a simple thing")
        RandomXS128 rnd = new RandomXS128();
        for (int i=0;i<4; i++)
            posicoes.swap((int)(rnd.nextFloat() * 4), (int)(rnd.nextFloat()*4));
        lixeiras.add(new Lixeira(Cor.AMARELA, posicoes.pop()));
        lixeiras.add(new Lixeira(Cor.AZUL, posicoes.pop()));
        lixeiras.add(new Lixeira(Cor.VERDE, posicoes.pop()));
        lixeiras.add(new Lixeira(Cor.VERMELHA, posicoes.pop()));
    }

    /**
     * Collision pass over all active projectiles: floor/wall contact counts
     * as a miss; bin contact is scored as hit or wrong-bin by the bin itself.
     */
    public void checarLimites(Array<Projetil> projeteis)
    {
        boolean hit;
        for(Projetil projetil : projeteis)
        {
            hit = false;
            if (projetil.ativo)
            {
                if ( (limite_chao.contains(projetil.posicao)
                        || limite_parede.contains(projetil.posicao)))
                {
                    // Hit floor or wall: a plain miss (no wrong-bin penalty).
                    projetil.desativar();
                    hit = true;
                    logica_do_jogo.errou(false);
                    Assets.tocarSom(Assets.som_soco);
                    continue;
                    //System.out.println("gotcha x="+projetil.posicao.x + ", y="+projetil.posicao.y);
                }
                for(Lixeira lixeira : lixeiras)
                {
                    if (!hit) {
                        switch (lixeira.testarContatoLixo(projetil.posicao, projetil.tipo))
                        {
                            case ACERTOU:
                                // Right bin for this trash type.
                                logica_do_jogo.acertou();
                                hit = true;
                                break;
                            case ERROU:
                                // Landed in the wrong bin.
                                hit = true;
                                logica_do_jogo.errou(true);
                                break;
                            case SEM_CONTATO:
                                hit = false;
                            default:
                                break;
                        }
                    }
                }
                if (hit)
                {
                    // Glass gets its own impact sound.
                    if (projetil.tipo == TipoProjetil.VIDRO)
                        Assets.tocarSom(Assets.som_vidro);
                    else
                        Assets.tocarSom(Assets.som_lixo);
                    projetil.desativar();
                    projetil.desaparecer();
                }
            }
        }
    }

    /** Magnitude of the launch vector. */
    public double getForca()
    {
        return Math.sqrt(aceleracao.y * aceleracao.y + aceleracao.x * aceleracao.x);
    }

    /** Launch angle in degrees. */
    public double getAngulo()
    {
        return Math.toDegrees(Math.atan2(aceleracao.y, aceleracao.x));
    }

    /** Advances the throw timer; true when the launch cooldown has elapsed. */
    boolean proximoLancePossivel()
    {
        ultimo_lance = ultimo_lance + Gdx.graphics.getDeltaTime();
        if (ultimo_lance > INTERVALO_LANCAMENTO)
            return true;
        else
            return false;
    }

    /**
     * Launches the current projectile with the current launch vector, if the
     * cooldown allows, then advances the game logic to the next projectile.
     */
    public void lancarProjetil()
    {
        if (proximoLancePossivel())
        {
            ultimo_lance = 0f;
            projeteis.add(new Projetil(logica_do_jogo.getProjetil(),
                    Assets.TELA_LARGURA * 0.15f,
                    Assets.TELA_ALTURA * 0.32f));
            projeteis.peek().lancar(aceleracao);
            /* projetil = new Projetil(logica_do_jogo.getProjetil(),
                    Assets.TELA_LARGURA * 0.15f,
                    Assets.TELA_ALTURA * 0.32f);
            projetil.lancar(aceleracao); */
            logica_do_jogo.proximoProjetil();
        }
    }

    /** Nudges the launch angle by one degree, clamped to [11, 39]. */
    public void setAngulo(boolean aumentar)
    {
        double forca = getForca();
        double angulo = getAngulo();
        if (angulo > 39)
            angulo = 39;
        if (angulo < 11)
            angulo = 11;
        double tetha = Math.toRadians(aumentar? angulo-1 : angulo+1);
        aceleracao.x = (float) (forca* Math.cos(tetha));
        aceleracao.y = (float) (forca* Math.sin(tetha));
    }

    /** Nudges the launch force by 0.1, clamped to [5, 20]. */
    public void setForca(boolean aumentar)
    {
        double forca = aumentar ? getForca()+0.1f : getForca()-0.1f;
        double tetha = Math.toRadians(getAngulo());
        if (forca > 20)
            forca = 20;
        if (forca < 5)
            forca = 5;
        aceleracao.x = (float) (forca* Math.cos(tetha));
        aceleracao.y = (float) (forca* Math.sin(tetha));
    }

    /** Per-frame update: collisions, throw timer, and end-of-game transition. */
    public void atualizar()
    {
        checarLimites(projeteis);
        proximoLancePossivel();
        if(logica_do_jogo.estado != EstadoJogo.JOGANDO)
            jogo.setScreen(new Fim(jogo, logica_do_jogo.estado, logica_do_jogo.getChances()+5));
    }

    /** Draws bins, the aim arrow/slingshot, the queued projectile, and all flying projectiles. */
    public void desenhar(Batch batch)
    {
        for (Lixeira lixeira : lixeiras)
            lixeira.desenhar(batch);
        float angulo = (float) getAngulo();
        /* warning: mega-hack code ahead (original: "aviso de codigo mega gambiarra") */
        batch.draw(Assets.seta_ponta, aceleracao.x *10 + Assets.TELA_LARGURA * 0.125f , aceleracao.y *10+Assets.TELA_ALTURA * 0.25f,
                0f, 8f, 16f, 16f, 2f, 2f, angulo);
        batch.draw(Assets.borracha, Assets.TELA_LARGURA * 0.15f , Assets.TELA_ALTURA * 0.32f,
                0f, 8f, (float) -getForca() *4f, 8f, 1f, 1f, angulo);
        batch.draw(logica_do_jogo.getProjetilTextureRegion(),
                (float) -getForca() *4.2f + Assets.TELA_LARGURA * 0.15f -16f,
                (float) -getForca() *3f + Assets.TELA_ALTURA * 0.32f,
                Assets.TELA_LARGURA * 0.15f-32,
                Assets.TELA_ALTURA * 0.3f-64,32,48,1,1,angulo-30f);
        batch.draw(Assets.borracha, Assets.TELA_LARGURA * 0.08f , Assets.TELA_ALTURA * 0.32f,
                0f, 8f, (float) -getForca() *3f, 8f, 1f, 1f, angulo *1.3f + 10f);
        /* end of the hack (original: "fim da super gambiarra") */
        for(Projetil projetil : projeteis)
            projetil.desenhar(jogo.batch, Gdx.graphics.getDeltaTime());
    }
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-3111-1
#
# Security announcement date: 2016-10-27 00:00:00 UTC
# Script generation date: 2017-02-06 21:05:45 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: i386
#
# Vulnerable packages fix on version:
# - firefox:49.0.2+build2-0ubuntu0.12.04.1
#
# Last versions recommended by security team:
# - firefox:51.0.1+build2-0ubuntu0.12.04.2
#
# CVE List:
# - CVE-2016-5287
# - CVE-2016-5288
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade only (never freshly install) firefox to the patched version.
sudo apt-get install --only-upgrade firefox=51.0.1+build2-0ubuntu0.12.04.2 -y
|
<reponame>chec/headlesscommerce.org
import { GetStaticProps } from "next";
import shuffle from "lodash.shuffle";
import { NextSeo } from "next-seo";
import { getPageBySlug, getAllAgencies } from "../lib/queries";
import Layout from "../components/layout";
import Agencies from "../components/agencies";
// Build-time data fetch: loads the "agencies" page content and the agency
// list, shuffling the list so no agency is permanently ranked first.
// ISR: the page is re-generated at most every 5 seconds.
export const getStaticProps: GetStaticProps = async () => {
  const { page } = await getPageBySlug("agencies");
  const { agencies } = await getAllAgencies();

  return {
    props: {
      page,
      // Shuffle at build/revalidate time for fair ordering.
      agencies: shuffle(agencies),
    },
    revalidate: 5,
  };
};
// Renders the agencies directory page: CMS-driven layout/SEO title plus the
// shuffled agency grid from getStaticProps.
export const AgenciesPage = ({ page, agencies }) => {
  return (
    <Layout {...page}>
      <NextSeo title={page.title} />
      <div className="max-w-5xl mx-auto">
        <Agencies items={agencies} />
      </div>
    </Layout>
  );
};

export default AgenciesPage;
|
/**
 * Minimal wrapper around {@code java.util.ArrayList} exposing only the
 * add/get/remove/size operations for strings.
 */
public class StringArrayList {

    // Fully qualified: the original file had no import for ArrayList, so it
    // did not compile. Qualifying the type fixes that without adding imports.
    private java.util.ArrayList<String> list;

    /** Creates an empty list. */
    public StringArrayList() {
        list = new java.util.ArrayList<String>();
    }

    /**
     * Appends a string to the end of the list.
     *
     * @param s the string to append
     */
    public void add(String s) {
        list.add(s);
    }

    /**
     * Returns the element at the given index.
     *
     * @param i zero-based index
     * @return the element at {@code i}
     */
    public String get(int i) {
        return list.get(i);
    }

    /**
     * Removes the element at the given index, shifting later elements left.
     *
     * @param i zero-based index
     */
    public void remove(int i) {
        list.remove(i);
    }

    /** @return the number of elements currently stored */
    public int size() {
        return list.size();
    }
}
package com.github.fitzoh.spring.ec2;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
import org.springframework.cloud.client.ServiceInstance;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Amazon EC2 based {@link DiscoveryClient}: resolves service instances by
 * describing EC2 instances through the AWS SDK.
 */
public class Ec2DiscoveryClient implements DiscoveryClient {

    /** EC2 API client used to look up instances. */
    private final AmazonEC2 ec2;

    /** Ids of the services this client advertises. */
    private final List<String> serviceIds;

    /** Per-service configuration, keyed by service id. */
    private final Map<String, Ec2ServiceProperties> servicesById;

    public Ec2DiscoveryClient(AmazonEC2 ec2, Ec2DiscoveryProperties ec2DiscoveryProperties) {
        this.ec2 = ec2;
        this.serviceIds = ec2DiscoveryProperties.getServiceIds();
        this.servicesById = ec2DiscoveryProperties.getServicesById();
    }

    @Override
    public String description() {
        return "Amazon EC2 Discovery Client";
    }

    @Override
    public List<ServiceInstance> getInstances(String serviceId) {
        Ec2ServiceProperties serviceProperties = servicesById.get(serviceId);
        // NOTE(review): the request carries no filters, so this describes ALL
        // instances visible in the account/region regardless of serviceId —
        // confirm whether tag/name filtering was intended here. Also note
        // serviceProperties may be null for an unknown serviceId.
        DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest();
        return ec2.describeInstances(describeInstancesRequest)
                .getReservations()
                .stream()
                // Flatten reservations into their individual instances.
                .flatMap(reservation -> reservation.getInstances().stream())
                .map(instance -> new Ec2ServiceInstance(serviceProperties, instance))
                .collect(Collectors.toList());
    }

    @Override
    public List<String> getServices() {
        return serviceIds;
    }
}
|
#!/bin/bash
set -e

# Container entrypoint: copies mounted doc sources into /www, builds the
# docs, then runs the docs watcher as the long-lived foreground process
# while forwarding SIGTERM/SIGINT to it for clean shutdown.

# PID of the node watcher process; 0 means "not started yet".
PID=0

sigterm_handler() {
  echo "Caught SIGTERM signal! Shutting down..."
  if [ "$PID" -ne 0 ]; then
    kill -SIGTERM "$PID"
    wait "$PID"
  fi
  exit 143; # 128 + 15 -- SIGTERM
}

sigint_handler() {
  echo "Caught SIGINT signal! Shutting down..."
  if [ "$PID" -ne 0 ]; then
    kill -SIGTERM "$PID"
    wait "$PID"
  fi
  exit 130; # 128 + 2 -- SIGINT
}

# ${!} is the backgrounded 'tail' in the keep-alive loop below; killing it
# unblocks 'wait' so the handler runs immediately. CONSISTENCY FIX: the
# original only did this for SIGTERM, delaying SIGINT handling.
trap 'kill ${!}; sigterm_handler' SIGTERM
trap 'kill ${!}; sigint_handler' SIGINT

# ROBUSTNESS FIX: -p keeps 'set -e' from aborting when /www already exists
# (e.g. on container restart with a reused volume).
mkdir -p /www

echo "Start copying mounted folders..."
# --relative with /./ anchors preserves each repo's path under /www.
rsync --relative -az --exclude node_modules/ \
    /root/./docs \
    /root/./NativeScript \
    /root/./nativescript-angular \
    /root/./nativescript-sdk-examples-ng \
    /root/./sidekick-docs \
    /root/./nativescript-cli \
    /www

/www/docs/build/build-docs.sh
/www/docs/build/nginx-setup.sh

cd /www/docs/build/docs-watcher
npm install
node index.js & PID="$!"

# Keep PID 1 alive while remaining interruptible: 'wait' on the background
# tail returns whenever a trapped signal arrives.
while true
do
  tail -f /dev/null & wait ${!}
done
package com.h8.nh.nhoodengine.matrix.utils;
import com.h8.nh.nhoodengine.core.DataResource;
import com.h8.nh.nhoodengine.core.DataResourceKey;
import com.h8.nh.nhoodengine.matrix.impl.DataMatrixCellResource;
import java.util.UUID;
/**
 * Static factory helpers that build data resources carrying random UUID
 * payloads for a given key. Intended for tests and fixtures.
 */
public final class DataResourceUtils {

    /** Non-instantiable utility class. */
    private DataResourceUtils() {
    }

    /**
     * Builds a {@link DataResource} for the given key with a random UUID value.
     *
     * @param key the resource key
     * @return a new data resource
     */
    public static DataResource<DataResourceKey, Object> resource(DataResourceKey key) {
        UUID value = UUID.randomUUID();
        return new DataResource<>(key, value);
    }

    /**
     * Builds a {@link DataMatrixCellResource} for the given key with a random
     * UUID identifier.
     *
     * @param key the resource key
     * @return a new matrix cell resource
     */
    public static DataMatrixCellResource<DataResourceKey> matrixCellResource(DataResourceKey key) {
        UUID id = UUID.randomUUID();
        return new DataMatrixCellResource<>(id, key);
    }
}
|
#!/bin/bash -eux

# Apply the RHEL slow-DNS workaround only on VirtualBox builders.
case "$PACKER_BUILDER_TYPE" in
  virtualbox*)
    ## https://access.redhat.com/site/solutions/58625 (subscription required)
    # add 'single-request-reopen' so it is included when /etc/resolv.conf is generated
    echo 'RES_OPTIONS="single-request-reopen"' >> /etc/sysconfig/network
    service network restart
    echo 'Slow DNS fix applied (single-request-reopen)'
    ;;
  *)
    echo 'Slow DNS fix not required for this platform, skipping'
    ;;
esac
|
#!/bin/sh
#
# MIT License
#
# Copyright (c) 2018 Ingo Theiss <ingo.theiss@i-matrixx.de>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# User-visible strings and defaults for the install wizard (network settings).
PKG_STR_WIZARD_INSTALL_TITLE="Please specify the openHAB network settings"
PKG_STR_HTTP_PORT="HTTP port"
PKG_STR_HTTP_PORT_DEFAULT="48080"
PKG_STR_HTTPS_PORT="HTTPS port"
PKG_STR_HTTPS_PORT_DEFAULT="48443"
# Shown by the wizard when a port fails client-side validation.
PKG_STR_INVALID_PORT="The port number must be from 1 to 65535."
PKG_STR_LISTEN_INET_ADDR="Listen address"
##
# NAME
#   create_listen_addr_store
# SYNOPSIS
#   Build a list of all ip addresses for selection
# FUNCTION
#   This function is responsible for building a list of all ip addresses to listen on.
# INPUTS
#   -
# RESULT
#   A list of ip addresses consumable by an Ext.data.ArrayStore
##
function create_listen_addr_store()
{
    # Wildcard and loopback entries are always offered first.
    local listen_addr_list="[\"0.0.0.0\"], [\"127.0.0.1\"]"

    # Append every globally-scoped IPv4 address of the host, sorted, each
    # wrapped as an ArrayStore row: ,["a.b.c.d"]
    listen_addr_list+="$(ip -o -f inet addr show | awk '/scope global/ {split($4,addr,"/*"); print " ,[\"" addr[1] "\"]"}' | sort)"

    # SAFETY FIX: use a fixed '%s' format so the data is never interpreted
    # as a printf format string (the original passed it as the format).
    printf '%s' "${listen_addr_list}"
}
##
# NAME
#   create_install_settings_step
# SYNOPSIS
#   Build an installation wizard step
# FUNCTION
#   This function is responsible for building an installation wizard step.
# INPUTS
#   -
# RESULT
#   Text consumable by the Synology wizard
##
function create_install_settings_step()
{
    local step=""
    # The default is to listen on all interfaces
    local default_inet_addr="0.0.0.0"
    # A list with possible listen addresses
    local inet_addr_store="$(create_listen_addr_store)"

    # The heredoc below is the literal JSON for one wizard step: two port
    # text fields (with inline JS validators checking 1..65535) and a
    # combobox of listen addresses. Do not add comments inside it — every
    # line of the heredoc ends up in the emitted JSON.
    step="$(/bin/cat <<-EOF
	{
		"step_title": "${PKG_STR_WIZARD_INSTALL_TITLE}",
		"items": [{
			"type": "textfield",
			"subitems": [{
				"key": "pkgwizard_http_port",
				"desc": "${PKG_STR_HTTP_PORT}",
				"defaultValue": "${PKG_STR_HTTP_PORT_DEFAULT}",
				"invalidText": "${PKG_STR_INVALID_PORT}",
				"validator": {
					"allowBlank": false,
					"minLength": 1,
					"maxLength": 5,
					"fn": "{
						var port = parseInt(arguments[0]);
						var regExp = new RegExp('^[0-9]*$');
						var isValid = ((arguments[0].match(regExp)) && (0 < port) && (65536 > port));
						return isValid;
					}"
				}
			},{
				"key": "pkgwizard_https_port",
				"desc": "${PKG_STR_HTTPS_PORT}",
				"defaultValue": "${PKG_STR_HTTPS_PORT_DEFAULT}",
				"invalidText": "${PKG_STR_INVALID_PORT}",
				"validator": {
					"allowBlank": false,
					"minLength": 1,
					"maxLength": 5,
					"fn": "{
						var port = parseInt(arguments[0]);
						var regExp = new RegExp('^[0-9]*$');
						var isValid = ((arguments[0].match(regExp)) && (0 < port) && (65536 > port));
						return isValid;
					}"
				}
			}]
		},{
			"type": "combobox",
			"subitems": [{
				"key": "pkgwizard_inet_addr",
				"desc": "${PKG_STR_LISTEN_INET_ADDR}",
				"editable": false,
				"mode": "local",
				"value": "${default_inet_addr}",
				"valueField": "inet_addr",
				"displayField": "inet_addr",
				"store": {
					"xtype": "arraystore",
					"fields": ["inet_addr"],
					"data": [${inet_addr_store}]
				}
			}]
		}]
	}
	EOF
    )"

    printf "${step}\n"
}
# NOTE(review): this is an undocumented duplicate of the documented
# get_install_wizard_steps() defined further below. In shell the later
# definition wins, so this copy is dead code — consider removing it.
get_install_wizard_steps()
{
    local steps="$(create_install_settings_step)"
    if [[ -n "${steps}" ]]
    then
        printf "[${steps}]\n"
    fi
}
##
# NAME
#   get_install_wizard_steps
# SYNOPSIS
#   Create an install wizard step
# FUNCTION
#   This function is responsible for building an install wizard step.
# INPUTS
#   -
# RESULT
#   Text consumable by the Synology wizard
##
function get_install_wizard_steps()
{
    local steps="$(create_install_settings_step)"
    if [[ -n "${steps}" ]]
    then
        # SAFETY FIX: use a fixed format so any '%' in the generated JSON
        # cannot be interpreted as a printf directive.
        printf '[%s]\n' "${steps}"
    fi
}
typeset install_wizard_steps="$(get_install_wizard_steps)"

# BUG FIX: the original tested ${get_install_wizard_steps} — the *function*
# name, which is never set as a variable — so the emptiness check always
# succeeded and the script exited before writing the wizard file. Test the
# variable actually assigned above.
if [[ -z "${install_wizard_steps}" ]]
then
    exit 0
fi

# Fixed '%s' format so '%' in the JSON payload is emitted verbatim.
printf '%s\n' "${install_wizard_steps}" > "${SYNOPKG_TEMP_LOGFILE}"
exit 0
|
#!/bin/bash
#
# Publish shared service interfaces and common classes into the sibling
# txf-admin project, then clear both projects' generated proxy caches so
# they are rebuilt against the new sources.

# ROBUSTNESS FIX: abort on the first failed copy so a partial publish can
# never end by printing "publish success".
set -e

cp -r ./app/Service/Interfaces/ ../txf-admin/app/Service/Interfaces/
cp -r ./app/Com/ResponseCode.php ../txf-admin/app/Com/ResponseCode.php
cp -r ./app/Com/RedisKeyMap.php ../txf-admin/app/Com/RedisKeyMap.php
cp -r ./app/Com/Log.php ../txf-admin/app/Com/Log.php

# Drop generated proxy classes in both projects.
rm -rf ./runtime/container/proxy/
rm -rf ../txf-admin/runtime/container/proxy/

echo "publish success"
import React, { useState } from 'react';
// material-ui
import { makeStyles } from '@material-ui/styles';
import {
Avatar, Box, ButtonBase, Card, CardContent, Grid, InputAdornment, OutlinedInput, Popper
} from '@material-ui/core';
// third-party
import PopupState, { bindPopper, bindToggle } from 'material-ui-popup-state';
// project imports
// assets
import { IconAdjustmentsHorizontal, IconSearch, IconX } from '@tabler/icons';
// style constant
const useStyles = makeStyles((theme) => ({
notchedOutline: {
borderColor: "#858689 !important",
color:"#fff"
},
focused: {
"& $notchedOutline": {
borderColor: "yellow"
}
},
searchControl: {
height: '45px',
width: '100%',
color:"#aeb0b2",
// marginLeft: '16px',
paddingRight: '16px',
paddingLeft: '16px',
'& input': {
background: 'transparent !important',
paddingLeft: '5px !important'
},
},
startAdornment: {
fontSize: '1rem',
color:"#aeb0b2"
},
headerAvatar: {
commonAvatar: {
cursor: 'pointer',
borderRadius: '8px'
},
mediumAvatar: {
width: '30px',
height: '30px',
fontSize: '1.2rem'
},
background: '#7e4dfc',
color: '#5e35b1',
'&:hover': {
background: '#000',
color: '#fff'
}
},
closeAvatar: {
commonAvatar: {
cursor: 'pointer',
borderRadius: '8px'
},
mediumAvatar: {
width: '34px',
height: '34px',
fontSize: '1.2rem'
},
background: '#fbe9e7',
color: '#d84315',
'&:hover': {
background: '#d84315',
color: '#fbe9e7'
}
},
popperContainer: {
zIndex: 1100,
width: '99%',
top: '-55px !important',
padding: '0 12px',
},
cardContent: {
padding: '12px !important'
},
card: {
background: '#fff',
},
}));
// ===========================|| SEARCH INPUT ||=========================== //
// Header search box (desktop only: hidden on xs). Tracks the query locally
// and reports it to the parent via props.reqSearch when the trailing
// adornment button is clicked.
const SearchSection = (props) => {
    const classes = useStyles();
    // Current contents of the search field.
    const [value, setValue] = useState('');

    // Forward the current query to the parent's search handler.
    const respSearchFunc = () => {
        props.reqSearch(value)
    }

    return (
        <>
            <Box sx={{ display: { xs: 'none', md: 'block' } }} style={{
                color: "#fff !important"
            }} >
                <OutlinedInput
                    className={classes.searchControl}
                    classes={{ notchedOutline: classes.notchedOutline }}
                    id="input-search-header"
                    value={value}
                    onChange={(e) => setValue(e.target.value)}
                    placeholder="Search"
                    startAdornment={(
                        <InputAdornment position="start">
                            <IconSearch stroke={1.5} size="1rem" className={classes.startAdornment} />
                        </InputAdornment>
                    )}
                    endAdornment={(
                        <InputAdornment position="end">
                            {/* Clicking the avatar button triggers the search. */}
                            <ButtonBase sx={{ borderRadius: '7px' }} onClick={() => respSearchFunc()}>
                                <Avatar
                                    variant="rounded"
                                    className={classes.headerAvatar}
                                    style={{
                                        backgroundColor: '#FFBC00', color: '#000', fontSize: '7px', width: '30px', height: '30px'
                                    }}
                                >
                                    <IconAdjustmentsHorizontal stroke={1.5} size="1.3rem" />
                                </Avatar>
                            </ButtonBase>
                        </InputAdornment>
                    )}
                    aria-describedby="search-helper-text"
                    inputProps={{
                        'aria-label': 'weight',
                    }}
                />
            </Box>
        </>
    );
};
|
<filename>pkg/service/tenant.go<gh_stars>0
package service
import (
"encoding/json"
"net/http"
"github.com/labstack/echo/v4"
"github.com/projectsyn/lieutenant-api/pkg/api"
synv1alpha1 "github.com/projectsyn/lieutenant-operator/pkg/apis/syn/v1alpha1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"sigs.k8s.io/controller-runtime/pkg/client"
)
// ListTenants lists all tenants.
//
// Every Tenant CRD in the configured namespace is converted to its API
// representation and the collection is returned as JSON with HTTP 200.
func (s *APIImpl) ListTenants(c echo.Context) error {
	ctx := c.(*APIContext)
	tenantList := &synv1alpha1.TenantList{}
	if err := ctx.client.List(ctx.context, tenantList, client.InNamespace(s.namespace)); err != nil {
		return err
	}
	// Start from an empty (non-nil) slice so an empty result serializes as [].
	tenants := []api.Tenant{}
	for _, tenant := range tenantList.Items {
		apiTenant := api.NewAPITenantFromCRD(tenant)
		tenants = append(tenants, *apiTenant)
	}
	return ctx.JSON(http.StatusOK, tenants)
}
// CreateTenant creates a new tenant.
//
// The request body is bound, a fresh tenant id is generated server-side,
// and the resulting Tenant CRD is created in the configured namespace. The
// created tenant is returned with HTTP 201.
func (s *APIImpl) CreateTenant(c echo.Context) error {
	ctx := c.(*APIContext)
	var newTenant *api.CreateTenantJSONRequestBody
	if err := ctx.Bind(&newTenant); err != nil {
		return echo.NewHTTPError(http.StatusBadRequest, err)
	}
	apiTenant := &api.Tenant{
		TenantProperties: api.TenantProperties(*newTenant),
	}
	// Ids are always generated here; any client-supplied id is ignored.
	id, err := api.GenerateTenantID()
	if err != nil {
		return err
	}
	apiTenant.TenantId = id
	tenant := api.NewCRDFromAPITenant(*apiTenant)
	tenant.Namespace = s.namespace
	if err := ctx.client.Create(ctx.context, tenant); err != nil {
		return err
	}
	// Round-trip through the CRD so any defaults applied during conversion
	// are reflected in the response.
	apiTenant = api.NewAPITenantFromCRD(*tenant)
	return ctx.JSON(http.StatusCreated, apiTenant)
}
// DeleteTenant deletes a tenant.
// DELETE handler: removes the Tenant CR named tenantID from the service
// namespace and answers with HTTP 204 (no body) on success.
func (s *APIImpl) DeleteTenant(c echo.Context, tenantID api.TenantIdParameter) error {
	apiCtx := c.(*APIContext)
	// Name and namespace are sufficient to address the object for deletion.
	victim := &synv1alpha1.Tenant{
		ObjectMeta: metav1.ObjectMeta{
			Namespace: s.namespace,
			Name:      string(tenantID),
		},
	}
	if err := apiCtx.client.Delete(apiCtx.context, victim); err != nil {
		return err
	}
	return apiCtx.NoContent(http.StatusNoContent)
}
// GetTenant gets a tenant.
// GET handler: looks up the Tenant CR named tenantID in the service
// namespace and returns its API representation with HTTP 200.
func (s *APIImpl) GetTenant(c echo.Context, tenantID api.TenantIdParameter) error {
	ctx := c.(*APIContext)
	tenant := &synv1alpha1.Tenant{}
	if err := ctx.client.Get(ctx.context, client.ObjectKey{Name: string(tenantID), Namespace: s.namespace}, tenant); err != nil {
		return err
	}
	apiTenant := api.NewAPITenantFromCRD(*tenant)
	return ctx.JSON(http.StatusOK, apiTenant)
}
// UpdateTenant updates a tenant.
// PATCH-style handler: decodes a partial TenantProperties document and only
// overwrites the fields that are present, then saves the CR (HTTP 200).
func (s *APIImpl) UpdateTenant(c echo.Context, tenantID api.TenantIdParameter) error {
	ctx := c.(*APIContext)
	var patchTenant api.TenantProperties
	dec := json.NewDecoder(ctx.Request().Body)
	if err := dec.Decode(&patchTenant); err != nil {
		return echo.NewHTTPError(http.StatusBadRequest, err)
	}
	existingTenant := &synv1alpha1.Tenant{}
	if err := ctx.client.Get(ctx.context, client.ObjectKey{Name: string(tenantID), Namespace: s.namespace}, existingTenant); err != nil {
		return err
	}
	// Only non-nil fields of the patch are applied; absent fields keep their
	// existing values.
	if patchTenant.DisplayName != nil {
		existingTenant.Spec.DisplayName = *patchTenant.DisplayName
	}
	if patchTenant.GitRepo != nil {
		if patchTenant.GitRepo.Url != nil {
			existingTenant.Spec.GitRepoURL = *patchTenant.GitRepo.Url
		}
	}
	if err := ctx.client.Update(ctx.context, existingTenant); err != nil {
		return err
	}
	apiTenant := api.NewAPITenantFromCRD(*existingTenant)
	return ctx.JSON(http.StatusOK, apiTenant)
}
|
import { Type } from "../types";
// Redux reducer for the items collection.
// State is an array of item objects, each carrying a unique `id`.
export const itemsReducer = (state = [], action) => {
  if (action.type === Type.ITEM_DATA_FETCH_SUCCESS) {
    // Replace the whole collection with the freshly fetched payload.
    return action.payload;
  }
  if (action.type === Type.ITEM_ADD_SUCCESS) {
    // Append the new item without mutating the previous state.
    return state.concat([action.payload]);
  }
  if (action.type === Type.ITEM_DELETE_SUCCESS) {
    // payload is the id of the removed item.
    return state.filter((entry) => entry.id !== action.payload);
  }
  if (action.type === Type.ITEM_UPDATE_SUCCESS) {
    // payload is the full updated item; swap it in by id.
    return state.map((entry) =>
      entry.id === action.payload.id ? action.payload : entry
    );
  }
  return state;
};
|
const {ApolloServer} = require("apollo-server");
const {ApolloGateway, RemoteGraphQLDataSource} = require("@apollo/gateway");
// Gateway-side data source that forwards the incoming request's
// Authorization header value to every downstream federated service.
class AuthenticatedDataSource extends RemoteGraphQLDataSource {
    willSendRequest({request, context}) {
        // context.authHeaderValue is populated in the server's context factory.
        request.http.headers.set('Authorization', context.authHeaderValue);
    }
}
// Federation gateway composing the three local subgraphs.
const gateway = new ApolloGateway({
    serviceList: [
        {name: "auth-service", url: "http://localhost:8081/graphql"},
        {name: "planet-service", url: "http://localhost:8082/graphql"},
        {name: "satellite-service", url: "http://localhost:8083/graphql"}
    ],
    // Wrap every subgraph with the auth-forwarding data source above.
    buildService({name, url}) {
        return new AuthenticatedDataSource({url});
    },
});
const server = new ApolloServer({
    // Subscriptions are not supported through the gateway; the context
    // captures the raw Authorization header for forwarding downstream.
    gateway, subscriptions: false, context: ({req}) => ({
        authHeaderValue: req.headers.authorization
    })
});
server.listen().then(({url}) => {
    console.log(`🚀 Server ready at ${url}`);
});
|
/**
* @module react
*/
import React from 'react'
/**
* @module Counter
*/
import Counter from 'components/buttons/Counter'
/**
* @module Icon
*/
import Icon from 'components/icon'
/**
* @module expect
*/
import chai, { expect } from 'chai'
/**
* @module shallow
*/
import { shallow } from 'enzyme'
/**
* @module ChaiEnzyme
*/
import chaiEnzyme from 'chai-enzyme'
chai.use(chaiEnzyme())
describe('Components - buttons - Counter', () => {
let wrapper
beforeEach(() => {
wrapper = shallow(<Counter />)
})
it('should exist', () => {
expect(wrapper).to.exist
})
it('should have default count state of 0', () => {
expect(wrapper.state('count')).to.equal(0)
})
it('should have default min prop to be 0', () => {
expect(wrapper.instance().props.min).to.equal(0)
})
it('should have default max prop to be 50', () => {
expect(wrapper.instance().props.max).to.equal(50)
})
it('should update count state to 10 if the default count prop is initially set to 10', () => {
wrapper = shallow(<Counter defaultCount={10} />)
expect(wrapper.state('count')).to.equal(10)
})
it('should update count state to 5 if the default count prop is initially set to 10 but the max is 5', () => {
wrapper = shallow(<Counter defaultCount={10} max={5} />)
expect(wrapper.state('count')).to.equal(5)
})
it('should update count state to 2 if the default count prop is initially set to 1 but the min is 2', () => {
wrapper = shallow(<Counter defaultCount={1} min={2} />)
expect(wrapper.state('count')).to.equal(2)
})
it('should update count state to 10 if the min prop changes to 10', () => {
wrapper = shallow(<Counter defaultCount={8} />)
wrapper.setProps({
min: 10
})
expect(wrapper.state('count')).to.equal(10)
})
it('should update count state to 10 if the max prop changes to 10', () => {
wrapper = shallow(<Counter defaultCount={12} />)
wrapper.setProps({
max: 10
})
expect(wrapper.state('count')).to.equal(10)
})
it('should increment by 1 if the plus icon is clicked', () => {
wrapper.find(Icon).last().simulate('click')
expect(wrapper.state('count')).to.equal(1)
})
it('should decrement by 1 if the plus icon is clicked', () => {
wrapper.find(Icon).first().simulate('click')
expect(wrapper.state('count')).to.equal(0)
})
it('should have a count of an empty string', () => {
wrapper.instance().handleInputChange({
target: {
value: ''
}
})
expect(wrapper.state('count')).to.equal('')
})
it('should have a count of 10', () => {
wrapper.instance().handleInputChange({
target: {
value: 10
}
})
expect(wrapper.state('count')).to.equal(10)
})
})
|
import tensorflow as tf
import numpy as np
# Define the model.
# A small fully-connected classifier: 3 input features -> three hidden ReLU
# layers of 64 units each -> softmax over 2 classes.
model = tf.keras.Sequential([
    tf.keras.layers.Input(shape=(3,)),
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(2, activation='softmax')
])
# Compile the model.
# sparse_categorical_crossentropy expects integer class labels (0 or 1),
# not one-hot vectors.  (numpy is imported above but unused in this snippet.)
model.compile( optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy']
)
<reponame>BFergerson/Chronetic
package io.chronetic.evolution.pool;
import static org.junit.Assert.*;
import io.chronetic.data.ChronoSeries;
import io.chronetic.data.measure.ChronoScaleUnit;
import io.chronetic.evolution.pool.allele.ChronoAllele;
import io.chronetic.evolution.pool.allele.ChronoFrequency;
import io.chronetic.evolution.pool.allele.ChronoPattern;
import org.jenetics.util.ISeq;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
/**
 * Unit tests for {@code Chronotype} validity rules.
 * A Chronotype wraps one or more Chronosomes built from ChronoGene sequences;
 * the tests below pin down two invalid gene combinations and one valid one.
 */
public class ChronotypeTest {

    // Invalid: the gene sequence contains two ChronoPatterns on the same
    // scale unit (SECONDS) with different values (49 and 0).
    @Test
    public void invalidChronotype1() {
        ChronoSeries chronoSeries = testChronoSeries();
        ISeq<ChronoAllele> alleleSeq = ISeq.of(
                new ChronoFrequency(ChronoUnit.SECONDS, 0, 1, 1, Instant.now()),
                new ChronoPattern(ChronoScaleUnit.asFactual(chronoSeries, ChronoUnit.SECONDS), 0, 49),
                new ChronoPattern(ChronoScaleUnit.asFactual(chronoSeries, ChronoUnit.SECONDS), 0, 0)
        );
        ISeq<ChronoGene> geneSeq = ISeq.of(alleleSeq.map(ChronoGene::new));
        Chronosome chronosome = new Chronosome(geneSeq, chronoSeries);
        Chronotype chronotype = new Chronotype(chronoSeries, ISeq.of(Collections.singleton(chronosome)));
        assertFalse(chronotype.isValid());
    }

    // Invalid: the same Chronosome instance appears twice in the Chronotype.
    @Test
    public void invalidChronotype2() {
        ChronoSeries chronoSeries = testChronoSeries();
        ISeq<ChronoAllele> alleleSeq = ISeq.of(
                new ChronoFrequency(ChronoUnit.SECONDS, 0, 1, 1, Instant.now())
        );
        ISeq<ChronoGene> geneSeq = ISeq.of(alleleSeq.map(ChronoGene::new));
        Chronosome chronosome = new Chronosome(geneSeq, chronoSeries);
        Chronotype chronotype = new Chronotype(chronoSeries, ISeq.of(chronosome, chronosome));
        assertFalse(chronotype.isValid());
    }

    // Valid: two distinct Chronosomes whose patterns differ on the HOURS unit.
    @Test
    public void validChronotypeTest1() {
        ChronoSeries chronoSeries = testChronoSeries();
        ISeq<ChronoAllele> alleleSeq = ISeq.of(
                new ChronoFrequency(ChronoUnit.SECONDS, 0, 1, 1, Instant.now()),
                new ChronoPattern(ChronoScaleUnit.asFactual(chronoSeries, ChronoUnit.SECONDS), 0, 0),
                new ChronoPattern(ChronoScaleUnit.asFactual(chronoSeries, ChronoUnit.HOURS), 0, 1)
        );
        ISeq<ChronoAllele> alleleSeq2 = ISeq.of(
                new ChronoFrequency(ChronoUnit.SECONDS, 0, 1, 1, Instant.now()),
                new ChronoPattern(ChronoScaleUnit.asFactual(chronoSeries, ChronoUnit.SECONDS), 0, 0),
                new ChronoPattern(ChronoScaleUnit.asFactual(chronoSeries, ChronoUnit.HOURS), 0, 2)
        );
        ISeq<ChronoGene> geneSeq = ISeq.of(alleleSeq.map(ChronoGene::new));
        Chronosome chronosome = new Chronosome(geneSeq, chronoSeries);
        geneSeq = ISeq.of(alleleSeq2.map(ChronoGene::new));
        Chronosome chronosome2 = new Chronosome(geneSeq, chronoSeries);
        Chronotype chronotype = new Chronotype(chronoSeries, ISeq.of(chronosome, chronosome2));
        assertTrue(chronotype.isValid());
    }

    // Shared fixture: a minimal two-instant ChronoSeries.
    @NotNull
    public static ChronoSeries testChronoSeries() {
        return ChronoSeries.of(false, Instant.now(), Instant.now());
    }
}
|
/*
* Copyright (c) 2017, MegaEase
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package util
import (
"fmt"
"net/url"
"os"
"strings"
"github.com/pkg/errors"
)
// FileExtensions lists the file name extensions recognized for EaseMesh
// configuration files.
var FileExtensions = []string{".json", ".yaml", ".yml"}
type (
	// VisitorBuilder is a fluent builder that assembles Visitors from the
	// configured input sources (command options, files, URLs, stdin).
	VisitorBuilder interface {
		HTTPAttemptCount(httpGetAttempts int) VisitorBuilder
		FilenameParam(filenameOptions *FilenameOptions) VisitorBuilder
		CommandParam(commandOptions *CommandOptions) VisitorBuilder
		Command() VisitorBuilder
		Do() ([]Visitor, error)
		File() VisitorBuilder
		URL(httpAttemptCount int, urls ...*url.URL) VisitorBuilder
		Stdin() VisitorBuilder
	}

	// visitorBuilder is the default VisitorBuilder implementation; errors
	// encountered while building are accumulated in errs and surfaced by Do.
	visitorBuilder struct {
		visitors          []Visitor
		decoder           Decoder
		httpGetAttempts   int
		errs              []error
		singleItemImplied bool
		commandOptions    *CommandOptions
		filenameOptions   *FilenameOptions
		stdinInUse        bool
	}

	// CommandOptions holds the command input option.
	CommandOptions struct {
		// Kind is required.
		Kind string
		// Name is allowed to be empty.
		Name string
	}

	// FilenameOptions holds the file input options.
	FilenameOptions struct {
		Filenames []string
		Recursive bool
	}
)
// NewVisitorBuilder returns a VisitorBuilder with defaults:
// 3 HTTP retry attempts and the default decoder.
func NewVisitorBuilder() VisitorBuilder {
	return &visitorBuilder{httpGetAttempts: 3, decoder: newDefaultDecoder()}
}
// HTTPAttemptCount overrides how many times URL fetches are attempted.
func (b *visitorBuilder) HTTPAttemptCount(httpGetAttempts int) VisitorBuilder {
	b.httpGetAttempts = httpGetAttempts
	return b
}
// FilenameParam stores the file input options consumed later by File().
func (b *visitorBuilder) FilenameParam(filenameOptions *FilenameOptions) VisitorBuilder {
	b.filenameOptions = filenameOptions
	return b
}
// CommandParam stores the command input options consumed later by Command().
func (b *visitorBuilder) CommandParam(commandOptions *CommandOptions) VisitorBuilder {
	b.commandOptions = commandOptions
	return b
}
// Command materializes a visitor from the configured CommandOptions.
// It is a no-op when no command options were provided.
func (b *visitorBuilder) Command() VisitorBuilder {
	if b.commandOptions == nil {
		return b
	}
	b.visitors = append(b.visitors, newCommandVisitor(
		b.commandOptions.Kind,
		b.commandOptions.Name,
	))
	return b
}
// Do finalizes the builder: it materializes the command- and file-based
// visitors and returns them, or a single aggregate error if any step failed.
func (b *visitorBuilder) Do() ([]Visitor, error) {
	b.Command()
	b.File()
	if len(b.errs) != 0 {
		return nil, fmt.Errorf("%+v", b.errs)
	}
	return b.visitors, nil
}
// File materializes visitors from the configured FilenameOptions.
// Each filename may be "-" (stdin), an http(s) URL, or a filesystem path;
// errors are accumulated in b.errs rather than returned immediately.
func (b *visitorBuilder) File() VisitorBuilder {
	if b.filenameOptions == nil {
		return b
	}
	recursive := b.filenameOptions.Recursive
	paths := b.filenameOptions.Filenames
	for _, s := range paths {
		switch {
		case s == "-":
			b.Stdin()
		// strings.HasPrefix is the idiomatic (and clearer) form of
		// strings.Index(s, prefix) == 0.
		case strings.HasPrefix(s, "http://") || strings.HasPrefix(s, "https://"):
			url, err := url.Parse(s)
			if err != nil {
				b.errs = append(b.errs, fmt.Errorf("the URL passed to filename %q is not valid: %v", s, err))
				continue
			}
			b.URL(b.httpGetAttempts, url)
		default:
			// A plain path: without recursion, a single item is implied.
			if !recursive {
				b.singleItemImplied = true
			}
			b.path(recursive, s)
		}
	}
	return b
}
// URL registers a visitor per URL; each fetch is retried up to
// httpAttemptCount times.
func (b *visitorBuilder) URL(httpAttemptCount int, urls ...*url.URL) VisitorBuilder {
	for _, u := range urls {
		b.visitors = append(b.visitors, &urlVisitor{
			URL:              u,
			streamVisitor:    newStreamVisitor(nil, b.decoder, u.String()),
			HTTPAttemptCount: httpAttemptCount,
		})
	}
	return b
}
// Stdin registers a visitor that reads from standard input. Stdin may only
// be consumed once; a second registration records an error (surfaced later
// by Do) while keeping the builder chainable.
func (b *visitorBuilder) Stdin() VisitorBuilder {
	if b.stdinInUse {
		// Fixed message grammar (was: "Stdin already in used").
		b.errs = append(b.errs, errors.Errorf("stdin already in use"))
	}
	b.stdinInUse = true
	b.visitors = append(b.visitors, FileVisitorForSTDIN(b.decoder))
	return b
}
// path turns filesystem paths into file visitors, expanding directories
// (recursively when requested) and filtering by FileExtensions.
func (b *visitorBuilder) path(recursive bool, paths ...string) VisitorBuilder {
	for _, p := range paths {
		_, err := os.Stat(p)
		if os.IsNotExist(err) {
			b.errs = append(b.errs, fmt.Errorf("the path %q does not exist", p))
			continue
		}
		if err != nil {
			b.errs = append(b.errs, fmt.Errorf("the path %q cannot be accessed: %v", p, err))
			continue
		}
		visitors, err := expandPathsToFileVisitors(b.decoder, p, recursive, FileExtensions)
		if err != nil {
			b.errs = append(b.errs, fmt.Errorf("error reading %q: %v", p, err))
		}
		b.visitors = append(b.visitors, visitors...)
	}
	// NOTE(review): this checks *all* accumulated visitors, not only the ones
	// added by this call, so earlier command/URL visitors can mask an empty
	// path expansion — confirm this is intended.
	if len(b.visitors) == 0 {
		b.errs = append(b.errs, fmt.Errorf("error reading %v: recognized file extensions are %v", paths, FileExtensions))
	}
	return b
}
|
#!/bin/bash
# Append static host entries for the Kubernetes master and minion nodes.
# printf repeats its format once per argument, producing one line per host.
printf '127.0.0.1 %s\n' centos-master centos-minion >> /etc/hosts
|
#!/bin/bash
#
# Generate a .dir-locals.el file for Emacs (projectile + flycheck-clang-tidy)
# inside the given project directory.
#
# Usage: <script> <build_mode> <build_arch> <project_path> <project_name> [build_args]
_build_mode=$1
_build_arch=$2
_project_path=$3
_project_name=$4
_build_args=$5                      # currently unused; kept for call-site compatibility
_gcc_version=$(gcc -dumpversion)    # currently unused; kept for call-site compatibility
# Emacs directory-local variables pointing the tooling at the build tree.
_dir_locals="(( nil . (\n\t
(flycheck-clang-tidy-build-path . \"$_project_path/build/$_build_mode/$_build_arch/\")\n\t
(projectile-enable-caching . t)\n\t
(projectile-project-name . \"$_project_name\")\n\t
(projectile-project-root . \"$_project_path/\")\n\t
)\n\t
)
(c-mode . ((mode . c++))) )\n\t"
# Quote both expansions: unquoted, echo word-splits the string and collapses
# its embedded newlines, corrupting the generated file; unquoted paths break
# on spaces. (Also removed a stray '²' that preceded the shebang.)
echo -e "$_dir_locals" > "$_project_path/.dir-locals.el"
|
import * as homeController from "../controllers/home";
import * as userController from "../controllers/user";
import * as apiController from "../controllers/api";
import * as contactController from "../controllers/contact";
import * as formController from "../controllers/form";
import logger from "../util/logger";
import { Express, Request, Response, NextFunction } from "express";
// Logs each request and attaches permissive CORS headers for the dev
// front-end origin. Must be registered before the route handlers.
const corsMiddleware = (req: Request, res: Response, next: NextFunction) => {
  logger.debug(req.method + " at " + req.url);
  // NOTE(review): hard-coded LAN origin — looks dev-only; confirm before
  // deploying to any other environment.
  res.setHeader("Access-Control-Allow-Origin", "http://192.168.1.104:4200");
  res.setHeader("Access-Control-Allow-Credentials", "true");
  res.setHeader(
    "Access-Control-Allow-Methods",
    "POST, GET, PUT, DELETE, OPTIONS"
  );
  res.setHeader(
    "Access-Control-Allow-Headers",
    "Origin, X-Requested-With, Content-Type, Accept, Authorization"
  );
  next();
};
// Registers every HTTP route of the API on the given Express app.
// passportConfig supplies the isAuthenticated guard used on protected routes.
export default (app: Express, passportConfig: any) => {
  /**
   * Primary app routes.
   */
  // app.options("*", (req: Request, res: Response) => {
  //   console.log("/options");
  //   res.setHeader("Access-Control-Allow-Origin", "http://192.168.1.104:4200");
  //   res.setHeader("Access-Control-Allow-Credentials", "true");
  //   res.setHeader(
  //     "Access-Control-Allow-Methods",
  //     "POST, GET, PUT, DELETE, OPTIONS"
  //   );
  //   res.setHeader(
  //     "Access-Control-Allow-Headers",
  //     "Origin, X-Requested-With, Content-Type, Accept, Authorization"
  //   );
  //   res.send({ result: true, message: "Want more options?" });
  // });
  // CORS headers are applied to every request; OPTIONS preflights are
  // answered directly.
  app.all("*", corsMiddleware);
  app.options("*", (req, res) => {
    res.send({ result: true, message: "You just want Attention!" });
  });
  app.get("/api/", homeController.index);
  // Authentication / account lifecycle.
  // app.get("/api/login", userController.getLogin);
  app.post("/api/login", userController.postLogin);
  app.get("/api/logout", userController.logout);
  // app.get("/api/forgot", userController.getForgot);
  app.post("/api/forgot", userController.postForgot);
  // app.get("/api/reset/:token", userController.getReset);
  app.post("/api/reset/:token", userController.postReset);
  // app.get("/api/signup", userController.getSignup);
  app.post("/api/signup", userController.postSignup);
  // app.get("/api/contact", contactController.getContact);
  app.post("/api/contact", contactController.postContact);
  app.get(
    "/api/account",
    passportConfig.isAuthenticated,
    userController.getAccount
  );
  app.post(
    "/api/addDetails",
    passportConfig.isAuthenticated,
    userController.postAddDetails
  );
  app.post(
    "/api/account/password",
    passportConfig.isAuthenticated,
    userController.postUpdatePassword
  );
  app.post(
    "/api/account/delete",
    passportConfig.isAuthenticated,
    userController.postDeleteAccount
  );
  /* ============================================================
     ====================      get requests    ====================*/
  app.get("/api/getAllCategories", apiController.getAllCategories);
  app.get("/api/getAllSubCategories", apiController.getAllSubCategories);
  app.get("/api/category/:id", apiController.getSubCategories);
  app.get(
    "/api/getBuyerRequests/:page/:limit",
    homeController.getBuyerRequests
  );
  app.get("/api/getHourlies/:page/:limit", homeController.getHourlies);
  app.get("/api/getMicrojobs/:page/:limit", homeController.getMicrojobs);
  app.get(
    "/api/currentUser",
    passportConfig.isAuthenticated,
    userController.currentUser
  );
  app.get(
    "/api/getSingleBuyerRequest/:id",
    homeController.getSingleBuyerRequest
  );
  app.get("/api/getSingleMicrojob/:id", homeController.getSingleMicrojob);
  app.get("/api/getSinglePortfolio/:id", homeController.getSinglePortfolio);
  app.get("/api/getSingleHourly/:id", homeController.getSingleHourly);
  app.get("/api/getUserBox/:id", userController.getUserBox);
  // NOTE(review): "findUername" matches the controller's exported name;
  // renaming the route would require renaming the controller too.
  app.get("/api/findUsername/:username", userController.findUername);
  app.get("/api/getSignedUrl/:filename", userController.getSignedUrl);
  app.get(
    "/api/getMyMicrojobs",
    passportConfig.isAuthenticated,
    homeController.getMyMicrojobs
  );
  app.get(
    "/api/getMyHourlies",
    passportConfig.isAuthenticated,
    homeController.getMyHourlies
  );
  app.get(
    "/api/getMyBuyerRequests",
    passportConfig.isAuthenticated,
    homeController.getMyBuyerRequests
  );
  app.get(
    "/api/getMyPortfolio/:id",
    homeController.getMyPortfolio
  );
  app.get(
    "/api/getMyHourlyOrder/",
    passportConfig.isAuthenticated,
    homeController.getMyHourlyOrder
  );
  app.get(
    "/api/getMyRequestOrder/",
    passportConfig.isAuthenticated,
    homeController.getMyRequestOrder
  );
  app.get(
    "/api/getSentRequestOrder/",
    passportConfig.isAuthenticated,
    homeController.getSentRequestOrder
  );
  app.get(
    "/api/getMySentOrders/:type",
    passportConfig.isAuthenticated,
    homeController.getMySentOrders
  );
  app.get(
    "/api/getMyMicrojobOrder/",
    passportConfig.isAuthenticated,
    homeController.getMyMicrojobOrder
  );
  app.get("/api/getCategoryPost/:type/:id", homeController.getCategoryPost);
  app.get(
    "/api/getSubCategoryPost/:type/:id",
    homeController.getSubCategoryPost
  );
  app.get(
    "/api/getMessageList",
    passportConfig.isAuthenticated,
    homeController.getMessageList
  );
  app.get(
    "/api/getMessages/:room",
    passportConfig.isAuthenticated,
    homeController.getMessages
  );
  app.get("/api/singleOrder/:type/:id", homeController.getMyOrder);
  app.get("/api/verify/:email/:token", userController.verifyUser);
  app.get("/api/getReviews/:type/:id", homeController.getReviews);
  app.get("/api/getSmallUserDetails/:id", userController.getSmallUserDetails);
  /* ============================================================
     ====================      post requests    ====================*/
  app.post("/api/category", apiController.postCategory);
  app.post("/api/subCategory", apiController.postSubCategory);
  app.post("/api/postBuyerRequest", formController.postBuyerRequest);
  app.post("/api/postMicrojob", formController.postMicrojob);
  app.post("/api/updateMicrojob", formController.updateMicrojob);
  app.post("/api/updateHourly", formController.updateHourly);
  app.post("/api/updateBuyerRequest", formController.updateBuyerRequest);
  app.post("/api/postHourly", formController.postHourly);
  app.post("/api/postPortfolio", formController.postPortfolio);
  app.post("/api/updatePortfolio", formController.updatePortfolio);
  app.post(
    "/api/postUpdateDetails",
    passportConfig.isAuthenticated,
    userController.postUpdateDetails
  );
  app.post(
    "/api/postMicrojobOrder",
    passportConfig.isAuthenticated,
    formController.postMicrojobOrder
  );
  app.post(
    "/api/postRequestOrder",
    passportConfig.isAuthenticated,
    formController.postBuyerRequestOrder
  );
  app.post(
    "/api/postHourlyOrder",
    passportConfig.isAuthenticated,
    formController.postHourlyOrder
  );
  app.post(
    "/api/deletePost",
    passportConfig.isAuthenticated,
    homeController.deletePost
  );
  app.post(
    "/api/acceptOrder",
    passportConfig.isAuthenticated,
    formController.acceptOrder
  );
  app.post(
    "/api/declineOrder",
    passportConfig.isAuthenticated,
    formController.declineOrder
  );
  app.post(
    "/api/completeOrder",
    passportConfig.isAuthenticated,
    formController.completeOrder
  );
  app.post(
    "/api/disputeOrder",
    passportConfig.isAuthenticated,
    formController.disputeOrder
  );
  app.post(
    "/api/addRating",
    passportConfig.isAuthenticated,
    formController.addRating
  );
  app.post("/api/addMessage", formController.addMessages);
  /* ============================================================
     ====================      wallet requests    ====================*/
  app.post(
    "/api/wallet/updatePubAddress",
    userController.postUpdatePublicAddress
  );
  app.post("/api/wallet/postSignup", userController.postWalletSignup);
  app.post("/api/wallet/setEscrowTxId", userController.setEscrowTxId);
  app.post(
    "/api/wallet/setPaymentSignature1",
    userController.setPaymentSignature1
  );
  app.post("/api/wallet/setPaymentTxId", userController.setPaymentTxId);
  app.get(
    "/api/wallet/getPendingBuyerSignature",
    userController.getPendingBuyerSignature
  );
  app.get(
    "/api/wallet/getPendingSellerSignature",
    userController.getPendingSellerSignature
  );
  app.get(
    "/api/wallet/getPendingMediatedDeals",
    userController.getPendingMediatedDeals
  );
  app.get("/api/wallet/getPendingEscrow", userController.getSendingEscrow); // the user have to pay for these deals
  // app.get("/api/wallet/getReceivingEscrow", userController.getReceivingEscrow);
  // testing
  app.get("/api/test/getTesting", userController.testing);
};
|
package net.sf.saxon.s9api;
public class SaxonApiException extends Exception { }
|
<reponame>isomorfeus/isomorfeus-project<filename>isomorfeus-data/lib/isomorfeus_data/lucid_file/base.rb<gh_stars>1-10
module LucidFile
  class Base
    # Hook: every subclass of LucidFile::Base automatically gains the
    # LucidFile::Mixin behaviour and — outside the Opal/browser build —
    # is registered with Isomorfeus as a valid data class.
    def self.inherited(base)
      base.include LucidFile::Mixin
      # RUBY_ENGINE is 'opal' when transpiled for the browser; registration
      # is a server-side concern only.
      if RUBY_ENGINE != 'opal'
        Isomorfeus.add_valid_data_class(base)
      end
    end
  end
end
|
#!/bin/bash
#
# Post-process forecast output: run grad_pt.py over every troposphere file of
# each initial date under $root_dir, then sort the generated PNGs into
# per-pressure-level folders of the winterfig tree.
root_dir="/home/tomita/1212"
forecast_data=2020121200
# Enumerate the per-initial-date children via globbing (never parse `ls`).
initial_date=()
for entry in "$root_dir"/*; do
  initial_date+=("${entry##*/}")
done
for i in "${!initial_date[@]}"; do
  # Process every troposphere-* file of this initial date in sorted order.
  find "${root_dir}/${initial_date[$i]}" -type f | grep troposphere- | sort |
  while IFS= read -r file_; do
    echo "$file_"
    python3 grad_pt.py --file "$file_"
  done
  # Move the plots produced above into per-pressure-level folders.
  # ${initial_date[$i]:4:-1} strips the 4-char prefix and last char (bash 4.2+).
  for pressure in 850 500 300; do
    dest="/home/tomita/winterfig/${forecast_data}/${initial_date[$i]:4:-1}/grad_pt/${pressure}"
    mkdir -p "$dest"
    mv ./*"${pressure}".png "$dest"
  done
done
|
/*
* GTreeBarrier.java
*
* Created on March 26, 2006, 5:07 PM
*
* From "Multiprocessor Synchronization and Concurrent Data Structures",
* by <NAME> and <NAME>.
* Copyright 2006 Elsevier Inc. All rights reserved.
*/
package tamp.ch17.Barrier.barrier;
/**
* Generic tree barrier.
* Solution to homework assignment.
**/
/**
 * Generic tree barrier.
 * Threads are grouped radix-at-a-time onto leaf nodes; group "leaders"
 * (thread index % radix == 0) propagate up the tree, and completion flows
 * back down through the per-node {@code done} flag.
 * Solution to homework assignment.
 **/
public class GTreeBarrier implements Barrier {
  int radix;          // fan-in at each tree node
  int leaves;         // number of leaf nodes filled so far by build()
  Node[] leaf;        // leaf node assigned to each group of `radix` threads
  public GTreeBarrier(int size, int radix) {
    this.radix = radix;
    leaves = 0;
    this.leaf = new Node[size / radix];
    int depth = 0;
    // compute tree depth
    while (size > 1) {
      depth++;
      size = size / radix;
    }
    Node root = new Node();
    build(root, depth - 1);
  }
  // recursive tree constructor: populates leaf[] left-to-right
  void build(Node parent, int depth) {
    // are we at a leaf node?
    if (depth == 0) {
      leaf[leaves++] = parent;
    } else {
      for (int i = 0; i < radix; i++) {
        Node child = new Node(parent);
        build(child, depth - 1);
      }
    }
  }
  /**
   * Block until all threads have reached barrier.
   */
  public void await() {
    int me = ThreadID.get();
    Node myLeaf = leaf[me / radix];
    myLeaf.await(me);
  }
  private class Node {
    final Barrier barrier;              // per-node sense barrier over `radix` parties
    Node parent;                        // null at the root
    final ThreadLocal<Boolean> threadSense;
    volatile boolean done;              // flipped by the leader to release waiters
    // construct root node
    public Node() {
      done = false;
      this.threadSense = new ThreadLocal<Boolean>() {
        protected Boolean initialValue() {
          return !Node.this.done;
        }
        ;
      };
      parent = null;
      barrier = new SenseBarrier(radix);
    }
    public Node(Node _parent) {
      this();
      parent = _parent;
    }
    public void await(int me) {
      boolean sense = threadSense.get();
      barrier.await();
      if (me % radix == 0) {
        // group leader: climb toward the root before signalling completion
        if (parent != null) { // not the root: recurse upward first
          parent.await(me / radix);
        }
        done = sense;
      } else {
        // non-leaders spin on the volatile flag until the leader returns
        while (done != sense) {
        }
        ;
      }
      threadSense.set(!sense);
    }
  }
}
#include "ofApp.h"
//--------------------------------------------------------------
void ofApp::setup(){
    // Setup Spacebrew connection and subscribe to the driver-car and
    // traffic channels (all values arrive as comma-separated strings).
    string host = "spacebrew.icts.sbg.ac.at";
    string name = "of-Seat";
    string description = "Generates parameters for servos";
    spacebrew.addSubscribe("carHeading", "String");
    spacebrew.addSubscribe("carPositionX", "String");
    spacebrew.addSubscribe("carPositionY", "String");
    spacebrew.addSubscribe("carPositionZ", "String");
    spacebrew.addSubscribe("trafficHeading", "String");
    spacebrew.addSubscribe("trafficPositionX", "String");
    spacebrew.addSubscribe("trafficPositionY", "String");
    spacebrew.addSubscribe("trafficPositionZ", "String");
    // ofSetLogLevel(OF_LOG_VERBOSE);
    spacebrew.connect( host, 9000, name, description );
    Spacebrew::addListener(this, spacebrew);
    ofSetVerticalSync(true);
    ofBackground(20, 70, 60);
    ofEnableSmoothing();
    ofEnableDepthTest();
    // user + seat camera: camEasyCam is the free-orbit debug view,
    // camSeat is the fixed first-person view rendered into the FBO.
    camEasyCam.setDistance(20);
    camEasyCam.setNearClip(0);
    camEasyCam.setFarClip(200);
    camSeat.setPosition(0, 0, 0);
    camSeat.lookAt(ofVec3f(0,0,-1));
    camSeat.setFov(95);
    camSeat.setNearClip(0);
    camSeat.setFarClip(200);
    // Main viewport fills the window; the seat view uses a quarter-size
    // rectangle in the top-left corner.
    viewMain.x = 0;
    viewMain.y = 0;
    viewMain.width = ofGetWidth();
    viewMain.height = ofGetHeight();
    viewSeat.x = 0;
    viewSeat.y = 0;
    viewSeat.width = ofGetWidth()/4;
    viewSeat.height = ofGetHeight()/4;
    // Offscreen buffer the seat camera renders into each frame.
    scene.allocate(256, 256);
    // camSeat.scale = 20;
    ofEnableDepthTest();
}
//--------------------------------------------------------------
void ofApp::update(){
    // Render the seat-camera view into the offscreen FBO every frame;
    // draw() later blits it into the corner viewport.
    scene.begin();
    ofClear(0,0,0,255);
    camSeat.begin();
    // ofPushStyle();
    // ofSetColor(0, 0, 255);
    // ofRect(0, 0, 16, 16);
    // ofPopStyle();
    drawScene(false);
    camSeat.end();
    scene.end();
}
// Draws the shared 3D scene: optional reference grid plus one red box per
// traffic vehicle, positioned relative to the driver's own car.
// bWithGrid: true for the debug view, false for the seat/FBO view.
void ofApp::drawScene(bool bWithGrid) {
    if(bWithGrid) {
        ofPushStyle();
        ofSetColor(255, 100, 100);
        ofDrawGrid(20.0f);
        ofPopStyle();
    }
    ofEnableLighting();
    //--
    // Draw Traffic
    ofPushStyle();
    light.enable();
    light.setPosition(ofVec3f(0,0,0));
    ofSetColor(255,0,0);
    for (int i = 0; i < traffic.size(); i++) {
        ofPushMatrix();
        // Place each traffic car relative to the driver: rotate by the
        // heading difference, then translate by the position difference.
        // NOTE(review): rotate-before-translate means the offset is applied
        // in the rotated frame — confirm this matches the intended geometry.
        ofRotateY(driver.heading - traffic[i].heading);
        ofTranslate(driver.position - traffic[i].position);
        ofDrawBox(0,0,0, 1.5,1.1,3);
        ofPopMatrix();
    }
    light.disable();
    ofDisableLighting();
    //
    //--
    ofPopStyle();
}
//--------------------------------------------------------------
void ofApp::draw(){
    // Main orbit-camera view with the reference grid...
    camEasyCam.begin(viewMain);
    drawScene(true);
    camEasyCam.end();
    // ...then the pre-rendered seat view FBO composited on top (depth test
    // disabled so the 2D blit is not clipped by the 3D scene).
    ofDisableDepthTest();
    scene.draw(viewSeat);
    ofEnableDepthTest();
}
// Spacebrew callback. Messages whose name starts with "traffic" carry a
// comma-separated list (one value per vehicle); "car*" messages carry a
// single value for the driver's own car.
void ofApp::onMessage( Spacebrew::Message & msg ){
    if ( msg.name.find("traffic") == 0){
        // Parse the CSV payload into a float vector.
        vector<float> values;
        float value;
        stringstream input(msg.value);
        while (input >> value) {
            values.push_back(value);
            if (input.peek() == ',') input.ignore();
        }
        // Grow the traffic list to match the number of reported vehicles.
        // NOTE(review): the list never shrinks when vehicles disappear —
        // confirm that is acceptable.
        while (traffic.size() < values.size()) {
            vehicle car;
            car.heading = 0;
            car.position.set(0,0,0);
            traffic.push_back(car);
        }
        if (msg.name == "trafficHeading") {
            for (int i = 0; i < values.size(); i++) {
                traffic[i].heading = values[i];
            }
        }
        else if (msg.name == "trafficPositionX") {
            for (int i = 0; i < values.size(); i++) {
                traffic[i].position.x = values[i];
            }
        }
        else if (msg.name == "trafficPositionY") {
            for (int i = 0; i < values.size(); i++) {
                traffic[i].position.y = values[i];
            }
        }
        else if (msg.name == "trafficPositionZ") {
            for (int i = 0; i < values.size(); i++) {
                traffic[i].position.z = values[i];
            }
        }
    }
    else {
        // Driver-car channels: single scalar per message.
        if (msg.name == "carHeading") {
            driver.heading = ofToFloat(msg.value);
        }
        else if (msg.name == "carPositionX") {
            driver.position.x = ofToFloat(msg.value);
        }
        else if (msg.name == "carPositionY") {
            driver.position.y = ofToFloat(msg.value);
        }
        else if (msg.name == "carPositionZ") {
            driver.position.z = ofToFloat(msg.value);
        }
    }
}
//--------------------------------------------------------------
// The remaining ofBaseApp event callbacks are intentionally empty; all
// interaction happens through Spacebrew messages (see onMessage above).
void ofApp::keyPressed(int key){
}
//--------------------------------------------------------------
void ofApp::keyReleased(int key){
}
//--------------------------------------------------------------
void ofApp::mouseMoved(int x, int y ){
}
//--------------------------------------------------------------
void ofApp::mouseDragged(int x, int y, int button){
}
//--------------------------------------------------------------
void ofApp::mousePressed(int x, int y, int button){
}
//--------------------------------------------------------------
void ofApp::mouseReleased(int x, int y, int button){
}
//--------------------------------------------------------------
void ofApp::windowResized(int w, int h){
}
//--------------------------------------------------------------
void ofApp::gotMessage(ofMessage msg){
}
//--------------------------------------------------------------
void ofApp::dragEvent(ofDragInfo dragInfo){
}
|
#!/usr/bin/env bash
#
# Build a single PDF containing every source file under ./src, one LaTeX
# section per file, rendered with the listings package.
## https://superuser.com/questions/601198/how-can-i-automatically-convert-all-source-code-files-in-a-folder-recursively
#tex_file=$(mktemp) ## Random temp file name
tex_file=tmp.tex ## Random temp file name
## Print the tex file header
cat <<EOF > "$tex_file"
\documentclass[a4paper, 12pt]{article}
\usepackage[margin=1in]{geometry}
\usepackage{listings}
\usepackage[usenames,dvipsnames]{color} %% Allow color names
\lstdefinestyle{customasm}{
belowcaptionskip=1\baselineskip,
xleftmargin=\parindent,
language=C, %% Change this to whatever you write in
breaklines=true, %% Wrap long lines
basicstyle=\footnotesize\ttfamily,
commentstyle=\itshape\color{Gray},
stringstyle=\color{Black},
keywordstyle=\bfseries\color{OliveGreen},
identifierstyle=\color{blue},
%% xleftmargin=-8em,
}
\usepackage[colorlinks=true,linkcolor=blue]{hyperref}
\begin{document}
\tableofcontents
EOF
find ./src -type f ! -regex ".*/\..*" ! -name ".*" ! -name "*~" ! -name 'src2pdf.sh' ! -name "*.o" ! -name "*.lo"|
sed 's/^\..//' | ## Change ./foo/bar.src to foo/bar.src
while read -r i; do ## Loop through each file (-r: keep backslashes literal)
  name=${i//_/\\_} ## escape underscores
  echo "\newpage" >> "$tex_file" ## start each section on a new page
  echo "\section{${name}}" >> "$tex_file" ## Create a section for each filename
  ## This command will include the file in the PDF
  echo "\lstinputlisting[style=customasm]{$i}" >> "$tex_file"
done &&
echo "\end{document}" >> "$tex_file"
pdflatex "$tex_file" -output-directory . &&
pdflatex "$tex_file" -output-directory . ## This needs to be run twice
                                         ## for the TOC to be generated
#
# Install Python 3
# This fragment is meant to be *sourced* (it uses `return`, which is invalid
# when executed directly).  Expects __LIB_PATH to point at the helper library
# directory.  Returns 0 on success, 1 if every installer failed.
# Check for most recent software available
source "${__LIB_PATH}/orun_sw_check-update" &&
# Install Python 3 on this system
# NOTE(review): if the update check above fails, the && short-circuits and
# execution falls through to the *second* installer (deb) — confirm whether
# the rhel installer was meant to be skipped in that case.
source "${__LIB_PATH}/install_python3_rhel.sh" ||
source "${__LIB_PATH}/install_python3_deb.sh" ||
source "${__LIB_PATH}/install_python3_bsd.sh" ||
source "${__LIB_PATH}/install_python3_mac.sh" ||
return 1
return 0
|
<reponame>lgoldstein/communitychest
/*
*
*/
package net.community.chest.javaagent.dumper.filter;
import net.community.chest.dom.transform.XmlConvertible;
/**
* <P>Copyright as per GPLv2</P>
* @author <NAME>.
* @since Aug 14, 2011 9:41:42 AM
*/
public interface XmlConvertibleClassFilter extends ClassFilter, XmlConvertible<ClassFilter> {
    // Marker interface: combines class filtering with XML (de)serialization;
    // no methods beyond the inherited ones.
}
|
def sort_ascending(arr):
    """Sort ``arr`` in place in ascending order and return the same list.

    Replaces a hand-rolled O(n^2) selection sort with the built-in
    ``list.sort()`` (Timsort, O(n log n)).  The original's contract is kept:
    the input list is mutated AND returned.
    """
    arr.sort()
    return arr
# Demonstrate the sort on a sample list.
result = sort_ascending([6, 5, 3, 8, 1, 2])
print(result)
# Private key offered to the agent; override by exporting SSH_KEY beforehand.
# NOTE(review): the default path "/localhost/.ssh/id_rsa" looks unusual --
# confirm this is the intended container mount point.
export SSH_KEY="${SSH_KEY:-/localhost/.ssh/id_rsa}"
# Attempt Re-use existing agent if one exists
# (SSH_AGENT_CONFIG is expected to be set by the caller; it holds the env
#  lines a previous `ssh-agent` printed, so sourcing it restores
#  SSH_AUTH_SOCK / SSH_AGENT_PID into this shell.)
if [ -f "${SSH_AGENT_CONFIG}" ]; then
echo "* Found SSH agent config"
. "${SSH_AGENT_CONFIG}"
fi
# Install a Ctrl-C handler so an interrupted `ssh-add` tears the agent down
# instead of leaving an orphaned ssh-agent and a stale socket behind.
# (Registering the trap before the function is defined is fine: bash resolves
#  the name when the signal is delivered.)
trap ctrl_c INT
function ctrl_c() {
echo "* Okay, nevermind =)"
# NOTE(review): killall -9 terminates *every* ssh-agent owned by this user,
# not only the one started below -- confirm this is intended.
killall -9 ssh-agent
rm -f "${SSH_AUTH_SOCK}"
}
# Otherwise launch a new agent
# An agent is usable only if SSH_AUTH_SOCK is set AND the socket still exists;
# otherwise start a fresh one and persist its env lines (minus the `echo`
# chatter) for later shells to re-use.
if [ -z "${SSH_AUTH_SOCK}" ] || ! [ -e "${SSH_AUTH_SOCK}" ]; then
ssh-agent | grep -v '^echo' >"${SSH_AGENT_CONFIG}"
. "${SSH_AGENT_CONFIG}"
# Add keys (if any) to the agent
if [ -n "${SSH_KEY}" ] && [ -f "${SSH_KEY}" ]; then
echo "Add your local private SSH key to the key chain. Hit ^C to skip."
ssh-add "${SSH_KEY}"
fi
fi
# Clean up
# The agent is ready; drop the interrupt handler and its helper function.
trap - INT
unset -f ctrl_c
|
#include <algorithm>  // std::remove, required by the erase-remove idiom in RemoveObserver
#include <iostream>
#include <vector>
// Base type for objects interested in device-name policy changes;
// concrete observers are expected to derive from this.
class Observer {
// Define the Observer class
};
class DeviceNamePolicyHandler {
public:
// Constructor and other necessary methods
~DeviceNamePolicyHandler() = default;
void AddObserver(Observer* observer) {
observers_.push_back(observer);
}
void RemoveObserver(Observer* observer) {
observers_.erase(std::remove(observers_.begin(), observers_.end(), observer), observers_.end());
}
// Other necessary methods for handling device name policy
void NotifyObservers() {
for (Observer* observer : observers_) {
// Notify each observer about the change in device name policy
}
}
private:
std::vector<Observer*> observers_;
}; |
<filename>parameters-experimental/src/main/java/be/kwakeroni/parameters_exp/client/api/EntryIdentifier.java
//package be.kwakeroni.parameters.client.api;
//
//import EntryType;
//
//public interface EntryIdentifier<ET extends EntryType> {
//}
|
import argparse
import sys

# Encoding used when writing analysis output.
ENCODE_OUT = 'utf-8'

# Top-level parser for the QuantGov command-line interface; every operation
# is exposed as a subcommand beneath it.
parser = argparse.ArgumentParser(description='QuantGov CLI Tool')
subparsers = parser.add_subparsers(dest='command', help='Available commands')

# Pre-existing "nlp" subcommand for NLP operations.
nlp_subcommands = subparsers.add_parser('nlp', help='NLP operations')

# "analyze" subcommand: runs an analysis over a QuantGov corpus directory.
analyze_subcommand = subparsers.add_parser('analyze', help='Analyze the QuantGov Corpus')
analyze_subcommand.add_argument('corpus', help='Path to a QuantGov Corpus directory')
analyze_subcommand.add_argument('-f', '--format', default='json', choices=['json', 'csv'],
                                help='Specify the output format for the analysis results')
analyze_subcommand.add_argument('-v', '--verbose', action='store_true', help='Enable verbose mode for detailed output')

# Parse the command line and echo the resulting namespace.
args = parser.parse_args()
print(args)
import { ExitCode } from '@src/index';
/**
 * Gets all runnable scripts.
 *
 * @param {string} [scope] - Optional scope used to narrow which scripts are listed.
 * @returns {Promise<ExitCode>} Exit code of the listing operation.
 *   (Original JSDoc said `string[]`, which contradicted the declared type.)
 */
declare const listAll: (scope?: string | undefined) => Promise<ExitCode>;
export default listAll;
|
import codecs
MESSAGE_HEADER_LEN = 10 # Example length of the message header
class StreamError(Exception):
    """Raised when a message stream/header cannot be parsed."""
    pass
class GenericMessageHeader:
    """Parser for the fixed-size generic message header."""

    @staticmethod
    def parse(header_bytes):
        """Return the parsed header rendered as a string.

        Currently a stub: it ignores ``header_bytes`` and always yields the
        same placeholder value.
        """
        return "ParsedHeader"
def process_message(msg: str) -> str:
    """Decode a hex-encoded message and describe its header.

    Returns ``""`` for empty input or when header parsing raises
    ``StreamError``, and ``"Invalid input message"`` when the payload is
    not valid hex.
    """
    if not msg:
        return ""
    try:
        raw = codecs.decode(msg, "hex")
    except ValueError:
        # binascii.Error (odd length / non-hex digit) subclasses ValueError.
        return "Invalid input message"
    try:
        header = GenericMessageHeader.parse(raw[0:MESSAGE_HEADER_LEN])
    except StreamError:
        # Header could not be parsed; report nothing for this message.
        return ""
    return f"Message Header: {header}"
#!/bin/bash
# Copyright 2021 The Kubeflow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script is used to push all Katib images.
# Run ./scripts/v1beta1/push.sh <IMAGE_REGISTRY> <TAG>
set -e

REGISTRY=$1
TAG=$2
if [[ -z "$REGISTRY" || -z "$TAG" ]]; then
  echo "Image registry and tag must be set"
  echo "Usage: $0 <image-registry> <image-tag>" 1>&2
  exit 1
fi

VERSION="v1beta1"

# Push one image, announcing it first.
# Arguments:
#   $1 - image name (without registry prefix or tag)
#   $2 - human-readable description used in the progress message
# All expansions are quoted (the original left ${REGISTRY}/${TAG} unquoted
# in every `docker push`, which is subject to word splitting -- SC2086).
push_image() {
  local image="$1"
  local description="$2"
  echo -e "\nPushing ${description}...\n"
  docker push "${REGISTRY}/${image}:${TAG}"
}

echo "Pushing images for Katib ${VERSION}..."
echo "Image registry: ${REGISTRY}"
echo "Image tag: ${TAG}"

# Katib core images
push_image "katib-controller" "Katib controller image"
push_image "katib-db-manager" "Katib DB manager image"
push_image "katib-ui" "Katib UI image"
push_image "cert-generator" "Katib cert generator image"
push_image "file-metrics-collector" "file metrics collector image"
push_image "tfevent-metrics-collector" "TF Event metrics collector image"

# Suggestion images
echo -e "\nPushing suggestion images..."
push_image "suggestion-hyperopt" "hyperopt suggestion"
push_image "suggestion-chocolate" "chocolate suggestion"
push_image "suggestion-hyperband" "hyperband suggestion"
push_image "suggestion-skopt" "skopt suggestion"
push_image "suggestion-goptuna" "goptuna suggestion"
push_image "suggestion-enas" "ENAS suggestion"
push_image "suggestion-darts" "DARTS suggestion"

# Early stopping images
echo -e "\nPushing early stopping images...\n"
push_image "earlystopping-medianstop" "median stopping rule"

# Training container images
echo -e "\nPushing training container images..."
push_image "mxnet-mnist" "mxnet mnist training container example"
push_image "pytorch-mnist" "PyTorch mnist training container example"
push_image "enas-cnn-cifar10-gpu" "Keras CIFAR-10 CNN training container example for ENAS with GPU support"
push_image "enas-cnn-cifar10-cpu" "Keras CIFAR-10 CNN training container example for ENAS with CPU support"
push_image "darts-cnn-cifar10" "PyTorch CIFAR-10 CNN training container example for DARTS"

echo -e "\nAll Katib images with ${TAG} tag have been pushed successfully!\n"
|
module.exports = {
User: require("./user"),
Company: require("./company"),
Review: require("./review"),
SalaryReport: require('./salaryReport')
};
|
<reponame>uwap/BahnhofsAbfahrten
// @flow
import { middlewares } from './logger';
import axios from 'axios';
import createAdmin from './admin';
import errorHandler from './errorHandler';
import http from 'http';
import Koa, { type Middleware } from 'koa';
import KoaBodyparser from 'koa-bodyparser';
import koaStatic from 'koa-static';
import path from 'path';
// eslint-disable-next-line no-underscore-dangle
global.__SERVER__ = true;
function transformStats(stats) {
const newStats = {};
Object.keys(stats).forEach(key => {
if (!Array.isArray(stats[key])) {
stats[key] = [stats[key]];
}
newStats[key] = {
css: [],
js: [],
};
stats[key].forEach(val => {
if (val.endsWith('js')) {
newStats[key].js.push(val);
} else if (val.endsWith('css')) {
newStats[key].css.push(val);
}
});
});
return newStats;
}
/**
 * Wraps a middleware getter. In production the middleware is resolved once,
 * up front; in any other environment it is re-resolved on every request so
 * freshly re-required (hot-reloaded) modules take effect.
 */
function hotHelper(getMiddleware: () => Middleware) {
  if (process.env.NODE_ENV === 'production') {
    return getMiddleware();
  }
  // Dev/test: defer resolution to request time.
  return (ctx, next) => getMiddleware()(ctx, next);
}
/**
 * Builds the Koa application: error handling, logging middlewares, body
 * parsing, API routes, static assets, SEO routes and the server-side
 * renderer. Outside production/test the route modules are re-required per
 * request so webpack hot reloading takes effect.
 */
export async function createApp() {
  const app = new Koa();
  app.use(errorHandler);
  middlewares.forEach(m => app.use(m));
  app.use(KoaBodyparser());
  // `let` (not const): the dev middleware below re-assigns these with
  // freshly required modules on each request.
  let apiRoutes = require('./Controller').default;
  let serverRender = require('./render').default;
  let seoController = require('./seo').default;
  if (process.env.NODE_ENV !== 'test' && process.env.NODE_ENV !== 'production') {
    // Dev only: mount webpack dev middleware and refresh the hot-reloadable
    // modules plus the chunk stats on every request.
    await require('./middleware/webpackDev')(app);
    app.use((ctx, next) => {
      serverRender = require('./render').default;
      apiRoutes = require('./Controller').default;
      seoController = require('./seo').default;
      ctx.stats = transformStats(ctx.state.webpackStats.toJson().assetsByChunkName);
      return next();
    });
  }
  app.use(hotHelper(() => apiRoutes.routes()));
  app.use(
    koaStatic(path.resolve(process.env.NODE_ENV === 'production' ? 'dist/client' : 'public'), {
      maxAge: 2592000000, // 30 days
    })
  );
  app.use(hotHelper(() => seoController));
  if (process.env.NODE_ENV === 'production') {
    // Production: chunk stats come from the build output on disk.
    // $FlowFixMe
    const stats = require(path.resolve('dist/client/static/stats.json'));
    app.use((ctx, next) => {
      ctx.stats = transformStats(stats.assetsByChunkName);
      return next();
    });
  }
  app.use(hotHelper(() => serverRender));
  return app;
}
/**
 * Entry point: creates the app and starts the server on WEB_PORT (9042 by
 * default). Outside production/test, unless NO_SSL is set, an HTTPS server
 * is started with the local self-signed certificates; otherwise plain HTTP.
 */
export default async () => {
  const app = await createApp();
  const port = process.env.WEB_PORT || 9042;
  let server;
  if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test' && !process.env.NO_SSL) {
    const https = require('https');
    const fs = require('fs');
    // eslint-disable-next-line no-sync
    const key = fs.readFileSync('./secrets/ssl/privkey.pem');
    // eslint-disable-next-line no-sync
    const cert = fs.readFileSync('./secrets/ssl/server.pem');
    server = https.createServer(
      {
        key,
        cert,
      },
      app.callback()
    );
    // Point the in-process API client at the server just configured.
    axios.defaults.baseURL = `https://local.marudor.de:${port}`;
  } else {
    axios.defaults.baseURL = `http://localhost:${port}`;
    server = http.createServer(app.callback());
  }
  server.listen(port);
  if (process.env.NODE_ENV !== 'production') {
    // eslint-disable-next-line no-console
    console.log('running in DEV mode!');
  }
  createAdmin();
};
|
<gh_stars>0
package com.mycompany.smartparkingmanagement.servlets;
import com.mycompany.smartparkingmanagement.dao.Dao;
import com.mycompany.smartparkingmanagement.entities.Admin;
import com.mycompany.smartparkingmanagement.entities.LoginBean;
import com.mycompany.smartparkingmanagement.entities.Message;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
public class EditServlet extends HttpServlet {

    /**
     * Updates the logged-in user's profile (name, e-mail, password) from the
     * submitted form, persists it via {@link Dao} and redirects back to
     * Profile.jsp with a status {@link Message} in the session.
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        //get the user from the session...
        HttpSession s = request.getSession();
        try {
            //fetch all data
            String email = request.getParameter("email");
            String name = request.getParameter("name");
            String password = request.getParameter("password");
            String cpass = request.getParameter("confirmpass");
            // Guard against an absent "password" parameter: the original code
            // called password.equals(...) and threw a NullPointerException
            // when the field was missing from the request.
            if (password == null || !password.equals(cpass)) {
                Message msg = new Message("Password not matched", "error", "alert-danger");
                s.setAttribute("message", msg);
                response.sendRedirect("Profile.jsp");
            } else {
                // NOTE(review): assumes the "User" session attribute is set;
                // a direct POST without login would NPE here.
                LoginBean lg = (LoginBean) s.getAttribute("User");
                lg.setFirstName(name);
                lg.setEmailid(email);
                // SECURITY NOTE(review): the password is stored as-is; it
                // should be hashed (e.g. bcrypt) before persisting.
                lg.setPassword(password);
                Dao dao = new Dao();
                //update to db
                boolean ans = dao.updateUserDetails(lg);
                if (ans) {
                    // Typo fixed: "Succesfully" -> "Successfully".
                    Message msg = new Message("Detail Updated Successfully", "success", "alert-success");
                    s.setAttribute("message", msg);
                    response.sendRedirect("Profile.jsp");
                } else {
                    Message msg = new Message("Details Not Updated", "error", "alert-danger");
                    System.out.println("Something went Wrong");
                    s.setAttribute("message", msg);
                    response.sendRedirect("Profile.jsp");
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
            response.sendRedirect("error_page.jsp");
        }
    }
}
|
<reponame>lneveu/ppod
import React from 'react';
import '../App.scss';
import Clock from './Clock';
import Slider from './Slider';
function App() {
return (
<div className="app">
<div className="header">Perseverance Picture Of the Day</div>
<div className="container">
<Slider></Slider>
</div>
<div className="footer">
<Clock></Clock>
<div className="copyright">
<span>images from </span>
<a href="https://mars.nasa.gov/mars2020/multimedia/raw-images/" target="_blank" rel="noreferrer noopener">
Nasa Mars Perseverance Website
</a>
</div>
</div>
</div>
);
}
export default App;
|
#!/usr/bin/env sh
# Builds the VuePress documentation and force-pushes the generated site to
# the gh-pages branch of the nuxt-neo repository.
# abort on errors
#set -e
# Capture the committer identity from the surrounding git config so the docs
# commit below is attributed to the current user.
GIT_NAME=$(git config --get user.name)
GIT_EMAIL=$(git config --get user.email)
# build
cd docs || exit
yarn docs:build
cd - || exit
# navigate into the build output directory
cd docs/content/.vuepress/dist || exit
# A throwaway git repository is created inside the build output so the
# generated files can be pushed to gh-pages without touching the main
# repository's history. `push -f` overwrites the branch every deploy.
git init
git config user.name "$GIT_NAME"
git config user.email "$GIT_EMAIL"
git add -A
git commit -m 'Updated documentation'
git push -f git@github.com:ezypeeze/nuxt-neo.git master:gh-pages
cd -
|
<reponame>ashishgoyal18/vyavsaay
/**
 * Shows the loading indicator: the spinner icon plus the transparent
 * overlay that blocks interaction beneath it.
 */
function show_loader()
{
    var spinner = $("#loading_icon");
    var overlay = $("#transparent_layer");
    spinner.show();
    overlay.show();
}
/**
 * Hides the loading indicator: the spinner icon and the transparent
 * overlay.
 */
function hide_loader()
{
    var spinner = $("#loading_icon");
    var overlay = $("#transparent_layer");
    spinner.hide();
    overlay.hide();
}
/**
 * Toggles the ".filter" input that lives next to the given element and
 * moves keyboard focus to it.
 */
function show_filter(element)
{
    var filter = $(element).parent().find('.filter');
    filter.toggle();
    filter.focus();
}
/**
 * Makes read-only inputs editable after a 500 ms touch long-press, or on
 * double-click for mouse users.
 *
 * @param element selector/collection of input elements to enhance
 */
function longPressEditable(element)
{
$(element).each(function()
{
var pressTimer;
// Lifting the finger before the timeout cancels the pending unlock.
$(this).on('touchend',function()
{
clearTimeout(pressTimer);
}).on('touchstart',function()
{
var input_box=this;
// Holding for 500 ms removes the readonly attribute and focuses the field.
pressTimer = window.setTimeout(function()
{
$(input_box).removeAttr('readonly');
$(input_box).focus();
},500);
});
// Desktop fallback: double-click unlocks the field (no focus change).
$(this).dblclick(function()
{
$(this).removeAttr('readonly');
});
});
}
|
package com.company.example.hazelcast.context.scan;
import static org.hibernate.cfg.AvailableSettings.C3P0_ACQUIRE_INCREMENT;
import static org.hibernate.cfg.AvailableSettings.C3P0_MAX_SIZE;
import static org.hibernate.cfg.AvailableSettings.C3P0_MAX_STATEMENTS;
import static org.hibernate.cfg.AvailableSettings.C3P0_MIN_SIZE;
import static org.hibernate.cfg.AvailableSettings.C3P0_TIMEOUT;
import static org.hibernate.cfg.AvailableSettings.DRIVER;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_AUTO;
import static org.hibernate.cfg.AvailableSettings.PASS;
import static org.hibernate.cfg.AvailableSettings.SHOW_SQL;
import static org.hibernate.cfg.AvailableSettings.URL;
import static org.hibernate.cfg.AvailableSettings.USER;
import java.util.Properties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.ComponentScans;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.orm.hibernate5.HibernateTransactionManager;
import org.springframework.orm.hibernate5.LocalSessionFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import com.company.example.hazelcast.repository.model.Event;
import com.company.example.hazelcast.repository.model.EventToProcess;
// Spring configuration wiring Hibernate 5 with a C3P0 connection pool.
// All JDBC/pool settings are read from db.properties on the classpath.
@Configuration
@PropertySource("classpath:db.properties")
@EnableTransactionManagement
@ComponentScans(value = { @ComponentScan("com.company.example.hazelcast.repository.dao"), @ComponentScan("com.company.example.hazelcast.repository.model")})
public class PersistenceConfig {

    // Gives access to the properties loaded via @PropertySource.
    @Autowired
    private Environment env;

    /**
     * Builds the Hibernate SessionFactory: JDBC, Hibernate and C3P0 settings
     * from the environment, plus the two annotated entity classes.
     */
    @Bean
    public LocalSessionFactoryBean getSessionFactory() {
        LocalSessionFactoryBean factoryBean = new LocalSessionFactoryBean();
        Properties props = new Properties();
        // Setting JDBC properties
        props.put(DRIVER, env.getProperty("mysql.driver"));
        props.put(URL, env.getProperty("mysql.jdbcUrl"));
        props.put(USER, env.getProperty("mysql.username"));
        props.put(PASS, env.getProperty("mysql.password"));
        // Setting Hibernate properties
        props.put(SHOW_SQL, env.getProperty("hibernate.show_sql"));
        props.put(HBM2DDL_AUTO, env.getProperty("hibernate.hbm2ddl.auto"));
        // Setting C3P0 properties
        props.put(C3P0_MIN_SIZE, env.getProperty("hibernate.c3p0.min_size"));
        props.put(C3P0_MAX_SIZE, env.getProperty("hibernate.c3p0.max_size"));
        props.put(C3P0_ACQUIRE_INCREMENT, env.getProperty("hibernate.c3p0.acquire_increment"));
        props.put(C3P0_TIMEOUT, env.getProperty("hibernate.c3p0.timeout"));
        props.put(C3P0_MAX_STATEMENTS, env.getProperty("hibernate.c3p0.max_statements"));
        factoryBean.setHibernateProperties(props);
        factoryBean.setAnnotatedClasses(Event.class, EventToProcess.class);
        return factoryBean;
    }

    /**
     * Transaction manager bound to the session factory above so that
     * @Transactional methods participate in Hibernate transactions.
     */
    @Bean
    public HibernateTransactionManager getTransactionManager() {
        HibernateTransactionManager transactionManager = new HibernateTransactionManager();
        transactionManager.setSessionFactory(getSessionFactory().getObject());
        return transactionManager;
    }
}
package de.bitbrain.braingdx.physics;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.BodyDef;
import com.badlogic.gdx.physics.box2d.FixtureDef;
import com.badlogic.gdx.physics.box2d.World;
import de.bitbrain.braingdx.world.GameObject;
/**
 * Responsible for adding physics objects to the world
 *
 * @since 0.5.0
 */
public interface PhysicsManager {

   /** Creates a body from an explicit fixture definition and adds it to the world. */
   Body addBody(BodyDef bodyDef, FixtureDef fixtureDef, Object type);

   /** Creates a body of the given dimensions and adds it to the world. */
   Body addBody(BodyDef bodyDef, float width, float height, Object type);

   /** Creates a body and binds it to an existing {@link GameObject}. */
   Body attachBody(BodyDef bodyDef, FixtureDef fixtureDef, GameObject gameObject);

   /** Sets the world's gravity vector. */
   void setGravity(float x, float y);

   /** Sets the Box2D position-solver iteration count. */
   void setPositionIterations(int positionIterations);

   /** Sets the Box2D velocity-solver iteration count. */
   void setVelocityIterations(int velocityIterations);

   /** Returns the underlying Box2D {@link World}. */
   World getPhysicsWorld();

   /**
    * Sets the iteration count.
    * NOTE(review): how this relates to the position/velocity iteration
    * setters above is not visible from this file -- confirm with impl.
    */
   void setIterationCount(int count);
}
|
package com.professorvennie.bronzeage.api.tiles;
import com.professorvennie.bronzeage.api.enums.RedstoneMode;
/**
 * Created by ProfessorVennie on 12/14/2014 at 6:23 PM.
 *
 * Implemented by tiles whose behavior can be gated by a redstone signal.
 * (Redundant {@code public} modifiers removed: interface members are
 * implicitly public, so this is binary- and source-compatible.)
 */
public interface IRedstoneControlable {

    /** Returns the currently configured redstone mode. */
    RedstoneMode getRedStoneMode();

    /** Sets the redstone mode. */
    void setRedstoneMode(RedstoneMode mode);
}
|
<gh_stars>0
// Auto-generated Doxygen navigation data: maps the documentation page for
// the LayerTypeOfImpl<LayerType::SpaceToBatchNd> specialization to its
// member anchor. Do not edit by hand.
var structarmnn_1_1_layer_type_of_impl_3_01_layer_type_1_1_space_to_batch_nd_01_4 =
[
    [ "Type", "structarmnn_1_1_layer_type_of_impl_3_01_layer_type_1_1_space_to_batch_nd_01_4.xhtml#aa139eef86a0726f0b0a0b90c60a41302", null ]
];
<reponame>alterem/smartCityService<filename>scs-openapi/src/main/java/com/zhcs/entity/BrknewsDetailEntity.java
package com.zhcs.entity;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
/**
 * Request payload for a breaking-news ("brknews") detail lookup: the
 * caller's openId plus the id of the event being queried.
 */
public class BrknewsDetailEntity implements Serializable {

    private static final long serialVersionUID = 1L;

    // Identity token of the requesting client.
    @ApiModelProperty(value="openId" ,required=true)
    private String openId;

    // Event id (the Chinese annotation text "事件id" means "event id";
    // left untranslated because annotation values are runtime data).
    @ApiModelProperty(value="事件id" ,required=true)
    private Long id;

    public String getOpenId() {
        return openId;
    }

    public void setOpenId(String openId) {
        this.openId = openId;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }
}
|
class ItemsController < ApplicationController
  # skip_before_action :authorized

  # GET /items/:id
  #
  # Renders the requested item as JSON with all nested associations
  # (`include: "**"`). Returns 404 when the id is unknown; the original
  # serialized `nil` with a 200, giving clients a bare `null` body.
  def show
    item = Item.find_by(id: params[:id])
    if item
      render json: item, include: "**"
    else
      render json: { error: "Item not found" }, status: :not_found
    end
  end
end
|
#!/bin/sh -e
# Container entrypoint for an NRPE (Nagios Remote Plugin Executor) host:
# derives environment defaults, registers a disk check for mounted volumes,
# starts the NRPE daemon and babysits it until it exits.

# Defaults fall back to the container's default-route gateway when not
# provided by the environment.
export HOST_IP="${HOST_IP:-$(/sbin/ip route|awk '/default/ { print $3 }')}"
export ETCD_HOST="${ETCD_HOST:-$HOST_IP}"
export ETCD_PORT="${ETCD_PORT:-4001}"
export CHECK_YODA="${CHECK_YODA:-}"
export CHECK_YODA_HOST="${CHECK_YODA_HOST:-$HOST_IP}"
export MACHINE_ID="${MACHINE_ID:-local}"

# nrpe lives in different locations depending on the base distribution.
if [ -f "/usr/sbin/nrpe" ]; then
    NRPE_EXEC="/usr/sbin/nrpe"
else
    NRPE_EXEC="/usr/bin/nrpe"
fi

# Add disk plugin: one check_disk -p entry per /mnt/* mount (or CHECKDISKS).
CHECKDISKS="${CHECKDISKS:-$( ls -d -1 /mnt/* || echo)}"
if [ ! -z "$CHECKDISKS" ]; then
    NAGIOS_DRIVES="$(echo "$CHECKDISKS" | awk -F "[ \t\n,]+" '{for (driveCnt = 1; driveCnt <= NF; driveCnt++) printf "-p %s ",$driveCnt}')"
    echo "command[check_disk]=$NAGIOS_PLUGINS_DIR/check_disk -w 20% -c 10% $NAGIOS_DRIVES" | tee "$NAGIOS_CONF_DIR/nrpe.d/disk.cfg" > /dev/null
fi

# Start NREP Server
"$NRPE_EXEC" -c "$NAGIOS_CONF_DIR/nrpe.cfg" -d

# Wait for NRPE Daemon to exit
# NOTE(review): if ps matches more than one process, PID holds several pids
# and the wait loop below misbehaves; consider `pgrep -o` instead.
PID=$(ps -ef | grep -v grep | grep "${NRPE_EXEC}" | awk '{print $2}')
if [ ! "$PID" ]; then
    echo "Error: Unable to start nrpe daemon..."
    # exit 1
fi

# Fixed: the original used `[ -z \`grep zombie ...\` ]` with an unquoted
# command substitution, which passes multiple words to `[` (an error) as soon
# as grep matches a line containing spaces. `grep -q` expresses the intent
# directly; 2>/dev/null covers the window where /proc/$PID vanishes between
# the -d test and the grep.
while [ -d "/proc/$PID" ] && ! grep -q zombie "/proc/$PID/status" 2>/dev/null; do
    echo "NRPE: $PID (running)..."
    sleep 60s
done
echo "NRPE daemon exited. Quitting.."
// Copyright © 2021 The Things Network Foundation, The Things Industries B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commands
import (
"context"
"crypto/tls"
"strconv"
"time"
pbtypes "github.com/gogo/protobuf/types"
"go.thethings.network/lorawan-stack/v3/pkg/cluster"
"go.thethings.network/lorawan-stack/v3/pkg/errors"
ttnredis "go.thethings.network/lorawan-stack/v3/pkg/redis"
"go.thethings.network/lorawan-stack/v3/pkg/ttnpb"
"google.golang.org/grpc"
)
const defaultPaginationLimit = 1000
// NewClusterComponentConnection connects returns a new cluster instance and a connection to a specified peer.
// The connection to a cluster peer is retried specified number of times before returning an error in case
// of connection not being ready.
func NewClusterComponentConnection(ctx context.Context, config Config, delay time.Duration, maxRetries int, role ttnpb.ClusterRole) (*grpc.ClientConn, cluster.Cluster, error) {
	clusterOpts := []cluster.Option{}
	if config.Cluster.TLS {
		tlsConf := config.TLS
		// Renamed from `tls`: the original declared a local variable that
		// shadowed the crypto/tls package within this block.
		tlsConfig := &tls.Config{
			MinVersion:         tls.VersionTLS12,
			InsecureSkipVerify: tlsConf.InsecureSkipVerify,
		}
		if err := tlsConf.Client.ApplyTo(tlsConfig); err != nil {
			return nil, nil, err
		}
		clusterOpts = append(clusterOpts, cluster.WithTLSConfig(tlsConfig))
	}
	c, err := cluster.New(ctx, &config.Cluster, clusterOpts...)
	if err != nil {
		return nil, nil, err
	}
	if err := c.Join(); err != nil {
		return nil, nil, err
	}
	// Retry until the peer connection is ready or the budget is exhausted;
	// on exhaustion the last connection error is returned.
	var cc *grpc.ClientConn
	for i := 0; i < maxRetries; i++ {
		time.Sleep(delay)
		cc, err = c.GetPeerConn(ctx, role, nil)
		if err == nil {
			return cc, c, nil
		}
	}
	// NOTE(review): with maxRetries <= 0 this returns (nil, nil, nil);
	// callers are expected to pass a positive retry count.
	return nil, nil, err
}
// FetchIdentityServerApplications returns the list of all non-expired applications in the Identity Server.
// It requests pages of defaultPaginationLimit entries until an empty page is
// returned, sleeping paginationDelay between pages.
func FetchIdentityServerApplications(ctx context.Context, client ttnpb.ApplicationRegistryClient, clusterAuth grpc.CallOption, paginationDelay time.Duration) ([]*ttnpb.Application, error) {
	pageCounter := uint32(1)
	applicationList := make([]*ttnpb.Application, 0)
	for {
		res, err := client.List(ctx, &ttnpb.ListApplicationsRequest{
			Collaborator: nil,
			// Only identifiers are needed by the callers of this helper.
			FieldMask: &pbtypes.FieldMask{Paths: []string{"ids"}},
			Limit:     defaultPaginationLimit,
			Page:      pageCounter,
			// Deleted: true -- presumably includes soft-deleted applications
			// in the listing; confirm against the ttnpb API docs.
			Deleted: true,
		}, clusterAuth)
		if err != nil {
			return nil, err
		}
		applicationList = append(applicationList, res.Applications...)
		// An empty page marks the end of the listing.
		if len(res.Applications) == 0 {
			break
		}
		pageCounter++
		time.Sleep(paginationDelay)
	}
	return applicationList, nil
}
// FetchIdentityServerEndDevices returns the list of all devices in the Identity Server.
// Mirrors FetchIdentityServerApplications: pages of defaultPaginationLimit
// entries until an empty page, sleeping paginationDelay between pages.
func FetchIdentityServerEndDevices(ctx context.Context, client ttnpb.EndDeviceRegistryClient, clusterAuth grpc.CallOption, paginationDelay time.Duration) ([]*ttnpb.EndDevice, error) {
	pageCounter := uint32(1)
	deviceList := make([]*ttnpb.EndDevice, 0)
	for {
		res, err := client.List(ctx, &ttnpb.ListEndDevicesRequest{
			ApplicationIds: nil,
			// Only identifiers are needed by the callers of this helper.
			FieldMask: &pbtypes.FieldMask{Paths: []string{"ids"}},
			Limit:     defaultPaginationLimit,
			Page:      pageCounter,
		}, clusterAuth)
		if err != nil {
			return nil, err
		}
		deviceList = append(deviceList, res.EndDevices...)
		// An empty page marks the end of the listing.
		if len(res.EndDevices) == 0 {
			break
		}
		pageCounter++
		time.Sleep(paginationDelay)
	}
	return deviceList, nil
}
func setToArray(set map[string]struct{}) []string {
keys := make([]string, len(set))
i := 0
for k := range set {
keys[i] = k
i++
}
return keys
}
// schemaVersionKey returns the redis key under which the database schema
// version is stored for the given client.
func schemaVersionKey(cl *ttnredis.Client) string {
	return cl.Key("schema-version")
}
// recordSchemaVersion persists the given schema version in redis with no
// expiration. Uses the package-level logger and ctx.
func recordSchemaVersion(cl *ttnredis.Client, version int) error {
	logger.WithField("version", version).Info("Setting schema version")
	return cl.Set(ctx, schemaVersionKey(cl), version, 0).Err()
}
// getSchemaVersion reads the stored schema version from redis.
// A missing key is treated as version 0 rather than an error.
func getSchemaVersion(cl *ttnredis.Client) (int, error) {
	schemaVersionString, err := cl.Get(ctx, schemaVersionKey(cl)).Result()
	if err != nil {
		if errors.IsNotFound(ttnredis.ConvertError(err)) {
			// Fresh database: no schema version recorded yet.
			return 0, nil
		}
		return 0, err
	}
	// Stored as a decimal string; 32-bit bound matches the int conversion below.
	schemaVersion, err := strconv.ParseInt(schemaVersionString, 10, 32)
	if err != nil {
		return 0, err
	}
	logger.WithField("version", schemaVersion).Info("Existing database schema version")
	return int(schemaVersion), nil
}
|
<filename>examples/extensions-zalando/src/main/java/org/zalando/intellij/swagger/examples/extensions/zalando/validator/zally/model/LintingResponseErrorDecoder.java<gh_stars>1000+
package org.zalando.intellij.swagger.examples.extensions.zalando.validator.zally.model;
import feign.Response;
import feign.codec.ErrorDecoder;
/**
 * Feign {@link ErrorDecoder} that converts any error response from the
 * linting service into a {@code ZallyClientError} carrying the HTTP
 * reason phrase.
 */
public class LintingResponseErrorDecoder implements ErrorDecoder {
    @Override
    public Exception decode(String methodKey, Response response) {
        // methodKey is intentionally unused; only the reason is propagated.
        return new ZallyClientError(response.reason());
    }
}
|
def reverse_linked_list(head):
    """Reverse a singly linked list in place.

    Args:
        head: First node of the list (each node exposes a ``next``
            attribute), or ``None`` for an empty list.

    Returns:
        The new head (the original tail), or ``None`` if the list was empty.
    """
    prev = None
    curr = head
    while curr is not None:
        # Save the remainder before re-pointing curr.next backwards.
        # Renamed from `next` to avoid shadowing the builtin of that name.
        nxt = curr.next
        curr.next = prev
        prev = curr
        curr = nxt
    return prev
#!/bin/bash
# script to check if a given directory is empty or not
# Usage: ./script.sh <directory>

# check if one argument is given
if [ -z "$1" ]
then
    echo "Error: Please provide one argument: the name of the directory!"
    exit 1
fi

# check if the argument is really a directory
# (quoted now: the original's unquoted $1 broke on paths with spaces -- SC2086)
if [ ! -d "$1" ]
then
    echo "Error: The given argument is not a directory!"
    exit 1
fi

# check if the directory is empty: `ls -A` prints nothing for an empty dir;
# `--` stops option parsing so names starting with "-" are handled.
if [ ! "$(ls -A -- "$1")" ]
then
    echo "The given directory is empty!"
else
    echo "The given directory is not empty!"
fi
<gh_stars>1-10
/*
* Copyright 2016 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm;
import android.content.Context;
import android.os.Build;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.util.Date;
import io.realm.entities.AllTypes;
import io.realm.entities.pojo.AllTypesRealmModel;
import io.realm.entities.pojo.InvalidRealmModel;
import io.realm.entities.pojo.PojoWithRealmListOfRealmObject;
import io.realm.entities.pojo.RealmModelWithRealmListOfRealmModel;
import io.realm.entities.pojo.RealmModelWithRealmModelField;
import io.realm.entities.pojo.RealmObjectWithRealmListOfRealmModel;
import io.realm.entities.pojo.RealmObjectWithRealmModelField;
import io.realm.exceptions.RealmException;
import io.realm.rule.RunInLooperThread;
import io.realm.rule.RunTestInLooperThread;
import io.realm.rule.TestRealmConfigurationFactory;
import static io.realm.internal.test.ExtraTests.assertArrayEquals;
import static org.hamcrest.number.OrderingComparison.greaterThanOrEqualTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
// Tests API methods when using a model class implementing RealmModel instead
// of extending RealmObject.
@RunWith(AndroidJUnit4.class)
public class RealmModelTests {
private final static int TEST_DATA_SIZE = 10;
private Context context;
private Realm realm;
@Rule
public final TestRealmConfigurationFactory configFactory = new TestRealmConfigurationFactory();
@Rule
public final RunInLooperThread looperThread = new RunInLooperThread();
@Before
public void setUp() {
    // Injecting the Instrumentation instance is required
    // for your test to run with AndroidJUnitRunner.
    context = InstrumentationRegistry.getInstrumentation().getContext();
    // Each test runs against a fresh Realm from a throwaway configuration.
    RealmConfiguration realmConfig = configFactory.createConfiguration();
    realm = Realm.getInstance(realmConfig);
}
@After
public void tearDown() {
    // Close the per-test Realm; may be null if setUp failed before opening it.
    if (realm != null) {
        realm.close();
    }
}
// Clears any existing AllTypesRealmModel rows and inserts `objects` fresh
// ones (columnLong acts as the distinguishing value) in one transaction.
private void populateTestRealm(Realm realm, int objects) {
    realm.beginTransaction();
    realm.delete(AllTypesRealmModel.class);
    for (int i = 0; i < objects; ++i) {
        AllTypesRealmModel allTypes = new AllTypesRealmModel();
        allTypes.columnLong = i;
        allTypes.columnBoolean = (i % 3) == 0;
        allTypes.columnBinary = new byte[]{1, 2, 3};
        allTypes.columnDate = new Date();
        allTypes.columnDouble = Math.PI + i;
        allTypes.columnFloat = 1.234567f;
        allTypes.columnString = "test data ";
        allTypes.columnByte = 0x2A;
        realm.copyToRealm(allTypes);
    }
    realm.commitTransaction();
}
// Realm.createObject must work for classes implementing RealmModel and
// honor the primary key passed in.
@Test
public void createObject() {
    for (int i = 1; i < 43; i++) { // Using i = 0 as PK will crash subsequent createObject
        // since createObject uses default values.
        realm.beginTransaction();
        realm.createObject(AllTypesRealmModel.class, i);
        realm.commitTransaction();
    }
    // 42 objects were created with PKs 1..42.
    long size = realm.where(AllTypesRealmModel.class).count();
    assertEquals("Realm.get is returning wrong result set", 42, size);
}
// copyToRealm must persist unmanaged RealmModel instances.
@Test
public void copyToRealm() {
    populateTestRealm(realm, TEST_DATA_SIZE);
    long size = realm.where(AllTypesRealmModel.class).count();
    assertEquals("Realm.get is returning wrong result set", TEST_DATA_SIZE, size);
}
// copyFromRealm must produce an unmanaged copy whose fields (and hashCode)
// match the managed object it was copied from.
@Test
public void copyFromRealm() {
    populateTestRealm(realm, TEST_DATA_SIZE);
    // First object when sorted by columnLong (i.e. columnLong == 0).
    AllTypesRealmModel realmObject = realm.where(AllTypesRealmModel.class)
            .sort(AllTypesRealmModel.FIELD_LONG)
            .findAll()
            .first();
    AllTypesRealmModel unmanagedObject = realm.copyFromRealm(realmObject);
    assertArrayEquals(realmObject.columnBinary, unmanagedObject.columnBinary);
    assertEquals(realmObject.columnString, unmanagedObject.columnString);
    assertEquals(realmObject.columnLong, unmanagedObject.columnLong);
    assertEquals(realmObject.columnFloat, unmanagedObject.columnFloat, 0.00000000001);
    assertEquals(realmObject.columnDouble, unmanagedObject.columnDouble, 0.00000000001);
    assertEquals(realmObject.columnBoolean, unmanagedObject.columnBoolean);
    assertEquals(realmObject.columnDate, unmanagedObject.columnDate);
    assertEquals(realmObject.hashCode(), unmanagedObject.hashCode());
}
// copyToRealmOrUpdate must update the row with the same primary key instead
// of inserting a duplicate.
@Test
public void copyToRealmOrUpdate() {
    realm.executeTransaction(new Realm.Transaction() {
        @Override
        public void execute(Realm realm) {
            AllTypesRealmModel obj = new AllTypesRealmModel();
            obj.columnLong = 1;
            realm.copyToRealm(obj);
            // Same primary key (columnLong == 1): must update, not insert.
            AllTypesRealmModel obj2 = new AllTypesRealmModel();
            obj2.columnLong = 1;
            obj2.columnString = "Foo";
            realm.copyToRealmOrUpdate(obj2);
        }
    });
    assertEquals(1, realm.where(AllTypesRealmModel.class).count());
    AllTypesRealmModel obj = realm.where(AllTypesRealmModel.class).findFirst();
    assertNotNull(obj);
    assertEquals("Foo", obj.columnString);
}
// Imports objects from a JSON asset and verifies every column type survived
// the round trip. Skipped below Honeycomb.
@Test
public void createOrUpdateAllFromJson() throws IOException {
    assumeThat(Build.VERSION.SDK_INT, greaterThanOrEqualTo(Build.VERSION_CODES.HONEYCOMB));
    realm.beginTransaction();
    realm.createOrUpdateAllFromJson(AllTypesRealmModel.class, TestHelper.loadJsonFromAssets(context, "list_alltypes_primarykey.json"));
    realm.commitTransaction();
    // The asset contains entries sharing a primary key, so only one row remains.
    assertEquals(1, realm.where(AllTypesRealmModel.class).count());
    AllTypesRealmModel obj = realm.where(AllTypesRealmModel.class).findFirst();
    assertNotNull(obj);
    assertEquals("Bar", obj.columnString);
    assertEquals(2.23F, obj.columnFloat, 0.000000001);
    assertEquals(2.234D, obj.columnDouble, 0.000000001);
    assertEquals(true, obj.columnBoolean);
    assertArrayEquals(new byte[]{1, 2, 3}, obj.columnBinary);
    assertEquals(new Date(2000), obj.columnDate);
    assertEquals("Dog4", obj.columnRealmObject.getName());
    assertEquals(2, obj.columnRealmList.size());
    assertEquals("Dog5", obj.columnRealmList.get(0).getName());
}
// 'where' with field selection.
@Test
public void query() {
populateTestRealm(realm, TEST_DATA_SIZE);
// Counts rows with FIELD_DOUBLE >= 8.1415. populateTestRealm seeds the double
// column as Math.PI + i (see the dynamicRealm test below), so 5 rows match.
assertEquals(5, realm.where(AllTypesRealmModel.class).greaterThanOrEqualTo(AllTypesRealmModel.FIELD_DOUBLE, 8.1415).count());
}
// Async 'where' with field selection.
@Test
@RunTestInLooperThread
public void async_query() {
Realm realm = looperThread.getRealm();
populateTestRealm(realm, TEST_DATA_SIZE);
// distinct() on the string column; expecting a single distinct value — the
// seeded rows presumably share one string (confirm in populateTestRealm).
final RealmResults<AllTypesRealmModel> allTypesRealmModels = realm.where(AllTypesRealmModel.class).distinct(AllTypesRealmModel.FIELD_STRING).findAllAsync();
// Keep a strong reference so results + listener survive until the async
// query delivers its change notification.
looperThread.keepStrongReference(allTypesRealmModels);
allTypesRealmModels.addChangeListener(new RealmChangeListener<RealmResults<AllTypesRealmModel>>() {
@Override
public void onChange(RealmResults<AllTypesRealmModel> object) {
assertEquals(1, allTypesRealmModels.size());
looperThread.testComplete();
}
});
}
// Wraps a managed typed object in a DynamicRealmObject and checks that writes
// made through the dynamic API are visible through both APIs.
@Test
public void dynamicObject() {
populateTestRealm(realm, TEST_DATA_SIZE);
AllTypesRealmModel typedObj = realm.where(AllTypesRealmModel.class).findFirst();
assertNotNull(typedObj);
DynamicRealmObject dObj = new DynamicRealmObject(typedObj);
realm.beginTransaction();
// Dynamic write to the byte column is readable dynamically and typed.
dObj.setByte(AllTypesRealmModel.FIELD_BYTE, (byte) 42);
assertEquals(42, dObj.getLong(AllTypesRealmModel.FIELD_BYTE));
assertEquals(42, typedObj.columnByte);
// Same for binary data.
dObj.setBlob(AllTypesRealmModel.FIELD_BINARY, new byte[]{1, 2, 3});
Assert.assertArrayEquals(new byte[]{1, 2, 3}, dObj.getBlob(AllTypesRealmModel.FIELD_BINARY));
Assert.assertArrayEquals(new byte[]{1, 2, 3}, typedObj.columnBinary);
// Roll back so the shared fixture data is left untouched.
realm.cancelTransaction();
}
// Exercises a RealmModel class through the DynamicRealm API: create, query,
// delete, then verify the originally seeded rows are intact.
@Test
@RunTestInLooperThread
public void dynamicRealm() {
populateTestRealm(looperThread.getRealm(), TEST_DATA_SIZE);
final DynamicRealm dynamicRealm = DynamicRealm.getInstance(looperThread.getConfiguration());
dynamicRealm.beginTransaction();
// Add one extra object whose long column is 42.
DynamicRealmObject dog = dynamicRealm.createObject(AllTypesRealmModel.CLASS_NAME, 42);
assertEquals(42, dog.getLong(AllTypesRealmModel.FIELD_LONG));
dynamicRealm.commitTransaction();
// findAll() is synchronous; load() on an already-loaded result is a no-op.
RealmResults<DynamicRealmObject> allAsync = dynamicRealm.where(AllTypesRealmModel.CLASS_NAME).equalTo(AllTypesRealmModel.FIELD_LONG, 42).findAll();
allAsync.load();
assertTrue(allAsync.isLoaded());
assertEquals(1, allAsync.size());
// Delete the object created above; only seeded rows should remain.
dynamicRealm.beginTransaction();
allAsync.deleteAllFromRealm();
dynamicRealm.commitTransaction();
RealmResults<DynamicRealmObject> results = dynamicRealm.where(AllTypesRealmModel.CLASS_NAME).findAll();
assertEquals(TEST_DATA_SIZE, results.size());
// Spot-check the seeding pattern: FIELD_DOUBLE = PI + i, FIELD_BOOLEAN = (i % 3 == 0).
for (int i = 0; i < TEST_DATA_SIZE; i++) {
assertEquals(Math.PI + i, results.get(i).getDouble(AllTypesRealmModel.FIELD_DOUBLE), 0.0000001);
assertEquals((i % 3) == 0, results.get(i).getBoolean(AllTypesRealmModel.FIELD_BOOLEAN));
}
dynamicRealm.close();
looperThread.testComplete();
}
// Exception expected when using a model class that is not part of the schema.
// A valid model needs to implement the RealmModel interface and annotate the class with @RealmClass.
// We expect a runtime exception: 'InvalidRealmModel is not part of the schema for this Realm.'.
@Test(expected = RealmException.class)
public void invalidModelDefinition() {
realm.beginTransaction();
realm.createObject(InvalidRealmModel.class);
realm.commitTransaction();
}
// Tests the behaviour of a RealmModel, containing a RealmList
// of other RealmModel, in managed and unmanaged mode.
@Test
public void realmModelWithRealmListOfRealmModel() {
// Build an unmanaged list of 10 models with columnLong = 0..9.
RealmList<AllTypesRealmModel> allTypesRealmModels = new RealmList<AllTypesRealmModel>();
AllTypesRealmModel allTypePojo;
for (int i = 0; i < 10; i++) {
allTypePojo = new AllTypesRealmModel();
allTypePojo.columnLong = i;
allTypesRealmModels.add(allTypePojo);
}
// Unmanaged RealmList supports move(): element at index 1 ends up at index 0.
AllTypesRealmModel pojo1 = allTypesRealmModels.get(1);
assertEquals(1, pojo1.columnLong);
allTypesRealmModels.move(1, 0);
assertEquals(0, allTypesRealmModels.indexOf(pojo1));
RealmModelWithRealmListOfRealmModel model = new RealmModelWithRealmListOfRealmModel();
model.setColumnRealmList(allTypesRealmModels);
realm.beginTransaction();
realm.copyToRealm(model);
realm.commitTransaction();
// The managed copy keeps the full, reordered list.
RealmResults<RealmModelWithRealmListOfRealmModel> all = realm.where(RealmModelWithRealmListOfRealmModel.class).findAll();
assertEquals(1, all.size());
assertEquals(10, all.first().getColumnRealmList().size());
assertEquals(1, all.first().getColumnRealmList().first().columnLong);
}
// Tests the behaviour of a RealmModel, containing a RealmList
// of RealmObject, in managed and unmanaged mode.
@Test
public void realmModelWithRealmListOfRealmObject() {
// Build an unmanaged list of 10 objects with columnLong = 0..9.
RealmList<AllTypes> allTypes = new RealmList<AllTypes>();
AllTypes allType;
for (int i = 0; i < 10; i++) {
allType = new AllTypes();
allType.setColumnLong(i);
allTypes.add(allType);
}
// Unmanaged RealmList supports move(): element at index 1 ends up at index 0.
AllTypes pojo1 = allTypes.get(1);
assertEquals(1, pojo1.getColumnLong());
allTypes.move(1, 0);
assertEquals(0, allTypes.indexOf(pojo1));
PojoWithRealmListOfRealmObject model = new PojoWithRealmListOfRealmObject();
model.setColumnRealmList(allTypes);
realm.beginTransaction();
realm.copyToRealm(model);
realm.commitTransaction();
// The managed copy keeps the full, reordered list.
RealmResults<PojoWithRealmListOfRealmObject> all = realm.where(PojoWithRealmListOfRealmObject.class).findAll();
assertEquals(1, all.size());
assertEquals(10, all.first().getColumnRealmList().size());
assertEquals(1, all.first().getColumnRealmList().first().getColumnLong());
}
// Tests the behaviour of a RealmObject, containing a RealmList
// of RealmModel, in managed and unmanaged mode.
@Test
public void realmObjectWithRealmListOfRealmModel() {
// Build an unmanaged list of 10 models with columnLong = 0..9.
RealmList<AllTypesRealmModel> allTypesRealmModel = new RealmList<AllTypesRealmModel>();
AllTypesRealmModel allTypePojo;
for (int i = 0; i < 10; i++) {
allTypePojo = new AllTypesRealmModel();
allTypePojo.columnLong = i;
allTypesRealmModel.add(allTypePojo);
}
// Unmanaged RealmList supports move(): element at index 1 ends up at index 0.
AllTypesRealmModel pojo1 = allTypesRealmModel.get(1);
assertEquals(1, pojo1.columnLong);
allTypesRealmModel.move(1, 0);
assertEquals(0, allTypesRealmModel.indexOf(pojo1));
RealmObjectWithRealmListOfRealmModel model = new RealmObjectWithRealmListOfRealmModel();
model.setColumnRealmList(allTypesRealmModel);
realm.beginTransaction();
realm.copyToRealm(model);
realm.commitTransaction();
// The managed copy keeps the full, reordered list.
RealmResults<RealmObjectWithRealmListOfRealmModel> all = realm.where(RealmObjectWithRealmListOfRealmModel.class).findAll();
assertEquals(1, all.size());
assertEquals(10, all.first().getColumnRealmList().size());
assertEquals(1, all.first().getColumnRealmList().first().columnLong);
}
// Tests the behaviour of a RealmModel containing a RealmModel field:
// the nested model must survive a round trip through copyToRealm().
@Test
public void realmModelWithRealmModelField() {
// Unmanaged nested model with a recognizable value.
AllTypesRealmModel nested = new AllTypesRealmModel();
nested.columnLong = 42;
RealmModelWithRealmModelField container = new RealmModelWithRealmModelField();
container.setAllTypesRealmModel(nested);
// Persist the container (and, transitively, the nested model).
realm.beginTransaction();
realm.copyToRealm(container);
realm.commitTransaction();
// Exactly one container exists and the nested value round-tripped.
RealmResults<RealmModelWithRealmModelField> results = realm.where(RealmModelWithRealmModelField.class).findAll();
assertEquals(1, results.size());
assertEquals(42, results.first().getAllTypesRealmModel().columnLong);
}
// Tests the behaviour of a RealmObject containing a RealmModel field:
// the nested model must survive a round trip through copyToRealm().
@Test
public void realmObjectWithRealmModelField() {
// Unmanaged nested model with a recognizable value.
AllTypesRealmModel nested = new AllTypesRealmModel();
nested.columnLong = 42;
RealmObjectWithRealmModelField container = new RealmObjectWithRealmModelField();
container.setAllTypesRealmModel(nested);
// Persist the container (and, transitively, the nested model).
realm.beginTransaction();
realm.copyToRealm(container);
realm.commitTransaction();
// Exactly one container exists and the nested value round-tripped.
RealmResults<RealmObjectWithRealmModelField> results = realm.where(RealmObjectWithRealmModelField.class).findAll();
assertEquals(1, results.size());
assertEquals(42, results.first().getAllTypesRealmModel().columnLong);
}
}
|
<gh_stars>0
package auth
import (
"net/http"
"github.com/gin-gonic/gin"
"github.com/yerzhan-karatay/survey-webapp-backend/errors"
"github.com/yerzhan-karatay/survey-webapp-backend/models"
)
// MakeHTTPHandler mounts auth services to gin handler.
// POST /api/login binds the JSON credentials, delegates to the Service and
// answers with a TokenResponse on success; failures are pushed onto the gin
// error chain (ErrBadRequest for malformed input, ErrNotFound for bad logins).
func MakeHTTPHandler(r *gin.Engine, s Service) *gin.Engine {
	r.POST("/api/login", func(ctx *gin.Context) {
		var creds models.AuthCredentials
		if err := ctx.ShouldBindJSON(&creds); err != nil {
			ctx.Error(ErrBadRequest)
			return
		}
		token, err := s.Login(ctx, creds)
		if err != nil {
			ctx.Error(ErrNotFound)
			return
		}
		ctx.JSON(http.StatusOK, TokenResponse{Token: token})
	})
	return r
}
var (
	// ErrBadRequest means the login payload failed JSON binding.
	// Uses http.StatusBadRequest for consistency with ErrNotFound below
	// (was a bare 400 literal).
	ErrBadRequest = errors.NewHTTPError(http.StatusBadRequest, "Bad request")
	// ErrNotFound means the user was not found in the db. The message is
	// deliberately vague so it does not reveal whether the email exists.
	ErrNotFound = errors.NewHTTPError(http.StatusNotFound, "Email or password are incorrect")
)
|
<filename>src/TextareaMarkdownEditor.tsx
import classNames from 'classnames';
import * as React from 'react';
import { RefObject } from 'react';
import EnhancedTextarea from 'react-enhanced-textarea';
import EditContext from './EditorContext';
import EditorMenu from './EditorMenu';
import { IMarkerGroup } from './type';
// Props accepted by TextareaMarkdownEditor.
export interface ITextareaMarkdownEditor {
id?: string; // id of the outer container div
textareaId?: string; // id forwarded to the underlying textarea
className?: string;
viewerClassName?: string; // extra class for the rendered-markdown viewer
viewerStyle?: object;
placeholder?: string;
style?: object;
textareaStyle?: object;
rows?: number; // textarea rows; defaults to 5
defaultValue?: string; // initial value (uncontrolled usage)
value?: string; // current value (controlled usage; takes precedence over defaultValue)
autoFocus?: boolean;
readOnly?: boolean; // when true the component starts in viewer mode
onChange?: (value: string) => {};
onKeyDown?: (event: React.KeyboardEvent) => {};
onKeyPress?: (event: React.KeyboardEvent) => {};
doParse: (text: string) => string; // markdown -> HTML; output is injected via dangerouslySetInnerHTML, so it must be trusted/sanitized
language?: string; // menu language; defaults to 'en'
markers?: IMarkerGroup[]; // toolbar marker definitions for EditorMenu
onCopy?: (event: React.ClipboardEvent) => void;
onCopyCapture?: (event: React.ClipboardEvent) => void;
onPaste?: (event: React.ClipboardEvent) => void;
onPasteCapture?: (event: React.ClipboardEvent) => void;
}
// Internal component state.
interface ITextareaMarkdownEditorState {
edit: boolean; // true -> textarea shown; false -> parsed-HTML viewer shown
lineMarkers: string[]; // markers registered via registerLineMarker(), forwarded to EnhancedTextarea
value?: string; // latest text when used uncontrolled
}
// Markdown editor built on react-enhanced-textarea. Two modes: edit (textarea
// plus marker menu) and view (HTML produced by props.doParse). The mark*
// helpers are exposed to menu items through EditContext.
class TextareaMarkdownEditor extends React.Component<ITextareaMarkdownEditor, ITextareaMarkdownEditorState> {
public static defaultProps = {
language: 'en',
readOnly: false,
rows: 5,
};
// Ref to the underlying EnhancedTextarea; only set while in edit mode.
private textareaRef: RefObject<EnhancedTextarea>;
constructor(props: ITextareaMarkdownEditor) {
super(props);
this.textareaRef = React.createRef();
this.state = {
edit: !props.readOnly, // read-only components start in viewer mode
lineMarkers: [],
value: props.defaultValue,
};
// Bind once so the same function identities can be passed through context.
this.toggleEdit = this.toggleEdit.bind(this);
this.onChange = this.onChange.bind(this);
this.focus = this.focus.bind(this);
this.mark = this.mark.bind(this);
this.markLine = this.markLine.bind(this);
this.registerLineMarker = this.registerLineMarker.bind(this);
this.markTemplate = this.markTemplate.bind(this);
}
// Focuses the textarea. Assumes edit mode (ref is non-null) — the `!`
// assertion will throw in viewer mode.
public focus() {
this.textareaRef.current!.focus();
}
// Appends content to the textarea via EnhancedTextarea.
public append(content: string) {
this.textareaRef.current!.append(content);
}
// Toggles a prefix/suffix marker pair around the current selection;
// with multipleLine the multi-line variant of the toggle is used.
public mark(prefix: string, suffix: string, defaultText: string, multipleLine?: boolean) {
if (multipleLine) {
this.textareaRef.current!.toggleMultipleLineMarker({ prefix, suffix, defaultText });
} else {
this.textareaRef.current!.toggleMarker({ prefix, suffix, defaultText });
}
}
// Toggles a line-leading marker on the current line.
public markLine(marker: string) {
this.textareaRef.current!.toggleLineMarker(marker);
}
// Records a line marker (deduplicated) so it can be passed to EnhancedTextarea.
public registerLineMarker(marker: string) {
const index = this.state.lineMarkers.indexOf(marker);
if (index < 0) {
this.setState({ ...this.state, lineMarkers: [...this.state.lineMarkers, marker] });
}
}
// Toggles a whole template snippet (single- or multi-line variant).
public markTemplate(template: string, multipleLine?: boolean) {
if (multipleLine) {
this.textareaRef.current!.toggleMultipleLineTemplate(template);
} else {
this.textareaRef.current!.toggleTemplate(template);
}
}
// Renders the menu plus either the textarea (edit mode) or the parsed HTML
// (view mode). NOTE: doParse output goes into dangerouslySetInnerHTML.
public render() {
const { readOnly = false } = this.props;
return (
<div id={this.props.id} className={classNames('tme-container', this.props.className)} style={this.props.style}>
<EditContext.Provider
value={{
focus: this.focus,
mark: this.mark,
markLine: this.markLine,
registerLineMarker: this.registerLineMarker,
template: this.markTemplate,
}}
>
<EditorMenu
markers={this.props.markers}
readOnly={readOnly}
language={this.props.language!}
isEditing={this.state.edit}
toggleEdit={this.toggleEdit}
/>
{this.state.edit ? (
<EnhancedTextarea
id={this.props.textareaId}
className="tme-textarea"
ref={this.textareaRef}
rows={this.props.rows}
style={this.props.textareaStyle}
autoFocus={this.props.autoFocus}
defaultValue={this.props.value ? undefined : this.state.value}
value={this.props.value}
onChange={this.onChange}
onKeyDown={this.props.onKeyDown}
onKeyPress={this.props.onKeyPress}
lineMarkers={this.state.lineMarkers}
onPaste={this.props.onPaste}
onPasteCapture={this.props.onPasteCapture}
onCopy={this.props.onCopy}
onCopyCapture={this.props.onCopyCapture}
placeholder={this.props.placeholder}
/>
) : (
<div
className={classNames('tme-viewer', this.props.viewerClassName)}
style={this.props.viewerStyle}
dangerouslySetInnerHTML={{
__html: this.state.value ? this.props.doParse(this.state.value) : '',
}}
/>
)}
</EditContext.Provider>
</div>
);
}
// Switches between edit and view modes (wired to the menu's toggle button).
private toggleEdit() {
this.setState({ ...this.state, edit: !this.state.edit });
}
// Tracks the latest text and forwards it to the onChange prop, if any.
private onChange(value: string) {
this.setState({ ...this.state, value });
if (this.props.onChange) {
this.props.onChange(value);
}
}
}
// Default export: the editor component.
export default TextareaMarkdownEditor;
|
<gh_stars>0
// Markup for the sign-in page as a template-literal string (presumably
// injected into the DOM via innerHTML by the page router — confirm caller).
// The #required-mssg / #confirm-mssg items look like validation messages
// toggled elsewhere.
let signinPage = `
<section id="signin-page">
<h2>
Sign In!
</h2>
<div class="signin-form">
<form name="sign-in" id="signInForm">
<ul class="form-signin">
<li>
<label>Email<span class="required">*</span>:</label>
<input type="email" name="email" placeholder="Your Email" />
</li>
<li>
<label>Password<span class="required">*</span>:</label>
<input type="password" name="password" placeholder="Password" />
</li>
<li>
<input type="submit" value="Submit" />
</li>
<li id="required-mssg">
<span class="required">*All fields must be filled!</span>
</li>
<li id="confirm-mssg">
<span class="required"
>Please check if your email and password is correct.</span
>
</li>
</ul>
</form>
</div>
</section>
`;
|
#!/bin/bash
# Build the mdBook and force-push the generated site to the gh-pages branch
# via a temporary git worktree.
set -e

# Fix: quote $0 and the substitutions so paths with spaces don't break.
SELF=$(dirname "$(readlink -f "$0")")
tmp="tmp-git-deploy"

cd "$SELF"

echo ":: Cleaning previous build"
rm -rf ./"$tmp"

echo ":: Building"
# Drop any stale worktree registration before re-adding it.
git worktree remove "$tmp" &>/dev/null || true
git worktree add "$tmp" gh-pages
mdbook build

echo ":: Copying generated files"
cp -rp book/* "$tmp"/

echo ":: Pushing to gh-pages branch"
cd "$tmp"
git add -A
# '|| true': an empty commit (no changes) must not abort the script under set -e.
git commit -m "specification updated on $(date)" || true
git push origin gh-pages -f
cd -
|
package models.Entity;
import controllers.Amphibious;
import controllers.Arial;
import controllers.Locomotion;
import controllers.Terrestrial;
import models.Equipment.Equipment;
import models.Occupation.*;
import models.StatContainer.*;
import models.Inventory.*;
import utilities.Direction.Direction;
import utilities.KeyCommand.KeyCommand;
import utilities.KeyCommand.MovementKeyCommand;
import utilities.KeyCommand.SkillKeyCommand;
import utilities.Location.Location;
import models.Map.Map;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.util.HashMap;
import java.util.Set;
// The player-controlled entity. Holds the key-code -> command bindings used
// to translate keyboard input into movement and skill commands.
public class Avatar extends Entity {
// Key code (KeyEvent.VK_*) -> command; built lazily by setKeyCommand().
private HashMap<Integer,KeyCommand> keyMap;
// Creates an avatar at the origin, facing south, with stats derived from
// the given occupation and level.
public Avatar(int level, Occupation occupation) {
setName("Avatar");
setLocation(new Location(0,0,0));
setDirection(utilities.Direction.Direction.South);
setOccupation(occupation);
setStats(new AvatarStats(occupationModifier(), level));
setInventory(new Inventory(25));
setCurrentState(States.ATTACK);
setEquipment(new Equipment());
}
// NOTE(review): always loads the fireball sprite — looks like a placeholder
// for the avatar image; confirm intended asset.
public Image getImage(){
Image image = new ImageIcon("res/fireball.png").getImage();
return image;
}
// Builds the avatar's key bindings for the given map. Movement commands use
// terrestrial locomotion.
public void setKeyCommand(Map map){
keyMap = new HashMap<>();
Locomotion locomotion = new Terrestrial(this, map);
// Movement on the digit keys (numpad-style layout: 7/8/9, 1/2/3).
KeyCommand moveNorth = new MovementKeyCommand(map, this, Direction.North, locomotion);
keyMap.put(KeyEvent.VK_8, moveNorth);
KeyCommand moveNorthWest = new MovementKeyCommand(map, this, Direction.NorthWest, locomotion);
keyMap.put(KeyEvent.VK_7, moveNorthWest);
KeyCommand moveSouthWest = new MovementKeyCommand(map, this, Direction.SouthWest, locomotion);
keyMap.put(KeyEvent.VK_1, moveSouthWest);
KeyCommand moveSouth = new MovementKeyCommand(map, this, Direction.South, locomotion);
keyMap.put(KeyEvent.VK_2, moveSouth);
KeyCommand moveSouthEast = new MovementKeyCommand(map, this, Direction.SouthEast, locomotion);
keyMap.put(KeyEvent.VK_3, moveSouthEast);
KeyCommand moveNorthEast = new MovementKeyCommand(map, this, Direction.NorthEast, locomotion);
keyMap.put(KeyEvent.VK_9, moveNorthEast);
// Vertical movement.
KeyCommand moveUp = new MovementKeyCommand(map, this, Direction.Up, locomotion);
keyMap.put(KeyEvent.VK_SPACE, moveUp);
KeyCommand moveDown = new MovementKeyCommand(map, this, Direction.Down, locomotion);
keyMap.put(KeyEvent.VK_ENTER, moveDown);
// Letter-key aliases (Q/W/E, A/S/D) for the same directions.
moveNorthWest = new MovementKeyCommand(map, this, Direction.NorthWest, locomotion);
keyMap.put(KeyEvent.VK_Q, moveNorthWest);
moveSouthWest = new MovementKeyCommand(map, this, Direction.SouthWest, locomotion);
keyMap.put(KeyEvent.VK_A, moveSouthWest);
moveSouth = new MovementKeyCommand(map, this, Direction.South, locomotion);
keyMap.put(KeyEvent.VK_S, moveSouth);
moveSouthEast = new MovementKeyCommand(map, this, Direction.SouthEast, locomotion);
keyMap.put(KeyEvent.VK_D, moveSouthEast);
moveNorthEast = new MovementKeyCommand(map, this, Direction.NorthEast, locomotion);
keyMap.put(KeyEvent.VK_E, moveNorthEast);
moveNorth = new MovementKeyCommand(map, this, Direction.North, locomotion);
keyMap.put(KeyEvent.VK_W, moveNorth);
// NOTE(review): VK_1, VK_2 and VK_3 were bound to movement above;
// HashMap.put replaces those entries, so the skill bindings below silently
// disable the digit-key movement commands. Confirm whether this is intended.
KeyCommand skill1 = new SkillKeyCommand(map, this, 0);
keyMap.put(KeyEvent.VK_1, skill1);
KeyCommand skill2 = new SkillKeyCommand(map, this, 1);
keyMap.put(KeyEvent.VK_2, skill2);
KeyCommand skill3 = new SkillKeyCommand(map, this, 2);
keyMap.put(KeyEvent.VK_3, skill3);
KeyCommand skill4 = new SkillKeyCommand(map, this, 3);
keyMap.put(KeyEvent.VK_4, skill4);
}
// Returns the full key binding map (null until setKeyCommand() is called).
public HashMap<Integer,KeyCommand> getKeyMap(){
return keyMap;
}
// Returns the set of bound key codes.
public Set<Integer> getKeys(){
return keyMap.keySet();
}
}
|
#!/bin/sh
# Container entry point: ensure a config file exists, apply overrides passed
# in via environment variables, then run cron in the foreground.
CFG=/config/configFile.cfg

# This copies the sample config if one is not present in the config dir.
if [ ! -f "$CFG" ]; then
    echo 'No config file, providing sample'
    cp /root/Packt-Publishing-Free-Learning/src/configFileTemplate.cfg "$CFG"
fi

# If environment arguments have been provided, switch the values in the sample
# config to these. NOTE(review): values containing '/', '&' or other sed
# metacharacters would break these substitutions — acceptable here? confirm.
if [ -n "$PACKT_EMAIL" ]; then
    echo 'ENV PACKT_EMAIL provided'
    sed -i "s/email=.*/email=${PACKT_EMAIL}/" "$CFG"
else
    # fixed: message previously said 'PACKTEMAIL'
    echo 'ENV PACKT_EMAIL not set'
fi

if [ -n "$PACKT_PASSWORD" ]; then
    echo 'ENV PACKT_PASSWORD provided'
    sed -i "s/password=.*/password=${PACKT_PASSWORD}/" "$CFG"
else
    # fixed: message previously referred to the email variable
    echo 'ENV PACKT_PASSWORD not set'
fi

if [ -n "$PACKT_DOWNLOAD_FORMATS" ]; then
    echo "ENV PACKT_DOWNLOAD_FORMATS provided as '$PACKT_DOWNLOAD_FORMATS'"
    sed -i "s/download_formats:.*/download_formats: ${PACKT_DOWNLOAD_FORMATS}/" "$CFG"
else
    echo 'ENV PACKT_DOWNLOAD_FORMATS not set'
fi

if [ -n "$PACKT_DOWNLOAD_BOOK_TITLES" ]; then
    echo "ENV PACKT_DOWNLOAD_BOOK_TITLES provided as '$PACKT_DOWNLOAD_BOOK_TITLES'"
    sed -i "s/download_book_titles:.*/download_book_titles: ${PACKT_DOWNLOAD_BOOK_TITLES}/" "$CFG"
else
    echo 'ENV PACKT_DOWNLOAD_BOOK_TITLES not set'
    # No titles requested: clear any value left over in the config.
    sed -i "s/download_book_titles:.*/download_book_titles:/" "$CFG"
fi

if [ -n "$PACKT_ANTICAPTCHA_KEY" ]; then
    echo 'ENV PACKT_ANTICAPTCHA_KEY provided'
    sed -i "s/key:.*/key: ${PACKT_ANTICAPTCHA_KEY}/" "$CFG"
else
    echo 'ENV PACKT_ANTICAPTCHA_KEY not set'
fi

# Point download and metadata-log paths at the mounted /data volume.
echo 'Replacing path with /data'
sed -i "s@download_folder_path:.*@download_folder_path: /data@" "$CFG"
echo 'Set logfile path to /data'
sed -i "s@ebook_extra_info_log_file_path:.*@ebook_extra_info_log_file_path: /data/eBookMetadata.log@" "$CFG"

echo 'Start crond'
crond -f
|
#!/bin/bash
# Run one dieharder RNG test: -d 205 selects the test, -g 12 selects the
# generator under test, and -S fixes the seed so the run is reproducible
# (see `dieharder -l` and `dieharder -g -1` for the test/generator listings).
dieharder -d 205 -g 12 -S 4127041140
|
<filename>7-assets/past-student-repos/Lambda-School-master/Week 7/Sprint-Challenge-Single-Page-Apps/src/components/CharacterCard.js<gh_stars>0
import React from "react";
export default function CharacterCard(props) {
return (
<div className="char-container">
<img src={props.info.image} />
<h3>{props.info.name}</h3>
<h5>
{props.info.species} - {props.info.status}
</h5>
<p>Location: {props.info.location.name}</p>
<p>Origin: {props.info.origin.name}</p>
</div>
);
}
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# graphd regression test: three wiki versions of one concept, each linked to
# an author with a "fullname" attribute; the read must return each version's
# value together with the author's fullname (via the $fullname binding).
B=$(basename "$0" .sh)
cd "$(dirname "$0")" || exit 1
# rungraphd provides the rungraphd function and the $D database directory.
source ./rungraphd
# Start from a clean database, run the write/read pair, then clean up again.
rm -rf "$D"
rungraphd -d"${D}" -bty <<-'EOF'
write ("concept" value="c1" (<- "is-edit-of" ("wikiversion" value="A" (<- "is-author-of" ("person" (-> "attribute" name="fullname" value="Afirst Alast"))))) (<- "is-edit-of" ("wikiversion" value="B" (<- "is-author-of" ("person" (-> "attribute" name="fullname" value="Bfirst Blast"))))) (<- "is-edit-of" ("wikiversion" value="C" (<- "is-author-of" ("person" (-> "attribute" name="fullname" value="Cfirst Clast"))))))
read (value="c1" result=contents (<- "is-edit-of" result=((contents)) ("wikiversion" result=(value $fullname) (<- "is-author-of" ("person" (-> "attribute" name="fullname" $fullname=value))))))
EOF
rm -rf "$D"
|
#!/usr/bin/env bash
# Create and activate a fresh virtual environment with `source virtualenv.sh`.
rm -rf "${OUTPUT_DIR}/venv"
# Try to use 'venv' if it is available, then fallback to 'virtualenv' since some systems provide 'venv' although it is non-functional.
# venv is preferred unless ANSIBLE_TEST_PREFER_VENV is unset, the target Python is 2.x, or the venv creation itself fails.
if [ -z "${ANSIBLE_TEST_PREFER_VENV:-}" ] || [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^2\. ]] || ! "${ANSIBLE_TEST_PYTHON_INTERPRETER}" -m venv --system-site-packages "${OUTPUT_DIR}/venv" > /dev/null 2>&1; then
rm -rf "${OUTPUT_DIR}/venv"
"${ANSIBLE_TEST_PYTHON_INTERPRETER}" -m virtualenv --system-site-packages --python "${ANSIBLE_TEST_PYTHON_INTERPRETER}" "${OUTPUT_DIR}/venv"
fi
# Relax nounset/xtrace while sourcing 'activate' (activate scripts are not
# guaranteed to be nounset-clean), then restore them.
set +ux
source "${OUTPUT_DIR}/venv/bin/activate"
set -ux
|
<gh_stars>0
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.development.constructs;
import ideal.library.elements.*;
import javax.annotation.Nullable;
import ideal.runtime.elements.*;
import ideal.development.elements.*;
// AST construct for a type declaration: annotations, the declaration kind,
// a name, an optional parameter list and the member body.
public class type_declaration_construct extends base_construct {
public final readonly_list<annotation_construct> annotations;
public final kind kind;
public final action_name name;
// Null when the declaration has no parameter list; never an empty list
// (enforced by the constructor assert).
public final @Nullable list_construct parameters;
public final readonly_list<construct> body;
public type_declaration_construct(readonly_list<annotation_construct> annotations,
kind kind,
action_name name,
@Nullable list_construct parameters,
readonly_list<construct> body,
position pos) {
super(pos);
this.annotations = annotations;
this.kind = kind;
this.name = name;
this.parameters = parameters;
this.body = body;
// TODO: we should signal error instead.
assert parameters == null || !parameters.elements.is_empty();
assert body != null;
}
// True when an explicit parameter list was supplied.
public boolean has_parameters() {
return parameters != null;
}
// Children in source order: annotations, parameters (if any), then the body.
public readonly_list<construct> children() {
list<construct> result = new base_list<construct>();
do_append_all(result, annotations);
if (parameters != null) {
result.append(parameters);
}
// body is asserted non-null in the constructor; this guard is defensive
// (asserts may be disabled at runtime).
if (body != null) {
result.append_all(body);
}
return result;
}
@Override
public string to_string() {
return utilities.describe(this, name);
}
}
|
/*
*
* Original Author: <NAME>
* mail: <EMAIL>
* https://github.com/faisalsami/odoo-xmlrpc
*
* Refactored by: <NAME>
* mail: <EMAIL>
* https://github.com/mbb3-mitch/odoo-xmlrpc-promise
*
*/
const xmlrpc = require('xmlrpc');
const { promisify } = require('util');
class Odoo {
constructor(config = {}) {
const url = new URL(config.url);
this.host = url.hostname;
this.port = config.port || url.port;
this.db = config.db;
this.username = config.username;
this.password = <PASSWORD>;
this.uid = 0;
this.secure = url.protocol === 'https:';
}
connect(callback) {
const clientOptions = {
host: this.host,
port: this.port,
path: '/xmlrpc/2/common',
};
if (this.secure == false) {
this.client = xmlrpc.createClient(clientOptions);
} else {
this.client = xmlrpc.createSecureClient(clientOptions);
}
const params = [this.db, this.username, this.password, {}];
this.client.methodCall('authenticate', params, (error, value) => {
if (error) {
return callback(error, null);
}
if (!value) {
return callback({ message: 'No UID returned from authentication.' }, null);
}
this.uid = value;
return callback(null, value);
});
}
async promise_connect() {
const clientOptions = {
host: this.host,
port: this.port,
path: '/xmlrpc/2/common',
};
if (this.secure == false) {
this.client = xmlrpc.createClient(clientOptions);
} else {
this.client = xmlrpc.createSecureClient(clientOptions);
}
this.client.methodCallPromise = promisify(this.client.methodCall);
const params = [this.db, this.username, this.password, {}];
try {
const value = await this.client.methodCallPromise('authenticate', params);
this.uid = value;
return this.uid;
} catch (err) {
throw err;
}
}
execute_kw(model, method, params, callback) {
const clientOptions = {
host: this.host,
port: this.port,
path: '/xmlrpc/2/object',
};
if (this.secure == false) {
this.client = xmlrpc.createClient(clientOptions);
} else {
this.client = xmlrpc.createSecureClient(clientOptions);
}
const fparams = [this.db, this.uid, this.password, model, method];
params.forEach((param) => {
fparams.push(param);
});
this.client.methodCall('execute_kw', fparams, (error, value) => {
if (error) {
return callback(error, null);
}
return callback(null, value);
});
}
async promise_execute_kw(model, method, positionalParams = [], keyParams = {}) {
const clientOptions = {
host: this.host,
port: this.port,
path: '/xmlrpc/2/object',
};
if (this.secure === false) {
this.client = xmlrpc.createClient(clientOptions);
} else {
this.client = xmlrpc.createSecureClient(clientOptions);
}
this.client.methodCallPromise = promisify(this.client.methodCall);
return this.client.methodCallPromise('execute_kw', [
this.db,
this.uid,
this.password,
model,
method,
positionalParams,
keyParams,
]);
}
}
module.exports = Odoo;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.