text stringlengths 1 1.05M |
|---|
<gh_stars>1-10
#ifndef PANEL_HH
#define PANEL_HH
#include <QWidget>
// Forward declaration of the UI class generated by Qt's uic from the .ui file.
namespace Ui {
class Panel;
}
// Simple QWidget-based panel whose layout is defined in Qt Designer (Ui::Panel).
class Panel : public QWidget
{
Q_OBJECT
public:
// parent: optional owning widget (Qt parent/child ownership applies).
explicit Panel(QWidget *parent = 0);
~Panel();
private:
Ui::Panel *ui; // created in the constructor, released in the destructor -- TODO confirm in the .cpp
};
#endif // PANEL_HH
|
#!/bin/bash
# Copyright 2020-2022 Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted for any purpose (including commercial purposes)
# provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or materials provided with the distribution.
#
# 3. In addition, redistributions of modified forms of the source or binary
# code must carry prominent notices stating that the original code was
# changed and the date of the change.
#
# 4. All publications or advertising materials mentioning features or use of
# this software are asked, but not required, to acknowledge that it was
# developed by Intel Corporation and credit the contributors.
#
# 5. Neither the name of Intel Corporation, nor the name of any Contributor
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
set -eux
# shellcheck disable=SC2153
# Split the (newline-separated) tag list into an array, one test tag per element.
mapfile -t TEST_TAG_ARR <<< "$TEST_TAG_ARG"
# Choose scratch and log locations depending on whether we test installed RPMs
# or a source-tree build rooted at $DAOS_BASE.
if $TEST_RPMS; then
rm -rf "$PWD"/install/tmp
mkdir -p "$PWD"/install/tmp
# set the shared dir
# TODO: remove the need for a shared dir by copying needed files to
# the test nodes
export DAOS_TEST_SHARED_DIR=${DAOS_TEST_SHARED_DIR:-$PWD/install/tmp}
logs_prefix="/var/tmp"
else
rm -rf "$DAOS_BASE"/install/tmp
mkdir -p "$DAOS_BASE"/install/tmp
logs_prefix="$DAOS_BASE/install/lib/daos/TESTING"
cd "$DAOS_BASE"
fi
# Disable CRT_PHY_ADDR_STR to allow launch.py to set it
unset CRT_PHY_ADDR_STR
# Disable OFI_INTERFACE to allow launch.py to pick the fastest interface
unset OFI_INTERFACE
# At Oct2018 Longmond F2F it was decided that per-server logs are preferred
# But now we need to collect them! Avoid using 'client_daos.log' due to
# conflicts with the daos_test log renaming.
# shellcheck disable=SC2153
export D_LOG_FILE="$TEST_TAG_DIR/daos.log"
# The dmg pool destroy can take up to 3 minutes to timeout. To help ensure
# that the avocado test tearDown method is run long enough to account for this
# use a 240 second timeout when running tearDown after the test has timed out.
mkdir -p ~/.config/avocado/
cat <<EOF > ~/.config/avocado/avocado.conf
[datadir.paths]
logs_dir = $logs_prefix/ftest/avocado/job-results
data_dir = $logs_prefix/ftest/avocado/data
[job.output]
loglevel = DEBUG
[runner.timeout]
process_died = 240
[sysinfo.collectibles]
files = \$HOME/.config/avocado/sysinfo/files
# File with list of commands that will be executed and have their output
# collected
commands = \$HOME/.config/avocado/sysinfo/commands
EOF
mkdir -p ~/.config/avocado/sysinfo/
cat <<EOF > ~/.config/avocado/sysinfo/commands
ps axf
dmesg
df -h
EOF
cat <<EOF > ~/.config/avocado/sysinfo/files
/proc/mounts
EOF
# apply patches to Avocado
# Locate the avocado install by probing known site-packages paths
# (python2 and python3, system and /usr/local).
pydir=""
for loc in /usr/lib/python2*/site-packages/ \
/usr/lib/python3*/site-packages/ \
/usr/local/lib/python3*/site-packages/; do
if [ -f "$loc"/avocado/core/runner.py ]; then
pydir=$loc
break
fi
done
if [ -z "${pydir}" ]; then
echo "Could not determine avocado installation location"
exit 1
fi
PATCH_DIR="$PREFIX"/lib/daos/TESTING/ftest
# https://github.com/avocado-framework/avocado/pull/4345 fixed somewhere
# before 69.2
if grep "self.job.result_proxy.notify_progress(False)" \
"$pydir"/avocado/core/runner.py; then
echo "Applying patch avocado-job-result_proxy-reference-fix.patch"
if ! cat < "$PATCH_DIR"/avocado-job-result_proxy-reference-fix.patch | \
sudo patch -p1 -d "$pydir"; then
echo "Failed to apply avocado PR-4345 patch"
exit 1
fi
fi
# https://github.com/avocado-framework/avocado/pull/2908 fixed in
# https://github.com/avocado-framework/avocado/pull/3076/
if ! grep "runner.timeout.process_died" "$pydir"/avocado/core/runner.py; then
# this version of runner.py is older than 82.0
if ! grep TIMEOUT_TEARDOWN "$pydir"/avocado/core/runner.py; then
echo "Applying patch avocado-teardown-timeout.patch"
if ! cat < "$PATCH_DIR"/avocado-teardown-timeout.patch | \
sudo patch -p1 -d "$pydir"; then
echo "Failed to apply avocado PR-3076 patch"
exit 1
fi
fi
fi
# https://github.com/avocado-framework/avocado/pull/3154 - fixed somewhere
# before 69.2
if ! grep "def phase(self)" \
"$pydir"/avocado/core/test.py; then
echo "Applying patch avocado-report-test-phases-common.patch"
if ! filterdiff -p1 -x selftests/* < \
"$PATCH_DIR"/avocado-report-test-phases-common.patch | \
sed -e '/selftests\/.*/d' | \
sudo patch -p1 -d "$pydir"; then
echo "Failed to apply avocado PR-3154 patch - common portion"
exit 1
fi
# TEST_STATE_ATTRIBUTES is only present in the python3 variant of test.py,
# so its presence selects the py3 vs py2 flavor of the patch.
if grep "^TEST_STATE_ATTRIBUTES = " "$pydir"/avocado/core/test.py; then
echo "Applying patch avocado-report-test-phases-py3.patch"
if ! cat < "$PATCH_DIR"/avocado-report-test-phases-py3.patch | \
sudo patch -p1 -d "$pydir"; then
echo "Failed to apply avocado PR-3154 patch - py3 portion"
exit 1
fi
else
echo "Applying patch avocado-report-test-phases-py2.patch"
if ! cat < "$PATCH_DIR"/avocado-report-test-phases-py2.patch | \
sudo patch -p1 -d "$pydir"; then
echo "Failed to apply avocado PR-3154 patch - py2 portion"
exit 1
fi
fi
fi
# apply fix for https://github.com/avocado-framework/avocado/issues/2908 - fixed
# somewhere before 69.2
if grep "TIMEOUT_TEST_INTERRUPTED" \
"$pydir"/avocado/core/runner.py; then
sudo ed <<EOF "$pydir"/avocado/core/runner.py
/TIMEOUT_TEST_INTERRUPTED/s/[0-9]*$/60/
wq
EOF
fi
# apply fix for https://jira.hpdd.intel.com/browse/DAOS-6756 for avocado 69.x -
# fixed somewhere before 82.0
if grep "TIMEOUT_PROCESS_DIED" \
"$pydir"/avocado/core/runner.py; then
sudo ed <<EOF "$pydir"/avocado/core/runner.py
/TIMEOUT_PROCESS_DIED/s/[0-9]*$/60/
wq
EOF
fi
# apply fix for https://github.com/avocado-framework/avocado/pull/2922 - fixed
# somewhere before 69.2
if grep "testsuite.setAttribute('name', 'avocado')" \
"$pydir"/avocado/plugins/xunit.py; then
sudo ed <<EOF "$pydir"/avocado/plugins/xunit.py
/testsuite.setAttribute('name', 'avocado')/s/'avocado'/os.path.basename(os.path.dirname(result.logfile))/
wq
EOF
fi
# Fix for bug to be filed upstream - fixed somewhere before 69.2
if grep "self\.job\.result_proxy\.notify_progress(False)" \
"$pydir"/avocado/core/runner.py; then
sudo ed <<EOF "$pydir"/avocado/core/runner.py
/self\.job\.result_proxy\.notify_progress(False)/d
wq
EOF
fi
pushd "$PREFIX"/lib/daos/TESTING/ftest
# make sure no lingering corefiles or junit files exist
rm -f core.* ./*_results.xml
# see if we just wanted to set up
if ${SETUP_ONLY:-false}; then
exit 0
fi
# check if slurm needs to be configured for soak
if [[ "${TEST_TAG_ARG}" =~ soak ]]; then
if ! ./slurm_setup.py -d -c "$FIRST_NODE" -n "${TEST_NODES}" -s -i; then
exit "${PIPESTATUS[0]}"
else
rc=0
fi
fi
# need to increase the number of open files (on EL8 at least)
ulimit -n 4096
launch_args="-jcrisa"
# processing cores is broken on EL7 currently
# so enable the core-processing flag (-p) only on distros known to work.
id="$(lsb_release -si)"
if { [ "$id" = "CentOS" ] &&
[[ $(lsb_release -s -r) != 7.* ]]; } ||
[ "$id" = "AlmaLinux" ] ||
[ "$id" = "Rocky" ] ||
[ "$id" = "RedHatEnterpriseServer" ] ||
[ "$id" = "openSUSE" ]; then
launch_args+="p"
fi
# Clean stale job results
if [ -d "${logs_prefix}/ftest/avocado/job-results" ]; then
rm -rf "${logs_prefix}/ftest/avocado/job-results"
fi
# now run it!
# shellcheck disable=SC2086
export WITH_VALGRIND
# shellcheck disable=SC2086
# LAUNCH_OPT_ARGS and TEST_TAG_ARR are intentionally unquoted: word-splitting
# into separate arguments is required here.
if ! ./launch.py "${launch_args}" -th "${LOGS_THRESHOLD}" \
-ts "${TEST_NODES}" ${LAUNCH_OPT_ARGS} ${TEST_TAG_ARR[*]}; then
rc=${PIPESTATUS[0]}
else
rc=0
fi
# daos_test uses cmocka framework which generates a set of xml of its own.
# Post-processing the xml files here to put them in proper categories
# for publishing in Jenkins
dt_xml_path="${logs_prefix}/ftest/avocado/job-results/daos_test"
FILES=("${dt_xml_path}"/*/test-results/*/data/*.xml)
COMP="FTEST_daos_test"
./scripts/post_process_xml.sh "${COMP}" "${FILES[@]}"
exit $rc
|
<reponame>evandrogibicoski/policy-fly-vuetify
// Component options for the navbar banner.
export default {
name: 'NavbarBanner',
// All display data is supplied by the parent: date, primaryName, coveragePeriod.
props: ['date', 'primaryName', 'coveragePeriod']
}
|
<reponame>Layton85/akordyukov
package ru.job4j.chess.figures;
/**
 * OccupiedWayException - class for exceptions.
 * OccupiedWayException extends RuntimeException class.
 * OccupiedWayException is thrown when a Figure cannot reach the specified destination because another figure is in the way.
 * @author <NAME> (<EMAIL>)
 * @version $Id$
 * @since 0.1
 */
public class OccupiedWayException extends RuntimeException {
/**
 * Constructor.
 * @param msg - exception message.
 */
public OccupiedWayException(String msg) {
super(msg);
}
}
|
package dbis.piglet.codegen.scala_lang
import dbis.piglet.codegen.{CodeEmitter, CodeGenContext, CodeGenException}
import dbis.piglet.op.{Distinct, PigOperator}
/**
 * Created by kai on 03.12.16.
 *
 * Emits the Scala code for Piglet's DISTINCT operator: the output pipe is the
 * input pipe with .distinct applied. The template is a StringTemplate: when the
 * "profiling" flag is set in the rendering context, each tuple is additionally
 * sampled via PerfMonitor.sampleSize tagged with the operator's lineage.
 */
class DistinctEmitter extends CodeEmitter[Distinct] {
override def template: String = """val <out> = <in>.distinct<if (profiling)>.map{t =>
| PerfMonitor.sampleSize(t,"<lineage>", accum, randFactor)
| t
|}<endif>""".stripMargin
// Renders the template with the operator's pipe names and lineage signature.
override def code(ctx: CodeGenContext, op: Distinct): String =
render(Map("out" -> op.outPipeName, "in" -> op.inPipeName, "lineage" -> op.lineageSignature))
}
// Companion object exposing a single lazily-created emitter instance.
object DistinctEmitter {
lazy val instance = new DistinctEmitter
} |
<gh_stars>0
/*==============================================================================
Program: Gutenberg
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0.txt
==============================================================================*/
// Qt includes
#include <QCoreApplication>
#include <QDebug>
#include <QFile>
#include <QTextStream>
#include <QStringList>
// Self includes
#include "QFontIconParser.h"
// Entry point for the Gutenberg header generator.
// Usage: Gutenberger <input> <output>
// Parses icon name -> unicode pairs from <input> via QFontIconParser and writes
// a self-contained C++ header (class Gutenberg) to <output>.
int main(int argc, char* argv[])
{
    QCoreApplication app( argc, argv );
    // Check arguments: argv[0] plus exactly two file paths.
    QStringList arguments = app.arguments();
    if (arguments.size() != 3)
    {
        qCritical() << "There is an incorrect number of arguments.";
        qCritical() << "Usage: ";
        qCritical() << " Gutenberger file1 file2";
        return EXIT_FAILURE;
    }
    // Parse icons of the input file
    QString input = arguments[1];
    QMap<QString, QString> nameToIcon;
    QFontIconParser parser(&app);
    if (!parser.load(input, nameToIcon))
    {
        qCritical() << "Error during the parsing.";
        return EXIT_FAILURE;
    }
    // Open and write output file
    QString output = arguments[2];
    QFile headerFile(output);
    if (!headerFile.open( QIODevice::WriteOnly | QIODevice::Text ))
    {
        qCritical() << "Cannot open file " << output << " for writing.";
        // Bug fix: previously execution fell through and kept writing to the
        // unopened file; abort instead.
        return EXIT_FAILURE;
    }
    QTextStream stream( &headerFile );
    // Fixed typo in the generated banner ("Gunterberger" -> "Gutenberger").
    stream << "/*=============================================================================="
           << "\n"
           << "Automatically generated by Gutenberger."
           << "\n"
           << "==============================================================================*/"
           << "\n"
           << "#ifndef __Gutenberg_H\n"
           << "#define __Gutenberg_H\n"
           << "\n"
           << "#include <QChar>\n"
           << "#include <QDebug>\n"
           << "#include <QString>\n"
           << "\n"
           << "class Gutenberg\n"
           << "{\n"
           << "public:\n"
           << "static QString unicodeFromIconName(QString name)\n"
           << " {\n";
    // Emit one "if" per icon mapping the name to its unicode code point string.
    QMap<QString, QString>::iterator it;
    for (it = nameToIcon.begin(); it != nameToIcon.end(); ++it)
    {
        stream << " if (name == \"" << it.key() << "\")\n"
               << " return \"" << it.value() << "\";\n";
    }
    stream << "\n"
           << " qCritical() << \"Icon not found\";\n"
           << " return \"\";\n"
           << " };\n";
    // Emit the icon() helper converting the hex code-point string to a QChar.
    stream << "\n"
           << " static QChar icon(QString name)\n"
           << " {\n"
           << " QString unicode = Gutenberg::unicodeFromIconName(name);\n"
           << " if (unicode.isEmpty())\n"
           << " {\n"
           << " return 0;\n"
           << " }\n"
           << "\n"
           << " bool ok;\n"
           << " QChar icon = unicode.toInt(&ok, 16);\n"
           << " if (!ok)\n"
           << " {\n"
           << " qCritical() << \"Error while converting: \" << unicode;\n"
           << " return 0;\n"
           << " }\n"
           << " return icon;\n"
           << " };\n"
           << "};\n"
           << "#endif\n"
           << "\n";
    headerFile.close();
    return EXIT_SUCCESS;
}
|
<filename>pirates/piratesgui/CannonDefenseEndOfWavePanel.py
# File: C (Python 2.4)
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from pirates.piratesgui.CannonDefenseScorePanelBase import CannonDefenseScorePanelBase
from pirates.piratesgui.CannonDefenseScorePanelBase import RoundCompleteFlags
from pirates.piratesgui.GuiButton import GuiButton
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesbase import PiratesGlobals
from pirates.piratesbase import PLocalizer
class CannonDefenseEndOfWavePanel(CannonDefenseScorePanelBase):
    """End-of-wave score panel for the cannon-defense minigame.

    Shows a wave-complete / game-over header, treasure counters, per-player wave
    stats and a countdown to the next wave. Attributes such as self.playerLbls,
    self.shipsSunkTotalslbl, self.headingfont and self.bodyfont are presumably
    created by CannonDefenseScorePanelBase -- TODO confirm.
    """

    def __init__(self, waveNumber, roundComplete, panelNumber, numOfPanels, **kw):
        # Bug fix: the decompiled source passed **None to the base initializer,
        # which raises a TypeError at runtime; forward the caller's keyword
        # arguments (**kw) instead.
        CannonDefenseScorePanelBase.__init__(self, panelNumber, numOfPanels, **kw)
        self.lblCountDown = None
        self.lblTreasureStolen = None
        self.lblTreasureLeft = None
        self.roundComplete = roundComplete
        self.waveNumber = waveNumber
        self._createPanel()

    def _createPanel(self):
        # Layout constants: left edge and column width for the stats table.
        startX = 0.77000000000000002
        widthX = 0.39000000000000001
        # NOTE(review): helpers take an explicit myParent argument and are called
        # with self as the parent widget.
        self._createHeader(self, self.waveNumber, self.roundComplete)
        self._createTreasureLabels(self, 1.2150000000000001)
        self._createWaveResultsLabel(self, 0.96999999999999997)
        self._createPlayerNames(self, self.playerLbls, startX, 0.96999999999999997, widthX)
        self._createStatsLabels(self, PLocalizer.CannonDefense['ShipsSunkWave'], self.shipsSunkTotalslbl, startX, 0.81999999999999995, widthX)
        self._createStatsLabels(self, PLocalizer.CannonDefense['DamageDealtWave'], self.damageTotalslbl, startX, 0.71999999999999997, widthX)
        self._createStatsLabels(self, PLocalizer.CannonDefense['AccuracyWave'], self.accuracyTotalslbl, startX, 0.62, widthX)
        self._createStatsLabels(self, PLocalizer.CannonDefense['ShotsFiredWave'], self.shotsFiredTotalslbl, startX, 0.52000000000000002, widthX)
        self._createGoldAwardedLabel(self, 0.22)
        self._createFooter(self)

    def _createHeader(self, myParent, waveNumber, roundComplete):
        # Header text and color depend on the round outcome: defeat, victory,
        # or a normal wave completion.
        textColor = PiratesGuiGlobals.TextFG1
        if roundComplete == RoundCompleteFlags.GAME_DEFEAT:
            waveCompleteTxt = PLocalizer.CannonDefense['GameOver']
            textColor = PiratesGuiGlobals.TextOV6
        else:
            waveCompleteTxt = PLocalizer.CannonDefense['WaveComplete'] % waveNumber
            if roundComplete == RoundCompleteFlags.GAME_VICTORY:
                textColor = PiratesGuiGlobals.TextFG25
        headingTxtScale = PiratesGuiGlobals.TextScaleLarge * 4
        DirectLabel(parent = myParent, relief = None, state = DGG.DISABLED, text = waveCompleteTxt, text_scale = headingTxtScale, text_align = TextNode.ACenter, text_fg = textColor, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0, 0, 0), text_font = self.headingfont, textMayChange = 0, pos = (1.2, 0, 1.3500000000000001))

    def _createFooter(self, myParent):
        txtScale = PiratesGuiGlobals.TextScaleLarge * 1.5
        if self.roundComplete == RoundCompleteFlags.WAVE_COMPLETE:
            # Countdown label is created hidden; updateCountDown() fills and
            # reveals it once the server supplies a time value.
            nextWaveTxt = PLocalizer.CannonDefense['NextWave'] % '?'
            self.lblCountDown = DirectLabel(parent = myParent, relief = None, state = DGG.DISABLED, text = nextWaveTxt, text_scale = txtScale, text_align = TextNode.ARight, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0, 0, 0), text_font = self.bodyfont, textMayChange = 1, pos = (2.25, 0, -0.059999999999999998))
            self.lblCountDown.hide()
        else:
            # Game over/victory: show the "panel X/Y" pager and a Next button.
            DirectLabel(parent = myParent, relief = None, state = DGG.DISABLED, text = '%s/%s' % (self.panelNumber, self.numOfPanels), text_scale = txtScale, text_align = TextNode.ACenter, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0, 0, 0), text_font = self.bodyfont, textMayChange = 0, pos = (2.1000000000000001, 0, 0.029999999999999999))
            self.nextButton = GuiButton(parent = self, pos = (2.1000000000000001, 0, -0.050000000000000003), text = PLocalizer.CannonDefense['Next'])

    def _createTreasureLabels(self, myParent, startY):
        txtScale = PiratesGuiGlobals.TextScaleLarge * 1.45
        self.lblTreasureStolen = DirectLabel(parent = myParent, relief = None, state = DGG.DISABLED, text = '', text_scale = txtScale, text_align = TextNode.ALeft, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0, 0, 0), text_font = self.headingfont, textMayChange = 1, pos = (0.29999999999999999, 0, startY))
        self.lblTreasureLeft = DirectLabel(parent = myParent, relief = None, state = DGG.DISABLED, text = '', text_scale = txtScale, text_align = TextNode.ARight, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0, 0, 0), text_font = self.bodyfont, textMayChange = 1, pos = (2.1000000000000001, 0, startY))

    def _createWaveResultsLabel(self, myParent, startY):
        DirectLabel(parent = myParent, relief = None, state = DGG.DISABLED, text = PLocalizer.CannonDefense['WaveResults'], text_scale = PiratesGuiGlobals.TextScaleLarge * 1.5, text_align = TextNode.ALeft, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0, 0, 0), text_font = self.headingfont, textMayChange = 1, pos = (0.22, 0, startY))

    def _createGoldAwardedLabel(self, myParent, startY):
        txtScale = PiratesGuiGlobals.TextScaleLarge * 1.5
        self.lblGoldAwarded = DirectLabel(parent = myParent, relief = None, state = DGG.DISABLED, text = '', text_scale = txtScale, text_align = TextNode.ARight, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0, 0, 0), text_font = self.headingfont, textMayChange = 1, pos = (2.1899999999999999, 0, startY - 0.074999999999999997))
        # Treasure-earned label starts hidden; revealed by setTreasureAwarded().
        self.lblTreasureEarned = DirectLabel(parent = myParent, relief = None, state = DGG.DISABLED, text = '', text_scale = txtScale, text_align = TextNode.ARight, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0, 0, 0), text_font = self.headingfont, textMayChange = 1, pos = (2.1899999999999999, 0, startY))
        self.lblTreasureEarned.hide()

    def setGoldAwarded(self, gold, goldBonus = 0):
        # Shows the pay share, with the bonus variant when a bonus was earned.
        if goldBonus:
            self.lblGoldAwarded['text'] = PLocalizer.CannonDefense['PayShareBonus'] % (str(gold), str(goldBonus))
        else:
            self.lblGoldAwarded['text'] = PLocalizer.CannonDefense['PayShare'] % str(gold)

    def setTreasureAwarded(self, amount):
        # Nothing to show when no treasure was earned.
        if amount == 0:
            return None
        self.lblTreasureEarned['text'] = PLocalizer.CannonDefense['TreasureEarned'] % amount
        self.lblTreasureEarned.show()

    def updateCountDown(self, timeLeft):
        # Refresh the next-wave countdown and reveal it on first update.
        self.lblCountDown['text'] = PLocalizer.CannonDefense['NextWave'] % timeLeft
        if self.lblCountDown.isHidden():
            self.lblCountDown.show()

    def setTreasureStats(self, treasureStolen, treasureRemaining):
        self.lblTreasureStolen['text'] = '%s %s %s' % (PLocalizer.CannonDefense['TreasureStolen'], treasureStolen, PLocalizer.CannonDefense['Treasure'])
        self.lblTreasureLeft['text'] = '%s %s %s' % (PLocalizer.CannonDefense['TreasureRemaining'], treasureRemaining, PLocalizer.CannonDefense['Treasure'])
|
#!/bin/bash -ie
#Note - ensure that the -e flag is set to properly set the $? status if any command fails
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
# Since we are using the system jruby, we need to make sure our jvm process
# uses at least 1g of memory, If we don't do this we can get OOM issues when
# installing gems. See https://github.com/elastic/logstash/issues/5179
# Give the system jruby at least 1g of heap to avoid OOM during gem install
# (see the issue referenced in the header comment above).
export JRUBY_OPTS="-J-Xmx1g"
# Run Gradle in-process (no daemon) with 2g heap and info-level logging.
export GRADLE_OPTS="-Xmx2g -Dorg.gradle.daemon=false -Dorg.gradle.logging.level=info"
# Signal CI mode to the build scripts.
export CI=true
./gradlew runXPackIntegrationTests
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.List;
import java.util.function.Consumer;
/** Small utility that reads a text file into memory and applies an action per line. */
public class FileProcessor {

    /**
     * Reads every line of the given file.
     *
     * @param filePath path of the file to read
     * @return an unmodifiable list of the file's lines, in order
     * @throws IOException if the file cannot be opened or read
     */
    public List<String> readFile(String filePath) throws IOException {
        try (BufferedReader reader = new BufferedReader(new FileReader(filePath))) {
            List<String> collected = new java.util.ArrayList<>();
            for (String current = reader.readLine(); current != null; current = reader.readLine()) {
                collected.add(current);
            }
            // List.copyOf matches the unmodifiable result of Stream.toList().
            return List.copyOf(collected);
        }
    }

    /**
     * Applies {@code action} to each line in order.
     *
     * @param lines  lines to process
     * @param action side-effecting callback invoked once per line
     */
    public void processFile(List<String> lines, Consumer<String> action) {
        for (String line : lines) {
            action.accept(line);
        }
    }

    /** Demo entry point: print "input.txt" upper-cased, line by line. */
    public static void main(String[] args) {
        FileProcessor processor = new FileProcessor();
        try {
            List<String> contents = processor.readFile("input.txt");
            Consumer<String> shout = text -> System.out.println(text.toUpperCase());
            processor.processFile(contents, shout);
        } catch (IOException e) {
            System.err.println("Error reading the file: " + e.getMessage());
        }
    }
}
<reponame>NickGraeff/launchkey-java
package com.iovation.launchkey.sdk.example.cli;
import com.iovation.launchkey.sdk.client.DirectoryClient;
import com.iovation.launchkey.sdk.domain.directory.Device;
import com.iovation.launchkey.sdk.domain.directory.DirectoryUserDeviceLinkData;
import picocli.CommandLine;
import java.io.File;
import java.util.List;
import java.util.Scanner;
/**
 * picocli subcommand group for LaunchKey Directory operations: linking,
 * unlinking and listing a user's devices.
 */
@CommandLine.Command(name = "directory")
class DirectoryCommand {
    @CommandLine.ParentCommand
    private RootCommand rootCommand;

    @CommandLine.Parameters(index = "0", paramLabel = "<DIR_ID>",
            description = "Directory ID. It is found in the Keys section of the Directory's page in Dashboard.")
    private String directoryId;

    @CommandLine.Parameters(index = "1", paramLabel = "<KEY_FILE>",
            description = "File location of the RSA Private Key of the RSA public/private key pair whose public key " +
                    "is associated with the Directory.")
    private File privateKeyFile;

    /** Starts a device-link request and prints the QR code URL and manual code. */
    @CommandLine.Command(name = "device-link")
    void deviceLink(
            @CommandLine.Parameters(paramLabel = "<UNIQUE_IDENTIFIER>",
                    description = "Unique identifier of the user for your application") String identifier,
            // Fixed copy-pasted description: this option is the link code's
            // time to live, not a request title.
            @CommandLine.Option(names = {"-t", "--ttl"}, arity = "0..1",
                    description = "Time, in seconds, the link code will be valid.") Integer ttl
    ) throws Exception {
        DirectoryUserDeviceLinkData result = getDirectoryClient().linkDevice(identifier, ttl);
        System.out.println();
        System.out.println("Device link request successful");
        System.out.println("    QR Code URL: " + result.getQrCodeUrl());
        System.out.println("    Manual verification code: " + result.getCode());
        System.out.println();
    }

    /** Unlinks a single device from the user. */
    // NOTE(review): method name keeps its original typo ("deivce"); picocli binds
    // via the annotation name, so renaming would be safe but is out of scope here.
    @CommandLine.Command(name = "device-unlink")
    void deivceUnlink(
            @CommandLine.Parameters(paramLabel = "<UNIQUE_IDENTIFIER>",
                    description = "Unique identifier of the user for your application") String identifier,
            // Fixed typo in the user-facing help text: "unink" -> "unlink".
            @CommandLine.Parameters(paramLabel = "<DEVICE_ID>",
                    description = "Identifier of the Device you wish to unlink") String deviceId) throws Exception {
        System.out.println();
        getDirectoryClient().unlinkDevice(identifier, deviceId);
        System.out.println("Device unlinked");
        System.out.println();
    }

    /** Prints every device currently linked to the given user identifier. */
    @CommandLine.Command(name = "devices-list")
    void devicesList(@CommandLine.Parameters(paramLabel = "<UNIQUE_IDENTIFIER>",
            description = "Unique identifier of the user for your application") String identifier) throws Exception {
        List<Device> devices = getDirectoryClient().getLinkedDevices(identifier);
        System.out.println();
        System.out.println("Devices:");
        for (Device device : devices) {
            System.out.println("  " + device.getId() + ":");
            System.out.println("    Name:    " + device.getName());
            System.out.println("    Type:    " + device.getType());
            System.out.println("    Status:  " + device.getStatus());
            System.out.println("    Created: " + device.getCreated());
            System.out.println("    Updated: " + device.getUpdated());
        }
        System.out.println();
    }

    /**
     * Builds a DirectoryClient from the private key file and directory ID.
     *
     * @throws Exception if the key file cannot be read or the client cannot be built
     */
    private DirectoryClient getDirectoryClient() throws Exception {
        StringBuilder buffer = new StringBuilder();
        // Resource-leak fix: the Scanner over the key file was never closed.
        try (Scanner sc = new Scanner(privateKeyFile)) {
            while (sc.hasNextLine()) {
                buffer.append(sc.nextLine()).append("\n");
            }
        }
        String key = buffer.toString();
        return rootCommand.getFactoryFactory()
                .makeDirectoryFactory(directoryId, key).makeDirectoryClient();
    }
}
|
def partition(arr, low, high):
    """Lomuto partition of arr[low..high] around the pivot arr[high].

    Rearranges the slice in place so everything smaller than the pivot sits to
    its left, then drops the pivot into its final sorted slot.

    Returns:
        The index where the pivot ended up.
    """
    pivot = arr[high]
    boundary = low - 1  # last index of the "smaller than pivot" region
    for cursor in range(low, high):
        if arr[cursor] < pivot:
            # Grow the smaller-region by one and pull the element into it.
            boundary += 1
            arr[boundary], arr[cursor] = arr[cursor], arr[boundary]
    # Place the pivot just after the smaller-region.
    arr[boundary + 1], arr[high] = arr[high], arr[boundary + 1]
    return boundary + 1


def quickSort(arr, low, high):
    """Sort arr[low..high] in place using quicksort (Lomuto partition)."""
    if low >= high:
        # Zero or one element: already sorted.
        return
    split = partition(arr, low, high)
    # Recurse on both sides of the pivot's final position.
    quickSort(arr, low, split - 1)
    quickSort(arr, split + 1, high)
<reponame>johnny21-9/goit-react-hw-01-components<gh_stars>0
import PropTypes from 'prop-types';
import style from './StatList.module.css';
// Presentational component: renders a list of {id, label, percentage} stats,
// with an optional section title.
export default function StatsList({ data, title }) {
return (
<section className={style.statistics}>
{/* Title is optional; only rendered when provided. */}
{title && <h2 className={style.title}>{title}</h2>}
<ul className={style.statList}>
{data.map(item => (
<li className={style.item} key={item.id}>
<span className={style.label}>{item.label}</span>
<span className={style.percentage}>{item.percentage}</span>
</li>
))}
</ul>
</section>
);
}
// Runtime prop validation; only item.id is strictly required (used as the key).
StatsList.propTypes = {
title: PropTypes.string,
data: PropTypes.arrayOf(
PropTypes.shape({
id: PropTypes.string.isRequired,
label: PropTypes.string,
percentage: PropTypes.number,
}),
),
};
|
#pragma once
#include <memory>
#include "boost/optional.hpp"
#include "common/Aliases.hpp"
#include "common/Atomic.hpp"
namespace chatterino {
struct Emote;
using EmotePtr = std::shared_ptr<const Emote>;
class EmoteMap;
// BetterTTV emote provider: holds the global emote map and fetches
// per-channel emote sets from the BTTV v2 REST API.
class BttvEmotes final
{
// Endpoint returning the global BTTV emote list.
static constexpr const char *globalEmoteApiUrl =
"https://api.betterttv.net/2/emotes";
// Base URL; the channel name is appended (see loadChannel).
static constexpr const char *bttvChannelEmoteApiUrl =
"https://api.betterttv.net/2/channels/";
public:
BttvEmotes();
// Snapshot of the current global emote map.
std::shared_ptr<const EmoteMap> emotes() const;
// Looks up a single global emote by name; empty optional when absent.
boost::optional<EmotePtr> emote(const EmoteName &name) const;
// Refreshes the global emote map -- presumably via an async HTTP request; confirm in the .cpp.
void loadEmotes();
// Fetches a channel's emotes and passes the resulting map to callback.
static void loadChannel(const QString &channelName,
std::function<void(EmoteMap &&)> callback);
private:
// Shared global emote map; Atomic wrapper guards concurrent replacement/reads.
Atomic<std::shared_ptr<const EmoteMap>> global_;
};
} // namespace chatterino
|
#!/usr/bin/env bash
# Log-collection helpers for assisted-installer CI. Invoked as:
#   <script> <function-name> [args...]   (see the "$@" dispatch at the bottom)
set -o nounset
set -o errexit
set -o pipefail
set -o xtrace
# Environment knobs, all overridable by the caller:
NAMESPACE=${NAMESPACE:-assisted-installer}
CLUSTER_ID=${CLUSTER_ID:-""}
ADDITIONAL_PARAMS=${ADDITIONAL_PARAMS:-""}
KUBECTL=${KUBECTL:-kubectl}
LOGS_DEST=${LOGS_DEST:-build}
JUNIT_REPORT_DIR=${JUNIT_REPORT_DIR:-"reports/"}
# Custom resources collected when the kube API is enabled.
KUBE_CRS=( clusterdeployment infraenv agentclusterinstall agent )
# Collect service logs: podman container logs for onprem deployments, otherwise
# kubernetes resources/pod logs/events from $NAMESPACE, then parse into junit.
function download_service_logs() {
mkdir -p ${LOGS_DEST} || true
if [ "${DEPLOY_TARGET:-}" = "onprem" ]; then
podman ps -a || true
for service in "installer" "db"; do
podman logs ${service} > ${LOGS_DEST}/onprem_${service}.log || true
done
else
CRS=node,pod,svc,deployment,pv,pvc
# Robustness fix: quote and default ENABLE_KUBE_API so `set -o nounset`
# does not abort log collection when the variable is not exported.
if [ "${ENABLE_KUBE_API:-false}" == "true" ]; then
collect_kube_api_resources
CRS+=$(printf ",%s" "${KUBE_CRS[@]}")
fi
${KUBECTL} cluster-info
${KUBECTL} get ${CRS} -n ${NAMESPACE} -o wide || true
# One log file per pod, all containers included.
${KUBECTL} get pods -n ${NAMESPACE} -o=custom-columns=NAME:.metadata.name --no-headers | xargs -r -I {} sh -c "${KUBECTL} logs {} -n ${NAMESPACE} --all-containers > ${LOGS_DEST}/k8s_{}.log" || true
${KUBECTL} get events -n ${NAMESPACE} --sort-by=.metadata.creationTimestamp > ${LOGS_DEST}/k8s_events.log || true
${KUBECTL} get events -n ${NAMESPACE} --sort-by=.metadata.creationTimestamp --output json > ${LOGS_DEST}/k8s_events.json || true
skipper run ./src/junit_log_parser.py --src "${LOGS_DEST}" --dst "${JUNIT_REPORT_DIR}"
fi
}
# Download per-cluster logs from the assisted-service REST API.
# Resolves the service URL from (in order): REMOTE_SERVICE_URL, the onprem
# localhost port, or the minikube service address.
function download_cluster_logs() {
# NOTE(review): compares against the literal two-character string '""' --
# presumably callers export REMOTE_SERVICE_URL='""' to mean "unset"; confirm.
if [ "${REMOTE_SERVICE_URL:-}" != '""' ]; then
SERVICE_URL=${REMOTE_SERVICE_URL}
else
if [ "${DEPLOY_TARGET:-}" = "onprem" ]; then
SERVICE_URL=http://localhost:8090
else
SERVICE_URL=$(KUBECONFIG=${HOME}/.kube/config minikube service assisted-service -n ${NAMESPACE} --url)
fi
fi
skipper run -e JUNIT_REPORT_DIR python3 -m src.assisted_test_infra.download_logs ${SERVICE_URL} ${LOGS_DEST} --cluster-id ${CLUSTER_ID} ${ADDITIONAL_PARAMS}
}
# Dump every instance of each custom resource in KUBE_CRS as JSON,
# one file per object, into LOGS_DEST.
function collect_kube_api_resources() {
for CR in "${KUBE_CRS[@]}"
do
${KUBECTL} get ${CR} -n ${NAMESPACE} -o=custom-columns=NAME:.metadata.name --no-headers | xargs -r -I {} sh -c "${KUBECTL} get -ojson ${CR} {} -n ${NAMESPACE} > ${LOGS_DEST}/${CR}_{}.json" || true
done
}
# Dispatch: run the function named by the first CLI argument.
"$@"
<reponame>h3xag0nx/iron-autocompound<filename>src/commands/generate-config.ts
import { JsonRpcProvider } from '@ethersproject/providers';
import { isAddress } from '@ethersproject/address';
import { Command } from 'commander';
import { VaultFactory } from '../contracts/vault-factory';
import toml from '@iarna/toml';
import { promises as fs } from 'fs';
import path from 'path';
import chalk from 'chalk';
import { Wallet } from '@ethersproject/wallet';
import { Config } from '../model';
// CLI subcommand "config": looks up the vaults owned by --address via the
// on-chain VaultFactory, generates a fresh harvester wallet, and writes a TOML
// config template to --output.
const command = new Command('config')
.description('Generate config template from user address')
.requiredOption('-a, --address <string>', 'User address')
.option('-o, --output <string>', 'Config file location', './config.toml')
.option(
'--rpc <string>',
'RPC URL (optional)',
'https://rpc-mainnet.matic.network'
)
.option('--schedule <string>', 'Default compound schedule', '* 0 * * * *')
.action(async (options, command) => {
// Validate inputs before touching the network.
if (!isAddress(options.address)) {
console.error('Invalid option: address');
process.exit(1);
}
// NOTE(review): this only checks that "http://" / "https://" appears
// somewhere in the string, not that the whole URL is well-formed.
if (!(options.rpc as string).match(/https?:\/\//)) {
console.error('Invalid option: RPC URL');
process.exit(1);
}
try {
const provider = new JsonRpcProvider(options.rpc);
const vaultFactory = new VaultFactory(provider);
const userVault = await vaultFactory.getUserVaults(options.address);
// Every discovered vault starts enabled in the template.
const vaults = userVault.map((address: string) => {
return {
address,
disabled: false,
};
});
// A brand-new wallet acts as the harvester; its private key is stored in
// the generated config, so the file must be kept secret.
const wallet = Wallet.createRandom();
const config: Config = {
rpc: options.rpc,
privateKey: wallet.privateKey,
harvester: wallet.address,
schedule: options.schedule,
vaults,
};
const output = path.resolve(process.cwd(), options.output);
await fs.writeFile(output, toml.stringify(config as any));
console.log(chalk.green(`Config file created at ${output}`));
console.log(
'Please set the following address as harvester for all of your vaults'
);
console.log('Compound address: ', wallet.address);
console.log(
"You can run the 'explorer' command to print out link to contracts"
);
} catch (e) {
console.error(chalk.red(`Cannot generate config file due to error`));
// NOTE(review): e.message assumes e is an Error -- confirm against tsconfig.
console.error(e.message);
process.exit(9);
}
});
export default command;
|
package dev.riyenas.osam.service;
import dev.riyenas.osam.domain.admin.Admin;
import dev.riyenas.osam.domain.admin.AdminRepository;
import dev.riyenas.osam.domain.device.Device;
import dev.riyenas.osam.domain.device.DeviceRepository;
import dev.riyenas.osam.domain.log.ReturnLog;
import dev.riyenas.osam.domain.log.ReturnLogRepository;
import dev.riyenas.osam.domain.soldier.Soldier;
import dev.riyenas.osam.web.dto.device.DeviceLogResponseDto;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor
@Log4j2
public class DeviceService {
    private final DeviceRepository deviceRepository;
    private final AdminRepository adminRepository;
    private final ReturnLogRepository returnLogRepository;

    /**
     * Builds the return-status report for every device belonging to the given
     * admin's soldiers, sorted by the DTO's order field.
     *
     * @param adminId id of the admin whose soldiers' devices are reported
     * @throws IllegalArgumentException if no admin exists for the id
     */
    public List<DeviceLogResponseDto> returnStatus(Long adminId) {
        Admin admin = adminRepository.findById(adminId).orElseThrow(() ->
                // Message (Korean): "Cannot find the administrator." Kept verbatim.
                new IllegalArgumentException("관리자를 조회할수 없습니다.")
        );
        List<DeviceLogResponseDto> deviceLogResponseDtos = new ArrayList<>();
        for (Soldier soldier : admin.getSoldiers()) {
            for (Device device : soldier.getDevices()) {
                deviceLogResponseDtos.add(toLogResponseDto(device));
            }
        }
        return sortedByOrder(deviceLogResponseDtos);
    }

    /** Return-status report over every device in the repository, sorted by order. */
    public List<DeviceLogResponseDto> returnStatusAll() {
        List<DeviceLogResponseDto> deviceLogResponseDtos = new ArrayList<>();
        for (Device device : deviceRepository.findAll()) {
            deviceLogResponseDtos.add(toLogResponseDto(device));
        }
        return sortedByOrder(deviceLogResponseDtos);
    }

    /**
     * Maps a device to its log DTO, using the device's latest return log or a
     * synthetic "non-pass" log when none exists. (Extracted: this lookup was
     * duplicated verbatim in returnStatus and returnStatusAll.)
     */
    private DeviceLogResponseDto toLogResponseDto(Device device) {
        ReturnLog returnLog = returnLogRepository.findByDeviceIdOne(device.getId()).orElseGet(() -> {
            ReturnLog nonReturn = ReturnLog.builder().build();
            nonReturn.stateNonPass();
            return nonReturn;
        });
        return new DeviceLogResponseDto(device, returnLog);
    }

    /** Returns a new list sorted by DeviceLogResponseDto.getOrder(). */
    private static List<DeviceLogResponseDto> sortedByOrder(List<DeviceLogResponseDto> dtos) {
        return dtos.stream()
                .sorted(Comparator.comparing(DeviceLogResponseDto::getOrder))
                .collect(Collectors.toList());
    }
}
|
/*
* Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
* Licensed under the MIT License.
*/
/**
* A Java interface to the ONNX Runtime.
*
* <p>Provides access to the same execution backends as the C library. Non-representable types in
* Java (such as fp16) are converted into the nearest Java primitive type when accessed through this
* API.
*
* <p>There are two shared libraries required: <code>onnxruntime</code> and <code>onnxruntime4j_jni
* </code>. The loader is in {@link ai.onnxruntime.OnnxRuntime} and the logic is in this order:
*
* <ol>
* <li>The user may signal to skip loading of a shared library using a property in the form <code>
* onnxruntime.native.LIB_NAME.skip</code> with a value of <code>true</code>. This means the
* user has decided to load the library by some other means.
* <li>The user may specify an explicit location of the shared library file using a property in
* the form <code>onnxruntime.native.LIB_NAME.path</code>. This uses {@link
* java.lang.System#load}.
* <li>The shared library is autodiscovered:
* <ol>
* <li>If the shared library is present in the classpath resources, load using {@link
* java.lang.System#load} via a temporary file. Ideally, this should be the default use
 *           case when adding JARs/dependencies containing the shared libraries to your
* classpath.
* <li>If the shared library is not present in the classpath resources, then load using
* {@link java.lang.System#loadLibrary}, which usually looks elsewhere on the filesystem
* for the library. The semantics and behavior of that method are system/JVM dependent.
* Typically, the <code>java.library.path</code> property is used to specify the
* location of native libraries.
* </ol>
* </ol>
*
* For troubleshooting, all shared library loading events are reported to Java logging at the level
* FINE.
*/
package ai.onnxruntime;
|
<reponame>serrgio/construcao-software
package br.com.serrgio.aula;
/**
* Serviços oferecidos para calculo de números primos
*/
public class NumerosPrimos {
/**
*
* Crivo eratóstenes
*
* @param a
* @param n
*
* @throws IllegalArgumentException caso o valor de n seja menor que 1 ou
* para todo i que seja maior ou igual a 2 e menor ou igual a n
*
*/
public static void crivoEratostenes(int[] a, int n) {
if (n < 1) {
throw new IllegalArgumentException("O valor de n é inválido");
}
for (int aux = 0; aux < a.length; aux++) {
if (((2 <= aux) && (aux <= n)) && (!(a[aux] == 0))) {
throw new IllegalArgumentException("O valor contido em i é inválido");
}
}
int i = 2;
while (i <= (n / 2)) {
if (a.length == 0) {
int c = i + i;
while (c <= n) {
a[c] = 1;
c = c + i;
}
}
i = i + 1;
}
}
}
|
<reponame>forgedsoftware/2048game
// Canvas-based renderer/controller for a 4x4 2048 game.
// `control` receives the created canvas, `scoreControl` shows the running
// score, and `size` is the canvas width/height in pixels.
var Game2048Visual = function (control, scoreControl, size) {
    this.game = new Game2048(4);   // underlying game model (4x4 board)
    this.control = control;
    this.scoreControl = scoreControl;
    this.size = size;
    this.fps = 60;                 // target rate for the fallback timer loop
}
// Creates the canvas, wires arrow-key handlers to the four game moves,
// starts the game model, and kicks off the render loop.
Game2048Visual.prototype.start = function () {
    var self = this;
    this.canvas = document.createElement("canvas");
    this.canvas.width = this.size;
    this.canvas.height = this.size;
    this.context = this.canvas.getContext("2d");
    this.control.appendChild(this.canvas);
    this.game.start();
    window.addEventListener('keydown', function (event) {
        switch (event.keyCode) {
            case 37: // Left
                self.game.moveLeft();
                break;
            case 38: // Up
                self.game.moveUp();
                break;
            case 39: // Right
                self.game.moveRight();
                break;
            case 40: // Down
                self.game.moveDown();
                break;
        }
        // Refresh the score display after every keypress, even non-arrows.
        self.scoreControl.innerHTML = self.game.score;
    }, false);
    // run() returns the per-frame callback; onEachFrame schedules it via
    // requestAnimationFrame (or a timer fallback).
    var onEachFrameFunc = onEachFrame(this);
    onEachFrameFunc((this.run)());
};
// Returns a fixed-timestep loop callback: counts how many game ticks have
// elapsed since the last call and redraws once if at least one has passed.
Game2048Visual.prototype.run = function () {
    // NOTE(review): maxFrameSkip and lastGameTick are declared but never
    // used — the catch-up loop is uncapped; confirm whether capping at
    // maxFrameSkip iterations was intended.
    var loops = 0, skipTicks = 1000 / this.fps,
        maxFrameSkip = 10,
        nextGameTick = (new Date).getTime(),
        lastGameTick,
        self = this;
    return function() {
        loops = 0;
        while ((new Date).getTime() > nextGameTick) {
            nextGameTick += skipTicks;
            loops++;
        }
        if (loops) self.draw();
    }
};
// Replaces the game model with a fresh 4x4 game; the existing render loop
// keeps running and will draw the new board on its next frame.
Game2048Visual.prototype.restart = function () {
    this.game = new Game2048(4);
    this.game.start();
};
// Invokes cb every 100 ms. NOTE(review): this uses setInterval, not
// setTimeout, so cb repeats forever — confirm that is intended for a
// "wait" helper.
Game2048Visual.prototype.wait100 = function (cb) {
    setInterval(cb, 100);
}
// Renders the full board: gray rounded shell, 16 empty cell slots, then the
// occupied tiles with value-dependent colors and fonts.
Game2048Visual.prototype.draw = function () {
    // Clear
    this.context.clearRect(0, 0, this.size, this.size);
    // Draw Shell
    this.context.fillStyle = "gray";
    roundRect(this.context, 0, 0, this.size, this.size, 15, true, false)
    //this.context.fillRect(0, 0, this.size, this.size);
    this.context.fillStyle = "darkgray";
    // Each cell occupies a quarter of the board; 20px gutters around cells.
    var offsetSize = (this.size-20)/4;
    for (var i = 0; i < 4; i++) {
        for (var j = 0; j < 4; j++) {
            var squareSize = offsetSize - 20;
            var x = i*offsetSize + 20;
            var y = j*offsetSize + 20;
            //this.context.fillRect(x, y, squareSize, squareSize);
            roundRect(this.context, x, y, squareSize, squareSize, 15, true, false)
        }
    }
    // Print Game Items
    for (var a = 0; a < 4; a++) {
        for (var b = 0; b < 4; b++) {
            // board is indexed [row][column]: a is the x cell, b the y cell.
            var entry = this.game.board[b][a];
            if (entry) {
                var colors = this.colorForValue(entry.value);
                this.context.fillStyle = colors.bg;
                var squareSize = offsetSize - 20;
                var xVal = a*offsetSize + 20;
                var yVal = b*offsetSize + 20;
                //this.context.fillRect(xVal, yVal, squareSize, squareSize);
                roundRect(this.context, xVal, yVal, squareSize, squareSize, 15, true, false)
                var fontDetails = this.fontForValue(entry.value, squareSize);
                this.context.font = fontDetails.font;
                this.context.fillStyle = colors.txt;
                // NOTE(review): fillWeight is not a CanvasRenderingContext2D
                // property; boldness already comes from the font string —
                // confirm and remove.
                this.context.fillWeight = "bold";
                this.context.fillText(entry.value, xVal + fontDetails.xOffset, yVal + fontDetails.yOffset);
            }
        }
    }
};
/**
 * Draws a rounded rectangle using the current state of the canvas.
 * If you omit the last three params, it will draw a rectangle
 * outline with a 5 pixel border radius
 * @param {CanvasRenderingContext2D} ctx
 * @param {Number} x The top left x coordinate
 * @param {Number} y The top left y coordinate
 * @param {Number} width The width of the rectangle
 * @param {Number} height The height of the rectangle
 * @param {Number} radius The corner radius. Defaults to 5;
 * @param {Boolean} fill Whether to fill the rectangle. Defaults to false.
 * @param {Boolean} stroke Whether to stroke the rectangle. Defaults to true.
 */
function roundRect(ctx, x, y, width, height, radius, fill, stroke) {
    if (typeof stroke == "undefined" ) {
        stroke = true;
    }
    if (typeof radius === "undefined") {
        radius = 5;
    }
    // Trace the outline clockwise from the top edge, rounding each corner
    // with a quadratic curve whose control point is the square corner.
    ctx.beginPath();
    ctx.moveTo(x + radius, y);
    ctx.lineTo(x + width - radius, y);
    ctx.quadraticCurveTo(x + width, y, x + width, y + radius);
    ctx.lineTo(x + width, y + height - radius);
    ctx.quadraticCurveTo(x + width, y + height, x + width - radius, y + height);
    ctx.lineTo(x + radius, y + height);
    ctx.quadraticCurveTo(x, y + height, x, y + height - radius);
    ctx.lineTo(x, y + radius);
    ctx.quadraticCurveTo(x, y, x + radius, y);
    ctx.closePath();
    if (stroke) {
        ctx.stroke();
    }
    if (fill) {
        ctx.fill();
    }
}
// Computes the font string and text offsets for a tile value.
// The number of digits drives both the point size (fewer digits → larger
// text) and the horizontal centering offset.
Game2048Visual.prototype.fontForValue = function (value, squareSize) {
    var digits = value.toString().length;
    var xOffsets = [48, 36, 24, 18, 14, 12, 11, 11, 11];
    var pointSize = (10 - digits) * 4;
    return {
        font: "bold " + pointSize + "pt sans-serif",
        xOffset: xOffsets[digits],
        yOffset: 70 - (digits * 2)
    };
}
// Background/text color pair for a tile value.
// Original tile colors - sourced from original game; any value beyond 2048
// falls back to the dark "super tile" palette.
Game2048Visual.prototype.colorForValue = function (value) {
    var palette = {
        "2": { bg: "#eee4da", txt: "#776e65" },
        "4": { bg: "#ede0c8", txt: "#776e65" },
        "8": { bg: "#f2b179", txt: "#f9f6f2" },
        "16": { bg: "#f59563", txt: "#f9f6f2" },
        "32": { bg: "#f67c5f", txt: "#f9f6f2" },
        "64": { bg: "#f65e3b", txt: "#f9f6f2" },
        "128": { bg: "#edcf72", txt: "#f9f6f2" },
        "256": { bg: "#edcc61", txt: "#f9f6f2" },
        "512": { bg: "#edc850", txt: "#f9f6f2" },
        "1024": { bg: "#edc53f", txt: "#f9f6f2" },
        "2048": { bg: "#edc22e", txt: "#f9f6f2" },
    };
    var key = "" + value;
    var entry = palette[key];
    return entry ? entry : { bg: "#3c3a32", txt: "#f9f6f2" };
}
// Returns a scheduler for a per-frame callback: prefers the browser's
// (possibly vendor-prefixed) requestAnimationFrame, falling back to a
// setInterval timer paced at self.fps.
function onEachFrame(self) {
    var raf = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame;
    if (!raf) {
        return function (cb) {
            setInterval(cb, 1000 / self.fps);
        };
    }
    return function (cb) {
        var tick = function () {
            cb();
            raf(tick);
        };
        tick();
    };
}
window.Game2048Visual = Game2048Visual; |
#!/usr/bin/env bash
# CI driver: builds Neovim and runs its test suites using helpers sourced
# from ci/common. Fails fast on any command or pipeline error.
set -e
set -o pipefail
# Directory containing this script; the common helpers live next to it.
CI_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${CI_DIR}/common/build.sh"
source "${CI_DIR}/common/test.sh"
source "${CI_DIR}/common/suite.sh"
set -x
enter_suite tests
# Quietly remove stale core dumps from earlier runs before building.
check_core_dumps --delete quiet
prepare_build
build_nvim
if [ "$CLANG_SANITIZER" != "TSAN" ]; then
    # Additional threads are only created when the builtin UI starts, which
    # doesn't happen in the unit/functional tests
    run_test run_unittests
    run_test run_functionaltests
fi
run_test run_oldtests
run_test install_nvim
end_tests
|
#!/bin/sh -x
# Pipeline: vector-quantize features against a k-means codebook (via GNU
# parallel), then train/predict with an extremely-randomized-trees
# classifier to produce a submission CSV.
NPOI=15000
FEATDIR=../../data
# NOTE(review): FEATDIR is immediately overridden below, making the line
# above dead — confirm which feature directory is intended.
FEATDIR=.
# DRYRUN is set, so the parallel invocation only prints the vq.py commands
# instead of running them; clear this variable to actually quantize.
DRYRUN=--dry-run
CODEBOOK=codebook_kmeans_cart_15000_12_512__0.4.csv
# -N2 consumes the ::: arguments two at a time: {1} = input name, {2} = tag.
parallel -N2 ${DRYRUN} \
    ./vq.py -i ../../data/{1}.csv \
    -c ${CODEBOOK} \
    -o KMeans_cart_15000_12_512__0.4_${NPOI}_{2}_features.csv -N ${NPOI} \
    ::: training train test test
# cv10 =
# cv33 =
# cv66 =
#exit
./clf_ert.py \
    --in-y-train-csv ../../data/training.csv \
    --in-test-labels-csv ../../data/test.csv \
    --in-test-feat-csv ${FEATDIR}/KMeans_cart_15000_12_512__0.4_45000_test_inv_features.csv \
    --in-train-feat-csv ${FEATDIR}/KMeans_cart_15000_12_512__0.4_45000_train_inv_features_x3.csv \
    -o submission18.csv \
    -N 3000 \
    -X 3
|
Implement a streamlined shopping experience with a simple checkout process and user-friendly interface. Reduce the number of steps required for the purchase to ensure customers are able to complete their purchases quickly and easily. Ensure that valid payment options are clearly visible and integrated into the website, such as credit card processing. Create an intuitive navigation system with categories, product pages, and search capabilities, to make it easy for customers to find what they are looking for. Consider providing helpful tips or tutorials to explain different parts of the website, such as how to use the checkout process. |
def array_sort(array):
    """Sort ``array`` in place using selection sort and return it.

    On each pass, the smallest remaining element is swapped into the next
    slot of the sorted prefix.
    """
    length = len(array)
    for pos in range(length):
        # Locate the smallest element in the unsorted suffix.
        smallest = pos
        for candidate in range(pos + 1, length):
            if array[candidate] < array[smallest]:
                smallest = candidate
        # Move it to the front of the unsorted region.
        array[pos], array[smallest] = array[smallest], array[pos]
    return array
package com.mamuya.datrastocospringbootapi.service;
import com.mamuya.datrastocospringbootapi.entities.Product;
import java.util.List;
/**
 * CRUD operations for {@code Product} entities.
 */
public interface ProductService {
    /** Persists the given product and returns the saved instance. */
    Product save(Product product);
    /** Looks up a product by its id. */
    Product findById(int id);
    /** Returns whether a product with the given id exists. */
    boolean existsById(int id);
    /** Returns all products. */
    List<Product> findAll();
    /** Returns the total number of products. */
    long count();
    /** Deletes the product with the given id. */
    void deleteById(int id);
}
|
<?php
// Dumps every row of the `users` table in the local SQLite database to
// stdout, one "Name | Age | Email" line per user.
$db = new SQLite3('database.db');
$query = 'SELECT * FROM users';
$result = $db->query($query);
while ($row = $result->fetchArray()){
    echo "Name: " . $row['name'] . " | Age: " . $row['age'] . " | Email: " . $row['email'] . "\n";
}
?> |
// Doxygen-generated search index fragment (symbols starting with "h"):
// each entry maps a lowercased symbol name to its display name and HTML
// documentation anchor. Do not edit by hand.
var searchData=
[
  ['heteroatom',['heteroatom',['../class_smol_dock_1_1_amino_acid.html#a08692b12e7f53812c5258bd8b805875dad390be0ae98a5c4a22ca9b0b7e726f40',1,'SmolDock::AminoAcid']]],
  ['histidine',['histidine',['../class_smol_dock_1_1_amino_acid.html#a08692b12e7f53812c5258bd8b805875da77d85f9928565a0fe8f709d63982efec',1,'SmolDock::AminoAcid']]],
  ['hydrogen',['hydrogen',['../class_smol_dock_1_1_atom.html#a57e9a532fd04e1846c0d83edebb9fd41ad4ac1478a4d8a4f591d35e3d75f3de65',1,'SmolDock::Atom']]],
  ['hydrogenacceptor',['hydrogenAcceptor',['../class_smol_dock_1_1_atom.html#a77191f0919af58d733764e229cb1f973ad68216b54381a7b2b87b7924ab0191c4',1,'SmolDock::Atom']]],
  ['hydrogendonor',['hydrogenDonor',['../class_smol_dock_1_1_atom.html#a77191f0919af58d733764e229cb1f973ade31e8114bf9ba2ec47d55c59f61853d',1,'SmolDock::Atom']]]
];
|
import torch
def masked_fill_for_qa(
    prediction: torch.Tensor, entity_mask: torch.Tensor = None
) -> torch.Tensor:
    """Suppress masked-out positions in a QA prediction tensor.

    Positions where ``entity_mask`` is 0 are set to ``-inf`` so they are
    eliminated by a subsequent softmax/argmax; when no mask is given the
    prediction is returned untouched.
    """
    if entity_mask is None:
        return prediction
    # Flip the mask: (1 - mask) is truthy exactly where the mask is 0.
    blocked = (1 - entity_mask).bool()
    return prediction.masked_fill(blocked, float("-inf"))
# Loading Libraries
# BUG FIX: CountVectorizer was used below but never imported (NameError).
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.neighbors import NearestNeighbors

# Training data: (name, category) pairs.
items = [['Apple', 'Fruit'], ['Orange', 'Fruit'], ['Tomato', 'Vegetable']]

# Transforming items into numerical format.
# BUG FIX: CountVectorizer expects one string per document, not a list of
# tokens, so each pair is joined into a single space-separated string.
item_docs = [' '.join(item) for item in items]
vectorizer = CountVectorizer()
X = vectorizer.fit_transform(item_docs)

# Training model
model = NearestNeighbors(n_neighbors=2, algorithm='brute', metric='cosine')
model.fit(X)

# Recommend similar items for a new (name, category) query.
user_input = ['Carrot', 'Vegetable']
user_input_transformed = vectorizer.transform([' '.join(user_input)])

# Predict similar items.
# BUG FIX: kneighbors returns row indices into the training matrix, not
# term vectors, so inverse_transform does not apply — map the indices back
# to the original item names instead.
similar_indices = model.kneighbors(user_input_transformed, return_distance=False)

# Print the recommendations
print("Recommended items:", [items[i][0] for i in similar_indices[0]])
#!/bin/bash
# AppVeyor CI setup (part 1): dedupe and shorten PATH, log tool versions,
# and fetch deployment credentials for release-branch builds.
echo Reducing PATH size by removing duplicates and truncating to satisfy MKL, etc
PREVIFS="$IFS"
# Seed NEWPATH with the first PATH entry, then append only unseen entries.
NEWPATH="${PATH%%:*}"
IFS=":"
for P in $PATH; do
    FOUND=0
    for P2 in $NEWPATH; do
        if [[ "$P" == "$P2" ]]; then
            FOUND=1
        fi
    done
    # Cap the rebuilt PATH at ~3000 chars; some tools choke on long PATHs.
    if [[ "$FOUND" == "0" ]] && [[ ${#NEWPATH} -lt 3000 ]]; then
        NEWPATH=$NEWPATH:$P
    fi
done
IFS="$PREVIFS"
echo ${#PATH}
echo ${#NEWPATH}
export PATH=$NEWPATH
set -vx
export PROJ=$1
export APPVEYOR_BUILD_FOLDER=`pwd`
echo Building $PROJ
echo Platform: $OS
echo MSYS2 system: $MSYSTEM
echo Extension: $EXT
echo Branch: $APPVEYOR_REPO_BRANCH
# Log tool versions to aid debugging of CI failures.
bash --version
g++ --version
java -version
mvn --version
/c/python27/python --version
pip --version
unzip --version
gpg --version
pip install requests
mkdir -p /c/Downloads
# Deploy settings are only fetched for non-PR builds on the release branch.
if [[ "$APPVEYOR_PULL_REQUEST_NUMBER" == "" ]] && [[ "$APPVEYOR_REPO_BRANCH" == "release" ]]; then
    /c/python27/python $APPVEYOR_BUILD_FOLDER/ci/gDownload.py /c/Users/appveyor/settings.tar.gz
    tar xzf /c/Users/appveyor/settings.tar.gz -C /c/Users/appveyor/
    tar xzf /c/Users/appveyor/settings.tar.gz -C /home/appveyor/
fi
# AppVeyor CI setup (part 2): install the FlyCapture / Spinnaker camera
# SDKs when the project being built needs them. MSI installers are cached
# in /c/Downloads and fetched from Google Drive (via gDownload.py) when
# missing or truncated (size check > 1 MB).
echo Perform download files out of main repo
cd ..
if [[ "$PROJ" =~ flycapture ]]; then
    echo Flycapture install
    if [ "$OS" == "windows-x86_64" ]; then
        if [[ $(find /c/Downloads/FlyCapture_2.13.3.31_x64.msi -type f -size +1000000c 2>/dev/null) ]]; then
            echo "Found flycap in cache and size seems ok"
        else
            echo "Downloading flycap to cache as not found"
            /c/python27/python $APPVEYOR_BUILD_FOLDER/ci/gDownload.py 14QM7W5RHhvZanF1UBobgEIvwdy6VwTht /c/Downloads/FlyCapture_2.13.3.31_x64.msi
        fi
        # we can get this msi file by starting the installation from the exe file
        cmd /c 'msiexec /quiet /i C:\Downloads\FlyCapture_2.13.3.31_x64.msi ADDLOCAL=ALL'
    elif [ "$OS" == "windows-x86" ]; then
        if [[ $(find /c/Downloads/FlyCapture_2.13.3.31_x86.msi -type f -size +1000000c 2>/dev/null) ]]; then
            echo "Found flycap32 in cache and size seems ok"
        else
            echo "Downloading flycap32 to cache as not found"
            /c/python27/python $APPVEYOR_BUILD_FOLDER/ci/gDownload.py 1ctSSAMF5IkxTKWiiLtID-ltmm27pHFdr /c/Downloads/FlyCapture_2.13.3.31_x86.msi
        fi
        # we can get this msi file by starting the installation from the exe file
        cmd /c 'msiexec /quiet /i C:\Downloads\FlyCapture_2.13.3.31_x86.msi ADDLOCAL=ALL'
    fi
    echo "Finished flycapture install"
fi
if [[ "$PROJ" =~ spinnaker ]]; then
    echo Spinnaker install
    if [ "$OS" == "windows-x86_64" ]; then
        if [[ $(find /c/Downloads/Spinnaker_*_v140_x64.msi -type f -size +1000000c 2>/dev/null) ]]; then
            echo "Found spinnaker in cache and size seems ok"
        else
            echo "Downloading spinnaker to cache as not found"
            /c/python27/python $APPVEYOR_BUILD_FOLDER/ci/gDownload.py 1sjFe7KyvjxPEmEFp9xP3wIs3QCOHAN1m /c/Downloads/Spinnaker_Binaries_v140_x64.msi
            /c/python27/python $APPVEYOR_BUILD_FOLDER/ci/gDownload.py 1gLfpCE3XkcXbWaoFqT6kQqQm_EO55vYS /c/Downloads/Spinnaker_SourceCode_v140_x64.msi
        fi
        # we can get these msi files by starting the installation from the exe file
        cmd /c 'msiexec /quiet /i C:\Downloads\Spinnaker_Binaries_v140_x64.msi ADDLOCAL=ALL INSTALLFOLDER="C:\Program Files\Point Grey Research\Spinnaker"'
        cmd /c 'msiexec /quiet /i C:\Downloads\Spinnaker_SourceCode_v140_x64.msi ADDLOCAL=ALL INSTALLFOLDER="C:\Program Files\Point Grey Research\Spinnaker"'
    elif [ "$OS" == "windows-x86" ]; then
        if [[ $(find /c/Downloads/Spinnaker_*_v140_x86.msi -type f -size +1000000c 2>/dev/null) ]]; then
            echo "Found spinnaker32 in cache and size seems ok"
        else
            echo "Downloading spinnaker32 to cache as not found"
            /c/python27/python $APPVEYOR_BUILD_FOLDER/ci/gDownload.py 1YJcLAaf8Bf2XzC7Puv4ZwXdibdmVcwNg /c/Downloads/Spinnaker_Binaries_v140_x86.msi
            /c/python27/python $APPVEYOR_BUILD_FOLDER/ci/gDownload.py 1BG51avW4q605c2KGGJ3ehjhUlVFBObqh /c/Downloads/Spinnaker_SourceCode_v140_x86.msi
        fi
        # we can get these msi files by starting the installation from the exe file
        cmd /c 'msiexec /quiet /i C:\Downloads\Spinnaker_Binaries_v140_x86.msi ADDLOCAL=ALL INSTALLFOLDER="C:\Program Files (x86)\Point Grey Research\Spinnaker"'
        cmd /c 'msiexec /quiet /i C:\Downloads\Spinnaker_SourceCode_v140_x86.msi ADDLOCAL=ALL INSTALLFOLDER="C:\Program Files (x86)\Point Grey Research\Spinnaker"'
    fi
    echo "Finished spinnaker install"
fi
# AppVeyor CI setup (part 3): per-project toolkit installs — Intel MKL,
# NVIDIA CUDA/cuDNN (also for any "-gpu" extension build), and the
# SWIG/bazel/python packages needed to build tensorflow.
if [ "$PROJ" == "mkl" ]; then
    echo Installing mkl
    curl -L -o mkl.exe "http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15247/w_mkl_2019.3.203.exe"
    # --s --x --f: silently extract the installer payload to the current dir.
    ./mkl.exe --s --x --f .
    ./install.exe install --output=mkllog.txt -eula=accept
    # The installer returns before finishing; give it time, then show its log.
    sleep 60
    cat mkllog.txt
    echo Finished mkl
fi
if [ "$PROJ" == "cuda" ] || [ "$EXT" == "-gpu" ]; then
    echo Installing cuda
    curl -L -o cuda_10.1.105_418.96_windows.exe "https://developer.nvidia.com/compute/cuda/10.1/Prod/local_installers/cuda_10.1.105_418.96_windows.exe"
    curl -L -o cudnn-10.1-windows7-x64-v7.5.0.56.zip "https://developer.download.nvidia.com/compute/redist/cudnn/v7.5.0/cudnn-10.1-windows7-x64-v7.5.0.56.zip"
    ./cuda_10.1.105_418.96_windows.exe -s
    sleep 60
    # cuDNN ships as loose files that must be merged into the CUDA toolkit.
    unzip ./cudnn-10.1-windows7-x64-v7.5.0.56.zip
    mv ./cuda/bin/*.dll /c/Program\ Files/NVIDIA\ GPU\ Computing\ Toolkit/CUDA/v10.1/bin
    mv ./cuda/include/*.h /c/Program\ Files/NVIDIA\ GPU\ Computing\ Toolkit/CUDA/v10.1/include
    mv ./cuda/lib/x64/*.lib /c/Program\ Files/NVIDIA\ GPU\ Computing\ Toolkit/CUDA/v10.1/lib/x64
    echo Finished cuda install
fi
if [ "$PROJ" == "tensorflow" ]; then
    curl -L http://downloads.sourceforge.net/project/swig/swigwin/swigwin-3.0.12/swigwin-3.0.12.zip -o swigwin-3.0.12.zip
    unzip -o swigwin-3.0.12.zip -d /c/
    echo "adding bazel for tensorflow"
    curl -L https://github.com/bazelbuild/bazel/releases/download/0.19.2/bazel-0.19.2-windows-x86_64.exe -o /c/msys64/usr/bin/bazel.exe; export CURL_STATUS=$?
    # bazel is mandatory for the tensorflow build; abort early if the
    # download failed.
    if [ "$CURL_STATUS" != "0" ]; then
        echo "Download failed here, so can't proceed with the build.. Failing.."
        exit 1
    fi
    /c/Python36-x64/python -m pip install -U --user numpy
    /c/Python36-x64/python -m pip install -U --user keras_applications==1.0.6 --no-deps
    /c/Python36-x64/python -m pip install -U --user keras_preprocessing==1.0.5 --no-deps
fi
# AppVeyor CI setup (part 4): pin an older MinGW GCC toolchain, free disk
# space, and report disk usage before handing off to the build.
# copy Python 3.6 back to default installation directory
cp -a "/c/Python36-x64" "/C/Program Files/Python36"
# install an older less buggy version of GCC
# (download every 32- and 64-bit MinGW 7.3.0 package, then install them all
# in one pacman transaction below)
curl -L -o mingw-w64-i686-gcc-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-gcc-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-i686-gcc-ada-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-gcc-ada-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-i686-gcc-objc-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-gcc-objc-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-i686-gcc-libs-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-gcc-libs-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-i686-gcc-fortran-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-gcc-fortran-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-i686-gcc-libgfortran-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-gcc-libgfortran-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-i686-binutils-2.31.1-1-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-binutils-2.31.1-1-any.pkg.tar.xz
curl -L -o mingw-w64-i686-crt-git-6.0.0.5136.897300fe-1-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-crt-git-6.0.0.5136.897300fe-1-any.pkg.tar.xz
curl -L -o mingw-w64-i686-tools-git-6.0.0.5111.3bc5ab74-1-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-tools-git-6.0.0.5111.3bc5ab74-1-any.pkg.tar.xz
curl -L -o mingw-w64-i686-headers-git-6.0.0.5136.897300fe-1-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-headers-git-6.0.0.5136.897300fe-1-any.pkg.tar.xz
curl -L -o mingw-w64-i686-libmangle-git-6.0.0.5079.3b7a42fd-1-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-libmangle-git-6.0.0.5079.3b7a42fd-1-any.pkg.tar.xz
curl -L -o mingw-w64-i686-winstorecompat-git-5.0.0.4760.d3089b5-1-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-winstorecompat-git-5.0.0.4760.d3089b5-1-any.pkg.tar.xz
curl -L -o mingw-w64-i686-winpthreads-git-6.0.0.5134.2416de71-1-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-winpthreads-git-6.0.0.5134.2416de71-1-any.pkg.tar.xz
curl -L -o mingw-w64-i686-libwinpthread-git-6.0.0.5134.2416de71-1-any.pkg.tar.xz http://repo.msys2.org/mingw/i686/mingw-w64-i686-libwinpthread-git-6.0.0.5134.2416de71-1-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-gcc-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-gcc-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-gcc-ada-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-gcc-ada-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-gcc-objc-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-gcc-objc-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-gcc-libs-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-gcc-libs-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-gcc-fortran-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-gcc-fortran-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-gcc-libgfortran-7.3.0-2-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-gcc-libgfortran-7.3.0-2-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-binutils-2.31.1-1-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-binutils-2.31.1-1-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-crt-git-6.0.0.5136.897300fe-1-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-crt-git-6.0.0.5136.897300fe-1-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-tools-git-6.0.0.5111.3bc5ab74-1-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-tools-git-6.0.0.5111.3bc5ab74-1-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-headers-git-6.0.0.5136.897300fe-1-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-headers-git-6.0.0.5136.897300fe-1-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-libmangle-git-6.0.0.5079.3b7a42fd-1-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libmangle-git-6.0.0.5079.3b7a42fd-1-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-winstorecompat-git-5.0.0.4760.d3089b5-1-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-winstorecompat-git-5.0.0.4760.d3089b5-1-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-winpthreads-git-6.0.0.5134.2416de71-1-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-winpthreads-git-6.0.0.5134.2416de71-1-any.pkg.tar.xz
curl -L -o mingw-w64-x86_64-libwinpthread-git-6.0.0.5134.2416de71-1-any.pkg.tar.xz http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-libwinpthread-git-6.0.0.5134.2416de71-1-any.pkg.tar.xz
pacman -U --noconfirm *.pkg.tar.xz
# get rid of some stuff we don't use to avoid running out of disk space and that may actually interfere with our builds
rm -Rf /c/go*
rm -Rf /c/Qt*
#rm -Rf /c/Ruby*
rm -Rf /c/cygwin*
#rm -Rf /c/Miniconda*
#rm -Rf /c/Libraries/boost*
rm -Rf /c/Libraries/llvm*
rm -Rf /c/Program\ Files/LLVM*
rm -Rf /c/Program\ Files\ \(x86\)/Microsoft\ DirectX\ SDK*
rm -Rf /c/ProgramData/Microsoft/AndroidNDK*
df -h
# try to download partial builds, which doesn't work from AppVeyor's hosted VMs always returning "Connection state changed (MAX_CONCURRENT_STREAMS == 100)!" for some reason
#DOWNLOAD_FILE="$PROJ-cppbuild.zip"
#DOWNLOAD_ADDRESS="https://ci.appveyor.com/api/projects/bytedeco/javacpp-presets/artifacts/$DOWNLOAD_FILE"
#if curl -fsSL -G -v -o "$DOWNLOAD_FILE" "$DOWNLOAD_ADDRESS" --data-urlencode "all=true" --data-urlencode "job=Environment: PROJ=$PROJ, OS=$OS, EXT=$EXT, PARTIAL_CPPBUILD=1"; then
#  unzip -o $DOWNLOAD_FILE -d $APPVEYOR_BUILD_FOLDER
#fi
du -csh $HOME/* $HOME/.cache/* $HOME/.ccache/* /c/Users/appveyor/* /c/Users/appveyor/.m2/* /c/Users/downloads/*
echo Finished setting up env in setup.sh
|
#!/bin/sh
# Builds TortoisePlayground.playgroundbook from the template directory plus
# the Tortoise.playground sources and per-page Contents.swift files.

# Directories
SELF_DIR=$(cd $(dirname $0);pwd)

# Make book with template (always start from a clean copy)
BOOK_DIR=$SELF_DIR/TortoisePlayground.playgroundbook
if [ -e $BOOK_DIR ]; then
    rm -rf $BOOK_DIR
fi
mkdir $BOOK_DIR
cp -rf $SELF_DIR/template/* $BOOK_DIR

# Copy sources
echo "Copy sources..."
FROM_SOURCES_DIR=Tortoise.playground/Sources
TO_SOURCES_DIR=$BOOK_DIR/Contents/Sources
# BUG FIX: the original tested `! -e` and so ran `rm -f` on a directory that
# did not exist (a no-op) while leaving stale files behind whenever the
# directory already existed. Clean an existing dir; create a missing one.
if [ -e $TO_SOURCES_DIR ]; then
    rm -f $TO_SOURCES_DIR/*
else
    mkdir $TO_SOURCES_DIR
fi
cp $FROM_SOURCES_DIR/* $TO_SOURCES_DIR

# Copy Contents.swift
echo "[Page1] Copy Contents.swift"
cp -f $SELF_DIR/Tortoise.playground/Pages/Page1.xcplaygroundpage/Contents.swift $BOOK_DIR/Contents/Chapters/Main.playgroundchapter/Pages/Page1.playgroundpage
echo "[Page2] Copy Contents.swift"
cp -f $SELF_DIR/Tortoise.playground/Pages/Page2.xcplaygroundpage/Contents.swift $BOOK_DIR/Contents/Chapters/Main.playgroundchapter/Pages/Page2.playgroundpage
echo "Done."
|
// Validates `value` for the field `name`.
// Returns true on success. On failure it wraps the thrown error in a
// ValidationError and either returns it (when options.throw === false) or
// rethrows it.
function validate(value, name, options) {
    try {
        // Perform validation logic here
        // If validation fails, throw a ValidationError
        // If validation passes, return true
    } catch (e) {
        // Preserve the failing field's name and value on the wrapped error
        // so callers can report which input was rejected.
        const validationError = new ValidationError(e.message, name, value);
        if (options.throw === false) {
            return validationError;
        }
        throw validationError;
    }
    return true;
}
<gh_stars>0
package evilcraft.entities.block;
import net.minecraft.client.renderer.entity.Render;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import evilcraft.api.config.ElementTypeCategory;
import evilcraft.api.config.EntityConfig;
import evilcraft.api.config.configurable.ConfigurableProperty;
import evilcraft.blocks.LightningBomb;
import evilcraft.render.block.RenderBombPrimed;
/**
 * Config for {@link EntityLightningBombPrimed}.
 * @author rubensworks
 *
 */
public class EntityLightningBombPrimedConfig extends EntityConfig {
    /**
     * The unique instance.
     */
    public static EntityLightningBombPrimedConfig _instance;
    /**
     * The amount of ticks (on average), this bomb should tick before explosion.
     */
    @ConfigurableProperty(category = ElementTypeCategory.ENTITY, comment = "The amount of ticks (on average), this bomb should tick before explosion.")
    public static int fuse = 100;
    /**
     * Make a new instance.
     */
    public EntityLightningBombPrimedConfig() {
        // super args: enabled flag, unique entity name, no extra comment,
        // and the entity class this config describes.
        super(
                true,
                "entityLightningBomb",
                null,
                EntityLightningBombPrimed.class
        );
    }
    @SideOnly(Side.CLIENT)
    @Override
    public Render getRender() {
        // Reuse the generic primed-bomb renderer with this bomb's block.
        return new RenderBombPrimed(LightningBomb.getInstance());
    }
}
|
const { Model } = require('objection');
// Configuration hook: builds a knex instance from the app's `postgres`
// settings, binds it to all Objection models, and stores it on the app so
// other modules can retrieve it via app.get('knex').
module.exports = function (app) {
    const { client, connection } = app.get('postgres');
    const knex = require('knex')({ client, connection, useNullAsDefault: false });
    Model.knex(knex);
    app.set('knex', knex);
};
|
from django.test import TestCase
from textcritical.context_processors import is_async
class TestContextProcessors(TestCase):
    """Tests for the ``is_async`` context processor."""

    class Request(object):
        """Minimal stand-in for a Django request.

        Exposes only what ``is_async`` reads: the ``GET`` parameters and
        the ``is_ajax()`` method.
        """

        def __init__(self, is_ajax=False, args=None):
            if args is not None:
                self.GET = args
            else:
                # Default GET dict carries an unrelated parameter so the
                # processor has something to ignore.
                self.GET = {'param' : 'nothing'}
            self.is_ajax_param = is_ajax

        def is_ajax(self):
            # Mirrors Django's HttpRequest.is_ajax() with a canned answer.
            return self.is_ajax_param

    def test_is_async(self):
        """AJAX requests and an ``async`` GET parameter (absent-value or '1',
        but not '0') should both mark the request as async."""
        self.assertEqual(is_async(TestContextProcessors.Request(False))['is_async'], False, "Failed to correctly identify non-AJAX request")
        self.assertEqual(is_async(TestContextProcessors.Request(True))['is_async'], True, "Failed to correctly identify AJAX request")
        self.assertEqual(is_async(TestContextProcessors.Request(False, {'async' : None}))['is_async'], True, "Failed to correctly identify async request based on parameter")
        self.assertEqual(is_async(TestContextProcessors.Request(False, {'async' : '1'}))['is_async'], True, "Failed to correctly identify async request based on parameter")
        self.assertEqual(is_async(TestContextProcessors.Request(False, {'async' : '0'}))['is_async'], False, "Failed to correctly identify non-async request based on parameter")
        self.assertEqual(is_async(TestContextProcessors.Request(True, {'async' : '1'}))['is_async'], True, "Failed to correctly identify AJAX request (along with parameter)")
|
/**
 * REST endpoints for products.
 */
@RestController
public class ProductController {
    @Autowired
    private ProductService productService;

    /** GET /products — returns every product. */
    @GetMapping("/products")
    public List<Product> getAllProducts() {
        return productService.getAllProducts();
    }
}
package org.slos.battle.decision;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.slos.util.ToJson;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Tracks the sequence of choices made across a set of choice gates.
 *
 * <p>The path of choices taken so far is folded into a rolling hash
 * ({@code latestDecisionHash}); each gate reachable via a given path is
 * stored in {@code choiceTree} under that path's hash.
 */
public class MasterChoiceContext implements ToJson {
    /** Maps a decision-path hash to the choice gate reached along that path. */
    private Map<Long, ChoiceGate> choiceTree;
    /** All gate instances registered under a given gate id. */
    private Map<String, List<ChoiceGate>> choiceGatesById = new HashMap<>();
    /** Rolling hash of the choices made so far. */
    private long latestDecisionHash;
    @JsonIgnore
    private final ChoiceGateFactory choiceGateFactory;
    /** Hash value representing "no choices made yet". */
    public static final long START_HASH = -1L;
    private Long totalChoicesMade = 0L;

    /**
     * Rewinds the context to the start state and resets every known gate.
     * Note: {@code totalChoicesMade} is intentionally left untouched.
     */
    public void reset() {
        latestDecisionHash = START_HASH;
        for (ChoiceGate choiceGate : choiceTree.values()) {
            choiceGate.reset();
        }
    }

    public MasterChoiceContext() {
        this.choiceGateFactory = new ChoiceGateFactory(this);
        choiceTree = new HashMap<>();
        latestDecisionHash = START_HASH;
    }

    public Long getTotalChoicesMade() {
        return totalChoicesMade;
    }

    /**
     * Combines the gate id, the current path hash, and the chosen option's
     * UUID into the hash of the resulting decision path. Small prime
     * multipliers spread the components to reduce collisions.
     */
    public Long getHash(ChoiceGate choiceGate, Choice result) {
        return (choiceGate.getGateId().hashCode() * 7) + (latestDecisionHash * 5) + (choiceGate.getChoiceGateContext().getUUIDForChoice(result).hashCode() * 7);
    }

    /** Registers the gate as reachable via the current decision path. */
    protected void registerChoiceGate(ChoiceGate choiceGate) {
        choiceTree.put(latestDecisionHash, choiceGate);
        // computeIfAbsent replaces the original get/null-check/put sequence.
        choiceGatesById.computeIfAbsent(choiceGate.getGateId(), id -> new ArrayList<>())
                .add(choiceGate);
    }

    public Map<String, List<ChoiceGate>> getChoiceGatesById() {
        return choiceGatesById;
    }

    /** Records a choice taken at the given gate and advances the path hash. */
    public void registerChoice(DefaultChoiceGate choiceGate, Choice choice) {
        totalChoicesMade++;
        latestDecisionHash = getHash(choiceGate, choice);
    }

    public ChoiceGateFactory getChoiceGateFactory() {
        return choiceGateFactory;
    }

    public ChoiceGate getChoiceGateInstance(Long hash) {
        return choiceTree.get(hash);
    }

    /**
     * Registers the gate under the hash of the path that would result from
     * choosing {@code result} at {@code choiceGate}.
     */
    public void registerChoiceGate(DefaultChoiceGate choiceGate, Choice result) {
        Long hash = getHash(choiceGate, result);
        choiceTree.put(hash, choiceGate);
    }

    public Long getLatestDecisionHash() {
        return latestDecisionHash;
    }

    public int getGateTotal() {
        return choiceTree.size();
    }

    @Override
    public String toString() {
        return "MasterChoiceContext{" +
                "choiceTree=" + choiceTree.size() +
                ", latestDecisionHash=" + latestDecisionHash +
                ", totalChoicesMade=" + totalChoicesMade +
                '}';
    }
}
|
#!/bin/bash
# Rather than trying to juggle git and resets and stuff, this
# script will create the relevant repos, copy files into place and
# create the commits. This way we can always be sure we're working from
# the same start
docstr="Usage: Run this script from parent directory of cloned repo, into which the example dirs will be put"

# Exactly one argument: the source language.
if [ $# -ne 1 ]
then
    echo "Please supply language to use, either c or f90"
    exit 1
fi

if [[ $1 = 'c' ]]
then
    ext='c'
elif [[ $1 = 'f90' ]]
then
    ext='f90'
else
    echo "Supply either c or f90"
    exit 1
fi

scrp_dir=`dirname $0`"/"

# Sanity check: the helper scripts must live next to this one.
ls $scrp_dir"Create_repos.sh" &> /dev/null
if [ $? -ne 0 ]
then
    echo "Could not find scripts "$docstr
    exit 1
fi

# Refuse to run inside an existing git repo (rev-parse succeeds only in one).
githead=`git rev-parse --verify HEAD &>/dev/null`
if [ $? -eq 0 ]
then
    echo "Cannot create repos inside existing repo "$docstr
    exit 1
fi

echo "Creating repositories"
errs=0
wkdir=`pwd`

# Run one Create_<Name>.sh helper and report success/failure.
# Increments the global error counter on failure.
create_repo() {
    local name=$1
    $scrp_dir"Create_${name}.sh" "../"$scrp_dir $wkdir $ext
    if [ $? -ne 0 ]
    then
        echo "Error creating ${name}"
        let "errs++"
    else
        echo ' ****************** Created '"${name}"
    fi
}

# Same six repos, in the same order, as the original copy-pasted stanzas.
for name in One Two Three Four Five Test; do
    create_repo $name
done

echo $errs " errors"
|
<reponame>org-binbab/solid-tx<gh_stars>0
/*
* This file is part of the Solid TX project.
*
* Copyright (c) 2015. sha1(OWNER) = df334a7237f10846a0ca302bd323e35ee1463931
* --> See LICENSE.txt for more information.
*
* @author BinaryBabel OSS (http://code.binbab.org)
*/
package org.binarybabel.solidtx;
/**
 * Function interface used to create entity callbacks.
 *
 * <p>Single-abstract-method interface; {@code @FunctionalInterface} lets the
 * compiler enforce that and allows lambda implementations.
 */
@FunctionalInterface
public interface TxFn {
    /**
     * Function called after entity is available,
     * or a final error is received/determined.
     *
     * @param stack the TxStack
     * @param obj   the result entity or null
     * @param e     exception if object is null
     */
    void run(TxStack stack, Object obj, TxException e);
}
|
<gh_stars>0
package br.com.mbecker.jagastei.db;
import android.database.Cursor;
import java.util.ArrayList;
import java.util.List;
/**
 * Maps database {@link Cursor} rows onto {@code GastoModel} / {@code TagModel}
 * objects.  The {@code build*} helpers that consume the whole cursor close it;
 * the {@code convert*} helpers read the current row and leave it open.
 */
class ModelBuilder {
    /** Converts the cursor's current row into a GastoModel; the cursor is left open. */
    private static GastoModel convertGastoModel(Cursor c) {
        GastoModel v = new GastoModel();
        v.setId(c.getLong(c.getColumnIndex(JaGasteiContract.GastoEntry._ID)));
        v.setValor(c.getDouble(c.getColumnIndex(JaGasteiContract.GastoEntry.COLUMN_NAME_VALOR)));
        v.setQuando(c.getLong(c.getColumnIndex(JaGasteiContract.GastoEntry.COLUMN_NAME_QUANDO)));
        v.setMesAno(c.getString(c.getColumnIndex(JaGasteiContract.GastoEntry.COLUMN_NAME_MESANO)));
        v.setLat(c.getDouble(c.getColumnIndex(JaGasteiContract.GastoEntry.COLUMN_NAME_LAT)));
        v.setLng(c.getDouble(c.getColumnIndex(JaGasteiContract.GastoEntry.COLUMN_NAME_LNG)));
        v.setObs(c.getString(c.getColumnIndex(JaGasteiContract.GastoEntry.COLUMN_NAME_OBS)));
        return v;
    }

    /** Converts the cursor's current row into a TagModel; the cursor is left open. */
    private static TagModel convertTagModel(Cursor c) {
        long id = c.getLong(c.getColumnIndex(JaGasteiContract.TagEntry._ID));
        String tag = c.getString(c.getColumnIndex(JaGasteiContract.TagEntry.COLUMN_NAME_TAG_NAME));
        String gastos = c.getString(c.getColumnIndex(JaGasteiContract.TagEntry.COLUMN_NAME_ID_GASTO));
        return new TagModel(id, tag, gastos);
    }

    /**
     * Builds a single GastoModel from the first row, or null for an empty cursor.
     * The cursor is now closed on every path; the original leaked it when the
     * cursor had no rows.
     */
    public static GastoModel buildGasto(Cursor c) {
        try {
            if (c.moveToFirst()) {
                return convertGastoModel(c);
            }
            return null;
        } finally {
            c.close();
        }
    }

    /** Builds the full list of GastoModel rows; always closes the cursor. */
    public static List<GastoModel> buildGastoLista(Cursor c) {
        List<GastoModel> lst = new ArrayList<>(40);
        try {
            if (c.moveToFirst()) {
                do {
                    lst.add(convertGastoModel(c));
                } while (c.moveToNext());
            }
        } finally {
            c.close();
        }
        return lst;
    }

    /** Builds the full list of TagModel rows; always closes the cursor. */
    public static List<TagModel> buildTagLista(Cursor c) {
        List<TagModel> lst = new ArrayList<>(40);
        try {
            if (c.moveToFirst()) {
                do {
                    lst.add(convertTagModel(c));
                } while (c.moveToNext());
            }
        } finally {
            c.close();
        }
        return lst;
    }

    /**
     * Converts the cursor's current row into a TagModel.
     * NOTE(review): unlike the other builders this neither positions nor closes
     * the cursor — callers appear responsible for both; confirm before changing.
     */
    public static TagModel buildTag(Cursor c) {
        return convertTagModel(c);
    }
}
|
/*
* Copyright 2019 Red Hat, Inc.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.vertx.ext.auth.webauthn;
import io.vertx.codegen.annotations.GenIgnore;
import io.vertx.codegen.annotations.Nullable;
import io.vertx.codegen.annotations.VertxGen;
/**
 * AttestationConveyancePreference
 * https://www.w3.org/TR/webauthn/#attestation-convey
 */
@VertxGen
public enum Attestation {
  NONE("none"),
  INDIRECT("indirect"),
  DIRECT("direct");

  /** Wire value as defined by the WebAuthn spec. */
  private final String value;

  Attestation(String value) {
    this.value = value;
  }

  @Override
  public String toString() {
    return value;
  }

  /**
   * Resolves a wire value back to its enum constant.
   *
   * @param string the wire value, e.g. {@code "none"}
   * @return the matching constant, or {@code null} if nothing matches
   */
  @Nullable
  @GenIgnore(GenIgnore.PERMITTED_TYPE)
  public static Attestation of(String string) {
    final Attestation[] all = values();
    for (int i = 0; i < all.length; i++) {
      // Comparing the stored value directly is equivalent to the original's
      // toString() comparison, since toString() returns the value.
      if (all[i].value.equals(string)) {
        return all[i];
      }
    }
    return null;
  }
}
|
# Fetch the DM docs for every language: default branch first, then each
# release branch — same invocation order as the original flat list.
for lang in en zh; do
    yarn download:docs-dm $lang
    for branch in release-1.0 release-2.0; do
        yarn download:docs-dm $lang $branch
    done
done
|
#!/bin/bash
# Poll the proxy until it answers, up to 10 attempts with a 5 s pause.
for attempt in {1..10} ; do
    # Any (even invalid) URL works — we only need an access-log entry and a
    # successful curl exit to know the proxy is up.
    if curl "http://squidguard:3128/startupTest${attempt}.html" > /dev/null; then
        break
    fi
    sleep 5
done
|
import UIKit

/// Control that can remember a background colour per control state,
/// mirroring UIButton's per-state configuration API.
class CustomControl: UIControl {
    /// Colours keyed by the state's raw value.
    private var stateColors: [UInt: UIColor] = [:]

    /// Stores `color` for the given state, replacing any previous value.
    func setBackgroundColor(_ color: UIColor, for state: UIControl.State) {
        stateColors[state.rawValue] = color
    }

    /// Returns the colour previously stored for `state`, if any.
    func backgroundColor(for state: UIControl.State) -> UIColor? {
        return stateColors[state.rawValue]
    }

    // Additional methods for setting and managing control states can be added here
}
<filename>zelda-webui/src/main/java/zelda/po/PageObjectMethod.java
package zelda.po;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
 * @ClassName: PageObjectMethod
 * @Description: Holds the ordered list of steps making up one page-object
 *               method; each step is a map of step attributes (keys/values
 *               presumably come from the page-object definition — TODO confirm).
 * @Author: zhzh.yin
 * @Date: 2020-04-27 17:06
 * @Verion: 1.0
 */
public class PageObjectMethod {
// NOTE(review): public mutable field is also exposed via getter/setter;
// callers may rely on direct field access, so visibility is left as-is.
public List<HashMap<String,String>> step = new ArrayList<>();
public List<HashMap<String, String>> getStep() {
return step;
}
public void setStep(List<HashMap<String, String>> step) {
this.step = step;
}
}
|
<filename>fallen/Ledit/Source/gi.cpp<gh_stars>100-1000
//
// Interface between the game and the light editor.
//
#include <MFStdLib.h>
#include "game.h"
#include "gi.h"
#include "morph.h"
#include "night.h"
#include "ob.h"
#include "trip.h"
#include "io.h"
#include "animtmap.h"
#include "dirt.h"
#include "mist.h"
#include "puddle.h"
#include "road.h"
#include "drip.h"
#include "shadow.h"
#include "interfac.h"
#include "mav.h"
#include "ns.h"
#include "elev.h"
#include "fc.h"
//
// The current view (one of the GI_VIEW_* constants; starts at 0).
//
SLONG GI_view;
//
// One-time start-up: initialises the frame/engine/morph/anim subsystems
// and resets the view.  Call order mirrors the engine's own init sequence —
// presumably FC must precede AENG; TODO confirm.
//
void GI_init()
{
    FC_init();
    AENG_init();
    MORPH_load();
    ANIM_init();
    GI_view = 0;
}
//
// Loads the named map via ELEV_game_init and switches to the city view.
// Always reports success (returns TRUE).
//
SLONG GI_load_map(CBYTE *name)
{
    //
    // This is important stuff... I guess.
    //
    void global_load(void); // Defined in game.cpp loads stuff that gets screwed up by the editor.
    // global_load(); // this gets called almost immediately in ELEV_game_init()
    //
    // Load our map.
    //
    ELEV_game_init(
        name,
        NULL,
        NULL,
        NULL);
    //
    // Start off looking at the city.
    //
    GI_view = GI_VIEW_CITY;
    return TRUE; // Nothing ever goes wrong... honest!
}
//
// Switches between the city and sewer views.  Tears down the state belonging
// to the view being left and initialises the new one; a transition to the
// view we are already in falls through without side effects.  Left as-is:
// the teardown/init order here is order-sensitive engine state.
//
void GI_set_view(SLONG view)
{
    switch(view)
    {
    case GI_VIEW_CITY:
        if (GI_view == GI_VIEW_SEWERS)
        {
            //
            // Clean up sewer stuff and initialise the city view.
            //
            GAME_FLAGS &= ~GF_SEWERS;
            NS_cache_fini();
            DIRT_init(100, 3, 3, INFINITY, INFINITY, INFINITY, INFINITY);
        }
        break;
    case GI_VIEW_SEWERS:
        if (GI_view == GI_VIEW_CITY)
        {
            //
            // Clean up city stuff and initialise the sewer view.
            //
            GAME_FLAGS |= GF_SEWERS;
            DIRT_init(0, 0, 0, INFINITY, INFINITY, INFINITY, INFINITY);
            NIGHT_destroy_all_cached_info();
            NS_cache_init();
        }
        break;
    default:
        // Unknown view id is a programming error.
        ASSERT(0);
        break;
    }
    GI_view = view;
}
//
// Renders the current view into the backbuffer from the given camera pose.
// In sewer view it first steps the water/dirt simulation around the camera.
//
void GI_render_view_into_backbuffer(
    SLONG cam_x,
    SLONG cam_y,
    SLONG cam_z,
    SLONG cam_yaw,
    SLONG cam_pitch,
    SLONG cam_roll)
{
    if (GAME_FLAGS & GF_SEWERS)
    {
        //
        // Animate the water.
        //
        DIRT_set_focus(cam_x,cam_z,0x800);
        DIRT_process();
    }
    //
    // Increase the gameturn, otherwise facets aren't drawn!
    //
    GAME_TURN += 1;
    AENG_set_camera(
        cam_x,
        cam_y,
        cam_z,
        cam_yaw,
        cam_pitch,
        cam_roll);
    AENG_draw(FALSE);
}
//
// Maps a screen pixel (sx, sy) back to a world position via the engine's
// raytracer; out-params receive the world coordinates.
//
SLONG GI_get_pixel_world_pos(
    SLONG sx, SLONG sy,
    SLONG *world_x, SLONG *world_y, SLONG *world_z,
    SLONG inside)
{
    // Thin pass-through to the engine.
    return AENG_raytraced_position(sx, sy, world_x, world_y, world_z, inside);
}
//
// Draws a light marker at (lx, ly, lz) anchored at screen (sx, sy) and
// returns the engine's flag word.
//
ULONG GI_light_draw(
    SLONG sx, SLONG sy,
    SLONG lx, SLONG ly, SLONG lz,
    ULONG colour,
    UBYTE highlight)
{
    // Return the engine's result directly so our flags always match AENG's.
    return AENG_light_draw(sx, sy, lx, ly, lz, colour, highlight);
}
//
// Shutdown counterpart of GI_init.  Only the engine is torn down here;
// the other subsystems initialised in GI_init have no fini call — presumably
// they are cleaned up elsewhere; TODO confirm.
//
void GI_fini()
{
    AENG_fini();
}
//---------------------------------------------------------------
// GUY.
//---------------------------------------------------------------
//
// Draws a waypoint marker at (lx, ly, lz) anchored at screen (sx, sy) and
// returns the engine's flag word.
//
ULONG GI_waypoint_draw(
    SLONG sx, SLONG sy,
    SLONG lx, SLONG ly, SLONG lz,
    ULONG colour,
    UBYTE highlight)
{
    // Return the engine's result directly so our flags always match AENG's.
    return AENG_waypoint_draw(sx, sy, lx, ly, lz, colour, highlight);
}
//---------------------------------------------------------------
//
// Draws a radius-trigger marker of radius rad at (lx, ly, lz) anchored at
// screen (sx, sy) and returns the engine's flag word.
//
ULONG GI_rad_trigger_draw(
    SLONG sx, SLONG sy,
    SLONG lx, SLONG ly, SLONG lz,
    ULONG rad,
    ULONG colour,
    UBYTE highlight)
{
    // Return the engine's result directly so our flags always match AENG's.
    return AENG_rad_trigger_draw(sx, sy, lx, ly, lz, rad, colour, highlight);
}
//---------------------------------------------------------------
//
// Draws a ground-square highlight at (lx, ly, lz); polyinit is forwarded
// to the engine unchanged (its semantics are defined by AENG — TODO confirm).
//
void GI_groundsquare_draw(
    SLONG lx,
    SLONG ly,
    SLONG lz,
    ULONG colour,
    UBYTE polyinit)
{
    AENG_groundsquare_draw(lx,ly,lz,colour,polyinit);
}
//---------------------------------------------------------------
|
#!/bin/bash
FN="JASPAR2020_0.99.10.tar.gz"
# Mirrors to try in order.  The galaxyproject depot URL was listed twice in
# the original, which only produced a guaranteed-duplicate retry; listed once.
URLS=(
  "https://bioconductor.org/packages/3.13/data/annotation/src/contrib/JASPAR2020_0.99.10.tar.gz"
  "https://bioarchive.galaxyproject.org/JASPAR2020_0.99.10.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-jaspar2020/bioconductor-jaspar2020_0.99.10_src_all.tar.gz"
)
MD5="bfcaf41ebf0935b8d146afd37719de2d"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $STAGING
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in ${URLS[@]}; do
  curl $URL > $TARBALL
  [[ $? == 0 ]] || continue
  # Platform-specific md5sum checks.  GNU md5sum's check-file format requires
  # two spaces between the digest and the filename.
  if [[ $(uname -s) == "Linux" ]]; then
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    # 'elif' replaces the original's nested 'else if ... fi fi'.
    if [[ $(md5 $TARBALL | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done
if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi
# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL
rm $TARBALL
rmdir $STAGING
|
<gh_stars>100-1000
//
// AmapFlutterStreamManager.h
// amap_location_flutter_plugin
//
// Created by ldj on 2018/10/30.
//
#import <Foundation/Foundation.h>
#import <Flutter/Flutter.h>
NS_ASSUME_NONNULL_BEGIN
@class AmapFlutterStreamHandler;
// Process-wide singleton that owns the plugin's Flutter event-stream handler.
@interface AmapFlutterStreamManager : NSObject
+ (instancetype)sharedInstance ;
@property (nonatomic, strong) AmapFlutterStreamHandler* streamHandler;
@end
// Stream handler bridging native location events to Flutter via the sink.
@interface AmapFlutterStreamHandler : NSObject<FlutterStreamHandler>
// Nullable: presumably nil until Flutter subscribes to the channel — TODO confirm.
@property (nonatomic, strong,nullable) FlutterEventSink eventSink;
@end
NS_ASSUME_NONNULL_END
|
#!/usr/bin/env bash
# Copyright 2015 Midokura SARL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Keep track of the current directory
DEVMIDO_DIR=$(cd $(dirname $0) && pwd)

# Import common functions (provides die, stop_process)
source $DEVMIDO_DIR/functions

# Check if run as root
if [[ $EUID -eq 0 ]]; then
    die $LINENO "You cannot run this script as root."
fi

MIDORC=$DEVMIDO_DIR/midorc
if [[ ! -r $MIDORC ]]; then
    die $LINENO "Missing $MIDORC"
fi
source $MIDORC

set -o xtrace

# Hard coded screen name
SCREEN_NAME=mido

# Clean up the remainder of the screen processes
stop_process midonet-api
stop_process midolman

# Remove the screen session.  Guard against an empty match: without it,
# 'screen -X -S "" quit' would misparse 'quit' as the session name and fail.
SESSION=$(screen -ls | awk -v "pat=[0-9].mido" '$0 ~ pat { print $1 }')
if [[ -n $SESSION ]]; then
    screen -X -S $SESSION quit
fi

# The midolman Java calls keep running after killing the processes
for p in $(ps aux | grep mido | grep java | awk '{print $2}'); do
    sudo kill -9 "$p"
done
|
# SI grid search over scenario x learning-rate x regularisation coefficient.
# Replaces 18 copy-pasted invocations with one loop; the iteration order
# (scenario outermost, then lr, then reg_coef) matches the original sequence,
# and lr values keep their literal spelling (1e-05) so folder names are identical.
for scenario in class_iid class_instance; do
    for lr in 0.001 0.0001 1e-05; do
        for reg_coef in 1.0 100.0 500.0; do
            python -u experiment.py --scenario $scenario --n_runs 2 --model_type resnet --model_name ResNet18 --agent_type regularization --agent_name SI --optimizer SGD --batch_size 20 --lr $lr --momentum 0.9 --weight_decay 0.0001 --pretrained --n_epoch 1 --reg_coef $reg_coef --memory_size 15 --dataroot /media/data/morgan_data/toybox/images --filelist_root dataloaders --dataset toybox --output_dir toybox_gridsearch_outputs --n_workers 20 --gpuid 1 --validate --custom_folder SI/lr-$lr,reg_coef-$reg_coef/ | tee ./toybox_gridsearch_outputs/$scenario/SI/lr-$lr,reg_coef-$reg_coef/log.log
            # Let background jobs drain between runs (no-op when none exist).
            wait
        done
    done
done
|
#!/usr/bin/env bash
# Resolve the release version and abort on failure.  The original put
# '|| exit' INSIDE the command substitution, which only exits the subshell —
# the script would continue with an empty VERSION.  Checking the assignment's
# exit status (that of the substitution) actually aborts.
VERSION=$(sentry-cli releases propose-version) || exit
# Substitute the version into the manifest and apply; abort if apply fails.
sed -e "s/(version)/$VERSION/g" < django.yaml | kubectl apply -f - || exit
kubectl apply -f nginx.yaml
|
#!/usr/bin/env bash
# RUNNER=./FontForge-2020-11-07-21ad4a1-x86_64.AppImage
# (The original wrote the line above with '//', which is not a bash comment —
# it tried to execute a nonexistent path on every run.)
set -xe
rm -rf ./build/
mkdir -p ./build
# If we don't have FontForge dependency, fetch it.
if ! test -x ./FontForge.AppImage; then
    echo "Downloading the latest FontForge AppImage from Github...";
    # Download the latest fontforge. It comes bundled with Python 3.
    curl -s https://api.github.com/repos/fontforge/fontforge/releases |
        grep -m 1 "https://.*\.AppImage" | sed -E 's/.*(https:.*\.AppImage).*/\1/' |
        xargs curl -L --output FontForge.AppImage;
    # Make sure it's executable
    chmod +x ./FontForge.AppImage;
fi;
# Use the fontforge binary to execute our generator script.
# NOTE:
# There are limitations to this. Due to how AppImages are packaged,
# the builtin python executable will always be started in a temporary
# environment because it's assumed that said binary will be self-contained
# and not need access to the CWD at all. (At least I think, that's what
# I gathered from my time experimenting with it trying to get this to work)
# So we can only inject a script to load from STDIN which removes our ability
# to append any command line arguments. To work around this, I added
cat ./scripts/generate_font.py |
    sed "1 i\\argv = [\"$PWD\", \"/config.json\"]" |
    ./FontForge.AppImage -lang=py -script > ./build/mapping.txt;
# Generate the per-shell/editor integration files.
./scripts/inte_fish.sh > ./build/icons.fish
./scripts/inte_bash.sh > ./build/icons_bash.sh
./scripts/inte_bash_export.sh > ./build/icons_bash_export.sh
./scripts/inte_without_codepoint.sh > ./build/icons_bash_without_codepoint.sh
./scripts/inte_emacs.sh > ./build/icons-in-terminal.el
./scripts/inte_c_header.sh > ./build/icons-in-terminal.h
chmod +x ./build/icons.fish
chmod +x ./build/icons_bash.sh
mv icons-in-terminal.ttf ./build/
set +xe
echo -e "\nEverything seems good, now you can run install.sh"
|
package com.github.open96.jypm.playlist;
/**
 * Lifecycle states of a playlist download.
 * Constant order is preserved from the original declaration (ordinals matter
 * to any code that persists or compares them).
 */
public enum PLAYLIST_STATUS {
    QUEUED,
    DOWNLOADING,
    DOWNLOADED,
    FAILED,
    UNKNOWN,
    CONVERTING
}
|
// Minimal WebRTC call demo: two peers ("caller" and "receiver") exchange
// session descriptions and ICE candidates over a WebSocket, with the local
// copy of each description cached in localStorage.  Left byte-identical:
// the signaling sequence is order-sensitive.
// NOTE(review): pc.addStream / pc.onaddstream and URL.createObjectURL(stream)
// are legacy WebRTC APIs removed from modern browsers (use addTrack/ontrack
// and video.srcObject) — confirm target browsers before modernising.
(function iife() {
    var configuration = {
        iceServers: [
            { urls: 'stun:stun.l.google.com:19302' },
            { urls: 'turn:numb.viagenie.ca', credential: 'webrtcdemo', username: 'louis%40mozilla.com' },
        ],
    },
    pcConstraints = {
        optional: [],
    },
    offerOptions = {
        offerToReceiveAudio: 1,
        offerToReceiveVideo: 1,
        voiceActivityDetection: false,
    },
    localStream,
    _socket = new WebSocket('wss://192.168.1.112/'),
    // Role of this peer; flips to 'caller' in call(), stays 'receiver' in answer().
    type = 'receiver',
    video,
    rvideo,
    pc;
    // localStorage key for a role's cached description; defaults to our own role.
    function getKey(_type) {
        var key = type + '_desc';
        if (!!_type) {
            key = _type + '_desc';
        }
        return key;
    }
    // Caches a session description in localStorage under the role's key.
    function setDescription(desc, _type) {
        var key = getKey(_type);
        localStorage[key] = JSON.stringify(desc);
    }
    // Reads a cached session description back out of localStorage.
    function getDescription(_type) {
        var key = getKey(_type);
        return JSON.parse(localStorage[key]);
    }
    // Sends our description to the peer over the signaling socket.
    function sendDesc(_type, desc) {
        _socket.send(encodeURIComponent(JSON.stringify({ type: _type + '_desc', desc: desc })));
    }
    // Sends one ICE candidate, tagged with our role so the peer can filter.
    function sendIceCandidate(candidate) {
        _socket.send(encodeURIComponent(JSON.stringify({ type: 'candidate', candidate: candidate, target: type })));
    }
    function onAddIceCandidateSuccess() {
        console.log('AddIceCandidate success.');
    }
    function onAddIceCandidateError(error) {
        console.log('Failed to add ICE Candidate: ' + error.toString());
    }
    // Forwards the FIRST local ICE candidate only (window.iceSent latches);
    // presumably a demo simplification — TODO confirm this suffices.
    function onicecandidate(event) {
        if (!event || !event.candidate || window.iceSent) return;
        console.log(event);
        if (event.candidate) {
            console.log('Sending ICE candidate');
            sendIceCandidate(event.candidate);
        }
        window.iceSent = true;
    };
    // Legacy remote-stream callback; only logs, does not attach the stream.
    function onaddstream(event) {
        console.log('Received remote stream');
        console.log(event);
    };
    // Caller entry point: create the peer connection, capture media, send offer.
    function call() {
        type = 'caller';
        console.log('Starting call');
        video = document.querySelector('video#video');
        pc = new RTCPeerConnection(configuration, pcConstraints);
        console.log('Created local peer connection object pc1');
        pc.onaddstream = onaddstream;
        pc.onicecandidate = onicecandidate;
        callerSendStream();
        sendOffer();
        window.pc = pc;
    }
    // Applies a remote ICE candidate received via signaling.
    function setIceCadidate(candidate) {
        console.log('Ice Candiate set, type ', type, candidate);
        pc.addIceCandidate(
            new RTCIceCandidate(candidate)
        ).then(
            onAddIceCandidateSuccess,
            onAddIceCandidateError
        );
    }
    // Caller side: apply receiver's cached description, then capture local media.
    function callerSendStream() {
        pc.setRemoteDescription(getDescription('receiver')).then(
            function afterSetRemoteDescription() {
            },
            onSetSessionDescriptionError
        );
        navigator.mediaDevices.getUserMedia({
            audio: true,
            video: true,
        })
        .then(gotStream)
        .catch(function catchHandler(e) {
            console.log('getUserMedia() error: ' + e.name);
        });
    }
    // Receiver entry point: apply caller's offer, create an answer, capture media.
    function answer() {
        type = 'receiver';
        console.log('Answering call');
        video = document.querySelector('video#video');
        pc = new RTCPeerConnection(configuration, pcConstraints);
        console.log('Created Peer Connection ' + type);
        pc.onaddstream = onaddstream;
        pc.onicecandidate = onicecandidate;
        pc.setRemoteDescription(getDescription('caller')).then(
            function afterSetRemoteDescription() {
                pc.createAnswer().then(
                    onAnswerDescription,
                    onCreateSessionDescriptionError
                );
            },
            onSetSessionDescriptionError
        );
        navigator.mediaDevices.getUserMedia({
            audio: true,
            video: true,
        })
        .then(answerGotStream)
        .catch(function catchHandler(e) {
            console.log('getUserMedia() error: ' + e.name);
        });
    }
    // Receiver's answer SDP is ready: cache it and set it as local description.
    function onAnswerDescription(desc) {
        console.log('on Answer Description \n' + desc.sdp);
        setDescription(desc, 'receiver');
        pc.setLocalDescription(desc).then(
            onSetLocalDescriptionSuccess,
            onSetSessionDescriptionError
        );
    }
    // After local description is applied, push our cached description to the peer.
    function onSetLocalDescriptionSuccess(event) {
        console.log('setLocalDescriptionSuccss', event);
        sendDesc(type, getDescription());
        // desc.sdp = forceChosenAudioCodec(desc.sdp);
    }
    // Caller's offer SDP is ready: cache it and set it as local description.
    function onCallDescription(desc) {
        console.log('on Call Description \n' + desc.sdp);
        setDescription(desc, 'caller');
        pc.setLocalDescription(desc).then(
            onSetLocalDescriptionSuccess,
            onSetSessionDescriptionError
        );
    }
    // Attaches the captured local stream to the peer connection (legacy API).
    function sendStream(stream) {
        var videoTracks;
        localStream = stream;
        videoTracks = localStream.getVideoTracks();
        if (videoTracks.length > 0) {
            console.log('Using Audio device: ' + videoTracks[0].label);
        }
        console.log('Adding Local Stream to peer connection');
        pc.addStream(localStream);
    }
    function sendOffer() {
        pc.createOffer(
            offerOptions
        ).then(
            onCallDescription,
            onCreateSessionDescriptionError
        );
    }
    // getUserMedia success (caller): preview locally and attach to the connection.
    function gotStream(stream) {
        console.log('Sending Caller Stream');
        video.src = URL.createObjectURL(stream);
        sendStream(stream);
    }
    // getUserMedia success (receiver): preview locally and attach to the connection.
    function answerGotStream(stream) {
        console.log('Sending Receiver Stream');
        video.src = URL.createObjectURL(stream);
        sendStream(stream);
    }
    function onCreateSessionDescriptionError(error) {
        console.log('Failed to create session description: ' + error.toString());
    }
    function onSetSessionDescriptionError(error) {
        console.log('Failed to set session description: ' + error.toString());
    }
    // window.addEventListener('storage', function addEventListener() {
    //     console.log('message received', event);
    //     console.log(localStorage.caller_desc);
    //     console.log(localStorage.receiver_desc);
    //
    //     if (!localStorage.state == 'calling') {
    //         onAnswerReceived();
    //     } else {
    //         answer();
    //     }
    // });
    // Signaling dispatcher: route descriptions/candidates based on our role.
    _socket.onmessage = function onMessage(e) {
        var data = JSON.parse(decodeURIComponent(e.data));
        console.log('Recived on Message: ', data);
        if (data.type === 'caller_desc' && type === 'receiver') {
            setDescription(data.desc, 'caller');
            answer();
        } else if (data.type === 'receiver_desc' && type === 'caller') {
            setDescription(data.desc, 'receiver');
        } else if (data.type === 'candidate' && data.target !== type) {
            setIceCadidate(data.candidate);
        }
    };
    // Public API surface.
    window.RingCallSDK = {
        call: call,
        answer: answer,
        callerSendStream: callerSendStream,
    };
})(window);
|
#!/bin/bash
# File prefixes.
exec_root="test"
out_root="output"
sys="blis"
#sys="lonestar5"
#sys="ul252"
#sys="ul264"
# Per-machine CPU affinity, library paths and thread counts.
if [ ${sys} = "blis" ]; then
export GOMP_CPU_AFFINITY="0-3"
nt=4
elif [ ${sys} = "lonestar5" ]; then
export GOMP_CPU_AFFINITY="0-23"
nt=12
elif [ ${sys} = "ul252" ]; then
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/home/field/intel/mkl/lib/intel64"
export GOMP_CPU_AFFINITY="0-51"
nt=26
elif [ ${sys} = "ul264" ]; then
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/home/field/intel/mkl/lib/intel64"
export GOMP_CPU_AFFINITY="0-63"
nt=32
fi
# Delay between test cases.
delay=0.02
# Threadedness to test.
#threads="st mt"
threads="st mt"
# Datatypes to test.
#dts="d s"
dts="d"
# Operations to test.
ops="gemm"
# Transpose combinations to test.
trans="nn nt tn tt"
# Storage combinations to test.
#stors="rrr rrc rcr rcc crr crc ccr ccc"
stors="rrr ccc"
# Problem shapes to test.
shapes="sll lsl lls lss sls ssl lll"
# Small-dimension sizes (m/n/k) used by the shape combinations.
# FGVZ: figure out how to probe what's in the directory and
# execute everything that's there?
sms="6"
sns="8"
sks="10"
# Implementations to test.
impls="vendor blissup blislpab openblas eigen"
#impls="vendor"
#impls="blissup"
#impls="blislpab"
#impls="openblas"
#impls="eigen"
# Save a copy of GOMP_CPU_AFFINITY so that if we have to unset it, we can
# restore the value.
GOMP_CPU_AFFINITYsave=${GOMP_CPU_AFFINITY}
# Example: test_dgemm_nn_rrc_m6npkp_blissup_st.x
for th in ${threads}; do
for dt in ${dts}; do
for op in ${ops}; do
for tr in ${trans}; do
for st in ${stors}; do
for sh in ${shapes}; do
for sm in ${sms}; do
for sn in ${sns}; do
for sk in ${sks}; do
for im in ${impls}; do
if [ "${th}" = "mt" ]; then
# Specify the multithreading depending on which
# implementation is about to be tested.
if [ "${im:0:4}" = "blis" ]; then
unset OMP_NUM_THREADS
export BLIS_NUM_THREADS=${nt}
elif [ "${im}" = "openblas" ]; then
unset OMP_NUM_THREADS
export OPENBLAS_NUM_THREADS=${nt}
elif [ "${im}" = "eigen" ]; then
export OMP_NUM_THREADS=${nt}
elif [ "${im}" = "vendor" ]; then
unset OMP_NUM_THREADS
export MKL_NUM_THREADS=${nt}
fi
export nt_use=${nt}
else # if [ "${th}" = "st" ];
# Use single-threaded execution.
export OMP_NUM_THREADS=1
export BLIS_NUM_THREADS=1
export OPENBLAS_NUM_THREADS=1
export MKL_NUM_THREADS=1
export nt_use=1
fi
# Multithreaded OpenBLAS seems to have a problem
# running properly if GOMP_CPU_AFFINITY is set.
# So we temporarily unset it here if we are about
# to execute OpenBLAS, but otherwise restore it.
if [ ${im} = "openblas" ]; then
unset GOMP_CPU_AFFINITY
else
export GOMP_CPU_AFFINITY="${GOMP_CPU_AFFINITYsave}"
fi
# Limit execution of non-BLIS implementations to
# rrr/ccc storage cases.
if [ "${im:0:4}" != "blis" ] && \
[ "${st}" != "rrr" ] && \
[ "${st}" != "ccc" ]; then
continue;
fi
# Further limit execution of libxsmm to
# ccc storage cases.
if [ "${im:0:7}" = "libxsmm" ] && \
[ "${st}" != "ccc" ]; then
continue;
fi
# Extract the shape chars for m, n, k.
chm=${sh:0:1}
chn=${sh:1:1}
chk=${sh:2:1}
# Construct the shape substring (e.g. m6npkp)
shstr=""
if [ ${chm} = "s" ]; then
shstr="${shstr}m${sm}"
else
shstr="${shstr}mp"
fi
if [ ${chn} = "s" ]; then
shstr="${shstr}n${sn}"
else
shstr="${shstr}np"
fi
if [ ${chk} = "s" ]; then
shstr="${shstr}k${sk}"
else
shstr="${shstr}kp"
fi
# Ex: test_dgemm_nn_rrc_m6npkp_blissup_st.x
# Construct the name of the test executable.
exec_name="${exec_root}_${dt}${op}_${tr}_${st}_${shstr}_${im}_${th}.x"
# Construct the name of the output file.
out_file="${out_root}_${th}_${dt}${op}_${tr}_${st}_${shstr}_${im}.m"
echo "Running (nt = ${nt_use}) ./${exec_name} > ${out_file}"
# Run executable.
./${exec_name} > ${out_file}
sleep ${delay}
done
done
done
done
done
done
done
done
done
done
|
<reponame>Mayur2520/SAMPT<filename>src/app/tabs/home/home.component.ts
import { Component, OnInit } from '@angular/core';
import { DashboardService } from '../../services/dashboard.service';
import { ActivatedRoute, Router } from '@angular/router';
import { Storage } from '@ionic/storage';
@Component({
  selector: 'app-home',
  templateUrl: './home.component.html',
  styleUrls: ['./home.component.scss'],
})
export class HomeComponent implements OnInit {

  // Dashboard counters rendered by the template; seeded with one all-null
  // record so bindings have a shape before the service call completes.
  // NOTE(review): "dashboarCounts" is missing a 'd'; renaming would also
  // require a template change, so the name is kept as-is.
  dashboarCounts: any = [{advancebal: null,
    compl: null,
    customers: null,
    loadnbal: null,
    newconn: null,
    newenq: null,
    planscount: null,
    totalcollection: null,
    username: null}];

  constructor(private _DashboardService: DashboardService, private _storage: Storage, private router: Router) { }

  // Logged-in user details, loaded from Ionic storage in ngOnInit.
  userdetails: any = {fullname: ''};

  // Load the persisted user; when present, parse it and fetch the counters.
  ngOnInit() {
    this._storage.get('userdetails').then((val) => {
      if (val == null) {
        // NOTE(review): login redirect is disabled in the original; left
        // as found — confirm whether unauthenticated access is intended.
        // this.router.navigate(['/login']);
      }
      else {
        this.userdetails = JSON.parse(val);
        this.getDashboardCounts(this.userdetails.Userlevel, this.userdetails.userId);
      }
    });
  }

  // Fetch dashboard counters for the given user level/id.
  // NOTE(review): the subscription has no error handler — failures are
  // silently dropped; confirm whether that is intended.
  getDashboardCounts(userLevel, userid)
  {
    this._DashboardService.getDashboardCounts(userLevel, userid).subscribe(
      data => {
        this.dashboarCounts = data;
      });
  }

  // Navigate to the given router link (used by the dashboard tiles).
  RedirectTo(redirectLink)
  {
    this.router.navigate([redirectLink]);
  }
}
|
package cn.shiyanjun.platform.scheduled.constants;
/**
 * Shared string keys and literal values used across the scheduled platform's
 * JSON messages and queue records.
 *
 * NOTE(review): the "constant interface" pattern is generally discouraged
 * (a final class with a private constructor is conventional), but changing
 * it would affect every type that implements this interface, so it is kept.
 */
public interface ScheduledConstants {

    String PLATFORM_ID = "platform_id";

    // JSON keys
    String HEARTBEAT_TYPE_TASK_PROGRESS = "taskProgress";
    String JOB_COUNT = "jobCount";
    String TASK_COUNT = "taskCount";
    String LAST_UPDATE_TS = "lastUpdateTs";
    String PARAMS = "params";
    String QUEUE = "queue";
    String TASKS = "tasks";
    String JOB_ID = "jobId";
    String JOB_STATUS = "jobStatus";
    String JOB_TYPE = "jobType";
    String SEQ_NO = "seqNo";
    String STAGES = "stages";
    String STATUS = "status";
    String TASK_ID = "taskId";
    String RESULT_COUNT = "resultCount";
    String TASK_STATUS = "taskStatus";
    String PARSED_EXPRESSION = "parsedExpression";
    String OUTPUT_PATH = "outputPath";
    String EXP_CODE = "expCode";
    String TASK_TYPE = "taskType";
    String ROLE = "role";

    // Constants (literal values, not JSON keys)
    String NEED_RECOVERING = "needRecovering";
    String YES = "YES";
}
|
<reponame>bhupendra1011/netiflexy<filename>src/__test__/NavBar.test.js<gh_stars>0
import React from "react";
import { create } from "react-test-renderer";
import NavBar from "../NavBar";
test("snapshor", () => {
const c = create(<NavBar />);
expect(c.toJSON()).toMatchSnapshot();
});
|
<filename>AndroidList/app/src/main/java/com/prt2121/androidlist/Item.java
package com.prt2121.androidlist;

import java.util.Objects;
/**
* Created by pt2121 on 1/9/16.
*/
public class Item {
private String title;
private String description;
public Item(String title, String description) {
this.title = title;
this.description = description;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Item item = (Item) o;
if (title != null ? !title.equals(item.title) : item.title != null) return false;
return description != null ? description.equals(item.description) : item.description == null;
}
@Override public int hashCode() {
int result = title != null ? title.hashCode() : 0;
result = 31 * result + (description != null ? description.hashCode() : 0);
return result;
}
@Override public String toString() {
return "Item{" +
"title='" + title + '\'' +
", description='" + description + '\'' +
'}';
}
}
|
<gh_stars>10-100
#ifndef SPX_RANDOMBYTES_H
#define SPX_RANDOMBYTES_H
/* Fill x[0..xlen-1] with random bytes; the implementation is supplied
 * elsewhere in the build (platform-specific RNG). */
extern void randombytes(unsigned char * x,unsigned long long xlen);
#endif
|
#!/bin/sh
# nightly build script
# Pull LOGSDIR and PRODUCT_VERSION from the Makefile into this shell.
eval "$(make print-LOGSDIR,PRODUCT_VERSION)"
# Rotate logs: delete every numeric log file past the 7 most recent.
for RECYCLE in $(cd ${LOGSDIR}; find . -name "[0-9]*" -type f | sort -r | tail -n +7); do
    (cd ${LOGSDIR}; rm ${RECYCLE})
done
# Start from clean objects; output discarded.
(make clean-obj 2>&1) > /dev/null
mkdir -p ${LOGSDIR}/${PRODUCT_VERSION}
# Timed build stages, each logged under the per-version log directory.
for STAGE in update info base kernel xtools distfiles; do
    # we don't normally clean these stages
    (time make ${STAGE} 2>&1) > ${LOGSDIR}/${PRODUCT_VERSION}/${STAGE}.log
done
# Build both SSL flavours; passing any first argument skips the package clean.
for FLAVOUR in OpenSSL LibreSSL; do
    if [ -z "${1}" ]; then
        (make clean-packages FLAVOUR=${FLAVOUR} 2>&1) > /dev/null
    fi
    for STAGE in ports plugins core test; do
        (time make ${STAGE} FLAVOUR=${FLAVOUR} 2>&1) \
            > ${LOGSDIR}/${PRODUCT_VERSION}/${STAGE}-${FLAVOUR}.log
    done
done
# Archive this run's logs, then repoint the "latest" directory at them.
tar -C ${LOGSDIR} -czf ${LOGSDIR}/${PRODUCT_VERSION}.tgz ${PRODUCT_VERSION}
rm -rf ${LOGSDIR}/latest
mv ${LOGSDIR}/${PRODUCT_VERSION} ${LOGSDIR}/latest
|
<reponame>Evino/chablis
import React from 'react';
import { storiesOf } from '@storybook/react';
import CustomTooltip from '.';
// Inline styles pushing the demo button away from the canvas edges.
const divStyle = {
  marginTop: '30px',
  marginLeft: '100px',
};

// The tooltip aligns against its nearest positioned ancestor, so the
// button itself is positioned.
const buttonStyle = {
  position: 'relative',
};

const ImageURL = 'https://evino-res.cloudinary.com/image/upload/v1614018200/web/assets/escala.png';

// Example tooltip content: sweetness-scale image plus explanatory text
// (Portuguese copy preserved verbatim — it is rendered output).
const CustomTooltipWithImage = (
  <CustomTooltip type="default" show align="bottom-start" className="custom-tooltip">
    <img src={ImageURL} alt="Escala" />
    <p>
      Classificação de acordo com a legislação brasileira em relação ao
      conteúdo de açúcar, expresso em gramas por litro de glicose (g/L).
    </p>
  </CustomTooltip>
);

// Storybook story: a button whose tooltip (defined above) is always shown.
storiesOf('Custom Tooltip', module).addWithInfo('Default', () => (
  <div style={divStyle}>
    <button type="button" style={buttonStyle}>
      Click me!
      {CustomTooltipWithImage}
    </button>
  </div>
));
|
package com.atjl.util.character;
import com.atjl.util.common.SystemUtil;
import com.atjl.util.json.JSONFastJsonUtil;
import com.atjl.util.json.JSONFmtUtil;
import org.junit.*;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.assertEquals;
/**
 * Tests for StringFormatUtil.
 *
 * Almost every test method here was generated as a placeholder that timed a
 * {@code null} "call" and printed the JSON-formatted result; the duplicated
 * body has been extracted into {@link #runPlaceholder()} with behavior
 * (console output and timing report) unchanged. Only
 * {@link #testRmHtmlRaw()} contains real assertions.
 */
public class StringFormatUtilTest {

    /**
     * Shared body of the generated placeholder tests: times the (still
     * unimplemented) call, prints the JSON-formatted result or the error,
     * then reports elapsed milliseconds. Replace {@code res = null} with a
     * real call to the method under test when fleshing a case out.
     */
    private void runPlaceholder() {
        long t = System.currentTimeMillis();
        try {
            Object res = null;
            System.out.println("res: succ " + JSONFmtUtil.formatJsonConsole(JSONFastJsonUtil.objectToJson(res)));
        } catch (Exception e) {
            System.out.println("res: error " + e);
            e.printStackTrace();
        }
        long cost = System.currentTimeMillis() - t;
        System.out.println("cost:" + cost);
    }

    // Placeholder cases: each method below previously duplicated the body of
    // runPlaceholder() verbatim.
    @Test public void testToLowerCaseFirstOne() throws Exception { runPlaceholder(); }
    @Test public void testToUpperCaseFirstOne() throws Exception { runPlaceholder(); }
    @Test public void testReplaceSpecialLikeCharacter() throws Exception { runPlaceholder(); }
    @Test public void testReplaceBlank() throws Exception { runPlaceholder(); }
    @Test public void testAddDimmer() throws Exception { runPlaceholder(); }
    @Test public void testFormatFentoYuan() throws Exception { runPlaceholder(); }
    @Test public void testConcat() throws Exception { runPlaceholder(); }
    @Test public void testDeleteSpace() throws Exception { runPlaceholder(); }
    @Test public void testRangeWeight() throws Exception { runPlaceholder(); }
    @Test public void testLpad() throws Exception { runPlaceholder(); }
    @Test public void testReplaceSpecialCharacter() throws Exception { runPlaceholder(); }
    @Test public void testEscapeSpecialCharacter() throws Exception { runPlaceholder(); }
    @Test public void testTrim() throws Exception { runPlaceholder(); }
    @Test public void testTrimQuote() throws Exception { runPlaceholder(); }
    @Test public void testReplaceAll() throws Exception { runPlaceholder(); }
    @Test public void testGetUUID() throws Exception { runPlaceholder(); }
    @Test public void testGetListId() throws Exception { runPlaceholder(); }
    @Test public void testFormatterString() throws Exception { runPlaceholder(); }
    @Test public void testStringToArray() throws Exception { runPlaceholder(); }
    @Test public void testDecodeUnicode() throws Exception { runPlaceholder(); }
    @Test public void testEncodeUnicode() throws Exception { runPlaceholder(); }
    @Test public void testAddZeroFront() throws Exception { runPlaceholder(); }
    @Test public void testToStringArray() throws Exception { runPlaceholder(); }
    @Test public void testReplace() throws Exception { runPlaceholder(); }
    @Test public void testGetContentBySep() throws Exception { runPlaceholder(); }
    @Test public void testGetValByKey() throws Exception { runPlaceholder(); }
    @Test public void testSetValByKey() throws Exception { runPlaceholder(); }
    @Test public void testDelStrKey() throws Exception { runPlaceholder(); }
    @Test public void testFirstChar() throws Exception { runPlaceholder(); }
    @Test public void testLastChar() throws Exception { runPlaceholder(); }
    @Test public void testArrayToString() throws Exception { runPlaceholder(); }
    @Test public void testPreComplement() throws Exception { runPlaceholder(); }
    @Test public void testSufComplement() throws Exception { runPlaceholder(); }
    @Test public void testToBytesRaw() throws Exception { runPlaceholder(); }
    @Test public void testToBytesHex() throws Exception { runPlaceholder(); }
    @Test public void testEncodeInfo() throws Exception { runPlaceholder(); }
    @Test public void testEncodeForInfoDecodeType() throws Exception { runPlaceholder(); }
    @Test public void testDecodeInfo() throws Exception { runPlaceholder(); }
    @Test public void testEncodeStr() throws Exception { runPlaceholder(); }
    @Test public void testGetNullString() throws Exception { runPlaceholder(); }
    @Test public void testGetIVRNullString() throws Exception { runPlaceholder(); }
    @Test public void testDecodeForStrDimNum() throws Exception { runPlaceholder(); }
    @Test public void testCutDouble2point() throws Exception { runPlaceholder(); }
    @Test public void testClean() throws Exception { runPlaceholder(); }
    @Test public void testIntegerToZhCn() throws Exception { runPlaceholder(); }
    @Test public void testDecodeForInfoDecodeType() throws Exception { runPlaceholder(); }
    @Test public void testFormatDate() throws Exception { runPlaceholder(); }
    @Test public void testFilterOuterQuote() throws Exception { runPlaceholder(); }

    /**
     * rmHtml strips complete tags, keeps text outside them, leaves an
     * unterminated "&lt;..." run untouched, and passes empty/null through.
     * (assertEquals arguments reordered to the conventional
     * (expected, actual) so failure messages read correctly.)
     */
    @Test
    public void testRmHtmlRaw() throws Exception {
        String res = StringFormatUtil.rmHtml("<aaaa>a<isdfkjsdj>b<sdkjfjskld>cdef</sdkjlfjklds>");
        assertEquals("abcdef", res);
        res = StringFormatUtil.rmHtml("a123<aaaa>a<isdfkjsdj>b<sdkjfjskld>cdef</sdkjlfjklds>cdd");
        assertEquals("a123abcdefcdd", res);
        res = StringFormatUtil.rmHtml("</sdkjlfjklds>");
        assertEquals("", res);
        res = StringFormatUtil.rmHtml("</sdkjl");
        assertEquals("</sdkjl", res);
        res = StringFormatUtil.rmHtml("");
        assertEquals("", res);
        res = StringFormatUtil.rmHtml(null);
        assertEquals(null, res);
    }

    @Test public void testRmHtmlForRawN() throws Exception { runPlaceholder(); }
    @Test public void testRmHtmlInner() throws Exception { runPlaceholder(); }

    @Before
    public void before() throws Exception {
    }

    @After
    public void after() throws Exception {
    }

    /**
     * Adds the project's config directory (sibling of the working directory)
     * to the classpath before any test runs.
     * NOTE(review): the "\\" separator makes this Windows-only — consider
     * File.separator if these tests must run elsewhere.
     */
    @BeforeClass
    public static void beforeClass() throws Exception {
        String dir = System.getProperty("user.dir");
        System.out.println("now " + dir);
        String config = dir.substring(0, dir.lastIndexOf("\\")) + "\\config";
        System.out.println("config " + config);
        SystemUtil.addClasspath(config);
    }

    @Rule
    public final ExpectedException expectedException = ExpectedException.none();
}
|
# Install the 32-bit cross-build toolchain and development libraries
# (openSUSE / zypper). One package per line for easy diffing.
packages="
cross-i386-binutils
gcc-32bit
libXtst-devel-32bit
glu-devel-32bit
libgthread-2_0-0-32bit
freetype2-devel-32bit
libxml2-devel-32bit
libicu-devel-32bit
libbz2-1-32bit
libbz2-devel-32bit
libXxf86vm1-32bit
gtk2-devel-32bit
gdk-pixbuf-devel-32bit
cairo-devel-32bit
pango-devel-32bit
libX11-devel-32bit
"
for pkg in ${packages}; do
    sudo zypper --non-interactive install -y "${pkg}"
done

# Create the unversioned .so development symlinks.
# BUG FIX: the original ran `sudo ( cd /usr/lib ; ln -s ... )`, which fails
# because sudo executes a program, not a shell compound command — "(" is not
# an executable. Run ln directly with absolute paths instead.
for lib in libgobject-2.0 libglib-2.0 libgthread-2.0 libgmodule-2.0 libatk-1.0; do
    sudo ln -s "/usr/lib/${lib}.so.0" "/usr/lib/${lib}.so"
done
|
def square_root_list(input_list):
    """Return a new list with the square root of every element.

    Args:
        input_list: iterable of non-negative numbers.

    Returns:
        List of floats, one per input element, in input order.

    Raises:
        ValueError: if any element is negative (from math.sqrt).
    """
    # BUG FIX: the original called math.sqrt without importing math,
    # raising NameError at call time. Local import keeps this edit
    # self-contained.
    import math

    return [math.sqrt(elem) for elem in input_list]
# Example usage. BUG FIX: input_list was previously used here without ever
# being defined (NameError); define sample data and print the result so the
# documented output is actually visible.
input_list = [4, 9, 16]
print(square_root_list(input_list))
# Output: [2.0, 3.0, 4.0]
package com.xiaochen.mobilesafe.engine;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.xiaochen.mobilesafe.R;
import com.xiaochen.mobilesafe.db.domain.ProcessInfo;
import android.app.ActivityManager;
import android.app.ActivityManager.MemoryInfo;
import android.app.ActivityManager.RunningAppProcessInfo;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
public class ProcessInfoProvider {
	/**
	 * Returns the total number of currently running processes.
	 *
	 * @param context context used to obtain the ActivityManager
	 * @return number of running processes, or 0 if the list is unavailable
	 */
	public static int getProcessCount(Context context){
		ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
		List<RunningAppProcessInfo> runningAppProcesses = am.getRunningAppProcesses();
		// getRunningAppProcesses() may return null on some devices/versions
		return runningAppProcesses == null ? 0 : runningAppProcesses.size();
	}
	/**
	 * Returns the currently available (free) memory.
	 *
	 * @param context context used to obtain the ActivityManager
	 * @return available memory in bytes
	 */
	public static long getAvailable(Context context){
		ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
		MemoryInfo memoryInfo = new MemoryInfo();
		am.getMemoryInfo(memoryInfo);
		return memoryInfo.availMem;
	}
	/**
	 * Returns total device memory by parsing /proc/meminfo, which works on all
	 * API levels (MemoryInfo.totalMem requires API 16, see commented code in
	 * the original implementation).
	 *
	 * @param context unused; kept for interface compatibility
	 * @return total memory in bytes, or 0 if it could not be read
	 */
	public static long getTotalMemory(Context context){
		BufferedReader bufferedReader = null;
		try {
			// Use an absolute path; the previous relative "proc/meminfo" only
			// worked because the process's working directory happened to be "/".
			bufferedReader = new BufferedReader(new FileReader("/proc/meminfo"));
			// First line looks like "MemTotal:  1917096 kB"; keep only digits.
			String readLine = bufferedReader.readLine();
			StringBuffer stringBuffer = new StringBuffer();
			for (char c : readLine.toCharArray()) {
				if(c>='0' && c<='9'){
					stringBuffer.append(c);
				}
			}
			// The value is reported in kB; convert to bytes.
			return Long.parseLong(stringBuffer.toString())*1024;
		} catch (Exception e) {
			e.printStackTrace();
		}finally{
			// Closing the BufferedReader also closes the underlying FileReader.
			if(bufferedReader != null){
				try {
					bufferedReader.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		return 0;
	}
	/**
	 * Builds the list of running applications with name, icon, memory usage and
	 * system/user classification.
	 *
	 * @param context context used for ActivityManager, PackageManager and resources
	 * @return list of ProcessInfo beans; empty if no processes are reported
	 */
	public static List<ProcessInfo> getProcessInfo(Context context){
		List<ProcessInfo> processInfoList = new ArrayList<ProcessInfo>();
		ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
		PackageManager pm = context.getPackageManager();
		List<RunningAppProcessInfo> runningAppProcesses = am.getRunningAppProcesses();
		if (runningAppProcesses == null) {
			return processInfoList;
		}
		for (RunningAppProcessInfo info : runningAppProcesses) {
			ProcessInfo processInfo = new ProcessInfo();
			// The process name is used as the package name throughout this app.
			String packageName = info.processName;
			processInfo.setPackageName(packageName);
			// Query the memory usage of this single pid.
			android.os.Debug.MemoryInfo[] processMemoryInfo = am.getProcessMemoryInfo(new int[]{info.pid});
			android.os.Debug.MemoryInfo memoryInfo = processMemoryInfo[0];
			// getTotalPrivateDirty() reports kB; convert to bytes.
			processInfo.setOccupyMemory(memoryInfo.getTotalPrivateDirty()*1024);
			try {
				ApplicationInfo applicationInfo = pm.getApplicationInfo(packageName, 0);
				processInfo.setName(applicationInfo.loadLabel(pm).toString());
				processInfo.setIcon(applicationInfo.loadIcon(pm));
				// FLAG_SYSTEM marks preinstalled/system applications.
				processInfo.setSystem((applicationInfo.flags & ApplicationInfo.FLAG_SYSTEM) == ApplicationInfo.FLAG_SYSTEM);
			} catch (NameNotFoundException e) {
				// No matching application (e.g. a bare native process):
				// fall back to the raw process name and a placeholder icon.
				processInfo.setName(packageName);
				processInfo.setIcon(context.getResources().getDrawable(R.drawable.ic_launcher_null));
				processInfo.setSystem(true);
				e.printStackTrace();
			}
			processInfoList.add(processInfo);
		}
		return processInfoList;
	}
	/**
	 * Returns the package (process) names of all running processes.
	 *
	 * @param context context used to obtain the ActivityManager
	 * @return list of process names; empty if none are reported
	 */
	public static List<String> getProcessPackageName(Context context){
		List<String> processPackageNameList = new ArrayList<String>();
		ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
		List<RunningAppProcessInfo> runningAppProcesses = am.getRunningAppProcesses();
		if (runningAppProcesses == null) {
			return processPackageNameList;
		}
		for (RunningAppProcessInfo info : runningAppProcesses) {
			processPackageNameList.add(info.processName);
		}
		return processPackageNameList;
	}
	/**
	 * Kills a single background process.
	 *
	 * @param context     context used to obtain the ActivityManager
	 * @param processInfo bean describing the process to kill
	 */
	public static void clearMem(Context context, ProcessInfo processInfo){
		ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
		am.killBackgroundProcesses(processInfo.getPackageName());
	}
	/**
	 * Kills all background processes except this application's own process.
	 *
	 * @param context context used to obtain the ActivityManager
	 */
	public static void killAllProcess(Context context){
		ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
		List<RunningAppProcessInfo> runningAppProcesses = am.getRunningAppProcesses();
		if (runningAppProcesses == null) {
			return;
		}
		for (RunningAppProcessInfo info : runningAppProcesses) {
			// Never kill our own process.
			if (info.processName.equals("com.xiaochen.mobilesafe")) {
				continue;
			}
			am.killBackgroundProcesses(info.processName);
		}
	}
}
|
#!/bin/bash
#
# This is a wrapper script for nano v2.0.6, which passes its arguments through to the real nano,
# after first trying to figure out the best indentation settings for the file(s) being edited,
# and adding more options to nano's command line, if needed, to adjust its indentation behavior.
#
# It first tries to use EditorConfig settings, as reported by the editorconfig CLI. This works
# on both existing and new files, since finding a relevant .editorconfig file only depends on
# the edited file's path. If the editorconfig CLI isn't installed, this step is skipped; if it
# is installed but you don't want this script to use it, put this in your .bashrc and/or .zshrc:
# export NICER_NANO_NO_EDITORCONFIG=true
#
# If EditorConfig settings aren't found (or aren't used), the script next tries to read up to
# 20 KB from the file, and detect its indentation style. This, of course, only works on files
# that already exist.
#
# For all of this to work, neither /etc/nanorc nor ~/.nanorc can contain "set tabstospaces";
# that's because nano doesn't provide a command-line option which tells it to use tabs for
# indentation, so if you've told it to use spaces for indentation in one of its config files,
# this script has no way to change that setting. And _that_, in turn, means that if this script
# can't detect a file's indentation style, nano's default setting of using tabs for indentation
# will come into play. If you're more a spaces-for-indentation kind of person, put this into
# your .bashrc and/or .zshrc to tell this script to default to having nano indent with spaces
# when it can't figure out what else to do: export NICER_NANO_PREFER_SPACES=true
#
# You can always pass --tabstospaces (or -E) if you want to indent a given file with spaces,
# and this script will dutifully pass that setting along to nano, regardless of what EditorConfig
# thinks or the contents already in the file. You can also pass --tabs, which isn't an actual
# nano option, but which this script takes as the opposite of --tabstospaces, and which will
# make it _not_ tell nano to indent with spaces, regardless of EditorConfig's opinion, etc.
# If you pass both --tabstospaces and --tabs, --tabstospaces always wins, regardless of order.
#
# The --tabsize (or -T) option is also passed through, setting the tab display width when
# indenting with tabs, or the number of spaces to use when indenting with spaces. If you don't
# pass it, either the relevant EditorConfig setting or the existing indentation width in the file
# will be used, or -- if neither of those are applicable/available -- the `tabsize` setting from
# nano's config files, or its compiled-in default of 8.
#
# Copyright (c) 2020 Jason Jackson. MIT License.
#
# Parse the command line like nano v2.0.6 will
# (Just enough to understand which file(s) will be edited, and a few relevant options)
declare -a files
maybe_file=""        # Arguments that start with "+" might or might not be file names
options_ended=false  # Options can be interspersed with file names, but "--" ends all options
other_optarg=""      # An option argument other than for -s/--speller or -T/--tabsize
speller=""
tabsize=""
tabstospaces=""
will_edit=true
for arg; do
	# Remove options which aren't real nano options from the command line we'll pass to nano
	# https://unix.stackexchange.com/questions/258512/how-to-remove-a-positional-parameter-from
	shift
	[[ $arg == "--tabs" && $options_ended == false ]] || set -- "$@" "$arg"
	# Process the argument
	if [[ $tabsize == "pending" ]]; then
		tabsize="$arg"
		continue
	elif [[ $speller == "pending" ]]; then
		speller="$arg"
		continue
	elif [[ $other_optarg == "pending" ]]; then
		other_optarg=""  # We don't care what the option's argument value actually is,
		continue         # we just need to avoid handling it as an option or file name
	fi
	if [[ $options_ended == false && $arg == -* && $arg != - ]]; then
		if [[ $arg == "--" ]]; then
			# This can come between a '+n,n' argument and a file name, with no effect
			options_ended=true
		elif [[ "--help" == "$arg"* || "--version" == "$arg"* ]]; then
			will_edit=false  # Nano will error on --h or --v; --he or longer and --ve or longer work
		elif [[ ("--speller" == "$arg"* || "--speller" == "${arg//=*/}"*) && $arg == "--sp"* ]]; then
			# If an equal sign is present, the rest of the arg is spell checker, else the next arg is
			[[ $arg == *=* ]] && speller="${arg//*=/}" || speller="pending"
		elif [[ ("--tabsize" == "$arg"* || "--tabsize" == "${arg//=*/}"*) && $arg == "--tabsi"* ]]; then
			# If an equal sign is present, the rest of the arg is tab size, else the next arg is
			[[ $arg == *=* ]] && tabsize="${arg//*=/}" || tabsize="pending"
		elif [[ "--tabstospaces" == "$arg"* && "$arg" == "--tabst"* ]]; then
			tabstospaces=true
		elif [[ $arg == "--tabs" ]]; then
			# This isn't a real nano option; it means the inverse of '--tabstospaces',
			# i.e. requests to use tabs for indentation regardless of EditorConfig / file content
			# (--tabstospaces always wins if both are passed, hence the guard here)
			[[ $tabstospaces == true ]] || tabstospaces=false
		elif [[ $arg == --* ]]; then
			continue  # A long option we don't care about
		else  # Begins with a single dash, must be one or more short option(s)
			last_index=$(( ${#arg} - 1 ))
			for (( i=1; i <= last_index; i++ )); do
				ch=${arg:$i:1}
				if [[ $ch == "?" || $ch == "h" || $ch == "V" ]];
				then will_edit=false
				elif [[ $ch == "E" ]]; then
					tabstospaces=true
				elif [[ $ch == "T" ]]; then
					# If characters immediately follow a single-char option,
					# the rest of $arg is its optarg, otherwise the next argument is
					(( i < last_index )) && tabsize="${arg:$i+1}" || tabsize="pending"
				elif [[ $ch == "s" ]]; then
					(( i < last_index )) && speller="${arg:$i+1}" || speller="pending"
				elif [[ "CQYor" == *$ch* ]]; then
					# Other short options that take an argument (-C, -Q, -Y, -o, -r)
					(( i < last_index )) || other_optarg="pending"
					continue  # The rest of this argument, if any, is optarg
				fi
			done
		fi
		continue
	fi
	if [[ $arg == +* ]]; then
		# Nano appears to handle all arguments which begin with "+" the same way,
		# including plain "+", and things that are invalid as row/column notation like "+xyz"
		if [[ -n $maybe_file ]]; then
			maybe_file=""
			files+=("$arg")
		else
			maybe_file="$arg"
		fi
	else
		# This is either an ordinary argument,
		# or one that looks like an option but "--" was previously passed
		maybe_file=""
		files+=("$arg")  # Could be empty -- that's fine, it's a valid file name to nano
	fi
done
if [[ -n $maybe_file ]]; then
	files+=("$maybe_file")
fi
# An option is still waiting for its argument value; nano itself will error
# out, so there's no point detecting indentation below
if [[ "${tabsize}${speller}${other_optarg}" == *"pending"* ]]; then
	will_edit=false
fi
# Detect indentation, if needed (i.e. we'll really edit at least one file,
# and the command line didn't already pin down both style and size)
if [[ $will_edit == true && ${#files[@]} != 0 && (-z $tabstospaces || -z $tabsize) ]]; then
	# Resolve symlinks so the helper scripts are found next to the real script
	self="$0"
	while [[ -L $self ]]; do
		self="$(readlink "$self")"
	done
	if [[ $NICER_NANO_NO_EDITORCONFIG != true ]] && type -t editorconfig > /dev/null; then
		type -t use-editorconfig > /dev/null || source "$(dirname -- "$self")/functions/use-editorconfig.sh"
		use_editorconfig=true
	fi
	indent_conflict=false
	indent_style=""
	indent_size=""
	for file in "${files[@]}"; do
		[[ -n $file ]] || continue
		_indent_style="" _indent_size=""  # Assigned in use-editorconfig/detect-indent
		[[ $use_editorconfig == true ]] && use-editorconfig "$file"
		# Fall back to content-based detection if EditorConfig didn't settle things
		if [[ (-z $tabstospaces && -z $_indent_style) || (-z $tabsize && -z $_indent_size) ]]; then
			type -t detect-indent > /dev/null || source "$(dirname -- "$self")/functions/detect-indent.sh"
			detect-indent "$file"
		fi
		# if tabstospaces isn't set, and we detected an indent style:
		#   if we don't have an indent style yet, adopt the one we detected
		#   else if the detected style doesn't match the previous one, conflict & break
		if [[ -z $tabstospaces && -n $_indent_style ]]; then
			if [[ -z $indent_style ]]; then
				indent_style="$_indent_style"
			elif [[ "$indent_style" != "$_indent_style" ]]; then
				indent_conflict=true
				break
			fi
		fi
		# if tabsize isn't set, and we detected an indent size:
		#   if we don't have an indent size yet, adopt the one we detected
		#   else if the detected size doesn't match the previous one:
		#     if indent style is space, conflict & break
		if [[ -z $tabsize && -n $_indent_size ]]; then
			if [[ -z $indent_size ]]; then
				indent_size="$_indent_size"
			elif [[ "$indent_size" != "$_indent_size" &&
			        ($tabstospaces == true || "$indent_style" == "space") ]]
			then
				indent_conflict=true
				break
			fi
		fi
	done
fi
# Speak up if we've detected conflicting indentation styles across the files
# to be edited; describe what nano will do, and (interactively) offer to bail
if [[ $indent_conflict == true ]]; then
	# Work out which single indentation nano will apply to every file
	if [[ $indent_style == "tab" ]]; then
		blanket_indent="tabs"
	elif [[ -n $tabsize || -n $indent_size ]]; then
		blanket_indent="${tabsize:-$indent_size} spaces"
	else
		blanket_indent="spaces"
	fi
	if [[ -t 0 && -t 1 ]]; then
		terminal=true  # We'll prompt if stdin/stdout are connected to a terminal
		bright='\033[1m'
		normal='\033[0m'
	fi
	echo -e "${bright}Conflicting indentation settings were detected across different files.${normal}"
	echo "Nano uses the same indentation settings for all files loaded in a session,"
	echo "so it will indent newly-added lines with $blanket_indent in all of them."
	if [[ $terminal == true ]]; then
		echo -en "${bright}Continue [y|n]? ${normal}"
		read -rn 1
		echo
		# Anything other than y/Y aborts without launching nano
		[[ $REPLY == "Y" || $REPLY == "y" ]] || exit 0
	fi
fi
if [[ $will_edit == true ]]; then
	# Enable spell-checking via the first available checker, unless one has
	# already been configured ($SPELL), requested (-s/--speller), or set in
	# ~/.nanorc. The -s flag keeps grep quiet when ~/.nanorc doesn't exist
	# (previously grep printed a "No such file or directory" error to stderr).
	if [[ -n $SPELL || -n $speller ]] || grep -Eqs "^\s*set speller" ~/.nanorc; then
		# A spell-checker has already been configured/requested
		true
	elif type -p aspell > /dev/null; then
		export SPELL='aspell check'
	elif type -p hunspell > /dev/null; then
		export SPELL='hunspell'
	elif type -p ispell > /dev/null; then
		export SPELL='ispell -M'
	fi
	# If this script is "nano", unset tabstospaces if needed
	# (but take it under advisement that this user prefers spaces for indentation)
	if [[ "$(basename -- "$0")" == "nano" ]]; then
		if grep -Eqs "^\s*set tabstospaces" ~/.nanorc; then
			NICER_NANO_PREFER_SPACES=true
			if ! grep -Fiqs "Added by nicer-nano" ~/.nanorc; then
				echo -e "\n# Added by nicer-nano" >> ~/.nanorc
				echo "unset tabstospaces" >> ~/.nanorc
			fi
		fi
	fi
fi
# Run nano, adding indentation options derived from detection when the user
# didn't pass them explicitly
if [[ -z $tabstospaces &&
      ($indent_style == "space" || ($indent_style == "" && $NICER_NANO_PREFER_SPACES == true)) ]]
then
	nano_args="--tabstospaces"
fi
if [[ -z $tabsize && -n $indent_size ]]; then
	[[ -z $nano_args ]] || nano_args+=" "
	nano_args+="--tabsize=$indent_size"
fi
if [[ $NICER_NANO_TESTING_CMD_LINE == true ]]; then
	echo "file names: ${files[*]:--}"
	echo "will edit: $will_edit"
	echo "\$tabstospaces: ${tabstospaces:--}"
	echo "\$tabsize: ${tabsize:--}"
	cmd="echo"
elif [[ $NICER_NANO_TESTING_CONFLICTS == true ]]; then
	cmd="echo"
else
	cmd="exec"
fi
# $nano_args is intentionally unquoted so its options split into separate words
$cmd /usr/bin/nano $nano_args "$@"
|
<gh_stars>0
import { OrderSpec, EventApi } from '@fullcalendar/common';
import { TableSeg } from './TableSeg';
import { TableCellModel } from './TableCell';
/**
 * Computes the placement of foreground event segments within one table row,
 * honoring the day-max-event limits, the available content height, and the
 * given event ordering, and reporting per-column "+N more" counts/positions.
 */
export declare function computeFgSegPlacement(// for one row. TODO: print mode?
cellModels: TableCellModel[], segs: TableSeg[], dayMaxEvents: boolean | number, dayMaxEventRows: boolean | number, eventHeights: {
    [instanceIdAndFirstCol: string]: number;
}, maxContentHeight: number | null, colCnt: number, eventOrderSpecs: OrderSpec<EventApi>[]): {
    segsByFirstCol: TableSeg[][];
    segsByEachCol: TableSeg[][];
    segIsHidden: {
        [instanceId: string]: boolean;
    };
    segTops: {
        [instanceId: string]: number;
    };
    segMarginTops: {
        [instanceId: string]: number;
    };
    moreCnts: number[];
    moreTops: {
        [col: string]: number;
    };
    paddingBottoms: {
        [col: string]: number;
    };
};
//# sourceMappingURL=event-placement.d.ts.map |
<gh_stars>0
const { Command } = require('klasa');
module.exports = class extends Command {
constructor(...args) {
super(...args, {
guarded: true,
description: 'Returns with your ping',
});
}
async run(message) {
const msg = await message.send('Pinging');
return message.send(`**Roundrip took: \`${(msg.editedTimestamp || msg.createdTimestamp) - (message.editedTimestamp || message.createdTimestamp)} ms\` | HeartBeat: \`${Math.round(this.client.ws.ping)} ms\`**`);
}
};
|
package string_handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
*
* @author minchoba
* 백준 1212번: 8진수 2진수
*
* @see https://www.acmicpc.net/problem/1212/
*
*/
public class Boj1212 {
	// 3-bit binary representation for each octal digit 0..7.
	private static final String[] bin = {"000", "001", "010", "011", "100", "101", "110", "111"};
	/**
	 * Reads an octal number from stdin and prints its binary representation:
	 * each octal digit maps to three bits, and leading zeros of the very
	 * first group are stripped.
	 */
	public static void main(String[] args) throws Exception{
		BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
		String octal = reader.readLine();
		StringBuilder result = new StringBuilder();
		for (int i = 0; i < octal.length(); i++) {
			String bits = bin[octal.charAt(i) - '0'];
			if (i == 0) {
				// Strip leading zeros of the first group by round-tripping
				// the 3-bit string through an integer (e.g. "010" -> 10).
				result.append(Integer.parseInt(bits));
			} else {
				result.append(bits);
			}
		}
		System.out.println(result.toString());
	}
}
|
import React, {useState} from 'react';
import {Button, Form, Input} from 'antd';
import axios from 'axios';
import Weather from './Weather';
const SearchWeather = () => {
const [data, setData] = useState(null);
const [location, setLocation] = useState('');
const handleSubmit = (e) => {
e.preventDefault();
axios.get('http://api.openweathermap.org/data/2.5/weather', {
params: {
q: location,
appid: insert_your_api_key_here
}
}).then((response) => {
const data = response.data;
setData(data);
});
}
return (
<div>
<Form onSubmit={handleSubmit}>
<Input
value={location}
onChange={e => setLocation(e.target.value)}
placeholder="Enter your location"
/>
<Form.Item>
<Button type="primary" htmlType="submit" >
Submit
</Button>
</Form.Item>
</Form>
{data && <Weather data={data} />}
</div>
);
};
export default SearchWeather; |
import java.util.Map;
public class MappingSystem {
	/**
	 * Rewrites {@code input} by scanning left to right and replacing each
	 * occurrence of a mapper key with its mapped value; characters that match
	 * no key are copied through unchanged.
	 *
	 * <p>When multiple keys match at the same position, the first one returned
	 * by the map's key-set iteration wins (order-dependent for a HashMap).
	 *
	 * @param input  the string to rewrite
	 * @param mapper substring-to-replacement mapping
	 * @return the rewritten string
	 */
	public String applyMapping(String input, Map<String, String> mapper) {
		StringBuilder result = new StringBuilder();
		int pos = 0;
		while (pos < input.length()) {
			String matched = null;
			for (String candidate : mapper.keySet()) {
				if (input.startsWith(candidate, pos)) {
					matched = candidate;
					break;
				}
			}
			if (matched == null) {
				result.append(input.charAt(pos));
				pos++;
			} else {
				result.append(mapper.get(matched));
				pos += matched.length();
			}
		}
		return result.toString();
	}
}
<reponame>skoussa/hashbrown-cms<gh_stars>0
'use strict';
/**
* The base class for all controllers
*
* @memberof HashBrown.Server.Controller
*/
class ControllerBase {
    /**
     * Initialises this controller; subclasses must override to mount routes.
     *
     * @param {Object} app  The application to mount routes on
     */
    static init(app) {
        throw new Error('The "init" method must be overridden');
    }
    /**
     * Authenticates a request
     *
     * @param {String} token  Session token supplied by the request
     *
     * @returns {HashBrown.Entity.Resource.User} User object
     */
    static async authenticate(token) {
        // No token was provided
        if(!token) {
            throw new Error('You need to be logged in to do that');
        }
        let user = await HashBrown.Service.UserService.findToken(token);
        // No user was found
        if(!user) {
            throw new Error('You need to be logged in to do that');
        }
        return user;
    }
    /**
     * Authorises a request; throws on any failed check
     *
     * @param {HashBrown.Entity.Resource.User} user
     * @param {String} project  Project the user must have access to (optional)
     * @param {String} scope  Scope within the project the user must have (optional)
     * @param {Boolean} needsAdmin  Whether the user must be an administrator
     */
    static authorize(user, project = '', scope = '', needsAdmin = false) {
        checkParam(user, 'user', HashBrown.Entity.Resource.User);
        checkParam(project, 'project', String);
        checkParam(scope, 'scope', String);
        checkParam(needsAdmin, 'needsAdmin', Boolean);
        // No user was provided
        if(!user) {
            throw new Error('You need to be logged in to do that');
        }
        // Admin is required, and user isn't admin
        if(needsAdmin && !user.isAdmin) {
            throw new Error('You need to be admin to do that');
        }
        // A project is defined, and the user doesn't have it
        if(project && !user.hasScope(project)) {
            throw new Error('You do not have permission to use this project');
        }
        // A scope is defined, and the user doesn't have it
        if(scope && !user.hasScope(project, scope)) {
            throw new Error(`You do not have permission to use the "${scope}" scope in this project`);
        }
    }
    /**
     * Sets project variables by parsing req.originalUrl of the shape
     * /[api|media]/<project>/<environment>/..., assigning req.project and
     * req.environment, then validating that both exist.
     *
     * @param {Object} req
     */
    static async setProjectVariables(req) {
        let values = req.originalUrl.split('/');
        let project = null;
        let environment = null;
        // NOTE(review): split() always returns an array, so this guard is always truthy
        if(values) {
            if(!values[0]) { values.shift(); }
            if(values[0] === 'api') { values.shift(); }
            if(values[0] === 'media') { values.shift(); }
            req.project = values[0];
            // "settings" in the environment slot means no environment is addressed
            if(values[1] !== 'settings') {
                req.environment = values[1];
            }
        }
        // Check if project and environment exist
        let projectExists = await HashBrown.Service.ProjectService.projectExists(req.project);
        if(!projectExists) {
            throw new Error('Project "' + req.project + '" could not be found');
        }
        if(req.environment) {
            let environmentExists = await HashBrown.Service.ProjectService.environmentExists(req.project, req.environment);
            if(!environmentExists) {
                throw new Error(`Environment "${req.environment}" was not found for project "${req.project}" in ${this.name} using path "${req.route.path}"`);
            }
        }
    }
}
module.exports = ControllerBase;
|
#!/bin/bash
# Build the core-hubot image, forwarding the PROXY env var as a build argument.
docker build -f Dockerfile-core --build-arg=PROXY=$PROXY -t docker.io/jesseolsen/core-hubot:latest .
|
#!/bin/sh
# Rebind desktop shortcuts that clash with IDE key combos.
# (Fixed shebang: was "#!/bis/sh".)
# Alt+Enter -> Super+Enter
gsettings list-recursively | grep -iE "'.?alt.?enter'" | awk '//{print $1 " " $2 }' | xargs -I{} gsettings set {} '"[<Super>enter]"'
# Ctrl+Space -> unbound
gsettings list-recursively | grep -iE "'.?(ctrl|control).?space'" | awk '//{print $1 " " $2 }' | xargs -I{} gsettings set {} '"[]"'
# Ctrl+Shift+Space -> unbound
# (Previously this line repeated the Ctrl+Space pattern and never matched shift bindings.)
gsettings list-recursively | grep -iE "'.?(ctrl|control).?shift.?space'" | awk '//{print $1 " " $2 }' | xargs -I{} gsettings set {} '"[]"'
|
<reponame>Inego/smallrye-mutiny
package io.smallrye.mutiny.converters.multi;
import java.util.concurrent.CompletionStage;
import io.smallrye.mutiny.Multi;
import io.smallrye.mutiny.converters.MultiConverter;
/**
 * Converts a {@code CompletionStage} into a {@code Multi} that emits the
 * stage's eventual result. The converter holds no state, so one shared
 * {@code INSTANCE} is reused everywhere.
 */
public class FromCompletionStage<T> implements MultiConverter<CompletionStage<T>, T> {
    // Raw-typed singleton; safe to share because the converter is stateless.
    public static final FromCompletionStage INSTANCE = new FromCompletionStage();
    private FromCompletionStage() {
        // Avoid direct instantiation
    }
    @Override
    public Multi<T> from(CompletionStage<T> instance) {
        return Multi.createFrom().completionStage(instance);
    }
}
|
<reponame>Teles1/LuniaAsio
#include "QuestManager.h"
#include <StageServer/Protocol/FromServer.h>
#include <StageServer/User/User.h>
#include <StageServer/User/UserManager.h>
#include <StageServer/User/IUserRoom.h>
namespace Lunia {
namespace XRated {
namespace StageServer {
			// Quest-processing error that callers may treat as recoverable
			// (IsCritical() returns false).
			class QuestException : public Exception
			{
			public:
				virtual bool IsCritical() { return false; }
				QuestException(const wchar_t* message) : Exception(message) {}
				QuestException(const String& message) : Exception(message) {}
			};
			// Fatal quest error: IsCritical() returns true so callers can abort.
			class QuestCriticalException : public QuestException
			{
			public:
				bool IsCritical() { return true; }
				QuestCriticalException(const wchar_t* message) : QuestException(message) {}
				QuestCriticalException(const String& message) : QuestException(message) {}
			};
			// Dumps a quest's id, state, expiry date and all parameters to the
			// error log; used purely for duplicate-quest diagnostics below.
			void WriteQuestInfo(const QuestManager::Quest& quest)
			{
				LoggerInstance().Error(L"Quest Hash {} , Current State {}", quest.Id, quest.CurrentState);
				LoggerInstance().Error(L"ExpiredDate : {}", quest.ExpiredDate.ToString().c_str());
				size_t max = quest.Params.size();
				for (size_t i = 0; i < max; ++i)
				{
					LoggerInstance().Error("Param .{} : {}", i, quest.Params[i]);
				}
			}
			// Returns true if any quest hash appears more than once in the list,
			// logging every duplicated entry for diagnostics.
			bool IsDuplicateQuestList(const std::vector<QuestManager::Quest>& quests)
			{
				bool duplicated = false;
				std::vector<uint32> duplicatedHashs;
				std::vector<QuestManager::Quest>::const_iterator iter = quests.begin();
				std::vector<QuestManager::Quest>::const_iterator end = quests.end();
				while (iter != end) {
					uint32 hash = iter->Id;
					// Skip hashes that were already reported as duplicated.
					if (duplicatedHashs.end() != std::find(duplicatedHashs.begin(), duplicatedHashs.end(), hash)) {
						++iter;
						continue;
					}
					std::vector<QuestManager::Quest>::const_iterator iterTemp = iter;
					int count = 0;
					++iterTemp;
					// Count (and log) every later occurrence of this hash.
					while (iterTemp != quests.end()) {
						iterTemp = std::find_if(iterTemp, quests.end(), QuestManager::Quest::FindByHash(hash));
						if (iterTemp != quests.end()) {
							WriteQuestInfo(*iterTemp);
							++iterTemp;
							++count;
						}
					}
					if (count > 0) {
						// duplicated: log the first occurrence too
						WriteQuestInfo(*iter);
						duplicated = true;
						duplicatedHashs.push_back(hash);
						LoggerInstance().Error("Duplicate Quest Hash : {} , Count : {}", hash, (count + 1));
					}
					++iter;
				}
				return duplicated;
			}
			// Returns true if the given hash appears more than once in the list,
			// logging each duplicated entry; returns false if the hash is absent.
			bool IsDuplicateQuestList(const std::vector<QuestManager::Quest>& quests, uint32 hash)
			{
				bool duplicated = false;
				std::vector<QuestManager::Quest>::const_iterator iter = std::find_if(quests.begin(), quests.end(), QuestManager::Quest::FindByHash(hash));
				if (iter == quests.end()) {
					return false;
				}
				std::vector<QuestManager::Quest>::const_iterator iterTemp = iter;
				++iterTemp;
				int count = 0;
				// Count (and log) every occurrence after the first.
				while (iterTemp != quests.end()) {
					iterTemp = std::find_if(iterTemp, quests.end(), QuestManager::Quest::FindByHash(hash));
					if (iterTemp != quests.end()) {
						WriteQuestInfo(*iterTemp);
						++iterTemp;
						++count;
					}
				}
				if (count > 0) {
					WriteQuestInfo(*iter);
					duplicated = true;
					LoggerInstance().Error("Duplicate Quest Hash : {} , Count : {}", hash, (count + 1));
				}
				return duplicated;
			}
			// Quest with explicit info, expiry and parameter-changed flag.
			QuestManager::Quest::Quest(uint32 questHash, const Database::Info::QuestInfo* info, const DateTime::Date& expiredDate, bool parameterChanged)
				: XRated::Quest(questHash, expiredDate)
				, Info(info)
				, ParameterChanged(parameterChanged)
			{
				// A missing quest template is unrecoverable.
				if (!info)
					throw QuestCriticalException(fmt::format(L"invalid or unknown quest information : {}", questHash).c_str());
			}
			// Quest with explicit info and expiry; ParameterChanged defaults to false.
			QuestManager::Quest::Quest(uint32 questHash, const Database::Info::QuestInfo* info, const DateTime::Date& expiredDate)
				: XRated::Quest(questHash, expiredDate)
				, Info(info)
				, ParameterChanged(false)
			{
				if (!info)
					throw QuestCriticalException(fmt::format(L"invalid or unknown quest information : {}", questHash).c_str());
			}
			// Quest whose info is looked up from the database by hash, with no expiry.
			QuestManager::Quest::Quest(uint32 questHash)
				: XRated::Quest(questHash, XRated::Quest::NotUsedDate)
				, Info(Database::DatabaseInstance().InfoCollections.Quests.Retrieve(questHash))
				, ParameterChanged(false)
			{
				if (Info == NULL)
					throw QuestCriticalException(fmt::format(L"invalid or unknown quest information : {}", questHash).c_str());
			}
			// Returns the externally visible state of a quest: the working list is
			// checked first (converting internal "Succeeded" back to "Working" when
			// the completion condition isn't actually met), then the completed map.
			uint8 QuestManager::GetQuestState(const UserSharedPtr user, uint32 questHash) const
			{
				AutoLock lock(cs);
				{
					std::vector<Quest>::const_iterator i = std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(questHash));
					if (i != workingQuests.end())
					{
						/* because Quest::CurrentState is used as current objective state internally, it should be converted as real current state */
						if (i->CurrentState == XRated::Quest::State::Succeeded && GetValidResult(user, i->Info->CompleteCondition, false) != ValidQuestConditionResult::Success)
							return XRated::Quest::State::Working;
						return i->CurrentState;
					}
				}
				{
					// Completed at least once?
					std::map<uint32, uint32>::const_iterator i = completedQuests.find(questHash);
					if ((i != completedQuests.end()) && (i->second > 0)) return XRated::Quest::State::Completed;
				}
				return XRated::Quest::State::NotAccepted;
			}
			// Transitions a quest to newState and notifies the client; returns the
			// previous state. Dropping (NotAccepted) and Offered are special-cased.
			uint8 QuestManager::SetQuestState(UserSharedPtr user, uint32 questHash, uint8 newState)
			{
				AutoLock lock(cs);
				uint8 oldState(XRated::Quest::State::NotAccepted);
				std::vector<Quest>::iterator i = std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(questHash));
				if (i != workingQuests.end())
				{
					oldState = i->CurrentState;
					if (oldState == newState) // nothing has been changed
						return oldState;
					switch (newState) // special case
					{
					case XRated::Quest::State::Offered: // ignorible
						return oldState;
					case XRated::Quest::State::NotAccepted: // drop the quest
						if (DropQuest(user, questHash) == true) {
							//Logger().Write(IAdmLogger::ImportantLogger, "questdrop-setqueststate", user->GetName(), Http::Logger::Parameter() << questHash);
						}
						return oldState;
					}
					i->CurrentState = newState;
				}
				else
				{
					// The quest isn't in the working list yet.
					switch (newState)
					{
					case XRated::Quest::State::Succeeded:
					case XRated::Quest::State::Failed:
					case XRated::Quest::State::Completed:
					case XRated::Quest::State::NotAccepted: // no-working quest cannot be dropped
						LoggerInstance().Error("quest state can be changed in working quest list");
						return XRated::Quest::State::NotAccepted;
					}
					if (newState != XRated::Quest::State::Offered) // 'Offered' state doesn't affect server-side, is just an event to client.
					{
						workingQuests.push_back(Quest(questHash));
						workingQuests.back().CurrentState = newState;
						//Logger().Write(IAdmLogger::ImportantLogger, "questchange", user->GetName(), Http::Logger::Parameter()
						//	<< questHash << newState );
					}
				}
				/* notice the new state to user */
				Protocol::FromServer::Quest::ChangeState response;
				response.QuestHash = questHash;
				response.NewState = newState;
				if (newState == XRated::Quest::State::Completed)
				{
					// operator[] default-constructs the count to 0 when absent.
					response.CompletedCount = completedQuests[questHash];
				}
				else
				{
					response.CompletedCount = 0;
				}
				user->Send(response);
				ChangeActiveQuestState(questHash, newState);
				return oldState;
			}
uint32 QuestManager::GetQuestParameter(const UserSharedPtr UserSharedPtr, uint32 questHash, uint8 paramIndex) const
{
AutoLock lock(cs);
std::vector<Quest>::const_iterator i = std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(questHash));
if (i != workingQuests.end() && i->Params.size() > paramIndex)
{
return i->Params[paramIndex];
}
LoggerInstance().Error("invalid quest({}) or parameter({}) requested", questHash, paramIndex);
return 0; // unalbe to find the result
}
			// Sets parameter paramIndex of a working quest to newValue, pushing an
			// update to the client and re-evaluating the quest state from the new
			// parameters. Returns the previous parameter value (0 on failure).
			uint32 QuestManager::SetQuestParameter(UserSharedPtr user, uint32 questHash, uint8 paramIndex, uint32 newValue)
			{
				AutoLock lock(cs);
				uint32 oldParam(0);
				std::vector<Quest>::iterator i = std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(questHash));
				if (i != workingQuests.end() && i->Params.size() > paramIndex)
				{
					oldParam = i->Params[paramIndex];
					i->Params[paramIndex] = newValue;
					/* notice the new parameter to user */
					if (oldParam != newValue)
					{
						Protocol::FromServer::Quest::Update response;
						response.Quest = *i;
						user->Send(response);
						i->ParameterChanged = true;
					}
					/* update quest state by parameter change */
					switch (i->CurrentState)
					{
					case XRated::Quest::State::CompletedAndDropWaiting:
					case XRated::Quest::State::FailedAndDropWaiting:
						// Quests waiting to be dropped keep their state.
						return oldParam;
						break;
					default:
						{
							uint8 oldState = i->CurrentState;
							i->CurrentState = i->Info->UpdateParameter(L""/*should be "script"?*/, 0, 0, false, i->Params);
							if (oldState != i->CurrentState)
							{
								// State changed as a consequence; notify the client.
								Protocol::FromServer::Quest::ChangeState response;
								response.QuestHash = questHash;
								response.NewState = i->CurrentState;
								if (i->CurrentState == XRated::Quest::State::Completed)
								{
									response.CompletedCount = completedQuests[questHash];
								}
								else
								{
									response.CompletedCount = 0;
								}
								user->Send(response);
								ChangeActiveQuestState(questHash, i->CurrentState);
								i->ParameterChanged = true;
							}
							return oldParam;
						}
						break;
					}
				}
				LoggerInstance().Error("invalid quest({}) or parameter({}) requested", questHash, paramIndex);
				return oldParam;
			}
			// Client accepted a quest directly: location checks apply (true).
			void QuestManager::Dispatch(const UserSharedPtr user, Protocol::ToServer::Quest::Accept& packet)
			{
				AcceptQuest(user, packet.QuestHash, packet.SelectRewardIndex, true);
				user->AcceptedQuest(packet.QuestHash);
			}
			// Client answered a shared quest: location checks are skipped (false).
			void QuestManager::Dispatch(const UserSharedPtr user, Protocol::ToServer::Quest::SharedAnswer& packet)
			{
				AcceptQuest(user, packet.QuestHash, packet.SelectRewardIndex, false);
				user->AcceptedQuest(packet.QuestHash);
			}
/* Validates and executes a quest-accept request.
 *   user              - requesting character (its sync object is locked for the call).
 *   questHash         - hash id of the quest to accept.
 *   SelectRewardIndex - index of the reward chosen by the player.
 *   checkLocation     - true for direct accepts (validate stage/NPC distance);
 *                       false for party-shared accepts.
 * Side effects on success: accept reward is granted, the quest is appended to
 * workingQuests, and Quest::Accept is sent to the client.
 * Failure paths either silently return (recoverable / client-checkable cases),
 * throw QuestException (reported via CriticalError), or fall through to the
 * final CriticalError.
 * Fix: the catch-handler's format string had three replacement fields but only
 * two arguments ("... : {} {}"), which makes fmt::format throw and masks the
 * real error; the extra field is removed. */
void QuestManager::AcceptQuest(UserSharedPtr user, uint32 questHash, uint32 SelectRewardIndex, bool checkLocation)
{
	AutoLock userLock(user->GetSyncObject());
	const Database::Info::QuestInfo* info(Database::DatabaseInstance().InfoCollections.Quests.Retrieve(questHash));
	try {
		do // dummy do to validate quest accept request - make sure 'break' for CRITICAL invalid tag like something client can validate first
		{
			AutoLock lock(cs);
			DateTime now = DateTime::Now();
			if (dbRequested.find(user) != dbRequested.end()) return; // already doing something( accept or complete
			if (!user->GetPlayer()) return; // invalid character state
			if (!info)
			{
				LoggerInstance().Error("invalid quest({}) accept info==NULL", questHash);
				// unable to find the quest information on server
				throw QuestCriticalException(fmt::format(L"quest({}) info==NULL", questHash).c_str());
			}
			if (info->IsEventQuest() && !UserManagerInstance().IsEventQuestPeriod(info->GetEventType()))
			{
				LoggerInstance().Error("not event period quest({}) accept", questHash);
				throw QuestException(fmt::format(L"not event period quest"));
			}
			if (checkLocation == true) {
				/* the accepting character must be in the quest's accept stage
				 * (item-activity quests use a sentinel location and are exempt) */
				if (info->AcceptLocation.StageGroupHash && info->AcceptLocation != user->GetCurrentStage() && !IsItemActivityQuest(info)) // check accept location
				{
					LoggerInstance().Error("invalid quest({}) accept location({},{}) - user location({},{})", questHash
						, info->AcceptLocation.StageGroupHash, info->AcceptLocation.Level
						, user->GetCurrentStage().StageGroupHash, user->GetCurrentStage().Level);
					return; // could be invalid quest data set
				}
				/* TODO : client side npc validation - currently, only the condition works in a stage */
				if (info->AcceptLocation.StageGroupHash && info->AcceptSourceHash && !(user->GetRoom()->GetCurrentStageGroupInfo()->GameType & XRated::Constants::PeacefulGameTypeMask) && !IsItemActivityQuest(info))
				{
					/* the accept-source NPC must exist and be within interaction range */
					float distance = user->GetRoom()->GetObjectDistance(user, info->AcceptSourceHash);
					if (distance < 0/*not exist*/ || distance>240)
					{
						/*std::string message = fmt::format("invalid quest({}) target({}) accept distance({})", questHash, info->AcceptSourceHash, distance);
						Logger().Write(IAdmLogger::NormalLogger, "critical", user->GetName(), message.c_str();
						ALLM_ERROR((message.c_str());*/
						return; // just cancel the command
					}
				}
			}
			std::vector<Quest>::iterator iter = std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(questHash));
			if (iter != workingQuests.end()) {
				/* the quest is already in the working list: only legal for reset
				 * quests that are drop-waiting and past (or within 5 min of)
				 * their reset date; anything else is a client error */
				if (info->IsHaveResetDate() == false) {
					LoggerInstance().Error("invalid quest({}) accept workingQuest : have working quest list(this quest is not ReSetQuest)", questHash);
					if (IsDuplicateQuestList(workingQuests) == true) {
						LoggerInstance().Error("this user have Duplicated WorkingQuestList : {} [IsHaveResetDate Wrong]", user->GetSerial());
					}
					WriteQuestInfo(*iter);
					user->CriticalError(fmt::format("invalid quest({})-accept requested, already have quest", questHash).c_str(), false);
					return;
					//break; // a quest cannot work concurrently.
				}
				if (((XRated::Quest::State::CompletedAndDropWaiting == iter->CurrentState) || (XRated::Quest::State::FailedAndDropWaiting == iter->CurrentState)) == false) {
					LoggerInstance().Error("invalid quest({}) accept workingQuest: have working quest list, current state is not accept ready : state[{}](this quest is ReSetQuest)", questHash, iter->CurrentState);
					if (IsDuplicateQuestList(workingQuests) == true) {
						LoggerInstance().Error("this user have Duplicated WorkingQuestList :{} [ReSetQuestState Wrong]", user->GetSerial());
					}
					WriteQuestInfo(*iter);
					user->CriticalError(fmt::format("invalid quest({})-accept requested, already have quest", questHash).c_str(), false);
					return;
				}
				if ((now.GetDate() <= iter->ExpiredDate)) {
					/* allow a 300-second grace window before the reset date */
					now.Add(DateTime::Unit::Second, 300);
					if ((now.GetDate() <= iter->ExpiredDate)) {
						LoggerInstance().Error("invalid quest({}) accept workingQuest: have working quest list, this quest is not reset ready:(this quest is ReSetQuest)", questHash);
						if (IsDuplicateQuestList(workingQuests) == true) {
							LoggerInstance().Error("this user have Duplicated WorkingQuestList : {} [ReSetQuestExpired Wrong]", user->GetSerial());
						}
						WriteQuestInfo(*iter);
						// a quest cannot work concurrently.
						throw QuestCriticalException(L"have working quest list, this quest is not reset ready");
					}
					LoggerInstance().Warn(L"Quest request accept to fast(reset quest) : {}, {}", user->GetSerial(), questHash);
				}
				/* reset quest being re-accepted: drop the stale entry first */
				workingQuests.erase(iter);
			}
			//if ( std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(questHash))!=workingQuests.end() )
			//{
			//	LoggerInstance().Error("invalid quest({}) accept workingQuest", questHash);
			//	break; // a quest cannot work concurrently.
			//}
			if (GetValidQuestCount() >= XRated::Quest::MaxCount)
			{
				LoggerInstance().Error("invalid quest({}) accept out of range", questHash);
				// out of range
				throw QuestCriticalException(fmt::format(L"accept out of range"));
			}
			{
				/* a non-repeatable quest may not be accepted after completion */
				std::map<uint32, uint32>::const_iterator iter = completedQuests.find(questHash);
				if ((info->IsRepeatQuest() == false) && (iter != completedQuests.end()) && (iter->second > 0))
				{
					LoggerInstance().Error("invalid quest({}) accept already complete", questHash);
					// an unrepeatable quest is requested again.
					throw QuestCriticalException(fmt::format(L"accept already complete"));
				}
			}
			if (info->IsRepeatQuest() == true && IsRepeatableQuest(*info, questHash) == false)
			{
				/* IsRepeatableQuest only returns false when a completedQuests
				 * entry exists, so the find() below cannot be end() */
				LoggerInstance().Error("invalid quest({}) not repeatable quest - completeCount({}) repeatableCount({})", questHash
					, completedQuests.find(questHash)->second, info->MaximumCompleteableCount);
				// over complete count quest is requested again.
				throw QuestCriticalException(fmt::format(L"not repeatable quest - completeCount({}) repeatableCount({})", completedQuests.find(questHash)->second, info->MaximumCompleteableCount));
			}
			if (info->AcceptCondition.IsGuildQuest() == true) {
				/* guild quests require membership and a minimum guild level */
				if ((user->IsPartOfGuild() == false)) {
					if (user->GetRequestedInitGuildInfo() == true) {
						LoggerInstance().Error("Not Recved Init GuildInfo From DB");
					}
					user->CriticalError(fmt::format("AcceptCondition::Quest::IsValid Failed, not joined guild user - character({})", StringUtil::To<std::string>(user->GetName()).c_str()).c_str(), false);
					return;
				}
				if (user->GetGuildInfo().GuildLevel < info->AcceptCondition.GuildLevel) {
					user->CriticalError(fmt::format("AcceptCondition::Quest::IsValid Failed, invalid guildLevel condition - character({})", StringUtil::To<std::string>(user->GetName()).c_str()).c_str(), false);
					return;
				}
			}
			/* generic accept-condition validation (stats, items, licenses, quests) */
			ValidQuestConditionResult result = GetValidResult(user, info->AcceptCondition, true);
			if (result != ValidQuestConditionResult::Success) {
				LoggerInstance().Error("invalid quest({}) accept quest condition", questHash);
				const wchar_t* errorMsg = GetValidQuestConditionErrorMsg(result);
				if (errorMsg != NULL) {
					throw QuestCriticalException(fmt::format(L"Vaild : {}", errorMsg));
				}
				throw QuestCriticalException(fmt::format(L"Vaild : Unknown"));
			}
			//edit by kds218
			if (ValidateInventory(user, info->AcceptReward, SelectRewardIndex, questHash, 1) == false) return; // invalid inventory states
			DateTime::Date expiredDate(info->GetExpiredDate(now).GetDate());
			if (GiveReward(user, info->AcceptReward, SelectRewardIndex, 1) == false)
			{
				/* just return, because this is not that serious. client could not check the item slot validation */
				return;
			}
			workingQuests.push_back(Quest(questHash, info, expiredDate, true));
			/* notice the success of quest accept */
			Protocol::FromServer::Quest::Accept response;
			response.QuestHash = questHash;
			response.NewState = XRated::Quest::State::Working;
			response.ExpiredDate = expiredDate;
			user->Send(response);
			ChangeActiveQuestState(questHash, XRated::Quest::State::Working);
			/*Logger().Write(IAdmLogger::ImportantLogger, "questaccept", user->GetName(), Http::Logger::Parameter()
			<< questHash << user->GetLevel();*/
			return;
		} while (false);
	}
	catch (QuestException& e) {
		/* FIX: was "... : {} {}" with only two arguments, which throws
		 * fmt::format_error and hides the original failure */
		user->CriticalError(fmt::format("invalid accept quest({}) : {}", questHash, e.what()).c_str(), e.IsCritical());
		return;
	}
	//default...
	user->CriticalError(fmt::format("invalid quest({})-accept requested", questHash).c_str(), true);
}
/* Handles a client request to turn in (complete) a quest.
 * Validation order: db-request in flight / player state / quest info / event
 * period / quest actually working / flood check / NPC distance / objective
 * parameters / complete condition / inventory space.  On success the complete
 * reward is granted, the completion count is bumped, and a ChangeState packet
 * is sent.  Reset-date quests within their window are kept in workingQuests in
 * the CompletedAndDropWaiting state instead of being erased.
 * Failure paths mirror AcceptQuest: silent return, QuestException -> CriticalError,
 * or the trailing CriticalError. */
void QuestManager::Dispatch(const UserSharedPtr user, Protocol::ToServer::Quest::Complete& packet)
{
AutoLock userLock(user->GetSyncObject());
const Database::Info::QuestInfo* info(Database::DatabaseInstance().InfoCollections.Quests.Retrieve(packet.QuestHash));
try {
do // dummy do to validate quest accept request - make sure 'break' for CRITICAL invalid tag like something client can validate first
{
{
AutoLock lock(cs);
if (dbRequested.find(user) != dbRequested.end()) return; // already doing something( accept or complete
if (user->GetPlayer() == NULL) return; // invalid character state
if (info == NULL) {
LoggerInstance().Error("invalid quest({}) complete info==NULL", packet.QuestHash);
throw QuestCriticalException(fmt::format(L"complete info==NULL"));
// unable to find the quest information on server
}
if (info->IsEventQuest() && !UserManagerInstance().IsEventQuestPeriod(info->GetEventType()))
{
LoggerInstance().Error("not event period quest({}) complete", packet.QuestHash);
throw QuestException(fmt::format(L"not event period complete quest"));
}
/* the quest must be in the working list to be completed */
std::vector<Quest>::iterator quest = std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(packet.QuestHash));
if (quest == workingQuests.end()) {
LoggerInstance().Error("invalid quest({}) not accepted quest", packet.QuestHash);
user->CriticalError(fmt::format("invalid quest({})-complete requested, not exsist quest", packet.QuestHash).c_str(), false);
return;
}
else if ((info->IsHaveResetDate() == true)) {
/* a drop-waiting reset quest has already been turned in this cycle */
if ((quest->CurrentState == XRated::Quest::State::CompletedAndDropWaiting) || (quest->CurrentState == XRated::Quest::State::FailedAndDropWaiting)) {
user->CriticalError(fmt::format("invalid quest({})-complete requested, complete or failed and drop waiting quest", packet.QuestHash).c_str(), false);
return;
}
}
if (IsDuplicateQuestList(workingQuests, packet.QuestHash) == true) {
LoggerInstance().Error("this user have Duplicated WorkingQuestList user : {} [CompleteQuestChecker]", user->GetSerial());
user->CriticalError(fmt::format("Dulpicated WorkingQuestList - Request Complete Quest").c_str(), true);
return;
}
//if ( !user->IsAdmitable( 6500 ) ) { //FloodCheck. Quest has a high factor
if (!user->IsAdmitable(500)) { //FloodCheck. Quest has a high factor
user->CriticalError(fmt::format("Packet Flood - Quest::Complete [{}/{}]", packet.QuestHash, packet.SelectRewardIndex).c_str(), true);
return;
}
/* TODO : client side npc validation - currently, only the condition works in a stage */
if (info->AcceptLocation.StageGroupHash && info->CompleteTarget.empty() == false && !(user->GetRoom()->GetCurrentStageGroupInfo()->GameType & XRated::Constants::PeacefulGameTypeMask))
{
/* at least one turn-in target must be within 240 units; distance is
 * reset to -1 after each miss and 0 on the first hit */
float distance(-1);
for (std::vector<uint32>::const_iterator i = info->CompleteTarget.begin(), end = info->CompleteTarget.end(); i != end; ++i)
{
distance = user->GetRoom()->GetObjectDistance(user, *i);
if (distance > 0 && distance < 240) { distance = 0; break; } // right npc or character
distance = -1;
}
if (distance < 0)
{
std::string message = fmt::format("valid npc target({} or more) not exist(distance:{})", info->CompleteTarget[0], distance);
/*Logger().Write(IAdmLogger::NormalLogger, "critical", user->GetName(), message.c_str());*/
LoggerInstance().Error(message.c_str());
Protocol::FromServer::Quest::Error response;
response.QuestHash = packet.QuestHash;
response.Result = XRated::QuestErrorType::FarDistance;
user->Send(response);
return; // just cancel the command
}
}
/* validate complete objectives */
if (quest->Info->UpdateParameter(L"", 0, 0, false, quest->Params) != Quest::State::Succeeded &&
quest->CurrentState != XRated::Quest::State::Succeeded) {
if (IsDuplicateQuestList(workingQuests) == true) {
LoggerInstance().Error("this user have Duplicated WorkingQuestList user : {} [CompleteQuest::UpdateParameter Wrong]", user->GetSerial());
}
LoggerInstance().Error("validate quest({}) condition Failed", packet.QuestHash);
WriteQuestInfo(*quest);
throw QuestCriticalException(fmt::format(L"validate condition Failed"));
}
/* NPC cannot be validate because of client object (quest can be completed by client object which is unknown by server) */
if (info->CompleteCondition.IsGuildQuest() == true) {
if ((user->IsPartOfGuild() == false)) {
if (user->GetRequestedInitGuildInfo() == true) {
LoggerInstance().Error("Not Recved Init GuildInfo From DB");
}
user->CriticalError(fmt::format("CompleteCondition::Quest::IsValid Failed, not joined guild user - character({})", StringUtil::To<std::string>(user->GetName()).c_str()).c_str(), false);
return;
}
if (user->GetGuildInfo().GuildLevel < info->CompleteCondition.GuildLevel) {
user->CriticalError(fmt::format("CompleteCondition::Quest::IsValid Failed, invalid guildLevel condition - character({})", StringUtil::To<std::string>(user->GetName()).c_str()).c_str(), false);
return;
}
}
/* packet.count allows completing a repeatable quest multiple times at once */
ValidQuestConditionResult result = GetValidResult(user, info->CompleteCondition, true, packet.count);
if (result != ValidQuestConditionResult::Success) {
LoggerInstance().Error("invalid quest({}) complete quest condition", packet.QuestHash);
const wchar_t* errorMsg = GetValidQuestConditionErrorMsg(result);
if (errorMsg != NULL) {
throw QuestCriticalException(fmt::format(L"Vaild : {}", errorMsg));
}
throw QuestCriticalException(fmt::format(L"Vaild : Unknown"));
}
if (ValidateInventory(user, info->CompleteReward, packet.SelectRewardIndex, packet.QuestHash, packet.count) == false) return; // invalid inventory states
int deleteWorkingQuest = 1;
/* finally */
DateTime::Date now = DateTime::Date::Now();
if ((info->IsHaveResetDate() == true) && (quest->ExpiredDate >= now)) {
//' request : name, quest hash, quest current state.
/* reset quest still inside its cycle: mark drop-waiting instead of erasing */
quest->CurrentState = XRated::Quest::State::CompletedAndDropWaiting;
//ChangeActiveQuestState(quest->Id, quest->CurrentState);
deleteWorkingQuest = 0;
}
if (GiveReward(user, info->CompleteReward, packet.SelectRewardIndex, packet.count) == false)
{
// just return, because this is not that serious. client could not check the item slot validation
return;
}
/* bump the per-quest completion counter */
std::map<uint32, uint32>::iterator iter = completedQuests.find(packet.QuestHash);
if (iter == completedQuests.end())
completedQuests[packet.QuestHash] = packet.count;
else
iter->second += packet.count;
// notice the success of quest complete
Protocol::FromServer::Quest::ChangeState response;
response.QuestHash = packet.QuestHash;
response.NewState = XRated::Quest::State::Completed;
response.Parameter = packet.SelectRewardIndex;
response.CompletedCount = packet.count;
switch (quest->CurrentState)
{
case Quest::State::CompletedAndDropWaiting:
/* keep the entry only while the quest can still be repeated this cycle */
if (IsRepeatableQuest(*(quest->Info), packet.QuestHash) == false) {
workingQuests.erase(remove_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(packet.QuestHash)), workingQuests.end()); // should not be workingQuests.end()
}
response.NewState = XRated::Quest::State::CompletedAndDropWaiting;
break;
default:
workingQuests.erase(remove_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(packet.QuestHash)), workingQuests.end()); // should not be workingQuests.end()
break;
}
user->Send(response);
ChangeActiveQuestState(packet.QuestHash, XRated::Quest::State::Completed);
//if (user->achievement.IsConnected()) {
//	Net::Protocol::Achievement::ServerProtocol::QuestCompleted packet;
//	packet.characterName = user->GetName();
//	packet.count = response.CompletedCount;
//	packet.questHash = response.QuestHash;
//	packet.questType = quest->Info->GetEventType(); // dunno what this is yet;
//	DateTime::Date date;
//	packet.date = date.Now().ToString();
//	user->achievement.Send(packet);
//}
//Logger().Write(IAdmLogger::ImportantLogger, "questcomplete", user->GetName(), Http::Logger::Parameter()
//	<< packet.QuestHash << packet.SelectRewardIndex << user->GetLevel();
}
/* outside the manager lock: notify other systems of the completion */
user->CompletedQuest(packet.QuestHash);
return;
} while (false);
}
catch (QuestException& e) {
user->CriticalError(fmt::format("invalid complete quest({}) : {}", packet.QuestHash, e.what()).c_str(), e.IsCritical());
return;
}
user->CriticalError(fmt::format("invalid quest({})-complete requested", packet.QuestHash).c_str(), true);
}
/* Handles a client request to abandon a quest; all the real work is in
 * DropQuest(), which requires the manager lock held here. */
void QuestManager::Dispatch(const UserSharedPtr user, Protocol::ToServer::Quest::Drop& packet)
{
	AutoLock userLock(user->GetSyncObject());
	AutoLock lock(cs);
	const bool dropped = DropQuest(user, packet.QuestHash);
	if (dropped) {
		/* audit logging disabled:
		Logger().Write(IAdmLogger::ImportantLogger, "questdrop", user->GetName(), Http::Logger::Parameter()
		<< packet.QuestHash);*/
	}
	else {
		/* audit logging disabled:
		Logger().Write(IAdmLogger::ImportantLogger, "questdropfailed", user->GetName(), Http::Logger::Parameter()
		<< packet.QuestHash);*/
	}
}
void QuestManager::Dispatch(const UserSharedPtr user, Protocol::ToServer::Quest::ShareToPartyPlayer& packet)
{
const Database::Info::QuestInfo* info = NULL;
{
AutoLock userLock(user->GetSyncObject());
AutoLock lock(cs);
try {
info = XRated::Database::DatabaseInstance().InfoCollections.Quests.Retrieve(packet.QuestHash);
if (!info) {
throw QuestCriticalException(fmt::format(L"can not found quest info"));
}
std::vector<Quest>::iterator iter = std::find_if(workingQuests.begin(), workingQuests.end(), Quest::FindByHash(packet.QuestHash));
if (iter == workingQuests.end()) {
throw QuestCriticalException(fmt::format(L"not exist workingQuests"));
}
if (iter->Info->IsShareQuest() == false) {
throw QuestCriticalException(fmt::format(L"Imposible share to quest"));
}
}
catch (QuestException& e) {
user->CriticalError(fmt::format("Wrong ShareToPartyPlayer, Quest({}) : {}", packet.QuestHash, e.what()).c_str(), e.IsCritical());
return;
}
}
user->ShareQuestToParty(*info, packet.QuestHash);
}
/* Drops a quest for the given user and notifies the client.
 * Returns false when the quest is not in the working list or has no static
 * info.  Reset-date quests still inside their cycle are kept in the list in
 * the FailedAndDropWaiting state; all others are erased outright.
 * Caller is expected to hold the manager critical section. */
bool QuestManager::DropQuest(UserSharedPtr user, uint32 questHash)
{
	std::vector<Quest>::iterator found = std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(questHash));
	if (found == workingQuests.end())
		return false; /* not an active quest for this character */

	const Database::Info::QuestInfo* info(Database::DatabaseInstance().InfoCollections.Quests.Retrieve(questHash));
	if (!info)
		return false; /* no static data for this hash */

	DateTime::Date now(DateTime::Date::Now());
	const bool keepForReset = (info->IsHaveResetDate() == true) && (found->ExpiredDate >= now);
	if (keepForReset) {
		//' request : name, quest hash, quest current state.
		/* resettable quest: flag as failed-and-waiting instead of removing */
		found->CurrentState = XRated::Quest::State::FailedAndDropWaiting;
		Protocol::FromServer::Quest::ChangeState response;
		response.QuestHash = found->Id;
		response.NewState = found->CurrentState;
		response.CompletedCount = 0;
		user->Send(response);
	}
	else {
		/* regular quest: remove it and tell the client it was dropped */
		workingQuests.erase(found);
		Protocol::FromServer::Quest::Drop response;
		response.QuestHash = questHash;
		user->Send(response);
	}
	ChangeActiveQuestState(questHash, XRated::Quest::State::NotAccepted);
	return true;
}
/* Drops every guild quest the user is working on, e.g. when leaving a guild.
 * Reset-date quests inside their cycle are kept in FailedAndDropWaiting state;
 * others are erased from workingQuests.  Iterator handling is deliberate:
 * erase() returns the next iterator, otherwise ++i advances. */
void QuestManager::DropGuildQuest(UserSharedPtr user)
{
std::vector<Quest>::iterator i = workingQuests.begin();
for (; i != workingQuests.end(); ) {
if ((*i).Info->IsGuildQuest()) {
DateTime::Date now(DateTime::Date::Now());
uint32 questHash = (*i).Id;
LoggerInstance().Info("[QuestManager::DropGuildQuest] Guild quest found. ({})", (int)questHash);
if (((*i).Info->IsHaveResetDate() == true) && (i->ExpiredDate >= now)) {
/* resettable quest: keep the entry, mark as failed-and-waiting */
i->CurrentState = XRated::Quest::State::FailedAndDropWaiting;
Protocol::FromServer::Quest::ChangeState response;
response.QuestHash = i->Id;
response.NewState = i->CurrentState;
response.CompletedCount = 0;
user->Send(response);
++i;
ChangeActiveQuestState(questHash, XRated::Quest::State::NotAccepted);
LoggerInstance().Info("[QuestManager::DropGuildQuest] Guild quest Set to FailedAndDropWaiting. ({})", (int)questHash);
}
else {
/* erase() invalidates i; continue from the returned iterator */
i = workingQuests.erase(i);
/* notice to user */
Protocol::FromServer::Quest::Drop response;
response.QuestHash = questHash;
user->Send(response);
LoggerInstance().Info("[QuestManager::DropGuildQuest] Guild quest Removed. ({})", (int)questHash);
}
}
else {
++i;
}
}
}
/* Asynchronously asks the DB for this character's completed quests related
 * to the given stage; the answer is delivered to CompletedQuestList(). */
void QuestManager::RefreshCompletedQuests(UserSharedPtr user, const StageLocation& location)
{
	//' request : character name, stage group hash, stage level
	Net::Api request("Quest/CompletedList");
	request << user->GetSerial();
	request << location.StageGroupHash;
	request << location.Level;
	request.GetAsync(this, &QuestManager::CompletedQuestList, user);
}
/* Asynchronously asks the DB for this character's in-progress quests; the
 * answer is delivered to WorkingQuestList(). */
void QuestManager::RefreshWorkingQuests(UserSharedPtr user)
{
	//' request : character name
	Net::Api request("Quest/WorkingList");
	request << user->GetSerial();
	request.GetAsync(this, &QuestManager::WorkingQuestList, user);
}
/* Clears all per-character quest state when the user is released
 * (disconnect / character switch).  The user argument is unused; everything
 * here is the manager's own cached state. */
void QuestManager::Release(UserSharedPtr user)
{
	AutoLock lock(cs);

	/* completion counters and their DB-origin snapshot
	   (origin backup table added 2008-02-19 by kwind) */
	completedQuests.clear();
	originCompletedQuests.clear();
	//insertCompletedQuests.clear();
	//updateCompletedQuests.clear();

	/* in-progress quests, item-triggered quests, and the origin snapshot */
	workingQuests.clear();
	itemQuests.clear();
	originWorkingQuests.clear();
	//insertWorkingQuests.clear();
	//updateWorkingQuests.clear();
	//dropWorkingQuests.clear();
}
/* Notifies every Working-state quest that an object (monster/NPC) with the
 * given id was destroyed, re-evaluating each quest's state via its
 * "DestroyObject" parameter rule.  'team' is compared against the user's team
 * to tell the rule whether the kill belongs to the player's side. */
void QuestManager::ObjectDestroyed(UserSharedPtr user, uint32 id, uint8 team)
{
AutoLock lock(cs);
/* iterate only quests currently in Working state */
for (std::vector<Quest>::iterator i = std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByState(XRated::Quest::State::Working))
; i != workingQuests.end()
; i = std::find_if(i + 1, workingQuests.end(), XRated::Quest::FindByState(XRated::Quest::State::Working)))
{
uint8 state = i->Info->UpdateParameter(L"DestroyObject", id, 1/*count*/, (user->GetTeamNumber() == team), i->Params);
i->ParameterChanged = true;
if (state == i->CurrentState) continue;
i->CurrentState = state;
switch (i->CurrentState)
{
case XRated::Quest::State::Failed:
case XRated::Quest::State::Succeeded:
/* a success only counts if the complete condition also validates;
 * otherwise leave the client un-notified for now */
if (i->CurrentState == XRated::Quest::State::Succeeded && GetValidResult(user, i->Info->CompleteCondition, false) != ValidQuestConditionResult::Success) // working ; invalid condition
break;
Protocol::FromServer::Quest::ChangeState packet;
packet.QuestHash = i->Id;
packet.NewState = i->CurrentState;
/* NOTE(review): CurrentState can only be Failed or Succeeded in this case
 * group, so the Completed comparison below looks unreachable — confirm
 * whether completedQuests lookup was intended for Succeeded instead. */
if (i->CurrentState == XRated::Quest::State::Completed)
{
packet.CompletedCount = completedQuests[i->Id];
}
else
{
packet.CompletedCount = 0;
}
user->Send(packet);
ChangeActiveQuestState(i->Id, i->CurrentState);
break;
}
}
}
/* Async DB callback for "Quest/CompletedList" (see RefreshCompletedQuests).
 * Rebuilds completedQuests from the answer's "quests" array, pruning entries
 * whose quest info no longer exists or whose event period has ended (those are
 * deleted from the DB via "DeleteQuest"), then snapshots the result into
 * originCompletedQuests and sends the list to the client.
 * NOTE(review): the counter 'i' is incremented on pruned entries but never
 * read — looks like a leftover from an older index-based packet format. */
void QuestManager::CompletedQuestList(const UserSharedPtr& user, const Net::Answer& answer)
{
if (!user->IsConnected()) return;
//' response : quest hashes
if (answer)
{
/* completed quests which is related for this stage are listed like, previous quests of relay quest in this stage. */
AutoLock lock(cs);
completedQuests.clear();
uint16 i(0);
for(auto& quest : answer.at("quests").get<json>())
{
uint32 questHash = quest.at("id").get<uint32>();
if (!Database::DatabaseInstance().InfoCollections.Quests.IsExistQuest(questHash))
{
/* quest removed from static data: purge the stale DB row */
LoggerInstance().Error("CompletedQuestList, Not Exist QuestInfo : {}", questHash);
++i;
Net::Api request("DeleteQuest");
request << user->GetSerial() << questHash;
request.GetAsync();
continue;
}
else
{
const Database::Info::QuestInfo* questInfo = Database::DatabaseInstance().InfoCollections.Quests.Retrieve(questHash);
if (questInfo)
{
/* event quest outside its period: purge it as well */
if (questInfo->IsEventQuest() && !UserManagerInstance().IsEventQuestPeriod(questInfo->GetEventType()))
{
LoggerInstance().Error("CompletedQuestList, Not Period EventQuestInfo : {}", questHash);
++i;
Net::Api request("DeleteQuest");
request << user->GetSerial() << questHash;
request.GetAsync();
continue;
}
}
}
uint32 completeCount = quest.at("completeCount").get<uint32>();
completedQuests[questHash] = completeCount;
}
// origin list backup.
originCompletedQuests = completedQuests;
//insertCompletedQuests.clear();
//updateCompletedQuests.clear();
dbRequestCompletCount.clear();
Protocol::FromServer::Quest::CompletedList response;
response.Quests = completedQuests;
user->Send(response);
}
else
{
/* DB error: keep current state; the client simply gets no refresh */
//user->CriticalError(fmt::format("CompletedQuestList Error DB Response Quest List").c_str());
return;
}
}
/* Async DB callback for "Quest/WorkingList" (see RefreshWorkingQuests).
 * Rebuilds originWorkingQuests/workingQuests from the answer's "quests"
 * array, pruning quests with missing static info, deduplicating entries,
 * skipping expired drop-waiting reset quests and out-of-period event quests,
 * then sends the resulting working list to the client. */
void QuestManager::WorkingQuestList(const UserSharedPtr& user, const Net::Answer& answer)
{
if (!user->IsConnected()) return;
//' response : quests(separated : quest hash, state, param1, param2, param3)
if (answer)
{
AutoLock lock(cs);
bool sendQuestEventNotice = false;
workingQuests.clear();
uint16 i(0);
//assert( packet->GetCount() % (1/*hash*/ + 1/*state*/ + XRated::Quest::MaxParameterCount) == 0 ); /// .asp should be updated if this fails
originWorkingQuests.clear();
std::map< uint32, Quest > originWorkingQuestFinder;
//std::map< uint32 , Quest > duplicatedList;
DateTime::Date now = DateTime::Date::Now();
try {
for(auto& questdb : answer.at("quests").get<json>())
{
uint32 questHash = questdb.at("id").get<uint32>();
if (!Database::DatabaseInstance().InfoCollections.Quests.IsExistQuest(questHash))
{
/* stale DB row (quest removed from static data): purge it */
LoggerInstance().Error("WorkingQuest, Not Exist QuestInfo : {}", questHash);
i += 5;
Net::Api request("DeleteQuest");
request << user->GetSerial() << questHash;
request.GetAsync();
continue;
}
originWorkingQuests.push_back(Quest(questHash));
Quest& quest(originWorkingQuests.back());
//workingQuests.push_back( Quest(StringUtil::ToInt( (*packet)[i++])) );
//Quest& quest(workingQuests.back();
questdb.at("currentState").get_to(quest.CurrentState);
/* NOTE(review): this assigns the same "params" field into every slot of
 * quest.Params — verify the JSON shape; a per-index read was likely intended. */
for (std::vector<uint32>::iterator j = quest.Params.begin(); j != quest.Params.end(); ++j)
{
questdb.at("params").get_to(*j);
}
// fixed by kwind, 2008-01-22
questdb.at("expiredDate").get_to(quest.ExpiredDate);
/* duplicate hash in the DB answer: record it and remove BOTH copies */
std::map< uint32, Quest >::iterator finder = originWorkingQuestFinder.find(questHash);
if (finder != originWorkingQuestFinder.end())
{
/* NOTE(review): duplicatedList's local declaration is commented out above;
 * presumably it is a member variable — confirm, otherwise this won't compile. */
duplicatedList.insert(std::make_pair(finder->first, finder->second));
originWorkingQuests.pop_back();
std::list<Quest>::iterator originIter = std::find_if(originWorkingQuests.begin(), originWorkingQuests.end(), XRated::Quest::FindByHash(finder->first));
if (originIter != originWorkingQuests.end())
{
originWorkingQuests.erase(originIter);
}
LoggerInstance().Warn("asdf originWorkingQuests delete questHash={} because duplicated!!", questHash);
continue;
}
originWorkingQuestFinder.insert(std::make_pair(questHash, quest));
/* drop-waiting reset quest already past its reset date: silently drop */
if ((XRated::Quest::State::CompletedAndDropWaiting == quest.CurrentState) || (XRated::Quest::State::FailedAndDropWaiting == quest.CurrentState)) {
if (quest.Info->IsHaveResetDate() == true) {
if ((now > quest.ExpiredDate)) {
continue;
}
}
}
/* event quest outside its period: drop and notify the client once below */
if (quest.Info->IsEventQuest() && !UserManagerInstance().IsEventQuestPeriod(quest.Info->GetEventType()))
{
sendQuestEventNotice = true;
continue;
}
workingQuests.push_back(quest);
}
}
catch (QuestException& e) {
LoggerInstance().Error("WorkingQuest, CriticalError : {}", e.what());
originWorkingQuests.clear();
workingQuests.clear();
user->CriticalError("SetWorkingQuestList Failed", e.IsCritical());
return;
}
//user->RemoveDuplicatedQuest( duplicatedList );
//originWorkingQuests.assign( workingQuests.begin(), workingQuests.end() );
//insertWorkingQuests.clear();
//updateWorkingQuests.clear();
//dropWorkingQuests.clear();
Protocol::FromServer::Quest::WorkingList response;
for (std::vector<Quest>::const_iterator i = workingQuests.begin(); i != workingQuests.end(); ++i)
{
response.Quests.push_back(*i);
}
user->Send(response);
if (sendQuestEventNotice)
{
Protocol::FromServer::QuestEvent::NoticeQuestEvent sendPacket;
sendPacket.EventNotice = Protocol::FromServer::QuestEvent::NoticeQuestEvent::Notice::DeleteWorkingQuestBecauseNotEventPeriod;
user->Send(sendPacket);
}
}
else
{
user->CriticalError(fmt::format("WorkingQuestList Error DB Response Quest List").c_str());
return;
}
}
/* Returns whether the quest may still be completed again.  A quest that has
 * never been completed is always repeatable; otherwise the static quest info
 * decides based on the recorded completion count. */
bool QuestManager::IsRepeatableQuest(const Database::Info::QuestInfo& info, uint32 questHash)
{
	std::map<uint32/*Quest hash*/, uint32/*Completed Count*/>::const_iterator found = completedQuests.find(questHash);
	if (found == completedQuests.end())
		return true; /* no completion record yet */
	return info.IsRepeatQuest(found->second);
}
/* Handles an incoming quest share from party member 'shareOwner'.
 * Fast path (the do-block): when the local completion cache can already
 * decide, the Share offer is sent (or silently dropped on a location
 * mismatch) and we return.  A 'break' out of the do-block means the local
 * cache is inconclusive — then a "Quest/GetCompletedCount" DB request is
 * queued (deduplicated via dbRequestCompletCount) and the share is resumed
 * in CompletedQuestCountWIthShare(). */
void QuestManager::ShareQuest(UserSharedPtr user, const std::wstring& shareOwner, const Database::Info::QuestInfo& info, uint32 questHash)
{
AutoLock lock(cs);
do {
std::map<uint32/*Quest hash*/, uint32/*Completed Count*/>::const_iterator iter = completedQuests.find(questHash);
if (iter == completedQuests.end()) {
/* not known-completed: the receiver must be at the quest's accept stage
 * (or the follow-up quest's accept stage, for relay quests) */
if (info.AcceptLocation != user->GetCurrentStage()) {
if (info.Next.second != nullptr) {
if ((info.Next.second->AcceptLocation) != user->GetCurrentStage()) {
break;
}
}
else {
break;
}
}
}
/* offer the shared quest to the client */
Protocol::FromServer::Quest::Share packet;
packet.ShareOwner = shareOwner;
packet.QuestHash = questHash;
user->Send(packet);
return;
} while (false);
/* local cache inconclusive: ask the DB, but only once per quest hash */
std::vector< std::pair<uint32, std::wstring> >::iterator iter = dbRequestCompletCount.begin();
std::vector< std::pair<uint32, std::wstring> >::iterator end = dbRequestCompletCount.end();
while (iter != end) {
if (iter->first == questHash) {
return;
}
++iter;
}
dbRequestCompletCount.push_back(std::pair<uint32, std::wstring>(questHash, shareOwner));
Net::Api request("Quest/GetCompletedCount");
request << user->GetSerial() << questHash;
request.GetAsync(this, &QuestManager::CompletedQuestCountWIthShare, user);
}
/* Async DB callback for "Quest/GetCompletedCount", issued by ShareQuest().
 * Caches the completion count, forwards it to the client when non-zero, then
 * completes the pending share offer recorded in dbRequestCompletCount.
 * If the hash is already cached (a race with another refresh), the share is
 * abandoned with an error log. */
void QuestManager::CompletedQuestCountWIthShare(const UserSharedPtr& user, const Net::Answer& answer)
{
if (answer && user->IsConnected())
{
uint32 questHash = answer.at("hash").get<uint32>();
uint32 completeCount = answer.at("completeCount").get<uint32>();
const Database::Info::QuestInfo* info(Database::DatabaseInstance().InfoCollections.Quests.Retrieve(questHash));
if (!info) {
LoggerInstance().Error("can not found quest info :{}", questHash);
return;
}
{
AutoLock lock(cs);
{
/* another path already populated the cache: treat as failure */
std::map<uint32, uint32>::iterator iter = completedQuests.find(questHash);
if (iter != completedQuests.end()) {
LoggerInstance().Error("QuestShare Failed : have completed(current list) count : Quest({})", questHash);
return;
}
}
{
std::map<uint32, uint32>::iterator iter = originCompletedQuests.find(questHash);
if (iter != originCompletedQuests.end()) {
LoggerInstance().Error("QuestShare Failed : have completed(origin list) count : Quest({})", questHash);
return;
}
}
/* cache the DB count in both the live map and the origin snapshot */
completedQuests[questHash] = completeCount;
originCompletedQuests[questHash] = completeCount;
{
if (completeCount > 0) {
Protocol::FromServer::Quest::CompletedCount packet;
packet.QuestHash = questHash;
packet.Count = completeCount;
user->Send(packet);
}
}
{
/* resume the share offer that triggered this DB round-trip */
std::vector< std::pair<uint32, std::wstring> >::iterator iter = dbRequestCompletCount.begin();
std::vector< std::pair<uint32, std::wstring> >::iterator end = dbRequestCompletCount.end();
while (iter != end) {
if (iter->first == questHash) {
Protocol::FromServer::Quest::Share packet;
packet.ShareOwner = iter->second;
packet.QuestHash = questHash;
user->Send(packet);
dbRequestCompletCount.erase(iter);
break;
}
++iter;
}
}
}
}
}
/* An "item activity" quest is identified by the sentinel hidden-quest accept
 * location (both stage-group hash and level must match); such quests are
 * exempt from accept-location/NPC-distance validation.  NULL info -> false. */
bool QuestManager::IsItemActivityQuest(const Database::Info::QuestInfo* info)
{
	if (info == NULL)
		return false;
	const bool hiddenGroup = (info->AcceptLocation.StageGroupHash == XRated::Constants::HiddenQuestStageGroupHash);
	const bool hiddenLevel = (info->AcceptLocation.Level == XRated::Constants::HiddenQuestStageGroupLevel);
	return hiddenGroup && hiddenLevel;
}
/* Maps a validation result to its stable wide-string token, used to build
 * error messages for the client.  Unknown values yield L"Failed_Unknown". */
const wchar_t* QuestManager::GetValidQuestConditionErrorMsg(ValidQuestConditionResult result) const
{
	switch (result)
	{
	case Success:                          return L"Success";
	case Failed_PlayerIsNull:              return L"Failed_PlayerIsNull";
	case Failed_ScriptOnlyQuest:           return L"Failed_ScriptOnlyQuest";
	case Failed_NotEnoughCharacterStatus:  return L"Failed_NotEnoughCharacterStatus";
	case Failed_NotJoinedGuild:            return L"Failed_NotJoinedGuild";
	case Failed_NotEnoughGuildLevel:       return L"Failed_NotEnoughGuildLevel";
	case Failed_NotEnoughItems:            return L"Failed_NotEnoughItems";
	case Failed_HaveNotLicenses:           return L"Failed_HaveNotLicenses";
	case Failed_HaveNotCompleteQuests:     return L"Failed_HaveNotCompleteQuests";
	}
	return L"Failed_Unknown";
}
/*
   Checks whether 'user' satisfies 'condition' for running a quest 'count' times.
   Returns the first failed check as a ValidQuestConditionResult, or Success.
   wirteLog [sic, name kept to match the declaration]: when true, failures are
   logged at Error level; the item check logs at Info level otherwise.
*/
QuestManager::ValidQuestConditionResult QuestManager::GetValidResult(const UserSharedPtr user, const Database::Info::QuestInfo::Condition& condition, bool wirteLog, uint32 count) const
{
    Logic::Player* player(user->GetPlayer());
    if (player == NULL)
    {
        LoggerInstance().Error("invalid character({}) state for a quest", user->GetSerial());
        return ValidQuestConditionResult::Failed_PlayerIsNull;
    }
    /* state can be changed by only script */
    if (condition.ScriptOnly)
    {
        if (wirteLog == true) {
            LoggerInstance().Error("Quest::IsValid Failed, only for script - character({})", user->GetSerial());
        }
        return ValidQuestConditionResult::Failed_ScriptOnlyQuest;
    }
    /* character validation (delegated to the condition itself) */
    if (condition.IsValid(player->GetPlayerData()) == false)
    {
        if (wirteLog == true) {
            LoggerInstance().Error("Quest::IsValid Failed, not enough character status- character({})", user->GetSerial());
        }
        return ValidQuestConditionResult::Failed_NotEnoughCharacterStatus;
    }
    /* guild quest validation : must be in a guild of sufficient level */
    if (condition.IsGuildQuest() == true) {
        if (user->IsPartOfGuild() == false) {
            if (wirteLog == true) {
                LoggerInstance().Error("Quest::IsValid Failed, not joined guild user - character({})", user->GetSerial());
            }
            return ValidQuestConditionResult::Failed_NotJoinedGuild;
        }
        if (user->GetGuildInfo().GuildLevel < condition.GuildLevel) {
            if (wirteLog == true) {
                LoggerInstance().Error("Quest::IsValid Failed, invalid guildLevel condition - character({})", user->GetSerial());
            }
            return ValidQuestConditionResult::Failed_NotEnoughGuildLevel;
        }
    }
    /* item validation : the inventory must cover 'count' repetitions of every required item */
    if (!condition.Items.empty())
    {
        struct Condition : public Database::Info::ItemInfo::IFindCondition
        {
            uint32 Hash;
            Condition(uint32 hash) : Hash(hash) {}
            bool operator()(const Database::Info::ItemInfo& info) const { return (info.Hash == Hash); }
        };
        int availablequestcount = 0; // repetitions the scarcest required item allows; 0 == not yet computed
        for (std::vector< Database::Info::QuestInfo::Condition::Item >::const_iterator i = condition.Items.begin(); i != condition.Items.end(); ++i)
        {
            if (i->Count == 0) // defensive: avoid division by zero on malformed quest data
                continue;
            int itemcount = user->GetItemCount(Condition(i->ItemHash), true);
            if (itemcount < i->Count)
            {
                if (wirteLog == true) {
                    LoggerInstance().Error("not enough items({} of {} required) of a user({})", i->ItemHash, i->Count, user->GetSerial());
                }
                else {
                    LoggerInstance().Info("not enough items({} of {} required) of a user({})", i->ItemHash, i->Count, user->GetSerial());
                }
                return ValidQuestConditionResult::Failed_NotEnoughItems;
            }
            int temp = itemcount / i->Count;
            if (availablequestcount == 0 || temp < availablequestcount)
                availablequestcount = temp;
        }
        if (availablequestcount < static_cast<int>(count)) // cast: avoid signed/unsigned comparison
        {
            // BUGFIX: the original code dereferenced the loop iterator here AFTER the loop
            // had advanced it to condition.Items.end() (undefined behavior). Log the
            // aggregate counts instead of a per-item hash.
            if (wirteLog == true)
            {
                LoggerInstance().Error("not enough items({} quests affordable, {} requested) of a user({})", availablequestcount, count, user->GetSerial());
            }
            else
            {
                LoggerInstance().Info("not enough items({} quests affordable, {} requested) of a user({})", availablequestcount, count, user->GetSerial());
            }
            return ValidQuestConditionResult::Failed_NotEnoughItems;
        }
    }
    /* license validation : the user must already be allowed to join every listed stage */
    if (!condition.Licenses.empty())
    {
        for (std::vector< StageLocation >::const_iterator i = condition.Licenses.begin(); i != condition.Licenses.end(); ++i)
        {
            if (user->IsAbleToJoinStage(*i) == false)
            {
                if (wirteLog == true) {
                    LoggerInstance().Error("Quest::IsValid Failed, wrong licenses condition- character({})", user->GetSerial());
                }
                return ValidQuestConditionResult::Failed_HaveNotLicenses;
            }
        }
    }
    /* preceding-quest validation : each listed quest must be completed at least once */
    if (!condition.Quests.empty())
    {
        for (std::vector< uint32 >::const_iterator i = condition.Quests.begin(); i != condition.Quests.end(); ++i)
        {
            std::map<uint32, uint32>::const_iterator completedIter = completedQuests.find(*i);
            if ((completedIter == completedQuests.end()) || (completedIter->second < 1))
            {
                if (wirteLog == true) {
                    LoggerInstance().Error("Quest::IsValid Failed, wrong completedQuests condition- character({})", user->GetSerial());
                }
                return ValidQuestConditionResult::Failed_HaveNotCompleteQuests;
            }
        }
    }
    /* finally */
    return ValidQuestConditionResult::Success;
}
/*
   Verifies the user has enough free normal-bag slots to receive the reward items
   of a quest completed 'count' times (items flagged TakeAway are removed and
   therefore free slots). Sends a NotEnoughInventory error packet on failure.
   Returns true when the reward fits.
*/
bool QuestManager::ValidateInventory(UserSharedPtr user, const Database::Info::QuestInfo::Reward& reward, uint32 selectItemIndex, uint32 questHash, uint32 count) const
{
    /* calculate reward items */
    typedef std::vector< std::pair<XRated::Item, uint16/*count*/> > RewardItems;
    RewardItems toDrop; // items taken away from the player
    RewardItems toGet;  // items granted to the player
    std::vector< std::pair<uint32/*item hash*/, uint32/*item count*/> > vR, vA; // to validate slot count
    if (!reward.SelectableItems.empty())
    {
        // BUGFIX: the original test 'size() < selectItemIndex' let selectItemIndex == size()
        // through, indexing one past the end of SelectableItems below.
        if (reward.SelectableItems.size() <= selectItemIndex) // out of index range - kind of critical
        {
            LoggerInstance().Error("out of select item index({} size:{})", selectItemIndex, reward.SelectableItems.size());
            user->Close(); // critical : the client sent an impossible selection
            return false;
        }
        RewardItems& target(reward.SelectableItems[selectItemIndex].TakeAway ? toDrop : toGet);
        target.push_back(std::pair(XRated::Item(reward.SelectableItems[selectItemIndex].ItemHash, reward.SelectableItems[selectItemIndex].Instance), reward.SelectableItems[selectItemIndex].Count));
        std::vector< std::pair<uint32, uint32> >& validateInfo(reward.SelectableItems[selectItemIndex].TakeAway ? vR : vA);
        validateInfo.push_back(std::pair<uint32, uint32>(reward.SelectableItems[selectItemIndex].ItemHash, reward.SelectableItems[selectItemIndex].Count));
    }
    for (auto i = reward.Items.begin(); i != reward.Items.end(); ++i)
    {
        RewardItems& target(i->TakeAway ? toDrop : toGet);
        target.push_back(std::pair(XRated::Item(i->ItemHash, i->Instance), i->Count));
        std::vector< std::pair<uint32, uint32> >& validateInfo(i->TakeAway ? vR : vA);
        validateInfo.push_back(std::pair<uint32, uint32>(i->ItemHash, i->Count));
    }
    /* item slot validation : vR = removed (frees slots), vA = added (needs slots) */
    int requiredSlotCount(user->GetRequiredSlotCount(vR, vA, count));
    if (static_cast<int>(user->GetBlankSlotCountOfNormalBag()) < requiredSlotCount) // not enough item slots to put-in
    {
        Protocol::FromServer::Quest::Error packet;
        packet.QuestHash = questHash;
        packet.Result = XRated::QuestErrorType::Type::NotEnoughInventory;
        user->Send(packet);
        return false;
    }
    return true;
}
/*
   Grants the reward of a quest completed 'count' times: money (may be negative,
   i.e. a cost), items (granted or taken away), stage licenses, exp/war-exp and a
   state bundle. Validates money, inventory space and gold overflow first; sends
   the matching Quest::Error packet and returns false when any check fails.
*/
bool QuestManager::GiveReward(UserSharedPtr user, const Database::Info::QuestInfo::Reward& reward, uint32 selectItemIndex, uint32 count) const
{
    /* money validation : negative reward.Money is a cost the user must afford */
    if (reward.Money < 0 && user->GetMoney() < (static_cast<uint32>(std::abs(reward.Money))) * count)
    {
        Protocol::FromServer::Quest::Error packet;
        packet.Result = XRated::QuestErrorType::Type::NotEnoughMoney;
        user->Send(packet);
        LoggerInstance().Info(L"not enough money");
        return false; // not enough money
    }
    /* calculate reward items */
    typedef std::vector< std::pair<XRated::Item, uint16/*count*/> > RewardItems;
    RewardItems toDrop; // items taken away from the player
    RewardItems toGet;  // items granted to the player
    std::vector< std::pair<uint32/*item hash*/, uint32/*item count*/> > vR, vA; // to validate slot count
    if (!reward.SelectableItems.empty())
    {
        // BUGFIX: the original test 'size() < selectItemIndex' let selectItemIndex == size()
        // through, indexing one past the end of SelectableItems below.
        if (reward.SelectableItems.size() <= selectItemIndex) // out of index range - kind of critical
        {
            LoggerInstance().Error("out of select item index({} size:{})", selectItemIndex, reward.SelectableItems.size());
            user->Close(); // critical : the client sent an impossible selection
            return false;
        }
        RewardItems& target(reward.SelectableItems[selectItemIndex].TakeAway ? toDrop : toGet);
        target.push_back(std::pair(XRated::Item(reward.SelectableItems[selectItemIndex].ItemHash, reward.SelectableItems[selectItemIndex].Instance), reward.SelectableItems[selectItemIndex].Count));
        std::vector< std::pair<uint32, uint32> >& validateInfo(reward.SelectableItems[selectItemIndex].TakeAway ? vR : vA);
        validateInfo.push_back(std::pair<uint32, uint32>(reward.SelectableItems[selectItemIndex].ItemHash, reward.SelectableItems[selectItemIndex].Count));
    }
    for (std::vector<Database::Info::QuestInfo::Reward::Item>::const_iterator i = reward.Items.begin(); i != reward.Items.end(); ++i)
    {
        RewardItems& target(i->TakeAway ? toDrop : toGet);
        target.push_back(std::pair(XRated::Item(i->ItemHash, i->Instance), i->Count));
        std::vector< std::pair<uint32, uint32> >& validateInfo(i->TakeAway ? vR : vA);
        validateInfo.push_back(std::pair<uint32, uint32>(i->ItemHash, i->Count));
    }
    /* item slot validation : vR = removed (frees slots), vA = added (needs slots) */
    int requiredSlotCount(user->GetRequiredSlotCount(vR, vA, count));
    if (static_cast<int>(user->GetBlankSlotCountOfNormalBag()) < requiredSlotCount) // not enough item slots to put-in
    {
        Protocol::FromServer::Quest::Error packet;
        packet.Result = XRated::QuestErrorType::Type::NotEnoughInventory;
        user->Send(packet);
        return false;
    }
    // Money overflow validation : reject if adding the reward wraps around or
    // exceeds the hard gold cap.
    if (reward.Money > 0 && (user->GetMoney() > user->GetMoney() + (reward.Money * count) || user->GetMoney() + (reward.Money * count) > XRated::Constants::GoldMaxLimit))
    {
        LoggerInstance().Warn("QUEST REWARD GOLD OVER FLOW!");
        Protocol::FromServer::Quest::Error packet;
        packet.Result = XRated::QuestErrorType::Type::GoldOverflow;
        user->Send(packet);
        return false;
    }
    /* give (or charge) money */
    user->SetMoney(user->GetMoney() + (reward.Money * count));
    /* get rid of take-away items */
    if (!toDrop.empty())
    {
        for (RewardItems::const_iterator i = toDrop.begin(); i != toDrop.end(); ++i)
            user->ItemRemove(i->first.Id, i->second * count); // User::ItemRemove() sends packet automatically. instance(like enchant bitfields) is ignored at removing item
    }
    /* give items */
    if (!toGet.empty())
    {
        for (RewardItems::const_iterator i = toGet.begin(); i != toGet.end(); ++i)
        {
            user->ItemFill(i->first.Id, (i->second * count));
            // Track Lunia-chip acquisitions: chip colors map to different credit
            // multipliers (pink x1, green x100, yellow x10000) for logging only.
            uint32 chipItemCredit = 0;
            if (i->first.Id == XRated::Database::DatabaseInstance().InfoCollections.LuniaChipItems.GetChipHash(XRated::Gamble::ChipType::Pink))
                chipItemCredit = i->second * count;
            else if (i->first.Id == XRated::Database::DatabaseInstance().InfoCollections.LuniaChipItems.GetChipHash(XRated::Gamble::ChipType::Green))
                chipItemCredit = i->second * 100 * count;
            else if (i->first.Id == XRated::Database::DatabaseInstance().InfoCollections.LuniaChipItems.GetChipHash(XRated::Gamble::ChipType::Yellow))
                chipItemCredit = i->second * 10000 * count;
            if (chipItemCredit > 0) // at least 1 chip acquired
            {
                // TODO: chip-acquisition logging hook was removed; re-add when an
                // admin logger is available again.
            }
        }
    }
    /* give licenses */
    if (!reward.Licenses.empty())
    {
        for (std::vector<StageLicense>::const_iterator i = reward.Licenses.begin(); i != reward.Licenses.end(); ++i)
        {
            if (user->AddStageLicense(*i) == true) {
                Protocol::FromServer::AcquireLicense packet;
                packet.serial = user->GetSerial();
                packet.stageLicense = *i;
                packet.sharedOtherPlayers = 0; // 3.1 by ultimate
                user->SendToAll(packet);
            }
        }
    }
    /* exp rewards are scaled by the BaseExp config; quest exp is intentionally
       not affected by PC-room bonuses (last argument false) */
    if (reward.Exp)
        user->AddExp(XRated::Constants::ExpAcquiredType::ExpQuest, static_cast<uint32>(reward.Exp * ConfigInstance().Get("BaseExp",1.0f) * count), false);
    if (reward.WarExp)
        user->AddWarExp(XRated::Constants::ExpAcquiredType::ExpQuest, static_cast<uint32>(reward.WarExp * ConfigInstance().Get("BaseExp", 1.0f) * count));
    if (reward.StateBundleHash)
        user->AddStateBundle(reward.StateBundleHash);
    /* finally */
    return true;
}
/* Counts the quests in workingQuests that are still "live", i.e. neither
   completed nor waiting to be dropped. */
uint32 QuestManager::GetValidQuestCount() const
{
    AutoLock lock(cs);
    uint32 active(0);
    for (const Quest& quest : workingQuests)
    {
        switch (quest.CurrentState)
        {
        case XRated::Quest::State::Completed:
        case XRated::Quest::State::CompletedAndDropWaiting:
        case XRated::Quest::State::FailedAndDropWaiting:
            break; // terminal states do not count against the quest limit
        default:
            ++active;
            break;
        }
    }
    return active;
}
//--------------------------------------------------------------------------------------------------------------------------------------------
// Active Quest Item
//--------------------------------------------------------------------------------------------------------------------------------------------
void QuestManager::Dispatch(const UserSharedPtr user, Protocol::ToServer::Quest::ActivityItem& packet)
{
AutoLock userLock(user->GetSyncObject());
AutoLock lock(cs);
const Database::Info::QuestInfo* info(Database::DatabaseInstance().InfoCollections.Quests.Retrieve(packet.QuestHash));
//------------------------------------------------------------------------------------------------------------------------------
if (std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(packet.QuestHash)) != workingQuests.end())
{
user->CriticalError("already working quest by activityItem");
return;
}
std::map<uint32, uint32>::const_iterator iter = completedQuests.find(packet.QuestHash);
if ((info->IsRepeatQuest() == false) && ((iter != completedQuests.end()) && (iter->second > 0)))
{
user->CriticalError("already completed quest by activityItem");
return;
}
//------------------------------------------------------------------------------------------------------------------------------
std::vector<Quest>::const_iterator i = std::find_if(itemQuests.begin(), itemQuests.end(), XRated::Quest::FindByHash(packet.QuestHash));
if (i != itemQuests.end())
{
Protocol::FromServer::Quest::ActivityItem packet;
user->Send(packet);
LoggerInstance().Info(L"Send To Client Active Item Result");
return;
}
do // dummy do to validate quest UseActivityItem request - make sure 'break' for CRITICAL invalid tag
{
if (dbRequested.find(user) != dbRequested.end()) break; // already doing something( accept or complete or activityItem
if (!info) break; // unable to find the quest information on server
dbRequested.insert(user);
//' request : character name, quest hashs
Net::Api request("Quest/ActivityItem");
//Add Basic ActiveItemQuestInfo
LoggerInstance().Info("Request active item({}) info to DB", packet.QuestHash);
AddItemQuest(packet.QuestHash, XRated::Quest::State::NotAccepted);
request << user->GetSerial() << packet.QuestHash;
if (!info->AcceptCondition.Quests.empty())
{
for (auto i = info->AcceptCondition.Quests.begin(); i != info->AcceptCondition.Quests.end(); ++i)
{
LoggerInstance().Info("Request active item({}) info to DB", *i);
AddItemQuest(*i, XRated::Quest::State::NotAccepted);
request << *i;
}
}
request.GetAsync(this, &QuestManager::ActiveItemQuestInfo, user);
return;
} while (false);
/* TODO : ciritical policy */
LoggerInstance().Error("invalid quest({})-activityitem requested", packet.QuestHash);
}
/* Async DB callback for Dispatch(ActivityItem): marks every quest hash returned
   by the DB as Completed in itemQuests, then acknowledges the client. */
void QuestManager::ActiveItemQuestInfo(const UserSharedPtr& user, const Net::Answer& answer)
{
    AutoLock lock(cs);
    dbRequested.erase(user); // the pending DB round-trip for this user is done
    if (answer)
    {
        for (auto& entry : answer.at("quests").get<json>())
        {
            const uint32 hash = entry.at("hash").get<uint32>();
            auto found = std::find_if(itemQuests.begin(), itemQuests.end(), XRated::Quest::FindByHash(hash));
            if (found == itemQuests.end())
                AddItemQuest(hash, XRated::Quest::State::Completed);
            else
                found->CurrentState = XRated::Quest::State::Completed;
            LoggerInstance().Info("Recieve active item({}) info from DB", hash);
        }
    }
    LoggerInstance().Info(L"Send To Client Active Item Result");
    Protocol::FromServer::Quest::ActivityItem sendPacket;
    user->Send(sendPacket);
}
/* Registers an item quest in itemQuests, or updates its state when it is
   already known (state transition rules live in ChangeActiveQuestState). */
void QuestManager::AddItemQuest(uint32 questHash, uint8 currentState)
{
    AutoLock lock(cs);
    const bool alreadyKnown =
        std::find_if(itemQuests.begin(), itemQuests.end(), XRated::Quest::FindByHash(questHash)) != itemQuests.end();
    if (alreadyKnown)
    {
        ChangeActiveQuestState(questHash, currentState);
        return;
    }
    Quest quest(questHash);
    quest.CurrentState = currentState;
    itemQuests.push_back(quest);
}
/* Updates the state of a known item quest. 'Completed' is sticky: any state may
   become Completed, but a Completed quest never leaves that state.
   NOTE: no lock taken here; the only caller (AddItemQuest) already holds 'cs'. */
void QuestManager::ChangeActiveQuestState(uint32 questHash, uint8 newState)
{
    auto target = std::find_if(itemQuests.begin(), itemQuests.end(), XRated::Quest::FindByHash(questHash));
    if (target == itemQuests.end())
        return; // unknown hash: nothing to change
    if (target->CurrentState == newState)
        return; // no-op
    if (newState == XRated::Quest::State::Completed || target->CurrentState != XRated::Quest::State::Completed)
        target->CurrentState = newState;
}
/* Wipes all working/completed quest state plus the origin snapshots used for
   DB diffing. Always succeeds. (itemQuests is intentionally untouched.) */
bool QuestManager::ClearQuestList()
{
    AutoLock lock(cs);
    originWorkingQuests.clear();
    originCompletedQuests.clear();
    workingQuests.clear();
    completedQuests.clear();
    return true;
}
/* Answers a client query about an activity-item quest: when the hash is a known
   item quest, sends a ShowDetailWindow with the full validation result;
   otherwise replies with a Quest::Error packet. */
void QuestManager::ValidActiveItem(UserSharedPtr user, uint32 questHash)
{
    AutoLock lock(cs);
    auto found = std::find_if(itemQuests.begin(), itemQuests.end(), XRated::Quest::FindByHash(questHash));
    if (found != itemQuests.end())
    {
        Protocol::FromServer::Quest::ShowDetailWindow detail;
        detail.QuestHash = questHash;
        detail.Result = static_cast<XRated::QuestErrorType::Type>(IsValidItemQuest(user, *found, questHash));
        user->Send(detail);
        return;
    }
    // Unknown hash — should not happen.
    // NOTE(review): the error reply carries Result = NoError; confirm the client
    // treats this as "no detail available" rather than success.
    Protocol::FromServer::Quest::Error response;
    response.QuestHash = questHash;
    response.Result = XRated::QuestErrorType::Type::NoError;
    user->Send(response);
    LoggerInstance().Error("unexpected : quest({}) not found at itemQuests", questHash);
}
/*
   Validates whether 'user' may accept the item quest 'questInfo' (hash
   'questHash'). Returns an XRated::QuestErrorType::Type value as uint8
   (NoError on success). Checks, in order: quest-slot limit, already-working,
   already-completed / repeat limits, then the quest's AcceptCondition
   (character status, guild, items, licenses, preceding quests).
   NOTE: takes 'cs' again although callers (ValidActiveItem) already hold it —
   AutoLock is presumably reentrant; confirm before changing locking.
*/
uint8 QuestManager::IsValidItemQuest(const UserSharedPtr user, const Quest& questInfo, uint32 questHash) const
{
    AutoLock lock(cs);
    /* a player may only run a limited number of live quests at once */
    if (GetValidQuestCount() >= XRated::Quest::MaxCount)
    {
        LoggerInstance().Error("invalid quest({}) accept out of range", questHash);
        return XRated::QuestErrorType::Type::AlreadyFullQuestCount;
    }
    /* working quests validation : a quest cannot run concurrently with itself */
    if (std::find_if(workingQuests.begin(), workingQuests.end(), XRated::Quest::FindByHash(questHash)) != workingQuests.end())
    {
        LoggerInstance().Error("already working quest");
        return XRated::QuestErrorType::Type::AlreadyWorking;
    }
    /* completed quests validation : non-repeatable quests may complete only once */
    std::map<uint32, uint32>::const_iterator iter = completedQuests.find(questHash);
    if (questInfo.Info->IsRepeatQuest() == false && iter != completedQuests.end() && iter->second > 0)
    {
        LoggerInstance().Error("invalid quest({}) accept already complete", questHash);
        return XRated::QuestErrorType::Type::AlreadyCompletedQuest;
    }
    /* repeatable quests still have a maximum completion count */
    if (questInfo.Info->IsRepeatQuest() == true)
    {
        if (iter != completedQuests.end()) {
            if (questInfo.Info->IsRepeatQuest(iter->second) == false) {
                LoggerInstance().Error("invalid quest({}) not repeatable quest - completeCount({}) repeatableCount({})", questHash
                    , completedQuests.find(questHash)->second, questInfo.Info->MaximumCompleteableCount);
                return XRated::QuestErrorType::Type::CompletedTooManyTimes;
            }
        }
    }
    const Database::Info::QuestInfo::Condition& condition = questInfo.Info->AcceptCondition;
    Logic::Player* player(user->GetPlayer());
    if (player == NULL)
    {
        return XRated::QuestErrorType::Type::InvalidUser;
    }
    /* state can be changed by only script */
    if (condition.ScriptOnly)
    {
        LoggerInstance().Error("only for script");
        return XRated::QuestErrorType::Type::OnlyScript;
    }
    /* character validation (level, class, ... — delegated to the condition) */
    if (condition.IsValid(player->GetPlayerData()) == false)
    {
        LoggerInstance().Error("not enough character status");
        return XRated::QuestErrorType::Type::MoreCharacterStatusRequired;
    }
    /* guild quest validation */
    if (condition.IsGuildQuest() == true) {
        if ((user->IsPartOfGuild() == false)) {
            // NOTE(review): returns 'false' (0) from a uint8 error-code function —
            // if NoError == 0 this reports SUCCESS for a failed guild check.
            // Compare GetValidResult, which returns a dedicated failure code here.
            return false;
        }
        if (user->GetGuildInfo().GuildLevel < condition.GuildLevel) {
            // NOTE(review): same concern as above — 'false' is likely not an error code.
            return false;
        }
    }
    /* item validation : every required item must be present in sufficient quantity */
    if (!condition.Items.empty())
    {
        struct Condition : public Database::Info::ItemInfo::IFindCondition
        {
            uint32 Hash;
            Condition(uint32 hash) : Hash(hash) {}
            bool operator()(const Database::Info::ItemInfo& info) const { return (info.Hash == Hash); }
        };
        for (std::vector< Database::Info::QuestInfo::Condition::Item >::const_iterator i = condition.Items.begin(); i != condition.Items.end(); ++i)
        {
            if (user->GetItemCount(Condition(i->ItemHash), true) < i->Count)
            {
                LoggerInstance().Info(L"not enough items");
                return XRated::QuestErrorType::Type::NotEnoughItems;
            }
        }
    }
    /* license validation : user must be able to join every required stage */
    if (!condition.Licenses.empty())
    {
        for (std::vector< StageLocation >::const_iterator i = condition.Licenses.begin(); i != condition.Licenses.end(); ++i)
        {
            if (!user->IsAbleToJoinStage(*i))
            {
                LoggerInstance().Info("not enough stage licenses");
                return XRated::QuestErrorType::Type::NotEnoughLicense;
            }
        }
    }
    /* preceding-quest validation : each must be completed, either in
       completedQuests or as a Completed entry in itemQuests */
    if (!condition.Quests.empty())
    {
        for (std::vector< uint32 >::const_iterator i = condition.Quests.begin(); i != condition.Quests.end(); ++i)
        {
            std::map<uint32, uint32>::const_iterator completedIter = completedQuests.find(*i);
            if ((completedIter == completedQuests.end()) || completedIter->second < 1)
            {
                auto ai = std::find_if(itemQuests.begin(), itemQuests.end(), XRated::Quest::FindByHash(*i));
                if (ai != itemQuests.end() && ai->CurrentState == XRated::Quest::State::Completed)
                {
                    // satisfied via a completed activity-item quest
                }
                else
                {
                    LoggerInstance().Info("not enough preceded quest");
                    return XRated::QuestErrorType::Type::PrecededQuestRequired;
                }
            }
        }
    }
    /* finally */
    return XRated::QuestErrorType::Type::NoError;
}
/*
   Computes the delta between the current quest state and the last snapshot
   (originWorkingQuests / originCompletedQuests) into 'info' — the insert /
   update / delete lists a DB writer consumes — then re-snapshots the current
   state as the new origin. Must be called with exclusive use of 'info'.
*/
void QuestManager::GetChangedQuestInfoAndOriginBackup(ChangeQuestInfo& info)
{
    AutoLock lock(cs);
    {
        /* working quests : classify each current quest as update (changed since
           snapshot) or insert (not in snapshot). Matched snapshot entries are
           erased as we go, so whatever remains in originWorkingQuests afterwards
           was deleted. */
        std::vector<Quest>::iterator iter = workingQuests.begin();
        std::vector<Quest>::iterator end = workingQuests.end();
        while (iter != end) {
            std::list<Quest>::iterator originIter = std::find_if(originWorkingQuests.begin(), originWorkingQuests.end(), XRated::Quest::FindByHash((*iter).Id));
            if (originIter != originWorkingQuests.end()) {
                const Quest& now((*iter));
                const Quest& origin((*originIter));
                // any parameter change, state change or expiry change requires a DB update
                if ((now.ParameterChanged == true) || (now.CurrentState != origin.CurrentState) || (now.ExpiredDate != origin.ExpiredDate)) {
                    info.workingQuestsToUpdate.push_back((*iter));
                }
                originWorkingQuests.erase(originIter); // consumed: leftovers become deletes
            }
            else {
                info.workingQuestsToInsert.push_back((*iter));
            }
            ++iter;
        }
        info.workingQuestsToDelete.assign(originWorkingQuests.begin(), originWorkingQuests.end());
        {
            /* quests flagged as duplicated are force-deleted from the DB as well */
            std::map<uint32, Quest>::iterator iter = duplicatedList.begin();
            std::map<uint32, Quest>::iterator end = duplicatedList.end();
            while (iter != end)
            {
                LoggerInstance().Warn("asdf workingQuestsToDelete push questHash={} from duplicatedList!!", iter->first);
                info.workingQuestsToDelete.push_back(iter->second);
                ++iter;
            }
        }
    }
    {
        /* completed quests : new hashes become inserts, changed completion
           counts become updates; completed quests are never deleted */
        std::map<uint32/*Quest hash*/, uint32/*Completed Count*/>::const_iterator iter = completedQuests.begin();
        std::map<uint32, uint32>::const_iterator end = completedQuests.end();
        while (iter != end) {
            std::map<uint32, uint32>::iterator originIter = originCompletedQuests.find(iter->first);
            if (originIter != originCompletedQuests.end()) {
                //update
                if (iter->second != originIter->second) {
                    const Database::Info::QuestInfo* questInfo(Database::DatabaseInstance().InfoCollections.Quests.Retrieve(iter->first));
                    info.completedQuestsToUpdate.push_back(ComplatedQuest(iter->first, questInfo, iter->second));
                }
            }
            else {
                //insert
                const Database::Info::QuestInfo* questInfo(Database::DatabaseInstance().InfoCollections.Quests.Retrieve(iter->first));
                info.completedQuestsToInsert.push_back(ComplatedQuest(iter->first, questInfo, iter->second));
            }
            ++iter;
        }
    }
    //back up : current state becomes the next snapshot baseline
    originWorkingQuests.clear();
    originWorkingQuests.assign(workingQuests.begin(), workingQuests.end());
    originCompletedQuests = completedQuests;
}
/* Resets every pending DB delta so the next diff starts from a clean slate. */
void QuestManager::ChangeQuestInfo::Clear()
{
    completedQuestsToUpdate.clear();
    completedQuestsToInsert.clear();
    workingQuestsToDelete.clear();
    workingQuestsToUpdate.clear();
    workingQuestsToInsert.clear();
}
/* Removes every working quest whose EventQuestType matches 'eventType'
   (e.g. when a seasonal event ends). */
void QuestManager::DropEventQuests(uint8 eventType)
{
    // CONSISTENCY FIX: every other accessor of workingQuests in this class holds
    // 'cs'; this mutator previously ran unlocked.
    AutoLock lock(cs);
    workingQuests.erase(
        std::remove_if(workingQuests.begin(), workingQuests.end(),
            [eventType](const Quest& quest) { return quest.Info->EventQuestType == eventType; }),
        workingQuests.end());
}
}
}
} |
<reponame>handexing/wish<gh_stars>10-100
package com.wish.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import org.springframework.format.annotation.DateTimeFormat;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * JPA entity mapped to the {@code ARTICLE} table: a titled piece of content
 * with a status flag and creation/modification timestamps.
 *
 * <p>Timestamps are bound and serialized in the {@code yyyy-MM-dd HH:mm:ss}
 * pattern (JSON output in the GMT+8 timezone).
 *
 * @author handx <EMAIL>
 * @date 2017年4月28日 上午11:04:12
 */
@Entity
@Table(name = "ARTICLE")
public class Article {

    /** Auto-generated primary key. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    @Column(name = "ID")
    private Long id;

    /** Article headline. */
    @Column(name = "TITLE")
    private String title;

    /** Article body text. */
    @Column(name = "CONTENT")
    private String content;

    /** Publication status code (semantics defined by the service layer). */
    @Column(name = "STATUS")
    private Integer status;

    /** When the row was created. */
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @Column(name = "CREATE_TIME")
    private Date createTime;

    /** When the row was last modified. */
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @Column(name = "UPDATE_TIME")
    private Date updateTime;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    @Override
    public String toString() {
        return "Article [id=" + id + ", title=" + title + ", content=" + content + ", status=" + status
                + ", createTime=" + createTime + ", updateTime=" + updateTime + "]";
    }
}
|
<reponame>kaosat-dev/justDoGl
import { vecToStr } from './utils'
import { evaluateModule } from './evaluators'
/**
 * Translates an OpenSCAD `cylinder(...)` module into a signed-distance-field
 * GLSL expression string.
 *
 * Radii r1/r2 fall back to `r`; height gets a +0.1 epsilon. A finite `$fn`
 * below 100 is approximated with a hex prism, otherwise a cone section is used.
 */
export function cylinder (module, context) {
  console.log('cylinder', module, context)
  // Evaluate the module's argument expressions into a name -> value map.
  const params = module.argnames.reduce(function (rs, argName, index) {
    rs[argName] = evaluateModule(module.argexpr[index])
    return rs
  }, {})
  const transforms = context.transforms || 'pos'
  const r1 = (params['r1'] ? params['r1'] : params['r'])
  const r2 = (params['r2'] ? params['r2'] : params['r'])
  const h = params['h'] + '+0.1'
  const res = params['$fn'] ? parseInt(params['$fn'], 10) : undefined
  const center = params['center'] ? params['center'] : false
  let pos = !center ? [0, 0, `${h}`] : [0, 0, 0]
  console.log('pos', pos)
  // BUGFIX: the original pair of conditions (`res > 100` / `res < 100`) matched
  // neither branch when res === 100, returning undefined.
  if (res !== undefined && res < 100) { // TODO: how to make generic ??
    return ` sdHexPrism(${transforms} + ${vecToStr(pos)}, ${h} ,${r1})`
  }
  return ` sdConeSection( opRotY(${transforms} + ${vecToStr(pos)},_PI) , ${h}, ${r1}, ${r2})`
}
/**
 * Translates an OpenSCAD `cube(...)` module into an `sdBox` GLSL expression.
 * Half-extents are emitted symbolically as `<dim>/2.0`; a non-centered cube is
 * offset by its half-size.
 */
export function cuboid (module, context) {
  console.log('cuboid', module, context)
  // Evaluate the module's argument expressions into a name -> value map.
  const params = module.argnames.reduce((acc, name, idx) => {
    acc[name] = evaluateModule(module.argexpr[idx])
    return acc
  }, {})
  const transforms = context.transforms || 'pos'
  const rawSize = params['size'] || params[undefined]
  const size = [`${rawSize[0]}/2.0`, `${rawSize[1]}/2.0`, `${rawSize[2]}/2.0`]
  const isCentered = params['center'] ? params['center'] : false
  const pos = isCentered ? [0, 0, 0] : size
  return ` sdBox(${transforms} + ${vecToStr(pos)}, ${vecToStr(size)})`
}
/**
 * Translates an OpenSCAD `sphere(...)` module into an `sdSphere` GLSL
 * expression. Radius defaults to '1'; `center` is parsed but unused since a
 * sphere is symmetric about its origin.
 */
export function sphere (module, context) {
  console.log('sphere', module)
  // Evaluate the module's argument expressions into a name -> value map.
  const params = module.argnames.reduce((acc, name, idx) => {
    acc[name] = evaluateModule(module.argexpr[idx])
    return acc
  }, {})
  const transforms = context.transforms || 'pos'
  const radius = params['r'] ? params['r'] : '1'
  const center = params['center'] ? params['center'] : false
  return ` sdSphere(${transforms}, ${radius})`
}
|
#!/usr/bin/env bash
#
# Environment variables:
# $valgrind Specify the valgrind command line, if not
# then a default command line is used
# Trace every command and abort on the first failure.
set -xe
# Resolve this script's directory and the repository root, then load the
# shared test helpers (run_test etc.) and work from the repo root.
testdir=$(readlink -f $(dirname $0))
rootdir=$(readlink -f $(dirname $0)/../..)
source "$rootdir/test/common/autotest_common.sh"
cd "$rootdir"
# Unit tests for the bdev (block device abstraction) layer and its virtual bdevs.
function unittest_bdev() {
	$valgrind $testdir/lib/bdev/bdev.c/bdev_ut
	$valgrind $testdir/lib/bdev/nvme/bdev_ocssd.c/bdev_ocssd_ut
	$valgrind $testdir/lib/bdev/nvme/bdev_nvme.c/bdev_nvme_ut
	$valgrind $testdir/lib/bdev/raid/bdev_raid.c/bdev_raid_ut
	$valgrind $testdir/lib/bdev/bdev_zone.c/bdev_zone_ut
	$valgrind $testdir/lib/bdev/gpt/gpt.c/gpt_ut
	$valgrind $testdir/lib/bdev/part.c/part_ut
	$valgrind $testdir/lib/bdev/scsi_nvme.c/scsi_nvme_ut
	$valgrind $testdir/lib/bdev/vbdev_lvol.c/vbdev_lvol_ut
	$valgrind $testdir/lib/bdev/vbdev_zone_block.c/vbdev_zone_block_ut
	$valgrind $testdir/lib/bdev/mt/bdev.c/bdev_ut
}
# Unit tests for blobstore and blobfs.
function unittest_blob() {
	# We do not compile blob_ut on systems with too old Cunit, so do
	# not try to execute it if it doesn't exist
	if [[ -e $testdir/lib/blob/blob.c/blob_ut ]]; then
		$valgrind $testdir/lib/blob/blob.c/blob_ut
	fi
	$valgrind $testdir/lib/blobfs/tree.c/tree_ut
	$valgrind $testdir/lib/blobfs/blobfs_async_ut/blobfs_async_ut
	# blobfs_sync_ut hangs when run under valgrind, so don't use $valgrind
	$testdir/lib/blobfs/blobfs_sync_ut/blobfs_sync_ut
	$valgrind $testdir/lib/blobfs/blobfs_bdev.c/blobfs_bdev_ut
}
# Unit tests for the event framework (app, subsystem, reactor).
function unittest_event() {
	$valgrind $testdir/lib/event/subsystem.c/subsystem_ut
	$valgrind $testdir/lib/event/app.c/app_ut
	$valgrind $testdir/lib/event/reactor.c/reactor_ut
}
# Unit tests for the flash translation layer (FTL).
function unittest_ftl() {
	$valgrind $testdir/lib/ftl/ftl_ppa/ftl_ppa_ut
	$valgrind $testdir/lib/ftl/ftl_band.c/ftl_band_ut
	$valgrind $testdir/lib/ftl/ftl_reloc.c/ftl_reloc_ut
	$valgrind $testdir/lib/ftl/ftl_wptr/ftl_wptr_ut
	$valgrind $testdir/lib/ftl/ftl_md/ftl_md_ut
	$valgrind $testdir/lib/ftl/ftl_io.c/ftl_io_ut
}
# Unit tests for the iSCSI target.
function unittest_iscsi() {
	$valgrind $testdir/lib/iscsi/conn.c/conn_ut
	$valgrind $testdir/lib/iscsi/param.c/param_ut
	$valgrind $testdir/lib/iscsi/tgt_node.c/tgt_node_ut
	$valgrind $testdir/lib/iscsi/iscsi.c/iscsi_ut
	$valgrind $testdir/lib/iscsi/init_grp.c/init_grp_ut
	$valgrind $testdir/lib/iscsi/portal_grp.c/portal_grp_ut
}
# Unit tests for the JSON parser/writer and the JSON-RPC server.
function unittest_json() {
	$valgrind $testdir/lib/json/json_parse.c/json_parse_ut
	$valgrind $testdir/lib/json/json_util.c/json_util_ut
	$valgrind $testdir/lib/json/json_write.c/json_write_ut
	$valgrind $testdir/lib/jsonrpc/jsonrpc_server.c/jsonrpc_server_ut
}
# Unit tests for the NVMe driver (controller, namespaces, qpairs, transports).
function unittest_nvme() {
	$valgrind $testdir/lib/nvme/nvme.c/nvme_ut
	$valgrind $testdir/lib/nvme/nvme_ctrlr.c/nvme_ctrlr_ut
	$valgrind $testdir/lib/nvme/nvme_ctrlr_cmd.c/nvme_ctrlr_cmd_ut
	$valgrind $testdir/lib/nvme/nvme_ctrlr_ocssd_cmd.c/nvme_ctrlr_ocssd_cmd_ut
	$valgrind $testdir/lib/nvme/nvme_ns.c/nvme_ns_ut
	$valgrind $testdir/lib/nvme/nvme_ns_cmd.c/nvme_ns_cmd_ut
	$valgrind $testdir/lib/nvme/nvme_ns_ocssd_cmd.c/nvme_ns_ocssd_cmd_ut
	$valgrind $testdir/lib/nvme/nvme_qpair.c/nvme_qpair_ut
	$valgrind $testdir/lib/nvme/nvme_pcie.c/nvme_pcie_ut
	$valgrind $testdir/lib/nvme/nvme_poll_group.c/nvme_poll_group_ut
	$valgrind $testdir/lib/nvme/nvme_quirks.c/nvme_quirks_ut
	$valgrind $testdir/lib/nvme/nvme_tcp.c/nvme_tcp_ut
	$valgrind $testdir/lib/nvme/nvme_transport.c/nvme_transport_ut
	$valgrind $testdir/lib/nvme/nvme_io_msg.c/nvme_io_msg_ut
	$valgrind $testdir/lib/nvme/nvme_pcie_common.c/nvme_pcie_common_ut
}
# Unit tests for the NVMe-oF target.
function unittest_nvmf() {
	$valgrind $testdir/lib/nvmf/ctrlr.c/ctrlr_ut
	$valgrind $testdir/lib/nvmf/ctrlr_bdev.c/ctrlr_bdev_ut
	$valgrind $testdir/lib/nvmf/ctrlr_discovery.c/ctrlr_discovery_ut
	$valgrind $testdir/lib/nvmf/subsystem.c/subsystem_ut
	$valgrind $testdir/lib/nvmf/tcp.c/tcp_ut
}
# Unit tests for the SCSI layer.
function unittest_scsi() {
	$valgrind $testdir/lib/scsi/dev.c/dev_ut
	$valgrind $testdir/lib/scsi/lun.c/lun_ut
	$valgrind $testdir/lib/scsi/scsi.c/scsi_ut
	$valgrind $testdir/lib/scsi/scsi_bdev.c/scsi_bdev_ut
	$valgrind $testdir/lib/scsi/scsi_pr.c/scsi_pr_ut
}
# Unit tests for the sock abstraction (posix always; uring only when built in).
function unittest_sock() {
	$valgrind $testdir/lib/sock/sock.c/sock_ut
	$valgrind $testdir/lib/sock/posix.c/posix_ut
	# Check whether uring is configured
	if grep -q '#define SPDK_CONFIG_URING 1' $rootdir/include/spdk/config.h; then
		$valgrind $testdir/lib/sock/uring.c/uring_ut
	fi
}
function unittest_util() {
$valgrind $testdir/lib/util/base64.c/base64_ut
$valgrind $testdir/lib/util/bit_array.c/bit_array_ut
$valgrind $testdir/lib/util/cpuset.c/cpuset_ut
$valgrind $testdir/lib/util/crc16.c/crc16_ut
$valgrind $testdir/lib/util/crc32_ieee.c/crc32_ieee_ut
$valgrind $testdir/lib/util/crc32c.c/crc32c_ut
$valgrind $testdir/lib/util/string.c/string_ut
$valgrind $testdir/lib/util/dif.c/dif_ut
$valgrind $testdir/lib/util/iov.c/iov_ut
$valgrind $testdir/lib/util/math.c/math_ut
$valgrind $testdir/lib/util/pipe.c/pipe_ut
}
# if ASAN is enabled, use it. If not use valgrind if installed but allow
# the env variable to override the default shown below.
if [ -z ${valgrind+x} ]; then
# '#undef SPDK_CONFIG_ASAN' in config.h means ASAN is disabled; only then
# fall back to valgrind, and only if it is actually on PATH.
if grep -q '#undef SPDK_CONFIG_ASAN' $rootdir/include/spdk/config.h && hash valgrind; then
valgrind='valgrind --leak-check=full --error-exitcode=2'
else
valgrind=''
fi
fi
# setup local unit test coverage if cov is available
# lcov takes considerable time to process clang coverage.
# Disabling lcov allow us to do this.
# More information: https://github.com/spdk/spdk/issues/1693
CC_TYPE=$(grep CC_TYPE $rootdir/mk/cc.mk)
if hash lcov && grep -q '#define SPDK_CONFIG_COVERAGE 1' $rootdir/include/spdk/config.h && ! [[ "$CC_TYPE" == *"clang"* ]]; then
cov_avail="yes"
else
cov_avail="no"
fi
if [ "$cov_avail" = "yes" ]; then
# set unit test output dir if not specified in env var
if [ -z ${UT_COVERAGE+x} ]; then
UT_COVERAGE="ut_coverage"
fi
mkdir -p $UT_COVERAGE
# lcov/genhtml options: enable branch and function coverage plus the legend.
export LCOV_OPTS="
--rc lcov_branch_coverage=1
--rc lcov_function_coverage=1
--rc genhtml_branch_coverage=1
--rc genhtml_function_coverage=1
--rc genhtml_legend=1
--rc geninfo_all_blocks=1
"
export LCOV="lcov $LCOV_OPTS --no-external"
# zero out coverage data
$LCOV -q -c -i -d . -t "Baseline" -o $UT_COVERAGE/ut_cov_base.info
fi
# workaround for valgrind v3.13 on arm64
if [ $(uname -m) = "aarch64" ]; then
export LD_HWCAP_MASK=1
fi
# Run every unit test (group functions above, plus individual binaries).
# Tests for optional features are gated on the matching config.h defines.
run_test "unittest_pci_event" $valgrind $testdir/lib/env_dpdk/pci_event.c/pci_event_ut
run_test "unittest_include" $valgrind $testdir/include/spdk/histogram_data.h/histogram_ut
run_test "unittest_bdev" unittest_bdev
if grep -q '#define SPDK_CONFIG_CRYPTO 1' $rootdir/include/spdk/config.h; then
run_test "unittest_bdev_crypto" $valgrind $testdir/lib/bdev/crypto.c/crypto_ut
fi
if grep -q '#define SPDK_CONFIG_REDUCE 1' $rootdir/include/spdk/config.h; then
run_test "unittest_bdev_reduce" $valgrind $testdir/lib/bdev/compress.c/compress_ut
fi
if grep -q '#define SPDK_CONFIG_PMDK 1' $rootdir/include/spdk/config.h; then
run_test "unittest_bdev_pmem" $valgrind $testdir/lib/bdev/pmem/bdev_pmem_ut
fi
if grep -q '#define SPDK_CONFIG_RAID5 1' $rootdir/include/spdk/config.h; then
run_test "unittest_bdev_raid5" $valgrind $testdir/lib/bdev/raid/raid5.c/raid5_ut
fi
run_test "unittest_blob_blobfs" unittest_blob
run_test "unittest_event" unittest_event
# FTL tests are Linux-only.
if [ $(uname -s) = Linux ]; then
run_test "unittest_ftl" unittest_ftl
fi
run_test "unittest_accel" $valgrind $testdir/lib/accel/accel.c/accel_engine_ut
run_test "unittest_ioat" $valgrind $testdir/lib/ioat/ioat.c/ioat_ut
if grep -q '#define SPDK_CONFIG_IDXD 1' $rootdir/include/spdk/config.h; then
run_test "unittest_idxd" $valgrind $testdir/lib/idxd/idxd.c/idxd_ut
fi
run_test "unittest_iscsi" unittest_iscsi
run_test "unittest_json" unittest_json
run_test "unittest_notify" $valgrind $testdir/lib/notify/notify.c/notify_ut
run_test "unittest_nvme" unittest_nvme
run_test "unittest_log" $valgrind $testdir/lib/log/log.c/log_ut
run_test "unittest_lvol" $valgrind $testdir/lib/lvol/lvol.c/lvol_ut
if grep -q '#define SPDK_CONFIG_RDMA 1' $rootdir/include/spdk/config.h; then
run_test "unittest_nvme_rdma" $valgrind $testdir/lib/nvme/nvme_rdma.c/nvme_rdma_ut
fi
if grep -q '#define SPDK_CONFIG_NVME_CUSE 1' $rootdir/include/spdk/config.h; then
run_test "unittest_nvme_cuse" $valgrind $testdir/lib/nvme/nvme_cuse.c/nvme_cuse_ut
fi
run_test "unittest_nvmf" unittest_nvmf
if grep -q '#define SPDK_CONFIG_FC 1' $rootdir/include/spdk/config.h; then
run_test "unittest_nvmf_fc" $valgrind $testdir/lib/nvmf/fc.c/fc_ut
run_test "unittest_nvmf_fc_ls" $valgrind $testdir/lib/nvmf/fc_ls.c/fc_ls_ut
fi
if grep -q '#define SPDK_CONFIG_RDMA 1' $rootdir/include/spdk/config.h; then
run_test "unittest_nvmf_rdma" $valgrind $testdir/lib/nvmf/rdma.c/rdma_ut
fi
run_test "unittest_scsi" unittest_scsi
run_test "unittest_sock" unittest_sock
run_test "unittest_thread" $valgrind $testdir/lib/thread/thread.c/thread_ut
run_test "unittest_util" unittest_util
if grep -q '#define SPDK_CONFIG_VHOST 1' $rootdir/include/spdk/config.h; then
run_test "unittest_vhost" $valgrind $testdir/lib/vhost/vhost.c/vhost_ut
fi
# Capture post-run coverage, merge it with the baseline, strip out
# non-SPDK sources, and generate the HTML report. Skipped for clang
# (see the CC_TYPE note earlier in this script).
if [ "$cov_avail" = "yes" ] && ! [[ "$CC_TYPE" == *"clang"* ]]; then
$LCOV -q -d . -c -t "$(hostname)" -o $UT_COVERAGE/ut_cov_test.info
$LCOV -q -a $UT_COVERAGE/ut_cov_base.info -a $UT_COVERAGE/ut_cov_test.info -o $UT_COVERAGE/ut_cov_total.info
$LCOV -q -a $UT_COVERAGE/ut_cov_total.info -o $UT_COVERAGE/ut_cov_unit.info
# Remove directories we do not want counted against unit test coverage.
$LCOV -q -r $UT_COVERAGE/ut_cov_unit.info "$rootdir/app/*" -o $UT_COVERAGE/ut_cov_unit.info
$LCOV -q -r $UT_COVERAGE/ut_cov_unit.info "$rootdir/dpdk/*" -o $UT_COVERAGE/ut_cov_unit.info
$LCOV -q -r $UT_COVERAGE/ut_cov_unit.info "$rootdir/examples/*" -o $UT_COVERAGE/ut_cov_unit.info
$LCOV -q -r $UT_COVERAGE/ut_cov_unit.info "$rootdir/include/*" -o $UT_COVERAGE/ut_cov_unit.info
$LCOV -q -r $UT_COVERAGE/ut_cov_unit.info "$rootdir/lib/vhost/rte_vhost/*" -o $UT_COVERAGE/ut_cov_unit.info
$LCOV -q -r $UT_COVERAGE/ut_cov_unit.info "$rootdir/test/*" -o $UT_COVERAGE/ut_cov_unit.info
rm -f $UT_COVERAGE/ut_cov_base.info $UT_COVERAGE/ut_cov_test.info
genhtml $UT_COVERAGE/ut_cov_unit.info --output-directory $UT_COVERAGE
# git -C option not used for compatibility reasons
(cd $rootdir && git clean -f "*.gcda")
fi
set +x
echo
echo
echo "====================="
echo "All unit tests passed"
echo "====================="
if [ "$cov_avail" = "yes" ]; then
echo "Note: coverage report is here: $rootdir/$UT_COVERAGE"
else
echo "WARN: lcov not installed or SPDK built without coverage!"
fi
# Warn when tests ran with neither memory checker enabled.
if grep -q '#undef SPDK_CONFIG_ASAN' $rootdir/include/spdk/config.h && [ "$valgrind" = "" ]; then
echo "WARN: neither valgrind nor ASAN is enabled!"
fi
echo
echo
|
<filename>commands/commands.go
package commands
//go:generate sqlboiler --no-hooks psql
//REMOVED: generate easyjson commands.go
import (
"context"
"github.com/rukongai/yagpdb/bot/eventsystem"
"github.com/rukongai/yagpdb/commands/models"
"github.com/rukongai/yagpdb/common"
"github.com/rukongai/yagpdb/common/config"
"github.com/rukongai/yagpdb/common/featureflags"
"github.com/jonas747/dcmd/v4"
"github.com/rukongai/discordgo/v5"
"github.com/mediocregopher/radix/v3"
"github.com/volatiletech/sqlboiler/queries/qm"
)
// logger is the plugin-scoped logger shared across the commands package.
var logger = common.GetPluginLogger(&Plugin{})

// CtxKey identifies values the command system attaches to a command's context.
type CtxKey int

const (
	CtxKeyCmdSettings CtxKey = iota
	CtxKeyChannelOverride
	CtxKeyExecutedByCC
)

// MessageFilterFunc is called on every incoming message; it returns true if
// the message should be checked for commands, false otherwise.
type MessageFilterFunc func(evt *eventsystem.EventData, msg *discordgo.Message) bool

var (
	// BUG FIX: corrected the "Wether" typo in the user-facing option description.
	confSetTyping = config.RegisterOption("yagpdb.commands.typing", "Whether to set typing or not when running commands", true)
)

// These functions are called on every message, and should return true if the message should be checked for commands, false otherwise
var MessageFilterFuncs []MessageFilterFunc

// Plugin implements the commands plugin.
type Plugin struct{}
// PluginInfo identifies this plugin to the common plugin system.
func (p *Plugin) PluginInfo() *common.PluginInfo {
	return &common.PluginInfo{
		Name:     "Commands",
		SysName:  "commands",
		Category: common.PluginCategoryCore,
	}
}
// RegisterPlugin registers the commands plugin with the plugin system,
// migrates the logged-commands table and installs the package's DB schemas.
func RegisterPlugin() {
	plugin := &Plugin{}
	common.RegisterPlugin(plugin)

	// Fatal on migration failure: the bot cannot operate without this table.
	err := common.GORM.AutoMigrate(&common.LoggedExecutedCommand{}).Error
	if err != nil {
		logger.WithError(err).Fatal("Failed migrating logged commands database")
	}

	common.InitSchemas("commands", DBSchemas...)
}
// CommandProvider is implemented by plugins that contribute commands.
type CommandProvider interface {
	// This is where you should register your commands, called on both the webserver and the bot
	AddCommands()
}
// InitCommands sets up the global dcmd command system, registers the
// built-in help and prefix commands, and asks every plugin implementing
// CommandProvider to add its own commands.
func InitCommands() {
	// Setup the command system
	CommandSystem = &dcmd.System{
		Root: &dcmd.Container{
			HelpTitleEmoji: "ℹ️",
			HelpColor:      0xbeff7a,
			RunInDM:        true,
			IgnoreBots:     true,
		},
		ResponseSender: &dcmd.StdResponseSender{LogErrors: true},
		Prefix:         &Plugin{},
	}

	// We have our own middleware before the argument parsing, this is to check for things such as whether or not the command is enabled at all
	CommandSystem.Root.AddMidlewares(YAGCommandMiddleware)

	CommandSystem.Root.AddCommand(cmdHelp, cmdHelp.GetTrigger())
	CommandSystem.Root.AddCommand(cmdPrefix, cmdPrefix.GetTrigger())

	// Let every registered plugin that provides commands add them now.
	for _, v := range common.Plugins {
		if adder, ok := v.(CommandProvider); ok {
			adder.AddCommands()
		}
	}
}
// GetCommandPrefixRedis returns the command prefix configured for the given
// guild, falling back to the default prefix when none is stored in Redis.
// Any Redis error is returned alongside the (possibly defaulted) prefix.
func GetCommandPrefixRedis(guild int64) (string, error) {
	var prefix string
	err := common.RedisPool.Do(radix.Cmd(&prefix, "GET", "command_prefix:"+discordgo.StrID(guild)))
	if err == nil && prefix == "" {
		prefix = defaultCommandPrefix()
	}

	return prefix, err
}
// Compile-time assertion that Plugin provides feature flags.
var _ featureflags.PluginWithFeatureFlags = (*Plugin)(nil)

const (
	featureFlagHasCustomPrefix    = "commands_has_custom_prefix"
	featureFlagHasCustomOverrides = "commands_has_custom_overrides"
)

// UpdateFeatureFlags recomputes this guild's command-related feature flags
// from its stored prefix and its channel/command overrides.
func (p *Plugin) UpdateFeatureFlags(guildID int64) ([]string, error) {
	prefix, err := GetCommandPrefixRedis(guildID)
	if err != nil {
		return nil, err
	}

	var flags []string
	if defaultCommandPrefix() != prefix {
		flags = append(flags, featureFlagHasCustomPrefix)
	}

	// Load channel overrides together with their command overrides so
	// isCustomOverrides can inspect both without extra queries.
	channelOverrides, err := models.CommandsChannelsOverrides(qm.Where("guild_id=?", guildID), qm.Load("CommandsCommandOverrides")).AllG(context.Background())
	if err != nil {
		return nil, err
	}

	if isCustomOverrides(channelOverrides) {
		flags = append(flags, featureFlagHasCustomOverrides)
	}

	return flags, nil
}
// isCustomOverrides reports whether the given channel overrides differ from
// the implicit defaults, i.e. whether the guild actually customized anything.
// A single global override with all-default settings does not count.
func isCustomOverrides(overrides []*models.CommandsChannelsOverride) bool {
	if len(overrides) == 0 {
		return false
	}

	if len(overrides) == 1 && overrides[0].Global {
		// check if this is default
		g := overrides[0]
		// NOTE(review): g.R is assumed populated because callers load
		// "CommandsCommandOverrides" via qm.Load -- confirm before calling
		// this with overrides fetched without that Load (nil R would panic).
		if !g.AutodeleteResponse &&
			!g.AutodeleteTrigger &&
			g.CommandsEnabled &&
			len(g.RequireRoles) == 0 &&
			len(g.IgnoreRoles) == 0 &&
			len(g.R.CommandsCommandOverrides) == 0 {
			return false
		}
	}

	return true
}
// AllFeatureFlags lists every feature flag this plugin may set, so the
// featureflags system can track and invalidate them.
func (p *Plugin) AllFeatureFlags() []string {
	return []string{
		featureFlagHasCustomPrefix,    // Set if the server has a custom command prefix
		featureFlagHasCustomOverrides, // set if the server has custom command and/or channel overrides
	}
}
|
import unittest
from python_datastructures.trie import Trie
class Test_Trie(unittest.TestCase):
    """Unit tests for the Trie data structure."""

    def setUp(self):
        # Fresh trie built from a small fixture before every test.
        words = ['apple', 'app', 'android', 'and']
        self.trie = Trie()
        self.trie.build(words)

    def test_add(self):
        # Adding a new word increments the count (4 fixture words + 1).
        self.trie.add("amazon")
        self.assertEqual(self.trie.wordcount, 5)

    def test_contains(self):
        # Use assertTrue/assertFalse instead of comparing against booleans.
        self.assertTrue(self.trie.contains("app"))
        self.assertFalse(self.trie.contains("amazon"))
        # NOTE(review): "ap" is only a prefix of the fixture words, yet the
        # original test expects contains() to return True -- presumably
        # contains() matches prefixes; confirm against Trie's documented
        # contract before changing this expectation.
        self.assertTrue(self.trie.contains("ap"))


if __name__ == "__main__":
    unittest.main()
|
#!/bin/bash
# Daily backup: archive SOURCE_DIR into a dated tar.gz under DEST_DIR.
# BUG FIX: paths are now quoted (previously a space in either path would
# break mkdir/tar), and the script aborts on any failure.
set -euo pipefail

SOURCE_DIR="/home/user/documents"
DEST_DIR="/home/user/backups"

# Make sure the destination directory exists
mkdir -p "${DEST_DIR}"

# Create a timestamp (one archive per day; reruns overwrite the same file)
NOW=$(date +"%Y-%m-%d")

# Create the filename
FILENAME="${DEST_DIR}/${NOW}.tar.gz"

# Perform the backup
tar -zcvf "${FILENAME}" "${SOURCE_DIR}"
#!/bin/bash
# Copyright 2021 4Paradigm
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eE

# goto toplevel directory
cd "$(dirname "$0")/.."

# install thirdparty hybrise
HYBRIDSE_THIRDPARTY="$(pwd)/thirdparty"
../steps/setup_thirdparty.sh "${HYBRIDSE_THIRDPARTY}"

# coreutils' nproc may be missing on macOS; provide a fallback.
# BUG FIX: the original used `alias nproc=...`, but aliases are not
# expanded in non-interactive shells, so the fallback never took effect.
# A shell function works in scripts and inside $(...) substitution.
if uname -a | grep -q Darwin; then
    nproc() { sysctl -n hw.logicalcpu; }
fi

rm -rf build
mkdir -p build && cd build
cmake .. -DCMAKE_BUILD_TYPE=Release -DJAVASDK_ENABLE=ON -DTESTING_ENABLE=OFF -DPYSDK_ENABLE=OFF -DEXAMPLES_ENABLE=OFF
make -j"$(nproc)"
|
<reponame>getmetamapper/metamapper<gh_stars>10-100
# -*- coding: utf-8 -*-
import abc
class BaseSearchBackend(metaclass=abc.ABCMeta):
    """Abstract base class for search backends.

    Concrete subclasses implement :meth:`execute` and return a list of
    dictionaries (or any iterable of objects supporting ``__getitem__``).
    """

    def __init__(self, workspace, user, **kwargs):
        # Every search is scoped to the requesting workspace/user pair.
        self.workspace = workspace
        self.user = user

    @abc.abstractmethod
    def execute(self, query, types=None, datastores=None, start=0, size=100, **facets):
        """Search the backend with the given query string.

        Implementations must return results shaped like::

            [
                {'pk': '...', 'model_name': '...', 'score': '...', 'datastore_id': '...'},
                ...
            ]

        These dictionaries are then used to re-build the database objects
        returned via GraphQL.
        """
|
<reponame>CapCap/my-first-client
package example;
import org.bouncycastle.crypto.params.Ed25519PrivateKeyParameters;
import com.diem.*;
import com.diem.jsonrpc.JsonRpc.Account;
import com.diem.jsonrpc.JsonRpc.Event;
import java.security.SecureRandom;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
/**
 * GetEventsExample demonstrates how to subscribe to a specific events stream
 * based on an account's events key.
 */
public class GetEventsExample {
    public static final String CURRENCY_CODE = "XUS";

    public static void main(String[] args) throws DiemException {
        //connect to testnet
        DiemClient client = Testnet.createClient();

        //create new account
        SecureRandom random = new SecureRandom();
        Ed25519PrivateKeyParameters privateKeyParams = new Ed25519PrivateKeyParameters(random);
        Ed25519PrivateKey privateKey = new Ed25519PrivateKey(privateKeyParams);
        AuthKey authKey = AuthKey.ed25519(privateKey.publicKey());
        Testnet.mintCoins(client, 100000000, authKey.hex(), CURRENCY_CODE);

        //get account events key
        Account account = client.getAccount(authKey.accountAddress());
        String eventsKey = account.getReceivedEventsKey();

        //start minter to demonstrates events creation
        startMinter(client, authKey);

        //demonstrates events subscription
        subscribe(client, eventsKey);
    }

    /**
     * Polls the given events stream 15 times (10 events per fetch, 3 seconds
     * apart) on a background thread, printing every new event found.
     */
    public static void subscribe(DiemClient client, String eventsKey) {
        Runnable listener = () -> {
            long start = 0;
            for (int i = 0; i < 15; i++) {
                List<Event> events;
                try {
                    events = client.getEvents(eventsKey, start, 10);
                } catch (DiemException e) {
                    throw new RuntimeException(e);
                }
                start += events.size();
                System.out.println(events.size() + " new events found");
                for (int j = 0; j < events.size(); j++) {
                    System.out.println("Event #" + (j + 1) + ":");
                    System.out.println(events.get(j));
                }
                try {
                    Thread.sleep(3_000);
                } catch (InterruptedException e) {
                    // BUG FIX: restore the interrupt flag before rethrowing so
                    // callers can still observe the interruption.
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }
        };
        Thread listenerThread = new Thread(listener);
        listenerThread.start();
    }

    /**
     * Mints coins to the account 10 times, one second apart, on a background
     * thread, so the subscriber has a stream of events to observe.
     */
    private static void startMinter(DiemClient client, AuthKey authKey) {
        Runnable minter = () -> {
            for (int i = 0; i < 10; i++) {
                int amount = 1000000;
                Testnet.mintCoins(client, amount, authKey.hex(), CURRENCY_CODE);
                try {
                    Thread.sleep(1_000);
                } catch (InterruptedException e) {
                    // BUG FIX: restore the interrupt flag before rethrowing.
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }
        };
        Thread minterThread = new Thread(minter);
        minterThread.start();
    }
}
|
<filename>src/main/java/net/blay09/mods/cookingforblockheads/client/GuiRecipeBook.java
package net.blay09.mods.cookingforblockheads.client;
import cpw.mods.fml.common.Optional;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import net.blay09.mods.cookingforblockheads.container.ContainerRecipeBook;
import net.blay09.mods.cookingforblockheads.container.slot.SlotRecipe;
import net.blay09.mods.cookingforblockheads.network.MessageSort;
import net.blay09.mods.cookingforblockheads.network.MessageSwitchRecipe;
import net.blay09.mods.cookingforblockheads.network.NetworkHandler;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.GuiTextField;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.resources.I18n;
import net.minecraft.inventory.Slot;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.StatCollector;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.entity.player.ItemTooltipEvent;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import org.lwjgl.opengl.GL11;
import yalter.mousetweaks.api.IMTModGuiContainer;
@Optional.Interface(modid = "MouseTweaks", iface = "yalter.mousetweaks.api.IMTModGuiContainer")
public class GuiRecipeBook extends GuiContainer implements IMTModGuiContainer {

    // Scrollbar geometry and color for the 3-column recipe list.
    private static final int SCROLLBAR_COLOR = 0xFFAAAAAA;
    private static final int SCROLLBAR_Y = 8;
    private static final int SCROLLBAR_WIDTH = 7;
    private static final int SCROLLBAR_HEIGHT = 77;
    private static final ResourceLocation guiTexture = new ResourceLocation("cookingforblockheads", "textures/gui/gui.png");
    // Number of recipe rows visible at once; the list is 3 recipes per row.
    private static final int VISIBLE_ROWS = 4;

    private final ContainerRecipeBook container;
    // True while this screen is registered on the Forge event bus (tooltips).
    private boolean registered;

    // Scrollbar state, recalculated whenever the recipe list changes.
    private int scrollBarScaledHeight;
    private int scrollBarXPos;
    private int scrollBarYPos;
    private int currentOffset;

    // Scrollbar drag state; mouseClickY == -1 means "not dragging".
    private int mouseClickY = -1;
    private int indexWhenClicked;
    private int lastNumberOfMoves;

    private GuiButton btnNextRecipe;
    private GuiButton btnPrevRecipe;
    private GuiTextField searchBar;

    private GuiButtonSort[] sortButtons;
    private GuiButtonSort btnSortName;
    private GuiButtonSort btnSortHunger;
    private GuiButtonSort btnSortSaturation;

    // Localized multi-line placeholder messages (split on a literal "\n").
    private final String[] noIngredients;
    private final String[] noSelection;

    // Slot under the mouse during the last background draw; consumed by
    // the tooltip event handler below.
    private Slot hoverSlot;

    public GuiRecipeBook(ContainerRecipeBook container) {
        super(container);
        this.container = container;
        noIngredients = StatCollector.translateToLocal("cookingforblockheads:no_ingredients").split("\\\\n");
        noSelection = StatCollector.translateToLocal("cookingforblockheads:no_selection").split("\\\\n");
    }

    @Override
    public void initGui() {
        ySize = 174;
        super.initGui();
        // Prev/next buttons for cycling recipe variants; hidden until a
        // recipe with variants is selected (see background draw).
        btnPrevRecipe = new GuiButton(0, width / 2 - 79, height / 2 - 51, 13, 20, "<");
        btnPrevRecipe.visible = false;
        buttonList.add(btnPrevRecipe);
        btnNextRecipe = new GuiButton(1, width / 2 - 9, height / 2 - 51, 13, 20, ">");
        btnNextRecipe.visible = false;
        buttonList.add(btnNextRecipe);
        // Sort buttons along the right edge of the recipe list.
        btnSortName = new GuiButtonSort(2, width / 2 + 87, height / 2 - 80, 196, "cookingforblockheads:sort_by_name.tooltip");
        buttonList.add(btnSortName);
        btnSortHunger = new GuiButtonSort(3, width / 2 + 87, height / 2 - 60, 216, "cookingforblockheads:sort_by_hunger.tooltip");
        buttonList.add(btnSortHunger);
        btnSortSaturation = new GuiButtonSort(4, width / 2 + 87, height / 2 - 40, 236, "cookingforblockheads:sort_by_saturation.tooltip");
        buttonList.add(btnSortSaturation);
        sortButtons = new GuiButtonSort[] {btnSortName, btnSortHunger, btnSortSaturation};
        searchBar = new GuiTextField(fontRendererObj, guiLeft + xSize - 85, guiTop - 10, 70, 10);
        searchBar.setVisible(false);
        // Register once for ItemTooltipEvent; initGui can be called again
        // on window resize, so guard against double registration.
        if(!registered) {
            MinecraftForge.EVENT_BUS.register(this);
            registered = true;
        }
        recalculateScrollBar();
    }

    @Override
    public void onGuiClosed() {
        super.onGuiClosed();
        // Unregister from the event bus to avoid leaking this screen.
        if(registered) {
            MinecraftForge.EVENT_BUS.unregister(this);
            registered = false;
        }
    }

    @Override
    protected void actionPerformed(GuiButton button) {
        super.actionPerformed(button);
        // Each button updates the client-side container immediately and
        // notifies the server so its container state stays in sync.
        if(button == btnPrevRecipe) {
            container.prevRecipe();
            NetworkHandler.instance.sendToServer(new MessageSwitchRecipe(-1));
        } else if(button == btnNextRecipe) {
            container.nextRecipe();
            NetworkHandler.instance.sendToServer(new MessageSwitchRecipe(1));
        } else if(button == btnSortName) {
            container.sortingChanged();
            NetworkHandler.instance.sendToServer(new MessageSort(0));
        } else if(button == btnSortHunger) {
            container.sortingChanged();
            NetworkHandler.instance.sendToServer(new MessageSort(1));
        } else if(button == btnSortSaturation) {
            container.sortingChanged();
            NetworkHandler.instance.sendToServer(new MessageSort(2));
        }
    }

    /**
     * Recomputes the scrollbar thumb size and position from the current
     * recipe count and scroll offset (3 recipes per row).
     */
    public void recalculateScrollBar() {
        int scrollBarTotalHeight = SCROLLBAR_HEIGHT - 1;
        this.scrollBarScaledHeight = (int) (scrollBarTotalHeight * Math.min(1f, ((float) VISIBLE_ROWS / (Math.ceil(container.getAvailableRecipeCount() / 3f)))));
        this.scrollBarXPos = guiLeft + xSize - SCROLLBAR_WIDTH - 9;
        this.scrollBarYPos = guiTop + SCROLLBAR_Y + ((scrollBarTotalHeight - scrollBarScaledHeight) * currentOffset / Math.max(1, (int) Math.ceil((container.getAvailableRecipeCount() / 3f)) - VISIBLE_ROWS));
    }

    @Override
    public void handleMouseInput() {
        super.handleMouseInput();
        // Mouse wheel scrolls the recipe list one row at a time.
        int delta = Mouse.getEventDWheel();
        if (delta == 0) {
            return;
        }
        setCurrentOffset(delta > 0 ? currentOffset - 1 : currentOffset + 1);
    }

    @Override
    protected void mouseMovedOrUp(int mouseX, int mouseY, int state) {
        super.mouseMovedOrUp(mouseX, mouseY, state);
        // Button release ends a scrollbar drag.
        if (state != -1 && mouseClickY != -1) {
            mouseClickY = -1;
            indexWhenClicked = 0;
            lastNumberOfMoves = 0;
        }
    }

    @Override
    protected void keyTyped(char c, int keyCode) {
        // Give the search bar first shot at the keystroke.
        if(!searchBar.textboxKeyTyped(c, keyCode)) {
            super.keyTyped(c, keyCode);
        }
    }

    @Override
    protected void mouseClicked(int mouseX, int mouseY, int button) {
        super.mouseClicked(mouseX, mouseY, button);
        // Right-click inside the search bar clears it.
        if(button == 1 && mouseX >= searchBar.xPosition && mouseX < searchBar.xPosition + searchBar.width && mouseY >= searchBar.yPosition && mouseY < searchBar.yPosition + searchBar.height) {
            searchBar.setText("");
        } else {
            searchBar.mouseClicked(mouseX, mouseY, button);
        }
        // Clicking the scrollbar thumb starts a drag.
        if (mouseX >= scrollBarXPos && mouseX <= scrollBarXPos + SCROLLBAR_WIDTH && mouseY >= scrollBarYPos && mouseY <= scrollBarYPos + scrollBarScaledHeight) {
            mouseClickY = mouseY;
            indexWhenClicked = currentOffset;
        }
    }

    @Override
    protected void drawGuiContainerBackgroundLayer(float f, int mouseX, int mouseY) {
        // Re-clamp the scroll offset when the recipe list changed.
        if(container.isRecipeListDirty()) {
            setCurrentOffset(currentOffset);
            container.markDirty(false);
        }
        GL11.glColor4f(1f, 1f, 1f, 1f);
        mc.getTextureManager().bindTexture(guiTexture);
        int x = (width - xSize) / 2;
        int y = (height - ySize) / 2;
        drawTexturedModalRect(x, y, 0, 0, xSize, ySize);
        // While dragging, translate mouse movement into row offsets.
        if (mouseClickY != -1) {
            float pixelsPerFilter = (SCROLLBAR_HEIGHT - scrollBarScaledHeight) / (float) Math.max(1, (int) Math.ceil(container.getAvailableRecipeCount() / 3f) - VISIBLE_ROWS);
            if (pixelsPerFilter != 0) {
                int numberOfFiltersMoved = (int) ((mouseY - mouseClickY) / pixelsPerFilter);
                if (numberOfFiltersMoved != lastNumberOfMoves) {
                    setCurrentOffset(indexWhenClicked + numberOfFiltersMoved);
                    lastNumberOfMoves = numberOfFiltersMoved;
                }
            }
        }
        boolean hasVariants = container.hasVariants();
        btnPrevRecipe.visible = hasVariants;
        btnNextRecipe.visible = hasVariants;
        boolean hasRecipes = container.getAvailableRecipeCount() > 0;
        btnSortName.enabled = hasRecipes;
        btnSortHunger.enabled = hasRecipes;
        btnSortSaturation.enabled = hasRecipes;
        // Left pane: placeholder text, or the crafting/furnace recipe grid.
        if(!container.hasSelection()) {
            int curY = guiTop + 79 / 2 - noSelection.length / 2 * fontRendererObj.FONT_HEIGHT;
            for(String s : noSelection) {
                fontRendererObj.drawStringWithShadow(s, guiLeft + 23 + 27 - fontRendererObj.getStringWidth(s) / 2, curY, 0xFFFFFFFF);
                curY += fontRendererObj.FONT_HEIGHT + 5;
            }
        } else if(container.isFurnaceRecipe()) {
            drawTexturedModalRect(guiLeft + 23, guiTop + 19, 54, 174, 54, 54);
        } else {
            drawTexturedModalRect(guiLeft + 23, guiTop + 19, 0, 174, 54, 54);
        }
        GuiContainer.drawRect(scrollBarXPos, scrollBarYPos, scrollBarXPos + SCROLLBAR_WIDTH, scrollBarYPos + scrollBarScaledHeight, SCROLLBAR_COLOR);
        // Right pane: dim the list and show a message when nothing matches.
        if(container.getAvailableRecipeCount() == 0) {
            GuiContainer.drawRect(guiLeft + 97, guiTop + 7, guiLeft + 168, guiTop + 85, 0xAA222222);
            int curY = guiTop + 79 / 2 - noIngredients.length / 2 * fontRendererObj.FONT_HEIGHT;
            for(String s : noIngredients) {
                fontRendererObj.drawStringWithShadow(s, guiLeft + 97 + 36 - fontRendererObj.getStringWidth(s) / 2, curY, 0xFFFFFFFF);
                curY += fontRendererObj.FONT_HEIGHT + 5;
            }
        }
        // searchBar.drawTextBox();
        // Remember the hovered slot for the tooltip event handler.
        hoverSlot = getSlotAtPosition(mouseX, mouseY);
    }

    /**
     * Appends craft/smelt hints to the tooltip of the recipe slot currently
     * under the mouse.
     */
    @SubscribeEvent
    public void onItemTooltip(ItemTooltipEvent event) {
        if(hoverSlot != null && hoverSlot instanceof SlotRecipe && event.itemStack == hoverSlot.getStack()) {
            if(container.gotRecipeInfo() && container.canClickCraft((currentOffset * 3) + hoverSlot.getSlotIndex())) {
                if (container.isMissingTools()) {
                    event.toolTip.add("\u00a7c" + I18n.format("cookingforblockheads:missing_tools"));
                } else {
                    if (Keyboard.isKeyDown(Keyboard.KEY_LSHIFT)) {
                        event.toolTip.add("\u00a7a" + I18n.format("cookingforblockheads:click_to_craft_all"));
                    } else {
                        event.toolTip.add("\u00a7a" + I18n.format("cookingforblockheads:click_to_craft_one"));
                    }
                }
            } else if(container.gotRecipeInfo() && container.canClickSmelt((currentOffset * 3) + hoverSlot.getSlotIndex())) {
                if(container.isMissingOven()) {
                    event.toolTip.add("\u00a7c" + I18n.format("cookingforblockheads:missing_oven"));
                } else {
                    if (Keyboard.isKeyDown(Keyboard.KEY_LSHIFT)) {
                        event.toolTip.add("\u00a7a" + I18n.format("cookingforblockheads:click_to_smelt_all"));
                    } else {
                        event.toolTip.add("\u00a7a" + I18n.format("cookingforblockheads:click_to_smelt_one"));
                    }
                }
            } else {
                event.toolTip.add("\u00a7e" + I18n.format("cookingforblockheads:click_to_see_recipe"));
            }
        }
    }

    @Override
    public void drawScreen(int mouseX, int mouseY, float partialTicks) {
        super.drawScreen(mouseX, mouseY, partialTicks);
        // Draw tooltips for whichever enabled sort button is hovered.
        if(btnSortName.func_146115_a() && btnSortName.enabled) { // isMouseOver
            func_146283_a(btnSortName.getTooltipLines(), mouseX, mouseY); // drawHoveringText
        } else if(btnSortHunger.func_146115_a() && btnSortHunger.enabled) {// isMouseOver
            func_146283_a(btnSortHunger.getTooltipLines(), mouseX, mouseY);// drawHoveringText
        } else if(btnSortSaturation.func_146115_a() && btnSortSaturation.enabled) {// isMouseOver
            func_146283_a(btnSortSaturation.getTooltipLines(), mouseX, mouseY);// drawHoveringText
        }
    }

    /**
     * Clamps and applies the scroll offset (in rows), propagates it to the
     * container and refreshes the scrollbar.
     */
    public void setCurrentOffset(int currentOffset) {
        this.currentOffset = Math.max(0, Math.min(currentOffset, (int) Math.ceil(container.getAvailableRecipeCount() / 3f) - VISIBLE_ROWS));
        container.setScrollOffset(this.currentOffset);
        recalculateScrollBar();
    }

    // Returns the slot under the given screen position, or null.
    private Slot getSlotAtPosition(int x, int y) {
        for (int k = 0; k < inventorySlots.inventorySlots.size(); ++k) {
            Slot slot = (Slot) inventorySlots.inventorySlots.get(k);
            if(isMouseOverSlot(slot, x, y)) {
                return slot;
            }
        }
        return null;
    }

    private boolean isMouseOverSlot(Slot slotIn, int mouseX, int mouseY) {
        return func_146978_c(slotIn.xDisplayPosition, slotIn.yDisplayPosition, 16, 16, mouseX, mouseY);
    }

    // --- IMTModGuiContainer (MouseTweaks compatibility; tweaks disabled) ---

    @Override
    public int getAPIVersion() {
        return 1;
    }

    @Override
    public String getModName() {
        return "Cooking for Blockheads";
    }

    @Override
    public boolean isMouseTweaksDisabled() {
        return true;
    }

    @Override
    public boolean isWheelTweakDisabled() {
        return true;
    }

    @Override
    public boolean isCraftingOutputSlot(Object o, Object o1) {
        return false;
    }

    @Override
    public Object getModContainer() {
        return inventorySlots;
    }

    @Override
    public int getModSlotCount(Object o) {
        return inventorySlots.inventorySlots.size();
    }

    @Override
    public Object getModSlot(Object o, int i) {
        return inventorySlots.getSlot(i);
    }

    @Override
    public Object getModSelectedSlot(Object o, int i) {
        return null;
    }

    @Override
    public void clickModSlot(Object o, Object o1, int i, boolean b) {}

    @Override
    public void disableRMBDragIfRequired(Object o, Object o1, boolean b) {}

    public GuiButtonSort[] getSortButtons() {
        return sortButtons;
    }
}
|
#!/bin/bash
# Build the ROCm ROCR runtime, package it as a .deb and install it.
# Expects ROCM_BUILD_DIR, ROCM_INSTALL_DIR and ROCM_GIT_DIR in the environment.
set -e

sudo apt install -y libelf-dev

mkdir -p "$ROCM_BUILD_DIR/rocr-runtime"
cd "$ROCM_BUILD_DIR/rocr-runtime"
pushd .

# Modernized: $(...) instead of backticks, quoted variable expansions.
START_TIME=$(date +%s)

cmake \
    -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_INSTALL_PREFIX="$ROCM_INSTALL_DIR" \
    -DCPACK_PACKAGING_INSTALL_PREFIX="$ROCM_INSTALL_DIR" \
    -DCPACK_GENERATOR=DEB \
    -G Ninja \
    "$ROCM_GIT_DIR/ROCR-Runtime/src"
ninja
ninja package
sudo dpkg -i ./*.deb

END_TIME=$(date +%s)
# Arithmetic expansion instead of spawning `expr`.
EXECUTING_TIME=$((END_TIME - START_TIME))
echo "elapse : ${EXECUTING_TIME}s"
popd
|
package auth
import (
"encoding/json"
"errors"
"net/http"
"github.com/GodYao1995/Goooooo/internal/domain"
"github.com/GodYao1995/Goooooo/internal/pkg/consts"
"github.com/GodYao1995/Goooooo/internal/pkg/errno"
"github.com/GodYao1995/Goooooo/internal/pkg/res"
"github.com/GodYao1995/Goooooo/internal/pkg/session"
"github.com/gin-gonic/gin"
)
// AuthMiddleware returns a gin middleware that requires a logged-in user.
// It loads the session from the Redis store; requests without a session
// (no cookie or empty Redis entry) are rejected with 401. On success the
// decoded domain.UserSession is stored on the context under consts.SROREKEY
// for downstream handlers.
func AuthMiddleware(store *session.RedisStore) gin.HandlerFunc {
	return func(ctx *gin.Context) {
		resp := res.CommonResponse{Code: 1}
		var user domain.UserSession
		session, err := store.New(ctx.Request, consts.SESSIONID)
		if errors.Is(err, errno.ErrorRedisEmpty) || errors.Is(err, http.ErrNoCookie) {
			resp.Message = errno.ErrorUserNotLogin.Error()
			ctx.AbortWithStatusJSON(http.StatusUnauthorized, resp)
			return
		}
		// BUG FIX: guard the type assertion with comma-ok. A missing key or
		// non-[]byte value previously caused a panic in the handler chain.
		raw, ok := session.Values[consts.SROREKEY].([]byte)
		if !ok {
			ctx.Abort()
			return
		}
		if err := json.Unmarshal(raw, &user); err != nil {
			ctx.Abort()
			return
		}
		ctx.Set(consts.SROREKEY, user)
		ctx.Next()
	}
}
|
#!/bin/bash
# Scan all .c and .h files in the current directory for TODO markers
# (case-insensitive), printing file name (-H) and line number (-n) with
# color highlighting. Exits with grep's status: non-zero when none found.
grep -nH -in --colour=always todo *.c *.h
|
package reconciler
import (
"context"
"fmt"
"net"
"github.com/k8snetworkplumbingwg/whereabouts/pkg/allocate"
whereaboutsv1alpha1 "github.com/k8snetworkplumbingwg/whereabouts/pkg/api/v1alpha1"
"github.com/k8snetworkplumbingwg/whereabouts/pkg/logging"
"github.com/k8snetworkplumbingwg/whereabouts/pkg/storage"
"github.com/k8snetworkplumbingwg/whereabouts/pkg/storage/kubernetes"
"github.com/k8snetworkplumbingwg/whereabouts/pkg/types"
v1 "k8s.io/api/core/v1"
)
// ReconcileLooper holds the state for one reconciliation pass: the index of
// live whereabouts-managed pods, plus the per-pool and cluster-wide IP
// reservations whose owning pod no longer exists.
type ReconcileLooper struct {
	ctx                    context.Context
	k8sClient              kubernetes.Client
	liveWhereaboutsPods    map[string]podWrapper
	orphanedIPs            []OrphanedIPReservations
	orphanedClusterWideIPs []whereaboutsv1alpha1.OverlappingRangeIPReservation
}

// OrphanedIPReservations pairs an IP pool with the reservations in it that
// are no longer backed by a live pod.
type OrphanedIPReservations struct {
	Pool        storage.IPPool
	Allocations []types.IPReservation
}
// NewReconcileLooper builds a ReconcileLooper from current cluster state: it
// lists pods and IP pools, indexes the pods that hold whereabouts
// reservations, and records every orphaned IP reservation (both per-pool and
// cluster-wide) ready for the Reconcile* methods to clean up.
func NewReconcileLooper(kubeConfigPath string, ctx context.Context) (*ReconcileLooper, error) {
	logging.Debugf("NewReconcileLooper - Kubernetes config file located at: %s", kubeConfigPath)
	k8sClient, err := kubernetes.NewClient(kubeConfigPath)
	if err != nil {
		return nil, logging.Errorf("failed to instantiate the Kubernetes client: %+v", err)
	}
	logging.Debugf("successfully read the kubernetes configuration file located at: %s", kubeConfigPath)

	pods, err := k8sClient.ListPods()
	if err != nil {
		return nil, err
	}

	ipPools, err := k8sClient.ListIPPools(ctx)
	if err != nil {
		return nil, logging.Errorf("failed to retrieve all IP pools: %v", err)
	}

	// Only index pods that actually hold a whereabouts reservation.
	whereaboutsPodRefs := getPodRefsServedByWhereabouts(ipPools)
	looper := &ReconcileLooper{
		ctx:                 ctx,
		k8sClient:           *k8sClient,
		liveWhereaboutsPods: indexPods(pods, whereaboutsPodRefs),
	}

	if err := looper.findOrphanedIPsPerPool(ipPools); err != nil {
		return nil, err
	}

	if err := looper.findClusterWideIPReservations(); err != nil {
		return nil, err
	}
	return looper, nil
}
// findOrphanedIPsPerPool records, for every pool, the allocations whose pod
// is no longer alive. Reservations without a pod ref are logged and skipped.
func (rl *ReconcileLooper) findOrphanedIPsPerPool(ipPools []storage.IPPool) error {
	for _, pool := range ipPools {
		orphanIP := OrphanedIPReservations{
			Pool: pool,
		}
		for _, ipReservation := range pool.Allocations() {
			logging.Debugf("the IP reservation: %s", ipReservation)
			if ipReservation.PodRef == "" {
				_ = logging.Errorf("pod ref missing for Allocations: %s", ipReservation)
				continue
			}
			if !rl.isPodAlive(ipReservation.PodRef, ipReservation.IP.String()) {
				logging.Debugf("pod ref %s is not listed in the live pods list", ipReservation.PodRef)
				orphanIP.Allocations = append(orphanIP.Allocations, ipReservation)
			}
		}
		// Only pools that actually have orphans are recorded.
		if len(orphanIP.Allocations) > 0 {
			rl.orphanedIPs = append(rl.orphanedIPs, orphanIP)
		}
	}

	return nil
}
// isPodAlive reports whether the pod referenced by podRef is in the live
// whereabouts pod index AND currently owns the given IP.
//
// IMPROVED: the original iterated over the whole map comparing each key to
// podRef; a direct map lookup is equivalent (map keys are unique) and O(1).
func (rl ReconcileLooper) isPodAlive(podRef string, ip string) bool {
	livePod, found := rl.liveWhereaboutsPods[podRef]
	if !found {
		return false
	}

	livePodIPs := livePod.ips
	logging.Debugf(
		"pod reference %s matches allocation; Allocation IP: %s; PodIPs: %s",
		podRef,
		ip,
		livePodIPs)
	_, isFound := livePodIPs[ip]
	return isFound
}
// composePodRef builds the canonical "<namespace>/<name>" reference for a pod,
// the key format used throughout the reconciler.
func composePodRef(pod v1.Pod) string {
	return fmt.Sprintf("%s/%s", pod.GetNamespace(), pod.GetName())
}
// ReconcileIPPools releases every orphaned IP reservation found earlier,
// updates each pool's reserve list, and returns the IPs that were cleaned up.
func (rl ReconcileLooper) ReconcileIPPools() ([]net.IP, error) {
	// matchByPodRef returns the index of the reservation owned by podRef,
	// or -1 when no reservation matches.
	matchByPodRef := func(reservations []types.IPReservation, podRef string) int {
		for idx, v := range reservations {
			if v.PodRef == podRef {
				return idx
			}
		}
		return -1
	}

	var totalCleanedUpIps []net.IP
	for _, orphanedIP := range rl.orphanedIPs {
		currentIPReservations := orphanedIP.Pool.Allocations()
		podRefsToDeallocate := findOutPodRefsToDeallocateIPsFrom(orphanedIP)
		for _, podRef := range podRefsToDeallocate {
			var deallocatedIP net.IP
			var err error
			currentIPReservations, deallocatedIP, err = allocate.IterateForDeallocation(currentIPReservations, podRef, matchByPodRef)
			if err != nil {
				return nil, err
			}
			// BUG FIX: record every deallocated IP. The original appended
			// once per pool AFTER this loop, so only the last freed IP was
			// reported (and a nil IP was appended for pools with an empty
			// deallocation list).
			totalCleanedUpIps = append(totalCleanedUpIps, deallocatedIP)
		}

		logging.Debugf("Going to update the reserve list to: %+v", currentIPReservations)
		if err := orphanedIP.Pool.Update(rl.ctx, currentIPReservations); err != nil {
			return nil, logging.Errorf("failed to update the reservation list: %v", err)
		}
	}

	return totalCleanedUpIps, nil
}
// findClusterWideIPReservations lists all cluster-wide (overlapping) IP
// reservations and records those whose owning pod is no longer alive.
func (rl *ReconcileLooper) findClusterWideIPReservations() error {
	clusterWideIPReservations, err := rl.k8sClient.ListOverlappingIPs(rl.ctx)
	if err != nil {
		return logging.Errorf("failed to list all OverLappingIPs: %v", err)
	}
	for _, reservation := range clusterWideIPReservations {
		ip := reservation.GetName()
		podRef := reservation.Spec.PodRef
		if rl.isPodAlive(podRef, ip) {
			continue
		}
		logging.Debugf("pod ref %s is not listed in the live pods list", podRef)
		rl.orphanedClusterWideIPs = append(rl.orphanedClusterWideIPs, reservation)
	}
	return nil
}
// ReconcileOverlappingIPAddresses deletes every orphaned cluster-wide IP
// reservation found earlier. Individual deletion failures are collected
// and reported together at the end instead of aborting the loop.
func (rl ReconcileLooper) ReconcileOverlappingIPAddresses() error {
	var failedReconciledClusterWideIPs []string
	for _, orphan := range rl.orphanedClusterWideIPs {
		// Take the address of a per-iteration copy, matching the original
		// behavior of passing a pointer to the range variable.
		staleIP := orphan
		err := rl.k8sClient.DeleteOverlappingIP(rl.ctx, &staleIP)
		if err != nil {
			logging.Errorf("failed to remove cluster wide IP: %s", staleIP.GetName())
			failedReconciledClusterWideIPs = append(failedReconciledClusterWideIPs, staleIP.GetName())
			continue
		}
		logging.Debugf("removed stale overlappingIP allocation [%s]", staleIP.GetName())
	}
	if len(failedReconciledClusterWideIPs) != 0 {
		return logging.Errorf("could not reconcile cluster wide IPs: %v", failedReconciledClusterWideIPs)
	}
	return nil
}
// findOutPodRefsToDeallocateIPsFrom extracts the pod reference of every
// orphaned allocation in the given reservation set, preserving order.
func findOutPodRefsToDeallocateIPsFrom(orphanedIP OrphanedIPReservations) []string {
	var podRefs []string
	for i := range orphanedIP.Allocations {
		podRefs = append(podRefs, orphanedIP.Allocations[i].PodRef)
	}
	return podRefs
}
|
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package javaguide.async.controllers;
import play.mvc.Result;
import play.libs.F.Function;
import play.libs.F.Function0;
import play.libs.F.Promise;
import play.mvc.Controller;
/**
 * Documentation sample controller showing how to return an asynchronous
 * {@link Result} from a Play action.
 *
 * <p>NOTE: the {@code //#async} markers delimit the snippet that is
 * extracted verbatim into the Play documentation — keep the enclosed
 * code representative and unchanged in shape.
 */
public class Application extends Controller {
  //#async
  public static Promise<Result> index() {
    Promise<Integer> promiseOfInt = Promise.promise(
      new Function0<Integer>() {
        public Integer apply() {
          return intensiveComputation();
        }
      }
    );
    return promiseOfInt.map(
      new Function<Integer, Result>() {
        public Result apply(Integer i) {
          return ok("Got result: " + i);
        }
      }
    );
  }
  //#async

  /** Stand-in for real work; kept trivial so the sample compiles and runs. */
  public static int intensiveComputation() { return 2;}
}
|
# -*- coding: utf-8 -*-
#
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
from ingenico.connect.sdk.data_object import DataObject
from ingenico.connect.sdk.domain.definitions.amount_of_money import AmountOfMoney
from ingenico.connect.sdk.domain.payment.definitions.sdk_data_input import SdkDataInput
from ingenico.connect.sdk.domain.payment.definitions.three_d_secure_data import ThreeDSecureData
class AbstractThreeDSecure(DataObject):
    """3-D Secure authentication request data shared by payment products.

    Auto-generated from the Ingenico ePayments Connect API reference
    (https://epayments-api.developer-ingenico.com/s2sapi/v1/); regenerate
    rather than editing by hand. Each private class attribute backs the
    public property of the same name below.
    """

    # Backing fields for the properties; None means "not set" and the
    # field is omitted from the serialized dictionary.
    __authentication_amount = None
    __authentication_flow = None
    __challenge_canvas_size = None
    __challenge_indicator = None
    __exemption_request = None
    __prior_three_d_secure_data = None
    __sdk_data = None
    __skip_authentication = None
    __transaction_risk_level = None

    @property
    def authentication_amount(self):
        """
        Type: :class:`ingenico.connect.sdk.domain.definitions.amount_of_money.AmountOfMoney`
        """
        return self.__authentication_amount

    @authentication_amount.setter
    def authentication_amount(self, value):
        self.__authentication_amount = value

    @property
    def authentication_flow(self):
        """
        Type: str
        """
        return self.__authentication_flow

    @authentication_flow.setter
    def authentication_flow(self, value):
        self.__authentication_flow = value

    @property
    def challenge_canvas_size(self):
        """
        Type: str
        """
        return self.__challenge_canvas_size

    @challenge_canvas_size.setter
    def challenge_canvas_size(self, value):
        self.__challenge_canvas_size = value

    @property
    def challenge_indicator(self):
        """
        Type: str
        """
        return self.__challenge_indicator

    @challenge_indicator.setter
    def challenge_indicator(self, value):
        self.__challenge_indicator = value

    @property
    def exemption_request(self):
        """
        Type: str
        """
        return self.__exemption_request

    @exemption_request.setter
    def exemption_request(self, value):
        self.__exemption_request = value

    @property
    def prior_three_d_secure_data(self):
        """
        Type: :class:`ingenico.connect.sdk.domain.payment.definitions.three_d_secure_data.ThreeDSecureData`
        """
        return self.__prior_three_d_secure_data

    @prior_three_d_secure_data.setter
    def prior_three_d_secure_data(self, value):
        self.__prior_three_d_secure_data = value

    @property
    def sdk_data(self):
        """
        Type: :class:`ingenico.connect.sdk.domain.payment.definitions.sdk_data_input.SdkDataInput`
        """
        return self.__sdk_data

    @sdk_data.setter
    def sdk_data(self, value):
        self.__sdk_data = value

    @property
    def skip_authentication(self):
        """
        Type: bool
        """
        return self.__skip_authentication

    @skip_authentication.setter
    def skip_authentication(self, value):
        self.__skip_authentication = value

    @property
    def transaction_risk_level(self):
        """
        Type: str
        """
        return self.__transaction_risk_level

    @transaction_risk_level.setter
    def transaction_risk_level(self, value):
        self.__transaction_risk_level = value

    def to_dictionary(self):
        # Serialize only the fields that are set, under the API's camelCase
        # key names; nested objects serialize themselves recursively.
        dictionary = super(AbstractThreeDSecure, self).to_dictionary()
        if self.authentication_amount is not None:
            dictionary['authenticationAmount'] = self.authentication_amount.to_dictionary()
        if self.authentication_flow is not None:
            dictionary['authenticationFlow'] = self.authentication_flow
        if self.challenge_canvas_size is not None:
            dictionary['challengeCanvasSize'] = self.challenge_canvas_size
        if self.challenge_indicator is not None:
            dictionary['challengeIndicator'] = self.challenge_indicator
        if self.exemption_request is not None:
            dictionary['exemptionRequest'] = self.exemption_request
        if self.prior_three_d_secure_data is not None:
            dictionary['priorThreeDSecureData'] = self.prior_three_d_secure_data.to_dictionary()
        if self.sdk_data is not None:
            dictionary['sdkData'] = self.sdk_data.to_dictionary()
        if self.skip_authentication is not None:
            dictionary['skipAuthentication'] = self.skip_authentication
        if self.transaction_risk_level is not None:
            dictionary['transactionRiskLevel'] = self.transaction_risk_level
        return dictionary

    def from_dictionary(self, dictionary):
        # Populate this object from an API dictionary. Nested objects are
        # instantiated and filled via their own from_dictionary (which
        # returns the populated instance). Returns self for chaining.
        super(AbstractThreeDSecure, self).from_dictionary(dictionary)
        if 'authenticationAmount' in dictionary:
            if not isinstance(dictionary['authenticationAmount'], dict):
                raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['authenticationAmount']))
            value = AmountOfMoney()
            self.authentication_amount = value.from_dictionary(dictionary['authenticationAmount'])
        if 'authenticationFlow' in dictionary:
            self.authentication_flow = dictionary['authenticationFlow']
        if 'challengeCanvasSize' in dictionary:
            self.challenge_canvas_size = dictionary['challengeCanvasSize']
        if 'challengeIndicator' in dictionary:
            self.challenge_indicator = dictionary['challengeIndicator']
        if 'exemptionRequest' in dictionary:
            self.exemption_request = dictionary['exemptionRequest']
        if 'priorThreeDSecureData' in dictionary:
            if not isinstance(dictionary['priorThreeDSecureData'], dict):
                raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['priorThreeDSecureData']))
            value = ThreeDSecureData()
            self.prior_three_d_secure_data = value.from_dictionary(dictionary['priorThreeDSecureData'])
        if 'sdkData' in dictionary:
            if not isinstance(dictionary['sdkData'], dict):
                raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['sdkData']))
            value = SdkDataInput()
            self.sdk_data = value.from_dictionary(dictionary['sdkData'])
        if 'skipAuthentication' in dictionary:
            self.skip_authentication = dictionary['skipAuthentication']
        if 'transactionRiskLevel' in dictionary:
            self.transaction_risk_level = dictionary['transactionRiskLevel']
        return self
|
<reponame>trunksbomb/Cyclic
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (C) 2014-2018 <NAME> (aka Lothrazar)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.lothrazar.cyclicmagic.gui.core;
import com.lothrazar.cyclicmagic.util.Const;
import com.lothrazar.cyclicmagic.util.Const.ScreenSize;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.Slot;
/**
 * Base container shared by the mod's GUIs: tracks the screen size used for
 * slot layout and wires up the standard 3x9 player inventory plus hotbar.
 */
public class ContainerBase extends Container {

  private Const.ScreenSize screenSize = ScreenSize.STANDARD;

  protected void setScreenSize(Const.ScreenSize ss) {
    this.screenSize = ss;
  }

  public Const.ScreenSize getScreenSize() {
    return screenSize;
  }

  @Override
  public boolean canInteractWith(EntityPlayer playerIn) {
    return true; // no distance/ownership restriction for these GUIs
  }

  /** Adds the 3x9 main inventory slots, then delegates the hotbar row. */
  protected void bindPlayerInventory(InventoryPlayer inventoryPlayer) {
    for (int row = 0; row < 3; row++) {
      for (int col = 0; col < 9; col++) {
        int slotIndex = col + row * 9 + 9; // +9 skips the hotbar indices
        int x = getScreenSize().playerOffsetX() + col * Const.SQ;
        int y = getScreenSize().playerOffsetY() + row * Const.SQ;
        addSlotToContainer(new Slot(inventoryPlayer, slotIndex, x, y));
      }
    }
    bindPlayerHotbar(inventoryPlayer);
  }

  /** Adds the 9 hotbar slots below the main inventory grid. */
  protected void bindPlayerHotbar(InventoryPlayer inventoryPlayer) {
    int hotbarY = getScreenSize().playerOffsetY() + Const.PAD / 2 + 3 * Const.SQ;
    for (int slot = 0; slot < 9; slot++) {
      int x = getScreenSize().playerOffsetX() + slot * Const.SQ;
      addSlotToContainer(new Slot(inventoryPlayer, slot, x, hotbarY));
    }
  }
}
|
class Matrix:
    """A minimal matrix backed by a list of row lists.

    Elementwise operations return plain nested lists (not Matrix
    instances), matching the original interface.

    Fix: matrix/matrix operations now validate shapes. Previously a
    shape mismatch made ``zip`` silently truncate rows/columns and
    return a wrong-sized result; now a ``ValueError`` is raised.
    """

    def __init__(self, data):
        # data: list of equal-length row lists (not copied).
        self.data = data

    def __repr__(self):
        return str(self.data)

    def shape(self):
        """Return (rows, columns) of the matrix."""
        return (len(self.data), len(self.data[0]))

    def _require_same_shape(self, other):
        # Guard against silent truncation by zip() on mismatched shapes.
        if self.shape() != other.shape():
            raise ValueError(
                'shape mismatch: {} vs {}'.format(self.shape(), other.shape()))

    def add_scalar(self, scalar):
        """Return self.data with scalar added to every entry."""
        return [[v + scalar for v in row] for row in self.data]

    def add_matrix(self, other):
        """Return the elementwise sum with ``other`` (same shape required)."""
        self._require_same_shape(other)
        return [[v1 + v2 for (v1, v2) in zip(row1, row2)] for (row1, row2) in zip(self.data, other.data)]

    def subtract_scalar(self, scalar):
        """Return self.data with scalar subtracted from every entry."""
        return [[v - scalar for v in row] for row in self.data]

    def subtract_matrix(self, other):
        """Return the elementwise difference with ``other`` (same shape required)."""
        self._require_same_shape(other)
        return [[v1 - v2 for (v1, v2) in zip(row1, row2)] for (row1, row2) in zip(self.data, other.data)]

    def multiply_scalar(self, scalar):
        """Return self.data with every entry multiplied by scalar."""
        return [[v * scalar for v in row] for row in self.data]

    def transpose(self):
        """Return the transposed matrix as a list of row lists."""
        return [list(v) for v in list(zip(*self.data))]
def normalize(lst):
    """Scale each value relative to the list maximum.

    The maximum maps to 0.0 and every other value to its signed
    relative distance from that maximum: (value - max) / max.
    """
    peak = max(lst)
    return [(value - peak) / peak for value in lst]


lst = [7, 8, 10]
normalized_values = normalize(lst)
print(normalized_values)
#!/bin/bash
set -e
# Authenticate gcloud with a service-account key from the resource source
# config and fetch kubectl credentials for the named GKE cluster.
#   $1 - path to the concourse payload JSON
#   $2 - path to the resource source directory (unused here)
# Fix: the key is now written with '>' instead of '>>'. Appending meant a
# pre-existing /gcloud.json (e.g. on a re-run) ended up as corrupt JSON.
setup_gcp_kubernetes() {
  payload=$1
  source=$2

  gcloud_service_account_key_file=$(jq -r '.source.gcloud_service_account_key_file // ""' < $payload)
  gcloud_project_name=$(jq -r '.source.gcloud_project_name // ""' < $payload)
  gcloud_k8s_cluster_name=$(jq -r '.source.gcloud_k8s_cluster_name // ""' < $payload)
  gcloud_k8s_zone=$(jq -r '.source.gcloud_k8s_zone // ""' < $payload)

  if [ -z "$gcloud_service_account_key_file" ] || [ -z "$gcloud_project_name" ] || [ -z "$gcloud_k8s_cluster_name" ] || [ -z "$gcloud_k8s_zone" ]; then
    echo "invalid payload for gcloud auth, please pass all required params"
    exit 1
  fi

  # Despite the name, this variable holds the key file *content*, not a path.
  echo "$gcloud_service_account_key_file" > /gcloud.json
  # Plain scalar assignment; the original array syntax ( $(...) ) only
  # worked by accident for whitespace-free emails.
  gcloud_service_account_name=$(jq -r ".client_email" < /gcloud.json)

  gcloud auth activate-service-account "${gcloud_service_account_name}" --key-file /gcloud.json
  gcloud config set account "${gcloud_service_account_name}"
  gcloud config set project "${gcloud_project_name}"
  gcloud container clusters get-credentials "${gcloud_k8s_cluster_name}" --zone "${gcloud_k8s_zone}"

  kubectl version
}
# Configure kubectl for the target cluster.
#   $1 - path to the concourse payload JSON
#   $2 - path to the resource source directory
# Precedence: an explicit kubeconfig file (params.kubeconfig_path) wins;
# otherwise a config is assembled from source.cluster_url plus either a
# token (file or inline) or a client certificate/key pair.
setup_kubernetes() {
  payload=$1
  source=$2
  mkdir -p /root/.kube
  kubeconfig_path=$(jq -r '.params.kubeconfig_path // ""' < $payload)
  absolute_kubeconfig_path="${source}/${kubeconfig_path}"
  if [ -f "$absolute_kubeconfig_path" ]; then
    # A ready-made kubeconfig was provided in the build inputs; use it as-is.
    cp "$absolute_kubeconfig_path" "/root/.kube/config"
  else
    # Setup kubectl
    cluster_url=$(jq -r '.source.cluster_url // ""' < $payload)
    if [ -z "$cluster_url" ]; then
      echo "invalid payload (missing cluster_url)"
      exit 1
    fi
    if [[ "$cluster_url" =~ https.* ]]; then
      # TLS cluster: need the cluster CA plus either a token or a cert/key.
      cluster_ca=$(jq -r '.source.cluster_ca // ""' < $payload)
      admin_key=$(jq -r '.source.admin_key // ""' < $payload)
      admin_cert=$(jq -r '.source.admin_cert // ""' < $payload)
      token=$(jq -r '.source.token // ""' < $payload)
      token_path=$(jq -r '.params.token_path // ""' < $payload)
      ca_path="/root/.kube/ca.pem"
      # CA/cert/key values are base64-encoded in the source config.
      echo "$cluster_ca" | base64 -d > $ca_path
      kubectl config set-cluster default --server=$cluster_url --certificate-authority=$ca_path
      if [ -f "$source/$token_path" ]; then
        # Token supplied as a file in the build inputs.
        kubectl config set-credentials admin --token=$(cat $source/$token_path)
      elif [ ! -z "$token" ]; then
        # Token supplied inline in the source config.
        kubectl config set-credentials admin --token=$token
      else
        # Fall back to client-certificate authentication.
        mkdir -p /root/.kube
        key_path="/root/.kube/key.pem"
        cert_path="/root/.kube/cert.pem"
        echo "$admin_key" | base64 -d > $key_path
        echo "$admin_cert" | base64 -d > $cert_path
        kubectl config set-credentials admin --client-certificate=$cert_path --client-key=$key_path
      fi
      kubectl config set-context default --cluster=default --user=admin
    else
      # Plain HTTP cluster: no credentials required.
      kubectl config set-cluster default --server=$cluster_url
      kubectl config set-context default --cluster=default
    fi
    kubectl config use-context default
  fi
  kubectl version
}
# Write the helm client TLS material (CA, key, cert) from the source
# config into /root/.helm so later helm invocations can use --tls.
# No-op unless source.tls_enabled is true; tls_enabled and tillerless
# are mutually exclusive. Relies on $payload set by the caller.
setup_tls() {
  tls_enabled=$(jq -r '.source.tls_enabled // "false"' < $payload)
  tillerless=$(jq -r '.source.tillerless // "false"' < $payload)
  if [ "$tls_enabled" = true ]; then
    if [ "$tillerless" = true ]; then
      echo "Setting both tls_enabled and tillerless is not supported"
      exit 1
    fi
    helm_ca=$(jq -r '.source.helm_ca // ""' < $payload)
    helm_key=$(jq -r '.source.helm_key // ""' < $payload)
    helm_cert=$(jq -r '.source.helm_cert // ""' < $payload)
    if [ -z "$helm_ca" ]; then
      echo "invalid payload (missing helm_ca)"
      exit 1
    fi
    if [ -z "$helm_key" ]; then
      echo "invalid payload (missing helm_key)"
      exit 1
    fi
    if [ -z "$helm_cert" ]; then
      echo "invalid payload (missing helm_cert)"
      exit 1
    fi
    # Values are written as-is (PEM text, not base64) to the default
    # locations the helm client reads for --tls.
    helm_ca_cert_path="/root/.helm/ca.pem"
    helm_key_path="/root/.helm/key.pem"
    helm_cert_path="/root/.helm/cert.pem"
    echo "$helm_ca" > $helm_ca_cert_path
    echo "$helm_key" > $helm_key_path
    echo "$helm_cert" > $helm_cert_path
  fi
}
# Initialize the helm client (and optionally tiller, server-side).
#   $1 - path to the concourse payload JSON
#   $2 - path to the resource source directory
# Fix: when init_server runs with TLS, the CA passed to --tls-ca-cert was
# $tiller_key_path (the tiller *private key*), which cannot verify the
# tiller certificate. It now passes $helm_ca_cert_path, the CA written by
# setup_tls and declared just above the call.
setup_helm() {
  # $1 is the name of the payload file
  # $2 is the name of the source directory
  init_server=$(jq -r '.source.helm_init_server // "false"' < $1)

  # Compute tiller_namespace as follows:
  # If kubeconfig_tiller_namespace is set, then tiller_namespace is the namespace from the kubeconfig
  # If tiller_namespace is set and it is the name of a file, then tiller_namespace is the contents of the file
  # If tiller_namespace is set and it is not the name of a file, then tiller_namespace is the literal
  # Otherwise tiller_namespace defaults to kube-system
  kubeconfig_tiller_namespace=$(jq -r '.source.kubeconfig_tiller_namespace // "false"' <$1)
  if [ "$kubeconfig_tiller_namespace" = "true" ]
  then
    tiller_namespace=$(kubectl config view --minify -ojson | jq -r .contexts[].context.namespace)
  else
    tiller_namespace=$(jq -r '.source.tiller_namespace // "kube-system"' < $1)
    if [ "$tiller_namespace" != "kube-system" -a -f "$2/$tiller_namespace" ]
    then
      tiller_namespace=$(cat "$2/$tiller_namespace")
    fi
  fi

  tillerless=$(jq -r '.source.tillerless // "false"' < $payload)
  tls_enabled=$(jq -r '.source.tls_enabled // "false"' < $payload)
  history_max=$(jq -r '.source.helm_history_max // "0"' < $1)
  stable_repo=$(jq -r '.source.stable_repo // ""' < $payload)

  if [ "$tillerless" = true ]; then
    echo "Using tillerless helm"
    helm_bin="helm tiller run ${tiller_namespace} -- helm"
  else
    helm_bin="helm"
  fi

  if [ -n "$stable_repo" ]; then
    echo "Stable Repo URL : ${stable_repo}"
    stable_repo="--stable-repo-url=${stable_repo}"
  fi

  if [ "$init_server" = true ]; then
    if [ "$tillerless" = true ]; then
      echo "Setting both init_server and tillerless is not supported"
      exit 1
    fi
    tiller_service_account=$(jq -r '.source.tiller_service_account // "default"' < $1)
    helm_init_wait=$(jq -r '.source.helm_init_wait // "false"' <$1)
    helm_init_wait_arg=""
    if [ "$helm_init_wait" = "true" ]; then
      helm_init_wait_arg="--wait"
    fi
    if [ "$tls_enabled" = true ]; then
      tiller_key=$(jq -r '.source.tiller_key // ""' < $payload)
      tiller_cert=$(jq -r '.source.tiller_cert // ""' < $payload)
      if [ -z "$tiller_key" ]; then
        echo "invalid payload (missing tiller_key)"
        exit 1
      fi
      if [ -z "$tiller_cert" ]; then
        echo "invalid payload (missing tiller_cert)"
        exit 1
      fi
      tiller_key_path="/root/.helm/tiller_key.pem"
      tiller_cert_path="/root/.helm/tiller_cert.pem"
      helm_ca_cert_path="/root/.helm/ca.pem"
      echo "$tiller_key" > $tiller_key_path
      echo "$tiller_cert" > $tiller_cert_path
      # Verify tiller's cert against the CA, not against the tiller key.
      $helm_bin init --tiller-tls --tiller-tls-cert $tiller_cert_path --tiller-tls-key $tiller_key_path --tiller-tls-verify --tls-ca-cert $helm_ca_cert_path --tiller-namespace=$tiller_namespace --service-account=$tiller_service_account --history-max=$history_max $stable_repo --upgrade $helm_init_wait_arg
    else
      $helm_bin init --tiller-namespace=$tiller_namespace --service-account=$tiller_service_account --history-max=$history_max $stable_repo --upgrade $helm_init_wait_arg
    fi
    wait_for_service_up tiller-deploy 10
  else
    # Client-only init; talk to an existing tiller (optionally via HELM_HOST).
    export HELM_HOST=$(jq -r '.source.helm_host // ""' < $1)
    $helm_bin init -c --tiller-namespace $tiller_namespace $stable_repo > /dev/null
  fi

  tls_enabled_arg=""
  if [ "$tls_enabled" = true ]; then
    tls_enabled_arg="--tls"
  fi
  $helm_bin version $tls_enabled_arg --tiller-namespace $tiller_namespace

  # Optionally purge every existing release before the resource runs.
  helm_setup_purge_all=$(jq -r '.source.helm_setup_purge_all // "false"' <$1)
  if [ "$helm_setup_purge_all" = "true" ]; then
    local release
    for release in $(helm ls -aq --tiller-namespace $tiller_namespace )
    do
      helm delete $tls_enabled_arg --purge "$release" --tiller-namespace $tiller_namespace
    done
  fi
}
# Poll until the given service has at least one ready endpoint, failing
# the build after TIMEOUT one-second attempts. Reads $tiller_namespace
# set by setup_helm. Iterative form of the original tail recursion:
# same number of checks and sleeps, same failure message and exit code.
wait_for_service_up() {
  SERVICE=$1
  TIMEOUT=$2
  while [ "$TIMEOUT" -gt "0" ]; do
    RESULT=`kubectl get endpoints --namespace=$tiller_namespace $SERVICE -o jsonpath={.subsets[].addresses[].targetRef.name} 2> /dev/null || true`
    if [ -n "$RESULT" ]; then
      return 0
    fi
    sleep 1
    TIMEOUT=$((TIMEOUT - 1))
  done
  echo "Service $SERVICE was not ready in time"
  exit 1
}
# Install configured helm plugins and chart repositories.
#   $1 - path to the concourse payload JSON
# Fix: plversionflag is now reset on every loop iteration. Previously a
# plugin without a version that followed a versioned one inherited the
# stale "--version X" flag and installed the wrong release.
setup_repos() {
  repos=$(jq -c '(try .source.repos[] catch [][])' < $1)
  plugins=$(jq -c '(try .source.plugins[] catch [][])' < $1)

  kubeconfig_tiller_namespace=$(jq -r '.source.kubeconfig_tiller_namespace // "false"' <$1)
  if [ "$kubeconfig_tiller_namespace" = "true" ]
  then
    tiller_namespace=$(kubectl config view --minify -ojson | jq -r .contexts[].context.namespace)
  else
    tiller_namespace=$(jq -r '.source.tiller_namespace // "kube-system"' < $1)
  fi

  # Split the jq -c output on newlines (one JSON object per line).
  local IFS=$'\n'
  for pl in $plugins; do
    plurl=$(echo $pl | jq -cr '.url')
    plversion=$(echo $pl | jq -cr '.version // ""')
    # Reset per plugin so an unversioned entry never reuses a prior flag.
    plversionflag=""
    if [ -n "$plversion" ]; then
      plversionflag="--version $plversion"
    fi
    helm plugin install $plurl $plversionflag
  done

  for r in $repos; do
    name=$(echo $r | jq -r '.name')
    url=$(echo $r | jq -r '.url')
    username=$(echo $r | jq -r '.username // ""')
    password=$(echo $r | jq -r '.password // ""')
    echo Installing helm repository $name $url
    if [[ -n "$username" && -n "$password" ]]; then
      helm repo add $name $url --tiller-namespace $tiller_namespace --username $username --password $password
    else
      helm repo add $name $url --tiller-namespace $tiller_namespace
    fi
  done
  helm repo update
}
# Entry point: wire up kubectl (gcloud- or certificate-based), upgrade any
# server-side tiller, then initialize helm TLS material and repositories.
#   $1 - path to the concourse payload JSON
#   $2 - path to the resource source directory
setup_resource() {
  tracing_enabled=$(jq -r '.source.tracing_enabled // "false"' < $1)
  if [ "$tracing_enabled" = "true" ]; then
    # Echo every command for debugging when source.tracing_enabled is set.
    set -x
  fi
  gcloud_cluster_auth=$(jq -r '.source.gcloud_cluster_auth // "false"' < $1)
  if [ "$gcloud_cluster_auth" = "true" ]; then
    echo "Initializing kubectl access using gcloud service account file"
    setup_gcp_kubernetes $1 $2
  else
    echo "Initializing kubectl using certificates"
    setup_kubernetes $1 $2
  fi
  # Best-effort tiller upgrade; '|| true' keeps tillerless setups working.
  echo "Updating helm in server side..."
  helm init --upgrade || true
  kubectl rollout status deployment tiller-deploy -n kube-system || true
  echo "Initializing helm..."
  setup_tls $1
  setup_helm $1 $2
  setup_repos $1
}
|
from typing import cast, List
import pandas as pd
from sqlalchemy import Integer
from sqlalchemy.sql.schema import Column
from datapipe.metastore import MetaTable
from datapipe.store.database import DBConn, MetaKey
from datapipe.types import DataDF, DataSchema, MetaSchema
import pytest
from pytest_cases import parametrize_with_cases, parametrize
from .util import assert_df_equal
class CasesTestDF:
    """pytest-cases case collection for MetaTable tests.

    Every case returns a tuple
    ``(index_cols, primary_schema, meta_schema, test_df)`` with N rows,
    parametrized over N in {10, 100, 1000}.
    """

    @parametrize('N', [pytest.param(N) for N in [10, 100, 1000]])
    def case_single_idx(self, N):
        # One primary-key column, no meta keys.
        return (
            ['id'],
            [
                Column('id', Integer, primary_key=True),
            ],
            [],
            cast(DataDF, pd.DataFrame(
                {
                    'id': range(N),
                    'a': range(N)
                },
            ))
        )

    @parametrize('N', [pytest.param(N) for N in [10, 100, 1000]])
    def case_single_idx_with_meta(self, N):
        # One primary-key column plus a single MetaKey column.
        return (
            ['id'],
            [
                Column('id', Integer, primary_key=True),
            ],
            [
                Column('item_id', Integer, MetaKey()),
            ],
            cast(DataDF, pd.DataFrame(
                {
                    'id': range(N),
                    'item_id': range(N),
                    'a': range(N)
                },
            ))
        )

    @parametrize('N', [pytest.param(N) for N in [10, 100, 1000]])
    def case_multi_idx(self, N):
        # Composite (two-column) primary key, no meta keys.
        return (
            ['id1', 'id2'],
            [
                Column('id1', Integer, primary_key=True),
                Column('id2', Integer, primary_key=True)
            ],
            [],
            cast(DataDF, pd.DataFrame(
                {
                    'id1': range(N),
                    'id2': range(N),
                    'a': range(N)
                },
            ))
        )

    @parametrize('N', [pytest.param(N) for N in [10, 100, 1000]])
    def case_multi_idx_with_meta(self, N):
        # Composite primary key plus two MetaKey columns.
        return (
            ['id1', 'id2'],
            [
                Column('id1', Integer, primary_key=True),
                Column('id2', Integer, primary_key=True)
            ],
            [
                Column('item_id', Integer, MetaKey()),
                Column('product_id', Integer, MetaKey()),
            ],
            cast(DataDF, pd.DataFrame(
                {
                    'id1': range(N),
                    'id2': range(N),
                    'item_id': range(N),
                    'product_id': range(N),
                    'a': range(N)
                },
            ))
        )
@parametrize_with_cases('index_cols,primary_schema,meta_schema,test_df', cases=CasesTestDF, import_fixtures=True)
def test_insert_rows(
    dbconn: DBConn,
    index_cols: List[str],
    primary_schema: DataSchema,
    meta_schema: MetaSchema,
    test_df: DataDF
):
    """Storing a brand-new chunk must classify every row as 'new'.

    After inserting the chunk's metadata, the stored metadata must carry
    the same index/meta-key values as the input frame and a hash per row.
    """
    mt = MetaTable(
        name='test',
        dbconn=dbconn,
        primary_schema=primary_schema,
        meta_schema=meta_schema
    )
    # All key columns tracked in metadata: primary keys plus meta keys.
    keys = set(mt.primary_keys) | set(mt.meta_keys.keys())
    new_df, changed_df, new_meta_df, changed_meta_df = mt.get_changes_for_store_chunk(test_df)
    # Fresh table: everything is new, nothing is changed.
    assert_df_equal(new_df, test_df, index_cols=index_cols)
    assert(len(new_df) == len(test_df))
    assert(len(new_meta_df) == len(test_df))
    assert(len(changed_df) == 0)
    assert(len(changed_meta_df) == 0)
    assert_df_equal(new_meta_df[index_cols], new_df[index_cols], index_cols=index_cols)
    assert_df_equal(new_meta_df[keys], new_df[keys], index_cols=index_cols)
    mt.insert_meta_for_store_chunk(new_meta_df=new_meta_df)
    mt.update_meta_for_store_chunk(changed_meta_df=changed_meta_df)
    # The persisted metadata must round-trip the keys and have row hashes.
    meta_df = mt.get_metadata()
    assert_df_equal(meta_df[index_cols], test_df[index_cols], index_cols=index_cols)
    assert_df_equal(meta_df[keys], test_df[keys], index_cols=index_cols)
    assert(not meta_df['hash'].isna().any())
@parametrize_with_cases('index_cols,primary_schema,meta_schema,test_df', cases=CasesTestDF, import_fixtures=True)
def test_get_metadata(
    dbconn: DBConn,
    index_cols: List[str],
    primary_schema: DataSchema,
    meta_schema: MetaSchema,
    test_df: DataDF
):
    """get_metadata filtered by an index subset must return exactly those rows."""
    mt = MetaTable(
        name='test',
        dbconn=dbconn,
        primary_schema=primary_schema,
        meta_schema=meta_schema
    )
    new_df, changed_df, new_meta_df, changed_meta_df = mt.get_changes_for_store_chunk(test_df)
    mt.insert_meta_for_store_chunk(new_meta_df=new_meta_df)
    # Query metadata for just the first two rows' index values.
    part_df = test_df.iloc[0:2]
    part_idx = part_df[index_cols]
    keys = set(mt.primary_keys) | set(mt.meta_keys.keys())
    assert_df_equal(
        mt.get_metadata(part_idx)[index_cols],
        part_idx,
        index_cols=index_cols
    )
    assert_df_equal(
        mt.get_metadata(part_idx)[keys],
        part_df[keys],
        index_cols=index_cols
    )
|
<gh_stars>0
// Barrel module: re-exports this package's public surface from one place.
import Scalar from './Scalar';
import ScalarType from './ScalarType';
import Variable from './Variable';

export {Scalar, ScalarType, Variable};
<filename>python_modules/dagster/dagster/core/storage/event_log/sqlite/alembic/versions/72686963a802_0_11_0_db_text_to_db_string_unique_.py
"""0.11.0 db.Text to MySQLCompatabilityTypes.UniqueText for MySQL Support
Revision ID: 72686963a802
Revises: 6e1f65d7<PASSWORD>
Create Date: 2021-03-11 15:02:29.174707
"""
import sqlalchemy as sa
from alembic import op
from dagster.core.storage.migration.utils import has_table
from dagster.core.storage.sql import MySQLCompatabilityTypes
# pylint: disable=no-member
# revision identifiers, used by Alembic.
revision = "72686963a802"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
    """Widen unique lookup columns to the MySQL-safe unique-text type."""
    # Same two table/column pairs as before, expressed as one loop.
    for table_name, column_name in (
        ("secondary_indexes", "name"),
        ("asset_keys", "asset_key"),
    ):
        if has_table(table_name):
            with op.batch_alter_table(table_name) as batch_op:
                batch_op.alter_column(
                    column_name,
                    type_=MySQLCompatabilityTypes.UniqueText,
                    existing_type=sa.Text,
                )
def downgrade():
    """Revert the unique lookup columns back to plain db.Text."""
    # Mirror of upgrade(): same tables/columns, types swapped.
    for table_name, column_name in (
        ("secondary_indexes", "name"),
        ("asset_keys", "asset_key"),
    ):
        if has_table(table_name):
            with op.batch_alter_table(table_name) as batch_op:
                batch_op.alter_column(
                    column_name,
                    type_=sa.Text,
                    existing_type=MySQLCompatabilityTypes.UniqueText,
                )
|
package org.glamey.training.io.cache;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

import java.util.concurrent.TimeUnit;

import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
* @author zhouyang.zhou. 2017.06.05.16.
*/
public class FifoCacheTest {
private final FifoCache fifoCache = new FifoCache(100);
@BeforeMethod
public void setUp() throws Exception {
for (int i = 0; i < 101; i++) {
fifoCache.put(i, "v_" + i);
}
}
@Test
public void testCache() throws InterruptedException {
assertTrue(fifoCache.get(0) == null);
assertEquals("v_1", fifoCache.get(1));
assertEquals("v_100", fifoCache.get(100));
fifoCache.remove(99);
assertTrue(fifoCache.get(99) == null);
}
@AfterMethod
public void setDown() {
fifoCache.clear();
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.