text stringlengths 1 1.05M |
|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package freemarker.debug.impl;
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.Collection;
import java.util.List;
import freemarker.debug.Breakpoint;
import freemarker.debug.Debugger;
import freemarker.debug.DebuggerListener;
/**
*/
class RmiDebuggerImpl
extends
UnicastRemoteObject
implements
Debugger {
private static final long serialVersionUID = 1L;
private final RmiDebuggerService service;
protected RmiDebuggerImpl(RmiDebuggerService service) throws RemoteException {
this.service = service;
}
public void addBreakpoint(Breakpoint breakpoint) {
service.addBreakpoint(breakpoint);
}
public Object addDebuggerListener(DebuggerListener listener) {
return service.addDebuggerListener(listener);
}
public List getBreakpoints() {
return service.getBreakpointsSpi();
}
public List getBreakpoints(String templateName) {
return service.getBreakpointsSpi(templateName);
}
public Collection getSuspendedEnvironments() {
return service.getSuspendedEnvironments();
}
public void removeBreakpoint(Breakpoint breakpoint) {
service.removeBreakpoint(breakpoint);
}
public void removeDebuggerListener(Object id) {
service.removeDebuggerListener(id);
}
public void removeBreakpoints() {
service.removeBreakpoints();
}
public void removeBreakpoints(String templateName) {
service.removeBreakpoints(templateName);
}
}
|
import java.util.Scanner;
class Ejercicio03{
    // Shared reader over standard input for the whole exercise.
    static Scanner teclado=new Scanner(System.in);

    /**
     * Asks for sex (A/B) and age, then reports which vaccine type
     * (A, B or C) the person receives. Any input error is caught and
     * reported instead of propagating.
     */
    static void ejercicio03FTT(){
        System.out.println("Buenos dias, bienvenido a la campaña de vacuancion");
        try{
            System.out.println("Por favor introdusca en mayuscula la letra que defina su sexo:\nA=Hombre\nB=Mujer");
            String sexo = teclado.next();
            System.out.println("Por favor introdusca su edad:");
            int años = teclado.nextInt();
            String vacuna;
            if (sexo.equals("A")) {
                // Men: type C at 70+, otherwise type A regardless of age.
                vacuna = (años >= 70) ? "C" : "A";
            } else if (sexo.equals("B")) {
                // Women: C at 70+, A under 16, B for ages 16-69.
                if (años >= 70) {
                    vacuna = "C";
                } else if (años < 16) {
                    vacuna = "A";
                } else {
                    vacuna = "B";
                }
            } else {
                vacuna = "Error en definicion de sexo, por favor vuelva a intertarlo usando la la letra mayuscula...";
            }
            System.out.println("De acuerdo a los datos ingresados usted recibira la vacuna tipo: "+vacuna);
        }catch(Exception e){
            System.out.println("Error en el ingreso de datos!!! \nPor favor vuelva a intertarlo...");
        }
    }

    public static void main(String[] arg){
        ejercicio03FTT();
    }
}
|
//
// SourceTimeFunctionNetCDF.cpp
// AxiSEM3D
//
// Created by <NAME> on 8/21/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
// source-time function from NetCDF
#include "SourceTimeFunctionNetCDF.hpp"
#include "NetCDF_Reader.hpp"
// construct derived
// Resolves the NetCDF variable IDs, validates the STF data layout
// (time steps x (nu_1 * nCols)) and allocates the read buffers.
template <typename T, int ndim, int nCols> void
SourceTimeFunctionNetCDF<T, ndim, nCols>::constructDerived(int bufferSize) {
    // variable id: time axis plus the real/imaginary pattern parts
    mTimeID = mReader->getVariableID("time_points", mTimePoints);
    mPatternReID = mReader->getVariableID(mVariableName + "_RE", sReadRe);
    mPatternImID = mReader->getVariableID(mVariableName + "_IM", sReadIm);
    // dimensions
    std::vector<numerical::Int> dims;
    mReader->getVariableDimensions(mPatternReID, dims);
    if (dims.size() != 2) {
        throw std::runtime_error("SourceTimeFunctionNetCDF::"
                                 "SourceTimeFunctionNetCDF || "
                                 "STF data must have 2 dimensions.");
    }
    mTotalTimeStepsInFile = (int)dims[0];
    mNu_1 = (int)(dims[1] / nCols);
    // the second dimension must be an exact multiple of nCols, and the
    // file needs at least two time steps
    if (dims[1] % nCols != 0 || mNu_1 == 0 || mTotalTimeStepsInFile < 2) {
        throw std::runtime_error("SourceTimeFunctionNetCDF::"
                                 "SourceTimeFunctionNetCDF || "
                                 "Invalid dimensions of STF data.");
    }
    // buffers
    if (!mAlignedToTimeStep && bufferSize < 2) {
        // bug fix: the guard rejects bufferSize < 2, so the message must
        // say "at least 2" (it previously claimed "greater than 2")
        throw std::runtime_error("SourceTimeFunctionNetCDF::"
                                 "SourceTimeFunctionNetCDF || "
                                 "Buffer size must be at least 2 "
                                 "for unaligned STF.");
    }
    // must fill time with min double so every real time point compares
    // greater than an unfilled slot
    mTimePoints.assign(bufferSize, std::numeric_limits<double>::lowest());
    mPatterns.resize(bufferSize);
    for (CTMatXND &pattern: mPatterns) {
        pattern.resize(mNu_1, nCols);
    }
    // static buffer for reading; only grow, never shrink (shared buffer)
    int totalSize = bufferSize * (int)dims[1];
    if (sReadRe.cols() < totalSize) {
        sReadRe.resize(totalSize);
        sReadIm.resize(totalSize);
    }
}
// load next buffer chunk
// Reads the next window of time points and spectral patterns from the
// NetCDF file into the member buffers. For unaligned STFs the last time
// step of the previous window is re-read so consecutive windows overlap
// by one sample (presumably for interpolation across the seam — confirm).
template <typename T, int ndim, int nCols> void
SourceTimeFunctionNetCDF<T, ndim, nCols>::loadNextBufferChunk() {
    // check remaining; mTimeStepOfPatternLast starts at -1 before any read
    int haveBeenRead = mTimeStepOfPatternLast + 1;
    int remainInFile = mTotalTimeStepsInFile - haveBeenRead;
    if (remainInFile == 0) {
        throw std::runtime_error("SourceTimeFunctionNetCDF::"
                                 "loadNextBufferChunk || EOF reached.");
    }
    // update start of the new window
    if (mAlignedToTimeStep) {
        mTimeStepOfPattern0 = mTimeStepOfPatternLast + 1;
    } else {
        // special at 0: nothing read yet, start at the beginning
        if (mTimeStepOfPatternLast == -1) {
            mTimeStepOfPattern0 = 0;
        } else {
            // re-read the last sample of the previous window; it counts
            // as "remaining" again
            mTimeStepOfPattern0 = mTimeStepOfPatternLast;
            remainInFile += 1;
        }
    }
    // count: fill the buffer or read to EOF, whichever is smaller
    int readCount = std::min((int)mTimePoints.size(), remainInFile);
    mTimeStepOfPatternLast = mTimeStepOfPattern0 + readCount - 1;
    // read time
    mReader->readVariable(mTimeID, "time_points", mTimePoints,
                          {mTimeStepOfPattern0}, {readCount});
    // read data (real and imaginary parts stored as separate variables)
    mReader->readVariable(mPatternReID, mVariableName + "_RE", sReadRe,
                          {mTimeStepOfPattern0, 0},
                          {readCount, mNu_1 * nCols});
    mReader->readVariable(mPatternImID, mVariableName + "_IM", sReadIm,
                          {mTimeStepOfPattern0, 0},
                          {readCount, mNu_1 * nCols});
    // copy data: the flat read buffers are laid out time-major, with
    // nCols consecutive values per azimuthal order alpha
    int pos = 0;
    for (int itime = 0; itime < readCount; itime++) {
        for (int alpha = 0; alpha < mNu_1; alpha++) {
            mPatterns[itime].row(alpha).real() =
            sReadRe.block(0, pos, 1, nCols);
            mPatterns[itime].row(alpha).imag() =
            sReadIm.block(0, pos, 1, nCols);
            pos += nCols;
        }
    }
}
|
/*
* dbtype_Security.cpp
*/
#include <string>
#include <ostream>
#include <sstream>
#include <algorithm>
#include <bitset>
#include <vector>
#include "osinterface/osinterface_OsTypes.h"
#include "dbtypes/dbtype_Security.h"
#include "logging/log_Logger.h"
namespace
{
    // Short one-character labels for each SecurityFlag, indexed by the
    // flag's enum value; the final entry covers SECURITYFLAG_invalid.
    const static std::string SECURITY_FLAGS_STRING[] =
    {
        "r",
        "w",
        "b",
        "c",
        "?"
    };
    // Long spellings, parallel to SECURITY_FLAGS_STRING.
    const static std::string SECURITY_FLAGS_LONG_STRING[] =
    {
        "read",
        "write",
        "base",
        "chown",
        "invalid"
    };
}
namespace mutgos
{
namespace dbtype
{
// -----------------------------------------------------------------------
// Parses a flag from either its short ("r") or long ("read") spelling.
// Unrecognized input maps to SECURITYFLAG_invalid.
SecurityFlag Security::security_flag_from_string(const std::string &flag)
{
    for (MG_UnsignedInt index = SECURITYFLAG_read;
         index < SECURITYFLAG_invalid;
         ++index)
    {
        if ((SECURITY_FLAGS_STRING[index] == flag) or
            (SECURITY_FLAGS_LONG_STRING[index] == flag))
        {
            return (SecurityFlag) index;
        }
    }
    return SECURITYFLAG_invalid;
}
// -----------------------------------------------------------------------
// Default constructor: both flag sets sized for all valid flags and
// cleared (SECURITYFLAG_invalid is the flag count).
Security::Security(void)
  : list_flags(SECURITYFLAG_invalid),
    other_flags(SECURITYFLAG_invalid)
{
}
// -----------------------------------------------------------------------
// Copy constructor: member-wise copy of flags and both ID lists.
Security::Security(const Security &rhs)
  : list_flags(rhs.list_flags),
    other_flags(rhs.other_flags),
    admin_ids(rhs.admin_ids),
    list_ids(rhs.list_ids)
{
}
// -----------------------------------------------------------------------
Security::~Security()
{
    // Nothing extra to destruct
}
// -----------------------------------------------------------------------
// Member-wise assignment with the usual self-assignment guard.
Security &Security::operator=(const Security &rhs)
{
    if (&rhs != this)
    {
        list_flags = rhs.list_flags;
        other_flags = rhs.other_flags;
        admin_ids = rhs.admin_ids;
        list_ids = rhs.list_ids;
    }
    return *this;
}
// -----------------------------------------------------------------------
// Deep equality: both ID lists (order-sensitive) and both flag sets.
bool Security::operator==(const Security &rhs) const
{
    if (this == &rhs)
    {
        return true;
    }
    // Compare sizes first; 'and' short-circuits, so std::equal only runs
    // when the sizes already match (required for the one-range overload).
    const bool sizes_match =
        (admin_ids.size() == rhs.admin_ids.size()) and
        (list_ids.size() == rhs.list_ids.size());
    return sizes_match
        and std::equal(admin_ids.begin(), admin_ids.end(),
                       rhs.admin_ids.begin())
        and std::equal(list_ids.begin(), list_ids.end(),
                       rhs.list_ids.begin())
        and (list_flags == rhs.list_flags)
        and (other_flags == rhs.other_flags);
}
// -----------------------------------------------------------------------
// Computes the IDs removed (result.first) and added (result.second)
// going from *this to rhs, across both the admin and list ID sets, then
// drops IDs that appear in both result lists (e.g. an ID that moved
// from one set to the other).
Security::RemoveAddPair Security::diff_ids(const Security &rhs) const
{
    RemoveAddPair result;
    // Do the diff, checking each ID set in turn.
    diff_id_set(admin_ids, rhs.admin_ids, result);
    diff_id_set(list_ids, rhs.list_ids, result);
    // This can cause an ID to be in the both add and remove list. Remove
    // those instances...
    SecurityIds ids_to_remove;  // NOTE(review): declared but never used
    if ((not result.first.empty()) and (not result.second.empty()))
    {
        SecurityIds::iterator first_iter = result.first.begin();
        SecurityIds::iterator second_iter;
        bool erase = false;
        while (first_iter != result.first.end())
        {
            erase = false;
            // Linear scan of the 'add' list for a matching ID.
            second_iter = result.second.begin();
            while (second_iter != result.second.end())
            {
                if (*first_iter == *second_iter)
                {
                    erase = true;
                    break;
                }
                ++second_iter;
            }
            if (erase)
            {
                // erase() returns the next valid iterator; second_iter
                // stays valid because erasing from result.first does not
                // touch result.second.
                first_iter = result.first.erase(first_iter);
                result.second.erase(second_iter);
            }
            else
            {
                ++first_iter;
            }
        }
    }
    return result;
}
// -----------------------------------------------------------------------
// Renders this security descriptor as a human-readable string.
// Passes list_flags.size() as the flag count — assumes the flag
// container size equals the number of valid flags (TODO confirm against
// SecurityFlagContainer's declaration).
std::string Security::to_string(void) const
{
    std::string output;
    to_string_internal(list_flags.size(), output);
    return output;
}
// -----------------------------------------------------------------------
// Returns the given flag's value for the 'list' group (bounds-checked).
bool Security::get_list_security_flag(const SecurityFlag flag) const
{
    return secure_get_flag(list_flags, flag);
}
// -----------------------------------------------------------------------
// Returns the given flag's value for 'other' (everyone else).
bool Security::get_other_security_flag(const SecurityFlag flag) const
{
    return secure_get_flag(other_flags, flag);
}
// -----------------------------------------------------------------------
// Sets a 'list' flag; returns false if the flag is not permitted here.
bool Security::set_list_security_flag(
    const SecurityFlag flag,
    const bool value)
{
    return (allow_flag(flag) ?
        secure_set_flag(list_flags, flag, value) : false);
}
// -----------------------------------------------------------------------
// Sets an 'other' flag; returns false if the flag is not permitted here.
bool Security::set_other_security_flag(
    const SecurityFlag flag,
    const bool value)
{
    return (allow_flag(flag) ?
        secure_set_flag(other_flags, flag, value) : false);
}
// -----------------------------------------------------------------------
// Read-only access to the admin ID list.
const Security::SecurityIds &Security::get_admin_ids(void) const
{
    return admin_ids;
}
// -----------------------------------------------------------------------
// Returns true when the given ID is in the admin list.
// Bug fix: the original searched list_ids but compared the resulting
// iterator against admin_ids.end() — wrong answer and undefined
// behavior (comparing iterators from two different containers).
bool Security::is_admin(const Id &id) const
{
    return std::find(admin_ids.begin(), admin_ids.end(), id) !=
        admin_ids.end();
}
// -----------------------------------------------------------------------
// Adds an ID to the admin list, removing it from the plain list first
// so an ID is never in both. Returns true only when the ID was newly
// added; false for default IDs or IDs already present.
bool Security::add_admin(const Id &id)
{
    if (id.is_default())
    {
        // Default IDs are not allowed to avoid confusion.
        return false;
    }
    // Remove from 'list' if there, since ID shouldn't be both
    // an admin and in the list
    //
    SecurityIds::iterator list_iter =
        std::find(list_ids.begin(), list_ids.end(), id);
    if (list_iter != list_ids.end())
    {
        list_ids.erase(list_iter);
    }
    // Add to admin list if not found
    //
    list_iter = std::find(admin_ids.begin(), admin_ids.end(), id);
    if (list_iter == admin_ids.end())
    {
        admin_ids.push_back(id);
        return true;
    }
    return false;
}
// -----------------------------------------------------------------------
// Removes an ID from the admin list using swap-with-last to avoid
// shifting elements (order is NOT preserved). Returns true when an ID
// was actually removed.
bool Security::remove_admin(const Id &id)
{
    SecurityIds::iterator admin_iter =
        std::find(admin_ids.begin(), admin_ids.end(), id);
    if (admin_iter != admin_ids.end())
    {
        if (admin_ids.size() == 1)
        {
            // Nothing left.
            admin_ids.clear();
        }
        else if (*admin_iter == admin_ids.back())
        {
            // Shortcut for removing last item
            admin_ids.pop_back();
        }
        else
        {
            // Put the last one in place of what's being erased,
            // then pop the last one.
            //
            *admin_iter = admin_ids.back();
            admin_ids.pop_back();
        }
        return true;
    }
    return false;
}
// -----------------------------------------------------------------------
// Empties the admin ID list.
void Security::clear_admins(void)
{
    admin_ids.clear();
}
// -----------------------------------------------------------------------
// Read-only access to the plain (non-admin) ID list.
const Security::SecurityIds &Security::get_list_ids(void) const
{
    return list_ids;
}
// -----------------------------------------------------------------------
// Returns true when the given ID is in the plain list.
bool Security::is_in_list(const Id &id) const
{
    return (std::find(list_ids.begin(), list_ids.end(), id) !=
        list_ids.end());
}
// -----------------------------------------------------------------------
// Adds an ID to the plain list. Refused (returns false) for default
// IDs, for IDs already in the admin list, and for duplicates.
bool Security::add_to_list(const Id &id)
{
    if (id.is_default())
    {
        // Default IDs are not allowed to avoid confusion.
        return false;
    }
    // An ID may not be both an admin and a plain list member.
    SecurityIds::iterator admin_iter =
        std::find(admin_ids.begin(), admin_ids.end(), id);
    if (admin_iter == admin_ids.end())
    {
        SecurityIds::iterator list_iter =
            std::find(list_ids.begin(), list_ids.end(), id);
        if (list_iter == list_ids.end())
        {
            list_ids.push_back(id);
            return true;
        }
    }
    return false;
}
// -----------------------------------------------------------------------
// Removes an ID from the plain list using swap-with-last (order is NOT
// preserved). Returns true when an ID was actually removed.
// Bug fix: the original always returned false, even after a successful
// removal — inconsistent with remove_admin(), which returns true.
bool Security::remove_from_list(const Id &id)
{
    SecurityIds::iterator list_iter =
        std::find(list_ids.begin(), list_ids.end(), id);
    if (list_iter != list_ids.end())
    {
        if (list_ids.size() == 1)
        {
            // Nothing left.
            list_ids.clear();
        }
        else if (*list_iter == list_ids.back())
        {
            // Shortcut for removing last item
            list_ids.pop_back();
        }
        else
        {
            // Put the last one in place of what's being erased,
            // then pop the last one.
            //
            *list_iter = list_ids.back();
            list_ids.pop_back();
        }
        return true;
    }
    return false;
}
// -----------------------------------------------------------------------
// Empties the plain (non-admin) ID list.
void Security::clear_list(void)
{
    list_ids.clear();
}
// -----------------------------------------------------------------------
// A flag is permitted when it is a real flag value (below the
// 'invalid' sentinel, which doubles as the flag count).
bool Security::allow_flag(const SecurityFlag flag) const
{
    return flag < SECURITYFLAG_invalid;
}
// -----------------------------------------------------------------------
// Builds the string form: list flags, ':', other flags ('-' for unset),
// then optional ":(ADMIN_FIELD id id ...)" and ":(LIST_FIELD id ...)"
// sections. Assumes flag_count does not exceed the SECURITY_FLAGS_STRING
// table length (SECURITYFLAG_invalid + 1 entries) — TODO confirm.
void Security::to_string_internal(
    const MG_UnsignedInt flag_count,
    std::string &output_string) const
{
    std::ostringstream strstream;
    // Flags granted to the list group.
    for (MG_UnsignedInt index = 0; index < flag_count; ++index)
    {
        strstream
            << (list_flags[index] ? SECURITY_FLAGS_STRING[index] : "-");
    }
    strstream << ":";
    // Flags granted to everyone else.
    for (MG_UnsignedInt index = 0; index < flag_count; ++index)
    {
        strstream
            << (other_flags[index] ? SECURITY_FLAGS_STRING[index] : "-");
    }
    if (not admin_ids.empty())
    {
        bool first = true;
        // Also spit out the list of space separated admin IDs
        //
        strstream << ":(ADMIN_FIELD ";
        for (SecurityIds::const_iterator iter = admin_ids.begin();
            iter != admin_ids.end();
            ++iter)
        {
            // Space-separate every ID after the first.
            if (first)
            {
                first = false;
            }
            else
            {
                strstream << " ";
            }
            strstream << iter->to_string();
        }
        strstream << ")";
    }
    if (not list_ids.empty())
    {
        bool first = true;
        // Also spit out the list of space separated group IDs
        //
        strstream << ":(LIST_FIELD ";
        for (SecurityIds::const_iterator iter = list_ids.begin();
            iter != list_ids.end();
            ++iter)
        {
            if (first)
            {
                first = false;
            }
            else
            {
                strstream << " ";
            }
            strstream << iter->to_string();
        }
        strstream << ")";
    }
    output_string = strstream.str();
}
// -----------------------------------------------------------------------
// Appends to result the IDs removed (in lhs but not rhs -> result.first)
// and added (in rhs but not lhs -> result.second) going from lhs to rhs.
// Bug fix: both conditions were inverted ('!= end()' instead of
// '== end()'), which collected the IDs present in BOTH sets —
// contradicting the in-code comments ("Not in other, so it was
// removed" / "Not in original, so it was added") and making diff_ids()
// cancel every entry.
void Security::diff_id_set(
    const SecurityIds &lhs,
    const SecurityIds &rhs,
    RemoveAddPair &result) const
{
    // Do the diff, checking each list_id in turn.
    for (SecurityIds::const_iterator from_iter = lhs.begin();
        from_iter != lhs.end();
        ++from_iter)
    {
        if (std::find(rhs.begin(), rhs.end(), *from_iter) == rhs.end())
        {
            // Not in other, so it was removed
            result.first.push_back(*from_iter);
        }
    }
    for (SecurityIds::const_iterator to_iter = rhs.begin();
        to_iter != rhs.end();
        ++to_iter)
    {
        if (std::find(lhs.begin(), lhs.end(), *to_iter) == lhs.end())
        {
            // Not in original, so it was added
            result.second.push_back(*to_iter);
        }
    }
}
// -----------------------------------------------------------------------
// Bounds-checked flag read; logs and returns false for out-of-range
// flags instead of indexing past the container.
bool Security::secure_get_flag(
    const Security::SecurityFlagContainer &container,
    const SecurityFlag flag) const
{
    if ((flag < 0) or (flag >= SECURITYFLAG_invalid))
    {
        LOG(error, "dbtype", "secure_get_flag",
            "Flag to get is not valid!");
        return false;
    }
    else
    {
        return container[flag];
    }
}
// -----------------------------------------------------------------------
// Bounds-checked flag write; logs and returns false for out-of-range
// flags, true when the flag was stored.
bool Security::secure_set_flag(
    Security::SecurityFlagContainer &container,
    const SecurityFlag flag,
    const bool value)
{
    if ((flag < 0) or (flag >= SECURITYFLAG_invalid))
    {
        LOG(error, "dbtype", "secure_set_flag",
            "Flag to set is not valid!");
        return false;
    }
    else
    {
        container[flag] = value;
        return true;
    }
}
} /* namespace dbtype */
} /* namespace mutgos */
|
#!/bin/sh
# CocoaPods "embed frameworks" build phase: copies, strips and re-signs
# vendored frameworks into the app bundle.
# NOTE(review): bash-only features are used below (arrays, [[ ]],
# 'set -o pipefail') despite the /bin/sh shebang — this relies on the
# build host's sh being bash-compatible; confirm before changing.
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Default parallel signing to off unless the caller enabled it.
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path to the .framework (absolute, or relative to BUILT_PRODUCTS_DIR).
install_framework()
{
# Resolve the source: try BUILT_PRODUCTS_DIR, then its basename there,
# then the path as given.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked framework to its real location before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
# Locate the framework's executable; fall back to a bare binary copy.
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# $1: path to the .framework.dSYM bundle.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 when the binary
# was processed, 0 when no architectures matched.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. Skipped when code signing is
# disabled or not required by the build settings.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
# Background the sign command when parallel signing is enabled; the
# caller wait()s for all of them at the end of the script.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# $1: binary to process. Removes every architecture not in $ARCHS and
# reports the outcome via STRIP_BINARY_RETVAL (1 = processed, 0 = no
# matching architectures found).
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/FBSnapshotTestCase/FBSnapshotTestCase.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Nimble/Nimble.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Nimble-Snapshots/Nimble_Snapshots.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Quick/Quick.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/FBSnapshotTestCase/FBSnapshotTestCase.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Nimble/Nimble.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Nimble-Snapshots/Nimble_Snapshots.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Quick/Quick.framework"
fi
# When signing ran in the background, wait for every codesign to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
#!/bin/bash -ef
# Provision a 'testenv' conda environment with the build/doc/test
# dependencies, then install the current package into it.
conda init bash
echo "conda version = $(conda --version)"
conda create -n testenv
# fix: 'pandas' was listed twice in this install command
conda install -n testenv -yq python=3.8 numpy scipy scikit-learn matplotlib pandas lxml mkl sphinx==3.5.4 numpydoc pillow
conda install -n testenv -yq nibabel sphinx-gallery sphinxcontrib-bibtex junit-xml -c conda-forge
source activate testenv
python -m pip install --user --upgrade --progress-bar off pip setuptools
python -m pip install .
|
#!/bin/bash
# Start FRR (zebra + isisd) for one emulated node and enable IPv6
# forwarding and SRv6 processing on all interfaces.
BASE_DIR=nodeconf
NODE_NAME=r6
FRR_PATH=/usr/lib/frr
#enable IPv4 forwarding
#sysctl -w net.ipv4.ip_forward=1
sysctl -w net.ipv6.conf.all.forwarding=1
#disable reverse path filtering (needed for dynamic routing)
#sysctl -w net.ipv4.conf.all.rp_filter=0
#sysctl -w net.ipv4.conf.default.rp_filter=0
#the following for loop also disables all and default
#for i in /proc/sys/net/ipv4/conf/*/rp_filter ; do
# echo 0 > $i
#done
# Keep per-daemon config files rather than a single integrated one.
echo "no service integrated-vtysh-config" >> /etc/frr/vtysh.conf
chown frr:frrvty $BASE_DIR/$NODE_NAME
#chown quagga:quagga $BASE_DIR/$NODE_NAME
# Launch zebra daemonized with per-node config, socket and pid file.
$FRR_PATH/zebra -f "$PWD"/$BASE_DIR/$NODE_NAME/zebra.conf -d -z "$PWD"/$BASE_DIR/$NODE_NAME/zebra.sock -i "$PWD"/$BASE_DIR/$NODE_NAME/zebra.pid
sleep 1
$FRR_PATH/isisd -f "$PWD"/$BASE_DIR/$NODE_NAME/isisd.conf -d -z "$PWD"/$BASE_DIR/$NODE_NAME/zebra.sock -i "$PWD"/$BASE_DIR/$NODE_NAME/isisd.pid
# enable Segment Routing for IPv6
sysctl -w net.ipv6.conf.all.seg6_enabled=1
# Enable SRv6 on every non-loopback interface that has an IPv6 address.
for dev in $(ip -o -6 a | awk '{ print $2 }' | grep -v "lo")
do
sysctl -w net.ipv6.conf."$dev".seg6_enabled=1
done
|
#!/bin/tcsh
#PBS -A NTDD0005
#PBS -N testb
#PBS -q regular
#PBS -l walltime=12:00:00
#PBS -j oe
#PBS -M apinard@ucar.edu
#PBS -l select=1:ncpus=1
# Single-core PBS job: run the ldcpy batch DSSIM computation for TMQ
# (time steps 480-495) inside the ldcpy_env conda environment.
module load conda
conda activate ldcpy_env
# Point TMPDIR at scratch so temporary files do not fill home.
setenv TMPDIR /glade/scratch/$USER/temp
mkdir -p $TMPDIR
python ./compute_batch.py -o '/glade/scratch/apinard/3D/TMQ_calcs.csv' -j './batch_scripts/3d_dssim_scripts/TMQ.json' -ts 480 -tt 495 -v -ld
|
from typing import Callable
def get_run_generator(test_run: str) -> Callable[[], str]:
    """Return a zero-argument generator of input for *test_run*.

    Stub only — the real implementation lives elsewhere; as written this
    returns None, so callers must not invoke the result until the real
    function is supplied.
    """
    # Assume this function is already implemented
    pass
def generate_test_cases(debug_mode: bool, test_runs: list) -> dict:
    """Map each run name to the string produced by its input generator.

    When *debug_mode* is false, no generators are invoked and an empty
    dict is returned.
    """
    if not debug_mode:
        return {}
    return {run: get_run_generator(run)() for run in test_runs}
# Example usage
# NOTE(review): with the stub get_run_generator returning None, running
# this with DEBUG=True would raise TypeError (None is not callable) —
# it only works once the real generator function is provided.
DEBUG = True
TEST_RUNS = ['run1', 'run2', 'run3']
result = generate_test_cases(DEBUG, TEST_RUNS)
print(result) |
<reponame>YourBetterAssistant/yourbetterassistant<filename>events/guild/messageCreate.ts
"use strict";
import Levels from "discord-xp";
import count from "../../Utils/count";
import level from "../../Utils/level";
import check from "../../Utils/checkChatChannel";
import Logger from "../../lib/logger";
import Trainer from "../../lib/trainer";
import { prefixLoad, clearCache as newCache } from "../../Utils/prefix-load";
let process = require("process");
import config from "../../botconfig/config.json"; //loading config file with token and prefix, and settings
import { prefix as globalPrefix } from "../../botconfig/config.json";
import ee from "../../botconfig/embed.json"; //Loading all embed settings like color footertext and icon ...
import Discord, { Client, Message, TextChannel } from "discord.js"; //this is the official discord.js wrapper for the Discord Api, which we use!
import funcs from "../../handlers/functions"; //Loading all needed functions
Levels.setURL(config.mongoPath);
import unknownCommand from "../../Schemas/unknownCommand";
import {
checkAutoMod,
forceNewCache as forceAutoCacheMod,
} from "../../Utils/checkAutoMod";
import autoMod from "../../Constructors/autoModUser";
import levellingEnabled from "../../Schemas/levellingEnabled";
//here the event starts
let prefix;
const logger = new Logger("Events - MessageCreate");
module.exports = async (client: Client, message: Message) => {
const automod = new autoMod(message);
const guildPrefixes: { [key: string]: string } = {};
try {
//if the message is not in a guild (aka in dms), return aka ignore the inputs
// if the message author is a bot, return aka ignore the inputs
setInterval(newCache, 3600000);
setInterval(forceAutoCacheMod, 3600000);
if (message.author.bot) return;
//if the channel is on partial fetch it
if (message.channel.partial) await message.channel.fetch();
//if the message is on partial fetch it
if (message.partial) await message.fetch();
//get the current prefix from the botconfig/config.json
if (!message.guild) {
const embed = new Discord.MessageEmbed()
.setTitle("Support DM")
.setDescription(message.content)
.setFooter(`Asked By ${message.author.username}`)
.setColor("RED");
const owner = await client.users.fetch("827388013062389761");
owner.send(message.content + "\nAsked by " + message.author.tag);
const channel = (await client.channels.fetch(
"879949650415722556"
)) as TextChannel;
channel?.send({ embeds: [embed] });
return message.channel.send(
"My DMS are for support messages only, the message sent will be forwarded to the owner and to our support server for an answer please join our server at https://discord.gg/h2YfQbKFTR"
);
}
Trainer(message.content);
await prefixLoad(client, guildPrefixes, globalPrefix);
await count(message);
await check(message);
await checkAutoMod(message).then(async (found) => {
if (found?.strictmode === "true") {
await automod.checkProfanity();
await automod.allCaps();
} else if (found?.strictmode === "false") {
await automod.allCaps();
}
});
if (
message.content.toLowerCase() === "ded chat" ||
message.content.toLowerCase() === "dead chat"
)
return message.channel.send(
"Good Eye Why Not Try To Start A Conversation?"
);
const levelTrue = await levellingEnabled.findOne({
guildID: message.guild.id,
});
if (levelTrue) {
await level(message);
const randomAmountOfXp = Math.floor(Math.random() * 29) + 1; // Min 1, Max 30
const hasLeveledUp = await Levels.appendXp(
message.author.id,
message.guild.id,
randomAmountOfXp
);
if (hasLeveledUp) {
const user = await Levels.fetch(message.author.id, message.guild.id);
message.channel.send(
`${message.author}, congratulations! You have leveled up to **${user.level}**. :tada:`
);
}
}
prefix = guildPrefixes[message.guild.id] || globalPrefix; //comment ||guildPrefixes[message.guild.id] to be able to only use b!
client.prefix = guildPrefixes || globalPrefix;
//the prefix can be a Mention of the Bot / The defined Prefix of the Bot
const prefixRegex = new RegExp(
`^(<@!?${client.user?.id}>|${funcs.escapeRegex(prefix)})\\s*`
);
//if its not that then return
if (!prefixRegex.test(message.content)) return;
//now define the right prefix either ping or not ping
const [_null, matchedPrefix] = message.content.match(
prefixRegex
) as RegExpMatchArray;
//create the arguments with sliceing of of the rightprefix length
const args = message.content.slice(matchedPrefix.length).trim().split(/ +/);
//creating the cmd argument by shifting the args by 1
const cmd = args.shift()?.toLowerCase();
//if no cmd added return error
if (cmd?.length! === 0) {
let embed = new Discord.MessageEmbed()
.setColor("BLUE")
.setFooter(ee.footertext, ee.footericon)
.setTitle(`Hugh? I got pinged? Imma give you some help`)
.setDescription(
`To see all Commands type: \`${client.prefix[message.guild.id]}help\``
);
if (message.content.startsWith(`<@!${client.user?.id}>`))
return message.channel.send({ embeds: [embed] });
return;
}
//get the command from the collection
let command = client.commands.get(cmd);
//if the command does not exist, try to get it by his alias
if (!command) command = client.commands.get(client.aliases.get(cmd));
//if the command is now valid
if (command) {
if (!client.cooldowns.has(command.name)) {
//if its not in the cooldown, set it too there
client.cooldowns.set(command.name, new Discord.Collection());
}
const now = Date.now(); //get the current time
const timestamps = client.cooldowns.get(command.name); //get the timestamp of the last used commands
const cooldownAmount = (command.cooldown || 2) * 1000; //get the cooldownamount of the command, if there is no cooldown there will be automatically 1 sec cooldown, so you cannot spam it^^
if (timestamps.has(message.author.id)) {
//if the user is on cooldown
const expirationTime =
timestamps.get(message.author.id) + cooldownAmount; //get the amount of time he needs to wait until he can run the cmd again
if (now < expirationTime) {
//if he is still on cooldonw
const timeLeft = expirationTime - now; //get the lefttime
let embed = new Discord.MessageEmbed()
.setColor("RED")
.setFooter(ee.footertext, ee.footericon)
.setTitle(
`❌ Please wait ${funcs.duration(
timeLeft
)} before reusing the \`${command.name}\` command.`
);
return message.channel.send({ embeds: [embed] });
//send an information message
}
}
timestamps.set(message.author.id, now); //if he is not on cooldown, set it to the cooldown
setTimeout(() => timestamps.delete(message.author.id), cooldownAmount); //set a timeout function with the cooldown, so it gets deleted later on again
try {
//if Command has specific permission return error
if (
command.memberpermissions &&
!message.member?.permissions.has(command.memberpermissions)
) {
let e = new Discord.MessageEmbed()
.setColor("RED")
.setFooter(ee.footertext, ee.footericon)
.setTitle("❌ Error | You are not allowed to run this command!")
.setDescription("You Do Not Have The Required Perms!");
return message.channel.send({ embeds: [e] }).then((msg) =>
setTimeout(() => {
msg
.delete()
.catch(() => logger.error("Couldn't Delete --> Ignore"));
}, 1000)
);
}
//if the Bot has not enough permissions return error
// let required_perms = ["ADD_REACTIONS","VIEW_CHANNEL","SEND_MESSAGES",
// "EMBED_LINKS", "CONNECT", "SPEAK"]
// if(!message.guild.me.permissions.has(required_perms)){
// try{ message.react("❌"); }catch{}
// let embed=new Discord.MessageEmbed()
// .setColor(ee.wrongcolor)
// .setFooter(ee.footertext, ee.footericon)
// .setTitle("❌ Error | I don't have enough Permissions!")
// .setDescription("Please give me just `ADMINISTRATOR`, because I need it to delete Messages, Create Channel and execute all Admin Commands.\n If you don't want to give me them, then those are the exact Permissions which I need: \n> `" + required_perms.join("`, `") +"`")
// message.channel.send({embeds:[embed]
// })
// }
//run the command with the parameters: client, message, args, user, text, prefix,
command.run(client, message, args);
} catch (e: any) {
logger.error(e.stack);
let em = new Discord.MessageEmbed()
.setColor("RED")
.setFooter(ee.footertext, ee.footericon)
.setTitle(
"❌ Something went wrong while, running the: `" +
command.name +
"` command"
)
.setDescription(`\`\`\`${e.message}\`\`\``)
.addField("Guild:", message.guild.id.toString())
.addField("GuildName", message.guild.name)
.setTimestamp(new Date());
const channel = client.channels.cache.get(
"889101477421912064"
) as TextChannel;
message.channel.send(
`Something happened while running \`${command.name}\`, This has been logged and reported to the developers`
);
return channel?.send({ embeds: [em] });
}
} else {
//if the command is not found send an info msg
const d = await unknownCommand.findOne({ guildId: message.guild.id });
if (d) {
let embed = new Discord.MessageEmbed()
.setColor("RED")
.setFooter(ee.footertext, ee.footericon)
.setTitle(`❌ Unkown command, try: **\`${prefix}help\`**`)
.setDescription(
`To get help on a specific command, type \`${prefix}help [command name]\``
);
const m = await message.channel.send({ embeds: [embed] });
setTimeout(function () {
m.delete();
}, 2000);
}
}
} catch (e) {
const { erroHandler: err } = require("../../handlers/errorHandler");
err(e, message);
}
/**
* @INFO
* Bot Coded by Tomato#6966 | https://github.com/Tomato6966/Discord-Js-Handler-Template
* @INFO
* Work for Milrato Development | https://milrato.eu
* @INFO
* Please mention Him / Milrato Development, when using this Code!
* @INFO
*/
};
|
import React, { useState } from "react";
const ValidInteger = () => {
const [input, setInput] = useState("");
const handleChange = e => {
setInput(e.target.value);
};
const isValidInt = str => {
return /^-?\d+$/.test(str);
};
return (
<div>
<h2>Enter a number:</h2>
<input type="number" value={input} onChange={handleChange} />
<p>{isValidInt(input) ? "Valid Integer" : "Invalid Integer"}</p>
</div>
);
};
export default ValidInteger; |
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.portfolio.manager;
import java.io.File;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Date;
import org.junit.Assert;
import org.junit.Test;
import org.olat.modules.portfolio.Citation;
import org.olat.modules.portfolio.CitationSourceType;
import org.olat.modules.portfolio.model.CitationXml;
/**
*
* Initial date: 22 juin 2021<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
public class MetadataXStreamTest {

	/**
	 * Reads the bundled citation.xml fixture and verifies a couple of
	 * fields survive deserialization.
	 */
	@Test
	public void readCitationXml() throws URISyntaxException {
		URL fixtureUrl = MetadataXStreamTest.class.getResource("citation.xml");
		File fixtureFile = new File(fixtureUrl.toURI());

		Citation parsed = (Citation)MetadataXStream.get().fromXML(fixtureFile);
		Assert.assertNotNull(parsed);
		Assert.assertEquals("SBN-3458794958", parsed.getIsbn());
		Assert.assertEquals("Volumen 23", parsed.getVolume());
	}

	/**
	 * Round-trips a CitationXml through the XStream marshaller and
	 * verifies the values are preserved.
	 */
	@Test
	public void writeReadCitationXml() throws URISyntaxException {
		CitationXml source = new CitationXml();
		source.setLastVisit(new Date());
		source.setEdition("First edition");
		source.setItemType(CitationSourceType.film);

		String asXml = MetadataXStream.get().toXML(source);
		Citation roundTripped = (Citation)MetadataXStream.get().fromXML(asXml);
		Assert.assertNotNull(roundTripped);
		Assert.assertEquals("First edition", roundTripped.getEdition());
		Assert.assertEquals(CitationSourceType.film, roundTripped.getItemType());
	}
}
|
<gh_stars>10-100
package main
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"strconv"
"strings"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
)
// Environment variable names that configure the test run.
const (
	envAWSRegion = "AWS_REGION"     // region the bucket lives in
	envS3Bucket  = "S3_BUCKET_NAME" // bucket under test
	envS3Action  = "S3_ACTION"      // "validate" checks contents; anything else cleans the bucket
)

// Message models one newline-delimited JSON log record stored in S3;
// Log carries the payload (a stringified integer produced by the log generator).
type Message struct {
	Log string
}
// main reads the required configuration from the environment and either
// validates or cleans the target S3 bucket depending on S3_ACTION.
func main() {
	// Required configuration comes from the environment; fail fast when absent.
	region := os.Getenv(envAWSRegion)
	if region == "" {
		exitErrorf("[TEST FAILURE] AWS Region required. Set the value for environment variable- %s", envAWSRegion)
	}

	bucket := os.Getenv(envS3Bucket)
	if bucket == "" {
		exitErrorf("[TEST FAILURE] Bucket name required. Set the value for environment variable- %s", envS3Bucket)
	}

	client, err := getS3Client(region)
	if err != nil {
		exitErrorf("[TEST FAILURE] Unable to create new S3 client: %v", err)
	}

	// S3_ACTION selects the mode: "validate" checks the bucket contents,
	// anything else deletes every object in it.
	if os.Getenv(envS3Action) == "validate" {
		validate(client, getS3Objects(client, bucket), bucket)
	} else {
		deleteS3Objects(client, bucket)
	}
}
// Creates a new S3 Client bound to the given AWS region.
func getS3Client(region string) (*s3.S3, error) {
	cfg := &aws.Config{Region: aws.String(region)}
	sess, err := session.NewSession(cfg)
	if err != nil {
		return nil, err
	}
	return s3.New(sess), nil
}
// Returns up to the first 100 objects from a S3 bucket.
func getS3Objects(s3Client *s3.S3, bucket string) *s3.ListObjectsV2Output {
	input := &s3.ListObjectsV2Input{
		Bucket:  aws.String(bucket),
		MaxKeys: aws.Int64(100),
	}

	response, err := s3Client.ListObjectsV2(input)
	if err != nil {
		// Fixed message typo: "occured" -> "occurred".
		exitErrorf("[TEST FAILURE] Error occurred to get the objects from bucket: %q., %v", bucket, err)
	}
	return response
}
// Validates the log messages. Our log producer is designed to send 1000 integers [0 - 999].
// Both of the Kinesis Streams and Kinesis Firehose try to send each log maintaining the "at least once" policy.
// To validate, we need to make sure all the valid numbers [0 - 999] are stored at least once.
func validate(s3Client *s3.S3, response *s3.ListObjectsV2Output, bucket string) {
	// logCounter[n] stays 1 until the number n is seen somewhere in the bucket.
	logCounter := make([]int, 1000)
	for index := range logCounter {
		logCounter[index] = 1
	}

	for i := range response.Contents {
		input := &s3.GetObjectInput{
			Bucket: aws.String(bucket),
			Key:    response.Contents[i].Key,
		}

		obj := getS3Object(s3Client, input)
		dataByte, err := ioutil.ReadAll(obj.Body)
		if err != nil {
			exitErrorf("[TEST FAILURE] Error to parse GetObject response. %v", err)
		}

		// Each object holds newline-delimited JSON records.
		data := strings.Split(string(dataByte), "\n")
		for _, d := range data {
			if d == "" {
				continue
			}
			var message Message
			decodeError := json.Unmarshal([]byte(d), &message)
			if decodeError != nil {
				// BUG FIX: the format string had no verb, so the error value was
				// emitted as a "%!(EXTRA ...)" operand instead of being formatted.
				exitErrorf("[TEST FAILURE] Json Unmarshal Error: %v", decodeError)
			}

			number, conversionError := strconv.Atoi(message.Log)
			if conversionError != nil {
				// BUG FIX: same missing format verb; also "convertion" -> "conversion".
				exitErrorf("[TEST FAILURE] String to Int conversion Error: %v", conversionError)
			}
			if number < 0 || number >= 1000 {
				exitErrorf("[TEST FAILURE] Invalid number: %d found. Expected value in range (0 - 999)", number)
			}
			// Mark this number as seen.
			logCounter[number] = 0
		}
	}

	// Any remaining 1 means the corresponding number was never delivered.
	missing := 0
	for i := range logCounter {
		missing += logCounter[i]
	}
	if missing > 0 {
		exitErrorf("[TEST FAILURE] Validation Failed. Number of missing log records: %d", missing)
	} else {
		fmt.Println("[TEST SUCCESSFUL] Found all the log records.")
	}
}
// Retrieves an object from a S3 bucket. Any error aborts the test run.
func getS3Object(s3Client *s3.S3, input *s3.GetObjectInput) *s3.GetObjectOutput {
	obj, err := s3Client.GetObject(input)
	if err != nil {
		// Fixed message typo: "occured" -> "occurred".
		exitErrorf("[TEST FAILURE] Error occurred to get s3 object: %v", err)
	}
	return obj
}
// Delete all the objects from a specified S3 bucket.
func deleteS3Objects(s3Client *s3.S3, bucket string) {
	// A delete-list iterator walks every object currently in the bucket.
	listInput := &s3.ListObjectsInput{Bucket: aws.String(bucket)}
	iter := s3manager.NewDeleteListIterator(s3Client, listInput)

	// Batch-delete everything the iterator yields.
	batchDeleter := s3manager.NewBatchDeleteWithClient(s3Client)
	if err := batchDeleter.Delete(aws.BackgroundContext(), iter); err != nil {
		exitErrorf("[CLEAN FAILURE] Unable to delete the objects from the bucket %q., %v", bucket, err)
	}
	fmt.Println("[CLEAN SUCCESSFUL] All the objects are deleted from the bucket:", bucket)
}
// exitErrorf prints the formatted message to stderr (with a trailing newline)
// and terminates the process with exit code 1.
func exitErrorf(msg string, args ...interface{}) {
	fmt.Fprintf(os.Stderr, msg+"\n", args...)
	os.Exit(1)
}
|
<gh_stars>0
// Barrel file: re-exports the public leaderboard APIs from a single entry point.
export * from './Leaderboard';
export * from './PeriodicLeaderboard';
export * from './LeaderboardMatrix';
|
package io.opensphere.search.controller;
import java.util.Collection;
import java.util.List;
import io.opensphere.core.MapManager;
import io.opensphere.core.TimeManager;
import io.opensphere.core.model.GeographicBoundingBox;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.search.ClearableResultsSearchProvider;
import io.opensphere.core.search.ResultsSearchProvider;
import io.opensphere.core.search.SearchProvider;
import io.opensphere.core.search.SearchRegistry;
import io.opensphere.core.search.SearchResult;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.Pair;
import io.opensphere.core.util.lang.ThreadUtilities;
import io.opensphere.search.model.SearchModel;
/**
 * Executes searches for all {@link ResultsSearchProvider} installed in the
 * system. Each provider runs on a background thread and publishes its results
 * and counts into the shared {@link SearchModel}.
 */
public class SearchExecutor
{
    /** Used to get the viewport boundaries. */
    private final MapManager mapManager;

    /** The model of the search; contains the keyword to search on. */
    private final SearchModel model;

    /** Contains all of the installed {@link SearchProvider}. */
    private final SearchRegistry searchRegistry;

    /** Supplies the time spans to perform the search for. */
    private final TimeManager timeManager;

    /**
     * Constructs a new search executor.
     *
     * @param model The model used by the search.
     * @param searchRegistry Contains all of the installed
     *            {@link SearchProvider}.
     * @param timeManager Contains the time spans to perform the search for.
     * @param mapManager Used to get the viewport boundaries.
     */
    public SearchExecutor(SearchModel model, SearchRegistry searchRegistry, TimeManager timeManager, MapManager mapManager)
    {
        this.model = model;
        this.searchRegistry = searchRegistry;
        this.timeManager = timeManager;
        this.mapManager = mapManager;
    }

    /**
     * Performs a search against all {@link ResultsSearchProvider} that are
     * enabled, using the search types currently selected in the model.
     */
    public void performSearch()
    {
        performSearch(model.getSelectedSearchTypes());
    }

    /**
     * Performs a search against all {@link ResultsSearchProvider} that are
     * enabled. Providers that do not produce results are skipped.
     *
     * @param types The search types to search for.
     */
    public void performSearch(Collection<String> types)
    {
        String keyword = model.getKeyword().get();
        Pair<LatLonAlt, LatLonAlt> viewCorners = getBoundingBox();
        TimeSpan activeSpan = getTimeSpan();

        for (SearchProvider provider : searchRegistry.getProviders(types))
        {
            if (provider instanceof ResultsSearchProvider)
            {
                ResultsSearchProvider searchable = (ResultsSearchProvider)provider;
                // Each provider searches on its own background thread.
                ThreadUtilities.runBackground(() -> performSearch(searchable, keyword, viewCorners.getFirstObject(),
                        viewCorners.getSecondObject(), activeSpan));
            }
        }
    }

    /**
     * Clears all the results from the previous search.
     */
    public void clearSearch()
    {
        for (SearchProvider provider : searchRegistry.getProviders(model.getSelectedSearchTypes()))
        {
            if (provider instanceof ClearableResultsSearchProvider)
            {
                ClearableResultsSearchProvider clearable = (ClearableResultsSearchProvider)provider;
                ThreadUtilities.runBackground(() -> clearable.clearResults());
            }
        }

        model.getResultCount().clear();
        model.getTotalResultCount().clear();
    }

    /**
     * Gets the viewport's bounding box to be used by the search providers.
     *
     * @return The lower-left / upper-right corners of the visible viewport.
     */
    private Pair<LatLonAlt, LatLonAlt> getBoundingBox()
    {
        GeographicBoundingBox visible = myVisibleBox();
        LatLonAlt lowerLeft = visible.getLowerLeft().getLatLonAlt();
        LatLonAlt upperRight = visible.getUpperRight().getLatLonAlt();
        return new Pair<>(lowerLeft, upperRight);
    }

    /**
     * Fetches the currently visible geographic bounding box from the map.
     *
     * @return the visible bounding box
     */
    private GeographicBoundingBox myVisibleBox()
    {
        return mapManager.getVisibleBoundingBox();
    }

    /**
     * Gets the timespan to be used by the search providers: the simple union
     * of all active load spans, or null when there are none.
     *
     * @return The time span, possibly null when no load spans exist.
     */
    private TimeSpan getTimeSpan()
    {
        TimeSpan union = null;
        for (TimeSpan loadSpan : New.list(timeManager.getLoadTimeSpans()))
        {
            union = union == null ? loadSpan : union.simpleUnion(loadSpan);
        }
        return union;
    }

    /**
     * Performs a search using the provider given and publishes the results
     * and per-provider counts to the model.
     *
     * @param provider The provider to use for the search.
     * @param keyword The keyword to search on.
     * @param lowerLeft The lower left of the viewport bounding box.
     * @param upperRight The upper right of the viewport bounding box.
     * @param span The timespan to perform a search for.
     */
    private void performSearch(ResultsSearchProvider provider, String keyword, LatLonAlt lowerLeft, LatLonAlt upperRight,
            TimeSpan span)
    {
        List<SearchResult> results = provider.performSearch(keyword, lowerLeft, upperRight, span);
        for (SearchResult result : results)
        {
            result.setSearchType(provider.getType());
        }

        model.getResultCount().put(provider.getType(), Integer.valueOf(results.size()));
        model.getTotalResultCount().put(provider.getType(), Integer.valueOf(provider.getTotalResultCount()));
        model.getAllResults().addAll(results);
    }
}
|
/// Batch-deletes all `type` rows matching `condition` (or every row when no
/// condition is given) and merges the deletions into the view context so
/// in-memory objects are refreshed.
///
/// - Parameters:
///   - type: the managed-object subclass whose entity is targeted.
///   - condition: optional predicate wrapper restricting which rows die.
/// - Returns: the number of objects deleted.
/// - Throws: any error raised by executing the batch delete request.
@discardableResult
func batchDelete<T: NSManagedObject>(_ type: T.Type, _ condition: WhereCondition? = nil) throws -> Int {
    let context = persistentContainer.viewContext
    let request = NSFetchRequest<NSFetchRequestResult>(entityName: T.entityName)
    if let condition = condition {
        request.predicate = condition.predicate
    }

    let deleteRequest = NSBatchDeleteRequest(fetchRequest: request)
    // Ask for the deleted object IDs so the changes can be merged back
    // into the in-memory context below.
    deleteRequest.resultType = .resultTypeObjectIDs

    // The original wrapped this in `do { ... } catch { throw error }`,
    // which is a no-op rethrow; `try` alone propagates identically.
    let result = try context.execute(deleteRequest) as? NSBatchDeleteResult
    guard let objectIDs = result?.result as? [NSManagedObjectID] else {
        return 0
    }

    let changes = [NSDeletedObjectsKey: objectIDs]
    NSManagedObjectContext.mergeChanges(fromRemoteContextSave: changes, into: [context])
    return objectIDs.count
}
<filename>Algorithms/Java/AddTwoNumbers.java
/**
* Created by huqiu on 17-3-6.
* Not simplest and elegant, but faster than Editorial Solution.
*/
class ListNode {
int val;
ListNode next;
ListNode(int x) { val = x; }
}
public class AddTwoNumbers {
public ListNode addTwoNumbers(ListNode l1, ListNode l2) {
ListNode ans = new ListNode(-1);
ListNode p = ans;
int carry = 0;
while(l1 != null && l2 != null) {
int dig = l1.val + l2.val + carry;
if (dig >= 10) {
carry = 1;
dig = dig % 10;
}
else carry = 0;
ListNode fresh = new ListNode(dig);
fresh.next = null;
p.next = fresh;
p = fresh;
l1 = l1.next;
l2 = l2.next;
}
while (l1 != null) {
int dig = l1.val + carry;
if (dig >= 10) {
carry = 1;
dig = dig % 10;
}
else carry = 0;
ListNode fresh = new ListNode(dig);
fresh.next = null;
p.next = fresh;
p = fresh;
l1 = l1.next;
}
while (l2 != null) {
int dig = l2.val + carry;
if (dig >= 10) {
carry = 1;
dig = dig % 10;
}
else carry = 0;
ListNode fresh = new ListNode(dig);
fresh.next = null;
p.next = fresh;
p = fresh;
l2 = l2.next;
}
if (carry > 0) {
ListNode fresh = new ListNode(1);
fresh.next = null;
p.next = fresh;
}
ans = ans.next;
// while(ans.next != null) {
// System.out.print(ans.val + " ");
// ans = ans.next;
// }
// System.out.print(ans.val + " ");
return ans;
}
public static void main(String[] args) {
ListNode b = new ListNode(9);
b.next = new ListNode(9);
ListNode p = b.next;
for (int i=0;i<40;i++) {
p.next = new ListNode(9);
p = p.next;
}
ListNode a = new ListNode(1);
// b.next = new ListNode(6);
// b.next.next = new ListNode(4);
// b.next.next.next = new ListNode(9);
AddTwoNumbers atn = new AddTwoNumbers();
atn.addTwoNumbers(a, b);
}
}
|
package com.github.Mrak2017.carpoketbook.monobackend;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
/** Smoke test verifying that the Spring application context boots. */
@SpringBootTest
class MonobackendApplicationTests {

	/** Passes when the full application context loads without error. */
	@Test
	void contextLoads() {
		// Intentionally empty: @SpringBootTest fails this test if wiring fails.
	}

}
|
/**
 * K1 AdHoc 3
 * Task 19: fill an array of 20 slots with random digits 0-9, then iteratively
 * determine the index of the first occurrence of each digit and store it in
 * an array; digits that never occur get -1.
 * (The original file contained only an empty stub; this implements the task.)
 */
public class Task26 {

    public static void main(String[] args) {
        // Fill 20 slots with random digits 0-9.
        int[] numbers = new int[20];
        java.util.Random random = new java.util.Random();
        for (int i = 0; i < numbers.length; i++) {
            numbers[i] = random.nextInt(10);
        }

        int[] firstIndex = firstIndices(numbers);
        System.out.println("Zahlen:  " + java.util.Arrays.toString(numbers));
        System.out.println("Indizes: " + java.util.Arrays.toString(firstIndex));
    }

    /**
     * Computes, for each digit 0-9, the index of its first occurrence in
     * {@code values}, or -1 when the digit does not occur.
     *
     * @param values digits, each in range 0-9
     * @return array of 10 first-occurrence indices
     */
    static int[] firstIndices(int[] values) {
        int[] result = new int[10];
        java.util.Arrays.fill(result, -1);
        for (int i = 0; i < values.length; i++) {
            if (result[values[i]] == -1) {
                result[values[i]] = i;
            }
        }
        return result;
    }
}
|
<filename>main.js
/*
*
* Parse Action Descriptor Code
* Author: <NAME> (https://github.com/JavierAroche)
*
*/
// Cache references to the UI controls once at startup.
const parseBtn = document.getElementById('parse');
const clearBtn = document.getElementById('clear');
const input = document.getElementById('input');
const inputCount = document.getElementById('inputCount');
const output = document.getElementById('output');
const outputTextArea = document.getElementById('outputTextArea');
const outputCount = document.getElementById('outputCount');
const dropDown = document.getElementById('dropDown');
const cleanVariables = document.getElementById('cleanVariables');
const cleanParams = document.getElementById('cleanParams');
const copyBtn = document.getElementById('copy');

// Start with the cursor in the input box.
input.focus();

/*
 * @public
 * Parse button click event: runs the selected transformation over the input,
 * mirrors the result into the plain textarea (for copying) and the
 * Prism-highlighted output pane, then refreshes the line counters.
 */
parseBtn.addEventListener('click', () => {
	let data = input.value;
	let parsedData = data;

	// Optional pre-pass: renumber desc/ref/list variables from 1.
	if(cleanVariables.checked) {
		parsedData = parser.cleanVariables(parsedData);
	}

	switch(dropDown.value) {
		case 'cleanJSX':
			parsedData = parser.cleanJSX(parsedData);
			break;
		case 'sortIDs':
			parsedData = parser.sortIDs(parsedData);
			break;
		case 'createFunction':
			let functionName = document.getElementById('functionName').value;
			// createFunction expects cleaned code, so clean first.
			parsedData = parser.cleanJSX(parsedData);
			parsedData = parser.createFunction(parsedData, functionName);
			break;
	}

	parser.clipboard = parsedData;
	outputTextArea.value = parsedData;
	output.innerHTML = Prism.highlight(parsedData, Prism.languages.javascript);
	parser.getLineCounts();
});

/*
 * @public
 * Get line count on change.
 */
input.addEventListener('input', () => {
	parser.getLineCounts();
})

/*
 * @public
 * Clear button click event: empties the input and refreshes the counters.
 */
clearBtn.addEventListener('click', () => {
	input.value = '';
	parser.getLineCounts();
});

/*
 * @public
 * Copy to clipboard button click event.
 * NOTE(review): document.execCommand is deprecated; navigator.clipboard is
 * the modern replacement — confirm target-browser requirements before changing.
 */
copyBtn.addEventListener('click', () => {
	outputTextArea.select();
	document.execCommand("Copy");
});
/**
 * Transforms Photoshop ScriptListener / Action Descriptor code: shortening
 * it, sorting ID declarations, or wrapping it into a parameterized function.
 * NOTE(review): `constants` (used in replaceConstant) and `Prism` are
 * expected to be defined by other scripts on the page — confirm load order.
 */
class Parser {
	/*
	 * Constructor
	 */
	constructor() {
		// Last parsed result, kept so the copy button can reuse it.
		this.clipboard = '';
	}

	/*
	 * Remove unnecessary charID and stringID variables for a shorter code.
	 * Collects every `var idX = charIDToTypeID(...)/stringIDToTypeID(...)`
	 * declaration, drops the declaration line, and inlines the expression
	 * at each use site.
	 */
	cleanJSX(data) {
		let lines = data.split('\n');
		let variables = {};
		let actionLines = [];

		// Identify lines with charIDs or stringIDs
		lines.forEach(line => {
			if(line.match('var') && (line.match('charIDToTypeID') || line.match('stringIDToTypeID'))) {
				// Map variable name -> the ...ToTypeID(...) expression it held.
				let variableName = line.replace(/[\s+]*var /, '').replace(/ =.+/, '');
				let id = line.replace(/[\s+]*.+= /, '').replace(';', '');
				variables[variableName] = id;
			} else if(line !== '') {
				let cleanLine;
				// Strip leading whitespace from indented lines.
				// NOTE(review): both comparisons look like a space literal here;
				// the second was probably meant to be a tab — confirm in the repo.
				if(line[0] === ' ' || line[0] === ' ') {
					cleanLine = line.replace(/\s+/, '');
				} else {
					cleanLine = line;
				}
				actionLines.push(cleanLine);
			}
		});

		// Cleanup lines: replace each idXxx reference with its stored expression.
		let parsedLines = [];
		actionLines.forEach(actionLine => {
			let idNames = actionLine.match(/id\w+/g);
			let parsedLine = actionLine;
			if(idNames && idNames.length > 0) {
				idNames.forEach(function(idName) {
					parsedLine = parsedLine.replace(idName, variables[idName]);
				});
			}
			parsedLines.push(parsedLine);
		});

		return parsedLines.join('\n');
	}

	/*
	 * Sort IDs by placing them at the top for easier readibility.
	 * Declarations keep their expressions; only their position changes.
	 */
	sortIDs(data) {
		let lines = data.split('\n');
		let variables = {};
		let actionLines = [];

		lines.forEach(line => {
			if(line.match('var') && (line.match('charIDToTypeID') || line.match('stringIDToTypeID'))) {
				let variableName = line.replace(/[\s+]*var /, '').replace(/ =.+/, '');
				let id = line.replace(/[\s+]*.+= /, '').replace(';', '');
				variables[variableName] = id;
			} else {
				let cleanLine = line;
				if(cleanLine[0] === ' ') {
					cleanLine = cleanLine.replace(/\s+/, '');
				}
				actionLines.push(cleanLine);
			}
		});

		// Re-emit all collected declarations first, then the remaining code.
		let varIDs = '';
		for(let i in variables) {
			varIDs = varIDs + 'var ' + i + ' = ' + variables[i] + ';' + '\n';
		};

		return varIDs + '\n' + actionLines.join('\n');
	}

	/*
	 * Create a function based on the action descriptor code.
	 * Extracted literal values become properties of a `params` object and
	 * the descriptor code is wrapped in `function <functionName>(params)`.
	 */
	createFunction(data, functionName) {
		let lines = data.split('\n');
		let variables = [];
		let lists = {};
		let parsedLines = [];
		let lineSplit, lineValue, lineProperty, parsedLine, listNumber;

		lines.forEach(line => {
			if(line.match(/list|ref/) && line.match(/putIndex|putInteger|putIdentifier/)) {
				// Get list/ref number and capitalize first letter
				listNumber = line.match(/\w+/)[0];
				// Get value
				lineValue = Number(line.match(/ \d+ /)[0]);
				if(!lists.hasOwnProperty(listNumber)) {
					lists[listNumber] = [];
				}
				lists[listNumber].push(lineValue);
				// Replace found value with param value
				parsedLine = line.replace(/ \d+ /, ` params.${listNumber}[${lists[listNumber].length - 1}] `);
				parsedLines.push(parsedLine);
			} else if(line.match(/putBoolean|putUnitDouble|putDouble|putInteger|putIdentifier|putIndex|putString|putName|putPath/)) {
				lineSplit = line.split(', ');
				// Get value and property name
				lineValue = lineSplit[lineSplit.length - 1].replace(/\);/, '');
				lineProperty = lineSplit[0].replace(/.+(stringIDToTypeID|charIDToTypeID)/, '').match(/\w+/)[0];
				if(cleanParams.checked) {
					lineProperty = this.replaceConstant(lineProperty);
				}
				// Add to array which will become the params object
				variables.push(`${lineProperty}: ${lineValue.replace(/"""/g,'"')}`);
				// Replace found value with param value
				parsedLine = line.replace(lineValue, `params.${lineProperty}`);
				parsedLines.push(parsedLine);
			} else {
				parsedLines.push(line);
			}
		});

		// Add lists as variables
		for(let key in lists) {
			variables.push(`${key}: [${lists[key].join(',')}]`);
		}

		// Create function string
		let functionString =
`function ${functionName}(params) {
	${parsedLines.join('\n	')}
}
var params = {
	${variables.join(',\n	')}
};
${functionName}(params);`;

		return functionString;
	}

	/*
	 * Replace constant from list.
	 * Falls back to lower-camel-casing the name when no mapping exists.
	 */
	replaceConstant(variable) {
		if(constants.hasOwnProperty(variable)) {
			return constants[variable];
		} else {
			return variable.charAt(0).toLowerCase() + variable.slice(1);
		}
	}

	/*
	 * Clean variables to start count from 1.
	 * Renames every ActionDescriptor/ActionReference/ActionList variable to
	 * desc1..n / ref1..n / list1..n in order of appearance.
	 */
	cleanVariables(data) {
		let lines = data.split('\n');
		let variables = {};
		let actionLines = [];
		let descCount = 1;
		let refCount = 1;
		let listCount = 1;

		// Identify lines with variables
		lines.forEach(line => {
			if(line.match('var') && (line.match('ActionDescriptor') || line.match('ActionReference') || line.match('ActionList'))) {
				actionLines.push(line);
				let variableName = line.replace(/[\s+]*var /, '').replace(/ =.+/, '');
				let newVariableName;

				if(line.match('ActionDescriptor')) {
					newVariableName = 'desc' + descCount;
					descCount = descCount + 1;
				} else if(line.match('ActionReference')) {
					newVariableName = 'ref' + refCount;
					refCount = refCount + 1;
				} else if(line.match('ActionList')) {
					newVariableName = 'list' + listCount;
					listCount = listCount + 1;
				}

				variables[variableName] = newVariableName;
			} else {
				let cleanLine = line;
				if(cleanLine[0] === ' ') {
					cleanLine = cleanLine.replace(/\s+/, '');
				}
				actionLines.push(cleanLine);
			}
		});

		// Cleanup variable names
		let parsedLines = [];
		actionLines.forEach(actionLine => {
			let parsedLine = actionLine;

			let descVars = actionLine.match(/desc\w+/g);
			if(descVars && descVars.length > 0) {
				descVars.forEach(function(descVar) {
					parsedLine = parsedLine.replace(descVar, variables[descVar]);
				});
			}

			let refVars = actionLine.match(/ref\w+/g);
			if(refVars && refVars.length > 0) {
				refVars.forEach(function(refVar) {
					parsedLine = parsedLine.replace(refVar, variables[refVar]);
				});
			}

			let listVars = actionLine.match(/list\w+/g);
			if(listVars && listVars.length > 0) {
				listVars.forEach(function(listVar) {
					parsedLine = parsedLine.replace(listVar, variables[listVar]);
				});
			}

			parsedLines.push(parsedLine);
		});

		return parsedLines.join('\n');
	}

	/*
	 * Get line counts.
	 * Updates the input/output line-counter labels in the UI.
	 * NOTE(review): output.innerHTML contains Prism-highlighted markup, not
	 * the raw code — line counts match only because newlines are preserved.
	 */
	getLineCounts() {
		let inputData = input.value;
		let inputLines = [];
		if(inputData !== '') {
			inputLines = inputData.split('\n');
		}

		let outputData = output.innerHTML;
		let outputLines = [];
		if(outputData !== '') {
			outputLines = outputData.split('\n');
		}

		inputCount.innerHTML = inputLines.length;
		outputCount.innerHTML = outputLines.length;
	}
}
// Shared parser instance used by the button event handlers.
let parser = new Parser();
|
<reponame>Eugene-Fedorenko/prebid-server
package orbidder
import (
"encoding/json"
"testing"
"github.com/eugene-fedorenko/prebid-server/adapters/adapterstest"
"github.com/eugene-fedorenko/prebid-server/config"
"github.com/eugene-fedorenko/prebid-server/openrtb_ext"
"github.com/stretchr/testify/assert"
)
// TestUnmarshalOrbidderExtImp checks that the imp-extension JSON maps onto
// ExtImpOrbidder field by field.
func TestUnmarshalOrbidderExtImp(t *testing.T) {
	raw := json.RawMessage(`{"accountId":"orbidder-test", "placementId":"center-banner", "bidfloor": 0.1}`)

	parsed := new(openrtb_ext.ExtImpOrbidder)
	assert.NoError(t, json.Unmarshal(raw, parsed))

	expected := &openrtb_ext.ExtImpOrbidder{
		AccountId:   "orbidder-test",
		PlacementId: "center-banner",
		BidFloor:    0.1,
	}
	assert.Equal(t, expected, parsed)
}
func TestJsonSamples(t *testing.T) {
bidder, buildErr := Builder(openrtb_ext.BidderOrbidder, config.Adapter{
Endpoint: "https://orbidder-test"})
if buildErr != nil {
t.Fatalf("Builder returned unexpected error %v", buildErr)
}
adapterstest.RunJSONBidderTest(t, "orbiddertest", bidder)
}
|
<reponame>uk-gov-mirror/companieshouse.ch-account-ui<gh_stars>0
import PropTypes from 'prop-types'
import React from 'react'
import Footer from '../components/general-ui/Footer'
import CookieBanners from '../components/general-ui/interaction/CookieBanners'
import { CookiesProvider } from 'react-cookie'
/**
 * Custom Next.js App: wraps every page with cookie handling and the shared
 * footer, and boots the GOV.UK Frontend scripts on the client.
 */
function MyApp ({ Component, pageProps }) {
  React.useEffect(() => {
    // Load govuk-frontend only in the browser (it touches `document`).
    import('govuk-frontend').then(({ initAll }) => {
      // Swap the js-disabled marker class for js-enabled so progressively
      // enhanced styles/components activate.
      document.body.className = document.body.className ? document.body.className.replace('js-disabled', '') : ''
      document.body.className = document.body.className ? document.body.className + ' js-enabled' : 'js-enabled'
      initAll()
    })
  }, [])

  return (<CookiesProvider>
    <CookieBanners />
    <Component {...pageProps} />
    <Footer />
  </CookiesProvider>)
}
export default MyApp

// Loose prop contract: Next.js supplies the active page component and its props.
MyApp.propTypes = {
  Component: PropTypes.any,
  pageProps: PropTypes.any
}
|
<filename>serviceProvider/src/main/java/com/dam/provider/ConfigProperties.java
package com.dam.provider;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import com.dam.exception.CsServiceException;
import com.dam.provider.rest.consumer.Client;
import com.fasterxml.jackson.databind.JsonNode;
@ConfigurationProperties(prefix = "service")
public class ConfigProperties {
@Autowired
Client client;
@Value("${server.port}")
String serverPort;
@Value("${service.user.name}")
private String userName;
@Value("${service.user.password}")
private String password;
@Value("${service.configuration.port}")
private String configServicePort;
@Value("${service.configuration.protocol}")
private String configServiceProtocol;
@Value("${service.configuration.host}")
private String configServiceHost;
@Value("${service.authentication.port}")
private String authenticationServicePort;
@Value("${service.authentication.protocol}")
private String authenticationServiceProtocol;
@Value("${service.authentication.host}")
private String authenticationServiceHost;
@Value("${token.cache.maxage}")
private Long tokenCacheMaxAge;
@Value("${token.cache.active}")
private Boolean tokenCacheActive;
private Map<String, Domain> domainList = new HashMap<>();
private JsonHelper jsonHelper = new JsonHelper();
private static boolean initialized = false;
public ConfigProperties() {
}
public void init() throws CsServiceException {
if (initialized) {
return;
}
JsonNode loginData = login();
readDomainConfigListFromDb(loginData);
initialized = true;
}
private JsonNode login() throws CsServiceException {
try {
JsonNode loginBody = jsonHelper.createEmptyNode();
loginBody = jsonHelper.addToJsonNode(loginBody, "userName", this.userName);
loginBody = jsonHelper.addToJsonNode(loginBody, "password", <PASSWORD>.password);
ResponseEntity<JsonNode> responseEntity = client.postLogin(loginBody);
return responseEntity.getBody();
} catch (CsServiceException cse) {
System.out.println(
"Login fehl geschlagen. Service Provider konnte nicht authentifiziert werden. Der Service wird beendet.");
System.exit(500);
return null;
}
}
/**
 * Fetches the "domain" configuration(s) from the configuration service and
 * registers every parsed {@link Domain} in {@code domainList}, keyed by name.
 * The response either carries a list ("configurations") or a single entry
 * ("configuration"); both paths now share one parsing helper.
 *
 * @param loginData login response containing "userId" and "tokenId"
 * @throws CsServiceException when the configuration service call fails
 */
private void readDomainConfigListFromDb(JsonNode loginData) throws CsServiceException {
    Long userId = jsonHelper.extractLongFromNode(loginData, "userId");
    String tokenId = jsonHelper.extractStringFromJsonNode(loginData, "tokenId");
    String key = "domain";
    Map<String, String> requestParams = new HashMap<>();
    Map<String, String> headers = new HashMap<>();
    requestParams.put("userId", userId.toString());
    requestParams.put("key", key);
    headers.put("requestoruserid", userId.toString());
    headers.put("tokenid", tokenId);
    headers.put("rights", "RWD+RWD");
    String action = "getConfiguration";
    String URI = configServiceProtocol + "://" + configServiceHost + ":" + configServicePort + "/" + action;
    ResponseEntity<JsonNode> responseEntity = client.sendMessageWithBodyAsOptional(URI, null, requestParams,
            headers, HttpMethod.GET);
    JsonNode responseNode = responseEntity.getBody();
    Boolean isList = jsonHelper.extractBooleanFromNode(responseNode, "isList");
    if (isList) {
        JsonNode domainListNode = jsonHelper.extractNodeFromNode(responseNode, "configurations");
        List<Object> domainNodes = jsonHelper.toArray(domainListNode, JsonNode.class);
        for (Object obj : domainNodes) {
            registerDomainFromConfigNode(JsonNode.class.cast(obj));
        }
    } else {
        registerDomainFromConfigNode(jsonHelper.extractNodeFromNode(responseNode, "configuration"));
    }
}

/**
 * Parses the "value" payload of one configuration node into a {@link Domain}
 * and stores it in {@code domainList}. Best effort: a malformed entry is
 * logged and skipped so the remaining configurations still load.
 */
private void registerDomainFromConfigNode(JsonNode configNode) {
    try {
        String domainNodeAsString = jsonHelper.extractNodeFromNode(configNode, "value").toString();
        domainNodeAsString = prepareJsonString(domainNodeAsString);
        JsonNode domainNode = jsonHelper.getObjectMapper().readTree(domainNodeAsString);
        Domain domain = jsonHelper.getObjectMapper().treeToValue(domainNode, Domain.class);
        domainList.put(domain.getDomainName(), domain);
    } catch (Exception e) {
        // Deliberately swallow per-entry parse failures (best effort), but log them.
        e.printStackTrace();
    }
}
/**
 * Normalizes a configuration "value" that was stored as an escaped,
 * string-wrapped JSON object: strips escape backslashes and unwraps
 * quoted braces so the result parses as a plain JSON object.
 */
private String prepareJsonString(String jsonString) {
    // replace() is a no-op when the target is absent, so the former
    // contains() guards are unnecessary.
    return jsonString.trim()
            .replace("\\", "")
            .replace("\"{", "{")
            .replace("}\"", "}");
}
/** Base URL of the authentication service, e.g. {@code http://host:port}. */
public String getAuthenticationUrl() {
    return String.format("%s://%s:%s", this.authenticationServiceProtocol,
            this.authenticationServiceHost, this.authenticationServicePort);
}
/**
 * Resolves the service URL for a configured domain.
 *
 * @return the domain's URL, or {@code null} when the domain is unknown
 */
public String getServiceUrl(String domainName) {
    Domain domain = domainList.get(domainName);
    return (domain == null) ? null : domain.getUrl();
}
/** Returns the configured server port. */
public String getServerPort() {
return serverPort;
}
/** Overrides the configured server port. */
public void setServerPort(String serverPort) {
this.serverPort = serverPort;
}
/** Maximum age for cached tokens. */
public Long getTokenCacheMaxAge() {
return tokenCacheMaxAge;
}
/** Whether the token cache is enabled. */
public Boolean getTokenCacheActive() {
return tokenCacheActive;
}
}
|
package ex;
import java.util.Scanner;
/*
* Topic: ISBN(International Standard Book Number) 是一種世界共通的書籍編碼方法,世界上任 何一本書籍之出版,皆有著唯一的一組 ISBN 碼。此碼由十個位數組成,每一位數可 以為 0~9 的任何一個數字,或者為 X ,代表此位數為 10 。其判斷方法如下,首先, 將此 ISBN 碼的十個位數分開,自左而右依次為第一位數,第二位數至第十位數,接 著進行第一次的累加,使得第二位數成為第一位數到第二位數的和,第三位數為第一 位數到第三位數的累加和,第十位數為第一位數到第十位數的累加和;進行完第一次 的累加和後,接著再依照相同之方法進行第二次的累加動作,我們稱此時最後所求得 之累加和為此 ISBN 碼之識別碼,倘若此識別碼為 11 的倍數,則此 ISBN 碼為合法 的。例如,若輸入之 ISBN 碼為 0 1 3 1 6 2 9 5 9 X ,則經由計算可得其識別碼為 165 ,乃是 11 之倍數,故此為一合法之 ISBN 碼。輸入一串 ISBN 碼,以空格隔開。
* Date: 2016/12/05
* Author: 103051089 林冠磊
*/
public class ex01_103051089 {

    /**
     * Builds the ISBN-10 check table: row 0 holds the ten digits ('X' == 10),
     * row 1 their running (prefix) sums, and row 2 the running sums of row 1.
     * The ISBN is valid when the last entry of row 2 is divisible by 11.
     *
     * @param digits the ten ISBN digits, each in 0..10
     * @return the 3 x 10 accumulation table
     */
    static int[][] accumulate(int[] digits) {
        int[][] table = new int[3][digits.length];
        table[0] = digits.clone();
        for (int i = 1; i < table.length; i++) {
            // Bug fix: column 0 must carry the first digit down from the row
            // above. The original left it at 0, so every prefix sum was short
            // by digits[0] and any ISBN starting with a nonzero digit was
            // validated incorrectly.
            table[i][0] = table[i - 1][0];
            for (int j = 1; j < digits.length; j++) {
                table[i][j] = table[i][j - 1] + table[i - 1][j];
            }
        }
        return table;
    }

    public static void main(String[] args) {
        // Reads one line of ten space-separated ISBN digits ('X' means 10),
        // prints the accumulation table and "Pass" when the code is valid.
        Scanner sc = new Scanner(System.in);
        String str = sc.nextLine();
        sc.close(); // fix: close the stdin scanner (resource leak)
        int[] digits = new int[10];
        int k = 0;
        // Digits are separated by a single space, hence the stride of 2.
        for (int count = 0; count < str.length(); count += 2) {
            digits[k++] = (str.charAt(count) == 'X') ? 10 : str.charAt(count) - '0';
        }
        int[][] ISBNdata = accumulate(digits);
        for (int i = 0; i < ISBNdata.length; i++) {
            for (int j = 0; j < ISBNdata[0].length; j++) {
                System.out.print(ISBNdata[i][j] + "\t");
            }
            System.out.println();
        }
        if (ISBNdata[2][9] % 11 == 0) {
            System.out.println("Pass");
        }
    }
}
|
package main
import (
"os"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/zoekt"
"github.com/google/zoekt/build"
)
// TestMergeMeta builds four single-repo shards, verifies mergeMeta can
// rewrite the metadata of a simple shard, then merges everything into one
// compound shard and verifies mergeMeta can rewrite metadata there too.
func TestMergeMeta(t *testing.T) {
	dir := t.TempDir()
	repoNames := []string{"repo0", "repo1", "repo2", "repo3"}
	var repoFns []string
	// Index one tiny file per repository, all initially marked public.
	for _, name := range repoNames {
		opts := build.Options{
			IndexDir: dir,
			RepositoryDescription: zoekt.Repository{
				Name: name,
				RawConfig: map[string]string{
					"public": "1",
				},
			},
		}
		opts.SetDefaults()
		b, err := build.NewBuilder(opts)
		if err != nil {
			t.Fatalf("NewBuilder: %v", err)
		}
		if err := b.AddFile("F", []byte(strings.Repeat("abc", 100))); err != nil {
			t.Fatalf("AddFile: %v", err)
		}
		if err := b.Finish(); err != nil {
			t.Errorf("Finish: %v", err)
		}
		repoFns = append(repoFns, opts.FindAllShards()...)
	}
	// update meta on repo3 then test it changed
	opts := &build.Options{
		IndexDir: dir,
		RepositoryDescription: zoekt.Repository{
			Name: "repo3",
			RawConfig: map[string]string{
				"public": "0",
			},
		},
	}
	opts.SetDefaults()
	if err := mergeMeta(opts); err != nil {
		t.Fatal(err)
	}
	repos, _, _ := zoekt.ReadMetadataPath(repoFns[3])
	if got, want := repos[0].RawConfig["public"], "0"; got != want {
		t.Fatalf("failed to update metadata of repo3. Got public %q want %q", got, want)
	}
	// create a compound shard. Use a new indexdir to avoid the need to cleanup
	// old shards.
	dir = t.TempDir()
	tmpFn, dstFn, err := merge(dir, repoFns)
	if err != nil {
		t.Fatal(err)
	}
	// Merge writes to a temp file; promote it to its final name.
	if err := os.Rename(tmpFn, dstFn); err != nil {
		t.Fatal(err)
	}
	// readPublic reports the "public" flag of every repo in the compound
	// shard, in shard order.
	readPublic := func() []string {
		var public []string
		repos, _, _ := zoekt.ReadMetadataPath(dstFn)
		for _, r := range repos {
			public = append(public, r.RawConfig["public"])
		}
		return public
	}
	if d := cmp.Diff([]string{"1", "1", "1", "0"}, readPublic()); d != "" {
		t.Fatalf("wanted only repo3 to be marked private:\n%s", d)
	}
	// Update a repo1 in compound shard to be private
	opts = &build.Options{
		IndexDir: dir,
		RepositoryDescription: zoekt.Repository{
			Name: "repo1",
			RawConfig: map[string]string{
				"public": "0",
			},
		},
	}
	opts.SetDefaults()
	if err := mergeMeta(opts); err != nil {
		t.Fatal(err)
	}
	if d := cmp.Diff([]string{"1", "0", "1", "0"}, readPublic()); d != "" {
		t.Fatalf("wanted only repo1 to be marked private:\n%s", d)
	}
}
// merge opens every named shard file and merges them into a compound shard
// under dstDir, returning zoekt.Merge's (tmp path, final path) pair. All
// opened files and index handles stay open until merge returns (deferred
// closes), which is fine for the small shard counts used in tests.
func merge(dstDir string, names []string) (string, string, error) {
	files := make([]zoekt.IndexFile, 0, len(names))
	for _, name := range names {
		shard, err := os.Open(name)
		if err != nil {
			return "", "", err
		}
		defer shard.Close()
		indexFile, err := zoekt.NewIndexFile(shard)
		if err != nil {
			return "", "", err
		}
		defer indexFile.Close()
		files = append(files, indexFile)
	}
	return zoekt.Merge(dstDir, files...)
}
|
#!/bin/bash
# Copies "good" sat SMT2 benchmarks from -i <input> to -o <output>, pruning
# benchmarks whose sampling results (under -p <prune_by>) show fewer than
# MIN_SAMPLES valid samples, and collecting benchmarks whose sampling run
# apparently crashed into -e <exceptions>.
MIN_SAMPLES=100

OPTIND=1
while getopts "o:i:p:e:" opt; do
	case ${opt} in
		o)
			output="${OPTARG}"
			;;
		i)
			input="${OPTARG}"
			;;
		p)
			prune_by="${OPTARG}"
			;;
		e)
			exceptions="${OPTARG}"
			;;
		*)
			echo "Unknown parameter"
			exit 1
			;;
	esac
done
shift "$((OPTIND-1))"

# Validate arguments: input/prune_by must be existing dirs, output/exceptions
# must be given and must not exist yet.
[ -z "${input}" ] || [ ! -d "${input}" ] && { echo "Bad input: ${input}"; exit 1; }
[ -z "${output}" ] && { echo "No output"; exit 1; }
[ -z "${exceptions}" ] && { echo "No exceptions"; exit 1; }
[ -z "${prune_by}" ] || [ ! -d "${prune_by}" ] && { echo "Bad prune by dir: ${prune_by}"; exit 1; }
[ -e "${output}" ] && { echo "Output already exists!"; exit 1; }
[ -e "${exceptions}" ] && { echo "Exceptions already exists!"; exit 1; }

mkdir -p "${output}"
mkdir -p "${exceptions}"

shopt -s globstar nullglob
for f in "${input}"/**/*.smt2; do
	benchmark="$(realpath --relative-to="${input}" "${f}")"
	json="${prune_by}/${benchmark}.json"
	samples="${prune_by}/${benchmark}.samples"
	# Matches ":status sat" but not ":status unsat" ([^n] rejects the 'n').
	sat=$(grep -c ":status.*[^n]sat" "${input}/${benchmark}")
	if [ "${sat}" -eq 0 ]; then
		# its unsat or unknown, ignore
		continue
	fi
	if [ -e "${samples}" ] && [ ! -e "${json}" ]; then
		echo "${benchmark}: Found samples file but no JSON, uncaught exception or OOM killer?"
		mkdir -p "$(dirname "${exceptions}/${benchmark}")"
		cp -a "${input}/${benchmark}" "${exceptions}/${benchmark}"
		continue
	fi
	if [ ! -e "${json}" ]; then
		echo "${benchmark}: No JSON, yet sat -- didn't run?"
		mkdir -p "$(dirname "${exceptions}/${benchmark}")"
		cp -a "${input}/${benchmark}" "${exceptions}/${benchmark}"
		continue
	fi
	nsamples=$(jq '.["valid samples"]' "${json}")
	# Fix: jq prints the literal string "null" when the key is missing, which
	# previously made the numeric -lt comparison fail with a syntax error.
	if [ -z "${nsamples}" ] || [ "${nsamples}" = "null" ] || [ "${nsamples}" -lt "${MIN_SAMPLES}" ]; then
		echo "${benchmark}: Valid samples=<${nsamples}> < ${MIN_SAMPLES}, pruning"
		continue
	fi
	echo "${benchmark}: Good. :)"
	mkdir -p "$(dirname "${output}/${benchmark}")"
	cp -a "${input}/${benchmark}" "${output}/${benchmark}"
done
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2314-1
#
# Security announcement date: 2011-09-29 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:18 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: x86_64
#
# Vulnerable packages fixed in version:
# - puppet:2.6.2-5+squeeze1
#
# Last versions recommended by security team:
# - puppet:2.6.2-5+squeeze10
#
# CVE List:
# - CVE-2011-3848
# - CVE-2011-3870
# - CVE-2011-3869
# - CVE-2011-3871
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade puppet=2.6.2-5+squeeze10 -y
|
// Copyright 2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ozone/platform/ozone_wayland_window.h"
#include <vector>
#include "base/bind.h"
#include "ozone/platform/messages.h"
#include "ozone/platform/ozone_gpu_platform_support_host.h"
#include "ozone/platform/window_manager_wayland.h"
#include "ui/base/cursor/ozone/bitmap_cursor_factory_ozone.h"
#include "ui/events/ozone/events_ozone.h"
#include "ui/events/platform/platform_event_source.h"
#include "ui/gfx/screen.h"
#include "ui/platform_window/platform_window_delegate.h"
namespace ui {
// Creates the window shell: allocates a process-unique accelerated-widget
// handle and immediately hands it to the delegate.
OzoneWaylandWindow::OzoneWaylandWindow(PlatformWindowDelegate* delegate,
                                       OzoneGpuPlatformSupportHost* sender,
                                       WindowManagerWayland* window_manager,
                                       const gfx::Rect& bounds)
    : delegate_(delegate),
      sender_(sender),
      window_manager_(window_manager),
      transparent_(false),
      bounds_(bounds),
      parent_(0),
      state_(UNINITIALIZED),
      region_(NULL),
      cursor_type_(-1) {
  // Handles come from a function-local counter: unique per process, never
  // reused, first handle is 1.
  static int opaque_handle = 0;
  opaque_handle++;
  handle_ = opaque_handle;
  delegate_->OnAcceleratedWidgetAvailable(opaque_handle, 1.0);
}
// Unregisters from the GPU channel and the platform event source, then
// releases the window-shape region.
OzoneWaylandWindow::~OzoneWaylandWindow() {
  sender_->RemoveChannelObserver(this);
  PlatformEventSource::GetInstance()->RemovePlatformEventDispatcher(this);
  // delete on null is a no-op, so the former `if (region_)` guard was
  // redundant.
  delete region_;
}
// Finishes window construction once the window type is known: picks the
// Wayland window type, resolves the parent for transient windows, and
// registers with the window manager and the GPU channel.
void OzoneWaylandWindow::InitPlatformWindow(
    PlatformWindowType type, gfx::AcceleratedWidget parent_window) {
  PlatformEventSource::GetInstance()->AddPlatformEventDispatcher(this);
  switch (type) {
    case PLATFORM_WINDOW_TYPE_POPUP:
    case PLATFORM_WINDOW_TYPE_MENU: {
      // Popups/menus are parented to the caller's window, falling back to
      // the currently active window when none was supplied.
      parent_ = parent_window;
      if (!parent_ && window_manager_->GetActiveWindow())
        parent_ = window_manager_->GetActiveWindow()->GetHandle();
      type_ = ui::POPUP;
      ValidateBounds();
      window_manager_->OnRootWindowCreated(this);
      break;
    }
    case PLATFORM_WINDOW_TYPE_TOOLTIP: {
      parent_ = parent_window;
      if (!parent_ && window_manager_->GetActiveWindow())
        parent_ = window_manager_->GetActiveWindow()->GetHandle();
      type_ = ui::TOOLTIP;
      // Tooltips are positioned later via SetBounds/ValidateBounds.
      bounds_.set_origin(gfx::Point(0, 0));
      break;
    }
    case PLATFORM_WINDOW_TYPE_BUBBLE:
    case PLATFORM_WINDOW_TYPE_WINDOW:
      // Top-level windows have no parent and are tracked as root windows.
      parent_ = 0;
      type_ = ui::WINDOW;
      window_manager_->OnRootWindowCreated(this);
      break;
    case PLATFORM_WINDOW_TYPE_WINDOW_FRAMELESS:
      NOTIMPLEMENTED();
      break;
    default:
      break;
  }
  sender_->AddChannelObserver(this);
}
// Caches the title locally; it is only forwarded when the GPU channel is up
// (OnChannelEstablished replays it otherwise).
void OzoneWaylandWindow::SetTitle(const base::string16& title) {
  title_ = title;
  if (sender_->IsConnected())
    sender_->Send(new WaylandDisplay_Title(handle_, title_));
}
// Clips |path| against the window bounds and installs it as the window's
// shape region; transparent windows keep no region at all.
void OzoneWaylandWindow::SetWindowShape(const SkPath& path) {
  ResetRegion();
  if (transparent_)
    return;
  region_ = new SkRegion();
  SkRegion clip_region;
  clip_region.setRect(0, 0, bounds_.width(), bounds_.height());
  region_->setPath(path, clip_region);
  AddRegion();
}
// Treats opacity as a binary opaque/transparent toggle: fully opaque (255)
// restores the shape region, anything else removes it.
void OzoneWaylandWindow::SetOpacity(unsigned char opacity) {
  const bool opaque = (opacity == 255);
  if (opaque && transparent_) {
    AddRegion();
    transparent_ = false;
  } else if (!opaque && !transparent_) {
    ResetRegion();
    transparent_ = true;
  }
}
// Asks the Wayland side for the current drag payload in |mime_type|.
void OzoneWaylandWindow::RequestDragData(const std::string& mime_type) {
  sender_->Send(new WaylandDisplay_RequestDragData(mime_type));
}
// Asks the Wayland side for the current selection contents in |mime_type|.
void OzoneWaylandWindow::RequestSelectionData(const std::string& mime_type) {
  sender_->Send(new WaylandDisplay_RequestSelectionData(mime_type));
}
// Tells the compositor the drag identified by |serial| will be accepted as
// |mime_type|.
void OzoneWaylandWindow::DragWillBeAccepted(uint32_t serial,
                                            const std::string& mime_type) {
  sender_->Send(new WaylandDisplay_DragWillBeAccepted(serial, mime_type));
}
// Tells the compositor the drag identified by |serial| will be rejected.
void OzoneWaylandWindow::DragWillBeRejected(uint32_t serial) {
  sender_->Send(new WaylandDisplay_DragWillBeRejected(serial));
}
// Returns the cached window bounds (browser-side copy).
gfx::Rect OzoneWaylandWindow::GetBounds() {
  return bounds_;
}
// Updates the cached bounds, re-clamps tooltips to their parent, sends a
// move to the compositor only when the origin actually changed, and always
// notifies the delegate.
void OzoneWaylandWindow::SetBounds(const gfx::Rect& bounds) {
  const int previous_x = bounds_.x();
  const int previous_y = bounds_.y();
  bounds_ = bounds;
  if (type_ == ui::TOOLTIP)
    ValidateBounds();
  const bool origin_changed =
      previous_x != bounds_.x() || previous_y != bounds_.y();
  if (origin_changed) {
    sender_->Send(new WaylandDisplay_MoveWindow(handle_, parent_,
                                                type_, bounds_));
  }
  delegate_->OnBoundsChanged(bounds_);
}
// Marks the window shown and pushes the state to the compositor.
void OzoneWaylandWindow::Show() {
  state_ = ui::SHOW;
  SendWidgetState();
}
// Hides the window. Tooltips are closed outright instead of being hidden.
void OzoneWaylandWindow::Hide() {
  state_ = ui::HIDE;
  if (type_ == ui::TOOLTIP) {
    delegate_->OnCloseRequest();
    return;
  }
  SendWidgetState();
}
// Notifies the window manager of closure; tooltips were never registered as
// root windows, so they are skipped.
void OzoneWaylandWindow::Close() {
  if (type_ != ui::TOOLTIP)
    window_manager_->OnRootWindowClosed(this);
}
// Routes all platform events to this window (see CanDispatchEvent).
void OzoneWaylandWindow::SetCapture() {
  window_manager_->GrabEvents(handle_);
}
// Releases the event grab taken in SetCapture().
void OzoneWaylandWindow::ReleaseCapture() {
  window_manager_->UngrabEvents(handle_);
}
// Resizes the window to the primary display's bounds and signals the
// fullscreen state to the compositor.
void OzoneWaylandWindow::ToggleFullscreen() {
  gfx::Screen *screen = gfx::Screen::GetScreenByType(gfx::SCREEN_TYPE_NATIVE);
  if (!screen) {
    // Fix: NOTREACHED() is typically compiled out in release builds, so
    // falling through here would dereference a null |screen| below.
    NOTREACHED() << "Unable to retrieve valid gfx::Screen";
    return;
  }
  SetBounds(screen->GetPrimaryDisplay().bounds());
  state_ = ui::FULLSCREEN;
  SendWidgetState();
}
// Marks the window maximized and pushes the state to the compositor.
void OzoneWaylandWindow::Maximize() {
  state_ = ui::MAXIMIZED;
  SendWidgetState();
}
// Minimizes the window. The bounds are reset to an empty rect first;
// presumably the compositor supplies real bounds on restore -- confirm.
void OzoneWaylandWindow::Minimize() {
  SetBounds(gfx::Rect());
  state_ = ui::MINIMIZED;
  SendWidgetState();
}
// Restores the window via the window manager and pushes the RESTORE state.
void OzoneWaylandWindow::Restore() {
  window_manager_->Restore(this);
  state_ = ui::RESTORE;
  SendWidgetState();
}
// Adopts |cursor| as the window cursor, keeping a ref to its bitmap form.
// The wire update is deferred until the GPU channel is connected
// (OnChannelEstablished replays it).
void OzoneWaylandWindow::SetCursor(PlatformCursor cursor) {
  // Skip work when the cursor is unchanged.
  if (window_manager_->GetPlatformCursor() == cursor)
    return;
  scoped_refptr<BitmapCursorOzone> bitmap =
      BitmapCursorFactoryOzone::GetBitmapCursor(cursor);
  bitmap_ = bitmap;
  window_manager_->SetPlatformCursor(cursor);
  if (!sender_->IsConnected())
    return;
  SetCursor();
}
// Warps the pointer to |location| on the Wayland side.
void OzoneWaylandWindow::MoveCursorTo(const gfx::Point& location) {
  sender_->Send(new WaylandDisplay_MoveCursor(location));
}
// Cursor confinement is intentionally unimplemented on this platform.
void OzoneWaylandWindow::ConfineCursorToBounds(const gfx::Rect& bounds) {
}
////////////////////////////////////////////////////////////////////////////////
// WindowTreeHostDelegateWayland, ui::PlatformEventDispatcher implementation:
// Only the window currently holding the event grab dispatches platform
// events (see SetCapture/ReleaseCapture).
bool OzoneWaylandWindow::CanDispatchEvent(
    const ui::PlatformEvent& ne) {
  return window_manager_->event_grabber() == handle_;
}
// Converts the native event into a ui::Event and hands it to the delegate;
// propagation stops here because this window owns the grab.
uint32_t OzoneWaylandWindow::DispatchEvent(
    const ui::PlatformEvent& ne) {
  DispatchEventFromNativeUiEvent(
      ne, base::Bind(&PlatformWindowDelegate::DispatchEvent,
                     base::Unretained(delegate_)));
  return POST_DISPATCH_STOP_PROPAGATION;
}
// Called once the GPU-side channel is up: creates the remote window and
// replays any state cached while disconnected (widget state, title, shape
// region, cursor).
void OzoneWaylandWindow::OnChannelEstablished() {
  sender_->Send(new WaylandDisplay_Create(handle_,
                                          parent_,
                                          bounds_.x(),
                                          bounds_.y(),
                                          type_));
  // Only replay state once it left UNINITIALIZED (presumably the zero
  // value, given the truthiness test -- confirm against the enum).
  if (state_)
    sender_->Send(new WaylandDisplay_State(handle_, state_));
  if (title_.length())
    sender_->Send(new WaylandDisplay_Title(handle_, title_));
  AddRegion();
  SetCursor();
}
// Nothing to tear down on channel loss; state is replayed on reconnect.
void OzoneWaylandWindow::OnChannelDestroyed() {
}
// Pushes the cached widget state to the compositor. State changes while
// disconnected are dropped here; OnChannelEstablished resends the current
// state when the channel comes up.
void OzoneWaylandWindow::SendWidgetState() {
  if (sender_->IsConnected())
    sender_->Send(new WaylandDisplay_State(handle_, state_));
}
// Forwards the window-shape region to the compositor; a no-op when
// disconnected, region-less, or empty.
// NOTE(review): only the region's bounding rect is transmitted, so complex
// SkPath shapes degrade to their bounds -- confirm this is intended.
void OzoneWaylandWindow::AddRegion() {
  if (sender_->IsConnected() && region_ && !region_->isEmpty()) {
    const SkIRect& rect = region_->getBounds();
    sender_->Send(new WaylandDisplay_AddRegion(handle_,
                                               rect.left(),
                                               rect.top(),
                                               rect.right(),
                                               rect.bottom()));
  }
}
// Removes the current shape region: tells the compositor to subtract its
// bounding rect (when connected and non-empty) and frees the local copy.
void OzoneWaylandWindow::ResetRegion() {
  if (region_) {
    if (sender_->IsConnected() && !region_->isEmpty()) {
      const SkIRect& rect = region_->getBounds();
      sender_->Send(new WaylandDisplay_SubRegion(handle_,
                                                 rect.left(),
                                                 rect.top(),
                                                 rect.right(),
                                                 rect.bottom()));
    }
    delete region_;
    region_ = NULL;
  }
}
// Pushes the cached cursor bitmap to the compositor; with no bitmap the
// cursor is reset to an empty one.
void OzoneWaylandWindow::SetCursor() {
  if (!bitmap_) {
    sender_->Send(new WaylandDisplay_CursorSet(std::vector<SkBitmap>(),
                                               gfx::Point()));
    return;
  }
  sender_->Send(new WaylandDisplay_CursorSet(bitmap_->bitmaps(),
                                             bitmap_->hotspot()));
}
// Clamps the window origin so a popup/tooltip stays inside its parent.
// NOTE(review): x/y are first made parent-relative (origin minus parent
// origin) but are then compared against the parent's absolute x()/y() and
// its width()/height(); for parents not at the screen origin the two
// coordinate spaces are mixed -- confirm this is the intended behavior.
void OzoneWaylandWindow::ValidateBounds() {
  DCHECK(parent_);
  gfx::Rect parent_bounds = window_manager_->GetWindow(parent_)->GetBounds();
  int x = bounds_.x() - parent_bounds.x();
  int y = bounds_.y() - parent_bounds.y();
  if (x < parent_bounds.x()) {
    x = parent_bounds.x();
  } else {
    // Pull the window left when its right edge would pass the parent's width.
    int width = x + bounds_.width();
    if (width > parent_bounds.width())
      x -= width - parent_bounds.width();
  }
  if (y < parent_bounds.y()) {
    y = parent_bounds.y();
  } else {
    // Pull the window up when its bottom edge would pass the parent's height.
    int height = y + bounds_.height();
    if (height > parent_bounds.height())
      y -= height - parent_bounds.height();
  }
  bounds_.set_origin(gfx::Point(x, y));
}
// No platform IME support on this backend.
PlatformImeController* OzoneWaylandWindow::GetPlatformImeController() {
  return nullptr;
}
} // namespace ui
|
# Editor
# Preferred editor for local and remote sessions
if [[ -n "${SSH_CONNECTION}" ]]; then
  # remote
  # Fix: `[ command -v vim >/dev/null 2>&1 ]` tested literal strings inside
  # `[` instead of running the command, so the vi fallback could never be
  # chosen correctly; run `command -v` directly.
  if command -v vim >/dev/null 2>&1; then
    export EDITOR='vim'
    export VISUAL='vim'
  else
    export EDITOR='vi'
    export VISUAL='vi'
  fi
else
  # local
  if [ -n "$NVIM_LISTEN_ADDRESS" ]; then
    # Prevent editors from being launched inside Neovim
    export EDITOR="nvr -cc split --remote-wait +'set bufhidden=wipe'"
    export VISUAL="nvr -cc split --remote-wait +'set bufhidden=wipe'"
  else
    export EDITOR='nvim'
    export VISUAL='nvim'
  fi
fi

# History
export HISTSIZE=10000
export HISTFILESIZE=$HISTSIZE
export SAVEHIST=$HISTSIZE
export HISTCONTROL='ignoredups'
export HISTIGNORE='ls:cd:cd -:pwd:exit:date:* --help'

# Locale
export LC_ALL='en_US.UTF-8'
export LANG='en_US.UTF-8'
export LANGUAGE='en_US.UTF-8'

# SSH
export SSH_KEY_PATH=~/.ssh/id_rsa.pub

# Pager settings
# Default pager
export PAGER='less'

# less options
less_opts=(
  # Quit if entire file fits on first screen.
  --quit-if-one-screen
  # Ignore case in searches that do not contain uppercase.
  --ignore-case
  # Allow ANSI colour escapes, but no other escapes.
  --RAW-CONTROL-CHARS
  # Quiet the terminal bell. (when trying to scroll past the end of the buffer)
  --quiet
  # Do not complain when we are on a dumb terminal.
  --dumb
)
export LESS="${less_opts[*]}"

# Path to your development directory
export C=$CODE

# Vim
export VIMCONFIG=~/.vim
export VIMDATA=~/.vim

# Neovim
export NVIMCONFIG=~/.config/nvim
# Fix: "~~/..." is not tilde-expanded by the shell; use a single tilde.
export NVIMDATA=~/.local/share/nvim

# FZF
export FZF_DEFAULT_COMMAND='ag --hidden --ignore .git -g ""'
##############################################################################
# PATH & MANPATH #
##############################################################################
# Extend $PATH without duplicates.
# Fix: the old `if ! $( ... )` form executed grep's stdout as a command and
# only worked by accident (empty-command exit-status fallback); run the
# pipeline directly instead.
_extend_path() {
  if ! echo "$PATH" | tr ":" "\n" | grep -qx "$1"; then
    PATH="$PATH:$1"
  fi
}
# Extend $MANPATH without duplicates.
# Fix: same as _extend_path -- run the pipeline directly rather than
# executing its captured output via `$( ... )`.
_extend_man_path() {
  if ! echo "$MANPATH" | tr ":" "\n" | grep -qx "$1"; then
    MANPATH="$MANPATH:$1"
  fi
}
# Remove duplicate and empty entries from a colon-separated list,
# preserving the order of first occurrence (awk keeps the first sighting
# of each entry via the arr[$0]++ seen-set).
_clear_path() {
  echo -n "$1" | awk -v RS=: -v ORS=: 'length($0) != 0 && !arr[$0]++'
}
# Add custom bin to $PATH
[ -d /usr/local/heroku/bin ] && _extend_path /usr/local/heroku/bin
# Fix: previously tested `-d $HOME` (always true) instead of the target dir.
[ -d "$HOME/.bin" ] && _extend_path "$HOME/.bin"
[ -d "$HOME/.cargo/bin" ] && _extend_path "$HOME/.cargo/bin"
[ -d "$HOME/.local/bin" ] && _extend_path "$HOME/.local/bin"
[ -d "$DOTFILES/bin" ] && _extend_path "$DOTFILES/bin"
[ -d "$HOME/.rvm/bin" ] && _extend_path "$HOME/.rvm/bin" # last!

# Add custom bin to $MANPATH (duplicate line removed)
[ -d /usr/local/man ] && _extend_man_path /usr/local/man

PATH=$(_clear_path "$PATH")
MANPATH=$(_clear_path "$MANPATH")
|
<gh_stars>0
import React, { useEffect } from 'react';
import InProgress from './loading.gif';
import './App.css';
import HeaderWOS from './components/HeaderWOS.js';
import Footer from './components/Footer.js';
import { Modal, Button } from 'react-bootstrap';
import SFstateLogo from './assets/SFstateLogo.jpg';
import { Container } from 'react-bootstrap';
import { Link, BrowserRouter as Router, Route } from 'react-router-dom';
import { Redirect, useHistory } from 'react-router-dom';
import axios from 'axios';
const BeginChangePasswordPage = () => {
const [message, setMessage] = React.useState('');
useEffect(() => {
axios
.post(
`${window.CSC675_ENDPOINT_URL}/api/beginChangePassword`,
{},
{ withCredentials: true },
)
.then(res => {
if (res.data.error) {
setMessage(res.data.error);
} else {
setMessage(
'An email has been sent to your email address for password change!',
);
}
})
.catch(e => console.log(e));
}, []);
return (
<>
<section>
<div className="App">
<HeaderWOS />
<div style={{ marginTop: '100px', height: '500px' }}>
<h2>{message}</h2>
</div>
</div>
</section>
<Footer />
</>
);
};
export default BeginChangePasswordPage;
|
// Allow importing stylesheets as untyped side-effect modules.
declare module '*.css';

// Component shape produced by SVGR for imported .svg files.
// NOTE(review): relies on the global React namespace being available in a
// .d.ts (no import here) -- confirm tsconfig/typeRoots provide it.
type SvgrComponent = React.FunctionComponent<React.SVGAttributes<SVGElement>>;

declare module '*.svg' {
  // Each .svg import default-exports a ready-to-render React component.
  const value: SvgrComponent;
  export default value;
}
|
package events
import (
"sync"
"github.com/google/uuid"
log "github.com/sirupsen/logrus"
)
// event pairs a topic name with the message to deliver.
type event struct {
	topic   string
	message *Message
}

// bus is the process-wide dispatcher; topicMap maps a topic to its
// registered listeners keyed by listener id, guarded by mtx.
type bus struct {
	mtx       sync.RWMutex
	topicMap  map[string]map[uint32]Listener
	busChan   chan *event
	closeChan chan bool
}

// Message is an arbitrary key/value payload delivered to listeners.
type Message map[string]interface{}

// Listener is the callback invoked for each message on a subscribed topic.
type Listener func(message *Message)

// Handle identifies a single subscription; pass it to Close to unsubscribe.
type Handle struct {
	topic string
	id    uint32
}

// _queueSize is the buffered capacity of the event channel.
const _queueSize = 10

// global event bus
var ebus *bus
// Start initializes the global event bus and launches its dispatch
// goroutine. Calling Start again while the bus is running is a no-op.
// NOTE(review): the nil check is unsynchronized, so concurrent first calls
// to Start race -- confirm the bus is always started from one goroutine.
func Start() {
	// do not start twice
	if ebus != nil {
		return
	}
	// initialize bus
	ebus = &bus{
		topicMap:  make(map[string]map[uint32]Listener),
		busChan:   make(chan *event, _queueSize),
		closeChan: make(chan bool),
	}
	// start service
	go service()
}
// service is the bus's dispatch loop: it delivers each queued event to all
// listeners of its topic and exits when a value arrives on closeChan,
// echoing it back so Stop can synchronize with the shutdown.
func service() {
	for {
		select {
		// get events from channel for incoming events
		case event := <-ebus.busChan:
			if event.topic != "" {
				var listeners map[uint32]Listener
				// Grab a reference to the topic's listener map under the read lock.
				// NOTE(review): the map is then ranged over after RUnlock, so a
				// concurrent Listen/unsubscribe mutating it races -- confirm.
				ebus.mtx.RLock()
				if foundListeners, found := ebus.topicMap[event.topic]; found {
					listeners = foundListeners
				}
				ebus.mtx.RUnlock()
				if listeners != nil {
					for id, listen := range listeners {
						log.Tracef("Dispatched event message{%v} on topic %s to listener %d", event.message, event.topic, id)
						if listen != nil {
							listen(event.message)
						}
					}
				}
			}
		case <-ebus.closeChan:
			// Acknowledge shutdown so Stop's receive unblocks.
			ebus.closeChan <- true
			return
		}
	}
}
// Stop shuts down the dispatch goroutine (handshaking over closeChan),
// closes both channels and discards the bus.
// NOTE(review): closing busChan while a concurrent Send is in flight would
// panic; confirm all producers are quiesced before Stop is called.
func Stop() {
	ebus.closeChan <- true
	<-ebus.closeChan
	close(ebus.closeChan)
	close(ebus.busChan)
	ebus = nil
}
// Send enqueues message for delivery to all listeners of topic.
// It is a no-op for an empty topic or when the bus is not running.
func Send(topic string, message *Message) {
	if topic == "" || ebus == nil {
		return
	}
	ebus.busChan <- &event{topic: topic, message: message}
}
// Listen registers listener for topic and returns a Handle that can later be
// passed to Close to unsubscribe. When the bus is not running it returns a
// no-op handle (empty topic).
//
// Fix: the previous double-checked `if _, found := ebus.topicMap[topic]`
// read the map without holding the mutex, which is a data race against
// concurrent writers; all map access now happens under one write lock.
func Listen(topic string, listener Listener) *Handle {
	// return basically a no-op handler
	if ebus == nil {
		return &Handle{
			topic: "",
		}
	}
	// create new id and keep listener
	id := uuid.New().ID()
	ebus.mtx.Lock()
	if ebus.topicMap[topic] == nil {
		ebus.topicMap[topic] = make(map[uint32]Listener)
	}
	ebus.topicMap[topic][id] = listener
	ebus.mtx.Unlock()
	return &Handle{
		topic: topic,
		id:    id,
	}
}
// unsubscribe removes the listener identified by handle from the bus.
//
// Fix: the outer existence check previously read ebus.topicMap without the
// mutex (data race with Listen); the lookup and delete now both happen
// under the write lock.
func unsubscribe(handle *Handle) {
	if handle == nil || ebus == nil {
		return
	}
	ebus.mtx.Lock()
	if listeners := ebus.topicMap[handle.topic]; listeners != nil {
		delete(listeners, handle.id)
	}
	ebus.mtx.Unlock()
}
// Close detaches this handle's listener from the bus. Handles with an empty
// topic are the no-op handles returned by Listen when the bus was down.
func (handle *Handle) Close() {
	if ebus == nil || handle.topic == "" {
		return
	}
	unsubscribe(handle)
	log.Debugf("Removed handler for topic: %s", handle.topic)
}
|
<gh_stars>1-10
package com.pharmacySystem.service.implementations;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.pharmacySystem.model.grade.GradeType;
import com.pharmacySystem.model.medicine.Ingredient;
import com.pharmacySystem.model.medicine.Medicine;
import com.pharmacySystem.model.user.Patient;
import com.pharmacySystem.repository.GradeRepository;
import com.pharmacySystem.repository.MedicineRepository;
import com.pharmacySystem.repository.PatientRepository;
import com.pharmacySystem.service.interfaces.IMedicineService;
@Service
public class MedicineService implements IMedicineService{
@Autowired
private MedicineRepository medicineRepository;
@Autowired
private GradeRepository gradeRepository;
@Autowired
private PatientRepository patientRepository;
@Override
public List<Medicine> findAll() {
List<Medicine> medicines = medicineRepository.findAll();
for(Medicine medicine : medicines) {
medicine.setAverageGrade(gradeRepository.findAverageGradeByGradedIdAndGradeType(medicine.getId(), GradeType.MEDICINE_GRADE));
}
return medicines;
}
@Override
public Medicine findById(Long id) {
Medicine medicine = medicineRepository.findById(id).orElse(null);
medicine.setAverageGrade(gradeRepository.findAverageGradeByGradedIdAndGradeType(medicine.getId(), GradeType.MEDICINE_GRADE));
return medicine;
}
@Override
public boolean patientIsAllergicToMedicine(long patientId, long medicineId) {
Patient patient = patientRepository.findById(patientId).orElse(null);
Medicine medicine = medicineRepository.findById(medicineId).orElse(null);
if (patient == null || medicine == null) {
return false;
}
for (Ingredient patientAllergy : patient.getAllergies()) {
if(medicine.getMedicineSpecification().getIngredients().contains(patientAllergy)) {
return true;
}
}
return false;
}
} |
<filename>src/querystring.js
/*
* querystring.js - v2.0.0
* Querystring utility in Javascript
* https://github.com/EldonMcGuinness/querystring.js
*
* Made by <NAME>
* Edited by notJudahRR
* Under MIT License
*
* Parameters:
* str - A string that you want the script to treat as the querystring
* instead of the actual page's querystring.
*/
(function(global, factory, NodeFactory){
if(global.document){
factory(global);
}else{
NodeFactory(global);
}
// typeof window !== "undefined" ? window : module.exports means
// if window is defined than make the global object window but if it is not than make it module.exports
})(typeof window !== "undefined" ? window : module.exports, function(global){
"use strict";
function sortItem(key, val, qso){
if (typeof(key) === "string"){
qso[key] = [qso[key], val];
}else if (typeof(qso[key]) === "object"){
qso[key].push(val);
}
}
function parseQueryItem(qso, qs){
var item = qs.split("=");
item = item.map(function(n){return decodeURIComponent(n);});
var key = item[0];
var val = (item[1] === "") ? null : item[1];
if (key in qso){
// If a key already exists then make this an object
sortItem(key, val, qso);
}else{
// If no key exists just set it as a string
qso[key] = val;
}
}
function querystring(str = document.URL){
// If str is empty than use the URL of the webpage
var qso ={};
var qs = (str || document.location.search);
// Check for an empty querystring
if (qs === ""){
return qso;
}
// Normalize the querystring
qs = qs.substring(qs.indexOf("?") +1)
.replace(/;/g, "&")
.replace(/&&+/g, "&")
.replace(/&$/,"");
// Break the querystring into parts
qs = qs.split("&");
// Build the querystring object
qs.map(parseQueryItem.bind(null, qso));
return qso;
}
global.querystring = querystring;
}, function(global){
"use strict";
function querystring(str){
var url = require("url");
var urlParsed = url.parse(str, true);
return urlParsed.query;
}
global = querystring;
});
|
<filename>media-api/app/AppLoader.scala
import com.gu.mediaservice.lib.play.GridAppLoader
// Play application loader that wires the media-api service name and its
// component graph into the shared Grid loader.
class AppLoader extends GridAppLoader("media-api", new MediaApiComponents(_))
|
<reponame>GhostLee/tdx-go<filename>core/hub.go
package core
import "sync"
// Hub keeps the set of active client connections.
// TODO (translated from the original Chinese note): connection pooling with
// seamless/transparent fail-over between connections.
type Hub struct {
	lock    sync.Mutex
	Clients []Client
}
// Do is a placeholder for the hub's main loop; currently a no-op.
func (hub *Hub) Do() error {
	return nil
}
|
// Copyright 2020-2022 Buf Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package bufanalysis
import (
"crypto/sha256"
"fmt"
"io"
"sort"
"strconv"
"strings"
)
const (
	// FormatText is the text format for FileAnnotations.
	FormatText Format = iota + 1
	// FormatJSON is the JSON format for FileAnnotations.
	FormatJSON
	// FormatMSVS is the MSVS format for FileAnnotations.
	FormatMSVS
)

var (
	// AllFormatStrings is all format strings without aliases.
	//
	// Sorted in the order we want to display them.
	AllFormatStrings = []string{
		"text",
		"json",
		"msvs",
	}
	// AllFormatStringsWithAliases is all format strings with aliases.
	//
	// Sorted in the order we want to display them.
	AllFormatStringsWithAliases = []string{
		"text",
		"gcc",
		"json",
		"msvs",
	}
	// stringToFormat maps user-supplied format strings (including aliases)
	// to their Format; consumed by ParseFormat.
	stringToFormat = map[string]Format{
		"text": FormatText,
		// alias for text
		"gcc":  FormatText,
		"json": FormatJSON,
		"msvs": FormatMSVS,
	}
	// formatToString is the canonical (alias-free) reverse mapping used by
	// Format.String.
	formatToString = map[Format]string{
		FormatText: "text",
		FormatJSON: "json",
		FormatMSVS: "msvs",
	}
)
// Format is a FileAnnotation format.
type Format int
// String implements fmt.Stringer. Unknown Format values fall back to their
// decimal representation.
func (f Format) String() string {
	if s, ok := formatToString[f]; ok {
		return s
	}
	return strconv.Itoa(int(f))
}
// ParseFormat parses the Format.
//
// The empty strings defaults to FormatText.
func ParseFormat(s string) (Format, error) {
	normalized := strings.ToLower(strings.TrimSpace(s))
	if normalized == "" {
		return FormatText, nil
	}
	if format, ok := stringToFormat[normalized]; ok {
		return format, nil
	}
	return 0, fmt.Errorf("unknown format: %q", normalized)
}
// FileInfo is a minimal FileInfo interface.
// ExternalPath doubles as the annotation's display identity: hash and
// fileAnnotationCompareTo in this file both key on it.
type FileInfo interface {
	Path() string
	ExternalPath() string
}

// FileAnnotation is a file annotation.
type FileAnnotation interface {
	// Stringer returns the string representation of this annotation.
	fmt.Stringer
	// FileInfo is the FileInfo for this annotation.
	//
	// This may be nil.
	FileInfo() FileInfo
	// StartLine is the starting line.
	//
	// If the starting line is not known, this will be 0.
	StartLine() int
	// StartColumn is the starting column.
	//
	// If the starting column is not known, this will be 0.
	StartColumn() int
	// EndLine is the ending line.
	//
	// If the ending line is not known, this will be 0.
	// If the ending line is the same as the starting line, this will be explicitly
	// set to the same value as start_line.
	EndLine() int
	// EndColumn is the ending column.
	//
	// If the ending column is not known, this will be 0.
	// If the ending column is the same as the starting column, this will be explicitly
	// set to the same value as start_column.
	EndColumn() int
	// Type is the type of annotation, typically an ID representing a failure type.
	Type() string
	// Message is the message of the annotation.
	Message() string
}
// NewFileAnnotation returns a new FileAnnotation backed by the package's
// unexported implementation.
func NewFileAnnotation(
	fileInfo FileInfo,
	startLine int,
	startColumn int,
	endLine int,
	endColumn int,
	typeString string,
	message string,
) FileAnnotation {
	return newFileAnnotation(fileInfo, startLine, startColumn, endLine, endColumn, typeString, message)
}
// SortFileAnnotations sorts the FileAnnotations.
//
// The order of sorting is:
//
//	ExternalPath
//	StartLine
//	StartColumn
//	Type
//	Message
//	EndLine
//	EndColumn
//
// sort.Stable keeps the relative order of annotations that compare equal.
func SortFileAnnotations(fileAnnotations []FileAnnotation) {
	sort.Stable(sortFileAnnotations(fileAnnotations))
}
// DeduplicateAndSortFileAnnotations deduplicates the FileAnnotations based on their
// string representation and sorts them according to the order specified in SortFileAnnotations.
func DeduplicateAndSortFileAnnotations(fileAnnotations []FileAnnotation) []FileAnnotation {
	seen := make(map[string]struct{}, len(fileAnnotations))
	result := make([]FileAnnotation, 0, len(fileAnnotations))
	for _, annotation := range fileAnnotations {
		key := hash(annotation)
		if _, duplicate := seen[key]; !duplicate {
			seen[key] = struct{}{}
			result = append(result, annotation)
		}
	}
	SortFileAnnotations(result)
	return result
}
// PrintFileAnnotations prints the file annotations separated by newlines.
func PrintFileAnnotations(writer io.Writer, fileAnnotations []FileAnnotation, formatString string) error {
	format, err := ParseFormat(formatString)
	if err != nil {
		return err
	}
	// Dispatch table instead of a switch; the fallthrough error covers any
	// Format value added without a printer.
	printers := map[Format]func(io.Writer, []FileAnnotation) error{
		FormatText: printAsText,
		FormatJSON: printAsJSON,
		FormatMSVS: printAsMSVS,
	}
	printer, ok := printers[format]
	if !ok {
		return fmt.Errorf("unknown FileAnnotation Format: %v", format)
	}
	return printer(writer, fileAnnotations)
}
// hash returns a hash value that uniquely identifies the given FileAnnotation.
//
// Fix: a NUL separator is now written between fields. Previously the fields
// were concatenated directly, so distinct annotations could collide -- e.g.
// (line 12, column 3) and (line 1, column 23) fed the hasher the same bytes
// and were falsely deduplicated.
func hash(fileAnnotation FileAnnotation) string {
	path := ""
	if fileInfo := fileAnnotation.FileInfo(); fileInfo != nil {
		path = fileInfo.ExternalPath()
	}
	digest := sha256.New()
	for _, field := range []string{
		path,
		strconv.Itoa(fileAnnotation.StartLine()),
		strconv.Itoa(fileAnnotation.StartColumn()),
		strconv.Itoa(fileAnnotation.EndLine()),
		strconv.Itoa(fileAnnotation.EndColumn()),
		fileAnnotation.Type(),
		fileAnnotation.Message(),
	} {
		_, _ = digest.Write([]byte(field))
		_, _ = digest.Write([]byte{0})
	}
	return string(digest.Sum(nil))
}
// sortFileAnnotations adapts []FileAnnotation to sort.Interface using
// fileAnnotationCompareTo as the ordering.
type sortFileAnnotations []FileAnnotation
func (a sortFileAnnotations) Len() int { return len(a) }
func (a sortFileAnnotations) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }
func (a sortFileAnnotations) Less(i int, j int) bool { return fileAnnotationCompareTo(a[i], a[j]) < 0 }
// fileAnnotationCompareTo returns a value less than 0 if a < b, a value
// greater than 0 if a > b, and 0 if a == b.
//
// Comparison order (first difference wins): nil-ness, presence of FileInfo,
// external path, start line, start column, type, message, end line,
// end column. Note that type and message are intentionally compared BEFORE
// the end position.
func fileAnnotationCompareTo(a FileAnnotation, b FileAnnotation) int {
// nil sorts before non-nil.
if a == nil && b == nil {
return 0
}
if a == nil && b != nil {
return -1
}
if a != nil && b == nil {
return 1
}
// Annotations without file information sort before those with it.
aFileInfo := a.FileInfo()
bFileInfo := b.FileInfo()
if aFileInfo == nil && bFileInfo != nil {
return -1
}
if aFileInfo != nil && bFileInfo == nil {
return 1
}
if aFileInfo != nil && bFileInfo != nil {
if aFileInfo.ExternalPath() < bFileInfo.ExternalPath() {
return -1
}
if aFileInfo.ExternalPath() > bFileInfo.ExternalPath() {
return 1
}
}
if a.StartLine() < b.StartLine() {
return -1
}
if a.StartLine() > b.StartLine() {
return 1
}
if a.StartColumn() < b.StartColumn() {
return -1
}
if a.StartColumn() > b.StartColumn() {
return 1
}
if a.Type() < b.Type() {
return -1
}
if a.Type() > b.Type() {
return 1
}
if a.Message() < b.Message() {
return -1
}
if a.Message() > b.Message() {
return 1
}
if a.EndLine() < b.EndLine() {
return -1
}
if a.EndLine() > b.EndLine() {
return 1
}
if a.EndColumn() < b.EndColumn() {
return -1
}
if a.EndColumn() > b.EndColumn() {
return 1
}
return 0
}
|
package com.netshell.test.java.dependency;
import com.netshell.test.java.Test;
import java.util.*;
/**
 * Experimental test that orders a list of nodes so that each node appears
 * after the nodes it depends on (a hand-rolled topological-sort attempt).
 * Prints the list before and after resolution; contains several abandoned
 * approaches left as commented-out code.
 */
public class DependencyResolutionTest implements Test {
// The fixed test fixture built in createTestSource().
private List<DependencyTestNode> list = createTestSource();
@Override
public void execute() throws Exception {
printList(list, "****************************************Init List********************************************");
/*PriorityQueue<DependencyTestNode> integers = new PriorityQueue<>(getComparator());
integers.addAll(list);
DependencyTestNode i;
while ((i = integers.poll()) != null) {
System.out.println(i);
}*/
printList(parseDependencies(list), "****************************************final List********************************************");
}
/**
 * Prints a banner line followed by every node in the list.
 */
private void printList(List<DependencyTestNode> list, String initMessage) {
System.out.println(initMessage);
for (DependencyTestNode dependencyTestNode : list) {
System.out.println(dependencyTestNode);
}
System.out.println("**********************************************************************************************\n");
}
/**
 * Builds the resolved ordering by inserting nodes one at a time via
 * addInstance, printing the intermediate list on every iteration.
 */
private List<DependencyTestNode> parseDependencies(List<DependencyTestNode> relatedInstances) {
// TODO: Need to confirm whether to parse nested relatedInstances
List<DependencyTestNode> instancesList = new ArrayList<>();
final String s = "****************************************%d itr List********************************************";
int i=0;
for (DependencyTestNode relatedInstance : relatedInstances) {
printList(instancesList, String.format(s, i));
if (instancesList.size() == 0) {
instancesList.add(relatedInstance);
} else {
addInstance(instancesList, relatedInstance);
}
i++;
}
return instancesList;
}
/**
 * Inserts relatedInstance immediately before the first node that depends on
 * it (or at the end if none does), then shifts this node's own dependencies
 * in front of it via shift().
 */
private void addInstance(List<DependencyTestNode> instancesList, DependencyTestNode relatedInstance) {
int i;
// Find the first already-placed node that depends on the new node.
for (i = 0; i < instancesList.size(); i++) {
final DependencyTestNode node = instancesList.get(i);
if (node.dependencies.contains(relatedInstance.dependencyId)) {
break;
}
}
instancesList.add(i, relatedInstance);
shift(instancesList, relatedInstance, i + 1);
/*for (Integer dependency : relatedInstance.dependencies) {
for (int k = i; k < instancesList.size(); k++) {
final DependencyTestNode node = instancesList.get(k);
if (node.dependencyId == dependency && i < k) i = k;
}
}*/
/*for (int k = 0; k < instancesList.size(); k++) {
final DependencyTestNode node = instancesList.get(k);
if (node.dependencies.contains(relatedInstance.dependencyId) && i < k) {
throw new RuntimeException("Cyclic Reference " + relatedInstance + " and " + node);
}
}*/
// instancesList.add(i, relatedInstance);
}
/**
 * Moves any node that relatedInstance depends on to just before it.
 * NOTE(review): when k == 1, index = k - 2 is -1 and List.add(-1, ...)
 * throws IndexOutOfBoundsException — the commented-out clamp below suggests
 * the author hit this; confirm intended behavior before relying on this.
 */
private void shift(List<DependencyTestNode> instancesList, DependencyTestNode relatedInstance, int k) {
for (int i = k; i < instancesList.size(); i++) {
final DependencyTestNode node = instancesList.get(i);
if (relatedInstance.dependencies.contains(node.dependencyId)) {
instancesList.remove(i);
int index = k - 2;
/*if (index == -1) {
index = 0;
}*/
instancesList.add(index, node);
}
}
}
/**
 * NOTE(review): this comparator violates the Comparator contract — it is
 * not antisymmetric (compare(a,b) and compare(b,a) can both be 0 or -1),
 * which is presumably why the PriorityQueue approach above was abandoned.
 */
private Comparator<DependencyTestNode> getComparator() {
return (o1, o2) -> o2.dependencies.contains(o1.dependencyId) ? -1 : 0;
}
/**
 * Builds a fixed dependency graph: e.g. node0 depends on {1, 3}, node7 on
 * {1, 3, 4, 5}, node1 on nothing. A randomized generator is kept commented
 * out above the fixed fixture.
 */
private List<DependencyTestNode> createTestSource() {
final List<DependencyTestNode> list = new ArrayList<>();
/*for (int i = 0; i < 10; i++) {
final DependencyTestNode e = new DependencyTestNode();
e.dependencyId = i;
for (int j = 0; j < Math.random() * 10; j++) {
final int dep = (int) (Math.random() * 10);
if (dep == i) {
j--;
continue;
}
e.dependencies.add(dep);
}
list.add(e);
}
list.get(list.size() - 1).dependencies.clear();*/
DependencyTestNode node0 = new DependencyTestNode();
node0.dependencyId = 0;
node0.dependencies = new HashSet<>(Arrays.asList(1, 3));
list.add(node0);
DependencyTestNode node2 = new DependencyTestNode();
node2.dependencyId = 2;
node2.dependencies = new HashSet<>(Arrays.asList(3, 6));
list.add(node2);
DependencyTestNode node3 = new DependencyTestNode();
node3.dependencyId = 3;
node3.dependencies = new HashSet<>(Collections.singletonList(1));
list.add(node3);
DependencyTestNode node4 = new DependencyTestNode();
node4.dependencyId = 4;
node4.dependencies = new HashSet<>(Arrays.asList(2, 8));
list.add(node4);
DependencyTestNode node5 = new DependencyTestNode();
node5.dependencyId = 5;
node5.dependencies = new HashSet<>(Collections.singletonList(7));
list.add(node5);
DependencyTestNode node6 = new DependencyTestNode();
node6.dependencyId = 6;
node6.dependencies = new HashSet<>(Arrays.asList(5, 7));
list.add(node6);
DependencyTestNode node7 = new DependencyTestNode();
node7.dependencyId = 7;
node7.dependencies = new HashSet<>(Arrays.asList(1, 3, 4, 5));
list.add(node7);
DependencyTestNode node1 = new DependencyTestNode();
node1.dependencyId = 1;
node1.dependencies = new HashSet<>();
list.add(node1);
return list;
}
/**
 * A node in the dependency graph: an id plus the ids it depends on.
 */
private static final class DependencyTestNode {
int dependencyId;
Set<Integer> dependencies = new HashSet<>();
@Override
public String toString() {
return "DependencyTestNode{" +
"dependencyId=" + dependencyId +
", dependencies=" + dependencies +
'}';
}
}
}
|
<filename>public/components/dropdowns/city-dropdown.js
'use strict';
// NOTE(review): `e` is never used in this file (the component below uses
// JSX directly); presumably kept for parity with non-JSX components —
// confirm before removing.
const e = React.createElement;
class CityDropdown extends React.Component {
constructor(props) {
super(props);
this.state = {
isLoading: true,
isOpen: false,
searchText: '',
selectedCities: [],
searchResults: [],
cities: []
};
}
componentWillReceiveProps(nextProps) {
if (nextProps.stateName == this.props.stateName) { // nothing to change
return;
}
axios.get('/api/city?state=' + nextProps.stateName)
.then((response) => {
this.setState({
cities: response.data,
searchResults: response.data,
selectedCities: [],
isLoading: false
});
})
.catch((error) => {
// handle error
console.log(error);
})
.finally(() => {
this.setState({
isLoading: false
});
});
}
/**
Initialize component hook
*/
componentDidMount() {
axios.get('/api/city?state=' + this.props.stateName)
.then((response) => {
this.setState({
cities: response.data,
searchResults: response.data,
isLoading: false
});
})
.catch((error) => {
// handle error
console.log(error);
})
.finally(() => {
this.setState({
isLoading: false
});
});
}
/**
Function that returns the label of the dropdown
*/
getDropdownSelectionLabel() {
if (this.state.isLoading) { // wait for API to complete
return 'Loading...'
}
else {
return this.state.selectedCities.join(',') || 'Select Cities';
}
}
/**
Function to toggle the dropdown
*/
toggleDropdown() {
this.setState({
isOpen: !this.state.isOpen,
searchText: ''
});
}
/**
Function to search
*/
searchList(event) {
let updatedList = this.state.cities.filter(function(item) {
var city = item.toLowerCase(),
filter = event.target.value.toLowerCase();
return city.includes(filter);
});
console.log("searchResults: ", updatedList);
this.setState({
searchText: event.target.value,
searchResults: updatedList
});
}
/**
Function to add or remove state from the selected states array
*/
addOrRemoveCity(event) {
var cityValue = event.target.textContent,
indexOfCity = this.state.selectedCities.indexOf(cityValue);
this.state.searchText = '';
if (indexOfCity > -1) {
this.setState({
selectedCities: this.state.selectedCities.splice(indexOfCity, 1)
});
}
else {
// for single select, the selectedCuisines can have only one entry
if (!this.props.isMultiSelect && this.state.selectedCities.length) {
this.state.selectedCities.pop();
this.state.selectedCities.push(cityValue);
this.setState({
selectedCities: this.state.selectedCities
});
}
else { // IF MULTI SELECT, array length can be more that 1
this.state.selectedCities.push(cityValue);
this.setState({
selectedCities: this.state.selectedCities
});
}
}
this.props.onCitySelect(this.state.selectedCities);
}
/**
React's function to render the DOM
*/
render() {
let cityList, selectedList;
cityList = this.state.searchResults.map((city) => {
if (this.state.selectedCities.indexOf(city) < 0) {
return (
<div key={city} onClick={this.addOrRemoveCity.bind(this)} className="dropdown-item">
{city}
</div>
);
}
else {
return null;
}
});
selectedList = this.state.selectedCities.map((city) => {
return (
<div key={city} onClick={this.addOrRemoveCity.bind(this)} className="active dropdown-item">
{city}
</div>
);
});
// HTML
return (
<div className="city-dropdown react-dropdown position-relative dropdown" className={this.state.isOpen ? 'active dropdown' : 'dropdown'}>
<button className="btn btn-secondary dropdown-toggle w-100" type="button" id="cityDropdownMenuButton" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
{this.getDropdownSelectionLabel()}
</button>
<div className="dropdown-menu position-absolute w-100" aria-labelledby="cityDropdownMenuButton">
<div className="p-2">
<input type="text" placeholder="Search Cities" className="form-control" onChange={this.searchList.bind(this)}/>
</div>
<h6 className="p-2">Selected Cities</h6>
{selectedList}
<div class="dropdown-divider"></div>
{cityList}
</div>
</div>
);
}
} |
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
#
# Requires write access to /var/www/html (typically root). The heredoc is
# unquoted, so ${PLACEHOLDER}, ${WIDTH} and ${HEIGHT} are expanded from the
# environment at deploy time; they must be set by the caller.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome Welcome Welcome.
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
import React from "react";
import { Line } from "react-chartjs-2";
import axios from "axios";
const LightChart = () => {
const [lightData, setLightData] = useState({});
const fetchData = async () => {
try {
const response = await axios.get("http://api.example.com/light-data");
setLightData({
labels: response.data.map(dataPoint => dataPoint.time),
datasets: [
{
label: "Light Data",
data: response.data.map(dataPoint => dataPoint.light),
fill: false,
backgroundColor: "rgba(75,192,192,0.4)",
borderColor: "rgba(75,192,192,1)",
borderWidth: 1
}
]
});
} catch (err) {
console.log(err);
}
};
useEffect(() => {
fetchData();
}, []);
return (
<div>
<h2>Light Chart</h2>
<Line data={lightData} width={100} height={50} />
</div>
);
};
export default LightChart; |
#!/bin/bash
# Run a single dieharder randomness test: -d 15 selects test number 15,
# -g 24 selects generator number 24, -S fixes the seed for reproducibility.
# NOTE(review): confirm the test/generator numbers against `dieharder -l`
# and `dieharder -g -1` for the installed version — the numbering varies.
dieharder -d 15 -g 24 -S 3649729312
<filename>src/main/java/org/xtwy/oldthriftrpc/FrameworkServiceImpl.java<gh_stars>0
package org.xtwy.oldthriftrpc;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TMemoryBuffer;
import org.xtwy.media.Media;
import com.hzins.thrift.demo.FrameworkService;
import com.hzins.thrift.demo.ThriftRequest;
import com.hzins.thrift.demo.ThriftResponse;
/**
 * Generic Thrift service facade: dispatches the request to the Media
 * handler and returns the handler's result serialized as Thrift binary.
 */
public class FrameworkServiceImpl implements FrameworkService.Iface {

    /**
     * Executes the request via {@code Media.execute} and wraps the
     * binary-serialized result in a {@link ThriftResponse}.
     *
     * Fix vs. the original: serialization failures were printed and
     * swallowed, returning {@code null} (which surfaces to the client as an
     * opaque application error). We now rethrow as {@link TException} with
     * the original cause preserved.
     *
     * @throws TException if the response object cannot be serialized
     */
    @Override
    public ThriftResponse execute(ThriftRequest request) throws TException {
        Object response = Media.execute(request);
        try {
            ThriftResponse res = new ThriftResponse();
            res.setBody(buildThriftToBinary(response));
            return res;
        } catch (Exception e) {
            throw new TException("Failed to serialize response of type "
                    + (response == null ? "null" : response.getClass().getName()), e);
        }
    }

    /**
     * Serializes a generated Thrift struct to its binary-protocol bytes by
     * reflectively invoking its {@code write(TProtocol)} method.
     *
     * Fix vs. the original: {@code TMemoryBuffer.getArray()} returns the
     * whole backing array, including unused capacity (1024 bytes here), so
     * the returned payload contained trailing garbage. The result is now
     * trimmed to the number of bytes actually written.
     *
     * @param msg a generated Thrift struct exposing write(TProtocol)
     * @return the exact serialized bytes
     * @throws Exception if reflection or serialization fails
     */
    public byte[] buildThriftToBinary(Object msg) throws Exception {
        Method method = msg.getClass().getMethod("write", TProtocol.class);
        TMemoryBuffer buffer = new TMemoryBuffer(1024);
        TProtocol prot = new TBinaryProtocol(buffer);
        method.invoke(msg, prot);
        return java.util.Arrays.copyOf(buffer.getArray(), buffer.length());
    }
}
|
import org.apache.spark.ml.classification.{LogisticRegression, LogisticRegressionModel}
import org.apache.spark.ml.feature.{HashingTF, Tokenizer}
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.ml.Pipeline
// Load training data (libsvm format: label followed by sparse features)
val training = spark.read.format("libsvm").load("data/sentiment.train.libsvm")
// Configure an ML pipeline: tokenize -> hashed term frequencies -> logistic regression
val tokenizer = new Tokenizer().setInputCol("text").setOutputCol("words")
val hashingTF = new HashingTF().setNumFeatures(1000).setInputCol(tokenizer.getOutputCol).setOutputCol("features")
val lr = new LogisticRegression().setMaxIter(10).setRegParam(0.01)
val pipeline = new Pipeline().setStages(Array(tokenizer, hashingTF, lr))
// Fit the pipeline to training data
val model = pipeline.fit(training)
// Score a single sentence through the fitted pipeline.
// NOTE(review): the 0 below is a dummy label — only the "text" column is
// used at prediction time.
val sentence = "I love this movie!"
val sentenceDataFrame = spark.createDataFrame(Seq(
(0, sentence)
)).toDF("label", "text")
val prediction = model.transform(sentenceDataFrame)
// Take the per-class score vector and report the argmax class index.
val result = prediction.select("prediction").head.get(0).asInstanceOf[Vector].toArray
println(s"Prediction is ${result.indexOf(result.max)}")
// Output is 0 — presumably class 0 means positive sentiment in this
// dataset's labeling; confirm against the training data.
import java.util.ArrayList;
/**
 * A movie with its planned investment and associated heroes.
 */
public class Filme {
    int idFilme;
    String nome;
    String dataDeLancamento;
    double previsaoDeInvestimento;
    ArrayList<HeroisDoFilme> heroisDoFilme;
    ArrayList<Heroi> herois;

    /**
     * Creates a movie with empty hero lists.
     *
     * @param idFilme                  unique movie id (also the equality key)
     * @param nome                     movie name
     * @param dataDeLancamento         release date
     * @param previsaoDeInvestimento   planned investment
     */
    public Filme(int idFilme, String nome, String dataDeLancamento, double previsaoDeInvestimento) {
        this.idFilme = idFilme;
        this.nome = nome;
        this.dataDeLancamento = dataDeLancamento;
        this.previsaoDeInvestimento = previsaoDeInvestimento;
        this.heroisDoFilme = new ArrayList<>();
        this.herois = new ArrayList<>();
    }

    // NOTE(review): both locals are always 0, so this always prints "Perda"
    // and ignores its parameter — looks unfinished; confirm intended logic.
    public void quantidadeDeHeroisNoFilme(HeroisDoFilme herois) {
        double valorInvestido = 0;
        double saldo = 0;
        if (valorInvestido > saldo) {
            System.out.println("Lucro");
        } else {
            System.out.println("Perda");
        }
    }

    // NOTE(review): not implemented yet.
    public void totalInvestidoComOsHerois() {
    }

    public void setIdFilme(int idFilme) {
        this.idFilme = idFilme;
    }

    public void setNome(String nome) {
        this.nome = nome;
    }

    public void setDataDeLancamento(String dataDeLancamento) {
        this.dataDeLancamento = dataDeLancamento;
    }

    public void setPrevisaoDeInvestimento(double previsaoDeInvestimento) {
        this.previsaoDeInvestimento = previsaoDeInvestimento;
    }

    public void setHeroi(ArrayList<Heroi> herois) {
        this.herois = herois;
    }

    public ArrayList<HeroisDoFilme> getFilmes() {
        return this.heroisDoFilme;
    }

    public int getIdFilme() {
        return this.idFilme;
    }

    public String getNome() {
        return this.nome;
    }

    public String getDataDeLancamento() {
        return this.dataDeLancamento;
    }

    public double getPrevisaoDeInvestimento() {
        return this.previsaoDeInvestimento;
    }

    public ArrayList<HeroisDoFilme> getHeroi() {
        return this.heroisDoFilme;
    }

    /** Two movies are equal iff they share the same id. */
    @Override
    public boolean equals(Object o) {
        if (o == this)
            return true;
        if (!(o instanceof Filme)) {
            return false;
        }
        Filme filme = (Filme) o;
        return idFilme == filme.idFilme;
    }

    /**
     * Fix vs. the original: equals was overridden without hashCode, which
     * breaks hash-based collections (equal movies could land in different
     * buckets). Hash on the same key equals uses.
     */
    @Override
    public int hashCode() {
        return Integer.hashCode(idFilme);
    }

    @Override
    public String toString() {
        String print = " ==== Filme ==== " +
                "\nId: " + getIdFilme() +
                "\nNome: " + getNome() +
                "\nData de Lançamento: " + getDataDeLancamento() +
                "\nInvestimento: " + getPrevisaoDeInvestimento();
        return print;
    }
}
|
#!/bin/bash
# Pull the upstream alpine Redis image, retag it for the target registry,
# and push it. DOCKER_REGISTRY may be overridden from the environment.
set -xeou pipefail
DOCKER_REGISTRY=${DOCKER_REGISTRY:-kubedb}
IMG=redis
DB_VERSION=5.0.3
TAG="$DB_VERSION"
# Pull the -alpine variant but publish it under the plain version tag.
docker pull $IMG:$DB_VERSION-alpine
docker tag $IMG:$DB_VERSION-alpine "$DOCKER_REGISTRY/$IMG:$TAG"
docker push "$DOCKER_REGISTRY/$IMG:$TAG"
#!/bin/bash
# sd_flasher.sh
#
# July 30, 2013
# Ryan E. Guerra
#
# Linux-only!
# Automatically flash SD cards for WARPv3. Detects any .bin files in
# the local directory called "warp_configs" and lets you choose one.
# Handles all the magic memory location numbers for a single-program
# SD card.
#
# Not complete: doesn't handle multiple SD program locations. See
# http://warp.rice.edu/trac/wiki/howto/SD_Config for more details.
#
# This is also greedy: it assumes the SD card appears as /dev/sdb
# which is NOT always true!! Especially on multiple-HD systems,
# this could totally overwrite one of your drives!!
#
TARGET="/dev/sdb"
CUR_DIR="`dirname \"$0\"`"
echo "sd_flasher.sh utility v1.0"
echo "This will totally nuke everything on $TARGET. Are you sure? [Y/N]"
read RESPONSE
if [[ "$RESPONSE" == "y" || "$RESPONSE" == "Y" ]]; then
# Build a numbered menu of available .bin images.
FILES=`ls $CUR_DIR/warp_configs/*.bin`
COUNTER=-1
echo "Choose a flash file..."
for F in $FILES
do
let COUNTER=COUNTER+1
FILE[$COUNTER]=$F
PFILE=`basename $F`
echo "($COUNTER) $PFILE"
done
# Read the user's menu choice (no validation — an out-of-range number
# expands to an empty if= argument).
read NUM
# seek=131072 with bs=512 writes the image at byte offset 64 MiB, the
# single-program SD slot described on the WARP wiki page above.
echo "sudo dd bs=512 seek=131072 if=${FILE[$NUM]} of=$TARGET"
sudo dd bs=512 seek=131072 if=${FILE[$NUM]} of=$TARGET
echo
echo "Don't forget to unmount the SD card before removing!"
else
echo "Exiting w/o doing anything..."
fi
echo "Done!"
exit 0
import React, { FC, useState } from 'react';
import {
Dialog,
DialogActions,
DialogContent,
DialogTitle,
makeStyles,
} from '@material-ui/core';
import * as Sentry from '@sentry/react';
import { Button } from '<src>/components';
import { useSpectacleContext } from '<src>/contexts/spectacle-provider';
import { BatchCommit } from '<src>/hooks/useBatchCommits';
import { formatTimeAgo } from '<src>/utils';
type ConfirmResetModalProps = {
onClose: () => void;
onSave?: () => void;
batchCommit: BatchCommit;
};
/**
 * Confirmation dialog for hard-resetting the specification to a given
 * batch commit. On confirm it runs the `resetToCommit` GraphQL mutation
 * via the spectacle context, then closes the dialog.
 */
export const ConfirmResetModal: FC<ConfirmResetModalProps> = ({
  onClose,
  onSave: onSaveProp,
  batchCommit,
}) => {
  const spectacle = useSpectacleContext();
  const [isSaving, setIsSaving] = useState(false);
  const classes = useStyles();

  // Runs the reset mutation; logs + reports failures to Sentry, then
  // rethrows so the caller can react.
  const onSave = async (batchCommitId: string) => {
    onSaveProp && onSaveProp();
    try {
      const results = await spectacle.mutate<{}, { batchCommitId: string }>({
        query: `
        mutation X(
          $batchCommitId: ID!
        ) {
          resetToCommit(batchCommitId: $batchCommitId)
        }
      `,
        variables: {
          batchCommitId,
        },
      });
      if (results.errors) {
        console.error(results.errors);
        throw new Error(JSON.stringify(results.errors));
      }
    } catch (e) {
      console.error(e);
      Sentry.captureException(e);
      throw e;
    }
  };

  return (
    <Dialog open={true} onClose={onClose}>
      <DialogTitle>Reset to commit {batchCommit.batchId}</DialogTitle>
      <DialogContent>
        <h4 className={classes.commitDetailsHeader}>Details</h4>
        <div className={classes.commitDetails}>
          <p className={classes.commitMessage}>{batchCommit.commitMessage}</p>
          <p className={classes.commitTimeAgo}>
            {formatTimeAgo(new Date(batchCommit.createdAt))}
          </p>
        </div>
      </DialogContent>
      <DialogContent>
        This will hard reset your specification to this commit (there will be no
        history for this reset). Are you sure you want to continue?
      </DialogContent>
      <DialogActions>
        <Button onClick={onClose} color="default">
          Cancel
        </Button>
        <Button
          disabled={isSaving}
          onClick={async () => {
            setIsSaving(true);
            try {
              await onSave(batchCommit.batchId);
              onClose();
            } catch (e) {
              // Fix vs. the original: a failed mutation previously left
              // isSaving stuck at true (button disabled forever) and
              // surfaced as an unhandled promise rejection. The error has
              // already been logged/reported in onSave; re-enable the
              // button so the user can retry or cancel.
              setIsSaving(false);
            }
          }}
          color="primary"
          loading={isSaving}
        >
          Confirm
        </Button>
      </DialogActions>
    </Dialog>
  );
};
// Styles for the reset-confirmation dialog: a left-bordered "quote" block
// for the commit details with tightened paragraph margins.
const useStyles = makeStyles((theme) => ({
commitDetailsHeader: {
marginTop: 0,
},
commitDetails: {
borderLeft: `2px solid ${theme.palette.grey[300]}`,
paddingLeft: theme.spacing(2),
},
commitMessage: {
margin: 0,
},
commitTimeAgo: {
margin: 0,
},
}));
|
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
var mongoosePaginate = require('mongoose-paginate');
// Schema for user-defined custom titles, each owned by a User.
var customTitleSchema = new Schema(
  {
    title: { type: String, required: true, unique: true },
    // NOTE(review): `autopopulate: true` only takes effect if the
    // mongoose-autopopulate plugin is registered — confirm it is applied
    // elsewhere in the app.
    userId: { type: Schema.Types.ObjectId, ref: "User", autopopulate: true },
  },
  // Fix vs. the original: Schema() takes a single options object; the
  // original passed { strict: false } as a THIRD argument, which mongoose
  // silently ignores. Merged into the one options object so non-declared
  // fields are actually persisted.
  {
    timestamps: { createdAt: "created_at", updatedAt: "updated_at" },
    strict: false,
  }
);

customTitleSchema.plugin(mongoosePaginate);

module.exports = mongoose.model('CustomTitles', customTitleSchema);
<filename>server/src/modes/pug/index.ts<gh_stars>0
import { TextDocument, Position, Range } from 'vscode-languageserver-types';
import { LanguageMode } from '../../embeddedSupport/languageModes';
import { prettierify } from '../../utils/prettier';
import { VLSFormatConfig } from '../../config';
import { getFileFsPath } from '../../utils/paths';
/**
 * Builds the LanguageMode for pug blocks inside Vue SFCs. Only formatting
 * is supported; all other language features are no-ops.
 *
 * @param workspacePath root of the workspace, forwarded to prettier
 */
export function getPugMode(workspacePath: string): LanguageMode {
// VLS configuration, updated via configure(); read lazily in format().
let config: any = {};
return {
getId() {
return 'pug';
},
configure(c) {
config = c;
},
format(document, currRange, formattingOptions) {
// Respect the user's choice to disable pug formatting entirely.
if (config.vetur.format.defaultFormatter['pug'] === 'none') {
return [];
}
// Format either the requested range or, when none is given, the
// whole document (see getValueAndRange below).
const { value, range } = getValueAndRange(document, currRange);
return prettierify(
value,
getFileFsPath(document.uri),
workspacePath,
range,
config.vetur.format as VLSFormatConfig,
'pug',
false
);
},
onDocumentRemoved() {},
dispose() {}
};
}
/**
 * Resolves the text and range to format: the given range's slice of the
 * document when a range is provided, otherwise the entire document with a
 * range spanning it.
 */
function getValueAndRange(document: TextDocument, currRange: Range): { value: string; range: Range } {
  const fullText = document.getText();
  if (!currRange) {
    return {
      value: fullText,
      range: Range.create(Position.create(0, 0), document.positionAt(fullText.length))
    };
  }
  const startOffset = document.offsetAt(currRange.start);
  const endOffset = document.offsetAt(currRange.end);
  return { value: fullText.substring(startOffset, endOffset), range: currRange };
}
|
#! /usr/bin/env bash
# Download and preprocess WMT14 English-German data: fetch, extract,
# tokenize, clean, learn/apply BPE, and build vocabularies. A commented-out
# English-French configuration is kept below for reference.
set -e
OUTPUT_DIR=$PWD/gen_data
###############################################################################
# change these variables for other WMT data
###############################################################################
OUTPUT_DIR_DATA="${OUTPUT_DIR}/wmt14_ende_data"
OUTPUT_DIR_BPE_DATA="${OUTPUT_DIR}/wmt14_ende_data_bpe"
LANG1="en"
LANG2="de"
# each of TRAIN_DATA: data_url data_file_lang1 data_file_lang2
# (flat array consumed in strides of 3 by the extraction loop below)
TRAIN_DATA=(
'http://statmt.org/wmt13/training-parallel-europarl-v7.tgz'
'europarl-v7.de-en.en' 'europarl-v7.de-en.de'
'http://statmt.org/wmt13/training-parallel-commoncrawl.tgz'
'commoncrawl.de-en.en' 'commoncrawl.de-en.de'
'http://data.statmt.org/wmt17/translation-task/training-parallel-nc-v12.tgz'
'news-commentary-v12.de-en.en' 'news-commentary-v12.de-en.de'
)
# each of DEV_TEST_DATA: data_url data_file_lang1 data_file_lang2
# source & reference
DEV_TEST_DATA=(
'http://data.statmt.org/wmt17/translation-task/dev.tgz'
'newstest2013-ref.de.sgm' 'newstest2013-src.en.sgm'
'http://statmt.org/wmt14/test-full.tgz'
'newstest2014-deen-ref.en.sgm' 'newstest2014-deen-src.de.sgm'
)
###############################################################################
###############################################################################
# change these variables for other WMT data
###############################################################################
# OUTPUT_DIR_DATA="${OUTPUT_DIR}/wmt14_enfr_data"
# OUTPUT_DIR_BPE_DATA="${OUTPUT_DIR}/wmt14_enfr_data_bpe"
# LANG1="en"
# LANG2="fr"
# # each of TRAIN_DATA: ata_url data_tgz data_file
# TRAIN_DATA=(
# 'http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz'
# 'commoncrawl.fr-en.en' 'commoncrawl.fr-en.fr'
# 'http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz'
# 'training/europarl-v7.fr-en.en' 'training/europarl-v7.fr-en.fr'
# 'http://www.statmt.org/wmt14/training-parallel-nc-v9.tgz'
# 'training/news-commentary-v9.fr-en.en' 'training/news-commentary-v9.fr-en.fr'
# 'http://www.statmt.org/wmt10/training-giga-fren.tar'
# 'giga-fren.release2.fixed.en.*' 'giga-fren.release2.fixed.fr.*'
# 'http://www.statmt.org/wmt13/training-parallel-un.tgz'
# 'un/undoc.2000.fr-en.en' 'un/undoc.2000.fr-en.fr'
# )
# # each of DEV_TEST_DATA: data_url data_tgz data_file_lang1 data_file_lang2
# DEV_TEST_DATA=(
# 'http://data.statmt.org/wmt16/translation-task/dev.tgz'
# '.*/newstest201[45]-fren-ref.en.sgm' '.*/newstest201[45]-fren-src.fr.sgm'
# 'http://data.statmt.org/wmt16/translation-task/test.tgz'
# '.*/newstest2016-fren-ref.en.sgm' '.*/newstest2016-fren-src.fr.sgm'
# )
###############################################################################
mkdir -p $OUTPUT_DIR_DATA $OUTPUT_DIR_BPE_DATA
# Extract training data: walk TRAIN_DATA in strides of 3
# (url, lang1 file, lang2 file); download and extract each archive only if
# it is not already present, then concatenate all corpora into
# ${OUTPUT_DIR_DATA}/train.<lang>.
for ((i=0;i<${#TRAIN_DATA[@]};i+=3)); do
data_url=${TRAIN_DATA[i]}
data_tgz=${data_url##*/} # training-parallel-commoncrawl.tgz
data=${data_tgz%.*} # training-parallel-commoncrawl
data_lang1=${TRAIN_DATA[i+1]}
data_lang2=${TRAIN_DATA[i+2]}
if [ ! -e ${OUTPUT_DIR_DATA}/${data_tgz} ]; then
echo "Download "${data_url}
echo "Dir "${OUTPUT_DIR_DATA}/${data_tgz}
wget -O ${OUTPUT_DIR_DATA}/${data_tgz} ${data_url}
fi
if [ ! -d ${OUTPUT_DIR_DATA}/${data} ]; then
echo "Extract "${data_tgz}
mkdir -p ${OUTPUT_DIR_DATA}/${data}
# Plain .tar archives are extracted without -z.
tar_type=${data_tgz:0-3}
if [ ${tar_type} == "tar" ]; then
tar -xvf ${OUTPUT_DIR_DATA}/${data_tgz} -C ${OUTPUT_DIR_DATA}/${data}
else
tar -xvzf ${OUTPUT_DIR_DATA}/${data_tgz} -C ${OUTPUT_DIR_DATA}/${data}
fi
fi
# concatenate all training data
# (first archive truncates train.<lang> with >, later ones append with >>)
for data_lang in $data_lang1 $data_lang2; do
for f in `find ${OUTPUT_DIR_DATA}/${data} -regex ".*/${data_lang}"`; do
data_dir=`dirname $f`
data_file=`basename $f`
f_base=${f%.*}
f_ext=${f##*.}
# Gzipped corpus files are decompressed in place first.
if [ $f_ext == "gz" ]; then
gunzip $f
l=${f_base##*.}
f_base=${f_base%.*}
else
l=${f_ext}
fi
if [ $i -eq 0 ]; then
cat ${f_base}.$l > ${OUTPUT_DIR_DATA}/train.$l
else
cat ${f_base}.$l >> ${OUTPUT_DIR_DATA}/train.$l
fi
done
done
done
# Clone mosesdecoder (provides the SGM conversion, tokenizer and cleaning
# scripts used below)
if [ ! -d ${OUTPUT_DIR}/mosesdecoder ]; then
echo "Cloning moses for data processing"
git clone https://github.com/moses-smt/mosesdecoder.git ${OUTPUT_DIR}/mosesdecoder
fi
# Extract develop and test data: same download/extract pattern as training,
# then convert each .sgm file to plain text (one sentence per line).
dev_test_data=""
for ((i=0;i<${#DEV_TEST_DATA[@]};i+=3)); do
data_url=${DEV_TEST_DATA[i]}
data_tgz=${data_url##*/} # training-parallel-commoncrawl.tgz
data=${data_tgz%.*} # training-parallel-commoncrawl
data_lang1=${DEV_TEST_DATA[i+1]}
data_lang2=${DEV_TEST_DATA[i+2]}
if [ ! -e ${OUTPUT_DIR_DATA}/${data_tgz} ]; then
echo "Download "${data_url}
wget -O ${OUTPUT_DIR_DATA}/${data_tgz} ${data_url}
fi
if [ ! -d ${OUTPUT_DIR_DATA}/${data} ]; then
echo "Extract "${data_tgz}
mkdir -p ${OUTPUT_DIR_DATA}/${data}
tar_type=${data_tgz:0-3}
if [ ${tar_type} == "tar" ]; then
tar -xvf ${OUTPUT_DIR_DATA}/${data_tgz} -C ${OUTPUT_DIR_DATA}/${data}
else
tar -xvzf ${OUTPUT_DIR_DATA}/${data_tgz} -C ${OUTPUT_DIR_DATA}/${data}
fi
fi
for data_lang in $data_lang1 $data_lang2; do
for f in `find ${OUTPUT_DIR_DATA}/${data} -regex ".*/${data_lang}"`; do
echo "input-from-sgm"
data_dir=`dirname $f`
data_file=`basename $f`
data_out=`echo ${data_file} | cut -d '-' -f 1` # newstest2016
l=`echo ${data_file} | cut -d '.' -f 2` # en
# Accumulate dataset names into an alternation used by the grep
# patterns in the BPE stage below.
dev_test_data="${dev_test_data}\|${data_out}" # to make regexp
if [ ! -e ${OUTPUT_DIR_DATA}/${data_out}.$l ]; then
${OUTPUT_DIR}/mosesdecoder/scripts/ems/support/input-from-sgm.perl \
< $f > ${OUTPUT_DIR_DATA}/${data_out}.$l
fi
done
done
done
# Tokenize data.
# Fix vs. the original: the train/newstest2013 pipeline piped through
# `tee -a $tmp/valid.raw.$l`, but $tmp is never defined anywhere in this
# script, so tee tried to write /valid.raw.<lang> and broke the pipeline.
# The stray tee (and the unused f_tmp variable) are removed, and output
# redirection uses > instead of >> so rerunning after an interrupted
# tokenization cannot duplicate lines (the `! -e` guard already skips
# completed files).
for l in ${LANG1} ${LANG2}; do
  for f in `ls ${OUTPUT_DIR_DATA}/*.$l | grep "\(train\|newstest2013\)\.$l$"`; do
    f_base=${f%.*} # dir/train dir/newstest2013
    f_out=$f_base.tok.$l
    if [ ! -e $f_out ]; then
      echo "Tokenize "$f
      cat $f | \
        ${OUTPUT_DIR}/mosesdecoder/scripts/tokenizer/normalize-punctuation.perl $l | \
        ${OUTPUT_DIR}/mosesdecoder/scripts/tokenizer/remove-non-printing-char.perl | \
        ${OUTPUT_DIR}/mosesdecoder/scripts/tokenizer/tokenizer.perl -a -l $l -threads 8 > $f_out
      echo $f_out
    fi
  done
done
# newstest2014 is tokenized WITHOUT punctuation normalization — kept as in
# the original to match the evaluation setup.
for l in ${LANG1} ${LANG2}; do
  for f in `ls ${OUTPUT_DIR_DATA}/*.$l | grep "\(newstest2014\)\.$l$"`; do
    f_base=${f%.*} # dir/newstest2014
    f_out=$f_base.tok.$l
    if [ ! -e $f_out ]; then
      echo "Tokenize "$f
      cat $f | \
        ${OUTPUT_DIR}/mosesdecoder/scripts/tokenizer/tokenizer.perl -a -l $l -threads 8 > $f_out
      echo $f_out
    fi
  done
done
# Clean data: drop sentence pairs with bad length ratios or length
# outside [1, 256], for both raw and tokenized training data.
for f in ${OUTPUT_DIR_DATA}/train.${LANG1} ${OUTPUT_DIR_DATA}/train.tok.${LANG1}; do
f_base=${f%.*} # dir/train dir/train.tok
f_out=${f_base}.clean
if [ ! -e $f_out.${LANG1} ] && [ ! -e $f_out.${LANG2} ]; then
echo "Clean "${f_base}
${OUTPUT_DIR}/mosesdecoder/scripts/training/clean-corpus-n.perl $f_base ${LANG1} ${LANG2} ${f_out} 1 256
fi
done
python -m pip install subword-nmt
# Generate BPE data and vocabulary: learn a joint BPE model on the cleaned
# training data, apply it to train and dev/test sets, and extract a vocab.
for num_operations in 33708; do
if [ ! -e ${OUTPUT_DIR_BPE_DATA}/bpe.${num_operations} ]; then
echo "Learn BPE with ${num_operations} merge operations"
cat ${OUTPUT_DIR_DATA}/train.tok.clean.${LANG1} ${OUTPUT_DIR_DATA}/train.tok.clean.${LANG2} | \
subword-nmt learn-bpe -s $num_operations > ${OUTPUT_DIR_BPE_DATA}/bpe.${num_operations}
fi
for l in ${LANG1} ${LANG2}; do
for f in `ls ${OUTPUT_DIR_DATA}/*.$l | grep "\(train${dev_test_data}\)\.tok\(\.clean\)\?\.$l$"`; do
f_base=${f%.*} # dir/train.tok dir/train.tok.clean dir/newstest2016.tok
f_base=${f_base##*/} # train.tok train.tok.clean newstest2016.tok
f_out=${OUTPUT_DIR_BPE_DATA}/${f_base}.bpe.${num_operations}.$l
if [ ! -e $f_out ]; then
echo "Apply BPE to "$f
subword-nmt apply-bpe -c ${OUTPUT_DIR_BPE_DATA}/bpe.${num_operations} < $f > $f_out
fi
done
done
if [ ! -e ${OUTPUT_DIR_BPE_DATA}/vocab.bpe.${num_operations} ]; then
echo "Create vocabulary for BPE data"
cat ${OUTPUT_DIR_BPE_DATA}/train.tok.clean.bpe.${num_operations}.${LANG1} ${OUTPUT_DIR_BPE_DATA}/train.tok.clean.bpe.${num_operations}.${LANG2} | \
subword-nmt get-vocab | cut -f1 -d ' ' > ${OUTPUT_DIR_BPE_DATA}/vocab.bpe.${num_operations}
fi
done
# Adapt to the reader: produce tab-separated source/target pairs.
# NOTE(review): ${num_operations} below relies on the variable retaining its
# last loop value after the for-loop ends — works in bash, but fragile.
for f in ${OUTPUT_DIR_BPE_DATA}/*.bpe.${num_operations}.${LANG1}; do
f_base=${f%.*} # dir/train.tok.clean.bpe.32000 dir/newstest2016.tok.bpe.32000
f_out=${f_base}.${LANG1}-${LANG2}
if [ ! -e $f_out ]; then
paste -d '\t' $f_base.${LANG1} $f_base.${LANG2} > $f_out
fi
done
# Prepend the special tokens <s>, <e> and <unk> to the vocabulary.
if [ ! -e ${OUTPUT_DIR_BPE_DATA}/vocab_all.bpe.${num_operations} ]; then
sed '1i\<s>\n<e>\n<unk>' ${OUTPUT_DIR_BPE_DATA}/vocab.bpe.${num_operations} > ${OUTPUT_DIR_BPE_DATA}/vocab_all.bpe.${num_operations}
fi
echo "All done."
|
package com.atguigu.gulimall.cart.vo;
import org.springframework.util.CollectionUtils;
import java.math.BigDecimal;
import java.util.List;
/**
* 整个购物车
*/
/**
 * The whole shopping cart, aggregating individual cart items. The count
 * and amount fields are derived from {@code items} in their getters.
 */
public class Cart {
    /** Cart item list (may be null before items are loaded). */
    private List<CartItem> items;
    /** Total number of goods; derived, see {@link #getCountNum()}. */
    private Integer countNum;
    /** Number of distinct goods types; derived, see {@link #getCountType()}. */
    private Integer countType;
    /** Total price of checked items; derived, see {@link #getTotalAmount()}. */
    private BigDecimal totalAmount;
    /** Discount amount. */
    private BigDecimal reduce = new BigDecimal("0.00");

    public List<CartItem> getItems() {
        return items;
    }

    public void setItems(List<CartItem> items) {
        this.items = items;
    }

    /**
     * Sum of every item's count.
     * Fix vs. the original: guards against a null/empty item list — only
     * getCountType had the guard, so this method threw NPE on a fresh cart.
     *
     * @return total quantity, or 0 for an empty cart
     */
    public Integer getCountNum() {
        if (CollectionUtils.isEmpty(this.items)) {
            return 0;
        }
        return this.items.stream().map(CartItem::getCount).reduce(Integer::sum).orElse(0);
    }

    /** Number of distinct item types in the cart. */
    public Integer getCountType() {
        return CollectionUtils.isEmpty(this.items) ? 0 : this.items.size();
    }

    /**
     * Total price of the CHECKED items only.
     * Fix vs. the original: same null-items guard as getCountNum.
     *
     * @return checked-items total, or 0 for an empty cart
     */
    public BigDecimal getTotalAmount() {
        if (CollectionUtils.isEmpty(this.items)) {
            return new BigDecimal("0");
        }
        return this.items.stream().filter(CartItem::getCheck)
                .map(CartItem::getTotalPrice).reduce(BigDecimal::add)
                .orElse(new BigDecimal("0"));
    }

    public BigDecimal getReduce() {
        return reduce;
    }

    public void setReduce(BigDecimal reduce) {
        this.reduce = reduce;
    }
}
|
# Sort tab-separated input (stdin) numerically in ascending order by the
# second column, i.e. the average monthly temperature in January.
# Acts as a filter: pipe the tsv file in and redirect stdout as needed.
sort -n -k2 -t$'\t'
import sys

if __name__ == "__main__":
    # Echo the first command-line argument.
    # Fix vs. the original: exit with a usage message instead of an
    # IndexError traceback when no argument is supplied.
    if len(sys.argv) < 2:
        sys.exit("usage: {} <arg>".format(sys.argv[0]))
    print(sys.argv[1])
package com.qtimes.domain.dagger.qualifier;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import javax.inject.Qualifier;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Created by lt
 *
 * Dagger qualifier for distinguishing Context bindings by scope level.
 * The value is one of the string constants declared below
 * (application / activity / fragment), defaulting to APPLICATION.
 */
@Qualifier
@Documented
@Retention(RUNTIME)
public @interface ContextLevel {
// Well-known qualifier values.
String APPLICATION = "Application";
String ACTIVITY = "Activity";
String FRAGMENT = "Fragment";
String value() default APPLICATION;
}
# Standard Slackware doinst.sh helper: install a ".new" config file.
# - If no config with the final name exists, move the .new file into place.
# - If the existing config is byte-identical, drop the redundant .new copy.
# - Otherwise keep the .new file for the admin to merge by hand.
# Fix: quote all path expansions so names with spaces/globs are handled safely.
config() {
  NEW="$1"
  OLD="$(dirname "$NEW")/$(basename "$NEW" .new)"
  # If there's no config file by that name, mv it over:
  if [ ! -r "$OLD" ]; then
    mv "$NEW" "$OLD"
  elif [ "$(cat "$OLD" | md5sum)" = "$(cat "$NEW" | md5sum)" ]; then
    # toss the redundant copy
    rm "$NEW"
  fi
  # Otherwise, we leave the .new copy for the admin to consider...
}
# Install a ".new" config while preserving the ownership and permissions of
# the existing file, then hand off to config() for the usual install logic.
# Fix: quote all path expansions so names with spaces are handled safely.
preserve_perms() {
  NEW="$1"
  OLD="$(dirname "${NEW}")/$(basename "${NEW}" .new)"
  if [ -e "${OLD}" ]; then
    # Copy the old file (with its perms/ownership), overwrite its content
    # with the new file's content, then move it back over the .new file.
    cp -a "${OLD}" "${NEW}.incoming"
    cat "${NEW}" > "${NEW}.incoming"
    mv "${NEW}.incoming" "${NEW}"
  fi
  config "${NEW}"
}
# Install the rc script preserving any perms the admin set on the old copy.
preserve_perms etc/rc.d/rc.nginx.new
# Install the stock nginx config files via the .new-handling logic above.
config etc/nginx/fastcgi_params.new
config etc/nginx/fastcgi.conf.new
config etc/nginx/mime.types.new
config etc/nginx/nginx.conf.new
config etc/nginx/koi-utf.new
config etc/nginx/koi-win.new
config etc/nginx/scgi_params.new
config etc/nginx/uwsgi_params.new
config etc/nginx/win-utf.new
# Drop in a monit stanza if monit is installed and no nginx entry exists yet.
if [ -d /etc/monit.d/ ]; then
if [ ! -r /etc/monit.d/nginx ]; then
cp /usr/doc/%pkgnam%-%version%/SlackBuild/monit.nginx /etc/monit.d/nginx
fi
fi
|
<reponame>ariscahyadi/onos-1.14-with-indopronos-app
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.models.microsemi;
import com.google.common.collect.ImmutableMap;
import org.onosproject.yang.AbstractYangModelRegistrator;
import org.onosproject.yang.gen.v1.ietfsystemmicrosemi.rev20160505.IetfSystemMicrosemi;
import org.apache.felix.scr.annotations.Component;
import org.onosproject.yang.gen.v1.mseacfm.rev20160229.MseaCfm;
import org.onosproject.yang.gen.v1.mseasoampm.rev20160229.MseaSoamPm;
import org.onosproject.yang.gen.v1.mseasoamfm.rev20160229.MseaSoamFm;
import org.onosproject.yang.gen.v1.mseasafiltering.rev20160412.MseaSaFiltering;
import org.onosproject.yang.gen.v1.mseaunievcservice.rev20160317.MseaUniEvcService;
import org.onosproject.yang.model.DefaultYangModuleId;
import org.onosproject.yang.model.YangModuleId;
import org.onosproject.yang.runtime.AppModuleInfo;
import org.onosproject.yang.runtime.DefaultAppModuleInfo;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
@Component(immediate = true)
public class MicrosemiModelRegistrator extends AbstractYangModelRegistrator {
public MicrosemiModelRegistrator() {
super(MicrosemiModelRegistrator.class, getAppInfo());
}
private static Map<YangModuleId, AppModuleInfo> getAppInfo() {
Map<YangModuleId, AppModuleInfo> appInfo = new HashMap<>();
List<String> sysMicrosemiFeatures = new ArrayList<>();
sysMicrosemiFeatures.add("serial-number");
appInfo.put(new DefaultYangModuleId("ietf-system-microsemi", "2016-05-05"),
new DefaultAppModuleInfo(IetfSystemMicrosemi.class, sysMicrosemiFeatures));
appInfo.put(new DefaultYangModuleId("msea-uni-evc-service", "2016-03-17"),
new DefaultAppModuleInfo(MseaUniEvcService.class, null));
appInfo.put(new DefaultYangModuleId("msea-cfm", "2016-02-29"),
new DefaultAppModuleInfo(MseaCfm.class, null));
appInfo.put(new DefaultYangModuleId("msea-soam-fm", "2016-02-29"),
new DefaultAppModuleInfo(MseaSoamFm.class, null));
appInfo.put(new DefaultYangModuleId("msea-soam-pm", "2016-02-29"),
new DefaultAppModuleInfo(MseaSoamPm.class, null));
appInfo.put(new DefaultYangModuleId("msea-sa-filtering", "2016-04-12"),
new DefaultAppModuleInfo(MseaSaFiltering.class, null));
return ImmutableMap.copyOf(appInfo);
// TODO: Do some other registration tasks...
}
} |
# Example: tokenize a sentence by splitting on single spaces.
sentence = "The quick brown fox jumps over the lazy dog"
# split(" ") splits on each single space (unlike split(), which collapses runs
# of whitespace).
word_list = sentence.split(" ")
print(word_list)
# Output: ['The', 'quick', 'brown', 'fox', 'jumps', 'over', 'the', 'lazy', 'dog']
/**
* Copyright 2015-2017 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin.internal;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.stream.IntStream;
import org.junit.Test;
import zipkin.BinaryAnnotation;
import zipkin.CodecTest;
import zipkin.DependencyLink;
import zipkin.Span;
import zipkin.TestObjects;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for {@code ThriftCodec}: round-trip encode/decode of spans and
 * dependency links, size-in-bytes accounting, and rejection of values that
 * exceed the thrift string length limit.
 *
 * NOTE(review): {@code thrown} is not declared in this class — presumably an
 * {@code ExpectedException} rule inherited from {@code CodecTest}; verify.
 */
public final class ThriftCodecTest extends CodecTest {
  private final ThriftCodec codec = new ThriftCodec();

  @Override
  protected ThriftCodec codec() {
    return codec;
  }

  /** A span written by the codec must read back equal from a ByteBuffer. */
  @Test
  public void readSpanFromByteBuffer() throws IOException {
    for (Span span : TestObjects.TRACE) {
      byte[] bytes = codec().writeSpan(span);
      assertThat(codec().readSpan(ByteBuffer.wrap(bytes)))
          .isEqualTo(span);
    }
  }

  /** sizeInBytes must exactly predict the serialized length of one span. */
  @Test
  public void sizeInBytes_span() throws IOException {
    Span span = TestObjects.LOTS_OF_SPANS[0];
    assertThat(ThriftCodec.SPAN_WRITER.sizeInBytes(span))
        .isEqualTo(codec().writeSpan(span).length);
  }

  /** listSizeInBytes must exactly predict the serialized length of a trace. */
  @Test
  public void sizeInBytes_trace() throws IOException {
    assertThat(ThriftCodec.listSizeInBytes(ThriftCodec.SPAN_WRITER, TestObjects.TRACE))
        .isEqualTo(codec().writeSpans(TestObjects.TRACE).length);
  }

  /** listSizeInBytes must exactly predict the serialized length of links. */
  @Test
  public void sizeInBytes_links() throws IOException {
    assertThat(ThriftCodec.listSizeInBytes(ThriftCodec.DEPENDENCY_LINK_ADAPTER, TestObjects.LINKS))
        .isEqualTo(codec().writeDependencyLinks(TestObjects.LINKS).length);
  }

  /** Dependency links (including error counts) must round-trip unchanged. */
  @Test
  public void readDependencyLinksFromByteBuffer() throws IOException {
    List<DependencyLink> links = asList(
        DependencyLink.create("foo", "bar", 2),
        DependencyLink.builder()
            .parent("bar")
            .child("baz")
            .callCount(3)
            .errorCount(1).build()
    );
    byte[] bytes = codec().writeDependencyLinks(links);
    assertThat(codec().readDependencyLinks(ByteBuffer.wrap(bytes)))
        .isEqualTo(links);
  }

  /** A binary annotation over STRING_LENGTH_LIMIT must fail the write. */
  @Test
  public void writeLargeAnnotations() {
    Span span = TestObjects.LOTS_OF_SPANS[0].toBuilder().binaryAnnotations(asList(
        BinaryAnnotation.builder()
            .key("Large.value")
            .type(BinaryAnnotation.Type.BYTES)
            .value(new byte[ThriftCodec.STRING_LENGTH_LIMIT + 1])
            .build()
    )).build();
    thrown.expect(AssertionError.class);
    thrown.expectMessage("Could not write");
    codec().writeSpan(span);
  }

  /** A span name over STRING_LENGTH_LIMIT must fail the write. */
  @Test
  public void writeLargeSpanName() {
    StringBuilder sb = new StringBuilder();
    IntStream.range(0, ThriftCodec.STRING_LENGTH_LIMIT + 1).forEach((i) -> sb.append("."));
    Span span = Span.builder()
        .traceId(0L)
        .parentId(0L)
        .id(0L)
        .name(sb.toString())
        .build();
    thrown.expect(AssertionError.class);
    thrown.expectMessage("Could not write");
    codec().writeSpan(span);
  }
}
|
from typing import Tuple

def scale_xy(x: int, y: int, scale: float) -> Tuple[int, int]:
    """Scale a pair of dimensions by a factor, truncating to whole pixels.

    Args:
        x: Original x dimension of the video clip.
        y: Original y dimension of the video clip.
        scale: Scaling factor to apply to both dimensions.

    Returns:
        The scaled (x, y) pair, each value truncated to an int.
    """
    scaled_x = int(x * scale)
    scaled_y = int(y * scale)
    return scaled_x, scaled_y
<gh_stars>0
#!/usr/bin/env node
/**
 * Wechaty - Wechat for Bot. Connecting ChatBots
 *
 * License: ISC
 * https://github.com/wechaty/wechaty
 *
 * Prints the version of the Wechaty library and exits.
 */
import Wechaty from '../src/wechaty'

// Obtain the shared Wechaty singleton and print its version string.
const w = Wechaty.instance()
console.log(w.version())
|
def encodeVariableLength(value):
    """Encode a non-negative integer as a LEB128-style varint.

    Each output byte carries 7 bits of the value, least-significant group
    first; the high bit is set on every byte except the last.

    Args:
        value: Non-negative integer to encode.

    Returns:
        bytearray containing the variable-length encoding.

    Raises:
        ValueError: If value is negative (previously produced a silently
            wrong single-byte encoding instead of failing).
    """
    if value < 0:
        raise ValueError("encodeVariableLength requires a non-negative integer, got %d" % value)
    byte_stream = bytearray()
    # Emit 7 bits at a time with the continuation bit set while more remain.
    while value >= 0x80:
        byte_stream.append((value & 0x7F) | 0x80)
        value >>= 7
    # Final byte: continuation bit clear.
    byte_stream.append(value & 0x7F)
    return byte_stream
import React from 'react';
import {UseKeyboardHeight, useKeyboardHeight} from './useKeyboardHeight';

/**
 * Higher-order component that injects the current keyboard height (from the
 * useKeyboardHeight hook) into the wrapped component as `keyboardHeight`.
 * Callers supply all of the wrapped component's props except those provided
 * by UseKeyboardHeight.
 */
export const withKeyboardHeight = <T extends UseKeyboardHeight = UseKeyboardHeight>(
WrappedComponent: React.ComponentType<T>
) => {
// Preserve a readable component name for React devtools.
const displayName =
WrappedComponent.displayName || WrappedComponent.name || 'Component';
const ComponentWithKeyboardHeight = (props: Omit<T, keyof UseKeyboardHeight>) => {
const keyboardHeight = useKeyboardHeight();
return <WrappedComponent {...(props as T)} keyboardHeight={keyboardHeight}/>;
};
ComponentWithKeyboardHeight.displayName = `withKeyboardHeight(${displayName})`;
return ComponentWithKeyboardHeight;
};
export default withKeyboardHeight;
<reponame>eserozvataf/js-boilerplate
import React from 'react';
import { Button, Container, Divider, Grid, Header, Image, Segment, Icon } from 'semantic-ui-react';
import WhiteImage from './white-image.png';
import NanImage from './nan.jpg';
// Landing page composed of four stacked semantic-ui Segments: a hero banner,
// a feature grid, a testimonial strip and a long-form content section.
// Purely presentational — no state, props or side effects.
function Home() {
return (
<>
{/* Hero banner with headline, tagline and call-to-action */}
<Segment inverted textAlign="center" style={{ padding: '4em 0em' }} vertical>
<Container text>
<Header
as="h1"
content="Darty React App"
inverted
style={{
fontSize: '3em',
fontWeight: 'normal',
marginBottom: 0,
}}
/>
<Header
as="h2"
content="Do whatever you want when you want to."
inverted
style={{
fontSize: '1.5em',
fontWeight: 'normal',
marginTop: '0.2em',
}}
/>
<Button primary size="huge">
Get Started
<Icon name="right arrow" />
</Button>
</Container>
</Segment>
{/* Feature grid: copy on the left, illustration on the right */}
<Segment style={{ padding: '8em 0em' }} vertical>
<Grid container stackable verticalAlign="middle">
<Grid.Row>
<Grid.Column width={8}>
<Header as="h3" style={{ fontSize: '2em' }}>
We Help Companies and Companions
</Header>
<p style={{ fontSize: '1.33em' }}>
We can give your company superpowers to do things that they
never thought possible. Let us delight your customers and
empower your needs... through pure data analytics.
</p>
<Header as="h3" style={{ fontSize: '2em' }}>
We Make Bananas That Can Dance
</Header>
<p style={{ fontSize: '1.33em' }}>
Yes that's right, you thought it was the stuff of dreams, but
even bananas can be bioengineered.
</p>
</Grid.Column>
<Grid.Column floated="right" width={6}>
<Image bordered rounded size="large" src={WhiteImage} />
</Grid.Column>
</Grid.Row>
<Grid.Row>
<Grid.Column textAlign="center">
<Button size="huge">Check Them Out</Button>
</Grid.Column>
</Grid.Row>
</Grid>
</Segment>
{/* Testimonial strip with two quotes */}
<Segment style={{ padding: '0em' }} vertical>
<Grid celled="internally" columns="equal" stackable>
<Grid.Row textAlign="center">
<Grid.Column style={{ paddingBottom: '5em', paddingTop: '5em' }}>
<Header as="h3" style={{ fontSize: '2em' }}>
"What a Company"
</Header>
<p style={{ fontSize: '1.33em' }}>That is what they all say about us</p>
</Grid.Column>
<Grid.Column style={{ paddingBottom: '5em', paddingTop: '5em' }}>
<Header as="h3" style={{ fontSize: '2em' }}>
"I shouldn't have gone with their competitor."
</Header>
<p style={{ fontSize: '1.33em' }}>
<Image avatar src={NanImage} />
<b>Nan</b>
{' '}
Chief Fun Officer Acme Toys
</p>
</Grid.Column>
</Grid.Row>
</Grid>
</Segment>
{/* Long-form content section with divider and case-study teaser */}
<Segment style={{ padding: '8em 0em' }} vertical>
<Container text>
<Header as="h3" style={{ fontSize: '2em' }}>
Breaking The Grid, Grabs Your Attention
</Header>
<p style={{ fontSize: '1.33em' }}>
Instead of focusing on content creation and hard work, we have learned
how to master the art of doing nothing by providing massive amounts of
whitespace and generic content that can seem massive, monolithic and
worth your attention.
</p>
<Button as="a" size="large">
Read More
</Button>
<Divider as="h4" className="header" horizontal style={{ margin: '3em 0em', textTransform: 'uppercase' }}>
Case Studies
</Divider>
<Header as="h3" style={{ fontSize: '2em' }}>
Did We Tell You About Our Bananas?
</Header>
<p style={{ fontSize: '1.33em' }}>
Yes I know you probably disregarded the earlier boasts as non-sequitur
filler content, but it's really true. It took years of gene splicing and
combinatory DNA research, but our bananas can really dance.
</p>
<Button as="a" size="large">
I'm Still Quite Interested
</Button>
</Container>
</Segment>
</>
);
}
export {
Home as default,
};
|
function arrayReverse(list) {
let reversedList = [];
for (let i = list.length - 1; i >= 0; i--) {
let item = list[i];
reversedList.push(item);
}
return reversedList;
} |
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "otbTestMain.h"
// Registers every basic-filter test with the OTB test driver (otbTestMain.h
// dispatches to the registered function named on the command line).
void RegisterTests()
{
  REGISTER_TEST(otbPrintableImageFilter);
  REGISTER_TEST(otbConcatenateScalarValueImageFilterTest);
  REGISTER_TEST(otbShiftScaleImageAdaptor);
  REGISTER_TEST(otbImageToVectorImageCastFilter);
  REGISTER_TEST(otbFunctionWithNeighborhoodToImageFilter);
  REGISTER_TEST(otbSqrtSpectralAngleImageFilter);
  REGISTER_TEST(otbStreamingShrinkImageFilter);
  REGISTER_TEST(otbUnaryImageFunctorWithVectorImageFilter);
  REGISTER_TEST(otbPrintableImageFilterWithMask);
  REGISTER_TEST(otbStreamingResampleImageFilter);
  REGISTER_TEST(otbVectorImageToAmplitudeImageFilter);
  REGISTER_TEST(otbUnaryFunctorNeighborhoodWithOffsetImageFilter);
  REGISTER_TEST(otbStreamingResampleImageFilterCompareWithITK);
  REGISTER_TEST(otbRegionProjectionResampler);
  REGISTER_TEST(otbUnaryFunctorWithIndexImageFilter);
  REGISTER_TEST(otbMeanFunctorImageTest);
  REGISTER_TEST(otbVectorImageTo3DScalarImageFilter);
  REGISTER_TEST(otbTileImageFilter);
  REGISTER_TEST(otbMatrixImageFilterTest);
  REGISTER_TEST(otbMatrixTransposeMatrixImageFilter);
  REGISTER_TEST(otbUnaryFunctorNeighborhoodImageFilter);
  REGISTER_TEST(otbStreamingInnerProductVectorImageFilter);
  REGISTER_TEST(otbPhaseFunctorTest);
  REGISTER_TEST(otbChangeLabelImageFilter);
  REGISTER_TEST(otbBoxAndWhiskerImageFilter);
  REGISTER_TEST(otbVectorRescaleIntensityImageFilter);
  REGISTER_TEST(otbLog10ThresholdedImageFilterTest);
  REGISTER_TEST(otbExtractROIResample);
  REGISTER_TEST(otbLocalGradientVectorImageFilterTest);
  REGISTER_TEST(otbBinaryImageMinimalBoundingRegionCalculator);
  REGISTER_TEST(otbPerBandVectorImageFilterWithSobelFilter);
  REGISTER_TEST(otbBinaryImageDensityFunction);
  REGISTER_TEST(otbThresholdVectorImageFilterTest);
  REGISTER_TEST(otbPerBandVectorImageFilterWithMeanFilter);
  REGISTER_TEST(otbAmplitudeFunctorTest);
  REGISTER_TEST(otbMultiplyByScalarImageFilterTest);
  REGISTER_TEST(otbClampImageFilterTest);
  REGISTER_TEST(otbClampImageFilterConversionTest);
  REGISTER_TEST(otbConcatenateVectorImageFilter);
  REGISTER_TEST(otbBinaryImageToDensityImageFilter);
  REGISTER_TEST(otbSpectralAngleDistanceImageFilter);
  REGISTER_TEST(otbEuclideanDistanceMetricWithMissingValue);
  REGISTER_TEST(otbNRIBandImagesToOneNComplexBandsImage);
  REGISTER_TEST(otbOneRIBandImageToOneComplexBandImage);
  REGISTER_TEST(otbTwoNRIBandsImageToNComplexBandsImage);
  REGISTER_TEST(otbChangeNoDataValueFilter);
  REGISTER_TEST(otbImageToNoDataMaskFilter);
  REGISTER_TEST(otbChangeInformationImageFilter);
  REGISTER_TEST(otbGridResampleImageFilter);
  REGISTER_TEST(otbMaskedIteratorDecoratorNominal);
  REGISTER_TEST(otbMaskedIteratorDecoratorDegenerate);
  REGISTER_TEST(otbMaskedIteratorDecoratorExtended);
}
|
# Migration: add a `plays` counter column (integer, default 0) to the
# stories table so play counts start at zero for existing and new rows.
class AddPlaysToStories < ActiveRecord::Migration[5.1]
  def change
    add_column :stories, :plays, :integer, default: 0
  end
end
|
package com.yoga.weixinapp.wxapi;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.google.gson.annotations.SerializedName;
import lombok.Getter;
import lombok.Setter;
/**
 * Response body of the WeChat access-token endpoint. Gson's
 * {@code @SerializedName} maps the snake_case JSON fields; Lombok generates
 * the accessors; null fields are omitted when re-serialized with Jackson.
 */
@Getter
@Setter
@JsonInclude(JsonInclude.Include.NON_NULL)
public class WxTokenResult extends WxBaseResult {
    /** The issued access token. */
    @SerializedName("access_token")
    private String accessToken;
    /** Token lifetime in seconds. */
    @SerializedName("expires_in")
    private Integer expiresIn;
}
|
<reponame>soerenetler/neural_question_generation
import tensorflow as tf
import numpy as np
from utils import _create_cell
class Encoder(tf.keras.layers.Layer):
    """Seq2seq encoder: an embedding layer followed by a mono- or
    bi-directional RNN (GRU or LSTM, possibly multi-layer).

    Comments marked NOTE(review) flag suspected issues; the code itself is
    unchanged.
    """

    def __init__(self, pre_embedding=None, vocab_size=34004, embedding_dim=300, embedding_trainable=True, enc_type='bi',
                 num_layer=1, hidden_size=512,
                 cell_type='lstm', dropout=0.1, batch_sz=64):
        super(Encoder, self).__init__()
        self.num_layer = num_layer
        self.hidden_size = hidden_size
        self.cell_type = cell_type
        self.dropout = dropout
        self.enc_type = enc_type
        self.pre_embedding = pre_embedding
        self.vocab_size = vocab_size
        self.embedding_dim = embedding_dim
        self.embedding_trainable = embedding_trainable
        self.batch_sz = batch_sz
        print("INIT - encoder - num_layer: ", num_layer)
        print("INIT - encoder - hidden_size: ", hidden_size)
        print("INIT - encoder - cell_type: ", cell_type)
        print("INIT - encoder - dropout: ", dropout)
        print("INIT - encoder - enc_type: ", enc_type)
        print("INIT - encoder - pre_embedding: ", pre_embedding)
        print("INIT - encoder - vocab_size: ", vocab_size)
        print("INIT - encoder - embedding_dim: ", embedding_dim)
        print("INIT - encoder - embedding_trainable: ", embedding_trainable)
        print("INIT - encoder - batch_sz: ", batch_sz)
        # Embedding Layer
        # NOTE(review): '== None' works but PEP 8 prefers 'is None'.
        if self.pre_embedding == None:
            # NOTE(review): this branch hard-codes trainable=True and ignores
            # embedding_trainable — confirm that is intended.
            self.embd_layer = tf.keras.layers.Embedding(self.vocab_size,
                                                        self.embedding_dim,
                                                        trainable=True)
        else:
            # Load pretrained embeddings from an .npy file path.
            embedding_matrix = np.load(self.pre_embedding)
            print("embedding_matrix:", embedding_matrix.shape)
            init = tf.keras.initializers.Constant(embedding_matrix)
            self.embd_layer = tf.keras.layers.Embedding(embedding_matrix.shape[0],
                                                        self.embedding_dim,
                                                        embeddings_initializer=init,
                                                        trainable=self.embedding_trainable)
        enc_cell = _create_cell(self.hidden_size, self.cell_type, self.num_layer, dropout=self.dropout)
        # NOTE(review): batch_size is hard-coded to 64 here instead of
        # self.batch_sz — debug print only, but confirm.
        print("get_initial_state:", enc_cell.get_initial_state(
            batch_size=64, dtype=tf.float32))
        # Encoder
        if self.enc_type == 'mono':
            print("enc_cell: ", enc_cell)
            self.rnn = tf.keras.layers.RNN(enc_cell, return_sequences=True,
                                           return_state=True)
        elif self.enc_type == 'bi':
            print(enc_cell)
            self.rnn = tf.keras.layers.Bidirectional(tf.keras.layers.RNN(
                enc_cell, return_sequences=True,
                return_state=True))
        else:
            raise ValueError('Invalid input %s' % self.enc_type)

    def call(self, inputs, hidden, training=False):
        """Embed token ids and run the RNN.

        Args:
            inputs: batch of token-id sequences fed to the embedding layer.
            hidden: initial RNN state (see initialize_hidden_state).
            training: forwarded to the RNN (affects dropout).

        Returns:
            (encoder_output, encoder_state) — state layout depends on
            enc_type/cell_type, see the branches below.
        """
        embd_inputs = self.embd_layer(inputs)
        #print("embd_inputs:", embd_inputs.shape)
        #print("initial_state:", hidden)
        #print("rnn: ", self.rnn)
        result_encoder = self.rnn(
            embd_inputs, initial_state=hidden, training=training)
        #print("result_encoder:", result_encoder)
        if self.enc_type == 'mono':
            if self.cell_type == 'gru':
                encoder_output, encoder_state = result_encoder
                # encoder_output: [batch_size, max_time, hidden_size]
                # encoder_state: last hidden_state of encoder, [batch_size, hidden_size]
            else:  # lstm
                encoder_output, encoder_state_h, encoder_state_c = result_encoder
                encoder_state = [encoder_state_h, encoder_state_c]
        elif self.enc_type == 'bi':
            if self.cell_type == 'gru':
                # NOTE(review): result_encoder is never unpacked in this
                # branch, so encoder_output/encoder_state are undefined here
                # and these lines should raise NameError — verify.
                encoder_output = tf.concat(encoder_output, -1)
                if self.num_layer == 1:
                    encoder_state = tf.concat(encoder_state, -1)
                else:  # multi layer
                    encoder_state = tuple(tf.concat(
                        [state_fw, state_bw], -1) for state_fw, state_bw in zip(encoder_state[0], encoder_state[1]))
            else:  # lstm
                # NOTE(review): a Bidirectional LSTM RNN normally returns
                # output plus forward/backward h and c (5 tensors); this
                # 3-way unpack, and the later encoder_state[0].c accesses,
                # look inconsistent — confirm against the _create_cell output.
                print("result_encoder ", len(result_encoder))
                encoder_output, encoder_state_h, encoder_state_c = result_encoder
                encoder_output = tf.concat(encoder_output, -1)
                if self.num_layer == 1:
                    encoder_state_c = tf.concat(
                        [encoder_state[0].c, encoder_state[1].c], 1)
                    encoder_state_h = tf.concat(
                        [encoder_state[0].h, encoder_state[1].h], 1)
                    encoder_state = dict(c=encoder_state_c, h=encoder_state_h)
                else:  # multi layer
                    _encoder_state = list()
                    for state_fw, state_bw in zip(encoder_state[0], encoder_state[1]):
                        partial_state_c = tf.concat(
                            [state_fw.c, state_bw.c], 1)
                        partial_state_h = tf.concat(
                            [state_fw.h, state_bw.h], 1)
                        partial_state = dict(
                            c=partial_state_c, h=partial_state_h)
                        _encoder_state.append(partial_state)
                    encoder_state = tuple(_encoder_state)
        return encoder_output, encoder_state

    def initialize_hidden_state(self):
        """Return zero initial state(s) shaped for the configured RNN:
        one tensor per GRU direction, an [h, c] pair per LSTM direction."""
        if self.enc_type == 'mono':
            if self.cell_type == 'gru':
                return tf.zeros((self.batch_sz, self.hidden_size))
            else:  # LSTM
                return [tf.zeros((self.batch_sz, self.hidden_size)), tf.zeros((self.batch_sz, self.hidden_size))]
        else:  # bi
            if self.cell_type == 'gru':
                return [tf.zeros((self.batch_sz, self.hidden_size))] * 2
            else:  # LSTM
                return [tf.zeros((self.batch_sz, self.hidden_size)), tf.zeros((self.batch_sz, self.hidden_size))] * 2
|
#!/bin/bash
# Abort on the first failing command.
set -e
# Put the conda toolchain first on PATH and point CMake at the conda prefix.
export PATH="/opt/conda/bin:$PATH"
export CMAKE_PREFIX_PATH="/opt/conda"
# Smoke test: verify scipy and torch import cleanly and print their versions.
python -c "import scipy; import torch; print(torch.__version__); print(scipy.__version__)"
|
import pytesseract
import cv2

# Read the image from disk with OpenCV (BGR numpy array).
# NOTE(review): cv2.imread returns None if the file is missing — not checked.
image = cv2.imread('document.png')
# Run Tesseract OCR on the image and collect the recognized text
# (the previous comment claimed a format conversion; none happens here).
text = pytesseract.image_to_string(image)
print(text)  # prints whatever text was recognized in the image
<filename>open-sphere-plugins/csv-common/src/main/java/io/opensphere/csvcommon/detect/ValueWithConfidence.java
package io.opensphere.csvcommon.detect;
/**
* Holds a value with associated confidence.
*
* @param <T> The type of the value.
*/
/**
 * Pairs a value with a confidence score in the range 0.0 to 1.0.
 *
 * @param <T> the type of the wrapped value
 */
public class ValueWithConfidence<T>
{
    /** The wrapped value. */
    private T value;

    /** Confidence score, always within [0.0, 1.0]. */
    private float confidence;

    /** Creates an empty instance (null value, confidence 0.0). */
    public ValueWithConfidence()
    {
    }

    /**
     * Creates an instance with the given value and confidence.
     *
     * @param value the value to wrap
     * @param confidence the confidence, 0.0 to 1.0
     */
    public ValueWithConfidence(T value, float confidence)
    {
        this.value = value;
        setConfidenceInternal(confidence);
    }

    /**
     * @return the wrapped value
     */
    public T getValue()
    {
        return value;
    }

    /**
     * @return the confidence, 0.0 to 1.0
     */
    public float getConfidence()
    {
        return confidence;
    }

    /**
     * Replaces the wrapped value.
     *
     * @param value the new value
     */
    public void setValue(T value)
    {
        this.value = value;
    }

    /**
     * Replaces the confidence.
     *
     * @param confidence the new confidence, 0.0 to 1.0
     */
    public void setConfidence(float confidence)
    {
        setConfidenceInternal(confidence);
    }

    @Override
    public String toString()
    {
        return "ValueWithConfidence [value=" + value + ", confidence=" + confidence + "]";
    }

    /**
     * Validates and stores the confidence.
     *
     * @param confidence the confidence to store
     * @throws IllegalArgumentException if confidence is outside [0.0, 1.0]
     */
    private void setConfidenceInternal(float confidence)
    {
        if (confidence < 0.0 || confidence > 1.0)
        {
            throw new IllegalArgumentException(
                    "Confidence value of " + confidence + " is outside the allowed range of 0.0 to 1.0");
        }
        this.confidence = confidence;
    }
}
|
<filename>internal/gitaly/hook/manager.go
package hook
import (
"context"
"io"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/storage"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
// ReferenceTransactionState is the state of the Git reference transaction. It reflects the first
// parameter of the reference-transaction hook. See githooks(1) for more information.
type ReferenceTransactionState int

const (
	// ReferenceTransactionPrepared indicates all reference updates have been queued to the
	// transaction and were locked on disk.
	ReferenceTransactionPrepared = ReferenceTransactionState(iota)
	// ReferenceTransactionCommitted indicates the reference transaction was committed and all
	// references now have their respective new value.
	ReferenceTransactionCommitted
	// ReferenceTransactionAborted indicates the transaction was aborted, no changes were
	// performed and the reference locks have been released.
	ReferenceTransactionAborted
)

// Manager is an interface providing the ability to execute Git hooks.
type Manager interface {
	// PreReceiveHook executes the pre-receive Git hook and any installed custom hooks. stdin
	// must contain all references to be updated and match the format specified in githooks(5).
	PreReceiveHook(ctx context.Context, repo *gitalypb.Repository, pushOptions, env []string, stdin io.Reader, stdout, stderr io.Writer) error
	// PostReceiveHook executes the post-receive Git hook and any installed custom hooks. stdin
	// must contain all references to be updated and match the format specified in githooks(5).
	PostReceiveHook(ctx context.Context, repo *gitalypb.Repository, pushOptions, env []string, stdin io.Reader, stdout, stderr io.Writer) error
	// UpdateHook executes the update Git hook and any installed custom hooks for the reference
	// `ref` getting updated from `oldValue` to `newValue`.
	UpdateHook(ctx context.Context, repo *gitalypb.Repository, ref, oldValue, newValue string, env []string, stdout, stderr io.Writer) error
	// ReferenceTransactionHook executes the reference-transaction Git hook. stdin must contain
	// all references to be updated and match the format specified in githooks(5).
	ReferenceTransactionHook(ctx context.Context, state ReferenceTransactionState, env []string, stdin io.Reader) error
}

// GitLabHookManager is a hook manager containing Git hook business logic. It
// uses the GitLab API to authenticate and track ongoing hook calls.
type GitLabHookManager struct {
	// locator resolves repository storage paths.
	locator storage.Locator
	// gitlabClient talks to the GitLab internal API.
	gitlabClient gitlab.Client
	// hooksConfig carries the hooks section of the Gitaly configuration.
	hooksConfig config.Hooks
	// txManager casts votes for transactional hooks.
	txManager transaction.Manager
}

// NewManager returns a new hook manager
func NewManager(locator storage.Locator, txManager transaction.Manager, gitlabClient gitlab.Client, cfg config.Cfg) *GitLabHookManager {
	return &GitLabHookManager{
		locator:      locator,
		gitlabClient: gitlabClient,
		hooksConfig:  cfg.Hooks,
		txManager:    txManager,
	}
}
|
<gh_stars>0
#include<bits/stdc++.h>
using namespace std;

// Reads an integer m and prints YES when m can be split into two positive
// even parts (i.e. m is even and greater than 2), NO otherwise.
int main() {
    int m;
    cin >> m;
    const bool splittable = (m > 2) && (m % 2 == 0);
    cout << (splittable ? "YES" : "NO") << endl;
    return 0;
}
package config
import (
"fmt"
"github.com/spf13/viper"
)
// Server holds the HTTP server settings loaded from configuration.
// NOTE(review): the `yaml.mapstructure:"..."` tag keys look unusual (a dot in
// the key name) — confirm whether any decoder actually reads them or whether
// they are typos for separate `yaml`/`mapstructure` tags.
type Server struct {
	Host          string       `json:"host" yaml:"host" mapstructure:"host" yaml.mapstructure:"host"`
	Port          string       `json:"port" yaml:"port" mapstructure:"port" yaml.mapstructure:"port"`
	EnableMetrics bool         `json:"enable_metrics" yaml:"enable_metrics" mapstructure:"enable_metrics" yaml.mapstructure:"enable_metrics"`
	Health        ServerHealth `json:"health" yaml:"health" mapstructure:"health"`
}

// ServerHealth configures the health-check endpoint.
type ServerHealth struct {
	Enabled  bool   `json:"enabled" yaml:"enabled" mapstructure:"enabled"`
	Path     string `json:"path" yaml:"path" mapstructure:"path"`
	Listener string `json:"listener" yaml:"listener" mapstructure:"listener"`
}
// DefaultServerConfig builds a Server from the global viper instance:
// host/port come from the "server.host"/"server.port" keys, the health check
// is enabled at /healthz, and metrics are off.
// NOTE(review): EnableMetrics is hard-coded false and Health.Listener is left
// empty regardless of configuration — confirm that is intended.
func DefaultServerConfig() Server {
	v := viper.GetViper()
	return Server{
		Host: v.GetString("server.host"),
		Port: v.GetString("server.port"),
		Health: ServerHealth{
			Enabled: true,
			Path:    "/healthz",
		},
		EnableMetrics: false,
	}
}
// HostPort returns the server address in "host:port" form.
func (c Server) HostPort() string {
	return fmt.Sprintf("%s:%s", c.Host, c.Port)
}
|
<filename>modules/component-web-core/src/main/java/com/nortal/spring/cw/core/web/component/element/ElementLabel.java
package com.nortal.spring.cw.core.web.component.element;
import java.io.Serializable;
/**
 * Abstraction of an element's label (display name). This interface is meant
 * for places where different labels of an element must be shown conditionally
 * alongside various elements.
 *
 * @author <NAME>
 * @since 03.12.2013
 */
public interface ElementLabel extends Serializable {
    /**
     * Returns the element's name according to the implementation.
     *
     * @return {@link String}
     */
    String getLabel();
}
|
#!/bin/sh
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
# NOTE(review): the shebang is /bin/sh but the (( ... )) arithmetic tests below
# are bashisms — confirm this always runs under bash.
#settings
LOG_DIR=/server/vistrails/logs
Xvfb_CMD=/usr/bin/Xvfb
VIRTUAL_DISPLAY=":6"
VISTRAILS_DIR=/server/vistrails/git/vistrails
ADDRESS="vis-7.sci.utah.edu"
PORT="8081"
CONF_FILE="server.cfg"
NUMBER_OF_OTHER_VISTRAILS_INSTANCES="1"
MULTI_OPTION="--multithread"
# Positional overrides: $1 display, $2 address, $3 port, $4 instance count,
# $5 multithread flag (0 disables).
if (("$#" > "0")); then
VIRTUAL_DISPLAY="$1"
fi
if (("$#" > "1")); then
ADDRESS="$2"
fi
if (("$#" > "2")); then
PORT="$3"
fi
if (("$#" > "3")); then
NUMBER_OF_OTHER_VISTRAILS_INSTANCES="$4"
fi
if (("$#" == "5")); then
if(("$5" == "0")); then
MULTI_OPTION=""
else
MULTI_OPTION="--multithread"
fi
fi
Xvfb_PARAM="$VIRTUAL_DISPLAY -screen 0 1280x960x24"
PID="$LOG_DIR/pid.$PORT.vistrails"
LOG_XVFB="$LOG_DIR/xvfb$VIRTUAL_DISPLAY.log"
#try to find Process ID of running X-Server
echo "checking if Xvfb is already running..."
echo "ps -eaf | grep $Xvfb_CMD | grep $VIRTUAL_DISPLAY | awk '{print \$2}'"
pid=`ps -eaf | grep $Xvfb_CMD | grep $VIRTUAL_DISPLAY | awk '{print \$2}'`
if [ "$pid" ]; then
echo "Xvfb already running [pid=${pid}]"
else
#start a virtual server
if [ -x $Xvfb_CMD ]; then
$Xvfb_CMD $Xvfb_PARAM>& $LOG_XVFB &
sleep 5
#Make sure it started
pid=`ps -eaf | grep $Xvfb_CMD | grep $VIRTUAL_DISPLAY | awk '{print $2}'`
if [ "$pid" ]; then
echo "done."
else
echo "FAILED."
fi
else
echo "Error: Could not find $Xvfb_CMD. Cannot start Xvfb."
fi
fi
echo -n "Starting VisTrails in Server Mode on display $VIRTUAL_DISPLAY.0 - "
cd $VISTRAILS_DIR
export DISPLAY=$VIRTUAL_DISPLAY
# Ask any running server instance to shut down gracefully.
python stop_vistrails_server.py http://$ADDRESS:$PORT
#give some time for quitting
sleep 5
#try again because sometimes it doesn't quit
python stop_vistrails_server.py http://$ADDRESS:$PORT
sleep 5
#finally kill it if it still did not respond because it was hanging
# NOTE(review): fails noisily if the PID file does not exist — confirm ok.
kill -9 `cat $PID`
# Launch the server in the background and record its PID for the next restart.
python vistrails_server.py --rpc-server=$ADDRESS --rpc-port $PORT --rpc-config $CONF_FILE --rpc-instances $NUMBER_OF_OTHER_VISTRAILS_INSTANCES $MULTI_OPTION&
echo $! > $PID
|
def isAnagram(word1, word2):
    """Return True if word1 and word2 are anagrams of each other.

    Comparison is case-sensitive and counts every character.

    Args:
        word1: First string.
        word2: Second string.

    Returns:
        bool: True when both strings contain the same characters with the
        same multiplicities.
    """
    # Bug fix: the original returned the undefined name `false` (NameError
    # at runtime); Python's boolean literal is `False`.
    if len(word1) != len(word2):
        return False
    # Anagrams have identical sorted character sequences.
    return sorted(word1) == sorted(word2)
def prime_factors(n):
    """Return the prime factorization of n, smallest factor first.

    e.g. prime_factors(12) -> [2, 2, 3].

    Fixes over the original: `math.sqrt` was used without importing math
    (NameError), n == 0 looped forever dividing out 2s, and the sqrt bound
    was computed once instead of shrinking as factors are removed.

    Args:
        n: Integer to factorize.

    Returns:
        list[int]: Prime factors in non-decreasing order; [] when n < 2.
    """
    if n < 2:
        return []
    result = []
    # Divide out 2s first so only odd candidates remain.
    while n % 2 == 0:
        result.append(2)
        n //= 2
    # Trial-divide odd candidates; i * i <= n avoids math.sqrt entirely and
    # naturally tightens the bound as n shrinks.
    i = 3
    while i * i <= n:
        while n % i == 0:
            result.append(i)
            n //= i
        i += 2
    # Whatever is left above 2 is itself prime.
    if n > 2:
        result.append(n)
    return result
const tree = require('ascii-tree');
const { basename } = require('path');
const fg = require('fast-glob');
/**
* Checks if 2 arrays have value equality.
*
* @param {Array} array1 - First array.
* @param {Array} array2 - Second array.
* @returns Whether the arrays are equal.
*/
function arrayEqual(array1, array2) {
if (array1.length !== array2.length) {
return false;
}
return array1.every((item, index) => item === array2[index]);
}
/**
 * Finds how much overlap there is at the start of 2 arrays.
 *
 * @param {Array} array1 - First array.
 * @param {Array} array2 - Second array.
 * @returns The amount of items in array1 that overlap (from the start) with array2.
 */
function findArrayOverlap(array1, array2) {
    // Walk forward counting matching leading elements instead of testing
    // ever-shorter prefix slices; the result is identical.
    const limit = Math.min(array1.length, array2.length);
    let count = 0;
    while (count < limit && array1[count] === array2[count]) {
        count++;
    }
    return count;
}
// Defaults merged under caller-supplied options; include dotfiles so
// hidden files appear in the tree.
const defaultGlobOptions = {
    dot: true
};
// fast-glob options that can break things, so shouldn't be configurable.
// These are merged last and therefore always win over caller options.
const forcedGlobOptions = {
    stats: false,
    onlyFiles: true,
    onlyDirectories: false,
    unique: true,
    markDirectories: false,
    absolute: false,
    transform: null
};
/**
 * Generates an ascii tree structure.
 *
 * @param {Object} options - Options to configure what is included in the generated tree.
 * @param {Boolean} options.path - Whether to display the root path instead of ".".
 * @param {Array} options.globs - An array of globs.
 * @param {Object} options.globOptions - Options passed to {@link https://github.com/mrmlnc/fast-glob#options-1}.
 * @returns {string} Ascii tree structure.
 */
function generate({
    path = false,
    globs = ['./**/*', '!node_modules', '!.git'],
    globOptions = {}
} = {}) {
    // Caller options sit between the defaults and the forced (non-negotiable)
    // fast-glob settings.
    globOptions = {
        ...defaultGlobOptions,
        ...globOptions,
        ...forcedGlobOptions
    };
    const files = fg.sync(globs, globOptions);
    let previous = [];
    const root = path ? globOptions.cwd : '.';
    // ascii-tree input format: one line per node, depth encoded as a run of
    // '#' characters, lines separated by CRLF.
    const input =
        `#${root}\r\n` +
        files
            .map((name) => {
                // Depth of the file below the root (number of separators).
                const count = (name.match(/\//g) || []).length;
                let out = '';
                const parts = name.split('/').slice(0, -1);
                // Only directory levels not already emitted for the previous
                // file need fresh heading lines.
                const overlap = findArrayOverlap(parts, previous);
                const relativeParts = overlap > 0 ? parts.slice(overlap) : parts;
                if (
                    relativeParts.length > 0 &&
                    !arrayEqual(previous, relativeParts) &&
                    count > 0
                ) {
                    for (let index = 0; index < relativeParts.length; index++) {
                        out +=
                            '#'.repeat(overlap + index + 2) + relativeParts[index] + '\r\n';
                    }
                    // Bug fix: removed `depth = count;` — `depth` was never
                    // declared (an accidental global in sloppy mode and a
                    // ReferenceError under "use strict") and was never read.
                }
                previous = parts;
                return out + '#'.repeat(count + 2) + basename(name);
            })
            .join('\r\n');
    return tree.generate(input);
}

exports.generate = generate;
|
#!/usr/bin/env sh
# Copyright (c) 2004-present Facebook All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# Get some property from release
# Usage:
# get_latest_release user/repo tag_name
get_latest_release() {
    curl --silent "https://api.github.com/repos/$1/releases/latest" | # Get latest release from GitHub api
        grep "\"$2\":" |                                              # Get tag line
        sed -E 's/.*"([^"]+)".*/\1/'                                  # Pluck JSON value
}

# Getting last version
NEW_VER=$(get_latest_release "$MK_REPO" tag_name)

# Bug fix: both sides of the comparison are now quoted. With the original
# unquoted `[ $NEW_VER = $MK_VERSION ]`, an empty NEW_VER (network failure,
# API rate limit) made `[` fail with a syntax error and fall into the
# upgrade branch with a bogus version.
if [ "$NEW_VER" = "$MK_VERSION" ]; then
    echo 'Up to date';
else
    echo "A new version is available"
    body=$(get_latest_release "$MK_REPO" body)
    echo "Upgrading from $MK_VERSION to $NEW_VER
Release notes
************************
$body
************************
";
    # Downloading and executing upgrade script
    curl -sL "https://raw.githubusercontent.com/$MK_REPO/master/scripts/upgrade.sh" | ( echo "MK_REPO=$MK_REPO; MK_VERSION=$NEW_VER; "; cat - ) | sh
fi;
<filename>Documentation/_add_debug_8hpp.js
// Auto-generated Doxygen navigation data for AddDebug.hpp: each entry is
// [display name, target page, sub-tree id]. Do not edit by hand — regenerate
// with Doxygen instead.
var _add_debug_8hpp =
[
    [ "AddDebugImpl", "classarmnn_1_1optimizations_1_1_add_debug_impl.xhtml", "classarmnn_1_1optimizations_1_1_add_debug_impl" ],
    [ "InsertDebugLayer", "_add_debug_8hpp.xhtml#aa76c76565125ad77092403176d74fd85", null ]
];
class Event(object):
    """
    Base class for events

    Fabio Manganiello, 2015 <blacklight86@gmail.com>
    """

    def __init__(self, component=None, **kwargs):
        """
        Constructor

        component -- the component that generated this event, if any
        kwargs -- key-value associations for the attributes of the object
        """
        self.__kwargs = kwargs
        self.component = component
        # Also expose each kwarg as a plain attribute (event.foo).
        vars(self).update(kwargs)

    def get(self, attr):
        " Get an event attribute by name. Return None if the attribute doesn't exist "
        return self.__kwargs[attr] if attr in self.__kwargs else None

    def serialize(self):
        " Serialize the event using pickle "
        import pickle
        return pickle.dumps(self)

    @classmethod
    def deserialize(cls, event):
        """ Deserialize and return the event object using pickle.

        SECURITY NOTE: pickle.loads executes arbitrary code when fed
        untrusted bytes — only deserialize data from trusted sources.
        """
        import pickle
        obj = pickle.loads(event)
        assert isinstance(obj, cls)
        return obj

    def to_json(self):
        " Serialize the constructor kwargs as JSON "
        import json
        return json.dumps(self.__kwargs)

    @classmethod
    def from_json(cls, attrs):
        """ Deserialize and initialize from JSON.

        Bug fix: instantiate ``cls`` instead of the hard-coded ``Event`` so
        that ``SubClass.from_json(...)`` returns a ``SubClass`` instance.
        """
        import json
        return cls(**dict(json.loads(attrs)))

    def __eq__(self, event):
        """
        Return true if event equals self.
        Two events are considered "equal" if:
        - Their types are the same, or one is a direct subclass of the other;
        - All of their constructor parameters are equal, unless a certain attribute is an instance of AttributeValueAny.
        """
        if not self.__same_classes(self, event):
            return False
        # NOTE(review): only self's attributes are checked, so the comparison
        # is asymmetric if the other event carries extra attributes, and a
        # key missing on the other side raises KeyError — preserved as-is;
        # confirm this is the intended contract.
        for (attr, value) in self.__kwargs.items():
            if not self.__same_values(value, event.__kwargs[attr]):
                return False
        return True

    @classmethod
    def __same_classes(cls, obj1, obj2):
        # The base Event class matches any event type; otherwise require an
        # exact type match.
        return True \
            if (type(obj1) == Event or type(obj2) == Event) \
            else type(obj1) == type(obj2)

    @classmethod
    def __same_values(cls, value1, value2):
        # AttributeValueAny acts as a wildcard on either side of the compare.
        if not cls.__same_classes(value1, value2) \
                and not isinstance(value1, AttributeValueAny) \
                and not isinstance(value2, AttributeValueAny):
            return False
        return value1 == value2
class StopEvent(Event):
    """
    A special event used to asynchronously stop components, workers and sockets.
    It carries no behaviour of its own — receivers react to its type alone.

    Fabio Manganiello, 2015 <blacklight86@gmail.com>
    """
class AttributeValueAny(object):
    """
    When an event attribute type is AttributeValueAny,
    that attribute won't be taken into account when
    two events are compared through == operator or
    explicit __eq__ method invocation.

    Fabio Manganiello, 2015 <blacklight86@gmail.com>
    """

    def __eq__(self, value):
        """ Always return True. Any value equals "any" """
        return True

    # Bug fix: in Python 3 defining __eq__ implicitly sets __hash__ to None,
    # making instances unhashable (unusable in sets / as dict keys). Restore
    # identity hashing explicitly.
    __hash__ = object.__hash__

    def __repr__(self):
        return "__ANY__"

# vim:sw=4:ts=4:et:
<reponame>eengineergz/Lambda
// Knex seed: populate the `resources` table with two demo rows.
// NOTE(review): primary keys are hard-coded, so re-running the seed without
// truncating the table first will fail on duplicate ids — confirm the
// surrounding seeding strategy clears the table beforehand.
exports.seed = function(knex) {
    return knex('resources').insert([
        {
            resourceid: 1,
            resourcename: 'Lambda Student',
            resourcedescription: 'a soon to be hired developer'
        },
        {
            resourceid: 2,
            resourcename: 'MacBook Pro #1',
            resourcedescription: 'an overly expensive laptop computer'
        }
    ]);
};
|
<gh_stars>1-10
// Cache of every known tab, keyed by tab id.
tabs = {};

// Keep the cache current as tabs load and change.
chrome.tabs.onUpdated.addListener(function (tabId, changeInfo, tab) {
    tabs[tabId] = tab;
});

chrome.tabs.onRemoved.addListener(function (tabId, removeInfo) {
    // Only allow closing if the window is closing down.
    if (removeInfo.isWindowClosing) {
        return;
    }
    var closed = tabs[tabId];
    // Non-pinned (or unknown) tabs may close freely.
    if (!closed || !closed.pinned) {
        return;
    }
    delete tabs[tabId];
    // Chrome doesn't allow us from stopping a removal AFAIK, so just re-open
    // the tab in the same place as a pinned tab
    chrome.tabs.create({
        pinned: true,
        url: closed.url,
        active: true,
        index: closed.index
    }, function (newTab) {
        tabs[newTab.id] = newTab;
    });
});
|
/// <summary>Computes the arithmetic mean of a fixed list of integers.</summary>
public class AverageCalculator
{
    private List<int> data;

    /// <summary>Wraps the list whose average will be computed.</summary>
    /// <param name="data">The values to average; must be non-empty when
    /// <see cref="CalculateAverage"/> is called.</param>
    public AverageCalculator(List<int> data)
    {
        this.data = data;
    }

    /// <summary>Returns the arithmetic mean of the list.</summary>
    /// <exception cref="System.InvalidOperationException">
    /// Thrown when the list is empty. (Bug fix: the original silently
    /// returned NaN from the 0.0/0 division.)
    /// </exception>
    public double CalculateAverage()
    {
        if (data.Count == 0)
        {
            throw new System.InvalidOperationException("Cannot average an empty list.");
        }
        double sum = 0;
        for (int i = 0; i < data.Count; i++)
        {
            sum += data[i];
        }
        return sum / data.Count;
    }
}
import React from 'react'
import {Card ,CardContent ,Typography} from "@material-ui/core"
import "./Styles.css"
function Message({username , message}) {
const isUser = username === message.username
// console.log(username,message)
return (
<div className={`message ${isUser && "message_user"}`}>
<Card className={isUser ? "message_usercard" : "message_guestcard"}>
<CardContent>
<Typography variant="h5" component="h2" style={{fontSize:"15px",color:"#ee6c4d"}}>
{message.username}
</Typography>
<Typography color="white" variant="h5" component="h2" style={{fontSize:"25px"}}>
{message.text}
</Typography>
</CardContent>
</Card>
</div>
)
}
export default Message
|
<filename>filestack/src/main/java/com/filestack/android/internal/SelectionSaver.java
package com.filestack.android.internal;
import com.filestack.android.Selection;
import java.util.ArrayList;
/** Manages a user's file selections. Used to save selections and notify others about changes. */
public interface SelectionSaver {

    /**
     * Adds the selection if absent, removes it if present.
     *
     * @return presumably the new selected state of the item — confirm against the implementation
     */
    boolean toggleItem(Selection selection);

    /** Returns whether the given selection is currently saved. */
    boolean isSelected(Selection selection);

    /** Registers the listener notified of selection-set changes (see {@link Listener}). */
    void setItemChangeListener(Listener listener);

    /**
     * Returns the saved selections.
     * NOTE(review): may expose internal state — confirm whether callers receive a copy.
     */
    ArrayList<Selection> getItems();

    /** Removes all saved selections. */
    void clear();

    /** Returns true if no selections are saved. */
    boolean isEmpty();

    /** Callback for changes to whether the selection set is empty. */
    interface Listener {
        void onEmptyChanged(boolean isEmpty);
    }
}
|
def maxSubArraySum(arr):
    """Return the largest sum over all contiguous subarrays of ``arr``.

    Kadane's algorithm. The empty subarray (sum 0) is permitted, so the
    result is never negative: an empty or all-negative input yields 0.
    """
    best = 0
    running = 0
    for value in arr:
        # Extend the current run, abandoning it whenever it drags below zero.
        running = max(0, running + value)
        best = max(best, running)
    return best
# Driver code: exercise the function on a mixed-sign sample.
sample = [-2, 11, -4, 13, -5, 2]
print(maxSubArraySum(sample))
# Output: 20
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.redis
import scala.collection.JavaConverters._
import redis.clients.jedis.Jedis
import redis.clients.util.{JedisClusterCRC16, SafeEncoder}
/**
 * RedisConfig holds the state of the cluster nodes, and uses consistent hashing to map
 * keys to nodes
 */
class RedisConfig(val initialHost: RedisEndpoint) extends Serializable {

  val initialAddr = initialHost.host
  // Number of logical databases on the initial server (CONFIG GET databases).
  val dbAmount = getDBAmount(initialHost)
  // All discovered nodes (masters and slaves), resolved once at construction.
  val nodes = getNodes(initialHost)
  // Masters only — by convention the idx field is 0 for a master.
  val hosts = nodes.filter(_.idx == 0)

  /**
   * Opens a connection to the initial host and SELECTs the given db.
   * The caller is responsible for closing the returned connection.
   */
  def getConnect(db: Int): Jedis = {
    val conn = initialHost.connect()
    conn.select(db)
    conn
  }

  /**
   * @return initialHost's auth
   */
  def getAuth: String = {
    initialHost.auth
  }

  /**
   * @return selected db number
   */
  def getDB: Int = {
    initialHost.dbNum
  }

  // Picks a random master node.
  // NOTE(review): Random.nextInt().abs is negative for Int.MinValue (abs of
  // MinValue overflows), which would produce a negative index — extremely
  // rare but worth confirming/fixing upstream.
  def getRandomNode(): RedisNode = {
    val rnd = scala.util.Random.nextInt().abs % hosts.length
    hosts(rnd)
  }

  /**
   * @param sPos start slot number
   * @param ePos end slot number
   * @return a list of RedisNode whose slots union [sPos, ePos] is not null
   */
  def getNodesBySlots(sPos: Int, ePos: Int): Array[RedisNode] = {
    /* This function judges if [sPos1, ePos1] union [sPos2, ePos2] is not null */
    def inter(sPos1: Int, ePos1: Int, sPos2: Int, ePos2: Int) =
      if (sPos1 <= sPos2) ePos1 >= sPos2 else ePos2 >= sPos1

    nodes
      .filter(node => inter(sPos, ePos, node.startSlot, node.endSlot))
      .filter(_.idx == 0) // master only now
  }

  /**
   * @param key
   * *IMPORTANT* Please remember to close after using
   * @return jedis who is a connection for a given key
   */
  def connectionForKey(key: String): Jedis = {
    getHost(key).connect
  }

  /**
   * @param initialHost any redis endpoint of a cluster or a single server
   * @return true if the target server is in cluster mode
   */
  private def clusterEnabled(initialHost: RedisEndpoint): Boolean = {
    val conn = initialHost.connect()
    val info = conn.info.split("\n")
    val version = info.filter(_.contains("redis_version:"))(0)
    val clusterEnable = info.filter(_.contains("cluster_enabled:"))
    // NOTE(review): substring(14, ...) assumes the line starts exactly with
    // "redis_version:" — fragile if the INFO format ever changes; confirm.
    val mainVersion = version.substring(14, version.indexOf(".")).toInt
    // Cluster mode requires redis >= 3 and cluster_enabled:1 in INFO.
    val res = mainVersion > 2 && clusterEnable.length > 0 && clusterEnable(0).contains("1")
    conn.close
    res
  }

  /**
   * @param key
   * @return host whose slots should involve key
   */
  def getHost(key: String): RedisNode = {
    // NOTE(review): throws IndexOutOfBoundsException if no master covers the
    // slot (e.g. during resharding) — confirm callers tolerate that.
    val slot = JedisClusterCRC16.getSlot(key)
    hosts.filter(host => {
      host.startSlot <= slot && host.endSlot >= slot
    })(0)
  }

  // Reads the configured number of logical databases.
  // NOTE(review): the connection opened here is never closed — possible leak.
  private def getDBAmount(initialHost: RedisEndpoint): Int = {
    val conn = initialHost.connect()
    conn.configGet("databases").get(1).toInt
  }

  /**
   * @param initialHost any redis endpoint of a single server
   * @return list of nodes
   */
  private def getNonClusterNodes(initialHost: RedisEndpoint): Array[RedisNode] = {
    val master = (initialHost.host, initialHost.port)
    val conn = initialHost.connect()
    val replinfo = conn.info("Replication").split("\n")
    conn.close
    // If this node is a slave, we need to extract the slaves from its master
    if (replinfo.filter(_.contains("role:slave")).length != 0) {
      val host = replinfo.filter(_.contains("master_host:"))(0).trim.substring(12)
      val port = replinfo.filter(_.contains("master_port:"))(0).trim.substring(12).toInt

      // simply re-enter this function witht he master host/port
      getNonClusterNodes(
        initialHost = new RedisEndpoint(host, port, initialHost.auth, initialHost.dbNum))
    } else {
      // this is a master - take its slaves
      val slaves = replinfo
        .filter(x => (x.contains("slave") && x.contains("online")))
        .map(rl => {
          // Replication lines look like "slaveN:ip=...,port=...,..."; pull
          // out the ip and port fields.
          val content = rl.substring(rl.indexOf(':') + 1).split(",")
          val ip = content(0)
          val port = content(1)
          (ip.substring(ip.indexOf('=') + 1), port.substring(port.indexOf('=') + 1).toInt)
        })

      val nodes = master +: slaves
      val range = nodes.size
      // In non-cluster mode every node is modelled as covering the whole
      // slot range 0..16383; idx 0 is the master.
      (0 until range)
        .map(
          i =>
            RedisNode(
              new RedisEndpoint(
                nodes(i)._1,
                nodes(i)._2,
                initialHost.auth,
                initialHost.dbNum,
                initialHost.timeout),
              0,
              16383,
              i,
              range))
        .toArray
    }
  }

  /**
   * @param initialHost any redis endpoint of a cluster server
   * @return list of nodes
   */
  private def getClusterNodes(initialHost: RedisEndpoint): Array[RedisNode] = {
    val conn = initialHost.connect()
    val res = conn
      .clusterSlots()
      .asScala
      .flatMap { slotInfoObj =>
        {
          // CLUSTER SLOTS reply: [startSlot, endSlot, master, replica...].
          val slotInfo = slotInfoObj.asInstanceOf[java.util.List[java.lang.Object]]
          val sPos = slotInfo.get(0).toString.toInt
          val ePos = slotInfo.get(1).toString.toInt
          /*
           * We will get all the nodes with the slots range [sPos, ePos],
           * and create RedisNode for each nodes, the total field of all
           * RedisNode are the number of the nodes whose slots range is
           * as above, and the idx field is just an index for each node
           * which will be used for adding support for slaves and so on.
           * And the idx of a master is always 0, we rely on this fact to
           * filter master.
           */
          (0 until (slotInfo.size - 2)).map(i => {
            val node = slotInfo.asScala(i + 2).asInstanceOf[java.util.List[java.lang.Object]]
            val host = SafeEncoder.encode(node.get(0).asInstanceOf[Array[scala.Byte]])
            val port = node.get(1).toString.toInt
            RedisNode(
              new RedisEndpoint(
                host,
                port,
                initialHost.auth,
                initialHost.dbNum,
                initialHost.timeout),
              sPos,
              ePos,
              i,
              slotInfo.size - 2)
          })
        }
      }
      .toArray
    conn.close()
    res
  }

  /**
   * @param initialHost any redis endpoint of a cluster or a single server
   * @return list of nodes
   */
  def getNodes(initialHost: RedisEndpoint): Array[RedisNode] = {
    if (clusterEnabled(initialHost)) {
      getClusterNodes(initialHost)
    } else {
      getNonClusterNodes(initialHost)
    }
  }
}
|
<reponame>DarkKowalski/mochizuki-bot<filename>lib/mochizuki/fetcher.rb
# frozen_string_literal: true
require 'faraday'
require 'uri'
require 'nokogiri'
module Mochizuki
  # Scrapes a power reading from an ASP.NET WebForms page by replaying the
  # form postback flow: warm up __VIEWSTATE, acquire a session cookie, then
  # fetch and parse the results page.
  class Fetcher
    def initialize(logger = Mochizuki.logger, config = Mochizuki.config)
      @logger = logger
      @config = config
      # Target endpoint is plain HTTP on the configured host/port.
      @uri = URI("http://#{@config.api_host}:#{config.api_port}")
      # Static WebForms postback fields; the dorm-selection fields are filled
      # in from the configuration before each request (see fetch_viewstate).
      @request_body = {
        '__EVENTTARGET' => '',
        '__EVENTARGUMENT' => '',
        '__LASTFOCUS' => '',
        '__VIEWSTATE' => '',
        '__VIEWSTATEGENERATOR' => 'CA0B0334',
        'drlouming' => '',
        'drceng' => '',
        'dr_ceng' => '',
        'drfangjian' => '',
        'radio' => 'usedR',
        'ImageButton1.x' => '0',
        'ImageButton1.y' => '0'
      }
      @cookie = nil
    end

    # Runs the full query flow. Returns the power reading (a numeric string)
    # on success; logs a warning and returns nil on failure.
    def fetch
      @logger.info 'Try to query'
      fetch_viewstate
      fetch_cookie
      power = fetch_power
      if power.nil?
        @logger.warn 'Failed to query.'
        return
      end
      Mochizuki.update_status(power)
      power
    end

    private

    # Posts the form to refresh the __VIEWSTATE token.
    # NOTE(review): posts 3 times — presumably the server needs several
    # round-trips before the token stabilises; confirm this is required.
    def fetch_viewstate
      @request_body['drlouming'] = @config.campus
      @request_body['drceng'] = @config.building
      @request_body['dr_ceng'] = @config.floor
      @request_body['drfangjian'] = @config.dorm
      3.times do
        resp = Faraday.post(@uri) do |req|
          req.headers = { 'Content-Type' => 'application/x-www-form-urlencoded' }
          req.body = URI.encode_www_form(@request_body)
        end
        html = Nokogiri::HTML(resp.body)
        @request_body['__VIEWSTATE'] = html.at_css('input#__VIEWSTATE')['value']
      end
    end

    # Submits the form once more and captures the session cookie from the
    # Set-Cookie response header.
    def fetch_cookie
      resp = Faraday.post(@uri) do |req|
        req.headers = { 'Content-Type' => 'application/x-www-form-urlencoded' }
        req.body = URI.encode_www_form(@request_body)
      end
      @cookie = resp.headers['set-cookie'].split('; ')[0]
    end

    # Fetches the results page with the session cookie and extracts the
    # first decimal number found in an <h6> element; nil if absent.
    def fetch_power
      resp = Faraday.get("#{@uri}/usedRecord1.aspx") do |req|
        req.headers = { 'Cookie' => @cookie, 'Content-Type' => 'application/x-www-form-urlencoded' }
      end
      html = Nokogiri::HTML(resp.body)
      html.xpath('//h6').text.scan(/(\d+[,.]\d+)/)[0][0]
    end
  end
end
|
<filename>orders.go
package goftx
import (
"encoding/json"
"fmt"
"net/http"
"github.com/pkg/errors"
"github.com/jnlin/goftx/models"
)
// REST endpoint paths for the orders API. Entries containing %d are
// printf-style templates into which an order id is formatted.
const (
	apiOrders                  = "/orders"
	apiGetOrdersHistory        = "/orders/history"
	apiModifyOrder             = "/orders/%d/modify"
	apiModifyOrderByClientID   = "/orders/by_client_id/%d/modify"
	apiTriggerOrders           = "/conditional_orders"
	apiGetOrderTriggers        = "/conditional_orders/%d/triggers"
	apiGetTriggerOrdersHistory = "/conditional_orders/history"
	apiModifyTriggerOrder      = "/conditional_orders/%d/modify"
)

// Orders groups the order-related endpoints of the exchange REST API.
type Orders struct {
	client *Client
}
// GetOpenOrders returns the account's open orders via an authenticated
// GET /orders, optionally filtered by market (pass "" for all markets).
func (o *Orders) GetOpenOrders(market string) ([]*models.Order, error) {
	requestParams := Request{
		Auth:   true,
		Method: http.MethodGet,
		URL:    fmt.Sprintf("%s%s", apiUrl, apiOrders),
	}
	if market != "" {
		requestParams.Params = map[string]string{
			"market": market,
		}
	}
	request, err := o.client.prepareRequest(requestParams)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result []*models.Order
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// GetOrdersHistory returns past orders matching the optional filter params
// via an authenticated GET /orders/history.
func (o *Orders) GetOrdersHistory(params *models.GetOrdersHistoryParams) ([]*models.Order, error) {
	// Encode the filter struct into query-string parameters.
	queryParams, err := PrepareQueryParams(params)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodGet,
		URL:    fmt.Sprintf("%s%s", apiUrl, apiGetOrdersHistory),
		Params: queryParams,
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result []*models.Order
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// GetOpenTriggerOrders returns open conditional (trigger) orders matching
// the optional filter params via GET /conditional_orders.
func (o *Orders) GetOpenTriggerOrders(params *models.GetOpenTriggerOrdersParams) ([]*models.TriggerOrder, error) {
	queryParams, err := PrepareQueryParams(params)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodGet,
		URL:    fmt.Sprintf("%s%s", apiUrl, apiTriggerOrders),
		Params: queryParams,
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result []*models.TriggerOrder
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// GetOrderTriggers returns the trigger events fired for the given
// conditional order via GET /conditional_orders/{id}/triggers.
func (o *Orders) GetOrderTriggers(orderID int64) ([]*models.Trigger, error) {
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodGet,
		URL:    fmt.Sprintf("%s%s", apiUrl, fmt.Sprintf(apiGetOrderTriggers, orderID)),
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result []*models.Trigger
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// GetTriggerOrdersHistory returns past conditional orders matching the
// optional filter params via GET /conditional_orders/history.
func (o *Orders) GetTriggerOrdersHistory(params *models.GetTriggerOrdersHistoryParams) ([]*models.TriggerOrder, error) {
	queryParams, err := PrepareQueryParams(params)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodGet,
		URL:    fmt.Sprintf("%s%s", apiUrl, apiGetTriggerOrdersHistory),
		Params: queryParams,
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result []*models.TriggerOrder
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// PlaceOrder submits a new order (POST /orders) and returns the created
// order as echoed back by the API.
func (o *Orders) PlaceOrder(payload *models.PlaceOrderPayload) (*models.Order, error) {
	body, err := json.Marshal(payload)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodPost,
		URL:    fmt.Sprintf("%s%s", apiUrl, apiOrders),
		Body:   body,
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result *models.Order
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// PlaceTriggerOrder validates and submits a new conditional order
// (POST /conditional_orders); note this endpoint, unlike PlaceOrder,
// validates the payload client-side first.
func (o *Orders) PlaceTriggerOrder(payload *models.PlaceTriggerOrderPayload) (*models.TriggerOrder, error) {
	err := payload.Validate()
	if err != nil {
		return nil, errors.WithStack(err)
	}
	body, err := json.Marshal(payload)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodPost,
		URL:    fmt.Sprintf("%s%s", apiUrl, apiTriggerOrders),
		Body:   body,
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result *models.TriggerOrder
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// ModifyOrder amends an existing order by exchange-assigned id
// (POST /orders/{id}/modify) and returns the replacement order.
func (o *Orders) ModifyOrder(payload *models.ModifyOrderPayload, orderID int64) (*models.Order, error) {
	body, err := json.Marshal(payload)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodPost,
		URL:    fmt.Sprintf("%s%s", apiUrl, fmt.Sprintf(apiModifyOrder, orderID)),
		Body:   body,
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result *models.Order
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// ModifyOrderByClientID amends an existing order addressed by the caller's
// own client order id (POST /orders/by_client_id/{id}/modify).
func (o *Orders) ModifyOrderByClientID(payload *models.ModifyOrderPayload, clientOrderID int64) (*models.Order, error) {
	body, err := json.Marshal(payload)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodPost,
		URL:    fmt.Sprintf("%s%s", apiUrl, fmt.Sprintf(apiModifyOrderByClientID, clientOrderID)),
		Body:   body,
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result *models.Order
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// ModifyTriggerOrder amends an existing conditional order
// (POST /conditional_orders/{id}/modify) and returns the replacement.
func (o *Orders) ModifyTriggerOrder(payload *models.ModifyTriggerOrderPayload, orderID int64) (*models.TriggerOrder, error) {
	body, err := json.Marshal(payload)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodPost,
		URL:    fmt.Sprintf("%s%s", apiUrl, fmt.Sprintf(apiModifyTriggerOrder, orderID)),
		Body:   body,
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result *models.TriggerOrder
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// GetOrder fetches a single order by exchange-assigned id (GET /orders/{id}).
func (o *Orders) GetOrder(orderID int64) (*models.Order, error) {
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodGet,
		URL:    fmt.Sprintf("%s%s/%d", apiUrl, apiOrders, orderID),
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result *models.Order
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// GetOrderByClientID fetches a single order addressed by the caller's own
// client order id (GET /orders/by_client_id/{id}).
func (o *Orders) GetOrderByClientID(clientOrderID int64) (*models.Order, error) {
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodGet,
		URL:    fmt.Sprintf("%s%s/by_client_id/%d", apiUrl, apiOrders, clientOrderID),
	})
	if err != nil {
		return nil, errors.WithStack(err)
	}
	response, err := o.client.do(request)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var result *models.Order
	err = json.Unmarshal(response, &result)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	return result, nil
}
// CancelOrder cancels an order by exchange-assigned id (DELETE /orders/{id}).
func (o *Orders) CancelOrder(orderID int64) error {
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodDelete,
		URL:    fmt.Sprintf("%s%s/%d", apiUrl, apiOrders, orderID),
	})
	if err != nil {
		return errors.WithStack(err)
	}
	_, err = o.client.do(request)
	if err != nil {
		return errors.WithStack(err)
	}
	return nil
}
// CancelOrderByClientID cancels an order addressed by the caller's own
// client order id (DELETE /orders/by_client_id/{id}).
func (o *Orders) CancelOrderByClientID(clientOrderID int64) error {
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodDelete,
		URL:    fmt.Sprintf("%s%s/by_client_id/%d", apiUrl, apiOrders, clientOrderID),
	})
	if err != nil {
		return errors.WithStack(err)
	}
	_, err = o.client.do(request)
	if err != nil {
		return errors.WithStack(err)
	}
	return nil
}
// CancelOpenTriggerOrder cancels an open conditional order
// (DELETE /conditional_orders/{id}).
func (o *Orders) CancelOpenTriggerOrder(triggerOrderID int64) error {
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodDelete,
		URL:    fmt.Sprintf("%s%s/%d", apiUrl, apiTriggerOrders, triggerOrderID),
	})
	if err != nil {
		return errors.WithStack(err)
	}
	_, err = o.client.do(request)
	if err != nil {
		return errors.WithStack(err)
	}
	return nil
}
// CancelAllOrders cancels every order matching the payload's filters
// (DELETE /orders with a JSON body).
func (o *Orders) CancelAllOrders(payload *models.CancelAllOrdersPayload) error {
	body, err := json.Marshal(payload)
	if err != nil {
		return errors.WithStack(err)
	}
	request, err := o.client.prepareRequest(Request{
		Auth:   true,
		Method: http.MethodDelete,
		URL:    fmt.Sprintf("%s%s", apiUrl, apiOrders),
		Body:   body,
	})
	if err != nil {
		return errors.WithStack(err)
	}
	_, err = o.client.do(request)
	if err != nil {
		return errors.WithStack(err)
	}
	return nil
}
|
#!/bin/bash
# SAP HANA VM provisioning script (run as an Azure custom-script extension).
set -x

# Positional parameters supplied by the deployment template.
Uri=${1}
HANAVER=${2}
HANAUSR=${3}
HANAPWD=${4}
HANASID=${5}
HANANUMBER=${6}
vmSize=${7}
SUBEMAIL=${8}
SUBID=${9}
SUBURL=${10}

#if needed, register the machine
if [ "$SUBEMAIL" != "" ]; then
    if [ "$SUBURL" != "" ]; then
        SUSEConnect -e $SUBEMAIL -r $SUBID --url $SUBURL
    else
        SUSEConnect -e $SUBEMAIL -r $SUBID
    fi
fi
#decode hana version parameter
# Maps the human-readable HANA version label to its install-media package
# number; unknown labels fall through to the default 51054623 (SPS05).
HANAVER=${HANAVER^^}
if [ "${HANAVER}" = "SAP HANA PLATFORM EDITION 2.0 SPS01 REV 10 (51052030)" ]
then
    hanapackage="51052030"
else
    echo "not 51052030"
    if [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS02 (51052325)" ]
    then
        hanapackage="51052325"
    else
        echo "not 51052325"
        if [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS03 REV30 (51053061)" ]
        then
            hanapackage="51053061"
        else
            echo "not 5105361"
            if [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS04 REV40 (51053787)" ]
            then
                hanapackage="51053787"
            else
                echo "not 51053787"
                if [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS05 REV52 (51054623)" ]
                then
                    hanapackage="51054623"
                else
                    echo "not 51053061, default to 51054623"
                    hanapackage="51054623"
                fi
            fi
        fi
    fi
fi
#get the VM size via the instance api
# NOTE(review): this queried value (VMSIZE) is used below instead of the
# vmSize positional parameter — confirm that is intentional.
VMSIZE=`curl -H Metadata:true "http://169.254.169.254/metadata/instance/compute/vmSize?api-version=2017-08-01&format=text"`
#install hana prereqs
zypper install -y glibc-2.22-51.6
zypper install -y systemd-228-142.1
zypper install -y unrar
zypper install -y sapconf
zypper install -y saptune
# Mount points for the HANA filesystems created later in this script.
mkdir /etc/systemd/login.conf.d
mkdir /hana
mkdir /hana/data
mkdir /hana/log
mkdir /hana/shared
mkdir /hana/backup
mkdir /usr/sap
zypper in -t pattern -y sap-hana
# Apply SAP-recommended OS tuning for HANA.
saptune solution apply HANA
saptune daemon start
# step2
echo $Uri >> /tmp/url.txt
# Enable a 2 GB swap file on the Azure resource disk via waagent config.
cp -f /etc/waagent.conf /etc/waagent.conf.orig
sedcmd="s/ResourceDisk.EnableSwap=n/ResourceDisk.EnableSwap=y/g"
sedcmd2="s/ResourceDisk.SwapSizeMB=0/ResourceDisk.SwapSizeMB=2048/g"
cat /etc/waagent.conf | sed $sedcmd | sed $sedcmd2 > /etc/waagent.conf.new
cp -f /etc/waagent.conf.new /etc/waagent.conf
#don't restart waagent, as this will kill the custom script.
#service waagent restart
# this assumes that 5 disks are attached at lun 0 through 4
echo "Creating partitions and physical volumes"
pvcreate -ff -y /dev/disk/azure/scsi1/lun0
pvcreate -ff -y /dev/disk/azure/scsi1/lun1
pvcreate -ff -y /dev/disk/azure/scsi1/lun2
pvcreate -ff -y /dev/disk/azure/scsi1/lun3
pvcreate -ff -y /dev/disk/azure/scsi1/lun4
pvcreate -ff -y /dev/disk/azure/scsi1/lun5
# Small VM sizes: single-disk VGs for shared/usr/backup, plus a 3-way striped
# data VG carrying both the data and log logical volumes.
# Bug fix: the first and last $VMSIZE tests were unquoted; with an empty or
# whitespace-containing VMSIZE (e.g. metadata query failure) `[` aborts with
# "unary operator expected" instead of evaluating the condition.
if [ "$VMSIZE" == "Standard_E16s_v3" ] || [ "$VMSIZE" == "Standard_E32s_v3" ] || [ "$VMSIZE" == "Standard_E64s_v3" ] || [ "$VMSIZE" == "Standard_GS5" ] || [ "$VMSIZE" == "Standard_M32ts" ] || [ "$VMSIZE" == "Standard_M32ls" ] || [ "$VMSIZE" == "Standard_M64ls" ] || [ "$VMSIZE" == "Standard_DS14_v2" ] ; then
    echo "logicalvols start" >> /tmp/parameter.txt
    #shared volume creation
    sharedvglun="/dev/disk/azure/scsi1/lun0"
    vgcreate sharedvg $sharedvglun
    lvcreate -l 100%FREE -n sharedlv sharedvg
    #usr volume creation
    usrsapvglun="/dev/disk/azure/scsi1/lun1"
    vgcreate usrsapvg $usrsapvglun
    lvcreate -l 100%FREE -n usrsaplv usrsapvg
    #backup volume creation
    backupvglun="/dev/disk/azure/scsi1/lun2"
    vgcreate backupvg $backupvglun
    lvcreate -l 100%FREE -n backuplv backupvg
    #data volume creation
    datavg1lun="/dev/disk/azure/scsi1/lun3"
    datavg2lun="/dev/disk/azure/scsi1/lun4"
    datavg3lun="/dev/disk/azure/scsi1/lun5"
    vgcreate datavg $datavg1lun $datavg2lun $datavg3lun
    PHYSVOLUMES=3
    STRIPESIZE=64
    # Striped LVs: 60% of the VG for data, the remainder for the log.
    lvcreate -i$PHYSVOLUMES -I$STRIPESIZE -l 60%FREE -n datalv datavg
    lvcreate -i$PHYSVOLUMES -I$STRIPESIZE -l 100%FREE -n loglv datavg
    mkfs.xfs /dev/datavg/datalv
    mkfs.xfs /dev/datavg/loglv
    mkfs -t xfs /dev/sharedvg/sharedlv
    mkfs -t xfs /dev/backupvg/backuplv
    mkfs -t xfs /dev/usrsapvg/usrsaplv
    mount -t xfs /dev/datavg/loglv /hana/log
    echo "/dev/mapper/datavg-loglv /hana/log xfs defaults 0 0" >> /etc/fstab
    echo "logicalvols end" >> /tmp/parameter.txt
fi
if [ "$VMSIZE" == "Standard_M64s" ]; then
    #this is the medium size
    # this assumes that 6 disks are attached at lun 0 through 5
    echo "Creating partitions and physical volumes"
    pvcreate -ff -y /dev/disk/azure/scsi1/lun6
    pvcreate -ff -y /dev/disk/azure/scsi1/lun7
    pvcreate -ff -y /dev/disk/azure/scsi1/lun8
    pvcreate -ff -y /dev/disk/azure/scsi1/lun9
    echo "logicalvols start" >> /tmp/parameter.txt
    #shared volume creation
    sharedvglun="/dev/disk/azure/scsi1/lun0"
    vgcreate sharedvg $sharedvglun
    lvcreate -l 100%FREE -n sharedlv sharedvg
    #usr volume creation
    usrsapvglun="/dev/disk/azure/scsi1/lun1"
    vgcreate usrsapvg $usrsapvglun
    lvcreate -l 100%FREE -n usrsaplv usrsapvg
    #backup volume creation
    backupvg1lun="/dev/disk/azure/scsi1/lun2"
    backupvg2lun="/dev/disk/azure/scsi1/lun3"
    vgcreate backupvg $backupvg1lun $backupvg2lun
    lvcreate -l 100%FREE -n backuplv backupvg
    #data volume creation
    datavg1lun="/dev/disk/azure/scsi1/lun4"
    datavg2lun="/dev/disk/azure/scsi1/lun5"
    datavg3lun="/dev/disk/azure/scsi1/lun6"
    datavg4lun="/dev/disk/azure/scsi1/lun7"
    vgcreate datavg $datavg1lun $datavg2lun $datavg3lun $datavg4lun
    PHYSVOLUMES=4
    STRIPESIZE=64
    lvcreate -i$PHYSVOLUMES -I$STRIPESIZE -l 100%FREE -n datalv datavg
    #log volume creation
    logvg1lun="/dev/disk/azure/scsi1/lun8"
    logvg2lun="/dev/disk/azure/scsi1/lun9"
    vgcreate logvg $logvg1lun $logvg2lun
    PHYSVOLUMES=2
    STRIPESIZE=32
    lvcreate -i$PHYSVOLUMES -I$STRIPESIZE -l 100%FREE -n loglv logvg
    # Bug fix: the log volume was previously mounted on /hana/log BEFORE
    # mkfs.xfs created its filesystem, so the mount failed. Create all
    # filesystems first, then mount.
    mkfs.xfs /dev/datavg/datalv
    mkfs.xfs /dev/logvg/loglv
    mkfs -t xfs /dev/sharedvg/sharedlv
    mkfs -t xfs /dev/backupvg/backuplv
    mkfs -t xfs /dev/usrsapvg/usrsaplv
    mount -t xfs /dev/logvg/loglv /hana/log
    echo "/dev/mapper/logvg-loglv /hana/log xfs defaults 0 0" >> /etc/fstab
    echo "logicalvols end" >> /tmp/parameter.txt
fi
if [ "$VMSIZE" == "Standard_M64ms" ] || [ "$VMSIZE" == "Standard_M128S" ]; then
    # this assumes that 6 disks are attached at lun 0 through 9
    echo "Creating partitions and physical volumes"
    pvcreate -ff -y /dev/disk/azure/scsi1/lun6
    pvcreate -ff -y /dev/disk/azure/scsi1/lun7
    pvcreate -ff -y /dev/disk/azure/scsi1/lun8
    echo "logicalvols start" >> /tmp/parameter.txt
    #shared volume creation
    sharedvglun="/dev/disk/azure/scsi1/lun0"
    vgcreate sharedvg $sharedvglun
    lvcreate -l 100%FREE -n sharedlv sharedvg
    #usr volume creation
    usrsapvglun="/dev/disk/azure/scsi1/lun1"
    vgcreate usrsapvg $usrsapvglun
    lvcreate -l 100%FREE -n usrsaplv usrsapvg
    #backup volume creation
    backupvg1lun="/dev/disk/azure/scsi1/lun2"
    backupvg2lun="/dev/disk/azure/scsi1/lun3"
    vgcreate backupvg $backupvg1lun $backupvg2lun
    lvcreate -l 100%FREE -n backuplv backupvg
    #data volume creation
    datavg1lun="/dev/disk/azure/scsi1/lun4"
    datavg2lun="/dev/disk/azure/scsi1/lun5"
    datavg3lun="/dev/disk/azure/scsi1/lun6"
    vgcreate datavg $datavg1lun $datavg2lun $datavg3lun
    PHYSVOLUMES=3
    STRIPESIZE=64
    lvcreate -i$PHYSVOLUMES -I$STRIPESIZE -l 100%FREE -n datalv datavg
    #log volume creation
    logvg1lun="/dev/disk/azure/scsi1/lun7"
    logvg2lun="/dev/disk/azure/scsi1/lun8"
    vgcreate logvg $logvg1lun $logvg2lun
    PHYSVOLUMES=2
    STRIPESIZE=32
    lvcreate -i$PHYSVOLUMES -I$STRIPESIZE -l 100%FREE -n loglv logvg
    # Bug fix: the log volume was previously mounted on /hana/log BEFORE
    # mkfs.xfs created its filesystem, so the mount failed. Create all
    # filesystems first, then mount. Also quoted $VMSIZE in the size tests
    # above (an empty value broke the `[` expression).
    mkfs.xfs /dev/datavg/datalv
    mkfs.xfs /dev/logvg/loglv
    mkfs -t xfs /dev/sharedvg/sharedlv
    mkfs -t xfs /dev/backupvg/backuplv
    mkfs -t xfs /dev/usrsapvg/usrsaplv
    mount -t xfs /dev/logvg/loglv /hana/log
    echo "/dev/mapper/logvg-loglv /hana/log xfs defaults 0 0" >> /etc/fstab
    echo "logicalvols end" >> /tmp/parameter.txt
fi
if [ "$VMSIZE" == "Standard_M128ms" ] || [ "$VMSIZE" == "Standard_M208ms_v2" ]; then
# this assumes that 11 disks are attached at lun 0 through 10
echo "Creating partitions and physical volumes"
# Initialize the additional raw disks as LVM physical volumes.
# NOTE(review): lun0-lun5 are presumably pvcreate'd earlier in the script -- confirm.
pvcreate -ff -y /dev/disk/azure/scsi1/lun6
pvcreate -ff -y /dev/disk/azure/scsi1/lun7
pvcreate -ff -y /dev/disk/azure/scsi1/lun8
pvcreate -ff -y /dev/disk/azure/scsi1/lun9
pvcreate -ff -y /dev/disk/azure/scsi1/lun10
echo "logicalvols start" >> /tmp/parameter.txt
#shared volume creation (one disk, whole VG)
sharedvglun="/dev/disk/azure/scsi1/lun0"
vgcreate sharedvg $sharedvglun
lvcreate -l 100%FREE -n sharedlv sharedvg
#usr volume creation (one disk)
usrsapvglun="/dev/disk/azure/scsi1/lun1"
vgcreate usrsapvg $usrsapvglun
lvcreate -l 100%FREE -n usrsaplv usrsapvg
#backup volume creation (two disks, linear)
backupvg1lun="/dev/disk/azure/scsi1/lun2"
backupvg2lun="/dev/disk/azure/scsi1/lun3"
vgcreate backupvg $backupvg1lun $backupvg2lun
lvcreate -l 100%FREE -n backuplv backupvg
#data volume creation (five disks, striped across all of them)
datavg1lun="/dev/disk/azure/scsi1/lun4"
datavg2lun="/dev/disk/azure/scsi1/lun5"
datavg3lun="/dev/disk/azure/scsi1/lun6"
datavg4lun="/dev/disk/azure/scsi1/lun7"
datavg5lun="/dev/disk/azure/scsi1/lun8"
vgcreate datavg $datavg1lun $datavg2lun $datavg3lun $datavg4lun $datavg5lun
# Stripe across all FIVE PVs (the original used 4, which leaves lvcreate
# unable to allocate 100%FREE as striped extents across a 5-PV VG; every
# sibling VM-size block matches stripe count to PV count).
PHYSVOLUMES=5
STRIPESIZE=64
lvcreate -i$PHYSVOLUMES -I$STRIPESIZE -l 100%FREE -n datalv datavg
#log volume creation (two disks, striped, 32 KiB stripe)
logvg1lun="/dev/disk/azure/scsi1/lun9"
logvg2lun="/dev/disk/azure/scsi1/lun10"
vgcreate logvg $logvg1lun $logvg2lun
PHYSVOLUMES=2
STRIPESIZE=32
lvcreate -i$PHYSVOLUMES -I$STRIPESIZE -l 100%FREE -n loglv logvg
# Format ALL logical volumes before mounting: mounting /hana/log ahead of
# mkfs (as the original did) fails on a freshly created, unformatted volume.
mkfs.xfs /dev/datavg/datalv
mkfs.xfs /dev/logvg/loglv
mkfs -t xfs /dev/sharedvg/sharedlv
mkfs -t xfs /dev/backupvg/backuplv
mkfs -t xfs /dev/usrsapvg/usrsaplv
mount -t xfs /dev/logvg/loglv /hana/log
echo "/dev/mapper/logvg-loglv /hana/log xfs defaults 0 0" >> /etc/fstab
# Progress marker for parity with the other VM-size branches.
echo "logicalvols end" >> /tmp/parameter.txt
fi
#!/bin/bash
echo "mounthanashared start" >> /tmp/parameter.txt
# Mount the HANA logical volumes onto the standard directory layout.
# (The mount points are presumably created earlier in the script -- TODO confirm.)
mount -t xfs /dev/sharedvg/sharedlv /hana/shared
mount -t xfs /dev/backupvg/backuplv /hana/backup
mount -t xfs /dev/usrsapvg/usrsaplv /usr/sap
mount -t xfs /dev/datavg/datalv /hana/data
echo "mounthanashared end" >> /tmp/parameter.txt
echo "write to fstab start" >> /tmp/parameter.txt
# Persist the mounts across reboots.
echo "/dev/mapper/datavg-datalv /hana/data xfs defaults 0 0" >> /etc/fstab
echo "/dev/mapper/sharedvg-sharedlv /hana/shared xfs defaults 0 0" >> /etc/fstab
echo "/dev/mapper/backupvg-backuplv /hana/backup xfs defaults 0 0" >> /etc/fstab
echo "/dev/mapper/usrsapvg-usrsaplv /usr/sap xfs defaults 0 0" >> /etc/fstab
echo "write to fstab end" >> /tmp/parameter.txt
# Staging directory for the SAP installation media (idempotent; replaces the
# original explicit "test -d || mkdir" dance).
mkdir -p "/hana/data/sapbits"
# Normalize the requested version string to upper case (bash 4 ${var^^})
# so the comparisons below are case-insensitive.
HANAVER=${HANAVER^^}
# Map the human-readable HANA media name to the SAP package number used to
# locate the installation archives. Unknown versions fall back to the
# newest package, 51054623 (same fallback as the original nested chain).
if [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS01 REV 10 (51052030)" ]; then
    hanapackage="51052030"
elif [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS02 (51052325)" ]; then
    hanapackage="51052325"
elif [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS03 REV30 (51053061)" ]; then
    hanapackage="51053061"
elif [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS04 REV40 (51053787)" ]; then
    hanapackage="51053787"
elif [ "$HANAVER" = "SAP HANA PLATFORM EDITION 2.0 SPS05 REV52 (51054623)" ]; then
    hanapackage="51054623"
else
    # Original logged a typo'd/incorrect message here ("not 51053061,
    # default to 51054623" after the SPS05 check); log the real situation.
    echo "unrecognized HANA version: ${HANAVER}, defaulting to 51054623"
    hanapackage="51054623"
fi
#####################
SAPBITSDIR="/hana/data/sapbits"
# SPS04/SPS05 media ship as a single ZIP archive; older releases ship as a
# self-extracting multi-part RAR archive.
if [ "${hanapackage}" = "51054623" ] || [ "${hanapackage}" = "51053787" ]
then
    cd "$SAPBITSDIR"
    # -p keeps the script idempotent on re-runs.
    mkdir -p "${hanapackage}"
    cd "${hanapackage}"
    /usr/bin/wget --quiet "$Uri/SapBits/${hanapackage}.ZIP"
    unzip "./${hanapackage}.ZIP"
    cd "$SAPBITSDIR"
    # Additional runtime requirement for these releases.
    zypper install -y libatomic1
else
    cd "$SAPBITSDIR"
    /usr/bin/wget --quiet "$Uri/SapBits/${hanapackage}_part1.exe"
    /usr/bin/wget --quiet "$Uri/SapBits/${hanapackage}_part2.rar"
    /usr/bin/wget --quiet "$Uri/SapBits/${hanapackage}_part3.rar"
    /usr/bin/wget --quiet "$Uri/SapBits/${hanapackage}_part4.rar"
    echo "hana unrar start" >> /tmp/parameter.txt
    # -o-: never overwrite existing files; x: extract with full paths.
    # Extracting part1 pulls in the remaining .rar parts automatically.
    unrar -o- x "${hanapackage}_part1.exe"
    echo "hana unrar end" >> /tmp/parameter.txt
fi
#####################
cd /hana/data/sapbits
echo "hana download start" >> /tmp/parameter.txt
# Fetch the media checksums and the installer configuration template.
# URLs are quoted so a $Uri containing '&' or spaces cannot be word-split.
/usr/bin/wget --quiet "$Uri/SapBits/md5sums"
/usr/bin/wget --quiet "https://raw.githubusercontent.com/JanelleJames-devops/Deloitte-SAP-ARM-v2/master/hdbinst.cfg"
echo "hana download end" >> /tmp/parameter.txt
# Timestamp marker for debugging extension runs.
date >> /tmp/testdate
cd /hana/data/sapbits
echo "hana prepare start" >> /tmp/parameter.txt
# Render hdbinst-local.cfg from the downloaded template, substituting the
# hostname, media path, root user, password, SID and instance number.
# A single sed with multiple -e scripts replaces the original
# cat | sed | sed | ... pipeline; scripts are quoted against word splitting.
myhost=$(hostname)
sed -e "s/REPLACE-WITH-HOSTNAME/$myhost/g" \
    -e "s/\/hana\/shared\/sapbits\/51052325/\/hana\/data\/sapbits\/${hanapackage}/g" \
    -e "s/root_user=root/root_user=$HANAUSR/g" \
    -e "s/AweS0me@PW/$HANAPWD/g" \
    -e "s/sid=H10/sid=$HANASID/g" \
    -e "s/number=00/number=$HANANUMBER/g" \
    hdbinst.cfg > hdbinst-local.cfg
echo "hana prepare end" >> /tmp/parameter.txt
#put host entry in hosts file using instance metadata api
VMIPADDR=$(curl -H Metadata:true "http://169.254.169.254/metadata/instance/network/interface/0/ipv4/ipAddress/0/privateIpAddress?api-version=2017-08-01&format=text")
VMNAME=$(hostname)
cat >>/etc/hosts <<EOF
$VMIPADDR $VMNAME
EOF
#!/bin/bash
echo "install hana start" >> /tmp/parameter.txt
# Run the HANA lifecycle manager in batch mode (-b) with the rendered
# configuration file: this performs the unattended HANA installation.
cd /hana/data/sapbits/${hanapackage}/DATA_UNITS/HDB_LCM_LINUX_X86_64
/hana/data/sapbits/${hanapackage}/DATA_UNITS/HDB_LCM_LINUX_X86_64/hdblcm -b --configfile /hana/data/sapbits/hdbinst-local.cfg
echo "install hana end" >> /tmp/parameter.txt
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import numpy as np
def hbox_grid_sample(boxes, point_num_per_line=3):
    """Sample a regular grid of points inside each axis-aligned box.

    Args:
        boxes: (N, 4) array of [x1, y1, x2, y2] box corners.
        point_num_per_line: number of sample points along each grid axis.

    Returns:
        (N, point_num_per_line**2 * 2) float32 array of interleaved (x, y)
        coordinates, row-major over the grid with x varying fastest.
    """
    n = point_num_per_line
    # Per-box spacing between adjacent grid points along each axis.
    step_x = np.reshape((boxes[:, 2] - boxes[:, 0]) / (n - 1), [-1, 1])
    step_y = np.reshape((boxes[:, 3] - boxes[:, 1]) / (n - 1), [-1, 1])
    # Integer grid offsets (0,0), (1,0), ..., (n-1,n-1), x fastest.
    gx, gy = np.meshgrid(np.arange(n), np.arange(n))
    grid = np.stack([gx.ravel(), gy.ravel()], axis=-1).astype(np.float32)
    # One writable float32 copy of the grid per box, then scale and
    # translate in place so the result stays float32.
    pts = np.repeat(grid[np.newaxis, :, :], boxes.shape[0], axis=0)
    pts[:, :, 0] *= step_x
    pts[:, :, 1] *= step_y
    pts += boxes[:, np.newaxis, 0:2]
    return pts.reshape([-1, n * n * 2])
def rbox_border_sample(boxes, point_num_per_line=3):
    """Sample evenly spaced points along the border of each quadrilateral.

    Args:
        boxes: (N, 8) array of corner coordinates
            [x1, y1, x2, y2, x3, y3, x4, y4] in traversal order.
        point_num_per_line: each edge contributes point_num_per_line - 1
            samples (the shared endpoint belongs to the next edge).

    Returns:
        (N, (point_num_per_line - 1) * 4 * 2) float32 array of interleaved
        (x, y) samples, edge by edge, starting at each edge's first corner.
    """
    steps = point_num_per_line - 1
    corners = np.reshape(boxes, [-1, 4, 2])
    # Edge vectors: each corner to the next one, wrapping at the end
    # (equivalent to the original's (i*2+2) % 8 indexing).
    deltas = np.roll(corners, -1, axis=1) - corners
    # Fractions 0, 1/steps, ..., (steps-1)/steps along every edge.
    frac = np.arange(steps, dtype=np.float32) / steps
    # (N, 4, steps, 2): corner + fraction * edge vector.
    pts = (corners[:, :, np.newaxis, :]
           + frac[np.newaxis, np.newaxis, :, np.newaxis] * deltas[:, :, np.newaxis, :])
    return pts.astype(np.float32).reshape([-1, steps * 4 * 2])
if __name__ == '__main__':
    # Manual smoke check of the border sampler on two axis-aligned quads.
    # print(hbox_grid_sample(np.array([[3, 3, 12, 12], [0, 0, 4, 4]]), 3))
    print(rbox_border_sample(np.array([[3, 3, 12, 3, 12, 12, 3, 12], [0, 0, 4, 0, 4, 4, 0, 4]]), 3))
|
# Copyright (C) 2021-2022 by the FEM on Colab authors
#
# This file is part of FEM on Colab.
#
# SPDX-License-Identifier: MIT
# Abort on any error, echo every command for debuggable logs.
set -e
set -x
# Install pybind11
# NOTE(review): the *_IN tokens below look like placeholders substituted
# when this script is generated/deployed -- confirm before editing them.
PYBIND11_INSTALL_SCRIPT_PATH=${PYBIND11_INSTALL_SCRIPT_PATH:-"PYBIND11_INSTALL_SCRIPT_PATH_IN"}
# If the configured path is a URL, download it to /tmp and source the local copy.
[[ $PYBIND11_INSTALL_SCRIPT_PATH == http* ]] && wget ${PYBIND11_INSTALL_SCRIPT_PATH} -O /tmp/pybind11-install.sh && PYBIND11_INSTALL_SCRIPT_PATH=/tmp/pybind11-install.sh
source $PYBIND11_INSTALL_SCRIPT_PATH
# Install boost (and its dependencies)
BOOST_INSTALL_SCRIPT_PATH=${BOOST_INSTALL_SCRIPT_PATH:-"BOOST_INSTALL_SCRIPT_PATH_IN"}
[[ $BOOST_INSTALL_SCRIPT_PATH == http* ]] && wget ${BOOST_INSTALL_SCRIPT_PATH} -O /tmp/boost-install.sh && BOOST_INSTALL_SCRIPT_PATH=/tmp/boost-install.sh
source $BOOST_INSTALL_SCRIPT_PATH
# Install slepc4py (and its dependencies)
SLEPC4PY_INSTALL_SCRIPT_PATH=${SLEPC4PY_INSTALL_SCRIPT_PATH:-"SLEPC4PY_INSTALL_SCRIPT_PATH_IN"}
[[ $SLEPC4PY_INSTALL_SCRIPT_PATH == http* ]] && wget ${SLEPC4PY_INSTALL_SCRIPT_PATH} -O /tmp/slepc4py-install.sh && SLEPC4PY_INSTALL_SCRIPT_PATH=/tmp/slepc4py-install.sh
source $SLEPC4PY_INSTALL_SCRIPT_PATH
# Download and uncompress library archive
FIREDRAKE_ARCHIVE_PATH=${FIREDRAKE_ARCHIVE_PATH:-"FIREDRAKE_ARCHIVE_PATH_IN"}
[[ $FIREDRAKE_ARCHIVE_PATH == http* ]] && wget ${FIREDRAKE_ARCHIVE_PATH} -O /tmp/firedrake-install.tar.gz && FIREDRAKE_ARCHIVE_PATH=/tmp/firedrake-install.tar.gz
if [[ $FIREDRAKE_ARCHIVE_PATH != skip ]]; then
# Remove preinstalled packages that would conflict with the versions
# bundled inside the firedrake archive.
rm -rf /usr/local/lib/python3.7/dist-packages/cftime*
rm -rf /usr/local/lib/python3.7/dist-packages/networkx*
rm -rf /usr/local/lib/python3.7/dist-packages/netCDF4*
# Strip two leading path components so archive contents land under /usr/local.
tar -xzf $FIREDRAKE_ARCHIVE_PATH --strip-components=2 --directory=/usr/local
fi
|
##
## Install base tools
##
echo '[INFO] Initializing environment...'
# Load project-local environment variables.
. ./.env
echo '[INFO] Installing build essentials...'
sudo apt-get install build-essential --yes
echo '[INFO] Installing go v1.17.2...'
# Community go installer script pinned to the required toolchain version.
wget -q -O - https://git.io/vQhTU | bash -s -- --version 1.17.2
##
## Preparing tmp directory
##
# -p keeps re-runs of this script from failing when ./tmp already exists.
mkdir -p ./tmp
cd ./tmp
##
## Install IBC enabled binaries
##
echo '[INFO] Installing Osmosis binary...'
git clone https://github.com/osmosis-labs/osmosis
cd osmosis
git checkout v6.0.0
make install
cd ..
echo '[INFO] Installing Lum Network binary...'
git clone https://github.com/lum-network/chain.git lum
cd lum
git checkout v1.0.5
go mod tidy
make install
cd ..
echo '[INFO] Installing Ki binary...'
git clone https://github.com/KiFoundation/ki-tools.git
cd ki-tools
git checkout -b v2.0.1 tags/2.0.1
make install
cd ..
echo '[INFO] Installing Gaiad binary...'
git clone https://github.com/cosmos/gaia
cd gaia
git checkout v5.0.2
make install
cd ..
##
## Install go relayer
##
echo '[INFO] Installing Go Relayer...'
git clone https://github.com/lum-network/relayer.git
cd relayer
git checkout main
make install
cd ..
##
## Leaving tmp directory
##
cd ..
##
## Installing chain daemons
##
echo '[INFO] Installing networks daemons...'
# Copy the systemd unit files shipped with this repo and enable each network daemon.
sudo cp ./daemons/* /etc/systemd/system/.
sudo systemctl daemon-reload
sudo systemctl enable osmosisd
sudo systemctl enable lumd
sudo systemctl enable kid
sudo systemctl enable gaiad
|
#!/bin/bash
#BSUB -n 1 -M 2 -W 359
# LSF array job: split reads out of a gzipped FASTQ by inline barcode.
# NOTE(review): with `set -u`, invoking this without $2/$3 aborts on the
# unset positional parameter before the defaults below apply -- confirm.
set -e -x -o pipefail -u
R1=$1
EXTENSION=$2
[[ ! -z "$EXTENSION" ]] || EXTENSION=.fastq.gz
# Sample name = input file name with the extension stripped.
X1=$( basename $R1 $EXTENSION )
# Parallel arrays of barcode IDs (column 1) and sequences (column 2).
BWGA192_IDX=( $( cut -f 1 src/bdeplex/barcode.192.txt ) )
BWGA192_SEQ=( $( cut -f 2 src/bdeplex/barcode.192.txt ) )
# NOTE(review): LSB_JOBINDEX is 1-based while bash arrays are 0-based, so
# job i reads line i+1 of barcode.192.txt -- confirm this offset is intended.
echo "searching for" ${BWGA192_IDX[$LSB_JOBINDEX]} ${BWGA192_SEQ[$LSB_JOBINDEX]}
OUT=$3
[[ ! -z "$OUT" ]] || OUT=bsplit/
[ -d $OUT ] || mkdir $OUT
# Keep every 4-line FASTQ record whose sequence line matches the barcode
# followed by 83 ACTGN bases: -B 1/-A 2 re-assembles the record around the
# matching sequence line, and sed drops grep's "--" group separators.
zgrep -B 1 -A 2 -E "${BWGA192_SEQ[$LSB_JOBINDEX]}[ACTGN]{83}" ${R1} | sed -r '/^--$/d' | gzip -c > ${OUT}/${X1}.${BWGA192_IDX[$LSB_JOBINDEX]}.fq.gz
# Record the number of reads written (one header line per record, NR%4==1).
echo ${OUT}/${X1}.${BWGA192_IDX[$LSB_JOBINDEX]} $( zcat ${OUT}/${X1}.${BWGA192_IDX[$LSB_JOBINDEX]}.fq.gz | awk 'NR%4==1' | wc -l ) | tr ' ' "\t" > ${OUT}/${X1}.${BWGA192_IDX[$LSB_JOBINDEX]}.nreads.txt
|
class Person:
    """Simple value holder pairing a person's name with a birth year."""

    def __init__(self, name, year):
        # Store both attributes exactly as supplied by the caller.
        self.name = name
        self.year = year


# Module-level example instance.
person = Person(name="Jane", year=1998)
package sma.rhythmtapper.framework;
import android.content.Context;
import android.os.Vibrator;
import wseemann.media.FFmpegMediaMetadataRetriever;
/**
* Created by Peter on 23.01.2017.
*/
/**
 * Service-locator style facade of the rhythm-tapper framework: screens obtain
 * the platform subsystems (audio, input, graphics, ...) through this interface.
 */
public interface Game {
    /** Audio subsystem. */
    Audio getAudio();

    /** Second audio subsystem; presumably file-backed playback -- TODO confirm. */
    Audio getFileAudio();

    /** Input subsystem. */
    Input getInput();

    /** File I/O abstraction. */
    FileIO getFileIO();

    /** Drawing/rendering abstraction. */
    Graphics getGraphics();

    /** Android vibrator service. */
    Vibrator getVibrator();

    /** Makes {@code screen} the active screen. */
    void setScreen(Screen screen);

    /** Returns the currently active screen. */
    Screen getCurrentScreen();

    /** Returns the screen shown when the game starts. */
    Screen getInitScreen();

    /** Horizontal screen extent -- NOTE(review): confirm units (pixels?). */
    int getScreenX();

    /** Vertical screen extent -- NOTE(review): confirm units (pixels?). */
    int getScreenY();

    /** Starts the given Android activity class. */
    void goToActivity(Class<?> activity);

    /** Creates an FFmpeg metadata retriever for the given video source. */
    FFmpegMediaMetadataRetriever createVideo(String video);

    /** Android context backing this game. */
    Context getContext();
}
|
package com.nostalgia.persistence.model;
import java.io.Serializable;
/**
 * Serializable request DTO carrying the seed string for identicon generation.
 * NOTE(review): the public mutable field is part of the wire format used by
 * existing callers; kept as-is.
 */
public class IdenticonRequest implements Serializable{
    /** Serialization version of this DTO. */
    private static final long serialVersionUID = -8172140527350196775L;

    /* Seed value; presumably hashed by the consumer to derive the identicon
       image -- confirm against the identicon service. */
    public String seed;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.