text
stringlengths 1
1.05M
|
|---|
import Head from 'next/head';
// Shared page shell: sets the document title and favicon via next/head
// and renders children inside the app's background container.
export const PageLayout = ({ children }) => {
return (
<>
<Head>
<title>Diazno 2.0</title>
<link rel='icon' href='/favicon.ico' />
</Head>
<div className='bg-scorpion'>{children}</div>
</>
);
};
|
def reverse_string(string):
    """Reverse the characters of each space-separated word.

    Word order is preserved; only the letters inside each word are
    flipped. Splitting is on single spaces, so runs of spaces survive.
    """
    return ' '.join(token[::-1] for token in string.split(' '))


reverse_string("The quick brown fox jumps over the lazy dog.")
|
<reponame>comediadesign/uniforms
import FormControl from '@material-ui/core/FormControl';
import FormControlLabel from '@material-ui/core/FormControlLabel';
import FormHelperText from '@material-ui/core/FormHelperText';
import FormLabel from '@material-ui/core/FormLabel';
import Radio from '@material-ui/core/Radio';
import RadioGroup from '@material-ui/core/RadioGroup';
import createMuiTheme from '@material-ui/core/styles/createMuiTheme';
import ThemeProvider from '@material-ui/styles/ThemeProvider/ThemeProvider';
import React from 'react';
import { RadioField } from 'uniforms-material';
import { render } from 'uniforms/__suites__';
import createContext from './_createContext';
import mount from './_mount';
// RTL-based tests checking how MUI theme props, field defaults, and
// explicit props interact when resolving FormControl margin/fullWidth.
describe('@RTL - RadioField tests', () => {
test('<RadioField> - default props are not passed when MUI theme props are specified', () => {
// Theme-level MuiFormControl props should take precedence over the field defaults.
const theme = createMuiTheme({
props: { MuiFormControl: { fullWidth: false, margin: 'normal' } },
});
const { container } = render(
<ThemeProvider theme={theme}>
<RadioField name="x" />
</ThemeProvider>,
{ x: { type: String } },
);
const elements = container.getElementsByClassName(
'MuiFormControl-marginNormal',
);
expect(elements).toHaveLength(1);
expect(elements[0].classList.contains('MuiFormControl-fullWidth')).toBe(
false,
);
});
test('<RadioField> - default props are passed when MUI theme props are absent', () => {
// With an empty theme the field falls back to its own defaults
// (dense margin, fullWidth enabled).
const theme = createMuiTheme({});
const { container } = render(
<ThemeProvider theme={theme}>
<RadioField name="x" />
</ThemeProvider>,
{ x: { type: String } },
);
const elements = container.getElementsByClassName(
'MuiFormControl-marginDense',
);
expect(elements).toHaveLength(1);
expect(elements[0].classList.contains('MuiFormControl-fullWidth')).toBe(
true,
);
});
test('<RadioField> - explicit props are passed when MUI theme props are specified', () => {
// Props given directly to the field override both theme props and defaults.
const theme = createMuiTheme({
props: { MuiFormControl: { fullWidth: true, margin: 'dense' } },
});
const explicitProps = {
fullWidth: false,
margin: 'normal' as const,
};
const { container } = render(
<ThemeProvider theme={theme}>
<RadioField name="x" {...explicitProps} />
</ThemeProvider>,
{ x: { type: String } },
);
const elements = container.getElementsByClassName(
'MuiFormControl-marginNormal',
);
expect(elements).toHaveLength(1);
expect(elements[0].classList.contains('MuiFormControl-fullWidth')).toBe(
false,
);
});
});
// Enzyme-based tests: structure, prop forwarding and change handling of RadioField.
test('<RadioField> - renders a set of Radio buttons', () => {
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(Radio)).toHaveLength(2);
});
test('<RadioField> - renders a set of Radio buttons wrapped with RadioGroup', () => {
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
expect(wrapper.find(RadioGroup).find(Radio)).toHaveLength(2);
});
test('<RadioField> - renders a set of Radio buttons with correct disabled state', () => {
const element = <RadioField name="x" disabled />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(FormControl).prop('disabled')).toBe(true);
});
test('<RadioField> - renders a RadioGroup with correct id (inherited)', () => {
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
// When no id is passed, uniforms generates one — only truthiness is checked.
expect(wrapper.find(RadioGroup).prop('id')).toBeTruthy();
});
test('<RadioField> - renders a RadioGroup with correct id (specified)', () => {
const element = <RadioField name="x" id="y" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
expect(wrapper.find(RadioGroup).prop('id')).toBe('y');
});
test('<RadioField> - renders a RadioGroup with correct name', () => {
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
expect(wrapper.find(RadioGroup).prop('name')).toBe('x');
});
test('<RadioField> - renders a set of Radio buttons with correct options', () => {
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(Radio)).toHaveLength(2);
expect(wrapper.find(FormControlLabel).at(0).prop('label')).toBe('a');
expect(wrapper.find(FormControlLabel).at(1).prop('label')).toBe('b');
});
test('<RadioField> - renders a set of Radio buttons with correct options (transform)', () => {
// transform maps each allowed value to its display label.
const element = <RadioField name="x" transform={x => x.toUpperCase()} />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(Radio)).toHaveLength(2);
expect(wrapper.find(FormControlLabel).at(0).prop('label')).toBe('A');
expect(wrapper.find(FormControlLabel).at(1).prop('label')).toBe('B');
});
test('<RadioField> - renders a RadioGroup with correct value (default)', () => {
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
expect(wrapper.find(RadioGroup).prop('value')).toBeFalsy();
});
test('<RadioField> - renders a RadioGroup with correct value (model)', () => {
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext(
{ x: { type: String, allowedValues: ['a', 'b'] } },
{ model: { x: 'b' } },
),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
expect(wrapper.find(RadioGroup).prop('value')).toBe('b');
});
test('<RadioField> - renders a RadioGroup with correct value (specified)', () => {
const element = <RadioField name="x" value="b" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
expect(wrapper.find(RadioGroup).prop('value')).toBe('b');
});
test('<RadioField> - renders a RadioGroup which correctly reacts on change', () => {
const onChange = jest.fn();
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext(
{ x: { type: String, allowedValues: ['a', 'b'] } },
{ onChange },
),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
// Invokes the handler directly with a minimal event-like object.
// @ts-expect-error Provide a valid value.
wrapper.find(RadioGroup).props().onChange!({ target: { value: 'b' } });
expect(onChange).toHaveBeenLastCalledWith('x', 'b');
});
test('<RadioField> - renders a RadioGroup which correctly reacts on change (same value)', () => {
const onChange = jest.fn();
const element = <RadioField name="x" />;
const wrapper = mount(
element,
createContext(
{ x: { type: String, allowedValues: ['a', 'b'] } },
{ model: { x: 'b' }, onChange },
),
);
expect(wrapper.find(RadioGroup)).toHaveLength(1);
// @ts-expect-error Provide a valid value.
wrapper.find(RadioGroup).props().onChange!({ target: { value: 'a' } });
expect(onChange).toHaveBeenLastCalledWith('x', 'a');
});
test('<RadioField> - renders a label', () => {
const element = <RadioField name="x" label="y" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(FormLabel)).toHaveLength(1);
// The trailing asterisk marks the field as required.
expect(wrapper.find(FormLabel).text()).toBe('y *');
});
test('<RadioField> - renders a helperText', () => {
const element = <RadioField name="x" helperText="Helper" />;
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(FormHelperText)).toHaveLength(1);
expect(wrapper.find(FormHelperText).text()).toBe('Helper');
});
test('<RadioField> - renders a TextField with correct error text (specified)', () => {
const error = new Error();
const element = (
<RadioField name="x" error={error} showInlineError errorMessage="Error" />
);
const wrapper = mount(
element,
createContext({ x: { type: String, allowedValues: ['a', 'b'] } }),
);
expect(wrapper.find(FormHelperText).text()).toBe('Error');
});
test('<RadioField> - works with special characters', () => {
// Smoke test: non-ASCII allowed values must not break rendering.
mount(
<RadioField name="x" />,
createContext({ x: { type: String, allowedValues: ['ă', 'ș'] } }),
);
});
|
<reponame>LuJie0403/iterlife-zeus
package com.iterlife.zeus.demo.service.impl;
public class ByeServiceImpl {
    /** Writes a farewell message for {@code name} to standard output. */
    public void bye(String name) {
        final String farewell = "Bye:" + name;
        System.out.println(farewell);
    }
}
|
# Run tests and coverage, skipping tests tagged :pending.
# Fixed: the tag was misspelled "pendong", so pending tests were never excluded.
mix test --exclude pending
mix coveralls --exclude pending
# Static analysis / style checkers.
mix credo
mix dogma
|
from typing import List
def calculate_average(nums: List[str]) -> float:
    """Average the numeric entries of a list of strings.

    Empty strings and entries that cannot be parsed as floats are
    skipped (best-effort parsing, matching the original behavior).

    Returns:
        The mean of the parseable entries, or 0.0 when none parse.
        (Fixed: previously returned the int ``0`` despite the ``float``
        annotation; the accumulator also started as an int.)
    """
    total = 0.0
    count = 0
    for num in nums:
        if not num:
            continue  # skip empty strings outright
        try:
            total += float(num)
        except ValueError:
            # Non-numeric entries are deliberately ignored.
            continue
        count += 1
    return total / count if count > 0 else 0.0
|
//////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2004 <NAME>
// Use, modification and distribution is subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include <algorithm>
#include <stdexcept>
#include "../sqlite/sqlite3.h"
#include "query.hpp"
#include "binders.hpp"
#include "exception.hpp"
#include "statement.hpp"
#include "session.hpp"
//////////////////////////////////////////////////////////////////////////////
namespace sqlitepp {
//////////////////////////////////////////////////////////////////////////////
namespace { // implementation details
//////////////////////////////////////////////////////////////////////////////
// fix: some older compilers will provide a public copy constructor for
// std::basic_ostringstream<T>
// check for Visual Studio C++ older than 2010 or GCC
#if (defined(_MSC_VER) && (_MSC_VER < 1600)) || defined(__GNUC__)
// Swap two output string streams by exchanging both their buffered
// contents and their current write (put) positions.
template <typename T>
void swap(std::basic_ostringstream<T>& s1, std::basic_ostringstream<T>& s2)
{
// Save s1's state first so it is not lost when s1 is overwritten below.
std::basic_string<T> const s1_str = s1.str();
typename std::basic_ostringstream<T>::pos_type const s1_pos = s1.tellp();
s1.str(s2.str());
s1.seekp(s2.tellp());
s1.clear(); // reset any error flags raised by seekp
s2.str(s1_str);
s2.seekp(s1_pos);
s2.clear();
}
#endif
// Deleter used with std::for_each to free heap-allocated binder objects.
template<typename T>
inline void delete_object(T* obj)
{
delete obj;
}
//----------------------------------------------------------------------------
//////////////////////////////////////////////////////////////////////////////
} // namespace { // implementation details
//////////////////////////////////////////////////////////////////////////////
//
// query
//
// Default constructor: empty query with no SQL text and no binders.
query::query()
{
}
//----------------------------------------------------------------------------
// Construct a query whose SQL text is initialized from sql.
query::query(string_t const& sql)
{
sql_ << sql;
}
//----------------------------------------------------------------------------
// Destructor releases all owned binders and clears the SQL text.
query::~query()
{
clear();
}
//----------------------------------------------------------------------------
// Replace the SQL text and seek the stream to its end so that subsequent
// operator<< calls append rather than overwrite.
void query::sql(string_t const& text)
{
sql_.str(text);
sql_.seekp(0, std::ios_base::end).clear();
}
//----------------------------------------------------------------------------
// Delete all owned into/use binders and reset the SQL text to empty.
void query::clear() // throw()
{
// clear binders
std::for_each(intos_.begin(), intos_.end(), delete_object<into_binder>);
intos_.clear();
std::for_each(uses_.begin(), uses_.end(), delete_object<use_binder>);
uses_.clear();
// clear sql
sql(string_t());
}
//----------------------------------------------------------------------------
// A query is empty when it has no SQL text and no binders of either kind.
bool query::empty() const // throw()
{
return sql_.str().empty() && intos_.empty() && uses_.empty();
}
//----------------------------------------------------------------------------
// Take ownership of an into binder. Throws std::invalid_argument on null.
query& query::put(into_binder_ptr i)
{
if ( !i.get() )
{
throw std::invalid_argument("null into binder");
}
intos_.push_back(i.release()); // ownership transferred to intos_
return *this;
}
//----------------------------------------------------------------------------
// Take ownership of a use binder. Throws std::invalid_argument on null.
query& query::put(use_binder_ptr u)
{
if ( !u.get() )
{
throw std::invalid_argument("null use binder");
}
uses_.push_back(u.release()); // ownership transferred to uses_
return *this;
}
//----------------------------------------------------------------------------
// Exchange the complete state (binder lists and SQL stream) of two queries.
void swap(query& q1, query& q2)
{
// swap binders
swap(q1.intos_, q2.intos_);
swap(q1.uses_, q2.uses_);
// swap sql streams
swap(q1.sql_, q2.sql_);
// reset stream error flags after the stream swap
q1.sql_.clear();
q2.sql_.clear();
}
//----------------------------------------------------------------------------
//////////////////////////////////////////////////////////////////////////////
//
// prepare_query
//
// Bind this prepare_query to the statement it will populate on destruction.
prepare_query::prepare_query(statement& st)
: st_(&st)
{
}
//----------------------------------------------------------------------------
// "Move" constructor: steals src's accumulated query state and statement
// pointer, leaving src inert (src.st_ == 0) so only one of the two
// objects finalizes the statement.
prepare_query::prepare_query(prepare_query& src)
{
swap(*this, src);
st_ = src.st_; src.st_ = 0;
}
//----------------------------------------------------------------------------
// On destruction, hand the accumulated query over to the statement and
// finalize it. May propagate exceptions from finalize() (NOEXCEPT_FALSE).
prepare_query::~prepare_query() NOEXCEPT_FALSE
{
if ( st_ )
{
// move query to statement.
swap(st_->q(), *this);
st_->finalize();
}
}
//----------------------------------------------------------------------------
//////////////////////////////////////////////////////////////////////////////
//
// once_query
//
// Bind this once_query to the session it will execute against.
once_query::once_query(session& s)
: s_(&s)
{
}
//----------------------------------------------------------------------------
// "Move" constructor: steals src's query state and session pointer so the
// statement is executed exactly once, by the surviving object.
once_query::once_query(once_query& src)
{
swap(*this, src);
s_ = src.s_; src.s_ = 0;
}
//----------------------------------------------------------------------------
// On destruction, build a statement from the accumulated query and run it
// in the bound session. Throws session_not_open if the session is closed;
// may also propagate errors from exec() (NOEXCEPT_FALSE).
once_query::~once_query() NOEXCEPT_FALSE
{
if ( s_ )
{
if ( !s_->is_open() )
{
throw session_not_open();
}
// execute statement in session.
statement st(*s_);
swap(st.q(), *this);
st.exec();
}
}
//----------------------------------------------------------------------------
//////////////////////////////////////////////////////////////////////////////
} // namespace sqlitepp
//////////////////////////////////////////////////////////////////////////////
|
# Resolve the directory containing this script and load shared helpers.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
. "$DIR/../prelude.sh"
cd src
set -o errexit
mongo_binary=dist-test/bin/mongo${exe}
activate_venv
# Version recorded at compile time (whitespace stripped for comparison).
bin_ver=$($python -c "import yaml; print(yaml.safe_load(open('compile_expansions.yml'))['version']);" | tr -d '[ \r\n]')
# Due to SERVER-23810, we cannot use $mongo_binary --quiet --nodb --eval "version();"
# Extract the version from `mongo --version` output instead.
mongo_ver=$($mongo_binary --version | perl -pe '/version v([^\"]*)/; $_ = $1;' | tr -d '[ \r\n]')
# The versions must match
if [ "$bin_ver" != "$mongo_ver" ]; then
echo "The mongo version is $mongo_ver, expected version is $bin_ver"
exit 1
fi
|
<reponame>smartxyh/moose<filename>renderer/components/FilesList/index.ts<gh_stars>100-1000
export * from "./FilesList";
|
#!/bin/bash
# Launch a single simulation: geometry parameter 1.0, run number 83.
python RunSimulation.py --Geo 1.0 --sim_num 83
|
#!/bin/bash
# This script installs MongoDB on your machine. This will require editing, depending on where you want to install
# MongoDB, or if you have already installed MongoDB.

# Resolve the folder containing this script so companion scripts can be sourced reliably.
export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
source "$SCRIPT_FOLDER/set_tcapy_env_vars.sh"

# Fixed: $DISTRO is now quoted — unquoted, "[ $DISTRO == ... ]" fails with
# "unary operator expected" whenever DISTRO is unset or empty.
if [ "$DISTRO" == "ubuntu" ]; then
    # Ubuntu installation
    sudo rm /etc/apt/sources.list.d/mongodb*.list
    sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 4B7C549A058F8B6B
    # For Mongo 3.6
    # echo "deb [ arch=amd64,arm64 ] http://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/3.6 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.6.list
    # For Mongo 4.2
    echo "deb [arch=amd64] http://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.2.list
    sudo apt-get update
    sudo apt-get install --yes mongodb-org
elif [ "$DISTRO" == "redhat" ]; then
    # Red Hat installation
    # Update repo file (assumes MongoDB 3.6) and then install
    # For Mongo 3.6
    sudo cp "$TCAPY_CUEMACRO/batch_scripts/installation/mongodb-org-3.6.repo" /etc/yum.repos.d/mongodb-org-3.6.repo
    # For Mongo 4.2
    # sudo cp "$TCAPY_CUEMACRO/batch_scripts/installation/mongodb-org-4.2.repo" /etc/yum.repos.d/mongodb-org-4.2.repo
    sudo yum install --yes mongodb-org
fi

# Create data folder and make MongoDB the owner
sudo mkdir -p /data/db
sudo chown -R mongodb:mongodb /data/db
sudo chmod -R a+rw /data/db
# Make sure to edit mongo.conf to your tcapy log folder location!
|
import MergeResult from './merge-result';
import { JoinFunction, ConflictFunction } from './collater';
/**
 * Options controlling how inputs are split into units, how merged units
 * are joined back together, and how conflicting changes are resolved.
 */
export interface DiffOptions {
splitFunction: (s: string) => string[];
joinFunction: JoinFunction;
conflictFunction: ConflictFunction;
}
/**
 * Three-way merge of `left` and `right` against their common ancestor
 * `base`, using the supplied split/join/conflict behavior when given.
 */
export default function merge(left: string, base: string, right: string, options?: DiffOptions): MergeResult;
|
<reponame>joshiejack/Husbandry
package uk.joshiejack.husbandry.entity.ai;
import net.minecraft.entity.MobEntity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.tags.ITag;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.IWorldReader;
import uk.joshiejack.husbandry.api.IMobStats;
import uk.joshiejack.husbandry.tileentity.AbstractFoodSupplyTileEntity;
import javax.annotation.Nonnull;
/**
 * AI goal that moves a hungry mob toward a food-supply block and lets it eat
 * from the block's first inventory slot.
 */
public class EatFoodGoal extends AbstractMoveToBlockGoal {
// Item tag describing what counts as food for this mob.
private final ITag.INamedTag<Item> food;
/** Convenience constructor: stand beside the target, search distance 8. */
public EatFoodGoal(MobEntity entity, IMobStats<?> stats, ITag.INamedTag<Item> food) {
this(entity, stats, food, Orientation.BESIDE, 8);
}
public EatFoodGoal(MobEntity entity, IMobStats<?> stats, ITag.INamedTag<Item> food, Orientation orientation, int distance) {
super(entity, stats, orientation, distance);
this.food = food;
}
@Override
public boolean canUse() {
// Only start when hungry, with a 1-in-5 random throttle on top of the base checks.
return stats.isHungry() && entity.getRandom().nextInt(5) == 0 && super.canUse();
}
@Override
protected boolean isValidTarget(@Nonnull IWorldReader world, @Nonnull BlockPos pos) {
// Valid targets are food-supply tiles whose slot 0 holds a tagged food item.
TileEntity tile = world.getBlockEntity(pos);
if (tile instanceof AbstractFoodSupplyTileEntity) {
ItemStack contents = ((AbstractFoodSupplyTileEntity)tile).getItem(0);
return contents.getCount() > 0 && food.contains(contents.getItem());
} else return false;
}
@Override
public void tick() {
super.tick();
// Keep the mob looking at the food block while approaching it.
entity.getLookControl().setLookAt((double) blockPos.getX() + 0.5D, blockPos.getY() + 1,
(double) blockPos.getZ() + 0.5D, 10.0F, (float) entity.getMaxHeadXRot());
if (isNearDestination()) {
TileEntity tile = entity.level.getBlockEntity(blockPos);
if (tile instanceof AbstractFoodSupplyTileEntity) {
((AbstractFoodSupplyTileEntity) tile).consume();
stats.feed();
entity.playAmbientSound();
// Large tryTicks value makes the goal give up immediately after eating.
tryTicks = 9999;
}
}
}
}
|
// Jest configuration: transpile JS/TS(X) with babel-jest and stub out
// static assets and stylesheets so importing them doesn't break tests.
module.exports = {
transform: {
"\\.[jt]sx?$": "babel-jest"
},
moduleNameMapper: {
// Media/binary assets resolve to a single file mock.
"\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$":
"<rootDir>/__mocks__/fileMock.js",
// Stylesheets resolve to a style mock object.
"\\.(css|less|scss)$": "<rootDir>/__mocks__/styleMock.js"
//webpack aliases
}
};
|
#!/bin/sh
# Catkin-generated helper: runs setup.py build+install for rosserial_client.
if [ -n "$DESTDIR" ] ; then
case $DESTDIR in
/*) # ok
;;
*)
/bin/echo "DESTDIR argument must be absolute... "
/bin/echo "otherwise python's distutils will bork things."
exit 1
esac
DESTDIR_ARG="--root=$DESTDIR"
fi
# Echo each command before running it, for traceable build logs.
echo_and_run() { echo "+ $@" ; "$@" ; }
echo_and_run cd "/home/nvidia/catkin_ws/src/rosserial/rosserial_client"
# Ensure that Python install destination exists
echo_and_run mkdir -p "$DESTDIR/home/nvidia/catkin_ws/install/lib/python2.7/dist-packages"
# Note that PYTHONPATH is pulled from the environment to support installing
# into one location when some dependencies were installed in another
# location, #123.
echo_and_run /usr/bin/env \
PYTHONPATH="/home/nvidia/catkin_ws/install/lib/python2.7/dist-packages:/home/nvidia/catkin_ws/build/lib/python2.7/dist-packages:$PYTHONPATH" \
CATKIN_BINARY_DIR="/home/nvidia/catkin_ws/build" \
"/usr/bin/python" \
"/home/nvidia/catkin_ws/src/rosserial/rosserial_client/setup.py" \
build --build-base "/home/nvidia/catkin_ws/build/rosserial/rosserial_client" \
install \
$DESTDIR_ARG \
--install-layout=deb --prefix="/home/nvidia/catkin_ws/install" --install-scripts="/home/nvidia/catkin_ws/install/bin"
|
#!/bin/sh
# CocoaPods-style helper: copies built frameworks into the app bundle,
# strips architectures invalid for the target, and re-signs as needed.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies one framework into the app's Frameworks folder, then strips and signs it.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
# Run signing in the background; the trailing `wait` collects the jobs.
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/Swiftilities/Swiftilities.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/Swiftilities/Swiftilities.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
-- Minimal users table: the name doubles as the primary key, so names must
-- be unique and non-null; city is optional.
CREATE TABLE users (
name VARCHAR(255) NOT NULL PRIMARY KEY,
city VARCHAR(255)
);
|
def largest_element_matrix(matrix):
    """Return the largest value in a 2-D list (list of rows).

    Like the original scan, this assumes the matrix has at least one row
    and that the first row has at least one element.
    """
    # Seed with the first element, then fold in each row's maximum.
    best = matrix[0][0]
    for row in matrix:
        row_max = max(row, default=best)
        if row_max > best:
            best = row_max
    return best
|
package com.github.johanbrorson.zebroid.configuration;
import java.io.File;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import com.github.johanbrorson.zebroid.utils.PropertyHelper;
import org.apache.commons.configuration2.CombinedConfiguration;
import org.apache.commons.configuration2.FileBasedConfiguration;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.SystemConfiguration;
import org.apache.commons.configuration2.builder.FileBasedConfigurationBuilder;
import org.apache.commons.configuration2.builder.fluent.FileBasedBuilderParameters;
import org.apache.commons.configuration2.builder.fluent.Parameters;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.configuration2.tree.OverrideCombiner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Application configuration combining, in override order: system properties,
 * an optional override properties file, and the default server.properties
 * bundled on the classpath.
 */
public class Configuration {
private static final String PROPERTIES_FILE_NAME = "server.properties";
private static final Logger logger =
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
// Lazily created singleton instance (see getInstance).
private static Configuration instance;
private PropertiesConfiguration config = new PropertiesConfiguration();
// Builds the combined view: system properties win over the override file,
// which wins over the bundled defaults (OverrideCombiner semantics).
private Configuration() throws ConfigurationException {
CombinedConfiguration combinedConfig = new CombinedConfiguration();
combinedConfig.setNodeCombiner(new OverrideCombiner());
combinedConfig.addConfiguration(new SystemConfiguration());
if (getOverrideFile().exists()) {
combinedConfig.addConfiguration(
getFileBasedConfiguration(getOverrideFileBasedBuilderParameters()));
}
combinedConfig.addConfiguration(
getFileBasedConfiguration(getDefaultFileBasedBuilderParameters()));
config.append(combinedConfig);
}
/**
 * Gets a instance of the Configuration class.
 *
 * NOTE(review): not synchronized — concurrent first calls could create two
 * instances; confirm initialization is single-threaded.
 *
 * @return The Configuration instance
 * @throws ConfigurationException Failed to initialize a Configuration object
 */
public static Configuration getInstance() throws ConfigurationException {
if (Configuration.instance == null) {
Configuration.instance = new Configuration();
}
return Configuration.instance;
}
// Loads a properties configuration from the given source parameters,
// using ',' as the list delimiter.
private FileBasedConfiguration getFileBasedConfiguration(
FileBasedBuilderParameters builderParams) throws ConfigurationException {
FileBasedConfigurationBuilder<FileBasedConfiguration> builder =
new FileBasedConfigurationBuilder<FileBasedConfiguration>(PropertiesConfiguration.class)
.configure(builderParams.setListDelimiterHandler(new DefaultListDelimiterHandler(',')));
return builder.getConfiguration();
}
private FileBasedBuilderParameters getOverrideFileBasedBuilderParameters() {
return new Parameters().fileBased().setFile(getOverrideFile());
}
// Resolves the override file: system property location if set, otherwise
// server.properties in the working directory.
private File getOverrideFile() {
final String configFileProperty = "zebroid.configuration";
// Check if the properties file location has been set in system property,
// otherwise use properties file in working directory
if (PropertyHelper.isValid(configFileProperty)) {
String fileLocation = PropertyHelper.getProperty(configFileProperty);
logger.debug("Use properties file set by system property: " + fileLocation);
return new File(fileLocation);
} else {
return new File(PropertyHelper.getProperty("user.dir"), PROPERTIES_FILE_NAME);
}
}
// Default properties are loaded from the classpath resource.
private FileBasedBuilderParameters getDefaultFileBasedBuilderParameters() {
URL defaultProperties = getClass().getClassLoader().getResource(PROPERTIES_FILE_NAME);
return new Parameters().fileBased().setURL(defaultProperties);
}
/**
 * Gets a string associated with the given configuration key. The default value is returned, if
 * the key doesn't map to an existing object.
 *
 * @param propertyKey The configuration key
 * @param defaultValue The default value
 * @return String with the value of the key
 */
public String getString(final String propertyKey, String defaultValue) {
return config.getString(propertyKey, defaultValue);
}
/**
 * Gets an integer associated with the given configuration key.
 *
 * @param propertyKey The configuration key
 * @param defaultValue The default value returned when the key is absent
 * @return Integer with the value of the key
 */
public Integer getInteger(final String propertyKey, final Integer defaultValue) {
return config.getInteger(propertyKey, defaultValue);
}
}
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.taxhistory.utils
import java.util.UUID
import org.joda.time.LocalDate
import org.mockito.Mockito
import play.api.mvc.{AnyContent, Request, Result}
import uk.gov.hmrc.auth
import uk.gov.hmrc.auth.core.{AuthConnector, Enrolment}
import uk.gov.hmrc.auth.core.authorise.Predicate
import uk.gov.hmrc.domain.Nino
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.taxhistory.connectors.CitizenDetailsConnector
import uk.gov.hmrc.taxhistory.model.api.{Employment, PayAsYouEarn}
import uk.gov.hmrc.taxhistory.model.nps.EmploymentStatus
import uk.gov.hmrc.taxhistory.model.utils.TestUtil
import uk.gov.hmrc.taxhistory.services.SaAuthService
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.ExecutionContext.Implicits.global
/**
* A test version of SaAuthService which returns a predicate without calling citizenDetailsConnector
* rather than interrogating a real auth service.
*/
case class TestSaAuthService() extends SaAuthService(authConnector = Mockito.mock(classOf[AuthConnector]), citizenDetailsConnector = Mockito.mock(classOf[CitizenDetailsConnector])) with TestUtil {
// Fixture data returned for the "valid" nino.
val testEmploymentId: UUID = java.util.UUID.randomUUID
val testStartDate: LocalDate = LocalDate.now()
val testPaye: PayAsYouEarn =
PayAsYouEarn(
employments = List(Employment(
employmentId = testEmploymentId,
startDate = Some(testStartDate),
payeReference = "SOME_PAYE", employerName = "<NAME>",
employmentStatus = EmploymentStatus.Live, worksNumber = "00191048716")),
allowances = List.empty,
incomeSources = Map.empty,
benefits = Map.empty,
payAndTax = Map.empty,
taxAccount = None,
statePension = None
)
// Three random ninos driving the three canned outcomes below.
val validNino: Nino = randomNino()
val unauthorisedNino: Nino = randomNino()
val forbiddenNino: Nino = randomNino()
// Builds the same predicate shape as production (individual NINO check OR
// agent enrolment with delegated auth) without touching citizen details.
override def authorisationPredicate(nino: Nino)(implicit hc: HeaderCarrier, ec: ExecutionContext): Future[Predicate] = {
val checkIndividual: Predicate = auth.core.Nino(hasNino = true, nino = Some(nino.value))
val checkAgentServicesWithDigitalHandshake: Predicate =
Enrolment("THIS-STRING-IS-NOT-RELEVANT")
.withIdentifier("NINO", nino.value)
.withDelegatedAuthRule("afi-auth")
Future.successful(checkIndividual or checkAgentServicesWithDigitalHandshake)
}
// Canned responses keyed on the three test ninos.
// NOTE(review): any other nino falls through this match and raises a
// MatchError — confirm that is the intended test behavior.
override def withSaAuthorisation(nino: Nino)(action: Request[AnyContent] => Future[Result])(implicit hc: HeaderCarrier, request: Request[AnyContent]): Future[Result] = {
nino.nino match {
case validNino.nino => Future.successful(Ok(PayAsYouEarn.formats.writes(testPaye)))
case unauthorisedNino.nino => Future.successful(Unauthorized)
case forbiddenNino.nino => Future.successful(Forbidden)
}
}
}
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression

# Load the dataset from disk.
data = pd.read_csv('data.csv')

# Separate the predictors from the 'Target' column.
features, target = data.drop('Target', axis=1), data['Target']

# Hold out 20% of the rows for evaluation (fixed seed for reproducibility).
X_train, X_test, y_train, y_test = train_test_split(
    features, target, test_size=0.2, random_state=0
)

# Fit a logistic-regression classifier on the training split.
model = LogisticRegression()
model.fit(X_train, y_train)

# Predict labels for the held-out rows.
y_pred = model.predict(X_test)

# Report mean accuracy on the test split.
accuracy = model.score(X_test, y_test)
print('The model accuracy is {}'.format(accuracy))
|
#!/bin/bash
# Slurm job: single-core run of the PE-my.py sequence-tagging experiment
# using the tanhrev activation.
#SBATCH -J Act_tanhrev_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args passed to PE-my.py: activation, seed, optimizer, and
# further hyperparameters (values are experiment-specific).
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py tanhrev 52 Adadelta 2 0.5709367317167637 0.9364043769811019 runiform 0.3
|
#!/bin/bash
# Build the local Docker image and push it to the account's ECR repository.
set -euo pipefail

ECR_REPO="dropwizard"
AWS_ACCOUNT_ID="XXXXXXXXXX"
REGION="eu-west-1"

docker build -t "$ECR_REPO" .

# BUG FIX: 'aws ecr get-login' only PRINTS a 'docker login ...' command; it
# must be executed for the subsequent push to be authenticated.
$(aws ecr get-login --no-include-email --region "${REGION}")

docker tag "$ECR_REPO:latest" "$AWS_ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/$ECR_REPO:latest"
# BUG FIX: the push previously interpolated the undefined $AWS_REGION variable
# (the script defines REGION), producing a malformed registry host.
docker push "$AWS_ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/$ECR_REPO:latest"
|
/*
* Merlin
*
* API Guide for accessing Merlin's model management, deployment, and serving functionalities
*
* API version: 0.14.0
* Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git)
*/
package client
import (
"context"
"io/ioutil"
"net/http"
"net/url"
"strings"
"github.com/antihax/optional"
)
// Linger please
var (
_ context.Context
)
type LogApiService service
/*
LogApiService Retrieve log from a container
* @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background().
* @param cluster
* @param namespace
* @param componentType
* @param optional nil or *LogApiLogsGetOpts - Optional Parameters:
* @param "ProjectName" (optional.String) -
* @param "ModelId" (optional.String) -
* @param "ModelName" (optional.String) -
* @param "VersionId" (optional.String) -
* @param "PredictionJobId" (optional.String) -
* @param "ContainerName" (optional.String) -
* @param "Prefix" (optional.String) -
* @param "Follow" (optional.String) -
* @param "Previous" (optional.String) -
* @param "SinceSeconds" (optional.String) -
* @param "SinceTime" (optional.String) -
* @param "Timestamps" (optional.String) -
* @param "TailLines" (optional.String) -
* @param "LimitBytes" (optional.String) -
*/
// LogApiLogsGetOpts holds the optional query parameters accepted by
// LogApiService.LogsGet. Fields left unset are omitted from the request; the
// corresponding query-parameter names are listed in the comment block above.
type LogApiLogsGetOpts struct {
	ProjectName     optional.String
	ModelId         optional.String
	ModelName       optional.String
	VersionId       optional.String
	PredictionJobId optional.String
	ContainerName   optional.String
	Prefix          optional.String
	Follow          optional.String
	Previous        optional.String
	SinceSeconds    optional.String
	SinceTime       optional.String
	Timestamps      optional.String
	TailLines       optional.String
	LimitBytes      optional.String
}
// LogsGet retrieves the log of a container via GET {BasePath}/logs.
//
// cluster, namespace and componentType are mandatory query parameters; every
// field set in localVarOptionals is forwarded as an additional query
// parameter. On a non-2xx status a GenericSwaggerError carrying the response
// body is returned. Note: the response body has already been read and closed
// by the time this function returns.
func (a *LogApiService) LogsGet(ctx context.Context, cluster string, namespace string, componentType string, localVarOptionals *LogApiLogsGetOpts) (*http.Response, error) {
	var (
		localVarHttpMethod = strings.ToUpper("Get")
		localVarPostBody   interface{}
		localVarFileName   string
		localVarFileBytes  []byte
	)

	// create path and map variables
	localVarPath := a.client.cfg.BasePath + "/logs"

	localVarHeaderParams := make(map[string]string)
	localVarQueryParams := url.Values{}
	localVarFormParams := url.Values{}

	// Optional query parameters — only added when explicitly set by the caller.
	if localVarOptionals != nil && localVarOptionals.ProjectName.IsSet() {
		localVarQueryParams.Add("project_name", parameterToString(localVarOptionals.ProjectName.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.ModelId.IsSet() {
		localVarQueryParams.Add("model_id", parameterToString(localVarOptionals.ModelId.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.ModelName.IsSet() {
		localVarQueryParams.Add("model_name", parameterToString(localVarOptionals.ModelName.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.VersionId.IsSet() {
		localVarQueryParams.Add("version_id", parameterToString(localVarOptionals.VersionId.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.PredictionJobId.IsSet() {
		localVarQueryParams.Add("prediction_job_id", parameterToString(localVarOptionals.PredictionJobId.Value(), ""))
	}
	// Mandatory query parameters.
	localVarQueryParams.Add("cluster", parameterToString(cluster, ""))
	localVarQueryParams.Add("namespace", parameterToString(namespace, ""))
	localVarQueryParams.Add("component_type", parameterToString(componentType, ""))
	if localVarOptionals != nil && localVarOptionals.ContainerName.IsSet() {
		localVarQueryParams.Add("container_name", parameterToString(localVarOptionals.ContainerName.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.Prefix.IsSet() {
		localVarQueryParams.Add("prefix", parameterToString(localVarOptionals.Prefix.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.Follow.IsSet() {
		localVarQueryParams.Add("follow", parameterToString(localVarOptionals.Follow.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.Previous.IsSet() {
		localVarQueryParams.Add("previous", parameterToString(localVarOptionals.Previous.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.SinceSeconds.IsSet() {
		localVarQueryParams.Add("since_seconds", parameterToString(localVarOptionals.SinceSeconds.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.SinceTime.IsSet() {
		localVarQueryParams.Add("since_time", parameterToString(localVarOptionals.SinceTime.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.Timestamps.IsSet() {
		localVarQueryParams.Add("timestamps", parameterToString(localVarOptionals.Timestamps.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.TailLines.IsSet() {
		localVarQueryParams.Add("tail_lines", parameterToString(localVarOptionals.TailLines.Value(), ""))
	}
	if localVarOptionals != nil && localVarOptionals.LimitBytes.IsSet() {
		localVarQueryParams.Add("limit_bytes", parameterToString(localVarOptionals.LimitBytes.Value(), ""))
	}

	// to determine the Content-Type header
	localVarHttpContentTypes := []string{}

	// set Content-Type header
	localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)
	if localVarHttpContentType != "" {
		localVarHeaderParams["Content-Type"] = localVarHttpContentType
	}

	// to determine the Accept header
	localVarHttpHeaderAccepts := []string{}

	// set Accept header
	localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)
	if localVarHttpHeaderAccept != "" {
		localVarHeaderParams["Accept"] = localVarHttpHeaderAccept
	}

	if ctx != nil {
		// API Key Authentication: forwarded as the Authorization header,
		// optionally prefixed (e.g. "Bearer <key>").
		if auth, ok := ctx.Value(ContextAPIKey).(APIKey); ok {
			var key string
			if auth.Prefix != "" {
				key = auth.Prefix + " " + auth.Key
			} else {
				key = auth.Key
			}
			localVarHeaderParams["Authorization"] = key
		}
	}

	r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)
	if err != nil {
		return nil, err
	}

	localVarHttpResponse, err := a.client.callAPI(r)
	if err != nil || localVarHttpResponse == nil {
		return localVarHttpResponse, err
	}

	// Drain and close the body so the error path can surface it.
	localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)
	localVarHttpResponse.Body.Close()
	if err != nil {
		return localVarHttpResponse, err
	}

	if localVarHttpResponse.StatusCode >= 300 {
		newErr := GenericSwaggerError{
			body:  localVarBody,
			error: localVarHttpResponse.Status,
		}
		return localVarHttpResponse, newErr
	}

	return localVarHttpResponse, nil
}
|
import { NextApiRequest, NextApiResponse } from "next"
import { tracktryRes } from "../../lib/tracktry"
/**
 * API route: fires a carrier-detection request at Tracktry for a hard-coded
 * tracking number and immediately answers 200 {authorized: true}; the
 * Tracktry response itself is only logged, never returned to the client.
 */
export default async function (req: NextApiRequest, res: NextApiResponse) {
  console.log("hi")
  try {
    const postData = { tracking_number: "RA018458445JP" }
    const url = "http://api.tracktry.com/v1/carriers/detect"
    // Fire-and-forget: the callback only logs the carrier-detection result.
    tracktryRes(url, postData, "POST", function (data) {
      console.log(data)
    })
    res.status(200)
    res.json({ authorized: true })
  } catch (err) {
    res.status(500)
    // 'err' is 'unknown' under strict TS — narrow before reading .message.
    res.json({ authorized: false, error: err instanceof Error ? err.message : String(err) })
  }
}
|
<gh_stars>0
package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
 * Sub-venue detail returned by venue queries.
 *
 * @author auto create
 * @since 1.0, 2021-11-29 20:37:19
 */
public class SubVenueQueryInfo extends AlipayObject {

	private static final long serialVersionUID = 4216147935215126745L;

	/**
	 * Admission requirements
	 */
	@ApiField("admission_requirement")
	private String admissionRequirement;

	/**
	 * Announcement
	 */
	@ApiField("announcement")
	private String announcement;

	/**
	 * Whether the venue is bookable, Y/N (defaults to bookable when omitted)
	 */
	@ApiField("bookable")
	private String bookable;

	/**
	 * Venue description
	 */
	@ApiField("desc")
	private String desc;

	/**
	 * Equipment rental information
	 */
	@ApiField("equipment_rental")
	private String equipmentRental;

	/**
	 * Facility list. Venue facility codes: 1 flooring, 2 lighting, 3 showers,
	 * 4 catering, 5 WiFi, 6 locker room, 7 power-bank rental, 8 parking,
	 * 9 rest area
	 */
	@ApiListField("facility_list")
	@ApiField("number")
	private List<Long> facilityList;

	/**
	 * Venue name
	 */
	@ApiField("name")
	private String name;

	/**
	 * Opening hours; falls back to the main venue's when omitted
	 */
	@ApiField("opening_hours")
	private String openingHours;

	/**
	 * Service provider's own venue ID
	 */
	@ApiField("out_sub_venue_id")
	private String outSubVenueId;

	/**
	 * Payee's Alipay account
	 */
	@ApiField("payee_account")
	private String payeeAccount;

	/**
	 * Payment collection method:
	 * "account"/empty — collect via the Alipay account;
	 * "smid" — collect via smid
	 */
	@ApiField("payment_method")
	private String paymentMethod;

	/**
	 * Payment type (indirect / direct)
	 */
	@ApiField("payment_type")
	private String paymentType;

	/**
	 * Contact phone numbers, comma-separated; falls back to the main venue's
	 * when omitted
	 */
	@ApiListField("phone")
	@ApiField("string")
	private List<String> phone;

	/**
	 * Venue picture URLs, at most 5
	 */
	@ApiListField("picture_list")
	@ApiField("string")
	private List<String> pictureList;

	/**
	 * URL of the venue's main poster image
	 */
	@ApiField("poster")
	private String poster;

	/**
	 * Product types sold by the venue, comma-separated:
	 * calendar — price calendar;
	 * ticket — tickets;
	 * course — courses
	 */
	@ApiListField("product_type_list")
	@ApiField("string")
	private List<String> productTypeList;

	/**
	 * Promotion information
	 */
	@ApiField("promotion")
	private String promotion;

	/**
	 * Alipay sub-venue ID
	 */
	@ApiField("sub_venue_id")
	private String subVenueId;

	/**
	 * Sub-venue PID
	 */
	@ApiField("sub_venue_pid")
	private String subVenuePid;

	/**
	 * Sub-venue merchant secondary smid (required when payment_method is "smid")
	 */
	@ApiField("sub_venue_smid")
	private String subVenueSmid;

	/**
	 * Current venue status: infosec-audit (security review in progress),
	 * infosec-unpass (security review failed), cloud-audit (cloud acceptance
	 * in progress), cloud-unpass (cloud acceptance failed), online (listed),
	 * offline (delisted), manual-offline (manually delisted)
	 */
	@ApiField("sub_venue_status")
	private String subVenueStatus;

	/**
	 * Tag list
	 */
	@ApiListField("tag_list")
	@ApiField("string")
	private List<String> tagList;

	/**
	 * Training information
	 */
	@ApiField("training")
	private String training;

	/**
	 * Venue type code:
	 * 01 football; 02 basketball; 03 table tennis; 04 badminton; 05 billiards;
	 * 06 archery; 07 dada ball; 08 swimming; 09 tennis; 10 climbing;
	 * 11 karate; 12 taekwondo; 14 yoga; 15 boxing; 16 dance;
	 * 17 rhythmic gymnastics; 18 tai chi; 19 fencing; 20 water sports;
	 * 21 skiing; 22 fitness; 23 roller skating; 24 volleyball; 25 gateball;
	 * 00 other sports
	 */
	@ApiField("venue_type")
	private String venueType;

	/**
	 * Membership card information
	 */
	@ApiField("vip")
	private String vip;

	public String getAdmissionRequirement() {
		return this.admissionRequirement;
	}
	public void setAdmissionRequirement(String admissionRequirement) {
		this.admissionRequirement = admissionRequirement;
	}
	public String getAnnouncement() {
		return this.announcement;
	}
	public void setAnnouncement(String announcement) {
		this.announcement = announcement;
	}
	public String getBookable() {
		return this.bookable;
	}
	public void setBookable(String bookable) {
		this.bookable = bookable;
	}
	public String getDesc() {
		return this.desc;
	}
	public void setDesc(String desc) {
		this.desc = desc;
	}
	public String getEquipmentRental() {
		return this.equipmentRental;
	}
	public void setEquipmentRental(String equipmentRental) {
		this.equipmentRental = equipmentRental;
	}
	public List<Long> getFacilityList() {
		return this.facilityList;
	}
	public void setFacilityList(List<Long> facilityList) {
		this.facilityList = facilityList;
	}
	public String getName() {
		return this.name;
	}
	public void setName(String name) {
		this.name = name;
	}
	public String getOpeningHours() {
		return this.openingHours;
	}
	public void setOpeningHours(String openingHours) {
		this.openingHours = openingHours;
	}
	public String getOutSubVenueId() {
		return this.outSubVenueId;
	}
	public void setOutSubVenueId(String outSubVenueId) {
		this.outSubVenueId = outSubVenueId;
	}
	public String getPayeeAccount() {
		return this.payeeAccount;
	}
	public void setPayeeAccount(String payeeAccount) {
		this.payeeAccount = payeeAccount;
	}
	public String getPaymentMethod() {
		return this.paymentMethod;
	}
	public void setPaymentMethod(String paymentMethod) {
		this.paymentMethod = paymentMethod;
	}
	public String getPaymentType() {
		return this.paymentType;
	}
	public void setPaymentType(String paymentType) {
		this.paymentType = paymentType;
	}
	public List<String> getPhone() {
		return this.phone;
	}
	public void setPhone(List<String> phone) {
		this.phone = phone;
	}
	public List<String> getPictureList() {
		return this.pictureList;
	}
	public void setPictureList(List<String> pictureList) {
		this.pictureList = pictureList;
	}
	public String getPoster() {
		return this.poster;
	}
	public void setPoster(String poster) {
		this.poster = poster;
	}
	public List<String> getProductTypeList() {
		return this.productTypeList;
	}
	public void setProductTypeList(List<String> productTypeList) {
		this.productTypeList = productTypeList;
	}
	public String getPromotion() {
		return this.promotion;
	}
	public void setPromotion(String promotion) {
		this.promotion = promotion;
	}
	public String getSubVenueId() {
		return this.subVenueId;
	}
	public void setSubVenueId(String subVenueId) {
		this.subVenueId = subVenueId;
	}
	public String getSubVenuePid() {
		return this.subVenuePid;
	}
	public void setSubVenuePid(String subVenuePid) {
		this.subVenuePid = subVenuePid;
	}
	public String getSubVenueSmid() {
		return this.subVenueSmid;
	}
	public void setSubVenueSmid(String subVenueSmid) {
		this.subVenueSmid = subVenueSmid;
	}
	public String getSubVenueStatus() {
		return this.subVenueStatus;
	}
	public void setSubVenueStatus(String subVenueStatus) {
		this.subVenueStatus = subVenueStatus;
	}
	public List<String> getTagList() {
		return this.tagList;
	}
	public void setTagList(List<String> tagList) {
		this.tagList = tagList;
	}
	public String getTraining() {
		return this.training;
	}
	public void setTraining(String training) {
		this.training = training;
	}
	public String getVenueType() {
		return this.venueType;
	}
	public void setVenueType(String venueType) {
		this.venueType = venueType;
	}
	public String getVip() {
		return this.vip;
	}
	public void setVip(String vip) {
		this.vip = vip;
	}

}
|
// Two players alternate rolling a six-sided die; the first to roll a 6 wins.
let player1 = 0;
let player2 = 0;
let turn = 0;

while (player1 !== 6 && player2 !== 6) {
  // Uniform roll in 1..6.
  const roll = Math.floor(Math.random() * 6) + 1;
  if (turn === 0) {
    player1 = roll;
    console.log("Player1 rolled a " + roll);
  } else {
    player2 = roll;
    console.log("Player2 rolled a " + roll);
  }
  // Alternate whose turn it is.
  turn = (turn + 1) % 2;
}

console.log(player1 === 6 ? "Player 1 wins!" : "Player 2 wins!");
|
<filename>docs/theme/components/shared/Header/HeaderDropDown/Basic.js
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React, { useEffect, useState, useRef } from "react";
import styled from "styled-components";
const Basic = props => {
let { options} = props;
const [toggle, setToggle] = useState(false);
const dropdownRef = useRef();
useEffect(() => {
const setToggleOnEvent = event => {
if (!(event.target.classList.contains('dropbtn') || event.target.classList.contains('dropdown-item')) && dropdownRef.current.classList.contains('show')) {
setToggle(false);
}
};
window.addEventListener("mousedown", setToggleOnEvent);
return () => {
window.removeEventListener("mousedown", setToggleOnEvent);
};
}, []);
const ComponentStyle = styled.div`
> div {
display: inline-block;
width: 160px;
}
label {
margin-right: 5px;
}
`;
const DropDownComp = styled.div`
.dropbtn {
background-color: #ffffff;
color: rgb(45, 55, 71);
padding: 5px;
font-size: 14px;
border: 1px solid rgb(204, 204, 204);
cursor: pointer;
width: 100%;
text-align: left;
border-radius: 4px;
&:hover, &:focus {
border: 1px solid rgb(55, 187, 155);
}
}
.dropdown {
position: relative;
display: inline-block;
width: 100%;
}
.dropdown-content {
margin-top: 10px;
display: none;
position: absolute;
background-color: #ffffff;
width: 160px;
overflow: auto;
max-height: 300px;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
z-index: 1;
.dropdown-item {
padding: 5px 12px;
&:hover {
background-color: rgba(55, 187, 155, 0.1);;
}
}
}
.dropdown-content a {
padding: 12px 16px;
text-decoration: none;
display: block;
}
.dropdown a:hover {background-color: #ddd;}
.show {display: block;}
i {
color: rgb(45, 55, 71);
border: solid black;
border-width: 0 1px 1px 0;
display: inline-block;
padding: 3px;
float: right;
}
.up {
margin-top: 5px;
transform: rotate(-135deg);
-webkit-transform: rotate(-135deg);
}
.down {
margin-top: 3px;
transform: rotate(45deg);
-webkit-transform: rotate(45deg);
}
`;
return (
<ComponentStyle>
<DropDownComp>
<div className="dropdown">
<button className="dropbtn" onClick={() => setToggle(c => !c)}>Versions <i className={`${toggle ? 'up' : 'down'}`}></i></button>
<div ref={dropdownRef} id="myDropdown" className={`dropdown-content ${toggle ? 'show' : ''}`}>
{
options.map(function(opt, i) {
return <div className="dropdown-item" key={i} data-href={opt.href} onClick={
event => {
const dataHref = event.currentTarget.dataset.href;
const href = `${window.location.origin}${dataHref}`;
window.open(href, "_blank");
}}>
{opt.title}
</div>
})
}
</div>
</div>
</DropDownComp>
</ComponentStyle>
);
};
Basic.propTypes = {};
export default Basic;
|
#!/usr/bin/env python
import click as ck
import numpy as np
import pandas as pd
from tensorflow.keras.models import load_model, Model
from subprocess import Popen, PIPE
import time
from utils import Ontology, NAMESPACES, FUNC_DICT
from aminoacids import to_onehot
import math
from collections import Counter
# Maximum sequence length fed to the model; get_data() splits longer sequences
# into overlapping MAXLEN windows (stride MAXLEN - 128).
MAXLEN = 2000
@ck.command()
@ck.option('--model-file', '-mf', default='data-cafa/model.h5', help='Tensorflow model file')
@ck.option('--terms-file', '-tf', default='data-cafa/terms.pkl', help='List of predicted terms')
# NOTE(review): the short flag '-tf' is declared twice (terms-file above and
# annotations-file below) — confirm click resolves this the way you expect.
@ck.option('--annotations-file', '-tf', default='data-cafa/swissprot.pkl', help='Experimental annotations')
def main(model_file, terms_file, annotations_file):
    """Load the trained CNN, restrict the dataset to proteins annotated with
    one GO term, and print the model summary.

    Large portions of this function sit after early ``return`` statements and
    are therefore unreachable (see NOTE below).
    """
    # Ontology with relations; used to compute per-term information content.
    go_rels = Ontology('data-cafa/go.obo', with_rels=True)
    terms_df = pd.read_pickle(terms_file)
    terms = terms_df['terms'].values.flatten()
    # Map each GO term to its output index in the model.
    terms_dict = {v: k for k, v in enumerate(terms)}
    df = pd.read_pickle(annotations_file)
    annotations = df['annotations'].values
    annotations = list(map(lambda x: set(x), annotations))
    go_rels.calculate_ic(annotations)
    # Keep only proteins annotated with this single GO term of interest.
    go_id = 'GO:0008047'
    go_idx = terms_dict[go_id]
    # df = df[df['orgs'] == '559292']
    index = []
    seq_lengths = []
    for i, row in enumerate(df.itertuples()):
        if go_id in row.annotations:
            index.append(i)
            seq_lengths.append(len(row.sequences))
    df = df.iloc[index]
    annotations = df['annotations'].values
    annotations = list(map(lambda x: set(x), annotations))
    prot_ids = df['proteins'].values
    # One-hot encode (and window) the selected sequences.
    ids, data = get_data(df['sequences'])
    # for i, row in df.iterrows():
    #     ipros = '\t'.join(row['interpros'])
    #     print(f'{row["proteins"]}\t{ipros}')
    # Load CNN model
    model = load_model(model_file)
    model.summary()
    return
    # NOTE(review): everything below is unreachable because of the early
    # return above (and a second one further down) — presumably kept for
    # ad-hoc filter-inspection experiments; confirm before deleting.
    int_model = Model(inputs=model.input, outputs=model.layers[-2].output)
    dense = model.layers[-1]
    W = dense.get_weights()[0][:, go_idx]
    b = dense.get_weights()[1][go_idx]
    print(np.argsort(-W), b)
    preds = int_model.predict(data, batch_size=100, verbose=0)
    filters = np.argsort(preds, axis=1)
    filter_cnt = Counter()
    for f in filters:
        filter_cnt.update(f[:20])
    # Index-to-amino-acid alphabet; '*' marks padding.
    AALETTER = np.array([
        '*', 'A', 'R', 'N', 'D', 'C', 'Q', 'E', 'G', 'H', 'I',
        'L', 'K', 'M', 'F', 'P', 'S', 'T', 'W', 'Y', 'V'])
    print(filter_cnt)
    return
    for f_id, cnt in filter_cnt.most_common(10):
        conv_id = f_id // 512
        fl_id = f_id % 512
        conv_layer = model.layers[conv_id + 1]
        weights = conv_layer.get_weights()
        w1 = weights[0]
        w2 = weights[1]
        motif = ''.join(AALETTER[np.argmax(w1[:, :, fl_id], axis=1)])
        print(f'>{f_id}')
        print(motif)
        conv_model = Model(inputs=model.input, outputs=conv_layer.output)
        preds = conv_model.predict(data, batch_size=100, verbose=0)
        f_out = preds[:, :, fl_id]
        f_length = conv_layer.kernel_size[0]
        starts = np.argmax(f_out, axis=1)
        ends = starts + f_length
        for i in range(starts.shape[0]):
            seq = data[i, starts[i]:ends[i], :]
            seq_ind = np.argmax(seq, axis=1)
            motif = ''.join(AALETTER[seq_ind])
            print(f'>{f_id}_{i}')
            print(motif.replace('*', ''))
    # for l in range(16):
    #     conv1 = model.layers[l + 1]
    #     weights = conv1.get_weights()
    #     w1 = weights[0]
    #     w2 = weights[1]
    #     for i in range(512):
    #         motif = ''.join(AALETTER[np.argmax(w1[:, :, i], axis=1)])
    #         print(f'>{l}_{i}')
    #         print(motif)
def get_data(sequences):
    """One-hot encode sequences, windowing any longer than MAXLEN.

    Sequences over MAXLEN are cut into overlapping MAXLEN windows
    (stride MAXLEN - 128); each window remembers which input sequence it
    came from.

    Returns:
        (ids, data): ids[i] is the index of the source sequence for row i;
        data is a float32 array of shape (n_windows, MAXLEN, 21).
    """
    chunks = []
    owners = []
    for idx, seq in enumerate(sequences):
        if len(seq) > MAXLEN:
            start = 0
            while start < len(seq):
                chunks.append(seq[start: start + MAXLEN])
                owners.append(idx)
                start += MAXLEN - 128
        else:
            chunks.append(seq)
            owners.append(idx)
    data = np.zeros((len(chunks), MAXLEN, 21), dtype=np.float32)
    for row, chunk in enumerate(chunks):
        data[row, :, :] = to_onehot(chunk)
    return owners, data
def evaluate_annotations(go, real_annots, pred_annots):
    """Protein-centric evaluation of predicted GO annotations.

    Args:
        go: ontology object providing ``get_ic(go_id)`` (information content).
        real_annots: list of sets of true GO terms, one set per protein.
        pred_annots: list of sets of predicted GO terms, aligned with
            ``real_annots``.

    Returns:
        (f, p, r, s): F-measure, precision (averaged over proteins with at
        least one prediction), recall, and semantic distance
        sqrt(ru^2 + mi^2) built from remaining uncertainty and
        misinformation. Proteins with no true annotations are skipped;
        if none remain, returns (0, 0, 0, 1000).
    """
    total = 0
    p_total = 0
    p = 0.0
    r = 0.0
    ru = 0.0  # remaining uncertainty: IC of missed true terms
    mi = 0.0  # misinformation: IC of wrongly predicted terms
    for real, pred in zip(real_annots, pred_annots):
        if len(real) == 0:
            continue
        tp = set(real).intersection(set(pred))
        fp = pred - tp
        fn = real - tp
        for go_id in fp:
            mi += go.get_ic(go_id)
        for go_id in fn:
            ru += go.get_ic(go_id)
        total += 1
        r += len(tp) / (1.0 * (len(tp) + len(fn)))
        if len(pred) > 0:
            p_total += 1
            p += len(tp) / (1.0 * (len(tp) + len(fp)))
    if total == 0:
        return 0, 0, 0, 1000
    ru /= total
    mi /= total
    r /= total
    if p_total > 0:
        p /= p_total
    f = 2 * p * r / (p + r) if p + r > 0 else 0.0
    s = math.sqrt(ru * ru + mi * mi)
    return f, p, r, s
if __name__ == '__main__':
    main()  # click parses CLI options and invokes the command
|
const Request = require('../Request');

// CRUD helpers for a project's task-assignment endpoints. Consumers mix this
// object into a resource that supplies `baseUri`, `name` and `options`
// (all read via `this`). Each method dispatches through Request.
const assignementBase = {
  // Internal: build '<baseUri><projectID>/<name>[/<taskAssignmentsID>]'.
  // Deduplicates the URL construction previously repeated in every method.
  _url(projectID, taskAssignmentsID) {
    let url = this.baseUri + projectID + '/' + this.name;
    if (taskAssignmentsID !== undefined) {
      url += '/' + taskAssignmentsID;
    }
    return url;
  },

  // List every task assignment of a project.
  list(projectID, cb) {
    this.options.url = this._url(projectID);
    new Request(this.options, cb);
  },

  // Fetch a single task assignment.
  retrieve(projectID, taskAssignmentsID, cb) {
    this.options.url = this._url(projectID, taskAssignmentsID);
    new Request(this.options, cb);
  },

  // Create a task assignment from `params`.
  create(projectID, params, cb) {
    this.options.url = this._url(projectID);
    this.options.method = 'POST';
    this.options.body = JSON.stringify(params);
    new Request(this.options, cb);
  },

  // Partially update an existing task assignment.
  update(projectID, taskAssignmentsID, params, cb) {
    this.options.url = this._url(projectID, taskAssignmentsID);
    this.options.method = 'PATCH';
    this.options.body = JSON.stringify(params);
    new Request(this.options, cb);
  },

  // Remove a task assignment.
  delete(projectID, taskAssignmentsID, cb) {
    this.options.url = this._url(projectID, taskAssignmentsID);
    this.options.method = 'DELETE';
    new Request(this.options, cb);
  }
};

module.exports = assignementBase;
|
/* eslint-disable no-param-reassign */
import { GCD } from './gcd';
export default (servings: number, customServings: number, numerator: number, denominator: number): string => {
// If there isn't a denominator.
// We can assume the user wants to display
// the recipe ings as decimals.
if (denominator <= 1) { return parseFloat((numerator * (customServings / servings)).toFixed(3)).toString(); }
// Check if there is a custom serving.
// If there is, we multiple the numerator by the custom servings amount
// and multiple the denominator by the servings amount.
if (servings !== customServings) {
numerator *= customServings;
denominator *= servings;
}
// Get the quotient from the mixed fraction
// so we are only left with a fraction < 1.
const quotient = Math.floor(numerator / denominator);
// The remainder from what is left over.
// Set is as the new numerator.
numerator %= denominator;
// If the numerator zero then return just the quotient
if (numerator === 0) { return quotient.toString(); }
// Get the GCD and reduce the fraction.1
const gcd = GCD(numerator, denominator);
numerator /= gcd;
denominator /= gcd;
// OPT: We should do some math to try and
// round weird fractions to smaller ones.
// For Example, 23/64 -> 3/8
// If the denominator is greater than 8.
// Display as a decimal.
if (denominator > 12) { return parseFloat((quotient + (numerator / denominator)).toFixed(3)).toString(); }
const quotientString = quotient > 0 ? `${quotient.toString()} ` : '';
return `${quotientString}${numerator.toString()}/${denominator.toString()}`;
};
|
#!/bin/bash
# (c) Copyright [2021] Micro Focus or one of its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A script that will generate the ClusterServiceVersion.

set -o errexit
set -o pipefail

# Resolve repo-relative tool paths regardless of the caller's cwd.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
REPO_DIR=$(dirname $SCRIPT_DIR)
OPERATOR_SDK=$REPO_DIR/bin/operator-sdk
KUSTOMIZE=$REPO_DIR/bin/kustomize

# Print usage and exit with failure.
function usage {
    echo "usage: $0 <version> <bundle_metadata_opts>"
    echo
    echo "<version> is the version of the operator."
    echo
    echo "<bundle_metadata_opts> are extra options to pass to"
    echo "'operator-sdk generate bundle'"
    echo
    exit 1
}

OPTIND=1
while getopts "h" opt; do
    case ${opt} in
        h)
            usage
            ;;
        \?)
            echo "Unknown option: -${opt}"
            usage
            ;;
    esac
done

# NOTE(review): with no options given, OPTIND is 1, so this check rejects an
# invocation with exactly two positional args even though only <version> and
# <bundle_metadata_opts> are documented — confirm whether this should be
# "-lt 1" or a third argument is expected.
if [ $(( $# - $OPTIND )) -lt 2 ]
then
    usage
fi

VERSION=${@:$OPTIND:1}
BUNDLE_METADATA_OPTS=${@:$OPTIND+1:1}

set -o xtrace

cd $REPO_DIR
$OPERATOR_SDK generate kustomize manifests -q

# Build a throwaway kustomize overlay that layers the operator image onto the
# generated manifests.
mkdir -p config/overlays/csv
cd config/overlays/csv
cat <<- EOF > kustomization.yaml
bases:
- ../../manifests
EOF
# NOTE(review): OPERATOR_IMG is never set in this script — presumably exported
# by the calling Makefile; verify before running standalone.
$KUSTOMIZE edit set image controller=$OPERATOR_IMG
cd $REPO_DIR
$KUSTOMIZE build config/overlays/csv | $OPERATOR_SDK generate bundle -q --overwrite --version $VERSION $BUNDLE_METADATA_OPTS

# Fill in the placeholders
sed -i "s/CREATED_AT_PLACEHOLDER/$(date +"%FT%H:%M:%SZ")/g" bundle/manifests/verticadb-operator.clusterserviceversion.yaml
sed -i "s+OPERATOR_IMG_PLACEHOLDER+$(make echo-images | grep OPERATOR_IMG | cut -d'=' -f2)+g" bundle/manifests/verticadb-operator.clusterserviceversion.yaml
|
import { Schema, Validator, ValidatorResult } from 'jsonschema';
import values = require('lodash.values');
import { schemas } from './schemas';
/**
* A validator for [JSON-schemas](http://json-schema.org/)
*/
export class SchemaValidator {
private _validator: Validator;
/**
* Instantiates a SchemaValidator instance
*/
constructor() {
this._validator = new Validator();
for (const schema of values(schemas)) {
this._validator.addSchema(schema, schema.id);
}
}
/**
* Add a schema to the validator. All schemas and sub-schemas must be added to
* the validator before the `validate` and `isValid` methods can be called with
* instances of that schema.
* @param schema The schema to add
*/
public addSchema(schema: Schema) {
this._validator.addSchema(schema, schema.id);
}
// In order to validate a complex JS object using jsonschema, we must replace any complex
// sub-types (e.g BigNumber) with a simpler string representation. Since BigNumber and other
// complex types implement the `toString` method, we can stringify the object and
// then parse it. The resultant object can then be checked using jsonschema.
/**
* Validate the JS object conforms to a specific JSON schema
* @param instance JS object in question
* @param schema Schema to check against
* @returns The results of the validation
*/
public validate(instance: any, schema: Schema): ValidatorResult {
const jsonSchemaCompatibleObject = JSON.parse(JSON.stringify(instance));
return this._validator.validate(jsonSchemaCompatibleObject, schema);
}
/**
* Check whether an instance properly adheres to a JSON schema
* @param instance JS object in question
* @param schema Schema to check against
* @returns Whether or not the instance adheres to the schema
*/
public isValid(instance: any, schema: Schema): boolean {
const isValid = this.validate(instance, schema).errors.length === 0;
return isValid;
}
}
|
<reponame>freenet-233/finance-gateway
package pacs008;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
 * Information related to a proxy identification of the account.
 *
 * <p>Java class for the ProxyAccountIdentification1 complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * <complexType name="ProxyAccountIdentification1">
 *   <complexContent>
 *     <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       <sequence>
 *         <element name="Tp" type="{urn:iso:std:iso:20022:tech:xsd:pacs.008.001.10}ProxyAccountType1Choice" minOccurs="0"/>
 *         <element name="Id" type="{urn:iso:std:iso:20022:tech:xsd:pacs.008.001.10}Max2048Text"/>
 *       </sequence>
 *     </restriction>
 *   </complexContent>
 * </complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ProxyAccountIdentification1", namespace = "urn:iso:std:iso:20022:tech:xsd:pacs.008.001.10", propOrder = {
    "tp",
    "id"
})
public class ProxyAccountIdentification1 {

    // Optional proxy type (schema element "Tp").
    @XmlElement(name = "Tp", namespace = "urn:iso:std:iso:20022:tech:xsd:pacs.008.001.10")
    protected ProxyAccountType1Choice tp;

    // Required proxy identification (schema element "Id", Max2048Text).
    @XmlElement(name = "Id", namespace = "urn:iso:std:iso:20022:tech:xsd:pacs.008.001.10", required = true)
    protected String id;

    /**
     * Gets the value of the tp property.
     *
     * @return
     *     possible object is
     *     {@link ProxyAccountType1Choice }
     *
     */
    public ProxyAccountType1Choice getTp() {
        return tp;
    }

    /**
     * Sets the value of the tp property.
     *
     * @param value
     *     allowed object is
     *     {@link ProxyAccountType1Choice }
     *
     */
    public void setTp(ProxyAccountType1Choice value) {
        this.tp = value;
    }

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setId(String value) {
        this.id = value;
    }

}
|
<reponame>DatOneLefty/wot-in-conjugation<filename>conj.js
// Read the verb (and optional pronoun) from the page, then run the conjugator:
// mode 0 renders the full table, mode 1 highlights the row for the pronoun.
function process() {
    const verb = document.getElementById("verb").value;
    const noun = document.getElementById("n").value;
    const mode = noun === "" ? 0 : 1;
    con(mode, verb);
}
// Conjugates a regular Spanish -ar/-er/-ir verb in the present tense and
// writes the resulting HTML table into the #conj element.
//
// type 0 -> render the plain 3x2 table of all six forms.
// type 1 -> render the table with the form for the subject pronoun in the
//           #n input bolded.
//
// Improvements over the previous version (interface unchanged):
//  - the twelve near-identical innerHTML branches are collapsed into one
//    table builder plus a pronoun -> form-index map;
//  - accented spellings "tú" and "él" are accepted in addition to the
//    original unaccented "tu"/"el" (backward-compatible generalization).
function con(type, verb) {
    var ending = verb.slice(-2);
    console.log(ending);
    var stem = verb.slice(0, verb.length - 2);

    // Regular present-tense endings, indexed yo/tú/él/nosotros/vosotros/ellos.
    var endingSets = {
        ar: ["o", "as", "a", "amos", "áis", "an"],
        er: ["o", "es", "e", "emos", "éis", "en"],
        ir: ["o", "es", "e", "imos", "ís", "en"]
    };
    var endings = endingSets[ending];
    if (!endings) {
        document.getElementById('conj').innerHTML = "<font color='red'>The verb needs to end in er, ar, or ir</font>";
        return;
    }

    // Which of the six forms each recognized subject pronoun selects.
    var subjectIndex = {
        "yo": 0, "tu": 1, "tú": 1,
        "el": 2, "él": 2, "ella": 2, "usted": 2,
        "nosotros": 3, "nosotras": 3,
        "vosotros": 4, "vosotras": 4,
        "ellos": 5, "ellas": 5, "ustedes": 5
    };

    // Builds the table in the original layout: rows pair singular/plural
    // forms as (0,3), (1,4), (2,5). The cell at boldIdx (if >= 0) is bolded.
    function renderTable(boldIdx) {
        var rows = [[0, 3], [1, 4], [2, 5]];
        var html = "<table>";
        for (var r = 0; r < rows.length; r++) {
            html += "<tr>";
            for (var c = 0; c < 2; c++) {
                var idx = rows[r][c];
                var form = stem + endings[idx];
                html += (idx === boldIdx)
                    ? "<td><b>" + form + "</b></td>"
                    : "<td>" + form + "</td>";
            }
            html += "</tr>";
        }
        return html + "</table>";
    }

    if (type == 0) {
        document.getElementById("conj").innerHTML = renderTable(-1);
    } else if (type == 1) {
        var noun = document.getElementById("n").value.toLowerCase();
        if (subjectIndex.hasOwnProperty(noun)) {
            document.getElementById("conj").innerHTML = renderTable(subjectIndex[noun]);
        } else {
            document.getElementById("conj").innerHTML = "<font color='red'>Subject not in database. You can only use subjects like yo, usted, vosotros, etc</font>";
        }
    }
}
|
<gh_stars>1-10
# Print a 333,334-character digit string: one leading digit in 1-7
# (so the number never starts with 0) followed by 333,333 digits in 0-7.
import random

digits = [str(random.randrange(1, 8))]
digits.extend(str(random.randrange(0, 8)) for _ in range(333333))
print("".join(digits))
|
-- Fetch every column for defects that are still open and categorized as bugs.
SELECT * FROM Defects
WHERE Status = 'Open'
AND Category = 'Bug'
|
#!/bin/sh
# Launch one Julia server process per CPU core, then block in the
# foreground (e.g. so a container runtime keeps this script alive).
for i in $(seq 0 $(($(nproc --all)-1))); do
julia --threads auto server.jl &
done
# Block on the background servers instead of the previous infinite
# `sleep` loop: the script now exits once every server has exited, so a
# supervisor can notice and restart it rather than idling forever.
wait
|
package sdk
import (
"context"
"errors"
"fmt"
"github.com/chmurakrajowa/terraform-provider-ochk/ochk/sdk/gen/client/firewall_rules_e_w"
"github.com/chmurakrajowa/terraform-provider-ochk/ochk/sdk/gen/models"
"github.com/go-openapi/strfmt"
"net/http"
)
// FirewallEWRulesProxy wraps the generated east-west (EW) firewall rules
// client service, carrying the HTTP client used for every request.
type FirewallEWRulesProxy struct {
httpClient *http.Client
service firewall_rules_e_w.ClientService
}
// Create validates the rule locally and issues the PUT that creates it
// under the given security policy, returning the rule echoed by the API.
func (p *FirewallEWRulesProxy) Create(ctx context.Context, securityPolicyID string, rule *models.DFWRule) (*models.DFWRule, error) {
	if validationErr := rule.Validate(strfmt.Default); validationErr != nil {
		return nil, fmt.Errorf("error while validating firewall EW rule struct: %w", validationErr)
	}

	createParams := &firewall_rules_e_w.DfwRuleCreateUsingPUTParams{
		SecurityPolicyID: securityPolicyID,
		DfwRule:          rule,
		Context:          ctx,
		HTTPClient:       p.httpClient,
	}

	_, createResp, callErr := p.service.DfwRuleCreateUsingPUT(createParams)
	if callErr != nil {
		return nil, fmt.Errorf("error while creating firewall EW rule: %w", callErr)
	}
	if !createResp.Payload.Success {
		return nil, fmt.Errorf("creating firewall EW rule failed: %s", createResp.Payload.Messages)
	}
	return createResp.Payload.DfwRule, nil
}
// Read fetches a single firewall EW rule by ID under the given security
// policy and returns the rule instance from the response payload.
func (p *FirewallEWRulesProxy) Read(ctx context.Context, securityPolicyID string, ruleID string) (*models.DFWRule, error) {
	params := &firewall_rules_e_w.DfwRuleGetUsingGETParams{
		RuleID:           ruleID,
		SecurityPolicyID: securityPolicyID,
		Context:          ctx,
		HTTPClient:       p.httpClient,
	}
	response, err := p.service.DfwRuleGetUsingGET(params)
	if err != nil {
		// Fixed typo in the error message ("firwall" -> "firewall").
		return nil, fmt.Errorf("error while reading firewall EW rule: %w", err)
	}
	if !response.Payload.Success {
		return nil, fmt.Errorf("retrieving firewall EW rule failed: %s", response.Payload.Messages)
	}
	return response.Payload.RuleInstance, nil
}
// Update validates the rule locally, then issues the PUT that updates the
// existing rule (identified by rule.RuleID) under the security policy.
func (p *FirewallEWRulesProxy) Update(ctx context.Context, securityPolicyID string, rule *models.DFWRule) (*models.DFWRule, error) {
	if err := rule.Validate(strfmt.Default); err != nil {
		return nil, fmt.Errorf("error while validating firewall EW rule struct: %w", err)
	}
	params := &firewall_rules_e_w.DfwRuleUpdateUsingPUTParams{
		SecurityPolicyID: securityPolicyID,
		RuleID:           rule.RuleID,
		DfwRule:          rule,
		Context:          ctx,
		HTTPClient:       p.httpClient,
	}
	put, err := p.service.DfwRuleUpdateUsingPUT(params)
	if err != nil {
		return nil, fmt.Errorf("error while updating firewall EW rule: %w", err)
	}
	if !put.Payload.Success {
		// Fixed garbled message ("creating updating EW rule failed").
		return nil, fmt.Errorf("updating firewall EW rule failed: %s", put.Payload.Messages)
	}
	return put.Payload.DfwRule, nil
}
// ListByDisplayName returns the EW rules in the security policy whose
// display name matches the given value.
func (p *FirewallEWRulesProxy) ListByDisplayName(ctx context.Context, securityPolicyID string, displayName string) ([]*models.DFWRule, error) {
	listParams := &firewall_rules_e_w.DfwRuleListUsingGETParams{
		SecurityPolicyID: securityPolicyID,
		DisplayName:      &displayName,
		Context:          ctx,
		HTTPClient:       p.httpClient,
	}

	listResp, callErr := p.service.DfwRuleListUsingGET(listParams)
	if callErr != nil {
		return nil, fmt.Errorf("error while listing firewall EW rule: %w", callErr)
	}
	if !listResp.Payload.Success {
		return nil, fmt.Errorf("listing firewall EW rule failed: %s", listResp.Payload.Messages)
	}
	return listResp.Payload.RuleInstances, nil
}
// List returns every EW rule defined under the given security policy.
func (p *FirewallEWRulesProxy) List(ctx context.Context, securityPolicyID string) ([]*models.DFWRule, error) {
	listParams := &firewall_rules_e_w.DfwRuleListUsingGETParams{
		SecurityPolicyID: securityPolicyID,
		Context:          ctx,
		HTTPClient:       p.httpClient,
	}

	listResp, callErr := p.service.DfwRuleListUsingGET(listParams)
	if callErr != nil {
		return nil, fmt.Errorf("error while listing firewall EW rule: %w", callErr)
	}
	if !listResp.Payload.Success {
		return nil, fmt.Errorf("listing firewall EW rule failed: %s", listResp.Payload.Messages)
	}
	return listResp.Payload.RuleInstances, nil
}
// Exists reports whether the rule can be read: a not-found error maps to
// (false, nil); any other read failure is propagated.
func (p *FirewallEWRulesProxy) Exists(ctx context.Context, securityPolicyID string, ruleID string) (bool, error) {
	_, readErr := p.Read(ctx, securityPolicyID, ruleID)
	if readErr == nil {
		return true, nil
	}
	if IsNotFoundError(readErr) {
		return false, nil
	}
	return false, fmt.Errorf("error while reading firewall EW rule: %w", readErr)
}
// Delete removes an EW rule from the security policy. A BadRequest
// response from the API is translated into NotFoundError so callers can
// treat an already-absent rule as deleted.
func (p *FirewallEWRulesProxy) Delete(ctx context.Context, securityPolicyID string, ruleID string) error {
	deleteParams := &firewall_rules_e_w.DfwRuleDeleteUsingDELETEParams{
		SecurityPolicyID: securityPolicyID,
		RuleID:           ruleID,
		Context:          ctx,
		HTTPClient:       p.httpClient,
	}

	deleteResp, callErr := p.service.DfwRuleDeleteUsingDELETE(deleteParams)
	if callErr != nil {
		var badRequest *firewall_rules_e_w.DfwRuleDeleteUsingDELETEBadRequest
		if errors.As(callErr, &badRequest) {
			return &NotFoundError{Err: callErr}
		}
		return fmt.Errorf("error while deleting firewall EW rule: %w", callErr)
	}
	if !deleteResp.Payload.Success {
		return fmt.Errorf("deleting firewall EW rule failed: %s", deleteResp.Payload.Messages)
	}
	return nil
}
|
const express = require('express');
const router = express.Router();
const multer = require("multer");
const File = require('../../models/pdf_files');
// Base URL the stored files are served from.
const BASE_URL = "https://192.168.109.85:5000/api/pdfs/";

// Multer disk storage: files land in ./upload/ under a timestamped name.
//
// Fixes over the previous version:
//  - the timestamp is taken per upload (it was captured once at module
//    load, so every file shared the same prefix);
//  - the stored filename and the advertised URL now agree (the old code
//    saved "date+name" but built the URL from "date+_+name", so every
//    returned link was broken);
//  - the module-level `file_path` variable is gone; it was shared across
//    concurrent uploads and could attach the wrong URL to a request.
const storage = multer.diskStorage({
    destination: "./upload/",
    filename: function (req, file, cb) {
        // Strip spaces from the original name to keep the URL clean.
        cb(null, Date.now() + "_" + file.originalname.replace(/ +/g, ''));
    }
});

const upload = multer({
    storage: storage,
    limits: { fileSize: 100000000 }, // 100 MB cap
});

// POST /: accepts a single "doc_file" upload, records it, returns its URL.
router.post('/', upload.single("doc_file"), (req, res) => {
    // multer has already stored the file; its final name is on req.file.
    const newUrl = BASE_URL + req.file.filename;
    const newData = new File({
        name: req.file.originalname,
        type: req.file.mimetype.split('/')[1],
        uri: newUrl
    });
    newData.save(() => {
        console.log("Successfully uploaded!", newUrl);
        res.status(200).json({ url: newUrl, status: 200 });
    });
});

module.exports = router;
|
// Lookup table keyed by item name. The values are all empty strings, which
// suggests only key membership matters — NOTE(review): confirm intended use
// against the callers; placeholders may be meant to be filled in later.
const object = {
"Apple": "",
"Orange": "",
"Carrot": ""
};
|
<reponame>CodingMankk/16-BRVAHDemo
package com.oztaking.www.a16_brvahdemo.BRVADLoadMoreDemo;
import android.os.Handler;
import android.os.Looper;
import com.oztaking.www.a16_brvahdemo.BRVADDemo.DataServer;
/***********************************************
 * File name:
 * Author:      OzTaking
 * Purpose:
 * Created:
 * Modified:
 * Notes:
 ***********************************************/
/**
 * Simulated paged network request for the BRVAH load-more demo.
 * Sleeps ~500 ms on a worker thread, then posts either a one-time failure
 * (the first time page 2 is requested) or a page of sample data back to
 * the main thread through the supplied callback.
 */
public class Request extends Thread {
    // Normal number of items returned per page.
    private static final int PAGE_SIZE = 6;
    private int mPage;            // page number being "fetched"
    private RequestCallBack mCallBack;
    private Handler mHandler;     // main-looper handler used to deliver results
    // Demo state shared across all Request instances:
    // alternates page 1 between a full page and a single-item page...
    private static boolean mFirstPageNoMore;
    // ...and makes the first request for page 2 fail exactly once.
    private static boolean mFirstError = true;
    public Request(int page, RequestCallBack callBack) {
        mPage = page;
        mCallBack = callBack;
        mHandler = new Handler(Looper.getMainLooper());
    }
    @Override
    public void run() {
        // Simulate network latency; interruption is deliberately ignored.
        try {Thread.sleep(500);
        }catch (InterruptedException e) {}
        if (mPage == 2 && mFirstError) {
            // One-shot failure on page 2 so the demo exercises the
            // load-more error/retry path.
            mFirstError = false;
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mCallBack.fail(new RuntimeException("fail"));
                }
            });
        } else {
            int size = PAGE_SIZE;
            if (mPage == 1) {
                // Page 1 alternates between PAGE_SIZE items and a single
                // item ("no more data") on successive refreshes.
                if (mFirstPageNoMore) {
                    size = 1;
                }
                mFirstPageNoMore = !mFirstPageNoMore;
                // Re-arm the one-shot page-2 failure after a refresh.
                if (!mFirstError) {
                    mFirstError = true;
                }
            } else if (mPage == 4) {
                // Short page: signals the end of the demo data set.
                size = 1;
            }
            final int dataSize = size;
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mCallBack.success(DataServer.getSampleData(dataSize));
                }
            });
        }
    }
}
|
import * as NS from '../../namespace';
import {makeCommunicationActionCreators} from 'shared/helpers/redux';
// Communication action-creator triples (execute / success / fail) for the
// chat feature, produced by the shared redux helper from three action-type
// strings each.

// Opening a channel.
export const { execute: openChannel, completed: openChannelSuccess, failed: openChannelFailed} =
makeCommunicationActionCreators<NS.IOpenChannel, NS.IOpenChannelSuccess, NS.IOpenChannelFail>(
'CHAT:OPEN_CHANNEL',
'CHAT:OPEN_CHANNEL_SUCCESS',
'CHAT:OPEN_CHANNEL_FAIL',
);

// Closing a channel; mirrors the open-channel triple.
export const { execute: closeChannel, completed: closeChannelSuccess, failed: closeChannelFailed} =
makeCommunicationActionCreators<NS.ICloseChannel, NS.ICloseChannelSuccess, NS.ICloseChannelFail>(
'CHAT:CLOSE_CHANNEL',
'CHAT:CLOSE_CHANNEL_SUCCESS',
'CHAT:CLOSE_CHANNEL_FAIL',
);
|
#!/bin/bash
# Zip the BitRock-built OSX installer .app for the given Exodus version and
# upload the archive to the "exodusdb" Google Code project.
#
# Usage:       upload.sh EXODUS_VERSION
# Environment: GOOGLECODE_PASS must hold the Google Code password.
#
# googlecode_upload.py options (from its --help):
#   -s SUMMARY   short description of the file
#   -p PROJECT   Google Code project name
#   -u USER      Google Code username
#   -w PASSWORD  Google Code password
#   -l LABELS    optional comma-separated labels to attach to the file
EXODUS_VERSION="$1"
if [ -z "$EXODUS_VERSION" ]; then
    echo "usage: $0 EXODUS_VERSION" >&2
    exit 1
fi
echo "EXODUS_VERSION=$EXODUS_VERSION"

# Remember where googlecode_upload.py lives before changing directory.
ORIG_DIRXYZ=$(pwd)
export ORIG_DIRXYZ

ZIP_NAME="exodus-${EXODUS_VERSION}-osx-installer.app.zip"
APP_NAME="exodus-${EXODUS_VERSION}-osx-installer.app"

# Enter BitRock's .app output folder; abort if it is missing
# (the original continued and zipped the wrong directory on failure).
pushd /Applications/BitRock*/output || exit 1

# Replace any zip left over from a previous run.
if [ -f "$ZIP_NAME" ]; then
    rm "$ZIP_NAME"
fi

# Build the new archive.
zip -r "$ZIP_NAME" "$APP_NAME"/*

# Show the size of what we are about to upload.
ls -l "$ZIP_NAME"

# Upload to Google Code.
"${ORIG_DIRXYZ}/googlecode_upload.py" \
    -s "Mac OSX 64bit Snow Leopard" \
    -p "exodusdb" \
    -u "neosys.com@gmail.com" \
    -w "${GOOGLECODE_PASS}" \
    -l "Featured,Type-Package,OpSys-OSX" \
    "$ZIP_NAME"

# Return to wherever we started.
popd
|
/*******************************************************/
/* "C" Language Integrated Production System */
/* */
/* CLIPS Version 6.30 08/22/14 */
/* */
/* */
/*******************************************************/
/*************************************************************/
/* Purpose: */
/* */
/* Principal Programmer(s): */
/* <NAME> */
/* */
/* Contributing Programmer(s): */
/* */
/* Revision History: */
/* 6.23: Corrected compilation errors for files */
/* generated by constructs-to-c. DR0861 */
/* */
/* 6.24: Renamed BOOLEAN macro type to intBool. */
/* */
/* 6.30: Removed conditional code for unsupported */
/* compilers/operating systems (IBM_MCW, */
/* MAC_MCW, and IBM_TBC). */
/* */
/* Changed integer type/precision. */
/* */
/* Added const qualifiers to remove C++ */
/* deprecation warnings. */
/* */
/* Converted API macros to function calls. */
/* */
/* Fixed linkage issue when DEBUGGING_FUNCTIONS */
/* is set to 0 and PROFILING_FUNCTIONS is set to */
/* 1. */
/* */
/*************************************************************/
#ifndef _H_genrccom
#define _H_genrccom

/* Required CLIPS core headers, each guarded against double inclusion. */
#ifndef _H_constrct
#include "constrct.h"
#endif
#ifndef _H_cstrccom
#include "cstrccom.h"
#endif
#ifndef _H_evaluatn
#include "evaluatn.h"
#endif
#ifndef _H_moduldef
#include "moduldef.h"
#endif
#ifndef _H_genrcfun
#include "genrcfun.h"
#endif
#ifndef _H_symbol
#include "symbol.h"
#endif

/* LOCALE expands to nothing inside the implementation file
   (_GENRCCOM_SOURCE_) and to 'extern' everywhere else — the usual
   single-definition linkage pattern for this codebase. */
#ifdef LOCALE
#undef LOCALE
#endif
#ifdef _GENRCCOM_SOURCE_
#define LOCALE
#else
#define LOCALE extern
#endif

/* Module setup plus defgeneric/defmethod lookup, iteration and deletion. */
LOCALE void SetupGenericFunctions(void *);
LOCALE void *EnvFindDefgeneric(void *,const char *);
LOCALE DEFGENERIC *LookupDefgenericByMdlOrScope(void *,const char *);
LOCALE DEFGENERIC *LookupDefgenericInScope(void *,const char *);
LOCALE void *EnvGetNextDefgeneric(void *,void *);
LOCALE long EnvGetNextDefmethod(void *,void *,long);
LOCALE int EnvIsDefgenericDeletable(void *,void *);
LOCALE int EnvIsDefmethodDeletable(void *,void *,long);
LOCALE void UndefgenericCommand(void *);
LOCALE void *GetDefgenericModuleCommand(void *);
LOCALE void UndefmethodCommand(void *);
LOCALE DEFMETHOD *GetDefmethodPointer(void *,long);
LOCALE intBool EnvUndefgeneric(void *,void *);
LOCALE intBool EnvUndefmethod(void *,void *,long);

/* (type) command is provided here only when COOL is not built. */
#if ! OBJECT_SYSTEM
LOCALE void TypeCommand(void *,DATA_OBJECT *);
#endif

/* Needed by both debugging and profiling builds (see 6.30 linkage fix
   in the file header). */
#if DEBUGGING_FUNCTIONS || PROFILING_FUNCTIONS
LOCALE void EnvGetDefmethodDescription(void *,char *,size_t,void *,long);
#endif

/* Watch flags, pretty-print and listing commands: debugging builds only. */
#if DEBUGGING_FUNCTIONS
LOCALE unsigned EnvGetDefgenericWatch(void *,void *);
LOCALE void EnvSetDefgenericWatch(void *,unsigned,void *);
LOCALE unsigned EnvGetDefmethodWatch(void *,void *,long);
LOCALE void EnvSetDefmethodWatch(void *,unsigned,void *,long);
LOCALE void PPDefgenericCommand(void *);
LOCALE void PPDefmethodCommand(void *);
LOCALE void ListDefmethodsCommand(void *);
LOCALE const char *EnvGetDefmethodPPForm(void *,void *,long);
LOCALE void ListDefgenericsCommand(void *);
LOCALE void EnvListDefgenerics(void *,const char *,struct defmodule *);
LOCALE void EnvListDefmethods(void *,const char *,void *);
#endif

/* Queries over generics/methods and their restrictions. */
LOCALE void GetDefgenericListFunction(void *,DATA_OBJECT *);
LOCALE void EnvGetDefgenericList(void *,DATA_OBJECT *,struct defmodule *);
LOCALE void GetDefmethodListCommand(void *,DATA_OBJECT *);
LOCALE void EnvGetDefmethodList(void *,void *,DATA_OBJECT *);
LOCALE void GetMethodRestrictionsCommand(void *,DATA_OBJECT *);
LOCALE void EnvGetMethodRestrictions(void *,void *,long,DATA_OBJECT *);
LOCALE SYMBOL_HN *GetDefgenericNamePointer(void *);
LOCALE void SetNextDefgeneric(void *,void *);
LOCALE const char *EnvDefgenericModule(void *,void *);
LOCALE const char *EnvGetDefgenericName(void *,void *);
LOCALE const char *EnvGetDefgenericPPForm(void *,void *);
LOCALE SYMBOL_HN *EnvGetDefgenericNamePointer(void *,void *);
LOCALE void EnvSetDefgenericPPForm(void *,void *,const char *);

/* Non-Env legacy wrappers, available only when environment globals are
   enabled at build time. */
#if ALLOW_ENVIRONMENT_GLOBALS
LOCALE void SetDefgenericPPForm(void *,const char *);
LOCALE const char *DefgenericModule(void *);
LOCALE void *FindDefgeneric(const char *);
LOCALE void GetDefgenericList(DATA_OBJECT *,struct defmodule *);
LOCALE const char *GetDefgenericName(void *);
LOCALE const char *GetDefgenericPPForm(void *);
LOCALE void *GetNextDefgeneric(void *);
LOCALE int IsDefgenericDeletable(void *);
LOCALE intBool Undefgeneric(void *);
LOCALE void GetDefmethodList(void *,DATA_OBJECT_PTR);
LOCALE void GetMethodRestrictions(void *,long,DATA_OBJECT *);
LOCALE long GetNextDefmethod(void *,long );
LOCALE int IsDefmethodDeletable(void *,long );
LOCALE intBool Undefmethod(void *,long );
#if DEBUGGING_FUNCTIONS
LOCALE unsigned GetDefgenericWatch(void *);
LOCALE void ListDefgenerics(const char *,struct defmodule *);
LOCALE void SetDefgenericWatch(unsigned,void *);
LOCALE const char *GetDefmethodPPForm(void *,long);
LOCALE unsigned GetDefmethodWatch(void *,long);
LOCALE void ListDefmethods(const char *,void *);
LOCALE void SetDefmethodWatch(unsigned,void *,long);
#endif /* DEBUGGING_FUNCTIONS */
#if DEBUGGING_FUNCTIONS || PROFILING_FUNCTIONS
LOCALE void GetDefmethodDescription(char *,int,void *,long );
#endif /* DEBUGGING_FUNCTIONS || PROFILING_FUNCTIONS */
#endif /* ALLOW_ENVIRONMENT_GLOBALS */

#endif /* _H_genrccom */
|
#!/bin/sh
# Open MongoDB to remote connections by rebinding it from localhost to all
# interfaces, then restart the service. The repo/package installation steps
# are retained below, commented out as in the original.
#sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927
#sudo bash -c 'echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" > /etc/apt/sources.list.d/mongodb-org-3.2.list'
#
#sudo apt update
#sudo apt install -y mongodb-org
#
# Escape the dots so sed matches the literal address (the previous pattern
# treated '.' as "any character"). A .bak backup of mongod.conf is kept.
sudo sed -i.bak 's/bindIp: 127\.0\.0\.1/bindIp: 0.0.0.0/g' /etc/mongod.conf
#
#sudo systemctl enable mongod.service
sudo systemctl restart mongod.service
|
package com.kinstalk.satellite.service.api;
import com.kinstalk.satellite.domain.Menu;
import java.util.List;
/**
 * Service contract for menu CRUD, sibling reordering, cache eviction and
 * hierarchical (recursive) menu queries.
 */
public interface MenuService {
    // Look up a single menu by its ID.
    public Menu queryMenu(Long id);
    public long deleteMenu(Long id);
    public long saveMenu(Menu menu);
    // Presumably evicts cached menu data — confirm against implementation.
    public void removeMenuCache();
    // Move the menu up/down among its siblings.
    public void moveUpMenuById( Long id);
    public void moveDownMenuById(Long id);
    // Top-level menus (those without a parent).
    public List<Menu> queryAllRootMenu();
    // Recursively resolve the menu tree for each entry in the list.
    public void loopQueryMenu(List<Menu> menuList);
    public List<Menu> queryMenuByParentId(Long parentId);
    // Query menus by user. (Translated from the original Chinese comment.)
    public List<Menu> queryMenuByTypeIdAndParentIdAndUid(Long menuTypeId, long l, String uid);
    // Recursively query menus by user. (Translated from the original Chinese comment.)
    public void loopQueryMenuAndUid(List<Menu> menuList, String uid);
}
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Ktools (Oasis) model-run script: sets up fifos/work dirs, wires the
# ground-up-loss pipeline (eve | getmodel | gulcalc -> summarycalc ->
# eltcalc) for both the normal and fully-correlated outputs, then kats the
# partial results into CSVs. Process bookkeeping is intricate and
# order-sensitive; the code below is unchanged, comments only.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")
# --- Script Init ---
mkdir -p log
rm -R -f log/*
touch log/stderror.err
# Watchdog that monitors this script's process group.
ktools_monitor.sh $$ & pid0=$!
# Runs on any exit path (see trap below): reports status, then kills every
# process in this script's process group except celery/log/shell helpers.
exit_handler(){
exit_code=$?
kill -9 $pid0 2> /dev/null
if [ "$exit_code" -gt 0 ]; then
echo 'Ktools Run Error - exitcode='$exit_code
else
echo 'Run Completed'
fi
set +x
group_pid=$(ps -p $$ -o pgid --no-headers)
sess_pid=$(ps -p $$ -o sess --no-headers)
script_pid=$$
printf "Script PID:%d, GPID:%s, SPID:%d
" $script_pid $group_pid $sess_pid >> log/killout.txt
ps -jf f -g $sess_pid > log/subprocess_list
# Kill descendants (PID >= this script's), newest first.
PIDS_KILL=$(pgrep -a --pgroup $group_pid | awk 'BEGIN { FS = "[ \t\n]+" }{ if ($1 >= '$script_pid') print}' | grep -v celery | egrep -v *\\.log$ | egrep -v *\\.sh$ | sort -n -r)
echo "$PIDS_KILL" >> log/killout.txt
kill -9 $(echo "$PIDS_KILL" | awk 'BEGIN { FS = "[ \t\n]+" }{ print $1 }') 2>/dev/null
exit $exit_code
}
trap exit_handler QUIT HUP INT KILL TERM ERR EXIT
# Verifies every started ktools process logged a "finish" line; returns
# non-zero (triggering the ERR/exit path) if any process was lost.
check_complete(){
set +e
proc_list="eve getmodel gulcalc fmcalc summarycalc eltcalc aalcalc leccalc pltcalc ordleccalc"
has_error=0
for p in $proc_list; do
started=$(find log -name "$p*.log" | wc -l)
finished=$(find log -name "$p*.log" -exec grep -l "finish" {} + | wc -l)
if [ "$finished" -lt "$started" ]; then
echo "[ERROR] $p - $((started-finished)) processes lost"
has_error=1
elif [ "$started" -gt 0 ]; then
echo "[OK] $p"
fi
done
if [ "$has_error" -ne 0 ]; then
false # raise non-zero exit code
fi
}
# --- Setup run dirs ---
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
mkdir output/full_correlation/
rm -R -f fifo/*
mkdir fifo/full_correlation/
rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/
# Named pipes connecting the pipeline stages below.
mkfifo fifo/gul_P1
mkfifo fifo/gul_S1_summary_P1
mkfifo fifo/gul_S1_eltcalc_P1
mkfifo fifo/full_correlation/gul_P1
mkfifo fifo/full_correlation/gul_S1_summary_P1
mkfifo fifo/full_correlation/gul_S1_eltcalc_P1
# --- Do ground up loss computes ---
( eltcalc < fifo/gul_S1_eltcalc_P1 > work/kat/gul_S1_eltcalc_P1 ) 2>> log/stderror.err & pid1=$!
tee < fifo/gul_S1_summary_P1 fifo/gul_S1_eltcalc_P1 > /dev/null & pid2=$!
( summarycalc -m -i -1 fifo/gul_S1_summary_P1 < fifo/gul_P1 ) 2>> log/stderror.err &
# --- Do ground up loss computes ---
( eltcalc < fifo/full_correlation/gul_S1_eltcalc_P1 > work/full_correlation/kat/gul_S1_eltcalc_P1 ) 2>> log/stderror.err & pid3=$!
tee < fifo/full_correlation/gul_S1_summary_P1 fifo/full_correlation/gul_S1_eltcalc_P1 > /dev/null & pid4=$!
( summarycalc -m -i -1 fifo/full_correlation/gul_S1_summary_P1 < fifo/full_correlation/gul_P1 ) 2>> log/stderror.err &
# Event source: gulcalc feeds the normal pipe and (via -j) the fully
# correlated pipe simultaneously.
( eve 1 1 | getmodel | gulcalc -S100 -L100 -r -j fifo/full_correlation/gul_P1 -a1 -i - > fifo/gul_P1 ) 2>> log/stderror.err &
wait $pid1 $pid2 $pid3 $pid4
# --- Do ground up loss kats ---
kat -s work/kat/gul_S1_eltcalc_P1 > output/gul_S1_eltcalc.csv & kpid1=$!
# --- Do ground up loss kats for fully correlated output ---
kat -s work/full_correlation/kat/gul_S1_eltcalc_P1 > output/full_correlation/gul_S1_eltcalc.csv & kpid2=$!
wait $kpid1 $kpid2
check_complete
exit_handler
|
import { Helper } from "./helper";

// REST wrapper around the /storage endpoints.
export class Storage {
  // Typed as Helper (was `any`) so method calls are checked by the compiler.
  helper: Helper;

  constructor(config) {
    this.helper = new Helper(config);
  }

  /**
   * Lists all storage entries.
   * The `callback` parameter was never used by the implementation; it is
   * kept (now optional) only for backward compatibility with existing callers.
   */
  async list(callback?: unknown) {
    return await this.helper.httpGet('/storage', {});
  }

  /** Creates a storage entry from `data`. */
  async create(data) {
    return await this.helper.httpPost('/storage', data);
  }

  /** Fetches a single storage entry by identifier. */
  async get(storage) {
    return await this.helper.httpGet('/storage/' + storage, {});
  }

  /** Updates the given storage entry with `data`. */
  async update(storage, data) {
    return await this.helper.httpPut('/storage/' + storage, data);
  }

  /** Deletes the given storage entry. */
  async delete(storage) {
    return await this.helper.httpDel('/storage/' + storage, {});
  }
}
|
#!/bin/bash
# For each SNP id (presumably 1 and 2 — confirm with the data set), pull the
# matching row from the .tped file and keep every second allele column.
#
# BUGFIX: the original loop never assigned $snp from the loop variable $i,
# so every iteration filtered on an empty/unset pattern and produced no
# usable output. The assignment below restores the apparent intent.
for i in `seq 1 2`
do
snp=$i
awk -v snp=$snp -F" " '$1 == "'"$snp"'" {print}' phasedPO_g0.15_AD_gexppl.tped | awk '{for(i=2;i<=NF;i=i+2){printf "%s ", $i}{printf "%s", RS}}' > 'chr'$snp'_phased'
done
|
import nltk
from flask import Flask, request, jsonify
from nltk.sentiment.vader import SentimentIntensityAnalyzer

app = Flask(__name__)

# VADER analyzer; one shared instance is sufficient for scoring requests.
# NOTE(review): requires the "vader_lexicon" NLTK data package to be
# installed (nltk.download('vader_lexicon')) — confirm deployment does this.
sid = SentimentIntensityAnalyzer()


@app.route('/', methods=['POST'])
def sentiment():
    """Score the raw POST body with VADER and return the polarity scores
    (neg/neu/pos/compound) as JSON."""
    # request.data is bytes; decode it, since polarity_scores expects str.
    text = request.data.decode('utf-8', errors='replace')
    return jsonify(sid.polarity_scores(text))


if __name__ == '__main__':
    app.run()
|
#include <vector>

// Returns the prefix sums of the first min(limit, input.size()) elements
// of `input`. If fewer than `count` values result, the output is padded up
// to `count` entries, each pad value being the sum of the prefix sums
// computed so far (matching the original padding rule).
//
// `copy` previously selected a branch that first copied the input; since
// the input was never mutated, both branches computed identical results,
// so the flag is retained for interface compatibility but has no effect.
//
// The inner re-summation loop (O(n^2)) is replaced by a running total.
std::vector<int> cumulativeSum(const std::vector<int>& input, std::size_t limit, unsigned long count, bool copy) {
    (void)copy;  // kept for backward compatibility; behavior is unchanged

    const std::size_t actualLimit = (limit < input.size()) ? limit : input.size();

    std::vector<int> result;
    result.reserve(actualLimit);

    int runningSum = 0;
    for (std::size_t i = 0; i < actualLimit; ++i) {
        runningSum += input[i];
        result.push_back(runningSum);
    }

    if (result.size() < count) {
        // Pad value: sum of all prefix sums produced above (0 if none).
        int padValue = 0;
        for (int value : result) {
            padValue += value;
        }
        for (unsigned long i = result.size(); i < count; ++i) {
            result.push_back(padValue);
        }
    }
    return result;
}
|
// Opens a SELL order at market (Bid) on the current symbol.
// stopLoss/takeProfit are normalized to the symbol's digit count; slippage
// is fixed at 3 points and the magic number at 16384.
// Returns the ticket number on success, 0 on failure (error is printed).
int sellMarket(double tradingLots,double stopLoss,double takeProfit,int expiration=0,color tradeColor=Red)
{
int ticket=OrderSend(Symbol(),OP_SELL,tradingLots,Bid,3,NormalizeDouble(stopLoss,Digits),NormalizeDouble(takeProfit,Digits),"Sell market trade",16384,expiration,tradeColor);
if(ticket>0)
{
// Re-select the freshly opened order so its open price can be logged.
if(OrderSelect(ticket,SELECT_BY_TICKET,MODE_TRADES))
{
Print("SELL order opened : ",OrderOpenPrice());
}
return ticket;
}
else
{
Print("Error opening SELL order : ",GetLastError());
return 0;
}
}
// Modifies the stop loss / take profit of an existing order (keeping its
// open price). Returns the ticket on successful modification, 0 otherwise
// (including when the ticket is invalid or OrderModify fails silently).
int modifyTrade(int ticket,double stopLoss,double takeProfit,int expiration=0,color modifyColor=Orange)
{
if(ticket>0)
{
if(OrderModify(ticket,OrderOpenPrice(),NormalizeDouble(stopLoss,Digits),NormalizeDouble(takeProfit,Digits),expiration,modifyColor))
{
Print("Order "+IntegerToString(ticket)+" modified");
return ticket;
}
}
else
{
Print("[ERROR] Could not modify trade as no ticket number was found");
}
return 0;
}
//+------------------------------------------------------------------+
//| Close all open buy trades
//+------------------------------------------------------------------+
// Closes every open BUY order on the current symbol.
// Fixes over the previous version:
//  - iterates backwards, because OrderClose() re-indexes the order pool
//    and a forward loop skips entries;
//  - no longer returns after the first successful close, so it really
//    closes ALL buys as the name promises.
// Returns 1 when at least one buy was closed and every close succeeded,
// -1 when nothing was closed or any close failed.
int closeAllBuys()
{
   int closedCount=0;
   for(int i=OrdersTotal()-1;i>=0;i--)
   {
      if(!OrderSelect(i,SELECT_BY_POS,MODE_TRADES))
      {
         // could not find the order
         Print("[ERROR] Could not find the order to monitor stop loss");
         continue;
      }
      // close every open buy trade of this symbol
      if(OrderType()==OP_BUY && OrderSymbol()==Symbol())
      {
         if(!OrderClose(OrderTicket(),OrderLots(),Bid,3,Blue))
         {
            Print("[ERROR] Could not close all buy trades | "+IntegerToString(GetLastError()));
            return -1;
         }
         closedCount++;
      }
   }
   return (closedCount>0) ? 1 : -1;
}
//+------------------------------------------------------------------+
//| Close all open sell trades
//+------------------------------------------------------------------+
// Closes every open SELL order on the current symbol.
// Fixes over the previous version:
//  - iterates backwards, because OrderClose() re-indexes the order pool
//    and a forward loop skips entries;
//  - no longer returns after the first successful close, so it really
//    closes ALL sells as the name promises.
// Returns 1 when at least one sell was closed and every close succeeded,
// -1 when nothing was closed or any close failed.
int closeAllSells()
{
   int closedCount=0;
   for(int i=OrdersTotal()-1;i>=0;i--)
   {
      if(!OrderSelect(i,SELECT_BY_POS,MODE_TRADES))
      {
         // could not find the order
         Print("[ERROR] Could not find the order to monitor stop loss");
         continue;
      }
      // close every open sell trade of this symbol (sells exit at Ask)
      if(OrderType()==OP_SELL && OrderSymbol()==Symbol())
      {
         if(!OrderClose(OrderTicket(),OrderLots(),Ask,3,Blue))
         {
            Print("[ERROR] Could not close all sell trades | "+IntegerToString(GetLastError()));
            return -1;
         }
         closedCount++;
      }
   }
   return (closedCount>0) ? 1 : -1;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
// Counts the open SELL orders on the current symbol.
// BUGFIX: the original never called OrderSelect() inside the loop, so
// OrderType()/OrderSymbol() inspected whatever order happened to be
// selected last, making the count meaningless.
int findSellCount()
{
   int tradeCount=0;
   for(int i=0;i<OrdersTotal();i++)
   {
      if(!OrderSelect(i,SELECT_BY_POS,MODE_TRADES))
         continue;
      if(OrderType()==OP_SELL && OrderSymbol()==Symbol())
      {
         tradeCount++;
      }
   }
   return tradeCount;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
// Counts the open BUY orders on the current symbol.
// BUGFIX: the original never called OrderSelect() inside the loop, so
// OrderType()/OrderSymbol() inspected whatever order happened to be
// selected last, making the count meaningless.
int findBuyCount()
{
   int tradeCount=0;
   for(int i=0;i<OrdersTotal();i++)
   {
      if(!OrderSelect(i,SELECT_BY_POS,MODE_TRADES))
         continue;
      if(OrderType()==OP_BUY && OrderSymbol()==Symbol())
      {
         tradeCount++;
      }
   }
   return tradeCount;
}
|
#!@bash@/bin/bash
# NixOS extlinux boot-menu generator (paths @bash@/@path@ are substituted
# at build time). Parses CLI flags, then (below) copies kernels/initrds/dtbs
# into the target and writes extlinux.conf.
set -eo pipefail
shopt -s nullglob
export PATH=@path@
usage() {
echo "usage: $0 -t <timeout> -c <path-to-default-configuration> [-d <boot-dir>] [-g <num-generations>]" >&2
exit 1
}
timeout= # Timeout in centiseconds
default= # Default configuration
target=/boot # Target directory
numGenerations=0 # Number of other generations to include in the menu
dtbDir= # Custom DTB source
while getopts "t:b:c:d:g:" opt; do
case "$opt" in
t) # U-Boot interprets '0' as infinite and negative as instant boot
if [ "$OPTARG" -lt 0 ]; then
timeout=0
elif [ "$OPTARG" = 0 ]; then
timeout=-10
else
timeout=$((OPTARG * 10))
fi
;;
b) dtbDir="$OPTARG" ;;
c) default="$OPTARG" ;;
d) target="$OPTARG" ;;
g) numGenerations="$OPTARG" ;;
\?) usage ;;
esac
done
# -t and -c are mandatory.
[ "$timeout" = "" -o "$default" = "" ] && usage
mkdir -p $target/nixos
mkdir -p $target/extlinux
# Convert a path to a file in the Nix store such as
# /nix/store/<hash>-<name>/file to <hash>-<name>-<file>.
# Flattens a Nix store path (/nix/store/<hash>-<name>/file) into a single
# file name (<hash>-<name>-file): strip the store prefix, then turn the
# remaining slashes into dashes. Uses one sed invocation with two
# expressions instead of the previous two piped seds.
cleanName() {
local path="$1"
echo "$path" | sed -e 's|^/nix/store/||' -e 's|/|-|g'
}
# Copy a file from the Nix store to $target/nixos.
# filesCopied records every destination written this run so the cleanup
# pass at the end of the script can delete anything no longer referenced.
declare -A filesCopied
copyToKernelsDir() {
local src=$(readlink -f "$1")
local dst="$target/nixos/$(cleanName $src)"
# Don't copy the file if $dst already exists. This means that we
# have to create $dst atomically to prevent partially copied
# kernels or initrd if this script is ever interrupted.
if ! test -e $dst; then
local dstTmp=$dst.tmp.$$
cp -r $src $dstTmp
mv $dstTmp $dst
fi
filesCopied[$dst]=1
# Callers read the destination path out of this global.
result=$dst
}
# Copy its kernel, initrd and dtbs to $target/nixos, and echo out an
# extlinux menu entry
# $1 = path to a system generation, $2 = tag ('default' or a generation
# number). Silently skips generations missing a kernel or initrd.
addEntry() {
local path=$(readlink -f "$1")
local tag="$2" # Generation number or 'default'
if ! test -e $path/kernel -a -e $path/initrd; then
return
fi
copyToKernelsDir "$path/kernel"; kernel=$result
copyToKernelsDir "$path/initrd"; initrd=$result
# No -b override given: derive the DTB dir from the first entry's kernel.
# Note this assigns the global dtbDir, so later entries reuse it.
if [ -z "$dtbDir" ]; then
# XXX UGLY: maybe the system config should have a top-level "dtbs" entry?
dtbDir=$(readlink -m "$path/kernel/../dtbs")
fi
if [ -d "$dtbDir" ]; then
copyToKernelsDir "$dtbDir"; dtbs=$result
fi
timestampEpoch=$(stat -L -c '%Z' $path)
timestamp=$(date "+%Y-%m-%d %H:%M" -d @$timestampEpoch)
nixosLabel="$(cat $path/nixos-version)"
extraParams="$(cat $path/kernel-params)"
echo
echo "LABEL nixos-$tag"
if [ "$tag" = "default" ]; then
echo " MENU LABEL NixOS - Default"
else
echo " MENU LABEL NixOS - Configuration $tag ($timestamp - $nixosLabel)"
fi
echo " LINUX ../nixos/$(basename $kernel)"
echo " INITRD ../nixos/$(basename $initrd)"
if [ -d "$dtbDir" ]; then
echo " FDTDIR ../nixos/$(basename $dtbs)"
fi
echo " APPEND systemConfig=$path init=$path/init $extraParams"
}
# Write the new config to a temp file first, then mv it into place so the
# bootloader never sees a half-written extlinux.conf.
tmpFile="$target/extlinux/extlinux.conf.tmp.$$"
cat > $tmpFile <<EOF
# Generated file, all changes will be lost on nixos-rebuild!
# Change this to e.g. nixos-42 to temporarily boot to an older configuration.
DEFAULT nixos-default
MENU TITLE ------------------------------------------------------------
TIMEOUT $timeout
EOF
addEntry $default default >> $tmpFile
if [ "$numGenerations" -gt 0 ]; then
# Add up to $numGenerations generations of the system profile to the menu,
# in reverse (most recent to least recent) order.
for generation in $(
(cd /nix/var/nix/profiles && ls -d system-*-link) \
| sed 's/system-\([0-9]\+\)-link/\1/' \
| sort -n -r \
| head -n $numGenerations); do
link=/nix/var/nix/profiles/system-$generation-link
addEntry $link $generation
done >> $tmpFile
fi
mv -f $tmpFile $target/extlinux/extlinux.conf
# Remove obsolete files from $target/nixos.
# Anything not recorded in filesCopied this run belongs to a generation
# that is no longer in the menu.
for fn in $target/nixos/*; do
if ! test "${filesCopied[$fn]}" = 1; then
echo "Removing no longer needed boot file: $fn"
chmod +w -- "$fn"
rm -rf -- "$fn"
fi
done
|
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build phase: copies each vendored
# framework into the product bundle, thins fat binaries to the build
# architectures, and re-signs as required by the build settings.
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Default to serial signing unless the integration opted into parallel codesign.
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: framework path, absolute or relative to BUILT_PRODUCTS_DIR.
# Copies it into the app's frameworks folder, thins it to the architectures
# being built, re-signs it, and (pre-Xcode 7 only) embeds Swift runtime dylibs.
install_framework()
{
# Resolve the source: prefer the full path under BUILT_PRODUCTS_DIR, then
# just its basename there, then the literal argument -- first readable wins.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked framework to its real location before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
# The executable normally lives at Name.framework/Name; fall back to a bare file.
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# $1: absolute path to a .framework.dSYM bundle. The dSYM is copied into the
# target's derived files dir, thinned, then forwarded to DWARF_DSYM_FOLDER_PATH.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# strip_invalid_archs sets STRIP_BINARY_RETVAL=1 when it processed the binary,
# 0 when the binary shares no architectures with the current build.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
# $1: path to the copied framework/dylib. No-op when no identity is expanded
# or the build settings disable signing.
code_sign_if_enabled() {
# All expansions use ":-" defaults: this script runs under `set -u`, so
# referencing an Xcode variable that happens to be unset (the original did
# this for CODE_SIGNING_ALLOWED and the identity vars) would abort the whole
# build phase instead of simply skipping the signing step.
# Separate [ ] tests joined with && replace the obsolescent `-a` operator.
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" ] && [ "${CODE_SIGNING_REQUIRED:-}" != "NO" ] && [ "${CODE_SIGNING_ALLOWED:-}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME:-}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
# When parallel signing is enabled, background each codesign invocation;
# the end of the script `wait`s for all of them.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# $1: path to a Mach-O binary. Removes (via lipo) every architecture slice not
# present in $ARCHS. Sets STRIP_BINARY_RETVAL to 1 on completion, or 0 when
# the binary shares no architectures with the current build (warning only).
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks for the active configuration. Both configurations
# currently install the same set; kept as separate branches to match the
# CocoaPods-generated layout.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Kingfisher/Kingfisher.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Realm/Realm.framework"
install_framework "${BUILT_PRODUCTS_DIR}/RealmSwift/RealmSwift.framework"
install_framework "${BUILT_PRODUCTS_DIR}/RxSwift/RxSwift.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Kingfisher/Kingfisher.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Realm/Realm.framework"
install_framework "${BUILT_PRODUCTS_DIR}/RealmSwift/RealmSwift.framework"
install_framework "${BUILT_PRODUCTS_DIR}/RxSwift/RxSwift.framework"
fi
# When signing ran in parallel (backgrounded codesign jobs), wait for them all.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
<reponame>soonitoon/dwitter
import React, { useState } from "react";
import { AuthService } from "mybase";
import AuthWithSocial from "components/AuthWithSocial";
import { BsPeople, BsSearch } from "react-icons/bs";
import { IoLogoTwitter } from "react-icons/io5";
const AuthForm = () => {
const [error, setError] = useState("");
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [newAccount, setNewAccount] = useState(false);
const toggleAccount = () => {
setNewAccount((prev) => !prev);
};
const onChange = (event) => {
const {
target: { name, value },
} = event;
if (name === "email") {
setEmail(value);
} else if (name === "password") {
setPassword(value);
}
};
const onSubmit = async (event) => {
event.preventDefault();
try {
if (newAccount) {
await AuthService.createUserWithEmailAndPassword(email, password);
} else {
await AuthService.signInWithEmailAndPassword(email, password);
}
} catch (error) {
const errorCode = error.code;
let errorMessage;
if (errorCode === "auth/email-already-in-use") {
errorMessage = "사용중인 이메일이에요.";
} else if (errorCode === "auth/invalid-email") {
errorMessage = "이메일 주소를 정확히 입력해주세요.";
} else if (errorCode === "auth/operation-not-allowed") {
errorMessage = "사용할 수 없는 이메일 혹은 비밀번호에요.";
} else if (errorCode === "auth/weak-password") {
errorMessage = "비밀번호가 너무 약해요.";
} else if (errorCode === "auth/user-disabled") {
errorMessage = "사용 중지된 계정이에요.";
} else if (errorCode === "auth/user-not-found") {
errorMessage = "존재하지 않는 이메일이에요.";
} else if (errorCode === "auth/wrong-password") {
errorMessage = "잘못된 비밀번호에요.";
} else {
errorMessage = "알 수 없는 오류가 발생했어요.";
}
setError(errorMessage);
}
};
return (
<>
<IoLogoTwitter className="top-dwitte-icon" />
<h1 className="main-title">
지금 전 세계에서 무슨 일이
<br /> 일어나고 있는지 알아보세요.
</h1>
<h5 className="sub-title">오늘 드위터에 가입하세요.</h5>
<AuthWithSocial />
<div className="color-box">
<h5 className="color-box-text">
<BsSearch className="color-box-icon" />
<div>관심사를 팔로우 하세요.</div>
</h5>
<h5 className="color-box-text">
<BsPeople className="color-box-icon" />
<div>
사람들이 무엇에 대해 이야기하고
<br />
있는지 알아보세요.
</div>
</h5>
</div>
<h5 className="make-account-title">혹은, 직접 로그인.</h5>
<form onSubmit={onSubmit} className="LoginForm">
<input
name="email"
type="text"
required
value={email}
onChange={onChange}
placeholder="<EMAIL>"
className="email-input"
></input>
<input
name="password"
type="password"
required
value={password}
onChange={onChange}
placeholder="password"
className="password-input"
></input>
<input
type="submit"
className="submit-account"
value={newAccount ? "만들기" : "로그인"}
></input>
<button onClick={toggleAccount} className="toggle-login">
{newAccount ? "계정이 있나요?" : "계정 만들기"}
</button>
</form>
<p className="login-error">{error}</p>
<footer className="AuthFooter">
<p>트위터 클론코딩 -드위터</p>
</footer>
</>
);
};
export default AuthForm;
|
#!/bin/bash -l
##############################################################
#
# Shell script for submitting parallel python jobs on SLURM
# cluster with nodes, CPUS, tasks, GPUs
#
##############################################################
# ml python/3.6.0
# ml cuda/9.0
module unload git
ml anaconda-python/3.6
source activate dnn
# ml parallel
## activate virtualenv/condaenv to use their modules
# 1. Prompt user for input that runs the analysis
echo "Begin analysis." # print beginning statement
# NEED TO RUN FOR EZ=0,1,2,3 and varying PZ all once
# Pause before running to check
# Scratch-space locations for temp, output and training data.
tempdatadir='/scratch/users/ali39@jhu.edu/data/dnn/temp/test_fragility_v3/'
outputdatadir='/scratch/users/ali39@jhu.edu/data/dnn/output/test_fragility_v3/'
traindatadir='/scratch/users/ali39@jhu.edu/data/dnn/'
# /scratch/users/ali39@jhu.edu
printf "\nThis is the data directories: \n"
printf "Temp datadir: $tempdatadir \n"
printf "Output datadir: $outputdatadir \n"
printf "Train datadir: $traindatadir \n"
printf "\n"
#### Create all logging directories if needed
# _gnuerr = all error logs for sbatch gnu runs %A.out
# _gnuout = all output logs for sbatch gnu runs %A.out
# _logs = the parallel gnu logfile for resuming job at errors
outdir=_out
# create output directory
if [ -d "$outdir" ]; then
# NOTE(review): plain `echo` does not interpret \n escapes, so this prints the
# literal characters "\n\n"; use `echo -e` or printf if escapes were intended.
echo "Out log directory exists!\n\n"
else
mkdir $outdir
fi
# 2. Define Slurm Parameters
NUM_PROCSPERNODE=6 # number of processors per node (1-24). Use 24 for GNU jobs.
NUM_NODES=1 # number of nodes to request
NUM_CPUPERTASK=1
partition=gpu # debug, shared, unlimited, parallel, gpu, lrgmem, scavenger
# partition=debug
qos=scavenger
numgpus=1
gpu="gpu:$numgpus"
echo $gpu
# set jobname
jobname="submit_trainpy.log"
# create export commands
# Variables forwarded into the sbatch job environment via --export below.
exvars="tempdatadir=${tempdatadir},\
outputdatadir=${outputdatadir},\
traindatadir=${traindatadir} "
## job reqs
walltime=0:45:0
# build basic sbatch command with all params parametrized
sbatcomm="sbatch \
--time=${walltime} \
--nodes=${NUM_NODES} \
--cpus-per-task=${NUM_CPUPERTASK} \
--job-name=${jobname} \
--ntasks-per-node=${NUM_PROCSPERNODE} \
--partition=${partition}
--gres=${gpu} "
# NOTE(review): the --partition line above lacks the trailing backslash the
# other lines have, so a literal newline is kept inside $sbatcomm; the unquoted
# expansion below still word-splits correctly, but confirm this is intentional.
# build a scavenger job, gpu job, or other job
printf "Sbatch should run now\n"
echo $sbatcomm $exvars ./submit_train.sbatch
${sbatcomm} --export=$exvars ./submit_train.sbatch
read -p "Continuing in 0.5 Seconds...." -t 0.5
echo "Continuing ...."
# grep for SLURM_EXPORT_ENV when testing
|
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" phase (older template, no parallel-sign
# default, no rsync tmp-file protection): copies vendored frameworks into the
# bundle, strips non-valid architectures, and re-signs.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies and strips a vendored framework.
# $1: framework path, absolute or relative to BUILT_PRODUCTS_DIR.
install_framework()
{
# Resolve the source: full path under BUILT_PRODUCTS_DIR, its basename there,
# or the literal argument -- whichever is readable first.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked framework to its real path before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
# The executable normally lives at Name.framework/Name; fall back to a bare file.
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
# $1: path to the copied framework. No-op when no identity is expanded or the
# build settings disable signing. (No `set -u` in this script, so referencing
# unset Xcode variables here degrades to empty strings rather than aborting.)
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
# Background the codesign call when parallel signing is enabled; the tail
# of the script `wait`s for the backgrounded jobs.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# $1: path to a Mach-O binary; removes (via lipo) every architecture slice not
# listed in $VALID_ARCHS.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the single vendored pod for whichever configuration is building.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/SHPaymentMethod/SHPaymentMethod.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/SHPaymentMethod/SHPaymentMethod.framework"
fi
# Wait for any backgrounded codesign jobs before the phase ends.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
/*! Copyright (c) 2013 <NAME>. License: BSD 3-Clause. */
(function () {
  // Locate the advertisement overlay and its close button. querySelector
  // returns null when the markup is absent; the original dereferenced it
  // unconditionally and threw a TypeError on pages without the ad.
  var body = document.querySelector('body'),
      advertisement = body.querySelector('.advertisement'),
      adCloseButton = advertisement && advertisement.querySelector('.close-button');

  // No ad (or no close button) on this page: leave scrolling untouched.
  if (!advertisement || !adCloseButton) {
    return;
  }

  // Lock page scrolling while the ad overlay is visible.
  body.style.overflow = 'hidden';
  adCloseButton.addEventListener('click', function (e) {
    e.preventDefault();
    // Restore scrolling and hide the overlay.
    body.removeAttribute('style');
    advertisement.style.display = 'none';
  });
}());
|
<reponame>Antloup/workflow-js
// Barrel module: re-exports the workflow building blocks so consumers can
// import them all from a single entry point.
import Action from './Action';
import Comparator from './Comparator';
import Condition from './Condition';
import ParentType from './ParentType';
import Rule from './Rule';
export {Action, Comparator, Condition, ParentType, Rule};
|
#!/bin/bash
# EC2 UserData bootstrap for TIBCO BWCE on AWS: installs docker-ce, stages any
# requested BWCE plug-in runtimes, builds and pushes the BWCE base image to
# ECR, runs cfn-init, then powers the instance off.
echo 'BWCE-AWS: Start of EC2 Instance UserData execution...'
export PATH=/home/ec2-user/.local/bin:$PATH
export PYTHONPATH=$PYTHONPATH:/home/ec2-user/.local/lib/python2.7/site-packages
echo 'BWCE-AWS: Install Docker-ce...'
sudo yum install -y yum-utils device-mapper-persistent-data lvm2
sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo
sudo yum -y install docker-ce
echo 'Docker installed, starting now...'
sudo systemctl start docker
# Allow the login user to run docker without sudo (takes effect on re-login).
sudo usermod -aG docker $USER
# NOTE(review): the bare `{ "Ref": ... }` blocks below are CloudFormation
# intrinsic-function placeholders -- this script is embedded in a template's
# Fn::Join'd UserData and is not standalone-runnable bash as written.
pluginListName=
{
"Ref": "PluginList"
}
# When a plug-in list was provided, copy each requested plug-in's bwce-runtime
# zip from the installers area into the docker build context's addons folder.
# `|| true` keeps a missing installer from failing that copy.
if [[ -n \"$pluginListName\" ]]; then
echo 'BWCE-AWS: Provided List of Plug-ins... '
{
"Ref": "PluginList"
}
for pluginName in $(echo
{
"Ref": "PluginList"
}
| tr ',' '\\n')
do
if [ $pluginName == 'DC' ]; then
echo 'BWCE-AWS: Copying DC Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/DC/TIB_bwdcp_4.5.1_v4.1_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwdcp_4.5.1_v4.1_bwce-runtime.zip || true
elif [ $pluginName == 'DCRM' ]; then
echo 'BWCE-AWS: Copying DCRM Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/DCRM/TIB_bwplugindynamicscrm_6.5.0_v9_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwplugindynamicscrm_6.5.0_v9_bwce-runtime.zip || true
elif [ $pluginName == 'Cassandra' ]; then
echo 'BWCE-AWS: Copying Cassandra Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/Cassandra/TIB_bwplugincassandra_6.3.0_v11.1_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwplugincassandra_6.3.0_v11.1_bwce-runtime.zip || true
elif [ $pluginName == 'FTL' ]; then
echo 'BWCE-AWS: Copying FTL Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/FTL/TIB_bwpluginftl_6.4.1_v2.1_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginftl_6.4.1_v2.1_bwce-runtime.zip || true
elif [ $pluginName == 'Marketo' ]; then
echo 'BWCE-AWS: Copying Marketo Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/Marketo/TIB_bwpluginmarketo_7.2.1_v2.1_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginmarketo_7.2.1_v2.1_bwce-runtime.zip || true
elif [ $pluginName == 'SFTP' ]; then
echo 'BWCE-AWS: Copying SFTP Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/SFTP/TIB_bwsp_6.1.2_v1.2_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwsp_6.1.2_v1.2_bwce-runtime.zip || true
elif [ $pluginName == 'SQS-SNS' ]; then
echo 'BWCE-AWS: Copying SQS-SNS Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/SQS-SNS/TIB_bwpluginawsm_6.2.0_v4_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginawsm_6.2.0_v4_bwce-runtime.zip || true
elif [ $pluginName == 'Workday' ]; then
echo 'BWCE-AWS: Copying Workday Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/Workday/TIB_bwpluginworkday_6.0.1_v5_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginworkday_6.0.1_v5_bwce-runtime.zip || true
elif [ $pluginName == 'Netsuite' ]; then
echo 'BWCE-AWS: Copying Netsuite Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/Netsuite/TIB_bwpluginnetsuite_6.3.1_v9_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginnetsuite_6.3.1_v9_bwce-runtime.zip || true
elif [ $pluginName == 'PDF' ]; then
echo 'BWCE-AWS: Copying PDF Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/PDF/TIB_bwpluginpdf_6.2.1_v1.1_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginpdf_6.2.1_v1.1_bwce-runtime.zip || true
elif [ $pluginName == 'Files' ]; then
echo 'BWCE-AWS: Copying Files Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/Files/TIB_bwpluginfiles_8.1.0_v11.3_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginfiles_8.1.0_v11.3_bwce-runtime.zip || true
elif [ $pluginName == 'SAP' ]; then
echo 'BWCE-AWS: Copying SAP Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/SAP/TIB_bwpluginsap_8.2.1_v16_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginsap_8.2.1_v16_bwce-runtime.zip || true
elif [ $pluginName == 'ADB' ]; then
echo 'BWCE-AWS: Copying ADB Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/ADB/TIB_bwpluginadb_8.1.0_v12_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginadb_8.1.0_v12_bwce-runtime.zip || true
elif [ $pluginName == 'MongoDB' ]; then
echo 'BWCE-AWS: Copying MongoDB Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/MongoDB/TIB_bwpluginmongodb_6.2.1_v2_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginmongodb_6.2.1_v2_bwce-runtime.zip || true
elif [ $pluginName == 'SFDC' ]; then
echo 'BWCE-AWS: Copying SFDC Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/SFDC/TIB_bwpluginsalesforce_6.4.0_v9_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginsalesforce_6.4.0_v9_bwce-runtime.zip || true
elif [ $pluginName == 'ServiceNow' ]; then
echo 'BWCE-AWS: Copying ServiceNow Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/ServiceNow/TIB_bwpluginservicenow_6.0.0_v19.0.2_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginservicenow_6.0.0_v19.0.2_bwce-runtime.zip || true
elif [ $pluginName == 'MQ' ]; then
echo 'BWCE-AWS: Copying MQ Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/MQ/TIB_bwmq_8.5.1_v4.2_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwmq_8.5.1_v4.2_bwce-runtime.zip || true
elif [ $pluginName == 'OData' ]; then
echo 'BWCE-AWS: Copying OData Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/OData/TIB_bwpluginodata_6.0.1_v3.1_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginodata_6.0.1_v3.1_bwce-runtime.zip || true
elif [ $pluginName == 'AMQP' ]; then
echo 'BWCE-AWS: Copying AMQP Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/AMQP/TIB_bwpluginamqp_6.0.1_v6.1_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginamqp_6.0.1_v6.1_bwce-runtime.zip || true
elif [ $pluginName == 'Kafka' ]; then
echo 'BWCE-AWS: Copying Kafka Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/Kafka/TIB_bwpluginkafka_6.0.0_v19.1.1_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginkafka_6.0.0_v19.1.1_bwce-runtime.zip || true
elif [ $pluginName == 'S3' ]; then
echo 'BWCE-AWS: Copying S3 Plugin Runtime...'
cp /home/ec2-user/bwce/installers/plugins/S3/TIB_bwpluginawss3_6.1.1_v7.2_bwce-runtime.zip /home/ec2-user/bwce/bwce-docker/resources/addons/plugins/TIB_bwpluginawss3_6.1.1_v7.2_bwce-runtime.zip || true
fi
done
else
echo 'BWCE-AWS: List of Plug-ins not provided...'
fi
# Log docker in to ECR using the CLI-generated login command.
$( aws ecr get-login --region
{
"Ref": "AWS::Region"
}
--no-include-email )
sudo chmod +x /home/ec2-user/bwce/bwce-docker/createDockerImage.sh
cd /home/ec2-user/bwce/bwce-docker/
# Remove any stale runtime zip from the build context, then build the base
# image from the canonical runtime and tag/push it to this account's ECR repo.
rm -f /home/ec2-user/bwce/bwce-docker/resources/bwce-runtime/bwceruntime-aws-2.3.4.zip
./createDockerImage.sh /home/ec2-user/bwce/bwce-runtime/bwceruntime-aws-2.3.4.zip tibco/bwce:2.3.4
docker tag tibco/bwce:2.3.4
{
"Ref": "AWS::AccountId"
}
.dkr.ecr.
{
"Ref": "AWS::Region"
}
.amazonaws.com/
{
"Ref": "BwceEcrRepository"
}
:2.3.4
docker push
{
"Ref": "AWS::AccountId"
},
# NOTE(review): the trailing comma after the AccountId Ref above looks like a
# template typo (the `docker tag` segment has none) and would end up inside
# the joined push target; confirm against the CloudFormation template.
.dkr.ecr.
{
"Ref": "AWS::Region"
}
.amazonaws.com/
{
"Ref": "BwceEcrRepository"
}
:2.3.4
# Run cfn-init for the InitialEC2Setup resource, picking the configset based
# on whether the template is also creating an S3 bucket.
/opt/aws/bin/cfn-init -v --stack
{
"Ref": "AWS::StackName"
}
--resource InitialEC2Setup --configsets
{
"Fn::If": [
"CreateS3Bucket",
"quickstartwithS3",
"cfnInitEnd"
]
}
--region
{
"Ref": "AWS::Region"
}
echo 'BWCE-AWS: End of EC2 Instance UserData execution, shutting down...'
sudo poweroff
|
import React from 'react'
import { Plain, types, uuid } from 'react-bricks'
import BlockNames from '../BlockNames'
import styles from './Features.module.css'
//=============================
// Colors enum
//=============================
// Background color choices surfaced in the sidebar "Background" select below.
const Colors = {
white: { value: '#fff', label: 'White' },
lightGray: { value: '#f7fafc', label: 'Light Gray' },
}
//=============================
// Component to be rendered
//=============================
const Features = ({ backgroundColor, renderItems }) => {
return (
<section className={styles.features} style={{ backgroundColor }}>
<div className={styles.container}>{renderItems}</div>
</section>
)
}
//=============================
// Get Default Props
//=============================
// Content a freshly added Features block starts with: white background and a
// single sample FeatureItem child (this block is a Repeater; see the schema).
const getDefaultProps = () => ({
backgroundColor: Colors.white.value,
items: [
{
id: uuid(),
type: BlockNames.FeatureItem,
props: {
imageSource: {
src:
'https://api.reactbricks.com/images/original/41800240-5e34-11ea-b64f-f36644626031.svg',
placeholderSrc:
'https://api.reactbricks.com/images/original/41800240-5e34-11ea-b64f-f36644626031.svg',
srcSet: '',
},
title: Plain.deserialize('A React Brick in the wall'),
text: Plain.deserialize(
`We don't need no source control. All in all you are just another React Brick in the wall.`
),
},
},
],
})
//=============================
// Side Edit Props
//=============================
// Sidebar controls: a single color select bound to `backgroundColor`, offering
// the two entries from the Colors map above.
const sideEditProps = [
{
name: 'backgroundColor',
label: 'Background',
type: types.SideEditPropType.Select,
selectOptions: {
display: types.OptionsDisplay.Color,
options: [Colors.white, Colors.lightGray],
},
},
]
//=============================
// Exported BlockType Schema
//=============================
// React Bricks block definition wiring the component, default content and
// sidebar props together; registered as a Repeater of FeatureItem children.
const schema = {
name: BlockNames.Features,
label: 'Features',
superType: types.BlockSuperType.Repeater,
render: props => <Features {...props} />,
getDefaultProps,
sideEditProps,
itemsType: BlockNames.FeatureItem,
addItemText: 'Add feature',
removeItemText: 'Remove feature',
}
export default schema
|
/* eslint-env webextensions */
// Snapshot of the elements found by the last HREFS_REQUEST scan; the
// HIGHLIGHT/CLEAN messages operate on this cache. Initialized to an empty
// array so those messages are safe to receive first — the original left it
// `undefined` and crashed with a TypeError on `.forEach`.
let linkElements = []

chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  switch (message.type) {
    case 'HREFS_REQUEST': {
      // Braced case: `const` in an unbraced case clause is scoped to the
      // whole switch (eslint no-case-declarations).
      linkElements = Array.from(document.querySelectorAll('[href]'))
      const hrefs = linkElements
        .map(link => link.href)
        .filter(link => link)
        .filter(link => !link.startsWith('javascript:'))
      sendResponse({ hrefs })
      break
    }
    case 'HIGHLIGHT_HREFS':
      // Outline the links named in the message; reset the rest.
      linkElements.forEach(element => {
        if (message.hrefs.includes(element.href)) {
          element.style.border = 'thin solid blue'
        } else {
          // TODO: save and restore original value
          element.style.border = 'unset'
        }
      })
      break
    case 'CLEAN_HIGHLIGHTS':
      linkElements.forEach(element => {
        // TODO: save and restore original value
        element.style.border = 'unset'
      })
      break
  }
})
|
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('../../multiplex')) :
typeof define === 'function' && define.amd ? define(['../../multiplex'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.mx));
}(this, (function (mx) { 'use strict';
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var mx__default = /*#__PURE__*/_interopDefaultLegacy(mx);
var array = [1, 2, 3, 4, 5];
mx__default['default'].range(1, 5);
new mx__default['default'].Collection(array);
var list = new mx__default['default'].List(array);
new mx__default['default'].LinkedList(array);
new mx__default['default'].HashSet(array);
new mx__default['default'].Stack(array);
new mx__default['default'].Queue(array);
new mx__default['default'].Set(array);
var map = new mx__default['default'].Map();
var dictionary = new mx__default['default'].Dictionary();
var sortedList = new mx__default['default'].SortedList();
list.asReadOnly();
new mx__default['default'].Lookup(array, function (t) {
return t;
});
for (var i = 0; i < array.length; i++) {
map.set(array[i], array[i]);
dictionary.set(array[i], array[i]);
sortedList.add(array[i], array[i]);
}
var qunit = typeof QUnit === 'undefined' ? require('qunitjs') : QUnit;
var qmodule = qunit.module;
var qtest = qunit.test;
qunit.expect;
qmodule('linq-join');
var numbers = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5];
var strings = numbers.join(',').split(',');
var identity = function (t) {
return t;
};
var str2num = function (t) {
return parseInt(t);
};
qtest('basic "join" tests', function (assert) {
assert.equal(mx__default['default'](numbers).join(array, identity, identity, identity).count(), array.length, ' join count of an array of separate numbers');
assert.equal(mx__default['default'](array).join(numbers, identity, identity, identity).count(), array.length, ' join count of an array of separate numbers');
assert.deepEqual(mx__default['default'](array).join(numbers, identity, identity, identity).toArray(), array, ' join result of an array of separate numbers');
assert.equal(mx__default['default']([1, 1, 1]).join(array, identity, identity, identity).count(), 3, ' join count of an array of identical numbers');
assert.equal(mx__default['default'](array).join([1, 1, 1], identity, identity, identity).count(), 3, ' join count of an array of identical numbers');
assert.deepEqual(mx__default['default']([1, 1, 1]).join(numbers, identity, identity, identity).toArray(), [1, 1, 1], ' join result of an array of identical numbers');
assert.equal(mx__default['default']([]).join(array, identity, identity, identity).count(), 0, ' join count of an empty array');
assert.equal(mx__default['default'](array).join([], identity, identity, identity).count(), 0, ' join count of an empty array');
assert.equal(mx__default['default'](array).join([null], identity, identity, identity).count(), 0, ' join count of an null array');
assert.equal(mx__default['default'](array).join([undefined], identity, identity, identity).count(), 0, ' join count of an undefined array');
assert.deepEqual(mx__default['default'](numbers).join(numbers, identity, identity, identity).toArray(), numbers, ' join result of the same array');
assert.equal(mx__default['default'](strings).join(numbers, str2num, identity, identity).count(), numbers.length, ' join count of an array of separate strings');
assert.equal(mx__default['default'](numbers).join(strings, identity, str2num, identity).count(), numbers.length, ' join count of an array of separate strings');
assert.deepEqual(mx__default['default'](strings).join(numbers, str2num, identity, identity).toArray(), strings, ' join result of an array of separate strings');
assert.deepEqual(mx__default['default'](numbers).join(strings, identity, str2num, identity).toArray(), numbers, ' join result of an array of separate strings');
assert.equal(mx__default['default']([true, false, true]).join([false], identity, identity, identity).count(), 1, ' join count of a boolean array');
assert.equal(mx__default['default']([true, false, true]).join([true], identity, identity, identity).count(), 2, ' join count of a boolean array');
assert.equal(mx__default['default']([new Date(2017, 0, 1), new Date(2017, 0, 2)]).join([new Date(2017, 0, 2), new Date(2017, 0, 3)], identity, identity, identity).count(), 1, ' join count of a date array');
assert.equal(mx__default['default']([{ val: 1 }, { val: 2 }]).join([{ val: 2 }, { val: 3 }], identity, identity, identity).count(), 1, ' join count of a object literal array');
});
// qtest('"distinct" test using comparer', function (assert) {
// assert.equal(mx([{ val: 1, name: 'A' }, { val: 1, name: 'B' }, { val: 1, name: 'C' }]).distinct({
// hash: function (t) {
// return t.val;
// },
// equals: function (a, b) {
// return a.val === b.val && a.name === b.name;
// }
// }).count(), 3, 'distinct count of an array of distinct object literals using comparer');
// });
// qtest('collections "distinct" method tests', function (assert) {
// var numericComparer = {
// hash: function (t) {
// return t % 2;
// },
// equals: function (a, b) {
// return a === b;
// }
// };
// assert.equal(mocks.collection.distinct().count(), 5, 'Test "distinct" in a Collection');
// assert.equal(mocks.collection.distinct(numericComparer).count(), 5, 'Test "distinct" in a Collection with comparer');
// assert.equal(mocks.list.distinct().count(), 5, 'Test "distinct" in a List');
// assert.equal(mocks.list.distinct(numericComparer).count(), 5, 'Test "distinct" in a List with comparer');
// assert.equal(mocks.readOnlyCollection.distinct().count(), 5, 'Test "distinct" in a ReadOnlyCollection');
// assert.equal(mocks.readOnlyCollection.distinct(numericComparer).count(), 5, 'Test "distinct" in a ReadOnlyCollection with comparer');
// assert.equal(mocks.linkedList.distinct().count(), 5, 'Test "distinct" in a LinkedList');
// assert.equal(mocks.linkedList.distinct(numericComparer).count(), 5, 'Test "distinct" in a LinkedList with comparer');
// assert.equal(mocks.hashSet.distinct().count(), 5, 'Test "distinct" in a HashSet');
// assert.equal(mocks.hashSet.distinct(numericComparer).count(), 5, 'Test "distinct" in a HashSet with comparer');
// assert.equal(mocks.stack.distinct().count(), 5, 'Test "distinct" in a Stack');
// assert.equal(mocks.stack.distinct(numericComparer).count(), 5, 'Test "distinct" in a Stack with comparer');
// assert.equal(mocks.queue.distinct().count(), 5, 'Test "distinct" in a Queue');
// assert.equal(mocks.queue.distinct(numericComparer).count(), 5, 'Test "distinct" in a Queue with comparer');
// assert.equal(mocks.set.distinct().count(), 5, 'Test "distinct" in a Set');
// assert.equal(mocks.set.distinct(numericComparer).count(), 5, 'Test "distinct" in a Set with comparer');
// var mapComparer = {
// hash: function (t) {
// return t[0] % 2;
// },
// equals: function (a, b) {
// return a[0] === b[0];
// }
// };
// assert.equal(mocks.map.distinct().count(), 5, 'Test "distinct" in a Map');
// assert.equal(mocks.map.distinct(mapComparer).count(), 5, 'Test "distinct" in a Map with comparer');
// var keyValuePairComparer = {
// hash: function (t) {
// return t.key % 2;
// },
// equals: function (a, b) {
// return a.key === b.key;
// }
// };
// assert.equal(mocks.dictionary.distinct().count(), 5, 'Test "distinct" in a Dictionary');
// assert.equal(mocks.dictionary.distinct(keyValuePairComparer).count(), 5, 'Test "distinct" in a Dictionary with comparer');
// assert.equal(mocks.lookup.distinct().count(), 5, 'Test "distinct" in a Lookup');
// assert.equal(mocks.lookup.distinct(keyValuePairComparer).count(), 5, 'Test "distinct" in a Lookup with comparer');
// assert.equal(mocks.sortedList.distinct().count(), 5, 'Test "distinct" in a SortedList');
// assert.equal(mocks.sortedList.distinct(keyValuePairComparer).count(), 5, 'Test "distinct" in a SortedList with comparer');
// });
})));
|
<filename>src/models/token/RefreshTokenRepository.ts
import { EntityRepository, Repository } from 'typeorm';
import { User } from '../user/User';
import { AbstractTokenHelper } from './AbstractTokenHelper';
import { IAbstractTokenRepository } from './IAbstractTokenRepository';
import { RefreshToken } from './RefreshToken';
// Refresh-token lifetime in seconds (1209600 s = 2 weeks by default).
// Parse the env override first and validate it: with the old
// Number(env || default) form, a non-numeric REFRESH_TOKEN_EXPIRATION
// produced NaN and silently broke every token's expiry.
const parsedExpiration = Number(process.env.REFRESH_TOKEN_EXPIRATION);
const EXPIRATION_TIMESTAMP = Number.isFinite(parsedExpiration) && parsedExpiration > 0 ? parsedExpiration : 1209600; // 2 weeks by default
@EntityRepository(RefreshToken)
export class RefreshTokenRepository extends Repository<RefreshToken> implements IAbstractTokenRepository {
  /**
   * Finds the stored refresh token entity matching the given token string.
   * Delegates to the shared AbstractTokenHelper used by all token repositories.
   */
  findOneByToken(token: string): Promise<RefreshToken> {
    return AbstractTokenHelper.findOneByToken(this, token);
  }
  /**
   * Creates and persists a new refresh token for the given user,
   * expiring after EXPIRATION_TIMESTAMP (2 weeks unless overridden via env).
   */
  generateToken(user: User): Promise<RefreshToken> {
    return AbstractTokenHelper.generateToken(this, user, EXPIRATION_TIMESTAMP);
  }
}
|
package com.misakiga.husky.provider.domain;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import javax.persistence.Column;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import java.io.Serializable;
import java.util.Date;
/**
 * Permission entity backing the {@code ums_permission} table.
 * Permissions form a tree via {@link #pid} and are typed as
 * directory, menu, or button (API-bound) entries.
 *
 * @author MISAKIGA
 */
@Data
@Table(name = "ums_permission")
public class UmsPermission implements Serializable {
    private static final long serialVersionUID = -6309784278282035821L;
    @Id
    @Column(name = "id")
    @GeneratedValue(generator = "JDBC")
    private Long id;
    /**
     * Parent permission id (0 or null for a root entry).
     */
    @Column(name = "pid")
    private Long pid;
    /**
     * Display name.
     */
    @Column(name = "`name`")
    private String name;
    /**
     * Permission value (the string checked during authorization).
     */
    @Column(name = "`value`")
    private String value;
    /**
     * Icon.
     */
    @Column(name = "icon")
    private String icon;
    /**
     * Permission type: 0 -> directory; 1 -> menu; 2 -> button (permission bound to an API).
     */
    @Column(name = "`type`")
    private Integer type;
    /**
     * Front-end resource path.
     */
    @Column(name = "uri")
    private String uri;
    /**
     * Enabled status: 0 -> disabled; 1 -> enabled.
     */
    @Column(name = "`status`")
    private Integer status;
    /**
     * Creation time.
     */
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    @Column(name = "create_time")
    private Date createTime;
    /**
     * Sort order.
     */
    @Column(name = "sort")
    private Integer sort;
    // Explicit toString overrides the Lombok-generated one (kept as-is for
    // callers that may depend on this exact format).
    @Override
    public String toString() {
        return "UmsPermission{" +
                "id=" + id +
                ", pid=" + pid +
                ", name='" + name + '\'' +
                ", value='" + value + '\'' +
                ", icon='" + icon + '\'' +
                ", type=" + type +
                ", uri='" + uri + '\'' +
                ", status=" + status +
                ", createTime=" + createTime +
                ", sort=" + sort +
                '}';
    }
}
|
<reponame>thexdesk/ipfs-companion
'use strict'
/* eslint-env browser, webextensions */
require('./quick-import.css')
const browser = require('webextension-polyfill')
const choo = require('choo')
const html = require('choo/html')
const logo = require('./logo')
const drop = require('drag-and-drop-files')
const fileReaderPullStream = require('pull-file-reader')
// Localized tab title for the quick-import page.
document.title = browser.i18n.getMessage('panel_quickImport')
// Single-route choo app: the store holds import state, the page renders it.
const app = choo()
app.use(quickImportStore)
app.route('*', quickImportPage)
app.mount('#root')
/**
 * Choo store for the quick-import page.
 * Mirrors status pushed from the background script over a runtime port,
 * and wires both import triggers: the file input and drag & drop.
 */
function quickImportStore (state, emitter) {
  state.message = ''
  state.peerCount = ''
  state.ipfsNodeType = 'external'
  state.expandOptions = false
  state.openViaWebUI = true
  state.userChangedOpenViaWebUI = false
  state.importDir = ''
  state.userChangedImportDir = false
  // Apply a status update pushed from the background script.
  function updateState ({ ipfsNodeType, peerCount, importDir, openViaWebUI }) {
    state.ipfsNodeType = ipfsNodeType
    state.peerCount = peerCount
    // This event will fire repeatedly,
    // we need to ensure we don't unset the user's preferences
    // when they change the options on this page
    if (!state.userChangedImportDir) {
      state.importDir = importDir
    }
    if (!state.userChangedOpenViaWebUI) {
      state.openViaWebUI = openViaWebUI
    }
  }
  let port
  emitter.on('DOMContentLoaded', async () => {
    // initialize connection to the background script which will trigger UI updates
    port = browser.runtime.connect({ name: 'browser-action-port' })
    port.onMessage.addListener(async (message) => {
      if (message.statusUpdate) {
        console.log('In browser action, received message from background:', message)
        updateState(message.statusUpdate)
        emitter.emit('render')
      }
    })
  })
  emitter.on('fileInputChange', event => processFiles(state, emitter, event.target.files))
  // drag & drop anywhere
  drop(document.body, files => processFiles(state, emitter, files))
}
/**
 * Imports the given browser File objects into IPFS via the background page.
 * On success: copies a share link, preloads at the public gateway, opens the
 * imported content (web UI or gateway) and closes this tab. On failure the
 * tab stays open and shows the error.
 *
 * @param {Object} state - choo store state (progress/message are updated here)
 * @param {Object} emitter - choo emitter used to trigger re-renders
 * @param {FileList|File[]} files - files picked or dropped by the user
 */
async function processFiles (state, emitter, files) {
  console.log('Processing files', files)
  try {
    if (!files.length) {
      // File list may be empty in some rare cases
      // eg. when user drags something from proprietary browser context
      // We just ignore those UI interactions.
      throw new Error('found no valid sources, try selecting a local file instead')
    }
    // All IPFS plumbing lives in the extension's background page.
    const { ipfsCompanion } = await browser.runtime.getBackgroundPage()
    const ipfsImportHandler = ipfsCompanion.ipfsImportHandler
    const importTab = await browser.tabs.getCurrent()
    const streams = files2streams(files)
    emitter.emit('render')
    const options = {
      wrapWithDirectory: true,
      pin: false // we use MFS for implicit pinning instead
    }
    state.progress = `Importing ${streams.length} files...`
    const importDir = ipfsImportHandler.formatImportDirectory(state.importDir)
    let result
    try {
      result = await ipfsImportHandler.importFiles(streams, options, importDir)
    } catch (err) {
      console.error('Failed to import files to IPFS', err)
      ipfsCompanion.notify('notify_importErrorTitle', 'notify_inlineErrorMsg', `${err.message}`)
      throw err
    }
    state.progress = 'Completed'
    emitter.emit('render')
    console.log(`Successfully imported ${streams.length} files`)
    ipfsImportHandler.copyShareLink(result)
    ipfsImportHandler.preloadFilesAtPublicGateway(result)
    // open web UI at proper directory
    // unless and embedded node is in use (no access to web UI)
    // in which case, open resource.
    if (state.ipfsNodeType === 'embedded' || !state.openViaWebUI) {
      await ipfsImportHandler.openFilesAtGateway({ result, openRootInNewTab: true })
    } else {
      await ipfsImportHandler.openFilesAtWebUI(importDir)
    }
    // close import tab as it will be replaced with a new tab with imported content
    await browser.tabs.remove(importTab.id)
  } catch (err) {
    console.error('Unable to perform import', err)
    // keep import tab and display error message in it
    state.message = 'Unable to import to IPFS:'
    state.progress = `${err}`
    emitter.emit('render')
  }
}
/* disabled in favor of fileReaderPullStream
function file2buffer (file) {
return new Promise((resolve, reject) => {
const reader = new FileReader()
reader.onloadend = () => resolve(Buffer.from(reader.result))
reader.onerror = reject
reader.readAsArrayBuffer(file)
})
} */
/**
 * Converts browser File objects into { path, content } entries where
 * content is a chunked pull-stream over the file, ready for IPFS add.
 * Throws on the first entry that looks like a directory.
 */
function files2streams (files) {
  return Array.from(files, (file) => {
    if (!file.type && file.size === 0) {
      // UX fail-safe:
      // at the moment drag&drop of an empty file without an extension
      // looks the same as dropping a directory
      throw new Error(`unable to add "${file.name}", directories and empty files are not supported`)
    }
    return {
      path: file.name,
      content: fileReaderPullStream(file, { chunkSize: 32 * 1024 * 1024 })
    }
  })
}
/**
 * Renders the collapsible options section: "open via web UI" checkbox
 * (hidden for embedded nodes, which have no web UI) and the import
 * directory input. User edits set the userChanged* flags so background
 * status updates don't overwrite them.
 */
function quickImportOptions (state, emit) {
  const onExpandOptions = (e) => { state.expandOptions = true; emit('render') }
  const onDirectoryChange = (e) => { state.userChangedImportDir = true; state.importDir = e.target.value }
  const onOpenViaWebUIChange = (e) => { state.userChangedOpenViaWebUI = true; state.openViaWebUI = e.target.checked }
  const displayOpenWebUI = state.ipfsNodeType !== 'embedded'
  if (state.expandOptions) {
    return html`
      <div id='quickImportOptions' class='sans-serif mt3 f6 lh-copy light-gray no-user-select'>
        ${displayOpenWebUI ? html`<label for='openViaWebUI' class='flex items-center db relative mt1 pointer'>
          <input id='openViaWebUI' type='checkbox' onchange=${onOpenViaWebUIChange} checked=${state.openViaWebUI} />
          <span class='mark db flex items-center relative mr2 br2'></span>
          ${browser.i18n.getMessage('quickImport_options_openViaWebUI')}
        </label>` : null}
        <label for='importDir' class='flex items-center db relative mt1 pointer'>
          ${browser.i18n.getMessage('quickImport_options_importDir')}
          <span class='mark db flex items-center relative mr2 br2'></span>
          <input id='importDir' class='w-40 bg-transparent aqua monospace br1 ba b--aqua pa2' type='text' oninput=${onDirectoryChange} value=${state.importDir} />
        </label>
      </div>
    `
  }
  // Collapsed state: just the "show options" toggle.
  return html`
    <button class='mt3 f6 lh-copy link bn bg-transparent moon-gray dib pa0 pointer' style='color: #6ACAD1' onclick=${onExpandOptions}>
      ${browser.i18n.getMessage('quickImport_options_show')} »
    </button>
  `
}
/**
 * Main view for the quick-import page: header with logo and peer count,
 * a large file-picker / drop target, progress + error messages, and the
 * options section rendered by quickImportOptions.
 */
function quickImportPage (state, emit) {
  const onFileInputChange = (e) => emit('fileInputChange', e)
  const { peerCount } = state
  return html`
    <div class="montserrat pt5" style="background: linear-gradient(to top, #041727 0%,#043b55 100%); height:100%;">
      <div class="mw8 center pa3 white">
        <header class="flex items-center no-user-select">
          ${logo({
            size: 80,
            path: '../../icons',
            heartbeat: false
          })}
          <div class="pl3">
            <h1 class="f2 fw5 ma0">
              ${browser.i18n.getMessage('panel_quickImport')}
            </h1>
            <p class="f3 fw2 lh-copy ma0 light-gray">
              ${browser.i18n.getMessage('quickImport_subhead_peers', [peerCount])}
            </p>
          </div>
        </header>
        <label for="quickImportInput" class='db relative mt5 hover-inner-shadow pointer' style="border:solid 2px #6ACAD1">
          <input class="db pointer w-100 h-100 top-0 o-0" type="file" id="quickImportInput" multiple onchange=${onFileInputChange} />
          <div class='dt dim' style='padding-left: 100px; height: 300px'>
            <div class='dtc v-mid'>
              <span class="f3 dim br1 ph4 pv3 dib navy" style="background: #6ACAD1">
                ${browser.i18n.getMessage('quickImport_pick_file_button')}
              </span>
              <span class='f3'>
                <emph class='underline pl3 pr2 moon-gray'>
                  ${browser.i18n.getMessage('quickImport_or')}
                </emph>
                ${browser.i18n.getMessage('quickImport_drop_it_here')}
              </span>
              <p class='f4 db'>${state.message}<span class='code db absolute fr pv2'>${state.progress}</span></p>
            </div>
          </div>
        </label>
        ${quickImportOptions(state, emit)}
      </div>
    </div>
  `
}
|
import React from "react";
import { connect } from "react-redux";
import { getSpotifyTokenThunk } from "../store/spotifyAuth";
import { getAllSongsThunk } from "../store/allSongs";
class FavoriteSongs extends React.Component {
async componentDidMount() {
await this.props.getSpotifyToken();
await this.props.getSongs(this.props.spotifyToken);
}
render() {
const songs = this.props.allSongs || [];
return (
<div className="row justify-content-around">
{songs.map((song) => {
const songInfo = song.track;
const image = songInfo.album.images
? this.props.smallestImage(songInfo.album.images)
: null;
const artistList = songInfo.artists.reduce((artistArr, artist) => {
artistArr.push(artist.name)
return artistArr
},[])
return (
<div
className="col-3 my-3 mx-2 card bg-dark bg-opacity-50 bg-gradient text-white shadow"
key={songInfo.id}
onClick={() => this.props.clickHandler(songInfo)}
>
<img className="shadow-sm card-img-top rounded mt-4" src={`${image.url}`} />
<div className="card-body">
<h4 className="fw-bolder text-white card-text text-truncate">{songInfo.name}</h4>
<h5 className="fw-bold text-white text-muted card-text text-truncate">{artistList.join(', ')}</h5>
</div>
</div>
);
})}
</div>
);
}
}
// Selects only the slices this component reads from the redux store.
const mapState = (state) => ({
  spotifyToken: state.spotifyToken,
  allSongs: state.allSongs,
});
// Binds the two thunks this component needs to the store's dispatch.
const mapDispatch = (dispatch) => ({
  getSpotifyToken: () => dispatch(getSpotifyTokenThunk()),
  getSongs: (spotifyToken) => dispatch(getAllSongsThunk(spotifyToken)),
});

export default connect(mapState, mapDispatch)(FavoriteSongs);
|
#include <iostream>
// Adds two integers using only bitwise operations (no '+').
// XOR yields the carry-less sum; AND + left shift yields the carries,
// which are folded back in recursively until none remain.
int add(int a, int b) {
    if (b == 0) {
        return a;
    }
    return add(a ^ b, (a & b) << 1);
}
// Demo entry point: prints 5 + 7 computed via the bitwise adder (12).
int main() {
    std::cout << add(5, 7) << std::endl;
    return 0;
}
|
<filename>src/gapp/Hits.hpp<gh_stars>1-10
/**
* @file Hits.hpp
*
* Created on: 23.07.2013
* @author: <NAME> (<EMAIL> at gmail dot com)
*/
#ifndef ZMIJ_GAPP_HITS_HPP_
#define ZMIJ_GAPP_HITS_HPP_
#include <gapp/Types.hpp>
namespace gapp {
/**
 * Browser / device environment parameters attached to a hit.
 * All fields are optional text values unless noted; limits and parameter
 * keys come from the Google Analytics Measurement Protocol reference.
 */
struct SystemInfo {
    /**
     * Screen Resolution. Optional. Parameter: sr, text, max 20 bytes, all hit types.
     * Example: sr=800x600
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#sr
     */
    text_opt_t screen_resolution;
    /**
     * Viewport size — viewable area of the browser / device.
     * Optional. Parameter: vp, text, max 20 bytes, all hit types. Example: vp=123x456
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#vp
     */
    text_opt_t viewport_size;
    /**
     * Document Encoding — character set of the page / document.
     * Optional. Parameter: de, text, default UTF-8, max 20 bytes, all hit types.
     * Example: de=UTF-8
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#de
     */
    text_opt_t document_encoding;
    /**
     * Screen Colors — screen color depth.
     * Optional. Parameter: sd, text, max 20 bytes, all hit types. Example: sd=24-bits
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#sd
     */
    text_opt_t screen_colors;
    /**
     * User Language.
     * Optional. Parameter: ul, text, max 20 bytes, all hit types. Example: ul=en-us
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ul
     */
    text_opt_t user_language;
    /**
     * Java Enabled — whether Java was enabled.
     * Optional. Parameter: je, boolean, all hit types. Example: je=1
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#je
     */
    boolean_opt_t java_enabled;
    /**
     * Flash Version.
     * Optional. Parameter: fl, text, max 20 bytes, all hit types.
     * Example: fl=10%201%20r103 ("10 1 r103")
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#fl
     */
    text_opt_t flash_version;
};

// Serializes the populated fields as Measurement Protocol parameters.
std::ostream&
operator << (std::ostream& out, SystemInfo const& val);

typedef boost::optional< SystemInfo > system_info_opt_t;
/**
 * Traffic-source / campaign attribution parameters for a hit.
 * All fields are optional; parameter keys and limits follow the
 * Measurement Protocol reference.
 */
struct TrafficSources {
    /**
     * Document Referrer — the referral source that brought traffic to the site;
     * also used to compute the traffic source. Value is a URL.
     * Optional. Parameter: dr, text, max 2048 bytes, all hit types.
     * Example: dr=http%3A%2F%2Fexample.com
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#dr
     */
    text_opt_t document_referrer;
    /**
     * Campaign Name.
     * Optional. Parameter: cn, text, max 100 bytes, all hit types.
     * Example: cn=%28direct%29
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#cn
     */
    text_opt_t campaign_name;
    /**
     * Campaign Source.
     * Optional. Parameter: cs, text, max 100 bytes, all hit types.
     * Example: cs=%28direct%29
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#cs
     */
    text_opt_t campaign_source;
    /**
     * Campaign Medium.
     * Optional. Parameter: cm, text, max 50 bytes, all hit types.
     * Example: cm=organic
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#cm
     */
    text_opt_t campaign_medium;
    /**
     * Campaign Keyword.
     * Optional. Parameter: ck, text, max 500 bytes, all hit types.
     * Example: ck=Blue%20Shoes
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ck
     */
    text_opt_t campaign_keyword;
    /**
     * Campaign Content.
     * Optional. Parameter: cc, text, max 500 bytes, all hit types.
     * Example: cc=content
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#cc
     */
    text_opt_t campaign_content;
    /**
     * Campaign ID.
     * Optional. Parameter: ci, text, max 100 bytes, all hit types.
     * Example: ci=ID
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ci
     */
    text_opt_t campaign_id;
    /**
     * Google AdWords ID.
     * Optional. Parameter: gclid, text, all hit types.
     * Example: gclid=CL6Q-OXyqKUCFcgK2goddQuoHg
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#gclid
     */
    text_opt_t gclid;
    /**
     * Google Display Ads ID.
     * Optional. Parameter: dclid, text, all hit types.
     * Example: dclid=d_click_id
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#dclid
     */
    text_opt_t dclid;
};

// Serializes the populated fields as Measurement Protocol parameters.
std::ostream&
operator << (std::ostream& out, TrafficSources const& val);

typedef boost::optional< TrafficSources > traffic_sources_opt_t;
/**
 * Parameters describing the page / document (or app screen) a hit refers to.
 * All fields are optional; dh and dp can override the host and path+query
 * portions of dl.
 */
struct ContentInformation {
    /**
     * Document location URL — the full URL of the page on which content
     * resides (origin + pathname + search in the JS clients). Strip any
     * user authentication or other private information before sending.
     * Optional. Parameter: dl, text, max 2048 bytes, all hit types.
     * Example: dl=http%3A%2F%2Ffoo.com%2Fhome%3Fa%3Db
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#dl
     */
    text_opt_t document_location_url;
    /**
     * Document Host Name — hostname from which content was hosted.
     * Optional. Parameter: dh, text, max 100 bytes, all hit types.
     * Example: dh=foo.com
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#dh
     */
    text_opt_t document_host_name;
    /**
     * Document Path — path portion of the page URL. Should begin with '/'.
     * Optional. Parameter: dp, text, max 2048 bytes, all hit types.
     * Example: dp=%2Ffoo
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#dp
     */
    text_opt_t document_path;
    /**
     * Document Title — title of the page / document.
     * Optional. Parameter: dt, text, max 1500 bytes, all hit types.
     * Example: dt=Settings
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#dt
     */
    text_opt_t document_title;
    /**
     * Content Description. If not specified, defaults to the unique URL of
     * the page (dl as-is, or assembled from dh and dp). App tracking uses
     * this as the 'Screen Name' of the appview hit.
     * Optional. Parameter: cd, text, max 2048 bytes, all hit types.
     * Example: cd=High%20Scores
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#cd
     */
    text_opt_t content_description;
};

// Serializes the populated fields as Measurement Protocol parameters.
std::ostream&
operator << (std::ostream& out, ContentInformation const& val);

typedef boost::optional< ContentInformation > content_info_opt_t;
/**
 * App-tracking identification parameters (name and version of the
 * application generating hits).
 */
struct ApplicationInfo {
    //@{
    /** @name App Tracking */
    /**
     * Application Name. Parameter: an, text, max 100 bytes.
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#an
     */
    text_opt_t application_name;
    /**
     * Application Version. Parameter: av, text, max 100 bytes.
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#av
     */
    text_opt_t application_version;
    //@}
};

// Serializes the populated fields as Measurement Protocol parameters.
std::ostream&
operator << (std::ostream& out, ApplicationInfo const& val);

typedef boost::optional< ApplicationInfo > application_info_opt_t;
/**
 * Base payload type for all Measurement Protocol hits.
 * Subclasses append their own parameters by overriding doWrite().
 * Please note that Protocol version and Tracking ID / Web property ID
 * is filled by the tracker class.
 */
struct Hit {
    virtual
    ~Hit() {}
    //@{
    /** @name Required parameters */
    /**
     * Client ID. Required for all hit types.
     * Anonymously identifies a particular user, device, or browser instance
     * (first-party cookie on the web; random per-install value for mobile
     * apps). Should be a random UUID (version 4) as described in
     * http://www.ietf.org/rfc/rfc4122.txt
     * Parameter: cid, text, all hit types.
     * Example: cid=35009a79-1a05-49d7-b876-2b884d0f825b
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#cid
     */
    text_t client_id;
    //@{
    /** @name Hit */
    /**
     * Hit type. Required for all hit types.
     * Must be one of 'pageview', 'appview', 'event', 'transaction', 'item',
     * 'social', 'exception', 'timing'.
     * Parameter: t, text. Example: t=pageview
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#t
     */
    hit_type_t hit_type;
    //@}
    //@}
    //@{
    /** @name Optional parameters */
    //@{
    /** @name General */
    /**
     * Anonymize IP. When present, the sender's IP address is anonymized
     * (any of &aip=, &aip=0, or &aip=1 triggers it).
     * Parameter: aip, boolean, all hit types. Example: aip=1
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#aip
     */
    boolean_opt_t anonymize_ip;
    /**
     * Queue Time — used to collect offline / latent hits. Time delta (ms)
     * between when the hit occurred and when it was sent; must be >= 0.
     * Values greater than four hours may lead to hits not being processed.
     * Parameter: qt, integer, all hit types. Example: qt=560
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#qt
     */
    integer_opt_t queue_time;
    /**
     * Cache Buster — random number sent in GET requests so browsers and
     * proxies don't cache hits. Should be the final parameter of the
     * request. Not used in reporting.
     * Parameter: z, text, all hit types. Example: z=289372387623
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#z
     */
    text_opt_t cache_buster;
    //@}
    //@{
    /** @name Session control */
    /**
     * Session Control. Optional. 'start' forces a new session to start with
     * this hit; 'end' forces the current session to end with this hit.
     * All other values are ignored.
     * Parameter: sc, text, all hit types. Example: sc=start / sc=end
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#sc
     */
    text_opt_t session_control;
    //@}
    //@{
    /** @name Traffic Sources */
    traffic_sources_opt_t traffic_sources;
    //@}
    //@{
    /** @name Hit */
    /**
     * Non-Interaction Hit. Optional. Marks the hit as non-interactive.
     * Parameter: ni, boolean, all hit types. Example: ni=1
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ni
     */
    boolean_opt_t non_interaction_hit;
    //@}
    //@{
    /** @name Content information */
    content_info_opt_t content_info;
    //@}
    //@{
    /** @name Custom Dimensions / Metrics */
    // @TODO Define custom dimensions and metrics
    //@}
    //@}
    // Writes the common parameters, then delegates to doWrite() for
    // subclass-specific parameters.
    void
    write(std::ostream& out) const;
private:
    // Hook for subclasses to append their hit-type-specific parameters.
    virtual void
    doWrite(std::ostream& out) const {}
};

std::ostream&
operator << (std::ostream& out, Hit const& val);
/**
 * Event payload data (hit type 'event').
 */
struct Event : Hit {
    virtual
    ~Event() {}
    //@{
    /** @name Requred parameters */
    /**
     * Event Category. Must not be empty.
     * Parameter: ec, text, max 150 bytes, hit type: event.
     * Example: ec=Category
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ec
     */
    text_t event_category;
    /**
     * Event Action. Must not be empty.
     * Parameter: ea, text, max 500 bytes, hit type: event.
     * Example: ea=Action
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ea
     */
    text_t event_action;
    //@}
    //@{
    /** @name Optional parameters */
    /**
     * Event Label. Optional.
     * Parameter: el, text, max 500 bytes, hit type: event.
     * Example: el=Label
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#el
     */
    text_opt_t event_label;
    /**
     * Event Value. Optional. Must be non-negative.
     * Parameter: ev, integer, hit type: event.
     * Example: ev=55
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ev
     */
    integer_opt_t event_value;
    //@}
private:
    // Appends the event-specific parameters after the base Hit parameters.
    virtual void
    doWrite( std::ostream& out ) const;
};
/**
 * Common base for e-commerce hits (Transaction and Item): carries the
 * shared transaction id and optional currency code.
 */
struct EcommerceHit : Hit {
    virtual
    ~EcommerceHit() {}
    //@{
    /** @name Requred parameters */
    /**
     * Transaction ID. Required for transaction and item hit types.
     * A unique identifier for the transaction; should be the same for the
     * Transaction hit and the Item hits of that transaction.
     * Parameter: ti, text, max 500 bytes, hit types: transaction, item.
     * Example: ti=OD564
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ti
     */
    text_t transaction_id;
    //@}
    //@{
    /** @name Optional parameters */
    /**
     * Currency Code. Optional.
     * Parameter: cu, text, max 10 bytes, ISO 4217 code.
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#cu
     */
    text_opt_t currency_code;
    //@}
protected:
    // Appends ti/cu; protected so Transaction/Item can chain to it.
    virtual void
    doWrite( std::ostream& out ) const;
};
/**
 * Transaction payload data (hit type 'transaction').
 */
struct Transaction : EcommerceHit {
    virtual
    ~Transaction() {}
    /**
     * Transaction Affiliation — affiliation or store name. Optional.
     * Parameter: ta, text, max 500 bytes, hit type: transaction.
     * Example: ta=Member
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ta
     */
    text_opt_t transaction_affiliation;
    /**
     * Transaction Revenue — total revenue associated with the transaction,
     * including any shipping or tax costs. Optional.
     * Parameter: tr, currency, default 0, hit type: transaction.
     * Example: tr=15.47
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#tr
     */
    currency_opt_t transaction_revenue;
    /**
     * Transaction Shipping — total shipping cost. Optional.
     * Parameter: ts, currency, default 0, hit type: transaction.
     * Example: ts=3.50
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ts
     */
    currency_opt_t transaction_shipping;
    /**
     * Transaction Tax — total tax of the transaction. Optional.
     * Parameter: tt, currency, default 0, hit type: transaction.
     * Example: tt=11.20
     * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#tt
     */
    currency_opt_t transaction_tax;
private:
    // Appends the transaction-specific parameters after the shared
    // EcommerceHit parameters.
    virtual void
    doWrite( std::ostream& out ) const;
};
/**
 * Measurement Protocol "item" hit.
 *
 * One line item of an e-commerce transaction; the transaction id linking
 * it to its Transaction lives in the EcommerceHit base.
 */
struct Item : EcommerceHit {
virtual
~Item() {}
/**
 * Item Name
 *
 * Required for item hit type.
 *
 * Specifies the item name.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * in         text        None           500 Bytes   item
 *
 * Example value: Shoe
 * Example usage: in=Shoe
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#in
 */
text_t item_name;
/**
 * Item Price
 *
 * Optional.
 *
 * Specifies the price for a single item / unit.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * ip         currency    0              None        item
 *
 * Example value: 3.50
 * Example usage: ip=3.50
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ip
 */
currency_opt_t item_price;
/**
 * Item Quantity
 *
 * Optional.
 *
 * Specifies the number of items purchased.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * iq         integer     0              None        item
 *
 * Example value: 4
 * Example usage: iq=4
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#iq
 */
integer_opt_t item_quantity;
/**
 * Item Code
 *
 * Optional.
 *
 * Specifies the SKU or item code.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * ic         text        None           500 Bytes   item
 *
 * Example value: SKU47
 * Example usage: ic=SKU47
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#ic
 */
text_opt_t item_code;
/**
 * Item Category
 *
 * Optional.
 *
 * Specifies the category that the item belongs to.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * iv         text        None           500 Bytes   item
 *
 * Example value: Blue
 * Example usage: iv=Blue
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#iv
 */
text_opt_t item_category;
private:
// Serialization hook: appends the item parameters (in, ip, iq, ic, iv)
// to the hit payload. Defined out of line.
virtual void
doWrite( std::ostream& out ) const;
};
/**
 * Measurement Protocol "social" hit: records a social-network interaction
 * (network, action, target). All three fields are required by the protocol.
 */
struct Social : Hit {
virtual
~Social() {}
/**
 * Social Network
 *
 * Required for social hit type.
 *
 * Specifies the social network, for example Facebook or Google Plus.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * sn         text        None           50 Bytes    social
 *
 * Example value: facebook
 * Example usage: sn=facebook
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#sn
 */
text_t social_network;
/**
 * Social Action
 *
 * Required for social hit type.
 *
 * Specifies the social interaction action. For example on Google Plus
 * when a user clicks the +1 button, the social action is 'plus'.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * sa         text        None           50 Bytes    social
 *
 * Example value: like
 * Example usage: sa=like
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#sa
 */
text_t social_action;
/**
 * Social Action Target
 *
 * Required for social hit type.
 *
 * Specifies the target of a social interaction. This value is typically
 * a URL but can be any text.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * st         text        None           2048 Bytes  social
 *
 * Example value: http://foo.com
 * Example usage: st=http%3A%2F%2Ffoo.com
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters#st
 */
text_t social_action_target;
private:
// Serialization hook: appends the social parameters (sn, sa, st) to the
// hit payload. Defined out of line.
virtual void
doWrite( std::ostream& out ) const;
};
/**
 * Measurement Protocol "timing" hit: user-defined timings (utc/utv/utt/utl)
 * plus the browser navigation-timing measurements (plt, dns, pdt, rrt, tcp,
 * srt). All durations are in milliseconds.
 *
 * NOTE(review): the fields spelled "responce" are typos for "response";
 * renaming them would break existing callers, so they are kept as-is.
 */
struct Timing : Hit {
virtual
~Timing() {}
/**
 * User timing category
 *
 * Optional.
 *
 * Specifies the user timing category.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * utc        text        None           150 Bytes   timing
 *
 * Example value: category
 * Example usage: utc=category
 */
text_opt_t timing_category;
/**
 * User timing variable name
 *
 * Optional.
 *
 * Specifies the user timing variable.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * utv        text        None           500 Bytes   timing
 *
 * Example value: lookup
 * Example usage: utv=lookup
 */
text_opt_t timing_variable_name;
/**
 * User timing time
 *
 * Optional.
 *
 * Specifies the user timing value. The value is in milliseconds.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * utt        integer     None           None        timing
 *
 * Example value: 123
 * Example usage: utt=123
 */
integer_opt_t timing_time;
/**
 * User timing label
 *
 * Optional.
 *
 * Specifies the user timing label.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * utl        text        None           500 Bytes   timing
 *
 * Example value: label
 * Example usage: utl=label
 */
text_opt_t timing_label;
/**
 * Page Load Time
 *
 * Optional.
 *
 * Specifies the time it took for a page to load. The value is in milliseconds.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * plt        integer     None           None        timing
 *
 * Example value: 3554
 * Example usage: plt=3554
 */
integer_opt_t page_load_time;
/**
 * DNS Time
 *
 * Optional.
 *
 * Specifies the time it took to do a DNS lookup. The value is in milliseconds.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * dns        integer     None           None        timing
 *
 * Example value: 43
 * Example usage: dns=43
 */
integer_opt_t dns_time;
/**
 * Page Download Time
 *
 * Optional.
 *
 * Specifies the time it took for the page to be downloaded. The value is
 * in milliseconds.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * pdt        integer     None           None        timing
 *
 * Example value: 500
 * Example usage: pdt=500
 */
integer_opt_t page_download_time;
/**
 * Redirect Response Time
 *
 * Optional.
 *
 * Specifies the time it took for any redirects to happen. The value is in
 * milliseconds.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * rrt        integer     None           None        timing
 *
 * Example value: 500
 * Example usage: rrt=500
 */
integer_opt_t redirect_responce_time;
/**
 * TCP Connect Time
 *
 * Optional.
 *
 * Specifies the time it took for a TCP connection to be made. The value is
 * in milliseconds.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * tcp        integer     None           None        timing
 *
 * Example value: 500
 * Example usage: tcp=500
 */
integer_opt_t tcp_connect_time;
/**
 * Server Response Time
 *
 * Optional.
 *
 * Specifies the time it took for the server to respond after the connect
 * time. The value is in milliseconds.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * srt        integer     None           None        timing
 *
 * Example value: 500
 * Example usage: srt=500
 */
integer_opt_t server_responce_time;
private:
// Serialization hook: appends the timing parameters to the hit payload.
// Defined out of line.
virtual void
doWrite( std::ostream& out ) const;
};
/**
 * Measurement Protocol "exception" hit: reports a crash or handled error
 * (description plus a fatal/non-fatal flag).
 */
struct Exception : Hit {
virtual
~Exception() {}
/**
 * Exception Description
 *
 * Optional.
 *
 * Specifies the description of an exception.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * exd        text        None           150 Bytes   exception
 *
 * Example value: DatabaseError
 * Example usage: exd=DatabaseError
 */
text_opt_t exception_description;
/**
 * Is Exception Fatal?
 *
 * Optional.
 *
 * Specifies whether the exception was fatal.
 *
 * Parameter  Value Type  Default Value  Max Length  Supported Hit Types
 * exf        boolean     1              None        exception
 *
 * Example value: 0
 * Example usage: exf=0
 */
boolean_opt_t is_fatal;
private:
// Serialization hook: appends the exception parameters (exd, exf) to the
// hit payload. Defined out of line.
virtual void
doWrite( std::ostream& out ) const;
};
} // namespace gapp
#endif /* ZMIJ_GAPP_HITS_HPP_ */
|
#!/usr/bin/env bash
# Build the project with stack, promoting GHC warnings to errors.
#
# Usage: script.sh [extra stack arguments...]
#   * Installs binaries to /usr/local/bin unless the caller already passed
#     --local-bin-path.
#   * Honours $STACK_YAML to enable a workaround for tasty-discover on GHC 8.0.
set -eu -o pipefail
STACK_ARGS=(
--ghc-options -Werror
)
# Only force the install location when the caller did not choose one.
if [[ "$*" != *--local-bin-path* ]]; then
STACK_ARGS+=(--local-bin-path /usr/local/bin)
fi
# tasty-discover-3.0.2 does not discover all modules by default, but this
# flag is deprecated in newer versions.
# Use ${STACK_YAML:-} so an unset STACK_YAML does not abort the script under
# `set -u` (the original referenced it bare and would die here).
if [[ "${STACK_YAML:-}" == "stack-ghc-8.0.yaml" ]]; then
STACK_ARGS+=(--ghc-options '-optF --no-module-suffix')
fi
exec stack build "${STACK_ARGS[@]}" "$@"
|
<gh_stars>1-10
#pragma once
// Small helpers for reading numbered text lines from stdin and tokenising
// them, with '#' comments and space/tab whitespace stripped.
#include <iostream>
#include <vector>
#include <string>
// Everything after this character is treated as a comment.
#define COMMENT_CHAR "#"
// Characters considered whitespace when trimming/splitting.
#define WHITESPACE " \t"
namespace lines {
// One input line plus its position in the stream.
// NOTE(review): whether `number` is 0- or 1-based is decided by
// lines_from_cin() in the implementation file — confirm there.
struct line {
int number;
std::string text;
};
// Trims the line's text in place (see the implementation for exact semantics).
void trim_line(line &input);
using lines = std::vector<line>;
// Reads every line available on std::cin.
lines lines_from_cin();
// Presumably filters out empty/comment-only lines — confirm in the .cpp.
lines clean_lines(lines &lines);
// Splits a line's text into tokens.
std::vector<std::string> split_line(line input);
}
|
/**
 * june.web.service:com.june.web.service.CXF.CalculateServer.java
 * Date: 2017-03-26
 */
package com.june.web.service.CXF;
import org.apache.cxf.frontend.ServerFactoryBean;
import org.apache.cxf.jaxws.JaxWsServerFactoryBean;
/**
 * CalculateServer
 * <p>
 * Publishes the {@code Calculate} web service, either through
 * {@link JaxWsServerFactoryBean} (JAX-WS frontend) or through
 * {@link ServerFactoryBean} (simple frontend). The two frontends generate
 * slightly different WSDL documents for the same service class.
 *
 * @author 王俊伟 <EMAIL>
 * @blog https://www.github.com/junehappylove
 * @date 2017-03-26
 * @version 1.0.0
 */
public class CalculateServer {

	/** Endpoint address; previously duplicated verbatim in both publish methods. */
	private static final String SERVICE_ADDRESS = "http://127.0.0.1:8091/Calculate";

	/**
	 * Entry point: publishes the service using the JAX-WS frontend.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		useByJaxWsServerFactoryBean();
		//useByServerFactoryBean();
	}

	/**
	 * Publishes the WebService via {@link JaxWsServerFactoryBean}
	 * (JAX-WS frontend). Blocks only for setup; CXF serves requests on its
	 * own transport threads after {@code create()}.
	 */
	static void useByJaxWsServerFactoryBean(){
		CalculateImpl calculateImpl = new CalculateImpl();
		JaxWsServerFactoryBean factory = new JaxWsServerFactoryBean();
		factory.setServiceClass(Calculate.class);
		factory.setAddress(SERVICE_ADDRESS);
		factory.setServiceBean(calculateImpl);
		factory.create();
	}

	/**
	 * Publishes the WebService via {@link ServerFactoryBean} (simple
	 * frontend); kept for WSDL comparison, currently unused (see main).
	 */
	static void useByServerFactoryBean(){
		CalculateImpl calculateImpl = new CalculateImpl();
		ServerFactoryBean factory = new ServerFactoryBean();
		factory.setServiceClass(Calculate.class);
		factory.setAddress(SERVICE_ADDRESS);
		factory.setServiceBean(calculateImpl);
		factory.create();
	}
}
|
// invertTree mirrors the binary tree rooted at root in place and returns it:
// every node's left and right children are swapped, recursively.
// A nil root yields nil.
func invertTree(root *TreeNode) *TreeNode {
	if root == nil {
		return nil
	}
	// Invert both subtrees first (right then left, matching the original
	// evaluation order), then swap them with one multi-assignment.
	root.Left, root.Right = invertTree(root.Right), invertTree(root.Left)
	return root
}
// main builds the sample tree
//
//	      4
//	    /   \
//	   2     7
//	  / \   / \
//	 1   3 6   9
//
// and inverts it in place as a smoke test.
func main() {
	leaf := func(v int) *TreeNode {
		return &TreeNode{Val: v}
	}
	branch := func(v int, l, r *TreeNode) *TreeNode {
		return &TreeNode{Val: v, Left: l, Right: r}
	}
	tree := branch(4,
		branch(2, leaf(1), leaf(3)),
		branch(7, leaf(6), leaf(9)),
	)
	invertTree(tree)
}
|
#!/bin/csh
# generated by BIGNASim metatrajectory generator
#
# Grid Engine (SGE) batch-job directives: run in the submit directory and
# name the job/stdout/stderr after the NAFlex session id.
#$ -cwd
#$ -N BIGNaSim_curl_call_NAFlex55cd8ec1d0e35
#$ -o CURL.NAFlex55cd8ec1d0e35.out
#$ -e CURL.NAFlex55cd8ec1d0e35.err
# Launching CURL...
# CURL is calling a REST WS that generates the metatrajectory.
# The JSON body requests frames 1:4:1 of trajectory NAFlex_1fzx with the
# "name *" atom mask in mdcrd format, from the internal "ms2" host.
curl -i -H "Content-Type: application/json" -X GET -d '{"idSession":"NAFlex55cd8ec1d0e35","idTraj":"NAFlex_1fzx","name":"BIGNASim55cd8f2d49aa0-NAFlex_1fzx-1_4_1","description":"Subtrajectory of NAFlex_1fzx with 1_4_1 frames selected","mask":"name *","frames":"1:4:1","format":"mdcrd"}' http://ms2/download
|
#!/bin/sh
# Wrapper: start the MTR log extractor on the serial port given as $1,
# announcing the start via syslog (facility local0).
SERIAL_PORT="$1"
logger -p local0.info -t mtrservice "Starting MTR Log Extractor on port $SERIAL_PORT"
# Quote the port path: the original passed it unquoted, so a device path
# containing spaces or glob characters would be word-split/expanded.
/home/pi/mtr-log-extractor/venv/bin/python3 /home/pi/mtr-log-extractor/mtr-log-extractor.py -p "$SERIAL_PORT" -t 120 -f /home/pi/extracts/mtr-{}.log -d dropbox /home/pi/dropbox.token
|
//#####################################################################
// Copyright 2009, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class MATRIX_FLUID_POISSON
//#####################################################################
#include <PhysBAM_Tools/Grids_Uniform/SIDED_FACE_INDEX.h>
#include <PhysBAM_Tools/Matrices/SPARSE_MATRIX_FLAT_MXN.h>
#include <PhysBAM_Tools/Random_Numbers/RANDOM_NUMBERS.h>
#include <PhysBAM_Tools/Read_Write/Octave/OCTAVE_OUTPUT.h>
#include <PhysBAM_Dynamics/Coupled_Evolution/COLLISION_AWARE_INDEX_MAP.h>
#include <PhysBAM_Dynamics/Coupled_Evolution/MATRIX_FLUID_POISSON.h>
#include <PhysBAM_Dynamics/Coupled_Evolution/UNIFORM_COLLISION_AWARE_ITERATOR_FACE_INFO.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
// Stores references to the collision-aware index map and the 1/(rho*c^2)
// cell field; dt starts at 0 and is only set later by Compute()/
// Compute_Preconditioner().
template<class TV> MATRIX_FLUID_POISSON<TV>::
MATRIX_FLUID_POISSON(const COLLISION_AWARE_INDEX_MAP<TV>& index_map_input,
const T_ARRAYS_SCALAR& one_over_rho_c_squared_input)
:index_map(index_map_input),one_over_rho_c_squared(one_over_rho_c_squared_input),dt(0)
{
}
//#####################################################################
// Function Compute
//#####################################################################
// Assembles the Poisson operator poisson = G^T * diag(1/m) * G from the
// gradient matrix, but only when dt_in==0; for dt_in!=0 only the diagonal
// preconditioner data is (re)built in Compute_Preconditioner().
// Note: when use_preconditioner is false this returns immediately, so
// `poisson` is left untouched on that path.
template<class TV> void MATRIX_FLUID_POISSON<TV>::
Compute(const SPARSE_MATRIX_FLAT_MXN<T>& gradient,const VECTOR_ND<T>& one_over_fluid_mass,const T dt_in,const bool use_preconditioner)
{
if(!use_preconditioner) return;
if(!dt_in){
SPARSE_MATRIX_FLAT_MXN<T> negative_divergence;
gradient.Transpose(negative_divergence);
poisson=negative_divergence.Times_Diagonal_Times(one_over_fluid_mass,gradient).Create_NXN_Matrix();}
// will preconditioning work correctly with this matrix?
Compute_Preconditioner(dt_in);
}
//#####################################################################
// Function Compute_Preconditioner
//#####################################################################
// Two regimes:
//  * dt!=0: build a diagonal scaling V/(rho*c^2*dt^2) per indexed cell and
//    return (no Cholesky factorization needed).
//  * dt==0: build an incomplete Cholesky factorization of `poisson` into
//    poisson.C. The active "#if 1" branch first compacts away empty rows
//    (recording the old->new index mapping in the member `map`), because
//    the factorization must not see structurally empty rows; the disabled
//    "#else" branch does the same via index_map.real_cell_indices.
template<class TV> void MATRIX_FLUID_POISSON<TV>::
Compute_Preconditioner(const T dt_in)
{
dt=dt_in;
if(dt){const T dt_squared_by_V=(dt*dt)/index_map.grid.Cell_Size();
V_over_rho_c_squared_dt_squared_inverse_flat.Resize(index_map.Number_Cells());
for(int i=1;i<=index_map.indexed_cells.m;i++){TV_INT cell_index=index_map.indexed_cells(i);
V_over_rho_c_squared_dt_squared_inverse_flat(i)= // TODO(jontg): save and pass in rho_c_squared
dt_squared_by_V/one_over_rho_c_squared(cell_index);}
return;}
#if 1
// Map only the rows that have entries; `map` keeps compact->original indices.
ARRAY<int> rmap(poisson.n);
for(int i=1;i<=poisson.n;i++)
if(poisson.offsets(i)!=poisson.offsets(i+1))
rmap(i)=map.Append(i);
delete poisson.C;
poisson.C=new SPARSE_MATRIX_FLAT_NXN<T>;
poisson.C->Reset();
// Copy the non-empty rows of poisson into C with remapped column indices.
int index=poisson.offsets(1);
for(int i=1;i<=poisson.n;i++){
int end=poisson.offsets(i+1);
if(index==end) continue;
for(;index<end;index++)
poisson.C->Append_Entry_To_Current_Row(rmap(poisson.A(index).j),poisson.A(index).a);
poisson.C->Finish_Row();}
poisson.C->In_Place_Incomplete_Cholesky_Factorization();
#else
ARRAY<int> row_lengths(index_map.real_cell_indices.m);
for(int row=1;row<=index_map.real_cell_indices.m;++row){int row_index=index_map.real_cell_indices(row);
for(int index=poisson.offsets(row_index);index<poisson.offsets(row_index+1);++index){
int col_index=poisson.A(index).j,col=index_map.real_cell_indices_reverse_map(col_index);
if(col >= 0){row_lengths(col)++;}}}
delete poisson.C;poisson.C=new SPARSE_MATRIX_FLAT_NXN<T>;
poisson.C->Set_Row_Lengths(row_lengths);
for(int row=1;row<=index_map.real_cell_indices.m;++row){int row_index=index_map.real_cell_indices(row);
for(int index=poisson.offsets(row_index);index<poisson.offsets(row_index+1);++index){
int col_index=poisson.A(index).j,col=index_map.real_cell_indices_reverse_map(col_index);
if(col >= 0) poisson.C->Set_Element(row,col,poisson.A(index).a);}}
poisson.C->In_Place_Incomplete_Cholesky_Factorization();
#endif
}
//#####################################################################
// Function Apply_Preconditioner
//#####################################################################
// Applies the preconditioner built by Compute_Preconditioner in place:
//  * dt!=0: multiply each entry by the precomputed diagonal scaling.
//  * dt==0: gather the rows selected in `map`, apply the incomplete-Cholesky
//    forward/backward solves, and scatter the result back.
template<class TV> void MATRIX_FLUID_POISSON<TV>::
Apply_Preconditioner(VECTOR_ND<T>& pressure) const
{
if(dt){for(int i=1;i<=index_map.indexed_cells.m;i++) pressure(i)*=V_over_rho_c_squared_dt_squared_inverse_flat(i);
return;}
#if 1
VECTOR_ND<T> sub_vector(map.m),temp_vector(map.m);
for(int i=1;i<=map.m;++i) sub_vector(i)=pressure(map(i));
poisson.C->Solve_Forward_Substitution(sub_vector,temp_vector,true);
poisson.C->Solve_Backward_Substitution(temp_vector,sub_vector,false,true);
for(int i=1;i<=map.m;++i) pressure(map(i))=sub_vector(i);
#else
VECTOR_ND<T> sub_vector(index_map.real_cell_indices.m),temp_vector(index_map.real_cell_indices.m);
for(int i=1;i<=index_map.real_cell_indices.m;++i) sub_vector(i)=pressure(index_map.real_cell_indices(i));
poisson.C->Solve_Forward_Substitution(sub_vector,temp_vector,true);
poisson.C->Solve_Backward_Substitution(temp_vector,sub_vector,false,true);
for(int i=1;i<=index_map.real_cell_indices.m;++i) pressure(index_map.real_cell_indices(i))=sub_vector(i);
#endif
}
//#####################################################################
// Function Times_Add
//#####################################################################
// Accumulating matrix-vector product: pressure_out += poisson * pressure_in.
template<class TV> void MATRIX_FLUID_POISSON<TV>::
Times_Add(const VECTOR_ND<T>& pressure_in,VECTOR_ND<T>& pressure_out) const
{
VECTOR_ND<T> product(pressure_in.n);
Times(pressure_in,product);
pressure_out+=product;
}
//#####################################################################
// Function Times
//#####################################################################
// Matrix-vector product: pressure_out = poisson * pressure_in.
template<class TV> void MATRIX_FLUID_POISSON<TV>::
Times(const VECTOR_ND<T>& pressure_in,VECTOR_ND<T>& pressure_out) const
{
poisson.Times(pressure_in,pressure_out);
}
//#####################################################################
// Function Test_Matrix
//#####################################################################
// Symmetry self-test: for random vectors a,b checks a.(P b) == b.(P a),
// which holds iff P is symmetric; logs the two inner products and their
// relative difference, and optionally dumps the whole matrix.
template<class TV> void MATRIX_FLUID_POISSON<TV>::
Test_Matrix(const bool print_matrix) const
{
RANDOM_NUMBERS<T> random;
VECTOR_ND<T> a(poisson.n),a2(poisson.n),b(poisson.n),b2(poisson.n);
random.Fill_Uniform(a,-1,1);
random.Fill_Uniform(b,-1,1);
poisson.Times(b,a2);
poisson.Times(a,b2);
T inner1=VECTOR_ND<T>::Dot_Product(a,a2);
T inner2=VECTOR_ND<T>::Dot_Product(b,b2);
std::stringstream ss;
ss<<"MATRIX_FLUID_POISSON Test: "<<inner1<<" vs "<<inner2<<" relative "<<abs(inner1-inner2)/maxabs((T)1e-30,inner1,inner2)<<std::endl;
if(print_matrix) ss<<"poisson:\n"<<poisson<<std::endl;
LOG::filecout(ss.str());
}
//#####################################################################
// Function Print_Each_Matrix
//#####################################################################
// Dumps the Poisson matrix to "Poiss-<n>.txt" in Octave text format, for
// offline debugging/comparison.
template<class TV> void MATRIX_FLUID_POISSON<TV>::
Print_Each_Matrix(int n) const
{
OCTAVE_OUTPUT<T>(STRING_UTILITIES::string_sprintf("Poiss-%i.txt",n).c_str()).Write("Poiss",poisson);
}
//#####################################################################
// Explicit template instantiations for the supported scalar types (float,
// and double unless COMPILE_WITHOUT_DOUBLE_SUPPORT) in 1/2/3 dimensions.
template class MATRIX_FLUID_POISSON<VECTOR<float,1> >;
template class MATRIX_FLUID_POISSON<VECTOR<float,2> >;
template class MATRIX_FLUID_POISSON<VECTOR<float,3> >;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class MATRIX_FLUID_POISSON<VECTOR<double,1> >;
template class MATRIX_FLUID_POISSON<VECTOR<double,2> >;
template class MATRIX_FLUID_POISSON<VECTOR<double,3> >;
#endif
|
#!/bin/bash
# Run exploit13, capturing both stdout and stderr in its log file.
python exploit13/exploit13.py > exploit13/exploit13.log 2>&1
|
#!/bin/bash
# Smoke-test driver for BALSAMIC: creates a case config and/or runs the qc
# analysis workflow against the bundled test data.
set -eo pipefail
shopt -s expand_aliases
_log="[$(date) $(whoami)] "
_red=${_log}'\033[0;31m';
_green=${_log}'\033[0;32m';
_yellow=${_log}'\033[1;33m';
_nocol='\033[0m';
function usage() {
echo $"
USAGE: [ -a <T|TN> -c _condaenv -m <run|config|all> -t <panel|WGS> -r ]
	-a [required] T: Tumor only, TN: tumor normal
	-c Conda environment where BALSAMIC is installed. If not specified, it will use current environment.
	-m [required] config: only create config file, run: create config file and start analysis
	-t [required] panel: target sequencing workflow (also includes WES), WGS: whole genome sequencing workflow
	-d Analysis dir path, if it doesn't exist it will be created
	-r Flag. Set to submit jobs instead of running in dry mode
	-h Show this help and exit
"
}
# 'h' was documented in usage but missing from the getopts spec, so -h used
# to fall through to the invalid-option branch and exit 1.
while getopts ":a:m:c:t:d:rh" opt; do
case ${opt} in
a)
_analysis=${OPTARG}
echo "analysis set to" "${OPTARG}"
[[ $_analysis == 'T' || $_analysis == 'TN' ]] || ( usage >&2; exit 1)
;;
c)
_condaenv=${OPTARG}
echo "conda environment set to" "${OPTARG}"
;;
m)
_startmode=${OPTARG}
echo "start mode set to" "${OPTARG}"
[[ $_startmode == 'config' || $_startmode == 'run' || $_startmode == 'all' ]] || ( usage >&2; exit 1)
;;
t)
_ngstype=${OPTARG}
echo "workflow set to " "${OPTARG}"
[[ $_ngstype == 'panel' || $_ngstype == 'WGS' ]] || ( usage >&2; exit 1)
;;
d)
_analysis_dir=${OPTARG}
echo "analysis dir set to " "${OPTARG}"
;;
r)
rFlag=true;
;;
h) usage; exit 0;;
*) echo "Invalid option: -${OPTARG}" >&2; usage >&2; exit 1;;
esac
done
# Enforce the options that usage() documents as required; previously a
# missing -a/-m/-t silently produced a broken balsamic invocation.
if [[ -z "${_analysis:-}" || -z "${_startmode:-}" || -z "${_ngstype:-}" ]]; then
echo "Missing required option(s): -a, -m and -t must all be given" >&2
usage >&2
exit 1
fi
if [[ ${_ngstype} == "panel" ]]; then
_panel_option='-p tests/test_data/references/panel/panel.bed'
else
_panel_option=''
fi
if [[ ! -z ${_condaenv:-} ]]; then
source activate ${_condaenv}
fi
if [[ -z ${_analysis_dir:-} ]]; then
_analysis_dir='run_tests/'
echo "analysis dir set to " "${_analysis_dir}"
fi
# Make sure _analysis_dir exists
mkdir -p ${_analysis_dir}
_genome_ver=hg19
_cluster_config=BALSAMIC/config/cluster.json
_balsamic_cache=/home/proj/stage/cancer/balsamic_cache
_tumor_fastq=tests/test_data/fastq/S1_R_1.fastq.gz
_normal_fastq=tests/test_data/fastq/S2_R_1.fastq.gz
_analysis_config=${_analysis_dir}'/'${_analysis}_${_ngstype}'/'${_analysis}_${_ngstype}'.json'
# Without -r, _run_analysis stays empty and balsamic performs a dry run.
_run_analysis=""
if [[ ! -z ${rFlag:-} ]]; then
_run_analysis="-r"
fi
if [[ ${_analysis} == "TN" ]]; then
_normal_option="-n ${_normal_fastq}"
else
_normal_option=" "
fi
# Create the case config file for the selected workflow.
function balsamic_config() {
set -x
balsamic --loglevel INFO config case \
-t ${_tumor_fastq} \
${_normal_option} \
--case-id ${_analysis}_${_ngstype} \
--analysis-dir ${_analysis_dir} \
${_panel_option} \
--balsamic-cache ${_balsamic_cache}
}
# Start (or dry-run) the qc analysis from the generated config.
balsamic_run() {
balsamic --loglevel INFO run analysis \
-s ${_analysis_config} \
-c ${_cluster_config} \
-a qc \
--benchmark \
--account development ${_run_analysis}
}
if [[ $_startmode == 'config' ]]; then
balsamic_config
elif [[ $_startmode == 'run' ]]; then
balsamic_run
else
balsamic_config
balsamic_run
fi
|
#!/bin/bash
# Runs the vis4lang ablation for task 18 (Flickr30k Entities) with the
# ctrl_vl-bert base model on the xm-influence test-task config.
TASK=18
MODEL=ctrl_vl-bert
MODEL_CONFIG=ctrl_vl-bert_base
TASKS_CONFIG=xm-influence_test_tasks
# Pretrained checkpoint (seed 93) and where to write the dumped results.
PRETRAINED=/science/image/nlp-datasets/emanuele/checkpoints/mpre-unmasked/conceptual_captions_s93/volta/ctrl_vl-bert/ctrl_vl-bert_base/pytorch_model_9.bin
OUTPUT_DIR=/science/image/nlp-datasets/emanuele/results/xm-influence/flickr30kentities_vis4lang/${MODEL}_s93
# Activate the project environment, run from the volta repo root, and
# deactivate afterwards.
source activate /science/image/nlp-datasets/emanuele/envs/xm-influence
cd ../../../../volta
python ablate_vis4lang.py \
--bert_model bert-base-uncased --config_file config/${MODEL_CONFIG}.json --from_pretrained ${PRETRAINED} \
--tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --split val \
--output_dir ${OUTPUT_DIR} --dump_results --masking all
conda deactivate
|
<filename>src/pk/helper/license.go
package helper
import (
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"crypto/rand"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"github.com/denisbrodbeck/machineid"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"regexp"
"time"
)
// LicenseInfo is the machine-bound license request that MakeLicense encrypts
// into license.ini: the user's email, the id reported by the machineid
// package, and the binary's working directory at creation time.
type LicenseInfo struct {
Email string `json:"email"`
CpuId string `json:"cpuid"`
BinPath string `json:"binpath"`
}
// ActivatedInfo is the record MakeActivate encrypts into the activation
// file: the license identity (email/cpu id/binary path copied from
// LicenseInfo), the validity window, and the purchased feature switches.
type ActivatedInfo struct {
Email string `json:"email"`
CpuId string `json:"cpuid"`
End time.Time `json:"end"` // expiry instant, e.g. start + N days
Start time.Time `json:"start"` // activation instant
BinPath string `json:"binpath"`
NumberOfItems int32 `json:"numberofitems"` // allowed item count
CryptoEnable bool `json:"cryptoenable"` // market feature flags
TehranEnable bool `json:"tehranenable"`
ForexEnable bool `json:"forexenable"`
}
// RemainingTime returns the number of days left until the activation
// expires, with the fractional day handled by the project RoundUp helper.
// Negative values mean the activation has already expired.
func (a ActivatedInfo) RemainingTime() int64 {
	days := time.Until(a.End).Hours() / 24
	return int64(RoundUp(days, 0))
}
// LicenseGen groups the license-file operations (create, activate, read,
// validate, print) as methods; it carries no state of its own.
type LicenseGen struct {
}
// phrase is the passphrase all license/activation files are encrypted with.
// NOTE(review): it is hard-coded, so anyone with the binary can forge
// licenses — confirm this is an accepted trade-off.
const phrase = "123==="
// dir_path is where license.ini / activated.ini live, under the cache root.
var dir_path string = path.Join(GetRootCache(), "license")
// createHash derives a 32-character lowercase-hex string from key via MD5;
// it is used to stretch an arbitrary passphrase into a fixed-size AES key.
func createHash(key string) string {
	digest := md5.Sum([]byte(key))
	return hex.EncodeToString(digest[:])
}
// encrypt seals data with AES-GCM under a key derived from passphrase via
// createHash. The random nonce is prepended to the returned ciphertext so
// decrypt can recover it; panics on cipher-setup or entropy failure.
func encrypt(data []byte, passphrase string) []byte {
	blk, _ := aes.NewCipher([]byte(createHash(passphrase)))
	aead, err := cipher.NewGCM(blk)
	if err != nil {
		panic(err.Error())
	}
	iv := make([]byte, aead.NonceSize())
	if _, err = io.ReadFull(rand.Reader, iv); err != nil {
		panic(err.Error())
	}
	// Seal appends ciphertext+tag after the nonce prefix passed as dst.
	return aead.Seal(iv, iv, data, nil)
}
// decrypt reverses encrypt: re-derives the AES key from passphrase, splits
// the nonce prefix off the input, and opens the AES-GCM envelope.
// Panics if key setup fails or the ciphertext fails authentication.
func decrypt(data []byte, passphrase string) []byte {
	blk, err := aes.NewCipher([]byte(createHash(passphrase)))
	if err != nil {
		panic(err.Error())
	}
	aead, err := cipher.NewGCM(blk)
	if err != nil {
		panic(err.Error())
	}
	iv, sealed := data[:aead.NonceSize()], data[aead.NonceSize():]
	plain, err := aead.Open(nil, iv, sealed, nil)
	if err != nil {
		panic(err.Error())
	}
	return plain
}
// MakeLicense writes an encrypted license.ini under dir_path that binds the
// given email to this machine's id and the current working directory.
// All failures are reported through the error return.
func (a LicenseGen) MakeLicense(email string) error {
	pwd, err := os.Getwd()
	if err != nil {
		// was: panic(err) — a library helper should report, not crash
		return err
	}
	re_email := regexp.MustCompile(`^[a-z0-9._%+\-]+@[a-z0-9.\-]+\.[a-z]{2,4}$`)
	if !re_email.MatchString(email) {
		return errors.New("Make License : Email Not Valid")
	}
	ma_id, err := machineid.ID()
	if err != nil {
		return err
	}
	doc := LicenseInfo{
		Email: email,
		CpuId: ma_id,
		BinPath: pwd,
	}
	docBytes, err := json.Marshal(doc)
	if err != nil {
		return err
	}
	sEnc := encrypt(docBytes, phrase)
	if GetVerbose() {
		fmt.Printf("Encrypted: %x\n", sEnc)
	}
	// Write the hex-encoded ciphertext to <dir_path>/license.ini.
	if _, err := os.Stat(dir_path); os.IsNotExist(err) {
		os.MkdirAll(dir_path, os.ModePerm)
	}
	var s string = path.Join(dir_path, "license.ini")
	file, err1 := os.Create(s)
	if err1 != nil {
		// check the error BEFORE deferring Close; the original deferred a
		// Close on a possibly-nil handle
		return errors.New(fmt.Sprintf("make license -> Cannot create file %s", err1))
	}
	defer file.Close()
	file.WriteString(fmt.Sprintf("%x", sEnc))
	file.Sync()
	fmt.Println("license has been created succefully", s)
	return nil
}
// MakeActivate reads the encrypted license at license_path and writes the
// matching activation file ("<email>_activated.ini") next to it, embedding
// the validity window (days from now) and the feature switches.
func (a LicenseGen) MakeActivate(license_path string, days int32, items_num int32, is_cryto bool, is_tehran bool, is_forex bool) error {
	var li_path string = license_path
	if !IsExist(li_path) {
		return errors.New(fmt.Sprintf("MakeActivate -> could not find file %s", li_path))
	}
	basedir := filepath.Dir(license_path)
	// ---- read + decrypt the license request ----
	content, err := ioutil.ReadFile(li_path)
	if err != nil {
		// message fixed: this branch READS the license, it does not create it
		return errors.New(fmt.Sprintf("MakeActivate -> Cannot read file %s \n", err))
	}
	data, err := hex.DecodeString(string(content))
	if err != nil {
		return errors.New(fmt.Sprintf("MakeActivate -> decode failed %s \n", err))
	}
	var license = LicenseInfo{}
	dec := decrypt(data, phrase)
	if err := json.Unmarshal(dec, &license); err != nil {
		return errors.New(fmt.Sprintf("MakeActivate -> json failed %s \n", err))
	}
	// ---- build the activation record ----
	doc := ActivatedInfo{
		End: time.Now().Add(time.Hour * 24 * time.Duration(days)),
		Start: time.Now(),
		Email: license.Email,
		CpuId: license.CpuId,
		BinPath: license.BinPath,
		NumberOfItems: items_num,
		CryptoEnable: is_cryto,
		TehranEnable: is_tehran,
		ForexEnable: is_forex,
	}
	docBytes, err := json.Marshal(doc)
	if err != nil {
		return errors.New(fmt.Sprintf("MakeActivate -> json failed %s \n", err))
	}
	sEnc := encrypt(docBytes, phrase)
	// ---- write the hex-encoded activation next to the license ----
	if _, err := os.Stat(basedir); os.IsNotExist(err) {
		os.MkdirAll(basedir, os.ModePerm)
	}
	var a_path string = path.Join(basedir, fmt.Sprintf("%v_%v", license.Email, "activated.ini"))
	file, err1 := os.Create(a_path)
	if err1 != nil {
		// check the error BEFORE deferring Close; the original deferred a
		// Close on a possibly-nil handle
		return errors.New(fmt.Sprintf("make activate -> Cannot create file %s", err1))
	}
	defer file.Close()
	file.WriteString(fmt.Sprintf("%x", sEnc))
	file.Sync()
	fmt.Println("activate has been created succefully", a_path)
	return nil
}
// ReadFile loads <dir_path>/<file_name>, hex-decodes and decrypts it, and
// unmarshals the JSON payload into target_object_json (which must be a
// pointer, e.g. &LicenseInfo{}).
// Error messages fixed: the originals were copy-pasted from MakeActivate
// and mislabeled both the function and the operation.
func (a LicenseGen) ReadFile(file_name string, target_object_json interface{}) error {
	var li_path string = path.Join(dir_path, file_name)
	if !IsExist(li_path) {
		return errors.New(fmt.Sprintf("could not find file -> %s", li_path))
	}
	content, err := ioutil.ReadFile(li_path)
	if err != nil {
		return errors.New(fmt.Sprintf("ReadFile() -> Cannot read file %s \n", err))
	}
	data, err := hex.DecodeString(string(content))
	if err != nil {
		return errors.New(fmt.Sprintf("ReadFile() -> decode failed %s \n", err))
	}
	dec := decrypt(data, phrase)
	if err := json.Unmarshal(dec, &target_object_json); err != nil {
		return errors.New(fmt.Sprintf("ReadFile() -> json failed %s \n", err))
	}
	return nil
}
// Validation cross-checks license.ini against activated.ini: both must
// decrypt, agree on cpu id / binary path / email, and the activation must
// not be expired. Returns nil when the installation is validly licensed.
func (a LicenseGen) Validation() error {
	var license = LicenseInfo{}
	var activated = ActivatedInfo{}
	// ---- load both encrypted records ----
	e1 := a.ReadFile("license.ini", &license)
	if e1 != nil {
		return e1
	}
	e2 := a.ReadFile("activated.ini", &activated)
	if e2 != nil {
		return e2
	}
	if license.CpuId != activated.CpuId {
		return errors.New(fmt.Sprintf("Validation() -> CPU ID Conflict \n"))
	}
	if license.BinPath != activated.BinPath {
		return errors.New(fmt.Sprintf("Validation() -> Bin Path Conflict \n a:[%v] \n l:[%v]\n", activated.BinPath, license.BinPath))
	}
	if license.Email != activated.Email {
		// message fixed: used to say "Bin Path Conflict" (copy-paste bug)
		return errors.New(fmt.Sprintf("Validation() -> Email Conflict \n a:[%v] \n l:[%v]\n", activated.Email, license.Email))
	}
	if activated.RemainingTime() <= 0 {
		return errors.New(fmt.Sprintf("Validation() -> license Expire"))
	}
	return nil
}
// Print validates the installation (logging a warning on failure) and then
// dumps the decrypted license/activation details to stdout.
// Fixes: the second ReadFile result was checked against e1 instead of e2,
// so a broken activated.ini was silently ignored; and the verbose error was
// printed with Println("%v", ...) which emits the literal "%v".
func (a LicenseGen) Print() error {
	e := a.Validation()
	if e != nil {
		fmt.Println("license not valid")
		if GetVerbose() {
			fmt.Printf("%v\n", e)
		}
	}
	var license = LicenseInfo{}
	var ai = ActivatedInfo{}
	// ---- load both encrypted records ----
	e1 := a.ReadFile("license.ini", &license)
	if e1 != nil {
		return e1
	}
	e2 := a.ReadFile("activated.ini", &ai)
	if e2 != nil {
		return e2
	}
	fmt.Println(":::::::::::::: LICENSE :::::::::::::::")
	fmt.Println("Email :", ai.Email, "\nStart :", TimeToString(ai.Start, ""), "\nEnd :", TimeToString(ai.End, ""), "\nRemaining Time :", ai.RemainingTime(), " Days", "\nBin Path :", ai.BinPath, "\nItems Num :", ai.NumberOfItems, "\nTehran :", ai.TehranEnable, "\nCrypto :", ai.CryptoEnable, "\nForex :", ai.ForexEnable)
	return nil
}
// Test is a manual smoke test: activates a hard-coded local license file
// for 360 days, validates it, and prints the result. Not an automated
// unit test — the absolute Windows path only exists on the author's machine.
func (a LicenseGen) Test() {
//a.MakeLicense("<EMAIL>")
a.MakeActivate("D:\\workspace\\goprojects\\golanglearning\\src\\d\\license\\license.ini", 360, 10, true, true, false)
e := a.Validation()
if e != nil {
fmt.Println("license not valid")
}
a.Print()
}
|
/// <summary>
/// A single DCP event record. Stub: the concrete members are not shown here,
/// but DcpEventService filters on an EventTime property this type must expose.
/// </summary>
public class DcpEvent
{
// Properties and methods for DCP event
}
/// <summary>
/// Database access fixture. Stub: supplies the database context via a
/// CreateDbContext() method used by DcpEventService.
/// </summary>
public class ClientFixture
{
// Setup and configuration for interacting with the database
}
/// <summary>
/// Read-only query service for DCP events, backed by the database context
/// supplied by a <see cref="ClientFixture"/>.
/// </summary>
public class DcpEventService
{
private readonly ClientFixture _fixture;
/// <summary>Creates the service around the given database fixture.</summary>
public DcpEventService(ClientFixture fixture)
{
_fixture = fixture;
}
/// <summary>
/// Returns the DCP events whose EventTime lies in [startTime, endTime]
/// (both bounds inclusive), newest first. The query is materialized with
/// ToList() before the context is disposed, so the returned sequence is
/// safe to enumerate after this call.
/// </summary>
public IEnumerable<DcpEvent> GetRecentDcpEvents(DateTime startTime, DateTime endTime)
{
// Implement the logic to retrieve recent DCP events based on the specified time frame
using (var dbContext = _fixture.CreateDbContext()) // Assuming CreateDbContext method creates the database context
{
return dbContext.DcpEvents
.Where(e => e.EventTime >= startTime && e.EventTime <= endTime)
.OrderByDescending(e => e.EventTime)
.ToList();
}
}
}
|
<reponame>frc1418/2014<gh_stars>1-10
#
# This file is part of Team 1418 Dashboard
#
# Team 1418 Dashboard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# Team 1418 Dashboard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Team 1418 Dashboard. If not, see <http://www.gnu.org/licenses/>.
#
import gtk
import network_tables
from .. import util
from common import settings
class AutonomousTuningWidget(gtk.VBox):
    """GTK widget for choosing and tuning the robot's autonomous mode.

    Mirrors NetworkTables values into dynamically created GTK controls and
    keeps the autonomous-mode chooser in sync with the robot.
    """

    # Upper bound used by the tuning widgets — presumably the 15-second
    # FRC autonomous period; TODO confirm where it is applied.
    AUTON_MAX = 15.0

    # UI resource plus the widgets/signals bound from it by
    # util.initialize_from_xml (see __init__).
    ui_filename = 'autonomous_tuning_widget.ui'

    ui_widgets = [
        'auto_chooser',
        'settings_vbox',
        'timing_settings_vbox',
        'widget'
    ]

    ui_signals = [
        'on_auto_chooser_changed'
    ]
    def __init__(self, table):
        """Build the widget around the given NetworkTables table.

        :param table: NetworkTables table holding the autonomous-tuning keys
        """
        gtk.VBox.__init__(self)
        util.initialize_from_xml(self)
        self.pack_start(self.widget, True, True)
        self.table = table
        # key -> UI update function, filled in as widgets are created so
        # NetworkTables changes can be pushed back into the widgets
        self.tracked_keys = {}
        # preload the autonomous chooser in case there isn't a robot listening
        self.__preload_chooser()
        # setup attachments to things
        # listen to all keys
        network_tables.attach_fn(self.table, None, self.on_networktables_updated, self)
        # attach the chooser too
        # -> there's probably a race here.
        network_tables.attach_chooser_combo(table, 'Autonomous Mode', self.auto_chooser, self.on_autonomous_choices_updated)
def __preload_chooser(self):
choices = settings.get('autonomous/choices')
if choices is not None:
model = self.auto_chooser.get_model()
model.clear()
for choice in choices:
model.append((choice,))
def on_autonomous_choices_updated(self, choices):
settings.set('autonomous/choices', choices)
def __parse_name(self, name):
vmin, vmax = -1.0, 1.0
# parse the min/max value if it exists
parsed_name = name.rsplit('|', 2)
if len(parsed_name) == 3:
name, vmin, vmax = parsed_name
return name, float(vmin), float(vmax)
def __create_widget(self, name, key, vmin, vmax):
'''
Creates a widget connected to a NetworkTables value
TODO: Could use this elsewhere to dynamically create appropriate
widgets for NetworkTables values...
'''
value = self.table.GetValue(key)
needs_hbox = True
if isinstance(value, bool):
needs_hbox = False
widget = gtk.CheckButton(label=name)
widget.set_active(value)
h_id = widget.connect('toggled', lambda w: self.table.PutBoolean(key, widget.get_active()))
def update_fn(v):
widget.handler_block(h_id)
widget.set_active(v)
widget.handler_unblock(h_id)
elif isinstance(value, float):
# TODO: set increments should be calculated
widget = gtk.SpinButton(digits=2)
widget.set_range(vmin, vmax)
widget.set_increments(0.1, 1)
widget.set_value(value)
h_id = widget.connect('value-changed', lambda w: self.table.PutNumber(key, widget.get_value()))
def update_fn(v):
widget.handler_block(h_id)
widget.set_value(v)
widget.handler_unblock(h_id)
elif isinstance(value, str):
widget = gtk.Entry(max=80)
widget.set_text(value)
h_id = widget.connect('changed', lambda w: self.table.PutString(key, widget.get_text()))
def update_fn(v):
widget.handler_block(h_id)
widget.set_text(v)
widget.handler_unblock(h_id)
else:
return None
self.tracked_keys[key] = update_fn
# each object is its own thing
if needs_hbox:
hbox = gtk.HBox()
hbox.set_spacing(5)
hbox.pack_start(widget, False, False)
hbox.pack_start(gtk.Label(name), False, False)
return hbox
return widget
def clear(self):
'''Clear out all of the things'''
self.tracked_keys.clear()
for c in self.settings_vbox.get_children():
self.settings_vbox.remove(c)
c.destroy()
for c in self.timing_settings_vbox.get_children():
self.timing_settings_vbox.remove(c)
c.destroy()
self.settings_vbox.hide()
self.timing_settings_vbox.hide()
def get_current_mode(self):
'''Returns the current autonomous mode'''
active = self.auto_chooser.get_active_iter()
if active:
return self.auto_chooser.get_model()[active][0]
def update_autonomous_tunables(self, mode_name):
self.clear()
# nothing else needs to happen here
if mode_name == 'None':
return
# put new things in
# -> TODO: There's some kind of bug with updating network tables array
# values. Most unfortunate. It seems to work most of the time
# however, so good enough for now
# find the ordering of duration items
try:
durations = network_tables.get_string_array(self.table, mode_name + '_durations')
except:
durations = []
try:
descriptions = network_tables.get_string_array(self.table, mode_name + '_descriptions')
except:
descriptions = []
# could handle this gracefully, but no
if len(descriptions) != len(durations):
descriptions = [''] * len(durations)
for duration_name, description in zip(durations, descriptions):
key = '%s\\%s_duration' % (mode_name, duration_name)
widget = self.__create_widget(duration_name, key, 0, self.AUTON_MAX)
if widget is None:
continue
if description != '':
widget.set_tooltip_text(description)
self.timing_settings_vbox.pack_start(widget, False, True)
if len(durations) > 0:
self.timing_settings_vbox.show_all()
# now setup the tunables
try:
tunables = network_tables.get_string_array(self.table, mode_name + '_tunables')
except:
tunables = []
for tunable_name in tunables:
tunable_name, vmin, vmax = self.__parse_name(tunable_name)
key = '%s\\%s' % (mode_name, tunable_name)
widget = self.__create_widget(tunable_name, key, vmin, vmax)
if widget is None:
continue
self.settings_vbox.pack_start(widget, False, True)
if len(tunables) > 0:
self.settings_vbox.show_all()
# setup update functions
def updated_vars(v):
self.update_autonomous_tunables(mode_name)
self.tracked_keys[mode_name + '_durations'] = updated_vars
self.tracked_keys[mode_name + '_tunables'] = updated_vars
def on_auto_chooser_changed(self, widget):
active = widget.get_active_iter()
if not active:
return
mode_name = widget.get_model()[active][0]
self.update_autonomous_tunables(mode_name)
def on_networktables_updated(self, key, value):
'''
Called when NetworkTables keys are updated
'''
if key == 'Catapult Values':
print 'no'
return
# if the value is None, assume it is a StringArray
if value is None:
try:
value = network_tables.get_string_array(self.table, key)
except:
pass
# if there's a value we're displaying, then change its
# contents based on this. or something.
update_fn = self.tracked_keys.get(key)
if update_fn is not None:
print "Autonomous tuner update:", key, value
# .. beware of loop?
update_fn(value)
|
# DEFFE driver script: exercises run_deffe.py against the matmul example
# configs -- exploration with a transfer-learning checkpoint, loss-function
# variants, and inference-only runs. Requires $DEFFE_DIR to be set.

# Preloaded-data exploration using the kmeans transfer-learning checkpoint
python3 $DEFFE_DIR/framework/run_deffe.py -config $DEFFE_DIR/example/config_matmul_tl_samples.json -icp ../../kmeans.hdf5 -only-preloaded-data-exploration -epochs 1000 -batch-size 256 -train-test-split 1.0 -validation-split 0.23
# Re-evaluate saved checkpoints with the custom mean-abs-exp loss
python3 $DEFFE_DIR/framework/run_deffe.py -model-extract-dir checkpoints -config $DEFFE_DIR/example/config_matmul.json -only-preloaded-data-exploration -train-test-split 1.0 -validation-split 0.23 -load-train-test -loss custom_mean_abs_exp_loss -model-stats-output test-output-exploss.csv
# Same evaluation, but with the custom mean-abs-log loss
python3 $DEFFE_DIR/framework/run_deffe.py -model-extract-dir checkpoints -config $DEFFE_DIR/example/config_matmul.json -only-preloaded-data-exploration -train-test-split 1.0 -validation-split 0.23 -load-train-test -loss custom_mean_abs_log_loss -model-stats-output test-output-logloss.csv
# Inference-only run on a small hand-written input set
python3 $DEFFE_DIR/framework/run_deffe.py -config $DEFFE_DIR/example/config_matmul_tl_samples.json -icp matmul.hdf5 -input test-input.csv -output test-output.csv -inference-only
# Inference-only run on the full exploration output
python3 $DEFFE_DIR/framework/run_deffe.py -config $DEFFE_DIR/example/config_matmul_tl_samples.json -icp matmul.hdf5 -input ../../../../output_matmul_deffe.csv -output test-output-full.csv -inference-only
|
#!/bin/bash
# Toggle the laptop touchpad on/off via xinput, with a desktop notification.
#
# Fixes over the original:
#  - 'id=[0-9]?{3}' is a malformed/non-portable ERE (an optional digit with a
#    brace quantifier); 'id=[0-9]+' matches device ids of any length.
#  - head -n1 guards against multiple "TouchPad" matches.
#  - "$device"/"$state" are quoted so the script fails cleanly (instead of
#    producing a syntax error in [ ]) when no touchpad is found.
#  - '==' inside [ ] is a bashism; '=' is the POSIX string comparison.
device=$(xinput list | grep 'TouchPad' | grep -Eo 'id=[0-9]+' | grep -Eo '[0-9]+' | head -n1)
if [ -z "$device" ]; then
    notify-send "Touchpad not found" -u low
    exit 1
fi
state=$(xinput list-props "$device" | grep "Device Enabled" | grep -o "[01]$")
if [ "$state" = '1' ]; then
    xinput --disable "$device" && notify-send "Touchpad disabled" -u low
else
    xinput --enable "$device" && notify-send "Touchpad enabled" -u low
fi
|
<reponame>smagill/opensphere-desktop<gh_stars>10-100
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.10.06 at 03:53:52 PM EDT
//
package net.opengis.sld._100;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlElementDecl;
import javax.xml.bind.annotation.XmlRegistry;
import javax.xml.namespace.QName;
/**
* This object contains factory methods for each
* Java content interface and Java element interface
* generated in the net.opengis.sld package.
* <p>An ObjectFactory allows you to programatically
* construct new instances of the Java representation
* for XML content. The Java representation of XML
* content can consist of schema derived interfaces
* and classes representing the binding of schema
* type definitions, element declarations and model
* groups. Factory methods for each of these are
* provided in this class.
*
*/
@XmlRegistry
public class ObjectFactory {

    // NOTE: JAXB-generated factory for the SLD 1.0.0 schema (see file header);
    // hand edits are lost on schema recompilation.

    // QName constants for the SLD global element declarations below.
    private final static QName _PerpendicularOffset_QNAME = new QName("http://www.opengis.net/sld", "PerpendicularOffset");
    private final static QName _MaxScaleDenominator_QNAME = new QName("http://www.opengis.net/sld", "MaxScaleDenominator");
    private final static QName _ReliefFactor_QNAME = new QName("http://www.opengis.net/sld", "ReliefFactor");
    private final static QName _Label_QNAME = new QName("http://www.opengis.net/sld", "Label");
    private final static QName _Abstract_QNAME = new QName("http://www.opengis.net/sld", "Abstract");
    private final static QName _Name_QNAME = new QName("http://www.opengis.net/sld", "Name");
    private final static QName _GreenChannel_QNAME = new QName("http://www.opengis.net/sld", "GreenChannel");
    private final static QName _Radius_QNAME = new QName("http://www.opengis.net/sld", "Radius");
    private final static QName _MinScaleDenominator_QNAME = new QName("http://www.opengis.net/sld", "MinScaleDenominator");
    private final static QName _Title_QNAME = new QName("http://www.opengis.net/sld", "Title");
    private final static QName _RasterSymbolizer_QNAME = new QName("http://www.opengis.net/sld", "RasterSymbolizer");
    private final static QName _BrightnessOnly_QNAME = new QName("http://www.opengis.net/sld", "BrightnessOnly");
    private final static QName _Symbolizer_QNAME = new QName("http://www.opengis.net/sld", "Symbolizer");
    private final static QName _Size_QNAME = new QName("http://www.opengis.net/sld", "Size");
    private final static QName _FeatureTypeName_QNAME = new QName("http://www.opengis.net/sld", "FeatureTypeName");
    private final static QName _SemanticTypeIdentifier_QNAME = new QName("http://www.opengis.net/sld", "SemanticTypeIdentifier");
    private final static QName _SourceChannelName_QNAME = new QName("http://www.opengis.net/sld", "SourceChannelName");
    private final static QName _PointSymbolizer_QNAME = new QName("http://www.opengis.net/sld", "PointSymbolizer");
    private final static QName _DisplacementY_QNAME = new QName("http://www.opengis.net/sld", "DisplacementY");
    private final static QName _DisplacementX_QNAME = new QName("http://www.opengis.net/sld", "DisplacementX");
    private final static QName _PolygonSymbolizer_QNAME = new QName("http://www.opengis.net/sld", "PolygonSymbolizer");
    private final static QName _GammaValue_QNAME = new QName("http://www.opengis.net/sld", "GammaValue");
    private final static QName _WellKnownName_QNAME = new QName("http://www.opengis.net/sld", "WellKnownName");
    private final static QName _BlueChannel_QNAME = new QName("http://www.opengis.net/sld", "BlueChannel");
    private final static QName _LineSymbolizer_QNAME = new QName("http://www.opengis.net/sld", "LineSymbolizer");
    private final static QName _Service_QNAME = new QName("http://www.opengis.net/sld", "Service");
    private final static QName _Format_QNAME = new QName("http://www.opengis.net/sld", "Format");
    private final static QName _IsDefault_QNAME = new QName("http://www.opengis.net/sld", "IsDefault");
    private final static QName _TextSymbolizer_QNAME = new QName("http://www.opengis.net/sld", "TextSymbolizer");
    private final static QName _AnchorPointY_QNAME = new QName("http://www.opengis.net/sld", "AnchorPointY");
    private final static QName _Rotation_QNAME = new QName("http://www.opengis.net/sld", "Rotation");
    private final static QName _AnchorPointX_QNAME = new QName("http://www.opengis.net/sld", "AnchorPointX");
    private final static QName _RedChannel_QNAME = new QName("http://www.opengis.net/sld", "RedChannel");
    private final static QName _Opacity_QNAME = new QName("http://www.opengis.net/sld", "Opacity");
    private final static QName _GrayChannel_QNAME = new QName("http://www.opengis.net/sld", "GrayChannel");
    private final static QName _Value_QNAME = new QName("http://www.opengis.net/sld", "Value");

    /**
     * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: net.opengis.sld
     * 
     */
    public ObjectFactory() {
    }

    // Plain factory methods for schema-derived complex types.

    /**
     * Create an instance of {@link LabelPlacement }
     * 
     */
    public LabelPlacement createLabelPlacement() {
        return new LabelPlacement();
    }

    /**
     * Create an instance of {@link RasterSymbolizer }
     * 
     */
    public RasterSymbolizer createRasterSymbolizer() {
        return new RasterSymbolizer();
    }

    /**
     * Create an instance of {@link FeatureTypeStyle }
     * 
     */
    public FeatureTypeStyle createFeatureTypeStyle() {
        return new FeatureTypeStyle();
    }

    /**
     * Create an instance of {@link Geometry }
     * 
     */
    public Geometry createGeometry() {
        return new Geometry();
    }

    /**
     * Create an instance of {@link Displacement }
     * 
     */
    public Displacement createDisplacement() {
        return new Displacement();
    }

    /**
     * Create an instance of {@link UserStyle }
     * 
     */
    public UserStyle createUserStyle() {
        return new UserStyle();
    }

    /**
     * Create an instance of {@link ParameterValueType }
     * 
     */
    public ParameterValueType createParameterValueType() {
        return new ParameterValueType();
    }

    /**
     * Create an instance of {@link Halo }
     * 
     */
    public Halo createHalo() {
        return new Halo();
    }

    /**
     * Create an instance of {@link GraphicFill }
     * 
     */
    public GraphicFill createGraphicFill() {
        return new GraphicFill();
    }

    /**
     * Create an instance of {@link GraphicStroke }
     * 
     */
    public GraphicStroke createGraphicStroke() {
        return new GraphicStroke();
    }

    /**
     * Create an instance of {@link Font }
     * 
     */
    public Font createFont() {
        return new Font();
    }

    /**
     * Create an instance of {@link OverlapBehavior }
     * 
     */
    public OverlapBehavior createOverlapBehavior() {
        return new OverlapBehavior();
    }

    /**
     * Create an instance of {@link PointPlacement }
     * 
     */
    public PointPlacement createPointPlacement() {
        return new PointPlacement();
    }

    /**
     * Create an instance of {@link ChannelSelection }
     * 
     */
    public ChannelSelection createChannelSelection() {
        return new ChannelSelection();
    }

    /**
     * Create an instance of {@link AnchorPoint }
     * 
     */
    public AnchorPoint createAnchorPoint() {
        return new AnchorPoint();
    }

    /**
     * Create an instance of {@link ExternalGraphic }
     * 
     */
    public ExternalGraphic createExternalGraphic() {
        return new ExternalGraphic();
    }

    /**
     * Create an instance of {@link Mark }
     * 
     */
    public Mark createMark() {
        return new Mark();
    }

    /**
     * Create an instance of {@link CssParameter }
     * 
     */
    public CssParameter createCssParameter() {
        return new CssParameter();
    }

    /**
     * Create an instance of {@link SelectedChannelType }
     * 
     */
    public SelectedChannelType createSelectedChannelType() {
        return new SelectedChannelType();
    }

    /**
     * Create an instance of {@link Histogram }
     * 
     */
    public Histogram createHistogram() {
        return new Histogram();
    }

    /**
     * Create an instance of {@link ColorMapEntry }
     * 
     */
    public ColorMapEntry createColorMapEntry() {
        return new ColorMapEntry();
    }

    /**
     * Create an instance of {@link UserLayer }
     * 
     */
    public UserLayer createUserLayer() {
        return new UserLayer();
    }

    /**
     * Create an instance of {@link AVERAGE }
     * 
     */
    public AVERAGE createAVERAGE() {
        return new AVERAGE();
    }

    /**
     * Create an instance of {@link StyledLayerDescriptor }
     * 
     */
    public StyledLayerDescriptor createStyledLayerDescriptor() {
        return new StyledLayerDescriptor();
    }

    /**
     * Create an instance of {@link LineSymbolizer }
     * 
     */
    public LineSymbolizer createLineSymbolizer() {
        return new LineSymbolizer();
    }

    /**
     * Create an instance of {@link ShadedRelief }
     * 
     */
    public ShadedRelief createShadedRelief() {
        return new ShadedRelief();
    }

    /**
     * Create an instance of {@link NamedLayer }
     * 
     */
    public NamedLayer createNamedLayer() {
        return new NamedLayer();
    }

    /**
     * Create an instance of {@link EARLIESTONTOP }
     * 
     */
    public EARLIESTONTOP createEARLIESTONTOP() {
        return new EARLIESTONTOP();
    }

    /**
     * Create an instance of {@link FeatureTypeConstraint }
     * 
     */
    public FeatureTypeConstraint createFeatureTypeConstraint() {
        return new FeatureTypeConstraint();
    }

    /**
     * Create an instance of {@link Extent }
     * 
     */
    public Extent createExtent() {
        return new Extent();
    }

    /**
     * Create an instance of {@link TextSymbolizer }
     * 
     */
    public TextSymbolizer createTextSymbolizer() {
        return new TextSymbolizer();
    }

    /**
     * Create an instance of {@link ImageOutline }
     * 
     */
    public ImageOutline createImageOutline() {
        return new ImageOutline();
    }

    /**
     * Create an instance of {@link Fill }
     * 
     */
    public Fill createFill() {
        return new Fill();
    }

    /**
     * Create an instance of {@link ElseFilter }
     * 
     */
    public ElseFilter createElseFilter() {
        return new ElseFilter();
    }

    /**
     * Create an instance of {@link ColorMap }
     * 
     */
    public ColorMap createColorMap() {
        return new ColorMap();
    }

    /**
     * Create an instance of {@link PointSymbolizer }
     * 
     */
    public PointSymbolizer createPointSymbolizer() {
        return new PointSymbolizer();
    }

    /**
     * Create an instance of {@link Graphic }
     * 
     */
    public Graphic createGraphic() {
        return new Graphic();
    }

    /**
     * Create an instance of {@link LayerFeatureConstraints }
     * 
     */
    public LayerFeatureConstraints createLayerFeatureConstraints() {
        return new LayerFeatureConstraints();
    }

    /**
     * Create an instance of {@link OnlineResource }
     * 
     */
    public OnlineResource createOnlineResource() {
        return new OnlineResource();
    }

    /**
     * Create an instance of {@link LinePlacement }
     * 
     */
    public LinePlacement createLinePlacement() {
        return new LinePlacement();
    }

    /**
     * Create an instance of {@link Normalize }
     * 
     */
    public Normalize createNormalize() {
        return new Normalize();
    }

    /**
     * Create an instance of {@link ContrastEnhancement }
     * 
     */
    public ContrastEnhancement createContrastEnhancement() {
        return new ContrastEnhancement();
    }

    /**
     * Create an instance of {@link RANDOM }
     * 
     */
    public RANDOM createRANDOM() {
        return new RANDOM();
    }

    /**
     * Create an instance of {@link RemoteOWS }
     * 
     */
    public RemoteOWS createRemoteOWS() {
        return new RemoteOWS();
    }

    /**
     * Create an instance of {@link PolygonSymbolizer }
     * 
     */
    public PolygonSymbolizer createPolygonSymbolizer() {
        return new PolygonSymbolizer();
    }

    /**
     * Create an instance of {@link Rule }
     * 
     */
    public Rule createRule() {
        return new Rule();
    }

    /**
     * Create an instance of {@link Stroke }
     * 
     */
    public Stroke createStroke() {
        return new Stroke();
    }

    /**
     * Create an instance of {@link LATESTONTOP }
     * 
     */
    public LATESTONTOP createLATESTONTOP() {
        return new LATESTONTOP();
    }

    /**
     * Create an instance of {@link NamedStyle }
     * 
     */
    public NamedStyle createNamedStyle() {
        return new NamedStyle();
    }

    /**
     * Create an instance of {@link LegendGraphic }
     * 
     */
    public LegendGraphic createLegendGraphic() {
        return new LegendGraphic();
    }

    // JAXBElement wrappers for the SLD global element declarations.

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "PerpendicularOffset")
    public JAXBElement<ParameterValueType> createPerpendicularOffset(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_PerpendicularOffset_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Double }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "MaxScaleDenominator")
    public JAXBElement<Double> createMaxScaleDenominator(Double value) {
        return new JAXBElement<Double>(_MaxScaleDenominator_QNAME, Double.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Double }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "ReliefFactor")
    public JAXBElement<Double> createReliefFactor(Double value) {
        return new JAXBElement<Double>(_ReliefFactor_QNAME, Double.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Label")
    public JAXBElement<ParameterValueType> createLabel(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_Label_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Abstract")
    public JAXBElement<String> createAbstract(String value) {
        return new JAXBElement<String>(_Abstract_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Name")
    public JAXBElement<String> createName(String value) {
        return new JAXBElement<String>(_Name_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link SelectedChannelType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "GreenChannel")
    public JAXBElement<SelectedChannelType> createGreenChannel(SelectedChannelType value) {
        return new JAXBElement<SelectedChannelType>(_GreenChannel_QNAME, SelectedChannelType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Radius")
    public JAXBElement<ParameterValueType> createRadius(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_Radius_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Double }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "MinScaleDenominator")
    public JAXBElement<Double> createMinScaleDenominator(Double value) {
        return new JAXBElement<Double>(_MinScaleDenominator_QNAME, Double.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Title")
    public JAXBElement<String> createTitle(String value) {
        return new JAXBElement<String>(_Title_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link RasterSymbolizer }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "RasterSymbolizer", substitutionHeadNamespace = "http://www.opengis.net/sld", substitutionHeadName = "Symbolizer")
    public JAXBElement<RasterSymbolizer> createRasterSymbolizer(RasterSymbolizer value) {
        return new JAXBElement<RasterSymbolizer>(_RasterSymbolizer_QNAME, RasterSymbolizer.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Boolean }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "BrightnessOnly")
    public JAXBElement<Boolean> createBrightnessOnly(Boolean value) {
        return new JAXBElement<Boolean>(_BrightnessOnly_QNAME, Boolean.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link SymbolizerType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Symbolizer")
    public JAXBElement<SymbolizerType> createSymbolizer(SymbolizerType value) {
        return new JAXBElement<SymbolizerType>(_Symbolizer_QNAME, SymbolizerType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Size")
    public JAXBElement<ParameterValueType> createSize(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_Size_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "FeatureTypeName")
    public JAXBElement<String> createFeatureTypeName(String value) {
        return new JAXBElement<String>(_FeatureTypeName_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "SemanticTypeIdentifier")
    public JAXBElement<String> createSemanticTypeIdentifier(String value) {
        return new JAXBElement<String>(_SemanticTypeIdentifier_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "SourceChannelName")
    public JAXBElement<String> createSourceChannelName(String value) {
        return new JAXBElement<String>(_SourceChannelName_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link PointSymbolizer }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "PointSymbolizer", substitutionHeadNamespace = "http://www.opengis.net/sld", substitutionHeadName = "Symbolizer")
    public JAXBElement<PointSymbolizer> createPointSymbolizer(PointSymbolizer value) {
        return new JAXBElement<PointSymbolizer>(_PointSymbolizer_QNAME, PointSymbolizer.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "DisplacementY")
    public JAXBElement<ParameterValueType> createDisplacementY(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_DisplacementY_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "DisplacementX")
    public JAXBElement<ParameterValueType> createDisplacementX(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_DisplacementX_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link PolygonSymbolizer }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "PolygonSymbolizer", substitutionHeadNamespace = "http://www.opengis.net/sld", substitutionHeadName = "Symbolizer")
    public JAXBElement<PolygonSymbolizer> createPolygonSymbolizer(PolygonSymbolizer value) {
        return new JAXBElement<PolygonSymbolizer>(_PolygonSymbolizer_QNAME, PolygonSymbolizer.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Double }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "GammaValue")
    public JAXBElement<Double> createGammaValue(Double value) {
        return new JAXBElement<Double>(_GammaValue_QNAME, Double.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "WellKnownName")
    public JAXBElement<String> createWellKnownName(String value) {
        return new JAXBElement<String>(_WellKnownName_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link SelectedChannelType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "BlueChannel")
    public JAXBElement<SelectedChannelType> createBlueChannel(SelectedChannelType value) {
        return new JAXBElement<SelectedChannelType>(_BlueChannel_QNAME, SelectedChannelType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link LineSymbolizer }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "LineSymbolizer", substitutionHeadNamespace = "http://www.opengis.net/sld", substitutionHeadName = "Symbolizer")
    public JAXBElement<LineSymbolizer> createLineSymbolizer(LineSymbolizer value) {
        return new JAXBElement<LineSymbolizer>(_LineSymbolizer_QNAME, LineSymbolizer.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Service")
    public JAXBElement<String> createService(String value) {
        return new JAXBElement<String>(_Service_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Format")
    public JAXBElement<String> createFormat(String value) {
        return new JAXBElement<String>(_Format_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Boolean }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "IsDefault")
    public JAXBElement<Boolean> createIsDefault(Boolean value) {
        return new JAXBElement<Boolean>(_IsDefault_QNAME, Boolean.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link TextSymbolizer }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "TextSymbolizer", substitutionHeadNamespace = "http://www.opengis.net/sld", substitutionHeadName = "Symbolizer")
    public JAXBElement<TextSymbolizer> createTextSymbolizer(TextSymbolizer value) {
        return new JAXBElement<TextSymbolizer>(_TextSymbolizer_QNAME, TextSymbolizer.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "AnchorPointY")
    public JAXBElement<ParameterValueType> createAnchorPointY(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_AnchorPointY_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Rotation")
    public JAXBElement<ParameterValueType> createRotation(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_Rotation_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "AnchorPointX")
    public JAXBElement<ParameterValueType> createAnchorPointX(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_AnchorPointX_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link SelectedChannelType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "RedChannel")
    public JAXBElement<SelectedChannelType> createRedChannel(SelectedChannelType value) {
        return new JAXBElement<SelectedChannelType>(_RedChannel_QNAME, SelectedChannelType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link ParameterValueType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Opacity")
    public JAXBElement<ParameterValueType> createOpacity(ParameterValueType value) {
        return new JAXBElement<ParameterValueType>(_Opacity_QNAME, ParameterValueType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link SelectedChannelType }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "GrayChannel")
    public JAXBElement<SelectedChannelType> createGrayChannel(SelectedChannelType value) {
        return new JAXBElement<SelectedChannelType>(_GrayChannel_QNAME, SelectedChannelType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     * 
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/sld", name = "Value")
    public JAXBElement<String> createValue(String value) {
        return new JAXBElement<String>(_Value_QNAME, String.class, null, value);
    }

}
|
<filename>vnpy/api/gateio/__init__.py<gh_stars>10-100
# encoding: UTF-8
from .vngate import Gate_DataApi,Gate_TradeApi
|
var fs = require('fs'),
path = require('path'),
omxplayer = require('./modules/omxplayer/omxplayer');
var commandMap = omxplayer.commandMap,
playerCommand = omxplayer.command,
playerPlay = omxplayer.play,
playerStop = omxplayer.stop;
// Re-export lifecycle hooks straight from the omxplayer backend
exports.setup = omxplayer.setup;
exports.cleanup = omxplayer.cleanup;
// Mutable player state shared by the control functions below
exports.isPlaying = false;
exports.isPaused = false;
exports.currentFile = null;
// Playlist entries ({path, reldir}) and the root directories they came from
exports.files = [];
exports.rootDirs = [];
/* Add file or directory of files.
* @param {string} filename
*/
exports.addFile = function (filename) {
filename = expandPath(filename);
if (!fs.existsSync(filename)) {
console.warn("Invalid file '"+ filename +"', skipping.")
} else {
var root_dir = (fs.statSync(filename).isDirectory()) ? filename : path.dirname(filename);
if (root_dir === filename) {
process.stdout.write("Adding files from directory "+ filename +" ");
} else {
process.stdout.write("Adding file "+ filename +" ");
}
findFile(filename, function (err, filepath, stat) {
if (err) console.error(err);
//ignore hidden files
if (path.basename(filepath)[0] !== '.') {
var rel_dir = path.relative(root_dir, path.dirname(filepath));
exports.files.push({
path: filepath,
reldir: (rel_dir.length === 0) ? null : rel_dir
});
process.stdout.write('.');
}
}, function () {
//order by path name, alphabetical
exports.files.sort(function (a, b) {
return (a.path > b.path) ? 1 : ((a.path < b.path) ? -1 : 0);
});
console.log(" all loaded!");
});
}
}
exports.play = function (filename) {
var status;
//test filename
if (!isFileAvailable(filename)) {
status = makeStatus(true, "File not available", false);
status.file = filename;
} else {
if (exports.isPlaying) {
exports.stop();
}
playerPlay(filename);
exports.isPlaying = true;
exports.isPaused = false;
exports.currentFile = filename;
status = makeStatus(true, "Playing file", true);
}
console.log(status);
return status;
};
exports.stop = function () {
var status;
if (exports.isPlaying) {
status = makeStatus(true, "Player stopped", true);
} else {
status = makeStatus(true, "Not playing, player stop failed.", false);
}
playerStop()
exports.isPlaying = false;
exports.isPaused = false;
exports.currentFile = null;
console.log(status);
return status;
};
exports.pause = function () {
var status;
if (exports.isPlaying) {
exports.isPaused = !exports.isPaused;
playerCommand(commandMap.pause);
status = makeStatus(true, "Player is " + (exports.isPaused ? "paused" : "unpaused"), true);
} else {
status = makeStatus(true, "Not playing, pause failed.", false);
}
console.log(status);
return status;
};
/**
 * Build an exported control function that forwards a single omxplayer
 * command when a file is playing.
 *
 * Fix: the original seekBackward/stepBackward declared `var error` but
 * assigned to an undeclared `status`, leaking an implicit global in sloppy
 * mode (and throwing in strict mode). All six handlers also duplicated the
 * same body, so they are generated from one factory.
 *
 * @param {string} commandKey - key into commandMap, looked up at call time
 * @param {string} label - human-readable action name, e.g. "Volume up"
 * @returns {function(): object} control function returning a status object
 */
function makeCommandHandler(commandKey, label) {
    return function () {
        var status;
        if (exports.isPlaying) {
            playerCommand(commandMap[commandKey]);
            status = makeStatus(true, label, true);
        } else {
            status = makeStatus(true, "Not playing, " + label.toLowerCase() + " failed.", false);
        }
        console.log(status);
        return status;
    };
}
exports.volumeUp = makeCommandHandler('volumeUp', "Volume up");
exports.volumeDown = makeCommandHandler('volumeDown', "Volume down");
exports.seekForward = makeCommandHandler('seekForward', "Seek forward");
exports.seekBackward = makeCommandHandler('seekBackward', "Seek backward");
exports.stepForward = makeCommandHandler('stepForward', "Step forward");
exports.stepBackward = makeCommandHandler('stepBackward', "Step backward");
/*
* utils
*/
/**
 * Build the status/result object shared by all control functions.
 * @param {boolean} status - true maps to 'ok', false to 'error'
 * @param {string} msg - human-readable message
 * @param {boolean} success - whether the requested action took effect
 * @returns {{status: string, message: string, file: ?string, success: boolean}}
 */
function makeStatus (status, msg, success) {
    var result = {};
    result.status = status ? 'ok' : 'error';
    result.message = msg;
    result.file = exports.currentFile;
    result.success = success;
    return result;
}
/**
 * Expand a leading '~' to $HOME and resolve the result to an absolute path.
 * @param {string} pathname
 * @returns {string} absolute, normalized path
 */
function expandPath (pathname) {
    var expanded = pathname.charAt(0) === '~'
        ? process.env.HOME + pathname.slice(1)
        : pathname;
    return path.resolve(expanded);
}
/**
 * Whether a path has been registered in the playlist via addFile.
 * @param {string} filename
 * @returns {boolean}
 */
function isFileAvailable (filename) {
    return exports.files.some(function (entry) {
        return entry.path === filename;
    });
}
/* Synchronously walk directory for files, executing callback for each one.
* @param {string} filepath
* @param {function(err,filepath,stats)} callback
* @param {function} onFinish --Optional
*/
function findFile (filepath, callback, onFinish) {
var onFile = function (fp) {
if (!fs.existsSync(fp)) {
var err = new ReferenceError(fp + " does not exist.");
callback(err, fp, null);
} else {
var stats = fs.statSync(fp);
if (stats.isDirectory()) {
var files = fs.readdirSync(fp);
for (var i = 0, len = files.length; i < len; i++) {
onFile(path.join(fp, files[i]));
}
} else {
callback(null, fp, stats);
}
}
}
onFile(filepath);
if (onFinish) onFinish();
}
|
<filename>examples/maiorNotaAlunos/frontend/src/app/core/interface/base-model.component.ts
import { OnInit } from '@angular/core';
import { BaseComponent } from './base.component';
import { AppInjector } from '../../app.injector';
import { CrudService } from '../service/crud.service';
import { ActivatedRoute } from '@angular/router';
/**
* The 'BaseModelComponent' class provides the common API for all the components
* that works with models.
*
* All components that uses models MUST extend this class.
*
* @extends BaseComponent
*/
export abstract class BaseModelComponent extends BaseComponent
  implements OnInit {
  /**
   * Service to do the CRUD operations.
   *
   * Resolved through the global AppInjector rather than constructor
   * injection, so concrete subclasses do not have to forward it.
   *
   * @type {CrudService}
   */
  protected service: CrudService = AppInjector.get(CrudService);
  /**
   * Constructor.
   */
  constructor() {
    super();
  }
  /**
   * On Init of the component. Delegates to BaseComponent.
   */
  ngOnInit(): void {
    super.ngOnInit();
  }
  /**
   * Gets the param from the activated route.
   *
   * NOTE(review): paramMap.get() returns string | null, and the fallback
   * branch below also yields null, so despite the declared 'string' return
   * type callers may receive null — confirm under strictNullChecks.
   *
   * @param {string} param
   * @returns {string}
   */
  protected getParam(param: string): string {
    return this.getActivatedRoute()
      ? this.getActivatedRoute().snapshot.paramMap.get(param)
      : null;
  }
  /**
   * Gets the activated route for data extraction.
   *
   * Returns null by default; subclasses that need route params override
   * this to supply their injected ActivatedRoute.
   *
   * @returns {ActivatedRoute}
   */
  protected getActivatedRoute(): ActivatedRoute {
    return null;
  }
  /**
   * Gets the base URL of the service (mainly backend url).
   *
   * Ex: To get all items, like 'user', in server, the url is: http://server.com/user
   * This method should return just your base: 'user'.
   *
   * @returns {string}
   */
  abstract getServiceURL(): string;
  /**
   * Gets the base URL of the router (frontend url).
   *
   * Ex: To navigate between 'area' components, like 'area/edit', should return 'area'.
   *
   * @returns {string}
   */
  abstract getRouterURL(): string;
}
|
const fileUrl = require('file-url')
const mathjaxFileUrl = fileUrl(require.resolve('mathjax/unpacked/MathJax.js'))
// Builds the MathJax bootstrap markup injected into generated documents.
// Returns '' when the AsciiDoc 'stem' attribute is not set on the document.
module.exports = {
  content: (node) => {
    // logic borrowed from Asciidoctor HTML5 converter
    if (node.getAttribute('stem') !== undefined) {
      // eqnums: unset -> "none", empty string -> "AMS", otherwise pass through
      let eqnumsVal = node.getAttribute('eqnums')
      if (eqnumsVal === undefined) {
        eqnumsVal = 'none'
      }
      if (eqnumsVal === '') {
        eqnumsVal = 'AMS'
      }
      // Template content is emitted verbatim into the page; the \\\\ escapes
      // become \\ in the generated script (MathJax delimiter config).
      return `<script type="text/x-mathjax-config">
MathJax.Hub.Config({
messageStyle: "none",
tex2jax: {
inlineMath: [["\\\\(", "\\\\)"]],
displayMath: [["\\\\[", "\\\\]"]],
ignoreClass: "nostem|nolatexmath"
},
asciimath2jax: {
delimiters: [["\\\\$", "\\\\$"]],
ignoreClass: "nostem|noasciimath"
},
TeX: { equationNumbers: { autoNumber: "${eqnumsVal}" } }
});
MathJax.Hub.Register.StartupHook("AsciiMath Jax Ready", function () {
MathJax.InputJax.AsciiMath.postfilterHooks.Add(function (data, node) {
if ((node = data.script.parentNode) && (node = node.parentNode) && node.classList.contains("stemblock")) {
data.math.root.display = "block"
}
return data
})
})
</script>
<script src="${mathjaxFileUrl}?config=TeX-MML-AM_HTMLorMML"></script>
<script>
// defer relayouting by 'Paged' until 'MathJax' rendering is complete
// otherwise formulas wouldn't be replaced and content height can't be calculated by 'Paged'
// 'MathJax' needs to be loaded before 'Paged' to make this work
window.PagedConfig = {
before: () => {
return new Promise((resolve) => {
window.MathJax.Hub.Queue(resolve);
})
}
};
</script>`
    }
    return ''
  }
}
|
#!/usr/bin/env bash
#MIT License
#
#Copyright (c) 2017 Aleksandar Babic
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
# Aleksandar Babic - https://aleksandar.alfa-ing.com
# This script will stop running app on deployHandler platform
# It will check if app is running
# Delete it from PM2 (so it does not restart on server reboot)
# It will save PM2 state
# Require exactly one argument: the PM2 app name to stop.
if [ "$#" -ne 1 ]; then
    echo "Usage: ./stopApp.sh appName"
    # fixed: previously a bare 'exit' returned 0, so callers could not
    # distinguish a usage error from success
    exit 1
fi
#CONFIG
appName=$1
# Read the app's restart counter column from 'pm2 list'; a numeric value
# means the process is known to PM2. Quoting avoids word splitting/globbing.
res=$(su - appsrunner -c "pm2 list" | grep "$appName" | awk '{print $8}')
re='^[0-9]+$'
if [[ $res =~ $re ]] && [[ $res -ne 0 ]] ; then
    # Delete from PM2 so the app does not restart on reboot, then persist.
    su - appsrunner -c "pm2 delete $appName"
    if [ $? -eq 0 ]; then
        echo "App '$appName' stopped."
        su - appsrunner -c "pm2 save"
        exit 0
    fi
fi
echo "App $appName already stopped."
exit 1
|
<filename>include/re/lib/fft/hann_window.hpp
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#pragma once
#include <cmath>
#include <algorithm>
#include <array>
#include <iterator>
#include <functional>
#include <gsl/span>
#include <re/lib/common.hpp>
namespace re {
namespace fft {
// Length-N "periodic" Hann window (denominator N, not N-1), with the
// coefficients precomputed at construction so cut() is a pure element-wise
// multiply.
template <typename T, int_t N>
class hann_window
{
    static_assert(std::is_floating_point<T>::value);
public:
    // Fill the coefficient cache once up front.
    hann_window()
    noexcept
    {
        encache();
    }
    // Apply the window: out[i] = cache[i] * in[i] for i in [0, N).
    // InputIt must provide at least N readable elements.
    template <class InputIt, class OutputIt>
    void
    cut(InputIt in, OutputIt out)
    const noexcept
    {
        std::transform(
            std::cbegin(cache),
            std::cend(cache),
            in,
            out,
            std::multiplies<>()
        );
    }
    // Fixed-extent span overload; sizes are enforced by the span types.
    void
    cut(gsl::span<T const, N> in, gsl::span<T, N> out)
    const noexcept
    {
        std::transform(
            std::cbegin(cache),
            std::cend(cache),
            std::cbegin(in),
            std::begin(out),
            std::multiplies<>()
        );
    }
    // Mean value of the Hann window, usable to undo the average amplitude
    // loss introduced by windowing.
    static constexpr T
    norm_correction()
    {
        return 0.5f;
    }
private:
    // Populate cache with window_function(0 .. N-1).
    void
    encache()
    noexcept
    {
        for (auto i = 0; i < N; ++i) {
            cache[i] = window_function(i);
        }
    }
    // Hann coefficient: (1 - cos(2*pi*i/N)) / 2.
    // NOTE(review): std::cos is not constexpr before C++26, so 'constexpr'
    // here relies on compiler extensions (e.g. GCC builtins) — confirm the
    // target toolchains accept it.
    static constexpr T
    window_function(int_t position)
    noexcept
    {
        auto relative_position = static_cast<T>(position) / N;
        return (1 - std::cos(relative_position * 2 * pi<T>)) / 2;
    }
    std::array<T, N> cache;
};
} // fft
} // re
|
#!/bin/bash
set -e
set +x
#####################################################################################################
# 1. global variables, you can change them according to your requirements
#####################################################################################################
# armv7 or armv8, default armv8.
ARCH=armv8
# c++_static or c++_shared, default c++_static.
ANDROID_STL=c++_static
# min android api level
MIN_ANDROID_API_LEVEL_ARMV7=16
MIN_ANDROID_API_LEVEL_ARMV8=21
# android api level, which can also be set to a specific number
ANDROID_API_LEVEL="Default"
# gcc or clang, default gcc.
TOOLCHAIN=gcc
# ON or OFF, default OFF.
WITH_EXTRA=OFF
# ON or OFF, default ON.
WITH_JAVA=ON
# controls whether to compile cv functions into lib, default is OFF.
WITH_CV=OFF
# controls whether to keep log information in the lib, default is ON.
WITH_LOG=ON
# controls whether to throw the exception when error occurs, default is OFF
WITH_EXCEPTION=OFF
# options of striping lib according to input model.
OPTMODEL_DIR=""
WITH_STRIP=OFF
# options of compiling NPU lib.
WITH_HUAWEI_KIRIN_NPU=OFF
HUAWEI_KIRIN_NPU_SDK_ROOT="$(pwd)/ai_ddk_lib/" # Download HiAI DDK from https://developer.huawei.com/consumer/cn/hiai/
# options of compiling APU lib.
WITH_MEDIATEK_APU=OFF
MEDIATEK_APU_SDK_ROOT="$(pwd)/apu_ddk" # Download APU SDK from https://paddlelite-demo.bj.bcebos.com/devices/mediatek/apu_ddk.tar.gz
# options of compiling OPENCL lib.
WITH_OPENCL=OFF
# options of adding training ops
WITH_TRAIN=OFF
# num of threads used during compiling..
readonly NUM_PROC=${LITE_BUILD_THREADS:-4}
#####################################################################################################
#####################################################################################################
# 2. local variables, these variables should not be changed.
#####################################################################################################
# url that stores third-party zip file to accelerate third-paty lib installation
readonly THIRDPARTY_TAR=https://paddle-inference-dist.bj.bcebos.com/PaddleLite/third-party-05b862.tar.gz
# absolute path of Paddle-Lite.
readonly workspace=$PWD/$(dirname $0)/../../
# basic options for android compiling.
readonly CMAKE_COMMON_OPTIONS="-DWITH_LITE=ON \
    -DLITE_WITH_ARM=ON \
    -DLITE_WITH_LIGHT_WEIGHT_FRAMEWORK=ON \
    -DLITE_WITH_X86=OFF \
    -DWITH_TESTING=OFF \
    -DARM_TARGET_OS=android"
# on mac environment, we should expand the maximum file num to compile successfully
os_name=`uname -s`
if [ ${os_name} == "Darwin" ]; then
    ulimit -n 1024
fi
#####################################################################################################
####################################################################################################
# 3. functions of prepare workspace before compiling
####################################################################################################
# 3.1 generate `__generated_code__.cc`, which is dependended by some targets in cmake.
# here we fake an empty file to make cmake works.
# Create the placeholder files some cmake targets depend on.
# $1 = repo root dir, $2 = cmake build dir
function prepare_workspace {
    local root_dir=$1
    local build_dir=$2
    # 1. Prepare gen_code file (empty stub so cmake targets resolve)
    GEN_CODE_PATH_PREFIX=$build_dir/lite/gen_code
    mkdir -p ${GEN_CODE_PATH_PREFIX}
    touch ${GEN_CODE_PATH_PREFIX}/__generated_code__.cc
    # 2.Prepare debug tool
    DEBUG_TOOL_PATH_PREFIX=$build_dir/lite/tools/debug
    mkdir -p ${DEBUG_TOOL_PATH_PREFIX}
    cp $root_dir/lite/tools/debug/analysis_tool.py ${DEBUG_TOOL_PATH_PREFIX}/
}
# 3.2 prepare source code of opencl lib
# here we bundle all cl files into a cc file to bundle all opencl kernels into a single lib
# Bundle all OpenCL .cl kernels into a single generated .cc file so the
# kernels can be linked into one lib.
# $1 = repo root dir, $2 = cmake build dir (generation itself happens in-tree)
function prepare_opencl_source_code {
    local root_dir=$1
    local build_dir=$2
    # in build directory
    # Prepare opencl_kernels_source.cc file
    GEN_CODE_PATH_OPENCL=$root_dir/lite/backends/opencl
    # fixed: the original 'rm -f GEN_CODE_PATH_OPENCL/...' was missing the
    # '$', so it removed a literal relative path and never deleted the stale
    # generated file.
    rm -f ${GEN_CODE_PATH_OPENCL}/opencl_kernels_source.cc
    OPENCL_KERNELS_PATH=$root_dir/lite/backends/opencl/cl_kernel
    mkdir -p ${GEN_CODE_PATH_OPENCL}
    touch $GEN_CODE_PATH_OPENCL/opencl_kernels_source.cc
    python $root_dir/lite/tools/cmake_tools/gen_opencl_code.py $OPENCL_KERNELS_PATH $GEN_CODE_PATH_OPENCL/opencl_kernels_source.cc
}
# 3.3 prepare third_party libraries for compiling
# here we store third_party libraries into Paddle-Lite/third-party
# Fetch prebuilt third-party dependencies, or fall back to git submodules.
# NOTE(review): wget and tar operate in the current working directory while
# the existence checks use $workspace — confirm callers run this from
# $workspace, otherwise the archive is extracted in the wrong place.
function prepare_thirdparty {
    # re-fetch when the third-party dir is missing or a tarball is present
    if [ ! -d $workspace/third-party -o -f $workspace/third-party-05b862.tar.gz ]; then
        rm -rf $workspace/third-party
        if [ ! -f $workspace/third-party-05b862.tar.gz ]; then
            wget $THIRDPARTY_TAR
        fi
        tar xzf third-party-05b862.tar.gz
    else
        git submodule update --init --recursive
    fi
}
####################################################################################################
####################################################################################################
# 4. compiling functions
####################################################################################################
# helper function for setting android api level
# Derive cmake_api_level_options from ANDROID_API_LEVEL, validating it against
# the minimum API level supported for the selected ARCH.
# Sets: MIN_ANDROID_API_LEVEL, cmake_api_level_options. Exits 1 on bad input.
function set_android_api_level {
    # android api level for android version
    if [ "${ARCH}" == "armv7" ]; then
        MIN_ANDROID_API_LEVEL=${MIN_ANDROID_API_LEVEL_ARMV7}
    else
        MIN_ANDROID_API_LEVEL=${MIN_ANDROID_API_LEVEL_ARMV8}
    fi
    if [ "${ANDROID_API_LEVEL}" == "Default" ]; then
        # "Default" defers to the NDK's own default API level
        cmake_api_level_options=""
    elif [ ${ANDROID_API_LEVEL} -ge ${MIN_ANDROID_API_LEVEL} ]; then
        cmake_api_level_options="-DANDROID_NATIVE_API_LEVEL=${ANDROID_API_LEVEL}"
    else
        echo "Error: ANDROID_API_LEVEL should be no less than ${MIN_ANDROID_API_LEVEL} on ${ARCH}."
        exit 1
    fi
}
# 4.1 function of tiny_publish compiling
# here we only compile light_api lib
# 4.1 tiny_publish: compile only the light_api library.
# Consumes the global ARCH/TOOLCHAIN/WITH_* option flags and $workspace.
function make_tiny_publish_so {
  build_dir=$workspace/build.lite.android.$ARCH.$TOOLCHAIN
  if [ "${WITH_OPENCL}" == "ON" ]; then
    build_dir=${build_dir}.opencl
  fi
  # fixed: the original tested "${WITH_npu}", a variable never set anywhere,
  # so the '.npu' build-dir suffix was dead code; the real option flag is
  # WITH_HUAWEI_KIRIN_NPU.
  if [ "${WITH_HUAWEI_KIRIN_NPU}" == "ON" ]; then
    build_dir=${build_dir}.npu
  fi
  if [ -d $build_dir ]
  then
    rm -rf $build_dir
  fi
  mkdir -p $build_dir
  cd $build_dir
  if [ "${WITH_OPENCL}" == "ON" ]; then
    prepare_opencl_source_code $workspace $build_dir
  fi
  # model-based stripping requires the extra kernels to select from
  if [ "${WITH_STRIP}" == "ON" ]; then
    WITH_EXTRA=ON
  fi
  # android api level for android version
  set_android_api_level
  local cmake_mutable_options="
      -DLITE_BUILD_EXTRA=$WITH_EXTRA \
      -DLITE_WITH_LOG=$WITH_LOG \
      -DLITE_WITH_EXCEPTION=$WITH_EXCEPTION \
      -DLITE_BUILD_TAILOR=$WITH_STRIP \
      -DLITE_OPTMODEL_DIR=$OPTMODEL_DIR \
      -DLITE_WITH_JAVA=$WITH_JAVA \
      -DLITE_WITH_CV=$WITH_CV \
      -DLITE_WITH_NPU=$WITH_HUAWEI_KIRIN_NPU \
      -DNPU_DDK_ROOT=$HUAWEI_KIRIN_NPU_SDK_ROOT \
      -DLITE_WITH_APU=$WITH_MEDIATEK_APU \
      -DAPU_DDK_ROOT=$MEDIATEK_APU_SDK_ROOT \
      -DLITE_WITH_OPENCL=$WITH_OPENCL \
      -DARM_TARGET_ARCH_ABI=$ARCH \
      -DARM_TARGET_LANG=$TOOLCHAIN \
      -DANDROID_STL_TYPE=$ANDROID_STL"
  cmake $workspace \
      ${CMAKE_COMMON_OPTIONS} \
      ${cmake_api_level_options} \
      ${cmake_mutable_options} \
      -DLITE_ON_TINY_PUBLISH=ON
  # todo: third_party of opencl should be moved into git submodule and cmake later
  if [ "${WITH_OPENCL}" == "ON" ]; then
    make opencl_clhpp -j$NUM_PROC
  fi
  make publish_inference -j$NUM_PROC
  cd - > /dev/null
}
# 4.2 function of full_publish compiling
# here we compile both light_api lib and full_api lib
# 4.2 full_publish: compile both the light_api and full_api libraries.
# Consumes the global ARCH/TOOLCHAIN/WITH_* option flags and $workspace.
function make_full_publish_so {
  prepare_thirdparty
  build_directory=$workspace/build.lite.android.$ARCH.$TOOLCHAIN
  if [ -d $build_directory ]
  then
    rm -rf $build_directory
  fi
  mkdir -p $build_directory
  cd $build_directory
  prepare_workspace $workspace $build_directory
  if [ "${WITH_OPENCL}" == "ON" ]; then
    # fixed: the original passed $build_dir, which is not set in this
    # function (it belongs to make_tiny_publish_so); use $build_directory.
    prepare_opencl_source_code $workspace $build_directory
  fi
  # model-based stripping requires the extra kernels to select from
  if [ "${WITH_STRIP}" == "ON" ]; then
    WITH_EXTRA=ON
  fi
  # android api level for android version
  set_android_api_level
  local cmake_mutable_options="
      -DLITE_BUILD_EXTRA=$WITH_EXTRA \
      -DLITE_WITH_LOG=$WITH_LOG \
      -DLITE_WITH_EXCEPTION=$WITH_EXCEPTION \
      -DLITE_BUILD_TAILOR=$WITH_STRIP \
      -DLITE_OPTMODEL_DIR=$OPTMODEL_DIR \
      -DLITE_WITH_JAVA=$WITH_JAVA \
      -DLITE_WITH_CV=$WITH_CV \
      -DLITE_WITH_NPU=$WITH_HUAWEI_KIRIN_NPU \
      -DNPU_DDK_ROOT=$HUAWEI_KIRIN_NPU_SDK_ROOT \
      -DLITE_WITH_APU=$WITH_MEDIATEK_APU \
      -DAPU_DDK_ROOT=$MEDIATEK_APU_SDK_ROOT \
      -DLITE_WITH_OPENCL=$WITH_OPENCL \
      -DARM_TARGET_ARCH_ABI=$ARCH \
      -DARM_TARGET_LANG=$TOOLCHAIN \
      -DLITE_WITH_TRAIN=$WITH_TRAIN \
      -DANDROID_STL_TYPE=$ANDROID_STL"
  cmake $workspace \
      ${CMAKE_COMMON_OPTIONS} \
      ${cmake_api_level_options} \
      ${cmake_mutable_options}
  # todo: third_party of opencl should be moved into git submodule and cmake later
  if [ "${WITH_OPENCL}" == "ON" ]; then
    make opencl_clhpp -j$NUM_PROC
  fi
  make publish_inference -j$NUM_PROC
  cd - > /dev/null
}
# 4.3 function of print help information
# 4.3 print help information table (echoed verbatim; keep the box layout).
function print_usage {
    echo "----------------------------------------------------------------------------------------------------------------------------------------"
    echo -e "| Methods of compiling Padddle-Lite Android library: |"
    echo "----------------------------------------------------------------------------------------------------------------------------------------"
    echo -e "| compile android library: (armv8, gcc, c++_static) |"
    echo -e "| ./lite/tools/build_android.sh |"
    echo -e "| print help information: |"
    echo -e "| ./lite/tools/build_android.sh help |"
    echo -e "| |"
    echo -e "| optional argument: |"
    echo -e "| --arch: (armv8|armv7), default is armv8 |"
    echo -e "| --toolchain: (gcc|clang), defalut is gcc |"
    echo -e "| --android_stl: (c++_static|c++_shared), default is c++_static |"
    echo -e "| --with_java: (OFF|ON); controls whether to publish java api lib, default is ON |"
    echo -e "| --with_cv: (OFF|ON); controls whether to compile cv functions into lib, default is OFF |"
    echo -e "| --with_log: (OFF|ON); controls whether to print log information, default is ON |"
    echo -e "| --with_exception: (OFF|ON); controls whether to throw the exception when error occurs, default is OFF |"
    echo -e "| --with_extra: (OFF|ON); controls whether to publish extra operators and kernels for (sequence-related model such as OCR or NLP) |"
    echo -e "| --android_api_level: (16~27); control android api level, default is 16 on armv7 and 21 on armv8. You could set a specific |"
    echo -e "| android_api_level as you need. |"
    echo -e "| | Paddle-Lite Requird / ARM ABI | armv7 | armv8 | |"
    echo -e "| |------------------------------------|-------|-------| |"
    echo -e "| |Supported Minimum Android API Level | 16 | 21 | |"
    echo -e "| |Supported Minimum Android Version | 4.1 | 5.0 | |"
    echo -e "| |"
    echo -e "| arguments of striping lib according to input model:(armv8, gcc, c++_static) |"
    echo -e "| ./lite/tools/build_android.sh --with_strip=ON --opt_model_dir=YourOptimizedModelDir |"
    echo -e "| --with_strip: (OFF|ON); controls whether to strip lib accrding to input model, default is OFF |"
    echo -e "| --opt_model_dir: (absolute path to optimized model dir) required when compiling striped library |"
    echo -e "| detailed information about striping lib: https://paddle-lite.readthedocs.io/zh/latest/user_guides/library_tailoring.html |"
    echo -e "| |"
    echo -e "| arguments of npu library compiling:(armv8, gcc, c++_static) |"
    echo -e "| ./lite/tools/build_android.sh --with_huawei_kirin_npu=ON --huawei_kirin_npu_sdk_root=YourNpuSdkPath |"
    echo -e "| --with_huawei_kirin_npu: (OFF|ON); controls whether to compile lib for huawei_kirin_npu, default is OFF |"
    echo -e "| --huawei_kirin_npu_sdk_root: (path to huawei HiAi DDK file) required when compiling npu library |"
    echo -e "| you can download huawei HiAi DDK from: https://developer.huawei.com/consumer/cn/hiai/ |"
    echo -e "| detailed information about Paddle-Lite NPU: https://paddle-lite.readthedocs.io/zh/latest/demo_guides/npu.html |"
    echo -e "| |"
    echo -e "| arguments of apu library compiling:(armv8, gcc, c++_static) |"
    echo -e "| ./lite/tools/build_android.sh --with_mediatek_apu=ON --mediatek_apu_sdk_root=YourApuSdkPath |"
    echo -e "| --with_mediatek_apu: (OFF|ON); controls whether to compile lib for mediatek_apu, default is OFF |"
    echo -e "| --mediatek_apu_sdk_root: (path to mediatek APU SDK file) required when compiling apu library |"
    echo -e "| you can download mediatek APU SDK from: https://paddlelite-demo.bj.bcebos.com/devices/mediatek/apu_ddk.tar.gz |"
    echo -e "| detailed information about Paddle-Lite APU: https://paddle-lite.readthedocs.io/zh/latest/demo_guides/mediatek_apu.html |"
    echo -e "| |"
    echo -e "| arguments of opencl library compiling:(armv8, gcc, c++_static) |"
    echo -e "| ./lite/tools/build_android.sh --with_opencl=ON |"
    echo -e "| --with_opencl: (OFF|ON); controls whether to compile lib for opencl, default is OFF |"
    echo "----------------------------------------------------------------------------------------------------------------------------------------"
    echo
}
####################################################################################################
####################################################################################################
# 5. main functions: choose compiling method according to input argument
####################################################################################################
# Entry point: parse CLI flags into the global option variables, then
# dispatch to the requested build flavor (tiny_publish by default).
function main {
    if [ -z "$1" ]; then
        # compiling result contains light_api lib only, recommanded.
        make_tiny_publish_so $ARCH $TOOLCHAIN $ANDROID_STL
        exit 0
    fi
    # Parse command line.
    for i in "$@"; do
        case $i in
            # armv7 or armv8, default armv8
            --arch=*)
                ARCH="${i#*=}"
                shift
                ;;
            # gcc or clang, default gcc
            --toolchain=*)
                TOOLCHAIN="${i#*=}"
                shift
                ;;
            # c++_static or c++_shared, default c++_static
            --android_stl=*)
                ANDROID_STL="${i#*=}"
                shift
                ;;
            --android_api_level=*)
                ANDROID_API_LEVEL="${i#*=}"
                shift
                ;;
            # ON or OFF, default OFF
            --with_extra=*)
                WITH_EXTRA="${i#*=}"
                shift
                ;;
            # ON or OFF, default OFF
            --with_cv=*)
                WITH_CV="${i#*=}"
                shift
                ;;
            # ON or OFF, default ON
            --with_java=*)
                WITH_JAVA="${i#*=}"
                shift
                ;;
            # ON or OFF, default OFF
            --with_strip=*)
                WITH_STRIP="${i#*=}"
                shift
                ;;
            # string, absolute path to optimized model dir
            --opt_model_dir=*)
                OPTMODEL_DIR="${i#*=}"
                shift
                ;;
            # ON or OFF, default ON
            --with_log=*)
                WITH_LOG="${i#*=}"
                shift
                ;;
            # ON or OFF, default OFF
            --with_exception=*)
                WITH_EXCEPTION="${i#*=}"
                if [[ $WITH_EXCEPTION == "ON" && $ARCH == "armv7" && $TOOLCHAIN != "clang" ]]; then
                    set +x
                    echo
                    echo -e "Error: only clang provide C++ exception handling support for 32-bit ARM."
                    echo
                    exit 1
                fi
                shift
                ;;
            # compiling lib which can operate on opencl and cpu.
            --with_opencl=*)
                WITH_OPENCL="${i#*=}"
                shift
                ;;
            # compiling lib which can operate on huawei npu.
            --with_huawei_kirin_npu=*)
                WITH_HUAWEI_KIRIN_NPU="${i#*=}"
                shift
                ;;
            --huawei_kirin_npu_sdk_root=*)
                HUAWEI_KIRIN_NPU_SDK_ROOT="${i#*=}"
                shift
                ;;
            # compiling lib which can operate on mediatek apu.
            --with_mediatek_apu=*)
                WITH_MEDIATEK_APU="${i#*=}"
                shift
                ;;
            --mediatek_apu_sdk_root=*)
                MEDIATEK_APU_SDK_ROOT="${i#*=}"
                shift
                ;;
            # compiling result contains both light_api and cxx_api lib.
            full_publish)
                make_full_publish_so
                exit 0
                ;;
            # compiling lib with training ops.
            --with_train=*)
                WITH_TRAIN="${i#*=}"
                shift
                ;;
            help)
                # print help info
                print_usage
                exit 0
                ;;
            *)
                # unknown option
                # fixed: report the whole argument; "${i#*=}" printed only the
                # value part of '--foo=bar', hiding which flag was wrong.
                echo "Error: unsupported argument \"${i}\""
                print_usage
                exit 1
                ;;
        esac
    done
    # compiling result contains light_api lib only, recommanded.
    make_tiny_publish_so
    exit 0
}
# fixed: quote "$@" so arguments containing spaces (e.g. paths in
# --opt_model_dir) survive word splitting intact.
main "$@"
|
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "otbVectorDataFileReader.h"
#include "otbVectorDataFileWriter.h"
#include "otbLabeledSampleLocalizationGenerator.h"
// Regression test driver for otb::LabeledSampleLocalizationGenerator:
// merges two labeled vector data inputs, generates additional random sample
// locations (seeded for reproducibility), and writes the result.
// argv[1], argv[2]: input vector data files; argv[3]: output vector data file.
int otbLabeledSampleLocalizationGenerator(int itkNotUsed(argc), char* argv[])
{
  const char * inputVD1 = argv[1];
  const char * inputVD2 = argv[2];
  const char * outputVD = argv[3];
  typedef otb::VectorData<> VectorDataType;
  typedef otb::VectorDataFileReader<VectorDataType> VectorDataReaderType;
  typedef otb::VectorDataFileWriter<VectorDataType> VectorDataWriterType;
  typedef otb::LabeledSampleLocalizationGenerator<VectorDataType> GeneratorType;
  // instantiation
  VectorDataReaderType::Pointer reader1 = VectorDataReaderType::New();
  VectorDataReaderType::Pointer reader2 = VectorDataReaderType::New();
  VectorDataWriterType::Pointer writer = VectorDataWriterType::New();
  GeneratorType::Pointer generator = GeneratorType::New();
  reader1->SetFileName(inputVD1);
  //reader1->Update();
  reader2->SetFileName(inputVD2);
  //reader2->Update();
  // Both inputs feed the generator; the pipeline updates lazily via the writer.
  generator->PushBackInput(reader1->GetOutput());
  generator->PushBackInput(reader2->GetOutput());
  generator->SetSeed(0); // enable reproducible random number sequence
  generator->SetClassKey("Class");
  generator->SetNoClassIdentifier(0);
  generator->SetInhibitionRadius(5);
  generator->SetRandomLocalizationDensity(0.004);
  generator->SetNbMaxIteration(1000);
  generator->SetNumberOfPositiveSamplesPerPoint(2);
  //generator->Update();
  // Writer->Update() triggers the whole pipeline execution.
  writer->SetFileName(outputVD);
  writer->SetInput(generator->GetOutput());
  writer->Update();
  return EXIT_SUCCESS;
}
|
<reponame>lujanan/leetcode
//给定一个数组 prices ,其中 prices[i] 是一支给定股票第 i 天的价格。
//
// 设计一个算法来计算你所能获取的最大利润。你可以尽可能地完成更多的交易(多次买卖一支股票)。
//
// 注意:你不能同时参与多笔交易(你必须在再次购买前出售掉之前的股票)。
//
//
//
// 示例 1:
//
//
//输入: prices = [7,1,5,3,6,4]
//输出: 7
//解释: 在第 2 天(股票价格 = 1)的时候买入,在第 3 天(股票价格 = 5)的时候卖出, 这笔交易所能获得利润 = 5-1 = 4 。
// 随后,在第 4 天(股票价格 = 3)的时候买入,在第 5 天(股票价格 = 6)的时候卖出, 这笔交易所能获得利润 = 6-3 = 3 。
//
//
// 示例 2:
//
//
//输入: prices = [1,2,3,4,5]
//输出: 4
//解释: 在第 1 天(股票价格 = 1)的时候买入,在第 5 天 (股票价格 = 5)的时候卖出, 这笔交易所能获得利润 = 5-1 = 4 。
// 注意你不能在第 1 天和第 2 天接连购买股票,之后再将它们卖出。因为这样属于同时参与了多笔交易,你必须在再次购买前出售掉之前的股票。
//
//
// 示例 3:
//
//
//输入: prices = [7,6,4,3,1]
//输出: 0
//解释: 在这种情况下, 没有交易完成, 所以最大利润为 0。
//
//
//
// 提示:
//
//
// 1 <= prices.length <= 3 * 10⁴
// 0 <= prices[i] <= 10⁴
//
// Related Topics 贪心 数组 动态规划 👍 1518 👎 0
package algorithm_100
// maxProfit (greedy): with unlimited transactions, the best total profit is
// the sum of every positive day-to-day price increase.
func maxProfit(prices []int) int {
	if len(prices) < 1 {
		return 0
	}
	profit := 0
	for i := 1; i < len(prices); i++ {
		if diff := prices[i] - prices[i-1]; diff > 0 {
			profit += diff
		}
	}
	return profit
}
// maxProfit1 (dynamic programming): track two states per day — the maximum
// cash while holding no stock, and while holding one share — and take the
// better of selling/buying versus doing nothing at each step.
func maxProfit1(prices []int) int {
	if len(prices) < 1 {
		return 0
	}
	notHolding := 0          // best cash with no stock in hand
	holding := -prices[0]    // best cash while owning one share
	for i := 1; i < len(prices); i++ {
		nextNotHolding := max(notHolding, holding+prices[i])
		nextHolding := max(holding, notHolding-prices[i])
		notHolding, holding = nextNotHolding, nextHolding
	}
	return max(notHolding, holding)
}
// max returns the larger of two ints.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
|
# Install one or more Homebrew bundles, one 'brew bundle install' per file.
# $@ = Brewfile paths.
function install_brew_bundle() { # [bundle_name ...]
    local bundle_name_list=( "$@" )
    local x1
    ##
    for x1 in "${bundle_name_list[@]}" ; do
        # NOTE(review): 'xx' is a helper defined elsewhere in this repo
        # (appears to be an echo-and-run tracer) — confirm its semantics
        # before relying on this function in isolation.
        xx :
        xx brew bundle install --file="${x1}"
    done
}
|
# ETCD v3, using v2 API
# Generate certs
# Write a helper that uses DC/OS's internal bootstrap utilities to mint a
# key/cert pair for a given CN into a target directory. The heredoc
# delimiter is quoted ('EOF'), so nothing inside is expanded by the shell.
# NOTE(review): the helper only guards len(sys.argv) == 1 but reads argv[2];
# calling it with a single argument raises IndexError — confirm intended.
tee bootstrap-certs.py <<-'EOF'
#!/opt/mesosphere/bin/python
import sys
sys.path.append('/opt/mesosphere/lib/python3.6/site-packages')
from dcos_internal_utils import bootstrap
if len(sys.argv) == 1:
    print("Usage: ./bootstrap-certs.py <CN> <PATH> | ./bootstrap-certs.py etcd /var/lib/dcos/etcd/certs")
    sys.exit(1)
b = bootstrap.Bootstrapper(bootstrap.parse_args())
b.read_agent_secrets()
cn = sys.argv[1]
location = sys.argv[2]
keyfile = location + '/' + cn + '.key'
crtfile = location + '/' + cn + '.crt'
b.ensure_key_certificate(cn, keyfile, crtfile, service_account='dcos_bootstrap_agent')
EOF
chmod +x bootstrap-certs.py
# Directory layout for the etcd binary, its certs, and its data.
sudo mkdir -p /opt/etcd/
sudo mkdir -p /etc/etcd/certs
sudo mkdir -p /var/etcd/data
# Certs
sudo ./bootstrap-certs.py etcd /etc/etcd/certs
sudo curl -L http://master.mesos/ca/dcos-ca.crt -o /etc/etcd/certs/dcos-ca.crt
# Install etcd
curl -LO https://github.com/coreos/etcd/releases/download/v3.3.5/etcd-v3.3.5-linux-amd64.tar.gz
sudo tar -xzvf etcd-v3.3.5-linux-amd64.tar.gz -C /opt/etcd --strip-components=1
# Create env file
sudo rm -f /etc/etcd/etcd.env
echo "ETCD_DATA_DIR=/var/etcd/data" | sudo tee -a /etc/etcd/etcd.env
echo "ETCD_CERTS_DIR=/etc/etcd/certs" | sudo tee -a /etc/etcd/etcd.env
echo "ETCD_TLS_CERT=etcd.crt" | sudo tee -a /etc/etcd/etcd.env
echo "ETCD_TLS_KEY=etcd.key" | sudo tee -a /etc/etcd/etcd.env
echo "ETCD_CA_CERT=dcos-ca.crt" | sudo tee -a /etc/etcd/etcd.env
echo "LOCAL_HOSTNAME=$(/opt/mesosphere/bin/detect_ip)" | sudo tee -a /etc/etcd/etcd.env
# Build the peer list from the Exhibitor cluster status (hostname=https://hostname:2380,...)
echo "INITIAL_CLUSTER=$(curl -sS master.mesos:8181/exhibitor/v1/cluster/status | python -c 'import sys,json;j=json.loads(sys.stdin.read());print(",".join([y["hostname"]+"=https://"+y["hostname"]+":2380" for y in j]))')" | sudo tee -a /etc/etcd/etcd.env
sed "s/^/export /g" /etc/etcd/etcd.env | sudo tee /etc/etcd/etcd.env.export
# Listen on 0.0.0.0, but don't advertise on 0.0.0.0
# Create etcd service (variables below are expanded by systemd from
# EnvironmentFile, not by this shell — heredoc delimiter is quoted)
sudo tee /etc/systemd/system/dcos-etcd-proxy.service <<-'EOF'
[Unit]
Description=etcd-proxy
Documentation=https://github.com/coreos/etcd
Conflicts=etcd.service
Conflicts=etcd2.service
[Service]
Type=notify
Restart=always
RestartSec=5s
LimitNOFILE=40000
TimeoutStartSec=0
EnvironmentFile=/etc/etcd/etcd.env
ExecStart=/opt/etcd/etcd --proxy on \
--data-dir ${ETCD_DATA_DIR} \
--listen-client-urls https://0.0.0.0:2379 \
--key-file ${ETCD_CERTS_DIR}/${ETCD_TLS_KEY} \
--cert-file ${ETCD_CERTS_DIR}/${ETCD_TLS_CERT} \
--peer-key-file ${ETCD_CERTS_DIR}/${ETCD_TLS_KEY} \
--peer-cert-file ${ETCD_CERTS_DIR}/${ETCD_TLS_CERT} \
--trusted-ca-file ${ETCD_CERTS_DIR}/${ETCD_CA_CERT} \
--peer-trusted-ca-file ${ETCD_CERTS_DIR}/${ETCD_CA_CERT} \
--client-cert-auth \
--peer-client-cert-auth \
--initial-cluster ${INITIAL_CLUSTER}
[Install]
WantedBy=multi-user.target
EOF
sudo systemctl daemon-reload
sudo systemctl enable dcos-etcd-proxy.service
sudo systemctl restart dcos-etcd-proxy.service
# Smoke-test the proxy with mutual TLS.
sudo ETCDCTL_API=2 /opt/etcd/etcdctl \
--endpoints https://localhost:2379 \
--key-file /etc/etcd/certs/etcd.key \
--cert-file /etc/etcd/certs/etcd.crt \
--ca-file /etc/etcd/certs/dcos-ca.crt \
cluster-health
|
import { delay } from "./utils";
import { rpc, DELAY_MS } from "./config";
import { EosioDelband } from "./interfaces";
/**
 * Fetch `eosio::delband` rows for every scope, keeping only rows that are
 * self-delegated (from == to).
 */
export async function get_table_delband(scopes: Set<string>) {
    const selfDelegated: EosioDelband[] = [];
    for (const scope of Array.from(scopes)) {
        console.log(`get_table_rows [eosio::${scope}:userres]`);
        const response = await rpc.get_table_rows<EosioDelband>("eosio", scope, "delband", {json: true });
        // Only include `delband` that is self delegated
        const ownRows = response.rows.filter((row) => row.from == row.to);
        selfDelegated.push(...ownRows);
    }
    return selfDelegated;
}
/**
 * Page through an entire contract table, deduplicating rows by
 * `lower_bound_key` and returning them as an array.
 *
 * Pagination re-queries with `lower_bound` set to the last row's key; the
 * overlapping first row of each subsequent page is absorbed by the Map.
 * NOTE(review): if a page ever contains only already-seen keys,
 * `lower_bound` stops advancing and this loop will not terminate — confirm
 * `limit` always guarantees forward progress.
 *
 * @param code contract account that owns the table
 * @param scope table scope
 * @param table table name
 * @param lower_bound_key row field used both as dedupe key and as the next
 *        page's lower bound
 * @param delete_keys fields stripped from every row before it is returned
 */
export async function get_tables<T>(code: string, scope: string, table: string, lower_bound_key: string, delete_keys: string[] = []): Promise<T[]> {
    let lower_bound = "";
    const limit = 1500;
    const rows = new Map<string, T>();
    while (true) {
        console.log(`get_table_rows [${code}::${scope}:${table}] size=${rows.size} lower=${lower_bound}`);
        const response: any = await rpc.get_table_rows<T>(code, scope, table, {
            json: true,
            lower_bound,
            limit,
        });
        for (const row of response.rows) {
            // Delete extra fields
            for (const key of delete_keys) {
                delete row[key];
            }
            // Adding to Map removes duplicates entries
            const key = row[lower_bound_key];
            rows.set(key, row);
            // Set lower bound
            lower_bound = key;
        }
        // prevent hitting rate limits from API endpoints
        await delay(DELAY_MS);
        // end of table rows
        if (response.more === false) break;
    }
    return Array.from(rows.values());
}
|
from typing import List
def get_unique_elements(input_list: List[int]) -> List[int]:
    """Return the elements of ``input_list`` with duplicates removed,
    preserving first-occurrence order.
    """
    seen: set = set()
    result: List[int] = []
    for value in input_list:
        if value in seen:
            continue
        seen.add(value)
        result.append(value)
    return result
|
def convert_dict_to_list(d):
    """Convert a mapping into a list of two-element [key, value] lists,
    in the mapping's iteration order.
    """
    return [[key, value] for key, value in d.items()]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.