text stringlengths 1 1.05M |
|---|
<gh_stars>0
package com.infamous.framework.http.core;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
class ByteArrayPartTest {
@Test
public void test() {
ByteArrayPart byteArrayPart = new ByteArrayPart("file", new byte[0], "application/file", "test.txt");
assertEquals("test.txt", byteArrayPart.getFileName());
assertTrue(byteArrayPart.isFile());
}
} |
"use strict";
exports.__esModule = true;
/**
* Returns an Array of numbers inside range.
*/
function range(start, stop, step) {
if (stop == null) {
stop = start;
start = 0;
}
step = step || 1;
var result = [];
var i = start;
while (i <= stop) {
result.push(i);
i += step;
}
return result;
}
exports["default"] = range;
|
#!/bin/bash -ex
# Release helper: build a wheel for each supported macOS framework Python,
# test-install every produced wheel, and keep only the validated ones in dist/.
# Get access to testinstall.
. .evergreen/utils.sh
# Create temp directory for validated files.
rm -rf validdist
mkdir -p validdist
# Preserve any pre-existing artifacts; `|| true` tolerates an empty dist/.
mv dist/* validdist || true
# One build pass per supported CPython framework version.
for VERSION in 3.4 3.5 3.6 3.7 3.8 3.9; do
PYTHON=/Library/Frameworks/Python.framework/Versions/$VERSION/bin/python3
rm -rf build
# Install wheel if not already there.
if ! $PYTHON -m wheel version; then
# NOTE(review): createvirtualenv (from utils.sh) presumably activates the
# venv, so the bare `python`/`pip` below refer to the venv -- TODO confirm.
createvirtualenv $PYTHON releasevenv
WHEELPYTHON=python
pip install --upgrade wheel
else
WHEELPYTHON=$PYTHON
fi
$WHEELPYTHON setup.py bdist_wheel
# `deactivate` only exists when a venv was activated above; ignore otherwise.
deactivate || true
rm -rf releasevenv
# Test that each wheel is installable.
for release in dist/*; do
testinstall $PYTHON $release
mv $release validdist/
done
done
# Move the validated wheels back into dist/ and clean up the staging dir.
mv validdist/* dist
rm -rf validdist
ls dist
|
<filename>mbhd-swing/src/main/java/org/multibit/hd/ui/views/components/enter_amount/EnterAmountView.java
package org.multibit.hd.ui.views.components.enter_amount;
import com.google.common.base.Optional;
import com.google.common.eventbus.Subscribe;
import net.miginfocom.swing.MigLayout;
import org.bitcoinj.core.Coin;
import org.multibit.hd.core.config.BitcoinConfiguration;
import org.multibit.hd.core.config.Configurations;
import org.multibit.hd.core.config.LanguageConfiguration;
import org.multibit.hd.core.events.ExchangeRateChangedEvent;
import org.multibit.hd.core.exchanges.ExchangeKey;
import org.multibit.hd.core.services.CoreServices;
import org.multibit.hd.core.utils.BitcoinSymbol;
import org.multibit.hd.core.utils.Coins;
import org.multibit.hd.core.utils.Numbers;
import org.multibit.hd.ui.MultiBitUI;
import org.multibit.hd.ui.languages.Formats;
import org.multibit.hd.ui.languages.Languages;
import org.multibit.hd.ui.languages.MessageKey;
import org.multibit.hd.ui.views.components.*;
import org.multibit.hd.ui.views.components.text_fields.FormattedDecimalField;
import org.multibit.hd.ui.views.fonts.AwesomeDecorator;
import org.multibit.hd.ui.views.fonts.AwesomeIcon;
import org.multibit.hd.ui.views.themes.Themes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.math.BigDecimal;
/**
 * <p>View to provide the following to UI:</p>
 * <ul>
 * <li>Presentation of a Bitcoin and local currency amount</li>
 * <li>Support for instant bi-directional conversion through exchange rate</li>
 * </ul>
 *
 * @since 0.0.1
 *
 */
public class EnterAmountView extends AbstractComponentView<EnterAmountModel> {

  private static final Logger log = LoggerFactory.getLogger(EnterAmountView.class);

  // View components
  private FormattedDecimalField bitcoinAmountText;
  private FormattedDecimalField localAmountText;

  private JLabel exchangeRateStatusLabel = Labels.newBlankLabel();
  private JLabel approximatelyLabel = Labels.newBlankLabel();
  private JLabel localCurrencySymbolLabel = Labels.newBlankLabel();
  private JLabel bitcoinSymbolLabel = Labels.newBlankLabel();

  // Most recent exchange rate event (absent until the first rate arrives)
  private Optional<ExchangeRateChangedEvent> latestExchangeRateChangedEvent = Optional.absent();

  /**
   * @param model The model backing this view
   */
  public EnterAmountView(EnterAmountModel model) {
    super(model);

    latestExchangeRateChangedEvent = CoreServices
      .getApplicationEventService()
      .getLatestExchangeRateChangedEvent();
  }

  @Override
  public JPanel newComponentPanel() {
    panel = Panels.newPanel(new MigLayout(
      Panels.migXLayout(),
      "[][][][][][]", // Columns
      "[][][]" // Rows
    ));

    // Set the maximum values for the amount fields
    bitcoinAmountText = TextBoxes.newBitcoinAmount(BitcoinSymbol.maxSymbolicAmount().doubleValue());
    localAmountText = TextBoxes.newLocalAmount(999_999_999_999_999.9999);

    Optional<Coin> coinAmount = getModel().get().getCoinAmount();

    // Set initial Bitcoin amount from the model (if non-zero)
    if (coinAmount.isPresent() && !Coin.ZERO.equals(coinAmount.get())) {
      LanguageConfiguration languageConfiguration = Configurations.currentConfiguration.getLanguage();
      BitcoinConfiguration bitcoinConfiguration = Configurations.currentConfiguration.getBitcoin();
      String symbolicAmount = Formats.formatCoinAmount(
        coinAmount.or(Coin.ZERO),
        languageConfiguration,
        bitcoinConfiguration
      );
      bitcoinAmountText.setText(symbolicAmount);
      updateLocalAmount();
    }

    approximatelyLabel = Labels.newApproximately();
    LabelDecorator.applyLocalCurrencySymbol(localCurrencySymbolLabel);

    // Ensure the Bitcoin symbol label matches the local currency
    Font font = bitcoinSymbolLabel.getFont().deriveFont(Font.PLAIN, (float) MultiBitUI.NORMAL_ICON_SIZE);
    bitcoinSymbolLabel.setFont(font);

    // Use the current Bitcoin configuration
    LabelDecorator.applyBitcoinSymbolLabel(bitcoinSymbolLabel);

    // Bind a key listener to allow instant update of UI to amount changes
    // Do not use a focus listener because it will move the value according to
    // the inexact fiat value leading to 10mB becoming 10.00635mB
    bitcoinAmountText.addKeyListener(new KeyAdapter() {
      @Override
      public void keyReleased(KeyEvent e) {
        if (e.isActionKey() || e.getKeyCode() == KeyEvent.VK_TAB || e.getKeyCode() == KeyEvent.VK_SHIFT) {
          // Ignore
          return;
        }
        updateLocalAmount();
      }
    });

    // Bind a key listener to allow instant update of UI to amount changes
    // Do not use a focus listener because it will move the value according to
    // the inexact fiat value leading to 10mB becoming 10.00635mB
    localAmountText.addKeyListener(new KeyAdapter() {
      @Override
      public void keyReleased(KeyEvent e) {
        if (e.isActionKey() || e.getKeyCode() == KeyEvent.VK_TAB || e.getKeyCode() == KeyEvent.VK_SHIFT) {
          // Ignore
          return;
        }
        updateBitcoinAmount();
      }
    });

    // Arrange label placement according to configuration
    boolean isCurrencySymbolLeading = Configurations
      .currentConfiguration
      .getBitcoin()
      .isCurrencySymbolLeading();

    // Add to the panel
    panel.add(Labels.newAmount(), "span 4,grow,push,wrap");
    if (isCurrencySymbolLeading) {
      panel.add(bitcoinSymbolLabel);
      panel.add(bitcoinAmountText);
      panel.add(approximatelyLabel, "pushy,baseline");
      panel.add(localCurrencySymbolLabel, "pushy,baseline");
      panel.add(localAmountText, "wrap");
    } else {
      panel.add(bitcoinAmountText);
      panel.add(bitcoinSymbolLabel);
      panel.add(approximatelyLabel, "pushy,baseline");
      panel.add(localAmountText);
      panel.add(localCurrencySymbolLabel, "pushy,baseline,wrap");
    }
    panel.add(exchangeRateStatusLabel, "span 4,push,wrap");

    setLocalAmountVisibility();

    return panel;
  }

  @Override
  public void requestInitialFocus() {
    bitcoinAmountText.requestFocusInWindow();
  }

  @Override
  public void updateModelFromView() {
    // Do nothing - the model is updated during key press
  }

  @Subscribe
  public void onExchangeRateChanged(ExchangeRateChangedEvent event) {
    if (panel == null) {
      // Still initialising
      return;
    }

    this.latestExchangeRateChangedEvent = Optional.fromNullable(event);

    setLocalAmountVisibility();

    // Rate has changed so trigger an update if focus is on either amount boxes
    if (bitcoinAmountText.hasFocus()) {
      // User is entering Bitcoin amount so will expect the local to update
      updateLocalAmount();
    }
    if (localAmountText.hasFocus()) {
      // User is entering a local amount so will expect the Bitcoin amount to update
      updateBitcoinAmount();
    }
  }

  /**
   * <p>Handles the process of updating the visibility of the local amount</p>
   * <p>This is required when an exchange has failed to provide an exchange rate in the current session</p>
   */
  private void setLocalAmountVisibility() {
    if (latestExchangeRateChangedEvent.isPresent()
      && latestExchangeRateChangedEvent.get().getRateProvider().isPresent()
      && !ExchangeKey.current().equals(ExchangeKey.NONE)) {
      setLocalCurrencyComponentVisibility(true);
      // Rate may not be valid
      setExchangeRateStatus(latestExchangeRateChangedEvent.get().isValid());
    } else {
      // No rate or rate provider
      // Hide the local currency components
      setLocalCurrencyComponentVisibility(false);
      // Rate is not valid by definition
      setExchangeRateStatus(false);
    }
  }

  /**
   * @param visible True if the local currency components should be visible
   */
  private void setLocalCurrencyComponentVisibility(boolean visible) {
    // We can show local currency components
    this.approximatelyLabel.setVisible(visible);
    this.localCurrencySymbolLabel.setVisible(visible);
    this.localAmountText.setVisible(visible);
    this.exchangeRateStatusLabel.setVisible(visible);
  }

  /**
   * @param valid True if the exchange rate is present and valid
   */
  private void setExchangeRateStatus(boolean valid) {
    if (valid) {
      // Update the label to show a check mark
      AwesomeDecorator.bindIcon(
        AwesomeIcon.CHECK,
        exchangeRateStatusLabel,
        true,
        MultiBitUI.NORMAL_ICON_SIZE
      );
      exchangeRateStatusLabel.setText(Languages.safeText(MessageKey.EXCHANGE_RATE_STATUS_OK));
    } else {
      // Update the label to show a cross
      AwesomeDecorator.bindIcon(
        AwesomeIcon.TIMES,
        exchangeRateStatusLabel,
        true,
        MultiBitUI.NORMAL_ICON_SIZE
      );
      exchangeRateStatusLabel.setText(Languages.safeText(MessageKey.EXCHANGE_RATE_STATUS_WARN));
    }
  }

  /**
   * Update the Bitcoin amount based on a change in the local amount
   */
  private void updateBitcoinAmount() {
    SwingUtilities.invokeLater(new Runnable() {
      @Override
      public void run() {
        // Build the value directly from the string
        Optional<BigDecimal> value = Numbers.parseBigDecimal(localAmountText.getText());

        BitcoinSymbol bitcoinSymbol = BitcoinSymbol.current();

        if (latestExchangeRateChangedEvent.isPresent()) {
          if (value.isPresent()) {
            BigDecimal localAmount = value.get();
            BigDecimal exchangeRate = latestExchangeRateChangedEvent.get().getRate();
            try {
              // Apply the exchange rate
              Coin coin = Coins.fromLocalAmount(localAmount, exchangeRate);
              // Update the model with the plain value
              getModel().get().setCoinAmount(coin);
              getModel().get().setLocalAmount(value);
              // Use the symbolic amount in setValue() for display formatting
              BigDecimal symbolicAmount = Coins.toSymbolicAmount(coin, bitcoinSymbol);
              bitcoinAmountText.setValue(symbolicAmount);
              // Give feedback to the user
              localAmountText.setBackground(Themes.currentTheme.dataEntryBackground());
            } catch (ArithmeticException | IllegalArgumentException e) {
              // Give feedback to the user
              localAmountText.setBackground(Themes.currentTheme.invalidDataEntryBackground());
            }
          } else {
            bitcoinAmountText.setText("");
            // Update the model
            getModel().get().setCoinAmount(Coin.ZERO);
            getModel().get().setLocalAmount(Optional.<BigDecimal>absent());
          }
        } else {
          // No exchange rate so no local amount
          getModel().get().setLocalAmount(Optional.<BigDecimal>absent());
        }
        setLocalAmountVisibility();
      }
    });
  }

  /**
   * Update the local amount based on a change in the Bitcoin amount
   */
  private void updateLocalAmount() {
    SwingUtilities.invokeLater(new Runnable() {
      @Override
      public void run() {
        // Build the value directly from the string
        Optional<BigDecimal> value = Numbers.parseBigDecimal(bitcoinAmountText.getText());

        BitcoinSymbol bitcoinSymbol = BitcoinSymbol.current();

        if (latestExchangeRateChangedEvent.isPresent()) {
          if (value.isPresent()) {
            try {
              Coin coin = Coins.fromSymbolicAmount(value.get(), bitcoinSymbol);
              // Update the model
              getModel().get().setCoinAmount(coin);
              // Apply the exchange rate if present
              BigDecimal exchangeRate = latestExchangeRateChangedEvent.get().getRate();
              if (exchangeRate == null) {
                getModel().get().setLocalAmount(Optional.<BigDecimal>absent());
              } else {
                BigDecimal localAmount = Coins.toLocalAmount(coin, exchangeRate);
                if (localAmount.compareTo(BigDecimal.ZERO) != 0) {
                  getModel().get().setLocalAmount(Optional.of(localAmount));
                } else {
                  getModel().get().setLocalAmount(Optional.<BigDecimal>absent());
                }
                // Use setValue for the local amount so that the display formatter
                // will match the currency requirements
                localAmountText.setValue(localAmount);
              }
              // Give feedback to the user
              bitcoinAmountText.setBackground(Themes.currentTheme.dataEntryBackground());
            } catch (ArithmeticException | IllegalArgumentException e) {
              // Give feedback to the user
              bitcoinAmountText.setBackground(Themes.currentTheme.invalidDataEntryBackground());
            }
          } else {
            localAmountText.setText("");
            // Update the model
            getModel().get().setCoinAmount(Coin.ZERO);
            getModel().get().setLocalAmount(Optional.<BigDecimal>absent());
          }
        } else {
          // No exchange rate so no local amount
          if (value.isPresent()) {
            try {
              // Use the value directly
              Coin coin = Coins.fromSymbolicAmount(value.get(), bitcoinSymbol);
              // Update the model
              getModel().get().setCoinAmount(coin);
              getModel().get().setLocalAmount(Optional.<BigDecimal>absent());
              // Give feedback to the user
              // Fix: feedback targets the Bitcoin field being edited (the local
              // components are hidden in this branch), matching the rate-present branch
              bitcoinAmountText.setBackground(Themes.currentTheme.dataEntryBackground());
            } catch (ArithmeticException | IllegalArgumentException e) {
              // Give feedback to the user
              bitcoinAmountText.setBackground(Themes.currentTheme.invalidDataEntryBackground());
            }
          } else {
            // Update the model
            getModel().get().setCoinAmount(Coin.ZERO);
            getModel().get().setLocalAmount(Optional.<BigDecimal>absent());
          }
        }
        setLocalAmountVisibility();
      }
    });
  }
}
|
def isPalindrome(input_str):
    """Return True if input_str reads the same forwards and backwards."""
    length = len(input_str)
    # Compare each character in the first half against its mirror position.
    for left in range(length // 2):
        if input_str[left] != input_str[length - 1 - left]:
            return False
    # No mismatch found: the string is a palindrome.
    return True
if __name__ == "__main__":
    # Fix: `input_str` was referenced without ever being defined, which
    # raised a NameError at import time. Read it from stdin instead.
    input_str = input()
    if isPalindrome(input_str):
        print("Palindrome")
    else:
        print("Not Palindrome")
/*
* Cardinal-Components-API
* Copyright (C) 2019-2020 OnyxStudios
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package nerdhub.cardinal.components.internal;
import nerdhub.cardinal.components.api.ComponentRegistry;
import nerdhub.cardinal.components.api.ComponentType;
import nerdhub.cardinal.components.api.component.ComponentProvider;
import nerdhub.cardinal.components.api.component.extension.SyncedComponent;
import nerdhub.cardinal.components.api.event.ChunkSyncCallback;
import nerdhub.cardinal.components.api.util.Components;
import nerdhub.cardinal.components.api.util.sync.ChunkSyncedComponent;
import net.fabricmc.fabric.api.network.ClientSidePacketRegistry;
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.util.Identifier;
import net.minecraft.util.PacketByteBuf;
import net.minecraft.world.chunk.Chunk;
import java.util.function.Consumer;
/**
 * Registers the network hooks that synchronise chunk components when the
 * optional fabric-networking-v0 module is present.
 */
public final class ComponentsChunkNetworking {
// Server side: when a player starts tracking a chunk, push every synced component to them.
public static void init() {
if (FabricLoader.getInstance().isModLoaded("fabric-networking-v0")) {
ChunkSyncCallback.EVENT.register((player, tracked) -> {
Components.forEach(ComponentProvider.fromChunk(tracked), (componentType, component) -> {
if (component instanceof SyncedComponent) {
((SyncedComponent) component).syncWith(player);
}
});
});
}
}
// Safe to put in the same class as no client-only class is directly referenced
public static void initClient() {
if (FabricLoader.getInstance().isModLoaded("fabric-networking-v0")) {
ClientSidePacketRegistry.INSTANCE.register(ChunkSyncedComponent.PACKET_ID, (context, buffer) -> {
// Packet layout: chunkX, chunkZ, component type id, then the component payload.
int chunkX = buffer.readInt();
int chunkZ = buffer.readInt();
Identifier componentTypeId = buffer.readIdentifier();
ComponentType<?> componentType = ComponentRegistry.INSTANCE.get(componentTypeId);
if (componentType == null) {
// Unknown component type (e.g. not registered on this client) - drop the packet.
return;
}
// Copy the remaining payload so it outlives this network-thread callback;
// released in the finally block below once the main thread has consumed it.
PacketByteBuf copy = new PacketByteBuf(buffer.copy());
Consumer<Chunk> chunkSync = componentType.asComponentPath()
.compose(ComponentProvider::fromChunk)
.thenCastTo(SyncedComponent.class)
.andThenDo(component -> component.processPacket(context, copy));
// Apply the update on the client main thread via the task queue.
context.getTaskQueue().execute(() -> {
try {
// On the client, unloaded chunks return EmptyChunk
Chunk chunk = context.getPlayer().world.getChunk(chunkX, chunkZ);
chunkSync.accept(chunk);
} finally {
copy.release();
}
});
});
}
}
}
|
<filename>com/company/Sniper.java
package com.company;
/** A {@link Soldier} specialisation described as a sniper carrying one pistol. */
public class Sniper extends Soldier {
public Sniper() {
// NOTE(review): `description` is presumably a field inherited from Soldier -- confirm.
description = "Sniper, with a pistol";
}
// Number of guns this soldier type carries.
public int number_of_guns() {
return 1;
}
}
|
<gh_stars>1-10
#include <stdarg.h>
#include <limits.h>
#include <float.h>
#include "_PDCLIB_test.h"
/* Function-pointer type used to pass a function pointer through varargs. */
typedef int (*intfunc_t)( void );
/* Tags announcing the type of the next variadic argument handed to test(). */
enum tag_t
{
TAG_END,
TAG_INT,
TAG_LONG,
TAG_LLONG,
TAG_DBL,
TAG_LDBL,
TAG_INTPTR,
TAG_LDBLPTR,
TAG_FUNCPTR
};
/* Trivial function whose address is round-tripped through varargs by test(). */
static int dummy( void )
{
return INT_MAX;
}
/* Walk the variadic list: each tag names the type of the following argument,
   which must carry that type's sentinel MAX value (or a known pointer).
   Returns 1 on success, 0 on the first mismatch.
   NOTE(review): the early `return 0` paths skip va_end(); harmless on common
   ABIs but technically required by the C standard -- consider cleaning up. */
static int test( enum tag_t s, ... )
{
enum tag_t tag = s;
va_list ap;
va_start( ap, s );
for (;;)
{
switch ( tag )
{
case TAG_INT: if( va_arg( ap, int ) != INT_MAX ) return 0; break;
case TAG_LONG: if( va_arg( ap, long ) != LONG_MAX ) return 0; break;
case TAG_LLONG: if( va_arg( ap, long long ) != LLONG_MAX ) return 0; break;
case TAG_DBL: if( va_arg( ap, double ) != DBL_MAX ) return 0; break;
case TAG_LDBL: if( va_arg( ap, long double ) != LDBL_MAX ) return 0; break;
case TAG_INTPTR: if( *( va_arg( ap, int * ) ) != INT_MAX ) return 0; break;
case TAG_LDBLPTR: if( *( va_arg( ap, long double * ) ) != LDBL_MAX ) return 0; break;
case TAG_FUNCPTR: if( va_arg( ap, intfunc_t ) != dummy ) return 0; break;
case TAG_END: va_end( ap ); return 1;
}
/* Consume the next tag, which announces the argument after it. */
tag = va_arg( ap, enum tag_t );
}
}
/* Exercise va_arg with integer, floating point, pointer and function-pointer
   arguments, singly and in combination. Uses the _PDCLIB_test.h harness. */
START_TEST( stdarg )
{
int x = INT_MAX;
long double d = LDBL_MAX;
TESTCASE( test(TAG_END) );
TESTCASE( test(TAG_INT, INT_MAX, TAG_END) );
TESTCASE( test(TAG_LONG, LONG_MAX, TAG_LLONG, LLONG_MAX, TAG_END) );
TESTCASE( test(TAG_DBL, DBL_MAX, TAG_LDBL, LDBL_MAX, TAG_END) );
TESTCASE( test(TAG_INTPTR, &x, TAG_LDBLPTR, &d, TAG_FUNCPTR, dummy, TAG_END) );
}
END_TEST
START_SUITE( stdarg )
{
RUN_TEST( stdarg );
}
END_SUITE
|
package com.qweex.openbooklikes.model;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * Model for a single book record from the BookLikes API.
 * Implements {@link Shareable} so the book can be shared via an Android send intent.
 */
public class Book extends ModelBase implements Shareable {
private final static String[]
STRING_FIELDS = new String[] {"title", "author", "cover", "isbn_10", "isbn_13", "format", "publisher", "language",
"pages", "publish_date"}; // These should be better data types
// No id fields beyond what ModelBase provides.
@Override
protected String[] idFields() {
return new String[0];
}
// All API fields on this model are treated as strings (see STRING_FIELDS note above).
@Override
protected String[] stringFields() {
return STRING_FIELDS;
}
@Override
protected String[] intFields() {
return new String[0];
}
// API routing names for this model.
@Override
public String apiPrefix() {
return "book";
}
@Override
public String apiName() {
return "book";
}
@Override
public String apiNamePlural() {
return "books";
}
public Book(JSONObject json) throws JSONException {
super(json);
}
public Book(Bundle b) {
super(b);
}
//FIXME: not sure if this is 100% reliable
// Builds a web link of the form <scheme>://booklikes.com/-/book,<id>
@Override
public Uri link() {
return new Uri.Builder()
.scheme(DEFAULT_DOMAIN_SCHEME)
.authority("booklikes.com")
.appendPath("-")
.appendPath("book," + id())
.build();
}
// Plain-text share intent: subject = title, body = web link.
@Override
public Intent share() {
return new Intent(android.content.Intent.ACTION_SEND)
.setType("text/plain")
.putExtra(Intent.EXTRA_SUBJECT, getS("title"))
.putExtra(Intent.EXTRA_TEXT, link().toString());
}
}
|
def compressString(s):
    """Run-length encode s as <char><count> pairs (e.g. "aab" -> "a2b1").

    Args:
        s: the string to compress (may be empty).

    Returns:
        The compressed string; "" for empty input.
    """
    # Fix: the original indexed s[0] unconditionally and crashed with an
    # IndexError on the empty string.
    if not s:
        return ""
    # Collect pieces in a list and join once, avoiding O(n^2) concatenation.
    pieces = []
    currentChar = s[0]
    count = 0
    for c in s:
        if c == currentChar:
            count += 1
        else:
            # Character changed: flush the finished run.
            pieces.append(currentChar + str(count))
            currentChar = c
            count = 1
    # Flush the final run.
    pieces.append(currentChar + str(count))
    return "".join(pieces)
# Demo: print the compressed form of a sample string.
print(compressString("aaaaabbbbbcccccd")) #Output: a5b5c5d1
import api from 'api';
import { ADMIN_MODE } from 'helpers/constants';
/**
 * Shared request helper for the store actions.
 *
 * Sends `body` as JSON to `url` (default POST), invokes onSuccess/onError
 * callbacks based on the server's status field, and records network errors
 * in the store via UPDATE_ERROR.
 *
 * @returns {Promise<{isError: boolean, response: *}>}
 */
async function API(context = {}, data = {}) {
  const { commit } = context;
  const {
    url,
    method,
    isAuth = true,
    headers,
    body,
    onError,
    onSuccess,
    ...rest
  } = data;

  let isError = false;
  const response = await api(url, {
    method: method || 'POST',
    credentials: isAuth ? 'include' : 'omit',
    headers: {
      'Content-Type': 'application/json',
      ...headers,
    },
    body: {
      ...body,
    },
    ...rest
  }).catch(error => {
    isError = true;
    console.error(error);
    commit({
      type: 'UPDATE_ERROR',
      error,
    });
  });

  // Fix: when the request rejects, the .catch above resolves to undefined
  // and reading `response.status` below would itself throw.
  if (!response) {
    return Promise.resolve({ isError, response });
  }

  if (response.status === 'error') {
    onError && onError(response.message);
    isError = true;
  } else if (response.status === 'ok') {
    onSuccess && onSuccess(response.message);
  }

  return Promise.resolve({ isError, response });
}
export default {
async UPDATE_BOOK(context, { book, onSuccess, onError }) {
const { dispatch } = context;
const { isError, response } = await API(context, {
onError,
onSuccess,
url: 'update_book',
method: 'PATCH',
body: book,
});
if (isError) return;
dispatch({ type: 'FETCH_BOOKS' });
},
async UPDATE_AUTHOR(context, { author, onSuccess, onError }) {
const { dispatch } = context;
const { isError, response } = await API(context, {
onError,
onSuccess,
url: 'update_author',
method: 'PATCH',
body: {
author_id: author.author_id,
first_name: author.name,
last_name: author.surname,
middle_name: author.middleName,
},
});
if (isError) return;
dispatch({ type: 'FETCH_AUTHORS' });
dispatch({ type: 'FETCH_BOOKS' });
},
async KEEP_BOOK(context, { book_id, qty, onSuccess, onError }) {
const { dispatch } = context;
const { isError, response } = await API(context, {
onError,
onSuccess,
url: 'keep_book',
body: {
book_id,
qty,
},
});
if (isError) return;
dispatch({ type: 'FETCH_BOOKS' });
},
async ADD_BOOK(context, { title, price, authors, onSuccess, onError }) {
const { isError, response } = await API(context, {
onError,
onSuccess,
url: 'add_book',
title,
price,
authors,
});
},
async ADD_AUTHOR(context, { author, onSuccess, onError }) {
await API(context, {
onError,
onSuccess,
url: 'add_author',
body: {
last_name: author.surname,
first_name: author.name,
middle_name: author.middleName,
},
});
},
async CHANGE_USER(context, { user, onSuccess, onError }) {
const {
commit,
state: {
user: {
user_id,
billing_id,
} = {},
} = {},
} = context
const { isError, response } = await API(context, {
onError,
onSuccess,
url: 'user',
method: 'PATCH',
body: {
user_id,
billing_id,
email: user.email,
login: user.login,
password: <PASSWORD>,
first_name: user.name,
last_name: user.surname,
middle_name: user.middleName,
phone: user.phone,
shipping_address: user.shippingAddress,
},
});
if (isError) return;
commit({
type: 'UPDATE_USER',
user: response.user,
});
},
async CREATE_ORDER(context, { onSuccess, onError }) {
const {
commit,
state: {
user: {
user_id,
} = {},
} = {},
} = context;
const { isError, response } = await API(context, {
onError,
onSuccess,
url: 'create_order',
body: { user_id },
});
if (isError) return;
commit({ type: 'CLEAR_CART' });
},
async REGISTER(context, { email, login, password, onError, onSuccess }) {
const { commit, state } = context;
const { isError, response } = await API(context, {
onError,
onSuccess,
url: 'register',
isAuth: false,
body: { email, login, password },
});
if (isError) return;
commit({ type: 'CLOSE_REGISTER_MODAL' });
},
async LOGIN(context, { email, password, onError, onSuccess }) {
const { dispatch, commit, state } = context;
const { isError, response } = await API(context, {
onError,
onSuccess,
url: 'login',
body: { email, password },
});
if (isError) return;
commit({
type: 'UPDATE_USER',
user: response.user,
});
commit({
type: 'UPDATE_IS_AUTHED',
isAuthed: true,
});
if (response.user.login === 'pick4er') {
commit({
type: 'UPDATE_USER_MODE',
userMode: ADMIN_MODE,
});
}
commit({ type: 'CLOSE_LOGIN_MODAL' });
dispatch({ type: 'FETCH_CART' });
},
async LOGOUT(context) {
const { commit, state } = context;
const { isError, response } = await API(context, {
url: 'logout',
});
if (isError) return;
commit({
type: 'UPDATE_IS_AUTHED',
isAuthed: false,
});
commit({
type: 'UPDATE_USER_MODE',
userMode: '',
});
commit({
type: 'FORGET_USER',
user: {},
});
commit({ type: 'CLEAR_CART' });
},
async CHECK_IF_AUTHED(context) {
const {
dispatch,
commit,
state: {
isAuthed, cart,
} = {},
} = context;
if (isAuthed) return;
const { isError, response } = await API(context, {
method: 'GET',
url: 'is_authenticated',
json: true,
});
if (isError) return;
if (response.status === 'unauthorized') {
commit({
type: 'UPDATE_IS_AUTHED',
isAuthed: false,
});
return;
}
const { user } = response;
commit({
type: 'UPDATE_IS_AUTHED',
isAuthed: true,
});
commit({
type: 'UPDATE_USER',
user,
});
(cart.length === 0) &&
dispatch({ type: 'FETCH_CART' });
(user.login === 'pick4er') &&
commit({
type: 'UPDATE_USER_MODE',
userMode: ADMIN_MODE,
});
},
async FETCH_BOOK(context, { bookId }) {
const { commit } = context;
const { isError, response } = await API(context, {
method: 'GET',
url: `book/?book_id=${bookId}`,
isAuth: false,
json: true,
});
if (isError) return;
commit({
type: 'UPDATE_BOOK',
book: response.book,
});
},
async FETCH_BOOKS(context) {
const { commit } = context;
const { isError, response } = await API(context, {
url: 'books',
method: 'GET',
isAuth: false,
});
if (isError) return;
commit({
type: 'UPDATE_BOOKS',
books: response.books || [],
});
},
async FETCH_AUTHORS(context) {
const { commit } = context;
const { isError, response } = await API(context, {
url: 'authors',
method: 'GET',
});
if (isError) return;
commit({
type: 'UPDATE_AUTHORS',
authors: response.authors || [],
});
},
async FETCH_CART(context) {
const {
commit,
state: {
user: {
user_id,
} = {},
} = {},
} = context;
const url = `get_cart/?user_id=${user_id}`;
const { isError, response } = await API(context, {
url,
method: 'GET',
});
if (isError) return;
commit({
type: 'UPDATE_CART',
cart: response.cart || [],
});
},
async ORDER_BOOK(context, { book_id, count }) {
const {
commit,
dispatch,
state: {
isAuthed,
user: {
user_id,
} = {},
} = {},
} = context;
if (!isAuthed) {
alert(`
Авторизуйтесь, пожалуйста.\
Например:\
- Email: <EMAIL>\
- Пароль: 12345\
`);
return;
}
const { isError, response } = await API(context, {
method: 'PATCH',
url: 'update_cart',
body: {
user_id,
book_id,
qty: count,
},
});
if (isError) return;
dispatch({ type: 'FETCH_CART' });
},
};
|
import {Point} from '../../..'
import {PolylinePoint} from '../../../math/geometry/polylinePoint'
import {ConeRightSide} from './ConeRightSide'
import {SweepEvent} from './SweepEvent'
// right here means an intersection of a right cone side with an obstacle edge
export class RightIntersectionEvent extends SweepEvent {
  coneRightSide: ConeRightSide
  intersectionPoint: Point
  endVertex: PolylinePoint

  constructor(
    coneRightSide: ConeRightSide,
    intersectionPoint: Point,
    endVertex: PolylinePoint,
  ) {
    super()
    this.coneRightSide = coneRightSide
    this.intersectionPoint = intersectionPoint
    this.endVertex = endVertex
  }

  // Vertex at the end of the intersected obstacle edge.
  get EndVertex(): PolylinePoint {
    return this.endVertex
  }
  set EndVertex(value: PolylinePoint) {
    this.endVertex = value
  }

  // The sweep site for this event is the intersection point itself.
  get Site(): Point {
    return this.intersectionPoint
  }

  toString(): string {
    return `RightIntersectionEvent ${this.intersectionPoint}`
  }
}
|
<reponame>raoniorodrigues/CRUD-Angular-Spring<gh_stars>0
package com.raoni.crudspring.controller;
import java.util.List;
import com.raoni.crudspring.model.Course;
import com.raoni.crudspring.repository.CourseRepository;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import lombok.AllArgsConstructor;
/** REST controller exposing read access to courses under /api/courses. */
@RestController
@AllArgsConstructor
@RequestMapping("/api/courses")
public class CoursesController {
// Injected via Lombok's @AllArgsConstructor.
private final CourseRepository courseRepository;
@GetMapping
// @RequestMapping(method = RequestMethod.GET)
public List<Course> list (){
return courseRepository.findAll();
}
}
|
// https://uva.onlinejudge.org/external/101/10116.pdf
#include<bits/stdc++.h>
using namespace std;
using vi=vector<int>;
using vvi=vector<vi>;
using vs=vector<string>;
// UVa 10116: simulate a robot following direction cells (N/S/W/E) on a grid,
// reporting either the number of steps to exit or the loop it falls into.
int main(){
ios::sync_with_stdio(0);
cin.tie(0);
for(;;){
// n rows, m columns; the robot enters at row 0, column r (1-based).
int n,m,r;
cin>>n>>m>>r;
if(!n)break;
vs a(n);
for(int i=0;i<n;i++)cin>>a[i];
// b[y][x] = 1-based step count at first visit of the cell (0 = unvisited).
vvi b(n,vi(m));
int y=0,x=r-1,k=0;
// Walk until leaving the grid or revisiting a cell.
while(y>=0&&y<n&&x>=0&&x<m&&!b[y][x]){
b[y][x]=++k;
switch(a[y][x]){
case'N':y--;break;
case'S':y++;break;
case'W':x--;break;
case'E':x++;
}
}
// Out of bounds: exited. Otherwise the revisited cell marks the loop entry;
// steps before the loop = b[y][x]-1, loop length = k-b[y][x]+1.
if(y<0||y>=n||x<0||x>=m)cout<<k<<" step(s) to exit\n";
else cout<<b[y][x]-1<<" step(s) before a loop of "<<k-b[y][x]+1<<" step(s)\n";
}
}
|
<filename>test/unit/core/scraper/iframe.js
import assert from "node:assert";
import * as scraper from "../../../../src/core/scraper/iframe.js";
// Unit tests for the iframe scraper: extract() looks for <iframe> elements in
// a fetched page and delegates each src URL to the other scrapers.
describe("core/scraper/iframe.js", function () {
describe("extract()", function () {
// Non-HTML responses expose html() resolving to null.
it("should return null when it's not a HTML page", async function () {
const url = new URL("https://foo.com/bar.zip");
const content = { html: () => Promise.resolve(null) };
const options = { depth: false };
const file = await scraper.extract(url, content, options);
assert.strictEqual(file, null);
});
// depth=true means we are already inside an iframe; do not recurse further.
it("should return null when it's depth", async function () {
const url = new URL("https://foo.com/bar.html");
const content = {
html: () => Promise.resolve(new DOMParser().parseFromString(`
<html>
<body>
<iframe src="https://www.youtube.com/embed/baz"
></iframe>
</body>
</html>`, "text/html")),
};
const options = { depth: true };
const file = await scraper.extract(url, content, options);
assert.strictEqual(file, null);
});
it("should return null when there isn't iframe", async function () {
const url = new URL("https://foo.com/bar.html");
const content = {
html: () => Promise.resolve(new DOMParser().parseFromString(`
<html>
<body></body>
</html>`, "text/html")),
};
const options = { depth: false };
const file = await scraper.extract(url, content, options);
assert.strictEqual(file, null);
});
// A recognised embed URL resolves to a Kodi plugin URL.
it("should return URL from iframe", async function () {
const url = new URL("https://foo.com/bar.html");
const content = {
html: () => Promise.resolve(new DOMParser().parseFromString(`
<html>
<body>
<iframe src="https://www.dailymotion.com/embed/video` +
`/baz"></iframe>
</body>
</html>`, "text/html")),
};
const options = { depth: false, incognito: true };
const file = await scraper.extract(url, content, options);
assert.strictEqual(file,
"plugin://plugin.video.dailymotion_com/" +
"?mode=playVideo&url=baz");
});
// Unrecognised iframes are skipped; relative src is resolved against the page URL.
it("should return URL from second iframe", async function () {
const url = new URL("https://www.dailymotion.com/index.html");
const content = {
html: () => Promise.resolve(new DOMParser().parseFromString(`
<html>
<body>
<iframe src="http://exemple.com/data.zip"></iframe>
<iframe src="/embed/video/foo"></iframe>
</body>
</html>`, "text/html")),
};
const options = { depth: false, incognito: false };
const file = await scraper.extract(url, content, options);
assert.strictEqual(file,
"plugin://plugin.video.dailymotion_com/" +
"?mode=playVideo&url=foo");
});
});
});
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CC_STUBS_REGION_H_
#define CC_STUBS_REGION_H_
#include "IntRect.h"
#if INSIDE_WEBKIT_BUILD
#include "Source/WebCore/platform/graphics/Region.h"
#else
#include "third_party/WebKit/Source/WebCore/platform/graphics/Region.h"
#endif
#include "ui/gfx/rect.h"
namespace cc {
// Thin adapter exposing WebCore::Region to cc code: the public API accepts
// gfx::Rect / gfx::Point and converts them to cc::IntRect / cc::IntPoint
// before delegating to the WebCore implementation.
class Region : public WebCore::Region {
public:
    Region() { }

    // Implicit conversions from the rect/region representations used by callers.
    Region(const IntRect& rect)
        : WebCore::Region(rect)
    {
    }

    Region(const WebCore::IntRect& rect)
        : WebCore::Region(rect)
    {
    }

    Region(const WebCore::Region& region)
        : WebCore::Region(region)
    {
    }

    Region(const gfx::Rect& rect)
        : WebCore::Region(WebCore::IntRect(rect.x(), rect.y(), rect.width(), rect.height()))
    {
    }

    // Chromium-style (CamelCase) wrappers over the WebCore operations.
    bool IsEmpty() const { return isEmpty(); }
    bool Contains(const gfx::Point& point) const { return contains(cc::IntPoint(point)); }
    bool Contains(const gfx::Rect& rect) const { return contains(cc::IntRect(rect)); }
    void Subtract(const gfx::Rect& rect) { subtract(cc::IntRect(rect)); }
    void Subtract(const Region& region) { subtract(region); }
    void Union(const gfx::Rect& rect) { unite(cc::IntRect(rect)); }
    void Union(const Region& region) { unite(region); }
    void Intersect(const gfx::Rect& rect) { intersect(cc::IntRect(rect)); }
    void Intersect(const Region& region) { intersect(region); }

    gfx::Rect bounds() const { return cc::IntRect(WebCore::Region::bounds()); }

private:
    // Re-declared privately to hide the WebCore-typed overloads from callers,
    // forcing all external use through the gfx-typed wrappers above.
    bool isEmpty() const { return WebCore::Region::isEmpty(); }
    bool contains(const IntPoint& point) const { return WebCore::Region::contains(point); }
    bool contains(const IntRect& rect) const { return WebCore::Region::contains(rect); }
    void subtract(const IntRect& rect) { return WebCore::Region::subtract(rect); }
    void subtract(const Region& region) { return WebCore::Region::subtract(region); }
    void unite(const IntRect& rect) { return WebCore::Region::unite(rect); }
    void unite(const Region& region) { return WebCore::Region::unite(region); }
    void intersect(const IntRect& rect) { return WebCore::Region::intersect(rect); }
    void intersect(const Region& region) { return WebCore::Region::intersect(region); }
};
// Free-function forms taking a gfx::Rect, mirroring WebCore's helpers.
// Fix: subtract() previously delegated to WebCore::intersect(), silently
// returning the intersection instead of the set difference.
inline Region subtract(const Region& region, const gfx::Rect& rect) { return WebCore::subtract(region, cc::IntRect(rect)); }
inline Region intersect(const Region& region, const gfx::Rect& rect) { return WebCore::intersect(region, cc::IntRect(rect)); }
}
#endif // CC_STUBS_REGION_H_
|
<filename>open-sphere-plugins/search/src/main/java/io/opensphere/search/controller/SearchController.java
package io.opensphere.search.controller;
import java.awt.EventQueue;
import java.util.List;
import java.util.Set;
import javafx.application.Platform;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.ListChangeListener;
import javafx.collections.ListChangeListener.Change;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Maps;
import io.opensphere.core.TimeManager.PrimaryTimeSpanChangeListener;
import io.opensphere.core.Toolbox;
import io.opensphere.core.geometry.Geometry;
import io.opensphere.core.search.SearchRegistry;
import io.opensphere.core.search.SearchResult;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.concurrent.ProcrastinatingExecutor;
import io.opensphere.core.util.swing.input.DontShowDialog;
import io.opensphere.core.viewer.ViewChangeSupport.ViewChangeListener;
import io.opensphere.search.model.SearchModel;
/**
* Performs keyword or area searches and populates the models with those results.
*/
/**
 * Performs keyword or area searches and populates the models with those results.
 */
public class SearchController
{
    /** The keyword listener. */
    private final ChangeListener<String> myKeywordListener = this::onKeywordChange;

    /** The main search model. */
    private final SearchModel myModel;

    /** Listens for any new results. */
    private final ListChangeListener<SearchResult> myResultsListener = this::onResultsChanged;

    /** Executes searches. */
    private final SearchExecutor mySearcher;

    /** Used to get all the installed search providers. */
    private final SearchRegistry mySearchRegistry;

    /**
     * Listens for any changes to the different search types. Installed lazily
     * by the first call to {@link #performSearch()}, so it may be null.
     */
    private ListChangeListener<String> mySearchTypeListener;

    /** Handles selection and hover changes within the model. */
    private final SelectedResultHandler mySelectedResultsHandler;

    /** Ensures the results are in proper sorted order. */
    private final ResultsSorter mySorter;

    /** Displays the results on the map. */
    private final SearchTransformer myTransformer;

    /** The toolbox. */
    private final Toolbox myToolbox;

    /** Listen to events from the main viewer. */
    private final ViewChangeListener myViewListener = (viewer, type) -> handleSpatialTemporalChange();

    /** Listen to time changes. */
    private final PrimaryTimeSpanChangeListener myTimeListener = PrimaryTimeSpanChangeListener
            .newChangedListener(spans -> handleSpatialTemporalChange());

    /** The executor to throttle notifications to the user. */
    private final ProcrastinatingExecutor myNotificationExecutor = new ProcrastinatingExecutor("SearchNotification", 1000);

    /**
     * Whether the notification dialog is showing. Written from both the
     * notification executor thread and the EDT, hence volatile.
     */
    private volatile boolean myNotificationDialogShowing;

    /**
     * Constructs a new search controller.
     *
     * @param toolbox The system toolbox.
     * @param searchModel The main search model.
     */
    public SearchController(Toolbox toolbox, SearchModel searchModel)
    {
        myToolbox = toolbox;
        mySearchRegistry = toolbox.getSearchRegistry();
        myModel = searchModel;
        mySearcher = new SearchExecutor(myModel, mySearchRegistry, toolbox.getTimeManager(), toolbox.getMapManager());
        BiMap<SearchResult, Geometry> resultToGeometries = Maps.synchronizedBiMap(HashBiMap.create());
        BiMap<SearchResult, Geometry> resultToLabelGeometries = Maps.synchronizedBiMap(HashBiMap.create());
        mySelectedResultsHandler = new SelectedResultHandler(myModel, toolbox.getControlRegistry(), toolbox.getMapManager(),
                resultToGeometries, resultToLabelGeometries, new ViewerAnimatorCreator());
        myTransformer = new SearchTransformer(myModel, toolbox.getGeometryRegistry(), resultToGeometries,
                resultToLabelGeometries);
        myModel.getAllResults().addListener(myResultsListener);
        mySorter = new ResultsSorter(myModel);
        myModel.getKeyword().addListener(myKeywordListener);
    }

    /**
     * Stops listening to model changes.
     */
    public void close()
    {
        mySorter.close();
        myTransformer.close();
        mySelectedResultsHandler.close();
        // The search-type listener is only installed once performSearch() has
        // run; removing a null listener would throw NPE, so guard it.
        if (mySearchTypeListener != null)
        {
            myModel.getSelectedSearchTypes().removeListener(mySearchTypeListener);
        }
        myModel.getAllResults().removeListener(myResultsListener);
        myModel.getKeyword().removeListener(myKeywordListener);
    }

    /**
     * Performs the search based on the inputs within the search model.
     */
    public void performSearch()
    {
        if (mySearchTypeListener == null)
        {
            mySearchTypeListener = this::onSearchTypesChanged;
            myModel.getSelectedSearchTypes().addListener(mySearchTypeListener);
        }
        clearSearch();
        mySearcher.performSearch();
    }

    /**
     * Starts or stops listening for view/time changes depending on whether the
     * search dialog is visible.
     *
     * @param visible whether the dialog is visible
     */
    public void setDialogVisible(boolean visible)
    {
        if (visible)
        {
            myToolbox.getMapManager().getViewChangeSupport().addViewChangeListener(myViewListener);
            myToolbox.getTimeManager().addPrimaryTimeSpanChangeListener(myTimeListener);
        }
        else
        {
            myToolbox.getMapManager().getViewChangeSupport().removeViewChangeListener(myViewListener);
            myToolbox.getTimeManager().removePrimaryTimeSpanChangeListener(myTimeListener);
        }
    }

    /**
     * Clears all the results from the previous search.
     */
    private void clearSearch()
    {
        myModel.getAllResults().clear();
        myModel.getShownResults().clear();
        mySearcher.clearSearch();
    }

    /**
     * Handles changes to the map (viewer) or time. Throttled via the
     * procrastinating executor, and guarded so only one notification dialog is
     * on screen at a time.
     */
    private void handleSpatialTemporalChange()
    {
        myNotificationExecutor.execute(() ->
        {
            if (!myNotificationDialogShowing)
            {
                myNotificationDialogShowing = true;
                EventQueue.invokeLater(() ->
                {
                    try
                    {
                        JFrame parent = myToolbox.getUIRegistry().getMainFrameProvider().get();
                        int response = DontShowDialog.showConfirmAndRememberDialog(myToolbox.getPreferencesRegistry(), parent,
                                "Search results may have changed. Re-query?", "Search Notification", false);
                        if (response == JOptionPane.OK_OPTION)
                        {
                            Platform.runLater(this::performSearch);
                        }
                    }
                    finally
                    {
                        // Fix: previously this flag was reset on the executor
                        // thread immediately after scheduling the dialog, so
                        // the guard never actually prevented a second dialog
                        // from queuing while the first was still showing.
                        // Reset only after the modal dialog is dismissed.
                        myNotificationDialogShowing = false;
                    }
                });
            }
        });
    }

    /**
     * Handles when search types are selected and adds the results of those types from the view.
     *
     * @param types The types that have been selected.
     */
    private void handleSearchTypesAdded(Set<String> types)
    {
        Set<String> typesToSearch = New.set(types);
        List<SearchResult> toShow = New.list();
        for (SearchResult result : myModel.getAllResults())
        {
            if (types.contains(result.getSearchType()))
            {
                toShow.add(result);
                // Cached results exist for this type; no need to re-query it.
                typesToSearch.remove(result.getSearchType());
            }
        }
        if (!toShow.isEmpty())
        {
            myModel.getShownResults().addAll(toShow);
        }
        if (!typesToSearch.isEmpty())
        {
            mySearcher.performSearch(typesToSearch);
        }
    }

    /**
     * Handles when search types are unselected and removes the results of those types from the view.
     *
     * @param types The types that have been unselected.
     */
    private void handleSearchTypesRemoved(Set<String> types)
    {
        List<SearchResult> toRemove = New.list();
        for (SearchResult result : myModel.getShownResults())
        {
            if (types.contains(result.getSearchType()))
            {
                toRemove.add(result);
            }
        }
        myModel.getShownResults().removeAll(toRemove);
    }

    /**
     * Handles when the keyword has changed and either performs a new search or clears out the search.
     *
     * @param observable The observable.
     * @param oldValue The old value.
     * @param newValue The new value.
     */
    private void onKeywordChange(ObservableValue<? extends String> observable, String oldValue, String newValue)
    {
        if (StringUtils.isEmpty(newValue))
        {
            clearSearch();
        }
        else
        {
            performSearch();
        }
    }

    /**
     * Handles when results are added and figures out if we should show them or not.
     *
     * @param change The change event.
     */
    private void onResultsChanged(Change<? extends SearchResult> change)
    {
        List<SearchResult> resultsToShow = New.list();
        while (change.next())
        {
            for (SearchResult result : change.getAddedSubList())
            {
                if (myModel.getSelectedSearchTypes().contains(result.getSearchType()))
                {
                    resultsToShow.add(result);
                }
            }
        }
        myModel.getShownResults().addAll(resultsToShow);
    }

    /**
     * Handles when the selected search types change and removes or adds results based on the changes.
     *
     * @param change The change event.
     */
    private void onSearchTypesChanged(Change<? extends String> change)
    {
        Set<String> removed = New.set();
        Set<String> added = New.set();
        while (change.next())
        {
            removed.addAll(change.getRemoved());
            added.addAll(change.getAddedSubList());
        }
        if (!removed.isEmpty())
        {
            handleSearchTypesRemoved(removed);
        }
        if (!added.isEmpty())
        {
            handleSearchTypesAdded(added);
        }
    }
}
|
<filename>src/main/java/com/kvn/poi/exp/PoiExporter.java<gh_stars>10-100
package com.kvn.poi.exp;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import com.kvn.poi.exception.PoiElErrorCode;
import com.kvn.poi.exp.context.PoiExporterContext;
import com.kvn.poi.exp.function.FunctionRegister;
import com.kvn.poi.exp.processor.RowProcessorStrategy;
import com.kvn.poi.log.Log;
/**
* excel导出类
* @author wzy
* @date 2017年7月5日 上午9:41:52
*/
/**
 * Excel export utility: renders an XLSX template (containing SpEL template
 * expressions) against a data model, sheet by sheet.
 *
 * @author wzy
 * @date 2017-07-05 09:41:52
 */
public class PoiExporter {

	private static final Logger logger = LoggerFactory.getLogger(PoiExporter.class);

	/**
	 * Register the built-in functions with the StandardEvaluationContext.
	 */
	static {
		FunctionRegister.registerInternalFunction();
	}

	/**
	 * Renders every sheet of the workbook in place, evaluating the template
	 * expressions against the given root object map.
	 *
	 * @param wb the workbook loaded from the template
	 * @param rootObjectMap the data model for expression evaluation
	 */
	public static void export(XSSFWorkbook wb, Map<String, Object> rootObjectMap) {
		long start = System.currentTimeMillis();
		PoiExporterContext peContext = new PoiExporterContext(new SpelExpressionParser(), rootObjectMap);
		// Process each sheet in turn.
		for (int i = 0; i < wb.getNumberOfSheets(); i++) {
			XSSFSheet sheet = wb.getSheetAt(i);
			// Walk the rows from the first to the last physical row.
			int j = sheet.getFirstRowNum();
			while (j <= sheet.getLastRowNum()) {
				XSSFRow row = sheet.getRow(j);
				if (row == null) {
					// Fix: getRow() returns null for undefined rows; the old
					// bare `continue` never advanced j, spinning forever.
					j++;
					continue;
				}
				int dealRows = RowProcessorStrategy.getRowProcessor(row).dealRow(row, peContext);
				// Defensive: always make progress even if a processor reports
				// zero consumed rows (which would otherwise loop forever).
				j += Math.max(dealRows, 1);
			}
		}
		long end = System.currentTimeMillis();
		logger.info(Log.op("PoiEl#parse").msg("PoiEl解析模板耗时[{0}]ms", (end - start)).toString());
	}

	/**
	 * Renders the given template file and writes the result to {@code des}.
	 *
	 * @param templateFile the XLSX template file
	 * @param rootObjectMap the data model for expression evaluation
	 * @param des the stream the rendered workbook is written to (flushed and closed)
	 * @return the rendered workbook
	 */
	public static XSSFWorkbook export2Destination(File templateFile, Map<String, Object> rootObjectMap, OutputStream des){
		InputStream in = null;
		try {
			in = new FileInputStream(templateFile);
		} catch (FileNotFoundException e) {
			throw PoiElErrorCode.TEMPLATE_FILE_NOT_FOUND.exp(e, templateFile.getName());
		}
		return export2Destination(in, rootObjectMap, des);
	}

	/**
	 * Renders the template read from {@code templateInputStream} and writes the
	 * result to {@code des}.
	 *
	 * @param templateInputStream the template input stream
	 * @param rootObjectMap the data model for expression evaluation
	 * @param des the destination stream (flushed and closed on success)
	 * @return the rendered workbook
	 */
	public static XSSFWorkbook export2Destination(InputStream templateInputStream, Map<String, Object> rootObjectMap, OutputStream des){
		XSSFWorkbook wb = null;
		try {
			wb = new XSSFWorkbook(templateInputStream);
		} catch (IOException e) {
			// NOTE(review): templateInputStream is not closed on this failure
			// path (nor explicitly on success) — verify whether callers or
			// XSSFWorkbook own the stream before changing this.
			throw PoiElErrorCode.SYSTEM_ERROR.exp(e);
		}
		PoiExporter.export(wb, rootObjectMap);
		// Write out and release the destination stream.
		try {
			wb.write(des);
			des.flush();
			des.close();
		} catch (IOException e) {
			throw PoiElErrorCode.SYSTEM_ERROR.exp(e);
		}
		return wb;
	}
}
|
#!/usr/bin/env sh
# Generate API documentation: run the jscodeshift codemod
# tools/codemods/generate-docs.ts over every index.ts found under src/,
# emitting output into docs/api/generated (created if missing).
mkdir -p $PWD/docs/api/generated
find "$PWD/src" -iname "index.ts" -print | xargs node node_modules/.bin/jscodeshift -t $PWD/tools/codemods/generate-docs.ts --extensions=ts --parser=ts
|
import avro.schema
from avro.datafile import DataFileReader
from avro.io import DatumReader

# Read the Avro schema definition (JSON text) from disk.
with open('user_schema.avsc', 'r') as schema_file:
    schema_str = schema_file.read()

# Parse the Avro schema; raises SchemaParseException on invalid input.
schema = avro.schema.Parse(schema_str)

# Sanity check: round-trip the schema through its JSON form.
# Fix: Parse() expects a JSON *string*; schema.to_json() returns a
# JSON-serializable structure in common avro versions, so serialize the
# schema via str() instead.
avro.schema.Parse(str(schema))

# Print each field name and type. Fields are avro.schema.Field objects
# exposing .name and .type attributes; the previous dict-style
# field['name'] subscripting is not supported by Field.
print("Fields:")
for field in schema.fields:
    print(f"- {field.name}: {field.type}")
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport.model.configuration.network;
import java.io.Serializable;
import java.util.Objects;

import com.infinities.skyport.model.FunctionConfiguration;
/**
 * Per-operation enable/configure flags for network firewall support. Each
 * field mirrors one operation of the firewall API and defaults to a fresh
 * {@link FunctionConfiguration}.
 */
public class NetworkFirewallConfiguration implements Serializable, Cloneable {

	private static final long serialVersionUID = 1L;

	private FunctionConfiguration associateWithSubnet = new FunctionConfiguration();
	private FunctionConfiguration authorize = new FunctionConfiguration();
	private FunctionConfiguration createFirewall = new FunctionConfiguration();
	private FunctionConfiguration getActiveConstraintsForFirewall = new FunctionConfiguration();
	private FunctionConfiguration getFirewall = new FunctionConfiguration();
	private FunctionConfiguration listFirewallStatus = new FunctionConfiguration();
	private FunctionConfiguration listFirewalls = new FunctionConfiguration();
	private FunctionConfiguration listRules = new FunctionConfiguration();
	private FunctionConfiguration removeFirewall = new FunctionConfiguration();
	private FunctionConfiguration removeTags = new FunctionConfiguration();
	private FunctionConfiguration revoke = new FunctionConfiguration();
	private FunctionConfiguration updateTags = new FunctionConfiguration();
	private FunctionConfiguration setTags = new FunctionConfiguration();

	public FunctionConfiguration getAssociateWithSubnet() {
		return associateWithSubnet;
	}

	public void setAssociateWithSubnet(FunctionConfiguration associateWithSubnet) {
		this.associateWithSubnet = associateWithSubnet;
	}

	public FunctionConfiguration getAuthorize() {
		return authorize;
	}

	public void setAuthorize(FunctionConfiguration authorize) {
		this.authorize = authorize;
	}

	public FunctionConfiguration getCreateFirewall() {
		return createFirewall;
	}

	public void setCreateFirewall(FunctionConfiguration createFirewall) {
		this.createFirewall = createFirewall;
	}

	public FunctionConfiguration getGetActiveConstraintsForFirewall() {
		return getActiveConstraintsForFirewall;
	}

	public void setGetActiveConstraintsForFirewall(FunctionConfiguration getActiveConstraintsForFirewall) {
		this.getActiveConstraintsForFirewall = getActiveConstraintsForFirewall;
	}

	public FunctionConfiguration getGetFirewall() {
		return getFirewall;
	}

	public void setGetFirewall(FunctionConfiguration getFirewall) {
		this.getFirewall = getFirewall;
	}

	public FunctionConfiguration getListFirewallStatus() {
		return listFirewallStatus;
	}

	public void setListFirewallStatus(FunctionConfiguration listFirewallStatus) {
		this.listFirewallStatus = listFirewallStatus;
	}

	public FunctionConfiguration getListFirewalls() {
		return listFirewalls;
	}

	public void setListFirewalls(FunctionConfiguration listFirewalls) {
		this.listFirewalls = listFirewalls;
	}

	public FunctionConfiguration getListRules() {
		return listRules;
	}

	public void setListRules(FunctionConfiguration listRules) {
		this.listRules = listRules;
	}

	public FunctionConfiguration getRemoveFirewall() {
		return removeFirewall;
	}

	public void setRemoveFirewall(FunctionConfiguration removeFirewall) {
		this.removeFirewall = removeFirewall;
	}

	public FunctionConfiguration getRemoveTags() {
		return removeTags;
	}

	public void setRemoveTags(FunctionConfiguration removeTags) {
		this.removeTags = removeTags;
	}

	public FunctionConfiguration getRevoke() {
		return revoke;
	}

	public void setRevoke(FunctionConfiguration revoke) {
		this.revoke = revoke;
	}

	public FunctionConfiguration getUpdateTags() {
		return updateTags;
	}

	public void setUpdateTags(FunctionConfiguration updateTags) {
		this.updateTags = updateTags;
	}

	public FunctionConfiguration getSetTags() {
		return setTags;
	}

	public void setSetTags(FunctionConfiguration setTags) {
		this.setTags = setTags;
	}

	/**
	 * Returns a deep copy; each FunctionConfiguration field is cloned
	 * individually (super.clone() is intentionally not used).
	 */
	@Override
	public NetworkFirewallConfiguration clone() {
		NetworkFirewallConfiguration clone = new NetworkFirewallConfiguration();
		clone.associateWithSubnet = associateWithSubnet.clone();
		clone.authorize = authorize.clone();
		clone.createFirewall = createFirewall.clone();
		clone.getActiveConstraintsForFirewall = getActiveConstraintsForFirewall.clone();
		clone.getFirewall = getFirewall.clone();
		clone.listFirewallStatus = listFirewallStatus.clone();
		clone.listFirewalls = listFirewalls.clone();
		clone.listRules = listRules.clone();
		clone.removeFirewall = removeFirewall.clone();
		clone.removeTags = removeTags.clone();
		clone.revoke = revoke.clone();
		clone.updateTags = updateTags.clone();
		clone.setTags = setTags.clone();
		return clone;
	}

	@Override
	public int hashCode() {
		// Objects.hash uses the same 31-based accumulation, and the field
		// order matches the previous hand-rolled implementation, so the
		// produced hash values are unchanged.
		return Objects.hash(associateWithSubnet, authorize, createFirewall, getActiveConstraintsForFirewall, getFirewall,
				listFirewallStatus, listFirewalls, listRules, removeFirewall, removeTags, revoke, setTags, updateTags);
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null || getClass() != obj.getClass()) {
			return false;
		}
		NetworkFirewallConfiguration other = (NetworkFirewallConfiguration) obj;
		// Null-safe field-by-field comparison, equivalent to the previous
		// expanded null-check chains.
		return Objects.equals(associateWithSubnet, other.associateWithSubnet)
				&& Objects.equals(authorize, other.authorize)
				&& Objects.equals(createFirewall, other.createFirewall)
				&& Objects.equals(getActiveConstraintsForFirewall, other.getActiveConstraintsForFirewall)
				&& Objects.equals(getFirewall, other.getFirewall)
				&& Objects.equals(listFirewallStatus, other.listFirewallStatus)
				&& Objects.equals(listFirewalls, other.listFirewalls)
				&& Objects.equals(listRules, other.listRules)
				&& Objects.equals(removeFirewall, other.removeFirewall)
				&& Objects.equals(removeTags, other.removeTags)
				&& Objects.equals(revoke, other.revoke)
				&& Objects.equals(setTags, other.setTags)
				&& Objects.equals(updateTags, other.updateTags);
	}
}
|
/// <summary>
/// In-place insertion sort for integer arrays, O(n^2) worst case.
/// </summary>
class InsertionSort
{
    // Renamed from "InsertionSort": a member may not share the name of its
    // enclosing type (compiler error CS0542), so the original did not compile.
    public static int[] Sort(int[] array)
    {
        for (int i = 1; i < array.Length; i++)
        {
            int currentValue = array[i];
            int j = i - 1;
            // Shift elements greater than currentValue one slot right to
            // open a gap for the insertion.
            while (j >= 0 && array[j] > currentValue)
            {
                array[j + 1] = array[j];
                j--;
            }
            array[j + 1] = currentValue;
        }
        return array;
    }
}
//usage
int[] array = {10, 7, 4, 2, 8, 1};
int[] sortedArray = InsertionSort.Sort(array);
// -> sortedArray = [1,2,4,7,8,10]
import React from 'react'
import { Message, dialog, Header, Button } from '../../../src'
import Page from '../../component/page'
// Demo page showcasing the Header component's configurations
// (legacy React.createClass API).
var Demo = React.createClass({
  // Initial state: title used by the async-title example at the bottom.
  getInitialState(){
    return {
      title: '页面标题'
    }
  },
  // Intercepted back handler: show a confirmation dialog and only navigate
  // back (history.go(-1)) after the user accepts.
  goBackEv(){
    dialog.show({
      message: '您还有未保存的数据,是否确认返回?',
      buttons: [{ label:'确定', onClick:()=>{
        dialog.hide();
        history.go(-1);
      }}]
    })
  },
  // Renders one Header per supported option combination, each preceded by a
  // weui-style section title.
  render: function() {
    return (
      <Page title="Header" subTitle="只在浏览器中显示,APP的webview中不显示">
        <div className="weui-cells__title">默认</div>
        <Header>页面标题</Header>
        <div className="weui-cells__title">带 返回图标</div>
        <Header showBack>页面标题</Header>
        <div className="weui-cells__title">带 返回 及 首页图标</div>
        <Header showBack showHome>页面标题</Header>
        <div className="weui-cells__title">自定义返回文字</div>
        <Header showBack showHome backText="上一步">页面标题</Header>
        <div className="weui-cells__title">自定义左侧、右侧内容</div>
        <Header showHome leftSlot={<span style={{color:'green'}}>自定义内容</span>} rightSlot={<span style={{color:'red'}}>自定义内容</span>}>页面标题</Header>
        <div className="weui-cells__title">阻止返回事件</div>
        <Header showBack onClickBack={ this.goBackEv }>页面标题</Header>
        <div className="weui-cells__title">异步设置标题</div>
        <Header showBack onClickBack={ this.goBackEv } title={this.state.title}></Header>
        <div style={{padding: 30}}>
          <Button type="primary" onClick={()=>this.setState({title: '异步标题'})}>改变标题为异步标题</Button>
        </div>
        <Message />
      </Page>
    );
  },
});
export default Demo
|
#!/usr/bin/env bash
version="v1 20200606.0"
# Basename of this script; used as the prefix for every log message.
unset -v progname
progname="${0##*/}"
# Drop any inherited definitions before (re)defining the helpers below.
unset -f msg err
# msg WORDS...
# Log WORDS to stderr, prefixed with the script name. Silent when called
# with no arguments.
msg() {
    if [ "$#" -gt 0 ]; then
        echo "${progname}: $*" >&2
    fi
}

# err CODE WORDS...
# Log WORDS via msg, then terminate the script with exit status CODE.
err() {
    local code="$1"
    shift
    msg "$@"
    exit "${code}"
}
# random B [R]
# Print a pseudo-random integer in [B, B+R).
# Fix: R now defaults to 0 when omitted — previously `random 7` left r empty,
# breaking both the [ -le ] test and the `% r` arithmetic. Missing or
# non-positive R falls back to 100, as before.
random() {
    local b=$1
    local r=${2:-0}
    if [ "$r" -le 0 ]; then
        r=100
    fi
    # Three random bytes (0..16777215) from /dev/urandom, reduced modulo r.
    local rand=$(( $(od -A n -t d -N 3 /dev/urandom | grep -oE '[0-9]+') % r ))
    echo $(( b + rand ))
}
# https://www.linuxjournal.com/content/validating-ip-address-bash-script
# valid_ip IP
# Return 0 when IP is a dotted-quad IPv4 address whose octets are all <= 255,
# nonzero otherwise.
function valid_ip()
{
    local ip=$1
    # Reject anything that is not four dot-separated 1-3 digit groups.
    [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]] || return 1
    local octet
    local IFS='.'
    for octet in $ip; do
        if [[ $octet -gt 255 ]]; then
            return 1
        fi
    done
    return 0
}
# myip
# Autodetect the machine's public IPv4 address via OpenDNS and store it in
# the global PUB_IP. Exits the script (status 1) when no valid address is
# found.
function myip() {
    # get ipv4 address only, right now only support ipv4 addresses
    PUB_IP=$(dig -4 @resolver1.opendns.com ANY myip.opendns.com +short)
    if ! valid_ip $PUB_IP; then
        err 1 "NO valid public IP found: $PUB_IP"
    fi
    msg "public IP address autodetected: $PUB_IP"
}
# check_root
# Abort (exit 1) unless the script runs with root privileges, suggesting
# either sudo or the -S flag.
function check_root
{
    if (( EUID != 0 )); then
        msg "this script must be run as root to setup environment"
        msg please use \"sudo ${progname}\"
        msg "you may use -S option to run as normal user"
        exit 1
    fi
}
# add_env FILE LINE
# Append LINE to FILE unless an identical line already exists (idempotent,
# so repeated runs never duplicate entries).
function add_env
{
    filename=$1
    shift
    if ! grep -qxF "$@" $filename; then
        echo "$@" >> $filename
    fi
}
# setup_env
# Apply kernel/network sysctl tuning and raise per-user resource limits.
# Must run as root (enforced by check_root). Changes made with `sysctl -w`
# are not persisted to /etc/sysctl.conf and are lost on reboot; the
# limits.conf / pam entries are persistent and added idempotently.
function setup_env
{
    check_root
    ### KERNEL TUNING ###
    # Increase size of file handles and inode cache
    sysctl -w fs.file-max=2097152
    # Do less swapping
    sysctl -w vm.swappiness=10
    sysctl -w vm.dirty_ratio=60
    sysctl -w vm.dirty_background_ratio=2
    # Sets the time before the kernel considers migrating a process to another core
    sysctl -w kernel.sched_migration_cost_ns=5000000
    ### GENERAL NETWORK SECURITY OPTIONS ###
    # Number of times SYNACKs for passive TCP connection.
    sysctl -w net.ipv4.tcp_synack_retries=2
    # Allowed local port range
    sysctl -w net.ipv4.ip_local_port_range='2000 65535'
    # Protect Against TCP Time-Wait
    sysctl -w net.ipv4.tcp_rfc1337=1
    # Control Syncookies
    sysctl -w net.ipv4.tcp_syncookies=1
    # Decrease the time default value for tcp_fin_timeout connection
    sysctl -w net.ipv4.tcp_fin_timeout=15
    # Decrease the time default value for connections to keep alive
    sysctl -w net.ipv4.tcp_keepalive_time=300
    sysctl -w net.ipv4.tcp_keepalive_probes=5
    sysctl -w net.ipv4.tcp_keepalive_intvl=15
    ### TUNING NETWORK PERFORMANCE ###
    # Default Socket Receive Buffer
    sysctl -w net.core.rmem_default=31457280
    # Maximum Socket Receive Buffer
    sysctl -w net.core.rmem_max=33554432
    # Default Socket Send Buffer
    sysctl -w net.core.wmem_default=31457280
    # Maximum Socket Send Buffer
    sysctl -w net.core.wmem_max=33554432
    # Increase number of incoming connections
    sysctl -w net.core.somaxconn=8096
    # Increase number of incoming connections backlog
    sysctl -w net.core.netdev_max_backlog=65536
    # Increase the maximum amount of option memory buffers
    sysctl -w net.core.optmem_max=25165824
    sysctl -w net.ipv4.tcp_max_syn_backlog=8192
    # Increase the maximum total buffer-space allocatable
    # This is measured in units of pages (4096 bytes)
    sysctl -w net.ipv4.tcp_mem='786432 1048576 26777216'
    sysctl -w net.ipv4.udp_mem='65536 131072 262144'
    # Increase the read-buffer space allocatable
    sysctl -w net.ipv4.tcp_rmem='8192 87380 33554432'
    sysctl -w net.ipv4.udp_rmem_min=16384
    # Increase the write-buffer-space allocatable
    sysctl -w net.ipv4.tcp_wmem='8192 65536 33554432'
    sysctl -w net.ipv4.udp_wmem_min=16384
    # Increase the tcp-time-wait buckets pool size to prevent simple DOS attacks
    sysctl -w net.ipv4.tcp_max_tw_buckets=1440000
    sysctl -w net.ipv4.tcp_tw_reuse=1
    sysctl -w net.ipv4.tcp_fastopen=3
    sysctl -w net.ipv4.tcp_window_scaling=1
    # Raise open-file/process limits for all users and root (idempotent).
    add_env /etc/security/limits.conf "* soft nproc 65535"
    add_env /etc/security/limits.conf "* hard nproc 65535"
    add_env /etc/security/limits.conf "* soft nofile 65535"
    add_env /etc/security/limits.conf "* hard nofile 65535"
    add_env /etc/security/limits.conf "root soft nproc 65535"
    add_env /etc/security/limits.conf "root hard nproc 65535"
    add_env /etc/security/limits.conf "root soft nofile 65535"
    add_env /etc/security/limits.conf "root hard nofile 65535"
    # Ensure PAM actually applies the limits.conf settings to sessions.
    add_env /etc/pam.d/common-session "session required pam_limits.so"
}
# Detect the system package manager and set PKG_INSTALL accordingly.
# Leaves PKG_INSTALL unset if neither yum nor apt-get is found.
function check_pkg_management
{
   # prefer the shell builtin `command -v` over the external `which`,
   # which is not guaranteed to exist on minimal installs
   if command -v yum > /dev/null; then
      PKG_INSTALL='sudo yum install -y'
      return
   fi
   if command -v apt-get > /dev/null; then
      PKG_INSTALL='sudo apt-get install -y'
      return
   fi
}
######## main #########
# Print the full usage/help text to stdout.
# NOTE(review): the option list mentions "-a dbfile" and an example uses
# "-b", but neither letter appears in the getopts optstring below --
# confirm whether these were removed intentionally.
print_usage() {
   cat <<- ENDEND
	usage: ${progname} [options]
	options:
	-c back up database/logs and start clean (not for mainnet)
	(use only when directed by Harmony)
	-1 do not loop; run once and exit
	-h print this help and exit
	-k KEYFILE use the given BLS key file (default: autodetect)
	-s run setup env only (must run as root)
	-S run the ${progname} as non-root user (default: run as root)
	-p passfile use the given BLS passphrase file
	-d just download the Harmony binaries (default: off)
	-D do not download Harmony binaries (default: download when start)
	-N network join the given network (mainnet, testnet, staking, partner, stress, devnet, tnet; default: mainnet)
	-n port specify the public base port of the node (default: 9000)
	-T nodetype specify the node type (validator, explorer; default: validator)
	-i shardid specify the shard id (valid only with explorer node; default: 1)
	-a dbfile specify the db file to download (default:off)
	-U FOLDER specify the upgrade folder to download binaries
	-P enable public rpc end point (default:off)
	-v print out the version of the node.sh
	-V print out the version of the Harmony binary
	-z run in staking mode
	-y run in legacy, foundational-node mode (default)
	-Y verify the signature of the downloaded binaries (default: off)
	-m minpeer specify minpeers for bootstrap (default: 6)
	-M support multi-key mode (default: off)
	-f blsfolder folder that stores the bls keys and corresponding passphrases (default: ./.hmy/blskeys)
	-A enable archival node mode (default: off)
	-B blacklist specify file containing blacklisted accounts as a newline delimited file (default: ./.hmy/blacklist.txt)
	-r address start a pprof profiling server listening on the specified address
	-I use statically linked Harmony binary (default: true)
	-R tracefile enable p2p trace using tracefile (default: off)
	-l limit broadcasting of invalid transactions (default: off)
	-L log_level logging verbosity: 0=silent, 1=error, 2=warn, 3=info, 4=debug, 5=detail (default: $log_level)
	examples:
	# start node program w/o root account
	${progname} -S -k mybls.key
	# download beacon chain (shard0) db snapshot
	${progname} -i 0 -b
	# just re-download the harmony binaries
	${progname} -d
	# start a non-validating node in shard 1
	# you need to have a dummy BLSKEY/pass file using 'touch BLSKEY; touch blspass'
	${progname} -S -k BLSKEY -p blspass -T explorer -i 1
	# upgrade harmony binaries from specified repo
	${progname} -1 -U upgrade
	# start the node in a different port 9010
	${progname} -n 9010
	# multi-bls: place all keys/passphrases under .hmy/blskeys
	# e.g. <blskey>.key and <blskey>.pass
	${progname} -S -M
	# multi-bls: specify folder that contains bls keys
	${progname} -S -M -f /home/xyz/myfolder
	# multi-bls using default passphrase: place all keys under .hmy/blskeys
	# supply passphrase file using -p option (single passphrase will be used for all bls keys)
	${progname} -S -M -p blspass.txt
	# multi-bls using user input passphrase: place all keys under .hmy/blskeys
	# supply passphrase for each of the bls key file when prompted
	${progname} -S -M
	ENDEND
}
# Print an error message (via msg) followed by the usage text on stderr,
# then exit with EX_USAGE.
usage() {
   msg "$@"
   print_usage >&2
   exit 64  # EX_USAGE
}
# =======
# ---- global configuration defaults (overridable via command-line flags) ----
BUCKET=pub.harmony.one
OS=$(uname -s)
unset start_clean loop run_as_root blspass do_not_download download_only network node_type shard_id broadcast_invalid_tx
unset upgrade_rel public_rpc staking_mode pub_port multi_key blsfolder blacklist verify TRACEFILE minpeers max_bls_keys_per_node log_level
start_clean=false
loop=true
run_as_root=true
do_not_download=false
download_only=false
network=mainnet
node_type=validator
shard_id=-1
public_rpc=false
staking_mode=false
multi_key=false
blsfolder=./.hmy/blskeys
archival=false
blacklist=./.hmy/blacklist.txt
pprof=""
static=true
verify=false
minpeers=6
max_bls_keys_per_node=10
broadcast_invalid_tx=true
log_level=3
# default BLSKEYFILE/TRACEFILE to empty if unset; the expansion result is
# empty so nothing is executed (shorthand for `: ${VAR=}`)
${BLSKEYFILE=}
${TRACEFILE=}
# ---- command-line parsing ----
unset OPTIND OPTARG opt
OPTIND=1
# NOTE: -Y is a boolean flag (its handler ignores OPTARG), so the optstring
# must contain "Y", not "Y:"; the previous "Y:" made -Y silently consume the
# following command-line argument.
while getopts :1chk:sSp:dDN:T:i:U:PvVyzn:MAIB:r:Yf:R:m:L:l opt
do
   case "${opt}" in
   '?') usage "unrecognized option -${OPTARG}";;
   ':') usage "missing argument for -${OPTARG}";;
   c) start_clean=true;;
   1) loop=false;;
   h) print_usage; exit 0;;
   k) BLSKEYFILE="${OPTARG}";;
   s) setup_env; exit 0;;
   S) run_as_root=false ;;
   p) blspass="${OPTARG}";;
   d) download_only=true;;
   D) do_not_download=true;;
   m) minpeers="${OPTARG}";;
   M) multi_key=true;;
   f) blsfolder="${OPTARG}";;
   N) network="${OPTARG}";;
   n) pub_port="${OPTARG}";;
   T) node_type="${OPTARG}";;
   i) shard_id="${OPTARG}";;
   I) static=true;;
   U) upgrade_rel="${OPTARG}";;
   P) public_rpc=true;;
   B) blacklist="${OPTARG}";;
   r) pprof="${OPTARG}";;
   v) msg "version: $version"
      exit 0 ;;
   V) LD_LIBRARY_PATH=. ./harmony -version
      exit 0 ;;
   Y) verify=true;;
   z) staking_mode=true;;
   y) staking_mode=false;;
   A) archival=true;;
   R) TRACEFILE="${OPTARG}";;
   l) broadcast_invalid_tx=false;;
   L) log_level="${OPTARG}";;
   *) err 70 "unhandled option -${OPTARG}";;  # EX_SOFTWARE
   esac
done
shift $((${OPTIND} - 1))
# ---- per-network configuration: bootnodes, release channel, DNS zone,
# ---- and the db-snapshot sync directory ----
unset -v bootnodes REL network_type dns_zone syncdir
case "${node_type}" in
validator) ;;
explorer) archival=true;;   # explorer nodes always keep full history
*)
   usage ;;
esac
case "${network}" in
mainnet)
   bootnodes=(
      /ip4/100.26.90.187/tcp/9874/p2p/Qmdfjtk6hPoyrH1zVD9PEH4zfWLo38dP2mDvvKXfh3tnEv
      /ip4/54.213.43.194/tcp/9874/p2p/QmZJJx6AdaoEkGLrYG4JeLCKeCKDjnFz2wfHNHxAqFSGA9
      /ip4/13.113.101.219/tcp/12019/p2p/QmQayinFSgMMw5cSpDUiD9pQ2WeP6WNmGxpZ6ou3mdVFJX
      /ip4/99.81.170.167/tcp/12019/p2p/QmRVbTpEYup8dSaURZfF6ByrMTSKa4UyUzJhSjahFzRqNj
   )
   REL=main
   network_type=mainnet
   dns_zone=t.hmny.io
   syncdir=mainnet.min
   ;;
testnet)
   bootnodes=(
      /ip4/54.86.126.90/tcp/9850/p2p/Qmdfjtk6hPoyrH1zVD9PEH4zfWLo38dP2mDvvKXfh3tnEv
      /ip4/52.40.84.2/tcp/9850/p2p/QmbPVwrqWsTYXq1RxGWcxx9SWaTUCfoo1wA6wmdbduWe29
   )
   REL=testnet
   network_type=testnet
   dns_zone=b.hmny.io
   syncdir=lrtn
   ;;
staking)
   bootnodes=(
      /ip4/54.86.126.90/tcp/9867/p2p/Qmdfjtk6hPoyrH1zVD9PEH4zfWLo38dP2mDvvKXfh3tnEv
      /ip4/52.40.84.2/tcp/9867/p2p/QmbPVwrqWsTYXq1RxGWcxx9SWaTUCfoo1wA6wmdbduWe29
   )
   REL=pangaea
   network_type=pangaea
   dns_zone=os.hmny.io
   syncdir=ostn
   ;;
partner)
   bootnodes=(
      /ip4/52.40.84.2/tcp/9800/p2p/QmbPVwrqWsTYXq1RxGWcxx9SWaTUCfoo1wA6wmdbduWe29
      /ip4/54.86.126.90/tcp/9800/p2p/Qmdfjtk6hPoyrH1zVD9PEH4zfWLo38dP2mDvvKXfh3tnEv
   )
   REL=partner
   network_type=partner
   dns_zone=ps.hmny.io
   syncdir=pstn
   ;;
stn|stress|stressnet)
   bootnodes=(
      /ip4/52.40.84.2/tcp/9842/p2p/QmbPVwrqWsTYXq1RxGWcxx9SWaTUCfoo1wA6wmdbduWe29
   )
   REL=stressnet
   network_type=stressnet
   dns_zone=stn.hmny.io
   syncdir=stn
   ;;
*)
   err 64 "${network}: invalid network"
   ;;
esac
# reject any leftover positional arguments
case $# in
[1-9]*)
   usage "extra arguments at the end ($*)"
   ;;
esac
# An explicit upgrade folder (-U) overrides the network's release channel.
if [ -n "$upgrade_rel" ]; then
   REL="${upgrade_rel}"
fi
# Pick the S3 release folder for this platform; Linux additionally gets the
# static subfolder when statically linked binaries are requested.
case "$OS" in
Darwin)
   FOLDER=release/darwin-x86_64/$REL
   ;;
Linux)
   FOLDER=release/linux-x86_64/$REL
   if [ "$static" == "true" ]; then
      FOLDER=${FOLDER}/static
   fi
   ;;
esac
# Filter an md5sum-style listing on stdin down to the entry whose file
# basename equals ${1}; prints "<hash> <basename>" (at most one line per
# matching input line).
extract_checksum() {
   awk -v basename="${1}" '
   {
      s = $0;
   }
   # strip hash and following space; skip line if unsuccessful
   sub(/^[0-9a-f]+ /, "", s) == 0 { next; }
   # save hash
   { hash = substr($0, 1, length($0) - length(s) - 1); }
   # strip executable indicator (space or asterisk); skip line if unsuccessful
   sub(/^[* ]/, "", s) == 0 { next; }
   # leave basename only
   { sub(/^.*\//, "", s); }
   # if basename matches, print the hash and basename
   s == basename { printf "%s %s\n", hash, basename; }
   '
}
# Verify the md5 checksum of file ${2} inside directory ${1} against its
# entry in checksum file ${3}.  Returns 0 when there is nothing to verify
# (no checksum file or no matching entry), 1 on checksum mismatch.
verify_checksum() {
   local dir file checksum_file checksum_for_file
   dir="${1}"
   file="${2}"
   checksum_file="${3}"
   [ -f "${dir}/${checksum_file}" ] || return 0
   # Per-file checksum list, written next to the main checksum file.  Keep
   # the name *relative* to ${dir}: the previous code stored it with the
   # ${dir}/ prefix and then prefixed ${dir}/ again in the -s test below,
   # so the test always failed and verification was silently skipped for
   # any directory other than ".".
   checksum_for_file="${checksum_file}::${file}"
   extract_checksum "${file}" < "${dir}/${checksum_file}" > "${dir}/${checksum_for_file}"
   [ -s "${dir}/${checksum_for_file}" ] || return 0
   if ! (cd "${dir}" && exec md5sum -c --status "${checksum_for_file}")
   then
      msg "checksum FAILED for ${file}"
      return 1
   fi
   return 0
}
# Verify the OpenSSL SHA-256 signature of file ${2} in directory ${1}.
# Expects ${2}.sig and harmony_pubkey.pem next to the file.
# Returns 0 on "Verified OK", 1 otherwise.
verify_signature() {
   local dir file sigfile result
   dir="${1}"
   file="${dir}/${2}"
   sigfile="${dir}/${2}.sig"
   # use ${dir} for the public key path; the previous code referenced the
   # caller's local ${outdir}, working only via bash dynamic scoping
   result=$(openssl dgst -sha256 -verify "${dir}/harmony_pubkey.pem" -signature "${sigfile}" "${file}" 2>&1)
   echo "${result}"
   if [[ ${result} != "Verified OK" ]]; then
      return 1
   fi
   return 0
}
# Download every binary listed in ${1}/md5sum.txt from the release bucket,
# optionally verifying each file's signature (-Y), always verifying its md5
# checksum; finally record sha256 fingerprints of the downloaded set.
# No-op when -D (do_not_download) is in effect.
download_binaries() {
   local outdir status
   ${do_not_download} && return 0
   outdir="${1}"
   mkdir -p "${outdir}"
   # NOTE(review): assumes ${outdir}/md5sum.txt already exists (normally
   # fetched by any_new_binaries).  cut -c35- strips the hash column,
   # leaving the file names.
   for bin in $(cut -c35- "${outdir}/md5sum.txt"); do
      status=0
      curl -sSf http://${BUCKET}.s3.amazonaws.com/${FOLDER}/${bin} -o "${outdir}/${bin}" || status=$?
      case "${status}" in
      0) ;;
      *)
         msg "cannot download ${bin} (status ${status})"
         return ${status}
         ;;
      esac
      if $verify; then
         curl -sSf http://${BUCKET}.s3.amazonaws.com/${FOLDER}/${bin}.sig -o "${outdir}/${bin}.sig" || status=$?
         case "${status}" in
         0) ;;
         *)
            msg "cannot download ${bin}.sig (status ${status})"
            return ${status}
            ;;
         esac
         verify_signature "${outdir}" "${bin}" || return $?
      fi
      verify_checksum "${outdir}" "${bin}" md5sum.txt || return $?
      msg "downloaded ${bin}"
   done
   chmod +x "${outdir}/harmony" "${outdir}/node.sh"
   # record sha256 fingerprints of the freshly downloaded binaries
   (cd "${outdir}" && exec openssl sha256 $(cut -c35- md5sum.txt)) > "${outdir}/harmony-checksums.txt"
}
# Return success iff an HTTP HEAD request against $1 yields status 200.
_curl_check_exist() {
   local url=$1
   local statuscode
   statuscode=$(curl -I --silent --output /dev/null --write-out "%{http_code}" "$url")
   [ "$statuscode" -eq 200 ]
}
# Download URL $1 into directory $2 as file name $3, after first checking
# that the URL actually exists.  Returns curl's status on success path,
# 1 if the URL is missing.
_curl_download() {
   local url="$1"
   local outdir="$2"
   local filename="$3"
   mkdir -p "${outdir}"
   if ! _curl_check_exist "$url"; then
      msg "failed to find/download $url"
      return 1
   fi
   curl --progress-bar -Sf "$url" -o "${outdir}/${filename}"
}
# Fetch the remote md5sum.txt into ${1} and compare it with the local copy.
# Returns 0 when nothing changed (or downloads are disabled), non-zero when
# a new checksum file was staged (new binaries available) or a download
# failed.  With -Y, also refreshes the Harmony public signing key.
any_new_binaries() {
   local outdir
   ${do_not_download} && return 0
   outdir="${1}"
   mkdir -p "${outdir}"
   if ${verify}; then
      curl -L https://harmony.one/pubkey -o "${outdir}/harmony_pubkey.pem"
      if ! grep -q "BEGIN\ PUBLIC\ KEY" "${outdir}/harmony_pubkey.pem"; then
         # fixed message typo: "failed to downloaded"
         msg "failed to download harmony public signing key"
         return 1
      fi
   fi
   curl -sSf http://${BUCKET}.s3.amazonaws.com/${FOLDER}/md5sum.txt -o "${outdir}/md5sum.txt.new" || return $?
   if diff "${outdir}/md5sum.txt.new" "${outdir}/md5sum.txt"
   then
      # identical: discard the freshly downloaded copy
      rm "${outdir}/md5sum.txt.new"
   else
      # changed (or no previous copy): promote the new checksum file
      mv "${outdir}/md5sum.txt.new" "${outdir}/md5sum.txt"
      return 1
   fi
   return 0
}
# -d: only download the binaries into staging/ and exit.
if ${download_only}; then
   if any_new_binaries staging
   then
      msg "binaries did not change in staging"
   else
      download_binaries staging || err 69 "download node software failed"
      # fixed message typo: "direectory"
      msg "downloaded files are in staging directory"
   fi
   exit 0
fi
# running as root (default) requires passing the root check
if ${run_as_root}; then
   check_root
fi
# multi_key mode will use specified keys in .hmy/multikeys directory
if ! ${multi_key}; then
   case "${BLSKEYFILE}" in
   "")
      # no -k given: autodetect exactly one BLS key file by the known
      # wallet naming patterns, in home dir and cwd
      unset -v f
      for f in \
         ~/*--????-??-??T??-??-??.*Z--bls_???????????????????????????????????????????????????????????????????????????????????????????????? \
         ~/????????????????????????????????????????????????????????????????????????????????????????????????.key \
         *--????-??-??T??-??-??.*Z--bls_???????????????????????????????????????????????????????????????????????????????????????????????? \
         ????????????????????????????????????????????????????????????????????????????????????????????????.key
      do
         [ -f "${f}" ] || continue
         case "${BLSKEYFILE}" in
         "")
            BLSKEYFILE="${f}"
            ;;
         *)
            # -ef: same file reached via two patterns is not an error
            [ "${f}" -ef "${BLSKEYFILE}" ] || \
               err 69 "multiple key files found (${f}, ${BLSKEYFILE}); please use -k to specify"
            ;;
         esac
      done
      case "${BLSKEYFILE}" in
      "") err 69 "could not autodetect BLS key file; please use -k to specify";;
      esac
      msg "autodetected BLS key file: ${BLSKEYFILE}"
      ;;
   *)
      msg "using manually specified BLS key file: ${BLSKEYFILE}"
      ;;
   esac
fi
# initial binary download/update into the current directory
if any_new_binaries .
then
   msg "binaries did not change"
else
   download_binaries . || err 69 "initial node software update failed"
fi
NODE_PORT=${pub_port:-9000}
PUB_IP=
# system tuning only applies on Linux and only when running as root
if [ "$OS" == "Linux" ]; then
   if ${run_as_root}; then
      setup_env
   fi
fi
# find my public ip address
myip
check_pkg_management
# join the bootnode multiaddresses into one comma-separated list
unset -v BN_MA bn
for bn in "${bootnodes[@]}"
do
   BN_MA="${BN_MA+"${BN_MA},"}${bn}"
done
# -c: back up the local database and fast-sync a fresh snapshot via rclone.
# Deliberately disabled for mainnet.
if [[ "${start_clean}" == "true" && "${network_type}" != "mainnet" ]]
then
   msg "cleaning up old database (-c)"
   # set a 2s timeout, and set its default return value to Y (true)
   read -t 2 -rp "Remove old database? (Y/n) " yesno
   yesno=${yesno:-Y}
   echo
   if [[ "$yesno" == "y" || "$yesno" == "Y" ]]; then
      # move the db and DHT state into a timestamped backup directory
      unset -v backup_dir now
      now=$(date -u +%Y-%m-%dT%H:%M:%SZ)
      mkdir -p backups; rm -rf backups/*
      backup_dir=$(mktemp -d "backups/${now}.XXXXXX")
      mv -f harmony_db_* .dht* "${backup_dir}/" 2>/dev/null || :
   fi
   # install unzip as dependency of rclone
   if ! which unzip > /dev/null; then
      $PKG_INSTALL unzip
   fi
   # do rclone sync
   if ! which rclone > /dev/null; then
      msg "installing rclone to fast sync db"
      msg "curl https://rclone.org/install.sh | sudo bash"
      curl https://rclone.org/install.sh | sudo bash
      mkdir -p ~/.config/rclone
   fi
   # ensure the [hmy] S3 profile exists for the public snapshot bucket
   if ! grep -q 'hmy' ~/.config/rclone/rclone.conf 2> /dev/null; then
      msg "adding [hmy] profile to rclone.conf"
      cat<<-EOT>>~/.config/rclone/rclone.conf
	[hmy]
	type = s3
	provider = AWS
	env_auth = false
	region = us-west-1
	acl = public-read
	EOT
   fi
   msg "Syncing harmony_db_0"
   rclone sync -P hmy://pub.harmony.one/$syncdir/harmony_db_0 harmony_db_0
fi
mkdir -p latest
unset -v check_update_pid
# Terminate the background update checker (if it was started).
# ${1} is the signal that triggered cleanup; EXIT (0) and INT (2) are
# translated to TERM before killing the child.
cleanup() {
   local trap_sig kill_sig
   trap_sig="${1:-EXIT}"
   kill_sig="${trap_sig}"
   case "${kill_sig}" in
   0|EXIT|2|INT) kill_sig=TERM;;
   esac
   # ${check_update_pid+set} is "set" only if the updater was launched
   case "${check_update_pid+set}" in
   set)
      msg "terminating update checker (pid ${check_update_pid})"
      kill -${kill_sig} "${check_update_pid}"
      ;;
   esac
}
# ---- signal handling: run cleanup, then re-raise the signal so the shell
# ---- exits with the conventional signal status ----
unset -v trap_sigs trap_sig
trap_sigs="EXIT HUP INT TERM"
trap_func() {
   local trap_sig="${1-EXIT}"
   case "${trap_sig}" in
   0|EXIT) msg "exiting";;
   *) msg "received SIG${trap_sig}";;
   esac
   # remove our traps before re-raising to avoid recursing into this handler
   trap - ${trap_sigs}
   cleanup "${trap_sig}"
   case "${trap_sig}" in
   ""|0|EXIT) ;;
   *) kill -"${trap_sig}" "$$";;
   esac
}
for trap_sig in ${trap_sigs}
do
   trap "trap_func ${trap_sig}" ${trap_sig}
done
# Kill the given PID, ensuring that it is a child of this script ($$).
# Returns 1 (without killing) when the process is not our child or is gone.
kill_child() {
   local pid
   pid="${1}"
   # ps prints the parent pid; the arithmetic coerces an empty result
   # (process already gone) to 0 so the $$ match simply fails
   case $(($(ps -oppid= -p"${pid}" || :) + 0)) in
   $$) ;;
   *) return 1;;
   esac
   msg "killing pid ${pid}"
   kill "${pid}"
}
# Kill nodes that are direct child of this script (pid $$),
# i.e. run directly from main loop.
kill_node() {
   local pids pid delay
   msg "finding node processes that are our children"
   pids=$(
      ps axcwwo "pid=,ppid=,command=" |
      awk -v me=$$ '$2 == me && $3 == "harmony" { print $1; }'
   )
   msg "found node processes: ${pids:-"<none>"}"
   for pid in ${pids}
   do
      # keep signalling until the process is no longer our (live) child;
      # first attempt is immediate, retries back off by 1s
      delay=0
      while kill_child ${pid}
      do
         sleep ${delay}
         delay=1
      done
      msg "pid ${pid} no longer running"
   done
}
unset -v save_pass_file
save_pass_file=true
prompt_save=false
# Prompt for (and temporarily persist) a passphrase for every BLS key under
# ${blsfolder} that lacks a matching .pass file, then ask whether the
# temporary .pass files should be deleted after a successful node start
# (sets save_pass_file accordingly).
read_bls_pass() {
   local f passfile passphrase yn
   for f in "${blsfolder}"/*.key
   do
      if [ ! -f "$f" ]; then
         err 10 "could not find bls key file: $f"
      fi
      passfile=${blsfolder}/$(basename "${f%.*}").pass
      if [ ! -f "$passfile" ]; then
         unset -v passphrase
         read -rsp "Enter passphrase for the BLS key file $f: " passphrase
         echo
         # write the passphrase to the .pass file only; the previous
         # "echo | tee" also echoed the passphrase back to the terminal
         printf '%s\n' "${passphrase}" > "$passfile"
         chmod og-wr "$passfile"
         echo "Passphrase is temporarily saved to: $passfile"
         prompt_save=true
      fi
   done
   if ${prompt_save} ; then
      while true
      do
         read -t 3 -rp "Do you wish to delete the saved passphrase files after successful start of node? (y|n):" yn
         yn=${yn:-Y}
         case $yn in
            [Yy]*) save_pass_file=false
                   break;;
            [Nn]*) save_pass_file=true
                   break;;
            *) sleep 1 && echo "Please answer yes (y|Y) or no (n|N)";;
         esac
      done
      prompt_save=false
   fi
}
# Delete the temporary .pass files unless the user chose to keep them.
rm_bls_pass() {
   # quote expansions so paths containing spaces work, and keep the loop
   # variable out of the global scope
   local f
   if ! ${save_pass_file} ; then
      for f in "${blsfolder}"/*.pass
      do
         if [ -f "$f" ]; then
            rm "$f"
         fi
      done
   fi
}
# ---- background auto-updater: periodically re-download binaries into
# ---- staging/ and kill the node so the main loop restarts with them ----
{
   while ${loop}
   do
      msg "re-downloading binaries in 30~60m"
      # NOTE(review): presumably random(base, spread) => 1800-3600s; confirm
      # against the helper defined earlier in this script
      redl_sec=$( random 1800 1800 )
      sleep $redl_sec
      if any_new_binaries staging
      then
         msg "binaries did not change"
         continue
      fi
      while ! download_binaries staging
      do
         msg "staging download failed; retrying in 10~30m"
         retry_sec=$( random 600 1200 )
         sleep $retry_sec
      done
      # compare fingerprints; identical content means nothing to roll out
      if diff staging/harmony-checksums.txt harmony-checksums.txt
      then
         msg "binaries did not change"
         continue
      fi
      msg "binaries changed; moving from staging into main"
      (cd staging; exec mv harmony-checksums.txt $(cut -c35- md5sum.txt) ..) || continue
      msg "binaries updated, killing node to restart"
      kill_node
   done
} > harmony-update.out 2>&1 &
check_update_pid=$!
# collect BLS passphrase(s) up front, before entering the run loop
if ! ${multi_key}; then
   if [ -z "${blspass}" ]; then
      unset -v passphrase
      read -rsp "Enter passphrase for the BLS key file ${BLSKEYFILE}: " passphrase
      echo
   elif [ ! -f "${blspass}" ]; then
      err 10 "can't find the ${blspass} file"
   fi
else
   read_bls_pass
fi
# ---- main loop: build the argument list and (re)start the harmony node
# ---- until -1 (run once) or a signal stops us ----
while :
do
   msg "############### Running Harmony Process ###############"
   # base arguments common to all node types
   args=(
      -bootnodes "${BN_MA}"
      -ip "${PUB_IP}"
      -port "${NODE_PORT}"
      -network_type="${network_type}"
      -dns_zone="${dns_zone}"
      -blacklist="${blacklist}"
      -min_peers="${minpeers}"
      -max_bls_keys_per_node="${max_bls_keys_per_node}"
      -broadcast_invalid_tx="${broadcast_invalid_tx}"
      -verbosity="${log_level}"
   )
   args+=(
      -is_archival="${archival}"
   )
   # single-key vs multi-key BLS configuration
   if ! ${multi_key}; then
      args+=(
         -blskey_file "${BLSKEYFILE}"
      )
   fi
   if ${multi_key}; then
      args+=(
         -blsfolder "${blsfolder}"
      )
   fi
   if ${public_rpc}; then
      args+=(
         -public_rpc
      )
   fi
   if [ ! -z "${pprof}" ]; then
      args+=(
         -pprof "${pprof}"
      )
   fi
   # backward compatible with older harmony node software
   case "${node_type}" in
   validator)
      case "${shard_id}" in
      ?*)
         args+=(
            -shard_id="${shard_id}"
         )
         if ${staking_mode}
         then
            args+=(-staking="${staking_mode}")
         fi
         ;;
      esac
      ;;
   explorer)
      args+=(
         -node_type="${node_type}"
         -shard_id="${shard_id}"
      )
      ;;
   esac
   # optional p2p tracing, passed to the binary via the environment
   case "${TRACEFILE}" in
   "") ;;
   *) msg "WARN: enabled p2p tracefile: $TRACEFILE. Be aware of the file size."
      export P2P_TRACEFILE=${TRACEFILE} ;;
   esac
   # dynamic loader path variable differs per platform
   case "$OS" in
   Darwin) ld_path_var=DYLD_FALLBACK_LIBRARY_PATH;;
   *) ld_path_var=LD_LIBRARY_PATH;;
   esac
   # launch the node; a delayed background job removes any temporary
   # passphrase files ~60s after a (presumably successful) start
   run() {
      (sleep 60 && rm_bls_pass)&
      env "${ld_path_var}=$(pwd)" ./harmony "${args[@]}" "${@}"
   }
   # feed the passphrase on stdin unless a passphrase file (-p) was given
   case "${blspass:+set}" in
   "") echo -n "${passphrase}" | run -blspass stdin;;
   *) run -blspass file:${blspass};;
   esac || msg "node process finished with status $?"
   ${loop} || break
   msg "restarting in 10s..."
   save_pass_file=false
   rm_bls_pass
   sleep 10
   # multi-key pass files were just removed; re-prompt before restart
   if ${multi_key}; then
      read_bls_pass
   fi
done
# vim: set expandtab:ts=3
|
#!/bin/bash
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=10:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_Pendulum-v0_ddpg_hardcopy_action_noise_seed4_run5_%N-%j.out # %N for node name, %j for jobID
# SLURM job: train Double-DDPG (hard target copy, action-noise exploration)
# on Pendulum-v0 with seed 4, run 5, using the CPU TensorFlow virtualenv.
# load the toolchain modules required by the python environment
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env Pendulum-v0 --random-seed 4 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/Pendulum-v0/ddpg_hardcopy_action_noise_seed4_run5 --continuous-act-space-flag --double-ddpg-flag --target-hard-copy-flag
|
<reponame>laginha87/puzzle-fighter<filename>jest.config.js<gh_stars>0
// Jest configuration for a TypeScript project built on ts-jest.
module.exports = {
  // look for tests only under src/
  "roots": [
    "<rootDir>/src"
  ],
  // a test file is any *.spec.ts
  "testRegex": "((\\.|/)(spec))\\.ts$",
  "transform": {
    "^.+\\.tsx?$": "ts-jest"
  },
  // path aliases mirroring the bundler/tsconfig paths
  // NOTE(review): "\~" in a JS string is just "~" (the backslash is
  // redundant); confirm the alias matches "~src/..." imports as intended
  "moduleNameMapper": {
    "\~src/(.*)$": "<rootDir>/src/$1",
    "assets/(.*)$": "<rootDir>/__mocks__/fileMock.js",
    "tests/(.*)$": "<rootDir>/tests/$1"
  },
  "globals": {
    "ts-jest": {
      tsConfig: 'tsconfig.json',
      // do not fail test runs on TypeScript diagnostics
      diagnostics: false
    }
  }
}
"use babel"
import { CompositeDisposable } from "atom"
/**
 * Atom editor view that marks, in a dedicated gutter, the buffer-row range
 * of the indentation block containing each cursor.
 */
export default class IndentationLinesView {
  constructor(editor) {
    this.editor = editor
    this.subscriptions = new CompositeDisposable()
    // dedicated gutter so the decoration does not clash with line numbers
    this.gutter = this.editor.addGutter({ name: "indentation-lines", priority: 1000 })
    atom.views.getView(this.gutter).classList.add("indentation-lines")
    this.subscriptions.add(this.editor.observeCursors((cursor) => this.addLine(cursor)))
  }

  // Create a marker for the cursor's indentation block, keep it in sync with
  // cursor movement, and destroy it together with the cursor.
  addLine(cursor) {
    const row = cursor.getBufferRow()
    const marker = this.editor.markBufferRange(this.findRange(row), { invalidate: "never" })
    this.gutter.decorateMarker(marker, { class: "indentation-line" })
    cursor.onDidChangePosition(({ newBufferPosition }) => {
      const { row } = newBufferPosition
      marker.setBufferRange(this.findRange(row))
    })
    cursor.onDidDestroy(() => {
      marker.destroy()
    })
  }

  // Compute the [[startRow, 0], [endRow, 0]] range of the indentation block
  // around `row`, skipping blank rows when probing the neighbours.
  findRange(row) {
    let indent = this.getIndent(row)
    const last = this.editor.getLastBufferRow()
    let prev = Math.max(0, row - 1)
    while (prev > 0 && this.isEmptyRow(prev)) prev -= 1
    const prevIndent = this.getIndent(prev)
    let next = Math.min(last, row + 1)
    while (next < last && this.isEmptyRow(next)) next += 1
    const nextIndent = this.getIndent(next)
    if (prevIndent == 0 && nextIndent == 0) return [[row, 0], [row, 0]]
    // on a blank row between two indented neighbours, adopt the deeper indent
    if (indent == 0 && prevIndent != 0 && nextIndent != 0) indent = Math.max(prevIndent, nextIndent)
    let start = Math.max(0, row - 1)
    let startIndent = this.getIndent(start)
    const prevLarger = prevIndent > indent
    while (start > 0 && ((prevLarger ? startIndent > indent : startIndent >= indent) || this.isEmptyRow(start))) {
      start -= 1
      startIndent = this.getIndent(start)
    }
    let end = Math.min(last, row + 1)
    let endIndent = this.getIndent(end)
    const nextLarger = nextIndent > indent
    while (end < last && ((nextLarger ? endIndent > indent : endIndent >= indent) || this.isEmptyRow(end))) {
      end += 1
      endIndent = this.getIndent(end)
    }
    if (prevIndent <= indent && nextIndent > indent) return [[row, 0], [end, 0]]
    // BUG FIX: was [[start], [row, 0]] -- the start point was missing its
    // column component, inconsistent with every other range returned here
    if (prevIndent > indent && nextIndent <= indent) return [[start, 0], [row, 0]]
    if (prevIndent <= indent && nextIndent <= indent) return [[start, 0], [end, 0]]
    return [[row, 0], [row, 0]]
  }

  // Indentation level (in tab-stops) of a buffer row, as reported by Atom.
  getIndent(row) {
    return this.editor.indentationForBufferRow(row)
  }

  // True when a buffer row contains only whitespace.
  isEmptyRow(row) {
    return this.editor.lineTextForBufferRow(row).trim().length == 0
  }

  // Release the cursor subscription and remove the gutter.
  destroy() {
    this.subscriptions.dispose()
    this.gutter.destroy()
  }
}
|
"""
    dot_prod(x, y)

Compute the dot product of two equal-length vectors.

Throws an error when the lengths differ.
"""
function dot_prod(x, y)
    if length(x) != length(y)
        error("Input vectors must have the same length")
    end
    # start from a zero of the promoted element type so the accumulator is
    # type-stable (e.g. Float64 for float inputs instead of starting at Int 0)
    prod = zero(promote_type(eltype(x), eltype(y)))
    # eachindex(x, y) also guards against mismatched (e.g. offset) axes
    for i in eachindex(x, y)
        prod += x[i] * y[i]
    end
    return prod
end
<filename>src/minimumcost_spanning_tree/Boj13418.java
package minimumcost_spanning_tree;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.PriorityQueue;
import java.util.StringTokenizer;
/**
*
* @author minchoba
* 백준 13418번: 학교 탐방하기
*
* @see https://www.acmicpc.net/problem/13418/
*
*/
/**
 * BOJ 13418: run Kruskal twice with a union-find forest -- once preferring
 * downhill (type 1) edges, once preferring uphill -- and print the
 * difference between the squared uphill-edge counts of the worst and best
 * spanning trees.
 */
public class Boj13418 {
	/** Union-find parent array; roots hold the negated set size. */
	private static int[] parent;

	/** Undirected edge; {@code cost} is the road type (1 = downhill, 0 = uphill). */
	private static class Path implements Comparable<Path>{
		int from;
		int to;
		int cost;

		public Path(int from, int to, int cost) {
			this.from = from;
			this.to = to;
			this.cost = cost;
		}

		@Override
		public int compareTo(Path p) {
			// Descending by cost so type-1 edges are polled first.
			// Integer.compare preserves the Comparable contract
			// (antisymmetry; equal costs compare equal), which the previous
			// "this.cost == 1 ? -1 : 1" form violated for equal costs.
			return Integer.compare(p.cost, this.cost);
		}
	}

	public static void main(String[] args) throws Exception{
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		StringTokenizer st = new StringTokenizer(br.readLine());

		int N = Integer.parseInt(st.nextToken());
		int M = Integer.parseInt(st.nextToken()) + 1;	// +1: input includes the entrance road

		PriorityQueue<Path> minPq = new PriorityQueue<>();
		PriorityQueue<Path> maxPq = new PriorityQueue<>();
		while(M-- > 0) {
			st = new StringTokenizer(br.readLine());
			int from = Integer.parseInt(st.nextToken());
			int to = Integer.parseInt(st.nextToken());
			int type = Integer.parseInt(st.nextToken());

			minPq.offer(new Path(from, to, type));	// 내리막 기준 정렬 (downhill-first)
			maxPq.offer(new Path(from, to, type == 0 ? 1 : 0));	// 오르막 기준 정렬 (uphill-first)
		}

		init(N);
		long max = kruskal(N, maxPq, 1);
		init(N);
		long min = kruskal(N, minPq, 0);

		System.out.println(max - min);
	}

	/** Reset the union-find forest for vertices 0..n (every node its own root). */
	private static void init(int n) {
		parent = new int[n + 1];

		for(int i = 0; i < n + 1; i++) {
			parent[i] = -1;
		}
	}

	/** Find with path compression. */
	private static int find(int x) {
		if(parent[x] < 0) return x;
		else return parent[x] = find(parent[x]);
	}

	/** Union by size; returns {@code false} when x and y are already connected. */
	private static boolean merge(int x, int y) {
		x = find(x);
		y = find(y);

		if(x == y) return false;

		if(parent[x] < parent[y]) {
			parent[x] += parent[y];
			parent[y] = x;
		}
		else {
			parent[y] += parent[x];
			parent[x] = y;
		}
		return true;
	}

	/**
	 * Build a spanning tree from the queued edges; count accepted edges whose
	 * cost equals {@code type} and return the squared count.
	 */
	private static long kruskal(int n, PriorityQueue<Path> pq, int type) {
		long cost = 0;

		while(!pq.isEmpty()) {
			Path current = pq.poll();

			if(merge(current.from, current.to)) {
				if(current.cost == type) cost++;	// 경우에 따라 오르막길이 나온 경우 +1
			}
		}
		return cost * cost;
	}
}
|
import React from 'react';
const MyButton = ({ text, onClick }) => {
return (
<button onClick={onClick}>{text}</button>
);
};
export default MyButton; |
<reponame>Renn/MeetHere
package com.webapp.controller;
import com.webapp.filter.LoginFilter;
import com.webapp.model.Building;
import com.webapp.model.Record;
import com.webapp.model.user.User;
import com.webapp.service.database.dao.BuildingDao;
import com.webapp.service.database.dao.RecordDao;
import org.junit.jupiter.api.Test;
import org.springframework.util.Assert;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.text.ParseException;
import java.util.LinkedList;
import java.util.List;
import java.text.SimpleDateFormat;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
/**
* @author Guoyuanjie
*/
class BuildingControllerTest {
private BuildingDao buildingDao = mock(BuildingDao.class);
private RecordDao recordDao=mock(RecordDao.class);
private HttpServletRequest request=mock(HttpServletRequest.class);
private HttpSession session=mock(HttpSession.class);
private BuildingController buildingController =new BuildingController(buildingDao,recordDao);
@Test
// An empty/unknown user type must still yield a non-null view (unauthorized path).
public void service_WhenIsAuthorized()
{
    String action="";
    String currentUserType="";
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    ModelAndView result=buildingController.service(action,request,session);
    assertNotNull(result);
}
@Test
// An unrecognized action from a plain user must still yield a non-null view.
public void service_WhenActionIsNotListAndBook()
{
    String action="sds";
    String currentUserType="user";
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(request.getParameter("id")).thenReturn("305");
    ModelAndView result=buildingController.service(action,request,session);
    assertNotNull(result);
}
@Test
// "delete" as admin must delete the building by id and return the admin list view.
public void service_WhenActionIsDelete_UserIsAdmin()
{
    String action="delete";
    String currentUserType="admin";
    List<Building> buildingList=new LinkedList<>();
    buildingList.add(new Building());
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(request.getParameter("id")).thenReturn("305");
    when(buildingDao.listBuilding(20)).thenReturn(buildingList);
    ModelAndView result=buildingController.service(action,request,session);
    verify(buildingDao).deleteBuilding(305);
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainAdmin",result.getViewName()),
            ()->assertEquals("admin/building.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals(buildingList,result.getModelMap().get("buildingList"))
    );
}
@Test
// "modify" as admin must return the modify page with the building fetched by id.
public void service_WhenActionIsModify_UserIsAdmin()
{
    String action="modify";
    String currentUserType="admin";
    Building building=new Building();
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(request.getParameter("id")).thenReturn("305");
    when(buildingDao.queryBuildingById(305)).thenReturn(building);
    ModelAndView result=buildingController.service(action,request,session);
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainAdmin",result.getViewName()),
            ()->assertEquals("admin/buildingModify.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals("305",result.getModelMap().get("id")),
            ()->assertEquals(building,result.getModelMap().get("building"))
    );
}
@Test
// "add" as admin must return an empty modify page (no id, no building preloaded).
public void service_WhenActionIsAdd_UserIsAdmin()
{
    String action="add";
    String currentUserType="admin";
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(request.getParameter("id")).thenReturn("305");
    ModelAndView result=buildingController.service(action,request,session);
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainAdmin",result.getViewName()),
            ()->assertEquals("admin/buildingModify.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals(null,result.getModelMap().get("id")),
            ()->assertEquals(null,result.getModelMap().get("building"))
    );
}
@Test
// "save" with a non-empty buildingId must update the existing building.
public void service_WhenActionIsSave_UserIsAdmin_IDStrIsNotNull()
{
    String action="save";
    String currentUserType="admin";
    List<Building> buildingList=new LinkedList<>();
    buildingList.add(new Building());
    Building building=new Building("ECNU","none","999");
    building.setId(305);
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(request.getParameter("id")).thenReturn("305");
    when(request.getParameter("buildingName")).thenReturn("ECNU");
    when(request.getParameter("buildingDescription")).thenReturn("none");
    when(request.getParameter("buildingPrice")).thenReturn("999");
    when(request.getParameter("buildingId")).thenReturn("305");
    when(buildingDao.listBuilding(20)).thenReturn(buildingList);
    ModelAndView result=buildingController.service(action,request,session);
    verify(buildingDao).updateBuilding(building);
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainAdmin",result.getViewName()),
            ()->assertEquals("admin/building.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals(buildingList,result.getModelMap().get("buildingList"))
    );
}
@Test
// "save" with an empty buildingId must add a brand-new building instead of updating.
public void service_WhenActionIsSave_UserIsAdmin_IDStrIsNull()
{
    String action="save";
    String currentUserType="admin";
    List<Building> buildingList=new LinkedList<>();
    buildingList.add(new Building());
    Building building=new Building("ECNU","none","999");
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(request.getParameter("id")).thenReturn("305");
    when(request.getParameter("buildingName")).thenReturn("ECNU");
    when(request.getParameter("buildingDescription")).thenReturn("none");
    when(request.getParameter("buildingPrice")).thenReturn("999");
    when(request.getParameter("buildingId")).thenReturn("");
    when(buildingDao.listBuilding(20)).thenReturn(buildingList);
    ModelAndView result=buildingController.service(action,request,session);
    verify(buildingDao).addBuilding(building);
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainAdmin",result.getViewName()),
            ()->assertEquals("admin/building.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals(buildingList,result.getModelMap().get("buildingList"))
    );
}
@Test
// "list" as admin must render the admin building list.
public void service_WhenActionIsList_UserIsAdmin()
{
    String action="list";
    String currentUserType="admin";
    List<Building> buildingList=new LinkedList<>();
    buildingList.add(new Building());
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(buildingDao.listBuilding(20)).thenReturn(buildingList);
    ModelAndView result=buildingController.service(action,request,session);
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainAdmin",result.getViewName()),
            ()->assertEquals("admin/building.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals(buildingList,result.getModelMap().get("buildingList"))
    );
}
@Test
// "list" as a plain user must render the user-facing building list.
public void service_WhenActionIsList_UserIsUser()
{
    String action="list";
    String currentUserType="user";
    List<Building> buildingList=new LinkedList<>();
    buildingList.add(new Building());
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(buildingDao.listBuilding(20)).thenReturn(buildingList);
    ModelAndView result=buildingController.service(action,request,session);
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainUser",result.getViewName()),
            ()->assertEquals("user/building.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals(buildingList,result.getModelMap().get("buildingList"))
    );
}
@Test
// Any other action as admin falls back to the admin building list.
public void service_WhenActionIsOther_UserIsAdmin()
{
    String action="other";
    String currentUserType="admin";
    List<Building> buildingList=new LinkedList<>();
    buildingList.add(new Building());
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(buildingDao.listBuilding(20)).thenReturn(buildingList);
    ModelAndView result=buildingController.service(action,request,session);
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainAdmin",result.getViewName()),
            ()->assertEquals("admin/building.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals(buildingList,result.getModelMap().get("buildingList"))
    );
}
@Test
// "book" as user must create a booking record and return to the user list view.
public void service_WhenActionIsBook_UserIsUser()
{
    String action="book";
    String currentUserType="user";
    String buildingId="305";
    String startDate="2019-02-16";
    String duration="1545";
    List<Building> buildingList=new LinkedList<>();
    buildingList.add(new Building());
    User user =new User();
    when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
    when(request.getParameter("id")).thenReturn("111");
    when(buildingDao.listBuilding(20)).thenReturn(buildingList);
    when(session.getAttribute("currentUser")).thenReturn(user);
    when(request.getParameter("buildingId")).thenReturn(buildingId);
    when(request.getParameter("startDate")).thenReturn(startDate);
    when(request.getParameter("duration")).thenReturn(duration);
    ModelAndView result=buildingController.service(action,request,session);
    verify(recordDao).addRecord(any(Record.class));
    assertAll(
            // expected value first, actual second (JUnit assertEquals convention)
            ()->assertEquals("mainUser",result.getViewName()),
            ()->assertEquals("user/building.jsp",result.getModelMap().get("mainPage")),
            ()->assertEquals(buildingList,result.getModelMap().get("buildingList"))
    );
}
@Test
public void service_Try_Throws_Parse_Exception_WhenActionIsBook_UserIsAdmin() throws IOException {
    /*
     * Books with an empty startDate so the controller's internal date
     * parsing fails; the controller is expected to log the ParseException
     * to stderr. stdout/stderr are captured to inspect that output.
     *
     * NOTE(review): the method name says "UserIsAdmin" but the stubbed
     * session type is "user" — confirm which scenario was intended.
     */
    PrintStream originalOut = System.out;
    PrintStream originalErr = System.err;
    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
    ByteArrayOutputStream errContent = new ByteArrayOutputStream();
    System.setOut(new PrintStream(outContent));
    System.setErr(new PrintStream(errContent));
    // BUG FIX: the streams were previously only restored after the
    // assertion, so a failing assertion left System.out/err redirected
    // for every later test. Restore them in a finally block. Also removed
    // a pre-call println of errContent, which was always empty at that point.
    try {
        String action = "book";
        String currentUserType = "user";
        String buildingId = "305";
        String startDate = ""; // empty on purpose: triggers the ParseException
        String duration = "1545";
        User user = new User();
        when(session.getAttribute("currentUserType")).thenReturn(currentUserType);
        when(request.getParameter("id")).thenReturn("111");
        when(session.getAttribute("currentUser")).thenReturn(user);
        when(request.getParameter("buildingId")).thenReturn(buildingId);
        when(request.getParameter("startDate")).thenReturn(startDate);
        when(request.getParameter("duration")).thenReturn(duration);

        buildingController.service(action, request, session);

        // The controller should have printed the parse failure to stderr.
        assertTrue(errContent.toString().contains("java.text.ParseException"));
    } finally {
        System.setErr(originalErr);
        System.setOut(originalOut);
        outContent.close();
        errContent.close();
    }
}
@Test
public void service_WhenActionIsBook_UserIsAdmin() {
    // Given: an admin booking building 305 starting 2019-02-16,
    // duration 1545, with the DAO returning a single building.
    List<Building> expectedBuildings = new LinkedList<>();
    expectedBuildings.add(new Building());
    User currentUser = new User();
    when(session.getAttribute("currentUserType")).thenReturn("admin");
    when(session.getAttribute("currentUser")).thenReturn(currentUser);
    when(request.getParameter("id")).thenReturn("111");
    when(request.getParameter("buildingId")).thenReturn("305");
    when(request.getParameter("startDate")).thenReturn("2019-02-16");
    when(request.getParameter("duration")).thenReturn("1545");
    when(buildingDao.listBuilding(20)).thenReturn(expectedBuildings);

    // When: the booking action is dispatched.
    ModelAndView mav = buildingController.service("book", request, session);

    // Then: a booking record is persisted and the admin view is rendered.
    verify(recordDao).addRecord(any(Record.class));
    assertAll(
        () -> assertEquals("mainAdmin", mav.getViewName()),
        () -> assertEquals("admin/building.jsp", mav.getModelMap().get("mainPage")),
        () -> assertEquals(expectedBuildings, mav.getModelMap().get("buildingList"))
    );
}
} |
#!/usr/bin/env bash
: <<'DOC'
tag::catalog[]
Title:: Maximum capacity script for rejoin test
Goal:: Find maximum state size that can be synced within CUP interval.
Runbook::
. set up the testnet (nns + subnet installation)
. install the `statesync-test` canister multiple times, updates the canisters state repeatedly
. wait for some time (configurable) and kill a replica before all update calls finish
. after all update calls finish, stop one third of the replicas, which are all in the same data center as the first one
. restart the first killed replica (now it needs to catch up for the network to make progress)
. wait 10min
. if the metrics pass criterion, restarts the killed group and repeats the rejoin_test with increased state size
. otherwise, show the result of last successful run
Success::
.. if the network still makes progress with 2/3 of the nodes in last 5min, and
.. if statesync duration < CUP interval = DKG interval length / finalization
end::catalog[]
DOC
set -euo pipefail
# Global result flag used by helpers.sh's success()/failure().
# (A duplicate `export exit_code=0` later in the file was removed.)
export exit_code=0
if (($# != 7)); then
    echo >&2 "Wrong number of arguments, please provide values for <testnet_identifier> <num_canisters_for_copy> <initial_num_canisters> <incremental_num_canisters> <max_iterations> <subnet_type> <results_dir>:"
    echo >&2 "$0 p2p58 3 10 5 2 [normal|large] ./results/"
    exit 1
fi
testnet="$1"
num_canisters_for_copy="$2"
initial_num_canisters="$3"
incremental_num_canisters="$4"
max_iterations="$5"
subnet_type="$6"
results_dir="$(
    mkdir -p "$7"
    realpath "$7"
)"
experiment_dir="$results_dir/maximum_rejoin_test_${testnet}-initial_size_${initial_num_canisters}-incremental_size_${incremental_num_canisters}-$(date +%s)"
# Fixed test parameters: size_level scales per-canister state; runtime is the
# e2e driver budget in seconds; random_seed is advanced between iterations.
size_level=8
runtime=60
random_seed=0
# shellcheck disable=SC1090
source "${HELPERS:-$(dirname "${BASH_SOURCE[0]}")/include/helpers.sh}"
# Unpack statesync-test-canister.wasm and let Project::cargo_bin() know about
# its location. (Previously exported twice; the duplicate was removed.)
export STATESYNC_TEST_CANISTER_WASM_PATH="$MEDIA_CANISTERS_PATH/statesync-test-canister.wasm"
# NOTE(review): num_canisters is not assigned until start_e2e_test_driver
# runs; inside $(( )) an unset name evaluates to 0, and with size_level
# hard-coded to 8 the first condition is always false, so this guard is
# effectively dead code — confirm whether it should use another variable.
if [[ $((size_level)) -eq 0 ]] && [[ $((runtime)) -lt $((runtime / 8 + 30 * num_canisters)) ]]; then
    echo >&2 "Please set a higher runtime to make sure other replicas make more progress after killing the first one."
    exit 1
fi
if [[ $((size_level)) -gt 8 ]]; then
    echo >&2 "The size_level is no greater than 8. When setting size_level to 0, it runs the original test without large state."
    exit 1
fi
# Store the time at which the test was called, so we can compute how long everything takes.
calltime="$(date '+%s')"
HOSTS_INI_ARGUMENTS=()
export HOSTS_INI_FILENAME=hosts.ini
if [[ "$subnet_type" == "large" ]]; then
    # The test will run with a special hosts file creating a large app subnet.
    export HOSTS_INI_FILENAME=hosts_large_subnet.ini
    HOSTS_INI_ARGUMENTS+=(--hosts-ini "$HOSTS_INI_FILENAME")
fi
echo "Starting Rejoin Test"
echo "On testnet with identifier $testnet with runtime $runtime (in seconds)."
# Testnet NNS URL: the API endpoint of the first NNS replica.
nns_url=$(jq_hostvars '[._meta.hostvars[.nns.hosts[0]]]' 'map(.api_listen_url)[0]')
# Get the list of all node_indices, so we can use that in scenarios
# shellcheck disable=SC2046
mapfile -d " " -t node_indices <<<$(jq_hostvars 'map(select(.subnet_index==1) | .node_index) | @sh')
# BUG FIX: $experiment_dir was written to before anything created it
# (set_variables only runs later); create it explicitly first.
mkdir -p "$experiment_dir"
echo "${node_indices[@]}" >"$experiment_dir/node_indices"
statesync_node=$(jq_hostvars "with_entries(select(.value.node_index==${node_indices[0]})) | keys[]")
statesync_node_ipv6=$(jq_hostvars "map(select(.node_index==${node_indices[0]}) | .ipv6)[0]")
echo "Node $statesync_node with ipv6 $statesync_node_ipv6 is selected to do state sync."
# Deploy to testnet
deploy_with_timeout "$testnet" \
    --git-revision "$GIT_REVISION" "${HOSTS_INI_ARGUMENTS[@]}"
echo "Testnet deployment successful. Test starts now."
# Per-iteration bookkeeping, (re)initialized by set_variables/set_start_time.
starttime=""
endtime=""
finaltime=""
systemtest_endtime_file=""
systemtest_log=""
maximum_capacity_result_file="$experiment_dir/maximum_capacity"
# Prints the stats of the last successful iteration ($1 = index of the first
# FAILED iteration, so $1 - 1 is the last success) and records the remotely
# fetched byte count in $maximum_capacity_result_file.
show_maximum_capacity() {
    success_iteration=$(($1 - 1))
    if [[ $((success_iteration)) -eq 0 ]]; then
        echo "There were no successful rejoin_test runs."
    else
        # Canister count grows linearly: initial + incremental * (iteration - 1).
        num_canisters=$((initial_num_canisters + incremental_num_canisters * (success_iteration - 1)))
        echo "The last successful run of rejoin_test is iteration $success_iteration with $num_canisters canisters of 1 GiB size."
        metrics_dir="$experiment_dir/$success_iteration/metrics"
        # These JSON files were written earlier by the query_* functions.
        statesync_fetch_size="$(jq -r '.data.result[0].value[1]' <"$metrics_dir/state_sync_size_bytes_total.json")"
        statesync_duration="$(jq -r '.data.result[0].value[1]' <"$metrics_dir/state_sync_duration_seconds_sum.json")"
        echo "The last successful state sync took $statesync_duration seconds and fetched $statesync_fetch_size bytes remotely."
        echo "$statesync_fetch_size" >"$maximum_capacity_result_file"
    fi
}
# Creates the per-iteration directory layout ($1 = iteration index) and JSON
# templates whose placeholders (height_start, height_end, statesync_duration)
# are later substituted by check_state_sync via sed.
set_variables() {
    experiment_subdir="$experiment_dir/$1"
    mkdir -p "$experiment_subdir"
    systemtest_endtime_file="$experiment_subdir/endtime"
    systemtest_log="$experiment_subdir/workload-generator.log"
    mkdir -p "$experiment_subdir/data_to_upload"
    # NOTE: >> appends — repeated calls with the same index would duplicate
    # the templates; each iteration uses a fresh subdir, so this is benign.
    echo '
{
"HeightStart": height_start
}
' >>"$experiment_subdir/data_to_upload/HeightStart.json"
    echo '
{
"HeightEnd": height_end
}
' >>"$experiment_subdir/data_to_upload/HeightEnd.json"
    echo '
{
"StatesyncDuration": statesync_duration
}
' >>"$experiment_subdir/data_to_upload/StatesyncDuration.json"
}
# Records the epoch start time of iteration $1 (used later to bound the
# Prometheus queries), both on stdout and in $experiment_subdir/starttime.
set_start_time() {
    # Store the test start time in epoch, so we could query Prometheus later.
    starttime="$(date '+%s')"
    echo "Starting the iteration $1 of the rejoin_test."
    echo "Start time: $(dateFromEpoch "$starttime")"
    echo "$starttime" >"$experiment_subdir/starttime"
}
# Kills the replica process on $statesync_node and purges its checkpoints so
# the amount of data transferred during the later state sync is controlled.
# Runs in a background subshell that is immediately waited on (so the cd and
# any failure stay contained while `set -e` semantics are preserved).
kill_the_first_replica() {
    (
        cd "$PROD_SRC/ansible"
        ansible-playbook -i "../env/$testnet/hosts" icos_node_stress.yml \
            --limit "$statesync_node" \
            -e ic_action=kill-replica 2>&1 \
            | tee -a "$experiment_subdir/scenario.log"
        # Purge its checkpoints folder to control the size of transferred chunks during state sync.
        ansible-playbook -i "../env/$testnet/hosts" icos_node_recover_base_checkpoint.yml \
            --limit "$statesync_node" 2>&1 | tee -a "$experiment_subdir/scenario.log"
    ) &
    scenario_pid=$!
    wait "$scenario_pid"
}
# Runs the statesync e2e driver for iteration $1 (0 = base-checkpoint prep
# run) and waits for it. On driver failure a `systemtest_failed` marker file
# is dropped, checked later by check_e2e_test_driver_result. Also advances
# $random_seed and sets $endtime for later duration reporting.
start_e2e_test_driver() {
    # Start the e2e system test in a subshell. This will allow us to have a better
    # control over when it finishes.
    cur_iteration="$1"
    # Iteration 0 preps the base checkpoint; iteration 1 seeds the initial
    # state; every later iteration only adds the incremental canisters.
    num_canisters="$incremental_num_canisters"
    if [[ $((cur_iteration)) -eq 0 ]]; then
        num_canisters="$num_canisters_for_copy"
    elif [[ $((cur_iteration)) -eq 1 ]]; then
        num_canisters="$initial_num_canisters"
    fi
    (
        {
            echo "e2e part"
            command -v e2e-test-driver
            if ! e2e-test-driver \
                --nns_url "$nns_url" \
                --runtime "$runtime" \
                --num_canisters "$num_canisters" \
                --size_level "$size_level" \
                --random_seed "$random_seed" \
                -- "5.2"; then
                # Marker file read by check_e2e_test_driver_result.
                echo "failed" >"$experiment_subdir/systemtest_failed"
            fi
        } | tee -a "$systemtest_log"
        # Sleep 4 minutes to make the new checkpoint.
        sleep 240
        date '+%s' >"$systemtest_endtime_file"
    ) &
    systemtest_pid=$!
    wait "$systemtest_pid"
    endtime="$(<"$systemtest_endtime_file")"
    echo "Ending system test *** $(dateFromEpoch "$endtime") (start time was $(dateFromEpoch "$starttime"))"
    # Advance the seed so every run mutates different canister state.
    random_seed=$((random_seed + size_level * num_canisters))
    duration=$((endtime - starttime))
    echo "$((duration / 60)) minutes and $((duration % 60)) seconds elapsed in the first part of the test."
}
# Reports whether the most recent e2e run succeeded. Success is simply the
# absence of the marker file written by start_e2e_test_driver on failure.
check_e2e_test_driver_result() {
    if [[ ! -e "$experiment_subdir/systemtest_failed" ]]; then
        echo "System test passed, continue with the next iteration"
        return 0
    fi
    echo "System test failed, logs in '$systemtest_log'"
    return 1
}
# Kills the replicas on the last third of subnet 1's nodes (same data center
# as $statesync_node), forcing the network to rely on the remaining 2/3.
kill_the_last_group() {
    (
        # Stop nodes from 1st subnet, last third of the nodes
        stress_nodes=$(jq_subnet_nodes_nth_third 1 2)
        cd "$PROD_SRC/ansible"
        ansible-playbook -i "../env/$testnet/hosts" icos_node_stress.yml \
            --limit "$stress_nodes" -e ic_action=kill-replica 2>&1 | tee -a "$experiment_subdir/scenario.log"
    ) &
    scenario_pid=$!
    wait "$scenario_pid"
}
# Restarts the replica on $statesync_node; it must now state-sync to catch up
# for the network to make progress.
restart_the_first_replica() {
    (
        cd "$PROD_SRC/ansible"
        ansible-playbook -i "../env/$testnet/hosts" icos_node_stress.yml \
            --limit "$statesync_node" \
            -e ic_action=reset 2>&1 \
            | tee -a "$experiment_subdir/scenario.log"
    ) &
    scenario_pid=$!
    wait "$scenario_pid"
}
# Restarts the previously killed last-third group and gives it two minutes to
# recover before the next iteration starts.
restart_the_last_group() {
    (
        # Stop nodes from 1st subnet, last third of the nodes
        stress_nodes=$(jq_subnet_nodes_nth_third 1 2)
        cd "$PROD_SRC/ansible"
        ansible-playbook -i "../env/$testnet/hosts" icos_node_stress.yml \
            --limit "$stress_nodes" -e ic_action=reset 2>&1 | tee -a "$experiment_subdir/scenario.log"
        echo "Sleep for 2min for recovery of the last group."
        sleep 120
    ) &
    scenario_pid=$!
    wait "$scenario_pid"
}
# Blocks for 10 minutes while the restarted replica state-syncs and sets
# $finaltime for the metrics queries that follow.
wait_for_state_sync() {
    # Sleep for 10min -- during this time, some nodes will be down.
    # At the beginning of the sleeping time, the restarted replica will conduct state sync.
    # Normally, state sync should finish within 5 minutes (which is roughly the CUP interval).
    # The following 5 minutes is the time period for which we'll query the metrics.
    # We need to ensure that progress is made even with partial membership.
    echo "Sleep for 10min while subshell runs scenario for second part"
    sleep 600
    finaltime="$(date '+%s')"
    echo "Final time: $(dateFromEpoch "$finaltime")"
    echo "(Start time was $(dateFromEpoch "$starttime"))"
}
# Queries Prometheus for the finalization height (min over the unaffected
# nodes, 60s steps across the last 5 minutes) and the average finalization
# rate, writing both JSON results under $experiment_subdir/metrics.
# Side effect: sets $ic, which the following query function also reads.
query_finalization_height_and_rate() {
    # Get the report
    # Produce the list of all unaffected nodes in the testnet, suitable for passing to the Prometheus query
    metricshosts_of_unaffected_nodes="$(jq_subnet_load_third_nodes_urls_for_metrics 1)"
    # Extract the IC name from the testnet name (p2p_15_28 -> p2p)
    ic="${testnet%%_*}"
    measure_time=$((finaltime - 300))
    # Get these metrics. We will go from the last 5 min to the endtime, with 60s step.
    # In each of the time windows (steps) we calculate the min for the metric.
    # If min is increasing, we know the acceptance criteria is satisfied
    mkdir -p "$experiment_subdir/metrics"
    common_labels="ic=\"$ic\",job=\"replica\",instance=~\"$metricshosts_of_unaffected_nodes\""
    metric="artifact_pool_consensus_height_stat"
    selector="$metric{$common_labels,type=\"finalization\",pool_type=\"validated\",stat=\"max\"}"
    curl -G "http://prometheus.dfinity.systems:9090/api/v1/query_range" \
        -fsSL -m 30 --retry 10 --retry-connrefused \
        -o "$experiment_subdir/metrics/${metric}_min.json" \
        -H "Accept: application/json" \
        --data-urlencode "start=$measure_time" \
        --data-urlencode "end=$finaltime" \
        --data-urlencode "step=60s" \
        --data-urlencode "query=min($selector)"
    # Get the finalization rate of unaffected nodes. We will go from the last 5 min to the endtime, with 60s step.
    # Calculate the averages over the large interval.
    # We split into smaller buckets, then apply avg_over_time. The outer avg it
    # to get an aggregate, instead of having values per replica.
    common_labels="ic=\"$ic\",job=\"replica\",instance=~\"$metricshosts_of_unaffected_nodes\""
    metric="artifact_pool_consensus_height_stat"
    selector="$metric{$common_labels,type=\"finalization\",pool_type=\"validated\",stat=\"max\"}"
    curl -G "http://prometheus.dfinity.systems:9090/api/v1/query" \
        -o "$experiment_subdir/metrics/${metric}_avg_total.json" \
        -fsSL -m 30 --retry 10 --retry-connrefused \
        -H "Accept: application/json" \
        --data-urlencode "time=$endtime" \
        --data-urlencode "query=avg(rate(${selector}[300s]))"
}
# Queries Prometheus (at $finaltime) for the state sync duration and fetched
# byte count on the first-killed node, writing the JSON results under
# $experiment_subdir/metrics.
query_state_sync_duration_and_fetch_size() {
    # BUG FIX: this function previously relied on $ic being set by
    # query_finalization_height_and_rate; derive it here as well so the
    # function is correct when called on its own (same value either way).
    # Extract the IC name from the testnet name (p2p_15_28 -> p2p).
    ic="${testnet%%_*}"
    # Get the state sync duration from the node which is first killed.
    metricshosts_of_the_first_node=$(jq_hostvars "map(select(.node_index==${node_indices[0]}) | .metrics_listen_addr)[0]" | escapebracket)
    # Get the metrics of state sync duration, summed up until $finaltime.
    # Query the metrics from the first-killed node as it is the only one which conducts state sync.
    # In this rejoin test, successful state sync only happens once.
    # The value of state_sync_duration_seconds_sum at the final time should represent the state sync duration which just happened.
    common_labels="ic=\"$ic\",job=\"replica\",instance=~\"$metricshosts_of_the_first_node\",status=\"ok\""
    metric="state_sync_duration_seconds_sum"
    selector="$metric{$common_labels}"
    curl -G "http://prometheus.dfinity.systems:9090/api/v1/query" \
        -fsSL -m 30 --retry 10 --retry-connrefused \
        -o "$experiment_subdir/metrics/${metric}.json" \
        -H "Accept: application/json" \
        --data-urlencode "time=$finaltime" \
        --data-urlencode "query=$selector"
    # Get the metrics of state sync size, summed up until $finaltime.
    common_labels="ic=\"$ic\",job=\"replica\",instance=~\"$metricshosts_of_the_first_node\",op=\"fetch\""
    metric="state_sync_size_bytes_total"
    selector="$metric{$common_labels}"
    curl -G "http://prometheus.dfinity.systems:9090/api/v1/query" \
        -fsSL -m 30 --retry 10 --retry-connrefused \
        -o "$experiment_subdir/metrics/${metric}.json" \
        -H "Accept: application/json" \
        --data-urlencode "time=$finaltime" \
        --data-urlencode "query=$selector"
    echo "Results stored in '$experiment_subdir/metrics'"
}
# Evaluates the pass/fail criteria from the previously queried metrics:
# (a) finalization height advanced over the last 5 minutes, and
# (b) state sync finished within the CUP interval minus a 30s checkpoint
#     recovery budget. success()/failure() (from helpers.sh) set $exit_code.
# NOTE(review): the $1 iteration argument passed by the caller is unused.
check_state_sync() {
    # There is a progress in the height
    height_start="$(jq -r '.data.result[0].values | first | .[1]' <"$experiment_subdir/metrics/artifact_pool_consensus_height_stat_min.json")"
    height_end="$(jq -r '.data.result[0].values | last | .[1]' <"$experiment_subdir/metrics/artifact_pool_consensus_height_stat_min.json")"
    # Fill in the JSON templates created by set_variables.
    sed -i "s/height_start/$height_start/g" "$experiment_subdir/data_to_upload/HeightStart.json"
    sed -i "s/height_end/$height_end/g" "$experiment_subdir/data_to_upload/HeightEnd.json"
    finalization_rate="$(jq -r '.data.result[0].value[1]' <"$experiment_subdir/metrics/artifact_pool_consensus_height_stat_avg_total.json")"
    statesync_duration="$(jq -r '.data.result[0].value[1]' <"$experiment_subdir/metrics/state_sync_duration_seconds_sum.json")"
    sed -i "s/statesync_duration/$statesync_duration/g" "$experiment_subdir/data_to_upload/StatesyncDuration.json"
    dkg_interval_length=$(ic-admin --nns-url "$nns_url" get-topology | jq -r ".topology.subnets | to_entries[1].value.records[0].value.dkg_interval_length")
    # +0.000001 guards against division by zero when the rate is 0; bc is
    # used because the operands are floats.
    cup_interval_time=$(bc <<<"$dkg_interval_length/ ($finalization_rate + 0.000001)")
    # State sync needs to finish within the CUP interval and has 30s left to recover the checkpoint.
    if ((height_start >= height_end)); then
        failure "Some of the healthy nodes did not advance, fail!"
    elif (($(bc <<<"$statesync_duration > $cup_interval_time - 30"))); then
        failure "State sync takes too much time and could not finish within the CUP interval."
    else
        success "All healthy nodes progressed, great success!"
    fi
}
# Iteration 0: builds the base state on the testnet, then kills the replica
# on $statesync_node and moves its checkpoint to a backup directory so later
# iterations can restore from it (see icos_node_recover_base_checkpoint.yml).
# Exits the whole script if the prep e2e run fails.
prepare_base_checkpoint_for_copy() {
    set_variables 0
    set_start_time 0
    start_e2e_test_driver 0
    # Clean up background jobs if the operator interrupts the run.
    trap 'echo "SIGINT received, killing all jobs"; jobs -p | xargs -rn1 pkill -P >/dev/null 2>&1; exit 1' INT
    if ! check_e2e_test_driver_result; then
        failure "Preparation for base checkpoint fails! There were no successful rejoin_test runs."
        exit $exit_code
    fi
    cd "$PROD_SRC/ansible"
    ansible-playbook -i "../env/$testnet/hosts" icos_node_stress.yml \
        --limit "$statesync_node" \
        -e ic_action=kill-replica 2>&1 \
        | tee -a "$experiment_subdir/scenario.log"
    # Move base checkpoint to backup directory.
    ansible-playbook -i "../env/$testnet/hosts" icos_node_backup_base_checkpoint.yml \
        --limit "$statesync_node" 2>&1 | tee -a "$experiment_subdir/scenario.log"
}
# --- Main driver ------------------------------------------------------------
# Prep the base checkpoint, then run up to $max_iterations rejoin tests with
# growing state; stop at the first failure and report the last success.
prepare_base_checkpoint_for_copy
all_passed=true
for iteration in $(seq 1 "$max_iterations"); do
    set_variables "$iteration"
    set_start_time "$iteration"
    kill_the_first_replica
    start_e2e_test_driver "$iteration"
    # Re-registered each iteration; harmless since the handler is identical.
    trap 'echo "SIGINT received, killing all jobs"; jobs -p | xargs -rn1 pkill -P >/dev/null 2>&1; exit 1' INT
    if ! check_e2e_test_driver_result; then
        all_passed=false
        break
    fi
    kill_the_last_group
    restart_the_first_replica
    wait_for_state_sync
    query_finalization_height_and_rate
    query_state_sync_duration_and_fetch_size
    # NOTE(review): check_state_sync ignores this argument.
    check_state_sync "$iteration"
    # check_state_sync's failure() sets exit_code non-zero.
    if [[ $((exit_code)) -ne 0 ]]; then
        all_passed=false
        break
    fi
    restart_the_last_group
done
# On full success, pretend the "first failed" iteration is max+1 so the last
# completed iteration is reported; otherwise report up to $iteration.
if [[ "$all_passed" == true ]]; then
    show_maximum_capacity $((max_iterations + 1))
else
    show_maximum_capacity "$iteration"
fi
finaltime="$(date '+%s')"
echo "Ending tests *** $(dateFromEpoch "$finaltime") (start time was $(dateFromEpoch "$calltime"))"
duration=$((finaltime - calltime))
echo "$((duration / 60)) minutes and $((duration % 60)) seconds elapsed in total in this test."
echo "The test was called with the following arguments"
echo "$@"
|
#!/bin/bash
# Triggers a Cloudera Manager "inspect hosts" command via the CM REST API.
# Connection settings (CURL_CMD, CM_ADMIN, CM_PASS, CM_SERVER, CM_PORT,
# HTTP_PROTOCOL, CM_VERSION) are provided by setenv.sh next to this script.
# BUG FIX: $0, $BASE_DIR and the URL expansions were unquoted and would
# break on paths/values containing spaces.
BASE_DIR=$(dirname "$0")
# shellcheck source=/dev/null
source "$BASE_DIR/setenv.sh"
# ${CURL_CMD} is intentionally left unquoted: it may carry extra arguments
# (e.g. "curl -k") that must undergo word splitting.
${CURL_CMD} -u "${CM_ADMIN}:${CM_PASS}" -i -X POST "${HTTP_PROTOCOL}://${CM_SERVER}:${CM_PORT}/api/${CM_VERSION}/cm/commands/inspectHosts"
|
<filename>src/main/java/com/epam/reportportal/extension/azure/event/EventHandlerFactory.java
package com.epam.reportportal.extension.azure.event;
import com.epam.reportportal.extension.azure.event.handler.EventHandler;
/**
 * Factory that resolves an {@link EventHandler} for a given event key.
 *
 * @param <T> type of the event payload processed by the produced handlers
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
public interface EventHandlerFactory<T> {
/**
 * Returns the handler registered under the given key.
 *
 * @param key identifier of the event type; semantics are defined by implementations
 * @return the event handler associated with {@code key}
 */
EventHandler<T> getEventHandler(String key);
}
|
from django.db import models
class GalleryImage(models.Model):
    """A single image displayed in the site gallery."""
    # Unique display title, also used as the string representation.
    title = models.CharField(max_length=128, unique=True)
    # Uploaded file, stored under MEDIA_ROOT/gallery/.
    image = models.ImageField(upload_to="gallery")
    # Optional caption shown alongside the image.
    description = models.CharField(max_length=256, blank=True)
    # Set once on insert.
    created = models.DateTimeField(auto_now_add=True)
    # Refreshed on every save.
    updated = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.title
|
//对整数的二进制表示取反(0 变 1 ,1 变 0)后,再转换为十进制表示,可以得到这个整数的补数。
//
//
// 例如,整数 5 的二进制表示是 "101" ,取反后得到 "010" ,再转回十进制表示得到补数 2 。
//
//
// 给你一个整数 num ,输出它的补数。
//
//
//
//
//
//
// 示例 1:
//
//
//输入:num = 5
//输出:2
//解释:5 的二进制表示为 101(没有前导零位),其补数为 010。所以你需要输出 2 。
//
//
// 示例 2:
//
//
//输入:num = 1
//输出:0
//解释:1 的二进制表示为 1(没有前导零位),其补数为 0。所以你需要输出 0 。
//
//
//
//
// 提示:
//
//
// 1 <= num < 2³¹
//
//
//
//
// 注意:本题与 1009 https://leetcode-cn.com/problems/complement-of-base-10-integer/ 相
//同
// Related Topics 位运算 👍 256 👎 0
package algorithm_400
import (
"math"
"testing"
)
// Table-driven test for findComplement: the complement of a positive integer
// flips every significant bit (no leading zeros), e.g. 5 (101) -> 2 (010).
func Test_findComplement(t *testing.T) {
	cases := []struct {
		name string
		num  int
		want int
	}{
		{"t1", 5, 2},
		{"t2", 1, 0},
		{"t3", math.MaxInt32 - 1, 1},
	}
	for _, tc := range cases {
		tc := tc // capture range variable for the subtest closure
		t.Run(tc.name, func(t *testing.T) {
			got := findComplement(tc.num)
			if got != tc.want {
				t.Errorf("findComplement() = %v, want %v", got, tc.want)
			}
		})
	}
}
|
import React, { useRef } from 'react';
import './SmartText.css';
// Key codes consumed by the editor's keydown handling (KeyboardEvent.keyCode).
const KEY_TAB = 9;
const KEY_ESCAPE = 27;
const KEY_DOWN = 40;
const KEY_PERIOD = 190;
// Concept-search endpoint; queried as GET {SEARCH_API_URL}?q=<term>.
const SEARCH_API_URL = 'http://localhost:3000/search';
/** Props for the SmartText editor component. */
export interface SmartTextProps {
  /** Optional DOM id for the editor element. */
  id?: string;
  /** Optional initial text value. */
  value?: string;
}
export const SmartText = (props: SmartTextProps) => {
const containerRef = useRef<HTMLUListElement>(null);
const editorRef = useRef<HTMLDivElement>(null);
let open = false;
let ignoreInput = false;
let search: string | null = null;
let matches: any[] = [];
let selectedIndex = -1;
const conceptCache: any = {};
const codeCache: any = {};
let codes: any[] = [];
let actions: any[] = [];
// Routes raw editor key codes to the matching behavior handler.
function onKeyDown(e: React.KeyboardEvent) {
  if (e.keyCode === KEY_TAB) {
    if (e.shiftKey) {
      handleShiftTabKey(e);
    } else {
      handleTabKey(e);
    }
  } else if (e.keyCode === KEY_ESCAPE) {
    handleEscapeKey(e);
  } else if (e.keyCode === KEY_DOWN) {
    handleDownArrow(e);
  } else if (e.keyCode === KEY_PERIOD && e.ctrlKey) {
    // Ctrl+. wraps the current selection in a section group.
    handleCreateGroup(e);
  }
}
/**
 * Fires on every editor change: extracts the phrase just typed before the
 * caret (bounded by newline/period/comma or at most 4 spaces back), and if
 * it is a new term of >= 2 chars, queries the search API for suggestions.
 */
function onTextChange() {
  if (ignoreInput) {
    // Ignore non-user changes
    return;
  }
  // Reset the search
  selectedIndex = -1;
  // Current selection
  const range = getRange();
  if (!range || range.startContainer !== range.endContainer) {
    // Ignore selections that span multiple elements
    closeSuggestions();
    return;
  }
  // Walk backwards until first of these conditions:
  // 1) Start of input
  // 2) New line
  // 3) Period
  // 4) Comma
  // 5) 4th space (up to 4 spaces)
  const allText = range.endContainer.textContent;
  if (!allText) {
    closeSuggestions();
    return;
  }
  const endIndex = range.endOffset;
  let startIndex = 0;
  let spaceCount = 0;
  for (let index = endIndex - 1; index >= 0; index--) {
    const c = allText.charAt(index);
    if (c === '\n' || c === '.' || c === ',') {
      startIndex = index + 1;
      break;
    }
    if (c === ' ' && ++spaceCount >= 4) {
      startIndex = index + 1;
      break;
    }
  }
  const search2 = allText.substring(startIndex, endIndex).trim();
  if (search2.length >= 2 && search2 !== search) {
    search = search2;
    // BUG FIX: the term was concatenated raw into the query string, so
    // characters like '&', '#' or '+' corrupted the request.
    const url = SEARCH_API_URL + '?q=' + encodeURIComponent(search2);
    const init = { method: 'GET' };
    fetch(url, init)
      .then(response => response.json())
      .then(handleSearchResults);
  } else {
    search = null;
    closeSuggestions();
  }
  // Update concepts in case of delete
  // TODO: Only do this on delete events?
  updateConcepts();
}
// Stores the search API response and refreshes the suggestion dropdown.
function handleSearchResults(response: any) {
  matches = response;
  updateAutoComplete();
}
// Positions the dropdown just below the caret (fixed positioning, offset by
// the Quill toolbar height when present) and shows it.
function openSuggestions() {
  const rangeBounds = getRangeBounds();
  if (!rangeBounds) {
    closeSuggestions();
    return;
  }
  const container = containerRef.current;
  if (container) {
    const toolbarEl = document.querySelector('.ql-toolbar');
    const toolbarHeight = toolbarEl ? toolbarEl.getBoundingClientRect().height : 0;
    container.style.left = (rangeBounds.x) + 'px';
    container.style.top = (toolbarHeight + rangeBounds.top + rangeBounds.height) + 'px';
    container.style.position = 'fixed';
    container.style.display = 'block';
    open = true;
  }
}
/** Returns the first selection range, or null when nothing is selected. */
function getRange(): Range | null {
  const selection = window.getSelection();
  // BUG FIX: getRangeAt(0) throws when the selection is empty
  // (rangeCount === 0); guard before dereferencing.
  if (!selection || selection.rangeCount === 0) {
    return null;
  }
  return selection.getRangeAt(0);
}
/** Screen rectangle of the current selection, or null when unavailable. */
function getRangeBounds() {
  const range = getRange();
  if (!range) {
    return null;
  }
  const rangeRects = range.getClientRects();
  if (!rangeRects || rangeRects.length === 0) {
    // Consistently return null (this path previously returned undefined).
    return null;
  }
  return rangeRects[0];
}
function closeSuggestions() {
const container = containerRef.current;
if (container) {
container.style.display = 'none';
}
open = false;
}
/**
 * Renders the suggestion list for the current search term, bolding the
 * matched tokens, then opens the dropdown (or closes it when empty).
 */
function updateAutoComplete() {
  if (!search) {
    return;
  }
  if (matches.length === 0) {
    closeSuggestions();
    return;
  }
  const searchTokens = search.split(/\s+/);
  const searchRegexes = searchTokens.map(token => new RegExp(escapeRegExp(token), 'gi'));
  let html = '';
  for (let i = 0; i < matches.length; i++) {
    const concept = matches[i].concept;
    const selected = i === selectedIndex ? ' class="selected"' : '';
    const style = concept.type === 'template' ? ' style="color:' + concept.color + '"' : '';
    let highlight = concept.name;
    for (let j = 0; j < searchRegexes.length; j++) {
      // BUG FIX: '$&' re-inserts the matched text itself, preserving the
      // concept's original casing (previously the lowercase/typed search
      // token was substituted in).
      highlight = highlight.replace(searchRegexes[j], '<b>$&</b>');
    }
    // BUG FIX: the markup previously emitted a stray '">' producing invalid
    // <li> tags. NOTE(review): concept.name/color come from the search API
    // and are injected as raw HTML — confirm the server sanitizes them.
    html += '<li' + selected + style + '>' + highlight + '</li>';
  }
  const container = containerRef.current;
  if (container) {
    container.innerHTML = html;
  }
  openSuggestions();
}
// Tab: accept the highlighted suggestion, or jump to the next placeholder.
function handleTabKey(e: React.KeyboardEvent) {
  e.preventDefault();
  e.stopPropagation();
  if (open) {
    applyReplacement();
  } else {
    selectNextPlaceholder();
  }
}
// Shift+Tab: jump back to the previous placeholder (no-op while suggesting).
function handleShiftTabKey(e: React.KeyboardEvent) {
  e.preventDefault();
  e.stopPropagation();
  if (open) {
    return;
  }
  selectPrevPlaceholder();
}
// Escape: dismiss the suggestion dropdown and clear the pending search.
function handleEscapeKey(e: React.KeyboardEvent) {
  if (!open) {
    return;
  }
  e.preventDefault();
  e.stopPropagation();
  search = null;
  closeSuggestions();
}
// ArrowDown: move keyboard focus from the editor into the suggestion list,
// selecting its first entry. Lets the event through when no list is open.
function handleDownArrow(e: React.KeyboardEvent) {
  if (!open) {
    return true;
  }
  e.preventDefault();
  e.stopPropagation();
  selectedIndex = 0;
  updateAutoComplete();
  const container = containerRef.current;
  if (container) {
    container.focus();
  }
}
// Ctrl+.: wraps the currently selected content in a <div class="section">.
// NOTE(review): document.execCommand is deprecated — works in current
// browsers but has no standardized replacement for this use; revisit.
function handleCreateGroup(e: React.KeyboardEvent) {
  e.preventDefault();
  e.stopPropagation();
  const selection = window.getSelection();
  if (!selection) {
    return;
  }
  // Clone (not extract) so the original stays intact until insertHTML runs.
  const oldContents = selection.getRangeAt(0).cloneContents();
  const tempDiv = document.createElement('div');
  tempDiv.appendChild(oldContents);
  const newContents = '<div class="section">' + tempDiv.innerHTML + '</div>';
  document.execCommand('insertHTML', false, newContents);
}
// Keyboard navigation while focus is inside the suggestion list:
// Escape closes it, ArrowUp/ArrowDown move the highlight (walking off the
// top returns focus to the editor), Tab/Enter accept the highlighted entry.
function handleContainerKey(e: React.KeyboardEvent) {
  if (!open) {
    return;
  }
  switch (e.key) {
    case 'Escape':
      e.preventDefault();
      e.stopPropagation();
      closeSuggestions();
      break;
    case 'ArrowUp':
      e.preventDefault();
      e.stopPropagation();
      selectedIndex--;
      if (selectedIndex < 0) {
        // Walked off the top: hand focus back to the editor.
        closeSuggestions();
        const editor = editorRef.current;
        if (editor) {
          editor.focus();
        }
      } else {
        updateAutoComplete();
      }
      break;
    case 'ArrowDown':
      e.preventDefault();
      e.stopPropagation();
      selectedIndex = Math.min(selectedIndex + 1, matches.length - 1);
      updateAutoComplete();
      break;
    case 'Tab':
    case 'Enter':
      e.preventDefault();
      e.stopPropagation();
      applyReplacement();
      break;
  }
}
/**
 * Replaces the just-typed search term in the editor with a styled "concept"
 * <span> for the currently highlighted suggestion, expands template content
 * when applicable, then refreshes the derived codes/actions panels.
 * NOTE(review): the early returns below leave ignoreInput stuck at true —
 * confirm whether that is intended.
 */
function applyReplacement() {
  // Start ignoring input
  ignoreInput = true;
  selectedIndex = Math.max(0, selectedIndex);
  const match = matches[selectedIndex];
  const concept = match.concept;
  const replacement = concept.name;
  // Add the concept to the local cache
  conceptCache[concept.id] = concept;
  // Get the current selection
  const selection = window.getSelection();
  if (!selection) {
    return;
  }
  const selectionRange = selection.getRangeAt(0);
  const selectionElement = selectionRange.endContainer;
  const selectionContent = selectionElement.textContent;
  if (!selectionContent) {
    return;
  }
  // Find the match term in the element
  const matchIndex = selectionContent.lastIndexOf(match.matchTerm, selectionRange.endOffset);
  const matchLength = match.matchTerm.length;
  // Select the search term
  const searchRange = new Range();
  searchRange.setStart(selectionElement, matchIndex);
  searchRange.setEnd(selectionElement, matchIndex + matchLength);
  selection.removeAllRanges();
  selection.addRange(searchRange);
  // Replace with the replacement text
  // NOTE(review): concept id/color/name are interpolated into raw HTML —
  // confirm the server sanitizes them.
  let replacementHtml = '<span class="concept"';
  replacementHtml += ' data-id="' + concept.id + '"';
  if (concept.color) {
    replacementHtml += ' style="color:' + concept.color + '"';
  }
  replacementHtml += '>';
  replacementHtml += replacement;
  replacementHtml += '</span> ';
  document.execCommand('insertHtml', false, replacementHtml);
  // Capture the cursor at this point
  const afterSelection = window.getSelection();
  if (afterSelection) {
    const afterRange = afterSelection.getRangeAt(0);
    if (concept.type === 'template') {
      // If this is a template, add the template content
      document.execCommand('insertText', false, concept.content);
    }
    // Restore the caret to just after the concept span.
    afterSelection.removeAllRanges();
    afterSelection.addRange(afterRange);
  }
  if (concept.type === 'template' && concept.content.indexOf('[') >= 0) {
    // If this is a template with placeholders, select the first placeholder
    selectNextPlaceholder();
  }
  updateConcepts();
  closeSuggestions();
  // Stop ignoring input
  ignoreInput = false;
}
/**
 * Diffs two SORTED arrays into "add" and "remove" element lists.
 * @param {Array} oldArray Previous sorted list.
 * @param {Array} newArray Current sorted list.
 * @return {Object} Diff object: "add" holds values only in newArray,
 *     "remove" holds values only in oldArray.
 */
function diffArrays(oldArray: any[], newArray: any[]) {
  const addList = [];
  const removeList = [];
  let oldIndex = 0;
  let newIndex = 0;
  // Merge-walk both sorted arrays in tandem.
  while (oldIndex < oldArray.length && newIndex < newArray.length) {
    const oldValue = oldArray[oldIndex];
    const newValue = newArray[newIndex];
    if (oldValue === newValue) {
      oldIndex++;
      newIndex++;
    } else if (oldValue < newValue) {
      removeList.push(oldValue);
      oldIndex++;
    } else {
      addList.push(newValue);
      newIndex++;
    }
  }
  // Drain whatever remains on either side.
  removeList.push(...oldArray.slice(oldIndex));
  addList.push(...newArray.slice(newIndex));
  return {
    add: addList,
    remove: removeList
  };
}
/**
 * Scans the text for all "concepts".
 * Collects the code ids and (for rxnorm concepts) action ids referenced by
 * every .concept span in the editor, then diffs against the previous state
 * and updates the codes/actions side panels.
 */
function updateConcepts() {
  const editor = editorRef.current;
  if (!editor) {
    return;
  }
  const conceptElements = editor.querySelectorAll('.concept');
  const newCodesSet = new Set();
  const newActionsSet = new Set();
  for (let i = 0; i < conceptElements.length; i++) {
    const conceptElement = conceptElements[i] as HTMLElement;
    const conceptId = conceptElement.dataset.id;
    if (!conceptId) {
      continue;
    }
    const concept = conceptCache[conceptId];
    // BUG FIX: a concept span can exist without a cache entry (e.g. content
    // pasted or restored from a save); skip unknown ids instead of crashing
    // on `concept.codes` below.
    if (!concept) {
      continue;
    }
    for (let j = 0; j < concept.codes.length; j++) {
      const code = concept.codes[j];
      codeCache[code.id] = code;
      newCodesSet.add(code.id);
    }
    if (concept.type === 'rxnorm') {
      newActionsSet.add(concept.id);
    }
  }
  // diffArrays requires sorted input on both sides.
  const oldCodes = codes;
  const newCodes = Array.from(newCodesSet);
  newCodes.sort();
  const oldActions = actions;
  const newActions = Array.from(newActionsSet);
  newActions.sort();
  const codeDiff = diffArrays(oldCodes, newCodes);
  updateCodes(codeDiff);
  codes = newCodes;
  const actionsDiff = diffArrays(oldActions, newActions);
  updateActions(actionsDiff);
  actions = newActions;
}
// Applies a diff ({add, remove} of code ids) to the .code-list panel,
// removing stale entries and appending new ones from codeCache.
function updateCodes(codesDiff: any) {
  const container = document.querySelector('.code-list');
  if (!container) {
    return;
  }
  const children = container.querySelectorAll('.code');
  // Remove deleted codes (iterate backwards so removals don't shift indices)
  for (let i = children.length - 1; i >= 0; i--) {
    for (let j = 0; j < codesDiff.remove.length; j++) {
      if ((children[i] as HTMLElement).dataset.id === codesDiff.remove[j]) {
        container.removeChild(children[i]);
        break;
      }
    }
  }
  // Add new codes
  for (let i = 0; i < codesDiff.add.length; i++) {
    const codeId = codesDiff.add[i];
    const code = codeCache[codeId];
    const el = document.createElement('div');
    el.dataset.id = codeId;
    el.className = 'code';
    // NOTE(review): code.name is inserted as raw HTML — confirm sanitized.
    el.innerHTML = '<strong>' + code.id + '</strong> ' + code.name;
    container.appendChild(el);
  }
}
// Applies a diff ({add, remove} of concept ids) to the .action-container
// panel, mirroring updateCodes but sourcing names from conceptCache.
function updateActions(actionsDiff: any) {
  const container = document.querySelector('.action-container');
  if (!container) {
    return;
  }
  const children = container.querySelectorAll('.action');
  // Remove deleted actions (backwards, same reason as above)
  for (let i = children.length - 1; i >= 0; i--) {
    for (let j = 0; j < actionsDiff.remove.length; j++) {
      if ((children[i] as HTMLElement).dataset.id === actionsDiff.remove[j]) {
        container.removeChild(children[i]);
        break;
      }
    }
  }
  // Add new actions
  for (let i = 0; i < actionsDiff.add.length; i++) {
    const actionId = actionsDiff.add[i];
    const action = conceptCache[actionId];
    const el = document.createElement('div');
    el.dataset.id = actionId;
    el.className = 'action';
    el.innerHTML = action.name;
    container.appendChild(el);
  }
}
// Moves the editor selection forward to the next "[placeholder]" after the
// caret, if any.
function selectNextPlaceholder() {
  const currentRange = getRange();
  if (!currentRange) {
    return;
  }
  const target = searchForNextPlaceholder(
    currentRange.endContainer as HTMLElement,
    currentRange.endOffset);
  if (!target) {
    return;
  }
  const selection = window.getSelection();
  if (!selection) {
    return;
  }
  selection.removeAllRanges();
  selection.addRange(target);
}
// Moves the editor selection backward to the previous "[placeholder]" before
// the caret, if any.
function selectPrevPlaceholder() {
  const currentRange = getRange();
  if (!currentRange) {
    return;
  }
  const target = searchForPrevPlaceholder(
    currentRange.startContainer as HTMLElement,
    currentRange.startOffset);
  if (!target) {
    return;
  }
  const selection = window.getSelection();
  if (!selection) {
    return;
  }
  selection.removeAllRanges();
  selection.addRange(target);
}
/**
 * Walks the DOM forward (depth-first) from (startElement, startOffset)
 * looking for the next "[...]" placeholder token inside a text node.
 * Returns a Range covering the token — through the closing "]" when
 * present, otherwise to the end of the text node — or null when no
 * placeholder exists before the end of the editor.
 */
function searchForNextPlaceholder(startElement: HTMLElement, startOffset: number) {
  let element = startElement;
  let offset = startOffset;
  while (element) {
    if (element.nodeName === '#text') {
      const textContent = element.textContent;
      if (textContent) {
        const startIndex = textContent.indexOf('[', offset);
        if (startIndex >= 0) {
          // An unterminated "[" selects through the end of the text node.
          const closeBracketIndex = textContent.indexOf(']', startIndex);
          const endIndex = closeBracketIndex >= 0 ? closeBracketIndex + 1 : textContent.length;
          const resultRange = new Range();
          resultRange.setStart(element, startIndex);
          resultRange.setEnd(element, endIndex);
          return resultRange;
        }
      }
    }
    // Not found in this node — advance to the next node in document order.
    if (element.childNodes && offset < element.childNodes.length) {
      // Descend into the child at `offset`.
      element = element.childNodes[offset] as HTMLElement;
      offset = 0;
    } else if (element === editorRef.current) {
      // Back at the editor root with no children left — no match.
      return null;
    } else if (element.parentNode) {
      // Subtree exhausted — resume at the next sibling via the parent.
      const siblings = element.parentNode.childNodes;
      const elementIndex = indexOfNode(siblings, element);
      if (elementIndex === -1) {
        throw 'Element not found in parent list?';
      }
      element = element.parentNode as HTMLElement;
      offset = elementIndex + 1;
    } else {
      // Detached node with no parent — should not happen inside the editor.
      return null;
    }
  }
  return null;
}
/**
 * Walks the DOM backward (reverse depth-first) from (startElement,
 * startOffset) looking for the previous "[...]" placeholder token inside a
 * text node. Returns a Range covering the token, or null when no complete
 * placeholder exists before the start of the editor.
 */
function searchForPrevPlaceholder(startElement: HTMLElement, startOffset: number) {
  let element = startElement;
  let offset = startOffset;
  while (element) {
    if (element.nodeName === '#text') {
      const textContent = element.textContent;
      if (textContent) {
        // Look for a "]" at/before the offset, then its matching "[".
        const endIndex = textContent.lastIndexOf(']', offset);
        if (endIndex >= 0) {
          const startIndex = textContent.lastIndexOf('[', endIndex);
          if (startIndex >= 0) {
            const resultRange = new Range();
            resultRange.setStart(element, startIndex);
            resultRange.setEnd(element, endIndex + 1);
            return resultRange;
          }
        }
      }
    }
    // Not found in this node — step to the previous node in document order.
    if (element.childNodes && offset >= 0 && offset < element.childNodes.length) {
      // Descend into the child at `offset`, positioned at its end.
      element = element.childNodes[offset] as HTMLElement;
      if (element.nodeName === '#text') {
        offset = element.textContent ? element.textContent.length : 0;
      } else if (element.childNodes) {
        offset = element.childNodes.length - 1;
      } else {
        offset = 0;
      }
    } else if (element === editorRef.current) {
      // Back at the editor root — no match before the caret.
      return null;
    } else if (element.parentNode) {
      // Subtree exhausted — resume at the previous sibling via the parent.
      const siblings = element.parentNode.childNodes;
      const elementIndex = indexOfNode(siblings, element);
      if (elementIndex === -1) {
        throw 'Element not found in parent list?';
      }
      element = element.parentNode as HTMLElement;
      offset = elementIndex - 1;
    } else {
      // Detached node with no parent — should not happen inside the editor.
      return null;
    }
  }
  return null;
}
/**
 * Returns the index of `node` within `nodeList`, or -1 when absent.
 * NodeList has no own indexOf, so the Array prototype method is borrowed.
 */
function indexOfNode(nodeList: NodeList, node: Node) {
  return Array.prototype.indexOf.call(nodeList, node);
}
/**
 * Escapes regex metacharacters (and whitespace) in `text` so it can be
 * embedded verbatim inside a RegExp pattern.
 */
function escapeRegExp(text: string) {
  return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, (match) => `\\${match}`);
}
return (
<div className="medplum-smarttext">
<div className="medplum-smartext-editor-container">
<div
className="medplum-smarttext-editor"
ref={editorRef}
contentEditable={true}
id={props.id}
defaultValue={props.value || ''}
onKeyDown={onKeyDown}
onInput={onTextChange}
></div>
</div>
<ul
className="medplum-smarttext-completions"
ref={containerRef}
tabIndex={-1}
onKeyDown={handleContainerKey}
>
</ul>
<div className="code-container">
<div className="code-header">ICD-10 SUGGESTIONS</div>
<div className="code-list">
</div>
</div>
<div className="clear"></div>
<div className="action-container"></div>
</div>
);
};
|
public class Order {

    // Priority codes. NOTE(review): the original referenced these constants
    // without defining them anywhere in this file (it did not compile); the
    // concrete values below are assumed — confirm against the original
    // definitions if they live elsewhere.
    public static final int ORDER_LATE = 1;
    public static final int ORDER_VERY_LATE = 2;
    public static final int ORDER_LATEST = 3;

    /** Raw priority code supplied at construction time. */
    private final int priority;

    public Order(int priority) {
        this.priority = priority;
    }

    /**
     * Maps the numeric priority code to a human-readable label.
     *
     * @return "Late", "Very Late", "Latest", or "Unknown Priority" for any
     *         unrecognized code
     */
    public String getPriorityLevel() {
        switch (priority) {
            case ORDER_LATE:
                return "Late";
            case ORDER_VERY_LATE:
                return "Very Late";
            case ORDER_LATEST:
                return "Latest";
            default:
                return "Unknown Priority";
        }
    }

    /** Small demo printing a label for each priority code. */
    public static void main(String[] args) {
        Order order1 = new Order(ORDER_LATE);
        Order order2 = new Order(ORDER_VERY_LATE);
        Order order3 = new Order(ORDER_LATEST);
        Order order4 = new Order(-500); // Unknown priority
        System.out.println("Order 1 Priority: " + order1.getPriorityLevel());
        System.out.println("Order 2 Priority: " + order2.getPriorityLevel());
        System.out.println("Order 3 Priority: " + order3.getPriorityLevel());
        System.out.println("Order 4 Priority: " + order4.getPriorityLevel());
    }
}
<gh_stars>1-10
import { curry } from 'ramda';
/**
 * Curried validator: returns `message` when `value` parses to a number that
 * is NOT strictly greater than `bound`; returns undefined (valid) otherwise.
 * NOTE(review): `undefined` values pass validation; non-numeric strings
 * coerce to NaN, and NaN <= bound is false, so they also pass — confirm
 * that is intended.
 */
export const moreThan = curry(
  (message: string, bound: number, value: string | undefined) => {
    if (value !== undefined && +value <= bound) {
      return message;
    }
  });
|
// Copyright 2006, 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal.transform;
import org.apache.tapestry5.annotations.Retain;
import org.apache.tapestry5.model.MutableComponentModel;
import org.apache.tapestry5.plastic.PlasticClass;
import org.apache.tapestry5.plastic.PlasticField;
import org.apache.tapestry5.services.transform.ComponentClassTransformWorker2;
import org.apache.tapestry5.services.transform.TransformationSupport;
/**
 * Claims every field carrying the {@link org.apache.tapestry5.annotations.Retain}
 * annotation so that no further transformation is applied to it. Retain has been
 * deprecated since Tapestry 5.2 and is likely to be removed in a future release.
 */
public final class RetainWorker implements ComponentClassTransformWorker2
{
    /**
     * Claims each {@link org.apache.tapestry5.annotations.Retain}-annotated field,
     * using the annotation class itself as the claim tag (rather than an annotation
     * instance, which would force the annotation to be instantiated).
     */
    public void transform(PlasticClass plasticClass, TransformationSupport support, MutableComponentModel model)
    {
        for (PlasticField retainedField : plasticClass.getFieldsWithAnnotation(Retain.class))
        {
            retainedField.claim(Retain.class);
        }
    }
}
|
import pytest
from pathlib import Path
import asyncio
async def open_async(file_path: str, mode: str) -> bytes:
    """Read the entire contents of *file_path* without blocking the event loop.

    The original body used ``async with open(...)``, but the built-in
    ``open`` does not implement the async context-manager protocol, so it
    raised ``TypeError`` at runtime. The file is read synchronously inside
    the default executor instead.

    :param file_path: path of the file to read
    :param mode: mode passed straight to ``open`` (e.g. ``'rb'``)
    :return: the file's full contents
    """
    loop = asyncio.get_running_loop()

    def _read():
        # Plain blocking read, confined to the executor thread.
        with open(file_path, mode) as f:
            return f.read()

    return await loop.run_in_executor(None, _read)
@pytest.mark.asyncio
async def test_read(testdatafile, testdata):
    """open_async should return the file's full contents as bytes.

    The original treated the ``open_async`` coroutine as an async context
    manager (``async with open_async(...) as f``), awaited the yielded
    object, and poked a nonexistent ``f._fileobj`` attribute — none of
    which matches the function's actual interface (a coroutine returning
    the data directly).
    """
    file_path = Path(str(testdatafile))
    data = await open_async(str(file_path), 'rb')
    assert data == testdata
def strip_whitespace_from_data(data):
    """Strip leading/trailing whitespace from every string value, in place.

    Non-string values are left untouched. The (mutated) input list is also
    returned for convenient chaining.
    """
    for record in data:
        for key, value in record.items():
            if isinstance(value, str):
                record[key] = value.strip()
    return data
# Demo: normalize the sample records in place and show the cleaned result.
data = [{'name': 'John Doe ', 'age': 34}, {'name': 'Tim Smith ', 'age': 14}]
data = strip_whitespace_from_data(data)
print(data)
/*
Siesta 4.2.2
Copyright(c) 2009-2016 Bryntum AB
http://bryntum.com/contact
http://bryntum.com/products/siesta/license
*/
// Ext JS-aware DOM query finder. Extends the generic DomQueryFinder with
// Ext JS-specific identifier strategies, registered in descending priority
// order (a higher priority identifier is preferred when building a query).
Class('Ariadne.ExtJSDomQueryFinder', {
    isa : Ariadne.DomQueryFinder,
    has : {
    },
    methods : {
        // Register identifiers from most to least specific:
        // element id > CSS class > text contents > attribute value >
        // tag name > nth-of-type, plus a direct-child combinator helper.
        initIdentifiers : function () {
            this.addIdentifier(new Ariadne.ExtJSDomQueryFinder.Identifier.Id({
                shouldIgnoreDomElementId : this.shouldIgnoreDomElementId,
                uniqueDomNodeProperty : this.uniqueDomNodeProperty,
                finder : this,
                priority : 1000000
            }))
            this.addIdentifier(new Ariadne.ExtJSDomQueryFinder.Identifier.CssClass({
                finder : this,
                priority : 100000
            }))
            this.addIdentifier(new Ariadne.DomQueryFinder.Identifier.Contains({
                finder : this,
                priority : 10000
            }))
            this.addIdentifier(new Ariadne.ExtJSDomQueryFinder.Identifier.AttributeValue({
                finder : this,
                priority : 1000
            }))
            this.addIdentifier(new Ariadne.DomQueryFinder.Identifier.TagName({
                finder : this,
                priority : 100
            }))
            this.addIdentifier(new Ariadne.DomQueryFinder.Identifier.NthOfType({
                finder : this,
                priority : 10
            }))
            this.directChildIdentifier = new Ariadne.DomQueryFinder.Identifier.DirectChild({ finder : this })
        },
        // Accepts either a raw DOM node or an Ext.Element-style wrapper;
        // unwraps `target.dom` before delegating to the superclass.
        findQueries : function (target, root, options) {
            if (target && target.dom && target.dom.tagName) target = target.dom
            return this.SUPER(target, root, options)
        }
    }
});
|
#!/usr/bin/env bash
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Devstack-gate knobs: bring up Neutron and Heat, install test-only
# dependencies and Tempest (but run no Tempest tests by default), and keep
# the generated localrc around for debugging.
export DEVSTACK_GATE_NEUTRON=1
export DEVSTACK_GATE_HEAT=1
export DEVSTACK_GATE_INSTALL_TESTONLY=1
export DEVSTACK_GATE_TEMPEST=1
export DEVSTACK_GATE_TEMPEST_NOTESTS=1
export KEEP_LOCALRC=1
# if [ -z ${DEVSTACK_LOCAL_CONFIG+x} ]; then
# DEVSTACK_LOCAL_CONFIG="enable_plugin vitrage git://git.openstack.org/openstack/vitrage"
# fi
# Enable the devstack plugins Vitrage depends on, and disable the legacy
# ceilometer alarm services (replaced by aodh) and nova-network.
DEVSTACK_LOCAL_CONFIG+=$'\nenable_plugin heat git://git.openstack.org/openstack/heat'
DEVSTACK_LOCAL_CONFIG+=$'\nenable_plugin ceilometer git://git.openstack.org/openstack/ceilometer'
DEVSTACK_LOCAL_CONFIG+=$'\nenable_plugin aodh git://git.openstack.org/openstack/aodh'
DEVSTACK_LOCAL_CONFIG+=$'\ndisable_service ceilometer-alarm-evaluator,ceilometer-alarm-notifier'
DEVSTACK_LOCAL_CONFIG+=$'\ndisable_service n-net'
# Route every service's notifications onto the vitrage_notifications topic
# as well, so the Vitrage datasources can consume them. The heredoc below
# is appended verbatim to the generated local.conf.
DEVSTACK_LOCAL_CONFIG+="$(cat <<EOF
[[post-config|\$NOVA_CONF]]
[DEFAULT]
notification_topics = notifications,vitrage_notifications
notification_driver = messagingv2
[[post-config|\$NEUTRON_CONF]]
[DEFAULT]
notification_topics = notifications,vitrage_notifications
notification_driver = messagingv2
[[post-config|\$CINDER_CONF]]
[DEFAULT]
notification_topics = notifications,vitrage_notifications
notification_driver = messagingv2
[[post-config|\$HEAT_CONF]]
[DEFAULT]
notification_topics = notifications,vitrage_notifications
notification_driver = messagingv2
policy_file = /etc/heat/policy.json-tempest
[[post-config|\$AODH_CONF]]
[oslo_messaging_notifications]
driver = messagingv2
topics = notifications, vitrage_notifications
[[post-config|\$VITRAGE_CONF]]
[static_physical]
changes_interval = 5
[datasources]
snapshots_interval = 120
EOF
)"
export DEVSTACK_LOCAL_CONFIG
# Default to just tempest when the job did not preset ENABLED_SERVICES.
if [ -z ${ENABLED_SERVICES+x} ]; then
    ENABLED_SERVICES=tempest
fi
# Neutron, Heat, Vitrage, Keystone, Aodh and Ceilometer services.
# Entries must be comma-separated: the original used spaces between the
# heat services (breaking the list) and misspelled aodh-api as "aodi-api"
# (which it then appended again, correctly spelled, further down).
ENABLED_SERVICES+=,q-svc,q-dhcp,q-meta,q-agt,q-l3
ENABLED_SERVICES+=,h-eng,h-api,h-api-cfn,h-api-cw
ENABLED_SERVICES+=,vitrage-api,vitrage-graph
ENABLED_SERVICES+=,key,aodh-api,aodh-notifier,aodh-evaluator
ENABLED_SERVICES+=,ceilometer-alarm-evaluator,ceilometer-alarm-notifier
ENABLED_SERVICES+=,ceilometer-api
export ENABLED_SERVICES
# Hand off to the standard devstack-gate entry point.
GATE_DEST=$BASE/new
DEVSTACK_PATH=$GATE_DEST/devstack
$GATE_DEST/devstack-gate/devstack-vm-gate.sh
|
import { Middleware } from '../middleware';
import { Context } from '../context';
import { Component, Autowired } from '../../common';
import { ChannelStrategy, CHANNEL_MIDDLEWARE_PRIORITY } from './channel-protocol';
@Component(Middleware)
export class ChannelMiddleware implements Middleware {

    // Default strategy injected by the container; used only when the
    // incoming context does not already carry one.
    @Autowired(ChannelStrategy)
    protected readonly channelStrategy: ChannelStrategy;

    /**
     * Ensures every context has a channel strategy before the rest of the
     * pipeline runs, and publishes it through the Context static holder.
     */
    async handle(ctx: Context, next: () => Promise<void>): Promise<void> {
        if (!ctx.channelStrategy) {
            ctx.channelStrategy = this.channelStrategy;
        }
        // NOTE(review): "setChannalStrategy" is misspelled, but the name must
        // match the Context API declared elsewhere — fix it there first.
        Context.setChannalStrategy(ctx.channelStrategy);
        await next();
    }

    // Runs at the channel slot of the middleware chain.
    readonly priority = CHANNEL_MIDDLEWARE_PRIORITY;
}
|
window.onload = () => {
  const elfCode = {
    // Append `value` as a new <li> to `list`.
    appendToList: (list, value) => {
      const li = document.createElement("li");
      li.appendChild(document.createTextNode(value));
      list.appendChild(li);
    }
  };

  // --- Sorted numbers demo ---
  const numbersAction = document.getElementById('numbers-action');
  const numbersDisplay = document.getElementById('numbers-display');
  const arrays = {
    numbers: [15,14,13,12,11,10,9,8,7,6,5]
  };
  arrays.numbers.sort((a,b) => a - b);
  numbersAction.onclick = () => {
    for (let number of arrays.numbers) {
      console.log(number);
      elfCode.appendToList(numbersDisplay, number);
    }
  };

  // --- Languages demo ---
  // BUG FIX: these two handlers were previously created INSIDE the numbers
  // click handler (a misplaced closing brace), so they were only attached
  // after "numbers" was clicked at least once.
  const languageAction = document.getElementById('language-action');
  const languageDisplay = document.getElementById('language-display');
  const array = {
    language: ['JavaScript', 'HTML', 'CSS', 'C#', 'Python', 'C/C++'],
  }
  languageAction.onclick = () => {
    for (let language of array.language) {
      console.log(language);
      elfCode.appendToList(languageDisplay, language);
    }
  };

  // --- Language popularity demo ---
  const languagePopularityAction = document.getElementById('languagePopularity-action');
  const languagePopularityDisplay = document.getElementById('languagePopularity-display');
  const array1 = {
    languagePopularity: ['JavaScript: Rank 1', 'HTML: Rank 2', 'CSS: Rank 3', 'Python: Rank 4', 'C#: Rank 5', 'C/C++: Rank 6']
  }
  languagePopularityAction.onclick = () => {
    for (let languagePopularity of array1.languagePopularity) {
      console.log(languagePopularity);
      elfCode.appendToList(languagePopularityDisplay, languagePopularity);
    }
  };
};
|
<reponame>Astrid-Caravan/caravan-anima-expected
// Cached references to the calculator's form controls and the result list.
const rarityInput = document.getElementById(`rarity`);
const probablyHeroInput = document.getElementById(`probably-hero`);
const probablyHeroText = document.getElementById(`probably-hero-text`);
const probablyAnimaInput = document.getElementById(`probably-anima`);
const probablyAnimaText = document.getElementById(`probably-anima-text`);
const animaCountInput = document.getElementById(`anima-count`);
const animaCountText = document.getElementById(`anima-count-text`);
const gachaCountInput = document.getElementById(`gacha-count`);
const gachaCountText = document.getElementById(`gacha-count-text`);
const resultList = document.getElementById(`result`);
// Build a result <li> holding a title span followed by a body span.
const createResult = (title, body) => {
  const item = document.createElement(`li`);
  item.classList.add(`list-group-item`, `result-item`);
  const titleElement = document.createElement(`span`);
  titleElement.innerText = title;
  const bodyElement = document.createElement(`span`);
  bodyElement.innerText = body;
  item.append(titleElement, bodyElement);
  return item;
};
// Recomputes and renders the gacha expectations from the current inputs.
const calc = () => {
  const ph = Number(probablyHeroInput.value) / 100.0;   // hero drop probability per pull
  const phc = Number(rarityInput.value);                // anima granted when the hero first drops
  const pa = Number(probablyAnimaInput.value) / 100.0;  // plain anima drop probability per pull
  const pac = Number(animaCountInput.value);            // anima granted per plain drop
  const count = Number(gachaCountInput.value);          // number of pulls

  // Clear previously rendered results.
  while(resultList.firstChild) {
    resultList.firstChild.remove();
  }

  // Probability of never drawing the hero across all pulls.
  const fh = Math.pow(1 - ph, count);
  resultList.appendChild(
    createResult(
      `本体を引ける確率`,
      `${(1 - fh) * 100} %`
    )
  );

  // Sum of the expected anima gained on each pull.
  // BUG FIX: the original reduce had no initial value, so the first pull's
  // term was replaced by the literal 1 (and an empty array threw).
  const expected = Array.from({ length: count }, (_, i) => i + 1).reduce((acc, c) => {
    // Probability the hero has NOT dropped in the previous c-1 pulls.
    const f = Math.pow(1 - ph, c - 1);
    // Hero already owned: a duplicate yields half the unlock amount.
    const s1 = (ph - (ph * f));
    // Hero not yet owned: a drop yields the full unlock amount.
    const s2 = (ph - (ph * (1 - f)));
    return acc + (pa * pac) + (s1 * (phc / 2)) + (s2 * phc);
  }, 0);
  resultList.appendChild(
    createResult(
      `入手できるアニマの期待値合計`,
      `${expected} 個`
    )
  );
};
// Keep each range slider and its companion text input in sync, and
// recalculate whenever either side changes.
document.querySelectorAll(`input[data-bind-control]`).forEach(range => {
  const textInput = document.getElementById(range.getAttribute(`data-bind-control`));
  if (textInput) {
    range.addEventListener(`input`, function () {
      textInput.value = this.value;
    });
    range.addEventListener(`change`, function () {
      calc();
    });
    textInput.addEventListener(`change`, function () {
      if (isNaN(this.value)) {
        // Not a number — reset the text box from the slider.
        this.value = range.value;
      } else {
        range.value = this.value;
      }
      calc();
    });
  }
});
rarityInput.addEventListener(`change`, calc);
// Render the initial results on page load.
calc();
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vertx.java.core.http;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.handler.codec.http.HttpChunkTrailer;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.vertx.java.core.CompletionHandler;
import org.vertx.java.core.Future;
import org.vertx.java.core.Handler;
import org.vertx.java.core.buffer.Buffer;
import org.vertx.java.core.http.ws.Handshake;
import org.vertx.java.core.http.ws.WebSocketFrame;
import org.vertx.java.core.http.ws.hybi00.Handshake00;
import org.vertx.java.core.http.ws.hybi08.Handshake08;
import org.vertx.java.core.http.ws.hybi17.Handshake17;
import org.vertx.java.core.logging.Logger;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
 * Client-side HTTP connection. Tracks pipelined requests, dispatches
 * responses/chunks back to the originating {@link HttpClientRequest}, and
 * can be upgraded to a WebSocket via {@link #toWebSocket}.
 */
class ClientConnection extends AbstractConnection {
  private static final Logger log = Logger.getLogger(ClientConnection.class);

  ClientConnection(HttpClient client, Channel channel, String hostHeader, boolean ssl,
                   boolean keepAlive,
                   long contextID, Thread th) {
    super(channel, contextID, th);
    this.client = client;
    this.hostHeader = hostHeader;
    this.ssl = ssl;
    this.keepAlive = keepAlive;
  }

  final HttpClient client;
  final String hostHeader;
  final boolean keepAlive;
  private final boolean ssl;

  private volatile HttpClientRequest currentRequest;
  // Requests can be pipelined so we need a queue to keep track of requests
  private final Queue<HttpClientRequest> requests = new ConcurrentLinkedQueue<HttpClientRequest>();
  private volatile HttpClientResponse currentResponse;
  private WebSocket ws;

  /**
   * Upgrades this connection to a WebSocket using the requested protocol
   * version, replacing the HTTP codec handlers on success and handing the
   * new WebSocket to {@code wsConnect}.
   */
  void toWebSocket(final String uri,
                   final Handler<WebSocket> wsConnect,
                   final WebSocketVersion wsVersion) {
    if (ws != null) {
      throw new IllegalStateException("Already websocket");
    }

    try {
      final Handshake shake;
      if (wsVersion == WebSocketVersion.HYBI_00) {
        shake = new Handshake00();
      } else if (wsVersion == WebSocketVersion.HYBI_08) {
        shake = new Handshake08();
      } else if (wsVersion == WebSocketVersion.HYBI_17) {
        shake = new Handshake17();
      } else {
        throw new IllegalArgumentException("Invalid version");
      }
      // Create a raw request
      HttpClientRequest req = new HttpClientRequest(client, "GET", uri, new Handler<HttpClientResponse>() {
        public void handle(HttpClientResponse resp) {
          try {
            shake.onComplete(resp, new CompletionHandler<Void>() {
              public void handle(Future<Void> fut) {
                if (fut.succeeded()) {
                  //We upgraded ok
                  ChannelPipeline p = channel.getPipeline();
                  p.replace("decoder", "wsdecoder", shake.getDecoder());
                  p.replace("encoder", "wsencoder", shake.getEncoder(false));
                  ws = new WebSocket(uri, ClientConnection.this);
                  wsConnect.handle(ws);
                } else {
                  handleException(fut.exception());
                }
              }
            });
          } catch (Exception e) {
            handleException(e);
          }
        }
      }, contextID, Thread.currentThread(), this);
      // BUG FIX: the scheme was inverted — an SSL connection must advertise
      // an https:// URL, a plain one http://.
      shake.fillInRequest(req, (ssl ? "https://" : "http://") + hostHeader);
      req.end();
    } catch (Exception e) {
      handleException(e);
    }
  }

  @Override
  public void close() {
//    if (ws != null) {
//      //Need to send 9 zeros to represent a close
//      byte[] bytes = new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0};  // Just to be explicit
//      ChannelFuture future = channel.write(ChannelBuffers.copiedBuffer(bytes));
//      future.addListener(ChannelFutureListener.CLOSE);  // Close after it's written
//    }
    // Returns the connection to the pool rather than closing the socket.
    client.returnConnection(this);
  }

  /** Really closes the underlying channel (bypassing the pool). */
  void internalClose() {
    //channel.write(ChannelBuffers.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
    channel.close();
  }

  //TODO - combine these with same in ServerConnection and NetSocket
  void handleInterestedOpsChanged() {
    try {
      if (currentRequest != null) {
        if ((channel.getInterestOps() & Channel.OP_WRITE) == Channel.OP_WRITE) {
          setContextID();
          currentRequest.handleInterestedOpsChanged();
        }
      }
    } catch (Throwable t) {
      handleHandlerException(t);
    }
  }

  void handleResponse(HttpResponse resp) {
    HttpClientRequest req;
    if (resp.getStatus().getCode() == 100) {
      //If we get a 100 continue it will be followed by the real response later, so we don't remove it yet
      req = requests.peek();
    } else {
      req = requests.poll();
    }
    if (req == null) {
      throw new IllegalStateException("No response handler");
    }
    setContextID();
    HttpClientResponse nResp = new HttpClientResponse(this, resp, req.th);
    currentResponse = nResp;
    req.handleResponse(nResp);
  }

  void handleResponseChunk(Buffer buff) {
    setContextID();
    try {
      currentResponse.handleChunk(buff);
    } catch (Throwable t) {
      handleHandlerException(t);
    }
  }

  void handleResponseEnd() {
    handleResponseEnd(null);
  }

  void handleResponseEnd(HttpChunkTrailer trailer) {
    try {
      currentResponse.handleEnd(trailer);
    } catch (Throwable t) {
      handleHandlerException(t);
    }
    if (!keepAlive) {
      close();
    }
  }

  void handleWsFrame(WebSocketFrame frame) {
    if (ws != null) {
      ws.handleFrame(frame);
    }
  }

  protected void handleClosed() {
    super.handleClosed();
    if (ws != null) {
      ws.handleClosed();
    }
  }

  protected long getContextID() {
    return super.getContextID();
  }

  protected void handleException(Exception e) {
    super.handleException(e);
    // Fan the exception out to whichever request/response is in flight.
    if (currentRequest != null) {
      currentRequest.handleException(e);
    }
    if (currentResponse != null) {
      currentResponse.handleException(e);
    }
  }

  protected void addFuture(Handler<Void> doneHandler, ChannelFuture future) {
    super.addFuture(doneHandler, future);
  }

  ChannelFuture write(Object obj) {
    return channel.write(obj);
  }

  /** Registers {@code req} as the request currently being written; only one at a time. */
  void setCurrentRequest(HttpClientRequest req) {
    if (currentRequest != null) {
      throw new IllegalStateException("Connection is already writing a request");
    }
    this.currentRequest = req;
    this.requests.add(req);
  }

  /** Marks the in-flight request as fully written and recycles the connection if keep-alive. */
  void endRequest() {
    if (currentRequest == null) {
      throw new IllegalStateException("No write in progress");
    }
    currentRequest = null;

    if (keepAlive) {
      //Close just returns connection to the pool
      close();
    } else {
      //The connection gets closed after the response is received
    }
  }
}
|
import collections
def find_shortest_path(maze):
    """Breadth-first search for a shortest path through *maze*.

    Returns the list of (x, y) cells from the start to the first goal cell
    found, or an empty list when the goal is unreachable. Relies on the
    module-level helpers ``find_start``, ``is_goal`` and ``all_neighbors``.
    """
    start = find_start(maze)
    frontier = collections.deque([[start]])
    visited = {start}
    while frontier:
        path = frontier.popleft()
        x, y = path[-1]
        if is_goal(maze, x, y):
            return path
        for neighbor in all_neighbors(maze, x, y):
            if neighbor in visited:
                continue
            visited.add(neighbor)
            frontier.append(path + [neighbor])
    return []
<reponame>getkuby/kube-dsl
# Lazily-loaded DSL namespaces for the Kubernetes admissionregistration
# API group; each version module is required on first constant reference.
module KubeDSL::DSL::Admissionregistration
  autoload :V1, 'kube-dsl/dsl/admissionregistration/v1'
  autoload :V1beta1, 'kube-dsl/dsl/admissionregistration/v1beta1'
end
|
<gh_stars>0
# Copyright 2020 The AutoKeras Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import kerastuner
import pytest
import tensorflow as tf
import autokeras as ak
from autokeras.tuners import task_specific
@pytest.fixture
def clear_session():
    # Reset Keras global state before and after each test so layers/models
    # built in one test cannot leak names or weights into another.
    tf.keras.backend.clear_session()
    yield
    tf.keras.backend.clear_session()
def _assert_init_hp_matches_model(clf, init_hp):
    """Build *clf*'s model from *init_hp* and assert the tuner registers
    exactly the same set of hyperparameter names."""
    hp = kerastuner.HyperParameters()
    hp.values = copy.copy(init_hp)
    clf.tuner.hypermodel.build(hp)
    assert set(init_hp.keys()) == set(hp._hps.keys())


def _image_classifier(tmp_path):
    """ImageClassifier wired for 32x32x3 inputs and 10 output classes."""
    clf = ak.ImageClassifier(directory=tmp_path)
    clf.inputs[0].shape = (32, 32, 3)
    clf.outputs[0].in_blocks[0].output_shape = (10,)
    return clf


def _text_classifier(tmp_path):
    """TextClassifier wired for a single string input and 10 output classes."""
    clf = ak.TextClassifier(directory=tmp_path)
    clf.inputs[0].shape = (1,)
    clf.outputs[0].in_blocks[0].output_shape = (10,)
    return clf


def _structured_data_model(cls, tmp_path):
    """Structured-data model (classifier or regressor) with two numerical columns."""
    clf = cls(
        directory=tmp_path,
        column_names=["a", "b"],
        column_types={"a": "numerical", "b": "numerical"},
    )
    clf.inputs[0].shape = (2,)
    clf.outputs[0].in_blocks[0].output_shape = (10,)
    return clf


def test_img_clf_init_hp0_equals_hp_of_a_model(clear_session, tmp_path):
    _assert_init_hp_matches_model(
        _image_classifier(tmp_path), task_specific.IMAGE_CLASSIFIER[0]
    )


def test_img_clf_init_hp1_equals_hp_of_a_model(clear_session, tmp_path):
    _assert_init_hp_matches_model(
        _image_classifier(tmp_path), task_specific.IMAGE_CLASSIFIER[1]
    )


def test_img_clf_init_hp2_equals_hp_of_a_model(clear_session, tmp_path):
    _assert_init_hp_matches_model(
        _image_classifier(tmp_path), task_specific.IMAGE_CLASSIFIER[2]
    )


def test_txt_clf_init_hp0_equals_hp_of_a_model(clear_session, tmp_path):
    _assert_init_hp_matches_model(
        _text_classifier(tmp_path), task_specific.TEXT_CLASSIFIER[0]
    )


def test_txt_clf_init_hp1_equals_hp_of_a_model(clear_session, tmp_path):
    _assert_init_hp_matches_model(
        _text_classifier(tmp_path), task_specific.TEXT_CLASSIFIER[1]
    )


def test_sd_clf_init_hp0_equals_hp_of_a_model(clear_session, tmp_path):
    _assert_init_hp_matches_model(
        _structured_data_model(ak.StructuredDataClassifier, tmp_path),
        task_specific.STRUCTURED_DATA_CLASSIFIER[0],
    )


def test_sd_reg_init_hp0_equals_hp_of_a_model(clear_session, tmp_path):
    _assert_init_hp_matches_model(
        _structured_data_model(ak.StructuredDataRegressor, tmp_path),
        task_specific.STRUCTURED_DATA_REGRESSOR[0],
    )
|
/******************************************************************************
*
* Copyright(c) 2007 - 2011 Realtek Corporation. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110, USA
*
*
******************************************************************************/
#define _RTL8188E_CMD_C_
#include <drv_types.h>
#include <rtl8188e_hal.h>
#include "hal_com_h2c.h"
#define CONFIG_H2C_EF
#define RTL88E_MAX_H2C_BOX_NUMS 4
#define RTL88E_MAX_CMD_LEN 7
#define RTL88E_MESSAGE_BOX_SIZE 4
#define RTL88E_EX_MESSAGE_BOX_SIZE 4
/*
 * Poll REG_HMETFR until the firmware clears the "box valid" bit for
 * msgbox_num, i.e. until the firmware has consumed the previous H2C
 * command in that mailbox. Retries ~100 times, sleeping 1 ms between
 * polls. Returns _TRUE when the box is free, _FALSE on timeout.
 */
static u8 _is_fw_read_cmd_down(_adapter* padapter, u8 msgbox_num)
{
	u8 read_down = _FALSE;
	int retry_cnts = 100;
	u8 valid;
	//DBG_8192C(" _is_fw_read_cmd_down ,reg_1cc(%x),msg_box(%d)...\n",rtw_read8(padapter,REG_HMETFR),msgbox_num);
	do{
		valid = rtw_read8(padapter,REG_HMETFR) & BIT(msgbox_num);
		if(0 == valid ){
			/* Firmware has drained the box — it is safe to reuse. */
			read_down = _TRUE;
		}
		else
			rtw_msleep_os(1);
	}while( (!read_down) && (retry_cnts--));
	return read_down;
}
/*****************************************
* H2C Msg format :
* 0x1DF - 0x1D0
*| 31 - 8 | 7-5 4 - 0 |
*| h2c_msg |Class_ID CMD_ID |
*
* Extend 0x1FF - 0x1F0
*|31 - 0 |
*|ext_msg|
******************************************/
/*
 * Write one host-to-card (H2C) command into the next free firmware message
 * box. The first byte carries ElementID; up to 3 payload bytes share the
 * main box and any remaining bytes (CmdLen <= 7) spill into the extension
 * box. Access is serialized by dvobj->h2c_fwcmd_mutex.
 * Returns _SUCCESS, or _FAIL when the firmware is not ready, the arguments
 * are invalid, the device was removed, or the firmware never drained the
 * mailbox.
 */
s32 FillH2CCmd_88E(PADAPTER padapter, u8 ElementID, u32 CmdLen, u8 *pCmdBuffer)
{
	struct dvobj_priv *dvobj = adapter_to_dvobj(padapter);
	HAL_DATA_TYPE *pHalData = GET_HAL_DATA(padapter);
	u8 h2c_box_num;
	u32 msgbox_addr;
	u32 msgbox_ex_addr = 0;
	u8 cmd_idx,ext_cmd_len;
	u32 h2c_cmd = 0;
	u32 h2c_cmd_ex = 0;
	s32 ret = _FAIL;
_func_enter_;
	/* The mailboxes live on the primary adapter; re-resolve both pointers. */
	padapter = GET_PRIMARY_ADAPTER(padapter);
	pHalData = GET_HAL_DATA(padapter);
	if(padapter->bFWReady == _FALSE)
	{
		DBG_8192C("FillH2CCmd_88E(): return H2C cmd because fw is not ready\n");
		return ret;
	}
	/* Serialize mailbox access across all writers. */
	_enter_critical_mutex(&(dvobj->h2c_fwcmd_mutex), NULL);
	if (!pCmdBuffer) {
		goto exit;
	}
	if (CmdLen > RTL88E_MAX_CMD_LEN) {
		goto exit;
	}
	if (rtw_is_surprise_removed(padapter))
		goto exit;
	//pay attention to if race condition happened in H2C cmd setting.
	do{
		h2c_box_num = pHalData->LastHMEBoxNum;
		/* Wait until the firmware has consumed the previous command in this box. */
		if(!_is_fw_read_cmd_down(padapter, h2c_box_num)){
			DBG_8192C(" fw read cmd failed...\n");
			goto exit;
		}
		/* Byte 0 of the main box is the element (command) id. */
		*(u8*)(&h2c_cmd) = ElementID;
		if(CmdLen<=3)
		{
			_rtw_memcpy((u8*)(&h2c_cmd)+1, pCmdBuffer, CmdLen );
		}
		else{
			/* First 3 payload bytes in the main box, the rest in the ext box. */
			_rtw_memcpy((u8*)(&h2c_cmd)+1, pCmdBuffer,3);
			ext_cmd_len = CmdLen-3;
			_rtw_memcpy((u8*)(&h2c_cmd_ex), pCmdBuffer+3,ext_cmd_len );
			//Write Ext command
			msgbox_ex_addr = REG_HMEBOX_EXT_0 + (h2c_box_num *RTL88E_EX_MESSAGE_BOX_SIZE);
#ifdef CONFIG_H2C_EF
			for(cmd_idx=0;cmd_idx<ext_cmd_len;cmd_idx++ ){
				rtw_write8(padapter,msgbox_ex_addr+cmd_idx,*((u8*)(&h2c_cmd_ex)+cmd_idx));
			}
#else
			h2c_cmd_ex = le32_to_cpu( h2c_cmd_ex );
			rtw_write32(padapter, msgbox_ex_addr, h2c_cmd_ex);
#endif
		}
		// Write command
		msgbox_addr =REG_HMEBOX_0 + (h2c_box_num *RTL88E_MESSAGE_BOX_SIZE);
#ifdef CONFIG_H2C_EF
		for(cmd_idx=0;cmd_idx<RTL88E_MESSAGE_BOX_SIZE;cmd_idx++ ){
			rtw_write8(padapter,msgbox_addr+cmd_idx,*((u8*)(&h2c_cmd)+cmd_idx));
		}
#else
		h2c_cmd = le32_to_cpu( h2c_cmd );
		rtw_write32(padapter,msgbox_addr, h2c_cmd);
#endif
	//	DBG_8192C("MSG_BOX:%d,CmdLen(%d), reg:0x%x =>h2c_cmd:0x%x, reg:0x%x =>h2c_cmd_ex:0x%x ..\n"
	//		,pHalData->LastHMEBoxNum ,CmdLen,msgbox_addr,h2c_cmd,msgbox_ex_addr,h2c_cmd_ex);
		/* Rotate to the next of the RTL88E_MAX_H2C_BOX_NUMS mailboxes. */
		pHalData->LastHMEBoxNum = (h2c_box_num+1) % RTL88E_MAX_H2C_BOX_NUMS;
	}while(0);
	ret = _SUCCESS;
exit:
	_exit_critical_mutex(&(dvobj->h2c_fwcmd_mutex), NULL);
_func_exit_;
	return ret;
}
/*
 * Command-thread handler: unpacks a struct cmd_msg_parm from pbuf and
 * forwards it as an H2C command. Returns H2C_PARAMETERS_ERROR on a NULL
 * buffer, H2C_SUCCESS otherwise (the FillH2CCmd_88E result is ignored).
 */
u8 rtl8192c_h2c_msg_hdl(_adapter *padapter, unsigned char *pbuf)
{
	u8 ElementID, CmdLen;
	u8 *pCmdBuffer;
	struct cmd_msg_parm *pcmdmsg;
	if(!pbuf)
		return H2C_PARAMETERS_ERROR;
	pcmdmsg = (struct cmd_msg_parm*)pbuf;
	ElementID = pcmdmsg->eid;
	CmdLen = pcmdmsg->sz;
	pCmdBuffer = pcmdmsg->buf;
	FillH2CCmd_88E(padapter, ElementID, CmdLen, pCmdBuffer);
	return H2C_SUCCESS;
}
/*
#if defined(CONFIG_AUTOSUSPEND) && defined(SUPPORT_HW_RFOFF_DETECTED)
u8 rtl8192c_set_FwSelectSuspend_cmd(_adapter *padapter ,u8 bfwpoll, u16 period)
{
u8 res=_SUCCESS;
struct H2C_SS_RFOFF_PARAM param;
DBG_8192C("==>%s bfwpoll(%x)\n",__FUNCTION__,bfwpoll);
param.gpio_period = period;//Polling GPIO_11 period time
param.ROFOn = (_TRUE == bfwpoll)?1:0;
FillH2CCmd_88E(padapter, SELECTIVE_SUSPEND_ROF_CMD, sizeof(param), (u8*)(¶m));
return res;
}
#endif //CONFIG_AUTOSUSPEND && SUPPORT_HW_RFOFF_DETECTED
*/
/*
 * Report an RSSI value to the firmware rate-adaptive engine.
 * NOTE(review): `param` is byte-swapped in place (cpu_to_le32 on a u32
 * view) before the first 3 bytes are sent — callers must not reuse the
 * buffer afterwards. Fails when firmware rate control is disabled.
 */
u8 rtl8188e_set_rssi_cmd(_adapter*padapter, u8 *param)
{
	u8 res=_SUCCESS;
	HAL_DATA_TYPE *pHalData = GET_HAL_DATA(padapter);
_func_enter_;
	if(pHalData->fw_ractrl == _FALSE){
		DBG_8192C("==>%s fw dont support RA \n",__FUNCTION__);
		return _FAIL;
	}
	*((u32*) param ) = cpu_to_le32( *((u32*) param ) );
	FillH2CCmd_88E(padapter, H2C_RSSI_REPORT, 3, param);
_func_exit_;
	return res;
}
/*
 * Push a per-station rate mask to the firmware rate-adaptive engine.
 * arg[0]=macid, arg[1]=rate-adaptive id (raid), arg[2]=short-GI flag,
 * arg[3]=initial rate; `bitmap` is the allowed-rate bitmap.
 * Fails when firmware rate control is disabled or the macid has no
 * associated station entry.
 */
u8 rtl8188e_set_raid_cmd(_adapter*padapter, u32 bitmap, u8* arg)
{
	u8 res=_SUCCESS;
	HAL_DATA_TYPE *pHalData = GET_HAL_DATA(padapter);
	struct sta_info *psta = NULL;
	struct macid_ctl_t *macid_ctl = &padapter->dvobj->macid_ctl;
	u8 macid, init_rate, raid, shortGIrate=_FALSE;
	u8 H2CCommand[7]={0};
	if(pHalData->fw_ractrl == _FALSE){
		DBG_8192C("==>%s fw dont support RA \n",__FUNCTION__);
		return _FAIL;
	}
	macid = arg[0];
	raid = arg[1];
	shortGIrate = arg[2];
	init_rate = arg[3];
	if (macid < macid_ctl->num)
		psta = macid_ctl->sta[macid];
	if (psta == NULL) {
		DBG_871X_LEVEL(_drv_always_, FUNC_ADPT_FMT" macid:%u, sta is NULL\n"
			, FUNC_ADPT_ARG(padapter), macid);
		return _FAIL;
	}
	/* Byte layout expected by the H2C_DM_MACID_CFG command. */
	H2CCommand[0] = macid;
	H2CCommand[1] = raid | (shortGIrate?0x80:0x00) ;
	H2CCommand[2] = psta->bw_mode & 0x03;	//BW;
#ifdef CONFIG_INTEL_PROXIM
	if(padapter->proximity.proxim_on ==_TRUE)
		pHalData->bDisableTXPowerTraining = _FALSE;
#endif
	//DisableTXPowerTraining
	if(pHalData->bDisableTXPowerTraining){
		H2CCommand[2] |= BIT6;
		DBG_871X("%s,Disable PWT by driver\n",__FUNCTION__);
	}
	else{
		PDM_ODM_T	pDM_OutSrc = &pHalData->odmpriv;
		if(pDM_OutSrc->bDisablePowerTraining){
			H2CCommand[2] |= BIT6;
			DBG_871X("%s,Disable PWT by DM\n",__FUNCTION__);
		}
	}
	/* Rate bitmap, little-endian byte order. */
	H2CCommand[3] = (u1Byte)(bitmap & 0x000000ff);
	H2CCommand[4] = (u1Byte)((bitmap & 0x0000ff00) >>8);
	H2CCommand[5] = (u1Byte)((bitmap & 0x00ff0000) >> 16);
	H2CCommand[6] = (u1Byte)((bitmap & 0xff000000) >> 24);
	FillH2CCmd_88E(padapter, H2C_DM_MACID_CFG, 7, H2CCommand);
	//The firmware Rate Adaption function is triggered by TBTT INT, so to
	// enable the rate adaption, we need to enable the hardware Beacon function Reg 0x550[3]
	//SetBcnCtrlReg(padapter, BIT3, 0);
	rtw_write8(padapter, REG_BCN_CTRL, rtw_read8(padapter, REG_BCN_CTRL)|BIT3);
	return res;
}
//bitmap[0:27] = tx_rate_bitmap
//bitmap[28:31]= Rate Adaptive id
//arg[0] = macid, arg[1] = raid, arg[2] = Short GI flag, arg[3] = init rate
/*
 * Update the rate-adaptation info for one station (identified by
 * arg[0]=macid). When FW rate control is unavailable and driver-side
 * rate adaptation is compiled in, the ODM RA state is updated directly;
 * otherwise the info is forwarded to firmware via rtl8188e_set_raid_cmd.
 * The rate bitmap is masked to bits 0..27 and optionally filtered by
 * ODM according to 'rssi_level'.
 */
void rtl8188e_Add_RateATid(PADAPTER pAdapter, u64 rate_bitmap, u8 *arg, u8 rssi_level)
{
HAL_DATA_TYPE *pHalData = GET_HAL_DATA(pAdapter);
u8 macid, init_rate, raid, shortGIrate=_FALSE;
u32 bitmap = (u32) rate_bitmap;
macid = arg[0];
raid = arg[1];
shortGIrate = arg[2];
init_rate = arg[3];
// Keep only the tx-rate bits (0..27); bits 28..31 carry the RA id.
bitmap &=0x0fffffff;
// Let ODM trim the bitmap for the current RSSI level, except on init.
if(rssi_level != DM_RATR_STA_INIT)
bitmap = ODM_Get_Rate_Bitmap(&pHalData->odmpriv, macid, bitmap, rssi_level);
if (shortGIrate==_TRUE)
init_rate |= BIT(6);
// Re-mask in case ODM returned extra high bits.
bitmap &= 0x0fffffff;
DBG_871X("%s=> mac_id:%d , raid:%d , ra_bitmap=0x%x, shortGIrate=0x%02x\n",
__FUNCTION__,macid ,raid ,bitmap, shortGIrate);
#if(RATE_ADAPTIVE_SUPPORT == 1)
// No FW rate control: drive the software RA engine instead.
if(!pHalData->fw_ractrl ){
ODM_RA_UpdateRateInfo_8188E(
&(pHalData->odmpriv),
macid,
raid,
bitmap,
shortGIrate
);
}
else
#endif
{
rtl8188e_set_raid_cmd(pAdapter,bitmap,arg);
}
}
/*
 * Send the H2C_PS_PWR_MODE command configuring firmware power-save mode.
 *
 * Mode is one of PS_MODE_* and is translated into the FW mode field,
 * an RLBM selector (0:Min, 1:Max, 2:User define), a smart-PS nibble and
 * an awake interval. Any Mode > 0 requests RF-off power state (0x00);
 * active mode keeps everything on (0x0C).
 */
void rtl8188e_set_FwPwrMode_cmd(PADAPTER padapter, u8 Mode)
{
SETPWRMODE_PARM H2CSetPwrMode;
struct pwrctrl_priv *pwrpriv = adapter_to_pwrctl(padapter);
u8 RLBM = 0; // 0:Min, 1:Max , 2:User define
_func_enter_;
DBG_871X("%s: Mode=%d SmartPS=%d UAPSD=%d\n", __FUNCTION__,
Mode, pwrpriv->smart_ps, padapter->registrypriv.uapsd_enable);
H2CSetPwrMode.AwakeInterval = 2; //DTIM = 1
// Map the driver PS mode onto the FW mode / RLBM / awake-interval fields.
switch(Mode)
{
case PS_MODE_ACTIVE:
H2CSetPwrMode.Mode = 0;
break;
case PS_MODE_MIN:
H2CSetPwrMode.Mode = 1;
break;
case PS_MODE_MAX:
RLBM = 1;
H2CSetPwrMode.Mode = 1;
break;
case PS_MODE_DTIM:
RLBM = 2;
H2CSetPwrMode.AwakeInterval = 3; //DTIM = 2
H2CSetPwrMode.Mode = 1;
break;
case PS_MODE_UAPSD_WMM:
H2CSetPwrMode.Mode = 2;
break;
default:
H2CSetPwrMode.Mode = 0;
break;
}
//H2CSetPwrMode.Mode = Mode;
// High nibble: smart-PS level; low nibble: RLBM selector.
H2CSetPwrMode.SmartPS_RLBM = (((pwrpriv->smart_ps<<4)&0xf0) | (RLBM & 0x0f));
H2CSetPwrMode.bAllQueueUAPSD = padapter->registrypriv.uapsd_enable;
if(Mode > 0)
{
H2CSetPwrMode.PwrState = 0x00;// AllON(0x0C), RFON(0x04), RFOFF(0x00)
#ifdef CONFIG_EXT_CLK
H2CSetPwrMode.Mode |= BIT(7);//supporting 26M XTAL CLK_Request feature.
#endif //CONFIG_EXT_CLK
}
else
H2CSetPwrMode.PwrState = 0x0C;// AllON(0x0C), RFON(0x04), RFOFF(0x00)
FillH2CCmd_88E(padapter, H2C_PS_PWR_MODE, sizeof(H2CSetPwrMode), (u8 *)&H2CSetPwrMode);
_func_exit_;
}
/*
 * Build a beacon frame into 'pframe' (for reserved-page download) and
 * return its length in *pLength. In AP mode the variable IEs are copied
 * verbatim from the stored network; in ad-hoc mode the IEs are assembled
 * one by one. Frames exceeding the 512-byte reserved page (including the
 * TX descriptor) are rejected and *pLength is left untouched.
 */
void ConstructBeacon(_adapter *padapter, u8 *pframe, u32 *pLength)
{
	struct rtw_ieee80211_hdr *pwlanhdr;
	u16 *fctrl;
	u32 rate_len, pktlen;
	struct mlme_ext_priv *pmlmeext = &(padapter->mlmeextpriv);
	struct mlme_ext_info *pmlmeinfo = &(pmlmeext->mlmext_info);
	WLAN_BSSID_EX *cur_network = &(pmlmeinfo->network);
	u8 bc_addr[] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff};

	pwlanhdr = (struct rtw_ieee80211_hdr *)pframe;

	fctrl = &(pwlanhdr->frame_ctl);
	*(fctrl) = 0;

	/* DA = broadcast, SA = our MAC address, addr3 = BSSID */
	_rtw_memcpy(pwlanhdr->addr1, bc_addr, ETH_ALEN);
	_rtw_memcpy(pwlanhdr->addr2, adapter_mac_addr(padapter), ETH_ALEN);
	_rtw_memcpy(pwlanhdr->addr3, get_my_bssid(cur_network), ETH_ALEN);

	SetSeqNum(pwlanhdr, 0/*pmlmeext->mgnt_seq*/);
	SetFrameSubType(pframe, WIFI_BEACON);

	pframe += sizeof(struct rtw_ieee80211_hdr_3addr);
	pktlen = sizeof (struct rtw_ieee80211_hdr_3addr);

	/* timestamp (8 bytes) will be inserted by hardware */
	pframe += 8;
	pktlen += 8;

	/* beacon interval: 2 bytes */
	_rtw_memcpy(pframe, (unsigned char *)(rtw_get_beacon_interval_from_ie(cur_network->IEs)), 2);
	pframe += 2;
	pktlen += 2;

	/* capability info: 2 bytes */
	_rtw_memcpy(pframe, (unsigned char *)(rtw_get_capability_from_ie(cur_network->IEs)), 2);
	pframe += 2;
	pktlen += 2;

	if( (pmlmeinfo->state&0x03) == WIFI_FW_AP_STATE)
	{
		/* AP mode: append the stored variable IEs as-is.
		 * Bug fix: the old code passed the accumulated 'pktlen'
		 * (header + fixed fields + IEs) as the memcpy length, which
		 * copied sizeof(hdr)+12 junk bytes past the end of the valid
		 * IEs into the frame buffer. Copy only the IE remainder. */
		u32 ie_len = cur_network->IELength - sizeof(NDIS_802_11_FIXED_IEs);
		_rtw_memcpy(pframe, cur_network->IEs+sizeof(NDIS_802_11_FIXED_IEs), ie_len);
		pktlen += ie_len;
		goto _ConstructBeacon;
	}

	/* below for ad-hoc mode */

	/* SSID */
	pframe = rtw_set_ie(pframe, _SSID_IE_, cur_network->Ssid.SsidLength, cur_network->Ssid.Ssid, &pktlen);

	/* supported rates (at most 8 in this IE) */
	rate_len = rtw_get_rateset_len(cur_network->SupportedRates);
	pframe = rtw_set_ie(pframe, _SUPPORTEDRATES_IE_, ((rate_len > 8)? 8: rate_len), cur_network->SupportedRates, &pktlen);

	/* DS parameter set */
	pframe = rtw_set_ie(pframe, _DSSET_IE_, 1, (unsigned char *)&(cur_network->Configuration.DSConfig), &pktlen);

	if( (pmlmeinfo->state&0x03) == WIFI_FW_ADHOC_STATE)
	{
		u32 ATIMWindow;

		/* IBSS Parameter Set; ATIM window fixed at 0 */
		ATIMWindow = 0;
		pframe = rtw_set_ie(pframe, _IBSS_PARA_IE_, 2, (unsigned char *)(&ATIMWindow), &pktlen);
	}

	/* todo: ERP IE */

	/* extended supported rates: the rates beyond the first 8 */
	if (rate_len > 8)
	{
		pframe = rtw_set_ie(pframe, _EXT_SUPPORTEDRATES_IE_, (rate_len - 8), (cur_network->SupportedRates + 8), &pktlen);
	}

	/* todo: HT for adhoc */

_ConstructBeacon:
	if ((pktlen + TXDESC_SIZE) > 512)
	{
		DBG_871X("beacon frame too large\n");
		return;
	}

	*pLength = pktlen;
}
/*
 * Build a PS-Poll control frame into 'pframe' and return its fixed
 * 16-byte length in *pLength. The frame carries our AID in the duration
 * field, the current BSSID as RA and our MAC as TA, with the
 * power-management bit set.
 */
void ConstructPSPoll(_adapter *padapter, u8 *pframe, u32 *pLength)
{
	struct rtw_ieee80211_hdr *pwlanhdr;
	u16 *fctrl;
	struct mlme_ext_priv *pmlmeext = &(padapter->mlmeextpriv);
	struct mlme_ext_info *pmlmeinfo = &(pmlmeext->mlmext_info);

	pwlanhdr = (struct rtw_ieee80211_hdr *)pframe;

	/* Frame control: PS-Poll subtype with PwrMgt set. */
	fctrl = &(pwlanhdr->frame_ctl);
	*(fctrl) = 0;
	SetPwrMgt(fctrl);
	SetFrameSubType(pframe, WIFI_PSPOLL);

	/* In a PS-Poll the duration field carries the AID (top 2 bits set). */
	SetDuration(pframe, (pmlmeinfo->aid | 0xc000));

	/* BSSID. */
	_rtw_memcpy(pwlanhdr->addr1, get_my_bssid(&(pmlmeinfo->network)), ETH_ALEN);

	/* TA. */
	_rtw_memcpy(pwlanhdr->addr2, adapter_mac_addr(padapter), ETH_ALEN);

	/* PS-Poll frames have a fixed 16-byte length (fc+dur+RA+TA). */
	*pLength = 16;
}
/*
 * Build a (QoS-)Null data frame into 'pframe' and return its length in
 * *pLength.
 *
 * StaAddr         : peer station address
 * bQoS            : build a QoS-Null (with QoS control field) when _TRUE
 * AC / bEosp      : priority and EOSP bit for the QoS control field
 * bForcePowerSave : set the PwrMgt bit in frame control
 *
 * Address fields depend on the current infrastructure mode (STA / AP /
 * IBSS); ToDS / FromDS bits are set accordingly.
 */
void ConstructNullFunctionData(
PADAPTER padapter,
u8 *pframe,
u32 *pLength,
u8 *StaAddr,
u8 bQoS,
u8 AC,
u8 bEosp,
u8 bForcePowerSave)
{
struct rtw_ieee80211_hdr *pwlanhdr;
u16 *fctrl;
u32 pktlen;
struct mlme_priv *pmlmepriv = &padapter->mlmepriv;
struct wlan_network *cur_network = &pmlmepriv->cur_network;
struct mlme_ext_priv *pmlmeext = &(padapter->mlmeextpriv);
struct mlme_ext_info *pmlmeinfo = &(pmlmeext->mlmext_info);
//DBG_871X("%s:%d\n", __FUNCTION__, bForcePowerSave);
pwlanhdr = (struct rtw_ieee80211_hdr*)pframe;
fctrl = &pwlanhdr->frame_ctl;
*(fctrl) = 0;
if (bForcePowerSave)
{
SetPwrMgt(fctrl);
}
// Address layout depends on our role in the network.
switch(cur_network->network.InfrastructureMode)
{
case Ndis802_11Infrastructure:
// STA -> AP: ToDS, addr1=BSSID, addr2=our MAC, addr3=destination.
SetToDs(fctrl);
_rtw_memcpy(pwlanhdr->addr1, get_my_bssid(&(pmlmeinfo->network)), ETH_ALEN);
_rtw_memcpy(pwlanhdr->addr2, adapter_mac_addr(padapter), ETH_ALEN);
_rtw_memcpy(pwlanhdr->addr3, StaAddr, ETH_ALEN);
break;
case Ndis802_11APMode:
// AP -> STA: FromDS, addr1=destination, addr2=BSSID, addr3=our MAC.
SetFrDs(fctrl);
_rtw_memcpy(pwlanhdr->addr1, StaAddr, ETH_ALEN);
_rtw_memcpy(pwlanhdr->addr2, get_my_bssid(&(pmlmeinfo->network)), ETH_ALEN);
_rtw_memcpy(pwlanhdr->addr3, adapter_mac_addr(padapter), ETH_ALEN);
break;
case Ndis802_11IBSS:
default:
// IBSS: addr1=destination, addr2=our MAC, addr3=BSSID.
_rtw_memcpy(pwlanhdr->addr1, StaAddr, ETH_ALEN);
_rtw_memcpy(pwlanhdr->addr2, adapter_mac_addr(padapter), ETH_ALEN);
_rtw_memcpy(pwlanhdr->addr3, get_my_bssid(&(pmlmeinfo->network)), ETH_ALEN);
break;
}
SetSeqNum(pwlanhdr, 0);
if (bQoS == _TRUE) {
struct rtw_ieee80211_hdr_3addr_qos *pwlanqoshdr;
// QoS-Null: add the 2-byte QoS control field with priority and EOSP.
SetFrameSubType(pframe, WIFI_QOS_DATA_NULL);
pwlanqoshdr = (struct rtw_ieee80211_hdr_3addr_qos*)pframe;
SetPriority(&pwlanqoshdr->qc, AC);
SetEOSP(&pwlanqoshdr->qc, bEosp);
pktlen = sizeof(struct rtw_ieee80211_hdr_3addr_qos);
} else {
SetFrameSubType(pframe, WIFI_DATA_NULL);
pktlen = sizeof(struct rtw_ieee80211_hdr_3addr);
}
*pLength = pktlen;
}
/*
 * Tell firmware where the reserved-page frames live: send the page
 * offsets of the PS-Poll / Null / QoS-Null templates via
 * H2C_COM_RSVD_PAGE, and (with WoWLAN enabled) the remote-wake control
 * info and ARP response offsets via H2C_COM_AOAC_RSVD_PAGE.
 */
void rtl8188e_set_FwRsvdPage_cmd(PADAPTER padapter, PRSVDPAGE_LOC rsvdpageloc)
{
u8 u1H2CRsvdPageParm[H2C_RSVDPAGE_LOC_LEN]={0};
u8 u1H2CAoacRsvdPageParm[H2C_AOAC_RSVDPAGE_LOC_LEN]={0};
//DBG_871X("8188RsvdPageLoc: PsPoll=%d Null=%d QoSNull=%d\n",
// rsvdpageloc->LocPsPoll, rsvdpageloc->LocNullData, rsvdpageloc->LocQosNull);
SET_H2CCMD_RSVDPAGE_LOC_PSPOLL(u1H2CRsvdPageParm, rsvdpageloc->LocPsPoll);
SET_H2CCMD_RSVDPAGE_LOC_NULL_DATA(u1H2CRsvdPageParm, rsvdpageloc->LocNullData);
SET_H2CCMD_RSVDPAGE_LOC_QOS_NULL_DATA(u1H2CRsvdPageParm, rsvdpageloc->LocQosNull);
FillH2CCmd_88E(padapter, H2C_COM_RSVD_PAGE, H2C_RSVDPAGE_LOC_LEN, u1H2CRsvdPageParm);
#ifdef CONFIG_WOWLAN
//DBG_871X("8188E_AOACRsvdPageLoc: RWC=%d ArpRsp=%d\n", rsvdpageloc->LocRemoteCtrlInfo, rsvdpageloc->LocArpRsp);
SET_H2CCMD_AOAC_RSVDPAGE_LOC_REMOTE_WAKE_CTRL_INFO(u1H2CAoacRsvdPageParm, rsvdpageloc->LocRemoteCtrlInfo);
SET_H2CCMD_AOAC_RSVDPAGE_LOC_ARP_RSP(u1H2CAoacRsvdPageParm, rsvdpageloc->LocArpRsp);
FillH2CCmd_88E(padapter, H2C_COM_AOAC_RSVD_PAGE, H2C_AOAC_RSVDPAGE_LOC_LEN, u1H2CAoacRsvdPageParm);
#endif
}
// To check if reserved page content is destroyed by beacon beacuse beacon is too large.
// 2010.06.23. Added by tynli.
/*
 * Check whether the beacon has grown large enough to overwrite the
 * reserved page content. The actual assertion is commented out, so this
 * is currently a no-op placeholder kept for parity with other chips.
 */
VOID
CheckFwRsvdPageContent(
IN PADAPTER Adapter
)
{
HAL_DATA_TYPE* pHalData = GET_HAL_DATA(Adapter);
u32 MaxBcnPageNum;
if(pHalData->FwRsvdPageStartOffset != 0)
{
// The check below is intentionally disabled (no-op body).
/*MaxBcnPageNum = PageNum_128(pMgntInfo->MaxBeaconSize);
RT_ASSERT((MaxBcnPageNum <= pHalData->FwRsvdPageStartOffset),
("CheckFwRsvdPageContent(): The reserved page content has been"\
"destroyed by beacon!!! MaxBcnPageNum(%d) FwRsvdPageStartOffset(%d)\n!",
MaxBcnPageNum, pHalData->FwRsvdPageStartOffset));*/
}
}
//
// Description: Get the reserved page number in Tx packet buffer.
// Retrun value: the page number.
// 2012.08.09, by tynli.
//
/*
 * Get the number of reserved pages in the Tx packet buffer: the pages
 * between the Tx page boundary and the last entry of the Tx packet
 * buffer, inclusive.
 * The 'wowlan' parameter is currently unused on 8188E but kept for
 * interface compatibility with other chip variants.
 */
u8
GetTxBufferRsvdPageNum8188E(_adapter *padapter, bool wowlan)
{
	u8 RsvdPageNum = 0;
	/* Default: boundary at the last entry, i.e. one reserved page,
	 * for an IC type whose boundary is undefined. */
	u8 TxPageBndy = LAST_ENTRY_OF_TX_PKT_BUFFER_8188E(padapter);

	rtw_hal_get_def_var(padapter, HAL_DEF_TX_PAGE_BOUNDARY, (u8 *)&TxPageBndy);
	RsvdPageNum = LAST_ENTRY_OF_TX_PKT_BUFFER_8188E(padapter) - TxPageBndy + 1;

	return RsvdPageNum;
}
/*
 * Notify firmware that we joined (mstatus==1) or left a BSS.
 *
 * On join this downloads the reserved-page frames (PS-Poll / Null data
 * templates) into the Tx packet buffer via SW beacon DMA: it temporarily
 * switches beaconing to software DMA (CR bit8), disables the relevant
 * beacon-control bits, retries the reserved-page download until the
 * "beacon valid" bit confirms success, then restores the original
 * beacon/queue control state. The exact register sequence is
 * order-sensitive (see inline notes from the vendor).
 */
void rtl8188e_set_FwJoinBssReport_cmd(PADAPTER padapter, u8 mstatus)
{
JOINBSSRPT_PARM_88E JoinBssRptParm;
HAL_DATA_TYPE *pHalData = GET_HAL_DATA(padapter);
struct mlme_ext_priv *pmlmeext = &(padapter->mlmeextpriv);
struct mlme_ext_info *pmlmeinfo = &(pmlmeext->mlmext_info);
#ifdef CONFIG_WOWLAN
// NOTE(review): pmlmepriv/psta are declared for the WoWLAN build but not
// used in this function body - confirm against the WoWLAN variant.
struct mlme_priv *pmlmepriv = &padapter->mlmepriv;
struct sta_info *psta = NULL;
#endif
BOOLEAN bSendBeacon=_FALSE;
BOOLEAN bcn_valid = _FALSE;
u8 DLBcnCount=0;
u32 poll = 0;
_func_enter_;
DBG_871X("%s mstatus(%x)\n", __FUNCTION__,mstatus);
if(mstatus == 1)
{
// We should set AID, correct TSF, HW seq enable before set JoinBssReport to Fw in 88/92C.
// Suggested by filen. Added by tynli.
rtw_write16(padapter, REG_BCN_PSR_RPT, (0xC000|pmlmeinfo->aid));
// Do not set TSF again here or vWiFi beacon DMA INT will not work.
//correct_TSF(padapter, pmlmeext);
// Hw sequende enable by dedault. 2010.06.23. by tynli.
//rtw_write16(padapter, REG_NQOS_SEQ, ((pmlmeext->mgnt_seq+100)&0xFFF));
//rtw_write8(padapter, REG_HWSEQ_CTRL, 0xFF);
//Set REG_CR bit 8. DMA beacon by SW.
pHalData->RegCR_1 |= BIT0;
rtw_write8(padapter,  REG_CR+1, pHalData->RegCR_1);
// Disable Hw protection for a time which revserd for Hw sending beacon.
// Fix download reserved page packet fail that access collision with the protection time.
// 2010.05.11. Added by tynli.
//SetBcnCtrlReg(padapter, 0, BIT3);
//SetBcnCtrlReg(padapter, BIT4, 0);
rtw_write8(padapter, REG_BCN_CTRL, rtw_read8(padapter, REG_BCN_CTRL)&(~BIT(3)));
rtw_write8(padapter, REG_BCN_CTRL, rtw_read8(padapter, REG_BCN_CTRL)|BIT(4));
if(pHalData->RegFwHwTxQCtrl&BIT6)
{
DBG_871X("HalDownloadRSVDPage(): There is an Adapter is sending beacon.\n");
bSendBeacon = _TRUE;
}
// Set FWHW_TXQ_CTRL 0x422[6]=0 to tell Hw the packet is not a real beacon frame.
rtw_write8(padapter, REG_FWHW_TXQ_CTRL+2, (pHalData->RegFwHwTxQCtrl&(~BIT6)));
pHalData->RegFwHwTxQCtrl &= (~BIT6);
// Clear beacon valid check bit.
rtw_hal_set_hwreg(padapter, HW_VAR_BCN_VALID, NULL);
DLBcnCount = 0;
poll = 0;
// Retry the reserved-page download (up to 100 attempts, polling the
// "beacon valid" bit after each) until HW confirms it landed.
do
{
/* download rsvd page.*/
rtw_hal_set_fw_rsvd_page(padapter, _FALSE);
DLBcnCount++;
do
{
rtw_yield_os();
//rtw_mdelay_os(10);
// check rsvd page download OK.
rtw_hal_get_hwreg(padapter, HW_VAR_BCN_VALID, (u8*)(&bcn_valid));
poll++;
} while (!bcn_valid && (poll%10) != 0 && !RTW_CANNOT_RUN(padapter));
} while (!bcn_valid && DLBcnCount <= 100 && !RTW_CANNOT_RUN(padapter));
//RT_ASSERT(bcn_valid, ("HalDownloadRSVDPage88ES(): 1 Download RSVD page failed!\n"));
if (RTW_CANNOT_RUN(padapter))
;
else if (!bcn_valid)
DBG_871X(ADPT_FMT": 1 DL RSVD page failed! DLBcnCount:%u, poll:%u\n",
ADPT_ARG(padapter) ,DLBcnCount, poll);
else {
struct pwrctrl_priv *pwrctl = adapter_to_pwrctl(padapter);
pwrctl->fw_psmode_iface_id = padapter->iface_id;
DBG_871X(ADPT_FMT": 1 DL RSVD page success! DLBcnCount:%u, poll:%u\n",
ADPT_ARG(padapter), DLBcnCount, poll);
}
// Enable Bcn
//SetBcnCtrlReg(padapter, BIT3, 0);
//SetBcnCtrlReg(padapter, 0, BIT4);
rtw_write8(padapter, REG_BCN_CTRL, rtw_read8(padapter, REG_BCN_CTRL)|BIT(3));
rtw_write8(padapter, REG_BCN_CTRL, rtw_read8(padapter, REG_BCN_CTRL)&(~BIT(4)));
// To make sure that if there exists an adapter which would like to send beacon.
// If exists, the origianl value of 0x422[6] will be 1, we should check this to
// prevent from setting 0x422[6] to 0 after download reserved page, or it will cause
// the beacon cannot be sent by HW.
// 2010.06.23. Added by tynli.
if(bSendBeacon)
{
rtw_write8(padapter, REG_FWHW_TXQ_CTRL+2, (pHalData->RegFwHwTxQCtrl|BIT6));
pHalData->RegFwHwTxQCtrl |= BIT6;
}
//
// Update RSVD page location H2C to Fw.
//
if(bcn_valid)
{
rtw_hal_set_hwreg(padapter, HW_VAR_BCN_VALID, NULL);
DBG_871X("Set RSVD page location to Fw.\n");
//FillH2CCmd88E(Adapter, H2C_88E_RSVDPAGE, H2C_RSVDPAGE_LOC_LENGTH, pMgntInfo->u1RsvdPageLoc);
}
// Do not enable HW DMA BCN or it will cause Pcie interface hang by timing issue. 2011.11.24. by tynli.
//if(!padapter->bEnterPnpSleep)
{
// Clear CR[8] or beacon packet will not be send to TxBuf anymore.
pHalData->RegCR_1 &= (~BIT0);
rtw_write8(padapter,  REG_CR+1, pHalData->RegCR_1);
}
}
_func_exit_;
}
#ifdef CONFIG_P2P_PS
/*
 * Program the firmware P2P power-save offload (H2C_PS_P2P_OFFLOAD)
 * according to the requested state:
 *  - P2P_PS_DISABLE  : clear the offload structure
 *  - P2P_PS_ENABLE   : write CTWindow and up to two NoA descriptors to
 *                      hardware, then enable the offload (GO/client role)
 *  - P2P_PS_SCAN     : mark discovery in progress
 *  - P2P_PS_SCAN_DONE: clear discovery and fall back to ENABLE state
 * The final offload byte is always sent to firmware.
 */
void rtl8188e_set_p2p_ps_offload_cmd(_adapter* padapter, u8 p2p_ps_state)
{
HAL_DATA_TYPE *pHalData = GET_HAL_DATA(padapter);
// NOTE(review): pwrpriv is fetched but not referenced below.
struct pwrctrl_priv *pwrpriv = adapter_to_pwrctl(padapter);
struct wifidirect_info *pwdinfo = &( padapter->wdinfo );
struct P2P_PS_Offload_t *p2p_ps_offload = (struct P2P_PS_Offload_t *)(&pHalData->p2p_ps_offload);
u8 i;
_func_enter_;
#if 1
switch(p2p_ps_state)
{
case P2P_PS_DISABLE:
DBG_8192C("P2P_PS_DISABLE \n");
// Zero the single offload byte handed to firmware.
_rtw_memset(p2p_ps_offload, 0 ,1);
break;
case P2P_PS_ENABLE:
DBG_8192C("P2P_PS_ENABLE \n");
// update CTWindow value.
if( pwdinfo->ctwindow > 0 )
{
p2p_ps_offload->CTWindow_En = 1;
rtw_write8(padapter, REG_P2P_CTWIN, pwdinfo->ctwindow);
}
// hw only support 2 set of NoA
for( i=0 ; i<pwdinfo->noa_num ; i++)
{
// To control the register setting for which NOA
rtw_write8(padapter, REG_NOA_DESC_SEL, (i << 4));
if(i == 0)
p2p_ps_offload->NoA0_En = 1;
else
p2p_ps_offload->NoA1_En = 1;
// config P2P NoA Descriptor Register
//DBG_8192C("%s(): noa_duration = %x\n",__FUNCTION__,pwdinfo->noa_duration[i]);
rtw_write32(padapter, REG_NOA_DESC_DURATION, pwdinfo->noa_duration[i]);
//DBG_8192C("%s(): noa_interval = %x\n",__FUNCTION__,pwdinfo->noa_interval[i]);
rtw_write32(padapter, REG_NOA_DESC_INTERVAL, pwdinfo->noa_interval[i]);
//DBG_8192C("%s(): start_time = %x\n",__FUNCTION__,pwdinfo->noa_start_time[i]);
rtw_write32(padapter, REG_NOA_DESC_START, pwdinfo->noa_start_time[i]);
//DBG_8192C("%s(): noa_count = %x\n",__FUNCTION__,pwdinfo->noa_count[i]);
rtw_write8(padapter, REG_NOA_DESC_COUNT, pwdinfo->noa_count[i]);
}
if( (pwdinfo->opp_ps == 1) || (pwdinfo->noa_num > 0) )
{
// rst p2p circuit
rtw_write8(padapter, REG_DUAL_TSF_RST, BIT(4));
p2p_ps_offload->Offload_En = 1;
if(pwdinfo->role == P2P_ROLE_GO)
{
p2p_ps_offload->role= 1;
p2p_ps_offload->AllStaSleep = 0;
}
else
{
p2p_ps_offload->role= 0;
}
p2p_ps_offload->discovery = 0;
}
break;
case P2P_PS_SCAN:
DBG_8192C("P2P_PS_SCAN \n");
p2p_ps_offload->discovery = 1;
break;
case P2P_PS_SCAN_DONE:
DBG_8192C("P2P_PS_SCAN_DONE \n");
p2p_ps_offload->discovery = 0;
pwdinfo->p2p_ps_state = P2P_PS_ENABLE;
break;
default:
break;
}
FillH2CCmd_88E(padapter, H2C_PS_P2P_OFFLOAD, 1, (u8 *)p2p_ps_offload);
#endif
_func_exit_;
}
#endif //CONFIG_P2P_PS
#ifdef CONFIG_TSF_RESET_OFFLOAD
/*
ask FW to Reset sync register at Beacon early interrupt
*/
/*
 * Ask firmware to reset the TSF sync register of 'reset_port' at the
 * next beacon-early interrupt (H2C_RESET_TSF).
 * buf[0] requests a port-0 reset, buf[1] a port-1 reset.
 * Returns _SUCCESS when the H2C command was queued, _FAIL otherwise.
 */
u8 rtl8188e_reset_tsf(_adapter *padapter, u8 reset_port )
{
	u8 buf[2];
	u8 res = _SUCCESS;
	s32 ret;
_func_enter_;
	if (IFACE_PORT0 == reset_port) {
		buf[0] = 0x1; buf[1] = 0;
	} else {
		buf[0] = 0x0; buf[1] = 0x1;
	}
	ret = FillH2CCmd_88E(padapter, H2C_RESET_TSF, 2, buf);
	/* Bug fix: the H2C result used to be ignored and _SUCCESS was
	 * returned unconditionally; propagate the failure to the caller. */
	if (ret != _SUCCESS)
		res = _FAIL;
_func_exit_;
	return res;
}
/*
 * Request a firmware TSF reset for 'reset_port' and wait for it to take
 * effect by polling the FW reset-TSF counter register (up to 10 x 100ms).
 * Returns _TRUE on success, _FAIL on timeout.
 *
 * NOTE(review): 'reg_bcncrtl' is computed but never used below - confirm
 * whether a beacon-control write was intended here.
 */
int reset_tsf(PADAPTER Adapter, u8 reset_port )
{
u8 reset_cnt_before = 0, reset_cnt_after = 0, loop_cnt = 0;
u32 reg_reset_tsf_cnt = (IFACE_PORT0==reset_port) ?
REG_FW_RESET_TSF_CNT_0:REG_FW_RESET_TSF_CNT_1;
u32 reg_bcncrtl = (IFACE_PORT0==reset_port) ?
REG_BCN_CTRL_1:REG_BCN_CTRL;
rtw_scan_abort(Adapter->pbuddy_adapter);	/* site survey will cause reset_tsf fail */
// Snapshot the counter, fire the reset, then wait for FW to bump it.
reset_cnt_after = reset_cnt_before = rtw_read8(Adapter,reg_reset_tsf_cnt);
rtl8188e_reset_tsf(Adapter, reset_port);
while ((reset_cnt_after == reset_cnt_before ) && (loop_cnt < 10)) {
rtw_msleep_os(100);
loop_cnt++;
reset_cnt_after = rtw_read8(Adapter, reg_reset_tsf_cnt);
}
return(loop_cnt >= 10) ? _FAIL : _TRUE;
}
#endif // CONFIG_TSF_RESET_OFFLOAD
|
#!/bin/bash
# Copyright 2018 AIShell-Foundation(Authors:Jiayu DU, Xingyu NA, Bengu WU, Hao ZHENG)
#           2018 Beijing Shell Shell Tech. Co. Ltd. (Author: Hui BU)
# Apache 2.0

# Downloads DaCiDian and builds a Kaldi-style dictionary directory for
# Mandarin ASR: lexicon.txt, silence_phones.txt, nonsilence_phones.txt,
# optional_silence.txt and extra_questions.txt.

. ./path.sh

download_dir=data/local/DaCiDian

if [ $# -ne 1 ]; then
  echo "Usage: $0 <dict-dir>";
  exit 1;
fi
dir=$1

# Download DaCiDian from github (skip the clone if a previous run already
# fetched it, so the script is rerunnable).
if [ ! -d $download_dir ]; then
  git clone https://github.com/aishell-foundation/DaCiDian.git $download_dir || exit 1;
fi

# Build the lexicon; map <UNK> to the phone spn (spoken noise).
mkdir -p $dir
python $download_dir/DaCiDian.py $download_dir/word_to_pinyin.txt $download_dir/pinyin_to_phone.txt > $dir/lexicon.txt
echo -e "<UNK>\tspn" >> $dir/lexicon.txt

# Prepare silence_phones.txt, nonsilence_phones.txt, optional_silence.txt, extra_questions.txt
# Collect every phone used in the lexicon (columns 2..N), drop "sil".
cat $dir/lexicon.txt | awk '{ for(n=2;n<=NF;n++){ phones[$n] = 1; }} END{for (p in phones) print p;}'| \
  sort -u |\
  perl -e '
  my %ph_cl;
  while (<STDIN>) {
    chomp($_);
    $phone = $_;
    next if ($phone eq "sil");
    if (exists $ph_cl{$phone}) { push(@{$ph_cl{$phone}}, $_) }
    else { $ph_cl{$phone} = [$_]; }
  }
  foreach $key ( keys %ph_cl ) {
    print "@{ $ph_cl{$key} }\n"
  }
  ' | sort -k1 > $dir/nonsilence_phones.txt  || exit 1;

echo sil > $dir/silence_phones.txt
echo sil > $dir/optional_silence.txt

# Silence phones form one question line; nonsilence phones are grouped by
# tone marker so tone variants of the same base phone share a question.
cat $dir/silence_phones.txt | awk '{printf("%s ", $1);} END{printf "\n";}' > $dir/extra_questions.txt || exit 1;
cat $dir/nonsilence_phones.txt | perl -e 'while(<>){ foreach $p (split(" ", $_)) {
  $p =~ m:^([^\d]+)(\d*)$: || die "Bad phone $_"; $q{$2} .= "$p "; } } foreach $l (values %q) {print "$l\n";}' \
  >> $dir/extra_questions.txt || exit 1;

echo "local/prepare_dict.sh succeeded"
exit 0;
|
<reponame>Patricia888/Code-Wars<filename>map-double-the-values-in-array/app.js
'use strict';
// Return a new array containing every element of `array` multiplied by two.
function double(array) {
  return array.map(function (value) {
    return value * 2;
  });
}
|
#!/bin/bash
#
# Copyright © 2016-2020 The Thingsboard Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Parse command-line flags; only "--loadDemo" is recognized (it makes the
# installer load demo data into the fresh database).
while [[ $# -gt 0 ]]
do
    key="$1"
    case $key in
        --loadDemo)
        # Bug fix: no inner "shift" here - the flag has no value, and the
        # shift at the bottom of the loop already consumes it. The old
        # double shift swallowed the argument following --loadDemo.
        LOAD_DEMO=true
        ;;
        *)
        # unknown option - ignored
        ;;
    esac
    shift # past argument or value
done

if [ "$LOAD_DEMO" == "true" ]; then
    loadDemo=true
else
    loadDemo=false
fi

# The ${pkg.*} tokens below are placeholders substituted at package build
# time; do not expand them here.
CONF_FOLDER=${pkg.installFolder}/conf
configfile=${pkg.name}.conf
jarfile=${pkg.installFolder}/bin/${pkg.name}.jar
installDir=${pkg.installFolder}/data

source "${CONF_FOLDER}/${configfile}"

run_user=${pkg.user}

# Run the ThingsBoard install application as the service user.
su -s /bin/sh -c "java -cp ${jarfile} $JAVA_OPTS -Dloader.main=org.thingsboard.server.ThingsboardInstallApplication \
                    -Dinstall.data_dir=${installDir} \
                    -Dinstall.load_demo=${loadDemo} \
                    -Dspring.jpa.hibernate.ddl-auto=none \
                    -Dinstall.upgrade=false \
                    -Dlogging.config=${pkg.installFolder}/bin/install/logback.xml \
                    org.springframework.boot.loader.PropertiesLauncher" "$run_user"

# Bug fix: capture java's exit status before echo overwrites $?; the old
# "exit $?" always returned the status of the echo (0), masking failures.
retval=$?
if [ $retval -ne 0 ]; then
    echo "ThingsBoard installation failed!"
else
    echo "ThingsBoard installed successfully!"
fi

exit $retval
|
<gh_stars>1-10
import React from 'react';
import PropTypes from 'prop-types';
import { Menu } from '@';
import styles from './topMenuDropdownWrap.css';
const TopMenuDropdownWrap = ({ children, ...props }) => (
<Menu {...props} className={styles.vdsCustomMenuWrap} tabIndex={0}>
{children}
</Menu>
);
TopMenuDropdownWrap.propTypes = {
children: PropTypes.node.isRequired,
};
export default TopMenuDropdownWrap;
|
#!/bin/bash
# Conda-style build script: configure against the environment prefix,
# link with zlib and boost from the same prefix, then build and install.

# Bug fix: was "-L/${PREFIX}/lib" - the stray leading slash produced the
# bogus search path "-L//<prefix>/lib" instead of "-L<prefix>/lib".
export LDFLAGS="-L${PREFIX}/lib"
export CFLAGS="-I${PREFIX}/include"
export CPPFLAGS="${CFLAGS}"
export LIBS='-lz'

./configure --with-boost=${PREFIX} --prefix=${PREFIX}
make
make install
|
/*
* Copyright (c) 2012, 2014, Oracle and/or its affiliates.
* All rights reserved. Use is subject to license terms.
*
* This file is available and licensed under the following license:
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the distribution.
* - Neither the name of Oracle Corporation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.oracle.javafx.scenebuilder.app;
import com.oracle.javafx.scenebuilder.app.about.AboutWindowController;
import com.oracle.javafx.scenebuilder.kit.editor.EditorController;
import com.oracle.javafx.scenebuilder.kit.editor.panel.content.ContentPanelController;
import com.oracle.javafx.scenebuilder.kit.editor.panel.content.driver.handles.AbstractGenericHandles;
import com.oracle.javafx.scenebuilder.kit.editor.panel.content.driver.handles.AbstractHandles;
import com.oracle.javafx.scenebuilder.kit.editor.panel.content.util.CardinalPoint;
import com.oracle.javafx.scenebuilder.kit.editor.panel.hierarchy.AbstractHierarchyPanelController;
import com.oracle.javafx.scenebuilder.kit.editor.panel.hierarchy.HierarchyItem;
import com.oracle.javafx.scenebuilder.kit.editor.selection.ObjectSelectionGroup;
import com.oracle.javafx.scenebuilder.kit.editor.selection.Selection;
import com.oracle.javafx.scenebuilder.kit.fxom.FXOMDocument;
import com.oracle.javafx.scenebuilder.kit.fxom.FXOMObject;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javafx.geometry.Bounds;
import javafx.geometry.Point2D;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Cell;
import javafx.scene.control.TreeItem;
/**
* This class groups the entry points reserved to QE testing.
*
* Design consideration
*
* This class tries to hide SB internal architecture as much as possible;
* for example, an FXML document is represented by a DocumentWindowController
* instance in SB; however, in this class, the FXML document is
* identified by the Scene instance holding the document window contents..
*
* However some internals must be disclosed:
*
* - FXOMObject : represents a design object ; it is paired with an object
* in the user scene graph ; FXOMObject.getSceneGraphObject() returns the
* matching scene graph object : sometimes it's a plain Node (eg Button),
* sometimes not (eg a Tab, a TableColumn...).
*
* - ...
*
*/
public class SceneBuilderTest {
/**
* Performs [File/New] menu command and returns the Scene instance
* holding the new document window.
*
* @return the scene instance holding the new document window (never null).
*/
public static Scene newFxmlFile() {
    // Create a fresh document window, show it, then expose its scene.
    final DocumentWindowController window =
            SceneBuilderApp.getSingleton().makeNewWindow();
    window.openWindow();
    return window.getScene();
}
/**
* Performs [File/Open] menu command with the file passed in argument.
* If an error happens, the method throws the corresponding exception
* (in place of displaying an alert dialog).
*
* @param fxmlFile fxml file to be opened (never null)
* @return the scene instance holding the new document window (never null).
* @throws IOException if the open operation has failed.
*/
public static Scene openFxmlFile(File fxmlFile) throws IOException {
    assert fxmlFile != null;
    // Build a new document window, load the file into it, then show it.
    final DocumentWindowController window =
            SceneBuilderApp.getSingleton().makeNewWindow();
    window.loadFromFile(fxmlFile);
    window.openWindow();
    return window.getScene();
}
/**
* Returns the root of the [user scene graph] ie the scene graph
* constructed from the content of the FXML file. If documentScene does
* not match any document window, returns null.
*
* Note: the returned is an [Object] because an FXML file is not limited
* to javafx.scene.Node.
*
* @param documentScene a scene holding a document window
*
* @return the user scene graph root or null if documentScene does
* not hold a document window
*/
public static Object getUserSceneGraphRoot(Scene documentScene) {
    assert documentScene != null;
    // Resolve the FXOM document behind the scene; no document -> null.
    final FXOMDocument fxomDocument = lookupFxomDocument(documentScene);
    return (fxomDocument == null) ? null : fxomDocument.getSceneGraphRoot();
}
/**
* Returns the set of selected objects. Each selected object is represented
* by an FXOMObject instance.
*
* @param documentScene a scene holding a document window
* @return the set of selected objects or null if documentScene does
* not hold a document window
*/
public static Set<FXOMObject> findSelectedObjects(Scene documentScene) {
    assert documentScene != null;
    final DocumentWindowController dwc = lookupWindowController(documentScene);
    if (dwc == null) {
        // documentScene does not hold a document window.
        return null;
    }
    final Selection selection = dwc.getEditorController().getSelection();
    if (selection.getGroup() instanceof ObjectSelectionGroup) {
        final ObjectSelectionGroup osg =
                (ObjectSelectionGroup) selection.getGroup();
        return Collections.unmodifiableSet(osg.getItems());
    }
    // TODO(elp) : non-object selection groups will be implemented later.
    return Collections.emptySet();
}
/**
* Returns the fxom object matching a given node in the content panel.
* Returns null if nothing is found.
*
* @param node a node part of the content panel (never null)
* @return null or the matching fxom object
*/
public static FXOMObject fxomObjectFromContentPanelNode(Node node) {
    assert node != null;
    assert node.getScene() != null;
    final DocumentWindowController dwc = lookupWindowController(node.getScene());
    if (dwc == null) {
        // node's scene does not hold a document window.
        return null;
    }
    // Hit-test the content panel at the center of the node's bounds.
    final Bounds bounds = node.getLayoutBounds();
    final double centerX = (bounds.getMinX() + bounds.getMaxX()) / 2.0;
    final double centerY = (bounds.getMinY() + bounds.getMaxY()) / 2.0;
    final Point2D center = node.localToScene(centerX, centerY, true /* rootScene */);
    final ContentPanelController cpc = dwc.getContentPanelController();
    return cpc.searchWithNode(node, center.getX(), center.getY());
}
/**
* Returns the node in content panel matching a given fxom object.
* This method invokes FXOMObject.getSceneGraphObject() and checks if
* it is a Node. If it's not, it returns null.
*
* @param documentScene a scene holding a document window
* @param fxomObject an fxom object (never null)
* @return null or the matching node in content panel
*/
public static Node fxomObjectToContentPanelNode(
        Scene documentScene, FXOMObject fxomObject) {
    assert documentScene != null;
    assert fxomObject != null;
    // The scene graph counterpart is only a Node for visual objects;
    // non-visual design objects (e.g. Tab, TableColumn) map to null.
    final Object sceneGraphObject = fxomObject.getSceneGraphObject();
    return (sceneGraphObject instanceof Node) ? (Node) sceneGraphObject : null;
}
/**
* Returns the fxom object matching a given node in the hierarchy panel.
* Returns null if nothing is found.
* This method lookups for a Cell object ancestor of the specified node parameter
* and returns the associated FXOMObject.
* If there is no Cell object ancestor, it returns null.
*
* @param node a node part of the hierarchy panel (never null)
* @return null or the matching fxom object
*/
public static FXOMObject fxomObjectFromHierarchyPanelNode(Node node) {
    assert node != null;
    assert node.getScene() != null;
    final FXOMObject result;
    final DocumentWindowController dwc = lookupWindowController(node.getScene());
    if (dwc == null) {
        result = null;
    } else {
        // Walk up the parent chain looking for the enclosing Cell.
        Parent parent = node.getParent();
        Cell<?> cell = null;
        while (parent != null) {
            if (parent instanceof Cell) {
                cell = (Cell<?>) parent;
                break;
            }
            // Bug fix: the original loop never advanced 'parent', so any
            // node without a Cell as its immediate parent spun forever.
            parent = parent.getParent();
        }
        // A cell has been found
        if (cell != null) {
            assert cell.isEmpty() == false;
            if (cell.isVisible()) {
                final Object item = cell.getItem();
                assert item instanceof HierarchyItem;
                final HierarchyItem hierarchyItem = (HierarchyItem) item;
                result = hierarchyItem.getFxomObject();
            } else {
                result = null;
            }
        } else {
            result = null;
        }
    }
    return result;
}
/**
* Returns the node in hierarchy panel matching a given fxom object.
* Returns null if the FXOMObject is currently not displayed by hierarchy
* panel.
* The returned Node is a Cell object.
*
* @param documentScene a scene holding a document window
* @param fxomObject an fxom object (never null)
* @return null or the matching node in hierarchy panel
*/
public static Node fxomObjectToHierarchyPanelNode(
Scene documentScene, FXOMObject fxomObject) {
assert documentScene != null;
assert fxomObject != null;
final Node result;
final DocumentWindowController dwc = lookupWindowController(documentScene);
if (dwc == null) {
result = null;
} else {
final EditorController ec = dwc.getEditorController();
assert fxomObject.getFxomDocument() == ec.getFxomDocument();
final AbstractHierarchyPanelController hpc = dwc.getHierarchyPanelController();
assert hpc != null;
assert hpc.getPanelControl() != null;
if (hpc.getPanelControl().isVisible()) {
final TreeItem<HierarchyItem> treeItem = hpc.lookupTreeItem(fxomObject);
if (treeItem != null) {
result = hpc.getCell(treeItem);
} else {
result = null;
}
} else {
result = null;
}
}
return result;
}
    /**
     * Looks for the TreeItem corresponding to the specified FXOM object.
     * If a TreeItem has been found, scrolls to this TreeItem within the
     * hierarchy panel. The whole tree is expanded first so that the lookup
     * can find items nested under collapsed ancestors.
     *
     * @param documentScene a scene holding a document window (never null)
     * @param fxomObject the fxom object to reveal (never null)
     */
    public static void revealInHierarchyPanel(
            Scene documentScene, FXOMObject fxomObject) {
        assert documentScene != null;
        assert fxomObject != null;
        final DocumentWindowController dwc = lookupWindowController(documentScene);
        if (dwc != null) {
            final EditorController ec = dwc.getEditorController();
            assert fxomObject.getFxomDocument() == ec.getFxomDocument();
            final AbstractHierarchyPanelController hpc
                    = dwc.getHierarchyPanelController();
            assert hpc != null;
            assert hpc.getPanelControl() != null;
            // First expand the hierarchy tree
            expandAllTreeItems(hpc.getRoot());
            // Then look for the fxom object
            if (hpc.getPanelControl().isVisible()) {
                final TreeItem<HierarchyItem> treeItem
                        = hpc.lookupTreeItem(fxomObject);
                if (treeItem != null) {
                    hpc.scrollTo(treeItem);
                }
            }
        }
    }
/**
* Returns the node representing a resize handle.
*
* @param documentScene a scene holding a document window
* @param fxomObject one of the selected fxom object
* @param cp the cardinal point of the target handle
* @return null or the node representing the handle
*/
public static Node lookupResizeHandle(
Scene documentScene, FXOMObject fxomObject, CardinalPoint cp) {
assert documentScene != null;
assert fxomObject != null;
final Node result;
final DocumentWindowController dwc = lookupWindowController(documentScene);
if (dwc == null) {
result = null;
} else {
final EditorController ec = dwc.getEditorController();
assert fxomObject.getFxomDocument() == ec.getFxomDocument();
assert ec.getSelection().isSelected(fxomObject);
final ContentPanelController cpc = dwc.getContentPanelController();
final AbstractHandles<?> h = cpc.lookupHandles(fxomObject);
if (h instanceof AbstractGenericHandles<?>) {
final AbstractGenericHandles<?> gh = (AbstractGenericHandles<?>) h;
result = gh.getHandleNode(cp);
} else {
result = null;
}
}
return result;
}
/**
* Returns the version string.
* It has the format 'Version: [major].[minor]-b[ii], Changeset: [someValue]'.
* <br>A typical value is 'Version: 2.0-b07, Changeset: 8a5ccd834b5f'.
*
* @return a version string. It is never null: in the case something weird
* would occur when constructing the proper value then what is returned is
* 'UNSET'.
*/
public static String getVersionString() {
AboutWindowController awc = new AboutWindowController();
return awc.getBuildInfo();
}
/**
* Closes the preview window associated to a document window.
* Performs nothing if documentScene is not a scene associated to a
* document window or if preview window is not opened.
*
* @param documentScene a scene holding a document window
*/
public static void closePreviewWindow(Scene documentScene) {
final DocumentWindowController dwc = lookupWindowController(documentScene);
if (dwc != null) {
dwc.getPreviewWindowController().closeWindow();
}
}
    /**
     * Starts the application in test mode.
     * In this mode, no files are opened at application startup.
     *
     * @param args arguments to SceneBuilderApp.main()
     */
    public static void startApplication(String[] args) {
        // Tell the platform we are driven by a test bed before main() runs.
        AppPlatform.setStartingFromTestBed(true);
        SceneBuilderApp.main(args);
    }
/*
* Private
*/
private static FXOMDocument lookupFxomDocument(Scene documentScene) {
final FXOMDocument result;
final DocumentWindowController dwc = lookupWindowController(documentScene);
if (dwc == null) {
result = null;
} else {
result = dwc.getEditorController().getFxomDocument();
}
return result;
}
private static DocumentWindowController lookupWindowController(Scene documentScene) {
DocumentWindowController result = null;
final SceneBuilderApp app = SceneBuilderApp.getSingleton();
for (DocumentWindowController c : app.getDocumentWindowControllers()) {
if (c.getScene() == documentScene) {
result = c;
break;
}
}
return result;
}
private static <T> void expandAllTreeItems(final TreeItem<T> parentTreeItem) {
if (parentTreeItem != null) {
parentTreeItem.setExpanded(true);
final List<TreeItem<T>> children = parentTreeItem.getChildren();
if (children != null) {
for (TreeItem<T> child : children) {
expandAllTreeItems(child);
}
}
}
}
}
|
def remove_duplicates(arr):
    """Return a new list with duplicates removed, preserving first-seen order.

    Uses a set for O(1) membership tests (O(n) overall, instead of the
    original O(n^2) `item not in list` scan). Elements must therefore be
    hashable, which holds for the numeric inputs this script handles.
    """
    seen = set()
    result = []
    for item in arr:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result


print(remove_duplicates([1, 1, 2, 3, 3]))  # Outputs: [1, 2, 3]
import { NgModule } from '@angular/core';
import { IonicModule } from 'ionic-angular';
import {FilterPipe} from "./filter.pipe";

// All custom pipes provided by this module; exported as a list so other
// modules can reference the same set if needed.
export const pipes = [
    FilterPipe
];

// Shared feature module that declares and re-exports the app's custom pipes.
@NgModule({
    declarations:[pipes],
    imports: [IonicModule],
    exports: [pipes]
})
export class PipesModule { }
|
// Returns the current time in milliseconds since the Unix epoch.
// When the high-resolution Performance API is available, combines
// performance.now() (ms since navigation start) with navigationStart to get
// an epoch-based sub-millisecond timestamp; otherwise falls back to Date.
// NOTE(review): performance.timing is deprecated in favour of
// performance.timeOrigin — confirm target browsers before relying on it.
export function getNow() {
    return window.performance && window.performance.now ? (window.performance.now() + window.performance.timing.navigationStart) : +new Date()
}
/**
 * Shallow-copies the enumerable properties of each source object onto
 * target, left to right (later sources win). Like the original, uses
 * for-in, so inherited enumerable properties are copied too.
 * Returns the mutated target.
 */
export function extend(target, ...rest) {
    rest.forEach((source) => {
        for (const key in source) {
            target[key] = source[key]
        }
    })
    return target
}
|
<filename>app/controllers/categories_controller.rb
class CategoriesController < ApplicationController
  # Home page: all categories plus the most-voted article.
  def index
    @categories = Category.all
    @most_voted = Article.get_most_votes
  end

  # New-category form; requires an authenticated user.
  def new
    if !current_user.nil?
      @category = Category.new
      @categories = Category.all
    else
      flash[:notice] = 'Please login to continue'
      redirect_to '/login'
    end
  end

  # Creates a category unless one with the same (capitalized) name exists.
  #
  # BUGFIX: the original used Category.create, which persisted the record
  # *before* the uniqueness check ran — duplicates were saved anyway, and the
  # user_id/name assignments made after create were only persisted on the
  # success path. Building with Category.new defers the save until after the
  # duplicate check.
  def create
    @category = Category.new(category_params)
    @category.user_id = current_user.id
    @category.name = params[:category][:name].capitalize
    @cat = Category.category_exists(@category.name)
    if @cat.exists?
      render :new, notice: 'Category already exists'
    else
      @category.save
      redirect_to root_path
    end
  end

  # Category detail: its articles, most recently updated first; requires login.
  def show
    if !current_user.nil?
      @category = Category.find(params[:id])
      @articles = @category.articles.order('updated_at DESC')
    else
      redirect_to '/login'
    end
  end

  # Fallback image helper for categories without an uploaded image.
  def default_image
    image_tag('default.jpg', alt: 'default image')
  end

  def destroy
    @category = Category.find(params[:id])
    @category.destroy
    flash[:success] = 'Deleted successfully'
    redirect_to root_path
  end

  private

  # Strong parameters for category create/update.
  def category_params
    params.require(:category).permit(:name, :priority)
  end
end
|
#!/bin/bash
# Run the gestures binary over every jpg under images/$1.
# $1 should be training/{label} or testing/{label}
# BUGFIX: quote the -name pattern (and the path) so the shell does not
# glob-expand *.jpg against the current directory before find sees it.
find "images/$1" -name '*.jpg' | xargs -n 1 cmake-build-debug/gestures.bin --model_folder /Users/npahucki/Projects/Edmodo/openpose/models/ --disable_display --image_path
package com.aliyun.demo;
import com.aliyuncs.DefaultAcsClient;
import com.aliyuncs.exceptions.ClientException;
import com.aliyuncs.http.MethodType;
import com.aliyuncs.http.ProtocolType;
import com.aliyuncs.profile.DefaultProfile;
import com.aliyuncs.profile.IClientProfile;
import com.aliyuncs.sts.model.v20150401.AssumeRoleRequest;
import com.aliyuncs.sts.model.v20150401.AssumeRoleResponse;
import net.sf.json.JSONObject;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Servlet that issues temporary Aliyun STS credentials to clients.
 * Configuration (access keys, role ARN, policy file, token lifetime) is read
 * from config.json in the web application root; the "ak"/"sk" request
 * parameters may override the configured access keys.
 */
@WebServlet(asyncSupported = true)
public class AppTokenServer extends HttpServlet {

    private static final long serialVersionUID = 5522372203700422672L;

    // Currently only the "cn-hangzhou" region is available for STS;
    // do not use any other region value.
    public static final String REGION_CN_HANGZHOU = "cn-hangzhou";
    public static final String STS_API_VERSION = "2015-04-01";

    /**
     * Calls the Aliyun STS AssumeRole API to obtain temporary credentials.
     *
     * @param accessKeyId RAM user access key id
     * @param accessKeySecret RAM user access key secret
     * @param roleArn ARN of the role to assume (from the RAM console)
     * @param roleSessionName session label for auditing
     * @param policy JSON policy restricting the temporary credentials
     * @param protocolType must be HTTPS for STS
     * @param durationSeconds lifetime of the returned token
     * @throws ClientException on any STS/client error
     */
    protected AssumeRoleResponse assumeRole(String accessKeyId, String accessKeySecret, String roleArn,
            String roleSessionName, String policy, ProtocolType protocolType, long durationSeconds)
            throws ClientException {
        try {
            // Create an Aliyun ACS client used to issue OpenAPI requests.
            IClientProfile profile = DefaultProfile.getProfile(REGION_CN_HANGZHOU, accessKeyId, accessKeySecret);
            DefaultAcsClient client = new DefaultAcsClient(profile);
            // Create an AssumeRoleRequest and set the request parameters.
            final AssumeRoleRequest request = new AssumeRoleRequest();
            request.setVersion(STS_API_VERSION);
            request.setMethod(MethodType.POST);
            request.setProtocol(protocolType);
            request.setRoleArn(roleArn);
            request.setRoleSessionName(roleSessionName);
            request.setPolicy(policy);
            request.setDurationSeconds(durationSeconds);
            // Send the request and obtain the response.
            final AssumeRoleResponse response = client.getAcsResponse(request);
            return response;
        } catch (ClientException e) {
            // NOTE(review): this catch simply rethrows and could be removed.
            throw e;
        }
    }

    /**
     * Reads the whole file at the given path into a String.
     * Returns the empty string when the file is missing or unreadable
     * (errors are only printed, not propagated).
     */
    public static String ReadJson(String path) {
        // Load the file from the given path.
        File file = new File(path);
        System.out.println("用户输入url:" + file.getAbsolutePath());
        BufferedReader reader = null;
        // Accumulate the file contents in a StringBuffer.
        StringBuffer data = new StringBuffer();
        //
        try {
            reader = new BufferedReader(new FileReader(file));
            // Buffer for each line read from the file.
            String temp = null;
            while ((temp = reader.readLine()) != null) {
                data.append(temp);
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close the file stream.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return data.toString();
    }

    /**
     * Handles token requests: reads config.json, calls AssumeRole and writes
     * a JSON (or JSONP, when a "callback" parameter is present) response with
     * the temporary credentials, or a 500-style JSON body on STS failure.
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String path = getServletContext().getRealPath("config.json");
        System.out.println("用户输入11url:" + path);
        //String data = ReadJson("D:\\Workspace\\Eclipse\\NetlistenerWeb\\WebContent\\config.json");
        //System.out.println("用户输入url:" + path);
        String data = ReadJson(path);
        System.out.println("用户输入url:" + data);
        if (data.equals("")) {
            response(request, response, "./config.json is empty or not found");
            return;
        }
        System.out.println(data);
        JSONObject jsonObj = JSONObject.fromObject(data);
        // Only RAM users (sub-accounts) may call the AssumeRole API;
        // the primary account's AccessKeys cannot be used for AssumeRole.
        // First create a RAM user in the RAM console and create AccessKeys for it.
        String accessKeyId = request.getParameter("ak");
        if(accessKeyId==null||accessKeyId.length()==0) {
            accessKeyId = jsonObj.getString("AccessKeyID");
        }
        System.out.println("用户输入AccessKeyId:" + accessKeyId);
        String accessKeySecret = request.getParameter("sk");
        if(accessKeySecret==null||accessKeySecret.length()==0) {
            accessKeySecret = jsonObj.getString("AccessKeySecret");
        }
        System.out.println("用户输入AccessKeySecret:" + accessKeySecret);
        // RoleArn must be obtained from the RAM console.
        String roleArn = jsonObj.getString("RoleArn");
        long durationSeconds = jsonObj.getLong("TokenExpireTime");
        String policy = ReadJson(getServletContext().getRealPath("") + jsonObj.getString("PolicyFile"));
        // RoleSessionName labels the temporary token's session; it identifies
        // your user, mainly for auditing or distinguishing token recipients.
        // Mind the length and character rules: no spaces, only '-' '_'
        // letters and digits etc. — see the API documentation for details.
        String roleSessionName = "alice-001";
        // This must be HTTPS.
        ProtocolType protocolType = ProtocolType.HTTPS;
        try {
            final AssumeRoleResponse stsResponse = assumeRole(accessKeyId, accessKeySecret, roleArn, roleSessionName,
                    policy, protocolType, durationSeconds);
            Map<String, String> respMap = new LinkedHashMap<String, String>();
            respMap.put("StatusCode", "200");
            respMap.put("AccessKeyId", stsResponse.getCredentials().getAccessKeyId());
            respMap.put("AccessKeySecret", stsResponse.getCredentials().getAccessKeySecret());
            respMap.put("SecurityToken", stsResponse.getCredentials().getSecurityToken());
            respMap.put("Expiration", stsResponse.getCredentials().getExpiration());
            JSONObject ja1 = JSONObject.fromObject(respMap);
            response(request, response, ja1.toString());
        } catch (ClientException e) {
            Map<String, String> respMap = new LinkedHashMap<String, String>();
            respMap.put("StatusCode", "500");
            respMap.put("ErrorCode", e.getErrCode());
            respMap.put("ErrorMessage", e.getErrMsg());
            JSONObject ja1 = JSONObject.fromObject(respMap);
            response(request, response, ja1.toString());
        }
    }

    // POST is handled identically to GET.
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        doGet(request, response);
    }

    /**
     * Writes the result body, wrapping it in a JSONP callback when the
     * "callback" request parameter is supplied.
     */
    private void response(HttpServletRequest request, HttpServletResponse response, String results) throws IOException {
        String callbackFunName = request.getParameter("callback");
        if (callbackFunName == null || callbackFunName.equalsIgnoreCase(""))
            response.getWriter().println(results);
        else
            response.getWriter().println(callbackFunName + "( " + results + " )");
        response.setStatus(HttpServletResponse.SC_OK);
        response.flushBuffer();
    }
}
|
'use strict';

const crypto = require('crypto');
const parse = require('url').parse;
const bodyify = require('querystring').stringify;

// Static AES-128-ECB keys used to encrypt NetEase "eapi" and "linuxapi"
// request payloads. NOTE(review): linuxapiKey is a redacted placeholder.
const eapiKey = 'e82ckenh8dichen8';
const linuxapiKey = '<KEY>';
const decrypt = (buffer, key) => {
const decipher = crypto.createDecipheriv('aes-128-ecb', key, null);
return Buffer.concat([decipher.update(buffer), decipher.final()]);
};
const encrypt = (buffer, key) => {
const cipher = crypto.createCipheriv('aes-128-ecb', key, null);
return Buffer.concat([cipher.update(buffer), cipher.final()]);
};
module.exports = {
    // NetEase "eapi" request signing: md5-sign the path+payload, then
    // AES-128-ECB encrypt the composite string with eapiKey.
    eapi: {
        encrypt: (buffer) => encrypt(buffer, eapiKey),
        decrypt: (buffer) => decrypt(buffer, eapiKey),
        encryptRequest: (url, object) => {
            url = parse(url);
            const text = JSON.stringify(object);
            // Fixed signing template used by the eapi protocol.
            const message = `nobody${url.path}use${text}md5forencrypt`;
            const digest = crypto
                .createHash('md5')
                .update(message)
                .digest('hex');
            const data = `${url.path}-36cd479b6b5-${text}-36cd479b6b5-${digest}`;
            return {
                // Rewrite e.g. /weapi/... or /api/... to /eapi/...
                url: url.href.replace(/\w*api/, 'eapi'),
                body: bodyify({
                    params: module.exports.eapi
                        .encrypt(Buffer.from(data))
                        .toString('hex')
                        .toUpperCase(),
                }),
            };
        },
    },
    // NetEase "linuxapi" forwarding: wrap the whole request description and
    // AES-128-ECB encrypt it with linuxapiKey.
    linuxapi: {
        encrypt: (buffer) => encrypt(buffer, linuxapiKey),
        decrypt: (buffer) => decrypt(buffer, linuxapiKey),
        encryptRequest: (url, object) => {
            url = parse(url);
            const text = JSON.stringify({
                method: 'POST',
                url: url.href,
                params: object,
            });
            return {
                url: url.resolve('/api/linux/forward'),
                body: bodyify({
                    eparams: module.exports.linuxapi
                        .encrypt(Buffer.from(text))
                        .toString('hex')
                        .toUpperCase(),
                }),
            };
        },
    },
    // Migu API body encryption: OpenSSL-compatible AES-256-CBC ("Salted__"
    // header + EVP_BytesToKey derivation) with an RSA-encrypted session key.
    miguapi: {
        encryptBody: (object) => {
            const text = JSON.stringify(object);
            const derive = (password, salt, keyLength, ivSize) => {
                // EVP_BytesToKey
                salt = salt || Buffer.alloc(0);
                const keySize = keyLength / 8;
                const repeat = Math.ceil((keySize + ivSize * 8) / 32);
                const buffer = Buffer.concat(
                    Array(repeat)
                        .fill(null)
                        .reduce(
                            (result) =>
                                result.concat(
                                    crypto
                                        .createHash('md5')
                                        .update(
                                            Buffer.concat([
                                                result.slice(-1)[0],
                                                password,
                                                salt,
                                            ])
                                        )
                                        .digest()
                                ),
                            [Buffer.alloc(0)]
                        )
                );
                return {
                    key: buffer.slice(0, keySize),
                    iv: buffer.slice(keySize, keySize + ivSize),
                };
            };
            const password = Buffer.from(
                    crypto.randomBytes(32).toString('hex')
                ),
                salt = crypto.randomBytes(8);
            // NOTE(review): the RSA public key below is a redacted placeholder.
            const key =
                '-----<KEY>END PUBLIC KEY-----';
            const secret = derive(password, salt, 256, 16);
            const cipher = crypto.createCipheriv(
                'aes-256-cbc',
                secret.key,
                secret.iv
            );
            return bodyify({
                data: Buffer.concat([
                    Buffer.from('Salted__'),
                    salt,
                    cipher.update(Buffer.from(text)),
                    cipher.final(),
                ]).toString('base64'),
                secKey: crypto
                    .publicEncrypt(
                        { key, padding: crypto.constants.RSA_PKCS1_PADDING },
                        password
                    )
                    .toString('base64'),
            });
        },
    },
    // URL-safe base64 ('+' -> '-', '/' -> '_').
    base64: {
        encode: (text, charset) =>
            Buffer.from(text, charset)
                .toString('base64')
                .replace(/\+/g, '-')
                .replace(/\//g, '_'),
        decode: (text, charset) =>
            Buffer.from(
                text.replace(/-/g, '+').replace(/_/g, '/'),
                'base64'
            ).toString(charset),
    },
    // Builds an image URL whose path segment is an XOR+md5 signature of the id.
    uri: {
        retrieve: (id) => {
            id = id.toString().trim();
            // NOTE(review): '<KEY>' is a redacted placeholder for the XOR key.
            const key = '<KEY>';
            const string = Array.from(Array(id.length).keys())
                .map((index) =>
                    String.fromCharCode(
                        id.charCodeAt(index) ^
                            key.charCodeAt(index % key.length)
                    )
                )
                .join('');
            const result = crypto
                .createHash('md5')
                .update(string)
                .digest('base64')
                .replace(/\//g, '_')
                .replace(/\+/g, '-');
            return `http://p1.music.126.net/${result}/${id}`;
        },
    },
    md5: {
        // Hex md5 of an in-memory value.
        digest: (value) => crypto.createHash('md5').update(value).digest('hex'),
        // Hex md5 of a readable stream, resolved when the stream finishes.
        pipe: (source) =>
            new Promise((resolve, reject) => {
                const digest = crypto.createHash('md5').setEncoding('hex');
                source
                    .pipe(digest)
                    .on('error', (error) => reject(error))
                    .once('finish', () => resolve(digest.read()));
            }),
    },
};
// Optionally expose the Kuwo DES helpers when the module is present;
// silently skipped otherwise (kwDES is an optional local dependency).
try {
    module.exports.kuwoapi = require('./kwDES');
} catch (e) {}
|
# Prompt for an upper bound and print every prime in [2, n] by trial division.
n = int(input("Enter a number: "))
for candidate in range(2, n + 1):
    # A candidate is prime when no smaller integer >= 2 divides it
    # (all() over an empty generator is True, so 2 is printed correctly).
    if all(candidate % divisor != 0 for divisor in range(2, candidate)):
        print(candidate)
#!/bin/sh
# Package the channel and side-load it onto a Roku device.
# Requires ROKU_IP and ROKU_PASSWORD to be set in the environment.

ZIP_NAME="roku-tinydesk.zip"

# Remove any previously installed dev channel, then the stale archive.
./uninstall.sh
rm $ZIP_NAME;
# Only ship what the Roku firmware needs: sources, images and the manifest.
zip $ZIP_NAME -r -9 . -i "source/*" -i "images/*" -i "manifest"
# Push the package to the device's dev installer endpoint (digest auth).
curl -s -S -F "mysubmit=Install" -F "archive=@$ZIP_NAME" -F "passwd=" --digest --user rokudev:$ROKU_PASSWORD http://$ROKU_IP/plugin_install
|
public static boolean isAlphabetInString(String str) {
// convert the given string to lower case string.
str = str.toLowerCase();
// create a count array of size 26(size of english
// alphabet)
int[] count = new int[26];
// count stores the number of times each character
// appears in the string.
for (int i=0; i<str.length(); i++)
count[str.charAt(i) - 'a']++;
// itereate over the count array
for (int i=0; i<26; i++)
if(count[i] == 0)
return false;
return true;
} |
#! /bin/bash

set -euo pipefail

# Default build settings; each can be overridden by the options parsed below.
BUILD_DISTRO=poky
BUILD_SDK=no
BUILD_PATH="$(realpath build)"
KERNEL_PROVIDER=
SITE_CONF=
KAS_PREFIX=kas/dev
KAS_INCLUDES="kas/inc/ci.yml"

# Print command-line usage.
usage() {
    echo "meta-sancloud CI build script"
    echo "Usage:"
    echo "  $0 [-R] [-A] [-s] [-n] [-k KERNEL] [-i SITE_CONF] [-p BUILD_PATH] [-h]"
    echo ""
    echo "    -R: Build release config (default is development config)."
    echo ""
    echo "    -A: Build the Arago distro (default is the Poky distro)."
    echo ""
    echo "    -s: Build an SDK as well as images."
    echo ""
    echo "    -k KERNEL: Use an alternative kernel recipe"
    echo "       Valid values for KERNEL are 'mainline', 'stable', 'lts' and 'rt'."
    echo ""
    echo "    -i SITE_CONF: Use the given file to provide site-specific configuration."
    echo ""
    echo "    -p BUILD_PATH: Use the given path for all intermediate files."
    echo "       The default build path is 'build' under the current directory."
    echo ""
    echo "    -h: Print this help message and exit".
}

# Parse command-line options into the settings above.
while getopts ":RAsk:i:p:h" opt; do
    case $opt in
        R)
            KAS_PREFIX=kas
            ;;
        A)
            BUILD_DISTRO=arago
            ;;
        s)
            BUILD_SDK=yes
            ;;
        k)
            KERNEL_PROVIDER=$OPTARG
            ;;
        i)
            SITE_CONF="$(realpath "$OPTARG")"
            ;;
        p)
            BUILD_PATH="$(realpath "$OPTARG")"
            ;;
        h)
            usage
            exit 0
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            usage >&2
            exit 1
            ;;
        :)
            echo "Option missing argument: -$OPTARG" >&2
            usage >&2
            exit 1
            ;;
    esac
done

echo ">>> Preparing for build"
if [[ -n "$KERNEL_PROVIDER" ]]; then
    echo ">>> Enabling kernel provider '$KERNEL_PROVIDER'"
    KAS_INCLUDES="kas/inc/kernel-${KERNEL_PROVIDER}.yml:${KAS_INCLUDES}"
fi
# Start from clean output and build directories.
rm -rf images "$BUILD_PATH"
mkdir images "$BUILD_PATH"
export KAS_BUILD_DIR="$BUILD_PATH"
if [[ -n "$SITE_CONF" ]]; then
    echo ">>> Linking '$SITE_CONF' as site configuration"
    mkdir "$BUILD_PATH/conf"
    ln -s "$SITE_CONF" "$BUILD_PATH/conf/site.conf"
fi

echo ">>> Building images"
kas build --update --force-checkout ${KAS_PREFIX}/bbe-${BUILD_DISTRO}.yml:$KAS_INCLUDES
# Copy the distro-independent boot artifacts into the output directory.
cp "$BUILD_PATH/tmp/deploy/images/bbe/MLO" \
   "$BUILD_PATH/tmp/deploy/images/bbe/am335x-sancloud-bbe.dtb" \
   "$BUILD_PATH/tmp/deploy/images/bbe/modules-bbe.tgz" \
   "$BUILD_PATH/tmp/deploy/images/bbe/u-boot.img" \
   "$BUILD_PATH/tmp/deploy/images/bbe/u-boot-initial-env" \
   "$BUILD_PATH/tmp/deploy/images/bbe/zImage" \
   images
# Optional board-variant device trees; only copied when the build produced them.
for dtb in am335x-sancloud-bbe-icu4.dtb am335x-sancloud-bbe-extended-wifi.dtb am335x-sancloud-bbe-lite.dtb; do
    srcpath="$BUILD_PATH/tmp/deploy/images/bbe/${dtb}"
    if [[ -e "${srcpath}" ]]; then
        cp "${srcpath}" images
    fi
done
# Per-distro rootfs artifacts.
if [[ "$BUILD_DISTRO" == "poky" ]]; then
    cp "$BUILD_PATH/tmp/deploy/images/bbe/core-image-base-bbe.wic.bmap" \
       "$BUILD_PATH/tmp/deploy/images/bbe/core-image-base-bbe.wic.xz" \
       "$BUILD_PATH/tmp/deploy/images/bbe/core-image-base-bbe.ext4.bmap" \
       "$BUILD_PATH/tmp/deploy/images/bbe/core-image-base-bbe.ext4.xz" \
       "$BUILD_PATH/tmp/deploy/images/bbe/sancloud-ramdisk-image-bbe.ext2.xz" \
       "$BUILD_PATH/tmp/deploy/images/bbe/sancloud-ramdisk-image-bbe.squashfs-lz4" \
       "$BUILD_PATH/tmp/deploy/images/bbe/core-image-base.env" \
       images
elif [[ "$BUILD_DISTRO" == "arago" ]]; then
    cp "$BUILD_PATH/tmp/deploy/images/bbe/tisdk-base-image-bbe.wic.bmap" \
       "$BUILD_PATH/tmp/deploy/images/bbe/tisdk-base-image-bbe.wic.xz" \
       "$BUILD_PATH/tmp/deploy/images/bbe/tisdk-base-image.env" \
       "$BUILD_PATH/tmp/deploy/images/bbe/tisdk-default-image-bbe.wic.bmap" \
       "$BUILD_PATH/tmp/deploy/images/bbe/tisdk-default-image-bbe.wic.xz" \
       "$BUILD_PATH/tmp/deploy/images/bbe/tisdk-default-image.env" \
       images
fi

if [[ "$BUILD_SDK" == "yes" ]]; then
    echo ">>> Building SDK"
    kas build ${KAS_PREFIX}/bbe-sdk-${BUILD_DISTRO}.yml:$KAS_INCLUDES
    cp "$BUILD_PATH/tmp/deploy/sdk/${BUILD_DISTRO}-"*.sh images
fi
|
from typing import List


def birthday(s: List[int], d: int, m: int) -> int:
    """Count the contiguous runs of ``m`` squares whose values sum to ``d``.

    HackerRank 'Birthday Chocolate': ``s`` holds the square values, ``d`` the
    day and ``m`` the month. Returns 0 when ``m`` exceeds ``len(s)``.
    """
    windows = (s[start:start + m] for start in range(len(s) - m + 1))
    return sum(1 for window in windows if sum(window) == d)
import React from 'react';
import { useSelector, useDispatch } from 'react-redux';
import LocalInterpreter from '../components/LocalInterpreter/LocalInterpreter';
const LocalInterpreterContainer = () => {
const {
tweets,
selectedTweet,
secondSelectedTweet
} = useSelector(state => state.tweet, []);
const {
currentModelInfo,
features
} = useSelector(state => state.globalInterpreter, []);
const {
qType,
selectedTweetRules,
contrastiveRules,
contrastiveEXs,
diffRule,
isCFLoading
} = useSelector(state => state.localInterpreter, []);
if (!tweets || tweets.length === 0) return <div />;
return (
<LocalInterpreter
tweets={tweets}
selectedTweet={selectedTweet}
secondSelectedTweet={secondSelectedTweet}
features={features}
qType={qType}
contrastiveRules={contrastiveRules}
selectedTweetRules={selectedTweetRules}
contrastiveEXs={contrastiveEXs}
currentModelInfo={currentModelInfo}
diffRule={diffRule}
isCFLoading={isCFLoading}
/>
);
};
export default LocalInterpreterContainer;
|
#!/bin/bash

set -e

source toolversions.sh
declare -r CORE_PROTOS_ROOT=$PROTOBUF_TOOLS_ROOT/tools

# This script generates all APIs from the googleapis/googleapis github repository,
# using the code generator from googleapis/gapic-generator.
# It will fetch both repositories if necessary.
# Currently it will only work on Windows due to the way nuget packages installed;
# changing toolversions.sh could mitigate that, if it's ever necessary.
#
# Prerequisites
# - Bash as supplied with Windows git
# - git
# - wget
# - unzip
# - Java 9

OUTDIR=tmp
# Allow synthtool to substitute an alternative googleapis checkout.
if [[ "$SYNTHTOOL_GOOGLEAPIS" != "" ]]
then
  declare -r GOOGLEAPIS="$SYNTHTOOL_GOOGLEAPIS"
else
  declare -r GOOGLEAPIS=googleapis
fi
# Clones (or updates) the gapic-generator and googleapis repositories,
# skipping googleapis when synthtool supplies its own checkout.
fetch_github_repos() {
  if [ -d "gapic-generator" ]
  then
    git -C gapic-generator pull -q
    git -C gapic-generator submodule update
  else
    git clone --recursive https://github.com/googleapis/gapic-generator \
      --config core.autocrlf=false \
      --config core.eol=lf
  fi

  if [[ "$SYNTHTOOL_GOOGLEAPIS" == "" ]]
  then
    if [ -d "googleapis" ]
    then
      git -C googleapis pull -q
    else
      # Auto-detect whether we're cloning the public or private googleapis repo.
      git remote -v | grep -q google-cloud-dotnet-private && repo=googleapis-private || repo=googleapis
      git clone --recursive https://github.com/googleapis/${repo} googleapis
    fi
  fi
}
# Generates one API ($1) with protoc + the C# micro-generator plugin,
# then copies the result into apis/.
generate_microgenerator() {
  API_TMP_DIR=$OUTDIR/$1
  PRODUCTION_PACKAGE_DIR=$API_TMP_DIR/$1
  API_OUT_DIR=apis
  API_SRC_DIR=$GOOGLEAPIS/$($PYTHON3 tools/getapifield.py apis/apis.json $1 protoPath)

  # We currently assume there's exactly one service config per API
  GRPC_SERVICE_CONFIG=$(echo $API_SRC_DIR/*.json)

  mkdir -p $PRODUCTION_PACKAGE_DIR
  echo "Generating $1 (micro-generator)"
  $PROTOC \
    --csharp_out=$PRODUCTION_PACKAGE_DIR \
    --grpc_out=$PRODUCTION_PACKAGE_DIR \
    --gapic_out=$API_TMP_DIR \
    --gapic_opt=grpc-service-config=$GRPC_SERVICE_CONFIG \
    -I $GOOGLEAPIS \
    -I $CORE_PROTOS_ROOT \
    --plugin=protoc-gen-grpc=$GRPC_PLUGIN \
    --plugin=protoc-gen-gapic=$GAPIC_PLUGIN \
    $API_SRC_DIR/*.proto \
    2>&1 | grep -v "but not used" || true # Ignore import warnings (and grep exit code)

  # We generate our own project files
  rm $(find tmp -name '*.csproj')

  # Copy the rest into the right place
  cp -r $API_TMP_DIR $API_OUT_DIR
}
# Generates one API ($1) with the legacy Java GAPIC generator: builds a proto
# descriptor set, runs the generator, then emits C# protos/gRPC stubs.
generate_gapicgenerator() {
  API_TMP_DIR=$OUTDIR/$1
  API_OUT_DIR=apis
  PROTO_PATH=$($PYTHON3 tools/getapifield.py apis/apis.json $1 protoPath)
  API_SRC_DIR=$GOOGLEAPIS/$PROTO_PATH
  SERVICE_YAML=$($PYTHON3 tools/getapifield.py apis/apis.json $1 serviceYaml)
  # This is a hacky way of allowing a proto package to be explicitly specified,
  # or inferred from the proto path. We might want to add an option to getapifield.py for default values.
  PROTO_PACKAGE=$(python tools/getapifield.py apis/apis.json $1 protoPackage 2> /dev/null || echo $PROTO_PATH | sed 's/\//./g')

  # Look the versioned directory and its parent for the service YAML.
  # (Currently the location is in flux; we should be able to use just the
  # versioned directory eventually.)
  if [[ -f $API_SRC_DIR/$SERVICE_YAML ]]
  then
    API_YAML=$API_SRC_DIR/$SERVICE_YAML
  elif [[ -f $API_SRC_DIR/../$SERVICE_YAML ]]
  then
    API_YAML=$API_SRC_DIR/../$SERVICE_YAML
  else
    echo "$SERVICE_YAML doesn't exist. Please check inputs."
    exit 1
  fi

  mkdir $API_TMP_DIR

  # There should be only one gapic yaml file...
  for i in $API_SRC_DIR/*_gapic.yaml
  do
    cp $i $API_TMP_DIR/gapic.yaml
  done

  # Include extra protos, when they're present, even if they're not needed.
  extra_protos=()
  if [[ -d $GOOGLEAPIS/google/iam/v1 ]]; then extra_protos+=($GOOGLEAPIS/google/iam/v1/*.proto); fi
  if [[ -d $GOOGLEAPIS/grafeas/v1 ]]; then extra_protos+=($GOOGLEAPIS/grafeas/v1/*.proto); fi
  if [[ -f $GOOGLEAPIS/google/cloud/common_resources.proto ]]; then extra_protos+=($GOOGLEAPIS/google/cloud/common_resources.proto); fi

  # Generate the descriptor set for this API.
  $PROTOC \
    -I $GOOGLEAPIS \
    -I $CORE_PROTOS_ROOT \
    --include_source_info \
    --include_imports \
    -o $API_TMP_DIR/protos.desc \
    $API_SRC_DIR/*.proto \
    ${extra_protos[*]} \
    2>&1 | grep -v "but not used" || true # Ignore import warnings (and grep exit code)

  jvm_args=()
  jvm_args+=(-cp gapic-generator/build/libs/gapic-generator-${GAPIC_GENERATOR_VERSION}-all.jar)

  args=()
  args+=(--descriptor_set=$API_TMP_DIR/protos.desc)
  args+=(--service_yaml=$API_YAML)
  args+=(--gapic_yaml=$API_TMP_DIR/gapic.yaml)
  args+=(--output=$API_TMP_DIR)
  args+=(--language=csharp)
  args+=(--package=$PROTO_PACKAGE)

  # Suppress protobuf warnings in Java 9/10. By the time they
  # become a problem, we won't be using Java...
  java $JAVA9OPTS ${jvm_args[*]} com.google.api.codegen.GeneratorMain GAPIC_CODE ${args[*]} \
    2>&1 | grep -v "does not have control environment" || true # Ignore control environment warnings (and grep exit code)

  cp -r $API_TMP_DIR/$1 $API_OUT_DIR

  # Generate the C# protos/gRPC directly into the right directory
  # This assumes they all belong to the same API, and are in the same namespace...
  $PROTOC \
    --csharp_out=$API_OUT_DIR/$1/$1 \
    --grpc_out=$API_OUT_DIR/$1/$1 \
    -I $GOOGLEAPIS \
    -I $CORE_PROTOS_ROOT \
    --plugin=protoc-gen-grpc=$GRPC_PLUGIN \
    $API_SRC_DIR/*.proto \
    2>&1 | grep -v "but not used" || true # Ignore import warnings (and grep exit code)
}
# Generates only C# proto messages (no gRPC, no GAPIC) for API $1.
generate_proto() {
  API_SRC_DIR=$GOOGLEAPIS/$($PYTHON3 tools/getapifield.py apis/apis.json $1 protoPath)
  $PROTOC \
    --csharp_out=apis/$1/$1 \
    -I $GOOGLEAPIS \
    -I $CORE_PROTOS_ROOT \
    $API_SRC_DIR/*.proto
}
# Generates C# proto messages plus gRPC stubs (no GAPIC) for API $1.
generate_protogrpc() {
  API_SRC_DIR=$GOOGLEAPIS/$($PYTHON3 tools/getapifield.py apis/apis.json $1 protoPath)
  $PROTOC \
    --csharp_out=apis/$1/$1 \
    --grpc_out=apis/$1/$1 \
    -I $GOOGLEAPIS \
    -I $CORE_PROTOS_ROOT \
    --plugin=protoc-gen-grpc=$GRPC_PLUGIN \
    $API_SRC_DIR/*.proto
}
# Drives generation for one API package ($1): optionally snapshots the old
# build for compatibility checking, dispatches to the configured generator,
# applies pre/post-generation hooks, and validates namespaces.
generate_api() {
  PACKAGE=$1
  PACKAGE_DIR=apis/$1

  if [[ $CHECK_COMPATIBILITY == "true" && -d $PACKAGE_DIR ]]
  then
    echo "Building existing version of $PACKAGE for compatibility checking"
    dotnet build -c Release -f netstandard2.0 -v quiet -p:SourceLinkCreate=false $PACKAGE_DIR/$PACKAGE
    cp $PACKAGE_DIR/$PACKAGE/bin/Release/netstandard2.0/$PACKAGE.dll $OUTDIR
  fi

  echo "Generating $PACKAGE"
  GENERATOR=$($PYTHON3 tools/getapifield.py apis/apis.json $PACKAGE generator)
  if [[ -f $PACKAGE_DIR/pregeneration.sh ]]
  then
    echo "Running pre-generation script for $PACKAGE"
    (cd $PACKAGE_DIR; ./pregeneration.sh)
  fi
  case "$GENERATOR" in
    micro)
      generate_microgenerator $1
      ;;
    gapic)
      generate_gapicgenerator $1
      ;;
    proto)
      generate_proto $1
      ;;
    protogrpc)
      generate_protogrpc $1
      ;;
    *)
      echo "Unknown generator: $GENERATOR"
      exit 1
  esac
  if [[ -f $PACKAGE_DIR/postgeneration.patch ]]
  then
    echo "Applying post-generation patch for $PACKAGE"
    (cd $PACKAGE_DIR; git apply postgeneration.patch)
  fi
  if [[ -f $PACKAGE_DIR/postgeneration.sh ]]
  then
    echo "Running post-generation script for $PACKAGE"
    (cd $PACKAGE_DIR; ./postgeneration.sh)
  fi

  # Every generated file must live in the namespace matching the package name.
  if [[ $(grep -E "^namespace" apis/$1/$1/*.cs | grep -Ev "namespace ${1}[[:space:]{]*\$") ]]
  then
    echo "API $1 has broken namespace declarations"
    # Monitoring currently has an exemption as we know it's broken.
    # We plan to remove that exemption (and the breakage) when we do a major version bump.
    # For anything else, fail the build.
    if [[ $1 != "Google.Cloud.Monitoring.V3" ]]
    then
      exit 1
    fi
  fi

  if [[ $CHECK_COMPATIBILITY == "true" ]]
  then
    if [[ -f $OUTDIR/$PACKAGE.dll ]]
    then
      echo "Building new version of $PACKAGE for compatibility checking"
      dotnet build -c Release -f netstandard2.0 -v quiet -p:SourceLinkCreate=false $PACKAGE_DIR/$PACKAGE
      echo ""
      echo "Changes in $PACKAGE:"
      dotnet run --no-build -p tools/Google.Cloud.Tools.VersionCompat -- \
        $OUTDIR/$PACKAGE.dll \
        $PACKAGE_DIR/$PACKAGE/bin/Release/netstandard2.0/$PACKAGE.dll
    else
      echo ""
      echo "$PACKAGE is a new API."
    fi
  fi
}
# Entry point
install_protoc
install_microgenerator
install_grpc
fetch_github_repos
GAPIC_GENERATOR_VERSION=$(cat gapic-generator/version.txt)

# Build GAPIC generator once with gradle so we can invoke it from Java directly
# once per API. We don't care that we're using deprecated Gradle features: we
# won't be using Gradle at all by the end of 2018, with any luck...
(cd gapic-generator; ./gradlew shadowJar --warning-mode=none)

OUTDIR=tmp
rm -rf $OUTDIR
mkdir $OUTDIR
CHECK_COMPATIBILITY=false
if [[ $1 == "--check_compatibility" ]]
then
  CHECK_COMPATIBILITY=true
  # Build the tool once so it doesn't interfere with output later
  dotnet build tools/Google.Cloud.Tools.VersionCompat
  shift
fi

# Remaining arguments select specific packages; default is every package
# in apis.json that declares a generator.
packages=$@
if [[ -z "$packages" ]]
then
  packages=$($PYTHON3 tools/listapis.py apis/apis.json --test generator)
fi

for package in $packages
do
  generate_api $package
done
|
import * as React from "react";
import Svg, { Path, SvgProps } from "react-native-svg";

interface Props extends SvgProps {
  size?: number;
}

// Heroicons "status-offline" outline rendered as a square react-native SVG
// icon; `size` sets both width and height (default 24).
const StatusOfflineOutline = ({ size = 24, ...props }: Props) => (
  <Svg
    fill="none"
    viewBox="0 0 24 24"
    stroke="currentColor"
    width={size}
    height={size}
    {...props}
  >
    <Path
      strokeLinecap="round"
      strokeLinejoin="round"
      strokeWidth={2}
      d="M18.364 5.636a9 9 0 010 12.728m0 0l-2.829-2.829m2.829 2.829L21 21M15.536 8.464a5 5 0 010 7.072m0 0l-2.829-2.829m-4.243 2.829a4.978 4.978 0 01-1.414-2.83m-1.414 5.658a9 9 0 01-2.167-9.238m7.824 2.167a1 1 0 111.414 1.414m-1.414-1.414L3 3m8.293 8.293l1.414 1.414"
    />
  </Svg>
);

export default StatusOfflineOutline;
|
class TaxHousehold
  include Mongoid::Document
  include Mongoid::Timestamps
  include HasFamilyMembers

  # A set of family_members, grouped according to IRS and ACA rules, who are considered a single unit
  # when determining eligibility for Insurance Assistance and Medicaid

  embedded_in :household

  auto_increment :hbx_assigned_id, seed: 9999 # Create 'friendly' ID to publish for other systems

  # APTC amount is stored in cents to avoid floating-point rounding.
  field :allocated_aptc_in_cents, type: Integer, default: 0
  field :is_eligibility_determined, type: Boolean, default: false

  field :effective_start_date, type: Date
  field :effective_end_date, type: Date
  field :submitted_at, type: DateTime
  field :primary_applicant_id, type: Moped::BSON::ObjectId

  index({hbx_assigned_id: 1})

  embeds_many :tax_household_members
  accepts_nested_attributes_for :tax_household_members

  embeds_many :eligibility_determinations
# *IMP* Check for tax members with financial information missing
def members_with_financials
tax_household_members.reject{|m| m.financial_statements.empty? }
end
def build_tax_family
family = {}
family[:primary] = primary
family[:spouse] = spouse
family[:dependents] = dependents
end
def associated_policies
policies = []
household.hbx_enrollments.each do |enrollment|
if pol.subscriber.coverage_start > Date.new((date.year - 1),12,31) && pol.subscriber.coverage_start < Date.new(date.year,12,31)
policy_disposition = PolicyDisposition.new(pol)
coverages << pol if (policy_disposition.start_date.month..policy_disposition.end_date.month).include?(date.month)
end
end
policies
end
def no_tax_filer?
members_with_financials.detect{|m| m.financial_statements[0].tax_filing_status == 'tax_filer' }.nil?
end
def tax_filers
members_with_financials.select{ |m| m.financial_statements[0].tax_filing_status == 'tax_filer' }
end
def primary
if tax_filers.count > 1
if tax_filers.detect{|filer| filer.financial_statements[0].is_tax_filing_together == false }
raise 'multiple tax filers filing seperate in a single tax household!!'
end
tax_filer = tax_filers.detect{|filer| filer.is_primary_applicant? }
raise "multiple tax_filers but primary applicant is not one of them??" if tax_filer.blank?
tax_filer
else
tax_filers.first
end
end
def spouse
if tax_filers.count > 1
return tax_filers.detect{|filer| !filer.is_primary_applicant? }
else
non_filers = members_with_financials.select{|m| m.financial_statements[0].tax_filing_status == 'non_filer'}
if non_filers.any?
non_filers.each do |non_filer|
if has_spouse_relation?(non_filer)
return non_filer
end
end
end
end
nil
end
def has_spouse_relation?(non_filer)
pols = non_filer.family_member.person.policies
person = non_filer.family_member.person
pols.each do |pol|
member = pol.enrollees.detect{|enrollee| enrollee.person == person}
if member.rel_code == 'spouse'
return true
end
end
false
end
def dependents
members_with_financials.select {|m|
m.financial_statements[0].tax_filing_status == 'dependent'
} + members_with_financials.select {|m|
m.financial_statements[0].tax_filing_status == 'non_filer'
}.reject{|m| m == spouse }
end
# def coverage_as_of(date)
# # pols = []
# # members_with_financials.select{|m|
# # pols += m.family_member.person.policies
# # }
# pols = household.hbx_enrollments.select{|x| x.policy }
# coverages = []
# pols.uniq.select do |pol|
# if pol.subscriber.coverage_start > Date.new((date.year - 1),12,31) && pol.subscriber.coverage_start < Date.new(date.year,12,31)
# policy_disposition = PolicyDisposition.new(pol)
# coverages << pol if (policy_disposition.start_date.month..policy_disposition.end_date.month).include?(date.month)
# end
# end
# coverages.map{|x| x.id}
# end
def allocated_aptc_in_dollars=(dollars)
self.allocated_aptc_in_cents = (Rational(dollars) * Rational(100)).to_i
end
def allocated_aptc_in_dollars
(Rational(allocated_aptc_in_cents) / Rational(100)).to_f if allocated_aptc_in_cents
end
# Income sum of all tax filers in this Household for specified year
def total_incomes_by_year
family_member_links.inject({}) do |acc, per|
p_incomes = per.financial_statements.inject({}) do |acc, ae|
acc.merge(ae.total_incomes_by_year) { |k, ov, nv| ov + nv }
end
acc.merge(p_incomes) { |k, ov, nv| ov + nv }
end
end
#TODO: return count for adults (21-64), children (<21) and total
def size
members.size
end
def family
return if household.blank?
household.family
end
def is_eligibility_determined?
elegibility_determinizations.size > 0 ? true : false
end
#primary applicant is the tax household member who is the subscriber
def primary_applicant
tax_household_members.detect do |tax_household_member|
tax_household_member.is_subscriber == true
end
end
end
|
#!/bin/bash
# Build the language server, then package the VS Code client extension.
# Fix: abort on the first failing command so a broken server build is not
# silently followed by the client packaging step.
set -e

echo "Building server"
cd server
rm -rf ./node_modules
npm ci
npm run clean
npm run compile

echo "Building client"
cd ../client
rm -rf ./node_modules
npm ci
vsce package
|
<filename>assets/js/page-loader.js
// "use strict";
// Derive the message URL from the SHA-256 hash of the supplied key, fetch
// the encrypted page, decrypt it with AES and replace the current document.
// On a failed fetch the #wrong-key notice is shown instead.
function loadPage(key) {
    var hash = CryptoJS.SHA256(key).toString();
    $("#wrong-key").hide();
    $.ajax({
        url: "messages/" + hash + "/index.html",
        type: 'GET',
        dataType: 'html',
        success: function (data) {
            var plaintext = CryptoJS.AES.decrypt(data, key).toString(CryptoJS.enc.Utf8);
            document.write(plaintext);
            document.close();
        },
        error: function () {
            $("#wrong-key").show();
        }
    });
}

// Trigger a page load either by clicking the button or pressing Enter
// inside the key field.
$("#submit-button").on("click", function (e) {
    e.preventDefault();
    loadPage($("#key-field").val());
});

$("#key-field").keypress(function (e) {
    if (e.which == 13) {
        loadPage($("#key-field").val());
    }
});
|
<filename>vendor/assets/components/catarse.js/spec/components/admin-project-details-card.spec.js<gh_stars>0
// Spec for the AdminProjectDetailsCard mithril component.
describe('AdminProjectDetailsCard', function() {
    var AdminProjectDetailsCard = window.c.AdminProjectDetailsCard;
    // Fix: these were assigned without var and leaked as implicit globals
    // (which also breaks under strict mode); declare them in the suite scope.
    var generateController, projectDetail, component, ctrl, view, $output;

    describe('controller', function() {
        beforeAll(function() {
            // Builds a controller from a mocked project detail with the
            // given attribute overrides.
            generateController = function(attrs) {
                projectDetail = ProjectDetailsMockery(attrs)[0];
                component = m.component(AdminProjectDetailsCard, {resource: projectDetail});
                return component.controller();
            };
        });

        describe('project status text', function() {
            it('when project is online', function() {
                ctrl = generateController({state: 'online'});
                expect(ctrl.statusTextObj().text).toEqual('NO AR');
                expect(ctrl.statusTextObj().cssClass).toEqual('text-success');
            });

            it('when project is failed', function() {
                ctrl = generateController({state: 'failed'});
                expect(ctrl.statusTextObj().text).toEqual('NÃO FINANCIADO');
                expect(ctrl.statusTextObj().cssClass).toEqual('text-error');
            });
        });

        describe('project remaining time', function() {
            it('when remaining time is in days', function() {
                ctrl = generateController({remaining_time: {total: 10, unit: 'days'}});
                expect(ctrl.remainingTextObj().total).toEqual(10);
                expect(ctrl.remainingTextObj().unit).toEqual('dias');
            });

            it('when remaining time is in seconds', function() {
                ctrl = generateController({remaining_time: {total: 12, unit: 'seconds'}});
                expect(ctrl.remainingTextObj().total).toEqual(12);
                expect(ctrl.remainingTextObj().unit).toEqual('segundos');
            });

            it('when remaining time is in hours', function() {
                ctrl = generateController({remaining_time: {total: 2, unit: 'hours'}});
                expect(ctrl.remainingTextObj().total).toEqual(2);
                expect(ctrl.remainingTextObj().unit).toEqual('horas');
            });
        });
    });

    describe('view', function() {
        beforeAll(function() {
            projectDetail = ProjectDetailsMockery()[0];
            component = m.component(AdminProjectDetailsCard, {resource: projectDetail});
            ctrl = component.controller();
            view = component.view(ctrl, {resource: projectDetail});
            $output = mq(view);
        });

        it('should render details of the project in card', function() {
            var remaningTimeObj = ctrl.remainingTextObj(),
                statusTextObj = ctrl.statusTextObj();
            expect($output.find('.project-details-card').length).toEqual(1);
            expect($output.contains(projectDetail.total_contributions)).toEqual(true);
            expect($output.contains('$ ' + window.c.h.formatNumber(projectDetail.pledged, 2))).toEqual(true);
            expect($output.contains(projectDetail.progress.toFixed(2) + '%')).toEqual(true);
            expect($output.contains(remaningTimeObj.unit + ' restantes')).toEqual(true);
            expect($output.contains(statusTextObj.text)).toEqual(true);
        });
    });
});
|
<filename>daire.go
/*
Copyright 2015 <NAME>. All rights reserved.
Use of this source code is governed by the MIT License (MIT) that can be found in the LICENSE file.
*/
/*
daire
Go program that acts as a single host reverse proxy
Usage:
go run ./daire.go [listen host and port] [to host and port]
*/
package main
import (
"log"
"net/http"
"net/http/httputil"
"net/url"
"os"
)
// SingleRequestTransport is an http.RoundTripper that currently just
// delegates to http.DefaultTransport. The commented-out channel is the
// author's sketch for turning it into a one-request-at-a-time bottleneck.
type SingleRequestTransport struct {
	// bottleneck chan bool
}

// RoundTrip forwards the request to http.DefaultTransport.
// Fix: removed the stray semicolon and the redundant temporaries.
func (s *SingleRequestTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	// s.bottleneck <- true
	// <- s.bottleneck (release after the round trip)
	return http.DefaultTransport.RoundTrip(req)
}
// main starts a single-host reverse proxy: every request received on the
// listen address (os.Args[1]) is forwarded over plain HTTP to the target
// host (os.Args[2]).
func main() {
	if len(os.Args) != 3 {
		log.Fatal("Usage: go run ./daire.go [listen host and port] [to host and port]")
	}
	hostAndPort := os.Args[1]
	proxyHost := os.Args[2]

	proxy := httputil.NewSingleHostReverseProxy(&url.URL{Scheme: "http", Host: proxyHost, Path: "/"})
	proxy.Transport = &SingleRequestTransport{ /*bottleneck: make(chan bool, 1)*/ }

	server := &http.Server{
		Addr:    hostAndPort,
		Handler: proxy,
	}

	log.Print("Daire proxy listening on: ", hostAndPort, ", redirecting to: ", proxyHost)
	// ListenAndServe blocks; it only returns on an unrecoverable error.
	// Fix: idiomatic err scoping and no parentheses around the condition.
	if err := server.ListenAndServe(); err != nil {
		log.Fatal(err)
	}
}
|
#!/bin/sh
# ---- TEMPLATE ----
# Runs on every Stop before anything is stopped
#
# $1 - path to the config file that is sourced before the hook logic runs.

# Fix: quote "$1" so config paths containing spaces (or a missing argument)
# are handled correctly instead of breaking the test/source commands.
if [ ! -f "$1" ] ; then
echo "Config-File $1 not found..."
exit 255
fi

#Load config
. "$1"

# You can uncomment this line to see when hook is starting:
# echo "------------------ Running $0 ------------------"
|
<gh_stars>0
#include "distribution.h"
#include "adjacentR.h"
using namespace std;
using namespace Rcpp ;
// [[Rcpp::depends(RcppEigen)]]
// Default constructor.
// NOTE(review): the local `distribution dist;` is constructed and
// immediately discarded; it has no lasting effect unless the distribution
// constructor has side effects -- TODO confirm intent.
AdjacentR::AdjacentR(void) {
  distribution dist;
}
// Inverse link for the adjacent-categories model with a logistic cdf.
// Maps the Q linear predictors eta to probabilities pi for the Q
// non-reference categories: adjacent ratios pi[j-1]/pi[j] equal the odds
// cdf/(1-cdf) at eta(j-1), accumulated backwards, then the vector is
// normalised so all K = Q+1 category probabilities (including the implicit
// reference category, which contributes the leading 1) sum to one.
Eigen::VectorXd AdjacentR::inverse_logistic(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi( eta.size() );
  // Last category: odds at the last predictor.
  pi[eta.size()-1] = cdf_logit( eta(eta.size()-1) ) / ( 1-cdf_logit( eta(eta.size()-1) ) );
  double norm = 1 + pi[eta.size()-1];
  // Backward recursion over the remaining categories.
  for(size_t j=(eta.size()-1); j>0; --j)
  {
    pi[j-1] = pi[j] * cdf_logit( eta(j-1) ) / ( 1-cdf_logit( eta(j-1) ) );
    norm += pi[j-1];
  }
  // Normalise and clamp strictly inside the open simplex.
  return in_open_corner(pi/norm);
}
// Jacobian d(pi)/d(eta) of inverse_logistic. D is diagonal with entries
// pdf/(cdf*(1-cdf)); the cdf factors are clamped to [1e-10, 1-1e-6] to
// avoid division by zero. The lower-triangular ones matrix chains the
// backward recursion and (diag(pi) - pi*pi') is the multinomial
// covariance factor.
Eigen::MatrixXd AdjacentR::inverse_derivative_logistic(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi = AdjacentR::inverse_logistic(eta);
  Eigen::MatrixXd D = Eigen::MatrixXd::Zero(pi.rows(),pi.rows());
  Eigen::MatrixXd Ones = Eigen::MatrixXd::Ones(pi.rows(),pi.rows());
  for(size_t j=0; j<pi.rows(); ++j)
  { D(j,j) = pdf_logit( eta(j) ) /( std::max(1e-10, std::min(1-1e-6, cdf_logit(eta(j)))) * std::max(1e-10, std::min(1-1e-6, 1-cdf_logit(eta(j)))) ); }
  return D * Eigen::TriangularView<Eigen::MatrixXd, Eigen::UpLoType::Lower>(Ones) * ( Eigen::MatrixXd(pi.asDiagonal()) - pi * pi.transpose() );
}
// Same backward-recursion inverse link as inverse_logistic, but using the
// normal (probit) cdf.
Eigen::VectorXd AdjacentR::inverse_normal(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi( eta.size() );
  pi[eta.size()-1] = cdf_normal( eta(eta.size()-1) ) / ( 1-cdf_normal( eta(eta.size()-1) ) );
  double norm = 1 + pi[eta.size()-1];
  for(size_t j=(eta.size()-1); j>0; --j)
  {
    pi[j-1] = pi[j] * cdf_normal( eta(j-1) ) / ( 1-cdf_normal( eta(j-1) ) );
    norm += pi[j-1];
  }
  return in_open_corner(pi/norm);
}
// Jacobian of inverse_normal; same structure as the logistic variant with
// the normal pdf/cdf, cdf factors clamped to [1e-10, 1-1e-6].
Eigen::MatrixXd AdjacentR::inverse_derivative_normal(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi = AdjacentR::inverse_normal(eta);
  Eigen::MatrixXd D = Eigen::MatrixXd::Zero(pi.rows(),pi.rows());
  Eigen::MatrixXd Ones = Eigen::MatrixXd::Ones(pi.rows(),pi.rows());
  for(size_t j=0; j<pi.rows(); ++j)
  { D(j,j) = pdf_normal( eta(j) ) /( std::max(1e-10, std::min(1-1e-6, cdf_normal(eta(j)))) * std::max(1e-10, std::min(1-1e-6, 1-cdf_normal(eta(j)))) ); }
  return D * Eigen::TriangularView<Eigen::MatrixXd, Eigen::UpLoType::Lower>(Ones) * ( Eigen::MatrixXd(pi.asDiagonal()) - pi * pi.transpose() );
}
// Same backward-recursion inverse link as inverse_logistic, but using the
// Cauchy (cauchit) cdf.
Eigen::VectorXd AdjacentR::inverse_cauchit(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi( eta.size() );
  pi[eta.size()-1] = cdf_cauchit( eta(eta.size()-1) ) / ( 1-cdf_cauchit( eta(eta.size()-1) ) );
  double norm = 1 + pi[eta.size()-1];
  for(size_t j=(eta.size()-1); j>0; --j)
  {
    pi[j-1] = pi[j] * cdf_cauchit( eta(j-1) ) / ( 1-cdf_cauchit( eta(j-1) ) );
    norm += pi[j-1];
  }
  return in_open_corner(pi/norm);
}
// Jacobian of inverse_cauchit; same structure as the logistic variant with
// the Cauchy pdf/cdf, cdf factors clamped to [1e-10, 1-1e-6].
Eigen::MatrixXd AdjacentR::inverse_derivative_cauchit(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi = AdjacentR::inverse_cauchit(eta);
  Eigen::MatrixXd D = Eigen::MatrixXd::Zero(pi.rows(),pi.rows());
  Eigen::MatrixXd Ones = Eigen::MatrixXd::Ones(pi.rows(),pi.rows());
  for(size_t j=0; j<pi.rows(); ++j)
  { D(j,j) = pdf_cauchit( eta(j) ) /( std::max(1e-10, std::min(1-1e-6, cdf_cauchit(eta(j)))) * std::max(1e-10, std::min(1-1e-6, 1-cdf_cauchit(eta(j)))) ); }
  return D * Eigen::TriangularView<Eigen::MatrixXd, Eigen::UpLoType::Lower>(Ones) * ( Eigen::MatrixXd(pi.asDiagonal()) - pi * pi.transpose() );
}
// Same backward-recursion inverse link as inverse_logistic, but using the
// Gompertz cdf.
Eigen::VectorXd AdjacentR::inverse_gompertz(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi( eta.size() );
  pi[eta.size()-1] = cdf_gompertz( eta(eta.size()-1) ) / ( 1-cdf_gompertz( eta(eta.size()-1) ) );
  double norm = 1 + pi[eta.size()-1];
  for(size_t j=(eta.size()-1); j>0; --j)
  {
    pi[j-1] = pi[j] * cdf_gompertz( eta(j-1) ) / ( 1-cdf_gompertz( eta(j-1) ) );
    norm += pi[j-1];
  }
  return in_open_corner(pi/norm);
}
// Jacobian of inverse_gompertz; same structure as the logistic variant with
// the Gompertz pdf/cdf, cdf factors clamped to [1e-10, 1-1e-6].
Eigen::MatrixXd AdjacentR::inverse_derivative_gompertz(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi = AdjacentR::inverse_gompertz(eta);
  Eigen::MatrixXd D = Eigen::MatrixXd::Zero(pi.rows(),pi.rows());
  Eigen::MatrixXd Ones = Eigen::MatrixXd::Ones(pi.rows(),pi.rows());
  for(size_t j=0; j<pi.rows(); ++j)
  { D(j,j) = pdf_gompertz( eta(j) ) /( std::max(1e-10, std::min(1-1e-6, cdf_gompertz(eta(j)))) * std::max(1e-10, std::min(1-1e-6, 1-cdf_gompertz(eta(j)))) ); }
  return D * Eigen::TriangularView<Eigen::MatrixXd, Eigen::UpLoType::Lower>(Ones) * ( Eigen::MatrixXd(pi.asDiagonal()) - pi * pi.transpose() );
}
// Same backward-recursion inverse link as inverse_logistic, but using the
// Gumbel cdf.
Eigen::VectorXd AdjacentR::inverse_gumbel(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi( eta.size() );
  pi[eta.size()-1] = cdf_gumbel( eta(eta.size()-1) ) / ( 1-cdf_gumbel( eta(eta.size()-1) ) );
  double norm = 1 + pi[eta.size()-1];
  for(size_t j=(eta.size()-1); j>0; --j)
  {
    pi[j-1] = pi[j] * cdf_gumbel( eta(j-1) ) / ( 1-cdf_gumbel( eta(j-1) ) );
    norm += pi[j-1];
  }
  return in_open_corner(pi/norm);
}
// Jacobian of inverse_gumbel; same structure as the logistic variant with
// the Gumbel pdf/cdf, cdf factors clamped to [1e-10, 1-1e-6].
Eigen::MatrixXd AdjacentR::inverse_derivative_gumbel(const Eigen::VectorXd& eta) const
{
  Eigen::VectorXd pi = AdjacentR::inverse_gumbel(eta);
  Eigen::MatrixXd D = Eigen::MatrixXd::Zero(pi.rows(),pi.rows());
  Eigen::MatrixXd Ones = Eigen::MatrixXd::Ones(pi.rows(),pi.rows());
  for(size_t j=0; j<pi.rows(); ++j)
  { D(j,j) = pdf_gumbel( eta(j) ) /( std::max(1e-10, std::min(1-1e-6, cdf_gumbel(eta(j)))) * std::max(1e-10, std::min(1-1e-6, 1-cdf_gumbel(eta(j)))) ); }
  return D * Eigen::TriangularView<Eigen::MatrixXd, Eigen::UpLoType::Lower>(Ones) * ( Eigen::MatrixXd(pi.asDiagonal()) - pi * pi.transpose() );
}
// Shared helper object used for data preparation and name concatenation.
distribution dist_adj;

// Fits an adjacent-categories ordinal regression model by Fisher scoring
// (iteratively reweighted least squares) and returns coefficients,
// AIC/BIC, standard errors, fitted probabilities, residuals and deviance.
// `distribution` selects the link cdf ("logistic", "normal", "cauchit",
// "gompertz", "gumbel"); `freedom_degrees` is accepted but unused here --
// presumably reserved for a Student-t link, TODO confirm.
// [[Rcpp::export(".GLMadj")]]
List GLMadj(Formula formula,
            CharacterVector categories_order,
            CharacterVector proportional_effects,
            DataFrame data,
            std::string distribution,
            double freedom_degrees){

  const int N = data.nrows() ; // Number of observations

  // Expand the formula/data into the extended response and design matrices.
  List Full_M = dist_adj.All_pre_data_or(formula, data,
                                         categories_order, proportional_effects);

  Eigen::MatrixXd Y_init = Full_M["Response_EXT"];
  Eigen::MatrixXd X_EXT = Full_M["Design_Matrix"];
  CharacterVector levs1 = Full_M["Levels"];          // NOTE(review): unused below
  CharacterVector explanatory_complete = Full_M["Complete_effects"];

  int P_c = explanatory_complete.length();
  int P_p = 0;
  if(proportional_effects[0] != "NA"){P_p = proportional_effects.length();}
  int P = P_c + P_p ; // Number of explanatory variables without intercept
                      // NOTE(review): P itself is never read afterwards.
  int Q = Y_init.cols();  // non-reference categories
  int K = Q + 1;          // total categories
  // // // Beta initialization with zeros
  Eigen::MatrixXd BETA;
  BETA = Eigen::MatrixXd::Zero(X_EXT.cols(),1);
  //
  int iteration = 0;
  // double check_tutz = 1.0;
  double Stop_criteria = 1.0;
  Eigen::MatrixXd X_M_i ;
  Eigen::VectorXd Y_M_i ;
  Eigen::VectorXd eta ;
  Eigen::VectorXd pi ;
  Eigen::MatrixXd D ;
  Eigen::MatrixXd Cov_i ;
  Eigen::MatrixXd W_in ;
  Eigen::MatrixXd Score_i_2 ;
  Eigen::MatrixXd F_i_2 ;
  Eigen::VectorXd LogLikIter;
  LogLikIter = Eigen::MatrixXd::Zero(1,1) ;
  Eigen::MatrixXd var_beta;
  Eigen::VectorXd Std_Error;
  double LogLik;
  Eigen::MatrixXd pi_ma(N, K);
  Eigen::MatrixXd F_i_final = Eigen::MatrixXd::Zero(BETA.rows(), BETA.rows());

  // Fisher-scoring loop: iterate until the relative log-likelihood change
  // drops below epsilon/N.
  // for (int iteration=1; iteration < 18; iteration++){
  // while (check_tutz > 0.0001){
  double epsilon = 0.0001 ;
  while (Stop_criteria >( epsilon / N)){

    Eigen::MatrixXd Score_i = Eigen::MatrixXd::Zero(BETA.rows(),1);
    Eigen::MatrixXd F_i = Eigen::MatrixXd::Zero(BETA.rows(), BETA.rows());
    LogLik = 0.;

    // Loop by subject: accumulate score vector and Fisher information.
    for (int i=0; i < N; i++){
      // Block of size (p,q), starting at (i,j): matrix.block(i,j,p,q);
      X_M_i = X_EXT.block(i*Q , 0 , Q , X_EXT.cols());
      Y_M_i = Y_init.row(i);
      eta = X_M_i * BETA;

      AdjacentR adj;
      // Vector pi depends on selected distribution
      if(distribution == "logistic"){
        pi = adj.inverse_logistic(eta);
        D = adj.inverse_derivative_logistic(eta);
      }else if(distribution == "normal"){
        pi = adj.inverse_normal(eta);
        D = adj.inverse_derivative_normal(eta);
      }else if(distribution == "cauchit"){
        pi = adj.inverse_cauchit(eta);
        D = adj.inverse_derivative_cauchit(eta);
      }else if(distribution == "gompertz"){
        pi = adj.inverse_gompertz(eta);
        D = adj.inverse_derivative_gompertz(eta);
      }else if(distribution == "gumbel"){
        pi = adj.inverse_gumbel(eta);
        D = adj.inverse_derivative_gumbel(eta);
      }

      // Multinomial covariance, working weights, and the subject's
      // contributions to score and information.
      Cov_i = Eigen::MatrixXd(pi.asDiagonal()) - (pi*pi.transpose());
      W_in = D * Cov_i.inverse();
      Score_i_2 = X_M_i.transpose() * W_in * (Y_M_i - pi);
      Score_i = Score_i + Score_i_2;
      F_i_2 = X_M_i.transpose() * (W_in) * (D.transpose() * X_M_i);
      F_i = F_i + F_i_2;
      // Multinomial log-likelihood contribution (reference category gets
      // probability 1 - sum(pi)).
      LogLik = LogLik + (Y_M_i.transpose().eval()*Eigen::VectorXd(pi.array().log())) + ( (1 - Y_M_i.sum()) * std::log(1 - pi.sum()) );
      pi_ma.row(i) = pi.transpose();
    }
    // Fill in the reference-category probability column.
    Eigen::VectorXd Ones1 = Eigen::VectorXd::Ones(pi_ma.rows());
    pi_ma.col(Q) = Ones1 - pi_ma.rowwise().sum() ;

    LogLikIter.conservativeResize(iteration+2, 1);
    LogLikIter(iteration+1) = LogLik;
    // NOTE(review): this uses unqualified abs on doubles -- relies on a
    // floating-point overload being in scope (std::abs) -- TODO confirm.
    Stop_criteria = (abs(LogLikIter(iteration+1) - LogLikIter(iteration))) / (epsilon + (abs(LogLikIter(iteration+1)))) ;
    Eigen::VectorXd beta_old = BETA;
    // Newton/Fisher update.
    BETA = BETA + (F_i.inverse() * Score_i);
    // check_tutz = ((BETA - beta_old).norm())/(beta_old.norm()+check_tutz);
    iteration = iteration + 1;
    F_i_final = F_i;
  }

  // Standard errors from the inverse Fisher information at convergence.
  // var_beta = (((X_EXT.transpose() * F_i_final) * X_EXT).inverse());
  var_beta = F_i_final.inverse();
  Std_Error = var_beta.diagonal();
  Std_Error = Std_Error.array().sqrt() ;

  // Build coefficient names: one per category for complete effects, one per
  // variable for proportional effects.
  std::vector<std::string> text=as<std::vector<std::string>>(explanatory_complete);
  std::vector<std::string> level_text=as<std::vector<std::string>>(categories_order);
  StringVector names(Q*P_c + P_p);
  if(P_c > 0){
    for(int var = 0 ; var < explanatory_complete.size() ; var++){
      for(int cat = 0 ; cat < Q ; cat++){
        names[(Q*var) + cat] = dist_adj.concatenate(text[var], level_text[cat]);
      }
    }
  }
  if(P_p > 0){
    for(int var_p = 0 ; var_p < proportional_effects.size() ; var_p++){
      names[(Q*P_c) + var_p] = proportional_effects[var_p];
    }
  }

  // TO NAMED THE RESULT BETAS
  NumericMatrix coef = wrap(BETA);
  rownames(coef) = names;

  // AIC
  double AIC = (-2*LogLik) + (2 *coef.length());

  // BIC
  double BIC = (-2*LogLik) + (coef.length() * log(N) );

  int df = (N*Q) - coef.length();

  Eigen::MatrixXd predicted = X_EXT * BETA;

  // Append the observed reference-category indicator column to Y_init.
  Eigen::VectorXd Ones2 = Eigen::VectorXd::Ones(Y_init.rows());
  Eigen::VectorXd vex1 = (Y_init.rowwise().sum()) ;
  Y_init.conservativeResize( Y_init.rows(), K);
  Y_init.col(Q) = (vex1 - Ones2).array().abs() ;

  Eigen::MatrixXd residuals = Y_init - pi_ma;

  // Deviance: -2 * sum over observed cells of log(y/pi); only nonzero
  // ratios are kept.
  Eigen::VectorXd pi_ma_vec(Eigen::Map<Eigen::VectorXd>(pi_ma.data(), pi_ma.cols()*pi_ma.rows()));
  Eigen::VectorXd Y_init_vec(Eigen::Map<Eigen::VectorXd>(Y_init.data(), Y_init.cols()*Y_init.rows()));

  Eigen::VectorXd div_arr = Y_init_vec.array() / pi_ma_vec.array();
  Eigen::VectorXd dev_r(Y_init.rows());
  // NOTE(review): dev_r is sized to N rows but is indexed once per nonzero
  // ratio; if more than N ratios are nonzero this writes out of bounds --
  // TODO confirm the intended sizing.
  int el_1 = 0;

  for (int element = 0 ; element < div_arr.size() ; element++){
    if (div_arr[element] != 0){
      dev_r[el_1] = div_arr[element];
      el_1 = el_1 +1 ;
    }
  }

  Eigen::ArrayXd dev_log = dev_r.array().log();
  double deviance = dev_log.sum();
  deviance = -2*deviance;

  return List::create(
    // Named("Nb. iterations") = iteration-1 ,
    Named("coefficients") = coef,
    Named("AIC") = AIC,
    Named("BIC") = BIC,
    // Named("var_beta") = var_beta,
    Named("stderr") = Std_Error,
    Rcpp::Named("df") = df,
    Rcpp::Named("predicted") = predicted,
    Rcpp::Named("fitted") = pi_ma,
    Rcpp::Named("pi_ma_vec") = pi_ma_vec,
    Rcpp::Named("Y_init_vec") = Y_init_vec,
    Rcpp::Named("dev_log") = dev_log,
    Rcpp::Named("deviance") = deviance,
    Rcpp::Named("residuals") = residuals,
    Named("Log-likelihood") = LogLik
  );
}
// Rcpp module registration: exposes GLMadj to R under the module
// "adjacentmodule" with default arguments for interactive use.
RCPP_MODULE(adjacentmodule){
  Rcpp::function("GLMadj", &GLMadj,
                 List::create(_["formula"] = R_NaN,
                              _["categories_order"] = CharacterVector::create( "A", NA_STRING),
                              _["proportional_effects"] = CharacterVector::create(NA_STRING),
                              _["data"] = NumericVector::create( 1, NA_REAL, R_NaN, R_PosInf, R_NegInf),
                              _["distribution"] = "a",
                              _["freedom_degrees"] = 1.0),
                 "Adjacent model");
  // Rcpp::class_<AdjacentR>("AdjacentR")
  //   .constructor()
  //   // .method( "GLMadj", &AdjacentR::GLMadj)
  // ;
}
|
#!/bin/bash
#! Copyright (C) 2017 Christian Stransky
#!
#! This software may be modified and distributed under the terms
#! of the MIT license. See the LICENSE file for details.

# Provisions the "landing server" of the user study: installs the web stack,
# imports the database schema, and wires up the submit/getCode services.
# The placeholder values (<surveyUrl>, ami-<id>, ...) must be filled in
# before running.

#Manual configurations go here

#URL to the exit survey
finalSurveyURL="<surveyUrl>"
dailyMaxInstances="2"
maxInstances="200"

#Register API keys at https://www.google.com/recaptcha/admin
#These are currently test keys that will pass any verifications.
recaptchaSiteKey='6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI'
recaptchaSecret='6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe'

#AWS Credentials
awsLang='en'
awsAccessKey=''
awsSecretKey=''
awsRegion='us-east-1'

# AWS Settings
awsImageId='ami-<id>'
awsInstanceType='t2.nano'
awsSecurityGroupID='sg-<number>'
awsSshKeyName='SSH Gateway' #You should put your ssh key here, incase that you want to connect to the instances
poolSize="1"

#Dummy verifier, will always return Valid
tokenGetUrl="https://userstudies.cs.uni-saarland.de/dummyToken/gettoken"
tokenSetUrl="https://userstudies.cs.uni-saarland.de/dummyToken/settoken"

####################################################################
##Don't modify below this line, unless you know what you are doing##
####################################################################
# Task notebooks generated by ../task_generation are served from here.
taskfilesBasePath="$PWD/../task_generation/generated/"

# ANSI colors used for warning output.
RED='\033[1;31m'
NC='\033[0m' # No Color
# Ask a yes/no question (first argument, default "Continue?") and loop
# until the user answers: y/Y returns 0, n/N returns 1, anything else
# prints an error (in red) and re-prompts.
prompt_confirm() {
  local answer
  while true; do
    read -r -n 1 -p "${1:-Continue?} [y/n]: " answer
    case $answer in
      [yY]) echo ; return 0 ;;
      [nN]) echo ; return 1 ;;
      *) printf " ${RED} %s \n${NC}" "invalid input"
    esac
  done
}
# Tokens replaced inside the copied configuration files further below.
finalSurveyUrlPlaceHolder="%finalSurveyURL%"
taskfilesBasePathPlaceholder="%taskFilesBasePath%"

# Package installation and database setup require root.
if [[ $EUID -ne 0 ]]; then
echo -e "${RED}[LandingServer] This script must be run as root${NC}" 1>&2
exit 1
fi

echo -e "${RED}[LandingServer] ------------------------------------------------------------${NC}"
echo -e "${RED}[LandingServer] ------------------------------------------------------------${NC}"
echo -e "${RED}[LandingServer] -- WARNING: This script will provision the landing server --${NC}"
echo -e "${RED}[LandingServer] --- Only run this script on a clean Ubuntu/Debian image ---${NC}"
echo -e "${RED}[LandingServer] ------------------------------------------------------------${NC}"
echo -e "${RED}[LandingServer] ------------------------------------------------------------${NC}"
prompt_confirm "[LandingServer] Preparing this server to run as the landing server" || exit 0

echo "[LandingServer][apt] Updating package list"
apt-get -qq -y update
echo "[LandingServer][apt] Upgrading packages to latest version"
apt-get -qq -y upgrade
echo "[LandingServer][apt] Checking Webserver requirements"
# nginx is the target web server; apache2 would conflict on port 80.
if [ -x "$(command -v apache2)" ]; then
echo '[LandingServer][apt] Found apache2 - uninstalling'
apt-get -qq -y remove apache2
fi
if ! [ -x "$(command -v nginx)" ]; then
echo '[LandingServer][apt] nginx not found - installing'
apt-get -qq -y install nginx
fi
echo "[LandingServer][apt] Installing php7.0-fpm php-pgsql php-redis redis-server postgresql python-flask-sqlalchemy python-boto3 php-curl composer php-zip php-simplexml python-psycopg2"
apt-get -qq -y install php7.0-fpm php-pgsql php-redis redis-server postgresql python-flask-sqlalchemy python-boto3 php-curl composer php-zip php-simplexml python-psycopg2

#Generate Passwords for the database in the first run. Replace in the files directly
pwUser1=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1)
pwUser2=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1)
pwUser3=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1)
# Patch the generated passwords into the DB schema and the service configs
# that connect with each role.
sed -i "s|%pwUser1%|$pwUser1|g" postgres/dbSchema.sql
sed -i "s|%pwUser1%|$pwUser1|g" landing/webpageConf/config.php
sed -i "s|%pwUser2%|$pwUser2|g" postgres/dbSchema.sql
sed -i "s|%pwUser2%|$pwUser2|g" landing/submit/configSubmitDB.py
sed -i "s|%pwUser3%|$pwUser3|g" postgres/dbSchema.sql
sed -i "s|%pwUser3%|$pwUser3|g" landing/submit/configGetCode.py

#Generate Tasks
#cd ../task_generation/
#python generateNotebooks.py
#cd ../landing_server/

#Import data to postgres
echo "[LandingServer] Importing data to database"
# Create the 'notebook' database only if it does not exist yet (idempotent).
su postgres -c "psql -tc \"SELECT 1 FROM pg_database WHERE datname = 'notebook'\" | grep -q 1 || psql -c \"CREATE DATABASE notebook\""
su postgres -c "psql -f postgres/dbSchema.sql"
su postgres -c "psql -d notebook -f ../task_generation/generated/dbSchema.sql"
# The flask services run as a dedicated, login-disabled system account.
echo "[LandingServer] Checking if user 'flaskserver' exists"
user_exists=$(id -u flaskserver > /dev/null 2>&1; echo $?)
if [ $user_exists == 1 ]; then
#Create a system account for jupyter and disable logins
user_created=$(adduser flaskserver --system --group --shell=/bin/false --disabled-login > /dev/null 2>&1; echo $?)
if [ $user_created == 0 ]; then
echo "[LandingServer] User 'flaskserver' created"
else
echo -e "${RED}[LandingServer] User 'flaskserver' could not be created - aborting"
exit 1
fi
else
echo "[LandingServer] User 'flaskserver' already exists"
fi
#Copy files
echo "[LandingServer] Copying configuration files"
# submitDB: flask service that records study submissions.
cp landing/submit/submitDB.py /usr/local/bin/submitDB.py
cp landing/submit/configSubmitDB.py /usr/local/bin/configSubmitDB.py
sed -i "s|$finalSurveyUrlPlaceHolder|$finalSurveyURL|g" /usr/local/bin/submitDB.py
cp landing/submit/submitDB.service /etc/systemd/system/submitDB.service
cp landing/submit/getCode.service /etc/systemd/system/getCode.service
# getCode: flask service that serves the generated task notebooks.
cp landing/submit/getCode.py /usr/local/bin/getCode.py
sed -i "s|$taskfilesBasePathPlaceholder|$taskfilesBasePath|g" /usr/local/bin/getCode.py
cp landing/submit/configGetCode.py /usr/local/bin/configGetCode.py
# Web page configuration: substitute every %placeholder% with the values
# configured at the top of this script.
cp -rfT landing/webpageConf/ /var/www/webpageConf/
sed -i "s|%dailyMaxInstances%|$dailyMaxInstances|g" /var/www/webpageConf/config.php
sed -i "s|%maxInstances%|$maxInstances|g" /var/www/webpageConf/config.php
sed -i "s|%recaptchaSiteKey%|$recaptchaSiteKey|g" /var/www/webpageConf/config.php
sed -i "s|%recaptchaSecret%|$recaptchaSecret|g" /var/www/webpageConf/config.php
sed -i "s|%awsLang%|$awsLang|g" /var/www/webpageConf/config.php
sed -i "s|%awsAccessKey%|$awsAccessKey|g" /var/www/webpageConf/config.php
sed -i "s|%awsSecretKey%|$awsSecretKey|g" /var/www/webpageConf/config.php
sed -i "s|%awsRegion%|$awsRegion|g" /var/www/webpageConf/config.php
sed -i "s|%awsImageId%|$awsImageId|g" /var/www/webpageConf/config.php
sed -i "s|%awsInstanceType%|$awsInstanceType|g" /var/www/webpageConf/config.php
sed -i "s|%awsSecurityGroupID%|$awsSecurityGroupID|g" /var/www/webpageConf/config.php
sed -i "s|%sshKeyName%|$awsSshKeyName|g" /var/www/webpageConf/config.php
sed -i "s|%poolSize%|$poolSize|g" /var/www/webpageConf/config.php
sed -i "s|%tokenGetUrl%|$tokenGetUrl|g" /var/www/webpageConf/config.php
sed -i "s|%tokenSetUrl%|$tokenSetUrl|g" /var/www/webpageConf/config.php
# boto/AWS credentials for the flaskserver account.
mkdir -p /home/flaskserver/.aws/
cp landing/submit/boto.conf /home/flaskserver/.aws/config
sed -i "s|%awsAccessKey%|$awsAccessKey|g" /home/flaskserver/.aws/config
sed -i "s|%awsSecretKey%|$awsSecretKey|g" /home/flaskserver/.aws/config
sed -i "s|%awsRegion%|$awsRegion|g" /home/flaskserver/.aws/config
cp -rfT landing/webpage/ /var/www/html/
cp -rfT landing/tools/ /var/www/tools/
cp nginx/defaultLandingServer /etc/nginx/sites-enabled/default
cp redis/redis.conf /etc/redis/redis.conf

#Get external dependencies
echo "[LandingServer] Installing composer dependencies"
cd /var/www/html/
composer require google/recaptcha "~1.1"
composer require aws/aws-sdk-php
composer install

#Enable service
echo "[LandingServer] Enabling services"
systemctl daemon-reload
systemctl enable submitDB
systemctl enable getCode
echo "[LandingServer] Starting services"
# stop before start so re-provisioning restarts already-running services.
service submitDB stop
service submitDB start
service getCode stop
service getCode start
service nginx reload
service redis-server restart

#Color the terminal red for future connects to ensure that the user notices, that he is working on a live server
export PS1='\A \033[1;31m\u@landing-server\033[0m:\w\$ '
grep -q -F "export PS1='$PS1'" ~/.bashrc || echo "export PS1='$PS1'" >> ~/.bashrc
source ~/.bashrc
echo "[LandingServer] Ready" |
package commands
import (
"bytes"
"encoding/xml"
"io/ioutil"
"os"
"strings"
"golang.org/x/net/html/charset"
"provisioner/provisioner"
)
type DisableUAAHSTS struct {
WebXMLPath string
}
// Run toggles the Tomcat HTTP Strict-Transport-Security filter in the
// web.xml at d.WebXMLPath: if the httpHeaderSecurity filter is already
// present it is removed, otherwise it is added with hstsEnabled=false.
// The file is decoded charset-aware and rewritten in place.
func (d *DisableUAAHSTS) Run() error {
	var webXMLData WebApp

	webXMLContents, err := ioutil.ReadFile(d.WebXMLPath)
	if err != nil {
		return err
	}

	decoder := xml.NewDecoder(bytes.NewReader(webXMLContents))
	decoder.CharsetReader = charset.NewReaderLabel
	if err := decoder.Decode(&webXMLData); err != nil {
		return err
	}

	hstsFilter := Filter{
		FilterName:  "httpHeaderSecurity",
		FilterClass: "org.apache.catalina.filters.HttpHeaderSecurityFilter",
		InitParam: InitParam{
			ParamName:  "hstsEnabled",
			ParamValue: "false",
		},
		AsyncSupported: true,
	}

	// Match on name, class and init-param name; whitespace is trimmed so
	// hand-edited files still match.
	hstsFilterExists := false
	for _, filter := range webXMLData.Filters {
		if strings.TrimSpace(filter.FilterName) == strings.TrimSpace(hstsFilter.FilterName) &&
			strings.TrimSpace(filter.FilterClass) == strings.TrimSpace(hstsFilter.FilterClass) &&
			strings.TrimSpace(filter.InitParam.ParamName) == strings.TrimSpace(hstsFilter.InitParam.ParamName) {
			hstsFilterExists = true
		}
	}

	if hstsFilterExists {
		webXMLData.Filters = nil
	} else {
		webXMLData.Filters = []Filter{hstsFilter}
	}

	webXMLFile, err := os.OpenFile(d.WebXMLPath, os.O_WRONLY|os.O_TRUNC, 0644)
	if err != nil {
		// Fix: previously panicked here; propagate the error like the
		// other failure paths since the method already returns error.
		return err
	}
	defer webXMLFile.Close()

	encoder := xml.NewEncoder(webXMLFile)
	encoder.Indent("", "    ")
	if err := encoder.Encode(&webXMLData); err != nil {
		// Fix: previously panicked here as well.
		return err
	}

	return nil
}
// Distro reports which distribution this command applies to (PCF).
func (*DisableUAAHSTS) Distro() string {
	return provisioner.DistributionPCF
}
// WebApp mirrors the parts of a Tomcat web.xml needed to manage filters.
// AllXML preserves the remaining inner XML verbatim so unrelated content
// survives the decode/encode round trip.
type WebApp struct {
	XMLName xml.Name `xml:"web-app"`
	Filters []Filter `xml:"filter"`
	AllXML  string   `xml:",innerxml"`
}

// Filter is a single <filter> entry in web.xml.
type Filter struct {
	FilterName     string    `xml:"filter-name"`
	FilterClass    string    `xml:"filter-class"`
	InitParam      InitParam `xml:"init-param"`
	AsyncSupported bool      `xml:"async-supported"`
}

// InitParam is a filter's <init-param> name/value pair.
type InitParam struct {
	ParamName  string `xml:"param-name"`
	ParamValue string `xml:"param-value"`
}
|
type THandler<S, O> = (state?: S, options?: O) => void
type THandlers<S, O> = (THandler<S, O>|void)[]

/**
 * Beeper
 * A simple emitter that can dispatch a state plus an options object.
 * Inspired by solid-js/signal https://www.npmjs.com/package/@solid-js/signal
 * @param initialState state restored by reset() and exposed before any dispatch
 * @param initialOptions options restored by reset(); dispatched options are merged over the current ones
 */
export function beeper<S = any, O extends Record<string, any> = {}>(initialState: S = null, initialOptions?: O) {
  // Registered listeners; replaced (not mutated) by off() so in-flight
  // dispatch iterations are unaffected.
  let listeners: THandlers<S, O> = []
  const startState: S = initialState
  const startOptions: O = initialOptions || ({} as O)
  let stateNow: S = startState
  let optionsNow: O = startOptions

  /** Unregister a previously registered handler. */
  function off(handler: THandler<S, O>): void {
    listeners = listeners.filter(registered => registered !== handler)
  }

  /** Register a handler; returns a function that unregisters it again. */
  function on(handler: THandler<S, O>): (handler?: THandler<S, O>) => void {
    listeners.push(handler)
    return () => off(handler)
  }

  /**
   * Store the new state, merge the given options over the current ones,
   * and invoke every handler with both. Returns the handlers' results.
   */
  function dispatch(state?: S, options?: O): THandlers<S, O> {
    stateNow = state
    optionsNow = {...optionsNow, ...options}
    return listeners.map((handler: THandler<S, O>) => handler(state, optionsNow))
  }

  /** Drop all handlers and restore the initial state and options. */
  function reset(): void {
    listeners = []
    stateNow = startState
    optionsNow = startOptions
  }

  return {
    off,
    on,
    dispatch,
    reset,
    get state() { return stateNow },
    get options() { return optionsNow },
    get handlers() { return listeners },
  }
}
|
package com.mounacheikhna.decorators.utils;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.Build;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicBlur;
/**
* Created by cheikhna on 04/04/2015.
*/
/**
 * Static helpers for blurring bitmaps with RenderScript.
 *
 * <p>The bitmap is first down- and up-scaled by {@code SCALE_RATIO} so the
 * intrinsic blur operates on fewer pixels, then blurred with radius
 * {@code DEFAULT_BLUR_RADIUS}.
 */
public class BlurUtils {

    /** Down/upscale factor applied before blurring. */
    private static final float SCALE_RATIO = 5f;
    /** Blur radius passed to {@link ScriptIntrinsicBlur#setRadius}. */
    private static final float DEFAULT_BLUR_RADIUS = 5.f;

    /** Utility class; not instantiable. */
    private BlurUtils() {
    }

    /**
     * Returns a blurred copy of {@code bitmap} using the default radius.
     *
     * @param bitmap  source bitmap (not modified)
     * @param context used to create the RenderScript context
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
    public static Bitmap blurBitmap(Bitmap bitmap, Context context) {
        int width = bitmap.getWidth(), height = bitmap.getHeight();
        // Downscale then upscale to pre-soften the image before the blur.
        Bitmap b = Bitmap.createScaledBitmap(Bitmap.createScaledBitmap(bitmap, (int) (width / SCALE_RATIO), (int) (height / SCALE_RATIO), false), width, height, false);
        return blurBitmap(b, DEFAULT_BLUR_RADIUS, context);
    }

    /**
     * Blurs {@code src} with the given radius.
     *
     * @param src        source bitmap (not modified)
     * @param blurRadius radius for the intrinsic blur
     * @param context    used to create the RenderScript context
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
    private static Bitmap blurBitmap(Bitmap src, float blurRadius, Context context) {
        RenderScript rs = RenderScript.create(context);
        try {
            Bitmap.Config conf = Bitmap.Config.ARGB_8888;
            Bitmap blurredBitmap = Bitmap.createBitmap(src.getWidth(), src.getHeight(), conf);
            final Allocation input = Allocation.createFromBitmap(rs, src, Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);
            final Allocation output = Allocation.createTyped(rs, input.getType());
            final ScriptIntrinsicBlur script = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
            script.setRadius(blurRadius);
            script.setInput(input);
            script.forEach(output);
            output.copyTo(blurredBitmap);
            return blurredBitmap;
        } finally {
            // Fix: the RenderScript context was leaked on every call;
            // destroy it once the result has been copied out.
            rs.destroy();
        }
    }
}
|
#!/bin/bash
# Cloudflare dynamic-DNS updater: looks up the public IPv4 address and, if it
# changed, rewrites the given A record via the Cloudflare v4 API.
# All activity is appended to a per-day log file next to the script.
log="cloudflare-$(date +'%Y-%m-%d').log"
###########################################
## Timestamp on log
###########################################
message=$(date +"%D %T")
>&2 echo -e "\n${message}" >> "${log}"
###########################################
## Check if we have enough arguments
###########################################
if [ "$#" -ne 5 ]; then
  message="Usage: $0 <auth_email> <auth_key> <zone_identifier> <record_name> <proxy>"
  >&2 echo -e "${message}" >> "${log}"
  exit 1
fi
###########################################
## Variables
###########################################
auth_email=$1       # The email used to login 'https://dash.cloudflare.com'
auth_key=$2         # Top right corner, "My profile" > "Global API Key"
zone_identifier=$3  # Can be found in the "Overview" tab of your domain
record_name=$4      # Which record you want to be synced
proxy=$5            # Set the proxy to true or false
###########################################
## Check if we have an public IP
###########################################
# Three providers are tried in order so a single outage does not stop updates.
ip=$(curl -s https://api.ipify.org || curl -s https://ipv4.icanhazip.com/ || curl -s https://ip4.seeip.org)
if [ "${ip}" == "" ]; then
  message="No public IP found."
  >&2 echo -e "${message}" >> "${log}"
  exit 1
fi
###########################################
## Seek for the A record
###########################################
echo "Check Initiated" >> "${log}"
record=$(curl -s -X GET "https://api.cloudflare.com/client/v4/zones/$zone_identifier/dns_records?name=$record_name" -H "X-Auth-Email: $auth_email" -H "X-Auth-Key: $auth_key" -H "Content-Type: application/json")
###########################################
## Check if the domaine has an A record
###########################################
if [[ $record == *"\"count\":0"* ]]; then
  message=" Record does not exist, perhaps create one first? (${ip} for ${record_name})"
  >&2 echo -e "${message}" >> "${log}"
  exit 1
fi
###########################################
## Get the existing IP
###########################################
old_ip=$(echo "$record" | grep -Po '(?<="content":")[^"]*' | head -1)
# Compare if they're the same (quoted so neither side is glob-expanded).
if [[ "$ip" == "$old_ip" ]]; then
  message=" IP ($ip) for ${record_name} has not changed."
  echo "${message}" >> "${log}"
  exit 0
fi
###########################################
## Set the record identifier from result
###########################################
record_identifier=$(echo "$record" | grep -Po '(?<="id":")[^"]*' | head -1)
###########################################
## Change the IP@Cloudflare using the API
###########################################
# BUG FIX: the JSON body previously sent "id":"$zone_identifier"; the id of a
# DNS record update is the record identifier, not the zone identifier.
update=$(curl -s -X PUT "https://api.cloudflare.com/client/v4/zones/$zone_identifier/dns_records/$record_identifier" \
  -H "X-Auth-Email: $auth_email" \
  -H "X-Auth-Key: $auth_key" \
  -H "Content-Type: application/json" \
  --data "{\"id\":\"$record_identifier\",\"type\":\"A\",\"proxied\":${proxy},\"name\":\"$record_name\",\"content\":\"$ip\"}")
###########################################
## Report the status
###########################################
case "$update" in
*"\"success\":false"*)
  message="$ip $record_name DDNS failed for $record_identifier ($ip). DUMPING RESULTS:\n$update"
  >&2 echo -e "${message}" >> "${log}"
  exit 1;;
*)
  message="$ip $record_name DDNS updated."
  echo "${message}" >> "${log}"
  exit 0;;
esac
|
<gh_stars>0
import { NgModule } from '@angular/core'
import { HttpModule } from '@angular/http';
import { BrowserModule } from '@angular/platform-browser';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { routes } from './app.router';
import { SortPipe } from './sort-by.pipe'
import { TruncatePipe } from './limit-to.pipe'
import AppComponent from './app.component'
import MainPageComponent from './main-page/main-page.component'
import MainNewsComponent from './main-page/main-news/main-news.component'
import WeatherComponent from './main-page/weather/weather.component'
import NewsRowComponent from './main-page/news-row/news-row.component'
import CommentsRowComponent from './main-page/comments-row/comments-row.component'
import CommentPreviewComponent from './main-page/comments-row/comment-preview/comment-preview.component'
import MiniNewsComponent from './main-page/news-row/mini-news/mini-news.component'
import NavbarComponent from './navbar/navbar.component'
import TrendingComponent from './trending/trending.component'
import SingleTrendComponent from './trending/single-trend/single-trend.component'
import SignInComponent from './sign-in/sign-in.component'
import RegisterComponent from './register/register.component'
import ManageComponent from './manage/manage.component'
import PostsComponent from './manage/posts/posts.component'
import AddPostComponent from './manage/add-post/add-post.component'
import EditPostComponent from './manage/edit-post/edit-post.component'
import CategoriesComponent from './manage/categories/categories.component'
import AddTagComponent from './manage/add-tag/add-tag.component'
import CommentsViewComponent from './manage/comments-view/comments-view.component'
import UsersComponent from './manage/users/users.component'
import SettingsComponent from './settings/settings.component'
import NewsPageComponent from './news-page/news-page.component'
import SingleNewsComponent from './news-page/single-news/single-news.component'
import CommentsComponent from './news-page/comments/comments.component'
import AddCommentComponent from './news-page/comments/add-comment/add-comment.component'
import SingleCommentComponent from './news-page/comments/single-comment/single-comment.component'
import { ApiService } from './services/api.service'
import { AuthService } from './services/auth.service'
// Root Angular module: wires up routing, forms, HTTP, every page/feature
// component, the two shared pipes, and the application-wide services.
@NgModule({
  imports: [
    BrowserModule,        // must appear exactly once, in the root module
    FormsModule,          // template-driven forms
    ReactiveFormsModule,  // reactive (model-driven) forms
    HttpModule,           // NOTE(review): @angular/http is deprecated in favor of HttpClientModule — confirm the Angular version before migrating
    routes                // application route table from app.router
  ],
  declarations: [
    AppComponent,
    MainPageComponent,
    MainNewsComponent,
    WeatherComponent,
    NavbarComponent,
    TrendingComponent,
    SingleTrendComponent,
    SignInComponent,
    RegisterComponent,
    ManageComponent,
    AddPostComponent,
    EditPostComponent,
    PostsComponent,
    CategoriesComponent,
    AddTagComponent,
    CommentsViewComponent,
    UsersComponent,
    SettingsComponent,
    NewsRowComponent,
    CommentsRowComponent,
    MiniNewsComponent,
    CommentPreviewComponent,
    NewsPageComponent,
    SingleNewsComponent,
    CommentsComponent,
    AddCommentComponent,
    SingleCommentComponent,
    SortPipe,      // shared sort-by pipe
    TruncatePipe   // shared limit-to pipe
  ],
  providers: [
    ApiService,   // REST access layer
    AuthService   // authentication / session state
  ],
  //exports: [SortPipe],
  bootstrap: [AppComponent]
})
export default class AppModule{}
|
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.development.actions;
import ideal.library.elements.*;
import ideal.runtime.elements.*;
import ideal.runtime.logs.*;
import ideal.development.elements.*;
import ideal.development.names.*;
import ideal.development.types.*;
import javax.annotation.Nullable;
/**
 * Compile-time debug switches for the actions package.
 *
 * <p>NOTE(review): this is the "constant interface" antipattern (Effective
 * Java, Item 22); a final class with private constructor would be preferable,
 * but changing it could break types that implement this interface.</p>
 */
public interface DEBUG {
  // Master tracing switch.
  boolean trace = true;
  // Whether to report name-resolution misses; follows the master switch.
  boolean not_found = trace;
  // Only actions with exactly this name are traced.
  action_name trace_name = simple_name.make("foo_bar_baz");
  // special_name.IMPLICIT;
  // Trace declarations that are still being processed.
  boolean in_progress_declaration = false;
}
|
#!/bin/bash
# Tear down the Druid deployment and all of its persisted state:
# the k8s resources, the Cloud SQL metadata database, the persistent volume
# claims, the GCS deep-storage bucket contents, and the ZooKeeper subtree.
kubectl delete -f examples/prod-cluster.yaml
gcloud -q sql databases delete druid --instance=intellipse-metadata
kubectl delete pvc --all -n druid
gsutil -q rm -r gs://production-205919/druid
# Give the cluster a moment to settle before touching ZooKeeper.
sleep 5
# BUG FIX: use `--` to separate the in-pod command from kubectl's own flags;
# the implicit form (`kubectl exec pod cmd`) is deprecated and rejected by
# newer kubectl releases.
kubectl exec zookeeper-0 -n zookeeper -- zkCli.sh rmr /druid
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Dados;
/**
*
* @author Mac
*/
/**
 * Plain data holder (DTO/JavaBean) for a product record: id, name,
 * description, quantity and price, with a no-arg and an all-args constructor
 * plus standard getters/setters.
 */
public class vProdutos {
    // Product identifier (presumably the database primary key — confirm against the persistence layer).
    private int idproduto;
    // Product name.
    private String nome;
    // Free-text description.
    private String descricao;
    // NOTE(review): quantity is stored as a String — confirm whether a numeric
    // type was intended; left as-is because getters/setters are public API.
    private String quantidade;
    // NOTE(review): Double is lossy for money; BigDecimal is the usual choice.
    // Flagged only, since changing the type would break callers.
    private Double preco;

    /** No-arg constructor (required by many frameworks / manual population). */
    public vProdutos() {
    }

    /** All-args constructor initializing every field. */
    public vProdutos(int idproduto, String nome, String descricao, String quantidade, Double preco) {
        this.idproduto = idproduto;
        this.nome = nome;
        this.descricao = descricao;
        this.quantidade = quantidade;
        this.preco = preco;
    }

    public int getIdproduto() {
        return idproduto;
    }

    public void setIdproduto(int idproduto) {
        this.idproduto = idproduto;
    }

    public String getNome() {
        return nome;
    }

    public void setNome(String nome) {
        this.nome = nome;
    }

    public String getDescricao() {
        return descricao;
    }

    public void setDescricao(String descricao) {
        this.descricao = descricao;
    }

    public String getQuantidade() {
        return quantidade;
    }

    public void setQuantidade(String quantidade) {
        this.quantidade = quantidade;
    }

    public Double getPreco() {
        return preco;
    }

    public void setPreco(Double preco) {
        this.preco = preco;
    }
}
|
/**
* SPDX-License-Identifier: Apache-2.0
*/
var helper = require('../../helper.js');
var logger = helper.getLogger('metricservice');
var sql = require('./db/pgservice.js');
/**
 * MetricService — read-side query helpers over the explorer's PostgreSQL
 * schema (peer, orderer, chaincodes, transactions, blocks, channel).
 *
 * Count helpers resolve to a single row shaped like { c: <count> }; the
 * time-series helpers return one row per bucket via generate_series.
 *
 * NOTE(review): every query interpolates channelName (and interval sizes)
 * directly into SQL template strings. Callers must guarantee these values
 * are trusted; the proper fix is parameterized queries in pgservice.
 */
class MetricService {

    constructor() {
    }

    //========================== query counts ==========================
    getChaincodeCount(channelName) {
        return sql.getRowsBySQlCase(`select count(1) c from chaincodes where genesis_block_hash='${channelName}' `)
    }

    getPeerlistCount(channelName) {
        return sql.getRowsBySQlCase(`select count(1) c from peer where genesis_block_hash='${channelName}' `)
    }

    getTxCount(channelName) {
        return sql.getRowsBySQlCase(`select count(1) c from transactions where genesis_block_hash='${channelName}'`)
    }

    getBlockCount(channelName) {
        return sql.getRowsBySQlCase(`select count(1) c from blocks where genesis_block_hash='${channelName}'`)
    }

    // Returns one entry per peer on the channel: { name, requests, server_hostname, genesis_block_hash }.
    async getPeerData(channelName) {
        let peerArray = []
        var c1 = await sql.getRowsBySQlNoCondtion(`select channel.name as channelname,c.requests as requests,c.genesis_block_hash as genesis_block_hash ,c.server_hostname as server_hostname from peer as c inner join channel on c.genesis_block_hash=channel.genesis_block_hash where c.genesis_block_hash='${channelName}'`);
        for (var i = 0, len = c1.length; i < len; i++) {
            var item = c1[i];
            peerArray.push({ 'name': item.channelname, 'requests': item.requests, 'server_hostname': item.server_hostname ,"genesis_block_hash":item.genesis_block_hash})
        }
        return peerArray
    }

    //BE -303
    // Returns one entry per orderer: { requests, server_hostname, genesis_block_hash }.
    async getOrdererData() {
        let ordererArray = []
        var c1 = await sql.getRowsBySQlNoCondtion(`select c.requests as requests,c.server_hostname as server_hostname,c.genesis_block_hash as genesis_block_hash from orderer c`);
        for (var i = 0, len = c1.length; i < len; i++) {
            var item = c1[i];
            ordererArray.push({ 'requests': item.requests, 'server_hostname': item.server_hostname,'genesis_block_hash':item.genesis_block_hash })
        }
        return ordererArray
    }

    //BE -303
    // Transaction count per chaincode on the channel.
    async getTxPerChaincodeGenerate(channelName) {
        let txArray = []
        var c = await sql.getRowsBySQlNoCondtion(`select c.name as chaincodename,channel.name as channelname ,c.version as version,c.genesis_block_hash as genesis_block_hash,c.path as path ,txcount as c from chaincodes as c inner join channel on c.genesis_block_hash=channel.genesis_block_hash where c.genesis_block_hash='${channelName}' `);
        if (c) {
            c.forEach((item, index) => {
                txArray.push({ 'channelName': item.channelname, 'chaincodename': item.chaincodename, 'path': item.path, 'version': item.version, 'txCount': item.c,'genesis_block_hash':item.genesis_block_hash })
            })
        }
        return txArray
    }

    // Callback wrapper around getTxPerChaincodeGenerate; reports [] on failure.
    async getTxPerChaincode(channelName, cb) {
        try {
            var txArray = await this.getTxPerChaincodeGenerate(channelName);
            cb(txArray);
        } catch(err) {
            logger.error(err)
            cb([])
        }
    }

    // Aggregated dashboard counters for a channel.
    async getStatusGenerate(channelName) {
        // BUG FIX: the old fallbacks assigned the number 0 and then set `.c`
        // on it; properties cannot be attached to a primitive, so the final
        // counters came back undefined. Fall back to a row-shaped object.
        var chaincodeCount = await this.getChaincodeCount(channelName)
        if (!chaincodeCount) chaincodeCount = { c: 0 }
        chaincodeCount.c = chaincodeCount.c ? chaincodeCount.c : 0
        var txCount = await this.getTxCount(channelName)
        if (!txCount) txCount = { c: 0 }
        txCount.c = txCount.c ? txCount.c : 0
        var blockCount = await this.getBlockCount(channelName)
        if (!blockCount) blockCount = { c: 0 }
        blockCount.c = blockCount.c ? blockCount.c : 0
        var peerCount = await this.getPeerlistCount(channelName)
        if (!peerCount) peerCount = { c: 0 }
        peerCount.c = peerCount.c ? peerCount.c : 0
        return { 'chaincodeCount': chaincodeCount.c, 'txCount': txCount.c, 'latestBlock': blockCount.c, 'peerCount': peerCount.c }
    }

    async getStatus(channelName, cb) {
        try {
            var data = await this.getStatusGenerate(channelName);
            cb(data);
        } catch(err) {
            logger.error(err)
            // CONSISTENCY FIX: every sibling wrapper invokes cb on failure;
            // previously this one did not, leaving the caller hanging.
            cb({ 'chaincodeCount': 0, 'txCount': 0, 'latestBlock': 0, 'peerCount': 0 })
        }
    }

    async getPeerList(channelName, cb) {
        try {
            var peerArray = await this.getPeerData(channelName);
            cb(peerArray)
        } catch(err) {
            logger.error(err)
            cb([])
        }
    }

    //BE -303
    async getOrdererList(cb) {
        try {
            var ordererArray = await this.getOrdererData();
            cb(ordererArray)
        } catch(err) {
            logger.error(err)
            cb([])
        }
    }

    //BE -303
    //transaction metrics
    // Per-minute transaction counts over the trailing `hours` hours.
    getTxByMinute(channelName, hours) {
        let sqlPerMinute = ` with minutes as (
            select generate_series(
                date_trunc('min', now()) - '${hours}hour'::interval,
                date_trunc('min', now()),
                '1 min'::interval
            ) as datetime
        )
        select
            minutes.datetime,
            count(createdt)
        from minutes
        left join TRANSACTIONS on date_trunc('min', TRANSACTIONS.createdt) = minutes.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerMinute);
    }

    getTxByHour(channelName, day) {
        let sqlPerHour = ` with hours as (
            select generate_series(
                date_trunc('hour', now()) - '${day}day'::interval,
                date_trunc('hour', now()),
                '1 hour'::interval
            ) as datetime
        )
        select
            hours.datetime,
            count(createdt)
        from hours
        left join TRANSACTIONS on date_trunc('hour', TRANSACTIONS.createdt) = hours.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerHour);
    }

    getTxByDay(channelName, days) {
        let sqlPerDay = ` with days as (
            select generate_series(
                date_trunc('day', now()) - '${days}day'::interval,
                date_trunc('day', now()),
                '1 day'::interval
            ) as datetime
        )
        select
            days.datetime,
            count(createdt)
        from days
        left join TRANSACTIONS on date_trunc('day', TRANSACTIONS.createdt) =days.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerDay);
    }

    getTxByWeek(channelName, weeks) {
        let sqlPerWeek = ` with weeks as (
            select generate_series(
                date_trunc('week', now()) - '${weeks}week'::interval,
                date_trunc('week', now()),
                '1 week'::interval
            ) as datetime
        )
        select
            weeks.datetime,
            count(createdt)
        from weeks
        left join TRANSACTIONS on date_trunc('week', TRANSACTIONS.createdt) =weeks.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerWeek);
    }

    getTxByMonth(channelName, months) {
        let sqlPerMonth = ` with months as (
            select generate_series(
                date_trunc('month', now()) - '${months}month'::interval,
                date_trunc('month', now()),
                '1 month'::interval
            ) as datetime
        )
        select
            months.datetime,
            count(createdt)
        from months
        left join TRANSACTIONS on date_trunc('month', TRANSACTIONS.createdt) =months.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        // BUG FIX: this query filtered on `channelname` while every sibling
        // (and the transactions schema used throughout this class) keys on
        // `genesis_block_hash`; the monthly series always came back empty.
        return sql.getRowsBySQlQuery(sqlPerMonth);
    }

    getTxByYear(channelName, years) {
        let sqlPerYear = ` with years as (
            select generate_series(
                date_trunc('year', now()) - '${years}year'::interval,
                date_trunc('year', now()),
                '1 year'::interval
            ) as year
        )
        select
            years.year,
            count(createdt)
        from years
        left join TRANSACTIONS on date_trunc('year', TRANSACTIONS.createdt) =years.year and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerYear);
    }

    // block metrics API
    getBlocksByMinute(channelName, hours) {
        let sqlPerMinute = ` with minutes as (
            select generate_series(
                date_trunc('min', now()) - '${hours} hour'::interval,
                date_trunc('min', now()),
                '1 min'::interval
            ) as datetime
        )
        select
            minutes.datetime,
            count(createdt)
        from minutes
        left join BLOCKS on date_trunc('min', BLOCKS.createdt) = minutes.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerMinute);
    }

    getBlocksByHour(channelName, days) {
        let sqlPerHour = ` with hours as (
            select generate_series(
                date_trunc('hour', now()) - '${days}day'::interval,
                date_trunc('hour', now()),
                '1 hour'::interval
            ) as datetime
        )
        select
            hours.datetime,
            count(createdt)
        from hours
        left join BLOCKS on date_trunc('hour', BLOCKS.createdt) = hours.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerHour);
    }

    getBlocksByDay(channelName, days) {
        let sqlPerDay = ` with days as (
            select generate_series(
                date_trunc('day', now()) - '${days}day'::interval,
                date_trunc('day', now()),
                '1 day'::interval
            ) as datetime
        )
        select
            days.datetime,
            count(createdt)
        from days
        left join BLOCKS on date_trunc('day', BLOCKS.createdt) =days.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerDay);
    }

    getBlocksByWeek(channelName, weeks) {
        let sqlPerWeek = ` with weeks as (
            select generate_series(
                date_trunc('week', now()) - '${weeks}week'::interval,
                date_trunc('week', now()),
                '1 week'::interval
            ) as datetime
        )
        select
            weeks.datetime,
            count(createdt)
        from weeks
        left join BLOCKS on date_trunc('week', BLOCKS.createdt) =weeks.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerWeek);
    }

    getBlocksByMonth(channelName, months) {
        let sqlPerMonth = ` with months as (
            select generate_series(
                date_trunc('month', now()) - '${months}month'::interval,
                date_trunc('month', now()),
                '1 month'::interval
            ) as datetime
        )
        select
            months.datetime,
            count(createdt)
        from months
        left join BLOCKS on date_trunc('month', BLOCKS.createdt) =months.datetime and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerMonth);
    }

    getBlocksByYear(channelName, years) {
        let sqlPerYear = ` with years as (
            select generate_series(
                date_trunc('year', now()) - '${years}year'::interval,
                date_trunc('year', now()),
                '1 year'::interval
            ) as year
        )
        select
            years.year,
            count(createdt)
        from years
        left join BLOCKS on date_trunc('year', BLOCKS.createdt) =years.year and genesis_block_hash ='${channelName}'
        group by 1
        order by 1 `;
        return sql.getRowsBySQlQuery(sqlPerYear);
    }

    // Transaction counts grouped by the creator organization (MSP id).
    getTxByOrgs(channelName) {
        let sqlPerOrg = ` select count(creator_msp_id), creator_msp_id
        from transactions
        where genesis_block_hash ='${channelName}'
        group by  creator_msp_id`;
        return sql.getRowsBySQlQuery(sqlPerOrg);
    }
}

module.exports = MetricService;
<filename>src/main/java/io/github/mfvanek/pg/common/health/DatabaseHealthFactoryImpl.java
/*
* Copyright (c) 2019-2021. <NAME> and others.
* https://github.com/mfvanek/pg-index-health
*
* This file is a part of "pg-index-health" - a Java library for
* analyzing and maintaining indexes health in PostgreSQL databases.
*
* Licensed under the Apache License 2.0
*/
package io.github.mfvanek.pg.common.health;
import io.github.mfvanek.pg.common.maintenance.MaintenanceFactory;
import io.github.mfvanek.pg.connection.HighAvailabilityPgConnection;
import java.util.Objects;
import javax.annotation.Nonnull;
/**
 * Default {@link DatabaseHealthFactory} implementation: produces
 * {@link DatabaseHealthImpl} instances bound to a shared
 * {@link MaintenanceFactory}.
 */
public class DatabaseHealthFactoryImpl implements DatabaseHealthFactory {

    // Used by every DatabaseHealth this factory creates; guaranteed non-null.
    private final MaintenanceFactory maintenanceFactory;

    /**
     * @param maintenanceFactory factory for maintenance helpers; must not be null
     * @throws NullPointerException if {@code maintenanceFactory} is null
     */
    public DatabaseHealthFactoryImpl(@Nonnull final MaintenanceFactory maintenanceFactory) {
        this.maintenanceFactory = Objects.requireNonNull(maintenanceFactory);
    }

    /**
     * Creates a health checker for the given highly-available connection.
     */
    @Nonnull
    @Override
    public DatabaseHealth of(@Nonnull HighAvailabilityPgConnection haPgConnection) {
        return new DatabaseHealthImpl(haPgConnection, maintenanceFactory);
    }
}
|
#!/bin/bash
# Serve the static production bundle from ./build; -s enables single-page-app
# mode (all unmatched routes fall back to index.html).
serve -s build
|
from typing import List
def threeSum(nums: List[int]) -> List[List[int]]:
    """Return all unique triplets in ``nums`` that sum to zero.

    Sorts ``nums`` in place, then for each anchor element walks two
    pointers inward over the remaining suffix, skipping duplicate values
    so each triplet is emitted exactly once. O(n^2) time, O(1) extra space.
    """
    nums.sort()
    triplets: List[List[int]] = []
    last = len(nums) - 1
    for anchor in range(len(nums) - 2):
        # A repeated anchor would only reproduce triplets already emitted.
        if anchor > 0 and nums[anchor] == nums[anchor - 1]:
            continue
        lo, hi = anchor + 1, last
        while lo < hi:
            total = nums[anchor] + nums[lo] + nums[hi]
            if total < 0:
                lo += 1
            elif total > 0:
                hi -= 1
            else:
                triplets.append([nums[anchor], nums[lo], nums[hi]])
                # Hop over runs of equal values on both ends to avoid duplicates.
                while lo < hi and nums[lo] == nums[lo + 1]:
                    lo += 1
                while lo < hi and nums[hi] == nums[hi - 1]:
                    hi -= 1
                lo += 1
                hi -= 1
    return triplets
<gh_stars>0
#pragma once
#include <set>
#include <string>
#include <iostream>
#include <string_view>
#include <iostream>
#include <iterator>
#include <vector>
#include <regex>
#include <unordered_map>
#include <chrono>
#include <sstream>
#include <limits>
#include <stack>
#include <map>
#include <thread>
#include <future>
#include <deque>
#include <condition_variable>
#include <atomic>
#include <iomanip>
#include <fstream>
#include <assert.h>
#include "command.h"
namespace {
using namespace std::literals;
// Single shared definitions of the side tags. They serve double duty:
// as non-type template arguments (template<const char* SIDE>) and as
// identity-comparable markers, so a pointer compare (mSide == BuySide)
// replaces a string compare at runtime.
const char BuySide[] = "BUY";
const char SellSide[] = "SELL";
const char dummy[] = "dummy";
// Tokenize `input` on `delimiter` via std::getline; a trailing delimiter
// does not produce a trailing empty token, and an empty input yields {}.
std::vector<std::string> Split(const std::string& input, const char delimiter){
    std::vector<std::string> pieces;
    std::istringstream stream(input);
    for (std::string token; std::getline(stream, token, delimiter); ) {
        pieces.emplace_back(std::move(token));
    }
    return pieces;
}
// True when `t` names one of the two recognized order sides.
// constexpr so side tags can be validated at compile time.
template <typename SIDE>
constexpr bool isValidOrderType(const SIDE& t) {
    return (t == BuySide) || (t == SellSide);
}
// Order is parameterized on its side through a pointer to one of the char
// array tags above (BuySide / SellSide), giving each side a distinct type.
template<const char* SIDE>
class Order;
template<const char* SIDE>
using Order_ptr = std::shared_ptr<Order<SIDE>>;
using SellOrder_ptr = Order_ptr<SellSide>;
using BuyOrder_ptr = Order_ptr<BuySide>;

// A single order. Ordering between orders of the same side is delegated to
// Compare() via the friend operator< on the shared_ptr aliases.
template<const char* SIDE>
class Order{
public:
    // Full constructor: timestamp (microseconds since midnight, see
    // OrderBook::getTime), symbol, order id, volume and price.
    Order(std::uint64_t ts, std::string s, std::uint32_t oid, uint64_t v, double p) noexcept
        : mTimestamp(ts), mSymbol(s), mSymbolHash(std::hash<std::string>{}(mSymbol)), mOrderID(oid), mVolume(v), mPrice(p) { }
    // Probe constructor: symbol (+ optional volume) only, used to build
    // lookup keys for equal_range / lower_bound queries.
    Order(std::string s, uint64_t v = 0) noexcept
        :mSymbol(s), mSymbolHash(std::hash<std::string>{}(mSymbol)), mVolume(v) { }
    // Assignment is deleted: orders are stored by shared_ptr and reordering
    // an in-place object would corrupt the containers that index it.
    Order<SIDE>& operator=(const Order& rhs) = delete;
    friend bool operator<(const Order_ptr<SIDE>& lhs, const Order_ptr<SIDE>& rhs){
        return lhs->Compare(rhs);
    }
    /* Order Book (BUY) while SELL just ordered by Symbol and Time
     * |------------------
     * | SYMBOL | volume |
     * | "1"      MAX
     * | "2"      ..
     * | "3"      ..
     * | "3"      ..
     * | "3"      ..
     * | "4"      MIN
     */
    // Strict weak ordering used by the multiset: BUY orders group by symbol
    // hash and sort by descending volume within a symbol; SELL orders group
    // by symbol hash only (insertion order preserves chronology).
    // NOTE(review): mSymbolHash is std::uint32_t while std::hash returns
    // size_t, so the hash is truncated; distinct symbols that collide after
    // truncation would interleave — confirm this is acceptable.
    bool Compare(const Order_ptr<SIDE>& rhs) const {
        if (IsBuyOrder()){
            if (mSymbolHash == rhs->mSymbolHash){
                return mVolume > rhs->mVolume;
            }
            return mSymbolHash < rhs->mSymbolHash;
        }else{
            // assume the input is chronologically ordered
            return mSymbolHash < rhs->mSymbolHash;
        }
    }
    /*
     * quick pointer checking instead of string compare
     * bool IsBuyOrder<&BuySide>():
     *   push    rbp
     *   mov     rbp, rsp
     *   mov     eax, 1
     *   pop     rbp
     *   ret
     * main:
     *   push    rbp
     *   mov     rbp, rsp
     *   call    bool IsBuyOrder<&BuySide>()
     *   mov     eax, 0
     *   pop     rbp
     *   ret
     * [eax(return register) is set to 1 with no runtime overhead] this is critical as this function is called often
     */
    // Side test compiles down to a constant: mSide is the template argument
    // pointer, so this is a pointer identity compare, not a string compare.
    inline constexpr bool IsBuyOrder() const {
        return (mSide == BuySide);
    }
public:
    // declare private with getter/setter
    std::uint64_t mTimestamp;    // microseconds since midnight (see getTime)
    std::string mSymbol;         //[PK]
    std::uint32_t mSymbolHash;   // truncated std::hash of mSymbol, used for ordering
    std::uint32_t mOrderID;      //[UNIQUE_KEY]
    const char* mSide = SIDE;    // points at BuySide or SellSide tag
    uint64_t mVolume;            //[PK + Composite Key]
    double mPrice;
};
/*
 * multiset is guaranteed to insert equal elements at the upper bound of their
 * equal range, preserving insertion (chronological) order among duplicates.
 * multiset also supports O(1) amortized hinted insertion, unlike priority_queue.
 */
// Side-specific order store keyed by the Order comparison above. The SIDE
// parameter is validated at compile time via isValidOrderType.
template<const char* SIDE, std::enable_if_t<isValidOrderType<const char*>(SIDE), bool> = false>
class OrdersContainer : public std::multiset<Order_ptr<SIDE>>{
public:
    using std::multiset<Order_ptr<SIDE>>::insert;
    // Debug dump of the container.
    // NOTE(review): `os << i` streams the shared_ptr itself (a pointer
    // address), not the order's fields — confirm whether a field dump
    // (symbol/volume/price) was intended here.
    friend std::ostream& operator<<(std::ostream& os, const OrdersContainer<SIDE>& oc){
        os << SIDE << ": ";
        for (const auto& i : oc){
            os << i << " ";
        }
        os << std::endl;
        return os;
    }
};
// Background bookkeeper that maintains, for every transaction timestamp, a
// per-symbol set of SELL orders sorted by ascending price — so the best
// (lowest-priced) sell at any past time point is an O(log n) map lookup.
// Messages (insert/cancel) are queued by the producer thread and drained by
// Process() running on a dedicated thread (started by OrderBook's ctor).
class SellOrderBookKeeping{
    // Orders sorted by ascending price: begin() is the cheapest sell.
    struct comp {
        bool operator()(const SellOrder_ptr& lhs, const SellOrder_ptr& rhs) const {
            return (lhs->mPrice < rhs->mPrice);
        }
    };
    using BestPriceSellOrdersSet = std::set<SellOrder_ptr, comp>;
    using BestSellPriceForSymbolHash = std::unordered_map<std::string_view/*symbol*/, BestPriceSellOrdersSet>;
    using BestSellPriceAtTimePointMap = std::map<std::uint64_t/*timestamp*/, BestSellPriceForSymbolHash>;
    // (order, isInsert): true = insert, false = cancel.
    using MessageType = std::tuple<SellOrder_ptr, bool>;
public:
    // Message-pump loop: pops queued inserts/cancels and applies them.
    // NOTE(review): this loop never exits and mExit (declared below) is never
    // consulted — the thread only dies with the process; confirm intended.
    bool Process(){
        while(true){
            std::unique_lock<std::mutex> lk(mMessageBoxMutex);
            mMessageBoxCondVar.wait(lk, [this]{ return !mMessageBox.empty(); });
            const auto [o, isInsert] = mMessageBox.front();
            if (isInsert){
                DoSellOrderInsert(o);
            }else{
                DoSellOrderCXL(o);
            }
            mMessageBox.pop_front();
            lk.unlock();
            mMessageBoxCondVar.notify_one();
        }
    }
    // Producer side: enqueue an insert (default) or cancel message.
    // NOTE(review): a unique_lock owns its mutex immediately after
    // construction, so the `!lk.owns_lock()` branch below is dead code.
    void AddMessage(const SellOrder_ptr newOrder, const bool isInsert = true){
        std::unique_lock<std::mutex> lk(mMessageBoxMutex);
        if (!lk.owns_lock())
            mMessageBoxCondVar.wait(lk);
        mMessageBox.push_back(std::make_tuple(newOrder, isInsert));
        lk.unlock();
        mMessageBoxCondVar.notify_one();
    }
    // Returns the snapshot at the first timestamp >= ts.
    // NOTE(review): spins (1ms sleeps) until the message queue drains, and
    // reads mMessageBox.empty() without the lock — racy; confirm acceptable
    // for the intended single-producer usage.
    BestSellPriceAtTimePointMap::iterator GetBestSell(std::uint64_t ts) {
        while (!mMessageBox.empty()) {
            std::this_thread::sleep_for(1ms);
        }
        return mLookupBestSellPrice.lower_bound(ts);;
    }
public:
    // Apply an insert: copy the latest snapshot forward to the new timestamp,
    // then add the order to its symbol's price-sorted set.
    inline void DoSellOrderInsert(const SellOrder_ptr& newOrder){
        if (mLookupBestSellPrice.empty()){
            BestSellPriceForSymbolHash& second = mLookupBestSellPrice[newOrder->mTimestamp];
            second[newOrder->mSymbol].insert(newOrder);
        }else{
            const BestSellPriceForSymbolHash& prevTS = mLookupBestSellPrice.rbegin()->second;
            const auto [it, success] = mLookupBestSellPrice.insert({newOrder->mTimestamp, prevTS});
            BestSellPriceForSymbolHash& newTS = it->second;
            newTS[newOrder->mSymbol].insert(newOrder);
        }
    }
    // Apply a cancel: copy the latest snapshot forward, then remove the
    // order with the matching order id from its symbol's set.
    inline void DoSellOrderCXL(const SellOrder_ptr& newOrder){
        if (mLookupBestSellPrice.empty()){
            // Invalid: ignore
        }else{
            const BestSellPriceForSymbolHash& prevTS = mLookupBestSellPrice.rbegin()->second;
            const auto [it, success] = mLookupBestSellPrice.insert({newOrder->mTimestamp, prevTS});
            BestSellPriceForSymbolHash& newTS = it->second;
            auto& st = newTS[newOrder->mSymbol];
            auto itr = std::find_if(st.begin(), st.end(), [newOrder](const SellOrder_ptr& i) {
                return (i->mOrderID == newOrder->mOrderID);
            });
            if (itr != st.end())
                st.erase(itr);
        }
    }
    std::deque<MessageType> mMessageBox;            // pending insert/cancel messages
    std::mutex mMessageBoxMutex;                    // guards mMessageBox
    std::condition_variable mMessageBoxCondVar;     // producer/consumer handshake
    std::atomic_bool mExit;                         // NOTE(review): never read — see Process()
    // Maybe squeeze this down to regular intervals to bound memory?
    BestSellPriceAtTimePointMap mLookupBestSellPrice;
};
class OrderBook{
public:
OrderBook(){
std::thread(&SellOrderBookKeeping::Process, std::ref(mBookKeeper)).detach();
}
/* CORE APIS [START] */
/*
* Since the OB is sorted based on (symbols + MAX volmume) its takes log time with no additional memory
*/
std::unordered_map<std::string_view, std::uint64_t> OrderCounts(){
std::unordered_map<std::string_view, std::uint64_t> ret;
if (!mBuyOrders.empty()){
auto firstKeyItr = mBuyOrders.begin();
for (auto currItr = firstKeyItr; currItr != mBuyOrders.end(); ) {
// TODO Must be optimized with single call for composite key(fat key)
BuyOrder_ptr tmp = std::make_shared<Order<BuySide>>((*currItr)->mSymbol, UINT64_MAX);
const auto lb = mBuyOrders.lower_bound(tmp);
tmp->mVolume = 0;
const auto ub = mBuyOrders.upper_bound(tmp);
ret[(*lb)->mSymbol] = std::distance(lb, ub);
currItr = ub;
}
}
if (!mSellOrders.empty()){
auto firstKeyItr = mSellOrders.begin();
for (auto currItr = firstKeyItr; currItr != mSellOrders.end(); ) {
SellOrder_ptr tmp = std::make_shared<Order<SellSide>>((*currItr)->mSymbol);
const auto [lb, ub] = mSellOrders.equal_range(tmp);
ret[(*lb)->mSymbol] += std::distance(lb, ub);
currItr = ub;
}
}
return ret;
}
/*
* Since the BUY orders in OB are sorted in descending order of hash(symbols, volumes) its takes (log + constant) time to take from top
* designed for end of trade day
*/
std::vector<BuyOrder_ptr> BiggestBuyOrders(const std::string& symbol, const int top){
std::vector<BuyOrder_ptr> ret;
ret.reserve(top);
BuyOrder_ptr tmp = std::make_shared<Order<BuySide>>(symbol, UINT64_MAX);
const auto lb = mBuyOrders.lower_bound(tmp);
tmp->mVolume = 0;
const auto ub = mBuyOrders.upper_bound(tmp);
OrdersContainer<BuySide>::const_iterator currItr = lb;
for (int currCount = 0; (currItr != ub && currCount++ < top) ; ++currItr){
ret.emplace_back(*currItr);
}
return ret;
}
/*
* Sell price must be lowest for maximum profit. lets have whole Order returned just in case need more than volume+price info in future(Open Closed Principle)
* 2 approaches:
* #1 - store the lowest price of every symbol for every transaction(time point).
* pros: cons:
* quick turnaround O(1) extra memory
* ----------------------------------------------
* [timepoint] [symbol] [orderID of best sell]
* ---------------------------------------------
* 9'0 clock "1" 1[$5]
* "2" 2[$5]
* "3" 3
* 10'0 clock "1" 11[$4]->1[$5]
* "2" 22[$4]->2[$5]
* "3" 3
* "4" 44
* 11'0 clock "1" 11[$4]->1[$5]->5[$6] // ID: 5 added
* "2" 22[$4]->2[$5]
* "3" 3
* "4" 44
* 12'0 clock "1" 11[$4]->5[$6] // ID: 1 cancelled
* "2" 2[$5] // ID: 22 cancelled
* "3" 3
* "4" 44
* #2 - rollback the order book to the said time point. using either Command or Memento design patterns
* Command pattern: every doAmend() calls will be stacked up for undoAmend()
* Memento pattern: cache the OrderBook (of shared pointers) at various time point(or transactions)
* pros: cons:
* lazy turnaround no (minimum) extra memory
*
* Implementing #1 for now at constant time complexity O(1)
*/
SellOrder_ptr BestSellAtTime(const std::string& symbol, const std::string& timestamp){
SellOrder_ptr minOrder;
// Below code is to query at the end of trade day
#if 0
double minPrice = std::numeric_limits<double>::max();
SellOrder_ptr tmp = std::make_shared<Order<SellSide>>(symbol);
const auto [lb, ub] = mSellOrders.equal_range(tmp);
std::uint64_t ts = getTime(timestamp) - mMidNightInMS;
// std::find_if performs better than std::min_element
std::find_if(lb, ub, [ts, &minPrice, &minOrder](const SellOrder_ptr& item){
if (item->mTimestamp > ts)
return true;
// std::min(minPrice, item->mPrice)
if (item->mPrice < minPrice){
minPrice = item->mPrice;
minOrder = item;
}
return false;
});
#else
// Below code is to query within the trade day
std::uint64_t ts = getTime(timestamp) - mMidNightInMS;
auto itr = mBookKeeper.GetBestSell(ts);
const auto& st = itr->second[symbol];
if (const auto& st = itr->second[symbol]; !st.empty()){
minOrder = *(st.begin());
}
#endif
return minOrder;
}
/* CORE APIS [END] */
// BUY
inline void DoBuyOrderInsert(const BuyOrder_ptr& newOrder){
auto itr = mBuyOrders.insert(newOrder);
mLookupBuyOrders[newOrder->mOrderID] = itr;
}
inline void DoBuyOrderCXL(const BuyOrder_ptr& newOrder){
auto Buyitr = mLookupBuyOrders.find(newOrder->mOrderID);
if (Buyitr != mLookupBuyOrders.end()){
mBuyOrders.erase(Buyitr->second);
}
}
inline void DoBuyOrderCRP(const BuyOrder_ptr& newOrder){
DoBuyOrderCXL(newOrder);
DoBuyOrderInsert(newOrder);
}
// SELL
inline void DoSellOrderInsert(const SellOrder_ptr& newOrder){
mBookKeeper.AddMessage(newOrder);
auto itr = mSellOrders.insert(newOrder);
mLookupSellOrders[newOrder->mOrderID] = itr;
}
inline void DoSellOrderCXL(const SellOrder_ptr& newOrder){
mBookKeeper.AddMessage(newOrder, false);
auto Sellitr = mLookupSellOrders.find(newOrder->mOrderID);
if (Sellitr != mLookupSellOrders.end()){
mSellOrders.erase(Sellitr->second);
}
}
inline void DoSellOrderCRP(const SellOrder_ptr& newOrder){
// Do not modify the pointer. choronologial order will be lost so operator= is deleted
DoSellOrderCXL(newOrder);
DoSellOrderInsert(newOrder);
}
template<const char* SIDE>
Order_ptr<SIDE> GetNewOrder(const std::vector<std::string>& v) noexcept{
std::uint64_t time_since_start_of_day_ms = getTime(v[0]) - mMidNightInMS;
return std::make_shared<Order<SIDE>>(time_since_start_of_day_ms, std::move(v[1]), (std::uint64_t)std::atoi(v[2].data()),
(std::uint64_t)std::atoi(v[5].data()), (double)std::stod(v[6].data()));
}
private:
    /*
     * Processor is faster at numeric compare than string compare
     */
    // Convert a "HH:MM:SS[.ffffff]" string into a numeric timestamp.
    // Returns 0 on parse failure. The integer part is converted to
    // microseconds since the epoch of "today" (via mktime/localtime) and the
    // fractional part, when present, is added on top.
    // NOTE(review): the fractional digits are added as a raw integer; this is
    // only consistent when the input always carries exactly 6 fractional
    // digits (microseconds) -- confirm against the data format.
    std::uint64_t getTime(const std::string& t) noexcept{
        // Eg: 14:17:21.877391
        const auto& v = Split(t, '.');
        std::istringstream ss(v[0]);
        // Seed the tm with today's date so mktime yields a same-day epoch.
        time_t tmp{0};
        struct tm tmm = *localtime(&tmp);
        ss >> std::get_time(&tmm, "%H:%M:%S");
        if (ss.fail()) {
            // Unparseable time-of-day: treat as midnight/invalid.
            return 0;
        }else {
            std::time_t time = std::mktime(&tmm);
            // NOTE(review): this local `t` shadows the parameter `t`.
            std::chrono::system_clock::time_point t = std::chrono::system_clock::from_time_t(time);
            auto t_ms = std::chrono::time_point_cast<std::chrono::microseconds>(t);
            std::uint64_t t_f = t_ms.time_since_epoch().count();
            if (v.size() >= 2){
                try{
                    // NOTE(review): std::atoi never throws, so this catch is
                    // effectively dead code.
                    std::size_t tt_ms = std::atoi(v[1].data());
                    return (t_f + tt_ms);
                }catch(...){
                    return 0;
                }
            }
            return t_f;
        }
    }
    // reduced timestamp size by calculating from midnight(or even market start better?) instead of time_since_epoch in microseconds for accuracy
    const std::uint64_t mMidNightInMS = getTime("00:00:00");
    // BST for storage and hash map for fast look up.
    // Primary ordered storage per side (sorted containers of orders).
    OrdersContainer<BuySide> mBuyOrders;
    OrdersContainer<SellSide> mSellOrders;
    // O(1) orderID -> container-iterator lookups used by cancel/amend.
    std::unordered_map<std::uint32_t/*orderID*/, OrdersContainer<BuySide>::iterator> mLookupBuyOrders;
    std::unordered_map<std::uint32_t/*orderID*/, OrdersContainer<SellSide>::iterator> mLookupSellOrders;
    // Time-indexed sell-side snapshots backing the BestSellAtTime query.
    SellOrderBookKeeping mBookKeeper;
};
// As per SOLID Design Principles. Strong exception safety
// Demo driver: streams a semicolon-separated test-data file through the
// order book, then times and prints the three query APIs.
class DaVinciOrderMatchingEngine : public Command
{
public:
    void execute(){
        // Eg: 14:17:21.877391;DVAM1;00000001;I;BUY;100;12.5
        static const std::string file{"../DaVinci_test_data.txt"}; // SSO
        std::ifstream input;
        try{
            input.open(file);
            auto start = std::chrono::steady_clock::now();
            for (std::string line; std::getline(input, line, '\n'); ) {
                const auto& v = Split(line, ';');
                assert(v.size() == 7);
                DoOrder(v);
            }
            auto end = std::chrono::steady_clock::now();
            std::cout << "Time to complete data file processing: " << (end - start)/1s << "s." << std::endl;
        }catch (...) {
            // Previously errors were swallowed silently, making a bad data
            // file indistinguishable from an empty book. Report and continue
            // to the (empty) query section.
            std::cerr << "Error while processing data file: " << file << std::endl;
        }
        // std::ifstream closes itself on destruction; the previous code
        // called close() twice on the error path for no benefit.

        // API 1: per-symbol order counts.
        auto start = std::chrono::steady_clock::now();
        std::cout << "OrderCounts: " << std::endl;
        for (const auto& i : mOB.OrderCounts()){
            std::cout << "Symbol: " << i.first << " Count: " << i.second << std::endl;
        }
        auto end = std::chrono::steady_clock::now();
        std::cout << "API 1 completed in : " << std::chrono::duration_cast<std::chrono::microseconds>(end - start).count() << "µs." << std::endl;

        // API 2: top-N buy orders by volume for one symbol.
        start = std::chrono::steady_clock::now();
        std::cout << "top 3 BiggestBuyOrders for DVAM1: " << std::endl;
        for (const BuyOrder_ptr& i : mOB.BiggestBuyOrders("DVAM1", 3)){
            std::cout << "Order ID: " << i->mOrderID << " Volume: " << i->mVolume << std::endl;
        }
        end = std::chrono::steady_clock::now();
        std::cout << "API 2 completed in : " << std::chrono::duration_cast<std::chrono::microseconds>(end - start).count() << "µs." << std::endl;

        // API 3: best (lowest-priced) sell order for a symbol at a point in time.
        start = std::chrono::steady_clock::now();
        std::cout << "BestSellAtTime for TEST0 until 15:30:00: " << std::endl;
        const SellOrder_ptr& s = mOB.BestSellAtTime("TEST0", "15:30:00");
        if (!s){
            std::cout << "TEST0 has been cancelled or never placed until 15:30:00" << std::endl;
        }else{
            std::cout << "Sell Price: " << s->mPrice << " Volume: " << s->mVolume << std::endl;
        }
        end = std::chrono::steady_clock::now();
        std::cout << "API 3 completed in : " << std::chrono::duration_cast<std::chrono::microseconds>(end - start).count() << "µs." << std::endl;
    }
private:
    // Dispatch one parsed record to the matching book operation.
    // v layout: [0]=timestamp [1]=symbol [2]=orderID [3]=op [4]=side [5]=volume [6]=price
    void DoOrder(const std::vector<std::string>& v){
        // non-type template parameter to seperate out buy/sell side at compile time.
        if (IsBuy(v[4])){
            BuyOrder_ptr o = mOB.GetNewOrder<BuySide>(v);
            if (IsInsert(v[3])){
                mOB.DoBuyOrderInsert(o);
            }else if (IsCancel(v[3])){
                mOB.DoBuyOrderCXL(o);
            }else if (IsAmend(v[3])){
                mOB.DoBuyOrderCRP(o);
            }
        }else{
            SellOrder_ptr o = mOB.GetNewOrder<SellSide>(v);
            if (IsInsert(v[3])){
                mOB.DoSellOrderInsert(o);
            }else if (IsCancel(v[3])){
                mOB.DoSellOrderCXL(o);
            }else if (IsAmend(v[3])){
                mOB.DoSellOrderCRP(o);
            }
        }
    }
    // Field-value predicates for the side ([4]) and operation ([3]) columns.
    inline bool IsBuy(const std::string& ID){
        return (ID == "BUY");
    }
    inline bool IsInsert(const std::string& OP){
        return (OP == "I");
    }
    inline bool IsCancel(const std::string& OP){
        return (OP == "C");
    }
    inline bool IsAmend(const std::string& OP){
        return (OP == "A");
    }
    OrderBook mOB;
};
}// anonymous namespace will shrink binary size by NOT exporting inline functions to as they are static to this file
|
<reponame>reportportal/plugin-bts-azure
/*
* WorkItemTracking
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: 6.1-preview
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.epam.reportportal.extension.azure.rest.client.model.workitem;
import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Describes a work item.
 */
@ApiModel(description = "Describes a work item.")
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2021-08-03T21:33:08.123Z")
public class WorkItem extends WorkItemTrackingResource {
  @SerializedName("commentVersionRef")
  private WorkItemCommentVersionRef commentVersionRef = null;
  @SerializedName("fields")
  private Map<String, Object> fields = null;
  @SerializedName("id")
  private Integer id = null;
  @SerializedName("relations")
  private List<WorkItemRelation> relations = null;
  @SerializedName("rev")
  private Integer rev = null;
  /** Fluent setter for {@link #getCommentVersionRef()}; returns {@code this} for chaining. */
  public WorkItem commentVersionRef(WorkItemCommentVersionRef commentVersionRef) {
    this.commentVersionRef = commentVersionRef;
    return this;
  }
   /**
   * Reference to a specific version of the comment added/edited/deleted in this revision.
   * @return commentVersionRef
  **/
  @ApiModelProperty(value = "Reference to a specific version of the comment added/edited/deleted in this revision.")
  public WorkItemCommentVersionRef getCommentVersionRef() {
    return commentVersionRef;
  }
  public void setCommentVersionRef(WorkItemCommentVersionRef commentVersionRef) {
    this.commentVersionRef = commentVersionRef;
  }
  /** Fluent setter for {@link #getFields()}; returns {@code this} for chaining. */
  public WorkItem fields(Map<String, Object> fields) {
    this.fields = fields;
    return this;
  }
  /** Adds a single field entry, lazily creating the map; returns {@code this} for chaining. */
  public WorkItem putFieldsItem(String key, Object fieldsItem) {
    if (this.fields == null) {
      this.fields = new HashMap<String, Object>();
    }
    this.fields.put(key, fieldsItem);
    return this;
  }
   /**
   * Map of field and values for the work item.
   * @return fields
  **/
  @ApiModelProperty(value = "Map of field and values for the work item.")
  public Map<String, Object> getFields() {
    return fields;
  }
  public void setFields(Map<String, Object> fields) {
    this.fields = fields;
  }
  /** Fluent setter for {@link #getId()}; returns {@code this} for chaining. */
  public WorkItem id(Integer id) {
    this.id = id;
    return this;
  }
   /**
   * The work item ID.
   * @return id
  **/
  @ApiModelProperty(value = "The work item ID.")
  public Integer getId() {
    return id;
  }
  public void setId(Integer id) {
    this.id = id;
  }
  /** Fluent setter for {@link #getRelations()}; returns {@code this} for chaining. */
  public WorkItem relations(List<WorkItemRelation> relations) {
    this.relations = relations;
    return this;
  }
  /** Adds a single relation, lazily creating the list; returns {@code this} for chaining. */
  public WorkItem addRelationsItem(WorkItemRelation relationsItem) {
    if (this.relations == null) {
      this.relations = new ArrayList<WorkItemRelation>();
    }
    this.relations.add(relationsItem);
    return this;
  }
   /**
   * Relations of the work item.
   * @return relations
  **/
  @ApiModelProperty(value = "Relations of the work item.")
  public List<WorkItemRelation> getRelations() {
    return relations;
  }
  public void setRelations(List<WorkItemRelation> relations) {
    this.relations = relations;
  }
  /** Fluent setter for {@link #getRev()}; returns {@code this} for chaining. */
  public WorkItem rev(Integer rev) {
    this.rev = rev;
    return this;
  }
   /**
   * Revision number of the work item.
   * @return rev
  **/
  @ApiModelProperty(value = "Revision number of the work item.")
  public Integer getRev() {
    return rev;
  }
  public void setRev(Integer rev) {
    this.rev = rev;
  }
  /** Field-wise equality including the superclass state. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    WorkItem workItem = (WorkItem) o;
    return Objects.equals(this.commentVersionRef, workItem.commentVersionRef) &&
        Objects.equals(this.fields, workItem.fields) &&
        Objects.equals(this.id, workItem.id) &&
        Objects.equals(this.relations, workItem.relations) &&
        Objects.equals(this.rev, workItem.rev) &&
        super.equals(o);
  }
  /** Hash consistent with {@link #equals(Object)}, folding in the superclass hash. */
  @Override
  public int hashCode() {
    return Objects.hash(commentVersionRef, fields, id, relations, rev, super.hashCode());
  }
  /** Multi-line debug representation; not intended for machine parsing. */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class WorkItem {\n");
    sb.append("    ").append(toIndentedString(super.toString())).append("\n");
    sb.append("    commentVersionRef: ").append(toIndentedString(commentVersionRef)).append("\n");
    sb.append("    fields: ").append(toIndentedString(fields)).append("\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    relations: ").append(toIndentedString(relations)).append("\n");
    sb.append("    rev: ").append(toIndentedString(rev)).append("\n");
    sb.append("}");
    return sb.toString();
  }
  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
|
'use strict';
const BbPromise = require('bluebird');
const AlexaApi = require('./AlexaApi');
module.exports = {
updateModels(diffs) {
const alexaApi = new AlexaApi(this.getToken(), this.getClient());
return BbPromise.bind(this)
.then(() => BbPromise.resolve(diffs))
.mapSeries(function (model) {
const localSkills = this.serverless.service.custom.alexa.skills;
const local = localSkills.find(skill => skill.id === model.skillId);
if (
!(typeof local.models[model.locale] === 'undefined')
&& !(typeof model.diff === 'undefined')
) {
return alexaApi.updateModel(local.id, model.locale, local.models[model.locale]);
}
return BbPromise.resolve();
});
},
};
|
#***************************************************************************
#
# BSD LICENSE
#
# Copyright(c) 2007-2019 Intel Corporation. All rights reserved.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#**************************************************************************
#!/usr/bin/bash
# NOTE(review): a shebang only takes effect on the very first line of a file;
# this one sits below the licence header, so the script relies on being
# invoked via an explicit shell (e.g. `bash check_style.sh`).

# exit on errors
set -e

# Fail early with a clear error if QZ_ROOT is unset.
: "${QZ_ROOT?}"

# Quote all path expansions so paths containing spaces do not break the script.
readonly BASEDIR=$(readlink -f "$(dirname "$0")")
declare -i rc=0

rm -f "$BASEDIR/astyle.log"

# `command -v` is the portable way to test for a tool (`hash` is bash-specific).
if command -v astyle >/dev/null 2>&1; then
    echo -n "Checking coding style..."
    find "$QZ_ROOT" -iregex '.*\.[ch]' | \
        xargs astyle --options="$BASEDIR/astylerc" | \
        tee -a "$BASEDIR/astyle.log"
    # astyle prints "Formatted <file>" for each file it had to change.
    if grep -q "^Formatted" "$BASEDIR/astyle.log"; then
        echo -e "ERRORS detected\n"
        grep --color=auto "^Formatted.*" "$BASEDIR/astyle.log"
        echo "Incorrect code style detected in one or more files."
        echo "The files have been automatically formatted."
        rc=1
    else
        echo " OK"
    fi
else
    echo "You do not have astyle installed so your code style is not being checked!"
    rc=2
fi

exit $rc
|
"""
Parsing and gridding GBIF [taxon, latitude, longitude] .csv file
@author: <EMAIL>
"""
import pandas as pd
import os
import numpy as np
###########################################################################################
def subcoor(d, lat, lon):
    """Return the rows of ``d`` strictly inside the 1-degree cell at (lat, lon).

    Column 1 is latitude and column 2 is longitude; both bounds are
    exclusive, matching the original chained comparisons.

    The original chained ``.loc[mask]`` calls built each mask from the *full*
    frame and applied it to a shrinking subset, which modern pandas rejects
    as an unalignable boolean indexer; one combined mask is equivalent and
    robust.
    """
    lat_col = d.iloc[:, 1]
    lon_col = d.iloc[:, 2]
    in_cell = (lat_col > lat) & (lat_col < lat + 1) & (lon_col > lon) & (lon_col < lon + 1)
    return d.loc[in_cell]
def makegrid(n):
    """Return an (n*n, 2) integer array of 1-based (i, j) grid coordinates."""
    # Local import: ``product`` was used without ever importing itertools,
    # so this function raised NameError when called.
    from itertools import product
    a = np.array(list(range(n))) + 1  # axis with offset for 0 base index to 1
    points = product(a, repeat=2)  # cartesian pairs, including (i, i)
    return np.asarray(list(points))
def maketensor(z, y, x):
    """Allocate a zero-filled float tensor of shape (z, y, x)."""
    return np.zeros((z, y, x))
def makespphash(iterable):
    """Return the elements of ``iterable`` in first-seen order, deduplicated.

    Unhashable elements (dicts, lists) are converted to hashable tuples for
    membership tracking, but appended to the result unchanged.
    """
    seen = set()
    result = []
    for element in iterable:
        hashed = element
        if isinstance(element, dict):
            # Python 3 fix: dict.iteritems() no longer exists; items() is the
            # equivalent and this previously raised AttributeError on dicts.
            hashed = tuple(sorted(element.items()))
        elif isinstance(element, list):
            hashed = tuple(element)
        if hashed not in seen:
            result.append(element)
            seen.add(hashed)
    return result
################################################################################
def readgbif(path="../gbif/pgbif.csv", sep="\t"):
    """Load a GBIF [taxon, latitude, longitude] table from a delimited file.

    ``sep`` must be passed by keyword: recent pandas deprecates/removes the
    positional form ``read_csv(path, sep)``.
    """
    d = pd.read_csv(path, sep=sep)
    # print('Load GBIF file with #rows = %i' %(d.size))
    return d
def tensorgbif():
    # NOTE(review): dead code -- this references an undefined global ``d``
    # (NameError if ever called) and is shadowed by the later
    # ``tensorgbif(lat, lon, step, breaks, d, sppdic, vtype)`` definition
    # below, so this version is unreachable in practice.
    spp=makespphash(d.iloc[:,0])
    spptot=len(spp)
    sppdic=make_sppdic(spp,spptot)
    #tens=maketensor(10,10,spptot) # this for future implementation
    return('notimplemented yet')
def whichwindow(w, v):
    """Index of the first half-open interval [lo, hi) in ``w`` containing ``v``.

    Returns ``len(w)`` when no interval matches (value outside the grid).
    """
    for idx, (lo, hi) in enumerate(w):
        if lo <= v < hi:
            return idx
    return len(w)
def iffamily(val, fam):
    """Indicator: 1 when ``val`` equals ``fam``, otherwise 0."""
    return 1 if val == fam else 0
def tensoronetaxon(step, breaks, lat, lon, d, sppname, vtype="freq"):
    """Rasterise occurrences of one family over a single grid cell.

    Parameters
    ----------
    step : numeric
        Cell width in degrees.
    breaks : int
        Number of sub-divisions per axis.
    lat, lon : float
        South-west corner of the cell.
    d : pandas.DataFrame
        GBIF records laid out as [family, latitude, longitude].
    sppname : str
        Family name to rasterise (e.g. "Cactaceae").
    vtype : str
        "freq" -> per-pixel share of ``sppname`` among all records;
        "yesno" -> binary presence/absence raster.

    Returns
    -------
    numpy.ndarray of shape (breaks + 1, breaks + 1)
    """
    if vtype not in ["freq", "yesno"]:
        # Bug fix: the Exception was previously constructed but never raised,
        # so an invalid vtype silently fell through to the "yesno" branch.
        raise ValueError("The type of raster has to be either 'freq' or 'yesno'")
    # Subset records to the requested cell.
    d_ = subcoor(d, lat, lon)
    cactusnum = int(d_[d_['family'] == sppname].size)
    # print('There are {} {} within this grid'.format(sppname, cactusnum))
    # Build the sub-grid windows along each axis.
    sb = step / breaks
    xwind = [[lon + (sb * i), lon + (sb * (i + 1))] for i in range(int(breaks))]
    ywind = [[lat + (sb * i), lat + (sb * (i + 1))] for i in range(int(breaks))]
    ####################################################
    # Fill tensor: layer 0 = other families, layer 1 = sppname.
    tens = maketensor(2, breaks + 1, breaks + 1)
    for index, r in d_.iterrows():
        da = whichwindow(ywind, r[1])
        do = whichwindow(xwind, r[2])
        dspp = iffamily(r[0], sppname)
        tens[dspp, da, do] = tens[dspp, da, do] + 1
    # Total observations per pixel, summed over both layers.
    totobs = tens.sum(axis=0)
    # Share (or presence) of the target family; the epsilon avoids division
    # by zero in empty pixels.
    if vtype == "freq":
        cactae = tens[1, :, :] / (totobs + 0.0001)
    else:
        cactae = tens[1, :, :]
        cactae = (cactae > 0) * 1
    return cactae
def key_for_value(d, value):
    """Reverse lookup: first key in ``d`` mapping to ``value``.

    Raises ValueError when ``value`` is absent, matching list.index().
    """
    for key, val in d.items():
        if val == value:
            return key
    raise ValueError("{!r} is not in the dictionary values".format(value))
def make_sppdic(spp, total):
    """Map integer indices 0..total-1 to the first ``total`` species names."""
    return {i: spp[i] for i in range(total)}
def tensorgbif(lat, lon, step, breaks, d, sppdic, vtype="yesno"):
    """Rasterise every species in ``sppdic`` over a single grid cell.

    Returns
    -------
    (totobs, cactae) : tuple
        ``totobs`` -- (breaks+1, breaks+1) per-pixel record counts summed
        over species; ``cactae`` -- per-species tensor, as frequencies
        ("freq") or a binary presence mask ("yesno").
    """
    if vtype not in ["freq", "yesno"]:
        # Bug fix: the Exception was previously constructed but never raised.
        raise ValueError("The type of raster has to be either 'freq' or 'yesno'")
    # Subset records to the requested cell.
    d_ = subcoor(d, lat, lon)
    # Build the sub-grid windows along each axis.
    sb = step / breaks
    xwind = [[lon + (sb * i), lon + (sb * (i + 1))] for i in range(int(breaks))]
    ywind = [[lat + (sb * i), lat + (sb * (i + 1))] for i in range(int(breaks))]
    ywind.reverse()
    # reverse necessary, as 2d numpy array the first dimension is
    # the vertical but starts oppositely as we measure lat |
    #                                                      v
    # the horizontal dimension works intuitively ->
    ##########################################################
    # Fill tensor: one layer per species index in sppdic.
    tens = maketensor(len(sppdic), breaks + 1, breaks + 1)
    for index, r in d_.iterrows():
        da = whichwindow(ywind, r[1])
        do = whichwindow(xwind, r[2])
        dspp = key_for_value(sppdic, r[0])
        tens[dspp, da, do] = tens[dspp, da, do] + 1
    # Total observations per pixel across all species.
    totobs = tens.sum(axis=0)
    if vtype == "freq":
        cactae = tens[:, :, :] / (totobs + 0.0001)
    else:
        cactae = tens[:, :, :]
        cactae = (cactae > 0) * 1
    return (totobs, cactae)
def vec_tensorgbif(latlon, step, breaks, d, sppdic, vtype):
    """Run ``tensorgbif`` over each (lat, lon) cell in ``latlon``.

    Returns parallel lists: per-cell total-observation grids and per-cell
    species tensors, in the same order as ``latlon``.
    """
    totals = []
    tensors = []
    for pair in latlon:
        grid_total, grid_spp = tensorgbif(float(pair[0]), float(pair[1]), step, breaks, d, sppdic, vtype)
        totals.append(grid_total)
        tensors.append(grid_spp)
    return (totals, tensors)
# def make_cacdic(spp,total):
# sppdic={'Cactaceae':1 , 'NoCactaceae':0}
# return(sppdic)
# def make_locdic(lon,totalwindows=1,windowstep=0.1):
# locdic={}
# lon=round(lon,1)
# for i in range(0,totalwindows):
# locdic[i]=round(lon,1)
# lon=lon+windowstep
# return(locdic)
# # generate translators of location
# londic=make_locdic(lon,breaks+1)
# latdic=make_locdic(lat,breaks+1)
# # total cactus
# # d_[d_['family']=='Cactaceae'].size
# d_[d_['family']=='Brassicaceae'].size
#
|
public class CategorizeInteger {
  /**
   * Maps an integer to a severity bucket: values up to 5 are "ok",
   * 6-10 are "notify", anything above 10 is "warning".
   *
   * @param n the value to classify
   * @return "ok", "notify" or "warning"
   */
  public static String categorize(int n) {
    if (n <= 5) {
      return "ok";
    } else if (n <= 10) {
      return "notify";
    } else {
      return "warning";
    }
  }

  /**
   * CLI entry point: reads one integer argument and prints its category.
   *
   * @param args args[0] must be a decimal integer
   */
  public static void main(String[] args) {
    // Bug fix: a missing argument previously crashed with
    // ArrayIndexOutOfBoundsException instead of a usage message.
    if (args.length < 1) {
      System.err.println("Usage: CategorizeInteger <integer>");
      System.exit(1);
    }
    int input = Integer.parseInt(args[0]);
    String category = categorize(input);
    System.out.println(category);
  }
}
import Dependencies._
// Root aggregate: builds/tests every module but publishes nothing itself.
lazy val agni = project
  .in(file("."))
  .settings(allSettings)
  .settings(noPublishSettings)
  .aggregate(core, `twitter-util`, monix, `cats-effect`, examples)
  .dependsOn(core, `twitter-util`, monix, `cats-effect`, examples)
// Settings applied to every published module.
lazy val allSettings = Seq.concat(
  buildSettings,
  baseSettings,
  publishSettings
)
// Organisation, Scala cross-versions and the kind-projector compiler plugin.
lazy val buildSettings = Seq(
  organization := "com.github.tkrs",
  scalaVersion := V.`scala2.13`,
  crossScalaVersions := Seq(V.`scala2.12`, V.`scala2.13`),
  libraryDependencies += compilerPlugin((P.kindeProjector).cross(CrossVersion.full))
)
// Runtime dependencies shared by core; test-only dependencies for all modules.
lazy val coreDeps = Seq(P.datastaxJavaDriver, P.catsCore, P.shapeless)
lazy val testDeps = Seq(P.scalacheck, P.scalatest, P.mockito).map(_ % Test)
// Compiler flags, dependency wiring and resolvers common to all modules.
lazy val baseSettings = Seq(
  scalacOptions ++= compilerOptions ++ {
    // Pre-2.13 Scala needs extra flags that 2.13 removed.
    CrossVersion.partialVersion(scalaVersion.value) match {
      case Some((2, n)) if n >= 13 => Nil
      case _                       => Seq("-Xfuture", "-Ypartial-unification", "-Yno-adapted-args")
    }
  },
  scalacOptions in (Compile, console) := compilerOptions,
  scalacOptions in (Compile, test) := compilerOptions,
  libraryDependencies ++= (coreDeps ++ testDeps).map(_.withSources),
  resolvers ++= Seq(
    Resolver.sonatypeRepo("releases"),
    Resolver.sonatypeRepo("snapshots")
  ),
  fork in Test := true,
  // The console trips over unused-warnings; drop that flag there.
  scalacOptions in (Compile, console) ~= (_.filterNot(_ == "-Ywarn-unused:_"))
)
// Sonatype publishing configuration: signed releases, POM metadata, PGP keys.
lazy val publishSettings = Seq(
  releaseCrossBuild := true,
  releasePublishArtifactsAction := PgpKeys.publishSigned.value,
  homepage := Some(url("https://github.com/tkrs/agni")),
  licenses := Seq("MIT License" -> url("http://www.opensource.org/licenses/mit-license.php")),
  publishMavenStyle := true,
  publishArtifact in Test := false,
  pomIncludeRepository := (_ => false),
  // Snapshots and releases go to the corresponding Sonatype repositories.
  publishTo := {
    val nexus = "https://oss.sonatype.org/"
    if (isSnapshot.value)
      Some("snapshots".at(nexus + "content/repositories/snapshots"))
    else
      Some("releases".at(nexus + "service/local/staging/deploy/maven2"))
  },
  scmInfo := Some(
    ScmInfo(
      url("https://github.com/tkrs/agni"),
      "scm:git:<EMAIL>:tkrs/agni.git"
    )
  ),
  pomExtra :=
    <developers>
      <developer>
        <id>yanana</id>
        <name><NAME></name>
        <url>https://github.com/yanana</url>
      </developer>
      <developer>
        <id>tkrs</id>
        <name><NAME></name>
        <url>https://github.com/tkrs</url>
      </developer>
    </developers>,
  // PGP credentials come from the environment (CI-friendly).
  pgpPassphrase := sys.env.get("PGP_PASSPHRASE").map(_.toCharArray),
  pgpSecretRing := sys.env.get("PGP_SECRET_RING").fold(pgpSecretRing.value)(file)
)
// Marker settings for modules that must never be published.
lazy val noPublishSettings = Seq(
  skip in publish := true
)
// Adds version-specific source directories (src/main/scala_2.13+ vs _2.12-)
// so sources can diverge between Scala 2.12 and 2.13.
lazy val crossVersionSharedSources: Seq[Setting[_]] =
  Seq(Compile, Test).map { sc =>
    (sc / unmanagedSourceDirectories) ++= {
      (sc / unmanagedSourceDirectories).value.flatMap { dir =>
        if (dir.getName != "scala") Seq(dir)
        else
          CrossVersion.partialVersion(scalaVersion.value) match {
            case Some((2, n)) if n >= 13 => Seq(file(dir.getPath + "_2.13+"))
            case _                       => Seq(file(dir.getPath + "_2.12-"))
          }
      }
    }
  }
// Core module: generated boilerplate sources plus the shared cross-version dirs.
lazy val core = project
  .in(file("core"))
  .settings(allSettings)
  .settings(crossVersionSharedSources)
  .settings(
    sourceGenerators in Compile += (sourceManaged in Compile).map(Boilerplate.gen).taskValue
  )
  .settings(
    description := "agni core",
    moduleName := "agni-core"
  )
// Twitter Future integration via catbird.
lazy val `twitter-util` = project
  .in(file("twitter-util"))
  .settings(allSettings)
  .settings(
    description := "agni twitter-util",
    moduleName := "agni-twitter-util"
  )
  .settings(
    libraryDependencies ++= Seq(P.catbird)
  )
  .dependsOn(core)
// Monix Task/Iterant integration.
lazy val monix = project
  .in(file("monix"))
  .settings(allSettings)
  .settings(
    description := "agni monix",
    moduleName := "agni-monix"
  )
  .settings(
    libraryDependencies ++= Seq(P.monixEval, P.monixTail)
  )
  .dependsOn(core)
// cats-effect IO integration.
lazy val `cats-effect` = project
  .in(file("cats-effect"))
  .settings(allSettings)
  .settings(
    description := "agni cats-effect",
    moduleName := "agni-cats-effect"
  )
  .settings(
    libraryDependencies ++= Seq(P.catsEffect)
  )
  .dependsOn(core)
// JMH benchmarks; never published. Inlining flags enabled for fair numbers.
lazy val benchmarks = project
  .in(file("benchmarks"))
  .settings(allSettings)
  .settings(noPublishSettings)
  .settings(
    description := "agni benchmarks",
    moduleName := "agni-benchmarks"
  )
  .settings(
    scalacOptions ++= Seq(
      "-opt:l:inline",
      "-opt-inline-from:**",
      "-opt-warnings"
    )
  )
  .enablePlugins(JmhPlugin)
  .dependsOn(core % "test->test")
// Runnable examples; never published.
lazy val examples = project
  .in(file("examples"))
  .settings(allSettings)
  .settings(noPublishSettings)
  .settings(
    description := "agni examples",
    moduleName := "agni-examples"
  )
  .settings(
    libraryDependencies ++= Seq(P.datastaxQueryBuilder, P.slf4jSimple, P.scalatest)
  )
  .dependsOn(`cats-effect`)
// Baseline scalac flags shared by every module (JDK 8 target, strict warnings).
lazy val compilerOptions = Seq(
  "-target:jvm-1.8",
  "-deprecation",
  "-encoding",
  "UTF-8",
  "-unchecked",
  "-feature",
  "-language:existentials",
  "-language:higherKinds",
  "-language:implicitConversions",
  "-language:postfixOps",
  "-Ywarn-dead-code",
  "-Ywarn-numeric-widen",
  "-Ywarn-unused:_",
  "-Xlint"
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.