text stringlengths 1 1.05M |
|---|
package app.javachat.Garage;
import app.javachat.Models.Message;
import app.javachat.Models.User;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
// TODO: implement listMensajes and replace PackageInfo.
/**
 * Serializable model of a chat room: connection details (server IP and port),
 * the hosting user, the connected users, and the message history.
 */
public class SalaModel implements Serializable {
    private User host;
    private final int PORT;
    private final String serverIp;
    private List<User> listUsuarios;
    private List<Message> listMessages;

    /**
     * Creates a room with empty user and message lists.
     *
     * @param serverIp IP address the room's server listens on.
     * @param PORT     TCP port of the room's server.
     * @param host     User hosting the room.
     */
    public SalaModel(String serverIp, int PORT, User host) {
        this.serverIp = serverIp;
        this.PORT = PORT;
        this.host = host;
        this.listUsuarios = new ArrayList<>();
        this.listMessages = new ArrayList<>();
    }

    /**
     * Creates a room with pre-populated user and message lists.
     *
     * @param serverIp     IP address the room's server listens on.
     * @param PORT         TCP port of the room's server.
     * @param host         User hosting the room.
     * @param listUsuarios Initial list of connected users.
     * @param listMessages Initial message history.
     */
    public SalaModel(String serverIp, int PORT, User host, List<User> listUsuarios, List<Message> listMessages) {
        this.serverIp = serverIp;
        this.PORT = PORT;
        // BUG FIX: this constructor previously accepted `host` but never assigned it,
        // leaving getHost() == null for rooms built from existing lists.
        this.host = host;
        this.listUsuarios = listUsuarios;
        this.listMessages = listMessages;
    }

    public String getServerIp() {
        return serverIp;
    }

    public int getPORT() {
        return PORT;
    }

    /** Adds a user unless an equal user is already present (no duplicates). */
    public void addUser(User user) {
        if (!listUsuarios.contains(user))
            listUsuarios.add(user);
    }

    /** Adds a message unless an equal message is already present (no duplicates). */
    public void addMensaje(Message message) {
        if (!listMessages.contains(message))
            listMessages.add(message);
    }

    public User getHost() {
        return host;
    }

    public List<User> getListUsuarios() {
        return listUsuarios;
    }

    public List<Message> getListMensajes() {
        return listMessages;
    }

    public void setListUsuarios(List<User> listUsuarios) {
        this.listUsuarios = listUsuarios;
    }

    public void setListMensajes(List<Message> listMessages) {
        this.listMessages = listMessages;
    }
}
|
<reponame>seekcx/egg-yup<filename>test/fixtures/apps/yup-test/config/config.default.js
'use strict';

// Test-fixture security settings: CSRF/ctoken checks are disabled so the
// fixture app can be exercised without tokens.
const securityConfig = {
  ctoken: false,
  csrf: false,
};

// Turn a yup validation failure into a 422 response carrying the error list.
const handleValidationError = (err, ctx) => {
  ctx.throw(422, 'Validation Failed', {
    errors: err.errors,
  });
};

// egg-yup plugin configuration: a custom "mars" locale overriding the
// number.min message (the ${min} placeholder is interpolated by yup itself).
const yupConfig = {
  locale: 'mars',
  locales: {
    mars: {
      number: {
        min: '不能小于 ${min}',
      },
    },
  },
  onerror: handleValidationError,
};

exports.security = securityConfig;
exports.yup = yupConfig;
|
# Train and evaluate a small fully-connected classifier on MNIST.
import tensorflow as tf
# Load the MNIST dataset
mnist = tf.keras.datasets.mnist
(x_train, y_train),(x_test, y_test) = mnist.load_data()
# Scale pixel values from [0, 255] down to [0, 1].
x_train, x_test = x_train / 255.0, x_test / 255.0
# Build the model
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(),  # flatten each image to a vector; input shape inferred on first call
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),  # regularization: randomly zero 20% of activations while training
    tf.keras.layers.Dense(10, activation='softmax')  # one probability per digit class (0-9)
])
# Compile and train the model
model.compile(optimizer='adam',
    loss='sparse_categorical_crossentropy',  # labels are integer class IDs, not one-hot
    metrics=['accuracy'])
model.fit(x_train, y_train, epochs=5)
# Evaluate the model on the held-out test split
model.evaluate(x_test, y_test)
// Barrel module: re-exports the public API of each sibling module so
// consumers can import everything from this single entry point.
export * from './compare.ts';
export * from './equal.ts';
export * from './fold.ts';
export * from './forEach.ts';
export * from './key.ts';
export * from './map.ts';
export * from './predicate.ts';
export * from './scan.ts';
|
import dayjs from "dayjs";
import DataFeed from "./dataFeed";
/**
* @typedef {Object} CandleData
* @property {Number} code
* @property {Array<*>} data
*/
/**
* @typedef {import("./dataFeed").Candle} Candle
* @typedef {import("services/tradeApiClient.types").MarketSymbol} MarketSymbol
* @typedef {import("services/tradeApiClient.types").ExchangeConnectionEntity} ExchangeConnectionEntity
* @typedef {import("services/tradeApiClient.types").MarketSymbolsCollection} MarketSymbolsCollection
* @typedef {import("tradingView/charting_library/charting_library").OnReadyCallback} OnReadyCallback
* @typedef {import("tradingView/charting_library/charting_library").ServerTimeCallback} ServerTimeCallback
* @typedef {import("tradingView/charting_library/charting_library").SearchSymbolsCallback} SearchSymbolsCallback
* @typedef {import("tradingView/charting_library/charting_library").ResolveCallback} ResolveCallback
* @typedef {import("tradingView/charting_library/charting_library").ErrorCallback} ErrorCallback
* @typedef {import("tradingView/charting_library/charting_library").HistoryCallback} HistoryCallback
* @typedef {import("tradingView/charting_library/charting_library").Bar} Bar
* @typedef {import("tradingView/charting_library/charting_library").SubscribeBarsCallback} SubscribeBarsCallback
* @typedef {import("tradingView/charting_library/charting_library").LibrarySymbolInfo} LibrarySymbolInfo
* @typedef {import("tradingView/charting_library/charting_library").PeriodParams} PeriodParams
*/
/**
* Prices data feed from CoinRay provider.
*
* @property {TradeApiClient} tradeApi API client.
* @returns {IBasicDataFeed} Trading View Chart data feed.
* @inheritdoc
*/
class AscendexDataFeed extends DataFeed {
  /**
   * Get price candles for selected symbol.
   *
   * Builds the `/tradingview/ascendex` endpoint URL and fetches the raw JSON
   * payload. Network or parse failures are logged and reported as `null`.
   *
   * @param {string} base Symbol base currency.
   * @param {string} quote Symbol quote currency.
   * @param {string|number} resolution Data resolution.
   * @param {number} startTime Get data since (ms timestamp).
   * @param {number} [endTime] Get data to (ms timestamp).
   * @returns {Promise<CandleData>} Promise that resolves candle data, or null on error.
   * @memberof AscendexDataFeed
   */
  // eslint-disable-next-line max-params
  async getCandlesData(base, quote, resolution, startTime, endTime) {
    let endpointPath = `/tradingview/ascendex?symbol=${base}/${quote}&interval=${resolution}&from=${startTime}`;
    if (endTime) {
      endpointPath += `&to=${endTime}`;
    }
    const requestUrl = this.baseUrl + endpointPath;
    try {
      const response = await fetch(requestUrl);
      const candles = await response.json();
      return candles;
      // return JSON.parse(candles.contents);
    } catch (error) {
      // eslint-disable-next-line no-console
      console.error(`Get candles data error: ${error.message}`);
    }
    // Errors are swallowed above; callers treat null as "no data".
    return null;
  }
  /**
   * Parse an API candle into a TradingView OHLCV bar.
   *
   * @param {*} candle Ascendex candle payload with string ts/o/h/l/c/v fields.
   * @returns {*} New candle with numeric time/open/high/low/close/volume.
   */
  parseOHLCV(candle) {
    return {
      time: parseFloat(candle.ts),
      open: parseFloat(candle.o),
      high: parseFloat(candle.h),
      low: parseFloat(candle.l),
      close: parseFloat(candle.c),
      volume: parseFloat(candle.v),
    };
  }
  /**
   * Get price bars for a given symbol over the selected period.
   *
   * Resolves to an empty array when the candles request failed (null).
   *
   * @param {MarketSymbol} symbolData Market symbol data.
   * @param {string} resolution Data resolution.
   * @param {PeriodParams} periodParams Selected period.
   * @returns {Promise<Candle>} Promise.
   */
  getBarsRequest(symbolData, resolution, periodParams) {
    const { from: startDate, to: endDate } = periodParams;
    return this.getCandlesData(
      // @ts-ignore
      symbolData.base.toUpperCase(),
      // @ts-ignore
      symbolData.quote.toUpperCase(),
      resolution,
      // API expects millisecond timestamps; periodParams.from is in seconds.
      startDate * 1000,
      dayjs.unix(endDate).startOf("minute").valueOf(),
    ).then((newCandles) => {
      return newCandles ? newCandles.data.map((d) => this.parseOHLCV(d.data)) : [];
    });
  }
  /**
   * Call request to get latest candle price data.
   *
   * Concrete override of the DataFeed hook; resolves to an empty object when
   * no candle data came back.
   * NOTE(review): `this.startDate` is presumably maintained by the DataFeed
   * base class — confirm it is set before this is called.
   *
   * @param {LibrarySymbolInfo} symbolData Market symbol data.
   * @param {string} resolution Prices data resolution.
   * @returns {Promise<Bar>} Promise.
   */
  refreshBarRequest(symbolData, resolution) {
    return this.getCandlesData(
      // @ts-ignore
      symbolData.base.toUpperCase(),
      // @ts-ignore
      symbolData.quote.toUpperCase(),
      resolution,
      this.startDate * 1000,
      dayjs().startOf("minute").valueOf(),
    ).then((candles) => {
      if (candles && candles.data.length) {
        const lastCandle = candles.data[candles.data.length - 1];
        return this.parseOHLCV(lastCandle.data);
      }
      return {};
    });
  }
}
export default AscendexDataFeed;
|
package org.deeplearning4j.ui.module.tsne;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.commons.io.FileUtils;
import org.deeplearning4j.api.storage.StatsStorage;
import org.deeplearning4j.api.storage.StatsStorageEvent;
import org.deeplearning4j.ui.api.FunctionType;
import org.deeplearning4j.ui.api.HttpMethod;
import org.deeplearning4j.ui.api.Route;
import org.deeplearning4j.ui.api.UIModule;
import play.Logger;
import play.api.mvc.MultipartFormData;
import play.data.DynamicForm;
import play.data.Form;
import play.libs.Json;
import play.mvc.Http;
import play.mvc.Result;
import play.mvc.Results;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static play.mvc.Controller.flash;
import static play.mvc.Controller.request;
import static play.mvc.Results.badRequest;
import static play.mvc.Results.ok;
/**
* Created by Alex on 25/10/2016.
*/
/**
 * UI module backing the t-SNE visualization page: serves the page itself,
 * lists available coordinate sessions, returns coordinates for a session,
 * and accepts coordinate-file uploads.
 *
 * Created by Alex on 25/10/2016.
 */
public class TsneModule implements UIModule {
    /** Pseudo session ID under which a directly uploaded file is exposed. */
    private static final String UPLOADED_FILE = "UploadedFile";
    // Session ID -> lines of the coordinate file posted for that session.
    private Map<String, List<String>> knownSessionIDs = Collections.synchronizedMap(new LinkedHashMap<>());
    // Lines of the most recently uploaded file; null until an upload happens.
    private List<String> uploadedFileLines = null;
    public TsneModule(){
    }
    @Override
    public List<String> getCallbackTypeIDs() {
        // This module is driven purely by HTTP uploads, not storage callbacks.
        return Collections.emptyList();
    }
    @Override
    public List<Route> getRoutes() {
        Route r1 = new Route("/tsne", HttpMethod.GET, FunctionType.Supplier, () -> ok(org.deeplearning4j.ui.views.html.tsne.Tsne.apply()));
        Route r2 = new Route("/tsne/sessions", HttpMethod.GET, FunctionType.Supplier, this::listSessions);
        Route r3 = new Route("/tsne/coords/:sid", HttpMethod.GET, FunctionType.Function, this::getCoords);
        Route r4 = new Route("/tsne/upload", HttpMethod.POST, FunctionType.Supplier, this::uploadFile);
        // Route r5 = new Route("/tsne/post/:sid", HttpMethod.POST, FunctionType.Function, this::postFile);
        // NOTE(review): registered as GET although postFile reads a multipart
        // body; the POST variant above is commented out — confirm intentional.
        Route r5 = new Route("/tsne/post/:sid", HttpMethod.GET, FunctionType.Function, this::postFile);
        return Arrays.asList(r1, r2, r3, r4, r5);
    }
    @Override
    public void reportStorageEvents(Collection<StatsStorageEvent> events) {
        // No-op: storage events are irrelevant to this module.
    }
    @Override
    public void onAttach(StatsStorage statsStorage) {
        // No-op.
    }
    @Override
    public void onDetach(StatsStorage statsStorage) {
        // No-op.
    }
    /** Returns all known session IDs, plus the upload pseudo-session if present, as JSON. */
    private Result listSessions(){
        List<String> list = new ArrayList<>(knownSessionIDs.keySet());
        if(uploadedFileLines != null){
            list.add(UPLOADED_FILE);
        }
        return Results.ok(Json.toJson(list));
    }
    /**
     * Returns the coordinate lines for the given session ID as JSON;
     * an unknown session yields an empty 200 rather than a 404.
     */
    private Result getCoords(String sessionId){
        if(UPLOADED_FILE.equals(sessionId) && uploadedFileLines != null){
            return Results.ok(Json.toJson(uploadedFileLines));
        } else if(knownSessionIDs.containsKey(sessionId)){
            return Results.ok(Json.toJson(knownSessionIDs.get(sessionId)));
        } else {
            return Results.ok();
        }
    }
    /**
     * Handles a multipart upload of a coordinate file, storing its lines as the
     * upload pseudo-session. Replaces any previously uploaded file.
     */
    private Result uploadFile(){
        Http.MultipartFormData body = request().body().asMultipartFormData();
        List<Http.MultipartFormData.FilePart> fileParts = body.getFiles();
        if(fileParts.size() <= 0){
            return badRequest("No file uploaded");
        }
        // Only the first file part is used; extras are ignored.
        Http.MultipartFormData.FilePart uploadedFile = fileParts.get(0);
        String fileName = uploadedFile.getFilename();
        String contentType = uploadedFile.getContentType();
        File file = uploadedFile.getFile();
        try{
            uploadedFileLines = FileUtils.readLines(file);
        } catch (IOException e){
            return badRequest("Could not read from uploaded file");
        }
        return ok("File uploaded: " + fileName + ", " + contentType + ", " + file);
    }
    /**
     * Handles a multipart upload of a coordinate file for a specific session ID,
     * replacing any lines previously stored for that session.
     */
    private Result postFile(String sid){
        // System.out.println("POST FILE CALLED: " + sid);
        Http.MultipartFormData body = request().body().asMultipartFormData();
        List<Http.MultipartFormData.FilePart> fileParts = body.getFiles();
        if(fileParts.size() <= 0){
            // System.out.println("**** NO FILE ****");
            return badRequest("No file uploaded");
        }
        // Only the first file part is used; extras are ignored.
        Http.MultipartFormData.FilePart uploadedFile = fileParts.get(0);
        String fileName = uploadedFile.getFilename();
        String contentType = uploadedFile.getContentType();
        File file = uploadedFile.getFile();
        List<String> lines;
        try{
            lines = FileUtils.readLines(file);
        } catch (IOException e){
            // System.out.println("**** COULD NOT READ FILE ****");
            return badRequest("Could not read from uploaded file");
        }
        knownSessionIDs.put(sid, lines);
        return ok("File uploaded: " + fileName + ", " + contentType + ", " + file);
    }
}
|
/* Copyright 2020 Freerware
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package header_test
import (
"testing"
"github.com/freerware/negotiator/internal/header"
"github.com/stretchr/testify/suite"
)
// AcceptTestSuite groups the unit tests for header.Accept construction,
// media-range access, emptiness, formatting, and compatibility matching.
type AcceptTestSuite struct {
	suite.Suite
}
// TestAcceptTestSuite is the `go test` entry point that runs the suite.
func TestAcceptTestSuite(t *testing.T) {
	suite.Run(t, new(AcceptTestSuite))
}
// TestAccept_NewAccept verifies that NewAccept succeeds for single ranges,
// wildcard ranges, multiple ranges, and empty input.
//
// Pointer receiver for consistency with TestAccept_Compatible and with the
// testify convention for suite methods.
func (s *AcceptTestSuite) TestAccept_NewAccept() {
	tests := []struct {
		name string
		in   []string
		err  error
	}{
		{"SingleRange", []string{"application/json"}, nil},
		{"WildcardSubType", []string{"application/*"}, nil},
		{"Wildcard", []string{"*/*"}, nil},
		{"MultipleRanges", []string{"application/json", "application/yaml;q=0.9"}, nil},
		{"Empty", []string{}, nil},
	}
	for _, test := range tests {
		s.Run(test.name, func() {
			// action.
			ae, err := header.NewAccept(test.in)
			// assert.
			if test.err != nil {
				s.Require().EqualError(err, test.err.Error())
			} else {
				s.Require().NoError(err)
				s.NotZero(ae)
			}
		})
	}
}
// TestAccept_MediaRanges verifies the media ranges exposed by an Accept
// header. The expected output order here suggests MediaRanges() returns
// ranges sorted by descending quality value — confirm against the
// header package if this test is extended.
func (s AcceptTestSuite) TestAccept_MediaRanges() {
	json, _ := header.NewMediaRange("application/json")
	jsonWithQValue, _ := header.NewMediaRange("application/json;q=0.4")
	yamlWithQValue, _ := header.NewMediaRange("text/yaml;q=0.8")
	xml, _ := header.NewMediaRange("application/xml")
	tests := []struct {
		name string
		in   []string
		out  []header.MediaRange
	}{
		{
			"MultipleRanges",
			[]string{
				"application/json;q=0.4",
				"application/xml",
				"text/yaml;q=0.8",
			},
			[]header.MediaRange{
				xml,
				yamlWithQValue,
				jsonWithQValue,
			},
		},
		{"SingleRange", []string{"application/json"}, []header.MediaRange{json}},
		{"Empty", []string{}, []header.MediaRange{}},
	}
	for _, test := range tests {
		s.Run(test.name, func() {
			// action + assert.
			ae, err := header.NewAccept(test.in)
			s.Require().NoError(err)
			s.Require().Len(ae.MediaRanges(), len(test.out))
			for idx, ccr := range ae.MediaRanges() {
				s.Equal(test.out[idx], ccr)
			}
		})
	}
}
// TestAccept_IsEmpty verifies IsEmpty reports whether the Accept header
// carries any media ranges.
func (s AcceptTestSuite) TestAccept_IsEmpty() {
	cases := []struct {
		name string
		in   []string
		out  bool
	}{
		{"Empty", []string{}, true},
		{"NotEmpty", []string{"application/json", "application/xml"}, false},
	}
	for _, tc := range cases {
		s.Run(tc.name, func() {
			// action + assert.
			accept, err := header.NewAccept(tc.in)
			s.Require().NoError(err)
			s.Equal(tc.out, accept.IsEmpty())
		})
	}
}
// TestAccept_String verifies the canonical "Accept: ..." rendering,
// including the normalized three-decimal q-values.
func (s AcceptTestSuite) TestAccept_String() {
	tests := []struct {
		name string
		in   []string
		out  string
	}{
		{"Empty", []string{}, "Accept: "},
		{
			"SingleRange",
			[]string{"application/json"},
			"Accept: application/json;q=1.000",
		},
		{
			"MultipleRanges",
			[]string{
				"application/json",
				"application/xml;q=0.8",
			},
			"Accept: application/json;q=1.000,application/xml;q=0.800",
		},
	}
	for _, test := range tests {
		s.Run(test.name, func() {
			// action + assert.
			ae, err := header.NewAccept(test.in)
			s.Require().NoError(err)
			s.Equal(test.out, ae.String())
		})
	}
}
// TestAccept_Compatible verifies Compatible against wildcard ranges,
// sub-type wildcards, case-insensitive matches, q-valued ranges, and
// non-matching types.
func (s *AcceptTestSuite) TestAccept_Compatible() {
	tests := []struct {
		name       string
		mediaRange []string
		in         string
		out        bool
		err        error
	}{
		{"Wildcard", []string{"*/*"}, "application/json", true, nil},
		{"SubTypeWildcard", []string{"application/*"}, "application/json", true, nil},
		{"MatchLowerCase", []string{"application/json"}, "application/json", true, nil},
		{"MatchUpperCase", []string{"application/xml"}, "APPLICATION/XML", true, nil},
		{"MatchWithQValue", []string{"application/xml;q=0.9"}, "APPLICATION/XML", true, nil},
		{"NoMatch", []string{"application/json"}, "application/xml", false, nil},
	}
	for _, test := range tests {
		s.Run(test.name, func() {
			// action + assert.
			c, err := header.NewAccept(test.mediaRange)
			s.Require().NoError(err)
			s.Require().NotZero(c)
			ok, err := c.Compatible(test.in)
			if test.err != nil {
				s.EqualError(err, test.err.Error())
			} else {
				s.NoError(err)
			}
			s.Equal(test.out, ok)
		})
	}
}
|
#!/bin/bash
# Remove the AFL binary and output directories for a given firmware image ID.
set -eu

# Locate and source configure.sh from this directory or up to three above.
if [ -e ../../../configure.sh ]; then
    . ../../../configure.sh
elif [ -e ../../configure.sh ]; then
    . ../../configure.sh
elif [ -e ../configure.sh ]; then
    . ../configure.sh
elif [ -e ./configure.sh ]; then
    . ./configure.sh
else
    echo "Error: Could not find 'configure.sh'!"
    exit 1
fi

if [[ $# -ne 1 ]]; then
    echo "Usage: $0 <image ID>"
    exit 1
fi

IID=${1}
BIN_DIR="${DYNAMIC_DIR}/afl/${IID}-bin"
OUT_DIR="${DYNAMIC_DIR}/afl/${IID}-out"

# BUG FIX: the previous `[[ -d ... ]] && rm ...` form aborted the script
# (set -e) with status 1 when BIN_DIR was absent, skipping OUT_DIR cleanup
# and reporting failure. Plain `if` statements avoid that. Paths are quoted
# in case DYNAMIC_DIR contains spaces.
if [[ -d ${BIN_DIR} ]]; then
    rm -rf "${BIN_DIR}"
fi
if [[ -d ${OUT_DIR} ]]; then
    rm -rf "${OUT_DIR}"
fi
|
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <random>
#include <string>
// Generates random one-time passwords of a configurable length drawn from
// an alphanumeric charset.
class OTP {
private:
    int length = 0;  // default 0 so generateOTP() is safe before setLength()

public:
    // Sets the number of characters in subsequently generated OTPs.
    void setLength(int l) {
        length = l;
    }

    // Returns a fresh random OTP of `length` alphanumeric characters.
    //
    // BUG FIX: the previous implementation called srand(time(nullptr)) on
    // every invocation, so all OTPs generated within the same second were
    // identical; it also used `rand() %`, which is biased. A single
    // process-wide Mersenne Twister seeded once from std::random_device,
    // combined with uniform_int_distribution, fixes both issues.
    std::string generateOTP() {
        static const char charset[] =
            "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
        // sizeof(charset) includes the trailing '\0', hence the -2 bound.
        static std::mt19937 engine{std::random_device{}()};
        std::uniform_int_distribution<int> pick(0, static_cast<int>(sizeof(charset)) - 2);
        std::string otp;
        if (length > 0) {
            otp.reserve(static_cast<std::size_t>(length));
        }
        for (int i = 0; i < length; ++i) {
            otp += charset[pick(engine)];
        }
        return otp;
    }
};
int main() {
OTP otpGenerator;
otpGenerator.setLength(8);
std::string generatedOTP = otpGenerator.generateOTP();
std::cout << "Generated OTP: " << generatedOTP << std::endl;
return 0;
} |
// Find and print the largest element of the array.
int[] arr = {1, 5, 10, 15, 20};
int largest = arr[0];
foreach (int value in arr)
{
    if (value > largest)
    {
        largest = value;
    }
}
Console.WriteLine("Largest element: " + largest);
/*
*********************************************************************************************************
* LINUX-KERNEL
* AllWinner Linux Platform Develop Kits
* Kernel Module
*
* (c) Copyright 2006-2011, <NAME>
* All Rights Reserved
*
* File : mem_gtbus.c
* By : gq.yang
* Version : v1.0
* Date : 2012-11-3 20:13
* Descript: interrupt for platform mem
* Update : date auther ver notes
*********************************************************************************************************
*/
#include "pm_i.h"
#ifdef CONFIG_ARCH_SUN9IW1P1
/*
*********************************************************************************************************
* MEM gtbus SAVE
*
* Description: mem gtbus save.
*
* Arguments : none.
*
* Returns : 0/-1;
*********************************************************************************************************
*/
__s32 mem_gtbus_save(struct gtbus_state *gtbus_state)
{
int i = 0;
gtbus_state->gtbus_reg =
(gtbus_reg_list_t *) IO_ADDRESS(SUNXI_GTBUS_PBASE);
gtbus_state->gtbus_reg_backup[0] =
gtbus_state->gtbus_reg->master_config_reg[0];
gtbus_state->gtbus_reg_backup[1] =
gtbus_state->gtbus_reg->master_config_reg[1];
gtbus_state->gtbus_reg_backup[2] =
gtbus_state->gtbus_reg->master_config_reg[2];
gtbus_state->gtbus_reg_backup[3] =
gtbus_state->gtbus_reg->master_config_reg[3];
gtbus_state->gtbus_reg_backup[4] =
gtbus_state->gtbus_reg->master_config_reg[4];
gtbus_state->gtbus_reg_backup[5] =
gtbus_state->gtbus_reg->master_config_reg[5];
gtbus_state->gtbus_reg_backup[6] =
gtbus_state->gtbus_reg->master_config_reg[6];
gtbus_state->gtbus_reg_backup[7] =
gtbus_state->gtbus_reg->master_config_reg[7];
gtbus_state->gtbus_reg_backup[8] =
gtbus_state->gtbus_reg->master_config_reg[8];
gtbus_state->gtbus_reg_backup[9] =
gtbus_state->gtbus_reg->master_config_reg[9];
gtbus_state->gtbus_reg_backup[10] =
gtbus_state->gtbus_reg->master_config_reg[10];
gtbus_state->gtbus_reg_backup[11] =
gtbus_state->gtbus_reg->master_config_reg[11];
gtbus_state->gtbus_reg_backup[12] =
gtbus_state->gtbus_reg->master_config_reg[12];
gtbus_state->gtbus_reg_backup[13] =
gtbus_state->gtbus_reg->master_config_reg[13];
gtbus_state->gtbus_reg_backup[14] =
gtbus_state->gtbus_reg->master_config_reg[14];
gtbus_state->gtbus_reg_backup[15] =
gtbus_state->gtbus_reg->master_config_reg[15];
gtbus_state->gtbus_reg_backup[16] =
gtbus_state->gtbus_reg->master_config_reg[16];
gtbus_state->gtbus_reg_backup[17] =
gtbus_state->gtbus_reg->master_config_reg[17];
gtbus_state->gtbus_reg_backup[18] =
gtbus_state->gtbus_reg->master_config_reg[18];
gtbus_state->gtbus_reg_backup[19] =
gtbus_state->gtbus_reg->master_config_reg[19];
gtbus_state->gtbus_reg_backup[20] =
gtbus_state->gtbus_reg->master_config_reg[20];
gtbus_state->gtbus_reg_backup[21] =
gtbus_state->gtbus_reg->master_config_reg[21];
gtbus_state->gtbus_reg_backup[22] =
gtbus_state->gtbus_reg->master_config_reg[22];
gtbus_state->gtbus_reg_backup[23] =
gtbus_state->gtbus_reg->master_config_reg[23];
gtbus_state->gtbus_reg_backup[24] =
gtbus_state->gtbus_reg->master_config_reg[24];
gtbus_state->gtbus_reg_backup[25] =
gtbus_state->gtbus_reg->master_config_reg[25];
gtbus_state->gtbus_reg_backup[26] =
gtbus_state->gtbus_reg->master_config_reg[26];
gtbus_state->gtbus_reg_backup[27] =
gtbus_state->gtbus_reg->master_config_reg[27];
gtbus_state->gtbus_reg_backup[28] =
gtbus_state->gtbus_reg->master_config_reg[28];
gtbus_state->gtbus_reg_backup[29] =
gtbus_state->gtbus_reg->master_config_reg[29];
gtbus_state->gtbus_reg_backup[30] =
gtbus_state->gtbus_reg->master_config_reg[30];
gtbus_state->gtbus_reg_backup[31] =
gtbus_state->gtbus_reg->master_config_reg[31];
gtbus_state->gtbus_reg_backup[32] =
gtbus_state->gtbus_reg->master_config_reg[32];
gtbus_state->gtbus_reg_backup[33] =
gtbus_state->gtbus_reg->master_config_reg[33];
gtbus_state->gtbus_reg_backup[34] =
gtbus_state->gtbus_reg->master_config_reg[34];
gtbus_state->gtbus_reg_backup[35] =
gtbus_state->gtbus_reg->master_config_reg[35];
gtbus_state->gtbus_reg_backup[36] =
gtbus_state->gtbus_reg->band_win_config_reg;
gtbus_state->gtbus_reg_backup[37] =
gtbus_state->gtbus_reg->master_rd_pri_config_reg_0;
gtbus_state->gtbus_reg_backup[38] =
gtbus_state->gtbus_reg->master_rd_pri_config_reg_1;
gtbus_state->gtbus_reg_backup[39] = gtbus_state->gtbus_reg->config_reg;
gtbus_state->gtbus_reg_backup[40] =
gtbus_state->gtbus_reg->soft_clk_on_reg;
gtbus_state->gtbus_reg_backup[41] =
gtbus_state->gtbus_reg->soft_clk_off_reg;
gtbus_state->gtbus_reg_backup[42] = gtbus_state->gtbus_reg->pmu_en_reg;
gtbus_state->gtbus_reg_backup[43] =
gtbus_state->gtbus_reg->cci400_config_reg_0;
gtbus_state->gtbus_reg_backup[44] =
gtbus_state->gtbus_reg->cci400_config_reg_1;
gtbus_state->gtbus_reg_backup[45] =
gtbus_state->gtbus_reg->cci400_config_reg_2;
gtbus_state->gtbus_reg_backup[46] =
gtbus_state->gtbus_reg->ram_bist_config;
if (debug_mask & PM_STANDBY_PRINT_GTBUS_REG) {
for (i = 0; i < GTBUS_REG_BACKUP_LENGTH; i++) {
printk
("gtbus_state->gtbus_reg_backup[%d] = 0x%x .\n",
i, gtbus_state->gtbus_reg_backup[i]);
}
}
return 0;
}
/*
*********************************************************************************************************
* MEM gtbus restore
*
* Description: mem gtbus restore.
*
* Arguments : none.
*
* Returns : 0/-1;
*********************************************************************************************************
*/
__s32 mem_gtbus_restore(struct gtbus_state *gtbus_state)
{
gtbus_state->gtbus_reg->master_config_reg[0] =
gtbus_state->gtbus_reg_backup[0];
gtbus_state->gtbus_reg->master_config_reg[1] =
gtbus_state->gtbus_reg_backup[1];
gtbus_state->gtbus_reg->master_config_reg[2] =
gtbus_state->gtbus_reg_backup[2];
gtbus_state->gtbus_reg->master_config_reg[3] =
gtbus_state->gtbus_reg_backup[3];
gtbus_state->gtbus_reg->master_config_reg[4] =
gtbus_state->gtbus_reg_backup[4];
gtbus_state->gtbus_reg->master_config_reg[5] =
gtbus_state->gtbus_reg_backup[5];
gtbus_state->gtbus_reg->master_config_reg[6] =
gtbus_state->gtbus_reg_backup[6];
gtbus_state->gtbus_reg->master_config_reg[7] =
gtbus_state->gtbus_reg_backup[7];
gtbus_state->gtbus_reg->master_config_reg[8] =
gtbus_state->gtbus_reg_backup[8];
gtbus_state->gtbus_reg->master_config_reg[9] =
gtbus_state->gtbus_reg_backup[9];
gtbus_state->gtbus_reg->master_config_reg[10] =
gtbus_state->gtbus_reg_backup[10];
gtbus_state->gtbus_reg->master_config_reg[11] =
gtbus_state->gtbus_reg_backup[11];
gtbus_state->gtbus_reg->master_config_reg[12] =
gtbus_state->gtbus_reg_backup[12];
gtbus_state->gtbus_reg->master_config_reg[13] =
gtbus_state->gtbus_reg_backup[13];
gtbus_state->gtbus_reg->master_config_reg[14] =
gtbus_state->gtbus_reg_backup[14];
gtbus_state->gtbus_reg->master_config_reg[15] =
gtbus_state->gtbus_reg_backup[15];
gtbus_state->gtbus_reg->master_config_reg[16] =
gtbus_state->gtbus_reg_backup[16];
gtbus_state->gtbus_reg->master_config_reg[17] =
gtbus_state->gtbus_reg_backup[17];
gtbus_state->gtbus_reg->master_config_reg[18] =
gtbus_state->gtbus_reg_backup[18];
gtbus_state->gtbus_reg->master_config_reg[19] =
gtbus_state->gtbus_reg_backup[19];
gtbus_state->gtbus_reg->master_config_reg[20] =
gtbus_state->gtbus_reg_backup[20];
gtbus_state->gtbus_reg->master_config_reg[21] =
gtbus_state->gtbus_reg_backup[21];
gtbus_state->gtbus_reg->master_config_reg[22] =
gtbus_state->gtbus_reg_backup[22];
gtbus_state->gtbus_reg->master_config_reg[23] =
gtbus_state->gtbus_reg_backup[23];
gtbus_state->gtbus_reg->master_config_reg[24] =
gtbus_state->gtbus_reg_backup[24];
gtbus_state->gtbus_reg->master_config_reg[25] =
gtbus_state->gtbus_reg_backup[25];
gtbus_state->gtbus_reg->master_config_reg[26] =
gtbus_state->gtbus_reg_backup[26];
gtbus_state->gtbus_reg->master_config_reg[27] =
gtbus_state->gtbus_reg_backup[27];
gtbus_state->gtbus_reg->master_config_reg[28] =
gtbus_state->gtbus_reg_backup[28];
gtbus_state->gtbus_reg->master_config_reg[29] =
gtbus_state->gtbus_reg_backup[29];
gtbus_state->gtbus_reg->master_config_reg[30] =
gtbus_state->gtbus_reg_backup[30];
gtbus_state->gtbus_reg->master_config_reg[31] =
gtbus_state->gtbus_reg_backup[31];
gtbus_state->gtbus_reg->master_config_reg[32] =
gtbus_state->gtbus_reg_backup[32];
gtbus_state->gtbus_reg->master_config_reg[33] =
gtbus_state->gtbus_reg_backup[33];
gtbus_state->gtbus_reg->master_config_reg[34] =
gtbus_state->gtbus_reg_backup[34];
gtbus_state->gtbus_reg->master_config_reg[35] =
gtbus_state->gtbus_reg_backup[35];
gtbus_state->gtbus_reg->band_win_config_reg =
gtbus_state->gtbus_reg_backup[36];
gtbus_state->gtbus_reg->master_rd_pri_config_reg_0 =
gtbus_state->gtbus_reg_backup[37];
gtbus_state->gtbus_reg->master_rd_pri_config_reg_1 =
gtbus_state->gtbus_reg_backup[38];
gtbus_state->gtbus_reg->config_reg = gtbus_state->gtbus_reg_backup[39];
gtbus_state->gtbus_reg->soft_clk_on_reg =
gtbus_state->gtbus_reg_backup[40];
gtbus_state->gtbus_reg->soft_clk_off_reg =
gtbus_state->gtbus_reg_backup[41];
gtbus_state->gtbus_reg->pmu_en_reg = gtbus_state->gtbus_reg_backup[42];
#if 0
busy_waiting();
/*clk off: bit19 = 1; */
gtbus_state->gtbus_reg->cci400_config_reg_1 = 0x00080000;
/*restore reg0, reg1 */
gtbus_state->gtbus_reg->cci400_config_reg_1 =
(0x00080000 | ((~0x00080000) & gtbus_state->gtbus_reg_backup[44]));
gtbus_state->gtbus_reg->cci400_config_reg_0 =
gtbus_state->gtbus_reg_backup[43];
/*sec: reset cci400 */
gtbus_state->gtbus_reg->cci400_config_reg_0 =
(0x80000000 | gtbus_state->gtbus_reg_backup[43]);
/*make sure the reset bit is in effect? */
change_runtime_env();
delay_us(1);
gtbus_state->gtbus_reg->cci400_config_reg_0 =
(gtbus_state->gtbus_reg_backup[43]);
/*clk on: bit19 = 0; */
gtbus_state->gtbus_reg->cci400_config_reg_1 =
gtbus_state->gtbus_reg_backup[44];
gtbus_state->gtbus_reg->cci400_config_reg_2 =
gtbus_state->gtbus_reg_backup[45];
#endif
gtbus_state->gtbus_reg->ram_bist_config =
gtbus_state->gtbus_reg_backup[46];
if (debug_mask & PM_STANDBY_PRINT_GTBUS_REG) {
mem_gtbus_save(gtbus_state);
}
return 0;
}
#endif
|
<reponame>tufty/stm32tl<filename>examples/usb/cdc/main.cpp<gh_stars>1-10
#include <string.h>
#include <clocks.h>
#include <tasks.h>
#include <gpio.h>
#include <stddef.h>
#include <io.h>
#include <drivers/ringbuffer.h>
#include <usb/usb.h>
#include <drivers/usb_cdc.h>
#include <board.h>
#ifdef STM32F0xx
typedef SYSCLK_T<HSI48_RC> SYSCLK;
#else
typedef HSE_OSC_T<> HSE;
typedef PLL_T<HSE, 72000000> PLL;
typedef SYSCLK_T<PLL> SYSCLK;
#endif
typedef SYSTICK_T<SYSCLK> SYSTICK;
typedef TIMEOUT_T<SYSTICK> TIMEOUT;
typedef BOARD_T::BOARD<BOARD_T::LED1 | BOARD_T::LED2 | BOARD_T::BUTTON | BOARD_T::CONSOLE> BOARD;
typedef GPIO_OUTPUT_T<PA, 4> PIN;
extern "C" {
// SysTick interrupt handler: advances the software timeout countdown and,
// when a timeout has expired, wakes the core out of its idle state.
void SysTick_Handler(void) {
	if (TIMEOUT::count_down()) exit_idle();
}
}
char buffer[96];
int main(void)
{
	// Clock bring-up differs per family: STM32F0 uses the 48 MHz internal
	// RC oscillator; other targets run the PLL from the external crystal.
#ifdef STM32F0xx
	HSI48_RC::init();
#else
	HSE::init();
	PLL::init();
#endif
	SYSCLK::init();
	SYSTICK::init();
	BOARD::init();
	CON::init();
	PIN::init();
#ifdef INTERACTIVE
	// Interactive demo: wait for a first keypress, then loop echoing keys
	// and console-entered strings, toggling LED2 each iteration.
	CON::getc();
	LED1::set_high();
	while (1) {
		LED2::toggle();
		CON::puts("Press any key... \n");
		char c = CON::getc();
		printf<CON>("Key pressed: %c %02x\n", c, c);
		CON::puts("Enter a string...\n");
		// gets() returns the number of characters read; terminate there.
		buffer[CON::gets(buffer, sizeof(buffer) - 1)] = '\0';
		CON::puts(buffer);
	}
#else
	// Non-interactive demo: hex-dump whatever arrives on the console.
	while (1) {
		LED2::toggle();
		uint32_t n = CON::read((uint8_t *) buffer, sizeof(buffer));
		hex_dump<CON>(buffer, n, 0, true);
	}
#endif
	return 0;  // unreachable: both branches loop forever
}
|
<reponame>ianlyons/html-validator
const w3c = require('./w3c-validator')
const whatwg = require('./whatwg-validator')
module.exports = async options => {
const { validator } = options
const useWHATWG = validator && validator.toLowerCase() === 'whatwg'
return useWHATWG ? whatwg(options) : w3c(options)
}
|
#! /bin/bash
#INIT SCRIPT
# Terminal dashboard: once per second, clears the screen and redraws the
# date/time, uptime, system info, memory usage and process count.
# NOTE(review): relies on GNU-style `date -d`, `uptime -s`, `free -m` and
# `bc`, and on unquoted variable expansion/IFS word-splitting throughout —
# confirm target platform before restructuring.
while [ true ]; do
#CLEAR DISPLAY
clear
#END CLEAR DISPLAY
#DISPLAY TIME AND DATE
date_time_raw=`date +%Y" "%m" "%d" "%H" "%M" "%S" "%b" "%Z" "%A" "%s`
#[2001 01 01] [12 34 56] [Jan] [GMT] [Monday]
time_now=`echo "${date_time_raw}"|awk '{print $4":"$5":"$6}'`
time_now_tz=`echo "${date_time_raw}"|awk '{print $8}'`
date_now=`echo "${date_time_raw}"|awk '{print $1"-"$7"-"$3}'`
date_now_day=`echo "${date_time_raw}"|awk '{print $9}'`
printf "\033[1;35m❄️❄️❄️Date and Time❄️❄️❄️\033[0m\n"
printf "\033[1;37m"
printf "Time❄️ %s %s\n" $time_now $time_now_tz
printf "Date❄️ %s, %s\n" $date_now $date_now_day
printf "\n"
printf "\033[0m"
#END DISPLAY TIME AND DATE
#DISPLAY SYSTEM UPTIME
IFS=" "
date_time_list=( $date_time_raw )
#up_since=`uptime -s|sed 's/[:-]/ /g'`
up_since_raw=`uptime -s`
up_since=`echo "${up_since_raw}"|sed 's/[-]/ /g'`
#[2001 01 01] [12:34:56]
# ${date_time_list[9]} is the epoch-seconds field (%s) captured above.
up_time_raw=`date -d "${up_since_raw}" +"%s"`
up_time_stamp=`echo "$((${date_time_list[9]} - ${up_time_raw}))"`
up_time_days=`echo "$(((((${date_time_list[9]} - ${up_time_raw}) / 24) / 60) / 60))"`
up_time=`date -u -d @${up_time_stamp} +"%H %M %S"`
up_time_list=( $up_time )
printf "\033[1;35m❄️❄️❄️Current Uptime❄️❄️❄️\033[0m\n"
printf "\033[1;37m"
printf "Uptime since last restart❄️\n %s d, %s:%s:%s\n" $up_time_days ${up_time_list[0]} ${up_time_list[1]} ${up_time_list[2]}
printf " Total seconds: %s\n" $up_time_stamp
printf "\n"
printf "\033[0m"
#END DISPLAY SYSTEM UPTIME
#DISPLAY SYSTEM INFO
sysinfo_os=`uname -s`
sysinfo_arch=`uname -m`
sysinfo_rel=`uname -r`
sysinfo_host=`echo $HOSTNAME`
printf "\033[1;35m❄️❄️❄️System Information❄️❄️❄️\033[0m\n"
printf "\033[1;37m"
printf "OS❄️ %s\n" $sysinfo_os
printf "Kernel❄️ %s\n" $sysinfo_rel
printf "Architecture❄️ %s\n" $sysinfo_arch
printf "Hostname❄️ %s\n" $sysinfo_host
printf "\033[0m\n"
#END DISPLAY SYSTEM INFO
#DISPLAY MEMORY
# `free -m` row 2: $2=total, $3=used, $7=available; percentage of available
# memory drives the color-coded status below (thresholds 20/25/30/40%).
mem_raw=`free -m`
mem_total=`echo "${mem_raw}"|awk 'NR==2{print $2}'`
mem_used=`echo "${mem_raw}"|awk 'NR==2{print $3}'`
mem_free=`echo "${mem_raw}"|awk 'NR==2{print $7}'`
mem_free_perc=`echo "${mem_raw}"|awk 'NR==2{print ($7*100/$2)}'`
printf "\033[1;35m❄️❄️❄️MEMORY INFO❄️❄️❄️"
printf "\033[0m\n"
if [ `echo "${mem_free_perc} < 20.0"|bc` -eq 1 ]; then
printf "\033[0;31m!!MEMORY CRITICAL!!\n%s MB/%s MB (%.2f%%)" $mem_free $mem_total $mem_free_perc
elif [ `echo "${mem_free_perc} < 25.0"|bc` -eq 1 ]; then
printf "\033[1;31m!MEMORY LOW!\n%s MB/%s MB (%.2f%%)" $mem_free $mem_total $mem_free_perc
elif [ `echo "${mem_free_perc} < 30.0"|bc` -eq 1 ]; then
printf "\033[0;33mMEMORY LIMITED\n%s MB/%s MB (%.2f%%)" $mem_free $mem_total $mem_free_perc
elif [ `echo "${mem_free_perc} < 40.0"|bc` -eq 1 ]; then
printf "\033[1;33mMEMORY OK\n%s MB/%s MB (%.2f%%)" $mem_free $mem_total $mem_free_perc
else
printf "\033[1;32mMEMORY GOOD\n%s MB/%s MB(%.2f%%)" $mem_free $mem_total $mem_free_perc
fi
printf "\n\033[0m\n"
#END DISPLAY MEMORY
#CURRENT PROCESSES
# `ps ax|wc -l` counts the header line too, so this is processes + 1.
curr_proc=`ps ax|wc -l`
printf "\033[1;35m❄️❄️❄️Processes❄️❄️❄️\033[0m\n"
printf "\033[1;37m"
printf "Count❄️ %d\n" $curr_proc
printf "\033[0m"
#END CURRENT PROCESSES
printf "\033[0m"
sleep 1
done
#END SCRIPT
|
module.exports = function(grunt) {
  "use strict";

  // NOTE(review): `path` and `cwd` appear unused in this file — confirm
  // before removing.
  var path = require("path");
  var cwd = process.cwd();

  // TypeScript compilation targets: library sources, test suite, and a
  // bare d.ts syntax check.
  var tsJSON = {
    dev: {
      src: ["src/**/*.ts", "typings/**/*.d.ts"],
      outDir: "build/src/",
      options: {
        target: 'es5',
        module: 'commonjs',
        noImplicitAny: true,
        sourceMap: false,
        declaration: true,
        compiler: "./node_modules/grunt-ts/customcompiler/tsc",
        removeComments: false
      }
    },
    test: {
      src: ["test/*.ts", "typings/**/*.d.ts", "build/plottable.d.ts"],
      outDir: "build/test/",
      // watch: "test",
      options: {
        target: 'es5',
        sourceMap: false,
        noImplicitAny: true,
        declaration: false,
        compiler: "./node_modules/grunt-ts/customcompiler/tsc",
        removeComments: false
      }
    },
    verify_d_ts: {
      src: ["typings/d3/d3.d.ts", "plottable.d.ts"]
    }
  };

  // Version bump settings: update both manifests; commit/tag handled by
  // the release tasks below, not by grunt-bump itself.
  var bumpJSON = {
    options: {
      files: ['package.json', 'bower.json'],
      updateConfigs: ['pkg'],
      commit: false,
      createTag: false,
      push: false
    }
  };

  // Build artifacts every release/built commit must include.
  var FILES_TO_COMMIT = ['plottable.js',
                         'plottable.min.js',
                         'plottable.d.ts',
                         'test/tests.js',
                         "plottable.css",
                         "plottable.zip",
                         "bower.json",
                         "package.json"];

  // Regex fragments combined below to strip `private` members (and any
  // preceding JSDoc block) out of the generated .d.ts file.
  var prefixMatch = "\\n *(function |var |static )?";
  var varNameMatch = "[^(:;]*(\\([^)]*\\))?"; // catch function args too
  var nestedBraceMatch = ": \\{[^{}]*\\}";
  var typeNameMatch = ": [^;]*";
  var finalMatch = "((" + nestedBraceMatch + ")|(" + typeNameMatch + "))?\\n?;";
  var jsdoc_init = "\\n *\\/\\*\\* *\\n";
  var jsdoc_mid = "( *\\*[^\\n]*\\n)+";
  var jsdoc_end = " *\\*\\/ *";
  var jsdoc = "(" + jsdoc_init + jsdoc_mid + jsdoc_end + ")?";

  // sed-style in-place rewrites applied to generated files.
  var sedJSON = {
    private_definitions: {
      pattern: jsdoc + prefixMatch + "private " + varNameMatch + finalMatch,
      replacement: "",
      path: "build/plottable.d.ts",
    },
    // Turn /// <reference> lines into synchronousRequire() calls so the
    // multifile dev loaders can pull compiled JS at runtime.
    plottable_multifile: {
      pattern: '/// *<reference path="([^."]*).ts" */>',
      replacement: 'synchronousRequire("/build/src/$1.js");',
      path: "plottable_multifile.js",
    },
    definitions: {
      pattern: '/// *<reference path=[\'"].*[\'"] */>',
      replacement: "",
      path: "build/plottable.d.ts",
    },
    tests_multifile: {
      pattern: '/// *<reference path="([^."]*).ts" */>',
      replacement: 'synchronousRequire("/build/test/$1.js");',
      path: "test/tests_multifile.js",
    },
    sublime: {
      pattern: "(.*\\.ts)",
      replacement: '/// <reference path="../$1" />',
      path: "build/sublime.d.ts",
    },
    version_number: {
      pattern: "@VERSION",
      replacement: "<%= pkg.version %>",
      path: "plottable.js"
    }
  };
// e.g. ["components/foo.ts", ...]
// the important thing is that they are sorted by hierarchy,
// leaves first, roots last
var tsFiles;
// since src/reference.ts might have changed, I need to update this
// on each recompile
var updateTsFiles = function() {
tsFiles = grunt.file.read("src/reference.ts")
.split("\n")
.filter(function(s) {
return s !== "";
})
.map(function(s) {
return s.match(/"(.*\.ts)"/)[1];
});
};
updateTsFiles();
var testTsFiles;
var updateTestTsFiles = function() {
testTsFiles = grunt.file.read("test/testReference.ts")
.split("\n")
.filter(function(s) {
return s !== "";
})
.map(function(s) {
return s.match(/"(.*\.ts)"/)[1];
});
};
updateTestTsFiles();
var browsers = [{
browserName: "firefox",
version: "30"
}, {
browserName: "chrome",
version: "35"
}, {
browserName: "internet explorer",
version: "9",
platform: "WIN7"
}];
var configJSON = {
pkg: grunt.file.readJSON("package.json"),
bump: bumpJSON,
concat: {
header: {
src: ["license_header.txt", "plottable.js"],
dest: "plottable.js",
},
plottable_multifile: {
src: ["synchronousRequire.js", "src/reference.ts"],
dest: "plottable_multifile.js",
},
tests_multifile: {
src: ["synchronousRequire.js", "test/testReference.ts"],
dest: "test/tests_multifile.js",
},
plottable: {
src: tsFiles.map(function(s) {
return "build/src/" + s.replace(".ts", ".js");
}),
dest: "plottable.js",
},
tests: {
src: testTsFiles.map(function(s) {
return "build/test/" + s.replace(".ts", ".js");
}),
dest: "test/tests.js",
},
definitions: {
src: tsFiles.map(function(s) {
return "build/src/" + s.replace(".ts", ".d.ts");
}),
dest: "build/plottable.d.ts",
},
},
ts: tsJSON,
tslint: {
options: {
configuration: grunt.file.readJSON("tslint.json")
},
all: {
src: ["src/**/*.ts", "test/**/*.ts"]
}
},
jshint: {
files: ['Gruntfile.js', 'quicktests/**/*.js'],
options: {
"curly": true,
"eqeqeq": true,
"evil": true,
"indent": 2,
"latedef": true,
"globals": {
"jQuery": true,
"d3": true,
"window": true,
"console": true,
"$": true,
"makeRandomData": true,
"setTimeout": true,
"document": true,
"Plottable": true
},
"strict": true,
"eqnull": true
}
},
parallelize: {
tslint: {
all: 4
}
},
watch: {
"options": {
livereload: true
},
"rebuild": {
"tasks": ["dev-compile"],
"files": ["src/**/*.ts", "examples/**/*.ts"]
},
"tests": {
"tasks": ["test-compile"],
"files": ["test/**/*.ts"]
}
},
blanket_mocha: {
all: ['test/coverage.html'],
options: {
threshold: 70
}
},
connect: {
server: {
options: {
port: 9999,
hostname: "*",
base: "",
livereload: true
}
}
},
clean: {
tscommand: ["tscommand*.tmp.txt"]
},
sed: sedJSON,
gitcommit: {
version: {
options: {
message: "Release version <%= pkg.version %>"
},
files: {
src: FILES_TO_COMMIT
}
},
built: {
options: {
message: "Update built files"
},
files: {
src: FILES_TO_COMMIT
}
}
},
compress: {
main: {
options: {
archive: 'plottable.zip'
},
files: [
{src: 'plottable.js' , dest: '.'},
{src: 'plottable.min.js', dest: '.'},
{src: 'plottable.d.ts', dest: '.'},
{src: 'plottable.css' , dest: '.'},
{src: 'README.md' , dest: '.'},
{src: 'LICENSE' , dest: '.'}]
}
},
uglify: {
main: {
files: {'plottable.min.js': ['plottable.js']}
}
},
shell: {
sublime: {
command: "(echo 'src/reference.ts'; find typings -name '*.d.ts') > build/sublime.d.ts",
},
},
'saucelabs-mocha': {
all: {
options: {
urls: ['http://127.0.0.1:9999/test/tests.html'],
testname: 'Plottable Sauce Unit Tests',
browsers: browsers,
build: process.env.TRAVIS_JOB_ID,
"tunnel-identifier": process.env.TRAVIS_JOB_NUMBER
}
}
}
};
  // project configuration
  grunt.initConfig(configJSON);

  require('load-grunt-tasks')(grunt);

  // default task (this is what runs when a task isn't specified)
  grunt.registerTask("update_ts_files", updateTsFiles);
  grunt.registerTask("update_test_ts_files", updateTestTsFiles);

  // Promote the freshly generated type definitions to the repo root.
  grunt.registerTask("definitions_prod", function() {
    grunt.file.copy("build/plottable.d.ts", "plottable.d.ts");
  });

  grunt.registerTask("test-compile", [
    "ts:test",
    "concat:tests_multifile",
    "sed:tests_multifile",
    "concat:tests",
  ]);

  grunt.registerTask("default", "launch");

  // Full dev build pipeline: compile sources + tests, regenerate all
  // single-file and multifile artifacts, then clean temp files.
  var compile_task = [
    "update_ts_files",
    "update_test_ts_files",
    "ts:dev",
    "concat:plottable",
    "concat:definitions",
    "sed:definitions",
    "sed:private_definitions",
    "concat:header",
    "sed:version_number",
    "definitions_prod",
    "test-compile",
    "concat:plottable_multifile",
    "sed:plottable_multifile",
    "clean:tscommand"
  ];

  grunt.registerTask("dev-compile", compile_task);

  grunt.registerTask("release:patch", ["bump:patch", "dist-compile", "gitcommit:version"]);
  grunt.registerTask("release:minor", ["bump:minor", "dist-compile", "gitcommit:version"]);
  grunt.registerTask("release:major", ["bump:major", "dist-compile", "gitcommit:version"]);

  // Distribution build: dev build + coverage, lint, d.ts check, minify, zip.
  grunt.registerTask("dist-compile", [
    "dev-compile",
    "blanket_mocha",
    "parallelize:tslint",
    "ts:verify_d_ts",
    "uglify",
    "compress"
  ]);

  grunt.registerTask("commitjs", ["dist-compile", "gitcommit:built"]);
  grunt.registerTask("launch", ["connect", "dev-compile", "watch"]);
  grunt.registerTask("test-sauce", ["connect", "saucelabs-mocha"]);
  grunt.registerTask("test", ["dev-compile", "blanket_mocha", "parallelize:tslint", "jshint", "ts:verify_d_ts"]);

  // Disable saucelabs for external pull requests. Check if we can see the SAUCE_USERNAME
  var travisTests = ["test"];
  if (process.env.SAUCE_USERNAME) {
    travisTests.push("test-sauce");
  }
  grunt.registerTask("test-travis", travisTests);

  grunt.registerTask("bm", ["blanket_mocha"]);

  grunt.registerTask("sublime", [
    "shell:sublime",
    "sed:sublime",
  ]);
};
|
-- Total amount spent per customer over the trailing month.
-- MySQL dialect: DATE_SUB/CURDATE; customers without orders in the window
-- are excluded by the inner join.
SELECT c.name, SUM(o.amount) AS "Total Amount Spent"
FROM customers c
JOIN orders o ON c.customer_id = o.customer_id
WHERE o.date > DATE_SUB(CURDATE(), INTERVAL 1 MONTH)
GROUP BY c.name;
package middleware
import (
"context"
"github.com/cosmos/cosmos-sdk/codec/legacy"
"github.com/cosmos/cosmos-sdk/crypto/keys/multisig"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
"github.com/cosmos/cosmos-sdk/types/tx"
"github.com/cosmos/cosmos-sdk/types/tx/signing"
"github.com/cosmos/cosmos-sdk/x/auth/migrations/legacytx"
authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
abci "github.com/tendermint/tendermint/abci/types"
)
type validateBasicTxHandler struct {
	next tx.Handler // next handler in the middleware chain
}

// ValidateBasicMiddleware will call tx.ValidateBasic, msg.ValidateBasic(for each msg inside tx)
// and return any non-nil error.
// If ValidateBasic passes, middleware calls next middleware in chain. Note,
// validateBasicTxHandler will not get executed on ReCheckTx since it
// is not dependent on application state.
func ValidateBasicMiddleware(txh tx.Handler) tx.Handler {
	return validateBasicTxHandler{
		next: txh,
	}
}

// compile-time check that the handler satisfies tx.Handler
var _ tx.Handler = validateBasicTxHandler{}

// validateBasicTxMsgs executes basic validator calls for messages.
// An empty message list is rejected outright; otherwise the first failing
// msg.ValidateBasic error is returned.
func validateBasicTxMsgs(msgs []sdk.Msg) error {
	if len(msgs) == 0 {
		return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "must contain at least one message")
	}

	for _, msg := range msgs {
		err := msg.ValidateBasic()
		if err != nil {
			return err
		}
	}

	return nil
}

// CheckTx implements tx.Handler.CheckTx.
func (txh validateBasicTxHandler) CheckTx(ctx context.Context, tx sdk.Tx, req abci.RequestCheckTx) (abci.ResponseCheckTx, error) {
	// no need to validate basic on recheck tx, call next middleware
	if req.Type == abci.CheckTxType_Recheck {
		return txh.next.CheckTx(ctx, tx, req)
	}

	// message-level validation first, then the tx envelope
	if err := validateBasicTxMsgs(tx.GetMsgs()); err != nil {
		return abci.ResponseCheckTx{}, err
	}

	if err := tx.ValidateBasic(); err != nil {
		return abci.ResponseCheckTx{}, err
	}

	return txh.next.CheckTx(ctx, tx, req)
}

// DeliverTx implements tx.Handler.DeliverTx.
// NOTE(review): validation order here (tx envelope before messages) is the
// reverse of CheckTx; only which error surfaces first differs — confirm
// this asymmetry is intentional.
func (txh validateBasicTxHandler) DeliverTx(ctx context.Context, tx sdk.Tx, req abci.RequestDeliverTx) (abci.ResponseDeliverTx, error) {
	if err := tx.ValidateBasic(); err != nil {
		return abci.ResponseDeliverTx{}, err
	}

	if err := validateBasicTxMsgs(tx.GetMsgs()); err != nil {
		return abci.ResponseDeliverTx{}, err
	}

	return txh.next.DeliverTx(ctx, tx, req)
}

// SimulateTx implements tx.Handler.SimulateTx.
func (txh validateBasicTxHandler) SimulateTx(ctx context.Context, sdkTx sdk.Tx, req tx.RequestSimulateTx) (tx.ResponseSimulateTx, error) {
	if err := sdkTx.ValidateBasic(); err != nil {
		return tx.ResponseSimulateTx{}, err
	}

	if err := validateBasicTxMsgs(sdkTx.GetMsgs()); err != nil {
		return tx.ResponseSimulateTx{}, err
	}

	return txh.next.SimulateTx(ctx, sdkTx, req)
}
var _ tx.Handler = txTimeoutHeightTxHandler{}

type txTimeoutHeightTxHandler struct {
	next tx.Handler // next handler in the middleware chain
}

// TxTimeoutHeightMiddleware defines a middleware that checks for a
// tx height timeout.
func TxTimeoutHeightMiddleware(txh tx.Handler) tx.Handler {
	return txTimeoutHeightTxHandler{
		next: txh,
	}
}

// checkTimeout rejects the tx once the chain has advanced strictly past the
// tx's timeout height. A timeout height of 0 means the tx never expires.
// The tx must implement sdk.TxWithTimeoutHeight or decoding is considered
// to have failed.
func checkTimeout(ctx context.Context, tx sdk.Tx) error {
	sdkCtx := sdk.UnwrapSDKContext(ctx)
	timeoutTx, ok := tx.(sdk.TxWithTimeoutHeight)
	if !ok {
		return sdkerrors.Wrap(sdkerrors.ErrTxDecode, "expected tx to implement TxWithTimeoutHeight")
	}

	timeoutHeight := timeoutTx.GetTimeoutHeight()
	if timeoutHeight > 0 && uint64(sdkCtx.BlockHeight()) > timeoutHeight {
		return sdkerrors.Wrapf(
			sdkerrors.ErrTxTimeoutHeight, "block height: %d, timeout height: %d", sdkCtx.BlockHeight(), timeoutHeight,
		)
	}

	return nil
}

// CheckTx implements tx.Handler.CheckTx.
func (txh txTimeoutHeightTxHandler) CheckTx(ctx context.Context, tx sdk.Tx, req abci.RequestCheckTx) (abci.ResponseCheckTx, error) {
	if err := checkTimeout(ctx, tx); err != nil {
		return abci.ResponseCheckTx{}, err
	}

	return txh.next.CheckTx(ctx, tx, req)
}

// DeliverTx implements tx.Handler.DeliverTx.
func (txh txTimeoutHeightTxHandler) DeliverTx(ctx context.Context, tx sdk.Tx, req abci.RequestDeliverTx) (abci.ResponseDeliverTx, error) {
	if err := checkTimeout(ctx, tx); err != nil {
		return abci.ResponseDeliverTx{}, err
	}

	return txh.next.DeliverTx(ctx, tx, req)
}

// SimulateTx implements tx.Handler.SimulateTx.
func (txh txTimeoutHeightTxHandler) SimulateTx(ctx context.Context, sdkTx sdk.Tx, req tx.RequestSimulateTx) (tx.ResponseSimulateTx, error) {
	if err := checkTimeout(ctx, sdkTx); err != nil {
		return tx.ResponseSimulateTx{}, err
	}

	return txh.next.SimulateTx(ctx, sdkTx, req)
}
type validateMemoTxHandler struct {
	ak   AccountKeeper // provides MaxMemoCharacters via module params
	next tx.Handler    // next handler in the middleware chain
}

// ValidateMemoMiddleware will validate memo given the parameters passed in
// If memo is too large middleware returns with error, otherwise call next middleware
// CONTRACT: Tx must implement TxWithMemo interface
func ValidateMemoMiddleware(ak AccountKeeper) tx.Middleware {
	return func(txHandler tx.Handler) tx.Handler {
		return validateMemoTxHandler{
			ak:   ak,
			next: txHandler,
		}
	}
}

var _ tx.Handler = validateMemoTxHandler{}

// checkForValidMemo rejects the tx when its memo exceeds the
// MaxMemoCharacters module parameter. Note the limit is compared against
// len() of the memo, i.e. bytes rather than characters for multi-byte text.
func (vmm validateMemoTxHandler) checkForValidMemo(ctx context.Context, tx sdk.Tx) error {
	sdkCtx := sdk.UnwrapSDKContext(ctx)
	memoTx, ok := tx.(sdk.TxWithMemo)
	if !ok {
		return sdkerrors.Wrap(sdkerrors.ErrTxDecode, "invalid transaction type")
	}

	params := vmm.ak.GetParams(sdkCtx)

	memoLength := len(memoTx.GetMemo())
	if uint64(memoLength) > params.MaxMemoCharacters {
		return sdkerrors.Wrapf(sdkerrors.ErrMemoTooLarge,
			"maximum number of characters is %d but received %d characters",
			params.MaxMemoCharacters, memoLength,
		)
	}

	return nil
}

// CheckTx implements tx.Handler.CheckTx method.
func (vmm validateMemoTxHandler) CheckTx(ctx context.Context, tx sdk.Tx, req abci.RequestCheckTx) (abci.ResponseCheckTx, error) {
	if err := vmm.checkForValidMemo(ctx, tx); err != nil {
		return abci.ResponseCheckTx{}, err
	}

	return vmm.next.CheckTx(ctx, tx, req)
}

// DeliverTx implements tx.Handler.DeliverTx method.
func (vmm validateMemoTxHandler) DeliverTx(ctx context.Context, tx sdk.Tx, req abci.RequestDeliverTx) (abci.ResponseDeliverTx, error) {
	if err := vmm.checkForValidMemo(ctx, tx); err != nil {
		return abci.ResponseDeliverTx{}, err
	}

	return vmm.next.DeliverTx(ctx, tx, req)
}

// SimulateTx implements tx.Handler.SimulateTx method.
func (vmm validateMemoTxHandler) SimulateTx(ctx context.Context, sdkTx sdk.Tx, req tx.RequestSimulateTx) (tx.ResponseSimulateTx, error) {
	if err := vmm.checkForValidMemo(ctx, sdkTx); err != nil {
		return tx.ResponseSimulateTx{}, err
	}

	return vmm.next.SimulateTx(ctx, sdkTx, req)
}
var _ tx.Handler = consumeTxSizeGasTxHandler{}

type consumeTxSizeGasTxHandler struct {
	ak   AccountKeeper // provides TxSizeCostPerByte / TxSigLimit params and accounts
	next tx.Handler    // next handler in the middleware chain
}

// ConsumeTxSizeGasMiddleware will take in parameters and consume gas proportional
// to the size of tx before calling next middleware. Note, the gas costs will be
// slightly over estimated due to the fact that any given signing account may need
// to be retrieved from state.
//
// CONTRACT: If simulate=true, then signatures must either be completely filled
// in or empty.
// CONTRACT: To use this middleware, signatures of transaction must be represented
// as legacytx.StdSignature otherwise simulate mode will incorrectly estimate gas cost.
func ConsumeTxSizeGasMiddleware(ak AccountKeeper) tx.Middleware {
	return func(txHandler tx.Handler) tx.Handler {
		return consumeTxSizeGasTxHandler{
			ak:   ak,
			next: txHandler,
		}
	}
}

// simulateSigGasCost charges gas for the byte size of signatures that are
// still missing from the tx, so a simulation's gas estimate reflects the
// size of the eventually fully-signed tx.
func (cgts consumeTxSizeGasTxHandler) simulateSigGasCost(ctx context.Context, tx sdk.Tx) error {
	sdkCtx := sdk.UnwrapSDKContext(ctx)
	params := cgts.ak.GetParams(sdkCtx)

	sigTx, ok := tx.(authsigning.SigVerifiableTx)
	if !ok {
		return sdkerrors.Wrap(sdkerrors.ErrTxDecode, "invalid tx type")
	}

	// in simulate mode, each element should be a nil signature
	sigs, err := sigTx.GetSignaturesV2()
	if err != nil {
		return err
	}
	n := len(sigs)

	for i, signer := range sigTx.GetSigners() {
		// if signature is already filled in, no need to simulate gas cost
		if i < n && !isIncompleteSignature(sigs[i].Data) {
			continue
		}

		var pubkey cryptotypes.PubKey

		acc := cgts.ak.GetAccount(sdkCtx, signer)

		// use placeholder simSecp256k1Pubkey if sig is nil
		// (simSecp256k1Pubkey / simSecp256k1Sig are defined elsewhere in
		// this package)
		if acc == nil || acc.GetPubKey() == nil {
			pubkey = simSecp256k1Pubkey
		} else {
			pubkey = acc.GetPubKey()
		}

		// use stdsignature to mock the size of a full signature
		simSig := legacytx.StdSignature{ //nolint:staticcheck // this will be removed when proto is ready
			Signature: simSecp256k1Sig[:],
			PubKey:    pubkey,
		}

		// +6 extra bytes on top of the amino-encoded signature — presumably
		// encoding overhead; confirm against the legacy amino format.
		sigBz := legacy.Cdc.MustMarshal(simSig)
		cost := sdk.Gas(len(sigBz) + 6)

		// If the pubkey is a multi-signature pubkey, then we estimate for the maximum
		// number of signers.
		if _, ok := pubkey.(*multisig.LegacyAminoPubKey); ok {
			cost *= params.TxSigLimit
		}

		sdkCtx.GasMeter().ConsumeGas(params.TxSizeCostPerByte*cost, "txSize")
	}

	return nil
}

// consumeTxSizeGas charges TxSizeCostPerByte gas per byte of the raw tx.
// NOTE(review): the tx and simulate parameters are accepted but unused here;
// the charge is identical in all modes — confirm before relying on simulate.
func (cgts consumeTxSizeGasTxHandler) consumeTxSizeGas(ctx context.Context, tx sdk.Tx, txBytes []byte, simulate bool) error {
	sdkCtx := sdk.UnwrapSDKContext(ctx)
	params := cgts.ak.GetParams(sdkCtx)
	sdkCtx.GasMeter().ConsumeGas(params.TxSizeCostPerByte*sdk.Gas(len(txBytes)), "txSize")

	return nil
}

// CheckTx implements tx.Handler.CheckTx.
func (cgts consumeTxSizeGasTxHandler) CheckTx(ctx context.Context, tx sdk.Tx, req abci.RequestCheckTx) (abci.ResponseCheckTx, error) {
	if err := cgts.consumeTxSizeGas(ctx, tx, req.GetTx(), false); err != nil {
		return abci.ResponseCheckTx{}, err
	}

	return cgts.next.CheckTx(ctx, tx, req)
}

// DeliverTx implements tx.Handler.DeliverTx.
func (cgts consumeTxSizeGasTxHandler) DeliverTx(ctx context.Context, tx sdk.Tx, req abci.RequestDeliverTx) (abci.ResponseDeliverTx, error) {
	if err := cgts.consumeTxSizeGas(ctx, tx, req.GetTx(), false); err != nil {
		return abci.ResponseDeliverTx{}, err
	}

	return cgts.next.DeliverTx(ctx, tx, req)
}

// SimulateTx implements tx.Handler.SimulateTx.
func (cgts consumeTxSizeGasTxHandler) SimulateTx(ctx context.Context, sdkTx sdk.Tx, req tx.RequestSimulateTx) (tx.ResponseSimulateTx, error) {
	if err := cgts.consumeTxSizeGas(ctx, sdkTx, req.TxBytes, true); err != nil {
		return tx.ResponseSimulateTx{}, err
	}

	if err := cgts.simulateSigGasCost(ctx, sdkTx); err != nil {
		return tx.ResponseSimulateTx{}, err
	}

	return cgts.next.SimulateTx(ctx, sdkTx, req)
}
// isIncompleteSignature reports whether a SignatureData still has empty
// slots and therefore needs simulation placeholders: a nil value, a single
// signature with no bytes, an empty multisig, or a multisig containing any
// incomplete member all count as incomplete.
func isIncompleteSignature(data signing.SignatureData) bool {
	switch sig := data.(type) {
	case nil:
		return true
	case *signing.SingleSignatureData:
		return len(sig.Signature) == 0
	case *signing.MultiSignatureData:
		if len(sig.Signatures) == 0 {
			return true
		}
		for _, nested := range sig.Signatures {
			if isIncompleteSignature(nested) {
				return true
			}
		}
		return false
	default:
		return false
	}
}
|
def allocate_tables(students, table_capacity):
    """Split each class size into per-table groups of at most ``table_capacity``.

    Args:
        students: iterable of student counts, one per class. Counts of zero
            (or below) yield an empty allocation, matching the previous
            behavior of the while-loop implementation.
        table_capacity: maximum number of students per table; must be positive.

    Returns:
        A list with one sub-list per class: ``table_capacity`` repeated once
        per full table, followed by the remainder (if any).

    Raises:
        ValueError: if ``table_capacity`` is not positive (the original
            implementation looped forever in that case).
    """
    if table_capacity <= 0:
        raise ValueError("table_capacity must be positive")

    allocated_tables = []
    for num_students in students:
        if num_students <= 0:
            # Nothing to seat for this class.
            allocated_tables.append([])
            continue
        # divmod replaces the original subtraction loop: O(1) per class
        # instead of O(num_students / table_capacity).
        full_tables, remainder = divmod(num_students, table_capacity)
        table_allocation = [table_capacity] * full_tables
        if remainder:
            table_allocation.append(remainder)
        allocated_tables.append(table_allocation)
    return allocated_tables
#!/usr/bin/env bash
#===============================================================================
#
# FILE: restore-wale-backup.sh
#
# USAGE: ./restore-wale-backup.sh
#
# DESCRIPTION: This script will restore a database from a wal-e backup with the
# capability of restoring from a specific time as well.
#
# OPTIONS: ---
# REQUIREMENTS: ---
# BUGS: ---
# NOTES: ---
# AUTHOR: Gregg Jensen (), gjensen@devops.center
# Bob Lozano (), bob@devops.center
# ORGANIZATION: devops.center
# CREATED: 06/30/2017 08:10:07
# REVISION: ---
#
# Copyright 2014-2017 devops.center llc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#===============================================================================
#set -o nounset # Treat unset variables as an error
#set -o errexit # exit immediately if command exits with a non-zero status
#set -x # essentially debug mode
#-------------------------------------------------------------------------------
# bring in the devops.center dcLog functions
#-------------------------------------------------------------------------------
#source /usr/local/bin/dcEnv.sh
#---  FUNCTION  ----------------------------------------------------------------
#          NAME:  usage
#   DESCRIPTION:  print the command synopsis, required arguments and options
#    PARAMETERS:  none
#       RETURNS:  nothing (writes help text to stdout)
#-------------------------------------------------------------------------------
function usage
{
    echo "usage: ./restore-wale-backup.sh [--backupfile backup-file] [--recoverytime recovery-time] [--list] s3base aws-hostname"
    echo
    echo "This script will restore a database from a wal-e backup with the capability of restoring from a specific time as well."
    echo
    echo "s3base - This is a term that is made up of the application name and the environment separated by a dash (ie dcDemoBlog-dev)"
    echo "aws-hostname - This is the database instance hostname (ie dcDemoBlog-dev-db7) "
    echo
    echo "Recover from a specific point in time.  You can recover the database from a specific point in time by specifying the"
    echo "--recoverytime option.  The value of the recovery time needs to follow this format: 2017-02-01 19:58:55"
    echo "You will need to put double quotes around it to ensure that the option sees it as one argument."
    echo
    echo "If you already know the name of the backup file you want wal-e to pull down you can specify that name with the"
    # The original help text was cut off mid-sentence here; completed to
    # describe the actual behavior (see the backup-fetch invocation below,
    # which uses LATEST when no --backupfile is given).
    echo " --backupfile option and it will restore from that backup instead of the most recent (LATEST) one."
}
#-------------------------------------------------------------------------------
# option checking
#-------------------------------------------------------------------------------
# No arguments at all => show help and fail.
if [[ -z $1 ]]; then
    usage
    exit 1
fi

# Walk the argument list; options may appear in any order. The two bare
# (non-dash) arguments are positional: s3base then aws-hostname.
while [[ $# -gt 0 ]]; do
    case $1 in
      --backupfile )    shift
                        BACKUPFILE=$1
                        ;;
      --recoverytime )  shift
                        RECOVERYTIME=$1
                        ;;
      --list )          LIST=1
                        ;;
      # First argument not starting with a dash: expect exactly the two
      # required positionals to remain.
      [!-]* )           if [[ $# -eq 2 ]]; then
                            S3BASE=$1
                            AWS_HOSTNAME=$2
                            shift;
                        else
                            echo "Too many/few of the 2 required parameters."
                            usage
                            exit 1
                        fi
                        ;;
      * )               usage
                        exit 1
    esac
    shift
done
#dcStartLog "Restoring database backup with wal-e backup"
echo  "Restoring database backup with wal-e backup"

#-------------------------------------------------------------------------------
# check to make sure they have AWS_REGION set in the environment to the appropriate
# S3 region that has there backup in it.  The region was determined when the instances
# were created.
#-------------------------------------------------------------------------------
if [[ -z ${AWS_REGION} ]]; then
    echo "NOTE: The AWS S3 REGION needs to be set."
    echo "      (ie, export AWS_REGION=\"us-west-2\")"
    exit
fi

#-------------------------------------------------------------------------------
# list available wal-e backups and exit
#-------------------------------------------------------------------------------
# NOTE(review): `set -x` enables shell tracing for the remainder of the
# script — looks like leftover debugging; confirm before removing.
set -x
if ! [[ -z "$LIST" ]]; then
    #dcLog "Giving a list of wal-e backups only and exiting"
    echo "Giving a list of wal-e backups only and exiting"
    sudo su -s /bin/sh postgres -c "export AWS_REGION=$AWS_REGION; wal-e --aws-instance-profile --s3-prefix s3://${S3BASE}-postgres-wale/${AWS_HOSTNAME} backup-list"
    # NOTE(review): exits 1 even on a successful listing — confirm callers
    # do not treat this as a failure.
    exit 1
fi

#-------------------------------------------------------------------------------
# Need to stop postgres first
#-------------------------------------------------------------------------------
#dcLog "stopping postgres"
echo "stopping postgres"
sudo supervisorctl stop postgres

#-------------------------------------------------------------------------------
# Not sure what goes into this directory but maybe it is something that wal-e uses
#-------------------------------------------------------------------------------
if [[ ! -d /media/data/postgres/xlog/transactions ]]; then
    sudo mkdir /media/data/postgres/xlog/transactions
    sudo chown postgres:postgres /media/data/postgres/xlog/transactions
fi

#-------------------------------------------------------------------------------
# make copies of files needed for wal-e restore
#-------------------------------------------------------------------------------
sudo cp --preserve /media/data/postgres/db/pgdata/postgresql.conf /media/data/postgres/backup/
sudo cp --preserve /media/data/postgres/db/pgdata/pg_ident.conf /media/data/postgres/backup/
sudo cp --preserve /media/data/postgres/db/pgdata/pg_hba.conf /media/data/postgres/backup/

#-------------------------------------------------------------------------------
# in order to get ready for the backup-fetch the database directory needs to be
# cleared.  Note, the good postgres conf files are already in the backup directory
# so afterwards they will be copied in.
#-------------------------------------------------------------------------------
sudo -u postgres rm -rf /media/data/postgres/db/pgdata
sudo -u postgres mkdir /media/data/postgres/db/pgdata
sudo -u postgres chmod 700 /media/data/postgres/db/pgdata

#-------------------------------------------------------------------------------
# if the backup file isn't specified, use LATEST...but we do need to transfer a
# backup over so that we can recover from it.
#-------------------------------------------------------------------------------
#dcLog "doing a wal-e backup-fetch to get the backup file"
echo "doing a wal-e backup-fetch to get the backup file"
if [[ -z "$BACKUPFILE" ]]; then
    sudo su -s /bin/sh postgres -c "export AWS_REGION=$AWS_REGION; wal-e --aws-instance-profile --s3-prefix s3://${S3BASE}-postgres-wale/${AWS_HOSTNAME} backup-fetch /media/data/postgres/db/pgdata/ LATEST"
else
    sudo su -s /bin/sh postgres -c "export AWS_REGION=$AWS_REGION; wal-e --aws-instance-profile --s3-prefix s3://${S3BASE}-postgres-wale/${AWS_HOSTNAME} backup-fetch /media/data/postgres/db/pgdata/ $BACKUPFILE"
fi

# $? here is the exit status of whichever backup-fetch branch ran above.
if [[ $? -gt 0 ]]; then
    echo "The backup-fetch did not complete, exiting..."
    sudo supervisorctl start postgres
    exit 1
fi

#-------------------------------------------------------------------------------
# make backups of postgresql.conf, pg_hba.conf, and pg_ident.conf by copying over to new data dir
#-------------------------------------------------------------------------------
#dcLog "making backups of the necessary postgres conf files"
echo "making backups of the necessary postgres conf files"
#sudo cp --preserve /media/data/postgres/backup/postgresql.conf.wale /media/data/postgres/db/pgdata/postgresql.conf
sudo cp --preserve /media/data/postgres/backup/postgresql.conf /media/data/postgres/db/pgdata/
sudo cp --preserve /media/data/postgres/backup/pg_hba.conf /media/data/postgres/db/pgdata/
sudo cp --preserve /media/data/postgres/backup/pg_ident.conf /media/data/postgres/db/pgdata/

# create recovery.conf file
#-------------------------------------------------------------------------------
# Need to create the recovery.conf file that postgres will see when it starts up.  It will then
# execute what it finds in the recover.conf file into a database.
# NOTE: the database that it recovers to is not the main database and will need to be promoted
# once it has been checked for accuracy
#-------------------------------------------------------------------------------
#dcLog "creating the recover.conf file"
echo "creating the recover.conf file"
# Without --recoverytime, recover to the end of the WAL stream; otherwise
# add recovery_target_time so postgres stops at that point in time.
if [[ -z "${RECOVERYTIME}" ]]; then
cat <<- EOF1 | sudo tee /media/data/postgres/db/pgdata/recovery.conf > /dev/null
restore_command = 'export AWS_REGION=${AWS_REGION}; wal-e --aws-instance-profile --s3-prefix s3://${S3BASE}-postgres-wale/${AWS_HOSTNAME} wal-fetch %f %p'
pause_at_recovery_target = false
EOF1
else
cat <<- EOF2 | sudo tee /media/data/postgres/db/pgdata/recovery.conf > /dev/null
restore_command = 'export AWS_REGION=${AWS_REGION}; wal-e --aws-instance-profile --s3-prefix s3://${S3BASE}-postgres-wale/${AWS_HOSTNAME} wal-fetch %f %p'
pause_at_recovery_target = false
recovery_target_time = '${RECOVERYTIME}'
EOF2
fi

#-------------------------------------------------------------------------------
# chown owner of the recovery.conf file so that postgres will see and use it correctly
#-------------------------------------------------------------------------------
sudo chown postgres:postgres /media/data/postgres/db/pgdata/recovery.conf

#dcLog "starting the database"
echo "starting the database"
sudo supervisorctl start postgres

# ensure postgres has started
sleep 5

# create a basebackup to allow for wal-e restores on this host
sudo su -s /bin/sh postgres -c "export AWS_REGION=${AWS_REGION}; wal-e --aws-instance-profile --s3-prefix s3://${S3BASE}-postgres-wale/${AWS_HOSTNAME} backup-push /media/data/postgres/db/pgdata"
if [[ $? -gt 0 ]]; then
    echo "It doesn't appear that the database is active yet, you will need to wait and try creating the wal-e baseline backup later."
fi

#dcEndLog "Finished..."
echo "Finished..."
|
/**
 * @description Department management model (部门管理模型)
 * @author lnden
 */
const mongoose = require('mongoose')

// Schema for a department document in the `depts` collection.
const deptSchema = mongoose.Schema({
    deptName: String,   // department display name
    userId: String,     // id of the associated user (presumably the head — confirm with callers)
    userName: String,
    userEmail: String,
    parentId: [mongoose.Types.ObjectId], // parent department id(s)
    updateTime: {
        type: Date,
        // BUG FIX: pass the function itself, not its result. `Date.now()`
        // was evaluated once at schema-definition time, so every document
        // got the server start time as its default. With a function,
        // mongoose evaluates the default at document-creation time.
        default: Date.now
    },
    createTime: {
        type: Date,
        default: Date.now
    },
})

module.exports = mongoose.model('dept', deptSchema, 'depts')
#!/usr/bin/env sh
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Mojib Wali.
#
# invenio-config-tugraz is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.

# Bring up the backing services the test suite needs. The original script
# invoked `docker-services-cli up` twice (once before the checks and again
# at the end of the && chain) — once is sufficient.
docker-services-cli up postgresql es redis

# Static checks: manifest completeness and a strict docs build.
python -m check_manifest --ignore ".travis-*" && \
python -m sphinx.cmd.build -qnNW docs docs/_build/html

# Run the tests and preserve their exit status so teardown can run first.
python -m pytest
tests_exit_code=$?
docker-services-cli down
exit "$tests_exit_code"
<gh_stars>0
import BotToken from './settings/botToken/BotToken';
import ReceivedMessages from './receivedMessages/ReceivedMessages';
import { Observable, Subject } from 'rxjs';
import { Hub } from '../reactHub/Hub';
let count = 0
const Main = () => {
const hub = new Hub()
const messages: Observable<any> = new Observable((observer: any) => {
setInterval(() => {
observer.next("Hello "+count)
count++
}, 2000)
})
hub.plug({
name: "Messages",
outputs: [{
name: "msgsReceived",
outputObservable: messages
}]
})
const messagesLog: string[] = []
const messagesLogProps = new Subject()
hub.plug({
name: "MessagesDisplay",
inputs: [{
source: "Messages:msgsReceived",
inputSubscriber: (msg: string) => {
messagesLog.push(msg)
messagesLogProps.next({
"messages": messagesLog
})
}
}],
renderer: {
props: messagesLogProps,
functionComponent: ReceivedMessages
}
})
setTimeout( () => hub.unplug("MessagesDisplay") , 5000)
hub.plug({
name: "BotToken",
renderer: {
functionComponent: BotToken
}
})
}
export default Main |
<filename>src/pages/_error.tsx
import HomeTemplate from '../templates/HomeTemplate';

// Next.js custom error page: render the home template for any error status.
// Named component instead of an anonymous default export so it appears with
// a useful name in React devtools and error stacks.
const ErrorPage = () => {
    return <HomeTemplate />
}

export default ErrorPage
<gh_stars>0
package org.baade.eel.ls;
import org.baade.eel.core.Globals;
import org.baade.eel.core.conf.GameSystemProperty;
import org.baade.eel.ls.handler.LoginHTTPHandler;
import org.baade.eel.ls.handler.LoginSocketChannelInitializer;
import org.baade.eel.ls.server.LoginHTTPServer;
import org.baade.eel.ls.server.LoginSocketServer;
/**
* 登录服的启动入口
*/
public class Launch {
public static final String LOG_NAME_OF_LOGIN_SERVER = "LS";
public static void main(String[] args) {
System.setProperty(GameSystemProperty.LOG_NAME.getKey(), LOG_NAME_OF_LOGIN_SERVER);
startHttpServer();
startSocketServer();
}
private static void startHttpServer() {
LoginHTTPServer server = new LoginHTTPServer();
server.setPort(Globals.getConfig().getLoginServer().getHttpPort());
LoginHTTPHandler httpHandler = new LoginHTTPHandler();
server.setHandler(httpHandler);
server.start();
}
private static void startSocketServer() {
LoginSocketServer server = new LoginSocketServer();
server.setPort(Globals.getConfig().getLoginServer().getSocketPort());
LoginSocketChannelInitializer channelInitializer = new LoginSocketChannelInitializer();
server.setChannelInitializer(channelInitializer);
server.start();
}
} |
# Import the relevant libraries.
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split

# Load the dataset: every column except the last is a feature, the last
# column is the label.
dataset = pd.read_csv('dataset.csv')
X = dataset.iloc[:, :-1].values
Y = dataset.iloc[:, -1].values

# Hold out 25% of the rows for evaluation. A fixed random_state makes the
# split — and therefore the reported accuracy — reproducible between runs
# (the original script gave a different number on every execution).
X_train, X_test, Y_train, Y_test = train_test_split(
    X, Y, test_size=0.25, random_state=42
)

# Train the model (random_state also pins the tree's internal tie-breaking).
clf = DecisionTreeClassifier(random_state=42)
clf.fit(X_train, Y_train)

# Predict on the test set and report accuracy. clf.score() is equivalent to
# comparing Y_pred against Y_test; the original computed Y_pred but never
# used it.
Y_pred = clf.predict(X_test)
accuracy = (Y_pred == Y_test).mean()
print('Accuracy of the model:', accuracy)
<reponame>twinstone/open-anonymizer<gh_stars>1-10
package openanonymizer.service;
import openanonymizer.core.storage.TransformationStorage;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.log4j.xml.DOMConfigurator;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Optional;
/**
* Test for {@link TransformationStorage}. Tests possibility of storing transformed field values.
*/
public class TransformationStorageTest {

    // Configure log4j before every test so TransformationStorage logging works.
    @Before
    public void before() {
        DOMConfigurator.configure("log4j.xml");
    }

    // insertValue must reject a null source name.
    @Test(expected = NullPointerException.class)
    public void nullSourceTestInsert() {
        TransformationStorage.insertValue(null, null, null);
    }

    // findByLeft must reject a null source name.
    @Test(expected = NullPointerException.class)
    public void nullSourceTestGet() {
        TransformationStorage.findByLeft(null, null);
    }

    // clearSource must reject a null source name.
    @Test(expected = NullPointerException.class)
    public void nullSourceTestClear() {
        TransformationStorage.clearSource(null);
    }

    // insertValue must reject a null left (original) value.
    @Test(expected = NullPointerException.class)
    public void nullLeftValueTestInsert() {
        TransformationStorage.insertValue("test", null, null);
    }

    // findByLeft must reject a null left (original) value.
    @Test(expected = NullPointerException.class)
    public void nullLeftValueTestGet() {
        TransformationStorage.findByLeft("test", null);
    }

    // insertValue must reject a null right (transformed) value.
    @Test(expected = NullPointerException.class)
    public void nullRightTestInsert() {
        TransformationStorage.insertValue("test", 1, null);
    }

    // Clearing a source that does not exist (here: the second clear of "test")
    // must raise IllegalArgumentException.
    @Test(expected = IllegalArgumentException.class)
    public void clearStorageTest() {
        TransformationStorage.clearSource("test");
        TransformationStorage.clearSource("test");
    }

    // Values inserted under one source are retrievable by their left key.
    // NOTE(review): TransformationStorage appears to hold global/static state
    // shared across tests; the tests below rely on distinct source names
    // ("test1"/"test2") for isolation -- confirm if run order ever changes.
    @Test
    public void storageTest1() {
        TransformationStorage.insertValue("test1", 1, 100);
        TransformationStorage.insertValue("test1", 2, 200);
        Assert.assertTrue(TransformationStorage.findByLeft("test1", 1).isPresent());
        Assert.assertTrue(TransformationStorage.findByLeft("test1", 2).isPresent());
    }

    // Lookup returns the stored right value; clearSource removes the mapping
    // and the source can be repopulated afterwards.
    @Test
    public void storageTest2() {
        TransformationStorage.insertValue("test1", 1, 100);
        TransformationStorage.insertValue("test1", 2, 200);
        Optional<Pair<?, ?>> pair1 = TransformationStorage.findByLeft("test1", 1);
        Optional<Pair<?, ?>> pair2 = TransformationStorage.findByLeft("test1", 2);
        Assert.assertEquals(100, pair1.get().getRight());
        Assert.assertEquals(200, pair2.get().getRight());
        TransformationStorage.clearSource("test1");
        Assert.assertFalse(TransformationStorage.findByLeft("test1", 1).isPresent());
        TransformationStorage.insertValue("test1", 1, 300);
        Assert.assertTrue(TransformationStorage.findByLeft("test1", 1).isPresent());
    }

    // Clearing one source must not affect values stored under another source.
    @Test
    public void storageTest3() {
        TransformationStorage.insertValue("test1", 1, 100);
        TransformationStorage.insertValue("test2", 2, 200);
        TransformationStorage.clearSource("test2");
        Assert.assertTrue(TransformationStorage.findByLeft("test1", 1).isPresent());
        Assert.assertFalse(TransformationStorage.findByLeft("test2", 2).isPresent());
    }
}
|
#!/usr/bin/env bash
# Remove the Fosstrak EPCIS container so it can be recreated cleanly.
CONTAINER_NAME='fosstrak_epcis'
# Quote the expansion to be safe against word splitting/globbing.
# Note: 'docker rm' fails if the container is still running; use
# 'docker rm -f' manually if a forced removal is intended.
docker rm "${CONTAINER_NAME}"
#!/bin/bash
# Vendors the httprule package sources from the grpc-gateway checkout under
# $GOPATH into this directory, and writes a gen.go stub so `go generate`
# re-runs this script.
SRC=$(realpath $(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd))
set -e
# NOTE(review): `set -e` only takes effect after SRC is computed above.
PKGPATH=$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway/internal/httprule
# Bring the upstream checkout to a pristine, up-to-date state.
pushd $PKGPATH &> /dev/null
git clean -f -x -d
git reset --hard
git pull
popd &> /dev/null
# Replace our local copies with fresh ones, skipping test files.
rm -f $SRC/*.go
# NOTE(review): parsing `ls` output breaks on paths containing spaces, and
# `grep -v test` also drops any non-test file whose name contains "test"
# (e.g. a hypothetical testutil.go) -- confirm that is intended.
FILES=$(ls $PKGPATH/*.go|grep -v test)
cp $FILES $SRC/
# Emit a gen.go that re-invokes this script via `go generate`.
GEN=$(cat << END
package httprule
//go:generate ./gen.sh
END
)
echo "$GEN" > $SRC/gen.go
AI can detect spam email using supervised machine-learning algorithms such as Support Vector Machines or Naive Bayes. The process starts by collecting labeled email data containing both spam and non-spam messages. The data is then preprocessed to extract features such as the subject line, sender, words used in the body, links, and attachments. Finally, a supervised classifier is trained on these features, producing a spam detector that identifies spam emails with high accuracy.
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
export VERBOSE=${VERBOSE:="false"}
# shellcheck source=scripts/ci/_script_init.sh
. "$( dirname "${BASH_SOURCE[0]}" )/_script_init.sh"
# A marker file created by an earlier CI step short-circuits the test run.
if [[ -f ${BUILD_CACHE_DIR}/.skip_tests ]]; then
    echo
    echo "Skipping running tests !!!!!"
    echo
    exit
fi
prepare_build
rebuild_ci_image_if_needed
# Test environment
export BACKEND=${BACKEND:="sqlite"}
# Whether the local airflow sources should be mounted into the docker container
export MOUNT_LOCAL_SOURCES=${MOUNT_LOCAL_SOURCES:="false"}
# whether verbose output should be produced
# NOTE(review): VERBOSE is already exported with the same default at the top
# of this script -- this second export is redundant but harmless.
export VERBOSE=${VERBOSE:="false"}
# whether verbose commands output (set -x) should be used
export VERBOSE_COMMANDS=${VERBOSE_COMMANDS:="false"}
# Forwards host credentials to the container
export FORWARD_CREDENTIALS=${FORWARD_CREDENTIALS:="false"}
# Installs a different airflow version than the current sources
export INSTALL_AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION:=""}
# Build up the list of extra docker-compose files depending on the flags above.
if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then
    DOCKER_COMPOSE_LOCAL=("-f" "${MY_DIR}/docker-compose/local.yml")
else
    DOCKER_COMPOSE_LOCAL=()
fi
if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
    DOCKER_COMPOSE_LOCAL+=("-f" "${MY_DIR}/docker-compose/forward-credentials.yml")
fi
if [[ ${INSTALL_AIRFLOW_VERSION} != "" ]]; then
    DOCKER_COMPOSE_LOCAL+=("-f" "${MY_DIR}/docker-compose/remove-sources.yml")
fi
echo
echo "Using docker image: ${AIRFLOW_CI_IMAGE} for docker compose runs"
echo
# Each enabled integration contributes its own compose overlay file.
INTEGRATIONS=()
ENABLED_INTEGRATIONS=${ENABLED_INTEGRATIONS:=""}
for _INT in ${ENABLED_INTEGRATIONS}
do
    INTEGRATIONS+=("-f")
    INTEGRATIONS+=("${MY_DIR}/docker-compose/integration-${_INT}.yml")
done
RUN_INTEGRATION_TESTS=${RUN_INTEGRATION_TESTS:=""}
if [[ ${RUNTIME:=} == "kubernetes" ]]; then
    export KUBERNETES_MODE=${KUBERNETES_MODE:="git_mode"}
    export KUBERNETES_VERSION=${KUBERNETES_VERSION:="v1.15.3"}
    set +u
    # shellcheck disable=SC2016
    docker-compose --log-level INFO \
      -f "${MY_DIR}/docker-compose/base.yml" \
      -f "${MY_DIR}/docker-compose/backend-${BACKEND}.yml" \
      -f "${MY_DIR}/docker-compose/runtime-kubernetes.yml" \
      "${INTEGRATIONS[@]}" \
      "${DOCKER_COMPOSE_LOCAL[@]}" \
         run airflow-testing \
           '/opt/airflow/scripts/ci/in_container/entrypoint_ci.sh "${@}"' \
           /opt/airflow/scripts/ci/in_container/entrypoint_ci.sh "${@}"
         # Note the command is there twice (!) because it is passed via bash -c
         # and bash -c starts passing parameters from $0. TODO: fixme
    set -u
else
    set +u
    # shellcheck disable=SC2016
    docker-compose --log-level INFO \
      -f "${MY_DIR}/docker-compose/base.yml" \
      -f "${MY_DIR}/docker-compose/backend-${BACKEND}.yml" \
      "${INTEGRATIONS[@]}" \
      "${DOCKER_COMPOSE_LOCAL[@]}" \
         run airflow-testing \
           '/opt/airflow/scripts/ci/in_container/entrypoint_ci.sh "${@}"' \
           /opt/airflow/scripts/ci/in_container/entrypoint_ci.sh "${@}"
         # Note the command is there twice (!) because it is passed via bash -c
         # and bash -c starts passing parameters from $0. TODO: fixme
    set -u
fi
|
#!/bin/bash
# Copyright 2018 (author: Haris Bin Zia)
# Apache 2.0
#
# Kaldi ASR recipe: prepares data/lang, extracts MFCC+CMVN features, then
# trains and decodes a cascade of models:
#   mono -> tri1 (deltas) -> tri2 (LDA+MLLT) -> tri3 (LDA+MLLT+SAT)
#   -> SGMM2 -> SGMM2+MMI -> DNN (Karel's recipe).
. ./cmd.sh
[ -f path.sh ] && . ./path.sh
set -e
# Model sizes: tree leaves / total Gaussians per training stage.
numLeavesTri1=2000
numGaussTri1=10000
numLeavesMLLT=2000
numGaussMLLT=10000
numLeavesSAT=2000
numGaussSAT=10000
numGaussUBM=400
numLeavesSGMM=7000
numGaussSGMM=9000
# Parallel jobs for feature extraction, training and decoding.
feats_nj=8
train_nj=8
decode_nj=8
echo ============================================================================
echo "                Data & Lexicon & Language Preparation                     "
echo ============================================================================
utils/fix_data_dir.sh data/train
utils/fix_data_dir.sh data/test
utils/prepare_lang.sh data/local/lang '<oov>' data/local data/lang
utils/format_lm.sh data/lang data/local/lm/trainlm.gz data/local/lang/lexicon.txt data/lang_test
# Alternative language model (BBC); enable instead of trainlm above if desired.
#utils/format_lm.sh data/lang data/local/lm/bbclm.gz  data/local/lang/lexicon.txt data/lang_test
echo ============================================================================
echo "         MFCC Feature Extration & CMVN for Training and Test set          "
echo ============================================================================
mfccdir=mfcc
steps/make_mfcc.sh --cmd "$train_cmd" --nj $feats_nj data/train exp/make_mfcc/train $mfccdir
steps/compute_cmvn_stats.sh data/train exp/make_mfcc/train $mfccdir
steps/make_mfcc.sh --cmd "$train_cmd" --nj $feats_nj data/test exp/make_mfcc/test $mfccdir
steps/compute_cmvn_stats.sh data/test exp/make_mfcc/test $mfccdir
utils/validate_data_dir.sh data/train
utils/fix_data_dir.sh data/train
utils/validate_data_dir.sh data/test
utils/fix_data_dir.sh data/test
echo ============================================================================
echo "                     MonoPhone Training & Decoding                        "
echo ============================================================================
steps/train_mono.sh --nj "$train_nj" --cmd "$train_cmd" data/train data/lang exp/mono
utils/mkgraph.sh data/lang_test exp/mono exp/mono/graph
steps/decode.sh --nj "$decode_nj" --cmd "$decode_cmd" \
 exp/mono/graph data/test exp/mono/decode
echo ============================================================================
echo "           tri1 : Deltas + Delta-Deltas Training & Decoding               "
echo ============================================================================
# Align with the monophone model, then train the first triphone system.
steps/align_si.sh --boost-silence 1.25 --nj "$train_nj" --cmd "$train_cmd" \
 data/train data/lang exp/mono exp/mono_ali
steps/train_deltas.sh --cmd "$train_cmd" \
 $numLeavesTri1 $numGaussTri1 data/train data/lang exp/mono_ali exp/tri1
utils/mkgraph.sh data/lang_test exp/tri1 exp/tri1/graph
steps/decode.sh --nj "$decode_nj" --cmd "$decode_cmd" \
 exp/tri1/graph data/test exp/tri1/decode
echo ============================================================================
echo "                 tri2 : LDA + MLLT Training & Decoding                    "
echo ============================================================================
steps/align_si.sh --nj "$train_nj" --cmd "$train_cmd" \
  data/train data/lang exp/tri1 exp/tri1_ali
steps/train_lda_mllt.sh --cmd "$train_cmd" \
 $numLeavesMLLT $numGaussMLLT data/train data/lang exp/tri1_ali exp/tri2
utils/mkgraph.sh data/lang_test exp/tri2 exp/tri2/graph
steps/decode.sh --nj "$decode_nj" --cmd "$decode_cmd" \
 exp/tri2/graph data/test exp/tri2/decode
echo ============================================================================
echo "              tri3 : LDA + MLLT + SAT Training & Decoding                 "
echo ============================================================================
# Speaker-adaptive training; decoding uses fMLLR transforms.
steps/align_si.sh --nj "$train_nj" --cmd "$train_cmd" \
 --use-graphs true data/train data/lang exp/tri2 exp/tri2_ali
steps/train_sat.sh --cmd "$train_cmd" \
 $numLeavesSAT $numGaussSAT data/train data/lang exp/tri2_ali exp/tri3
utils/mkgraph.sh data/lang_test exp/tri3 exp/tri3/graph
steps/decode_fmllr.sh --nj "$decode_nj" --cmd "$decode_cmd" \
 exp/tri3/graph data/test exp/tri3/decode
echo ============================================================================
echo "                        SGMM2 Training & Decoding                         "
echo ============================================================================
steps/align_fmllr.sh --nj "$train_nj" --cmd "$train_cmd" \
 data/train data/lang exp/tri3 exp/tri3_ali
steps/train_ubm.sh --cmd "$train_cmd" \
 $numGaussUBM data/train data/lang exp/tri3_ali exp/ubm4
steps/train_sgmm2.sh --cmd "$train_cmd" $numLeavesSGMM $numGaussSGMM \
 data/train data/lang exp/tri3_ali exp/ubm4/final.ubm exp/sgmm2_4
utils/mkgraph.sh data/lang_test exp/sgmm2_4 exp/sgmm2_4/graph
steps/decode_sgmm2.sh --nj "$decode_nj" --cmd "$decode_cmd"\
 --transform-dir exp/tri3/decode exp/sgmm2_4/graph data/test \
 exp/sgmm2_4/decode
echo ============================================================================
echo "                    MMI + SGMM2 Training & Decoding                       "
echo ============================================================================
# Discriminative (boosted MMI) training on top of SGMM2; the decode is
# rescored at four MMI iterations below.
steps/align_sgmm2.sh --nj "$train_nj" --cmd "$train_cmd" \
 --transform-dir exp/tri3_ali --use-graphs true --use-gselect true \
 data/train data/lang exp/sgmm2_4 exp/sgmm2_4_ali
steps/make_denlats_sgmm2.sh --nj "$train_nj" --sub-split "$train_nj" \
 --acwt 0.2 --lattice-beam 10.0 --beam 18.0 \
 --cmd "$decode_cmd" --transform-dir exp/tri3_ali \
 data/train data/lang exp/sgmm2_4_ali exp/sgmm2_4_denlats
steps/train_mmi_sgmm2.sh --acwt 0.2 --cmd "$decode_cmd" \
 --transform-dir exp/tri3_ali --boost 0.1 --drop-frames true \
 data/train data/lang exp/sgmm2_4_ali exp/sgmm2_4_denlats exp/sgmm2_4_mmi_b0.1
for iter in 1 2 3 4; do
 steps/decode_sgmm2_rescore.sh --cmd "$decode_cmd" --iter $iter \
  --transform-dir exp/tri3/decode data/lang_test data/test \
  exp/sgmm2_4/decode exp/sgmm2_4_mmi_b0.1/decode_it$iter
done
echo ============================================================================
echo "                    DNN Hybrid Training & Decoding (Karel's recipe)       "
echo ============================================================================
local/nnet/run_dnn.sh
echo ============================================================================
echo "Finished Successfully"
echo ============================================================================
exit 0
|
<filename>pipeline_plugins/tests/components/utils/test_cc_get_ips_info_by_str.py
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from mock import MagicMock, patch
from django.test import TestCase
from pipeline_plugins.components.utils.sites.open.utils import cc_get_ips_info_by_str
class CCGetIPsInfoByStrTestCase(TestCase):
    """Tests for cc_get_ips_info_by_str covering its three ip_str formats.

    Each test feeds four IPs of which only the first three exist in the
    mocked CMDB topology, so "4.4.4.4" must always be reported invalid.
    The original tests repeated the same ~60-line fixture and assertion
    block three times; the fixture and checks are factored into helpers.
    """

    # Mocked CMDB topology: hosts 1..3 with matching module_N / set_N entries.
    GET_BUSINESS_HOST_TOPO_RETURN = [
        {
            "host": {"bk_host_id": i, "bk_host_innerip": ip, "bk_cloud_id": 0},
            "module": [{"bk_module_id": i, "bk_module_name": "module_%s" % i}],
            "set": [{"bk_set_id": i, "bk_set_name": "set_%s" % i}],
        }
        for i, ip in enumerate(("1.1.1.1", "2.2.2.2", "3.3.3.3"), 1)
    ]

    def setUp(self):
        self.username = "tester"
        self.biz_cc_id = 123
        self.supplier_account = "test_supplier_account"
        self.maxDiff = None

    def _call_with_mocked_cmdb(self, ip_str):
        """Invoke cc_get_ips_info_by_str with the CMDB and supplier lookups
        mocked, verify they were queried exactly once with the four IPs
        parsed out of ip_str, and return the function's result dict."""
        cmdb = MagicMock()
        cmdb.get_business_host_topo = MagicMock(return_value=self.GET_BUSINESS_HOST_TOPO_RETURN)
        supplier_account_for_business = MagicMock(return_value=self.supplier_account)
        with patch(
            "pipeline_plugins.components.utils.sites.open.utils.supplier_account_for_business",
            supplier_account_for_business,
        ):
            with patch("pipeline_plugins.components.utils.sites.open.utils.cmdb", cmdb):
                result = cc_get_ips_info_by_str(username=self.username, biz_cc_id=self.biz_cc_id, ip_str=ip_str)
        # Mock call records persist after the patches are removed.
        supplier_account_for_business.assert_called_once_with(self.biz_cc_id)
        cmdb.get_business_host_topo.assert_called_once_with(
            username=self.username,
            bk_biz_id=self.biz_cc_id,
            supplier_account=self.supplier_account,
            host_fields=["bk_host_innerip", "bk_host_id", "bk_cloud_id"],
            ip_list=["1.1.1.1", "2.2.2.2", "3.3.3.3", "4.4.4.4"],
        )
        return result

    @staticmethod
    def _expected_ip_result(with_set_module=False):
        """Expected ip_result entries for hosts 1..3; when with_set_module is
        True, includes the flattened Set*/Module* fields produced by the
        "set|module|ip" input format."""
        expected = []
        for i, ip in enumerate(("1.1.1.1", "2.2.2.2", "3.3.3.3"), 1):
            entry = {
                "InnerIP": ip,
                "HostID": i,
                "Source": 0,
                "Sets": [{"bk_set_id": i, "bk_set_name": "set_%s" % i}],
                "Modules": [{"bk_module_id": i, "bk_module_name": "module_%s" % i}],
            }
            if with_set_module:
                entry.update(
                    {
                        "SetID": i,
                        "SetName": "set_%s" % i,
                        "ModuleID": i,
                        "ModuleName": "module_%s" % i,
                    }
                )
            expected.append(entry)
        return expected

    def _assert_result(self, result, with_set_module=False):
        """Common assertions: success flag, per-host entries, counts and the
        single invalid IP."""
        self.assertEqual(result["result"], True)
        self.assertEqual(result["ip_result"], self._expected_ip_result(with_set_module))
        self.assertEqual(result["ip_count"], 3)
        self.assertEqual(result["invalid_ip"], ["4.4.4.4"])

    def test_ip_format(self):
        # Plain "ip" entries, separated by commas and newlines.
        result = self._call_with_mocked_cmdb("1.1.1.1,2.2.2.2\n3.3.3.3,4.4.4.4")
        self._assert_result(result)

    def test_set_module_format(self):
        # "set|module|ip" entries: the matching set/module are flattened into
        # SetID/SetName/ModuleID/ModuleName on each result entry.
        ip_str = "set_1|module_1|1.1.1.1,set_2|module_2|2.2.2.2\nset_3|module_3|3.3.3.3,set_3|module_3|4.4.4.4"
        result = self._call_with_mocked_cmdb(ip_str)
        self._assert_result(result, with_set_module=True)

    def test_cloud_ip_format(self):
        # "cloud_id:ip" entries; output matches the plain format.
        result = self._call_with_mocked_cmdb("0:1.1.1.1,0:2.2.2.2\n0:3.3.3.3,0:4.4.4.4")
        self._assert_result(result)
|
package cc.sfclub.core;
import cc.sfclub.util.common.JsonConfig;
import lombok.Getter;
import lombok.Setter;
@SuppressWarnings("all")
@Getter
public class CoreCfg extends JsonConfig {
    // Prefix that marks a message as a command -- presumably for the chat bot;
    // TODO confirm against the command parser.
    private String commandPrefix = "!p";
    // Default locale identifier used for localized messages.
    private String locale = "en_US";
    // Schema version of this config file, compared against Core.CONFIG_VERSION.
    private int config_version = Core.CONFIG_VERSION;
    // Enables debug behavior by default.
    private boolean debug = true;
    // Display name and version string of the application.
    private String name = "Polar";
    private String version = "v4-production";
    // When true the database is reset -- NOTE(review): defaulting this to true
    // looks dangerous for production; confirm intended default.
    @Setter
    private boolean resetDatabase = true;

    // Stores the config in the current directory (JsonConfig base path ".").
    public CoreCfg() {
        super(".");
    }
}
|
<gh_stars>1-10
package types
// Transaction pairs a transaction hash with its associated metadata,
// serialized with lower-camel-case JSON keys.
type Transaction struct {
	TxHash string `json:"txHash"` // transaction hash (format not constrained here)
	Meta Meta `json:"meta"` // see the Meta type for the metadata fields
}
|
<filename>src/main/java/com/rondhuit/w2v/WordVectors.java
package com.rondhuit.w2v;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.AbstractMap;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
public class WordVectors implements Serializable {

  private static final long serialVersionUID = -272309887639936333L;

  public transient final Charset ENCODING = Charset.forName("UTF-8");

  private int words, size;   // vocabulary size and vector dimensionality
  private String[] vocab;    // vocab[i] is the surface form of word i
  private float[][] matrix;  // matrix[i] is the L2-normalized vector of word i

  /**
   * Builds the word-vector matrix from a trained model, normalizing every
   * row to unit length so that dot products are cosine similarities.
   *
   * @param layer1Size dimensionality of each word vector
   * @param vocabSize  number of words in the vocabulary
   * @param _vocab     trained vocabulary entries; only the word strings are kept
   * @param syn0       flat weight array of length vocabSize * layer1Size
   */
  public WordVectors(int layer1Size, int vocabSize, VocabWord[] _vocab,
      double[] syn0) {
    words = vocabSize;
    size = layer1Size;
    vocab = new String[words];
    matrix = new float[words][];
    for (int i = 0; i < words; i++) {
      vocab[i] = _vocab[i].word;
      matrix[i] = new float[size];
      double len = 0;
      for (int j = 0; j < size; j++) {
        matrix[i][j] = (float) syn0[i * layer1Size + j];
        len += matrix[i][j] * matrix[i][j];
      }
      // Normalize the row to unit L2 length.
      len = Math.sqrt(len);
      for (int j = 0; j < size; j++) {
        matrix[i][j] /= len;
      }
    }
  }

  /** @return dimensionality of each word vector */
  public int getSize() {
    return size;
  }

  /** @return number of words in the vocabulary */
  public int getNumWords() {
    return words;
  }

  /** @return surface form of the word at the given vocabulary index */
  public String getWord(int idx) {
    return vocab[idx];
  }

  /** @return element (row, column) of the normalized vector matrix */
  public float getMatrixElement(int row, int column) {
    return matrix[row][column];
  }

  /**
   * Linear-searches the vocabulary for the given word.
   *
   * @return the word with its (re-)normalized vector and vocabulary
   *         position, or {@code null} if the word is not in the vocabulary
   */
  public Word getWord(String input) {
    // linear search the input word in vocabulary
    float[] vec = null;
    int bi = -1;
    double len = 0;
    for (int i = 0; i < words; i++) {
      if (input.equals(getWord(i))) {
        bi = i;
        System.out.printf("\nWord: %s Position in vocabulary: %d\n",
            input, bi);
        vec = new float[size];
        for (int j = 0; j < size; j++) {
          vec[j] = getMatrixElement(bi, j);
          len += vec[j] * vec[j];
        }
        // Fix: stop at the first match. The original kept scanning the whole
        // vocabulary; if the word appeared twice, `len` accumulated across
        // matches and corrupted the normalization below.
        break;
      }
    }
    if (vec == null) {
      return null;
    }
    len = Math.sqrt(len);
    for (int i = 0; i < size; i++) {
      vec[i] /= len;
    }
    return new Word(input, vec, new int[]{bi});
  }

  /**
   * Returns every other vocabulary word paired with its cosine similarity
   * to the query, ordered by descending similarity.
   */
  public SortedSet<Map.Entry<Double,Word>> search(Word word) {
    SortedSet<Map.Entry<Double,Word>> sortedset = new TreeSet<Map.Entry<Double, Word>>(
        new Comparator<Map.Entry<Double, Word>>() {
          @Override
          public int compare(Entry<Double, Word> o1,
              Entry<Double, Word> o2) {
            int byDistance = o2.getKey().compareTo(o1.getKey());  // descending
            if (byDistance != 0) {
              return byDistance;
            }
            // Fix: the original compared keys only, so the TreeSet silently
            // dropped distinct words whose similarity happened to be equal.
            // Tie-break on entry identity to keep all distinct entries.
            return Integer.compare(System.identityHashCode(o1), System.identityHashCode(o2));
          }
        });
    for (int i = 0; i < words; i++) {
      if( word.contains(i)) {
        continue;  // skip the query word(s) themselves
      }
      // Dot product of unit vectors == cosine similarity.
      double dist = 0;
      for (int j = 0; j < size; j++) {
        dist += word.vec[j] * getMatrixElement(i, j);
      }
      Word w = new Word(vocab[i], matrix[i], null);
      SimpleEntry<Double, Word> e = new AbstractMap.SimpleEntry<Double, Word>(dist, w);
      sortedset.add(e);
    }
    return sortedset;
  }
}
|
<gh_stars>100-1000
require 'shoryuken'
module Rollbar
  module Delay
    # Following class allows to send rollbars using Sho-ryu-ken as a background
    # jobs processor. See the queue_name method which states that your queues
    # needs to be names as "rollbar_ENVIRONMENT". Retry intervals will be used
    # to retry sending the same message again if failed before.
    class Shoryuken
      include ::Shoryuken::Worker

      class << self
        # Queue name this worker consumes from; defaults to
        # "rollbar_<environment>" but can be overridden by assignment.
        attr_accessor :queue
      end
      self.queue = "rollbar_#{Rollbar.configuration.environment}"

      # Entry point used by Rollbar's async handler: enqueues the payload.
      def self.call(payload)
        new.call(payload, :queue => queue)
      end

      # Enqueues the payload for background processing via Shoryuken.
      def call(payload, options = {})
        self.class.perform_async(payload, options)
      end

      # not allowing bulk, to not double-report rollbars if one of them failed in bunch.
      shoryuken_options :auto_delete => true,
                        :body_parser => :json,
                        :retry_intervals => [60, 180, 360, 120_0, 360_0, 186_00]

      # Worker body: hands the already-parsed payload back to Rollbar.
      def perform(_sqs_message, payload)
        Rollbar.process_from_async_handler(payload)
      end
    end
  end
end
|
<reponame>SebFalque/qwester
# This migration comes from qwester (originally 20121126152146)
# Adds a free-form text `rule` column to the qwester_rule_sets table.
class AddRuleToRuleSets < ActiveRecord::Migration
  def change
    add_column :qwester_rule_sets, :rule, :text
  end
end
|
<reponame>lifeomic/abac<filename>test/enforceLenient.test.js
'use strict';
import {enforceLenient} from '../dist';
import test from 'ava';
test('Partially evaluated policy should enforce properly', t => {
  // Policy with unconditionally-allowed operations (true) and
  // attribute-conditional operations (rule arrays).
  const policy = {
    rules: {
      accessAdmin: true,
      readData: true,
      deleteData: [
        {
          'resource.dataset': {
            comparison: 'equals',
            value: 'project'
          }
        }
      ],
      updateData: [
        {
          'resource.ownerId': {
            comparison: 'equals',
            target: 'user.id'
          }
        }
      ]
    }
  };
  t.true(enforceLenient('accessAdmin', policy));
  t.true(enforceLenient('readData', policy));
  t.falsy(enforceLenient('billingAdmin', policy));
  // updateData and deleteData are allowed, because
  // the rules allow them for some attributes, so
  // in the absence of the full attribute set
  // the best we can do is allow it. This is why
  // enforceLenient shouldn't be used for actually
  // securing access, but is fine for a client application.
  t.true(enforceLenient('updateData', policy));
  t.true(enforceLenient('deleteData', policy));
  // Given full information enforceLenient does give correct answers:
  t.falsy(enforceLenient('updateData', policy, {resource: {ownerId: 'john'}, user: {id: 'jane'}}));
  t.true(enforceLenient('deleteData', policy, {resource: {dataset: 'project'}}));
  t.falsy(enforceLenient('deleteData', policy, {resource: {dataset: 'project2'}}));
});

test('returns false for invalid operation names', t => {
  // Unknown operations are denied rather than throwing.
  t.false(enforceLenient('not-an-operation', {rules: {}}));
});

test('returns false for invalid policy', t => {
  // A malformed attribute path in a rule must cause denial, not an error.
  const policy = {
    rules: {
      readData: {
        '?!*bogus*!?': {
          comparison: 'equals',
          value: 'test'
        }
      }
    }
  };
  t.false(enforceLenient('readData', policy, {}));
});
|
<gh_stars>1-10
import nock from "nock";
import dotenv from "dotenv";
import "@testing-library/jest-dom";
// Load test-only environment variables (NEXT_PUBLIC_API_URL etc.).
dotenv.config({ path: ".env.test" });

// Intercept GET /users on the API host and return a canned reqres.in-style
// page-2 payload so tests never hit the network.
// NOTE(review): a nock interceptor satisfies a single matching request by
// default -- add .persist() if more than one test issues GET /users.
beforeAll(() => {
  nock(`${process.env.NEXT_PUBLIC_API_URL}`)
    .get("/users")
    .reply(200, {
      page: 2,
      per_page: 6,
      total: 12,
      total_pages: 2,
      data: [
        {
          id: 7,
          email: "<EMAIL>",
          first_name: "Michael",
          last_name: "Lawson",
          avatar: "https://reqres.in/img/faces/7-image.jpg",
        },
        {
          id: 8,
          email: "<EMAIL>",
          first_name: "Lindsay",
          last_name: "Ferguson",
          avatar: "https://reqres.in/img/faces/8-image.jpg",
        },
        {
          id: 9,
          email: "<EMAIL>",
          first_name: "Tobias",
          last_name: "Funke",
          avatar: "https://reqres.in/img/faces/9-image.jpg",
        },
        {
          id: 10,
          email: "<EMAIL>",
          first_name: "Byron",
          last_name: "Fields",
          avatar: "https://reqres.in/img/faces/10-image.jpg",
        },
        {
          id: 11,
          email: "<EMAIL>",
          first_name: "George",
          last_name: "Edwards",
          avatar: "https://reqres.in/img/faces/11-image.jpg",
        },
        {
          id: 12,
          email: "<EMAIL>",
          first_name: "Rachel",
          last_name: "Howell",
          avatar: "https://reqres.in/img/faces/12-image.jpg",
        },
      ],
      support: {
        url: "https://reqres.in/#support-heading",
        text: "To keep ReqRes free, contributions towards server costs are appreciated!",
      },
    });
});

// Remove any remaining interceptors so suites don't leak into each other.
afterAll(() => {
  nock.cleanAll();
});
|
<reponame>midasplatform/MidasClient
/******************************************************************************
* Copyright 2011 Kitware Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "MidasTreeView.h"
#include "MidasTreeItem.h"
#include "MidasCommunityTreeItem.h"
#include "MidasCollectionTreeItem.h"
#include "MidasItemTreeItem.h"
#include "MidasBitstreamTreeItem.h"
#include "MidasTreeModel.h"
#include "midasSynchronizer.h"
#include <QModelIndex>
#include <QtGui>
// Constructs the view with no synchronizer attached yet.
MidasTreeView::MidasTreeView(QWidget* parent)
  : MidasTreeViewBase(parent), m_Synch(NULL)
{
}

MidasTreeView::~MidasTreeView()
{
}

// Stores the synchronizer and forwards it to the model.
// Note: the synchronizer is not owned by this view.
void MidasTreeView::SetSynchronizer(midasSynchronizer* synch)
{
  m_Synch = synch;
  m_Model->SetSynchronizer(synch);
}

// Reacts to a selection change: fetches data for the first newly selected
// item and notifies listeners; emits MidasNoTreeItemSelected when the
// selection became empty or invalid.
void MidasTreeView::UpdateSelection(const QItemSelection & selected,
                                    const QItemSelection & deselected)
{
  (void)deselected;
  QModelIndexList items = selected.indexes();
  if( items.size() > 0 && items.first().isValid() )
    {
    // const_cast: the model hands out const items, but FetchItemData
    // needs to populate the item in place.
    MidasTreeItem* item = const_cast<MidasTreeItem *>(
        m_Model->GetMidasTreeItem(items.first() ) );
    this->FetchItemData(item);
    emit MidasTreeItemSelected(item);
    }
  else
    {
    emit MidasNoTreeItemSelected();
    }
  // Always signal completion so callers waiting on the fetch can proceed.
  emit FetchedSelectedResource();
}
// Returns true when the tree currently has at least one selected index.
bool MidasTreeView::IsModelIndexSelected() const
{
  return !this->selectionModel()->selectedIndexes().isEmpty();
}
// Returns the first selected index, or an invalid QModelIndex when
// nothing is selected.
const QModelIndex MidasTreeView::GetSelectedModelIndex() const
{
  const QModelIndexList indexes = this->selectionModel()->selectedIndexes();
  if( indexes.isEmpty() )
    {
    return QModelIndex();
    }
  return indexes.first();
}
const MidasTreeItem* MidasTreeView::GetSelectedMidasTreeItem() const
{
return m_Model->GetMidasTreeItem(this->GetSelectedModelIndex() );
}
void MidasTreeView::Update()
{
this->Clear();
this->Initialize();
}
void MidasTreeView::Clear()
{
disconnect(this);
m_Model->Clear();
this->reset();
}
void MidasTreeView::Initialize()
{
m_Model->Populate();
m_Model->RestoreExpandedState();
}
void MidasTreeView::DecorateByUuid(std::string uuid)
{
m_Model->DecorateByUuid(uuid);
}
void MidasTreeView::contextMenuEvent(QContextMenuEvent* event)
{
emit MidasTreeViewContextMenu(event);
event->accept();
}
void MidasTreeView::dragEnterEvent(QDragEnterEvent* event)
{
if( event && event->mimeData() )
{
const QMimeData* md = event->mimeData();
if( md->hasUrls() || md->hasFormat(m_AcceptMimeType) )
{
event->acceptProposedAction();
}
}
}
// Intentionally empty: leaving the drag area requires no action.
void MidasTreeView::dragLeaveEvent(QDragLeaveEvent *)
{
}
// Intentionally empty: drag-move needs no per-position feedback here.
void MidasTreeView::dragMoveEvent(QDragMoveEvent *)
{
}
// Intentionally empty: subclasses override to handle drops.
void MidasTreeView::dropEvent(QDropEvent *)
{
}
// Intentionally empty: double-click is handled elsewhere (or ignored).
void MidasTreeView::mouseDoubleClickEvent(QMouseEvent *)
{
}
// Records the left-button press position so mouseMoveEvent() can decide
// when the cursor has moved far enough to start a drag.
void MidasTreeView::mousePressEvent(QMouseEvent* event)
{
  if( event->button() == Qt::LeftButton )
    {
    m_DragStart = event->pos();
    }
  QTreeView::mousePressEvent(event);
}
// Starts a drag once the left button has moved beyond the platform's drag
// threshold.  The dragged payload is "<type> <id>" of the resource under
// the original press position, tagged with m_MimeType.
void MidasTreeView::mouseMoveEvent(QMouseEvent* event)
{
  if( !(event->buttons() & Qt::LeftButton) )
    {
    return;
    }
  // Ignore jitter: require the platform-defined minimum drag distance.
  if( (event->pos() - m_DragStart).manhattanLength()
      < QApplication::startDragDistance() )
    {
    return;
    }
  QModelIndex index = this->indexAt(m_DragStart);
  if( !index.isValid() )
    {
    event->setAccepted(false);
    return;
    }
  MidasTreeItem* resource =
    const_cast<MidasTreeItem *>(m_Model->GetMidasTreeItem(index) );
  // QDrag takes ownership of mimeData once setMimeData() is called.
  QDrag* drag = new QDrag(this);
  QMimeData* mimeData = new QMimeData;
  std::stringstream data;
  data << resource->GetType() << " " << resource->GetId();
  // NOTE(review): QString::toAscii() and QDrag::start() are Qt4-era APIs
  // (replaced by toLatin1()/exec() in Qt5) -- confirm the targeted Qt version.
  mimeData->setData(m_MimeType, QString(data.str().c_str() ).toAscii() );
  drag->setPixmap(resource->GetDecoration() );
  drag->setMimeData(mimeData);
  drag->start();
}
// Intentionally empty: subclasses override to react to resource creation.
void MidasTreeView::AddResource(mdo::Object *)
{
}
// Intentionally empty: subclasses override to react to resource updates.
void MidasTreeView::UpdateResource(mdo::Object *)
{
}
// Intentionally empty: subclasses override to react to resource deletion.
void MidasTreeView::DeleteResource(mdo::Object *)
{
}
|
import { Component } from '@angular/core';
import { HttpClient } from '@angular/common/http';
@Component({
selector: 'app-root',
template:
<h1>Tasks & Resources</h1>
<h2>Tasks</h2>
<ul>
<li *ngFor="let task of tasks">{{task.name}}</li>
</ul>
<h2>Resources</h2>
<ul>
<li *ngFor="let resource of resources">{{resource.name}}</li>
</ul>
<h2>Colleagues</h2>
<ul>
<li *ngFor="let colleague of colleagues">{{colleague.name}}</li>
</ul>
})
export class AppComponent {
tasks = [];
resources = [];
colleagues = [];
constructor(private http: HttpClient) {
this.fetchData();
}
private async fetchData() {
const tasksResponse = await this.http.get('tasks-url');
this.tasks = tasksResponse['data'];
const resourcesResponse = await this.http.get('resources-url');
this.resources = resourcesResponse['data'];
const colleaguesResponse = await this.http.get('colleagues-url');
this.colleagues = colleaguesResponse['data'];
}
} |
# include <dotted.path>
# Sources ${projectDir}bash-toolbox/<path>.sh, translating dots in the
# argument into directory separators (e.g. `include foo.bar` sources
# ${projectDir}bash-toolbox/foo/bar.sh).
include(){
  # Quote the expansion so paths containing spaces or glob characters work.
  source "${projectDir}bash-toolbox/${1//\./\/}.sh"
}
<filename>components/media-cta.js
import React from 'react'
import PropTypes from 'prop-types'
import classNames from 'classnames'
import stylesheet from './media-cta.scss'
export const MediaCTA = ({
title,
content,
button,
icon = null,
isDark = false,
isLeft = false,
isRight = false
}) => {
const MediaCTAClass = classNames({
'MediaCTA__card': true,
'MediaCTA__card--is-dark': isDark,
'MediaCTA__card--is-left': isLeft,
'MediaCTA__card--is-right': isRight,
[icon]: icon
})
const MediaCTAButtonWrapperClass = classNames({
'MediaCTA__button-wrapper': true,
'MediaCTA__button-wrapper--is-left': isLeft,
'MediaCTA__button-wrapper--is-right': isRight
})
return (
<article key={title} className={MediaCTAClass}>
<style dangerouslySetInnerHTML={{ __html: stylesheet }} />
<h2 className="MediaCTA__card--title">{title}</h2>
<div dangerouslySetInnerHTML={{ __html: content }} />
<div className={MediaCTAButtonWrapperClass}>
{button}
</div>
</article>
)
}
MediaCTA.propTypes = {
  title: PropTypes.string.isRequired,
  content: PropTypes.string.isRequired,
  button: PropTypes.arrayOf(PropTypes.element),
  icon: PropTypes.string,
  isDark: PropTypes.bool,
  isLeft: PropTypes.bool,
  isRight: PropTypes.bool
}

// defaultProps must be a plain object keyed by prop name; the original
// assigned an array, which React ignores.  The values mirror the defaults
// already declared in the destructured parameters.
MediaCTA.defaultProps = {
  icon: null,
  isDark: false,
  isLeft: false,
  isRight: false
}
|
package uk.ac.cam.ch.wwmm.opsin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import static uk.ac.cam.ch.wwmm.opsin.XmlDeclarations.*;
/**The rules by which words are grouped together (e.g. in functional class nomenclature)
*
* @author dl387
*
*/
class WordRules {
/**The wordRules themselves.*/
private final List<WordRuleDescription> wordRuleList;
enum EndsWithGroup {
acid,
ateGroup;
}
private static final Pattern icOrOusAcid = Pattern.compile("(ic|ous)([ ]?acid)?$");
private static final Pattern ateOrIteOrAmide = Pattern.compile("(at|it|amid)e?$");
/**
* Describes a word that a wordRule is looking for
* @author dl387
*
*/
private static class WordDescription {
/**Whether the word is a full word, substituent word or functionalTerm word*/
private final WordType type;
/**A group with a hardcoded method for efficient detection */
private final EndsWithGroup endsWithGroup;
/**A case insensitive pattern which attempts to match the end of the String value of the word*/
private final Pattern endsWithPattern;
/**The case insensitive String value of the word */
private final String value;
/** Only applicable for functionalTerms. The string value of the functionalTerm's type attribute*/
private final String functionalGroupType;
/** Only applicable for functionalTerms. The string value of the functionalTerm's subType attribute*/
private final String functionalGroupSubType;
/** The value of the type attribute of the last group element in the word e.g. maybe aminoAcid*/
private final String endsWithGroupType;
/** The value of the subType attribute of the last group element in the word e.g. maybe elementaryAtom*/
private final String endsWithGroupSubType;
/**
* Makes a description of a word to looks for
* @param reader
*/
WordDescription(XMLStreamReader reader){
WordType type = null;
String value = null;
EndsWithGroup endsWithGroup = null;
Pattern endsWithPattern = null;
String functionalGroupType = null;
String functionalGroupSubType = null;
String endsWithGroupType = null;
String endsWithGroupSubType = null;
for (int i = 0, l = reader.getAttributeCount(); i < l; i++) {
String atrValue = reader.getAttributeValue(i);
switch (reader.getAttributeLocalName(i)) {
case "type":
type = WordType.valueOf(atrValue);
break;
case "value":
value = atrValue;
break;
case "functionalGroupType":
functionalGroupType = atrValue;
break;
case "functionalGroupSubType":
functionalGroupSubType = atrValue;
break;
case "endsWith":
endsWithGroup = EndsWithGroup.valueOf(atrValue);
break;
case "endsWithRegex":
endsWithPattern = Pattern.compile(atrValue +"$", Pattern.CASE_INSENSITIVE);
break;
case "endsWithGroupType":
endsWithGroupType = atrValue;
break;
case "endsWithGroupSubType":
endsWithGroupSubType = atrValue;
break;
default:
break;
}
}
if (type == null) {
throw new RuntimeException("Malformed wordRule, no type specified");
}
this.type = type;
this.endsWithGroup = endsWithGroup;
this.endsWithPattern = endsWithPattern;
this.value = value;
this.functionalGroupType = functionalGroupType;
this.functionalGroupSubType = functionalGroupSubType;
this.endsWithGroupType = endsWithGroupType;
this.endsWithGroupSubType = endsWithGroupSubType;
}
WordType getType() {
return type;
}
EndsWithGroup getEndsWithGroup() {
return endsWithGroup;
}
Pattern getEndsWithPattern() {
return endsWithPattern;
}
String getValue() {
return value;
}
String getFunctionalGroupType() {
return functionalGroupType;
}
String getFunctionalGroupSubType() {
return functionalGroupSubType;
}
String getEndsWithGroupType() {
return endsWithGroupType;
}
String getEndsWithGroupSubType() {
return endsWithGroupSubType;
}
}
/**
* A representation of a wordRule element from wordRules.xml
* @author dl387
*
*/
private static class WordRuleDescription {
private final List<WordDescription> wordDescriptions;
private final WordRule ruleName;
private final WordType ruleType;
List<WordDescription> getWordDescriptions() {
return wordDescriptions;
}
WordRule getRuleName() {
return ruleName;
}
WordType getRuleType() {
return ruleType;
}
/**
* Creates a wordRule from a wordRule element found in wordRules.xml
* @param reader
* @throws XMLStreamException
*/
WordRuleDescription(XMLStreamReader reader) throws XMLStreamException {
List<WordDescription> wordDescriptions = new ArrayList<>();
ruleName = WordRule.valueOf(reader.getAttributeValue(null, "name"));
ruleType = WordType.valueOf(reader.getAttributeValue(null,"type"));
while (reader.hasNext()) {
int event = reader.next();
if (event == XMLStreamConstants.START_ELEMENT) {
if (reader.getLocalName().equals("word")) {
wordDescriptions.add(new WordDescription(reader));
}
}
else if (event == XMLStreamConstants.END_ELEMENT) {
if (reader.getLocalName().equals("wordRule")) {
break;
}
}
}
this.wordDescriptions = Collections.unmodifiableList(wordDescriptions);
}
}
/**Initialises the WordRules.
* @param resourceGetter
* @throws IOException
*/
WordRules(ResourceGetter resourceGetter) throws IOException {
List<WordRuleDescription> wordRuleList = new ArrayList<>();
XMLStreamReader reader = resourceGetter.getXMLStreamReader("wordRules.xml");
try {
while (reader.hasNext()) {
if (reader.next() == XMLStreamConstants.START_ELEMENT &&
reader.getLocalName().equals("wordRule")) {
wordRuleList.add(new WordRuleDescription(reader));
}
}
}
catch (XMLStreamException e) {
throw new IOException("Parsing exception occurred while reading wordRules.xml", e);
}
finally {
try {
reader.close();
} catch (XMLStreamException e) {
throw new IOException("Parsing exception occurred while reading wordRules.xml", e);
}
}
this.wordRuleList = Collections.unmodifiableList(wordRuleList);
}
/**Takes a molecule element and places the word elements into wordRule elements
* @param moleculeEl A molecule element with word children
* @param n2sConfig
* @param allowSpaceRemoval
* @param componentRatios
* @throws ParsingException
*/
void groupWordsIntoWordRules(Element moleculeEl, NameToStructureConfig n2sConfig, boolean allowSpaceRemoval, Integer[] componentRatios) throws ParsingException {
WordRulesInstance instance = new WordRulesInstance(moleculeEl, n2sConfig, allowSpaceRemoval, componentRatios);
List<Element> wordEls = moleculeEl.getChildElements(WORD_EL);
//note that multiple words in wordEls may be later replaced by a wordRule element
for (int i = 0; i <wordEls.size(); i++) {
if (instance.matchWordRule(wordEls, i)) {
i=-1;//if function did something
}
}
List<Element> wordRuleEls = moleculeEl.getChildElements();
for (Element wordRuleEl : wordRuleEls) {
if (!wordRuleEl.getName().equals(WORDRULE_EL)){
throw new ParsingException("Unable to assign wordRule to: " + wordRuleEl.getAttributeValue(VALUE_ATR));
}
}
}
private class WordRulesInstance {
private final Element moleculeEl;
private final boolean allowRadicals;
private final boolean allowSpaceRemoval;
private final Integer expectedNumOfComponents;
WordRulesInstance(Element moleculeEl, NameToStructureConfig n2sConfig, boolean allowSpaceRemoval, Integer[] componentRatios) {
this.moleculeEl = moleculeEl;
this.allowRadicals = n2sConfig.isAllowRadicals();
this.allowSpaceRemoval = allowSpaceRemoval;
this.expectedNumOfComponents = componentRatios != null ? componentRatios.length : null;
}
private boolean matchWordRule(List<Element> wordEls, int indexOfFirstWord) throws ParsingException {
wordRuleLoop: for (WordRuleDescription wordRuleDesc : wordRuleList) {
int i = indexOfFirstWord;
List<WordDescription> wordDescriptions = wordRuleDesc.getWordDescriptions();
int wordsInWordRule = wordDescriptions.size();
if (i + wordsInWordRule <= wordEls.size()) {//need sufficient words to match the word rule
for (int j = 0; j < wordsInWordRule; j++) {
Element wordEl = wordEls.get(i + j);
WordDescription wd = wordDescriptions.get(j);
if (!wd.getType().toString().equals(wordEl.getAttributeValue(TYPE_ATR))){
continue wordRuleLoop;//type mismatch;
}
String functionalGroupTypePredicate = wd.getFunctionalGroupType();
String functionalGroupSubTypePredicate = wd.getFunctionalGroupSubType();
if (functionalGroupTypePredicate != null || functionalGroupSubTypePredicate != null) {
if (!WordType.functionalTerm.toString().equals(wordEl.getAttributeValue(TYPE_ATR))){
continue wordRuleLoop;
}
Element lastEl = getLastElementInWord(wordEl);
if (lastEl == null) {
throw new ParsingException("OPSIN Bug: Cannot find the functional element in a functionalTerm");
}
while (lastEl.getName().equals(CLOSEBRACKET_EL) || lastEl.getName().equals(STRUCTURALCLOSEBRACKET_EL)) {
lastEl = OpsinTools.getPreviousSibling(lastEl);
if (lastEl == null) {
throw new ParsingException("OPSIN Bug: Cannot find the functional element in a functionalTerm");
}
}
if (functionalGroupTypePredicate != null && !functionalGroupTypePredicate.equals(lastEl.getAttributeValue(TYPE_ATR))) {
continue wordRuleLoop;
}
if (functionalGroupSubTypePredicate != null && !functionalGroupSubTypePredicate.equals(lastEl.getAttributeValue(SUBTYPE_ATR))) {
continue wordRuleLoop;
}
}
EndsWithGroup endsWithGroupPredicate = wd.getEndsWithGroup();
if (endsWithGroupPredicate != null && !endsWithGroupPredicateSatisfied(wordEl, endsWithGroupPredicate)) {
continue wordRuleLoop;
}
String valuePredicate = wd.getValue();
if (valuePredicate != null && !wordEl.getAttributeValue(VALUE_ATR).toLowerCase(Locale.ROOT).equals(valuePredicate)){//word string contents mismatch
continue wordRuleLoop;
}
Pattern endsWithPatternPredicate = wd.getEndsWithPattern();
if (endsWithPatternPredicate != null) {
if (!endsWithPatternPredicate.matcher(wordEl.getAttributeValue(VALUE_ATR)).find()){
continue wordRuleLoop;
}
}
String endsWithGroupTypePredicate = wd.getEndsWithGroupType();
if (endsWithGroupTypePredicate != null) {
Element lastGroupInWordRule = getLastGroupInWordRule(wordEl);
if (lastGroupInWordRule == null || !endsWithGroupTypePredicate.equals(lastGroupInWordRule.getAttributeValue(TYPE_ATR))){
continue wordRuleLoop;
}
}
String endsWithSubGroupTypePredicate = wd.getEndsWithGroupSubType();
if (endsWithSubGroupTypePredicate != null) {
Element lastGroupInWordRule = getLastGroupInWordRule(wordEl);
if (lastGroupInWordRule == null || !endsWithSubGroupTypePredicate.equals(lastGroupInWordRule.getAttributeValue(SUBTYPE_ATR))){
continue wordRuleLoop;
}
}
}
//Word Rule matches!
Element wordRuleEl = new GroupingEl(WORDRULE_EL);
WordRule wordRule = wordRuleDesc.getRuleName();
wordRuleEl.addAttribute(new Attribute(TYPE_ATR, wordRuleDesc.getRuleType().toString()));
wordRuleEl.addAttribute(new Attribute(WORDRULE_EL, wordRule.toString()));
/*
* Some wordRules can not be entirely processed at the structure building stage
*/
switch (wordRule) {
case functionGroupAsGroup:
//convert the functional term into a full term
Element functionalWord = wordEls.get(i + wordsInWordRule -1);
if (!functionalWord.getAttributeValue(TYPE_ATR).equals(FUNCTIONALTERM_EL) || wordsInWordRule>2){
throw new ParsingException("OPSIN bug: Problem with functionGroupAsGroup wordRule");
}
convertFunctionalGroupIntoGroup(functionalWord);
if (wordsInWordRule==2){
joinWords(wordEls, wordEls.get(i), functionalWord);
wordsInWordRule =1;
}
wordRuleEl.getAttribute(WORDRULE_ATR).setValue(WordRule.simple.toString());
break;
case carbonylDerivative:
case acidReplacingFunctionalGroup:
//e.g. acetone 4,4-diphenylsemicarbazone. This is better expressed as a full word as the substituent actually locants onto the functional term
for (int j = 1; j < (wordsInWordRule - 1); j++) {
Element wordEl = wordEls.get(i + j);
if (WordType.substituent.toString().equals(wordEl.getAttributeValue(TYPE_ATR))) {
joinWords(wordEls, wordEls.get(i + j), wordEls.get(i + j + 1));
wordsInWordRule--;
List<Element> functionalTerm = OpsinTools.getDescendantElementsWithTagName(wordEls.get(i + j), FUNCTIONALTERM_EL);//rename functionalTerm element to root
if (functionalTerm.size() != 1){
throw new ParsingException("OPSIN bug: Problem with "+ wordRule +" wordRule");
}
functionalTerm.get(0).setName(ROOT_EL);
List<Element> functionalGroups = OpsinTools.getDescendantElementsWithTagName(functionalTerm.get(0), FUNCTIONALGROUP_EL);//rename functionalGroup element to group
if (functionalGroups.size() != 1){
throw new ParsingException("OPSIN bug: Problem with "+ wordRule +" wordRule");
}
functionalGroups.get(0).setName(GROUP_EL);
wordEls.get(i + j).getAttribute(TYPE_ATR).setValue(WordType.full.toString());
}
}
break;
case additionCompound:
case oxide:
//is the halide/pseudohalide/oxide actually a counterion rather than covalently bonded
Element possibleElementaryAtomContainingWord = wordEls.get(i);
List<Element> elementaryAtoms = OpsinTools.getDescendantElementsWithTagNameAndAttribute(possibleElementaryAtomContainingWord, GROUP_EL, TYPE_ATR, ELEMENTARYATOM_TYPE_VAL);
if (elementaryAtoms.size() == 1) {
Element elementaryAtom = elementaryAtoms.get(0);
ChemEl chemEl1 = getChemElFromElementaryAtomEl(elementaryAtom);
if (wordRule == WordRule.oxide) {
if (wordsInWordRule != 2){
throw new ParsingException("OPSIN bug: Problem with "+ wordRule +" wordRule");
}
Element oxideWord = wordEls.get(i + 1);
ChemEl chemEl2 = getChemElFromWordWithFunctionalGroup(oxideWord);
if (!FragmentTools.isCovalent(chemEl1, chemEl2) || chemEl1 == ChemEl.Ag){
Element oxideGroup = convertFunctionalGroupIntoGroup(oxideWord);
setOxideStructureAppropriately(oxideGroup, elementaryAtom);
applySimpleWordRule(wordEls, indexOfFirstWord, possibleElementaryAtomContainingWord);
continue wordRuleLoop;
}
}
else {
for (int j = 1; j < wordsInWordRule; j++) {
Element functionalGroup = wordEls.get(i + j);
ChemEl chemEl2 = getChemElFromWordWithFunctionalGroup(functionalGroup);
if (!FragmentTools.isCovalent(chemEl1, chemEl2)) {//use separate word rules for ionic components
boolean specialCaseCovalency = false;
if (chemEl2.isHalogen() && wordsInWordRule == 2) {
switch (chemEl1) {
case Mg:
if (possibleElementaryAtomContainingWord.getChildCount() > 1) {
//treat grignards (i.e. substitutedmagnesium halides) as covalent
specialCaseCovalency = true;
}
break;
case Al:
if (chemEl2 == ChemEl.Cl || chemEl2 == ChemEl.Br || chemEl2 == ChemEl.I) {
specialCaseCovalency = true;
}
break;
case Ti:
if (oxidationNumberOrMultiplierIs(elementaryAtom, functionalGroup, 4) &&
(chemEl2 == ChemEl.Cl || chemEl2 == ChemEl.Br || chemEl2 == ChemEl.I)) {
specialCaseCovalency = true;
}
break;
case V:
if (oxidationNumberOrMultiplierIs(elementaryAtom, functionalGroup, 4) &&
chemEl2 == ChemEl.Cl) {
specialCaseCovalency = true;
}
break;
case Zr:
case Hf:
if (oxidationNumberOrMultiplierIs(elementaryAtom, functionalGroup, 4) &&
chemEl2 == ChemEl.Br) {
specialCaseCovalency = true;
}
break;
case U:
if (oxidationNumberOrMultiplierIs(elementaryAtom, functionalGroup, 6) &&
(chemEl2 == ChemEl.F || chemEl2 == ChemEl.Cl)) {
specialCaseCovalency = true;
}
break;
case Np:
case Pu:
if (oxidationNumberOrMultiplierIs(elementaryAtom, functionalGroup, 6) &&
chemEl2 == ChemEl.F) {
specialCaseCovalency = true;
}
break;
default:
break;
}
}
else if ((chemEl2 == ChemEl.H || chemEl2 == ChemEl.C ) && wordsInWordRule == 2) {
if (chemEl1 == ChemEl.Al) {
//organoaluminium and aluminium hydrides are covalent
specialCaseCovalency = true;
}
}
if (!specialCaseCovalency) {
continue wordRuleLoop;
}
}
}
}
}
break;
case potentialAlcoholEster:
if (expectedNumOfComponents != null && expectedNumOfComponents == moleculeEl.getChildCount()) {
//don't apply this wordRule if doing so makes the number of components incorrect
continue wordRuleLoop;
}
break;
case monovalentFunctionalGroup:
Element potentialOxy = getLastElementInWord(wordEls.get(0));
String val = potentialOxy.getValue();
if (val.equals("oxy") || val.equals("oxo")) {
throw new ParsingException(wordEls.get(0).getValue() + wordEls.get(1).getValue() +" is unlikely to be intended to be a molecule");
}
break;
default:
break;
}
List<String> wordValues = new ArrayList<>();
Element parentEl = wordEls.get(i).getParent();
int indexToInsertAt = parentEl.indexOf(wordEls.get(i));
for (int j = 0; j < wordsInWordRule; j++) {
Element wordEl = wordEls.remove(i);
wordEl.detach();
wordRuleEl.addChild(wordEl);
wordValues.add(wordEl.getAttributeValue(VALUE_ATR));
}
wordRuleEl.addAttribute(new Attribute(VALUE_ATR, StringTools.stringListToString(wordValues, " ")));//The bare string of all the words under this wordRule
parentEl.insertChild(wordRuleEl, indexToInsertAt);
wordEls.add(i, wordRuleEl);
return true;
}
}
Element firstWord = wordEls.get(indexOfFirstWord);
if (firstWord.getName().equals(WORD_EL) && WordType.full.toString().equals(firstWord.getAttributeValue(TYPE_ATR))){//No wordRule -->wordRule="simple"
applySimpleWordRule(wordEls, indexOfFirstWord, firstWord);
return false;
}
else if (allowSpaceRemoval && WordType.substituent.toString().equals(firstWord.getAttributeValue(TYPE_ATR))){
/*
* substituents may join together or to a full e.g. 2-ethyl toluene -->2-ethyltoluene
* 1-chloro 2-bromo ethane --> 1-chloro-2-bromo ethane then subsequently 1-chloro-2-bromo-ethane
*/
if (indexOfFirstWord +1 < wordEls.size()){
Element wordToPotentiallyCombineWith = wordEls.get(indexOfFirstWord +1);
if (WordType.full.toString().equals(wordToPotentiallyCombineWith.getAttributeValue(TYPE_ATR)) ||
WordType.substituent.toString().equals(wordToPotentiallyCombineWith.getAttributeValue(TYPE_ATR))){
joinWords(wordEls, firstWord, wordToPotentiallyCombineWith);
return true;
}
}
}
else if (WordType.functionalTerm.toString().equals(firstWord.getAttributeValue(TYPE_ATR)) && firstWord.getAttributeValue(VALUE_ATR).equalsIgnoreCase("salt")) {
wordEls.remove(indexOfFirstWord);
firstWord.detach();
if (moleculeEl.getAttribute(ISSALT_ATR) == null) {
moleculeEl.addAttribute(ISSALT_ATR, "yes");
}
return true;
}
if (wordEls.size() == 1 && indexOfFirstWord == 0 && firstWord.getName().equals(WORD_EL) && WordType.substituent.toString().equals(firstWord.getAttributeValue(TYPE_ATR))) {
if (firstWord.getAttributeValue(VALUE_ATR).equalsIgnoreCase("dihydrogen")) {
convertToDihydrogenMolecule(firstWord);
return true;
}
if (allowRadicals) {
//name is all one substituent, make this a substituent and finish
applySubstituentWordRule(wordEls, indexOfFirstWord, firstWord);
}
}
return false;
}
private boolean endsWithGroupPredicateSatisfied(Element wordEl, EndsWithGroup endsWithGroupPredicate) throws ParsingException {
Element lastEl = getLastElementInWord(wordEl);
if (lastEl == null) {
return false;
}
String elName = lastEl.getName();
while (elName.equals(CLOSEBRACKET_EL) ||
elName.equals(STRUCTURALCLOSEBRACKET_EL) ||
elName.equals(ISOTOPESPECIFICATION_EL)) {
lastEl = OpsinTools.getPreviousSibling(lastEl);
if (lastEl == null) {
return false;
}
elName = lastEl.getName();
}
if (endsWithGroupPredicate == EndsWithGroup.acid) {
if (elName.equals(SUFFIX_EL)) {
if (icOrOusAcid.matcher(lastEl.getAttributeValue(VALUE_ATR)).find()) {
return true;
}
}
else if (elName.equals(GROUP_EL)) {
if (lastEl.getAttribute(FUNCTIONALIDS_ATR) != null && icOrOusAcid.matcher(lastEl.getValue()).find()) {
return true;
}
}
}
else if (endsWithGroupPredicate == EndsWithGroup.ateGroup) {
if (elName.equals(GROUP_EL)) {
if (lastEl.getAttribute(FUNCTIONALIDS_ATR) != null && ateOrIteOrAmide.matcher(lastEl.getValue()).find()) {
return true;
}
}
else {
while (lastEl != null && elName.equals(SUFFIX_EL)) {
String suffixValAtr = lastEl.getAttributeValue(VALUE_ATR);
if (ateOrIteOrAmide.matcher(suffixValAtr).find() || suffixValAtr.equals("glycoside")) {
return true;
}
//glycoside is not always the last suffix
lastEl = OpsinTools.getPreviousSibling(lastEl, SUFFIX_EL);
}
}
}
return false;
}
private boolean oxidationNumberOrMultiplierIs(Element elementaryAtomEl, Element functionalGroupWord, int expectedVal) throws ParsingException {
List<Element> functionalGroups = OpsinTools.getDescendantElementsWithTagName(functionalGroupWord, FUNCTIONALGROUP_EL);
if (functionalGroups.size() != 1) {
throw new ParsingException("OPSIN bug: Unable to find functional group in oxide or addition compound rule");
}
Element possibleMultiplier = OpsinTools.getPreviousSibling(functionalGroups.get(0));
if (possibleMultiplier != null && possibleMultiplier.getName().equals(MULTIPLIER_EL)) {
return Integer.parseInt(possibleMultiplier.getAttributeValue(VALUE_ATR)) == expectedVal;
}
else {
Element possibleOxidationNumber = OpsinTools.getNextSibling(elementaryAtomEl);
if(possibleOxidationNumber != null && possibleOxidationNumber.getName().equals(OXIDATIONNUMBERSPECIFIER_EL)) {
return Integer.parseInt(possibleOxidationNumber.getAttributeValue(VALUE_ATR)) == expectedVal;
}
}
return false;
}
private Element getLastGroupInWordRule(Element wordEl) {
Element lastEl = getLastElementInWord(wordEl);
if (lastEl.getName().equals(GROUP_EL)) {
return lastEl;
}
else{
List<Element> groups = lastEl.getParent().getChildElements(GROUP_EL);
if (groups.size() > 0) {
return groups.get(groups.size() - 1);
}
}
return null;
}
private Element getLastElementInWord(Element wordEl) {
List<Element> children = wordEl.getChildElements();
Element lastChild = children.get(children.size() - 1);
while (lastChild.getChildCount() != 0) {
children = lastChild.getChildElements();
lastChild = children.get(children.size() - 1);
}
return lastChild;
}
private void applySimpleWordRule(List<Element> wordEls, int indexOfFirstWord, Element firstWord) {
Element parentEl = firstWord.getParent();
int indexToInsertAt = parentEl.indexOf(firstWord);
Element wordRuleEl = new GroupingEl(WORDRULE_EL);
wordRuleEl.addAttribute(new Attribute(WORDRULE_ATR, WordRule.simple.toString()));//No wordRule
wordRuleEl.addAttribute(new Attribute(TYPE_ATR, WordType.full.toString()));
wordRuleEl.addAttribute(new Attribute(VALUE_ATR, firstWord.getAttributeValue(VALUE_ATR)));
firstWord.detach();
wordRuleEl.addChild(firstWord);
wordEls.set(indexOfFirstWord, wordRuleEl);
parentEl.insertChild(wordRuleEl, indexToInsertAt);
}
private void applySubstituentWordRule(List<Element> wordEls, int indexOfFirstWord, Element firstWord) {
Element parentEl = firstWord.getParent();
int indexToInsertAt = parentEl.indexOf(firstWord);
Element wordRuleEl = new GroupingEl(WORDRULE_EL);
wordRuleEl.addAttribute(new Attribute(WORDRULE_ATR, WordRule.substituent.toString()));
wordRuleEl.addAttribute(new Attribute(TYPE_ATR, WordType.full.toString()));
wordRuleEl.addAttribute(new Attribute(VALUE_ATR, firstWord.getAttributeValue(VALUE_ATR)));
firstWord.detach();
wordRuleEl.addChild(firstWord);
wordEls.set(indexOfFirstWord, wordRuleEl);
parentEl.insertChild(wordRuleEl, indexToInsertAt);
}
/**
* Merges two adjacent words
* The latter word (wordToPotentiallyCombineWith) is merged into the former and removed from wordEls
* @param wordEls
* @param firstWord
* @param wordToPotentiallyCombineWith
* @throws ParsingException
*/
private void joinWords(List<Element> wordEls, Element firstWord, Element wordToPotentiallyCombineWith) throws ParsingException {
wordEls.remove(wordToPotentiallyCombineWith);
wordToPotentiallyCombineWith.detach();
List<Element> substituentEls = firstWord.getChildElements(SUBSTITUENT_EL);
if (substituentEls.size()==0){
throw new ParsingException("OPSIN Bug: Substituent element not found where substituent element expected");
}
Element finalSubstituent = substituentEls.get(substituentEls.size() - 1);
List<Element> finalSubstituentChildren = finalSubstituent.getChildElements();
if (!finalSubstituentChildren.get(finalSubstituentChildren.size() - 1).getName().equals(HYPHEN_EL)){//add an implicit hyphen if one is not already present
Element implicitHyphen = new TokenEl(HYPHEN_EL, "-");
finalSubstituent.addChild(implicitHyphen);
}
List<Element> elementsToMergeIntoSubstituent = wordToPotentiallyCombineWith.getChildElements();
for (int j = elementsToMergeIntoSubstituent.size() -1 ; j >=0; j--) {
Element el = elementsToMergeIntoSubstituent.get(j);
el.detach();
OpsinTools.insertAfter(finalSubstituent, el);
}
if (WordType.full.toString().equals(wordToPotentiallyCombineWith.getAttributeValue(TYPE_ATR))){
firstWord.getAttribute(TYPE_ATR).setValue(WordType.full.toString());
}
firstWord.getAttribute(VALUE_ATR).setValue(firstWord.getAttributeValue(VALUE_ATR) + wordToPotentiallyCombineWith.getAttributeValue(VALUE_ATR));
}
private Element convertFunctionalGroupIntoGroup(Element word) throws ParsingException {
word.getAttribute(TYPE_ATR).setValue(WordType.full.toString());
List<Element> functionalTerms = OpsinTools.getDescendantElementsWithTagName(word, FUNCTIONALTERM_EL);
if (functionalTerms.size() != 1){
throw new ParsingException("OPSIN Bug: Exactly 1 functionalTerm expected in functionalGroupAsGroup wordRule");
}
Element functionalTerm = functionalTerms.get(0);
functionalTerm.setName(ROOT_EL);
List<Element> functionalGroups = functionalTerm.getChildElements(FUNCTIONALGROUP_EL);
if (functionalGroups.size() != 1){
throw new ParsingException("OPSIN Bug: Exactly 1 functionalGroup expected in functionalGroupAsGroup wordRule");
}
Element functionalGroup = functionalGroups.get(0);
functionalGroup.setName(GROUP_EL);
functionalGroup.getAttribute(TYPE_ATR).setValue(SIMPLEGROUP_TYPE_VAL);
functionalGroup.addAttribute(new Attribute(SUBTYPE_ATR, SIMPLEGROUP_SUBTYPE_VAL));
return functionalGroup;
}
private void convertToDihydrogenMolecule(Element word) {
word.getAttribute(TYPE_ATR).setValue(WordType.full.toString());
for (int i = word.getChildCount() - 1; i >=0; i--) {
word.removeChild(i);
}
Element root = new GroupingEl(ROOT_EL);
Element group = new TokenEl(GROUP_EL);
group.addAttribute(TYPE_ATR, SIMPLEGROUP_TYPE_VAL);
group.addAttribute(SUBTYPE_ATR, SIMPLEGROUP_SUBTYPE_VAL);
group.addAttribute(VALUE_ATR, "[H][H]");
group.setValue("dihydrogen");
root.addChild(group);
word.addChild(root);
}
/**
* Sets the SMILES of the oxide group to be something like [O-2]
* ... unless the oxide group is multiplied and the elementaryAtom has no oxidation states greater 2
* in which case [O-][O-] would be assumed
* @param oxideGroup
* @param elementaryAtom
*/
private void setOxideStructureAppropriately(Element oxideGroup, Element elementaryAtom) {
boolean chainInterpretation = false;
Integer multiplierVal = null;
Element possibleMultiplier = OpsinTools.getPreviousSibling(oxideGroup);
if (possibleMultiplier != null &&
possibleMultiplier.getName().equals(MULTIPLIER_EL)){
multiplierVal = Integer.parseInt(possibleMultiplier.getAttributeValue(VALUE_ATR));
if (multiplierVal > 1) {
String commonOxidationStatesAndMax = elementaryAtom.getAttributeValue(COMMONOXIDATIONSTATESANDMAX_ATR);
if (commonOxidationStatesAndMax == null ||
Integer.parseInt(commonOxidationStatesAndMax.split(":")[1]) <= 2){
chainInterpretation = true;
}
}
}
Attribute value = oxideGroup.getAttribute(VALUE_ATR);
String smiles = value.getValue();
String element;
if (smiles.equals("O")){
element = "O";
}
else if (smiles.equals("S")){
element = "S";
}
else if (smiles.startsWith("[Se")){
element = "Se";
}
else if (smiles.startsWith("[Te")){
element = "Te";
}
else{
throw new RuntimeException("OPSIN Bug: Unexpected smiles for oxideGroup: " + smiles);
}
if (chainInterpretation){
StringBuilder sb = new StringBuilder();
sb.append('[');
sb.append(element);
sb.append("-]");
for (int i = 2; i < multiplierVal; i++) {
sb.append('[');
sb.append(element);
sb.append(']');
}
sb.append('[');
sb.append(element);
sb.append("-]");
value.setValue(sb.toString());
possibleMultiplier.detach();
}
else{
value.setValue("[" + element + "-2]");
}
}
private ChemEl getChemElFromElementaryAtomEl(Element elementaryAtomEl) {
String elementStr = elementaryAtomEl.getAttributeValue(VALUE_ATR);
if (elementStr.startsWith("[")) {
int len = elementStr.length() - 1;
for (int i = 1; i < len; i++) {
char ch = elementStr.charAt(i);
if ((ch >= 'A' && ch <='Z') || (ch >= 'a' && ch <='z')) {
if (i + 1 < len) {
char ch2 = elementStr.charAt(i + 1);
if ((ch2 >= 'A' && ch2 <='Z') || (ch2 >= 'a' && ch2 <='z')) {
//two letter element
elementStr = elementStr.substring(i, i + 2);
break;
}
}
//one letter element
elementStr = elementStr.substring(i, i + 1);
break;
}
}
}
return ChemEl.valueOf(elementStr);
}
/**
 * Extracts the chemical element from the single functional group inside a
 * functional word used by the oxide/addition-compound rules. The element
 * symbol is taken as the first uppercase letter of the group's SMILES plus
 * the following character when it is lowercase (two-letter symbols).
 *
 * @param functionalWord word expected to contain exactly one functional group
 * @return the functional group's chemical element
 * @throws ParsingException if the word does not contain exactly one functional group
 */
private ChemEl getChemElFromWordWithFunctionalGroup(Element functionalWord) throws ParsingException {
	List<Element> functionalGroups = OpsinTools.getDescendantElementsWithTagName(functionalWord, FUNCTIONALGROUP_EL);
	if (functionalGroups.size() != 1){
		throw new ParsingException("OPSIN bug: Unable to find functional group in oxide or addition compound rule");
	}
	String smiles = functionalGroups.get(0).getAttributeValue(VALUE_ATR);
	String elementStr = "";
	for (int i = 0; i < smiles.length(); i++) {
		if (Character.isUpperCase(smiles.charAt(i))){
			elementStr += smiles.charAt(i);
			if (i + 1 <smiles.length() && Character.isLowerCase(smiles.charAt(i + 1))){
				elementStr += smiles.charAt(i + 1);
			}
			break;
		}
	}
	// NOTE: valueOf throws IllegalArgumentException if no uppercase letter was found.
	return ChemEl.valueOf(elementStr);
}
}
}
|
/*
* This file is a bit funny. The goal here is to use setns() to manipulate
* files inside the container, so we don't have to reason about the paths to
* make sure they don't escape (we can simply rely on the kernel for
* correctness). Unfortunately, you can't setns() to a mount namespace with a
* multi-threaded program, which every golang binary is. However, by declaring
* our init as an initializer, we can capture process control before it is
* transferred to the golang runtime, so we can then setns() as we'd like
* before golang has a chance to set up any threads. So, we implement two new
* lxd fork* commands which are captured here, and take a file on the host fs
* and copy it into the container ns.
*
* An alternative to this would be to move this code into a separate binary,
* which of course has problems of its own when it comes to packaging (how do
* we find the binary, what do we do if someone does file push and it is
* missing, etc.). After some discussion, even though the embedded method is
* somewhat convoluted, it was preferred.
*/
package main

/*
#include <string.h>
#include <stdio.h>
#include <sys/mount.h>
#include <sched.h>
#include <linux/sched.h>
#include <linux/limits.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <stdbool.h>
#include <unistd.h>

// This expects:
//  ./lxd forkputfile /source/path <pid> /target/path
// or
//  ./lxd forkgetfile /target/path <pid> /source/path <uid> <gid> <mode>
// i.e. 8 arguments, each which have a max length of PATH_MAX.
// Unfortunately, lseek() and fstat() both fail (EINVAL and 0 size) for
// procfs. Also, we can't mmap, because procfs doesn't support that, either.
//
#define CMDLINE_SIZE (8 * PATH_MAX)

// copy streams all bytes from source to target in 1 KiB chunks.
// Returns 0 on success, -1 (after perror) on a read error or short write.
int copy(int target, int source)
{
	ssize_t n;
	char buf[1024];

	while ((n = read(source, buf, 1024)) > 0) {
		if (write(target, buf, n) != n) {
			perror("write");
			return -1;
		}
	}

	if (n < 0) {
		perror("read");
		return -1;
	}

	return 0;
}

// manip_file_in_ns copies a file into (is_put) or out of (!is_put) the mount
// namespace of `pid`. For puts, the container-side file is created if needed
// (mode applies only on creation) and chowned to uid:gid afterwards.
// Returns 0 on success, -1 on any failure.
int manip_file_in_ns(char *host, int pid, char *container, bool is_put, uid_t uid, gid_t gid, mode_t mode) {
	int host_fd, container_fd, mntns;
	char buf[PATH_MAX];
	int ret = -1;
	int container_open_flags;

	// Open the host-side file BEFORE switching namespaces, while its path
	// is still resolvable.
	host_fd = open(host, O_RDWR);
	if (host_fd < 0) {
		perror("open host");
		return -1;
	}

	sprintf(buf, "/proc/%d/ns/mnt", pid);
	fprintf(stderr, "mntns dir: %s\n", buf);
	mntns = open(buf, O_RDONLY);
	if (mntns < 0) {
		perror("open mntns");
		goto close_host;
	}

	// Enter the container's mount namespace so the container path is
	// resolved by the kernel inside the container (no path-escape checks
	// needed on our side).
	if (setns(mntns, 0) < 0) {
		perror("setns");
		goto close_mntns;
	}

	container_open_flags = O_RDWR;
	if (is_put)
		container_open_flags |= O_CREAT;

	container_fd = open(container, container_open_flags, mode);
	if (container_fd < 0) {
		perror("open container");
		goto close_mntns;
	}

	if (is_put) {
		if (copy(container_fd, host_fd) < 0)
			goto close_container;

		if (fchown(container_fd, uid, gid) < 0) {
			perror("fchown");
			goto close_container;
		}

		ret = 0;
	} else
		ret = copy(host_fd, container_fd);

close_container:
	close(container_fd);
close_mntns:
	close(mntns);
close_host:
	close(host_fd);
	return ret;
}

// init runs as an ELF constructor, i.e. before the Go runtime spawns any
// threads — the only point at which setns() to a mount namespace can still
// succeed (see the file header). It parses /proc/self/cmdline and, for the
// fork{put,get}file subcommands, performs the copy and _exit()s without ever
// handing control to Go.
__attribute__((constructor)) void init(void) {
	int cmdline;
	char *command = NULL, *source = NULL, *target = NULL;
	pid_t pid;
	char buf[CMDLINE_SIZE];
	char *cur;
	bool is_put;
	ssize_t size;
	uid_t uid = 0;
	gid_t gid = 0;
	mode_t mode = 0;

	// /proc/self/cmdline is NUL-separated: argv[0]\0argv[1]\0...
	cmdline = open("/proc/self/cmdline", O_RDONLY);
	if (cmdline < 0) {
		perror("open");
		_exit(232);
	}

	memset(buf, 0, sizeof(buf));
	if ((size = read(cmdline, buf, sizeof(buf)-1)) < 0) {
		close(cmdline);
		perror("read");
		_exit(232);
	}

	cur = buf;
	// skip argv[0]
	while (*cur != 0) {
		cur++;
	}
	cur++;
	if (size <= cur - buf) {
		close(cmdline);
		return;
	}

	command = cur;
	if (strcmp(command, "forkputfile") == 0) {
		is_put = true;
	} else if (strcmp(command, "forkgetfile") == 0) {
		is_put = false;
	} else {
		// This isn't one of our special commands, let's just continue
		// normally with execution.
		close(cmdline);
		return;
	}

// Advance `cur` past the current NUL-terminated argument to the next one;
// dies with exit code 1 if the cmdline buffer runs out of arguments.
#define ADVANCE_ARG_REQUIRED()					\
	do {							\
		while (*cur != 0)				\
			cur++;					\
		cur++;						\
		if (size <= cur - buf) {			\
			close(cmdline);				\
			printf("not enough arguments\n");	\
			_exit(1);				\
		}						\
	} while(0)

	ADVANCE_ARG_REQUIRED();
	source = cur;

	ADVANCE_ARG_REQUIRED();
	pid = atoi(cur);

	ADVANCE_ARG_REQUIRED();
	target = cur;

	if (is_put) {
		// Puts additionally carry the ownership and mode to apply.
		ADVANCE_ARG_REQUIRED();
		uid = atoi(cur);

		ADVANCE_ARG_REQUIRED();
		gid = atoi(cur);

		ADVANCE_ARG_REQUIRED();
		mode = atoi(cur);
	}

	printf("command: %s\n", command);
	printf("source: %s\n", source);
	printf("pid: %d\n", pid);
	printf("target: %s\n", target);
	printf("uid: %d\n", uid);
	printf("gid: %d\n", gid);
	printf("mode: %d\n", mode);

	close(cmdline);
	// The process exit status is the copy result (0 on success).
	_exit(manip_file_in_ns(source, pid, target, is_put, uid, gid, mode));
}
*/
import "C"
|
<reponame>jeffsdev/rubyContacts
# An in-memory e-mail record: a type label (e.g. "work") plus the address.
# Saved instances are kept in a class-level list shared by all Emails.
class Email
  attr_reader :type, :email_address

  @@emails = []

  # Returns every Email instance that has been saved.
  def self.all
    @@emails
  end

  # Empties the saved-email list (useful for test isolation).
  def self.clear
    @@emails = []
  end

  # attributes: hash that must contain :type and :email_address
  # (fetch raises KeyError if either is missing).
  def initialize(attributes)
    @type = attributes.fetch(:type)
    @email_address = attributes.fetch(:email_address)
    @id = @@emails.length + 1
  end

  # Appends this instance to the shared list.
  def save
    @@emails.push(self)
  end
end
|
#!/bin/bash

# Script to be run on successful RPM build

set -uex

# Resolve the directory this script lives in so the relative source below
# works regardless of the caller's working directory.
mydir="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
ci_envs="$mydir/../parse_ci_envs.sh"
if [ -e "${ci_envs}" ]; then
    # at some point we want to use: shellcheck source=ci/parse_ci_envs.sh
    # shellcheck disable=SC1091
    source "${ci_envs}"
fi

# Defaults for when the CI environment does not provide these.
: "${CHROOT_NAME:=epel-7-x86_64}"
: "${TARGET:=centos7}"

# NOTE(review): assumes artifacts/${TARGET} already exists — confirm an
# earlier pipeline step creates it.
artdir="${PWD}/artifacts/${TARGET}"

# Debian/Ubuntu path (pbuilder): collect .deb artifacts, build an APT
# Packages index, record the daos-server version, then exit.
if [ -d /var/cache/pbuilder/ ]; then
    mockroot=/var/cache/pbuilder/
    (if cd "$mockroot/result/"; then
        cp ./*{.buildinfo,.changes,.deb,.dsc,.xz} "$artdir"
    fi)
    cp utils/rpms/_topdir/BUILD/*.orig.tar.* "$artdir"
    pushd "$artdir"
    dpkg-scanpackages . /dev/null | \
        gzip -9c > Packages.gz
    popd
    dpkg -f "$artdir"/daos-server_*_amd64.deb Version > "${TARGET}-rpm-version"
    ls -l "${TARGET}-rpm-version" || true
    cat "${TARGET}-rpm-version" || true
    exit 0
fi

# RPM path (mock): dump the build logs, collect SRPMs/RPMs into a yum repo.
mockroot="/var/lib/mock/${CHROOT_NAME}"
cat "$mockroot"/result/{root,build}.log 2>/dev/null || true

if srpms="$(ls _topdir/SRPMS/*)"; then
    cp -af "$srpms" "$artdir"
fi
(if cd "$mockroot/result/"; then
    cp -r . "$artdir"
fi)
createrepo "$artdir"
# Record the built daos-server version and its required mercury version for
# use by later pipeline stages.
rpm --qf "%{version}-%{release}.%{arch}" \
    -qp "$artdir"/daos-server-*.x86_64.rpm > "${TARGET}-rpm-version"
rpm -qRp "$artdir"/daos-server-*.x86_64.rpm |
    sed -ne '/mercury/s/.* = //p' > "${TARGET}-required-mercury-rpm-version"
cat "$mockroot"/result/{root,build}.log
|
import { Span } from "@azure/core-tracing";
import { RequestPolicyFactory, RequestPolicy, RequestPolicyOptions, BaseRequestPolicy } from "./requestPolicy";
import { WebResourceLike } from "../webResource";
import { HttpOperationResponse } from "../httpOperationResponse";
/** Options used to configure {@link tracingPolicy}. */
export interface TracingPolicyOptions {
    /** User-agent string to associate with created spans, if any. */
    userAgent?: string;
}
/** Creates a {@link RequestPolicyFactory} producing {@link TracingPolicy} instances. */
export declare function tracingPolicy(tracingOptions?: TracingPolicyOptions): RequestPolicyFactory;
/** Request pipeline policy that creates a tracing Span per outgoing request. */
export declare class TracingPolicy extends BaseRequestPolicy {
    private userAgent?;
    constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, tracingOptions: TracingPolicyOptions);
    /** Forwards the request through the pipeline (implementation in the emitted .js). */
    sendRequest(request: WebResourceLike): Promise<HttpOperationResponse>;
    /** Attempts to start a Span for the request; undefined when tracing is unavailable. */
    tryCreateSpan(request: WebResourceLike): Span | undefined;
    private tryProcessError;
    private tryProcessResponse;
}
//# sourceMappingURL=tracingPolicy.d.ts.map
|
#!/bin/bash
# Script to build and migrate a new version of a shared deployment of CalCentral.
# This is meant for running on Bamboo.

# Run from the repository root regardless of invocation directory; abort if
# the cd fails rather than building in the wrong place. (Quoted to survive
# paths containing spaces — the previous unquoted form word-split.)
cd "$(dirname "${BASH_SOURCE[0]}")/.." || { echo "ERROR: could not cd to repo root" ; exit 1 ; }

# Daily log file; every echo below is tee'd into it as well as stdout.
LOG=$(date +"log/start-stop_%Y-%m-%d.log")
LOGIT="tee -a $LOG"

# Enable rvm and use the correct Ruby version and gem set.
[[ -s "$HOME/.rvm/scripts/rvm" ]] && . "$HOME/.rvm/scripts/rvm"
source .rvmrc

export RAILS_ENV=${RAILS_ENV:-production}
export LOGGER_STDOUT=only

# Temporary workaround for a JRuby 1.7.4 + Java 1.7 + JIT/invokedynamic bug : CLC-2732
export JRUBY_OPTS="-Xcompile.invokedynamic=false -Xcext.enabled=true -J-Xmx900m -J-Djruby.compile.mode=OFF"
# export JRUBY_OPTS="-Xcext.enabled=true -J-Xmx900m"

echo | $LOGIT
echo "------------------------------------------" | $LOGIT
echo "$(date): Updating and rebuilding CalCentral..." | $LOGIT

# Load all dependencies.
echo "$(date): bundle install..." | $LOGIT
bundle install --deployment --local || { echo "ERROR: bundle install failed" ; exit 1 ; }

# Rebuild static assets (HTML, JS, etc.) after update.
echo "$(date): Rebuilding static assets..." | $LOGIT
bundle exec rake assets:precompile || { echo "ERROR: asset compilation failed" ; exit 1 ; }
bundle exec rake fix_assets || { echo "ERROR: asset fix failed" ; exit 1 ; }

# Stamp version number
git log --pretty=format:'%H' -n 1 > versions/git.txt || { echo "ERROR: git log command failed" ; exit 1 ; }

# copy Oracle jar into ./lib
echo "$(date): Getting external driver files..." | $LOGIT
./script/install-jars.rb 2>&1 | $LOGIT

# build the knob
echo "$(date): Building calcentral.knob..." | $LOGIT
bundle exec rake torquebox:archive NAME=calcentral || { echo "ERROR: torquebox archive failed" ; exit 1 ; }
|
import { Panel } from './Panel';
import { createInstance } from './createInstance';
import { IBlock, IBlockData } from '@/typings';
import { BasicType } from '@/constants';
/** Data shape for a Spacer block: an empty vertical gap between content. */
export type ISpacer = IBlockData<{
  /** Background color painted behind the spacer. */
  'container-background-color'?: string;
  /** Height of the gap (CSS length, e.g. "20px"). */
  height?: string;
  /** Padding around the spacer (CSS shorthand). */
  padding?: string;
}>;

/** Spacer block definition; valid only as a direct child of a column. */
export const Spacer: IBlock<ISpacer> = {
  name: 'Spacer',
  type: BasicType.SPACER,
  Panel,
  createInstance,
  validParentType: [BasicType.COLUMN],
};
|
#!/bin/bash
# Create and apply Django migrations for the "leads" app, then start the
# development server. `set -e` stops the script on the first failing step so
# a broken migration is not silently followed by a server start.
set -e

echo "making migrations"
python3 manage.py makemigrations leads
echo "migrating"
python3 manage.py migrate
echo "running server"
python3 manage.py runserver
|
$(document).ready(function () {
    // Item counts per viewport-width breakpoint (px).
    var breakpoints = {
        0: { items: 1 },
        480: { items: 2 },
        575: { items: 2 },
        768: { items: 2 },
        991: { items: 3 },
        1200: { items: 4 }
    };

    // Initialise the Owl Carousel: auto-advancing every 3s, looping,
    // centred slides with dot navigation (no prev/next arrows).
    $(".owl-carousel").owlCarousel({
        autoplay: true,
        autoplayTimeout: 3000,
        autoplayHoverPause: false,
        items: 1,
        stagePadding: 20,
        center: true,
        nav: false,
        margin: 50,
        dots: true,
        loop: true,
        responsive: breakpoints
    });
});
import re


def remove_special_characters_and_numbers(string):
    """Return ``string`` with every non-alphabetical character removed.

    Digits, punctuation, whitespace and all other non A-Z/a-z characters are
    stripped. The input string is not modified.

    Args:
        string: The text to clean.

    Returns:
        A new string containing only ASCII letters.
    """
    # Match every character that is NOT an ASCII letter...
    pattern = re.compile('[^a-zA-Z]')
    # ...and delete all matches in a single pass.
    # (Fix: the original used `re` without ever importing it, so calling the
    # function raised NameError.)
    return pattern.sub('', string)
package main
import (
"net/http"
"os"
"github.com/danibachar/kube-multi-cluster-managment/server/providers/pkg/models"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
"github.com/labstack/gommon/log"
)
// gcpDeployBroker binds a GCPClusterConfig from the request body and deploys
// a Submariner broker on that GCP cluster. On success it echoes the config
// back as JSON; on any failure it returns a 500 with the underlying error.
func gcpDeployBroker(c echo.Context) error {
	log.Info("provider is gcp")
	config := models.GCPClusterConfig{}
	if err := c.Bind(&config); err != nil {
		log.Errorf("Failed reading the request body %s", err)
		return echo.NewHTTPError(http.StatusInternalServerError, err)
	}
	if err := GCPDeploySubmarinerBrokerOn(config); err != nil {
		// Fix: message previously said "join" in the deploy handler and
		// passed err without a matching format verb.
		log.Errorf("Failed deploying broker on cluster %s with error %s", config.ClusterName, err)
		return echo.NewHTTPError(http.StatusInternalServerError, err)
	}
	return c.JSON(http.StatusOK, config)
}
// gcpJoinBroker binds a GCPClusterConfig from the request body and joins
// that GCP cluster to an existing Submariner broker. On success it echoes
// the config back as JSON; on any failure it returns a 500 with the error.
func gcpJoinBroker(c echo.Context) error {
	log.Info("provider is gcp")
	config := models.GCPClusterConfig{}
	if err := c.Bind(&config); err != nil {
		log.Errorf("Failed reading the request body %s", err)
		return echo.NewHTTPError(http.StatusInternalServerError, err)
	}
	if err := GCPJoinClusterToBroker(config); err != nil {
		// Fix: message previously said "deply broker" (typo + wrong action)
		// and passed err without a matching format verb.
		log.Errorf("Failed joining cluster %s to broker with error %s", config.ClusterName, err)
		return echo.NewHTTPError(http.StatusInternalServerError, err)
	}
	return c.JSON(http.StatusOK, config)
}
// main wires up the HTTP API:
//
//	GET    /:name      health check (always returns {"Status":"OK"})
//	DELETE /:name      teardown — currently a stub for every provider
//	PUT    set/:name   deploy a Submariner broker (gcp only)
//	PUT    join/:name  join a cluster to the broker (gcp only)
//
// The listen port is read from the SERVER_PORT environment variable.
func main() {
	e := echo.New()
	e.Use(middleware.Logger())
	e.Use(middleware.Recover())

	e.GET("/:name", func(c echo.Context) error {
		return c.JSON(http.StatusOK, struct{ Status string }{Status: "OK"})
	})

	e.DELETE("/:name", func(c echo.Context) error {
		provider := c.Param("name")
		// NOTE(review): both cases break straight to the error below, so
		// deletion always fails — presumably an unimplemented stub.
		switch provider {
		case "aws":
			break
		case "gcp":
			break
		}
		return echo.NewHTTPError(http.StatusInternalServerError, "no supported provider")
	})

	e.PUT("set/:name", func(c echo.Context) error {
		defer c.Request().Body.Close()
		provider := c.Param("name")
		switch provider {
		case "aws":
			break
		case "gcp":
			return gcpDeployBroker(c)
		}
		return echo.NewHTTPError(http.StatusInternalServerError, "no supported provider")
	})

	e.PUT("join/:name", func(c echo.Context) error {
		defer c.Request().Body.Close()
		provider := c.Param("name")
		switch provider {
		case "aws":
			break
		case "gcp":
			return gcpJoinBroker(c)
		}
		return echo.NewHTTPError(http.StatusInternalServerError, "no supported provider")
	})

	// NOTE(review): if SERVER_PORT is unset this becomes ":" and Start
	// fails — confirm deployment always sets it.
	httpPort := os.Getenv("SERVER_PORT")
	e.Logger.Fatal(e.Start(":" + httpPort))
}
|
<reponame>flayyer/eslint-conf
// For local development of the shared config, link it into this project:
//   yarn link
//   yarn link '@flyyer/eslint-config'
// Extends the shared @flyyer ESLint rules plus its Prettier compatibility layer.
module.exports = {
  extends: ["@flyyer/eslint-config", "@flyyer/eslint-config/prettier"],
};
|
<reponame>feueraustreter/YAPION<filename>src/main/java/yapion/serializing/serializer/primitive/string/StringSerializer.java<gh_stars>1-10
// SPDX-License-Identifier: Apache-2.0
// YAPION
// Copyright (C) 2019,2020 yoyosource
package yapion.serializing.serializer.primitive.string;
import yapion.annotations.deserialize.YAPIONLoadExclude;
import yapion.annotations.serialize.YAPIONSaveExclude;
import yapion.hierarchy.typegroups.YAPIONAnyType;
import yapion.hierarchy.types.YAPIONValue;
import yapion.serializing.InternalSerializer;
import yapion.serializing.data.DeserializeData;
import yapion.serializing.data.SerializeData;
import yapion.serializing.serializer.SerializerImplementation;
@YAPIONSaveExclude(context = "*")
@YAPIONLoadExclude(context = "*")
@SerializerImplementation
public class StringSerializer implements InternalSerializer<String> {

    /** Canonical name of the type this serializer handles. */
    @Override
    public String type() {
        return "java.lang.String";
    }

    /** Wraps the raw String in a YAPIONValue; no transformation needed. */
    @Override
    public YAPIONAnyType serialize(SerializeData<String> serializeData) {
        return new YAPIONValue<>(serializeData.object);
    }

    /** Unwraps the YAPIONValue back to its String payload. */
    @Override
    public String deserialize(DeserializeData<? extends YAPIONAnyType> deserializeData) {
        return ((YAPIONValue<String>) deserializeData.object).get();
    }
}
#!/usr/bin/env bash

# Submit an mmdetection training job to SLURM inside the pytorch12 conda env.
source ../../anaconda3/etc/profile.d/conda.sh
conda activate pytorch12 #specify conda environment
set -x

PARTITION=gpu_24h #specify gpu time 2h, 8h, 24h
JOB_NAME=reppoints_moment_r50_fpn_2x_BFP_attn_0010 #job name can be anything
CONFIG=./configs/my_configs/Empirical_attention/reppoints_moment_r50_fpn_2x_BFP_attn_0010_BRefine.py #specify the config file
WORK_DIR=./work_dirs/reppoints_moment_r50_fpn_2x_BFP_attn_0010_BRefine #where to save the models
GPUS=${5:-4} #specify number of gpus ${5:-num_gpus}
GPUS_PER_NODE=${GPUS_PER_NODE:-4} #specify number of gpus per node ${:-num_gpu per node}
CPUS_PER_TASK=${CPUS_PER_TASK:-5}
SRUN_ARGS=${SRUN_ARGS:-""}
PY_ARGS=${PY_ARGS:-"--validate"}

# NOTE: first stage train 12 epoches
# Launch distributed training via srun; the --launcher="slurm" flag makes
# mmdetection pick up the SLURM environment configured by the flags below.
srun -p ${PARTITION} \
    --job-name=${JOB_NAME} \
    --gres=gpu:${GPUS_PER_NODE} \
    --ntasks=${GPUS} \
    --ntasks-per-node=${GPUS_PER_NODE} \
    --cpus-per-task=${CPUS_PER_TASK} \
    --kill-on-bad-exit=1 \
    ${SRUN_ARGS} \
    python -u ./mmdetection/tools/train.py ${CONFIG} --work_dir=${WORK_DIR} --launcher="slurm"
    #${PY_ARGS}
|
<reponame>ugurmeet/presto
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.iterative.rule;
import com.facebook.presto.spi.plan.DistinctLimitNode;
import com.facebook.presto.spi.plan.ValuesNode;
import com.facebook.presto.sql.planner.TestTableConstraintsConnectorFactory;
import com.facebook.presto.sql.planner.iterative.properties.LogicalPropertiesProviderImpl;
import com.facebook.presto.sql.planner.iterative.rule.test.BaseRuleTest;
import com.facebook.presto.sql.planner.iterative.rule.test.RuleTester;
import com.facebook.presto.sql.relational.FunctionResolution;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.Optional;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.anyTree;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.node;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.output;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.tableScan;
import static java.util.Collections.emptyList;
/**
 * Tests for {@code RemoveRedundantDistinctLimit} (optionally combined with
 * {@code MergeLimitWithDistinct}): a DistinctLimit node should be removed
 * when constraint-derived logical properties prove it is redundant.
 */
public class TestRedundantDistinctLimitRemoval
        extends BaseRuleTest
{
    private LogicalPropertiesProviderImpl logicalPropertiesProvider;

    @BeforeClass
    public final void setUp()
    {
        // exploit_constraints must be enabled for logical properties to be derived.
        tester = new RuleTester(emptyList(), ImmutableMap.of("exploit_constraints", Boolean.toString(true)), Optional.of(1), new TestTableConstraintsConnectorFactory(1));
        logicalPropertiesProvider = new LogicalPropertiesProviderImpl(new FunctionResolution(tester.getMetadata().getFunctionAndTypeManager()));
    }

    @Test
    public void singleTableTests()
    {
        // DistinctLimit over a single-row Values source is redundant for any limit.
        tester().assertThat(new RemoveRedundantDistinctLimit(), logicalPropertiesProvider)
                .on(p ->
                        p.distinctLimit(
                                10,
                                ImmutableList.of(p.variable("c")),
                                p.values(1, p.variable("c"))))
                .matches(node(ValuesNode.class));

        tester().assertThat(new RemoveRedundantDistinctLimit(), logicalPropertiesProvider)
                .on(p ->
                        p.distinctLimit(
                                0,
                                ImmutableList.of(p.variable("c")),
                                p.values(1, p.variable("c"))))
                .matches(node(ValuesNode.class));

        // A global aggregation yields one row, so the DistinctLimit is removed.
        tester().assertThat(ImmutableSet.of(new MergeLimitWithDistinct(), new RemoveRedundantDistinctLimit()), logicalPropertiesProvider)
                .on("SELECT distinct(c) FROM (SELECT count(*) as c FROM orders) LIMIT 10")
                .validates(plan -> assertNodeRemovedFromPlan(plan, DistinctLimitNode.class));

        //negative test
        tester().assertThat(ImmutableSet.of(new MergeLimitWithDistinct(), new RemoveRedundantDistinctLimit()), logicalPropertiesProvider)
                .on("SELECT distinct(c) FROM (SELECT count(*) as c FROM orders GROUP BY orderkey) LIMIT 10")
                .matches(output(
                        node(DistinctLimitNode.class,
                                anyTree(
                                        tableScan("orders")))));
        //TODO where are the constraints use cases?!
    }

    @Test
    public void complexQueryTests()
    {
        //TODO more join, complex query tests
        // Equality predicate on orderkey — presumably a key — limits the join
        // output enough to make distinct redundant.
        tester().assertThat(ImmutableSet.of(new MergeLimitWithDistinct(), new RemoveRedundantDistinctLimit()), logicalPropertiesProvider)
                .on("select distinct totalprice from orders o inner join customer c on o.custkey = c.custkey where o.orderkey=10 limit 2")
                .validates(plan -> assertNodeRemovedFromPlan(plan, DistinctLimitNode.class));

        //negative tests
        tester().assertThat(ImmutableSet.of(new MergeLimitWithDistinct(), new RemoveRedundantDistinctLimit()), logicalPropertiesProvider)
                .on("select distinct totalprice from orders o inner join customer c on o.custkey = c.custkey where o.orderkey>10 limit 2")
                .validates(plan -> assertNodePresentInPlan(plan, DistinctLimitNode.class));
    }

    @Test
    public void doesNotFire()
    {
        // 10 source rows may hold up to 10 distinct values: not provably redundant.
        tester().assertThat(new RemoveRedundantDistinctLimit(), logicalPropertiesProvider)
                .on(p ->
                        p.distinctLimit(
                                10,
                                ImmutableList.of(p.variable("c")),
                                p.values(10, p.variable("c"))))
                .doesNotFire();
    }

    @Test
    public void testFeatureDisabled()
    {
        // Disable the feature and verify that optimization rule is not applied.
        RuleTester newTester = new RuleTester(emptyList(), ImmutableMap.of("exploit_constraints", Boolean.toString(false)));
        newTester.assertThat(ImmutableSet.of(new MergeLimitWithDistinct(), new RemoveRedundantDistinctLimit()), logicalPropertiesProvider)
                .on("select distinct totalprice from orders o inner join customer c on o.custkey = c.custkey where o.orderkey=10 limit 2")
                .validates(plan -> assertNodePresentInPlan(plan, DistinctLimitNode.class));
    }
}
|
<filename>vendor/github.com/jdcloud-api/jdcloud-sdk-go/services/rds/apis/DescribeBackupDownloadURL.go<gh_stars>1-10
// Copyright 2018 JDCLOUD.COM
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// NOTE: This class is auto generated by the jdcloud code generator program.
package apis
import (
"github.com/jdcloud-api/jdcloud-sdk-go/core"
)
// DescribeBackupDownloadURLRequest is the request for obtaining the download
// URL of a database backup. (Auto-generated binding — see the file header.)
type DescribeBackupDownloadURLRequest struct {

	core.JDCloudRequest

	/* Region code; for valid values see the regions/availability-zones table (../Enum-Definitions/Regions-AZ.md) */
	RegionId string `json:"regionId"`

	/* Backup ID */
	BackupId string `json:"backupId"`

	/* File name.<br>- MySQL: not supported.<br>- SQL Server: required; names the file inside the backup to get a download link for. The file name (without extension) is the name of the database it backs up, e.g. my_test_db.bak is the backup of database my_test_db. (Optional) */
	FileName *string `json:"fileName"`

	/* Expiration time of the download link in seconds; defaults to 86400 (24 hours).<br>- MySQL: not supported, default only.<br>- SQL Server: supported. (Optional) */
	UrlExpirationSecond *string `json:"urlExpirationSecond"`
}

/*
 * param regionId: Region code; see the regions/availability-zones table (../Enum-Definitions/Regions-AZ.md) (Required)
 * param backupId: Backup ID (Required)
 *
 * @Deprecated, not compatible when mandatory parameters changed
 */
func NewDescribeBackupDownloadURLRequest(
	regionId string,
	backupId string,
) *DescribeBackupDownloadURLRequest {

	return &DescribeBackupDownloadURLRequest{
		JDCloudRequest: core.JDCloudRequest{
			URL:     "/regions/{regionId}/backups/{backupId}:describeBackupDownloadURL",
			Method:  "GET",
			Header:  nil,
			Version: "v1",
		},
		RegionId: regionId,
		BackupId: backupId,
	}
}

/*
 * param regionId: Region code; see the regions/availability-zones table (../Enum-Definitions/Regions-AZ.md) (Required)
 * param backupId: Backup ID (Required)
 * param fileName: File name. MySQL: not supported. SQL Server: required; names the file inside the backup to get a download link for (file name without extension == database name). (Optional)
 * param urlExpirationSecond: Expiration time of the download link in seconds; defaults to 86400 (24 hours). MySQL: default only. SQL Server: supported. (Optional)
 */
func NewDescribeBackupDownloadURLRequestWithAllParams(
	regionId string,
	backupId string,
	fileName *string,
	urlExpirationSecond *string,
) *DescribeBackupDownloadURLRequest {

	return &DescribeBackupDownloadURLRequest{
		JDCloudRequest: core.JDCloudRequest{
			URL:     "/regions/{regionId}/backups/{backupId}:describeBackupDownloadURL",
			Method:  "GET",
			Header:  nil,
			Version: "v1",
		},
		RegionId:            regionId,
		BackupId:            backupId,
		FileName:            fileName,
		UrlExpirationSecond: urlExpirationSecond,
	}
}

/* This constructor has better compatible ability when API parameters changed */
func NewDescribeBackupDownloadURLRequestWithoutParam() *DescribeBackupDownloadURLRequest {

	return &DescribeBackupDownloadURLRequest{
		JDCloudRequest: core.JDCloudRequest{
			URL:     "/regions/{regionId}/backups/{backupId}:describeBackupDownloadURL",
			Method:  "GET",
			Header:  nil,
			Version: "v1",
		},
	}
}

/* param regionId: Region code; see the regions/availability-zones table (../Enum-Definitions/Regions-AZ.md) (Required) */
func (r *DescribeBackupDownloadURLRequest) SetRegionId(regionId string) {
	r.RegionId = regionId
}

/* param backupId: Backup ID (Required) */
func (r *DescribeBackupDownloadURLRequest) SetBackupId(backupId string) {
	r.BackupId = backupId
}

/* param fileName: File name. MySQL: not supported. SQL Server: required; names the file inside the backup to get a download link for (file name without extension == database name). (Optional) */
func (r *DescribeBackupDownloadURLRequest) SetFileName(fileName string) {
	r.FileName = &fileName
}

/* param urlExpirationSecond: Expiration time of the download link in seconds; defaults to 86400 (24 hours). MySQL: default only. SQL Server: supported. (Optional) */
func (r *DescribeBackupDownloadURLRequest) SetUrlExpirationSecond(urlExpirationSecond string) {
	r.UrlExpirationSecond = &urlExpirationSecond
}

// GetRegionId returns path parameter 'regionId' if exist,
// otherwise return empty string
func (r DescribeBackupDownloadURLRequest) GetRegionId() string {
	return r.RegionId
}

// DescribeBackupDownloadURLResponse is the API response envelope.
type DescribeBackupDownloadURLResponse struct {
	RequestID string                          `json:"requestId"`
	Error     core.ErrorResponse              `json:"error"`
	Result    DescribeBackupDownloadURLResult `json:"result"`
}

// DescribeBackupDownloadURLResult carries the public and VPC-internal URLs.
type DescribeBackupDownloadURLResult struct {
	PublicURL   string `json:"publicURL"`
	InternalURL string `json:"internalURL"`
}
import json
import logging

product_list = []  # Store the product menu
quant_list = []  # Store the updated quantities

logger = logging.getLogger(__name__)
logging.basicConfig(filename='inventory.log', level=logging.INFO)  # Configure logging to write to a file

# Read the product menu from the JSON file.
# Expects a top-level "products" list; raises if the file is missing/invalid.
with open(r'./actionserver/custom_payload.json') as f:
    frendy_product_menu = json.load(f)
product_list = frendy_product_menu['products']

# Prompt the user to input the quantities of each product in stock.
# NOTE(review): input() blocks for interactive entry and int() raises
# ValueError on non-numeric answers — confirm this script is only run manually.
for product in product_list:
    quantity = int(input(f"Enter the quantity of {product['name']}: "))
    quant_list.append(quantity)
    logger.info(f"Updated quantity of {product['name']} to {quantity}")

# Store the product menu and the updated quantities for further processing
print("Product Menu:", product_list)
print("Updated Quantities:", quant_list)
package paixingLogic
import (
"fmt"
"math"
)
// Card suit definitions.
const (
	POCK_MEI   = 1   // clubs
	POCK_HEI   = 2   // spades
	POCK_FANG  = 3   // diamonds
	POCK_HONG  = 4   // hearts
	POCK_WANG  = 5   // jokers
	POCK_JISHU = 100 // base offset between suit blocks in a card id
)

// Hand (card pattern) type definitions.
const (
	// single card
	PAIXING_DAN = 1
	// pair
	PAIXING_DUIZI = 2
	// triplet
	PAIXING_SAN = 3
	// triplet with two attached cards
	PAIXING_SANER = 4
	// consecutive pairs
	PAIXING_LIANDUI = 5
	// straight
	PAIXING_SHUNZI = 6
	// plane (consecutive triplets)
	PAIXING_FEIJI = 7
	// bomb
	PAIXING_ZHADAN = 8
	// joker pair (rocket)
	PAIXING_TIANZHA = 9
)
// GetPockType returns the suit portion of a card id (cid / 100).
// NOTE(review): cid/100 is already integer division, so the float conversion
// and math.Floor are no-ops; kept as-is because removing them would orphan
// the file-level "math" import.
func GetPockType(cid int) int {
	return int(math.Floor(float64(cid / 100)))
}
// GetPockValue returns the rank portion of a card id (the part below 100).
func GetPockValue(cid int) int {
	rank := cid % 100
	return rank
}
// GetPockIndex builds a card id from a suit t (POCK_MEI..POCK_WANG) and a
// rank n (0..52). Out-of-range input is logged and yields -1.
func GetPockIndex(t int, n int) int {
	if t < POCK_MEI || t > POCK_WANG || n < 0 || n > 52 {
		fmt.Println("GetPockIndex :错误的Index ", t, n)
		return -1
	}
	// The suit constants are the consecutive integers 1..5, so the original
	// five-way switch (case POCK_MEI: 1*POCK_JISHU+n, ...) collapses to a
	// single multiplication with identical results.
	return t*POCK_JISHU + n
}
// GetPockTypeBeishu returns the score multiplier for a hand type.
// Multipliers are currently disabled: every hand type scores x1.
// (A previous commented-out version doubled bombs and quadrupled a
// "PAIXING_DUIWANG" constant that no longer exists; re-introduce the
// per-type multipliers here if they are ever wanted again.)
func GetPockTypeBeishu(t int) int {
	return 1
}
# Intuitive Code Representation for a Game of Cards

# Declare card suits
SUITS = ("Hearts", "Clubs", "Diamonds", "Spades")

# Declare card ranks (Ace, pips 2-10, then face cards)
RANKS = ("Ace", 2, 3, 4, 5, 6, 7, 8, 9, 10, "Jack", "Queen", "King")

# Declare card colors: index 0 for red suits, index 1 for black suits
COLORS = ("Red", "Black")
# Construct unique identifiers for cards
def construct_cards(suits, ranks, colors):
    """Constructs unique identifiers and builds a deck of cards.

    Each card is a ``(suit, rank, color)`` tuple: Hearts and Diamonds get
    ``colors[0]``, every other suit gets ``colors[1]``.
    """
    red_suits = ("Hearts", "Diamonds")
    return [
        (suit, rank, colors[0] if suit in red_suits else colors[1])
        for suit in suits
        for rank in ranks
    ]
# Construct and print card deck (52 (suit, rank, color) tuples)
deck = construct_cards(SUITS, RANKS, COLORS)
print("Deck of cards:", deck)
<gh_stars>0
package com.example.samplewebviewapsalar;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.webkit.JsResult;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import com.apsalar.sdk.Apsalar;
import com.apsalar.sdk.ApsalarJSInterface;
import org.json.*;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;
/**
 * Demo activity that hosts a WebView wired to the Apsalar analytics SDK via a
 * JavaScript bridge, and reports back/forward navigation as Apsalar events.
 */
public class MyActivity extends Activity {
    private WebView mWebView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_my);

        // Start the Apsalar analytics session before the WebView loads content.
        Apsalar.startSession(this, "<API_KEY>", "<SECRET>");
        Log.d("Apsalar SDK", "sdkVersion = " + com.apsalar.sdk.Constants.SDK_VER);

        mWebView = (WebView) findViewById(R.id.activity_main_webview);
        WebSettings webSettings = mWebView.getSettings();
        // JavaScript must be on for the Apsalar JS bridge registered below.
        webSettings.setJavaScriptEnabled(true);
        mWebView.setWebChromeClient( new WebChromeClient() {
        });
        ApsalarJSInterface ApsalarJSInterfaceInstance = new ApsalarJSInterface(this);
        //R.id.webview below is an int generated by the android system, can be found in the R file.
        ApsalarJSInterfaceInstance.setWebViewId(R.id.activity_main_webview);
        mWebView.setWebViewClient( new WebViewClient() {
        } );
        // Expose the bridge to page JavaScript as window.ApsalarInterface.
        mWebView.addJavascriptInterface(ApsalarJSInterfaceInstance, "ApsalarInterface");
        mWebView.loadUrl("file:///android_asset/www2/index.html");
    }

    /**
     * Parses the parameter (query-string) portion of a URL into a map.
     *
     * @param url the URL to parse.
     *
     * @return the parsed key/value pairs; empty if the URL contains no '?'.
     */
    public Map< String, String > parseUrlParameters( String url ) {
        Map< String, String > result = new HashMap< String, String >();
        int index = url.indexOf( "?" );
        if( index == -1 ) {
            return result;
        }

        String[] params = url.substring( index + 1 ).split( "&" );
        for( String param : params ) {
            String[] keyValuePair = param.split( "=" );
            if( keyValuePair.length >= 2 ) {
                try {
                    // Values are URL-decoded; keys are stored as-is.
                    String value = URLDecoder.decode(keyValuePair[1], "utf-8");
                    result.put( keyValuePair[ 0 ], value );
                } catch( java.io.UnsupportedEncodingException e ) {
                    // utf-8 is always available, so this is effectively unreachable.
                    e.printStackTrace();
                }
            }
        }

        return result;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.my, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Check if the key event was the Back button and if there's history
        if ((keyCode == KeyEvent.KEYCODE_BACK) && mWebView.canGoBack()) {
            mWebView.goBack();
            // Event name is Japanese for "back button pressed" (sent verbatim).
            Apsalar.event("バックボタン押下");
            return true;
        }
        if ((keyCode == KeyEvent.KEYCODE_FORWARD) && mWebView.canGoForward()) {
            mWebView.goForward();
            // Event name is Japanese for "forward button pressed" (sent verbatim).
            Apsalar.event("進むボタン押下");
            return true;
        }

        // If it wasn't the Back key or there's no web page history, bubble up to the default
        // system behavior (probably exit the activity)
        return super.onKeyDown(keyCode, event);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Detach the Apsalar receiver while the activity is backgrounded.
        Apsalar.unregisterApsalarReceiver();
    }
}
|
//
// StrongTest.h
// LoginOC
//
// Created by HongpengYu on 2019/1/13.
// Copyright © 2019 HongpengYu. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Demo class holding an explicitly __strong ivar to illustrate ARC ownership.
@interface StrongTest : NSObject
{
    id __strong obj_;   // strongly retained; released when the instance deallocates
}

/// Stores obj into the strong ivar (retained under ARC).
- (void)setObject:(id __strong)obj;

@end

/// Empty marker class — the name suggests it demos strong reference cycles
/// (no members are declared here; implementation not visible in this header).
@interface StrongCycleTest: NSObject
@end

/// Empty marker class — the name suggests it demos __unsafe_unretained
/// references (no members declared in this header).
@interface Unsafe_unRetainedTest: NSObject
@end
NS_ASSUME_NONNULL_END
|
/**
* Copyright 2018 hubohua
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.demoncat.dcapp.widget;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.widget.ImageView;
import com.demoncat.dcapp.R;
import com.demoncat.dcapp.utils.CommonUtils;
/**
* @Class: RoundCornerImageView
* @Description: Round corner image view
* @Author: hubohua
* @CreateDate: 2018/4/18
*/
@SuppressLint("AppCompatCustomView")
public class RoundCornerImageView extends ImageView {
    private Context context;
    // Paint used to erase the corner wedges: DST_OUT removes destination
    // pixels wherever this paint draws.
    private Paint paint;
    // Corner radii in pixels (converted from dp, or taken from XML attrs).
    private int roundWidth;
    private int roundHeight;
    // Plain paint (no xfermode) used to blit the off-screen bitmap onto the
    // real canvas.
    private Paint paint2;
    private int roundSize = DEFAULT_CORNER_SIZE;
    private static final int DEFAULT_CORNER_SIZE = 5; // dp
    public RoundCornerImageView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context, attrs);
    }
    public RoundCornerImageView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }
    public RoundCornerImageView(Context context) {
        super(context);
        init(context, null);
    }
    // Shared constructor body: resolves corner radii (defaults, then XML
    // attributes) and prepares the two paints.
    private void init(Context context, AttributeSet attrs) {
        this.context = context;
        roundWidth = CommonUtils.dip2px(context, roundSize);
        roundHeight = CommonUtils.dip2px(context, roundSize);
        if (attrs != null) {
            // XML attributes override the dp-based defaults.
            TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.RoundCornerImageView);
            roundWidth = a.getDimensionPixelSize(R.styleable.RoundCornerImageView_roundWidth, roundWidth);
            roundHeight = a.getDimensionPixelSize(R.styleable.RoundCornerImageView_roundHeight, roundHeight);
            a.recycle();
        }
        paint = new Paint();
        paint.setColor(Color.WHITE);
        paint.setAntiAlias(true);
        // DST_OUT: drawing the corner paths below punches transparent holes.
        paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_OUT));
        paint2 = new Paint();
        paint2.setXfermode(null);
    }
    @Override
    public void draw(Canvas canvas) {
        // Render the view into an off-screen bitmap, erase the four corner
        // wedges with the DST_OUT paint, then copy the result to the real
        // canvas.
        // NOTE(review): this allocates a full-size ARGB_8888 bitmap on every
        // draw pass, which is expensive for frequently-invalidated views;
        // consider caching the bitmap — TODO confirm with profiling.
        Bitmap bitmap = Bitmap.createBitmap(getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
        Canvas canvas2 = new Canvas(bitmap);
        super.draw(canvas2);
        drawLiftUp(canvas2);
        drawLiftDown(canvas2);
        drawRightUp(canvas2);
        drawRightDown(canvas2);
        canvas.drawBitmap(bitmap, 0, 0, paint2);
        bitmap.recycle();
    }
    // Erases the top-left corner wedge (area outside the rounded arc).
    private void drawLiftUp(Canvas canvas) {
        Path path = new Path();
        path.moveTo(0, roundHeight);
        path.lineTo(0, 0);
        path.lineTo(roundWidth, 0);
        path.arcTo(new RectF(0, 0, roundWidth * 2, roundHeight * 2), -90, -90);
        path.close();
        canvas.drawPath(path, paint);
    }
    // Erases the bottom-left corner wedge.
    private void drawLiftDown(Canvas canvas) {
        Path path = new Path();
        path.moveTo(0, getHeight() - roundHeight);
        path.lineTo(0, getHeight());
        path.lineTo(roundWidth, getHeight());
        path.arcTo(new RectF(0, getHeight() - roundHeight * 2, roundWidth * 2, getHeight()), 90, 90);
        path.close();
        canvas.drawPath(path, paint);
    }
    // Erases the bottom-right corner wedge.
    private void drawRightDown(Canvas canvas) {
        Path path = new Path();
        path.moveTo(getWidth() - roundWidth, getHeight());
        path.lineTo(getWidth(), getHeight());
        path.lineTo(getWidth(), getHeight() - roundHeight);
        // Start angle "-0" is equivalent to 0 degrees (3 o'clock position).
        path.arcTo(new RectF(getWidth() - roundWidth * 2, getHeight() - roundHeight * 2, getWidth(), getHeight()), -0, 90);
        path.close();
        canvas.drawPath(path, paint);
    }
    // Erases the top-right corner wedge.
    private void drawRightUp(Canvas canvas) {
        Path path = new Path();
        path.moveTo(getWidth(), roundHeight);
        path.lineTo(getWidth(), 0);
        path.lineTo(getWidth() - roundWidth, 0);
        path.arcTo(new RectF(getWidth() - roundWidth * 2, 0, getWidth(), roundHeight * 2), -90, 90);
        path.close();
        canvas.drawPath(path, paint);
    }
    /**
     * Set round corner size in dp
     * @param size corner radius in dp; applied to both width and height radii
     */
    public void setRoundSize(int size) {
        roundSize = size;
        roundWidth = CommonUtils.dip2px(context, roundSize);
        roundHeight = CommonUtils.dip2px(context, roundSize);
        invalidate();
    }
}
|
#!/bin/sh -e
#
# Copyright (c) 2009-2015 Robert Nelson <robertcnelson@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Host architecture decides whether a cross compiler is needed at all.
ARCH=$(uname -m)
DIR=$PWD
# Local helpers/overrides; may pre-set the CC variable.
. "${DIR}/system.sh"
#For:
#toolchain
. "${DIR}/version.sh"
# Prefer a shared download cache under $HOME/dl/gcc when present; otherwise
# download into the project-local dl/ directory.
if [ -d $HOME/dl/gcc/ ] ; then
	gcc_dir="$HOME/dl/gcc"
else
	gcc_dir="${DIR}/dl"
fi
# Downloads and unpacks the toolchain selected by gcc_toolchain (which sets
# site/archive_site/version/filename/directory/datestamp/binary), then points
# CC at the unpacked cross compiler (or leaves it empty on native armv7l).
# Fix: the download/extract commands referenced "$(unknown)" — corrupted
# text for "${filename}", the tarball name set by every gcc_toolchain case
# (it matches the example URLs in the case comments and is unused otherwise).
dl_gcc_generic () {
	# Resumable download into the shared cache directory.
	WGET="wget -c --directory-prefix=${gcc_dir}/"
	# The ${datestamp} marker records a completed install; skip when present.
	if [ ! -f "${gcc_dir}/${directory}/${datestamp}" ] ; then
		echo "Installing: ${toolchain_name}"
		echo "-----------------------------"
		# Try the current release site first, then the archive mirror.
		${WGET} "${site}/${version}/${filename}" || ${WGET} "${archive_site}/${version}/${filename}"
		# Remove any stale partial unpack before extracting.
		if [ -d "${gcc_dir}/${directory}" ] ; then
			rm -rf "${gcc_dir}/${directory}" || true
		fi
		tar -xf "${gcc_dir}/${filename}" -C "${gcc_dir}/"
		# Only stamp the install if the compiler binary actually unpacked.
		if [ -f "${gcc_dir}/${directory}/${binary}gcc" ] ; then
			touch "${gcc_dir}/${directory}/${datestamp}"
		fi
	fi
	if [ "x${ARCH}" = "xarmv7l" ] ; then
		#using native gcc
		CC=
	else
		CC="${gcc_dir}/${directory}/${binary}"
	fi
}
# Maps the ${toolchain} identifier (set in version.sh) onto a concrete Linaro
# release: fills in site/version/filename/directory/datestamp/binary and then
# hands off to dl_gcc_generic for download/installation.
gcc_toolchain () {
	site="https://releases.linaro.org"
	archive_site="https://releases.linaro.org/archive"
	case "${toolchain}" in
	gcc_linaro_eabi_4_8)
		#
		#https://releases.linaro.org/14.04/components/toolchain/binaries/gcc-linaro-arm-none-eabi-4.8-2014.04_linux.tar.xz
		#
		gcc_version="4.8"
		release="2014.04"
		toolchain_name="gcc-linaro-arm-none-eabi"
		version="14.04/components/toolchain/binaries"
		directory="${toolchain_name}-${gcc_version}-${release}_linux"
		filename="${directory}.tar.xz"
		datestamp="${release}-${toolchain_name}"
		binary="bin/arm-none-eabi-"
		;;
	gcc_linaro_eabi_4_9_i686)
		#
		#https://releases.linaro.org/14.09/components/toolchain/binaries/gcc-linaro-arm-none-eabi-4.9-2014.09_linux.tar.xz
		#
		gcc_version="4.9"
		release="2014.09"
		toolchain_name="gcc-linaro-arm-none-eabi"
		version="14.09/components/toolchain/binaries"
		directory="${toolchain_name}-${gcc_version}-${release}_linux"
		filename="${directory}.tar.xz"
		datestamp="${release}-${toolchain_name}"
		binary="bin/arm-none-eabi-"
		;;
	gcc_linaro_eabi_4_9)
		#
		#https://releases.linaro.org/15.05/components/toolchain/binaries/arm-eabi/gcc-linaro-4.9-2015.05-x86_64_arm-eabi.tar.xz
		#
		gcc_version="4.9"
		release="15.05"
		target="arm-eabi"
		version="${release}/components/toolchain/binaries/${target}"
		filename="gcc-linaro-${gcc_version}-20${release}-x86_64_arm-eabi.tar.xz"
		directory="gcc-linaro-${gcc_version}-20${release}-x86_64_arm-eabi"
		datestamp="${gcc_version}-20${release}-${target}"
		binary="bin/arm-eabi-"
		;;
	gcc_linaro_eabi_5)
		#
		#https://releases.linaro.org/components/toolchain/binaries/5.2-2015.11/arm-eabi/gcc-linaro-5.2-2015.11-x86_64_arm-eabi.tar.xz
		#
		gcc_version="5.2"
		release="15.11"
		target="arm-eabi"
		version="components/toolchain/binaries/${gcc_version}-20${release}/${target}"
		filename="gcc-linaro-${gcc_version}-20${release}-x86_64_arm-eabi.tar.xz"
		directory="gcc-linaro-${gcc_version}-20${release}-x86_64_arm-eabi"
		datestamp="${gcc_version}-20${release}-${target}"
		binary="bin/arm-eabi-"
		;;
	gcc_linaro_gnueabi_4_6)
		#
		#https://releases.linaro.org/12.03/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabi-2012.03-20120326_linux.tar.bz2
		#https://releases.linaro.org/archive/12.03/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabi-2012.03-20120326_linux.tar.bz2
		#
		release="2012.03"
		toolchain_name="gcc-linaro-arm-linux-gnueabi"
		version="12.03/components/toolchain/binaries"
		version_date="20120326"
		directory="${toolchain_name}-${release}-${version_date}_linux"
		filename="${directory}.tar.bz2"
		datestamp="${version_date}-${toolchain_name}"
		binary="bin/arm-linux-gnueabi-"
		;;
	gcc_linaro_gnueabihf_4_7)
		#
		#https://releases.linaro.org/13.04/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabihf-4.7-2013.04-20130415_linux.tar.xz
		#https://releases.linaro.org/archive/13.04/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabihf-4.7-2013.04-20130415_linux.tar.xz
		#
		gcc_version="4.7"
		release="2013.04"
		toolchain_name="gcc-linaro-arm-linux-gnueabihf"
		version="13.04/components/toolchain/binaries"
		version_date="20130415"
		directory="${toolchain_name}-${gcc_version}-${release}-${version_date}_linux"
		filename="${directory}.tar.xz"
		datestamp="${version_date}-${toolchain_name}"
		binary="bin/arm-linux-gnueabihf-"
		;;
	gcc_linaro_gnueabihf_4_8)
		#
		#https://releases.linaro.org/14.04/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabihf-4.8-2014.04_linux.tar.xz
		#
		gcc_version="4.8"
		release="2014.04"
		toolchain_name="gcc-linaro-arm-linux-gnueabihf"
		version="14.04/components/toolchain/binaries"
		directory="${toolchain_name}-${gcc_version}-${release}_linux"
		filename="${directory}.tar.xz"
		datestamp="${release}-${toolchain_name}"
		binary="bin/arm-linux-gnueabihf-"
		;;
	gcc_linaro_gnueabihf_4_9_i686)
		#
		#https://releases.linaro.org/14.09/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabihf-4.9-2014.09_linux.tar.xz
		#
		gcc_version="4.9"
		release="2014.09"
		toolchain_name="gcc-linaro-arm-linux-gnueabihf"
		version="14.09/components/toolchain/binaries"
		directory="${toolchain_name}-${gcc_version}-${release}_linux"
		filename="${directory}.tar.xz"
		datestamp="${release}-${toolchain_name}"
		binary="bin/arm-linux-gnueabihf-"
		;;
	gcc_linaro_gnueabihf_4_9)
		#
		#https://releases.linaro.org/15.05/components/toolchain/binaries/arm-linux-gnueabihf/gcc-linaro-4.9-2015.05-x86_64_arm-linux-gnueabihf.tar.xz
		#
		gcc_version="4.9"
		release="15.05"
		target="arm-linux-gnueabihf"
		version="${release}/components/toolchain/binaries/${target}"
		filename="gcc-linaro-${gcc_version}-20${release}-x86_64_${target}.tar.xz"
		directory="gcc-linaro-${gcc_version}-20${release}-x86_64_${target}"
		datestamp="${gcc_version}-20${release}-${target}"
		binary="bin/${target}-"
		;;
	gcc_linaro_gnueabihf_5)
		#
		#https://releases.linaro.org/components/toolchain/binaries/5.2-2015.11/arm-linux-gnueabihf/gcc-linaro-5.2-2015.11-x86_64_arm-linux-gnueabihf.tar.xz
		#
		gcc_version="5.2"
		release="15.11"
		target="arm-linux-gnueabihf"
		version="components/toolchain/binaries/${gcc_version}-20${release}/${target}"
		filename="gcc-linaro-${gcc_version}-20${release}-x86_64_${target}.tar.xz"
		directory="gcc-linaro-${gcc_version}-20${release}-x86_64_${target}"
		datestamp="${gcc_version}-20${release}-${target}"
		binary="bin/${target}-"
		;;
	gcc_linaro_aarch64_gnu_5)
		#
		#https://releases.linaro.org/components/toolchain/binaries/5.2-2015.11/aarch64-linux-gnu/gcc-linaro-5.2-2015.11-x86_64_aarch64-linux-gnu.tar.xz
		#
		gcc_version="5.2"
		release="15.11"
		target="aarch64-linux-gnu"
		version="components/toolchain/binaries/${gcc_version}-20${release}/${target}"
		filename="gcc-linaro-${gcc_version}-20${release}-x86_64_${target}.tar.xz"
		directory="gcc-linaro-${gcc_version}-20${release}-x86_64_${target}"
		datestamp="${gcc_version}-20${release}-${target}"
		binary="bin/${target}-"
		;;
	*)
		echo "bug: maintainer forgot to set:"
		echo "toolchain=\"xzy\" in version.sh"
		exit 1
		;;
	esac
	dl_gcc_generic
}
# If system.sh did not pre-set CC and we are not building natively on armv7l,
# download a Linaro cross toolchain (downgrading to the last 32-bit-capable
# releases on i686 hosts).
if [ "x${CC}" = "x" ] && [ "x${ARCH}" != "xarmv7l" ] ; then
	if [ "x${ARCH}" = "xi686" ] ; then
		echo ""
		echo "Warning: 32bit is no longer supported by linaro..."
		if [ "x${toolchain}" = "xgcc_linaro_eabi_4_9" ] ; then
			echo ""
			echo "Warning: 32bit is no longer supported by linaro, using old 14.09 gcc-4.9 release..."
			echo ""
			toolchain="gcc_linaro_eabi_4_9_i686"
		fi
		if [ "x${toolchain}" = "xgcc_linaro_gnueabihf_4_9" ] ; then
			echo ""
			echo "Warning: 32bit is no longer supported by linaro, using old 14.09 gcc-4.9 release..."
			echo ""
			toolchain="gcc_linaro_gnueabihf_4_9_i686"
		fi
	fi
	gcc_toolchain
fi
# Sanity check: the selected compiler must run and target ARM; otherwise fall
# back to downloading the default toolchain.
GCC_TEST=$(LC_ALL=C "${CC}gcc" -v 2>&1 | grep "Target:" | grep arm || true)
if [ "x${GCC_TEST}" = "x" ] ; then
	echo "-----------------------------"
	echo "scripts/gcc: Error: The GCC ARM Cross Compiler you setup in system.sh (CC variable) is invalid."
	echo "-----------------------------"
	gcc_toolchain
fi
echo "-----------------------------"
echo "scripts/gcc: Using: $(LC_ALL=C "${CC}"gcc --version)"
echo "-----------------------------"
# Persist the chosen cross-compiler prefix for later build steps.
echo "CC=${CC}" > "${DIR}/.CC"
|
# permissions to update from WP Dashboard
# (www-data is the web-server user on Debian/Ubuntu; required so WordPress
# can write plugin/theme updates itself)
sudo chown -R www-data:www-data wordpress/
# permissions to manage files locally
# NOTE: run one of the two chowns depending on the task — the second reverses
# the first so the current user can edit the files directly.
sudo chown -R $USER:$USER wordpress/
package cmd
import (
"fmt"
"github.com/BraspagDevelopers/bpdt/lib"
"github.com/spf13/cobra"
)
// generateCmd is the parent "generate" (alias "gen") command; it only groups
// the concrete generator subcommands registered in init.
var generateCmd = &cobra.Command{
	Use:     "generate",
	Aliases: []string{"gen"},
}
// generateConfigMapCmd renders a Kubernetes ConfigMap manifest named after the
// single positional argument. Data is gathered by lib.GenerateConfigMap,
// driven by the --env, --prefix, --strip-prefix and --ignore-case flags, and
// the resulting manifest is printed to stdout.
var generateConfigMapCmd = &cobra.Command{
	Use:   "configmap <name-on-manifest>",
	Short: "Generate a ConfigMap manifest",
	Args:  cobra.ExactArgs(1),
	Run: func(cmd *cobra.Command, args []string) {
		name := args[0]
		// Flag lookups can only fail on programmer error (unregistered
		// flag), so each is funneled through handleError.
		fromEnv, err := cmd.Flags().GetBool("env")
		handleError(err)
		prefix, err := cmd.Flags().GetString("prefix")
		handleError(err)
		stripPrefix, err := cmd.Flags().GetBool("strip-prefix")
		handleError(err)
		ignoreCase, err := cmd.Flags().GetBool("ignore-case")
		handleError(err)
		configMap, err := lib.GenerateConfigMap(lib.GenerateConfigMapParams{
			Name:            name,
			FromEnvironment: fromEnv,
			Prefix:          prefix,
			StripPrefix:     stripPrefix,
			IgnoreCase:      ignoreCase,
		})
		handleError(err)
		fmt.Println(configMap)
	},
}
// init wires the generate command tree into the root command and declares the
// flags read by generateConfigMapCmd's Run function.
func init() {
	rootCmd.AddCommand(generateCmd)
	generateCmd.AddCommand(generateConfigMapCmd)
	generateConfigMapCmd.Flags().Bool("env", false, "Load variables from environment")
	generateConfigMapCmd.Flags().String("prefix", "", "Filter the variables by this prefix")
	generateConfigMapCmd.Flags().Bool("strip-prefix", false, "Strip the variable name prefix")
	generateConfigMapCmd.Flags().BoolP("ignore-case", "i", false, "Ignore case distinctions when filtering variable names")
}
|
#!/bin/bash
# Container entrypoint: sync the clock, wait until Postgres accepts
# connections, then start Bitbucket in the foreground.
host="172.17.0.1"
# One-shot NTP sync in the background (does not block startup).
ntpd -d -q -n -p pool.ntp.org &
# {{postgres.postgresPassword}} / {{postgres.postgresUser}} are template
# placeholders substituted at deploy time — TODO confirm templating engine.
export PGPASSWORD={{postgres.postgresPassword}}
# Poll every 10s until `\l` succeeds against the bitbucket database.
until psql bitbucket -h "$host" -U "{{postgres.postgresUser}}" -c '\l'; do
    # >&2 echo "Postgres is unavailable - sleeping"
    sleep 10
done
>&2 echo "Postgres is up - executing command"
/opt/atlassian/bitbucket/bin/start-bitbucket.sh -fg
|
def rotateNodes(root):
    """Rotate the subtree rooted at ``root`` one step and return the new root.

    If the node has a left child, rotate clockwise (the left child becomes
    the new root); otherwise, if it has a right child, rotate counter
    clockwise (the right child becomes the new root).  A childless node is
    returned unchanged.

    Bug fix: the original applied BOTH rotations whenever a left child was
    present — after the clockwise rotation the new root always has a right
    child (the old root), so the second ``if`` fired and exactly undid the
    first rotation, making the function a no-op for any node with a left
    child.  The second branch is now ``elif`` so only one rotation runs.
    """
    if root.left is not None:
        # Clockwise: left child comes up, old root becomes its right child.
        new_root = root.left
        root.left = new_root.right
        new_root.right = root
        root = new_root
    elif root.right is not None:
        # Counter clockwise: right child comes up, old root becomes its left child.
        new_root = root.right
        root.right = new_root.left
        new_root.left = root
        root = new_root
    return root
// Centralised route table: page paths (SITE), API endpoints (API) and static
// upload locations (STATIC). `as const` narrows the values to literal types.
export const Routes = {
  SITE: {
    HOME: '/',
    POST: '/post/',
    DRAFTS: '/post/drafts/',
    CREATE: '/post/create/',
    REGISTER: '/auth/register/',
    LOGIN: '/auth/login/',
    SETTINGS: '/settings/',
    USERS: '/users/',
    _500: '/500/',
  },
  API: {
    POSTS: '/api/posts/',
    USERS: '/api/users/',
    PROFILE: '/api/users/profile/',
    SESSION: '/api/auth/session/',
    SEED: '/api/seed/',
  },
  STATIC: {
    AVATARS: '/uploads/avatars/',
    HEADERS: '/uploads/headers/',
  },
} as const;
// ----------- redirects getServerSideProps
// Ready-made getServerSideProps return values: a 404 result plus temporary
// (permanent: false) redirects to the error page, login page and home page.
export const Redirects = {
  NOT_FOUND: {
    notFound: true,
  },
  _500: {
    redirect: {
      permanent: false,
      destination: Routes.SITE._500,
    },
  },
  LOGIN: {
    redirect: {
      permanent: false,
      destination: Routes.SITE.LOGIN,
    },
  },
  HOME: {
    redirect: {
      permanent: false,
      destination: Routes.SITE.HOME,
    },
  },
} as const;
// ----------- themes array
// Available theme CSS class names (applied as-is to the document).
export const themes = [
  'theme-light',
  'theme-dark',
  'theme-blue',
  'theme-red',
  'theme-green',
  'theme-black',
];
|
<reponame>VICEMedia/bitmovin-javascript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var urljoin = require("url-join");
var http_1 = require("../../../utils/http");
// Builds the REST resource for per-stream sprite configurations under
// encoding/encodings/{encodingId}/streams/{streamId}/sprites.
// NOTE: this file is compiled output (see the source-map reference at the
// bottom) — edit the TypeScript source rather than this file.
exports.sprites = function (configuration, encodingId, streamId, httpClient) {
    var get = httpClient.get, post = httpClient.post, delete_ = httpClient.delete_;
    // Calling the resource with a sprite id yields detail/customData/delete
    // operations scoped to that sprite.
    var resourceDetails = function (spriteId) {
        return {
            details: function () {
                var url = urljoin(configuration.apiBaseUrl, 'encoding/encodings', encodingId, 'streams', streamId, 'sprites', spriteId);
                return get(configuration, url);
            },
            customData: function () {
                var url = urljoin(configuration.apiBaseUrl, 'encoding/encodings', encodingId, 'streams', streamId, 'sprites', spriteId, 'customData');
                return get(configuration, url);
            },
            delete: function () {
                var url = urljoin(configuration.apiBaseUrl, 'encoding/encodings', encodingId, 'streams', streamId, 'sprites', spriteId);
                return delete_(configuration, url);
            }
        };
    };
    // POSTs a new sprite configuration to the collection.
    var add = function (sprite) {
        var url = urljoin(configuration.apiBaseUrl, 'encoding/encodings', encodingId, 'streams', streamId, 'sprites');
        return post(configuration, url, sprite);
    };
    var list = http_1.utils.buildListCallFunction(httpClient, configuration, urljoin(configuration.apiBaseUrl, 'encoding/encodings', encodingId, 'streams', streamId, 'sprites'));
    // The resource is the resourceDetails function itself with add/list
    // attached as properties (callable-with-members pattern).
    var resource = Object.assign(resourceDetails, { add: add, list: list });
    return resource;
};
// Default export: same resource factory bound to the shared HTTP client.
exports.default = (function (configuration, encodingId, streamId) {
    return exports.sprites(configuration, encodingId, streamId, http_1.default);
});
//# sourceMappingURL=sprites.js.map |
package database.examples.realtime;
import com.google.gson.annotations.Expose;
import database.firebase.TrackableObject;
import org.joda.time.DateTime;
import java.util.ArrayList;
import java.util.List;
public class PostMetaData extends TrackableObject {
    // Fields marked @Expose are serialized (Gson annotation).
    @Expose private String creator;
    @Expose private String timestamp;
    @Expose private List<String> stuff;
    // No-arg constructor; presumably required for database deserialization —
    // TODO confirm against TrackableObject's contract.
    private PostMetaData() {}
    /**
     * Test class for showing off working of realtime database
     */
    public PostMetaData(String creator) {
        timestamp = createFormattedTime();
        this.creator = creator;
        stuff = new ArrayList<>();
        stuff.add("test");
        stuff.add("test2");
    }
    // Replaces the stored creator name.
    public void changeCreator(String creator) {
        this.creator = creator;
    }
    // Builds a timestamp from the current Joda DateTime. Note that
    // getDayOfWeek()/getMonthOfYear() return numbers, so the result reads
    // like "3, 5 15, 2021" rather than using month/day names.
    private String createFormattedTime() {
        DateTime dt = new DateTime();
        return dt.getDayOfWeek() + ", " + dt.getMonthOfYear() + " " + dt.getDayOfMonth() + ", " + dt.getYear();
    }
    @Override
    public String toString() {
        return " - " + creator + " on " + timestamp;
    }
    // No post-deserialization setup is needed for this class.
    @Override
    public void initialize() {
    }
}
|
package com.tuya.iot.suite.service.idaas.impl;
import com.tuya.iot.suite.ability.idaas.ability.PermissionCheckAbility;
import com.tuya.iot.suite.service.idaas.PermissionCheckService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* @author <EMAIL>
* @description
* @date 2021/05/31
*/
@Service("permissionCheckService")
@Slf4j
public class PermissionCheckServiceImpl implements PermissionCheckService {
    // All checks delegate directly to the IDaaS permission-check ability.
    @Autowired
    PermissionCheckAbility permissionCheckAbility;
    /** Whether the given role holds the given permission within the space. */
    @Override
    public Boolean checkPermissionForRole(String spaceId, String permissionCode, String roleCode) {
        return permissionCheckAbility.checkPermissionForRole(spaceId, permissionCode, roleCode);
    }
    /** Whether the given user holds the given role within the space. */
    @Override
    public Boolean checkRoleForUser(String spaceId, String roleCode, String uid) {
        return permissionCheckAbility.checkRoleForUser(spaceId, roleCode, uid);
    }
    /** Whether the given user holds the given permission within the space. */
    @Override
    public Boolean checkPermissionForUser(String spaceId, String permissionCode, String userId) {
        return permissionCheckAbility.checkPermissionForUser(spaceId, permissionCode, userId);
    }
}
|
import { Dialog, DialogSet, DialogTurnStatus, WaterfallDialog } from "botbuilder-dialogs";
import { RootDialog } from "./rootDialog";
import {
ActivityTypes,
CardFactory,
Storage,
tokenExchangeOperationName,
TurnContext,
} from "botbuilder";
import { ResponseType } from "@microsoft/microsoft-graph-client";
import {
createMicrosoftGraphClient,
loadConfiguration,
OnBehalfOfUserCredential,
TeamsBotSsoPrompt,
} from "@microsoft/teamsfx";
import "isomorphic-fetch";
const MAIN_DIALOG = "MainDialog";
const MAIN_WATERFALL_DIALOG = "MainWaterfallDialog";
const TEAMS_SSO_PROMPT_ID = "TeamsFxSsoPrompt";
export class MainDialog extends RootDialog {
  private requiredScopes: string[] = ["User.Read"]; // hard code the scopes for demo purpose only
  // Storage used to deduplicate signin/tokenExchange invokes across clients.
  private dedupStorage: Storage;
  // Keys written to dedupStorage for the lifetime of active dialogs.
  private dedupStorageKeys: string[];
  // Developer controlls the lifecycle of credential provider, as well as the cache in it.
  // In this sample the provider is shared in all conversations
  constructor(dedupStorage: Storage) {
    super(MAIN_DIALOG);
    loadConfiguration();
    this.addDialog(
      new TeamsBotSsoPrompt(TEAMS_SSO_PROMPT_ID, {
        scopes: this.requiredScopes,
        endOnInvalidMessage: true,
      })
    );
    this.addDialog(
      new WaterfallDialog(MAIN_WATERFALL_DIALOG, [
        this.ssoStep.bind(this),
        this.dedupStep.bind(this),
        this.showUserInfo.bind(this),
      ])
    );
    this.initialDialogId = MAIN_WATERFALL_DIALOG;
    this.dedupStorage = dedupStorage;
    this.dedupStorageKeys = [];
  }
  /**
   * The run method handles the incoming activity (in the form of a DialogContext) and passes it through the dialog system.
   * If no dialog is active, it will start the default dialog.
   * @param {*} dialogContext
   */
  async run(context: TurnContext, accessor: any) {
    const dialogSet = new DialogSet(accessor);
    dialogSet.add(this);
    const dialogContext = await dialogSet.createContext(context);
    const results = await dialogContext.continueDialog();
    if (results.status === DialogTurnStatus.empty) {
      await dialogContext.beginDialog(this.id);
    }
  }
  /** First waterfall step: prompt the user to sign in via Teams SSO. */
  async ssoStep(stepContext: any) {
    return await stepContext.beginDialog(TEAMS_SSO_PROMPT_ID);
  }
  /**
   * Second waterfall step: end the turn for duplicate token-exchange invokes
   * so only one client's response continues the waterfall.
   */
  async dedupStep(stepContext: any) {
    const tokenResponse = stepContext.result;
    // Only dedup after ssoStep to make sure that all Teams client would receive the login request
    if (tokenResponse && (await this.shouldDedup(stepContext.context))) {
      return Dialog.EndOfTurn;
    }
    return await stepContext.next(tokenResponse);
  }
  /**
   * Final waterfall step: call Microsoft Graph on behalf of the signed-in
   * user and show their display name and (best effort) profile photo.
   */
  async showUserInfo(stepContext: any) {
    const tokenResponse = stepContext.result;
    if (tokenResponse) {
      await stepContext.context.sendActivity("Call Microsoft Graph on behalf of user...");
      // Call Microsoft Graph on behalf of user
      const oboCredential = new OnBehalfOfUserCredential(tokenResponse.ssoToken);
      const graphClient = createMicrosoftGraphClient(oboCredential, ["User.Read"]);
      const me = await graphClient.api("/me").get();
      if (me) {
        await stepContext.context.sendActivity(
          `You're logged in as ${me.displayName} (${me.userPrincipalName})${
            me.jobTitle ? `; your job title is: ${me.jobTitle}` : ""
          }.`
        );
        // show user picture
        let photoBinary: ArrayBuffer;
        try {
          photoBinary = await graphClient
            .api("/me/photo/$value")
            .responseType(ResponseType.ARRAYBUFFER)
            .get();
        } catch {
          // Just continue when failing to get the photo.
          return await stepContext.endDialog();
        }
        const buffer = Buffer.from(photoBinary);
        const imageUri = "data:image/png;base64," + buffer.toString("base64");
        const card = CardFactory.thumbnailCard("User Picture", CardFactory.images([imageUri]));
        await stepContext.context.sendActivity({ attachments: [card] });
      } else {
        await stepContext.context.sendActivity("Getting profile from Microsoft Graph failed! ");
      }
      return await stepContext.endDialog();
    }
    await stepContext.context.sendActivity("Token exchange was not successful please try again.");
    return await stepContext.endDialog();
  }
  /** Drops the dedup keys belonging to the ending conversation. */
  async onEndDialog(context: TurnContext) {
    const conversationId = context.activity.conversation.id;
    // Keys have the shape `${channelId}/${conversationId}/${id}`, so the
    // conversation id always appears at an index greater than zero.
    const currentDedupKeys = this.dedupStorageKeys.filter((key) => key.indexOf(conversationId) > 0);
    await this.dedupStorage.delete(currentDedupKeys);
    this.dedupStorageKeys = this.dedupStorageKeys.filter((key) => key.indexOf(conversationId) < 0);
  }
  // If a user is signed into multiple Teams clients, the Bot might receive a "signin/tokenExchange" from each client.
  // Each token exchange request for a specific user login will have an identical activity.value.Id.
  // Only one of these token exchange requests should be processed by the bot. For a distributed bot in production,
  // this requires a distributed storage to ensure only one token exchange is processed.
  async shouldDedup(context: TurnContext): Promise<boolean> {
    const storeItem = {
      eTag: context.activity.value.id,
    };
    const key = this.getStorageKey(context);
    const storeItems = { [key]: storeItem };
    try {
      await this.dedupStorage.write(storeItems);
      this.dedupStorageKeys.push(key);
    } catch (err) {
      // Fix: the original tested `err.message.indexOf("eTag conflict")` as a
      // boolean. indexOf returns -1 (truthy) when the substring is ABSENT and
      // 0 (falsy) when it starts the message — the exact opposite of the
      // intended check. Use includes() for a proper substring test.
      if (err instanceof Error && err.message.includes("eTag conflict")) {
        // Another client already processed this exchange; skip it.
        return true;
      }
      throw err;
    }
    return false;
  }
  /** Builds the dedup storage key; only valid for signin/tokenExchange invokes. */
  getStorageKey(context: TurnContext): string {
    if (!context || !context.activity || !context.activity.conversation) {
      throw new Error("Invalid context, can not get storage key!");
    }
    const activity = context.activity;
    const channelId = activity.channelId;
    const conversationId = activity.conversation.id;
    if (activity.type !== ActivityTypes.Invoke || activity.name !== tokenExchangeOperationName) {
      throw new Error("TokenExchangeState can only be used with Invokes of signin/tokenExchange.");
    }
    const value = activity.value;
    if (!value || !value.id) {
      throw new Error("Invalid signin/tokenExchange. Missing activity.value.id.");
    }
    return `${channelId}/${conversationId}/${value.id}`;
  }
}
|
<reponame>YaroShkvorets/ant-design-vue<filename>components/skeleton/Paragraph.tsx
import type { ExtractPropTypes, PropType } from 'vue';
import { defineComponent } from 'vue';
// Width of a skeleton row: a bare number is treated as pixels.
type widthUnit = number | string;
// Props: `width` may be a single value (applied to the last row) or an array
// of per-row widths; `rows` is the number of placeholder lines to render.
export const skeletonParagraphProps = {
  prefixCls: String,
  width: { type: [Number, String, Array] as PropType<widthUnit[] | widthUnit> },
  rows: Number,
};
export type SkeletonParagraphProps = Partial<ExtractPropTypes<typeof skeletonParagraphProps>>;
// Renders the paragraph part of a skeleton placeholder as a <ul> of <li>
// rows, each optionally constrained to a per-row width.
const SkeletonParagraph = defineComponent({
  name: 'SkeletonParagraph',
  props: skeletonParagraphProps,
  setup(props) {
    // Resolves the width for row `index`: array widths apply per row; a
    // scalar width applies to the last row only (note the `rows = 2`
    // fallback used for this last-row check when `rows` is unset).
    const getWidth = (index: number) => {
      const { width, rows = 2 } = props;
      if (Array.isArray(width)) {
        return width[index];
      }
      // last paragraph
      if (rows - 1 === index) {
        return width;
      }
      return undefined;
    };
    return () => {
      const { prefixCls, rows } = props;
      // Numeric widths become "<n>px"; string widths are passed through.
      const rowList = [...Array(rows)].map((_, index) => {
        const width = getWidth(index);
        return (
          <li key={index} style={{ width: typeof width === 'number' ? `${width}px` : width }} />
        );
      });
      return <ul class={prefixCls}>{rowList}</ul>;
    };
  },
});
|
//
// Copyright (c) 2012 TU Dresden - Database Technology Group
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
// Author: <NAME> <<EMAIL>>
//
// Parses JSON strings with some small modifications
// (C++-Style comments; multiline support)
#ifndef BENCHMARK_CORE_IMPORTER_JSON_IMPORTER_H_
#define BENCHMARK_CORE_IMPORTER_JSON_IMPORTER_H_
#include <string>
#include <map>
#include <vector>
#include <stdexcept>
class JSONTokenizer;
// A custom Exception class that represents errors related to processing
// the json document (e.g. parsing errors)
class JSONException: public std::runtime_error {
 public:
  // Constructs the exception with an optional human-readable message.
  JSONException(const std::string& msg = "") : std::runtime_error(msg) {}
};
// An enumeration that contains all different types of JSON values
typedef enum {
  kJSONNull,     // the literal `null`
  kJSONBoolean,  // true / false
  kJSONNumber,   // numeric value (stored as double)
  kJSONString,   // quoted string
  kJSONArray,    // ordered list of values
  kJSONObject    // key/value map
} JSONType;
// An abstract base class representing a JSON Value
// Fix: added a virtual destructor. JSONValue is an abstract base that is
// handled polymorphically (JSONObject/JSONArray store and hand out
// JSONValue* to concrete subclasses); without a virtual destructor,
// deleting a subclass through a JSONValue* is undefined behavior.
class JSONValue {
 public:
  // Constructor
  JSONValue(){};
  // Virtual destructor so subclasses are destroyed correctly when deleted
  // through a JSONValue pointer.
  virtual ~JSONValue() {}
  // Returns the type of this JSON Value
  virtual JSONType type() = 0;
};
// A JSON null value
class JSONNull : public JSONValue {
 public:
  // Constructs a new JSON null value
  JSONNull(){};
  // Constructs a new JSON null value from the given JSON tokenizer
  JSONNull(JSONTokenizer &tokenizer);
  // Constructs a new JSON null value from the given input string
  JSONNull(const std::string &source);
  // Returns the type of this JSON null value
  JSONType type() {return kJSONNull;}
};
// A JSON boolean value
class JSONBoolean : public JSONValue {
 public:
  // Constructs a new JSON boolean value from the given JSON tokenizer
  JSONBoolean(JSONTokenizer &tokenizer);
  // Constructs a new JSON boolean value from the given input string
  JSONBoolean(const std::string &source);
  // Sets the boolean value
  void value(bool value) {value_=value;}
  // Gets the boolean value
  bool value() const {return value_;}
  // Returns the type of this JSON boolean
  JSONType type(){return kJSONBoolean;}
 private:
  // The value of this boolean JSON value.
  // NOTE(review): not initialized here; presumably set by the parsing
  // constructors in the implementation file — confirm before relying on
  // value() of a default-constructed instance.
  bool value_;
};
// A JSON number (double)
class JSONNumber : public JSONValue {
 public:
  // Constructs a new JSON number from the given JSON tokenizer
  JSONNumber(JSONTokenizer &tokenizer);
  // Sets the JSON number value
  void value(double value){value_=value;}
  // Returns the JSON number value
  double value() const {return value_;}
  // Returns the type of this JSON number
  JSONType type(){return kJSONNumber;}
 private:
  // Whether the given number contains any decimal places
  // NOTE(review): no accessor is declared for this flag and it is not
  // initialized here; presumably set by the tokenizer constructor — confirm
  // in the implementation file.
  bool is_double;
  // The value of this number
  double value_;
};
// JSON String
class JSONString : public JSONValue{
 public:
  // Constructs a new JSON string with the given value
  JSONString(std::string value):value_(value){}
  // Constructs a new JSON string from the given JSON tokenizer
  JSONString(JSONTokenizer &tokenizer);
  // Sets the value of this string
  void value(const std::string &value){value_=value;}
  // Gets the value of this string (returned by copy)
  std::string value() const {return value_;}
  // Returns the type of this JSON string
  JSONType type(){return kJSONString;}
 private:
  // The value of this string
  std::string value_;
};
// JSON Object
//
// Note:
// JSON objects are interpreted as a map (i.e. duplicates are not supported)
class JSONObject : public JSONValue {
 public:
  // Constructs an empty JSON Object
  JSONObject(){};
  // Constructs an JSON Object from a JSON source string
  JSONObject(std::string source);
  // Constructs an JSON Object from a JSON tokenizer
  JSONObject(JSONTokenizer &tokenizer);
  // Adds a new key/value pair to the object
  void Put(std::string key, JSONValue* value);
  void Put(std::string key, std::string value);
  // Returns the JSON value stored under the given key, or NULL when the key
  // is absent. The object retains ownership of the returned pointer.
  JSONValue* Get(const std::string key) const{
    std::map<std::string, JSONValue*>::const_iterator it = values.find(key);
    if(it == values.end())
      return NULL;
    return it->second;
  };
  // Returns whether the JSON object contains a key/value pair
  // with the given key
  bool Contains(const std::string key){
    return !(values.find(key) == values.end());
  }
  // Returns the type of this JSON object
  JSONType type() {return kJSONObject;}
  // TODO: Add an iterator to allow iteration over JSON objects
 private:
  // The key/value pairs stored inside this JSONobject.
  // NOTE(review): raw owning pointers with no destructor declared here —
  // the stored values are presumably never freed (leak); confirm in the
  // implementation file.
  std::map<std::string,JSONValue*> values;
  // Initializes the JSONobject form the given Tokenizer
  void Init(JSONTokenizer &tokenizer);
};
// JSON Array
class JSONArray : public JSONValue {
 public:
  // Constructs an empty JSON Array
  JSONArray(){};
  // Constructs an JSON Array from a given JSON tokenizer
  JSONArray(JSONTokenizer &tokenizer);
  // Returns the number of elements inside the array
  size_t size(){return values.size();}
  // Returns the JSONValue stored at the given index (unchecked: indexing
  // past size() is undefined behavior, as with std::vector::operator[]).
  JSONValue *Get(unsigned int index){return values[index];}
  // Returns the type of this array
  JSONType type() {return kJSONArray;}
  // TODO: Add an iterator to allow iteration over JSON arrays
 private:
  // The array's content (raw pointers; same ownership caveat as JSONObject).
  std::vector<JSONValue*> values;
};
// A tokenizer that takes an input stream and extracts characters and tokens
// from it.
//
// JSONTokenizer objects are used to share a single input stream between
// multiple JSONValue objects.
class JSONTokenizer {
 public:
  // Constructs a new JSON tokenizer reading from the given stream.
  // The stream reference must outlive this tokenizer.
  JSONTokenizer(std::istream &stream);
  // Gets the next character
  char next();
  // Gets the next character while skipping whitespace and comments
  char next_clean();
  // Backs up one character
  void back();
  // Extracts characters and discards them until the delimiter is found
  void SkipTo(char delimiter);
  // Returns the next character without extracting it
  char peek() const {return stream_.peek();}
  // Extracts the next JSONValue
  JSONValue *NextValue();
  // Returns true if the End Of File has been reached
  bool eof() const {return stream_.eof();}
 private:
  // The input stream used to extract the tokens
  std::istream &stream_;
};
#endif // BENCHMARK_CORE_IMPORTER_JSON_IMPORTER_H_
|
#! /bin/bash
# Build a Debian package: stage a DEBIAN/control file with the current
# version, install the Python package into a temp root, then run dpkg-deb.
set -e

script_dir=$(dirname "$BASH_SOURCE")
control_file="$script_dir/control"
version=$(cat "$script_dir/../VERSION")

# Stage the control file and substitute the {VERSION} placeholder.
tmpdir=$(mktemp -d)
mkdir "$tmpdir/DEBIAN"
cp "$control_file" "$tmpdir/DEBIAN/control"
sed -i "s/{VERSION}/$version/" "$tmpdir/DEBIAN/control"

# Install the package contents into the staging root.
(cd "$script_dir/../.."; python3 setup.py install --root="$tmpdir")

fakeroot dpkg-deb --build "$tmpdir" "fbmessenger-$version.deb"

# Clean up the staging directory once the .deb has been produced.
rm -rf "$tmpdir"
|
#!/bin/bash
# Out-of-tree CMake build; installs the resulting binary into $PREFIX/bin.
set -e

# -p makes the script re-runnable instead of failing when build/ exists.
mkdir -p build
cd build
cmake ..
make
cp reaper "${PREFIX}/bin/"
<filename>quizzes/q5/pascal_triangle_test.go
package q5_test
import (
"testing"
"puzzle/quizzes/q5"
)
func Test_CalculateNextTriangle(t *testing.T) {
tri := q5.PascalTriangle()
n1 := []int{1, 1}
n2 := tri.NextLine(n1)
n3 := tri.NextLine(n2)
n4 := tri.NextLine(n3)
n5 := tri.NextLine(n4)
t.Logf("value: %v", n2)
t.Logf("value: %v", n3)
t.Logf("value: %v", n4)
t.Logf("value: %v", n5)
}
|
// Demonstrates composing partial graphs with the pre-1.0 Akka Streams
// FlowGraph API.
object StreamGraph {
  /**
   * https://gist.github.com/dotta/78e8a4a72c5d2de07116
   * FlowGraph.partial() returns a Graph
   * FlowGraph.closed() returns a RunnableGraph
   * A Graph does not require all of its ports to be connected.
   *
   * maxOf3 is a partial graph computing the maximum of three Int inputs
   * by chaining two pairwise-max ZipWith stages.
   */
  val maxOf3 = FlowGraph.partial() { implicit b =>
    import FlowGraph.Implicits._
    // Each ZipWith stage emits the larger of its two inputs.
    val zip1 = b.add(ZipWith[Int,Int,Int]((u, v) => if (u > v) u else v))
    val zip2 = b.add(ZipWith[Int,Int,Int]((u, v) => if (u > v) u else v))
    // Feed the first pairwise max into the second stage.
    zip1.out ~> zip2.in0
    // this partial graph will have 3 inputs & 1 output
    UniformFanInShape(zip2.out, zip1.in0, zip1.in1, zip2.in1)
  }
  /**
   * To execute, build a closed graph (RunnableGraph) and
   * import the partial graph into it using b.add().
   * The sink's materialized value (a Future[Int]) is what g.run() returns.
   */
  val resSink = Sink.head[Int]
  val g = FlowGraph.closed(resSink) { implicit b =>
    sink =>
    import FlowGraph.Implicits._
    val sg = b.add(maxOf3)
    // Wire one single-element source into each of the three inputs.
    Source.single(1) ~> sg.in(0)
    Source.single(2) ~> sg.in(1)
    Source.single(3) ~> sg.in(2)
    sg.out ~> sink.inlet
  }
  // materialize the graph; the result should be max(1, 2, 3) = 3
  val mx:Future[Int] = g.run()
  val res = Await.result(mx, 300.millis)
  println(res)
  // Await.result(max, 300.millis) should equal(3)
}
|
#!/bin/sh
# Download and launch a local Elasticsearch for integration tests.
# Requires ES_VERSION to be set in the environment.
if [ -z "$ES_VERSION" ]; then
    echo "No ES_VERSION specified";
    exit 1;
fi;

killall java 2>/dev/null
which java
java -version

echo "Downloading Elasticsearch v${ES_VERSION}..."
ES_URL="https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ES_VERSION}.tar.gz"
curl -L -o elasticsearch-latest.tar.gz "$ES_URL"
tar -xvf "elasticsearch-latest.tar.gz"

echo "Adding repo to config..."
# Append all test settings in a single pass per config file instead of
# re-running `find` once for every setting (the original scanned the tree
# seven times).
find . -name "elasticsearch.yml" | while read TXT ; do
    {
        echo 'repositories.url.allowed_urls: ["http://*"]'
        echo 'path.repo: ["/tmp"]'
        echo 'node.max_local_storage_nodes: 1'
        echo 'cluster.routing.allocation.disk.watermark.low: 0.1%'
        echo 'cluster.routing.allocation.disk.watermark.high: 0.1%'
        echo 'node.attr.testattr: test'
        echo 'script.max_compilations_rate: 2048/1m'
    } >> "$TXT"
done

echo "Starting Elasticsearch v${ES_VERSION}"
./elasticsearch-*/bin/elasticsearch -d
sleep 3
|
<filename>shared/components/header/header.test.js
/*global describe it expect console*/
import TestUtils from 'react-addons-test-utils';
import React from 'react';
import Header from './header.component';

describe('Header component', () => {
  it('renders without problems', (done) => {
    // `const` fixes the implicit global the original created by
    // assigning to an undeclared `header`.
    const header = TestUtils.renderIntoDocument(React.createElement(Header));
    expect(header.state).toEqual({});
    done();
  });
});
|
<reponame>love-adela/algorithm
import sys

# Bind readline directly instead of wrapping it in a lambda
# (PEP 8 discourages assigning a lambda to a name).
read = sys.stdin.readline

# NOTE(review): readline() keeps the trailing newline; downstream code
# likely wants S.strip() / P.strip() — confirm before changing.
S, P = read(), read()
|
def insertion_sort(arr):
    """Sort ``arr`` in place using insertion sort.

    Each element is shifted left past every strictly larger neighbour
    until it sits in the correct position within the already-sorted
    prefix. The sort is stable; nothing is returned.
    """
    for idx in range(1, len(arr)):
        current = arr[idx]
        pos = idx
        # Shift larger elements one slot to the right to make room.
        while pos > 0 and arr[pos - 1] > current:
            arr[pos] = arr[pos - 1]
            pos -= 1
        arr[pos] = current
# Driver code to test above
arr = [1, 5, 9, 3, 8, 4, 6]
insertion_sort(arr)
print("Sorted array:")
for value in arr:
    print("% d" % value)
"""
https://adventofcode.com/2020/day/5
"""
from collections import deque
# test_input = 'FBFBBFFRLR'
with open(r'../input_files/day05_input_mb.txt', 'r') as fh:
boarding_passes = fh.read().splitlines()
def lookup_row(input):
    """Return the row (0-127) encoded by the first seven characters of a
    boarding pass: 'F' keeps the lower half, 'B' keeps the upper half.

    NOTE(review): the parameter shadows the ``input`` builtin; the name is
    kept to preserve the call signature.
    """
    candidate_rows = 128
    row_deque = deque(range(128))
    # The original used enumerate() without ever using the index, and
    # reused ``i`` for the inner loops, shadowing the outer loop variable.
    for char in input:
        candidate_rows = candidate_rows // 2
        if char == 'F':
            # Keep the lower half: drop rows from the high end.
            for _ in range(candidate_rows):
                row_deque.pop()
        elif char == 'B':
            # Keep the upper half: drop rows from the low end.
            for _ in range(candidate_rows):
                row_deque.popleft()
    return row_deque[0]
def lookup_col(input):
    """Return the column (0-7) encoded by the last three characters of a
    boarding pass: 'L' keeps the lower half, 'R' keeps the upper half.

    NOTE(review): the parameter shadows the ``input`` builtin; the name is
    kept to preserve the call signature.
    """
    candidate_cols = 8
    col_deque = deque(range(8))
    for char in input:
        candidate_cols = candidate_cols // 2
        if char == 'R':
            # Keep the upper half: drop columns from the low end.
            for _ in range(candidate_cols):
                col_deque.popleft()
        elif char == 'L':
            # Keep the lower half: drop columns from the high end.
            for _ in range(candidate_cols):
                col_deque.pop()
    return col_deque[0]
# Part One: seat ID = row * 8 + column; report the highest.
seat_ids = [lookup_row(bp[0:7]) * 8 + lookup_col(bp[7:]) for bp in boarding_passes]
print(f'Part One - {max(seat_ids) = }.')

# Part Two: find the missing seat ID in the occupied range 54..930.
# Use a set for O(1) membership tests — the original did `n not in seat_ids`
# against a list for every candidate, which is O(n^2) overall.
seat_id_set = set(seat_ids)
missing_id = 0
for n in range(54, 931):
    if n not in seat_id_set:
        missing_id = n
print(f'Part Two - {missing_id = }.')
|
import { Vue } from 'vue-property-decorator';
import { TableType } from '../../classes';

// A Vue component instance that exposes the TableType it renders for.
export interface ITableTypeConsumer extends Vue {
readonly tableType: TableType<any>;
}

// Factory: builds a Vue constructor (mixin) bound to the given tableType,
// typed so that instances satisfy ITableTypeConsumer.
export declare const tableTypeConsumerFactory: (tableType: TableType<any, any[], any[], any[], any[]>) => import("vue").VueConstructor<Vue> & (new () => ITableTypeConsumer);
|
# H.I.V.E. installer: set up a virtualenv and (re)install dependencies.
# NOTE(review): `figlet` is usually a system package, and the common PyPI
# port is `pyfiglet` — confirm `pip3 install figlet` is intended.
pip3 install figlet
figlet Installing H.I.V.E -c -k
sudo pip3 install virtualenv
virtualenv TheHiveProjectDEV
# NOTE(review): `source` is a bash-ism; run this script with bash.
source TheHiveProjectDEV/bin/activate
# `-y` answers the confirmation prompt so an unattended install does not
# hang waiting for input (the original prompted on every uninstall).
pip3 uninstall -y SpeechRecognition
pip3 uninstall -y pyttsx3
pip3 uninstall -y pywhatkit
pip3 uninstall -y datetime
pip3 uninstall -y wikipedia
pip3 uninstall -y pyjokes
pip3 uninstall -y requests
pip3 install SpeechRecognition
pip3 install pyttsx3
pip3 install pywhatkit
pip3 install datetime
pip3 install wikipedia
pip3 install pyjokes
pip3 install requests
pip install pyjokes
pip install pyowm
chmod +x .hivec.sh
source .hivec.sh
echo 'Thank You for downloading H.I.V.E!'
|
<filename>lib/index.js<gh_stars>0
/**
* Modules
*/
var path = require('path')
var through = require('through2')
var isAsset = require('@themang/is-asset')
var hash = require('hasha')
var mkdirp = require('mkdirp')
var fs = require('fs')
/**
* Vars
*/
/**
* Expose assetify
*/
module.exports = assetify
/**
* assetify
*/
/**
 * Browserify plugin entry point: ensures the output directory exists at
 * bundle time and registers a global transform that rewrites asset files
 * into modules exporting their public URL.
 *
 * opt.output / opt.o - directory assets are copied into
 * opt.base / opt.b   - public URL prefix for the exported paths
 */
function assetify (browserify, opt) {
  opt = opt || {}
  if (!opt.output) opt.output = opt.o || path.join(process.cwd(), 'assets')
  if (!opt.base) opt.base = opt.b || '/assets/'

  browserify.on('bundle', function () {
    mkdirp.sync(opt.output)
  })

  browserify.transform(function (file) {
    return transform(file, opt)
  }, { global: true })
}
/**
 * Per-file browserify transform. Non-asset files pass through untouched;
 * asset files are buffered, written to opt.output under a content-hash
 * filename, and their module source is replaced with an export of the
 * public URL.
 */
function transform (file, opt) {
  opt = opt || {}
  if (!isAsset(file)) return through()
  var buffers = []
  return through(write, end)
  // Accumulate the file's chunks.
  function write (buf, enc, cb) {
    buffers.push(buf)
    cb()
  }
  // On stream end, emit the rewritten module source instead of the asset.
  function end (cb) {
    this.push(urify(Buffer.concat(buffers)))
    cb()
  }
  // Write the content to disk under "<sha1>.<ext>" and return module
  // source exporting its public path.
  function urify (content) {
    var basename = hash(content, {algorithm: 'sha1'}) + path.extname(file)
    fs.writeFileSync(path.join(opt.output, basename), content)
    return "module.exports = '" + opt.base + basename + "'"
  }
}
|
def rolling_average(arr, window=3):
    """Return the mean of the last ``window`` items of ``arr``.

    Generalizes the original hard-coded window of 3 (the default keeps
    the old behavior) and returns 0.0 for an empty input instead of
    raising ZeroDivisionError.

    :param arr: sequence of numbers
    :param window: how many trailing items to average (default 3)
    """
    tail = arr[-window:]
    if not tail:
        return 0.0
    return sum(tail) / len(tail)
mylist = [10, 20, 30, 40, 50]
rolling_avg = rolling_average(mylist)
print("Rolling average:", rolling_avg)
# Output (mean of the last three items, 30/40/50 — the original comment
# claiming 33.333... was incorrect):
# Rolling average: 40.0
#!/bin/bash
# macOS installer build for the Plotter blockchain GUI: pyinstaller ->
# electron-packager -> (optional) codesign/notarize -> DMG.
pip install setuptools_scm
# The environment variable PLOTTER_INSTALLER_VERSION needs to be defined.
# If the env variable NOTARIZE and the username and password variables are
# set, this will attempt to Notarize the signed DMG.
PLOTTER_INSTALLER_VERSION=$(python installer-version.py)

if [ ! "$PLOTTER_INSTALLER_VERSION" ]; then
	echo "WARNING: No environment variable PLOTTER_INSTALLER_VERSION set. Using 0.0.0."
	PLOTTER_INSTALLER_VERSION="0.0.0"
fi
echo "Plotter Installer Version is: $PLOTTER_INSTALLER_VERSION"

# Global npm tooling used by the later packaging steps.
echo "Installing npm and electron packagers"
npm install electron-installer-dmg -g
npm install electron-packager -g
npm install electron/electron-osx-sign -g
npm install notarize-cli -g

echo "Create dist/"
sudo rm -rf dist
mkdir dist

# Build the daemon executables with pyinstaller from the spec shipped in
# the plotter package.
echo "Create executables with pyinstaller"
pip install pyinstaller==4.2
SPEC_FILE=$(python -c 'import plotter; print(plotter.PYINSTALLER_SPEC_PATH)')
pyinstaller --log-level=INFO "$SPEC_FILE"
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
	echo >&2 "pyinstaller failed!"
	exit $LAST_EXIT_CODE
fi
# Ship the pyinstaller daemon alongside the GUI sources, then build the
# GUI bundle.
cp -r dist/daemon ../plotter-blockchain-gui
cd .. || exit
cd plotter-blockchain-gui || exit

echo "npm build"
npm install
npm audit fix
npm run build
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
	echo >&2 "npm run build failed!"
	exit $LAST_EXIT_CODE
fi

# Package the Electron app; the daemon must stay unpacked from the asar
# archive (--asar.unpack) so its executables can run from disk.
electron-packager . Plotter --asar.unpack="**/daemon/**" --platform=darwin \
--icon=src/assets/img/Plotter.icns --overwrite --app-bundle-id=net.plotter.blockchain \
--appVersion=$PLOTTER_INSTALLER_VERSION
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
	echo >&2 "electron-packager failed!"
	exit $LAST_EXIT_CODE
fi
if [ "$NOTARIZE" ]; then
electron-osx-sign Plotter-darwin-x64/Plotter.app --platform=darwin \
--hardened-runtime=true --provisioning-profile=plotterblockchain.provisionprofile \
--entitlements=entitlements.mac.plist --entitlements-inherit=entitlements.mac.plist \
--no-gatekeeper-assess
fi
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
echo >&2 "electron-osx-sign failed!"
exit $LAST_EXIT_CODE
fi
# Collect the packaged app and build the DMG installer.
mv Plotter-darwin-x64 ../build_scripts/dist/
cd ../build_scripts || exit

DMG_NAME="Plotter-$PLOTTER_INSTALLER_VERSION.dmg"
echo "Create $DMG_NAME"
mkdir final_installer
electron-installer-dmg dist/Plotter-darwin-x64/Plotter.app Plotter-$PLOTTER_INSTALLER_VERSION \
--overwrite --out final_installer
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
	echo >&2 "electron-installer-dmg failed!"
	exit $LAST_EXIT_CODE
fi

# Notarize the DMG on CI when credentials are available.
if [ "$NOTARIZE" ]; then
	echo "Notarize $DMG_NAME on ci"
	cd final_installer || exit
	notarize-cli --file=$DMG_NAME --bundle-id net.plotter.blockchain \
	--username "$APPLE_NOTARIZE_USERNAME" --password "$APPLE_NOTARIZE_PASSWORD"
	echo "Notarization step complete"
else
	echo "Not on ci or no secrets so skipping Notarize"
fi
# Notes on how to manually notarize
#
# Ask for username and password. password should be an app specific password.
# Generate app specific password https://support.apple.com/en-us/HT204397
# xcrun altool --notarize-app -f Plotter-0.1.X.dmg --primary-bundle-id net.plotter.blockchain -u username -p password
# xcrun altool --notarize-app; -should return REQUEST-ID, use it in next command
#
# Wait until the following command returns a success message.
# watch -n 20 'xcrun altool --notarization-info {REQUEST-ID} -u username -p password'.
# It can take a while, run it every few minutes.
#
# Once that is successful, execute the following command:
# xcrun stapler staple Plotter-0.1.X.dmg
#
# Validate DMG:
# xcrun stapler validate Plotter-0.1.X.dmg
|
package repl
import (
"fmt"
"io"
"log"
"strings"
"github.com/chzyer/readline"
"github.com/sjsafranek/go-micro-sessions/lib/api"
)
// usage writes the list of supported REPL commands (taken from the
// completer tree) to w.
func usage(w io.Writer) {
	io.WriteString(w, "commands:\n")
	io.WriteString(w, completer.Tree(" "))
}
// completer provides tab-completion for the REPL's top-level commands.
var completer = readline.NewPrefixCompleter(
	readline.PcItem("RUN"),
	readline.PcItem("BYE"),
	readline.PcItem("EXIT"),
	readline.PcItem("HELP"),
)
// filterInput suppresses Ctrl+Z (readline's suspend feature) and passes
// every other rune through unchanged. The boolean reports whether the
// rune should be kept.
func filterInput(r rune) (rune, bool) {
	if r == readline.CharCtrlZ {
		return r, false
	}
	return r, true
}
// Client is an interactive REPL wrapper around the find5 API.
type Client struct {
	api *api.Api
}
// Run drives the interactive loop: lines wrapped in braces are forwarded
// to the API as raw JSON requests; BYE/EXIT/QUIT (or EOF) terminate the
// session; Ctrl+C clears a non-empty line and exits on an empty one.
func (self *Client) Run() {
	l, err := readline.NewEx(&readline.Config{
		Prompt: "\033[31m[find5]#\033[0m ",
		HistoryFile: "history.find5",
		AutoComplete: completer,
		InterruptPrompt: "^C",
		EOFPrompt: "exit",
		HistorySearchFold: true,
		FuncFilterInputRune: filterInput,
	})
	if err != nil {
		panic(err)
	}
	defer l.Close()
	// Route the standard logger through readline so log output does not
	// clobber the prompt.
	log.SetOutput(l.Stderr())
	for {
		line, err := l.Readline()
		if err == readline.ErrInterrupt {
			if len(line) == 0 {
				break
			} else {
				continue
			}
		} else if err == io.EOF {
			break
		}
		line = strings.TrimSpace(line)
		parts := strings.Split(line, " ")
		command := strings.ToLower(parts[0])
		// testing
		// NOTE(review): this password-prompt config is rebuilt on every
		// iteration and never used afterwards — looks like leftover
		// experimentation; confirm before removing.
		setPasswordCfg := l.GenPasswordConfig()
		setPasswordCfg.SetListener(func(line []rune, pos int, key rune) (newLine []rune, newPos int, ok bool) {
			l.SetPrompt(fmt.Sprintf("Enter password(%v): ", len(line)))
			l.Refresh()
			return nil, 0, false
		})
		//.end
		switch {
		// A brace-wrapped line is parsed and executed as an API request;
		// note that Unmarshal/Do/Marshal errors are discarded here.
		case strings.HasPrefix(line, "{") && strings.HasSuffix(line, "}"):
			request := api.Request{}
			request.Unmarshal(line)
			response, _ := self.api.Do(&request)
			results, _ := response.Marshal()
			fmt.Println(results)
		case "bye" == command:
			goto exit
		case "exit" == command:
			goto exit
		case "quit" == command:
			goto exit
		case line == "":
		default:
			// log.Println("you said:", strconv.Quote(line))
		}
	}
exit:
}
// New returns a Client bound to the given find5 API instance.
func New(findapi *api.Api) *Client {
	return &Client{api: findapi}
}
|
<reponame>ChristopherChudzicki/mathbox<filename>build/esm/shaders/glsl/screen.pass.uv.js
// GLSL snippet for a screen pass: maps the interpolated `uvwo` varying's
// xy components to the pass's UV coordinate (the `stpq` argument is
// accepted but unused here).
export default /* glsl */ `vec2 screenPassUV(vec4 uvwo, vec4 stpq) {
return uvwo.xy;
}
`;
|
package org.cloudfoundry.samples.music.repositories.pcc;

import org.cloudfoundry.samples.music.domain.Album;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.Profile;
import org.springframework.data.gemfire.repository.GemfireRepository;
import org.springframework.stereotype.Repository;

/**
 * Spring Data GemFire repository for {@link Album} entities keyed by their
 * String id; CRUD operations are inherited from {@link GemfireRepository}.
 *
 * NOTE(review): the {@code Primary}, {@code Profile} and {@code Repository}
 * imports are unused — possibly left over from removed annotations.
 */
public interface PccAlbumRepository extends GemfireRepository<Album, String> {
}
|
import React, { useContext, useEffect } from "react";
import Layout from "../components/Layout";
import Hero from "../components/Hero";
import PageContext from "../context/PageContext";
function Blog(props) {
const { location } = props;
const { isOpen, toggleMenu } = useContext(PageContext);
useEffect(() => {
isOpen && toggleMenu();
}, []);
return (
<Layout
pathname={location.pathname}
seoTitle="My space in WEB"
pageHeading=""
>
<Hero />
</Layout>
);
}
export default Blog;
|
<filename>src/pages/index.js
import React from "react";
import NavBar from "../components/NavBar";
import { graphql } from 'gatsby';
import Jumbotron from '../components/Jumbotron';
import IndexCardBox from '../components/IndexCard';
import JumbotronItem from '../components/Jumbotron/JumbotronItem';
import { Card, Heading, Box } from 'rebass';
import IndexCard from "../components/IndexCard/IndexCard";
// Landing page: a jumbotron carousel of "Small Story" cards from
// Contentful, followed by the index card grid.
const IndexPage = ({ data }) => (
  <NavBar>
    <Jumbotron>
      {/* One slide per story card, backed by its Contentful image. */}
      {data.allContentfulStoryCard.edges.map(edge => (
        <JumbotronItem key={edge.node.id}>
          <Card
            color="white"
            backgroundImage={"url(" + edge.node.cardImage.fluid.src + ")"}
            backgroundSize='cover'
            bg="darkgray"
          >
            {/* Semi-opaque panel overlaying the essential question. */}
            <Box
              width={[1/3, 1/4, 1/5]}
              bg="hsl(208, 13%, 15%)"
              backgroundSize='cover'
              css={{
                zIndex: 3,
                opacity: .95,
                height: 800
              }}
            >
              <Heading textAlign="center" fontSize={[5, 6]} py='auto' css={{ opacity: 1 }}>
                {edge.node.essentialQuestion}
              </Heading>
            </Box>
          </Card>
        </JumbotronItem>
      ))}
    </Jumbotron>
    <IndexCardBox>
      <IndexCard>
      </IndexCard>
    </IndexCardBox>
  </NavBar>
);
export default IndexPage;

// Page query: newest-first "Small Story" cards with their image and
// essential question.
export const query =graphql`
query pageQuery {
allContentfulStoryCard(
# put everything in order
sort: {fields: [updated], order: DESC }
# filter: { pieces: { eq: null } }
filter: {category: {eq: "Small Story"}}
# limit the number of entries
# limit: 1,
# # skips the number of entries
# skip: 1,
) {
edges {
node {
id
essentialQuestion
category
updated (formatString: "dddd DD MMMM YYYY")
pieces
cardImage {
fluid {
src
}
}
}
}
}
}
`;
<filename>packages/form/src/mixins/formRow.js
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import getComponentName from '@ichef/gypcrete/lib/utils/getComponentName';
import { statusPropTypes } from '@ichef/gypcrete/lib/mixins/withStatus';
// PropTypes shape for the props forwarded to an underlying row component.
export const rowPropTypes = PropTypes.shape({
  desc: PropTypes.node,
  ...statusPropTypes,
});

/**
 * HOC factory: wraps a component so that row-level presentation props
 * (desc, status, statusOptions, errorMsg) are collected into a single
 * `rowProps` object, and disabled/readOnly are merged into `ineditable`.
 *
 * Options:
 *   withRef - when true, keeps a ref to the wrapped component instance,
 *             retrievable via getWrappedComponent().
 */
const formRow = ({
  withRef = false,
} = {}) => (WrappedComponent) => {
  class FormRow extends PureComponent {
    static displayName = `formRow(${getComponentName(WrappedComponent)})`;
    static propTypes = {
      disabled: PropTypes.bool,
      readOnly: PropTypes.bool,
      desc: PropTypes.node,
      ...statusPropTypes,
      // status
      // statusOptions
      // errorMsg
    };
    static defaultProps = {
      disabled: false,
      readOnly: false,
      desc: undefined,
    };
    // Returns the wrapped component instance (only set when withRef=true).
    getWrappedComponent() {
      return this.componentRef;
    }
    handleRef = (ref) => {
      this.componentRef = ref;
    }
    render() {
      const {
        disabled,
        readOnly,
        desc,
        status,
        statusOptions,
        errorMsg,
        rowProps,
        ...otherProps
      } = this.props;
      // Either flag makes the row non-editable.
      const ineditable = disabled || readOnly;
      return (
        <WrappedComponent
          ref={withRef ? this.handleRef : undefined}
          ineditable={ineditable}
          disabled={disabled}
          readOnly={readOnly}
          rowProps={{
            desc,
            status,
            statusOptions,
            errorMsg,
            ...rowProps,
          }}
          {...otherProps}
        />
      );
    }
  }
  return FormRow;
};
export default formRow;
|
const axios = require('axios');

// Fetch the resource and log its body; log the error object on failure.
axios
  .get('https://api.example.com/foo')
  .then((res) => console.log(res.data))
  .catch((err) => console.log(err));
import Sgbd from './../models/sgbd';
async function create(req, res) {
let sgbd = new Sgbd();
sgbd.idNotary = req.decoded['foo'];
sgbd.description = req.body.description;
sgbd.baseDirectory = req.body.baseDirectory;
sgbd.dataDirectory = req.body.dataDirectory;
sgbd.port = req.body.port;
sgbd.dbName = req.body.dbName;
sgbd.size = req.body.size;
try {
let result = await sgbd.create();
return res.status(200).json({
message: result
})
} catch (err) {
return res.status(500).json({
message: err.message || err
})
}
}
async function update(req, res) {
let sgbd = new Sgbd();
sgbd.id = req.params.id;
sgbd.description = req.body.description;
sgbd.baseDirectory = req.body.baseDirectory;
sgbd.dataDirectory = req.body.dataDirectory;
sgbd.port = req.body.port;
sgbd.dbName = req.body.dbName;
sgbd.size = req.body.size;
try {
let result = await sgbd.update();
return res.status(200).json({
message: result
})
} catch (err) {
return res.status(500).json({
message: err.message || err
})
}
}
async function findByNotary(req, res) {
let sgbd = new Sgbd();
sgbd.idNotary = req.params['id'];
try {
let result = await sgbd.findByNotary();
return res.status(200).json({
message: result
})
} catch (err) {
return res.status(500).json({
message: err.message || err
})
}
}
module.exports = {
create: create,
update: update,
findByNotary: findByNotary
}
|
#include <stdio.h>
#include <string.h>
#include <ctype.h>

/*
 * Lower-cases the string "Hello" in place and prints the result.
 */
int main(void)
{
    char string[50];
    size_t i;
    size_t len;

    strcpy(string, "Hello");
    /* Hoist strlen() out of the loop condition, and cast through
     * unsigned char: passing a negative plain char to tolower() is
     * undefined behavior per the C standard. */
    len = strlen(string);
    for (i = 0; i < len; i++)
    {
        string[i] = (char) tolower((unsigned char) string[i]);
    }
    printf("%s", string);
    return 0;
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.