text stringlengths 1 1.05M |
|---|
<gh_stars>0
import React from 'react'
import Section from '../Section'
import Link from '../Link'
import styles from './styles.css'
import { platforms } from '../config'
export default function Community() {
const renderPlatforms = () => platforms.map((platform, i) => {
const { title, icon, url } = platform
return (
<li className={styles.platform} key={i}>
<Link className={styles.link} href={url} title={title}>
<span className={`fa fa-lg ${icon}`} />
</Link>
</li>
)
})
return (
<Section contentClassName={styles.container} title="Join the Community">
{renderPlatforms()}
</Section>
)
}
|
use std::collections::HashMap;
/// Counts occurrences of each word and returns `(word, count)` pairs
/// sorted alphabetically by word.
fn count_and_sort_words(words: Vec<&str>) -> Vec<(&str, usize)> {
    // Tally occurrences.
    let mut word_counts: HashMap<&str, usize> = HashMap::new();
    for &word in &words {
        *word_counts.entry(word).or_insert(0) += 1;
    }
    // Consume the map directly instead of copying each entry out of it.
    let mut sorted_counts: Vec<(&str, usize)> = word_counts.into_iter().collect();
    // Keys are unique, so an unstable sort is safe and avoids allocation.
    sorted_counts.sort_unstable_by_key(|&(word, _)| word);
    sorted_counts
}
fn main() {
let words = vec!["apple", "banana", "apple", "cherry", "banana"];
let sorted_counts = count_and_sort_words(words);
println!("{:?}", sorted_counts); // Output: [("apple", 2), ("banana", 2), ("cherry", 1)]
} |
export * from "./TestComponent"
|
<reponame>Evangelize/classes<filename>src/models/modules/MemberDevices.js<gh_stars>1-10
const moduleUtils = require('../../lib/moduleUtils');
module.exports = function (sequelize, DataTypes) {
const MemberDevices = sequelize.define(
'memberDevices',
{
id: {
type: DataTypes.BLOB,
primaryKey: true,
get() {
return moduleUtils.binToHex(this.getDataValue('id'));
},
set(val) {
this.setDataValue('id', new Buffer(val, 'hex'));
},
},
entityId: {
type: DataTypes.BLOB,
get: function () {
return moduleUtils.binToHex(this.getDataValue('entityId'));
},
set: function (val) {
if (val) {
this.setDataValue('entityId', new Buffer(val, 'hex'));
} else {
this.setDataValue('entityId', null);
}
},
},
personId: {
type: DataTypes.BLOB,
get: function () {
return moduleUtils.binToHex(this.getDataValue('personId'));
},
set: function (val) {
if (val) {
this.setDataValue('personId', new Buffer(val, 'hex'));
} else {
this.setDataValue('personId', null);
}
},
},
deviceId: {
type: DataTypes.STRING(255),
},
title: {
type: DataTypes.STRING(255),
},
createdAt: {
type: DataTypes.DATE,
get() {
const field = 'createdAt';
let ret = null;
if (this.getDataValue(field)) {
ret = this.getDataValue(field).getTime();
}
return ret;
},
},
updatedAt: {
type: DataTypes.DATE,
get() {
const field = 'updatedAt';
let ret = null;
if (this.getDataValue(field)) {
ret = this.getDataValue(field).getTime();
}
return ret;
},
},
deletedAt: {
type: DataTypes.DATE,
get() {
const field = 'deletedAt';
let ret = null;
if (this.getDataValue(field)) {
ret = this.getDataValue(field).getTime();
}
return ret;
},
},
revision: {
type: DataTypes.INTEGER,
defaultValue: 0,
},
},
{
paranoid: true,
}
);
return MemberDevices;
};
|
#!/bin/sh
# fdt-forward: assemble a dts stream by forwarding nodes/props from a live
# flattened device tree, optionally seeded from an existing dtb.
#
# By default, it will use `/sys/firmware/fdt` as a source to forward from.
# The source can be overridden using the FDT variable, or `--fdt`.
#
# Example usage:
#
#     fdt-forward \
#         --print-header \
#         --copy-dtb "./desired-dtb.dtb" \
#         --forward-node "/memory" \
#         --forward-prop "/" "serial-number" \
#         --forward-prop "/soc/mmc@1c10000/wifi@1" "local-mac-address" \
#         | fdt-forward --to-dtb > out.dtb

set -e
set -u

PATH="@PATH@:$PATH"

# dtb -> dts (stdin/stdout or file args).
# shellcheck disable=2120
to_dts() {
	dtc --sort -I dtb -O dts "$@"
}

# dts -> dtb (stdin/stdout or file args).
# shellcheck disable=2120
to_dtb() {
	dtc --sort -I dts -O dtb "$@"
}

# Functions with `__` prefixed names are expected to
# be called from `run`, replacing `-` with `_`.

# `--to-dts` will not continue execution further.
# It is a convenience function for calling `dtc`.
__to_dts() {
	to_dts "$@"
}

# `--to-dtb` will not continue execution further.
# It is a convenience function for calling `dtc`.
__to_dtb() {
	to_dtb "$@"
}

# Print a device tree header.
__print_header() {
	printf '/dts-v1/;\n'
	run "$@"
}

# Forwards the *whole* node
__forward_node() {
	node="$1"; shift
	printf '\n// forwarded node: "%s"\n' "$node"
	fdtgrep --show-subnodes --include-node "$node" "$FDT"
	run "$@"
}

# Forwards the prop matching the name from the given node.
__forward_prop() {
	node="$1"; shift
	prop="$1"; shift
	printf '\n// forwarded prop: "%s" from node: "%s"\n' "$prop" "$node"
	# In order, we get the node (without descendents!) as dts
	# Synthesize it back into dtb
	# Then we can get the desired prop.
	# Doing otherwise would give us *all* matching prop names,
	# and not exclusively the one for the desired node.
	fdtgrep --show-version --include-node "$node" "$FDT" \
		| to_dtb \
		| fdtgrep "$prop" -
	run "$@"
}

# Copy the whole dtb as dts source.
# Its header will be stripped.
__copy_dtb() {
	dtb="$1"; shift
	printf '\n// Initial dtb copy...\n'
	to_dts "$dtb" | tail -n+2
	run "$@"
}

# `--fdt` selects an alternative source device tree.
__fdt() {
	FDT="$1"; shift
	run "$@"
}

FDT="${FDT:-/sys/firmware/fdt}"

# Dispatch: map the next `--some-option` argument onto its `__some_option`
# handler and invoke it with the remaining arguments.
run() {
	if [ $# -gt 0 ]; then
		# `${1//-/_}` is a bashism and fails under POSIX /bin/sh (the
		# declared interpreter); use `tr` for the dash-to-underscore map.
		cmd=$(printf '%s' "$1" | tr -- '-' '_'); shift
		"$cmd" "$@"
	fi
}

run "$@"
|
// define an Interest class
class Interest {
// class member and methods
constructor(principal, rate, tenure) {
this.principal = principal;
this.rate = rate;
this.tenure = tenure;
}
// method for calculating the simple interest
simpleInterest() {
return (this.principal * this.rate * this.tenure) / 100;
}
// method for calculating the compound interest
compoundInterest() {
return this.principal * (Math.pow((1 + this.rate / 100), this.tenure) - 1);
}
}
// Demo: $1000 principal at 10% over 10 periods.
let interest = new Interest(1000, 10, 10);

// Simple interest.
let simpleInterest = interest.simpleInterest();
console.log('Simple Interest: ' + simpleInterest);

// Compound interest.
let compoundInterest = interest.compoundInterest();
console.log('Compound Interest: ' + compoundInterest);
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace FinancialPlanner
{
/// <summary>
/// One ledger entry parsed from the CSV: when it happened, whether it is an
/// "Income" or "Expense" (free-form string), and the monetary amount.
/// </summary>
public class FinancialTransaction
{
public DateTime Date { get; set; }
public string TransactionType { get; set; }
public decimal Amount { get; set; }
}
/// <summary>Aggregation helpers over financial transactions.</summary>
public class FinancialCalculator
{
    /// <summary>
    /// Net income = sum of "Income" amounts minus sum of "Expense" amounts;
    /// transactions of any other type are ignored.
    /// </summary>
    public decimal CalculateNetIncome(List<FinancialTransaction> transactions)
    {
        Func<string, decimal> sumOf = type =>
            transactions.Where(t => t.TransactionType == type).Sum(t => t.Amount);
        return sumOf("Income") - sumOf("Expense");
    }
}
class Program
{
    /// <summary>
    /// Reads transactions from financial_data.csv (Date,Type,Amount per line)
    /// and prints total income, total expenses and net income.
    /// Malformed or blank lines are reported and skipped instead of crashing.
    /// </summary>
    static void Main(string[] args)
    {
        string filePath = "financial_data.csv";

        if (!File.Exists(filePath))
        {
            Console.WriteLine($"Input file not found: {filePath}");
            return;
        }

        var transactions = new List<FinancialTransaction>();
        using (var reader = new StreamReader(filePath))
        {
            string line;
            while ((line = reader.ReadLine()) != null)
            {
                if (string.IsNullOrWhiteSpace(line))
                {
                    continue; // skip blank lines
                }
                var values = line.Split(',');
                // Expect at least: date, type, amount.
                if (values.Length < 3
                    || !DateTime.TryParse(values[0], out var date)
                    || !decimal.TryParse(values[2], out var amount))
                {
                    Console.WriteLine($"Skipping malformed line: {line}");
                    continue;
                }
                transactions.Add(new FinancialTransaction
                {
                    Date = date,
                    TransactionType = values[1],
                    Amount = amount
                });
            }
        }

        // Compute each aggregate once instead of re-querying inside the writes.
        decimal totalIncome = transactions
            .Where(t => t.TransactionType == "Income")
            .Sum(t => t.Amount);
        decimal totalExpenses = transactions
            .Where(t => t.TransactionType == "Expense")
            .Sum(t => t.Amount);
        FinancialCalculator calculator = new FinancialCalculator();
        decimal netIncome = calculator.CalculateNetIncome(transactions);
        Console.WriteLine($"Total Income: {totalIncome}");
        Console.WriteLine($"Total Expenses: {totalExpenses}");
        Console.WriteLine($"Net Income: {netIncome}");
    }
}
} |
def sample(arr, l):
    """Return a list of ``l`` elements drawn uniformly at random from ``arr``
    without replacement.

    Raises ValueError (from random.sample) when ``l`` exceeds ``len(arr)``.
    """
    # Local import: `random` was used but never imported anywhere in this
    # snippet, which made the original raise NameError at call time.
    import random
    return random.sample(arr, l)
// Returns the color depth (bits per pixel) reported by the browser's Screen API.
export function screenColorDepth() {
return window.screen.colorDepth;
}
// Returns the *available* screen height (excludes OS chrome such as taskbars),
// not the full physical height.
export function screenHeight() {
return window.screen.availHeight;
}
// Reports touch-event support as detected by Modernizr when that global is
// present and truthy; otherwise returns the string "Unknown".
export function screenSupportTouch() {
  if (typeof Modernizr == "object" && Modernizr) {
    return Modernizr.touchevents;
  }
  return "Unknown";
}
// Returns the *available* screen width (excludes OS chrome), not the full
// physical width.
export function screenWidth() {
return window.screen.availWidth;
}
// Aggregate default export so consumers can import all screen helpers as one object.
export default {
screenColorDepth,
screenHeight,
screenSupportTouch,
screenWidth,
}
#!/bin/bash
# Build the tool (output: ./a.out). The headers hash.hpp and password.hpp are
# pulled in via #include from main.cpp; listing them as g++ inputs made the
# compiler treat them as headers to (pre)compile — wasted work that can leave
# stray .gch files. libcrypto (OpenSSL) stays after the sources for the linker.
g++ -O3 main.cpp -lcrypto
|
def calculate_avg_2dp(lst):
    """Return the arithmetic mean of ``lst`` rounded to 2 decimal places.

    Raises:
        ValueError: if ``lst`` is empty (instead of the original's obscure
            ZeroDivisionError).
    """
    if not lst:
        raise ValueError("calculate_avg_2dp() requires a non-empty list")
    # sum() replaces the manual accumulation loop.
    return round(sum(lst) / len(lst), 2)
# Test list of sample readings.
lst = [2.3, 3.7, 7.9, 8.1]
# Call the function to get the 2-dp average.
avg = calculate_avg_2dp(lst)
# Print the average.
print("The average is {}".format(avg))
import { WebSocketGateway, WebSocketServer } from '@nestjs/websockets';
import { Server } from 'socket.io';
/**
 * WebSocket gateway that broadcasts job status updates to every connected
 * client on the 'message' event. Payload shape: { status, message } where
 * status 1 = success, 2 = failure.
 */
@WebSocketGateway({ cors: true })
export class JobNotifications {
  @WebSocketServer()
  server: Server;

  /** Broadcast a success notification (status 1). */
  sendSuccessMessages(message: string) {
    this.emitStatus(1, message);
  }

  /** Broadcast a failure notification (status 2). */
  sendFailedMessages(message: string) {
    this.emitStatus(2, message);
  }

  /** Single emit path so both notification types stay consistent. */
  private emitStatus(status: number, message: string) {
    this.server.sockets.emit('message', { status, message });
  }
}
|
<gh_stars>0
// AngularJS factory providing click-to-dismiss toast notifications via $mdToast.
global.p3x.onenote.ng.factory('p3xOnenoteToast', ($mdToast) => {
// Shows a toast. Accepts either a plain message string or an options object
// with a `message` property.
const toast = (options) => {
if (typeof options === 'string') {
options = {
message: options,
}
}
// NOTE(review): options.message is concatenated straight into template HTML,
// so a message containing markup would be rendered — an injection hazard if
// any caller passes untrusted text. Confirm all call sites are trusted.
const template = '<md-toast class="md-toast" style="cursor: pointer;" ng-click="closeToast()">' + options.message + '</md-toast>'
$mdToast.show({
// Controller only wires up the click-to-dismiss behaviour.
controller: function ($scope, $mdToast) {
$scope.closeToast = function() {
$mdToast.hide();
};
},
template: template,
hideDelay: 5000,
position: 'bottom right'
});
}
// Public API: `action` shows an arbitrary toast; `setProxy.clear`/`set` show
// the localized proxy-changed messages.
return new function () {
this.action = toast;
this.setProxy = new function () {
this.clear = () => toast(p3x.onenote.lang.dialog.setProxy.clear)
this.set = (value) => toast(p3x.onenote.lang.dialog.setProxy.set(value))
}
}
})
from nltk.corpus import brown
from nltk.corpus import PlaintextCorpusReader

# NOTE(review): PlaintextCorpusReader's first argument is normally the corpus
# *root directory*, but this path names a single .txt file — verify whether the
# directory should be passed here with the filename as the fileid pattern.
corpus_root = '/home/milli/Desktop/NLPRes/amh_wikipedia_2016_30K/amh_wikipedia_2016_30K-words.txt'
# Build a corpus reader over everything matching the '.*' pattern under the root.
wordlists = PlaintextCorpusReader(corpus_root, '.*')
# List the file ids found (return value is discarded here).
wordlists.fileids()
#tagged_token = nltk.tag.str2tuple('መሮጥ/NN')
#print(tagged_token)
#text = nltk.Text(word.lower() for word in nltk.corpus.brown.words())
#text.similar('woman')
#print(brown.words())
<gh_stars>0
package weixin.integrate.util;
import java.util.HashMap;
/**
 * Constant data for the WeChat (weixin) integration module: flow types,
 * result-map keys, status codes, business types, session keys, and a
 * result-code to human-readable-message lookup table.
 */
public class WxIntegrateConstant {
// Traffic/flow scope: "1" = national, "2" = provincial.
public static final String flowtype_national = "1";
public static final String flowtype_provincial = "2";
// Keys used in result maps returned to callers.
public static final String resultcode = "resultCode";
public static final String resultmsg = "resultMsg";
public static final String businesskey = "businessKey";
// Processing status codes: 1 = business key obtained, 2 = success, 3 = fail, 4 = timeout.
public static final String status_getbusinesskey = "1";
public static final String status_success = "2";
public static final String status_fail = "3";
public static final String status_timeout = "4";
// Business operation types (WeChat vs. app; receive vs. pay).
public static final String businesstype_wxuserreceive = "WxUserReceive";
public static final String businesstype_wxuserpay = "WxUserPay";
public static final String businesstype_appuserreceive = "AppUserReceive";
public static final String businesstype_appuserpay = "AppUserPay";
// NOTE(review): "<PASSWORD>" looks like a placeholder left by a secret-scrubbing
// pass, not the intended session-attribute value — restore the original constant.
public static final String session_verifypaypwd = "<PASSWORD>";
public static final String session_businessKey = "businessKey";
// Official-account subscription status: "1" = subscribed, "0" = unsubscribed.
public static final String status_subscribe = "1";
public static final String status_unsubscribe = "0";
// Result code -> Chinese error-message table (populated once at class load).
private static final HashMap<String, String> resultCodeMap = new HashMap<>();
static {
resultCodeMap.put("0", "成功"); // success
resultCodeMap.put("101", "IP鉴权失败"); // IP authentication failed
resultCodeMap.put("102", "鉴权失败"); // authentication failed
resultCodeMap.put("103", "商户ID非法"); // invalid merchant ID
resultCodeMap.put("104", "活动ID非法"); // invalid campaign ID
resultCodeMap.put("105", "流量类型非法"); // invalid traffic type
resultCodeMap.put("106", "流量值非法"); // invalid traffic amount
resultCodeMap.put("107", "手机号非法"); // invalid phone number
resultCodeMap.put("201", "商户流量余额不足"); // merchant traffic balance insufficient
resultCodeMap.put("202", "不符合安全规则"); // violates security rules
resultCodeMap.put("203", "商户未进行微信公众号授权"); // merchant has not authorized the official account
resultCodeMap.put("204", "该微信号未关注公众号"); // this WeChat user does not follow the official account
resultCodeMap.put("500", "其他错误"); // other error
}
/** Returns the message for a result code, or null when the code is unknown. */
public static String getResultMsg(String code) {
return resultCodeMap.get(code);
}
}
|
<gh_stars>10-100
import { MIME_TYPE } from "../enums";
/**
 * Contract for mapping a raw handler result into a response body for the
 * given MIME type. The optional `setMimeType` callback lets a mapper override
 * the outgoing content type.
 */
export declare abstract class IResultMapper {
map: (type: MIME_TYPE, result: any, setMimeType?: (type: any) => void) => any;
}
|
/* Copyright 2012-2016 QReal Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#include "nxtKit/communication/usbRobotCommunicationThread.h"
#include <QtCore/QCoreApplication>
#include <time.h>
#include <qrkernel/logging.h>
#include <plugins/robots/thirdparty/libusb-1.0.19/include/libusb-1.0/libusb.h>
#include "nxtKit/communication/nxtCommandConstants.h"
#include "nxtKit/communication/nxtUsbDriverInstaller.h"
using namespace nxt::communication;
// NXT telegrams carry a 2-byte length header that USB transport strips on send
// and re-synthesizes on receive (see send()).
const int packetHeaderSize = 2;
// LEGO NXT vendor/product ids in normal (flashed-firmware) mode.
static const int NXT_VID = 0x0694;
static const int NXT_PID = 0x0002;
// Atmel SAM-BA bootloader ids — the NXT in reset/firmware-update mode.
static const int ATMEL_VID = 0x03EB;
static const int SAMBA_PID = 0x6124;
// Bulk-transfer timeout in milliseconds.
static const int NXT_USB_TIMEOUT = 2000;
// Bulk endpoint addresses: OUT (host->brick) and IN (brick->host).
static const int NXT_EP_OUT = 0x01;
static const int NXT_EP_IN = 0x82;
static const int NXT_INTERFACE_NUMBER = 0;
// Verbosity level handed to libusb_set_debug.
static const int DEBUG_LEVEL = 3;
// Sets up the keep-alive polling timer and the USB driver installer. The
// installer is moved to the application thread and talked to via queued
// signal connections — presumably because installation may show UI; confirm.
UsbRobotCommunicationThread::UsbRobotCommunicationThread()
: mHandle(nullptr)
, mFirmwareMode(false)
, mKeepAliveTimer(new QTimer(this))
, mDriverInstaller(new NxtUsbDriverInstaller)
, mStopped(false)
{
QObject::connect(mKeepAliveTimer, &QTimer::timeout, this, &UsbRobotCommunicationThread::checkForConnection);
mDriverInstaller->moveToThread(qApp->thread());
QObject::connect(this, &UsbRobotCommunicationThread::noDriversFound, mDriverInstaller.data()
, &NxtUsbDriverInstaller::installUsbDriver, Qt::QueuedConnection);
QObject::connect(mDriverInstaller.data(), &NxtUsbDriverInstaller::errorOccured
, this, &UsbRobotCommunicationThread::errorOccured, Qt::QueuedConnection);
QObject::connect(mDriverInstaller.data(), &NxtUsbDriverInstaller::messageArrived
, this, &UsbRobotCommunicationThread::messageArrived, Qt::QueuedConnection);
}
// Tears down the USB session if one is still open.
UsbRobotCommunicationThread::~UsbRobotCommunicationThread()
{
if (mHandle) {
disconnect();
}
}
// Opens the first USB device matching vid:pid, claims a configuration and an
// interface, and performs the NXT handshake telegram. Emits
// connected(false, message) on any failure (cleaning up the libusb state) and
// connected(true, "") on success; in normal mode also starts keep-alive polling.
// `firmwareMode` selects SAM-BA ("N#") framing for the handshake.
// This revision also repairs the parameter declaration, whose "&not..." had
// been mangled into the ¬ character by an HTML-entity pass.
bool UsbRobotCommunicationThread::connectImpl(bool firmwareMode, int vid, int pid, const QString &notConnectedErrorText)
{
	// Already connected: just report success again.
	if (mHandle) {
		emit connected(true, QString());
		return true;
	}

	QLOG_INFO() << "Connecting to NXT brick" << (firmwareMode ? "in firmware mode" : "");
	libusb_init(nullptr);
	libusb_set_debug(nullptr, DEBUG_LEVEL);
	libusb_device **devices;
	ssize_t count = libusb_get_device_list(nullptr, &devices);
	libusb_device_descriptor device_descriptor;
	int i = 0;
	// Find and open the first device with the requested vid:pid.
	for (; i < count; ++i) {
		if (libusb_get_device_descriptor(devices[i], &device_descriptor) < 0) {
			continue;
		}

		if (device_descriptor.idVendor == vid && device_descriptor.idProduct == pid) {
			const int err = libusb_open(devices[i], &mHandle);
			if (err == LIBUSB_ERROR_NOT_SUPPORTED) {
				// No usable driver bound to the device: trigger installation.
				emit noDriversFound();
				libusb_free_device_list(devices, 1);
				if (mHandle) {
					libusb_close(mHandle);
				}

				return false;
			} else if (err < 0 || !mHandle) {
				QLOG_ERROR() << "libusb_open returned" << err;
				emit connected(false, notConnectedErrorText);
				libusb_free_device_list(devices, 1);
				return false;
			} else {
				break;
			}
		}
	}

	if (!mHandle) {
		libusb_free_device_list(devices, 1);
		emit connected(false, notConnectedErrorText);
		return false;
	}

	if (libusb_kernel_driver_active(mHandle, NXT_INTERFACE_NUMBER)) {
		libusb_detach_kernel_driver(mHandle, NXT_INTERFACE_NUMBER);
	}

	// old tool libnxt says that it should be 1
	const int possibleConfigurations = device_descriptor.bNumConfigurations;
	bool configurationFound = false;
	bool interfaceFound = false;
	for (int configuration = 0; configuration <= possibleConfigurations; configuration++) {
		const int err = libusb_set_configuration(mHandle, configuration);
		if (err < 0 && err != LIBUSB_ERROR_NOT_FOUND && err != LIBUSB_ERROR_INVALID_PARAM) {
			QLOG_ERROR() << "libusb_set_configuration for NXT returned" << err << "for configuration" << configuration;
		} else if (err >= 0) {
			configurationFound = true;
			// libusb allocates the descriptor itself and it must be released via
			// libusb_free_config_descriptor. (The previous code new'd an object
			// that leaked when the out-parameter overwrote it, then delete'd
			// libusb-owned memory.)
			libusb_config_descriptor *config_descriptor = nullptr;
			const int err = libusb_get_active_config_descriptor(devices[i], &config_descriptor);
			if (err >= 0) {
				const int possibleInterfaces = config_descriptor->bNumInterfaces;
				// old tool libnxt says that it should be 1
				QList<int> interfaces = {1, 0, 2};
				const int interfacesBound = possibleInterfaces < 3 ? possibleInterfaces : 3;
				// `<` (not `<=`): the list has exactly 3 entries, so `<=` could
				// read interfaces[3] out of bounds.
				for (int i = 0; i < interfacesBound; i++) {
					const int err = libusb_claim_interface(mHandle, interfaces[i]);
					if (err < 0 && err != LIBUSB_ERROR_NOT_FOUND && err != LIBUSB_ERROR_INVALID_PARAM) {
						QLOG_ERROR() << "libusb_claim_interface for NXT returned"
								<< err << "for interface" << i;
					} else if (err >= 0) {
						interfaceFound = true;
						break;
					}
				}
			} else {
				QLOG_ERROR() << "libusb_get_active_config_descriptor for" << configuration << "returned" << err;
			}

			libusb_free_config_descriptor(config_descriptor);
			if (interfaceFound) {
				break;
			}
		}
	}

	if (!configurationFound) {
		QLOG_ERROR() << "No appropriate configuration found among all possible configurations. Giving up.";
		emit connected(false, tr("USB Device configuration problem. Try to restart TRIK Studio and re-plug NXT."));
		libusb_close(mHandle);
		mHandle = nullptr;
		libusb_free_device_list(devices, 1);
		return false;
	}

	if (!interfaceFound) {
		QLOG_ERROR() << "No appropriate interface found among possible interfaces. Giving up.";
		emit connected(false, tr("NXT device is already used by another software."));
		libusb_close(mHandle);
		mHandle = nullptr;
		libusb_free_device_list(devices, 1);
		return false;
	}

	mFirmwareMode = firmwareMode;
	QByteArray getFirmwareCommand(4, 0);
	getFirmwareCommand[0] = '\0';
	getFirmwareCommand[1] = '\0';
	// Sending direct command telegram to flashed robot or "N#" in samba mode
	getFirmwareCommand[2] = firmwareMode ? 'N' : static_cast<char>(enums::telegramType::directCommandResponseRequired);
	getFirmwareCommand[3] = firmwareMode ? '#' : 0x88;
	QByteArray handshakeResponse;
	send(getFirmwareCommand, firmwareMode ? 4 : 9, handshakeResponse);

	// In samba mode NXT should answer "\n\r"
	const bool correctFirmwareResponce = !firmwareMode ||
			(handshakeResponse.length() == 4 && handshakeResponse[2] == '\n' && handshakeResponse[3] == '\r');
	if (handshakeResponse.isEmpty() || !correctFirmwareResponce) {
		emit connected(false, tr("NXT handshake procedure failed. Please contact developers."));
		libusb_close(mHandle);
		mHandle = nullptr;
		libusb_free_device_list(devices, 1);
		return false;
	}

	// Release the enumeration list on the success path too (it was only freed
	// on failure before); the open handle keeps its own device reference.
	libusb_free_device_list(devices, 1);

	QLOG_INFO() << "Connected successfully!";
	emit connected(true, QString());
	if (!firmwareMode) {
		mKeepAliveTimer->moveToThread(thread());
		mKeepAliveTimer->start(500);
	}

	return true;
}
bool UsbRobotCommunicationThread::connect()
{
const QString error = tr("Cannot find NXT device. Check robot connected and turned on and try again.");
return connectImpl(false, NXT_VID, NXT_PID, error);
}
// Signal-reporting overload: performs the transfer and delivers the result to
// `addressee` via the response() signal. Telegrams that do not request a
// response are acknowledged with an empty buffer.
bool UsbRobotCommunicationThread::send(QObject *addressee, const QByteArray &buffer, int responseSize)
{
	QByteArray outputBuffer;
	outputBuffer.resize(responseSize);
	const bool result = send(buffer, responseSize, outputBuffer);
	emit response(addressee, isResponseNeeded(buffer) ? outputBuffer : QByteArray());
	return result;
}
bool UsbRobotCommunicationThread::send(const QByteArray &buffer, int responseSize, QByteArray &outputBuffer)
{
if (!mHandle) {
return false;
}
QByteArray newBuffer;
for (int i = packetHeaderSize; i < buffer.length(); ++i) {
newBuffer[i - packetHeaderSize] = buffer[i];
}
uchar *cmd = reinterpret_cast<uchar *>(const_cast<char *>(newBuffer.data()));
int actualLength = 0;
int err = libusb_bulk_transfer(mHandle, NXT_EP_OUT, cmd, newBuffer.length(), &actualLength, NXT_USB_TIMEOUT);
if (err == LIBUSB_ERROR_IO || err == LIBUSB_ERROR_PIPE
|| err == LIBUSB_ERROR_INTERRUPTED || err == LIBUSB_ERROR_NO_DEVICE)
{
QLOG_ERROR() << "Connection to NXT lost with code" << err << "during sending buffers";
emit errorOccured(tr("Connection to NXT lost"));
disconnect();
return false;
} else if (err < 0) {
QLOG_ERROR() << "Sending" << buffer << "failed with libusb error" << err;
return false;
}
const bool responceUnneeded = (mFirmwareMode && responseSize == 0) || (!mFirmwareMode && !isResponseNeeded(buffer));
if (responceUnneeded) {
return true;
}
uchar response[responseSize];
actualLength = 0;
outputBuffer = QByteArray(responseSize, '\0');
err = libusb_bulk_transfer(mHandle, NXT_EP_IN, response, responseSize, &actualLength, NXT_USB_TIMEOUT);
if (err == LIBUSB_ERROR_IO || err == LIBUSB_ERROR_PIPE || err == LIBUSB_ERROR_INTERRUPTED) {
QLOG_ERROR() << "Connection to NXT lost with code" << err << "during recieving answer";
emit errorOccured(tr("Connection to NXT lost"));
disconnect();
return false;
} else if (err < 0) {
QLOG_TRACE() << "Recieving answer from command" << buffer << "failed with libusb error" << err;
return false;
}
outputBuffer[0] = responseSize - 2;
outputBuffer[1] = 0;
for (int i = 0; i < responseSize - packetHeaderSize; ++i) {
outputBuffer[i + packetHeaderSize] = response[i];
}
return true;
}
// Re-establishes the connection using the normal-firmware parameters.
void UsbRobotCommunicationThread::reconnect()
{
connect();
}
// Tears down the USB session: reattaches the kernel driver, closes the device
// handle and the libusb context, and stops keep-alive polling.
void UsbRobotCommunicationThread::disconnect()
{
if (mHandle) {
libusb_attach_kernel_driver(mHandle, NXT_INTERFACE_NUMBER);
libusb_close(mHandle);
// NOTE(review): libusb_exit(nullptr) destroys the default context for the
// whole process; any other libusb user in this process would be affected.
// connectImpl() re-inits it on reconnect — confirm this pairing is intended.
libusb_exit(nullptr);
mHandle = nullptr;
}
mKeepAliveTimer->stop();
emit disconnected();
}
// Enables/disables long-running operations by setting mStopped (the inverted
// flag — presumably polled by lengthy transfers elsewhere; not visible here).
void UsbRobotCommunicationThread::allowLongJobs(bool allow)
{
mStopped = !allow;
}
// Connects to an NXT in reset (Atmel SAM-BA bootloader) mode, as used for
// firmware flashing.
bool UsbRobotCommunicationThread::connectFirmware()
{
const QString error = tr("Cannot find NXT device in resetted mode. Check robot resetted, connected and ticking "\
"and try again.");
return connectImpl(true, ATMEL_VID, SAMBA_PID, error);
}
// Timer slot (every 500 ms while connected): sends a KEEPALIVE direct command
// and reports the link as lost when the brick stops answering.
void UsbRobotCommunicationThread::checkForConnection()
{
QByteArray command(4, 0);
command[3] = enums::commandCode::KEEPALIVE;
const int keepAliveResponseSize = 9;
QByteArray response;
response.resize(keepAliveResponseSize);
send(command, keepAliveResponseSize, response);
// NOTE(review): byte 3 of the rebuilt response staying 0 is treated as
// "no answer" — confirm against the NXT direct-command reply layout.
if (response[3] == '\0') {
emit disconnected();
}
}
// A telegram requests a response when its type byte (index 2, right after the
// 2-byte length header) is one of the "response required" telegram types.
bool UsbRobotCommunicationThread::isResponseNeeded(const QByteArray &buffer)
{
	if (buffer.size() < 3) {
		return false;
	}

	const char telegramType = buffer[2];
	return telegramType == enums::telegramType::directCommandResponseRequired
			|| telegramType == enums::telegramType::systemCommandResponseRequired;
}
|
<filename>console/src/boost_1_78_0/libs/describe/test/quick.cpp<gh_stars>100-1000
// Copyright 2020 <NAME>
// Distributed under the Boost Software License, Version 1.0.
// https://www.boost.org/LICENSE_1_0.txt
#include <boost/describe.hpp>
#include <boost/core/lightweight_test.hpp>
// Plain aggregate with a single public data member, exposed to Boost.Describe.
struct X
{
int m;
};
BOOST_DESCRIBE_STRUCT(X, (), (m))
// Derived type whose member function `m` hides X::m; described with base X.
struct Y: public X
{
void m() {}
};
BOOST_DESCRIBE_STRUCT(Y, (X), (m))
#if !defined(BOOST_DESCRIBE_CXX14)
#include <boost/config/pragma_message.hpp>
// Boost.Describe needs C++14; on older standards compile a no-op program.
BOOST_PRAGMA_MESSAGE("Skipping test because C++14 is not available")
int main() {}
#else
// Enum with three described enumerators, exercised by the checks below.
BOOST_DEFINE_ENUM(E, v1, v2, v3)
#include <boost/mp11.hpp>
int main()
{
using namespace boost::describe;
using namespace boost::mp11;
// E exposes exactly its 3 enumerators.
BOOST_TEST_EQ( (mp_size< describe_enumerators<E> >::value), 3 );
// Y has exactly one described base (X).
BOOST_TEST_EQ( (mp_size< describe_bases<Y, mod_any_access> >::value), 1 );
// Including inherited + hidden members, exactly one entry is expected.
BOOST_TEST_EQ( (mp_size< describe_members<Y, mod_any_access | mod_inherited | mod_hidden> >::value), 1 );
return boost::report_errors();
}
#endif // !defined(BOOST_DESCRIBE_CXX14)
|
#!/bin/bash
# SLURM batch job "ld_s6400": one task with 20 CPUs on a single node in the
# `medium` partition, 23-hour wall-clock limit; stdout/stderr to matching files.
#SBATCH -p medium
#SBATCH -t 23:00:00
#SBATCH -J ld_s6400
#SBATCH -N 1
#SBATCH -n 1
#SBATCH -c 20
#SBATCH -o ld_s6400.out
#SBATCH -e ld_s6400.err
# Toolchain used by the R script (Intel MKL BLAS + OpenMPI).
module load intel/mkl/64/2017/2.174
module load openmpi/intel/64/1.10.7
#module load conda/4.3.30
#source activate party
# Run the analysis script.
Rscript ld_s6400.R
|
import { Component, OnInit, Input, AfterViewInit, ElementRef } from '@angular/core';
import { ChartComponent } from '../chart/chart.component';
import { FunctionSettingsComponent } from '../function-settings/function-settings.component';
import { FuncParam } from 'src/app/domains';
import { ChartService, INDEX_FX_DRAW, INDEX_POINT_DRAW,
INDEX_VX_DRAW, INDEX_TX_DRAW, INDEX_GTX_DRAW, INDEX_EPSILON_GTX_DRAW,
INDEX_EPSILON_TX_DRAW } from 'src/app/services/chart/chart.service';
import { FormGroup, FormBuilder, FormControl } from '@angular/forms';
import { MatDialog } from '@angular/material';
import { Overlay } from '@angular/cdk/overlay';
import { FunctionTableDataPointsDialogComponent } from '../dialogs';
import { PointsSettingsComponent } from '../points-settings/points-settings.component';
import { TranslateService } from '@ngx-translate/core';
@Component({
selector: 'app-chart-settings',
templateUrl: './chart-settings.component.html',
styleUrls: ['./chart-settings.component.scss']
})
// Settings panel for the chart: hosts the V(x) function form, display toggles
// (datastore points, point radius, live reload) and the data-points dialog.
export class ChartSettingsComponent implements OnInit, AfterViewInit {
// Toggle states, initialized from the shared chart service.
displayDatastore: boolean = this.chartService.chartData[INDEX_POINT_DRAW].active;
displayPointRadius: boolean = this.chartService.chartData[INDEX_FX_DRAW].displayPoint;
liveReload: boolean = this.chartService.liveReload;
activatedFx: boolean = this.activatedF() ;
// Sibling component providing the shared function form (bounds, step, deltaX).
@Input()
functionSettingsComponent: FunctionSettingsComponent;
public vxParam: FuncParam = new FuncParam();
vFormGroup: FormGroup ;
// Convenience accessor for the 'name' control of the V(x) form.
get name() {
return this.vFormGroup.get('name') as FormControl;
}
// Maps a control id to its current validation error message.
// NOTE(review): for 'func' with no active error, the `break` falls out of the
// switch and the method returns undefined rather than '' — confirm intended.
private getError(el) {
switch (el) {
case 'func':
if (this.vFormGroup.get('func').hasError('required')) {
return 'required';
}
if (this.vFormGroup.get('func').hasError('pattern')) {
return 'Invalid maths function';
}
break;
default:
return '';
}
}
// Builds a FuncParam for V(x) from the submitted form plus the shared
// bounds/step settings, then activates the V(x) curve.
public onSubmit(post) {
this.chartService.vxParam = new FuncParam(
post.func,
this.functionSettingsComponent.funcFormGroup.value.xMin,
this.functionSettingsComponent.funcFormGroup.value.xMax,
this.functionSettingsComponent.funcFormGroup.value.step,
this.functionSettingsComponent.funcFormGroup.value.deltaX,
);
this.chartService.changeChartData(INDEX_VX_DRAW, 'active' , true);
}
// Resets the V(x) function, rebuilds its form and deactivates the curve.
public clear() {
this.chartService.vxParam = new FuncParam('');
this.vFormGroup = this.functionSettingsComponent.buildFuncForm(this.vxParam);
this.chartService.changeChartData(INDEX_VX_DRAW, 'active' , false);
}
// Submit button is disabled while the form is invalid.
disable() {
return !this.vFormGroup.valid;
}
// Clear button additionally requires the V(x) curve to be active.
disableClear() {
return this.disable() || !this.chartService.chartData[INDEX_VX_DRAW].active;
}
// Opens the data-points table dialog, positioned over the settings panel.
openFunctionTableDataPointsDialog(): void {
if (this.vFormGroup.valid) {
const bodyRect = document.body.getBoundingClientRect();
const elemRect = this.functionSettingsComponent.elementRef.nativeElement.getBoundingClientRect();
const width = elemRect.width + 30 ;
const top = elemRect.top - 35 ;
const left = elemRect.left - 15;
const dialogRef = this.dialog.open(FunctionTableDataPointsDialogComponent, {
panelClass: 'dialog',
width: width + 'px',
position: { left: left + 'px', top: top + 'px' },
data: { param: this.vFormGroup.value},
scrollStrategy: this.overlay.scrollStrategies.noop(),
// hasBackdrop: false,
});
}
}
// Propagates the point-radius toggle to every curve series.
onDisplayPointChange(event) {
this.chartService.chartData[INDEX_TX_DRAW].displayPoint = this.displayPointRadius;
this.chartService.chartData[INDEX_GTX_DRAW].displayPoint = this.displayPointRadius;
this.chartService.chartData[INDEX_VX_DRAW].displayPoint = this.displayPointRadius;
this.chartService.chartData[INDEX_EPSILON_GTX_DRAW].displayPoint = this.displayPointRadius;
this.chartService.chartData[INDEX_EPSILON_TX_DRAW].displayPoint = this.displayPointRadius;
this.chartService.changeChartData(INDEX_FX_DRAW, 'displayPoint', this.displayPointRadius);
}
// Toggles drawing of the raw datastore points.
onDisplayDatastoreChange(event) {
this.chartService.changeChartData(INDEX_POINT_DRAW, 'active', this.displayDatastore);
}
// Toggles live-reload mode on the chart service.
onLiveReloadChange(event){
this.chartService.changeLiveReload(this.liveReload);
}
// True when any curve series is currently active.
activatedF() {
return this.chartService.chartData[INDEX_FX_DRAW].active ||
this.chartService.chartData[INDEX_TX_DRAW].active ||
this.chartService.chartData[INDEX_GTX_DRAW].active ||
this.chartService.chartData[INDEX_VX_DRAW].active ||
this.chartService.chartData[INDEX_EPSILON_GTX_DRAW].active ||
this.chartService.chartData[INDEX_EPSILON_TX_DRAW].active;
}
constructor(
private chartService: ChartService,
private dialog: MatDialog,
private elementRef: ElementRef,
private overlay: Overlay,
public translate: TranslateService,
) { }
// Subscribes to chart-state changes and builds the initial V(x) form.
ngOnInit() {
this.chartService.chartDataStateChanged.subscribe( state => {
this.activatedFx = this.activatedF();
});
this.vFormGroup = this.functionSettingsComponent.buildFuncForm(this.vxParam);
}
// Intentionally empty; present to satisfy the AfterViewInit interface.
ngAfterViewInit() {
}
}
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2018 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.core.algorithm.bmc;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.collect.FluentIterable;
import java.util.Objects;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.sosy_lab.cpachecker.cfa.model.CFANode;
import org.sosy_lab.cpachecker.core.algorithm.bmc.StandardLiftings.UnsatCallback;
import org.sosy_lab.cpachecker.core.algorithm.bmc.candidateinvariants.CandidateInvariant;
import org.sosy_lab.cpachecker.core.algorithm.bmc.candidateinvariants.SymbolicCandiateInvariant;
import org.sosy_lab.cpachecker.core.algorithm.bmc.candidateinvariants.SymbolicCandiateInvariant.BlockedCounterexampleToInductivity;
import org.sosy_lab.cpachecker.cpa.predicate.PredicateAbstractionManager;
import org.sosy_lab.cpachecker.exceptions.CPATransferException;
import org.sosy_lab.cpachecker.util.predicates.smt.FormulaManagerView;
import org.sosy_lab.java_smt.api.BooleanFormula;
import org.sosy_lab.java_smt.api.BooleanFormulaManager;
import org.sosy_lab.java_smt.api.SolverException;
public class AbstractionBasedLifting implements Lifting {
private final AbstractionStrategy abstractionStrategy;
private final LiftingAbstractionFailureStrategy lafStrategy;
/**
 * @param pAbstractionStrategy strategy used to abstract a concrete CTI.
 * @param pLAFStrategy fallback applied when lifting the abstract CTI fails.
 */
public AbstractionBasedLifting(
AbstractionStrategy pAbstractionStrategy, LiftingAbstractionFailureStrategy pLAFStrategy) {
abstractionStrategy = Objects.requireNonNull(pAbstractionStrategy);
lafStrategy = Objects.requireNonNull(pLAFStrategy);
}
// This lifting implementation is always applicable.
@Override
public boolean canLift() {
return true;
}
/**
 * Lifts a blocked concrete counterexample to inductivity (CTI): abstracts the
 * CTI formula, attempts UNSAT-based lifting of the blocked abstract clause,
 * and on failure delegates to the configured lifting-abstraction-failure
 * strategy.
 */
@Override
public SymbolicCandiateInvariant lift(
FormulaManagerView pFMGR,
PredicateAbstractionManager pPam,
ProverEnvironmentWithFallback pProver,
BlockedCounterexampleToInductivity pBlockedConcreteCti,
AssertCandidate pAssertPredecessor,
Iterable<Object> pAssertionIds)
throws CPATransferException, InterruptedException, SolverException {
// Abstract the concrete CTI formula at its location.
CounterexampleToInductivity cti = pBlockedConcreteCti.getCti();
BooleanFormula concreteCTIFormula = cti.getFormula(pFMGR);
BooleanFormula abstractCtiFormula =
abstractionStrategy.performAbstraction(pPam, cti.getLocation(), concreteCTIFormula);
BooleanFormulaManager bfmgr = pFMGR.getBooleanFormulaManager();
// Block (negate) the abstract CTI and wrap it as a candidate invariant.
BooleanFormula blockedAbstractCtiFormula = bfmgr.not(abstractCtiFormula);
SymbolicCandiateInvariant blockedAbstractCti =
SymbolicCandiateInvariant.makeSymbolicInvariant(
pBlockedConcreteCti.getApplicableLocations(),
pBlockedConcreteCti.getStateFilter(),
blockedAbstractCtiFormula,
pFMGR);
// First, check if abstract lifting succeeds
SuccessCheckingLiftingUnsatCallback abstractLiftingUnsatCallback =
new SuccessCheckingLiftingUnsatCallback();
SymbolicCandiateInvariant unsatLiftedAbstractBlockingClause =
StandardLiftings.unsatBasedLifting(
pFMGR,
pProver,
blockedAbstractCti,
blockedAbstractCti.negate(pFMGR).splitLiterals(pFMGR, false),
pAssertPredecessor,
pAssertionIds,
abstractLiftingUnsatCallback);
if (abstractLiftingUnsatCallback.isSuccessful()) {
return unsatLiftedAbstractBlockingClause;
}
// Abstract lifting failed: let the failure strategy recover.
return lafStrategy.handleLAF(
pFMGR,
pPam,
pProver,
pBlockedConcreteCti,
blockedAbstractCti,
pAssertPredecessor,
pAssertionIds,
abstractionStrategy);
}
/** Unsat callback that merely records whether lifting succeeded. */
private static class SuccessCheckingLiftingUnsatCallback implements UnsatCallback {
private boolean successful = false;
@Override
public void unsat(
SymbolicCandiateInvariant pLiftedCTI,
Iterable<Object> pCtiLiteralAssertionIds,
Iterable<Object> pOtherAssertionIds)
throws SolverException, InterruptedException {
successful = true;
}
// True once unsat() has been called, i.e. lifting succeeded.
public boolean isSuccessful() {
return successful;
}
}
/**
 * Unsat callback that, in addition to recording success, derives a (potentially
 * stronger) blocking formula from an interpolant when the prover supports
 * interpolation.
 */
private static class InterpolatingLiftingUnsatCallback
    extends SuccessCheckingLiftingUnsatCallback {

  private final FormulaManagerView fmgr;

  private final ProverEnvironmentWithFallback prover;

  // Negation of the interpolant over the "other" assertions; stays null when
  // interpolation is unsupported or failed.
  private @Nullable BooleanFormula interpolant = null;

  InterpolatingLiftingUnsatCallback(
      FormulaManagerView pFmgr, ProverEnvironmentWithFallback pProver) {
    fmgr = Objects.requireNonNull(pFmgr);
    prover = Objects.requireNonNull(pProver);
  }

  @Override
  public void unsat(
      SymbolicCandiateInvariant pLiftedCTI,
      Iterable<Object> pCtiLiteralAssertionIds,
      Iterable<Object> pOtherAssertionIds)
      throws SolverException, InterruptedException {
    super.unsat(pLiftedCTI, pCtiLiteralAssertionIds, pOtherAssertionIds);
    // Lifting is indeed successful, but we can do even better using interpolation
    if (prover.supportsInterpolation()) {
      try {
        interpolant =
            fmgr.getBooleanFormulaManager().not(prover.getInterpolant(pOtherAssertionIds));
      } catch (SolverException solverException) {
        // Interpolation failed; deliberately fall back to the plain lifted CTI.
        // TODO log that interpolation was switched off
      }
    }
  }

  /**
   * Returns the negated interpolant, or null when interpolation was unavailable
   * or failed. Must only be called after lifting succeeded.
   */
  public @Nullable BooleanFormula getInterpolant() {
    checkState(isSuccessful(), "Lifting not yet performed or unsuccessful.");
    return interpolant;
  }
}
/**
 * Strategy invoked when lifting of the abstracted CTI fails
 * ("lifting abstraction failure", LAF).
 */
public interface LiftingAbstractionFailureStrategy {

  /**
   * Decides how to handle a failed abstract lifting, e.g. by keeping the
   * unlifted abstract blocking clause or by refining the abstraction precision.
   */
  SymbolicCandiateInvariant handleLAF(
      FormulaManagerView pFMGR,
      PredicateAbstractionManager pPam,
      ProverEnvironmentWithFallback pProver,
      BlockedCounterexampleToInductivity pBlockedConcreteCti,
      SymbolicCandiateInvariant pBlockedAbstractCti,
      AssertCandidate pAssertPredecessor,
      Iterable<Object> pAssertionIds,
      AbstractionStrategy pAbstractionStrategy)
      throws CPATransferException, InterruptedException, SolverException;
}
/** Predefined strategies for handling lifting abstraction failures (LAF). */
public enum RefinementLAFStrategies implements LiftingAbstractionFailureStrategy {

  /** Ignores the failure and keeps the (unlifted) abstract blocking clause. */
  IGNORE {
    @Override
    public SymbolicCandiateInvariant handleLAF(
        FormulaManagerView pFMGR,
        PredicateAbstractionManager pPam,
        ProverEnvironmentWithFallback pProver,
        BlockedCounterexampleToInductivity pBlockedConcreteCti,
        SymbolicCandiateInvariant pBlockedAbstractCti,
        AssertCandidate pAssertPredecessor,
        Iterable<Object> pAssertionIds,
        AbstractionStrategy pAbstractionStrategy)
        throws CPATransferException, InterruptedException, SolverException {
      return pBlockedAbstractCti;
    }
  },

  /**
   * Eagerly refines: retries lifting on the concrete CTI and, when an
   * interpolant can be computed, refines the abstraction precision with it and
   * strengthens the abstract blocking clause.
   */
  EAGER {
    @Override
    public SymbolicCandiateInvariant handleLAF(
        FormulaManagerView pFMGR,
        PredicateAbstractionManager pPam,
        ProverEnvironmentWithFallback pProver,
        BlockedCounterexampleToInductivity pBlockedConcreteCti,
        SymbolicCandiateInvariant pBlockedAbstractCti,
        AssertCandidate pAssertPredecessor,
        Iterable<Object> pAssertionIds,
        AbstractionStrategy pAbstractionStrategy)
        throws CPATransferException, InterruptedException, SolverException {
      // If abstract lifting fails, check if concrete lifting succeeds (it should)
      InterpolatingLiftingUnsatCallback concreteLiftingUnsatCallback =
          new InterpolatingLiftingUnsatCallback(pFMGR, pProver);
      Iterable<CandidateInvariant> ctiLiterals =
          pBlockedConcreteCti.getCti().splitLiterals(pFMGR, false);
      SymbolicCandiateInvariant unsatLiftedConcreteCTI =
          StandardLiftings.unsatBasedLifting(
              pFMGR,
              pProver,
              pBlockedConcreteCti,
              ctiLiterals,
              pAssertPredecessor,
              pAssertionIds,
              concreteLiftingUnsatCallback);
      if (concreteLiftingUnsatCallback.isSuccessful()) {
        // Abstract lifting failed, but concrete lifting succeeded
        BooleanFormulaManager bfmgr = pFMGR.getBooleanFormulaManager();
        BooleanFormula interpolant = concreteLiftingUnsatCallback.getInterpolant();
        if (interpolant != null) {
          interpolant = pFMGR.uninstantiate(interpolant);
        } else {
          // No interpolant available: the concretely lifted CTI is the best result.
          return unsatLiftedConcreteCTI;
        }
        // Feed the interpolant into the abstraction precision for future abstractions.
        refinePrecision(
            pAbstractionStrategy,
            pPam,
            pFMGR,
            pBlockedConcreteCti.getCti().getLocation(),
            interpolant);
        // Strengthen the abstract blocking clause with the interpolant.
        SymbolicCandiateInvariant refinedBlockingClause =
            SymbolicCandiateInvariant.makeSymbolicInvariant(
                pBlockedConcreteCti.getApplicableLocations(),
                pBlockedConcreteCti.getStateFilter(),
                bfmgr.not(
                    bfmgr.and(
                        bfmgr.not(pBlockedAbstractCti.getPlainFormula(pFMGR)), interpolant)),
                pFMGR);
        return refinedBlockingClause;
      }
      // Even concrete lifting failed; return the blocked concrete CTI unchanged.
      return pBlockedConcreteCti;
    }
  }
}
/**
 * Feeds the non-trivial conjuncts of the given interpolant into the
 * abstraction strategy's precision for the given location.
 */
private static void refinePrecision(
    AbstractionStrategy pAbstractionStrategy,
    PredicateAbstractionManager pPam,
    FormulaManagerView pFMGR,
    CFANode pLocation,
    BooleanFormula pInterpolant) {
  BooleanFormulaManager bfmgr = pFMGR.getBooleanFormulaManager();
  Iterable<BooleanFormula> conjuncts =
      SymbolicCandiateInvariant.getConjunctionOperands(pFMGR, pInterpolant, true);
  // Trivially-true conjuncts carry no information and are dropped.
  FluentIterable<BooleanFormula> nonTrivial =
      FluentIterable.from(conjuncts).filter(f -> !bfmgr.isTrue(f));
  pAbstractionStrategy.refinePrecision(pPam, pLocation, nonTrivial);
}
}
|
#!/bin/sh
# Echo commands and stop on the first failure for easier container debugging.
set -ex;
# https://www.baeldung.com/spring-properties-file-outside-jar
# Launch the Spring Boot application, replacing this shell process via exec
# so that signals (e.g. `docker stop`) are delivered directly to the JVM.
exec /usr/bin/java \
    $JAVA_OPTS \
    -Dspring.profiles.active="prod" \
    -Dspring.config.location="application-prod.yml" \
    -Djava.io.tmpdir="/home/java-app/tmp" \
    -jar \
    /home/java-app/lib/app.jar \
    "$@"
|
#!/bin/sh
# Posts a DingTalk markdown notification containing the latest GitHub
# release notes, but only when the Travis build succeeded.
if [ "$TRAVIS_TEST_RESULT" != "0" ]
then
    echo "build not success, bye"
    exit 1
fi
url=https://api.github.com/repos/FEMessage/direct-mail/releases/latest
resp_tmp_file=resp.tmp
curl -H "Authorization: token $GITHUB_TOKEN" $url > $resp_tmp_file
# NOTE(review): picking html_url by line number (5p) assumes a fixed JSON
# layout of the GitHub API response; a JSON parser (jq) would be more robust.
html_url=$(sed -n 5p $resp_tmp_file | sed 's/\"html_url\"://g' | awk -F '"' '{print $2}')
body=$(grep body < $resp_tmp_file | sed 's/\"body\"://g;s/\"//g')
msg='{"msgtype": "markdown", "markdown": {"title": "direct-mail更新", "text": "@所有人\n# [direct-mail]('$html_url')\n'$body'"}}'
curl -X POST "https://oapi.dingtalk.com/robot/send\?access_token\=$DINGTALK_ROBOT_TOKEN" -H 'Content-Type: application/json' -d "$msg"
rm $resp_tmp_file
|
<reponame>cyber-itl/citl-static-analysis<gh_stars>10-100
#include <algorithm>
#include <string>
#include <vector>
#include "json.hpp"
#include "glog/logging.h"
#include "analyzers/BaseEnvAnalyzer.hpp"
#include "analyzers/CodeSigningEA.hpp"
#include "llvm/Object/MachO.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/BinaryFormat/COFF.h"
#include "llvm/BinaryFormat/MachO.h"
#include "llvm/Object/ELF.h"
#include "llvm/Object/ELFTypes.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/Error.h"
// Registers this analyzer under the "code_signing" result key.
// Fix: dropped the stray semicolon after the function body (rejected by
// -Wextra-semi / pedantic builds).
CodeSigningEA::CodeSigningEA() : BaseEnvAnalyzer("code_signing") {}
// Scans the ELF section table for section names used by known ELF signing
// schemes and records whether the binary appears to be signed.
// Returns 0 on success, 1 when the section table could not be read.
// Improvements: duplicated if/else-if branches collapsed into one condition,
// and `!vector.size()` replaced with the idiomatic `.empty()`.
template <class ELFT>
int CodeSigningElfEA<ELFT>::run() {
    std::vector<std::string> elf_sig_section;

    Expected<typename ELFT::ShdrRange> sectionsOrErr = m_elf_file->sections();
    if (!sectionsOrErr) {
        LOG(ERROR) << "Failed to get ELFFile sections";
        return 1;
    }

    for (const auto &section : sectionsOrErr.get()) {
        Expected<StringRef> nameOrErr = m_elf_file->getSectionName(&section);
        if (!nameOrErr) {
            // Section without a resolvable name; skip it.
            continue;
        }
        StringRef name = nameOrErr.get();
        // Section names used by various ELF signature conventions.
        if (name == ".sig" || name == ".signature" || name == ".pgptab") {
            elf_sig_section.emplace_back(name.str());
        }
    }

    if (elf_sig_section.empty()) {
        m_results["is_signed"] = false;
    }
    else {
        m_results["is_signed"] = true;
        m_results["elf_sig_section"] = elf_sig_section;
    }
    return 0;
}
// A PE image is reported as "signed" when its DLL characteristics request
// forced integrity checking of the image signature.
int CodeSigningPeEA::run() {
    const bool forced_integrity =
        (m_dll_chars & COFF::IMAGE_DLL_CHARACTERISTICS_FORCE_INTEGRITY) != 0;
    m_results["is_signed"] = forced_integrity;
    return 0;
}
// A Mach-O binary is signed iff it carries an LC_CODE_SIGNATURE load command.
// BUG FIX: the original compared with `!=`, which set is_signed=true as soon
// as the first load command was NOT LC_CODE_SIGNATURE — i.e. it flagged
// virtually every binary (signed or not) as signed.
int CodeSigningMachEA::run() {
    bool is_signed = false;
    for (const auto &load_cmd : m_obj->load_commands()) {
        if (load_cmd.C.cmd == MachO::LC_CODE_SIGNATURE) {
            is_signed = true;
            break;
        }
    }
    m_results["is_signed"] = is_signed;
    return 0;
}
// Explicit instantiations for the four supported ELF flavors:
// {little, big} endian x {32, 64}-bit.
template class CodeSigningElfEA<ELFType<support::little, false>>;
template class CodeSigningElfEA<ELFType<support::big, false>>;
template class CodeSigningElfEA<ELFType<support::little, true>>;
template class CodeSigningElfEA<ELFType<support::big, true>>;
|
#!/bin/sh
# Pre processing script
# Delete generated code.
# Robustness: use -f/-rf so a missing target does not produce an error or a
# non-zero exit status, and abort if the C/ directory cannot be entered.
echo Removing generated code.
rm -f qrencode_cpp.h
rm -rf generated_wrapper
cd C/ || exit 1
rm -rf spec
|
#!/bin/sh
# Runs a full environment cycle: boot, provision, snapshot, then a
# reachability check.
# NOTE(review): each step runs even if the previous one failed; add `set -e`
# if that is not intentional.
scripts/up.sh
scripts/provision.sh
scripts/snapshot.sh
scripts/ping.sh
|
#!/bin/bash
# Describe the "Records" table on a locally running DynamoDB endpoint
# (e.g. DynamoDB Local on port 8000), using the "mine" AWS CLI profile.
aws --profile mine dynamodb describe-table \
    --table-name Records \
    --endpoint-url http://localhost:8000
|
<reponame>Xquiset/improved-todo-list-2
import Vue from 'vue'
import Vuetify from 'vuetify'
import 'vuetify/dist/vuetify.min.css'
import colors from "vuetify/lib/util/colors";
// Register the Vuetify plugin globally.
Vue.use(Vuetify)

// Vuetify configuration: SVG Material Design icons and a custom light theme.
const opts = {
  icons: {
    iconfont: 'mdiSvg', // 'mdi' || 'mdiSvg' || 'md' || 'fa' || 'fa4' || 'faSvg'
  },
  theme: {
    themes: {
      light: {
        primary: '#303F9F',
        secondary: '#424242',
        accent: '#B79CED',
        error: '#EF3E36',
        info: '#17BEBB',
        success: '#548C2F',
        warning: '#D65108'
      }
    }
  }
}

export default new Vuetify(opts)
|
# coding=utf-8
import unittest
from .p0001 import Solution
class TestP0001(unittest.TestCase):
    """Unit tests for problem 0001 (Two Sum)."""

    @classmethod
    def setUpClass(cls):
        # No suite-level fixtures are required.
        pass

    @classmethod
    def tearDownClass(cls):
        # Nothing to clean up at suite level.
        pass

    def setUp(self):
        # Fresh Solution per test to avoid state leaking between tests.
        self.solution = Solution()

    def tearDown(self):
        self.solution = None

    def test_p0001(self):
        # 2 + 7 == 9, so the expected index pair is [0, 1].
        self.assertEqual([0, 1], self.solution.twoSum([2, 7, 11, 15], 9))
|
<gh_stars>100-1000
// main.c
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
// -----------------------------------------------------------------------------
// Minimal host program for an lldb REPL test. The `//%` comment lines are
// executable directives interpreted by the lldb test harness and must be
// kept byte-for-byte as written.
int main()
{
    return 0; //% self.expect("repl", error=True, substrs=["Swift standard library"])
    //% self.runCmd("kill")
    //% self.expect("repl", error=True, substrs=["running process"])
}
|
def find_value(data, value):
    """Return the index of `value` in the sorted sequence `data`.

    Uses iterative binary search; returns -1 when the value is absent
    (including for an empty sequence).
    """
    lo, hi = 0, len(data) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        probe = data[mid]
        if probe == value:
            return mid
        if probe < value:
            # Target lies in the upper half.
            lo = mid + 1
        else:
            # Target lies in the lower half.
            hi = mid - 1
    return -1  # value not found
import React, { useState, useRef, useEffect } from 'react';
import { Table, Button, Input } from 'reactstrap';
const MyTable = (props) => {
const [data, setData] = useState(props.data);
const [originalData, setOriginalData] = useState(data);
// code for sorting the table goes here
// code for filtering the table goes here
// code for pagination goes here
return {
<Table>
{/* Table head with sorting and filtering option goes here */}
<tbody>
{data.map((e, i) => (
<tr key={i}>
{Object.keys(e).map((key, j) => (
<td key={j}>{e[key]}</td>
))
}
</tr>
))
}
</tbody>
</Table>
// Code for pagination buttons
}
}
export default MyTable; |
import datetime
import hashlib
import os
import yaml
class Config:
    """Basic utility for working with .yaml configs.

    May be used as a base class for more sophisticated configs,
    in case some custom validations are required.
    """

    def __init__(self, path=None):
        # NOTE(review): when path is None the instance is left without `path`
        # and `cfg` attributes (callers such as `_load` fill `cfg` in);
        # accessing them earlier raises AttributeError.
        if path is None:
            return
        self.path = path
        with open(self.path, 'r') as stream:
            self.cfg = yaml.safe_load(stream)

    def __get_item_nested(self, key):
        # A non-string key is treated as a path of nested mapping keys; a
        # plain string indexes the top level directly.
        if not isinstance(key, str):
            if not isinstance(key, list):
                key = [key]
            res = self.cfg
            for item in key:
                res = res[item]
            return res
        return self.cfg[key]

    def __getitem__(self, key):
        """Return the configured value, or None when the key is absent."""
        try:
            return self.__get_item_nested(key)
        except KeyError:
            return None

    def __contains__(self, key):
        """Return True when the key resolves to a non-None value."""
        try:
            return self.__get_item_nested(key) is not None
        except KeyError:
            return False

    @property
    def postfix(self):
        # Experiment name plus an md5 digest of the full config contents.
        # Assumes EXPERIMENT_NAME is provided by a subclass — TODO confirm.
        return "{}.{}".format(self.cfg[self.EXPERIMENT_NAME],
                              hashlib.md5(str(self.cfg).encode('utf-8')).hexdigest())

    @property
    def date_postfix(self):
        # Same as `postfix`, prefixed with today's date (YYYYMMDD).
        return "{}.{}".format(datetime.datetime.now().strftime("%Y%m%d"), self.postfix)

    def save(self, file):
        """Dump the config mapping as YAML into the given open file object."""
        yaml.dump(self.cfg, file, default_flow_style=False)

    @staticmethod
    def _load(stream):
        """Build a Config from an already-open YAML stream."""
        c = Config(None)
        # FIX: use safe_load — consistent with __init__, and yaml.load without
        # an explicit Loader is deprecated (PyYAML >= 5.1) and can execute
        # arbitrary constructors on untrusted input.
        c.cfg = yaml.safe_load(stream)
        return c

    def __repr__(self):
        return self.path
|
Logistic Regression is a suitable algorithm for predicting a binary label for each data point. It is a supervised learning algorithm for binary classification: it computes a weighted linear combination of the input features, passes it through the logistic (sigmoid) function to estimate the probability of the positive class, and thresholds that probability to produce the predicted label.
/**
 * Tracks a configured trigger mode and reports how a conversion would be
 * started for that mode.
 */
public class TriggerManager {

    // Current trigger mode: "None", "Falling" or "Rising".
    private String triggerMode;

    public TriggerManager() {
        this.triggerMode = "None";
    }

    /**
     * Sets the trigger mode.
     *
     * @param triggerMode one of "None", "Falling" or "Rising"; any other
     *     value (including null) makes {@link #startConversion()} a no-op.
     */
    public void setTriggerMode(String triggerMode) {
        this.triggerMode = triggerMode;
    }

    /**
     * Prints how the conversion is started for the current trigger mode.
     * Robustness fix: constant-first {@code equals()} avoids a
     * NullPointerException when the mode was explicitly set to null.
     */
    public void startConversion() {
        if ("None".equals(triggerMode)) {
            System.out.println("Conversion started immediately.");
        } else if ("Falling".equals(triggerMode)) {
            System.out.println("Conversion started on falling edge of external source.");
        } else if ("Rising".equals(triggerMode)) {
            System.out.println("Conversion started on rising edge of external source.");
        }
    }

    public static void main(String[] args) {
        TriggerManager manager = new TriggerManager();
        manager.setTriggerMode("Falling");
        manager.startConversion();
    }
}
#!/bin/sh
# Code-signs the app bundle, then extracts each embedded Mach-O signature as
# a detached file (plus the CodeResources files) and packs everything into a
# tarball, so the signatures can be re-applied to an unsigned build later.
set -e
ROOTDIR=dist
BUNDLE=${ROOTDIR}/Thana-Qt.app
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature.tar.gz
if [ ! -n "$1" ]; then
    echo "usage: $0 <codesign args>"
    echo "example: $0 -s MyIdentity"
    exit 1
fi
rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}
# Sign the bundle; --file-list records every file codesign touched.
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
# Mach-O binaries: carve the embedded signature blob out of each file using
# the size/offset reported by pagestuff (last page = LC_CODE_SIGNATURE data).
for i in `grep -v CodeResources ${TEMPLIST}`; do
    TARGETFILE="${BUNDLE}/`echo ${i} | sed "s|.*${BUNDLE}/||"`"
    SIZE=`pagestuff $i -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
    OFFSET=`pagestuff $i -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
    SIGNFILE="${TEMPDIR}/${TARGETFILE}.sign"
    DIRNAME="`dirname ${SIGNFILE}`"
    mkdir -p "${DIRNAME}"
    echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
    # bs=1 keeps byte-exact skip/count semantics for the carve.
    dd if=$i of=${SIGNFILE} bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
# CodeResources property lists are copied verbatim.
for i in `grep CodeResources ${TEMPLIST}`; do
    TARGETFILE="${BUNDLE}/`echo ${i} | sed "s|.*${BUNDLE}/||"`"
    RESOURCE="${TEMPDIR}/${TARGETFILE}"
    DIRNAME="`dirname "${RESOURCE}"`"
    mkdir -p "${DIRNAME}"
    echo "Adding resource for: "${TARGETFILE}""
    cp "${i}" "${RESOURCE}"
done
rm ${TEMPLIST}
tar -C ${TEMPDIR} -czf ${OUT} .
rm -rf ${TEMPDIR}
echo "Created ${OUT}"
|
# Evaluate a language model (trained checkpoint 1024+0+512-old) on the
# WikiText-103 validation split with a shuffled-within-sentences low-PMI
# augmentation; batch size 1, dropping the last partial batch.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-old/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-old/1024+0+512-LMPI-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_within_sentences_low_pmi_first_two_thirds_sixth --eval_function last_sixth_eval
#!/bin/bash
# Generates the Hygieia score-collector properties file ($PROP_FILE) from
# environment variables (with defaults). The heredoc below is the literal
# content of the generated file, so no lines may be added or removed inside
# it without changing the collector's configuration.
if [ "$SKIP_PROPERTIES_BUILDER" = true ]; then
  echo "Skipping properties builder"
  echo "`cat $PROP_FILE`"
  exit 0
fi

# Unquoted EOF: ${VAR:-default} expressions are expanded at generation time.
cat > $PROP_FILE <<EOF
#Database Name
dbname=${HYGIEIA_API_ENV_SPRING_DATA_MONGODB_DATABASE:-dashboard}
#Database HostName - default is localhost
dbhost=${MONGODB_HOST:-10.0.1.1}
#Database Port - default is 27017
dbport=${MONGODB_PORT:-27017}
#Database Username - default is blank
dbusername=${HYGIEIA_API_ENV_SPRING_DATA_MONGODB_USERNAME:-db}
#Database Password - default is blank
dbpassword=${HYGIEIA_API_ENV_SPRING_DATA_MONGODB_PASSWORD:-dbpass}
#Collector schedule (required)
score.cron=${SCORE_CRON:-0 0/5 * * * *}
# Max Score
score.maxScore=5
# Default Score Criteria for widget not found :
# no_score : the widget will not be used to score
# zero_score : the widget score will be 0
# valuePercent : specify the value to set in scoreValue param (Default)
score.criteria.noWidgetFound.scoreType=zero_score
# When scoreType is valuePercent we need to define the value for score
score.criteria.noWidgetFound.scoreValue=0
# Default Score Criteria for data not found :
# no_score : the value will not be used to score
# zero_score : the score value will be 0
# valuePercent : specify the value to set in scoreValue param (Default)
score.criteria.noDataFound.scoreType=zero_score
# When scoreType is valuePercent we need to define the value for score
score.criteria.noDataFound.scoreValue=0
# Score settings for build widget
# Number of days to calculate score
score.buildWidget.numberOfDays=14
# Weight for the widget
score.buildWidget.weight=25
# If widget is disabled it will not be used for calculating score
score.buildWidget.disabled=false
# Criteria properties, these will override the default criteria properties
score.buildWidget.criteria.noWidgetFound.scoreType=zero_score
score.buildWidget.criteria.noWidgetFound.scoreValue=0
# Property to propagate score if condition is met
# no : do not propagate score
# widget : propagate to widget
# dashboard : propagate score as dashboard score
score.buildWidget.criteria.noWidgetFound.propagate=no
score.buildWidget.criteria.noDataFound.scoreType=zero_score
score.buildWidget.criteria.noDataFound.scoreValue=0
score.buildWidget.criteria.noDataFound.propagate=no
# Criteria thresholds for data within the range of days
# Type of threshold to apply
# percent : percent of data
# days : number of days where data is present
score.buildWidget.criteria.dataRangeThresholds[0].type=percent
# Comparator to compare the value
# equals,
# less,
# greater,
# less_or_equal,
# greater_or_equal
score.buildWidget.criteria.dataRangeThresholds[0].comparator=less_or_equal
# Value to compare
score.buildWidget.criteria.dataRangeThresholds[0].value=20
# If the threshold is met set the score
score.buildWidget.criteria.dataRangeThresholds[0].score.scoreType=zero_score
score.buildWidget.criteria.dataRangeThresholds[0].score.scoreValue=0
score.buildWidget.criteria.dataRangeThresholds[0].score.propagate=no
# we can set the last n number of days to check for threshold (Not Mandatory)
score.buildWidget.criteria.dataRangeThresholds[0].numDaysToCheck=7
# Build duration within threshold score settings
score.buildWidget.duration.buildDurationThresholdInMillis=300000
score.buildWidget.duration.weight=50
score.buildWidget.duration.disabled=false
# Build duration within threshold score settings
score.buildWidget.status.weight=50
score.buildWidget.status.disabled=false
# Score settings for quality widget
score.qualityWidget.weight=25
score.qualityWidget.disabled=false
score.qualityWidget.criteria.noWidgetFound.scoreType=zero_score
score.qualityWidget.criteria.noWidgetFound.scoreValue=0
score.qualityWidget.criteria.noWidgetFound.propagate=no
score.qualityWidget.criteria.noDataFound.scoreType=zero_score
score.qualityWidget.criteria.noDataFound.scoreValue=0
score.qualityWidget.criteria.noDataFound.propagate=no
# Code Quality widget criteria settings
score.qualityWidget.codeCoverage.weight=50
score.qualityWidget.codeCoverage.disabled=false
score.qualityWidget.codeCoverage.criteria.noDataFound.scoreType=zero_score
score.qualityWidget.codeCoverage.criteria.noDataFound.scoreValue=0
score.qualityWidget.codeCoverage.criteria.noDataFound.propagate=widget
# Unit Tests widget criteria settings
score.qualityWidget.unitTests.weight=50
score.qualityWidget.unitTests.disabled=false
score.qualityWidget.unitTests.criteria.noDataFound.scoreType=zero_score
score.qualityWidget.unitTests.criteria.noDataFound.scoreValue=0
score.qualityWidget.unitTests.criteria.noDataFound.propagate=widget
score.qualityWidget.unitTests.criteria.dataRangeThresholds[0].type=percent
score.qualityWidget.unitTests.criteria.dataRangeThresholds[0].comparator=less
score.qualityWidget.unitTests.criteria.dataRangeThresholds[0].value=100
score.qualityWidget.unitTests.criteria.dataRangeThresholds[0].score.scoreType=zero_score
score.qualityWidget.unitTests.criteria.dataRangeThresholds[0].score.scoreValue=0
score.qualityWidget.unitTests.criteria.dataRangeThresholds[0].score.propagate=widget
# Score settings for deploy widget
score.deployWidget.weight=25
score.deployWidget.disabled=false
score.deployWidget.criteria.noWidgetFound.scoreType=zero_score
score.deployWidget.criteria.noWidgetFound.scoreValue=0
score.deployWidget.criteria.noWidgetFound.propagate=no
score.deployWidget.criteria.noDataFound.scoreType=zero_score
score.deployWidget.criteria.noDataFound.scoreValue=0
score.deployWidget.criteria.noDataFound.propagate=no
# Deployment widget criteria settings
score.deployWidget.deploySuccess.weight=50
score.deployWidget.deploySuccess.disabled=false
score.deployWidget.deploySuccess.criteria.noDataFound.scoreType=zero_score
score.deployWidget.deploySuccess.criteria.noDataFound.scoreValue=0
score.deployWidget.deploySuccess.criteria.noDataFound.propagate=widget
# Instances online widget criteria settings
score.deployWidget.intancesOnline.weight=50
score.deployWidget.intancesOnline.disabled=false
score.deployWidget.intancesOnline.criteria.noDataFound.scoreType=zero_score
score.deployWidget.intancesOnline.criteria.noDataFound.scoreValue=0
score.deployWidget.intancesOnline.criteria.noDataFound.propagate=widget
# Score settings for github scm widget
score.scmWidget.weight=25
score.scmWidget.disabled=false
score.scmWidget.numberOfDays=14
score.scmWidget.criteria.noWidgetFound.scoreType=zero_score
score.scmWidget.criteria.noWidgetFound.scoreValue=0
score.scmWidget.criteria.noWidgetFound.propagate=no
score.scmWidget.criteria.noDataFound.scoreType=zero_score
score.scmWidget.criteria.noDataFound.scoreValue=0
score.scmWidget.criteria.noDataFound.propagate=no
score.scmWidget.criteria.dataRangeThresholds[0].type=percent
score.scmWidget.criteria.dataRangeThresholds[0].comparator=less_or_equal
# Value to compare
score.scmWidget.criteria.dataRangeThresholds[0].value=20
# If the threshold is met set the score
score.scmWidget.criteria.dataRangeThresholds[0].score.scoreType=zero_score
score.scmWidget.criteria.dataRangeThresholds[0].score.scoreValue=0
score.scmWidget.criteria.dataRangeThresholds[0].score.propagate=no
# we can set the last n number of days to check for threshold (Not Mandatory)
score.scmWidget.criteria.dataRangeThresholds[0].numDaysToCheck=7
# Commits per day widget criteria settings
score.scmWidget.commitsPerDay.weight=100
score.scmWidget.commitsPerDay.numberOfDays=14
score.scmWidget.commitsPerDay.disabled=false
score.scmWidget.commitsPerDay.criteria.noDataFound.scoreType=zero_score
score.scmWidget.commitsPerDay.criteria.noDataFound.scoreValue=0
score.scmWidget.commitsPerDay.criteria.noDataFound.propagate=widget
score.scmWidget.commitsPerDay.criteria.dataRangeThresholds[0].type=percent
score.scmWidget.commitsPerDay.criteria.dataRangeThresholds[0].comparator=less_or_equal
score.scmWidget.commitsPerDay.criteria.dataRangeThresholds[0].value=20
score.scmWidget.commitsPerDay.criteria.dataRangeThresholds[0].score.scoreType=zero_score
score.scmWidget.commitsPerDay.criteria.dataRangeThresholds[0].score.scoreValue=0
score.scmWidget.commitsPerDay.criteria.dataRangeThresholds[0].score.propagate=widget
score.scmWidget.commitsPerDay.criteria.dataRangeThresholds[0].numDaysToCheck=7
EOF

# Echo the generated file for the build log, filtering out password lines.
echo "
===========================================
Properties file created `date`: $PROP_FILE
Note: passwords hidden
===========================================
`cat $PROP_FILE |egrep -vi password`
"
exit 0
|
# Termux package metadata for the ndk-multilib package.
TERMUX_PKG_HOMEPAGE=https://developer.android.com/tools/sdk/ndk/index.html
TERMUX_PKG_DESCRIPTION="Multilib binaries for cross-compilation"
TERMUX_PKG_LICENSE="NCSA"
TERMUX_PKG_MAINTAINER="@termux"
# Version tracks the NDK version used by the build environment.
TERMUX_PKG_VERSION=$TERMUX_NDK_VERSION
TERMUX_PKG_REVISION=2
# Everything is copied from the NDK; there is no source archive to extract.
TERMUX_PKG_SKIP_SRC_EXTRACT=true
TERMUX_PKG_PLATFORM_INDEPENDENT=true
TERMUX_PKG_NO_STATICSPLIT=true
# Copies the per-architecture sysroot libraries (crt objects, libc/libdl/
# liblog/libm, libc++ and clang runtime libs) from the NDK into the package
# staging directory.
#   $1 = NDK arch name (arm, arm64, x86, x86_64)
#   $2 = target triple suffix (e.g. aarch64-linux-android)
prepare_libs() {
	local ARCH="$1"
	local SUFFIX="$2"
	# NOTE(review): NDK_SUFFIX is computed here but never used below
	# (BASEDIR uses $SUFFIX) — confirm whether BASEDIR should use it.
	local NDK_SUFFIX=$SUFFIX
	if [ $ARCH = x86 ] || [ $ARCH = x86_64 ]; then
		NDK_SUFFIX=$ARCH
	fi
	mkdir -p $TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib
	local BASEDIR=$NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/$SUFFIX/${TERMUX_PKG_API_LEVEL}
	cp $BASEDIR/*.o $TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib
	cp $BASEDIR/lib{c,dl,log,m}.so $TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib
	cp $BASEDIR/lib{c,dl,m}.a $TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib
	cp $TERMUX_STANDALONE_TOOLCHAIN/sysroot/usr/lib/${SUFFIX}/libc++_shared.so \
		$TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib
	cp $TERMUX_STANDALONE_TOOLCHAIN/sysroot/usr/lib/${SUFFIX}/lib{c++_static,c++abi}.a \
		$TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib
	# Static "libc++_shared.a" is a linker script redirecting to the static libs.
	echo 'INPUT(-lc++_static -lc++abi)' > $TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib/libc++_shared.a
	# Clang's per-target runtime directory uses different arch spellings.
	if [ $ARCH == "x86" ]; then
		LIBATOMIC=$NDK/toolchains/llvm/prebuilt/linux-x86_64/lib64/clang/*/lib/linux/i386
	elif [ $ARCH == "arm64" ]; then
		LIBATOMIC=$NDK/toolchains/llvm/prebuilt/linux-x86_64/lib64/clang/*/lib/linux/aarch64
	else
		LIBATOMIC=$NDK/toolchains/llvm/prebuilt/linux-x86_64/lib64/clang/*/lib/linux/$ARCH
	fi
	cp $LIBATOMIC/libatomic.a $TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib/
	cp $LIBATOMIC/libunwind.a $TERMUX_PKG_MASSAGEDIR/$TERMUX_PREFIX/$SUFFIX/lib/
}
# Stages clang's compiler-rt runtime libraries under opt/ndk-multilib and
# reserves the clang resource-dir target path. The RT_* variables set here
# are globals that are also referenced later by
# termux_step_create_debscripts (at build time, via heredoc expansion).
add_cross_compiler_rt() {
	RT_PREFIX=$NDK/toolchains/llvm/prebuilt/linux-x86_64/lib64/clang/*/lib/linux
	RT_OPT_DIR=$TERMUX_PREFIX/opt/ndk-multilib/cross-compiler-rt
	# Match the installed libllvm's clang resource directory version.
	LIBLLVM_VERSION=$(. $TERMUX_SCRIPTDIR/packages/libllvm/build.sh; echo $TERMUX_PKG_VERSION)
	RT_PATH=$TERMUX_PREFIX/lib/clang/$LIBLLVM_VERSION/lib/android
	mkdir -p $TERMUX_PKG_MASSAGEDIR/$RT_OPT_DIR
	cp $RT_PREFIX/* $TERMUX_PKG_MASSAGEDIR/$RT_OPT_DIR || true
	# Reserve this folder to make its existance be handled by dpkg.
	mkdir -p $TERMUX_PKG_MASSAGEDIR/$RT_PATH
	touch $TERMUX_PKG_MASSAGEDIR/$RT_PATH/.keep-ndk-multilib
}
# Build-system hook: populate the staging dir for all four target
# architectures, then add the shared compiler-rt payload.
termux_step_extract_into_massagedir() {
	prepare_libs "arm" "arm-linux-androideabi"
	prepare_libs "arm64" "aarch64-linux-android"
	prepare_libs "x86" "i686-linux-android"
	prepare_libs "x86_64" "x86_64-linux-android"
	add_cross_compiler_rt
}
# Build-system hook: generate the package's postinst/prerm maintainer
# scripts. $RT_PATH/$RT_OPT_DIR are expanded at build time (set by
# add_cross_compiler_rt); the \$(…) escapes defer evaluation to install time.
termux_step_create_debscripts() {
	# Install the symlinks.
	cat <<- POSTINST_EOF > ./postinst
	#!$TERMUX_PREFIX/bin/sh
	echo "Installing symlinks to $RT_PATH..."
	find $RT_OPT_DIR -type f ! -name "libclang_rt*\$(dpkg --print-architecture)*" -exec ln -sf "{}" $RT_PATH \;
	exit 0
	POSTINST_EOF
	# Uninstall the symlinks.
	cat <<- PRERM_EOF > ./prerm
	#!$TERMUX_PREFIX/bin/sh
	if [ "$TERMUX_PACKAGE_FORMAT" != "pacman" ] && [ "\$1" != "remove" ]; then
	exit 0
	fi
	echo "Uninstalling symlinks..."
	find $RT_PATH -type l ! -name "libclang_rt*\$(dpkg --print-architecture)*" -exec rm -rf "{}" \;
	exit 0
	PRERM_EOF
	chmod 0755 postinst prerm
}
|
// Doxygen-generated navigation data for the IRegisterCounterMapping class
// page: [display name, target xhtml anchor, children]. Do not edit by hand.
var classarmnn_1_1profiling_1_1_i_register_counter_mapping =
[
    [ "~IRegisterCounterMapping", "classarmnn_1_1profiling_1_1_i_register_counter_mapping.xhtml#a9cd06e75fb10218f2decbd4117af4a8e", null ],
    [ "RegisterMapping", "classarmnn_1_1profiling_1_1_i_register_counter_mapping.xhtml#ae8ae959752e05d110a66f590dfb18faa", null ],
    [ "Reset", "classarmnn_1_1profiling_1_1_i_register_counter_mapping.xhtml#a43a787400d2a563b9eee1a149225c18a", null ]
];
# Ensure the media upload directory exists before starting the server.
mkdir -p media
# Run the Flask app in development mode (debugger and auto-reload enabled).
FLASK_APP=app FLASK_ENV=development flask run
|
/** Message payload describing a shared video. */
export interface VideoSharedMessage {
  /** Human-readable title of the video. */
  title: string;
  /** Provider-specific identifier of the video. */
  videoId: string;
}
|
package go_batcher
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestBatchMgr verifies that a Batcher can register, look up and remove a
// batch by name.
func TestBatchMgr(t *testing.T) {
	batch := NewBatch("TEST", 10, 0, nil)
	batcher := NewBatcher()
	batcher.AddBatch(batch)
	// Lookup must return exactly the batch that was registered.
	assert.Equal(t, batcher.GetBatch("TEST"), batch)
	batcher.DelBatch("TEST")
	// After deletion the name must no longer resolve.
	assert.Nil(t, batcher.GetBatch("TEST"))
}
|
<filename>framework/src/main/java/me/insidezhou/southernquiet/util/SnowflakeIdGenerator.java
package me.insidezhou.southernquiet.util;
import instep.util.LongIdGenerator;
import org.jetbrains.annotations.Nullable;
import java.util.Random;
/**
* 基于twitter snowflake算法、64bit、默认秒级精度的发号器
* <p>
* 0 - timestamp - highPadding - worker - lowPadding - sequence
*/
@SuppressWarnings("WeakerAccess")
/**
 * 基于twitter snowflake算法、64bit、默认秒级精度的发号器
 * <p>
 * Layout: 0 - timestamp - highPadding - worker - lowPadding - sequence
 *
 * <p>BUG FIX in {@link #getWorkerFromId(long)} and {@link #getSequenceFromId(long)}:
 * in Java a cast binds tighter than {@code >>>}, so the original
 * {@code (int) (id << k) >>> m} truncated the shifted long to an int first and
 * then performed the unsigned shift on that int with the shift amount taken
 * mod 32 — producing wrong worker/sequence values. The shift must be done on
 * the long and only the final result cast to int.
 */
@SuppressWarnings("WeakerAccess")
public class SnowflakeIdGenerator extends LongIdGenerator implements IdGenerator {
    public final static long EPOCH = 1517414400L; //Thu Feb 01 2018 00:00:00 GMT, seconds
    public final static int TimestampBits = 32;
    public final static int HighPaddingBits = 0;
    public final static int WorkerIdBits = 12;
    public final static int LowPaddingBits = 0;
    public final static int SequenceStartRange = 0;
    // Tick granularity in milliseconds (1000 = second-level precision).
    public final static int TickAccuracy = 1000;

    public SnowflakeIdGenerator(int workerId, int timestampBits, int highPaddingBits, int workerIdBits, int lowPaddingBits, long epoch, int sequenceStartRange, @Nullable Random random, int tickAccuracy) {
        super(workerId, timestampBits, highPaddingBits, workerIdBits, lowPaddingBits, epoch, sequenceStartRange, tickAccuracy, random);
    }

    @SuppressWarnings("unused")
    public SnowflakeIdGenerator(int workerId, int timestampBits, int highPaddingBits, int workerIdBits, int lowPaddingBits) {
        this(workerId,
            timestampBits,
            highPaddingBits,
            workerIdBits,
            lowPaddingBits,
            EPOCH,
            SequenceStartRange,
            new Random(),
            TickAccuracy);
    }

    public SnowflakeIdGenerator(int workerId, long epoch, int sequenceStartRange, int tickAccuracy) {
        this(workerId,
            TimestampBits,
            HighPaddingBits,
            WorkerIdBits,
            LowPaddingBits,
            epoch,
            sequenceStartRange,
            new Random(),
            tickAccuracy);
    }

    @SuppressWarnings("unused")
    public SnowflakeIdGenerator(int workerId) {
        this(workerId,
            TimestampBits,
            HighPaddingBits,
            WorkerIdBits,
            LowPaddingBits,
            EPOCH,
            SequenceStartRange,
            new Random(),
            TickAccuracy);
    }

    /** Extracts the tick count (timestamp field) from an id. */
    @Override
    public long getTicksFromId(long id) {
        return id >>> getTimestampShift();
    }

    /** Converts the id's tick count back to epoch milliseconds. */
    @Override
    public long getTimestampFromId(long id) {
        return getTicksFromId(id) * getTickAccuracy() + getEpochInMilliSeconds();
    }

    /** Extracts the worker id field from an id. */
    @Override
    public int getWorkerFromId(long id) {
        // Shift left to drop the sign + timestamp + high-padding bits, then
        // unsigned-shift right on the LONG before casting (see class javadoc).
        return (int) ((id << (1 + getTimestampBits() + getHighPaddingBits())) >>> (1 + getTimestampBits() + getHighPaddingBits() + getWorkerIdShift()));
    }

    /** Extracts the sequence field (lowest bits) from an id. */
    @Override
    public int getSequenceFromId(long id) {
        // Keep only the lowest getSequenceBits() bits; shift on the LONG and
        // cast the final value (see class javadoc).
        return (int) ((id << (64 - getSequenceBits())) >>> (64 - getSequenceBits()));
    }
}
|
<html>
<!-- NOTE(review): no doctype declaration is present, so browsers will render
     this page in quirks mode; consider adding <!DOCTYPE html> as line 1. -->
<head>
<title>My Web Page</title>
<style>
/* Page background and centered white "card" container with a drop shadow. */
body {
  background-color: #f2f2f2;
}
.container {
  width: 800px;
  margin: 0 auto;
  background-color: #fff;
  padding: 20px;
  box-shadow: 0 2px 8px 2px rgba(0,0,0,0.2);
}
/* Typography: dark title, muted subtitle, readable body text. */
h1 {
  font-size: 28px;
  color: #000;
}
h2 {
  font-size: 24px;
  color: #888;
}
p {
  font-size: 16px;
  color: #444;
}
</style>
</head>
<body>
<!-- All visible content sits inside the single centered card. -->
<div class="container">
<h1>My Web Page</h1>
<h2>Welcome!</h2>
<p>This is my web page. Here I share my thoughts and musings on life. Please feel free to explore and comment!</p>
</div>
</body>
</html>
#!/usr/bin/env python
from GameFlow.console.MainScreen import MainScreen
def run():
    """Launch the game by starting the console main screen."""
    screen = MainScreen()
    screen.start()


if __name__ == '__main__':
    run()
|
#!/bin/bash
# Build the Docker image from the current directory and tag it for push.
docker build . -t stevelacy/node-turbo-http
|
#!/bin/bash
# http://www.michaelvdw.nl/code/adding-flake8-to-your-django-project/
# Lint the whole project using the repository-local flake8 configuration.
flake8 --config=./etc/linter.config .
#!/bin/bash
#SBATCH --job-name=DKBAT_ivy
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=1
#SBATCH --time=24:00:00
#SBATCH --mem=60000M
#SBATCH --gres=gpu:1
#SBATCH --partition=gpu_titanrtx_shared
#SBATCH --mail-type=BEGIN,END
#SBATCH --mail-user=traian.vidrascu@student.uva.nl
# SLURM job: train DKBAT on the ivy141 dataset (1 GPU, shared TitanRTX
# partition, 24h wall time, 60 GB RAM), notifying by mail on start/end.
conda activate dgat
# Train with 2-hop neighborhood features disabled (use_2hop/get_2hop = 0).
python main.py --use_2hop 0 --get_2hop 0 --partial_2hop 0 --data ./data/ivy141-all/ --output_folder ./checkpoints/ivy/out/ --batch_size_conv 128 --out_channels 500 --valid_invalid_ratio_conv 40 --valid_invalid_ratio_gat 2 --epochs_gat 3000 --batch_size_gat 32185 --weight_decay_gat 5e-6 --alpha 0.2 --margin 1 --drop_GAT 0.3
/**
 * Generates a non-negative integer ID that is unlikely to collide.
 *
 * Fix: the original drew one random integer in [0, 1e9), which collides with
 * ~50% probability after only ~37k IDs (birthday bound). Combining the
 * millisecond timestamp with a random suffix keeps the same interface (a
 * non-negative integer, within Number.MAX_SAFE_INTEGER) while requiring two
 * calls in the same millisecond AND the same random draw to collide.
 *
 * NOTE(review): for guaranteed uniqueness use crypto.randomUUID() instead.
 */
function generateUniqueID() {
  return Date.now() * 1000 + Math.floor(Math.random() * 1000);
}

let uniqueID = generateUniqueID();
console.log(uniqueID);
<reponame>danieljarrett/Stak-Framework<filename>lib/stak/router.rb
module Stak
  # Minimal HTTP router: stores routes per verb and dispatches a request URL
  # to a Proc, a "controller#action" string target, or a default REST-style
  # action derived from the URL captures.
  class Router
    def initialize
      # verb (e.g. "GET") => list of route hashes with :path, :params, :target
      @routes = Hash.new { |hash, key| hash[key] = [] }
    end

    # Evaluates a routing DSL block in the router's own context.
    def config(&block)
      instance_eval(&block)
    end

    # Returns the response for the first route whose :path regexp matches
    # url, dispatching on the target's type; falls back to the error page.
    def match(url, verb, env)
      @routes[verb].each do |route|
        if route[:path].match(url)
          # $~ holds the MatchData of the regexp match just above.
          return case route[:target]
            when Proc then block($~.captures, route, env)
            when String then action($~.captures, route)
            when NilClass then default($~.captures)
          end
        end
      end
      return Stak::Controller.error
    end

    # Proc target: bind it as a controller method and return a Rack lambda.
    def block(captures, route, env)
      # Zip positional captures with the route's declared parameter names.
      route_params = captures.each_with_index.reduce({}) do |acc, (value, index)|
        acc.update(route[:params][index] => value)
      end
      Stak::Controller.send(:define_method, :_t, route[:target])
      -> (env) { Stak::Controller.new(env).append(route_params)._t }
    end

    # String target ("controller#action"): resolve and invoke it.
    def action(captures, route)
      route_params = captures.each_with_index.reduce({}) do |acc, (value, index)|
        acc.update(route[:params][index] => value)
      end
      retrieve(route[:target], route_params)
    end

    # Nil target: derive controller, id and action from the URL captures.
    def default(captures)
      # captures[1] carries a trailing slash; [0..-2] strips it.
      route_params = captures[1] ? { 'id' => captures[1][0..-2] } : {}
      retrieve("#{captures[0]}##{captures[2]}", route_params)
    end

    # Turns "controller#action" into a class and calls the action with params.
    def retrieve(action, route_params)
      if action =~ /^([^#]+)#([^#]+)$/
        resource = $1.to_camelcase
        klass = resource.to_klass
        return klass.action($2, route_params)
      end
    end
  end
end
<filename>FSPasswordSample/Pods/Headers/Public/FSJZBus/FSNetsController.h
//
//  FSNetsController.h
//  myhome
//
//  Created by FudonFuchina on 2017/7/9.
//  Copyright © 2017 fuhope. All rights reserved.
//
#import "FSBaseController.h"

// Controller subclass exposing a completion callback.
@interface FSNetsController : FSBaseController

// Invoked with this controller instance and a URL string.
// NOTE(review): the exact trigger (e.g. page load / selection) is not visible
// from this header — confirm against the implementation.
@property (nonatomic,copy) void (^callback)(FSNetsController *bVC,NSString *bUrl);

@end
|
/*!
# -- BEGIN LICENSE BLOCK ----------------------------------
#
# This file is part of tinyMCE.
# YouTube for tinyMCE
# Copyright (C) 2011 - 2019 <NAME> <aurelien[at]magix-cms[dot]com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# -- END LICENSE BLOCK -----------------------------------
*/
(function () {
    var youtube = (function () {
        'use strict';

        tinymce.PluginManager.requireLangPack("youtube");

        tinymce.PluginManager.add("youtube", function (editor, url) {
            // Custom icon for both the toolbar button and the menu entry,
            // referencing the plugin's bundled SVG sprite.
            editor.ui.registry.addIcon('youtube-brands', '<svg width="24" height="24"><use xlink:href="'+url+'/img/youtube.svg#youtube-brands"></use></svg>');

            // Placeholder for the Dialog instance (kept for parity with the
            // original implementation; currently unused).
            var _dialog = false;

            // Options for the "Type" select box (currently unused).
            var _typeOptions = [];

            // Reference to the dialog once it has been opened.
            var _api = false;

            // Configuration of the URL-based dialog that hosts the plugin UI.
            var _urlDialogConfig = {
                title: 'YouTube Title',
                url: url + "/youtube.html",
                width: 800,
                height: 620
            };

            // Shared handler: toolbar button and menu item both open the
            // same URL dialog.
            var openYoutubeDialog = function () {
                _api = editor.windowManager.openUrl(_urlDialogConfig);
            };

            // Toolbar button.
            editor.ui.registry.addButton('youtube', {
                icon: 'youtube-brands',
                tooltip: "YouTube Tooltip",
                title: "YouTube Tooltip",
                onAction: openYoutubeDialog
            });

            // Menu bar entry.
            editor.ui.registry.addMenuItem('youtube', {
                icon: 'youtube-brands',
                text: "YouTube Tooltip",
                tooltip: "YouTube Tooltip",
                title: "YouTube Tooltip",
                onAction: openYoutubeDialog
            });

            // Metadata surfaced by TinyMCE's optional "Help" plugin.
            return {
                getMetadata: function () {
                    return {
                        name: "YouTube Plugin",
                        url: "https://github.com/gtraxx/tinymce-plugin-youtube"
                    };
                }
            };
        });
    }());
})();
<gh_stars>1-10
// API credentials injected via environment variables.
// NOTE(review): values are intentionally blank placeholders — populate them
// (ideally from a secrets store, not source control) before running; empty
// strings will fail authentication.
process.env.agentId = "";
process.env.appKey = "";
process.env.appSecret = "";
|
<reponame>lananh265/social-network<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.unlockAlt = void 0;
var unlockAlt = {
"viewBox": "0 0 1152 1792",
"children": [{
"name": "path",
"attribs": {
"d": "M1056 768q40 0 68 28t28 68v576q0 40-28 68t-68 28h-960q-40 0-68-28t-28-68v-576q0-40 28-68t68-28h32v-320q0-185 131.5-316.5t316.5-131.5 316.5 131.5 131.5 316.5q0 26-19 45t-45 19h-64q-26 0-45-19t-19-45q0-106-75-181t-181-75-181 75-75 181v320h736z"
}
}]
};
exports.unlockAlt = unlockAlt; |
package com.alipay.api.request;
import java.util.Map;
import com.alipay.api.AlipayRequest;
import com.alipay.api.internal.util.AlipayHashMap;
import com.alipay.api.response.AlipayMobileUrlDeviceverifyAddResponse;
import com.alipay.api.AlipayObject;
/**
 * ALIPAY API: alipay.mobile.url.deviceverify.add request
 *
 * <p>Auto-generated request wrapper for the device-verification "add" endpoint.
 *
 * @author auto create
 * @since 1.0, 2021-07-14 10:09:48
 */
public class AlipayMobileUrlDeviceverifyAddRequest implements AlipayRequest<AlipayMobileUrlDeviceverifyAddResponse> {

    // User-defined text parameters merged into the request on serialization.
    private AlipayHashMap udfParams; // add user-defined text parameters

    // API protocol version; defaults to "1.0".
    private String apiVersion="1.0";

    /**
     * Device fingerprint ID
     */
    private String apdid;

    /**
     * Application version number
     */
    private String appVersion;

    /**
     * Application name
     */
    private String appid;

    /**
     * Group-wide unified ID
     */
    private String hid;

    /**
     * Mobile phone number
     */
    private String mobile;

    /**
     * Version of the unified SDK
     */
    private String sdkVersion;

    /**
     * Mobile Taobao device ID
     */
    private String umid;

    public void setApdid(String apdid) {
        this.apdid = apdid;
    }
    public String getApdid() {
        return this.apdid;
    }

    public void setAppVersion(String appVersion) {
        this.appVersion = appVersion;
    }
    public String getAppVersion() {
        return this.appVersion;
    }

    public void setAppid(String appid) {
        this.appid = appid;
    }
    public String getAppid() {
        return this.appid;
    }

    public void setHid(String hid) {
        this.hid = hid;
    }
    public String getHid() {
        return this.hid;
    }

    public void setMobile(String mobile) {
        this.mobile = mobile;
    }
    public String getMobile() {
        return this.mobile;
    }

    public void setSdkVersion(String sdkVersion) {
        this.sdkVersion = sdkVersion;
    }
    public String getSdkVersion() {
        return this.sdkVersion;
    }

    public void setUmid(String umid) {
        this.umid = umid;
    }
    public String getUmid() {
        return this.umid;
    }

    // Common transport-level fields shared by all Alipay requests.
    private String terminalType;
    private String terminalInfo;
    private String prodCode;
    private String notifyUrl;
    private String returnUrl;
    private boolean needEncrypt=false;
    private AlipayObject bizModel=null;

    public String getNotifyUrl() {
        return this.notifyUrl;
    }
    public void setNotifyUrl(String notifyUrl) {
        this.notifyUrl = notifyUrl;
    }

    public String getReturnUrl() {
        return this.returnUrl;
    }
    public void setReturnUrl(String returnUrl) {
        this.returnUrl = returnUrl;
    }

    public String getApiVersion() {
        return this.apiVersion;
    }
    public void setApiVersion(String apiVersion) {
        this.apiVersion = apiVersion;
    }

    public void setTerminalType(String terminalType){
        this.terminalType=terminalType;
    }
    public String getTerminalType(){
        return this.terminalType;
    }

    public void setTerminalInfo(String terminalInfo){
        this.terminalInfo=terminalInfo;
    }
    public String getTerminalInfo(){
        return this.terminalInfo;
    }

    public void setProdCode(String prodCode) {
        this.prodCode=prodCode;
    }
    public String getProdCode() {
        return this.prodCode;
    }

    // Remote API method this request maps to.
    public String getApiMethodName() {
        return "alipay.mobile.url.deviceverify.add";
    }

    // Serializes the business fields (snake_case keys) plus any user-defined
    // parameters into the text-parameter map sent to the gateway.
    public Map<String, String> getTextParams() {
        AlipayHashMap txtParams = new AlipayHashMap();
        txtParams.put("apdid", this.apdid);
        txtParams.put("app_version", this.appVersion);
        txtParams.put("appid", this.appid);
        txtParams.put("hid", this.hid);
        txtParams.put("mobile", this.mobile);
        txtParams.put("sdk_version", this.sdkVersion);
        txtParams.put("umid", this.umid);
        if(udfParams != null) {
            txtParams.putAll(this.udfParams);
        }
        return txtParams;
    }

    // Adds a single user-defined text parameter, lazily creating the map.
    public void putOtherTextParam(String key, String value) {
        if(this.udfParams == null) {
            this.udfParams = new AlipayHashMap();
        }
        this.udfParams.put(key, value);
    }

    public Class<AlipayMobileUrlDeviceverifyAddResponse> getResponseClass() {
        return AlipayMobileUrlDeviceverifyAddResponse.class;
    }

    public boolean isNeedEncrypt() {
        return this.needEncrypt;
    }
    public void setNeedEncrypt(boolean needEncrypt) {
        this.needEncrypt=needEncrypt;
    }

    public AlipayObject getBizModel() {
        return this.bizModel;
    }
    public void setBizModel(AlipayObject bizModel) {
        this.bizModel=bizModel;
    }
}
|
import React from 'react'
import { View, Text, Image, StyleSheet } from 'react-native'
import {
DrawerContentScrollView,
DrawerItem,
} from '@react-navigation/drawer'
import { connect } from 'react-redux'
import { FontAwesomeIcon } from '@fortawesome/react-native-fontawesome'
import { Row, Grid, Col, Card, CardItem, Button, List, ListItem } from 'native-base'
import { theme } from '../themes'
import { routes } from '../../../src/routes'
import * as selectors from '../../tools/reducers'
import * as actions from '../../tools/actions/auth'
const SideBar = ({ props, isAuthenticated, logout, profile, navigation }) => (
<View style={{ flex:1 }}>
<DrawerContentScrollView { ...props }>
<View>
<View>
{
isAuthenticated ? (
<View style={ theme.background }>
<Grid>
<Row>
<Col>
{/* <Image
style={ styles.logo }
source={{
uri: 'https://happytravel.viajes/wp-content/uploads/2020/04/146-1468479_my-profile-icon-blank-profile-picture-circle-hd.png'
}}
/> */}
<FontAwesomeIcon icon='user-circle' size={ 80 } style={{ ...theme.sidebarIcon, margin: 10 }} />
</Col>
<Col style={{ width: 50 }}>
<Button transparent
onPress={ () => navigation.navigate('notifications') }
>
<FontAwesomeIcon icon='bell' size={ 25 } style={{ ...theme.sidebarIcon, marginTop: 10 }} />
</Button>
</Col>
</Row>
<Row>
<Card style={ styles.infoCard }>
<CardItem
button
onPress={() => navigation.navigate('profile')}
style={ styles.infoCardItem }
>
<View>
<Text style={ styles.username }>{ profile.first_name + ' ' + profile.last_name }</Text>
<Text style={ styles.role }>{ profile.is_tutor ? 'Tutor' : 'Tutorado' }</Text>
</View>
</CardItem>
</Card>
</Row>
</Grid>
</View>
) : (<></>)
}
</View>
{
routes.filter(
route => route.authProtection == isAuthenticated
).map(route => route.showOnSidebar && (
<Row key={ route.name }>
<FontAwesomeIcon icon={ route.icon } size={ 25 } style={{ ...theme.sidebarIcon, marginTop: 15, marginLeft: 15 }} />
<DrawerItem
key={ route.name }
label={ route.displayName }
onPress={ () => navigation.navigate(route.name, { screen: route.defaultRoute }) }
style={{ width: '75%' }}
/>
</Row>
))
}
</View>
</DrawerContentScrollView>
{
isAuthenticated ? (
<>
{/* <Row>
<FontAwesomeIcon icon='sign-out-alt' size={ 25 } style={{ marginTop: 15, marginLeft: 15 }} />
</Row> */}
<DrawerItem
label='Cerrar Sesión'
onPress={ logout }
style={{ width: '75%' }}
/>
</>
) : (<></>)
}
</View>
)
// Wire the sidebar to the redux store (connect is imported from react-redux):
// auth state and profile come from selectors; logout dispatches the auth
// logout action.
export default connect(
  state => ({
    isAuthenticated: selectors.isAuthenticated(state),
    profile : selectors.getProfile(state),
  }),
  dispatch => ({
    logout() {
      dispatch(actions.logout())
    }
  })
)(SideBar)
// Local styles for the sidebar. Negative margins on infoCardItem pull the
// profile card flush against the avatar above it.
const styles = StyleSheet.create({
  logo: {
    width: 150,
    height: 150,
    borderRadius: 50,
  },
  username: {
    fontSize: 20,
    marginTop: 10,
    marginLeft: 15,
    fontWeight: 'bold',
  },
  role: {
    fontSize: 16,
    marginLeft: 15,
    marginBottom: 20
  },
  userInfo: {
    flex: 1,
  },
  // Transparent, shadowless card so the profile block blends into the header.
  infoCard: {
    backgroundColor: 'transparent',
    shadowColor: null,
    elevation: 0,
    borderColor: 'transparent',
    flex: 1,
  },
  infoCardItem: {
    backgroundColor: 'transparent',
    padding: 0,
    margin: -20,
    marginTop: -25,
  },
})
|
import os
def rename_files(directory_path, new_name, extension):
    """Rename every regular file in ``directory_path`` to ``<new_name><i>.<extension>``.

    Fixes over the original implementation:
      * Files are processed in sorted order, so the numbering is deterministic
        (``os.listdir`` order is arbitrary).
      * A two-phase rename (via unique temporary names) prevents an early
        rename from silently overwriting a file that is still waiting to be
        renamed (e.g. when a source file is already called ``new_file1.txt``).

    :param directory_path: directory whose files are renamed
    :param new_name: base name for the renamed files
    :param extension: extension (without the leading dot) for the renamed files
    """
    try:
        if not os.path.exists(directory_path):
            raise FileNotFoundError("Incorrect directory path")

        files = sorted(
            f for f in os.listdir(directory_path)
            if os.path.isfile(os.path.join(directory_path, f))
        )
        if not files:
            print("No files found in the directory")
            return

        # Phase 1: park every file under a collision-proof temporary name.
        temp_names = []
        for i, file in enumerate(files, start=1):
            temp_name = f".__rename_tmp_{i}"
            os.rename(
                os.path.join(directory_path, file),
                os.path.join(directory_path, temp_name),
            )
            temp_names.append(temp_name)

        # Phase 2: move the temporary files to their final names.
        for i, temp_name in enumerate(temp_names, start=1):
            new_file_name = f"{new_name}{i}.{extension}"
            os.rename(
                os.path.join(directory_path, temp_name),
                os.path.join(directory_path, new_file_name),
            )
    except FileNotFoundError as e:
        # Preserve the original behavior: report a bad path instead of raising.
        print(e)
# Example usage
# NOTE(review): placeholder path — replace "/path/to/directory/" with a real
# directory; with the placeholder this call just prints the path error.
rename_files("/path/to/directory/", "new_file", "txt")
package tae.cosmetics.gui.util.packet;
import java.awt.Color;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.Gui;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.network.Packet;
import tae.cosmetics.guiscreen.button.GuiExtendButton;
import tae.cosmetics.guiscreen.button.GuiMorePacketInformationButton;
/**
 * Base class for a GUI module that renders a single captured network packet:
 * a timestamp column plus two content columns (client side / server side).
 */
public abstract class AbstractPacketModule extends Gui {

    // Everything is drawn at half scale: pixel coordinates are multiplied by
    // this factor and the GL matrix is scaled by 1/scalefactor around drawing.
    protected static final int scalefactor = 2;
    protected static final int modwidth = 210;
    protected static final int modheightminimized= 24;
    // Expanded height = five minimized rows.
    protected static final int modheightfull = (modheightminimized) * 5;
    protected static final int modtimestampwidth = 100;

    //x coord
    public int x = 0;
    //y coord
    public int y = 0;
    //unix timestamp of the packet; -1 until set by the constructor
    private long timestamp = -1;
    //TODO: make private
    public Packet<?> packet;
    // Whether the module is collapsed to a single row.
    protected boolean minimized = true;
    protected FontRenderer fontRenderer = Minecraft.getMinecraft().fontRenderer;
    // Tooltip text shown while hovering the module.
    private String hoverText;
    private GuiExtendButton extendButton;
    private GuiMorePacketInformationButton moreInfoButton;

    protected AbstractPacketModule(String info, long timestamp, Packet<?> packet) {
        hoverText = info;
        this.timestamp = timestamp;
        this.packet = packet;
        extendButton = new GuiExtendButton(0, 0, 0, this, "", 2);
        moreInfoButton = new GuiMorePacketInformationButton(0, 0, 0, this.packet = packet, "", 2);
    }

    // Returns the expand/collapse button, repositioned just left of the module.
    public GuiExtendButton extendButton() {
        extendButton.x = x - 10;
        extendButton.y = y;
        return extendButton;
    }

    // Returns the "more info" button, repositioned just right of the module.
    public GuiMorePacketInformationButton moreInfoButton() {
        moreInfoButton.x = x + getWidth() + 10;
        moreInfoButton.y = y;
        return moreInfoButton;
    }

    // Draws the module frame (purple border, dark inner panels for the
    // timestamp and both content columns), the timestamp, and the
    // subclass-provided text — left column for client packets, right column
    // for server packets, as selected by type().
    public void drawScreen(int mouseX, int mouseY, float partialTicks) {
        int x = this.x * scalefactor;
        int y = this.y * scalefactor;
        GlStateManager.scale(1D / scalefactor, 1D / scalefactor, 1D / scalefactor);
        int color;
        this.drawGradientRect(x, y, x + modtimestampwidth + modwidth * 2, y + (minimized ? modheightminimized : modheightfull), color = new Color(128, 0, 128).getRGB(), color);
        this.drawGradientRect(x + 1, y + 1, x + modtimestampwidth + modwidth * 2 - 1, y + (minimized ? modheightminimized : modheightfull) - 1, color = new Color(64, 0, 64).getRGB(), color);
        this.drawGradientRect(x + 2, y, x + 2 + modtimestampwidth - 2, y + (minimized ? modheightminimized : modheightfull) - 2, color = Color.BLACK.getRGB(), color);
        this.drawGradientRect(x + 2 + modtimestampwidth + 1, y + 2, x + 2 + modtimestampwidth + modwidth - 1, y + (minimized ? modheightminimized : modheightfull) - 2, color = Color.BLACK.getRGB(), color);
        this.drawGradientRect(x + modtimestampwidth + modwidth + 3, y + 2, x + modtimestampwidth + modwidth * 2, y + (minimized ? modheightminimized : modheightfull) - 2, color = Color.BLACK.getRGB(), color);
        fontRenderer.drawString(Long.toString(getTimestamp()), x + 3, y + 6, Color.WHITE.getRGB());
        drawText(x + 10 + (type() ? modtimestampwidth : modtimestampwidth + modwidth), y + 6);
        GlStateManager.scale(scalefactor, scalefactor, scalefactor);
    }

    /**
     *
     * Draw text
     *
     * @param x xcoord
     * @param y ycoord
     */
    public abstract void drawText(int x, int y);

    public String getTip() {
        return hoverText;
    }

    public boolean isMinimized() {
        return minimized;
    }

    public void setMinimized(boolean bool) {
        minimized = bool;
    }

    // Height in screen pixels (already divided by the draw scale).
    public int getHeight() {
        return minimized ? modheightminimized / scalefactor: modheightfull / scalefactor;
    }

    // Width in screen pixels (already divided by the draw scale).
    public int getWidth() {
        return (modtimestampwidth + modwidth * 2) / scalefactor;
    }

    /**
     *
     * @return unix timestamp of send/receive packet
     */
    public long getTimestamp() {
        return timestamp;
    }

    /**
     *
     * @return type of packet: true for client, false for server
     */
    public abstract boolean type();

    /**
     *
     * @return true when the mouse is hovering over the module
     */
    public boolean isHovered(int mouseX, int mouseY) {
        return mouseX > x && mouseX < x + getWidth() && mouseY > y && mouseY < y + getHeight();
    }
}
|
# Build with extra sandbox paths exposing the Android signing keys and an
# optional ccache directory (the trailing "?" marks the path as optional).
# "$@" forwards any extra nix-build arguments (e.g. the attribute to build).
nix-build --option extra-sandbox-paths "/keys=/var/secrets/android-keys /var/cache/ccache?" -j4 --cores $(nproc) "$@"
|
#!/bin/sh
## install Tile Mill as a server
# https://www.mapbox.com/tilemill/docs/linux-install/
# https://www.mapbox.com/tilemill/docs/guides/ubuntu-service/
# Add the Mapbox PPA and install TileMill plus its runtime dependencies.
sudo add-apt-repository -y ppa:developmentseed/mapbox
sudo apt-get update
sudo apt-get install -y tilemill libmapnik nodejs
# files: /usr/share/tilemill
# config: /etc/tilemill/tilemill.config
# Install our local config over the packaged default.
sudo cp etc/tilemill.config /etc/tilemill/tilemill.config
# logs: /var/log/tilemill
# run as tilemill user using upstart:
sudo restart tilemill
|
# Evaluate the 1024+0+512-HPMI language model on wikitext-103 validation data,
# keeping only common words in the first two-thirds of each example and
# scoring perplexity on the last sixth (batch size 1, drop last batch).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-HPMI/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-HPMI/1024+0+512-common-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_common_words_first_two_thirds_sixth --eval_function last_sixth_eval
# Build a GSEA rank list (gene<TAB>fold-change, sorted by fold-change desc)
# from the DEG table, then run the GSEA wrapper script.
# NOTE(review): the first line starts with a bare file path — the producing
# command (likely `cat` or `cut -f... <file>`) appears to have been truncated;
# restore it before running.  `<expr.foldchange.rnk>` and `<prefix>` on the
# second line are placeholders, not literal arguments.
/disk4/project/P20170819/GSE10616/ColonOnly_CD.vs.healthy/deg.tab.raw | cut -f 6,7 | awk '{print $2"\t"$1}' | grep -v "Gene" | sort -k2gr > rank.lst
sh ../bin/run.GSEA.human.sh <expr.foldchange.rnk> <prefix>
|
#!/bin/bash
set -euo pipefail

# Verify that $HOST.$DOMAIN resolves to the jumpbox's elastic IP.
# The `: $VAR` lines abort (via set -u) when a required variable is unset.
: $HOST
: $DOMAIN
FQDN="$HOST.$DOMAIN"
# First A record reported by `host`.
RESOLVE=$(host "$FQDN" | awk '/has address/ { print $4 }')
# Expected elastic IP, read from the jumpbox BOSH manifest.
JUMPBOX_EIP=$(bosh int --path /instance_groups/name=jumpbox/networks/name=public/static_ips/0 jumpbox-manifest-s3/jumpbox.yml)
echo "FQDN: $FQDN, expect: $JUMPBOX_EIP, actual: $RESOLVE"
# Exit non-zero (failing the pipeline) on mismatch.
test "$RESOLVE" = "$JUMPBOX_EIP"
|
package schema
import (
"github.com/hexbee-net/errors"
"github.com/hexbee-net/parquet/datastore"
"github.com/hexbee-net/parquet/parquet"
)
// columnParent records whether a column was synthesized as part of a LIST or
// MAP logical structure, or is a regular (parentless) column.
type columnParent int

const (
	noParent columnParent = iota
	listParent
	mapParent
)

// Column is a node in the parquet schema tree: either a data (leaf) column
// backed by a ColumnStore, or a group column with child columns.
type Column struct {
	index     int
	name      string
	flatName  string
	nameArray []string

	// one of the following should be not null. data or children
	data     *datastore.ColumnStore
	children []*Column

	rep parquet.FieldRepetitionType

	// Maximum repetition (maxR) and definition (maxD) levels for this column.
	maxR uint16
	maxD uint16

	parent columnParent

	// for the reader we should read this element from the meta, for the writer we need to build this element
	element *parquet.SchemaElement

	params *datastore.ColumnParameters
}
// AsColumnDefinition creates a new column definition from the provided column.
//
// The definition mirrors this column's schema element and recursively wraps
// every child column the same way.
func (c *Column) AsColumnDefinition() *ColumnDefinition {
	definition := &ColumnDefinition{
		SchemaElement: c.Element(),
	}

	children := c.Children()
	for i := range children {
		definition.Children = append(definition.Children, children[i].AsColumnDefinition())
	}

	return definition
}
// Children returns the column's child columns (nil for data/leaf columns).
func (c *Column) Children() []*Column {
	return c.children
}

// ColumnStore returns the underlying column store (nil for group columns).
func (c *Column) ColumnStore() *datastore.ColumnStore {
	return c.data
}

// MaxDefinitionLevel returns the maximum definition level for this column.
func (c *Column) MaxDefinitionLevel() uint16 {
	return c.maxD
}

// MaxRepetitionLevel returns the maximum repetition value for this column.
func (c *Column) MaxRepetitionLevel() uint16 {
	return c.maxR
}

// FlatName returns the name of the column and its parents in dotted notation.
func (c *Column) FlatName() string {
	return c.flatName
}

// Name returns the column name.
func (c *Column) Name() string {
	return c.name
}

// Index returns the index of the column in schema, zero based.
func (c *Column) Index() int {
	return c.index
}

// Element returns schema element definition of the column.
func (c *Column) Element() *parquet.SchemaElement {
	if c.element == nil {
		// If this is a no-element node, we need to re-create element every time to make sure the content is always up-to-date
		return c.buildElement()
	}

	return c.element
}

// Type returns the parquet type of the value.
// Returns nil if the column is a group.
func (c *Column) Type() *parquet.Type {
	if c.data == nil {
		return nil
	}

	return parquet.TypePtr(c.data.ParquetType())
}

// RepetitionType returns the repetition type for the current column.
func (c *Column) RepetitionType() *parquet.FieldRepetitionType {
	return &c.rep
}

// IsDataColumn returns true if the column is a data column, false otherwise.
func (c *Column) IsDataColumn() bool {
	return c.data != nil
}

// ChildrenCount returns the number of children in a group; it is an error to
// call this on a data (leaf) column.
func (c *Column) ChildrenCount() (int, error) {
	if c.data != nil {
		return 0, errors.New("not a group column")
	}

	return len(c.children), nil
}

// SetSkipped marks the underlying column store as skipped.
// NOTE(review): panics on group columns (c.data is nil) — confirm callers
// only invoke this on data columns.
func (c *Column) SetSkipped(b bool) {
	c.data.Skipped = b
}
// GetData returns the next value of the column together with its maximum
// definition level.
//
// Group (non-leaf) columns are assembled from their children via
// getNextData; when the group is REPEATED, consecutive entries whose first
// repetition level equals maxR (i.e. they belong to the same record) are
// collected into a slice.  Leaf columns read directly from the column store.
func (c *Column) GetData() (interface{}, int32, error) {
	if c.children != nil {
		data, maxD, err := c.getNextData()
		if err != nil {
			return nil, 0, err
		}

		if c.rep != parquet.FieldRepetitionType_REPEATED || data == nil {
			return data, maxD, nil
		}

		// Repeated group: keep appending sibling entries until the next
		// value starts a new record (lower repetition level) or data ends.
		ret := []map[string]interface{}{data}

		for {
			rl, _, last := c.getFirstRDLevel()
			if last || rl < int32(c.maxR) || rl == 0 {
				// end of this object
				return ret, maxD, nil
			}

			data, _, err := c.getNextData()
			if err != nil {
				return nil, maxD, err
			}

			ret = append(ret, data)
		}
	}

	return c.data.Get(int32(c.maxD), int32(c.maxR))
}
// readGroupSchema consumes schema[idx] as a group node and recursively reads
// its NumChildren child elements.  dLevel and rLevel are the definition and
// repetition levels inherited from the parent; they are incremented for
// OPTIONAL and REPEATED fields respectively.  It returns the index of the
// first schema element after this group's subtree.
func (c *Column) readGroupSchema(schema []*parquet.SchemaElement, name string, idx int, dLevel, rLevel uint16) (newIndex int, err error) {
	if len(schema) <= idx {
		return 0, errors.WithFields(
			errors.New("schema index out of bound"),
			errors.Fields{
				"index": idx,
				"size":  len(schema),
			})
	}

	s := schema[idx]

	// A group element must have no physical type and a positive child count.
	if s.Type != nil {
		return 0, errors.WithFields(
			errors.New("field type is not nil for group"),
			errors.Fields{
				"index": idx,
			})
	}

	if s.NumChildren == nil {
		return 0, errors.WithFields(
			errors.New("field NumChildren is invalid"),
			errors.Fields{
				"index": idx,
			})
	}

	if *s.NumChildren <= 0 {
		return 0, errors.WithFields(
			errors.New("field NumChildren is zero"),
			errors.Fields{
				"index": idx,
			})
	}

	l := int(*s.NumChildren)

	if len(schema) <= idx+l {
		return 0, errors.WithFields(
			errors.New("not enough element in schema list"),
			errors.Fields{
				"index": idx,
			})
	}

	// OPTIONAL/REPEATED raise the definition level; REPEATED also raises the
	// repetition level.
	if s.RepetitionType != nil && *s.RepetitionType != parquet.FieldRepetitionType_REQUIRED {
		dLevel++
	}

	if s.RepetitionType != nil && *s.RepetitionType == parquet.FieldRepetitionType_REPEATED {
		rLevel++
	}

	c.maxD = dLevel
	c.maxR = rLevel

	// Dotted flat name accumulates the ancestor path.
	if name == "" {
		name = s.Name
	} else {
		name += "." + s.Name
	}

	c.flatName = name
	c.name = s.Name
	c.element = s
	c.children = make([]*Column, 0, l)
	c.rep = *s.RepetitionType

	idx++ // move idx from this group to next

	for i := 0; i < l; i++ {
		child := &Column{}

		if schema[idx].Type == nil {
			// another group
			idx, err = child.readGroupSchema(schema, name, idx, dLevel, rLevel)
			if err != nil {
				return 0, err
			}

			c.children = append(c.children, child)
		} else {
			idx, err = child.readColumnSchema(schema, name, idx, dLevel, rLevel)
			if err != nil {
				return 0, err
			}

			c.children = append(c.children, child)
		}
	}

	return idx, nil
}

// readColumnSchema consumes schema[idx] as a data (leaf) column: it validates
// the element, derives the max definition/repetition levels from the
// inherited dLevel/rLevel plus this field's repetition type, and creates the
// backing value store.  Returns the index of the next schema element.
func (c *Column) readColumnSchema(schema []*parquet.SchemaElement, name string, idx int, dLevel, rLevel uint16) (newIndex int, err error) {
	s := schema[idx]

	if s.Name == "" {
		return 0, errors.WithFields(
			errors.New("name in schema is empty"),
			errors.Fields{
				"index": idx,
			})
	}

	if s.RepetitionType == nil {
		return 0, errors.WithFields(
			errors.New("field RepetitionType is nil"),
			errors.Fields{
				"index": idx,
			})
	}

	if *s.RepetitionType != parquet.FieldRepetitionType_REQUIRED {
		dLevel++
	}

	if *s.RepetitionType == parquet.FieldRepetitionType_REPEATED {
		rLevel++
	}

	c.element = s
	c.maxR = rLevel
	c.maxD = dLevel
	c.rep = *s.RepetitionType
	c.name = s.Name

	if name == "" {
		c.flatName = s.Name
	} else {
		c.flatName = name + "." + s.Name
	}

	c.data, err = datastore.GetValuesStore(s)
	if err != nil {
		return 0, err
	}

	return idx + 1, nil
}
// buildElement synthesizes a SchemaElement for writer-side columns that have
// no element read from file metadata (see Element()).
func (c *Column) buildElement() *parquet.SchemaElement {
	rep := c.rep
	elem := &parquet.SchemaElement{
		RepetitionType: &rep,
		Name:           c.name,
	}

	if c.params != nil {
		elem.FieldID = c.params.FieldID
		elem.ConvertedType = c.params.ConvertedType
		elem.LogicalType = c.params.LogicalType
	}

	if c.data != nil {
		// Leaf column: carry the physical type and its decoration.
		elem.Type = parquet.TypePtr(c.data.ParquetType())
		elem.TypeLength = c.params.TypeLength
		elem.Scale = c.params.Scale
		elem.Precision = c.params.Precision
	} else {
		// Group column: only the child count is recorded.
		nc := int32(len(c.children))
		elem.NumChildren = &nc
	}

	return elem
}

// getNextData assembles the next record of a group column as a map keyed by
// child name.  It returns nil (with the max definition level seen) when none
// of the children contribute a value for this record.
func (c *Column) getNextData() (map[string]interface{}, int32, error) {
	if c.children == nil {
		return nil, 0, errors.New("getNextData is not possible on non group node")
	}

	ret := make(map[string]interface{})
	notNil := 0

	var maxD int32

	for i := range c.children {
		data, dl, err := c.children[i].GetData()
		if err != nil {
			return nil, 0, err
		}

		if dl > maxD {
			maxD = dl
		}

		// https://golang.org/doc/faq#nil_error
		if m, ok := data.(map[string]interface{}); ok && m == nil {
			data = nil
		}

		// if the data is not nil, then its ok, but if its nil, we need to know in which definition level is this nil is.
		// if its exactly one below max definition level, then the parent is there
		if data != nil {
			ret[c.children[i].name] = data
			notNil++
		}

		var diff int32
		if c.children[i].rep != parquet.FieldRepetitionType_REQUIRED {
			diff++
		}

		// A nil child whose definition level sits exactly at (child maxD -
		// diff) means the parent itself is present (the child is an empty
		// optional), so it still counts toward notNil.
		// NOTE(review): for a non-nil child this branch increments notNil a
		// second time — appears harmless since notNil is only compared to
		// zero, but confirm the intent.
		if dl == int32(c.children[i].maxD)-diff {
			notNil++
		}
	}

	if notNil == 0 {
		return nil, maxD, nil
	}

	// NOTE(review): returns c.maxD rather than the accumulated child maxD —
	// confirm this asymmetry with the nil branch above is intentional.
	return ret, int32(c.maxD), nil
}

// getFirstRDLevel peeks the repetition/definition levels of the next value
// without consuming it; last reports that no more values remain.
func (c *Column) getFirstRDLevel() (rLevel, dLevel int32, last bool) {
	if c.data != nil {
		return c.data.GetRDLevelAt(-1)
	}

	// there should be at least 1 child,
	for i := range c.children {
		rLevel, dLevel, last = c.children[i].getFirstRDLevel()
		if last {
			return rLevel, dLevel, last
		}

		// if this value is not nil, dLevel less than this level is not interesting
		if dLevel == int32(c.children[i].maxD) {
			return rLevel, dLevel, last
		}
	}

	return -1, -1, false
}
// GetSchemaArray returns this column's schema element followed by the schema
// elements of all of its descendants, in depth-first order.
func (c *Column) GetSchemaArray() []*parquet.SchemaElement {
	elements := []*parquet.SchemaElement{c.Element()}

	// Data (leaf) columns have no children to traverse.
	if c.data != nil {
		return elements
	}

	for _, child := range c.children {
		elements = append(elements, child.GetSchemaArray()...)
	}

	return elements
}
|
<filename>tests/nameVisitor.test.ts<gh_stars>1-10
import * as path from "path";
import { FileSystemPackageProvider } from "../src/providers/folder";
import { Package } from "../src/package/package";
import { Visitor } from "../src/visitors/visitor";
import { OraLogger } from "../src/utils/logger";
import { LicenseUtilities } from "../src/extensions/utilities/LicenseUtilities";
import { DependencyUtilities } from "../src/extensions/utilities/DependencyUtilities";
// Integration tests for Visitor over the bundled webpack@4.35.2 fixture.
// Fix: two tests shared the title "Test getPackageByName with version",
// which makes failure reports ambiguous — the second (non-existent version)
// case is renamed.
describe(`visitFromFolder Tests`, () => {
    let p: Package;

    beforeAll(async () => {
        const destination = path.join("tests", "data", "testproject2", "node_modules");
        const provider: FileSystemPackageProvider = new FileSystemPackageProvider(destination);
        const visitor = new Visitor(["webpack"], provider, new OraLogger());

        p = await visitor.visit();
    });

    test(`Checks name`, () => {
        expect(p.name).toBe(`webpack`);
    });

    test(`Checks version`, () => {
        expect(p.version).toBe(`4.35.2`);
    });

    test(`Checks fullName`, () => {
        expect(p.fullName).toBe(`webpack@4.35.2`);
    });

    test(`Checks loop`, () => {
        expect(p.isLoop).toBe(false);
    });

    test(`Checks transitive dependencies`, () => {
        expect(new DependencyUtilities(p).transitiveCount).toBe(4279);
    });

    test(`Checks distinct dependencies by name`, () => {
        expect(new DependencyUtilities(p).distinctNameCount).toBe(308);
    });

    test(`Checks distinct dependencies by name and version`, () => {
        expect(new DependencyUtilities(p).distinctVersionCount).toBe(333);
    });

    test(`Checks visit method`, () => {
        let count = 0;

        p.visit(() => count++);
        expect(count).toBe(4279);
    });

    test(`Checks visit method with self`, () => {
        let count = 0;

        p.visit(() => count++, true);
        expect(count).toBe(4280);
    });

    test(`Test getPackagesBy`, () => {
        const matches = p.getPackagesBy(p => p.name === "@webassemblyjs/wast-parser");

        expect(matches.length).toBe(25);

        for (const pkg of matches) {
            expect(pkg.name).toBe("@webassemblyjs/wast-parser");
        }
    });

    test(`Test getPackagesByName`, () => {
        const matches = p.getPackagesByName("has-value");

        expect(matches.length).toBe(32);

        for (const pkg of matches) {
            expect(pkg.name).toBe("has-value");
        }
    });

    test(`Test getPackagesByName with version`, () => {
        const matches = p.getPackagesByName("has-value", "1.0.0");

        expect(matches.length).toBe(16);

        for (const pkg of matches) {
            expect(pkg.name).toBe("has-value");
        }
    });

    test(`Test getPackageByName`, () => {
        const match = p.getPackageByName("has-value");

        expect.assertions(2);
        expect(match).not.toBe(null);
        expect(match!.name).toBe("has-value"); //eslint-disable-line
    });

    test(`Test getPackageByName with version`, () => {
        const match = p.getPackageByName("has-value", "1.0.0");

        expect.assertions(3);
        expect(match).not.toBe(null);
        expect(match!.name).toBe("has-value"); //eslint-disable-line
        expect(match!.version).toBe("1.0.0"); //eslint-disable-line
    });

    test(`Test getPackageByName with non-existent version`, () => {
        const match = p.getPackageByName("has-value", "123.456.789");

        expect(match).toBe(null);
    });

    test(`Test getPackageByName with non existant package`, () => {
        const match = p.getPackageByName("doesntexist");

        expect(match).toBe(null);
    });

    test(`Test getPackageByName with non existant package and version`, () => {
        const match = p.getPackageByName("doesntexist", "1.0.0");

        expect(match).toBe(null);
    });

    test(`Test getData`, () => {
        const name = p.getData("name");
        const version = p.getData("version");
        const dependencies = p.getData("dependencies");
        const license = p.getData("license");
        const scriptsTest = p.getData("scripts.test");
        const missing = p.getData("adf.sdf.esdf");

        expect.assertions(6);

        if (dependencies) {
            expect(name).toBe("webpack");
            expect(version).toBe("4.35.2");
            expect(Object.keys(dependencies as object).length).toBe(24);
            expect(license).toBe("MIT");
            expect(typeof scriptsTest).toBe("string");
            expect(missing).toBeUndefined();
        }
    });

    test(`Test group packages by license`, () => {
        const [{ license, names }, ...rest] = new LicenseUtilities(p).licensesByGroup;

        expect(license).toBe("MIT");
        expect(names.length).toBe(239);
        expect(rest[0].license).toBe("ISC");
        expect(rest[0].names.length).toBe(51);
    });
});
// Verifies that the Visitor's depth parameter bounds how deep the dependency
// tree is traversed (counts include the root when visit(..., true) is used).
describe(`Visitor Max Depth Tests`, () => {
    // Builds the webpack fixture tree, traversing at most `depth` levels.
    function getPackage(depth: number): Promise<Package> {
        const destination = path.join("tests", "data", "testproject2", "node_modules");
        const provider: FileSystemPackageProvider = new FileSystemPackageProvider(destination);
        const visitor = new Visitor(["webpack"], provider, new OraLogger(), [], depth);

        return visitor.visit();
    }

    test(`Max depth: Infinity`, async () => {
        const p = await getPackage(Infinity);
        let libCount: number = 0;

        p.visit(() => libCount++, true);
        expect(libCount).toBe(4280);
    });

    test(`Max depth: 1`, async () => {
        const p = await getPackage(1);
        let libCount: number = 0;

        p.visit(() => libCount++, true);
        expect(libCount).toBe(25);
    });

    test(`Max depth: 2`, async () => {
        const p = await getPackage(2);
        let libCount: number = 0;

        p.visit(() => libCount++, true);
        expect(libCount).toBe(114);
    });
});
|
<reponame>JamesMilnerUK/go-project-euler
package euler
import "testing";
// TestProblem verifies Project Euler problem 1: the sum of all natural
// numbers below 1000 that are multiples of 3 or 5 must equal 233168.
//
// Improvements: gofmt-style condition (no redundant parentheses) and an
// error message that reports the actual value via t.Errorf.
func TestProblem(t *testing.T) {
	if got := findSum(); got != 233168 {
		t.Errorf("findSum() = %d, want 233168", got)
	}
}
|
#!/bin/sh
# Install the built dapp binaries into <target>/system/<app>.
# The app name is derived from the current directory, which is expected to
# look like .../dapp/<app>/cmd[/...].
strpwd=$(pwd)
# Strip everything up to and including the last "dapp/" prefix.
strcmd=${strpwd##*dapp/}
# Strip the "/cmd..." suffix, leaving just the app name.
strapp=${strcmd%/cmd*}
OUT_DIR="${1}/system/$strapp"
#FLAG=$2
mkdir -p "${OUT_DIR}"
cp ./build/* "${OUT_DIR}"
|
#!/bin/bash
set -eo pipefail

# Aborts with a readable message when a required environment variable is empty.
function require() {
    if [ -z "$2" ]; then
        echo "validation failed: $1 unset"
        exit 1
    fi
}

require GH_PAT "${GH_PAT}"

# Fire one repository_dispatch event per package so CI rebuilds each package
# for staging.
for package in $(bin/list-all-packages.sh)
do
    curl -H "Authorization: token $GH_PAT" \
        -H 'Accept: application/json' \
        -d "{\"event_type\": \"build-package-staging\", \"client_payload\": {\"package\": \"${package}\"}}" \
        "https://api.github.com/repos/replicatedhq/kurl/dispatches"
done
|
<gh_stars>0
import React, {FunctionComponent} from "react";
import {inject, observer} from "mobx-react";
import {
Card,
CardContent,
CardHeader,
Checkbox,
createStyles,
makeStyles,
Table,
TableBody,
TableCell,
TableHead,
TableRow
} from "@material-ui/core";
import {AccountBalanceMapping, AccountResponse} from "../../models";
import {IAppState} from "../../store";
import {makePreciseNumberString} from "../../utils";
// Props injected from the mobx store (see mapMobxToProps below).
interface AccountsTableMobxProps {
    accounts: AccountResponse[],
    // Balance per account address.
    balances: AccountBalanceMapping,
    defaultAccountAddress?: string,
    // Marks the account with the given address as the default one.
    setDefaultAccount: (address: string) => void
}
// Let the accounts table scroll horizontally on narrow screens.
const useStyles = makeStyles(() => createStyles({
    accountsTableCard: {
        overflowX: "auto"
    }
}));
/**
 * Table of the user's wallets: address, balance (16 decimal places), and a
 * checkbox marking the default account.
 *
 * Fix: the <TableRow> rendered inside accounts.map() lacked a React `key`
 * prop, which triggers the missing-key warning and can cause incorrect row
 * reconciliation when the account list changes.  The unique account address
 * is used as the key.
 */
const _AccountsTable: FunctionComponent<AccountsTableMobxProps> = ({
    accounts,
    balances,
    defaultAccountAddress,
    setDefaultAccount
}) => {
    const classes = useStyles();

    return (
        <Card className={classes.accountsTableCard}>
            <CardHeader title="Your Wallets"/>
            <CardContent>
                <Table>
                    <TableHead>
                        <TableRow>
                            <TableCell><b>Address</b></TableCell>
                            <TableCell><b>Balance</b></TableCell>
                            <TableCell><b>Is default</b></TableCell>
                        </TableRow>
                    </TableHead>
                    <TableBody>
                        {accounts.map(account => (
                            <TableRow key={account.address}>
                                <TableCell>{account.address}</TableCell>
                                <TableCell>{makePreciseNumberString(balances[account.address], 16)}</TableCell>
                                <TableCell>
                                    <Checkbox checked={defaultAccountAddress === account.address}
                                              onChange={() => setDefaultAccount(account.address)}
                                    />
                                </TableCell>
                            </TableRow>
                        ))}
                    </TableBody>
                </Table>
            </CardContent>
        </Card>
    )
};
// Maps MobX store state onto the component's props: the wallet list, the
// per-address balances, and the selected service-node account plus its setter.
const mapMobxToProps = (state: IAppState): AccountsTableMobxProps => ({
    accounts: state.accounts.accounts,
    balances: state.balances.accountsBalances,
    defaultAccountAddress: state.settings.selectedServiceNodeAccount,
    setDefaultAccount: state.settings.selectServiceNodeAccount
});

// Injected + observer-wrapped component actually exported to consumers.
export const AccountsTable = inject(mapMobxToProps)(observer(_AccountsTable as FunctionComponent));
|
#!/usr/bin/env bash

# Demo of bash signal trapping: print a message each time a signal is caught.
function catch()
{
    printf "caught: SIG_%s\n" "$1"
}

clear

# Install a handler for each trappable signal exercised below.
# (SIGKILL cannot be trapped; see the final loop iteration.)
trap 'catch "HUP"' HUP
trap 'catch "INT"' INT
trap 'catch "QUIT"' QUIT
trap 'catch "ABRT"' ABRT
trap 'catch "ALRM"' ALRM
trap 'catch "TERM"' TERM

# Send one signal to this very script ($$) every 3 seconds. The 7th signal,
# SIGKILL, is untrappable and terminates the script.
for i in {1..7}
do
    sleep 3
    case $i in
        1) SIGNAL="HUP" ;;
        2) SIGNAL="INT" ;;
        3) SIGNAL="QUIT" ;;
        4) SIGNAL="ABRT" ;;
        5) SIGNAL="ALRM" ;;
        6) SIGNAL="TERM" ;;
        7) SIGNAL="KILL" ;;
    esac
    echo sending "$SIGNAL"
    kill -s "$SIGNAL" $$
done
|
#!/bin/bash
# Bash shell script to wrangle/transform a raw mongoexport file.
#
# Database Name: olympics
# Generated on: 2021-07-30 22:13:42 UTC
# Template: wrangle_one.txt

source ./env.sh

mkdir -p tmp/olympics/out
mkdir -p out/olympics

# This script does the following:
# 1) Downloads blob olympics__g1996_summer.json from container olympics-raw
#    to local file tmp/olympics/olympics__g1996_summer.json
# 2) Wrangle/transform the downloaded blob, producing local file
#    tmp/olympics/olympics__g1996_summer__wrangled.json
# 3) Uploads the wrangled file to storage container olympics-adf
# 4) Optionally loads the target CosmosDB using the mongoimport program
# 5) Optionally loads the target CosmosDB using a custom DotNet program
# 6) Delete the downloaded and wrangled file, as the host VM may have limited storage
#
# Note: this script is executed by script olympics_wrangle_all.sh

# $1..$3 are optional passthrough flags for wrangle.py; they are intentionally
# left unquoted so absent arguments vanish instead of becoming empty strings.
python wrangle.py transform_blob \
    --db olympics \
    --source-coll g1996_summer \
    --in-container olympics-raw \
    --blobname olympics__g1996_summer.json \
    --filename tmp/olympics/olympics__g1996_summer.json \
    --outfile tmp/olympics/olympics__g1996_summer__wrangled.json \
    --out-container olympics-games-adf $1 $2 $3

echo ''
echo 'first line of input file:'
head -1 tmp/olympics/olympics__g1996_summer.json

echo ''
echo 'first line of output file:'
head -1 tmp/olympics/olympics__g1996_summer__wrangled.json

if [[ $M2C_COSMOS_LOAD_METHOD == "mongoimport" ]];
then
    echo ''
    echo 'executing mongoimport to db: olympics coll: games ...'
    # Fixed: single-token option values are now double-quoted. The connection
    # string in particular routinely contains '&' and '?', which undergo word
    # splitting / glob expansion when expanded unquoted.
    mongoimport \
        --uri "$M2C_COSMOS_MONGO_CONN_STRING" \
        --db olympics \
        --collection games \
        --file tmp/olympics/olympics__g1996_summer__wrangled.json \
        --numInsertionWorkers "$M2C_MONGOIMPORT_NWORKERS" \
        --batchSize "$M2C_MONGOIMPORT_BATCH_SIZE" \
        --mode "$M2C_MONGOIMPORT_MODE" \
        --writeConcern "{w:0}" \
        --ssl
    echo 'mongoimport completed'
fi

if [[ $M2C_COSMOS_LOAD_METHOD == "dotnet_mongo_loader" ]];
then
    echo ''
    echo 'executing dotnet_mongo_loader to db: olympics coll: games ...'
    # Loader arguments stay unquoted on purpose: several are optional and must
    # disappear (not become empty-string arguments) when unset.
    dotnet run --project dotnet_mongo_loader/dotnet_mongo_loader.csproj \
        olympics games tmp/olympics/olympics__g1996_summer__wrangled.json \
        $M2C_DOTNETMONGOLOADER_TARGET $M2C_DOTNETMONGOLOADER_LOAD_IND \
        $M2C_DOTNETMONGOLOADER_DOCUMENT_ID_POLICY \
        --tracerInterval $M2C_DOTNETMONGOLOADER_TRACER_INTERVAL \
        --rowMaxRetries $M2C_DOTNETMONGOLOADER_ROW_MAX_RETRIES \
        $M2C_DOTNETMONGOLOADER_VERBOSE
fi

if [[ $M2C_WRANGLING_CLEANUP == "cleanup" ]];
then
    echo ''
    echo 'deleting the downloaded and wrangled files to save disk space...'
    rm tmp/olympics/olympics__g1996_summer.json
    rm tmp/olympics/olympics__g1996_summer__wrangled.json
fi

echo 'done'
// Two-item knapsack: pick the most valuable subset of the two items whose
// total weight does not exceed maxW.
// Preference order: both items -> the better single item -> whichever single
// item fits -> nothing (0).
function knapsackLight(value1, weight1, value2, weight2, maxW) {
    const firstFits = weight1 <= maxW;
    const secondFits = weight2 <= maxW;

    if (weight1 + weight2 <= maxW) {
        return value1 + value2;            // take both
    }
    if (firstFits && secondFits) {
        return Math.max(value1, value2);   // take the more valuable one
    }
    if (firstFits) {
        return value1;                     // only item 1 fits
    }
    if (secondFits) {
        return value2;                     // only item 2 fits
    }
    return 0;                              // nothing fits
}
|
-- Most recently active users: top 3 ordered by last activity timestamp.
SELECT
    users.username,
    users.last_activity_time
FROM
    users
ORDER BY
    users.last_activity_time DESC
LIMIT 3
# -*- coding: utf-8 -*-
# Python standard library imports
import os
from io import BytesIO
# Django imports
from django.http import HttpResponse
from django.template.loader import get_template
from django.conf import settings
# Third-party imports
from xhtml2pdf import pisa
def get_ImagePath(instance, filename):
    """Build the upload path for an image attachment.

    The path is rooted under the owning object's folder (equipment, article,
    or work order), e.g. ``equipos/<id>/anexos/img/<filename>``.

    Raises:
        ValueError: if the instance is not linked to any parent object.
            (Previously this surfaced as an opaque UnboundLocalError because
            ``upload_dir`` was never assigned.)
    """
    if instance.equipo_id:
        upload_dir = os.path.join(
            'equipos', instance.equipo_id, 'anexos', 'img')
    elif instance.articulo_id:
        upload_dir = os.path.join(
            'articulos', instance.articulo_id, 'anexos', 'img')
    elif instance.orden_trabajo_id:
        upload_dir = os.path.join(
            'ordenes', instance.orden_trabajo_id, 'anexos', 'img')
    else:
        raise ValueError(
            'attachment instance has no equipo_id, articulo_id or '
            'orden_trabajo_id; cannot build an image upload path')
    return os.path.join(upload_dir, filename)
def get_FilePath(instance, filename):
    """Build the upload path for a generic file attachment.

    Mirrors get_ImagePath but stores under ``.../anexos/files/`` instead of
    ``.../anexos/img/``.

    Raises:
        ValueError: if the instance is not linked to any parent object.
            (Previously this surfaced as an opaque UnboundLocalError because
            ``upload_dir`` was never assigned.)
    """
    if instance.equipo_id:
        upload_dir = os.path.join(
            'equipos', instance.equipo_id, 'anexos', 'files')
    elif instance.articulo_id:
        upload_dir = os.path.join(
            'articulos', instance.articulo_id, 'anexos', 'files')
    elif instance.orden_trabajo_id:
        upload_dir = os.path.join(
            'ordenes', instance.orden_trabajo_id, 'anexos', 'files')
    else:
        raise ValueError(
            'attachment instance has no equipo_id, articulo_id or '
            'orden_trabajo_id; cannot build a file upload path')
    return os.path.join(upload_dir, filename)
class UnsupportedMediaPathException(Exception):
    """Raised when a resource URI cannot be resolved under MEDIA or STATIC roots."""
    pass
def fetch_resources(uri, rel):
    """
    Callback to allow xhtml2pdf/reportlab to retrieve Images,Stylesheets, etc.
    `uri` is the href attribute from the html link element.
    `rel` gives a relative path, but it's not used here.
    """
    # Map URL-space prefixes onto filesystem roots.
    if uri.startswith(settings.MEDIA_URL):
        path = os.path.join(settings.MEDIA_ROOT,
                            uri.replace(settings.MEDIA_URL, ""))
    elif uri.startswith(settings.STATIC_URL):
        # NOTE(review): unlike the fallback branch below, this branch never
        # verifies the file exists -- confirm that is intentional.
        path = os.path.join(settings.STATIC_ROOT,
                            uri.replace(settings.STATIC_URL, ""))
    else:
        # Fallback: treat the URI as static-root-relative (the replace() is a
        # no-op here since uri does not start with STATIC_URL), then try the
        # media root before giving up.
        path = os.path.join(settings.STATIC_ROOT,
                            uri.replace(settings.STATIC_URL, ""))
        if not os.path.isfile(path):
            path = os.path.join(settings.MEDIA_ROOT,
                                uri.replace(settings.MEDIA_URL, ""))
        if not os.path.isfile(path):
            raise UnsupportedMediaPathException(
                'media urls must start with %s or %s' % (
                    settings.MEDIA_ROOT, settings.STATIC_ROOT))
    return path
def render_to_pdf(template_src, context_dict=None):
    """Render a Django template to a PDF HttpResponse via xhtml2pdf.

    Args:
        template_src: template path understood by get_template().
        context_dict: optional template context dict. Defaults to an empty
            dict. (The previous signature used the mutable default
            ``context_dict={}``, which is shared across calls and can leak
            state between requests.)

    Returns:
        HttpResponse with content_type 'application/pdf', or None when
        xhtml2pdf reports a rendering error.
    """
    if context_dict is None:
        context_dict = {}
    template = get_template(template_src)
    html = template.render(context_dict)
    result = BytesIO()
    # fetch_resources resolves MEDIA_URL/STATIC_URL references to local files.
    pdf = pisa.pisaDocument(BytesIO(html.encode("utf-8")), result,
                            link_callback=fetch_resources)
    if not pdf.err:
        return HttpResponse(result.getvalue(), content_type='application/pdf')
    return None
|
#!/bin/bash
# Wrap ssh with sshpass so the password is supplied once via the environment
# (SSHPASS) instead of interactively on every connection attempt.
#
# Fixed: the script used `#!/bin/sh`, but `read -s -p` is a bashism that
# breaks on POSIX-only shells such as dash -- the shebang must be bash.

echo "You're going to run (with our additions) ssh $@"

# -s: do not echo the password; -p: print the prompt on the same line.
read -s -p "Password to use: " SSHPASS
echo    # -s suppresses the user's newline; terminate the prompt line cleanly
export SSHPASS

# Force password (keyboard-interactive) authentication so sshpass can drive it.
sshpass -e ssh -o PreferredAuthentications=keyboard-interactive -o PubkeyAuthentication=no "$@"

# Do not leave the password in this shell's environment.
unset SSHPASS
|
def max_value(arr):
    """Return the largest element of arr using divide and conquer.

    Args:
        arr: non-empty sequence of comparable values.

    Raises:
        ValueError: if arr is empty (mirrors built-in max()). Previously an
            empty input recursed forever (arr[:0] == arr[0:] == []) and died
            with RecursionError.
    """
    if not arr:
        raise ValueError("max_value() arg is an empty sequence")
    if len(arr) == 1:
        return arr[0]
    mid = len(arr) // 2
    # Recurse on each half and combine the two partial maxima.
    left_max = max_value(arr[:mid])
    right_max = max_value(arr[mid:])
    return max(left_max, right_max)
<reponame>heySeattleW/wxshop<filename>platform-shop/src/main/java/com/platform/controller/ActivityConponController.java<gh_stars>1-10
package com.platform.controller;
import java.util.List;
import java.util.Map;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.platform.entity.ActivityConponEntity;
import com.platform.service.ActivityConponService;
import com.platform.utils.PageUtils;
import com.platform.utils.Query;
import com.platform.utils.R;
/**
 * REST controller for coupon-activity (ActivityConpon) administration:
 * paged listing, detail lookup, create, update and batch delete.
 *
 * @author lipengjun
 * @date 2018-10-29 13:56:24
 */
@RestController
@RequestMapping("activityconpon")
public class ActivityConponController {
    @Autowired
    private ActivityConponService activityConponService;

    /**
     * Paged list of coupon activities.
     */
    @RequestMapping("/list")
    @RequiresPermissions("activityconpon:list")
    public R list(@RequestParam Map<String, Object> params) {
        // Fetch one page of rows plus the total count for the pager.
        Query query = new Query(params);
        List<ActivityConponEntity> activityConponList = activityConponService.queryList(query);
        int total = activityConponService.queryTotal(query);

        PageUtils pageUtil = new PageUtils(activityConponList, total, query.getLimit(), query.getPage());

        return R.ok().put("page", pageUtil);
    }

    /**
     * Detail view of a single coupon activity.
     */
    @RequestMapping("/info/{id}")
    @RequiresPermissions("activityconpon:info")
    public R info(@PathVariable("id") Integer id) {
        ActivityConponEntity activityConpon = activityConponService.queryObject(id);

        // Fixed: the response key used to be " activityConpon" (note the
        // leading space), so clients reading "activityConpon" never found it.
        return R.ok().put("activityConpon", activityConpon);
    }

    /**
     * Create a new coupon activity.
     */
    @RequestMapping("/save")
    @RequiresPermissions("activityconpon:save")
    public R save(@RequestBody ActivityConponEntity activityConpon) {
        activityConponService.save(activityConpon);

        return R.ok();
    }

    /**
     * Update an existing coupon activity.
     */
    @RequestMapping("/update")
    @RequiresPermissions("activityconpon:update")
    public R update(@RequestBody ActivityConponEntity activityConpon) {
        activityConponService.update(activityConpon);

        return R.ok();
    }

    /**
     * Delete the coupon activities with the given ids (batch).
     */
    @RequestMapping("/delete")
    @RequiresPermissions("activityconpon:delete")
    public R delete(@RequestBody Integer[] ids) {
        activityConponService.deleteBatch(ids);

        return R.ok();
    }

    /**
     * Unpaged list of all coupon activities matching the filter params.
     */
    @RequestMapping("/queryAll")
    public R queryAll(@RequestParam Map<String, Object> params) {
        List<ActivityConponEntity> list = activityConponService.queryList(params);

        return R.ok().put("list", list);
    }
}
|
/* tslint:disable */
///<reference path="../src/types/index.d.ts"/>
import * as OT from '@opentok/client';
import * as Promise from 'promise';
import {
primary as sessionCredentials,
faultyLogging as badLoggingCredentials,
faultyApi as badApiCredentials,
} from './credentials.json';
import {
NetworkTestError,
InvalidSessionCredentialsError,
MissingOpenTokInstanceError,
MissingSessionCredentialsError,
IncompleteSessionCredentialsError,
InvalidOnCompleteCallback,
InvalidOnUpdateCallback,
} from '../src/NetworkTest/errors';
import { ConnectToSessionTokenError, ConnectToSessionSessionIdError, ConnectivityError, ConnectToSessionError, PublishToSessionError } from '../src/NetworkTest/testConnectivity/errors';
import { ConnectToSessionError as QualityTestSessionError } from '../src/NetworkTest/testQuality/errors';
import { pick, head, nth } from '../src/util';
import NetworkTest from '../src/NetworkTest';
import { ConnectivityTestResults } from '../src/NetworkTest/testConnectivity/index';
import { QualityTestError } from '../src/NetworkTest/testQuality/errors/index';
import { Stats } from 'fs-extra';
// Shorthand aliases for the jasmine typings used by the custom matchers below.
type Util = jasmine.MatchersUtil;
type CustomMatcher = jasmine.CustomMatcher;
type EqualityTesters = jasmine.CustomEqualityTester[];

// Credential fixtures: one with an unexpected extra property, one
// syntactically valid but pointing at a nonexistent session.
const malformedCredentials = { apiKey: '1234', invalidProp: '1234', token: '1234' };
const badCredentials = { apiKey: '1234', sessionId: '1234', token: '1234' };

// Shared NetworkTest instances and pass-through callbacks reused by the suites.
const networkTest = new NetworkTest(OT, sessionCredentials);
const badCredentialsNetworkTest = new NetworkTest(OT, badCredentials);
const validOnUpdateCallback = (stats: OT.SubscriberStats) => stats;
const validOnCompleteCallback = (error?: Error, results?: any) => results;
// Custom jasmine matchers used throughout this spec.
const customMatchers: jasmine.CustomMatcherFactories = {
  // Passes when `actual` is an instance of the `expected` constructor.
  toBeInstanceOf: (util: Util, customEqualityTesters: EqualityTesters): CustomMatcher => {
    return {
      compare: (actual: any, expected: any): jasmine.CustomMatcherResult => {
        const pass: boolean = actual instanceof expected;
        const message: string = pass ? '' : `Expected ${actual} to be an instance of ${expected}`;
        return { pass, message };
      },
    };
  },
  // Passes when `actual` is a primitive boolean.
  toBeABoolean: (util: Util, customEqualityTesters: EqualityTesters): CustomMatcher => {
    return {
      compare: (actual: any, expected: any): jasmine.CustomMatcherResult => {
        const pass: boolean = typeof actual === 'boolean';
        // Fixed copy-paste defect: the failure message previously claimed
        // "to be an instance of ..." although this matcher checks for a boolean.
        const message: string = pass ? '' : `Expected ${actual} to be a boolean`;
        return { pass, message };
      },
    };
  },
};
describe('Network Test', () => {
beforeAll(() => {
jasmine.addMatchers(customMatchers);
});
it('its constructor requires OT and valid session credentials', () => {
expect(() => new NetworkTest(sessionCredentials)).toThrow(new MissingOpenTokInstanceError());
expect(() => new NetworkTest({}, sessionCredentials)).toThrow(new MissingOpenTokInstanceError());
expect(() => new NetworkTest(OT)).toThrow(new MissingSessionCredentialsError());
expect(() => new NetworkTest(OT, malformedCredentials)).toThrow(new IncompleteSessionCredentialsError());
expect(new NetworkTest(OT, sessionCredentials)).toBeInstanceOf(NetworkTest);
});
describe('Connectivity Test', () => {
it('validates its onComplete callback', () => {
expect(() => networkTest.testConnectivity('callback').toThrow(new InvalidOnCompleteCallback()))
expect(() => networkTest.testConnectivity(validOnCompleteCallback).not.toThrowError(NetworkTestError))
});
describe('Test Results', () => {
it('should contain success and failedTests properties', (done) => {
networkTest.testConnectivity()
.then((results: ConnectivityTestResults) => {
it('should contain a boolean success property', () => {
expect(results.success).toBeABoolean
});
it('should contain an array of failedTests', () => {
expect(results.failedTests).toBeInstanceOf(Array);
});
done();
});
}, 10000);
it('should return a failed test case if invalid session credentials are used', (done) => {
const validateResults = (results: ConnectivityTestResults) => {
expect(results.success).toBe(false);
expect(results.failedTests).toBeInstanceOf(Array);
const [initialFailure, secondaryFailure] = results.failedTests;
expect(initialFailure.type).toBe('messaging');
expect(initialFailure.error).toBeInstanceOf(ConnectToSessionError);
expect(secondaryFailure.type).toBe('media');
expect(secondaryFailure.error).toBeInstanceOf(PublishToSessionError);
};
const validateError = (error?: ConnectivityError) => {
expect(error).toBeUndefined();
};
badCredentialsNetworkTest.testConnectivity()
.then(validateResults)
.catch(validateError)
.finally(done);
});
it('should result in a failed test if the logging server cannot be reached', (done) => {
const badLoggingOT = {
...OT,
...{
properties: {
...OT.properties,
loggingURL: OT.properties.loggingURL.replace('tokbox', 'bad-tokbox')
}
}
};
const badLoggingNetworkTest = new NetworkTest(badLoggingOT, badLoggingCredentials)
badLoggingNetworkTest.testConnectivity()
.then((results: ConnectivityTestResults) => {
expect(results.failedTests).toBeInstanceOf(Array);
if (results.failedTests.find(f => f.type === 'logging')) {
done();
}
});
}, 10000);
it('should result in a failed test if the API server cannot be reached', (done) => {
const badApiOT = {
...OT,
...{
properties: {
...OT.properties,
apiURL: OT.properties.apiURL.replace('opentok', 'bad-opentok')
}
}
};
// Why is this necessary? (Is an old session still connected?)
OT.properties.apiURL = OT.properties.apiURL.replace('opentok', 'bad-opentok');
const badApiNetworkTest = new NetworkTest(badApiOT, badApiCredentials)
badApiNetworkTest.testConnectivity()
.then((results: ConnectivityTestResults) => {
expect(results.failedTests).toBeInstanceOf(Array);
if (results.failedTests.find(f => f.type === 'api')) {
done();
OT.properties.apiURL = OT.properties.apiURL.replace('bad-opentok', 'opentok');
}
OT.properties.apiURL = OT.properties.apiURL.replace('bad-opentok', 'opentok');
});
}, 10000);
});
describe('Quality Test', () => {
it('validates its onUpdate and onComplete callbacks', () => {
expect(() => networkTest.testQuality('callback').toThrow(new InvalidOnUpdateCallback()))
expect(() => networkTest.testQuality(validOnUpdateCallback, 'callback').toThrow(new InvalidOnCompleteCallback()))
expect(() => networkTest.testConnectivity(validOnUpdateCallback, validOnCompleteCallback).not.toThrowError(NetworkTestError))
});
it('should return an error if invalid session credentials are used', (done) => {
const validateResults = (results: QualityTestResults) => {
expect(results).toBe(undefined);
};
const validateError = (error?: QualityTestError) => {
expect(error).toBeInstanceOf(QualityTestSessionError);
};
badCredentialsNetworkTest.testQuality()
.then(validateResults)
.catch(validateError)
.finally(done);
});
it('should return valid test results or an error', (done) => {
const validateResults = (results: QualityTestResults) => {
const { mos, audio, video } = results;
expect(mos).toEqual(jasmine.any(Number));
expect(audio.bitrate).toEqual(jasmine.any(Number));
expect(audio.supported).toEqual(jasmine.any(Boolean));
expect(audio.reason || '').toEqual(jasmine.any(String));
expect(audio.packetLossRatio).toEqual(jasmine.any(Number));
expect(video.bitrate).toEqual(jasmine.any(Number));
expect(video.supported).toEqual(jasmine.any(Boolean));
expect(video.reason || '').toEqual(jasmine.any(String));
expect(video.packetLossRatio).toEqual(jasmine.any(Number));
expect(video.frameRate).toEqual(jasmine.any(Number));
expect(video.recommendedResolution).toEqual(jasmine.any(String));
expect(video.recommendedFrameRate).toEqual(jasmine.any(Number));
};
const validateError = (error?: QualityTestError) => {
expect(error).toBe(QualityTestError);
};
const onUpdate = (stats: Stats) => console.info('Subscriber stats:', stats);
networkTest.testQuality(onUpdate)
.then(validateResults)
.catch(validateError)
.finally(done);
}, 40000);
});
});
});
|
<reponame>tessa-/zscripts
/* ///////////////////////// LEGAL NOTICE ///////////////////////////////
This file is part of ZScripts,
a modular script framework for Pokemon Online server scripting.
Copyright (C) 2013 <NAME>, aka "ArchZombie" / "ArchZombie0x", <<EMAIL>>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
/////////////////////// END LEGAL NOTICE /////////////////////////////// */
({
    // Ticks observed since the server's public listing was last refreshed.
    counter: 0,

    // Presumably invoked periodically by the ZScripts host (verify against
    // the scheduler). After 60 ticks, re-register the server on the public
    // list by toggling it private -> public, unless it is deliberately private.
    step: function ()
    {
        var ticks = this.counter++;
        if (ticks < 60 || sys.isServerPrivate()) {
            return;
        }
        sys.makeServerPublic(false);
        sys.makeServerPublic(true);
        this.counter = 0;
    }
});
|
import { IPlugin } from './plugins/IPlugin'
import { IEnvoy } from './plugins/IEnvoy'
import { ForgeConfig, ForgeOptions } from './core/ForgeOptions'
import { ForgeBuilder } from './core/ForgeBuilder'
import { BuildRecord, ExportRecord } from './core/BuildRecord'
import { IBuildState } from './core/BuildState'
import { IStep, StepInfo } from './core/Step'
import { GlobalPlugins } from './plugins/ForgeGlobalPlugins'
import { ForgeBuffer } from './plugins/ForgeBuffer'
import { ForgeStream } from './plugins/ForgeStream'
import { ForgeFn } from './plugins/ForgeFn'
import { ForgeHttpPlugin } from './plugins/ForgeHttpPlugin'
export * from './streams'
export * from './traits'
export * from './utils'
export {
BuildRecord,
ExportRecord,
ForgeOptions,
IBuildState,
IEnvoy,
IPlugin,
IStep,
StepInfo
}
//-- Global Plugins
// Register the built-in plugins under their canonical ":name" keys so every
// build can resolve them without explicit configuration.
GlobalPlugins.register(':buffer', new ForgeBuffer())
GlobalPlugins.register(':stream', new ForgeStream())
GlobalPlugins.register(':fn', new ForgeFn())
GlobalPlugins.register(':http', new ForgeHttpPlugin())
/**
 * Run a Forge build from either an in-memory config object or the path of a
 * config module (which is dynamically imported).
 */
export async function build(config: ForgeConfig | string, options: ForgeOptions = new ForgeOptions()): Promise<BuildRecord> {
    let resolvedConfig: ForgeConfig | string | undefined

    switch (typeof config) {
        case 'string':
            // A string is treated as a module path to import.
            resolvedConfig = await import(config)
            break
        case 'object':
            resolvedConfig = config
            break
        default:
            throw new Error(`Unsupported config type used ${typeof config}. Expected a string or an object`)
    }

    if (resolvedConfig == null) {
        throw new Error(`Could not resolve 'config'. Ensure the path or Object is valid.`)
    }

    const normalizedOptions = await ForgeConfig.normalize(resolvedConfig, options)
    const builder = new ForgeBuilder(normalizedOptions, options)
    builder.on('rate', onRate)
    return builder.build()
}
// Rate-event listener registered by build(); intentionally a no-op for now.
function onRate(obj) {
}
# Extract the EMR cluster id recorded in id.txt (strip the JSON quotes).
id=`jq .ClusterId id.txt | tr -d '"'`
# Resolve the cluster's master public DNS name.
DNS=`aws emr describe-cluster --cluster-id $id | jq .Cluster.MasterPublicDnsName | tr -d '"'`
if [ ! -z "$DNS" ]; then
    # Zeppelin runs on the master node, port 8890.
    ZEPPELIN_URL="http://${DNS}:8890/"
    echo "Connect to zeppelin by ${ZEPPELIN_URL}"
fi
# Open a SOCKS proxy to the cluster so the browser can reach its web UIs.
aws emr socks --cluster-id $id --key-pair-file ~/.ssh/is-qa-ap-southeast-2.pem
|
<gh_stars>0
module ApplicationsHelper
  # Human-readable sentence describing a permission scope identifier.
  # Unknown scopes yield nil, exactly like the original case expression.
  def scope_to_sentence(scope)
    {
      'upload_run' => 'Upload runs on your behalf',
      'delete_run' => 'Delete and/or disown your runs',
      'manage_race' => 'Manage your participation in races'
    }[scope]
  end
end
|
echo "The current working directory: $PWD"
# Move from the script's directory into the sibling `target` build directory.
cd ..
cd target
echo "The current working directory: $PWD"
# Run the packaged transformation job.
java -jar input-data-transform-1.0.jar
require File.dirname(__FILE__) + '/../spec_helper'
# Legacy-style (RSpec 1.x: mock_model / stub! / should_receive) controller
# spec covering the full REST surface of TypesController -- index, show, new,
# edit, create, update and destroy -- in both HTML and XML flavours.
describe TypesController do
  describe "handling GET /types" do
    before(:each) do
      @type = mock_model(Type)
      Type.stub!(:find).and_return([@type])
    end

    def do_get
      get :index
    end

    it "should be successful" do
      do_get
      response.should be_success
    end

    it "should render index template" do
      do_get
      response.should render_template('index')
    end

    it "should find all types" do
      Type.should_receive(:find).with(:all).and_return([@type])
      do_get
    end

    it "should assign the found types for the view" do
      do_get
      assigns[:types].should == [@type]
    end
  end

  describe "handling GET /types.xml" do
    before(:each) do
      @type = mock_model(Type, :to_xml => "XML")
      Type.stub!(:find).and_return(@type)
    end

    def do_get
      @request.env["HTTP_ACCEPT"] = "application/xml"
      get :index
    end

    it "should be successful" do
      do_get
      response.should be_success
    end

    it "should find all types" do
      Type.should_receive(:find).with(:all).and_return([@type])
      do_get
    end

    it "should render the found types as xml" do
      @type.should_receive(:to_xml).and_return("XML")
      do_get
      response.body.should == "XML"
    end
  end

  describe "handling GET /types/1" do
    before(:each) do
      @type = mock_model(Type)
      Type.stub!(:find).and_return(@type)
    end

    def do_get
      get :show, :id => "1"
    end

    it "should be successful" do
      do_get
      response.should be_success
    end

    it "should render show template" do
      do_get
      response.should render_template('show')
    end

    it "should find the type requested" do
      Type.should_receive(:find).with("1").and_return(@type)
      do_get
    end

    it "should assign the found type for the view" do
      do_get
      assigns[:type].should equal(@type)
    end
  end

  describe "handling GET /types/1.xml" do
    before(:each) do
      @type = mock_model(Type, :to_xml => "XML")
      Type.stub!(:find).and_return(@type)
    end

    def do_get
      @request.env["HTTP_ACCEPT"] = "application/xml"
      get :show, :id => "1"
    end

    it "should be successful" do
      do_get
      response.should be_success
    end

    it "should find the type requested" do
      Type.should_receive(:find).with("1").and_return(@type)
      do_get
    end

    it "should render the found type as xml" do
      @type.should_receive(:to_xml).and_return("XML")
      do_get
      response.body.should == "XML"
    end
  end

  describe "handling GET /types/new" do
    before(:each) do
      @type = mock_model(Type)
      Type.stub!(:new).and_return(@type)
    end

    def do_get
      get :new
    end

    it "should be successful" do
      do_get
      response.should be_success
    end

    it "should render new template" do
      do_get
      response.should render_template('new')
    end

    it "should create an new type" do
      Type.should_receive(:new).and_return(@type)
      do_get
    end

    it "should not save the new type" do
      @type.should_not_receive(:save)
      do_get
    end

    it "should assign the new type for the view" do
      do_get
      assigns[:type].should equal(@type)
    end
  end

  describe "handling GET /types/1/edit" do
    before(:each) do
      @type = mock_model(Type)
      Type.stub!(:find).and_return(@type)
    end

    def do_get
      get :edit, :id => "1"
    end

    it "should be successful" do
      do_get
      response.should be_success
    end

    it "should render edit template" do
      do_get
      response.should render_template('edit')
    end

    it "should find the type requested" do
      # Consistency fix: assert the requested id is passed to find, matching
      # the equivalent expectations in the show/update/destroy suites.
      Type.should_receive(:find).with("1").and_return(@type)
      do_get
    end

    it "should assign the found Type for the view" do
      do_get
      assigns[:type].should equal(@type)
    end
  end

  describe "handling POST /types" do
    before(:each) do
      @type = mock_model(Type, :to_param => "1")
      Type.stub!(:new).and_return(@type)
    end

    describe "with successful save" do
      def do_post
        @type.should_receive(:save).and_return(true)
        post :create, :type => {}
      end

      it "should create a new type" do
        Type.should_receive(:new).with({}).and_return(@type)
        do_post
      end

      it "should redirect to the new type" do
        do_post
        response.should redirect_to(type_url("1"))
      end
    end

    describe "with failed save" do
      def do_post
        @type.should_receive(:save).and_return(false)
        post :create, :type => {}
      end

      it "should re-render 'new'" do
        do_post
        response.should render_template('new')
      end
    end
  end

  describe "handling PUT /types/1" do
    before(:each) do
      @type = mock_model(Type, :to_param => "1")
      Type.stub!(:find).and_return(@type)
    end

    describe "with successful update" do
      def do_put
        @type.should_receive(:update_attributes).and_return(true)
        put :update, :id => "1"
      end

      it "should find the type requested" do
        Type.should_receive(:find).with("1").and_return(@type)
        do_put
      end

      it "should update the found type" do
        do_put
        assigns(:type).should equal(@type)
      end

      it "should assign the found type for the view" do
        do_put
        assigns(:type).should equal(@type)
      end

      it "should redirect to the type" do
        do_put
        response.should redirect_to(type_url("1"))
      end
    end

    describe "with failed update" do
      def do_put
        @type.should_receive(:update_attributes).and_return(false)
        put :update, :id => "1"
      end

      it "should re-render 'edit'" do
        do_put
        response.should render_template('edit')
      end
    end
  end

  describe "handling DELETE /types/1" do
    before(:each) do
      @type = mock_model(Type, :destroy => true)
      Type.stub!(:find).and_return(@type)
    end

    def do_delete
      delete :destroy, :id => "1"
    end

    it "should find the type requested" do
      Type.should_receive(:find).with("1").and_return(@type)
      do_delete
    end

    it "should call destroy on the found type" do
      @type.should_receive(:destroy)
      do_delete
    end

    it "should redirect to the types list" do
      do_delete
      response.should redirect_to(types_url)
    end
  end
end
#!/bin/bash
#bdereims@vmware.com
# Create an NSX logical switch inside an existing transport zone via the NSX
# REST API. Usage: <script> <name_of_transportzone> <name_of_logicalswitch>

# Load NSX endpoint/credential variables (NSX, NSX_ADMIN, NSX_PASSWD, NETWORK_DIR).
. ./env

[ "$1" == "" -o "$2" == "" ] && echo "usage: $0 <name_of_transportzone> <name_of_logicalswitch>" && exit 1

# Resolve the transport zone name to its NSX object id.
TZ_ID=$( ${NETWORK_DIR}/id_transportzone.sh ${1} )
[ "${TZ_ID}" == "" ] && echo "${1} doesn't exist!" && exit 1

# XML payload describing the new switch (HYBRID control plane, guest VLAN allowed).
NEW_LOGICALSWITCH="<virtualWireCreateSpec><name>${2}</name><description>Logical Switch via REST API</description><tenantId></tenantId><controlPlaneMode>HYBRID_MODE</controlPlaneMode><guestVlanAllowed>true</guestVlanAllowed></virtualWireCreateSpec>"

curl -s -k -u ${NSX_ADMIN}:${NSX_PASSWD} -H "Content-Type:text/xml;charset=UTF-8" -X POST --data "${NEW_LOGICALSWITCH}" https://${NSX}/api/2.0/vdn/scopes/${TZ_ID}/virtualwires 2>&1 > /dev/null

# Verify creation by reading the switch's properties back.
LS_PROPS=$( ${NETWORK_DIR}/props_logicialswitch.sh $1 $2 )
[ "${LS_PROPS}" != "" ] && echo "Logicial Switch '${2}' has been sucessfully created in '${1}'." && exit 0
echo "Logical Switch '${2}' does not seem to be created." && exit 1
<reponame>thebhaskara/luffy<filename>modules/service/index.js
// Aggregates the service layer: exposes the user service as `service.user`.
module.exports = {
    user: require('./userService')
}
import Interfaces = require("../../../Interfaces");
import MapHelpers = require("../MapHelpers");
import Error = require("../../../Error");
import IterableHelpers = require("../../IterableHelpers");
import HashMap = require("../HashMap");
// Bidirectional map backed by two HashMaps: the forward key->value mapping
// plus an inverse value->key mapping kept in parallel.
class HashBiMap<K extends Interfaces.IBaseObject, V extends Interfaces.IBaseObject> implements Interfaces.IBiMap<K, V> {
    // Both maps may be injected; inverse() relies on this to share state.
    constructor(private map: HashMap<K, V> = new HashMap<K, V>(), private inverseMap: HashMap<V, K> = new HashMap<V, K>()) {
    }

    containsKey(key: K): boolean {
        return this.map.containsKey(key);
    }

    hashCode(): number {
        // Deliberately unimplemented; calling this raises via Error.notImplemented().
        Error.notImplemented();
        return null;
    }

    equals(biMap: Interfaces.IBiMap<K, V>): boolean {
        return MapHelpers.equals(this, biMap);
    }

    get(key: K): V {
        return this.map.get(key);
    }

    // Removes the key and its paired inverse entry; returns the removed value
    // or null when the key is absent.
    remove(key: K): V {
        Error.checkNotNull(key);

        // Check for the key/value pair, return null if not found
        // NOTE(review): this truthiness test treats any falsy stored value as
        // "absent" -- confirm HashMap.get can never return a falsy value here.
        var value = this.map.get(key);
        if (!value) {
            return null;
        }

        this.map.remove(key);
        this.inverseMap.remove(value);
        return value;
    }

    set(key: K, value: V): V {
        // NOTE(review): if `key` was already mapped to a different value (or
        // `value` already paired with a different key), the superseded entries
        // are left behind in the other map, breaking the bijection invariant.
        // Likely needs stale-entry cleanup before inserting -- confirm against
        // HashMap's set/remove semantics before changing.
        this.inverseMap.set(value, key);
        return this.map.set(key, value);
    }

    size(): number {
        return this.map.size();
    }

    isEmpty(): boolean {
        return IterableHelpers.isEmpty(this);
    }

    inverse(): Interfaces.IBiMap<V, K> {
        // TODO: return a copy, or immutable/protected?
        // The returned view shares this instance's internal maps (not a snapshot).
        return new HashBiMap<V, K>(this.inverseMap, this.map);
    }

    clear(): void {
        this.map.clear();
        this.inverseMap.clear();
    }

    forEach(callback: Interfaces.IForEachMapCallback<K, V>): void {
        this.map.forEach(callback);
    }

    keys(): Interfaces.IIterator<K> {
        return this.map.keys();
    }

    values(): Interfaces.IIterator<V> {
        return this.map.values();
    }

    __iterator__(): Interfaces.IIterator<K> {
        return this.map.keys();
    }
}
export = HashBiMap;
#!/usr/bin/env bash
# Rebuild the Cordova Android projects for mainnet and testnet from scratch.
# Fixed: abort on the first failed command -- without `set -e`, a failed
# `cordova create` or `cd` would let the later `rm -R www` / `cp` run against
# the wrong directory.
set -e

rm -rf mobile
mkdir mobile
cd mobile

# Scaffold both flavours.
cordova create mainnet
cordova create testnet

cd mainnet
cordova plugin add phonegap-plugin-barcodescanner
# Replace the generated config/www with this repo's mainnet sources.
rm config.xml && rm -R www
cp -a ../../cordova/cordova_mobile/mainnet/* ./
cordova platform add android

cd ../testnet
cordova plugin add phonegap-plugin-barcodescanner
# Replace the generated config/www with this repo's testnet sources.
rm config.xml && rm -R www
cp -a ../../cordova/cordova_mobile/testnet/* ./
cordova platform add android
"""A package to retrieve data from Hong Kong Observatory"""
from hko.astro import astro
from hko.blog import blog
from hko.earthquake import earthquake
from hko.local_weather import local_weather
from hko.lunar_date import lunar_date
from hko.major_city_forecast import major_city_forecast
from hko.marine_forecast import marine_forecast
from hko.rainfall_nowcast import rainfall_nowcast
from hko.regional_weather import regional_weather
from hko.serval_days_weather_forecast import serval_days_weather_forecast
from hko.south_china_coastal_waters import south_china_coastal_waters
from hko.tide import tide
from hko.uv_index import uv_index
from hko.weather_warning import weather_warning
|
package main
import (
"fmt"
"time"
)
// waits for a and b to close.
// (A closed channel can always be received from, so this implementation is wrong.)
//func waitMany(a, b chan bool) {
// var aclosed, bclosed bool
// for !aclosed || !bclosed {
// select {
// case <-a:
// aclosed = true
// case <-b:
// bclosed = true
// }
// }
//}
func waitMany(a, b chan bool) {
for a != nil || b != nil {
select {
case <-a:
a = nil
case <-b:
b = nil
}
}
}
// main measures how long waitMany takes to observe a concurrent
// goroutine closing both channels, then prints the elapsed duration.
func main() {
	a := make(chan bool)
	b := make(chan bool)
	t0 := time.Now()
	go func() {
		close(a)
		close(b)
	}()
	waitMany(a, b)
	// Bug fix: the original format string had no trailing newline, so the
	// program's output ran straight into the next shell prompt.
	fmt.Printf("wait %v for waitMany\n", time.Since(t0))
}
|
#!/usr/bin/env bash
# Print a signature describing the current build toolchain: stack name,
# node/npm/yarn versions, and the PREBUILD flag. Compared against the
# cached signature to decide whether node_modules can be reused.
# yarn may be absent, hence the `2>/dev/null || true` guard.
create_signature() {
  echo "v2; ${STACK}; $(node --version); $(npm --version); $(yarn --version 2>/dev/null || true); ${PREBUILD}"
}
# Write the current toolchain signature into the cache so a later build
# can detect toolchain changes (see get_cache_status).
# $1 - cache directory (must already contain a node/ subdirectory)
save_signature() {
  local cache_dir="$1"
  create_signature > "$cache_dir/node/signature"
}
# Print the toolchain signature previously stored in the cache, or an
# empty line when no signature file exists yet.
# $1 - cache directory
load_signature() {
  local cache_dir="$1"
  local sig_file="$cache_dir/node/signature"
  if [ -f "$sig_file" ]; then
    cat "$sig_file"
  else
    echo ""
  fi
}
# Decide what to do with the existing build cache. Prints one of:
#   disabled      - user opted out via NODE_MODULES_CACHE
#   not-found     - no cached node/ directory present
#   new-signature - toolchain changed since the cache was written
#   valid         - cache can be reused
# $1 - cache directory
get_cache_status() {
  local cache_dir="$1"
  # NOTE: ${NODE_MODULES_CACHE:-true} is *executed* as a command here
  # ("true"/"false" are shell builtins); any other value behaves like a
  # command lookup — non-zero exit means the cache is treated as enabled.
  if ! ${NODE_MODULES_CACHE:-true}; then
    echo "disabled"
  elif ! test -d "$cache_dir/node/"; then
    echo "not-found"
  elif [ "$(create_signature)" != "$(load_signature "$cache_dir")" ]; then
    echo "new-signature"
  else
    echo "valid"
  fi
}
# Print the user-configured cache directories from package.json, one per
# line. Prefers the documented "cacheDirectories" key and falls back to
# the legacy "cache_directories" spelling. Relies on a read_json helper
# defined elsewhere in the buildpack.
# $1 - build directory containing package.json
get_cache_directories() {
  local build_dir="$1"
  local dirs1 dirs2
  dirs1=$(read_json "$build_dir/package.json" ".cacheDirectories | .[]?")
  dirs2=$(read_json "$build_dir/package.json" ".cache_directories | .[]?")
  if [ -n "$dirs1" ]; then
    echo "$dirs1"
  else
    echo "$dirs2"
  fi
}
# Move the default cached directories (node_modules, bower_components)
# from the cache back into the build directory.
#
# $1 - build directory
# $2 - cache directory
restore_default_cache_directories() {
  local build_dir=${1:-}
  local cache_dir=${2:-}

  # node_modules
  if [[ -e "$build_dir/node_modules" ]]; then
    echo "- node_modules is checked into source control and cannot be cached"
  elif [[ -e "$cache_dir/node/cache/node_modules" ]]; then
    echo "- node_modules"
    mkdir -p "$(dirname "$build_dir/node_modules")"
    mv "$cache_dir/node/cache/node_modules" "$build_dir/node_modules"
  else
    echo "- node_modules (not cached - skipping)"
  fi

  # bower_components, should be silent if it is not in the cache.
  # Bug fix: the original printed "- bower_components" but never moved the
  # cached directory back, so anything stored by
  # save_default_cache_directories was silently discarded.
  if [[ -e "$cache_dir/node/cache/bower_components" ]]; then
    echo "- bower_components"
    if [[ ! -e "$build_dir/bower_components" ]]; then
      mkdir -p "$(dirname "$build_dir/bower_components")"
      mv "$cache_dir/node/cache/bower_components" "$build_dir/bower_components"
    fi
  fi
}
# Move each user-configured cache directory (see get_cache_directories)
# from the cache back into the build directory. Paths already present in
# the build, or absent from the cache, are skipped with a note.
#
# $1 - build directory
# $2 - cache directory
# $3 - newline-separated list of relative cache paths
restore_custom_cache_directories() {
  local cache_directories
  local build_dir=${1:-}
  local cache_dir=${2:-}
  # Parse the input string with multiple lines: "a\nb\nc" into an array
  mapfile -t cache_directories <<< "$3"
  echo "Loading ${#cache_directories[@]} from cacheDirectories (package.json):"
  for cachepath in "${cache_directories[@]}"; do
    if [ -e "$build_dir/$cachepath" ]; then
      echo "- $cachepath (exists - skipping)"
    else
      if [ -e "$cache_dir/node/cache/$cachepath" ]; then
        echo "- $cachepath"
        # Recreate intermediate directories for nested paths like "a/b/c".
        mkdir -p "$(dirname "$build_dir/$cachepath")"
        mv "$cache_dir/node/cache/$cachepath" "$build_dir/$cachepath"
      else
        echo "- $cachepath (not cached - skipping)"
      fi
    fi
  done
}
# Wipe the node cache and recreate an empty skeleton for the next save.
# $1 - cache directory
clear_cache() {
  local cache_dir="$1"
  rm -rf "$cache_dir/node"
  # mkdir -p creates the intermediate "node" directory as well, so a
  # single call rebuilds the whole skeleton.
  mkdir -p "$cache_dir/node/cache"
}
# Copy the default directories (node_modules, bower_components) from the
# build into the cache for the next run. Uses cp -a (not mv) so the build
# output keeps its dependencies. mcount/meta_set are metrics/metadata
# helpers defined elsewhere in the buildpack.
#
# $1 - build directory
# $2 - cache directory
save_default_cache_directories() {
  local build_dir=${1:-}
  local cache_dir=${2:-}

  # node_modules
  if [[ -e "$build_dir/node_modules" ]]; then
    echo "- node_modules"
    mkdir -p "$cache_dir/node/cache/node_modules"
    cp -a "$build_dir/node_modules" "$(dirname "$cache_dir/node/cache/node_modules")"
  else
    # this can happen if there are no dependencies
    mcount "cache.no-node-modules"
    echo "- node_modules (nothing to cache)"
  fi

  # bower_components
  if [[ -e "$build_dir/bower_components" ]]; then
    mcount "cache.saved-bower-components"
    meta_set "cached-bower-components" "true"
    echo "- bower_components"
    mkdir -p "$cache_dir/node/cache/bower_components"
    cp -a "$build_dir/bower_components" "$(dirname "$cache_dir/node/cache/bower_components")"
  fi

  meta_set "node-custom-cache-dirs" "false"
}
# Copy each user-configured cache directory from the build into the
# cache. Mirrors restore_custom_cache_directories; meta_set is a metadata
# helper defined elsewhere in the buildpack.
#
# $1 - build directory
# $2 - cache directory
# $3 - newline-separated list of relative cache paths
save_custom_cache_directories() {
  local cache_directories
  local build_dir=${1:-}
  local cache_dir=${2:-}
  # Parse the input string with multiple lines: "a\nb\nc" into an array
  mapfile -t cache_directories <<< "$3"
  echo "Saving ${#cache_directories[@]} cacheDirectories (package.json):"
  for cachepath in "${cache_directories[@]}"; do
    if [ -e "$build_dir/$cachepath" ]; then
      echo "- $cachepath"
      mkdir -p "$cache_dir/node/cache/$cachepath"
      cp -a "$build_dir/$cachepath" "$(dirname "$cache_dir/node/cache/$cachepath")"
    else
      echo "- $cachepath (nothing to cache)"
    fi
  done
  meta_set "node-custom-cache-dirs" "true"
}
|
<reponame>trunksbomb/Cyclic
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (C) 2014-2018 <NAME> (aka Lothrazar)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.lothrazar.cyclicmagic.tweak.recipe;
import com.lothrazar.cyclicmagic.config.IHasConfig;
import com.lothrazar.cyclicmagic.registry.RecipeRegistry;
import com.lothrazar.cyclicmagic.registry.module.BaseModule;
import com.lothrazar.cyclicmagic.util.Const;
import net.minecraft.block.BlockStone;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraftforge.common.config.Configuration;
/**
 * Registers optional "new and altered" vanilla crafting recipes: reverse
 * block-to-item recipes, simplified dispenser/repeater/minecart recipes,
 * the notch apple, melon slices, and player skulls. Every tweak is
 * individually toggleable through the mod config file.
 */
public class RecipeChangerModule extends BaseModule implements IHasConfig {

  // One toggle per optional recipe tweak; populated in syncConfig().
  private boolean playerSkull;
  private boolean simpleDispenser;
  private boolean repeaterSimple;
  private boolean minecartsSimple;
  private boolean notchApple;
  private boolean melonToSlice;
  private boolean snowBlocksToBalls;
  private boolean quartzBlocksToItem;
  private boolean glowstoneBlockToDust;
  private boolean netherwartBlockReverse;

  /**
   * Reads every recipe toggle from the "recipes" config category,
   * creating the config entries (default true, with descriptions) on
   * first run.
   */
  @Override
  public void syncConfig(Configuration config) {
    String category = Const.ConfigCategory.recipes;
    config.setCategoryComment(category, "New and altered recipes");
    playerSkull = config.get(category, "Player Skulls",
        true, "Create a player skull by combining wither, skeleton, zombie, and creeper skulls").getBoolean();
    simpleDispenser = config.get(category, "Simple Dispenser",
        true, "Craft a dispenser with string instead of a bow").getBoolean();
    repeaterSimple = config.get(category, "Simple Repeater",
        true, "Craft repeaters using sticks and redstone in place of redstone torches").getBoolean();
    minecartsSimple = config.get(category, "Simple Minecarts",
        true, "Craft the minecart combinations using five iron as well as minecarts").getBoolean();
    notchApple = config.get(category, "Notch Apple",
        true, "Craft a notch apple with golden blocks as usual").getBoolean();
    melonToSlice = config.get(category, "Melon Block Slices",
        true, "Craft a Melon block into nine slices").getBoolean();
    // Fix: the original re-assigned `category` to the same constant here;
    // the redundant assignment has been removed.
    snowBlocksToBalls = config.get(category, "SnowBlockBalls",
        true, "Craft Snow blocks back into snowballs").getBoolean();
    quartzBlocksToItem = config.get(category, "QuartzBlockToItem",
        true, "Craft Quartz blocks back to the items").getBoolean();
    glowstoneBlockToDust = config.get(category, "GlowstoneBlockToDust",
        true, "Craft Glowstone blocks back to dust").getBoolean();
    netherwartBlockReverse = config.get(category, "NetherwartBlockReverse",
        true, "Craft Netherwart blocks back to item").getBoolean();
  }

  /**
   * Registers every recipe whose config flag is enabled. Runs once during
   * mod pre-initialization, after syncConfig has populated the flags.
   */
  @Override
  public void onPreInit() {
    if (glowstoneBlockToDust) {
      glowstoneBlockToDust();
    }
    if (quartzBlocksToItem) {
      quartzBlocksItem();
    }
    if (snowBlocksToBalls) {
      snowBlocksBalls();
    }
    if (playerSkull) {
      playerSkull();
    }
    if (simpleDispenser) {
      simpleDispenser();
    }
    if (repeaterSimple) {
      repeaterSimple();
    }
    if (minecartsSimple) {
      minecartsSimple();
    }
    if (notchApple) {
      notchApple();
    }
    if (melonToSlice) {
      melonToSlice();
    }
    if (netherwartBlockReverse) {
      netherwartBlockReverse();//bone block reverse is already in game, why not this
    }
    // https://github.com/PrinceOfAmber/SamsPowerups/blob/master/Recipes/src/main/java/com/lothrazar/samsrecipes/RecipeRegistry.java
  }

  /** Nether wart block -> 9 nether wart items. */
  private void netherwartBlockReverse() {
    RecipeRegistry.addShapelessRecipe(new ItemStack(Items.NETHER_WART, 9),
        new ItemStack(Blocks.NETHER_WART_BLOCK)); // nether_wart_block
  }

  /** Glowstone block -> 4 glowstone dust. */
  private void glowstoneBlockToDust() {
    RecipeRegistry.addShapelessRecipe(new ItemStack(Items.GLOWSTONE_DUST, 4),
        new ItemStack(Blocks.GLOWSTONE));
  }

  /** Snow block -> 4 snowballs. */
  private void snowBlocksBalls() {
    RecipeRegistry.addShapelessRecipe(new ItemStack(Items.SNOWBALL, 4),
        new ItemStack(Blocks.SNOW));
  }

  /** Quartz block -> 4 quartz items. */
  private void quartzBlocksItem() {
    RecipeRegistry.addShapelessRecipe(new ItemStack(Items.QUARTZ, 4),
        new ItemStack(Blocks.QUARTZ_BLOCK));
  }

  /** Melon block -> 9 melon slices. */
  private void melonToSlice() {
    RecipeRegistry.addShapelessRecipe(new ItemStack(Items.MELON, 9),
        new ItemStack(Blocks.MELON_BLOCK));
  }

  /** Notch (enchanted golden) apple from 8 gold blocks around an apple. */
  private void notchApple() {
    // https://www.reddit.com/r/minecraftsuggestions/comments/4d20g5/bring_back_the_notch_apple_crafting_recipe/
    RecipeRegistry.addShapedRecipe(new ItemStack(Items.GOLDEN_APPLE, 1, 1), "ggg", "gag", "ggg", 'g', new ItemStack(Blocks.GOLD_BLOCK), 'a', new ItemStack(Items.APPLE));
  }

  /** 4 player skulls from one of each mob skull (wither/skeleton/zombie/creeper). */
  private void playerSkull() {
    RecipeRegistry.addShapelessRecipe(new ItemStack(Items.SKULL, 4, Const.skull_player),
        new ItemStack(Items.SKULL, 1, Const.skull_wither),
        new ItemStack(Items.SKULL, 1, Const.skull_skeleton),
        new ItemStack(Items.SKULL, 1, Const.skull_zombie),
        new ItemStack(Items.SKULL, 1, Const.skull_creeper));
  }

  /** Repeater from sticks + redstone instead of redstone torches. */
  private void repeaterSimple() {
    RecipeRegistry.addShapedRecipe(new ItemStack(Items.REPEATER),
        "r r", "srs", "ttt",
        't', new ItemStack(Blocks.STONE, 1, BlockStone.EnumType.STONE.ordinal()), 's', new ItemStack(Items.STICK), 'r', new ItemStack(Items.REDSTONE));
  }

  /** Combination minecarts crafted directly from iron + payload block. */
  private void minecartsSimple() {
    // normally you would need the minecart created in a different step. this is
    // faster
    RecipeRegistry.addShapedRecipe(new ItemStack(Items.CHEST_MINECART), "   ", "ici", "iii", 'i', Items.IRON_INGOT, 'c', Blocks.CHEST);
    RecipeRegistry.addShapedRecipe(new ItemStack(Items.TNT_MINECART), "   ", "ici", "iii", 'i', Items.IRON_INGOT, 'c', Blocks.TNT);
    RecipeRegistry.addShapedRecipe(new ItemStack(Items.HOPPER_MINECART), "   ", "ici", "iii", 'i', Items.IRON_INGOT, 'c', Blocks.HOPPER);
    RecipeRegistry.addShapedRecipe(new ItemStack(Items.FURNACE_MINECART), "   ", "ici", "iii", 'i', Items.IRON_INGOT, 'c', Blocks.FURNACE);
  }

  /** Dispenser from cobblestone + string + redstone (no bow required). */
  private void simpleDispenser() {
    RecipeRegistry.addShapedRecipe(new ItemStack(Blocks.DISPENSER),
        "ccc", "csc", "crc",
        'c', Blocks.COBBLESTONE, 's', Items.STRING, 'r', Items.REDSTONE);
  }
}
|
<gh_stars>1-10
package com.cartotype.navigatorappdemo;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.RadioButton;
import android.widget.RadioGroup;
/**
* A simple screen to change the default display distance units.
* Can be extended to support more user settings.
*
* @author <NAME> 2013-03-25
*
*/
public class SettingsActivity extends Activity {

  // Radio group holding the metric/imperial choice.
  private RadioGroup radioUnitsGroup;
  //private RadioButton radioUnitsButton;
  // Button that applies the selection and closes this screen.
  private Button btnDisplay;

  //For logging
  private static final String TAG = "Settings";

  /**
   * Inflates the settings layout, pre-selects the radio button that
   * matches the currently configured units, and wires up the confirm
   * button.
   *
   * NOTE(review): assumes the launching Intent always carries a "units"
   * String extra; getExtras() would be null (NPE) otherwise — confirm
   * all callers supply it.
   */
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.settings);
    // Set units radio button to current value
    if (getIntent().getExtras().getString("units").equals("imperial")) {
      RadioButton rb1 = (RadioButton) findViewById(R.id.radioUnitsImperial);
      rb1.setChecked(true);
    }
    addListenerOnButton();
  }

  /**
   * Attaches the click handler that reads the selected units, returns
   * the choice to the calling activity via the result Intent, and then
   * finishes this screen.
   */
  public void addListenerOnButton() {
    radioUnitsGroup = (RadioGroup) findViewById(R.id.radioUnits);
    btnDisplay = (Button) findViewById(R.id.settings_btnDisplay);
    btnDisplay.setOnClickListener(new OnClickListener() {
      @Override
      public void onClick(View v) {
        // Default to metric unless the imperial button is checked.
        String units = "metric";
        // Check which radio button was clicked
        switch(radioUnitsGroup.getCheckedRadioButtonId()) {
        case R.id.radioUnitsMetric:
          break;
        case R.id.radioUnitsImperial:
          units = "imperial";
          break;
        default:
          break;
        }
        Log.d(TAG,"User wants to change display units");
        // Use Android Intent to send data (the new map) back to parent
        Intent resultIntent = new Intent();
        resultIntent.putExtra("units", units);
        setResult(Activity.RESULT_OK, resultIntent);
        // And quit this screen
        finish();
      }
    });
  }
}
package todolist
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestFileStore loads todos from the todos.json fixture that lives next
// to this test and checks the first entry's subject.
//
// NOTE(review): the error from store.Load() is discarded; a missing or
// malformed todos.json would panic on the index below instead of failing
// with a useful message.
func TestFileStore(t *testing.T) {
	assert := assert.New(t)
	store := &FileStore{FileLocation: "todos.json"}
	todos, _ := store.Load()
	// Third argument is the (empty) failure message.
	assert.Equal(todos[0].Subject, "this is the first subject", "")
}
// TestSave round-trips the fixture: load todos.json and write it back.
//
// NOTE(review): nothing is asserted here — the test only fails if
// Load/Save panic; consider checking Save's return value (if any) and
// the Load error.
func TestSave(t *testing.T) {
	store := &FileStore{FileLocation: "todos.json"}
	todos, _ := store.Load()
	store.Save(todos)
}
|
#!/bin/bash
# Docker entrypoint: run a shell when one was explicitly requested,
# otherwise run Eleventy against the configured output directory.

# Output directory for the generated site (override with the OUTPUT env var).
OUTPUT=${OUTPUT:-/site}

case "$1" in
  sh|bash|/bin/bash|/bin/sh)
    # The caller asked for a shell: pass the arguments through untouched.
    set -- "$@"
    ;;
  *)
    # Default: treat every argument as an extra flag for eleventy.
    # Bug fix: quote "${OUTPUT}" so output paths containing spaces are not
    # split into multiple words.
    set -- /usr/local/bin/eleventy --output="${OUTPUT}" "$@"
    ;;
esac

# Replace this shell with the target command so signals reach it directly.
exec "$@"
<reponame>infinitiessoft/skyport-api
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport.async.service.compute;
import javax.annotation.Nonnull;
import org.dasein.cloud.AccessControlledService;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.compute.AffinityGroup;
import org.dasein.cloud.compute.AffinityGroupCreateOptions;
import org.dasein.cloud.compute.AffinityGroupFilterOptions;
import org.dasein.cloud.compute.AffinityGroupSupport;
import com.infinities.skyport.async.AsyncResult;
/**
 * Asynchronous counterpart of Dasein's {@link AffinityGroupSupport}:
 * every operation returns an {@link AsyncResult} instead of blocking on
 * the cloud provider call.
 */
public interface AsyncAffinityGroupSupport extends AccessControlledService {

	/**
	 * Creates an affinity group in the cloud
	 *
	 * @param options
	 *            the options used when creating the affinity group
	 * @return the provider ID of the affinity group
	 * @throws InternalException
	 *             an error occurred within the Dasein Cloud implementation
	 *             creating the affinity group
	 * @throws CloudException
	 *             an error occurred within the service provider creating the
	 *             affinity group
	 */
	public @Nonnull AsyncResult<AffinityGroup> create(@Nonnull AffinityGroupCreateOptions options) throws InternalException,
			CloudException;

	/**
	 * Deletes the affinity group from the cloud if the affinity group is not
	 * empty this method should error
	 *
	 * @param affinityGroupId
	 *            the ID of the affinity group to be deleted
	 * @throws InternalException
	 *             an error occurred within the Dasein Cloud implementation
	 *             deleting the affinity group
	 * @throws CloudException
	 *             an error occurred within the service provider deleting the
	 *             affinity group
	 */
	public AsyncResult<Void> delete(@Nonnull String affinityGroupId) throws InternalException, CloudException;

	/**
	 * Retrieves the details of the specified Affinity Group from the cloud
	 *
	 * @param affinityGroupId
	 *            the ID of the affinity group to be retrieved
	 * @return the Dasein AffinityGroup object
	 * @throws InternalException
	 *             an error occurred within the Dasein Cloud implementation
	 *             retrieving the affinity group
	 * @throws CloudException
	 *             an error occurred within the service provider retrieving the
	 *             affinity group
	 */
	public @Nonnull AsyncResult<AffinityGroup> get(@Nonnull String affinityGroupId) throws InternalException, CloudException;

	/**
	 * Lists all of the affinity groups visible to the current account
	 *
	 * @param options
	 *            Filtering options for the list
	 * @return All the affinity groups visible to current account
	 * @throws InternalException
	 *             an error occurred within the Dasein Cloud implementation
	 *             listing the affinity groups
	 * @throws CloudException
	 *             an error occurred within the service provider listing the
	 *             affinity groups
	 */
	public @Nonnull AsyncResult<Iterable<AffinityGroup>> list(@Nonnull AffinityGroupFilterOptions options)
			throws InternalException, CloudException;

	/**
	 * Modifies details of the specified affinity group
	 *
	 * @param affinityGroupId
	 *            the ID of the affinity group to be modified
	 * @param options
	 *            the options containing the modified data
	 * @return the newly modified Dasein AffinityGroup object
	 * @throws InternalException
	 *             an error occurred within the Dasein Cloud implementation
	 *             modifying the affinity group
	 * @throws CloudException
	 *             an error occurred within the service provider modifying the
	 *             affinity group
	 */
	public AsyncResult<AffinityGroup> modify(@Nonnull String affinityGroupId, @Nonnull AffinityGroupCreateOptions options)
			throws InternalException, CloudException;

	/**
	 * @return the underlying synchronous {@link AffinityGroupSupport}
	 *         implementation this asynchronous facade delegates to
	 */
	AffinityGroupSupport getSupport();
}
|
<gh_stars>1-10
# frozen_string_literal: true
require "spec_helper"
require_relative "./../../lib/bu_pr/git"
# Specs for BuPr::Git#current_branch: it must extract the branch marked
# with "* " from raw `git branch`-style output.
describe BuPr::Git do
  let(:git) { described_class.new }
  describe "#current_branch" do
    # Simulated `git branch` output; "develop" is the current branch.
    let(:dummy_branches) {
      "cosme-literal\n* develop\n handlable-channels\n rails5\n puma"
    }
    before do
      # Stub the raw branch listing so no real git repository is needed.
      expect(git).to \
        receive(:branches).and_return(dummy_branches)
    end
    subject { git.current_branch }
    it { is_expected.to eq "develop" }
  end
end
|
// +build linux,amd64 darwin,amd64
package db
import (
"database/sql"
"fmt"
"log"
ds "github.com/jimmyislive/gocve/internal/pkg/ds"
"github.com/lib/pq"
// make linting happy
_ "github.com/mattn/go-sqlite3"
)
// PopulateDB populates the DB with cve data from the recordsList
// PopulateDB (re)creates the cve table and bulk-inserts the parsed CVE
// records. Each record is expected to have exactly six fields:
// cveid, status, description, reference, phase, category.
//
// NOTE(review): log.Fatal exits the process immediately, so the deferred
// db.Close()/stmt.Close() never run on those paths — consider returning
// the error instead.
// NOTE(review): the $1..$6 placeholders are PostgreSQL-style; confirm the
// sqlite3 driver accepts them (the commented-out "?" form is the
// canonical SQLite syntax).
// NOTE(review): insert failures are printed and skipped (best-effort);
// the function still returns nil in that case.
func PopulateDB(cfg *ds.Config, recordsList [][]string) error {
	fmt.Println("Inserting data into DB...")
	var (
		err error
		db  *sql.DB
	)
	// Open either a local SQLite file or a PostgreSQL connection,
	// depending on the configured DB type.
	if cfg.DBtype == "sqlite" {
		db, err = sql.Open("sqlite3", cfg.DBname)
	} else {
		psqlInfo := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable", cfg.DBhost, cfg.DBport, cfg.DBuser, cfg.Password, cfg.DBname)
		db, err = sql.Open("postgres", psqlInfo)
	}
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	// Recreate the table from scratch: create if missing, then empty it.
	sqlStmt := `
	create table if not exists cve (cveid text, status text, description text, reference text, phase text, category text);
	delete from cve;
	`
	_, err = db.Exec(sqlStmt)
	if err != nil {
		// Only logged; a failure here will surface again when Prepare runs.
		log.Printf("%q: %s\n", err, sqlStmt)
	}
	//stmt, err := db.Prepare("insert into cve(cveid, status, description, reference, phase, category) values(?, ?, ?, ?, ?, ?)")
	stmt, err := db.Prepare("insert into cve(cveid, status, description, reference, phase, category) values($1, $2, $3, $4, $5, $6)")
	if err != nil {
		log.Fatal(err)
	}
	defer stmt.Close()
	for i := 0; i < len(recordsList); i++ {
		_, err = stmt.Exec(recordsList[i][0], recordsList[i][1], recordsList[i][2], recordsList[i][3], recordsList[i][4], recordsList[i][5])
		if err != nil {
			//log.Fatal(err)
			fmt.Println(recordsList[i][0])
			fmt.Println(err)
		}
	}
	fmt.Println("DB Created")
	return nil
}
// ListCVE lists all available CVEs from the DB, returned as
// [cveid, description] pairs.
//
// NOTE(review): errors terminate the process via log.Fatal, which skips
// the deferred rows.Close()/db.Close(); consider returning an error.
func ListCVE(cfg *ds.Config) [][]string {
	var (
		err     error
		db      *sql.DB
		records [][]string
	)
	// Open either a local SQLite file or a PostgreSQL connection.
	if cfg.DBtype == "sqlite" {
		db, err = sql.Open("sqlite3", cfg.DBname)
	} else {
		psqlInfo := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable", cfg.DBhost, cfg.DBport, cfg.DBuser, cfg.Password, cfg.DBname)
		db, err = sql.Open("postgres", psqlInfo)
	}
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	rows, err := db.Query("SELECT cveid, description FROM cve")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()
	for rows.Next() {
		var cveid, description string
		err = rows.Scan(&cveid, &description)
		if err != nil {
			log.Fatal(err)
		}
		record := []string{cveid, description}
		records = append(records, record)
	}
	// get any error encountered during iteration
	err = rows.Err()
	if err != nil {
		log.Fatal(err)
	}
	return records
}
// SearchCVE searches for a pattern in the CVE DB and returns matching
// [cveid, description] pairs; a record matches when its description or
// id contains searchText (SQL LIKE, wrapped in %...%).
//
// The search text is embedded as a quoted SQL literal (pq.QuoteLiteral)
// and the table name as a quoted identifier, guarding against injection.
// NOTE(review): pq's quoting targets PostgreSQL; confirm the produced
// SQL is also valid for the sqlite path (e.g. E'...' escape strings are
// Postgres-only).
// NOTE(review): errors terminate the process via log.Fatal, skipping the
// deferred Close calls.
func SearchCVE(cfg *ds.Config, searchText string) [][]string {
	var (
		records [][]string
		err     error
		db      *sql.DB
	)
	// Open either a local SQLite file or a PostgreSQL connection.
	if cfg.DBtype == "sqlite" {
		db, err = sql.Open("sqlite3", cfg.DBname)
	} else {
		psqlInfo := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable", cfg.DBhost, cfg.DBport, cfg.DBuser, cfg.Password, cfg.DBname)
		db, err = sql.Open("postgres", psqlInfo)
	}
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	// Wrap the user's text in % wildcards for a substring LIKE match.
	searchTextLikeStr := fmt.Sprintf("%%%s%%", searchText)
	stmt := fmt.Sprintf("SELECT cveid, description FROM %s where description LIKE %s OR cveid LIKE %s ", pq.QuoteIdentifier(cfg.Tablename), pq.QuoteLiteral(searchTextLikeStr), pq.QuoteLiteral(searchTextLikeStr))
	rows, err := db.Query(stmt)
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()
	for rows.Next() {
		var cveid, description string
		err = rows.Scan(&cveid, &description)
		if err != nil {
			log.Fatal(err)
		}
		record := []string{cveid, description}
		records = append(records, record)
	}
	// get any error encountered during iteration
	err = rows.Err()
	if err != nil {
		log.Fatal(err)
	}
	return records
}
// GetCVE returns details of a specific CVE as
// [cveid, status, description, reference, phase, category].
// When the id is not found (or any scan error occurs), the error is
// printed and an empty slice is returned.
//
// NOTE(review): errors other than sql.ErrNoRows are also only printed;
// callers cannot distinguish "not found" from "query failed".
func GetCVE(cfg *ds.Config, cveid string) []string {
	var (
		record                                          []string
		status, description, reference, phase, category string
		err                                             error
		db                                              *sql.DB
	)
	// Open either a local SQLite file or a PostgreSQL connection.
	if cfg.DBtype == "sqlite" {
		db, err = sql.Open("sqlite3", cfg.DBname)
	} else {
		psqlInfo := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable", cfg.DBhost, cfg.DBport, cfg.DBuser, cfg.Password, cfg.DBname)
		db, err = sql.Open("postgres", psqlInfo)
	}
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	// Table name and id are embedded via pq quoting (see SearchCVE notes).
	stmt := fmt.Sprintf("SELECT status, description, reference, phase, category FROM %s where cveid=%s", pq.QuoteIdentifier(cfg.Tablename), pq.QuoteLiteral(cveid))
	row := db.QueryRow(stmt)
	switch err := row.Scan(&status, &description, &reference, &phase, &category); err {
	case nil:
		record = append(record, cveid, status, description, reference, phase, category)
	default:
		fmt.Println(err)
	}
	return record
}
|
import matplotlib.pyplot as plt

# Bar heights to display.
values = [20, 40, 10, 30, 45, 10]

# One bar per value, positioned at x = 0 .. len(values) - 1.
positions = range(len(values))
plt.bar(positions, values)

# Render the chart in an interactive window.
plt.show()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.