text stringlengths 1 1.05M |
|---|
#!/bin/sh
# Build helper: compiles a generated field-arithmetic C++ unit with clang++.
# All curve/field parameters (modulus, limb layout, a24, bit width, ...) are
# injected as preprocessor -D definitions; any extra arguments (typically the
# source file and -o target) are forwarded verbatim via "$@".
set -eu
clang++ -fbracket-depth=999999 -march=native -mbmi2 -mtune=native -std=gnu++11 -O3 -flto -fuse-ld=lld -fomit-frame-pointer -fwrapv -Wno-attributes -fno-strict-aliasing -Da24_hex='0x3039' -Da24_val='12345' -Da_minus_two_over_four_array='{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30,0x39}' -Dbitwidth='32' -Dlimb_weight_gaps_array='{25,24,24,24,25,24,24,24,25,24,24,24}' -Dmodulus_array='{0x07,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xed}' -Dmodulus_bytes_val='37' -Dmodulus_limbs='12' -Dq_mpz='(1_mpz<<291) - 19' "$@"
|
#!/bin/bash
# SLURM batch job: renders frames 138-141 of Star-collapse-ntsc.blend with a
# single-threaded (-t 1) headless (-b) Blender run. Render output is discarded;
# the two `date` calls bracket the render with nanosecond timestamps so the
# wall-clock duration can be computed from the .out file.
#SBATCH --exclude=hermes[1-4],trillian[1-3],artemis[1-7],qdata[1-8],nibbler[1-4],slurm[1-5]
#SBATCH --output=granger/60_nodes/script_33_138_141.out
#SBATCH --error=granger/60_nodes/script_33_138_141.err
#SBATCH --job-name="138-141"
hostname
date +%s%N  # start timestamp (ns since epoch)
time blender -t 1 -b Star-collapse-ntsc.blend -s 138 -e 141 -a &> /dev/null
date +%s%N  # end timestamp (ns since epoch)
|
<reponame>gilsonmello/fitness<filename>resources/assets/backend/js/tests/flexibility/flexibility.js
/**
* Created by Junnyor on 14/08/2017.
*/
$(function () {
    /**
     * Attaches jQuery Validate + SweetAlert confirmation to a form.
     *
     * On a valid submit the user is asked to confirm; on confirm the form is
     * serialized and POSTed (with the CSRF header) to its own "action" URL.
     * The server is expected to answer the literal string 'true' on success.
     *
     * Deduplicated: the two original handlers (#save_flexitests and
     * #save_wells_bank) were byte-identical except for the form selector.
     *
     * @param {string} formSelector - jQuery selector of the form element.
     */
    function bindConfirmedSubmit(formSelector) {
        $(formSelector).validate({
            submitHandler: function (frm) {
                swal({
                    title: "Vocรช realmente deseja atualizar os dados?",
                    type: "info",
                    showCancelButton: true,
                    cancelButtonText: "Cancelar",
                    confirmButtonColor: "#00a65a",
                    confirmButtonText: "Salvar",
                    closeOnConfirm: false,
                    showLoaderOnConfirm: true
                }, function () {
                    var data = $(formSelector).serialize();
                    var action = $(formSelector).attr('action');
                    $.ajax({
                        url: action,
                        method: 'POST',
                        dataType: 'Json',
                        headers: {
                            'X-CSRF-TOKEN': $('meta[name="_token"]').attr('content')
                        },
                        data: data,
                        success: function (data) {
                            if (data == 'true')
                                swal("Atualizado!", "", "success");
                            else
                                swal("Oops...", "Ocorreu algum erro, revise os dados informados!", "error");
                        },
                        error: function (data) {
                            swal("Oops...", "Ocorreu algum erro, revise os dados informados!", "error");
                        }
                    });
                });
            }
        });
    }

    bindConfirmedSubmit('#save_flexitests');
    bindConfirmedSubmit('#save_wells_bank');
});
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Traffic shaping: caps outbound Educoin P2P traffic (TCP port 6212) at
# ${LIMIT} via an HTB qdisc, while traffic to/from the local address ranges
# below stays unshaped. Requires root and the `tc`/`iptables`/`ip6tables`
# tools; marks set by iptables are matched by the `fw` tc filters below.
#network interface on which to limit traffic
IF="eth0"
#limit of the network interface in question
LINKCEIL="1gbit"
#limit outbound Educoin protocol traffic to this rate
LIMIT="160kbit"
#defines the IPv4 address space for which you wish to disable rate limiting
LOCALNET_V4="192.168.0.0/16"
#defines the IPv6 address space for which you wish to disable rate limiting
LOCALNET_V6="fe80::/10"
#delete existing rules
# (prints a harmless error if no qdisc is attached yet)
tc qdisc del dev ${IF} root
#add root class
tc qdisc add dev ${IF} root handle 1: htb default 10
#add parent class
tc class add dev ${IF} parent 1: classid 1:1 htb rate ${LINKCEIL} ceil ${LINKCEIL}
#add our two classes. one unlimited, another limited
tc class add dev ${IF} parent 1:1 classid 1:10 htb rate ${LINKCEIL} ceil ${LINKCEIL} prio 0
tc class add dev ${IF} parent 1:1 classid 1:11 htb rate ${LIMIT} ceil ${LIMIT} prio 1
#add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev ${IF} parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev ${IF} parent 1: protocol ip prio 2 handle 2 fw classid 1:11
if [ ! -z "${LOCALNET_V6}" ] ; then
    # v6 cannot have the same priority value as v4
    tc filter add dev ${IF} parent 1: protocol ipv6 prio 3 handle 1 fw classid 1:10
    tc filter add dev ${IF} parent 1: protocol ipv6 prio 4 handle 2 fw classid 1:11
fi
#delete any existing rules
#disable for now
#ret=0
#while [ $ret -eq 0 ]; do
#    iptables -t mangle -D OUTPUT 1
#    ret=$?
#done
#limit outgoing traffic to and from port 6212. but not when dealing with a host on the local network
#   (defined by $LOCALNET_V4 and $LOCALNET_V6)
#   --set-mark marks packages matching these criteria with the number "2" (v4)
#   --set-mark marks packages matching these criteria with the number "4" (v6)
#   these packets are filtered by the tc filter with "handle 2"
#   this filter sends the packages into the 1:11 class, and this class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 6212 ! -d ${LOCALNET_V4} -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 6212 ! -d ${LOCALNET_V4} -j MARK --set-mark 0x2
if [ ! -z "${LOCALNET_V6}" ] ; then
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --dport 6212 ! -d ${LOCALNET_V6} -j MARK --set-mark 0x4
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --sport 6212 ! -d ${LOCALNET_V6} -j MARK --set-mark 0x4
fi
|
def divide(a, b):
    """Divide *a* by *b* using true division.

    Args:
        a: Numerator.
        b: Denominator.

    Returns:
        The quotient ``a / b``, or ``None`` when ``b`` is zero. On division
        by zero a message is printed instead of propagating the exception,
        preserving the original best-effort behavior; the ``None`` return is
        now explicit rather than an implicit fall-through.
    """
    try:
        return a / b
    except ZeroDivisionError:
        print("Cannot divide by zero!")
        return None
<reponame>cschladetsch/KAI
#pragma once
#include <KAI/Language/Common/AstNodeBase.h>
#include <KAI/Language/Tau/TauToken.h>
TAU_BEGIN
// Enumerates the node kinds that can appear in a Tau-language AST and
// defines the concrete AST node type used by the Tau parser.
struct TauAstEnumType
{
    // All node kinds the Tau grammar can produce.
    enum Enum
    {
        None = 0,
        Namespace = 1,
        Class = 2,
        Property = 3,
        Method = 4,
        // req.
        TokenType = 5,
        // a file containing a collection of one or more namespaces
        Module = 6,
        Argument = 7,
        Arglist = 8,
    };

    // AST node specialized for Tau tokens. Also carries a KAI runtime Object
    // associated with the node (purpose not visible here — set by consumers).
    struct Node : AstNodeBase<TauToken, TauAstEnumType>
    {
        typedef AstNodeBase<TauToken, TauAstEnumType> Parent;
        using typename Parent::Token;

        KAI_NAMESPACE(Object) object;

        // Default node is of kind None.
        Node() : Parent(Enum::None) { }
        // Node of a given kind with no token.
        Node(Enum e) : Parent(e) { }
        // Node of a given kind anchored to a source token.
        Node(Enum e, Token &tok) : Parent(e, tok) { }
        // Node whose kind is derived from the token (see AstNodeBase).
        Node(Parent::Token tok) : Parent(tok) { }
    };

    // Returns a human-readable name for the given enum value.
    static const char *ToString(Enum);
};
TAU_END
|
package com.launchacademy.springeditanddelete.seeders;
import com.launchacademy.springeditanddelete.models.Contractor;
import com.launchacademy.springeditanddelete.repositories.ContractorRepository;
import java.util.ArrayList;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
@Component
public class ContractorSeeder implements CommandLineRunner {

    @Autowired
    private ContractorRepository contractorRepo;

    /**
     * Seeds two sample contractors on application startup, but only when the
     * repository is empty so repeated startups do not create duplicates.
     */
    @Override
    public void run(String... args) throws Exception {
        List<Contractor> seedData = new ArrayList<Contractor>();
        seedData.add(buildContractor("Jon", "Snow", "<EMAIL>", "02110", 40));
        seedData.add(buildContractor("Samwell", "Tarly", "<EMAIL>", "02111", 20));

        if (contractorRepo.count() == 0) {
            for (Contractor seeded : seedData) {
                contractorRepo.save(seeded);
            }
        }
    }

    // Assembles a Contractor instance from its individual field values.
    private Contractor buildContractor(String firstName, String lastName,
            String emailAddress, String postalCode, int weeklyHours) {
        Contractor contractor = new Contractor();
        contractor.setFirstName(firstName);
        contractor.setLastName(lastName);
        contractor.setEmailAddress(emailAddress);
        contractor.setPostalCode(postalCode);
        contractor.setWeeklyHoursAvailable(weeklyHours);
        return contractor;
    }
}
|
<gh_stars>1-10
const { Collection } = require("discord.js");
const fs = require("fs");
async function loadCommands(client) {
client.cache.commands = new Collection();
const commandFolders = fs.readdirSync("./commands");
for (const folder of commandFolders) {
const commandFiles = fs
.readdirSync(`./commands/${folder}`)
.filter((file) => file.split(".").pop() === "js");
for (const file of commandFiles) {
if (commandFiles.length <= 0) {
console.log("client couldn't find commands in commands folder.");
} else {
const command = require(`../commands/${folder}/${file}`);
if (command.disabled) continue;
client.cache.commands.set(command.name, command);
}
}
}
}
module.exports = {
loadCommands,
};
|
import requests
def send_message_to_clickatell(phone_number, message):
    """Send an SMS through Clickatell's legacy HTTP sendmsg API.

    Args:
        phone_number: Destination number (format as required by Clickatell).
        message: Text body of the SMS.

    Returns:
        Tuple ``(success, message_id)``: ``(True, id)`` when Clickatell
        answers ``"ID: <id>"``, otherwise ``(False, None)``. Network errors
        and timeouts are swallowed and reported as ``(False, None)``.
    """
    clickatell_api_url = "https://api.clickatell.com/http/sendmsg"
    # Removed an unused `api_key` local — this legacy endpoint authenticates
    # with api_id/user/password in the query string instead.
    payload = {
        "api_id": "your_api_id",  # Replace with your actual Clickatell API ID
        "user": "your_username",  # Replace with your actual Clickatell username
        "password": "your_password",  # Replace with your actual Clickatell password
        "to": phone_number,
        "text": message
    }
    try:
        # A timeout keeps the call from hanging forever if Clickatell is
        # unreachable; timeouts raise RequestException, handled below.
        response = requests.get(clickatell_api_url, params=payload, timeout=10)
    except requests.RequestException:
        return (False, None)
    if response.status_code != 200:
        return (False, None)
    # A success response body looks like "ID: <message-id>".
    response_data = response.text.split(":")
    if response_data[0] == "ID":
        return (True, response_data[1])
    return (False, None)
def send_message(phone_number, message):
    """Send *message* to *phone_number* via Clickatell, reporting on stdout.

    Fixed: the original used Python 2 ``print`` statements, which are syntax
    errors under Python 3; also replaced ``result == True`` with a plain
    truthiness test.
    """
    result, uid = send_message_to_clickatell(phone_number, message)
    if result:
        print("Message was sent successfully")
        print("Clickatell returned %s" % uid)
    else:
        print("Message was not sent")
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.config;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.config.ConcurrentMapConfiguration;
import com.netflix.config.DynamicPropertyUpdater;
import com.netflix.config.DynamicWatchedConfiguration;
import com.netflix.config.WatchedConfigurationSource;
import com.netflix.config.WatchedUpdateListener;
import com.netflix.config.WatchedUpdateResult;
/**
* Same as DynamicWatchedConfiguration but Disable delimiter parsing for string
*
* @see DynamicWatchedConfiguration
*/
@SuppressWarnings("unchecked")
public class DynamicWatchedConfigurationExt extends ConcurrentMapConfiguration implements WatchedUpdateListener {

    // When true, deletes reported by the source are ignored rather than
    // removed from this configuration.
    private final boolean ignoreDeletesFromSource;

    // Applies WatchedUpdateResult diffs onto this configuration.
    private final DynamicPropertyUpdater updater;

    private static final Logger LOGGER = LoggerFactory.getLogger(DynamicWatchedConfigurationExt.class);

    /**
     * Seeds this configuration from a full snapshot of the source, then
     * registers itself as a listener for subsequent updates.
     *
     * Key difference from {@code DynamicWatchedConfiguration}: delimiter
     * parsing is disabled, so string values are kept whole instead of being
     * split into lists on the configured delimiter.
     *
     * @param source the watched configuration source
     * @param ignoreDeletesFromSource whether to ignore deletes from the source
     * @param updater applies update results to this configuration
     */
    private DynamicWatchedConfigurationExt(WatchedConfigurationSource source, boolean ignoreDeletesFromSource,
        DynamicPropertyUpdater updater) {
        this.ignoreDeletesFromSource = ignoreDeletesFromSource;
        this.updater = updater;
        // Must be set before the initial snapshot is applied so values are
        // stored un-split.
        setDelimiterParsingDisabled(true);
        // get a current snapshot of the config source data
        try {
            Map<String, Object> currentData = source.getCurrentData();
            WatchedUpdateResult result = WatchedUpdateResult.createFull(currentData);
            updateConfiguration(result);
        } catch (final Exception exc) {
            // Best-effort: a failed snapshot leaves the configuration empty
            // but updates are still delivered via the listener below.
            LOGGER.error("could not getCurrentData() from the WatchedConfigurationSource", exc);
        }
        // add a listener for subsequent config updates
        source.addUpdateListener(this);
    }

    /** Creates a configuration that also applies deletes from the source. */
    public DynamicWatchedConfigurationExt(final WatchedConfigurationSource source) {
        this(source, false, new DynamicPropertyUpdater());
    }

    /** Callback invoked by the watched source whenever properties change. */
    @Override
    public void updateConfiguration(final WatchedUpdateResult result) {
        updater.updateProperties(result, this, ignoreDeletesFromSource);
    }
}
|
use rusqlite::{Connection, Result};
/// Creates the `users` and `posts` tables if they do not exist yet.
///
/// SQLite errors are converted into human-readable `String`s so callers do
/// not need to depend on `rusqlite::Error` directly. Both DDL statements are
/// idempotent (`CREATE TABLE IF NOT EXISTS`), so this is safe to call on
/// every startup.
pub fn init_tables(conn: &Connection) -> Result<(), String> {
    conn.execute(
        "CREATE TABLE IF NOT EXISTS users (
            id INTEGER PRIMARY KEY,
            username TEXT NOT NULL,
            email TEXT NOT NULL UNIQUE
        )",
        [],
    )
    .map_err(|err| format!("Error creating users table: {}", err))?;
    conn.execute(
        "CREATE TABLE IF NOT EXISTS posts (
            id INTEGER PRIMARY KEY,
            title TEXT NOT NULL,
            content TEXT NOT NULL,
            user_id INTEGER NOT NULL,
            FOREIGN KEY (user_id) REFERENCES users(id)
        )",
        [],
    )
    .map_err(|err| format!("Error creating posts table: {}", err))?;
    Ok(())
}
# this cast requires the ability to create 'datalad/publish-demo' on Github (must not exist yet)
# it also require SSH access to the URL demo.datalad.org (can be localhost)
# Screencast script: `say` prints narration and `run` executes/records the
# shown command — both helpers are provided by the casting framework, not
# defined here. The flow: create a dataset with a large file, publish the
# data to an SSH-reachable web server, mirror the repo on GitHub, then
# re-install from GitHub and fetch the data through the configured sibling.
say "Sharing is best done on a platform that many people visit. One of them is GitHub."
say "However, Git is not designed to handle large data files directly, and GitHub will refuse large files"
say "One can either use GitHub's own LFS tool ... or use DataLad to flexibly combine many possible data hosting solutions with a repository hosted on GitHub"
say "Here is how this looks..."
say "It starts with a dataset. We give it a description to more easily keep track of where data are."
run "datalad create demo --description \"original dataset location\""
run "cd demo"
say "For this demo, we are generating a large (600MB file) comprised of random data. A file of this size cannot be hosted on GitHub directly."
run "datalad run dd if=/dev/urandom of=big.dat bs=1M count=600"
say "Publishing this dataset in a way that allows anyone to simply install it from GitHub, AND get the big data file, requires two steps"
say "1. Host the data file at some publicly accessible location"
say "2. Configure DataLad to make sure that getting data from GitHub transparently requests from this other location instead"
say "Here we use a personal webserver with SSH access, but, in principle, any hosting solution supported by git-annex is equally suitable"
say "We create a remote sibling of our dataset under the name 'myserver' via SSH, and tell datalad to track it as a common data source that is available for any future installation of this dataset. Access to this location will happen via the given http:// URL, and --ui true tells to install DataLad web UI as on https://datasets.datalad.org. Note that /.git in the URL most likely to be necessary in your case."
run "datalad create-sibling -s myserver demo.datalad.org:public_html/publish-demo --ui true --as-common-datasrc demo-server --target-url http://demo.datalad.org/publish-demo/.git"
say "With this configuration in place, we can now create a repository on GitHub, and configure the remote sibling on the SSH server as a publication dependency"
run "datalad create-sibling-github --github-organization datalad --publish-depends myserver --access-protocol ssh publish-demo"
say "Let's quickly recap that the data file is just in our local dataset"
run "git annex whereis"
say "From now on, we can simply 'publish to GitHub' and DataLad will take care of the rest"
run "datalad publish --to github --transfer-data all"
say "We can confirm that our data file ended up on our server"
run "git annex whereis big.dat"
say "Now we simulate how it would look for a random person to obtain the data in this dataset from GitHub -- by installing into a new location, straight from GitHub (this does not require a GitHub account)"
run "cd ../"
run "datalad install -s git@github.com:datalad/publish-demo.git fromgh"
run "cd fromgh"
say "This dataset does not know how to access the original dataset location, only GitHub and our server"
run "git remote -v"
say "There is no further setup necessary, anyone can get the data -- if they have permission to access the URL of our own server"
run "datalad get big.dat"
run "ls -sLh big.dat"
say "Go publish!"
|
<filename>kakaoCard/src/main/java/com/kakaopay/cryp/SHA256Util.java
package com.kakaopay.cryp;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;
import org.springframework.context.annotation.Configuration;
import com.kakaopay.exception.CrypExcn;
import lombok.extern.slf4j.Slf4j;
/**
* ์ํธํ ๋ชจ๋
*
* @author kjy
* @since Create : 2020. 4. 17.
* @version 1.0
*/
@Configuration
@Slf4j
public class SHA256Util {
private final static String KEYNAME = "nfaator!plaeemo!";
private final static String ALGORITHM = "AES";
public static final String AES_ECB_NOPADDING = "AES/ECB/NoPadding";
public static String encrypt(String source) throws CrypExcn {
byte[] eArr = null;
try {
SecretKeySpec skeySpec = new SecretKeySpec(KEYNAME.getBytes(), ALGORITHM);
Cipher cipher = Cipher.getInstance(AES_ECB_NOPADDING);
cipher.init(Cipher.ENCRYPT_MODE, skeySpec);
eArr = cipher.doFinal(addPadding(source.getBytes()));
} catch (Exception e) {
log.error("์ํธํ ์๋ฌ({}) error :: {}", e.getMessage());
}
return fromHex(eArr);
}
public static String decrypt(final String source) throws CrypExcn {
byte[] eArr = null;
try {
Cipher cipher = Cipher.getInstance(AES_ECB_NOPADDING);
SecretKeySpec skeySpec = new SecretKeySpec(KEYNAME.getBytes(), ALGORITHM);
cipher.init(Cipher.DECRYPT_MODE, skeySpec);
eArr = removePadding(cipher.doFinal(toBytes(source)));
} catch (Exception e) {
log.error("๋ณตํธํ ์๋ฌ({}) error :: {}", e.getMessage());
}
return new String(eArr);
}
private static byte[] toBytes(final String pSource) {
StringBuffer buff = new StringBuffer(pSource);
int bCount = buff.length() / 2;
byte[] bArr = new byte[bCount];
for (int bIndex = 0; bIndex < bCount; bIndex++) {
bArr[bIndex] = (byte) Long.parseLong(buff.substring(2 * bIndex, (2 * bIndex) + 2), 16);
}
return bArr;
}
private static byte[] removePadding(final byte[] pBytes) {
int pCount = pBytes.length;
int index = 0;
boolean loop = true;
while (loop) {
if (index == pCount || pBytes[index] == 0x00) {
loop = false;
index--;
}
index++;
}
byte[] tBytes = new byte[index];
System.arraycopy(pBytes, 0, tBytes, 0, index);
return tBytes;
}
private static byte[] addPadding(final byte[] pBytes) {
int pCount = pBytes.length;
int tCount = pCount + (16 - (pCount % 16));
byte[] tBytes = new byte[tCount];
System.arraycopy(pBytes, 0, tBytes, 0, pCount);
for (int rIndex = pCount; rIndex < tCount; rIndex++) {
tBytes[rIndex] = 0x00;
}
return tBytes;
}
public static String fromHex(byte[] pBytes) {
int pCount = pBytes.length;
StringBuffer buff = new StringBuffer(pCount * 2);
for (int pIndex = 0; pIndex < pCount; pIndex++) {
if (((int) pBytes[pIndex] & 0xff) < 0x10) {
buff.append(0);
}
buff.append(Long.toString((int) pBytes[pIndex] & 0xff, 16));
}
return buff.toString();
}
}
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/cache/matcher/IntervalPropertyMatcher.java
package io.opensphere.core.cache.matcher;
import java.io.Serializable;
import io.opensphere.core.model.Accumulator;
/**
 * An object that knows how to match an interval property value.
 *
 * Implementations extend the point-wise {@code PropertyMatcher} contract with
 * interval-specific operations (overlap testing, bounding intervals,
 * accumulation) used by the cache query machinery.
 *
 * @param <T> The type of the property to be matched.
 */
public interface IntervalPropertyMatcher<T extends Serializable> extends PropertyMatcher<T>
{
    /**
     * Get an accumulator for values that could be matched.
     *
     * @return The accumulator.
     */
    Accumulator<T> getAccumulator();

    /**
     * Get a matcher equivalent to this one except that it matches interval
     * overlaps. If this matcher already matches interval overlaps, {@code this}
     * may be returned.
     *
     * @return The overlap matcher.
     */
    IntervalPropertyMatcher<T> getGroupMatcher();

    /**
     * Get the smallest interval that will overlap all property values that can
     * satisfy this matcher. If the interval is infinite, return {@code null}.
     *
     * @return The overlap interval.
     */
    T getMinimumOverlapInterval();

    /**
     * Get a simple interval property value that is at least large enough to
     * overlap all property values that can satisfy this matcher, but may be
     * larger. This may be used to do quick intersection elimination.
     *
     * @return The simplified bounds.
     */
    T getSimplifiedBounds();

    /**
     * Get if a value is indefinite.
     *
     * @param object The value.
     * @return {@code true} if the input value is indefinite
     */
    boolean isIndefinite(Object object);

    /**
     * Determine if another matcher's result set can overlap mine.
     * This is a conservative test: a {@code true} result means overlap is
     * possible, not guaranteed.
     *
     * @param other The other matcher.
     * @return {@code true} if the result sets may overlap.
     */
    boolean overlaps(IntervalPropertyMatcher<?> other);
}
|
#!/bin/bash
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Script to enter the chroot environment
SCRIPT_ROOT=$(readlink -f $(dirname "$0")/..)
. "${SCRIPT_ROOT}/common.sh" || exit 1
# Script must be run outside the chroot and as root.
assert_outside_chroot
assert_root_user
assert_kernel_version
# Define command line flags
# See http://code.google.com/p/shflags/wiki/Documentation10x
DEFINE_string chroot "$DEFAULT_CHROOT_DIR" \
"The destination dir for the chroot environment." "d"
DEFINE_string trunk "$GCLIENT_ROOT" \
"The source trunk to bind mount within the chroot." "s"
DEFINE_string build_number "" \
"The build-bot build number (when called by buildbot only)." "b"
DEFINE_string chrome_root "" \
"The root of your chrome browser source. Should contain a 'src' subdir."
DEFINE_string chrome_root_mount "/home/${SUDO_USER}/chrome_root" \
"The mount point of the chrome broswer source in the chroot."
DEFINE_string cache_dir "" "Directory to use for caching."
DEFINE_boolean official_build $FLAGS_FALSE \
"Set COREOS_OFFICIAL=1 for release builds."
DEFINE_boolean ssh_agent $FLAGS_TRUE "Import ssh agent."
DEFINE_boolean early_make_chroot $FLAGS_FALSE \
"Internal flag. If set, the command is run as root without sudo."
DEFINE_boolean verbose $FLAGS_FALSE "Print out actions taken"
# More useful help
FLAGS_HELP="USAGE: $0 [flags] [VAR=value] [-- command [arg1] [arg2] ...]
One or more VAR=value pairs can be specified to export variables into
the chroot environment. For example:
$0 FOO=bar BAZ=bel
If [-- command] is present, runs the command inside the chroot,
after changing directory to /${SUDO_USER}/trunk/src/scripts. Note that neither
the command nor args should include single quotes. For example:
$0 -- ./build_platform_packages.sh
Otherwise, provides an interactive shell.
"
CROS_LOG_PREFIX=cros_sdk:enter_chroot
SUDO_HOME=$(eval echo ~${SUDO_USER})
# Version of info from common.sh that only echos if --verbose is set.
debug() {
  # Print "$*" via common.sh's info() only when --verbose was given.
  if [ $FLAGS_verbose -eq $FLAGS_TRUE ]; then
    info "$*"
  fi
}
# Parse command line flags
FLAGS "$@" || exit 1
eval set -- "${FLAGS_ARGV}"
if [ $FLAGS_official_build -eq $FLAGS_TRUE ]; then
COREOS_OFFICIAL=1
fi
[ -z "${FLAGS_cache_dir}" ] && \
die "--cache_dir is required"
# Only now can we die on error. shflags functions leak non-zero error codes,
# so will die prematurely if 'switch_to_strict_mode' is specified before now.
# TODO: replace shflags with something less error-prone, or contribute a fix.
switch_to_strict_mode
# These config files are to be copied into chroot if they exist in home dir.
FILES_TO_COPY_TO_CHROOT=(
.gdata_cred.txt # User/password for Google Docs on chromium.org
.gdata_token # Auth token for Google Docs on chromium.org
.disable_build_stats_upload # Presence of file disables command stats upload
.netrc # May contain required source fetching credentials
.boto # Auth information for gsutil
.boto-key.p12 # Service account key for gsutil
.ssh/config # User may need this for fetching git over ssh
.ssh/known_hosts # Reuse existing known hosts
)
INNER_CHROME_ROOT=$FLAGS_chrome_root_mount # inside chroot
CHROME_ROOT_CONFIG="/var/cache/chrome_root" # inside chroot
FUSE_DEVICE="/dev/fuse"
# We can't use /var/lock because that might be a symlink to /run/lock outside
# of the chroot. Or /run on the host system might not exist.
LOCKFILE="${FLAGS_chroot}/.enter_chroot.lock"
MOUNTED_PATH=$(readlink -f "$FLAGS_chroot")
setup_mount() {
  # If necessary, mount $source in the host FS at $target inside the
  # chroot directory with $mount_args. We don't write to /etc/mtab because
  # these mounts are all contained within an unshare and are therefore
  # inaccessible to other namespaces (e.g. the host desktop system).
  #
  # $1: mount source (may be empty, e.g. for a remount of an existing target)
  # $2: extra mount arguments (e.g. "--bind", "-t proc", "-o remount,ro")
  # $3: target path inside the chroot
  local source="$1"
  local mount_args="-n $2"
  local target="$3"

  local mounted_path="${MOUNTED_PATH}$target"

  # MOUNT_CACHE holds the already-mounted paths (from /proc/mounts); skip
  # targets that are mounted so re-entering the chroot is idempotent.
  case " ${MOUNT_CACHE} " in
  *" ${mounted_path} "*)
    # Already mounted!
    ;;
  *)
    mkdir -p "${mounted_path}"
    # The args are left unquoted on purpose.
    if [[ -n ${source} ]]; then
      mount ${mount_args} "${source}" "${mounted_path}"
    else
      mount ${mount_args} "${mounted_path}"
    fi
    ;;
  esac
}
copy_into_chroot_if_exists() {
  # $1 is file path outside of chroot to copy to path $2 inside chroot.
  # No-op when $1 does not exist; cp -p preserves mode/ownership/timestamps.
  [ -e "$1" ] && cp -p "$1" "${FLAGS_chroot}/$2"
}
# Usage: promote_api_keys
# This takes care of getting the developer API keys into the chroot where
# chrome can build with them. It needs to take it from the places a dev
# is likely to put them, and recognize that older chroots may or may not
# have been used since the concept of keys got added, as well as before
# and after the developer decding to grab his own keys.
promote_api_keys() {
  # Copy the developer's Google API keys into the chroot so chrome can build
  # with them: prefer an existing ~/.googleapikeys, otherwise scrape the
  # relevant values out of ~/.gyp/include.gypi.
  local destination="${FLAGS_chroot}/home/${SUDO_USER}/.googleapikeys"
  # Don't disturb existing keys.  They could be set differently
  if [[ -s "${destination}" ]]; then
    return 0
  fi
  if [[ -r "${SUDO_HOME}/.googleapikeys" ]]; then
    cp -p "${SUDO_HOME}/.googleapikeys" "${destination}"
    if [[ -s "${destination}" ]] ; then
      info "Copied Google API keys into chroot."
    fi
  elif [[ -r "${SUDO_HOME}/.gyp/include.gypi" ]]; then
    # Extract google_api_key / google_default_client_{id,secret} assignments
    # from the gyp include file and rewrite them as "key: value," lines.
    local NAME="('google_(api_key|default_client_(id|secret))')"
    local WS="[[:space:]]*"
    local CONTENTS="('[^\\\\']*')"
    sed -nr -e "/^${WS}${NAME}${WS}[:=]${WS}${CONTENTS}.*/{s//\1: \4,/;p;}" \
      "${SUDO_HOME}/.gyp/include.gypi" | user_clobber "${destination}"
    if [[ -s "${destination}" ]]; then
      info "Put discovered Google API keys into chroot."
    fi
  fi
}
generate_locales() {
  # Make sure user's requested locales are available
  # http://crosbug.com/19139
  # And make sure en_US{,.UTF-8} are always available as
  # that what buildbot forces internally
  local l locales gen_locales=()
  locales=$(printf '%s\n' en_US en_US.UTF-8 ${LANG} \
    $LC_{ADDRESS,ALL,COLLATE,CTYPE,IDENTIFICATION,MEASUREMENT,MESSAGES} \
    $LC_{MONETARY,NAME,NUMERIC,PAPER,TELEPHONE,TIME} | \
    sort -u | sed '/^C$/d')
  for l in ${locales}; do
    if [[ ${l} == *.* ]]; then
      enc=${l#*.}
    else
      # Locales without an explicit encoding default to Latin-1.
      enc="ISO-8859-1"
    fi
    # Normalize spellings like "utf8"/"UTF8" to the canonical "UTF-8".
    case $(echo ${enc//-} | tr '[:upper:]' '[:lower:]') in
    utf8) enc="UTF-8";;
    esac
    gen_locales+=("${l} ${enc}")
  done
  if [[ ${#gen_locales[@]} -gt 0 ]] ; then
    # Force LC_ALL=C to workaround slow string parsing in bash
    # with long multibyte strings.  Newer setups have this fixed,
    # but locale-gen doesn't need to be run in any locale in the
    # first place, so just go with C to keep it fast.
    PATH="/usr/sbin:/usr/bin:/sbin:/bin" LC_ALL=C \
      chroot "${FLAGS_chroot}" locale-gen -q -u \
      -G "$(printf '%s\n' "${gen_locales[@]}")"
  fi
}
setup_env() {
(
flock 200
# Make the lockfile writable for backwards compatibility.
chown ${SUDO_UID}:${SUDO_GID} "${LOCKFILE}"
# Refresh system config files in the chroot.
for copy_file in /etc/{hosts,localtime,resolv.conf}; do
if [ -f "${copy_file}" ] ; then
rm -f "${FLAGS_chroot}${copy_file}"
install -C -m644 "${copy_file}" "${FLAGS_chroot}${copy_file}"
fi
done
fix_mtab "${FLAGS_chroot}"
debug "Mounting chroot environment."
MOUNT_CACHE=$(echo $(awk '{print $2}' /proc/mounts))
# The cros_sdk script created a new filesystem namespace but the system
# default (namely on systemd hosts) may be for everything to be shared.
# Using 'slave' means we see global changes but cannot change global state.
mount --make-rslave /
setup_mount none "-t proc" /proc
setup_mount none "-t sysfs" /sys
setup_mount /dev "--bind" /dev
setup_mount /dev/pts "--bind" /dev/pts
setup_mount tmpfs "-t tmpfs -o nosuid,nodev,mode=755" /run
if [[ -d /run/shm && ! -L /run/shm ]]; then
setup_mount /run/shm "--bind" /run/shm
fi
mkdir -p /run/user/${SUDO_UID}
chown ${SUDO_UID}:${SUDO_GID} /run/user/${SUDO_UID}
# Do this early as it's slow and only needs basic mounts (above).
generate_locales &
mkdir -p "${FLAGS_chroot}/${CHROOT_TRUNK_DIR}"
setup_mount "${FLAGS_trunk}" "--rbind" "${CHROOT_TRUNK_DIR}"
debug "Setting up referenced repositories if required."
REFERENCE_DIR=$(git config --file \
"${FLAGS_trunk}/.repo/manifests.git/config" \
repo.reference)
if [ -n "${REFERENCE_DIR}" ]; then
ALTERNATES="${FLAGS_trunk}/.repo/alternates"
# Ensure this directory exists ourselves, and has the correct ownership.
user_mkdir "${ALTERNATES}"
unset ALTERNATES
IFS=$'\n';
required=( $( sudo -u "${SUDO_USER}" -- \
"${FLAGS_trunk}/chromite/lib/rewrite_git_alternates.py" \
"${FLAGS_trunk}" "${REFERENCE_DIR}" "${CHROOT_TRUNK_DIR}" ) )
unset IFS
setup_mount "${FLAGS_trunk}/.repo/chroot/alternates" --bind \
"${CHROOT_TRUNK_DIR}/.repo/alternates"
# Note that as we're bringing up each referened repo, we also
# mount bind an empty directory over its alternates. This is
# required to suppress git from tracing through it- we already
# specify the required alternates for CHROOT_TRUNK_DIR, no point
# in having git try recursing through each on their own.
#
# Finally note that if you're unfamiliar w/ chroot/vfs semantics,
# the bind is visible only w/in the chroot.
user_mkdir ${FLAGS_trunk}/.repo/chroot/empty
position=1
for x in "${required[@]}"; do
base="${CHROOT_TRUNK_DIR}/.repo/chroot/external${position}"
setup_mount "${x}" "--bind" "${base}"
if [ -e "${x}/.repo/alternates" ]; then
setup_mount "${FLAGS_trunk}/.repo/chroot/empty" "--bind" \
"${base}/.repo/alternates"
fi
position=$(( ${position} + 1 ))
done
unset required position base
fi
unset REFERENCE_DIR
chroot_cache='/var/cache/chromeos-cache'
debug "Setting up shared cache dir directory."
user_mkdir "${FLAGS_cache_dir}"/distfiles/{target,host}
user_mkdir "${FLAGS_chroot}/${chroot_cache}"
setup_mount "${FLAGS_cache_dir}" "--bind" "${chroot_cache}"
# TODO(build): remove this as of 12/01/12.
# Because of how distfiles -> cache_dir was deployed, if this isn't
# a symlink, we *know* the ondisk pathways aren't compatible- thus
# fix it now.
distfiles_path="${FLAGS_chroot}/var/cache/distfiles"
if [ ! -L "${distfiles_path}" ]; then
# While we're at it, ensure the var is exported w/in the chroot; it
# won't exist if distfiles isn't a symlink.
p="${FLAGS_chroot}/etc/profile.d/chromeos-cachedir.sh"
rm -rf "${distfiles_path}"
ln -s chromeos-cache/distfiles "${distfiles_path}"
mkdir -p -m 775 "${p%/*}"
echo 'export CHROMEOS_CACHEDIR=${chroot_cache}' > "${p}"
chmod 0644 "${p}"
fi
user_mkdir "${FLAGS_chroot}/home/${SUDO_USER}/.ssh"
if [ $FLAGS_ssh_agent -eq $FLAGS_TRUE ]; then
# Clean up previous ssh agents.
rmdir "${FLAGS_chroot}"/tmp/ssh-* 2>/dev/null
if [ -n "${SSH_AUTH_SOCK}" -a -d "${SUDO_HOME}/.ssh" ]; then
# Don't try to bind mount the ssh agent dir if it has gone stale.
ASOCK=${SSH_AUTH_SOCK%/*}
if [ -d "${ASOCK}" ]; then
setup_mount "${ASOCK}" "--bind" "${ASOCK}"
fi
fi
fi
if [[ -d "$SUDO_HOME/.subversion" ]]; then
TARGET="/home/${SUDO_USER}/.subversion"
setup_mount "${SUDO_HOME}/.subversion" "--bind" "${TARGET}"
# Symbolic-link the .subversion directory so sandboxed subversion.class
# clients can use it.
for d in \
"${FLAGS_cache_dir}"/distfiles/{host,target}/svn-src/"${SUDO_USER}"; do
if [[ ! -L "${d}/.subversion" ]]; then
rm -rf "${d}/.subversion"
user_mkdir "${d}"
user_symlink /home/${SUDO_USER}/.subversion "${d}/.subversion"
fi
done
fi
# Mount GnuPG's data directory for signing uploads
if [[ -d "$SUDO_HOME/.gnupg" ]]; then
debug "Mounting GnuPG"
setup_mount "${SUDO_HOME}/.gnupg" "--bind" "/home/${SUDO_USER}/.gnupg"
# bind mount the gpg agent dir if available
GPG_AGENT_DIR="${GPG_AGENT_INFO%/*}"
if [[ -d "$GPG_AGENT_DIR" ]]; then
setup_mount "$GPG_AGENT_DIR" "--bind" "$GPG_AGENT_DIR"
fi
fi
# Mount additional directories as specified in .local_mounts file.
local local_mounts="${FLAGS_trunk}/src/scripts/.local_mounts"
if [[ -f ${local_mounts} ]]; then
info "Mounting local folders (read-only for safety concern)"
# format: mount_source
# or mount_source mount_point
# or # comments
local mount_source mount_point
while read mount_source mount_point; do
if [[ -z ${mount_source} ]]; then
continue
fi
# if only source is assigned, use source as mount point.
: ${mount_point:=${mount_source}}
debug " mounting ${mount_source} on ${mount_point}"
setup_mount "${mount_source}" "--bind" "${mount_point}"
# --bind can't initially be read-only so we have to do it via remount.
setup_mount "" "-o remount,ro" "${mount_point}"
done < <(sed -e 's:#.*::' "${local_mounts}")
fi
CHROME_ROOT="$(readlink -f "$FLAGS_chrome_root" || :)"
if [ -z "$CHROME_ROOT" ]; then
CHROME_ROOT="$(cat "${FLAGS_chroot}${CHROME_ROOT_CONFIG}" \
2>/dev/null || :)"
CHROME_ROOT_AUTO=1
fi
if [[ -n "$CHROME_ROOT" ]]; then
if [[ ! -d "${CHROME_ROOT}/src" ]]; then
error "Not mounting chrome source"
rm -f "${FLAGS_chroot}${CHROME_ROOT_CONFIG}"
if [[ ! "$CHROME_ROOT_AUTO" ]]; then
exit 1
fi
else
debug "Mounting chrome source at: $INNER_CHROME_ROOT"
echo $CHROME_ROOT > "${FLAGS_chroot}${CHROME_ROOT_CONFIG}"
setup_mount "$CHROME_ROOT" --bind "$INNER_CHROME_ROOT"
fi
fi
# Install fuse module. Skip modprobe when possible for slight
# speed increase when initializing the env.
if [ -c "${FUSE_DEVICE}" ] && ! grep -q fuse /proc/filesystems; then
modprobe fuse 2> /dev/null ||\
warn "-- Note: modprobe fuse failed. gmergefs will not work"
fi
# Fix permissions on ccache tree. If this is a fresh chroot, then they
# might not be set up yet. Or if the user manually `rm -rf`-ed things,
# we need to reset it. Otherwise, gcc itself takes care of fixing things
# on demand, but only when it updates.
ccache_dir="${FLAGS_chroot}/var/cache/distfiles/ccache"
if [[ ! -d ${ccache_dir} ]]; then
mkdir -p -m 2775 "${ccache_dir}"
fi
find -H "${ccache_dir}" -type d -exec chmod 2775 {} + &
find -H "${ccache_dir}" -gid 0 -exec chgrp 250 {} + &
# Certain files get copied into the chroot when entering.
for fn in "${FILES_TO_COPY_TO_CHROOT[@]}"; do
copy_into_chroot_if_exists "${SUDO_HOME}/${fn}" "/home/${SUDO_USER}/${fn}"
done
promote_api_keys
# Fix permissions on shared memory to allow non-root users access to POSIX
# semaphores.
chmod -R 777 "${FLAGS_chroot}/dev/shm"
# Have found a few chroots where ~/.gsutil is owned by root:root, probably
# as a result of old gsutil or tools. This causes permission errors when
# gsutil cp tries to create its cache files, so ensure the user can
# actually write to their directory.
gsutil_dir="${FLAGS_chroot}/home/${SUDO_USER}/.gsutil"
if [ -d "${gsutil_dir}" ]; then
chown -R ${SUDO_UID}:${SUDO_GID} "${gsutil_dir}"
fi
) 200>>"$LOCKFILE" || die "setup_env failed"
}
setup_env
CHROOT_PASSTHRU=(
"BUILDBOT_BUILD=$FLAGS_build_number"
"CHROMEOS_RELEASE_APPID=${CHROMEOS_RELEASE_APPID:-{DEV-BUILD}}"
"EXTERNAL_TRUNK_PATH=${FLAGS_trunk}"
)
# Add the whitelisted environment variables to CHROOT_PASSTHRU.
load_environment_whitelist
for var in "${ENVIRONMENT_WHITELIST[@]}" ; do
[ "${!var+set}" = "set" ] && CHROOT_PASSTHRU+=( "${var}=${!var}" )
done
# Set up GIT_PROXY_COMMAND so git:// URLs automatically work behind a proxy.
if [[ -n "${all_proxy}" || -n "${https_proxy}" || -n "${http_proxy}" ]]; then
CHROOT_PASSTHRU+=(
"GIT_PROXY_COMMAND=${CHROOT_TRUNK_DIR}/src/scripts/bin/proxy-gw"
)
fi
# Run command or interactive shell. Also include the non-chrooted path to
# the source trunk for scripts that may need to print it (e.g.
# build_image.sh).
if [ $FLAGS_early_make_chroot -eq $FLAGS_TRUE ]; then
cmd=( /bin/bash -l -c 'env "$@"' -- )
elif [ ! -x "${FLAGS_chroot}/usr/bin/sudo" ]; then
# Complain that sudo is missing.
error "Failing since the chroot lacks sudo."
error "Requested enter_chroot command was: $@"
exit 127
else
cmd=( sudo -i -u "${SUDO_USER}" )
fi
cmd+=( "${CHROOT_PASSTHRU[@]}" "$@" )
exec chroot "${FLAGS_chroot}" "${cmd[@]}"
|
import request from 'supertest';
import { app } from '../../app';
import { Routes } from '../../common';
// Helper: POST a valid item as a signed-in user (global.signin() supplies the
// auth cookie). Returns the supertest request so callers can await it.
const createItem = () => {
  return request(app)
    .post(Routes.items)
    .set('Cookie', global.signin())
    .send({ title: 'Art piece', price: 30 });
};
// Seeds three items, then expects GET to return exactly those three.
it('can fetch a list of items', async () => {
  await createItem();
  await createItem();
  await createItem();

  const response = await request(app)
    .get(Routes.items)
    .expect(200);

  expect(response.body.length).toEqual(3);
});
|
<reponame>mjburling/beneficiary-fhir-data
package gov.cms.bfd.pipeline.bridge.etl;
import gov.cms.bfd.pipeline.bridge.io.Source;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
/**
 * Streams pipe-delimited RIF files from a {@link Source}, exposing each row as a
 * {@link RifData} keyed by the header names captured during {@link #init()}.
 */
@RequiredArgsConstructor
public class RifParser implements Parser<String> {

  /** Regex for splitting RIF lines on the pipe character. */
  private static final String DELIMITER = "\\|";

  private final Source<String> source;

  /** Maps each header name to its column index; populated by {@link #init()}. */
  private final Map<String, Integer> headerIndexMap = new HashMap<>();

  /**
   * Grabs the headers so they can be used to build maps for the {@link RifData} objects created
   * later.
   *
   * @throws IOException If there was an error handling the file.
   */
  @Override
  public void init() throws IOException {
    if (source.hasInput()) {
      String headerLine = source.read();
      String[] headers = headerLine.split(DELIMITER);

      // Defensive check: String#split returns at least one element, so this can
      // only fire if the split contract ever changes.
      if (headers.length == 0) {
        throw new IOException("No headers were read");
      }

      for (int i = 0; i < headers.length; ++i) {
        headerIndexMap.put(headers[i], i);
      }
    } else {
      throw new IOException("File was empty, nothing to read");
    }
  }

  /** @return whether another row is available from the underlying source. */
  @Override
  public boolean hasData() {
    return source.hasInput();
  }

  /**
   * Reads the next row of data.
   *
   * @return the row wrapped in a {@link RifData}.
   * @throws IOException if the source is exhausted.
   * @throws IllegalStateException if {@link #init()} was never called.
   */
  @Override
  public Data<String> read() throws IOException {
    if (!headerIndexMap.isEmpty()) {
      if (source.hasInput()) {
        return new RifData(headerIndexMap, source.read().split(DELIMITER, 0));
      } else {
        // BUG FIX: corrected the garbled "No mo data" message.
        throw new IOException("No more data to read.");
      }
    }

    throw new IllegalStateException("Parser was not initialized");
  }

  /** Closes the underlying source. */
  @Override
  public void close() throws IOException {
    source.close();
  }

  /** One RIF row; cell values are fetched by header name via the shared index map. */
  @RequiredArgsConstructor
  public static class RifData extends Data<String> {

    // NOTE(review): SimpleDateFormat is not thread-safe; this is fine while a
    // RifData instance stays confined to one thread — confirm before sharing.
    private final SimpleDateFormat rifDateFormat = new SimpleDateFormat("dd-MMM-yyyy");
    private final SimpleDateFormat standardFormat = new SimpleDateFormat("yyyy-MM-dd");

    private final Map<String, Integer> headerIndexMap;
    private final String[] rowData;

    /** Returns the cell for the given header; empty for unknown headers or empty cells. */
    @Override
    public Optional<String> get(String rifIdentifier) {
      Optional<String> optional;

      // If the cell data is empty, return an empty Optional instead of an empty String.
      if (headerIndexMap.containsKey(rifIdentifier)
          && !rowData[headerIndexMap.get(rifIdentifier)].isEmpty()) {
        optional = Optional.of(rowData[headerIndexMap.get(rifIdentifier)]);
      } else {
        optional = Optional.empty();
      }

      return optional;
    }

    /**
     * Defining a custom {@link Parser.Data#getFromType(String, Type)} here so we can convert RIF
     * style dates to more standard yyyy-mm-dd format.
     *
     * @param rifIdentifier The rif data being retrieved.
     * @param type The expected type of the data being retrieved.
     * @return An {@link Optional} possibly containing the transformed data associated with the
     *     given name.
     */
    @Override
    public Optional<String> getFromType(String rifIdentifier, Data.Type type) {
      return get(rifIdentifier)
          .map(
              value -> {
                if (Type.DATE == type) {
                  try {
                    return standardFormat.format(rifDateFormat.parse(value));
                  } catch (ParseException e) {
                    throw new IllegalArgumentException("Invalid date format", e);
                  }
                }

                return value;
              });
    }
  }
}
|
<reponame>azu/monorepo-sandbox-shipjs
'use strict';
module.exports = monorepoSandboxZ;
/**
 * Placeholder entry point for the monorepo sandbox package.
 * Currently performs no work and returns undefined.
 */
function monorepoSandboxZ() {
  // TODO: implement sandbox behaviour.
  return undefined;
}
|
/*
* node-beryl - Node.js Driver for BerylDB.
* http://www.beryldb.com
*
* Copyright (C) 2021 - <NAME> <<EMAIL>>
*
* This file is part of BerylDB. BerylDB is free software: you can
* redistribute it and/or modify it under the terms of the BSD License
* version 3.
*
* More information about our licensing can be found at https://docs.beryl.dev
*/
// Numeric protocol status codes exchanged with a BerylDB server.
module.exports =
{
    // Connection / command lifecycle
    BRLD_CONNECTED    : 108,
    BRLD_OK           : 164,
    BRLD_OK2          : 310,
    // NOTE(review): BRLD_RUN and BRLD_MULTI_START below share code 165 —
    // confirm against the BerylDB protocol docs that this is intended.
    BRLD_RUN          : 165,

    // Input errors
    ERR_INPUT         : 586,
    ERR_INPUT2        : 587,
    ERR_INPUT3        : 588,

    // List streaming markers
    BRLD_START_LIST   : 282,
    BRLD_END_LIST     : 283,
    BRLD_ITEM_LIST    : 215,

    BRLD_NEW_USE      : 161,
    ERR_MISS_PARAMS   : 521,
    ERR_WRONG_PASS    : 536,
    BRLD_PING         : 110,
    BRLD_RESTART      : 289,
    BRLD_NOTIFICATION : 277,
    BRLD_MONITOR      : 245,
    BRLD_QUEUED       : 162,

    // Multi (transaction) markers
    BRLD_MULTI_START  : 165,
    BRLD_MULTI_STOP   : 169
}
def factorial(n):
    """Return n! for a non-negative integer n.

    Iterative implementation: avoids the recursion limit of the original and
    rejects negative inputs, for which the recursive version never terminated.

    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("factorial() is not defined for negative values")
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result


f = 5
result = factorial(f)
print("Factorial of", f, "is:", result)
<gh_stars>0
/**
 * Decode the JWT stored under the 'token' localStorage key and return its
 * payload object.
 */
export function getLoggedInUser() {
  const token = localStorage.getItem('token') || '';
  const base64Url = token.split('.')[1];
  // BUG FIX: convert base64url to standard base64 for ALL occurrences.
  // String.replace with a string pattern only rewrites the first match, so
  // payloads containing more than one '-' or '_' failed to decode.
  const base64 = base64Url.replace(/-/g, '+').replace(/_/g, '/');
  return JSON.parse(atob(base64)) || null;
}
/**
 * Return the user's preferred BCP 47 language tag: the first entry of
 * navigator.languages when the browser supports it, otherwise
 * navigator.language.
 */
export function getUserLanguage() {
  const { languages, language } = navigator;
  return languages !== undefined ? languages[0] : language;
}
import React, {Component} from 'react'
import FilterFooter from '../../../../components/FilterFooter'
import styles from './index.module.css'
/**
 * "More filters" panel: tag pickers for room type, orientation, floor and
 * highlights, with a mask that closes the panel and a footer that clears or
 * saves the selection through the parent's onSave(type, values).
 */
export default class FilterMore extends Component {
  state = {
    selectedValues: this.props.defaultValue
  }

  // Clear button: reset the current selection.
  onCancel = () => {
    this.setState({
      selectedValues: []
    })
  }

  // Confirm button: hand the selected values back to the parent.
  onOk = () => {
    const { type, onSave } = this.props
    // onSave is the parent component's callback.
    onSave(type, this.state.selectedValues)
  }

  // Render one group of selectable tags.
  renderFilter(data) {
    const { selectedValues } = this.state
    return (
      data.map(item => {
        // Highlight the tag when its value is currently selected.
        const isSelected = selectedValues.indexOf(item.value) > -1
        return (<span
          onClick={() => this.onTagClick(item.value)}
          key={item.value}
          className={[styles.tag, isSelected ? styles.tagActive : ''].join(' ')}>{item.label}</span>
        )})
    )
  }

  // Toggle a tag value in or out of the selection.
  onTagClick (value) {
    const { selectedValues } = this.state
    const newSelectedValues = [...selectedValues]
    if(selectedValues.indexOf(value) <= -1) {
      // Not selected yet: add it.
      newSelectedValues.push(value)
    } else {
      // Already selected: remove it.
      const index = newSelectedValues.findIndex(item => item === value)
      newSelectedValues.splice(index, 1)
    }
    this.setState({
      selectedValues: newSelectedValues
    })
  }

  render() {
    const { data: {roomType,oriented,floor,characteristic}, onCancel, type} = this.props
    return (
      <div className={styles.root}>
        {/* Mask behind the panel; clicking it closes the filter. */}
        <div className={styles.mask} onClick={() => onCancel(type)}></div>
        {/* Filter content */}
        <div className={styles.tags}>
          <dl className={styles.dl}>
            <dt className={styles.dt}>ๆทๅ</dt>
            <dd className={styles.dd}>{this.renderFilter(roomType)}</dd>
            <dt className={styles.dt}>ๆๅ</dt>
            <dd className={styles.dd}>{this.renderFilter(oriented)}</dd>
            <dt className={styles.dt}>ๆฅผๅฑ</dt>
            <dd className={styles.dd}>{this.renderFilter(floor)}</dd>
            <dt className={styles.dt}>ๆฟๅฑไบฎ็น</dt>
            <dd className={styles.dd}>{this.renderFilter(characteristic)}</dd>
          </dl>
        </div>
        {/* Bottom buttons */}
        <FilterFooter
          className={styles.footer}
          cancelText="ๆธ
้ค"
          onCancel={this.onCancel}
          onOk={this.onOk}
        />
      </div>
    )
  }
}
# Install the Tmux Plugin Manager (TPM) into the user's tmux plugin directory.
plugin_manager()
{
    git clone https://github.com/tmux-plugins/tpm ~/.tmux/plugins/tpm
}
# Build and install the latest Tmux release from source, then install TPM.
# NOTE: the function name shadows the `tmux` binary for this shell session.
tmux()
{
    out "$TX_BOLD$CL_GREEN$LB* Checking if Tmux already installed ...$CL_DEFAULT$TX_NORMAL"
    if ! command_exists tmux; then
        out "$TX_BOLD$CL_GREEN$LB* Installing Tmux ...$CL_DEFAULT$TX_NORMAL$LB"
        VERSION=$(get_latest_release "tmux/tmux")
        DIR=$(pwd)
        # ROBUSTNESS: fail fast if we cannot enter the build directories,
        # instead of configuring/`sudo make install`-ing in the wrong place.
        cd /tmp || return 1
        curl -sL "https://github.com/tmux/tmux/releases/download/${VERSION}/tmux-${VERSION}.tar.gz" | tar zx
        cd "tmux-${VERSION}" || return 1
        ./configure && make
        sudo make install
        plugin_manager
        # ROBUSTNESS: quote $DIR — the original unquoted cd broke on paths
        # containing whitespace.
        cd "$DIR" || return 1
    else
        out "Ok it's installed$LB"
    fi
}
// +build wasm
package ws
import (
"fmt"
"io"
"syscall/js"
"time"
"github.com/superp00t/go-web-daemons/wasmutils"
)
// prototype returns the browser's global WebSocket constructor.
func prototype() js.Value {
	return js.Global().Get("WebSocket")
}
// conn is the WebAssembly implementation of a WebSocket connection, backed by
// the browser's WebSocket object via syscall/js.
type conn struct {
	binary       bool        // send binary frames (typed arrays) instead of text
	conn         js.Value    // the underlying browser WebSocket object
	dialComplete chan bool   // signalled once by the "open" event
	recv         chan []byte // incoming message payloads
	errc         chan error  // "close"/"error" events surface here
	u8Proto      js.Value    // cached Uint8Array constructor
}
// dialOpts creates a browser WebSocket for o and blocks until the "open"
// event fires. SOCKS5 proxying is impossible inside WebAssembly, so that
// option is rejected up front.
func dialOpts(o Opts) (*conn, error) {
	if o.Socks5 != "" {
		return nil, fmt.Errorf("ws: cannot dial WebSocket in WebAssembly using a SOCKS5 proxy parameter")
	}

	args := []interface{}{o.URL}
	if o.Subprotocol != "" {
		args = append(args, o.Subprotocol)
	}

	c := &conn{}
	// BUG FIX: the original wrote `o.binary = o.Binary`, mutating the local
	// copy of o and never setting c.binary, so Send always used text frames.
	c.binary = o.Binary
	c.dialComplete = make(chan bool)
	c.errc = make(chan error)
	c.recv = make(chan []byte)
	c.conn = prototype().New(args...)
	c.u8Proto = js.Global().Get("Uint8Array")

	c.conn.Call("addEventListener", "open", js.NewCallback(func(a []js.Value) {
		c.dialComplete <- true
	}))

	c.conn.Call("addEventListener", "close", js.NewCallback(func(a []js.Value) {
		c.errc <- io.EOF
	}))

	c.conn.Call("addEventListener", "error", js.NewCallback(func(a []js.Value) {
		// BUG FIX: the original format string had no verb ("ws: " plus an
		// unused argument), which go vet flags and which dropped the detail.
		c.errc <- fmt.Errorf("ws: %s", a[0].String())
	}))

	bl := js.Global().Get("Blob")
	ab := js.Global().Get("ArrayBuffer")

	c.conn.Call("addEventListener", "message", js.NewCallback(func(a []js.Value) {
		event := a[0]
		data := event.Get("data")
		if data.InstanceOf(ab) {
			c.handleArrayBuffer(data)
		} else if data.InstanceOf(bl) {
			c.handleBlob(data)
		} else {
			c.recv <- []byte(data.String())
		}
	}))

	// NOTE(review): a failed dial fires "close"/"error" rather than "open",
	// so this receive would block forever — confirm callers guard against it.
	<-c.dialComplete
	return c, nil
}
// Send writes b as one WebSocket message: a typed array when the connection
// was dialled in binary mode, otherwise a text frame. The browser API is
// fire-and-forget, so this never returns an error.
func (c *conn) Send(b []byte) error {
	if c.binary {
		u8 := js.TypedArrayOf(b)
		c.conn.Call("send", u8)
		// Release the typed-array handle so the Go slice can be collected.
		u8.Release()
	} else {
		str := js.ValueOf(string(b))
		c.conn.Call("send", str)
	}
	return nil
}
// Recv blocks until the next message arrives or the connection reports a
// close/error event (a clean close surfaces as io.EOF).
func (c *conn) Recv() ([]byte, error) {
	select {
	case err := <-c.errc:
		return nil, err
	case b := <-c.recv:
		return b, nil
	}
}
// Close asks the browser to close the WebSocket; the JS API reports no error.
func (c *conn) Close() error {
	c.conn.Call("close")
	return nil
}
// handleArrayBuffer copies the bytes out of an ArrayBuffer and forwards them
// to the receive channel, logging how long the copy took.
func (c *conn) handleArrayBuffer(ab js.Value) {
	fmt.Println("arraybuffer")
	// BUG FIX: `t` was never declared in the original, so this file did not
	// compile; start the timer before the copy being measured.
	t := time.Now()
	out := wasmutils.LoadBytesFromArrayBuffer(ab)
	fmt.Println(time.Since(t))
	c.recv <- out
}
// handleBlob reads a Blob asynchronously through a FileReader and feeds the
// resulting ArrayBuffer into handleArrayBuffer.
func (c *conn) handleBlob(blob js.Value) {
	fr := js.Global().Get("FileReader").New()
	fr.Set("onload", js.NewCallback(func(a []js.Value) {
		c.handleArrayBuffer(a[0].Get("target").Get("result"))
	}))
	fr.Call("readAsArrayBuffer", blob)
}
|
The most effective defense against a DoS attack is to keep malicious traffic from ever reaching the system, for example by deploying a firewall or an intrusion detection system. In addition, common safeguards such as rate limiting, packet filtering, and monitoring for suspicious activity protect against known attack patterns and can significantly reduce the impact of a DoS attack.
#!/bin/bash -xe
# inspired from sister script run-tox-with-oslo-master.sh
# First positional argument: the tox environment to run against neutron-lib master.
venv=$1
if [[ -z "$venv" ]]; then
    # BUG FIX: the original printed "Usage: $?" — $? is the last exit status;
    # $0 (the script name) is what a usage line should show.
    echo "Usage: $0 VENV"
    echo
    echo "VENV: The tox environment to run (eg 'py35')"
    exit 1
fi

script_path=/usr/local/jenkins/slave_scripts

# Point the neutron-lib requirement at the master branch checkout.
sed -i "s/neutron-lib.*/-e git+https:\/\/git.openstack.org\/openstack\/neutron-lib.git#egg=neutron-lib/g" requirements.txt
sed -i "s/neutron-lib.*/-e git+https:\/\/git.openstack.org\/openstack\/neutron-lib.git#egg=neutron-lib/g" upper-constraints.txt

# Append a derived tox environment that reuses the project's requirements.
cat << EOF >> tox.ini

[testenv:${venv}-neutron-lib-master]
setenv = VIRTUAL_ENV={envdir}
passenv = TRACE_FAILONLY GENERATE_HASHES http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
usedevelop = True
deps = -r{toxinidir}/requirements.txt
       -r{toxinidir}/test-requirements.txt
whitelist_externals = sh
commands =
  {toxinidir}/tools/ostestr_compat_shim.sh {posargs}
EOF

# Run both jobs even if one fails, then combine their exit statuses.
set +e
$script_path/run-tox.sh pep8
pep8_code=$?
$script_path/run-tox.sh $venv-neutron-lib-master
venv_code=$?
set -e

exit_code=$(( $pep8_code || $venv_code ))
exit $exit_code
|
import json
import time
import datetime
import os
import csv
jsonDir = "./data/json"
csvDir = "./data/csv"
def convertToCsv(jsonFileName,csvFileName) :
    """Convert one institution's ORCID JSON dump into a CSV summary file."""
    with open(csvFileName, 'w', newline='') as csvfile :
        with open(jsonFileName, 'r') as orcidJson :
            orcidRecords = json.load(orcidJson)
            # Column order must match the values produced by generateCsvRow.
            fieldnames = ['orcid', 'lastUpdated','name','educations','employments','ids','emails','workCount']
            csvWriter = csv.writer(csvfile)
            csvWriter.writerow(fieldnames)
            for orcidRecord in orcidRecords :
                csvWriter.writerow(generateCsvRow(orcidRecord))
def generateCsvRow(orcid) :
    """Build one CSV row for a record, in the same order as the header row
    written by convertToCsv:
    orcid, lastUpdated, name, educations, employments, ids, emails, workCount.
    """
    csvRow = []
    csvRow.append(getOrcidId(orcid))
    csvRow.append(getLastUpdated(orcid))
    csvRow.append(getName(orcid))
    # BUG FIX: the original appended employments before educations, while the
    # header lists 'educations' before 'employments', silently swapping the
    # two columns in every output file.
    csvRow.append(getEducations(orcid))
    csvRow.append(getEmployments(orcid))
    csvRow.append(getIds(orcid))
    csvRow.append(getEmails(orcid))
    csvRow.append(getWorkCount(orcid))
    return csvRow
def getOrcidId(orcidRecord) :
    """Return the ORCID iD: the 'path' field of the orcid-identifier block."""
    return orcidRecord.get("orcid-identifier").get("path")
def getLastUpdated(orcidRecord) :
    """Return the record's last-modified date as a datetime.date, or '' if absent.

    The ORCID API reports the timestamp in milliseconds since the epoch.
    """
    try :
        lastUpdated = orcidRecord.get("history").get("last-modified-date").get("value")
        lastUpdated = datetime.date.fromtimestamp( lastUpdated/1000 )
    # Narrowed from a bare ``except``: missing keys surface as AttributeError
    # (.get on None); bad values as TypeError/ValueError/OSError/OverflowError.
    except (AttributeError, TypeError, ValueError, OSError, OverflowError) :
        lastUpdated = ""
    return lastUpdated
def getName(orcidRecord) :
    """Return the person's display name.

    Prefers "given family"; falls back to the first other-name entry and
    finally to "Anonymous".
    """
    try :
        # AttributeError: a missing key makes a later .get run on None.
        name = orcidRecord.get("person").get("name").get("given-names").get("value")
    except (AttributeError, TypeError) :
        name = ""
    try :
        familyName = orcidRecord.get("person").get("name").get("family-name").get("value")
        name = name + " " + familyName
    # TypeError also covers concatenating with a missing (None) family name.
    except (AttributeError, TypeError) :
        pass
    if len(name.strip()) == 0 :
        try :
            name = orcidRecord.get("person").get("other-names").get("other-name")[0].get("content")
        except (AttributeError, TypeError, IndexError, KeyError) :
            name = "Anonymous"
    return name
def _nested_value(node, path, fallback) :
    """Walk ``path`` of dict keys from ``node``; return ``fallback`` when any
    step is missing or the final value is falsy (matches the original
    per-field try/except blocks)."""
    try :
        for key in path :
            node = node.get(key)
        return node if node else fallback
    except (AttributeError, TypeError) :
        return fallback


def _affiliation_summaries(orcidRecord, section, summaryKey, noRole) :
    """Format one affiliation section ('employments' or 'educations') as a
    list of "Org: Role start -> end" strings.

    Mirrors the original duplicated loops: any structural error discards the
    whole list (returns []).
    """
    noOrg = "No organization name"
    noYear = "*"
    entries = []
    try :
        groups = orcidRecord.get("activities-summary").get(section).get("affiliation-group")
        for group in groups :
            for summary in group["summaries"] :
                detail = summary.get(summaryKey)
                org = _nested_value(detail, ("organization", "name"), noOrg)
                role = _nested_value(detail, ("role-title",), noRole)
                startYear = _nested_value(detail, ("start-date", "year", "value"), noYear)
                endYear = _nested_value(detail, ("end-date", "year", "value"), noYear)
                entries.append(org + ": " + role + " " + str(startYear) + " -> " + str(endYear))
    # Narrowed from a bare ``except``: covers missing sections (AttributeError
    # on None.get), non-iterable groups (TypeError) and missing "summaries".
    except (AttributeError, TypeError, KeyError) :
        entries = []
    return entries


def getEmployments(orcidRecord) :
    """Return ';'-joined employment summaries ("Org: Role start -> end")."""
    return ";".join(_affiliation_summaries(
        orcidRecord, "employments", "employment-summary", "No job title given"))


def getEducations(orcidRecord) :
    """Return ';'-joined education summaries ("Org: Role start -> end")."""
    return ";".join(_affiliation_summaries(
        orcidRecord, "educations", "education-summary", "No course of study given"))
def getEmails(orcidRecord) :
    """Return ';'-joined e-mail addresses from the person section, or ''."""
    try :
        emailList = [e.get("email") for e in orcidRecord.get("person").get("emails").get("email")]
    # Narrowed from a bare ``except``: missing keys (.get on None) raise
    # AttributeError; iterating None raises TypeError.
    except (AttributeError, TypeError) :
        emailList = []
    emails = ";".join(emailList)
    return emails
def getIds(orcidRecord) :
    """Return ';'-joined "type: value" external identifiers, or ''."""
    try :
        idList = [i.get("external-id-type") + ": "+ i.get("external-id-value") for i in orcidRecord.get("person").get("external-identifiers").get("external-identifier")]
    # Narrowed from a bare ``except``: TypeError also covers concatenating a
    # missing (None) type or value.
    except (AttributeError, TypeError) :
        idList = []
    ids = ";".join(idList)
    return ids
def getWorkCount(orcidRecord) :
    """Return the number of work groups listed on the record, or 0."""
    try :
        workCount = len(orcidRecord.get("activities-summary").get("works").get("group"))
    # Narrowed from a bare ``except``: missing keys raise AttributeError,
    # len(None) raises TypeError.
    except (AttributeError, TypeError) :
        workCount = 0
    return workCount
idMappingsFileName = 'idMappings.json'

# Load the institution list; each entry's ROR id names the per-institution
# JSON input and CSV output files.
with open(idMappingsFileName, 'r') as idMappings:
    mappings=idMappings.read()
institutions = json.loads(mappings)

for i in institutions :
    # Use the last path segment of the ROR URL as the file stem.
    ror = i["ror"].split("/")[-1]
    jsonFileName = os.path.join(jsonDir, ror + '.json')
    csvFileName = os.path.join(csvDir, ror + '.csv')
    try :
        convertToCsv(jsonFileName,csvFileName)
    except :
        # NOTE(review): bare except silently skips institutions whose JSON is
        # missing or malformed — consider logging the failure.
        continue
|
/*
*
*/
package net.community.chest.db.sql;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import net.community.chest.lang.EnumUtil;
/**
* An {@link Enum} of useful SQL keywords
*
* <P>Copyright 2010 as per GPLv2</P>
*
* @author <NAME>.
* @since Jun 1, 2010 11:35:06 AM
*/
public enum SQLKeyword {
    SELECT,
    FROM,
    AS,
    WHERE,
    NULL,
    IN,
    IS,
    BETWEEN,
    LIKE,
    AND,
    OR,
    NOT,
    ASC,
    DESC,
    MIN,
    MAX,
    COUNT,
    DISTINCT,
    // Multi-word keywords cannot be spelled as enum names, so these two carry
    // an explicit keyword string.
    ORDERBY("ORDER BY"),
    GROUPBY("GROUP BY");

    /** The SQL text of this keyword; may differ from the enum name. */
    private final String _keyword;

    public final String getKeyword ()
    {
        return _keyword;
    }
    /*
     * @see java.lang.Enum#toString()
     */
    @Override
    public String toString ()
    {
        return getKeyword();
    }

    // Falls back to the upper-cased enum name when no keyword text is given.
    SQLKeyword (String keyword)
    {
        if ((null == keyword) || (keyword.length() <= 0))
            _keyword = name().toUpperCase();
        else
            _keyword = keyword;
    }

    SQLKeyword ()
    {
        this(null);
    }

    /** Immutable cached view of values() to avoid re-cloning the array. */
    public static final List<SQLKeyword> VALUES=Collections.unmodifiableList(Arrays.asList(values()));

    /** Lookup by enum name, delegated to EnumUtil (case behavior set by the
     * {@code false} flag — see EnumUtil.fromName). */
    public static final SQLKeyword fromName (String s)
    {
        return EnumUtil.fromName(VALUES, s, false);
    }

    /** Case-insensitive lookup by SQL keyword text; null when not found. */
    public static final SQLKeyword fromKeyword (String s)
    {
        if ((null == s) || (s.length() <= 0))
            return null;

        for (final SQLKeyword v : VALUES)
        {
            final String kw=(null == v) ? null : v.getKeyword();
            if (s.equalsIgnoreCase(kw))
                return v;
        }

        return null; // no match found
    }
}
|
package com.yin.springboot.mybatis.domain;
import java.io.Serializable;
import lombok.Data;
/**
 * CMS preference-area domain object (the package suggests a MyBatis mapping —
 * confirm against the mapper XML).
 */
@Data
public class CmsPrefrenceArea implements Serializable {
    // NOTE(review): class name looks like a misspelling of "CmsPreferenceArea";
    // renaming would break callers and mappers, so it is only flagged here.

    private Long id;

    // Area display name.
    private String name;

    // Secondary title.
    private String subTitle;

    /**
     * Display picture, stored as raw image bytes.
     */
    private byte[] pic;

    // Sort order.
    private Integer sort;

    // Visibility flag.
    private Integer showStatus;

    private static final long serialVersionUID = 1L;
}
/***********************************************************************
This class is for drawing a fluidsolver using the OpenFrameworks texture
************************************************************************/
#pragma once
#include "MSAFluidDrawerBase.h"
namespace msa {
namespace fluid {
// Concrete fluid drawer that renders the solver output through an
// OpenFrameworks ofTexture.
class DrawerGl : public DrawerBase {
public:
    // Texture width, in pixels.
    float getWidth() const override {
        return tex.getWidth();
    }

    // Texture height, in pixels.
    float getHeight() const override {
        return tex.getHeight();
    }

    // Direct access to the backing texture (e.g. for custom drawing).
    ofTexture& getTextureReference() {
        return tex;
    }

protected:
    // mutable: updateTexture() is const but must upload new pixel data.
    mutable ofTexture tex;

    // Allocates the texture two cells smaller than the solver grid
    // (presumably excluding boundary cells — confirm against the solver).
    void createTexture() override {
        int texWidth = _fluidSolver->getWidth()-2;
        int texHeight =_fluidSolver->getHeight()-2;
        tex.allocate(texWidth, texHeight, _glType);
    }

    // Uploads the current pixel buffer into the texture.
    void updateTexture() const override {
        tex.loadData(_pixels, (int)tex.getWidth(), (int)tex.getHeight(), _glType);
    }

    void deleteTexture() override {
        tex.clear();
    }

    void drawTexture(float x, float y, float w, float h) const override {
        tex.draw(x, y, w, h);
    }
};
}
}
|
package com.contentstack.gqlspring.models;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
 * "About" page entry model; Jackson binds JSON fields directly onto these
 * public properties.
 */
@Data
public class AboutModel {

    // Page title.
    @JsonProperty
    public String title;

    // Page URL.
    @JsonProperty
    public String url;

    // SEO metadata; left untyped because the schema is not visible here.
    @JsonProperty
    public Object seo;

    // Page component blocks; untyped for the same reason.
    @JsonProperty
    public Object page_components;
}
#!/bin/sh
# Derive the image tag from the Maven project version, suffixed for aarch64.
PV=$(mvn org.apache.maven.plugins:maven-help-plugin:3.2.0:evaluate -Dexpression=project.version -q -DforceStdout)-aarch64
# NOTE(review): passing the secret via -p exposes it to `ps`/shell history;
# prefer `--password-stdin`.
docker login quay.io -u ${QUAY_MANUFACTURING_USERNAME} -p ${QUAY_MANUFACTURING_PASSWORD}
# Register QEMU binfmt handlers so the aarch64 image can be built on x86.
docker run --rm --privileged multiarch/qemu-user-static:register --reset
# Force-remove any stale local copy of the tag before rebuilding.
docker rmi quay.io/qiotmanufacturing/datacenter-plant-manager:$PV --force
docker build -t quay.io/qiotmanufacturing/datacenter-plant-manager:$PV -f src/main/docker/Dockerfile.native.multiarch .
docker push quay.io/qiotmanufacturing/datacenter-plant-manager:$PV
#!/bin/sh
# Regenerate the autotools build system, then drop autoconf caches.
autoreconf
# ROBUSTNESS: the original `find | xargs rm -rf` broke on paths containing
# whitespace; -prune -exec handles any path and stops find from descending
# into directories it is about to delete.
find . -name 'autom4te.cache' -prune -exec rm -rf {} +
|
package pl.bliw.emulator.cpu;
/**
 * Holds the two Chip8 countdown timers: the delay timer and the sound timer.
 * Both timers saturate at zero — decrementing a timer that has already
 * reached zero leaves it unchanged.
 */
public class Timers {

    /** Current delay-timer counter. */
    private int delayTimer;

    /** Current sound-timer counter. */
    private int soundTimer;

    /** @return the current delay-timer value. */
    public int getDelayTimer() {
        return delayTimer;
    }

    /**
     * Replaces the delay-timer value.
     *
     * @param value the new delay-timer value.
     */
    public void setDelayTimer(int value) {
        delayTimer = value;
    }

    /** @return the current sound-timer value. */
    public int getSoundTimer() {
        return soundTimer;
    }

    /**
     * Replaces the sound-timer value.
     *
     * @param value the new sound-timer value.
     */
    public void setSoundTimer(int value) {
        soundTimer = value;
    }

    /** Counts the sound timer down by one, stopping once it reaches zero. */
    public void decrementSoundTimer() {
        soundTimer -= (soundTimer > 0) ? 1 : 0;
    }

    /** Counts the delay timer down by one, stopping once it reaches zero. */
    public void decrementDelayTimer() {
        delayTimer -= (delayTimer > 0) ? 1 : 0;
    }
}
|
<reponame>vampire-studios/Obsidian
package io.github.vampirestudios.obsidian.api.bedrock.block.events;
import io.github.vampirestudios.obsidian.api.bedrock.block.Event;
/**
 * Bedrock block event carrying damage parameters (field meanings follow the
 * bedrock block-event schema — confirm against that spec).
 */
public class Damage extends Event {
    // Amount of damage.
    public int amount;
    // Target selector string.
    public String target;
    // Damage type identifier.
    public String type;
}
|
<filename>frontend/src/pages/components/chat/TextArea.tsx
import React from 'react';
import './TextArea.scss';
import * as chat from '../../../assets/img/chat-typing.svg';
import * as send from '../../../assets/img/send.svg';
/**
 * Chat input area. A hidden checkbox + label pair drives a pure-CSS toggle
 * that swaps the "typing" icon for the "send" icon; the message itself is
 * entered in a contentEditable span (#msgText).
 */
export class TextArea extends React.Component {
  render() {
    return (
      <div className="textarea-container">
        {/* Hidden checkbox whose checked state the SCSS uses for the toggle. */}
        <input
          id="textareaCheckbox"
          type="checkbox"
          className="textarea-checkbox"
        />
        {/* Label toggles the checkbox; shows either the chat or send icon. */}
        <label
          htmlFor="textareaCheckbox"
          className="textarea-container__logo"
          id="textareaBtn"
        >
          <img
            src={chat.default}
            alt="chat_typing"
            className="textarea-container__logo-msg"
          />
          <img
            src={send.default}
            alt="send"
            className="textarea-container__logo-send"
          />
        </label>
        {/* Editable message input. */}
        <div className="textarea-btn__options">
          <span contentEditable="true" id="msgText"></span>
        </div>
      </div>
    );
  }
}
|
<filename>spec/hatchet/config_spec.rb<gh_stars>10-100
require("spec_helper")
# Exercises Hatchet::Config: repo-name -> fixture-path resolution, the
# directory and repo maps built from the hatchet config, and GitHub
# "owner/repo" shorthand expansion.
describe "ConfigTest" do
  before { @config = Hatchet::Config.new }

  it("config path for name") do
    expect(@config.path_for_name("rails3_mri_193")).to(eq("repo_fixtures/repos/rails3/rails3_mri_193"))
  end

  # The "owner/name" form resolves to the same path as the bare name.
  it("config path for name with full repo name") do
    expect(@config.path_for_name("rails3/rails3_mri_193")).to(eq("repo_fixtures/repos/rails3/rails3_mri_193"))
  end

  it("config dirs") do
    { "repo_fixtures/repos/bundler/no_lockfile" => "https://github.com/sharpstone/no_lockfile.git", "repo_fixtures/repos/default/default_ruby" => "https://github.com/sharpstone/default_ruby.git", "repo_fixtures/repos/rails2/rails2blog" => "https://github.com/sharpstone/rails2blog.git", "repo_fixtures/repos/rails3/rails3_mri_193" => "https://github.com/sharpstone/rails3_mri_193.git" }.each do |key, value|
      assert_include(key, value, @config.dirs)
    end
  end

  it("config repos") do
    { "default_ruby" => "repo_fixtures/repos/default/default_ruby", "no_lockfile" => "repo_fixtures/repos/bundler/no_lockfile", "rails2blog" => "repo_fixtures/repos/rails2/rails2blog", "rails3_mri_193" => "repo_fixtures/repos/rails3/rails3_mri_193" }.each do |key, value|
      assert_include(key, value, @config.repos)
    end
  end

  # An empty internal config must fall back to the default repo directory.
  it("no internal config raises no errors") do
    @config.send(:set_internal_config!, {})
    expect(@config.repo_directory_path).to(eq("./repos"))
  end

  # "owner/repo" entries expand into full GitHub clone URLs.
  it("github shortcuts") do
    @config.send(:init_config!, "foo" => (["schneems/sextant"]))
    expect(@config.dirs["./repos/foo/sextant"]).to(eq("https://github.com/schneems/sextant.git"))
  end

  # Helper with a clearer failure message than plain include matching.
  private def assert_include(key, value, actual)
    expect(actual[key]).to eq(value), "Expected #{actual.inspect} to include #{{ key => value }} but it did not"
  end
end
|
import React from 'react';
import { Grid, Button, TextField } from '@material-ui/core';
// Static "Create your account" registration form (Material-UI inputs with
// utility-class styling). Purely presentational: no state, no submit handler.
export default function LivePreviewExample() {
  return (
    <>
      <div className="app-wrapper bg-white min-vh-100">
        <div className="app-main min-vh-100">
          <div className="app-content p-0">
            <div className="app-content--inner d-flex align-items-center">
              <div className="flex-grow-1 w-100 d-flex align-items-center">
                <div className="bg-composed-wrapper--content py-5">
                  <Grid item md={10} lg={8} xl={4} className="mx-auto">
                    {/* Heading */}
                    <div className="text-center mb-4">
                      <h1 className="display-4 mb-1 font-weight-bold">
                        Create your account
                      </h1>
                      <p className="font-size-lg mb-0 text-black-50">
                        Start benefiting from our tools right away
                      </p>
                    </div>
                    {/* Email */}
                    <div className="mb-3">
                      <label className="font-weight-bold mb-2">
                        Email address
                      </label>
                      <TextField
                        variant="outlined"
                        size="small"
                        fullWidth
                        placeholder="Enter your email address"
                        type="email"
                      />
                    </div>
                    {/* Password */}
                    <div className="mb-3">
                      <div className="d-flex justify-content-between">
                        <label className="font-weight-bold mb-2">
                          Password
                        </label>
                      </div>
                      <TextField
                        variant="outlined"
                        size="small"
                        fullWidth
                        placeholder="Enter your password"
                        type="password"
                      />
                    </div>
                    {/* First/last name, side by side */}
                    <Grid container spacing={6}>
                      <Grid item md={6}>
                        <div>
                          <label className="font-weight-bold mb-2">
                            First name
                          </label>
                          <TextField
                            variant="outlined"
                            size="small"
                            fullWidth
                            placeholder="Enter your first name"
                          />
                        </div>
                      </Grid>
                      <Grid item md={6}>
                        <div>
                          <label className="font-weight-bold mb-2">
                            Last name
                          </label>
                          <TextField
                            variant="outlined"
                            size="small"
                            fullWidth
                            placeholder="Enter your last name"
                          />
                        </div>
                      </Grid>
                    </Grid>
                    {/* Terms notice */}
                    <div className="my-4">
                      By clicking the <strong>Create account</strong> button
                      below you agree to our terms of service and privacy
                      statement.
                    </div>
                    {/* Submit */}
                    <div className="text-center mb-4">
                      <Button className="btn-primary text-uppercase font-weight-bold font-size-sm my-3">
                        Create account
                      </Button>
                    </div>
                  </Grid>
                </div>
              </div>
            </div>
          </div>
        </div>
      </div>
    </>
  );
}
|
<gh_stars>1-10
// Auto-generated (Babel CommonJS) icon module: a 16x16 "key" SVG definition.
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.key2 = void 0;

// viewBox plus a single filled path describing the key glyph.
var key2 = {
  "viewBox": "0 0 16 16",
  "children": [{
    "name": "path",
    "attribs": {
      "fill": "#000000",
      "d": "M15.658 4.91l-1.58-1.58c-0.387-0.387-1.021-1.021-1.409-1.409l-1.58-1.58c-0.387-0.387-1.077-0.456-1.533-0.152l-4.319 2.88c-0.456 0.304-0.628 0.954-0.383 1.444l1.101 2.203c0.034 0.067 0.073 0.139 0.115 0.213l-5.571 5.571-0.5 3.5h3v-1h2v-2h2v-2h2v-1.112c0.1 0.060 0.196 0.113 0.284 0.157l2.203 1.101c0.49 0.245 1.14 0.072 1.444-0.383l2.88-4.319c0.304-0.456 0.236-1.146-0.152-1.533zM2.354 13.354l-0.707-0.707 4.868-4.868 0.707 0.707-4.868 4.868zM14.328 6.621l-0.707 0.707c-0.194 0.194-0.513 0.194-0.707 0l-4.243-4.243c-0.194-0.194-0.194-0.513 0-0.707l0.707-0.707c0.194-0.194 0.513-0.194 0.707 0l4.243 4.243c0.194 0.194 0.194 0.513 0 0.707z"
    }
  }]
};
exports.key2 = key2;
import { css } from 'glamor';
import React, { Component, PropTypes } from 'react';
import classes from './styles';
import FormLabel from '../FormLabel';
/**
 * Form field wrapper: renders an optional <FormLabel> plus its children, laid
 * out according to the `formLayout` context ('basic' | 'horizontal' |
 * 'inline'), and publishes a generated `formFieldId` to descendants through
 * child context.
 */
class FormField extends Component {
  constructor () {
    super();
    // NOTE(review): Math.random-based ids differ between server and client
    // renders — confirm SSR is not used before relying on them.
    this.formFieldId = generateId();
  }

  getChildContext () {
    return {
      formFieldId: this.formFieldId,
    };
  }

  render () {
    const { formLayout = 'basic', labelWidth } = this.context;
    const {
      cssStyles,
      children,
      className,
      cropLabel,
      htmlFor,
      label,
      offsetAbsentLabel,
      ...props
    } = this.props;

    // Compose glamor styles for the layout variant, the optional
    // absent-label offset, and any caller-supplied css.
    props.className = css(
      classes.FormField,
      classes['FormField--form-layout-' + formLayout],
      offsetAbsentLabel ? classes['FormField--offset-absent-label'] : null,
      cssStyles
    );
    if (className) {
      props.className += (' ' + className);
    }
    // With no label, indent by the label width so the field still lines up
    // with labelled siblings.
    if (offsetAbsentLabel && labelWidth) {
      props.style = {
        paddingLeft: labelWidth,
        ...props.style,
      };
    }

    // elements
    const componentLabel = label ? (
      <FormLabel htmlFor={htmlFor} cropText={cropLabel}>
        {label}
      </FormLabel>
    ) : null;

    // NOTE(review): htmlFor is not a valid attribute on <div>; React warns
    // about it — confirm whether it can be dropped here.
    return (
      <div {...props} htmlFor={htmlFor}>
        {componentLabel}
        {children}
      </div>
    );
  }
};
// Shape shared by the cssStyles validators below.
const stylesShape = {
  _definition: PropTypes.object,
  _name: PropTypes.string,
};

FormField.contextTypes = {
  formLayout: PropTypes.oneOf(['basic', 'horizontal', 'inline']),
  labelWidth: PropTypes.oneOfType([
    PropTypes.number,
    PropTypes.string,
  ]),
};

FormField.childContextTypes = {
  formFieldId: PropTypes.string,
};

// CONSISTENCY FIX: the file imports PropTypes yet some entries used
// React.PropTypes (removed in React 16); use the imported binding everywhere.
FormField.propTypes = {
  cssStyles: PropTypes.oneOfType([
    PropTypes.arrayOf(PropTypes.shape(stylesShape)),
    PropTypes.shape(stylesShape),
  ]),
  children: PropTypes.node,
  cropLabel: PropTypes.bool,
  htmlFor: PropTypes.string,
  label: PropTypes.string,
  offsetAbsentLabel: PropTypes.bool,
};
/**
 * Generate a short pseudo-random id: up to 9 base-36 characters.
 * Uses slice() instead of the deprecated String.prototype.substr();
 * slice(2, 11) selects the same 9 characters substr(2, 9) did.
 */
function generateId () {
  return Math.random().toString(36).slice(2, 11);
}

module.exports = FormField;
|
/**
* This file is the part of NChart3D Framework
* http://www.nchart3d.com
*
* File: NChartCandlestickSeries.h
* Version: "2.9.1"
*
* Copyright (C) 2017 Nulana LTD. All Rights Reserved.
*/
#import "NChartOHLCSeries.h"
/**
 * The NChartCandlestickSeries class provides methods to display candlestick series.
 * Inherits its OHLC data handling from NChartOHLCSeries.
 */
NCHART3D_EXPORT @interface NChartCandlestickSeries : NChartOHLCSeries

/**
 * Color for the border of positive candles.
 */
@property (nonatomic, retain) NSColor *positiveBorderColor;

/**
 * Color for the border of negative candles.
 */
@property (nonatomic, retain) NSColor *negativeBorderColor;

@end
/**
 * The NChartCandlestickSeriesSettings class provides global settings for <NChartCandlestickSeries>.
 * Extends the shared OHLC settings with candlestick-specific geometry options.
 */
NCHART3D_EXPORT @interface NChartCandlestickSeriesSettings : NChartOHLCSeriesSettings

/**
 * The resolution of cylinders. Resolution is the amount of vertices that build the circle.
 * For example if you want to get a square candlestick, you should set resolution to 4. If you want to get a cylindrical
 * candlestick, you may set a larger value. But the larger is the resolution, the more memory is used and the slower the
 * rendering will be, so you should find out the minimal acceptable value. A good value for cylinder is 16 or 20.
 * The default value is 20.
 * @note This value cannot be less than 3 and greater than 32.
 */
@property (nonatomic, assign) NSUInteger cylindersResolution;

/**
 * Flag determining whether horizontal lines on the ending of lower and upper candlestick shadows are shown (YES) or
 * hidden (NO). The default value is YES.
 */
@property (nonatomic, assign) BOOL showShadowEndingLines;

@end
|
<gh_stars>0
package org.thebund1st.hankou.winning.lottery.domain;
import java.util.List;
/**
 * Assembles {@link WinningWindow}s from drawn lottery positions.
 */
public interface WinningWindowAssembler {

    /**
     * Builds the winning windows for the given positions.
     *
     * @param positions the drawn lottery positions to group.
     * @param bound an upper bound used while forming windows (exact
     *              semantics are defined by implementations — confirm there).
     * @return the assembled winning windows.
     */
    List<WinningWindow> assemble(List<LotteryPosition> positions, int bound);
}
|
'use strict'

const path = require('path')
const webpack = require('webpack')

// Development webpack configuration: bundles to dist/bundle.js with eval
// source maps, Babel for JS/JSX, file-loader for images and fonts, and a
// PostCSS pipeline for plain CSS.
module.exports = {
  output: {
    path: path.resolve(__dirname, './dist'),
    publicPath: '/',
    filename: 'bundle.js'
  },
  // Fast rebuild-friendly source maps for development.
  devtool: 'eval',
  plugins: [
    // Expose NODE_ENV (both spellings) to the bundled code.
    new webpack.DefinePlugin({
      'process.env.NODE_ENV': JSON.stringify('development'),
      NODE_ENV: JSON.stringify('development')
    })
  ],
  module: {
    rules: [
      // Transpile application JS/JSX.
      {
        test: /\.jsx?$/,
        exclude: /node_modules/,
        loader: 'babel-loader'
      },
      // Copy images through with their original names.
      {
        test: /\.(jpe?g|png|gif|svg)$/i,
        loader: 'file-loader?name=[name].[ext]'
      },
      // CSS: style-loader <- css-loader <- postcss-loader.
      {
        test: /\.css?$/,
        loaders: [
          'style-loader',
          'css-loader?importLoaders=1',
          {
            loader: 'postcss-loader',
            options: {
              config: {
                path: path.resolve(__dirname, './postcss.config.js')
              }
            }
          }
        ]
      },
      // Fonts are emitted under dist/fonts/.
      {
        test: /\.(ttf|otf|eot|woff(2)?)(\?[a-z0-9]+)?$/,
        loader: 'file-loader?name=fonts/[name].[ext]'
      }
    ]
  }
}
|
import numpy as np
def draw_circle(img, row, col, rad):
    """Draw a filled circle of radius ``rad`` centred at (row, col) into ``img``.

    The original file left this function with no body (a syntax error); this
    minimal NumPy implementation sets the covered pixels to 1.0 in place.
    """
    n_rows, n_cols = img.shape
    rr, cc = np.ogrid[:n_rows, :n_cols]
    img[(rr - row) ** 2 + (cc - col) ** 2 <= rad ** 2] = 1.0


def generate_noisy_circle_image(size, radius, noise):
    """Render one random circle into a size x size image and add uniform noise.

    Returns ((row, col, rad), img): the circle parameters and the noisy image.
    """
    img = np.zeros((size, size))  # Create an empty image of the specified size
    row = np.random.randint(size)  # Random row position for the circle centre
    col = np.random.randint(size)  # Random column position for the circle centre
    # BUG FIX: np.random.randint requires low < high; with radius <= 10 the
    # original max(10, radius) produced randint(10, 10) and raised ValueError.
    rad = np.random.randint(10, max(11, radius))
    draw_circle(img, row, col, rad)  # Draw the circle on the image
    # Add uniform noise in [0, noise) on top of the binary circle.
    img += noise * np.random.rand(*img.shape)
    return (row, col, rad), img
<reponame>vrpolakatcisco/jumpavg
# Copyright (c) 2018 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module holding AbstractGroupClassifier class."""
from abc import ABCMeta, abstractmethod
class AbstractGroupClassifier(object):
    """Abstract class defining API for classifier.

    The classifier is an object with classify() method
    which divides data into groups containing metadata.
    """

    # NOTE(review): __metaclass__ is the Python 2 mechanism. Under Python 3 it
    # is silently ignored and @abstractmethod is not enforced; a Python 3 port
    # should declare `metaclass=ABCMeta` in the class statement instead.
    __metaclass__ = ABCMeta

    @abstractmethod
    def classify(self, values):
        """Divide values into consecutive groups with metadata.

        The metadata does not need to follow any specific rules,
        although progression/regression/outlier description would be fine.

        :param values: Sequence of runs to classify.
        :type values: Iterable of float or of AvgStdevMetadata
        :returns: Classified groups
        :rtype: Iterable of RunGroup
        """
        pass
|
#!/bin/bash
# Run unittests and regression tests. Specify log filenames for regression tests if desired.
# Usage: <this script> [full-log] [clean-log]
set -e
# Log destinations default to files in the current working directory.
full_log=${1:-tests-full.log}
clean_log=${2:-tests-clean.log}
# Resolve sibling scripts relative to this script's location.
dir=`dirname $0`
echo ----------
echo UNIT TESTS
echo ----------
$dir/unittests.py
echo
echo "Passed."
echo
echo ----------------
echo REGRESSION TESTS
echo ----------------
# Capture all regression output; report failure without aborting the script
# (the || branch defeats set -e so the log is still cleaned and reported).
$dir/tests.sh >$full_log 2>&1 || echo "Regression test error!"
# Normalize volatile values (revision hashes, mtimes, timestamps, dates,
# object ids/addresses, repo roots, killed PIDs) so the cleaned log is
# byte-comparable between runs via git diff.
cat $full_log \
 | grep -v /zinc/zinc.py \
 | grep -v expect_error \
 | grep -v "^\+ echo " \
 | sed -r 's/\b[a-z0-9]{13}\b/_REV_/g' \
 | sed -r 's/mtime=[0-9.]+/mtime=_MTIME_/g' \
 | sed -r 's/^[0-9]{10}[.][0-9]/_TIMESTAMP_/g' \
 | sed -r 's/[0-9-]{10} [0-9:]{8} UTC/_DATE_/g' \
 | sed -r 's/@[0-9a-f]{6,9}/@_ID_/g' \
 | sed -r 's/at 0x[0-9a-f]{6,9}/at _ADDR_/g' \
 | sed -r 's!(s3|file)://.*zinc-testing!_REPO_ROOT_:/!g' \
 | sed -r 's/[ 0-9]+ Killed/_PID_ Killed/g' \
 > $clean_log
echo "Done."
echo
echo "Full log: $full_log"
echo "Clean log: $clean_log"
echo
echo "To compare regression test results with previously correct output, run:"
echo "git diff $clean_log"
#!/usr/bin/env bash
# Utility script to download and build libtiff
# Exit the whole script if any command fails.
set -ex
# All locations are overridable from the environment.
LIBTIFF_REPO=${LIBTIFF_REPO:=https://gitlab.com/libtiff/libtiff.git}
LOCAL_DEPS_DIR=${LOCAL_DEPS_DIR:=${PWD}/ext}
LIBTIFF_BUILD_DIR=${LIBTIFF_BUILD_DIR:=${LOCAL_DEPS_DIR}/libtiff}
LIBTIFF_INSTALL_DIR=${LIBTIFF_INSTALL_DIR:=${PWD}/ext/dist}
LIBTIFF_VERSION=${LIBTIFF_VERSION:=v4.1.0}
# BUGFIX: the original compared against `Linux` in backticks, which *executed*
# a program named "Linux" and compared uname's output against that command's
# (empty) output — so these flags were never applied on Linux. Use a proper
# command substitution on the left and a plain string on the right.
if [[ "$(uname)" == "Linux" ]] ; then
    LIBTIFF_CXX_FLAGS=${LIBTIFF_CXX_FLAGS:="-O3 -Wno-unused-function -Wno-deprecated-declarations -Wno-cast-qual -Wno-write-strings"}
fi
LIBTIFF_BUILDOPTS="${LIBTIFF_BUILDOPTS}"
BASEDIR=`pwd`
pwd
echo "libtiff install dir will be: ${LIBTIFF_INSTALL_DIR}"
mkdir -p ${LOCAL_DEPS_DIR}
pushd ${LOCAL_DEPS_DIR}
# Clone libtiff project from GitLab and build
if [[ ! -e libtiff ]] ; then
    echo "git clone ${LIBTIFF_REPO} libtiff"
    git clone ${LIBTIFF_REPO} libtiff
fi
cd libtiff
echo "git checkout ${LIBTIFF_VERSION} --force"
git checkout ${LIBTIFF_VERSION} --force
mkdir -p build
cd build
time cmake -DCMAKE_BUILD_TYPE=Release \
           -DCMAKE_INSTALL_PREFIX=${LIBTIFF_INSTALL_DIR} \
           -DCMAKE_CXX_FLAGS="${LIBTIFF_CXX_FLAGS}" \
           ${LIBTIFF_BUILDOPTS} ..
time cmake --build . --config Release --target install
popd
# ls -R ${LIBTIFF_INSTALL_DIR}
#echo "listing .."
#ls ..
# Set up paths. These will only affect the caller if this script is
# run with 'source' rather than in a separate shell.
export LIBTIFF_ROOT=$LIBTIFF_INSTALL_DIR
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${LIBTIFF_INSTALL_DIR}/lib
<reponame>muthukumaravel7/armnn
// Doxygen-generated navigation data for TestSharedObject.hpp: each entry is
// [symbol name, anchor URL fragment, child entries]. Do not edit by hand —
// regenerate with Doxygen.
var _test_shared_object_8hpp =
[
    [ "TestFunction1", "_test_shared_object_8hpp.xhtml#ad510dfceb4a71e10cfbc1c3e19c6e370", null ],
    [ "TestFunction2", "_test_shared_object_8hpp.xhtml#a5801bca5d3399a22206a3d1c53bf67be", null ],
    [ "TestFunction3", "_test_shared_object_8hpp.xhtml#a192325f79068b32530ccf11154d7b397", null ]
];
/**
 * Concatenate the string representations of all values into a single string.
 *
 * Fixes over the original reduce-based version:
 *  - an empty array returns "" instead of throwing TypeError
 *    (reduce without an initial value rejects empty arrays);
 *  - a single-element array returns its string form instead of the raw
 *    element (reduce returns the lone element without calling the callback).
 *
 * @param {Array<*>} vals values to concatenate
 * @returns {string} the concatenated string
 */
export const cat = (vals) => {
  return vals.map(String).join("");
};
|
# User-facing status messages for the KidsRuby installer, exported as
# environment variables so child install scripts can display them.
export KIDSRUBY_START_INSTALL="Starting KidsRuby install..."
export KIDSRUBY_CREATE_INSTALL_DIRECTORY="Creating installation directory..."
export KIDSRUBY_CREATE_CODE_DIRECTORY="Creating code directory..."
export KIDSRUBY_INSTALLING_QT="Installing Qt..."
export KIDSRUBY_INSTALLING_GIT="Installing git..."
export KIDSRUBY_INSTALLING_RUBY="Installing Ruby 1.9.2..."
export KIDSRUBY_INSTALLING_EDITOR="Installing KidsRuby editor..."
export KIDSRUBY_INSTALLING_COMMANDS="Installing commands..."
export KIDSRUBY_END_INSTALL="KidsRuby installation complete. Have fun!"
export KIDSRUBY_INSTALLING_GEMS="Installing gems..."
export KIDSRUBY_INSTALLING_QTBINDINGS="Installing qtbindings gem..."
export KIDSRUBY_INSTALLING_GOSU="Installing gosu gem.."
export KIDSRUBY_ERROR_NOT_SUPPORTED="Sorry, KidsRuby is not currently supported on your operating system."
// Gulp test tasks: client unit tests (karma), end-to-end tests (protractor),
// and server tests (mocha) in dev and production/TeamCity flavours.
var gulp = require('gulp');
var karma = require('karma').server;
var mocha = require('gulp-mocha');
var gutil = require('gulp-util');
var runSequence = require('run-sequence');
var protractor = require('gulp-protractor').protractor;
// Run client unit tests once via karma.
// NOTE(review): assigning gulp.task()'s return value to module.exports is
// unusual (gulp 3 returns the gulp instance); confirm callers rely on it.
module.exports = gulp.task('test:client', function (done) {
    karma.start({
        configFile: global.project_dir + '/karma.conf.js'
    }, done);
});
// Run end-to-end browser tests via protractor; rethrow so the task fails loudly.
gulp.task('test:protractor', function(){
    return gulp.src([global.project_dir + '/tests/client/e2e/*.spec.js'])
        .pipe(protractor({
            configFile: 'tests/client/protractor.conf.js',
        }))
        .on('error', function(e) {
            throw e;
        });
});
// Server tests with TeamCity reporter; process.exit() forces teardown of any
// open handles (e.g. DB connections) once mocha finishes.
gulp.task('test:server:production', function () {
    process.env.NODE_ENV = 'test';
    return gulp.src([
        'src/server/**/*.mocked.js',
        'src/server/**/*.spec.js'], {read: false})
        .pipe(mocha({
            reporter: 'mocha-teamcity-reporter'
        }))
        .once('end', function () {
            process.exit();
        });
});
// run mocha in debug mode
// node_modules/gulp-mocha/node_modules/mocha/bin/mocha --debug-brk src/server/**/*.spec.js
// Server tests for local development: human-readable reporter, errors logged
// instead of killing the watch process.
gulp.task('test:server:dev', function () {
    process.env.NODE_ENV = 'test';
    return gulp.src([
        // any mocked modules will be loaded before the specs
        'src/server/**/*.mocked.js',
        'src/server/**/*.spec.js'], {read: false})
        .pipe(mocha({
            reporter: 'list'
        }))
        .on('error', gutil.log);
});
// shortcuts...
gulp.task('tc', ['test:client']);
gulp.task('ts', ['test:server:dev']);
// test task for production
gulp.task('test', ['test:client'], function(){
    runSequence(['test:server:production']);
});
Sanitize user input to prevent SQL injection attacks: use parameterized queries (prepared statements), or an ORM that parameterizes queries automatically instead of interpolating user input into SQL strings.
/*
Copyright (c) 2005-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Do not include task.h directly. Use scheduler_common.h instead
#include "scheduler_common.h"
#include "governor.h"
#include "arena.h"
#include "thread_data.h"
#include "task_dispatcher.h"
#include "waiters.h"
#include "itt_notify.h"
#include "oneapi/tbb/detail/_task.h"
#include "oneapi/tbb/partitioner.h"
#include "oneapi/tbb/task.h"
#include <cstring>
namespace tbb {
namespace detail {
namespace r1 {
//------------------------------------------------------------------------
// resumable tasks
//------------------------------------------------------------------------
#if __TBB_RESUMABLE_TASKS
// Suspend the calling thread's current task dispatcher. The dispatcher packs
// `suspend_callback` and `user_callback` for invocation after the context
// switch (see task_dispatcher::suspend below).
void suspend(suspend_callback_type suspend_callback, void* user_callback) {
    thread_data& td = *governor::get_thread_data();
    td.my_task_dispatcher->suspend(suspend_callback, user_callback);
    // Do not access td after suspend.
}
// Make the suspended context identified by `sp` runnable again: push its
// resume task into the owning arena's task stream (the critical stream if the
// target was executing a critical task) and advertise new work so a thread
// picks it up.
void resume(suspend_point_type* sp) {
    assert_pointers_valid(sp, sp->m_arena);
    task_dispatcher& task_disp = sp->m_resume_task.m_target;
    __TBB_ASSERT(task_disp.m_thread_data == nullptr, nullptr);

    // TODO: remove this work-around
    // Prolong the arena's lifetime while all coroutines are alive
    // (otherwise the arena can be destroyed while some tasks are suspended).
    arena& a = *sp->m_arena;
    a.my_references += arena::ref_external;

    if (task_disp.m_properties.critical_task_allowed) {
        // The target is not in the process of executing critical task, so the resume task is not critical.
        a.my_resume_task_stream.push(&sp->m_resume_task, random_lane_selector(sp->m_random));
    } else {
#if __TBB_PREVIEW_CRITICAL_TASKS
        // The target is in the process of executing critical task, so the resume task is critical.
        a.my_critical_task_stream.push(&sp->m_resume_task, random_lane_selector(sp->m_random));
#endif
    }
    // Do not access target after that point.
    a.advertise_new_work<arena::wakeup>();
    // Release our reference to my_arena.
    a.on_thread_leaving<arena::ref_external>();
}
// Return the suspend point of the task dispatcher attached to the calling thread.
suspend_point_type* current_suspend_point() {
    return governor::get_thread_data()->my_task_dispatcher->get_suspend_point();
}
// Obtain a coroutine-backed task dispatcher for `td`'s arena: reuse one from
// the arena's coroutine cache when available, otherwise allocate and
// initialize a fresh one. Takes an extra external reference on the arena.
static task_dispatcher& create_coroutine(thread_data& td) {
    // We may have some task dispatchers cached
    task_dispatcher* task_disp = td.my_arena->my_co_cache.pop();
    if (!task_disp) {
        void* ptr = cache_aligned_allocate(sizeof(task_dispatcher));
        task_disp = new(ptr) task_dispatcher(td.my_arena);
        task_disp->init_suspend_point(td.my_arena, td.my_arena->my_market->worker_stack_size());
    }
    // Prolong the arena's lifetime until all coroutines is alive
    // (otherwise the arena can be destroyed while some tasks are suspended).
    // TODO: consider behavior if there are more than 4K external references.
    td.my_arena->my_references += arena::ref_external;
    return *task_disp;
}
// Switch the current thread away from this dispatcher. If the thread's
// default dispatcher has been recalled (its owner is waiting for it back),
// resume it directly; otherwise switch to a (possibly cached) coroutine.
// The callback wrapper is executed as the post-resume action on the target.
void task_dispatcher::suspend(suspend_callback_type suspend_callback, void* user_callback) {
    __TBB_ASSERT(suspend_callback != nullptr, nullptr);
    __TBB_ASSERT(user_callback != nullptr, nullptr);
    __TBB_ASSERT(m_thread_data != nullptr, nullptr);

    arena_slot* slot = m_thread_data->my_arena_slot;
    __TBB_ASSERT(slot != nullptr, nullptr);

    task_dispatcher& default_task_disp = slot->default_task_dispatcher();
    // TODO: simplify the next line, e.g. is_task_dispatcher_recalled( task_dispatcher& )
    bool is_recalled = default_task_disp.get_suspend_point()->m_is_owner_recalled.load(std::memory_order_acquire);
    task_dispatcher& target = is_recalled ? default_task_disp : create_coroutine(*m_thread_data);

    thread_data::suspend_callback_wrapper callback = { suspend_callback, user_callback, get_suspend_point() };
    m_thread_data->set_post_resume_action(thread_data::post_resume_action::callback, &callback);
    resume(target);

    if (m_properties.outermost) {
        recall_point();
    }
}
// Transfer the current thread from this dispatcher onto `target`'s coroutine.
// NOTE(review): returns true when this dispatcher is attached to a thread
// again after the context switch (its post-resume action has then run),
// false when it was left detached — confirm against callers.
bool task_dispatcher::resume(task_dispatcher& target) {
    // Do not create non-trivial objects on the stack of this function. They might never be destroyed
    {
        thread_data* td = m_thread_data;
        __TBB_ASSERT(&target != this, "We cannot resume to ourself");
        __TBB_ASSERT(td != nullptr, "This task dispatcher must be attach to a thread data");
        __TBB_ASSERT(td->my_task_dispatcher == this, "Thread data must be attached to this task dispatcher");
        __TBB_ASSERT(td->my_post_resume_action != thread_data::post_resume_action::none, "The post resume action must be set");
        __TBB_ASSERT(td->my_post_resume_arg, "The post resume action must have an argument");

        // Change the task dispatcher
        td->detach_task_dispatcher();
        td->attach_task_dispatcher(target);
    }
    __TBB_ASSERT(m_suspend_point != nullptr, "Suspend point must be created");
    __TBB_ASSERT(target.m_suspend_point != nullptr, "Suspend point must be created");
    // Swap to the target coroutine.
    m_suspend_point->m_co_context.resume(target.m_suspend_point->m_co_context);
    // Pay attention that m_thread_data can be changed after resume
    if (m_thread_data) {
        thread_data* td = m_thread_data;
        __TBB_ASSERT(td != nullptr, "This task dispatcher must be attach to a thread data");
        __TBB_ASSERT(td->my_task_dispatcher == this, "Thread data must be attached to this task dispatcher");
        td->do_post_resume_action();

        // Remove the recall flag if the thread in its original task dispatcher
        arena_slot* slot = td->my_arena_slot;
        __TBB_ASSERT(slot != nullptr, nullptr);
        if (this == slot->my_default_task_dispatcher) {
            __TBB_ASSERT(m_suspend_point != nullptr, nullptr);
            m_suspend_point->m_is_owner_recalled.store(false, std::memory_order_relaxed);
        }
        return true;
    }
    return false;
}
// Execute and clear the action that was scheduled (via set_post_resume_action)
// before the last context switch. Runs on the thread that has just resumed.
void thread_data::do_post_resume_action() {
    __TBB_ASSERT(my_post_resume_action != thread_data::post_resume_action::none, "The post resume action must be set");
    __TBB_ASSERT(my_post_resume_arg, "The post resume action must have an argument");

    switch (my_post_resume_action) {
    case post_resume_action::register_waiter:
    {
        // Wake the waiter that was registered before the switch.
        static_cast<market_concurrent_monitor::resume_context*>(my_post_resume_arg)->notify();
        break;
    }
    case post_resume_action::resume:
    {
        // Chain into resuming another suspend point.
        r1::resume(static_cast<suspend_point_type*>(my_post_resume_arg));
        break;
    }
    case post_resume_action::callback:
    {
        // Copy the wrapper before invoking it: the argument points at a stack
        // frame of the suspended context.
        suspend_callback_wrapper callback = *static_cast<suspend_callback_wrapper*>(my_post_resume_arg);
        callback();
        break;
    }
    case post_resume_action::cleanup:
    {
        task_dispatcher* to_cleanup = static_cast<task_dispatcher*>(my_post_resume_arg);
        // Release coroutine's reference to my_arena
        my_arena->on_thread_leaving<arena::ref_external>();
        // Cache the coroutine for possible later re-usage
        my_arena->my_co_cache.push(to_cleanup);
        break;
    }
    case post_resume_action::notify:
    {
        suspend_point_type* sp = static_cast<suspend_point_type*>(my_post_resume_arg);
        sp->m_is_owner_recalled.store(true, std::memory_order_release);
        // Do not access sp because it can be destroyed after the store

        auto is_our_suspend_point = [sp](market_context ctx) {
            return std::uintptr_t(sp) == ctx.my_uniq_addr;
        };
        my_arena->my_market->get_wait_list().notify(is_our_suspend_point);
        break;
    }
    default:
        __TBB_ASSERT(false, "Unknown post resume action");
    }

    my_post_resume_action = post_resume_action::none;
    my_post_resume_arg = nullptr;
}
#else
// Fallback stubs compiled when resumable tasks are unsupported on this
// platform: every entry point is a hard (release-mode) assertion failure.
void suspend(suspend_callback_type, void*) {
    __TBB_ASSERT_RELEASE(false, "Resumable tasks are unsupported on this platform");
}
void resume(suspend_point_type*) {
    __TBB_ASSERT_RELEASE(false, "Resumable tasks are unsupported on this platform");
}
suspend_point_type* current_suspend_point() {
    __TBB_ASSERT_RELEASE(false, "Resumable tasks are unsupported on this platform");
    return nullptr;
}
#endif /* __TBB_RESUMABLE_TASKS */
// Wake every thread in the market's wait list whose wait context matches the
// given address.
void notify_waiters(std::uintptr_t wait_ctx_addr) {
    auto matches_wait_ctx = [wait_ctx_addr](market_context context) {
        return context.my_uniq_addr == wait_ctx_addr;
    };
    r1::governor::get_thread_data()->my_arena->my_market->get_wait_list().notify(matches_wait_ctx);
}
} // namespace r1
} // namespace detail
} // namespace tbb
|
<filename>src/engine/score_on_death_system.h
#ifndef INCLUDED_ENGINE_SCORE_ON_DEATH_SYSTEM_H
#define INCLUDED_ENGINE_SCORE_ON_DEATH_SYSTEM_H
#include "core/scene.h"
#include "engine/system.h"
namespace engine {
// Presumably awards score when tracked entities die (inferred from the name
// and the generator command below) — confirm against the .cpp implementation.
class ScoreOnDeathSystem : public System
{
public:
    DEFINE_SYSTEM_BASE( ScoreOnDeathSystem )
    ScoreOnDeathSystem();
protected:
    // One-time setup hook overridden from System.
    virtual void Init();
    // Per-frame hook; DeltaTime is the elapsed time since the last update
    // (presumably seconds — verify against the System base class).
    virtual void Update( double DeltaTime );
private:
    Scene& mScene;  // scene this system operates on
};
} // namespace engine
#endif//INCLUDED_ENGINE_SCORE_ON_DEATH_SYSTEM_H
//command: "classgenerator.exe" -g "system" -c "score_on_death_system" -t "score_on_death"
|
/** Sample string constant. */
export const foo = "foo";

/** Sample numeric constant. */
export const bar = 1;

/** Logs "baz" to the console; returns nothing. */
export const baz = (): void => {
  // eslint-disable-next-line no-console
  console.log("baz");
};

/** Always returns true. */
export function qat(): boolean {
  return true;
}
|
/**
 * Created by zhangwei on 14-5-2.
 */
// Screenshot-and-post utility: renders a URL to a PNG with webshot, then
// (via the currently commented-out calls) uploads it to UPYun and posts it
// to weibo / tqq.
// SECURITY(review): app keys, secrets, access tokens and the UPYun password
// are hard-coded below — these credentials must be rotated and moved out of
// source control (environment variables or a config file outside the repo).
var webshot = require('webshot');
var UPYun = require('upyun-official').UPYun;
var fs = require('fs');
var stream = require('stream');
var config = require('config');
var weibo = require('weibo');
var upyun = new UPYun('ys-rsbook','zhangwei','zhangwei13');
var weibo_user= {
    appkey : '1852823608',
    secret : 'f7623417c5fbe029dd366e4e01e34e48',
    user: {"access_token":"<KEY>","remind_in":"157679999","expires_in":157679999,"uid":"1658122963", blogtype:'weibo'}
}
var tqq_user = {
    appkey : '801503676',
    secret : '4fe89516aa3ad2d545866c129aada22e',
    user: {access_token:'<KEY>',blogtype:'tqq',expires_in:'8035200',refresh_token:'<KEY>',openid:'40e560a138fbc4084c6cd6f5be2a41e4', name:'X-Spirit', nick:'ๅผ ๅจ'}
}
console.log(JSON.stringify(config));
// NOTE(review): url/filename/localfile/cloudfile are implicit globals
// (declared without var) — intentional? Derive a filesystem-safe filename
// from the URL by replacing separator characters with underscores.
url = "http://mail.163.com/";
filename = url.replace(/(:\/\/|\/|\.|\?|=)/g, '_') + ".png";
localfile = '/tmp/' + filename;
cloudfile = '/test/' + filename;
weibo.init('weibo', weibo_user.appkey, weibo_user.secret, 'http://asdfsdf.com/ss');
weibo.init('tqq', tqq_user.appkey, tqq_user.secret, 'http://www.credentialsapp.com/example');
var user = {blogtype: 'weibo', access_token:weibo_user.user.access_token}
var cursor = {count:20};
//weibo.upload(weibo_user.user, 'test from node','/tmp/http_www_baidu_com_.png', function(err, statuses){
//    console.log(err, statuses);
//});
// Take the screenshot, then read it back; the upload calls are kept but
// disabled below.
webshot(url, localfile, config.webshot.proto, function(err) {
    // screenshot now saved to google.png
    fs.readFile(localfile, null, function(err, filedata){
        // weibo.upload(weibo_user.user, status,{data:filedata, name:'baidu.png',content_type:'image/png'}, function(err, statuses){
        //     console.log(err, statuses);
        // });
        // weibo.upload(tqq_user.user, status, {data:filedata, name:'baidu.png',content_type:'image/png'}, function(err, statuses){
        //     console.log(err, statuses);
        // });
        status = 'upload from node2' + new Date();
        // if (err) {console.log(err); return;}
        // upyun.writeFile(cloudfile, data, true, function(err, resBody){
        //     console.log(err);
        //     console.log(resBody);
        // });
    });
    // upyun.getFileInfo(cloudfile,function(err, resbody){
    //     console.log(err, resbody);
    //     upyun.readFile('/test/http_www_qq_com_.png','/Users/zhangwei/Downloads/bb.png', function(err, data){
    //         fs.writeFileSync('/Users/zhangwei/Downloads/aa.png',data);
    //     })
    // })
});
|
# coding:utf-8
"""Public re-export of :class:`Details` from the private ``_typing`` module."""
from ._typing import Details

__all__ = [
    'Details'
]
|
<reponame>jsaynysa/Ksiazka
//===============================================================================
// @ IvResourceManagerD3D11.cpp
//
// D3D11 implementation of resource manager
// ------------------------------------------------------------------------------
// Copyright (C) 2008-2015 <NAME> and <NAME>.
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//===============================================================================
//-------------------------------------------------------------------------------
//-- Dependencies ---------------------------------------------------------------
//-------------------------------------------------------------------------------
#include "IvResourceManagerD3D11.h"
#include "IvVertexBufferD3D11.h"
#include "IvIndexBufferD3D11.h"
#include "IvVertexShaderD3D11.h"
#include "IvFragmentShaderD3D11.h"
#include "IvShaderProgramD3D11.h"
#include "IvTextureD3D11.h"
//-------------------------------------------------------------------------------
//-- Static Members -------------------------------------------------------------
//-------------------------------------------------------------------------------
//-------------------------------------------------------------------------------
//-- Methods --------------------------------------------------------------------
//-------------------------------------------------------------------------------
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::IvResourceManagerD3D11()
//-------------------------------------------------------------------------------
// Default constructor
//-------------------------------------------------------------------------------
IvResourceManagerD3D11::IvResourceManagerD3D11(ID3D11Device* device) : IvResourceManager()
{
    // Hold a COM reference to the device for the manager's lifetime;
    // released in the destructor.
    mDevice = device;
    mDevice->AddRef();
}   // End of IvResourceManagerD3D11::IvResourceManagerD3D11()
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::~IvResourceManagerD3D11()
//-------------------------------------------------------------------------------
// Destructor
//-------------------------------------------------------------------------------
IvResourceManagerD3D11::~IvResourceManagerD3D11()
{
    // Release the COM reference taken in the constructor.
    mDevice->Release();
}   // End of IvResourceManagerD3D11::~IvResourceManagerD3D11()
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateVertexBuffer()
//-------------------------------------------------------------------------------
// Create platform-dependent vertex buffer
//-------------------------------------------------------------------------------
IvVertexBuffer*
IvResourceManagerD3D11::CreateVertexBuffer( IvVertexFormat format, unsigned int numVertices,
                                            void* data, IvDataUsage usage )
{
    // Allocate the D3D11 wrapper and delegate creation; on success hand
    // ownership to the caller, otherwise clean up and return null.
    IvVertexBufferD3D11* newBuffer = new IvVertexBufferD3D11();
    if ( newBuffer->Create( format, numVertices, data, usage, mDevice ) )
    {
        return newBuffer;
    }
    delete newBuffer;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::Destroy()
//-------------------------------------------------------------------------------
// Delete platform-dependent vertex buffer
//-------------------------------------------------------------------------------
void
IvResourceManagerD3D11::Destroy( IvVertexBuffer* vb)
{
    // Null guard added for consistency with Destroy(IvVertexShader*) /
    // Destroy(IvFragmentShader*): calling ->Destroy() through a null pointer
    // is undefined behavior, and callers may legitimately pass a buffer that
    // failed to create.
    if ( vb )
    {
        IvVertexBufferD3D11* vbD3D11 = static_cast<IvVertexBufferD3D11*>(vb);
        vbD3D11->Destroy();
        delete vbD3D11;
    }
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateIndexBuffer()
//-------------------------------------------------------------------------------
// Create platform-dependent index buffer
//-------------------------------------------------------------------------------
IvIndexBuffer*
IvResourceManagerD3D11::CreateIndexBuffer(unsigned int numIndices, void* data, IvDataUsage usage)
{
    // Allocate the D3D11 wrapper and delegate creation; on success hand
    // ownership to the caller, otherwise clean up and return null.
    IvIndexBufferD3D11* newBuffer = new IvIndexBufferD3D11();
    if ( newBuffer->Create( numIndices, data, usage, mDevice ) )
    {
        return newBuffer;
    }
    delete newBuffer;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::Destroy()
//-------------------------------------------------------------------------------
// Delete platform-dependent index buffer
//-------------------------------------------------------------------------------
void
IvResourceManagerD3D11::Destroy( IvIndexBuffer* ib)
{
    // Null guard added for consistency with the shader Destroy() overloads;
    // deleting through a null wrapper after calling ->Destroy() would be UB.
    if ( ib )
    {
        IvIndexBufferD3D11* ibD3D11 = static_cast<IvIndexBufferD3D11*>(ib);
        ibD3D11->Destroy();
        delete ibD3D11;
    }
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateVertexShaderFromFile()
//-------------------------------------------------------------------------------
// Create platform-dependent vertex shader
//-------------------------------------------------------------------------------
IvVertexShader*
IvResourceManagerD3D11::CreateVertexShaderFromFile( const char* filename )
{
    // Compile the vertex shader from disk; null signals failure.
    IvVertexShaderD3D11* vertexShader = new IvVertexShaderD3D11();
    if ( vertexShader->CreateFromFile( filename, mDevice ) )
    {
        return vertexShader;
    }
    delete vertexShader;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateVertexShaderFromString()
//-------------------------------------------------------------------------------
// Create platform-dependent vertex shader
//-------------------------------------------------------------------------------
IvVertexShader*
IvResourceManagerD3D11::CreateVertexShaderFromString( const char* string )
{
    // Compile the vertex shader from in-memory source; null signals failure.
    IvVertexShaderD3D11* vertexShader = new IvVertexShaderD3D11();
    if ( vertexShader->CreateFromString( string, mDevice ) )
    {
        return vertexShader;
    }
    delete vertexShader;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateVertexShader()
//-------------------------------------------------------------------------------
// Create platform-dependent vertex shader
//-------------------------------------------------------------------------------
IvVertexShader*
IvResourceManagerD3D11::CreateDefaultVertexShader( IvVertexFormat format )
{
    // Build the stock vertex shader matching the given vertex format.
    IvVertexShaderD3D11* vertexShader = new IvVertexShaderD3D11();
    if ( vertexShader->CreateDefault( format, mDevice ) )
    {
        return vertexShader;
    }
    delete vertexShader;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::Destroy()
//-------------------------------------------------------------------------------
// Delete platform-dependent vertex shader
//-------------------------------------------------------------------------------
void
IvResourceManagerD3D11::Destroy( IvVertexShader* vs)
{
    // Null is a no-op so callers may pass a shader that failed to create.
    if ( !vs )
    {
        return;
    }
    IvVertexShaderD3D11* vsD3D11 = static_cast<IvVertexShaderD3D11*>(vs);
    vsD3D11->Destroy();
    delete vsD3D11;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateFragmentShaderFromFile()
//-------------------------------------------------------------------------------
// Create platform-dependent vertex shader
//-------------------------------------------------------------------------------
IvFragmentShader*
IvResourceManagerD3D11::CreateFragmentShaderFromFile( const char* filename )
{
    // Compile the fragment shader from disk; null signals failure.
    IvFragmentShaderD3D11* fragmentShader = new IvFragmentShaderD3D11();
    if ( fragmentShader->CreateFromFile( filename, mDevice ) )
    {
        return fragmentShader;
    }
    delete fragmentShader;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateFragmentShaderFromString()
//-------------------------------------------------------------------------------
// Create platform-dependent vertex shader
//-------------------------------------------------------------------------------
IvFragmentShader*
IvResourceManagerD3D11::CreateFragmentShaderFromString( const char* string )
{
    // Compile the fragment shader from in-memory source; null signals failure.
    IvFragmentShaderD3D11* fragmentShader = new IvFragmentShaderD3D11();
    if ( fragmentShader->CreateFromString( string, mDevice ) )
    {
        return fragmentShader;
    }
    delete fragmentShader;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateFragmentShader()
//-------------------------------------------------------------------------------
// Create platform-dependent vertex shader
//-------------------------------------------------------------------------------
IvFragmentShader*
IvResourceManagerD3D11::CreateDefaultFragmentShader( IvVertexFormat format )
{
    // Build the stock fragment shader matching the given vertex format.
    IvFragmentShaderD3D11* fragmentShader = new IvFragmentShaderD3D11();
    if ( fragmentShader->CreateDefault( format, mDevice ) )
    {
        return fragmentShader;
    }
    delete fragmentShader;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::Destroy()
//-------------------------------------------------------------------------------
// Delete platform-dependent vertex shader
//-------------------------------------------------------------------------------
void
IvResourceManagerD3D11::Destroy( IvFragmentShader* fs)
{
    // Null is a no-op so callers may pass a shader that failed to create.
    if ( !fs )
    {
        return;
    }
    IvFragmentShaderD3D11* fsD3D11 = static_cast<IvFragmentShaderD3D11*>(fs);
    fsD3D11->Destroy();
    delete fsD3D11;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateShaderProgram()
//-------------------------------------------------------------------------------
// Create platform-dependent shader program
//-------------------------------------------------------------------------------
// Link a vertex and fragment shader into a program. Takes ownership of both
// shaders: they are destroyed before returning, whether or not linking
// succeeded. NOTE(review): destroying vs/fs even on the success path implies
// IvShaderProgramD3D11::Create copies or re-references what it needs —
// confirm against that implementation before changing.
IvShaderProgram*
IvResourceManagerD3D11::CreateShaderProgram( IvVertexShader* vs, IvFragmentShader* fs )
{
    // make sure we're not handed garbage
    if ( vs == 0 || fs == 0 )
        return 0;

    IvShaderProgramD3D11* newProgram = new IvShaderProgramD3D11();
    if ( !newProgram->Create( static_cast<IvVertexShaderD3D11*>(vs),
                              static_cast<IvFragmentShaderD3D11*>(fs) ) )
    {
        delete newProgram;
        newProgram = 0;
    }

    Destroy(vs);
    Destroy(fs);

    return newProgram;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::Destroy()
//-------------------------------------------------------------------------------
// Delete platform-dependent index buffer
//-------------------------------------------------------------------------------
void
IvResourceManagerD3D11::Destroy( IvShaderProgram* sp )
{
    // Null guard added for consistency with the shader Destroy() overloads;
    // deleting through a null wrapper after calling ->Destroy() would be UB.
    if ( sp )
    {
        IvShaderProgramD3D11* spD3D11 = static_cast<IvShaderProgramD3D11*>(sp);
        spD3D11->Destroy();
        delete spD3D11;
    }
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateTexture()
//-------------------------------------------------------------------------------
// Create platform-dependent texture
//-------------------------------------------------------------------------------
IvTexture*
IvResourceManagerD3D11::CreateTexture(IvTextureFormat format,
                                      unsigned int width, unsigned int height,
                                      void* data, IvDataUsage usage)
{
    // Allocate the texture wrapper and delegate creation; null signals failure.
    IvTextureD3D11* newTexture = new IvTextureD3D11();
    if (newTexture->Create(width, height, format, data, usage, mDevice))
    {
        return newTexture;
    }
    delete newTexture;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::CreateMipmappedTexture()
//-------------------------------------------------------------------------------
// Create platform-dependent texture
//-------------------------------------------------------------------------------
IvTexture*
IvResourceManagerD3D11::CreateMipmappedTexture(IvTextureFormat format,
                                               unsigned int width, unsigned int height,
                                               void** data, unsigned int levels, IvDataUsage usage)
{
    // Allocate the texture wrapper and delegate mipmapped creation;
    // null signals failure.
    IvTextureD3D11* newTexture = new IvTextureD3D11();
    if (newTexture->CreateMipmapped(width, height, format, data, levels, usage, mDevice))
    {
        return newTexture;
    }
    delete newTexture;
    return 0;
}
//-------------------------------------------------------------------------------
// @ IvResourceManagerD3D11::Destroy()
//-------------------------------------------------------------------------------
// Delete platform-dependent texture
//-------------------------------------------------------------------------------
void
IvResourceManagerD3D11::Destroy( IvTexture* tex )
{
    // Null guard added for consistency with the shader Destroy() overloads;
    // deleting through a null wrapper after calling ->Destroy() would be UB.
    if ( tex )
    {
        IvTextureD3D11* texD3D11 = static_cast<IvTextureD3D11*>(tex);
        texD3D11->Destroy();
        delete texD3D11;
    }
}
|
import numpy as np
class Robot:
    def __init__(self, wheels_width, wheels_scale, camera_matrix, camera_dist):
        """Differential-drive robot model with a calibrated camera.

        :param wheels_width: distance between the left and right wheels
        :param wheels_scale: factor converting wheel ticks/s to m/s
        :param camera_matrix: camera intrinsics (focal lengths and centre)
        :param camera_dist: lens distortion coefficients
        """
        # State is a vector of [x,y,theta]' (3x1 column vector)
        self.state = np.zeros((3,1))
        # Wheel parameters
        self.wheels_width = wheels_width  # The distance between the left and right wheels
        self.wheels_scale = wheels_scale  # The scaling factor converting ticks/s to m/s
        # Camera parameters
        self.camera_matrix = camera_matrix  # Matrix of the focal lengths and camera centre
        self.camera_dist = camera_dist  # Distortion coefficients
def drive(self, drive_meas):
# left_speed and right_speed are the speeds in ticks/s of the left and right wheels.
# dt is the length of time to drive for
# Compute the linear and angular velocity
linear_velocity, angular_velocity = self.convert_wheel_speeds(drive_meas.left_speed, drive_meas.right_speed)
# Apply the velocities
dt = drive_meas.dt
# TODO: compute state (x,y,theta) from linear and angular velocity
current_theta = self.state[2]
if angular_velocity != 0:
# Radius of curvature
R = linear_velocity / angular_velocity
predict_theta = current_theta + angular_velocity * dt
self.state[0] = self.state[0] + R*(-np.sin(current_theta) + np.sin(predict_theta))
self.state[1] = self.state[1] + R*(np.cos(current_theta) - np.cos(predict_theta))
self.state[2] = predict_theta
else:
self.state[0] = self.state[0] + linear_velocity * dt * np.cos(self.state[2])
self.state[1] = self.state[1] + linear_velocity * dt * np.sin(self.state[2])
self.state[2] = self.state[2]
def measure(self, markers, idx_list):
# Markers are 2d landmarks in a 2xn structure where there are n landmarks.
# The index list tells the function which landmarks to measure in order.
# Construct a 2x2 rotation matrix from the robot angle
th = self.state[2]
Rot_theta = np.block([[np.cos(th), -np.sin(th)],[np.sin(th), np.cos(th)]])
robot_xy = self.state[0:2,:]
measurements = []
for idx in idx_list:
marker = markers[:,idx:idx+1]
marker_bff = Rot_theta.T @ (marker - robot_xy)
measurements.append(marker_bff)
# Stack the measurements in a 2xm structure.
markers_bff = np.concatenate(measurements, axis=1)
return markers_bff
def convert_wheel_speeds(self, left_speed, right_speed):
# Convert to m/s
left_speed_m = left_speed * self.wheels_scale
right_speed_m = right_speed * self.wheels_scale
# Compute the linear and angular velocity
linear_velocity = (left_speed_m + right_speed_m) / 2.0
angular_velocity = (right_speed_m - left_speed_m) / self.wheels_width
return linear_velocity, angular_velocity
# Derivatives and Covariance
# --------------------------
def derivative_drive(self, drive_meas):
# Compute the differential of drive w.r.t. the robot state
DFx = np.zeros((3,3))
DFx[0,0] = 1
DFx[1,1] = 1
DFx[2,2] = 1
lin_vel, ang_vel = self.convert_wheel_speeds(drive_meas.left_speed, drive_meas.right_speed)
dt = drive_meas.dt
th = self.state[2]
if ang_vel == 0:
DFx[0,2] = -np.sin(th) * lin_vel * dt
DFx[1,2] = np.cos(th) * lin_vel * dt
else:
DFx[0,2] = lin_vel / ang_vel * (np.cos(th+dt*ang_vel) - np.cos(th))
DFx[1,2] = -lin_vel / ang_vel * (-np.sin(th+dt*ang_vel) + np.sin(th))
return DFx
def derivative_measure(self, markers, idx_list):
# Compute the derivative of the markers in the order given by idx_list w.r.t. robot and markers
n = 2*len(idx_list)
m = 3 + 2*markers.shape[1]
DH = np.zeros((n,m))
robot_xy = self.state[0:2,:]
th = self.state[2]
Rot_theta = np.block([[np.cos(th), -np.sin(th)],[np.sin(th), np.cos(th)]])
DRot_theta = np.block([[-np.sin(th), -np.cos(th)],[np.cos(th), -np.sin(th)]])
for i in range(n//2):
j = idx_list[i]
# i identifies which measurement to differentiate.
# j identifies the marker that i corresponds to.
lmj_inertial = markers[:,j:j+1]
# lmj_bff = Rot_theta.T @ (lmj_inertial - robot_xy)
# robot xy DH
DH[2*i:2*i+2,0:2] = - Rot_theta.T
# robot theta DH
DH[2*i:2*i+2, 2:3] = DRot_theta.T @ (lmj_inertial - robot_xy)
# lm xy DH
DH[2*i:2*i+2, 3+2*j:3+2*j+2] = Rot_theta.T
# print(DH[i:i+2,:])
return DH
def covariance_drive(self, drive_meas):
# Derivative of lin_vel, ang_vel w.r.t. left_speed, right_speed
Jac1 = np.array([[self.wheels_scale/2, self.wheels_scale/2],
[-self.wheels_scale/self.wheels_width, self.wheels_scale/self.wheels_width]])
lin_vel, ang_vel = self.convert_wheel_speeds(drive_meas.left_speed, drive_meas.right_speed)
th = self.state[2]
dt = drive_meas.dt
th2 = th + dt*ang_vel
# Derivative of x,y,theta w.r.t. lin_vel, ang_vel
Jac2 = np.zeros((3,2))
if ang_vel == 0:
Jac2[0,0] = np.cos(th)
Jac2[1,0] = np.sin(th)
else:
Jac2[0,0] = 1/ang_vel * (np.sin(th2) - np.sin(th))
Jac2[0,1] = -1/(ang_vel**2) * (np.sin(th2) - np.sin(th)) + \
lin_vel / ang_vel * (dt * np.cos(th2))
Jac2[1,0] = -1/ang_vel * (np.cos(th2) - np.cos(th))
Jac2[1,1] = 1/(ang_vel**2) * (np.cos(th2) - np.cos(th)) + \
-lin_vel / ang_vel * (-dt * np.sin(th2))
Jac2[2,1] = dt
# Derivative of x,y,theta w.r.t. left_speed, right_speed
Jac = Jac2 @ Jac1
# Compute covariance
cov = np.diag((drive_meas.left_cov, drive_meas.right_cov))
cov = Jac @ cov @ Jac.T
return cov
|
<reponame>ksattler/piglet
package dbis.piglet.codegen.flink.emitter
import dbis.piglet.expr.Func
import dbis.piglet.codegen.CodeGenContext
import dbis.piglet.op.Accumulate
import dbis.piglet.codegen.scala_lang.ScalaEmitter
import dbis.piglet.expr.Expr
import scala.collection.mutable.{ Map => MMap }
import dbis.piglet.expr.DerefTuple
import dbis.piglet.expr.RefExprExtractor
import dbis.piglet.expr.NamedField
import dbis.piglet.expr.PositionalField
import dbis.piglet.schema.Types
import dbis.piglet.op._
import dbis.piglet.expr._
import dbis.piglet.udf._
import dbis.piglet.schema._
import dbis.piglet.codegen.CodeEmitter
import scala.collection.mutable.ListBuffer
import dbis.piglet.codegen.CodeGenException
/**
 * Flink-specific ACCUMULATE emitter: renders an `.aggregate(...)` chain from
 * the operator's aggregate expressions.
 */
class AccumulateEmitter extends dbis.piglet.codegen.scala_lang.AccumulateEmitter {
  override def template: String = """        val <out> = <in>.aggregate(Aggregations.<init_aggr_expr>)<more_aggr_expr:{exp|.and(Aggregations.<exp>)}>""".stripMargin

  override def code(ctx: CodeGenContext, op: Accumulate): String = {
    if (!op.schema.isDefined)
      throw CodeGenException("schema required in ACCUMULATE")
    val inputSchemaDefined = op.inputSchema.isDefined
    val outClassName = ScalaEmitter.schemaClassName(op.schema.get.className)
    var initAggrFun: String = ""
    var moreAggrFuns: ListBuffer[String] = new ListBuffer()
    val updExpr = op.generator.exprs.zipWithIndex.map {
      case (e, i) =>
        // Each generator expression must be an aggregate function call.
        require(e.expr.isInstanceOf[Func])
        val funcName = e.expr.asInstanceOf[Func].f.toUpperCase
        // Extract the (single) field reference the aggregate operates on.
        val traverse = new RefExprExtractor
        e.expr.traverseAnd(null, traverse.collectRefExprs)
        val refExpr = traverse.exprs.head
        val str: String = refExpr.r match {
          // BUG FIX: the original used s"$op.inputSchema.get.indexOfField(nf)",
          // which interpolates only `op` and emits the rest as literal text
          // (op.toString + ".inputSchema.get.indexOfField(nf)"). Wrap the whole
          // expression in ${...} so the actual field index is rendered.
          case nf @ NamedField(n, _) => s"${op.inputSchema.get.indexOfField(nf)}"
          case PositionalField(p) => if (inputSchemaDefined) s"$p" else "0"
          case _ => ""
        }
        // First aggregate seeds the chain; the rest are `.and(...)` links.
        if (i == 0) initAggrFun = (funcName + "," + str) else moreAggrFuns += (funcName + "," + str)
    }
    render(Map("out" -> op.outPipeName, "in" -> op.inPipeName, "class" -> outClassName,
      "init_aggr_expr" -> initAggrFun, "more_aggr_expr" -> moreAggrFuns))
  }
}
/** Companion providing the lazily-created singleton used by the code generator. */
object AccumulateEmitter {
  lazy val instance = new AccumulateEmitter
}
import java.util.ArrayList;
import java.util.List;
public class Fibonacci {
public static List<Integer> getFirst10Values() {
List<Integer> sequence = new ArrayList<Integer>();
int first = 0;
int second = 1;
sequence.add(first);
sequence.add(second);
for (int i = 2; i < 10; i++) {
int next = first + second;
sequence.add(next);
first = second;
second = next;
}
return sequence;
}
} |
<gh_stars>0
package io.quarkuscoffeeshop.counter.domain;
import java.math.BigDecimal;
/**
* Models the Menu Item
*/
public enum Item {
//Beverages
CAPPUCCINO(BigDecimal.valueOf(4.50)), COFFEE_BLACK(BigDecimal.valueOf(3.00)), COFFEE_WITH_ROOM(BigDecimal.valueOf(3.00)), ESPRESSO(BigDecimal.valueOf(3.50)), ESPRESSO_DOUBLE(BigDecimal.valueOf(4.50)), LATTE(BigDecimal.valueOf(4.50)),
//Food
CAKEPOP(BigDecimal.valueOf(2.50)), CROISSANT(BigDecimal.valueOf(3.25)), MUFFIN(BigDecimal.valueOf(3.00)), CROISSANT_CHOCOLATE(BigDecimal.valueOf(3.50));
private BigDecimal price;
public BigDecimal getPrice() {
return this.price;
}
private Item(BigDecimal price) {
this.price = price;
}
}
|
#include <iostream>
// Minimal stand-in for the path/tree node consumed by calculateTotalRval().
// NOTE(review): only the starting index is modelled here; the real Node
// presumably carries more state -- confirm against the original project.
class Node {
public:
    int idxf_; // Example member variable representing the starting index
    // Other member variables and methods
};
// Placeholder for the range type that SlicedPathIterator iterates over.
// NOTE(review): intentionally empty stub -- fill in from the real project.
class Range {
    // Define the Range class with necessary member variables and methods
};
// Iterator over the slice of a path selected by a Range; constructed fresh
// for every write in calculateTotalRval().
class SlicedPathIterator {
public:
    SlicedPathIterator(Node* node, Range* range_p) {
        // Implement the constructor based on the Node and Range objects
    }
    // Other member variables and methods
};
// Argument bundle threaded through dispatchWrite(); tracks the running write
// offset and source position across loop iterations.
class NArgs {
public:
    // FIX: calculateTotalRval() advances these with `+=` on a
    // default-constructed instance, which read indeterminate values
    // (undefined behavior). Default member initializers give them a
    // well-defined starting point.
    int off_ = 0; // current write offset (advanced by getStride() per iteration)
    int src_ = 0; // current source-data position (advanced by srcStride per iteration)
    // Other member variables and methods
};
// Dispatch one MMIO write described by *nargs along the sliced path *it and
// return the per-iteration rval.
// FIX: the original stub had no return statement; flowing off the end of a
// non-void function and using the result is undefined behavior. Return 0
// ("nothing written") until the real implementation lands.
// NOTE(review): confirm the intended neutral return value.
int dispatchWrite(SlicedPathIterator* it, NArgs* nargs) {
    // Implement the dispatchWrite function based on the SlicedPathIterator and NArgs objects
    (void)it;
    (void)nargs;
    return 0;
}
// Sum dispatchWrite() results over the index range [(*node)->idxf_, to],
// advancing the NArgs offsets between iterations.
// NOTE(review): nargs.off_ / nargs.src_ are advanced with `+=` without ever
// being initialized here -- undefined behavior unless NArgs itself
// default-initializes them; confirm.
// NOTE(review): getStride() is not declared anywhere in this file -- confirm
// where it is defined and what stride it returns.
int calculateTotalRval(Node** node, int to, Range* range_p, int srcStride) {
    int rval = 0;
    NArgs nargs;
    for (int i = (*node)->idxf_; i <= to; i++) {
        // A fresh iterator is built for every write.
        SlicedPathIterator it(*node, range_p);
        rval += dispatchWrite(&it, &nargs);
        nargs.off_ += getStride();
        nargs.src_ += srcStride;
    }
    return rval;
}
int main() {
// Test the calculateTotalRval function with sample inputs
// Sample usage:
Node* node = new Node(); // Initialize the Node object
Range* range = new Range(); // Initialize the Range object
int totalRval = calculateTotalRval(&node, 10, range, 5);
std::cout << "Total rval: " << totalRval << std::endl;
return 0;
} |
var request = require("request");
var utils = require("../utils");
var HttpError = require("../../lib/http-error");
/**
 * Express-style handler: proxies a file deletion to the publish service and,
 * on success, bumps the owning project's `date_updated` timestamp.
 * Expects `req.user`, `req.project` and `req.params.fileId` to be populated
 * by earlier middleware.
 */
module.exports = function(config, req, res, next) {
  var user = req.user;
  var project = req.project;
  var fileId = req.params.fileId;
  var url = config.publishURL + "/files/" + fileId;
  request(
    {
      method: "DELETE",
      uri: url,
      headers: {
        Authorization: "token " + user.token
      }
    },
    function(err, response) {
      if (err) {
        // Transport-level failure (connection refused, DNS, ...): report 500.
        res.status(500);
        next(
          HttpError.format(
            {
              message: `Failed to send request to ${url}`,
              context: err
            },
            req
          )
        );
        return;
      }
      if (response.statusCode !== 204) {
        // Publish service responded, but not with the expected 204 No Content;
        // mirror its status code back to the client.
        res.status(response.statusCode);
        next(
          HttpError.format(
            {
              message: `Request to ${url} returned a status of ${
                response.statusCode
              }`,
              context: response.body
            },
            req
          )
        );
        return;
      }
      // Callers may supply the timestamp (keeps client/server views in sync);
      // otherwise stamp with the current time.
      project.date_updated = req.query.dateUpdated || new Date().toISOString();
      utils.updateProject(config, user, project, function(err, status) {
        if (err) {
          res.status(status);
          next(HttpError.format(err, req));
          return;
        }
        res.sendStatus(200);
      });
    }
  );
};
|
<filename>structure_denoise/tools.py
from __future__ import print_function
import os
from datetime import datetime
import warnings
import numpy as np
from matplotlib.figure import Figure
import matplotlib.animation as animation
from pywt import wavedec, waverec, dwt_max_level
from tqdm import tqdm, trange
from .denoise import clean_frames, clean_frames_quick, clean_frames_quickest
# ------- Headline method -------
def clean_blocks(data_source, block_size, block_overlap=0, multiresolution=False, wavelet='db2', wave_levels=None,
                 skip_lowpass=False, **cleaner_kwargs):
    """
    Take blocks from a DataSource object and clean the frames by removing a subspace
    learned to represent the structured noise.

    Args:
        data_source: block-iterable source; cleaned data is written back in place.
        block_size: block length in seconds.
        block_overlap: overlap between consecutive blocks in seconds; overlapping
            regions are blended by ``stitch_frames``.
        multiresolution: if True, clean each wavelet decomposition level separately.
        wavelet / wave_levels: wavelet name and decomposition depth for the
            multiresolution path.
        skip_lowpass: if True, leave the approximation (lowpass) coefficients
            untouched.
        **cleaner_kwargs: forwarded to ``clean_frames_quickest``.
    """
    chan_map = data_source.channel_map
    # Record the cleaning parameters alongside the output for provenance.
    data_source.write_parameters(block_size=block_size, multiresolution=multiresolution, wavelet=wavelet,
                                 wave_levels=wave_levels, **cleaner_kwargs)
    # Block length and overlap in samples.
    L = int(data_source.samp_rate * block_size)
    P = int(data_source.samp_rate * block_overlap)
    prev_sl = None
    prev_clean_block = None
    for block, sl in tqdm(data_source.iter_blocks(L, overlap=P, return_slice=True)):
        if multiresolution:
            b_coefs = wavedec(block, wavelet, axis=1, level=wave_levels)
            # Index 0 holds the approximation coefficients; skip them if asked.
            start_coefs = int(skip_lowpass)
            clean_coefs = [clean_frames_quickest(c, chan_map, **cleaner_kwargs) for c in b_coefs[start_coefs:]]
            if skip_lowpass:
                clean_coefs.insert(0, b_coefs[0])
            clean_block = waverec(clean_coefs, wavelet, axis=1)
        else:
            clean_block = clean_frames_quickest(block, chan_map, **cleaner_kwargs)
        # write current block
        data_source[sl] = clean_block
        # if overlapping, stitch overlap region with the previous block
        if P > 0 and prev_sl:
            stitch_frames(data_source, prev_sl, sl, prev_clean_block, clean_block)
        prev_sl = sl
        prev_clean_block = clean_block
def stitch_frames(data_source, left_slice, right_slice, left_clean, right_clean):
    """Cross-fade the residuals of two cleaned, overlapping blocks.

    Inside the overlap the correction applied to the raw signal ramps
    linearly from fully the left block's residual to fully the right
    block's, avoiding a discontinuity at the block boundary. The result is
    written back to ``data_source`` in place.
    """
    overlap_len = left_slice.stop - right_slice.start
    overlap_slice = slice(right_slice.start, left_slice.stop)
    block_len = left_slice.stop - left_slice.start
    # Column windows of the overlap inside each cleaned block.
    tail_cols = (slice(None), slice(block_len - overlap_len, block_len))
    head_cols = (slice(None), slice(0, overlap_len))
    raw = data_source[overlap_slice]
    # Residual (removed-noise estimate) according to each block.
    resid_left = raw - left_clean[tail_cols]
    resid_right = raw - right_clean[head_cols]
    # Weight ramps 0 -> 1 across the overlap, handing off left to right.
    mix = np.linspace(0, 1, overlap_len)
    data_source[overlap_slice] = raw - (1 - mix) * resid_left - mix * resid_right
# ------- Diagnostics -------
def new_save_dir():
    """Create a new plots directory named after the current time and return its path."""
    stamp = datetime.now().strftime('%y%m%d-%H%M%S')
    save_dir = os.path.join('diagnostic_plots', stamp)
    # Probe for an unused "_<n>" suffix in case a directory from the same
    # second already exists.
    suffix = 0
    while os.path.exists('{}_{}'.format(save_dir, suffix)):
        print('adding to', save_dir)
        suffix += 1
    if suffix > 0:
        save_dir = '{}_{}'.format(save_dir, suffix)
        print('renamed', save_dir)
    os.makedirs(save_dir)
    return save_dir
def all_wavelets_diagnostics(data_source, block_size=0.5, wavelet='db2', **kwargs):
    """Run :func:`diagnostics` once per wavelet level on the same block.

    Collects the per-level cleaned coefficients, and (unless ``video=False``
    is passed through kwargs) reconstructs the full-band cleaned block and
    writes a comparison movie into a fresh diagnostics directory.
    """
    L = int(block_size * data_source.samp_rate)
    max_level = dwt_max_level(L, wavelet)
    kwargs['multiresolution'] = True
    # Pin a random block once so every wavelet level sees the same data.
    if kwargs.get('block', None) is None:
        start = np.random.randint(0, data_source.series_length - L)
        kwargs['block'] = start // L
    coefs = []
    save_dir = new_save_dir()
    for level in range(0, max_level + 1):
        # diagnostics(..., return_cleaned=True) returns (f1, f2, cleaned);
        # keep only the cleaned coefficients for this level.
        cf = diagnostics(data_source,
                         block_size=block_size,
                         wavelet=wavelet,
                         wave_level=level,
                         return_cleaned=True,
                         save_dir=save_dir,
                         **kwargs)[2]
        coefs.append(cf)
    if kwargs.get('video', True):
        # Recombine all cleaned levels into a full-band cleaned block.
        cleaned = waverec(coefs, wavelet, axis=1)
        start = kwargs['block'] * L
        raw = data_source[start:start + L]
        video_file = os.path.join(save_dir, 'error_image_frames_fullband.mp4')
        make_video(raw, cleaned, data_source.channel_map, video_file)
def diagnostics(data_source,
                block_size=0.5,
                block=None,
                multiresolution=False,
                wavelet='db2',
                wave_level=2,
                video=True,
                save_figs=True,
                return_cleaned=False,
                save_dir='',
                **cleaner_kwargs):
    """Clean one block and produce diagnostic figures (and optionally a movie).

    Args:
        data_source: source providing raw data, sampling rate and channel map.
        block_size: block length in seconds.
        block: block index to analyze; a random block is drawn if None.
        multiresolution: clean a single wavelet level (``wave_level``) in
            isolation instead of the full band.
        video / save_figs: write an error movie / save the figures to disk.
        return_cleaned: also return the cleaned data (coefficients in the
            multiresolution path, the cleaned block otherwise).
        save_dir: output directory; a timestamped one is created if empty.
        **cleaner_kwargs: forwarded to ``clean_frames_quickest``.

    Returns:
        (frames_figure, subspace_figure) or
        (frames_figure, subspace_figure, cleaned) if ``return_cleaned``.
    """
    if (save_figs or video) and not save_dir:
        print('need a savedir')
        save_dir = new_save_dir()
    chan_map = data_source.channel_map
    L = int(block_size * data_source.samp_rate)
    if block is None:
        start = np.random.randint(0, data_source.series_length - L)
    else:
        start = block * L
    block = data_source[start:start + L]
    if multiresolution:
        b_coefs = wavedec(block, wavelet, axis=1)
        clean_coefs = []
        if wave_level >= len(b_coefs):
            print('wave level too high -- using highest available')
            wave_level = len(b_coefs) - 1
        # Zero every level except wave_level and clean only that one, so the
        # figures isolate a single wavelet band.
        for n in range(len(b_coefs)):
            if n == wave_level:
                c, params = clean_frames_quickest(b_coefs[n], chan_map, return_diagnostics=True, **cleaner_kwargs)
                clean_coefs.append(c)
                continue
            b_coefs[n][:] = 0
            clean_coefs.append(b_coefs[n].copy())
        clean_block = waverec(clean_coefs, wavelet, axis=1)
        # Reconstruct the raw signal restricted to the same single band for a
        # like-for-like comparison.
        block = waverec(b_coefs, wavelet, axis=1)
        img_tag = 'WL_{}'.format(wave_level)
        print('Model order:', params['model_order'], 'resid order:', params['resid_basis'].shape[1])
    else:
        clean_block, params = clean_frames_quickest(block, chan_map, return_diagnostics=True, **cleaner_kwargs)
        img_tag = 'full_band'
    # These kwargs are consumed by the cleaner, not by the figure code.
    cleaner_kwargs.pop('max_order', None)
    cleaner_kwargs.pop('use_local_regression', None)
    f1, f2 = make_process_figures(block, clean_block, chan_map, params, **cleaner_kwargs)
    if save_figs:
        f1.savefig(os.path.join(save_dir, 'denoising_figure_{}.pdf'.format(img_tag)))
        f2.savefig(os.path.join(save_dir, 'subspaces_figure_{}.pdf'.format(img_tag)))
    if video:
        make_video(block, clean_block, chan_map, os.path.join(save_dir, 'error_image_frames_{}.mp4'.format(img_tag)))
    if return_cleaned:
        # BUG FIX: the original referenced ``clean_blocks`` (the module-level
        # function) instead of the local ``clean_block``, so the full-band
        # path returned a function object instead of the cleaned data.
        cleaned = clean_coefs[wave_level] if multiresolution else clean_block
        return f1, f2, cleaned
    return f1, f2
def make_process_figures(raw, clean, channel_map, params, **kwargs):
    """Build the two diagnostic figures for one cleaned block.

    Returns (frames_figure, subspace_figure): the first compares raw/clean/
    error frames at low, median and high variance; the second shows the
    semivariogram fit, leading covariance eigenvectors, a low-rank projection
    of a median-variance frame and (if available) the residual basis.
    """
    from .denoise import covar_model, error_image
    import matplotlib.pyplot as pp
    error = raw - clean
    c_model = {'theta': params['theta']}
    lam, Vm = covar_model(c_model, channel_map)
    # figure of low, median, and high error frames
    # Rank frames by variance so representative percentiles can be picked.
    frame_var = np.argsort(np.var(raw, axis=0))
    n_frames = len(frame_var)
    f_frames, axs = pp.subplots(3, 3, figsize=(10, 10))
    row_titles = ['Raw frame', 'Clean frame', 'Error frame']
    clim = np.percentile(raw, [5, 95])
    for r, pt in enumerate([25, 50, 75]):
        # Frame at the pt-th variance percentile.
        i = frame_var[int(pt * n_frames / 100.0)]
        row_images = [raw[:, i], clean[:, i], error[:, i]]
        for n in range(3):
            channel_map.image(row_images[n], ax=axs[r, n], cbar=False, clim=clim)
            if r == 0:
                axs[0, n].set_title(row_titles[n])
    f_frames.tight_layout()
    # figure of model AR
    n_row = 3 if 'resid_basis' in params else 2
    f_AR, axs = pp.subplots(n_row, 3, figsize=(10, 10 / 3.0 * n_row))
    # row 1: variogram, eigenvec 1, eigenvec 2
    axs[0, 0].plot(params['xb'], params['yb'], marker='s', ls='--')
    axs[0, 0].set_xlabel('Site-site distance (mm)')
    axs[0, 0].set_ylabel('Semivariane (uV^2)')
    axs[0, 0].axhline(params['y_half'], color='gray')
    axs[0, 0].axvline(params['x_half'], color='gray')
    axs[0, 0].set_title('Length scale: {:0.1f} mm'.format(params['theta']))
    # Eigenvalues come sorted ascending, so the last columns are leading modes.
    channel_map.image(Vm[:, -1], cbar=False, ax=axs[0, 1])
    axs[0, 1].set_title('Eigenvec 1')
    channel_map.image(Vm[:, -2], cbar=False, ax=axs[0, 2])
    axs[0, 2].set_title('Eigenvec 2')
    # row 2: local filter, AR image, error image
    i = frame_var[n_frames // 2]
    ## # AR_resid = error_image(raw[:, i:i+1], c_model, params['model_order'], channel_map, **kwargs).squeeze()
    ## AR_resid = error_image(raw[:, i:i+1], c_model, len(channel_map) - 1, channel_map, **kwargs).squeeze()
    ## AR_image = raw[:, i] - AR_resid
    ## # make a prediction filter for channel 3, 3
    ## i_pred = channel_map.lookup(3, 3)
    ## i_samp = np.setdiff1d(np.arange(len(channel_map)), i_pred)
    ## from denoise import _kriging_predictor
    ## # W = _kriging_predictor(Vm, lam, i_pred, i_samp, params['model_order'])
    ## W = _kriging_predictor(Vm, lam, i_pred, i_samp, len(channel_map) - 1)
    ## ## Vr = Vm[:, -params['model_order']:]
    ## ## C_inv = np.dot(Vr[i_samp], Vr[i_samp].T)
    ## ## C_xn = np.dot(Vr[i_samp], Vr[i_pred])
    ## ## W = np.dot(C_inv, C_xn)
    ## W = channel_map.as_channels(channel_map.subset(i_samp).embed(W))
    ## row_images = [W, AR_image, AR_resid]
    ## row_titles = ['AR filter', 'AR image', 'AR error']
    ## for n in range(3):
    ##     channel_map.image(row_images[n], ax=axs[1, n], cbar=False)
    ##     axs[1, n].set_title(row_titles[n])
    # row 2: raw_image, projected image, residual image
    Vr = Vm[:, -params['model_order']:]
    raw_frame = raw[:, i]
    # Projection of the median-variance frame onto the model subspace.
    lowpass = np.dot(Vr, np.dot(Vr.T, raw[:, i]))
    resid = raw_frame - lowpass
    row_images = [raw_frame, lowpass, resid]
    clim = np.percentile(raw_frame, [2, 98])
    row_titles = ['Raw', 'Lowpass {} modes'.format(params['model_order']), 'Resid']
    for n in range(3):
        channel_map.image(row_images[n], ax=axs[1, n], cbar=False, clim=clim)
        axs[1, n].set_title(row_titles[n])
    # row 3: eigenimage 1, 2, 3
    if n_row == 3:
        Vn = params['resid_basis']
        # row_images = [Vn[:, -1], Vn[:, -2], Vn[:, -3]]
        row_images = Vn[:, ::-1].T
        row_titles = ['Eigenvec 1 ({})', 'Eigenvec 2 ({})', 'Eigenvec 3 ({})']
        resid_order = Vn.shape[1]
        row_titles = [t.format(resid_order) for t in row_titles]
        for n in range(3):
            # The residual basis may have fewer than 3 vectors.
            if n >= len(row_images):
                break
            channel_map.image(row_images[n], ax=axs[2, n], cbar=False)
            axs[2, n].set_title(row_titles[n])
    f_AR.tight_layout()
    return f_frames, f_AR
def plot_projections(frames, channel_map, model_var=0.95, deviation=0.5, bias=0,
                     multiresolution=False, wavelet='db2', wave_level=4, compress_range=False,
                     projected=True, f_idx=None):
    """Visualize subspace projections for the estimated length scale and for
    deliberately mis-scaled length scales (1 +/- ``deviation``).

    Returns the figure. NOTE(review): when ``compress_range`` is set, the
    input ``frames`` array is scaled in place -- confirm callers expect that.
    """
    from .denoise import fast_semivariogram, covar_model, error_image, range_compression
    import matplotlib.pyplot as pp
    # 3x3 panels:
    # 1st row: raw, "correct" projection (# of eigenvectors), residual
    # 2nd-3rd row: raw, "misjudged" projection, residual
    if multiresolution:
        # Estimate the variogram from a single wavelet band.
        coefs = wavelet_bandpass(frames, wavelet, wave_level, return_coefs=True)
        if compress_range:
            cx_scale = range_compression(coefs[wave_level])
            coefs[wave_level] *= cx_scale[:, None]
        xb, yb = fast_semivariogram(coefs[wave_level], channel_map.site_combinations, xbin=0.5, trimmed=True)
        frames = waverec(coefs, wavelet, axis=1)
    else:
        if compress_range:
            cx_scale = range_compression(frames)
            frames *= cx_scale[:, None]
        xb, yb = fast_semivariogram(frames, channel_map.site_combinations, xbin=0.5, trimmed=True)
    # Trim the long-distance tail of the variogram before fitting.
    yb = yb[xb < 0.7 * xb.max()]
    xb = xb[xb < 0.7 * xb.max()]
    # Half-saturation point -> exponential length scale estimate.
    y_half = 0.5 * np.percentile(yb, [10, 90]).sum()
    x_half = xb[np.where(yb > y_half)[0][0]]
    theta = x_half / np.log(2) + bias
    if not f_idx:
        f_idx = np.random.randint(0, frames.shape[1])
    F = frames[:, f_idx]
    clim = F.min(), F.max()
    n_rows = 4 if deviation > 0 else 2
    f, axs = pp.subplots(n_rows, 2, figsize=(7, 3 * n_rows))
    channel_map.image(F, cbar=False, clim=clim, ax=axs[0, 0])
    axs[0, 0].set_title('Raw frame')
    axs[0, 1].plot(xb, yb, marker='s', ls='--')
    axs[0, 1].axhline(y_half)
    axs[0, 1].axvline(x_half)
    axs[0, 1].set_title('Scale: {:.1f}'.format(theta))
    for row in range(1, n_rows):
        # Row 1: estimated scale; rows 2-3: over/under-estimated scales.
        if row == 1:
            scale = 1.0
            theta_ = scale * theta
        elif row == 2:
            scale = 1 + deviation
            theta_ = scale * theta
        elif row == 3:
            scale = 1 - deviation
            theta_ = scale * theta
        lam, V = covar_model({'theta': theta_}, chan_map=channel_map)
        # Smallest model order capturing ``model_var`` of the variance.
        pct_var = np.cumsum(lam[::-1]) / np.sum(lam)
        if model_var >= pct_var.max():
            order = len(lam)
        else:
            order = np.where(pct_var > model_var)[0][0]
        if projected:
            Vr = V[:, -order:]
            smoothed = np.dot(Vr, np.dot(Vr.T, frames))
        else:
            resid = error_image(frames, {'theta': theta_}, order, chan_map=channel_map, radius=3.5, dist='inf')
            smoothed = frames - resid
        p_var = smoothed.var() / frames.var()
        images = [smoothed[:, f_idx], F - smoothed[:, f_idx]]
        for ax, vec, label in zip(axs[row], images, ('projected', 'residual')):
            if label == 'residual':
                channel_map.image(vec, cbar=False, ax=ax)
                ax.set_title(label + ' ({:.2f}x scale)'.format(scale))
            else:
                channel_map.image(vec, cbar=False, clim=clim, ax=ax)
                ax.set_title('{} dim '.format(order) + label + ' ({:.2f} var)'.format(p_var))
    f.tight_layout()
    return f
def wavelet_bandpass(frames, wavelet, wave_level, level=None, return_coefs=False):
    """Isolate a single wavelet decomposition level of ``frames``.

    All other levels are zeroed in place in the coefficient list. Returns the
    coefficient list if ``return_coefs``, otherwise the reconstructed
    band-limited signal.
    """
    coefs = wavedec(frames, wavelet, level=level, axis=1)
    for idx, c in enumerate(coefs):
        if idx != wave_level:
            c[:] = 0
    return coefs if return_coefs else waverec(coefs, wavelet, axis=1)
def make_video(raw, clean, channel_map, fname):
    """Write a side-by-side (raw | clean | error) movie of the array data."""
    error = raw - clean
    # Embed each channel vector into the array geometry and tile the three
    # panels horizontally.
    panels = [channel_map.embed(x.T, axis=1) for x in (raw, clean, error)]
    frames = np.concatenate(panels, axis=2)
    clim = np.percentile(raw, [2, 98])
    write_frames(frames, fname, quicktime=True, origin='upper', clim=clim,
                 figsize=(10, 3.5))
def setup_animated_frames(
        frames, timer='ms', time=(), static_title='', axis_toggle='on',
        figsize=None, colorbar=False, cbar_label='', cbar_orientation='vertical',
        figure_canvas=True,
        **imshow_kw
):
    """Prepare a figure and a frame-stepping callback for animation.

    Returns (figure, func) where ``func(n)`` updates the image (and title,
    when a time axis is given) to frame ``n``. With ``figure_canvas=False`` a
    GUI-less Figure is created, suitable for off-screen movie writing.
    """
    if figure_canvas:
        from matplotlib.pyplot import figure
        f = figure(figsize=figsize)
    else:
        # Non-pyplot Figure: no GUI backend involvement.
        f = Figure(figsize=figsize)
    ax = f.add_subplot(111)
    im = ax.imshow(frames[0], **imshow_kw)
    ax.axis('image')
    ax.axis(axis_toggle)
    # time=True is shorthand for labeling frames by sample index.
    if isinstance(time, bool) and time:
        time = np.arange(len(frames))
        timer = 'samp'
    if len(time):
        ttl = ax.set_title('{0:.2f} {1}'.format(time[0], timer))
    elif static_title:
        ax.set_title(static_title)
        ttl = None
    else:
        ttl = None
    def _step_time(num, frames, frame_im):
        # Update the image data; also update the title text if animated.
        frame_im.set_data(frames[num])
        if ttl:
            ttl.set_text('{0:.2f} {1}'.format(time[num], timer))
            return (frame_im, ttl)
        return (frame_im,)
    func = lambda x: _step_time(x, frames, im)
    if colorbar:
        cb = f.colorbar(im, ax=ax, use_gridspec=True, orientation=cbar_orientation)
        cb.set_label(cbar_label)
    with warnings.catch_warnings():
        # tight_layout can warn for some figure geometries; not actionable here.
        warnings.simplefilter('ignore')
        f.tight_layout(pad=0.2)
    return f, func
def write_anim(
        fname, fig, func, n_frame,
        title='Array Movie', fps=5, quicktime=False, qtdpi=300
):
    """Render ``n_frame`` frames via ``func`` and write them to <fname>.mp4
    with matplotlib's ffmpeg writer.

    ``quicktime=True`` switches to QuickTime-compatible encoder arguments.
    """
    FFMpegWriter = animation.writers['ffmpeg']
    metadata = dict(title=title, artist='ecoglib')
    writer = FFMpegWriter(
        fps=fps, metadata=metadata, codec='h264'
    )
    if quicktime:
        # do arguments that are quicktime compatible
        extra_args = ['-pix_fmt', 'yuv420p', '-qp', '1']
        # yuv420p looks a bit crappy, but upping the res helps
        dpi = qtdpi
    else:
        # yuv422p seems pretty good
        extra_args = ['-pix_fmt', 'yuv422p', '-qp', '0']
        dpi = fig.dpi
    writer.extra_args = extra_args
    # Normalize the file name: strip any existing .mp4 before re-appending it.
    fname = fname.split('.mp4')[0]
    with writer.saving(fig, fname+'.mp4', dpi):
        print('Writing {0} frames'.format(n_frame))
        for n in trange(n_frame):
            func(n)
            writer.grab_frame()
def write_frames(
        frames, fname, fps=5, quicktime=False, qtdpi=300,
        title='Array movie', **anim_kwargs
):
    """Render a (time, rows, cols) frame stack straight to an .mp4 file."""
    # Off-screen figure (no GUI canvas) for batch rendering.
    fig, step = setup_animated_frames(frames, figure_canvas=False, **anim_kwargs)
    write_anim(fname, fig, step, frames.shape[0], fps=fps, title=title,
               quicktime=quicktime, qtdpi=qtdpi)
def animate_frames(frames, fps=5, blit=False, **anim_kwargs):
    """Return an interactive matplotlib FuncAnimation over the frame stack."""
    fig, step = setup_animated_frames(frames, figure_canvas=True, **anim_kwargs)
    interval_ms = 1000.0 / fps
    return animation.FuncAnimation(fig, step, frames=len(frames),
                                   interval=interval_ms, blit=blit)
|
// Doxygen navigation-index data for cl_softmax_base_workload.hpp
// (member name, anchor URL, child entries).
// NOTE(review): this file appears auto-generated by Doxygen -- regenerate
// rather than hand-edit.
var _cl_softmax_base_workload_8hpp =
[
    [ "ClSoftmaxWorkloadValidate", "_cl_softmax_base_workload_8hpp.xhtml#abc6f7e5fe77e5aed3f7842755dd34073", null ]
];
import { Hl7Parser } from './hl7parser';

// Aggregate object exposed as the package's default export.
// NOTE(review): the local name shadows the CommonJS `module` binding in some
// build setups -- confirm this is intentional before renaming.
let module = {
    Hl7Parser: Hl7Parser
};

// Public API barrel: re-export the model, builder and parser types.
export { Element } from './models/element.model';
export { Field } from './models/field.model';
export { Hl7Message } from './models/hl7message.model';
export { RepeatingField } from './models/repeating-field.model';
export { Segment } from './models/segment.model';
export { SubField } from './models/sub-field.model';
export { DefinitionBuilder } from './definitionBuilder';
export { Hl7Parser } from './hl7parser';
export {module as default};
// Serialize this resource to a FHIR JSON dictionary.
// NOTE(review): currently returns only the superclass serialization -- the
// custom properties sketched below are not yet emitted; confirm whether
// this override is a placeholder.
override public func asJSON(with options: FHIRJSONOptions) -> FHIRJSON {
    var json = super.asJSON(with: options)
    // Add custom properties to the JSON representation based on the object's state
    // Consider the options provided to customize the JSON output
    // Example:
    // if options.includeMetadata {
    //     json["metadata"] = self.metadata
    // }
    // if options.includeDetails {
    //     json["details"] = self.details
    // }
    // Convert other properties to JSON and add them to the 'json' dictionary
    return json
}
package com.ifast.common.domain;
import com.baomidou.mybatisplus.activerecord.Model;
import com.baomidou.mybatisplus.annotations.TableField;
import com.baomidou.mybatisplus.annotations.TableId;
import com.baomidou.mybatisplus.annotations.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
* <pre>
* </pre>
* <small> 2018ๅนด3ๆ23ๆฅ | Aron</small>
*/
@TableName("sys_log")
@Data
public class LogDO extends Model<LogDO> implements Serializable {

    @TableField(exist = false)
    private static final long serialVersionUID = -938654836571738415L;

    /** Primary key of the sys_log row. */
    @TableId
    private Long id;
    /** Id of the user who triggered the operation. */
    private Long userId;
    /** Login name of that user. */
    private String username;
    /** Description of the performed operation. */
    private String operation;
    /** Duration of the call; unit not shown here (presumably ms) -- confirm. */
    private Integer time;
    /** Invoked method, as recorded by the logging layer -- confirm format. */
    private String method;
    /** Serialized request parameters. */
    private String params;
    /** Client IP address. */
    private String ip;
    /** Creation timestamp; JSON-serialized as "yyyy-MM-dd HH:mm:ss" in GMT+8. */
    @JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm:ss")
    private Date gmtCreate;

    @Override
    protected Serializable pkVal() {
        // MyBatis-Plus ActiveRecord hook: identifies the primary-key value.
        return this.id;
    }
}
<reponame>xsolve-pl/xsolve-feat
import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
import { enableProdMode } from '@angular/core';
import { environment } from './environments/environment';
import { AppModule } from './app/app.module';
if (environment.production) {
  enableProdMode();
}

// Capture and store the authentication token sent via the URL fragment.
const parser = document.createElement('a');
parser.href = window.location.href;
const token = parser.hash.replace(/^#/, '');
if (token) {
  localStorage.setItem('token', token);
  // Strip "#<token>" from the URL before reloading, so the token does not
  // linger in the address bar or browser history.
  // BUG FIX: String.prototype.substr treats a negative *length* as 0, so the
  // original substr(0, -token.length - 1) always produced "" and redirected
  // to an empty URL. slice() accepts a negative end index and trims the
  // trailing "#<token>" as intended.
  window.location.replace(parser.href.slice(0, -(token.length + 1)));
}

platformBrowserDynamic().bootstrapModule(AppModule);
|
<gh_stars>0
import Chip from '@material-ui/core/Chip'
import { makeStyles } from '@material-ui/core/styles'
import React from 'react'
// Style hook: gives each chip half a theme spacing unit of margin.
const useStyles = makeStyles((theme) => ({
  chip: {
    margin: theme.spacing(0.5),
  },
}))
function AuthorsArray ({ authors }) {
const arr = authors && authors.split(',').map((x) => x.trim())
const classes = useStyles()
return (
<div>
{arr &&
arr.map((author) => (
<Chip
label={` @${author} `}
key={author}
clickable
className={classes.chip}
component="a"
variant="outlined"
href={'https://github.com/' + author.trim()}
target="_blank"
rel="noopener noreferrer nofollow"
/>
))}
</div>
)
}
export default AuthorsArray
|
from typing import Union
class MetadataGenerator:
    """Render an MCF metadata dictionary into a requested output schema."""

    def generate(self, mcf: dict, schema: str = None) -> Union[dict, str]:
        """
        Generate metadata in a given schema

        Args:
            mcf (dict): Dictionary containing metadata information
            schema (str, optional): Schema to be used for generating the metadata.
                Supported values: None (raw dict), "json", "xml". Defaults to None.

        Returns:
            Union[dict, str]: Generated metadata in the specified schema format,
            or the string "Unsupported schema" for unknown schema names.
        """
        if schema is None:
            # No rendering requested: hand back the raw dictionary.
            return mcf
        if schema == "json":
            import json
            return json.dumps(mcf)
        if schema == "xml":
            from xml.sax.saxutils import escape
            # FIX: the original interpolated values verbatim, so any value
            # containing '&', '<' or '>' produced malformed XML. Escape the
            # text content before embedding it.
            # NOTE(review): keys are used as element names and are assumed to
            # be valid XML identifiers -- confirm upstream validation.
            lines = ["<metadata>"]
            for key, value in mcf.items():
                lines.append(f"  <{key}>{escape(str(value))}</{key}>")
            lines.append("</metadata>")
            return "\n".join(lines)
        # Handle other schema formats if needed
        return "Unsupported schema"
#!/bin/bash
#
# Recreate the Mac OS X RAM disk used for Tachyon worker storage.
# Reads TACHYON_RAM_FOLDER and TACHYON_WORKER_MEMORY_SIZE from the Tachyon
# configuration, converts the requested size to 512-byte sectors, and
# reformats the ramfs volume.

bin=`cd "$( dirname "$0" )"; pwd`

# Load the Tachyon configuration
. "$bin/tachyon-config.sh"

# FIX: quote path tests so unset/empty or space-containing values do not
# break the [ ... ] expressions.
if [ -e "$TACHYON_HOME/conf/tachyon-env.sh" ] ; then
  . "$TACHYON_HOME/conf/tachyon-env.sh"
fi

if [ -z "$TACHYON_RAM_FOLDER" ] ; then
  TACHYON_RAM_FOLDER=/Volumes/tachyon
  echo "TACHYON_RAM_FOLDER was not set. Using the default one: $TACHYON_RAM_FOLDER"
fi

if [[ $TACHYON_RAM_FOLDER != "/Volumes/"* ]]; then
  echo "Invalid TACHYON_RAM_FOLDER: $TACHYON_RAM_FOLDER"
  echo "TACHYON_RAM_FOLDER must be set to /Volumes/[name] on Mac OS X."
  exit 1
fi

# Remove the "/Volumes/" part so we can get the name of the volume.
F=${TACHYON_RAM_FOLDER/#\/Volumes\//}

# Lower case memory size.
# FIX: dropped tr's -s flag -- with two sets it also squeezes repeated
# characters of the second set in the output, which could corrupt the size
# string; plain translation is what is intended here.
MEM_SIZE=$(echo "$TACHYON_WORKER_MEMORY_SIZE" | tr '[:upper:]' '[:lower:]')

# Convert the memory size to number of sectors. Each sector is 512 Byte.
if [[ $MEM_SIZE == *"gb" ]]; then
  # Size was specified in gigabytes.
  SIZE_IN_GB=${MEM_SIZE/%gb/}
  NUM_SECTORS=$(($SIZE_IN_GB * 1024 * 2048))
elif [[ $MEM_SIZE == *"mb" ]]; then
  # Size was specified in megabytes.
  SIZE_IN_MB=${MEM_SIZE/%mb/}
  NUM_SECTORS=$(($SIZE_IN_MB * 2048))
elif [[ $MEM_SIZE == *"kb" ]]; then
  # Size was specified in kilobytes.
  SIZE_IN_KB=${MEM_SIZE/%kb/}
  NUM_SECTORS=$(($SIZE_IN_KB * 2))
elif [[ "$MEM_SIZE" =~ ^[0-9]+$ ]] ; then
  # Size was specified in bytes.
  NUM_SECTORS=$((MEM_SIZE / 512))
else
  echo "Please specify TACHYON_WORKER_MEMORY_SIZE in a correct form."
  exit 1
fi

echo "Formatting RamFS: $F ($NUM_SECTORS sectors)."
diskutil unmount /Volumes/$F
diskutil erasevolume HFS+ $F `hdiutil attach -nomount ram://$NUM_SECTORS`
<reponame>tignear/bot
// Path segments identifying the text-to-speech target text-channel setting;
// typed as a literal tuple so consumers get the exact key strings.
export const text2speechTargetTextChannels: ["speech", "targets"] = [
  "speech",
  "targets",
];
|
<reponame>oueya1479/OpenOLAT<filename>src/main/java/org/olat/core/commons/editor/htmleditor/HTMLReadOnlyController.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.core.commons.editor.htmleditor;
import org.olat.core.commons.modules.singlepage.SinglePageController;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.util.vfs.VFSContainer;
/**
*
* Initial date: 25 Mar 2019<br>
* @author uhensler, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Read-only display of an HTML file: embeds the rendered page content and,
 * optionally, a single "close" button that fires {@link Event#DONE_EVENT}.
 */
public class HTMLReadOnlyController extends BasicController {

	private Link closeLink;
	private SinglePageController singlePageCtrl;

	public HTMLReadOnlyController(UserRequest ureq, WindowControl wControl, VFSContainer rootContainer, String fileName, boolean showClose) {
		super(ureq, wControl);

		VelocityContainer container = createVelocityContainer("readonly");
		singlePageCtrl = new SinglePageController(ureq, wControl, rootContainer, fileName, false);
		listenTo(singlePageCtrl);
		container.put("content", singlePageCtrl.getInitialComponent());
		// The close button is only rendered on request.
		if (showClose) {
			closeLink = LinkFactory.createButton("close", container, this);
		}
		putInitialPanel(container);
	}

	@Override
	protected void event(UserRequest ureq, Component source, Event event) {
		// Guard clause: the close link is the only component we react to.
		if (source != closeLink) {
			return;
		}
		fireEvent(ureq, Event.DONE_EVENT);
	}
}
|
<filename>index.js<gh_stars>10-100
/**
* @overview An XKCD-style password generator for Node.js
*
* @author <NAME> <<EMAIL>>
* @copyright 2014 <NAME>
* @license MIT - See the included 'LICENSE' for details.
* @version 2.0.0
* @extends EventEmitter
*/
var fs = require("fs")
var path = require("path")
var util = require("util")
var events = require("events")
var async = require("async")
var rand = require("random-lib")
var when = require("when")
// Default generation options: four words, each 5-8 characters long.
var DEFAULTS = {
  numWords: 4,
  minLength: 5,
  maxLength: 8
}

// The module exports the generator constructor directly.
module.exports = XKCDPassword
/**
 * Creates the password generator. May be called with or without `new`.
 *
 * @constructor
 * @since 0.0.1
 *
 * @returns {Generator} the word generator
 */
function XKCDPassword() {
  // allow plain-function invocation
  if (!(this instanceof XKCDPassword)) {
    return new XKCDPassword()
  }

  events.EventEmitter.call(this)

  // wordlist state: supplied directly or loaded lazily from a file
  this.wordlist = null
  this.wordfile = null

  // true once a wordlist is available for generation
  this.ready = false
  this.initialized = false

  return this
}
// Extends EventEmitter so the generator can emit the 'ready' event.
util.inherits(XKCDPassword, events.EventEmitter)
/**
 * Initializes the password generator with a pre-defined word list.
 *
 * @since 0.0.1
 * @param {array} wordlist - The array of words to use.
 * @throws {Error} if wordlist is not a non-empty array
 *
 * @returns {Generator} the word generator
 */
XKCDPassword.prototype.initWithWordList = function(wordlist) {
  // Array.isArray is a precise check: the previous `typeof === "object"`
  // test also accepted non-array objects exposing a `length` property.
  if (!Array.isArray(wordlist) || wordlist.length === 0) {
    throw new Error("Wordlist provided was not an array.")
  }
  this.wordlist = wordlist
  this.ready = true
  return this
}
/**
 * Lazily loads the wordlist (if none was supplied) and flags readiness.
 * Internal - triggered by the first call to generate().
 *
 * @since 0.2.0
 *
 * @emits {Generator} 'ready' event once the wordlist is available
 */
XKCDPassword.prototype._initialize = function() {
  var self = this
  self.initialized = true

  // We don't have a wordlist yet, and need to get one
  if (!self.wordlist) {
    if (!self.wordfile) {
      // fall back to the bundled mwords dictionary
      self.wordfile = path.join(__dirname, "./vendor/mwords/113809of.fic")
    }

    // read the file asynchronously, line by line; each line is one word
    self.wordlist = []
    require("readline")
      .createInterface({
        input: fs.createReadStream(self.wordfile),
        terminal: false
      })
      .on("line", function readWordFileLine(line) {
        // append to internal wordlist
        self.wordlist.push(line)
      })
      .on("close", function resolveReadOfWordFile() {
        // emit that we're ready; generate() listens for this event
        self.ready = true
        self.emit("ready", self)
      })
  }
}
/**
 * Initializes the password generator with a newline delimited word file.
 *
 * @since 0.0.1
 * @param {string} wordfile - Path to the word file to be used.
 * @throws {Error} if wordfile is not a string
 *
 * @returns {Generator} the word generator
 */
XKCDPassword.prototype.initWithWordFile = function(wordfile) {
  // guard clause: reject anything that is not a path string
  if (typeof wordfile !== "string") {
    throw new Error("Wordfile provided was not a string.")
  }
  this.wordfile = wordfile
  return this
}
/**
 * Parses the options and generates the password.
 *
 * Dual-mode API: with a callback it returns null and calls the callback;
 * without one it returns a promise.
 *
 * @since 0.2.0
 * @param {object} options - The object containing options, or alternately a
 * number which is just the words to generate, everything else default.
 * @param {generateCallback} next - The callback function to call after
 * generation.
 *
 * @returns {Promise} the promise object (null when a callback was given)
 */
XKCDPassword.prototype.generate = function(options, next) {
  var self = this
  var numWords = DEFAULTS.numWords
  var minLength = DEFAULTS.minLength
  var maxLength = DEFAULTS.maxLength
  var deferred = null

  // options may be: a bare word count, the callback itself, or a config object
  if (typeof options === "number") {
    numWords = options
  } else if (typeof options === "function") {
    next = options
    options = DEFAULTS
  } else if (typeof options !== "undefined" && options) {
    if (typeof options.numWords === "number") {
      numWords = options.numWords
    }
    if (typeof options.minLength === "number") {
      minLength = options.minLength
    }
    if (typeof options.maxLength === "number") {
      maxLength = options.maxLength
    }
  }

  // no usable callback -> promise mode: install a no-op callback and a deferred
  if (!next || typeof next !== "function") {
    next = function() {}
    deferred = when.defer()
  }

  if (self.ready) {
    self._generate(numWords, minLength, maxLength, next, deferred)
  } else {
    // defer generation until the wordlist has been loaded
    self.on("ready", function onReadyGenerate() {
      self._generate(numWords, minLength, maxLength, next, deferred)
    })

    // run the init if we haven't already
    if (!self.initialized) {
      self._initialize()
    }
  }
  return deferred ? deferred.promise : null
}
/**
* Callback executed after password is generated.
*
* @since 0.0.1
* @callback {generateCallback}
* @param {Error} err - Error if there was one, null if not.
* @param {array} result - Resulting array of words.
*/
/**
 * Actual word generation function that is called after everything is
 * initialized. Should not be used directly.
 *
 * Validates the parameters, draws unique random indexes into the wordlist,
 * and re-rolls any index whose word falls outside [minLength, maxLength].
 *
 * @since 0.0.1
 * @param {integer} numWords - Number of words to generate.
 * @param {integer} minLength - The minimum length of a word to use.
 * @param {integer} maxLength - The maximum length of a word to use.
 * @param {generateCallback} next - The callback function to call after or none
 * @param {Object} deferred - The promise to resolve, or none
 * generation.
 */
XKCDPassword.prototype._generate = function(
  numWords,
  minLength,
  maxLength,
  next,
  deferred
) {
  var self = this
  numWords = parseInt(numWords, 10)
  minLength = parseInt(minLength, 10)
  maxLength = parseInt(maxLength, 10)
  var err = null

  // ensure that required parameters have been set
  if (numWords <= 0 || minLength < 0 || maxLength < 2) {
    err = new Error("Parameters provided were not correct.")
    // errors are reported asynchronously on the next tick, never synchronously
    process.nextTick(function resolveParametersError() {
      next(err)
      if (deferred) {
        deferred.reject(err)
      }
    })
  } else if (numWords > self.wordlist.length) {
    // make sure we're not asking for more unique words than we have available
    err = new Error(
      "More words than were available in the wordlist were requested."
    )
    process.nextTick(function resolveTooManyWordsError() {
      next(err)
      if (deferred) {
        deferred.reject(err)
      }
    })
  } else if (maxLength < minLength) {
    err = new Error(
      "Your maximum word length can't be less than your minimum length. " +
        "Try specifying a maximum length and minimum length directly."
    )
    process.nextTick(function resolveMaxLengthIncorrectError() {
      next(err)
      if (deferred) {
        deferred.reject(err)
      }
    })
  } else {
    // generate the numbers
    // because we want to generate unique numbers
    rand.randomInts(
      { unique: true, num: numWords, min: 0, max: self.wordlist.length },
      // eslint-disable-next-line no-shadow
      function generateWords(err, ints) {
        if (err) {
          next(err)
          if (deferred) {
            deferred.reject(err)
          }
          return
        }
        var position = 0
        var numLoops = 0
        var words = []
        // loop until enough length-conforming words have been collected
        async.doWhilst(
          function generateWord(callback) {
            // if the word is too short, we need a new random number
            if (
              self.wordlist[ints[position]].length > maxLength ||
              self.wordlist[ints[position]].length < minLength
            ) {
              rand.randomInt(
                { min: 0, max: self.wordlist.length - 1 },
                // eslint-disable-next-line no-shadow
                function generateAnotherInt(err, int) {
                  if (err) return callback(err)
                  if (ints.indexOf(int) > -1) {
                    // we already found that random number, run callback to make the loop again
                    callback()
                  } else {
                    // replace the integer in that position and run the loop again
                    ints[position] = int
                    callback()
                  }
                }
              )
            } else {
              // it's a good word, push it onto the stack
              words.push(self.wordlist[ints[position]])
              position++
              callback()
            }
          },
          function postCheckGeneration() {
            // bail out after a bounded number of re-rolls to avoid spinning
            // forever when too few words match the length constraints
            if (!process.env.DISABLE_LOOP_PREVENTION) {
              if (
                numLoops >
                (numWords * 10000 > 850000 ? 850000 : numWords * 10000)
              ) {
                return false
              }
              numLoops++
            }
            return words.length < numWords
          },
          // eslint-disable-next-line no-shadow
          function resolveGeneration(err) {
            if (err) {
              next(err)
              if (deferred) {
                deferred.reject(err)
              }
            } else if (words.length < numWords) {
              // the loop-prevention cap fired before enough words were found
              var tooFewWordsError = new Error(
                "You asked for more words than could be generated. This may " +
                  "because you're asking for too many words of a certain length."
              )
              next(tooFewWordsError)
              if (deferred) {
                deferred.reject(tooFewWordsError)
              }
            } else {
              next(null, words)
              if (deferred) {
                deferred.resolve(words)
              }
            }
          }
        )
      }
    )
  }
}
|
#!/usr/bin/env sh
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Build the site with npm, stamp CNAME/.nojekyll, rename ./dist to ./docs
# and commit the result for GitHub Pages deployment.
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PROG="$(basename "$0")"
USER="${SUDO_USER:-${USER}}"
HOME="${USER_HOME:-${HOME}}"
CNAME="tammy.malaks.us"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
printf '%s\n' "deploying website in $(basename "$PWD")"
cd "$PWD" || exit 1
# start from a clean slate so a stale dist/docs cannot mask a failed build
rm -Rf ./dist ./docs
# initialize exitCode: previously it stayed unset when the build succeeded
# but ./dist was missing, making the final numeric test a syntax error
exitCode=0
npm run build || exitCode=1
if [ -d "./dist" ] && [ "$exitCode" -eq 0 ]; then
  echo "$CNAME" >./dist/CNAME
  touch ./dist/.nojekyll
  mv -fv ./dist ./docs
  [ -d "./docs" ] && gitcommit deploy docs/ && exitCode=0 || exitCode=1
else
  # build failed or produced no ./dist directory
  exitCode=1
fi
if [ "$exitCode" -ne 0 ]; then
  printf '%s\n' "Failed to deploy website"
fi
exit "$exitCode"
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Interpreter classes available to notebooks (Spark, PySpark, SparkSQL,
# dependency loader, Markdown, Angular, shell and Hive).
export ZEPPELIN_INTERPRETERS="org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.hive.HiveInterpreter"
# Port the Zeppelin web UI listens on.
export ZEPPELIN_PORT=8080
# Configuration, log and pid locations.
export ZEPPELIN_CONF_DIR=/etc/zeppelin/conf
export ZEPPELIN_LOG_DIR=/var/log/zeppelin
export ZEPPELIN_PID_DIR=/var/run/zeppelin
# Scratch directory for the exploded web application.
export ZEPPELIN_WAR_TEMPDIR=/var/run/zeppelin/webapps
# Where notebook files are persisted.
export ZEPPELIN_NOTEBOOK_DIR=/var/lib/zeppelin/notebook
# Run Spark jobs through YARN in client mode.
export MASTER=yarn-client
export SPARK_HOME=/usr/lib/spark
export HADOOP_CONF_DIR=/etc/hadoop/conf
|
<filename>tests/testCases.ts
import {Argument} from "../src";
// A single data-driven test case: a human-readable name, the argument list
// passed to the function under test, and the exact string it must return.
export type TestCase = {
    name: string,
    value: Argument[],
    expectedResult: string
};
// Benchmark fixtures mirrored from the classnames project's benchmarks;
// also reused as plain correctness cases in testCases below.
export const benchmarkCases: TestCase[] = [
    {
        name: 'benchmark: measurethat',
        value: [
            'style',
            {'style-2': true, 'style-3': false, 'style-4': 9, 'style-5': null },
            6,
            'another-string',
            {a: undefined, b: 'asdf'}
        ],
        expectedResult: 'style style-2 style-4 6 another-string b'
    },
    {
        name: 'benchmark: strings',
        value: ['one', 'two', 'three'],
        expectedResult: 'one two three'
    },
    {
        name: 'benchmark: object',
        value: [{ one: true, two: true, three: false }],
        expectedResult: 'one two'
    },
    {
        name: 'benchmark: strings, object',
        value: ['one', 'two', { four: true, three: false }],
        expectedResult: 'one two four'
    },
    {
        name: 'benchmark: mix',
        value: ['one', { two: true, three: false }, { four: 'four', five: true }, 6, {}],
        expectedResult: 'one two four five 6'
    }
];
// Full correctness suite: classnames-compatible cases, the classnames README
// examples, the benchmark fixtures above, and additional edge cases.
export const testCases: TestCase[] = [
    // classnames own testing cases that fit this package's API
    {
        name: 'keeps object keys with truthy values',
        value: [{ a: true, b: false, c: 0, d: null, e: undefined, f: 1 }],
        expectedResult: 'a f'
    },
    {
        name: 'mix',
        value: ['a', 0, null, undefined, 1, 'b'],
        // we do want our package to render 0 as string (in diff to classnames)
        expectedResult: 'a 1 b'
    },
    {
        name: 'supports heterogenous arguments',
        value: [{ a: true }, 'b', 0],
        expectedResult: 'a b'
    },
    {
        name: 'should be trimmed',
        value: ['', 'b', {}, ''],
        expectedResult: 'b'
    },
    {
        name: 'returns an empty string for an empty configuration',
        value: [{}],
        expectedResult: ''
    },
    // classnames own readme examples
    {
        name: 'foo bar literals',
        value: ['foo', 'bar'],
        expectedResult: 'foo bar'
    },
    {
        name: 'foo literal bar object true',
        value: ['foo', { bar: true }],
        expectedResult: 'foo bar'
    },
    {
        name: 'foo-bar object true',
        value: [{ 'foo-bar': true }],
        expectedResult: 'foo-bar'
    },
    {
        name: 'foo-bar object false',
        value: [{ 'foo-bar': false }],
        expectedResult: ''
    },
    {
        name: 'foo object bar object',
        value: [{ foo: true }, { bar: true }],
        expectedResult: 'foo bar'
    },
    {
        name: 'foo bar object',
        value: [{ foo: true, bar: true }],
        expectedResult: 'foo bar'
    },
    {
        name: 'lots of arguments',
        value: ['foo', { bar: true, duck: false }, 'baz', { quux: true }],
        expectedResult: 'foo bar baz quux'
    },
    {
        name: 'lots of arguments with numbers',
        value: [null, 'bar', undefined, 0, 1, { baz: null }, ''],
        expectedResult: 'bar 1'
    },
    // classnames official benchmark fixtures that are compatible with this package's API
    ...benchmarkCases,
    // my own test cases
    {
        name: 'one item object',
        value: [{ a: true, b: false }],
        expectedResult: 'a'
    },
    {
        name: 'many values object with literal',
        value: [{ a: '6', b: null, c: 6, d: null }, '3'],
        expectedResult: 'a c 3'
    },
    {
        name: 'many values object with two literals',
        value: ['6', { a: '9', b: null, c: 6, d: null }, '3'],
        expectedResult: '6 a c 3'
    },
    {
        name: 'two objects',
        value: [{ a: '6', b: null, c: 6, d: null }, { e: true, f: false }],
        expectedResult: 'a c e'
    },
    {
        name: 'two objects with literal',
        value: [{ a: '6', b: null, c: 6, d: null }, 'gu', { e: true, f: false }],
        expectedResult: 'a c gu e'
    },
    {
        name: 'literal string',
        value: ['3'],
        expectedResult: '3'
    },
    {
        name: 'remove `undefined` and `null`',
        value: [undefined, null],
        expectedResult: ''
    },
    {
        name: 'many values with `undefined` and `null`',
        value: ['a', undefined, 'b', null, 'c', 'd', undefined, undefined, 'e'],
        expectedResult: 'a b c d e'
    },
    {
        name: 'number literal',
        value: [6],
        expectedResult: '6'
    },
    {
        name: 'many number literals',
        value: [6, 9, 4, 2, 0],
        expectedResult: '6 9 4 2'
    },
    {
        name: 'strings with numbers',
        value: [6, '9', 4, '2', 0],
        expectedResult: '6 9 4 2'
    },
    {
        name: 'mix of every possible type',
        value: [6, undefined, {}, null, { 9: true, 'asdf': 0 }],
        expectedResult: '6 9'
    }
];
<reponame>aymerick/kowa
package builder
import (
"fmt"
"log"
)
// ErrorCollector holds a list of errors
type ErrorCollector struct {
	// Errors maps a build-step name to the errors recorded for that step.
	Errors map[string][]error
	// ErrorsNb is the running total of errors across all steps.
	ErrorsNb int
}
// NewErrorCollector instanciates a new, empty ErrorCollector ready to
// record errors (ErrorsNb starts at its zero value).
func NewErrorCollector() *ErrorCollector {
	collector := new(ErrorCollector)
	collector.Errors = make(map[string][]error)
	return collector
}
// addError records err under the given step name and bumps the total count.
func (collector *ErrorCollector) addError(step string, err error) {
	stepErrors := collector.Errors[step]
	collector.Errors[step] = append(stepErrors, err)
	collector.ErrorsNb++
}
// dump logs every collected error, grouped by build step, with a running
// index across steps. It is a no-op when no errors were recorded.
func (collector *ErrorCollector) dump() {
	if collector.ErrorsNb > 0 {
		log.Printf("[ERR] Built with %d error(s)", collector.ErrorsNb)
		errNb := 1
		for step, errors := range collector.Errors {
			log.Printf("[ERR] %s:", step)
			for _, err := range errors {
				// Format first, then log the result verbatim: the previous
				// log.Printf(fmt.Sprintf(...)) passed the rendered message as a
				// format string, so any '%' in err.Error() was re-interpreted
				// as a stray formatting verb.
				log.Print(fmt.Sprintf("[ERR] %d. %v", errNb, err.Error()))
				errNb++
			}
		}
	}
}
|
/*
*
*/
package net.community.apps.eclipse.cp2pom;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.Desktop;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;
import javax.swing.AbstractButton;
import javax.swing.JCheckBox;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JToolBar;
import net.community.apps.common.BaseMainFrame;
import net.community.apps.eclipse.cp2pom.resources.ResourcesAnchor;
import net.community.chest.apache.maven.helpers.BaseTargetDetails;
import net.community.chest.apache.maven.helpers.BuildProject;
import net.community.chest.apache.maven.helpers.BuildTargetDetails;
import net.community.chest.apache.maven.helpers.BuildTargetFile;
import net.community.chest.awt.attributes.Textable;
import net.community.chest.dom.DOMUtils;
import net.community.chest.eclipse.wst.WstUtils;
import net.community.chest.io.EOLStyle;
import net.community.chest.io.dom.PrettyPrintTransformer;
import net.community.chest.lang.ExceptionUtil;
import net.community.chest.lang.StringUtil;
import net.community.chest.swing.component.button.BaseCheckBox;
import net.community.chest.swing.component.scroll.ScrolledComponent;
import net.community.chest.swing.options.BaseOptionPane;
import net.community.chest.ui.helpers.panel.input.LRFieldWithButtonPanel;
import net.community.chest.util.logging.LoggerWrapper;
import net.community.chest.util.logging.factory.WrapperFactoryManager;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* <P>Copyright as per GPLv2</P>
*
* @author <NAME>.
* @since Jul 27, 2009 9:30:50 AM
*/
final class MainFrame extends BaseMainFrame<ResourcesAnchor> {
	/**
	 * Serialization id inherited through the Swing frame hierarchy.
	 */
	private static final long serialVersionUID = 6009715905543540702L;
	private static final LoggerWrapper _logger=WrapperFactoryManager.getLogger(MainFrame.class);
	/*
	 * @see net.community.apps.common.BaseMainFrame#getLogger()
	 */
	@Override
	protected LoggerWrapper getLogger ()
	{
		return _logger;
	}
	/*
	 * @see net.community.apps.common.BaseMainFrame#getResourcesAnchor()
	 */
	@Override
	public ResourcesAnchor getResourcesAnchor ()
	{
		return ResourcesAnchor.getInstance();
	}
	// Repository variable prefix used when resolving entries (CLI option -p/--prefix).
	private String _repoPrefix="M2_REPO";
	public String getRepositoryPrefix ()
	{
		return _repoPrefix;
	}
	public void setRepositoryPrefix (final String s)
	{
		_repoPrefix = s;
	}
	// Verbose flag forwarded to RepositoryEntry.loadEntries.
	private boolean _verbOutput=true;
	public boolean isVerboseOutput ()
	{
		return _verbOutput;
	}
	public void setVerboseOutput (final boolean f)
	{
		_verbOutput = f;
	}
	/**
	 * Resolves the effective file from a text field and a cached File: returns
	 * the cached instance when the field text equals its path, null when the
	 * field is empty, otherwise a new File for the field text.
	 * NOTE(review): compareDataStrings(..., true) is assumed to return 0 on
	 * equality — confirm the boolean's exact semantics in StringUtil.
	 */
	private static final File resolveFileInstance (final Textable fld, final File curFile)
	{
		final String fldPath=(null == fld) ? null : fld.getText(),
					curPath=(null == curFile) ? null : curFile.getAbsolutePath();
		if (0 == StringUtil.compareDataStrings(fldPath, curPath, true))
			return curFile;
		if ((null == fldPath) || (fldPath.length() <= 0))
			return null;
		return new File(fldPath);
	}
	// Output (pom) selection: panel showing the path and last chosen file.
	private LRFieldWithButtonPanel _outputPanel;
	private File _outputFile;
	// Effective output file: cached instance or whatever the panel shows now.
	private File resolveOutputFile ()
	{
		return resolveFileInstance(_outputPanel, _outputFile);
	}
	/*
	 * Caches the chosen output file, mirrors it into the output panel and
	 * remembers its folder for the next file chooser.
	 * @see net.community.apps.common.BaseMainFrame#saveFile(java.io.File, org.w3c.dom.Element)
	 */
	@Override
	public void saveFile (File f, Element dlgElement)
	{
		if (_outputFile != f)
			_outputFile = f;
		if (_outputPanel != null)
			_outputPanel.setText((null == f) ? "" : f.getAbsolutePath());
		setInitialFileChooserFolder(f, Boolean.TRUE);
	}
	// Fallback for every unset Maven coordinate (group/artifact/name/version).
	public static final String DEFAULT_NAME_VALUE="cp2pom";
	private String _groupId;
	public String getGroupId ()
	{
		if (null == _groupId)
			return DEFAULT_NAME_VALUE;
		return _groupId;
	}
	public void setGroupId (String groupId)
	{
		_groupId = groupId;
	}
	private String _artifactId;
	public String getArtifactId ()
	{
		if (null == _artifactId)
			return DEFAULT_NAME_VALUE;
		return _artifactId;
	}
	public void setArtifactId (String artifactId)
	{
		_artifactId = artifactId;
	}
	private String _projectName;
	public String getProjectName ()
	{
		if (null == _projectName)
			return DEFAULT_NAME_VALUE;
		return _projectName;
	}
	public void setProjectName (String projectName)
	{
		_projectName = projectName;
	}
	private String _projectVersion;
	public String getProjectVersion ()
	{
		if (null == _projectVersion)
			return DEFAULT_NAME_VALUE;
		return _projectVersion;
	}
	public void setProjectVersion (String projectVersion)
	{
		_projectVersion = projectVersion;
	}
	/**
	 * Populates the document with a Maven POM skeleton: a project root
	 * holding the coordinate header elements followed by a dependencies
	 * element with one child per repository entry (nulls skipped).
	 *
	 * @param doc the (empty) DOM document to populate
	 * @param rl repository entries to serialize as dependencies
	 * @param outFile target file - used for error reporting only
	 * @return the populated document
	 * @throws Exception if an entry fails to serialize
	 */
	protected <D extends Document> D createMavenDependencies (
			final D doc,
			final Collection<? extends RepositoryEntry> rl,
			final File outFile)
		throws Exception
	{
		final Element proj=doc.createElement(BuildProject.PROJECT_ELEMENT_NAME);
		doc.appendChild(proj);
		{
			// flat name/value pairs: element name at even index, value at odd
			final String[] hdrs={
				BuildTargetDetails.MODELVERSION_ELEM_NAME, BuildTargetDetails.DEFAULT_MODEL_VERSION,
				BaseTargetDetails.GROUPID_ELEM_NAME, getGroupId(),
				BaseTargetDetails.ARTIFACTID_ELEM_NAME, getArtifactId(),
				BuildTargetDetails.PACKAGING_ELEM_NAME, BuildTargetFile.POM_FILE_TYPE,
				BuildTargetDetails.NAME_ELEM_NAME, getProjectName(),
				BaseTargetDetails.VERSION_ELEM_NAME, getProjectVersion()
			};
			for (int hIndex=0; hIndex < hdrs.length; hIndex += 2)
			{
				final Element elem=
					DOMUtils.createElementValue(doc, hdrs[hIndex], hdrs[hIndex+1]);
				if (null == elem)
					continue;
				proj.appendChild(elem);
			}
		}
		{
			final Element deps=doc.createElement(BuildProject.DEPENDENCIES_ELEM_NAME);
			for (final RepositoryEntry re : rl)
			{
				try
				{
					final Element elem=
						(null == re) ? null : BaseTargetDetails.toXml(re, doc, false);
					if (null == elem)
						continue;
					deps.appendChild(elem);
				}
				catch(Exception e)
				{
					_logger.error("createMavenDependencies(" + outFile + ")[" + re + " " + e.getClass().getName() + " while create document: " + e.getMessage(), e);
					throw e;
				}
			}
			proj.appendChild(deps);
		}
		return doc;
	}
protected <D extends Document> D createAntDependencies (
final D doc,
final Collection<? extends RepositoryEntry> rl,
final File outFile)
throws Exception
{
final Element deps=doc.createElement(BuildProject.DEPENDENCIES_ELEM_NAME);
for (final RepositoryEntry re : rl)
{
try
{
final Element elem=
(null == re) ? null : BaseTargetDetails.toXml(re, doc, true);
if (null == elem)
continue;
deps.appendChild(elem);
}
catch(Exception e)
{
_logger.error("createMavenDependencies(" + outFile + ")[" + re + " " + e.getClass().getName() + " while create document: " + e.getMessage(), e);
throw e;
}
}
doc.appendChild(deps);
return doc;
}
	/**
	 * Builds the dependencies document (Maven or Ant format), writes it
	 * pretty-printed to outFile and optionally opens it in an editor.
	 * Validation failures and caught exceptions are RETURNED (not thrown);
	 * null signals success or a user-cancelled overwrite.
	 *
	 * @param rl the entries to write
	 * @param asAntFormat true for Ant format, false for a Maven POM
	 * @param outFile destination file
	 * @return null on success/cancel, otherwise the failure to report
	 */
	public RuntimeException createDependencies (
			final Collection<? extends RepositoryEntry> rl,
			final boolean asAntFormat,
			final File outFile)
	{
		if (null == outFile)
			return new IllegalStateException("No output file");
		if ((null == rl) || (rl.size() <= 0))
			return new IllegalArgumentException("No entries to create");
		if (outFile.exists())
		{
			// TODO check if running with no UI
			if (JOptionPane.showConfirmDialog(this, "Output file already exists - override ?", "Confirm output file overwrite", JOptionPane.YES_NO_OPTION) != JOptionPane.YES_OPTION)
				return null;
		}
		final Document doc;
		try
		{
			doc = DOMUtils.createDefaultDocument();
			if (asAntFormat)
				createAntDependencies(doc, rl, outFile);
			else
				createMavenDependencies(doc, rl, outFile);
		}
		catch(Exception e)
		{
			_logger.error("createDependencies(" + outFile + ") " + e.getClass().getName() + " while create document: " + e.getMessage(), e);
			return ExceptionUtil.toRuntimeException(e);
		}
		try
		{
			PrettyPrintTransformer.DEFAULT.transform(doc, outFile);
		}
		catch(Exception e)
		{
			_logger.error("createDependencies(" + outFile + ") " + e.getClass().getName() + " while write document: " + e.getMessage(), e);
			return ExceptionUtil.toRuntimeException(e);
		}
		// TODO check if running with no UI
		{
			final int nRes=JOptionPane.showConfirmDialog(this, "Open generated file ?", "Conversion complete", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE);
			if (JOptionPane.YES_OPTION == nRes)
			{
				final Desktop d=Desktop.getDesktop();
				try
				{
					d.edit(outFile);
				}
				catch (IOException e)
				{
					_logger.error("createDependencies(" + outFile + ") " + e.getClass().getName() + " while edit output file: " + e.getMessage(), e);
					return ExceptionUtil.toRuntimeException(e);
				}
			}
		}
		return null;
	}
	// Input (.classpath) selection: panel showing the path and last loaded file.
	private LRFieldWithButtonPanel _inputPanel;
	private File _inputFile;
	// Effective input file: cached instance or whatever the panel shows now.
	private File resolveInputFile ()
	{
		return resolveFileInstance(_inputPanel, _inputFile);
	}
private static final boolean isCPFile (final File f)
{
final String n=(null == f) ? null : f.getName();
if (WstUtils.COMPONENTS_FILENAME.equalsIgnoreCase(n))
return false;
else
return true;
}
	// Table displaying the resolved repository entries.
	private RepositoryEntriesTable _entriesTable;
	/**
	 * Loads the repository entries from the given file, optionally shows them
	 * in the table, caches the file and mirrors its path into the input panel.
	 * Errors are logged and shown in a dialog; null is returned in that case.
	 */
	private Collection<? extends RepositoryEntry> loadFile (
			final File f, final boolean populateTable)
	{
		try
		{
			final Collection<? extends RepositoryEntry> el=RepositoryEntry.loadEntries(getRepositoryPrefix(), f, isCPFile(f), isVerboseOutput());
			if (populateTable && (_entriesTable != null))
				_entriesTable.setEntries(el);
			if (_inputFile != f)
				_inputFile = f;
			if (_inputPanel != null)
				_inputPanel.setText((null == f) ? "" : f.getAbsolutePath());
			setInitialFileChooserFolder(f, Boolean.FALSE);
			return el;
		}
		catch(Exception e)
		{
			_logger.error("loadFile(" + f + ") " + e.getClass().getName() + ": " + e.getMessage(), e);
			// TODO check if running with no UI
			BaseOptionPane.showMessageDialog(this, e);
			return null;
		}
	}
	/*
	 * Delegates to the private loadFile and always populates the table.
	 * @see net.community.apps.common.BaseMainFrame#loadFile(java.io.File, java.lang.String, org.w3c.dom.Element)
	 */
	@Override
	public void loadFile (File f, String cmd, Element dlgElement)
	{
		loadFile(f, true);
	}
	// Checkbox selecting Ant output format (unchecked = Maven POM).
	private JCheckBox _asAntFormat;
	public boolean isUseAntFormat ()
	{
		return (_asAntFormat != null) && _asAntFormat.isSelected();
	}
	public void setUseAntFormat (boolean v)
	{
		if (_asAntFormat != null)
			_asAntFormat.setSelected(v);
	}
	/**
	 * "Run" action: resolves the input entries (reusing the table contents
	 * when the input file is unchanged, otherwise reloading) and writes the
	 * dependencies file; any failure is shown in a dialog.
	 */
	protected void createDependencies ()
	{
		final File f=resolveInputFile();
		final Collection<? extends RepositoryEntry> rl=(f == _inputFile)
			? ((null == _entriesTable) ? null : _entriesTable.getEntries())
			: loadFile(f, true)
			;
		final RuntimeException re=createDependencies(rl, isUseAntFormat(), resolveOutputFile());
		if (re != null)
			BaseOptionPane.showMessageDialog(this, re);
	}
	// "Refresh" action: re-reads the current input file into the table.
	protected void refreshInputFile ()
	{
		if (_inputFile != null)
			loadFile(_inputFile, true);
	}
	// Command-line usage text, one array entry per output line (joined with
	// the platform end-of-line by showUsage).
	private static final String[] USAGE={
		"cp2pom [options]",
		"",
		"\tWhere options are:",
		"",
		"\t\t-i,--input <file> - pre-load input file",
		"\t\t-o,--output <file> - pre-load output file",
		"\t\t-g,--group <group> - Maven group name to use (default=" + DEFAULT_NAME_VALUE + ")",
		"\t\t-a,--artifact <artifact> - Maven artifact name to use (default=" + DEFAULT_NAME_VALUE + ")",
		"\t\t-n,--name <name> - Maven project name to use (default=" + DEFAULT_NAME_VALUE + ")",
		"\t\t-v,--version <version> - Maven project version to use (default=" + DEFAULT_NAME_VALUE + ")",
		"\t\t-p,--prefix <prefix> - repository variable prefix",
		"\t\t-f,--format <format> - output format (maven or ant) (default=maven)",
		"\t\t-s,--silent - no UI while working",
		"\t\t-h,--help - show this message"
	};
protected static final void showUsage (final Component popupParent /* null == use stdout */)
{
final StringBuilder sb=new StringBuilder(USAGE.length * 64);
final char[] eolChars=EOLStyle.LOCAL.getStyleChars();
for (final String s : USAGE)
{
if (sb.length() > 0)
sb.append(eolChars);
sb.append(s);
}
if (popupParent != null)
JOptionPane.showMessageDialog(popupParent, sb.toString(), "Usage", JOptionPane.INFORMATION_MESSAGE);
else
System.out.println(sb.toString());
}
	// Command identifiers for the toolbar/menu actions.
	private static final String RUN_CMD="run", REFRESH_CMD="refresh";
	/*
	 * Lazily builds the command-name -> listener map (case-insensitive keys),
	 * reusing any map already created by the superclass.
	 * @see net.community.apps.common.BaseMainFrame#getActionListenersMap(boolean)
	 */
	@Override
	protected Map<String,? extends ActionListener> getActionListenersMap (boolean createIfNotExist)
	{
		final Map<String,? extends ActionListener> org=super.getActionListenersMap(createIfNotExist);
		if (((org != null) && (org.size() > 0)) || (!createIfNotExist))
			return org;
		final Map<String,ActionListener> lm=new TreeMap<String,ActionListener>(String.CASE_INSENSITIVE_ORDER);
		lm.put(LOAD_CMD, getLoadFileListener());
		lm.put(SAVE_CMD, getSaveFileListener());
		lm.put(EXIT_CMD, getExitActionListener());
		lm.put(ABOUT_CMD, getShowManifestActionListener());
		lm.put(RUN_CMD, new ActionListener() {
			/*
			 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
			 */
			@Override
			public void actionPerformed (ActionEvent e)
			{
				if (e != null)
					createDependencies();
			}
		});
		lm.put(REFRESH_CMD, new ActionListener() {
			/*
			 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
			 */
			@Override
			public void actionPerformed (ActionEvent e)
			{
				if (e != null)
					refreshInputFile();
			}
		});
		lm.put("usage", new ActionListener() {
			/*
			 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
			 */
			@Override
			public void actionPerformed (ActionEvent e)
			{
				if (e != null)
					showUsage(getMainFrameInstance());
			}
		});
		// cache the freshly built map in the superclass
		setActionListenersMap(lm);
		return lm;
	}
	/**
	 * Builds the input-file panel (classpath path field + browse button)
	 * wired to the load-file listener.
	 */
	protected LRFieldWithButtonPanel createInputPanel (Element elem)
	{
		// delay auto-layout till after setting the text field
		final LRFieldWithButtonPanel p=new LRFieldWithButtonPanel(elem, false);
		final ClasspathInputTextField txtField=new ClasspathInputTextField();
		p.setTextField(txtField);
		p.layoutComponent();
		p.addActionListener(getLoadFileListener());
		return p;
	}
	/**
	 * Builds the output-file panel (pom path field + browse button)
	 * wired to the save-file listener.
	 */
	protected LRFieldWithButtonPanel createOutputPanel (Element elem)
	{
		// delay auto-layout till after setting the text field
		final LRFieldWithButtonPanel p=new LRFieldWithButtonPanel(elem, false);
		final PomInputTextField txtField=new PomInputTextField();
		p.setTextField(txtField);
		p.layoutComponent();
		p.addActionListener(getSaveFileListener());
		return p;
	}
	/**
	 * Builds the repository-entries table from its XML configuration and
	 * attaches the key listener; configuration failures are logged and
	 * re-thrown as runtime exceptions.
	 */
	protected RepositoryEntriesTable createEntriesTable (Element elem)
	{
		final RepositoryEntriesTable t=new RepositoryEntriesTable();
		try
		{
			t.fromXml(elem);
		}
		catch (Exception e)
		{
			throw _logger.errorObject("createEntriesTable(" + DOMUtils.toString(elem) + ") " + e.getClass().getName() + ": " + e.getMessage(), e, ExceptionUtil.toRuntimeException(e));
		}
		t.addKeyListener(new EntriesTableKeyListener(t));
		return t;
	}
	/**
	 * Builds the Ant-format checkbox from its XML configuration; failures
	 * are logged and re-thrown as runtime exceptions.
	 */
	protected JCheckBox createAntFormatSelector (final Element elem)
	{
		try
		{
			return new BaseCheckBox(elem);
		}
		catch(Exception e)
		{
			throw _logger.errorObject("createAntFormatSelector(" + DOMUtils.toString(elem) + ") " + e.getClass().getName() + ": " + e.getMessage(), e, ExceptionUtil.toRuntimeException(e));
		}
	}
	/*
	 * Dispatches each named configuration section to the matching widget
	 * factory; each section may appear at most once, unknown sections are
	 * delegated to the superclass.
	 * @see net.community.apps.common.BaseMainFrame#layoutSection(java.lang.String, org.w3c.dom.Element)
	 */
	@Override
	public void layoutSection (String name, Element elem) throws RuntimeException
	{
		if ("input-panel".equalsIgnoreCase(name))
		{
			if (_inputPanel != null)
				throw new IllegalStateException("layoutSection(" + name + ")[" + DOMUtils.toString(elem) + "] already initialized");
			_inputPanel = createInputPanel(elem);
		}
		else if ("output-panel".equalsIgnoreCase(name))
		{
			if (_outputPanel != null)
				throw new IllegalStateException("layoutSection(" + name + ")[" + DOMUtils.toString(elem) + "] already initialized");
			_outputPanel = createOutputPanel(elem);
		}
		else if ("entries-table".equalsIgnoreCase(name))
		{
			if (_entriesTable != null)
				throw new IllegalStateException("layoutSection(" + name + ")[" + DOMUtils.toString(elem) + "] already initialized");
			_entriesTable = createEntriesTable(elem);
		}
		else if ("output-format".equalsIgnoreCase(name))
		{
			if (_asAntFormat != null)
				throw new IllegalStateException("layoutSection(" + name + ")[" + DOMUtils.toString(elem) + "] already initialized");
			_asAntFormat = createAntFormatSelector(elem);
		}
		else
			super.layoutSection(name, elem);
	}
	/*
	 * Assembles the frame: toolbar (when it has handlers), the input/output/
	 * format widgets stacked in the north panel, and the scrollable entries
	 * table in the center.
	 * @see net.community.apps.common.BaseMainFrame#layoutComponent()
	 */
	@Override
	public void layoutComponent () throws RuntimeException
	{
		super.layoutComponent();
		final Container ctPane=getContentPane();
		final JPanel northPanel=new JPanel(new GridLayout(0, 1, 0, 5));
		try
		{
			final JToolBar b=getMainToolBar();
			final Map<String,AbstractButton> hm=setToolBarHandlers(b);
			// only show the toolbar if at least one button got a handler
			if ((hm != null) && (hm.size() > 0))
				northPanel.add(b);
		}
		catch(Exception e)
		{
			throw ExceptionUtil.toRuntimeException(e);
		}
		{
			final Component[] northComps={ _inputPanel, _outputPanel, _asAntFormat };
			for (final Component c : northComps)
			{
				if (null == c) // should not happen
					continue;
				northPanel.add(c);
			}
		}
		ctPane.add(northPanel, BorderLayout.NORTH);
		if (_entriesTable != null)
			ctPane.add(new ScrolledComponent<RepositoryEntriesTable>(RepositoryEntriesTable.class, _entriesTable));
	}
/**
 * Resolves the value that follows a command line option, validating that it
 * exists and is non-empty.
 *
 * @param opt the option flag being processed (used for error messages)
 * @param args the full arguments array
 * @param valueIndex index of the expected value (already advanced past the flag)
 * @return the non-empty option value
 * @throws IllegalStateException if the value is missing or null/empty
 */
private static String resolveOptionValue (final String opt, final String[] args, final int valueIndex)
{
    if (valueIndex >= args.length)
        throw new IllegalStateException("processMainArguments(" + opt + ") missing argument(s)");

    final String argVal=args[valueIndex];
    if ((null == argVal) || (argVal.length() <= 0))
        throw new IllegalStateException("processMainArguments(" + opt + ") argument value cannot be null/empty");
    return argVal;
}
/**
 * Parses the command line arguments and applies their side effects:
 * input/output files, Maven coordinates, project name and output format.
 * Each option may appear at most once; an unknown option shows usage help
 * and aborts.
 *
 * @param args original command line arguments (may be null/empty)
 * @throws IllegalStateException if an option is repeated, its value is
 * missing/empty, or the option is unknown
 */
private void processMainArguments (final String ... args)
{
    if ((null == args) || (args.length <= 0))
        return;

    Boolean antFormat=null;
    for (int aIndex=0; aIndex < args.length; aIndex++)
    {
        final String a=args[aIndex];
        final int aLen=(null == a) ? 0 : a.length();
        if (aLen <= 0)
            continue;

        if ("-i".equals(a) || "--input".equals(a))
        {
            aIndex++;
            final String argVal=resolveOptionValue(a, args, aIndex);
            if (_inputFile != null)
                throw new IllegalStateException("processMainArguments(" + a + ") already set");
            _inputFile = new File(argVal);
        }
        else if ("-o".equals(a) || "--output".equals(a))
        {
            aIndex++;
            final String argVal=resolveOptionValue(a, args, aIndex);
            if (_outputFile != null)
                throw new IllegalStateException("processMainArguments(" + a + ") already set");
            _outputFile = new File(argVal);
        }
        else if ("-g".equals(a) || "--group".equals(a))
        {
            aIndex++;
            final String argVal=resolveOptionValue(a, args, aIndex);
            if (_groupId != null)
                throw new IllegalStateException("processMainArguments(" + a + ") already set");
            _groupId = argVal;
        }
        else if ("-a".equals(a) || "--artifact".equals(a))
        {
            aIndex++;
            final String argVal=resolveOptionValue(a, args, aIndex);
            if (_artifactId != null)
                throw new IllegalStateException("processMainArguments(" + a + ") already set");
            _artifactId = argVal;
        }
        else if ("-n".equals(a) || "--name".equals(a))
        {
            aIndex++;
            final String argVal=resolveOptionValue(a, args, aIndex);
            if (_projectName != null)
                throw new IllegalStateException("processMainArguments(" + a + ") already set");
            _projectName = argVal;
        }
        else if ("-f".equals(a) || "--format".equals(a))
        {
            aIndex++;
            final String argVal=resolveOptionValue(a, args, aIndex);
            if (antFormat != null)
                throw new IllegalStateException("processMainArguments(" + a + ") already set");
            // NOTE(review): any value other than "ant"/"maven" is silently
            // ignored (matches original behavior) - confirm this is intended.
            if ("ant".equals(argVal))
                antFormat = Boolean.TRUE;
            else if ("maven".equals(argVal))
                antFormat = Boolean.FALSE;
        }
        else
        {
            showUsage(this);
            throw new IllegalStateException("processMainArguments(" + a + ") unknown argument");
        }
    }

    // Apply accumulated side effects only after all options parsed cleanly.
    if (_inputFile != null)
        loadFile(_inputFile, true);
    if (_outputFile != null)
        saveFile(_outputFile, null);
    if (antFormat != null)
        setUseAntFormat(antFormat.booleanValue());
}
/**
 * Creates the main frame and immediately applies any command line options
 * (which may load/save files or change the output format as side effects).
 *
 * @param args original arguments as received by <I>main</I> entry point
 * @throws Exception if unable to start main frame and application
 */
MainFrame (final String ... args) throws Exception
{
    super(args);
    processMainArguments(args);
}
}
|
<reponame>caHarkness/android-dl<filename>Library/src/main/java/com/caharkness/support/views/SupportToolbar2.java
package com.caharkness.support.views;
import android.content.Context;
import android.graphics.Color;
import android.graphics.Typeface;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.animation.AnimationUtils;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.caharkness.support.R;
import com.caharkness.support.SupportApplication;
import com.caharkness.support.utilities.SupportDrawable;
import com.caharkness.support.utilities.SupportMath;
import com.caharkness.support.utilities.SupportColors;
/**
 * A programmatically built toolbar widget.
 *
 * Layout: a vertical {@code container} whose first child is the
 * {@code master_toolbar} (left / center / right sub-layouts); additional
 * "temporary" toolbars can be pushed on top of it and popped off again.
 * All sub-views are created lazily through their getters.
 */
public class SupportToolbar2 extends LinearLayout
{
    // Lazily resolved icon/text color; null until first queried.
    private Integer foreground_color;
    public Integer getToolbarForegroundColor()
    {
        if (foreground_color == null)
            foreground_color = SupportColors.getForegroundColor(getContext());
        return foreground_color;
    }
    public void setToolbarForegroundColor(int color)
    {
        foreground_color = color;
    }
    // Lazily resolved background color; null until first queried.
    private Integer background_color;
    public Integer getToolbarBackgroundColor()
    {
        if (background_color == null)
            background_color = SupportColors.getBackgroundColor(getContext());
        return background_color;
    }
    public void setToolbarBackgroundColor(int color)
    {
        background_color = color;
    }
    // Left slot: typically hosts the navigation button.
    private LinearLayout left_layout;
    public LinearLayout getLeftLayout()
    {
        if (left_layout == null)
        {
            left_layout = new LinearLayout(getContext());
            left_layout.setPadding(
                SupportMath.inches(1 / 16f), 0,
                SupportMath.inches(1 / 16f), 0);
        }
        return left_layout;
    }
    // Center slot: hosts title/subtitle (and optional preference editor);
    // takes all remaining horizontal space (weight 1).
    private LinearLayout center_layout;
    public LinearLayout getCenterLayout()
    {
        if (center_layout == null)
        {
            center_layout = new LinearLayout(getContext());
            center_layout.setOrientation(LinearLayout.VERTICAL);
            center_layout.setGravity(Gravity.CENTER | Gravity.LEFT);
            center_layout.setLayoutParams(
                new LayoutParams(
                    LayoutParams.WRAP_CONTENT,
                    LayoutParams.WRAP_CONTENT, 1f));
            center_layout.setPadding(
                0, SupportMath.inches(1 / 16f),
                0, SupportMath.inches(1 / 16f));
        }
        return center_layout;
    }
    // Right slot: hosts the action buttons.
    private LinearLayout right_layout;
    public LinearLayout getRightLayout()
    {
        if (right_layout == null)
        {
            right_layout = new LinearLayout(getContext());
            right_layout.setPadding(
                SupportMath.inches(1 / 16f), 0,
                SupportMath.inches(1 / 16f), 0);
        }
        return right_layout;
    }
    // The horizontal row combining left, center and right slots.
    private LinearLayout master_toolbar;
    public LinearLayout getMasterToolbar()
    {
        if (master_toolbar == null)
        {
            master_toolbar = new LinearLayout(getContext());
            master_toolbar.setMinimumHeight(SupportMath.inches(3 / 8f));
            master_toolbar.setGravity(Gravity.CENTER);
            master_toolbar.setLayoutParams(
                new LayoutParams(
                    LayoutParams.MATCH_PARENT,
                    LayoutParams.WRAP_CONTENT));
            master_toolbar.addView(getLeftLayout());
            master_toolbar.addView(getCenterLayout());
            master_toolbar.addView(getRightLayout());
        }
        return master_toolbar;
    }
    // Vertical stack: index 0 is the master toolbar, later children are
    // temporary toolbars pushed on top of it.
    private LinearLayout container;
    public LinearLayout getContainer()
    {
        if (container == null)
        {
            container = new LinearLayout(getContext());
            container.setOrientation(LinearLayout.VERTICAL);
            container.setLayoutParams(
                new LayoutParams(
                    LayoutParams.MATCH_PARENT,
                    LayoutParams.WRAP_CONTENT));
            container.addView(getMasterToolbar());
        }
        return container;
    }
    public SupportToolbar2(Context context)
    {
        super(context);
        addView(getContainer());
    }
    //
    //
    //
    // Temporary toolbars
    //
    //
    //
    /**
     * Pushes a temporary toolbar: hides every current child and appends the
     * new one, so temporary toolbars stack like a LIFO.
     */
    public void addTemporaryToolbar(SupportToolbar2 temporary)
    {
        for (int i = 0; i < container.getChildCount(); i++)
        {
            View view = container.getChildAt(i);
            view.setVisibility(View.GONE);
        }
        container.addView(temporary);
    }
    /**
     * Pops the most recently added temporary toolbar (if any) and re-shows
     * the one beneath it.
     */
    public void removeTemporaryToolbar()
    {
        if (container.getChildCount() > 1)
        {
            int remove_index = container.getChildCount() - 1;
            int show_index = container.getChildCount() - 2;
            View remove_view = container.getChildAt(remove_index);
            View show_view = container.getChildAt(show_index);
            container.removeView(remove_view);
            show_view.setVisibility(View.VISIBLE);
        }
    }
    /** Removes every temporary toolbar and re-shows the master toolbar. */
    public void removeAllTemporaryToolbars()
    {
        while (container.getChildCount() > 1)
        {
            View view =
                container.getChildAt(container.getChildCount() - 1);
            container.removeView(view);
        }
        container
            .getChildAt(0)
            .setVisibility(View.VISIBLE);
    }
    /** Replaces any stacked temporary toolbars with exactly one. */
    public void setTemporaryToolbar(SupportToolbar2 temporary)
    {
        removeAllTemporaryToolbars();
        container.getChildAt(0).setVisibility(View.GONE);
        container.addView(temporary);
    }
    //
    //
    //
    // Toolbar appearance
    //
    //
    //
    /**
     * Adds a title to the menu item at 20pt using the full opacity of the foreground color.
     */
    public SupportToolbar2 setTitle(String text)
    {
        // Re-uses the existing "title" view if one was created before.
        TextView v =
            getCenterLayout()
                .findViewWithTag("title");
        if (v == null)
        {
            v = new TextView(getContext());
            v.setTextSize(20f);
            v.setTypeface(Typeface.DEFAULT_BOLD);
            v.setTag("title");
            getCenterLayout()
                .addView(v);
        }
        v.setText(text);
        v.setTextColor(getToolbarForegroundColor());
        // Hide the view entirely when there is no text to show.
        if (text == null || text.length() < 1)
            v.setVisibility(GONE);
        else v.setVisibility(VISIBLE);
        return this;
    }
    public String getTitle()
    {
        try
        {
            TextView v =
                getCenterLayout().findViewWithTag("title");
            return
                v.getText()
                    .toString();
        }
        catch (Exception x) {}
        // No title view yet (or lookup failed): report an empty title.
        return "";
    }
    /**
     * Adds a subtitle to the menu item at 14pt using the foreground color with translucency.
     */
    public SupportToolbar2 setSubtitle(String text)
    {
        // Re-uses the existing "subtitle" view if one was created before.
        TextView v =
            getCenterLayout()
                .findViewWithTag("subtitle");
        if (v == null)
        {
            v = new TextView(getContext());
            v.setTextSize(14f);
            v.setTypeface(Typeface.DEFAULT_BOLD);
            v.setTag("subtitle");
            getCenterLayout()
                .addView(v);
        }
        v.setText(text);
        v.setTextColor(
            SupportColors.translucent(
                getToolbarForegroundColor(),
                0x60));
        if (text == null || text.length() < 1)
            v.setVisibility(GONE);
        else v.setVisibility(VISIBLE);
        return this;
    }
    public String getSubtitle()
    {
        try
        {
            TextView v =
                getCenterLayout().findViewWithTag("subtitle");
            return
                v.getText()
                    .toString();
        }
        catch (Exception x) {}
        return "";
    }
    /**
     * Builds an icon button: bounces on click, runs the given action, and
     * shows its name as a toast on long-press.
     */
    private LinearLayout createToolbarButtonView(final String name, final int resource, final Runnable runnable)
    {
        ImageView
            icon = new ImageView(getContext());
        icon.setImageDrawable(
            SupportDrawable.tint(
                SupportDrawable.fromResourceSmall(resource),
                getToolbarForegroundColor()));
        //
        //
        //
        LinearLayout button = new LinearLayout(getContext());
        button.addView(icon);
        button.setPadding(
            SupportMath.inches(1 / 16f), SupportMath.inches(1 / 16f),
            SupportMath.inches(1 / 16f), SupportMath.inches(1 / 16f));
        button.setClickable(true);
        button.setOnClickListener(new OnClickListener()
        {
            @Override
            public void onClick(View view)
            {
                view.startAnimation(
                    AnimationUtils.loadAnimation(
                        getContext(),
                        R.anim.bounce_in));
                if (runnable != null)
                    post(runnable);
            }
        });
        button.setOnLongClickListener(new OnLongClickListener()
        {
            @Override
            public boolean onLongClick(View view)
            {
                Toast.makeText(
                    getContext(),
                    name,
                    Toast.LENGTH_SHORT
                ).show();
                return true;
            }
        });
        return button;
    }
    /** Installs a custom navigation button in the left slot. */
    public SupportToolbar2 setNavigationButton(int icon, Runnable runnable)
    {
        getLeftLayout().removeAllViews();
        getLeftLayout().addView(
            createToolbarButtonView(
                "Navigation",
                icon,
                runnable));
        return this;
    }
    /**
     * Installs a back-arrow navigation button that synthesizes a BACK key
     * press/release pair when tapped.
     */
    public SupportToolbar2 setNavigationButtonAsBack()
    {
        getLeftLayout().removeAllViews();
        getLeftLayout().addView(
            createToolbarButtonView(
                "Back",
                R.drawable.ic_arrow_back,
                new Runnable()
                {
                    @Override
                    public void run()
                    {
                        dispatchKeyEvent(new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_BACK));
                        dispatchKeyEvent(new KeyEvent(KeyEvent.ACTION_UP, KeyEvent.KEYCODE_BACK));
                    }
                }));
        return this;
    }
    /** Clears all action buttons from the right slot. */
    public SupportToolbar2 removeActions()
    {
        getRightLayout()
            .removeAllViews();
        return this;
    }
    /** Appends an action button to the right slot. */
    public SupportToolbar2 addAction(String name, int icon, final Runnable runnable)
    {
        getRightLayout().addView(
            createToolbarButtonView(
                name,
                icon,
                runnable));
        return this;
    }
    /**
     * Sets the background color and derives a readable foreground color
     * from it (dark text on light backgrounds, white otherwise).
     */
    public SupportToolbar2 setColor(int color)
    {
        setColors(
            //
            // Determine foreground color to use
            //
            SupportColors.isLight(color)?
                SupportColors.subtract(color, 0x7F) :
                SupportColors.get("white"),
            //
            // Background color
            //
            color);
        return this;
    }
    /**
     * Applies both colors and re-renders the title/subtitle so existing
     * text picks up the new foreground color.
     */
    public SupportToolbar2 setColors(int foreground_color, int background_color)
    {
        setToolbarForegroundColor(foreground_color);
        setToolbarBackgroundColor(background_color);
        setBackgroundColor(background_color);
        setTitle(getTitle());
        setSubtitle(getSubtitle());
        return this;
    }
    /**
     * Adds an inline text editor to the center slot that reads and writes
     * the string preference {@code pref} on every edit; optionally grabs
     * focus (and opens the soft keyboard) as soon as it is attached.
     */
    public SupportToolbar2 setStringPreference(final String hint, final String pref, final Runnable runnable, final boolean focus)
    {
        final EditText edit = new EditText(getContext());
        edit.setText(SupportApplication.getString(pref));
        edit.addTextChangedListener(new TextWatcher()
        {
            @Override
            public void beforeTextChanged(CharSequence charSequence, int x, int y, int z) {}
            @Override
            public void onTextChanged(CharSequence charSequence, int x, int y, int z) {}
            @Override
            public void afterTextChanged(Editable editable)
            {
                // Persist every keystroke, then notify the caller.
                SupportApplication.setString(
                    pref,
                    editable.toString());
                if (runnable != null)
                    post(runnable);
            }
        });
        edit.setBackgroundColor(Color.TRANSPARENT);
        edit.setTextColor(getToolbarForegroundColor());
        edit.setHint(hint);
        edit.setHintTextColor(SupportColors.translucent(getToolbarForegroundColor()));
        edit.setPadding(0, 0, 0, 0);
        edit.setFocusable(true);
        edit.setFocusableInTouchMode(true);
        edit.clearFocus();
        edit.setOnClickListener(new OnClickListener()
        {
            @Override
            public void onClick(View view)
            {
                // Force focus and show the soft keyboard explicitly.
                edit.requestLayout();
                edit.requestFocus();
                edit.setInputType(edit.getInputType());
                edit.setRawInputType(edit.getInputType());
                ((InputMethodManager) edit.getContext().getSystemService(Context.INPUT_METHOD_SERVICE))
                    .showSoftInput(edit, InputMethodManager.SHOW_IMPLICIT);
            }
        });
        if (focus)
        {
            // Simulate a tap once attached so the keyboard opens immediately.
            edit.addOnAttachStateChangeListener(
                new OnAttachStateChangeListener()
                {
                    @Override
                    public void onViewAttachedToWindow(View view)
                    {
                        view.performClick();
                    }
                    @Override
                    public void onViewDetachedFromWindow(View view)
                    {
                    }
                });
        }
        getCenterLayout()
            .addView(edit);
        return this;
    }
}
|
import random
def get_random_name(names):
    """Return one name chosen uniformly at random from *names*."""
    return random.choice(names)


names = ["Alice", "Bob", "Taylor", "David"]
random_name = get_random_name(names)
print(random_name)
<gh_stars>1-10
/**
 * @file Manages the createWeekends module, used to create the `weekends`
 * property of the class Month.
 */
// โโ MODULE โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
/**
* The `createWeekends()` function returns a number `array` that represent the
* month's days that are weekend. The value of the returned elements is the same
* `Date.prototype.getDate()`.
*
 * The function requires a parameter `options`, which must be an object with
 * the properties `YYMMDD`, `WEEKEND` and `SCE`.
*
 * The `options.YYMMDD` value must be a number array with three elements that
 * represents a date (year, month and day). The value of the month is as
 * `Date.prototype.getMonth()` (zero-based).
*
* The `options.WEEKEND` value must be a number array. The array values represent
* the days of the week that are weekend, the value of the elements is like
* `Date.prototype.getDay()`.
*
* The `options.SCE` value must be a number array with three elements. The array
* values represent the month's start day, month's current day and month's end
* day. The value of the elements is like `Date.prototype.getDate()`.
*
* @private
* @param {object} options - Function options.
* @param {Array.<number>} options.YYMMDD - A number array with three elements.
* @param {Array.<number>} options.WEEKEND - A number array.
* @param {Array.<number>} options.SCE - A number array with three elements.
* @returns {Array.<number>} A number array.
* @example const weekends = createWeekends({
* YYMMDD: [2020, 0, 1],
* WEEKEND: [6, 0],
* SCE: [1, 12, 31],
* }); // expected value [4, 5, 11, 12, 18, 19, 25, 26]
*
*/
const createWeekends = ({ YYMMDD, WEEKEND, SCE }) => {
  const [year, month] = YYMMDD;
  const lastDay = SCE[2];
  // Single reusable Date instance; only its day-of-month changes per step.
  const probe = new Date(year, month, 1);
  const weekendDays = [];
  let day = 1;
  while (day <= lastDay) {
    probe.setDate(day);
    if (WEEKEND.indexOf(probe.getDay()) !== -1) weekendDays.push(day);
    day += 1;
  }
  return weekendDays;
};
// ── EXPORT MODULE ────────────────────────────────────────────────────────────
module.exports = createWeekends;
|
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_fast_rewind_twotone = void 0;
var ic_fast_rewind_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M9 14.14V9.86L5.97 12zm9 0V9.86L14.97 12z",
"opacity": ".3"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M11 6l-8.5 6 8.5 6V6zm-2 8.14L5.97 12 9 9.86v4.28zM20 6l-8.5 6 8.5 6V6zm-2 8.14L14.97 12 18 9.86v4.28z"
},
"children": []
}]
};
exports.ic_fast_rewind_twotone = ic_fast_rewind_twotone; |
/**
 * Minimal wrapper around a fixed-size int array.
 */
public class MyArray {
    // Backing storage; length is fixed at construction time.
    private int[] array;

    /**
     * @param size number of elements (must be non-negative)
     */
    public MyArray(int size) {
        array = new int[size];
    }

    /** Sets every element of the array to 1. */
    public void setToOne() {
        // Arrays.fill is the idiomatic, intrinsified bulk assignment -
        // replaces the manual index loop.
        java.util.Arrays.fill(array, 1);
    }
}
#!/usr/bin/env bash
# docker-test: optionally build the lambda package, then run it locally in a
# lambci container, reading the invocation payload from stdin.

# get run options
while test $# -gt 0; do
    case "$1" in
        -h|--help)
            echo "pac-man$ docker-test - run lambda package"
            echo " "
            echo "pac-man$ docker-test [options]"
            echo " "
            echo "options:"
            echo "-h, --help show brief help"
            echo "-b, --build build lambda package prior to running"
            exit 0
            ;;
        -b|--build)
            shift
            export PACMAN_BUILD=1
            ;;
        *)
            break
            ;;
    esac
done

# cd to pac-man directory
cd "$(dirname "$0")"

# Prompt for a payload on stdin and invoke the packaged lambda.
run_lambda() {
    echo "Enter Payload Then Press CTRL-D..." && \
    docker run \
        -i -e DOCKER_LAMBDA_USE_STDIN=1 \
        -e AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} \
        -e AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} \
        --rm \
        -v /tmp/lambda:/var/task \
        lambci/lambda:provided
}

# FIX: the original test was `"${PACMAN_BUILD}"=="1"` - without spaces the
# whole thing is one non-empty word, so the condition was always true and
# only the -n check mattered. Spaces make it a real string comparison.
if [[ -n "${PACMAN_BUILD:-}" && "${PACMAN_BUILD:-}" == "1" ]]; then
    # build lambda package, then run it (skip the run if any step fails)
    docker run --rm \
        -v ${PWD}:/code \
        -v ${HOME}/.cargo/registry:/root/.cargo/registry \
        -v ${HOME}/.cargo/git:/root/.cargo/git \
        softprops/lambda-rust && \
    unzip -o \
        target/lambda/release/pac-man.zip \
        -d /tmp/lambda && \
    run_lambda
else
    run_lambda
fi
<gh_stars>1-10
import * as V3 from 'shared/types/models/widgets/versioned/v3';
import * as V4 from 'shared/types/models/widgets/versioned/v4';
import { IMigrator } from 'shared/types/app';
import { defaultPresetV4 } from 'shared/constants';
/** Migrates a v3 user config to v4: presets collapse to the default preset. */
const migrate = (config: V3.IUserConfig): V4.IUserConfig => ({
  ...config,
  presets: [{ ...defaultPresetV4 }],
  version: 4,
});

export const v4Migrator: IMigrator<4> = { version: 4, migrate };
|
#!/usr/bin/env bash
# Installs, patches and builds the PARSEC 3.0 benchmark suite for HAFT.
# Expects ${HAFT} to point at the HAFT checkout and /data to be writable.

echo "===== Installing dependencies ====="
apt-get update
apt-get install -y mercurial wget pkg-config gettext libbsd-dev libx11-dev x11proto-xext-dev libxext-dev libxt-dev libxi-dev libxmu-dev

# get parsec
echo "===== Downloading Parsec ====="
mkdir -p /root/bin/benchmarks
cd /data/
# Remove any stale extracted tree so the tarball unpacks cleanly.
if [ -d "parsec-3.0" ]; then
    rm -rf parsec-3.0
fi
wget -nc http://parsec.cs.princeton.edu/download/3.0/parsec-3.0.tar.gz
tar -xzf parsec-3.0.tar.gz
ln -s /data/parsec-3.0 /root/bin/benchmarks/parsec-3.0

echo "===== Patching Parsec ====="
cd /root/bin/benchmarks/parsec-3.0
# Track the pristine tree in a throwaway hg repo so the HAFT patch can be
# applied (and inspected) as a changeset.
cp ${HAFT}install/patches/parsec-hgignore .hgignore
hg init && hg add * && hg add .hgignore
echo -e "[ui]\nusername = Your Name <your@mail>" > .hg/hgrc
hg com -m "1"
hg import ${HAFT}install/patches/parsec-complete_20150703.patch -m "2"
rm version
# HUGE was removed from modern libm headers; HUGE_VAL is the replacement.
sed -i 's|HUGE|HUGE_VAL|g' pkgs/apps/ferret/src/benchmark/ferret-*
sed -i 's|HUGE|HUGE_VAL|g' pkgs/apps/ferret/src/src/lsh/LSH_query*
cd bin/
chmod +x parsecperf
chmod +x parsecperftx

# prepare Parsec for Hardening
echo "===== Building Parsec ====="
cd /root/bin/benchmarks/parsec-3.0
source env.sh
declare -a benchmarks=("blackscholes" "ferret" "swaptions" "vips" "x264" "canneal" "streamcluster" "dedup")
declare -a typesarr=("clang")
# Fail fast during the builds only; the rest of the script is best-effort.
set -e
for benchmark in "${benchmarks[@]}"; do
    for type in "${typesarr[@]}"; do
        parsecmgmt -a build -p ${benchmark} -c ${type}
    done # type
done # benchmarks
set +e

# get inputs and setup tests
echo "===== Preparing Input files ====="
cd ${HAFT}src/benches/parsec
./copyinputs.sh
./collect.sh
<reponame>OneRainbowDev/django-machina<gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from faker import Faker
from machina.forms.widgets import SelectWithDisabled
faker = Faker()  # NOTE(review): unused in this module -- kept for compatibility


class TestSelectWithDisabled(object):
    """Rendering behavior of the SelectWithDisabled widget options."""

    def test_can_render_a_single_option(self):
        widget = SelectWithDisabled()
        markup = widget.render_option([], '1', 'Test forum')
        assert markup == '<option value="1">Test forum</option>'

    def test_can_render_a_single_option_that_is_selected(self):
        widget = SelectWithDisabled()
        markup = widget.render_option(['1', ], '1', 'Test forum')
        assert markup == '<option value="1" selected="selected">Test forum</option>'

    def test_can_render_a_single_disabled_option(self):
        widget = SelectWithDisabled()
        markup = widget.render_option([], '1', {'label': 'Test forum', 'disabled': True})
        assert markup == '<option value="1" disabled="disabled">Test forum</option>'
|
#! /bin/sh
# Generates the C skeleton table (skel[]) from flex.skl: validates the
# version argument, preprocesses the skeleton with m4, and emits each line
# as a quoted C string literal.
# This file is part of flex.

# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:

# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.

# Neither the name of the University nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.

# THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE.

if test ! $# = 3; then
echo 'Usage: mkskel.sh srcdir m4 version' >&2
exit 1
fi

# Header and opening of the generated C array (multi-line literal).
echo '/* File created from flex.skl via mkskel.sh */
#include "flexdef.h"
const char *skel[] = {'

srcdir=$1
m4=$2
VERSION=$3

# Version must be dotted digits only (it is split on '.' below).
case $VERSION in
*[!0-9.]*) echo 'Invalid version number' >&2; exit 1;;
esac

IFS=.
set $VERSION

# First sed pass shields literal m4 tokens from expansion; m4 substitutes the
# version macros; second sed pass restores the tokens, strips %# comment
# lines, escapes quotes/backslashes and wraps each line as a C string.
sed 's/4_/a4_/g
s/m4preproc_/m4_/g
' "$srcdir/flex.skl" |
"$m4" -P -I "$srcdir" "-DFLEX_MAJOR_VERSION=$1" \
"-DFLEX_MINOR_VERSION=$2" \
"-DFLEX_SUBMINOR_VERSION=$3" |
sed '/^%#/d
s/m4_/m4preproc_/g
s/a4_/4_/g
s/[\\"]/\\&/g
s/.*/ "&",/'

# Terminate the array with a NULL sentinel.
echo ' 0
};'
#!/bin/sh
# CocoaPods "Copy Pods Resources" build phase: compiles and copies pod
# resources (storyboards, xibs, data models, asset catalogs, plain files)
# into the product bundle.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
  # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
  # resources to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"

RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"

XCASSET_FILES=()

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

case "${TARGETED_DEVICE_FAMILY:-}" in
  1,2)
    TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
    ;;
  1)
    TARGET_DEVICE_ARGS="--target-device iphone"
    ;;
  2)
    TARGET_DEVICE_ARGS="--target-device ipad"
    ;;
  3)
    TARGET_DEVICE_ARGS="--target-device tv"
    ;;
  4)
    TARGET_DEVICE_ARGS="--target-device watch"
    ;;
  *)
    TARGET_DEVICE_ARGS="--target-device mac"
    ;;
esac

# Dispatches one resource by extension: compiled formats are processed with
# the matching tool; everything else is queued for a bulk rsync at the end.
install_resource()
{
  if [[ "$1" = /* ]] ; then
    RESOURCE_PATH="$1"
  else
    RESOURCE_PATH="${PODS_ROOT}/$1"
  fi
  if [[ ! -e "$RESOURCE_PATH" ]] ; then
    cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
    exit 1
  fi
  case $RESOURCE_PATH in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.framework)
      echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
      xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    *)
      echo "$RESOURCE_PATH" || true
      echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/Fastboard/Icons.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/Fastboard/LocalizedStrings.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/Fastboard/Icons.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/Fastboard/LocalizedStrings.bundle"
fi

mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find -L "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # FIX: the glob must be outside the quotes. The original pattern
    # "${PODS_ROOT}*" quoted the '*', turning it into a literal character,
    # so Pods-owned asset catalogs were never excluded from the list.
    if [[ $line != "${PODS_ROOT}"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"

  if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  else
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
  fi
fi
|
({
    // Opens the standard record detail page for the selected property.
    navigateToDetailsView: function (component) {
        var property = component.get('v.property');
        var navEvent = $A.get('e.force:navigateToSObject');
        navEvent.setParams({
            recordId: property.Id
        });
        navEvent.fire();
    },

    // Broadcasts the selected property on the 'Properties' channel.
    propertySelected: function (component) {
        var property = component.get('v.property');
        var selectEvent = $A.get('e.ltng:selectSObject');
        selectEvent.setParams({
            recordId: property.Id,
            channel: 'Properties'
        });
        selectEvent.fire();
    }
});
|
// +build !integration
// +build darwin freebsd linux openbsd windows
package cpu
import (
"runtime"
"testing"
"github.com/elastic/gosigar"
"github.com/stretchr/testify/assert"
)
// TestGetCpuTimes verifies that cumulative CPU times can be sampled and
// that user/system counters are non-zero on a running host.
func TestGetCpuTimes(t *testing.T) {
	stats, err := GetCpuTimes()

	assert.NotNil(t, stats)
	assert.Nil(t, err)
	assert.True(t, stats.User > 0)
	assert.True(t, stats.Sys > 0)
}
// TestCpuPercentage feeds two consecutive samples into AddCpuPercentage and
// checks that percentages are zero for the first sample and derived from
// the counter deltas for the second.
func TestCpuPercentage(t *testing.T) {
	monitor := CPU{}

	first := CpuTimes{
		Cpu: gosigar.Cpu{
			User:    10855311,
			Nice:    0,
			Sys:     2021040,
			Idle:    17657874,
			Wait:    0,
			Irq:     0,
			SoftIrq: 0,
			Stolen:  0,
		},
	}
	monitor.AddCpuPercentage(&first)

	// No previous sample exists yet, so percentages stay at zero.
	assert.Equal(t, first.UserPercent, 0.0)
	assert.Equal(t, first.SystemPercent, 0.0)

	second := CpuTimes{
		Cpu: gosigar.Cpu{
			User:    10855693,
			Nice:    0,
			Sys:     2021058,
			Idle:    17657876,
			Wait:    0,
			Irq:     0,
			SoftIrq: 0,
			Stolen:  0,
		},
	}
	monitor.AddCpuPercentage(&second)

	// Percentages are computed from the delta against the first sample.
	assert.Equal(t, second.UserPercent, 0.9502)
	assert.Equal(t, second.SystemPercent, 0.0448)
}
// TestGetSystemLoad verifies the 1/5/15 minute load averages are readable
// and positive; skipped on Windows where load averages do not exist.
func TestGetSystemLoad(t *testing.T) {
	if runtime.GOOS == "windows" {
		// No load data on Windows.
		return
	}

	load, err := GetSystemLoad()

	assert.NotNil(t, load)
	assert.Nil(t, err)
	assert.True(t, load.Load1 > 0)
	assert.True(t, load.Load5 > 0)
	assert.True(t, load.Load15 > 0)
}
|
<reponame>LarsBehrenberg/e-wallet
import React from 'react';
import ImportCSV from '../components/ImportPage/ImportCSV';
import CloudUploadTwoToneIcon from '@material-ui/icons/CloudUploadTwoTone';
// Import page: a header banner (icon, title, description) followed by the
// CSV upload widget.
export default function Import() {
  return (
    <>
      <div className="app-page-title app-page-title--shadow py-4 px-5 mt-0 ml-0 mb-3">
        <div>
          <div className="app-page-title--first">
            <div className="app-page-title--iconbox d-70">
              <div className="d-100 d-flex align-items-center justify-content-center display-1">
                <CloudUploadTwoToneIcon className="d-40 text-primary" />
              </div>
            </div>
            <div className="app-page-title--heading">
              <h1>Import</h1>
              <div className="app-page-title--description">
                Import your CSV and add transactions to your E-Wallet database.
              </div>
            </div>
          </div>
        </div>
        <div className="d-flex align-items-center"></div>
      </div>
      <ImportCSV />
    </>
  );
}
|
def capitalize_first_char(sentence):
    """Return *sentence* with the first character of every word upper-cased.

    Only the first character of each word changes; the rest keeps its
    original casing (unlike ``str.title``). Because ``str.split()`` is used
    for tokenizing, runs of whitespace collapse to single spaces and
    leading/trailing whitespace is dropped - same as the original loop-based
    implementation, but built with the idiomatic ``str.join``.
    """
    return " ".join(word[0].upper() + word[1:] for word in sentence.split())


sentence = "hello world"
print(capitalize_first_char(sentence))  # Output: "Hello World"
package me.zhengjie.modules.system.service.dto;
import lombok.Getter;
import lombok.Setter;
import me.zhengjie.base.CommonDto;
import java.io.Serializable;
import java.util.List;
import java.util.Objects;
/**
 * DTO describing one menu node in the navigation tree (Lombok generates the
 * getters/setters). Identity is defined solely by {@code id}.
 *
 * @author jinjin
 * @date 2020-09-25
 */
@Getter
@Setter
public class MenuDto extends CommonDto implements Serializable {
    private static final long serialVersionUID = 1L;
    private Long id;
    // Parent menu id; null for root-level entries.
    private Long pid;
    // Child nodes, populated when the tree is assembled.
    private List<MenuDto> children;
    // Number of direct children (drives getHasChildren/getLeaf).
    private Integer subCount;
    private Integer type;
    private String title;
    private String componentName;
    private String component;
    private Integer menuSort;
    private String icon;
    private String path;
    private Boolean iFrame;
    private Boolean cache;
    private Boolean hidden;
    private String permission;
    // NOTE(review): both helpers unbox subCount and will NPE if it is null
    // -- confirm subCount is always populated before these are called.
    public Boolean getHasChildren() {
        return subCount > 0;
    }
    public Boolean getLeaf() {
        return subCount <= 0;
    }
    // Alias used by tree-view components expecting a "label" property.
    public String getLabel() {
        return title;
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        MenuDto menuDto = (MenuDto) o;
        return Objects.equals(id, menuDto.id);
    }
    @Override
    public int hashCode() {
        return Objects.hash(id);
    }
}
|
<filename>export_sprite_from_tiled.py
import os
import json
from itertools import product
from PIL import Image
class ExportSprite:
    """Compose sprite sheets from a Tiled map export.

    Loads a tilemap/mapdata JSON pair and the tileset image from
    ``./tiled_files``, then renders the map layers into a single
    sprite-sheet PNG under ``./results``.  Instantiating the class
    immediately generates the animated sheet.
    """

    def __init__(self, tiled_map_path: str = './tiled_files/tilemap.json', map_data_path: str = './tiled_files/mapdata.json') -> None:
        """Load map/tileset resources and render the animated sheet."""
        self.tilemap = self._load_tiled_map_file(tiled_map_path)
        self.mapdata = self._load_map_data_file(map_data_path)
        self.tileset = Image.open('./tiled_files/tileset.png')
        self.generate_animated_sprite_sheet()

    def _load_tiled_map_file(self, tiled_map_path):
        """Return the parsed tilemap JSON (tile metadata, tile size)."""
        with open(tiled_map_path, encoding='utf-8') as f:
            return json.load(f)

    def _load_map_data_file(self, map_data_path):
        """Return the parsed mapdata JSON (layers, map dimensions)."""
        with open(map_data_path, encoding='utf-8') as f:
            return json.load(f)

    def _get_map_matrix(self):
        """Return one {cell_index: tile_value} dict per map layer."""
        return [dict(enumerate(layer['data'])) for layer in self.mapdata['layers']]

    def _get_tile_by_id(self, value):
        """Crop and return tile number *value* from the tileset.

        Tiles are numbered left-to-right, top-to-bottom.  Returns None when
        *value* lies outside the tileset grid (matching the original scan's
        fall-through).  Computed arithmetically instead of scanning every
        grid cell per lookup.
        """
        w, h = self.tileset.size
        d = self.tilemap['tilewidth']
        cols, rows = w // d, h // d
        if not 0 <= value < cols * rows:
            return None
        row, col = divmod(value, cols)
        left, top = col * d, row * d
        return self.tileset.crop((left, top, left + d, top + d))

    def _save_results(self, image):
        """Write *image* to ./results, creating the directory if needed."""
        # exist_ok avoids the FileExistsError that bare os.mkdir raised
        # on every run after the first.
        os.makedirs('./results', exist_ok=True)
        image.save('./results/result_sprite.png')

    def generate_sprite_sheet(self):
        """Render every layer onto one static sprite sheet and save it."""
        tile_size = self.mapdata['tilewidth']
        sprite_width = tile_size * self.mapdata['width']
        sprite_height = tile_size * self.mapdata['height']
        sprite = Image.new('RGB', (sprite_width, sprite_height))
        for matrix in self._get_map_matrix():
            x_offset = 0
            y_offset = 0
            for value in matrix.values():
                if x_offset == sprite_width:
                    x_offset = 0
                    y_offset += tile_size
                if value != 0:
                    # Tiled stores ids 1-based; 0 means "empty cell".
                    tile = self._get_tile_by_id(value - 1)
                    # Third arg uses the tile as its own alpha mask
                    # (assumes the tileset has an alpha channel).
                    sprite.paste(tile, (x_offset, y_offset), tile)
                # Advance by the fixed tile size.  The original advanced by
                # the last cropped tile's width, which raised NameError when
                # the first cell of a layer was empty.
                x_offset += tile_size
        self._save_results(sprite)

    def generate_animated_sprite_sheet(self, frames: int = 4):
        """Render *frames* animation frames side by side and save the sheet.

        Tiles with an "animation" entry in the tilemap advance one step per
        frame; static tiles repeat unchanged in every frame.
        """
        tile_size = self.mapdata['tilewidth']
        sprite_width = tile_size * self.mapdata['width']
        sprite_height = tile_size * self.mapdata['height']
        sprite = Image.new('RGB', (sprite_width * frames, sprite_height))
        matrixes = self._get_map_matrix()
        for frame in range(frames):
            frame_origin = sprite_width * frame
            for matrix in matrixes:
                x_offset = frame_origin
                y_offset = 0
                for value in matrix.values():
                    if x_offset == frame_origin + sprite_width:
                        x_offset = frame_origin
                        y_offset += tile_size
                    if value != 0:
                        tile_id = value - 1
                        # NOTE(review): indexes tilemap['tiles'] by tile id,
                        # which assumes the Tiled export lists an entry for
                        # every tile in id order -- TODO confirm.
                        tile_meta = self.tilemap['tiles'][tile_id]
                        if 'animation' in tile_meta:
                            tile = self._get_tile_by_id(tile_meta['animation'][frame]['tileid'])
                        else:
                            tile = self._get_tile_by_id(tile_id)
                        sprite.paste(tile, (x_offset, y_offset), tile)
                    # Fixed advance; original used tile.size[0] and raised
                    # NameError when a layer began with an empty cell.
                    x_offset += tile_size
        self._save_results(sprite)
ExportSprite()
|
import random
def getRandomUniqueInteger(n):
    """Return a list of *n* distinct random integers drawn from 1..1000.

    Uses random.sample, which guarantees uniqueness in one pass.  Raises
    ValueError when n > 1000 -- the original rejection-sampling loop would
    spin forever in that case, and degraded quadratically as n approached
    the pool size.
    """
    return random.sample(range(1, 1001), n)
#!/usr/bin/env bash
# Deploy a private Docker registry as a swarm stack backed by a REX-Ray
# volume, then report service/task status across the three swarm nodes.
#export MACHINE_STORAGE_PATH="/run/media/picodotdev/BMOVE ROJO/docker-machine/"

# Abort on any command failure, unset variable, or pipeline error --
# without this the script kept issuing docker commands after a failure.
set -euo pipefail

# Point the docker CLI at the swarm manager.
eval $(docker-machine env node-01)

# Persistent 5 GB volume for the registry data.
docker volume create --driver rexray --name registry --opt size=5
docker stack deploy -c docker-compose-stack-registry.yml registry

# Give the stack time to converge before inspecting it.
sleep 30s

echo -e "\n# Cluster services"
docker service ls
echo -e "\n# Registry service tasks"
docker service ps registry_registry

for i in "01" "02" "03"; do
    echo -e "\n# Node $i containers"
    eval $(docker-machine env node-$i)
    docker ps
done
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.