text stringlengths 1 1.05M |
|---|
#!/bin/sh
# Filter a trace/profile dump:
#   * drop every line carrying an MC/MR/MW/PC event marker
#   * strip the leading count column from PW/PR lines
# Usage: script SRC DST
filter_trace() {
  # -r: GNU sed extended regex so (A|B) alternation works unescaped.
  sed -r -e '/ (MC|MR|MW|PC) /d' -e 's/^ +[0-9]+ (PW|PR)/\1/' "$1" > "$2"
}
# Fix: the originals $SRC/$DST were unquoted (breaks on spaces/globs) and
# missing arguments produced a confusing sed/redirect error.
if [ "$#" -eq 2 ]; then
  filter_trace "$1" "$2"
else
  echo "usage: $0 SRC DST" >&2
fi
|
// Doxygen-generated navigation index for ClActivationWorkload.hpp.
// Each entry is a [display name, target page, anchor-or-subindex] triple
// consumed by the generated HTML docs; do not edit by hand.
var _cl_activation_workload_8hpp =
[
[ "ClActivationWorkload", "classarmnn_1_1_cl_activation_workload.xhtml", "classarmnn_1_1_cl_activation_workload" ],
[ "ClActivationWorkloadValidate", "_cl_activation_workload_8hpp.xhtml#a42ef3cee193102dc7755193579209cca", null ]
]; |
#! /bin/sh
# Version 0.5
# This is a startup script for traccar
# Based on UniFi Controller on Debian for https://metis.fi/en/2018/02/unifi-on-gcp/ v1.3.3
#
# You may use this as you see fit as long as I am credited for my work.
# (c) 2018 Petri Riihikallio Metis Oy
# (c) 2019 Jonathan Dixon
###########################################################
#
# Set up logging for unattended scripts and UniFi's MongoDB log
# Variable $LOG is used later on in the script.
#
LOG="/var/log/unifi/gcp-traccar.log"
# One-time setup: install a logrotate policy so $LOG doesn't grow without
# bound (monthly rotation, keep 4, compressed). The heredoc is unquoted on
# purpose so $LOG expands into the generated config.
if [ ! -f /etc/logrotate.d/traccar-unifi.conf ]; then
cat > /etc/logrotate.d/traccar-unifi.conf <<_EOF
$LOG {
monthly
rotate 4
compress
}
_EOF
echo "Script logrotate set up"
fi
###########################################################
#
# Turn off IPv6 for now
#
# Writes a persistent sysctl drop-in once, then reloads all sysctl settings
# so the change takes effect immediately as well as on future boots.
if [ ! -f /etc/sysctl.d/20-disableIPv6.conf ]; then
echo "net.ipv6.conf.all.disable_ipv6=1" > /etc/sysctl.d/20-disableIPv6.conf
sysctl --system > /dev/null
echo "IPv6 disabled"
fi
###########################################################
#
# Update DynDNS as early in the script as possible
#
# The update URL is read from the instance's custom metadata; `curl -f`
# fails silently when the attribute is absent, leaving $ddns empty.
ddns=$(curl -fs -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/attributes/ddns-url")
# Fix: `[ ${ddns} ]` is an unquoted one-argument test — it misbehaves if the
# URL contains spaces or glob characters; use an explicit quoted -n test and
# quote the URL passed to curl.
if [ -n "${ddns}" ]; then
curl -fs "${ddns}"
echo "Dynamic DNS accessed"
fi
###########################################################
#
# Create a swap file for small memory instances and increase /run
#
if [ ! -f /swapfile ]; then
# Parse total RAM (MB) from `free`; field 2 of the "Mem:" row.
memory=$(free -m | grep "^Mem:" | tr -s " " | cut -d " " -f 2)
echo "${memory} megabytes of memory detected"
# The "0${memory}" prefix keeps the -lt comparison numeric even if
# ${memory} came back empty.
if [ -z ${memory} ] || [ "0${memory}" -lt "2048" ]; then
fallocate -l 2G /swapfile
chmod 600 /swapfile
mkswap /swapfile >/dev/null
swapon /swapfile
# Persist both the swap file and a larger /run tmpfs across reboots.
echo '/swapfile none swap sw 0 0' >> /etc/fstab
echo 'tmpfs /run tmpfs rw,nodev,nosuid,size=400M 0 0' >> /etc/fstab
mount -o remount,rw,nodev,nosuid,size=400M tmpfs /run
echo "Swap file created"
fi
fi
###########################################################
#
# Add backports if it doesn't exist
#
# Detect the Debian release codename; empty if lsb_release is unavailable.
release=$(lsb_release -a 2>/dev/null | grep "^Codename:" | cut -f 2)
if [ ${release} ] && [ ! -f /etc/apt/sources.list.d/backports.list ]; then
cat > /etc/apt/sources.list.d/backports.list <<_EOF
deb http://deb.debian.org/debian/ ${release}-backports main
deb-src http://deb.debian.org/debian/ ${release}-backports main
_EOF
echo "Backports (${release}) added to APT sources"
fi
###########################################################
#
# Install stuff
#
# Required preliminaries: one-time APT key import and full system upgrade.
# The flag file /usr/share/misc/apt-upgraded-1 marks completion so this
# only runs on the first boot.
if [ ! -f /usr/share/misc/apt-upgraded-1 ]; then
export APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn # For GCP packages
curl -Lfs https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - # For GCP packages
apt-get -qq update -y >/dev/null
DEBIAN_FRONTEND=noninteractive apt-get -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" upgrade -y >/dev/null # GRUB upgrades require special flags
rm -f /usr/share/misc/apt-upgraded # Old flag file; -f so a fresh instance without it doesn't log an error
touch /usr/share/misc/apt-upgraded-1
echo "System upgraded"
fi
# HAVEGEd is straightforward
# Pattern used below: dpkg-query reports "install ok installed" for a
# present package; the "x" prefix guards against empty expansions in [ ].
haveged=$(dpkg-query -W --showformat='${Status}\n' haveged 2>/dev/null)
if [ "x${haveged}" != "xinstall ok installed" ]; then
if apt-get -qq install -y haveged >/dev/null; then
echo "Haveged installed"
fi
fi
certbot=$(dpkg-query -W --showformat='${Status}\n' certbot 2>/dev/null)
if [ "x${certbot}" != "xinstall ok installed" ]; then
# Prefer the backports build of certbot; fall back to the stable one.
if (apt-get -qq install -y -t ${release}-backports certbot >/dev/null) || (apt-get -qq install -y certbot >/dev/null); then
echo "CertBot installed"
fi
fi
f2b=$(dpkg-query -W --showformat='${Status}\n' fail2ban 2>/dev/null)
if [ "x${f2b}" != "xinstall ok installed" ]; then
if apt-get -qq install -y fail2ban >/dev/null; then
echo "Fail2Ban installed"
fi
fi
# Apache is installed unconditionally; it is configured as a TLS reverse
# proxy for traccar at the bottom of this script.
apt-get -qq install -y apache2
###########################################################
#
# Set the time zone
#
# Time zone comes from instance metadata and is applied only if it names a
# real zoneinfo file.
tz=$(curl -fs -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/attributes/timezone")
if [ ${tz} ] && [ -f /usr/share/zoneinfo/${tz} ]; then
# timedatectl needs dbus; retry once after a pause if it fails to start.
apt-get -qq install -y dbus >/dev/null
if ! systemctl start dbus; then
echo "Trying to start dbus"
sleep 15
systemctl start dbus
fi
if timedatectl set-timezone $tz; then echo "Localtime set to ${tz}"; fi
# Restart rsyslog so log timestamps pick up the new zone.
systemctl reload-or-restart rsyslog
fi
###########################################################
#
# Set up unattended upgrades after 04:00 with automatic reboots
#
if [ ! -f /etc/apt/apt.conf.d/51unattended-upgrades-unifi ]; then
cat > /etc/apt/apt.conf.d/51unattended-upgrades-unifi <<_EOF
Acquire::AllowReleaseInfoChanges "true";
Unattended-Upgrade::Origins-Pattern {
"o=Debian,a=stable";
"c=ubiquiti";
};
Unattended-Upgrade::Remove-Unused-Dependencies "true";
Unattended-Upgrade::Automatic-Reboot "true";
_EOF
# Override the stock apt-daily-upgrade.timer so upgrades (and possible
# reboots) happen shortly after 04:00 local time.
cat > /etc/systemd/system/timers.target.wants/apt-daily-upgrade.timer <<_EOF
[Unit]
Description=Daily apt upgrade and clean activities
After=apt-daily.timer
[Timer]
OnCalendar=4:00
RandomizedDelaySec=30m
Persistent=true
[Install]
WantedBy=timers.target
_EOF
systemctl daemon-reload
systemctl reload-or-restart unattended-upgrades
echo "Unattended upgrades set up"
fi
###########################################################
#
# Set up daily backup to a bucket after 01:00
#
# Only configured when a "bucket" metadata attribute names a GCS bucket.
bucket=$(curl -fs -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/attributes/bucket")
if [ ${bucket} ]; then
# Backup script: traccar is stopped during the rsync so the data
# directory is consistent, then restarted.
cat > /usr/local/sbin/traccar_data_backup.sh <<_EOF
#! /bin/sh
systemctl stop traccar.service
# TODO: add the -d option to rsync, if we have versioning enabled on the bucket
/usr/bin/gsutil -m rsync -r /opt/traccar/data gs://$bucket
systemctl start traccar.service
_EOF
cat > /etc/systemd/system/unifi-backup.service <<_EOF
[Unit]
Description=Daily backup to ${bucket} service
After=network-online.target
Wants=network-online.target
[Service]
Type=oneshot
ExecStart=/bin/sh /usr/local/sbin/traccar_data_backup.sh
_EOF
cat > /etc/systemd/system/unifi-backup.timer <<_EOF
[Unit]
Description=Daily backup to ${bucket} timer
[Timer]
OnCalendar=1:00
RandomizedDelaySec=30m
[Install]
WantedBy=timers.target
_EOF
systemctl daemon-reload
systemctl start unifi-backup.timer
echo "Backups to ${bucket} set up"
fi
###########################################################
#
# Set up Let's Encrypt
#
# Everything below needs a DNS name from instance metadata; without one
# there is nothing to issue a certificate for, so the script ends here.
dnsname=$(curl -fs -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/attributes/dns-name")
if [ -z ${dnsname} ]; then exit 0; fi
privkey=/etc/letsencrypt/live/${dnsname}/privkey.pem
pubcrt=/etc/letsencrypt/live/${dnsname}/cert.pem
chain=/etc/letsencrypt/live/${dnsname}/chain.pem
caroot=/usr/share/misc/ca_root.pem
# Write the cross signed root certificate to disk
# (DST Root CA X3 — the legacy cross-signing root; content must not change).
if [ ! -f $caroot ]; then
cat > $caroot <<_EOF
-----BEGIN CERTIFICATE-----
MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
-----END CERTIFICATE-----
_EOF
fi
# Write pre and post hooks to stop Apache2 for the renewal
# (certbot --standalone needs port 80/443 free, so Apache is stopped for
# the duration of each renewal and started again afterwards).
if [ ! -d /etc/letsencrypt/renewal-hooks/pre ]; then
mkdir -p /etc/letsencrypt/renewal-hooks/pre
fi
cat > /etc/letsencrypt/renewal-hooks/pre/apache2 <<_EOF
#! /bin/sh
service apache2 stop
_EOF
chmod a+x /etc/letsencrypt/renewal-hooks/pre/apache2
if [ ! -d /etc/letsencrypt/renewal-hooks/post ]; then
mkdir -p /etc/letsencrypt/renewal-hooks/post
fi
cat > /etc/letsencrypt/renewal-hooks/post/apache2 <<_EOF
#! /bin/sh
service apache2 start
_EOF
chmod a+x /etc/letsencrypt/renewal-hooks/post/apache2
# Write a script to acquire the first certificate (for a systemd timer).
# Fix: the generated script previously contained an orphan `else`/`fi` with
# no matching `if`, so certbotrun.sh was a syntax error and never ran; it
# also referenced ${extIP} unescaped, which expanded (to nothing) at write
# time. Restore the guard the `else` branch belongs to: only attempt
# issuance when the DNS name actually resolves to this instance's external
# IP. Runtime expansions are escaped with \$; ${dnsname} and $LOG expand now.
cat > /usr/local/sbin/certbotrun.sh <<_EOF
#! /bin/sh
echo >> $LOG
echo "CertBot run on \$(date)" >> $LOG
extIP=\$(curl -fs -H "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/network-interfaces/0/access-configs/0/external-ip")
dnsIP=\$(getent hosts ${dnsname} | cut -d " " -f 1)
if [ "x\${extIP}" = "x\${dnsIP}" ]; then
if [ ! -d /etc/letsencrypt/live/${dnsname} ]; then
systemctl stop apache2
if certbot certonly -d $dnsname --standalone --agree-tos --register-unsafely-without-email >> $LOG; then
echo "Received certificate for ${dnsname}" >> $LOG
fi
systemctl start apache2
fi
if /etc/letsencrypt/renewal-hooks/deploy/unifi; then
systemctl stop certbotrun.timer
echo "Certificate installed for ${dnsname}" >> $LOG
fi
else
echo "No action because ${dnsname} doesn't resolve to \${extIP}" >> $LOG
fi
_EOF
chmod a+x /usr/local/sbin/certbotrun.sh
# Write the systemd unit files
# (an hourly timer that reruns certbotrun.sh until a certificate has been
# issued and installed; the script stops its own timer on success).
if [ ! -f /etc/systemd/system/certbotrun.timer ]; then
cat > /etc/systemd/system/certbotrun.timer <<_EOF
[Unit]
Description=Run CertBot hourly until success
[Timer]
OnCalendar=hourly
RandomizedDelaySec=15m
[Install]
WantedBy=timers.target
_EOF
systemctl daemon-reload
cat > /etc/systemd/system/certbotrun.service <<_EOF
[Unit]
Description=Run CertBot hourly until success
After=network-online.target
Wants=network-online.target
[Service]
Type=oneshot
ExecStart=/usr/local/sbin/certbotrun.sh
_EOF
fi
# Start the above
# Try once immediately; if that fails, arm the hourly retry timer.
if [ ! -d /etc/letsencrypt/live/${dnsname} ]; then
if ! /usr/local/sbin/certbotrun.sh; then
echo "Installing hourly CertBot run"
systemctl start certbotrun.timer
fi
fi
# Joth additions-------
# 1/ Use $dnsname in the redirect, rather than the requested hostname, to ensure navigation to
# IP address goes to the intended URL
# TODO: also redirect port 443 HTTPS. (currently it's not binding to that port at all)
# -- done above
# 2/ Enable stackdriver logging and monitoring
mkdir -p /etc/google-fluentd/config.d/
# Tail the UniFi logs into Stackdriver via google-fluentd.
cat > /etc/google-fluentd/config.d/unifi.conf <<_EOF
<source>
@type tail
format none
path /usr/lib/unifi/logs/*.log
pos_file /var/lib/google-fluentd/pos/unifi.pos
read_from_head true
tag unifi
</source>
_EOF
# The downloaded installer scripts double as "already installed" flags.
if [ ! -f install-logging-agent.sh ] ; then
curl -sSO https://dl.google.com/cloudagents/install-logging-agent.sh
sudo bash install-logging-agent.sh
fi
if [ ! -f install-monitoring-agent.sh ] ; then
curl -sSO https://dl.google.com/cloudagents/install-monitoring-agent.sh
sudo bash install-monitoring-agent.sh
fi
echo "Installed Stackdriver logging and monitoring agents"
# 3/ Install handy utils
# Fix: bare "apt install" prompts for confirmation (and warns it has no
# stable CLI); this is an unattended startup script, so install the same
# way as every other package above.
DEBIAN_FRONTEND=noninteractive apt-get -qq install -y less >/dev/null
# 4/ Apache rev TLS proxy
# from https://www.traccar.org/secure-connection/
a2enmod ssl
a2enmod proxy
a2enmod proxy_http
a2enmod proxy_wstunnel
a2enmod rewrite
a2dissite 000-default
# TLS terminates in Apache; traccar itself listens on plain HTTP :8082.
cat > /etc/apache2/sites-available/traccar.conf <<_EOF
<IfModule mod_ssl.c>
<VirtualHost _default_:443>
ServerName ${dnsname}
ServerAdmin webmaster@localhost
DocumentRoot /var/www/html
ProxyPass /api/socket ws://localhost:8082/api/socket enablereuse=off
ProxyPassReverse /api/socket ws://localhost:8082/api/socket enablereuse=off
ProxyPass / http://localhost:8082/ enablereuse=off
ProxyPassReverse / http://localhost:8082/ enablereuse=off
SSLEngine on
SSLCertificateFile /etc/letsencrypt/live/${dnsname}/fullchain.pem
SSLCertificateKeyFile /etc/letsencrypt/live/${dnsname}/privkey.pem
</VirtualHost>
</IfModule>
<VirtualHost *:80>
ServerName ${dnsname}
Redirect / https://${dnsname}
</VirtualHost>
_EOF
a2ensite traccar
service apache2 restart
|
/**
 * Returns true if the string contains at least one ASCII capital letter (A-Z).
 *
 * Same contract as the original ord()-comparison loop, expressed as a single
 * regex scan.
 *
 * @param string $string the string to scan
 * @return bool true when an A-Z character is present, false otherwise
 */
function hasCapitalLetters($string)
{
    // preg_match returns 1 on first match, 0 on no match (false on error).
    return preg_match('/[A-Z]/', $string) === 1;
}
<reponame>vadi2/codeql<gh_stars>1000+
package constants;
/**
 * Analysis test fixture: exercises every flavour of field initialization —
 * inline static/instance initializers, constructor assignments, and
 * (static) initializer blocks. The existing "Not an initializer(s)"
 * comments mark assignments that must NOT be classified as field
 * initializers by the analysis; do not change the code shape.
 */
class Initializers {
static final int SFIELD = 12;
final int IFIELD = 20;
final int IFIELD2;
Initializers() {
// Not an initializer
IFIELD2 = 22;
}
void stuff() {
// Plain local declarations/assignments, not field initializers.
int x = 300;
int y;
y = 400;
}
static final Object SFIELD_OBJECT = "a string";
final static int fsf;
static int sf = 3;
final int ff;
int f = 4;
static {
// Not initializers
fsf = 42;
sf = 42;
}
{
// Not initializers
ff = 42;
f = 42;
}
}
|
<filename>src/org/sosy_lab/cpachecker/cpa/assumptions/storage/AssumptionStorageCPA.java
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.assumptions.storage;
import java.util.Collection;
import org.sosy_lab.common.ShutdownNotifier;
import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.common.log.LogManager;
import org.sosy_lab.cpachecker.cfa.CFA;
import org.sosy_lab.cpachecker.cfa.model.CFAEdge;
import org.sosy_lab.cpachecker.cfa.model.CFANode;
import org.sosy_lab.cpachecker.core.AnalysisDirection;
import org.sosy_lab.cpachecker.core.defaults.AutomaticCPAFactory;
import org.sosy_lab.cpachecker.core.defaults.MergeSepOperator;
import org.sosy_lab.cpachecker.core.interfaces.AbstractDomain;
import org.sosy_lab.cpachecker.core.interfaces.AbstractState;
import org.sosy_lab.cpachecker.core.interfaces.CPAFactory;
import org.sosy_lab.cpachecker.core.interfaces.ConfigurableProgramAnalysis;
import org.sosy_lab.cpachecker.core.interfaces.MergeOperator;
import org.sosy_lab.cpachecker.core.interfaces.PrecisionAdjustment;
import org.sosy_lab.cpachecker.core.interfaces.StateSpacePartition;
import org.sosy_lab.cpachecker.core.interfaces.StopOperator;
import org.sosy_lab.cpachecker.core.interfaces.TransferRelation;
import org.sosy_lab.cpachecker.core.interfaces.pcc.ProofChecker;
import org.sosy_lab.cpachecker.exceptions.CPAException;
import org.sosy_lab.cpachecker.exceptions.CPATransferException;
import org.sosy_lab.cpachecker.util.predicates.pathformula.ctoformula.CtoFormulaConverter;
import org.sosy_lab.cpachecker.util.predicates.pathformula.ctoformula.CtoFormulaTypeHandler;
import org.sosy_lab.cpachecker.util.predicates.pathformula.ctoformula.FormulaEncodingOptions;
import org.sosy_lab.cpachecker.util.predicates.smt.BooleanFormulaManagerView;
import org.sosy_lab.cpachecker.util.predicates.smt.FormulaManagerView;
import org.sosy_lab.cpachecker.util.predicates.smt.Solver;
/**
* CPA used to capture the assumptions that ought to be dumped.
*
* <p>Note that once the CPA algorithm has finished running, a call to dumpInvariants() is needed to
* process the reachable states and produce the actual invariants.
*/
public class AssumptionStorageCPA
implements ConfigurableProgramAnalysis, ProofChecker, AutoCloseable {
/** Factory hook used by CPAchecker's reflective CPA construction. */
public static CPAFactory factory() {
return AutomaticCPAFactory.forType(AssumptionStorageCPA.class);
}
private final AssumptionStorageTransferRelation transferRelation;
private final FormulaManagerView formulaManager;
// Initial state: both assumption formulas are "true".
private final AssumptionStorageState topState;
private final Solver solver;
// Invoked reflectively through AutomaticCPAFactory (see factory()).
private AssumptionStorageCPA(Configuration config, LogManager logger, ShutdownNotifier pShutdownNotifier, CFA cfa) throws InvalidConfigurationException {
solver = Solver.create(config, logger, pShutdownNotifier);
formulaManager = solver.getFormulaManager();
FormulaEncodingOptions options = new FormulaEncodingOptions(config);
CtoFormulaTypeHandler typeHandler = new CtoFormulaTypeHandler(logger, cfa.getMachineModel());
CtoFormulaConverter converter = new CtoFormulaConverter(options, formulaManager, cfa.getMachineModel(), cfa.getVarClassification(), logger, pShutdownNotifier, typeHandler, AnalysisDirection.FORWARD);
BooleanFormulaManagerView bfmgr = formulaManager.getBooleanFormulaManager();
topState = new AssumptionStorageState(formulaManager, bfmgr.makeTrue(), bfmgr.makeTrue());
transferRelation = new AssumptionStorageTransferRelation(converter, formulaManager, topState);
}
/** Exposes the formula manager backing this CPA's states. */
public FormulaManagerView getFormulaManager() {
return formulaManager;
}
@Override
public AbstractDomain getAbstractDomain() {
return new AssumptionStorageDomain();
}
/** Every location starts from the shared "true assumptions" top state. */
@Override
public AbstractState getInitialState(CFANode pNode, StateSpacePartition pPartition) {
return topState;
}
/** States are never merged (merge-sep). */
@Override
public MergeOperator getMergeOperator() {
return MergeSepOperator.getInstance();
}
@Override
public StopOperator getStopOperator() {
return new AssumptionStorageStop();
}
@Override
public TransferRelation getTransferRelation() {
return transferRelation;
}
@Override
public PrecisionAdjustment getPrecisionAdjustment() {
return new AssumptionStoragePrecisionAdjustment(transferRelation);
}
@Override
public boolean areAbstractSuccessors(AbstractState pState, CFAEdge pCfaEdge,
Collection<? extends AbstractState> pSuccessors) throws CPATransferException, InterruptedException {
// always assume is successor, only write and read states that have true assumptions, stop formulae
return true;
}
@Override
public boolean isCoveredBy(AbstractState pState, AbstractState pOtherState) throws CPAException, InterruptedException {
// always assume is covered, only write and read states that have true assumptions, stop formulae
return true;
}
/** Releases the SMT solver created in the constructor. */
@Override
public void close() {
solver.close();
}
}
|
/**
 * Validates a form's name, email, and date-of-birth fields.
 * Alerts and returns false on the first invalid field; true if all pass.
 *
 * @param {Object} form - form whose name/email/dateOfBirth inputs expose .value
 * @returns {boolean} true when every field is valid
 */
function validateForm(form) {
  let name = form.name.value;
  let email = form.email.value;
  let dateOfBirth = form.dateOfBirth.value;
  // check if the name is valid (non-empty)
  if (!name) {
    alert('Please enter a valid name');
    return false;
  }
  // check if the email address is valid (loose something@something.something)
  if (!/\S+@\S+\.\S+/.test(email)) {
    alert('Please enter a valid email');
    return false;
  }
  // check if the date of birth is valid (MM/DD/YYYY, years 1900-2099).
  // Bug fix: the regex previously required a literal space after each '/',
  // so every correctly formatted date like "01/02/1990" was rejected.
  if (!/^(0[1-9]|1[0-2])\/(0[1-9]|1\d|2\d|3[01])\/(19|20)\d{2}$/.test(dateOfBirth)) {
    alert('Please enter a valid date');
    return false;
  }
  return true;
}
#!/usr/bin/env bash
# Delete the CloudFormation stack named by $STACK_NAME (defined in env.sh).
. ./env.sh
# Fix: quote the stack name (spaces/globs) and abort with a clear message if
# env.sh did not define it, instead of passing an empty --stack-name to aws.
aws cloudformation delete-stack --stack-name "${STACK_NAME:?STACK_NAME not set — check env.sh}"
|
<gh_stars>0
package net.kunmc.lab.superhot.task;
import net.kunmc.lab.superhot.GameManager;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.entity.Player;
import org.bukkit.scheduler.BukkitRunnable;
import java.util.UUID;
/**
 * Repeating task that teleports a player back to a fixed location whenever
 * the game is not in its "moving" state (per GameManager.isStateMoving()).
 * The player is looked up by UUID each run, so a disconnected player is
 * silently skipped.
 */
public class PlayerLocationFixer extends BukkitRunnable {
private final UUID uuid;
private final Location loc;
private final GameManager manager = GameManager.getInstance();
public PlayerLocationFixer(UUID uuid, Location loc) {
this.uuid = uuid;
this.loc = loc;
}
@Override
public void run() {
// Do nothing while movement is allowed.
if (manager.isStateMoving()) {
return;
}
Player p = Bukkit.getPlayer(uuid);
if (p != null) {
// Async teleport to avoid blocking the server thread.
p.teleportAsync(loc);
}
}
}
|
/*
Copyright (c) Facebook, Inc. and its affiliates.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
*/
package tester_test
import (
"math/rand"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/facebookincubator/fbender/tester"
)
// TestLessThan__Compare cross-checks LessThan.Compare against the native
// '<' operator on 4096 random float pairs.
func TestLessThan__Compare(t *testing.T) {
s := rand.NewSource(time.Now().UnixNano())
r := rand.New(s)
for i := 0; i < 4096; i++ {
x, y := r.Float64(), r.Float64()
assert.Equal(t, x < y, tester.LessThan.Compare(x, y), "%f < %f", x, y)
}
}
// TestLessThan__Name checks the comparator's display name.
func TestLessThan__Name(t *testing.T) {
assert.Equal(t, "<", tester.LessThan.Name())
}
// TestParseComparator_LessThan checks "<" parses to the LessThan singleton.
func TestParseComparator_LessThan(t *testing.T) {
cmp, err := tester.ParseComparator("<")
assert.NoError(t, err)
assertPointerEqual(t, tester.LessThan, cmp)
}
// TestGreaterThan__Compare cross-checks GreaterThan.Compare against the
// native '>' operator on 4096 random float pairs.
func TestGreaterThan__Compare(t *testing.T) {
s := rand.NewSource(time.Now().UnixNano())
r := rand.New(s)
for i := 0; i < 4096; i++ {
x, y := r.Float64(), r.Float64()
assert.Equal(t, x > y, tester.GreaterThan.Compare(x, y), "%f > %f", x, y)
}
}
// TestGreaterThan__Name checks the comparator's display name.
func TestGreaterThan__Name(t *testing.T) {
assert.Equal(t, ">", tester.GreaterThan.Name())
}
// TestParseComparator_GreaterThan checks ">" parses to the GreaterThan singleton.
func TestParseComparator_GreaterThan(t *testing.T) {
cmp, err := tester.ParseComparator(">")
assert.NoError(t, err)
assertPointerEqual(t, tester.GreaterThan, cmp)
}
// TestParseComparator checks an unknown symbol yields ErrInvalidComparator.
func TestParseComparator(t *testing.T) {
cmp, err := tester.ParseComparator("!")
assert.Nil(t, cmp)
assert.Equal(t, tester.ErrInvalidComparator, err)
}
|
<filename>week-14/MultiplyOdds/MultiplyOdds.test.js
const MultiplyOdds = require('./MultiplyOdds.js');

// Jest tests for MultiplyOdds: each odd value in the input is multiplied by
// the previous even value; values with no applicable pairing are dropped
// (expectations below encode the module's contract).
describe('MultiplyOdds function ', () => {
  test('It should multiply each odd value by the previous even value', () => {
    // Fix: this test previously also declared input1/output1, an unused
    // copy of the second test's fixtures — dead code removed.
    const input = [2, 33, 4, 41, 6, 9, 77, 11, 13, 27];
    const output = [66, 164, 54, 462, 66, 78, 162];
    expect(MultiplyOdds(input)).toEqual(output);
  });
  test('It should multiply each odd value by the previous even value', () => {
    const input1 = [3, 2, 3];
    const output1 = [3, 6];
    expect(MultiplyOdds(input1)).toEqual(output1);
  });
});
<filename>src/math/Boj14604.java
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 14604번: Over fitting (small)
*
* @see https://www.acmicpc.net/problem/14604/
*
*/
/**
 * For every line through a pair of input points, counts the FAN points on a
 * closed half-plane whose open side contains no non-FAN point; the answer
 * is the maximum such count over all candidate lines (pairs drawn from
 * notFan x notFan and fan x notFan).
 */
public class Boj14604 {
private static final String FAN = "LOVELYZ";
private static ArrayList<Coordinate> fan = new ArrayList<>();
private static ArrayList<Coordinate> notFan = new ArrayList<>();
private static int result;
// Simple immutable 2D integer point.
private static class Coordinate {
int x;
int y;
public Coordinate(int x, int y) {
this.x = x;
this.y = y;
}
}
public static void main(String[] args) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
int N = Integer.parseInt(br.readLine());
// Partition input points by whether the label equals "LOVELYZ".
while(N-- > 0) {
StringTokenizer st = new StringTokenizer(br.readLine());
int x = Integer.parseInt(st.nextToken());
int y = Integer.parseInt(st.nextToken());
if(FAN.equals(st.nextToken())) fan.add(new Coordinate(x, y));
else notFan.add(new Coordinate(x, y));
}
makeLovelyz(notFan, notFan); // considering all cases
makeLovelyz(fan, notFan);
System.out.println(result);
}
// Tries every line defined by one point from f and one from nf, updating
// the global result with the best half-plane count found.
private static void makeLovelyz(ArrayList<Coordinate> f, ArrayList<Coordinate> nf) {
int size = f.size();
int nSize = nf.size();
int[] sizes = {fan.size(), notFan.size()};
boolean[][] used = new boolean[size][nSize];
for(int i = 0; i < size; i++) {
for(int j = 0; j < nSize; j++) { // make set with fan fan, fan notFan, notFan notFan
if(used[i][j]) continue;
used[i][j] = true;
Coordinate current = f.get(i);
Coordinate another = nf.get(j);
if(isSame(current, another)) continue;
// Line through current/another as a*x + b*y = c.
int b = another.x - current.x; // get a, b, c
int a = current.y - another.y;
int c = a * current.x + b * current.y;
int[] fans = new int[2];
// Fans are counted with >= / <= so points ON the line count for
// both sides; non-fans use strict > / < below.
for(int k = 0; k < sizes[0]; k++) { // location relation checker
Coordinate target = fan.get(k);
if(online(target, a, b, c) >= 0) fans[0]++;
if(online(target, a, b, c) <= 0) fans[1]++;
}
int[] notFans = new int[2];
for(int k = 0; k < sizes[1]; k++) {
Coordinate target = notFan.get(k);
if(online(target, a, b, c) > 0) notFans[0]++;
if(online(target, a, b, c) < 0) notFans[1]++;
}
// A side is usable only if it contains zero non-fans.
if(notFans[0] * notFans[1] != 0) continue;
if(notFans[0] == 0) result = Math.max(fans[0], result);
if(notFans[1] == 0) result = Math.max(fans[1], result);
}
}
}
private static boolean isSame(Coordinate c1, Coordinate c2) {
return c1.x == c2.x && c1.y == c2.y;
}
// Sign of a*x + b*y - c: 0 on the line, +/- for the two half-planes.
private static int online(Coordinate target, int a, int b, int c) {
return a * target.x + b * target.y - c;
}
}
|
// Entity script for a hand-held weapon: on trigger pull (controller RT) or
// left mouse click it locates a "MurderConsole" entity and reports a
// "shoot" action to its server method. A 1 s interval rate-limits firing.
// NOTE(review): relies on Entities/Script/Controller/MyAvatar globals —
// presumably a High Fidelity / Vircadia entity-script host; confirm.
(function () {
var findMurderConsoleID;
var myID;
var myName;
var SEARCH_RADIUS = 1000;
var SEARCH_POSITION = { x: 0, y: 0, z: 0};
var RESET_TIME_MS = 1000;
var isMurderConsoleFound = false;
var reset = true;
// Server-side scripts may invoke removeGun remotely.
this.remotelyCallable = ["removeGun"
];
// Cache this entity's id/name once it is loaded.
this.preload = function (entityID) {
myID = entityID;
myName = Entities.getEntityProperties(myID,"name").name;
};
// Re-arm the trigger every RESET_TIME_MS (simple fire-rate limiter).
Script.setInterval(function () {
reset = true;
}, RESET_TIME_MS);
// Remotely callable: end the pointing reaction and delete this weapon.
this.removeGun = function(id,param) {
print("i'm removing it");
MyAvatar.endReaction("point");
Entities.deleteEntity(myID);
};
// Scan a large radius around the origin for the "MurderConsole" entity
// and cache its id.
function findMurderConsole() {
var entities = Entities.findEntities(SEARCH_POSITION, SEARCH_RADIUS);
for (var i in entities) {
var entProps = Entities.getEntityProperties(entities[i]);
if (entProps.name === "MurderConsole") {
findMurderConsoleID = entProps.id;
isMurderConsoleFound = true;
}
}
}
// Controller path: right trigger nearly fully pulled fires once per reset.
function onInputEvent(input, value) {
if (!isMurderConsoleFound) {
findMurderConsole();
print("found console");
}
if (input === Controller.Standard.RT && value > 0.9 && reset === true) {
reset = false;
Entities.callEntityServerMethod(
findMurderConsoleID,
"receiveDataFromItem",
[MyAvatar.sessionUUID,myID,myName,"shoot"]
);
print("shootweapon");
}
}
// Desktop path: left mouse button fires once per reset.
function onMouseEvent(event) {
if (!isMurderConsoleFound) {
findMurderConsole();
print("found console");
}
if (event.isLeftButton && reset) {
reset = false;
Entities.callEntityServerMethod(
findMurderConsoleID,
"receiveDataFromItem",
[MyAvatar.sessionUUID,myID,myName,"shoot"]
);
print("shootweapon");
}
}
Controller.mousePressEvent.connect(onMouseEvent);
Controller.inputEvent.connect(onInputEvent);
// Disconnect handlers when the script is unloaded.
Script.scriptEnding.connect(function () {
Controller.mousePressEvent.disconnect(onMouseEvent);
Controller.inputEvent.disconnect(onInputEvent);
});
});
|
<filename>OSS13 Server/Sources/World/Subsystems/Atmos/Atmos.cpp
#include "Atmos.h"
#include <algorithm>
#include <World/World.hpp>
#include <World/Map.hpp>
#include <World/Tile.hpp>
using namespace subsystem;
using namespace atmos;
// Atmosphere subsystem: groups connected tiles into "locales" (gas volumes)
// and keeps that partition up to date as tiles change.
Atmos::Atmos(World *world) :
world(world)
{
EXPECT(world);
generateLocales();
}
void Atmos::Update(std::chrono::microseconds timeElapsed) {
updateTiles();
updateLocales(timeElapsed);
}
// Detach every changed tile from its locale, then re-resolve which locale
// each one belongs to (two passes so removals finish before re-adding).
void Atmos::updateTiles() {
auto updatedTiles = synchronizeTiles();
for (auto &tile : updatedTiles) {
if (tile->GetLocale())
static_cast<Locale *>(tile->GetLocale())->RemoveTile(tile);
}
for (auto &tile : updatedTiles) {
checkLocaleForTile(tile);
}
}
// Merge freshly created locales into the main list, tick each locale, and
// drop locales that have become empty.
void Atmos::updateLocales(std::chrono::microseconds timeElapsed) {
std::move(newLocales.begin(), newLocales.end(), std::back_inserter(locales));
newLocales.clear();
for (auto iter = locales.begin(); iter != locales.end();) {
auto *locale = iter->get();
locale->Update(timeElapsed);
if (!locale->GetSize())
iter = locales.erase(iter);
else
iter++;
}
}
// New locales go into newLocales (not locales) so creation during a locale
// sweep doesn't invalidate the iteration in updateLocales.
ILocale *Atmos::CreateLocale(IAtmosTile *tile) {
EXPECT(tile);
EXPECT(!tile->GetLocale());
newLocales.push_back(std::make_unique<Locale>(this, tile));
return newLocales.back().get();
}
ILocale *Atmos::CreateLocale(std::unique_ptr<Graph<IAtmosTile *>> &&graph) {
EXPECT(graph->Count());
newLocales.push_back(std::make_unique<Locale>(this, std::forward<std::unique_ptr<Graph<IAtmosTile *>>>(graph)));
return newLocales.back().get();
}
void Atmos::generateLocales() {
synchronizeTiles();
generateLocalesForMap(world->GetMap());
}
void Atmos::generateLocalesForMap(Map *map) {
EXPECT(map);
for (auto &tile : map->GetTiles().Items()) {
checkLocaleForTile(tile.get());
}
}
// Attach a tile to a neighbouring locale when fully airtight passage
// exists, otherwise create a fresh single-tile locale for it.
// NOTE(review): neighbours/locales use reinterpret_cast here while
// updateTiles uses static_cast for the same conversion — presumably they
// are interchangeable for these types, but worth confirming against the
// ITile/IAtmosTile hierarchy.
void Atmos::checkLocaleForTile(IAtmosTile *tile) {
EXPECT(tile);
LOGD_(1) << "Check tile " << tile->GetPos().toString();
if (tile->GetAirtightness().GetFraction(uf::Direction::CENTER) == 0.f)
return;
for (auto direction : uf::PURE_DIRECTIONS_LIST) {
if (direction == uf::Direction::CENTER)
continue;
auto neighbour = reinterpret_cast<IAtmosTile *>(tile->StepTo(direction));
if (neighbour && tile->GetAirtightnessTo(direction) == 1.f) {
Locale *locale = reinterpret_cast<Locale *>(tile->GetLocale());
if (!locale)
locale = reinterpret_cast<Locale *>(neighbour->GetLocale());
if (locale) {
locale->AddConnection(tile, neighbour);
continue;
}
}
}
if (!tile->GetLocale())
CreateLocale(tile);
}
// Poll every map tile; returns the set whose atmos state changed since the
// last synchronization.
std::set<IAtmosTile *> Atmos::synchronizeTiles() {
std::set<IAtmosTile *> updatedTiles;
for (auto &tile : world->GetMap()->GetTiles().Items()) {
if (tile->SynchronizeAtmos())
updatedTiles.insert(tile.get());
}
return updatedTiles;
}
|
#!/bin/bash
## VFT Build script v1, written by pry0cc 27/11/2017
# Fixes: abort on any failure (previously a failed `cd` or clone let the
# build continue in the wrong directory) and quote all expansions so paths
# with spaces work.
set -e
startdir="$PWD"
giturl="git@gitlab.com:bbriggs1/vft.git"
mkdir -p ./bin/
# ${GOPATH+x} expands to "x" only when GOPATH is set, so this detects an
# unset (not merely empty) GOPATH.
if [ -z "${GOPATH+x}" ]
then
	echo "No GOPATH set, creating one"
	mkdir -p "$HOME/tmp/golang/"
	export GOPATH="$HOME/tmp/golang/"
else
	echo "GOPATH set, continuing."
fi
cd "$GOPATH"
mkdir -p ./src/github.com/bbriggs
echo "Cloning vft"
git clone "$giturl" ./src/github.com/bbriggs/vft
cd "$GOPATH/src/github.com/bbriggs/vft/cmd/vft"
echo "Installing dependencies"
go get ./...
echo "Building vft"
go build -o "$startdir/bin/vft" ./vft.go
echo "vft compiled to $startdir/bin/vft, enjoy!"
|
<reponame>khepherer/java_lleida_01_06_2017
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.mycompany.capas.dao;
import com.mycompany.capas.modelo.Usuario;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Query;
/**
*
* @author Administrador
*/
/**
 * JPA-backed implementation of AutenticacionDao.
 *
 * A fresh EntityManager is created per call and — fix — now always closed
 * in a finally block; previously both methods leaked the EntityManager.
 */
class DefaultAutenticacionDao implements AutenticacionDao {
private final EntityManagerFactory emf;
public DefaultAutenticacionDao(EntityManagerFactory emf) {
this.emf = emf;
}
/**
 * Returns true when exactly one Usuario matches the given name and
 * password. Parameters are bound via setParameter (no string
 * concatenation), so the query is injection-safe.
 */
@Override
public Boolean existe(Usuario aBuscar) {
EntityManager em = emf.createEntityManager();
try {
em.getTransaction().begin();
Query q = em.createQuery("select u from Usuario u where u.nombre=:nombre and u.clave=:clave");
q.setParameter("nombre", aBuscar.getNombre());
q.setParameter("clave", aBuscar.getClave());
List<Usuario> resultado = q.getResultList();
em.getTransaction().commit();
return resultado.size() == 1;
} finally {
em.close();
}
}
/**
 * Persists a new Usuario inside a transaction and returns it.
 */
@Override
public Usuario guardar(Usuario nuevo) {
EntityManager em = emf.createEntityManager();
try {
em.getTransaction().begin();
em.persist(nuevo);
em.getTransaction().commit();
return nuevo;
} finally {
em.close();
}
}
}
|
module KibanaAPI
module V7
module Actions
# Mixin providing user-related Kibana API calls; expects the including
# class to supply #request(http_method:, endpoint:).
module UserActions
# GET api/features — fetches the feature list via #request.
def user_features
request(
http_method: :get,
endpoint: "api/features"
)
end
end
end
end
end
|
#!/usr/bin/env sh
# Deploy the built VuePress site to the gh-pages branch.
# abort on errors
set -e
# build
npm run docs:build
# navigate into the build output directory
cd docs/.vuepress/dist
# if you are deploying to a custom domain
# echo 'www.example.com' > CNAME
# The dist directory is turned into a throwaway git repo and force-pushed;
# history on gh-pages is intentionally not preserved.
git init
git add -A
git commit -m 'deploy'
# if you are deploying to https://<USERNAME>.github.io
# git push -f git@github.com:<USERNAME>/<USERNAME>.github.io.git master
# if you are deploying to https://<USERNAME>.github.io/<REPO>
git push -f git@github.com:wowthemesnet/vuepress-theme-mediumish.git master:gh-pages
cd -
package org.rzo.yajsw.os.posix.linux;
import org.rzo.yajsw.os.posix.PosixProcessManager;
/**
 * Linux process manager; currently inherits all behaviour unchanged from
 * the generic POSIX implementation.
 */
public class LinuxProcessManager extends PosixProcessManager
{
}
|
import { Services } from 'i18next';
import { I18nextCLILanguageDetector } from '../i18next-cli-language-detector';
// Minimal mock of i18next's Services: formatLanguageCode is an identity
// function and isWhitelisted always accepts, so the detector tests are
// independent of real i18next language normalization.
const mockServices = {
languageUtils: {
formatLanguageCode: jest.fn((lng: string) => {
return lng;
}),
isWhitelisted: jest.fn((_lng: string) => {
return true;
}),
},
} as Services;
// Detection-order and fallback behaviour of the CLI language detector.
// Each test sets POSIX locale env vars (LC_ALL > LC_MESSAGES > LANG > LANGUAGE).
describe('I18nextCLILanguageDetector ', () => {
  let languageDetector: I18nextCLILanguageDetector;

  beforeEach(() => {
    languageDetector = new I18nextCLILanguageDetector();
  });

  // Clear every locale variable a test may have set, so cases stay independent.
  afterEach(() => {
    delete process.env.LC_ALL;
    delete process.env.LC_MESSAGES;
    delete process.env.LANG;
    delete process.env.LANGUAGE;
  });

  it('has nothing to do with cache feature yet', () => {
    languageDetector.init(mockServices, {}, {});
    expect(languageDetector.cacheUserLanguage()).toBeUndefined();
  });

  it('detects shell language', () => {
    process.env.LC_ALL = 'en.UTF-8';
    languageDetector.init(mockServices, {}, {});
    const language = languageDetector.detect();
    expect(language).toBe('en');
  });

  // "xx_YY" locales are normalised to BCP-47 style "xx-YY".
  it('detects shell language with region', () => {
    process.env.LC_ALL = 'en_US.UTF-8';
    languageDetector.init(mockServices, {}, {});
    const language = languageDetector.detect();
    expect(language).toBe('en-US');
  });

  // NOTE(review): "prioriy" typo in the title below -- the title is a runtime
  // string, so it is flagged here rather than changed.
  it('detects shell language with prioriy', () => {
    process.env.LC_ALL = 'en_US.UTF-8:ja_JP.UTF-8';
    languageDetector.init(mockServices, {}, {});
    const language = languageDetector.detect();
    expect(language).toBe('en-US');
  });

  it('detects shell language via LC_MESSAGES', () => {
    process.env.LC_MESSAGES = 'en_US.UTF-8';
    languageDetector.init(mockServices, {}, {});
    const language = languageDetector.detect();
    expect(language).toBe('en-US');
  });

  it('detects shell language via LANG', () => {
    process.env.LANG = 'en_US.UTF-8';
    languageDetector.init(mockServices, {}, {});
    const language = languageDetector.detect();
    expect(language).toBe('en-US');
  });

  it('detects shell language via LANGUAGE', () => {
    process.env.LANGUAGE = 'en_US.UTF-8';
    languageDetector.init(mockServices, {}, {});
    const language = languageDetector.detect();
    expect(language).toBe('en-US');
  });

  it('fallbacks when invalid LC passed', () => {
    process.env.LC_ALL = '';
    languageDetector.init(mockServices, {}, { fallbackLng: 'ja-JP' });
    const language = languageDetector.detect();
    expect(language).toBe('ja-JP');
  });

  // NOTE(review): "loale" typo in the title below -- runtime string, left as-is.
  it('fallbacks when the shell loale was blacklisted', () => {
    (mockServices.languageUtils.isWhitelisted as jest.Mock).mockImplementation(
      (lng) => lng !== 'en-US',
    );
    process.env.LC_ALL = 'en_US.UTF-8';
    languageDetector.init(mockServices, {}, { fallbackLng: 'ja-JP' });
    const language = languageDetector.detect();
    expect(language).toBe('ja-JP');
  });

  // "C" is the POSIX default locale and carries no language information.
  it('fallbacks when LC=C passed', () => {
    process.env.LC_ALL = 'C';
    languageDetector.init(mockServices, {}, { fallbackLng: 'en' });
    const language = languageDetector.detect();
    expect(language).toBe('en');
  });

  it('fallbacks the 1st element from the array when multiple fallbackLng passed', () => {
    process.env.LC_ALL = 'C';
    languageDetector.init(mockServices, {}, { fallbackLng: ['zh', 'en'] });
    const language = languageDetector.detect();
    expect(language).toBe('zh');
  });

  it('fallbacks the 1st element from the array of `default` key when object passed to the fallbackLng', () => {
    process.env.LC_ALL = 'C';
    languageDetector.init(
      mockServices,
      {},
      { fallbackLng: { default: ['fr', 'en'] } },
    );
    const language = languageDetector.detect();
    expect(language).toBe('fr');
  });
});
|
<filename>alg/sort/bubble.js
'use strict';
const swap = require('../../util/swap');
module.exports = (a, g, k) => {
g = g || 1;
k = k || 0;
for (;;) {
let still = true;
for (let i = k; i < a.length; i += g) {
if (a[i - g] > a[i]) {
still = false;
swap(a, i, i - g);
}
}
if (still) {
break;
}
}
return a;
};
|
#!/bin/bash
# Build an MPLAB X project for one or more configurations, with optional
# pre/post hook commands.
#
# Usage: build.sh <project-dir> "<conf1> [conf2 ...]" [pre-cmd] [post-cmd]
PROJECT=$1
CONFIGURATIONS=$2
PRE=$3
POST=$4

# Abort on the first failing command.
set -e

echo "Building project $PROJECT for configurations '$CONFIGURATIONS' with MPLAB X v6.00 and XC8 v2.36"

# Run the optional pre-build hook (an arbitrary shell command string).
# Fix: [ -n "..." ] instead of [ ! -z ... ], and the hook string is quoted
# so eval sees it exactly as passed.
if [ -n "$PRE" ]; then
eval "$PRE"
fi

# $CONFIGURATIONS is deliberately unquoted: it is a space-separated list and
# must word-split into one configuration per iteration.
for CONFIGURATION in ${CONFIGURATIONS}; do
echo "Generating Makefiles for configuration $CONFIGURATION"
# Distinct explicit exit codes (1 = makefile generation, 2 = build) let
# callers tell the failure stages apart even though set -e is active.
/opt/mplabx/mplab_platform/bin/prjMakefilesGenerator.sh "$PROJECT@$CONFIGURATION" || exit 1
echo "Building configuration $CONFIGURATION"
make -C "$PROJECT" CONF="$CONFIGURATION" build || exit 2
done

# Run the optional post-build hook.
if [ -n "$POST" ]; then
eval "$POST"
fi
#!/bin/bash
#
# Example of commands to process multi-parametric data of the spinal cord.
#
# Please note that this batch script has a lot of redundancy and should not
# be used as a pipeline for regular processing. For example, there is no need
# to process both t1 and t2 to extract CSA values.
#
# For information about acquisition parameters, see: https://osf.io/wkdym/
# N.B. The parameters are set for these type of data. With your data, parameters
# might be slightly different.
#
# Usage:
#
# [option] $SCT_DIR/batch_processing.sh
#
# Prevent (re-)downloading sct_example_data:
# SCT_BP_DOWNLOAD=0 $SCT_DIR/batch_processing.sh
#
# Specify quality control (QC) folder (Default is ~/qc_batch_processing):
# SCT_BP_QC_FOLDER=/user/toto/my_qc_folder $SCT_DIR/batch_processing.sh
# Abort on error
set -ve
# (-v echoes each script line as it is read; -e aborts on the first error)
# For full verbose, uncomment the next line
# set -x
# Fetch OS type
if uname -a | grep -i darwin > /dev/null 2>&1; then
# OSX
open_command="open"
elif uname -a | grep -i linux > /dev/null 2>&1; then
# Linux
open_command="xdg-open"
fi
# Check if users wants to use his own data
if [[ -z "$SCT_BP_DOWNLOAD" ]]; then
SCT_BP_DOWNLOAD=1
fi
# QC folder
# NOTE(review): backticks work, but $(pwd) is the modern equivalent.
if [[ -z "$SCT_BP_QC_FOLDER" ]]; then
SCT_BP_QC_FOLDER=`pwd`/"qc_example_data"
fi
# Remove QC folder
# NOTE(review): the deprecated [ -a ] conjunction is used here; [[ ... && ... ]]
# would be the safer form. Left unchanged to preserve behaviour.
if [ -z "$SCT_BP_NO_REMOVE_QC" -a -d "$SCT_BP_QC_FOLDER" ]; then
echo "Removing $SCT_BP_QC_FOLDER folder."
rm -rf "$SCT_BP_QC_FOLDER"
fi
# get starting time:
TIME_START=$(date +%x_%r)
# download example data
if [[ "$SCT_BP_DOWNLOAD" == "1" ]]; then
sct_download_data -d sct_example_data
fi
cd sct_example_data
# t2
# ===========================================================================================
cd t2
# Segment spinal cord
sct_deepseg_sc -i t2.nii.gz -c t2 -qc "$SCT_BP_QC_FOLDER"
# Tips: If you are not satisfied with the results you can try with another algorithm:
# sct_propseg -i t2.nii.gz -c t2 -qc "$SCT_BP_QC_FOLDER"
# Vertebral labeling
# Tips: for manual initialization of labeling by clicking at disc C2-C3, use flag -initc2
sct_label_vertebrae -i t2.nii.gz -s t2_seg.nii.gz -c t2 -qc "$SCT_BP_QC_FOLDER"
# Create labels at in the cord at C2 and C5 mid-vertebral levels
sct_label_utils -i t2_seg_labeled.nii.gz -vert-body 2,5 -o labels_vert.nii.gz
# Tips: you can also create labels manually using:
# sct_label_utils -i t2.nii.gz -create-viewer 2,5 -o labels_vert.nii.gz
# Register to template
sct_register_to_template -i t2.nii.gz -s t2_seg.nii.gz -l labels_vert.nii.gz -c t2 -qc "$SCT_BP_QC_FOLDER"
# Tips: If you are not satisfied with the results, you can tweak registration parameters.
# For example here, we would like to take into account the rotation of the cord, as well as
# adding a 3rd registration step that uses the image intensity (not only cord segmentations).
# so we could do something like this:
# sct_register_multimodal -i $SCT_DIR/data/PAM50/template/PAM50_t2s.nii.gz -iseg $SCT_DIR/data/PAM50/template/PAM50_cord.nii.gz -d t2s.nii.gz -dseg t2s_seg.nii.gz -param step=1,type=seg,algo=slicereg,smooth=3:step=2,type=seg,algo=bsplinesyn,slicewise=1,iter=3 -initwarp ../t2/warp_template2anat.nii.gz
# Warp template without the white matter atlas (we don't need it at this point)
sct_warp_template -d t2.nii.gz -w warp_template2anat.nii.gz -a 0
# Compute cross-sectional area (and other morphometry measures) for each slice
sct_process_segmentation -i t2_seg.nii.gz -qc "$SCT_BP_QC_FOLDER"
# Compute cross-sectional area and average between C2 and C3 levels
sct_process_segmentation -i t2_seg.nii.gz -vert 2:3 -o csa_c2c3.csv
# Go back to root folder
cd ..
# t2s (stands for t2-star)
# ===========================================================================================
cd t2s
# Spinal cord segmentation
sct_deepseg_sc -i t2s.nii.gz -c t2s -qc "$SCT_BP_QC_FOLDER"
# Segment gray matter
sct_deepseg_gm -i t2s.nii.gz -qc "$SCT_BP_QC_FOLDER"
# Register template->t2s (using warping field generated from template<->t2 registration)
sct_register_multimodal -i $SCT_DIR/data/PAM50/template/PAM50_t2s.nii.gz -iseg $SCT_DIR/data/PAM50/template/PAM50_cord.nii.gz -d t2s.nii.gz -dseg t2s_seg.nii.gz -param step=1,type=seg,algo=centermass:step=2,type=seg,algo=bsplinesyn,slicewise=1,iter=3:step=3,type=im,algo=syn,slicewise=1,iter=1,metric=CC -initwarp ../t2/warp_template2anat.nii.gz -initwarpinv ../t2/warp_anat2template.nii.gz
# rename warping fields for clarity
mv warp_PAM50_t2s2t2s.nii.gz warp_template2t2s.nii.gz
mv warp_t2s2PAM50_t2s.nii.gz warp_t2s2template.nii.gz
# Warp template
sct_warp_template -d t2s.nii.gz -w warp_template2t2s.nii.gz
# Subtract GM segmentation from cord segmentation to obtain WM segmentation
sct_maths -i t2s_seg.nii.gz -sub t2s_gmseg.nii.gz -o t2s_wmseg.nii.gz
# Compute cross-sectional area of the gray and white matter between C2 and C5
sct_process_segmentation -i t2s_wmseg.nii.gz -vert 2:5 -perlevel 1 -o csa_wm.csv
sct_process_segmentation -i t2s_gmseg.nii.gz -vert 2:5 -perlevel 1 -o csa_gm.csv
# OPTIONAL: Update template registration using information from gray matter segmentation
# # <<<
# # Register WM/GM template to WM/GM seg
# sct_register_graymatter -gm t2s_gmseg.nii.gz -wm t2s_wmseg.nii.gz -w warp_template2t2s.nii.gz -winv warp_t2s2template.nii.gz
# # Rename warping fields for clarity
# mv warp_template2t2s_reg_gm.nii.gz warp_template2t2s.nii.gz
# mv warp_t2s2template_reg_gm.nii.gz warp_t2s2template.nii.gz
# # Warp template (this time corrected for internal structure)
# sct_warp_template -d t2s.nii.gz -w warp_template2t2s.nii.gz
# # >>>
cd ..
# t1
# ===========================================================================================
cd t1
# Segment spinal cord
sct_deepseg_sc -i t1.nii.gz -c t1
# Smooth spinal cord along superior-inferior axis
sct_smooth_spinalcord -i t1.nii.gz -s t1_seg.nii.gz
# Flatten cord in the right-left direction (to make nice figure)
sct_flatten_sagittal -i t1.nii.gz -s t1_seg.nii.gz
# Go back to root folder
cd ..
# mt
# ===========================================================================================
cd mt
# Get centerline from mt1 data
sct_get_centerline -i mt1.nii.gz -c t2
# sct_get_centerline -i mt1.nii.gz -c t2 -qc "$SCT_BP_QC_FOLDER"
# Create mask
sct_create_mask -i mt1.nii.gz -p centerline,mt1_centerline.nii.gz -size 45mm
# Crop data for faster processing
sct_crop_image -i mt1.nii.gz -m mask_mt1.nii.gz -o mt1_crop.nii.gz
# Segment spinal cord
sct_deepseg_sc -i mt1_crop.nii.gz -c t2 -qc "$SCT_BP_QC_FOLDER"
# Register mt0->mt1
# Tips: here we only use rigid transformation because both images have very similar sequence parameters. We don't want to use SyN/BSplineSyN to avoid introducing spurious deformations.
# Tips: here we input -dseg because it is needed by the QC report
sct_register_multimodal -i mt0.nii.gz -d mt1_crop.nii.gz -dseg mt1_crop_seg.nii.gz -param step=1,type=im,algo=rigid,slicewise=1,metric=CC -x spline -qc "$SCT_BP_QC_FOLDER"
# Register template->mt1
# Tips: here we only use the segmentations due to poor SC/CSF contrast at the bottom slice.
# Tips: First step: slicereg based on images, with large smoothing to capture potential motion between anat and mt, then at second step: bpslinesyn in order to adapt the shape of the cord to the mt modality (in case there are distortions between anat and mt).
sct_register_multimodal -i $SCT_DIR/data/PAM50/template/PAM50_t2.nii.gz -iseg $SCT_DIR/data/PAM50/template/PAM50_cord.nii.gz -d mt1_crop.nii.gz -dseg mt1_crop_seg.nii.gz -param step=1,type=seg,algo=slicereg,smooth=3:step=2,type=seg,algo=bsplinesyn,slicewise=1,iter=3 -initwarp ../t2/warp_template2anat.nii.gz -initwarpinv ../t2/warp_anat2template.nii.gz
# Rename warping fields for clarity
mv warp_PAM50_t22mt1_crop.nii.gz warp_template2mt.nii.gz
mv warp_mt1_crop2PAM50_t2.nii.gz warp_mt2template.nii.gz
# Warp template
sct_warp_template -d mt1_crop.nii.gz -w warp_template2mt.nii.gz -qc "$SCT_BP_QC_FOLDER"
# Compute mtr
sct_compute_mtr -mt0 mt0_reg.nii.gz -mt1 mt1_crop.nii.gz
# Register t1w->mt1
# Tips: We do not need to crop the t1w image before registration because step=0 of the registration is to put the source image in the space of the destination image (equivalent to cropping the t1w)
sct_register_multimodal -i t1w.nii.gz -d mt1_crop.nii.gz -dseg mt1_crop_seg.nii.gz -param step=1,type=im,algo=rigid,slicewise=1,metric=CC -x spline -qc "$SCT_BP_QC_FOLDER"
# Compute MTsat
# Tips: Check your TR and Flip Angle from the Dicom data
sct_compute_mtsat -mt mt1_crop.nii.gz -pd mt0_reg.nii.gz -t1 t1w_reg.nii.gz -trmt 30 -trpd 30 -trt1 15 -famt 9 -fapd 9 -fat1 15
# Extract MTR, T1 and MTsat within the white matter between C2 and C5.
# Tips: Here we use "-discard-neg-val 1" to discard inconsistent negative values in MTR calculation which are caused by noise.
sct_extract_metric -i mtr.nii.gz -method map -o mtr_in_wm.csv -l 51 -vert 2:5
sct_extract_metric -i mtsat.nii.gz -method map -o mtsat_in_wm.csv -l 51 -vert 2:5
sct_extract_metric -i t1map.nii.gz -method map -o t1_in_wm.csv -l 51 -vert 2:5
# Bring MTR to template space (e.g. for group mapping)
sct_apply_transfo -i mtr.nii.gz -d $SCT_DIR/data/PAM50/template/PAM50_t2.nii.gz -w warp_mt2template.nii.gz
# Go back to root folder
cd ..
# dmri
# ===========================================================================================
cd dmri
# bring t2 segmentation in dmri space to create mask (no optimization)
sct_maths -i dmri.nii.gz -mean t -o dmri_mean.nii.gz
sct_register_multimodal -i ../t2/t2_seg.nii.gz -d dmri_mean.nii.gz -identity 1 -x nn
# create mask to help moco and for faster processing
sct_create_mask -i dmri_mean.nii.gz -p centerline,t2_seg_reg.nii.gz -size 35mm
# crop data
sct_crop_image -i dmri.nii.gz -m mask_dmri_mean.nii.gz -o dmri_crop.nii.gz
# motion correction
# Tips: if data have very low SNR you can increase the number of successive images that are averaged into group with "-g". Also see: sct_dmri_moco -h
sct_dmri_moco -i dmri_crop.nii.gz -bvec bvecs.txt
# segmentation with propseg
sct_deepseg_sc -i dmri_crop_moco_dwi_mean.nii.gz -c dwi -qc "$SCT_BP_QC_FOLDER"
# Register template to dwi
# Tips: Again, here, we prefer to stick to segmentation-based registration. If there are susceptibility distortions in your EPI, then you might consider adding a third step with bsplinesyn or syn transformation for local adjustment.
sct_register_multimodal -i $SCT_DIR/data/PAM50/template/PAM50_t1.nii.gz -iseg $SCT_DIR/data/PAM50/template/PAM50_cord.nii.gz -d dmri_crop_moco_dwi_mean.nii.gz -dseg dmri_crop_moco_dwi_mean_seg.nii.gz -param step=1,type=seg,algo=centermass:step=2,type=seg,algo=bsplinesyn,metric=MeanSquares,smooth=1,iter=3 -initwarp ../t2/warp_template2anat.nii.gz -initwarpinv ../t2/warp_anat2template.nii.gz -qc "$SCT_BP_QC_FOLDER"
# Rename warping fields for clarity
mv warp_PAM50_t12dmri_crop_moco_dwi_mean.nii.gz warp_template2dmri.nii.gz
mv warp_dmri_crop_moco_dwi_mean2PAM50_t1.nii.gz warp_dmri2template.nii.gz
# Warp template and white matter atlas
sct_warp_template -d dmri_crop_moco_dwi_mean.nii.gz -w warp_template2dmri.nii.gz -qc "$SCT_BP_QC_FOLDER"
# Compute DTI metrics
# Tips: The flag -method "restore" allows you to estimate the tensor with robust fit (see: sct_dmri_compute_dti -h)
sct_dmri_compute_dti -i dmri_crop_moco.nii.gz -bval bvals.txt -bvec bvecs.txt
# Compute FA within right and left lateral corticospinal tracts from slices 2 to 14 using weighted average method
sct_extract_metric -i dti_FA.nii.gz -z 2:14 -method wa -l 4,5 -o fa_in_cst.csv
# Bring metric to template space (e.g. for group mapping)
sct_apply_transfo -i dti_FA.nii.gz -d $SCT_DIR/data/PAM50/template/PAM50_t2.nii.gz -w warp_dmri2template.nii.gz
# Go back to root folder
cd ..
# fmri
# ===========================================================================================
cd fmri
# Average all fMRI time series (to be able to do the next step)
sct_maths -i fmri.nii.gz -mean t -o fmri_mean.nii.gz
# Get cord centerline
sct_get_centerline -i fmri_mean.nii.gz -c t2s
# Create mask around the cord to help motion correction and for faster processing
sct_create_mask -i fmri_mean.nii.gz -p centerline,fmri_mean_centerline.nii.gz -size 35mm
# Crop data
sct_crop_image -i fmri.nii.gz -m mask_fmri_mean.nii.gz -o fmri_crop.nii.gz
# Motion correction
# Tips: Here data have sufficient SNR and there is visible motion between two consecutive scans, so motion correction is more efficient with -g 1 (i.e. not average consecutive scans)
sct_fmri_moco -i fmri_crop.nii.gz -g 1
# Segment spinal cord manually
# Since these data have very poor cord/CSF contrast, it is difficult to segment the cord properly
# and hence in this case we do it manually. The file is called: fmri_crop_moco_mean_seg_manual.nii.gz
# Register template->fmri
sct_register_multimodal -i $SCT_DIR/data/PAM50/template/PAM50_t2.nii.gz -iseg $SCT_DIR/data/PAM50/template/PAM50_cord.nii.gz -d fmri_crop_moco_mean.nii.gz -dseg fmri_crop_moco_mean_seg_manual.nii.gz -param step=1,type=seg,algo=slicereg,metric=MeanSquares,smooth=2:step=2,type=im,algo=bsplinesyn,metric=MeanSquares,iter=5,gradStep=0.5 -initwarp ../t2/warp_template2anat.nii.gz -initwarpinv ../t2/warp_anat2template.nii.gz -qc "$SCT_BP_QC_FOLDER"
# Rename warping fields for clarity
mv warp_PAM50_t22fmri_crop_moco_mean.nii.gz warp_template2fmri.nii.gz
mv warp_fmri_crop_moco_mean2PAM50_t2.nii.gz warp_fmri2template.nii.gz
# Warp template and spinal levels (here we don't need the WM atlas)
sct_warp_template -d fmri_crop_moco_mean.nii.gz -w warp_template2fmri.nii.gz -a 0 -s 1
# Note, once you have computed fMRI statistics in the subject's space, you can use
# warp_fmri2template.nii.gz to bring the statistical maps on the template space, for group analysis.
cd ..
# Display results (to easily compare integrity across SCT versions)
# ===========================================================================================
set +v
echo "Started at: $TIME_START"
echo "Ended at: $(date +%x_%r)"
echo
# The awk field numbers ($6, $8) index columns of the CSVs produced above;
# NOTE(review): update them if the SCT tools change their CSV layout.
echo "t2/CSA: " `awk -F"," ' {print $6}' t2/csa_c2c3.csv | tail -1`
echo "mt/MTR(WM): " `awk -F"," ' {print $8}' mt/mtr_in_wm.csv | tail -1`
echo "t2s/CSA_GM: " `awk -F"," ' {print $6}' t2s/csa_gm.csv | tail -1`
echo "t2s/CSA_WM: " `awk -F"," ' {print $6}' t2s/csa_wm.csv | tail -1`
echo "dmri/FA(CST_r): " `awk -F"," ' {print $8}' dmri/fa_in_cst.csv | tail -1`
echo "dmri/FA(CST_l): " `awk -F"," ' {print $8}' dmri/fa_in_cst.csv | tail -2 | head -1`
echo
# Display syntax to open QC report on web browser
echo "To open Quality Control (QC) report on a web-browser, run the following:"
echo "$open_command $SCT_BP_QC_FOLDER/index.html"
|
<gh_stars>1-10
require 'set'
require 'active_support'
require 'active_support/core_ext'
require 'acts_as_markup/version'
require 'acts_as_markup/railtie' if defined?(Rails)
# Core namespace for the acts_as_markup plugin: maps markup-language symbols
# to rendering library classes and lazily requires the chosen library.
module ActsAsMarkup
  # This exception is raised when an unsupported markup language is supplied to acts_as_markup.
  class UnsupportedMarkupLanguage < ArgumentError
  end

  # This exception is raised when an unsupported Markdown library is set to the config value.
  # NOTE(review): the class name is misspelled ("Unsported"), but it is part of
  # the public API (callers may rescue it), so it is not renamed here.
  class UnsportedMarkdownLibrary < ArgumentError
  end

  # Supported Markdown backends: config symbol -> { :class_name, :lib_name }.
  MARKDOWN_LIBS = { :rdiscount => {:class_name => "RDiscount",
                                   :lib_name => "rdiscount"},
                    :bluecloth => {:class_name => "BlueClothText",
                                   :lib_name => "bluecloth"},
                    :rpeg => {:class_name => "PEGMarkdown",
                              :lib_name => "peg_markdown"},
                    :maruku => {:class_name => "Maruku",
                                :lib_name => "maruku"},
                    :redcarpet => {:class_name => "RedcarpetText",
                                   :lib_name => 'redcarpet'} }

  # Basenames of the bundled extension files (acts_as_markup/exts/*.rb),
  # minus the generic 'string' extension which is always loaded separately.
  LIBRARY_EXTENSIONS = ::Set.new(Dir[File.join(File.expand_path(File.dirname(__FILE__)), 'acts_as_markup/exts/*.rb')].map {|file| File.basename(file, '.rb')}).delete('string')

  # Which Markdown backend to use (one of MARKDOWN_LIBS' keys).
  mattr_accessor :markdown_library

  # Returns the version string for the library.
  class << self
    def version
      VERSION
    end

    # Public entry point: returns the class used to render +markup_name+.
    def markup_class(markup_name)
      load_markup_class(markup_name)
    end

    private

    # Requires and returns the configured Markdown class; raises
    # UnsportedMarkdownLibrary for an unknown config value.
    def get_markdown_class
      if ActsAsMarkup::MARKDOWN_LIBS.keys.include? ActsAsMarkup.markdown_library
        markdown_library_names = ActsAsMarkup::MARKDOWN_LIBS[ActsAsMarkup.markdown_library]
        require markdown_library_names[:lib_name]
        require_extensions(markdown_library_names[:lib_name])
        return markdown_library_names[:class_name].constantize
      else
        raise ActsAsMarkup::UnsportedMarkdownLibrary, "#{ActsAsMarkup.markdown_library} is not currently supported."
      end
    end

    # Loads the bundled extension file for +library+ if one exists.
    def require_extensions(library)# :nodoc:
      if ActsAsMarkup::LIBRARY_EXTENSIONS.include? library.to_s
        require "acts_as_markup/exts/#{library}"
      end
    end

    # Requires the implementation for +language+ and returns its class;
    # unknown languages fall back to plain String.
    def require_library_and_get_class(language)
      case language
      when :markdown
        return get_markdown_class
      when :textile
        require 'redcloth'
        return RedCloth
      when :rdoc
        require 'rdoc'
        require_extensions 'rdoc'
        return RDocText
      else
        return String
      end
    end

    # Validates +markup_name+ and dispatches to the loader above.
    def load_markup_class(markup_name)
      if [:markdown, :textile, :rdoc].include?(markup_name.to_sym)
        require_library_and_get_class(markup_name.to_sym)
      else
        raise ActsAsMarkup::UnsupportedMarkupLanguage, "#{markup_name} is not a currently supported markup language."
      end
    end
  end
end

# Let dependent code hook in once the plugin namespace is fully defined.
ActiveSupport.run_load_hooks(:acts_as_markup, ActsAsMarkup)
|
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
#
# netsend.py
#
# Consume items and send them to a remote machine
import socket, pickle
# Consumer that forwards items over the network to a remote receiver.
class NetConsumer(object):
    """Sends pickled items to a remote (host, port) TCP endpoint.

    NOTE(review): pickle over a socket is only safe between trusted peers --
    the receiving side must never unpickle data from untrusted sources.
    """

    def __init__(self, addr):
        # addr: (host, port) tuple accepted by socket.connect
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.connect(addr)

    def send(self, item):  # public API called by producers
        # Serialize the item and push all of its bytes onto the socket.
        pitem = pickle.dumps(item)
        self.s.sendall(pitem)

    def close(self):
        # Close the underlying socket; the consumer is unusable afterwards.
        self.s.close()
# Example use. This requires you to run receivefrom.py first.
if __name__ == '__main__':
    from broadcast import *
    from follow import *
    from apachelog import *

    # A class that sends 404 requests to another host
    class Stat404(NetConsumer):
        def send(self, item):
            # item is expected to be a parsed log record with a 'status'
            # field (produced by apache_log below); only 404s are forwarded.
            if item['status'] == 404:
                NetConsumer.send(self, item)

    # Connect to the receiver on localhost:15000 ("" means local host here).
    stat404 = Stat404(("", 15000))
    # Tail the access log, parse each line, and fan records out to consumers.
    lines = follow(open("run/foo/access-log"))
    log = apache_log(lines)
    broadcast(log, [stat404])
|
module tuna.gantt {
    // Contract for a time-scale view: onRender builds the header/column DOM
    // for the chart, onMounted (optional) runs after it is attached.
    export interface IViewTemplate {
        onRender(instance: JSGantt): JQuery[];
        onMounted?(instance: JSGantt, element: JQuery): void;
    }

    // Registry of the four zoom levels. Each onRender walks the configured
    // range from the coarsest unit downwards, nesting header rows via `parts`.
    export const views: IViews<IViewTemplate> = {
        days: {
            // months -> weeks -> (day header row + day cell columns)
            onRender(instance: JSGantt) {
                // Copy the range so per-view mutation does not leak into options.
                const range = { ...instance.options.range! };
                return parts.months(instance, range, (item: JQuery, current: moment.Moment) => {
                    const range = Utils.createRange(current, "month");
                    return item.append(parts.weeks(instance, range, (item: JQuery, current: moment.Moment) => {
                        const range = Utils.createRange(current, "week");
                        const days = parts.container(item => item.append(parts.days(instance, range))).addClass(`vn-header`);
                        const columns = parts.container(item => item.append(parts.cells(instance, range, "day"))).addClass(`vn-columns`);
                        return item.append([days, columns]).addClass(`vn-header`);
                    })).addClass(`vn-header`);
                });
            }
        },
        weeks: {
            // months -> weeks (no per-day columns)
            onRender(instance: JSGantt) {
                const range = { ...instance.options.range! };
                return parts.months(instance, range, (item: JQuery, current: moment.Moment) => {
                    const range = Utils.createRange(current, "month");
                    return item.append(parts.weeks(instance, range));
                });
            }
        },
        months: {
            // years -> months
            onRender(instance: JSGantt) {
                const range = { ...instance.options.range! };
                return parts.years(instance, range, (item: JQuery, current: moment.Moment) => {
                    const range = Utils.createRange(current, "year");
                    return item.append(parts.months(instance, range));
                });
            }
        },
        years: {
            // single row of years
            onRender(instance: JSGantt) {
                const range = { ...instance.options.range! };
                return parts.years(instance, range);
            }
        }
    };
}
// WeChat mini-program page: an accordion list of phone-number categories.
// Each list entry pairs subName[i] with phone[i]; `open` drives the accordion.
Page({
  data: {
    list: [
      {
        id: 'view',
        name: '紧急电话',
        open: false,
        subName: ['火警', '盗警', '急救', '报时', '电力抢修', '管道液化气抢修', '市话障碍', '交通事故', '天气预报', '号码查询', '自来水抢修'],
        phone: ['119', '110', '120', '117', '95598', '87221599', '112', '122', '121', '114', '87011670']
      }, {
        id: 'form',
        name: '银行电话',
        open: false,
        subName: ['工商银行', '建设银行', '农业银行', '中国银行', '交通银行', '浦发银行', '民生银行', '兴业银行', '中信银行', '深圳发展银行', '华夏银行', '招商银行', '广发银行', '广东农信', '光大银行'],
        phone: ['95588', '95533', '95599', '95566', '95559', '95528', '95568', '95561', '95558', '95501', '95577', '95555', '95508', '96138', '95595']
      }, {
        id: 'feedback',
        name: '快递电话',
        open: false,
        // NOTE(review): 15 names but only 14 numbers -- entries after the
        // missing one are shifted and the last name has no number. Also,
        // '第三人民医院' (a hospital) looks out of place in a courier list.
        // The correct data is not recoverable here; fix at the data source.
        subName: ['申通快递', 'EMS', '第三人民医院', '顺丰速运', ' 圆通速递', '中通速递', '韵达快运', '天天快递', '汇通快运', '速尔快递', '德邦物流', '中铁快运', '鑫飞鸿快递', 'UPS', 'FedEx(联邦快递)'],
        phone: ['4008895543', '4008100999', '400-811-1111', '021-69777888', '021-39777777', '021-39207888', '021-67662333', '021-62963636', '4008822168', '4008305555', '95572', '021-69781999', '4008208388', '4008861888']
      }, {
        id: 'nav',
        name: '通讯客服',
        open: false,
        subName: ['中国移动', '中国联通', '中国电信', '中国网通', '中国铁通', '中国邮政'],
        phone: ['10086', '10010', '10000', '10060', '10050', '11185']
      }, {
        id: 'media',
        name: '投诉举报',
        open: false,
        subName: ['消费者投诉热线', '价格投诉热线', '质量投诉', '环保投诉', '税务投诉', '公共卫生监督', '电信投诉', '市长热线', '法律援助', '妇女维权', '民工维权'],
        phone: ['12315', '12358', '12365', '12369', '12366', '12320', '12300', '12366', '12351', '12338', '12333']
      }, {
        // NOTE(review): the remaining entries omit `open`; widgetsToggle
        // still works because it assigns the property, but the initial
        // state relies on `undefined` being falsy in WXML bindings.
        id: 'map',
        name: '铁路航空',
        subName: ['铁路', '国航', '海航', '南航', '东航', '深航', '厦航', '山航'],
        phone: ['12306', '4008100999', '950718', '4006695539', '95530', '4008895080', '95557', '4006096777']
      }, {
        id: 'canvas',
        name: '售后服务',
        // NOTE(review): 10 names but only 9 numbers -- TCL has no phone
        // entry; confirm and fix at the data source.
        subName: ['苹果', '诺基亚', '三星', '联想', '戴尔', '索尼', '飞利浦', '松下', '东芝', 'TCL'],
        phone: ['4006272273', '4008800123', '8008108888', '8008580888', '8008209000', '8008201201', '8008100781', '8008108208', '4008123456']
      }, {
        id: 'canvas1',
        name: '法律相关',
        subName: ['工资拖欠问题举报', '经济犯罪举报中心', '打拐买举报电话', '土地矿产法律热线', '水利工程建设举报', '扫黄打非举报电话', '农业安全生产事故', '消费者申诉举报电话', '税务违法举报'],
        phone: ['010-68304532', '010-65204333', '010-84039250', '16829999', '010-63205050', '010-65254722', '010-64192512', '12315', '010-63417425']
      }
    ]
  },
  // Accordion toggle: open the tapped category, close every other one.
  widgetsToggle: function (e) {
    var id = e.currentTarget.id, list = this.data.list;
    for (var i = 0, len = list.length; i < len; ++i) {
      if (list[i].id == id) {
        list[i].open = !list[i].open;
      } else {
        list[i].open = false;
      }
    }
    this.setData({
      list: list
    });
  },
  // Tap handler on a number row: place the call via the WeChat API.
  callPhone: function (e) {
    wx.makePhoneCall({
      phoneNumber: e.target.dataset.phone
    })
  }
});
|
package de.unibi.agbi.biodwh2.reactome.entities;
import org.neo4j.ogm.annotation.Relationship;
import java.util.HashSet;
import java.util.Set;
/**
 * Created by manuel on 11.12.19.
 *
 * Provenance record for a database object: when it was edited, an optional
 * note, and the people who authored the edit.
 */
public class InstanceEdit extends DatabaseObject {
    // Timestamp of the edit, kept as the raw string from the source data.
    public String dateTime;
    // Free-text annotation attached to the edit.
    public String note;

    // NOTE(review): the relationship type is literally "Person" in the graph
    // model -- confirm against the source schema before renaming.
    @Relationship(type = "Person")
    public Set<Person> author = new HashSet<>();

    public InstanceEdit() {
    }

    public String getDateTime() {
        return dateTime;
    }

    public String getNote() {
        return note;
    }

    public Set<Person> getAuthor() {
        return author;
    }
}
|
#!/bin/bash -u
# This example shows the best way to check if a program fails in bash
# Notes:
# - you must collect the return code *right after* the running of the program.
#   This is because any program run after it will change this return code ($?).
# - if you want to check if a program succeeds it easier since you can just
#   do: if [program].
#
# References:
# - https://askubuntu.com/questions/29370/how-to-check-if-a-command-succeeded
# - https://stackoverflow.com/questions/10552711/how-to-make-if-not-true-condition

false
code=$?
# Fix: quote the expansion -- with an empty or mangled value an unquoted
# $code would make `[` fail with a syntax error instead of testing the status.
if [ "$code" -ne 0 ]
then
  echo "yes, program fails"
fi

# this is the best way: test the command directly instead of saving $?
if ! false
then
  echo "yes, program fails"
fi
|
import { TravelPerk } from '@services';
import { IStore } from '@store';
import { ILogger } from '@utils';
import { Currency, IInvoice, IInvoiceLine, IManager } from './contracts';
import { Manager } from './Manager';
// Re-export the manager contracts so consumers import from this module only.
export { IManager, IInvoice, IInvoiceLine, Currency };

/** Builds an IManager bound to a TravelPerk client, store and account. */
export type IManagerFactory = (client: TravelPerk.IClient, store: IStore, accountId: string, logger: ILogger) => IManager;

export const createManager: IManagerFactory = (client, store, accountId, logger) => {
    return new Manager(client, store, accountId, logger);
};
|
#!/bin/sh
# Remove all build artifacts from the l2ping replay experiment.
# -f keeps the script quiet (and successful) when a target is absent.
rm -f -- l2ping.o replay
rm -f -- replay_l2cap_packet_*.o replay_l2cap_packet_*.c
|
// Show/hide the follow-up sections of the recycling questionnaire.
var byId = function (id) { return document.getElementById(id); };

// Reveal one of the two "do you recycle" follow-ups and hide the other.
var setRecycleSections = function (showYes) {
  byId('recycle--yes--hidden').style = showYes ? 'display: block' : 'display: none';
  byId('recycle--no--hidden').style = showYes ? 'display: none' : 'display: block';
};

byId('do-you-recycle--yes').onclick = function () { setRecycleSections(true); };
byId('do-you-recycle--no').onclick = function () { setRecycleSections(false); };

// The "other" checkbox toggles its free-text section on each click.
byId('recycle-type--other').onclick = function () {
  var visible = byId('recycle-type--other').checked;
  byId('recycle-type--other--hidden').style = visible ? 'display: block' : 'display: none';
};
<filename>rpc/client/examples_test.go
package client_test
import (
"bytes"
"context"
"fmt"
"log"
"github.com/arcology-network/consensus-engine/abci/example/kvstore"
rpchttp "github.com/arcology-network/consensus-engine/rpc/client/http"
ctypes "github.com/arcology-network/consensus-engine/rpc/core/types"
rpctest "github.com/arcology-network/consensus-engine/rpc/test"
)
// ExampleHTTP_simple broadcasts one transaction to an in-process
// tendermint + kvstore node and reads the stored value back via ABCIQuery.
// The trailing "// Output:" block is checked by `go test` -- do not edit it.
func ExampleHTTP_simple() {
	// Start a tendermint node (and kvstore) in the background to test against
	app := kvstore.NewApplication()
	node := rpctest.StartTendermint(app, rpctest.SuppressStdout, rpctest.RecreateConfig)
	defer rpctest.StopTendermint(node)

	// Create our RPC client
	rpcAddr := rpctest.GetConfig().RPC.ListenAddress
	c, err := rpchttp.New(rpcAddr, "/websocket")
	if err != nil {
		log.Fatal(err) //nolint:gocritic
	}

	// Create a transaction in the kvstore's "key=value" wire format.
	k := []byte("name")
	v := []byte("satoshi")
	tx := append(k, append([]byte("="), v...)...)

	// Broadcast the transaction and wait for it to commit (rather use
	// c.BroadcastTxSync though in production).
	bres, err := c.BroadcastTxCommit(context.Background(), tx)
	if err != nil {
		log.Fatal(err)
	}
	if bres.CheckTx.IsErr() || bres.DeliverTx.IsErr() {
		log.Fatal("BroadcastTxCommit transaction failed")
	}

	// Now try to fetch the value for the key
	qres, err := c.ABCIQuery(context.Background(), "/key", k)
	if err != nil {
		log.Fatal(err)
	}
	if qres.Response.IsErr() {
		log.Fatal("ABCIQuery failed")
	}
	if !bytes.Equal(qres.Response.Key, k) {
		log.Fatal("returned key does not match queried key")
	}
	if !bytes.Equal(qres.Response.Value, v) {
		log.Fatal("returned value does not match sent value")
	}

	fmt.Println("Sent tx :", string(tx))
	fmt.Println("Queried for :", string(qres.Response.Key))
	fmt.Println("Got value :", string(qres.Response.Value))
	// Output:
	// Sent tx : name=satoshi
	// Queried for : name
	// Got value : satoshi
}
// ExampleHTTP_batching queues two broadcasts and two queries into RPC
// batches, sending each batch as a single round trip.
// The trailing "// Output:" block is checked by `go test` -- do not edit it.
func ExampleHTTP_batching() {
	// Start a tendermint node (and kvstore) in the background to test against
	app := kvstore.NewApplication()
	node := rpctest.StartTendermint(app, rpctest.SuppressStdout, rpctest.RecreateConfig)

	// Create our RPC client
	rpcAddr := rpctest.GetConfig().RPC.ListenAddress
	c, err := rpchttp.New(rpcAddr, "/websocket")
	if err != nil {
		log.Fatal(err)
	}

	defer rpctest.StopTendermint(node)

	// Create our two transactions in the kvstore's "key=value" format.
	k1 := []byte("firstName")
	v1 := []byte("satoshi")
	tx1 := append(k1, append([]byte("="), v1...)...)

	k2 := []byte("lastName")
	v2 := []byte("nakamoto")
	tx2 := append(k2, append([]byte("="), v2...)...)

	txs := [][]byte{tx1, tx2}

	// Create a new batch
	batch := c.NewBatch()

	// Queue up our transactions
	for _, tx := range txs {
		// Broadcast the transaction and wait for it to commit (rather use
		// c.BroadcastTxSync though in production).
		if _, err := batch.BroadcastTxCommit(context.Background(), tx); err != nil {
			log.Fatal(err) //nolint:gocritic
		}
	}

	// Send the batch of 2 transactions
	if _, err := batch.Send(context.Background()); err != nil {
		log.Fatal(err)
	}

	// Now let's query for the original results as a batch
	keys := [][]byte{k1, k2}
	for _, key := range keys {
		if _, err := batch.ABCIQuery(context.Background(), "/key", key); err != nil {
			log.Fatal(err)
		}
	}

	// Send the 2 queries and keep the results
	results, err := batch.Send(context.Background())
	if err != nil {
		log.Fatal(err)
	}

	// Each result in the returned list is the deserialized result of each
	// respective ABCIQuery response
	for _, result := range results {
		qr, ok := result.(*ctypes.ResultABCIQuery)
		if !ok {
			log.Fatal("invalid result type from ABCIQuery request")
		}
		fmt.Println(string(qr.Response.Key), "=", string(qr.Response.Value))
	}

	// Output:
	// firstName = satoshi
	// lastName = nakamoto
}
|
// Marker data: [info-window HTML, latitude, longitude, <4th element>].
// NOTE(review): the 4th element is never read below — confirm whether it is needed.
var locations = [
    ['Freedom Coworking <p><b>Sin descuento</b></p> <p>Avenida Santa Fé 2459</p>', -34.5945051,-58.4039819, 2],
    ['PC3 Coworking <p><b>Descuento 5%</b></p> <p> <NAME> 545</p>', -34.6026208,-58.3911502, 1],
];
// Roadmap centered on the coordinates below (Buenos Aires area, per the data).
var map = new google.maps.Map(document.getElementById('map'), {
    zoom: 11,
    center: new google.maps.LatLng(-34.586814,-58.428149),
    mapTypeId: google.maps.MapTypeId.ROADMAP
});
// One shared InfoWindow: clicking a marker moves it rather than opening several.
var infowindow = new google.maps.InfoWindow();
var marker, i;
for (i = 0; i < locations.length; i++) {
    marker = new google.maps.Marker({
        position: new google.maps.LatLng(locations[i][1], locations[i][2]),
        map: map
    });
    // IIFE captures marker/i per iteration (both are function-scoped vars,
    // so without it every listener would see the last loop values).
    google.maps.event.addListener(marker, 'click', (function(marker, i) {
        return function() {
            infowindow.setContent(locations[i][0]);
            infowindow.open(map, marker);
        }
    })(marker, i));
}
|
<gh_stars>10-100
package edu.vesit.deliveryapp;
import android.app.IntentService;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Handler;
import android.util.Log;
import com.google.android.gms.gcm.GoogleCloudMessaging;
import com.google.android.gms.iid.InstanceID;
import java.io.IOException;
/**
 * IntentService that fetches this app instance's GCM registration token and
 * uploads it (paired with the locally cached user id) via UpdateGCMIdTask.
 */
public class RegistrationIntentService extends IntentService
{
    public RegistrationIntentService()
    {
        super("Register"); // name of the worker thread
    }
    @Override
    public void onHandleIntent(Intent intent)
    {
        try
        {
            // Ask Google Play services for the GCM token for this sender id.
            InstanceID instanceID = InstanceID.getInstance(this);
            final String token = instanceID.getToken(getString(R.string.gcm_defaultSenderId), GoogleCloudMessaging.INSTANCE_ID_SCOPE, null);
            // User id cached in shared prefs — presumably written during login;
            // TODO confirm the writer. Falls back to " " when absent.
            SharedPreferences app_cache = getSharedPreferences("app_cache", Context.MODE_PRIVATE);
            final String id = app_cache.getString("id", " ");
            final Handler mHandler = new Handler();
            // NOTE(review): the extra Thread adds nothing — mHandler.post already
            // marshals the Runnable back to the thread that created the Handler.
            new Thread(new Runnable()
            {
                @Override
                public void run()
                {
                    mHandler.post(new Runnable()
                    {
                        @Override
                        public void run()
                        {
                            // Upload (id, token) to the backend.
                            new UpdateGCMIdTask().execute(id, token);
                        }
                    });
                }
            }).start();
        }
        catch(IOException e)
        {
            // NOTE(review): e.getMessage() may be null, which Log.e rejects —
            // consider logging the throwable instead.
            Log.e("Error in token : ", e.getMessage());
        }
    }
}
// TypeScript __assign helper: shallow-merges own enumerable string-keyed
// properties of each source into the target and returns the target.
// Defers to the native Object.assign when available; the outer wrapper
// replaces itself on first call.
var __assign = (this && this.__assign) || function () {
    __assign = Object.assign || function (target) {
        for (var idx = 1; idx < arguments.length; idx++) {
            var source = arguments[idx];
            for (var key in source) {
                if (Object.prototype.hasOwnProperty.call(source, key)) {
                    target[key] = source[key];
                }
            }
        }
        return target;
    };
    return __assign.apply(this, arguments);
};
// TypeScript __rest helper: returns a new object with every own enumerable
// property of `source` except those listed in `excluded`. Own enumerable
// symbol keys are copied too when the runtime supports them.
var __rest = (this && this.__rest) || function (source, excluded) {
    var result = {};
    for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key) && excluded.indexOf(key) < 0) {
            result[key] = source[key];
        }
    }
    if (source != null && typeof Object.getOwnPropertySymbols === "function") {
        var symbols = Object.getOwnPropertySymbols(source);
        for (var i = 0; i < symbols.length; i++) {
            if (excluded.indexOf(symbols[i]) < 0 && Object.prototype.propertyIsEnumerable.call(source, symbols[i])) {
                result[symbols[i]] = source[symbols[i]];
            }
        }
    }
    return result;
};
import React from 'react';
import { getBlockParentPage, getTextContent } from 'notion-utils';
import { useLocalStorage, useWindowSize } from 'react-use';
import Dropdown from 'rc-dropdown';
import Menu, { Item as MenuItem } from 'rc-menu';
import { CollectionViewIcon } from '../icons/collection-view-icon';
import { ChevronDownIcon } from '../icons/chevron-down-icon';
import { CollectionView } from './collection-view';
import { PageIcon } from './page-icon';
import { useNotionContext } from '../context';
import { cs } from '../utils';
// True when rendering server-side (no window object).
var isServer = typeof window === 'undefined';
// Dropdown open trigger(s) passed to rc-dropdown below.
var triggers = ['click'];
// Renders a Notion collection (database) block: title, optional view-switcher
// dropdown, and the active CollectionView. Compiled-from-TypeScript output;
// the _a/_b temporaries come from the compiler.
export var Collection = function (_a) {
    var _b, _c, _d, _e;
    var block = _a.block, className = _a.className;
    var _f = useNotionContext(), recordMap = _f.recordMap, showCollectionViewDropdown = _f.showCollectionViewDropdown;
    var collectionId = block.collection_id, viewIds = block.view_ids;
    // Persist the selected view per block id across reloads.
    var _g = useLocalStorage(block.id, {
        collectionViewId: viewIds[0]
    }), collectionState = _g[0], setCollectionState = _g[1];
    // Fall back to the first view if the stored id no longer exists.
    var collectionViewId = viewIds.find(function (id) { return id === collectionState.collectionViewId; }) || viewIds[0];
    var onChangeView = React.useCallback(function (_a) {
        var collectionViewId = _a.key;
        // NOTE(review): debug logging left in — consider removing for production.
        console.log('change collection view', collectionViewId);
        setCollectionState(__assign(__assign({}, collectionState), { collectionViewId: collectionViewId }));
    }, [collectionState]);
    var width = useWindowSize().width;
    // On the server there is no window, so pin a deterministic width for SSR.
    if (isServer) {
        width = 1024;
    }
    // TODO: customize for mobile?
    var maxNotionBodyWidth = 708;
    var notionBodyWidth = maxNotionBodyWidth;
    var parentPage = getBlockParentPage(block, recordMap);
    // Full-width pages get side gutters capped at 96px (8vw); `| 0` truncates to int.
    if ((_b = parentPage === null || parentPage === void 0 ? void 0 : parentPage.format) === null || _b === void 0 ? void 0 : _b.page_full_width) {
        notionBodyWidth = (width - 2 * Math.min(96, width * 0.08)) | 0;
    }
    else {
        notionBodyWidth =
            width < maxNotionBodyWidth
                ? (width - width * 0.02) | 0 // 2vw
                : maxNotionBodyWidth;
    }
    var padding = 0; // isServer ? 96 : ((width - notionBodyWidth) / 2) | 0
    // console.log({ width, notionBodyWidth, padding })
    console.log('sending padding', padding);
    // Resolve the collection, the active view, and its query results from the record map.
    var collection = (_c = recordMap.collection[collectionId]) === null || _c === void 0 ? void 0 : _c.value;
    var collectionView = (_d = recordMap.collection_view[collectionViewId]) === null || _d === void 0 ? void 0 : _d.value;
    var collectionData = (_e = recordMap.collection_query[collectionId]) === null || _e === void 0 ? void 0 : _e[collectionViewId];
    // Render nothing if any of the three pieces is missing from the record map.
    if (!(collection && collectionView && collectionData)) {
        console.log('skipping missing collection view for block', block.id);
        return null;
    }
    var style = {};
    if (collectionView.type === 'table' || collectionView.type === 'board') {
        // style.paddingLeft = padding
        style.paddingRight = padding;
    }
    var title = getTextContent(collection.name).trim();
    // Propagate the collection icon onto the block so PageIcon can render it.
    // NOTE(review): this mutates the shared block object from the record map.
    if (collection.icon) {
        block.format = __assign(__assign({}, block.format), { page_icon: collection.icon });
    }
    return (React.createElement("div", { className: cs('notion-collection', className) },
        React.createElement("div", { className: 'notion-collection-header', style: style },
            title && (React.createElement("div", { className: 'notion-collection-header-title' },
                React.createElement(React.Fragment, null,
                    React.createElement(PageIcon, { block: block, className: 'notion-page-title-icon', hideDefaultIcon: true }),
                    title))),
            viewIds.length > 1 && showCollectionViewDropdown && (React.createElement(Dropdown, { trigger: triggers, overlay: React.createElement(Menu, { onSelect: onChangeView }, viewIds.map(function (viewId) {
                var _a;
                return (React.createElement(MenuItem, { key: viewId, className: 'notion-collection-view-type-menu-item' },
                    React.createElement(CollectionViewColumnDesc, { collectionView: (_a = recordMap.collection_view[viewId]) === null || _a === void 0 ? void 0 : _a.value })));
            })), animation: 'slide-up' },
                React.createElement(CollectionViewColumnDesc, { className: 'notion-collection-view-dropdown', collectionView: collectionView },
                    React.createElement(ChevronDownIcon, { className: 'notion-collection-view-dropdown-icon' }))))),
        React.createElement(CollectionView, { collection: collection, collectionView: collectionView, collectionData: collectionData, padding: 0, width: width })));
};
// Renders a collection view's icon plus its display name (falling back to a
// capitalized "<Type> view"), forwarding any extra props to the wrapper div.
var CollectionViewColumnDesc = function (props) {
    var collectionView = props.collectionView;
    var className = props.className;
    var children = props.children;
    var rest = __rest(props, ["collectionView", "className", "children"]);
    var type = collectionView.type;
    // Fall back lazily so the concatenation only runs when no name is set.
    var name = collectionView.name || "" + type[0].toUpperCase() + type.slice(1) + " view";
    return React.createElement(
        "div",
        __assign({ className: cs('notion-collection-view-type', className) }, rest),
        React.createElement(CollectionViewIcon, { className: 'notion-collection-view-type-icon', type: type }),
        React.createElement("span", { className: 'notion-collection-view-type-title' }, name),
        children
    );
};
//# sourceMappingURL=collection.js.map |
import * as React from 'react';
export var ClearFix = () => <div className="clearfix"></div>; |
<gh_stars>10-100
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.model.interestrate;
import javax.time.calendar.ZonedDateTime;
import org.apache.commons.lang.Validate;
import com.opengamma.analytics.financial.model.interestrate.definition.StandardDiscountBondModelDataBundle;
import com.opengamma.analytics.financial.model.tree.RecombiningBinomialTree;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.analytics.math.rootfinding.BrentSingleRootFinder;
import com.opengamma.analytics.math.rootfinding.RealSingleRootFinder;
import com.opengamma.util.time.DateUtils;
import com.opengamma.util.tuple.Triple;
/**
*
*/
/**
 * Black-Derman-Toy short-rate model calibrated to yields only (constant
 * volatility per step). Builds a recombining binomial tree of triples
 * (short rate r, one-period discount factor d, state value q).
 * NOTE(review): q[0][0] = 1 with halved, discounted propagation suggests the
 * q entries are Arrow-Debreu state prices — confirm against the model docs.
 */
public class BlackDermanToyYieldOnlyInterestRateModel {
    // Root finder used to solve for the median rate u[i] at each time step.
    private final RealSingleRootFinder _rootFinder = new BrentSingleRootFinder();
    // Number of time steps in the tree.
    private final int _n;
    // Number of nodes at the widest level, derived from _n.
    private final int _j;
    public BlackDermanToyYieldOnlyInterestRateModel(final int n) {
        if (n < 2) {
            throw new IllegalArgumentException("Must have more than one node");
        }
        _n = n;
        _j = RecombiningBinomialTree.NODES.evaluate(_n);
    }
    /**
     * Returns a function that, given market data, builds the calibrated
     * (rate, discount, q) tree out to the supplied maturity date.
     * @param time tree maturity; time step dt = yearFraction(data date, time) / n
     */
    public Function1D<StandardDiscountBondModelDataBundle, RecombiningBinomialTree<Triple<Double, Double, Double>>> getTrees(final ZonedDateTime time) {
        Validate.notNull(time, "time");
        return new Function1D<StandardDiscountBondModelDataBundle, RecombiningBinomialTree<Triple<Double, Double, Double>>>() {
            @SuppressWarnings({"unchecked", "synthetic-access" })
            @Override
            public RecombiningBinomialTree<Triple<Double, Double, Double>> evaluate(final StandardDiscountBondModelDataBundle data) {
                Validate.notNull(data, "data");
                final double[][] r = new double[_n + 1][_j]; // short rates
                final double[][] q = new double[_n + 1][_j]; // state values (see class note)
                final double[][] d = new double[_n + 1][_j]; // one-period discount factors
                final double[] u = new double[_n + 1];       // median rate per step
                final double[] p = new double[_n + 2];       // zero-coupon bond prices per step
                final double t = DateUtils.getDifferenceInYears(data.getDate(), time);
                final double dt = t / _n;
                final double dtSqrt = Math.sqrt(dt);
                final double r1 = data.getShortRate(dt);
                final double sigma = data.getShortRateVolatility(dt);
                // Discount bond prices implied by the input short-rate curve.
                p[0] = 1.0;
                for (int i = 1; i <= _n + 1; i++) {
                    p[i] = 1. / Math.pow((1 + data.getShortRate(i) * dt), dt * i);
                }
                // Initial node.
                q[0][0] = 1.;
                u[0] = r1;
                r[0][0] = r1;
                d[0][0] = 1. / (1 + r1 * dt);
                for (int i = 1; i <= _n; i++) {
                    // Edge nodes receive mass from a single parent; interior nodes from two.
                    q[i][0] = 0.5 * q[i - 1][0] * d[i - 1][0];
                    q[i][i] = 0.5 * q[i - 1][i - 1] * d[i - 1][i - 1];
                    for (int j = -i + 2, k = 1; j <= i - 2; j += 2, k++) {
                        q[i][k] = 0.5 * (q[i - 1][k - 1] * d[i - 1][k - 1] + q[i - 1][k] * d[i - 1][k]);
                    }
                    // Solve for the median rate so the tree reprices p[i + 1].
                    u[i] = _rootFinder.getRoot(getMedian(sigma, i, dt, q, p[i + 1]), 0., 1.);
                    // Fan rates out around the median: r = u * exp(sigma * j * sqrt(dt)).
                    for (int j = -i, k = 0; j <= i; j += 2, k++) {
                        r[i][k] = u[i] * Math.exp(sigma * j * dtSqrt);
                        d[i][k] = 1. / (1 + r[i][k] * dt);
                    }
                }
                final Triple<Double, Double, Double>[][] result = new Triple[_n + 1][_j];
                for (int i = 0; i <= _n; i++) {
                    for (int j = 0; j < _j; j++) {
                        result[i][j] = new Triple<Double, Double, Double>(r[i][j], d[i][j], q[i][j]);
                    }
                }
                return new RecombiningBinomialTree<Triple<Double, Double, Double>>(result);
            }
        };
    }
    /**
     * Objective function for the root finder: sum of the q-weighted one-period
     * discounts at step i minus the target bond price p; its root is the
     * median rate u for that step.
     */
    protected Function1D<Double, Double> getMedian(final double sigma, final int i, final double dt, final double[][] q, final double p) {
        return new Function1D<Double, Double>() {
            @Override
            public Double evaluate(final Double u) {
                double sum = 0.;
                final double dtSqrt = Math.sqrt(dt);
                for (int j = -i, k = 0; j <= i; j += 2, k++) {
                    sum += q[i][k] / (1 + u * Math.exp(sigma * j * dtSqrt) * dt);
                }
                return sum - p;
            }
        };
    }
}
|
<reponame>WaizungTaam/Deep-AutoEncoder-Network<filename>src/math/utils.h
/*
Copyright 2016 WaizungTaam. All rights reserved.
License: Apache License 2.0
Email: <EMAIL>
*/
#include <cmath>
#include <string>
#include "vector.h"
#include "matrix.h"
namespace nn {
// Apply a unary function element-wise. Internal helpers (leading underscore).
Vector _forall(const Vector &, double (*pf)(double));
Matrix _forall(const Matrix &, double (*pf)(double));
// Approximate floating-point equality within a tolerance (third argument).
bool approx(double, double, double);
// Element-wise math wrappers over scalars, vectors and matrices.
// NOTE(review): these shadow the std:: names inside namespace nn — intentional?
double exp(double);
Vector exp(const Vector &);
Matrix exp(const Matrix &);
double log(double);
Vector log(const Vector &);
Matrix log(const Matrix &);
double pow(double, double);
Vector pow(const Vector &, double);
Matrix pow(const Matrix &, double);
double sqrt(double);
Vector sqrt(const Vector &);
Matrix sqrt(const Matrix &);
// Descriptive statistics; the int overload parameter presumably selects an
// axis (row/column) — TODO confirm against the implementation.
double mean(const Vector &);
double mean(const Matrix &);
Vector mean(const Matrix &, int);
double variance(const Vector &);
double variance(const Matrix &);
Vector variance(const Matrix &, int);
double stddev(const Vector &);
double stddev(const Matrix &);
Vector stddev(const Matrix &, int);
// Activation dispatch by name, plus the individual activations and their
// derivatives (d_ prefix).
Vector activ_func(const Vector &, std::string);
Matrix activ_func(const Matrix &, std::string);
Vector d_activ_func(const Vector &, std::string);
Matrix d_activ_func(const Matrix &, std::string);
double relu(double);
Vector relu(const Vector &);
Matrix relu(const Matrix &);
double d_relu(double);
Vector d_relu(const Vector &);
Matrix d_relu(const Matrix &);
double logistic(double);
Vector logistic(const Vector &);
Matrix logistic(const Matrix &);
double d_logistic(double);
Vector d_logistic(const Vector &);
Matrix d_logistic(const Matrix &);
double tanh(double);
Vector tanh(const Vector &);
Matrix tanh(const Matrix &);
double d_tanh(double);
Vector d_tanh(const Vector &);
Matrix d_tanh(const Matrix &);
Vector softmax(const Vector &);
Matrix softmax(const Matrix &);
Vector d_softmax(const Vector &);
Matrix d_softmax(const Matrix &);
// Convolution of two signals/kernels.
Vector convolve(const Vector &, const Vector &);
Matrix convolve(const Matrix &, const Matrix &);
// Random sampling helpers.
double binomial_sample(int, double);
Vector binomial_sample(int, const Vector &);
Matrix binomial_sample(int, const Matrix &);
double normal_sample(double, double);
Vector normal_sample(const Vector &, double);
Matrix normal_sample(const Matrix &, double);
Matrix normal_sample(const Matrix &, const Vector &);
#ifndef PARAM_LIST
#define PARAM_LIST
// Plain-data bundle of training hyper-parameters passed to the network.
class param_list {
public:
  int num_epochs;         // number of passes over the training set
  int batch_size;         // mini-batch size
  double learning_rate;
  double momentum;
  std::string activ_func;   // hidden-layer activation name
  std::string output_func;  // output-layer activation name
  param_list() = default;
  param_list(const param_list &) = default;
  param_list(param_list &&) = default;
  param_list & operator=(const param_list &) = default;
  param_list & operator=(param_list &&) = default;
  ~param_list() = default;
  param_list(int n, int b, double l, double m,
             std::string a, std::string o) :
    num_epochs(n), batch_size(b),
    learning_rate(l), momentum(m),
    activ_func(a), output_func(o) {}
};
#endif // PARAM_LIST
} // namespace nn
|
<reponame>Flytrex/mavlink<gh_stars>10-100
package io.dronefleet.mavlink.uavionix;
import io.dronefleet.mavlink.annotations.MavlinkEntryInfo;
import io.dronefleet.mavlink.annotations.MavlinkEnum;
/**
 * GPS lateral offset encoding
 */
@MavlinkEnum
public enum UavionixAdsbOutCfgGpsOffsetLat {
    /**
     * No offset data available.
     */
    @MavlinkEntryInfo(0)
    UAVIONIX_ADSB_OUT_CFG_GPS_OFFSET_LAT_NO_DATA,
    /**
     * 2 m to the left (per the entry name).
     */
    @MavlinkEntryInfo(1)
    UAVIONIX_ADSB_OUT_CFG_GPS_OFFSET_LAT_LEFT_2M,
    /**
     * 4 m to the left (per the entry name).
     */
    @MavlinkEntryInfo(2)
    UAVIONIX_ADSB_OUT_CFG_GPS_OFFSET_LAT_LEFT_4M,
    /**
     * 6 m to the left (per the entry name).
     */
    @MavlinkEntryInfo(3)
    UAVIONIX_ADSB_OUT_CFG_GPS_OFFSET_LAT_LEFT_6M,
    /**
     * 0 m to the right (per the entry name).
     */
    @MavlinkEntryInfo(4)
    UAVIONIX_ADSB_OUT_CFG_GPS_OFFSET_LAT_RIGHT_0M,
    /**
     * 2 m to the right (per the entry name).
     */
    @MavlinkEntryInfo(5)
    UAVIONIX_ADSB_OUT_CFG_GPS_OFFSET_LAT_RIGHT_2M,
    /**
     * 4 m to the right (per the entry name).
     */
    @MavlinkEntryInfo(6)
    UAVIONIX_ADSB_OUT_CFG_GPS_OFFSET_LAT_RIGHT_4M,
    /**
     * 6 m to the right (per the entry name).
     */
    @MavlinkEntryInfo(7)
    UAVIONIX_ADSB_OUT_CFG_GPS_OFFSET_LAT_RIGHT_6M
}
|
package sword.bitstream.huffman;
/**
* Huffman table that allow encoding integer numbers.
* This means that all zero, positive and negative numbers are allowed without any decimal.
* <p>
* This table assign less bits to the values closer to 0 and more bits to ones further.
* Thus, zero is always the most probable one and then the one that takes less bits.
* <p>
* This Huffman table assign always amount of bits that are multiple of the given
* bit align. Trying to fit inside the closer values and adding more bits for further values.
* <p>
* E.g. if bitAlign is 4 the resulting table will assign symbols from -4 to 3 to
* the unique symbols with 4 bits once included, leaving the first bit as a switch
* to extend the number of bits.
* <code>
* <br> 0000 ⇒ 0
* <br> 0001 ⇒ 1
* <br> 0010 ⇒ 2
* <br> 0011 ⇒ 3
* <br> 0100 ⇒ -4
* <br> 0101 ⇒ -3
* <br> 0110 ⇒ -2
* <br> 0111 ⇒ -1
* <br></code>
* <p>
* Note that all encoded symbols start with <code>0</code>. In reality the amount of <code>1</code> before
* this zero reflects the number of bits for this symbol. When the zero is the first
* one, the amount of bits for the symbol is understood to match the bit align value.
* When there are one <code>1</code> in front the zero (<code>10</code>) then it will be the bit align
* value multiplied by 2. Thus <code>110</code> will be <code>bitAlign * 3</code>, <code>1110</code> will be
* <code>bitAlign * 4</code> and so on.
* <code>
* <br> 10000000 ⇒ 4
* <br> 10000001 ⇒ 5
* <br> ...
* <br> 10011111 ⇒ 35
* <br> 10100000 ⇒ -36
* <br> ...
* <br> 10111111 ⇒ -5
* <br> 110000000000 ⇒ 36
* <br> 110000000001 ⇒ 37
* <br> ...
* <br></code>
* <p>
* This table can theoretically include any number, even if it is really big.
* Technically it is currently limited to the int bounds (32-bit integer).
* As it can include any number and numbers are infinite, this table is
* infinite as well and its iterable will not converge.
*/
public final class IntegerNumberHuffmanTable extends AbstractIntegerNumberHuffmanTable<Integer> {
    /**
     * Create a new instance with the given bit alignment.
     * @param bitAlign Number of bits that the most probable symbols will have.
     *                 Check {@link IntegerNumberHuffmanTable} for more information.
     */
    public IntegerNumberHuffmanTable(int bitAlign) {
        super(bitAlign);
    }

    /**
     * Narrows the decoded long to an {@code Integer}, failing fast when the
     * symbol does not fit in 32 bits.
     * @throws AssertionError if the value is outside the int range.
     */
    @Override
    Integer box(long value) {
        if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) {
            // BUGFIX: the message previously misspelled the suggested class
            // as "LongIntegerNumberHiffmanTable".
            throw new AssertionError("Symbol is out of bounds. Consider using LongIntegerNumberHuffmanTable instead");
        }
        return (int) value;
    }
}
|
def find_winner(piles):
    """Return which player wins a normal-play Nim position with optimal play.

    By the standard Nim theorem, the player to move ('First') wins exactly
    when the nim-sum (xor of all pile sizes) is non-zero; otherwise the
    opponent ('Second') wins.

    BUGFIX: the original returned 'Second' for a non-zero nim-sum and 'First'
    for zero, i.e. the inverse of the Nim winning condition. (The sample
    output in this file — Player 2 moving first from piles [5, 5], whose
    nim-sum is 0 — is only consistent with the corrected version.)
    """
    nim_sum = 0
    for pile in piles:
        nim_sum ^= pile
    return 'First' if nim_sum != 0 else 'Second'
def perfect_nim_game(piles):
    """Play out a scripted Nim-like game on `piles`, printing each move.

    Loops until all piles are empty, then declares Player 2 the winner.
    NOTE(review): despite the name this is not perfect Nim play — Player 1
    always removes one stone from the largest pile, and Player 2 halves all
    piles and consolidates; confirm this is the intended ruleset.
    """
    while (True):
        # If the piles are all empty, player 2 won the game.
        if (all(i == 0 for i in piles)):
            print("Player 2 won the game")
            break
        # Find the current winner
        curr_winner = find_winner(piles)
        # Player 1
        if (curr_winner == 'First'):
            # Pick one pile to take out stones
            piles[piles.index(max(piles))] -= 1
            # Prints the number of stones taken out
            # from the current pile.
            # NOTE(review): max(piles) is re-evaluated AFTER the decrement, so
            # the printed pile index can differ from the pile actually changed.
            print("Player 1 took out 1 stone from pile: ",
                  piles.index(max(piles)) + 1)
        # Player 2
        else:
            # Halve every pile (integer division) — rebinding the local list,
            # so the caller's list is not mutated on this branch.
            piles = list(map( lambda x: x//2, piles ))
            # Move all remaining stones into one pile.
            stones = 0
            for i in range(len(piles)):
                stones += piles[i]
                piles[i] = 0
            piles[piles.index(max(piles))] = stones
            # NOTE(review): prints stones // 2, not the number actually removed.
            print("Player 2 took out", stones // 2,
                  "stones from pile:", piles.index(max(piles)) + 1)
# Demo run starting from two piles of five stones (nim-sum 0).
piles = [5, 5]
perfect_nim_game(piles)
# Output: Player 2 took out 2 stones from pile: 1
# Player 1 took out 1 stone from pile: 2
# Player 2 took out 1 stones from pile: 1
# Player 1 took out 1 stone from pile: 2
# Player 2 took out 1 stones from pile: 1
# Player 1 took out 1 stone from pile: 2
# Player 2 won the game
package io.casperlabs.casper.highway.mocks
import cats._
import cats.implicits._
import cats.effect._
import cats.effect.concurrent.Ref
import io.casperlabs.casper.consensus.Era
import io.casperlabs.storage.BlockHash
import io.casperlabs.storage.era.EraStorage
// In-memory EraStorage for tests, backed by a Ref-held map keyed by the
// era's key block hash.
class MockEraStorage[F[_]: Applicative](
    erasRef: Ref[F, Map[BlockHash, Era]]
) extends EraStorage[F] {
  // Insert-or-replace; returns true iff the era was not present before.
  def addEra(era: Era): F[Boolean] =
    erasRef.modify { es =>
      es.updated(era.keyBlockHash, era) -> !es.contains(era.keyBlockHash)
    }
  def getEra(keyBlockHash: BlockHash): F[Option[Era]] =
    erasRef.get.map(_.get(keyBlockHash))
  // Not needed by the tests that use this mock; calling them throws.
  def getChildEras(keyBlockHash: BlockHash): F[Set[Era]] = ???
  def getChildlessEras: F[Set[Era]] = ???
}
object MockEraStorage {
  // Construct a mock with an empty backing map inside the effect F.
  def apply[F[_]: Sync] =
    for {
      erasRef <- Ref.of[F, Map[BlockHash, Era]](Map.empty)
    } yield new MockEraStorage(erasRef)
}
|
import re
import random
import numpy as np
import pandas as pd
from keras.preprocessing import sequence
# BUGFIX: Tokenizer was used below but never imported (NameError at runtime).
from keras.preprocessing.text import Tokenizer
from keras.models import Sequential
from keras.layers import Dense, Embedding
from keras.layers import LSTM
from sklearn.model_selection import train_test_split

# Load the dataset of story texts and binary labels.
df = pd.read_csv('stories_dataset.csv')

# Split into training and test set (80/20, fixed seed for reproducibility).
X = df['stories']
y = df['labels']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Create movie summary tokenizer.
# BUGFIX: Keras' Tokenizer lower-casing argument is `lower`, not `to_lower`.
tokenizer = Tokenizer(lower=True, split=" ")
tokenizer.fit_on_texts(X_train)
vocab_size = len(tokenizer.word_index) + 1  # +1 for the reserved 0 index

# Sequence encode each example as lists of word indices.
X_train = tokenizer.texts_to_sequences(X_train)
X_test = tokenizer.texts_to_sequences(X_test)

# Pad them out to a fixed maximum length of 1000 tokens.
x_train = sequence.pad_sequences(X_train, maxlen=1000)
x_test = sequence.pad_sequences(X_test, maxlen=1000)

# Create the model: embedding -> LSTM -> sigmoid for binary classification.
model = Sequential()
model.add(Embedding(vocab_size, 256))
model.add(LSTM(1024))
model.add(Dense(1, activation='sigmoid'))

# Compile the model.
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Fit the model.
model.fit(x_train, y_train, validation_data=(x_test, y_test), batch_size=64, epochs=20)
# Add the default RSA key to the ssh-agent, storing its passphrase in the
# macOS Keychain. NOTE(review): `-K` is the legacy Apple flag — newer macOS
# versions use `--apple-use-keychain`; confirm the target OS version.
ssh-add -K ~/.ssh/id_rsa
|
<filename>platforms/android/kirin-for-android/kirin-lib/src/main/java/com/futureplatforms/kirin/generated/location/KirinLocationData.java
package com.futureplatforms.kirin.generated.location;
/**
* This is a Location data object. It is produced by the device, and then passed into the Javascript.
*/
/**
 * This is a Location data object. It is produced by the device, and then passed into the Javascript.
 */
public interface KirinLocationData {
    // Latitude/longitude — presumably decimal degrees (TODO confirm units).
    void setLatitude(double latitude);
    double getLatitude();
    void setLongitude(double longitude);
    double getLongitude();
    // Fix timestamp — epoch-based, exact unit (s vs ms) not shown here; TODO confirm.
    void setTimestamp(double timestamp);
    double getTimestamp();
    // Horizontal accuracy of the fix — presumably meters (TODO confirm).
    void setHorizontalAccuracy(double horizontalAccuracy);
    double getHorizontalAccuracy();
}
/**
 * Resolves the JCR node at the given absolute path and asks the document
 * store to create a projection for it.
 * NOTE(review): {@code session} and {@code documentStore} are instance fields
 * declared outside this snippet — confirm their lifecycle/thread-safety.
 */
public void createProjection(String absNodePath) {
    try {
        Node node = session.getNode(absNodePath);
        documentStore.createProjection(node);
    } catch (RepositoryException e) {
        // Handle repository exception
        // NOTE(review): the failure is printed and then swallowed, so callers
        // cannot observe it — consider logging properly or rethrowing.
        e.printStackTrace();
    }
}
import numpy as np
from sklearn.datasets import load_boston
from keras.models import Sequential
from keras.layers import Dense

# Load the Boston housing dataset.
# NOTE(review): load_boston was removed in scikit-learn 1.2 — pin an older
# sklearn or switch datasets if this script must run on current versions.
data = load_boston()

# Define the input (X) and output (y).
X = data['data']
y = data['target']

# Define the model.
# BUGFIX: the input layer was hard-coded to input_shape=(3,), but the Boston
# feature matrix has 13 columns, so model.fit(X, y) raised a shape mismatch.
# Deriving the shape from X also generalizes the script to other datasets.
model = Sequential()
model.add(Dense(4, input_shape=(X.shape[1],), activation='relu'))
model.add(Dense(1))

# Compile the model (mean squared error regression).
model.compile(optimizer='adam', loss='mse')

# Fit the model.
model.fit(X, y, epochs=200, verbose=0)
package pulse.tasks.listeners;
/**
 * Listener notified when the result format changes.
 */
public interface ResultFormatListener {
    /**
     * Invoked after the result format has changed.
     * @param rfe event describing the format change
     */
    public void resultFormatChanged(ResultFormatEvent rfe);
}
|
#!/bin/bash
#
# Load the USC remote-boot LaunchAgent and LaunchDaemon so they start at
# login/boot. Intended to be run from a deployment tool; always exits 0 so
# the surrounding install does not fail (launchctl prints its own diagnostics
# when a job is already loaded).

# Use a fixed, minimal PATH.
# BUGFIX: the original wrote `PATH=... export PATH` on one line with no
# separator; in bash that prefix-assignment may only apply to the `export`
# command itself rather than the rest of the script, so set and export
# explicitly as two statements.
PATH=/bin:/usr/bin:/sbin:/usr/sbin
export PATH

launchctl load -w /Library/LaunchAgents/edu.usc.remotebooter.plist
launchctl load -w /Library/LaunchDaemons/edu.usc.remotebootpicker.plist

exit 0
import { baseApi } from '../../app/api/base';
// API slice for the "home" entity, built from the shared baseApi factory.
// No custom endpoint resolvers are added here.
export const homeApi = baseApi({
    entityTypes: ['home'],
    reducerPath: 'home',
    resolvers: (builder) => ({})
});
// Hooks and endpoints generated by baseApi.
// NOTE(review): the hook names imply baseApi declares loadHome,
// loadPagingHome and deleteHome endpoints internally — confirm.
export const { useLoadHomeQuery, useLoadPagingHomeQuery, useDeleteHomeMutation } = homeApi;
export const {
    endpoints: { loadHome, loadPagingHome, deleteHome }
} = homeApi;
|
'use strict';
/*
Approach here is to have this service *own* all state. In that, no changes should be made to state directly.
This service should be used to retrieve pieces of state, and pass the name of the playlist to an action method
that will then actually make the modification to state. Then, either state is emitted or returned by the method.
This keeps a single source of truth for all less-transient data (aka less transient than search) and all
workings of state are, as much as possible, abstracted away from all other entities within the app.
It also means that there is a single chain of events
1. Entities reach out for pieces of state/data, retrieved from this service
2. That data is then presented to the user as something that can be acted upon
3. When acted upon, entities provide an action (by calling a particular method)
as well as the information necessary to identify what to change (playlist name)
and what is to be added/removed/modified.
To make this even more effective, maintainable and higher performing, RxJS (a library for which exists in AngularJS/ng 1)
and its Observables would likely be of great help. Not too sure how well a full redux framework would work with
AngularJS, however there are several implementations for Vanillajs, React, and Angular 2+.
*/
(function () {
    angular.module('app')
        .service('PlaylistService', function ($rootScope) {
            // Single source of truth for playlist state; consumers read via the
            // getters and mutate only through the action methods below.
            var playlists = [];
            var maxSongCount = 10; // hard cap of songs per playlist

            // Creates an empty playlist. Returns false if the name is taken.
            this.addNewPlaylist = function (playlistName) {
                var index = this.findPlaylistByName(playlistName);
                if (index === -1) {
                    playlists.push({
                        name: playlistName,
                        songs: [],
                        note: '',
                        image: ''
                    });
                    return true;
                }
                return false;
            }
            this.getAllPlaylists = function () {
                return playlists;
            }
            this.getAllPlaylistsNames = function () {
                var names = [];
                for (var i = 0; i < playlists.length; i++) {
                    names.push(playlists[i].name);
                }
                return names;
            }
            // Deletes the named playlist. Returns true on success.
            // BUGFIX: the original did `playlists = playlists.splice(index, 1)`.
            // Array.prototype.splice mutates in place and RETURNS THE REMOVED
            // elements, so that reassignment replaced the whole list with the
            // one deleted playlist. Just splice without reassigning.
            this.deletePlaylist = function (playlistName) {
                var index = this.findPlaylistByName(playlistName);
                if (index >= 0) {
                    playlists.splice(index, 1);
                    return true;
                }
                return false;
            }
            // Renames a playlist if the new name is not already in use.
            // BUGFIX: findPlaylistByName returns an INDEX (-1 when absent); the
            // original truthiness check treated -1 ("not found") as taken-check
            // success only by accident and index 0 as "not unique". Compare
            // against -1 explicitly, and guard against a missing source playlist.
            this.changePlaylistName = function (playlistName, newPlaylistName) {
                if (this.findPlaylistByName(newPlaylistName) === -1) {
                    var index = this.findPlaylistByName(playlistName);
                    if (index >= 0) {
                        playlists[index].name = newPlaylistName;
                        $rootScope.$broadcast('NameChangedFor' + playlistName, playlists[index]);
                        return true;
                    }
                }
                return false;
            }
            // Sadly ng-repeat has "issues" with duplicates, so you can't include your favorite song twice
            this.addSongToPlaylist = function (song, playlistName) {
                var index = this.findPlaylistByName(playlistName);
                var playlist = playlists[index];
                for (var i = 0; i < playlist.songs.length; i++) {
                    if (song.name === playlist.songs[i].name) {
                        return false;
                    }
                }
                if (playlist.songs.length < maxSongCount) {
                    playlist.songs.push(song);
                    $rootScope.$broadcast('SongAddedTo' + playlistName, playlist);
                    return true;
                }
                return false;
            }
            // Removes the song with the given name. Returns true on success.
            // BUGFIX: songs are objects (addSongToPlaylist compares song.name);
            // the original compared the name string to the song object itself,
            // so no song could ever match or be removed.
            this.removeSongFromPlaylist = function (songName, playlistName) {
                var index = this.findPlaylistByName(playlistName);
                var songIndex = -1;
                for (var i = 0; i < playlists[index].songs.length; i++) {
                    if (songName === playlists[index].songs[i].name) {
                        songIndex = i;
                    }
                }
                if (songIndex >= 0) {
                    playlists[index].songs.splice(songIndex, 1);
                    return true;
                }
                return false;
            }
            this.addNoteToPlaylist = function (note, playlistName) {
                var index = this.findPlaylistByName(playlistName);
                playlists[index].note = note;
            }
            this.addImageToPlaylist = function (imageSrc, playlistName) {
                var index = this.findPlaylistByName(playlistName);
                playlists[index].image = imageSrc;
                $rootScope.$broadcast('ImageAddedTo' + playlistName, imageSrc);
            }
            // BUGFIX: the parameter was misspelled `playListName` while the body
            // referenced `playlistName`, a ReferenceError on every call.
            this.removeImageFromPlaylist = function (playlistName) {
                var index = this.findPlaylistByName(playlistName);
                playlists[index].image = '';
            }
            // Returns the playlist's index, or -1 when no playlist has that name.
            this.findPlaylistByName = function (playlistName) {
                for (var i = 0; i < playlists.length; i++) {
                    if (playlists[i].name === playlistName) {
                        return i;
                    }
                }
                return -1;
            }
            this.getPlaylistByName = function (playlistName) {
                var index = this.findPlaylistByName(playlistName);
                return playlists[index];
            }
        });
})();
package one.microproject.rpi.hardware.gpio.sensors;
import com.pi4j.io.i2c.I2CBus;
import com.pi4j.io.i2c.I2CDevice;
import com.pi4j.io.i2c.I2CFactory;
import com.pi4j.system.SystemInfo;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
/**
* BMP-180 I2C
* 3.3V Bosch temperature and barometric pressure sensor.
* @author gergej
*
*/
public class BMP180 {
// Byte-order selectors for multi-byte register reads.
public final static int LITTLE_ENDIAN = 0;
public final static int BIG_ENDIAN = 1;
// The BMP180 returns its 16-bit values MSB first.
private final static int BMP180_ENDIANNESS = BIG_ENDIAN;
/*
Prompt> sudo i2cdetect -y 1
0 1 2 3 4 5 6 7 8 9 a b c d e f
00: -- -- -- -- -- -- -- -- -- -- -- -- --
10: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
20: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
30: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
40: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
50: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
60: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
70: -- -- -- -- -- -- -- 77
*/
// This next addresses is returned by "sudo i2cdetect -y 1", see above.
public final static int BMP180_ADDRESS = 0x77;
// Operating Modes (oversampling setting; higher = slower but finer pressure).
public final static int BMP180_ULTRALOWPOWER = 0;
public final static int BMP180_STANDARD = 1;
public final static int BMP180_HIGHRES = 2;
public final static int BMP180_ULTRAHIGHRES = 3;
// BMP180 Registers
public final static int BMP180_CAL_AC1 = 0xAA; // R Calibration data (16 bits)
public final static int BMP180_CAL_AC2 = 0xAC; // R Calibration data (16 bits)
public final static int BMP180_CAL_AC3 = 0xAE; // R Calibration data (16 bits)
public final static int BMP180_CAL_AC4 = 0xB0; // R Calibration data (16 bits)
public final static int BMP180_CAL_AC5 = 0xB2; // R Calibration data (16 bits)
public final static int BMP180_CAL_AC6 = 0xB4; // R Calibration data (16 bits)
public final static int BMP180_CAL_B1 = 0xB6; // R Calibration data (16 bits)
public final static int BMP180_CAL_B2 = 0xB8; // R Calibration data (16 bits)
public final static int BMP180_CAL_MB = 0xBA; // R Calibration data (16 bits)
public final static int BMP180_CAL_MC = 0xBC; // R Calibration data (16 bits)
public final static int BMP180_CAL_MD = 0xBE; // R Calibration data (16 bits)
// Control register and data/command registers.
public final static int BMP180_CONTROL = 0xF4;
public final static int BMP180_TEMPDATA = 0xF6;
public final static int BMP180_PRESSUREDATA = 0xF6;
public final static int BMP180_READTEMPCMD = 0x2E;
public final static int BMP180_READPRESSURECMD = 0x34;
// Per-chip calibration coefficients, populated by readCalibrationData().
private int cal_AC1 = 0;
private int cal_AC2 = 0;
private int cal_AC3 = 0;
private int cal_AC4 = 0;
private int cal_AC5 = 0;
private int cal_AC6 = 0;
private int cal_B1 = 0;
private int cal_B2 = 0;
private int cal_MB = 0;
private int cal_MC = 0;
private int cal_MD = 0;
// Debug logging toggle, read once from the i2c.verbose system property.
private static boolean verbose = "true".equals(System.getProperty("i2c.verbose", "false"));
private I2CBus bus;
private I2CDevice bmp180;
// Oversampling mode used by readRawPressure().
private int mode = BMP180_STANDARD;
/** Connects at the default I2C address (0x77). */
public BMP180()
{
    this(BMP180_ADDRESS);
}
/**
 * Connects to the sensor on I2C bus 1 at the given address and reads its
 * calibration coefficients. Failures are printed to stderr and swallowed —
 * NOTE(review): a failed constructor leaves bus/bmp180 null, so later
 * reads will NPE; consider failing fast.
 */
public BMP180(int address)
{
    try
    {
        // Get i2c bus
        bus = I2CFactory.getInstance(I2CBus.BUS_1); // Depends on the RasPI version
        if (verbose)
            System.out.println("Connected to bus. OK.");
        // Get device itself
        bmp180 = bus.getDevice(address);
        if (verbose)
            System.out.println("Connected to device. OK.");
        try { this.readCalibrationData(); }
        catch (Exception ex)
        { ex.printStackTrace(); }
    }
    catch (IOException e)
    {
        System.err.println(e.getMessage());
    } catch (I2CFactory.UnsupportedBusNumberException e) {
        System.err.println(e.getMessage());
    }
}
/**
 * Reads an unsigned byte from the given register.
 * NOTE(review): despite `throws Exception`, errors are caught, printed and
 * swallowed here, returning 0 — indistinguishable from a real 0 reading.
 */
private int readU8(int reg) throws Exception
{
    // "Read an unsigned byte from the I2C device"
    int result = 0;
    try
    {
        result = this.bmp180.read(reg);
        if (verbose)
            System.out.println("I2C: Device " + BMP180_ADDRESS + " returned " + result + " from reg " + reg);
    }
    catch (Exception ex)
    { ex.printStackTrace(); }
    return result;
}
/**
 * Reads a signed byte from the given register (two's-complement: values
 * above 127 are shifted down by 256).
 * NOTE(review): like readU8, errors are printed and swallowed, returning 0.
 */
private int readS8(int reg) throws Exception
{
    // "Reads a signed byte from the I2C device"
    int result = 0;
    try
    {
        result = this.bmp180.read(reg);
        if (result > 127)
            result -= 256;
        if (verbose)
            System.out.println("I2C: Device " + BMP180_ADDRESS + " returned " + result + " from reg " + reg);
    }
    catch (Exception ex)
    { ex.printStackTrace(); }
    return result;
}
/** Reads an unsigned 16-bit value, honoring BMP180_ENDIANNESS. */
private int readU16(int register) throws Exception
{
    int hi = this.readU8(register);
    int lo = this.readU8(register + 1);
    return (BMP180_ENDIANNESS == BIG_ENDIAN) ? (hi << 8) + lo : (lo << 8) + hi; // Big Endian
}
/**
 * Reads a signed 16-bit value: the high byte is read signed so its sign
 * propagates, the low byte unsigned; byte order per BMP180_ENDIANNESS.
 */
private int readS16(int register) throws Exception
{
    int hi = 0, lo = 0;
    if (BMP180_ENDIANNESS == BIG_ENDIAN)
    {
        hi = this.readS8(register);
        lo = this.readU8(register + 1);
    }
    else
    {
        lo = this.readS8(register);
        hi = this.readU8(register + 1);
    }
    return (hi << 8) + lo;
}
/**
 * Reads the 11 factory calibration coefficients from the chip's EEPROM
 * into the cal_* fields. AC4-AC6 are unsigned, the rest signed.
 */
public void readCalibrationData() throws Exception
{
    // Reads the calibration data from the IC
    cal_AC1 = readS16(BMP180_CAL_AC1); // INT16
    cal_AC2 = readS16(BMP180_CAL_AC2); // INT16
    cal_AC3 = readS16(BMP180_CAL_AC3); // INT16
    cal_AC4 = readU16(BMP180_CAL_AC4); // UINT16
    cal_AC5 = readU16(BMP180_CAL_AC5); // UINT16
    cal_AC6 = readU16(BMP180_CAL_AC6); // UINT16
    cal_B1 = readS16(BMP180_CAL_B1); // INT16
    cal_B2 = readS16(BMP180_CAL_B2); // INT16
    cal_MB = readS16(BMP180_CAL_MB); // INT16
    cal_MC = readS16(BMP180_CAL_MC); // INT16
    cal_MD = readS16(BMP180_CAL_MD); // INT16
    if (verbose)
        showCalibrationData();
}
private void showCalibrationData()
{
// Displays the calibration values for debugging purposes
System.out.println("DBG: AC1 = " + cal_AC1);
System.out.println("DBG: AC2 = " + cal_AC2);
System.out.println("DBG: AC3 = " + cal_AC3);
System.out.println("DBG: AC4 = " + cal_AC4);
System.out.println("DBG: AC5 = " + cal_AC5);
System.out.println("DBG: AC6 = " + cal_AC6);
System.out.println("DBG: B1 = " + cal_B1);
System.out.println("DBG: B2 = " + cal_B2);
System.out.println("DBG: MB = " + cal_MB);
System.out.println("DBG: MC = " + cal_MC);
System.out.println("DBG: MD = " + cal_MD);
}
    public int readRawTemp() throws Exception
    {
        // Reads the raw (uncompensated) temperature from the sensor:
        // writing READTEMPCMD to the control register starts a conversion,
        // and the code waits 5 ms before reading the 16-bit result.
        bmp180.write(BMP180_CONTROL, (byte)BMP180_READTEMPCMD);
        waitfor(5); // Wait 5ms
        int raw = readU16(BMP180_TEMPDATA);
        if (verbose)
            System.out.println("DBG: Raw Temp: " + (raw & 0xFFFF) + ", " + raw);
        return raw;
    }
    public int readRawPressure() throws Exception
    {
        // Reads the raw (uncompensated) pressure from the sensor.
        // The oversampling mode is placed in bits 6-7 of the control command;
        // higher modes need a longer conversion wait (values in ms below).
        bmp180.write(BMP180_CONTROL, (byte)(BMP180_READPRESSURECMD + (this.mode << 6)));
        if (this.mode == BMP180_ULTRALOWPOWER)
            waitfor(5);
        else if (this.mode == BMP180_HIGHRES)
            waitfor(14);
        else if (this.mode == BMP180_ULTRAHIGHRES)
            waitfor(26);
        else
            waitfor(8);
        // Assemble the result from three consecutive data registers, then
        // shift out the bits not populated at this oversampling setting.
        int msb = bmp180.read(BMP180_PRESSUREDATA);
        int lsb = bmp180.read(BMP180_PRESSUREDATA + 1);
        int xlsb = bmp180.read(BMP180_PRESSUREDATA + 2);
        int raw = ((msb << 16) + (lsb << 8) + xlsb) >> (8 - this.mode);
        if (verbose)
            System.out.println("DBG: Raw Pressure: " + (raw & 0xFFFF) + ", " + raw);
        return raw;
    }
public float readTemperature() throws Exception
{
// Gets the compensated temperature in degrees celcius
int UT = 0;
int X1 = 0;
int X2 = 0;
int B5 = 0;
float temp = 0.0f;
// Read raw temp before aligning it with the calibration values
UT = this.readRawTemp();
X1 = ((UT - this.cal_AC6) * this.cal_AC5) >> 15;
X2 = (this.cal_MC << 11) / (X1 + this.cal_MD);
B5 = X1 + X2;
temp = ((B5 + 8) >> 4) / 10.0f;
if (verbose)
System.out.println("DBG: Calibrated temperature = " + temp + " C");
return temp;
}
public float readPressure() throws Exception
{
// Gets the compensated pressure in pascal
int UT = 0;
int UP = 0;
int B3 = 0;
int B5 = 0;
int B6 = 0;
int X1 = 0;
int X2 = 0;
int X3 = 0;
int p = 0;
int B4 = 0;
int B7 = 0;
UT = this.readRawTemp();
UP = this.readRawPressure();
// You can use the datasheet values to test the conversion results
// boolean dsValues = true;
boolean dsValues = false;
if (dsValues)
{
UT = 27898;
UP = 23843;
this.cal_AC6 = 23153;
this.cal_AC5 = 32757;
this.cal_MB = -32768;
this.cal_MC = -8711;
this.cal_MD = 2868;
this.cal_B1 = 6190;
this.cal_B2 = 4;
this.cal_AC3 = -14383;
this.cal_AC2 = -72;
this.cal_AC1 = 408;
this.cal_AC4 = 32741;
this.mode = BMP180_ULTRALOWPOWER;
if (verbose)
this.showCalibrationData();
}
// True Temperature Calculations
X1 = (int)((UT - this.cal_AC6) * this.cal_AC5) >> 15;
X2 = (this.cal_MC << 11) / (X1 + this.cal_MD);
B5 = X1 + X2;
if (verbose)
{
System.out.println("DBG: X1 = " + X1);
System.out.println("DBG: X2 = " + X2);
System.out.println("DBG: B5 = " + B5);
System.out.println("DBG: True Temperature = " + (((B5 + 8) >> 4) / 10.0) + " C");
}
// Pressure Calculations
B6 = B5 - 4000;
X1 = (this.cal_B2 * (B6 * B6) >> 12) >> 11;
X2 = (this.cal_AC2 * B6) >> 11;
X3 = X1 + X2;
B3 = (((this.cal_AC1 * 4 + X3) << this.mode) + 2) / 4;
if (verbose)
{
System.out.println("DBG: B6 = " + B6);
System.out.println("DBG: X1 = " + X1);
System.out.println("DBG: X2 = " + X2);
System.out.println("DBG: X3 = " + X3);
System.out.println("DBG: B3 = " + B3);
}
X1 = (this.cal_AC3 * B6) >> 13;
X2 = (this.cal_B1 * ((B6 * B6) >> 12)) >> 16;
X3 = ((X1 + X2) + 2) >> 2;
B4 = (this.cal_AC4 * (X3 + 32768)) >> 15;
B7 = (UP - B3) * (50000 >> this.mode);
if (verbose)
{
System.out.println("DBG: X1 = " + X1);
System.out.println("DBG: X2 = " + X2);
System.out.println("DBG: X3 = " + X3);
System.out.println("DBG: B4 = " + B4);
System.out.println("DBG: B7 = " + B7);
}
if (B7 < 0x80000000)
p = (B7 * 2) / B4;
else
p = (B7 / B4) * 2;
if (verbose)
System.out.println("DBG: X1 = " + X1);
X1 = (p >> 8) * (p >> 8);
X1 = (X1 * 3038) >> 16;
X2 = (-7357 * p) >> 16;
if (verbose)
{
System.out.println("DBG: p = " + p);
System.out.println("DBG: X1 = " + X1);
System.out.println("DBG: X2 = " + X2);
}
p = p + ((X1 + X2 + 3791) >> 4);
if (verbose)
System.out.println("DBG: Pressure = " + p + " Pa");
return p;
}
    // Reference sea-level pressure in Pascal used by readAltitude();
    // defaults to the standard atmosphere (101325 Pa).
    private int standardSeaLevelPressure = 101325;

    // Lets callers calibrate altitude readings against a locally measured
    // sea-level pressure.
    public void setStandardSeaLevelPressure(int standardSeaLevelPressure)
    {
        this.standardSeaLevelPressure = standardSeaLevelPressure;
    }
    public double readAltitude() throws Exception
    {
        // Calculates the altitude in meters from the current pressure and the
        // configured sea-level reference, using the international barometric
        // formula (exponent 0.1903 = 1/5.255).
        double altitude = 0.0;
        float pressure = readPressure();
        altitude = 44330.0 * (1.0 - Math.pow(pressure / standardSeaLevelPressure, 0.1903));
        if (verbose)
            System.out.println("DBG: Altitude = " + altitude);
        return altitude;
    }
protected static void waitfor(long howMuch)
{
try { Thread.sleep(howMuch); } catch (InterruptedException ie) { ie.printStackTrace(); }
}
    public static void main(String[] args)
    {
        // Demo entry point: reads pressure, altitude and temperature once
        // from the sensor and prints them, then the Pi's CPU stats.
        final NumberFormat NF = new DecimalFormat("##00.00");
        BMP180 sensor = new BMP180();
        float press = 0;
        float temp = 0;
        double alt = 0;
        try { press = sensor.readPressure(); }
        catch (Exception ex)
        {
            System.err.println(ex.getMessage());
            ex.printStackTrace();
        }
        // Using the just-measured pressure as the sea-level reference makes
        // the altitude below read ~0.
        sensor.setStandardSeaLevelPressure((int)press); // As we ARE at the sea level (in San Francisco).
        try { alt = sensor.readAltitude(); }
        catch (Exception ex)
        {
            System.err.println(ex.getMessage());
            ex.printStackTrace();
        }
        try { temp = sensor.readTemperature(); }
        catch (Exception ex)
        {
            System.err.println(ex.getMessage());
            ex.printStackTrace();
        }
        System.out.println("Temperature: " + NF.format(temp) + " C");
        System.out.println("Pressure : " + NF.format(press / 100) + " hPa"); // Pa -> hPa
        System.out.println("Altitude : " + NF.format(alt) + " m");
        // Bonus : CPU Temperature
        try
        {
            System.out.println("CPU Temperature : " + SystemInfo.getCpuTemperature());
            System.out.println("CPU Core Voltage : " + SystemInfo.getCpuVoltage());
        }
        catch (InterruptedException ie)
        {
            ie.printStackTrace();
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }
    }
}
|
def check_cycle(head):
    """Return True if the linked list starting at ``head`` contains a cycle.

    Uses Floyd's tortoise-and-hare: the fast pointer advances two nodes per
    step, the slow one advances one; they can only meet if a cycle exists.
    Runs in O(n) time and O(1) space.
    """
    tortoise = head
    hare = head
    while hare and hare.next:
        tortoise = tortoise.next
        hare = hare.next.next
        if tortoise == hare:
            return True
    return False
<reponame>paynejacob/rke<filename>vendor/golang.org/x/sys/unix/ioctl.go
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
package unix
import (
"runtime"
"unsafe"
)
// ioctl itself should not be exposed directly, but additional get/set
// functions for specific types are permissible.
// IoctlSetInt performs an ioctl operation which sets an integer value
// on fd, using the specified request number.
func IoctlSetInt(fd int, req uint, value int) error {
	// The integer is passed directly in the ioctl argument word.
	return ioctl(fd, req, uintptr(value))
}
// IoctlSetWinsize performs an ioctl on fd with a *Winsize argument.
//
// To change fd's window size, the req argument should be TIOCSWINSZ.
func IoctlSetWinsize(fd int, req uint, value *Winsize) error {
	// TODO: if we get the chance, remove the req parameter and
	// hardcode TIOCSWINSZ.
	err := ioctl(fd, req, uintptr(unsafe.Pointer(value)))
	// Keep value reachable until the syscall returns: only a uintptr was
	// passed, which the GC does not treat as a reference.
	runtime.KeepAlive(value)
	return err
}
// IoctlSetTermios performs an ioctl on fd with a *Termios.
//
// The req value will usually be TCSETA or TIOCSETA.
func IoctlSetTermios(fd int, req uint, value *Termios) error {
	// TODO: if we get the chance, remove the req parameter.
	err := ioctl(fd, req, uintptr(unsafe.Pointer(value)))
	// Keep value reachable until the syscall returns (see IoctlSetWinsize).
	runtime.KeepAlive(value)
	return err
}
// IoctlGetInt performs an ioctl operation which gets an integer value
// from fd, using the specified request number.
func IoctlGetInt(fd int, req uint) (int, error) {
	var value int
	// The kernel writes the result through the pointer before ioctl returns.
	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
	return value, err
}
// IoctlGetWinsize performs an ioctl on fd, returning the Winsize
// structure filled in by the kernel (req is usually TIOCGWINSZ).
func IoctlGetWinsize(fd int, req uint) (*Winsize, error) {
	var value Winsize
	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
	return &value, err
}
// IoctlGetTermios performs an ioctl on fd, returning the Termios
// structure filled in by the kernel (req is usually TCGETA or TIOCGETA).
func IoctlGetTermios(fd int, req uint) (*Termios, error) {
	var value Termios
	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
	return &value, err
}
|
<filename>eiseg/data/datasets/human.py
from pathlib import Path
import os
import cv2
import numpy as np
from data.base import ISDataset
from data.sample import DSample
class HumanDataset(ISDataset):
    """Interactive-segmentation dataset of human images.

    Reads a split file (``train_mini.txt`` or ``val_mini.txt``) whose lines
    are ``<image_path> <mask_path>`` pairs relative to ``dataset_path``.
    """

    def __init__(self, dataset_path, split='train', **kwargs):
        super(HumanDataset, self).__init__(**kwargs)
        self.mode = split.lower()
        self.path = dataset_path
        # Any split other than 'train' falls back to the validation list.
        list_name = 'train_mini.txt' if self.mode == 'train' else 'val_mini.txt'
        file_path = os.path.join(self.path, list_name)
        self.dataset_samples = []
        with open(file_path, 'r') as f:
            for raw_line in f:
                entry = raw_line.strip()
                if entry != '':
                    self.dataset_samples.append(entry)

    def get_sample(self, index):
        entry = self.dataset_samples[index].split(' ')
        if 'person_detection__ds' in entry[0]:
            # Drop the second-to-last path component from both paths
            # (the on-disk layout differs from the listing for this subset).
            img_dir, img_name = entry[0].rsplit('/', 1)
            entry[0] = img_dir.rsplit('/', 1)[0] + '/' + img_name
            msk_dir, msk_name = entry[1].rsplit('/', 1)
            entry[1] = msk_dir.rsplit('/', 1)[0] + '/' + msk_name
        image_path = os.path.join(self.path, entry[0])
        mask_path = os.path.join(self.path, entry[1])
        image = cv2.cvtColor(cv2.imread(image_path), cv2.COLOR_BGR2RGB)
        # Collapse the 3-channel mask to one channel, then binarize to {0, 1}.
        instances_mask = np.max(cv2.imread(mask_path).astype(np.int32), axis=2)
        instances_mask[instances_mask > 0] = 1
        return DSample(image, instances_mask, objects_ids=[1], sample_id=index)
|
<reponame>KharkovIT/PP
package com.ua.nure.TestHelper.controller;
import com.ua.nure.TestHelper.domain.Template;
import com.ua.nure.TestHelper.domain.Test;
import com.ua.nure.TestHelper.domain.Test4Group;
import com.ua.nure.TestHelper.service.Test4GroupService;
import com.ua.nure.TestHelper.service.TestService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.util.List;
@RequestMapping("tests")
@RestController
public class TestController {

    // Service beans are field-injected by Spring.
    @Autowired
    TestService testService;
    @Autowired
    Test4GroupService test4GroupService;

    /**
     * Lists the teacher's tests that are not already assigned to the group.
     * NOTE(review): a NullPointerException from the service is swallowed and
     * null is returned; presumably the client treats null as "no tests" -
     * confirm before changing.
     */
    @CrossOrigin
    @RequestMapping(value = "/getTeacherTest", method = RequestMethod.GET)
    public List<Test> getTeacherTest(@RequestParam("user") String userId, @RequestParam("group") String group) {
        try {
            // Logged once and queried a second time for the response.
            System.out.println(testService.getAllbyTeacherNotIn(group, userId));
            return testService.getAllbyTeacherNotIn(group, userId);
        } catch (NullPointerException e) {
            System.out.println("no no no");
        }
        return null;
    }

    /**
     * Assigns an existing test to a group; returns the saved association,
     * or null when the service throws a NullPointerException.
     */
    @CrossOrigin
    @RequestMapping(value = "/addOldTest", method = RequestMethod.GET)
    public Test4Group addOldTest(@RequestParam("idGroup") String groupId, @RequestParam("idTest") String idTest) {
        try {
            Test4Group addedTest = new Test4Group();
            // addedTest.setIdTest(Integer.valueOf(idTest));
            addedTest.setIdTest(idTest);
            addedTest.setIdGroup(groupId);
            return test4GroupService.addTest(addedTest);
        } catch (NullPointerException e) {
            System.out.println("no no no");
        }
        return null;
    }

    /**
     * Creates a new test from the given id, name and template id.
     * NOTE(review): a GET endpoint mutating state - verify the client relies
     * on this before changing the HTTP method.
     */
    @CrossOrigin
    @RequestMapping(value = "/add", method = RequestMethod.GET)
    public void addTest(@RequestParam(name = "idTest") String idTest,
                        @RequestParam(name = "name") String name,
                        @RequestParam(name = "template") String Idtemplate) throws IOException {
        Test newTest = new Test();
        newTest.setIdTest(idTest);
        newTest.setName(name);
        newTest.setIdTemplate(Long.parseLong(Idtemplate));
        testService.addTest(newTest);
    }
}
|
<gh_stars>0
package aula5;
import java.util.Scanner;
public class Exercicio1 {
    // Reads two integers from standard input and prints their sum.
    public static void main(String[] args) {
        Scanner input = new Scanner(System.in);
        System.out.println("Digite um numero:");
        int primeiro = input.nextInt();
        System.out.println("Digite um numero:");
        int segundo = input.nextInt();
        int resultado = primeiro + segundo;
        System.out.println("A soma é: " + resultado);
        input.close();
    }
}
|
'use strict'
function initSlider() { // Initiating the slider with minRange and maxRange
    const formEl = document.getElementById('slider-form')
    // FIX: use the listener's event parameter. The previous code referenced
    // the deprecated global `window.event`, which is undefined inside arrow
    // functions on Firefox and in strict-mode contexts, so preventDefault()
    // would throw there.
    formEl.addEventListener('click', (event) => { event.preventDefault() })
    const minRange = +document.getElementById('min-value').value
    const maxRange = +document.getElementById('max-value').value
    sliderCmp(minRange, maxRange)
}
// Slider component start
// Slider component start
// Builds the slider over [minRange, maxRange]; the handle ("pivot") starts
// at currentPos (defaults to the midpoint). All state lives in this closure.
function sliderCmp(minRange, maxRange, currentPos = (minRange + maxRange) / 2) {
    const range = maxRange - minRange
    const pathEl = document.getElementById('slider-path')
    const pivotEl = document.getElementById('pivot')
    const pivotAbsPos = 58; // Absolute X position of the pivot (the slider handle)
    const circle = document.getElementById('circle')
    const leftTransitionProp = pivotEl.style.transition // The CSS inline style transition
    let currentValue = currentPos
    setNewPosToPivot(null, currentValue) // Setting the pivot on pivot path
    registerEventListeners()
    function registerEventListeners() {
        pathEl.addEventListener('click', moveSlider)
        pivotEl.addEventListener('mousedown', moveSlider) // Listener for dragging
        window.addEventListener('resize', onWindowResize)
        circle.addEventListener('mousedown', animateCircle)
    }
    function animateCircle() { // Changing the pivot size for 1s
        circle.classList.add('shape-circle')
        setTimeout(() => {
            circle.classList.remove('shape-circle')
        }, 1000);
    }
    function onWindowResize() {
        // Re-project the current value onto the resized path.
        setNewPosToPivot(null, currentValue)
    }
    function getPathMargins() { // Calc the areas between ends of pivot path to client window
        return getWindowWidth() - getPathWidth()
    }
    function getPathWidth() {
        // Strip the trailing 'px' from the computed width.
        const pathWidthStr = getComputedStyle(pathEl).width
        return +pathWidthStr.slice(0, pathWidthStr.indexOf('px'))
    }
    function getWindowWidth() {
        return Math.max(document.documentElement.clientWidth, window.innerWidth || 0);
    }
    // Handles both click-to-jump and mousedown-to-drag on the slider.
    function moveSlider(ev) {
        const eventType = ev.type
        const xPos = ev.clientX
        if (isEventInsideSliderPath(xPos)) {
            switch (eventType) {
                case 'mousedown':
                    animateCircle()
                    // NOTE(review): assigns a bare number to style.left (no
                    // 'px' suffix) - presumably relied upon; confirm it takes
                    // effect in standards mode.
                    pivotEl.style.left = ev.clientX - pivotAbsPos;
                    registerToDragSlider();
                    return
                default:
                    setNewPosToPivot(ev)
            }
        }
    }
    function isEventInsideSliderPath(xPos) {
        // The path is horizontally centered, so half the margin sits on each side.
        return xPos > getPathMargins() / 2 && xPos < (getWindowWidth() - getPathMargins() / 2) ? true : false
    }
    // Moves the handle either from a mouse event or from a slider value.
    function setNewPosToPivot(ev, currentValue) {
        const xPos = ev
            ? ev.clientX
            : getXposFromSliderValue(currentValue)
        if (isEventInsideSliderPath(xPos)) {
            pivotEl.style.left = xPos - pivotAbsPos
            const value = ev
                ? getSliderValueFromXpos(xPos)
                : currentValue - minRange
            setCurrentValue(value)
        }
    }
    // Linear mapping: slider value -> viewport X coordinate.
    function getXposFromSliderValue(value) {
        return ((value - minRange) / range * getPathWidth()) + (getPathMargins() / 2)
    }
    // Inverse mapping: viewport X coordinate -> value offset from minRange.
    function getSliderValueFromXpos(xPos) {
        return (xPos - (getPathMargins() / 2)) * range / getPathWidth()
    }
    function registerToDragSlider() {
        window.addEventListener('mousemove', onDrag)
        window.addEventListener('mouseup', UnRegisterFromDragSlider)
    }
    function onDrag(ev) {
        pivotEl.style.transition = '' // Cancel CSS transition on drag
        setNewPosToPivot(ev)
    }
    function UnRegisterFromDragSlider() {
        window.removeEventListener('mouseup', UnRegisterFromDragSlider)
        window.removeEventListener('mousemove', onDrag)
        pivotEl.style.transition = leftTransitionProp // Re-add CSS transition on drag
    }
    function setCurrentValue(value) { // Render slider's value
        // `value` is an offset from minRange; store/display the absolute value.
        currentValue = Math.round(value) + minRange
        document.getElementById('current-value').innerText = currentValue
    }
}
|
<gh_stars>0
/* See LICENSE file for copyright and license details. */
#ifndef UTIL_H
#define UTIL_H
#include <stddef.h>
#include <stdint.h>
/* Number of elements in a statically-sized array. */
#define LEN(x) (sizeof (x) / sizeof *(x))

/* A [lower, upper] interval of 32-bit values
 * (NOTE(review): presumably inclusive - confirm against the users). */
struct range {
	uint32_t lower;
	uint32_t upper;
};

/* A list of `len` ranges stored contiguously at `data`. */
struct range_list {
	struct range *data;
	size_t len;
};

/* 16-slot (0,...,15) optionally undetermined binary state */
struct heisenstate {
	uint_least16_t determined; /* NOTE(review): presumably bit i set => slot i determined */
	uint_least16_t state;      /* NOTE(review): presumably bit i = slot i's value */
};

int heisenstate_get(struct heisenstate *, int);
int heisenstate_set(struct heisenstate *, int, int);
#endif /* UTIL_H */
|
<filename>app/models/report.rb
# A course-condition report filed by a source.
class Report < ActiveRecord::Base
  Statuses = ["Open", "Closed"]
  Weathers = ["Good", "Fair", "Poor"]
  Greens = ["Summer", "Mixed", "Winter"]
  Reasons = ["Frozen Ground", "Saturation", "Snow"]

  belongs_to :source

  validates :source_id, presence: true
  validates :weather, presence: true, inclusion: { in: Weathers }
  validates :status, presence: true, inclusion: { in: Statuses }
  # Greens only matter while the course is open; a reason only matters when
  # the greens are not full summer greens.
  validates :greens, presence: true, inclusion: { in: Greens }, if: -> { status != "Closed" }
  validates :reason, presence: true, inclusion: { in: Reasons }, if: -> { greens.present? && greens != "Summer" }

  # Blank out fields that are meaningless for the chosen status/greens.
  before_validation do
    self.greens = nil if status == "Closed"
    self.reason = nil if greens.nil? || greens == "Summer"
  end

  # Human-readable summary of the report.
  def description
    sentences = ["The course is #{status.downcase}."]
    if status == "Closed"
      # Closed with poor weather: probably the weather's fault. Otherwise we
      # don't know why it's closed, so keep it short.
      sentences << "Weather is #{weather.downcase}." if weather == "Poor"
    else
      sentences << "Weather is #{weather.downcase}."
      sentences << greens_detail
    end
    sentences.join(" ")
  end

  private

  # One sentence describing the greens (nil for an unrecognized value).
  def greens_detail
    case greens
    when "Summer" then "Summer greens."
    when "Mixed"  then "Summer greens/some temporary greens (due to #{reason.downcase})."
    when "Winter" then "Winter greens (due to #{reason.downcase})."
    end
  end
end
|
<reponame>LarsSaalbrink/Sub-IoT-sdu<filename>stack/framework/hal/chips/si4460/si4460_interface.h
/*
* Copyright (c) 2015-2021 University of Antwerp, Aloxy NV.
*
* This file is part of Sub-IoT.
* See https://github.com/Sub-IoT/Sub-IoT-Stack for further info.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
*
* Authors:
* <EMAIL>
*/
#ifndef SI4455_INTERFACE_H
#define SI4455_INTERFACE_H
#ifdef __cplusplus
extern "C" {
#endif
#include <stdint.h>
#include <stdbool.h>
#include "ecode.h"
#include "hwradio.h"
// Error code definitions of EZRadioDrv.
// Each plugin gets a distinct sub-range of ECODE_EMDRV_EZRADIODRV_BASE.
#define ECODE_EMDRV_EZRADIODRV_OK ( ECODE_OK ) ///< Success return value.
#define ECODE_EMDRV_EZRADIODRV_ILLEGAL_HANDLE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000001 ) ///< Illegal SPI handle.
#define ECODE_EMDRV_EZRADIODRV_TRANSMIT_PLUGIN_BASE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000100 ) ///< Transmit plugin base error code.
#define ECODE_EMDRV_EZRADIODRV_RECEIVE_PLUGIN_BASE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000200 ) ///< Receive plugin base error code.
#define ECODE_EMDRV_EZRADIODRV_CRC_ERROR_PLUGIN_BASE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000300 ) ///< CRC error plugin base error code.
#define ECODE_EMDRV_EZRADIODRV_AUTO_ACK_PLUGIN_BASE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000400 ) ///< Receive plugin base error code.
#define ECODE_EMDRV_EZRADIODRV_UNMOD_CARRIER_PLUGIN_BASE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000500 ) ///< Receive plugin base error code.
#define ECODE_EMDRV_EZRADIODRV_PN9_PLUGIN_BASE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000600 ) ///< Receive plugin base error code.
#define ECODE_EMDRV_EZRADIODRV_DIRECT_TRANSMIT_PLUGIN_BASE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000700 ) ///< Transmit plugin base error code.
#define ECODE_EMDRV_EZRADIODRV_DIRECT_RECEIVE_PLUGIN_BASE ( ECODE_EMDRV_EZRADIODRV_BASE | 0x00000800 ) ///< Receive plugin base error code.
// Transmit plugin related error codes
#define ECODE_EMDRV_EZRADIODRV_TRANSMIT_FAILED ( ECODE_EMDRV_EZRADIODRV_TRANSMIT_PLUGIN_BASE | 0x00000001 ) ///< Unable to start transmission.

/* Callback type invoked on a radio interrupt. */
typedef void (*ezradio_int_callback_t)();

/* Initializes the radio and registers the interrupt callback. */
void ezradioInit(ezradio_int_callback_t cb);
/* Flushes both the TX and RX FIFOs. */
void ezradioResetTRxFifo(void);
/* Starts reception on `channel`; `packet_handler` selects packet mode. */
Ecode_t ezradioStartRx(uint8_t channel, bool packet_handler);
/* Transmits `packet` on `channel_id`; optionally re-enters RX afterwards. */
Ecode_t ezradioStartTx(hw_radio_packet_t* packet, uint8_t channel_id, bool rx_after, uint8_t data_length);
/* Emits an unmodulated carrier on `channel_id` (test mode). */
Ecode_t ezradioStartTxUnmodelated(uint8_t channel_id);
/* Debug helper: renders x as an 8-character binary string. */
const char *byte_to_binary(uint8_t x);
#ifdef __cplusplus
}
#endif
#endif /* SI4455_INTERFACE_H */
|
<filename>src/ui/add-button.js
import { styled } from 'linaria/react'
// Primary "add" action button: green pill with white bold text; turns grey
// and shows a not-allowed cursor while disabled.
export const AddButton = styled.button`
  height: 2rem;
  background-color: var(--green);
  border-radius: 3px;
  color: #ffffff;
  font-weight: bold;
  border: none;
  cursor: pointer;
  font: inherit;
  min-width: 100px;
  &:disabled {
    background-color: lightgray;
    cursor: not-allowed;
  }
`
|
/*!
* \brief Record the basic usage of MPI_Send/MPI_Recv and MPI_ISend/MPI_IRecv.
*/
#include "mpi.h"
#include <stdio.h>
#include <stdlib.h>
#define MASTER 0
int main (int argc, char *argv[]) {
int num_tasks, rank, rc;
MPI_Init(&argc, &argv);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
MPI_Comm_size(MPI_COMM_WORLD, &num_tasks);
// Need an even number of tasks
if (num_tasks % 2 != 0) {
if (rank == MASTER) {
printf("Quitting. Need an even number of tasks: num_tasks=%d\n", num_tasks);
MPI_Abort(MPI_COMM_WORLD, rc);
exit(1);
}
}
if (rank == MASTER)
printf("MASTER: Number of MPI tasks is: %d\n",num_tasks);
// Test nonblocking MPI_Isend and MPI_Irecv.
{
int partner, message;
MPI_Status stats[2];
MPI_Request reqs[2];
// Determine partner and then send/receive with partne.
if (rank < num_tasks/2)
partner = num_tasks/2 + rank;
else if (rank >= num_tasks/2)
partner = rank - num_tasks/2;
// The tag of sending and receiving shoule be the same.
// MPI_Irecv (&buf, count, datatype, source, tag, comm, &request)
MPI_Irecv(&message, 1, MPI_INT, partner, 123, MPI_COMM_WORLD, &reqs[0]);
// MPI_Isend (&buf, count, datatype, dest, tag, comm, &request)
MPI_Isend(&rank, 1, MPI_INT, partner, 123, MPI_COMM_WORLD, &reqs[1]);
// Now block until requests are complete.
// MPI_Waitall (count,&array_of_requests,&array_of_statuses)
MPI_Waitall(2, reqs, stats);
// Print partner info and exit.
printf("[nonblocking] Task %d is partner with %d\n", rank, message);
}
MPI_Barrier(MPI_COMM_WORLD);
// Test blocking MPI_Send and MPI_Recv.
{
int partner, message;
MPI_Status status;
// Determine partner and then send/receive with partner.
if (rank < num_tasks/2) {
partner = num_tasks/2 + rank;
// MPI_Send (&buf, count, datatype, dest, tag, comm)
MPI_Send(&rank, 1, MPI_INT, partner, 123, MPI_COMM_WORLD);
MPI_Recv(&message, 1, MPI_INT, partner, 123, MPI_COMM_WORLD, &status);
}
else if (rank >= num_tasks/2) {
partner = rank - num_tasks/2;
// MPI_Recv (&buf, count, datatype, source, tag, comm, &status)
MPI_Recv(&message, 1, MPI_INT, partner, 123, MPI_COMM_WORLD, &status);
MPI_Send(&rank, 1, MPI_INT, partner, 123, MPI_COMM_WORLD);
}
int count;
rc = MPI_Get_count(&status, MPI_INT, &count);
printf("[blocking] Task %d is partner with %d, received %d int(s) with tag %d\n",
rank, message, count, status.MPI_TAG);
}
MPI_Finalize();
}
|
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Or<NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
#ifndef HND_DEBUG_VISUALIZER_H
#define HND_DEBUG_VISUALIZER_H
#include <vector>
#include "hnd_tracked_point.hpp"
#include <astra/capi/streams/stream_types.h>
namespace astra { namespace hand {
using namespace std;
// Renders hand-tracking debug overlays (crosshairs, masks, normalized
// matrices) directly into an RGB astra image frame.
class debug_visualizer
{
public:
    // Draws a 3x3 crosshair at each tracked point, color-coded:
    // red = active point that lost tracking, green = active point,
    // yellow = candidate point; the corners of the 3x3 cell are blacked out.
    void overlay_crosshairs(const vector<tracked_point>& points,
                            _astra_imageframe& imageFrame)
    {
        int width = imageFrame.metadata.width;
        int height = imageFrame.metadata.height;
        uint8_t* colorData = static_cast<uint8_t*>(imageFrame.data);
        uint8_t bytesPerPixel;
        astra_pixelformat_get_bytes_per_pixel(imageFrame.metadata.pixelFormat, &bytesPerPixel);
        for (auto iter = points.begin(); iter != points.end(); ++iter)
        {
            tracked_point tracked = *iter;
            Point2i position = tracked.position;
            bool isActivePoint = tracked.pointType == tracked_point_type::active_point;
            bool isLostTrackingPoint = isActivePoint && tracked.trackingStatus == tracking_status::lost;
            // Clamp the 3x3 neighborhood to the frame bounds.
            int y0 = MAX(0, position.y - 1);
            int y1 = MIN(height - 1, position.y + 1);
            int x0 = MAX(0, position.x - 1);
            int x1 = MIN(width - 1, position.x + 1);
            for (int y = y0; y <= y1; y++)
            {
                for (int x = x0; x <= x1; x++)
                {
                    uint8_t r = 0;
                    uint8_t g = 0;
                    uint8_t b = 0;
                    if ((y == position.y || x == position.x))
                    {
                        if (isLostTrackingPoint)
                        {
                            r = 255;
                            g = 0;
                            b = 0;
                        }
                        else if (isActivePoint)
                        {
                            r = 0;
                            g = 139;
                            b = 69;
                        }
                        else
                        {
                            r = 255;
                            g = 255;
                            b = 0;
                        }
                    }
                    else
                    {
                        r = 0;
                        g = 0;
                        b = 0;
                    }
                    int index = x + y * width;
                    uint8_t* pixel = colorData + index * bytesPerPixel;
                    *(pixel) = r;
                    *(pixel + 1) = g;
                    *(pixel + 2) = b;
                }
            }
        }
    }

    // Paints maskColor over every pixel whose mask value equals targetValue.
    void overlay_mask(const BitmapMask& matMask,
                      _astra_imageframe& imageFrame,
                      const RgbPixel& maskColor,
                      const pixel_type targetValue)
    {
        assert(matMask.width() == imageFrame.metadata.width);
        assert(matMask.height() == imageFrame.metadata.height);
        int width = matMask.width();
        int height = matMask.height();
        RgbPixel* colorData = static_cast<RgbPixel*>(imageFrame.data);
        for (int y = 0; y < height; ++y)
        {
            const auto* maskRow = matMask.data(y);
            for (int x = 0; x < width; ++x, ++maskRow, ++colorData)
            {
                pixel_type maskValue = static_cast<pixel_type>(*maskRow);
                if (maskValue == targetValue)
                {
                    *colorData = maskColor;
                }
            }
        }
    }

    // Renders a depth map as cyan intensity: nearer is brighter,
    // zero depth (invalid) is black.
    void show_depth_matrix(const BitmapF& matDepth,
                           _astra_imageframe& imageFrame)
    {
        assert(matDepth.width() == imageFrame.metadata.width);
        assert(matDepth.height() == imageFrame.metadata.height);
        int width = matDepth.width();
        int height = matDepth.height();
        RgbPixel* colorData = static_cast<RgbPixel*>(imageFrame.data);
        for (int y = 0; y < height; ++y)
        {
            const float* depthRow = matDepth.data(y);
            for (int x = 0; x < width; ++x, ++depthRow, ++colorData)
            {
                float depth = *depthRow;
                // Map [400, 6000] mm-ish range to [0, 1], then invert.
                float normDepth = std::min(1.0f, std::max(0.0f, (depth - 400.0f) / 5600.0f));
                uint8_t value = 255 * (1 - normDepth);
                if (depth == 0)
                {
                    value = 0;
                }
                RgbPixel color(0, value, value);
                *colorData = color;
            }
        }
    }

    // Renders a velocity map: positive velocity in green, negative in yellow,
    // with brightness proportional to |velocity| / maxScale (clamped).
    void show_velocity_matrix(const BitmapF& matVelocity,
                              float maxScale,
                              _astra_imageframe& imageFrame)
    {
        assert(matVelocity.width() == imageFrame.metadata.width);
        assert(matVelocity.height() == imageFrame.metadata.height);
        if (maxScale == 0)
        {
            // FIX: throw by value, not `throw new ...`. Throwing a pointer
            // leaks the object and is not caught by the conventional
            // `catch (const std::exception&)` handlers.
            throw std::invalid_argument("maxScale cannot be 0");
        }
        const int width = matVelocity.width();
        const int height = matVelocity.height();
        RgbPixel* colorData = static_cast<RgbPixel*>(imageFrame.data);
        for (int y = 0; y < height; ++y)
        {
            const float* velocityRow = matVelocity.data(y);
            for (int x = 0; x < width; ++x, ++velocityRow, ++colorData)
            {
                float velocity = *velocityRow;
                uint8_t velocityValue = static_cast<uint8_t>(255 * sqrt(min(1.0f, abs(velocity / maxScale))));
                RgbPixel color(0, velocityValue, 0);
                if (velocity < 0)
                {
                    color.r = velocityValue;
                    color.g = velocityValue;
                }
                *colorData = color;
            }
        }
    }

    // Renders an arbitrary matrix as grayscale, normalized to [55, 255] over
    // the (optionally mask-restricted) min/max range; zeros and masked-out
    // pixels are black, a flat range renders white.
    template<typename T>
    void show_norm_array(const Bitmap<T>& mat,
                         const BitmapMask& mask,
                         _astra_imageframe& imageFrame)
    {
        assert(mat.width() == imageFrame.metadata.width);
        assert(mat.height() == imageFrame.metadata.height);
        int width = mat.width();
        int height = mat.height();
        MinMaxLoc<T> minMaxLoc;
        bool emptyMask = all_zero(mask);
        if (!emptyMask)
        {
            assert(mat.size() == mask.size());
            minMaxLoc = find_min_max_loc(mat, mask);
        }
        else
        {
            minMaxLoc = find_min_max_loc(mat);
        }
        double range = minMaxLoc.max - minMaxLoc.min;
        bool rangeZero = abs(range) < 0.00001;
        uint8_t* colorData = static_cast<uint8_t*>(imageFrame.data);
        uint8_t bytesPerPixel;
        astra_pixelformat_get_bytes_per_pixel(imageFrame.metadata.pixelFormat, &bytesPerPixel);
        for (int y = 0; y < height; ++y)
        {
            const T* dataRow = mat.data(y);
            const uint8_t* maskRow = nullptr;
            if (!emptyMask)
            {
                maskRow = mask.data(y);
            }
            for (int x = 0; x < width; ++x, ++dataRow, colorData += bytesPerPixel)
            {
                float data = *dataRow;
                uint8_t maskValue = 1;
                if (!emptyMask)
                {
                    maskValue = *maskRow;
                    ++maskRow;
                }
                float value;
                if (0 == data || 0 == maskValue)
                {
                    value = 0;
                }
                else if (rangeZero)
                {
                    value = 255;
                }
                else
                {
                    value = 55 + 200 * ((*dataRow - minMaxLoc.min) / range);
                }
                *(colorData) = value;
                *(colorData + 1) = value;
                *(colorData + 2) = value;
            }
        }
    }
};
}}
#endif // HND_DEBUGVISUALIZER_H
|
#!/bin/sh
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e

# Repository root = the parent of the directory holding this script.
base="$(realpath "$(dirname "$0")/..")"

# Mark the build tree as a cache directory so backup tools can skip it.
build_dir="${base}/build"
mkdir -p "$build_dir"
printf 'Signature: 8a477f597d28d172789f06886806bc55\n' >"${build_dir}/CACHEDIR.TAG"

dev_dir="${build_dir}/dev"
mkdir -p "$dev_dir"
cd "$dev_dir"
cmake "$@" "$base"

# Rebuild and retest whenever a header or source changes. `entr -d` exits
# when the watched file list itself changes, so loop to pick up new files.
while echo Restarting; sleep 1; do
  find "$base" -name '*.h' -or -name '*.cc' |
    CTEST_OUTPUT_ON_FAILURE=1 entr -d make -j all -k test
done
|
<reponame>kutay-celebi/gotodo
package util
import (
"fmt"
)
// ErrorResponse is the JSON-style payload returned to clients on failure:
// a human-readable message plus a correlation UUID for log lookup.
type ErrorResponse struct {
	Message   string
	ErrorUUID string
}

// ErrInternal is the sentinel for unexpected server-side failures.
var ErrInternal = fmt.Errorf("internal error")

// ErrRecordNotFound is the sentinel for lookups that matched nothing.
var ErrRecordNotFound = fmt.Errorf("record not found")
|
#! /usr/bin/env python3
# -*-coding:UTF-8 -*-
# @Time : 2018/12/21 18:29:16
# @Author : che
# @Email : <EMAIL>
# 自己定义线程的终止条件
import threading
class StoppableThread(threading.Thread):
    """Thread with cooperative terminate/suspend/resume controls.

    ``terminate()`` asks the run loop to exit; ``suspend()`` parks the loop
    (by holding a lock it must acquire each iteration) until ``resume()``
    releases it. Note the loop busy-spins while running.
    """

    def __init__(self):
        super().__init__()
        self._terminate = False
        self._suspend_lock = threading.Lock()

    def terminate(self):
        # Ask run() to exit at its next iteration.
        self._terminate = True

    def suspend(self):
        # Holding the lock blocks run() at its acquire() call.
        self._suspend_lock.acquire()

    def resume(self):
        self._suspend_lock.release()

    def run(self):
        # Spin until terminate() is called; each pass briefly takes the
        # suspend lock so suspend()/resume() can gate progress.
        while not self._terminate:
            self._suspend_lock.acquire()
            self._suspend_lock.release()
|
<reponame>bitbrain/braingdx<filename>core/src/main/java/de/bitbrain/braingdx/graphics/pipeline/CombinedRenderPipe.java
/* Copyright 2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.bitbrain.braingdx.graphics.pipeline;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.glutils.FrameBuffer;
import com.badlogic.gdx.utils.GdxRuntimeException;
import de.bitbrain.braingdx.graphics.BatchResolver;
import de.bitbrain.braingdx.graphics.postprocessing.PostProcessor;
import de.bitbrain.braingdx.graphics.postprocessing.PostProcessorEffect;
import de.bitbrain.braingdx.graphics.shader.BatchPostProcessor;
import de.bitbrain.braingdx.util.Resizeable;
import java.util.Map;
/**
 * A render pipe that draws one {@link RenderLayer}, optionally routing the
 * output through a post-processing chain. The batch used for drawing is
 * looked up per layer batch class via the supplied resolver map.
 */
class CombinedRenderPipe implements RenderPipe, Resizeable {

    private final RenderLayer layer;
    private final BatchPostProcessor batchPostProcessor;
    private final SpriteBatch batch;
    // Disabled pipes are skipped entirely in render().
    private boolean enabled = true;
    private final Map<Class<?>, BatchResolver<?>> batchResolverMap;

    public CombinedRenderPipe(RenderLayer layer, PostProcessor processor, SpriteBatch batch, Map<Class<?>, BatchResolver<?>> batchResolverMap,
                              PostProcessorEffect... effects) {
        this.layer = layer;
        this.batchPostProcessor = new BatchPostProcessor(processor, effects);
        this.batch = batch;
        this.batchResolverMap = batchResolverMap;
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    @Override
    public void addEffects(PostProcessorEffect... effects) {
        batchPostProcessor.addEffects(effects);
    }

    // Replaces the current effect chain with the given one.
    @Override
    public void setEffects(PostProcessorEffect[] effects) {
        batchPostProcessor.clear();
        batchPostProcessor.addEffects(effects);
    }

    @Override
    public boolean hasEffects() {
        return batchPostProcessor.hasEffects();
    }

    @Override
    public RenderLayer<?> getLayer() {
        return layer;
    }

    @Override
    public void beforeRender() {
        layer.beforeRender();
    }

    /**
     * Renders the layer. Three paths, in order:
     * no buffer -> draw directly; effects present -> draw the previous
     * buffer contents plus this layer through the post-processor back into
     * the buffer; otherwise -> draw the layer straight into the buffer.
     */
    @Override
    public void render(float delta, FrameBuffer buffer) {
        if (isEnabled()) {
            BatchResolver<?> batchResolver = batchResolverMap.get(layer.getBatchCass());
            if (batchResolver == null) {
                throw new GdxRuntimeException("No batch resolver defined for type=" + layer.getBatchCass());
            }
            Object batch = batchResolver.getBatch();
            if (buffer == null) {
                layer.render(batch, delta);
            } else if (batchPostProcessor.hasEffects()) {
                batchPostProcessor.begin();
                // Re-draw what earlier pipes accumulated in the buffer so the
                // effects apply to the composite, not just this layer.
                this.batch.begin();
                this.batch.draw(buffer.getColorBufferTexture(), 0f, 0f);
                this.batch.end();
                layer.render(batch, delta);
                batchPostProcessor.end(buffer);
            } else {
                buffer.begin();
                layer.render(batch, delta);
                buffer.end();
            }
        }
    }

    // Forwards resize events to layers that care about viewport size.
    @Override
    public void resize(int width, int height) {
        if (layer instanceof Resizeable) {
            ((Resizeable) layer).resize(width, height);
        }
    }

    @Override
    public String toString() {
        return "CombinedRenderPipe{" +
              "layer=" + layer.getClass().getName() +
              '}';
    }
}
|
# Generate launcher icons from the master artwork at the sizes required by
# the store listing and by each Android density bucket.
# $1 selects the image variant suffix (also used in the output res path).
R="SendReduced/src/$1/res"
src="images/Lens_and_wavefronts$1.png"

convert "$src" -resize 114x114 "icon114$1.png"
convert "$src" -resize 135x135 "icon135$1.png"
convert "$src" -resize 512x512 "icon512$1.png"
convert "$src" -resize 144x144 "$R/drawable-xxhdpi/icon.png"
convert "$src" -resize 96x96  "$R/drawable-xhdpi/icon.png"
# BUGFIX: the original ran the 72x72 hdpi conversion twice; quoted all
# expansions so paths with spaces do not break word-splitting.
convert "$src" -resize 72x72  "$R/drawable-hdpi/icon.png"
convert "$src" -resize 48x48  "$R/drawable-mdpi/icon.png"
convert "$src" -resize 32x32  "$R/drawable-ldpi/icon.png"
|
<reponame>vasuki1996/GraphEX-Test
// Global state shared by the WASM-vs-JS video filter demo.
let wam;                        // the loaded WebAssembly module
let media = 'video';            // current source: 'video' or 'webcam'
let jsActive = true;            // whether the JS reference path runs at all
let jsCanvasOn = true;          // whether the JS canvas is visible
let playing = true;
let filter = 'Normal', prevFilter;
let frameNum;                   // requestAnimationFrame handle from draw()
let slowSpeed = 0.5, fastSpeed = 2;
// Timing state: t0/t1 bracket the WASM filter call, t2/t3 the JS one.
let t0, t1 = Infinity, t2, t3 = Infinity, line1, line2, perf1, perf2, perfStr1, perfStr2, avg1, avg2, wasmStats, jsStats, percent=0;
let counter=0, sum1=0, sum2=0;
let pixels, pixels2;            // ImageData for the WASM and JS canvases
let cw, cw2, ch, ch2;           // client dimensions of the two canvases
let speedDiv = document.getElementById('speedHead');
let avgDisplay = document.getElementById('avg');

// Load the WASM module, then wire up the UI.
loadWASM()
  .then(module => {
    wam = module;
  }).catch((err) => {
    console.log('Error in fetching module: ', err);
  }).then(() => {
    // NOTE(review): the IIFE runs immediately and assigns its result
    // (undefined) to window.onload — the setup effectively happens as soon
    // as the module promise settles, not on window load. Confirm intended.
    window.onload = (() => {
      createStats();
      addButtons();
      graphStats();
      appendWasmCheck();
    })();
  });
// Toggle whether the JavaScript comparison path runs, updating the button label.
function disableJS() {
  jsActive = !jsActive;
  const label = jsActive ? 'Disable JavaScript' : 'Enable JavaScript';
  document.getElementById('jsButton').innerHTML = label;
}
// Show or hide the JS comparison canvas (and its heading), syncing the button label.
function disableJsCanvas() {
  jsCanvasOn = !jsCanvasOn;
  const visibility = jsCanvasOn ? 'visible' : 'hidden';
  const label = jsCanvasOn ? 'Hide JS Canvas' : 'Show JS Canvas';
  document.getElementById('jsCanvas').innerHTML = label;
  document.getElementById('jsCanvasHeading').style.visibility = visibility;
  document.getElementById('c2').style.visibility = visibility;
}
// Switch the demo between the bundled video file and the user's webcam.
// NOTE(review): controlContent / timingContent are assigned without
// declaration, creating implicit globals (would throw in strict mode).
// They must persist across calls, so they cannot simply be made local —
// a module-level declaration would be the proper fix.
function webcamToggle() {
  media = media === 'video' ? 'webcam' : 'video';
  if(media==='webcam') {
    // Stash the current control/timing markup so it can be restored later.
    controlContent = document.getElementById('controls').innerHTML;
    timingContent = document.getElementById('timing').innerHTML;
    document.getElementById('webcamButton').innerHTML = 'Switch to Video';
    navigator.mediaDevices.getUserMedia({video: true})
      .then((stream) => {
        vid.srcObject = stream;
        vid2.srcObject = stream;
        document.getElementById('controls').innerHTML = "Switch back to video for player controls";
        document.getElementById('timing').innerHTML = '';
      })
      .catch(function(err) {
        // Webcam access denied or unavailable: fall back to the video.
        media = 'video';
        console.log(err.name);
      });
  }
  else {
    // Restore the stashed markup and detach the webcam streams.
    document.getElementById('controls').innerHTML = controlContent;
    document.getElementById('timing').innerHTML = timingContent;
    document.getElementById('webcamButton').innerHTML = 'Switch to Webcam';
    vid.srcObject = null;
    vid2.srcObject = null;
  }
}
//wasm video: once frame data is available, size the canvas to the video,
//record its client dimensions, then start the render loop and timecode display.
var vid = document.getElementById('v');
var canvas = document.getElementById('c');
var context = canvas.getContext('2d');
vid.addEventListener("loadeddata", function() {
  canvas.setAttribute('height', vid.videoHeight);
  canvas.setAttribute('width', vid.videoWidth);
  cw = canvas.clientWidth; //usually same as canvas.height
  ch = canvas.clientHeight;
  draw();
  timeData();
});
//javascript video: mirror of the WASM setup for the JS comparison canvas.
var vid2 = document.getElementById('v2');
var canvas2 = document.getElementById('c2');
var context2 = canvas2.getContext('2d');
vid2.addEventListener("loadeddata", function() {
  canvas2.setAttribute('height', vid2.videoHeight);
  canvas2.setAttribute('width', vid2.videoWidth);
  cw2 = canvas2.clientWidth; //usually same as canvas.height
  ch2 = canvas2.clientHeight;
  draw2();
});
// WASM render loop: copy the current video frame, run the selected filter
// through the WASM module (timed via t0/t1), and schedule the next frame.
function draw() {
  if (vid.paused) return false;
  context.drawImage(vid, 0, 0);
  // console.log('check', vid, context);
  pixels = context.getImageData(0, 0, vid.videoWidth, vid.videoHeight);
  if (filter !== 'Normal') {
    t0 = performance.now();
    setPixels(filter, 'wasm');
    //doublePixels('Longhorn', 'Sunset'); - Order matters, kind of cool
    t1 = performance.now();
  }
  context.putImageData(pixels, 0, 0);
  frameNum = requestAnimationFrame(draw);
}
//for javascript example: same loop as draw(), but through the JS filter
//implementations and timed via t2/t3 for the performance comparison.
function draw2() {
  if (vid2.paused) return false;
  context2.drawImage(vid2, 0, 0);
  pixels2 = context2.getImageData(0, 0, vid2.videoWidth, vid2.videoHeight);
  if (filter !== 'Normal') {
    t2 = performance.now();
    setPixels(filter, 'js');
    t3 = performance.now();
  }
  context2.putImageData(pixels2, 0, 0);
  requestAnimationFrame(draw2);
}
//case for when loop is off and video pauses at end without someone clicking
//play button: keep the play/pause button image in sync.
vid.onpause = () => document.getElementById('playImg').setAttribute('src', 'img/play1.svg');
// Play/pause both videos in lockstep and swap the button image accordingly.
function playToggle () {
  const playImg = document.getElementById('playImg');
  if (vid.paused) {
    playImg.setAttribute('src', 'img/pause1.svg');
    vid.play();
    vid2.play();
    draw();
    draw2();
  } else {
    playImg.setAttribute('src', 'img/play1.svg');
    vid.pause();
    vid2.pause();
  }
}
// Step both videos back five seconds, clamping at the start of the clip.
function rewind() {
  vid.currentTime = Math.max(vid.currentTime - 5, 0);
  vid2.currentTime = Math.max(vid2.currentTime - 5, 0);
}
// Step both videos forward five seconds, clamping at the end of the clip.
function fastForward() {
  vid.currentTime = Math.min(vid.currentTime + 5, vid.duration);
  vid2.currentTime = Math.min(vid2.currentTime + 5, vid2.duration);
}
// Toggle looping on both videos at once, updating the loop-button image.
function loopToggle () {
  const loopImg = document.getElementById('loopImg');
  if (vid.hasAttribute('loop')) {
    loopImg.setAttribute('src', 'img/loop1.svg');
    vid.removeAttribute('loop');
    vid2.removeAttribute('loop');
  } else {
    // Enabling looping also resumes playback if currently paused.
    if (vid.paused) {
      playToggle();
    }
    loopImg.setAttribute('src', 'img/noloop1.svg');
    vid.setAttribute('loop', 'true');
    vid2.setAttribute('loop', 'true');
  }
}
// Format a time given in seconds as "HH : MM : SS : CC" (CC = centiseconds).
// BUGFIX: the original omitted the modulo on minutes and seconds, so e.g.
// 90 seconds rendered as "00 : 01 : 90 : …"; it also documented the input
// as microseconds although vid.currentTime is in seconds.
function getTimeCode(totalSeconds) {
  const pad = (n) => String(n).padStart(2, '0');
  const whole = Math.floor(totalSeconds);
  const hours = Math.floor(whole / 3600);
  const minutes = Math.floor(whole / 60) % 60;
  const seconds = whole % 60;
  const centis = pad(Math.floor((totalSeconds - whole) * 100));
  return `${pad(hours)} : ${pad(minutes)} : ${pad(seconds)} : ${centis}`;
}

// Refresh the on-screen timecode for the WASM video roughly every 15 ms.
function timeData () {
  //FrameNum in Time div, and then time in Canvas div;
  //Can put total frames next to video length;
  let vidTime = document.getElementById('vidTime');
  //add thing for frameNum;
  vidTime.innerHTML = `${getTimeCode(vid.currentTime)}`;
  setTimeout(timeData,15);
}
// Toggle between slow motion and regular speed on both videos; if fast
// motion is active, switching to slow also resets the fast button label.
function slowToggle () {
  const slowButton = document.getElementById('slowButton');
  const rate = vid.playbackRate;
  if (rate === slowSpeed) {
    slowButton.innerHTML = 'Toggle Slow Motion';
    vid.playbackRate = 1;
    vid2.playbackRate = 1;
  } else if (rate === 1.0) {
    slowButton.innerHTML = 'Toggle Regular Speed';
    vid.playbackRate = slowSpeed;
    vid2.playbackRate = slowSpeed;
  } else if (rate === fastSpeed) {
    slowButton.innerHTML = 'Toggle Regular Speed';
    document.getElementById('fastButton').innerHTML = 'Toggle Fast Motion';
    vid.playbackRate = slowSpeed;
    vid2.playbackRate = slowSpeed;
  }
}
// Toggle between fast motion and regular speed on both videos; if slow
// motion is active, switching to fast also resets the slow button label.
function fastToggle () {
  const fastButton = document.getElementById('fastButton');
  const rate = vid.playbackRate;
  if (rate === fastSpeed) {
    fastButton.innerHTML = 'Toggle Fast Motion';
    vid.playbackRate = 1;
    vid2.playbackRate = 1;
  } else if (rate === 1.0) {
    fastButton.innerHTML = 'Toggle Regular Speed';
    vid.playbackRate = fastSpeed;
    vid2.playbackRate = fastSpeed;
  } else if (rate === slowSpeed) {
    fastButton.innerHTML = 'Toggle Regular Speed';
    document.getElementById('slowButton').innerHTML = 'Toggle Slow Motion';
    vid.playbackRate = fastSpeed;
    vid2.playbackRate = fastSpeed;
  }
}
//STATS, Buttons adding, SetPixels function stuff starts below

// Publishes timing statistics every 500 ms: per-frame computation times
// (t0..t3 are set by draw()/draw2() around their filter calls), rolling
// averages, the smoothie graph lines, and the WASM-vs-JS speedup headline.
function graphStats () {
  // reset accumulated values when the user picks a different filter;
  if (prevFilter !== filter) {
    perf1 = 0;
    perf2 = 0;
    sum1 = 0;
    sum2 = 0;
    avg1 = 0;
    avg2 = 0;
    counter = 0;
  };
  if (filter !== 'Normal') {
    perf1 = t1 - t0;
    perf2 = t3 - t2;
    sum1 += perf1;
    sum2 += perf2;
    counter += 1;
    // Refresh averages and append graph points every fifth sample.
    if (counter % 5 === 0) {
      avg1 = sum1 / counter;
      avg2 = sum2 / counter;
      avgDisplay.innerText = `Average computation time WASM: ${avg1.toString().slice(0, 4)} ms, JS: ${avg2.toString().slice(0, 4)} ms`;
      line1.append(new Date().getTime(), 500 / perf1);
      line2.append(new Date().getTime(), 500 / perf2);
    }
    perfStr1 = perf1.toString().slice(0, 4);
    perfStr2 = perf2.toString().slice(0, 5);
    wasmStats = `WASM computation time: ${perfStr1} ms`;
    jsStats = ` JS computation time: ${perfStr2} ms`;
    document.getElementById("stats").textContent = wasmStats + jsStats;
    percent = Math.round(((perf2 - perf1) / perf1) * 100);
  }
  if (filter !== 'Normal' && jsActive) {
    speedDiv.innerText = `Performance Comparison: WASM is currently ${percent}% faster than JS`;
  }
  else speedDiv.innerText = 'Performance Comparison';
  prevFilter = filter;
  setTimeout(graphStats, 500);
}
// Set up the SmoothieChart performance graph: green line for WASM timings,
// blue line for JS timings, streaming to the #statsCanvas element.
function createStats() {
  let smoothie = new SmoothieChart({
    maxValueScale: 1.1,
    minValueScale: 0.5,
    grid: {
      strokeStyle: 'rgb(60, 60, 60)',
      fillStyle: 'rgb(30, 30, 30)',
      lineWidth: 1,
      millisPerLine: 250,
      verticalSections: 5,
    },
    labels: {
      fillStyle: 'rgb(255, 255, 255)',
      fontSize: 14,
    },
  });
  // send smoothie data to canvas
  smoothie.streamTo(document.getElementById('statsCanvas'), 500);
  // declare smoothie timeseries (appended to by graphStats)
  line1 = new TimeSeries();
  line2 = new TimeSeries();
  // define graph lines and colors: line1 (WASM) green, line2 (JS) blue
  smoothie.addTimeSeries(line1,
    {
      strokeStyle: 'rgb(0, 255, 0)',
      fillStyle: 'rgba(0, 255, 0, 0.075)',
      lineWidth: 3,
    }
  );
  smoothie.addTimeSeries(line2,
    { strokeStyle: 'rgb(0, 0, 255)',
      fillStyle: 'rgba(0, 0, 255, 0.075)',
      lineWidth: 3,
    }
  );
}
// Build the clickable filter palette and insert it at the top of #editor.
// Clicking a tile selects that filter and moves the highlight class.
// (The filtersArr parameter is unused; the list below is authoritative.)
function addButtons (filtersArr) {
  const filters = ['Normal', 'Grayscale', 'Invert', 'Bacteria', 'Sunset',
    'Emboss', 'Super Edge', 'Super Edge Inv',
    'Gaussian Blur', 'Moss', 'Robbery', 'Brighten', 'Swamp','Ghost', 'Good Morning', 'Acid', 'Urple', 'Romance', 'Hippo', 'Longhorn', 'Security', 'Underground', 'Rooster', 'Mist', 'Tingle', 'Kaleidoscope', 'Noise', 'Forest', 'Dewdrops', 'Analog TV', 'Color Destruction', 'Hulk Edge', 'Twisted', 'Clarity', 'Sharpen','Uber Sharpen'];
  const buttonDiv = document.createElement('div');
  buttonDiv.id = 'filters';
  const editor = document.getElementById('editor');
  editor.insertBefore(buttonDiv, editor.firstChild);
  filters.forEach((label) => {
    const filterDiv = document.createElement('div');
    filterDiv.className = "indFilter";
    filterDiv.innerText = label;
    filterDiv.addEventListener('click', function() {
      filter = label;
      // Clear any previously highlighted tile before highlighting this one.
      const selected = document.getElementsByClassName('selectedFilter')[0];
      if (selected) selected.classList.remove('selectedFilter');
      this.classList.add('selectedFilter');
    });
    buttonDiv.appendChild(filterDiv);
  });
}
// Insert a support notice: a checkmark above the editor when WebAssembly is
// available, otherwise an explanatory message in the stats container
// (with a mobile-specific variant).
function appendWasmCheck () {
  const notice = document.createElement('p');
  notice.className = 'wasmCheck';
  const anchor = document.getElementById('editor');
  if ('WebAssembly' in window) {
    notice.innerHTML = '(\u2713 WebAssembly is supported in your browser)';
    document.body.insertBefore(notice, anchor);
  } else if (/Mobi/.test(navigator.userAgent)) {
    document.getElementById('statsContainer').innerHTML = `<h3 style="color:#a37c6e;">\u2639 WebAssembly is not yet supported on mobile devices. Please view on desktop browser.</h3>`
  } else {
    document.getElementById('statsContainer').innerHTML = `<h3 style="color:#a37c6e;">\u2639 WebAssembly is not supported in your browser. Please update to the latest version of Chrome or Firefox to enable WebAssembly and compare .WASM & .JS performance</h3>`
  }
}
// Apply the currently selected filter to one frame of pixel data.
// language === 'wasm' runs `pixels` through the WASM module; 'js' runs
// `pixels2` through the hand-written JS equivalents (skipped entirely when
// the JS comparison path is disabled). Removed the unused
// `let kernel, divisor;` declarations from the original.
function setPixels (filter, language) {
  if (language === 'wasm') {
    switch (filter) {
      case 'Grayscale': pixels.data.set(wam.grayScale(pixels.data)); break;
      case 'Brighten': pixels.data.set(wam.brighten(pixels.data)); break;
      case 'Invert': pixels.data.set(wam.invert(pixels.data)); break;
      case 'Noise': pixels.data.set(wam.noise(pixels.data)); break;
      case 'Sunset': pixels.data.set(wam.sunset(pixels.data, cw)); break;
      case 'Analog TV': pixels.data.set(wam.analogTV(pixels.data, cw)); break;
      case 'Emboss': pixels.data.set(wam.emboss(pixels.data, cw)); break;
      case 'Super Edge': pixels.data.set(wam.sobelFilter(pixels.data, cw, ch)); break;
      case 'Super Edge Inv': pixels.data.set(wam.sobelFilter(pixels.data, cw, ch, true)); break;
      case 'Gaussian Blur': pixels.data.set(wam.blur(pixels.data, cw, ch)); break;
      case 'Sharpen': pixels.data.set(wam.sharpen(pixels.data, cw, ch)); break;
      case 'Uber Sharpen': pixels.data.set(wam.strongSharpen(pixels.data, cw, ch)); break;
      case 'Clarity': pixels.data.set(wam.clarity(pixels.data, cw, ch)); break;
      case 'Good Morning': pixels.data.set(wam.goodMorning(pixels.data, cw, ch)); break;
      case 'Acid': pixels.data.set(wam.acid(pixels.data, cw, ch)); break;
      case 'Urple': pixels.data.set(wam.urple(pixels.data, cw)); break;
      case 'Forest': pixels.data.set(wam.forest(pixels.data, cw)); break;
      case 'Romance': pixels.data.set(wam.romance(pixels.data, cw)); break;
      case 'Hippo': pixels.data.set(wam.hippo(pixels.data, cw)); break;
      case 'Longhorn': pixels.data.set(wam.longhorn(pixels.data, cw)); break;
      case 'Underground': pixels.data.set(wam.underground(pixels.data, cw)); break;
      case 'Rooster': pixels.data.set(wam.rooster(pixels.data, cw)); break;
      case 'Moss': pixels.data.set(wam.moss(pixels.data, cw)); break;
      case 'Mist': pixels.data.set(wam.mist(pixels.data, cw)); break;
      case 'Tingle': pixels.data.set(wam.tingle(pixels.data, cw)); break;
      case 'Kaleidoscope': pixels.data.set(wam.kaleidoscope(pixels.data, cw)); break;
      case 'Bacteria': pixels.data.set(wam.bacteria(pixels.data, cw)); break;
      case 'Dewdrops': pixels.data.set(wam.dewdrops(pixels.data, cw, ch)); break;
      case 'Color Destruction': pixels.data.set(wam.destruction(pixels.data, cw, ch)); break;
      case 'Hulk Edge': pixels.data.set(wam.hulk(pixels.data, cw)); break;
      case 'Ghost': pixels.data.set(wam.ghost(pixels.data, cw)); break;
      case 'Swamp': pixels.data.set(wam.swamp(pixels.data, cw)); break;
      case 'Twisted': pixels.data.set(wam.twisted(pixels.data, cw)); break;
      case 'Security': pixels.data.set(wam.security(pixels.data, cw)); break;
      case 'Robbery': pixels.data.set(wam.robbery(pixels.data, cw)); break;
    }
  } else if (jsActive) {
    switch (filter) {
      case 'Grayscale': pixels2.data.set(js_grayScale(pixels2.data)); break;
      case 'Brighten': pixels2.data.set(js_brighten(pixels2.data)); break;
      case 'Invert': pixels2.data.set(js_invert(pixels2.data)); break;
      case 'Noise': pixels2.data.set(js_noise(pixels2.data)); break;
      case 'Sunset': pixels2.data.set(js_sunset(pixels2.data, cw2)); break;
      case 'Analog TV': pixels2.data.set(js_analog(pixels2.data, cw2)); break;
      case 'Emboss': pixels2.data.set(js_emboss(pixels2.data, cw2)); break;
      case 'Super Edge': pixels2.data.set(js_sobelFilter(pixels2.data, cw2, ch2)); break;
      case 'Super Edge Inv': pixels2.data.set(js_sobelFilter(pixels2.data, cw2, ch2, true)); break;
      case 'Gaussian Blur': pixels2.data.set(js_blur(pixels2.data, cw2, ch2));break;
      case 'Sharpen': pixels2.data.set(js_sharpen(pixels2.data, cw2, ch2)); break;
      case 'Uber Sharpen': pixels2.data.set(js_strongSharpen(pixels2.data, cw2, ch2)); break;
      case 'Clarity': pixels2.data.set(js_clarity(pixels2.data, cw2, ch2)); break;
      case 'Good Morning': pixels2.data.set(js_goodMorning(pixels2.data, cw2, ch2)); break;
      case 'Acid': pixels2.data.set(js_acid(pixels2.data, cw2, ch2)); break;
      case 'Urple': pixels2.data.set(js_urple(pixels2.data, cw2)); break;
      case 'Forest': pixels2.data.set(js_forest(pixels2.data, cw2)); break;
      case 'Romance': pixels2.data.set(js_romance(pixels2.data, cw2)); break;
      case 'Hippo': pixels2.data.set(js_hippo(pixels2.data, cw2)); break;
      case 'Longhorn': pixels2.data.set(js_longhorn(pixels2.data, cw2)); break;
      case 'Underground': pixels2.data.set(js_underground(pixels2.data, cw2)); break;
      case 'Rooster': pixels2.data.set(js_rooster(pixels2.data, cw2)); break;
      case 'Mist': pixels2.data.set(js_mist(pixels2.data, cw2)); break;
      // NOTE(review): the four cases below call a *different* JS filter than
      // their WASM counterpart (Moss->js_mist, Kaleidoscope->js_tingle,
      // Swamp->js_twisted, Robbery->js_security). This skews the WASM-vs-JS
      // timing comparison for these filters — confirm whether dedicated JS
      // implementations exist or these are intentional fallbacks.
      case 'Moss': pixels2.data.set(js_mist(pixels2.data, cw2)); break;
      case 'Tingle': pixels2.data.set(js_tingle(pixels2.data, cw2)); break;
      case 'Kaleidoscope': pixels2.data.set(js_tingle(pixels2.data, cw2)); break;
      case 'Bacteria': pixels2.data.set(js_bacteria(pixels2.data, cw2)); break;
      case 'Dewdrops': pixels2.data.set(js_dewdrops(pixels2.data, cw2, ch2)); break;
      case 'Color Destruction': pixels2.data.set(js_destruction(pixels2.data, cw2, ch2)); break;
      case 'Hulk Edge': pixels2.data.set(js_hulk(pixels2.data, cw2)); break;
      case 'Ghost': pixels2.data.set(js_ghost(pixels2.data, cw2)); break;
      case 'Swamp': pixels2.data.set(js_twisted(pixels2.data, cw2)); break;
      case 'Twisted': pixels2.data.set(js_twisted(pixels2.data, cw2)); break;
      case 'Security': pixels2.data.set(js_security(pixels2.data, cw2)); break;
      case 'Robbery': pixels2.data.set(js_security(pixels2.data, cw2)); break;
    }
  }
}
// Apply two WASM filters back to back to the same frame (application order
// affects the result). Currently only referenced from a commented-out line
// in draw().
function doublePixels (filter1, filter2) {
  setPixels(filter1, 'wasm');
  setPixels(filter2, 'wasm');
}
class Item:
    """A named thing carrying a numeric weight, used when ranking paths."""

    def __init__(self, name, weight):
        # Display name of the item.
        self.name = name
        # Numeric cost summed by find_smallest_path().
        self.weight = weight
def find_smallest_path(result):
    """Return the solution from ``result`` with the minimal summed item weight.

    Each element of ``result`` is an iterable of objects exposing a numeric
    ``weight`` attribute. Returns ``[]`` when ``result`` is empty. Ties keep
    the earliest solution, matching the original strict ``>`` comparison.
    """
    return min(
        result,
        key=lambda solution: sum(item.weight for item in solution),
        default=[],
    )
// Placeholder: polls some asynchronous resource and resolves with its state.
const poller = async () => {
  // Implementation of the poller function
};

// Placeholder: predicate deciding whether a polled state is acceptable.
const tester = (state) => {
  // Implementation of the tester function
};
// Poll up to maxAttempts times, resolving with the first state accepted by
// tester(). Errors thrown by poller() are swallowed and simply consume an
// attempt; exhausting all attempts rejects.
const retryPoller = async (maxAttempts) => {
  let attempt = 0;
  while (attempt < maxAttempts) {
    attempt += 1;
    try {
      const state = await poller();
      if (tester(state)) {
        return state;
      }
    } catch (e) {
      // Deliberately ignored: a failed poll counts as a spent attempt.
    }
  }
  throw new Error('Maximum number of attempts reached without successful state');
};
import * as core from '@actions/core'
import * as actionSlasher from './action-slasher'
import * as commands from './commands'
import * as chatops from './chatops'
import * as events from './events'
// Entry point for the ChatOps GitHub Action: logs the relevant workflow
// context for debugging, then delegates command/event handling to
// action-slasher, marking the action as failed on any error.
const run = async (): Promise<void> => {
  core.debug(`Payload: ${JSON.stringify(chatops.context.payload, null, 2)}`)
  core.debug(`Project: ${chatops.context.project}`)
  core.debug(
    `Repository: ${JSON.stringify(chatops.context.repository, null, 2)}`
  )
  core.debug(`Comment ID: ${chatops.context.commentId}`)
  core.debug(`Deployment ID: ${chatops.context.deploymentId}`)
  core.debug(
    `Issue Number: ${chatops.context.issueNumber} (pr? ${chatops.context.isPullRequest})`
  )
  try {
    await actionSlasher.run({commands, events})
  } catch (error) {
    core.setFailed(error.message || error)
  }
}

run()
|
package com.nio.buffer;
import java.nio.ByteBuffer;
/**
 * Minimal ByteBuffer demo: write bytes into a buffer, flip it, and decode
 * only the readable region back into a String.
 */
public class BufferDemo1 {
    public static void main(String[] args) {
        // Allocate a heap buffer and write some ASCII bytes into it.
        ByteBuffer buffer = ByteBuffer.allocate(1024);
        buffer.put("abcdefghijklmn".getBytes());

        // flip() sets limit to the number of bytes written and rewinds
        // position to 0, so the buffer is ready for reading.
        buffer.flip();

        // Decode only positions 0..limit of the backing array.
        byte[] raw = buffer.array();
        System.out.println(new String(raw, 0, buffer.limit()));
    }
}
|
"use strict";

// Auto-generated icon definition: a left-pointing arrow (outline style),
// exported as an SVG description object ({viewBox, children}).
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.arrowLeftOutline = void 0;
var arrowLeftOutline = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M10.928 21c-.801 0-1.555-.312-2.121-.879l-7.121-7.121 7.121-7.121c1.133-1.134 3.109-1.134 4.242 0 .566.564.879 1.317.879 2.119 0 .746-.27 1.451-.764 2.002h4.836c1.654 0 3 1.346 3 3s-1.346 3-3 3h-4.836c.493.549.764 1.252.764 1.998.002.802-.312 1.557-.879 2.124-.567.566-1.32.878-2.121.878zm-6.414-8l5.707 5.707c.379.378 1.035.378 1.414 0 .189-.189.293-.441.293-.708s-.104-.517-.291-.705l-3.295-3.294h9.658c.552 0 1-.449 1-1s-.448-1-1-1h-9.658l3.293-3.293c.189-.189.293-.441.293-.708s-.104-.517-.292-.705c-.381-.38-1.036-.379-1.415-.001l-5.707 5.707z"
    },
    "children": []
  }]
};
exports.arrowLeftOutline = arrowLeftOutline;
const path = require('path');
// Classify how the current script is being executed: from a local source
// checkout, a locally installed node_modules package, via npx (npm-cache),
// or from a global install. All inputs default to values derived from the
// running process.
export function getRuntime(dir?: string, cwd?: string, name?: string) {
  const resolvedCwd = cwd ?? path.resolve(process.cwd());
  const resolvedDir = dir ?? path.resolve(__dirname);
  const resolvedName = name ?? process.argv[1].split(path.sep).splice(-2)[0];

  // Candidate locations for a local installation of this package.
  const pkgDir = path.resolve('node_modules', resolvedName);
  const binDir = path.resolve('node_modules', '.bin', resolvedName);

  const inNodeModules = !!resolvedDir.match('node_modules');
  const inNpxCache = !!resolvedDir.match('npm-cache');
  const installedLocally = resolvedDir === pkgDir || resolvedDir === binDir;

  return {
    isLocalNode: !inNodeModules && !inNpxCache,
    isLocalInstalled: installedLocally,
    isNpx: inNpxCache,
    isGlobal: inNodeModules && !inNpxCache && !installedLocally,
    dir: resolvedDir,
    cwd: resolvedCwd,
    name: resolvedName,
  };
}
|
#pragma once
#include <type_traits>
#include <clean-core/function_ptr.hh>
#include <clean-core/span.hh>
#include <clean-core/stream_ref.hh>
#include <clean-core/string_stream.hh>
#include <clean-core/string_view.hh>
#include <clean-core/typedefs.hh>
namespace cc
{
// Forward declaration of the named-argument binding; defined below.
template <class T>
struct format_arg;
namespace detail
{
// Detects a free to_string(stream_ref<char>, T, string_view) overload:
// a streaming formatter that also accepts per-argument format options.
template <class T, class = std::void_t<>>
struct has_to_string_ss_args_t : std::false_type
{
};
template <class T>
struct has_to_string_ss_args_t<T, std::void_t<decltype(to_string(std::declval<stream_ref<char>>(), std::declval<T>(), std::declval<string_view>()))>> : std::true_type
{
};
template <class T>
constexpr bool has_to_string_ss_args = has_to_string_ss_args_t<T>::value;
// Detects a free to_string(stream_ref<char>, T) overload: a streaming
// formatter without per-argument options.
template <class T, class = std::void_t<>>
struct has_to_string_ss_t : std::false_type
{
};
template <class T>
struct has_to_string_ss_t<T, std::void_t<decltype(to_string(std::declval<stream_ref<char>>(), std::declval<T>()))>> : std::true_type
{
};
template <class T>
constexpr bool has_to_string_ss = has_to_string_ss_t<T>::value;
// Detects a free to_string(T, string_view) overload whose result converts
// to string_view (value-returning formatter with per-argument options).
template <class T, class = std::void_t<>>
struct has_to_string_args_t : std::false_type
{
};
// BUGFIX: the detection expression called "o_string" (typo for to_string),
// so this specialization could never match.
template <class T>
struct has_to_string_args_t<T, std::void_t<decltype(string_view(to_string(std::declval<T>(), std::declval<string_view>())))>> : std::true_type
{
};
// BUGFIX: this alias read has_to_string_ss_t instead of has_to_string_args_t,
// making has_to_string_args report the wrong trait and mis-steering the
// overload dispatch in default_formatter::do_format.
template <class T>
constexpr bool has_to_string_args = has_to_string_args_t<T>::value;
// Detects a free to_string(T) overload whose result converts to string_view.
template <class T, class = std::void_t<>>
struct has_to_string_t : std::false_type
{
};
template <class T>
struct has_to_string_t<T, std::void_t<decltype(string_view(to_string(std::declval<T>())))>> : std::true_type
{
};
template <class T>
constexpr bool has_to_string = has_to_string_t<T>::value;

// Detects a member function T::to_string() whose result converts to string_view.
template <class T, class = std::void_t<>>
struct has_member_to_string_t : std::false_type
{
};
template <class T>
struct has_member_to_string_t<T, std::void_t<decltype(string_view(std::declval<T>().to_string()))>> : std::true_type
{
};
template <class T>
constexpr bool has_member_to_string = has_member_to_string_t<T>::value;
// Formatter used when no custom Formatter is supplied: dispatches to the
// best available to_string overload for T, preferring streaming overloads
// and overloads that understand per-argument options. Fails to compile with
// a readable message when no overload exists.
struct default_formatter
{
    template <class T>
    static void do_format(stream_ref<char> s, T const& v, string_view fmt_args)
    {
        if constexpr (detail::has_to_string_ss_args<T>)
        {
            // Streaming overload that also accepts the options string.
            to_string(s, v, fmt_args);
        }
        else if constexpr (detail::has_to_string_args<T>)
        {
            if constexpr (detail::has_to_string_ss<T>)
            {
                // Prefer the streaming overload when no options were given.
                if (fmt_args.empty())
                    to_string(s, v);
                else
                    s << to_string(v, fmt_args);
            }
            else
            {
                s << string_view(to_string(v, fmt_args));
            }
        }
        else if constexpr (detail::has_to_string_ss<T>)
        {
            to_string(s, v);
        }
        else if constexpr (detail::has_to_string<T>)
        {
            s << string_view(to_string(v));
        }
        else if constexpr (detail::has_member_to_string<T>)
        {
            s << string_view(v.to_string());
        }
        else
        {
            static_assert(cc::always_false<T>, "Type requires a to_string() function");
        }
    }
};
// Type-erased view of one format argument: a formatting thunk, a pointer to
// the caller-owned value, and an optional argument name.
struct arg_info
{
    function_ptr<void(stream_ref<char>, void const*, string_view)> do_format;
    void const* data = nullptr;
    string_view name;
};

// Erase a plain value; the lambda recovers T and forwards to the Formatter.
template <class Formatter = default_formatter, class T>
arg_info make_arg_info(T const& v)
{
    return {[](stream_ref<char> s, void const* data, string_view options) -> void { Formatter::do_format(s, *static_cast<T const*>(data), options); }, &v, {}};
}

// Erase a named argument (see format_arg and the "_a" literal below).
template <class Formatter = default_formatter, class T>
arg_info make_arg_info(format_arg<T> const& a)
{
    return {[](stream_ref<char> ss, void const* data, string_view options) -> void { Formatter::do_format(ss, *static_cast<T const*>(data), options); },
            &a.value, a.name};
}

// Defined out of line: parses fmt_str and invokes each argument's thunk.
void vformat_to(stream_ref<char> s, string_view fmt_str, span<arg_info const> args);
}
// A named format argument: binds a name to a reference to the caller's
// value (the caller must keep the value alive for the duration of the call).
template <class T>
struct format_arg
{
    format_arg(string_view name, T const& v) : name{name}, value{v} {}

    string_view name;
    T const& value;
};
// Formats fmt_str with the given arguments into stream s. Each argument is
// type-erased into an arg_info before being handed to vformat_to.
// NOTE(review): make_arg_info is invoked without forwarding the Formatter
// template parameter, so a custom Formatter passed to format_to appears to
// be ignored for the actual per-argument formatting — confirm intended.
template <class Formatter = detail::default_formatter, class... Args>
void format_to(stream_ref<char> s, string_view fmt_str, Args const&... args)
{
    if constexpr (sizeof...(args) == 0)
    {
        detail::vformat_to(s, fmt_str, {});
    }
    else
    {
        detail::arg_info vargs[] = {detail::make_arg_info(args)...};
        detail::vformat_to(s, fmt_str, vargs);
    }
}

// Convenience overload returning the formatted result as a string.
template <class Formatter = detail::default_formatter, class... Args>
string format(char const* fmt_str, Args const&... args)
{
    string_stream ss;
    format_to<Formatter>(make_stream_ref<char>(ss), fmt_str, args...);
    return ss.to_string();
}
// User-defined-literal support for named arguments, e.g.
//   cc::format("{x}", "x"_a = 1);
namespace format_literals
{
namespace detail
{
// Captures the argument name; assignment binds it to a value.
struct arg_capture
{
    string_view name;
    template <class T>
    cc::format_arg<T> operator=(T const& rhs)
    {
        return cc::format_arg(name, rhs);
    }
};
}
inline detail::arg_capture operator"" _a(const char* name, std::size_t size) { return {{name, size}}; }
}
}
|
// Controller exposing cat creation; persistence is delegated to CatsService.
class CatController {
  constructor(private catsService: CatsService) {}

  // Create a cat record from the 'name' and 'desc' route parameters.
  // The 'name' parameter is stored under the 'title' key of the payload.
  async created(
    @Param('name') name: string,
    @Param('desc') desc: string,
  ): Promise<Object> {
    const parm = {
      title: name,
      desc: desc,
    };
    return this.catsService.create(parm);
  }
}
// JavaScript program to calculate the sum of two given integers.
function sum(num1, num2) {
  const total = num1 + num2;
  return total;
}

// Test code
console.log(sum(2, 3));
#!/usr/bin/env bash

# Deploy (or upgrade) the kubecf helm chart with all test suites enabled,
# auto-detecting a routable IP for minikube / kind clusters and honouring
# optional FEATURE_* toggles and VALUES/CHART overrides from the environment.

source scripts/include/setup.sh
require_tools helm kubectl

HELM_ARGS=()

# Enable every test suite shipped with the chart.
for TEST in brain cf_acceptance smoke; do
  HELM_ARGS+=(--set "testing.${TEST}_tests.enabled=true")
done

# Work out the local IP when not provided: minikube exposes it directly,
# kind publishes it as the control-plane node's InternalIP.
if [ -z "${LOCAL_IP:-}" ]; then
  CONTEXT="$(kubectl config current-context)"
  if [ "${CONTEXT}" = "minikube" ]; then
    require_tools minikube
    LOCAL_IP=$(minikube ip)
  elif [[ "${CONTEXT}" =~ ^kind- ]]; then
    # BUGFIX: quoted the node name (was unquoted, SC2086).
    LOCAL_IP="$(kubectl get node "${CLUSTER_NAME}-control-plane" \
      -o jsonpath='{ .status.addresses[?(@.type == "InternalIP")].address }')"
  fi
fi

# With a known IP, wire up a wildcard DNS domain and expose the routers.
if [ -n "${LOCAL_IP:-}" ]; then
  HELM_ARGS+=(--set "system_domain=${LOCAL_IP}.xip.io")
  for SERVICE in router tcp-router ssh-proxy; do
    HELM_ARGS+=(
      --set "services.${SERVICE}.type=LoadBalancer"
      --set "services.${SERVICE}.externalIPs[0]=${LOCAL_IP}"
    )
  done
fi

# Optional chart features, opted into via environment variables.
if [ -n "${FEATURE_AUTOSCALER:-}" ]; then
  HELM_ARGS+=(--set "features.autoscaler.enabled=true")
fi
if [ -n "${FEATURE_EIRINI:-}" ]; then
  HELM_ARGS+=(--set "features.eirini.enabled=true")
fi
if [ -n "${FEATURE_INGRESS:-}" ]; then
  HELM_ARGS+=(--set "features.ingress.enabled=true")
fi
if [ -n "${VALUES:-}" ]; then
  HELM_ARGS+=(--values "${VALUES}")
fi

# Default to the locally built chart tarball.
if [ -z "${CHART:-}" ]; then
  CHART="output/kubecf-$(./scripts/version.sh).tgz"
fi

helm upgrade kubecf "${CHART}" \
  --install --namespace "${KUBECF_NS}" "${HELM_ARGS[@]}" "$@"
|
const fetch = require('node-fetch');
const url = 'https://hacker-news.firebaseio.com/v0/topstories.json';
// Fetch the ten highest-ranked Hacker News stories and resolve to an array
// of their item objects. On any fetch failure the error is logged and the
// function resolves to undefined (matching the original behavior).
const getArticles = async () => {
  try {
    const response = await fetch(url);
    const ids = (await response.json()).slice(0, 10); // Get the top 10 articles
    const requests = ids.map(async (id) => {
      const itemResponse = await fetch(`https://hacker-news.firebaseio.com/v0/item/${id}.json`);
      return itemResponse.json();
    });
    return Promise.all(requests);
  } catch (err) {
    console.error(err);
  }
};
// Kick off the fetch and dump the resulting article objects to the console.
getArticles().then(articles => {
  console.log(articles);
});
def multiple_of_3(N):
    """Print every multiple of 3 from 0 to N inclusive, one per line.

    Prints nothing when N is negative. Iterates in steps of three instead
    of testing every integer for divisibility.
    """
    for i in range(0, N + 1, 3):
        print(i)
package org.museautomation.ui.steptree;
import javafx.scene.*;
import net.christophermerrill.FancyFxTree.*;
import org.museautomation.ui.step.inline.*;
import org.museautomation.ui.extend.edit.*;
import org.museautomation.ui.extend.edit.step.*;
/**
* @author <NAME> (see LICENSE.txt for license details)
*/
/**
 * FancyTree cell editor that hosts braingdx's inline step editor inside a
 * tree cell, bridging the inline editor's commit/cancel callbacks back to
 * the owning {@link FancyTreeCell}.
 */
public class StepCellEditor implements FancyTreeCellEditor
    {
    StepCellEditor(StepEditContext context, StepConfigurationFacade facade, boolean start_editor_in_full_mode)
        {
        // Adapter that forwards the inline editor's edit lifecycle to the
        // tree cell: cancel aborts the cell edit, commit submits the facade.
        EditInProgress edit = new EditInProgress()
            {
            @Override
            public void cancel()
                {
                _cell.cancelEdit();
                }

            @Override
            public void commit(Object target)
                {
                _cell.commitEdit(facade);
                }
            };
        _cell_editor = new InlineStepEditorContainerImplementation(context, facade.getModelNode(), edit, start_editor_in_full_mode);
        _cell_editor.getNode().getStyleClass().add(STYLE_CLASS);
        }

    @Override
    public Node getNode()
        {
        return _cell_editor.getNode();
        }

    @Override
    public void setCell(FancyTreeCell cell)
        {
        _cell = cell;
        }

    // No-op: cancellation is driven through the EditInProgress adapter above.
    @Override
    public void cancelEdit() { }

    public void requestFocus()
        {
        _cell_editor.requestFocus();
        }

    public void destroy()
        {
        _cell_editor.destroy();
        }

    private InlineStepEditorContainerImplementation _cell_editor;
    private FancyTreeCell _cell;

    public final static String STYLE_CLASS = "step-cell-editor";
    }
|
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# What to do: action flags toggled by the command-line options below.
sign=false
verify=false
build=false
setupenv=false   # NOTE(review): appears unused — '--setup' sets 'setup', not 'setupenv'; confirm

# Systems to build (all enabled by default; -o/--os narrows the set)
linux=true
windows=true
osx=true

# Other Basic variables
SIGNER=          # GPG identity used to sign the build assert files
VERSION=         # version number, branch or commit to build
commit=false     # true when VERSION names a commit/branch (-c)
url=https://github.com/mtcoin/mtcoin
proc=2           # gitian build processes (-j)
mem=2000         # gitian build memory in MiB (-m)
lxc=true         # use LXC by default; --kvm switches to KVM
osslTarUrl=http://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz
osslPatchUrl=https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch
scriptName=$(basename -- "$0")
signProg="gpg --detach-sign"   # replaced with 'true' (no-op) under --detach-sign
commitFiles=true

# Help Message
# 'read -d ""' slurps the entire here-doc into $usage in one read.
read -d '' usage <<- EOF
Usage: $scriptName [-c|u|v|b|s|B|o|h|j|m|] signer version
Run this script from the directory containing the mtcoin, gitian-builder, gitian.sigs, and mtcoin-detached-sigs.
Arguments:
signer GPG signer to sign each build assert file
version Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified
Options:
-c|--commit Indicate that the version argument is for a commit or branch
-u|--url Specify the URL of the repository. Default is https://github.com/mtcoin/mtcoin
-v|--verify Verify the gitian build
-b|--build Do a gitian build
-s|--sign Make signed binaries for Windows and Mac OSX
-B|--buildsign Build both signed and unsigned binaries
-o|--os Specify which Operating Systems the build is for. Default is lwx. l for linux, w for windows, x for osx, a for aarch64
-j Number of processes to use. Default 2
-m Memory to allocate in MiB. Default 2000
--kvm Use KVM instead of LXC
--setup Setup the gitian building environment. Uses KVM. If you want to use lxc, use the --lxc option. Only works on Debian-based systems (Ubuntu, Debian)
--detach-sign Create the assert file for detached signing. Will not commit anything.
--no-commit Do not commit anything to git
-h|--help Print this help message
EOF
# Get options and arguments
# Flags are consumed until the first non-option argument; the remaining
# positional arguments (signer, version) are read after the loop.
while :; do
    case $1 in
        # Verify
        -v|--verify)
            verify=true
            ;;
        # Build
        -b|--build)
            build=true
            ;;
        # Sign binaries
        -s|--sign)
            sign=true
            ;;
        # Build then Sign
        -B|--buildsign)
            sign=true
            build=true
            ;;
        # PGP Signer
        -S|--signer)
            if [ -n "$2" ]
            then
                SIGNER=$2
                shift
            else
                echo 'Error: "--signer" requires a non-empty argument.'
                exit 1
            fi
            ;;
        # Operating Systems: reset all targets, then re-enable per letter.
        -o|--os)
            if [ -n "$2" ]
            then
                linux=false
                windows=false
                osx=false
                aarch64=false
                if [[ "$2" = *"l"* ]]
                then
                    linux=true
                fi
                if [[ "$2" = *"w"* ]]
                then
                    windows=true
                fi
                if [[ "$2" = *"x"* ]]
                then
                    osx=true
                fi
                if [[ "$2" = *"a"* ]]
                then
                    aarch64=true
                fi
                shift
            else
                echo 'Error: "--os" requires an argument containing an l (for linux), w (for windows), x (for Mac OSX), or a (for aarch64)\n'
                exit 1
            fi
            ;;
        # Help message
        -h|--help)
            echo "$usage"
            exit 0
            ;;
        # Commit or branch
        -c|--commit)
            commit=true
            ;;
        # Number of Processes
        -j)
            if [ -n "$2" ]
            then
                proc=$2
                shift
            else
                echo 'Error: "-j" requires an argument'
                exit 1
            fi
            ;;
        # Memory to allocate
        -m)
            if [ -n "$2" ]
            then
                mem=$2
                shift
            else
                echo 'Error: "-m" requires an argument'
                exit 1
            fi
            ;;
        # URL
        # BUGFIX: the help text advertises '-u|--url', but only '-u' was
        # matched here; accept both spellings.
        -u|--url)
            if [ -n "$2" ]
            then
                url=$2
                shift
            else
                echo 'Error: "-u" requires an argument'
                exit 1
            fi
            ;;
        # kvm
        --kvm)
            lxc=false
            ;;
        # Detach sign: produce assert files only, never commit.
        --detach-sign)
            signProg="true"
            commitFiles=false
            ;;
        # Commit files
        --no-commit)
            commitFiles=false
            ;;
        # Setup
        --setup)
            setup=true
            ;;
        *) # Default case: If no more options then break out of the loop.
            break
    esac
    shift
done
# Set up LXC
if [[ $lxc = true ]]
then
    export USE_LXC=1
    export LXC_BRIDGE=lxcbr0
    # Bring up the bridge with the address gitian's LXC guests expect.
    sudo ifconfig lxcbr0 up 10.0.2.2
fi

# Check for OSX SDK — without it the OSX descriptor cannot build.
if [[ ! -e "gitian-builder/inputs/MacOSX10.11.sdk.tar.gz" && $osx == true ]]
then
    echo "Cannot build for OSX, SDK does not exist. Will build for other OSes"
    osx=false
fi

# Get signer: first positional argument remaining after option parsing.
if [[ -n "$1" ]]
then
    SIGNER=$1
    shift
fi

# Get version: second positional argument remaining after option parsing.
if [[ -n "$1" ]]
then
    VERSION=$1
    COMMIT=$VERSION
    shift
fi

# Check that a signer is specified
if [[ $SIGNER == "" ]]
then
    echo "$scriptName: Missing signer."
    echo "Try $scriptName --help for more information"
    exit 1
fi

# Check that a version is specified
if [[ $VERSION == "" ]]
then
    echo "$scriptName: Missing version."
    echo "Try $scriptName --help for more information"
    exit 1
fi

# Add a "v" if no -c: release tags are named v<version>, while commits and
# branches are checked out verbatim.
if [[ $commit = false ]]
then
    COMMIT="v${VERSION}"
fi
echo ${COMMIT}

# Setup build environment (requires a Debian-based host; see --setup help).
if [[ $setup = true ]]
then
    sudo apt-get install ruby apache2 git apt-cacher-ng python-vm-builder qemu-kvm qemu-utils
    git clone https://github.com/mtcoin/gitian.sigs.git
    git clone https://github.com/mtcoin/mtcoin-detached-sigs.git
    git clone https://github.com/devrandom/gitian-builder.git
    pushd ./gitian-builder
    if [[ -n "$USE_LXC" ]]
    then
        sudo apt-get install lxc
        bin/make-base-vm --suite trusty --arch amd64 --lxc
    else
        bin/make-base-vm --suite trusty --arch amd64
    fi
    popd
fi

# Set up build: check out the requested tag/commit in the source tree.
pushd ./mtcoin
git fetch
git checkout ${COMMIT}
popd
# Build
if [[ $build = true ]]
then
    # Make output folder
    mkdir -p ./mtcoin-binaries/${VERSION}

    # Build Dependencies
    echo ""
    echo "Building Dependencies"
    echo ""
    pushd ./gitian-builder
    mkdir -p inputs
    wget -N -P inputs $osslPatchUrl
    wget -N -P inputs $osslTarUrl
    make -C ../mtcoin/depends download SOURCES_PATH=`pwd`/cache/common

    # Linux
    if [[ $linux = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Linux"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit mtcoin=${COMMIT} --url mtcoin=${url} ../mtcoin/contrib/gitian-descriptors/gitian-linux.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-linux --destination ../gitian.sigs/ ../mtcoin/contrib/gitian-descriptors/gitian-linux.yml
        mv build/out/mtcoin-*.tar.gz build/out/src/mtcoin-*.tar.gz ../mtcoin-binaries/${VERSION}
    fi

    # Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Windows"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit mtcoin=${COMMIT} --url mtcoin=${url} ../mtcoin/contrib/gitian-descriptors/gitian-win.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-unsigned --destination ../gitian.sigs/ ../mtcoin/contrib/gitian-descriptors/gitian-win.yml
        mv build/out/mtcoin-*-win-unsigned.tar.gz inputs/mtcoin-win-unsigned.tar.gz
        mv build/out/mtcoin-*.zip build/out/mtcoin-*.exe ../mtcoin-binaries/${VERSION}
    fi

    # Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit mtcoin=${COMMIT} --url mtcoin=${url} ../mtcoin/contrib/gitian-descriptors/gitian-osx.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-unsigned --destination ../gitian.sigs/ ../mtcoin/contrib/gitian-descriptors/gitian-osx.yml
        mv build/out/mtcoin-*-osx-unsigned.tar.gz inputs/mtcoin-osx-unsigned.tar.gz
        mv build/out/mtcoin-*.tar.gz build/out/mtcoin-*.dmg ../mtcoin-binaries/${VERSION}
    fi

    # AArch64
    if [[ $aarch64 = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} AArch64"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit mtcoin=${COMMIT} --url mtcoin=${url} ../mtcoin/contrib/gitian-descriptors/gitian-aarch64.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-aarch64 --destination ../gitian.sigs/ ../mtcoin/contrib/gitian-descriptors/gitian-aarch64.yml
        mv build/out/mtcoin-*.tar.gz build/out/src/mtcoin-*.tar.gz ../mtcoin-binaries/${VERSION}
    fi
    # BUGFIX: the AArch64 block above was missing its closing 'fi'.  That left
    # the 'if [[ $build = true ]]' unbalanced at end of file (a syntax error)
    # and swallowed the popd and the sig-commit step into the aarch64 branch,
    # so sigs were only committed when building for aarch64.
    popd

    if [[ $commitFiles = true ]]
    then
        # Commit to gitian.sigs repo
        echo ""
        echo "Committing ${VERSION} Unsigned Sigs"
        echo ""
        pushd gitian.sigs
        git add ${VERSION}-linux/${SIGNER}
        git add ${VERSION}-aarch64/${SIGNER}
        git add ${VERSION}-win-unsigned/${SIGNER}
        git add ${VERSION}-osx-unsigned/${SIGNER}
        git commit -a -m "Add ${VERSION} unsigned sigs for ${SIGNER}"
        popd
    fi
fi
# Verify the build: check every release's assert files against the
# signatures collected in gitian.sigs.
if [[ $verify = true ]]
then
    pushd ./gitian-builder
    # Linux
    echo ""
    echo "Verifying v${VERSION} Linux"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-linux ../mtcoin/contrib/gitian-descriptors/gitian-linux.yml
    # Windows
    echo ""
    echo "Verifying v${VERSION} Windows"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-unsigned ../mtcoin/contrib/gitian-descriptors/gitian-win.yml
    # Mac OSX
    echo ""
    echo "Verifying v${VERSION} Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-unsigned ../mtcoin/contrib/gitian-descriptors/gitian-osx.yml
    # AArch64
    echo ""
    echo "Verifying v${VERSION} AArch64"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-aarch64 ../mtcoin/contrib/gitian-descriptors/gitian-aarch64.yml
    # Signed Windows
    echo ""
    echo "Verifying v${VERSION} Signed Windows"
    echo ""
    # BUGFIX: this step previously verified ${VERSION}-osx-signed against the
    # OSX signer descriptor (a copy/paste of the block below), so the signed
    # Windows build was never actually verified.
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-signed ../mtcoin/contrib/gitian-descriptors/gitian-win-signer.yml
    # Signed Mac OSX
    echo ""
    echo "Verifying v${VERSION} Signed Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-signed ../mtcoin/contrib/gitian-descriptors/gitian-osx-signer.yml
    popd
fi
# Sign binaries: combine the detached signatures (checked out at ${COMMIT})
# with the unsigned builds produced earlier.
if [[ $sign = true ]]
then
    pushd ./gitian-builder
    # Sign Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Windows"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../mtcoin/contrib/gitian-descriptors/gitian-win-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-signed --destination ../gitian.sigs/ ../mtcoin/contrib/gitian-descriptors/gitian-win-signer.yml
        mv build/out/mtcoin-*win64-setup.exe ../mtcoin-binaries/${VERSION}
        mv build/out/mtcoin-*win32-setup.exe ../mtcoin-binaries/${VERSION}
    fi
    # Sign Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../mtcoin/contrib/gitian-descriptors/gitian-osx-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-signed --destination ../gitian.sigs/ ../mtcoin/contrib/gitian-descriptors/gitian-osx-signer.yml
        mv build/out/mtcoin-osx-signed.dmg ../mtcoin-binaries/${VERSION}/mtcoin-${VERSION}-osx.dmg
    fi
    popd

    if [[ $commitFiles = true ]]
    then
        # Commit Sigs
        pushd gitian.sigs
        echo ""
        echo "Committing ${VERSION} Signed Sigs"
        echo ""
        git add ${VERSION}-win-signed/${SIGNER}
        git add ${VERSION}-osx-signed/${SIGNER}
        git commit -a -m "Add ${VERSION} signed binary sigs for ${SIGNER}"
        popd
    fi
fi
|
# Emit a literal S-expression on stdout.  The here-doc body contains no '$'
# or backquotes, so nothing is expanded even with the unquoted delimiter.
cat <<EOF
(cons '(
a ()
) consider-generating-lisp-code)
EOF
|
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.deskclock;
import android.app.Activity;
import android.app.AlarmManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.database.ContentObserver;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.provider.Settings;
import android.support.annotation.NonNull;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.format.DateUtils;
import android.view.GestureDetector;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextClock;
import android.widget.TextView;
import com.google.android.deskclock.data.City;
import com.google.android.deskclock.data.CityListener;
import com.google.android.deskclock.data.DataModel;
import com.google.android.deskclock.events.Events;
import com.google.android.deskclock.uidata.UiDataModel;
import com.google.android.deskclock.worldclock.CitySelectionActivity;
import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
import static android.app.AlarmManager.ACTION_NEXT_ALARM_CLOCK_CHANGED;
import static android.view.View.GONE;
import static android.view.View.INVISIBLE;
import static android.view.View.VISIBLE;
import static com.google.android.deskclock.uidata.UiDataModel.Tab.CLOCKS;
import static java.util.Calendar.DAY_OF_WEEK;
/**
* Fragment that shows the clock (analog or digital), the next alarm info and the world clock.
*/
/**
 * Fragment that shows the clock (analog or digital), the next alarm info and the world clock.
 */
public final class ClockFragment extends DeskClockFragment {

    // Updates dates in the UI on every quarter-hour.
    private final Runnable mQuarterHourUpdater = new QuarterHourRunnable();

    // Updates the UI in response to changes to the scheduled alarm.
    private BroadcastReceiver mAlarmChangeReceiver;

    // Detects changes to the next scheduled alarm pre-L.
    private ContentObserver mAlarmObserver;

    // Main clock views; null when the layout has no main clock pane.
    private TextClock mDigitalClock;
    private AnalogClock mAnalogClock;
    private View mClockFrame;

    // World-clock list and its adapter.
    private SelectedCitiesAdapter mCityAdapter;
    private RecyclerView mCityList;

    // Cached date format strings, refreshed in onCreateView/onResume.
    private String mDateFormat;
    private String mDateFormatForAccessibility;

    /**
     * The public no-arg constructor required by all fragments.
     */
    public ClockFragment() {
        super(CLOCKS);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Pre-L: poll the deprecated settings entry via a ContentObserver.
        // L and later: listen for ACTION_NEXT_ALARM_CLOCK_CHANGED instead.
        mAlarmObserver = Utils.isPreL() ? new AlarmObserverPreL() : null;
        mAlarmChangeReceiver = Utils.isLOrLater() ? new AlarmChangedBroadcastReceiver() : null;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle icicle) {
        super.onCreateView(inflater, container, icicle);

        final View fragmentView = inflater.inflate(R.layout.clock_fragment, container, false);

        mDateFormat = getString(R.string.abbrev_wday_month_day_no_year);
        mDateFormatForAccessibility = getString(R.string.full_wday_month_day_no_year);

        mCityAdapter = new SelectedCitiesAdapter(getActivity(), mDateFormat,
                mDateFormatForAccessibility);

        mCityList = (RecyclerView) fragmentView.findViewById(R.id.cities);
        mCityList.setLayoutManager(new LinearLayoutManager(getActivity()));
        mCityList.setAdapter(mCityAdapter);
        // Apply city-list changes without item animations.
        mCityList.setItemAnimator(null);
        DataModel.getDataModel().addCityListener(mCityAdapter);

        final ScrollPositionWatcher scrollPositionWatcher = new ScrollPositionWatcher();
        mCityList.addOnScrollListener(scrollPositionWatcher);

        final Context context = container.getContext();
        mCityList.setOnTouchListener(new CityListOnLongClickListener(context));

        // Long-press anywhere on the fragment starts the screen saver.
        fragmentView.setOnLongClickListener(new StartScreenSaverListener());

        // On tablet landscape, the clock frame will be a distinct view. Otherwise, it'll be added
        // on as a header to the main listview.
        mClockFrame = fragmentView.findViewById(R.id.main_clock_left_pane);
        if (mClockFrame != null) {
            mDigitalClock = (TextClock) mClockFrame.findViewById(R.id.digital_clock);
            mAnalogClock = (AnalogClock) mClockFrame.findViewById(R.id.analog_clock);
            Utils.setClockIconTypeface(mClockFrame);
            Utils.updateDate(mDateFormat, mDateFormatForAccessibility, mClockFrame);
            Utils.setClockStyle(mDigitalClock, mAnalogClock);
            Utils.setClockSecondsEnabled(mDigitalClock, mAnalogClock);
        }

        // Schedule a runnable to update the date every quarter hour.
        UiDataModel.getUiDataModel().addQuarterHourCallback(mQuarterHourUpdater, 100);

        return fragmentView;
    }

    @Override
    public void onResume() {
        super.onResume();

        final Activity activity = getActivity();

        mDateFormat = getString(R.string.abbrev_wday_month_day_no_year);
        mDateFormatForAccessibility = getString(R.string.full_wday_month_day_no_year);

        // Watch for system events that effect clock time or format.
        if (mAlarmChangeReceiver != null) {
            final IntentFilter filter = new IntentFilter(ACTION_NEXT_ALARM_CLOCK_CHANGED);
            activity.registerReceiver(mAlarmChangeReceiver, filter);
        }

        // Resume can be invoked after changing the clock style or seconds display.
        if (mDigitalClock != null && mAnalogClock != null) {
            Utils.setClockStyle(mDigitalClock, mAnalogClock);
            Utils.setClockSecondsEnabled(mDigitalClock, mAnalogClock);
        }

        final View view = getView();
        if (view != null && view.findViewById(R.id.main_clock_left_pane) != null) {
            // Center the main clock frame by hiding the world clocks when none are selected.
            mCityList.setVisibility(mCityAdapter.getItemCount() == 0 ? GONE : VISIBLE);
        }

        refreshAlarm();

        // Alarm observer is null on L or later.
        if (mAlarmObserver != null) {
            @SuppressWarnings("deprecation")
            final Uri uri = Settings.System.getUriFor(Settings.System.NEXT_ALARM_FORMATTED);
            activity.getContentResolver().registerContentObserver(uri, false, mAlarmObserver);
        }
    }

    @Override
    public void onPause() {
        super.onPause();

        final Activity activity = getActivity();
        // Unregister whichever alarm-change mechanism was registered in onResume.
        if (mAlarmChangeReceiver != null) {
            activity.unregisterReceiver(mAlarmChangeReceiver);
        }
        if (mAlarmObserver != null) {
            activity.getContentResolver().unregisterContentObserver(mAlarmObserver);
        }
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        // Drop the callbacks registered in onCreateView.
        UiDataModel.getUiDataModel().removePeriodicCallback(mQuarterHourUpdater);
        DataModel.getDataModel().removeCityListener(mCityAdapter);
    }

    @Override
    public void onFabClick(@NonNull ImageView fab) {
        // The fab opens the world-clock city picker.
        startActivity(new Intent(getActivity(), CitySelectionActivity.class));
    }

    @Override
    public void onUpdateFab(@NonNull ImageView fab) {
        fab.setVisibility(VISIBLE);
        fab.setImageResource(R.drawable.ic_public);
        fab.setContentDescription(fab.getResources().getString(R.string.button_cities));
    }

    @Override
    public void onUpdateFabButtons(@NonNull Button left, @NonNull Button right) {
        // This tab uses no left/right fab buttons.
        left.setVisibility(INVISIBLE);
        right.setVisibility(INVISIBLE);
    }

    /**
     * Refresh the next alarm time.
     */
    private void refreshAlarm() {
        if (mClockFrame != null) {
            Utils.refreshAlarm(getActivity(), mClockFrame);
        } else {
            // No distinct clock pane: the main clock lives in the list adapter.
            mCityAdapter.refreshAlarm();
        }
    }

    /**
     * Long pressing over the main clock starts the screen saver.
     */
    private final class StartScreenSaverListener implements View.OnLongClickListener {

        @Override
        public boolean onLongClick(View view) {
            startActivity(new Intent(getActivity(), ScreensaverActivity.class)
                    .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
                    .putExtra(Events.EXTRA_EVENT_LABEL, R.string.label_deskclock));
            return true;
        }
    }

    /**
     * Long pressing over the city list starts the screen saver.
     */
    private final class CityListOnLongClickListener extends GestureDetector.SimpleOnGestureListener
            implements View.OnTouchListener {

        private final GestureDetector mGestureDetector;

        private CityListOnLongClickListener(Context context) {
            mGestureDetector = new GestureDetector(context, this);
        }

        @Override
        public void onLongPress(MotionEvent e) {
            // Delegate to the fragment view's long-click handler (screen saver).
            final View view = getView();
            if (view != null) {
                view.performLongClick();
            }
        }

        @Override
        public boolean onDown(MotionEvent e) {
            // Must return true so the gesture detector keeps tracking the gesture.
            return true;
        }

        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return mGestureDetector.onTouchEvent(event);
        }
    }

    /**
     * This runnable executes at every quarter-hour (e.g. 1:00, 1:15, 1:30, 1:45, etc...) and
     * updates the dates displayed within the UI. Quarter-hour increments were chosen to accommodate
     * the "weirdest" timezones (e.g. Nepal is UTC/GMT +05:45).
     */
    private final class QuarterHourRunnable implements Runnable {
        @Override
        public void run() {
            mCityAdapter.notifyDataSetChanged();
        }
    }

    /**
     * Prior to L, a ContentObserver was used to monitor changes to the next scheduled alarm.
     * In L and beyond this is accomplished via a system broadcast of
     * {@link AlarmManager#ACTION_NEXT_ALARM_CLOCK_CHANGED}.
     */
    private final class AlarmObserverPreL extends ContentObserver {
        private AlarmObserverPreL() {
            super(new Handler());
        }

        @Override
        public void onChange(boolean selfChange) {
            refreshAlarm();
        }
    }

    /**
     * Update the display of the scheduled alarm as it changes.
     */
    private final class AlarmChangedBroadcastReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            refreshAlarm();
        }
    }

    /**
     * Updates the vertical scroll state of this tab in the {@link UiDataModel} as the user scrolls
     * the recyclerview or when the size/position of elements within the recyclerview changes.
     */
    private final class ScrollPositionWatcher extends RecyclerView.OnScrollListener
            implements View.OnLayoutChangeListener {
        @Override
        public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
            setTabScrolledToTop(Utils.isScrolledToTop(mCityList));
        }

        @Override
        public void onLayoutChange(View v, int left, int top, int right, int bottom,
                int oldLeft, int oldTop, int oldRight, int oldBottom) {
            setTabScrolledToTop(Utils.isScrolledToTop(mCityList));
        }
    }

    /**
     * This adapter lists all of the selected world clocks. Optionally, it also includes a clock at
     * the top for the home timezone if "Automatic home clock" is turned on in settings and the
     * current time at home does not match the current time in the timezone of the current location.
     * If the phone is in portrait mode it will also include the main clock at the top.
     */
    private static final class SelectedCitiesAdapter extends RecyclerView.Adapter
            implements CityListener {

        // Layout resource ids double as view-type ids.
        private final static int MAIN_CLOCK = R.layout.main_clock_frame;
        private final static int WORLD_CLOCK = R.layout.world_clock_item;

        private final LayoutInflater mInflater;
        private final Context mContext;
        private final boolean mIsPortrait;
        private final boolean mShowHomeClock;
        private final String mDateFormat;
        private final String mDateFormatForAccessibility;

        private SelectedCitiesAdapter(Context context, String dateFormat,
                String dateFormatForAccessibility) {
            mContext = context;
            mDateFormat = dateFormat;
            mDateFormatForAccessibility = dateFormatForAccessibility;
            mInflater = LayoutInflater.from(context);
            mIsPortrait = Utils.isPortrait(context);
            mShowHomeClock = DataModel.getDataModel().getShowHomeClock();
        }

        @Override
        public int getItemViewType(int position) {
            // In portrait the main clock occupies position 0.
            if (position == 0 && mIsPortrait) {
                return MAIN_CLOCK;
            }
            return WORLD_CLOCK;
        }

        @Override
        public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
            final View view = mInflater.inflate(viewType, parent, false);
            switch (viewType) {
                case WORLD_CLOCK:
                    return new CityViewHolder(view);
                case MAIN_CLOCK:
                    return new MainClockViewHolder(view);
                default:
                    throw new IllegalArgumentException("View type not recognized");
            }
        }

        @Override
        public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
            final int viewType = getItemViewType(position);
            switch (viewType) {
                case WORLD_CLOCK:
                    // Retrieve the city to bind.
                    final City city;
                    // If showing home clock, put it at the top
                    if (mShowHomeClock && position == (mIsPortrait ? 1 : 0)) {
                        city = getHomeCity();
                    } else {
                        // Offset by the optional main clock and home clock rows.
                        final int positionAdjuster = (mIsPortrait ? 1 : 0)
                                + (mShowHomeClock ? 1 : 0);
                        city = getCities().get(position - positionAdjuster);
                    }
                    ((CityViewHolder) holder).bind(mContext, city, position, mIsPortrait);
                    break;
                case MAIN_CLOCK:
                    ((MainClockViewHolder) holder).bind(mContext, mDateFormat,
                            mDateFormatForAccessibility, getItemCount() > 1);
                    break;
                default:
                    throw new IllegalArgumentException("Unexpected view type: " + viewType);
            }
        }

        @Override
        public int getItemCount() {
            final int mainClockCount = mIsPortrait ? 1 : 0;
            final int homeClockCount = mShowHomeClock ? 1 : 0;
            final int worldClockCount = getCities().size();
            return mainClockCount + homeClockCount + worldClockCount;
        }

        private City getHomeCity() {
            return DataModel.getDataModel().getHomeCity();
        }

        private List<City> getCities() {
            return DataModel.getDataModel().getSelectedCities();
        }

        private void refreshAlarm() {
            // The alarm readout lives in the main clock row (position 0, portrait only).
            if (mIsPortrait && getItemCount() > 0) {
                notifyItemChanged(0);
            }
        }

        @Override
        public void citiesChanged(List<City> oldCities, List<City> newCities) {
            notifyDataSetChanged();
        }

        /** Binds one world-clock row: name, clock and hours-ahead/behind label. */
        private static final class CityViewHolder extends RecyclerView.ViewHolder {

            private final TextView mName;
            private final TextClock mDigitalClock;
            private final AnalogClock mAnalogClock;
            private final TextView mHoursAhead;

            private CityViewHolder(View itemView) {
                super(itemView);

                mName = (TextView) itemView.findViewById(R.id.city_name);
                mDigitalClock = (TextClock) itemView.findViewById(R.id.digital_clock);
                mAnalogClock = (AnalogClock) itemView.findViewById(R.id.analog_clock);
                mHoursAhead = (TextView) itemView.findViewById(R.id.hours_ahead);
            }

            private void bind(Context context, City city, int position, boolean isPortrait) {
                final String cityTimeZoneId = city.getTimeZone().getID();

                // Configure the digital clock or analog clock depending on the user preference.
                if (DataModel.getDataModel().getClockStyle() == DataModel.ClockStyle.ANALOG) {
                    mDigitalClock.setVisibility(GONE);
                    mAnalogClock.setVisibility(VISIBLE);
                    mAnalogClock.setTimeZone(cityTimeZoneId);
                    mAnalogClock.enableSeconds(false);
                } else {
                    mAnalogClock.setVisibility(GONE);
                    mDigitalClock.setVisibility(VISIBLE);
                    mDigitalClock.setTimeZone(cityTimeZoneId);
                    mDigitalClock.setFormat12Hour(Utils.get12ModeFormat(0.3f /* amPmRatio */,
                            false));
                    mDigitalClock.setFormat24Hour(Utils.get24ModeFormat(false));
                }

                // Supply top and bottom padding dynamically.
                final Resources res = context.getResources();
                final int padding = res.getDimensionPixelSize(R.dimen.medium_space_top);
                final int top = position == 0 && !isPortrait ? 0 : padding;
                final int left = itemView.getPaddingLeft();
                final int right = itemView.getPaddingRight();
                final int bottom = itemView.getPaddingBottom();
                itemView.setPadding(left, top, right, bottom);

                // Bind the city name.
                mName.setText(city.getName());

                // Compute if the city week day matches the weekday of the current timezone.
                final Calendar localCal = Calendar.getInstance(TimeZone.getDefault());
                final Calendar cityCal = Calendar.getInstance(city.getTimeZone());
                final boolean displayDayOfWeek =
                        localCal.get(DAY_OF_WEEK) != cityCal.get(DAY_OF_WEEK);

                // Compare offset from UTC time on today's date (daylight savings time, etc.)
                final TimeZone currentTimeZone = TimeZone.getDefault();
                final TimeZone cityTimeZone = TimeZone.getTimeZone(cityTimeZoneId);
                final long currentTimeMillis = System.currentTimeMillis();
                final long currentUtcOffset = currentTimeZone.getOffset(currentTimeMillis);
                final long cityUtcOffset = cityTimeZone.getOffset(currentTimeMillis);
                final long offsetDelta = cityUtcOffset - currentUtcOffset;

                final int hoursDifferent = (int) (offsetDelta / DateUtils.HOUR_IN_MILLIS);
                final int minutesDifferent = (int) (offsetDelta / DateUtils.MINUTE_IN_MILLIS) % 60;
                final boolean displayMinutes = offsetDelta % DateUtils.HOUR_IN_MILLIS != 0;
                final boolean isAhead = hoursDifferent > 0 || (hoursDifferent == 0
                        && minutesDifferent > 0);
                if (!Utils.isLandscape(context)) {
                    // Bind the number of hours ahead or behind, or hide if the time is the same.
                    final boolean displayDifference = hoursDifferent != 0 || displayMinutes;
                    mHoursAhead.setVisibility(displayDifference ? VISIBLE : GONE);
                    final String timeString = Utils.createHoursDifferentString(
                            context, displayMinutes, isAhead, hoursDifferent, minutesDifferent);
                    mHoursAhead.setText(displayDayOfWeek ?
                            (context.getString(isAhead ? R.string.world_hours_tomorrow
                                    : R.string.world_hours_yesterday, timeString))
                            : timeString);
                } else {
                    // Only tomorrow/yesterday should be shown in landscape view.
                    mHoursAhead.setVisibility(displayDayOfWeek ? View.VISIBLE : View.GONE);
                    if (displayDayOfWeek) {
                        mHoursAhead.setText(context.getString(isAhead ? R.string.world_tomorrow
                                : R.string.world_yesterday));
                    }
                }
            }
        }

        /** Binds the main clock row shown at the top of the list in portrait. */
        private static final class MainClockViewHolder extends RecyclerView.ViewHolder {

            private final View mHairline;
            private final TextClock mDigitalClock;
            private final AnalogClock mAnalogClock;

            private MainClockViewHolder(View itemView) {
                super(itemView);

                mHairline = itemView.findViewById(R.id.hairline);
                mDigitalClock = (TextClock) itemView.findViewById(R.id.digital_clock);
                mAnalogClock = (AnalogClock) itemView.findViewById(R.id.analog_clock);
                Utils.setClockIconTypeface(itemView);
            }

            private void bind(Context context, String dateFormat,
                    String dateFormatForAccessibility, boolean showHairline) {
                Utils.refreshAlarm(context, itemView);

                Utils.updateDate(dateFormat, dateFormatForAccessibility, itemView);
                Utils.setClockStyle(mDigitalClock, mAnalogClock);
                // The hairline separates the main clock from the world clocks below it.
                mHairline.setVisibility(showHairline ? VISIBLE : GONE);

                Utils.setClockSecondsEnabled(mDigitalClock, mAnalogClock);
            }
        }
    }
}
|
<gh_stars>0
require_relative '../Base/AbstractShape'

class SVG < SVGAbstract::SVGContainer
  # An SVG <path> element with a fluent API for building the 'd' attribute.
  class Path < SVGAbstract::AbstractShape
    # d - SVG path data string; it is re-serialized by parse_path into a
    # normalized form before storage.  An optional block receives self.
    def initialize(d)
      super()
      @name = 'path'
      @attributes[:d] = parse_path d
      yield self if block_given?
      return self
    end

    #Add some convenience methods for drawing the path section by section
    #These methods all have the same name as the corresponding SVG command
    #e.g. you can draw a triangle like this:
    #self.M(0,0).L(0,1).L(1,0).Z
    #Also available: m, l, H, h, V, v, C, c, S, s, Q, q, T, t, A, a
    # Map of command letter => argument grouping: an Integer means one group
    # of that many numbers; an Array means several comma-separated groups.
    @@command_arities = {:M=>2,:L=>2,:H=>1,:V=>1,:C=>[2,2,2],:S=>[2,2],
      :Q=>[2,2], :T=>2, :A=>7, :Z=>0}
    @@command_arities.each do |k,v|
      unless v.is_a? Array
        v = [v]
      end
      # Uppercase (absolute) command method, e.g. Path#L(1, 0).
      define_method(k) do |*args, &block|
        formatted_args = v.map{|n| args.slice!(0,n).join(' ')}.join(', ')
        @attributes[:d] += " #{k} #{formatted_args}"
        #we can't use yield/block_given? here inside define_method
        #yield self if block_given?
        block.call(self) if block.is_a? Proc
        return self
      end
      k = k.to_s.downcase.to_sym
      # Lowercase (relative) command method, e.g. Path#l(1, 0).
      define_method(k) do |*args, &block|
        formatted_args = v.map{|n| args.slice!(0,n).join(' ')}.join(', ')
        @attributes[:d] += " #{k} #{formatted_args}"
        #yield self if block_given?
        block.call(self) if block.is_a? Proc
        return self
      end
    end

    #add some alias names
    alias_method :to, :L
    alias_method :line_to, :L
    alias_method :move_to, :M
    alias_method :close_path, :Z
    alias_method :cubic_bezier, :C
    alias_method :quadratic_bezier, :Q
    alias_method :arc, :A

    #RVG allows paths to be specified that aren't actually quite
    #standard, but manages to interpret them. Thus we'll do that too
    # NOTE(review): the number regex /[-0-9]+/ does not match decimal points
    # or exponents, so fractional coordinates are split — confirm intended.
    def parse_path(d)
      parsed = ''
      # Scan command letters left to right, consuming the expected number of
      # numeric arguments after each recognized command.
      while (off = (d =~ /[A-Za-z]/))
        command = d[off]
        d.slice!(0..off)
        if @@command_arities.has_key? command.upcase.to_sym
          arity = @@command_arities[command.upcase.to_sym]
          arity = [arity] unless arity.is_a? Array
          args = arity.map do |n|
            nums = []
            n.times do |i|
              off = (d =~ /[-0-9]+/)
              off += ($~[0].length-1)
              d.slice!(0..off)
              nums << $~[0]
            end
            nums
          end
          parsed += " #{command} #{args.map{|s| s.join(' ')}.join(', ')}"
        end
      end
      return parsed.slice(1..-1) #cut off initial space
    end
    private :parse_path
  end
end
|
<gh_stars>0
import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load_module, reload_module, get_stdout, sanitize
exercise = 'src.sum_and_mean'
@points('1.sum_and_mean')
class SumAndMeanTest(unittest.TestCase):
    """TMC checks for the sum-and-mean exercise: the submitted program must
    read four numbers and report their sum and mean in a fixed sentence."""

    @classmethod
    def setUpClass(cls):
        # Load the student's module once; any input() executed at import time
        # receives the dummy value '0' so loading cannot block.
        with patch('builtins.input', return_value = '0'):
            cls.module = load_module(exercise, 'en')

    def test_1234(self):
        # The trailing AssertionError sentinel fires if input() is called a
        # fifth time, i.e. the program asks for too many inputs.
        with patch('builtins.input', side_effect = [ '1', '2', '3', '4', AssertionError("Input is asked too many times.") ]) as prompt:
            reload_module(self.module)
            output = get_stdout()
            self.assertFalse(prompt.call_count < 4, 'The program is expected to ask input four times.')
            self.assertTrue(len(output)>0, 'Your program does not print anything.')
            self.assertTrue('10' in output, 'The program does not print the sum of the numbers 1, 2, 3 and 4 correctly. Expected: 10'+ '\noutput was\n'+ str(output))
            self.assertTrue('2.5' in output, 'The program does not print the mean of the numbers 1, 2, 3 and 4 correctly. Expected: 2.5'+ '\noutput was\n'+ str(output))
            expected = "The sum of the numbers is 10 and the mean is 2.5"
            self.assertTrue(sanitize(expected) in sanitize(output), "with inputs 1, 2, 3 and 4 program is expected to print\n{}\nyour program's output was\n{}".format(expected, output))

    def test_additional_tests(self):
        # Each row is one set of four input strings fed to the program.
        testset = [
            [ '3', '7', '2', '8' ],
            [ '8', '-22', '75', '5' ],
            [ '0', '0', '0', '0' ],
        ]
        for a, b, c, d in testset:
            with patch('builtins.input', side_effect = [ a, b, c, d, AssertionError("Input is asked too many times.") ]) as prompt:
                reload_module(self.module)
                output = get_stdout()
                # NB: 'sum' deliberately mirrors the expected output wording,
                # but it shadows the builtin within this loop body.
                sum = int(a) + int(b) + int(c) + int(d)
                avg = sum / 4
                inputs = f"{a}, {b}, {c} and {d}"
                self.assertTrue(str(sum) in output, 'With inputs {} the sum is incorrectly calculated. Expected: {}'.format(inputs, sum))
                self.assertTrue(str(avg) in output, 'With inputs {} the mean is incorrectly calculated. Expected: {}'.format(inputs, avg))
                expected = f"The sum of the numbers is {sum} and the mean is {avg}"
                self.assertTrue(sanitize(expected) in sanitize(output), "With inputs {} your program is expected to printout: \n{}".format(inputs, expected))

if __name__ == '__main__':
    unittest.main()
|
// -----------------------------------------------------------------------------
// MIT License
//
// Copyright (c) 2020 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// -----------------------------------------------------------------------------
/**
* \brief Test the string functions.
*/
#include "IllustratorSDK.h"
#include "test_string_functions.h"
#include "testing_utlity.h"
#include "../utility/string_functions.h"
/**
 * \brief Run the unit tests for the string utility functions in L2A::UTIL.
 *
 * Covers integer<->string conversion (plain and zero-padded), the
 * overloaded ai::UnicodeString concatenation operators, StartsWith,
 * StringReplaceAll, SplitString and StringHash.
 *
 * \param ut Unit-test helper collecting the test name and comparison results.
 */
void L2A::TEST::TestStringFunctions(L2A::TEST::UTIL::UnitTest& ut)
{
    // Set test name.
    ut.SetTestName(ai::UnicodeString("StringFunctions"));

    // Convert integer to string.
    ai::UnicodeString int_to_string = L2A::UTIL::IntegerToString(1234567890);
    ut.CompareStr(int_to_string, ai::UnicodeString("1234567890"));

    // Convert integer to string, zero-padded to a fixed width of 15.
    int_to_string = L2A::UTIL::IntegerToString(1234567890, 15);
    ut.CompareStr(int_to_string, ai::UnicodeString("000001234567890"));

    // Convert string to integer; leading zeros must be accepted.
    int string_to_int = L2A::UTIL::StringToInteger(ai::UnicodeString("1234567890"));
    ut.CompareInt(string_to_int, 1234567890);
    string_to_int = L2A::UTIL::StringToInteger(ai::UnicodeString("000001234567890"));
    ut.CompareInt(string_to_int, 1234567890);

    // Test the overloaded operators: building the same string via += and
    // via + (including mixing raw char* operands) must agree.
    ai::UnicodeString old_operator("value1");
    old_operator += ai::UnicodeString("value2");
    old_operator += ai::UnicodeString("value3");
    old_operator += ai::UnicodeString("value4");
    old_operator += "value5";
    old_operator += ai::UnicodeString("value6");
    ai::UnicodeString new_operator = ai::UnicodeString("value2") + ai::UnicodeString("value3");
    new_operator = "value1" + new_operator;
    new_operator = new_operator + "value4";
    new_operator = new_operator + "value5" + ai::UnicodeString("value6");
    ut.CompareStr(old_operator, new_operator);

    // Test the starts with functions.  The third argument presumably
    // toggles case-insensitive comparison (the "start" vs "starT" case
    // only matches when it is true) — confirm against string_functions.h.
    ai::UnicodeString long_name("starTName");
    ai::UnicodeString start_name("starT");
    ai::UnicodeString start_name_case("start");
    ai::UnicodeString start_name_wrong("stat");
    ut.CompareInt(1, L2A::UTIL::StartsWith(long_name, start_name));
    ut.CompareInt(0, L2A::UTIL::StartsWith(long_name, start_name_case));
    ut.CompareInt(1, L2A::UTIL::StartsWith(long_name, start_name_case, true));
    ut.CompareInt(0, L2A::UTIL::StartsWith(long_name, start_name_wrong));

    // Test the replace functions.  Note that the replacement text may
    // itself contain the search token ("s$other and more") without being
    // re-replaced, and that \r\n is normalised to \n.
    ai::UnicodeString full_string("hello $name with $other $other $name and line breaks \n\n\r\n\r\n\n just like that");
    L2A::UTIL::StringReplaceAll(full_string, ai::UnicodeString("$name"), ai::UnicodeString("Full Name"));
    L2A::UTIL::StringReplaceAll(full_string, ai::UnicodeString("$other"), ai::UnicodeString("s$other and more"));
    L2A::UTIL::StringReplaceAll(full_string, ai::UnicodeString("\r\n"), ai::UnicodeString("\n"));
    ut.CompareStr(full_string,
        ai::UnicodeString("hello Full Name with s$other and more s$other and more Full Name and line breaks \n\n\n\n\n "
                          "just like that"));

    // Test the split function with single- and multi-character separators;
    // adjacent separators and a trailing separator yield empty tokens.
    ai::UnicodeString split_string("text1%text22%text333");
    std::vector<ai::UnicodeString> split_ref = {
        ai::UnicodeString("text1"), ai::UnicodeString("text22"), ai::UnicodeString("text333")};
    auto split = L2A::UTIL::SplitString(split_string, ai::UnicodeString("%"));
    ut.CompareStringVector(split, split_ref);
    split_string = ai::UnicodeString("text1%%text22%%text333");
    split = L2A::UTIL::SplitString(split_string, ai::UnicodeString("%%"));
    ut.CompareStringVector(split, split_ref);
    split_string = ai::UnicodeString("text1%%text22%%text333%");
    split_ref = {ai::UnicodeString("text1"), ai::UnicodeString(""), ai::UnicodeString("text22"), ai::UnicodeString(""),
        ai::UnicodeString("text333"), ai::UnicodeString("")};
    split = L2A::UTIL::SplitString(split_string, ai::UnicodeString("%"));
    ut.CompareStringVector(split, split_ref);

    // Test the hash function against a known digest of the shared fixture
    // string test_string_1_.
    ut.CompareStr(L2A::UTIL::StringHash(ai::UnicodeString(L2A::TEST::UTIL::test_string_1_)),
        ai::UnicodeString("4e496d7d9d7ae4a3"));
}
|
// Import the file system module for reading the module file
const fs = require('fs');
function resolveExportAll(modulePath) {
try {
// Read the content of the module file
const moduleContent = fs.readFileSync(modulePath, 'utf8');
// Create an empty object to store the exported bindings
const exportedBindings = {};
// Use a regular expression to match all export statements
const exportRegex = /export\s+(?:\*\s+as\s+\w+)?\s+from\s+['"](.*)['"]/g;
let match;
while ((match = exportRegex.exec(moduleContent)) !== null) {
// Read the content of the exported module
const exportedModulePath = match[1];
const exportedModuleContent = fs.readFileSync(exportedModulePath, 'utf8');
// Use a regular expression to extract the named exports
const namedExportRegex = /export\s+((?:const|let|var|function|class)\s+\w+)/g;
let namedMatch;
while ((namedMatch = namedExportRegex.exec(exportedModuleContent)) !== null) {
// Extract the exported binding name and add it to the exportedBindings object
const binding = namedMatch[1].split(' ')[1];
exportedBindings[binding] = true;
}
}
return exportedBindings;
} catch (error) {
// Handle any errors, such as file not found or permission issues
console.error('Error resolving export all:', error);
return {};
}
}
// Example usage.
// NOTE(review): './getActionInputs' has no extension and is resolved
// relative to the process working directory; if it cannot be read the
// error is logged and an empty object is printed.
const resolvedBindings = resolveExportAll('./getActionInputs');
console.log(resolvedBindings);
// Repository: barbarasilveiraf/Graphs_Dijkstra
package grafos;
public class Main {

    /**
     * Entry point: reads a graph from the input file, runs Dijkstra from
     * every vertex, counts how often each vertex appears on the shortest
     * paths (the "influence" measure) and writes the result.
     *
     * Arguments:
     *   args[0] - path + name of the input file
     *   args[1] - path + name of the output file
     */
    public static void main(String[] args) {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when arguments are missing.
        if (args.length < 2) {
            System.err.println("Usage: java grafos.Main <input file> <output file>");
            System.exit(1);
        }

        Grafo grafo = new Grafo();
        Arquivo a = new Arquivo(args[0], args[1], grafo);
        a.leArquivo();

        // Run Dijkstra's algorithm from every vertex.
        for (Vertice origem : grafo.vertices) {
            grafo.caminhosDijkstra(origem);
        }

        // Count how many times each vertex appears on the computed paths,
        // which gives the influence of the deputies.
        grafo.contaVerticesNosCaminhos();

        // Write the output file.
        a.escreveArquivo();
        System.out.println("Programa Executado!");
    }
}
|
#!chuck_extends project/settings/common.py

#!chuck_appends AUTHENTICATION_BACKENDS
# In case you want to use Mongoengine to handle auth operations
# uncomment the following:
# 'mongoengine.django.auth.MongoEngineBackend',

#!chuck_appends SETTINGS
# Connect to the project's MongoDB database.  $PROJECT_NAME is substituted
# by django-chuck when the project skeleton is generated.
import mongoengine
mongoengine.connect('db_$PROJECT_NAME')

# In case you want to use Mongoengine to handle sessions
# uncomment the following:
# SESSION_ENGINE = 'mongoengine.django.sessions'

# In case you want to use the MongoDB's GridFS feature for storage
# purposes uncomment the following 2 lines:
# from mongoengine.django.storage import GridFSStorage
# fs = GridFSStorage()
#!end
|
#!/usr/bin/env node

// Thin CLI wrapper: search for the term given as the first command-line
// argument and download every result.
const Application = require('../lib/cli')

const app = new Application()

;(async () => {
  const results = await app.search(process.argv[2])
  await app.download(results)
  console.log(`Downloads saved in ${app.dir}`)
})().catch(error => {
  // Without this handler any failed search/download surfaced as an
  // unhandled promise rejection; report it and exit non-zero so shell
  // callers can detect the failure.
  console.error(error)
  process.exitCode = 1
})
|
#!/bin/bash
# Run_QL_AMP4_Dhrystone.sh
#
# Launch the Imperas ISS running the Dhrystone benchmark on a 4-core
# little-endian MIPS32 24Kc model.  Requires the Imperas environment
# (IMPERAS_HOME, IMPERAS_ARCH, IMPERAS_ISS) to be set by the vendor setup.

# Check Environment
if [ -z "${IMPERAS_HOME}" ]; then
    echo "IMPERAS_HOME not set. Please check environment setup." >&2
    exit 1
fi

# Check Installation supports this demo
if [ -e "${IMPERAS_HOME}/bin/${IMPERAS_ARCH}/checkinstall.exe" ]; then
    "${IMPERAS_HOME}/bin/${IMPERAS_ARCH}/checkinstall.exe" --group run --noruntime -p install.pkg --nobanner || exit 1
fi

# shellcheck disable=SC2086 -- IMPERAS_ISS may intentionally hold a command
# plus leading arguments, so it is left unquoted to allow word splitting.
${IMPERAS_ISS} --verbose --output imperas.log \
    --program ../../../Applications/dhrystone/dhrystone.IMG_MIPS32R2LE-O3-g.elf \
    --processorvendor mips.ovpworld.org --processorname mips32_r1r5 --variant 24Kc \
    --numprocessors 4 \
    --parameter endian=little --semihostname mips32Newlib --semihostvendor mips.ovpworld.org \
    --parallel \
    "$@" \
    -argv 4000000
|
package training.dynamicprogramming;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
import java.util.Map;
import java.util.function.ToIntFunction;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * 309. Best Time to Buy and Sell Stock with Cooldown:
 * https://leetcode-cn.com/problems/best-time-to-buy-and-sell-stock-with-cooldown/
 *
 * Given an integer array where the i-th element is the stock price on day i,
 * design an algorithm that computes the maximum profit.  You may complete as
 * many transactions as you like (buy and sell one share multiple times),
 * subject to the following constraints:
 * - You may not engage in multiple transactions at once (you must sell the
 *   stock before buying again).
 * - After selling, you cannot buy on the next day (i.e. a cooldown of 1 day).
 *
 * Similar to {@link E122_Medium_BestTimeToBuyAndSellStockII}, but with the
 * additional cooldown constraint.
 *
 * Example 1:
 * Input: [1,2,3,0,2]
 * Output: 3
 * Explanation: the transactions are [buy, sell, cooldown, buy, sell]
 */
public class E309_Medium_BestTimeToBuyAndSellStockWithCooldown {

    // Runs both fixed test cases against the given implementation.
    static void test(ToIntFunction<int[]> method) {
        assertEquals(method.applyAsInt(new int[]{1,2,3,0,2}), 3);
        assertEquals(method.applyAsInt(new int[]{1,2,4}), 3);
    }

    /**
     * Memoized top-down solution, structured like
     * {@link E122_Medium_BestTimeToBuyAndSellStockII}.
     *
     * LeetCode runtime: 204 ms - 20.55%
     * Memory: 39.2 MB - 5.19%
     */
    public int maxProfit(int[] prices) {
        return maxProfit(prices, 0, new HashMap<>((int) (prices.length / 0.75)));
    }

    // Maximum profit obtainable from day `lo` onward; `memory` caches the
    // result for each starting day.
    private int maxProfit(int[] prices, int lo, Map<Integer, Integer> memory) {
        if (lo >= prices.length - 1)
            return 0;
        int res = memory.getOrDefault(lo, -1);
        if (res != -1)
            return res;
        int curMin = prices[lo];
        for (int sell = lo + 1; sell < prices.length; sell++) {
            curMin = Math.min(curMin, prices[sell]);
            // Sell on day `sell`; the next buy can happen no earlier than
            // `sell + 2` because of the one-day cooldown.
            res = Math.max(res, prices[sell] - curMin + maxProfit(prices, sell + 2, memory));
        }
        memory.put(lo, res);
        return res;
    }

    @Test
    public void testMaxProfit() {
        test(this::maxProfit);
    }

    /**
     * See {@link E188_Hard_BestTimeToBuyAndSellStockIV#compressMethod(int, int[])}.
     *
     * LeetCode runtime: 1 ms - 99.48%
     * Memory: 36.3 MB - 86.23%
     */
    public int compressMethod(int[] prices) {
        final int days = prices.length;
        // lastNoHold is the "not holding" state from two iterations ago.
        int noHold = 0, hold = Integer.MIN_VALUE, lastNoHold = 0;
        for (int i = 0; i < days; i++) {
            // With unlimited transactions, states k and k - 1 coincide.
            // Save noHold first so the `hold` update below still sees
            // yesterday's value rather than today's.
            int tmp = noHold;
            noHold = Math.max(noHold, hold + prices[i]);
            // Cooldown transition: dp[i][1] = max(dp[i-1][1], dp[i-2][0] - prices[i]).
            // A buy on day i must come from the day i - 2 state, because a
            // sale is followed by a one-day cooldown.
            hold = Math.max(hold, lastNoHold - prices[i]);
            lastNoHold = tmp;
        }
        return noHold;
    }

    @Test
    public void testCompressMethod() {
        test(this::compressMethod);
    }
}
|
// gh_stars: 1000+
import * as React from 'react';
/** Props accepted by {@link ExpoModuleTemplateView}. */
export interface ExpoModuleTemplateViewProps {
  /** String value forwarded to the view; semantics not documented upstream. */
  greatProp: string;
}

/** Internal component state; currently empty. */
interface ExpoModuleTemplateViewState {
}

/**
 * Great view that would suit your needs!
 *
 * NOTE(review): the example below uses the tag name
 * `ExpoModuleTemplateNativeView` while this class is
 * `ExpoModuleTemplateView` — confirm which name is intended.
 *
 * @example
 * ```tsx
 * <ExpoModuleTemplateNativeView
 *   greatProp="great"
 * />
 * ```
 */
export default class ExpoModuleTemplateView extends React.Component<ExpoModuleTemplateViewProps, ExpoModuleTemplateViewState> {
  render(): JSX.Element;
}

export {};
|
// docs/src/components/menu/example.js
// Documentation example for the Menu component, rendered as a fenced JSX
// code block on the docs site.
// FIX: the example used <MenuItem> without importing it; the import below
// makes the snippet copy-pasteable (path assumed to follow the package's
// 'anchor-ui/<component>' convention — confirm against the library).
export default `
\`\`\`jsx
import React, { Component } from 'react';
import Menu from 'anchor-ui/menu';
import MenuItem from 'anchor-ui/menu-item';
import IconRocket from 'anchor-ui/icons/icon-rocket';

class MyComponent extends Component {
  state = {
    open: false,
    activeRoute: ''
  };

  toggleMenu() {
    this.setState({
      open: !this.state.open
    });
  }

  handleClick(location) {
    this.setState({
      activeRoute: location
    });
  }

  render() {
    const { open, activeRoute } = this.state;

    return (
      <section>
        <Menu
          closeMenu={this.toggleMenu}
          header="Menu"
          open={open}
          headerIcon={<IconRocket />}
          footer="Footer"
        >
          <MenuItem
            text="Home"
            onClick={() => this.handleClick('/')}
            active={activeRoute === '/'}
          />
          <MenuItem
            text="Docs"
            onClick={() => this.handleClick('/docs')}
            active={activeRoute === '/docs'}
          />
          <MenuItem
            text="Dashboard"
            onClick={() => this.handleClick('/dashboard')}
            active={activeRoute === '/dashboard'}
          />
        </Menu>
      </section>
    );
  }
}
\`\`\`
`;
|
#!/usr/bin/python3
import re
import jk_utils
import jk_logging
from jk_pathpatternmatcher2 import *
# Maps each path pattern to the regex string compilePattern() is expected
# to produce; None means the pattern is invalid and compilation must fail.
PATTERNS = {
    "": None,
    "some/***/path": None,
    #"some/path/": None,
    "some//path": None,
    "some/x**/path": None,
    "some/**x/path": None,
    "some/x**x/path": None,
    # NOTE(review): this expectation looks like a deliberate placeholder
    # ("**" would plausibly compile to r"^.*$") so the run reports a
    # failure here — confirm the intended value.
    "**": r"^xxxxxxxxxxxxx$",
    "**/path": r"^(.*/)?path$",
    "some/path": r"^some/path$",
    "/some/path": r"^/some/path$",
    "some/*/path": r"^some/[^/]*/path$",
    "some/**/path": r"^some/(.*/)?path$",
    "some/*/**/path": r"^some/[^/]*/(.*/)?path$",
    "some/path*": r"^some/path[^/]*$",
    "some/path*/xy": r"^some/path[^/]*/xy$",
    "some/path/*": r"^some/path/[^/]*$",
    "some/path/**": r"^some/path/.*$",
    "some/*.txt": r"^some/[^/]*\.txt$",
}
# Maps each pattern to a list of (candidate path, should-match) cases run
# against the expected regex from PATTERNS.  Patterns that are invalid
# (None in PATTERNS) carry an empty case list.
TESTS = {
    "": [],
    "some/***/path": [],
    #"some/path/": [],
    "some//path": [],
    "some/x**/path": [],
    "some/**x/path": [],
    "some/x**x/path": [],
    "**/path": [],
    "some/path": [
        ( "some/path", True ),
        ( "some", False ),
        ( "some/other/path", False ),
        ( "/some/path", False ),
        ( "/some", False ),
        ( "/some/other/path", False ),
    ],
    "/some/path": [
        ( "some/path", False ),
        ( "some", False ),
        ( "some/other/path", False ),
        ( "/some/path", True ),
        ( "/some", False ),
        ( "/some/other/path", False ),
    ],
    # "*" matches exactly one path component.
    "some/*/path": [
        ( "some/path", False ),
        ( "some/other/path", True ),
        ( "some/more/other/path", False ),
        ( "/some/path", False ),
        ( "/some/other/path", False ),
        ( "/some/more/other/path", False ),
    ],
    "some/*/**/path": [
        ( "some/path", False ),
        ( "some/other/path", True ),
        ( "some/more/other/path", True ),
        ( "/some/path", False ),
        ( "/some/other/path", False ),
        ( "/some/more/other/path", False ),
    ],
    # "**" matches zero or more path components.
    "some/**/path": [
        ( "some/path", True ),
        ( "some/other/path", True ),
        ( "some/more/other/path", True ),
        ( "/some/path", False ),
        ( "/some/other/path", False ),
        ( "/some/more/other/path", False ),
    ],
    # A trailing "*" matches any suffix within the same component.
    "some/path*": [
        ( "some/xy", False ),
        ( "some/path", True ),
        ( "some/pathXX", True ),
        ( "some/pathXX/as", False ),
        ( "/some/xy", False ),
        ( "/some/path", False ),
        ( "/some/pathXX", False ),
        ( "/some/pathXX/as", False ),
    ],
    "some/path*/xy": [
        ( "some/xy", False ),
        ( "some/path", False ),
        ( "some/path/xy", True ),
        ( "some/pathX", False ),
        ( "some/pathY/xy", True ),
        ( "some/pathY/abc/xy", False ),
        ( "/some/xy", False ),
        ( "/some/path", False ),
        ( "/some/path/xy", False ),
        ( "/some/pathX", False ),
        ( "/some/pathY/xy", False ),
        ( "/some/pathY/abc/xy", False ),
    ],
    "some/path/*": [
        ( "some/xy", False ),
        ( "some/path", False ),
        ( "some/path/XX", True ),
        ( "some/path/XX/as", False ),
        ( "/some/xy", False ),
        ( "/some/path", False ),
        ( "/some/path/XX", False ),
        ( "/some/path/XX/as", False ),
    ],
    "some/path/**": [
        ( "some/xy", False ),
        ( "some/path", False ),
        ( "some/path/XX", True ),
        ( "some/path/XX/as", True ),
        ( "/some/xy", False ),
        ( "/some/path", False ),
        ( "/some/path/XX", False ),
        ( "/some/path/XX/as", False ),
    ],
    "some/*.txt": [
        ( "some/abc", False ),
        ( "some/.tx", False ),
        ( "some/txt", False ),
        ( "some/.txt", True ),
        ( "some/abc.txt", True ),
        ( "/some/abc", False ),
        ( "/some/.tx", False ),
        ( "/some/txt", False ),
        ( "/some/.txt", False ),
        ( "/some/abc.txt", False ),
    ]
}
# Driver: for every pattern, (1) compile it and compare the generated regex
# text against the expectation from PATTERNS, then (2) run the match cases
# from TESTS against the expected regex.  jk_logging renders the report.
with jk_logging.wrapMain() as log:
    nSucceeded = 0
    nFailed = 0

    for testName, testRecords in TESTS.items():
        with log.descend("Testing: " + repr(testName)) as log2:
            regExProvided = PATTERNS[testName]
            regexCompiled = compilePattern(testName, raiseExceptionOnError=False)

            bResult = None
            if regexCompiled:
                # NOTE(review): .regexPattern is read before the isinstance
                # check below — presumably compilePattern only ever returns
                # PathPatternMatcher instances or falsy values; confirm.
                s = regexCompiled.regexPattern
                if isinstance(regexCompiled, PathPatternMatcher):
                    bResult = regexCompiled.regexPattern == regExProvided
                else:
                    bResult = False
            else:
                s = None
                if regExProvided:
                    # Compilation failed although a regex was expected.
                    bResult = False
                else:
                    # Invalid pattern correctly rejected.
                    bResult = True
            if bResult is None:
                raise jk_utils.ImplementationError()

            if bResult:
                nSucceeded += 1
                log2.info("OK : compiled = " + repr(s) + ", expected = " + repr(regExProvided))
            else:
                nFailed += 1
                log2.error("ERR : compiled = " + repr(s) + ", expected = " + repr(regExProvided))

            if regExProvided:
                # Sanity-check the expected regex itself against the
                # sample paths declared in TESTS.
                reTest = re.compile(regExProvided)
                for testStr, expectedResult in testRecords:
                    m = reTest.match(testStr)
                    bResult = m is not None
                    if bResult == expectedResult:
                        nSucceeded += 1
                        log2.info("OK : " + repr(testStr) + " :: result = " + str(bResult) + ", expected = " + str(expectedResult))
                    else:
                        nFailed += 1
                        log2.error("ERR : " + repr(testStr) + " :: result = " + str(bResult) + ", expected = " + str(expectedResult))

    # Summary: use error level if anything failed so it stands out.
    if nFailed:
        log.error("nSucceeded = " + str(nSucceeded))
        log.error("nFailed = " + str(nFailed))
    else:
        log.success("nSucceeded = " + str(nSucceeded))
        log.success("nFailed = " + str(nFailed))
#
|
// utils/padd-two.js
module.exports = number => ('00' + number).slice(-2);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.