text stringlengths 1 1.05M |
|---|
package com.atguigu.gmall.search.controller;
import com.atguigu.gmall.common.bean.ResponseVo;
import com.atguigu.gmall.search.pojo.SearchParamVo;
import com.atguigu.gmall.search.pojo.SearchResponseVo;
import com.atguigu.gmall.search.service.SearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
@RequestMapping("search")
public class SearchController {

    // Constructor injection (rather than field injection) makes the dependency
    // explicit, lets the field be final, and simplifies unit testing.
    private final SearchService searchService;

    @Autowired
    public SearchController(SearchService searchService) {
        this.searchService = searchService;
    }

    /**
     * Handles GET /search requests.
     *
     * @param paramVo search parameters bound from the query string
     * @param model   Spring MVC model populated for the view
     * @return the logical view name "search"
     */
    @GetMapping
    public String search(SearchParamVo paramVo, Model model) {
        SearchResponseVo responseVo = this.searchService.search(paramVo);
        // Expose both the results and the original query so the view can
        // render results and re-populate the search form/filters.
        model.addAttribute("response", responseVo);
        model.addAttribute("searchParam", paramVo);
        return "search";
    }
}
|
#!/bin/bash
# TIPC (train/infer pipeline CI) preparation: downloads the pretrained model
# and dataset required by the model named in the given config file.
source test_tipc/common_func.sh

set -o errexit
set -o nounset

# $1: path to the TIPC config file.
FILENAME=$1
# MODE be one of ['lite_train_lite_infer' 'lite_train_whole_infer' 'whole_train_whole_infer',
# 'whole_infer', 'klquant_whole_infer',
# 'cpp_infer', 'serving_infer']
MODE=$2

dataline=$(cat "${FILENAME}")

# parser params: split the config into an array of lines
# (unquoted expansion + IFS=newline word-splitting is intentional here)
IFS=$'\n'
lines=(${dataline})

# The training params
model_name=$(func_parser_value "${lines[1]}")
trainer_list=$(func_parser_value "${lines[14]}")
model_path=test_tipc/output/${model_name}/

# download pretrained model
# All expansions inside [ ] are quoted so an empty value fails the comparison
# instead of raising a shell syntax error; POSIX '=' is used consistently
# instead of mixing '=' and the bash-only '=='.
if [ "${MODE}" = "whole_infer" ] || [ "${MODE}" = "klquant_whole_infer" ]; then
    if [ "${model_name}" = "fcn_hrnetw18_small" ]; then
        wget -nc -P "$model_path" https://paddleseg.bj.bcebos.com/dygraph/humanseg/train/fcn_hrnetw18_small_v1_humanseg_192x192.zip
        cd "$model_path" && unzip fcn_hrnetw18_small_v1_humanseg_192x192.zip && cd -
    elif [ "${model_name}" = "pphumanseg_lite" ]; then
        wget -nc -P "$model_path" https://paddleseg.bj.bcebos.com/dygraph/humanseg/train/pphumanseg_lite_generic_192x192.zip
        cd "$model_path" && unzip pphumanseg_lite_generic_192x192.zip && cd -
    elif [ "${model_name}" = "deeplabv3p_resnet50" ]; then
        wget -nc -P "$model_path" https://paddleseg.bj.bcebos.com/dygraph/humanseg/train/deeplabv3p_resnet50_os8_humanseg_512x512_100k.zip
        cd "$model_path" && unzip deeplabv3p_resnet50_os8_humanseg_512x512_100k.zip && cd -
    elif [ "${model_name}" = "bisenetv2" ]; then
        wget -nc -P "$model_path" https://bj.bcebos.com/paddleseg/dygraph/cityscapes/bisenet_cityscapes_1024x1024_160k/model.pdparams
    elif [ "${model_name}" = "ocrnet_hrnetw18" ]; then
        wget -nc -P "$model_path" https://bj.bcebos.com/paddleseg/dygraph/cityscapes/ocrnet_hrnetw18_cityscapes_1024x512_160k/model.pdparams
    elif [ "${model_name}" = "segformer_b0" ]; then
        wget -nc -P "$model_path" https://bj.bcebos.com/paddleseg/dygraph/cityscapes/segformer_b0_cityscapes_1024x1024_160k/model.pdparams
    elif [ "${model_name}" = "stdc_stdc1" ]; then
        wget -nc -P "$model_path" https://bj.bcebos.com/paddleseg/dygraph/cityscapes/stdc1_seg_cityscapes_1024x512_80k/model.pdparams
    elif [ "${model_name}" = "ppmatting" ]; then
        wget -nc -P "$model_path" https://paddleseg.bj.bcebos.com/matting/models/modnet-mobilenetv2.pdparams
    fi
fi

# download data
if [ "${model_name}" = "fcn_hrnetw18_small" ] || [ "${model_name}" = "pphumanseg_lite" ] || [ "${model_name}" = "deeplabv3p_resnet50" ]; then
    rm -rf ./test_tipc/data/mini_supervisely
    wget -nc -P ./test_tipc/data/ https://paddleseg.bj.bcebos.com/humanseg/data/mini_supervisely.zip
    cd ./test_tipc/data/ && unzip mini_supervisely.zip && cd -
elif [ "${model_name}" = "ocrnet_hrnetw18" ] || [ "${model_name}" = "bisenetv2" ] || [ "${model_name}" = "segformer_b0" ] || [ "${model_name}" = "stdc_stdc1" ]; then
    rm -rf ./test_tipc/data/cityscapes
    wget -nc -P ./test_tipc/data/ https://paddleseg.bj.bcebos.com/dataset/cityscapes.tar
    cd ./test_tipc/data/ && tar -xvf cityscapes.tar && cd -
elif [ "${model_name}" = "ppmatting" ]; then
    rm -rf ./test_tipc/data/PPM-100
    wget -nc -P ./test_tipc/data/ https://paddleseg.bj.bcebos.com/matting/datasets/PPM-100.zip
    cd ./test_tipc/data/ && unzip PPM-100.zip && cd -
fi
|
package org.ednovo.gooru.core.api.model;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import com.fasterxml.jackson.annotation.JsonFilter;
@JsonFilter("userRole")
public class UserRole extends OrganizationModel implements Serializable{
/**
*
*/
private static final long serialVersionUID = -5564110791867719163L;
private Integer roleId;
private String name;
private String description;
private Set<RoleEntityOperation> roleOperations;
public static final Integer ROLE_TEACHER = 1;
public static final Integer ROLE_STUDENT = 2;
public static final Integer ROLE_CONTENT_ADMIN = 3;
public static final Integer ROLE_ANONYMOUS = 4;
public static final Integer ROLE_AUTHENTICATED = 5;
public static final Integer ROLE_PUBLISHER = 6;
public static final Integer SUPER_ADMIN=7;
public static enum UserRoleType{
TEACHER("Teacher"),
STUDENT("Student"),
CONTENT_ADMIN("Content_Admin"),
ANONYMOUS("ANONYMOUS"),
AUTHENTICATED_USER("User"),
PARENT("Parent"),
OTHER("other"),
PUBLISHER("Publisher"),
SUPER_ADMIN("superadmin");
private String type;
UserRoleType(String type){
this.type=type;
}
public String getType() {
return type;
}
}
public UserRole() {
this.roleOperations = new HashSet<RoleEntityOperation>();
}
public Integer getRoleId() {
return roleId;
}
public void setRoleId(Integer roleId) {
this.roleId = roleId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Set<RoleEntityOperation> getRoleOperations() {
return roleOperations;
}
public void setRoleOperations(Set<RoleEntityOperation> roleOperations) {
this.roleOperations = roleOperations;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((roleId == null) ? 0 : roleId.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
UserRole other = (UserRole) obj;
if (roleId == null) {
if (other.roleId != null) {
return false;
}
} else if (!roleId.equals(other.roleId)) {
return false;
}
return true;
}
} |
#!/bin/bash
# Container entrypoint: wait for the database, prepare Django, then serve.
# Abort startup if any preparation step fails instead of serving a broken app.
set -e

# Wait for postgresql container
/wait-for.sh postgres 5432

# Migrate database and collectstatic
python manage.py collectstatic --noinput
# NOTE(review): generating migrations at container start is unusual — migrations
# are normally committed to the repo. Kept for backward compatibility.
python manage.py makemigrations
python manage.py migrate

# (Re)create the superuser. Credentials can be overridden via environment
# variables; the defaults preserve the original behavior.
# SECURITY: avoid baking the password into the image — set ADMIN_PASSWORD at deploy time.
ADMIN_EMAIL="${ADMIN_EMAIL:-admin@obnk.com}"
ADMIN_PASSWORD="${ADMIN_PASSWORD:-obnk2019}"
echo "from obnk_apps.users.models import User; User.objects.filter(email='${ADMIN_EMAIL}').delete(); User.objects.create_superuser('${ADMIN_EMAIL}', '${ADMIN_PASSWORD}')" | python manage.py shell

# Run gunicorn for django server
gunicorn obnk.wsgi -b 0.0.0.0:8001 --reload
#!/bin/bash
#
# Postfix (SMTP)
# --------------
#
# Postfix handles the transmission of email between servers
# using the SMTP protocol. It is a Mail Transfer Agent (MTA).
#
# Postfix listens on port 25 (SMTP) for incoming mail from
# other servers on the Internet. It is responsible for very
# basic email filtering such as by IP address and greylisting,
# it checks that the destination address is valid, rewrites
# destinations according to aliases, and passes email on to
# another service for local mail delivery.
#
# The first hop in local mail delivery is to Spamassassin via
# LMTP. Spamassassin then passes mail over to Dovecot for
# storage in the user's mailbox.
#
# Postfix also listens on ports 465/587 (SMTPS, SMTP+STARTTLS) for
# connections from users who can authenticate and then sends
# their email out to the outside world. Postfix queries Dovecot
# to authenticate users.
#
# Address validation, alias rewriting, and user authentication
# is configured in a separate setup script mail-users.sh
# because of the overlap of this part with the Dovecot
# configuration.
source setup/functions.sh # load our functions
source /etc/mailinabox.conf # load global vars
# ### Install packages.
# Install postfix's packages.
#
# * `postfix`: The SMTP server.
# * `postfix-pcre`: Enables header filtering.
# * `postgrey`: A mail policy service that soft-rejects mail the first time
# it is received. Spammers don't usually try again. Legitimate mail
# always will.
# * `ca-certificates`: A trust store used to squelch postfix warnings about
# untrusted opportunistically-encrypted connections.
echo "Installing Postfix (SMTP server)..."
apt_install postfix postfix-sqlite postfix-pcre postgrey ca-certificates
# ### Basic Settings
# Set some basic settings...
#
# * Have postfix listen on all network interfaces.
# * Make outgoing connections on a particular interface (if multihomed) so that SPF passes on the receiving side.
# * Set our name (the Debian default seems to be "localhost" but make it our hostname).
# * Set the name of the local machine to localhost, which means xxx@localhost is delivered locally, although we don't use it.
# * Set the SMTP banner (which must have the hostname first, then anything).
tools/editconf.py /etc/postfix/main.cf \
inet_interfaces=all \
smtp_bind_address=$PRIVATE_IP \
smtp_bind_address6=$PRIVATE_IPV6 \
myhostname=$PRIMARY_HOSTNAME\
smtpd_banner="\$myhostname ESMTP Hi, I'm a Mail-in-a-Box (Ubuntu/Postfix; see https://mailinabox.email/)" \
mydestination=localhost
# Tweak some queue settings:
# * Inform users when their e-mail delivery is delayed more than 3 hours (default is not to warn).
# * Stop trying to send an undeliverable e-mail after 2 days (instead of 5), and for bounce messages just try for 1 day.
tools/editconf.py /etc/postfix/main.cf \
delay_warning_time=3h \
maximal_queue_lifetime=2d \
bounce_queue_lifetime=1d
# ### Outgoing Mail
# Enable the 'submission' ports 465 and 587 and tweak their settings.
#
# * Enable authentication. It's disabled globally so that it is disabled on port 25,
# so we need to explicitly enable it here.
# * Do not add the OpenDMARC Authentication-Results header. That should only be added
# on incoming mail. Omit the OpenDMARC milter by re-setting smtpd_milters to the
# OpenDKIM milter only. See dkim.sh.
# * Even though we don't allow auth over non-TLS connections (smtpd_tls_auth_only below, and without auth the client can't
# send outbound mail), don't allow non-TLS mail submission on this port anyway to prevent accidental misconfiguration.
# Setting smtpd_tls_security_level=encrypt also triggers the use of the 'mandatory' settings below (but this is ignored with smtpd_tls_wrappermode=yes.)
# * Give it a different name in syslog to distinguish it from the port 25 smtpd server.
# * Add a new cleanup service specific to the submission service ('authclean')
# that filters out privacy-sensitive headers on mail being sent out by
# authenticated users. By default Postfix also applies this to attached
# emails but we turn this off by setting nested_header_checks empty.
tools/editconf.py /etc/postfix/master.cf -s -w \
"smtps=inet n - - - - smtpd
-o smtpd_tls_wrappermode=yes
-o smtpd_sasl_auth_enable=yes
-o syslog_name=postfix/submission
-o smtpd_milters=inet:127.0.0.1:8891
-o cleanup_service_name=authclean" \
"submission=inet n - - - - smtpd
-o smtpd_sasl_auth_enable=yes
-o syslog_name=postfix/submission
-o smtpd_milters=inet:127.0.0.1:8891
-o smtpd_tls_security_level=encrypt
-o cleanup_service_name=authclean" \
"authclean=unix n - - - 0 cleanup
-o header_checks=pcre:/etc/postfix/outgoing_mail_header_filters
-o nested_header_checks="
# Install the `outgoing_mail_header_filters` file required by the new 'authclean' service.
cp conf/postfix_outgoing_mail_header_filters /etc/postfix/outgoing_mail_header_filters
# Modify the `outgoing_mail_header_filters` file to use the local machine name and ip
# on the first received header line. This may help reduce the spam score of email by
# removing the 127.0.0.1 reference.
sed -i "s/PRIMARY_HOSTNAME/$PRIMARY_HOSTNAME/" /etc/postfix/outgoing_mail_header_filters
sed -i "s/PUBLIC_IP/$PUBLIC_IP/" /etc/postfix/outgoing_mail_header_filters
# Enable TLS on incoming connections. It is not required on port 25, allowing for opportunistic
# encryption. On ports 465 and 587 it is mandatory (see above). Shared and non-shared settings are
# given here. Shared settings include:
# * Require TLS before a user is allowed to authenticate.
# * Set the path to the server TLS certificate and 2048-bit DH parameters for old DH ciphers.
# For port 25 only:
# * Disable extremely old versions of TLS and extremely unsafe ciphers, but some mail servers out in
# the world are very far behind and if we disable too much, they may not be able to use TLS and
# won't fall back to cleartext. So we don't disable too much. smtpd_tls_exclude_ciphers applies to
# both port 25 and port 587, but because we override the cipher list for both, it probably isn't used.
# Use Mozilla's "Old" recommendations at https://ssl-config.mozilla.org/#server=postfix&server-version=3.3.0&config=old&openssl-version=1.1.1
tools/editconf.py /etc/postfix/main.cf \
smtpd_tls_security_level=may\
smtpd_tls_auth_only=yes \
smtpd_tls_cert_file=$STORAGE_ROOT/ssl/ssl_certificate.pem \
smtpd_tls_key_file=$STORAGE_ROOT/ssl/ssl_private_key.pem \
smtpd_tls_dh1024_param_file=$STORAGE_ROOT/ssl/dh2048.pem \
smtpd_tls_protocols="!SSLv2,!SSLv3" \
smtpd_tls_ciphers=medium \
tls_medium_cipherlist=ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA256:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA \
smtpd_tls_exclude_ciphers=aNULL,RC4 \
tls_preempt_cipherlist=no \
smtpd_tls_received_header=yes
# For ports 465/587 (via the 'mandatory' settings):
# * Use Mozilla's "Intermediate" TLS recommendations from https://ssl-config.mozilla.org/#server=postfix&server-version=3.3.0&config=intermediate&openssl-version=1.1.1
# using and overriding the "high" cipher list so we don't conflict with the more permissive settings for port 25.
tools/editconf.py /etc/postfix/main.cf \
smtpd_tls_mandatory_protocols="!SSLv2,!SSLv3,!TLSv1,!TLSv1.1" \
smtpd_tls_mandatory_ciphers=high \
tls_high_cipherlist=ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384 \
smtpd_tls_mandatory_exclude_ciphers=aNULL,DES,3DES,MD5,DES+MD5,RC4
# Prevent non-authenticated users from sending mail that requires being
# relayed elsewhere. We don't want to be an "open relay". On outbound
# mail, require one of:
#
# * `permit_sasl_authenticated`: Authenticated users (i.e. on port 465/587).
# * `permit_mynetworks`: Mail that originates locally.
# * `reject_unauth_destination`: No one else. (Permits mail whose destination is local and rejects other mail.)
tools/editconf.py /etc/postfix/main.cf \
smtpd_relay_restrictions=permit_sasl_authenticated,permit_mynetworks,reject_unauth_destination
# ### DANE
# When connecting to remote SMTP servers, prefer TLS and use DANE if available.
#
# Preferring ("opportunistic") TLS means Postfix will use TLS if the remote end
# offers it, otherwise it will transmit the message in the clear. Postfix will
# accept whatever SSL certificate the remote end provides. Opportunistic TLS
# protects against passive eavesdropping (but not man-in-the-middle attacks).
# Since we'd rather have poor encryption than none at all, we use Mozilla's
# "Old" recommendations at https://ssl-config.mozilla.org/#server=postfix&server-version=3.3.0&config=old&openssl-version=1.1.1
# for opportunistic encryption but "Intermediate" recommendations when DANE
# is used (see next and above). The cipher lists are set above.
# DANE takes this a step further:
# Postfix queries DNS for the TLSA record on the destination MX host. If no TLSA records are found,
# then opportunistic TLS is used. Otherwise the server certificate must match the TLSA records
# or else the mail bounces. TLSA also requires DNSSEC on the MX host. Postfix doesn't do DNSSEC
# itself but assumes the system's nameserver does and reports DNSSEC status. Thus this also
# relies on our local DNS server (see system.sh) and `smtp_dns_support_level=dnssec`.
#
# The `smtp_tls_CAfile` is superfluous, but it eliminates warnings in the logs about untrusted certs,
# which we don't care about seeing because Postfix is doing opportunistic TLS anyway. Better to encrypt,
# even if we don't know if it's to the right party, than to not encrypt at all. Instead we'll
# now see notices about trusted certs. The CA file is provided by the package `ca-certificates`.
tools/editconf.py /etc/postfix/main.cf \
smtp_tls_protocols=\!SSLv2,\!SSLv3 \
smtp_tls_ciphers=medium \
smtp_tls_exclude_ciphers=aNULL,RC4 \
smtp_tls_security_level=dane \
smtp_dns_support_level=dnssec \
smtp_tls_mandatory_protocols="!SSLv2,!SSLv3,!TLSv1,!TLSv1.1" \
smtp_tls_mandatory_ciphers=high \
smtp_tls_CAfile=/etc/ssl/certs/ca-certificates.crt \
smtp_tls_loglevel=2
# ### Incoming Mail
# Pass any incoming mail over to a local delivery agent. Spamassassin
# will act as the LDA agent at first. It is listening on port 10025
# with LMTP. Spamassassin will pass the mail over to Dovecot after.
#
# In a basic setup we would pass mail directly to Dovecot by setting
# virtual_transport to `lmtp:unix:private/dovecot-lmtp`.
tools/editconf.py /etc/postfix/main.cf "virtual_transport=lmtp:[127.0.0.1]:10025"
# Because of a spampd bug, limit the number of recipients in each connection.
# See https://github.com/mail-in-a-box/mailinabox/issues/1523.
tools/editconf.py /etc/postfix/main.cf lmtp_destination_recipient_limit=1
# Who can send mail to us? Some basic filters.
#
# * `reject_non_fqdn_sender`: Reject not-nice-looking return paths.
# * `reject_unknown_sender_domain`: Reject return paths with invalid domains.
# * `reject_authenticated_sender_login_mismatch`: Reject if mail FROM address does not match the client SASL login
# * `reject_rhsbl_sender`: Reject return paths that use blacklisted domains.
# * `permit_sasl_authenticated`: Authenticated users (i.e. on port 587) can skip further checks.
# * `permit_mynetworks`: Mail that originates locally can skip further checks.
# * `reject_rbl_client`: Reject connections from IP addresses blacklisted in zen.spamhaus.org
# * `reject_unlisted_recipient`: Although Postfix will reject mail to unknown recipients, it's nicer to reject such mail ahead of greylisting rather than after.
# * `check_policy_service`: Apply greylisting using postgrey.
#
# Notes: #NODOC
# permit_dnswl_client can pass through mail from whitelisted IP addresses, which would be good to put before greylisting #NODOC
# so these IPs get mail delivered quickly. But when an IP is not listed in the permit_dnswl_client list (i.e. it is not #NODOC
# whitelisted) then postfix does a DEFER_IF_REJECT, which results in all "unknown user" sorts of messages turning into #NODOC
# "450 4.7.1 Client host rejected: Service unavailable". This is a retry code, so the mail doesn't properly bounce. #NODOC
tools/editconf.py /etc/postfix/main.cf \
smtpd_sender_restrictions="reject_non_fqdn_sender,reject_unknown_sender_domain,reject_authenticated_sender_login_mismatch,reject_rhsbl_sender dbl.spamhaus.org" \
smtpd_recipient_restrictions=permit_sasl_authenticated,permit_mynetworks,"reject_rbl_client zen.spamhaus.org",reject_unlisted_recipient,"check_policy_service inet:127.0.0.1:10023","check_policy_service inet:127.0.0.1:12340"
# Postfix connects to Postgrey on the 127.0.0.1 interface specifically. Ensure that
# Postgrey listens on the same interface (and not IPv6, for instance).
# A lot of legit mail servers try to resend before 300 seconds.
# As a matter of fact RFC is not strict about retry timer so postfix and
# other MTA have their own intervals. To fix the problem of receiving
# e-mails really late, the delay of greylisting has been set to
# 180 seconds (default is 300 seconds).
tools/editconf.py /etc/default/postgrey \
POSTGREY_OPTS=\"'--inet=127.0.0.1:10023 --delay=180'\"
# We are going to setup a newer whitelist for postgrey, the version included in the distribution is old
cat > /etc/cron.daily/mailinabox-postgrey-whitelist << EOF;
#!/bin/bash
# Mail-in-a-Box
# check we have a postgrey_whitelist_clients file and that it is not older than 28 days
if [ ! -f /etc/postgrey/whitelist_clients ] || find /etc/postgrey/whitelist_clients -mtime +28 | grep -q '.' ; then
# ok we need to update the file, so lets try to fetch it
if curl https://postgrey.schweikert.ch/pub/postgrey_whitelist_clients --output /tmp/postgrey_whitelist_clients -sS --fail > /dev/null 2>&1 ; then
# if fetching hasn't failed yet then check it is a plain text file
# curl manual states that --fail sometimes still produces output
# this final check will at least check the output is not html
# before moving it into place
if [ "\$(file -b --mime-type /tmp/postgrey_whitelist_clients)" == "text/plain" ]; then
mv /tmp/postgrey_whitelist_clients /etc/postgrey/whitelist_clients
service postgrey restart
else
rm /tmp/postgrey_whitelist_clients
fi
fi
fi
EOF
chmod +x /etc/cron.daily/mailinabox-postgrey-whitelist
/etc/cron.daily/mailinabox-postgrey-whitelist
# Increase the message size limit from 10MB to 128MB.
# The same limit is specified in nginx.conf for mail submitted via webmail and Z-Push.
tools/editconf.py /etc/postfix/main.cf \
message_size_limit=134217728
# Allow the two SMTP ports in the firewall.
ufw_allow smtp
ufw_allow smtps
ufw_allow submission
# Restart services
restart_service postfix
restart_service postgrey
|
#! /bin/bash
set -u

# Wait until the node's RPC endpoint is reachable, then send test txs.
ADDR=$1 # e.g. "127.0.0.1:26657"

# Poll /status until curl succeeds. Quoting "$ADDR" protects against empty or
# whitespace-containing values; the single 'until' loop replaces the original
# duplicated probe-before-loop pattern with identical behavior.
until curl -s "$ADDR/status" > /dev/null; do
    sleep 1
done

# send a bunch of txs over a few blocks
echo "Node is up, sending txs"
for i in $(seq 1 5); do
    for _ in $(seq 1 100); do
        # 8 random bytes, hex-encoded, submitted asynchronously
        tx=$(head -c 8 /dev/urandom | hexdump -ve '1/1 "%.2X"')
        curl -s "$ADDR/broadcast_tx_async?tx=0x$tx" &> /dev/null
    done
    echo "sent 100"
    sleep 1
done
|
package io.opensphere.osh.aerialimagery.model;
import static org.junit.Assert.assertEquals;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Date;
import org.junit.Test;
import io.opensphere.core.model.Altitude;
import io.opensphere.core.model.Altitude.ReferenceLevel;
import io.opensphere.core.model.GeographicConvexQuadrilateral;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.LatLonAlt;
/**
* Unit test for {@link PlatformMetadata}.
*/
public class PlatformMetadataTest
{
    /**
     * Tests the {@link PlatformMetadata} setters/getters round trip.
     */
    @Test
    public void test()
    {
        GeographicConvexQuadrilateral footprint = createFootprint();
        LatLonAlt location = createLocation();
        Date time = new Date(System.currentTimeMillis());
        PlatformMetadata metadata = createMetadata(footprint, location, time);

        assertScalars(metadata);
        assertEquals(time, metadata.getTime());
        assertEquals(footprint, metadata.getFootprint());
        assertEquals(location, metadata.getLocation());
    }

    /**
     * Tests that {@link PlatformMetadata} survives Java serialization.
     *
     * @throws IOException Bad IO.
     * @throws ClassNotFoundException Bad class.
     */
    @Test
    public void testSerialization() throws IOException, ClassNotFoundException
    {
        GeographicConvexQuadrilateral footprint = createFootprint();
        LatLonAlt location = createLocation();
        Date time = new Date(System.currentTimeMillis());
        PlatformMetadata metadata = createMetadata(footprint, location, time);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ObjectOutputStream objOutStream = new ObjectOutputStream(out);
        objOutStream.writeObject(metadata);

        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        ObjectInputStream objInStream = new ObjectInputStream(in);
        metadata = (PlatformMetadata)objInStream.readObject();

        assertScalars(metadata);
        assertEquals(time, metadata.getTime());
        // Footprint equality is asserted vertex-by-vertex after serialization
        // (matching the original test's expectation).
        assertEquals(footprint.getVertices(), metadata.getFootprint().getVertices());
        assertEquals(location, metadata.getLocation());
    }

    /** Builds the 20x20 degree quadrilateral footprint shared by the tests. */
    private GeographicConvexQuadrilateral createFootprint()
    {
        return new GeographicConvexQuadrilateral(
                new GeographicPosition(LatLonAlt.createFromDegrees(-10, -10)),
                new GeographicPosition(LatLonAlt.createFromDegrees(-10, 10)),
                new GeographicPosition(LatLonAlt.createFromDegrees(10, 10)),
                new GeographicPosition(LatLonAlt.createFromDegrees(10, -10)));
    }

    /** Builds the test location at 5N 5E, 101 meters above the ellipsoid. */
    private LatLonAlt createLocation()
    {
        return LatLonAlt.createFromDegreesMeters(5, 5, Altitude.createFromMeters(101, ReferenceLevel.ELLIPSOID));
    }

    /** Populates a metadata object with the canonical test values. */
    private PlatformMetadata createMetadata(GeographicConvexQuadrilateral footprint, LatLonAlt location, Date time)
    {
        PlatformMetadata metadata = new PlatformMetadata();
        metadata.setCameraPitchAngle(10.5);
        metadata.setCameraRollAngle(11.8);
        metadata.setCameraYawAngle(43.2);
        metadata.setFootprint(footprint);
        metadata.setLocation(location);
        metadata.setPitchAngle(-17.3);
        metadata.setRollAngle(-43);
        metadata.setTime(time);
        metadata.setYawAngle(90.3);
        return metadata;
    }

    /** Asserts the scalar angle fields match the canonical test values. */
    private void assertScalars(PlatformMetadata metadata)
    {
        assertEquals(10.5, metadata.getCameraPitchAngle(), 0d);
        assertEquals(11.8, metadata.getCameraRollAngle(), 0d);
        assertEquals(43.2, metadata.getCameraYawAngle(), 0d);
        assertEquals(-17.3, metadata.getPitchAngle(), 0d);
        assertEquals(-43d, metadata.getRollAngle(), 0d);
        assertEquals(90.3, metadata.getYawAngle(), 0d);
    }
}
|
/*
* Copyright (c) 2015, 2016 Oracle and/or its affiliates. All rights reserved. This
* code is released under a tri EPL/GPL/LGPL license. You can use it,
* redistribute it and/or modify it under the terms of the:
*
* Eclipse Public License version 1.0
* GNU General Public License version 2
* GNU Lesser General Public License version 2.1
*/
package org.jruby.truffle.core.format.convert;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.NodeChild;
import com.oracle.truffle.api.dsl.NodeChildren;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.object.DynamicObject;
import org.jruby.truffle.RubyContext;
import org.jruby.truffle.core.format.FormatNode;
import org.jruby.truffle.language.dispatch.CallDispatchHeadNode;
import org.jruby.truffle.language.dispatch.DispatchHeadNodeFactory;
@NodeChildren({
        @NodeChild(value = "value", type = FormatNode.class),
})
public abstract class ToIntegerNode extends FormatNode {

    /** Lazily-created dispatch node for the Kernel#Integer fallback call. */
    @Child private CallDispatchHeadNode integerNode;

    public ToIntegerNode(RubyContext context) {
        super(context);
    }

    /** Converts {@code object} to an integer representation. */
    public abstract Object executeToInteger(VirtualFrame frame, Object object);

    // int values are already integral; pass through unchanged.
    @Specialization
    public int toInteger(int value) {
        return value;
    }

    // long values are already integral; pass through unchanged.
    @Specialization
    public long toInteger(long value) {
        return value;
    }

    // Ruby Bignums are integral; return the object unchanged.
    @Specialization(guards = "isRubyBignum(value)")
    public DynamicObject toInteger(DynamicObject value) {
        return value;
    }

    // Doubles are narrowed with a Java primitive cast (truncates toward zero).
    @Specialization
    public long toInteger(double value) {
        return (long) value;
    }

    // Fallback: dynamically call Kernel#Integer on the value. The dispatch node
    // is created lazily on first use, outside compiled code.
    @Specialization(guards = {
            "!isInteger(value)",
            "!isLong(value)",
            "!isRubyBignum(value)"})
    public Object toInteger(VirtualFrame frame, Object value) {
        if (integerNode == null) {
            CompilerDirectives.transferToInterpreter();
            integerNode = insert(DispatchHeadNodeFactory.createMethodCall(getContext(), true));
        }
        return integerNode.call(frame, getContext().getCoreLibrary().getKernelModule(), "Integer", null, value);
    }
}
|
<filename>projects/enterprize/angular-ui/src/lib/form-controls/directives/message-error.directive.ts
import {
Directive, Host, Input, TemplateRef, ViewContainerRef
} from "@angular/core";
import { MessagesDirective } from "./messages.directive";
/**
 * @Directive
 * Structural directive that registers an error-validation message to be shown
 * for a given validation failure.
 *
 * @author <NAME>
 * @since 08/05/2019
 */
@Directive({
    selector: "[etzMessageError]"
})
export class MessageErrorDirective {

    //#region Inputs
    /**
     * @Input
     * Key of the validation error that activates this message.
     */
    @Input("etzMessageError")
    public etzMessageError: string;
    //#endregion

    //#region Protected Attributes
    /**
     * Reference to the structural directive's template.
     * @injected
     */
    protected readonly templateRef: TemplateRef<any>;

    /**
     * Reference to the container into which {@link templateRef} can be inserted.
     * @injected
     */
    protected readonly viewContainerRef: ViewContainerRef;

    /**
     * Flag indicating whether the message is currently created (rendered).
     * @default false
     */
    protected _created: boolean;
    //#endregion

    //#region Constructor
    constructor(templateRef: TemplateRef<any>, viewContainerRef: ViewContainerRef,
                @Host() messages: MessagesDirective) {
        // Injected
        this.templateRef = templateRef;
        this.viewContainerRef = viewContainerRef;

        // Protected state
        this._created = false;

        // Register this error-message directive with the parent messages directive
        messages.addError(this);
    }
    //#endregion

    //#region Getters
    /**
     * Flag indicating whether the message is currently created (rendered).
     */
    public get created(): boolean {
        return this._created;
    }
    //#endregion

    //#region Public Methods
    /**
     * Instantiates the template and places it in the view container, displaying
     * the error message. Does nothing if the message is already created.
     */
    public create(): void {
        if (!this._created) {
            this.viewContainerRef.createEmbeddedView(this.templateRef);
            this._created = true;
        }
    }

    /**
     * Clears the view container's content, removing the error message.
     */
    public destroy(): void {
        this.viewContainerRef.clear();
        this._created = false;
    }
    //#endregion
}
|
#!/bin/sh
# Test driver: launch the rshell binary under test, then feed it a sample
# command line exercising '&&' chaining, ';' sequencing, and '#' comments.
./rshell.out
echo this && mkdir newfiles && ls -a; rm newfiles #|| rmdir newfiles && echo newfiles was deleted! |
echo "************Welcome to Openstack installation************"
echo "
1. Controller Node
2. Compute Node
Your Choice:"
read choice

# User input is quoted in all [ ] tests so an empty reply fails the comparison
# instead of causing a shell syntax error.
if [ "$choice" == "1" ]
then
	echo "************Setting up Controller Node************"
elif [ "$choice" == "2" ]
then
	echo "************Setting up Compute Node************"
	echo "Enter Compute node IP: "
	read compute_ip
fi

echo "Enter Management IP: "
read management_ip

cd ..

# Python script to edit network card configurations
python environment/interface_setup.py

# Python script to edit hosts; the compute node additionally passes its own IP
if [ "$choice" == "1" ]
then
	python environment/hosts_setup.py "$management_ip"
elif [ "$choice" == "2" ]
then
	python environment/hosts_setup.py "$management_ip" "$compute_ip"
fi
<filename>engine/physics/src/box2d/dynamics/b2_body.cpp
// MIT License
// Copyright (c) 2019 <NAME>
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#include "b2_body.h"
#include "b2_contact.h"
#include "b2_fixture.h"
#include "b2_joint.h"
#include "b2_world.h"
// Construct a body from a definition. Validates the definition, packs the
// boolean options into the flag word, seeds the transform/sweep from the
// definition, and initializes the dotGears master-body copy/limit extension
// fields to identity ratios and zero offsets/limits.
b2Body::b2Body(const b2BodyDef* bd, b2World* world)
{
    // Validate the definition before committing any state.
    b2Assert(bd->position.IsValid());
    b2Assert(bd->linearVelocity.IsValid());
    b2Assert(b2IsValid(bd->angle));
    b2Assert(b2IsValid(bd->angularVelocity));
    b2Assert(b2IsValid(bd->angularDamping) && bd->angularDamping >= 0.0f);
    b2Assert(b2IsValid(bd->linearDamping) && bd->linearDamping >= 0.0f);

    // Translate the boolean definition options into the packed flag word.
    m_flags = 0;
    if (bd->bullet)
    {
        m_flags |= e_bulletFlag;
    }
    if (bd->fixedRotation)
    {
        m_flags |= e_fixedRotationFlag;
    }
    if (bd->allowSleep)
    {
        m_flags |= e_autoSleepFlag;
    }
    if (bd->awake)
    {
        m_flags |= e_awakeFlag;
    }
    if (bd->active)
    {
        m_flags |= e_activeFlag;
    }

    m_world = world;

    // Initial transform and sweep both start at the definition's pose; the
    // local center is zero until fixtures provide mass data.
    m_xf.p = bd->position;
    m_xf.q.Set(bd->angle);

    m_sweep.localCenter.SetZero();
    m_sweep.c0 = m_xf.p;
    m_sweep.c = m_xf.p;
    m_sweep.a0 = bd->angle;
    m_sweep.a = bd->angle;
    m_sweep.alpha0 = 0.0f;

    m_jointList = nullptr;
    m_contactList = nullptr;
    m_prev = nullptr;
    m_next = nullptr;

    m_linearVelocity = bd->linearVelocity;
    m_angularVelocity = bd->angularVelocity;

    m_linearDamping = bd->linearDamping;
    m_angularDamping = bd->angularDamping;
    m_gravityScale = bd->gravityScale;

    m_force.SetZero();
    m_torque = 0.0f;

    m_sleepTime = 0.0f;

    m_type = bd->type;

    // Dynamic bodies get a provisional unit mass until ResetMassData runs;
    // static/kinematic bodies have zero mass.
    if (m_type == b2_dynamicBody)
    {
        m_mass = 1.0f;
        m_invMass = 1.0f;
    }
    else
    {
        m_mass = 0.0f;
        m_invMass = 0.0f;
    }

    m_I = 0.0f;
    m_invI = 0.0f;

    m_userData = bd->userData;

    m_fixtureList = nullptr;
    m_fixtureCount = 0;

    //Added by dotGears - <NAME>
    // Master-body mirroring extension: no properties are copied or limited by
    // default, ratios are identity, offsets and limits are zero.
    m_copy_flags = 0;
    m_limit_flags = 0;
    m_ratio_pos_x = 1.0f;
    m_ratio_pos_y = 1.0f;
    m_ratio_rotation = 1.0f;
    m_ratio_linear_velo = 1.0f;
    m_ratio_angular_velo = 1.0f;
    m_offset_pos_x = 0.0f;
    m_offset_pos_y = 0.0f;
    m_offset_rotation = 0.0f;
    m_offset_linear_velo = 0.0f;
    m_offset_angular_velo = 0.0f;
    m_min_pos_x = 0.0f;
    m_min_pos_y = 0.0f;
    m_min_rotation = 0.0f;
    m_min_linear_velo = 0.0f;
    m_min_angular_velo = 0.0f;
    m_max_pos_x = 0.0f;
    m_max_pos_y = 0.0f;
    m_max_rotation = 0.0f;
    m_max_linear_velo = 0.0f;
    m_max_angular_velo = 0.0f;
}
// Intentionally empty destructor: fixture and joint cleanup is owned by the
// world (see b2World::Destroy), not by the body itself.
b2Body::~b2Body()
{
    // shapes and joints are destroyed in b2World::Destroy
}
// Change the body type (static/kinematic/dynamic). Rebuilds mass data,
// destroys all existing contacts, and touches broad-phase proxies so
// appropriate contacts are re-created on the next step. No-op while the
// world is locked or when the type is unchanged.
void b2Body::SetType(b2BodyType type)
{
    // The world must not be mid-step while bodies change type.
    b2Assert(m_world->IsLocked() == false);
    if (m_world->IsLocked() == true)
    {
        return;
    }
    if (m_type == type)
    {
        return;
    }
    m_type = type;
    // Mass depends on the type: dynamic bodies derive it from fixtures,
    // static/kinematic bodies have zero mass.
    ResetMassData();
    if (m_type == b2_staticBody)
    {
        // Static bodies do not move: zero the velocities and collapse the
        // sweep onto the current pose before syncing the broad phase.
        m_linearVelocity.SetZero();
        m_angularVelocity = 0.0f;
        m_sweep.a0 = m_sweep.a;
        m_sweep.c0 = m_sweep.c;
        SynchronizeFixtures();
    }
    SetAwake(true);
    m_force.SetZero();
    m_torque = 0.0f;
    // Delete the attached contacts. Advance the iterator before destroying
    // so the list walk stays valid.
    b2ContactEdge* ce = m_contactList;
    while (ce)
    {
        b2ContactEdge* ce0 = ce;
        ce = ce->next;
        m_world->m_contactManager.Destroy(ce0->contact);
    }
    m_contactList = nullptr;
    // Touch the proxies so that new contacts will be created (when appropriate)
    b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase;
    for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
    {
        int32 proxyCount = f->m_proxyCount;
        for (int32 i = 0; i < proxyCount; ++i)
        {
            broadPhase->TouchProxy(f->m_proxies[i].proxyId);
        }
    }
}
// Create a fixture from a definition and attach it to this body. Allocates
// from the world's block allocator, creates broad-phase proxies when the
// body is active, and refreshes mass data for non-zero-density fixtures.
// Returns nullptr when the world is locked (mid-step).
b2Fixture* b2Body::CreateFixture(const b2FixtureDef* def)
{
    b2Assert(m_world->IsLocked() == false);
    if (m_world->IsLocked() == true)
    {
        return nullptr;
    }
    // Placement-new into block-allocator memory; freed in DestroyFixture.
    b2BlockAllocator* allocator = &m_world->m_blockAllocator;
    void* memory = allocator->Allocate(sizeof(b2Fixture));
    b2Fixture* fixture = new (memory) b2Fixture;
    fixture->Create(allocator, this, def);
    // Only active bodies have broad-phase proxies.
    if (m_flags & e_activeFlag)
    {
        b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase;
        fixture->CreateProxies(broadPhase, m_xf);
    }
    // Push onto the body's singly linked fixture list.
    fixture->m_next = m_fixtureList;
    m_fixtureList = fixture;
    ++m_fixtureCount;
    fixture->m_body = this;
    // Adjust mass properties if needed.
    if (fixture->m_density > 0.0f)
    {
        ResetMassData();
    }
    // Let the world know we have a new fixture. This will cause new contacts
    // to be created at the beginning of the next time step.
    m_world->m_flags |= b2World::e_newFixture;
    return fixture;
}
// Convenience overload: wrap the shape and density in a default fixture
// definition and delegate to the primary CreateFixture.
b2Fixture* b2Body::CreateFixture(const b2Shape* shape, float density)
{
    b2FixtureDef fixtureDef;
    fixtureDef.shape = shape;
    fixtureDef.density = density;
    return CreateFixture(&fixtureDef);
}
// Detach and destroy a fixture: unlink it from the body's fixture list,
// destroy any contacts referencing it, remove its broad-phase proxies,
// release its block-allocator memory, and refresh mass data. No-op for a
// null fixture or while the world is locked.
void b2Body::DestroyFixture(b2Fixture* fixture)
{
    // Use nullptr consistently with the rest of this file.
    if (fixture == nullptr)
    {
        return;
    }
    b2Assert(m_world->IsLocked() == false);
    if (m_world->IsLocked() == true)
    {
        return;
    }
    b2Assert(fixture->m_body == this);
    // Remove the fixture from this body's singly linked list.
    b2Assert(m_fixtureCount > 0);
    b2Fixture** node = &m_fixtureList;
    bool found = false;
    while (*node != nullptr)
    {
        if (*node == fixture)
        {
            *node = fixture->m_next;
            found = true;
            break;
        }
        node = &(*node)->m_next;
    }
    // You tried to remove a shape that is not attached to this body.
    b2Assert(found);
    // Destroy any contacts associated with the fixture. Advance the edge
    // pointer before destruction so the list walk stays valid.
    b2ContactEdge* edge = m_contactList;
    while (edge)
    {
        b2Contact* c = edge->contact;
        edge = edge->next;
        b2Fixture* fixtureA = c->GetFixtureA();
        b2Fixture* fixtureB = c->GetFixtureB();
        if (fixture == fixtureA || fixture == fixtureB)
        {
            // This destroys the contact and removes it from
            // this body's contact list.
            m_world->m_contactManager.Destroy(c);
        }
    }
    b2BlockAllocator* allocator = &m_world->m_blockAllocator;
    // Only active bodies have broad-phase proxies to tear down.
    if (m_flags & e_activeFlag)
    {
        b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase;
        fixture->DestroyProxies(broadPhase);
    }
    // Manual destruction mirrors the placement-new in CreateFixture.
    fixture->m_body = nullptr;
    fixture->m_next = nullptr;
    fixture->Destroy(allocator);
    fixture->~b2Fixture();
    allocator->Free(fixture, sizeof(b2Fixture));
    --m_fixtureCount;
    // Reset the mass data.
    ResetMassData();
}
// Recompute mass, rotational inertia, and center of mass from the attached
// fixtures' densities. Static/kinematic bodies get zero mass; dynamic bodies
// with no massive fixtures are forced to unit mass so they stay simulable.
void b2Body::ResetMassData()
{
    // Compute mass data from shapes. Each shape has its own density.
    m_mass = 0.0f;
    m_invMass = 0.0f;
    m_I = 0.0f;
    m_invI = 0.0f;
    m_sweep.localCenter.SetZero();
    // Static and kinematic bodies have zero mass.
    if (m_type == b2_staticBody || m_type == b2_kinematicBody)
    {
        m_sweep.c0 = m_xf.p;
        m_sweep.c = m_xf.p;
        m_sweep.a0 = m_sweep.a;
        return;
    }
    b2Assert(m_type == b2_dynamicBody);
    // Accumulate mass over all fixtures.
    b2Vec2 localCenter = b2Vec2_zero;
    for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
    {
        if (f->m_density == 0.0f)
        {
            continue;
        }
        b2MassData massData;
        f->GetMassData(&massData);
        m_mass += massData.mass;
        // Mass-weighted accumulation; divided by total mass below.
        localCenter += massData.mass * massData.center;
        m_I += massData.I;
    }
    // Compute center of mass.
    if (m_mass > 0.0f)
    {
        m_invMass = 1.0f / m_mass;
        localCenter *= m_invMass;
    }
    else
    {
        // Force all dynamic bodies to have a positive mass.
        m_mass = 1.0f;
        m_invMass = 1.0f;
    }
    if (m_I > 0.0f && (m_flags & e_fixedRotationFlag) == 0)
    {
        // Center the inertia about the center of mass (parallel axis theorem).
        m_I -= m_mass * b2Dot(localCenter, localCenter);
        b2Assert(m_I > 0.0f);
        m_invI = 1.0f / m_I;
    }
    else
    {
        m_I = 0.0f;
        m_invI = 0.0f;
    }
    // Move center of mass.
    b2Vec2 oldCenter = m_sweep.c;
    m_sweep.localCenter = localCenter;
    m_sweep.c0 = m_sweep.c = b2Mul(m_xf, m_sweep.localCenter);
    // Update center of mass velocity: the body's velocity is tracked at the
    // center of mass, which just moved.
    m_linearVelocity += b2Cross(m_angularVelocity, m_sweep.c - oldCenter);
}
// DEFOLD
// Destroy every contact on this body that references the given fixture.
void b2Body::PurgeContacts(b2Fixture* fixture)
{
    // Walk the contact edge list, advancing before any destruction so the
    // iteration stays valid while contacts are unlinked.
    for (b2ContactEdge* edge = m_contactList; edge != nullptr;)
    {
        b2Contact* contact = edge->contact;
        edge = edge->next;
        if (contact->GetFixtureA() == fixture || contact->GetFixtureB() == fixture)
        {
            // Destroys the contact and removes it from this body's
            // contact list.
            m_world->m_contactManager.Destroy(contact);
        }
    }
}
// Override the body's mass properties directly instead of deriving them from
// fixtures. Only dynamic bodies accept mass data; non-positive mass is forced
// to one. No-op while the world is locked.
void b2Body::SetMassData(const b2MassData* massData)
{
    b2Assert(m_world->IsLocked() == false);
    if (m_world->IsLocked() == true)
    {
        return;
    }
    if (m_type != b2_dynamicBody)
    {
        return;
    }
    m_invMass = 0.0f;
    m_I = 0.0f;
    m_invI = 0.0f;
    m_mass = massData->mass;
    if (m_mass <= 0.0f)
    {
        // Dynamic bodies must have positive mass.
        m_mass = 1.0f;
    }
    m_invMass = 1.0f / m_mass;
    if (massData->I > 0.0f && (m_flags & b2Body::e_fixedRotationFlag) == 0)
    {
        // Center the inertia about the center of mass (parallel axis theorem).
        m_I = massData->I - m_mass * b2Dot(massData->center, massData->center);
        b2Assert(m_I > 0.0f);
        m_invI = 1.0f / m_I;
    }
    // Move center of mass.
    b2Vec2 oldCenter = m_sweep.c;
    m_sweep.localCenter = massData->center;
    m_sweep.c0 = m_sweep.c = b2Mul(m_xf, m_sweep.localCenter);
    // Update center of mass velocity: the velocity is tracked at the center
    // of mass, which just moved.
    m_linearVelocity += b2Cross(m_angularVelocity, m_sweep.c - oldCenter);
}
bool b2Body::ShouldCollide(const b2Body* other) const
{
// At least one body should be dynamic.
if (m_type != b2_dynamicBody && other->m_type != b2_dynamicBody)
{
return false;
}
// Does a joint prevent collision?
for (b2JointEdge* jn = m_jointList; jn; jn = jn->next)
{
if (jn->other == other)
{
if (jn->joint->m_collideConnected == false)
{
return false;
}
}
}
return true;
}
// Teleport the body to a new position/angle. Collapses the sweep onto the
// new pose (no interpolation across the jump) and updates every fixture's
// broad-phase proxies. No-op while the world is locked.
void b2Body::SetTransform(const b2Vec2& position, float angle)
{
    b2Assert(m_world->IsLocked() == false);
    if (m_world->IsLocked() == true)
    {
        return;
    }
    m_xf.q.Set(angle);
    m_xf.p = position;
    m_sweep.c = b2Mul(m_xf, m_sweep.localCenter);
    m_sweep.a = angle;
    // A teleport has no "previous" pose: start and end of the sweep coincide.
    m_sweep.c0 = m_sweep.c;
    m_sweep.a0 = angle;
    b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase;
    for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
    {
        // Both transforms are m_xf because there is no swept motion.
        f->Synchronize(broadPhase, m_xf, m_xf);
    }
}
void b2Body::SynchronizeFixtures()
{
b2Transform xf1;
xf1.q.Set(m_sweep.a0);
xf1.p = m_sweep.c0 - b2Mul(xf1.q, m_sweep.localCenter);
b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase;
for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
{
f->Synchronize(broadPhase, xf1, m_xf);
}
}
// Defold Modification
// Synchronize the broad-phase proxy at 'index' for the fixture whose shape
// matches 'shape'. Safe to call on inactive bodies (becomes a no-op).
void b2Body::SynchronizeSingle(b2Shape* shape, int32 index)
{
    // Defold fix: Shapes call this function blindly not knowing if proxies have been created or not.
    // b2Body only has proxied created when active, so discard calls when not active so shapes can be
    // updated without crash on inactive objects.
    if (!IsActive())
    {
        return;
    }
    // Reconstruct the start-of-step transform from the sweep's alpha0 state.
    b2Transform xf1;
    xf1.q.Set(m_sweep.a0);
    xf1.p = m_sweep.c0 - b2Mul(xf1.q, m_sweep.localCenter);
    b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase;
    for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
    {
        // Match fixtures by their underlying shape pointer.
        if (f->GetShape() == shape)
        {
            f->SynchronizeSingle(broadPhase, index, xf1, m_xf);
        }
    }
}
// Activate or deactivate the body. Activation creates broad-phase proxies
// for all fixtures (contacts appear next step); deactivation destroys the
// proxies and all existing contacts.
void b2Body::SetActive(bool flag)
{
    b2Assert(m_world->IsLocked() == false);
    if (flag == IsActive())
    {
        return;
    }
    if (flag)
    {
        m_flags |= e_activeFlag;
        // Create all proxies.
        b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase;
        for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
        {
            f->CreateProxies(broadPhase, m_xf);
        }
        // Contacts are created the next time step.
    }
    else
    {
        m_flags &= ~e_activeFlag;
        // Destroy all proxies.
        b2BroadPhase* broadPhase = &m_world->m_contactManager.m_broadPhase;
        for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
        {
            f->DestroyProxies(broadPhase);
        }
        // Destroy the attached contacts. Advance the iterator before
        // destroying so the list walk stays valid.
        b2ContactEdge* ce = m_contactList;
        while (ce)
        {
            b2ContactEdge* ce0 = ce;
            ce = ce->next;
            m_world->m_contactManager.Destroy(ce0->contact);
        }
        m_contactList = nullptr;
    }
}
// Enable or disable fixed rotation. Toggling the flag zeroes the angular
// velocity and recomputes mass data (rotational inertia changes).
void b2Body::SetFixedRotation(bool flag)
{
    // No-op when the flag already matches the requested state.
    if (((m_flags & e_fixedRotationFlag) != 0) == flag)
    {
        return;
    }

    if (flag)
    {
        m_flags |= e_fixedRotationFlag;
    }
    else
    {
        m_flags &= ~e_fixedRotationFlag;
    }

    m_angularVelocity = 0.0f;
    ResetMassData();
}
//Added by <NAME>
// Store an integer custom property, overwriting any existing value for the
// same name.
void b2Body::SetCustomProperties(const char *propertyName, int intValue)
{
    // std::map::operator[] inserts the key if absent and overwrites the
    // mapped value otherwise — one lookup instead of the original
    // count()/erase()/insert() sequence.
    m_customProperties_Int[std::string(propertyName)] = intValue;
}
// Store a float custom property, overwriting any existing value for the
// same name.
void b2Body::SetCustomProperties(const char *propertyName, float floatValue)
{
    // Single-lookup insert-or-overwrite replaces count()/erase()/insert().
    m_customProperties_Float[std::string(propertyName)] = floatValue;
}
// Store a string custom property (copied), overwriting any existing value
// for the same name.
void b2Body::SetCustomProperties(const char *propertyName, const char * stringValue)
{
    // Single-lookup insert-or-overwrite replaces count()/erase()/insert().
    m_customProperties_String[std::string(propertyName)] = std::string(stringValue);
}
//void b2Body::SetCustomProperties(const char *propertyName, b2Vec2 value)
//{
// m_customProperties_b2Vec2.insert(std::pair<std::string, b2Vec2>(std::string(propertyName), value));
//}
//void b2Body::SetCustomProperties(const char *propertyName, b2Color color)
//{
//}
// Store a boolean custom property, overwriting any existing value for the
// same name.
void b2Body::SetCustomProperties(const char *propertyName, bool value)
{
    // Single-lookup insert-or-overwrite replaces count()/erase()/insert().
    m_customProperties_Bool[std::string(propertyName)] = value;
}
// Look up an integer custom property; returns 0 when the name is absent.
int b2Body::GetCustomPropertiesInt(const char * propertyName)
{
    // find() performs one lookup where count() followed by at() did two.
    auto it = m_customProperties_Int.find(std::string(propertyName));
    return (it != m_customProperties_Int.end()) ? it->second : 0;
}
// Look up a float custom property; returns 0 when the name is absent.
float b2Body::GetCustomPropertiesFloat(const char * propertyName)
{
    // find() performs one lookup where count() followed by at() did two.
    auto it = m_customProperties_Float.find(std::string(propertyName));
    return (it != m_customProperties_Float.end()) ? it->second : 0;
}
// Look up a string custom property; returns "" when the name is absent.
// The returned pointer aliases the stored std::string and is invalidated if
// the property is overwritten or removed.
const char * b2Body::GetCustomPropertiesString(const char * propertyName)
{
    // find() performs one lookup where count() followed by at() did two.
    auto it = m_customProperties_String.find(std::string(propertyName));
    return (it != m_customProperties_String.end()) ? it->second.c_str() : "";
}
// Look up a boolean custom property; returns false when the name is absent.
bool b2Body::GetCustomPropertiesBool(const char * propertyName)
{
    // find() performs one lookup where count() followed by at() did two.
    auto it = m_customProperties_Bool.find(std::string(propertyName));
    return (it != m_customProperties_Bool.end()) ? it->second : false;
}
// Emit C++ source via b2Log that re-creates this body and its fixtures —
// a debugging aid for reproducing a world's state in code.
void b2Body::Dump()
{
    int32 bodyIndex = m_islandIndex;
    b2Log("{\n");
    b2Log("  b2BodyDef bd;\n");
    b2Log("  bd.type = b2BodyType(%d);\n", m_type);
    b2Log("  bd.position.Set(%.15lef, %.15lef);\n", m_xf.p.x, m_xf.p.y);
    b2Log("  bd.angle = %.15lef;\n", m_sweep.a);
    b2Log("  bd.linearVelocity.Set(%.15lef, %.15lef);\n", m_linearVelocity.x, m_linearVelocity.y);
    b2Log("  bd.angularVelocity = %.15lef;\n", m_angularVelocity);
    b2Log("  bd.linearDamping = %.15lef;\n", m_linearDamping);
    b2Log("  bd.angularDamping = %.15lef;\n", m_angularDamping);
    b2Log("  bd.allowSleep = bool(%d);\n", m_flags & e_autoSleepFlag);
    b2Log("  bd.awake = bool(%d);\n", m_flags & e_awakeFlag);
    b2Log("  bd.fixedRotation = bool(%d);\n", m_flags & e_fixedRotationFlag);
    b2Log("  bd.bullet = bool(%d);\n", m_flags & e_bulletFlag);
    b2Log("  bd.active = bool(%d);\n", m_flags & e_activeFlag);
    b2Log("  bd.gravityScale = %.15lef;\n", m_gravityScale);
    b2Log("  bodies[%d] = m_world->CreateBody(&bd);\n", m_islandIndex);
    b2Log("\n");
    // Each fixture dumps its own creation code inside a nested scope.
    for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
    {
        b2Log("  {\n");
        f->Dump(bodyIndex);
        b2Log("  }\n");
    }
    b2Log("}\n");
}
// dotGears extension: mirror selected state (position, rotation, velocities)
// from the master body onto this body. For each property whose bit is set in
// m_copy_flags, the mirrored value is master * ratio + offset; when the same
// bit is set in m_limit_flags the result is clamped to [min, max].
void b2Body::UpdateStateFromMasterBody()
{
    // Nothing to do without a master or without any property selected.
    if (m_masterBody == NULL || m_copy_flags == 0)
    {
        return;
    }
    b2Vec2 position = this->GetPosition();
    if (( m_copy_flags & e_position_x) == e_position_x)
    {
        position.x = m_masterBody->GetPosition().x * m_ratio_pos_x + m_offset_pos_x;
        if ((m_limit_flags & e_position_x) == e_position_x)
        {
            // Clamp to [m_min_pos_x, m_max_pos_x].
            position.x = position.x > m_max_pos_x ? m_max_pos_x : position.x < m_min_pos_x ? m_min_pos_x : position.x;
        }
    }
    if (( m_copy_flags & e_position_y) == e_position_y)
    {
        position.y = m_masterBody->GetPosition().y * m_ratio_pos_y + m_offset_pos_y;
        if ((m_limit_flags & e_position_y) == e_position_y)
        {
            // Clamp to [m_min_pos_y, m_max_pos_y].
            position.y = position.y > m_max_pos_y ? m_max_pos_y : position.y < m_min_pos_y ? m_min_pos_y : position.y;
        }
    }
    float angle = this->GetAngle();
    if ((m_copy_flags & e_rotation) == e_rotation)
    {
        angle = m_masterBody->GetAngle() * m_ratio_rotation + m_offset_rotation;
        if ((m_limit_flags & e_rotation) == e_rotation)
        {
            // Clamp to [m_min_rotation, m_max_rotation].
            angle = angle > m_max_rotation ? m_max_rotation : angle < m_min_rotation ? m_min_rotation : angle;
        }
    }
    b2Vec2 linear_velocity = this->GetLinearVelocity();
    if ((m_copy_flags & e_linear_velo) == e_linear_velo)
    {
        // NOTE(review): the same scalar ratio/offset/limits are applied to
        // both x and y components — confirm this asymmetry-free behavior is
        // intended rather than per-axis values.
        linear_velocity.x = m_masterBody->GetLinearVelocity().x * m_ratio_linear_velo + m_offset_linear_velo;
        linear_velocity.y = m_masterBody->GetLinearVelocity().y * m_ratio_linear_velo + m_offset_linear_velo;
        if ((m_limit_flags & e_linear_velo) == e_linear_velo)
        {
            linear_velocity.x = linear_velocity.x > m_max_linear_velo ? m_max_linear_velo : linear_velocity.x < m_min_linear_velo ? m_min_linear_velo : linear_velocity.x;
            linear_velocity.y = linear_velocity.y > m_max_linear_velo ? m_max_linear_velo : linear_velocity.y < m_min_linear_velo ? m_min_linear_velo : linear_velocity.y;
        }
    }
    float angular_velo = this->GetAngularVelocity();
    if ((m_copy_flags & e_angular_velo) == e_angular_velo)
    {
        angular_velo = m_masterBody->GetAngularVelocity() * m_ratio_angular_velo + m_offset_angular_velo;
        if ((m_limit_flags & e_angular_velo) == e_angular_velo)
        {
            // Clamp to [m_min_angular_velo, m_max_angular_velo].
            angular_velo = angular_velo > m_max_angular_velo ? m_max_angular_velo : angular_velo < m_min_angular_velo ? m_min_angular_velo : angular_velo;
        }
    }
    // Apply the mirrored state in one shot.
    this->SetTransform(position, angle);
    this->SetLinearVelocity(linear_velocity);
    this->SetAngularVelocity(angular_velo);
}
// Clone this body into another world: rebuild a b2BodyDef from the current
// state, create the body there, and copy every fixture across. The clone
// keeps this body's island index, id, and name.
b2Body * b2Body::CopyTo(b2World * world)
{
    b2BodyDef bd;
    bd.type = this->m_type;
    bd.position.Set(m_xf.p.x, m_xf.p.y);
    bd.angle = m_sweep.a;
    bd.linearVelocity.Set(m_linearVelocity.x, m_linearVelocity.y);
    bd.angularVelocity = m_angularVelocity;
    bd.linearDamping = m_linearDamping;
    bd.angularDamping = m_angularDamping;
    // Flag-word bits convert to bool: non-zero means the option is set.
    bd.allowSleep = m_flags & e_autoSleepFlag;
    bd.awake = m_flags & e_awakeFlag;
    bd.fixedRotation = m_flags & e_fixedRotationFlag;
    bd.bullet = m_flags & e_bulletFlag;
    bd.active = m_flags & e_activeFlag;
    bd.gravityScale = m_gravityScale;
    b2Body * newBody = world->CreateBody(&bd);
    newBody->m_islandIndex = m_islandIndex;
    newBody->m_id = m_id;
    newBody->SetName(m_name);
    for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
    {
        f->CopyTo(newBody);
    }
    return newBody;
}
void b2Body::Scale(float scale_factor)
{
b2FixtureDef def[100];
int defCount = 0;
for (b2Fixture* f = m_fixtureList; f; f = f->m_next)
{
b2Shape * shape = f->GetShape();
b2FixtureDef fixtureDef = f->GenerateScaledCopyDefinition(scale_factor);
def[defCount] = fixtureDef;
defCount++;
}
b2Fixture * fixture = m_fixtureList;
while (fixture)
{
b2Fixture * next = fixture->GetNext();
DestroyFixture(fixture);
fixture = next;
}
for (int i = 0; i < defCount; i++)
{
CreateFixture(&def[i]);
delete def[i].shape;
def[i].shape = NULL;
}
b2MassData massData;
this->GetMassData(&massData);
massData.mass = massData.mass * scale_factor;
this->SetMassData(&massData);
this->SetCustomProperties("body_scale", scale_factor);
}
|
/*globals just_started,schedule,scheduled,completed,workflow_input,stop,results,waiting_for */
// Generated workflow driver. Each evaluation pass inspects step state and
// schedules the next pipeline stage:
//   sw-53 (fetch RSS feeds) -> sw-64 (sort by pubDate DESC) -> _OUTPUT.

// First pass: kick off the fetch step with its two feed URLs.
if (just_started) {
    schedule("sw-53", {
        "activityType": "fetch",
        "input": {
            "URL": [
                "http://www.newyorker.com/feed/books",
                "http://www.newyorker.com/feed/humor"
            ]
        }
    });
}
// When the sort step has finished, feed its output into the final output step.
// NOTE(review): 'params' is declared with var in two branches; harmless at
// top level since var hoists, but confirm the generator intends shared scope.
if (!scheduled("_OUTPUT") && completed("sw-64")) {
    var params = {
        "activityType": "output",
        "input": {}
    };
    params.input["_INPUT"] = results("sw-64")["_OUTPUT"];
    schedule("_OUTPUT", params);
}
// When the fetch step has finished, schedule the sort step over its output.
if (!scheduled("sw-64") && completed("sw-53")) {
    var params = {
        "activityType": "sort",
        "input": {
            "KEY": [
                {
                    "field": "pubDate",
                    "dir": "DESC"
                }
            ]
        }
    };
    params.input["_INPUT"] = results("sw-53")["_OUTPUT"];
    schedule("sw-64", params);
}
// Terminate the workflow once the output step completes.
if (completed('_OUTPUT')) {
    stop('finished !');
}
<filename>main.go<gh_stars>1-10
package main
import (
"flag"
"fmt"
"log"
"net/http"
"github.com/rtwire/mock/service"
)
var (
addr = flag.String("addr", ":8085", "service address")
)
// main parses flags, announces the service endpoint, and serves the mock
// RTWire API until the process exits.
func main() {
	flag.Parse()

	// Announce the endpoint before blocking in ListenAndServe.
	endpoint := fmt.Sprintf("http://%s/v1/mainnet/", *addr)
	log.Printf("RTWire service running at %s.", endpoint)

	// ListenAndServe blocks; log.Fatal exits if the server fails to start.
	log.Fatal(http.ListenAndServe(*addr, service.New()))
}
|
set -e
# Build the example project, then copy the Tableau Hyper API runtime files
# next to the built assemblies so they resolve when the example runs.
dotnet build Example.csproj
cp -R ../lib/hyper bin/Debug/netcoreapp3.1
cp -R ../lib/libtableauhyperapi.* bin/Debug/netcoreapp3.1/
|
package io.opensphere.arcgis2.migration;
import java.util.Map;
import org.apache.log4j.Logger;
import io.opensphere.arcgis.config.v1.ArcGISServerSource;
import io.opensphere.core.appl.PreConfigurationUpdateModule;
import io.opensphere.core.preferences.PreferencesRegistry;
import io.opensphere.mantle.datasources.impl.UrlDataSource;
/**
* Migrates the old ArcGIS plugin configs to match the new configs for the this
* new plugin.
*/
public class Migrator implements PreConfigurationUpdateModule
{
    /** Used to log messages. */
    private static final Logger LOGGER = Logger.getLogger(Migrator.class);

    @Override
    public void updateConfigs(PreferencesRegistry prefsRegistry)
    {
        LOGGER.info("Migrating old ArcGIS configurations.");

        // Migrate the server definitions first; the resulting old-to-new
        // mapping drives every follow-up migration.
        ServerMigrator serverMigrator = new ServerMigrator(prefsRegistry);
        Map<ArcGISServerSource, UrlDataSource> oldToNewServers = serverMigrator.migrate();

        // Run each piecewise migrator against the server mapping.
        MicroMigrator[] migrators = { new FeatureServerMigrator(prefsRegistry), new ActiveLayersMigrator(prefsRegistry),
            new OrderManagerMigrator(prefsRegistry), new StyleMigrator(prefsRegistry), };
        for (MicroMigrator migrator : migrators)
        {
            migrator.migrate(oldToNewServers);
        }

        LOGGER.info("Done migrating old ArcGIS configurations.");
    }
}
|
#!/usr/bin/env bash
# Copyright 2020 Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Require the IRSA role environment variables provided by the postsubmit pod.
if [ "$AWS_ROLE_ARN" == "" ]; then
  echo "Empty AWS_ROLE_ARN, this script must be run in a postsubmit pod with IAM Roles for Service Accounts"
  exit 1
fi

if [ "$TEST_ROLE_ARN" == "" ]; then
  # Fixed copy-paste bug: this message previously said "Empty AWS_ROLE_ARN".
  echo "Empty TEST_ROLE_ARN, this script must be run in a postsubmit pod with IAM Roles for Service Accounts"
  exit 1
fi

BASEDIR=$(dirname "$0")

# Write an AWS CLI config: the default profile assumes the pod's IRSA role
# via the web identity token, and the conformance-test profile chains the
# test role on top of it.
cat << EOF > config
[default]
output=json
region=${AWS_REGION:-${AWS_DEFAULT_REGION:-us-west-2}}
role_arn=$AWS_ROLE_ARN
web_identity_token_file=/var/run/secrets/eks.amazonaws.com/serviceaccount/token
[profile conformance-test]
role_arn=$TEST_ROLE_ARN
region=${AWS_REGION:-${AWS_DEFAULT_REGION:-us-west-2}}
source_profile=default
EOF

export AWS_CONFIG_FILE=$(pwd)/config
export AWS_PROFILE=conformance-test
# Unset the IRSA variables so the SDK uses the config file profiles instead.
unset AWS_ROLE_ARN AWS_WEB_IDENTITY_TOKEN_FILE

# Derive a unique cluster name from the release branch and current commit.
DEFAULT_KOPS_ZONE_NAME="prod-build-pdx.kops-ci.model-rocket.aws.dev"
KOPS_ZONE_NAME=${KOPS_ZONE_NAME:-"${DEFAULT_KOPS_ZONE_NAME}"}
export KOPS_CLUSTER_NAME=${RELEASE_BRANCH}-$(git rev-parse --short HEAD).${KOPS_ZONE_NAME}

${BASEDIR}/run_all.sh
|
<gh_stars>10-100
/*-
* Copyright (c) 2019 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <sys/cdefs.h>
#include <sys/systm.h>
#include <sys/ringbuf.h>
/*
 * Initialize a ring buffer as the sending side: carve 'base' (basesize
 * bytes) into descriptor entries, split 'buf' (bufsize bytes) evenly
 * between them, and link the entries into a circular list.
 */
void
mdx_ringbuf_init(struct mdx_ringbuf_softc *sc,
    void *base, uint32_t basesize,
    void *buf, uint32_t bufsize)
{
	struct mdx_ringbuf *rb;
	int entry_bufsize;
	int i;

	KASSERT(basesize > 0, ("Invalid arguments"));

	sc->nentries = basesize / sizeof(struct mdx_ringbuf);

	/*
	 * A descriptor area smaller than one entry would leave zero entries
	 * and divide by zero below.
	 */
	KASSERT(sc->nentries > 0, ("Descriptor area too small"));

	entry_bufsize = bufsize / sc->nentries;

	printf("%s: nentries %d, entry_bufsize %d\n", __func__,
	    sc->nentries, entry_bufsize);

	rb = (struct mdx_ringbuf *)base;

	for (i = 0; i < sc->nentries; i++) {
		rb[i].buf = (void *)((uintptr_t)buf + entry_bufsize * i);
		rb[i].bufsize = entry_bufsize;
		rb[i].fill = 0;
		rb[i].flags = 0;
		/* Close the ring: the last entry points back to the first. */
		if (i == (sc->nentries - 1))
			rb[i].next = &rb[0];
		else
			rb[i].next = &rb[i + 1];
	}

	sc->role = MDX_RINGBUF_ROLE_SENDER;
	sc->head = &rb[0];
}
/*
 * Attach to an already-initialized ring buffer as the receiving side.
 * The head starts at the first descriptor entry.
 */
void
mdx_ringbuf_join(struct mdx_ringbuf_softc *sc,
    void *base)
{

	sc->role = MDX_RINGBUF_ROLE_RECVER;
	sc->head = (struct mdx_ringbuf *)base;
}
/*
 * Return the current head entry in *rb0 when it is available to this side,
 * MDX_ERROR otherwise. The OWN flag encodes ownership: set means the entry
 * belongs to the receiver, clear means it belongs to the sender.
 */
int
mdx_ringbuf_head(struct mdx_ringbuf_softc *sc,
    struct mdx_ringbuf **rb0)
{
	struct mdx_ringbuf *rb;

	rb = sc->head;

	/* Sender cannot touch an entry still owned by the receiver. */
	if (sc->role == MDX_RINGBUF_ROLE_SENDER &&
	    rb->flags & MDX_RINGBUF_OWN)
		return (MDX_ERROR);

	/* Receiver must wait until the sender hands the entry over. */
	if (sc->role == MDX_RINGBUF_ROLE_RECVER &&
	    (rb->flags & MDX_RINGBUF_OWN) == 0)
		return (MDX_ERROR);

	*rb0 = sc->head;

	return (0);
}
/*
 * Hand the current head entry to the peer and advance to the next entry.
 * The sender sets the OWN bit to pass ownership to the receiver; the
 * receiver clears it to pass the entry back.
 */
void
mdx_ringbuf_submit(struct mdx_ringbuf_softc *sc)
{
	struct mdx_ringbuf *entry;

	entry = sc->head;

	if (sc->role == MDX_RINGBUF_ROLE_SENDER)
		entry->flags |= MDX_RINGBUF_OWN;
	else
		entry->flags &= ~MDX_RINGBUF_OWN;

	sc->head = entry->next;
}
|
#!/bin/sh
#
# Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# @test
# @bug 6888925
# @run shell PublicKeyInterop.sh
# @summary SunMSCAPI's Cipher can't use RSA public keys obtained from other
# sources.
#
# set a few environment variables so that the shell-script can run stand-alone
# in the source directory
if [ "${TESTSRC}" = "" ] ; then
   TESTSRC="."
fi
if [ "${TESTCLASSES}" = "" ] ; then
   TESTCLASSES="."
fi
if [ "${TESTJAVA}" = "" ] ; then
   echo "TESTJAVA not set. Test cannot execute."
   echo "FAILED!!!"
   exit 1
fi
OS=`uname -s`
case "$OS" in
    Windows* | CYGWIN* )
        # The MSCAPI provider only exists on Windows: create a temporary RSA
        # keypair in the OS keystore, run the interop test against it, then
        # clean the keypair up regardless of the test result.
        echo "Creating a temporary RSA keypair in the Windows-My store..."
        ${TESTJAVA}/bin/keytool \
            -genkeypair \
            -storetype Windows-My \
            -keyalg RSA \
            -alias 6888925 \
            -dname "cn=6888925,c=US" \
            -noprompt
        echo
        echo "Running the test..."
        ${TESTJAVA}/bin/javac -d . ${TESTSRC}\\PublicKeyInterop.java
        ${TESTJAVA}/bin/java ${TESTVMOPTS} PublicKeyInterop
        # Capture the test's exit status before cleanup so it can be returned.
        rc=$?
        echo
        echo "Removing the temporary RSA keypair from the Windows-My store..."
        ${TESTJAVA}/bin/keytool \
            -delete \
            -storetype Windows-My \
            -alias 6888925
        echo done.
        exit $rc
        ;;
    * )
        # Non-Windows platforms cannot run this test; report it as passing.
        echo "This test is not intended for '$OS' - passing test"
        exit 0
        ;;
esac
|
<filename>src/main/java/de/perdian/apps/devlauncher/DevLauncherShutdownListener.java
/*
* DevLauncher
* Copyright 2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.perdian.apps.devlauncher;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.catalina.startup.Tomcat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class DevLauncherShutdownListener {
static final String SHUTDOWN_COMMAND = "shutdown";
static final String SHUTDOWN_CONFIRMATION = "shutdownConfirmation";
static final Logger log = LoggerFactory.getLogger(DevLauncherShutdownListener.class);
static final Lock shutdownLock = new ReentrantLock();
/**
* Shutdown an already running server
*
* @param shutdownPort
* the port on which the connection to the already running server will be
* initiated
*/
static void shutdownExistingServer(Integer shutdownPort) {
if (shutdownPort != null) {
log.debug("Try shutting down running server using port: " + shutdownPort);
try {
try (Socket shutdownSocket = new Socket()) {
shutdownSocket.connect(new InetSocketAddress(InetAddress.getByName("localhost"), shutdownPort.intValue()), 100);
try (BufferedWriter shutdownWriter = new BufferedWriter(new OutputStreamWriter(shutdownSocket.getOutputStream(), "UTF-8"))) {
// Send the shutdown command
shutdownWriter.write(DevLauncherShutdownListener.SHUTDOWN_COMMAND + "\n");
shutdownWriter.flush();
log.debug("Shutdown command successfully sent to running server");
// Wait for response
try (BufferedReader confirmationReader = new BufferedReader(new InputStreamReader(shutdownSocket.getInputStream(), "UTF-8"))) {
for (String line = confirmationReader.readLine(); line != null; line = confirmationReader.readLine()) {
if (DevLauncherShutdownListener.SHUTDOWN_CONFIRMATION.equals(line)) {
log.debug("Previous server instance confirmed shutdown");
}
}
} catch (Exception e) {
log.debug("No response from server that was to be shutdown could be received - it may be shutdown, it may not [" + e + "]");
}
}
}
} catch (Exception e) {
log.debug("No running server detected or server could not be shutdown [" + e + "]");
}
}
}
/**
* Installs the listener on a running tomcat instance.
*
* @param tomcat
* the tomcat instance that will be shutdown once a connection from a new
* application instance is received
* @param shutdownPort
* the port on which the server will listen to new clients that want to
* initiate a shutdown
*/
static void installForServer(final Tomcat tomcat, final Integer shutdownPort) throws Exception {
if (shutdownPort != null) {
// Start a daemon thread that listens on the shutdown port for
// incoming connections. Whenever there actually is a connection
// sending the shutdown command, we - well - shutdown the system
// by trying a clean stop of the embedded server and then performing
// a System.exit call to terminate the virtual machine.
Thread shutdownThread = new Thread(() -> {
log.info("Start listening for shutdown commands on port: " + shutdownPort);
try (ServerSocket serverSocket = new ServerSocket(shutdownPort.intValue(), 0, InetAddress.getByName("localhost"))) {
while (serverSocket.isBound()) {
try (Socket clientSocket = serverSocket.accept()) {
DevLauncherShutdownListener.handleShutdownConnection(clientSocket, tomcat);
} catch (Exception e) {
log.trace("Cannot accept shutdown socket connection", e);
}
}
} catch (Exception e) {
log.debug("Cannot install shutdown listener on port: " + shutdownPort, e);
}
});
shutdownThread.setDaemon(true);
shutdownThread.setName(DevLauncherShutdownListener.class.getSimpleName() + "[" + shutdownPort + "]");
shutdownThread.start();
tomcat.getServer().await();
log.info("Embedded webserver has been stopped - exiting application");
DevLauncherShutdownListener.shutdownLock.lock();
try {
System.exit(0);
} finally {
DevLauncherShutdownListener.shutdownLock.unlock();
}
}
}
/**
 * Reads lines from the client socket and, once the shutdown command is
 * received, stops the embedded webserver under {@code shutdownLock} and
 * writes a confirmation line back to the client. If stopping fails, the
 * JVM is force-terminated with exit code -1.
 */
static void handleShutdownConnection(Socket clientSocket, Tomcat tomcat) throws Exception {
    BufferedReader reader = new BufferedReader(new InputStreamReader(clientSocket.getInputStream(), "UTF-8"));
    String line;
    while ((line = reader.readLine()) != null) {
        if (!DevLauncherShutdownListener.SHUTDOWN_COMMAND.equalsIgnoreCase(line)) {
            continue;
        }
        DevLauncherShutdownListener.shutdownLock.lock();
        try {
            log.info("Shutdown command received - Stopping embedded webserver");
            try {
                DevLauncherShutdownListener.handleShutdownServer(tomcat);
            } finally {
                // Always attempt to confirm, even if stopping the server failed.
                try (BufferedWriter confirmationWriter = new BufferedWriter(new OutputStreamWriter(clientSocket.getOutputStream(), "UTF-8"))) {
                    confirmationWriter.write(DevLauncherShutdownListener.SHUTDOWN_CONFIRMATION + "\n");
                    confirmationWriter.flush();
                } catch (Exception e) {
                    log.debug("Could not send shutdown confirmation command", e);
                }
            }
        } catch (Exception e) {
            log.error("Cannot stop embedded webserver correctly - using System.exit to force shutdown", e);
            System.exit(-1);
        } finally {
            DevLauncherShutdownListener.shutdownLock.unlock();
        }
    }
}
/**
 * Stops the embedded tomcat server by stopping its server component.
 * Extracted as a hook so callers (and tests) can trigger the stop directly.
 *
 * @param tomcat the running tomcat instance to stop
 * @throws Exception if the underlying server stop fails
 */
static void handleShutdownServer(Tomcat tomcat) throws Exception {
    tomcat.getServer().stop();
}
} |
#!/bin/sh
# Container entrypoint: optionally wait for Postgres to accept connections,
# then exec the container command.

if [ "$DATABASE" = "postgres" ]
then
    echo "+++++++++++++++++++++++"
    echo "Waiting for postgres..."
    echo "+++++++++++++++++++++++"

    # Poll until the TCP port is reachable. Quote the expansions so the
    # check does not break via word splitting/globbing if the values are
    # ever unusual.
    while ! nc -z "$SQL_HOST" "$SQL_PORT"; do
      sleep 0.1
    done

    echo "+++++++++++++++++++++++"
    echo "PostgreSQL started"
    echo "+++++++++++++++++++++++"
fi

# python manage.py flush --no-input
# python manage.py migrate

# Replace the shell with the container CMD so signals reach it directly.
exec "$@"
package de.unistuttgart.ims.coref.annotator.plugins;
import java.util.Map;
import javax.swing.text.AttributeSet;
import javax.swing.text.MutableAttributeSet;
import javax.swing.text.StyleContext;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.TypeSystem;
import de.unistuttgart.ims.coref.annotator.StyleManager;
/**
 * Fallback {@code StylePlugin} that uses the application's default paragraph
 * style and contributes no span styles of its own.
 */
public class DefaultStylePlugin implements StylePlugin {

    @Override
    public String getName() {
        return "Default";
    }

    @Override
    public String getDescription() {
        return "Default";
    }

    @Override
    public MutableAttributeSet getBaseStyle() {
        // Delegate to the application-wide default paragraph style.
        return StyleManager.getDefaultParagraphStyle();
    }

    @Override
    public Map<AttributeSet, Type> getSpanStyles(TypeSystem typeSystem, StyleContext context,
            AttributeSet defaultStyle) {
        // NOTE(review): returns null rather than an empty map; callers
        // presumably treat null as "no span styles" — confirm before changing.
        return null;
    }
}
|
#!/bin/bash
# Install and configure vsftpd (config file: /etc/vsftpd/vsftpd.conf).

yum -y install vsftpd

# Create the FTP user with password "ftp_micros".
useradd -d /home/ftpuser -m ftpuser
echo "ftp_micros"| passwd --stdin ftpuser

# SELinux: allow full FTP access and FTP access to home directories.
getsebool -a | grep ftp
setsebool -P allow_ftpd_full_access on
setsebool -P ftp_home_dir on

# Disable anonymous logins.
sed -i 's/^anonymous_enable=.*/anonymous_enable=NO/' /etc/vsftpd/vsftpd.conf

# Append passive-mode port range and default root only if not present yet,
# so re-running this script does not duplicate configuration lines
# (the original unconditional appends were not idempotent).
grep -q '^pasv_min_port=' /etc/vsftpd/vsftpd.conf || echo 'pasv_min_port=30000' >> /etc/vsftpd/vsftpd.conf
grep -q '^pasv_max_port=' /etc/vsftpd/vsftpd.conf || echo 'pasv_max_port=30999' >> /etc/vsftpd/vsftpd.conf
grep -q '^local_root=' /etc/vsftpd/vsftpd.conf || echo "local_root=/nfs_share" >> /etc/vsftpd/vsftpd.conf

# Create the shared directory served over FTP.
mkdir -p /nfs_share
chown -R ftpuser: /nfs_share
chmod 777 -R /nfs_share  # NOTE(review): 777 is very permissive — confirm intended.

# Port 21 must be opened in the firewall:
# vim /etc/sysconfig/iptables
# service iptables restart
# chkconfig vsftpd on  # enable at boot (systemctl enable below does this)
systemctl start vsftpd
systemctl enable vsftpd
# Reference: https://blog.csdn.net/csdn_lqr/article/details/53333946
#!/bin/bash
# echo "Your script args ($#) are: $@"
# Print the CLI help text and terminate with exit status 1.
# Colour variables such as ${BYELLOW}/${NC} are defined in .dbFlow/lib.sh,
# which is sourced below; echo -e is required to render their escapes.
usage() {
  echo -e "${BYELLOW}setup [dbFlow]${NC} - generate project structure and install dependencies. "
  echo
  echo -e "${BWHITE}USAGE${NC}"
  echo -e " $0 <COMMAND>"
  echo
  echo -e "${BWHITE}COMMANDS${NC}"
  echo -e " generate <project-name> generates project structure ${BWHITE}*required${NC}"
  echo -e ""
  echo -e " install installs project dependencies to db"
  echo -e " -f (force overwrite) features will be reinstalled if exists"
  echo -e " schemas/users will be dropped if exists"
  echo -e " and recreated"
  echo
  echo -e " export <target-schema>|ALL exports targetschema or ${BWHITE}ALL${NC} to filesystem ${BWHITE}*required${NC}"
  echo -e " -o specific object (emp)"
  echo
  echo
  echo -e "${BWHITE}EXAMPLE${NC}"
  echo -e " $0 generate example"
  echo -e " $0 install"
  echo -e " $0 export ALL"
  echo -e " $0 export hr_data -o dept"
  echo
  echo
  # Non-zero exit: usage is also invoked for invalid arguments.
  exit 1
}
# get required functions and vars (echo_error, ask4pwd, toLowerCase, colours, ...)
source ./.dbFlow/lib.sh

# target environment — both files are optional; they are created by the
# `generate` subcommand and define PROJECT/*_SCHEMA resp. DB_* variables
[ ! -f ./build.env ] || source ./build.env
[ ! -f ./apply.env ] || source ./apply.env

# name of setup directory
targetpath="db/_setup"
basepath=$(pwd)

# array of subdirectories inside $targetpath to scan for executables (sh/sql);
# install() processes them in exactly this order
array=( tablespaces directories users features workspaces workspace_users acls )
# Exit hook: report a non-zero exit code, run external notifications if
# desired (Slack webhook, GitHub commit/PR, ...) and always clean up the
# generated env.sql.
notify() {
  local rc=$1
  if [[ $rc != 0 ]]; then
    echo ❌ EXIT $rc
  fi
  remove2envsql
}
# Map SIGINT/SIGTERM to the conventional exit codes 130/143, then route every
# script exit through notify() so env.sql is always cleaned up.
trap '(exit 130)' INT
trap '(exit 143)' TERM
trap 'rc=$?; notify $rc; exit $rc' EXIT
# Write $targetpath/env.sql containing SQL*Plus DEFINE statements for all
# values the setup scripts need. The file contains passwords and is removed
# again by remove2envsql().
print2envsql() {
  # Quote the echo arguments (the originals were unquoted and therefore
  # subject to globbing/word splitting) and write the file with a single
  # redirection instead of one open-for-append per line.
  {
    echo "define project=${PROJECT}"
    echo "define app_schema=${APP_SCHEMA}"
    echo "define data_schema=${DATA_SCHEMA}"
    echo "define logic_schema=${LOGIC_SCHEMA}"
    echo "define workspace=${WORKSPACE}"
    echo "define db_app_pwd=${DB_APP_PWD}"
    echo "define db_app_user=${DB_APP_USER}"
    echo "define apex_user=${APEX_USER}"
    # Non-sys admin accounts get the "data" tablespace; sys installs
    # default to "users".
    if [[ ${DB_ADMINUSER} != "sys" ]]; then
      echo "define deftablespace=data"
    else
      echo "define deftablespace=users"
    fi
  } > "$targetpath/env.sql"
}
# Print a closing summary after `generate` created a new project.
# $1: project name
# (fixes user-facing typos: "servies" -> "services", "Exctension" -> "Extension")
show_generate_summary() {
  echo -e
  echo -e
  echo -e "Your project ${YELLOW}$1${NC} has just been created ${GREEN}successfully${NC}."
  echo -e "APEX applications are stored in the ${CYAN}apex${NC} directory. "
  echo -e "If you use REST services, you can store them in the ${CYAN}rest${NC} directory. "
  echo -e "Both can be exported to VSCode with our VSCode Extension (dbFlow-vsce)"
  echo -e
  echo -e "The ${CYAN}db${NC} directory contains all your database objects, whereas the ${CYAN}_setup${NC} folder contains "
  echo -e "objects / dependencies whose installation requires ${PURPLE}sys${NC} permissions."
  echo -e "So before you start installing the components, you can edit or add them in the respective directories. "
  echo -e "Features are stored in the directory with the same name. "
  echo -e "At the beginning these are logger, utPlsql, teplsql and tapi."
  echo -e "You can also find more information in the readme: ${BYELLOW}.dbFlow/readme.md${NC}"
}
# Delete the generated env.sql again — it contains passwords (written by
# print2envsql) and must not linger on disk. The path is quoted so unusual
# characters in basepath cannot cause word splitting or globbing.
remove2envsql() {
  rm -f "${basepath}/${targetpath}/env.sql"
}
# Install the project's setup dependencies (tablespaces, directories, users,
# features, workspaces, ...) into the database as the admin user.
# $1: "YES" to force reinstall over an existing installation (default "NO")
# Fixes: user-facing typos ("für" -> "for", "allready ... recoginized"),
# [[ ]] tests so unquoted values cannot break the comparisons.
install() {
  local yes=${1:-"NO"}

  if [[ $yes == "YES" ]]; then
    echo_warning "Force option detected!"
  fi

  # Collect connection credentials that apply.env did not provide.
  if [ -z "$DB_ADMINUSER" ]
  then
    read -p "Enter username of admin user (admin, sys, ...) [sys]: " DB_ADMINUSER
    DB_ADMINUSER=${DB_ADMINUSER:-"sys"}
  fi

  # Only sys connects with the sysdba suffix held in DBA_OPTION.
  if [[ $(toLowerCase $DB_ADMINUSER) != "sys" ]]; then
    DBA_OPTION=""
  fi

  if [ -z "$DB_PASSWORD" ]
  then
    ask4pwd "Enter password for user ${DB_ADMINUSER}: "
    DB_PASSWORD=${pass}
  fi

  if [ -z "$DB_APP_PWD" ]
  then
    ask4pwd "Enter password for user ${DB_APP_USER}: "
    DB_APP_PWD=${pass}
  fi

  # Refuse to overwrite an existing installation unless forced.
  PROJECT_INSTALLED=$(is_any_schema_installed)
  if [[ "${PROJECT_INSTALLED}" == *"true"* ]] && [[ ${yes} == "NO" ]]
  then
    echo_error "Project already installed and option force not recognized. \nTry option -f to force overwrite (drop + create)"
    exit 1
  fi

  print2envsql
  #-----------------------------------------------------------#

  # check every path in given order
  for path in "${array[@]}"
  do
    if [[ -d "$targetpath"/$path ]]
    then
      echo "Installing $path"
      for file in $(ls "$targetpath"/$path | sort )
      do
        if [ -f "$targetpath"/$path/${file} ]
        then
          BASEFL=$(basename -- "${file}")
          EXTENSION="${BASEFL##*.}"

          if [[ $EXTENSION == "sql" ]]
          then
            # Run SQL scripts from within their directory so relative
            # references inside the scripts resolve.
            cd $targetpath/$path
            echo "Calling $targetpath/$path/${file}"
            exit | ${SQLCLI} -s ${DB_ADMINUSER}/${DB_PASSWORD}@${DB_TNS}${DBA_OPTION} @${file}
            cd ../../..
          elif [[ $EXTENSION == "sh" ]]
          then
            cd $targetpath/$path
            echo "Executing $targetpath/$path/${file}"
            ./${file} ${yes} ${DB_PASSWORD}
            cd ../../..
          fi
        fi
      done #file
    fi
  done #path

  #-----------------------------------------------------------#
  remove2envsql

  echo_success "Installation done"
} # install
# Scaffold a new dbFlow project: directory tree, build.env / apply.env,
# .gitignore entries, the db/_setup skeleton and APEX/REST skeletons.
# $1: project name
# Fixes: ".gitignore" previously received the literal pattern "static files"
# instead of the comment "# static files"; user-facing typos
# ("initialy" -> "initially", "Moduls" -> "modules"); removed the unused
# apexidsquotes/restmodulesquotes variables (they also only quoted the
# first comma and were never read).
generate() {
  local project_name=$1

  read -p "Would you like to have a single or multi scheme app (S/M) [M]: " db_scheme_type
  db_scheme_type=${db_scheme_type:-"M"}

  # create directories
  if [ $(toLowerCase $db_scheme_type) == "m" ]; then
    mkdir -p db/{.hooks/{pre,post},${project_name}_data/{.hooks/{pre,post},sequences,tables,tables_ddl,indexes/{primaries,uniques,defaults},constraints/{primaries,foreigns,checks,uniques},contexts,policies,sources/{types,packages,functions,procedures,triggers},jobs,views,tests/packages,ddl/{init,pre,post},dml/{base,init,pre,post}}}
    mkdir -p db/{.hooks/{pre,post},${project_name}_logic/{.hooks/{pre,post},sequences,tables,tables_ddl,indexes/{primaries,uniques,defaults},constraints/{primaries,foreigns,checks,uniques},contexts,policies,sources/{types,packages,functions,procedures,triggers},jobs,views,tests/packages,ddl/{init,pre,post},dml/{base,init,pre,post}}}
    mkdir -p db/{.hooks/{pre,post},${project_name}_app/{.hooks/{pre,post},sequences,tables,tables_ddl,indexes/{primaries,uniques,defaults},constraints/{primaries,foreigns,checks,uniques},contexts,policies,sources/{types,packages,functions,procedures,triggers},jobs,views,tests/packages,ddl/{init,pre,post},dml/{base,init,pre,post}}}
  elif [ $(toLowerCase $db_scheme_type) == "s" ]; then
    mkdir -p db/{.hooks/{pre,post},${project_name}/{.hooks/{pre,post},sequences,tables,tables_ddl,indexes/{primaries,uniques,defaults},constraints/{primaries,foreigns,checks,uniques},contexts,policies,sources/{types,packages,functions,procedures,triggers},jobs,views,tests/packages,ddl/{init,pre,post},dml/{base,init,pre,post}}}
  else
    echo_error "unknown type ${db_scheme_type}"
    exit 1
  fi

  # write .env files
  # build.env
  echo "# project name" > build.env
  echo "PROJECT=${project_name}" >> build.env
  echo "" >> build.env
  echo "# what are the schema-names" >> build.env
  if [ $(toLowerCase $db_scheme_type) == "m" ]; then
    echo "APP_SCHEMA=${project_name}_app" >> build.env
    echo "DATA_SCHEMA=${project_name}_data" >> build.env
    echo "LOGIC_SCHEMA=${project_name}_logic" >> build.env
  else
    echo "APP_SCHEMA=${project_name}" >> build.env
    echo "DATA_SCHEMA=${project_name}" >> build.env
    echo "LOGIC_SCHEMA=${project_name}" >> build.env
  fi
  echo "" >> build.env
  echo "" >> build.env
  echo "# workspace app belongs to" >> build.env
  echo "WORKSPACE=${project_name}" >> build.env
  echo "" >> build.env

  # ask for some vars to put into file
  read -p "Enter database connections [localhost:1521/xepdb1]: " db_tns
  db_tns=${db_tns:-"localhost:1521/xepdb1"}

  read -p "Enter username of admin user (admin, sys, ...) [sys]: " db_adminuser
  db_adminuser=${db_adminuser:-"sys"}

  ask4pwd "Enter password for ${db_adminuser} [leave blank and you will be asked for]: "
  db_password=${pass}

  if [ $(toLowerCase $db_scheme_type) == "m" ]; then
    ask4pwd "Enter password for deployment_user (proxyuser: ${project_name}_depl) [leave blank and you will be asked for]: "
  else
    ask4pwd "Enter password for application_user (user: ${project_name}) [leave blank and you will be asked for]: "
  fi
  db_app_pwd=${pass}

  read -p "Enter path to depot [_depot]: " depot_path
  depot_path=${depot_path:-"_depot"}

  read -p "Enter stage of this configuration mapped to branch (develop, test, master) [develop]: " stage
  stage=${stage:-"develop"}

  read -p "Enter apex schema [APEX_210100]: " apex_user
  apex_user=${apex_user:-"APEX_210100"}

  read -p "Do you wish to generate and install default tooling? (Logger, utPLSQL, teplsql, tapi) [Y]: " with_tools
  with_tools=${with_tools:-"Y"}

  # apply.env
  echo "# DB Connection" > apply.env
  echo "DB_TNS=${db_tns}" >> apply.env
  echo "" >> apply.env
  echo "# Deployment User" >> apply.env
  if [ $(toLowerCase $db_scheme_type) == "m" ]; then
    echo "DB_APP_USER=${project_name}_depl" >> apply.env
  else
    echo "DB_APP_USER=${project_name}" >> apply.env
  fi
  echo "DB_APP_PWD=${db_app_pwd}" >> apply.env
  echo "" >> apply.env
  echo "# SYS/ADMIN Pass" >> apply.env
  echo "DB_ADMINUSER=${db_adminuser}" >> apply.env
  echo "DB_PASSWORD=${db_password}" >> apply.env
  echo "" >> apply.env
  echo "# Path to Depot" >> apply.env
  echo "DEPOT_PATH=${depot_path}" >> apply.env
  echo "" >> apply.env
  echo "# Stage mapped to source branch ( develop test master )" >> apply.env
  echo "# this is used to get artifacts from depot_path" >> apply.env
  echo "STAGE=${stage}" >> apply.env
  echo "" >> apply.env
  echo "" >> apply.env
  echo "# ADD this to original APP-NUM" >> apply.env
  echo "APP_OFFSET=0" >> apply.env
  echo "" >> apply.env
  echo "# What is the APEX Owner" >> apply.env
  echo "APEX_USER=${apex_user}" >> apply.env

  read -p "Install with sql(cl) or sqlplus? [sqlplus]: " SQLCLI
  SQLCLI=${SQLCLI:-"sqlplus"}

  echo "# Scripts are executed with" >> apply.env
  echo "SQLCLI=${SQLCLI}" >> apply.env

  # write gitignore; apply.env holds passwords and must never be committed
  echo "# dbFlow target infos" >> .gitignore
  echo "apply.env" >> .gitignore
  echo "" >> .gitignore
  echo "# static files" >> .gitignore
  echo "static/f*/dist" >> .gitignore

  # create targetpath directory
  mkdir -p ${targetpath}/{tablespaces,directories,users,features,workspaces,workspace_users,acls}
  mkdir -p ${depot_path}

  # copy some examples into it
  cp -rf .dbFlow/scripts/setup/workspaces/* ${targetpath}/workspaces
  cp -rf .dbFlow/scripts/setup/workspace_users/* ${targetpath}/workspace_users
  cp -rf .dbFlow/scripts/setup/acls/* ${targetpath}/acls
  if [ $(toLowerCase $with_tools) == "y" ]; then
    cp -rf .dbFlow/scripts/setup/features/* ${targetpath}/features
    chmod +x ${targetpath}/features/*.sh
  else
    mkdir -p ${targetpath}/features
  fi

  # create gen_users..
  if [ $(toLowerCase $db_scheme_type) == "m" ]; then
    cp -rf .dbFlow/scripts/setup/users/01_data.sql ${targetpath}/users/01_${project_name}_data.sql
    cp -rf .dbFlow/scripts/setup/users/02_logic.sql ${targetpath}/users/02_${project_name}_logic.sql
    cp -rf .dbFlow/scripts/setup/users/03_app.sql ${targetpath}/users/03_${project_name}_app.sql
    cp -rf .dbFlow/scripts/setup/users/04_depl.sql ${targetpath}/users/04_${project_name}_depl.sql
  else
    cp -rf .dbFlow/scripts/setup/users/03_app.sql ${targetpath}/users/03_${project_name}_app.sql
  fi

  # ask for application IDs
  read -p "Enter application IDs (comma separated) you wish to use initially [1000,2000]: " apex_ids
  apex_ids=${apex_ids:-"1000,2000"}

  # ask for RESTful modules
  read -p "Enter restful modules (comma separated) you wish to use initially [com.${project_name}.api.version,com.${project_name}.api.test]: " rest_modules
  rest_modules=${rest_modules:-"com.${project_name}.api.version,com.${project_name}.api.test"}

  # split ids, generate one apex/ and static/ skeleton per application ID
  apexids=(`echo $apex_ids | sed 's/,/\n/g'`)
  for apxID in "${apexids[@]}"
  do
    mkdir -p apex/f"$apxID"
    mkdir -p static/f"$apxID"/{dist/{css,img,js},src/{css,img,js}}
  done

  # split modules, generate one rest/modules/ directory per module
  restmodules=(`echo $rest_modules | sed 's/,/\n/g'`)
  for restMOD in "${restmodules[@]}"
  do
    mkdir -p rest/modules/"$restMOD"
  done
  mkdir -p rest/privileges
  mkdir -p rest/roles

  show_generate_summary ${project_name}
} # generate
# Ask the database whether the project schemas already exist.
# Echoes 'true' when more than one of DATA/LOGIC/APP schema is present in
# all_users, 'false' otherwise. The output is consumed by install().
# NOTE(review): the condition is `cnt > 1`, so a single pre-existing schema
# reports 'false' — confirm whether `cnt > 0` was intended.
# The heredoc delimiter `!` must stay at column 0.
is_any_schema_installed () {
${SQLCLI} -s ${DB_ADMINUSER}/${DB_PASSWORD}@${DB_TNS}${DBA_OPTION} <<!
set heading off
set feedback off
set pages 0
with checksql as (select count(1) cnt
from all_users
where username in (upper('$DATA_SCHEMA'), upper('$LOGIC_SCHEMA'), upper('$APP_SCHEMA') ))
select case when cnt > 1 then 'true' else 'false' end ding
from checksql;
!
}
# Export a single schema from the database into db/<schema>/.
# $1: schema name, $2: specific object name or "ALL"
# NOTE(review): this calls the sqlcl binary `sql` directly instead of
# ${SQLCLI} — confirm export.sql needs sqlcl-only features.
export_one_schema() {
  local schema=$1
  local object_name=$2

  echo_warning " ... exporting $schema"
  exit | sql -s "$(get_connect_string $schema)" @.dbFlow/scripts/schema_export/export.sql ${object_name}

  # export.sql is expected to produce db/<schema>.exp.zip
  if [[ -f "db/$schema.exp.zip" ]]; then
    unzip -qo "db/$schema.exp.zip" -d "db/${schema}"
    rm "db/$schema.exp.zip"
  else
    echo_error "no export artifacts found!"
  fi
}

# Export one target schema or ALL configured schemas to the filesystem.
# $1: target schema or "ALL" (default), $2: object name or "ALL" (default)
# The duplicated export/unzip sequence of the original ALL and single-schema
# branches is factored into export_one_schema above.
export_schema() {
  local targetschema=${1:-"ALL"}
  local object_name=${2:-"ALL"}

  ALL_SCHEMAS=( ${DATA_SCHEMA} ${LOGIC_SCHEMA} ${APP_SCHEMA} )
  SCHEMAS=($(printf "%s\n" "${ALL_SCHEMAS[@]}" | tr '\n' ' '))

  echo "targetschema: $targetschema"
  echo "object_name: $object_name"

  # remove leftover export archives from previous runs
  for file in $(ls db | grep 'exp.zip')
  do
    rm "db/${file}"
  done

  if [ -z "$DB_APP_PWD" ]
  then
    ask4pwd "Enter password for user ${DB_APP_USER}: "
    DB_APP_PWD=${pass}
  fi

  if [[ $targetschema == "ALL" ]]; then
    for schema in "${SCHEMAS[@]}"
    do
      export_one_schema "$schema" "$object_name"
    done
  else
    export_one_schema "$targetschema" "$object_name"
  fi

  echo -e "${GREEN}Done${NC}"
} # export_schema
# ---------------------------------------------------------------------------
# Command dispatch.
# Bug fixes vs. the original:
#  * generate: the `&& exit 1 \` line-continuation chained the assignment
#    onto the conditional, so `project` was NEVER assigned when a name WAS
#    supplied (generate ran with an empty project name).
#  * export: the target schema was not shifted away, so getopts started at
#    the schema argument and the -o option was never parsed; SCHEMAS was
#    also only assigned inside export_schema, so the validation always
#    compared against an empty list.
# ---------------------------------------------------------------------------
if [ $# -lt 1 ]; then
  echo -e "${RED}No parameters found${NC}" 1>&2
  usage
  exit 1
else

  # Parse options to the `setup` command
  while getopts ":h" opt; do
    case ${opt} in
      h | help)
        usage
        exit 0
        ;;
      \? )
        echo -e "${RED}Invalid Option: -$OPTARG${NC}" 1>&2
        usage
        exit 1
        ;;
    esac
  done
  shift $((OPTIND -1))

  subcommand=$1; shift # Remove 'setup' from the argument list
  case "$subcommand" in
    # Parse options to the install sub command
    generate)
      if [[ -z ${1-} ]]; then
        echo -e "${RED}ERROR: You have to specify a project${NC}"
        exit 1
      fi
      project=$1; shift # Remove 'generate' from the argument list

      # Process package options
      while getopts ":t:" opt; do
        case ${opt} in
          t )
            target=$OPTARG
            ;;
          \? )
            echo_error "Invalid Option: -$OPTARG"
            usage
            ;;
          : )
            echo_error "Invalid Option: -$OPTARG requires an argument" 1>&2
            usage
            ;;
        esac
      done
      shift $((OPTIND -1))

      generate $project
      ;;
    install)
      force="NO"
      # Process install options
      while getopts ":f" opt; do
        case ${opt} in
          f )
            force="YES"
            ;;
          \? )
            echo_error "Invalid Option: -$OPTARG"
            usage
            ;;
        esac
      done
      shift $((OPTIND -1))

      install $force
      ;;
    export)
      if [[ -z ${1-} ]]; then
        echo_error "ERROR: You have to specify a target-schema or ALL"
        exit 1
      fi
      targetschema=$1; shift # Remove the schema so getopts below sees -o

      # Build the list of known schemas from the sourced configuration.
      SCHEMAS=( ${DATA_SCHEMA-} ${LOGIC_SCHEMA-} ${APP_SCHEMA-} )
      if [[ $targetschema != "ALL" ]]; then
        if [[ ! " ${SCHEMAS[@]} " =~ " ${targetschema} " ]]; then
          echo_error "ERROR: unknown targetschema $targetschema (use ALL or anything of: ${SCHEMAS[*]})"
          exit 1
        fi
      fi
      object=""
      # Process package options
      while getopts ":o:" opt; do
        case ${opt} in
          o )
            object=$OPTARG
            if [[ $targetschema == "ALL" ]]; then
              echo_error "specific object export requires a target-schema"
              exit 1
            fi
            ;;
          \? )
            echo_error "Invalid Option: -$OPTARG"
            usage
            ;;
          : )
            echo_error "Invalid Option: -$OPTARG requires an argument"
            usage
            ;;
        esac
      done
      shift $((OPTIND -1))

      export_schema $targetschema $object
      ;;
    *)
      echo_error "Invalid Argument see help"
      usage
      ;;
  esac
fi
<reponame>chipturner/advent-of-code-2021
import helpers
import itertools
import collections
from dataclasses import dataclass
@dataclass
class Fish:
    """A single fish tracked by its spawn timer.

    ``span`` counts down once per day; when it reaches -1 it is reset to 6
    and a new Fish with span 8 is created (see ``main``).
    """

    # days remaining until this fish spawns
    span: int
def count_fish(spans, days):
    """Return the total number of fish after ``days`` days of breeding.

    The original code materialised one ``Fish`` object per fish and printed
    the entire population every day; the population grows roughly
    exponentially, so over 256 days that reaches tens of billions of fish
    and can never finish. Instead keep one counter per timer value 0..8 and
    rotate the counters once per simulated day.

    :param spans: iterable of initial timer values (ints 0..8)
    :param days: number of days to simulate
    """
    counts = [0] * 9
    for span in spans:
        counts[span] += 1
    for _ in range(days):
        spawning = counts.pop(0)  # fish whose timer hit 0 today
        counts[6] += spawning     # parents reset their timer to 6
        counts.append(spawning)   # each parent spawns a newborn at 8
    return sum(counts)


def main() -> None:
    """Read the puzzle input and print the fish population after 256 days."""
    lines = helpers.read_input()
    spans = [int(s) for s in lines[0].split()[-1].split(',')]
    print(f"Total fish: {count_fish(spans, 256)}")
main()
|
<gh_stars>1-10
import { SyncLayoutBatcher } from "../types";
/**
 * Create a batcher to process VisualElements.
 *
 * NOTE(review): the implementation is not visible from this declaration
 * file — presumably the batcher queues elements for synchronized layout
 * processing; confirm against the implementation before relying on details.
 *
 * @returns a fresh {@link SyncLayoutBatcher}
 */
export declare function createBatcher(): SyncLayoutBatcher;
|
#!/bin/bash
# Stop the chain instance and clean up leftover docker resources.

# stop the chain instance
eris chains stop simplechain

# clean up after docker
# source: http://stackoverflow.com/questions/32723111/how-to-remove-old-and-unused-docker-images
# Run container and image cleanup independently: the original chained them
# with &&, so when there were no exited containers `docker rm` failed and
# the dangling images were never removed.
exited=$(docker ps -qa --no-trunc --filter "status=exited")
[ -n "$exited" ] && docker rm $exited
dangling=$(docker images --filter "dangling=true" -q --no-trunc)
[ -n "$dangling" ] && docker rmi $dangling

# stop the docker-machine instance
docker-machine stop default
|
#!/bin/bash
# Install zsh plus helper plugins via Homebrew and make zsh the login shell.
# The shebang must be bash (not POSIX sh): `helpers=( ... )` and
# "${helpers[@]}" are bash array syntax that /bin/sh does not support.

echo "Setting up zsh..."

# Install zsh & helpers
helpers=(
  zsh
  zsh-syntax-highlighting
  zsh-autosuggestions
  zsh-history-substring-search
)
brew install "${helpers[@]}"

# Make zsh the default login shell
chsh -s "$(which zsh)"
|
-- Set the date of birth for the employee with Id 12345.
-- NOTE(review): Id is compared as a string literal — confirm the column type.
UPDATE Employees
SET DateOfBirth = '2000-01-01'
WHERE Id = '12345';
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import glob
import os
import sys
import shutil
class Alglib(MakefilePackage):
    """ALGLIB is a cross-platform numerical analysis and data processing
    library."""

    homepage = "http://www.alglib.net"
    url = "http://www.alglib.net/translator/re/alglib-3.11.0.cpp.gpl.tgz"

    version('3.11.0', 'f87bb05349924d486e8809590dee9f80')

    def url_for_version(self, version):
        # Upstream encodes the dotted version directly in the tarball name.
        return 'http://www.alglib.net/translator/re/alglib-{0}.cpp.gpl.tgz'.format(version.dotted)

    # The tarball unpacks its sources into src/, so build there.
    build_directory = 'src'

    def edit(self, spec, prefix):
        # this package has no build system!
        # Copy the Makefile shipped next to this package file into the
        # source tree and patch the hard-coded "so" extension to the
        # platform's shared-library suffix (dso_suffix comes from Spack).
        make_file_src = join_path(os.path.dirname(self.module.__file__),
                                  'Makefile')
        make_file = join_path(self.stage.source_path, 'src', 'Makefile')
        shutil.copy(make_file_src, make_file)
        filter_file(r'so', dso_suffix, make_file)

    def install(self, spec, prefix):
        # Install the built shared library and all public headers by hand
        # (the upstream Makefile has no install target).
        name = 'libalglib.{0}'.format(dso_suffix)
        with working_dir('src'):
            mkdirp(prefix.lib)
            install(name, prefix.lib)
            mkdirp(prefix.include)
            headers = glob.glob('*.h')
            for h in headers:
                install(h, prefix.include)

    @run_after('install')
    def fix_darwin_install(self):
        # The shared libraries are not installed correctly on Darwin:
        # rewrite their install names so dependents can link against them.
        if sys.platform == 'darwin':
            fix_darwin_install_name(self.spec.prefix.lib)
|
#include <unordered_map>
#include <string>
// Global registry: key -> (timestamp in microseconds -> object size).
std::unordered_map<std::string, std::unordered_map<uint64_t, uint32_t>> objectSizes;

/**
 * Look up the size of the object stored under `key` at timestamp `ts_us`.
 *
 * Uses a single find() per map level instead of the original
 * find()-then-operator[] pattern, which hashed each key twice.
 *
 * @param key            object key to look up
 * @param ts_us          timestamp in microseconds; must match an entry exactly
 * @param subgroup_index currently unused (kept for interface compatibility)
 * @param shard_index    currently unused (kept for interface compatibility)
 * @return the recorded size, or 0 when the key or timestamp is not found
 *         (a missing timestamp could alternatively be interpolated)
 */
uint32_t get_size_by_time(const std::string& key, uint64_t ts_us, uint32_t subgroup_index, uint32_t shard_index) {
    auto keyIt = objectSizes.find(key);
    if (keyIt == objectSizes.end()) {
        return 0;  // key not found
    }
    const auto& sizeMap = keyIt->second;
    auto tsIt = sizeMap.find(ts_us);
    if (tsIt == sizeMap.end()) {
        return 0;  // no entry for this exact timestamp
    }
    return tsIt->second;
}
<reponame>dutinmeow/library
#define PROBLEM "https://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=DSL_2_E"
#include <bits/stdc++.h>
using namespace std;
#include "data-structure/tnemges-tree.hpp"
int main() {
int N, Q;
cin >> N >> Q;
tnemges_tree<long long> tib(N + 1);
while (Q--) {
int t; cin >> t;
if (t == 0) {
int l, r; long long v;
cin >> l >> r >> v;
tib.update(l, r, v);
} else {
int i; cin >> i;
cout << tib.query(i) << '\n';
}
}
} |
package com.example.co4sat;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity {
EditText etUsername, etPassword;
Button btSubmit;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
etUsername = findViewById(R.id.et_username);
etPassword = findViewById(R.id.et_password);
btSubmit = findViewById(R.id.bt_submit);
btSubmit.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (etUsername.getText().toString().equals("me<PASSWORD>") && etPassword.getText().toString().equals("<PASSWORD>")) {
Intent intent = new Intent(MainActivity.this, Alertes.class);
startActivity(intent);
Toast.makeText(getApplicationContext(),"logged in", Toast.LENGTH_SHORT).show();
}else {
Toast.makeText(getApplicationContext(),"identifiant ou mot de passe incorrect", Toast.LENGTH_SHORT).show();
}
}
});
}
} |
#!/usr/bin/env bash
# Build and push multi-platform komga docker images.
# Arguments:
#   1: next version
#   2: channel

source "$(dirname "$0")/docker-common.sh" $1 $2

#echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin

# Push docker images (built previously).
# Abort if the source directory is missing instead of running docker buildx
# from the wrong working directory (the original cd was unguarded).
cd komga || exit 1

docker buildx build \
  --platform $PLATFORMS \
  --cache-from tahngarth/komga:$DOCKER_CHANNEL \
  --tag tahngarth/komga:$DOCKER_CHANNEL \
  --tag "tahngarth/komga:$1" \
  --file ./Dockerfile . \
  --push
|
<reponame>hugonasciutti/Exercises<filename>Jest/1/public/components/About/__tests__/About.spec.js
import React from 'react';
import { shallow, mount } from 'enzyme';
import toJson from 'enzyme-to-json';
import About from '../index';
describe('<About />', () => {
it('should render <About /> component', () => {
const component = shallow(<About />);
const tree = toJson(component);
expect(tree).toMatchSnapshot();
});
it('should delete item from array', () => {
const component = mount(<About />);
let tree = toJson(component);
const items = component.find('.item');
expect(tree).toMatchSnapshot();
items.at(2).simulate('click');
tree = toJson(component);
expect(tree).toMatchSnapshot();
});
});
|
#!/usr/bin/env python
# Copyright (C) 2013 Intel Corporation
#
# Released under the MIT license (see COPYING.MIT)
# Log module used by base testrunner
# It catch stdout and stderr to a new local file
"""
log module
"""
import os, sys
class MultipleCall(object):
    """Wrapper that fans a single method call out to several streams.

    Calling the wrapper invokes ``func`` on every stream (in order) and
    returns the result produced by the first stream.
    """

    def __init__(self, streams, func):
        self.streams = streams
        self.func = func

    def __call__(self, *args, **kwargs):
        """Call ``func`` on all streams; return the first stream's result."""
        results = []
        for stream in self.streams:
            results.append(getattr(stream, self.func)(*args, **kwargs))
        return results[0]
class Tee(object):
    """File-like fan-out: method calls are distributed to all streams.

    The first stream passed in is remembered as ``old_stream`` so it can be
    restored later; non-callable attributes are read from it directly.
    """

    def __init__(self, *args):
        self.streams = list(args)
        self.old_stream = self.streams[0]

    def __getattr__(self, name):
        primary = self.old_stream
        if hasattr(primary, name):
            attr = getattr(primary, name)
            if callable(attr):
                # Methods are fanned out to every stream.
                return MultipleCall(self.streams, name)
            return attr
        # Fall back to the stream list itself (e.g. append()).
        return getattr(self.streams, name)

    def close(self, start=1, end=None):
        """Close and drop streams[start:end]; defaults keep the original."""
        MultipleCall(self.streams[start:end], "close")()
        del self.streams[start:end]
class LogHandler(object):
    """Log handler: captures stdout/stderr into <dirpath>/output.log while
    still forwarding everything to the original streams (via Tee)."""

    def __init__(self, dirpath):
        """
        @param dirpath: the output dir for log file
        """
        self.dirpath = dirpath
        self.tee_std = None
        self.tee_err = None
        self.output = None

    def start(self):
        """start catch output to log file"""
        self.output = open(os.path.join(self.dirpath, "output.log"), "w")
        # Reuse an existing Tee if stdout/stderr are already wrapped (the
        # module-level redirection at the bottom of this file does that);
        # otherwise wrap the current streams.
        self.tee_std = sys.stdout if isinstance(sys.stdout, Tee) else \
                       Tee(sys.stdout)
        self.tee_err = sys.stderr if isinstance(sys.stderr, Tee) else \
                       Tee(sys.stderr)
        # append() is forwarded to the underlying stream list by Tee, so the
        # log file becomes an additional output of both tees.
        self.tee_std.append(self.output)
        self.tee_err.append(self.output)
        sys.stdout = self.tee_std
        sys.stderr = self.tee_err

    def end(self):
        """close log file"""
        # sys.stdout/sys.stderr are Tee objects here; Tee.close() defaults to
        # start=1 and therefore closes only the appended streams (the log
        # file), not the original stdout/stderr. Closing an already-closed
        # file on the second call is harmless.
        sys.stdout.close()
        sys.stderr.close()
        # Restore the streams that were wrapped in start().
        sys.stdout = self.tee_std.old_stream
        sys.stderr = self.tee_err.old_stream
# redirect stdout and stderr to a Tee object at import time, so that a later
# LogHandler.start() finds the streams already wrapped and only appends its
# log file to them
sys.stdout = Tee(sys.stdout)
sys.stderr = Tee(sys.stderr)
|
Using the given dataset, I identified several features that could be useful for prediction. I examined correlations between the features and the target variable and found that the person's age, the median income of the area, and the number of bedrooms in the property all had strong correlations with the target variable. I also noticed that the property's location strongly influenced the target variable. Based on this analysis, I concluded that these factors should be taken into consideration when building a model to predict the property's price.
def check_if_number_in_range(number, range_start, range_end):
    """Return True if ``range_start <= number < range_end`` (half-open).

    The comparison itself already yields a bool, so the original
    ``if cond: return True else: return False`` wrapper was redundant.
    """
    return range_start <= number < range_end
# Example usage
value = 4
lower = 0
upper = 5

# Expected output: True
print(check_if_number_in_range(value, lower, upper))
#!/bin/bash
# ========== Experiment Seq. Idx. 3050 / 59.4.2.0 / N. 0 - _S=59.4.2.0 D1_N=37 a=1 b=1 c=-1 d=1 e=-1 f=-1 D3_N=5 g=1 h=-1 i=1 D4_N=0 j=0 D5_N=0 ==========
# Fail on use of unset variables — every variable below must be defined.
set -u

# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 3050 / 59.4.2.0 / N. 0 - _S=59.4.2.0 D1_N=37 a=1 b=1 c=-1 d=1 e=-1 f=-1 D3_N=5 g=1 h=-1 i=1 D4_N=0 j=0 D5_N=0 ==========\n\n'

# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
RESULTS_DIR="$JBHI_DIR/results"
# Generated script: the "No" literal below was baked in at generation time
# and selects between the svm and non-svm result layouts.
if [[ "No" == "Yes" ]]; then
  SVM_SUFFIX="svm"
  PREDICTIONS_FORMAT="isbi"
else
  SVM_SUFFIX="nosvm"
  PREDICTIONS_FORMAT="titans"
fi
RESULTS_PREFIX="$RESULTS_DIR/deep.37.layer.5.test.0.index.3050.$SVM_SUFFIX"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"

# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$RESULTS_PREFIX.finish.txt"
# ...this experiment is a little different --- only one master procedure should run, so there's only a master lock file
METRICS_TEMP_PATH="$RESULTS_DIR/this_results.anova.txt"
METRICS_PATH="$RESULTS_DIR/all_results.anova.txt"
START_PATH="$METRICS_PATH.start.txt"
FINISH_PATH="-"
LOCK_PATH="$METRICS_PATH.running.lock"
LAST_OUTPUT="$METRICS_PATH"
mkdir -p "$RESULTS_DIR"

#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
# Defaults: pessimistic status until the experiment proves otherwise.
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
# (exit 162 = dirty working tree; experiments only run on committed code)
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
  echo 'FATAL: there are uncommitted changes in your git sources file' >&2
  echo ' for reproducibility, experiments only run on committed changes' >&2
  echo >&2
  echo ' Git status returned:'>&2
  echo "$GIT_STATUS" >&2
  exit 162
fi

# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
  if [[ -e "$FINISH_PATH" ]]; then
    echo 'INFO: this experiment has already finished' >&2
    exit 163
  fi
fi

# The experiment is not ready to run due to dependencies - alerts scheduler
# (exit 164 = missing input files)
if [[ "$LIST_OF_INPUTS" != "" ]]; then
  # NOTE(review): an IFS prefix on an assignment (not a command) changes IFS
  # for the REST of the script, not just this line; the unquoted expansion
  # in the for-loop below relies on that colon-splitting, but later unquoted
  # expansions also see the modified IFS — confirm this is intended.
  IFS=':' tokens_of_input=( $LIST_OF_INPUTS )
  input_missing=No
  for input_to_check in ${tokens_of_input[*]}; do
    if [[ ! -e "$input_to_check" ]]; then
      echo "ERROR: input $input_to_check missing for this experiment" >&2
      input_missing=Yes
    fi
  done
  if [[ "$input_missing" != No ]]; then
    exit 164
  fi
fi

# Sets trap to return error code if script is interrupted before successful finish
# (161 = generic failure; overwritten with 160 on success, 165 on lock contention)
LOCK_SUCCESS=No
FINISH_STATUS=161
function finish_trap {
if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
rmdir "$LOCK_PATH" &> /dev/null
fi
if [[ "$FINISH_STATUS" == "165" ]]; then
echo 'WARNING: experiment discontinued because other process holds its lock' >&2
else
if [[ "$FINISH_STATUS" == "160" ]]; then
echo 'INFO: experiment finished successfully' >&2
else
[[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
echo 'ERROR: an error occurred while executing the experiment' >&2
fi
fi
exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
LOCK_SUCCESS=Yes
else
echo 'WARNING: this experiment is already being executed elsewhere' >&2
FINISH_STATUS="165"
exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
if [[ -e "$START_PATH" ]]; then
echo 'WARNING: this experiment is being restarted' >&2
STARTED_BEFORE=Yes
fi
#...marks start
date -u >> "$START_PATH"
echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
if [[ "$STARTED_BEFORE" == "Yes" ]]; then
# If the experiment was started before, do any cleanup necessary
echo -n
else
echo "D1_N;D3_N;D4_N;a;b;c;d;e;f;g;h;i;j;m_ap;m_auc;m_tn;m_fp;m_fn;m_tp;m_tpr;m_fpr;k_ap;k_auc;k_tn;k_fp;k_fn;k_tp;k_tpr;k_fpr;isbi_auc" > "$METRICS_PATH"
fi
python \
"$SOURCES_GIT_DIR/etc/compute_metrics.py" \
--metadata_file "$SOURCES_GIT_DIR/data/all-metadata.csv" \
--predictions_format "$PREDICTIONS_FORMAT" \
--metrics_file "$METRICS_TEMP_PATH" \
--predictions_file "$RESULTS_PATH"
EXPERIMENT_STATUS="$?"
echo -n "37;5;0;" >> "$METRICS_PATH"
echo -n "1;1;-1;1;-1;-1;1;-1;1;0;" >> "$METRICS_PATH"
tail "$METRICS_TEMP_PATH" -n 1 >> "$METRICS_PATH"
#
#...starts training
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
if [[ "$FINISH_PATH" != "-" ]]; then
date -u >> "$FINISH_PATH"
echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
fi
FINISH_STATUS="160"
fi
fi
|
#!/usr/bin/env bash
# Usage
# bash parallel.sh {browser} {threads} {test-per-thread=5}
# bash parallel.sh chrome 2
# bash parallel.sh firefox 2
# time ( VIDEO=true bash parallel.sh hybrid 4 )
# VIDEO=true bash parallel.sh hybrid 2
set -e
# Print all arguments to stderr.
echoerr() { printf "%s\n" "$*" >&2; }
# print error and exit
# Also best-effort kills any leftover test / parallel.sh processes (first
# with SIGTERM, then SIGKILL) so a failed run does not leave orphans behind.
die () {
echoerr "ERROR: $1"
# use $2 if it is set (even if empty); otherwise default to 3
errnum=${2-3}
( ps aux | grep -i "python test/x chrome" | grep -v grep | awk '{print $2}' | xargs kill >/dev/null 2>&1 ) || true
( ps aux | grep -i "python test/x firefox" | grep -v grep | awk '{print $2}' | xargs kill >/dev/null 2>&1 ) || true
( ps aux | grep parallel.sh | grep -v grep | awk '{print $2}' | xargs kill >/dev/null 2>&1 ) || true
( ps aux | grep -i "python test/x chrome" | grep -v grep | awk '{print $2}' | xargs kill -9 >/dev/null 2>&1 ) || true
( ps aux | grep -i "python test/x firefox" | grep -v grep | awk '{print $2}' | xargs kill -9 >/dev/null 2>&1 ) || true
( ps aux | grep parallel.sh | grep -v grep | awk '{print $2}' | xargs kill -9 >/dev/null 2>&1 ) || true
exit $errnum
}
TEST_TYPE=$1
TOT_THREADS=$2
TESTS_PER_THREAD=$3
[ "${TEST_TYPE}" == "" ] && die "1st param must be one of 'chrome', 'firefox', 'hybrid'"
[ "${TOT_THREADS}" == "" ] && die "2nd param should be the amount of parallel tests to run!"
# Even count required so hybrid mode can split threads 50/50 between browsers.
[ $((TOT_THREADS%2)) -eq 0 ] || die "The amount of threads needs to be an even number!"
# By default 5 tests per thread, for stress testing
[ "${TESTS_PER_THREAD}" == "" ] && TESTS_PER_THREAD=5
# Host port that docker mapped to the mock's 8082/tcp.
# NOTE(review): defined but never called in this script.
function get_mock_port() {
echo $(docker inspect -f='{{(index (index .NetworkSettings.Ports "8082/tcp") 0).HostPort}}' adwords_mock)
}
# Succeeds only when the adwords_mock container reports State.Running=true.
function mock_is_running() {
docker inspect -f {{.State.Running}} adwords_mock | grep true
}
export MOCK_SERVER_PORT=8082
# Recreate the mock container from scratch for a clean test run.
if mock_is_running >/dev/null 2>&1; then
docker stop adwords_mock || true
fi
docker rm adwords_mock || true
docker run -d --name=adwords_mock -e MOCK_SERVER_PORT \
-p $MOCK_SERVER_PORT:$MOCK_SERVER_PORT elgalu/google_adwords_mock
export MOCK_SERVER_HOST=`docker inspect -f='{{.NetworkSettings.IPAddress}}' adwords_mock`
if [ "${MOCK_SERVER_HOST}" == "" ]; then
die "Failed to grab IP from adwords_mock"
fi
# On macOS the container IP is not routable from the host; use localhost.
if [ "$(uname)" == 'Darwin' ]; then
MOCK_URL="http://localhost:${MOCK_SERVER_PORT}/adwords"
else
MOCK_URL="http://${MOCK_SERVER_HOST}:${MOCK_SERVER_PORT}/adwords"
fi
echo "Mock server should be found at ${MOCK_URL}"
# Poll until the mock answers HTTP.
while ! curl -s "${MOCK_URL}"; do
echo -n '.'
sleep 0.2
done
# Hybrid mode runs half of the loop iterations, but each iteration spawns
# one chrome AND one firefox worker, so the total thread count matches.
if [ "${TEST_TYPE}" == "hybrid" ]; then
LOOP_END_NUM=$(($TOT_THREADS/2-1))
else
LOOP_END_NUM=$(($TOT_THREADS-1))
fi
# Fixed: the message previously interpolated ${1} (the browser name) where
# the thread count was clearly intended.
echo "Mock server is running. Will now run ${TOT_THREADS} threads. LOOP_END_NUM=${LOOP_END_NUM}"
# Launch the worker subshells: each runs TESTS_PER_THREAD sequential tests,
# retrying each test once before declaring failure via die().
for i in `seq 0 $LOOP_END_NUM`; do
if [ "${TEST_TYPE}" == "chrome" ] || [ "${TEST_TYPE}" == "hybrid" ]; then
(
# Hybrid mode interleaves thread numbers: odd = chrome, even = firefox.
if [ "${TEST_TYPE}" == "hybrid" ]; then
chrome_thread_num="$((i*2+1))"
else
chrome_thread_num="$((i+1))"
fi
for j in `seq 1 $TESTS_PER_THREAD`; do
test_id_chrome=${STAGE}thread-${chrome_thread_num}_seq-$j
TEST_ID=$test_id_chrome python test/x chrome || \
TEST_ID=$test_id_chrome python test/x chrome || \
die "Test failed on chrome $test_id_chrome"
done
) &
fi
if [ "${TEST_TYPE}" == "firefox" ] || [ "${TEST_TYPE}" == "hybrid" ]; then
(
if [ "${TEST_TYPE}" == "hybrid" ]; then
firefox_thread_num="$((i*2+2))"
else
firefox_thread_num="$((i+1))"
fi
for j in `seq 1 $TESTS_PER_THREAD`; do
test_id_firefox=${STAGE}thread-${firefox_thread_num}_seq-$j
TEST_ID=$test_id_firefox python test/x firefox || \
TEST_ID=$test_id_firefox python test/x firefox || \
die "Test failed on firefox $test_id_firefox"
done
) &
fi
done
# Block until all background workers have finished. NOTE(review): a failing
# worker calls die(), which kills sibling processes rather than propagating
# a status through wait -- confirm this is the intended failure mode.
wait
|
#!/bin/sh
# CMake-generated launcher for the 'playground' demo binary, with optional
# --debugger mode (disabled at configure time, see below).
bindir=$(pwd)
# Fixed: the path contains a real space ("GRAPHICS API"); it must be quoted.
# The old unquoted form passed two arguments to cd.
cd "/Users/BrightLand/Learn_MetalAPI/GRAPHICS API/openGL/playground/"
export
if test "x$1" = "x--debugger"; then
shift
# Configure-time constant: gdb was NOT found, so this branch is dead code
# ("YES" was never substituted). Kept for parity with the generator template.
if test "x" = "xYES"; then
echo "r " > "$bindir/gdbscript"
echo "bt" >> "$bindir/gdbscript"
# NOTE(review): GDB_COMMAND-NOTFOUND is an unsubstituted CMake placeholder;
# re-run the CMake configure step with gdb installed to make this work.
GDB_COMMAND-NOTFOUND -batch -command="$bindir/gdbscript" "/Users/BrightLand/Learn_MetalAPI/GRAPHICS API/openGL/cmake_on_mac/Debug/playground"
else
# Fixed: backslashes inside double quotes are literal characters, so the
# old "GRAPHICS\ API" path never matched the real "GRAPHICS API" directory.
"/Users/BrightLand/Learn_MetalAPI/GRAPHICS API/openGL/cmake_on_mac/Debug/playground"
fi
else
"/Users/BrightLand/Learn_MetalAPI/GRAPHICS API/openGL/cmake_on_mac/Debug/playground"
fi
|
import React, { useState, useEffect } from 'react'
import {StyleSheet, Text, View, Button, TextInput} from 'react-native';
export default function App() {
const [num1, setNum1] = useState('');
const [num2, setNum2] = useState('');
const [result, setResult] = useState(0);
const calculate = () => {
let sum = Number(num1) + Number(num2);
let subtract = Number(num1) - Number(num2);
let multiply = Number(num1) * Number(num2);
let divide = Number (num1) / Number (num2);
setResult({sum, subtract, multiply, divide});
}
return (
<View style={styles.container}>
<TextInput
value={num1}
style={styles.numberInput}
onChangeText={setNum1}
placeholder='Input Number 1'
/>
<TextInput
value={num2}
style={styles.numberInput}
onChangeText={setNum2}
placeholder='Input Number 2'
/>
<Button
style={styles.button}
onPress={calculate}
title='Calculate'
/>
<Text>Result: {result.sum}, {result.subtract}, {result.multiply}, {result.divide}</Text>
</View>
)
};
// Shared styles for the calculator screen.
const styles = StyleSheet.create({
container: {
flex: 1,
},
// Underlined, centered numeric input fields.
numberInput: {
fontSize:18,
borderBottomWidth: 1.25,
margin: 10,
padding: 10,
textAlign: 'center',
},
// NOTE(review): react-native's <Button> does not accept a `style` prop,
// so this entry likely has no visual effect -- confirm, or switch the
// component to TouchableOpacity/Pressable if styling is wanted.
button: {
backgroundColor:'steelblue',
marginTop: 10,
},
});
<reponame>liuhanling/Logger<gh_stars>1-10
package com.liuhanling.logger;
import android.content.Context;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.StringReader;
import java.io.StringWriter;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
public class LogPrinter implements Printer {
static final int CRASH = 8;
private final ThreadLocal<String> mLocalTag = new ThreadLocal<>();
private final Config mConfig;
LogPrinter(Context context) {
this(new LogConfig.Builder(context).build());
}
LogPrinter(Config config) {
this.mConfig = Utils.checkNotNull(config);
}
@Override
public Printer t(String tag) {
if (tag != null) {
mLocalTag.set(tag);
}
return this;
}
@Override
public void v(Object object) {
log(Log.VERBOSE, Utils.toString(object));
}
@Override
public void v(String message, Throwable tr) {
log(Log.VERBOSE, message, tr);
}
@Override
public void v(String message, Object... args) {
log(Log.VERBOSE, message, args);
}
@Override
public void d(Object object) {
log(Log.DEBUG, Utils.toString(object));
}
@Override
public void d(String message, Throwable tr) {
log(Log.DEBUG, message, tr);
}
@Override
public void d(String message, Object... args) {
log(Log.DEBUG, message, args);
}
@Override
public void i(Object object) {
log(Log.INFO, Utils.toString(object));
}
@Override
public void i(String message, Throwable tr) {
log(Log.INFO, message, tr);
}
@Override
public void i(String message, Object... args) {
log(Log.INFO, message, args);
}
@Override
public void w(Object object) {
log(Log.WARN, Utils.toString(object));
}
@Override
public void w(String message, Throwable tr) {
log(Log.WARN, message, tr);
}
@Override
public void w(String message, Object... args) {
log(Log.WARN, message, args);
}
@Override
public void e(Object object) {
log(Log.ERROR, Utils.toString(object));
}
@Override
public void e(String message, Throwable tr) {
log(Log.ERROR, message, tr);
}
@Override
public void e(String message, Object... args) {
log(Log.ERROR, message, Utils.toString(args));
}
@Override
public void a(Object object) {
log(Log.ASSERT, Utils.toString(object));
}
@Override
public void a(String message, Throwable tr) {
log(Log.ASSERT, message, tr);
}
@Override
public void a(String message, Object... args) {
log(Log.ASSERT, message, args);
}
@Override
public void c(String message, Throwable tr) {
log(CRASH, message, tr);
}
@Override
public void j(String json) {
j("", json);
}
@Override
public void j(String message, String json) {
Utils.checkNotNull(message);
if (Utils.isEmpty(json)) {
d(message + "Empty/Null json content");
return;
}
try {
json = json.trim();
if (json.startsWith("{")) {
JSONObject object = new JSONObject(json);
message = Utils.isEmpty(message) ? message : message + '\n';
d(message + object.toString(2));
return;
}
if (json.startsWith("[")) {
JSONArray object = new JSONArray(json);
message = Utils.isEmpty(message) ? message : message + '\n';
d(message + object.toString(2));
return;
}
e(message + "Invalid Json");
} catch (JSONException e) {
e(message, e);
}
}
@Override
public void x(String xml) {
x("", xml);
}
@Override
public void x(String message, String xml) {
Utils.checkNotNull(message);
if (Utils.isEmpty(xml)) {
d(message + "Empty/Null xml content");
return;
}
try {
Source source = new StreamSource(new StringReader(xml));
StreamResult result = new StreamResult(new StringWriter());
Transformer transformer = TransformerFactory.newInstance().newTransformer();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
transformer.transform(source, result);
String msg = result.getWriter().toString().replaceFirst(">", ">\n");
message = Utils.isEmpty(message) ? message : message + '\n';
d(message + msg);
} catch (TransformerException e) {
e(message, e);
}
}
@Override
public void log(int priority, String message, Throwable tr) {
log(priority, message + '\n' + Log.getStackTraceString(tr));
}
@Override
public synchronized void log(int priority, String message, Object... args) {
Utils.checkNotNull(message);
String tag = getTempTag();
String msg = Utils.isEmpty(args) ? message : String.format(message, args);
mConfig.log(priority, tag, msg);
}
private String getTempTag() {
String tag = mLocalTag.get();
if (tag != null) {
mLocalTag.remove();
}
return tag;
}
} |
<reponame>zonesgame/StendhalArcClient<filename>core/src/mindustry/content/Liquids.java
package mindustry.content;
import arc.graphics.Color;
import mindustry.ctype.ContentList;
import mindustry.type.Liquid;
/**
 * Content registry for the game's base liquids. load() is invoked once during
 * content initialization and assigns the static singletons below.
 */
public class Liquids implements ContentList{
    // Singleton liquid instances, populated by load().
    public static Liquid water, slag, oil, cryofluid;

    @Override
    public void load(){
        // Each liquid uses the double-brace idiom: an anonymous Liquid
        // subclass whose instance initializer sets balance fields.
        // NOTE(review): the numeric values are gameplay-balance data; field
        // semantics (heatCapacity, viscosity, ...) live on the Liquid type.
        water = new Liquid("water", Color.valueOf("596ab8")){{
            heatCapacity = 0.4f;
            effect = StatusEffects.wet;
        }};

        slag = new Liquid("slag", Color.valueOf("ffa166")){{
            temperature = 1f;
            viscosity = 0.8f;
            effect = StatusEffects.melting;
            lightColor = Color.valueOf("f0511d").a(0.4f);
        }};

        oil = new Liquid("oil", Color.valueOf("313131")){{
            viscosity = 0.7f;
            flammability = 1.2f;
            explosiveness = 1.2f;
            heatCapacity = 0.7f;
            barColor = Color.valueOf("6b675f");
            effect = StatusEffects.tarred;
        }};

        cryofluid = new Liquid("cryofluid", Color.valueOf("6ecdec")){{
            heatCapacity = 0.9f;
            temperature = 0.25f;
            effect = StatusEffects.freezing;
            lightColor = Color.valueOf("0097f5").a(0.2f);
        }};
    }
}
|
#!/bin/bash
# SLURM job: 4 CPU tasks plus one Volta GPU.
#SBATCH -n 4 --gres=gpu:volta:1
# Loading the required module
source /etc/profile
#module load anaconda/2021a #cuda/11.1
source activate nerf_pl
# Run the script
# Resumes NeRF training (efficient_sm variant) from the epoch-13 checkpoint.
# NOTE(review): paths and hyper-parameters are experiment-specific and
# hard-coded; edit before reuse.
python train_efficient_sm.py --dataset_name efficient_sm --root_dir ../../datasets/variable_cam/statue_200_var_cam_v1_sigma150/ --N_importance 128 --N_samples 64 --num_gpus 0 --img_wh 64 64 --noise_std 0 --num_epochs 300 --optimizer adam --lr 0.00001 --exp_name statue_200_sigma150_64x64_sm2_nimp128_nsamp64_run1 --num_sanity_val_steps 1 --Light_N_importance 128 --grad_on_light --batch_size 4096 --ckpt_path ./eff_sm_updated_light_matrix_NEW_feb27/ckpts/statue_200_sigma150_64x64_sm2_nimp128_nsamp64_run1/epoch\=13.ckpt
import fs from 'fs';
import path from 'path';
/**
 * Recursively requires every .js file under `folderPath` (skipping index.js)
 * and returns a map of { basename: module[namespace] } for modules that
 * export the given namespace key. Requiring everything first and unwrapping
 * the namespace afterwards tolerates circular dependencies.
 *
 * Throws if `folderPath` does not exist, is unreadable, or is not a directory.
 */
const loadModules = (folderPath, namespace) => {
  const modules = {};
  const directory = path.resolve(folderPath);
  const ignoreRegex = /(index)(\.js)/;
  // statSync throws if the path doesn't exist or permission is denied.
  // (Fixed: a redundant readdirSync of the directory was removed here; the
  // result was never used and walk() re-reads it anyway.)
  const dirStats = fs.statSync(folderPath);
  if (!dirStats.isDirectory()) throw new Error(`${folderPath} is not a directory!`);
  // Depth-first collection of candidate module file paths.
  const walk = (dir) => {
    const moduleFiles = [];
    const entries = fs.readdirSync(dir);
    for (let i = 0; i < entries.length; i++) {
      const entry = entries[i];
      if (entry.match(ignoreRegex))
        continue;
      const stat = fs.statSync(path.join(dir, entry));
      if (stat.isDirectory()) {
        moduleFiles.push(...walk(path.join(dir, entry)));
      } else {
        moduleFiles.push(path.join(dir, entry));
      }
    }
    return moduleFiles;
  };
  // (Fixed: leftover console.log debug statements removed throughout.)
  const result = walk(directory);
  for (const value of result) {
    const m = require(value);
    if (m[namespace]) {
      // Store the (potentially incomplete) module reference first...
      modules[path.basename(value, '.js')] = m;
    }
  }
  // ...then unwrap the namespaced export after everything has been required;
  // this resolves circular-dependency problems, if any.
  const moduleNames = Object.keys(modules);
  for (const moduleName of moduleNames) {
    modules[moduleName] = modules[moduleName][namespace];
  }
  return modules;
};
const sortedFieldObject = (obj) => {
const sortedFieldObject = {};
Object.keys(obj).sort().map((key) => {
sortedFieldObject[key] = obj[key];
});
return sortedFieldObject;
}
// Aggregate export so consumers can use either the default `Common` object
// or the individual named helpers.
const Common = {
loadModules,
sortedFieldObject
};
export {
loadModules,
sortedFieldObject
};
export default Common;
|
<filename>js/pages/charts-chartjs.js
//------------- charts-chartjs.js -------------//
$(document).ready(function() {
//generate random number for charts
randNum = function(){
//return Math.floor(Math.random()*101);
return (Math.floor( Math.random()* (1+40-20) ) ) + 20;
}
//------------- Line chart -------------//
// Two-series demo dataset (Chart.js v1.x dataset format).
var lineData = {
labels : ["January","February","March","April","May","June","July"],
datasets : [
{
label: "PayPal",
fillColor : "rgba(136,187,200,0.2)",
strokeColor : "rgba(136,187,200,1)",
pointColor : "rgba(136,187,200,1)",
pointStrokeColor : "#fff",
pointHighlightFill : "#fff",
pointHighlightStroke : "rgba(136,187,200,1)",
data : [3+randNum(),5+randNum(),8+randNum(),13+randNum(),17+randNum(),21+randNum(),23+randNum()]
},
{
label: "Credit card",
fillColor : "rgba(223,106,120,0.2)",
strokeColor : "rgba(223,106,120,1)",
pointColor : "rgba(223,106,120,1)",
pointStrokeColor : "#fff",
pointHighlightFill : "#fff",
pointHighlightStroke : "rgba(223,106,120,1)",
data : [randNum()-5,randNum()-2,randNum()-4,randNum()-1,randNum()-3,randNum()-2,randNum()-5]
}
]
}
var ctx = document.getElementById("line-chartjs").getContext("2d");
var myLineChart = new Chart(ctx).Line(lineData, {
responsive: true,
scaleShowGridLines : true,
scaleGridLineColor : "#f3f3f3",
scaleGridLineWidth : 0.2,
bezierCurve : false,
//points
pointDot : false,
datasetStroke : true,
datasetStrokeWidth : 2,
datasetFill : true,
//animations
animation: true,
animationSteps: 60,
animationEasing: "easeOutQuart",
//scale
showScale: true,
scaleFontFamily: "'Open Sans'",
scaleFontSize: 13,
scaleFontStyle: "normal",
scaleFontColor: "#333",
//tooltips
showTooltips: true,
tooltipFillColor: "#344154",
tooltipFontFamily: "'Open Sans'",
tooltipFontSize: 13,
tooltipFontColor: "#fff",
tooltipYPadding: 8,
tooltipXPadding: 10,
tooltipCornerRadius: 2,
tooltipTitleFontFamily: "'Open Sans'",
});
//------------- Line chart with dots -------------//
// Same dataset shape as the plain line chart, but rendered with point dots.
var lineDotsData = {
labels : ["January","February","March","April","May","June","July"],
datasets : [
{
label: "PayPal",
fillColor : "rgba(136,187,200,0.2)",
strokeColor : "rgba(136,187,200,1)",
pointColor : "rgba(136,187,200,1)",
pointStrokeColor : "#fff",
pointHighlightFill : "#fff",
pointHighlightStroke : "rgba(136,187,200,1)",
data : [3+randNum(),5+randNum(),8+randNum(),13+randNum(),17+randNum(),21+randNum(),23+randNum()]
},
{
label: "Credit card",
fillColor : "rgba(223,106,120,0.2)",
strokeColor : "rgba(223,106,120,1)",
pointColor : "rgba(223,106,120,1)",
pointStrokeColor : "#fff",
pointHighlightFill : "#fff",
pointHighlightStroke : "rgba(223,106,120,1)",
data : [randNum()-5,randNum()-2,randNum()-4,randNum()-1,randNum()-3,randNum()-2,randNum()-5]
}
]
}
var ctxDots = document.getElementById("line-dots-chartjs").getContext("2d");
var myLineDotsChart = new Chart(ctxDots).Line(lineDotsData, {
responsive: true,
scaleShowGridLines : true,
scaleGridLineColor : "#f3f3f3",
scaleGridLineWidth : 0.2,
bezierCurve : false,
bezierCurveTension : 0.4,
//points
pointDot : true,
pointDotRadius : 4,
pointDotStrokeWidth : 1,
pointHitDetectionRadius : 20,
datasetStroke : true,
datasetStrokeWidth : 2,
datasetFill : true,
//animations
animation: true,
animationSteps: 60,
animationEasing: "easeOutQuart",
//scale
showScale: true,
scaleFontFamily: "'Open Sans'",
scaleFontSize: 13,
scaleFontStyle: "normal",
scaleFontColor: "#333",
//tooltips
showTooltips: true,
tooltipFillColor: "#344154",
tooltipFontFamily: "'Open Sans'",
tooltipFontSize: 13,
tooltipFontColor: "#fff",
tooltipYPadding: 8,
tooltipXPadding: 10,
tooltipCornerRadius: 2,
tooltipTitleFontFamily: "'Open Sans'",
});
//------------- Line chart unfilled -------------//
// Variant without area fill (datasetFill: false) and with bezier smoothing.
var lineData1 = {
labels : ["January","February","March","April","May","June","July"],
datasets : [
{
label: "PayPal",
fillColor : "rgba(136,187,200,0.2)",
strokeColor : "rgba(136,187,200,1)",
pointColor : "rgba(136,187,200,1)",
pointStrokeColor : "#fff",
pointHighlightFill : "#a1a1a1",
pointHighlightStroke : "#fff",
data : [3+randNum(),5+randNum(),8+randNum(),13+randNum(),17+randNum(),21+randNum(),23+randNum()]
},
{
label: "Credit card",
fillColor : "rgba(223,106,120,0.2)",
strokeColor : "rgba(223,106,120,1)",
pointColor : "rgba(223,106,120,1)",
pointStrokeColor : "#fff",
pointHighlightFill : "#fff",
pointHighlightStroke : "rgba(223,106,120,1)",
data : [randNum()-5,randNum()-2,randNum()-4,randNum()-1,randNum()-3,randNum()-2,randNum()-5]
}
]
}
var ctx1 = document.getElementById("line-unfilled-chartjs").getContext("2d");
var myLineChart1 = new Chart(ctx1).Line(lineData1, {
responsive: true,
scaleShowGridLines : true,
scaleGridLineColor : "#f3f3f3",
scaleGridLineWidth : 0.2,
bezierCurve : true,
//points
pointDot : false,
datasetFill : false,
//animations
animation: true,
animationSteps: 60,
animationEasing: "easeOutQuart",
//scale
showScale: true,
scaleFontFamily: "'Open Sans'",
scaleFontSize: 13,
scaleFontStyle: "normal",
scaleFontColor: "#333",
//tooltips
showTooltips: true,
tooltipFillColor: "#344154",
tooltipFontFamily: "'Open Sans'",
tooltipFontSize: 13,
tooltipFontColor: "#fff",
tooltipYPadding: 8,
tooltipXPadding: 10,
tooltipCornerRadius: 2,
tooltipTitleFontFamily: "'Open Sans'",
});
//------------- Bar chart -------------//
var barChartData = {
labels : ["January","February","March","April","May","June","July"],
datasets : [
{
fillColor : "rgba(136,187,200,0.5)",
strokeColor : "rgba(136,187,200,0.3)",
highlightFill: "rgba(136,187,200,0.75)",
highlightStroke: "rgba(136,187,200,1)",
data : [3+randNum(),5+randNum(),8+randNum(),13+randNum(),17+randNum(),21+randNum(),23+randNum()]
},
{
fillColor : "rgba(223,106,120,0.5)",
strokeColor : "rgba(223,106,120,0.3)",
highlightFill : "rgba(223,106,120,0.75)",
highlightStroke : "rgba(223,106,120,1)",
data : [randNum()-5,randNum()-2,randNum()-4,randNum()-1,randNum()-3,randNum()-2,randNum()-5]
}
]
}
var ctxBar = document.getElementById("bar-chartjs").getContext("2d");
// NOTE(review): assigned without var/let, so myBar becomes a global --
// confirm whether other page scripts rely on window.myBar before scoping it.
myBar = new Chart(ctxBar).Bar(barChartData, {
responsive : true,
scaleShowGridLines : true,
scaleGridLineColor : "#f3f3f3",
scaleGridLineWidth : 0.2,
//bar options
barShowStroke : true,
barStrokeWidth : 2,
barValueSpacing : 5,
barDatasetSpacing : 2,
//animations
animation: true,
animationSteps: 60,
animationEasing: "easeOutQuart",
//scale
showScale: true,
scaleFontFamily: "'Open Sans'",
scaleFontSize: 13,
scaleFontStyle: "normal",
scaleFontColor: "#333",
scaleBeginAtZero : true,
//tooltips
showTooltips: true,
tooltipFillColor: "#344154",
tooltipFontFamily: "'Open Sans'",
tooltipFontSize: 13,
tooltipFontColor: "#fff",
tooltipYPadding: 8,
tooltipXPadding: 10,
tooltipCornerRadius: 2,
tooltipTitleFontFamily: "'Open Sans'",
});
//------------- Pie chart -------------//
// Static segment data: value drives the slice size, highlight is the hover color.
var pieData = [
{
value: 300,
color:"#88bbc8",
highlight: "#db5565",
label: "SEO"
},
{
value: 50,
color: "#ed7a53",
highlight: "#0bacd3",
label: "Coding"
},
{
value: 100,
color: "#9FC569",
highlight: "#51bf87",
label: "Hosting"
},
{
value: 40,
color: "#bbdce3",
highlight: "#f4ad49",
label: "Design"
},
{
value: 120,
color: "#9a3b1b",
highlight: "#262d37",
label: "Other"
}
];
var ctxPie = document.getElementById("pie-chartjs").getContext("2d");
myPie = new Chart(ctxPie).Pie(pieData, {
responsive : true,
//pie options
segmentShowStroke : true,
segmentStrokeColor : "#fff",
segmentStrokeWidth : 2,
percentageInnerCutout : 0, // This is 0 for Pie charts
//animations
animation: true,
animationSteps: 100,
animationEasing: "easeOutBounce",
animateRotate : true,
animateScale : false,
//tooltips
showTooltips: true,
tooltipFillColor: "#344154",
tooltipFontFamily: "'Open Sans'",
tooltipFontSize: 13,
tooltipFontColor: "#fff",
tooltipYPadding: 8,
tooltipXPadding: 10,
tooltipCornerRadius: 2,
tooltipTitleFontFamily: "'Open Sans'",
});
//------------- Donut chart -------------//
// Same segment data as the pie chart; only the inner cutout differs.
var donutData = [
{
value: 300,
color:"#88bbc8",
highlight: "#db5565",
label: "SEO"
},
{
value: 50,
color: "#ed7a53",
highlight: "#0bacd3",
label: "Coding"
},
{
value: 100,
color: "#9FC569",
highlight: "#51bf87",
label: "Hosting"
},
{
value: 40,
color: "#bbdce3",
highlight: "#f4ad49",
label: "Design"
},
{
value: 120,
color: "#9a3b1b",
highlight: "#262d37",
label: "Other"
}
];
var ctxDonut = document.getElementById("donut-chartjs").getContext("2d");
myDonut = new Chart(ctxDonut).Doughnut(donutData, {
responsive : true,
//donut options
segmentShowStroke : true,
segmentStrokeColor : "#fff",
segmentStrokeWidth : 2,
percentageInnerCutout : 45, // This is 0 for Pie charts
//animations
animation: true,
animationSteps: 100,
animationEasing: "easeOutBounce",
animateRotate : true,
animateScale : true,
//tooltips
showTooltips: true,
tooltipFillColor: "#344154",
tooltipFontFamily: "'Open Sans'",
tooltipFontSize: 13,
tooltipFontColor: "#fff",
tooltipYPadding: 8,
tooltipXPadding: 10,
tooltipCornerRadius: 2,
tooltipTitleFontFamily: "'Open Sans'",
});
//------------- Radar chart -------------//
var radarChartData = {
labels: ["Eating", "Drinking", "Sleeping", "Designing", "Coding", "Cycling", "Running"],
datasets: [
{
label: "My First dataset",
fillColor: "rgba(136,187,200,0.2)",
strokeColor: "rgba(136,187,200,1)",
pointColor: "rgba(136,187,200,1)",
pointStrokeColor: "#fff",
pointHighlightFill: "#fff",
pointHighlightStroke: "rgba(136,187,200,1)",
data: [65,59,90,81,56,55,40]
},
{
label: "My Second dataset",
fillColor: "rgba(223,106,120,0.2)",
strokeColor: "rgba(223,106,120,1)",
pointColor: "rgba(223,106,120,1)",
pointStrokeColor: "#fff",
pointHighlightFill: "#fff",
pointHighlightStroke: "rgba(223,106,120,1)",
data: [28,48,40,19,96,27,100]
}
]
};
myRadar = new Chart(document.getElementById("radar-chartjs").getContext("2d")).Radar(radarChartData, {
responsive: true,
//radar options
scaleShowLine : true,
angleShowLineOut : true,
scaleShowLabels : false,
// Fixed: the alpha component was malformed ("rgba(0,0,0,.0.5)" is not a
// valid CSS color); intended value is 50% black.
angleLineColor : "rgba(0,0,0,0.5)",
angleLineWidth : 1,
pointDotRadius : 3,
pointDotStrokeWidth : 1,
//points
pointLabelFontFamily : "'Open Sans'",
pointDot : true,
//animations
animation: true,
animationSteps: 100,
animationEasing: "easeOutBounce",
animateRotate : true,
animateScale : true,
//tooltips
showTooltips: true,
tooltipFillColor: "#344154",
tooltipFontFamily: "'Open Sans'",
tooltipFontSize: 13,
tooltipFontColor: "#fff",
tooltipYPadding: 8,
tooltipXPadding: 10,
tooltipCornerRadius: 2,
tooltipTitleFontFamily: "'Open Sans'",
});
//------------- Polar area -------------//
// Same segment data again, rendered as a polar-area chart.
var polarData = [
{
value: 300,
color:"#88bbc8",
highlight: "#db5565",
label: "SEO"
},
{
value: 50,
color: "#ed7a53",
highlight: "#0bacd3",
label: "Coding"
},
{
value: 100,
color: "#9FC569",
highlight: "#51bf87",
label: "Hosting"
},
{
value: 40,
color: "#bbdce3",
highlight: "#f4ad49",
label: "Design"
},
{
value: 120,
color: "#9a3b1b",
highlight: "#262d37",
label: "Other"
}
];
var ctxPolar = document.getElementById("polar-chartjs").getContext("2d");
myPolarArea = new Chart(ctxPolar).PolarArea(polarData, {
responsive:true,
//animations
animation: true,
animationSteps: 100,
animationEasing: "easeOutBounce",
animateRotate : true,
animateScale : false,
//tooltips
showTooltips: true,
tooltipFillColor: "#344154",
tooltipFontFamily: "'Open Sans'",
tooltipFontSize: 13,
tooltipFontColor: "#fff",
tooltipYPadding: 8,
tooltipXPadding: 10,
tooltipCornerRadius: 2,
tooltipTitleFontFamily: "'Open Sans'",
});
});
//sparkline in sidebar area
// Fixed demo series: "positive" trends up, the two "negative" series trend down.
var positive = [1,5,3,7,8,6,10];
var negative = [10,6,8,7,3,5,1]
var negative1 = [7,6,8,7,6,5,4]
$('#stat1').sparkline(positive,{
height:15,
spotRadius: 0,
barColor: '#9FC569',
type: 'bar'
});
$('#stat2').sparkline(negative,{
height:15,
spotRadius: 0,
barColor: '#ED7A53',
type: 'bar'
});
$('#stat3').sparkline(negative1,{
height:15,
spotRadius: 0,
barColor: '#ED7A53',
type: 'bar'
});
$('#stat4').sparkline(positive,{
height:15,
spotRadius: 0,
barColor: '#9FC569',
type: 'bar'
});
//sparkline in widget
$('#stat5').sparkline(positive,{
height:15,
spotRadius: 0,
barColor: '#9FC569',
type: 'bar'
});
// Line-style sparkline; each option documented inline by the original author.
$('#stat6').sparkline(positive, {
width: 70,//Width of the chart - Defaults to 'auto' - May be any valid css width - 1.5em, 20px, etc (using a number without a unit specifier won't do what you want) - This option does nothing for bar and tristate chars (see barWidth)
height: 20,//Height of the chart - Defaults to 'auto' (line height of the containing tag)
lineColor: '#88bbc8',//Used by line and discrete charts to specify the colour of the line drawn as a CSS values string
fillColor: '#f2f7f9',//Specify the colour used to fill the area under the graph as a CSS value. Set to false to disable fill
spotColor: '#e72828',//The CSS colour of the final value marker. Set to false or an empty string to hide it
maxSpotColor: '#005e20',//The CSS colour of the marker displayed for the maximum value. Set to false or an empty string to hide it
minSpotColor: '#f7941d',//The CSS colour of the marker displayed for the mimum value. Set to false or an empty string to hide it
spotRadius: 3,//Radius of all spot markers, In pixels (default: 1.5) - Integer
lineWidth: 2//In pixels (default: 1) - Integer
});
import html
import sqlite3
def generate_html_table(conn, table) -> str:
    """Render every row of *table* (plus a header row) as an HTML ``<table>``.

    Args:
        conn: An open :mod:`sqlite3` connection.
        table: Name of the table to dump. The name is quoted as an SQL
            identifier (embedded double quotes doubled), so it can no longer
            be used to inject arbitrary SQL; column and cell values are
            HTML-escaped so markup in the data cannot break the output.

    Returns:
        The generated HTML markup as a single string.
    """
    # Quote the identifier instead of interpolating it raw into the SQL text.
    quoted = '"' + str(table).replace('"', '""') + '"'
    c = conn.cursor()
    parts = ["<table><tr>"]
    # Header row: column names come from the table schema (index 1 of each
    # PRAGMA table_info row is the column name).
    for col in c.execute(f"PRAGMA table_info({quoted})"):
        parts.append(f"<th>{html.escape(str(col[1]))}</th>")
    parts.append("</tr>")
    # One <tr> per data row, with every cell value HTML-escaped.
    for row in c.execute(f"SELECT * FROM {quoted}"):
        parts.append("<tr>")
        for cell in row:
            parts.append(f"<td>{html.escape(str(cell))}</td>")
        parts.append("</tr>")
    parts.append("</table>")
    return "".join(parts)
# Sample usage: dump the 'employees' table of sample.db when run as a script.
# Guarded so that importing this module no longer opens the database (and
# prints) as a side effect.
if __name__ == "__main__":
    conn = sqlite3.connect('sample.db')
    try:
        table_name = 'employees'
        html_table = generate_html_table(conn, table_name)
        print(html_table)
    finally:
        conn.close()
<reponame>isaec/gune
//I want to make this in wasm at some point, its gonna be pricy to run I imagine
//can't know till i write it and perf test it =P
const FArray = require("../shared/array").FArray
/*
http://www.roguebasin.com/index.php?title=The_Incredible_Power_of_Dijkstra_Maps
To get a Dijkstra map, you start with an integer array representing your map,
with some set of goal cells set to zero and all the rest set to a very high
number. Iterate through the map's "floor" cells -- skip the impassable wall
cells. If any floor tile has a value greater than 1 regarding to its lowest-
value floor neighbour (in a cardinal direction - i.e. up, down, left or right;
a cell next to the one we are checking), set it to be exactly 1 greater than
its lowest value neighbor. Repeat until no changes are made. The resulting grid
of numbers represents the number of steps that it will take to get from any
given tile to the nearest goal.
*/
// Yields the 8 cells adjacent to (x, y): the 4 cardinal directions first,
// then the 4 diagonals (order matters to downstream consumers).
function* neighbor(x, y) {
const offsets = [
[0, -1], [0, 1], [1, 0], [-1, 0],
[-1, -1], [-1, 1], [1, -1], [1, 1],
]
for (const [dx, dy] of offsets) yield new Cord(x + dx, y + dy)
}
// Yields only those neighbours of (x, y) that occCallback reports as free
// (occCallback(x, y) is truthy when the cell is occupied/blocked).
function* emptyNeighbor(x, y, occCallback) {
for (const cell of neighbor(x, y)) {
if (occCallback(cell.x, cell.y)) continue
yield cell
}
}
//shuffle code from stack overflow =P
function inPlaceShuffle(array) {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
}
// Yields candidate free cells around (x, y) for fluid-like spreading:
// the centre cell first (when free), then the shuffled 8-cell inner ring,
// then the shuffled 16-cell ring at Chebyshev distance 2.
function* fluidOffsets(x, y, occCallback) {
// Inner ring: the 8 immediately adjacent cells.
const innerOffsets = [
[0, -1], [0, 1], [1, 0], [-1, 0],
[-1, -1], [-1, 1], [1, -1], [1, 1],
]
const inner = innerOffsets.map(([dx, dy]) => new Cord(x + dx, y + dy))
inPlaceShuffle(inner)
if (!occCallback(x, y)) yield new Cord(x, y)
for (const cell of inner) if (!occCallback(cell.x, cell.y)) yield cell
// Outer ring: every cell exactly two steps away.
const outerOffsets = [
[-2, -2], [-2, -1], [-2, 0], [-2, 1], [-2, 2],
[-1, -2], [-1, 2],
[0, -2], [0, 2],
[1, -2], [1, 2],
[2, -2], [2, -1], [2, 0], [2, 1], [2, 2],
]
const outer = outerOffsets.map(([dx, dy]) => new Cord(x + dx, y + dy))
inPlaceShuffle(outer)
for (const cell of outer) if (!occCallback(cell.x, cell.y)) yield cell
}
function distance(cord1, cord2) {
return Math.sqrt((cord1.x - cord2.x) * (cord1.x - cord2.x) + (cord1.y - cord2.y) * (cord1.y - cord2.y))
}
/**
 * returns a map of distances from goals
 * goalArray expects objects with x and y properites
 *
 * Breadth-first flood fill: goal cells get distance 0 and each expansion
 * ring adds 1 (8-connected, so this is Chebyshev-style distance, not
 * Euclidean). Cells where mapCallback(x, y) == 2 are treated as walls.
 */
class Dij {
constructor(width, mapCallback, goalArray, maxDistance = 20) {
// FArray is the project's flat 2-D array; distances are stored there.
this.distance = new FArray(width)
this.mapCallback = mapCallback
this.goalArray = goalArray
// Expansion stops entirely once a written distance exceeds this bound.
this.maxDistance = maxDistance
this.calc()
}
calc() {
this.distance.clean()
let frontier = []
// Seed the frontier with every goal at distance 0.
for (const goal of this.goalArray) {
frontier.push(new Cord(goal.x, goal.y))
this.distance.set(goal.x, goal.y, 0)
}
// Classic BFS ring expansion: each pass produces the next frontier.
while (frontier.length > 0) {
let newFrontier = []
for (let i = 0; i < frontier.length; i++) {
let curr = frontier[i]
for (let cord of neighbor(curr.x, curr.y)) {
// Skip walls (map value 2).
if (this.mapCallback(cord.x, cord.y) == 2) continue
// Unvisited cell: record distance and queue it.
if (this.distance.get(cord.x, cord.y) == undefined) {
newFrontier.push(cord)
// NOTE(review): assumes FArray.set returns the stored value so it
// can be compared against maxDistance -- confirm in shared/array.
if (this.distance.set(cord.x, cord.y, this.distance.get(curr.x, curr.y) + 1) > this.maxDistance) return
}
}
}
frontier = newFrontier
}
}
}
// Returns the step (as a delta Cord) from fromCord toward the
// lowest-valued unoccupied neighbour in the distance map, or undefined
// when no neighbour improves on the current cell.
const rollDown = (dij, fromCord, occCallback) => {
    let best = fromCord
    let bestVal = dij.get(fromCord.x, fromCord.y)
    for (const cord of emptyNeighbor(fromCord.x, fromCord.y, occCallback)) {
        const candidate = dij.get(cord.x, cord.y)
        if (candidate < bestVal) {
            bestVal = candidate
            best = cord
        }
    }
    if (best === fromCord) return undefined
    return new Cord(best.x - fromCord.x, best.y - fromCord.y)
}
// Simple 2D coordinate pair.
class Cord {
    constructor(x, y) {
        Object.assign(this, { x, y })
    }
} //didnt know i could do this
// Public API: coordinate helpers plus the flood-fill distance-map utilities.
module.exports = {
    Cord,
    Dij,
    rollDown,
    emptyNeighbor,
    fluidOffsets,
    distance
}
# This file is copied to spec/ when you run 'rails generate rspec:install'
require_relative "../config/application"
Rails.application.load_tasks
require "spec_helper"
ENV["RAILS_ENV"] ||= "test"
# Point ActiveRecord at the test database: prefer an explicit
# DATABASE_TEST_URL, otherwise derive one by renaming the database inside
# DATABASE_URL.
# NOTE(review): the gsub fallback raises NoMethodError when DATABASE_URL is
# unset — confirm the environment always provides it.
ENV["DATABASE_URL"] = ENV["DATABASE_TEST_URL"] ||
  ENV["DATABASE_URL"].gsub("hyrax?pool", "hyrax-test?pool")
ENV["SOLR_URL"] = ENV["SOLR_TEST_URL"] if ENV["SOLR_TEST_URL"]
require File.expand_path("../config/environment", __dir__)
# Prevent database truncation if the environment is running in production mode
abort("The Rails environment is running in production mode!") if Rails.env.production?
require "rspec/rails"
# Add additional requires below this line. Rails is not loaded until this point!
Dir[Rails.root.join("spec", "support", "**", "*.rb")].sort.each { |f| require f }
ActiveJob::Base.queue_adapter = :test
# Create and migrate the test database up front; abort the run with a
# readable message when a migration is pending.
begin
  db_config = ActiveRecord::Base.configurations[ENV["RAILS_ENV"]]
  ActiveRecord::Tasks::DatabaseTasks.create(db_config)
  ActiveRecord::Migrator.migrations_paths = [Pathname.new(ENV["RAILS_ROOT"]).join("db", "migrate").to_s]
  ActiveRecord::Tasks::DatabaseTasks.migrate
  ActiveRecord::Base.descendants.each(&:reset_column_information)
rescue ActiveRecord::PendingMigrationError => e
  puts e.to_s.strip
  exit 1
end
# register a test adapter for unit tests
Valkyrie::MetadataAdapter
  .register(Valkyrie::Persistence::Memory::MetadataAdapter.new,
    :test_adapter)

# Register every custom query Hyrax relies on against the in-memory
# adapter's query service so specs can resolve them.
query_registration_target =
  Valkyrie::MetadataAdapter.find(:test_adapter).query_service.custom_queries
[Hyrax::CustomQueries::Navigators::CollectionMembers,
  Hyrax::CustomQueries::Navigators::ChildFilesetsNavigator,
  Hyrax::CustomQueries::Navigators::ChildWorksNavigator,
  Hyrax::CustomQueries::FindAccessControl,
  Hyrax::CustomQueries::FindCollectionsByType,
  Hyrax::CustomQueries::FindManyByAlternateIds,
  Hyrax::CustomQueries::FindIdsByModel,
  Hyrax::CustomQueries::FindFileMetadata,
  Hyrax::CustomQueries::Navigators::FindFiles].each do |handler|
  query_registration_target.register_query_handler(handler)
end

# register/use the memory storage adapter for tests
Valkyrie::StorageAdapter
  .register(Valkyrie::Storage::Memory.new,
    :memory)
RSpec.configure do |config|
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  config.use_transactional_fixtures = true
  config.infer_spec_type_from_file_location!
  config.filter_rails_from_backtrace!
  DatabaseCleaner.allow_remote_database_url = true

  # Truncate once before the suite, then rely on transactions per example.
  config.before :suite do
    DatabaseCleaner.strategy = :transaction
    DatabaseCleaner.clean_with(:truncation)
  end

  # Start every example with an empty search index.
  config.before do
    Hyrax.index_adapter.wipe!
  end

  # Swap in the in-memory Valkyrie adapters for each example, restoring the
  # production adapters afterwards.
  config.around(:example) do |example|
    Valkyrie.config.metadata_adapter = :test_adapter
    Valkyrie.config.storage_adapter = :memory
    example.run
    Valkyrie.config.metadata_adapter = :comet_metadata_store
    Valkyrie.config.storage_adapter = :repository_s3
  end

  # Examples may opt into a specific adapter via metadata, e.g.
  # `it "...", metadata_adapter: :comet_metadata_store`.
  config.around(:example, :metadata_adapter) do |example|
    Valkyrie.config.metadata_adapter = example.metadata[:metadata_adapter]
    example.run
    Valkyrie.config.metadata_adapter = :comet_metadata_store
  end

  config.around(:example, :storage_adapter) do |example|
    Valkyrie.config.storage_adapter = example.metadata[:storage_adapter]
    example.run
    Valkyrie.config.storage_adapter = :repository_s3
  end

  config.include Capybara::RSpecMatchers, type: :input
  config.include Devise::Test::ControllerHelpers, type: :controller
  config.include Devise::Test::IntegrationHelpers, type: :system

  # System specs drive a real browser unless SKIP_SELENIUM is set.
  config.before(:each, type: :system) do
    Capybara.app_host = "http://#{Capybara.server_host}:#{Capybara.server_port}"
    if ENV["SKIP_SELENIUM"].present?
      driven_by(:rack_test)
    else
      driven_by(:selenium_standalone_chrome_headless_sandboxless)
    end
  end

  # arbitrary gems may also be filtered via:
  # config.filter_gems_from_backtrace("gem name")
  config.after(:each, type: :system) do
    Capybara.reset_sessions!
    page.driver.reset!
  end

  config.after do
    DatabaseCleaner.clean
  end
end
|
<reponame>team-mayes/hartree<gh_stars>0
package org.cmayes.hartree.disp.txt;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.FileReader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.List;
import java.util.stream.Collectors;
import org.cmayes.hartree.model.LowestEnergyMapper;
import org.junit.Test;
import com.cmayes.common.model.Atom;
import com.cmayes.common.model.impl.DefaultAtom;
import com.cmayes.common.util.EnvUtils;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Tests for {@link LowestEnergyTemplateDisplay}.
 *
 * @author cmayes
 */
public class TestLowestEnergyTemplateDisplay {
    /** The prefix for file locations. */
    private static final String FILE_DIR_PFX = "src/test/resources/files/";
    private ObjectMapper mapper = new ObjectMapper();

    /**
     * Checks that the display matches a previously-generated example.
     *
     * @throws Exception
     *             When there's a problem.
     */
    @Test
    public void testSimple() throws Exception {
        final List<DefaultAtom> results = mapper.readValue(new File(FILE_DIR_PFX,
                "json/aglc_b14_157.json"),
                new TypeReference<List<DefaultAtom>>() {
                });
        final LowestEnergyMapper lowMap = new LowestEnergyMapper();
        lowMap.add(1, results.stream().map(Atom.class::cast).collect(Collectors.toList()));
        final Writer stringWriter = new StringWriter();
        final LowestEnergyTemplateDisplay lowDisp = new LowestEnergyTemplateDisplay();
        lowDisp.setTplName(FILE_DIR_PFX + "tpl/"
                + LowestEnergyTemplateDisplay.DEF_LOWTPL);
        lowDisp.write(stringWriter, lowMap);
        // try-with-resources: the FileReader was previously never closed,
        // leaking a file handle for the remainder of the test JVM.
        try (FileReader expected = new FileReader(new File(
                FILE_DIR_PFX, "txt/aglc_b14_157.txt"))) {
            assertEquals(EnvUtils.getStringFromReader(expected),
                    stringWriter.toString());
        }
    }
}
|
<filename>frontend/src/index.tsx
// Application entry point: renders the React tree into #root, wiring up
// Apollo GraphQL, client-side routing, i18n and global styles.
import React from 'react';
import ReactDOM from 'react-dom';
import { BrowserRouter } from 'react-router-dom';
import reportWebVitals from './reportWebVitals';
import { ApolloProvider } from '@apollo/client';
import 'antd/dist/antd.css';
import './index.css';
import "./locales";
import { defaultPage, menus, routes } from './routes';
// import { Router } from './shared/Router';
import { AppLayout } from './shared/AppLayout';
import { graphqlClient } from './graphql';

// Mount the whole app; AppLayout owns route rendering and the menu chrome.
ReactDOM.render(
  <React.StrictMode>
    <ApolloProvider client={graphqlClient}>
      <BrowserRouter>
        <AppLayout defaultRoute={defaultPage} routerRoot="/" routes={routes} menu={menus} />
      </BrowserRouter>
    </ApolloProvider>
  </React.StrictMode>,
  document.getElementById('root')
)

// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals();
|
<filename>backend/grad-trax-test-suite/src/main/java/ca/bc/gov/educ/gtts/model/dto/grad/algorithm/StudentExams.java
package ca.bc.gov.educ.gtts.model.dto.grad.algorithm;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * Wrapper DTO holding the list of a student's exams used by the grad
 * algorithm data model.
 *
 * NOTE(review): registering a mutable Lombok @Data DTO as a Spring
 * @Component makes it a shared singleton bean — confirm this is intentional.
 */
@Component
@Data
@AllArgsConstructor
@NoArgsConstructor
public class StudentExams {
    // All exams belonging to a single student.
    private List<StudentExam> studentExamList;
}
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as vscode from 'vscode';
import * as path from 'path';
import { MarkdownEngine } from './markdownEngine';
import * as nls from 'vscode-nls';
import { Logger } from "./logger";
const localize = nls.loadMessageBundle();
/**
 * Decides whether the enhanced-security content security policy applies to a
 * workspace and records which workspace roots the user has trusted.
 */
export interface ContentSecurityPolicyArbiter {
    /** True when the user disabled enhanced security for this workspace root. */
    isEnhancedSecurityDisableForWorkspace(rootPath: string): boolean;

    /** Marks the workspace root as trusted (scripts allowed in previews). */
    addTrustedWorkspace(rootPath: string): Thenable<void>;

    /** Revokes trust previously granted to the workspace root. */
    removeTrustedWorkspace(rootPath: string): Thenable<void>;
}
// User-facing strings for the preview's content-security-policy alert.
const previewStrings = {
    cspAlertMessageText: localize('preview.securityMessage.text', 'Scripts have been disabled in this document'),
    // Fixed typo in the user-visible message: 'secuirty' -> 'security'.
    cspAlertMessageTitle: localize('preview.securityMessage.title', 'Scripts are disabled in the markdown preview. Change the Markdown preview security setting to enable scripts'),
    cspAlertMessageLabel: localize('preview.securityMessage.label', 'Scripts Disabled Security Warning')
};
/**
 * True for documents whose language is markdown, excluding our own virtual
 * 'markdown'-scheme preview documents.
 */
export function isMarkdownFile(document: vscode.TextDocument) {
    if (document.languageId !== 'markdown') {
        return false;
    }
    // prevent processing of own documents
    return document.uri.scheme !== 'markdown';
}
/**
 * Maps a document URI to its preview URI ('markdown' scheme, '.rendered'
 * suffix, original URI stashed in the query). Preview URIs pass through
 * unchanged.
 */
export function getMarkdownUri(uri: vscode.Uri) {
    if (uri.scheme === 'markdown') {
        return uri;
    }
    const query = uri.toString();
    const path = uri.path + '.rendered';
    return uri.with({ scheme: 'markdown', path, query });
}
/**
 * Immutable snapshot of the editor/markdown settings that affect preview
 * rendering. Create via getCurrentConfig(); compare snapshots with
 * isEqualTo() to detect configuration changes.
 */
class MarkdownPreviewConfig {
    public static getCurrentConfig() {
        return new MarkdownPreviewConfig();
    }

    public readonly scrollBeyondLastLine: boolean;
    public readonly wordWrap: boolean;
    public readonly previewFrontMatter: string;
    public readonly doubleClickToSwitchToEditor: boolean;
    public readonly scrollEditorWithPreview: boolean;
    public readonly scrollPreviewWithEditorSelection: boolean;
    public readonly markEditorSelection: boolean;
    public readonly lineHeight: number;
    public readonly fontSize: number;
    public readonly fontFamily: string | undefined;
    public readonly styles: string[];

    private constructor() {
        const editorConfig = vscode.workspace.getConfiguration('editor');
        const markdownConfig = vscode.workspace.getConfiguration('markdown');
        this.scrollBeyondLastLine = editorConfig.get<boolean>('scrollBeyondLastLine', false);
        // Any wordWrap mode other than 'off' counts as wrapping.
        this.wordWrap = editorConfig.get<string>('wordWrap', 'off') !== 'off';
        this.previewFrontMatter = markdownConfig.get<string>('previewFrontMatter', 'hide');
        this.scrollPreviewWithEditorSelection = !!markdownConfig.get<boolean>('preview.scrollPreviewWithEditorSelection', true);
        this.scrollEditorWithPreview = !!markdownConfig.get<boolean>('preview.scrollEditorWithPreview', true);
        this.doubleClickToSwitchToEditor = !!markdownConfig.get<boolean>('preview.doubleClickToSwitchToEditor', true);
        this.markEditorSelection = !!markdownConfig.get<boolean>('preview.markEditorSelection', true);
        this.fontFamily = markdownConfig.get<string | undefined>('preview.fontFamily', undefined);
        // NaN when unset; the style generator only emits overrides for values > 0.
        this.fontSize = +markdownConfig.get<number>('preview.fontSize', NaN);
        this.lineHeight = +markdownConfig.get<number>('preview.lineHeight', NaN);
        this.styles = markdownConfig.get<string[]>('styles', []);
    }

    /**
     * Shallow-compares every own property (through the index signature below),
     * then element-wise compares the styles array.
     */
    public isEqualTo(otherConfig: MarkdownPreviewConfig) {
        for (let key in this) {
            if (this.hasOwnProperty(key) && key !== 'styles') {
                if (this[key] !== otherConfig[key]) {
                    return false;
                }
            }
        }
        // Check styles
        if (this.styles.length !== otherConfig.styles.length) {
            return false;
        }
        for (let i = 0; i < this.styles.length; ++i) {
            if (this.styles[i] !== otherConfig.styles[i]) {
                return false;
            }
        }
        return true;
    }

    // Permits the keyed property access used by isEqualTo above.
    [key: string]: any;
}
/**
 * Renders markdown documents to HTML for the preview pane.
 *
 * Registered as the TextDocumentContentProvider for the 'markdown' URI
 * scheme; fires onDidChange (debounced) when a preview must re-render.
 */
export class MDDocumentContentProvider implements vscode.TextDocumentContentProvider {
    private _onDidChange = new vscode.EventEmitter<vscode.Uri>();
    // Debounce flag: true while an update is already scheduled (see update()).
    private _waiting: boolean = false;
    private config: MarkdownPreviewConfig;
    // Styles/scripts contributed by other extensions for injection into previews.
    private extraStyles: Array<vscode.Uri> = [];
    private extraScripts: Array<vscode.Uri> = [];

    constructor(
        private engine: MarkdownEngine,
        private context: vscode.ExtensionContext,
        private cspArbiter: ContentSecurityPolicyArbiter,
        private logger: Logger
    ) {
        this.config = MarkdownPreviewConfig.getCurrentConfig();
    }

    /** Registers an additional script to inject into every preview. */
    public addScript(resource: vscode.Uri): void {
        this.extraScripts.push(resource);
    }

    /** Registers an additional stylesheet to inject into every preview. */
    public addStyle(resource: vscode.Uri): void {
        this.extraStyles.push(resource);
    }

    /** Absolute URI string for a file in the extension's media/ folder. */
    private getMediaPath(mediaFile: string): string {
        return vscode.Uri.file(this.context.asAbsolutePath(path.join('media', mediaFile))).toString();
    }

    /** Resolves a possibly-relative href from user settings to an absolute URI string. */
    private fixHref(resource: vscode.Uri, href: string): string {
        if (!href) {
            return href;
        }
        // Use href if it is already an URL
        const hrefUri = vscode.Uri.parse(href);
        if (['file', 'http', 'https'].indexOf(hrefUri.scheme) >= 0) {
            return hrefUri.toString();
        }
        // Use href as file URI if it is absolute
        if (path.isAbsolute(href)) {
            return vscode.Uri.file(href).toString();
        }
        // use a workspace relative path if there is a workspace
        let rootPath = vscode.workspace.rootPath;
        if (rootPath) {
            return vscode.Uri.file(path.join(rootPath, href)).toString();
        }
        // otherwise look relative to the markdown file
        return vscode.Uri.file(path.join(path.dirname(resource.fsPath), href)).toString();
    }

    private computeCustomStyleSheetIncludes(uri: vscode.Uri): string {
        if (this.config.styles && Array.isArray(this.config.styles)) {
            return this.config.styles.map((style) => {
                // Escape double quotes so a style path cannot break out of the
                // HTML attribute (the previous replace('"' -> '"') was a no-op).
                return `<link rel="stylesheet" class="code-user-style" data-source="${style.replace(/"/g, '&quot;')}" href="${this.fixHref(uri, style)}" type="text/css" media="screen">`;
            }).join('\n');
        }
        return '';
    }

    /** Inline <style> overriding font family/size/line-height from settings. */
    private getSettingsOverrideStyles(nonce: string): string {
        return `<style nonce="${nonce}">
            body {
                ${this.config.fontFamily ? `font-family: ${this.config.fontFamily};` : ''}
                ${this.config.fontSize > 0 ? `font-size: ${this.config.fontSize}px;` : ''}
                ${this.config.lineHeight > 0 ? `line-height: ${this.config.lineHeight};` : ''}
            }
        </style>`;
    }

    private getStyles(uri: vscode.Uri, nonce: string): string {
        const baseStyles = [
            this.getMediaPath('markdown.css'),
            this.getMediaPath('tomorrow.css')
        ].concat(this.extraStyles.map(resource => resource.toString()));
        return `${baseStyles.map(href => `<link rel="stylesheet" type="text/css" href="${href}">`).join('\n')}
            ${this.getSettingsOverrideStyles(nonce)}
            ${this.computeCustomStyleSheetIncludes(uri)}`;
    }

    private getScripts(nonce: string): string {
        const scripts = [this.getMediaPath('main.js')].concat(this.extraScripts.map(resource => resource.toString()));
        return scripts
            .map(source => `<script async src="${source}" nonce="${nonce}"></script>`)
            .join('\n');
    }

    /** Builds the full preview HTML page for the given preview URI. */
    public provideTextDocumentContent(uri: vscode.Uri): Thenable<string> {
        const sourceUri = vscode.Uri.parse(uri.query);

        // Scroll the preview to the editor's cursor line when previewing the active document.
        let initialLine: number | undefined = undefined;
        const editor = vscode.window.activeTextEditor;
        if (editor && editor.document.uri.fsPath === sourceUri.fsPath) {
            initialLine = editor.selection.active.line;
        }

        return vscode.workspace.openTextDocument(sourceUri).then(document => {
            this.config = MarkdownPreviewConfig.getCurrentConfig();

            const initialData = {
                previewUri: uri.toString(),
                source: sourceUri.toString(),
                line: initialLine,
                scrollPreviewWithEditorSelection: this.config.scrollPreviewWithEditorSelection,
                scrollEditorWithPreview: this.config.scrollEditorWithPreview,
                doubleClickToSwitchToEditor: this.config.doubleClickToSwitchToEditor
            };

            this.logger.log('provideTextDocumentContent', initialData);

            // Content Security Policy
            // NOTE(review): the timestamp-based nonce is predictable; a
            // crypto-random value would be stronger — confirm before changing.
            const nonce = new Date().getTime() + '' + new Date().getMilliseconds();
            let csp = `<meta http-equiv="Content-Security-Policy" content="default-src 'self'; img-src 'self' http: https: data:; media-src 'self' http: https: data:; child-src 'none'; script-src 'nonce-${nonce}'; style-src 'self' 'unsafe-inline' http: https: data:; font-src 'self' http: https: data:;">`;
            if (this.cspArbiter.isEnhancedSecurityDisableForWorkspace(vscode.workspace.rootPath || sourceUri.toString())) {
                csp = '';
            }

            const body = this.engine.render(sourceUri, this.config.previewFrontMatter === 'hide', document.getText());
            // JSON payloads are embedded in attributes, so double quotes must be
            // entity-escaped (the previous replace('"' -> '"') was a no-op and
            // produced broken attributes).
            return `<!DOCTYPE html>
                <html>
                <head>
                    <meta http-equiv="Content-type" content="text/html;charset=UTF-8">
                    ${csp}
                    <meta id="vscode-markdown-preview-data" data-settings="${JSON.stringify(initialData).replace(/"/g, '&quot;')}" data-strings="${JSON.stringify(previewStrings).replace(/"/g, '&quot;')}">
                    <script src="${this.getMediaPath('csp.js')}" nonce="${nonce}"></script>
                    <script src="${this.getMediaPath('loading.js')}" nonce="${nonce}"></script>
                    ${this.getStyles(uri, nonce)}
                    <base href="${document.uri.toString(true)}">
                </head>
                <body class="vscode-body ${this.config.scrollBeyondLastLine ? 'scrollBeyondLastLine' : ''} ${this.config.wordWrap ? 'wordWrap' : ''} ${this.config.markEditorSelection ? 'showEditorSelection' : ''}">
                    ${body}
                    <div class="code-line" data-line="${document.lineCount}"></div>
                    ${this.getScripts(nonce)}
                </body>
                </html>`;
        });
    }

    /** Re-reads settings and refreshes every open preview when they changed. */
    public updateConfiguration() {
        const newConfig = MarkdownPreviewConfig.getCurrentConfig();
        if (!this.config.isEqualTo(newConfig)) {
            this.config = newConfig;
            // update all generated md documents
            vscode.workspace.textDocuments.forEach(document => {
                if (document.uri.scheme === 'markdown') {
                    this.update(document.uri);
                }
            });
        }
    }

    get onDidChange(): vscode.Event<vscode.Uri> {
        return this._onDidChange.event;
    }

    /** Schedules a (300 ms debounced) change event for the given preview URI. */
    public update(uri: vscode.Uri) {
        if (!this._waiting) {
            this._waiting = true;
            setTimeout(() => {
                this._waiting = false;
                this._onDidChange.fire(uri);
            }, 300);
        }
    }
}
|
<gh_stars>1-10
package com.testvagrant.ekam.testBases.cucumber;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.testvagrant.ekam.config.models.EkamConfig;
import com.testvagrant.ekam.internal.executiontimeline.models.EkamTest;
import com.testvagrant.ekam.internal.modules.EkamConfigModule;
import io.cucumber.java.Scenario;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.MDC;
import org.testng.ITestResult;
import static com.testvagrant.ekam.logger.EkamLogger.ekamLogger;
/**
 * Base class for Cucumber scenario step-definition classes: loads the Ekam
 * configuration via Guice and offers helpers for building per-scenario test
 * metadata and wiring the scenario logger.
 */
public abstract class ScenarioDefinition {
    protected EkamConfig ekamConfig;

    public ScenarioDefinition() {
        // NOTE(review): a fresh Guice injector is built for every instance —
        // confirm EkamConfigModule is cheap enough for per-scenario creation.
        Injector injector = Guice.createInjector(new EkamConfigModule());
        ekamConfig = injector.getInstance(EkamConfig.class);
    }

    /**
     * Builds an EkamTest descriptor for the scenario; the feature field is
     * derived by capitalizing the scenario name and stripping whitespace.
     */
    protected EkamTest buildEkamTest(Scenario scenario) {
        return EkamTest.builder()
                .scenario(scenario.getName())
                .feature(StringUtils.capitalize(scenario.getName().replaceAll("\\s", "")))
                .build();
    }

    /** Routes this scenario's log output to a file named after its id. */
    protected void initLogger(Scenario scenario) {
        MDC.put("logFileName", scenario.getId());
        ekamLogger().info("Running test {}", scenario.getName());
    }
}
|
package gamesite.servlet;
import java.io.*;
import java.net.*;
import java.sql.*;
import java.text.*;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import gamesite.model.ShoppingCart;
import gamesite.model.ShoppingCartItem;
import gamesite.utils.DBConnection;
import gamesite.utils.SQLQuery;
/**
 * Validates submitted credit-card details against the creditcards table and,
 * on success, records one sales row per purchased game for the matching
 * customer before redirecting to the confirmation page.
 */
public class CustomerInformationServlet extends HttpServlet
{
    public String getServletInfo()
    {
        return "Servlet verifies customer information is in the creditcards table database";
    }

    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException
    {
        System.out.println("CustomerInformation class is active");
        response.setContentType("text/html"); // Response mime type
        try
        {
            Connection dbcon = DBConnection.create();
            HttpSession session = request.getSession();
            String first_name = (String)request.getParameter("first_name");
            String last_name = (String)request.getParameter("last_name");
            String cc_id = (String)request.getParameter("cc_id");
            String expiration = (String)request.getParameter("expiration");
            java.sql.Date expDate = java.sql.Date.valueOf(expiration);

            // Bind the card id as a parameter instead of concatenating it into
            // the SQL text (the concatenation was an SQL-injection risk).
            // A scrollable result set is requested explicitly because
            // last()/getRow() is used below to count the matches.
            String query = "SELECT * FROM creditcards WHERE id=? and first_name=? and last_name=? and expiration=?";
            PreparedStatement statement = dbcon.prepareStatement(query,
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
            statement.setString(1,cc_id);
            statement.setString(2,first_name);
            statement.setString(3,last_name);
            statement.setDate(4,expDate);
            ResultSet result = statement.executeQuery();

            //get the number of rows in the set executed by query
            result.last();
            int rowCount = result.getRow();
            System.out.println(query);
            System.out.println(rowCount);

            if(rowCount == 1)
            {
                System.out.println(first_name + " " + last_name + " was found in the creditcards table");

                // Look up the customer id with its own parameterized statement.
                // (The old code called executeQuery(String) on a
                // PreparedStatement — illegal per the JDBC spec — and built
                // the SQL by concatenation, which was also injectable.)
                String customerIdQuery = "SELECT id FROM customers WHERE first_name=? and last_name=?";
                PreparedStatement customerIdStatement = dbcon.prepareStatement(customerIdQuery);
                customerIdStatement.setString(1,first_name);
                customerIdStatement.setString(2,last_name);
                ResultSet custIdSet = customerIdStatement.executeQuery();
                Integer customerID = null;
                if(custIdSet.next())
                {
                    customerID = custIdSet.getInt("id");
                }
                else
                {
                    System.out.println("Customer's identity could not be verified in the database");
                    customerIdStatement.close();
                    statement.close();
                    DBConnection.close(dbcon);
                    session.setAttribute("invalidFlag","Customer's identity: " + first_name + " could not be verified in the database");
                    response.sendRedirect("/CustomerInformation/confirmationPage.jsp");
                    return;
                }
                customerIdStatement.close();

                ShoppingCart cart = (ShoppingCart)session.getAttribute("cart");
                String insertQuery = "INSERT INTO sales (customer_id, salesdate, game_id) VALUES( ?, CURDATE(), ?)";
                PreparedStatement insertStatement = dbcon.prepareStatement(insertQuery);
                if(cart != null && !cart.isEmpty())
                {
                    //insert every game bought in cart into the sales table.
                    for(Map.Entry<String,ShoppingCartItem> entry : cart.itemSet())
                    {
                        Integer gameID = Integer.valueOf(entry.getKey());
                        insertStatement.setInt(1, customerID);
                        insertStatement.setInt(2, gameID);
                        //insert game into sales table x times where x = quantity bought
                        for(int i = 0;i < entry.getValue().getQuantity(); ++i)
                        {
                            insertStatement.executeUpdate();
                        }
                    }
                }
                else
                {
                    System.out.println("Cart is empty or has not been initialized");
                }
                insertStatement.close();
                session.setAttribute("first_name",first_name);
                response.sendRedirect("/CustomerInformation/confirmationPage.jsp");
            }
            else if(rowCount > 1)
            {
                System.out.println("There are multiple records in the database with the same information");
                session.setAttribute("invalidFlag","There are multiple records in the database with the same information");
                response.sendRedirect("/CustomerInformation/confirmationPage.jsp");
            }
            else//if rowCount == 0
            {
                try
                {
                    System.out.println("Supplied information not found or does not match in creditcards table");
                    session.setAttribute("invalidFlag", "Supplied information not found or does not match in creditcards table");
                    response.sendRedirect("/CustomerInformation/index.jsp");
                }
                catch (IOException e)
                {
                    e.printStackTrace();
                }
            }
            statement.close();
            DBConnection.close(dbcon);
        }
        catch (SQLException ex) {
            while (ex != null) {
                System.out.println ("SQL Exception: " + ex.getMessage ());
                ex = ex.getNextException ();
            } // end while
        } // end catch SQLException
        catch(java.lang.Exception ex)
        {
            System.out.println("<HTML>" +
                "<HEAD><TITLE>" +
                "MovieDB: Error" +
                "</TITLE></HEAD>\n<BODY>" +
                "<P>SQL error in doGet: " +
                ex.getMessage() + "</P></BODY></HTML>");
            return;
        }
    }

    public void doPost(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException
    {
        doGet(request, response);
    }
}
|
<!-- Static price list: item name and unit price per row. -->
<table>
  <tr>
    <th>Item</th>
    <th>Price</th>
  </tr>
  <tr>
    <td>Pen</td>
    <td>10</td>
  </tr>
  <tr>
    <td>Book</td>
    <td>20</td>
  </tr>
  <tr>
    <td>Shirt</td>
    <td>30</td>
  </tr>
  <tr>
    <td>Notebook</td>
    <td>40</td>
  </tr>
</table>
<gh_stars>0
#!/usr/bin/env node
/*
* Libra status reset test
*/
// Verifies that Libra#flush clears the internal command queue and resets
// every status property back to its pristine all-zero shape, even after the
// status object has been tampered with.
exports.test = function ( done ) {
    var log = console.log
      , emptyFn = function () {}
      , assert = require( 'assert' )
      , util = require( 'util' )
      , Libra = require( '../' )
      , Syllabus = require( 'syllabus' )
      , l = Libra()
      , syl = Syllabus()
      , ping = syl.commands.ping()
      // expected pristine status shape after a flush
      , status = {
          subscription : {
              on : 0
            , channels : 0
            , pchannels : 0
          }
        , transaction : {
              on : 0
            , active : 0
          }
        , monitoring : {
              on : 0
            , active : 0
          }
        , auth : 0
        , select : 0
      }
      // fall back to a no-op when no completion callback is supplied
      , exit = typeof done === 'function' ? done : function () {}
      ;

    log( '- #push a PING command to the queue.' );
    l.push( ping );

    // overwrite every status flag so the reset is observable
    log( '- hack Libra status properties.' );
    l.status = {
        subscription : {
            on : 1
          , channels : 1
          , pchannels : 1
        }
      , transaction : {
            on : 1
          , active : 1
        }
      , monitoring : {
            on : 1
          , active : 1
        }
      , auth : 1
      , select : 1
    };

    log( '- Libra#flush.' );
    l.flush();

    log( '- Libra internal queue size should be 0.' );
    assert.equal( l.cqueue.size(), 0 );

    log( '- check if all status properites are correctly resetted.' );
    assert.deepEqual(l.status, status );

    exit();
};
<filename>benchmarks/REVE/triangular/Eq/newV.java
package benchmarks.REVE.triangular.Eq;
/**
 * Computes triangular numbers via a tail-recursive accumulator. Part of the
 * REVE equivalence benchmarks; behaviour matches the original exactly.
 */
public class newV {
    /** Tail-recursive helper: returns s + n + (n-1) + ... + 1. */
    private int g(int n, int s) {
        return (n <= 0) ? s : g(n - 1, n + s);
    }

    /** Returns the n-th triangular number (0 for non-positive n). */
    int triangle(int n) {
        return g(n, 0);
    }
}
<reponame>tliang1/Java-Practice
package main;
import java.util.Scanner;
/**
* @author <NAME>
*
*/
public class FutureInvestmentValueRevised
{
    /**
     * Reads an investment amount and annual interest rate from the console
     * (re-prompting while either is negative) and prints the future value of
     * the investment for each of 30 years.
     */
    public static void main(String[] args)
    {
        Scanner input = new Scanner(System.in);
        final int YEARS_TO_INVESTMENT = 30;
        System.out.print("Enter investment amount: ");
        double investmentAmount = input.nextDouble();
        while (investmentAmount < 0.0)
        {
            System.out.print("Enter investment amount (Must be positive): ");
            investmentAmount = input.nextDouble();
        }
        System.out.print("Enter annual interest rate(e.g., 3 for 3%): ");
        double annualInterestRate = input.nextDouble() / 100;
        while (annualInterestRate < 0.0)
        {
            System.out.print("Enter annual interest rate(e.g., 3 for 3%) (Must be positive): ");
            annualInterestRate = input.nextDouble() / 100;
        }
        double monthlyInterestRate = annualInterestRate / 12;
        System.out.println("The amount invested: " + investmentAmount);
        System.out.println("Annual interest rate: " + (annualInterestRate * 100) + "%");
        System.out.println("Years\tFuture Value");
        for (int year = 1; year <= YEARS_TO_INVESTMENT; year++)
        {
            System.out.printf("%d\t%8.2f\n", year,
                futureInvestmentValue(investmentAmount, monthlyInterestRate, year));
        }
        System.out.println();
        input.close();
    }

    /**
     * Returns the future investment value using the specified investment amount,
     * monthly interest rate, and years, truncated to whole cents.
     * <ul>
     * <li>If the first argument is negative, the future investment value will be 0.0.</li>
     * <li>If the second argument is negative, it will default to 0.0.</li>
     * <li>If the third argument is less than 1, it will default to 1.</li>
     * </ul>
     *
     * @param investmentAmount amount of money to invest
     * @param monthlyInterestRate monthly interest rate (e.g., 0.03 for 3% rate)
     * @param years year of investment (e.g., 4 for the 4th year)
     * @return the future investment value
     */
    public static double futureInvestmentValue(double investmentAmount, double monthlyInterestRate, int years)
    {
        if (investmentAmount < 0.0)
        {
            return 0.0;
        }
        if (monthlyInterestRate < 0.0)
        {
            monthlyInterestRate = 0.0;
        }
        if (years < 1)
        {
            years = 1;
        }
        // Truncate to cents with Math.floor: the previous (int) cast silently
        // overflowed once the value exceeded Integer.MAX_VALUE / 100 (~$21M).
        return Math.floor(investmentAmount * Math.pow((1 + monthlyInterestRate), years * 12) * 100) / 100.0;
    }
}
#!/bin/bash
# Interactive demo: create a TLS Secret, mount it in the kuard-tls Pod and
# inspect the served certificate via a local port-forward.
set -x
# create a Secret
vim 11-04_kuard-secret.yaml
read -p "Continue?"
kubectl apply -f 11-04_kuard-secret.yaml
read -p "Continue?"
# describe the Pod
kubectl describe pod kuard-tls
read -p "Continue?"
# setup port forwarding (backgrounded so the script can continue)
kubectl port-forward kuard-tls 8443:8443 &
read -p "Continue?"
# inspect the Pod in a browser
echo "Open in browser:"
echo " 1. https://127.0.0.1:8443/"
echo " and check the SSL certificate"
import java.util.Scanner;
/*Escreva um sistema que leia três números inteiros e positivos (A, B, C)
e calcule a seguinte expressão: d = r+s/2
, onde r = (a+b)² e s= (b+c)²
*/
/**
 * Reads three positive integers (A, B, C) and evaluates d = (r + s) / 2,
 * where r = (a + b)^2 and s = (b + c)^2. Negative input prints an error
 * message; the expression is only evaluated when all three are positive.
 */
public class CalculandoExpressao {
    public static void main(String[] args) {
        int a, b, c, r, s;
        double d;
        Scanner input = new Scanner(System.in);
        System.out.print("Qual é o valor de A: ");
        a = input.nextInt();
        System.out.print("Qual é o valor de B: ");
        b = input.nextInt();
        System.out.print("Qual é o valor de C: ");
        c = input.nextInt();
        if (a < 0 || b < 0 || c < 0) {
            System.out.print("\33[31mVocê não pode digitar valores negativos, tente novamente.");
        }
        if (a > 0 && b > 0 && c > 0) {
            r = (int) Math.pow((a + b), 2);
            s = (int) Math.pow((b + c), 2);
            // Divide by 2.0 so the average keeps its fractional part; the
            // previous integer division (r + s) / 2 silently truncated the .5
            // before the result was widened to double.
            d = (r + s) / 2.0;
            System.out.println("\33[34mO valor da expressão é: " + d);
        }
        input.close();
    }
}
#!/usr/bin/env bash
# Liveness check for a Galera/XtraDB node: exits 0 (healthy) while mysqld is
# running or the live.lock marker exists, with special handling for a
# state snapshot transfer (SST) that may legitimately take a while.
if [ $(pgrep mysqld) ] || [ -f "/tmp/live.lock" ]; then
    if [ -f /opt/mysql/data/sst_in_progress ]; then
        # sst in progress marker - record when we first observed this state
        if [ ! -f /tmp/sst.time ]; then
            echo -n $(date +%s) > /tmp/sst.time
        else
            # if more than 2 mins then check for evidence of real SST activity
            if [ $(date +%s) -gt $(( $(cat /tmp/sst.time) + 120 )) ]; then
                # if socat or xtrabackup (prepare) is running exit 0
                if $(pgrep socat >/dev/null 2>&1) || $(pgrep xtrabackup >/dev/null 2>&1); then
                    exit 0
                else
                    # sst has failed - mysqld is stuck in an unrecoverable state - kill it
                    echo "SST failed"
                    killall -9 mysqld
                    exit 1
                fi
            fi
        fi
    fi
    exit 0
else
    exit 1
fi
// NOTE(review): `std::result` is imported but unused — the impl below uses
// the prelude `Result` directly; confirm before removing.
use std::result;

// Error type for WaveForms device operations (variants not yet defined).
#[derive(Debug)]
enum WaveFormsError {
    // Define potential errors
    // For example:
    // InvalidHandle,
    // CommunicationError,
    // Other errors specific to the WaveForms library
}

// Handle to an open WaveForms device (fields not yet defined).
struct DeviceHandle {
    // Define the properties and methods of the DeviceHandle type
}

impl DeviceHandle {
    /// Returns the device's maximum digital-in clock divider.
    ///
    /// Placeholder: the FDwfDigitalInDividerInfo call is not wired in yet,
    /// so this currently panics via `unimplemented!()`.
    fn max_clock_divider(&self) -> Result<u32, WaveFormsError> {
        // Call the get_int! macro to retrieve the clock divider information from the device handle
        // For example:
        // Ok(get_int!(FDwfDigitalInDividerInfo self.device_handle)?)
        // Replace the above line with the actual implementation using the WaveForms library
        unimplemented!()
    }
}
#!/bin/bash
# Provision a Docker host on Google Compute Engine via docker-machine and
# point the local Docker client at it.
export GOOGLE_PROJECT=docker-239201
#test -n "$(docker-machine ls | grep docker-host)" || eval $(docker-machine env --unset)
#start machine
docker-machine create --driver google \
    --google-machine-image https://www.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-1604-lts \
    --google-machine-type n1-standard-1 \
    --google-disk-size 20 \
    --google-zone europe-west1-b docker-host
#
sleep 5
eval $(docker-machine env docker-host)
docker-machine ls
# NOTE(review): the machine created above is named 'docker-host', but this
# queries the IP of a machine named 'logging' — confirm which is intended.
docker-machine ip logging
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense, LSTM

# Read the stock data
data = pd.read_csv('stock_data.csv')

# Pre-processing: scale every column into [0, 1].
# NOTE(review): the scaler is fit on the full dataset BEFORE the train/test
# split, which leaks test statistics into training — fit on the train slice
# only and transform the test slice. Also assumes every CSV column is
# numeric; confirm against stock_data.csv.
scaler = MinMaxScaler(feature_range = (0, 1))
data = scaler.fit_transform(data)

# Split the data into train and test (80/20, chronological split).
# Convention: all columns except the last are features, the last is the target.
x_train = data[:int(len(data)*0.8),:-1]
x_test = data[int(len(data)*0.8):,:-1]
y_train = data[:int(len(data)*0.8),-1]
y_test = data[int(len(data)*0.8):,-1]

# Reshape the data to (samples, timesteps, 1) as expected by the LSTM layers
x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))

# Create a model: two stacked LSTMs feeding a single regression output
model = Sequential()
model.add(LSTM(50, return_sequences=True, input_shape=(x_train.shape[1], 1)))
model.add(LSTM(50))
model.add(Dense(1))

# Compile and Fit (batch_size=1 is very slow; intentional? — review)
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(x_train, y_train, epochs = 100, batch_size = 1, verbose = 2)
<reponame>Danathus/react3d<gh_stars>1-10
#include <ragdoll/sortme/DeltaRagdoll.h>
#include <assert.h>
#include <ode/ode.h>
#include <ode/collision.h>
#include <cal3d/model.h>
#include <cal3d/skeleton.h>
#include <osg/Node>
#include <osg/ShapeDrawable>
#include <dtCore/scene.h>
#include <dtCore/object.h>
#include <dtCore/odebodywrap.h>
#include <dtAnim/animationgameactor.h>
#include <dtAnim/skeletaldrawable.h>
#include <ragdoll/bridge/osg/Math.h>
#include <ragdoll/bridge/osg/BoneBridgeOSG.h>
#include <ragdoll/bridge/cal3d/BoneBridgeCAL3D.h>
#include <ragdoll/bridge/delta3d/BoneBridgeDelta3D.h>
#include <ragdoll/sortme/Ragdoll.h>
#include <ragdoll/sortme/RagdollBone.h>
#define RESET_WORLD_TRANSFORM 0 // 0->1
#define HIDE_SKIN 0 // (commit as 0)
#define DRAW_SKELETON 0 // (commit as 0)
#define DRAW_ROOT_SPHERE 0 // (commit as 0)
////////////////////////////////////////////////////////////////////////////////
// Builds a ragdoll for `actor` inside `scene`. The compile-time switches
// HIDE_SKIN / DRAW_SKELETON / DRAW_ROOT_SPHERE (defined above, committed
// as 0) toggle debug rendering via scenegraph surgery on the actor's geode.
DeltaRagdoll::DeltaRagdoll(dtCore::Scene& scene, dtCore::Transformable* actor)
   : mScene(scene)
   , mpRagdoll(NULL)
{
#if HIDE_SKIN
   // control animation game actor rendering characteristics
   {
      // get the geode so we can perform scenegraph surgery
      osg::Geode *cal3dGeode = dynamic_cast<osg::Geode *>(actor->GetHelper()->GetNode());
      assert(cal3dGeode);
      // remove the skin drawable as a hack
      cal3dGeode->removeDrawables(0, 1);
      //cal3dGeode->removeDrawables(0, cal3dGeode->getNumDrawables());
      //cal3dGeode->removeDrawables(cal3dGeode->getNumDrawables()-1, 1);
   }
#endif
   // create the rag doll within this world
   mpRagdoll = new Ragdoll(actor);
   // last-minute hackery
   // NOTE(review): hard-coded physics step of 1/120 s applied to the whole
   // scene; the commented alternatives were earlier experiments.
   const double stepSize =
      1.0f / 120.0f;
      //1.0f / 60.0f;
      //1.0f / 30.0f;
   mScene.SetPhysicsStepSize(stepSize);
#if DRAW_SKELETON
   // control animation game actor rendering characteristics
   {
      // get the geode so we can perform scenegraph surgery
      osg::Geode* cal3dGeode = dynamic_cast<osg::Geode *>(actor->GetHelper()->GetNode());
      assert(cal3dGeode);
      // remove the skin drawable as a hack
      //cal3dGeode->removeDrawables(0, 1);
      cal3dGeode->addDrawable(new dtAnim::SkeletalDrawable(actor->GetHelper()->GetModelWrapper()));
   }
#endif
#if DRAW_ROOT_SPHERE
   {
      // get the geode so we can perform scenegraph surgery
      osg::Geode* sphereGeode = new osg::Geode();
      assert(sphereGeode);
      // debug sphere at the actor's local origin
      sphereGeode->addDrawable(new osg::ShapeDrawable(new osg::Sphere(osg::Vec3(0,0,0), 10.0)));
      actor->GetOSGNode()->asTransform()->addChild(sphereGeode);
   }
#endif
}
////////////////////////////////////////////////////////////////////////////////
// Tears down the ragdoll: detaches every bone from the scene and the
// ID map, then frees the Ragdoll allocated in the constructor.
DeltaRagdoll::~DeltaRagdoll()
{
   // remove all the bones
   // NOTE(review): RemoveBone() below does not shrink the ragdoll's bone
   // list (it only clears the ID map entry and the scene drawable), so
   // iterating by index while removing appears safe -- confirm against
   // Ragdoll::GetNumBones()/GetBoneByIndex().
   for (size_t i = 0; i < GetRagdoll()->GetNumBones(); ++i)
   {
      RagdollBone* bone = GetRagdoll()->GetBoneByIndex(i);
      bool success = RemoveBone(bone);
      assert(success);
      bone = 0;
   }
   // free the ragdoll itself
   if (mpRagdoll)
   {
      delete mpRagdoll;
      mpRagdoll = 0;
   }
}
////////////////////////////////////////////////////////////////////////////////
// Registers `bone` under `calBoneID`: adds its drawable to the scene,
// initializes its physics body, records the Cal3D-ID mapping, and
// snaps the bone's physics pose to the current animation pose.
void DeltaRagdoll::AddBone(int calBoneID, RagdollBone* bone)
{
   // add to scene and initialize physics at this point
   if (bone->GetBridgeDelta3D())
   {
      mScene.AddDrawable(bone->GetBridgeDelta3D()->GetObject());
      // kept inside the null check: the original dereferenced
      // GetBridgeDelta3D() unconditionally right after testing it for
      // null, which would crash for a bone without a Delta3D bridge
      bone->GetBridgeDelta3D()->InitializePhysics(bone->GetKernelBone().GetDimensions());
   }
   // build cal bone ID to delta rag doll bone map through this phase
   mCalBoneIDtoRagdollBone[calBoneID] = bone;
   // and initialize pose (must go after adding to scene)
   bone->UpdatePhysicsFromAnimation();
}
////////////////////////////////////////////////////////////////////////////////
// Detaches `bone` from this ragdoll's bookkeeping: drops its Cal3D-ID
// mapping and removes its drawable from the scene.
// Returns false when the bone does not belong to this ragdoll.
bool DeltaRagdoll::RemoveBone(RagdollBone* bone)
{
   // locate the bone in the ragdoll's list; bail out when it is missing
   BoneList::size_type index = 0;
   while (index < GetRagdoll()->GetNumBones() &&
          GetRagdoll()->GetBoneByIndex(index) != bone)
   {
      ++index;
   }
   if (index >= GetRagdoll()->GetNumBones())
   {
      // could not find it -- bail
      return false;
   }
   // drop the Cal3D bone ID -> ragdoll bone map entry, if one was made
   if (bone->GetBridgeCAL3D())
   {
      const int calBoneID = bone->GetBridgeCAL3D()->GetCalBoneID();
      //printf("removing %d from mCalBoneIDtoDeltaRagdollBone\n", calBoneID);
      mCalBoneIDtoRagdollBone.erase(calBoneID);
   }
   // pull the drawable out of the scene
   if (bone->GetBridgeDelta3D())
   {
      mScene.RemoveDrawable(bone->GetBridgeDelta3D()->GetObject());
   }
   return true;
}
////////////////////////////////////////////////////////////////////////////////
// Looks up a bone by its Cal3D bone ID; returns NULL when unmapped.
RagdollBone* DeltaRagdoll::GetBoneByCalBoneID(int calBoneID)
{
   // use find() rather than operator[]: the original inserted a NULL
   // entry into the map for every missed lookup, growing it as a side
   // effect (and diverging from the const overload below, which already
   // returns NULL cleanly); the NULL-on-miss return value is unchanged
   std::map<int, RagdollBone*>::iterator it = mCalBoneIDtoRagdollBone.find(calBoneID);
   if (it == mCalBoneIDtoRagdollBone.end())
   {
      return NULL;
   }
   return it->second;
}
////////////////////////////////////////////////////////////////////////////////
// Const lookup of a bone by its Cal3D bone ID; NULL when absent.
const RagdollBone* DeltaRagdoll::GetBoneByCalBoneID(int calBoneID) const
{
   typedef std::map<int, RagdollBone*>::const_iterator Iter;
   const Iter match = mCalBoneIDtoRagdollBone.find(calBoneID);
   return (match == mCalBoneIDtoRagdollBone.end()) ? NULL : match->second;
}
////////////////////////////////////////////////////////////////////////////////
// Estimates the world transform of the whole ragdoll body from a single
// bone: take that bone's world transform and factor out its model-space
// offset. The commented-out variants are the author's earlier
// experiments, kept for reference.
// NOTE(review): assumes bone index 0 is at (or near) the skeleton root --
// the comments below show this was still being tuned; confirm.
osg::Matrixd DeltaRagdoll::CalculateRagdollWorldTransformFromBones() const
{
   // find position and orientation of the body at large by finding position
   // and orientation of a bone at the root, and subtracting out its offset
   // in model space
   osg::Matrix bodyWorldTransform;
   {
      // let's assume that bone zero is close to the root...
      const RagdollBone* baseBone = GetRagdoll()->GetBoneByIndex(0);
      //const DeltaRagdollBone *baseBone = mBones[3]; // zero doesn't seem to work so well, uhh, let's go with 1
      // get the bone position in world space -- position of delta transformable in world space (total concatenated)
      const osg::Matrix boneWorldTransform = ConvertKerneltoOSG(baseBone->GetTransformOfBoneCenterInWorld());
      //*
      // get the bone position in model space
      const osg::Matrix boneModelTransform = ConvertKerneltoOSG(baseBone->GetModelSpaceCenterTransform()); // all but world space, concatenated
      // subtract the model space transform out
      // to get approximate skeleton position
      {
         // do this by first finding the inverse of the model transform
         const osg::Matrix invBoneModelTransform = osg::Matrix::inverse(boneModelTransform);
         // then multiply!
         //bodyWorldTransform = invBoneModelTransform * boneWorldTransform;
         //*
         // one -- looks pretty good, but still a bit off...
         // live path: bodyWorldTransform = invBoneModelTransform * boneWorldTransform
         // expressed via preMult
         bodyWorldTransform = boneWorldTransform;
         bodyWorldTransform.preMult(invBoneModelTransform);
         //*/
         /*
         // two -- totally off!
         bodyWorldTransform = boneWorldTransform;
         bodyWorldTransform.postMult(invBoneModelTransform);
         //*/
         /*
         // three -- just trying things now...
         bodyWorldTransform = boneWorldTransform;
         //*/
      }
      /*/
      // wait a second...aren't these the same?
      bodyWorldTransform = boneWorldTransform;
      // no -- bone world transform is out of date...
      //*/
   }
   return bodyWorldTransform;
}
////////////////////////////////////////////////////////////////////////////////
|
// Wire up the player's buttons. Each click plays one round against a
// FRESHLY generated computer move: the original picked a single random
// move at page load (L: `computerSelection = changeValue()`) and replayed
// that same move for the entire match.
const buttons = document.querySelectorAll('.btn');
let playerSelection;
buttons.forEach((button) => {
  button.addEventListener('click', () => {
    playerSelection = button.id;
    computerSelection = changeValue(); // new random move every round
    playRound(playerSelection, computerSelection);
  });
});

// Computer rolls 0, 1 or 2 uniformly at random.
function computerPlay() {
  return (Math.floor(Math.random() * 3));
}
// Map the computer's random roll to a move name. The roll is taken
// ONCE: the original called computerPlay() anew in every branch, so the
// comparisons looked at different random values -- e.g. a roll of 0 on
// the first call could still fall through to 'paper' or 'scissors',
// skewing the move distribution.
function changeValue() {
  const roll = computerPlay();
  if (roll === 0) {
    return 'rock';
  } else if (roll === 1) {
    return 'paper';
  } else {
    return 'scissors';
  }
}
// Initial computer move (implicit global; overwritten each round by the
// click handler) and the running scores for a first-to-five match.
computerSelection = changeValue();
//we have inputs, let's play
let playerScore = 0;
let computerScore =0;
// Plays a single round: updates the scores, renders the round result,
// and ends the match (disabling the buttons) when either side reaches 5.
function playRound(playerSelection, computerSelection) {
  let results = "";
  // declared locally -- the original assigned `finalResult` without any
  // declaration, leaking an implicit global
  let finalResult;
  playerSelection = playerSelection.toLowerCase();
  if ((playerSelection === 'rock' && computerSelection === 'scissors') ||
      (playerSelection === 'scissors' && computerSelection === 'paper') ||
      (playerSelection === 'paper' && computerSelection === 'rock')) {
    playerScore += 1;
    results = ("You win! " + playerSelection + " beats " + computerSelection
      + "<br>Player score: " + playerScore + "<br>Computer score: " + computerScore);
    if (playerScore === 5) {
      finalResult = ('Congratulations, you WON the game ! reload to play again.');
      disableButtons();
      document.getElementById('finalResult').innerHTML = finalResult;
    }
  }
  else if (playerSelection === computerSelection) {
    results = ("It's a tie, you both played " + playerSelection + "<br>Player score: " + playerScore +
      "<br>Computer score: " + computerScore);
  }
  else {
    computerScore += 1;
    results = ("You lose ! " + computerSelection + ' beats ' + playerSelection
      + "<br>Player score: " + playerScore + "<br>Computer score: " + computerScore);
    if (computerScore === 5) {
      finalResult = ('Computer WINS the game !!! reload to play again.');
      disableButtons();
      document.getElementById('finalResult').innerHTML = finalResult;
    }
  }
  document.getElementById('results').innerHTML = results;
}
// Disables every game button; called once the match is decided.
function disableButtons() {
  for (const btn of buttons) {
    btn.disabled = true;
  }
}
|
// Currently selected extras: array of {id, nome, valor} objects.
let adicionais = [];
// Maximum number of extras allowed for this pizza (read from the DOM).
var maximo = 1;
// VALOR: running total (base price + extras); VALOR_REAL: untouched base price.
var VALOR = 0;
var VALOR_REAL = 0;
// On page ready, seed the prices and the extras cap from hidden DOM fields.
$(function () {
    VALOR = $('#valor_produto').html();
    VALOR_REAL = VALOR = parseFloat(VALOR);
    maximo = $('#maximo_adicionais_pizza').val();
})
// Toggles the extra `adicional` on or off: enforces the configured
// maximum, updates the option's highlight color, records/removes the
// selection, and refreshes the running total.
function selet_add(adicional, nome) {
  controlaMaximo(adicional.id, (atMax) => {
    if (atMax) {
      return; // cap reached and this item is not already selected
    }
    verificaAdicionado(adicional.id, (jaSelecionado) => {
      if (jaSelecionado) {
        // already selected: un-highlight and drop it
        $('#adicional_' + adicional.id).css('background', '#fff')
        removeElemento(adicional.id)
      } else {
        // new selection: highlight and record it
        $('#adicional_' + adicional.id).css('background', '#81c784')
        adicionais.push({
          'id': adicional.id,
          'nome': nome,
          'valor': adicional.valor
        })
      }
      somaTotal();
    })
  })
}
// Reports via `call` whether adding item `id` would exceed the maximum
// number of extras. Items already in the selection are always allowed
// through (so they can be toggled off); the user is warned when blocked.
function controlaMaximo(id, call) {
  console.log(adicionais.length)
  const alreadyChosen = adicionais.some((item) => item.id == id);
  const blocked = adicionais.length >= maximo && !alreadyChosen;
  if (blocked) {
    swal("Atenção!", 'Maximo de ' + maximo + ' adicionais!!', "warning")
  }
  call(blocked)
}
// Removes the extra with the given id from the current selection.
function removeElemento(elem_id) {
  adicionais = adicionais.filter((item) => item.id != elem_id);
}
// Invokes `call` with true when the extra `elem_id` is already selected.
function verificaAdicionado(elem_id, call) {
  const found = adicionais.some((item) => item.id == elem_id);
  call(found);
}
// Recomputes the order total: base product price plus every selected
// extra; caches it in VALOR and renders it formatted on the page.
function somaTotal() {
  let total = parseFloat($('#valor_produto').html());
  adicionais.forEach((item) => {
    total += parseFloat(item.valor);
  })
  VALOR = total
  $('#valor_total').html(convertMoney(total))
}
// Formats a number as money with two decimals and comma thousands
// separators, e.g. 1234.5 -> "1,234.50".
function convertMoney(v) {
  const fixed = v.toFixed(2);
  // insert a comma before each run of 3 digits that precedes the dot
  return fixed.replace(/\d(?=(\d{3})+\.)/g, '$&,');
}
function adicionar(){
let tk = $('#_token').val();
let sabores = JSON.parse($('#sabores').val());
let quantidade = $('#quantidade').val();
let observacao = $('#observacao').val();
let tamanho = $('#tamanho').val();
let js = {
_token: tk,
sabores: sabores,
tamanho: tamanho,
adicionais: adicionais,
quantidade: quantidade,
observacao: observacao,
valor: parseFloat(VALOR_REAL)
};
console.log(js)
$.post(path + "pedido/addPizza", js
)
.done(function(data) {
if(data){
sucesso();
}
})
.fail( function(err) {
console.log(err)
console.log(err.status)
if(err.status == 401) location.href = path + 'pedido';
if(err.status == 404) {
swal("Atenção!", err.responseJSON, "warning")
.then((sim) => {
location.href = path + 'pedido';
})
}
});
}
function sucesso(){
$('#content').css('display', 'none');
$('#anime').css('display', 'block');
setTimeout(() => {
location.href = path + 'pedido';
}, 3000)
}
|
<filename>com.dubture.symfony.index/src/com/dubture/symfony/index/SymfonyDbFactory.java
/*******************************************************************************
* This file is part of the Symfony eclipse plugin.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
******************************************************************************/
package com.dubture.symfony.index;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.jobs.ILock;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.core.runtime.preferences.IPreferencesService;
import org.h2.jdbcx.JdbcConnectionPool;
import org.h2.tools.DeleteDbFiles;
import com.dubture.symfony.index.dao.IParameterDao;
import com.dubture.symfony.index.dao.IResourceDao;
import com.dubture.symfony.index.dao.IRouteDao;
import com.dubture.symfony.index.dao.IServiceDao;
import com.dubture.symfony.index.dao.ITransUnitDao;
import com.dubture.symfony.index.dao.sql.ParameterDao;
import com.dubture.symfony.index.dao.sql.ResourceDao;
import com.dubture.symfony.index.dao.sql.RouteDao;
import com.dubture.symfony.index.dao.sql.ServiceDao;
import com.dubture.symfony.index.dao.sql.TransUnitDao;
import com.dubture.symfony.index.log.Logger;
import com.dubture.symfony.index.preferences.SymfonyIndexPreferences;
/**
*
* Database Factory for the SQL Index.
*
*
* @author "<NAME> <<EMAIL>>"
*
*/
public class SymfonyDbFactory {

    /** Guards lazy creation of the singleton instance. */
    private static ILock instanceLock = Job.getJobManager().newLock();

    private static final String DB_NAME = "symfonymodel"; //$NON-NLS-1$
    private static final String DB_USER = ""; //$NON-NLS-1$
    private static final String DB_PASS = ""; //$NON-NLS-1$

    /** H2 connection pool backing all DAOs; null after {@link #dispose()}. */
    private JdbcConnectionPool pool;

    private static SymfonyDbFactory instance = null;

    private ServiceDao serviceDao = new ServiceDao();
    private ParameterDao parameterDao = new ParameterDao();
    private RouteDao routeDao = new RouteDao();
    private ResourceDao resourceDao = new ResourceDao();
    private TransUnitDao transUnitDao = new TransUnitDao();

    /**
     * Returns the shared factory, lazily creating it (and registering a
     * shutdown hook that disposes the pool) on first use.
     */
    public static SymfonyDbFactory getInstance() {
        if (instance == null) {
            try {
                instanceLock.acquire();
                // Re-check under the lock: two threads may both observe
                // instance == null before either acquires the lock; without
                // this second check the loser would create (and leak) a
                // second factory and connection pool.
                if (instance == null) {
                    instance = new SymfonyDbFactory();
                    /*
                     * Explicitly register shutdown handler, so it
                     * would be disposed only if class was loaded.
                     *
                     * We don't want static initialization code to
                     * be executed during framework shutdown.
                     */
                    SymfonyIndex
                            .addShutdownListener(new IShutdownListener() {
                                public void shutdown() {
                                    if (instance != null) {
                                        try {
                                            instance.dispose();
                                        } catch (SQLException e) {
                                            Logger.logException(e);
                                        }
                                        instance = null;
                                    }
                                }
                            });
                }
            } catch (Exception e) {
                Logger.logException(e);
            } finally {
                instanceLock.release();
            }
        }
        return instance;
    }

    /**
     * Opens (or rebuilds) the on-disk H2 database in the plugin's state
     * location. When the schema is missing or incompatible, the database
     * files are deleted and the schema is re-initialized. A corrupted
     * database triggers one retry after removing the files.
     */
    private SymfonyDbFactory() throws Exception {
        IPath dbPath = SymfonyIndex.getDefault().getStateLocation();
        String connString = getConnectionString(dbPath);
        pool = JdbcConnectionPool.create(connString, DB_USER, DB_PASS);
        pool.setMaxConnections(100);
        Schema schema = new Schema();
        boolean initializeSchema = false;
        int tries = 2; // Tries for opening database
        Connection connection = null;
        do {
            try {
                connection = pool.getConnection();
                try {
                    Statement statement = connection.createStatement();
                    try {
                        // Probe for the core table without fetching rows.
                        statement
                                .executeQuery("SELECT COUNT(*) FROM SERVICES WHERE 1=0;");
                        initializeSchema = !schema.isCompatible();
                    } catch (SQLException e) {
                        // Basic table doesn't exist
                        initializeSchema = true;
                    } finally {
                        statement.close();
                    }
                    if (initializeSchema) {
                        connection.close();
                        pool.dispose();
                        // Destroy schema by removing DB (if exists)
                        DeleteDbFiles.execute(dbPath.toOSString(), DB_NAME,
                                true);
                        pool = JdbcConnectionPool.create(connString, DB_USER,
                                DB_PASS);
                        pool.setMaxConnections(100);
                        connection = pool.getConnection();
                        schema.initialize(connection);
                    }
                } finally {
                    if (connection != null) {
                        connection.close();
                    }
                }
            } catch (SQLException e) {
                Logger.logException(e);
                // remove corrupted DB and retry once
                try {
                    DeleteDbFiles.execute(dbPath.toOSString(), DB_NAME, true);
                } catch (Exception e1) {
                    Logger.logException(e1);
                    throw e1;
                }
            }
        } while (connection == null && --tries > 0);
    }

    /**
     * Generates connection string using user preferences
     *
     * @param dbPath
     *            Path to the database files
     * @return the H2 JDBC URL with tuning options appended
     */
    private String getConnectionString(IPath dbPath) {
        IPreferencesService preferencesService = Platform
                .getPreferencesService();
        StringBuilder buf = new StringBuilder("jdbc:h2:").append(dbPath.append(
                DB_NAME).toOSString());
        buf.append(";UNDO_LOG=0");
        buf.append(";LOCK_MODE=").append(
                preferencesService.getInt(SymfonyIndex.PLUGIN_ID,
                        SymfonyIndexPreferences.DB_LOCK_MODE, 0, null));
        // NOTE(review): a missing DB_CACHE_TYPE preference yields the literal
        // string "null" here (default is null) -- confirm a default is always
        // provided by the preference initializer.
        buf.append(";CACHE_TYPE=").append(
                preferencesService.getString(SymfonyIndex.PLUGIN_ID,
                        SymfonyIndexPreferences.DB_CACHE_TYPE, null, null));
        buf.append(";CACHE_SIZE=").append(
                preferencesService.getInt(SymfonyIndex.PLUGIN_ID,
                        SymfonyIndexPreferences.DB_CACHE_SIZE, 0, null));
        buf.append(";QUERY_CACHE_SIZE=").append(
                preferencesService.getInt(SymfonyIndex.PLUGIN_ID,
                        SymfonyIndexPreferences.DB_QUERY_CACHE_SIZE, 0, null));
        buf.append(";LARGE_RESULT_BUFFER_SIZE=").append(
                preferencesService
                        .getInt(SymfonyIndex.PLUGIN_ID,
                                SymfonyIndexPreferences.DB_LARGE_RESULT_BUFFER_SIZE,
                                0, null));
        buf.append(";FILE_LOCK=NO");
        return buf.toString();
    }

    /**
     * Returns a pooled connection; null once the factory was disposed.
     */
    public Connection createConnection() throws SQLException {
        return pool == null ? null : pool.getConnection();
    }

    /** Shuts down the connection pool. Safe to call more than once. */
    public void dispose() throws SQLException {
        if (pool != null) {
            pool.dispose();
            pool = null;
        }
    }

    public IServiceDao getServiceDao() throws SQLException {
        return serviceDao;
    }

    public IRouteDao getRouteDao() throws SQLException {
        return routeDao;
    }

    public ITransUnitDao getTransDao() throws SQLException {
        return transUnitDao;
    }

    public IResourceDao getResourceDao() throws SQLException {
        return resourceDao;
    }

    public IParameterDao getParamDao() throws SQLException {
        return parameterDao;
    }
}
|
import eslintReporter from 'eslint-formatter-codeframe';

/**
 * Lints the given files and, when any results exist, prints and returns
 * the codeframe-formatted report. Returns undefined when there is
 * nothing to report.
 */
export default async function(files, jsLinter) {
  const results = await jsLinter.lintFiles(files);
  if (results.length === 0) {
    return;
  }
  const formatted = eslintReporter(results);
  if (formatted) {
    console.log(formatted);
  }
  return formatted;
}
|
def get_number_occurrence(self):
    """Per-number wager statistics: for each chosen_number, how many times
    it was played and how many of those wagers lost (outcome < 1) versus
    won (outcome > 1).

    Returns whatever ``self.execute`` yields for the aggregate query --
    presumably an iterable of row tuples; confirm against the DB layer.
    """
    return self.execute("select chosen_number, count(chosen_number) as occurrences, sum(case when outcome < 1 then 1 else 0 end) as negative_outcomes, sum(case when outcome > 1 then 1 else 0 end) as successful_outcomes from wager_history group by chosen_number")
def get_multiplier_occurrence(self):
    """Return a dict mapping each payout multiplier
    (outcome / chosen_number) to the number of wagers that produced it.
    """
    rows = self.execute("select outcome/chosen_number as multiplier, count(*) as occurrences from wager_history group by multiplier")
    # first column is the multiplier, second its occurrence count
    return {row[0]: row[1] for row in rows}
<filename>src/js/index.js
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
var server = __importStar(require("./server"));
var router = __importStar(require("./router"));
var requestHandler = __importStar(require("./requestHandlers"));
var handle = {};
handle["/"] = requestHandler.start;
handle["/start"] = requestHandler.start;
handle["/upload"] = requestHandler.upload;
handle["/download"] = requestHandler.download;
handle["/index"] = requestHandler.index;
handle["/show"] = requestHandler.show;
server.start(router.route, handle);
|
##############################################################################
# Compile programs on Mist
##############################################################################
gcc -o serial calcpi_serial.c
gcc -o omp -fopenmp calcpi_openmp.c
#g++ -o tbb -L/usr/local/packages/intel_xe/14.0.1/tbb/lib/intel64/gcc4.1 -ltbb calcpi_tbb.cpp
icpc -o cilk calcpi_cilk.cpp
##############################################################################
# Run examples
##############################################################################
# -f: don't error when the output file doesn't exist yet (first run)
rm -f sampleOutput.txt
touch sampleOutput.txt
./serial 10000000 >> sampleOutput.txt
echo "********************************************************************" >> sampleOutput.txt
./omp 8 10000000 >> sampleOutput.txt
echo "********************************************************************" >> sampleOutput.txt
#./tbb 8 1000000 >> sampleOutput.txt
#echo "********************************************************************" >> sampleOutput.txt
./cilk 8 10000000 >> sampleOutput.txt
##############################################################################
# Remove executables
##############################################################################
rm -f serial omp cilk
#rm serial omp tbb cilk
|
'''
A non-empty array A consisting of N integers is given.
A permutation is a sequence containing each element from 1 to N once, and only once.
For example, array A such that:
A[0] = 4
A[1] = 1
A[2] = 3
A[3] = 2
is a permutation, but array A such that:
A[0] = 4
A[1] = 1
A[2] = 3
is not a permutation, because value 2 is missing.
The goal is to check whether array A is a permutation.
Write a function:
def solution(A)
that, given an array A, returns 1 if array A is a permutation and 0 if it is not.
For example, given array A such that:
A[0] = 4
A[1] = 1
A[2] = 3
A[3] = 2
the function should return 1.
Given array A such that:
A[0] = 4
A[1] = 1
A[2] = 3
the function should return 0.
Write an efficient algorithm for the following assumptions:
N is an integer within the range [1..100,000];
each element of array A is an integer within the range [1..1,000,000,000].
'''
def solution(A):
    """Return 1 if A is a permutation of 1..len(A), otherwise 0.

    Per the problem statement above, a permutation must contain each value
    from 1 to N exactly once. The original implementation only verified
    that the distinct values were consecutive integers, so e.g.
    [10, 11, 12] was wrongly accepted and [1] was wrongly rejected.
    Runs in O(N) time and O(N) space.
    """
    n = len(A)
    seen = set(A)
    # Duplicates mean some value in 1..N must be missing.
    if len(seen) != n:
        return 0
    # N distinct integers spanning exactly [1, N] must be exactly {1..N}.
    if min(seen) == 1 and max(seen) == n:
        return 1
    return 0
# Testing:
import unittest

class Test_solution(unittest.TestCase):
    def test_permutation(self):
        """Cases from the problem statement plus edge cases.

        Expectations follow the spec in the module docstring (a permutation
        contains 1..N exactly once); the original tests asserted the buggy
        "consecutive integers" behavior, e.g. [10..14] -> 1 and [1] -> 0.
        """
        self.assertEqual(solution([4, 1, 3, 2]), 1)        # spec example
        self.assertEqual(solution([4, 1, 3]), 0)           # value 2 missing
        self.assertEqual(solution([1, 2, 3, 4, 4]), 0)     # duplicate
        self.assertEqual(solution([10, 11, 12, 12, 14, 13]), 0)
        self.assertEqual(solution([10, 11, 12, 14, 13]), 0)  # not 1..N
        self.assertEqual(solution([1]), 1)                 # smallest permutation
        self.assertEqual(solution([2]), 0)                 # out of range

if __name__ == "__main__":
    unittest.main()
n = x // 2
for i = 2 to n
    if (x % i == 0)
        Print "x is not prime"
        Exit
Print "x is prime"        // reached only when no divisor was found in 2..n
#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Launch a parameter-server distributed ResNet training run: one scheduler,
# one server, and DEVICE_NUM workers, each in its own working directory.
# Require 4 or 5 positional args (the 5th, a pretrained checkpoint, is optional).
if [ $# != 4 ] && [ $# != 5 ]
then
    echo "Usage: sh run_distribute_train.sh [resnet50|resnet101] [cifar10|imagenet2012] [RANK_TABLE_FILE] [DATASET_PATH] [PRETRAINED_CKPT_PATH](optional)"
    exit 1
fi

if [ $1 != "resnet50" ] && [ $1 != "resnet101" ]
then
    echo "error: the selected net is neither resnet50 nor resnet101"
    exit 1
fi

if [ $2 != "cifar10" ] && [ $2 != "imagenet2012" ]
then
    echo "error: the selected dataset is neither cifar10 nor imagenet2012"
    exit 1
fi

if [ $1 == "resnet101" ] && [ $2 == "cifar10" ]
then
    echo "error: training resnet101 with cifar10 dataset is unsupported now!"
    exit 1
fi

# Resolve a possibly-relative path to an absolute one.
get_real_path(){
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        echo "$(realpath -m $PWD/$1)"
    fi
}

PATH1=$(get_real_path $3)   # RANK_TABLE_FILE
PATH2=$(get_real_path $4)   # DATASET_PATH
if [ $# == 5 ]
then
    PATH3=$(get_real_path $5)   # PRETRAINED_CKPT_PATH
fi

if [ ! -f $PATH1 ]
then
    echo "error: RANK_TABLE_FILE=$PATH1 is not a file"
    exit 1
fi

if [ ! -d $PATH2 ]
then
    echo "error: DATASET_PATH=$PATH2 is not a directory"
    exit 1
fi

if [ $# == 5 ] && [ ! -f $PATH3 ]
then
    echo "error: PRETRAINED_CKPT_PATH=$PATH3 is not a file"
    exit 1
fi

# Parameter-server topology: 1 scheduler + MS_SERVER_NUM servers +
# MS_WORKER_NUM workers, all rendezvousing at MS_SCHED_HOST:MS_SCHED_PORT.
ulimit -u unlimited
export DEVICE_NUM=8
export RANK_SIZE=8
export RANK_TABLE_FILE=$PATH1
export MS_COMM_TYPE=zmq
export MS_SCHED_NUM=1
export MS_WORKER_NUM=$RANK_SIZE
export MS_SERVER_NUM=8
export MS_SCHED_HOST=127.0.0.1
export MS_SCHED_PORT=8081

# --- scheduler process ---
export MS_ROLE=MS_SCHED
export DEVICE_ID=0
export RANK_ID=0
rm -rf ./sched
mkdir ./sched
cp ../*.py ./sched
cp *.sh ./sched
cp -r ../src ./sched
cd ./sched || exit
echo "start scheduler"
if [ $# == 4 ]
then
    python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=1 --dataset_path=$PATH2 --parameter_server=True &> sched.log &
fi
if [ $# == 5 ]
then
    python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=1 --dataset_path=$PATH2 --parameter_server=True --pre_trained=$PATH3 &> sched.log &
fi
cd ..

# --- parameter-server process(es); loop currently launches just one ---
export MS_ROLE=MS_PSERVER
for((i=0; i<1; i++))
do
    export DEVICE_ID=$i
    export RANK_ID=$i
    rm -rf ./server_$i
    mkdir ./server_$i
    cp ../*.py ./server_$i
    cp *.sh ./server_$i
    cp -r ../src ./server_$i
    cd ./server_$i || exit
    echo "start server"
    if [ $# == 4 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=1 --dataset_path=$PATH2 --parameter_server=True &> server_$i.log &
    fi
    if [ $# == 5 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=1 --dataset_path=$PATH2 --parameter_server=True --pre_trained=$PATH3 &> server_$i.log &
    fi
    cd ..
done

# --- worker processes, one per device ---
export MS_ROLE=MS_WORKER
for((i=0; i<${DEVICE_NUM}; i++))
do
    export DEVICE_ID=$i
    export RANK_ID=$i
    rm -rf ./worker_$i
    mkdir ./worker_$i
    cp ../*.py ./worker_$i
    cp *.sh ./worker_$i
    cp -r ../src ./worker_$i
    cd ./worker_$i || exit
    echo "start training for worker rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    if [ $# == 4 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 --parameter_server=True &> worker_$i.log &
    fi
    if [ $# == 5 ]
    then
        python train.py --net=$1 --dataset=$2 --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$PATH2 --parameter_server=True --pre_trained=$PATH3 &> worker_$i.log &
    fi
    cd ..
done
|
import pulumi
from pulumi import ResourceOptions
from pulumi_openstack import network, compute
class FloatingIP(pulumi.CustomResource):
    """Custom Pulumi resource wrapping an OpenStack floating IP.

    NOTE(review): the lifecycle methods below look like placeholders --
    `delete()` constructs a brand-new FloatingIp rather than operating on
    the one made in `create()`; confirm the intended resource wiring.
    """

    def __init__(self, name: str, network_id: str, opts: ResourceOptions = None):
        super().__init__('custom:resource:FloatingIP', name, {
            'network_id': network_id,
        }, opts)

    def create(self):
        # Create floating IP from the 'public' pool and expose its address.
        floating_ip = network.FloatingIp('floating_ip', pool='public')
        self.register_outputs({
            'floating_ip_address': floating_ip.address,
        })

    def update(self):
        # Update floating IP
        # Not implemented in this example. The `pass` is required: the
        # original body contained only comments, which is a SyntaxError.
        pass

    def delete(self):
        # Delete floating IP
        floating_ip = network.FloatingIp('floating_ip', pool='public')
        floating_ip.delete()
<gh_stars>1-10
/*
Pangram is a sentence that contains all the letters in the alphabet
https://en.wikipedia.org/wiki/Pangram
*/
// Returns true when `string` contains every letter a-z at least once
// (case-insensitive); throws TypeError for non-string input.
const checkPangram = ( string ) => {
  if ( typeof string !== 'string' ) {
    throw new TypeError( 'The given value is not a string' )
  }
  // Collect the distinct a-z characters; 26 distinct letters = pangram.
  const letters = new Set(
    [ ...string.toLowerCase() ].filter( ( ch ) => ch >= 'a' && ch <= 'z' )
  )
  return letters.size === 26
}

export {
  checkPangram
}
|
<reponame>theweaver19/wbtc-cafe
declare module "react-smooth-marquee";
declare module "wallet-address-validator";
|
def reverseCharacters(string):
    """Return ``string`` with its characters in reverse order."""
    # Walk the input back to front and join the pieces.
    return "".join(string[i] for i in range(len(string) - 1, -1, -1))
def find_max_index(arr):
    """Return the index of the largest element of ``arr`` (-1 if empty).

    On ties the FIRST occurrence of the maximum wins, matching a strict
    ``>`` comparison.
    """
    best_index = -1
    best_value = -float('inf')
    for index, value in enumerate(arr):
        if value > best_value:
            best_value = value
            best_index = index
    return best_index
# Demo: the maximum value 9 sits at index 4.
arr = [5, 3, 1, 7, 9, 2]
max_index = find_max_index(arr)
print(max_index) # 4
#!/usr/bin/env bash
# Extract the private key and certificate from a PKCS#12 (.pfx) bundle
# named <target>.pfx, then emit base64-encoded copies of both.
set -ex
target=${1}
# Split the bundle: unencrypted key (-nodes) and certificate halves.
openssl pkcs12 -in ${target}.pfx -nocerts -out key.pem -nodes
openssl pkcs12 -in ${target}.pfx -nokeys -out cert.pem
# Re-emit the key as a bare RSA private key.
openssl rsa -in key.pem -out client.key
# NOTE(review): `base64 -i FILE -o FILE` is the macOS invocation; GNU
# coreutils base64 would need `base64 FILE > FILE.b64` -- confirm target OS.
base64 -i cert.pem -o cert.pem.b64
base64 -i client.key -o client.key.b64
def executeKrillCode(code):
    """Interpret a Brainfuck-style program and return its printed output.

    Commands: > < move the pointer, + - increment/decrement the current
    cell modulo 256, . appends the cell as a character to the output,
    [ ] loop while the current cell is non-zero. ',' (input) is accepted
    but ignored, as in the original.

    Improvements over the original: brackets are matched once up front so
    loop jumps are O(1) instead of rescanning the program on every
    iteration, and output is accumulated in a list instead of repeated
    string concatenation. Behavior is unchanged.
    """
    output = []
    data = [0] * 30000  # standard 30k-cell tape
    pointer = 0

    # Pre-match brackets: jump[open] = close and jump[close] = open.
    jump = {}
    stack = []
    for pos, ch in enumerate(code):
        if ch == '[':
            stack.append(pos)
        elif ch == ']':
            start = stack.pop()
            jump[start] = pos
            jump[pos] = start

    code_ptr = 0
    while code_ptr < len(code):
        command = code[code_ptr]
        if command == '>':
            pointer += 1
        elif command == '<':
            pointer -= 1
        elif command == '+':
            data[pointer] = (data[pointer] + 1) % 256
        elif command == '-':
            data[pointer] = (data[pointer] - 1) % 256
        elif command == '.':
            output.append(chr(data[pointer]))
        elif command == ',':
            # Assume input is provided externally for simplicity
            pass
        elif command == '[' and data[pointer] == 0:
            code_ptr = jump[code_ptr]  # skip past the matching ']'
        elif command == ']' and data[pointer] != 0:
            code_ptr = jump[code_ptr]  # loop back to just after '['
        code_ptr += 1
    return ''.join(output)
#!/bin/bash
# SLURM job script: runs one dishtiny treatment replicate in a scratch
# directory, with exit/error traps that archive the SLURM log.
########## Define Resources Needed with SBATCH Lines ##########
#SBATCH --time=4:00:00
#SBATCH --mem=6G
#SBATCH --ntasks 1
#SBATCH --cpus-per-task 1
#SBATCH --job-name batch~1006,step~1026,pop~2,id1~ko-s_i_spikersharing
#SBATCH --account=devolab
#SBATCH --output="/mnt/home/mmore500/slurmlogs/slurm-%A.out"
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=mmore500@msu.edu

################################################################################
echo
echo "Setup Exit and Error Traps"
echo "--------------------------"
################################################################################

# Runs on every exit: report job stats, wall time, and copy the log here.
function on_exit() {
    echo
    echo "Run Exit Trap"
    echo "-------------"
    qstat -f ${SLURM_JOB_ID}
    # prepare python environment
    module purge; module load GCC/7.3.0-2.30 OpenMPI/3.1.1 Python/3.6.6
    source "/mnt/home/mmore500/myPy/bin/activate"
    echo "   SECONDS" $SECONDS
    echo "   MINUTES" $(python3 -c "print( ${SECONDS}/60 )")
    echo "   HOURS  " $(python3 -c "print( ${SECONDS}/3600 )")
    cp ${SLURM_LOGPATH} .
}

# Runs on any failing command (ERR trap): archive the log for debugging.
function on_error() {
    echo
    echo "Run Error Trap (FAIL)"
    echo "---------------------"
    echo "   EXIT STATUS ${1}"
    echo "   LINE NO ${2}"
    cp ${SLURM_LOGPATH} "/mnt/home/mmore500/err_slurmlogs"
    qstat -f ${SLURM_JOB_ID} \
        >> "/mnt/home/mmore500/err_slurmlogs/${SLURM_LOGFILE}"
    echo "---------------------"
    echo
}

trap 'on_error $? $LINENO' ERR
trap "on_exit" EXIT

################################################################################
echo
echo "Prepare Env Vars"
echo "----------------"
################################################################################

# Seed and the treatment-specific scratch/config paths.
SEED_OFFSET=1000
SEED=$((SEED_OFFSET))
OUTPUT_DIR="/mnt/scratch/mmore500/mono-local/treat=batch~1006,step~1026,pop~2,id1~ko-s_i_spikersharing+seed=${SEED}"
CONFIG_DIR="/mnt/home/mmore500/dishtiny/mono-local"

echo "   SEED" $SEED
echo "   OUTPUT_DIR" $OUTPUT_DIR
echo "   CONFIG_DIR" $CONFIG_DIR

export SLURM_LOGFILE="slurm-${SLURM_JOB_ID}.out"
export SLURM_LOGPATH="/mnt/home/mmore500/slurmlogs/${SLURM_LOGFILE}"

echo "   SLURM_LOGFILE" $SLURM_LOGFILE
echo "   SLURM_LOGPATH" $SLURM_LOGPATH

################################################################################
echo
echo "Setup Work Dir"
echo "--------------"
################################################################################

# Start from a clean scratch dir, unpack the treatment, copy the binary.
rm -rf ${OUTPUT_DIR}/* || echo "   not a redo"
mkdir -p ${OUTPUT_DIR}
cd ${OUTPUT_DIR}

tar -xvf "${CONFIG_DIR}/treat=batch~1006,step~1026,pop~2,id1~ko-s_i_spikersharing+ext=.tar.gz"
mv treatment_directory/* .
rm -rf treatment_directory

cp ${CONFIG_DIR}/dishtiny* . # copy over executable

echo "   PWD" $PWD

################################################################################
echo
echo "Do Work"
echo "-------"
################################################################################

module purge; module load GCC/8.2.0-2.31.1 OpenMPI/3.1.3 HDF5/1.10.4;
export OMP_NUM_THREADS=1

./dishtiny -SEED $SEED -SEED_POP 1 -RUN_LENGTH 1 -SNAPSHOT_LENGTH 8192 \
    >"title=run+seed=${SEED}+ext=.log" 2>&1

################################################################################
echo
echo "Done! (SUCCESS)"
echo "---------------"
################################################################################
|
import pandas as pd

# Load the dataset, then drop every row with missing values in place.
df = pd.read_csv('data_file.csv')
df.dropna(inplace=True)

# Number of usable rows after cleaning.
row_count = len(df)

# Average the 'score' column by hand (sum over row count), guarding
# against an empty frame.
if row_count > 0:
    average = sum(df['score']) / row_count
else:
    print('No data')
<reponame>ckpt/backend-services
package players
import (
"errors"
"github.com/m4rw3r/uuid"
"time"
)
// DummyPlayerStorage is an in-memory player-storage stand-in holding
// hard-coded players, users and debts (populated by init) for
// testing/development.
type DummyPlayerStorage struct {
	players []*Player
	users   []*User
	debts   []*Debt
}
// init seeds the storage with one dummy admin user and two dummy players
// (the first player embeds the user).
func (dps *DummyPlayerStorage) init() {
	// one pre-generated V4 UUID per dummy player
	dummyUUIDs := createUUIDs(2)
	dps.users = []*User{
		&User{
			Username: "mortenk",
			password: "<PASSWORD>",
			Apikey:   "secretsupersecret",
			Admin:    true,
			Locked:   false,
		},
	}
	dps.players = []*Player{
		&Player{
			UUID: dummyUUIDs[0],
			Profile: Profile{
				Birthday: time.Date(1979, time.April, 14, 0, 0, 0, 0, time.Local),
				Name:     "<NAME>",
				Email:    "<EMAIL>",
			},
			Nick:   "Panzer",
			Quotes: []string{"Blinde høner kan også finne korn!"},
			User:   *dps.users[0],
			Active: true,
		},
		&Player{
			UUID: dummyUUIDs[1],
			Profile: Profile{
				Birthday: time.Date(1979, time.October, 20, 0, 0, 0, 0, time.Local),
				Name:     "<NAME>",
				Email:    "<EMAIL>",
			},
			Nick:   "Bjøro",
			Quotes: []string{"Horespill!"},
			Active: true,
		},
	}
}
// Store inserts p, replacing any existing player with the same UUID.
func (dps *DummyPlayerStorage) Store(p *Player) error {
	for i, existing := range dps.players {
		if existing.UUID == p.UUID {
			dps.players[i] = p
			return nil
		}
	}
	dps.players = append(dps.players, p)
	return nil
}
// Load returns the stored player with the given UUID, or an error when no
// player matches.
func (dps *DummyPlayerStorage) Load(uuid uuid.UUID) (*Player, error) {
	for i := range dps.players {
		if dps.players[i].UUID == uuid {
			return dps.players[i], nil
		}
	}
	return nil, errors.New("Not found")
}
// Delete is a stub: it always fails without touching stored players.
func (dps *DummyPlayerStorage) Delete(uuid uuid.UUID) error {
	// FIXME: Not implemented yet
	return errors.New("Not implemented yet")
}
// LoadAll returns the backing slice of all stored players (not a copy, so
// callers share mutations). Always returns a nil error.
func (dps *DummyPlayerStorage) LoadAll() ([]*Player, error) {
	return dps.players, nil
}
// LoadUser returns the user whose Username matches exactly, or an error
// when no such user is stored.
func (dps *DummyPlayerStorage) LoadUser(username string) (*User, error) {
	for i := range dps.users {
		if dps.users[i].Username == username {
			return dps.users[i], nil
		}
	}
	return nil, errors.New("Not found")
}
// NewDummyPlayerStorage builds an in-memory store pre-seeded with the
// hard-coded development data from init.
func NewDummyPlayerStorage() *DummyPlayerStorage {
	storage := &DummyPlayerStorage{}
	storage.init()
	return storage
}
// createUUIDs generates `number` fresh random (V4) UUIDs.
//
// Generation errors are deliberately ignored, matching the original
// best-effort behavior. The slice is preallocated to avoid repeated
// growth, and a counted loop replaces the original decrement-the-parameter
// loop for clarity. For number <= 0 an empty slice is returned.
func createUUIDs(number int) []uuid.UUID {
	uuids := make([]uuid.UUID, 0, number)
	for i := 0; i < number; i++ {
		id, _ := uuid.V4()
		uuids = append(uuids, id)
	}
	return uuids
}
|
<filename>node_modules/vue-gmaps/src/index.js
import loadGoogleMapsAPI from 'load-google-maps-api'
// Vue plugin: registers the `v-gmaps-searchbox` directive and (optionally)
// lazy-loads the Google Maps JS API on first use.
// Options: libraries/key/client/version are forwarded to the Maps loader;
// loadGoogleApi=false means "the page already provides window.google".
function plugin (Vue, {
  libraries = [ 'places' ],
  key,
  client,
  version = '3',
  loadGoogleApi = true
} = {}) {
  // Guard against double installation.
  if (plugin.installed) {
    return
  }
  Vue.directive('gmaps-searchbox', {
    inserted: function (el, binding) {
      // v-gmaps-searchbox:myProp writes the result to binding.value.myProp;
      // without an arg it defaults to 'place'.
      const propertyToSet = binding.arg ? binding.arg : 'place'
      ensureGoogleMaps((google) => {
        var searchBox = new google.places.SearchBox(el)
        searchBox.addListener('places_changed', function () {
          var places = searchBox.getPlaces()
          if (places.length === 0) {
            return
          }
          let place = {}
          // Only the first suggested place is used.
          let originalPlace = places[ 0 ]
          // Directive modifiers act as a field whitelist: with modifiers,
          // copy only those keys; otherwise expose the whole place object.
          var keys = Object.keys(binding.modifiers)
          if (keys.length > 0) {
            keys.forEach(function (key) {
              place[ key ] = originalPlace[ key ]
            })
          } else {
            place = originalPlace
          }
          // Vue.set keeps the assignment reactive on the bound object.
          Vue.set(binding.value, propertyToSet, place)
        })
      })
    }
  })

  // Invokes fn with a ready google(.maps) namespace:
  //  - loadGoogleApi=false: use whatever the page already exposes;
  //  - already loaded by us: reuse the cached copy on Vue.google;
  //  - otherwise: load the API once, cache it, expose it as this.$google.
  function ensureGoogleMaps (fn) {
    if (!loadGoogleApi) {
      fn(window.google.maps ? window.google.maps : window.google)
    } else if (Vue.google) {
      fn(Vue.google)
    } else {
      loadGoogleMapsAPI({
        key: key, client: client, libraries: libraries, v: version
      }).then(google => {
        Vue.google = google
        Vue.prototype.$google = google
        fn(google)
      })
    }
  }
}
plugin.version = '0.0.9'

export default plugin

// Auto-install when Vue is loaded globally via a <script> tag.
if (typeof window !== 'undefined' && window.Vue) {
  window.Vue.use(plugin)
}
|
<gh_stars>0
// Public API barrel: re-exports every submodule from the package root.
export * from './api';
export * from './decorator';
export * from './dto-compiler';
export * from './vault';
export * from './result';
export * from './type';
|
def test_first_only(self, mock_fetch):
"""Function: test_first_only
Description: Test with first argument only.
"""
def process_data(*args):
if len(args) == 1:
return args[0]
else:
return args
# Test case for single argument
result_single_arg = process_data("single")
self.assertEqual(result_single_arg, "single")
# Test case for multiple arguments
result_multiple_args = process_data("first", "second", "third")
self.assertEqual(result_multiple_args, ("first", "second", "third")) |
package com.appium.config;
import com.appium.pages.CommentPage;
import com.appium.pages.LoginPage;
import com.appium.pages.PublishPage;
import com.appium.pages.WelcomePage;
import io.appium.java_client.MobileElement;
import java.io.IOException;
/**
 * Device-specific UI flows implemented once per platform so the same test
 * scenario can be driven against different devices. Method intents below are
 * taken from their names; confirm details against the implementations.
 */
public interface DeviceInterface {

    // Interface methods are implicitly public; the redundant `public`
    // modifiers from the original declarations have been dropped.

    /**
     * Signs in through the login screen with the given credentials.
     *
     * @param loginPage page object driving the login screen
     * @param username  account name to sign in with
     * @param password  account password
     * @throws IOException          if reading test resources fails
     * @throws InterruptedException if an explicit wait is interrupted
     */
    void login(LoginPage loginPage, String username, String password) throws IOException, InterruptedException;

    /** Waits until the home/welcome page is ready. */
    void waitForHomePage(WelcomePage welcomePage);

    /** Navigates from the welcome page to the screen holding the log-out control. */
    void moveToLogOutScreen(WelcomePage welcomePage);

    /** Creates and publishes content via the publish page. */
    void writeContent(PublishPage publishPage);

    /**
     * Validates the state of the comment screen.
     *
     * @return {@code true} when the expected comments are present
     */
    boolean validateComments(CommentPage commentPage);

    /**
     * Scrolls upward relative to the given element.
     *
     * @return the element located after scrolling
     */
    MobileElement scrollUp(MobileElement welcomePage);
}
|
#!/bin/bash
# Tear down an Advanced Cluster Management (ACM) install: operator
# subscription/CSV, hub CRDs and webhooks, etcd leftovers, app-lifecycle
# CRDs, the custom catalog registry, and finally the namespace itself.
# Variable expansions are quoted throughout for robustness.

ocm_namespace="open-cluster-management"
oc project "$ocm_namespace"

operator_subscription="acm-operator-subscription"
operator_csv="advanced-cluster-management.v1.0.0"
custom_catalog_source="acm-custom-registry"
custom_registry_service="acm-custom-registry"
custom_registry_deployment="acm-custom-registry"

# Remove acm resources
oc delete subscriptions.operators.coreos.com "$operator_subscription" --ignore-not-found
oc delete csv "$operator_csv" --ignore-not-found

# Remove hub resources
oc delete crd multiclusterhubs.operators.open-cluster-management.io --ignore-not-found
oc delete validatingwebhookconfiguration multiclusterhub-operator-validating-webhook --ignore-not-found
oc delete mutatingwebhookconfiguration multiclusterhub-operator-mutating-webhook --ignore-not-found

# Remove etcd resources
oc delete subscriptions.operators.coreos.com etcd-singlenamespace-alpha-community-operators-openshift-marketplace --ignore-not-found
oc get csv | grep "etcd" | awk '{ print $1 }' | xargs oc delete csv --wait=false --ignore-not-found
oc get crd | grep "etcd" | awk '{ print $1 }' | xargs oc delete crd --wait=false --ignore-not-found
oc get service | grep "etcd" | awk '{ print $1 }' | xargs oc delete service --wait=false --ignore-not-found

# Remove subscription operator resources
# Note: No separate operator subscription to delete when installed via composite ACM CSV
# oc delete subscriptions.operators.coreos.com multicluster-operators-subscription-alpha-community-operators-openshift-marketplace --ignore-not-found
# oc get csv | grep "multicluster-operators-subscription" | awk '{ print $1 }' | xargs oc delete csv --wait=false --ignore-not-found
oc delete crd clusters.clusterregistry.k8s.io --ignore-not-found
oc delete crd channels.apps.open-cluster-management.io --ignore-not-found
oc delete crd subscriptions.apps.open-cluster-management.io --ignore-not-found
oc delete crd helmreleases.apps.open-cluster-management.io --ignore-not-found
oc delete crd deployables.apps.open-cluster-management.io --ignore-not-found
oc delete crd placementrules.apps.open-cluster-management.io --ignore-not-found
oc delete crd applications.app.k8s.io --ignore-not-found
# (dedup: a second delete of clusters.clusterregistry.k8s.io was removed —
# the CRD is already deleted above)
oc get service | grep "multicluster" | awk '{ print $1 }' | xargs oc delete service --wait=false --ignore-not-found

# delete these objects via nuke script only
# oc get scc | grep "multicluster" | awk '{ print $1 }' | xargs oc delete scc --wait=false --ignore-not-found
# oc get scc | grep "multicloud" | awk '{ print $1 }' | xargs oc delete scc --wait=false --ignore-not-found

# Remove custom registry resources
oc delete catalogsource "$custom_catalog_source" --ignore-not-found
oc delete service "$custom_registry_service" --ignore-not-found
oc delete deployment "$custom_registry_deployment" --ignore-not-found

# Finally remove the namespace itself (async; finalizers may take a while).
oc delete namespace "$ocm_namespace" --wait=false
|
/*
Copyright 2020-2021 University of Oxford
and Health and Social Care Information Centre, also known as NHS Digital
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
*/
import { Component, OnInit, Inject } from '@angular/core';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog';
import { VersioningGraphModalConfiguration } from './versioning-graph-modal.model';
import { MergableCatalogueItem } from '@maurodatamapper/mdm-resources';
@Component({
  selector: 'mdm-versioning-graph-modal',
  templateUrl: './versioning-graph-modal.component.html',
  styleUrls: ['./versioning-graph-modal.component.scss']
})
export class VersioningGraphModalComponent implements OnInit {
  // Catalogue item whose version graph this modal displays.
  catalogueItem: MergableCatalogueItem;

  constructor(
    public dialogRef: MatDialogRef<VersioningGraphModalComponent>,
    @Inject(MAT_DIALOG_DATA) public data: VersioningGraphModalConfiguration) { }

  ngOnInit() {
    // Copy the item out of the injected dialog configuration on open.
    this.catalogueItem = this.data.catalogueItem;
  }
}
|
#!/usr/bin/env bash
# Build the project with conan + cmake, then package the install tree into a
# self-extracting makeself archive.
set -ex

ROOT=${PWD}
PREFIX=${PWD}/install

mkdir -p build
pushd build

# Resolve third-party dependencies (building from source when no binary
# package exists), then configure and install into ${PREFIX}.
conan install .. -s build_type=Release --build missing
cmake .. \
  -DCMAKE_PREFIX_PATH="${PWD}" \
  -DCMAKE_BUILD_TYPE=Release \
  -DCMAKE_INSTALL_PREFIX="${PREFIX}"
cmake --build . --target install

# Locate the system temp dir portably via mktemp. Modern $() command
# substitution replaces the original legacy backticks, and expansions are
# quoted so paths with spaces survive.
TMPDIR=$(dirname "$(mktemp -u -t tmp.XXXXXXXXXX)")

# Fetch and unpack the makeself tooling.
curl "https://github.com/megastep/makeself/releases/download/release-2.4.0/makeself-2.4.0.run" --output "$TMPDIR/makeself.run" -L
chmod +x "$TMPDIR/makeself.run"
"$TMPDIR/makeself.run" --target "$TMPDIR/makeself"

# Stage deployment artifacts, then build the self-extracting installer.
python "${ROOT}/deploy.py" "$PREFIX"
"$TMPDIR/makeself/makeself.sh" "$PREFIX" "${ROOT}/md5.run" "conan-generated makeself.sh" "./conan-entrypoint.sh"

popd
|
#!/bin/bash
# Add two fixed integers and print the equation with its result.

num1=4
num2=3

# Arithmetic expansion reads variables directly — no '$' needed inside $(( )).
sum=$((num1 + num2))

echo "$num1 + $num2 = $sum"
package org.jooby.internal.metrics;
import static org.easymock.EasyMock.expectLastCall;
import java.util.Map;
import org.jooby.test.MockUnit;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import com.codahale.metrics.health.HealthCheck;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.google.common.collect.ImmutableMap;
@RunWith(PowerMockRunner.class)
@PrepareForTest(HealthCheckRegistryInitializer.class)
public class HealthCheckRegistryInitializerTest {

  /**
   * Verifies that constructing a {@code HealthCheckRegistryInitializer}
   * registers each supplied health check with the registry under its map key.
   */
  @Test
  public void register() throws Exception {
    new MockUnit(HealthCheckRegistry.class, HealthCheck.class)
        .expect(unit -> {
          // Expectation: the initializer must call register("h", check)
          // exactly once on the mocked registry.
          HealthCheckRegistry registry = unit.get(HealthCheckRegistry.class);
          registry.register("h", unit.get(HealthCheck.class));
          expectLastCall();
        })
        .run(unit -> {
          // Exercise: construction alone should trigger the registration.
          Map<String, HealthCheck> checks = ImmutableMap.of("h", unit.get(HealthCheck.class));
          new HealthCheckRegistryInitializer(unit.get(HealthCheckRegistry.class), checks);
        });
  }
}
|
<reponame>fabianklonsdorf/ixhh
'use strict';

// 20x20 "events--alt" icon descriptor: a hyperscript-style object
// (element name, attributes, children) rendered into an inline SVG by the
// consuming icon tooling. The single path encodes the whole glyph.
var _20 = {
  elem: 'svg',
  attrs: {
    xmlns: 'http://www.w3.org/2000/svg',
    viewBox: '0 0 32 32',
    width: 20,
    height: 20,
  },
  content: [
    {
      elem: 'path',
      attrs: {
        d:
          'M18 31h2v-2a1 1 0 0 1 1-1h6a1 1 0 0 1 1 1v2h2v-2a3.003 3.003 0 0 0-3-3h-6a3.003 3.003 0 0 0-3 3zm6-6a4 4 0 1 1 4-4 4.004 4.004 0 0 1-4 4zm0-6a2 2 0 1 0 2 2 2.003 2.003 0 0 0-2-2zM2 31h2v-2a1 1 0 0 1 1-1h6a1 1 0 0 1 1 1v2h2v-2a3.003 3.003 0 0 0-3-3H5a3.003 3.003 0 0 0-3 3zm6-6a4 4 0 1 1 4-4 4.004 4.004 0 0 1-4 4zm0-6a2 2 0 1 0 2 2 2.002 2.002 0 0 0-2-2zm10-3h2v-2a1 1 0 0 1 1-1h6a1 1 0 0 1 1 1v2h2v-2a3.003 3.003 0 0 0-3-3h-6a3.003 3.003 0 0 0-3 3zm6-6a4 4 0 1 1 4-4 4.004 4.004 0 0 1-4 4zm0-6a2 2 0 1 0 2 2 2.002 2.002 0 0 0-2-2zM2 16h2v-2a1.001 1.001 0 0 1 1-1h6a1.001 1.001 0 0 1 1 1v2h2v-2a3.003 3.003 0 0 0-3-3H5a3.003 3.003 0 0 0-3 3zm6-6a4 4 0 1 1 4-4 4.005 4.005 0 0 1-4 4zm0-6a2 2 0 1 0 2 2 2.002 2.002 0 0 0-2-2z',
      },
    },
  ],
  name: 'events--alt',
  size: 20,
};

module.exports = _20;
|
import os
import requests
import subprocess
import time
from src.ondisk_config import WORK_DIR
from src.ondisk_config import reload_nginx_config
def tear_down():
    """Stop the docker-compose stack and remove unettest's working files.

    Best-effort shell commands; failures are not checked.
    """
    os.system('docker-compose down')
    os.system(f'rm -rf {WORK_DIR}')
    os.system(f'echo "unettest has finished its business"')
def spin_up(detach=True, build=True, reboot_openresty=False):
    """
    Bring up the docker-compose stack.

    detach: run compose in the background; when False, stream its output
            in the foreground and block until it exits.
    build:  pass --build so images are rebuilt before starting (detached
            path only; the foreground path always builds).
    reboot_openresty: when openresty is running side by side with a uwsgi
                      app it needs a kick in the seat of the pants after
                      startup to connect. set to True to administer said
                      kick.
    """
    build_arg = " --build " if build else ""
    detach_arg = " --detach " if detach else ""
    if not detach:
        # Foreground run: spawn compose, kick nginx if asked, then block
        # until compose exits.
        p = subprocess.Popen(['/usr/local/bin/docker-compose', 'up', '--build'])
        if reboot_openresty:
            print('nginx reloading!!!!')
            reload_nginx_config()
            print('nginx reloaded!!!!')
        p.wait()
    else:
        os.system(f'docker-compose up {build_arg} {detach_arg}')
        if reboot_openresty:
            print('nginx reloading!!!!')
            reload_nginx_config()
            # BUG FIX: this branch originally printed 'nginx reloading!!!!'
            # again; report completion like the foreground branch does.
            print('nginx reloaded!!!!')
|
<filename>app/helpers/reviews_helper.rb
module ReviewsHelper
  # URL the review form should submit to: nested under the campsite when
  # one is present, otherwise the campsites index.
  def form_url_helper(campsite)
    campsite ? campsite_reviews_path(campsite) : campsites_path
  end

  # Username of the review's author. Safe navigation (&.) makes this return
  # nil instead of raising NoMethodError when the user record was deleted.
  def find_user(review)
    @user = User.find_by(id: review.user_id)&.username
  end

  # Renders the rating as a row of star emoji, one per rating point.
  def star_rating(review)
    "🌟" * (review.rating)
  end
end
|
<reponame>PauloGoncalvesBH/zup-protractor-with-docker
const { Home, ResultadoBusca, DetalhesDoProduto, DispositivosEcho, Carrinho } = require('../page_objects')
// E2E happy-path flows: add a product to the cart via two different entry
// points (search box, and the "Dispositivos Echo" home section).
describe('Fluxo de sucesso', () => {
  // Each test starts from the home page...
  beforeEach(() => Home.visit())
  // ...and empties the cart afterwards so tests stay independent.
  afterEach(() => Carrinho.excluirProduto())

  it('Adicionar produto a partir da caixa de pesquisa', () => {
    const nomeProduto = 'Echo Dot (3ª Geração): Smart Speaker com Alexa - <NAME>'
    // Search -> pick result -> add to bag -> verify it reached the cart.
    Home.fazerBuscaDeProduto(nomeProduto)
    ResultadoBusca.selecionarProduto(nomeProduto)
    DetalhesDoProduto.adicionarProdutoNaSacola()
    Home.acessarCarrinho()
    Carrinho.validarExistenciaDeProduto(nomeProduto)
  })

  it('Adicionar produto a partir da seção "Dispositivos Echo" na home', () => {
    const nomeProduto = 'Echo Studio - Smart Speaker com áudio de alta fidelidade e Alexa'
    // Browse the Echo devices section -> pick product -> add -> verify.
    Home.abrirListaDeDispositivosEcho()
    DispositivosEcho.selecionarProduto(nomeProduto)
    DetalhesDoProduto.adicionarProdutoNaSacola()
    Home.acessarCarrinho()
    Carrinho.validarExistenciaDeProduto(nomeProduto)
  })
})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.