text
stringlengths 1
1.05M
|
|---|
require File.expand_path(File.dirname(__FILE__) + '/test_helper')
# Exercises the Phonie::Country lookup helpers.
class CountryTest < Phonie::TestCase
  # Name lookup should be case-insensitive and nil-safe.
  def test_find_by_country_name
    assert_equal "Canada", Phonie::Country.find_by_name('canada').name
    assert_equal "Canada", Phonie::Country.find_by_name('Canada').name
    assert_nil Phonie::Country.find_by_name(nil)
    assert_nil Phonie::Country.find_by_country_code(nil)
    assert_equal [], Phonie::Country.find_all_by_phone_code(nil)
  end

  # ISO country-code lookup returns a single country.
  def test_find_by_country_code
    assert_equal "Norway", Phonie::Country.find_by_country_code('NO').name
  end

  # Phone-code lookup returns a list of matching countries.
  def test_find_all_by_phone_code
    matches = Phonie::Country.find_all_by_phone_code('47')
    assert_equal 1, matches.length
    assert_equal "Norway", matches.first.name
  end
end
|
#!/bin/bash
# Copyright 2013 Johns Hopkins University (author: Daniel Povey)
# Apache 2.0
# This script allows you to specify a 'segments' file with segments
# relative to existing utterances, with lines like
# utterance_foo-1 utterance_foo 7.5 8.2
# utterance_foo-2 utterance_foo 8.9 10.1
# and a 'text' file with sub-segmented text like
# utterance_foo-1 hello there
# utterance_foo-2 how are you
# and combine this with an existing data-dir that was all relative
# to the original utterance-ids like 'utterance_foo', producing
# a new subsegmented output directory.
#
# It does the right thing for you on the various files that the
# data directory contained (except you have to recreate
# the CMVN stats).
# Option defaults; parse_options.sh below may override them from the
# command line (e.g. --segment-end-padding 0.02 --nj 4).
segment_end_padding=0.0
cmd=run.pl
nj=1

. utils/parse_options.sh

# Accept either 3 args (no new text file) or 4 args (with text file).
if [ $# != 4 ] && [ $# != 3 ]; then
  echo "Usage: "
  echo " $0 [options] <srcdir> <subsegments-file> [<text-file>] <destdir>"
  echo "This script sub-segments a data directory. <subsegments-file> is to"
  echo "have lines of the form <new-utt> <old-utt> <start-time-within-old-utt> <end-time-within-old-utt>"
  echo "and <text-file> is of the form <new-utt> <word1> <word2> ... <wordN>."
  echo "This script appropriately combines the <subsegments-file> with the original"
  echo "segments file, if necessary, and if not, creates a segments file."
  echo "e.g.:"
  echo " $0 data/train [options] exp/tri3b_resegment/segments exp/tri3b_resegment/text data/train_resegmented"
  echo " Options:"
  echo " --segment-end-padding <padding-time> # e.g. 0.02. Default 0.0. If provided,"
  echo " # we will add this value to the end times of <destdir>/segments"
  echo " # when creating it. This can be useful to account for"
  echo " # end effects in feature generation. The reason this is"
  echo " # not just applied to the input segments file, is that"
  echo " # for purposes of computing the num-frames of the parts of"
  echo " # matrices in feats.scp, the padding should not be done."
  echo " See also: resolve_ctm_overlaps.py"
  exit 1;
fi

# C locale so sort order matches Kaldi's expectations everywhere.
export LC_ALL=C

srcdir=$1
subsegments=$2
add_subsegment_text=false
if [ $# -eq 4 ]; then
  # 4-arg form: a new text file for the sub-segmented utterances was given.
  new_text=$3
  dir=$4
  add_subsegment_text=true
  if [ ! -f "$new_text" ]; then
    echo "$0: no such file $new_text"
    exit 1
  fi
else
  dir=$3
fi

# Required inputs must exist before doing any work.
for f in "$subsegments" "$srcdir/utt2spk"; do
  if [ ! -f "$f" ]; then
    echo "$0: no such file $f"
    exit 1;
  fi
done
# Create the destination directory; abort if it cannot be created.
# (Previously this only printed a warning and carried on, so every later
# redirection into $dir would fail with confusing errors.)
if ! mkdir -p $dir; then
  echo "$0: failed to create directory $dir"
  exit 1
fi
if $add_subsegment_text; then
  # The first field (new utt-id) of the subsegments file and the new text
  # file must line up exactly, line by line.
  if ! cmp <(awk '{print $1}' <$subsegments) <(awk '{print $1}' <$new_text); then
    echo "$0: expected the first fields of the files $subsegments and $new_text to be identical"
    exit 1
  fi
fi

# Validate the subsegments file: exactly 4 fields per line, end > start.
if ! awk '{if (NF != 4 || !($4 > $3)) { print("Bad line: " $0); exit(1) } }' <$subsegments; then
  echo "$0: failed checking subsegments file $subsegments"
  exit 1
fi

set -e
set -o pipefail

# Create a mapping from the new to old utterances. This file will be deleted later.
awk '{print $1, $2}' < $subsegments > $dir/new2old_utt

# Create the new utt2spk file [just map the second field old-utt -> speaker].
utils/apply_map.pl -f 2 $srcdir/utt2spk < $dir/new2old_utt >$dir/utt2spk
# .. and the new spk2utt file.
utils/utt2spk_to_spk2utt.pl <$dir/utt2spk >$dir/spk2utt

if $add_subsegment_text; then
  # the new text file is just what the user provides.
  cp $new_text $dir/text
fi

# copy the source wav.scp
cp $srcdir/wav.scp $dir
if [ -f $srcdir/reco2file_and_channel ]; then
  cp $srcdir/reco2file_and_channel $dir
fi

# copy the source reco2dur
if [ -f $srcdir/reco2dur ]; then
  cp $srcdir/reco2dur $dir
fi

if [ -f $srcdir/segments ]; then
  # we have to map the segments file.
  # What's going on below is a little subtle.
  # $srcdir/segments has lines like: <old-utt-id> <recording-id> <start-time> <end-time>
  # and $subsegments has lines like: <new-utt-id> <old-utt-id> <start-time> <end-time>
  # The apply-map command replaces <old-utt-id> [the 2nd field of $subsegments]
  # with <recording-id> <start-time> <end-time>.
  # so after that first command we have lines like
  # <new-utt-id> <recording-id> <start-time-of-old-utt-within-recording> <end-time-old-utt-within-recording> \
  #   <start-time-of-new-utt-within-old-utt> <end-time-of-new-utt-within-old-utt>
  # which the awk command turns into:
  # <new-utt-id> <recording-id> <start-time-of-new-utt-within-recording> <end-time-of-new-utt-within-recording>
  utils/apply_map.pl -f 2 $srcdir/segments <$subsegments | \
    awk -v pad=$segment_end_padding '{ print $1, $2, $5+$3, $6+$3+pad; }' >$dir/segments
else
  # the subsegments file just becomes the segments file.
  awk -v pad=$segment_end_padding '{$4 += pad; print}' <$subsegments >$dir/segments
fi

# utt2uniq maps utt-ids to "unique" originals (for perturbed data); remap it.
if [ -f $srcdir/utt2uniq ]; then
  utils/apply_map.pl -f 2 $srcdir/utt2uniq <$dir/new2old_utt >$dir/utt2uniq
fi

if [ -f $srcdir/feats.scp ]; then
  # We want to avoid recomputing the features. We'll use sub-matrices of the
  # original feature matrices, using the [] notation that is available for
  # matrices in Kaldi.
  if [ ! -s $srcdir/frame_shift ]; then
    frame_shift=$(utils/data/get_frame_shift.sh $srcdir) || exit 1
  else
    frame_shift=$(cat $srcdir/frame_shift)
  fi
  echo "$0: note: frame shift is $frame_shift [affects feats.scp]"

  # The subsegments format is <new-utt-id> <old-utt-id> <start-time> <end-time>.
  # e.g. 'utt_foo-1 utt_foo 7.21 8.93'
  # The first awk command replaces this with the format:
  # <new-utt-id> <old-utt-id> <first-frame> <last-frame>
  # e.g. 'utt_foo-1 utt_foo 721 893'
  # and the apply_map.pl command replaces 'utt_foo' (the 2nd field) with its corresponding entry
  # from the original wav.scp, so we get a line like:
  # e.g. 'utt_foo-1 foo-bar.ark:514231 721 892'
  # Note: the reason we subtract one from the last time is that it's going to
  # represent the 'last' frame, not the 'end' frame [i.e. not one past the last],
  # in the matlab-like, but zero-indexed [first:last] notion. For instance, a segment with 1 frame
  # would have start-time 0.00 and end-time 0.01, which would become the frame range
  # [0:0]
  # The second awk command turns this into something like
  # utt_foo-1 foo-bar.ark:514231[721:892]
  # It has to be a bit careful because the format actually allows for more general things
  # like pipes that might contain spaces, so it has to be able to produce output like the
  # following:
  # utt_foo-1 some command|[721:892]
  # The 'end' frame is ensured to not exceed the feature archive size of
  # <old-utt-id>. This is done using the script fix_subsegment_feats.pl.
  # e.g if the number of frames in foo-bar.ark is 891, then the features are
  # truncated to that many frames.
  # utt_foo-1 foo-bar.ark:514231[721:890]
  # Lastly, utils/data/normalize_data_range.pl will only do something nontrivial if
  # the original data-dir already had data-ranges in square brackets.

  # Here, we compute the maximum 'end' frame allowed for each <new-utt-id>.
  # This is equal to the number of frames in the feature archive for <old-utt-id>.
  if [ ! -f $srcdir/utt2num_frames ]; then
    echo "$0: WARNING: Could not find $srcdir/utt2num_frames. It might take a long time to run get_utt2num_frames.sh."
    echo "Increase the number of jobs or write this file while extracting features by passing --write-utt2num-frames true to steps/make_mfcc.sh etc."
  fi
  utils/data/get_utt2num_frames.sh --cmd "$cmd" --nj $nj $srcdir
  awk '{print $1" "$2}' $subsegments | \
    utils/apply_map.pl -f 2 $srcdir/utt2num_frames > \
    $dir/utt2max_frames

  # NOTE(review): the 'exit' below inherits echo's (successful) status;
  # 'exit 1' was probably intended -- confirm before relying on the exit code.
  awk -v s=$frame_shift '{print $1, $2, int(($3/s)+0.5), int(($4/s)-0.5);}' <$subsegments| \
    utils/apply_map.pl -f 2 $srcdir/feats.scp | \
    awk '{p=NF-1; for (n=1;n<NF-2;n++) printf("%s ", $n); k=NF-2; l=NF-1; printf("%s[%d:%d]\n", $k, $l, $NF)}' | \
    utils/data/fix_subsegment_feats.pl $dir/utt2max_frames | \
    utils/data/normalize_data_range.pl >$dir/feats.scp || { echo "Failed to create $dir/feats.scp" && exit; }

  # Parse the frame ranges from feats.scp, which is in the form of [first-frame:last-frame]
  # and write the number-of-frames = last-frame - first-frame + 1 for the utterance.
  cat $dir/feats.scp | perl -ne 'm/^(\S+) .+\[(\d+):(\d+)\]$/; print "$1 " . ($3-$2+1) . "\n"' > \
    $dir/utt2num_frames

  # Here we add frame ranges to the elements of vad.scp, as we did for rows of feats.scp above.
  if [ -f $srcdir/vad.scp ]; then
    cat $subsegments | awk -v s=$frame_shift '{print $1, $2, int(($3/s)+0.5), int(($4/s)-0.5);}' | \
      utils/apply_map.pl -f 2 $srcdir/vad.scp | \
      awk '{p=NF-1; for (n=1;n<NF-2;n++) printf("%s ", $n); k=NF-2; l=NF-1; printf("%s[%d:%d]\n", $k, $l, $NF)}' | \
      utils/data/fix_subsegment_feats.pl $dir/utt2max_frames | \
      utils/data/normalize_data_range.pl >$dir/vad.scp
  fi
fi

# NOTE(review): this checks $dir (destination), not $srcdir -- it only fires
# if the destination pre-existed with a cmvn.scp; confirm that is intended.
if [ -f $dir/cmvn.scp ]; then
  rm $dir/cmvn.scp
  echo "$0: warning: removing $dir/cmvn.scp, you will have to regenerate it from the features."
fi

# remove the utt2dur file in case it's now invalid -- it will be regenerated
# from the segments file.
rm $dir/utt2dur 2>/dev/null || true

# Speaker-level and scoring files carry over unchanged.
if [ -f $srcdir/spk2gender ]; then
  cp $srcdir/spk2gender $dir
fi
if [ -f $srcdir/glm ]; then
  cp $srcdir/glm $dir
fi
if [ -f $srcdir/stm ]; then
  cp $srcdir/stm $dir
fi

for f in ctm; do
  if [ -f $srcdir/$f ]; then
    echo "$0: not copying $srcdir/$f to $dir because sub-segmenting it is "
    echo " ... not implemented yet (and probably it's not needed.)"
  fi
done

# Clean up the temporary mapping file.
rm $dir/new2old_utt

echo "$0: subsegmented data from $srcdir to $dir"
|
<filename>common/tests/integrations/test_redirect_mobile.py
from django.conf import settings
from django.test import override_settings
from common.tests.core import BaseLivePhoneTestCase, BaseLiveTestCase
class RedirectMobileTestMixin(object):
    """Mixin that runs each test with the mobile-redirect middleware stack
    and a fixed SITE_INFO (desktop + mobile hosts) in place."""

    @override_settings(
        SITE_INFO={
            'domain': 'cpdb.local',
            'mobile_host': 'm.cpdb.local'
        },
        MIDDLEWARE_CLASSES=(
            'django.middleware.common.CommonMiddleware',
            'common.middleware.subdomain.SubdomainURLRoutingMiddleware',
            'common.middleware.mobile_redirect.MobileRedirectMiddleware',
        )
    )
    def run(self, *args, **kwargs):
        # Propagate the result; the original dropped super().run()'s return
        # value, which breaks callers that use it (unittest returns the
        # TestResult from run()).
        return super(RedirectMobileTestMixin, self).run(*args, **kwargs)
class RedirectMobileTest(RedirectMobileTestMixin, BaseLivePhoneTestCase):
    """Phone browsers should be redirected to the mobile host."""

    def test_redirect_phone_to_mobile_site(self):
        uri = '/any_uri'
        self.visit(uri)
        mobile_url = '{mobile_subdomain}{uri}'.format(
            mobile_subdomain=settings.SITE_INFO['mobile_host'], uri=uri)
        self.browser.current_url.should.contain(mobile_url)
class RedirectMobileDesktopTest(RedirectMobileTestMixin, BaseLiveTestCase):
    """Desktop browsers must stay on the main host (no mobile redirect)."""

    # Renamed from test_does_not_reidrect_on_desktop_site: typo fix.
    # Test methods are discovered by the 'test' prefix, not referenced by name.
    def test_does_not_redirect_on_desktop_site(self):
        any_uri = '/any_uri'
        self.visit(any_uri)
        expected_url = '{mobile_subdomain}{uri}'.format(mobile_subdomain=settings.SITE_INFO['mobile_host'], uri=any_uri)
        self.browser.current_url.shouldnt.contain(expected_url)
|
def max_sum(arr):
    """Return the largest sum of any contiguous subarray of ``arr``.

    Uses Kadane's algorithm (O(n)) instead of the original O(n^2) double
    loop, preserving the original contract: for an empty array or an
    all-negative array the result is 0 (the empty subarray).

    Args:
        arr: sequence of numbers.

    Returns:
        Maximum contiguous-subarray sum, never below 0.
    """
    best = 0
    running = 0
    for value in arr:
        # Extend the current run, or restart it once it would go negative.
        running = max(0, running + value)
        best = max(best, running)
    return best
# Demo: print the maximum contiguous-subarray sum of a sample array.
print(max_sum([1, -3, 7, 12, -4, 8]))
|
#!/bin/bash
# Remove the System V shared-memory segments whose `ipcs` line contains 644.
# NOTE(review): parsing `ipcs` output is fragile -- column layout is
# locale/OS dependent, and `grep 644` matches "644" anywhere on the line
# (keys, sizes), not only in the permission column. Confirm on the target OS.
(
  # Split `ipcs` output on newlines only, so each $line is one full row.
  IFS='
'
  for line in `ipcs | grep 644`; do
    # `ipcrm shm <id>` is the legacy syntax (modern spelling: `ipcrm -m <id>`);
    # the id is taken from the second space-separated field of the row.
    ipcrm shm `echo $line | cut -f2 -d' '`
  done
)
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.riot.system;
import java.io.OutputStream ;
import java.io.Writer ;
import org.apache.jena.atlas.io.AWriter ;
import org.apache.jena.atlas.io.IO ;
import org.apache.jena.atlas.lib.Sink ;
import org.apache.jena.graph.Graph ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.Triple ;
import org.apache.jena.riot.lang.StreamRDFCounting ;
import org.apache.jena.riot.out.CharSpace ;
import org.apache.jena.riot.writer.WriterStreamRDFPlain ;
import org.apache.jena.shared.JenaException ;
import org.apache.jena.shared.PrefixMapping ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.sparql.core.Quad ;
/** Various Common StreamRDF setups */
/** Various Common StreamRDF setups */
public class StreamRDFLib
{
    /** Send everything to nowhere ... efficiently */
    public static StreamRDF sinkNull() { return new StreamRDFBase() ; }

    /** Plain writer over an OutputStream (UTF-8). */
    public static StreamRDF writer(OutputStream out) { return new WriterStreamRDFPlain(IO.wrapUTF8(out)) ; }

    /** Plain writer over an AWriter. */
    public static StreamRDF writer(AWriter out) { return new WriterStreamRDFPlain(out) ; }

    /** Plain writer over a java.io.Writer. */
    public static StreamRDF writer(Writer out) { return new WriterStreamRDFPlain(IO.wrap(out)) ; }

    /** Plain writer over an OutputStream in the given character space;
     *  ASCII gets an ASCII-wrapping writer, anything else falls back to UTF-8. */
    public static StreamRDF writer(OutputStream out, CharSpace charSpace)
    {
        switch (charSpace) {
            case ASCII:
                return new WriterStreamRDFPlain(IO.wrapASCII(out), charSpace);
            case UTF8:
            default:
                return writer(out);
        }
    }

    public static StreamRDF writer(AWriter out, CharSpace charSpace)
    {
        return new WriterStreamRDFPlain(out, charSpace);
    }

    public static StreamRDF writer(Writer out, CharSpace charSpace)
    {
        return new WriterStreamRDFPlain(IO.wrap(out), charSpace);
    }

    /** Accumulate stream triples into the given graph. */
    public static StreamRDF graph(Graph graph) { return new ParserOutputGraph(graph) ; }

    /** Accumulate stream triples/quads into the given dataset. */
    public static StreamRDF dataset(DatasetGraph dataset) { return new ParserOutputDataset(dataset) ; }

    /**
     * Output to a sink; prefix and base handled only within the parser.
     * Unfortunately, Java needs different names for the triples and
     * quads versions because of type erasure.
     */
    public static StreamRDF sinkTriples(Sink<Triple> sink) { return new ParserOutputSinkTriples(sink) ; }

    /**
     * Output to a sink; prefix and base handled only within the parser.
     * Unfortunately, Java needs different names for the triples and
     * quads versions because of type erasure.
     */
    public static StreamRDF sinkQuads(Sink<Quad> sink) { return new ParserOutputSinkQuads(sink) ; }

    /** Convert any triples seen to a quads, adding a graph node of {@link Quad#tripleInQuad} */
    public static StreamRDF extendTriplesToQuads(StreamRDF base)
    { return extendTriplesToQuads(Quad.tripleInQuad, base) ; }

    /** Convert any triples seen to a quads, adding the specified graph node */
    public static StreamRDF extendTriplesToQuads(Node graphNode, StreamRDF base)
    { return new ParserOutputSinkTriplesToQuads(graphNode, base) ; }

    /** Counting stream that discards its input. */
    public static StreamRDFCounting count()
    { return new StreamRDFCountingBase(sinkNull()) ; }

    /** Counting stream that forwards to {@code other}. */
    public static StreamRDFCounting count(StreamRDF other)
    { return new StreamRDFCountingBase(other) ; }

    /** Wrapper that widens each triple into a quad in the fixed graph {@code gn}. */
    private static class ParserOutputSinkTriplesToQuads extends StreamRDFWrapper
    {
        private final Node gn ;
        ParserOutputSinkTriplesToQuads(Node gn, StreamRDF base)
        { super(base) ; this.gn = gn ; }

        @Override public void triple(Triple triple)
        { sink.quad(new Quad(gn, triple)) ; }
    }

    /** Pushes triples into a Sink&lt;Triple&gt;; finish() flushes the sink. */
    private static class ParserOutputSinkTriples extends StreamRDFBase
    {
        private final Sink<Triple> sink ;
        public ParserOutputSinkTriples(Sink<Triple> sink)
        { this.sink = sink ; }

        @Override
        public void triple(Triple triple)
        { sink.send(triple) ; }

        @Override
        public void finish()
        { sink.flush() ; }
    }

    /** Pushes quads into a Sink&lt;Quad&gt;; finish() flushes the sink. */
    private static class ParserOutputSinkQuads extends StreamRDFBase
    {
        private final Sink<Quad> sink ;
        public ParserOutputSinkQuads(Sink<Quad> sink)
        { this.sink = sink ; }

        @Override
        public void quad(Quad quad)
        { sink.send(quad) ; }

        @Override
        public void finish()
        { sink.flush() ; }
    }

    /** Adds triples to a Graph. Default-graph quads are accepted as triples;
     *  named-graph quads are ignored, with a single warning the first time. */
    private static class ParserOutputGraph extends StreamRDFBase
    {
        protected final Graph graph ;
        protected boolean warningIssued = false ;
        public ParserOutputGraph(Graph graph) { this.graph = graph ; }

        @Override public void triple(Triple triple) { graph.add(triple) ; }

        @Override public void quad(Quad quad)
        {
            if ( quad.isTriple() || quad.isDefaultGraph() )
                graph.add(quad.asTriple()) ;
            else
            {
                if ( ! warningIssued )
                {
                    //SysRIOT.getLogger().warn("Only triples or default graph data expected : named graph data ignored") ;
                    // Not ideal - assumes the global default.
                    ErrorHandlerFactory.getDefaultErrorHandler().warning("Only triples or default graph data expected : named graph data ignored", -1, -1) ;
                }
                warningIssued = true ;
            }
            //throw new IllegalStateException("Quad passed to graph parsing") ;
        }

        @Override public void base(String base)
        { }

        @Override public void prefix(String prefix, String uri)
        {
            try { // Jena applies XML rules to prefixes.
                graph.getPrefixMapping().setNsPrefix(prefix, uri) ;
            } catch (JenaException ex) {}
        }
    }

    /** Adds triples/quads to a DatasetGraph; bare triples (and triple-marked
     *  quads) go into the generated default graph. Prefixes are recorded on
     *  the default graph's prefix mapping. */
    private static class ParserOutputDataset extends StreamRDFBase
    {
        protected final DatasetGraph dsg ;
        protected final PrefixMapping prefixMapping ;

        public ParserOutputDataset(DatasetGraph dsg)
        {
            this.dsg = dsg ;
            this.prefixMapping = dsg.getDefaultGraph().getPrefixMapping() ;
            // = dsg.getPrefixMapping().setNsPrefix(prefix, uri) ;
        }

        @Override public void triple(Triple triple)
        {
            dsg.add(Quad.defaultGraphNodeGenerated, triple.getSubject(), triple.getPredicate(), triple.getObject()) ;
            //throw new IllegalStateException("Triple passed to dataset parsing") ;
        }

        @Override public void quad(Quad quad)
        {
            if ( quad.isTriple() )
                dsg.add(Quad.defaultGraphNodeGenerated, quad.getSubject(), quad.getPredicate(), quad.getObject()) ;
            else
                dsg.add(quad) ;
        }

        @Override public void base(String base)
        { }

        @Override public void prefix(String prefix, String uri)
        {
            try { // Jena applies XML rules to prefixes.
                prefixMapping.setNsPrefix(prefix, uri) ;
            } catch (JenaException ex) {}
        }
    }
}
|
package org.telegram.telegrambots.meta.api.objects;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.telegram.telegrambots.meta.api.interfaces.InputBotApiObject;
import org.telegram.telegrambots.meta.api.interfaces.Validable;
import org.telegram.telegrambots.meta.exceptions.TelegramApiValidationException;
import java.io.File;
import java.io.InputStream;
/**
* @author <NAME>
* @version 4.0.0
*
* Input file used to upload a file to Telegram server and use it afterwards
*/
@SuppressWarnings({"WeakerAccess", "UnusedReturnValue"})
@JsonSerialize(using = InputFileSerializer.class, as = String.class)
public class InputFile implements InputBotApiObject, Validable {
    // Serialized value: either an existing file_id/URL, or "attach://<name>"
    // referencing the multipart part when a new file is uploaded.
    private String attachName;
    @JsonIgnore
    private String mediaName; ///< Name of the media to upload
    @JsonIgnore
    private File newMediaFile; ///< New media file
    @JsonIgnore
    private InputStream newMediaStream; ///< New media stream
    @JsonIgnore
    private boolean isNew; ///< True if the file is new, false if it is a file_id

    public InputFile() {
        super();
    }

    /**
     * Constructor to reference an existing file.
     *
     * @param attachName file_id (or URL) of the existing file
     */
    public InputFile(String attachName) {
        this();
        setMedia(attachName);
    }

    /**
     * Constructor to set a new file
     *
     * @param mediaFile File to send
     * @param fileName Name of the file
     */
    public InputFile(File mediaFile, String fileName) {
        this();
        setMedia(mediaFile, fileName);
    }

    /**
     * Constructor to set a new file as stream
     *
     * @param mediaStream File to send
     * @param fileName Name of the file
     */
    public InputFile(InputStream mediaStream, String fileName) {
        this();
        setMedia(mediaStream, fileName);
    }

    /**
     * Use this setter to send new file.
     * NOTE(review): switching between the File and InputStream overloads does
     * not clear the other field, so a previously set stream/file lingers --
     * confirm callers never reuse one InputFile instance for both.
     *
     * @param mediaFile File to send
     * @param fileName Name of the file
     * @return This object
     */
    public InputFile setMedia(File mediaFile, String fileName) {
        this.newMediaFile = mediaFile;
        this.mediaName = fileName;
        this.attachName = "attach://" + fileName;
        this.isNew = true;
        return this;
    }

    /**
     * Use this setter to send new file as stream.
     * @param mediaStream File to send
     * @param fileName Name of the file
     * @return This object
     */
    public InputFile setMedia(InputStream mediaStream, String fileName) {
        this.newMediaStream = mediaStream;
        this.mediaName = fileName;
        this.attachName = "attach://" + fileName;
        this.isNew = true;
        return this;
    }

    /**
     * Use this setter to reference an existing file by file_id or URL.
     * @param attachName file_id or URL of the file
     * @return This object
     */
    public InputFile setMedia(String attachName) {
        this.attachName = attachName;
        this.isNew = false;
        return this;
    }

    public String getAttachName() {
        return attachName;
    }

    public String getMediaName() {
        return mediaName;
    }

    public File getNewMediaFile() {
        return newMediaFile;
    }

    public InputStream getNewMediaStream() {
        return newMediaStream;
    }

    public boolean isNew() {
        return isNew;
    }

    /**
     * Validates the object state: a new file needs a media name and a
     * File or stream; an existing file needs a non-empty attach name.
     *
     * @throws TelegramApiValidationException when a required field is missing
     */
    @Override
    public void validate() throws TelegramApiValidationException {
        if (isNew) {
            if (mediaName == null || mediaName.isEmpty()) {
                throw new TelegramApiValidationException("Media name can't be empty", this);
            }
            if (newMediaFile == null && newMediaStream == null) {
                throw new TelegramApiValidationException("Media can't be empty", this);
            }
        } else {
            if (attachName == null || attachName.isEmpty()) {
                throw new TelegramApiValidationException("File_id can't be empty", this);
            }
        }
    }
}
|
# Download and unpack the SentEval evaluation data hosted for SimCSE.
wget https://huggingface.co/datasets/princeton-nlp/datasets-for-simcse/resolve/main/senteval.tar
tar xvf senteval.tar
|
<reponame>DevopsChina/conf
export {};
//# sourceMappingURL=temp-any.js.map
|
# Standard library
import json

from decimal import Decimal

# Django / third party
from django.shortcuts import render
from django import http
from django_redis import get_redis_connection

# Project
from meiduo_mall.utils.views import LoginRequiredView
from user.models import Address
from goods.models import SKU
from orders.models import OrderInfo

# BUG FIX: the original had `import json, http`, which bound the *stdlib*
# `http` package over `from django import http`, so every later
# `http.HttpResponseForbidden(...)` call raised AttributeError.
class OrderSettlementView(LoginRequiredView):
    """Order settlement (checkout) page."""

    def get(self, request):
        """Render the order-settlement page for the logged-in user."""
        # The currently logged-in user.
        user = request.user
        # The user's non-deleted shipping addresses.
        addresses = Address.objects.filter(user=user, is_deleted=False)
        # If there are no addresses, the template handles it and redirects
        # to the address-edit page, so no special handling is needed here.
        # addresses = addresses or None
        # Fetch the cart contents and the set of selected items from Redis.
        redis_conn = get_redis_connection('carts')
        redis_cart = redis_conn.hgetall('carts_%s' % user.id)
        cart_selected = redis_conn.smembers('selected_%s' % user.id)
        cart = {}  # sku_id -> count, for the *selected* cart items only
        for sku_id in cart_selected:
            cart[int(sku_id)] = int(redis_cart[sku_id])
        # Running totals across the selected items.
        total_count = 0
        total_amount = Decimal('0.00')
        # Load the SKU rows and annotate each with its count and line amount.
        skus = SKU.objects.filter(id__in=cart.keys())
        for sku in skus:
            sku.count = cart[sku.id]
            sku.amount = sku.count * sku.price
            # Accumulate overall count and amount.
            total_count += sku.count
            total_amount += sku.amount
        # Flat shipping fee.
        freight = Decimal('10.00')
        # Render the page.
        context = {
            'addresses': addresses,
            'skus': skus,
            'total_count': total_count,
            'total_amount': total_amount,
            'freight': freight,
            'payment_amount': total_amount + freight
        }
        return render(request, 'place_order.html', context)
class OrderCommitView(LoginRequiredView):
    """Submit an order."""

    def post(self, request):
        """Create an order from the posted address and payment method.

        Expects a JSON body with ``address_id`` and ``pay_method``; returns
        403 on invalid input. Order-goods creation, stock/sales updates and
        cart cleanup are still TODO, as in the original.
        """
        # Parse the JSON request body.
        json_dict = json.loads(request.body.decode())
        address_id = json_dict.get('address_id')
        pay_method = json_dict.get('pay_method')
        # Both parameters are required.
        if not all([address_id, pay_method]):
            return http.HttpResponseForbidden('缺少必传参数')
        user = request.user
        try:
            # BUG FIX: must look up a *non-deleted* address. The original used
            # is_deleted=True, which only ever matched deleted addresses, so
            # every valid address_id was rejected as invalid.
            address = Address.objects.get(id=address_id, user=user, is_deleted=False)
        except Address.DoesNotExist:
            return http.HttpResponseForbidden('address_id有误')
        # Only cash-on-delivery or Alipay are accepted.
        if pay_method not in [OrderInfo.PAY_METHODS_ENUM['CASH'], OrderInfo.PAY_METHODS_ENUM['ALIPAY']]:
            return http.HttpResponseForbidden('支付方式有误')
        # Create the basic order record (OrderInfo, the "one" side).
        OrderInfo.objects.create(
            order_id='',
            user=user,
            address=address,
            total_count=0,  # updated later, once order goods are known
            total_amount=Decimal('0.00'),
            freight=Decimal('10.00'),
            pay_method=pay_method,
            status=''
        )
        # TODO: decrement SKU stock / increment SKU sales
        # TODO: update SPU sales
        # TODO: create the OrderGoods records (the "many" side)
        # TODO: remove the committed items from the cart
        # TODO: respond with order_id
        pass
|
/**
 * Sum all the numbers in the given array.
 *
 * @param arr - values to add up
 * @returns the total (0 for an empty array)
 */
function arraySum(arr: number[]): number {
  return arr.reduce((total, value) => total + value, 0);
}
// Demo usage: prints the sum of the sample array.
const arr = [1, 2, 3, 4, 5];
console.log(arraySum(arr)); // Output: 15
|
<reponame>dogballs/battle-city
import { GameObject, Subject } from '../../../core';
import { GameUpdateArgs } from '../../../game';
import { MenuInputContext } from '../../../input';
import * as config from '../../../config';
import { SpriteText } from '../../text';
import { MenuItem } from '../MenuItem';
// TODO: calculate dynamically
// Pixel metrics for the selector arrows and the row height.
const ARROW_WIDTH = 28;
const ARROW_OFFSET = 16;
const ITEM_HEIGHT = 28;

// One selectable choice: the value the item yields plus its display text.
export interface SelectorMenuItemChoice<T> {
  value: T;
  text: string;
}

// Optional appearance overrides; missing fields fall back to DEFAULT_OPTIONS.
export interface SelectorMenuItemOptions {
  backgroundColor?: string;
  color?: string;
  containerWidth?: number;
  itemOriginX?: number;
}

const DEFAULT_OPTIONS = {
  color: config.COLOR_WHITE,
  containerWidth: 256,
  itemOriginX: 0.5,
};
/**
 * Menu item that cycles through a fixed list of choices with left/right
 * input; only the currently selected choice's text is visible, flanked by
 * arrow sprites. Emits `changed` whenever the selection moves.
 */
export class SelectorMenuItem<T> extends MenuItem {
  public changed = new Subject<SelectorMenuItemChoice<T>>();
  public zIndex = 0;

  private choices: SelectorMenuItemChoice<T>[] = [];
  private options: SelectorMenuItemOptions;
  private leftArrow: SpriteText;
  private rightArrow: SpriteText;
  private container: GameObject;
  // Index into `choices`; -1 when the index points at no valid choice.
  private selectedIndex = 0;
  private items: SpriteText[] = [];

  constructor(
    choices: SelectorMenuItemChoice<T>[] = [],
    options: SelectorMenuItemOptions = {},
  ) {
    super();

    this.choices = choices;
    this.options = Object.assign({}, DEFAULT_OPTIONS, options);
  }

  // Selects the choice whose value matches; no-op if the value is unknown.
  public setValue(value: T): void {
    const choiceIndex = this.choices.findIndex(
      (choice) => choice.value === value,
    );
    if (choiceIndex === -1) {
      return;
    }

    this.selectChoice(choiceIndex);
  }

  // Returns the currently selected choice's value, or null if none.
  public getValue(): T {
    const focusedChoice = this.choices[this.selectedIndex];
    if (focusedChoice === undefined) {
      return null;
    }

    const { value } = focusedChoice;

    return value;
  }

  // Called each frame while focused; moves the selection on left/right input.
  public updateFocused(updateArgs: GameUpdateArgs): void {
    const { inputManager } = updateArgs;
    const inputMethod = inputManager.getActiveMethod();

    if (inputMethod.isDownAny(MenuInputContext.HorizontalNext)) {
      this.selectNext();
      this.emitChange();
    }
    if (inputMethod.isDownAny(MenuInputContext.HorizontalPrev)) {
      this.selectPrev();
      this.emitChange();
    }
  }

  // Builds the arrow sprites and one (stacked) text item per choice.
  protected setup(): void {
    this.container = new GameObject(this.options.containerWidth, ITEM_HEIGHT);
    this.container.position.setX(ARROW_WIDTH + ARROW_OFFSET);
    this.add(this.container);

    this.choices.forEach((choice) => {
      const item = new SpriteText(choice.text, {
        color: this.options.color,
      });
      item.origin.setX(this.options.itemOriginX);
      item.position.setX(
        this.options.containerWidth * this.options.itemOriginX,
      );
      item.setZIndex(this.zIndex + 1);
      this.container.add(item);
    });

    this.leftArrow = new SpriteText('←', { color: this.options.color });
    this.add(this.leftArrow);

    this.rightArrow = new SpriteText('→', { color: this.options.color });
    this.rightArrow.position.setX(
      ARROW_WIDTH + ARROW_OFFSET + this.options.containerWidth + ARROW_OFFSET,
    );
    this.add(this.rightArrow);

    this.size.set(
      this.options.containerWidth + (ARROW_WIDTH + ARROW_OFFSET) * 2,
      28,
    );

    // Apply visibility for the initial selection.
    this.selectChoice();
  }

  private emitChange(): void {
    const choice = this.choices[this.selectedIndex];
    this.changed.notify(choice);
  }

  // Moves selection one step left, wrapping to the last choice.
  private selectPrev(): void {
    let prevIndex = this.selectedIndex - 1;
    if (prevIndex < 0) {
      prevIndex = this.choices.length - 1;
    }
    this.selectChoice(prevIndex);
  }

  // Moves selection one step right, wrapping to the first choice.
  private selectNext(): void {
    let nextIndex = this.selectedIndex + 1;
    if (nextIndex > this.choices.length - 1) {
      nextIndex = 0;
    }
    this.selectChoice(nextIndex);
  }

  // Sets the selected index (defaults to the current one) and, once the
  // item has been set up, shows only the selected choice's text.
  private selectChoice(nextIndex?: number): void {
    if (nextIndex === undefined) {
      nextIndex = this.selectedIndex;
    }

    if (this.choices[nextIndex] === undefined) {
      this.selectedIndex = -1;
    } else {
      this.selectedIndex = nextIndex;
    }

    if (this.hasBeenSetup()) {
      this.container.dirtyPaintBox();
      this.container.children.forEach((item, index) => {
        if (this.selectedIndex === index) {
          item.setVisible(true);
        } else {
          item.setVisible(false);
        }
      });
    }
  }
}
|
# Fog OpenStack compute extension: list a server's volume attachments.
module Fog
  module Compute
    class OpenStack
      class Real
        # Issues GET /servers/:server_id/os-volume_attachments and
        # expects an HTTP 200 response.
        def get_server_volumes(server_id)
          params = {
            :expects => 200,
            :method => 'GET',
            :path => "/servers/#{server_id}/os-volume_attachments"
          }
          request(params)
        end
      end

      class Mock
      end
    end
  end
end
<gh_stars>10-100
import pytest
from bayesian_testing.experiments import BinaryDataTest
@pytest.fixture
def conv_test():
    # Build a BinaryDataTest with three surviving variants:
    #  A, B -- raw 0/1 observations (library default priors)
    #  C   -- aggregated totals/positives with explicit beta priors
    #  D   -- exercises append vs. replace semantics, then is deleted
    cv = BinaryDataTest()
    cv.add_variant_data("A", [0, 1, 0, 1, 0, 0, 0, 0, 0, 1])
    cv.add_variant_data("B", [0, 0, 0, 1, 0, 0, 0, 0, 0, 1])
    cv.add_variant_data_agg("C", 11, 2, a_prior=1, b_prior=2)
    cv.add_variant_data_agg("D", 10, 10)
    cv.add_variant_data_agg("D", 20, 20, replace=False)
    cv.add_variant_data_agg("D", 20, 20, replace=True)
    cv.delete_variant("D")
    return cv
def test_variants(conv_test):
    # Variant D was deleted in the fixture, so only A, B, C remain.
    assert conv_test.variant_names == ["A", "B", "C"]
def test_totals(conv_test):
    # Totals per variant: A and B have 10 observations each, C has 11.
    assert conv_test.totals == [10, 10, 11]
def test_positives(conv_test):
    # Positive counts: A has 3 ones, B has 2, C was given 2 directly.
    assert conv_test.positives == [3, 2, 2]
def test_a_priors(conv_test):
    # A and B get the default a-prior (0.5); C was given a_prior=1.
    assert conv_test.a_priors == [0.5, 0.5, 1]
def test_b_priors(conv_test):
    # A and B get the default b-prior (0.5); C was given b_prior=2.
    assert conv_test.b_priors == [0.5, 0.5, 2]
def test_probabs_of_being_best(conv_test):
    # Fixed seed makes the Monte-Carlo estimate reproducible.
    pbbs = conv_test.probabs_of_being_best(sim_count=20000, seed=52)
    assert pbbs == {"A": 0.57225, "B": 0.233, "C": 0.19475}
def test_evaluate(conv_test):
    # Per-variant expectations for seed 52 / 20k simulations, expressed as a
    # table to keep the literal compact.
    keys = ("variant", "totals", "positives", "positive_rate", "prob_being_best")
    rows = [
        ("A", 10, 3, 0.3, 0.57225),
        ("B", 10, 2, 0.2, 0.233),
        ("C", 11, 2, 0.18182, 0.19475),
    ]
    expected_report = [dict(zip(keys, row)) for row in rows]
    assert conv_test.evaluate(sim_count=20000, seed=52) == expected_report
|
#!/bin/bash
# The MIT License (MIT)
#
# Copyright (c) 2010 Technische Universitaet Berlin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Author: Tilman Rassy <rassy@math.tu-berlin.de>
# $Id: build.sh,v 1.68 2009/09/18 16:39:52 rassy Exp $
# Build script for japs
# Set fixed variables (constants):
readonly program_name=build.sh
readonly program_version='$Revision: 1.68 $'
readonly user_config_file=build.conf
# Set variable defaults. Any of these may be overridden by $user_config_file,
# which is sourced below:
prefix=${MM_BUILD_PREFIX:-/usr/local}
task=process_targets
tomcat_home=$TOMCAT_HOME
webapp_name=cocoon
db_host=localhost
db_port=5432
db_name=mdb01
db_encoding=DEFAULT
db_user_name=japs
db_user_password=japsen
db_admin_name=$USER
db_admin_password=foo
japs_admin_name=admin
japs_admin_password=mumie
password_encryptor_class=net.mumie.cocoon.util.MD5PasswordEncryptor
mail_domain=localhost
mail_smtp_host=localhost
mail_from_name=Admin
# Variables initialized to '' receive defaults derived from other settings
# later in the script (see the "Set the variables if not set already" section):
mail_from_address=''
mail_reply_name=''
mail_reply_address=''
qf_applet_mail_to_name='QF Applet'
qf_applet_mail_to_address=''
receipt_dir=''
sign_keystore_file=''
sign_keystore_type=JKS
sign_keystore_password=japsen
sign_key_alias=mumie
sign_key_dn=''
sign_key_validity=''
upload_dir=''
correction_tmp_dir=''
msg_dest_table_filename=''
checkin_root=${MM_CHECKIN_ROOT:-$HOME/mumie/checkin}
jdk_apidocs_url=http://java.sun.com/j2se/1.4.2/docs/api
datasheet_apidocs_url=''
cocoon_apidocs_url=http://xml.apache.org/cocoon/apidocs
avalon_framework_apidocs_url=''
excalibur_xmlutil_apidocs_url=''
# Source user config file:
[ -e "$user_config_file" ] && source "$user_config_file"
# Process command line parameters with getopt(1); most options select the
# task to run, the rest toggle build flags:
params=`getopt \
  --longoptions force,targets,ignore-deps,help,version,vars,check,release:,cvsroot:,javac-verbose,javac-deprecation,javac-debug \
  --options f,t,D,h,v \
  -- \
  "$@"`
if [ $? -ne 0 ] ; then exit 1 ; fi
eval set -- "$params"
while true ; do
  case "$1" in
    --targets|-t) task=show_targets ; shift ;;
    --ignore-deps|-D) ignore_deps=ignore_deps ; shift ;;
    --help|-h) task=show_help ; shift ;;
    --version|-v) task=show_version ; shift ;;
    --vars) task=print_variables ; shift ;;
    --check) task=check ; shift ;;
    --force|-f) force=force ; shift ;;
    --release) release="$2" ; shift 2 ;;
    --cvsroot) cvsroot="$2" ; shift 2 ;;
    --javac-verbose) javac_verbose=enabled ; shift ;;
    --javac-deprecation) javac_deprecation=enabled ; shift ;;
    --javac-debug) javac_debug=enabled ; shift ;;
    --) shift ; break ;;
  esac
done
# Remaining (non-option) arguments are the build targets; default is japs-lib:
targets=${*:-'japs-lib'}
# Store the current directory:
base_dir=`pwd`
# Store the pid of this process (used by error() to abort from subshells):
pid=$$
# Counter for warnings:
declare -i numwarn=0
# Set the variables if not set already. These are derived from the defaults
# and/or the user config sourced above:
java_lib_dir=$prefix/lib/java
doc_dir=$prefix/share/doc/japs
webapp_root=$tomcat_home/webapps/$webapp_name
webapp_webinf=$webapp_root/WEB-INF
webapp_lib_dir=$webapp_webinf/lib
java_cmd="java $java_opts"
version_file=VERSION
build_tools_jar=$base_dir/java/lib/mumie-japs-build.jar
avalon_framework_api_jar=avalon-framework-api-4.3.jar
avalon_framework_impl_jar=avalon-framework-impl-4.3.jar
mail_from_address=${mail_from_address:-admin@${mail_domain}}
mail_reply_name=${mail_reply_name:-$mail_from_name}
mail_reply_address=${mail_reply_address:-$mail_from_address}
qf_applet_mail_to_address=${qf_applet_mail_to_address:-qf-applet@$mail_domain}
receipt_dir=${receipt_dir:-$webapp_root/receipts}
sign_keystore_file=${sign_keystore_file:-$tomcat_home/conf/keystore}
sign_key_password=${sign_key_password:-$sign_keystore_password}
sign_key_validity=${sign_key_validity:-365}
upload_dir=${upload_dir:-$webapp_webinf/upload}
correction_tmp_dir=${correction_tmp_dir:-$webapp_webinf/correction}
msg_dest_table_filename=${msg_dest_table_filename:-$webapp_webinf/message_destination_table.xml}
checkin_root=${MM_CHECKIN_ROOT:-$HOME/mumie/checkin}
datasheet_apidocs_url=${datasheet_apidocs_url:-$prefix/doc/japs_client/apidocs}
# Notion class source files (generated from XSL stylesheets of the same name):
notion_class_sources="
DocType.java
DbTable.java
DbColumn.java
XMLElement.java
XMLAttribute.java
XMLNamespace.java
DataFormat.java
AnnType.java
RefType.java
MediaType.java
UserRole.java
FileRole.java
TimeFormat.java
Category.java
RequestParam.java
ResponseHeader.java
SyncCmdName.java
UserGroupName.java
RefAttrib.java
PseudoDocType.java
UseMode.java
SessionAttrib.java
WorksheetState.java
Lang.java
LangCode.java
Theme.java
EventName.java"
# Config files to install/uninstall:
config_install_files="
cocoon.xconf
mumie.roles
web.xml
logkit.xconf
checkin_defaults.xml"
# Original Cocoon config files to save:
orig_config_files="
cocoon.xconf
web.xml
logkit.xconf"
# fs-content files to install/uninstall:
fsc_install_files="
login_form.xml
login_form.xsl
checkin_response.xsl
not_logged_in.xhtml
logout_failed.xhtml
transform.xsl
create_account_form.xml
resources/i18n_de.xml
resources/styles.css
resources/mumie_logo.png
resources/logo.png
resources/top_bar_bg.png
"
# Libraries to install/uninstall. This variable contains libraries which are
# included in this package or are created by the build.
lib_install_files_1="
mumie-japs.jar
mail-1.3.3_01.jar
activation-1.0.2.jar
mumie_mathletfactory_base.jar
"
# Libraries to install/uninstall. This variable contains libraries which are
# in $prefix/lib/java.
lib_install_files_2="
mumie-util.jar
mumie-sql.jar
mumie-japs-datasheet.jar
mumie-japs-client.jar
"
# --------------------------------------------------------------------------------
# Utility functions
# --------------------------------------------------------------------------------
# Prints the time of last modification of a file to stdout. The time is expressed
# as seconds since Epoch.
# Usage: mtime FILE
function mtime
{
  # Quote the argument so paths containing spaces or glob characters are
  # passed to stat(1) intact.
  run_cmd stat -c %Y "$1"
}
# Returns all Java classes which are below a certain package path and must be rebuilt.
# Usage: get_java_source_files PACKAGE_PATH
# Echoes the relative paths (as found by find, e.g. ./Foo.java) of all *.java
# files under java/src/PACKAGE_PATH whose .class counterpart under
# java/classes/PACKAGE_PATH is missing or older, or of all files when $force
# is set.
function get_java_source_files
{
  local package_path=$1
  local src_dir=$base_dir/java/src
  local target_dir=$base_dir/java/classes
  local saved_dir=`pwd`
  run_cmd cd $src_dir/$package_path
  local source_file
  for source_file in `find -name "*.java"` ; do
    # Map foo/Bar.java -> $target_dir/$package_path/foo/Bar.class
    local target_file=$target_dir/$package_path/${source_file%java}class
    if [ "$force" ] || \
       [ ! -e "$target_file" ] || \
       [ `mtime $source_file` -gt `mtime $target_file` ]
    then
      echo $source_file
    fi
  done
  run_cmd cd $saved_dir
}
# Returns the notion class sources that are out of date and must be rebuilt.
# Usage: get_outdated_notion_classes_sources XSL_DIR RESULT_DIR
# Echoes each name from $notion_class_sources whose generated .java file in
# RESULT_DIR is missing, or older than its generating .xsl in XSL_DIR, or
# older than config/config.xml; with $force set, all names are echoed.
function get_outdated_notion_classes_sources
{
  local xsl_dir=$1
  local result_dir=$2
  local mtime_config=`mtime $base_dir/config/config.xml`
  local name
  for name in $notion_class_sources ; do
    local result_file=$result_dir/$name
    # The stylesheet has the same basename: Foo.java <- Foo.xsl
    local xsl_file=$xsl_dir/${name%java}xsl
    if [ "$force" ] || \
       [ ! -e "$result_file" ] || \
       [ `mtime $xsl_file` -gt `mtime $result_file` ] || \
       [ $mtime_config -gt `mtime $result_file` ]
    then
      echo $name
    fi
  done
}
# Aborts with an error message.
# Usage: error MSG...
# NOTE(review): uses 'kill -9 $pid' instead of 'exit 1' — presumably so the
# whole script dies even when error() is invoked from a subshell (e.g. inside
# a command substitution); confirm before changing.
function error
{
  echo "$program_name: ERROR: $*"
  echo
  kill -9 $pid
  # exit 1
}
# Prints a warning and increments the global warning counter $numwarn.
# Usage: warning MSG...
function warning
{
  echo "$program_name: WARNING: $*"
  let numwarn++
}
# Checks the exit code of the last command, terminates with an error message if the
# exit code is not 0. Must be called immediately after the command in question,
# since it inspects $?.
function check_exit_code
{
  local exit_code=$?
  [ "$exit_code" -eq 0 ] || error "Last command returned with code $exit_code"
}
# Runs a command, checks the exit code, terminates with an error message if the exit
# code is not 0.
# Usage: run_cmd COMMAND [ARG...]
function run_cmd
{
  "$@"
  check_exit_code
}
# Copies files to a directory, preserves subdirectory structure.
# Usage: cp_to_dir DIR FILE1 FILE2 ...
# Copies FILE1 FILE2 ... to DIR, with their relative paths. Subdirectories are created
# if necessary.
function cp_to_dir
{
  local dir=$1
  shift
  local file
  for file in "$@" ; do
    # Relative directory of the file, with a leading "./" stripped:
    local path=`dirname $file | sed s/^\\.\\\\///`
    if [ "$path" != '.' ] ; then
      run_cmd mkdir -vp $dir/$path
      run_cmd cp -v $file $dir/$path
    else
      run_cmd mkdir -vp $dir
      run_cmd cp -v $file $dir
    fi
  done
}
# Calls ant with the standard set of japs.* properties derived from the build
# configuration; extra arguments (e.g. -buildfile ...) are passed through.
# Optional flags ($force, $javac_*) and the VERSION file, when present, add
# further properties.
function run_ant
{
  local ant_cmd="\
 ant -e \
 -Djaps.install.prefix=$prefix \
 -Djaps.install.lib.java.dir=$java_lib_dir \
 -Djaps.tomcat.dir=$tomcat_home \
 -Djaps.webapp.dir=$webapp_root \
 -Djaps.apidocs.jdk.url=$jdk_apidocs_url \
 -Djaps.apidocs.japs-datasheet.url=$datasheet_apidocs_url \
 -Djaps.apidocs.cocoon.url=$cocoon_apidocs_url \
 -Djaps.apidocs.avalon-framework.url=$avalon_framework_apidocs_url \
 -Djaps.apidocs.excalibur-xmlutil.url=$excalibur_xmlutil_apidocs_url"
  [ "$force" ] && ant_cmd="$ant_cmd -Djaps.force=yes"
  [ "$javac_verbose" ] && ant_cmd="$ant_cmd -Djaps.javac.verbose=yes"
  [ "$javac_deprecation" ] && ant_cmd="$ant_cmd -Djaps.javac.deprecation=yes"
  [ "$javac_debug" ] && ant_cmd="$ant_cmd -Djaps.javac.debug=yes"
  [ -e "$base_dir/$version_file" ] &&
    ant_cmd="$ant_cmd -Djaps.version=`cat $base_dir/$version_file`"
  run_cmd $ant_cmd "$@"
}
# Sets the classpath: joins all arguments with ':' and exports the result as
# CLASSPATH for subsequent java invocations.
# Usage: set_classpath PATH1 PATH2 ...
function set_classpath
{
  local part
  local classpath
  for part in $* ; do
    if [ "$classpath" ] ; then
      classpath="$classpath:$part"
    else
      classpath=$part
    fi
  done
  run_cmd export CLASSPATH=$classpath
}
# Runs psql as db admin, connected to the maintenance database template1.
# Both PGPASSWD (legacy spelling) and PGPASSWORD are exported so the password
# is picked up regardless of the PostgreSQL client version.
function psql_admin
{
  export PGPASSWD=$db_admin_password
  export PGPASSWORD=$db_admin_password
  run_cmd psql \
    --host $db_host \
    --port $db_port \
    --username $db_admin_name \
    --dbname template1 \
    "$@"
}
# Runs psql as db user, connected to the application database $db_name.
# Both PGPASSWD (legacy spelling) and PGPASSWORD are exported so the password
# is picked up regardless of the PostgreSQL client version.
function psql_user
{
  export PGPASSWD=$db_user_password
  export PGPASSWORD=$db_user_password
  run_cmd psql \
    --host $db_host \
    --port $db_port \
    --username $db_user_name \
    --dbname $db_name \
    "$@"
}
# Tries to find the db user $db_user_name in the database. Returns the user name if yes,
# the empty string if no.
function find_db_user
{
  psql_admin \
    --tuples-only \
    --command "SELECT usename FROM pg_user WHERE usename='$db_user_name';"
}
# Tries to find the database $db_name in the DBMS. Returns the database name
# if yes, the empty string if no.
function find_db
{
  psql_admin \
    --tuples-only \
    --command "SELECT datname FROM pg_database WHERE datname='$db_name';"
}
# Makes a copy of the (original) sitemap in $base_dir/uninstall, but only if
# one has not been saved there already (so repeated installs do not overwrite
# the pristine original).
function backup_sitemap
{
  run_cmd mkdir -pv $base_dir/uninstall
  local uninstall_dir=$base_dir/uninstall
  # If necessary, save original:
  [ -e "$webapp_root/sitemap.xmap" ] && [ ! -e "$uninstall_dir/sitemap.xmap" ] && \
    run_cmd cp -v "$webapp_root/sitemap.xmap" $uninstall_dir
}
# Quotes the character '/' with a backslash. Used in sed input.
# Usage: quote STRING...
function quote
{
  echo "$@" | sed 's/\//\\\//g'
}
# --------------------------------------------------------------------------------
# Functions implementing targets
# --------------------------------------------------------------------------------
# Creates the build tools. I.e., compiles the corresponding Java classes and jars
# them. Calls an Ant process, but only if get_java_source_files reports sources
# that need (re)compilation. Sets $create_build_tools_done on completion so
# dependent targets can skip it.
function create_build_tools
{
  echo
  echo "======================================================================"
  echo "Creating build tools"
  echo "======================================================================"
  echo
  if [ "`get_java_source_files net/mumie/japs/build`" ] ; then
    run_ant -buildfile tools/ant/buildfiles/build_tools.xml
  fi
  echo "$program_name: build tools done"
  create_build_tools_done=done
}
# Creates the notion class sources. For each notion class source Xxx.java, there
# is an XSL stylesheet Xxx.xsl, which, applied to config.xml, creates Xxx.java.
# This function calls a Java class from the build tools that performs all that
# XSL transformations. Depends on create_build_tools (skipped with
# $ignore_deps). Sets $create_notion_class_sources_done on completion.
function create_notion_class_sources
{
  [ "$create_build_tools_done" ] || [ "$ignore_deps" ] || create_build_tools
  echo
  echo "======================================================================"
  echo "Creating notion class sources"
  echo "======================================================================"
  echo
  local xsl_dir=$base_dir/tools/xsl
  local result_dir=$base_dir/java/src/net/mumie/cocoon/notions
  # Only run the (expensive) JVM when at least one source is outdated:
  if [ "`get_outdated_notion_classes_sources $xsl_dir $result_dir`" ] ; then
    echo "$program_name: calling NotionClassSourcesCreator"
    set_classpath \
      $java_lib_dir/mumie-util.jar \
      $base_dir/java/lib/mumie-japs-build.jar
    run_cmd $java_cmd net.mumie.japs.build.NotionClassSourcesCreator \
      --xsl-dir=$xsl_dir \
      --result-dir=$result_dir \
      --config-dir=$base_dir/config \
      $notion_class_sources
  fi
  create_notion_class_sources_done=done
  echo "$program_name: notion class sources done"
}
# Creates the db helper sources. This is done by three XSL transformations
# producing the three sources DbHelper.java, AbstractDbHelper.java, and
# PostgreSQLDbHelper.java. The function calls a Java class from the build
# tools performing all three XSL transformations. Depends on
# create_build_tools (skipped with $ignore_deps). Sets
# $create_db_helper_sources_done on completion.
function create_db_helper_sources
{
  [ "$create_build_tools_done" ] || [ "$ignore_deps" ] || create_build_tools
  echo
  echo "======================================================================"
  echo "Creating db helper sources"
  echo "======================================================================"
  echo
  run_cmd cd $base_dir
  # Check if targets must be (re)build:
  local needs_build=''
  if [ "$force" ] ; then
    # Force flag enabled, thus (re)build required:
    needs_build=enabled
  else
    # Check if target files exist:
    local dbh_src_dir=java/src/net/mumie/cocoon/db
    local dbh_java=$dbh_src_dir/DbHelper.java
    local abs_dbh_java=$dbh_src_dir/AbstractDbHelper.java
    local pg_dbh_java=$dbh_src_dir/PostgreSQLDbHelper.java
    if [ ! -e "$dbh_java" ] || [ ! -e "$abs_dbh_java" ] || [ ! -e "$pg_dbh_java" ] ; then
      # Not all target files exist, thus (re)build required:
      needs_build=enabled
    else
      # Target files exist, check modification times. Each generated source
      # depends on DbHelper.xml, DbHelper.xsl, and its own skeleton file:
      local mtime_dbh_java=`mtime $dbh_java`
      local mtime_abs_dbh_java=`mtime $abs_dbh_java`
      local mtime_pg_dbh_java=`mtime $pg_dbh_java`
      local mtime_dbh_xsl=`mtime tools/xsl/DbHelper.xsl`
      local mtime_dbh_xml=`mtime $dbh_src_dir/DbHelper.xml`
      local mtime_dbh_skel=`mtime $dbh_src_dir/DbHelper.skeleton`
      local mtime_abs_dbh_skel=`mtime $dbh_src_dir/AbstractDbHelper.skeleton`
      local mtime_pg_dbh_skel=`mtime $dbh_src_dir/PostgreSQLDbHelper.skeleton`
      local mtime_build_tools=`mtime $build_tools_jar`
      if [ $mtime_dbh_java -le $mtime_dbh_xml ] || \
         [ $mtime_dbh_java -le $mtime_dbh_skel ] || \
         [ $mtime_dbh_java -le $mtime_dbh_xsl ] || \
         [ $mtime_abs_dbh_java -le $mtime_dbh_xml ] || \
         [ $mtime_abs_dbh_java -le $mtime_abs_dbh_skel ] || \
         [ $mtime_abs_dbh_java -le $mtime_dbh_xsl ] || \
         [ $mtime_pg_dbh_java -le $mtime_dbh_xml ] || \
         [ $mtime_pg_dbh_java -le $mtime_pg_dbh_skel ] || \
         [ $mtime_pg_dbh_java -le $mtime_dbh_xsl ] ; then
        # At least one target file is outdated, thus (re)build required:
        needs_build=enabled
      fi
    fi
  fi
  # (Re)build targets if necessary:
  if [ "$needs_build" ] ; then
    echo "$program_name: calling DbHelperSourcesCreator"
    set_classpath \
      $java_lib_dir/mumie-util.jar \
      $java_lib_dir/mumie-sql.jar \
      $base_dir/java/lib/mumie-japs-build.jar
    run_cmd $java_cmd net.mumie.japs.build.DbHelperSourcesCreator \
      --xsl-dir=$base_dir/tools/xsl \
      --db-helper-dir=$base_dir/java/src/net/mumie/cocoon/db
  fi
  echo "$program_name: db helper sources done"
  create_db_helper_sources_done=done
  run_cmd cd $base_dir
}
# Creates the document class sources. All sources XxxDocument.java are created
# in one XSL transformation. All sources XxxToCheckin.java are created in a
# second XSL transformation. In both cases, the input is config.xml. This function
# performs the two XSL transformations by means of the XSLTransformer class from
# the build tools. The whole logic concerning which target files are outdated and
# must be (re)build is contained in the XSL stylesheets ($force is merely
# forwarded as a stylesheet parameter). Depends on create_build_tools
# (skipped with $ignore_deps).
function create_document_class_sources
{
  [ "$create_build_tools_done" ] || [ "$ignore_deps" ] || create_build_tools
  echo
  echo "======================================================================"
  echo "Creating document class sources"
  echo "======================================================================"
  echo
  run_cmd cd $base_dir
  set_classpath \
    $java_lib_dir/mumie-util.jar \
    $base_dir/java/lib/mumie-japs-build.jar
  local force_param=no
  [ "$force" ] && force_param=yes
  echo "$program_name: calling XSLTransformer for XxxDocument.java sources"
  run_cmd $java_cmd net.mumie.japs.build.XSLTransformer \
    --stylesheet=$base_dir/tools/xsl/Document.xsl \
    --input=config/config.xml \
    --param base-dir=$base_dir/java/src \
    --param force=$force_param \
    --param xsl-dir=$base_dir/tools/xsl
  echo "$program_name: calling XSLTransformer for XxxToCheckin.java sources"
  run_cmd $java_cmd net.mumie.japs.build.XSLTransformer \
    --stylesheet=$base_dir/tools/xsl/DocumentToCheckin.xsl \
    --input=config/config.xml \
    --param base-dir=$base_dir/java/src \
    --param force=$force_param \
    --param xsl-dir=$base_dir/tools/xsl
  create_document_class_sources_done=done
  echo "$program_name: document class sources done"
}
# Creates the rootxsl stylesheet. This is done by applying create_rootxsl.xsl
# to rootxsl.xsl.src. The function does this by means of the XSLTransformer class
# from the build tools. Requires $url_prefix to be set. Depends on
# create_build_tools (skipped with $ignore_deps). Sets $create_rootxsl_done
# on completion.
function create_rootxsl
{
  [ "$create_build_tools_done" ] || [ "$ignore_deps" ] || create_build_tools
  echo
  echo "======================================================================"
  echo "Creating rootxsl"
  echo "======================================================================"
  echo
  local rootxsl_src=$base_dir/java/src/net/mumie/cocoon/transformers/rootxsl.xsl.src
  local rootxsl_trg=$base_dir/java/classes/net/mumie/cocoon/transformers/rootxsl.xsl
  local rootxsl_stl=$base_dir/tools/xsl/create_rootxsl.xsl
  # Check if target must be (re)build:
  if [ "$force" ] || [ ! -e "$rootxsl_trg" ] || \
     [ "`mtime $rootxsl_trg`" -le "`mtime $rootxsl_src`" ] || \
     [ "`mtime $rootxsl_trg`" -le "`mtime $rootxsl_stl`" ] ; then
    # (Re)build target:
    [ "$url_prefix" ] || error "url_prefix not set"
    mkdir -vp $base_dir/java/classes/net/mumie/cocoon/transformers
    echo "$program_name: calling XSLTransformer"
    set_classpath \
      $java_lib_dir/mumie-util.jar \
      $base_dir/java/lib/mumie-japs-build.jar
    run_cmd $java_cmd net.mumie.japs.build.XSLTransformer \
      --stylesheet=$rootxsl_stl \
      --input=$rootxsl_src \
      --output=$rootxsl_trg \
      --param url-prefix=$url_prefix
  fi
  echo "$program_name: rootxsl done"
  # Fixed: previously set 'create_rootxsl=done', but create_japs_lib checks
  # $create_rootxsl_done, so the dependency was always re-run.
  create_rootxsl_done=done
  run_cmd cd $base_dir
}
# Creates the Japs Java library (mumie-japs.jar). Depends on the db helper,
# document class, notion class, and rootxsl targets (each skipped with
# $ignore_deps). Requires $tomcat_home, a postgres jar in the webapp lib dir,
# and the generated rootxsl; runs Ant only when sources or rootxsl are newer
# than the jar.
function create_japs_lib
{
  [ "$create_db_helper_sources_done" ] || [ "$ignore_deps" ] || \
    create_db_helper_sources
  [ "$create_document_class_sources_done" ] || [ "$ignore_deps" ] || \
    create_document_class_sources
  [ "$create_notion_class_sources_done" ] || [ "$ignore_deps" ] || \
    create_notion_class_sources
  [ "$create_rootxsl_done" ] || [ "$ignore_deps" ] || \
    create_rootxsl
  echo
  echo "======================================================================"
  echo "Creating Japs lib"
  echo "======================================================================"
  echo
  local rootxsl=$base_dir/java/classes/net/mumie/cocoon/transformers/rootxsl.xsl
  local jar_file=$base_dir/java/lib/mumie-japs.jar
  [ "$tomcat_home" ] || error "tomcat_home not set"
  [ "`find $webapp_lib_dir -name 'postgresql-*.jar'`" ] || error "Missing postgres jar in $webapp_lib_dir"
  [ -e "$rootxsl" ] || error "Missing $rootxsl"
  if [ "`get_java_source_files net/mumie/cocoon`" ] || \
     [ "`mtime $rootxsl`" -ge "`mtime $jar_file`" ] ; then
    run_ant -buildfile tools/ant/buildfiles/japs_lib.xml
  fi
  echo "$program_name: japs lib done"
  create_japs_lib_done=done
}
# Copies the Java libraries to the WEB-INF/lib directory of the Cocoon webapp in Tomcat.
# Some of the libraries ($lib_install_files_1) are copied from the java/lib directory
# of the build tree, some ($lib_install_files_2) are copied from its usual installation
# location, i.e., $prefix/lib/java. The correction utility jar is additionally
# copied into $correction_tmp_dir/tools.
function install_libs
{
  echo
  echo "======================================================================"
  echo "Installing libraries"
  echo "======================================================================"
  echo
  run_cmd cd $base_dir/java/lib
  echo "$program_name: changed into java/lib"
  run_cmd cp -v $lib_install_files_1 $webapp_lib_dir
  run_cmd mkdir -vp $correction_tmp_dir/tools
  run_cmd cp -v mumie-japs-corrutil.jar $correction_tmp_dir/tools
  run_cmd cd $java_lib_dir
  echo "$program_name: changed into $java_lib_dir"
  run_cmd cp -v $lib_install_files_2 $webapp_lib_dir
  run_cmd cd $base_dir
  echo "$program_name: installing libs done"
  install_libs_done=done
}
# Removes all installed libraries ($lib_install_files_1 and $lib_install_files_2)
# from the WEB-INF/lib directory of the Cocoon webapp in Tomcat, and the
# correction utility jar from $correction_tmp_dir/tools.
function uninstall_libs
{
  echo
  echo "======================================================================"
  echo "Uninstalling libraries"
  echo "======================================================================"
  echo
  run_cmd cd $webapp_lib_dir
  echo "$program_name: changed into $webapp_lib_dir"
  run_cmd rm -fv $lib_install_files_1 $lib_install_files_2
  run_cmd cd $correction_tmp_dir/tools
  echo "$program_name: changed into $correction_tmp_dir/tools"
  run_cmd rm -fv mumie-japs-corrutil.jar
  run_cmd cd $base_dir
  echo "$program_name: uninstalling libs done"
  uninstall_libs_done=done
}
# Creates the "notion XSLs" db_notions.xsl and xml_notions.xsl by applying
# the corresponding config_to.*.xsl.xsl stylesheets to config/config.xml.
# Each target is only (re)created when missing or older than its inputs,
# or when $force is set.
function create_notion_xsl
{
  echo
  echo "======================================================================"
  echo "Creating notion XSLs"
  echo "======================================================================"
  echo
  local xsl_dir=$base_dir/tools/xsl
  local config_file=$base_dir/config/config.xml
  local db_notions_xsl=$xsl_dir/db_notions.xsl
  local xml_notions_xsl=$xsl_dir/xml_notions.xsl
  local db_notions_stl=$xsl_dir/config_to.db_notions.xsl.xsl
  local xml_notions_stl=$xsl_dir/config_to.xml_notions.xsl.xsl
  set_classpath \
    $java_lib_dir/mumie-util.jar \
    $base_dir/java/lib/mumie-japs-build.jar
  # (re)create db_notions.xsl if necessary:
  if [ "$force" ] || \
     [ ! -e "$db_notions_xsl" ] || \
     [ "`mtime $db_notions_xsl`" -le "`mtime $config_file`" ] || \
     [ "`mtime $db_notions_xsl`" -le "`mtime $db_notions_stl`" ]
  then
    run_cmd $java_cmd net.mumie.japs.build.XSLTransformer \
      --stylesheet=$db_notions_stl \
      --input=$config_file \
      --output=$db_notions_xsl
  fi
  # (re)create xml_notions.xsl if necessary:
  if [ "$force" ] || \
     [ ! -e "$xml_notions_xsl" ] || \
     [ "`mtime $xml_notions_xsl`" -le "`mtime $config_file`" ] || \
     [ "`mtime $xml_notions_xsl`" -le "`mtime $xml_notions_stl`" ]
  then
    run_cmd $java_cmd net.mumie.japs.build.XSLTransformer \
      --stylesheet=$xml_notions_stl \
      --input=$config_file \
      --output=$xml_notions_xsl
  fi
  echo "$program_name: notion XSLs done"
  create_notion_xsl_done=done
}
# [Auxiliary target]
# Checks if the db user $db_user_name exists. Sets $db_user_exists when the
# user is found, and $check_db_user_done so dependent targets can skip the
# check.
function check_db_user
{
  echo
  echo "======================================================================"
  echo "Checking db user"
  echo "======================================================================"
  echo
  if [ "`find_db_user`" ] ; then
    echo "$program_name: user \"$db_user_name\" exists"
    db_user_exists=exists
  else
    echo "$program_name: user \"$db_user_name\" does not exist"
  fi
  echo "$program_name: checking db user done"
  # Fixed: previously set 'check_db_user=done', but create_db_user/drop_db_user
  # check $check_db_user_done, so the check was always repeated.
  check_db_user_done=done
}
# [Auxiliary target]
# Creates the database user provided the user does not exist already. This target
# depends on check_db_user, which sets the flag $db_user_exists if the user exists.
# If the flag is not set, this target (create_db_user) is executed; otherwise, it
# is skipped.
function create_db_user
{
  [ "$check_db_user_done" ] || [ "$ignore_deps" ] || check_db_user
  echo
  echo "======================================================================"
  echo "Creating db user"
  echo "======================================================================"
  echo
  if [ ! "$db_user_exists" ] ; then
    echo "\
BEGIN TRANSACTION;
CREATE USER $db_user_name NOCREATEDB NOCREATEUSER;
ALTER USER $db_user_name WITH PASSWORD '$db_user_password';
COMMIT TRANSACTION;
" \
    | psql_admin
    echo "$program_name: creating db user done"
  else
    echo "$program_name: skipped (user already exists)"
  fi
  # Update check_db_user flags (the user now exists either way).
  # Fixed: previously set 'check_db_user=done' instead of the flag that is
  # actually checked, $check_db_user_done.
  db_user_exists=exists
  check_db_user_done=done
  create_db_user_done=done
}
# Deletes the database user provided the user exists. This target depends on
# check_db_user, which sets the flag $db_user_exists if the user exists. If
# the flag is set, this target (drop_db_user) is executed, otherwise it is
# skipped.
function drop_db_user
{
  [ "$check_db_user_done" ] || [ "$ignore_deps" ] || check_db_user
  echo
  echo "======================================================================"
  echo "Dropping db user"
  echo "======================================================================"
  echo
  if [ "$db_user_exists" ] ; then
    psql_admin --command "DROP USER $db_user_name;"
    echo "$program_name: dropping db user done"
  else
    echo "$program_name: skipped (user does not exist)"
  fi
  # Update check_db_user flags (the user no longer exists either way).
  # Fixed: previously set 'check_db_user=done' instead of the flag that is
  # actually checked, $check_db_user_done.
  db_user_exists=''
  check_db_user_done=done
  drop_db_user_done=done
}
# [Auxiliary target]
# Checks if the database $db_name exists. Sets $db_exists when the database is
# found, and $check_db_done so dependent targets can skip the check.
function check_db
{
  echo
  echo "======================================================================"
  echo "Checking db"
  echo "======================================================================"
  echo
  if [ "`find_db`" ] ; then
    echo "$program_name: database \"$db_name\" exists"
    db_exists=exists
  else
    echo "$program_name: database \"$db_name\" does not exist"
  fi
  echo "$program_name: checking db done"
  # Fixed: previously set 'check_db=done', but create_db/drop_db check
  # $check_db_done, so the check was always repeated.
  check_db_done=done
}
# [Auxiliary target]
# Creates the database provided it does not exist already. This target depends on
# check_db, which sets the flag $db_exists if the database exists. If the flag is
# not set, this target (create_db) is executed; otherwise, it is skipped.
function create_db
{
  [ "$check_db_done" ] || [ "$ignore_deps" ] || check_db
  echo
  echo "======================================================================"
  echo "Creating db"
  echo "======================================================================"
  echo
  if [ ! "$db_exists" ] ; then
    psql_admin \
      -c "CREATE DATABASE $db_name OWNER=$db_user_name ENCODING=$db_encoding;"
    echo "$program_name: creating db done"
  else
    echo "$program_name: skipped (database already exists)"
  fi
  # Update check_db flags (the database now exists either way).
  # Fixed: previously set 'check_db=done' instead of the flag that is
  # actually checked, $check_db_done.
  db_exists=exists
  check_db_done=done
  create_db_done=done
}
# Deletes the database provided it exists. This target depends on check_db, which
# sets the flag $db_exists if the database exists. If the flag is set, this target
# (drop_db) is executed; otherwise, it is skipped.
function drop_db
{
  [ "$check_db_done" ] || [ "$ignore_deps" ] || check_db
  echo
  echo "======================================================================"
  echo "Dropping db"
  echo "======================================================================"
  echo
  if [ "$db_exists" ] ; then
    psql_admin --command "DROP DATABASE $db_name;"
    echo "$program_name: dropping db done"
  else
    echo "$program_name: skipped (database does not exist)"
  fi
  # Update check_db flags (the database no longer exists either way).
  # Fixed: previously set 'check_db=done' instead of the flag that is
  # actually checked, $check_db_done.
  db_exists=''
  check_db_done=done
  drop_db_done=done
}
# [Auxiliary target]
# Creates the SQL code that defines the core database structure. This is done by applying
# create_db_core_sql.xsl to the config file (config/config.xml). The function does this by means of
# the XSLTransformer class from the build tools. This target requires the build tools and
# the Japs library; however, it does not call create_build_tools or create_japs_lib
# automatically. This is for more efficiency in the build process. Depends on
# create_notion_xsl (skipped with $ignore_deps).
function create_db_core_sql
{
  [ "$create_notion_xsl_done" ] || [ "$ignore_deps" ] || create_notion_xsl
  echo
  echo "======================================================================"
  echo "Creating db core SQL"
  echo "======================================================================"
  echo
  local config_file=$base_dir/config/config.xml
  local core_sql_trg=$base_dir/db/src/core.sql
  local core_sql_stl=$base_dir/tools/xsl/create_db_core_sql.xsl
  # Check if target must be (re)build:
  if [ "$force" ] || [ ! -e "$core_sql_trg" ] || \
     [ "`mtime $core_sql_trg`" -le "`mtime $config_file`" ] || \
     [ "`mtime $core_sql_trg`" -le "`mtime $core_sql_stl`" ] ; then
    # (Re)build target. The Japs lib and Avalon jars are needed because the
    # stylesheet uses the configured password encryptor to hash the admin
    # password that is embedded in the generated SQL:
    echo "$program_name: calling XSLTransformer"
    set_classpath \
      $java_lib_dir/mumie-util.jar \
      $java_lib_dir/mumie-sql.jar \
      $base_dir/java/lib/mumie-japs-build.jar \
      $base_dir/java/lib/mumie-japs.jar \
      $webapp_lib_dir/$avalon_framework_api_jar \
      $webapp_lib_dir/$avalon_framework_impl_jar
    run_cmd $java_cmd \
      -Dpassword.encryptor.class=$password_encryptor_class \
      net.mumie.japs.build.XSLTransformer \
      --stylesheet=$core_sql_stl \
      --input=$config_file \
      --output=$core_sql_trg \
      --param db-core.admin-user.login_name=$japs_admin_name \
      --param db-core.admin-user.password=$japs_admin_password
  fi
  echo "$program_name: db core sql done"
  create_db_core_sql_done=done
  run_cmd cd $base_dir
}
# [Auxiliary target]
# Creates the database tables (but does not fill them with content). This is done
# by executing the SQL code created by create_db_core_sql in psql. This target
# requires that the database user and the database exists and that the database is
# empty. These requirements are not checked automatically, and the corresponding
# targets are not called automatically if needed. This is for more simplicity in
# the build process. However, the dependency on create_db_core_sql is implemented.
function create_db_tables
{
  [ "$create_db_core_sql_done" ] || [ "$ignore_deps" ] || create_db_core_sql
  echo
  echo "======================================================================"
  echo "Creating db tables"
  echo "======================================================================"
  echo
  # ON_ERROR_STOP makes psql abort (non-zero exit) at the first SQL error:
  echo "\
\\set ON_ERROR_STOP
\\i $base_dir/db/src/core.sql" \
  | psql_user
  echo "$program_name: creating db tables done"
  create_db_tables_done=done
}
# [Auxiliary target]
# Creates several server-side database functions by executing corresponding SQL
# scripts in psql. For the sake of simplicity in the build process, requirements
# (database exists, database user exists, etc.) are not treated automatically.
function create_db_functions
{
  echo
  echo "======================================================================"
  echo "Creating db functions"
  echo "======================================================================"
  echo
  # ON_ERROR_STOP makes psql abort (non-zero exit) at the first SQL error:
  echo "\
\\set ON_ERROR_STOP
\\i $base_dir/db/src/path_for_section_id.sql" \
  | psql_user
  echo "$program_name: creating db functions done"
  create_db_functions_done=done
}
# Creates the database, the database tables, functions, and views. Essentially,
# this is an aggregation of the targets create_db_user, create_db, create_db_tables
# and create_db_functions. But before the targets are executed, it
# is checked if the database exists already. If this is the case, an error is
# signaled. This prevents the user from accidentally deleting an existing database.
function build_db
{
  check_db
  [ "$db_exists" ] && error "Database already exists (use drop-db to remove it)"
  create_db_user
  create_db
  create_db_tables
  create_db_functions
}
# Creates the cocoon.xconf file. The file is created from an XML source,
# cocoon.xconf.src. The creation is done by applying an XSL stylesheet to
# the global config file, config.xml. The source (cocoon.xconf.src) is read
# via the document() XPath function.
# Rebuild triggers: --force, missing target, or the target being no newer
# than any of its four inputs (source, config, user config, stylesheet).
function create_xconf
{
# Build tools are needed for the XSLTransformer class below:
[ "$create_build_tools_done" ] || [ "$ignore_deps" ] || create_build_tools
echo
echo "======================================================================"
echo "Creating xconf"
echo "======================================================================"
echo
run_cmd cd $base_dir
local target_file=$base_dir/config/cocoon.xconf
local source_file=$base_dir/config/cocoon.xconf.src
local config_file=$base_dir/config/config.xml
local xsl_file=$base_dir/tools/xsl/xconf.xsl
# Check if target must be (re)build:
local needs_build=''
if [ "$force" ] ; then
# Force flag enabled, thus (re)build required:
needs_build=enabled
else
# Check if target file exists:
if [ ! -e "$target_file" ] ;then
# Does not exist, (re)build required:
needs_build=enabled
else
# Target file exists, check modification times. Note -le: a target with
# a timestamp *equal* to an input's is also rebuilt.
local target_mtime=`mtime $target_file`
if [ $target_mtime -le "`mtime $source_file`" ] || \
[ $target_mtime -le "`mtime $config_file`" ] || \
[ $target_mtime -le "`mtime $user_config_file`" ] || \
[ $target_mtime -le "`mtime $xsl_file`" ] ; then
# Target is outdated, thus (re)build required:
needs_build=enabled
fi
fi
fi
# (Re)build target if necessary:
if [ "$needs_build" ] ; then
set_classpath \
$java_lib_dir/mumie-util.jar \
$base_dir/java/lib/mumie-japs-build.jar
echo "$program_name: calling XSLTransformer"
# The transformation reads config.xml as input; the xconf source file and
# all server settings are handed over as stylesheet parameters.
run_cmd $java_cmd net.mumie.japs.build.XSLTransformer \
--stylesheet=$xsl_file \
--input=$config_file \
--output=$target_file \
--param xconf.source.filename=$source_file \
--param xconf.db.host="$db_host" \
--param xconf.db.port="$db_port" \
--param xconf.db.name="$db_name" \
--param xconf.db.user.name="$db_user_name" \
--param xconf.db.user.password="$db_user_password" \
--param xconf.mail.smtp.host="$mail_smtp_host" \
--param xconf.mail.from.name="$mail_from_name" \
--param xconf.mail.from.address="$mail_from_address" \
--param xconf.mail.reply.name="$mail_reply_name" \
--param xconf.mail.reply.address="$mail_reply_address" \
--param xconf.receipt.dir="$receipt_dir" \
--param xconf.sign.keystore.filename="$sign_keystore_file" \
--param xconf.sign.keystore.type="$sign_keystore_type" \
--param xconf.sign.keystore.password="$sign_keystore_password" \
--param xconf.sign.key.alias="$sign_key_alias" \
--param xconf.sign.key.password="$sign_key_password" \
--param xconf.upload.dir="$upload_dir" \
--param xconf.checkin.defaults.file="$webapp_webinf/checkin_defaults.xml" \
--param xconf.correction.tmp.dir="$correction_tmp_dir" \
--param xconf.message.destination.table.filename="$msg_dest_table_filename"
fi
echo "$program_name: creating xconf done"
create_xconf_done=done
run_cmd cd $base_dir
}
# Creates the mumie.roles file from its XML source, mumie.roles.src. The
# creation is done by applying an XSL stylesheet to the global config file,
# config.xml; the source (mumie.roles.src) is read via the document() XPath
# function.
function create_roles
{
    # Build tools provide the XSLTransformer class used below:
    if [ -z "$create_build_tools_done" ] && [ -z "$ignore_deps" ] ; then
        create_build_tools
    fi
    printf '%s\n' '' '======================================================================' 'Creating roles' '======================================================================' ''
    run_cmd cd $base_dir
    local target_file=$base_dir/config/mumie.roles
    local source_file=$base_dir/config/mumie.roles.src
    local config_file=$base_dir/config/config.xml
    local xsl_file=$base_dir/tools/xsl/roles.xsl
    # Decide whether the target must be (re)built: forced, missing, or not
    # newer than any of its inputs (note -le: equal mtimes also rebuild).
    local needs_build=''
    if [ "$force" ] || [ ! -e "$target_file" ] ; then
        needs_build=enabled
    else
        local target_mtime=`mtime $target_file`
        if [ $target_mtime -le "`mtime $source_file`" ] || \
           [ $target_mtime -le "`mtime $config_file`" ] || \
           [ $target_mtime -le "`mtime $xsl_file`" ] ; then
            needs_build=enabled
        fi
    fi
    if [ "$needs_build" ] ; then
        set_classpath \
            $java_lib_dir/mumie-util.jar \
            $base_dir/java/lib/mumie-japs-build.jar
        echo "$program_name: calling XSLTransformer"
        run_cmd $java_cmd net.mumie.japs.build.XSLTransformer \
            --stylesheet=$xsl_file \
            --input=$config_file \
            --output=$target_file \
            --param roles.source.filename=$source_file
    fi
    echo "$program_name: creating roles done"
    create_roles_done=done
    run_cmd cd $base_dir
}
# Installs the configuration files except sitemap.xmap. Before a file in the
# webapp is overwritten for the first time, the original is saved to the
# uninstall directory so uninstall_config can restore it later.
function install_config
{
    if [ -z "$create_xconf_done" ] && [ -z "$ignore_deps" ] ; then
        create_xconf
    fi
    if [ -z "$create_roles_done" ] && [ -z "$ignore_deps" ] ; then
        create_roles
    fi
    printf '%s\n' '' '======================================================================' 'Installing config files' '======================================================================' ''
    run_cmd mkdir -pv $base_dir/uninstall
    local uninstall_dir=$base_dir/uninstall
    local config_dir=$base_dir/config
    local config_file
    for config_file in $config_install_files ; do
        # Back up the original on first installation only:
        if [ -e "$webapp_webinf/$config_file" ] && [ ! -e "$uninstall_dir/$config_file" ] ; then
            run_cmd cp -v "$webapp_webinf/$config_file" $uninstall_dir
        fi
        # Install the config file:
        run_cmd cp -v "$config_dir/$config_file" $webapp_webinf
    done
    run_cmd cd $base_dir
    echo "$program_name: installing config files done"
    install_config_done=done
}
# Uninstalls the configuration files except sitemap.xmap: removes each
# installed file from WEB-INF and restores the backed-up original (if one was
# saved by install_config) from the uninstall directory.
function uninstall_config
{
    echo
    echo "======================================================================"
    echo "Uninstalling config files"
    echo "======================================================================"
    echo
    local config_file
    local uninstall_dir=$base_dir/uninstall
    # (Removed unused "local config_dir" — this function never reads it.)
    for config_file in $config_install_files ; do
        run_cmd rm -vf $webapp_webinf/$config_file
        # Restore the saved original, if a backup exists:
        if [ -e "$uninstall_dir/$config_file" ] ; then
            run_cmd mv -v $uninstall_dir/$config_file $webapp_webinf
        fi
    done
    run_cmd cd $base_dir
    echo "$program_name: uninstalling config files done"
    uninstall_config_done=done
}
# Installs the "file system content" files into <webapp>/fs_content.
function install_fs_content
{
    echo
    echo "======================================================================"
    echo "Installing fs content"
    echo "======================================================================"
    echo
    run_cmd mkdir -pv $webapp_root/fs_content
    run_cmd cd $base_dir/fs_content
    echo "$program_name: changed into fs_content/"
    cp_to_dir $webapp_root/fs_content $fsc_install_files
    # Fixed: message previously printed the literal text "program_name:"
    # because the "$" of the variable reference was missing.
    echo "$program_name: installing fs content done"
    install_fs_content_done=done
    run_cmd cd $base_dir
}
# Uninstalls the "file system content" files from <webapp>/fs_content.
function uninstall_fs_content
{
    echo
    echo "======================================================================"
    echo "Uninstalling fs content"
    echo "======================================================================"
    echo
    if [ -e $webapp_root/fs_content ] ; then
        run_cmd cd $webapp_root/fs_content
        # Fixed: both messages below were missing the "$" of $program_name.
        echo "$program_name: changed into $webapp_root/fs_content"
        # Route the removal through run_cmd for consistency with every other
        # destructive operation in this script:
        run_cmd rm -vf $fsc_install_files
    fi
    echo "$program_name: uninstalling fs content done"
    uninstall_fs_content_done=done
    run_cmd cd $base_dir
}
# Creates the checkin.zip archive: collects every *.meta.xml, *.content.*,
# and *.src.* file (following symlinks, skipping backup files ending in "~")
# under $checkin_root and zips them, preserving relative paths.
function create_checkin_zip
{
echo
echo "======================================================================"
echo "Creating checkin.zip"
echo "======================================================================"
echo
run_cmd rm -vf $base_dir/checkin/checkin.zip
run_cmd cd $checkin_root
echo "$program_name: changed into $checkin_root"
# find -L follows symlinks; egrep keeps only meta/content/src files;
# zip -@ reads the file list from stdin.
run_cmd find -L -name "*.*" \
| egrep '\.meta\.xml$|\.content\.[^.~]+$|\.src\.[^.~]+$' \
| zip -@ $base_dir/checkin/checkin.zip
echo "$program_name: creating checkin.zip done"
create_checkin_zip_done=done
run_cmd cd $base_dir
}
# Creates build_sitemap.xmap and sitemap.xmap.src from their *.tpl templates
# by substituting the @url-prefix@, @qf-applet-mail-to-name@ and
# @qf-applet-mail-to-address@ placeholders with the configured values.
function prepare_sitemaps
{
echo
echo "======================================================================"
echo "Preparing sitemaps"
echo "======================================================================"
echo
[ "$url_prefix" ] || error "url_prefix not set"
run_cmd cd $base_dir/config
echo "$program_name: changed into config/"
local template
for template in build_sitemap.xmap.tpl sitemap.xmap.src.tpl ; do
# Target name = template name without the ".tpl" suffix:
local target=${template%.tpl}
echo "$program_name: Creating $target"
# The values are passed through `quote` so sed treats them literally
# (presumably escapes sed metacharacters — TODO confirm against quote()).
run_cmd cat $template \
| run_cmd sed "s/\@url-prefix\@/`quote $url_prefix`/g" \
| run_cmd sed "s/\@qf-applet-mail-to-name\@/`quote $qf_applet_mail_to_name`/g" \
| run_cmd sed "s/\@qf-applet-mail-to-address\@/`quote $qf_applet_mail_to_address`/g" \
> $target
done
run_cmd cd $base_dir
echo "$program_name: preparing sitemaps done"
prepare_sitemaps_done=done
}
# Installs the sitemap which is used temporarily during the build.
# NOTE(review): the original comment also mentioned checkin defaults, but
# this function only installs the build sitemap — confirm against
# install_checkin_defaults, which handles the defaults file.
function tmp_settings
{
    [ "$prepare_sitemaps_done" ] || [ "$ignore_deps" ] || prepare_sitemaps
    echo
    echo "======================================================================"
    # Fixed banner typo: "Temorary" -> "Temporary".
    echo "Temporary server settings"
    echo "======================================================================"
    echo
    # Backup original sitemap:
    backup_sitemap
    run_cmd cd $base_dir/config
    echo "$program_name: changed into config/"
    # Install build sitemap:
    run_cmd cp -v build_sitemap.xmap $webapp_root/sitemap.xmap
    run_cmd cd $base_dir
    echo "$program_name: temporary server settings done"
    tmp_settings_done=done
}
# Checks-in initial documents and (pseudo-)documents by uploading
# checkin/checkin.zip to the running server's checkin endpoint via the
# JapsUpload build tool, authenticating as the Japs administrator.
function checkin
{
[ "$create_checkin_zip_done" ] || [ "$ignore_deps" ] || create_checkin_zip
echo
echo "======================================================================"
echo "Checkin"
echo "======================================================================"
echo
# JapsUpload needs the japs client, cookie handling and util jars:
set_classpath \
$java_lib_dir/mumie-japs-client.jar \
$java_lib_dir/jcookie-0.8c.jar \
$java_lib_dir/mumie-util.jar \
$base_dir/java/lib/mumie-japs-build.jar
run_cmd $java_cmd \
net.mumie.japs.build.JapsUpload \
--url-prefix=$url_prefix \
--path=protected/checkin/checkin \
--account=$japs_admin_name \
--password=$japs_admin_password \
--file=$base_dir/checkin/checkin.zip
run_cmd cd $base_dir
echo "$program_name: checkin done"
checkin_done=done
}
# Creates the sitemap by uploading sitemap.xmap.src to the running server's
# create-sitemap endpoint and saving the server's response as sitemap.xmap.
# Rebuilds when forced, when the target is missing, or when the target is
# not newer than the source.
function create_sitemap
{
[ "$create_build_tools_done" ] || [ "$ignore_deps" ] || create_build_tools
[ "$install_fs_content_done" ] || [ "$ignore_deps" ] || install_fs_content
[ "$prepare_sitemaps_done" ] || [ "$ignore_deps" ] || prepare_sitemaps
echo
echo "======================================================================"
echo "Creating sitemap"
echo "======================================================================"
echo
local target_file=$base_dir/config/sitemap.xmap
local source_file=$base_dir/config/sitemap.xmap.src
if [ "$force" ] || \
[ ! -e $target_file ] || \
[ "`mtime $target_file`" -le "`mtime $source_file`" ]
then
echo "$program_name: calling JapsUpload"
set_classpath \
$java_lib_dir/mumie-japs-client.jar \
$java_lib_dir/jcookie-0.8c.jar \
$java_lib_dir/mumie-util.jar \
$base_dir/java/lib/mumie-japs-build.jar
# Upload the sitemap source; the transformed sitemap is written to
# $target_file via --output:
run_cmd $java_cmd \
net.mumie.japs.build.JapsUpload \
--url-prefix=$url_prefix \
--path=protected/admin/create-sitemap \
--account=$japs_admin_name \
--password=$japs_admin_password \
--file=$source_file \
--output=$target_file
fi
run_cmd cd $base_dir
echo "$program_name: creating sitemap done"
create_sitemap_done=done
}
# Installs the generated sitemap into the webapp root, backing up the
# currently installed sitemap first.
function install_sitemap
{
    printf '%s\n' '' '======================================================================' 'Installing sitemap' '======================================================================' ''
    # Save the sitemap currently in place before overwriting it:
    backup_sitemap
    run_cmd cp -v $base_dir/config/sitemap.xmap $webapp_root
    run_cmd cd $base_dir
    echo "$program_name: installing sitemap done"
    install_sitemap_done=done
}
# Uninstalls the sitemap: restores the backed-up sitemap.xmap from the
# uninstall directory into the webapp root, if a backup exists.
function uninstall_sitemap
{
    echo
    echo "======================================================================"
    echo "Uninstalling sitemap"
    echo "======================================================================"
    echo
    # (Removed unused "local config_file" — this function never reads it.)
    local uninstall_dir=$base_dir/uninstall
    if [ -e "$uninstall_dir/sitemap.xmap" ] ; then
        run_cmd mv -v "$uninstall_dir/sitemap.xmap" $webapp_root
    fi
    run_cmd cd $base_dir
    echo "$program_name: uninstalling sitemap done"
    uninstall_sitemap_done=done
}
# Copies config/checkin_defaults.xml into the webapp's WEB-INF directory.
function install_checkin_defaults
{
    printf '%s\n' '' '======================================================================' 'Installing checkin defaults' '======================================================================' ''
    run_cmd cp -v $base_dir/config/checkin_defaults.xml $webapp_webinf
    run_cmd cd $base_dir
    echo "$program_name: installing checkin defaults done"
    install_checkin_defaults_done=done
}
# Removes checkin_defaults.xml from the webapp's WEB-INF directory.
function uninstall_checkin_defaults
{
    printf '%s\n' '' '======================================================================' 'Uninstalling checkin defaults' '======================================================================' ''
    run_cmd rm -fv $webapp_webinf/checkin_defaults.xml
    run_cmd cd $base_dir
    echo "$program_name: uninstalling checkin defaults done"
    uninstall_checkin_defaults_done=done
}
# Creates the API documentation for the Japs Java library and the build tools
# by delegating to the corresponding Ant buildfiles.
function create_java_apidocs
{
    printf '%s\n' '' '======================================================================' 'Creating Java apidocs' '======================================================================' ''
    run_ant -buildfile tools/ant/buildfiles/java_apidocs.xml
    run_ant -buildfile tools/ant/buildfiles/build_tools.xml apidocs
    echo "$program_name: java apidocs done"
    create_java_apidocs_done=done
}
# Creates the distribution: checks the tagged release out of CVS into dist/,
# writes the version file, and packs everything into japs_<release>.tgz.
# Requires --release; uses --cvsroot (or the environment's $CVSROOT) for the
# checkout.
function create_dist
{
    echo
    echo "======================================================================"
    echo "Creating distribution"
    echo "======================================================================"
    echo
    [ "$release" ] || error "No release specified"
    run_cmd cd $base_dir
    run_cmd mkdir -pv dist
    run_cmd cd dist
    echo "$program_name: Changed into dist/"
    echo "$program_name: Checking-out release"
    local dist_name="japs_${release}"
    local archive="${dist_name}.tgz"
    # CVS tag convention: release 1.2.3 is tagged ver-1-2-3:
    local tag="ver-`echo $release | tr '.' '-'`"
    run_cmd rm -rfv $dist_name
    run_cmd rm -fv $archive
    local cvscmd=cvs
    [ "$cvsroot" ] && cvscmd="cvs -d $cvsroot"
    run_cmd $cvscmd export -r $tag japs
    run_cmd mv -v japs $dist_name
    echo "$program_name: Creating version file"
    # NOTE(review): the redirection captures run_cmd's stdout, which may
    # include run_cmd's own logging in addition to the echoed release —
    # verify against run_cmd's implementation.
    run_cmd echo $release > $dist_name/$version_file
    echo "$program_name: Creating tgz"
    run_cmd tar czf $archive $dist_name
    run_cmd cd $base_dir
    # Added for consistency with every other target function:
    echo "$program_name: creating distribution done"
    create_dist_done=done
}
# Creates the special variant of the Japs Java library used by the
# Mathlet Factory, via the corresponding Ant buildfile.
function create_japs_lib_for_mf
{
    printf '%s\n' '' '======================================================================' 'Creating Japs lib for matheletfactory' '======================================================================' ''
    run_ant -buildfile tools/ant/buildfiles/japs_lib_for_mf.xml
    echo "$program_name: japs lib done"
    create_japs_lib_for_mf_done=done
}
# Creates a keystore for the sign helper in the config directory, using
# keytool with the configured alias, distinguished name, passwords, validity
# and store type. Requires sign_key_dn to be set.
function create_signhelper_keystore
{
    echo
    echo "======================================================================"
    echo "Creating signhelper keystore"
    echo "======================================================================"
    echo
    # Fixed error message typo: "Nor" -> "No".
    [ "$sign_key_dn" ] || error "No distinguished name for sign keystore specified"
    local local_keystore_filename=`basename "$sign_keystore_file"`
    check_exit_code
    run_cmd keytool -genkeypair \
        -alias "$sign_key_alias" \
        -dname "$sign_key_dn" \
        -keypass "$sign_keystore_password" \
        -validity "$sign_key_validity" \
        -storetype "$sign_keystore_type" \
        -keystore "$base_dir/config/$local_keystore_filename" \
        -storepass "$sign_keystore_password"
    echo "$program_name: creating signhelper keystore done"
    create_signhelper_keystore_done=done
}
# Copies the sign helper keystore from the config directory to its
# installation location ($sign_keystore_file).
function install_signhelper_keystore
{
    printf '%s\n' '' '======================================================================' 'Installing signhelper keystore' '======================================================================' ''
    local local_keystore_filename=`basename "$sign_keystore_file"`
    check_exit_code
    run_cmd cp -v $base_dir/config/$local_keystore_filename $sign_keystore_file
    echo "$program_name: installing signhelper keystore done"
    install_signhelper_keystore_done=done
}
# Removes the sign helper keystore from its installation location.
function uninstall_signhelper_keystore
{
    printf '%s\n' '' '======================================================================' 'Uninstalling signhelper keystore' '======================================================================' ''
    run_cmd rm -vf $sign_keystore_file
    echo "$program_name: uninstalling signhelper keystore done"
    uninstall_signhelper_keystore_done=done
}
# Creates the receipt directory (including missing parents).
function create_receipt_dir
{
    printf '%s\n' '' '======================================================================' 'Creating receipt directory' '======================================================================' ''
    run_cmd mkdir -pv $receipt_dir
    echo "$program_name: creating receipt directory done"
    create_receipt_dir_done=done
}
# --------------------------------------------------------------------------------
# Functions implementing tasks
# --------------------------------------------------------------------------------
# Processes the targets: maps each target keyword in $targets to the function
# implementing it. Unknown targets abort with exit code 3.
function process_targets
{
    local target
    for target in $targets ; do
        case $target in
            build-tools)
                create_build_tools ;;
            notion-class-sources)
                create_notion_class_sources ;;
            db-helper-sources)
                create_db_helper_sources ;;
            doc-class-sources)
                create_document_class_sources ;;
            japs-lib)
                create_japs_lib ;;
            install-libs)
                install_libs ;;
            rootxsl)
                create_rootxsl ;;
            # Fixed: the "notion-xsl)" arm was duplicated; the second copy was
            # unreachable (the first matching pattern wins in a case statement).
            notion-xsl)
                create_notion_xsl ;;
            db-user)
                create_db_user ;;
            drop-db-user)
                drop_db_user ;;
            create-db)
                create_db ;;
            drop-db)
                drop_db ;;
            db-core-sql)
                create_db_core_sql ;;
            db-tables)
                create_db_tables ;;
            db-functions)
                create_db_functions ;;
            db)
                build_db ;;
            xconf)
                create_xconf ;;
            roles)
                create_roles ;;
            install-config)
                install_config ;;
            install-fs-content)
                install_fs_content ;;
            checkin-zip)
                create_checkin_zip ;;
            prepare-sitemaps)
                prepare_sitemaps ;;
            tmp-settings)
                tmp_settings ;;
            receipt-dir)
                create_receipt_dir ;;
            checkin)
                checkin ;;
            sitemap)
                create_sitemap ;;
            install-sitemap)
                install_sitemap ;;
            uninstall-libs)
                uninstall_libs ;;
            uninstall-config)
                uninstall_config ;;
            uninstall-fs-content)
                uninstall_fs_content ;;
            uninstall-sitemap)
                uninstall_sitemap ;;
            all-step1)
                create_japs_lib
                install_libs
                build_db
                install_config
                install_fs_content
                tmp_settings
                create_receipt_dir ;;
            all-step2)
                checkin
                create_sitemap ;;
            all-step3)
                install_sitemap ;;
            apidocs)
                create_java_apidocs ;;
            dist)
                create_dist ;;
            japs-lib-for-mf)
                create_japs_lib_for_mf ;;
            signhelper-keystore)
                create_signhelper_keystore ;;
            install-signhelper-keystore)
                install_signhelper_keystore ;;
            uninstall-signhelper-keystore)
                uninstall_signhelper_keystore ;;
            *)
                echo "ERROR: Unknown target: $target"
                exit 3 ;;
        esac
    done
    echo
    echo "$program_name: BUILD DONE"
    echo
}
# Sanity-checks the environment: required jars, Tomcat, the webapp, the
# postgresql driver, and psql on the PATH. Emits a warning for each missing
# piece and prints the warning count ($numwarn is maintained by warning()).
function check
{
    if [ ! -e "$java_lib_dir/mumie-util.jar" ] ; then
        warning "mumie-util.jar not found in $java_lib_dir"
    fi
    if [ ! -e "$java_lib_dir/mumie-sql.jar" ] ; then
        warning "mumie-sql.jar not found in $java_lib_dir"
    fi
    if [ ! -e "$tomcat_home" ] ; then
        warning "$tomcat_home not found"
    fi
    if [ ! -e "$webapp_root" ] ; then
        warning "$webapp_root not found"
    fi
    if [ ! -e "$webapp_lib_dir/postgresql.jar" ] ; then
        warning "postgresql.jar not found in $webapp_lib_dir"
    fi
    if ! which psql > /dev/null 2>&1 ; then
        warning "psql not found in PATH"
    fi
    if [ $numwarn -gt 0 ] ; then
        echo "$program_name: $numwarn warning(s)"
    else
        echo "No warnings"
    fi
}
# Dumps all build variables and their current values to stdout (used by the
# --vars command line option). NOTE: the heredoc below is emitted verbatim;
# do not indent it or add comments inside it.
function print_variables
{
cat <<EOF
avalon_framework_apidocs_url = $avalon_framework_apidocs_url
checkin_root = $checkin_root
cocoon_apidocs_url = $cocoon_apidocs_url
cvsroot = $cvsroot
datasheet_apidocs_url = $datasheet_apidocs_url
db_admin_name = $db_admin_name
db_admin_password = $db_admin_password
db_encoding = $db_encoding
db_host = $db_host
db_name = $db_name
db_port = $db_port
db_user_name = $db_user_name
db_user_password = $db_user_password
doc_dir = $doc_dir
excalibur_xmlutil_apidocs_url = $excalibur_xmlutil_apidocs_url
force = $force
ignore_deps = $ignore_deps
japs_admin_name = $japs_admin_name
japs_admin_password = $japs_admin_password
java_cmd = $java_cmd
java_lib_dir = $java_lib_dir
javac_debug = $javac_debug
javac_deprecation = $javac_deprecation
javac_verbose = $javac_verbose
jdk_apidocs_url = $jdk_apidocs_url
mail_domain = $mail_domain
mail_from_address = $mail_from_address
mail_from_name = $mail_from_name
mail_reply_address = $mail_reply_address
mail_reply_name = $mail_reply_name
mail_smtp_host = $mail_smtp_host
password_encryptor_class = $password_encryptor_class
prefix = $prefix
qf_applet_mail_to_address = $qf_applet_mail_to_address
qf_applet_mail_to_name = $qf_applet_mail_to_name
release = $release
sign_key_alias = $sign_key_alias
sign_key_dn = $sign_key_dn
sign_key_password = $sign_key_password
sign_keystore_file = $sign_keystore_file
sign_keystore_password = $sign_keystore_password
sign_keystore_type = $sign_keystore_type
sign_keystore_validity = $sign_keystore_validity
targets = $targets
task = $task
tomcat_home = $tomcat_home
upload_dir = $upload_dir
url_prefix = $url_prefix
webapp_lib_dir = $webapp_lib_dir
webapp_name = $webapp_name
webapp_root = $webapp_root
webapp_webinf = $webapp_webinf
EOF
}
# Prints the list of all build targets with a short description of each
# (used by the --targets/-t command line option). NOTE: the heredoc below is
# emitted verbatim; do not indent it or add comments inside it.
function show_targets
{
cat <<EOF
build-tools - Creates the build tools
notion-class-sources - Creates the notion class sources
db-helper-sources - Creates the db helper sources
doc-class-sources - Creates the document class sources
rootxsl - Creates the rootxsl stylesheet
japs-lib - Creates the Japs Java library
install-libs - Installs the Java libraries in the WEB-INF/lib directory
notion-xsl - Creates the "notion" XSL stylesheets db_notions.xsl
and xml_notions.xsl
db-user - Creates the db user (if not exists already)
drop-db-user - Drops the db user (if exists)
create-db - Creates the db (if not exists already)
drop-db - Drops the db (if exists)
db-core-sql - Creates the SQL code for the core db structures
db-tables - Creates the db tables
db-functions - Creates the db functions
db - Creates the db, db tables, functions, and views
xconf - Creates the cocoon.xconf file
roles - Creates the mumie.roles file
install-config - Installs the configuration files except sitemap.xmap
install-fs-content - Installs the "file system content" files
prepare-sitemaps - Creates the build sitemap and the sitemap XML source from
*.tpl templates
tmp-settings - Installs a temporary sitemap and checkin defaults file for
use during the build
checkin-zip - Creates the checkin.zip archive
checkin - Checks-in initial documents and pseudo-documents
sitemap - Creates the sitemap
install-sitemap - Installs the sitemap
uninstall-libs - Uninstalls the Java libraries
uninstall-config - Uninstalls the configuration files except sitemap.xmap
uninstall-fs-content - Uninstalls the "file system content" files
uninstall-sitemap - Uninstalls the sitemap
all-step1 - Complete build, step 1: creates and installs the Japs Java
library; creates the db with tables, functions, and views,
creates and installs the configuration files except
sitemap.xmap; installs the "file system content" files,
installs the checkin defaults file
all-step2 - Complete build, step 2: checks-in initial documents and
pseudo-documents, creates the sitemap
all-step3 - Complete build, step 3: installs the sitemap
apidocs - Creates the API documentation for the Japs libraries and
the build tools
dist - Creates a distribution
japs-lib-for-mf - Creates a special version of the Java library for the
Mathelt Factory
signhelper-keystore - Creates a keystore for the sign helper
install-signhelper-keystore
- Installs the keystore for the sign helper
uninstall-signhelper-keystore
- Uninstalls the keystore for the sign helper
EOF
}
# Prints the usage/help text (used by the --help/-h command line option).
# NOTE: the heredoc below is emitted verbatim; do not indent it or add
# comments inside it.
function show_help
{
cat <<EOF
Usage:
./build.sh [OPTIONS] [TARGETS]
Description:
Builds and/or installs the japs package, or parts of it. What is
actually done is controlled by TARGETS, which is a list of keywords called
targets. Type ./build.sh -t to get a list of all targets. The default target
is "japs-lib"; it is assumed if no targets are specified.
Options:
--targets, -t
List all targets
--force, -f
Create files even if they are up-to-date.
--ignore-deps, -D
Ignore target dependencies. If a target is build with this option,
then targets required by this target are not build automatically.
--javac-verbose
Turns the "verbose" flag on when compiling the java sources.
--javac-debug
Turns the "debug" flag on when compiling the java sources.
--javac-deprecation
Turns the "deprecation" flag on when compiling the java sources.
--release=VERSION_NUMBER
Set the release for the distribution to build. In effect only with
the "dist" target, otherwise ignored.
--cvsroot=CVSROOT
Set the cvs root for retrieving the distribution to build. In effect
only with the "dist" target, otherwise ignored. If not set, the
environment variable \$CVSROOT is used
--vars
Prints the build variables to stdout
--check
Performs some checks
--help, -h
Print this help text and exit.
--version, -v
Print version information and exit.
EOF
}
# Prints the version of this build script (--version/-v).
function show_version
{
    # Quoted to protect the value from word splitting and globbing:
    echo "$program_version"
}
# Dispatch: invoke the function selected during option parsing
# ($task holds the name of one of the functions defined above).
$task
|
SCRIPT_DIR=$(cd $(dirname $0); pwd)

##### Set up the vsftpd FTP server
echo '-----------install vsftpd-----------'
sudo apt -y install vsftpd # install the FTP server
# Renamed from the misspelled FTP_CONFIG_TRGET:
FTP_CONFIG_TARGET=/etc/vsftpd.conf
# Save a pristine copy of the distribution config the first time we run:
if [ ! -e "${FTP_CONFIG_TARGET}-default" ] ; then
    sudo cp "$FTP_CONFIG_TARGET" "${FTP_CONFIG_TARGET}-default"
fi
TIME=$(date)
# Append our settings (tee -a under sudo, stdout discarded). $TIME is kept
# inside the quotes so the date's spacing survives intact.
echo "#-----------change by setup.bash: $TIME-----------" | sudo tee -a "$FTP_CONFIG_TARGET" > /dev/null
echo 'local_enable=YES' | sudo tee -a "$FTP_CONFIG_TARGET" > /dev/null
echo 'write_enable=YES' | sudo tee -a "$FTP_CONFIG_TARGET" > /dev/null
echo 'local_umask=022' | sudo tee -a "$FTP_CONFIG_TARGET" > /dev/null
echo 'ascii_upload_enable=YES' | sudo tee -a "$FTP_CONFIG_TARGET" > /dev/null
echo 'ascii_download_enable=YES' | sudo tee -a "$FTP_CONFIG_TARGET" > /dev/null
cat "$FTP_CONFIG_TARGET"
sudo service vsftpd start # (re)start the FTP server

##### Shortcut bash file (currently disabled)
#echo '-----------install vsftpd-----------'
#SHORTCUT_BASH=~/start_communication.bash
#echo "cd "$SCRIPT_DIR>$SHORTCUT_BASH
#echo "python3 processControlMain.py">>$SHORTCUT_BASH

##### Install the watchdog file-monitoring module via pip
# (for python2.7, pin watchdog to a version <= 1.0.0)
echo '-----------install watchdog-----------'
#sudo apt-get install python-pip
#sudo apt-get install python3-pip
#pip install watchdog==0.10.6
sudo pip3 install watchdog

###### PHP server setup (currently disabled)
#sudo apt update
#sudo apt install apache2 -y
#sudo apt install php libapache2-mod-php -y
#sudo apt install apache2 -y
#sudo service apache2 start
###### move default file to /var/www/scripts/ (currently disabled)
#PYTHON_FOLDER=scripts
#cd /var/www/
#mkdir $PYTHON_FOLDER
#cp $SCRIPT_DIR/../php/file_upload.php ./
#cp $SCRIPT_DIR/../html/send_form.html ./
#cp $SCRIPT_DIR/../scripts/main.py ./$PYTHON_FOLDER
#cp $SCRIPT_DIR/../scripts/start.py ./$PYTHON_FOLDER
#cp $SCRIPT_DIR/../scripts/stop.py ./$PYTHON_FOLDER
#cp $SCRIPT_DIR/../scripts/reset.py ./$PYTHON_FOLDER
###### set chmod of /var/www/ (currently disabled)
#sudo chmod -R 777 /var/www/

###### Make the start script and the scripts/ tree world-accessible
echo '-----------set chmod to scripts/-----------'
sudo chmod 777 $SCRIPT_DIR/start_communication.sh
sudo chmod -R 777 $SCRIPT_DIR/../scripts/

###### Open the firewall for the local subnet
echo '-----------setup UDP port-----------'
sudo apt -y install ufw
#sudo ufw allow 22
#sudo ufw allow 21
#sudo ufw allow 60000
#sudo ufw allow 60001
sudo ufw allow from 192.168.100.0/24
sudo ufw enable
sudo ufw reload

###### Register the start script for automatic start-up
echo '-----------setup auto start file-----------'
TARGET_SHELL=start_communication.sh
### auto start setup, CUI version (via /etc/rc.local)
AUTO_SHELL=/etc/rc.local
if [ ! -e "${AUTO_SHELL}-default" ] ; then
    sudo cp "$AUTO_SHELL" "${AUTO_SHELL}-default"
    # Drop the trailing "exit 0", append our command, re-append "exit 0":
    sudo sed -i '/exit 0/d' "$AUTO_SHELL"
    command=$SCRIPT_DIR/$TARGET_SHELL'>'$SCRIPT_DIR/../output.log' &'
    # "sudo echo" was pointless (tee already runs under sudo); plain echo,
    # with the variable quoted so the ">" and "&" survive verbatim:
    echo "$command" | sudo tee -a "$AUTO_SHELL" > /dev/null
    echo 'exit 0' | sudo tee -a "$AUTO_SHELL" > /dev/null
else
    # Fixed message grammar ("is already exist"):
    echo 'default file already exists. skip.'
fi
### auto start setup for GUI version (currently disabled)
#FTP_START_TARGET=ftp_start.desktop
#FTP_START_DIR=~/.config/autostart
#if [ ! -d $FTP_START_DIR ];then
# mkdir $FTP_START_DIR
# cd $_
# touch $FTP_START_TARGET
# echo '[Desktop Entry]' | sudo tee -a $FTP_START_TARGET>/dev/null
# echo 'Exec=lxterminal -e '$SCRIPT_DIR'/../shell/'$TARGET_SHELL | sudo tee -a $FTP_START_TARGET>/dev/null
# echo 'Type=Application' | sudo tee -a $FTP_START_TARGET>/dev/null
# echo 'Name=FTP_server' | sudo tee -a $FTP_START_TARGET>/dev/null
# echo 'Terminal=true' | sudo tee -a $FTP_START_TARGET>/dev/null
#fi

###### Select the header folder for this robot and record it in temp.txt
echo '-----------setup header file for robot-----------'
echo 'please select this header folder...'
cd ${SCRIPT_DIR}/../
if [ -e temp.txt ] ; then
    rm temp.txt
fi
select VAR in exit robot_arm wheel_robot
do
    echo 'your selected item is '$VAR
    # Fixed: $VAR must be quoted — an invalid selection leaves it empty and
    # the unquoted test "[ = 'exit' ]" was a runtime syntax error.
    if [ "$VAR" = 'exit' ] ; then
        break
    else
        echo -n "$VAR" >> temp.txt
    fi
done
# Fixed typo: "proccess" -> "process".
echo 'complete the set up process'
|
<gh_stars>10-100
/*---------------------------------------------------------------------------*\
| Subject: document.cookie
| NameSpace: System.Net.MzCookie
| Author: meizz
| Created: 2004-12-07
| Version: 2006-04-03
|-------------------------------------------------------------
| MSN: <EMAIL> QQ: 112889082 http://www.meizz.com
| Email: <EMAIL> CSDN ID:meizz Copyright (c) meizz
\*---------------------------------------------------------------------------*/
/**
 * Cookie helper around document.cookie. A fresh instance defaults to:
 * path "/", an expiry three days from now, and no domain/secure flags.
 */
function MzCookie()
{
    var threeDays = 1000 * 60 * 60 * 24 * 3;
    var expiry = new Date();
    expiry.setTime(expiry.getTime() + threeDays); // keep cookies for 3 days
    this.path = "/";
    this.expires = expiry;
    this.domain = "";
    this.secure = "";
}
// Presumably registers MzCookie under the System namespace via the
// framework's Extends() helper — verify against that helper's definition.
MzCookie.Extends(System, "MzCookie");
// Writes cookie `name` with `value` (escape()-encoded, pairing with the
// unescape() used by get()) to document.cookie, applying this instance's
// expires/path settings and optional domain/secure attributes.
MzCookie.prototype.add = function(name, value)
{
    var pieces = [
        name + "=" + escape(value),
        "expires=" + this.expires.toGMTString(),
        "path=" + this.path
    ];
    var cookie = pieces.join(";");
    if (this.domain != "") cookie += "; domain=" + this.domain;
    if (this.secure) cookie += "; secure";
    document.cookie = cookie;
};
//name: cookie name
// Returns the unescape()-decoded value of the cookie `name`, or null if no
// such cookie exists. The regex anchors the name at the start of
// document.cookie or after a separating space, and captures up to ";".
// NOTE(review): `name` is interpolated into the regex verbatim — names
// containing regex metacharacters would misbehave.
MzCookie.prototype.get = function(name)
{
var arr,reg=new RegExp("(^| )"+name+"=([^;]*)(;|$)");
if(arr=document.cookie.match(reg)) return unescape(arr[2]);
else return null;
};
//name: cookie name
// Deletes the cookie `name` (if present) by re-writing it with an expiry
// one millisecond in the past, on this instance's path.
MzCookie.prototype.remove = function(name)
{
var now = new Date();
now.setTime(now.getTime() - 1);
// Only rewrite if the cookie actually exists:
var V = this.get(name);
if(V!=null) document.cookie= name + "="+ V
+";expires="+ now.toGMTString() + ";path="+ this.path;
};
// Sets this instance's expiry to `milliseconds` from now; subsequent add()
// calls will use it.
MzCookie.prototype.setExpires = function(milliseconds)
{
    var when = new Date();
    when.setTime(when.getTime() + milliseconds);
    this.expires = when;
};
|
<reponame>hhaip/langtaosha<gh_stars>1-10
package cn.lts.common.util;
import java.awt.BasicStroke;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Shape;
import java.awt.geom.RoundRectangle2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Hashtable;
import javax.imageio.ImageIO;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.EncodeHintType;
import com.google.zxing.MultiFormatWriter;
import com.google.zxing.common.BitMatrix;
/**
 * Generates QR codes with an embedded logo image.
 * @project uparty-common
 * @author czz
 * @date 2015-05-07 15:48:30
 */
public class LogoQrCodeUtils {
// Character set used when encoding the QR content:
private static final String CHARSET = "utf-8";
// Output image format (used by callers writing the image to disk):
private static final String FORMAT_NAME = "PNG";
/**
 * Renders {@code content} as a QR code and, if {@code imgPath} is given,
 * draws that image as a centered logo.
 *
 * @param content      the text to encode
 * @param imgPath      path of the logo image file; {@code null} or "" for no logo
 * @param qrcode_size  requested QR code edge length in pixels (the result is
 *                     cropped to the enclosing rectangle of the modules, so
 *                     the returned image may be smaller)
 * @param log_size     maximum logo edge length in pixels
 * @param needCompress whether to scale the logo down to {@code log_size}
 * @return the QR code (black modules on white) as a {@link BufferedImage}
 * @throws Exception if encoding fails or the logo cannot be read
 */
public static BufferedImage createImage(String content, String imgPath,int qrcode_size,int log_size,boolean needCompress) throws Exception {
Hashtable<EncodeHintType, Object> hints = new Hashtable<EncodeHintType, Object>();
hints.put(EncodeHintType.CHARACTER_SET, CHARSET);
hints.put(EncodeHintType.MARGIN, 0);
BitMatrix bitMatrix = new MultiFormatWriter().encode(content,BarcodeFormat.QR_CODE, qrcode_size, qrcode_size, hints);
// Crop to the enclosing rectangle of the set modules (strips the quiet
// zone the writer may still have produced despite MARGIN=0):
int[] rec = bitMatrix.getEnclosingRectangle();
int resWidth = rec[2] + 1;
int resHeight = rec[3] + 1;
BitMatrix resMatrix = new BitMatrix(resWidth, resHeight);
resMatrix.clear();
for (int i = 0; i < resWidth; i++) {
for (int j = 0; j < resHeight; j++) {
if (bitMatrix.get(i + rec[0], j + rec[1])) {
resMatrix.set(i, j);
}
}
}
// Rasterize: set module -> opaque black, unset -> opaque white.
int width = resMatrix.getWidth();
int height = resMatrix.getHeight();
BufferedImage image = new BufferedImage(width, height,BufferedImage.TYPE_INT_RGB);
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
image.setRGB(x, y, resMatrix.get(x, y) ? 0xFF000000 : 0xFFFFFFFF);
}
}
if (imgPath == null || "".equals(imgPath)) {
return image;
}
// Insert the logo image into the center of the QR code:
insertImage(image, imgPath,width,log_size,needCompress);
return image;
}
/**
* 插入LOGO
*
* @param source
* 二维码图片
* @param imgPath
* LOGO图片地址
* @param needCompress
* 是否压缩
* @throws Exception
*/
private static void insertImage(BufferedImage source, String imgPath,int QRCODE_SIZE,int log_size,boolean needCompress) throws Exception {
File file = new File(imgPath);
if (!file.exists()) {
System.err.println(""+imgPath+" 该文件不存在!");
return;
}
Image src = ImageIO.read(new File(imgPath));
int width = src.getWidth(null);
int height = src.getHeight(null);
if (needCompress) { // 压缩LOGO
if (width > log_size) {
width = log_size;
}
if (height > log_size) {
height = log_size;
}
Image image = src.getScaledInstance(width, height,Image.SCALE_SMOOTH);
BufferedImage tag = new BufferedImage(width, height,BufferedImage.TYPE_INT_RGB);
Graphics g = tag.getGraphics();
g.drawImage(image, 0, 0, null); // 绘制缩小后的图
g.dispose();
src = image;
}
// 插入LOGO
Graphics2D graph = source.createGraphics();
int x = (QRCODE_SIZE - width) / 2;
int y = (QRCODE_SIZE - height) / 2;
graph.drawImage(src, x, y, width, height, null);
Shape shape = new RoundRectangle2D.Float(x, y, width, width, 6, 6);
graph.setStroke(new BasicStroke(3f));
graph.draw(shape);
graph.dispose();
}
/**
* 生成二维码(内嵌LOGO)
* @param content 内容地址
* @param imgPath LOGO地址
* @param qrcode_size 生成的二维码的大小
* @param log_size 二维中间log图片的大小
* @param destPath 存放目标地址
* @param needCompress 是否压缩LOGO
* @throws Exception
*/
public static void encode(String content, String imgPath,int qrcode_size,int log_size,String destUrl,boolean needCompress) throws Exception {
BufferedImage image = createImage(content, imgPath,qrcode_size,log_size,needCompress);
File file = new File(destUrl);
if(!file.getParentFile().exists()) {
file.getParentFile().mkdirs();
}
file = new File(destUrl);
if(!file.exists()){
try {
file.createNewFile();
} catch (Exception e) {
e.printStackTrace();
}
}
ImageIO.write(image, FORMAT_NAME, new File(destUrl));
}
/**
* 当文件夹不存在时,mkdirs会自动创建多层目录,区别于mkdir.(mkdir如果父目录不存在则会抛出异常)
* @param destPath 存放目录
*/
public static void mkdirs(String destPath) {
File file =new File(destPath);
if (!file.exists() && !file.isDirectory()) {
file.mkdirs();
}
}
public static void main(String[] args) throws Exception {
encode("http://jianyong2015.xicp.net/menu/transfer?menu=bindRoom&roomId=1&roomCode=mTOrg8A1430982799352", "D:\\test\\img\\pub_logo.png",120,15, "D:\\test\\img\\code120.png", true);
//encode("http://test.uparty.cn/menu/transfer?menu=bindRoom&roomId=1&roomCode=mTOrg8A1430982799352", "D:\\test\\img\\15.png",120,15, "D:\\test\\img\\code100.png", true);
try {
test();
} catch (Exception e) {
// TODO: handle exception
}
}
private static void test() throws Exception{
/**大图片中二维码的大小*/
final int big_code_size = 260;
/**大图片中logo的大小*/
final int big_logo_size = 30;
/**二维码在大图片中的坐标*/
final int big_code_x = 522;
final int big_code_y = 245;
/**小图片中二维码的大小*/
final int small_code_size = 130;
/**小图片中logo的大小*/
final int small_logo_size = 15;
/**二维码在小图片中的坐标*/
final int small_code_x = 15;
final int small_code_y = 15;
/**房间二维码内容详情地址*/
String ROOM_CODE_CONTENT_URL = "/menu/transfer?menu=bindRoom";
String host = "http://test.uparty.cn";
String rootPath = "D:/test/img/out";
String logoUrl = "D:/test/img/pub_logo.png";//logo图片目录
String bigImg = "D:/test/img/in/room_big.png";//大图片目录
String smallImg = "D:/test/img/in/room_small.png";//小图片目录
String contURL = host + ROOM_CODE_CONTENT_URL + "&roomId=1&roomCode=1234567";
String desc = "/1";
//导入大图片
BufferedImage big_img = ImgUtils.loadImageLocal(bigImg);
//创建大二维码流
BufferedImage big_code = createImage(contURL, logoUrl, big_code_size, big_logo_size, true);
//二维码定位写入
BufferedImage big_buffer = ImgUtils.modifyBuffer(big_code, big_img, big_code_x, big_code_y);
String bigUrl = rootPath + desc + "/1_big.png";
ImageIO.write(big_buffer, "png", new File(bigUrl));
//导入小图片
BufferedImage small_img = ImgUtils.loadImageLocal(smallImg);
//创建小二维码流
BufferedImage small_code = createImage(contURL, logoUrl, small_code_size, small_logo_size, true);
//二维码定位写入
BufferedImage small_buffer = ImgUtils.modifyBuffer(small_code, small_img, small_code_x, small_code_y);
String smallUrl = rootPath + desc + "/1_small.png";
ImageIO.write(small_buffer, "png", new File(smallUrl));
}
}
|
package vectorwing.farmersdelight.data;
import net.minecraft.data.DataGenerator;
import net.minecraft.data.tags.BlockTagsProvider;
import net.minecraft.data.tags.ItemTagsProvider;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.tags.BlockTags;
import net.minecraft.world.item.Items;
import net.minecraftforge.common.Tags;
import net.minecraftforge.common.data.ExistingFileHelper;
import vectorwing.farmersdelight.common.registry.ModItems;
import vectorwing.farmersdelight.common.tag.ForgeTags;
import vectorwing.farmersdelight.common.tag.ModTags;
import javax.annotation.Nullable;
/**
 * Data generator that emits this mod's item tag JSON files.
 * NOTE: the class name shadows {@code net.minecraft.tags.ItemTags}, which is
 * why the vanilla class is referenced by its fully qualified name below.
 */
public class ItemTags extends ItemTagsProvider
{
public ItemTags(DataGenerator generatorIn, BlockTagsProvider blockTagProvider, String modId, @Nullable ExistingFileHelper existingFileHelper) {
super(generatorIn, blockTagProvider, modId, existingFileHelper);
}
@Override
protected void addTags() {
// Mirror block tags onto the corresponding item tags.
copy(ModTags.WILD_CROPS, ModTags.WILD_CROPS_ITEM);
copy(BlockTags.CARPETS, net.minecraft.tags.ItemTags.CARPETS);
copy(BlockTags.SMALL_FLOWERS, net.minecraft.tags.ItemTags.SMALL_FLOWERS);
// Hook mod items into vanilla item tags.
tag(net.minecraft.tags.ItemTags.TALL_FLOWERS).add(ModItems.WILD_RICE.get());
tag(net.minecraft.tags.ItemTags.PIGLIN_LOVED).add(ModItems.GOLDEN_KNIFE.get());
this.registerModTags();
this.registerForgeTags();
}
// Tags under this mod's own namespace (gameplay behaviour tags).
private void registerModTags() {
tag(ModTags.KNIVES).add(ModItems.FLINT_KNIFE.get(), ModItems.IRON_KNIFE.get(), ModItems.DIAMOND_KNIFE.get(), ModItems.GOLDEN_KNIFE.get(), ModItems.NETHERITE_KNIFE.get());
tag(ModTags.STRAW_HARVESTERS).addTag(ModTags.KNIVES);
tag(ModTags.WOLF_PREY).addTag(ForgeTags.RAW_CHICKEN).addTag(ForgeTags.RAW_MUTTON).add(Items.RABBIT);
tag(ModTags.CABBAGE_ROLL_INGREDIENTS).addTag(ForgeTags.RAW_PORK).addTag(ForgeTags.RAW_FISHES).addTag(ForgeTags.RAW_CHICKEN).addTag(ForgeTags.RAW_BEEF).addTag(ForgeTags.RAW_MUTTON).addTag(ForgeTags.EGGS).addTag(Tags.Items.MUSHROOMS).add(Items.CARROT, Items.POTATO, Items.BEETROOT);
// All sixteen dye variants plus the undyed canvas sign.
tag(ModTags.CANVAS_SIGNS)
.add(ModItems.CANVAS_SIGN.get())
.add(ModItems.WHITE_CANVAS_SIGN.get())
.add(ModItems.ORANGE_CANVAS_SIGN.get())
.add(ModItems.MAGENTA_CANVAS_SIGN.get())
.add(ModItems.LIGHT_BLUE_CANVAS_SIGN.get())
.add(ModItems.YELLOW_CANVAS_SIGN.get())
.add(ModItems.LIME_CANVAS_SIGN.get())
.add(ModItems.PINK_CANVAS_SIGN.get())
.add(ModItems.GRAY_CANVAS_SIGN.get())
.add(ModItems.LIGHT_GRAY_CANVAS_SIGN.get())
.add(ModItems.CYAN_CANVAS_SIGN.get())
.add(ModItems.PURPLE_CANVAS_SIGN.get())
.add(ModItems.BLUE_CANVAS_SIGN.get())
.add(ModItems.BROWN_CANVAS_SIGN.get())
.add(ModItems.GREEN_CANVAS_SIGN.get())
.add(ModItems.RED_CANVAS_SIGN.get())
.add(ModItems.BLACK_CANVAS_SIGN.get());
// addOptional: the Create mod's extendo grip is included only when present.
tag(ModTags.OFFHAND_EQUIPMENT).add(Items.SHIELD).addOptional(new ResourceLocation("create:extendo_grip"));
}
// Tags under the shared "forge" namespace for cross-mod compatibility.
// Pattern: each broad tag (e.g. CROPS) aggregates its specific sub-tags.
@SuppressWarnings("unchecked")
private void registerForgeTags() {
tag(ForgeTags.BREAD).addTag(ForgeTags.BREAD_WHEAT);
tag(ForgeTags.BREAD_WHEAT).add(Items.BREAD);
tag(ForgeTags.COOKED_BACON).add(ModItems.COOKED_BACON.get());
tag(ForgeTags.COOKED_BEEF).add(Items.COOKED_BEEF, ModItems.BEEF_PATTY.get());
tag(ForgeTags.COOKED_CHICKEN).add(Items.COOKED_CHICKEN, ModItems.COOKED_CHICKEN_CUTS.get());
tag(ForgeTags.COOKED_PORK).add(Items.COOKED_PORKCHOP, ModItems.COOKED_BACON.get());
tag(ForgeTags.COOKED_MUTTON).add(Items.COOKED_MUTTON, ModItems.COOKED_MUTTON_CHOPS.get());
tag(ForgeTags.COOKED_EGGS).add(ModItems.FRIED_EGG.get());
tag(ForgeTags.COOKED_FISHES).addTags(ForgeTags.COOKED_FISHES_COD, ForgeTags.COOKED_FISHES_SALMON);
tag(ForgeTags.COOKED_FISHES_COD).add(Items.COOKED_COD, ModItems.COOKED_COD_SLICE.get());
tag(ForgeTags.COOKED_FISHES_SALMON).add(Items.COOKED_SALMON, ModItems.COOKED_SALMON_SLICE.get());
tag(ForgeTags.CROPS).addTags(ForgeTags.CROPS_CABBAGE, ForgeTags.CROPS_ONION, ForgeTags.CROPS_RICE, ForgeTags.CROPS_TOMATO);
tag(ForgeTags.CROPS_CABBAGE).add(ModItems.CABBAGE.get(), ModItems.CABBAGE_LEAF.get());
tag(ForgeTags.CROPS_ONION).add(ModItems.ONION.get());
tag(ForgeTags.CROPS_RICE).add(ModItems.RICE.get());
tag(ForgeTags.CROPS_TOMATO).add(ModItems.TOMATO.get());
tag(ForgeTags.EGGS).add(Items.EGG);
tag(ForgeTags.GRAIN).addTags(ForgeTags.GRAIN_WHEAT, ForgeTags.GRAIN_RICE);
tag(ForgeTags.GRAIN_WHEAT).add(Items.WHEAT);
tag(ForgeTags.GRAIN_RICE).add(ModItems.RICE.get());
tag(ForgeTags.MILK).addTags(ForgeTags.MILK_BUCKET, ForgeTags.MILK_BOTTLE);
tag(ForgeTags.MILK_BUCKET).add(Items.MILK_BUCKET);
tag(ForgeTags.MILK_BOTTLE).add(ModItems.MILK_BOTTLE.get());
tag(ForgeTags.PASTA).addTags(ForgeTags.PASTA_RAW_PASTA);
tag(ForgeTags.PASTA_RAW_PASTA).add(ModItems.RAW_PASTA.get());
tag(ForgeTags.RAW_BACON).add(ModItems.BACON.get());
tag(ForgeTags.RAW_BEEF).add(Items.BEEF, ModItems.MINCED_BEEF.get());
tag(ForgeTags.RAW_CHICKEN).add(Items.CHICKEN, ModItems.CHICKEN_CUTS.get());
tag(ForgeTags.RAW_PORK).add(Items.PORKCHOP, ModItems.BACON.get());
tag(ForgeTags.RAW_MUTTON).add(Items.MUTTON, ModItems.MUTTON_CHOPS.get());
tag(ForgeTags.RAW_FISHES).addTags(ForgeTags.RAW_FISHES_COD, ForgeTags.RAW_FISHES_SALMON, ForgeTags.RAW_FISHES_TROPICAL);
tag(ForgeTags.RAW_FISHES_COD).add(Items.COD, ModItems.COD_SLICE.get());
tag(ForgeTags.RAW_FISHES_SALMON).add(Items.SALMON, ModItems.SALMON_SLICE.get());
tag(ForgeTags.RAW_FISHES_TROPICAL).add(Items.TROPICAL_FISH);
tag(ForgeTags.SALAD_INGREDIENTS).addTags(ForgeTags.SALAD_INGREDIENTS_CABBAGE);
tag(ForgeTags.SALAD_INGREDIENTS_CABBAGE).add(ModItems.CABBAGE.get(), ModItems.CABBAGE_LEAF.get());
tag(ForgeTags.SEEDS).addTags(ForgeTags.SEEDS_CABBAGE, ForgeTags.SEEDS_RICE, ForgeTags.SEEDS_TOMATO);
tag(ForgeTags.SEEDS_CABBAGE).add(ModItems.CABBAGE_SEEDS.get());
tag(ForgeTags.SEEDS_RICE).add(ModItems.RICE.get());
tag(ForgeTags.SEEDS_TOMATO).add(ModItems.TOMATO_SEEDS.get());
tag(ForgeTags.VEGETABLES).addTags(ForgeTags.VEGETABLES_BEETROOT, ForgeTags.VEGETABLES_CARROT, ForgeTags.VEGETABLES_ONION, ForgeTags.VEGETABLES_POTATO, ForgeTags.VEGETABLES_TOMATO);
tag(ForgeTags.VEGETABLES_BEETROOT).add(Items.BEETROOT);
tag(ForgeTags.VEGETABLES_CARROT).add(Items.CARROT);
tag(ForgeTags.VEGETABLES_ONION).add(ModItems.ONION.get());
tag(ForgeTags.VEGETABLES_POTATO).add(Items.POTATO);
tag(ForgeTags.VEGETABLES_TOMATO).add(ModItems.TOMATO.get());
tag(ForgeTags.TOOLS).addTags(ForgeTags.TOOLS_AXES, ForgeTags.TOOLS_KNIVES, ForgeTags.TOOLS_PICKAXES, ForgeTags.TOOLS_SHOVELS);
tag(ForgeTags.TOOLS_AXES).add(Items.WOODEN_AXE, Items.STONE_AXE, Items.IRON_AXE, Items.DIAMOND_AXE, Items.GOLDEN_AXE, Items.NETHERITE_AXE);
tag(ForgeTags.TOOLS_KNIVES).add(ModItems.FLINT_KNIFE.get(), ModItems.IRON_KNIFE.get(), ModItems.DIAMOND_KNIFE.get(), ModItems.GOLDEN_KNIFE.get(), ModItems.NETHERITE_KNIFE.get());
tag(ForgeTags.TOOLS_PICKAXES).add(Items.WOODEN_PICKAXE, Items.STONE_PICKAXE, Items.IRON_PICKAXE, Items.DIAMOND_PICKAXE, Items.GOLDEN_PICKAXE, Items.NETHERITE_PICKAXE);
tag(ForgeTags.TOOLS_SHOVELS).add(Items.WOODEN_SHOVEL, Items.STONE_SHOVEL, Items.IRON_SHOVEL, Items.DIAMOND_SHOVEL, Items.GOLDEN_SHOVEL, Items.NETHERITE_SHOVEL);
}
}
|
#!/bin/bash -e
#
# Copyright (c) 2018 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Write a message to stdout, prefixed with the current UTC timestamp.
function log() {
printf '[%s]: %s\n' "$(date -u)" "$*"
}
# Exit promptly when the container is asked to stop.
trap 'exit' TERM SIGINT
# Tunables, all overridable through the environment.
openvpn_port="${OPENVPN_PORT:-1194}"
tcp_keepalive_time="${TCP_KEEPALIVE_TIME:-7200}"
tcp_keepalive_intvl="${TCP_KEEPALIVE_INTVL:-75}"
tcp_keepalive_probes="${TCP_KEEPALIVE_PROBES:-9}"
tcp_retries2="${TCP_RETRIES2:-5}"
# Server to connect to (the vpn seed server's service name by default).
ENDPOINT="${ENDPOINT:-vpn-seed-server}"
# Write value $2 into kernel parameter file $1, if that file exists.
# $1 and $2 are quoted to prevent word splitting and pathname expansion
# (the original left them unquoted).
function set_value() {
if [ -f "$1" ] ; then
log "Setting $2 on $1"
echo "$2" > "$1"
fi
}
# Apply the TCP keepalive/retry settings to the kernel via /proc.
# NOTE(review): values are passed unquoted; fine for the numeric defaults
# defined above, but confirm callers never supply values with whitespace.
function configure_tcp() {
set_value /proc/sys/net/ipv4/tcp_keepalive_time $tcp_keepalive_time
set_value /proc/sys/net/ipv4/tcp_keepalive_intvl $tcp_keepalive_intvl
set_value /proc/sys/net/ipv4/tcp_keepalive_probes $tcp_keepalive_probes
set_value /proc/sys/net/ipv4/tcp_retries2 $tcp_retries2
}
configure_tcp
# for each cidr config, it looks first at its env var, then a local file (which may be a volume mount), then the default
baseConfigDir="/init-config"
fileServiceNetwork=
filePodNetwork=
fileNodeNetwork=
[ -e "${baseConfigDir}/serviceNetwork" ] && fileServiceNetwork=$(cat ${baseConfigDir}/serviceNetwork)
[ -e "${baseConfigDir}/podNetwork" ] && filePodNetwork=$(cat ${baseConfigDir}/podNetwork)
[ -e "${baseConfigDir}/nodeNetwork" ] && fileNodeNetwork=$(cat ${baseConfigDir}/nodeNetwork)
# Precedence: environment variable, then mounted file, then hard-coded default.
service_network="${SERVICE_NETWORK:-${fileServiceNetwork}}"
service_network="${service_network:-100.64.0.0/13}"
pod_network="${POD_NETWORK:-${filePodNetwork}}"
pod_network="${pod_network:-100.96.0.0/11}"
node_network="${NODE_NETWORK:-${fileNodeNetwork}}"
# The node network is optional; empty means no node routes are accepted.
node_network="${node_network:-}"
# Convert a CIDR (e.g. 100.64.0.0/13) to a dotted-quad netmask
# (e.g. 255.248.0.0), as required by openvpn's route statements.
# Rewritten with pure bash arithmetic:
#  - no dependency on the external `bc` binary;
#  - no state leaks: the original accumulated into undeclared variables
#    (start/end/str), so repeated calls in the same shell corrupted output.
CIDR2Netmask() {
local cidr="$1"
local prefix="${cidr#*/}"
# Build a 32-bit mask with the top $prefix bits set.
local mask=$(( (0xFFFFFFFF << (32 - prefix)) & 0xFFFFFFFF ))
echo "$(( (mask >> 24) & 255 )).$(( (mask >> 16) & 255 )).$(( (mask >> 8) & 255 )).$(( mask & 255 ))"
}
# Split each CIDR into address + netmask for the openvpn template.
service_network_address=$(echo $service_network | cut -f1 -d/)
service_network_netmask=$(CIDR2Netmask $service_network)
pod_network_address=$(echo $pod_network | cut -f1 -d/)
pod_network_netmask=$(CIDR2Netmask $pod_network)
# Substitute the computed values into the openvpn config template.
sed -e "s/\${SERVICE_NETWORK_ADDRESS}/${service_network_address}/" \
-e "s/\${SERVICE_NETWORK_NETMASK}/${service_network_netmask}/" \
-e "s/\${POD_NETWORK_ADDRESS}/${pod_network_address}/" \
-e "s/\${POD_NETWORK_NETMASK}/${pod_network_netmask}/" \
openvpn.config.template > openvpn.config
# node_network may be a bracketed, comma-separated list of CIDRs
# (e.g. "[10.0.0.0/16,10.1.0.0/16]"); accept a pushed route for each.
if [[ ! -z "$node_network" ]]; then
for n in $(echo $node_network | sed 's/[][]//g' | sed 's/,/ /g')
do
node_network_address=$(echo $n | cut -f1 -d/)
node_network_netmask=$(CIDR2Netmask $n)
echo "pull-filter accept \"route ${node_network_address} ${node_network_netmask}\"" >> openvpn.config
done
fi
# Accept only vpn-internal 192.168.123.* routes; ignore every other pushed
# route and all redirect-gateway options so the pod's default routing stays intact.
echo "pull-filter accept \"route 192.168.123.\"" >> openvpn.config
echo "pull-filter ignore \"route\"" >> openvpn.config
echo "pull-filter ignore redirect-gateway" >> openvpn.config
echo "pull-filter ignore route-ipv6" >> openvpn.config
echo "pull-filter ignore redirect-gateway-ipv6" >> openvpn.config
# Reconnect forever: openvpn exits when the tunnel drops and is restarted
# after a one-second pause.
while : ; do
if [[ ! -z $ENDPOINT ]]; then
openvpn --remote ${ENDPOINT} --port ${openvpn_port} --config openvpn.config
else
log "No tunnel endpoint found"
fi
sleep 1
done
|
<gh_stars>100-1000
/*
* Copyright © 2021 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*
*/
import * as fs from 'fs-extra';
import { homedir } from 'os';
import * as path from 'path';
import * as Config from '@oclif/config';
import { getBlockchainDBPath } from '../../../../src/utils/path';
import * as downloadUtils from '../../../../src/utils/download';
import { ImportCommand } from '../../../../src/bootstrapping/commands/blockchain/import';
import { getConfig } from '../../../helpers/config';
// Unit tests for the `blockchain:import` CLI command, which unpacks a
// blockchain snapshot archive into the node's data path. All fs and archive
// operations are mocked, so no real files are touched.
describe('blockchain:import', () => {
const defaultDataPath = path.join(homedir(), '.lisk', 'lisk-core');
const defaultBlockchainDBPath = getBlockchainDBPath(defaultDataPath);
const pathToBlockchainGzip = '/path/to/blockchain.db.tar.gz';
let stdout: string[];
let stderr: string[];
let config: Config.IConfig;
beforeEach(async () => {
stdout = [];
stderr = [];
config = await getConfig();
// Capture process output instead of printing it during the run.
jest.spyOn(process.stdout, 'write').mockImplementation(val => stdout.push(val as string) > -1);
jest.spyOn(process.stderr, 'write').mockImplementation(val => stderr.push(val as string) > -1);
// Defaults: no pre-existing blockchain data; fs mutations are no-ops;
// the archive is treated as a .gz and extraction succeeds.
jest.spyOn(fs, 'existsSync').mockReturnValue(false);
jest.spyOn(fs, 'removeSync').mockReturnValue();
jest.spyOn(fs, 'ensureDirSync').mockReturnValue();
jest.spyOn(path, 'extname').mockReturnValue('.gz');
jest.spyOn(downloadUtils, 'extract').mockReturnValue('' as never);
});
// The archive path argument is mandatory.
describe('when importing with no path argument', () => {
it('should log error and return', async () => {
await expect(ImportCommand.run([], config)).rejects.toThrow('Missing 1 required arg:');
});
});
describe('when importing with no existing blockchain data', () => {
it('should import "blockchain.db" from given path', async () => {
await ImportCommand.run([pathToBlockchainGzip], config);
expect(fs.existsSync).toHaveBeenCalledTimes(1);
expect(fs.existsSync).toHaveBeenCalledWith(defaultBlockchainDBPath);
expect(fs.ensureDirSync).toHaveBeenCalledTimes(1);
expect(fs.ensureDirSync).toHaveBeenCalledWith(defaultBlockchainDBPath);
expect(downloadUtils.extract).toHaveBeenCalledTimes(1);
expect(downloadUtils.extract).toHaveBeenCalledWith(
path.dirname(pathToBlockchainGzip),
'blockchain.db.tar.gz',
defaultBlockchainDBPath,
);
});
});
// --data-path overrides the default ~/.lisk/lisk-core location.
describe('when importing with --data-path flag', () => {
const dataPath = getBlockchainDBPath('/my/app/');
it('should import "blockchain.db" from given path', async () => {
await ImportCommand.run([pathToBlockchainGzip, '--data-path=/my/app/'], config);
expect(fs.existsSync).toHaveBeenCalledTimes(1);
expect(fs.existsSync).toHaveBeenCalledWith(dataPath);
expect(fs.ensureDirSync).toHaveBeenCalledTimes(1);
expect(fs.ensureDirSync).toHaveBeenCalledWith(dataPath);
expect(downloadUtils.extract).toHaveBeenCalledTimes(1);
expect(downloadUtils.extract).toHaveBeenCalledWith(
path.dirname(pathToBlockchainGzip),
'blockchain.db.tar.gz',
dataPath,
);
});
});
// Existing data must not be overwritten unless --force is given.
describe('when importing with existing blockchain data', () => {
beforeEach(() => {
(fs.existsSync as jest.Mock).mockReturnValue(true);
});
describe('when importing without --force flag', () => {
it('should log error and return', async () => {
await expect(ImportCommand.run([pathToBlockchainGzip], config)).rejects.toThrow(
`There is already a blockchain data file found at ${defaultDataPath}. Use --force to override.`,
);
});
});
describe('when importing with --force flag', () => {
it('should import "blockchain.db" to given data-path', async () => {
await ImportCommand.run([pathToBlockchainGzip, '--force'], config);
expect(fs.existsSync).toHaveBeenCalledTimes(1);
expect(fs.existsSync).toHaveBeenCalledWith(defaultBlockchainDBPath);
expect(fs.ensureDirSync).toHaveBeenCalledTimes(1);
expect(fs.ensureDirSync).toHaveBeenCalledWith(defaultBlockchainDBPath);
expect(downloadUtils.extract).toHaveBeenCalledTimes(1);
expect(downloadUtils.extract).toHaveBeenCalledWith(
path.dirname(pathToBlockchainGzip),
'blockchain.db.tar.gz',
defaultBlockchainDBPath,
);
});
});
});
});
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jena;
import java.lang.reflect.InvocationTargetException ;
import java.lang.reflect.Method ;
/**
 * Helper that loads a command class by name and invokes its
 * {@code main(String[])} via reflection. Every failure mode prints a
 * diagnostic and terminates the JVM with a distinct exit status.
 */
public class InvokingUtil
{
    /**
     * Loads {@code className}, locates its {@code main(String[])} method and
     * invokes it with {@code args}.
     *
     * Exit statuses: 1 = class or main not found, 7/8/9 = invocation failures.
     *
     * @param className fully qualified name of the class to run
     * @param args      arguments forwarded to the target main method
     */
    public static void invokeCmd(String className, String[] args)
    {
        Class<?> cmd = null ;
        try { cmd = Class.forName(className) ; }
        catch (ClassNotFoundException ex)
        {
            System.err.println("Class '"+className+"' not found") ;
            System.exit(1) ;
        }
        Method method = null ;
        try { method = cmd.getMethod("main", String[].class) ; }
        catch (NoSuchMethodException ex)
        {
            System.err.println("'main' not found but the class '"+className+"' was") ;
            System.exit(1) ;
        }
        try
        {
            // args is wrapped in Object[] so it is passed as ONE varargs argument.
            method.invoke(null, new Object[]{args}) ;
            return ;
        } catch (IllegalArgumentException ex)
        {
            System.err.println("IllegalArgumentException exception: "+ex.getMessage());
            System.exit(7) ;
        } catch (IllegalAccessException ex)
        {
            System.err.println("IllegalAccessException exception: "+ex.getMessage());
            System.exit(8) ;
        } catch (InvocationTargetException ex)
        {
            // FIX: InvocationTargetException itself usually has a null message;
            // report the wrapped cause thrown by the target main instead.
            Throwable cause = (ex.getCause() != null) ? ex.getCause() : ex ;
            System.err.println("InvocationTargetException exception: "+cause.getMessage());
            System.exit(9) ;
        }
    }
}
|
<reponame>openharmony-gitee-mirror/ark_runtime_core<gh_stars>1-10
/*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <sys/mman.h>
#include "libpandabase/mem/mem.h"
#include "libpandabase/os/mem.h"
#include "libpandabase/utils/asan_interface.h"
#include "libpandabase/utils/logger.h"
#include "libpandabase/utils/math_helpers.h"
#include "runtime/include/runtime.h"
#include "runtime/mem/alloc_config.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/tests/allocator_test_base.h"
namespace panda::mem {
using NonObjectFreeListAllocator = FreeListAllocator<EmptyAllocConfigWithCrossingMap>;
// Test fixture that brings up a panda Runtime (needed for the CrossingMap and
// the PoolManager) and tears it down again; every test gets a fresh runtime
// and an attached managed thread.
class FreeListAllocatorTest : public AllocatorTest<NonObjectFreeListAllocator> {
public:
FreeListAllocatorTest()
{
// We need to create a runtime instance to be able to use CrossingMap.
options_.SetShouldLoadBootPandaFiles(false);
options_.SetShouldInitializeIntrinsics(false);
Runtime::Create(options_);
thread_ = panda::MTManagedThread::GetCurrent();
thread_->ManagedCodeBegin();
// If the runtime did not create the CrossingMap, create it ourselves and
// remember that we own it so the destructor destroys it again.
if (!CrossingMapSingleton::IsCreated()) {
CrossingMapSingleton::Create();
crossingmap_manual_handling_ = true;
}
}
// Teardown mirrors construction order: leave managed code, release pools,
// destroy the (manually created) CrossingMap, then the runtime.
~FreeListAllocatorTest()
{
thread_->ManagedCodeEnd();
ClearPoolManager();
if (crossingmap_manual_handling_) {
CrossingMapSingleton::Destroy();
}
Runtime::Destroy();
}
protected:
panda::MTManagedThread *thread_ {nullptr};
// Pool geometry and allocator limits shared by all tests below.
static constexpr size_t DEFAULT_POOL_SIZE_FOR_ALLOC = NonObjectFreeListAllocator::GetMinPoolSize();
static constexpr size_t DEFAULT_POOL_ALIGNMENT_FOR_ALLOC = FREELIST_DEFAULT_ALIGNMENT;
static constexpr size_t POOL_HEADER_SIZE = sizeof(NonObjectFreeListAllocator::MemoryPoolHeader);
static constexpr size_t MAX_ALLOC_SIZE = NonObjectFreeListAllocator::GetMaxSize();
// Requests a pool from the runtime's PoolManager, records it for cleanup in
// ClearPoolManager(), and hands it to the allocator under test.
void AddMemoryPoolToAllocator(NonObjectFreeListAllocator &alloc) override
{
os::memory::LockHolder lock(pool_lock_);
Pool pool = PoolManager::GetMmapMemPool()->AllocPool(DEFAULT_POOL_SIZE_FOR_ALLOC, SpaceType::SPACE_TYPE_OBJECT,
AllocatorType::FREELIST_ALLOCATOR, &alloc);
ASSERT(pool.GetSize() == DEFAULT_POOL_SIZE_FOR_ALLOC);
if (pool.GetMem() == nullptr) {
ASSERT_TRUE(0 && "Can't get a new pool from PoolManager");
}
allocated_pools_by_pool_manager_.push_back(pool);
if (!alloc.AddMemoryPool(pool.GetMem(), pool.GetSize())) {
ASSERT_TRUE(0 && "Can't add mem pool to allocator");
}
}
void AddMemoryPoolToAllocatorProtected(NonObjectFreeListAllocator &alloc) override
{
// We use common PoolManager from Runtime. Therefore, we have the same pool allocation for both cases.
AddMemoryPoolToAllocator(alloc);
}
bool AllocatedByThisAllocator(NonObjectFreeListAllocator &allocator, void *mem) override
{
return allocator.AllocatedByFreeListAllocator(mem);
}
// Frees every pool taken from the PoolManager; optionally also removes the
// matching CrossingMap entries (needed when pools are reused across test runs).
void ClearPoolManager(bool clear_crossing_map = false)
{
for (auto i : allocated_pools_by_pool_manager_) {
PoolManager::GetMmapMemPool()->FreePool(i.GetMem(), i.GetSize());
if (clear_crossing_map) {
// We need to remove corresponding Pools from the CrossingMap
CrossingMapSingleton::RemoveCrossingMapForMemory(i.GetMem(), i.GetSize());
}
}
allocated_pools_by_pool_manager_.clear();
}
std::vector<Pool> allocated_pools_by_pool_manager_;
RuntimeOptions options_;
// True when this fixture created the CrossingMap and must destroy it.
bool crossingmap_manual_handling_ {false};
// Mutex, which allows only one thread to add pool to the pool vector
os::memory::Mutex pool_lock_;
};
// Smoke test: a range of small allocation sizes must all succeed from one pool.
TEST_F(FreeListAllocatorTest, SimpleAllocateDifferentObjSizeTest)
{
LOG(DEBUG, ALLOC) << "SimpleAllocateDifferentObjSizeTest";
mem::MemStatsType *mem_stats = new mem::MemStatsType();
NonObjectFreeListAllocator allocator(mem_stats);
AddMemoryPoolToAllocator(allocator);
for (size_t i = 23; i < 300; i++) {
void *mem = allocator.Alloc(i);
LOG(DEBUG, ALLOC) << "Allocate obj with size " << i << " at " << std::hex << mem;
}
delete mem_stats;
}
// Allocate, write a pattern, verify and free 512 minimum-size blocks.
TEST_F(FreeListAllocatorTest, AllocateWriteFreeTest)
{
AllocateAndFree(FREELIST_ALLOCATOR_MIN_SIZE, 512);
}
// Randomized alloc/free with sizes in [MIN, 2*MIN).
TEST_F(FreeListAllocatorTest, AllocateRandomFreeTest)
{
static constexpr size_t ALLOC_SIZE = FREELIST_ALLOCATOR_MIN_SIZE;
AllocateFreeDifferentSizesTest<ALLOC_SIZE, 2 * ALLOC_SIZE>(512);
}
// A request one byte over the allocator's maximum must fail cleanly.
TEST_F(FreeListAllocatorTest, AllocateTooBigObjTest)
{
AllocateTooBigObjectTest<MAX_ALLOC_SIZE + 1>();
}
// Alignment-constrained allocations across two pools.
TEST_F(FreeListAllocatorTest, AlignmentAllocTest)
{
static constexpr size_t POOLS_COUNT = 2;
AlignedAllocFreeTest<FREELIST_ALLOCATOR_MIN_SIZE, MAX_ALLOC_SIZE / 4096>(POOLS_COUNT);
}
// Exhausting a single pool must fail gracefully rather than crash.
TEST_F(FreeListAllocatorTest, AllocateTooMuchTest)
{
static constexpr size_t ALLOC_SIZE = FREELIST_ALLOCATOR_MIN_SIZE;
AllocateTooMuchTest(ALLOC_SIZE, DEFAULT_POOL_SIZE_FOR_ALLOC / ALLOC_SIZE);
}
// Iteration over all live objects managed by the allocator.
TEST_F(FreeListAllocatorTest, ObjectIteratorTest)
{
ObjectIteratorTest<FREELIST_ALLOCATOR_MIN_SIZE, MAX_ALLOC_SIZE>();
}
TEST_F(FreeListAllocatorTest, ObjectCollectionTest)
{
ObjectCollectionTest<FREELIST_ALLOCATOR_MIN_SIZE, MAX_ALLOC_SIZE>();
}
// Range-restricted iteration, bounded by the CrossingMap granularity.
TEST_F(FreeListAllocatorTest, ObjectIteratorInRangeTest)
{
ObjectIteratorInRangeTest<FREELIST_ALLOCATOR_MIN_SIZE, MAX_ALLOC_SIZE>(
CrossingMapSingleton::GetCrossingMapGranularity());
}
// Verifies AddressSanitizer poisoning of freed regions.
TEST_F(FreeListAllocatorTest, AsanTest)
{
AsanTest();
}
// Fully-freed pools must be detectable and returnable to the PoolManager.
TEST_F(FreeListAllocatorTest, VisitAndRemoveFreePoolsTest)
{
static constexpr size_t POOLS_COUNT = 5;
VisitAndRemoveFreePools<POOLS_COUNT>(MAX_ALLOC_SIZE);
}
// Ownership query: memory not from this allocator must not be claimed.
TEST_F(FreeListAllocatorTest, AllocatedByFreeListAllocatorTest)
{
AllocatedByThisAllocatorTest();
}
// Regression test for free-list link handling: free non-adjacent blocks in a
// specific order, re-allocate into the coalesced gap, and check that the byte
// patterns of the remaining blocks stay intact throughout.
TEST_F(FreeListAllocatorTest, FailedLinksTest)
{
static constexpr size_t min_alloc_size = FREELIST_ALLOCATOR_MIN_SIZE;
mem::MemStatsType *mem_stats = new mem::MemStatsType();
NonObjectFreeListAllocator allocator(mem_stats);
AddMemoryPoolToAllocator(allocator);
std::pair<void *, size_t> pair;
// Each element: (block pointer, index of the byte pattern written into it).
std::array<std::pair<void *, size_t>, 3> memory_elements;
for (size_t i = 0; i < 3; i++) {
void *mem = allocator.Alloc(min_alloc_size);
ASSERT_TRUE(mem != nullptr);
size_t index = SetBytesFromByteArray(mem, min_alloc_size);
std::pair<void *, size_t> new_pair(mem, index);
memory_elements.at(i) = new_pair;
}
// Free the middle block first, then the first one, so the two free regions
// must be linked/coalesced by the allocator.
pair = memory_elements[1];
ASSERT_TRUE(CompareBytesWithByteArray(std::get<0>(pair), min_alloc_size, std::get<1>(pair)));
allocator.Free(std::get<0>(pair));
pair = memory_elements[0];
ASSERT_TRUE(CompareBytesWithByteArray(std::get<0>(pair), min_alloc_size, std::get<1>(pair)));
allocator.Free(std::get<0>(pair));
// Re-allocate into the coalesced gap: one double-size and one single-size block.
{
void *mem = allocator.Alloc(min_alloc_size * 2);
ASSERT_TRUE(mem != nullptr);
size_t index = SetBytesFromByteArray(mem, min_alloc_size * 2);
std::pair<void *, size_t> new_pair(mem, index);
memory_elements.at(0) = new_pair;
}
{
void *mem = allocator.Alloc(min_alloc_size);
ASSERT_TRUE(mem != nullptr);
size_t index = SetBytesFromByteArray(mem, min_alloc_size);
std::pair<void *, size_t> new_pair(mem, index);
memory_elements.at(1) = new_pair;
}
// Verify and release all three blocks; patterns must be untouched.
{
pair = memory_elements[0];
ASSERT_TRUE(CompareBytesWithByteArray(std::get<0>(pair), min_alloc_size * 2, std::get<1>(pair)));
allocator.Free(std::get<0>(pair));
}
{
pair = memory_elements[1];
ASSERT_TRUE(CompareBytesWithByteArray(std::get<0>(pair), min_alloc_size, std::get<1>(pair)));
allocator.Free(std::get<0>(pair));
}
{
pair = memory_elements[2];
ASSERT_TRUE(CompareBytesWithByteArray(std::get<0>(pair), min_alloc_size, std::get<1>(pair)));
allocator.Free(std::get<0>(pair));
}
delete mem_stats;
}
// Two allocations at the allocator's maximum supported size must both succeed
// from a single pool and be freeable.
TEST_F(FreeListAllocatorTest, MaxAllocationSizeTest)
{
static constexpr size_t alloc_size = MAX_ALLOC_SIZE;
static constexpr size_t alloc_count = 2;
mem::MemStatsType *mem_stats = new mem::MemStatsType();
NonObjectFreeListAllocator allocator(mem_stats);
AddMemoryPoolToAllocator(allocator);
std::array<void *, alloc_count> memory_elements;
for (size_t i = 0; i < alloc_count; i++) {
void *mem = allocator.Alloc(alloc_size);
ASSERT_TRUE(mem != nullptr);
memory_elements.at(i) = mem;
}
for (size_t i = 0; i < alloc_count; i++) {
allocator.Free(memory_elements.at(i));
}
delete mem_stats;
}
// Fills an entire pool with power-of-two-sized blocks (accounting for the
// per-block and per-pool headers), frees everything, then repeats the exact
// allocation sequence to prove the pool is fully reusable after coalescing.
TEST_F(FreeListAllocatorTest, AllocateTheWholePoolFreeAndAllocateAgainTest)
{
size_t min_size_power_of_two;
// Round the minimum allocation size up to a power of two.
if ((FREELIST_ALLOCATOR_MIN_SIZE & (FREELIST_ALLOCATOR_MIN_SIZE - 1)) == 0U) {
min_size_power_of_two = panda::helpers::math::GetIntLog2(FREELIST_ALLOCATOR_MIN_SIZE);
} else {
min_size_power_of_two = ceil(std::log(FREELIST_ALLOCATOR_MIN_SIZE) / std::log(2U));
}
// Ensure a block of this power-of-two size still fits the minimum payload
// once the per-block header is subtracted.
if (((1 << min_size_power_of_two) - sizeof(freelist::MemoryBlockHeader)) < FREELIST_ALLOCATOR_MIN_SIZE) {
min_size_power_of_two++;
}
size_t alloc_size = (1 << min_size_power_of_two) - sizeof(freelist::MemoryBlockHeader);
// To cover all memory we need to consider pool header size at first bytes of pool memory.
size_t first_alloc_size = (1 << min_size_power_of_two) - sizeof(freelist::MemoryBlockHeader) - POOL_HEADER_SIZE;
if (first_alloc_size < FREELIST_ALLOCATOR_MIN_SIZE) {
first_alloc_size = (1 << (min_size_power_of_two + 1)) - sizeof(freelist::MemoryBlockHeader) - POOL_HEADER_SIZE;
}
mem::MemStatsType *mem_stats = new mem::MemStatsType();
NonObjectFreeListAllocator allocator(mem_stats);
AddMemoryPoolToAllocator(allocator);
std::vector<void *> memory_elements;
size_t alloc_count = 0;
// Allocate first element
void *first_alloc_mem = allocator.Alloc(first_alloc_size);
ASSERT_TRUE(first_alloc_mem != nullptr);
// Allocate and use the whole alloc pool
while (true) {
void *mem = allocator.Alloc(alloc_size);
if (mem == nullptr) {
break;
}
alloc_count++;
memory_elements.push_back(mem);
}
// Free all elements
allocator.Free(first_alloc_mem);
for (size_t i = 0; i < alloc_count; i++) {
allocator.Free(memory_elements.back());
memory_elements.pop_back();
}
// Allocate first element again
first_alloc_mem = allocator.Alloc(first_alloc_size);
ASSERT_TRUE(first_alloc_mem != nullptr);
// Allocate again
for (size_t i = 0; i < alloc_count; i++) {
void *mem = allocator.Alloc(alloc_size);
ASSERT_TRUE(mem != nullptr);
memory_elements.push_back(mem);
}
// Free all elements again
allocator.Free(first_alloc_mem);
for (size_t i = 0; i < alloc_count; i++) {
allocator.Free(memory_elements.back());
memory_elements.pop_back();
}
delete mem_stats;
}
// Multi-threaded smoke test: THREADS_COUNT threads concurrently allocate and
// free random-sized blocks, repeated MT_TEST_RUN_COUNT times.
TEST_F(FreeListAllocatorTest, MTAllocFreeTest)
{
    static constexpr size_t MIN_ELEMENTS_COUNT = 500;
    static constexpr size_t MAX_ELEMENTS_COUNT = 1000;
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MAX_MT_ALLOC_SIZE = MAX_ALLOC_SIZE / 128;
    static constexpr size_t MT_TEST_RUN_COUNT = 5;
    // Threads can concurrently add Pools to the allocator, therefore, we must make it into account
    // And also we must take fragmentation into account
    ASSERT_TRUE(mem::MemConfig::GetObjectPoolSize() >
                2 * (AlignUp(MAX_ELEMENTS_COUNT * MAX_MT_ALLOC_SIZE, DEFAULT_POOL_SIZE_FOR_ALLOC)) +
                    THREADS_COUNT * DEFAULT_POOL_SIZE_FOR_ALLOC);
    for (size_t i = 0; i < MT_TEST_RUN_COUNT; i++) {
        MT_AllocFreeTest<FREELIST_ALLOCATOR_MIN_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(MIN_ELEMENTS_COUNT,
                                                                                        MAX_ELEMENTS_COUNT);
        // Reset the pool manager so every run starts from a clean state.
        ClearPoolManager(true);
    }
}
// Multi-threaded test: threads allocate while the allocator is concurrently
// iterated (crossing-map granularity is used for the iteration step).
TEST_F(FreeListAllocatorTest, MTAllocIterateTest)
{
    static constexpr size_t MIN_ELEMENTS_COUNT = 500;
    static constexpr size_t MAX_ELEMENTS_COUNT = 1000;
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MAX_MT_ALLOC_SIZE = MAX_ALLOC_SIZE / 128;
    static constexpr size_t MT_TEST_RUN_COUNT = 5;
    // Threads can concurrently add Pools to the allocator, therefore, we must make it into account
    // And also we must take fragmentation into account
    ASSERT_TRUE(mem::MemConfig::GetObjectPoolSize() >
                2 * (AlignUp(MAX_ELEMENTS_COUNT * MAX_MT_ALLOC_SIZE, DEFAULT_POOL_SIZE_FOR_ALLOC)) +
                    THREADS_COUNT * DEFAULT_POOL_SIZE_FOR_ALLOC);
    for (size_t i = 0; i < MT_TEST_RUN_COUNT; i++) {
        MT_AllocIterateTest<FREELIST_ALLOCATOR_MIN_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(
            MIN_ELEMENTS_COUNT, MAX_ELEMENTS_COUNT, CrossingMapSingleton::GetCrossingMapGranularity());
        // Reset the pool manager so every run starts from a clean state.
        ClearPoolManager(true);
    }
}
// Multi-threaded test: threads allocate while a concurrent "collect" pass runs
// over the allocator.
TEST_F(FreeListAllocatorTest, MTAllocCollectTest)
{
    static constexpr size_t MIN_ELEMENTS_COUNT = 500;
    static constexpr size_t MAX_ELEMENTS_COUNT = 1000;
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MAX_MT_ALLOC_SIZE = MAX_ALLOC_SIZE / 128;
    static constexpr size_t MT_TEST_RUN_COUNT = 5;
    // Threads can concurrently add Pools to the allocator, therefore, we must make it into account
    // And also we must take fragmentation into account
    ASSERT_TRUE(mem::MemConfig::GetObjectPoolSize() >
                2 * (AlignUp(MAX_ELEMENTS_COUNT * MAX_MT_ALLOC_SIZE, DEFAULT_POOL_SIZE_FOR_ALLOC)) +
                    THREADS_COUNT * DEFAULT_POOL_SIZE_FOR_ALLOC);
    for (size_t i = 0; i < MT_TEST_RUN_COUNT; i++) {
        MT_AllocCollectTest<FREELIST_ALLOCATOR_MIN_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(MIN_ELEMENTS_COUNT,
                                                                                           MAX_ELEMENTS_COUNT);
        // Reset the pool manager so every run starts from a clean state.
        ClearPoolManager(true);
    }
}
} // namespace panda::mem
|
'use strict';
// Imports
const {commandUtils} = require('@gkalpak/cli-utils');
const chalk = require('chalk');
// Constants
// Prefix for the temporary git remote alias and for the created local branches.
const _PR_REMOTE_ALIAS_PREFIX = 'gcoghpr';
const _PR_LOCAL_BRANCH_PREFIX = 'gcoghpr';
// Environment variable that may hold a GitHub access token (avoids rate-limiting).
const GH_TOKEN_NAME = 'GITHUB_ACCESS_TOKEN';
// Marker branches pointing at the top/base commit of the fetched PR range.
const PR_LOCAL_BRANCH_TOP = `${_PR_LOCAL_BRANCH_PREFIX}-top`;
const PR_LOCAL_BRANCH_BASE = `${_PR_LOCAL_BRANCH_PREFIX}-base`;
// Classes
/**
* @class Gcoghpr
*
* @description
* Allow checking out a GitHub pull request (PR) as a local branch.
*
* @example
* ```js
* new Gcoghpr().run(['12345'])
* ```
*/
class _Gcoghpr {
  // Lazily loaded dependencies (resolved on first access via `_LazyLoader`).
  get _rl() { return this._lazyLoader.get('readline'); }
  get _constants() { return this._lazyLoader.get('../constants'); }
  constructor() {
    this._logger = new exps._Logger();
    this._lazyLoader = new exps._LazyLoader();
    this._ghUtils = new exps._GitHubUtils(this._logger, this._lazyLoader);
    // Matches `https://github.com/<user>/<repo>[.git][/]` (case-insensitive);
    // capture groups: 1 = user, 2 = repo.
    this._remoteUrlRe = /^https:\/\/github\.com\/([^/]+)\/([^/]+?)(?:\.git|\/)?$/i;
  }
  /**
   * @method run
   *
   * @description
   * Do all necessary operations to check out the specified PR as a local branch.
   *
   * In a nutshell, this involves the following:
   *
   * 1. Guess the upstream URL (looking at remotes `upstream` or `origin`).
   * 2. Retrieve the PR author and branch (either extracting it from the input or fetching it from GitHub).
   * 3. Check if a local branch with the same name already exists. (If it does, ask for permission to overwrite it.)
   * 4. Check out master.
   * 5. Fetch the PR branch from the author's remote into a local branch (with the same name).
   * 6. Switch to the new branch.
   * 7. Set up the local branch to track the PR branch.
   * 8. If the number of commits in the PR is available (e.g. retrieved from GitHub), log them.
   *
   * The PR can be identified either as `<author>:<branch>` or as a PR number (in which case the PR info is fetched from
   * GitHub).
   *
   * NOTE: Making requests to the GitHub API is subject to rate-limiting. You can avoid this, by setting an environment
   * variable to a GitHub access token (with an appropriate scope).
   *
   * @param {string[]} runtimeArgs - A list of arguments. Currently only the PR identifier (either as
   *     `<author>:<branch>` or as a number).
   * @param {IRunConfig} [config={}] - A configuration object. See {@link commandUtils#IRunConfig} for more details.
   *
   * @return {Promise<void>} - A promise that resolves once all operations have completed.
   */
  run(runtimeArgs, config = {}) {
    if (config.dryrun) {
      this._logger.log('Check out a GitHub pull request on a local branch.');
      return Promise.resolve();
    }
    const executor = new exps._Executor(this._logger, Object.assign({sapVersion: 2}, config), config.debug);
    return Promise.resolve().
      then(() => this._validateInput(runtimeArgs)).
      // `prId` is falsy when the user only asked for usage info - skip all work then.
      then(({prId}) => prId && Promise.resolve().
        then(() => this._logger.forceColor('gray')).
        then(() => executor.execForOutput('git remote get-url upstream || git remote get-url origin')).
        then(upstreamUrl => {
          const [, upstreamUser, upstreamRepo] = this._remoteUrlRe.exec(upstreamUrl);
          const upstreamInfo = {user: upstreamUser, repo: upstreamRepo, url: upstreamUrl};
          if (config.debug) {
            this._logger.debug(`Upstream info: ${JSON.stringify(upstreamInfo)}`);
          }
          return this._getPrInfo(upstreamInfo, prId);
        }).
        then(prInfo => {
          // Branch name: `gcoghpr-<branch>` for `<author>:<branch>` input, `gcoghpr-pr<num>` otherwise.
          const localBranchSuffix = (prId.indexOf(':') !== -1) ? prInfo.branch : `pr${prId}`;
          const localBranch = `${_PR_LOCAL_BRANCH_PREFIX}-${localBranchSuffix}`;
          const remoteAlias = `${_PR_REMOTE_ALIAS_PREFIX}-${prInfo.author}`;
          if (config.debug) {
            this._logger.debug(`PR info: ${JSON.stringify(prInfo)}`);
            this._logger.debug(`Local branch: ${localBranch}`);
          }
          return {...prInfo, localBranch, remoteAlias};
        }).
        then(({branch, originUrl, commits, localBranch, remoteAlias}) => Promise.resolve().
          // If the local branch already exists, ask before overwriting it.
          then(() => executor.exec(`git show-ref --heads --quiet ${localBranch}`).
            then(() => this._confirmOverwriteBranch(localBranch), () => undefined)).
          then(() => executor.execForOutput('git rev-parse --abbrev-ref HEAD')).
          // Move off the branch first if it is currently checked out (it is about to be re-created).
          then(currentBranch => (currentBranch === localBranch) && executor.exec('git checkout master')).
          then(() => executor.exec(`git remote remove ${remoteAlias} || true`)).
          then(() => executor.exec(`git remote add ${remoteAlias} ${originUrl}`)).
          then(() => executor.exec(`git fetch --no-tags ${remoteAlias} ${branch}`)).
          then(() => executor.exec(`git branch --force --track ${localBranch} ${remoteAlias}/${branch}`)).
          then(() => executor.exec(`git branch --force ${PR_LOCAL_BRANCH_TOP} ${localBranch}`)).
          then(() => executor.exec(`git branch --force ${PR_LOCAL_BRANCH_BASE} ${localBranch}~${commits || 0}`)).
          then(() => executor.exec(`git checkout ${localBranch}`)).
          then(() => this._reportSuccess(localBranch, commits, executor)).
          then(() => this._logger.forceColor(null))).
        // Always restore the default output color, then re-throw.
        catch(err => {
          this._logger.forceColor(null);
          return Promise.reject(err);
        }));
  }
  // Prompt the user (y/N) before overwriting an existing local branch.
  // Resolves on confirmation; rejects with 'Aborting...' otherwise.
  _confirmOverwriteBranch(branch) {
    return new Promise((resolve, reject) => {
      const rlInstance = this._rl.createInterface(process.stdin, process.stdout);
      const question = chalk.yellow(
        `Branch '${branch}' does already exist.\n` +
        `Overwrite it? ${chalk.white('[y/N]')} `);
      rlInstance.question(question, answer => {
        rlInstance.close();
        /^(?:y|yes)$/i.test(answer) ? resolve() : reject('Aborting...');
      });
    });
  }
  // Resolve `{author, branch, originUrl, commits}` for the given PR identifier.
  // `commits` is 0 when the PR was given as `author:branch` (count unknown).
  _getPrInfo(upstreamInfo, prId) {
    const prDataPromise = (prId.indexOf(':') !== -1) ?
      // `author:branch`
      Promise.resolve({commits: 0, label: prId}) :
      // `12345`
      this._ghUtils.getPr(upstreamInfo, prId).then(({commits, head}) => ({commits, label: head.label}));
    return prDataPromise.then(({commits, label}) => {
      const [author, branch] = label.split(':');
      const originUrl = `https://github.com/${author}/${upstreamInfo.repo}.git`;
      return {author, branch, originUrl, commits};
    });
  }
  // Turn a (possibly multi-line) message into a chain of `node --print` commands
  // (one per line), suitable for embedding into a shell command string.
  _getPrintCommand(message) {
    return message.
      split(/\r?\n/).
      map(line => `node --print "'${line.replace(/'/g, '\\$&')}'"`).
      join(' && ');
  }
  // Print a success message and, when the commit count is known, also the list
  // of PR commits (via the `gl1` alias).
  _reportSuccess(localBranch, commits, executor) {
    let ready = Promise.resolve();
    const commands = [];
    const messageMain = `\nFetched PR into local branch '${chalk.green(localBranch)}'`;
    const messageExt = !commits ?
      '.' :
      ` (and also branch range '${chalk.cyan(PR_LOCAL_BRANCH_BASE)}..${chalk.cyan(PR_LOCAL_BRANCH_TOP)}').\n` +
      '\n' +
      `PR commits (${commits})\\n---`;
    commands.push(this._getPrintCommand(`${messageMain}${messageExt}`));
    if (commits) {
      const gl1RawCmd = this._constants.ALIASES.git.gl1.getSpec().command;
      // `commits + 1` so the base commit is shown as well.
      const gl1Args = [`-${commits + 1}`];
      ready = ready.
        then(() => commandUtils.expandCmd(gl1RawCmd, gl1Args, {})).
        then(gl1Cmd => commands.push(gl1Cmd));
    }
    return ready.then(() => executor.execWithStyle(null, commands.join(' && ')));
  }
  // Print a short usage message (script name derived from this file's name).
  _usage() {
    const scriptName = __filename.slice(__dirname.length +1).replace(/\.js$/, '');
    const command = chalk.bgBlack.green(`${scriptName} @(<pr-number>|<author>:<branch>)`);
    const usageMessage = `${chalk.cyan('Usage:')} ${command}`;
    this._logger.log(`\n${usageMessage}\n`);
  }
  // Validate the CLI arguments. Returns `{prId}` on success, `{}` when called
  // with no arguments (usage was shown), and throws on invalid input.
  _validateInput(args) {
    const onError = errorMsg => {
      this._logger.error(errorMsg);
      this._usage();
      throw new Error('Invalid input.');
    };
    const [prId] = args;
    switch (args.length) {
      case 0:
        this._usage();
        return {};
      case 1:
        // Accept either `<author>:<branch>` or an all-digit PR number.
        if ((prId.indexOf(':') === -1) && !/^\d+$/.test(prId)) {
          onError(`Unexpected PR identifier: ${prId}`);
          break;
        }
        return {prId};
      default:
        onError(`Expected 1 argument, found: ${args.join(', ')}`);
        break;
    }
  }
}
/**
 * Runs shell commands through `commandUtils.spawnAsPromised()`, logging every
 * command first (plus extra details when debug mode is enabled).
 */
class Executor {
  constructor(logger, baseConfig = {}, debugMode = false) {
    this._logger = logger;
    this._baseConfig = baseConfig;
    this._debugMode = debugMode;
  }
  /**
   * Execute `cmd`, merging `config` over the base config.
   * Returns the promise from `commandUtils.spawnAsPromised()`.
   */
  exec(cmd, config) {
    const finalConfig = Object.assign({}, this._baseConfig, config);
    const cmdSep = ' && ';
    // Hide `echo`/`node --print` helper sub-commands from the logged command.
    const visibleSubCmds = cmd.
      split(cmdSep).
      filter(subCmd => !/^(?:echo|node (?:-p|--print)) /.test(subCmd));
    if (this._debugMode) {
      this._logger.debug(`Running command '${cmd}' (config: ${JSON.stringify(finalConfig)})...`);
    }
    this._logger.info(`RUN: ${visibleSubCmds.join(cmdSep)}`);
    return commandUtils.spawnAsPromised(cmd, finalConfig);
  }
  /** Execute `cmd` capturing its stdout; resolves with the trimmed output. */
  execForOutput(cmd, config) {
    const outputConfig = Object.assign({}, config, {returnOutput: true});
    return this.exec(cmd, outputConfig).then(rawOutput => rawOutput.trim());
  }
  /**
   * Execute `cmd` with a temporary output style applied, restoring the
   * previous style once the command settles (success or failure).
   */
  execWithStyle(color, cmd, config) {
    const tempStyle = this._logger.getTempStyle(color);
    const preCmd = `node --print ${JSON.stringify(`'\b${tempStyle.open}'`)}`;
    const restoreStyle = () => process.stdout.write(tempStyle.close);
    const promise = this.exec(`${preCmd} && ${cmd}`, config);
    promise.then(restoreStyle, restoreStyle);
    return promise;
  }
}
class GitHubUtils {
  // Lazily loaded Node.js modules and project constants.
  get _https() { return this._lazyLoader.get('https'); }
  get _url() { return this._lazyLoader.get('url'); }
  get _constants() { return this._lazyLoader.get('../constants'); }
  constructor(logger, lazyLoader) {
    this._logger = logger;
    this._lazyLoader = lazyLoader;
    this._baseUrl = 'https://api.github.com/';
    // Ensures the "no access token" warning is only printed once per instance.
    this._shownTokenWarning = false;
  }
  // GET `path` from the GitHub API and resolve with the parsed JSON response.
  // Uses the token from `GITHUB_ACCESS_TOKEN` (if set) for authentication.
  get(path) {
    const url = this._baseUrl + path;
    const ghToken = process.env[GH_TOKEN_NAME] || '';
    const options = {
      headers: {
        // Empty string (falsy token) results in no `Authorization` header value.
        Authorization: ghToken && `token ${ghToken}`,
        'User-Agent': this._constants.VERSION_STAMP.replace(/\s/g, '_'),
      },
    };
    if (!ghToken && !this._shownTokenWarning) {
      this._shownTokenWarning = true;
      this._logger.warn(
        `No GitHub access token found in \`${GH_TOKEN_NAME}\` environment variable.\n` +
        'Proceeding anonymously (and subject to rate-limiting)...');
    }
    return this._httpsGet(url, options).then(responseText => {
      try {
        return JSON.parse(responseText);
      } catch (err) {
        // Log the raw body to aid debugging, then propagate the parse error.
        this._logger.error(`Response:\n${responseText}`);
        throw err;
      }
    });
  }
  // Fetch the PR object for `prNumber` in the upstream repo.
  getPr({user: upstreamUser, repo: upstreamRepo}, prNumber) {
    return this.get(`repos/${upstreamUser}/${upstreamRepo}/pulls/${prNumber}`);
  }
  // Perform an HTTPS GET; resolves with the response body on 2xx, rejects
  // (with a descriptive string) on any other status or on network errors.
  _httpsGet(url, options = {}) {
    return new Promise((resolve, reject) => {
      this._logger.info(`GET: ${url} (options: {${Object.keys(options).join(', ')}})`);
      let data = '';
      this._https.
        // Do not use the `get(url, options, cb)` signature,
        // to remain compatible with versions older than 10.9.0.
        get({...this._url.parse(url), ...options}, res => res.
          on('error', reject).
          on('data', d => data += d).
          on('end', () => ((200 <= res.statusCode) && (res.statusCode < 300)) ?
            resolve(data) :
            reject(`Request to '${url}' failed (status: ${res.statusCode}):\n${data}`))).
        on('error', reject);
    });
  }
}
/**
 * Caches `require()`d dependencies, loading each one on first access only.
 */
class LazyLoader {
  constructor() {
    // Prototype-less map of dependency name --> loaded module.
    this._loaded = Object.create(null);
  }
  /** Return the module for `dep`, requiring (and caching) it if necessary. */
  get(dep) {
    if (!this._loaded[dep]) {
      this._loaded[dep] = require(dep);
    }
    return this._loaded[dep];
  }
}
class Logger {
  constructor() {
    // Currently forced style; `open`/`close` are the raw ANSI escape sequences
    // ('' when no style is forced).
    this._forcedStyle = {
      color: 'reset',
      open: '',
      close: '',
    };
  }
  // Force all subsequent stdout output to use `color` (or reset with `null`).
  // Closes the previous style and immediately opens the new one on stdout.
  forceColor(color) {
    const oldStyle = this._forcedStyle;
    const newStyle = this._forcedStyle = this._computeStyle(color);
    process.stdout.write(oldStyle.close + newStyle.open);
  }
  // Compute open/close sequences for temporarily switching from the currently
  // forced style to `color` (both empty if the colors are the same).
  getTempStyle(color) {
    const originalStyle = this._forcedStyle;
    const tempStyle = this._computeStyle(color);
    const isSameColor = (originalStyle.color === tempStyle.color);
    const originalToTemp = isSameColor ? '' : originalStyle.close + tempStyle.open;
    const tempToOriginal = isSameColor ? '' : tempStyle.close + originalStyle.open;
    return {
      color: `${originalStyle.color} --> ${tempStyle.color}`,
      open: originalToTemp,
      close: tempToOriginal,
    };
  }
  debug(msg) { console.debug(this._withStyle(chalk.gray(`[debug] ${msg}`))); }
  error(msg) { console.error(chalk.red(msg)); }
  info(msg) { console.info(this._withStyle(`[info] ${msg}`)); }
  log(msg) { console.log(this._withStyle(msg)); }
  warn(msg) { console.warn(this._withStyle(chalk.yellow(msg))); }
  // Derive the ANSI open/close sequences for `color` by styling a dummy string
  // with chalk and stripping the text out of it.
  _computeStyle(color) {
    color = color || 'reset';
    return (color === 'reset') ?
      {color, open: '', close: ''} :
      {
        color,
        open: chalk[color]('foo').replace(/foo.+$/, ''),
        close: chalk[color]('foo').replace(/^.+foo/, ''),
      };
  }
  // NOTE(review): the message is wrapped in `open` on BOTH sides - presumably
  // intentional, to re-apply the forced style after any chalk reset sequences
  // inside `msg`; confirm this is not a typo for `.close`.
  _withStyle(msg) { return this._forcedStyle.open + msg + this._forcedStyle.open; }
}
// Exports
// Public API plus internals exposed for testing. Also bound to `exps` so the
// classes above can reference each other through the (mockable) exports object.
const exps = module.exports = {
  PR_LOCAL_BRANCH_PREFIX: _PR_LOCAL_BRANCH_PREFIX,
  PR_REMOTE_ALIAS_PREFIX: _PR_REMOTE_ALIAS_PREFIX,
  Gcoghpr: _Gcoghpr,
  main: __main,
  /** Exposed for testing purposes only. */
  _GH_TOKEN_NAME: GH_TOKEN_NAME,
  /** Exposed for testing purposes only. */
  _PR_LOCAL_BRANCH_BASE: PR_LOCAL_BRANCH_BASE,
  /** Exposed for testing purposes only. */
  _PR_LOCAL_BRANCH_TOP: PR_LOCAL_BRANCH_TOP,
  /** Exposed for testing purposes only. */
  _Executor: Executor,
  /** Exposed for testing purposes only. */
  _GitHubUtils: GitHubUtils,
  /** Exposed for testing purposes only. */
  _LazyLoader: LazyLoader,
  /** Exposed for testing purposes only. */
  _Logger: Logger,
};
// Functions - Definitions
/** Script entry point: delegate to a fresh `Gcoghpr` instance. */
function __main(runtimeArgs, config) {
  const gcoghpr = new exps.Gcoghpr();
  return gcoghpr.run(runtimeArgs, config);
}
|
#!/bin/sh
# Build script: compile the TypeScript sources from src/ into docs/.
# Abort on the first failing command so a broken compile does not silently
# produce a half-populated docs/ directory (previously, a failed `tsc` still
# deleted the .ts sources and the script exited 0).
set -e

rm -rf docs/
mkdir docs
# copy all files in src/ to docs/
cp -r src/. docs
# compile .ts file in docs folder
tsc -p docs && echo '[build: success]'
# remove unnecessary files
rm -rf docs/*.ts docs/tsconfig.json
|
// components/Post/PostHeader/style.ts
import { StyleSheet } from "react-native";
// Styles for the PostHeader component: the header row itself plus the
// "delete post" confirmation modal it can open.
const styles = StyleSheet.create({
  // --- Header row ---
  postHeaderContainer: {
    flexDirection: 'row',
    alignItems: 'center',
    marginBottom: 10
  },
  // Expands between the avatar and the three-dots icon.
  middle: {
    marginHorizontal: 10,
    flex: 1,
  },
  username: {
    fontWeight: 'bold',
    marginBottom: 5
  },
  createdAt: {
    color: 'grey'
  },
  threeDotsIcon: {
    alignSelf: 'flex-start'
  },
  // --- Confirmation modal ---
  centeredView: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
    marginTop: 22,
  },
  modalView: {
    margin: 20,
    backgroundColor: 'white',
    borderRadius: 20,
    width: '70%',
    padding: 10,
    // iOS shadow + Android elevation.
    shadowColor: '#000',
    shadowOffset: {
      width: 0,
      height: 2,
    },
    shadowOpacity: 0.25,
    shadowRadius: 3.84,
    elevation: 5,
  },
  confirmText: {
    alignSelf: 'center',
    marginTop: 10
  },
  exitBtn: {
    alignSelf: 'flex-end'
  },
  btnContainer: {
    width: '100%',
    flexDirection: 'row',
    justifyContent: 'center'
  },
  deleteBtn: {
    backgroundColor: 'red',
    alignSelf: 'center',
    marginTop: 20,
    borderRadius: 10,
    paddingHorizontal: 20,
    paddingVertical: 10
  },
  cancelBtn: {
    backgroundColor: 'blue',
    alignSelf: 'center',
    marginTop: 20,
    marginLeft: 20,
    borderRadius: 10,
    paddingHorizontal: 20,
    paddingVertical: 10
  },
  btnText: {
    color: '#fff'
  },
  textStyle: {
    color: 'white',
    fontWeight: 'bold',
    textAlign: 'center',
    fontSize: 25,
  },
  modalText: {
    marginBottom: 15,
    textAlign: 'center',
  },
})
export default styles;
|
#include <iostream>
#include <thread>
#include <chrono>
// Console traffic light: endlessly cycles Green -> Yellow -> Red, printing the
// active colour and sleeping for the duration of each phase.
class TrafficLight {
public:
    void run() {
        // Each phase: colour name and how long it stays lit, in seconds.
        struct Phase {
            const char *name;
            int duration_s;
        };
        static const Phase phases[] = {
            {"Green", 10},
            {"Yellow", 3},
            {"Red", 5},
        };
        // Endless loop (the original's `running` flag was never cleared).
        for (;;) {
            for (const Phase &phase : phases) {
                std::cout << phase.name << std::endl;
                std::this_thread::sleep_for(std::chrono::seconds(phase.duration_s));
            }
        }
    }
};
int main() {
TrafficLight trafficLight;
trafficLight.run();
return 0;
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.jfoenix.utils;
import javafx.animation.PauseTransition;
import javafx.event.EventHandler;
import javafx.scene.Node;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.Region;
import javafx.scene.paint.Color;
import javafx.scene.paint.Paint;
import javafx.util.Duration;
import java.util.Locale;
/**
* @author <NAME>
* @version 1.0
* @since 2017-02-11
*/
public class JFXNodeUtils {

    /**
     * Repaints all fills of {@code newBackground} with {@link Color#BLACK}
     * and applies the result to {@code nodeToUpdate}.
     */
    public static void updateBackground(Background newBackground, Region nodeToUpdate) {
        updateBackground(newBackground, nodeToUpdate, Color.BLACK);
    }

    /**
     * Applies {@code newBackground} to {@code nodeToUpdate}, replacing the
     * paint of every fill with {@code fill} while keeping each fill's radii
     * and insets. Does nothing if the background is {@code null} or has no fills.
     */
    public static void updateBackground(Background newBackground, Region nodeToUpdate, Paint fill) {
        if (newBackground != null && !newBackground.getFills().isEmpty()) {
            final BackgroundFill[] fills = new BackgroundFill[newBackground.getFills().size()];
            for (int i = 0; i < newBackground.getFills().size(); i++) {
                BackgroundFill bf = newBackground.getFills().get(i);
                fills[i] = new BackgroundFill(fill,bf.getRadii(),bf.getInsets());
            }
            nodeToUpdate.setBackground(new Background(fills));
        }
    }

    /**
     * Formats the RGB channels of {@code c} as an upper-case hex string
     * (e.g. {@code #FF00AA}). The opacity channel is ignored.
     * Returns {@code null} for a {@code null} input.
     */
    public static String colorToHex(Color c) {
        if (c != null) {
            return String.format((Locale) null, "#%02x%02x%02x",
                Math.round(c.getRed() * 255),
                Math.round(c.getGreen() * 255),
                Math.round(c.getBlue() * 255)).toUpperCase();
        } else {
            return null;
        }
    }

    /**
     * Invokes {@code handler} with the original press event once the mouse has
     * been held down on {@code node} for {@code holdTime} without a release or
     * a drag being detected.
     */
    public static void addPressAndHoldHandler(Node node, Duration holdTime,
                                              EventHandler<MouseEvent> handler) {
        // Local holder so the lambda below can see the latest press event.
        class Wrapper<T> {
            T content;
        }
        Wrapper<MouseEvent> eventWrapper = new Wrapper<>();
        PauseTransition holdTimer = new PauseTransition(holdTime);
        holdTimer.setOnFinished(event -> handler.handle(eventWrapper.content));
        node.addEventHandler(MouseEvent.MOUSE_PRESSED, event -> {
            eventWrapper.content = event;
            holdTimer.playFromStart();
        });
        // Releasing or starting a drag cancels the pending press-and-hold.
        node.addEventHandler(MouseEvent.MOUSE_RELEASED, event -> holdTimer.stop());
        node.addEventHandler(MouseEvent.DRAG_DETECTED, event -> holdTimer.stop());
    }
}
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Run from the script's own directory, regardless of where it was invoked.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")
# --- Script Init ---
# Fresh log directory for this run; stderr of all workers is appended to stderror.err.
mkdir -p log
rm -R -f log/*
touch log/stderror.err
# Background monitor watching this script's PID; killed again in exit_handler.
ktools_monitor.sh $$ & pid0=$!
# Cleanup handler run on any exit (normal, error or signal): stops the monitor,
# reports success/failure, records the process tree and kills any leftover
# child processes of this run.
exit_handler(){
exit_code=$?
kill -9 $pid0 2> /dev/null
if [ "$exit_code" -gt 0 ]; then
echo 'Ktools Run Error - exitcode='$exit_code
else
echo 'Run Completed'
fi

set +x
group_pid=$(ps -p $$ -o pgid --no-headers)
sess_pid=$(ps -p $$ -o sess --no-headers)
script_pid=$$
printf "Script PID:%d, GPID:%s, SPID:%d
" $script_pid $group_pid $sess_pid >> log/killout.txt

ps -jf f -g $sess_pid > log/subprocess_list
# Kill every process in our process group with a PID >= this script's,
# excluding celery workers and .log/.sh "processes".
# NOTE(review): the `egrep -v *\.log$` / `*\.sh$` patterns are unquoted, so the
# shell may glob them before egrep sees them - confirm they behave as intended
# (quoting the patterns would be safer).
PIDS_KILL=$(pgrep -a --pgroup $group_pid | awk 'BEGIN { FS = "[ \t\n]+" }{ if ($1 >= '$script_pid') print}' | grep -v celery | egrep -v *\\.log$ | egrep -v *\\.sh$ | sort -n -r)
echo "$PIDS_KILL" >> log/killout.txt
kill -9 $(echo "$PIDS_KILL" | awk 'BEGIN { FS = "[ \t\n]+" }{ print $1 }') 2>/dev/null
exit $exit_code
}
# NOTE(review): SIGKILL cannot be trapped, so the KILL entry here is inert.
trap exit_handler QUIT HUP INT KILL TERM ERR EXIT
# Verify that every started ktools worker logged a "finish" line.
# For each known process type: count started (log files) vs finished (log files
# containing "finish") and report lost processes; returns non-zero on any loss.
check_complete(){
    set +e
    proc_list="eve getmodel gulcalc fmcalc summarycalc eltcalc aalcalc leccalc pltcalc ordleccalc"
    has_error=0
    for p in $proc_list; do
        started=$(find log -name "$p*.log" | wc -l)
        finished=$(find log -name "$p*.log" -exec grep -l "finish" {} + | wc -l)
        if [ "$finished" -lt "$started" ]; then
            echo "[ERROR] $p - $((started-finished)) processes lost"
            has_error=1
        elif [ "$started" -gt 0 ]; then
            echo "[OK] $p"
        fi
    done
    if [ "$has_error" -ne 0 ]; then
        false # raise non-zero exit code
    fi
}
# --- Setup run dirs ---
# Clear previous outputs (keeping summary-info and json files) and rebuild the
# fifo/ and work/ directory trees for this run.
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
mkdir output/full_correlation/

rm -R -f fifo/*
mkdir fifo/full_correlation/
rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/

mkdir work/gul_S1_summaryaalcalc
mkdir work/full_correlation/gul_S1_summaryaalcalc
mkdir work/il_S1_summaryaalcalc
mkdir work/full_correlation/il_S1_summaryaalcalc

# Named pipes connecting the producer/consumer processes below
# (2 partitions P1/P2, ground-up and insured losses, plus full-correlation copies).
mkfifo fifo/full_correlation/gul_fc_P1
mkfifo fifo/full_correlation/gul_fc_P2

mkfifo fifo/gul_P1
mkfifo fifo/gul_P2

mkfifo fifo/gul_S1_summary_P1
mkfifo fifo/gul_S1_eltcalc_P1
mkfifo fifo/gul_S1_summarycalc_P1
mkfifo fifo/gul_S1_pltcalc_P1

mkfifo fifo/gul_S1_summary_P2
mkfifo fifo/gul_S1_eltcalc_P2
mkfifo fifo/gul_S1_summarycalc_P2
mkfifo fifo/gul_S1_pltcalc_P2

mkfifo fifo/il_P1
mkfifo fifo/il_P2

mkfifo fifo/il_S1_summary_P1
mkfifo fifo/il_S1_eltcalc_P1
mkfifo fifo/il_S1_summarycalc_P1
mkfifo fifo/il_S1_pltcalc_P1

mkfifo fifo/il_S1_summary_P2
mkfifo fifo/il_S1_eltcalc_P2
mkfifo fifo/il_S1_summarycalc_P2
mkfifo fifo/il_S1_pltcalc_P2

mkfifo fifo/full_correlation/gul_P1
mkfifo fifo/full_correlation/gul_P2

mkfifo fifo/full_correlation/gul_S1_summary_P1
mkfifo fifo/full_correlation/gul_S1_eltcalc_P1
mkfifo fifo/full_correlation/gul_S1_summarycalc_P1
mkfifo fifo/full_correlation/gul_S1_pltcalc_P1

mkfifo fifo/full_correlation/gul_S1_summary_P2
mkfifo fifo/full_correlation/gul_S1_eltcalc_P2
mkfifo fifo/full_correlation/gul_S1_summarycalc_P2
mkfifo fifo/full_correlation/gul_S1_pltcalc_P2

mkfifo fifo/full_correlation/il_P1
mkfifo fifo/full_correlation/il_P2

mkfifo fifo/full_correlation/il_S1_summary_P1
mkfifo fifo/full_correlation/il_S1_eltcalc_P1
mkfifo fifo/full_correlation/il_S1_summarycalc_P1
mkfifo fifo/full_correlation/il_S1_pltcalc_P1

mkfifo fifo/full_correlation/il_S1_summary_P2
mkfifo fifo/full_correlation/il_S1_eltcalc_P2
mkfifo fifo/full_correlation/il_S1_summarycalc_P2
mkfifo fifo/full_correlation/il_S1_pltcalc_P2

# --- Do insured loss computes ---
# Consumers: elt/summarycalc-to-csv/plt per partition; `tee` fans the summary
# stream out to them and to the aalcalc work files.
( eltcalc < fifo/il_S1_eltcalc_P1 > work/kat/il_S1_eltcalc_P1 ) 2>> log/stderror.err & pid1=$!
( summarycalctocsv < fifo/il_S1_summarycalc_P1 > work/kat/il_S1_summarycalc_P1 ) 2>> log/stderror.err & pid2=$!
( pltcalc < fifo/il_S1_pltcalc_P1 > work/kat/il_S1_pltcalc_P1 ) 2>> log/stderror.err & pid3=$!
( eltcalc -s < fifo/il_S1_eltcalc_P2 > work/kat/il_S1_eltcalc_P2 ) 2>> log/stderror.err & pid4=$!
( summarycalctocsv -s < fifo/il_S1_summarycalc_P2 > work/kat/il_S1_summarycalc_P2 ) 2>> log/stderror.err & pid5=$!
( pltcalc -s < fifo/il_S1_pltcalc_P2 > work/kat/il_S1_pltcalc_P2 ) 2>> log/stderror.err & pid6=$!

tee < fifo/il_S1_summary_P1 fifo/il_S1_eltcalc_P1 fifo/il_S1_summarycalc_P1 fifo/il_S1_pltcalc_P1 work/il_S1_summaryaalcalc/P1.bin > /dev/null & pid7=$!
tee < fifo/il_S1_summary_P2 fifo/il_S1_eltcalc_P2 fifo/il_S1_summarycalc_P2 fifo/il_S1_pltcalc_P2 work/il_S1_summaryaalcalc/P2.bin > /dev/null & pid8=$!
( summarycalc -m -f  -1 fifo/il_S1_summary_P1 < fifo/il_P1 ) 2>> log/stderror.err  &
( summarycalc -m -f  -1 fifo/il_S1_summary_P2 < fifo/il_P2 ) 2>> log/stderror.err  &

# --- Do ground up loss computes ---
( eltcalc < fifo/gul_S1_eltcalc_P1 > work/kat/gul_S1_eltcalc_P1 ) 2>> log/stderror.err & pid9=$!
( summarycalctocsv < fifo/gul_S1_summarycalc_P1 > work/kat/gul_S1_summarycalc_P1 ) 2>> log/stderror.err & pid10=$!
( pltcalc < fifo/gul_S1_pltcalc_P1 > work/kat/gul_S1_pltcalc_P1 ) 2>> log/stderror.err & pid11=$!
( eltcalc -s < fifo/gul_S1_eltcalc_P2 > work/kat/gul_S1_eltcalc_P2 ) 2>> log/stderror.err & pid12=$!
( summarycalctocsv -s < fifo/gul_S1_summarycalc_P2 > work/kat/gul_S1_summarycalc_P2 ) 2>> log/stderror.err & pid13=$!
( pltcalc -s < fifo/gul_S1_pltcalc_P2 > work/kat/gul_S1_pltcalc_P2 ) 2>> log/stderror.err & pid14=$!

tee < fifo/gul_S1_summary_P1 fifo/gul_S1_eltcalc_P1 fifo/gul_S1_summarycalc_P1 fifo/gul_S1_pltcalc_P1 work/gul_S1_summaryaalcalc/P1.bin > /dev/null & pid15=$!
tee < fifo/gul_S1_summary_P2 fifo/gul_S1_eltcalc_P2 fifo/gul_S1_summarycalc_P2 fifo/gul_S1_pltcalc_P2 work/gul_S1_summaryaalcalc/P2.bin > /dev/null & pid16=$!
( summarycalc -m -i  -1 fifo/gul_S1_summary_P1 < fifo/gul_P1 ) 2>> log/stderror.err  &
( summarycalc -m -i  -1 fifo/gul_S1_summary_P2 < fifo/gul_P2 ) 2>> log/stderror.err  &

# --- Do insured loss computes ---
# Same consumer fan-out for the full-correlation stream.
( eltcalc < fifo/full_correlation/il_S1_eltcalc_P1 > work/full_correlation/kat/il_S1_eltcalc_P1 ) 2>> log/stderror.err & pid17=$!
( summarycalctocsv < fifo/full_correlation/il_S1_summarycalc_P1 > work/full_correlation/kat/il_S1_summarycalc_P1 ) 2>> log/stderror.err & pid18=$!
( pltcalc < fifo/full_correlation/il_S1_pltcalc_P1 > work/full_correlation/kat/il_S1_pltcalc_P1 ) 2>> log/stderror.err & pid19=$!
( eltcalc -s < fifo/full_correlation/il_S1_eltcalc_P2 > work/full_correlation/kat/il_S1_eltcalc_P2 ) 2>> log/stderror.err & pid20=$!
( summarycalctocsv -s < fifo/full_correlation/il_S1_summarycalc_P2 > work/full_correlation/kat/il_S1_summarycalc_P2 ) 2>> log/stderror.err & pid21=$!
( pltcalc -s < fifo/full_correlation/il_S1_pltcalc_P2 > work/full_correlation/kat/il_S1_pltcalc_P2 ) 2>> log/stderror.err & pid22=$!

tee < fifo/full_correlation/il_S1_summary_P1 fifo/full_correlation/il_S1_eltcalc_P1 fifo/full_correlation/il_S1_summarycalc_P1 fifo/full_correlation/il_S1_pltcalc_P1 work/full_correlation/il_S1_summaryaalcalc/P1.bin > /dev/null & pid23=$!
tee < fifo/full_correlation/il_S1_summary_P2 fifo/full_correlation/il_S1_eltcalc_P2 fifo/full_correlation/il_S1_summarycalc_P2 fifo/full_correlation/il_S1_pltcalc_P2 work/full_correlation/il_S1_summaryaalcalc/P2.bin > /dev/null & pid24=$!
( summarycalc -m -f  -1 fifo/full_correlation/il_S1_summary_P1 < fifo/full_correlation/il_P1 ) 2>> log/stderror.err  &
( summarycalc -m -f  -1 fifo/full_correlation/il_S1_summary_P2 < fifo/full_correlation/il_P2 ) 2>> log/stderror.err  &

# --- Do ground up loss computes ---
( eltcalc < fifo/full_correlation/gul_S1_eltcalc_P1 > work/full_correlation/kat/gul_S1_eltcalc_P1 ) 2>> log/stderror.err & pid25=$!
( summarycalctocsv < fifo/full_correlation/gul_S1_summarycalc_P1 > work/full_correlation/kat/gul_S1_summarycalc_P1 ) 2>> log/stderror.err & pid26=$!
( pltcalc < fifo/full_correlation/gul_S1_pltcalc_P1 > work/full_correlation/kat/gul_S1_pltcalc_P1 ) 2>> log/stderror.err & pid27=$!
( eltcalc -s < fifo/full_correlation/gul_S1_eltcalc_P2 > work/full_correlation/kat/gul_S1_eltcalc_P2 ) 2>> log/stderror.err & pid28=$!
( summarycalctocsv -s < fifo/full_correlation/gul_S1_summarycalc_P2 > work/full_correlation/kat/gul_S1_summarycalc_P2 ) 2>> log/stderror.err & pid29=$!
( pltcalc -s < fifo/full_correlation/gul_S1_pltcalc_P2 > work/full_correlation/kat/gul_S1_pltcalc_P2 ) 2>> log/stderror.err & pid30=$!

tee < fifo/full_correlation/gul_S1_summary_P1 fifo/full_correlation/gul_S1_eltcalc_P1 fifo/full_correlation/gul_S1_summarycalc_P1 fifo/full_correlation/gul_S1_pltcalc_P1 work/full_correlation/gul_S1_summaryaalcalc/P1.bin > /dev/null & pid31=$!
tee < fifo/full_correlation/gul_S1_summary_P2 fifo/full_correlation/gul_S1_eltcalc_P2 fifo/full_correlation/gul_S1_summarycalc_P2 fifo/full_correlation/gul_S1_pltcalc_P2 work/full_correlation/gul_S1_summaryaalcalc/P2.bin > /dev/null & pid32=$!
( summarycalc -m -i  -1 fifo/full_correlation/gul_S1_summary_P1 < fifo/full_correlation/gul_P1 ) 2>> log/stderror.err  &
( summarycalc -m -i  -1 fifo/full_correlation/gul_S1_summary_P2 < fifo/full_correlation/gul_P2 ) 2>> log/stderror.err  &

# Feed the full-correlation GUL streams through fmcalc, and run the main
# eve -> getmodel -> gulcalc -> fmcalc producer chains for both partitions.
( tee < fifo/full_correlation/gul_fc_P1 fifo/full_correlation/gul_P1  | fmcalc -a2 > fifo/full_correlation/il_P1  ) 2>> log/stderror.err &
( tee < fifo/full_correlation/gul_fc_P2 fifo/full_correlation/gul_P2  | fmcalc -a2 > fifo/full_correlation/il_P2  ) 2>> log/stderror.err &
( eve 1 2 | getmodel | gulcalc -S0 -L0 -r -j fifo/full_correlation/gul_fc_P1 -a1 -i - | tee fifo/gul_P1 | fmcalc -a2 > fifo/il_P1  ) 2>> log/stderror.err &
( eve 2 2 | getmodel | gulcalc -S0 -L0 -r -j fifo/full_correlation/gul_fc_P2 -a1 -i - | tee fifo/gul_P2 | fmcalc -a2 > fifo/il_P2  ) 2>> log/stderror.err &

# Wait for all elt/summary/plt workers and tee fan-outs to finish.
wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16 $pid17 $pid18 $pid19 $pid20 $pid21 $pid22 $pid23 $pid24 $pid25 $pid26 $pid27 $pid28 $pid29 $pid30 $pid31 $pid32


# --- Do insured loss kats ---
# Concatenate the per-partition worker outputs into the final CSVs.
kat -s work/kat/il_S1_eltcalc_P1 work/kat/il_S1_eltcalc_P2 > output/il_S1_eltcalc.csv & kpid1=$!
kat work/kat/il_S1_pltcalc_P1 work/kat/il_S1_pltcalc_P2 > output/il_S1_pltcalc.csv & kpid2=$!
kat work/kat/il_S1_summarycalc_P1 work/kat/il_S1_summarycalc_P2 > output/il_S1_summarycalc.csv & kpid3=$!

# --- Do insured loss kats for fully correlated output ---
kat -s work/full_correlation/kat/il_S1_eltcalc_P1 work/full_correlation/kat/il_S1_eltcalc_P2 > output/full_correlation/il_S1_eltcalc.csv & kpid4=$!
kat work/full_correlation/kat/il_S1_pltcalc_P1 work/full_correlation/kat/il_S1_pltcalc_P2 > output/full_correlation/il_S1_pltcalc.csv & kpid5=$!
kat work/full_correlation/kat/il_S1_summarycalc_P1 work/full_correlation/kat/il_S1_summarycalc_P2 > output/full_correlation/il_S1_summarycalc.csv & kpid6=$!

# --- Do ground up loss kats ---
kat -s work/kat/gul_S1_eltcalc_P1 work/kat/gul_S1_eltcalc_P2 > output/gul_S1_eltcalc.csv & kpid7=$!
kat work/kat/gul_S1_pltcalc_P1 work/kat/gul_S1_pltcalc_P2 > output/gul_S1_pltcalc.csv & kpid8=$!
kat work/kat/gul_S1_summarycalc_P1 work/kat/gul_S1_summarycalc_P2 > output/gul_S1_summarycalc.csv & kpid9=$!

# --- Do ground up loss kats for fully correlated output ---
kat -s work/full_correlation/kat/gul_S1_eltcalc_P1 work/full_correlation/kat/gul_S1_eltcalc_P2 > output/full_correlation/gul_S1_eltcalc.csv & kpid10=$!
kat work/full_correlation/kat/gul_S1_pltcalc_P1 work/full_correlation/kat/gul_S1_pltcalc_P2 > output/full_correlation/gul_S1_pltcalc.csv & kpid11=$!
kat work/full_correlation/kat/gul_S1_summarycalc_P1 work/full_correlation/kat/gul_S1_summarycalc_P2 > output/full_correlation/gul_S1_summarycalc.csv & kpid12=$!
wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6 $kpid7 $kpid8 $kpid9 $kpid10 $kpid11 $kpid12


# Average annual loss over the accumulated summary binaries.
( aalcalc -Kil_S1_summaryaalcalc > output/il_S1_aalcalc.csv ) 2>> log/stderror.err & lpid1=$!
( aalcalc -Kgul_S1_summaryaalcalc > output/gul_S1_aalcalc.csv ) 2>> log/stderror.err & lpid2=$!
( aalcalc -Kfull_correlation/il_S1_summaryaalcalc > output/full_correlation/il_S1_aalcalc.csv ) 2>> log/stderror.err & lpid3=$!
( aalcalc -Kfull_correlation/gul_S1_summaryaalcalc > output/full_correlation/gul_S1_aalcalc.csv ) 2>> log/stderror.err & lpid4=$!
wait $lpid1 $lpid2 $lpid3 $lpid4

# Clean up intermediates, verify every worker finished, then run the exit handler.
rm -R -f work/*
rm -R -f fifo/*

check_complete
exit_handler
|
// Copyright (c) 2021 The Jaeger Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import React from 'react';
import { shallow } from 'enzyme';
import {
MonitorATMServicesViewImpl as MonitorATMServicesView,
mapStateToProps,
mapDispatchToProps,
getLoopbackInterval,
} from '.';
import LoadingIndicator from '../../common/LoadingIndicator';
import MonitorATMEmptyState from '../EmptyState';
import ServiceGraph from './serviceGraph';
import { originInitialState, serviceMetrics, serviceOpsMetrics } from '../../../reducers/metrics.mock';
// Minimal Redux state used to derive the component's default props;
// `metrics` starts from the reducer's initial state fixture.
const state = {
  services: {},
  metrics: originInitialState,
  selectedService: undefined,
};
const props = mapStateToProps(state);

// Freeze the clock so any time-dependent rendering is deterministic.
Date.now = jest.fn(() => 1487076708000); // Tue, 14 Feb 2017 12:51:48 GMT'
describe('<MonitorATMServicesView>', () => {
  let wrapper;
  const mockFetchServices = jest.fn();
  const mockFetchAllServiceMetrics = jest.fn();
  const mockFetchAggregatedServiceMetrics = jest.fn();

  beforeAll(() => {
    Date.now = jest.fn(() => 1466424490000);
  });

  // Shallow-render a fresh component before each test with the mapped props
  // plus mocked fetch actions injected in place of the real dispatchers.
  beforeEach(() => {
    wrapper = shallow(
      <MonitorATMServicesView
        {...props}
        fetchServices={mockFetchServices}
        fetchAllServiceMetrics={mockFetchAllServiceMetrics}
        fetchAggregatedServiceMetrics={mockFetchAggregatedServiceMetrics}
      />
    );
  });

  afterEach(() => {
    wrapper = null;
    jest.clearAllMocks();
  });

  it('does not explode', () => {
    expect(wrapper.length).toBe(1);
  });

  it('shows a loading indicator when loading services list', () => {
    wrapper.setProps({ servicesLoading: true });
    expect(wrapper.find(LoadingIndicator).length).toBe(1);
  });

  it('do not show a loading indicator once data loaded', () => {
    wrapper.setProps({
      services: ['s1'],
      selectedService: undefined,
      metrics: {
        ...originInitialState,
        serviceMetrics,
        serviceOpsMetrics,
        loading: false,
        isATMActivated: true,
      },
    });
    expect(wrapper.find(LoadingIndicator).length).toBe(0);
  });

  // With no services and ATM disabled, the empty-state page is rendered.
  it('Render ATM not configured page', () => {
    wrapper.setProps({
      services: [],
      selectedService: undefined,
      metrics: {
        ...originInitialState,
        serviceMetrics,
        serviceOpsMetrics,
        loading: false,
        isATMActivated: false,
      },
    });
    expect(wrapper.find(MonitorATMEmptyState).length).toBe(1);
  });

  // Metrics fetchers must fire only after the services list arrives.
  it('function invocation check on page load', () => {
    expect(mockFetchServices).toHaveBeenCalled();
    expect(mockFetchAllServiceMetrics).not.toHaveBeenCalled();
    expect(mockFetchAggregatedServiceMetrics).not.toHaveBeenCalled();
    wrapper.setProps({
      services: ['s1'],
      metrics: {
        ...originInitialState,
        serviceMetrics,
        serviceOpsMetrics,
        loading: false,
        isATMActivated: true,
      },
    });
    expect(mockFetchAllServiceMetrics).toHaveBeenCalled();
    expect(mockFetchAggregatedServiceMetrics).toHaveBeenCalled();
  });

  it('ATM snapshot test', () => {
    mockFetchServices.mockResolvedValue(['s1', 's2']);
    wrapper.setProps({
      services: ['s1', 's2'],
      metrics: {
        ...originInitialState,
        serviceMetrics,
        serviceOpsMetrics,
        loading: false,
        isATMActivated: true,
      },
    });
    expect(wrapper).toMatchSnapshot();
  });

  it('ComponentWillUnmount remove listener', () => {
    const remover = jest.spyOn(global, 'removeEventListener').mockImplementation(() => {});
    wrapper.unmount();
    expect(remover).toHaveBeenCalled();
  });

  // The resize handler reads offsetWidth from the graph wrapper ref and
  // stores a derived graphWidth in component state.
  it('resize window test', () => {
    const selectedInput = 'graphDivWrapper';
    wrapper.instance()[selectedInput] = {
      current: {
        offsetWidth: 100,
      },
    };
    global.dispatchEvent(new Event('resize'));
    expect(wrapper.state().graphWidth).toBe(76);
  });

  // Search filters the serviceOpsMetrics held in component state; an empty
  // query restores the unfiltered list.
  it('search test', () => {
    mockFetchServices.mockResolvedValue(['cartservice']);
    wrapper.setProps({
      services: ['s1', 's2'],
      metrics: {
        ...originInitialState,
        serviceMetrics,
        serviceOpsMetrics,
        loading: false,
        isATMActivated: true,
      },
    });
    wrapper.find('Search').simulate('change', { target: { value: 'place' } });
    expect(wrapper.state().serviceOpsMetrics.length).toBe(1);
    wrapper.find('Search').simulate('change', { target: { value: 'qqq' } });
    expect(wrapper.state().serviceOpsMetrics.length).toBe(0);
    wrapper.find('Search').simulate('change', { target: { value: '' } });
    expect(wrapper.state().serviceOpsMetrics.length).toBe(1);
  });

  // The latency graph's `error` prop stays null while at least one of the
  // 50/75/95 percentile series loaded; it becomes non-null only when all
  // three latency series errored.
  it('Error in serviceLatencies ', () => {
    wrapper.setProps({
      services: ['s1', 's2'],
      selectedService: 's1',
      metrics: {
        ...originInitialState,
        serviceMetrics,
        serviceOpsMetrics,
        loading: false,
        isATMActivated: true,
        serviceError: {
          ...originInitialState.serviceError,
          service_latencies_50: new Error('some API error'),
        },
      },
    });
    expect(
      wrapper
        .find(ServiceGraph)
        .first()
        .prop('error')
    ).toBeNull();
    wrapper.setProps({
      services: ['s1', 's2'],
      selectedService: 's1',
      metrics: {
        ...originInitialState,
        serviceMetrics,
        serviceOpsMetrics,
        loading: false,
        isATMActivated: true,
        serviceError: {
          ...originInitialState.serviceError,
          service_latencies_50: new Error('some API error'),
          service_latencies_75: new Error('some API error'),
        },
      },
    });
    expect(
      wrapper
        .find(ServiceGraph)
        .first()
        .prop('error')
    ).toBeNull();
    wrapper.setProps({
      services: ['s1', 's2'],
      selectedService: 's1',
      metrics: {
        ...originInitialState,
        serviceMetrics,
        serviceOpsMetrics,
        loading: false,
        isATMActivated: true,
        serviceError: {
          service_latencies_50: new Error('some API error'),
          service_latencies_75: new Error('some API error'),
          service_latencies_95: new Error('some API error'),
        },
      },
    });
    expect(
      wrapper
        .find(ServiceGraph)
        .first()
        .prop('error')
    ).not.toBeNull();
  });
});
// When the services list is already present in the store (e.g. returning to
// the page), the metrics fetchers must fire immediately on mount.
describe('<MonitorATMServicesView> on page switch', () => {
  // eslint-disable-next-line no-unused-vars
  let wrapper;
  const stateOnPageSwitch = {
    services: {
      services: ['s1'],
    },
    metrics: originInitialState,
    selectedService: undefined,
  };
  const propsOnPageSwitch = mapStateToProps(stateOnPageSwitch);
  const mockFetchServices = jest.fn();
  const mockFetchAllServiceMetrics = jest.fn();
  const mockFetchAggregatedServiceMetrics = jest.fn();

  beforeEach(() => {
    wrapper = shallow(
      <MonitorATMServicesView
        {...propsOnPageSwitch}
        fetchServices={mockFetchServices}
        fetchAllServiceMetrics={mockFetchAllServiceMetrics}
        fetchAggregatedServiceMetrics={mockFetchAggregatedServiceMetrics}
      />
    );
  });

  it('function invocation check on page load', () => {
    expect(mockFetchServices).toHaveBeenCalled();
    expect(mockFetchAllServiceMetrics).toHaveBeenCalled();
    expect(mockFetchAggregatedServiceMetrics).toHaveBeenCalled();
  });
});
describe('mapStateToProps()', () => {
  it('refines state to generate the props', () => {
    // NOTE(review): `selectedService: 's1'` and `selectedTimeFrame: 3600000`
    // are expected even though `state.selectedService` is undefined --
    // presumably mapStateToProps supplies defaults; verify against the
    // component's implementation.
    expect(mapStateToProps(state)).toEqual({
      metrics: originInitialState,
      services: [],
      selectedService: 's1',
      selectedTimeFrame: 3600000,
    });
  });
});
describe('mapDispatchToProps()', () => {
  it('providers the `fetchServices` , `fetchAllServiceMetrics` and `fetchAggregatedServiceMetrics` prop', () => {
    // Only the shape is asserted: each prop must be a bound action creator.
    expect(mapDispatchToProps({})).toEqual({
      fetchServices: expect.any(Function),
      fetchAllServiceMetrics: expect.any(Function),
      fetchAggregatedServiceMetrics: expect.any(Function),
    });
  });
});
// getLoopbackInterval maps a known timeframe (in ms) to its display label
// and returns '' for undefined or unrecognized values.
describe('getLoopbackInterval()', () => {
  it('undefined value', () => {
    expect(getLoopbackInterval()).toBe('');
  });
  it('timeframe NOT exists', () => {
    expect(getLoopbackInterval(111)).toBe('');
  });
  it('timeframe exists', () => {
    expect(getLoopbackInterval(48 * 3600000)).toBe('last 2 days');
  });
});
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_transform = void 0;
var ic_transform = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M22 18v-2H8V4h2L7 1 4 4h2v2H2v2h4v8c0 1.1.9 2 2 2h8v2h-2l3 3 3-3h-2v-2h4zM10 8h6v6h2V8c0-1.1-.9-2-2-2h-6v2z"
},
"children": []
}]
};
exports.ic_transform = ic_transform;
|
<gh_stars>0
"use strict";

const child_process = require("child_process");
const path = require("path");

// Resolve the codemod under test from this package.
const transform = require.resolve("./transform");

// Runs jscodeshift as a child process in dry/print mode over a fixture file
// and asserts the transform removed all `Array<string>` generic syntax.
it("transforms correctly", () => {
  const result = child_process.spawnSync(
    path.join(__dirname, "node_modules", ".bin", "jscodeshift"),
    [
      "--dry",
      "--print",
      "--run-in-band",
      "-t",
      transform,
      "--extensions=ts",
      "--parser=ts",
      path.join(__dirname, "./transform.input.ts")
    ],
    {
      encoding: "utf8"
    }
  );
  // --print writes the transformed source to stdout; the old syntax must be gone.
  expect(result.stdout).toEqual(
    expect.not.stringContaining("Array<string>")
  );
});
|
#!/usr/bin/env bash
# Enable ufw on the remote host non-interactively (--force skips the prompt).
ufw_enable() {
  belt_remote_exec "ufw --force enable &>/dev/null"
}
# Disable ufw on the remote host non-interactively.
ufw_disable() {
  belt_remote_exec "ufw --force disable &>/dev/null"
}
# Allow traffic on a port (or ufw port/proto spec) on the remote host.
# $1 - port specification passed through to `ufw allow`.
ufw_allow() {
  local port="$1"
  belt_remote_exec "ufw allow \"$port\" &>/dev/null"
}
# Deny traffic on a port (or ufw port/proto spec) on the remote host.
# $1 - port specification passed through to `ufw deny`.
ufw_deny() {
  local port="$1"
  belt_remote_exec "ufw deny \"$port\" &>/dev/null"
}
# Block all traffic from a source host, inserting the rule at position 1 so
# it takes precedence over existing allow rules.
# $1 - source host/CIDR to deny.
ufw_deny_from() {
  local host="$1"
  belt_remote_exec "ufw insert 1 deny from \"$host\" to any &>/dev/null"
}
|
import fetchMock from 'fetch-mock';
import { SignupAPI } from '@authenticator/requests';
import config from '@authenticator/config';
// Exercises SignupAPI against a mocked fetch layer; each test stubs the
// endpoint's HTTP response and checks the parsed result wrapper.
describe('SignupAPI Test', (): void => {
  afterEach((): void => {
    fetchMock.restore();
  });

  test('registers a new user', async (): Promise<void> => {
    const url = `${config.api.baseURL}/api/v1/signup`;
    fetchMock.mock(url, {
      status: 201,
      body: {
        token: 'jwt-token',
        clientID: 'client-id',
      },
    });
    // NOTE(review): '<PASSWORD>'/'<EMAIL>' are anonymized fixture literals
    // from the dataset, not templates to substitute at runtime.
    const response = await SignupAPI.register({
      password: '<PASSWORD>',
      identity: '<EMAIL>',
      type: 'email',
    });
    expect(response.ok).toBe(true);
    expect(response.resultSuccess).toEqual({
      token: 'jwt-token',
      clientID: 'client-id',
    });
  });

  test('verifies a new user', async (): Promise<void> => {
    const url = `${config.api.baseURL}/api/v1/signup/verify`;
    fetchMock.mock(url, {
      status: 201,
      body: {
        token: 'jwt-token',
        clientID: 'client-id',
      },
    });
    const response = await SignupAPI.verify({
      code: '012345',
    });
    expect(response.ok).toBe(true);
    expect(response.resultSuccess).toEqual({
      token: 'jwt-token',
      clientID: 'client-id',
    });
  });
});
|
<filename>app/actions/action-browse.js<gh_stars>0
import itemsService from 'services/items';
// Action types for the browse feature, namespaced by PREFIX to avoid
// collisions with other slices.
export const PREFIX = 'actionBrowse';
export const ACTION_SEND_REQ = `${PREFIX}.ACTION_SEND_REQ`;
export const ACTION_SEND_RES = `${PREFIX}.ACTION_SEND_RES`;
export const DESTROY = `${PREFIX}.DESTROY`;
// Thunk action creator: marks a request as in flight, fetches a page of
// items starting at `start`, and dispatches the result; fetch failures are
// logged to the console.
export function getItems(start) {
  return function (dispatch) {
    dispatch({ type: ACTION_SEND_REQ });

    const onSuccess = ({ items, totalItems }) => {
      dispatch({ type: ACTION_SEND_RES, payload: { items, totalItems } });
    };
    const onFailure = (err) => console.error(err);

    itemsService.fetchItems(start).then(onSuccess).catch(onFailure);
  };
}
// Thunk action creator: signals the browse slice to reset its state.
export function destroy() {
  return function (dispatch) {
    dispatch({ type: DESTROY });
  };
}
|
package com.dam.provider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import com.dam.provider.rest.consumer.Client;
/**
 * Spring Boot entry point for the provider service; binds ConfigProperties
 * from the application configuration.
 */
@EnableConfigurationProperties (ConfigProperties.class)
@SpringBootApplication
public class Application {

    // NOTE(review): injected but not referenced in this class -- presumably
    // kept so the consumer client is instantiated eagerly at startup; confirm
    // before removing.
    @Autowired
    Client client;

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
|
<gh_stars>0
import { ErrorResponse, Response } from "@webiny/handler-graphql";
import {
CmsModelCreateInput,
CmsModelUpdateInput,
CmsContext,
CmsModelCreateFromInput
} from "~/types";
import { GraphQLSchemaPlugin } from "@webiny/handler-graphql/plugins/GraphQLSchemaPlugin";
import { Resolvers } from "@webiny/handler-graphql/types";
import { CmsModelPlugin } from "~/content/plugins/CmsModelPlugin";
// Resolver argument shapes for the content-model CRUD operations below.

interface CreateCmsModelArgs {
  data: CmsModelCreateInput;
}

interface CreateFromCmsModelFromArgs {
  modelId: string;
  data: CmsModelCreateFromInput;
}

interface ReadCmsModelArgs {
  modelId: string;
}

interface UpdateCmsModelArgs extends ReadCmsModelArgs {
  data: CmsModelUpdateInput;
}

interface DeleteCmsModelArgs {
  modelId: string;
}
// Builds the GraphQL schema plugin for content models. Query resolvers and
// read-only types are always registered; mutation resolvers and the input
// types they need are added only when the MANAGE API is active.
const plugin = (context: CmsContext): GraphQLSchemaPlugin<CmsContext> => {
    const resolvers: Resolvers<CmsContext> = {
        Query: {
            getContentModel: async (_: unknown, args: ReadCmsModelArgs, context) => {
                try {
                    const model = await context.cms.getModel(args.modelId);
                    return new Response(model);
                } catch (e) {
                    return new ErrorResponse(e);
                }
            },
            listContentModels: async (_: unknown, __: unknown, context: CmsContext) => {
                try {
                    const model = await context.cms.listModels();
                    return new Response(model);
                } catch (e) {
                    return new ErrorResponse(e);
                }
            }
        },
        CmsContentModel: {
            // `plugin` field: true when this model is registered via a
            // CmsModelPlugin rather than created through the API.
            plugin: async (model, _, context) => {
                const modelPlugin: CmsModelPlugin = context.plugins
                    .byType<CmsModelPlugin>(CmsModelPlugin.type)
                    .find((item: CmsModelPlugin) => item.contentModel.modelId === model.modelId);

                return Boolean(modelPlugin);
            }
        }
    };

    let manageSchema = "";
    if (context.cms.MANAGE) {
        resolvers.Mutation = {
            createContentModel: async (_: unknown, args: CreateCmsModelArgs, context) => {
                try {
                    const model = await context.cms.createModel(args.data);
                    return new Response(model);
                } catch (e) {
                    return new ErrorResponse(e);
                }
            },
            createContentModelFrom: async (
                _: unknown,
                args: CreateFromCmsModelFromArgs,
                context
            ) => {
                try {
                    const model = await context.cms.createModelFrom(args.modelId, args.data);
                    return new Response(model);
                } catch (e) {
                    return new ErrorResponse(e);
                }
            },
            updateContentModel: async (_: unknown, args: UpdateCmsModelArgs, context) => {
                const { modelId, data } = args;
                try {
                    const model = await context.cms.updateModel(modelId, data);
                    return new Response(model);
                } catch (e) {
                    return new ErrorResponse(e);
                }
            },
            deleteContentModel: async (_: unknown, args: DeleteCmsModelArgs, context) => {
                const { modelId } = args;
                try {
                    await context.cms.deleteModel(modelId);
                    return new Response(true);
                } catch (e) {
                    return new ErrorResponse(e);
                }
            }
        };

        // SDL appended to the base schema only for the MANAGE API.
        manageSchema = /* GraphQL */ `
            input CmsPredefinedValueInput {
                label: String!
                value: String!
            }

            input CmsPredefinedValuesInput {
                enabled: Boolean
                values: [CmsPredefinedValueInput]
            }

            input CmsFieldRendererInput {
                name: String
            }

            input CmsFieldValidationInput {
                name: String!
                message: String
                settings: JSON
            }

            input CmsContentModelFieldInput {
                id: ID!
                label: String!
                helpText: String
                placeholderText: String
                fieldId: String!
                type: String!
                multipleValues: Boolean
                predefinedValues: CmsPredefinedValuesInput
                renderer: CmsFieldRendererInput
                validation: [CmsFieldValidationInput]
                listValidation: [CmsFieldValidationInput]
                settings: JSON
            }

            input CmsContentModelCreateInput {
                name: String!
                modelId: String
                group: RefInput!
                description: String
            }

            input CmsContentModelCreateFromInput {
                name: String!
                modelId: String
                group: RefInput!
                description: String
                locale: String
            }

            input CmsContentModelUpdateInput {
                name: String
                group: RefInput
                description: String
                layout: [[ID!]!]!
                fields: [CmsContentModelFieldInput!]!
                titleFieldId: String
            }

            extend type Mutation {
                createContentModel(data: CmsContentModelCreateInput!): CmsContentModelResponse

                createContentModelFrom(
                    modelId: ID!
                    data: CmsContentModelCreateFromInput!
                ): CmsContentModelResponse

                updateContentModel(
                    modelId: ID!
                    data: CmsContentModelUpdateInput!
                ): CmsContentModelResponse

                deleteContentModel(modelId: ID!): CmsDeleteResponse
            }
        `;
    }

    return new GraphQLSchemaPlugin<CmsContext>({
        typeDefs: /* GraphQL */ `
            type CmsFieldValidation {
                name: String!
                message: String
                settings: JSON
            }

            type CmsFieldRenderer {
                name: String
            }

            type CmsPredefinedValue {
                label: String
                value: String
            }

            type CmsPredefinedValues {
                enabled: Boolean
                values: [CmsPredefinedValue]
            }

            type CmsContentModelField {
                id: ID!
                fieldId: String!
                label: String!
                helpText: String
                placeholderText: String
                type: String!
                multipleValues: Boolean
                predefinedValues: CmsPredefinedValues
                renderer: CmsFieldRenderer
                validation: [CmsFieldValidation!]
                listValidation: [CmsFieldValidation!]
                settings: JSON
            }

            type CmsContentModel {
                name: String!
                modelId: String!
                description: String
                group: CmsContentModelGroup!
                createdOn: DateTime
                savedOn: DateTime
                createdBy: CmsCreatedBy
                fields: [CmsContentModelField!]!
                lockedFields: [JSON]
                layout: [[String!]!]!
                titleFieldId: String

                # Returns true if the content model is registered via a plugin.
                plugin: Boolean!
            }

            type CmsContentModelResponse {
                data: CmsContentModel
                error: CmsError
            }

            type CmsContentModelListResponse {
                data: [CmsContentModel]
                meta: CmsListMeta
                error: CmsError
            }

            extend type Query {
                getContentModel(modelId: ID!, where: JSON, sort: String): CmsContentModelResponse
                listContentModels: CmsContentModelListResponse
            }

            ${manageSchema}
        `,
        resolvers
    });
};

export default plugin;
|
<gh_stars>0
# Python 2 script: scans consecutively numbered "<n>.in" files in the current
# directory and tallies the first line of each file.
import os.path

name = 1
potato = {}  # first-line value -> occurrence count
while os.path.isfile(str(name) + ".in"):
    inp = open(str(name) + ".in", "r")
    size = inp.readline()
    size = size[:-1]  # drop the trailing newline
    try:
        i = int(size)
        if i < 15:
            # Flag suspiciously small numeric sizes (Python 2 print statement).
            print str(name) + " : " + str(i)
    except ValueError:
        pass  # non-numeric first line: no warning, but still tallied below
    if size not in potato:
        potato[size] = 1
    else:
        potato[size] += 1
    name += 1
    inp.close()
# Print the distinct first-line values in sorted order.
print sorted(potato)
|
<gh_stars>10-100
// Doxygen-generated navigation-tree data for EulerTransformSensor.cs;
// auto-generated, do not edit by hand.
var _euler_transform_sensor_8cs =
[
    [ "EulerTransformSensor", "classdroid_1_1_runtime_1_1_prototyping_1_1_sensors_1_1_transform_1_1_euler_transform_sensor.html", "classdroid_1_1_runtime_1_1_prototyping_1_1_sensors_1_1_transform_1_1_euler_transform_sensor" ],
    [ "ObservationSpace", "_euler_transform_sensor_8cs.html#ad2add9ffe59d36cd4f7a7d8c93bc36b8", [
      [ "Local_", "_euler_transform_sensor_8cs.html#ad2add9ffe59d36cd4f7a7d8c93bc36b8aa6644602d7843839ee6197788c5aa417", null ],
      [ "Global_", "_euler_transform_sensor_8cs.html#ad2add9ffe59d36cd4f7a7d8c93bc36b8aca4edb27051ed96c597e5b398b47f17a", null ],
      [ "Environment_", "_euler_transform_sensor_8cs.html#ad2add9ffe59d36cd4f7a7d8c93bc36b8a8ef33e57a1d8fa462fcda63520cfe834", null ]
    ] ]
];
|
/**
* This file was generated by the JPA Modeler
*/
package com.example.demo.model;
import lombok.Getter;
import lombok.Setter;
import java.io.Serializable;
import java.util.List;
/**
* @author dzni0816
*/
/**
 * Serializable transfer object for customer data; accessors are generated
 * by Lombok.
 */
@Getter
@Setter
public class CustomerDto implements Serializable {

    private String id;
    private String firstName;
    private String lastName;
    private String contact;
    private String email;
    private String phone;
    private String userId;
    private String username;
    private String passNumber;
    private String countNumber;
    private String location;

    // Nested/related transfer objects carried along with the customer.
    private AddressDto address1;
    private List<LocationDto> locations;
    private List<RelatedLocationDto> relatedLocations;
    private List<StatisticsCollectorDto> statisticscollectors;
    private List<SoDto> soes;
}
|
<filename>lib/graphql/execution/typecast.rb
# frozen_string_literal: true
module GraphQL
  module Execution
    # Decides whether a GraphQL `{value, current_type}` pair can also be
    # treated as `potential_type`. The pair is castable when:
    # - the two types are the same
    # - either type is a union containing the other
    # - either type is an interface implemented by the other
    module Typecast
      # While `value` is exposed by GraphQL as an instance of `current_type`,
      # should it _also_ be treated as an instance of `potential_type`?
      #
      # This is used for checking whether fragments apply to an object.
      #
      # @param current_type [GraphQL::BaseType] the type which GraphQL is using now
      # @param potential_type [GraphQL::BaseType] can this type be used from here?
      # @param query_ctx [GraphQL::Query::Context] the context for the current query
      # @return [Boolean] true if `value` be evaluated as a `potential_type`
      def self.compatible?(current_type, potential_type, query_ctx)
        # Trivial case: identical types are always compatible.
        return true if current_type == potential_type

        current_kind = current_type.kind
        potential_kind = potential_type.kind

        if current_kind.union?
          current_type.possible_types.include?(potential_type)
        elsif potential_kind.union?
          potential_type.include?(current_type)
        elsif current_kind.interface? && potential_kind.object?
          potential_type.interfaces.include?(current_type)
        elsif potential_kind.interface? && current_kind.object?
          current_type.interfaces.include?(potential_type)
        else
          false
        end
      end
    end
  end
end
|
<filename>src/daemon/event/signal.c
#include <inttypes.h>
#include <signal.h>
#include <stdio.h>
#include <sys/signalfd.h>
#include <unistd.h>
#include "daemon/event.h"
#include "daemon/log.h"
#include "daemon/supervisor.h"
/*
 * Drain one pending signal from the supervisor's signalfd and react to it:
 * SIGTERM/SIGINT stop the supervisor loop; SIGCHLD/SIGHUP are logged and
 * ignored for now. On a short read the supervisor is stopped and the
 * handler returns immediately (bug fix: the original fell through and
 * deserialized/switched on uninitialized siginfo data).
 */
void daemon_event_signal_handle(struct daemon_supervisor *const sv)
{
    struct signalfd_siginfo info;

    if (read(sv->signal_fd, &info, sizeof(info)) != sizeof(struct signalfd_siginfo)) {
        daemon_log_fail("failed to read signalfd information");
        sv->running = 0;
        return; /* info is not valid past this point */
    }

    DAEMON_DEBUG_DESERIALIZE(&info);

    switch (info.ssi_signo) {
    case SIGCHLD:
        daemon_log_info("SIGCHLD from PID %" PRIu32 " currently ignoring...", info.ssi_pid);
        break;
    case SIGHUP:
        daemon_log_info("SIGHUP from PID %" PRIu32 " currently ignoring...", info.ssi_pid);
        break;
    case SIGTERM:
        __attribute__((fallthrough));
    case SIGINT:
        DAEMON_DEBUG("signal to stop received...");
        sv->running = 0;
        break;
    default:
        /* ssi_signo is a uint32_t: print with PRIu32 like the messages above
         * (was "%d", a signed/unsigned mismatch). */
        daemon_log_info("unhandled signal %" PRIu32, info.ssi_signo);
    }
}
|
/*
Copyright © 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package model
import "os"
// App represents the all the necessary information about clencli:
// locations, names/types and permissions of its configuration, credentials
// and log files, plus the working directory.
type App struct {
	// Name of file to look for inside the path
	Name string

	// Configuration file location (split into dir/name/type/path) and mode bits.
	ConfigurationsDir         string
	ConfigurationsName        string
	ConfigurationsType        string
	ConfigurationsPath        string
	ConfigurationsPermissions os.FileMode

	// Credentials file location and mode bits.
	CredentialsName        string
	CredentialsType        string
	CredentialsPath        string
	CredentialsPermissions os.FileMode

	// Log file location and mode bits.
	LogsDir         string
	LogsName        string
	LogsType        string
	LogsPath        string
	LogsPermissions os.FileMode

	WorkingDir string
}
// ReadMe struct of the readme.yaml: the YAML-tagged sections used to render
// a project README (logo, badges, usage, contributors, etc.). Most sections
// are optional via `omitempty`.
type ReadMe struct {
	Logo struct {
		URL   string `yaml:"url"`
		Label string `yaml:"label"`
	} `yaml:"logo,omitempty"`
	Shields struct {
		Badges []struct {
			Description string `yaml:"description"`
			Image       string `yaml:"image"`
			URL         string `yaml:"url"`
		} `yaml:"badges"`
	} `yaml:"shields,omitempty"`
	App struct {
		Name     string `yaml:"name"`
		Function string `yaml:"function"`
		ID       string `yaml:"id"`
	} `yaml:"app,omitempty"`
	Screenshots []struct {
		Caption string `yaml:"caption"`
		Label   string `yaml:"label"`
		URL     string `yaml:"url"`
	} `yaml:"screenshots,omitempty"`
	Usage         string `yaml:"usage"`
	Prerequisites []struct {
		Description string `yaml:"description"`
		Name        string `yaml:"name"`
		URL         string `yaml:"url"`
	} `yaml:"prerequisites,omitempty"`
	Installing   string   `yaml:"installing,omitempty"`
	Testing      string   `yaml:"testing,omitempty"`
	Deployment   string   `yaml:"deployment,omitempty"`
	Include      []string `yaml:"include,omitempty"`
	Contributors []struct {
		Name  string `yaml:"name"`
		Role  string `yaml:"role"`
		Email string `yaml:"email"`
	} `yaml:"contributors,omitempty"`
	Acknowledgments []struct {
		Name string `yaml:"name"`
		Role string `yaml:"role"`
	} `yaml:"acknowledgments,omitempty"`
	References []struct {
		Description string `yaml:"description"`
		Name        string `yaml:"name"`
		URL         string `yaml:"url"`
	} `yaml:"references,omitempty"`
	License   string `yaml:"license,omitempty"`
	Copyright string `yaml:"copyright,omitempty"`
}
|
<gh_stars>1-10
#####
# BA Amadou 16 187 314
# YING Xu 18 205 032
# ABOU Hamza 17 057 836
###
import os, sys
sys.path.append(os.path.dirname(os.path.join(os.getcwd())))
from sklearn.ensemble import RandomForestClassifier
from classifiers.abstract_classifier import AbstractClassifier
class RandomForestAlgorithmClassifier(AbstractClassifier):
    """Random-forest classifier wrapped in the project's AbstractClassifier API.

    :param approch: identifier of the feature/approach variant forwarded to
        ``AbstractClassifier`` (spelling kept for backward compatibility
        with existing callers).
    :param n_estimators: number of trees in the forest. Generalized from the
        previously hard-coded value; the default of 100 preserves the
        original behavior.
    """

    def __init__(self, approch='0', n_estimators=100):
        model = RandomForestClassifier(n_estimators=n_estimators)
        super().__init__(model, approch)
|
package plugin.album;
import android.content.ContentResolver;
import android.database.Cursor;
import android.os.AsyncTask;
import android.provider.MediaStore;
import android.util.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import plugin.album.data.AlbumItem;
import plugin.album.data.MediaItem;
import plugin.album.utils.FileUtils;
import plugin.album.utils.MediaCompression;
import plugin.album.utils.MediaFileGet;
import plugin.album.utils.MediaInfoExtract;
import plugin.album.utils.Utils;
/**
 * Singleton managing media picking: tracks the current selection, loads
 * album/media lists from the MediaStore, and delegates compression and
 * metadata extraction to the utils classes.
 */
public class PickerMgr {
    private static final String TAG = "PickerMgr";

    private AlbumPlugin mAlbumPlugin;
    private static final PickerMgr sInstance = new PickerMgr();
    private ContentResolver mContentResolver;

    // Current selection and cached album/result data.
    private ArrayList<MediaItem> mSelectedImages = new ArrayList<>();
    private ArrayList<AlbumItem> mAlbumItemList = new ArrayList<>();
    private List<Map<String, Object>> mSendDataList = new ArrayList<>();
    // When true, originals are sent without compression.
    private boolean mOriginalMode = false;

    private PickerMgr() {
    }

    public void initAlbumPlugin(AlbumPlugin albumPlugin) {
        mAlbumPlugin = albumPlugin;
        mContentResolver = mAlbumPlugin.gContext.getContentResolver();
    }

    public static PickerMgr getInstance() {
        return sInstance;
    }

    /** Reset all selection/cache state to defaults. */
    public void clear() {
        mOriginalMode = false;
        mSelectedImages.clear();
        mAlbumItemList.clear();
        mSendDataList.clear();
    }

    public boolean getOriginalMode() {
        return mOriginalMode;
    }

    public void setOriginalMode(boolean isOriginal) {
        mOriginalMode = isOriginal;
    }

    public ArrayList<MediaItem> getSelectedImages() {
        return mSelectedImages;
    }

    /** Toggle an item's membership in the current selection. */
    public void changeSelectItemState(MediaItem item) {
        boolean isContained = mSelectedImages.contains(item);
        if (isContained) {
            mSelectedImages.remove(item);
        } else {
            mSelectedImages.add(item);
        }
    }

    public List<Map<String, Object>> getSelectedPath() {
        return mSendDataList;
    }

    public void setSendDataList(List<Map<String, Object>> dataList) {
        mSendDataList.clear();
        if (dataList != null) {
            mSendDataList.addAll(dataList);
        }
    }

    public void mediaItemCompress(MediaItem item, MediaCompression.OnCompressionListener listener) {
        new MediaCompression().onCompress(item, mContentResolver, listener);
    }

    /**
     * Compress an image given only its file path; bails out (completes with
     * null) when the image dimensions cannot be determined.
     */
    public void mediaItemCompress(String filePath, MediaCompression.OnCompressionListener listener) {
        Size wh = FileUtils.getImageWidthHeight(filePath);
        if (wh == null || wh.getWidth() == 0 || wh.getHeight() == 0) {
            if (listener != null)
                listener.onCompressionComplete(null);
            return;
        }
        MediaItem item = new MediaItem(MediaItem.TYPE_IMAGE, "0", filePath, filePath,
                0, 0, wh.getWidth(), wh.getHeight());
        mediaItemCompress(item, listener);
    }

    /**
     * Compress a batch of media described as maps (as received from the
     * plugin channel); each compressed result is forwarded to the plugin.
     *
     * @return true when the arguments were a non-empty list and compression
     *         was started, false otherwise.
     */
    public boolean onMediaFileCompress(Object arguments) {
        if (arguments instanceof List) {
            List<Map<String, Object>> tempList = (List<Map<String, Object>>) arguments;
            if (tempList != null && !tempList.isEmpty()) {
                List<MediaItem> itemList = new ArrayList<>();
                for (Map<String, Object> item : tempList) {
                    itemList.add(new MediaItem(item));
                }
                new MediaCompression().onCompress(itemList, mContentResolver,
                        new MediaCompression.OnCompressionListener() {
                            @Override
                            public void onCompressionComplete(Map<String, Object> item) {
                                if (mAlbumPlugin != null) {
                                    mAlbumPlugin.onSendMediaFile(item);
                                }
                            }
                        });
                return true;
            }
        }
        return false;
    }

    public void getMediaInfoList(MediaInfoExtract.OnGetInfoListener listener) {
        new MediaInfoExtract().getMediaInfoList(mSelectedImages, mOriginalMode,
                mContentResolver, listener);
    }

    public ArrayList<AlbumItem> getAlbumList() {
        return mAlbumItemList;
    }

    /** Kick off an async reload of the album list for the given media type. */
    public void getAlbumList(int type) {
        new LoadAlbumList(type).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    }

    public void getImageList(long bucketId, int type, MediaFileGet.OnGetListener listener) {
        new MediaFileGet().getMediaFile(bucketId, type, 99999, mContentResolver, listener);
    }

    /** Fetch the most recent media files (capped at maxCount, default 100). */
    public void getLatestMediaFile(int maxCount, final MediaInfoExtract.OnGetInfoListener listener) {
        if (maxCount <= 0) maxCount = 100;
        new MediaFileGet().getMediaFile(0, 0, maxCount,
                mContentResolver, new MediaFileGet.OnGetListener() {
                    @Override
                    public void onGetComplete(List<MediaItem> dataList) {
                        if (dataList == null || dataList.isEmpty()) {
                            listener.onGetInfoComplete(null);
                        } else {
                            new MediaInfoExtract().getMediaInfoList(dataList, false,
                                    mContentResolver, listener);
                        }
                    }
                });
    }

    /**
     * Background task that scans the MediaStore, groups entries by bucket
     * (album), and prepends synthetic "all media"/"all videos" albums.
     * mType: 0 = images+videos, 1 = images excluding GIFs, otherwise images.
     */
    private class LoadAlbumList extends AsyncTask<Void, Void, ArrayList<AlbumItem>> {
        private int mType;

        LoadAlbumList(int type) {
            this.mType = type;
        }

        @Override
        protected ArrayList<AlbumItem> doInBackground(Void... params) {
            HashMap<Long, AlbumItem> albumHashMap = new HashMap<>();
            final String orderBy = MediaStore.Files.FileColumns.DATE_MODIFIED + " DESC";
            String[] projection = new String[]{
                    MediaStore.MediaColumns.DATA,
                    MediaStore.Images.Media.BUCKET_DISPLAY_NAME,
                    MediaStore.Images.Media.BUCKET_ID,
                    MediaStore.Files.FileColumns.MEDIA_TYPE};
            // Videos plus images that have valid dimensions.
            String selection = "bucket_id IS NOT NULL AND (media_type = 3 or (media_type = 1 AND width > 0 AND height > 0))";
            Cursor cursor = mContentResolver.query(MediaStore.Files.getContentUri("external"),
                    projection, selection, null, orderBy);
            int totalCount = 0;
            int totalVideoCount = 0;
            String allItemThumbnailPath = null;
            String allVideoThumbnailPath = null;
            if (cursor != null) {
                int bucketData = cursor.getColumnIndex(MediaStore.MediaColumns.DATA);
                int albumNameIndex = cursor.getColumnIndex(MediaStore.Images.Media.BUCKET_DISPLAY_NAME);
                int bucketIndex = cursor.getColumnIndex(MediaStore.Images.Media.BUCKET_ID);
                int typeIndex = cursor.getColumnIndex(MediaStore.Files.FileColumns.MEDIA_TYPE);
                while (cursor.moveToNext()) {
                    int type = cursor.getInt(typeIndex);
                    String path = cursor.getString(bucketData);
                    if (mType != 0) {
                        if (type != MediaStore.Files.FileColumns.MEDIA_TYPE_IMAGE) continue;
                        if (mType == 1 && Utils.isGif(path)) continue;
                    }
                    totalCount++;
                    long bucketId = cursor.getInt(bucketIndex);
                    AlbumItem albumItem = albumHashMap.get(bucketId);
                    if (albumItem == null) {
                        String albumName = cursor.getString(albumNameIndex);
                        albumHashMap.put(bucketId, new AlbumItem(bucketId, albumName, path, 1));
                        if (allItemThumbnailPath == null) {
                            allItemThumbnailPath = path;
                        }
                    } else {
                        albumItem.counter++;
                    }
                    if (type == MediaStore.Files.FileColumns.MEDIA_TYPE_VIDEO) {
                        totalVideoCount++;
                        if (allVideoThumbnailPath == null) {
                            allVideoThumbnailPath = cursor.getString(bucketData);
                        }
                    }
                }
                cursor.close();
            }
            ArrayList<AlbumItem> albumItemList = new ArrayList<>();
            for (AlbumItem albumItem : albumHashMap.values()) {
                albumItemList.add(albumItem);
            }
            Collections.sort(albumItemList, new Comparator<AlbumItem>() {
                @Override
                public int compare(AlbumItem o1, AlbumItem o2) {
                    return o2.counter - o1.counter; // descending by item count
                }
            });
            if (allItemThumbnailPath != null && totalCount > 0) {
                albumItemList.add(0, new AlbumItem(Constants.TYPE_ALL_MEDIA, mType != 0 ? "所有图片" : "图片和视频", allItemThumbnailPath, totalCount));
                if (allVideoThumbnailPath != null && totalVideoCount > 0) {
                    albumItemList.add(1, new AlbumItem(Constants.TYPE_ALL_VIDEO, "全部视频", allVideoThumbnailPath, totalVideoCount));
                }
            }
            return albumItemList;
        }

        @Override
        protected void onPostExecute(ArrayList<AlbumItem> albumItemList) {
            super.onPostExecute(albumItemList);
            mAlbumItemList = albumItemList;
        }
    }
}
|
#!/bin/bash
__dir="$(dirname "$0")"
source $__dir/config.sh
# Pushes all web sources to remote machine

# DBG+=" --dry-run" # testing
FLAGS+=$DBG
FLAGS+=" -avr"
FLAGS+=" --delete"

# excludes+="--exclude=bootstrap/cache/ "
# excludes+="--exclude=storage/framework/cache/ "
# excludes+="--exclude=storage/logs/ "
# excludes+="--exclude=storage/framework/sessions/ "
# excludes+="--exclude=storage/framework/views/ "
# excludes+="--exclude=storage/app/ "
# excludes+="--exclude=Build "
# excludes+="--exclude=.composer.lock"
# excludes+="--exclude=_build "
# excludes+="--exclude=blib "
# excludes+="--exclude=public/anyplace_architect/libs/ "
# excludes+="--exclude=public/anyplace_viewer_campus/libs/ "
# excludes+="--exclude=public/anyplace_viewer/libs/ "
excludes+="--exclude=anyplace_views "
excludes+="--exclude=logs "
excludes+="--exclude=dist "
excludes+="--exclude=tmp "
excludes+="--exclude=public/anyplace_architect/build/ "
excludes+="--exclude=public/anyplace_viewer_campus/build/ "
excludes+="--exclude=public/anyplace_viewer/build/ "
excludes+="--exclude=.env "
excludes+="--exclude=.DS_Store "
excludes+="--exclude=\".git*\" "
excludes+="--exclude=.idea "
excludes+="--exclude=node_modules "
excludes+="--exclude=bower_components "
# Generated play files
excludes+="--exclude=target "
excludes+="--exclude=test " # scala testing
lfolders="$LFOLDER"

rsync $FLAGS $LFOLDER $REMOTE:$RFOLDER $excludes
# output=$(rsync $FLAGS $LFOLDER $REMOTE:$RFOLDER $excludes)
# NOTE(review): `$output` is filtered and shown below, but the line that
# captures it is commented out above -- presumably disabled to keep live
# rsync progress; the notifier message will be empty as written. Confirm
# which behavior is intended.
if [[ $DBG != "" ]]; then
  echo ""
  echo "DRY RUN:"
  echo rsync $FLAGS $LFOLDER $REMOTE:$RFOLDER $excludes
  echo "FULL OUTPUT:"
  echo -e $output
fi

# Strip rsync's noise lines, keeping only the per-file changes.
output=$(echo "$output" | egrep -v "building file")
output=$(echo "$output" | egrep -v "sent")
output=$(echo "$output" | egrep -v "total")

# Desktop notification (optional; NOTIFIER set in config.sh).
if [[ $NOTIFIER != "" ]]; then
  $NOTIFIER -title "Synced" -message "$output"
fi
|
import {test} from "rome";
import {testLintMultiple} from "../testHelpers";

test(
	"jsx-a11y anchor has content",
	async (t) => {
		// Anchors with no perceivable content should trigger the rule.
		const invalidCases = [
			"<a />",
			"<a><TextWrapper aria-hidden /></a>",
		];
		// Anchors whose content is (potentially) visible should pass.
		const validCases = [
			"<a>Anchor Content!</a>",
			"<a><TextWrapper /></a>",
			"<a dangerouslySetInnerHTML={{ __html: 'foo' }} />",
			"<a><TextWrapper aria-hidden /> visible content</a>",
		];
		await testLintMultiple(t, [...invalidCases, ...validCases], {
			category: "lint/jsx-a11y/anchorHasContent",
		});
	},
);
|
#!/bin/sh
# Run the locally-installed prettier on every .mjs file under this script's
# directory, rewriting the files in place.
SCRIPT_DIR=$(dirname "$0")
PRETTIER="${SCRIPT_DIR}/../node_modules/.bin/prettier"
# Fixes over the previous version: variables are quoted so paths with spaces
# work, and -exec ... + replaces the backgrounded subshells, which spawned an
# unbounded number of parallel prettier processes and let the loop exit
# before formatting finished.
find "${SCRIPT_DIR}" -name '*.mjs' -exec "${PRETTIER}" --write {} +
|
def get_nth_element(list_input, n):
    """Return the item at index ``n`` of ``list_input``.

    Supports negative indices exactly like normal Python indexing.

    :param list_input: any indexable sequence (list, tuple, str, ...)
    :param n: integer index into the sequence
    :raises IndexError: if ``n`` is out of range
    """
    return list_input[n]


if __name__ == "__main__":
    # The original module-level call referenced undefined names
    # (``list_input`` and ``n``) and raised NameError on import; run a
    # concrete demo under the main guard instead.
    sample = [10, 20, 30]
    print(get_nth_element(sample, 1))
|
<filename>blosc/trunc-prec.h
/*********************************************************************
Blosc - Blocked Shuffling and Compression Library
Copyright (C) 2021 The Blosc Developers <<EMAIL>>
https://blosc.org
License: BSD 3-Clause (see LICENSE.txt)
See LICENSE.txt for details about copyright and rights to use.
**********************************************************************/
#ifndef BLOSC_TRUNC_PREC_H
#define BLOSC_TRUNC_PREC_H
#include <stdio.h>
#include <stdint.h>
/* Truncate the precision of the floating-point elements in `src`, writing
 * the result to `dest` (presumably by zeroing low-order mantissa bits so
 * that only `prec_bits` bits of precision remain — confirm against
 * trunc-prec.c, which is not visible here).
 *
 *   prec_bits  number of precision bits to keep per element
 *   typesize   size in bytes of each element (e.g. 4 for float, 8 for double)
 *   nbytes     total number of bytes in `src` (and `dest`)
 *   src        input buffer, read-only
 *   dest       output buffer; must hold at least `nbytes` bytes
 */
void truncate_precision(uint8_t prec_bits, int32_t typesize, int32_t nbytes,
const uint8_t* src, uint8_t* dest);
#endif //BLOSC_TRUNC_PREC_H
|
<reponame>stefb965/JRAW<gh_stars>1-10
package net.dean.jraw.endpoints;
import com.google.common.base.Joiner;
import net.dean.jraw.Endpoint;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* This class is responsible for the compilation of
* <a href="https://github.com/thatJavaNerd/JRAW/blob/master/ENDPOINTS.md">ENDPOINITS.md</a>, which is a collection of
* implemented and unimplemented Reddit API endpoints.
*/
public class MarkdownGenerator extends AbstractEndpointGenerator {
    /** Timestamp format used in the "generated at" header comment. */
    private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd 'at' HH:mm:ss z");

    /**
     * Instantiates a new MarkdownGenerator.
     *
     * @param endpoints A map of endpoints where the key is the category and the value is a list of endpoints in that category
     */
    public MarkdownGenerator(List<Endpoint> endpoints) {
        super(endpoints, true);
    }

    @Override
    protected void _generate(File dest, IndentAwareFileWriter bw) throws IOException {
        // http://stackoverflow.com/a/4829998/1275092
        bw.write(String.format("<!--- Generated %s. Use `gradle endpoints:update` to update. DO NOT MODIFY DIRECTLY -->%n",
                dateFormat.format(new Date())));

        // Main header
        bw.writeLine("#Endpoints\n");
        bw.writeLine("This file contains a list of all the endpoints (regardless of if they have been implemented) that " +
                "can be found at the [official reddit API docs](https://www.reddit.com/dev/api/oauth). To update this file, " +
                "run `gradle endpoints:update`.\n");

        // Summary
        bw.writeLine(String.format("So far **%s** endpoints (out of %s total) have been implemented.",
                getImplementedEndpointsCount(), getTotalEndpoints()));

        for (Map.Entry<String, List<Endpoint>> category : sortEndpoints(endpoints).entrySet()) {
            String catName = category.getKey();
            List<Endpoint> endpointList = category.getValue();

            // Category header
            bw.writeLine("\n##" + catName);
            // Start table (fixed typo: was "Implemention")
            bw.writeLine("Method|Endpoint|Implementation");
            bw.writeLine(":----:|--------|--------------");
            for (Endpoint e : endpointList) {
                StringBuilder sb = new StringBuilder();
                String implString = "None";
                if (e.isImplemented()) {
                    implString = String.format("[`%s`](%s)",
                            getStringRepresentation(e.getMethod()),
                            getJavadocUrl(e));
                }
                // ex: `GET`|[`/api/me.json`](https://www.reddit.com/dev/api#GET_api_me.json)|[`RedditClient.me()`](url and line #)
                // or: `POST`|[`/api/clear_sessions`](https://www.reddit.com/dev/api#POST_api_clear_sessions)|No
                sb.append('`').append(e.getVerb()).append("`|")
                        .append("[`").append(e.getUri()).append("`](").append(getRedditDocUrl(e)).append(")|")
                        .append(implString);
                bw.writeLine(sb.toString());
            }
        }
    }

    /**
     * Formats a method to be as succinct as possible. The basic format is
     * "{@code {class name}.{method name}({simple parameter types})". For example, a method declared as
     * "{@code public void com.foo.bar.MyClass.myMethod(String, String, int) throws IllegalArgumentException}"
     * would result in "{@code MyClass.myMethod(String, String, int)}".
     *
     * @param m The method to simplify
     * @return A condensed version of the given method
     */
    private String getStringRepresentation(Method m) {
        return m.getDeclaringClass().getSimpleName() + "." + m.getName() + "(" + formatMethodParameters(m) + ")";
    }

    /**
     * Joins the simple names of a method's parameter types with ", ", e.g. "String, int".
     * Returns the empty string for a parameterless method.
     */
    private String formatMethodParameters(Method m) {
        Class<?>[] params = m.getParameterTypes();
        if (params.length == 0) {
            return "";
        }

        String[] parameterClasses = new String[params.length];
        for (int i = 0; i < params.length; i++) {
            parameterClasses[i] = params[i].getSimpleName();
        }

        return Joiner.on(", ").join(parameterClasses);
    }
}
|
#!/usr/bin/env python
""" Extract and print all 'To:' addresses from a mailbox """
import mailbox
from email.utils import parseaddr


def main(mailbox_path):
    """Print the sorted, de-duplicated 'To:' addresses found in an mbox file.

    The original implementation used Python-2-only APIs
    (``mailbox.PortableUnixMailbox``, the ``file`` builtin, ``msg.getaddr``,
    ``dict.keys().sort()`` and the ``print`` statement); this version uses
    their Python 3 equivalents.
    """
    addresses = set()
    for msg in mailbox.mbox(mailbox_path):
        # parseaddr returns (realname, email-address); keep the address part.
        addr = parseaddr(msg.get('To', ''))[1]
        if addr:
            addresses.add(addr)
    for address in sorted(addresses):
        print(address)


if __name__ == '__main__':
    import sys
    main(sys.argv[1])
|
// Numeric status codes used in API responses.
const ResponseStatus = {
  SUCCESS: 0,
  FAILED: 4,
};

// Human-readable counterparts of ResponseStatus.
const ResponseStatusMsg = {
  SUCCESS: 'success',
  FAILED: 'failed',
};

// Helper for building the standard response envelope.
const ResponseBody = {
  // Wrap status/message/payload into the { status, message, data } shape.
  generate: (status, message, payload) => ({
    status,
    message,
    data: payload,
  }),
};

// Aggregate export so callers can import everything at once.
const HttpUtil = { ResponseStatus, ResponseStatusMsg, ResponseBody };

export { ResponseStatus, ResponseStatusMsg, ResponseBody };
export default HttpUtil;
|
#!/usr/bin/env bash
# Integration tests for pgspawn: pipe known inputs through example graphs
# and check both the produced output (via ./stdin-eq) and exit codes.
set -e

# assert_code CMD EXPECTED
#   Run CMD (word-split on purpose, output discarded) and fail the script
#   when its exit code differs from EXPECTED.
function assert_code {
  set +e   # allow the command to fail without aborting the whole script
  $1 >/dev/null 2>&1
  CODE="$?"
  set -e
  if [ "$CODE" != "$2" ]; then
    echo "exit code of '$1' is $CODE (expected $2)"
    false
  fi
}

echo abcd | pgspawn examples/id.yml | ./stdin-eq 'abcd\n'
echo abcd | pgspawn examples/id_explicite.yml | ./stdin-eq 'abcd\n'
pgspawn examples/join.yml | sort | ./stdin-eq 'one\nthree\ntwo\n'
pgspawn examples/join_explicite.yml | sort | ./stdin-eq 'one\nthree\ntwo\n'
pgspawn examples/join_pipe.yml | sort | ./stdin-eq 'one\nthree\ntwo\n'
echo -en 'a\nb\nc\n' | pgspawn examples/split_pipe.yml | sort | ./stdin-eq 'a\nb\n'
pgspawn examples/socket.yml
assert_code "pgspawn examples_bad/id.yml" 1
assert_code "pgspawn examples_bad/fd_conflict.yml" 1
assert_code "pgspawn examples/orphaned_write_end.yml" 0
assert_code "pgspawn examples/orphaned_read_end.yml" 0
assert_code "pgspawn examples_bad/write_input.yml" 1
assert_code "pgspawn examples_bad/read_output.yml" 1
# (fixed: this assertion was accidentally duplicated)
assert_code "pgspawn examples_bad/extra_keys.yml" 0
assert_code "pgspawn examples/exit_max.yml" 57
assert_code "pgspawn examples/empty.yml" 0
|
#!/bin/bash
#cp node_modules/socket.io-client/dist/socket.io.min.js ../web/js/socket.io.min.js
|
#!/bin/bash
# Launch the Husarion CORE2 serial bridge on /dev/ttyCORE2, remapping the
# node name to serial_node_2 and the cmd_vel/pose topics into the rosbot2
# namespace.
/opt/husarion/tools/rpi-linux/ros-core2-client /dev/ttyCORE2 __name:=serial_node_2 cmd_vel:=rosbot2/cmd_vel pose:=rosbot2/pose
|
# Move to project directory; abort if it is missing so the `rm -rf`
# cleanup below can never run in the wrong directory (the original did not
# check the cd result).
cd App/ || exit 1

# Build Maven project
#mvn package

# Dump output to terminal: prefer the sorted result when the sorting stage
# produced one, otherwise fall back to the raw output.
FILE=sorted/part-r-00000
if test -f "$FILE";
then
    cat "$FILE"
    rm -rf output
    rm -rf sorted
else
    cat output/part-r-00000
    rm -rf output
fi
|
<gh_stars>1-10
// Import packages
const Discord = require('discord.js');
const client = new Discord.Client({ partials: ['MESSAGE', 'CHANNEL', 'REACTION'] })
const fs = require('fs');

// Set variables
client.commands = new Discord.Collection();
client.events = new Discord.Collection();
client.config = require('./config.json');
client.commandTimeouts = new Map();
client.database = require('./database.json');
client.chalk = require('chalk');
client.modules = {}; // plain object literal instead of `new Object()`
client.modules.cooldowns = new Discord.Collection();
client.modules.presets = require('./discord/modules/presets.js');
const chalk = client.chalk;
const npm = require('./package.json');
// Bug fix: chalk has no `bgOrange` style (background colours are
// bgBlack..bgWhite plus bright variants), so the original threw a
// TypeError whenever the versions mismatched.  Use bgYellow instead.
if (npm.version !== client.database.version.id) console.warn(chalk.bgYellow('WARN') + ' NPM version ID does not match database version ID.');

// Keep stored database up-to-date (flushed to disk once per second)
setInterval(() => {
  fs.writeFileSync('./database.json', JSON.stringify(client.database, null, 2));
}, 1000);

// Web server so you can check uptime
const express = require('express');
const app = express();
const port = 3000;
app.get('/', (req, res) => res.send('Bot online!'));
app.listen(port, () =>
  console.log(`Web server online.`)
);

// Bot startup sequence starts here
console.log('Bot starting...');

// Event Listener
console.log('Loading Events...');
fs.readdir('./discord/events/', (err, files) => {
  if (err) return console.error(`Unable to fetch events :\n${err.trace}`)
  files.forEach(file => {
    // For each file found in the directory, run the following code, where "file" is the file's name:
    // Ignore non-js files
    if (!file.endsWith(".js")) return;
    // Copy the function over to this variable, while also sending the correct arguments and the client variable to the function
    // event.bind(null, client) copies the entire function over, then sets it to automatically prepend client to its list of args it recieves.
    let eventFunction = require(`./discord/events/${file}`);
    // Get event name, so we know which event this actually is
    let eventName = file.split('.')[0];
    client.on(eventName, eventFunction.bind(null, client));
    client.events.set(eventName, eventFunction.bind(null, client).toString());
  });
});

// Command Listener
console.log('Fetching Commands...');
fs.readdir('./discord/commands/', (err, files) => {
  if (err) return console.error(`Unable to fetch commands :\n${err.trace}`)
  files.forEach(file => {
    // For each file found in the directory, run the following code, where "file" is the file's name:
    // Ignore non-js files
    if (!file.endsWith(".js")) return;
    // Like in the events listener, set the function and the name
    let commandExports = require(`./discord/commands/${file}`);
    let commandName = file.split(".")[0];
    // Set the cooldown to the main list
    commandExports.cooldown(client);
    client.commandTimeouts.set(commandName, new Map())
    // Load the actual command to the client.commands collection
    client.commands.set(commandName, commandExports);
  });
});

if (process.env.DISCORD_TOKEN) {
  client.login();
} else { throw new Error('No token specified in env file.') }
|
#!/bin/bash
# note, TDNN is the same as what we used to call multisplice.
# This version of the script, nnet3/chain/train_tdnn.sh, is for 'chain' systems.
# Copyright 2012-2015 Johns Hopkins University (Author: Daniel Povey).
# 2013 Xiaohui Zhang
# 2013 Guoguo Chen
# 2014 Vimal Manohar
# 2014 Vijayaditya Peddinti
# Apache 2.0.
# Begin configuration section.
cmd=run.pl
num_epochs=10 # Number of epochs of training;
# the number of iterations is worked out from this.
# Be careful with this: we actually go over the data
# num-epochs * frame-subsampling-factor times, due to
# using different data-shifts.
truncate_deriv_weights=0 # can be used to set to zero the weights of derivs from frames
# near the edges. (counts subsampled frames).
apply_deriv_weights=true
initial_effective_lrate=0.0002
final_effective_lrate=0.00002
extra_left_context=0 # actually for recurrent setups.
pnorm_input_dim=3000
pnorm_output_dim=300
relu_dim= # you can use this to make it use ReLU's instead of p-norms.
jesus_opts= # opts to steps/nnet3/make_jesus_configs.py.
# If nonempty, assumes you want to use the jesus nonlinearity,
# and you should supply various options to that script in
# this string.
rand_prune=4.0 # Relates to a speedup we do for LDA.
minibatch_size=512 # This default is suitable for GPU-based training.
# Set it to 128 for multi-threaded CPU-based training.
lm_opts= # options to chain-est-phone-lm
l2_regularize=0.0
leaky_hmm_coefficient=0.00001
xent_regularize=0.0
frames_per_iter=800000 # each iteration of training, see this many [input]
# frames per job. This option is passed to get_egs.sh.
# Aim for about a minute of training time
right_tolerance=5 # tolerance at the same frame-rate as the alignment directory.
left_tolerance=5 # tolerance at the same frame-rate as the alignment directory.
num_jobs_initial=1 # Number of neural net jobs to run in parallel at the start of training
num_jobs_final=8 # Number of neural net jobs to run in parallel at the end of training
frame_subsampling_factor=3 # ratio of frames-per-second of features we train
# on, to chain model's output
alignment_subsampling_factor=3 # ratio of frames-per-second of input alignments
# to chain model's output
get_egs_stage=0 # can be used for rerunning after partial
online_ivector_dir=
max_param_change=2.0
remove_egs=true # set to false to disable removing egs after training is done.
max_models_combine=20 # The "max_models_combine" is the maximum number of models we give
# to the final 'combine' stage, but these models will themselves be averages of
# iteration-number ranges.
ngram_order=3
shuffle_buffer_size=5000 # This "buffer_size" variable controls randomization of the samples
# on each iter. You could set it to 0 or to a large value for complete
# randomization, but this would both consume memory and cause spikes in
# disk I/O. Smaller is easier on disk and memory but less random. It's
# not a huge deal though, as samples are anyway randomized right at the start.
# (the point of this is to get data in different minibatches on different iterations,
# since in the preconditioning method, 2 samples in the same minibatch can
# affect each others' gradients.
final_layer_normalize_target=1.0 # you can set this to less than one if you
# think the final layer is learning too fast
# compared with the other layers.
add_layers_period=2 # by default, add new layers every 2 iterations.
stage=-7
exit_stage=-100 # you can set this to terminate the training early. Exits before running this stage
# count space-separated fields in splice_indexes to get num-hidden-layers.
splice_indexes="-4,-3,-2,-1,0,1,2,3,4 0 -2,2 0 -4,4 0"
pool_type='none'
pool_window=
pool_lpfilter_width=
# Format : layer<hidden_layer>/<frame_indices>....layer<hidden_layer>/<frame_indices> "
# note: hidden layers which are composed of one or more components,
# so hidden layer indexing is different from component count
randprune=4.0 # speeds up LDA.
use_gpu=true # if true, we run on GPU.
cleanup=true
egs_dir=
max_lda_jobs=20 # use no more than 20 jobs for the LDA accumulation.
lda_opts=
egs_opts=
transform_dir= # If supplied, this dir used instead of latdir to find transforms.
cmvn_opts= # will be passed to get_lda.sh and get_egs.sh, if supplied.
# only relevant for "raw" features, not lda.
feat_type=raw # or set to 'lda' to use LDA features.
frames_per_eg=25 # number of frames of output per chunk. To be passed on to get_egs.sh.
left_deriv_truncate= # number of time-steps to avoid using the deriv of, on the left.
right_deriv_truncate= # number of time-steps to avoid using the deriv of, on the right.
# End configuration section.
trap 'for pid in $(jobs -pr); do kill -TERM $pid; done' INT QUIT TERM
echo "$0 $@" # Print the command line for logging
if [ -f path.sh ]; then . ./path.sh; fi
. parse_options.sh || exit 1;
if [ $# != 4 ]; then
echo "Usage: $0 [opts] <data> <tree-dir> <phone-lattice-dir> <exp-dir>"
echo " e.g.: $0 data/train exp/chain/tri3b_tree exp/tri3_latali exp/chain/tdnn_a"
echo ""
echo "Main options (for others, see top of script file)"
echo " --config <config-file> # config file containing options"
echo " --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs."
echo " --num-epochs <#epochs|15> # Number of epochs of training"
echo " --initial-effective-lrate <lrate|0.02> # effective learning rate at start of training."
echo " --final-effective-lrate <lrate|0.004> # effective learning rate at end of training."
echo " # data, 0.00025 for large data"
echo " --num-hidden-layers <#hidden-layers|2> # Number of hidden layers, e.g. 2 for 3 hours of data, 4 for 100hrs"
echo " --add-layers-period <#iters|2> # Number of iterations between adding hidden layers"
echo " --num-jobs-initial <num-jobs|1> # Number of parallel jobs to use for neural net training, at the start."
echo " --num-jobs-final <num-jobs|8> # Number of parallel jobs to use for neural net training, at the end"
echo " --num-threads <num-threads|16> # Number of parallel threads per job, for CPU-based training (will affect"
echo " # results as well as speed; may interact with batch size; if you increase"
echo " # this, you may want to decrease the batch size."
echo " --parallel-opts <opts|\"-pe smp 16 -l ram_free=1G,mem_free=1G\"> # extra options to pass to e.g. queue.pl for processes that"
echo " # use multiple threads... note, you might have to reduce mem_free,ram_free"
echo " # versus your defaults, because it gets multiplied by the -pe smp argument."
echo " --io-opts <opts|\"-tc 10\"> # Options given to e.g. queue.pl for jobs that do a lot of I/O."
echo " --minibatch-size <minibatch-size|128> # Size of minibatch to process (note: product with --num-threads"
echo " # should not get too large, e.g. >2k)."
echo " --frames-per-iter <#frames|400000> # Number of frames of data to process per iteration, per"
echo " # process."
echo " --splice-indexes <string|layer0/-4:-3:-2:-1:0:1:2:3:4> "
echo " # Frame indices used for each splice layer."
echo " # Format : layer<hidden_layer_index>/<frame_indices>....layer<hidden_layer>/<frame_indices> "
echo " # (note: we splice processed, typically 40-dimensional frames"
echo " --lda-dim <dim|''> # Dimension to reduce spliced features to with LDA"
echo " --stage <stage|-4> # Used to run a partially-completed training process from somewhere in"
echo " # the middle."
exit 1;
fi
data=$1
treedir=$2
latdir=$3
dir=$4
# Check some files.
for f in $data/feats.scp $treedir/ali.1.gz $treedir/final.mdl $treedir/tree \
$latdir/lat.1.gz $latdir/final.mdl $latdir/num_jobs $latdir/splice_opts; do
[ ! -f $f ] && echo "$0: no such file $f" && exit 1;
done
# Set some variables.
nj=`cat $treedir/num_jobs` || exit 1; # number of jobs in alignment dir...
sdata=$data/split$nj
utils/split_data.sh $data $nj
mkdir -p $dir/log
echo $nj > $dir/num_jobs
cp $treedir/tree $dir
# First work out the feature and iVector dimension, needed for tdnn config creation.
case $feat_type in
raw) feat_dim=$(feat-to-dim --print-args=false scp:$data/feats.scp -) || \
{ echo "$0: Error getting feature dim"; exit 1; }
;;
lda) [ ! -f $treedir/final.mat ] && echo "$0: With --feat-type lda option, expect $treedir/final.mat to exist."
# get num-rows in lda matrix, which is the lda feature dim.
feat_dim=$(matrix-dim --print-args=false $treedir/final.mat | cut -f 1)
;;
*)
echo "$0: Bad --feat-type '$feat_type';"; exit 1;
esac
if [ -z "$online_ivector_dir" ]; then
ivector_dim=0
else
ivector_dim=$(feat-to-dim scp:$online_ivector_dir/ivector_online.scp -) || exit 1;
fi
if [ $stage -le -7 ]; then
echo "$0: creating phone language-model"
$cmd $dir/log/make_phone_lm.log \
chain-est-phone-lm $lm_opts \
"ark:gunzip -c $treedir/ali.*.gz | ali-to-phones $treedir/final.mdl ark:- ark:- |" \
$dir/phone_lm.fst || exit 1
fi
if [ $stage -le -6 ]; then
echo "$0: creating denominator FST"
copy-transition-model $treedir/final.mdl $dir/0.trans_mdl
$cmd $dir/log/make_den_fst.log \
chain-make-den-fst $dir/tree $dir/0.trans_mdl $dir/phone_lm.fst \
$dir/den.fst $dir/normalization.fst || exit 1;
fi
# work out num-leaves
num_leaves=$(am-info $dir/0.trans_mdl | grep -w pdfs | awk '{print $NF}') || exit 1;
[ $num_leaves -gt 0 ] || exit 1;
if [ $stage -le -5 ]; then
echo "$0: creating neural net configs";
if [ ! -z "$jesus_opts" ]; then
$cmd $dir/log/make_configs.log \
python steps/nnet3/make_jesus_configs.py \
--xent-regularize=$xent_regularize \
--include-log-softmax=false \
--splice-indexes "$splice_indexes" \
--feat-dim $feat_dim \
--ivector-dim $ivector_dim \
$jesus_opts \
--num-targets $num_leaves \
$dir/configs || exit 1;
else
[ $xent_regularize != "0.0" ] && \
echo "$0: --xent-regularize option not supported by tdnn/make_configs.py." && exit 1;
if [ ! -z "$relu_dim" ]; then
dim_opts="--relu-dim $relu_dim"
else
dim_opts="--pnorm-input-dim $pnorm_input_dim --pnorm-output-dim $pnorm_output_dim"
fi
# create the config files for nnet initialization
pool_opts=
pool_opts=$pool_opts${pool_type:+" --pool-type $pool_type "}
pool_opts=$pool_opts${pool_window:+" --pool-window $pool_window "}
pool_opts=$pool_opts${pool_lpfilter_width:+" --pool-lpfilter-width $pool_lpfilter_width "}
python steps/nnet3/tdnn/make_configs.py $pool_opts \
--include-log-softmax=false \
--final-layer-normalize-target $final_layer_normalize_target \
--splice-indexes "$splice_indexes" \
--feat-dim $feat_dim \
--ivector-dim $ivector_dim \
$dim_opts \
--num-targets $num_leaves \
--use-presoftmax-prior-scale false \
$dir/configs || exit 1;
fi
# Initialize as "raw" nnet, prior to training the LDA-like preconditioning
# matrix. This first config just does any initial splicing that we do;
# we do this as it's a convenient way to get the stats for the 'lda-like'
# transform.
$cmd $dir/log/nnet_init.log \
nnet3-init --srand=-2 $dir/configs/init.config $dir/init.raw || exit 1;
fi
# sourcing the "vars" below sets
# left_context=(something)
# right_context=(something)
# num_hidden_layers=(something)
. $dir/configs/vars || exit 1;
# the next 2 lines are in case the configs were created by an older
# config-generating script, which writes to left_context and right_context
# instead of model_left_context and model_right_context.
[ -z $model_left_context ] && model_left_context=$left_context
[ -z $model_right_context ] && model_right_context=$right_context
! [ "$num_hidden_layers" -gt 0 ] && echo \
"$0: Expected num_hidden_layers to be defined" && exit 1;
[ -z "$transform_dir" ] && transform_dir=$latdir
if [ $stage -le -4 ] && [ -z "$egs_dir" ]; then
extra_opts=()
[ ! -z "$cmvn_opts" ] && extra_opts+=(--cmvn-opts "$cmvn_opts")
[ ! -z "$feat_type" ] && extra_opts+=(--feat-type $feat_type)
[ ! -z "$online_ivector_dir" ] && extra_opts+=(--online-ivector-dir $online_ivector_dir)
extra_opts+=(--transform-dir $transform_dir)
# we need a bit of extra left-context and right-context to allow for frame
# shifts (we use shifted version of the data for more variety).
extra_opts+=(--left-context $[$model_left_context+$frame_subsampling_factor/2+$extra_left_context])
extra_opts+=(--right-context $[$model_right_context+$frame_subsampling_factor/2])
echo "$0: calling get_egs.sh"
steps/nnet3/chain/get_egs.sh $egs_opts "${extra_opts[@]}" \
--frames-per-iter $frames_per_iter --stage $get_egs_stage \
--cmd "$cmd" \
--right-tolerance "$right_tolerance" \
--left-tolerance "$left_tolerance" \
--frames-per-eg $frames_per_eg \
--frame-subsampling-factor $frame_subsampling_factor \
--alignment-subsampling-factor $alignment_subsampling_factor \
$data $dir $latdir $dir/egs || exit 1;
fi
[ -z $egs_dir ] && egs_dir=$dir/egs
if [ "$feat_dim" != "$(cat $egs_dir/info/feat_dim)" ]; then
echo "$0: feature dimension mismatch with egs in $egs_dir: $feat_dim vs $(cat $egs_dir/info/feat_dim)";
exit 1;
fi
if [ "$ivector_dim" != "$(cat $egs_dir/info/ivector_dim)" ]; then
echo "$0: ivector dimension mismatch with egs in $egs_dir: $ivector_dim vs $(cat $egs_dir/info/ivector_dim)";
exit 1;
fi
# copy any of the following that exist, to $dir.
cp $egs_dir/{cmvn_opts,splice_opts,final.mat} $dir 2>/dev/null
# confirm that the egs_dir has the necessary context (especially important if
# the --egs-dir option was used on the command line).
egs_left_context=$(cat $egs_dir/info/left_context) || exit -1
egs_right_context=$(cat $egs_dir/info/right_context) || exit -1
( [ $egs_left_context -lt $model_left_context ] || \
[ $egs_right_context -lt $model_right_context ] ) && \
echo "$0: egs in $egs_dir have too little context" && exit -1;
frames_per_eg=$(cat $egs_dir/info/frames_per_eg) || { echo "error: no such file $egs_dir/info/frames_per_eg"; exit 1; }
# (fixed: the error message previously referred to frames_per_eg instead of num_archives)
num_archives=$(cat $egs_dir/info/num_archives) || { echo "error: no such file $egs_dir/info/num_archives"; exit 1; }
num_archives_expanded=$[$num_archives*$frame_subsampling_factor]
[ $num_jobs_initial -gt $num_jobs_final ] && \
echo "$0: --initial-num-jobs cannot exceed --final-num-jobs" && exit 1;
[ $num_jobs_final -gt $num_archives_expanded ] && \
echo "$0: --final-num-jobs cannot exceed #archives $num_archives_expanded." && exit 1;
if [ $stage -le -3 ]; then
echo "$0: getting preconditioning matrix for input features."
num_lda_jobs=$num_archives
[ $num_lda_jobs -gt $max_lda_jobs ] && num_lda_jobs=$max_lda_jobs
# Write stats with the same format as stats for LDA.
$cmd JOB=1:$num_lda_jobs $dir/log/get_lda_stats.JOB.log \
nnet3-chain-acc-lda-stats --rand-prune=$rand_prune \
$dir/init.raw "ark:$egs_dir/cegs.JOB.ark" $dir/JOB.lda_stats || exit 1;
all_lda_accs=$(for n in $(seq $num_lda_jobs); do echo $dir/$n.lda_stats; done)
$cmd $dir/log/sum_transform_stats.log \
sum-lda-accs $dir/lda_stats $all_lda_accs || exit 1;
rm $all_lda_accs || exit 1;
# this computes a fixed affine transform computed in the way we described in
# Appendix C.6 of http://arxiv.org/pdf/1410.7455v6.pdf; it's a scaled variant
# of an LDA transform but without dimensionality reduction.
$cmd $dir/log/get_transform.log \
nnet-get-feature-transform $lda_opts $dir/lda.mat $dir/lda_stats || exit 1;
ln -sf ../lda.mat $dir/configs/lda.mat
fi
if [ $stage -le -1 ]; then
# Add the first layer; this will add in the lda.mat and
# presoftmax_prior_scale.vec.
echo "$0: creating initial raw model"
$cmd $dir/log/add_first_layer.log \
nnet3-init --srand=-1 $dir/init.raw $dir/configs/layer1.config $dir/0.raw || exit 1;
# The model-format for a 'chain' acoustic model is just the transition
# model and then the raw nnet, so we can use 'cat' to create this, as
# long as they have the same mode (binary or not binary).
# We ensure that they have the same mode (even if someone changed the
# script to make one or both of them text mode) by copying them both
# before concatenating them.
echo "$0: creating initial model"
$cmd $dir/log/init_model.log \
nnet3-am-init $dir/0.trans_mdl $dir/0.raw $dir/0.mdl || exit 1;
fi
echo $frame_subsampling_factor >$dir/frame_subsampling_factor || exit 1;
# set num_iters so that as close as possible, we process the data $num_epochs
# times, i.e. $num_iters*$avg_num_jobs) == $num_epochs*$num_archives_expanded
# where avg_num_jobs=(num_jobs_initial+num_jobs_final)/2.
num_archives_to_process=$[$num_epochs*$num_archives_expanded]
num_archives_processed=0
num_iters=$[($num_archives_to_process*2)/($num_jobs_initial+$num_jobs_final)]
# Bug fix: finish_add_layers_iter must be computed *before* the sanity
# check below uses it; previously the check read an empty variable, so the
# "Insufficient epochs" guard compared against the wrong value.
finish_add_layers_iter=$[$num_hidden_layers * $add_layers_period]
! [ $num_iters -gt $[$finish_add_layers_iter+2] ] \
  && echo "$0: Insufficient epochs" && exit 1
echo "$0: Will train for $num_epochs epochs = $num_iters iterations"
if $use_gpu; then
parallel_suffix=""
train_queue_opt="--gpu 1"
combine_queue_opt="--gpu 1"
prior_gpu_opt="--use-gpu=yes"
prior_queue_opt="--gpu 1"
parallel_train_opts=
if ! cuda-compiled; then
echo "$0: WARNING: you are running with one thread but you have not compiled"
echo " for CUDA. You may be running a setup optimized for GPUs. If you have"
echo " GPUs and have nvcc installed, go to src/ and do ./configure; make"
exit 1
fi
else
echo "$0: without using a GPU this will be very slow. nnet3 does not yet support multiple threads."
parallel_train_opts="--use-gpu=no"
train_queue_opt="--num-threads $num_threads"
combine_queue_opt="" # the combine stage will be quite slow if not using
# GPU, as we didn't enable that program to use
# multiple threads.
prior_gpu_opt="--use-gpu=no"
prior_queue_opt=""
fi
approx_iters_per_epoch_final=$[$num_archives_expanded/$num_jobs_final]
# First work out how many iterations we want to combine over in the final
# nnet3-combine-fast invocation. (We may end up subsampling from these if the
# number exceeds max_model_combine). The number we use is:
# min(max(max_models_combine, approx_iters_per_epoch_final),
# 1/2 * iters_after_last_layer_added)
num_iters_combine=$max_models_combine
if [ $num_iters_combine -lt $approx_iters_per_epoch_final ]; then
num_iters_combine=$approx_iters_per_epoch_final
fi
half_iters_after_add_layers=$[($num_iters-$finish_add_layers_iter)/2]
if [ $num_iters_combine -gt $half_iters_after_add_layers ]; then
num_iters_combine=$half_iters_after_add_layers
fi
first_model_combine=$[$num_iters-$num_iters_combine+1]
x=0
deriv_time_opts=
[ ! -z "$left_deriv_truncate" ] && deriv_time_opts="--optimization.min-deriv-time=$left_deriv_truncate"
[ ! -z "$right_deriv_truncate" ] && \
deriv_time_opts="$deriv_time_opts --optimization.max-deriv-time=$((frames_per_eg - right_deriv_truncate))"
while [ $x -lt $num_iters ]; do
[ $x -eq $exit_stage ] && echo "$0: Exiting early due to --exit-stage $exit_stage" && exit 0;
this_num_jobs=$(perl -e "print int(0.5+$num_jobs_initial+($num_jobs_final-$num_jobs_initial)*$x/$num_iters);")
ilr=$initial_effective_lrate; flr=$final_effective_lrate; np=$num_archives_processed; nt=$num_archives_to_process;
this_learning_rate=$(perl -e "print (($x + 1 >= $num_iters ? $flr : $ilr*exp($np*log($flr/$ilr)/$nt))*$this_num_jobs);");
echo "On iteration $x, learning rate is $this_learning_rate."
if [ $x -ge 0 ] && [ $stage -le $x ]; then
# Set off jobs doing some diagnostics, in the background.
# Use the egs dir from the previous iteration for the diagnostics
$cmd $dir/log/compute_prob_valid.$x.log \
nnet3-chain-compute-prob --l2-regularize=$l2_regularize --leaky-hmm-coefficient=$leaky_hmm_coefficient --xent-regularize=$xent_regularize \
"nnet3-am-copy --raw=true $dir/$x.mdl -|" $dir/den.fst \
"ark,bg:nnet3-chain-merge-egs ark:$egs_dir/valid_diagnostic.cegs ark:- |" &
$cmd $dir/log/compute_prob_train.$x.log \
nnet3-chain-compute-prob --l2-regularize=$l2_regularize --leaky-hmm-coefficient=$leaky_hmm_coefficient --xent-regularize=$xent_regularize \
"nnet3-am-copy --raw=true $dir/$x.mdl -|" $dir/den.fst \
"ark,bg:nnet3-chain-merge-egs ark:$egs_dir/train_diagnostic.cegs ark:- |" &
if [ $x -gt 0 ]; then
# This doesn't use the egs, it only shows the relative change in model parameters.
$cmd $dir/log/progress.$x.log \
nnet3-show-progress --use-gpu=no "nnet3-am-copy --raw=true $dir/$[$x-1].mdl - |" \
"nnet3-am-copy --raw=true $dir/$x.mdl - |" '&&' \
nnet3-am-info $dir/$x.mdl &
fi
echo "Training neural net (pass $x)"
if [ $x -gt 0 ] && \
[ $x -le $[($num_hidden_layers-1)*$add_layers_period] ] && \
[ $[$x%$add_layers_period] -eq 0 ]; then
do_average=false # if we've just mixed up, don't do averaging but take the
# best.
cur_num_hidden_layers=$[1+$x/$add_layers_period]
config=$dir/configs/layer$cur_num_hidden_layers.config
mdl="nnet3-am-copy --raw=true --learning-rate=$this_learning_rate $dir/$x.mdl - | nnet3-init --srand=$x - $config - |"
cache_io_opts=""
else
do_average=true
if [ $x -eq 0 ]; then do_average=false; fi # on iteration 0, pick the best, don't average.
mdl="nnet3-am-copy --raw=true --learning-rate=$this_learning_rate $dir/$x.mdl -|"
cache_io_opts="--read-cache=$dir/cache.$x"
fi
if $do_average; then
this_minibatch_size=$minibatch_size
this_max_param_change=$max_param_change
else
# on iteration zero or when we just added a layer, use a smaller minibatch
# size (and we will later choose the output of just one of the jobs): the
# model-averaging isn't always helpful when the model is changing too fast
# (i.e. it can worsen the objective function), and the smaller minibatch
# size will help to keep the update stable.
this_minibatch_size=$[$minibatch_size/2];
this_max_param_change=$(perl -e "print ($max_param_change/sqrt(2));")
fi
rm $dir/.error 2>/dev/null
(
trap 'for pid in $(jobs -pr); do kill -TERM $pid; done' INT QUIT TERM
# this sub-shell is so that when we "wait" below,
# we only wait for the training jobs that we just spawned,
# not the diagnostic jobs that we spawned above.
# We can't easily use a single parallel SGE job to do the main training,
# because the computation of which archive and which --frame option
# to use for each job is a little complex, so we spawn each one separately.
for n in $(seq $this_num_jobs); do
k=$[$num_archives_processed + $n - 1]; # k is a zero-based index that we'll derive
# the other indexes from.
archive=$[($k%$num_archives)+1]; # work out the 1-based archive index.
frame_shift=$[($k/$num_archives)%$frame_subsampling_factor];
if [ $n -eq 1 ]; then
# opts for computation cache (storing compiled computation).
this_cache_io_opts="$cache_io_opts --write-cache=$dir/cache.$[$x+1]"
else
this_cache_io_opts="$cache_io_opts"
fi
$cmd $train_queue_opt $dir/log/train.$x.$n.log \
nnet3-chain-train --apply-deriv-weights=$apply_deriv_weights \
--l2-regularize=$l2_regularize --leaky-hmm-coefficient=$leaky_hmm_coefficient --xent-regularize=$xent_regularize \
$this_cache_io_opts $parallel_train_opts $deriv_time_opts \
--max-param-change=$this_max_param_change \
--print-interval=10 "$mdl" $dir/den.fst \
"ark,bg:nnet3-chain-copy-egs --truncate-deriv-weights=$truncate_deriv_weights --frame-shift=$frame_shift ark:$egs_dir/cegs.$archive.ark ark:- | nnet3-chain-shuffle-egs --buffer-size=$shuffle_buffer_size --srand=$x ark:- ark:-| nnet3-chain-merge-egs --minibatch-size=$this_minibatch_size ark:- ark:- |" \
$dir/$[$x+1].$n.raw || touch $dir/.error &
done
wait
)
# the error message below is not that informative, but $cmd will
# have printed a more specific one.
[ -f $dir/.error ] && echo "$0: error on iteration $x of training" && exit 1;
models_to_average=$(steps/nnet3/get_successful_models.py --difference-threshold 0.1 $this_num_jobs $dir/log/train.$x.%.log)
nnets_list=
for n in $models_to_average; do
nnets_list="$nnets_list $dir/$[$x+1].$n.raw"
done
if $do_average; then
# average the output of the different jobs.
$cmd $dir/log/average.$x.log \
nnet3-average $nnets_list - \| \
nnet3-am-copy --set-raw-nnet=- $dir/$x.mdl $dir/$[$x+1].mdl || exit 1;
else
# choose the best from the different jobs.
n=$(perl -e '($nj,$pat)=@ARGV; $best_n=1; $best_logprob=-1.0e+10; for ($n=1;$n<=$nj;$n++) {
$fn = sprintf($pat,$n); open(F, "<$fn") || die "Error opening log file $fn";
undef $logprob; while (<F>) { if (m/log-prob-per-frame=(\S+)/) { $logprob=$1; } }
close(F); if (defined $logprob && $logprob > $best_logprob) { $best_logprob=$logprob;
$best_n=$n; } } print "$best_n\n"; ' $num_jobs_nnet $dir/log/train.$x.%d.log) || exit 1;
[ -z "$n" ] && echo "Error getting best model" && exit 1;
$cmd $dir/log/select.$x.log \
nnet3-am-copy --set-raw-nnet=$dir/$[$x+1].$n.raw $dir/$x.mdl $dir/$[$x+1].mdl || exit 1;
fi
rm $nnets_list
[ ! -f $dir/$[$x+1].mdl ] && exit 1;
if [ -f $dir/$[$x-1].mdl ] && $cleanup && \
[ $[($x-1)%10] -ne 0 ] && [ $[$x-1] -lt $first_model_combine ]; then
rm $dir/$[$x-1].mdl
fi
fi
rm $dir/cache.$x 2>/dev/null
x=$[$x+1]
num_archives_processed=$[$num_archives_processed+$this_num_jobs]
done
if [ $stage -le $num_iters ]; then
echo "Doing final combination to produce final.mdl"
# Now do combination. In the nnet3 setup, the logic
# for doing averaging of subsets of the models in the case where
# there are too many models to reliably esetimate interpolation
# factors (max_models_combine) is moved into the nnet3-combine
nnets_list=()
for n in $(seq 0 $[num_iters_combine-1]); do
iter=$[$first_model_combine+$n]
[ ! -f $dir/$iter.mdl ] && echo "Expected $mdl to exist" && exit 1;
mdl="nnet3-am-copy --raw=true $dir/$iter.mdl - |"
nnets_list[$n]="$mdl";
done
# Below, we use --use-gpu=no to disable nnet3-combine-fast from using a GPU,
# as if there are many models it can give out-of-memory error; and we set
# num-threads to 8 to speed it up (this isn't ideal...)
$cmd $combine_queue_opt $dir/log/combine.log \
nnet3-chain-combine --num-iters=40 --l2-regularize=$l2_regularize --leaky-hmm-coefficient=$leaky_hmm_coefficient \
--enforce-sum-to-one=true --enforce-positive-weights=true \
--verbose=3 $dir/den.fst "${nnets_list[@]}" "ark,bg:nnet3-chain-merge-egs --minibatch-size=$minibatch_size ark:$egs_dir/combine.cegs ark:-|" \
"|nnet3-am-copy --set-raw-nnet=- $dir/$first_model_combine.mdl $dir/final.mdl" || exit 1;
# Compute the probability of the final, combined model with
# the same subset we used for the previous compute_probs, as the
# different subsets will lead to different probs.
$cmd $dir/log/compute_prob_valid.final.log \
nnet3-chain-compute-prob --l2-regularize=$l2_regularize --leaky-hmm-coefficient=$leaky_hmm_coefficient --xent-regularize=$xent_regularize \
"nnet3-am-copy --raw=true $dir/final.mdl - |" $dir/den.fst \
"ark,bg:nnet3-chain-merge-egs ark:$egs_dir/valid_diagnostic.cegs ark:- |" &
$cmd $dir/log/compute_prob_train.final.log \
nnet3-chain-compute-prob --l2-regularize=$l2_regularize --leaky-hmm-coefficient=$leaky_hmm_coefficient --xent-regularize=$xent_regularize \
"nnet3-am-copy --raw=true $dir/final.mdl - |" $dir/den.fst \
"ark,bg:nnet3-chain-merge-egs ark:$egs_dir/train_diagnostic.cegs ark:- |" &
fi
if [ ! -f $dir/final.mdl ]; then
echo "$0: $dir/final.mdl does not exist."
# we don't want to clean up if the training didn't succeed.
exit 1;
fi
sleep 2
echo Done
if $cleanup; then
echo Cleaning up data
if $remove_egs && [[ $egs_dir =~ $dir/egs* ]]; then
steps/nnet2/remove_egs.sh $egs_dir
fi
echo Removing most of the models
for x in `seq 0 $num_iters`; do
if [ $[$x%100] -ne 0 ] && [ $x -ne $num_iters ] && [ -f $dir/$x.mdl ]; then
# delete all but every 100th model; don't delete the ones which combine to form the final model.
rm $dir/$x.mdl
fi
done
fi
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package br.sistemalojaroupas.view.util;
import br.sistemalojaroupas.model.entities.Brand;
import br.sistemalojaroupas.model.entities.Category;
import br.sistemalojaroupas.model.entities.Color;
import br.sistemalojaroupas.model.entities.Product;
import br.sistemalojaroupas.model.entities.TableContract;
import com.toedter.calendar.JDateChooser;
import java.awt.Component;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import javax.swing.JComboBox;
import javax.swing.JFormattedTextField;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.table.DefaultTableModel;
/**
*
* @author silas
*/
/**
 * Static helper methods shared by the Swing views: table/combo-box
 * refreshing, form clearing/validation, and simple parsing utilities.
 */
public class Utils {

    /**
     * Replaces the rows of {@code table} with one row per element of
     * {@code objList}, as provided by {@link TableContract#tableRowModel()}.
     */
    public static void updateTable(Collection<? extends TableContract> objList, JTable table) {
        DefaultTableModel dtm = (DefaultTableModel) table.getModel();
        dtm.setRowCount(0); // drop all current rows before repopulating
        objList.forEach(obj -> dtm.addRow(obj.tableRowModel()));
    }

    /**
     * Repopulates {@code cb} with the default "Selecione..." placeholder
     * followed by every element of {@code list}.
     */
    public static void updateComboBox(List<?> list, JComboBox cb) {
        // Delegate to the configurable overload so both stay consistent.
        updateComboBox(list, cb, "Selecione...");
    }

    /**
     * Repopulates {@code cb} with {@code firstOption} as a placeholder
     * followed by every element of {@code list}.
     */
    public static void updateComboBox(List<?> list, JComboBox cb, String firstOption) {
        cb.removeAllItems();
        cb.addItem(firstOption);
        list.forEach(c -> cb.addItem(c));
    }

    /**
     * Resets every supported input component directly contained in
     * {@code pn}: combo boxes to index 0, (formatted) text fields to empty,
     * date choosers to no date.
     */
    public static void clearFields(JPanel pn) {
        for (Component c : pn.getComponents()) {
            // Note: a JFormattedTextField is also a JTextField, so it matches
            // both checks below; clearing its text twice is harmless.
            if (c instanceof JComboBox) ((JComboBox) c).setSelectedIndex(0);
            if (c instanceof JTextField) ((JTextField) c).setText("");
            if (c instanceof JFormattedTextField) ((JFormattedTextField) c).setText("");
            if (c instanceof JDateChooser) ((JDateChooser) c).setDate(null);
        }
    }

    /**
     * Returns true when every supported input component in {@code pn} holds
     * a usable value: combo boxes past the placeholder (index >= 1),
     * non-blank text fields, committable formatted fields with a non-null
     * value, and date choosers with a date inside their selectable range.
     */
    public static boolean isAllFieldsFilled(JPanel pn) {
        for (Component c : pn.getComponents()) {
            if (c instanceof JComboBox) {
                // Index 0 is the "Selecione..." placeholder, not a real choice.
                int i = ((JComboBox) c).getSelectedIndex();
                if (i < 1) return false;
            }
            if (c instanceof JTextField) {
                String s = ((JTextField) c).getText();
                if (s.trim().equals("")) return false;
            }
            if (c instanceof JFormattedTextField) {
                try {
                    // commitEdit() forces the current text through the formatter.
                    ((JFormattedTextField) c).commitEdit();
                } catch (ParseException e) {
                    return false;
                }
                if (((JFormattedTextField) c).getValue() == null) {
                    return false;
                }
            }
            if (c instanceof JDateChooser) {
                JDateChooser dc = (JDateChooser) c;
                Date date = dc.getDate();
                if (date == null) {
                    return false;
                }
                // Guard against null bounds to avoid a NullPointerException
                // when no min/max selectable date has been configured.
                Date min = dc.getMinSelectableDate();
                Date max = dc.getMaxSelectableDate();
                if ((min != null && date.before(min)) || (max != null && date.after(max))) {
                    return false;
                }
            }
        }
        return true;
    }

    /** Parses {@code arg} as a double, returning null instead of throwing. */
    public static Double tryParseToDouble(String arg) {
        try {
            return Double.parseDouble(arg);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /** Parses {@code arg} as a long, returning null instead of throwing. */
    public static Long tryParseToLong(String arg) {
        try {
            return Long.parseLong(arg);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /** Parses {@code arg} as an int, returning null instead of throwing. */
    public static Integer tryParseToInt(String arg) {
        try {
            return Integer.parseInt(arg);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /**
     * Strips the punctuation characters '.', ',' and '-' from a CPF string,
     * e.g. "123.456.789-00" -> "12345678900". (Despite the name, this
     * removes formatting rather than adding it; the name is kept for
     * backward compatibility.)
     */
    public static String formatCpf(String cpf) {
        // Equivalent to the old replace/split/rebuild sequence, in one step.
        return cpf.replaceAll("[.,-]", "");
    }

    /**
     * Returns the products matching every non-null filter (category, brand,
     * color, size). Null filters are ignored; the input collection is not
     * modified.
     */
    public static List<Product> productFilters(Collection<Product> product, Category category, Brand brand, Color color, String size) {
        List<Product> list = new ArrayList<>(product);
        if (category != null) {
            list.removeIf(p -> !p.getCategory().equals(category));
        }
        if (brand != null) {
            list.removeIf(p -> !p.getBrand().equals(brand));
        }
        if (color != null) {
            list.removeIf(p -> !p.getColor().equals(color));
        }
        if (size != null) {
            list.removeIf(p -> !p.getSize().equals(size));
        }
        return list;
    }
}
|
#include <iostream>
#include <string>
// Represents a file on disk by its path string.
class FileAsset {
private:
    std::string path;

public:
    // Updates the stored path. An empty path is rejected: an error message
    // is printed to stdout and the previous value is kept.
    void setPath(const std::string& newPath) {
        if (newPath.empty()) {
            std::cout << "Error: File path cannot be empty." << std::endl;
            return;
        }
        path = newPath;
    }

    // Returns the currently stored path (empty until the first successful set).
    std::string getPath() const {
        return path;
    }
};
int main() {
FileAsset* newAsset = new FileAsset();
newAsset->setPath("example/file/path.txt");
std::cout << "File path: " << newAsset->getPath() << std::endl;
newAsset->setPath(""); // This should produce an error message
std::cout << "File path: " << newAsset->getPath() << std::endl; // Should still be the previous path
delete newAsset;
return 0;
}
|
#!/bin/bash
# Remember the top-level project selected by the caller; buildProject()
# overwrites CFG_PROJECT while building sub-projects.
export PROJECT_NAME=$CFG_PROJECT
# Default to a single make job unless the caller sets MAKEJOBS.
if [ "${MAKEJOBS}" = "" ]; then
    export MAKEJOBS=1
fi
# Configures (cmake) and builds one sub-project in ./$1.
# On the first configure the cross toolchain file is passed explicitly;
# on re-runs the existing CMakeCache.txt already remembers it.
function buildProject {
    export CFG_PROJECT=$1
    if [ ! -d "$CFG_PROJECT" ]; then
        mkdir -p "$CFG_PROJECT"
    fi
    pushd "$CFG_PROJECT"
    if [ -f CMakeCache.txt ]; then
        cmake -G"Unix Makefiles" "$CMAKE_SOURCE_DIR"
    else
        cmake -G"Unix Makefiles" -DCMAKE_TOOLCHAIN_FILE="$CFG_TOOLCHAIN_FILE" "$CMAKE_SOURCE_DIR"
    fi
    if [ $? != 0 ]; then
        echo "execute cmake error."
        # Fix: a bare `exit` returns the status of the last command (the
        # successful echo, i.e. 0), so callers/CI never saw the failure.
        exit 1
    fi
    if [ "${MAKECLEAN}" = "1" ]; then
        echo "Clean build..."
        make clean
    fi
    make -j "$MAKEJOBS" VERBOSE=$VERBOSE
    popd
}
# Delete any stale target binary so that a failed build cannot leave an
# old artifact in place that looks freshly built.
if [ -f $CFG_PROJECT/project/$TARGET/$TARGET ]; then
    rm $CFG_PROJECT/project/$TARGET/$TARGET
fi
# Each CODEC*/RISC_TEST environment flag selects sub-projects to build
# before the main one.
if [ "$CODEC" = "1" ]; then
    buildProject "risc1_code"
    buildProject "codec"
fi
if [ "$CODEC_EX" = "1" ]; then
    buildProject "codec_ex"
fi
if [ "$CODEC_IT9910" = "1" ]; then
    buildProject "codec_it9910"
fi
if [ "$CODEC_EX_IT9910" = "1" ]; then
    buildProject "codec_ex_it9910"
fi
if [ "$CODEC_IT9850" = "1" ]; then
    buildProject "codec_it9850"
fi
if [ "$CODEC_EX_IT9850" = "1" ]; then
    buildProject "codec_ex_it9850"
fi
if [ "$RISC_TEST" = "1" ]; then
    buildProject "risc1_code"
    buildProject "risc2_code"
    buildProject "risc1_code_it9850"
    buildProject "risc2_code_it9850"
fi
# Finally build the project originally requested by the caller.
buildProject "$PROJECT_NAME"
|
require 'date'
# Parses a date written as "DD MonthName YYYY" (e.g. "01 April 2020")
# into a Date. Raises ArgumentError when the text does not match.
def string_to_date(text)
  Date.strptime(text, '%d %B %Y')
end

string_to_date("01 April 2020") # => #<Date: 2020-04-01>
|
import requests
import re
def scrape_phone_numbers(urls):
    """Fetch each URL and collect phone-number-like strings from its body.

    Matches international numbers of the form "+NN", an optional space, an
    optional leading zero, and ten digits (same pattern as before). Returns
    a flat list in URL order, possibly containing duplicates.

    Raises requests exceptions (connection errors, timeouts, ...) from the
    underlying HTTP calls.
    """
    # Raw string avoids invalid-escape warnings for \d and \s; compile once
    # instead of re-parsing the pattern for every URL.
    pattern = re.compile(r"([+]\d{2}\s?0?\d{10})")
    phone_numbers = []
    for url in urls:
        # A timeout prevents one unresponsive server from hanging the scrape.
        response = requests.get(url, timeout=10)
        phone_numbers.extend(pattern.findall(response.text))
    return phone_numbers
|
<filename>netket/machine/torch.py
from .abstract_machine import AbstractMachine
import torch as _torch
import numpy as _np
import warnings
def _get_number_parameters(m):
    r"""Returns total number of variational parameters in a torch.nn.Module."""
    return sum(p.numel() for p in m.parameters() if p.requires_grad)
def _get_differentiable_parameters(m):
r"""Returns total number of variational parameters in a torch.nn.Module."""
return filter(lambda p: p.requires_grad, m.parameters())
class Torch(AbstractMachine):
    """Wraps a ``torch.nn.Module`` as a NetKet machine.

    The module maps a batch of visible configurations (float64) to a
    two-column real output that is reinterpreted as the real and imaginary
    parts of the log-wavefunction (see ``log_val``).
    """

    def __init__(self, module, hilbert):
        # `module` may be a path to a serialized TorchScript module or an
        # already-constructed torch.nn.Module instance.
        self._module = _torch.jit.load(module) if isinstance(module, str) else module
        self._module.double()  # force float64 parameters/buffers
        self._n_par = _get_number_parameters(self._module)
        self._parameters = list(_get_differentiable_parameters(self._module))
        self.n_visible = hilbert.size
        # TODO check that module has input shape compatible with hilbert size
        super().__init__(hilbert)

    @property
    def parameters(self):
        """All trainable parameters flattened into one complex128 vector.

        The imaginary part is always zero (torch parameters are real).
        """
        return (
            _torch.cat(
                tuple(p.view(-1) for p in _get_differentiable_parameters(self._module))
            )
            .detach()
            .numpy()
            .astype(_np.complex128)
        )

    def assign_beta(self, beta):
        # Forwards a scalar to the module; presumably only meaningful for
        # modules that expose a `beta` attribute — TODO confirm with callers.
        self._module.beta = beta
        return

    def save(self, filename):
        # Persists only the state dict (weights), not the module structure.
        _torch.save(self._module.state_dict(), filename)
        return

    def load(self, filename):
        self._module.load_state_dict(_torch.load(filename))
        return

    @parameters.setter
    def parameters(self, p):
        """Scatters the flat complex vector `p` back into the module.

        Only the real part is used; a warning is emitted if `p` carries a
        non-zero imaginary part. Raises ValueError on a size mismatch.
        """
        if not _np.all(p.imag == 0.0):
            warnings.warn(
                "PyTorch machines have real parameters, imaginary part will be discarded"
            )
        torch_pars = _torch.from_numpy(p.real)
        if torch_pars.numel() != self.n_par:
            raise ValueError(
                "p has wrong shape: {}; expected [{}]".format(
                    torch_pars.size(), self.n_par
                )
            )
        # Copy consecutive slices of the flat vector into each parameter view,
        # in the same order used by the `parameters` getter above.
        i = 0
        for x in map(
            lambda x: x.view(-1), _get_differentiable_parameters(self._module)
        ):
            x.data.copy_(torch_pars[i : i + len(x)].data)
            i += len(x)

    @property
    def n_par(self):
        r"""Returns the total number of trainable parameters in the machine.
        """
        return self._n_par

    def log_val(self, x, out=None):
        """Evaluates log(psi) for the batch of configurations `x`.

        The module's (batch, 2) float64 output is reinterpreted in-memory as
        one complex128 value per sample via ``ndarray.view``.
        """
        if len(x.shape) == 1:
            x = x[_np.newaxis, :]  # promote a single sample to a batch of one
        batch_shape = x.shape[:-1]
        with _torch.no_grad():
            t_out = self._module(_torch.from_numpy(x)).numpy().view(_np.complex128)
        if out is None:
            return t_out.reshape(batch_shape)
        _np.copyto(out, t_out.reshape(-1))
        return out

    def der_log(self, x, out=None):
        """Per-sample gradient of log(psi) w.r.t. all parameters.

        Computed sample by sample with autograd (one backward per output
        component). Returns a complex array of shape (*batch_shape, n_par).
        NOTE(review): the `out` argument is rebound to an internal torch
        buffer rather than filled in place — confirm callers don't rely on it.
        """
        if len(x.shape) == 1:
            x = x[_np.newaxis, :]
        batch_shape = x.shape[:-1]
        x = x.reshape(-1, x.shape[-1])
        x = _torch.tensor(x, dtype=_torch.float64)
        out = x.new_empty([x.size(0), 2, self._n_par], dtype=self._parameters[0].dtype)
        m = self._module(x)
        for i in range(x.size(0)):
            # Separate backward passes for the real and imaginary outputs;
            # retain_graph so the shared forward graph survives both calls.
            dws_real = _torch.autograd.grad(
                m[i, 0], self._parameters, retain_graph=True
            )
            dws_imag = _torch.autograd.grad(
                m[i, 1], self._parameters, retain_graph=True
            )
            _torch.cat([dw.flatten() for dw in dws_real], out=out[i, 0, ...])
            _torch.cat([dw.flatten() for dw in dws_imag], out=out[i, 1, ...])
        out_complex = _np.zeros((out.size(0), out.size(2)), dtype=_np.complex128)
        out_complex = out[:, 0, :].numpy() + 1.0j * out[:, 1, :].numpy()
        return out_complex.reshape(
            tuple(list(batch_shape) + list(out_complex.shape[-1:]))
        )

    def vector_jacobian_prod(self, x, vec, out=None):
        """Computes the vector-Jacobian product of `vec` with d log(psi)/d pars.

        Runs two backward passes (one per complex component of `vec`) and
        reads the results out of the accumulated parameter gradients.
        Returns `out` (allocated as complex128 when not supplied).
        """
        if out is None:
            out = _np.empty(self.n_par, dtype=_np.complex128)

        def write_to(dst):
            # Copies the current parameter gradients, flattened and in
            # parameter order, into `dst` (a real view on out.real/out.imag).
            dst = _torch.from_numpy(dst)
            i = 0
            for g in (
                p.grad.flatten() for p in self._module.parameters() if p.requires_grad
            ):
                dst[i : i + g.numel()].copy_(g)
                i += g.numel()

        def zero_grad():
            # Clear any gradients left over from a previous backward pass.
            for g in (p.grad for p in self._module.parameters() if p.requires_grad):
                if g is not None:
                    g.zero_()

        vecj = _torch.empty(x.shape[0], 2, dtype=_torch.float64)

        def get_vec(is_real):
            # Builds the (batch, 2) cotangent that selects the real or the
            # imaginary component of the complex product with `vec`.
            if is_real:
                vecj[:, 0] = _torch.from_numpy(vec.real)
                vecj[:, 1] = _torch.from_numpy(vec.imag)
            else:
                vecj[:, 0] = _torch.from_numpy(vec.imag)
                vecj[:, 1] = _torch.from_numpy(-vec.real)
            return vecj

        y = self._module(_torch.from_numpy(x))
        zero_grad()
        y.backward(get_vec(True), retain_graph=True)
        write_to(out.real)
        zero_grad()
        y.backward(get_vec(False))
        write_to(out.imag)
        return out

    @property
    def is_holomorphic(self):
        r"""PyTorch models are real-valued only, thus non holomorphic.
        """
        return False

    @property
    def state_dict(self):
        """Module state dict with tensors converted to numpy arrays."""
        from collections import OrderedDict

        return OrderedDict(
            [(k, v.detach().numpy()) for k, v in self._module.state_dict().items()]
        )
class TorchLogCosh(_torch.nn.Module):
    """Log(cosh) activation function for PyTorch modules.

    Uses the numerically stable identity
    ``softplus(2x) - x = log(cosh(x)) + log(2)``; the constant ``log(2)``
    offset is inherited from the original implementation (harmless when the
    output matters only up to an additive constant).
    """

    def __init__(self):
        # Stateless activation: nothing beyond base-class initialization.
        super().__init__()

    def forward(self, input):
        """Applies the activation element-wise to ``input``."""
        return _torch.nn.functional.softplus(2.0 * input) - input
class TorchView(_torch.nn.Module):
    """Reshaping layer: applies ``Tensor.view`` with a fixed target shape."""

    def __init__(self, shape):
        """`shape` is the target shape (it may contain -1 for an inferred dim)."""
        super().__init__()
        self.shape = shape

    def forward(self, x):
        """Returns a view of ``x`` with the configured shape."""
        return x.view(*self.shape)
|
<filename>src/app/pages/incident/incident.module.ts<gh_stars>1-10
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { IonicModule } from '@ionic/angular';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { Routes, RouterModule } from '@angular/router';
import { TranslateModule, TranslateLoader } from '@ngx-translate/core';
import { HttpLoaderFactory } from '../../http-loader-factory';
import { HttpClient } from '@angular/common/http';
import { IonicSelectableModule } from 'ionic-selectable';
import { HomeIncidentPage } from './home-incident/home-incident.page';
import { AuthIncidentGuard } from '../../guard/auth-incident.guard';
import { AddIncidentPage } from './add-incident/add-incident.page';
import { AllIncidentPage } from './all-incident/all-incident.page';
import { VoirIncidentPage } from './voir-incident/voir-incident.page';
import { PopMenuIncidentComponent } from '../../components/popmenu-incident/pop-menu-incident';
import { IncidentService } from '../../service/incident/incident.service';
import { IncidentTraitementService } from '../../service/incident/incident-traitement.service';
import { OrderByPipe } from '../../shared/pipes/orderby.pipe';
import { FilterIncidentPipe } from '../../shared/pipes/filterincident.pipe';
// Routes of the lazily loaded incident feature. All pages except the
// read-only "voir-incident" view are protected by AuthIncidentGuard.
const routes: Routes = [
  {
    // Feature landing page.
    path: '',
    component: HomeIncidentPage,
    canActivate: [AuthIncidentGuard]
  },
  {
    path: 'add-incident',
    component: AddIncidentPage,
    canActivate: [AuthIncidentGuard]
  },
  {
    // Reuses AddIncidentPage in edit mode.
    path: 'edit-incident',
    component: AddIncidentPage,
    canActivate: [AuthIncidentGuard]
  },
  {
    path: 'all-incident',
    component: AllIncidentPage,
    canActivate: [AuthIncidentGuard]
  },
  {
    // NOTE(review): this route is unguarded, unlike the others — confirm
    // whether viewing an incident should also require AuthIncidentGuard.
    path: 'voir-incident',
    component: VoirIncidentPage
  }
];
// Angular/Ionic module bundling the incident pages, pipes and services.
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    ReactiveFormsModule,
    IonicModule,
    IonicSelectableModule,
    // NOTE(review): TranslateModule.forRoot() inside a feature module creates
    // a separate translation service instance; feature modules normally use
    // forChild() — confirm this is intentional before changing it.
    TranslateModule.forRoot({
      loader: {
        provide: TranslateLoader,
        useFactory: (HttpLoaderFactory),
        deps: [HttpClient]
      }
    }),
    RouterModule.forChild(routes)
  ],
  declarations: [
    // OrderByPipe,
    FilterIncidentPipe,
    HomeIncidentPage,
    AddIncidentPage,
    AllIncidentPage,
    PopMenuIncidentComponent,
    VoirIncidentPage],
  providers:[AuthIncidentGuard,IncidentService,IncidentTraitementService],
  entryComponents: [PopMenuIncidentComponent]
})
export class IncidentModule { }
|
#!/bin/bash
#
# Based mostly on the WSJ/Librispeech recipe. The training database is #####,
# it consists of 95hrs korean speech with cleaned automatic transcripts:
#
# http://www.openslr.org/resources (Mirror).
#
# Copyright 2017 Atlas Guide (Author : Lucas Jo)
# 2017 Gridspace Inc. (Author: Wonkyum Lee)
#
# Apache 2.0
#
# Check list before start
# 1. locale setup
# 2. pre-installed package: awscli, Morfessor-2.0.1, flac, sox, same cuda library, unzip
# 3. pre-install or symbolic link for easy going: rirs_noises.zip (takes pretty long time)
# 4. parameters: nCPU, num_jobs_initial, num_jobs_final, --max-noises-per-minute
# plz enter your valid credential to access AWS zeroth repo
export AWS_ACCESS_KEY_ID=""
export AWS_SECRET_ACCESS_KEY=""
export AWS_SESSION_TOKEN=""
data=./speechDATA
nCPU=16
. ./cmd.sh
. ./path.sh
# you might not want to do this for interactive shells.
set -e
startTime=$(date +'%F-%H-%M')
echo "started at" $startTime
# download the data.
for part in train_data_01 test_data_01; do
    local/download_and_untar.sh $data $part
done
# download the LM resources
local/download_lm.sh data/local/lm
# format the data as Kaldi data directories
for part in train_data_01 test_data_01; do
    # use underscore-separated names in data directories.
    local/data_prep.sh $data/$part data/$(echo $part | sed s/-/_/g)
done
# update segmentation of transcripts
for part in train_data_01 test_data_01; do
    local/updateSegmentation.sh data/$part data/local/lm
done
# prepare dictionary and language model
local/prepare_dict.sh data/local/lm data/local/dict_nosp
utils/prepare_lang.sh data/local/dict_nosp \
    "<UNK>" data/local/lang_tmp_nosp data/lang_nosp
local/format_lms.sh --src-dir data/lang_nosp data/local/lm
# Create ConstArpaLm format language model for full 3-gram and 4-gram LMs.
# It takes a long time and is done again after computing silence probs,
# so it is commented out at this stage.
#utils/build_const_arpa_lm.sh data/local/lm/zeroth.lm.tg.arpa.gz \
#    data/lang_nosp data/lang_nosp_test_tglarge
#utils/build_const_arpa_lm.sh data/local/lm/zeroth.lm.fg.arpa.gz \
#    data/lang_nosp data/lang_nosp_test_fglarge
# Feature extraction (MFCC)
mfccdir=mfcc
hostInAtlas="ares hephaestus jupiter neptune"
if [[ ! -z $(echo $hostInAtlas | grep -o $(hostname -f)) ]]; then
    # Fix: take the basename of the *value* of $mfccdir, not of the literal
    # string "mfccdir", so the distributed storage dirs are named after the
    # actual feature directory.
    mfcc=$(basename $mfccdir) # in case was absolute pathname (unlikely), get basename.
    utils/create_split_dir.pl /mnt/{ares,hephaestus,jupiter,neptune}/$USER/kaldi-data/zeroth-kaldi/s5/$mfcc/storage \
        $mfccdir/storage
fi
for part in train_data_01 test_data_01; do
    steps/make_mfcc.sh --cmd "$train_cmd" --nj $nCPU data/$part exp/make_mfcc/$part $mfccdir
    steps/compute_cmvn_stats.sh data/$part exp/make_mfcc/$part $mfccdir
done
# ... and then combine data sets into one (for later extension)
utils/combine_data.sh \
    data/train_clean data/train_data_01
utils/combine_data.sh \
    data/test_clean data/test_data_01
# Make some small data subsets for early system-build stages.
utils/subset_data_dir.sh --shortest data/train_clean 2000 data/train_2kshort
utils/subset_data_dir.sh data/train_clean 5000 data/train_5k
utils/subset_data_dir.sh data/train_clean 10000 data/train_10k
echo "#### Monophone Training ###########"
# train a monophone system & align
steps/train_mono.sh --boost-silence 1.25 --nj $nCPU --cmd "$train_cmd" \
    data/train_2kshort data/lang_nosp exp/mono
steps/align_si.sh --boost-silence 1.25 --nj $nCPU --cmd "$train_cmd" \
    data/train_5k data/lang_nosp exp/mono exp/mono_ali_5k
echo "#### Triphone Training, delta + delta-delta ###########"
# train a first delta + delta-delta triphone system on a subset of 5000 utterances;
# the two numeric args are max pdf / gaussian counts (under/over fitting trade-off)
steps/train_deltas.sh --boost-silence 1.25 --cmd "$train_cmd" \
    2000 10000 data/train_5k data/lang_nosp exp/mono_ali_5k exp/tri1
steps/align_si.sh --nj $nCPU --cmd "$train_cmd" \
    data/train_10k data/lang_nosp exp/tri1 exp/tri1_ali_10k
echo "#### Triphone Training, LDA+MLLT ###########"
# train an LDA+MLLT system.
steps/train_lda_mllt.sh --cmd "$train_cmd" \
    --splice-opts "--left-context=3 --right-context=3" 2500 15000 \
    data/train_10k data/lang_nosp exp/tri1_ali_10k exp/tri2b
# Align the full clean set using the tri2b model
steps/align_si.sh --nj $nCPU --cmd "$train_cmd" --use-graphs true \
    data/train_clean data/lang_nosp exp/tri2b exp/tri2b_ali_train_clean
echo "#### Triphone Training, LDA+MLLT+SAT ###########"
# Train tri3b, which is LDA+MLLT+SAT
#steps/train_sat.sh --cmd "$train_cmd" 3000 25000 \
steps/train_sat.sh --cmd "$train_cmd" 4200 40000 \
    data/train_clean data/lang_nosp exp/tri2b_ali_train_clean exp/tri3b
# Now we compute the pronunciation and silence probabilities from training data,
# and re-create the lang directory.
# silence transition probability ...
steps/get_prons.sh --cmd "$train_cmd" \
    data/train_clean data/lang_nosp exp/tri3b
utils/dict_dir_add_pronprobs.sh --max-normalize true \
    data/local/dict_nosp \
    exp/tri3b/pron_counts_nowb.txt exp/tri3b/sil_counts_nowb.txt \
    exp/tri3b/pron_bigram_counts_nowb.txt data/local/dict
utils/prepare_lang.sh data/local/dict \
    "<UNK>" data/local/lang_tmp data/lang
local/format_lms.sh --src-dir data/lang data/local/lm
utils/build_const_arpa_lm.sh \
    data/local/lm/zeroth.lm.tg.arpa.gz data/lang data/lang_test_tglarge
utils/build_const_arpa_lm.sh \
    data/local/lm/zeroth.lm.fg.arpa.gz data/lang data/lang_test_fglarge
# align the entire train_clean using the tri3b model
steps/align_fmllr.sh --nj $nCPU --cmd "$train_cmd" \
    data/train_clean data/lang exp/tri3b exp/tri3b_ali_train_clean
echo "#### SAT again on train_clean ###########"
# train another LDA+MLLT+SAT system on the entire subset
steps/train_sat.sh --cmd "$train_cmd" 4200 40000 \
    data/train_clean data/lang exp/tri3b_ali_train_clean exp/tri4b
# decode using the tri4b model with pronunciation and silence probabilities
utils/mkgraph.sh \
    data/lang_test_tgsmall exp/tri4b exp/tri4b/graph_tgsmall
# the size is properly set?
utils/subset_data_dir.sh data/test_clean 200 data/test_200
for test in test_200; do
    nspk=$(wc -l <data/${test}/spk2utt)
    steps/decode_fmllr.sh --nj $nspk --cmd "$decode_cmd" \
        exp/tri4b/graph_tgsmall data/$test \
        exp/tri4b/decode_tgsmall_$test
    #steps/lmrescore.sh --cmd "$decode_cmd" data/lang_test_{tgsmall,tgmed} \
    #    data/$test exp/tri4b/decode_{tgsmall,tgmed}_$test
    steps/lmrescore_const_arpa.sh \
        --cmd "$decode_cmd" data/lang_test_{tgsmall,tglarge} \
        data/$test exp/tri4b/decode_{tgsmall,tglarge}_$test
    steps/lmrescore_const_arpa.sh \
        --cmd "$decode_cmd" data/lang_test_{tgsmall,fglarge} \
        data/$test exp/tri4b/decode_{tgsmall,fglarge}_$test
done
# align train_clean using the tri4b model
steps/align_fmllr.sh --nj $nCPU --cmd "$train_cmd" \
    data/train_clean data/lang exp/tri4b exp/tri4b_ali_train_clean
#echo "GMM training is finished" |\
#    mail -s "[alarm]finishing" -aFrom:jupiter lucasjo@goodatlas.com
finishTime=$(date +'%F-%H-%M')
# Fix: "trainig" -> "training" in the user-visible status messages.
echo "GMM training is finished at" $finishTime
## online chain recipe using only clean data set
echo "#### online chain training ###########"
## check point: sudo nvidia-smi --compute-mode=3 if you have multiple GPU's
#local/chain/run_tdnn_1a.sh
#local/chain/run_tdnn_1b.sh
#local/chain/multi_condition/run_tdnn_lstm_1e.sh --nj $nCPU
local/chain/multi_condition/run_tdnn_1n.sh --nj $nCPU
#echo "DNN training is finished" |\
#    mail -s "[alarm]finishing" -aFrom:jupiter lucasjo@goodatlas.com
finishTime=$(date +'%F-%H-%M')
echo "DNN training is finished at" $finishTime
echo "started at" $startTime
echo "finished at" $finishTime
exit 0;
|
def process_query(sentence, query_on_field, nominal_groupL):
    """Attach the nominal groups of a query to the right slot of `sentence`.

    With a falsy `query_on_field` the groups become the sentence's nominal
    part (`sn`) and the verbal part (`sv`) is cleared; with
    'QUERY_ON_DIRECT_OBJ' they become the direct object of the first verbal
    group. Any other value leaves `sentence` untouched. `nominal_groupL` is
    a list of [preposition, nominal_group_list] pairs; prepositions are
    ignored here.
    """
    # Flatten [[prep, [ng, ...]], ...] into a single list of nominal groups.
    flattened = [ng for _prep, ngL in nominal_groupL for ng in ngL]
    # The original nested `if` repeated both conditions; a flat if/elif is
    # equivalent and clearer.
    if not query_on_field:
        sentence.sn = flattened
        sentence.sv = []
    elif query_on_field == 'QUERY_ON_DIRECT_OBJ':
        sentence.sv[0].d_obj = flattened
|
#!/usr/bin/env bash
set -e

# Use the default toolchain when it is already nightly; otherwise ask
# rustup for the nightly one explicitly.
CARGO_CMD="cargo +nightly"
if cargo --version | grep -q "nightly"; then
    CARGO_CMD="cargo"
fi

# Build the wasm artifacts without incremental compilation and with the
# function table exported.
CARGO_INCREMENTAL=0 RUSTFLAGS="-C link-arg=--export-table" $CARGO_CMD build --target=wasm32-unknown-unknown --release

# Shrink each produced wasm blob into a *.compact.wasm next to it.
RELEASE_DIR=target/wasm32-unknown-unknown/release
for crate in daqiao_runtime_wasm; do
    wasm-gc "$RELEASE_DIR/$crate.wasm" "$RELEASE_DIR/$crate.compact.wasm"
done
|
<reponame>rsuite/rsuite-icons
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import GeSvg from '@rsuite/icon-font/lib/legacy/Ge';
// Icon component wrapping the legacy "ge" SVG glyph from @rsuite/icon-font.
// NOTE: this file is generated (see header) — change the generator, not
// this file, for any real modification.
const Ge = createSvgIcon({
  as: GeSvg,
  ariaLabel: 'ge',
  category: 'legacy',
  displayName: 'Ge'
});
export default Ge;
|
#!/usr/bin/env bash
set -euo pipefail

# Cross-compile the buildpack binaries for Linux with symbol/debug tables
# trimmed (-s -w) to keep them small.
GOOS="linux" go build -ldflags='-s -w' -o bin/helper github.com/paketo-buildpacks/libjvm/cmd/helper
GOOS="linux" go build -ldflags='-s -w' -o bin/main github.com/paketo-buildpacks/adoptium/cmd/main

# Optionally strip the binaries further (set STRIP to anything but "false").
if [ "${STRIP:-false}" != "false" ]; then
  strip bin/helper bin/main
fi

# Optionally compress them with the tool named in $COMPRESS (e.g. upx).
if [ "${COMPRESS:-none}" != "none" ]; then
  $COMPRESS bin/helper bin/main
fi

# The buildpack entry points `build` and `detect` are both served by `main`.
ln -fs main bin/build
ln -fs main bin/detect
|
<!DOCTYPE html>
<html lang="en">
<head>
  <!-- DOCTYPE + charset added: without them browsers render in quirks mode
       and may mis-decode non-ASCII text. -->
  <meta charset="utf-8">
  <title>Top 10 Employees</title>
</head>
<body>
  <table>
    <tr>
      <th>Employee</th>
      <th>Performance</th>
    </tr>
    <tr>
      <td>Employee 1</td>
      <td>8</td>
    </tr>
    <tr>
      <td>Employee 2</td>
      <td>7</td>
    </tr>
    <tr>
      <td>Employee 3</td>
      <td>9</td>
    </tr>
    <tr>
      <td>Employee 4</td>
      <td>5</td>
    </tr>
    <tr>
      <td>Employee 5</td>
      <td>6</td>
    </tr>
    <tr>
      <td>Employee 6</td>
      <td>8</td>
    </tr>
    <tr>
      <td>Employee 7</td>
      <td>9</td>
    </tr>
    <tr>
      <td>Employee 8</td>
      <td>8</td>
    </tr>
    <tr>
      <td>Employee 9</td>
      <td>7</td>
    </tr>
    <tr>
      <td>Employee 10</td>
      <td>4</td>
    </tr>
  </table>
</body>
</html>
|
#!/bin/bash
# Feeds the e-mail file named by $TARGET_EMAIL to the Ruby app on stdin.
# Redirection replaces the needless `cat file | ...`; quoting protects
# paths with spaces; ${VAR:?msg} fails fast when the variable is unset
# (previously `cat` with no file would block reading the terminal).
bundle exec ruby app.rb < "${TARGET_EMAIL:?TARGET_EMAIL must be set}"
|
<filename>src/test/java/com/datasift/client/behavioural/PylonGetSteps.java
package com.datasift.client.behavioural;
import com.datasift.client.pylon.PylonRecording;
import com.datasift.client.pylon.PylonRecordingList;
import com.fasterxml.jackson.databind.JsonNode;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
import io.higgs.core.ObjectFactory;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
//CHECKSTYLE:OFF
/**
 * Cucumber step definitions for the PYLON "get" endpoints, backed by the
 * local mock HTTP server and client provided by {@link CucumberBase}.
 */
public class PylonGetSteps extends CucumberBase {
    // Recording returned by the most recent "get" call; shared across steps
    // of the same scenario.
    protected PylonRecording recording;

    @Given("^a mock exists$")
    public void aMockExists() throws Throwable {
        // Route every handler instantiation in the mock server to the shared
        // `wrapper`, so scenarios can script its responses.
        mock.registerObjectFactory(new ObjectFactory(mock) {
            public Object newInstance(Class<?> klass) {
                return wrapper;
            }
            public boolean canCreateInstanceOf(Class<?> klass) {
                return true;
            }
        });
    }

    /**
     * Configures the mock to return {@code body} with {@code statusCode}
     * when a request carries all of the given query-string parameters.
     * (The {@code path} argument is currently unused by the wrapper.)
     */
    @Given("^returns this body and status code \"([^\"]*)\" when the query string \"([^\"]*)\" at the path \"([^\"]*)\"$")
    public void returnsThisBodyAndStatusCodeWhenTheQueryStringAtThePath(String statusCode, String queryString, String path, String body) throws Throwable {
        final String[] queryParametersArray = queryString.split("&");
        wrapper.response(body)
                .matchQueryStringParams(new HashMap<String, String>() {
                    {
                        // Turn "a=1&b=2" into individual key/value entries.
                        for (String queryParameter : queryParametersArray) {
                            String[] params = queryParameter.split("=");
                            put(params[0], params[1]);
                        }
                    }
                }).statusCode(statusCode);
    }

    @When("^a get request is made with a recording_id \"([^\"]*)\" and no body$")
    public void aGetRequestIsMadeWithARecording_idAndNoBody(String recordingId) throws Throwable {
        recording = client.pylon().get(new PylonRecording.PylonRecordingId(recordingId)).sync();
        // The client must echo back the id we asked for.
        assertEquals(recordingId, recording.getRecordingId().getId());
    }

    @Then("^the get response status code should be \"([^\"]*)\"$")
    public void theGetResponseStatusCodeShouldBe(String statusCode) throws Throwable {
        assertEquals(Integer.parseInt(statusCode), recording.getResponse().status());
    }

    @Then("^the get response body contains the JSON data$")
    public void theGetResponseBodyContainsTheJSONData(String body) throws Throwable {
        // Compare as parsed JSON so formatting differences don't matter.
        JsonNode expected = mapper.readTree(body);
        JsonNode actual = mapper.readTree(recording.getResponse().data());
        assertTrue(expected.equals(actual));
        assertEquals("1234", recording.getRecordingId().getId());
    }

    @When("^a get request is made with page \"([^\"]*)\" and per_page \"([^\"]*)\" and no body$")
    public void aGetRequestIsMadeWithPageAndPer_pageAndNoBody(String page, String perPage) throws Throwable {
        int sentPage = Integer.parseInt(page);
        int sentPerPage = Integer.parseInt(perPage);
        PylonRecordingList pylonRecordingList = client.pylon().get(sentPage, sentPerPage).sync();
        recording = pylonRecordingList.getSubscriptions().get(0);
        assertEquals(sentPage, pylonRecordingList.getPage());
        assertEquals(sentPerPage, pylonRecordingList.getPerPage());
    }

    @Given("^returns error \"([^\"]*)\" and status code \"([^\"]*)\" when no query string at the path \"([^\"]*)\"$")
    public void returnsErrorAndStatusCodeWhenNoQueryStringAtThePath(String errorMessage, String statusCode, String path) throws Throwable {
        wrapper.response(errorMessage).statusCode(statusCode);
    }

    @When("^a get request is made without recording_id and no body$")
    public void aGetRequestIsMadeWithoutRecording_idAndNoBody() throws Throwable {
        // NOTE(review): exceptions are swallowed here, so a failing call
        // only surfaces later as an NPE on `recording` in a Then step —
        // consider letting the exception propagate instead.
        try {
            PylonRecordingList pylonRecordingList = client.pylon().get().sync();
            recording = pylonRecordingList.getSubscriptions().get(0);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @When("^a get request is made with page \"([^\"]*)\" and no per_page and no body$")
    public void aGetRequestIsMadeWithPageAndNoPer_pageAndNoBody(String page) throws Throwable {
        int sentPage = Integer.parseInt(page);
        PylonRecordingList pylonRecordingList = client.pylon().get(sentPage).sync();
        recording = pylonRecordingList.getSubscriptions().get(0);
        assertEquals(sentPage, pylonRecordingList.getPage());
    }
}
//CHECKSTYLE:ON
|
/// Returns a package installation order in which every package appears
/// after all of its dependencies (dependencies are installed first).
///
/// Uses Kahn's algorithm. The traversal below starts from packages that
/// nothing depends on and walks toward leaf dependencies, so the visit
/// order is dependents-first; the result is reversed at the end to yield
/// a valid install order.
///
/// NOTE(review): dependencies that are not themselves listed in `packages`
/// still appear in the output, and a dependency cycle silently drops the
/// cyclic nodes — TODO confirm both behaviours are intended.
func installOrder(_ packages: [Package]) -> [String] {
    // package name -> set of packages it depends on
    var graph: [String: Set<String>] = [:]
    // number of packages that depend on each node
    var indegree: [String: Int] = [:]
    for package in packages {
        graph[package.name] = Set(package.dependencies)
        indegree[package.name] = 0
    }
    for (_, dependencies) in graph {
        for dependency in dependencies {
            indegree[dependency, default: 0] += 1
        }
    }
    // Seed the queue with packages no other package depends on.
    var queue: [String] = []
    for (package, count) in indegree where count == 0 {
        queue.append(package)
    }
    var visitOrder: [String] = []
    while !queue.isEmpty {
        let package = queue.removeFirst()
        visitOrder.append(package)
        if let dependencies = graph[package] {
            for dependency in dependencies {
                indegree[dependency, default: 0] -= 1
                if indegree[dependency, default: 0] == 0 {
                    queue.append(dependency)
                }
            }
        }
    }
    // Fix: the traversal visits dependents before their dependencies, so
    // reverse it to obtain dependencies-first installation order.
    return visitOrder.reversed()
}
|
<gh_stars>1-10
<?hh // strict
namespace Waffle\Router;
use namespace HH\Lib\Str;
use namespace HH\Lib\Vec;
use namespace HH\Lib\C;
use type Waffle\Contract\Http\Message\ResponseInterface;
use type Waffle\Contract\Http\Message\ServerRequestInterface;
use type Waffle\Contract\Http\Server\MiddlewareInterface;
use type Waffle\Contract\Http\Server\RequestHandlerInterface;
use function preg_match;
/**
* Value object representing a single route.
*
* Routes are a combination of path, middleware, and HTTP methods; two routes
* representing the same path and overlapping HTTP methods are not allowed,
* while two routes representing the same path and non-overlapping HTTP methods
* can be used (and should typically resolve to different middleware).
*
* Internally, only those three properties are required. However, underlying
* router implementations may allow or require additional information, such as
* information defining how to generate a URL from the given route, qualifiers
* for how segments of a route match, or even default values to use. These may
* be provided after instantiation via the "options" property and related
* setOptions() method.
*/
class Route implements MiddlewareInterface
{
    // Separator used when composing a default route name from path + methods.
    const HTTP_METHOD_SEPARATOR = ':';

    // Additional, router-specific options (defaults, segment qualifiers, ...).
    private dict<string, mixed> $options = dict[];

    // Allowed HTTP methods; null means "any method".
    private ?vec<string> $methods = null;

    // Route name; derived from path (and methods) when not supplied.
    private string $name;

    /**
     * @param string $path Path to match.
     * @param MiddlewareInterface $middleware Middleware to use when this route is matched.
     * @param null|vec<string> $methods Allowed HTTP methods; defaults to HTTP_METHOD_ANY.
     * @param null|string $name the route name
     */
    public function __construct(
        private string $path,
        private MiddlewareInterface $middleware,
        ?vec<string> $methods = null,
        ?string $name = null
    ) {
        if (null !== $methods) {
            $this->methods = $this->validateHttpMethods($methods);
        }
        // Default name: the bare path, or "path^GET:POST:..." when methods
        // are restricted, so method-disjoint routes get distinct names.
        if (null === $name) {
            $name = $this->methods === null
                ? $path
                : $path . '^' . Str\join($this->methods, self::HTTP_METHOD_SEPARATOR);
        }
        $this->name = $name;
    }

    /**
     * Proxies to the middleware composed during instantiation.
     */
    public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
    {
        return $this->middleware->process($request, $handler);
    }

    public function getPath(): string
    {
        return $this->path;
    }

    /**
     * Set the route name.
     */
    public function setName(string $name): void
    {
        $this->name = $name;
    }

    public function getName(): string
    {
        return $this->name;
    }

    public function getMiddleware(): MiddlewareInterface
    {
        return $this->middleware;
    }

    /**
     * @return null|vec<string> Returns null (any method allowed) or the vec of allowed methods.
     */
    public function getAllowedMethods(): ?vec<string>
    {
        return $this->methods;
    }

    /**
     * Indicate whether the specified method is allowed by the route.
     *
     * Comparison is case-insensitive; null methods means every method matches.
     *
     * @param string $method HTTP method to test.
     */
    public function allowsMethod(string $method): bool
    {
        $method = Str\uppercase($method);
        if (null === $this->methods || C\contains($this->methods, $method)) {
            return true;
        }
        return false;
    }

    public function setOptions(dict<string, mixed> $options): void
    {
        $this->options = $options;
    }

    public function getOptions(): dict<string, mixed>
    {
        return $this->options;
    }

    /**
     * Validate the provided HTTP method names.
     *
     * Validates, and then normalizes to upper case.
     *
     * @param vec<string> $methods A vec of HTTP method names.
     * @return vec<string>
     * @throws Exception\InvalidArgumentException for any invalid method names.
     */
    private function validateHttpMethods(vec<string> $methods): vec<string>
    {
        if (0 === C\count($methods)) {
            throw new Exception\InvalidArgumentException(
                'HTTP methods argument was empty; must contain at least one method'
            );
        }
        $valid = true;
        foreach ($methods as $method) {
            // Accept any RFC 7230 token character; anything else is invalid.
            if (! preg_match('/^[!#$%&\'*+.^_`\|~0-9a-z-]+$/i', $method)) {
                $valid = false;
            }
        }
        if (!$valid) {
            throw new Exception\InvalidArgumentException('One or more HTTP methods were invalid');
        }
        return Vec\map($methods, ($method) ==> Str\uppercase($method));
    }
}
|
#!/bin/bash
#
# Description : Alacritty
# Author : Jose Cerrejon Gonzalez (ulysess@gmail_dot._com)
# Version : 1.0.0 (09/Novp/20)
# Compatible : Raspberry Pi 4 (tested)
# Repository : https://github.com/alacritty/alacritty
#
# Load shared helper functions; try the known relative locations and, as a
# last resort, fetch helper.sh from the PiKISS repository.
. ../helper.sh || . ./scripts/helper.sh || . ./helper.sh || wget -q 'https://github.com/jmcerrejon/PiKISS/raw/master/scripts/helper.sh'
clear
# check_board comes from helper.sh; if sourcing failed above, this call fails
# and we bail out with a hint about the missing helper file.
check_board || { echo "Missing file helper.sh. I've tried to download it for you. Try to run the script again." && exit 1; }
readonly INSTALL_DIR="$HOME/apps"
# Build dependencies needed only when compiling from source.
readonly PACKAGES_DEV=(cmake pkg-config libfreetype6-dev libfontconfig1-dev libxcb-xfixes0-dev python3)
# Pre-built binary tarball and the OpenGL-ES fork used for source builds.
readonly BINARY_PATH="https://misapuntesde.com/rpi_share/alacritty-0.6.0-rpi.tar.gz"
readonly GITHUB_PATH="https://github.com/w23/alacritty"
# Launch the installed binary, guarding against a broken install.
runme() {
    if [ ! -f "$INSTALL_DIR/alacritty/alacritty" ]; then
        echo -e "\nFile does not exist.\n· Something is wrong.\n· Try to install again."
        exit_message
    fi
    echo
    read -p "Press [ENTER] to run..."
    cd "$INSTALL_DIR"/alacritty && ./alacritty
    exit_message
}

# Delete the app directory and its user configuration.
remove_files() {
    rm -rf "$INSTALL_DIR"/alacritty ~/.config/alacritty
}

# Interactive uninstall; verifies the binary is really gone afterwards.
uninstall() {
    read -p "Do you want to uninstall Alacritty (y/N)? " response
    if [[ $response =~ [Yy] ]]; then
        remove_files
        # If the binary still exists, removal failed.
        if [[ -e $INSTALL_DIR/alacritty/alacritty ]]; then
            echo -e "I hate when this happens. I could not find the directory, Try to uninstall manually. Apologies."
            exit_message
        fi
        echo -e "\nSuccessfully uninstalled."
        exit_message
    fi
    exit_message
}

# When already installed, offer to uninstall instead of reinstalling.
if [[ -e $INSTALL_DIR/alacritty/alacritty ]]; then
    echo -e "Alacritty already installed.\n"
    uninstall
fi
# Create a desktop launcher unless one already exists.
generate_icon() {
    echo -e "\nCreating shortcut icon..."
    if [[ ! -e ~/.local/share/applications/alacritty.desktop ]]; then
        cat <<EOF >~/.local/share/applications/alacritty.desktop
[Desktop Entry]
Name=Alacritty
Exec=${INSTALL_DIR}/alacritty/alacritty
Path=${INSTALL_DIR}/alacritty/
Icon=${INSTALL_DIR}/alacritty/icon.png
Type=Application
Comment=Alacritty is the fastest terminal emulator in existence. Using the GPU for rendering enables optimizations that simply aren't possible without it.
Categories=ConsoleOnly;Utility;System;
EOF
    fi
}

# Copy the bundled default configuration into the user's ~/.config.
post_install() {
    cp -fr "$INSTALL_DIR/alacritty/.config/alacritty" ~/.config
}

# Download the pre-built binary, create the launcher and offer to run the app.
# download_and_extract comes from helper.sh.
install() {
    echo -e "\nInstalling, please wait...\n"
    download_and_extract "$BINARY_PATH" "$INSTALL_DIR"
    generate_icon
    post_install
    echo -e "\nDone!. App at $INSTALL_DIR/alacritty or Go to Menu > System Tools > Alacritty"
    runme
}
# Build Alacritty from source (OpenGL-ES fork). Requires Rust; helpers
# install_packages_if_missing / install_or_update_rust come from helper.sh.
compile() {
    install_packages_if_missing "${PACKAGES_DEV[@]}"
    install_or_update_rust
    mkdir -p "$HOME/sc" && cd "$_"
    if [[ ! -d "$HOME/sc/alacritty" ]]; then
        echo -e "\nCloning and compiling repo...\n"
        git clone "$GITHUB_PATH" alacritty && cd "$_"
    else
        echo -e "\nDirectory already exists. Updating and compiling repo...\n"
        # Fix: pull inside the checkout itself — the old code ran `git pull`
        # from $HOME/sc (the parent), which is not a git repository, and then
        # invoked cargo from the wrong directory.
        cd "$HOME/sc/alacritty"
        git pull
    fi
    echo -e "\nEstimated Time on Raspberry Pi 4 (not overclocked): ~20 minutes (it's OK stopping long time at step 221/222)... \n"
    cargo build --release
    echo -e "\nDone!. You can found the app at $HOME/sc/alacritty/target/release"
    exit_message
}
install_script_message
echo "
Alacritty
=========

· Alacritty is the fastest terminal emulator in existence.

· The software is considered to be at a beta level of readiness, but it's already used by many as a daily driver.

· This is a fork with OpenGL ES 2.0 support still not merged on the official repository.

· Using the GPU for rendering enables optimizations that simply aren't possible without it.

· I've used my own customization at ~/.config/alacritty/alacritty.yml. You can modify it following the next: https://github.com/alacritty/alacritty/wiki

· This version is 0.6.0-dev (WIP). More info about version 0.5.0: https://blog.christianduerr.com/alacritty_0_5_0_announcement

· More info about the PR at $GITHUB_PATH | https://github.com/alacritty/alacritty/pull/4373
"
read -p "Continue (Y/n)? " response
# Fix: honour the answer instead of ignoring it — abort on n/N, otherwise
# proceed with the default (yes) and install.
if [[ $response =~ [Nn] ]]; then
    exit_message
fi
install
|
package com.projet.voiture.repository;
import com.projet.voiture.model.Adresse;
import com.projet.voiture.model.Marque;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import java.util.List;
@Repository
public class MarqueRepository {
    @PersistenceContext
    private EntityManager em;
    @Autowired
    JdbcTemplate template;

    /**
     * Loads every row of the Marque table.
     *
     * Fixes two defects in the previous version: it called queryForObject(),
     * which throws unless the query returns exactly one row, and it mapped
     * each row to an Adresse instead of a Marque.
     *
     * NOTE(review): assumes Marque exposes a (Long id, String nom)
     * constructor like Adresse does — confirm against the model class.
     *
     * @return all marques found in the database (possibly empty, never null)
     */
    public List<Marque> getAllMarque() {
        return template.query("SELECT * FROM Marque",
                (result, rowNum) -> new Marque(result.getLong("id"), result.getString("nom")));
    }
}
|
#!/bin/bash
# Compile Virginia (virginia.c -> ./virginia), reporting compiler failure
# instead of printing "done" unconditionally.
echo -n "[virginia] Compiling Virginia... "
gcc virginia.c -o virginia || { echo "failed"; exit 1; }
echo "done"
|
<gh_stars>1-10
const app = require('./app');

// Listen on APP_PORT when provided, defaulting to 3333.
const port = process.env.APP_PORT || 3333;

app.listen(port, () => console.log('Server listening'));
|
#!/bin/bash
##
## Copyright (c) 2015-2025 Industrial Technology Research Institute.
##
## Licensed to the Apache Software Foundation (ASF) under one
## or more contributor license agreements. See the NOTICE file
## distributed with this work for additional information
## regarding copyright ownership. The ASF licenses this file
## to you under the Apache License, Version 2.0 (the
## "License"); you may not use this file except in compliance
## with the License. You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing,
## software distributed under the License is distributed on an
## "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
## KIND, either express or implied. See the License for the
## specific language governing permissions and limitations
## under the License.
##
SOFA_DIR=/usr/sofa/
# Detect Ubuntu once (duplicate assignment removed).
OS_Type=`uname -a | grep Ubuntu`
version=1.0.00
release=1
# Package naming differs between Debian- and RPM-based systems; only
# MODULE_PREFIX is actually used below to locate the installed package.
if [ "$OS_Type" ]; then
    MODULE_NAME="SOFA-${version}-${release}_amd64.deb"
    MODULE_PREFIX="SOFA-"
else
    MODULE_NAME="SOFA-${version}-${release}.x86_64.rpm"
    MODULE_PREFIX="SOFA-"
fi
SERVICE_NAME="SOFA"

function show_usage()
{
    echo "Usage: undeploy_sofa.sh [version] $version [release] $release"
}

# Optional positional arguments: [version] [release].
if [ $# -le 1 ]; then
    if [ $# -eq 1 ]; then
        version=$1
    fi
elif [ $# -eq 2 ]; then
    version=$1
    # Fix: release comes from $2; the old code immediately clobbered it with
    # the empty $3 (release=$3), wiping the requested release.
    release=$2
else
    show_usage
    exit 1
fi

echo "$0: undeploy SOFA version=$version release=$release"

# Remove the installed package (dpkg on Ubuntu, rpm elsewhere).
if [ "$OS_Type" ]; then
    DEL_MODULE=`eval dpkg -l | grep $MODULE_PREFIX`
    dpkg -r $MODULE_PREFIX
else
    DEL_MODULE=`eval rpm -qa | grep $MODULE_PREFIX`
    rpm -e $DEL_MODULE
fi

# Clean up udev rules and the installation directory.
rm -f /etc/udev/rules.d/80-lfsm.rules
udevadm control --reload-rules && udevadm trigger
rm -rf $SOFA_DIR
echo "$0: Un-installed $DEL_MODULE"
|
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression

# Load the labelled email corpus.
emails = pd.read_csv("emails.csv")

# Hold out 20% of the messages for evaluation (fixed seed for repeatability).
X_train, X_test, y_train, y_test = train_test_split(
    emails["message"], emails["label"], test_size=0.2, random_state=0
)

# Bag-of-words features, with the vocabulary learned on the training split only.
vectorizer = CountVectorizer()
train_matrix = vectorizer.fit_transform(X_train)

# Fit a logistic-regression classifier on the training features.
model = LogisticRegression()
model.fit(train_matrix, y_train)

# Score the model on the held-out split and report accuracy.
test_matrix = vectorizer.transform(X_test)
score = model.score(test_matrix, y_test)
print("Test accuracy: ", score)
|
<!DOCTYPE html>
<html>
<head>
<title>Navigation Bar</title>
</head>
<body>
<!-- Top-level navigation linking the three site pages. -->
<nav>
<a href="index.html">Home</a>
<a href="about.html">About</a>
<a href="contact.html">Contact</a>
</nav>
</body>
</html>
|
#!/bin/bash
# Clean out-of-source CMake build with tests enabled.
# Fix: abort on the first failure so cmake never runs in the wrong directory
# if rm/mkdir/cd fail.
set -e
rm -rf build
mkdir build
cd build
cmake -DBUILD_TESTS=ON ..
cmake --build .
|
from __future__ import absolute_import, division, print_function, unicode_literals

# TODO - Support multi-language errors
# Maps internal error codes to printf-style message templates. Callers
# interpolate the %s placeholders (typically an offending value plus the
# relevant party/book/transaction ID and asset manager ID) when raising or
# logging the corresponding error.
ERROR_LOOKUP = {'address_invalid': 'Invalid addresses attribute: %s. Party ID %s for Asset Manager %s',
                'address_primary': 'Must set exactly one address as primary. Party ID %s for Asset Manager %s',
                'am_type_invalid': 'Asset Manager Type: %s is invalid. Asset Manager: %s',
                'am_account_type_invalid': 'Account Type: %s is invalid. Asset Manager: %s',
                'book_type_invalid': 'Invalid book type %s. Book ID: %s for Asset Manager: %s',
                'country_id_invalid': 'Country ID should be a ISO 3166-1 Alpha-3 code. Value: %s',
                'currency_invalid': 'Invalid currency %s. Transaction ID: %s for asset manager: %s',
                'email_invalid': 'Invalid emails attribute: %s. Party ID %s for Asset Manager %s',
                'email_primary': 'Must set exactly one email as primary. Party ID %s for Asset Manager %s',
                'email_address_invalid': 'Invalid email: %s.',
                'amend_missing_previous': 'Cannot find party to amend: ID %s for Asset Manager %s',
                'amend_missing_attribute': 'Partial amend failed for Asset Manager: %s on party: %s - '
                                           'Attribute: %s does not exist',
                'deactivate_missing_previous': 'Cannot Deactivate Party - Cannot Find ID: %s for Asset Manager: %s',
                'party_status_invalid': 'Invalid party status %s. Party ID: %s for Asset Manager: %s',
                'transaction_action_invalid': 'Invalid transaction action %s. Transaction ID: %s for Asset Manager: %s',
                'transaction_status_invalid': 'Invalid transaction status %s. Transaction ID: %s for Asset Manager: %s',
                'transaction_type_invalid': 'Invalid transaction type %s. Transaction ID: %s for Asset Manager: %s',
                'transaction_link_not_found': 'Cannot remove link - not found'}
|
<filename>fuzzers/oatpp/parser/json/mapping/ObjectMapper.cpp
#include "oatpp/parser/json/mapping/ObjectMapper.hpp"
#include "oatpp/core/macro/codegen.hpp"

typedef oatpp::parser::Caret ParsingCaret;
typedef oatpp::parser::json::mapping::Serializer Serializer;
typedef oatpp::parser::json::mapping::Deserializer Deserializer;

#include OATPP_CODEGEN_BEGIN(DTO)

class EmptyDto : public oatpp::DTO {
  DTO_INIT(EmptyDto, DTO)
};

class Test1 : public oatpp::DTO {
  DTO_INIT(Test1, DTO)
  DTO_FIELD(String, strF);
};

/* Fix: close the DTO codegen section. oatpp's codegen macros must be paired;
 * leaving BEGIN open leaks the DTO macro definitions into any code included
 * after this point. */
#include OATPP_CODEGEN_END(DTO)

/* Fuzzer entry point: feed arbitrary bytes to the JSON object mapper and
 * swallow parse errors — only crashes/UB are interesting to the fuzzer. */
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
  oatpp::String input(reinterpret_cast<const char*>(data), size);
  oatpp::parser::json::mapping::ObjectMapper mapper;
  try {
    mapper.readFromString<oatpp::Object<Test1>>(input);
  } catch(...) {}
  return 0;
}
|
#!/bin/bash
# Copyright by Shlomi Fish, 2018 under the Expat licence
# https://opensource.org/licenses/mit-license.php
# Project Euler 216: count k in [2, 50000000] with t(k) = 2*k^2 - 1 prime.
# Stage 1 prints 2*k*k - 1 for each k; `factor` factorises each value;
# stage 2 counts lines whose value has a single factor (i.e. is prime —
# lines with two or more factors match the "value: f1 f2" pattern) and
# prints a running "<line>: <count>" tally, teed into euler_216.dump.
seq 2 50000000 | perl -lane 'print +(($_*$_)<<1)-1' | xargs factor |
perl -Mbytes -lane 'unless (/: [0-9]+ [0-9]/) { $c++; } print "$.: $c"' |
tee euler_216.dump
|
<reponame>MikeJfromVA/Database-Demo
package mybatis;
// Plain data holder for the Customer table, used by the MyBatis mapping;
// fields are intentionally public.
public class Customer {
    public int code;       // primary key
    public String lastName;
    public String firstName;
    public char initial;
    public int areaCode;
    public int phone; // Should combine these two — NOTE(review): int cannot hold a full 10-digit number; a String would be safer. TODO confirm schema.
    public int balance; //numeric (9,2) — NOTE(review): int drops the two decimal places of the column; BigDecimal would match. TODO confirm.
}
|
#!/bin/bash
# Package and deploy the FLOWS-util CloudFormation stack.
# Usage: $0 <s3-bucket> [extra aws-cli args...]
if [ -z "$1" ]
then
    echo "please provide a bucket to upload artifacts required for the cloudformation template"
    exit 1
fi
# Fix: quote the bucket name so names needing quoting don't word-split.
# $2..$5 stay unquoted on purpose: they are optional pass-through arguments
# and quoting them would forward empty strings when absent.
aws cloudformation package --template-file cf.yml --s3-bucket "$1" --output-template .packaged-template.yml $2 $3
aws cloudformation deploy --template-file .packaged-template.yml --capabilities CAPABILITY_IAM --stack-name FLOWS-util $2 $3 $4 $5
|
def monthlyInterest(balance, interestRate):
    """Return one month's interest on ``balance`` at annual rate ``interestRate``.

    The annual interest (balance * interestRate) is divided evenly over
    twelve months.
    """
    annual_interest = balance * interestRate
    return annual_interest / 12
|
<gh_stars>0
import React, { Component } from 'react';
import { BrowserRouter, Route } from 'react-router-dom';
//import ReactDOM from 'react-dom';
import Home from '../containers/Home';
import Car from '../containers/Car';
import Checkout from '../containers/Checkout';
import Register from '../containers/Register';
import Login from '../containers/Login';
import Logout from '../containers/Logout';
import Products from '../containers/admin/Products';
import NewProduct from '../containers/admin/NewProduct';
import EditProduct from '../containers/admin/EditProduct';
import Categories from '../containers/admin/Categories';
import NewCategory from '../containers/admin/NewCategory';
import EditCategory from '../containers/admin/EditCategory';
import Users from '../containers/admin/Users';
import NewUser from '../containers/admin/NewUser';
import EditUser from '../containers/admin/EditUser';
import Orders from '../containers/admin/Orders';
import ShowOrder from '../containers/admin/ShowOrder';
//import App from './App';
//let store = createStore(helloReducer) // this is store
// Root component: maps each URL to its page container via react-router.
// Public routes come first, followed by the /products, /categories, /users
// and /orders admin screens.
class App extends Component {
  render() {
    return (
      <div>
        <BrowserRouter>
          <div>
            <Route exact path="/" component={Home} />
            <Route exact path="/car" component={Car} />
            <Route exact path="/checkout" component={Checkout} />
            <Route exact path="/register" component={Register} />
            <Route exact path="/login" component={Login} />
            <Route exact path="/logout" component={Logout} />
            <Route exact path="/products" component={Products} />
            <Route exact path="/products/new" component={NewProduct} />
            <Route exact path="/products/edit/:id" component={EditProduct} />
            <Route exact path="/categories" component={Categories} />
            <Route exact path="/categories/new" component={NewCategory} />
            <Route exact path="/categories/edit/:id" component={EditCategory} />
            <Route exact path="/users" component={Users} />
            <Route exact path="/users/new" component={NewUser} />
            <Route exact path="/users/edit/:id" component={EditUser} />
            <Route exact path="/orders" component={Orders} />
            <Route exact path="/orders/:id" component={ShowOrder} />
          </div>
        </BrowserRouter>
      </div>
    );
  }
}
//registerServiceWorker();
export default App;
|
public class Factorial{
    /**
     * Computes n! recursively.
     *
     * Fix: a negative argument previously recursed without bound and ended
     * in a StackOverflowError; it now fails fast with a clear exception.
     * Note that int overflows for n &gt; 12 (13! exceeds Integer.MAX_VALUE).
     *
     * @param n non-negative integer
     * @return n! (1 when n == 0)
     * @throws IllegalArgumentException if n is negative
     */
    public static int factorial(int n)
    {
        if(n < 0)
            throw new IllegalArgumentException("n must be non-negative, got " + n);
        if(n == 0)
            return 1;
        else
            return n * factorial(n-1);
    }
}
|
<reponame>douyy/cakeManage<filename>app/view3/view3.js
'use strict';

// View 3: birthday-comment ("pl") admin screen — lists entries, supports
// phone search, viewing details and posting replies.
angular.module('myApp.view3', ['ngRoute'])

.config(['$routeProvider', function($routeProvider) {
  $routeProvider.when('/view3', {
    templateUrl: 'view3/view3.html',
    controller: 'View3Ctrl'
  });
}])

.controller('View3Ctrl',function($http,$scope){
  // Load every entry from the backend.
  $http.get('http://localhost:3000/birthday/pl').then(function(res){
    // console.log(res.data.data);
    $scope.talks = res.data.data;
  });
  // Show all entries (clear any active phone filter).
  $scope.alls = true;
  $scope.all = function(){
    $scope.sphone = '';
    $scope.phone = '';
    $scope.alls = true;
  };
  // Search by phone number.
  $scope.seach = function(val){
    $scope.phone = val;
    $scope.sphone = '';
  }
  // Pop up the reply box.
  $scope.reply = function(){
    $scope.rep = true;
  };
  // View the details of a single entry.
  $scope.look = function(detail){
    $scope.detail = detail;
    $scope.rep = false;
  };
  // Send a reply; alerts (user-facing, kept in Chinese) report empty input,
  // success or failure.
  $scope.send = function(phone,txt){
    console.log(phone,txt);
    if(txt == undefined){
      alert('回复内容不能为空');
      return;
    }
    $http.put('http://localhost:3000/birthday/pl',{phone:phone,reply:txt}).then(function(res){
      if(res.data.success){
        alert('回复成功');
      }else{
        alert('回复失败,请重新回复');
      }
      $scope.txt = '';
      $scope.rep = false;
    });
  };
});
#!/bin/bash
# Run `go test` for every non-vendor package, collecting merged coverage
# into coverage.txt.
set -e
echo "" > coverage.txt
for d in $(go list ./... | grep -v vendor); do
    race=""
    # Fix: quote $GOARCH — unquoted and unset it expands to `[ == "amd64" ]`,
    # a test syntax error that aborts the whole run under `set -e`.
    if [ "$GOARCH" == "amd64" ]; then
        race="-race"
    fi
    go test $race -coverprofile=profile.out -covermode=atomic "$d"
    if [ -f profile.out ]; then
        cat profile.out >> coverage.txt
        rm profile.out
    fi
done
exit 0
|
from .authorization import Authorization
from .configuration import Configuration
from .endpoint import Endpoint
|
'use strict';
const ChildProcess = require('child_process');
const Fs = require('fs');
const Os = require('os');
const Path = require('path');
const Boom = require('@hapi/boom');
const Code = require('@hapi/code');
const File = require('@hapi/file');
const Hapi = require('@hapi/hapi');
const Hoek = require('@hapi/hoek');
const Inert = require('..');
const Lab = require('@hapi/lab');
const InertFs = require('../lib/fs');
const internals = {};
const lab = exports.lab = Lab.script();
const { describe, it } = lab;
const expect = Code.expect;
describe('file', () => {
describe('handler()', () => {
const provisionServer = async (options, etagsCacheMaxSize) => {
const defaults = { compression: { minBytes: 1 }, plugins: { inert: { etagsCacheMaxSize } } };
const server = new Hapi.Server(Hoek.applyToDefaults(defaults, options || {}));
await server.register(Inert);
return server;
};
it('returns a file in the response with the correct headers', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file('package.json', { confine: '../' }).code(499);
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(499);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.not.exist();
});
it('returns a file using route relativeTo', async () => {
const server = await provisionServer();
const handler = (request, h) => {
return h.file('../package.json', { confine: false });
};
server.route({ method: 'GET', path: '/file', handler, config: { files: { relativeTo: __dirname } } });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
});
it('returns a file in the response with the correct headers using cwd relative paths without content-disposition header', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: './package.json' } });
const res = await server.inject('/');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.not.exist();
});
it('returns a file in the response with the inline content-disposition header when using route config', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: './' } } });
server.route({ method: 'GET', path: '/', handler: { file: { path: './package.json', mode: 'inline' } } });
const res = await server.inject('/');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.equal('inline; filename=package.json');
});
it('returns a file in the response with the inline content-disposition header when using route config and overriding filename', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: './' } } });
server.route({ method: 'GET', path: '/', handler: { file: { path: './package.json', mode: 'inline', filename: 'attachment.json' } } });
const res = await server.inject('/');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.equal('inline; filename=attachment.json');
});
it('returns a file in the response with the attachment content-disposition header when using route config', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: './package.json', mode: 'attachment' } } });
const res = await server.inject('/');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.equal('attachment; filename=package.json');
});
it('returns a file in the response with the attachment content-disposition header when using route config and overriding filename', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: './package.json', mode: 'attachment', filename: 'attachment.json' } } });
const res = await server.inject('/');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.equal('attachment; filename=attachment.json');
});
it('returns a file in the response without the content-disposition header when using route config mode false', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: './package.json', mode: false } } });
const res = await server.inject('/');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.not.exist();
});
it('returns a file with correct headers when using attachment mode', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join(__dirname, '..', 'package.json'), { confine: '..', mode: 'attachment' });
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.equal('attachment; filename=package.json');
});
it('returns a file with correct headers when using attachment mode and overriding the filename', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join(__dirname, '..', 'package.json'), { confine: '..', mode: 'attachment', filename: 'attachment.json' });
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.equal('attachment; filename=attachment.json');
});
it('returns a file with correct headers when using inline mode', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join(__dirname, '..', 'package.json'), { confine: '..', mode: 'inline' });
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.equal('inline; filename=package.json');
});
it('returns a file with correct headers when using inline mode and overriding filename', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join(__dirname, '..', 'package.json'), { confine: '..', mode: 'inline', filename: 'attachment.json' });
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
expect(res.headers['content-disposition']).to.equal('inline; filename=attachment.json');
});
it('returns a partial file with the start option', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join('file', 'note.txt'), { start: 2 });
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.equal('st');
expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
expect(res.headers['content-length']).to.equal(2);
});
it('returns a partial file with the start and end option', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join('file', 'note.txt'), { start: 1, end: 2 });
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.equal('es');
expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
expect(res.headers['content-length']).to.equal(2);
});
it('returns a 404 when the file is not found', async () => {
const basePath = Path.join(process.platform === 'win32' ? 'C://' : '/', 'no/such/path/x1');
const server = await provisionServer({ routes: { files: { relativeTo: basePath } } });
server.route({ method: 'GET', path: '/filenotfound', handler: { file: 'nopes' } });
const res = await server.inject('/filenotfound');
expect(res.statusCode).to.equal(404);
expect(res.request.response._error.data.path).to.equal(Path.join(basePath, 'nopes'));
});
it('returns a 403 when the file is a directory', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/filefolder', handler: { file: 'lib' } });
const res = await server.inject('/filefolder');
expect(res.statusCode).to.equal(403);
expect(res.request.response._error.data.path).to.equal(Path.join(__dirname, '..', 'lib'));
});
it('returns a file using the built-in handler config', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: Path.join(__dirname, '..') } } });
server.route({ method: 'GET', path: '/staticfile', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const res = await server.inject('/staticfile');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
});
it('returns a file using the file function with the built-in handler config', async () => {
const filenameFn = (request) => {
return './lib/' + request.params.file;
};
const server = await provisionServer({ routes: { files: { relativeTo: Path.join(__dirname, '..') } } });
server.route({ method: 'GET', path: '/filefn/{file}', handler: { file: filenameFn } });
const res = await server.inject('/filefn/index.js');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('Set correct confine value');
expect(res.headers['content-type']).to.equal('application/javascript; charset=utf-8');
expect(res.headers['content-length']).to.exist();
});
// h.file() with a relative path is resolved against route files.relativeTo
// ('.' here, i.e. the working directory); confine: true keeps it inside that root.
it('returns a file in the response with the correct headers (relative path)', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: '.' } } });
const relativeHandler = (request, h) => {
return h.file('./package.json', { confine: true });
};
server.route({ method: 'GET', path: '/relativefile', handler: relativeHandler });
const res = await server.inject('/relativefile');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
});
// The same relative resolution applies to the declarative { file: './…' } form.
it('returns a file using the built-in handler config (relative path)', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: Path.join(__dirname, '..') } } });
server.route({ method: 'GET', path: '/relativestaticfile', handler: { file: './package.json' } });
const res = await server.inject('/relativestaticfile');
expect(res.statusCode).to.equal(200);
expect(res.payload).to.contain('hapi');
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-length']).to.exist();
});
// A file with no recognizable extension falls back to the generic
// application/octet-stream mime type.
it('returns a file with default mime type', async () => {

    const server = await provisionServer();

    const extensionless = Path.join(__dirname, 'file', 'FILE');
    server.route({ method: 'GET', path: '/', handler: { file: extensionless } });

    const response = await server.inject('/');

    expect(response.statusCode).to.equal(200);
    expect(response.headers['content-type']).to.equal('application/octet-stream');
});
// .type() on the h.file() response overrides the extension-derived mime type.
it('returns a file in the response with the correct headers using custom mime type', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file('../LICENSE.md', { confine: false }).type('application/example');
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('application/example');
});
// Two overlapping injects for the same file must both succeed and both carry
// etag + last-modified headers (shared file state must not race).
it('handles multiple simultaneous requests', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const first = server.inject('/file');
const second = server.inject('/file');
const res1 = await first;
expect(res1.statusCode).to.equal(200);
expect(res1.headers).to.include('etag');
expect(res1.headers).to.include('last-modified');
const res2 = await second;
expect(res2.statusCode).to.equal(200);
expect(res2.headers).to.include('etag');
expect(res2.headers).to.include('last-modified');
});
// NOTE(review): the second provisionServer argument (0) presumably sets the
// etags cache size to zero so no etag can be produced — confirm against the
// provisionServer helper defined earlier in this file.
it('does not cache etags', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } }, 0);
server.route({ method: 'GET', path: '/note', handler: { file: './file/note.txt' } });
const res1 = await server.inject('/note');
expect(res1.statusCode).to.equal(200);
expect(res1.result).to.equal('Test');
expect(res1.headers.etag).to.not.exist();
const res2 = await server.inject('/note');
expect(res2.statusCode).to.equal(200);
expect(res2.result).to.equal('Test');
expect(res2.headers.etag).to.not.exist();
});
// etagMethod: false disables etag generation entirely for the route.
it('does not return etag when etagMethod is false', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } }, 0);
server.route({ method: 'GET', path: '/note', handler: { file: { path: './file/note.txt', etagMethod: false } } });
const res1 = await server.inject('/note');
expect(res1.statusCode).to.equal(200);
expect(res1.result).to.equal('Test');
expect(res1.headers.etag).to.not.exist();
const res2 = await server.inject('/note');
expect(res2.statusCode).to.equal(200);
expect(res2.result).to.equal('Test');
expect(res2.headers.etag).to.not.exist();
});
// 'simple' etags change whenever the file is rewritten — even with identical
// content — because they derive from file metadata rather than a content hash.
it('invalidates etags when file changes (simple)', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
server.route({ method: 'GET', path: '/note', handler: { file: { path: './file/note.txt', etagMethod: 'simple' } } });
// No etag, never requested
const res1 = await server.inject('/note');
expect(res1.statusCode).to.equal(200);
expect(res1.result).to.equal('Test');
expect(res1.headers.etag).to.exist();
const etag1 = res1.headers.etag;
expect(etag1.slice(0, 1)).to.equal('"');
expect(etag1.slice(-1)).to.equal('"');
// etag
const res2 = await server.inject({ url: '/note', headers: { 'if-none-match': etag1 } });
expect(res2.statusCode).to.equal(304);
expect(res2.headers).to.not.include('content-length');
expect(res2.headers).to.include('etag');
expect(res2.headers).to.include('last-modified');
// Rewrite the file with the same bytes; the metadata (mtime) still changes.
const fd1 = Fs.openSync(Path.join(__dirname, 'file', 'note.txt'), 'w');
Fs.writeSync(fd1, Buffer.from('Test'), 0, 4);
Fs.closeSync(fd1);
// etag after file modified, content unchanged
const res3 = await server.inject({ url: '/note', headers: { 'if-none-match': etag1 } });
expect(res3.statusCode).to.equal(200);
expect(res3.result).to.equal('Test');
expect(res3.headers.etag).to.exist();
const etag2 = res3.headers.etag;
expect(etag1).to.not.equal(etag2);
const fd2 = Fs.openSync(Path.join(__dirname, 'file', 'note.txt'), 'w');
Fs.writeSync(fd2, Buffer.from('Test1'), 0, 5);
Fs.closeSync(fd2);
// etag after file modified, content changed
const res4 = await server.inject({ url: '/note', headers: { 'if-none-match': etag2 } });
expect(res4.statusCode).to.equal(200);
expect(res4.result).to.equal('Test1');
expect(res4.headers.etag).to.exist();
const etag3 = res4.headers.etag;
expect(etag1).to.not.equal(etag3);
expect(etag2).to.not.equal(etag3);
// Restore the original content so later tests see 'Test'.
const fd3 = Fs.openSync(Path.join(__dirname, 'file', 'note.txt'), 'w');
Fs.writeSync(fd3, Buffer.from('Test'), 0, 4);
Fs.closeSync(fd3);
});
// The default (hash) etag method derives from content: rewriting identical
// bytes keeps the etag; changing bytes changes it; restoring bytes restores it.
it('invalidates etags when file changes (hash)', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
server.route({ method: 'GET', path: '/note', handler: { file: './file/note.txt' } });
// etag, never requested
const res1 = await server.inject('/note');
expect(res1.statusCode).to.equal(200);
expect(res1.result).to.equal('Test');
expect(res1.headers.etag).to.exist();
const etag1 = res1.headers.etag;
expect(etag1.slice(0, 1)).to.equal('"');
expect(etag1.slice(-1)).to.equal('"');
// etag
const res2 = await server.inject({ url: '/note', headers: { 'if-none-match': etag1 } });
expect(res2.statusCode).to.equal(304);
expect(res2.headers).to.not.include('content-length');
expect(res2.headers).to.include('etag');
expect(res2.headers).to.include('last-modified');
// Recreate the file (new inode / metadata) with the same content.
Fs.unlinkSync(Path.join(__dirname, 'file', 'note.txt'));
const fd1 = Fs.openSync(Path.join(__dirname, 'file', 'note.txt'), 'w');
Fs.writeSync(fd1, Buffer.from('Test'), 0, 4);
Fs.closeSync(fd1);
// etag after file modified, content unchanged
const res3 = await server.inject('/note');
expect(res3.statusCode).to.equal(200);
expect(res3.result).to.equal('Test');
expect(res3.headers.etag).to.exist();
const etag2 = res3.headers.etag;
expect(etag1).to.equal(etag2);
const fd2 = Fs.openSync(Path.join(__dirname, 'file', 'note.txt'), 'w');
Fs.writeSync(fd2, Buffer.from('Test1'), 0, 5);
Fs.closeSync(fd2);
// etag after file modified, content changed
const res4 = await server.inject({ url: '/note', headers: { 'if-none-match': etag2 } });
expect(res4.statusCode).to.equal(200);
expect(res4.result).to.equal('Test1');
expect(res4.headers.etag).to.exist();
const etag3 = res4.headers.etag;
expect(etag1).to.not.equal(etag3);
const fd3 = Fs.openSync(Path.join(__dirname, 'file', 'note.txt'), 'w');
Fs.writeSync(fd3, Buffer.from('Test'), 0, 4);
Fs.closeSync(fd3);
// etag, content restored
const res5 = await server.inject('/note');
expect(res5.statusCode).to.equal(200);
expect(res5.result).to.equal('Test');
expect(res5.headers.etag).to.exist();
const etag4 = res5.headers.etag;
expect(etag1).to.equal(etag4);
});
// if-modified-since later than last-modified → 304 without a body.
it('returns a 304 when the request has if-modified-since and the response has not been modified since (larger)', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const res1 = await server.inject('/file');
const last = new Date(Date.parse(res1.headers['last-modified']) + 1000);
const res2 = await server.inject({ url: '/file', headers: { 'if-modified-since': last.toUTCString() } });
expect(res2.statusCode).to.equal(304);
expect(res2.headers).to.not.include('content-length');
expect(res2.headers).to.include('etag');
expect(res2.headers).to.include('last-modified');
});
// if-modified-since exactly equal to last-modified also yields a 304.
it('returns a 304 when the request has if-modified-since and the response has not been modified since (equal)', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const res1 = await server.inject('/file');
const res2 = await server.inject({ url: '/file', headers: { 'if-modified-since': res1.headers['last-modified'] } });
expect(res2.statusCode).to.equal(304);
expect(res2.headers).to.not.include('content-length');
expect(res2.headers).to.include('etag');
expect(res2.headers).to.include('last-modified');
});
// A 304 short-circuits the body, but validator headers must still be computed.
it('computes etag header for 304 response', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const future = new Date(Date.now() + 1000);
const res = await server.inject({ url: '/file', headers: { 'if-modified-since': future } });
expect(res.statusCode).to.equal(304);
expect(res.headers).to.include('etag');
expect(res.headers).to.include('last-modified');
});
// HEAD responses (no body transfer) must also carry etag and last-modified.
it('computes etag header for head response', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const res = await server.inject({ method: 'HEAD', url: '/file' });
expect(res.statusCode).to.equal(200);
expect(res.headers).to.include('etag');
expect(res.headers).to.include('last-modified');
});
// When a response is content-encoded, the etag must change (the encoded entity
// differs), gaining an encoding suffix, while last-modified stays the same as
// for the unencoded response.
it('changes etag when content encoding is used', async () => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });

    const res1 = await server.inject('/file');
    expect(res1.statusCode).to.equal(200);
    expect(res1.headers).to.include('etag');
    expect(res1.headers).to.include('last-modified');

    const res2 = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
    expect(res2.statusCode).to.equal(200);
    expect(res2.headers.vary).to.equal('accept-encoding');
    expect(res2.headers.etag).to.not.equal(res1.headers.etag);
    // Encoded etag is the base etag with a '-<encoding>' suffix inside the quotes.
    expect(res2.headers.etag).to.contain(res1.headers.etag.slice(0, -1) + '-');
    // Bug fix: this previously compared res2 against itself, making the
    // assertion a tautology; the intent is to compare against res1.
    expect(res2.headers['last-modified']).to.equal(res1.headers['last-modified']);
});
// Force an I/O failure during etag hashing by closing the file descriptor out
// from under the read stream; the route must fail with a descriptive 500.
it('return a 500 on hashing errors', async () => {
const server = await provisionServer();
const filepath = Path.join(__dirname, '..', 'package.json');
server.route({ method: 'GET', path: '/file', handler: { file: filepath } });
// Prepare complicated mocking setup to fake an io error
const orig = InertFs.createReadStream;
InertFs.createReadStream = function (path, options) {
// Self-restoring patch: the first call puts the original back, then
// schedules the fd to be closed while the stream is being consumed.
InertFs.createReadStream = orig;
process.nextTick(() => {
Fs.closeSync(options.fd);
});
return InertFs.createReadStream(path, options);
};
const res = await server.inject('/file');
expect(res.statusCode).to.equal(500);
expect(res.request.response._error).to.be.an.error(/^Failed to hash file/);
expect(res.request.response._error.data.path).to.equal(filepath);
});
// Same fd-close trick, but with two concurrent requests — both must observe
// the hashing failure (shared in-flight hash state must propagate the error).
it('handles multiple simultaneous request hashing errors', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
// Prepare complicated mocking setup to fake an io error
const orig = InertFs.createReadStream;
InertFs.createReadStream = function (path, options) {
InertFs.createReadStream = orig;
process.nextTick(() => {
Fs.closeSync(options.fd);
});
return InertFs.createReadStream(path, options);
};
const first = server.inject('/file');
const second = server.inject('/file');
await new Promise((resolve) => setImmediate(resolve));
const res1 = await first;
expect(res1.statusCode).to.equal(500);
expect(res1.request.response._error).to.be.an.error(/^Failed to hash file/);
const res2 = await second;
expect(res2.statusCode).to.equal(500);
expect(res2.request.response._error).to.be.an.error(/^Failed to hash file/);
});
// last-modified must be a valid HTTP date matching the file's mtime.
it('returns valid http date responses in last-modified header', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.headers['last-modified']).to.equal(Fs.statSync(Path.join(__dirname, '..', 'package.json')).mtime.toUTCString());
});
// An unparseable if-modified-since header is ignored (full 200 response).
it('returns 200 if if-modified-since is invalid', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const res = await server.inject({ url: '/file', headers: { 'if-modified-since': 'some crap' } });
expect(res.statusCode).to.equal(200);
});
// An unparseable last-modified set by the handler disables 304 handling.
it('returns 200 if last-modified is invalid', async () => {
const server = await provisionServer();
server.route({
method: 'GET',
path: '/',
handler: (request, h) => {
return h.response('ok').header('last-modified', 'some crap');
}
});
const res = await server.inject({ url: '/', headers: { 'if-modified-since': 'Fri, 28 Mar 2014 22:52:39 GMT' } });
expect(res.statusCode).to.equal(200);
});
// fd-leak check: after a 304 (stream never read), lsof on this process must
// show no open handle on package.json. Skipped on win32 (no lsof).
it('closes file handlers when not reading file stream', { skip: process.platform === 'win32' }, async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const res1 = await server.inject('/file');
const res2 = await server.inject({ url: '/file', headers: { 'if-modified-since': res1.headers.date } });
expect(res2.statusCode).to.equal(304);
await new Promise((resolve) => {
const cmd = ChildProcess.spawn('lsof', ['-p', process.pid]);
let lsof = '';
cmd.stdout.on('data', (buffer) => {
lsof += buffer.toString();
});
cmd.stdout.on('end', () => {
let count = 0;
const lines = lsof.split('\n');
for (let i = 0; i < lines.length; ++i) {
count += !!lines[i].match(/package.json/);
}
expect(count).to.equal(0);
resolve();
});
cmd.stdin.end();
});
});
// Same fd-leak check for a manually created read stream that a 304 discards.
it('closes file handlers when not using a manually open file stream', { skip: process.platform === 'win32' }, async () => {
const server = await provisionServer();
server.route({
method: 'GET',
path: '/file',
handler: (request, h) => {
return h.response(Fs.createReadStream(Path.join(__dirname, '..', 'package.json'))).header('etag', 'abc');
}
});
const res1 = await server.inject('/file');
const res2 = await server.inject({ url: '/file', headers: { 'if-none-match': res1.headers.etag } });
expect(res2.statusCode).to.equal(304);
await new Promise((resolve) => {
const cmd = ChildProcess.spawn('lsof', ['-p', process.pid]);
let lsof = '';
cmd.stdout.on('data', (buffer) => {
lsof += buffer.toString();
});
cmd.stdout.on('end', () => {
let count = 0;
const lines = lsof.split('\n');
for (let i = 0; i < lines.length; ++i) {
count += !!lines[i].match(/package.json/);
}
expect(count).to.equal(0);
resolve();
});
cmd.stdin.end();
});
});
// On-the-fly gzip: content-length is dropped because the encoded size is
// unknown up front (chunked/streamed encoding).
it('returns a gzipped file in the response when the request accepts gzip', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join(__dirname, '..', 'package.json'), { confine: '..' });
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-encoding']).to.equal('gzip');
expect(res.headers['content-length']).to.not.exist();
expect(res.payload).to.exist();
});
// Non-compressible mime types (image/png) are served raw with exact length.
it('returns a plain file when not compressible', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join(__dirname, 'file', 'image.png'));
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('image/png');
expect(res.headers['content-encoding']).to.not.exist();
expect(res.headers['content-length']).to.equal(42010);
expect(res.payload).to.exist();
});
// Same as the gzip case, negotiated to deflate instead.
it('returns a deflated file in the response when the request accepts deflate', async () => {
const server = await provisionServer({ routes: { files: { relativeTo: __dirname } } });
const handler = (request, h) => {
return h.file(Path.join(__dirname, '..', 'package.json'), { confine: '..' });
};
server.route({ method: 'GET', path: '/file', handler });
const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'deflate' } });
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('application/json; charset=utf-8');
expect(res.headers['content-encoding']).to.equal('deflate');
expect(res.headers['content-length']).to.not.exist();
expect(res.payload).to.exist();
});
// lookupCompressed serves the sibling '<path>.gz' file verbatim, so the exact
// byte length is known and asserted.
it('returns a gzipped file using precompressed file', async () => {
const content = Fs.readFileSync('./test/file/image.png.gz');
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: './test/file/image.png', lookupCompressed: true } } });
const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('image/png');
expect(res.headers['content-encoding']).to.equal('gzip');
expect(res.headers['content-length']).to.equal(content.length);
expect(res.rawPayload.length).to.equal(content.length);
});
// lookupMap overrides the default '.gz' suffix per encoding ('#gz' here).
it('returns a gzipped file using precompressed file using lookupMap', async () => {
const content = Fs.readFileSync('./test/file/image.jpg#gz');
const lookupMap = { gzip: '#gz' };
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: './test/file/image.jpg', lookupCompressed: true, lookupMap } } });
const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('image/jpeg');
expect(res.headers['content-encoding']).to.equal('gzip');
expect(res.headers['content-length']).to.equal(content.length);
expect(res.rawPayload.length).to.equal(content.length);
});
// Missing '<path>.gz' falls back to on-the-fly compression (no length header).
it('returns a gzipped file when precompressed file not found', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: './test/file/note.txt', lookupCompressed: true } } });
const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
expect(res.statusCode).to.equal(200);
expect(res.headers['content-encoding']).to.equal('gzip');
expect(res.headers['content-length']).to.not.exist();
expect(res.payload).to.exist();
});
// Conditional requests still resolve to 304 when a precompressed variant exists.
it('returns a 304 when using precompressed file and if-modified-since set', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: './test/file/image.png', lookupCompressed: true } } });
const res1 = await server.inject('/file');
const res2 = await server.inject({ url: '/file', headers: { 'if-modified-since': res1.headers.date, 'accept-encoding': 'gzip' } });
expect(res2.statusCode).to.equal(304);
});
// Without accept-encoding the original (uncompressed) file is served.
it('ignores precompressed file when content-encoding not requested', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: './test/file/image.png', lookupCompressed: true } } });
const res = await server.inject('/file');
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('image/png');
expect(res.headers['content-encoding']).to.not.exist();
expect(res.payload).to.exist();
});
// Server-wide compression: false disables precompressed lookup too.
it('ignores precompressed file when connection compression is disabled', async () => {
const server = await provisionServer({ compression: false });
server.route({ method: 'GET', path: '/file', handler: { file: { path: './test/file/image.png', lookupCompressed: true } } });
const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('image/png');
expect(res.headers['content-encoding']).to.not.exist();
expect(res.payload).to.exist();
});
// Partial content options cannot be served from a precompressed variant
// (offsets refer to the uncompressed bytes), so lookup is skipped.
it('ignores precompressed file when using start option', async () => {
const server = await provisionServer();
server.route({
method: 'GET', path: '/file', handler: {
file: {
path: './test/file/image.png',
lookupCompressed: true,
start: 5
}
}
});
const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
expect(res.statusCode).to.equal(200);
expect(res.headers['content-type']).to.equal('image/png');
expect(res.headers['content-encoding']).to.not.exist();
expect(res.payload).to.exist();
});
// The `end` option also disables precompressed lookup (offsets are relative to
// the uncompressed bytes). Fix: the title was copy-pasted from the previous
// test and wrongly said 'start option', producing a duplicate test name.
it('ignores precompressed file when using end option', async () => {

    const server = await provisionServer();
    server.route({
        method: 'GET', path: '/file', handler: {
            file: {
                path: './test/file/image.png',
                lookupCompressed: true,
                end: 199
            }
        }
    });

    const res = await server.inject({ url: '/file', headers: { 'accept-encoding': 'gzip' } });
    expect(res.statusCode).to.equal(200);
    // end is inclusive, so bytes 0..199 => content-length of 200.
    expect(res.headers['content-length']).to.equal(200);
    expect(res.headers['content-type']).to.equal('image/png');
    expect(res.headers['content-encoding']).to.not.exist();
    expect(res.payload).to.exist();
});
// Function-valued file paths cannot be validated at route-add time, so
// registering them alongside a path parameter must not throw.
it('does not throw an error when adding a route with a parameter and function path', async () => {
const server = await provisionServer();
const fn = () => {
server.route({ method: 'GET', path: '/fileparam/{path}', handler: { file: () => { } } });
server.route({ method: 'GET', path: '/filepathparam/{path}', handler: { file: { path: () => { } } } });
};
expect(fn).to.not.throw();
});
// Unlinking the file after the response is prepared (onPreResponse) must not
// break the transmission — the already-open fd keeps the content readable.
it('responds correctly when file is removed while processing', async () => {
const filename = File.uniqueFilename(Os.tmpdir()) + '.package.json';
Fs.writeFileSync(filename, 'data');
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: filename, confine: false } } });
server.ext('onPreResponse', (request, h) => {
Fs.unlinkSync(filename);
return h.continue;
});
const res = await server.inject('/');
expect(res.statusCode).to.equal(200);
});
// Replacing the file mid-request must still deliver the originally opened
// content ('data', 4 bytes), not the replacement ('database').
it('responds correctly when file is changed while processing', async () => {
const filename = File.uniqueFilename(Os.tmpdir()) + '.package.json';
Fs.writeFileSync(filename, 'data');
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: filename, confine: false } } });
server.ext('onPreResponse', (request, h) => {
const tempfile = filename + '~';
if (process.platform === 'win32') {
// workaround to replace open file without a permission error
Fs.renameSync(filename, tempfile);
Fs.writeFileSync(filename, 'database');
Fs.unlinkSync(tempfile);
}
else {
// atomic file replace
Fs.writeFileSync(tempfile, 'database');
Fs.renameSync(tempfile, filename);
}
return h.continue;
});
const res = await server.inject('/');
Fs.unlinkSync(filename);
expect(res.statusCode).to.equal(200);
expect(res.headers['content-length']).to.equal(4);
expect(res.payload).to.equal('data');
});
// A 304 must skip response marshalling entirely; _marshall is replaced with a
// throwing stub to prove it is never invoked.
it('does not marshal response on 304', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: Path.join(__dirname, '..', 'package.json') } });
const res1 = await server.inject('/file');
server.ext('onPreResponse', (request, h) => {
request.response._marshall = () => {
throw new Error('not called');
};
return h.continue;
});
const res = await server.inject({ url: '/file', headers: { 'if-modified-since': res1.headers.date } });
expect(res.statusCode).to.equal(304);
});
// An extension throwing after the file response is prepared yields a 500
// carrying that error.
it('returns error when aborted while processing', async () => {
const filename = File.uniqueFilename(Os.tmpdir()) + '.package.json';
Fs.writeFileSync(filename, 'data');
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: filename, confine: false } } });
server.ext('onPreResponse', (request, h) => {
throw Boom.internal('crapping out');
});
const res = await server.inject('/');
expect(res.statusCode).to.equal(500);
expect(res.request.response._error).to.be.an.error('crapping out');
});
// Inject a one-shot fstat failure; the error must be wrapped with context and
// carry the file path.
it('returns error when stat fails unexpectedly', async () => {
const filename = File.uniqueFilename(Os.tmpdir()) + '.package.json';
Fs.writeFileSync(filename, 'data');
const orig = InertFs.fstat;
InertFs.fstat = function (fd) { // can return EIO error
InertFs.fstat = orig;
throw new Error('failed');
};
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: filename, confine: false } } });
const res = await server.inject('/');
expect(res.statusCode).to.equal(500);
expect(res.request.response._error).to.be.an.error('Failed to stat file: failed');
expect(res.request.response._error.data.path).to.equal(filename);
});
// Same pattern for a one-shot open failure.
it('returns error when open fails unexpectedly', async () => {
const filename = File.uniqueFilename(Os.tmpdir()) + '.package.json';
Fs.writeFileSync(filename, 'data');
const orig = InertFs.open;
InertFs.open = function () { // can return EMFILE error
InertFs.open = orig;
throw new Error('failed');
};
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: filename, confine: false } } });
const res = await server.inject('/');
expect(res.statusCode).to.equal(500);
expect(res.request.response._error).to.be.an.error('Failed to open file: failed');
expect(res.request.response._error.data.path).to.equal(filename);
});
// Both EACCES and EPERM open failures must map to a 403. The second request
// swaps whichever code the OS produced for the other one to cover both paths.
it('returns a 403 when missing file read permission', async () => {
const filename = File.uniqueFilename(Os.tmpdir()) + '.package.json';
Fs.writeFileSync(filename, 'data');
let retainedFd;
if (process.platform === 'win32') {
// make a permissionless file by unlinking an open file
retainedFd = Fs.openSync(filename, 'r');
Fs.unlinkSync(filename);
}
else {
Fs.chmodSync(filename, 0);
}
const server = await provisionServer();
server.route({ method: 'GET', path: '/', handler: { file: { path: filename, confine: false } } });
let didOpen = false;
const res1 = await server.inject('/');
const orig = InertFs.open;
InertFs.open = async function (path, mode) { // fake alternate permission error
InertFs.open = orig;
didOpen = true;
try {
return await InertFs.open(path, mode);
}
catch (err) {
if (err.code === 'EACCES') {
err.code = 'EPERM';
err.errno = -1;
}
else if (err.code === 'EPERM') {
err.code = 'EACCES';
err.errno = -13;
}
throw err;
}
};
const res2 = await server.inject('/');
// cleanup
if (typeof retainedFd === 'number') {
Fs.closeSync(retainedFd);
}
else {
Fs.unlinkSync(filename);
}
expect(res1.statusCode).to.equal(403);
expect(res2.statusCode).to.equal(403);
expect(didOpen).to.equal(true);
});
describe('response range', () => {
it('returns a subset of a file (start)', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });
const res = await server.inject({ url: '/file', headers: { 'range': 'bytes=0-4' } });
expect(res.statusCode).to.equal(206);
expect(res.headers['content-length']).to.equal(5);
expect(res.headers['content-range']).to.equal('bytes 0-4/42010');
expect(res.headers['accept-ranges']).to.equal('bytes');
expect(res.rawPayload).to.equal(Buffer.from('\x89PNG\r', 'ascii'));
});
it('returns a subset of a file (middle)', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });
const res = await server.inject({ url: '/file', headers: { 'range': 'bytes=1-5' } });
expect(res.statusCode).to.equal(206);
expect(res.headers['content-length']).to.equal(5);
expect(res.headers['content-range']).to.equal('bytes 1-5/42010');
expect(res.headers['accept-ranges']).to.equal('bytes');
expect(res.rawPayload).to.equal(Buffer.from('PNG\r\n', 'ascii'));
});
it('returns a subset of a file (-to)', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });
const res = await server.inject({ url: '/file', headers: { 'range': 'bytes=-5' } });
expect(res.statusCode).to.equal(206);
expect(res.headers['content-length']).to.equal(5);
expect(res.headers['content-range']).to.equal('bytes 42005-42009/42010');
expect(res.headers['accept-ranges']).to.equal('bytes');
expect(res.rawPayload).to.equal(Buffer.from('D\xAEB\x60\x82', 'ascii'));
});
it('returns a subset of a file (from-)', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });
const res = await server.inject({ url: '/file', headers: { 'range': 'bytes=42005-' } });
expect(res.statusCode).to.equal(206);
expect(res.headers['content-length']).to.equal(5);
expect(res.headers['content-range']).to.equal('bytes 42005-42009/42010');
expect(res.headers['accept-ranges']).to.equal('bytes');
expect(res.rawPayload).to.equal(Buffer.from('D\xAEB\x60\x82', 'ascii'));
});
it('returns a subset of a file (beyond end)', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });
const res = await server.inject({ url: '/file', headers: { 'range': 'bytes=42005-42011' } });
expect(res.statusCode).to.equal(206);
expect(res.headers['content-length']).to.equal(5);
expect(res.headers['content-range']).to.equal('bytes 42005-42009/42010');
expect(res.headers['accept-ranges']).to.equal('bytes');
expect(res.rawPayload).to.equal(Buffer.from('D\xAEB\x60\x82', 'ascii'));
});
it('returns a subset of a file (if-range)', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });
const res1 = await server.inject('/file');
const res2 = await server.inject({ url: '/file', headers: { 'range': 'bytes=42005-42011', 'if-range': res1.headers.etag } });
expect(res2.statusCode).to.equal(206);
expect(res2.headers['content-length']).to.equal(5);
expect(res2.headers['content-range']).to.equal('bytes 42005-42009/42010');
expect(res2.headers['accept-ranges']).to.equal('bytes');
expect(res2.rawPayload).to.equal(Buffer.from('D\xAEB\x60\x82', 'ascii'));
});
it('returns 200 on incorrect if-range', async () => {
const server = await provisionServer();
server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });
const res = await server.inject({ url: '/file', headers: { 'range': 'bytes=42005-42011', 'if-range': 'abc' } });
expect(res.statusCode).to.equal(200);
});
// Every invalid range variant must yield 416 plus a full-length content-range
const expect416 = async (rangeHeader) => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });

    const res = await server.inject({ url: '/file', headers: { range: rangeHeader } });
    expect(res.statusCode).to.equal(416);
    expect(res.headers['content-range']).to.equal('bytes */42010');
};

it('returns 416 on invalid range (unit)', () => expect416('horses=1-5'));

it('returns 416 on invalid range (inversed)', () => expect416('bytes=5-1'));

it('returns 416 on invalid range (format)', () => expect416('bytes 1-5'));

it('returns 416 on invalid range (empty range)', () => expect416('bytes=-'));
it('returns 200 on multiple ranges', async () => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });

    // Multi-part ranges are not supported: the full resource is served instead
    const res = await server.inject({ url: '/file', headers: { range: 'bytes=1-5,7-10' } });
    expect(res.headers['content-length']).to.equal(42010);
    expect(res.statusCode).to.equal(200);
});
it('reads partial file content for a non-compressible file', async () => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png'), etagMethod: false } } });

    // Intercept the first createReadStream call to capture its options, then
    // restore the original implementation immediately.
    const original = InertFs.createReadStream;
    let capturedOptions;
    InertFs.createReadStream = function (path, options) {

        InertFs.createReadStream = original;
        capturedOptions = options;
        return original.call(InertFs, path, options);
    };

    const res = await server.inject({ url: '/file', headers: { range: 'bytes=1-4', 'accept-encoding': 'gzip' } });
    expect(res.statusCode).to.equal(206);
    expect(res.headers['accept-ranges']).to.equal('bytes');
    expect(res.headers['content-length']).to.equal(4);
    expect(res.headers['content-range']).to.equal('bytes 1-4/42010');
    expect(res.rawPayload).to.equal(Buffer.from('PNG\r', 'ascii'));

    // The stream must have been opened for exactly the requested byte window
    expect(capturedOptions).to.include({ start: 1, end: 4 });
});
it('returns 200 when content-length is missing', async () => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png') } } });

    // Strip content-length so range processing has no total size to work with
    server.ext('onPreResponse', (request, h) => {

        delete request.response.headers['content-length'];
        return h.continue;
    });

    const res = await server.inject({ url: '/file', headers: { range: 'bytes=1-5' } });
    expect(res.headers['content-length']).to.not.exist();
    expect(res.statusCode).to.equal(200);
});

it('returns 200 for dynamically compressed responses', async () => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/note.txt'), lookupCompressed: false } } });

    const res = await server.inject({ url: '/file', headers: { range: 'bytes=1-3', 'accept-encoding': 'gzip' } });

    // A range cannot be applied once the payload is gzipped on the fly
    expect(res.statusCode).to.equal(200);
    expect(res.headers['content-encoding']).to.equal('gzip');
    expect(res.headers['content-range']).to.not.exist();
    expect(res.headers['content-length']).to.not.exist();
    expect(res.headers['accept-ranges']).to.equal('bytes');
});
it('returns a subset of a file when compression is disabled', async () => {

    const server = await provisionServer({ compression: false });
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/note.txt'), lookupCompressed: false } } });

    // With compression off, the gzip accept-encoding is ignored and the range applies
    const res = await server.inject({ url: '/file', headers: { range: 'bytes=1-3', 'accept-encoding': 'gzip' } });
    expect(res.statusCode).to.equal(206);
    expect(res.headers['content-encoding']).to.not.exist();
    expect(res.headers['content-length']).to.equal(3);
    expect(res.headers['content-range']).to.equal('bytes 1-3/4');
});

it('returns a subset of a file using precompressed file', async () => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/image.png'), lookupCompressed: true } } });

    // The range is applied to the pre-gzipped sibling file, not the original
    const res = await server.inject({ url: '/file', headers: { range: 'bytes=10-18', 'accept-encoding': 'gzip' } });
    expect(res.statusCode).to.equal(206);
    expect(res.headers['content-encoding']).to.equal('gzip');
    expect(res.headers['accept-ranges']).to.equal('bytes');
    expect(res.headers['content-length']).to.equal(9);
    expect(res.headers['content-range']).to.equal('bytes 10-18/41936');
    expect(res.payload).to.equal('image.png');
});

it('returns a subset for dynamically compressed responses with "identity" encoding', async () => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/note.txt'), lookupCompressed: false } } });

    // identity means "no transformation", so the range can be honored
    const res = await server.inject({ url: '/file', headers: { range: 'bytes=1-3', 'accept-encoding': 'identity' } });
    expect(res.statusCode).to.equal(206);
    expect(res.headers['content-encoding']).to.not.exist();
    expect(res.headers['content-length']).to.equal(3);
    expect(res.headers['content-range']).to.equal('bytes 1-3/4');
});
it('returns a subset when content-type is missing', async () => {

    const server = await provisionServer();
    server.route({ method: 'GET', path: '/file', handler: { file: { path: Path.join(__dirname, 'file/note.txt') } } });

    // Remove content-type before transmission; range handling must not depend on it
    server.ext('onPreResponse', (request, h) => {

        delete request.response.headers['content-type'];
        return h.continue;
    });

    const res = await server.inject({ url: '/file', headers: { range: 'bytes=1-5' } });
    expect(res.statusCode).to.equal(206);
    expect(res.headers['content-type']).to.not.exist();
    expect(res.headers['content-encoding']).to.not.exist();
    expect(res.headers['content-length']).to.equal(3);
    expect(res.headers['content-range']).to.equal('bytes 1-3/4');
});

it('ignores range request when disabled in route config', async () => {

    const server = await provisionServer();
    server.route({
        method: 'GET', path: '/file',
        handler: { file: { path: Path.join(__dirname, 'file/image.png') } },
        config: { response: { ranges: false } }
    });

    // With ranges disabled the header is ignored and accept-ranges is not advertised
    const res = await server.inject({ url: '/file', headers: { range: 'bytes=0-4' } });
    expect(res.statusCode).to.equal(200);
    expect(res.headers['accept-ranges']).to.not.exist();
});
it('returns a subset of a file with start option', async () => {

    const server = await provisionServer();
    server.route({
        method: 'GET', path: '/file', handler: {
            file: {
                path: Path.join(__dirname, 'file/image.png'),
                start: 1
            }
        }
    });

    // The range is relative to the virtual file beginning at byte 1,
    // hence the reduced total size of 42009.
    const res = await server.inject({ url: '/file', headers: { range: 'bytes=2-3' } });
    expect(res.statusCode).to.equal(206);
    expect(res.headers['accept-ranges']).to.equal('bytes');
    expect(res.headers['content-length']).to.equal(2);
    expect(res.headers['content-range']).to.equal('bytes 2-3/42009');
    expect(res.rawPayload).to.equal(Buffer.from('G\r', 'ascii'));
});

it('returns a subset of a file with start and end option', async () => {

    const server = await provisionServer();
    server.route({
        method: 'GET', path: '/file', handler: {
            file: {
                path: Path.join(__dirname, 'file/image.png'),
                start: 2,
                end: 400
            }
        }
    });

    // start/end clip the file to 399 bytes; the range indexes into that window
    const res = await server.inject({ url: '/file', headers: { range: 'bytes=0-2' } });
    expect(res.statusCode).to.equal(206);
    expect(res.headers['accept-ranges']).to.equal('bytes');
    expect(res.headers['content-length']).to.equal(3);
    expect(res.headers['content-range']).to.equal('bytes 0-2/399');
    expect(res.rawPayload).to.equal(Buffer.from('NG\r', 'ascii'));
});
});
it('has not leaked file descriptors', { skip: process.platform === 'win32' }, async () => {

    // Ensure every file served by the suite has had its descriptor closed:
    // list this process's open files and look for any test fixture path.
    const lsof = ChildProcess.spawn('lsof', ['-p', process.pid]);

    let output = '';
    lsof.stdout.on('data', (chunk) => {

        output += chunk.toString();
    });

    await new Promise((resolve) => {

        lsof.stdout.on('end', () => {

            const open = output.split('\n').filter((line) => line.match(/package.json/)).length;
            expect(open).to.equal(0);
            resolve();
        });

        lsof.stdin.end();
    });
});
});
});
|
<filename>generator/generator.go
package generator
import (
"bytes"
"context"
"fmt"
"time"
"github.com/wzshiming/profile_stats"
"github.com/wzshiming/profile_stats/generator/activities"
"github.com/wzshiming/profile_stats/generator/charts"
"github.com/wzshiming/profile_stats/generator/now"
"github.com/wzshiming/profile_stats/generator/placeholder"
"github.com/wzshiming/profile_stats/generator/stats"
"github.com/wzshiming/profile_stats/source"
"github.com/wzshiming/xmlinjector"
)
const (
	// key is the marker name searched for by xmlinjector to locate
	// injection points in the processed document.
	key = "PROFILE_STATS"

	// blankChar is the unit of padding inserted around generated content.
	blankChar = "\n"
)

// Handler dispatches template names found in injection markers to the
// registered content generators.
type Handler struct {
	registry map[string]profile_stats.Generator
}
// NewHandler builds a Handler wired with the default set of generators
// backed by the given data source.
func NewHandler(src *source.Source) *Handler {
	h := &Handler{registry: map[string]profile_stats.Generator{}}

	h.register("now", now.NewNow())
	h.register("updatedat", now.NewNow())
	h.register("placeholder", placeholder.NewPlaceHolder())
	h.register("activities", activities.NewActivities(src))
	h.register("stats", stats.NewStats(src))
	h.register("charts", charts.NewCharts(src))

	return h
}
// register associates a template name with its generator implementation.
func (h *Handler) register(name string, generator profile_stats.Generator) {
	h.registry[name] = generator
}
// Handle scans origin for PROFILE_STATS injection markers and replaces each
// marked region with the output of the generator named by the marker's
// "template" argument. It returns the rewritten document, per-marker
// warnings, and the error reported by the injector itself. Markers that
// fail are replaced with an inline error comment instead of aborting.
func (r *Handler) Handle(ctx context.Context, origin []byte) ([]byte, []string, error) {
	buf := bytes.NewBuffer(nil)
	var warnings []string
	// NOTE(review): "date" appears to be a typo for "data" (the rewritten
	// document bytes) — confirm before renaming.
	date, err := xmlinjector.Inject([]byte(key), origin, func(args, origin []byte) []byte {
		tag := NewArgs(string(args), true)
		template, ok := tag.String("template")
		if !ok || template == "" {
			warnings = append(warnings, fmt.Sprintf("%q: no template", args))
			return errInfo("no template", origin)
		}
		// Number of blank lines padded around the generated content;
		// defaults to 2 when the marker does not specify one.
		blank, ok := tag.Int("blank")
		if !ok {
			blank = 2
		}
		generator, ok := r.registry[template]
		if !ok {
			warnings = append(warnings, fmt.Sprintf("%q: not support template %q", args, template))
			return errInfo(fmt.Sprintf("not support template %q", template), origin)
		}
		// The buffer is shared across all markers; reset before each use.
		buf.Reset()
		err := generator.Generate(ctx, buf, tag)
		if err != nil {
			warnings = append(warnings, fmt.Sprintf("%q: %s", args, err.Error()))
			return errInfo(err.Error(), origin)
		}
		raw := buf.Bytes()
		raw = bytes.Trim(raw, blankChar)
		// Surround the trimmed output with the requested padding. The
		// result must be a fresh slice because buf is reused on the next
		// marker, so the no-padding branch copies explicitly.
		var tmp []byte
		if blank > 0 {
			tmp = make([]byte, 0, len(raw)+blank*2)
			blanks := bytes.Repeat([]byte(blankChar), blank)
			tmp = append(tmp, blanks...)
			tmp = append(tmp, raw...)
			tmp = append(tmp, blanks...)
		} else {
			tmp = make([]byte, len(raw))
			copy(tmp, raw)
		}
		return tmp
	})
	return date, warnings, err
}
func errInfo(msg string, origin []byte) []byte {
return append([]byte(fmt.Sprintf("\n<!-- profile_stats_error error:%q date:%q /-->\n", msg, time.Now().Format(time.RFC3339))), origin...)
}
|
# Termux build recipe for WebKitGTK (GTK port of the WebKit engine).
TERMUX_PKG_HOMEPAGE=https://webkitgtk.org
TERMUX_PKG_DESCRIPTION="A full-featured port of the WebKit rendering engine"
TERMUX_PKG_LICENSE="LGPL-2.1"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=2.32.3
TERMUX_PKG_SRCURL=https://webkitgtk.org/releases/webkitgtk-${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=c1f496f5ac654efe4cef62fbd4f2fbeeef265a07c5e7419e5d2900bfeea52cbc
TERMUX_PKG_DEPENDS="enchant, gst-plugins-base, gstreamer, gtk3, libcairo, libgcrypt, libhyphen, libicu, libnotify, libsoup, libtasn1, libwebp, libxslt, libxt, openjpeg, woff2"
# This package supersedes the older webkit/webkitgtk package names.
TERMUX_PKG_BREAKS="webkit, webkitgtk"
TERMUX_PKG_REPLACES="webkit, webkitgtk"
# CMake options: build the GTK port and disable features that depend on
# services unavailable under Termux (systemd, libsecret, gamepads, WPE).
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
-DPORT=GTK
-DCMAKE_BUILD_TYPE=RelWithDebInfo
-DENABLE_GAMEPAD=OFF
-DUSE_SYSTEMD=OFF
-DUSE_LIBSECRET=OFF
-DENABLE_INTROSPECTION=OFF
-DUSE_WPE_RENDERER=OFF
"
|
package com.example.tour;
import java.util.zip.Inflater;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
import android.widget.TextView;
/**
 * Entry screen letting the user choose between the traveller and the
 * service-provider flows, launching the matching menu activity.
 */
public class SelectUser extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Full-screen layout without the default title bar.
        this.requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.select_user);

        ImageButton serviceProviderButton = (ImageButton) findViewById(R.id.imageButton1);
        ImageButton travellerButton = (ImageButton) findViewById(R.id.imageButton2);

        final Intent travellerIntent = new Intent(SelectUser.this, UserMenu.class);
        travellerButton.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View view) {
                startActivity(travellerIntent);
            }
        });

        final Intent serviceProviderIntent = new Intent(SelectUser.this, ServieProvider_menu.class);
        serviceProviderButton.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View view) {
                startActivity(serviceProviderIntent);
            }
        });
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Populate the options menu from its XML definition.
        getMenuInflater().inflate(R.menu.menu1, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == R.id.exit) {
            // Confirm before leaving the application.
            AlertDialog.Builder builder = new AlertDialog.Builder(SelectUser.this);
            builder.setMessage("Are you sure you want to exit ?");
            builder.setCancelable(false);
            builder.setPositiveButton("yes", new DialogInterface.OnClickListener() {

                @Override
                public void onClick(DialogInterface dialog, int which) {
                    // Sends the user to the launcher instead of finishing the task.
                    Intent homeIntent = new Intent(Intent.ACTION_MAIN);
                    homeIntent.addCategory(Intent.CATEGORY_HOME);
                    homeIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                    startActivity(homeIntent);
                }
            });
            builder.setNegativeButton("No", new DialogInterface.OnClickListener() {

                @Override
                public void onClick(DialogInterface dialog, int which) {
                    // Dismiss only; nothing to undo.
                }
            });
            builder.create().show();
        }
        return super.onOptionsItemSelected(item);
    }
}
|
// Android-specific utilities; re-exports the platform-neutral helpers from
// the common module so callers get one unified "utils" surface.
var application = require("application");
var common = require("utils/utils-common");
require("utils/module-merge").merge(common, exports);
var layout;
(function (layout) {

    // Values resolved lazily from the Android runtime and cached afterwards.
    var cachedDensity = -1;
    var cachedMetrics;

    var MODE_SHIFT = 30;
    var MODE_MASK = 0x3 << MODE_SHIFT;

    var resolvedSdkVersion = -1;
    var usesAdditiveSpec = false;

    // Packs a size and a measure mode into a single MeasureSpec integer.
    function makeMeasureSpec(size, mode) {

        if (resolvedSdkVersion === -1 && application.android && application.android.context) {
            resolvedSdkVersion = application.android.context.getApplicationInfo().targetSdkVersion;

            // Up to JELLY_BEAN_MR1 the platform packed the spec by plain addition.
            usesAdditiveSpec = resolvedSdkVersion <= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
        }

        return usesAdditiveSpec ? (size + mode) : ((size & ~MODE_MASK) | (mode & MODE_MASK));
    }
    layout.makeMeasureSpec = makeMeasureSpec;

    // Returns the device display density, computed once on first use.
    function getDisplayDensity() {

        if (cachedDensity === -1) {
            cachedDensity = getDisplayMetrics().density;
        }

        return cachedDensity;
    }
    layout.getDisplayDensity = getDisplayDensity;

    // Fetches (and caches) the DisplayMetrics of the application context.
    function getDisplayMetrics() {

        if (!cachedMetrics) {
            cachedMetrics = application.android.context.getResources().getDisplayMetrics();
        }

        return cachedMetrics;
    }
})(layout = exports.layout || (exports.layout = {}));
var ad;
(function (ad) {

    var collections;
    (function (collections) {

        // Copies a JS string array into a java.util.HashSet of strings.
        // NOTE(review): iterates with for..in, so any extra enumerable keys on
        // the array would also be added — presumably inputs are plain arrays;
        // confirm against callers.
        function stringArrayToStringSet(str) {
            var hashSet = new java.util.HashSet();
            if ("undefined" !== typeof str) {
                for (var element in str) {
                    // Coerce each entry to a primitive string before adding.
                    hashSet.add('' + str[element]);
                }
            }
            return hashSet;
        }
        collections.stringArrayToStringSet = stringArrayToStringSet;

        // Converts a java.util.Set of strings back into a JS string array,
        // coercing each Java string to a JS primitive string.
        function stringSetToStringArray(stringSet) {
            var arr = [];
            if ("undefined" !== typeof stringSet) {
                var it = stringSet.iterator();
                while (it.hasNext()) {
                    var element = '' + it.next();
                    arr.push(element);
                }
            }
            return arr;
        }
        collections.stringSetToStringArray = stringSetToStringArray;
    })(collections = ad.collections || (ad.collections = {}));

    var resources;
    (function (resources_1) {

        // Resolves an android drawable resource id by name.
        function getDrawableId(name) {
            return getId(":drawable/" + name);
        }
        resources_1.getDrawableId = getDrawableId;

        // Resolves an android string resource id by name.
        function getStringId(name) {
            return getId(":string/" + name);
        }
        resources_1.getStringId = getStringId;

        // Looks up a resource id from a "<package>:<type>/<name>" uri built
        // from the application's own package name.
        function getId(name) {
            var context = application.android.context;
            var resources = context.getResources();
            var packageName = context.getPackageName();
            var uri = packageName + name;
            return resources.getIdentifier(uri, null, null);
        }
        resources_1.getId = getId;
    })(resources = ad.resources || (ad.resources = {}));
})(ad = exports.ad || (exports.ad = {}));
// Triggers a garbage-collection pass via the global `gc` hook
// (assumed to be exposed by the hosting runtime — TODO confirm).
function GC() {
    gc();
}
exports.GC = GC;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.