text stringlengths 1 1.05M |
|---|
<gh_stars>1-10
require 'spec_helper'
module Etsource
  # Specs for the Etsource Atlas dataset loaders: PreCalculated, which reads
  # datasets already converted to MessagePack, and Lazy, which keeps its
  # cached files in a subdirectory and can rebuild them on demand.
  describe AtlasLoader do
    # Directory containing the YAML fixture datasets used by these specs.
    let(:yml) { Rails.root.join('spec/fixtures/atlas') }

    # A fresh temporary directory each loader writes converted datasets into.
    let(:dir) { Pathname.new(Dir.mktmpdir) }

    # Takes any YAML files in the +source+ directory, converts them into
    # MessagePack format, then saves them into the +destination+ directory.
    def yamls_to_msgpack!(source, destination)
      Pathname.glob(source.join('*.yml')) do |path|
        File.write(
          destination.join("#{ path.basename('.yml') }.pack"),
          MessagePack.pack(YAML.load_file(path)),
          mode: 'wb'
        )
      end
    end

    describe AtlasLoader::PreCalculated do
      let(:loader) { AtlasLoader::PreCalculated.new(dir) }

      # PreCalculated expects the .pack files directly inside +dir+.
      before { yamls_to_msgpack!(yml, dir) }

      context 'loading a dataset' do
        it 'loads the dataset from the production-mode file' do
          expect(loader.load(:nl)).to be_an(Atlas::ProductionMode)
        end

        it 'raises an error when the production-mode file does not exist' do
          dir.join('nl.pack').delete
          expect { loader.load(:nl) }.to raise_error(/no atlas data/i)
        end

        it 'raises an error when no such region exists' do
          expect { loader.load(:nope) }.to raise_error(/no atlas data/i)
        end
      end # loading a dataset

      context 'expire_all!' do
        it 'removes the dataset files' do
          # The spec fixtures convert to exactly two .pack files.
          expect(dir.children.length).to eq(2)
          loader.expire_all!
          expect(dir.children.length).to be_zero
        end
      end
    end # PreCalculated

    describe AtlasLoader::Lazy do
      let(:loader) { AtlasLoader::Lazy.new(dir) }
      let(:subdir) { dir.join('lazy') }

      # Lazy keeps its cached .pack files in a "lazy" subdirectory.
      before do
        FileUtils.mkdir(subdir)
        yamls_to_msgpack!(yml, subdir)
      end

      context 'loading a dataset' do
        it 'loads the dataset from the production-mode file' do
          expect(loader.load(:nl)).to be_an(Atlas::ProductionMode)
        end

        # Pending: recalculating the fixture dataset currently fails with:
        # Bad query value in a document: invalid value for convert(): ""
        xit 'loads the dataset when the production-mode file does not exist' do
          subdir.join('nl.pack').delete
          expect(loader.load(:nl)).to be_an(Atlas::ProductionMode)
        end

        it 'raises an error when no such region exists' do
          expect { loader.load(:nope) }.
            to raise_error(Atlas::DocumentNotFoundError)
        end
      end # loading a dataset
    end # Lazy
  end # AtlasLoader
end # Etsource
|
/**
 * Parses a URL query string (e.g. location.search) into a plain object.
 *
 * The leading "?" is stripped, pairs are split on "&", and both keys and
 * values are percent-decoded. A key without "=" maps to undefined,
 * preserving the original behaviour for flag-style parameters.
 *
 * @param {string} queryString - Query string including its leading "?".
 * @returns {Object<string, string|undefined>} Decoded key/value pairs.
 */
const parseQueryString = (queryString) => {
    const params = {};
    queryString.substring(1)
        .split('&')
        .filter(Boolean) // skip empty segments ("" input, "a=1&&b=2")
        .forEach(pair => {
            const [key, value] = pair.split('=');
            // Fix: decode percent-encoding ("hello%20world" -> "hello world").
            params[decodeURIComponent(key)] =
                value === undefined ? undefined : decodeURIComponent(value);
        });
    return params;
};
#!/usr/bin/env bash
# Build release binaries (uploc, uplod) for every supported platform and
# record their SHA-256 checksums. The release version is the sole argument.
set -e

version="$1"
# Fix: validate the argument before using it anywhere.
if [[ -z "$version" ]]; then
	echo "Usage: $0 VERSION"
	exit 1
fi

# Release-candidate suffix: the part after the first '-' in the version
# (empty for final releases), embedded as the ReleaseTag build variable.
rc=$(echo "$version" | awk -F - '{print $2}')

# setup build-time vars: strip symbols, stamp git revision/build time/tag
ldflags="-s -w -X 'github.com/uplo-tech/uplo/build.GitRevision=$(git rev-parse --short HEAD)' -X 'github.com/uplo-tech/uplo/build.BuildTime=$(git show -s --format=%ci HEAD)' -X 'github.com/uplo-tech/uplo/build.ReleaseTag=${rc}'"

# Compile uploc/uplod for one OS/arch into a per-platform release folder,
# appending each binary's checksum to the shared SHA256SUMS file.
function build {
	local os=$1
	local arch=$2
	echo Building ${os}...
	# create a clean workspace for this platform
	local folder="release/Uplo-$version-$os-$arch"
	rm -rf "$folder"
	mkdir -p "$folder"
	# compile and hash binaries
	local pkg bin
	for pkg in uploc uplod; do
		bin=$pkg
		if [ "$os" == "windows" ]; then
			bin=${pkg}.exe
		fi
		GOOS=${os} GOARCH=${arch} go build -a -tags 'netgo' -trimpath -ldflags="$ldflags" -o "$folder/$bin" "./cmd/$pkg"
		(
			# hash from inside release/ so the recorded path is relative
			cd release/
			sha256sum "Uplo-$version-$os-$arch/$bin" >> "Uplo-$version-SHA256SUMS.txt"
		)
	done
	cp -r doc LICENSE README.md "$folder"
}

# Fix: start with a fresh checksum file so re-runs don't accumulate duplicates.
mkdir -p release
rm -f "release/Uplo-$version-SHA256SUMS.txt"

# Build amd64 binaries.
for os in darwin linux windows; do
	build "$os" "amd64"
done
# Build Raspberry Pi binaries.
build "linux" "arm64"
|
# Rebuild the Amazon Linux 2 httpd source RPM with the local proxy patches,
# then overwrite the distribution-provided proxy modules with the patched ones.
ARCH=$(uname -p)
sudo yum install -y yum-utils rpmdevtools
yumdownloader --source "httpd.$ARCH"
# Resolve the downloaded source RPM's exact filename via glob expansion.
file=$(echo httpd-*.amzn2.src.rpm)
rpm -i "$file"
sudo yum-builddep -y "$file"
cp mod_proxy_http_subreq_connection_reuse.patch rpmbuild/SOURCES
cp proxypass_nomain_flag.patch rpmbuild/SOURCES
patch -p1 -d rpmbuild <httpd_spec.patch
rpmbuild -bb rpmbuild/SPECS/httpd.spec
# This part fails because there is a circular dependency between
# httpd and mod_http2
#
# rpm -i rpmbuild/RPMS/$ARCH/httpd-* rpmbuild/RPMS/$ARCH/mod_proxy_*
#
# So we just overwrite the existing modules with the patched ones
sudo yum -qy install httpd httpd-devel
# Fix: -exec requires a terminator (\; or +); without one, find aborts with
# "missing argument to `-exec'" and never copies the modules.
find rpmbuild -name mod_proxy.so -exec sudo cp {} /etc/httpd/modules \;
find rpmbuild -name mod_proxy_http.so -exec sudo cp {} /etc/httpd/modules \;
|
package io.opensphere.core.pipeline.processor;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.util.Collection;
import java.util.List;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.TimeManager;
import io.opensphere.core.geometry.ConstrainableGeometry;
import io.opensphere.core.geometry.Geometry;
import io.opensphere.core.geometry.constraint.MutableConstraints;
import io.opensphere.core.geometry.constraint.TimeConstraint;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.model.time.TimeSpanList;
import io.opensphere.core.util.collections.New;
/**
 * Tests the {@link MostRecentGeometryFilter} class.
 */
public class MostRecentGeometryFilterTest
{
    /** Now. */
    private static final long NOW = System.currentTimeMillis();

    /** A second ago. */
    private static final long A_SECOND_AGO = NOW - 1000;

    /** A minute ago. */
    private static final long A_MINUTE_AGO = NOW - 60000;

    /** A minute from now. */
    private static final long A_MINUTE_FROM_NOW = NOW + 60000;

    /**
     * Tests the {@link MostRecentGeometryFilter} with non constrained
     * geometries, null constrained geometries, time constrained geometries, and
     * most recent constrained geometries, with time checking enabled: the
     * stale and the future "most recent" geometries are expected to be
     * filtered out.
     */
    @Test
    public void test()
    {
        EasyMockSupport support = new EasyMockSupport();

        Geometry nonConstrained = support.createMock(Geometry.class);
        // Nice mock: its getConstraints() returns the default (null).
        Geometry nullConstrained = support.createNiceMock(ConstrainableGeometry.class);
        Geometry timeConstrained = createConstrainableGeometry(support, TimeConstraint.getTimeConstraint(TimeSpan.get(NOW)));
        Geometry oldMostRecent = createConstrainableGeometry(support,
                TimeConstraint.getMostRecentTimeConstraint("id", A_SECOND_AGO));
        Geometry newMostRecent = createConstrainableGeometry(support, TimeConstraint.getMostRecentTimeConstraint("id", NOW));
        Geometry futureMostRecent = createConstrainableGeometry(support,
                TimeConstraint.getMostRecentTimeConstraint("id", A_MINUTE_FROM_NOW));
        List<Geometry> geometries = New.list(nonConstrained, nullConstrained, timeConstrained, oldMostRecent, newMostRecent,
                futureMostRecent);
        TimeManager timeManager = createTimeManager(support);

        support.replayAll();

        MostRecentGeometryFilter filter = new MostRecentGeometryFilter();
        Collection<Geometry> filtered = filter.filterMostRecent(geometries, timeManager, true);

        // Only the stale and future "most recent" geometries are removed.
        assertEquals(geometries.size() - 2, filtered.size());
        assertFalse(filtered.contains(oldMostRecent));
        assertFalse(filtered.contains(futureMostRecent));

        support.verifyAll();
    }

    /**
     * Tests the {@link MostRecentGeometryFilter} with the same mix of
     * geometries but with time checking disabled (checkTime false): per the
     * assertions below, both the stale and the current "most recent"
     * constraints are filtered in favour of the most recent one for the id.
     */
    @Test
    public void testNoConstraints()
    {
        EasyMockSupport support = new EasyMockSupport();

        Geometry nonConstrained = support.createMock(Geometry.class);
        // Nice mock: its getConstraints() returns the default (null).
        Geometry nullConstrained = support.createNiceMock(ConstrainableGeometry.class);
        Geometry timeConstrained = createConstrainableGeometry(support, TimeConstraint.getTimeConstraint(TimeSpan.get(NOW)));
        Geometry oldMostRecent = createConstrainableGeometry(support,
                TimeConstraint.getMostRecentTimeConstraint("id", A_SECOND_AGO));
        Geometry newMostRecent = createConstrainableGeometry(support, TimeConstraint.getMostRecentTimeConstraint("id", NOW));
        Geometry futureMostRecent = createConstrainableGeometry(support,
                TimeConstraint.getMostRecentTimeConstraint("id", A_MINUTE_FROM_NOW));
        List<Geometry> geometries = New.list(nonConstrained, nullConstrained, timeConstrained, oldMostRecent, newMostRecent,
                futureMostRecent);
        TimeManager timeManager = createTimeManager(support);

        support.replayAll();

        MostRecentGeometryFilter filter = new MostRecentGeometryFilter();
        Collection<Geometry> filtered = filter.filterMostRecent(geometries, timeManager, false);

        // The two older constraints for "id" lose to futureMostRecent.
        assertEquals(geometries.size() - 2, filtered.size());
        assertFalse(filtered.contains(oldMostRecent));
        assertFalse(filtered.contains(newMostRecent));

        support.verifyAll();
    }

    /**
     * Creates an easy mocked {@link ConstrainableGeometry}.
     *
     * @param support Used to create the mock.
     * @param constraint The time constraint for the geometry.
     * @return The {@link ConstrainableGeometry}.
     */
    private ConstrainableGeometry createConstrainableGeometry(EasyMockSupport support, TimeConstraint constraint)
    {
        ConstrainableGeometry geometry = support.createMock(ConstrainableGeometry.class);
        EasyMock.expect(geometry.getConstraints()).andReturn(new MutableConstraints(constraint)).anyTimes();
        return geometry;
    }

    /**
     * Creates an easy mocked time manager whose primary active span runs from
     * a minute ago through just past now, so NOW itself is "active".
     *
     * @param support Used to create the mock.
     * @return The time manager.
     */
    private TimeManager createTimeManager(EasyMockSupport support)
    {
        TimeManager timeManager = support.createMock(TimeManager.class);
        EasyMock.expect(timeManager.getPrimaryActiveTimeSpans())
                .andReturn(TimeSpanList.singleton(TimeSpan.get(A_MINUTE_AGO, NOW + 1))).anyTimes();
        return timeManager;
    }
}
|
// Metric value types supported by the dashboard.
export const MetricType = {
  None: "none",
  String: "string",
  Binary: "binary",
  Gauge: "gauge",
  GaugeFloat: "gauge_float",
  Counter: "counter",
  GEO: "geo",
}

// Select options for MetricType; labels are i18n translation keys.
export const MetricTypeOptions = [
  { value: MetricType.None, label: "opts.metric_type.label_none" },
  { value: MetricType.String, label: "opts.metric_type.label_string" },
  { value: MetricType.Binary, label: "opts.metric_type.label_binary" },
  { value: MetricType.Gauge, label: "opts.metric_type.label_gauge" },
  { value: MetricType.GaugeFloat, label: "opts.metric_type.label_gauge_float" },
  { value: MetricType.Counter, label: "opts.metric_type.label_counter" },
  { value: MetricType.GEO, label: "opts.metric_type.label_geo" },
]
// Relative lookback ranges, expressed as negative duration strings.
export const Duration = {
  Last30Minutes: "-30m",
  LastHour: "-1h",
  Last2Hours: "-2h",
  Last3Hours: "-3h",
  Last6Hours: "-6h",
  Last12Hours: "-12h",
  Last24Hours: "-24h",
  Last2Days: "-48h",
  Last7Days: "-168h",
  Last30Days: "-720h",
  Last60Days: "-1440h",
  Last90Days: "-2160h",
  Last180Days: "-4320h",
  Last365Days: "-8760h",
}

// Select options for Duration. tsFormat is the timestamp display pattern
// for that range (longer ranges include date components); labels are i18n keys.
export const DurationOptions = [
  { value: Duration.Last30Minutes, tsFormat: "HH:mm", label: "opts.metric_duration.label_minute_30" },
  { value: Duration.LastHour, tsFormat: "HH:mm", label: "opts.metric_duration.label_hour_01" },
  { value: Duration.Last2Hours, tsFormat: "HH:mm", label: "opts.metric_duration.label_hour_02" },
  { value: Duration.Last3Hours, tsFormat: "HH:mm", label: "opts.metric_duration.label_hour_03" },
  { value: Duration.Last6Hours, tsFormat: "HH:mm", label: "opts.metric_duration.label_hour_06" },
  { value: Duration.Last12Hours, tsFormat: "HH:mm", label: "opts.metric_duration.label_hour_12" },
  { value: Duration.Last24Hours, tsFormat: "HH:mm", label: "opts.metric_duration.label_hour_24" },
  { value: Duration.Last2Days, tsFormat: "Do HH:mm", label: "opts.metric_duration.label_day_002" },
  { value: Duration.Last7Days, tsFormat: "Do HH:mm", label: "opts.metric_duration.label_day_007" },
  { value: Duration.Last30Days, tsFormat: "MMM Do HH:mm", label: "opts.metric_duration.label_day_030" },
  { value: Duration.Last60Days, tsFormat: "MMM Do HH:mm", label: "opts.metric_duration.label_day_060" },
  { value: Duration.Last90Days, tsFormat: "MMM Do HH:mm", label: "opts.metric_duration.label_day_090" },
  { value: Duration.Last180Days, tsFormat: "MMM Do HH:mm", label: "opts.metric_duration.label_day_180" },
  { value: Duration.Last365Days, tsFormat: "YYYY MMM Do HH:mm", label: "opts.metric_duration.label_day_365" },
]

// Bucket widths for series aggregation, as duration strings.
export const AggregationInterval = {
  Minute_1: "1m",
  Minute_2: "2m",
  Minute_3: "3m",
  Minute_5: "5m",
  Minute_10: "10m",
  Minute_15: "15m",
  Minute_30: "30m",
  Hour_1: "1h",
  Hour_3: "3h",
  Hour_6: "6h",
  Day_1: "24h",
}

// Select options for AggregationInterval; labels are i18n keys.
export const AggregationIntervalOptions = [
  { value: AggregationInterval.Minute_1, label: "opts.interval.label_minute_01" },
  { value: AggregationInterval.Minute_2, label: "opts.interval.label_minute_02" },
  { value: AggregationInterval.Minute_3, label: "opts.interval.label_minute_03" },
  { value: AggregationInterval.Minute_5, label: "opts.interval.label_minute_05" },
  { value: AggregationInterval.Minute_10, label: "opts.interval.label_minute_10" },
  { value: AggregationInterval.Minute_15, label: "opts.interval.label_minute_15" },
  { value: AggregationInterval.Minute_30, label: "opts.interval.label_minute_30" },
  { value: AggregationInterval.Hour_1, label: "opts.interval.label_hour_01" },
  { value: AggregationInterval.Hour_3, label: "opts.interval.label_hour_03" },
  { value: AggregationInterval.Hour_6, label: "opts.interval.label_hour_06" },
  { value: AggregationInterval.Day_1, label: "opts.interval.label_day_01" },
]
/**
 * Returns a sensible aggregation interval for the given lookback range,
 * keeping charts at a roughly constant point density.
 *
 * @param {string} durationValue - One of the Duration values.
 * @returns {string} One of the AggregationInterval values; unknown inputs
 *                   fall back to the 15-minute interval.
 */
export const getRecommendedInterval = (durationValue) => {
  // Lookup table replacing the original switch statement.
  const recommended = {
    [Duration.Last30Minutes]: AggregationInterval.Minute_1,
    [Duration.LastHour]: AggregationInterval.Minute_1,
    [Duration.Last2Hours]: AggregationInterval.Minute_1,
    [Duration.Last3Hours]: AggregationInterval.Minute_2,
    [Duration.Last6Hours]: AggregationInterval.Minute_3,
    [Duration.Last12Hours]: AggregationInterval.Minute_5,
    [Duration.Last24Hours]: AggregationInterval.Minute_10,
    [Duration.Last2Days]: AggregationInterval.Minute_15,
    [Duration.Last7Days]: AggregationInterval.Hour_1,
    [Duration.Last30Days]: AggregationInterval.Hour_3,
    [Duration.Last60Days]: AggregationInterval.Hour_6,
    [Duration.Last90Days]: AggregationInterval.Hour_6,
    [Duration.Last180Days]: AggregationInterval.Day_1,
    [Duration.Last365Days]: AggregationInterval.Day_1,
  }
  // All mapped values are non-empty strings, so || is a safe default here.
  return recommended[durationValue] || AggregationInterval.Minute_15
}
// Aggregation functions that can be applied to a metric series.
export const MetricFunctionType = {
  Mean: "mean",
  Max: "max",
  Min: "min",
  Median: "median",
  Sum: "sum",
  Count: "count",
  Percentile50: "percentile_50",
  Percentile75: "percentile_75",
  Percentile95: "percentile_95",
  Percentile99: "percentile_99",
}

// Select options for MetricFunctionType; labels are i18n keys.
export const MetricFunctionTypeOptions = [
  { value: MetricFunctionType.Mean, label: "opts.metric_function.label_mean" },
  { value: MetricFunctionType.Min, label: "opts.metric_function.label_minimum" },
  { value: MetricFunctionType.Max, label: "opts.metric_function.label_maximum" },
  { value: MetricFunctionType.Median, label: "opts.metric_function.label_median" },
  { value: MetricFunctionType.Percentile50, label: "opts.metric_function.label_percentile_50" },
  { value: MetricFunctionType.Percentile75, label: "opts.metric_function.label_percentile_75" },
  { value: MetricFunctionType.Percentile95, label: "opts.metric_function.label_percentile_95" },
  { value: MetricFunctionType.Percentile99, label: "opts.metric_function.label_percentile_99" },
  { value: MetricFunctionType.Sum, label: "opts.metric_function.label_sum" },
  { value: MetricFunctionType.Count, label: "opts.metric_function.label_count" },
]

// Line-chart interpolation curve identifiers.
export const InterpolationType = {
  Basis: "basis",
  Cardinal: "cardinal",
  CatmullRom: "catmullRom",
  Linear: "linear",
  MonotoneX: "monotoneX",
  MonotoneY: "monotoneY",
  Natural: "natural",
  Step: "step",
  StepAfter: "stepAfter",
  StepBefore: "stepBefore",
}

// Select options for InterpolationType; labels are i18n keys.
export const InterpolationTypeLineOptions = [
  { value: InterpolationType.Basis, label: "opts.chart_interpolation.label_basis" },
  { value: InterpolationType.Cardinal, label: "opts.chart_interpolation.label_cardinal" },
  { value: InterpolationType.CatmullRom, label: "opts.chart_interpolation.label_catmull_rom" },
  { value: InterpolationType.Linear, label: "opts.chart_interpolation.label_linear" },
  { value: InterpolationType.MonotoneX, label: "opts.chart_interpolation.label_monotone_x" },
  { value: InterpolationType.MonotoneY, label: "opts.chart_interpolation.label_monotone_y" },
  { value: InterpolationType.Natural, label: "opts.chart_interpolation.label_natural" },
  { value: InterpolationType.Step, label: "opts.chart_interpolation.label_step" },
  { value: InterpolationType.StepAfter, label: "opts.chart_interpolation.label_step_after" },
  { value: InterpolationType.StepBefore, label: "opts.chart_interpolation.label_step_before" },
]

// Auto-refresh periods in milliseconds, stored as strings ("0" = never).
export const RefreshIntervalType = {
  None: "0",
  Seconds_5: "5000",
  Seconds_10: "10000",
  Seconds_15: "15000",
  Seconds_30: "30000",
  Seconds_45: "45000",
  Minutes_1: "60000",
  Minutes_5: "300000",
  Minutes_15: "900000",
  Minutes_30: "1800000",
}

// Select options for RefreshIntervalType; labels are i18n keys.
export const RefreshIntervalTypeOptions = [
  { value: RefreshIntervalType.None, label: "opts.interval.label_never" },
  { value: RefreshIntervalType.Seconds_5, label: "opts.interval.label_second_05" },
  { value: RefreshIntervalType.Seconds_10, label: "opts.interval.label_second_10" },
  { value: RefreshIntervalType.Seconds_15, label: "opts.interval.label_second_15" },
  { value: RefreshIntervalType.Seconds_30, label: "opts.interval.label_second_30" },
  { value: RefreshIntervalType.Seconds_45, label: "opts.interval.label_second_45" },
  { value: RefreshIntervalType.Minutes_1, label: "opts.interval.label_minute_01" },
  { value: RefreshIntervalType.Minutes_5, label: "opts.interval.label_minute_05" },
  { value: RefreshIntervalType.Minutes_15, label: "opts.interval.label_minute_15" },
  { value: RefreshIntervalType.Minutes_30, label: "opts.interval.label_minute_30" },
]

// Binary (IEC) data-size unit suffixes; Bytes uses an empty suffix.
export const DataUnitType = {
  Bytes: "",
  KiB: "KiB",
  MiB: "MiB",
  GiB: "GiB",
  TiB: "TiB",
  PiB: "PiB",
  EiB: "EiB",
}

// Select options for DataUnitType; labels are i18n keys.
export const DataUnitTypeOptions = [
  { value: DataUnitType.Bytes, label: "opts.data_unit.label_bytes" },
  { value: DataUnitType.KiB, label: "opts.data_unit.label_kib" },
  { value: DataUnitType.MiB, label: "opts.data_unit.label_mib" },
  { value: DataUnitType.GiB, label: "opts.data_unit.label_gib" },
  { value: DataUnitType.TiB, label: "opts.data_unit.label_tib" },
  { value: DataUnitType.PiB, label: "opts.data_unit.label_pib" },
  { value: DataUnitType.EiB, label: "opts.data_unit.label_eib" },
]
|
#!/bin/bash
# Configure, build, and install PHP from a source checkout into
# $HOME/php-install, honouring the CI environment toggles below.

# Thread safety (ZTS) is opt-in via the environment.
if [[ "$ENABLE_MAINTAINER_ZTS" == 1 ]]; then
    TS="--enable-maintainer-zts";
else
    TS="";
fi

# Debug builds also disable the pcre-valgrind integration.
if [[ "$ENABLE_DEBUG" == 1 ]]; then
    DEBUG="--enable-debug --without-pcre-valgrind";
else
    DEBUG="";
fi

# With no explicit log file, run quietly and stream output to stdout.
if [[ -z "$CONFIG_LOG_FILE" ]]; then
    CONFIG_QUIET="--quiet"
    CONFIG_LOG_FILE="/dev/stdout"
else
    CONFIG_QUIET=""
fi
if [[ -z "$MAKE_LOG_FILE" ]]; then
    MAKE_QUIET="--quiet"
    MAKE_LOG_FILE="/dev/stdout"
else
    MAKE_QUIET=""
fi

# Parallel make jobs (default 2).
MAKE_JOBS=${MAKE_JOBS:-2}

./buildconf --force
# Fix: a duplicate bare --with-curl later in this list used to override the
# explicit --with-curl=/usr; the duplicate has been removed.
./configure \
    --prefix="$HOME"/php-install \
    $CONFIG_QUIET \
    $DEBUG \
    $TS \
    --enable-phpdbg \
    --enable-fpm \
    --with-pdo-mysql=mysqlnd \
    --with-mysqli=mysqlnd \
    --with-pgsql \
    --with-pdo-pgsql \
    --with-pdo-sqlite \
    --enable-intl \
    --without-pear \
    --with-gd \
    --with-jpeg-dir=/usr \
    --with-png-dir=/usr \
    --enable-exif \
    --enable-zip \
    --without-libzip \
    --with-zlib \
    --with-zlib-dir=/usr \
    --enable-soap \
    --enable-xmlreader \
    --with-xsl \
    --with-curl=/usr \
    --with-tidy \
    --with-xmlrpc \
    --enable-sysvsem \
    --enable-sysvshm \
    --enable-shmop \
    --enable-pcntl \
    --with-readline \
    --enable-mbstring \
    --with-gettext \
    --enable-sockets \
    --with-bz2 \
    --with-openssl \
    --with-gmp \
    --enable-bcmath \
    --enable-calendar \
    --enable-ftp \
    --with-pspell=/usr \
    --with-enchant=/usr \
    --enable-wddx \
    --with-freetype-dir=/usr \
    --with-xpm-dir=/usr \
    --with-kerberos \
    --enable-sysvmsg \
    --enable-zend-test=shared \
    > "$CONFIG_LOG_FILE"
make "-j${MAKE_JOBS}" $MAKE_QUIET > "$MAKE_LOG_FILE"
make install >> "$MAKE_LOG_FILE"
|
def calculate_sum_and_sequence(a_string):
    """Return ``[expression, total]`` for a string of digits.

    ``expression`` joins the characters with ``+`` (e.g. ``"1+2+3"``) and
    ``total`` is the sum of the digits.  A ``ValueError`` propagates from
    ``int()`` if any character is not a digit.
    """
    expression = "+".join(a_string)
    total = sum(int(digit) for digit in a_string)
    return [expression, total]
# Test the function
print(calculate_sum_and_sequence("12345")) # Output: ["1+2+3+4+5", 15] |
#!/bin/sh
# this super hack will sync the explorer within the specified block height range
# by requesting each height's hash and then its block page.

# Sync a single block height.
forcesync() {
    blockcount=$1
    echo "╒══════════════════<<"
    echo "| height : $blockcount"
    blockhash=$(curl -s "https://explorer.stpx.io/api/getblockhash?height=$blockcount")
    echo "| ଓ hash : $blockhash"
    # Fetching the block page is what makes the explorer index the block.
    curl -s "https://explorer.stpx.io/block/$blockhash" > /dev/null
    echo "╘═══════════════════════════════>>"
}

# Walk the half-open range [startingblockcount, endingblockcount).
# Fix: the original recursed into main() once per block, so the call depth
# grew with the range size; a while loop is equivalent and bounded.
main() {
    while [ "$currentblockcount" -ne "$endingblockcount" ]; do
        forcesync "$currentblockcount"
        currentblockcount=$((currentblockcount + 1))
    done
}

startingblockcount=1213133
endingblockcount=1213143
echo "Syncing..."
currentblockcount=$startingblockcount
main
|
// Initialise the news-message grid and the rich-text editor for the form.
$(function () {
    $("#jqGrid").jqGrid({
        url: '../newsmessage/list',
        datatype: "json",
        // Column definitions; the labels are the Chinese UI headings.
        colModel: [
            {label: 'id', name: 'id', index: 'id', key: true, hidden: true},
            {label: '新闻标题', name: 'title', index: 'title', width: 80,align:'center'},
            {label: '新闻内容', name: 'details', index: 'details', width: 80,align:'center'},
            {label: '新闻作者', name: 'author', index: 'author', width: 80,align:'center'},
            {
                // release date, rendered via transDate as yyyy-MM-dd hh:mm:ss
                label: '发布日期', name: 'releaseDate', index: 'release_date', width: 80,align:'center',formatter:function(value){
                    return transDate(value, 'yyyy-MM-dd hh:mm:ss');
                }
            },
            {label: '新闻类型', name: 'typeName', index: 'type_id', width: 80,align:'center'},
            {label: '点击量', name: 'clickRate', index: 'click_rate', width: 80,align:'center'},
            {
                // headline flag, rendered as a yes/no badge
                label: '是否头条', name: 'showTop', index: 'show_top', width: 80,align:'center',formatter:function(value){
                    return transIsNot(value);
                }
            },
            {
                // hot-news flag, rendered as a yes/no badge
                label: '是否热点', name: 'showHot', index: 'show_hot', width: 80,align:'center',formatter:function(value) {
                    return transIsNot(value);
                }
            },
            {
                // news image URL, rendered as a thumbnail
                label: '新闻图片', name: 'newsImageUrl', index: 'news_image_url', width: 80,align:'center',formatter:function(value){
                    return transImg(value);
                }
            },
            {
                // last update timestamp
                label: '更新时间', name: 'updateTime', index: 'update_time', width: 80,align:'center',formatter:function(value){
                    return transDate(value, 'yyyy-MM-dd hh:mm:ss');
                }
            },
            {label: '更新者', name: 'updateBy', index: 'update_by', width: 80,align:'center'},
        ],
        viewrecords: true,
        height: 385,
        rowNum: 10,
        rowList: [10, 30, 50],
        rownumbers: true,
        rownumWidth: 25,
        autowidth: true,
        multiselect: true,
        pager: "#jqGridPager",
        // Maps the server's page-wrapper JSON onto jqGrid's reader fields.
        jsonReader: {
            root: "page.list",
            page: "page.currPage",
            total: "page.totalPage",
            records: "page.totalCount"
        },
        // Request-parameter names the server expects for paging/sorting.
        prmNames: {
            page: "page",
            rows: "limit",
            order: "order"
        },
        gridComplete: function () {
            // hide the grid body's horizontal scrollbar
            $("#jqGrid").closest(".ui-jqgrid-bdiv").css({"overflow-x": "hidden"});
        }
    });
    // Rich-text editor for the news body; image upload/browse goes through
    // the shared OSS endpoints.
    $("#newsDesc").editable({
        inlineMode: false,
        alwaysBlank: true,
        height: '450px', // editor height
        minHeight: '200px',
        language: "zh_cn",
        spellcheck: false,
        plainPaste: true,
        enableScript: false,
        imageButtons: ["floatImageLeft", "floatImageNone", "floatImageRight", "linkImage", "replaceImage", "removeImage"],
        allowedImageTypes: ["jpeg", "jpg", "png", "gif"],
        imageUploadURL: '../sys/oss/upload',
        imageUploadParams: {id: "edit"},
        imagesLoadURL: '../sys/oss/queryAll'
    })
});
// Shared formatter helpers used by the grid columns.

/**
 * Renders a yes/no flag as a coloured badge.
 * @param value flag value; compared loosely so both 1 and "1" count as yes
 * @returns {string} badge HTML
 */
function transIsNot(value) {
    return value == 1
        ? '<span class="badge badge-info">是</span>'
        : '<span class="badge badge-danger">否</span>';
}
/**
 * Renders an image URL as a 50x50 thumbnail.
 * @param url image URL; may be empty or null
 * @returns {*} thumbnail HTML, or '-' when there is no URL
 */
function transImg(url) {
    return url
        ? '<img width="50px" height="50px" src="' + url + '">'
        : '-';
}
/**
 * Formats a date value for display in the grid.
 * Accepts either an epoch-millisecond number or a date string; anything
 * falsy or unparseable renders as '-'.
 * @param date timestamp (number) or date string
 * @param fmt format pattern passed to Date.prototype.dateFormat (project extension)
 * @returns {*} formatted date string, or '-'
 */
function transDate(date, fmt) {
    if (!date) {
        return '-';
    }
    if (typeof date === 'number') {
        return new Date(date).dateFormat(fmt);
    }
    try {
        // Swap the first two '-' for '/' before handing the string to Date.
        return new Date(date.replace('-', '/').replace('-', '/')).dateFormat(fmt);
    } catch (e) {
        return '-';
    }
}
// zTree instance and its configuration: nodes arrive as a flat ("simple
// data") list linked by id/parentId, with -1 marking root nodes.
var ztree;
var setting = {
    data: {
        simpleData: {
            enable: true,
            idKey: "id",
            pIdKey: "parentId",
            rootPId: -1
        },
        key: {
            // nodes carry no real hyperlink attribute
            url: "nourl"
        }
    }
};
// View-model for the news-message admin page: toggles between the grid and
// the edit form, performs CRUD calls against ../newsmessage/*, and handles
// image uploads.
var vm = new Vue({
    el: '#rrapp',
    data: {
        showList: true,   // true = grid view, false = edit form
        title: null,      // form caption: "新增" (add) or "修改" (edit)
        uploadList: [],   // image files uploaded through the gallery widget
        visible: false,
        newsMessage: {
            listPicUrl: '',
            releaseDate: '',
            typeId: '',
            details: ''
        },
        newsTypes: [],
        // Validation rules for the edit form.
        ruleValidate: {
            title: [
                {required: true, message: '新闻标题不能为空', trigger: 'blur'}
            ],
            releaseDate: [
                {required: true, type: 'datetime', message: '请选择发布日期', trigger: 'blur'}
            ],
            typeId: [
                {required: true, message: '新闻类型不能为空', trigger: 'blur'}
            ]
            /* ,
            details:[
                {required: true, message: '新闻内容不能为空', trigger: 'blur'},
                {type: 'string', min: 20, message: '内容不能少于20字', trigger: 'blur'}
            ]*/
        },
        // Search terms for the grid query.
        q: {
            name: ''
        }
    },
    methods: {
        // Re-run the grid query with the current search terms.
        query: function () {
            vm.reload();
        },
        // Switch to the form in "add" mode with a blank message.
        add: function () {
            vm.showList = false;
            vm.title = "新增";
            vm.newsMessage = {
                newsImageUrl: '',
                showTop: 1,
                showHot: 1,
                typeId: '',
                typeName: '',
                details: ''
            };
            $("#newsDesc").editable('setHTML', '');
            vm.newsTypes = [];
            vm.getNewsTypes();
        },
        // Switch to the form in "edit" mode for the selected row.
        update: function (event) {
            var id = getSelectedRow();
            if (id == null) {
                return;
            }
            vm.showList = false;
            vm.title = "修改";
            vm.uploadList = [];
            vm.getInfo(id);
            vm.getNewsTypes();
        },
        // Create or update depending on whether the message has an id yet.
        saveOrUpdate: function (event) {
            var url = vm.newsMessage.id == null ? "../newsmessage/save" : "../newsmessage/update";
            // Pull the rich-text body out of the editor before submitting.
            vm.newsMessage.details = $("#newsDesc").editable('getHTML');
            $.ajax({
                type: "POST",
                url: url,
                contentType: "application/json",
                data: JSON.stringify(vm.newsMessage),
                success: function (r) {
                    if (r.code === 0) {
                        alert('操作成功', function (index) {
                            vm.reload();
                        });
                    } else {
                        alert(r.msg);
                    }
                }
            });
        },
        // Delete all selected rows after confirmation.
        del: function (event) {
            var ids = getSelectedRows();
            if (ids == null) {
                return;
            }
            confirm('确定要删除选中的记录?', function () {
                $.ajax({
                    type: "POST",
                    url: "../newsmessage/delete",
                    contentType: "application/json",
                    data: JSON.stringify(ids),
                    success: function (r) {
                        if (r.code == 0) {
                            alert('操作成功', function (index) {
                                $("#jqGrid").trigger("reloadGrid");
                            });
                        } else {
                            alert(r.msg);
                        }
                    }
                });
            });
        },
        // Load one message and push its body into the rich-text editor.
        getInfo: function (id) {
            $.get("../newsmessage/info/" + id, function (r) {
                vm.newsMessage = r.newsMessage;
                $('#newsDesc').editable('setHTML', vm.newsMessage.details);
            });
        },
        // Fetch the list of news types for the type dropdown.
        getNewsTypes: function () {
            $.get("../newstype/queryAll", function (r) {
                vm.newsTypes = r.list;
            });
        },
        // Return to the list view and reload the grid on its current page.
        reload: function (event) {
            vm.showList = true;
            var page = $("#jqGrid").jqGrid('getGridParam', 'page');
            $("#jqGrid").jqGrid('setGridParam', {
                postData: {'name': vm.q.name},
                page: page
            }).trigger("reloadGrid");
            vm.handleReset('formValidate');
        },
        // Validate the form, then save.
        handleSubmit: function (name) {
            handleSubmitValidate(this, name, function () {
                vm.saveOrUpdate()
            });
        },
        handleFormatError: function (file) {
            this.$Notice.warning({
                title: '文件格式不正确',
                desc: '文件 ' + file.name + ' 格式不正确,请上传 jpg 或 png 格式的图片。'
            });
        },
        handleMaxSize: function (file) {
            this.$Notice.warning({
                title: '超出文件大小限制',
                desc: '文件 ' + file.name + ' 太大,不能超过 2M。'
            });
        },
        handleReset: function (name) {
            handleResetForm(this, name);
        },
        // Record the uploaded image URL on the upload-list entry.
        handleSuccess(res, file) {
            file.imgUrl = res.url;
            file.name = res.url;
            // Fix: plain arrays have no add() method; push() appends the file.
            vm.uploadList.push(file);
        },
        // Cap the gallery at five images.
        handleBeforeUpload() {
            const check = this.uploadList.length < 5;
            if (!check) {
                this.$Notice.warning({
                    title: '最多只能上传 5 张图片。'
                });
            }
            return check;
        },
        handleSuccessNewsImageUrl: function (res, file) {
            vm.newsMessage.newsImageUrl = file.response.url;
        },
        // Preview the current news image via eyeImage().
        eyeNewsImageUrl: function () {
            var url = vm.newsMessage.newsImageUrl;
            eyeImage(url);
        }
    }
});
#!/bin/sh
# Compress and minify the widget: minify the CSS and JS, then inline the CSS
# into the JS by substituting the PLUS_FRIES_CSS placeholder.
set -e

OUT_FILE="public/plusfries.js"
mkdir -p "$(dirname "$OUT_FILE")"
BIN="./node_modules/.bin"

echo "start minifying..."
CSS="$("$BIN"/uglifycss widget/plusfries.css)"
ES5="$("$BIN"/babel widget/plusfries.js)"
COMPACT="$(echo "$ES5" | "$BIN"/terser -m -c)"
# NOTE(review): the CSS is spliced in via sed, so it must not contain the '/'
# delimiter or unescaped '&'/'\' — fine for simple minified CSS, but worth
# confirming if the stylesheet grows.
echo "$COMPACT" | sed "s/PLUS_FRIES_CSS/${CSS}/g" > "$OUT_FILE"
echo "minification done!"
|
<gh_stars>1-10
/**
*
*
*/
"use strict";
var
config = require("./config"),
io = require('socket.io').listen(config.PRODUCER_PORT),
consumers = [];
/**
 * A data structure to represent a consumer as seen by the producer.
 * Some attributes overlap with those defined in the Consumer structure on the client-side,
 * but this one can be thought of a server-side instrumented consumer.
 *
 * @param id                consumer-supplied identifier
 * @param socketId          socket.io socket id of the consumer's connection
 * @param lifetime          consumer-reported lifetime
 * @param heartbeatCount    number of KEEPALIVE messages received so far
 * @param lastHeartbeatTime epoch millis of the latest KEEPALIVE (0 until one arrives)
 * @param timeMsgCount      number of TIME messages sent to this consumer
 */
var Consumer = function (id, socketId, lifetime, heartbeatCount, lastHeartbeatTime, timeMsgCount) {
    this.id = id;
    this.socketId = socketId;
    this.lifetime = lifetime;
    this.heartbeatCount = heartbeatCount;
    this.lastHeartbeatTime = lastHeartbeatTime;
    this.timeMsgCount = timeMsgCount;
};

// One-line human-readable summary used throughout the log messages.
Consumer.prototype.describe = function () {
    return config.sprintf("Consumer [id:%'02d, lifetime:%'02d, heartbeatCount:%'02d, lastHeartbeatTime: %s, timeMsgCount:%'02d]", this.id, this.lifetime, this.heartbeatCount, this.lastHeartbeatTime, this.timeMsgCount);
};
/**
 * The main control flow: wire up the socket listeners, then schedule the
 * periodic TIME broadcasts. Both steps only register handlers, so the
 * waterfall completes immediately; its errors are ignored.
 */
config.async.waterfall([
    listen,
    sendTime
], function (err, result) {
});
/**
 * Registers the socket.io handlers for consumer connections: REGISTER,
 * KEEPALIVE, and ECHO messages, each acknowledged with an ACK message.
 *
 * @param callback async-waterfall continuation, invoked immediately once
 *                 the handlers are registered
 * @returns {*}
 */
function listen(callback) {
    /* socket.io events, each connection goes through here */
    io.on('connection', function (socket) {
        /* After connection, the client initially sends a REGISTER message */
        socket.on(config.MESSAGE_REGISTER, function (data) {
            registerConsumer(socket, data);
            /* Send an ack to the consumer */
            console.log("CG-producer reg event");
            socket.emit(config.MESSAGE_ACK, config.ACK_CONSUMER_REGISTRATION);
        });
        /* The client sends a heartbeat periodically in the form of a KEEPALIVE message */
        socket.on(config.MESSAGE_KEEPALIVE, function (data) {
            keepAliveHandler(socket, data);
            /* Send an ack to the consumer */
            console.log("CG-producer heartbeat event");
            socket.emit(config.MESSAGE_ACK, config.ACK_CONSUMER_HEARTBEAT);
        });
        /* Support for simple echo: the payload is sent straight back */
        socket.on(config.MESSAGE_ECHO, function (data) {
            console.log("CG-producer echo event");
            socket.emit(config.MESSAGE_ACK, data);
        });
    });
    return callback(null);
}
/**
 * Cron job to send TIME messages every MESSAGE_TIME_FREQUENCY minutes to
 * registered consumers that are currently alive, i.e. whose last heartbeat
 * falls within KEEPALIVE_WINDOW.
 *
 * @param callback async-waterfall continuation, invoked once the job is scheduled
 * @returns {*}
 */
function sendTime(callback) {
    new config.cronJob({
        cronTime: "0 */" + config.MESSAGE_TIME_FREQUENCY + " * * * *",
        onTick: function () {
            if (config._.isUndefined(consumers) || config._.isNull(consumers) || config._.isEmpty(consumers)) {
                console.log("No consumers are currently registered. Not sending NTP time messages ...");
            } else {
                // NOTE(review): this wraps the millisecond diff in a moment
                // and relies on numeric coercion when comparing against
                // KEEPALIVE_WINDOW — confirm the units on both sides match.
                var aliveConsumers = config._.filter(consumers, function (consumer) {
                    return config.moment.utc(config.moment().diff(config.moment(consumer.lastHeartbeatTime))) < config.KEEPALIVE_WINDOW;
                });
                if (config._.isUndefined(aliveConsumers) || config._.isNull(aliveConsumers) || config._.isEmpty(aliveConsumers)) {
                    console.log("All registered consumers have reached their max lifetime. Not sending NTP time messages ...");
                } else {
                    /* Per-consumer send errors are ignored. */
                    config.async.each(aliveConsumers, sendNTPTime, function (err) {
                    });
                }
            }
        },
        start: 'true'
    });
    return callback(null);
}
/**
 * Register a new consumer
 * @param socket socket.io connection the REGISTER message arrived on
 * @param data   registration payload; id, lifetime, and heartbeatCount are
 *               consumer-supplied
 */
function registerConsumer(socket, data) {
    // Last-heartbeat time and TIME-message count both start at zero.
    var newConsumer = new Consumer(data.id, socket.id, data.lifetime, data.heartbeatCount, 0, 0);
    console.log("Registered a new " + newConsumer.describe());
    consumers.push(newConsumer);
}
/**
 * Handle the keepAlive message sent by a registered consumer: bump its
 * heartbeat counter and record the time. Messages from unknown consumers
 * are logged and ignored.
 * @param socket socket.io connection the message arrived on
 * @param data   keepAlive payload carrying the consumer's id and socketId
 */
function keepAliveHandler(socket, data) {
    // The socket the message arrived on should match the registered one.
    if (socket.id !== data.socketId) {
        console.log("Something fishy!");
    }
    var registeredConsumer = config._.findWhere(consumers, {id: data.id, socketId: data.socketId});
    if (typeof registeredConsumer === "undefined") {
        console.log("Encountered an unregistered consumer " + data.id + " trying to send a keepAlive message. Ignoring this");
    } else {
        /* For a registered consumer who sent a keepAlive message, update relevant counters */
        registeredConsumer.heartbeatCount += 1;
        registeredConsumer.lastHeartbeatTime = new Date().getTime();
        console.log("Received a keepAlive message from registered " + registeredConsumer.describe());
    }
}
/**
 * Send the current time to one consumer over its registered socket.
 *
 * @param consumer the instrumented Consumer to message
 * @param callback async-each continuation, invoked after emitting
 */
function sendNTPTime(consumer, callback) {
    /* Update the counter tracking number of TIME messages sent to this consumer */
    ++consumer.timeMsgCount;
    console.log("Sending NTP time to " + consumer.describe());
    /* Send the time to this specific consumer only */
    var socketId = consumer.socketId;
    io.sockets.connected[socketId].emit(config.MESSAGE_TIME, {'time': (new Date()).getTime()});
    return callback(null);
}
#!/bin/bash
# Build the 'atone' Docker image from the directory containing this script,
# then run it. Build output is suppressed; a failed build aborts the script.

# Set basedir to the absolute path of the directory containing the script
basedir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"

# Build the Docker image. Use basedir as the build context so the script
# works regardless of the caller's current directory (basedir was previously
# computed but never used, making the build depend on the caller's CWD).
docker build "$basedir" -t atone > /dev/null || { echo "Error: Failed to build the Docker image"; exit 1; }

# Run the Docker container
docker run atone
# validated: 2018-01-05 EN 46d3d290a0b2 java/src/com/ctre/phoenix/motorcontrol/can/WPI_VictorSPX.java
#----------------------------------------------------------------------------
# Software License Agreement
#
# Copyright (C) Cross The Road Electronics. All rights
# reserved.
#
# Cross The Road Electronics (CTRE) licenses to you the right to
# use, publish, and distribute copies of CRF (Cross The Road) firmware files (*.crf) and Software
# API Libraries ONLY when in use with Cross The Road Electronics hardware products.
#
# THE SOFTWARE AND DOCUMENTATION ARE PROVIDED "AS IS" WITHOUT
# WARRANTY OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT
# LIMITATION, ANY WARRANTY OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL
# CROSS THE ROAD ELECTRONICS BE LIABLE FOR ANY INCIDENTAL, SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES, LOST PROFITS OR LOST DATA, COST OF
# PROCUREMENT OF SUBSTITUTE GOODS, TECHNOLOGY OR SERVICES, ANY CLAIMS
# BY THIRD PARTIES (INCLUDING BUT NOT LIMITED TO ANY DEFENSE
# THEREOF), ANY CLAIMS FOR INDEMNITY OR CONTRIBUTION, OR OTHER
# SIMILAR COSTS, WHETHER ASSERTED ON THE BASIS OF CONTRACT, TORT
# (INCLUDING NEGLIGENCE), BREACH OF WARRANTY, OR OTHERWISE
#----------------------------------------------------------------------------
import hal
from wpilib import MotorSafety, LiveWindow, SendableBase
from wpilib._impl.utils import match_arglist
from .victorspx import VictorSPX
from ._impl import ControlMode
__all__ = ['WPI_VictorSPX']
class WPI_VictorSPX (VictorSPX, SendableBase, MotorSafety):
    """WPI Compliant motor controller class.
    WPILIB's object model requires many interfaces to be implemented to use
    the various features.
    This includes...
    - Software PID loops running in the robot controller
    - LiveWindow/Test mode features
    - Motor Safety (auto-turn off of motor if Set stops getting called)
    - Single Parameter set that assumes a simple motor controller.
    """

    def __init__(self, deviceNumber: int):
        """Construct a WPI-compliant Victor SPX.

        :param deviceNumber: CAN device id of the motor controller
        """
        super().__init__(deviceNumber)
        # Report usage to the HAL for resource tracking.
        hal.report(hal.UsageReporting.kResourceType_CTRE_future3, deviceNumber + 1)
        self.description = "Victor SPX %s" % (deviceNumber,)
        MotorSafety.__init__(self)
        # Motor safety is off by default; callers may enable it later.
        self.setExpiration(0.0)
        self.setSafetyEnabled(False)
        SendableBase.__init__(self)
        LiveWindow.add(self)
        self.setName("Victor SPX ", deviceNumber)
        # Last commanded percent-output value, reported by get().
        self.speed = 0.0

    def set(self, *args, **kwargs):
        """
        See :meth:`.BaseMotorController.set`
        Can be called three ways:
        - speed
        - mode, value
        - mode, demand0, demand1
        largely a wrapper around :meth:`.VictorSPX.set`
        :param value:
        :type value: float
        :param mode: ControlMode.PercentOutput if not provided
        :type mode: ControlMode
        :param speed:
        :type speed: float
        :param demand0:
        :type demand0: float
        :param demand1:
        :type demand1: float
        """
        # Argument templates for the three supported call shapes.
        speed_arg = ("speed", [float, int])
        value_arg = ("value", [float, int])
        mode_arg = ("mode", [ControlMode])
        demand0_arg = ("demand0", [float, int])
        demand1_arg = ("demand1", [float, int])
        templates = [
            [speed_arg],
            [mode_arg, value_arg],
            [mode_arg, demand0_arg, demand1_arg]]
        index, results = match_arglist('WPI_VictorSPX.set',
                                       args, kwargs, templates)
        if index == 2:
            # Two-demand form: forward both demands to the base controller.
            super().set(results['mode'], results['demand0'], results['demand1'])
        else:
            if index == 0:
                # Speed-only form implies PercentOutput mode.
                # NOTE(review): self.speed is only updated on this path, so
                # get() does not reflect values set via the (mode, value)
                # form — confirm this mirrors the upstream CTRE behavior.
                self.speed = value = results['speed']
                mode = ControlMode.PercentOutput
            elif index == 1:
                value = results['value']
                mode = results['mode']
            super().set(mode, value)
        # Feed the motor-safety watchdog on every set call.
        self.feed()

    def pidWrite(self, output: float):
        # WPILib PIDOutput interface: route PID output to percent-output set.
        self.set(output)

    def get(self) -> float:
        """Common interface for getting the current set speed of a speed controller.
        :returns: The current set speed. Value is between -1.0 and 1.0.
        """
        return self.speed

    #def setInverted(self, isInverted: bool):
    #    super().setInverted(isInverted)

    #def getInverted(self):
    #    return super().getInverted()

    def disable(self):
        # Stop output; part of the MotorSafety interface.
        self.neutralOutput()

    def stopMotor(self):
        """Common interface to stop the motor until :meth:`.set` is called again."""
        self.neutralOutput()

    def initSendable(self, builder):
        # LiveWindow/SmartDashboard wiring for test mode.
        builder.setSmartDashboardType("Speed Controller")
        builder.setSafeState(self.stopMotor)
        builder.addDoubleProperty("Value", self.get, self.set)

    def getDescription(self):
        # Human-readable description used by MotorSafety error messages.
        return self.description
|
<filename>src/net/abi/abisEngine/components/Camera.java
/*******************************************************************************
* Copyright 2020 <NAME> | ABI INC.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package net.abi.abisEngine.components;
import net.abi.abisEngine.math.matrix.Matrix4f;
import net.abi.abisEngine.math.vector.Vector3f;
/**
 * Scene component holding a projection matrix and deriving view matrices
 * from its transform. Registers itself as a camera on its parent scene.
 */
public class Camera extends SceneComponent {

	private Matrix4f projection;

	/**
	 * Creates a camera with a perspective projection.
	 */
	public Camera(float fov, float aspectRatio, float zNear, float zFar, String name) {
		this.projection = new Matrix4f().initProjection(fov, aspectRatio, zNear, zFar);
		super.setName(name);
	}

	/**
	 * Creates a camera with an orthographic projection.
	 */
	public Camera(float left, float right, float bottom, float top, float zNear, float zFar, String name) {
		this.projection = new Matrix4f().initOrthographic(left, right, bottom, top, zNear, zFar);
		super.setName(name);
	}

	/**
	 * NOTE(review): this constructor normalizes its arguments but never
	 * initializes 'projection' and never uses 'pos', so view-projection
	 * calls on such a camera would NPE — confirm whether callers always
	 * invoke setProjection() afterwards.
	 */
	public Camera(Vector3f pos, Vector3f forward, Vector3f up, String name) {
		up.normalize();
		forward.normalize();
		super.setName(name);
	}

	/**
	 * Builds the view matrix (rotation * translation) from this component's
	 * transform. Shared by getViewProjection() and getViewOrthographic(),
	 * which previously carried duplicated copies of this code.
	 */
	private Matrix4f computeView() {
		Matrix4f cameraRotationMatrix = super.getTransform().getTransformedRotation().conjugate().toRotationMatrix();
		/*
		 * Doing Negative multiplication here to eradicate the use of it in the return
		 * statement.
		 */
		Vector3f cameraPosition = super.getTransform().getTransformedPosition().mul(-1);
		Matrix4f cameraTranslationMatrix = new Matrix4f().initTranslation(cameraPosition.x(), cameraPosition.y(),
				cameraPosition.z());
		return cameraRotationMatrix.mul(cameraTranslationMatrix);
	}

	/** @return projection * view for the current transform. */
	public Matrix4f getViewProjection() {
		return projection.mul(computeView());
	}

	/** @return projection * view; identical to getViewProjection(). */
	public Matrix4f getViewOrthographic() {
		return projection.mul(computeView());
	}

	@Override
	public void input(float delta) {
		super.input(delta);
	}

	/** @return normalized left direction vector (forward x up). */
	public Vector3f getLeft() {
		Vector3f left = super.getTransform().getRotation().getForward()
				.cross(super.getTransform().getRotation().getUp()).normalize();
		return (left);
	}

	/** @return normalized right direction vector (up x forward). */
	public Vector3f getRight() {
		Vector3f right = super.getTransform().getRotation().getUp()
				.cross(super.getTransform().getRotation().getForward()).normalize();
		return (right);
	}

	@Override
	public void addToScene() {
		// Register this camera with the scene that owns the component.
		super.getParentScene().addCamera(this);
	}

	public void setProjection(Matrix4f projection) {
		this.projection = projection;
	}

	public Matrix4f getProjection() {
		return projection;
	}
}
|
import { expect } from 'chai';
import React from 'react';
import { shallow, mount } from 'enzyme';
import Label from './Label'
describe('[Atom] Label', () => {
it('Has correct default markup', () => {
const wrapper = shallow(<Label>Placeholder</Label>);
expect(wrapper.type()).to.equal('label');
expect(wrapper.prop('className')).to.equal('ms-label');
expect(wrapper.text()).to.equal('Placeholder');
});
}); |
# https://developer.zendesk.com/rest_api/docs/help_center/translations#list-all-enabled-locales-and-default-locale
zdesk_help_center_locales_list () {
    # Sets the HTTP verb and endpoint consumed by the surrounding zdesk
    # wrapper; this function performs no request itself.
    method=GET
    url=/api/v2/help_center/locales.json
}
import pandas as pd
def process_dataframe(df, samples_per_group, validate):
    """Split a group's rows into a sampled subset and a validation row.

    Parameters
    ----------
    df : pandas.DataFrame
        Rows belonging to one group.
    samples_per_group : int
        Number of rows to sample for the first returned frame.
    validate : bool
        When True, the second returned frame holds one held-out row
        (taken from the same sample draw); when False it is empty.

    Returns
    -------
    (pandas.DataFrame, pandas.DataFrame)
        The sampled subset and the validation subset. When the group has
        fewer than ``samples_per_group + 1`` rows, the whole group (if
        ``validate``) or an empty frame (otherwise) is returned first,
        with an empty second frame.
    """
    needed = samples_per_group + 1
    if df.shape[0] < needed:
        # Too few rows to sample: pass the group through for validation
        # runs, drop it entirely otherwise.
        remainder = df if validate else df.head(0)
        return remainder, pd.DataFrame()

    # One extra row is drawn; head(1) becomes the validation row when
    # validate is truthy (int(True) == 1), head(0) is empty otherwise.
    drawn = df.sample(n=needed, replace=False)
    return drawn.tail(samples_per_group), drawn.head(int(validate))
import java.io.OutputStream;
/**
 * Encodes an EXI stream — a header followed by a body — to an OutputStream.
 */
public class EXIStreamEncoderImpl {

	private EXIFactory exiFactory;
	private EXIHeaderEncoder exiHeader;
	private EXIBodyEncoder exiBody;

	/**
	 * Creates the stream encoder, obtaining a body encoder from the factory.
	 *
	 * @param exiFactory factory used to create the body encoder
	 * @throws EXIException if the factory cannot create a body encoder
	 */
	public EXIStreamEncoderImpl(EXIFactory exiFactory) throws EXIException {
		this.exiFactory = exiFactory;
		exiHeader = new EXIHeaderEncoder();
		exiBody = exiFactory.createEXIBodyEncoder();
	}

	public void encodeHeader(OutputStream os) throws EXIException {
		// Implementation for encoding the header
		// NOTE(review): currently a no-op and 'exiHeader' is never used —
		// confirm whether the header write was intentionally omitted.
	}

	/** Streams the EXI body to the given output stream. */
	public void encodeBody(OutputStream os) throws EXIException {
		exiBody.setOutputStream(os);
		exiBody.encode();
	}
}
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# NOTE(review): PLACEHOLDER, WIDTH, HEIGHT and PREFIX are expected to be set
# in the environment before this runs; unset values render as empty strings
# in the generated page — confirm against the provisioner that calls this.
# The unquoted heredoc delimiter (EOM) deliberately allows ${...} expansion.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome to ${PREFIX}'s app. Bada Bing! Bada Boom!
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
<filename>Validator/GenICam/library/CPP/include/xsde/cxx/serializer/context.hxx
// file : xsde/cxx/serializer/context.hxx
// author : <NAME> <<EMAIL>>
// copyright : Copyright (c) 2005-2011 Code Synthesis Tools CC
// license : GNU GPL v2 + exceptions; see accompanying LICENSE file
#ifndef XSDE_CXX_SERIALIZER_CONTEXT_HXX
#define XSDE_CXX_SERIALIZER_CONTEXT_HXX
#include <xsde/cxx/config.hxx>
#include <stddef.h> // size_t
#include <xsde/c/genx/genx.h>
#include <xsde/cxx/string.hxx>
#ifndef XSDE_EXCEPTIONS
# include <xsde/cxx/sys-error.hxx>
#endif
#include <xsde/cxx/serializer/genx/xml-error.hxx>
#ifdef XSDE_SERIALIZER_VALIDATION
# include <xsde/cxx/schema-error.hxx>
#endif
namespace xsde
{
namespace cxx
{
namespace serializer
{
// Per-serializer state shared by generated serializers: wraps the genx
// writer handle and centralizes XML output plus error propagation. Error
// reporting is via exceptions when XSDE_EXCEPTIONS is defined, otherwise
// via the error codes/accessors declared below.
class context
{
public:
  context (genxWriter xml_serializer);

private:
  // Non-copyable: the context owns per-stream serializer state.
  context (const context&);
  context& operator= (const context&);

public:
  genxWriter
  xml_serializer ();

#ifdef XSDE_POLYMORPHIC
public:
  // Set/get the dynamic serializer type id (as opaque const void*)
  // in case of polymorphic serialization. If type id is not set,
  // static type is assumed.
  //
  void
  type_id (const void*);

  const void*
  type_id ();
#endif

public:
  // XML writing primitives. The two parallel declaration sets below differ
  // only in failure reporting: void + exception vs. bool success flag.
#ifdef XSDE_EXCEPTIONS
  void
  start_element (const char* name);

  void
  start_element (const char* ns, const char* name);

  void
  end_element ();

  void
  start_attribute (const char* name);

  void
  start_attribute (const char* ns, const char* name);

  void
  end_attribute ();

  void
  attribute (const char* name, const char* value);

  void
  attribute (const char* ns, const char* name, const char* value);

  void
  characters (const char*);

  void
  characters (const char*, size_t);

  void
  declare_namespace (const char* ns, const char* prefix);

  void
  declare_default_namespace (const char* ns);

  void
  clear_default_namespace ();

  const char*
  lookup_namespace_prefix (const char* ns);

#ifdef XSDE_POLYMORPHIC
  void
  set_type (const char* type);
#endif
#else
  bool
  start_element (const char* name);

  bool
  start_element (const char* ns, const char* name);

  bool
  end_element ();

  bool
  start_attribute (const char* name);

  bool
  start_attribute (const char* ns, const char* name);

  bool
  end_attribute ();

  bool
  attribute (const char* name, const char* value);

  bool
  attribute (const char* ns, const char* name, const char* value);

  bool
  characters (const char*);

  bool
  characters (const char*, size_t);

  bool
  declare_namespace (const char* ns, const char* prefix);

  bool
  declare_default_namespace (const char* ns);

  bool
  clear_default_namespace ();

  const char*
  lookup_namespace_prefix (const char* ns);

#ifdef XSDE_POLYMORPHIC
  bool
  set_type (const char* type);
#endif
#endif

  // Error handling via exceptions.
  //
#ifdef XSDE_EXCEPTIONS
public:
  void
  throw_xml_error (genx::xml_error);
#endif

  // Error handling via codes.
  //
  // Application error.
  //
#ifndef XSDE_EXCEPTIONS
public:
  int
  app_error () const;

  void
  app_error (int);
#endif

  // Schema error.
  //
#ifdef XSDE_SERIALIZER_VALIDATION
public:
  typedef cxx::schema_error::value schema_error_t;

  schema_error_t
  schema_error () const;

  void
  schema_error (schema_error_t);
#endif

  // XML error.
  //
#ifndef XSDE_EXCEPTIONS
public:
  typedef genx::xml_error xml_error_t;

  xml_error_t
  xml_error () const;

  void
  xml_error (xml_error_t);
#endif

  // System error.
  //
#ifndef XSDE_EXCEPTIONS
public:
  typedef cxx::sys_error::value sys_error_t;

  sys_error_t
  sys_error () const;

  void
  sys_error (sys_error_t);
#endif

  // Implementation details.
  //
#if defined(XSDE_SERIALIZER_VALIDATION) || !defined(XSDE_EXCEPTIONS)
public:
  enum error_type_t
  {
    error_none = 0,
    error_app,
    error_schema,
    error_xml,
    error_sys
  };

  error_type_t
  error_type () const;

protected:
  // Discriminator for the union below: records which member is active.
  error_type_t error_type_;

  union
  {
#ifndef XSDE_EXCEPTIONS
    int app;
    xml_error_t xml;
#endif
#ifdef XSDE_SERIALIZER_VALIDATION
    schema_error_t schema;
#endif
#ifndef XSDE_EXCEPTIONS
    sys_error_t sys;
#endif
  } error_code_;
#endif // XSDE_SERIALIZER_VALIDATION || !XSDE_EXCEPTIONS

protected:
  genxWriter xml_serializer_;

#ifdef XSDE_POLYMORPHIC
  const void* type_id_;
#endif

  // Support for ISO-8859-1 conversion.
  //
#ifdef XSDE_ENCODING_ISO8859_1
protected:
  const char*
  conv_data (const char* iso_s, size_t utf_n, string& var);

  const char*
  conv_data (const char* iso_s, size_t iso_n, size_t utf_n, string& var);

  const char*
  conv_name (const char* iso_s, size_t utf_n, char* fix, string& var);

  char data_buf_[256];
  char name_buf1_[128];
  char name_buf2_[128]; // Keep buf1 and buf2 sizes the same.
#endif
};
}
}
}
#include <xsde/cxx/serializer/context.ixx>
#endif // XSDE_CXX_SERIALIZER_CONTEXT_HXX
|
package malte0811.controlengineering.util.math;
import javax.annotation.Nullable;
/**
 * Immutable axis-aligned integer rectangle given by its min/max corners.
 * NOTE(review): containsClosed treats boundary points as inside, while
 * disjoint treats rectangles that merely touch as disjoint — confirm this
 * half-open-vs-closed mix is intentional at call sites.
 */
public record RectangleI(int minX, int minY, int maxX, int maxY) {
	public RectangleI(Vec2i min, Vec2i max) {
		this(min.x(), min.y(), max.x(), max.y());
	}

	/** True if both corners of {@code other} lie inside this rectangle (closed test). */
	public boolean contains(RectangleI other) {
		return containsClosed(other.minX, other.minY) && containsClosed(other.maxX, other.maxY);
	}

	public boolean containsClosed(Vec2i point) {
		return containsClosed(point.x(), point.y());
	}

	public boolean containsClosed(Vec2d point) {
		return containsClosed(point.x(), point.y());
	}

	/** Closed containment test: boundary coordinates count as inside. */
	public boolean containsClosed(double x, double y) {
		return minX <= x && x <= maxX && minY <= y && y <= maxY;
	}

	/** True if the rectangles share no interior points (touching edges count as disjoint). */
	public boolean disjoint(RectangleI other) {
		return minX >= other.maxX || other.minX >= maxX || minY >= other.maxY || other.minY >= maxY;
	}

	/** Smallest rectangle covering both this and {@code other}; returns this when other is null. */
	public RectangleI union(@Nullable RectangleI other) {
		if (other == null) {
			return this;
		}
		return new RectangleI(
				Math.min(minX(), other.minX()), Math.min(minY(), other.minY()),
				Math.max(maxX(), other.maxX()), Math.max(maxY(), other.maxY())
		);
	}

	public int getWidth() {
		return maxX - minX;
	}

	public int getHeight() {
		return maxY - minY;
	}

	/** Center of the rectangle in double precision. */
	public Vec2d center() {
		return new Vec2d(minX() + getWidth() / 2., minY() + getHeight() / 2.);
	}

	/** Returns a copy translated by the given vector. */
	public RectangleI offset(Vec2i by) {
		return new RectangleI(minX() + by.x(), minY() + by.y(), maxX() + by.x(), maxY + by.y());
	}
}
|
#!/usr/bin/env bash
# Copyright (c) 2019 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8
set -euxo pipefail
TOPLEVEL=$(git rev-parse --show-toplevel)
CURRENT_DIR=$(dirname $(readlink -f "$0"))
TEST_PATCH="${CURRENT_DIR}/test-commit.patch"
: "${REMOTE:=origin}"
: "${MASTER_BRANCH:=master}"
REMOTE_AND_BRANCH="${REMOTE}/${MASTER_BRANCH}"
LATEST_MASTER=$(git rev-parse "${MASTER_BRANCH}")
# Run autopatch.sh with the given patch and assert its exit status; on an
# expected success, additionally verify the working tree is clean, the commit
# landed on latest master, and the committed diff matches the patch.
# Intended to run inside a subshell: 'exit' ends only the current test case.
#   $1: path to the patch file to apply
#   $2: expected exit code of autopatch.sh
test_autopatch() {
    PATCH_FILE="$1"
    EXPECTED_EXIT_CODE="$2"
    PATCH_ARGS="--patch ${PATCH_FILE}"

    # Setting the remote to this repo allows us to simulate an upstream without
    # relying on external services for unit tests.
    export EDITOR="${CURRENT_DIR}/test-commit-message.sh"

    # Note: Do not use `-o ${REMOTE}` here because REMOTE may be on the local filesystem.
    # Capture the exit code via '|| EXIT_CODE=$?' so 'set -e' does not abort.
    EXIT_CODE=0
    "${CURRENT_DIR}/../autopatch.sh" -o testorigin -b "${MASTER_BRANCH}" --patch-args "${PATCH_ARGS}" || EXIT_CODE=$?
    if [ "${EXIT_CODE}" -ne "${EXPECTED_EXIT_CODE}" ]; then
        echo "Error: autopatch exited with '${EXIT_CODE}' when '${EXPECTED_EXIT_CODE}' was expected."
        exit 1
    fi

    # Autopatch failed as expected, so sanity checks are not necessary
    if [ "${EXPECTED_EXIT_CODE}" -ne 0 ]; then
        exit 0
    fi

    # Sanity checks
    if [ -n "$(git status --porcelain)" ]; then
        echo "Error: There should be no uncommitted changes."
        exit 10
    fi
    if [ "${LATEST_MASTER}" != "$(git rev-parse HEAD~)" ]; then
        echo "Error: Failed to patch on latest master."
        exit 11
    fi

    # Note: Remove 'index ...' line from 'git diff' as the SHA1 hash is unlikely
    # to match.
    DIFF_HEAD_AGAINST_PATCH="$(git diff HEAD~ | grep -v "^index " | diff - "${PATCH_FILE}" || :)"
    if [ -n "${DIFF_HEAD_AGAINST_PATCH}" ]; then
        echo "Error: Rebased changes do not match the given patch. Difference was:"
        echo "${DIFF_HEAD_AGAINST_PATCH}"
        exit 12
    fi
}
TEST_STATUS="FAILED"
# Remove the temporary test directory and print the overall test status.
#   $1: path of the temporary directory to delete
final_cleanup() {
    local temp_dir="$1"
    # Cleanup the temporary test directory
    rm -rf "${temp_dir}"
    # Turn off command tracing so the summary prints cleanly
    set +x
    echo
    echo "${0}:"
    echo "${TEST_STATUS}"
}
TEMP_DIR=$(mktemp -d)
trap 'final_cleanup ${TEMP_DIR}' RETURN EXIT
cd "${TEMP_DIR}"
git init
# Set a temporary git config in case a global config isn't set
git config user.name "test-autopatch"
git config user.email "test@test.test"
git remote add testorigin "${TOPLEVEL}"
git pull testorigin "${REMOTE_AND_BRANCH}"
# Reset the repository between test cases: return to master, discard working
# tree changes, and delete the per-test branch.
test_cleanup() {
    # Cleanup current branch so that arcanist doesn't run out of branch names
    CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
    git checkout "${MASTER_BRANCH}"
    git reset --hard HEAD
    # The branch may not exist when a test bailed early; ignore that failure.
    git branch -D "${CURRENT_BRANCH}" || true
}
(
trap 'test_cleanup' RETURN ERR EXIT
echo "TEST: Simply sanity check that autopatch fast-forwards as expected"
git reset --hard HEAD~10
test_autopatch "${TEST_PATCH}" 0
)
# Fail (exit 51) if a file that autopatch should have dropped still exists.
#   $1: file path to check; empty input is ignored
test_file_not_present() {
    local file="$1"
    if [ -n "${file}" ] && [ -f "${file}" ]; then
        echo "Error: '${file}' file was found but not expected!"
        exit 51
    fi
}
(
trap 'test_cleanup' RETURN ERR EXIT
echo "TEST: Locally committed changes cause the script to bail"
TEST_FILE="test-committed-changes"
touch "${TEST_FILE}"
git add "${TEST_FILE}"
git commit -m "test local commit"
test_autopatch "${TEST_PATCH}" 11
)
(
trap 'test_cleanup' RETURN ERR EXIT
echo "TEST: Staged changes are not included after autopatching"
TEST_FILE="test-staged-changes"
touch "${TEST_FILE}"
git add "${TEST_FILE}"
test_autopatch "${TEST_PATCH}" 10
test_file_not_present "${TEST_FILE}"
)
(
trap 'test_cleanup' RETURN ERR EXIT
echo "TEST: Unstaged changes are not included after autopatching"
TEST_FILE="test-unstaged-changes"
touch "${TEST_FILE}"
test_autopatch "${TEST_PATCH}" 10
test_file_not_present "${TEST_FILE}"
)
TEST_STATUS="PASSED"
|
#!/bin/bash
#
# Minimal example for deploying latest built 'Ansible Service Broker'
# on oc cluster up
#
#
# We deploy oc cluster up with an explicit hostname and routing suffix
# so that pods can access routes internally.
#
# For example, we need to register the ansible service broker route to
# the service catalog when we create the broker resource. The service
# catalog needs to be able to communicate to the ansible service broker.
#
# When we use the default "127.0.0.1.nip.io" route suffix, requests
# from inside the cluster fail with an error like:
#
# From Service Catalog: controller manager
# controller.go:196] Error syncing Broker ansible-service-broker:
# Get https://asb-1338-ansible-service-broker.127.0.0.1.nip.io/v2/catalog:
# dial tcp 127.0.0.1:443: getsockopt: connection refused
#
# To resolve this, we explicitly set the
# --public-hostname and --routing-suffix
#
# We use the IP of the docker interface on our host for testing in a
# local environment, or the external listening IP if we want to expose
# the cluster to the outside.
#
# Below will default to grabbing the IP of docker0, typically this is
# 172.17.0.1 if not customized
#
PUBLIC_IP="$(ip addr show docker0 | grep -Po 'inet \K[\d.]+')"
HOSTNAME=${PUBLIC_IP}.nip.io
ROUTING_SUFFIX="${HOSTNAME}"
oc cluster up --image=openshift/origin --version=v3.6.0-rc.0 --service-catalog=true --routing-suffix=${ROUTING_SUFFIX} --public-hostname=${HOSTNAME}
#
# A valid dockerhub username/password is required so the broker may
# authenticate with dockerhub to:
#
# 1) inspect the available repositories in an organization
# 2) read the manifest of each repository to determine metadata about
# the images
#
# This is how the Ansible Service Broker determines what content to
# expose to the Service Catalog
#
# Note: dockerhub API requirements require an authenticated user only,
# the user does not need any special access beyond read access to the
# organization.
#
# By default, the Ansible Service Broker will look at the
# 'ansibleplaybookbundle' organization, this can be overridden with the
# parameter DOCKERHUB_ORG being passed into the template.
#
DOCKERHUB_USER="changeme"
DOCKERHUB_PASS="changeme"
DOCKERHUB_ORG="ansibleplaybookbundle"
#
# Disabling basic auth allows "apb push" to work.
#
ENABLE_BASIC_AUTH="false"
curl -s https://raw.githubusercontent.com/openshift/ansible-service-broker/master/templates/deploy-ansible-service-broker.template.yaml > deploy-ansible-service-broker.template.yaml
#
# Logging in as system:admin so we can create a clusterrolebinding
#
oc login -u system:admin
oc new-project ansible-service-broker
# Render the broker template and create the resources in one pipeline.
oc process -f ./deploy-ansible-service-broker.template.yaml \
    -n ansible-service-broker \
    -p DOCKERHUB_USER="$DOCKERHUB_USER" \
    -p DOCKERHUB_PASS="$DOCKERHUB_PASS" \
    -p DOCKERHUB_ORG="$DOCKERHUB_ORG" \
    -p ENABLE_BASIC_AUTH="$ENABLE_BASIC_AUTH" | oc create -f -
# '$?' reflects the last command of the pipeline ('oc create'), as before.
# The previous bare 'exit' inherited the status of the preceding 'echo' and
# therefore exited 0 on this error path; 'exit 1' makes the failure visible.
if [ "$?" -ne 0 ]; then
    echo "Error processing template and creating deployment"
    exit 1
fi
ASB_ROUTE=`oc get routes | grep ansible-service-broker | awk '{print $2}'`
cat <<EOF > ansible-service-broker.broker
apiVersion: servicecatalog.k8s.io/v1alpha1
kind: Broker
metadata:
name: ansible-service-broker
spec:
url: https://${ASB_ROUTE}
authInfo:
basicAuthSecret:
namespace: ansible-service-broker
name: asb-auth-secret
EOF
oc create -f ./ansible-service-broker.broker
#
# Then login as 'developer'/'developer' to WebUI
# Create a project
# Deploy mediawiki to new project (use a password other than
# admin since mediawiki forbids admin as password)
# Deploy PostgreSQL(ABP) to new project
# After they are up
# Click 'Create Binding' on the kebab menu for Mediawiki,
# select postgres
# Click deploy on mediawiki, after it's redeployed access webui
#
|
<gh_stars>0
const { calc } = require('./calc')

// CLI entry point: node <script> <command> <num1> <num2>
let args = process.argv;
console.log(args);

let command = args[2];
let num1 = args[3];
let num2 = args[4];

try {
    if (isNaN(Number(num1)) || isNaN(Number(num2)))
        throw "nan";
    // argv values are strings; convert before dispatch so numeric operators
    // behave numerically (string operands would make '+' concatenate).
    calc[command](Number(num1), Number(num2));
}
catch (e) {
    if (e == "nan")
        console.log("You didn't enter a valid number!");
    else
        // An unknown command makes calc[command] undefined, whose call
        // throws a TypeError that lands here.
        console.log('Command not found');
}
<gh_stars>1-10
var words = require("./words.json").words;
var punctuations = ".!?".split("");
var table = words.map(function(word) {
return word.word;
}).concat(punctuations);
/**
 * Render a number as a lowercase hex string, zero-padded to two characters.
 * @param n non-negative integer index
 * @returns {string} e.g. 0 -> "00", 10 -> "0a", 255 -> "ff"
 */
function hexify (n) {
    var hex = n.toString(16);
    return hex.length === 1 ? "0" + hex : hex;
}
module.exports = {
    // Encode a sentence as hex: each known word or sentence punctuation
    // token becomes its two-digit index into `table`; unknown tokens are
    // silently dropped.
    // NOTE(review): hexify emits more than two characters for indexes above
    // 0xff, which fromHex's fixed two-character stride cannot decode —
    // assumes words.json holds at most 256 entries; confirm.
    toHex: function toHex (str) {
        var result = "";
        // Tokenize into word runs and the punctuation marks . ! ?
        var tokens = str.toLowerCase().match(/(?:\w+)|(?:[\.!\?])/g);
        for (var i = 0, len = tokens.length; i < len; i ++) {
            var index = table.indexOf(tokens[i]);
            if (index !== -1) {
                result += hexify(index);
            }
        }
        return result;
    },
    // Decode a hex string produced by toHex back into text; indexes with no
    // table entry are skipped, and punctuation is re-attached without a
    // preceding space.
    fromHex: function fromHex (str) {
        var result = "";
        for (var i = 0, len = str.length; i < len; i += 2) {
            var index = parseInt(str[i] + str[i + 1], 16);
            var character = table[index];
            if (character) {
                result += character + " ";
            }
        }
        return result.trim().replace(
            /\s+([\.!\?])/g,
            "$1"
        );
    }
};
|
#!/usr/bin/env bash
# Install/upgrade Python dependencies, then build the Docker images used for
# testing and building. Any failing step aborts the script (set -e).
set -e

pip install --upgrade pip
pip install --upgrade -r requirements.txt
pip install --upgrade -r requirements-dev.txt

cd testing
./build-image.sh
# The second image's build script lives in a sibling directory.
cd ../building
./build-image.sh
|
package com.doodl6.demo.pattern;
/**
* 建造者模式
*/
public class Builder {
protected Product product = new Product();
public Builder partA(String partA) {
product.setPartA(partA);
return this;
}
public Builder partB(String partB) {
product.setPartB(partB);
return this;
}
public Builder partC(String partC) {
product.setPartC(partC);
return this;
}
public Product build() {
return product;
}
public static class Product {
private String partA;
private String partB;
private String partC;
public String getPartA() {
return partA;
}
public void setPartA(String partA) {
this.partA = partA;
}
public String getPartB() {
return partB;
}
public void setPartB(String partB) {
this.partB = partB;
}
public String getPartC() {
return partC;
}
public void setPartC(String partC) {
this.partC = partC;
}
@Override
public String toString() {
return "Product{" +
"partA='" + partA + '\'' +
", partB='" + partB + '\'' +
", partC='" + partC + '\'' +
'}';
}
}
public static void main(String[] args) {
Builder builder = new Builder();
Product product = builder.partA("good")
// .partB("nice")
.partC("perfect")
.build();
System.out.println(product.toString());
}
}
|
module PayPal
  # Wraps a PayPal DoDirectPayment charge attempt for a given amount/card.
  class Charge
    include PayPal::Merchant::DoDirectPayment

    attr_accessor :response, :amount, :card

    # Convenience constructor mirroring ActiveRecord-style `create`.
    def self.create(params = {})
      new(params)
    end

    # Requires :amount and :card in params; performs the charge immediately
    # and stores the gateway response.
    def initialize(params = {})
      @amount = params[:amount]
      @card = params[:card]
      raise "You must provide an amount and credit card" unless @amount && @card
      @response = charge(@amount, @card, params)
    end

    # Truthy when a response exists and reports success.
    def success?
      @response && @response.try(:success?)
    end
  end
end
# Bulk-load the License, Status and Technology fixture CSVs into the App
# Engine datastore through the remote_api endpoint, then remove the
# bulkloader log/progress files left behind locally.
/Applications/google_appengine/appcfg.py upload_data --config_file=bulkloader.yaml --kind=License --filename=license.csv --url=http://summer-sample-gae.appspot.com/remote_api
/Applications/google_appengine/appcfg.py upload_data --config_file=bulkloader.yaml --kind=Status --filename=status.csv --url=http://summer-sample-gae.appspot.com/remote_api
/Applications/google_appengine/appcfg.py upload_data --config_file=bulkloader.yaml --kind=Technology --filename=technology.csv --url=http://summer-sample-gae.appspot.com/remote_api
rm bulkloader-*.*
import joplin from 'api';

import { noteVariables } from './noteVariables';

// Register the plugin with Joplin; the note-variables feature is initialized
// as soon as the plugin starts.
joplin.plugins.register({
    onStart: async function () {
        noteVariables.init();
    },
});
|
#!/bin/bash
# Apply an awk program to a CSV under tmp/csv/ using a caller-supplied field
# delimiter, recording the invocation for debugging.
# (Fixed: the shebang was '#!bin/bash', missing the leading slash.)
#   $1: field delimiter for awk -F
#   $2: awk program body (wrapped in { })
#   $3: file name under tmp/csv/
delimiter=$1
script=$2
file=$3

#awk -F "$delimiter" "{len=split($col,value,\"$separator\");n=\"\";for(i=1;i<=len;++i){n=n \"\\\"\" NR \"\\\",\\\"\" value[i] \"\\\"\";if(i<=len-1){n=n\"\n\";}system(\"echo \"\" n \"\" >> tmp/csv/$newFile\");}$col=NR}" $file
#awk -F "$delimiter" "{len=split($col,value,\"$separator\");n=\"\";for(i=1;i<=len;++i){n=n \"\\\"\" NR \"\\\"$delimiter\\\"\" value[i] \"\\\"\";system(\"echo \\\"\" n \"\\\" >> tmp/csv/$newFile\");n=\"\"}$col=NR}" $file

# Record the exact awk invocation for debugging. The inner quotes are escaped
# so the full command survives as one string (the original unescaped nested
# quotes split the string and dropped the quoting from the debug line).
echo "awk -F \"$delimiter\" \"{$script}\" tmp/csv/$file" > tmp/debugNormalization.txt
awk -F "$delimiter" "{$script}" tmp/csv/$file
echo "NORMALIZING $file"
|
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kubernetes
import (
"testing"
"time"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)
// TestEquality checks that isEqual reports two Pod manifests as equal even
// though they differ in metadata.generation and status.timestamp — i.e. the
// comparison is expected to disregard those fields.
// NOTE(review): isEqual is defined elsewhere in this package; confirm exactly
// which fields it masks out before relying on this behavior.
func TestEquality(t *testing.T) {
	// Baseline pod: generation 1, status timestamp "now".
	pod1 := &unstructured.Unstructured{
		Object: map[string]interface{}{
			"apiVersion": "v1",
			"kind":       "Pod",
			"metadata": map[string]interface{}{
				"generation": 1,
				"labels": map[string]string{
					"isPod": "true",
				},
				"annotations": map[string]string{
					"test": "true",
				},
			},
			"spec": map[string]interface{}{
				"image": "someimage",
			},
			"status": map[string]interface{}{
				"timestamp": metav1.Now(),
			},
		},
	}
	// Same spec/labels/annotations, but generation 2 and a timestamp one
	// hour later — differences the comparison should ignore.
	pod2 := &unstructured.Unstructured{
		Object: map[string]interface{}{
			"apiVersion": "v1",
			"kind":       "Pod",
			"metadata": map[string]interface{}{
				"generation": 2,
				"labels": map[string]string{
					"isPod": "true",
				},
				"annotations": map[string]string{
					"test": "true",
				},
			},
			"spec": map[string]interface{}{
				"image": "someimage",
			},
			"status": map[string]interface{}{
				"timestamp": metav1.Time{Time: metav1.Now().Add(1 * time.Hour)},
			},
		},
	}
	equality := isEqual(pod1, pod2)
	require.Equal(t, true, equality)
}
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build-phase script: copies vendored frameworks,
# dSYMs and bcsymbolmaps into the app bundle, stripping invalid architectures
# and re-signing as needed.
# NOTE(review): the shebang is /bin/sh but the script uses bash features
# (set -o pipefail, `function`, the ERR trap, arrays, [[ ]]). This works on
# macOS where /bin/sh is bash — confirm it is never run by a strict POSIX sh.
set -e
set -u
set -o pipefail
# Print the script path and failing line number for any unexpected error.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1 is the framework path, looked up first under BUILT_PRODUCTS_DIR (full
# path, then basename), then as an absolute/relative path.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Resolve a symlinked framework to its target before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Fall back to a bare binary if no .framework bundle layout was found,
  # and resolve a symlinked binary to its real file.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1 is the dSYM bundle path; $2 (default true) controls whether a
# "no matching architectures" warning is printed (suppressed for XCFrameworks).
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the target's temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      # Fix: the echoed command previously said "${basename}.framework.dSYM"
      # while the executed rsync used "${basename}.dSYM"; the log now matches
      # what is actually run.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all; touch a placeholder so the
      # input/output paths from Xcode do not re-execute this script because
      # the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
  local bcsymbolmap_path="$1"
  local destination="${BUILT_PRODUCTS_DIR}"
  # Fix: the inner quotes of the echoed command were unescaped, so the shell
  # consumed them and the logged command line lost its quoting; they are now
  # escaped like in the sibling install_* functions.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Fix: guard CODE_SIGNING_ALLOWED with :- so the test does not abort under
  # `set -u` when the variable is unset (the other two settings on this line
  # already do this).
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED:-}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Background the signing job; the main script `wait`s at the end.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes from $1 (a fat binary) every architecture that is not in ARCHS.
# Sets STRIP_BINARY_RETVAL to 1 when the binary was inspected for stripping,
# 0 when it shares no architecture with the current build ($2, default true,
# controls whether that case prints a warning).
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Dispatches one artifact path to the matching install_* helper by extension.
install_artifact() {
  artifact="$1"
  base="$(basename "$artifact")"
  case $base in
  *.framework)
    install_framework "$artifact"
    ;;
  *.dSYM)
    # Suppress arch warnings since XCFrameworks will include many dSYM files
    install_dsym "$artifact" "false"
    ;;
  *.bcsymbolmap)
    install_bcsymbolmap "$artifact"
    ;;
  *)
    # Fix: the inner quotes were unescaped, so they were silently dropped from
    # the logged message; escape them so the artifact name is quoted as intended.
    echo "error: Unrecognized artifact \"$artifact\""
    ;;
  esac
}
# Reads artifact paths (one per line) from the list file $1 and installs each.
copy_artifacts() {
  file_list="$1"
  # Fix: IFS= and -r keep leading whitespace and backslashes in paths intact,
  # and the list file path is quoted in case it contains spaces.
  while IFS= read -r artifact; do
    install_artifact "$artifact"
  done < "$file_list"
}
# Install all artifacts listed by CocoaPods for the current configuration.
ARTIFACT_LIST_FILE="${BUILT_PRODUCTS_DIR}/cocoapods-artifacts-${CONFIGURATION}.txt"
if [ -r "${ARTIFACT_LIST_FILE}" ]; then
  copy_artifacts "${ARTIFACT_LIST_FILE}"
fi
# The same frameworks are embedded for both configurations, so the two
# previously duplicated branches are expressed as one combined condition.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/KLApplicationEntry/KLApplicationEntry.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/lottie-ios/Lottie.framework"
fi
# With parallel signing enabled the codesign jobs were backgrounded;
# wait for all of them before the script phase finishes.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
// Collect every contiguous 3-character window of the input string.
const inputString = "Hello World!"
const windowSize = 3;
let splitString = Array.from(
  { length: inputString.length - windowSize + 1 },
  (_, start) => inputString.substring(start, start + windowSize)
);
console.log(splitString);
<gh_stars>1-10
/**
* Created by <EMAIL> on 2019/3/20.
*/
import "./style.less";
import React,{PureComponent} from 'react';
import QRCode from "qrcode.react";
import extensionizer from 'extensionizer';
import popup from "../../../../popup";
import Utils from "../../../../common/utils";
import Button from "../../../vendors/button";
export default class YOU extends PureComponent{
constructor(props){
super(props);
}
render(){
const {locale} = this.props;
const {account:{current},app:{network}} = this.props.state;
return (
<div className="deposit-wrapper">
<div className="account-region">
<section>
<h4>{locale.deposit_name}</h4>
<small>{locale.deposit_desc}</small>
</section>
<section>
<QRCode value={current} size={140}/>
</section>
<section>
{Utils.formatAddress(current,false)}
</section>
</div>
{
network.key === "main" ? <div className="obtain-region">
<section>{locale.deposit_exchange}</section>
<section>
<a onClick={()=>{
const url = "https://okex.me";
if(extensionizer.storage){
popup.messageToBackground("redirect",{url:url});
}
else{
window.open(url);
}
}}>
<span className="fa icon-okex">
<span className="path1"/>
<span className="path2"/>
<span className="path3"/>
<span className="path4"/>
<span className="path5"/>
<span className="path6"/>
<span className="path7"/>
<span className="path8"/>
<span className="path9"/>
</span>
</a>
</section>
</div> : null
}
{
network.key === "test" ? <div className="obtain-region">
<section>{locale.deposit_faucet}</section>
<section>
<Button
text={locale.deposit_faucet_button}
block={true}
onClick={()=>{
const url = `https://test-faucet.iyouchain.com?address=${current}&locale=${locale.key}`;
if(extensionizer.storage){
popup.messageToBackground("redirect",{url:url});
}
else{
window.open(url);
}
}}
/>
</section>
</div> : null
}
</div>
)
}
} |
/**
 * Decide whether the given request should be rejected with HTTP 403.
 * Placeholder implementation: no request is ever rejected.
 *
 * @param {object} request - incoming request to inspect (currently unused)
 * @returns {boolean} true when a 403 should be returned, false otherwise
 */
function shouldReturn403(request) {
    // TODO: inspect headers / method / auth state and return true to deny access.
    return false;
}
<filename>app/src/block/schemas/properties/property-types/title-property.schema.ts
import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose';
import { Document } from 'mongoose';
import { RichTextSchema, RichText } from '../../common/rich-text.schema';
// Mongoose subdocument schema for a "title" property: an ordered list of
// rich-text fragments (see RichTextSchema).
@Schema()
export class TitleProperty {
  // _id: false stops Mongoose from adding an _id to each rich-text fragment.
  @Prop({ _id: false, type: [RichTextSchema] })
  title: RichText[];
}
export const TitlePropertySchema = SchemaFactory.createForClass(TitleProperty);
// Hydrated document type for reading/writing TitleProperty through Mongoose.
export type TitlePropertyDocument = TitleProperty & Document;
|
def validate_data(data, schema):
    """Check that *data* matches the primitive type named by ``schema["type"]``.

    Supported schema types are "integer", "boolean" and "number"; any other
    type (or an unsupported one) yields False, matching the original contract.

    Fix: ``bool`` is a subclass of ``int`` in Python, so ``True``/``False``
    previously validated as "integer" and "number". JSON-schema-style
    semantics treat booleans as a distinct type, so they are excluded here.

    Args:
        data: the value to validate.
        schema: mapping with a "type" key naming the expected primitive type.

    Returns:
        True when *data* matches the schema type, False otherwise.
    """
    schema_type = schema["type"]
    if schema_type == "integer":
        # Exclude bool explicitly (bool is an int subclass).
        return isinstance(data, int) and not isinstance(data, bool)
    elif schema_type == "boolean":
        return isinstance(data, bool)
    elif schema_type == "number":
        return isinstance(data, (int, float)) and not isinstance(data, bool)
    else:
        return False  # Unsupported / invalid schema type
#include "ClientOperate.h"
#include <QFileInfo>
// Construct the client worker and set up its socket/timer machinery.
ClientOperate::ClientOperate(QObject *parent)
    : QObject(parent)
{
    initOperate();
}
// Tear down: abort the socket connection and release any open file.
ClientOperate::~ClientOperate()
{
    doDisconnect();
}
// Thread-safe getter for the path of the file to transfer.
QString ClientOperate::getFilePath() const
{
    QMutexLocker locker(&dataMutex);
    return filePath;
}
// Thread-safe setter for the path of the file to transfer.
void ClientOperate::setFilePath(const QString &path)
{
    QMutexLocker locker(&dataMutex);
    filePath=path;
}
// Thread-safe getter for the cached connection state.
bool ClientOperate::isConnected() const
{
    QMutexLocker locker(&dataMutex);
    return connectState;
}
// Thread-safe setter for the cached connection state.
void ClientOperate::setConnected(bool connected)
{
    QMutexLocker locker(&dataMutex);
    connectState=connected;
}
// Initiate a TCP connection to address:port, but only if the socket is fully
// disconnected; otherwise just log the unexpected state.
void ClientOperate::connectTcp(const QString &address, quint16 port)
{
    if(socket->state()==QAbstractSocket::UnconnectedState){
        // Connect to the server (completion is reported via the connected() signal).
        socket->connectToHost(QHostAddress(address),port);
    }else{
        emit logMessage("socket->state() != QAbstractSocket::UnconnectedState");
    }
}
// Public slot: drop the server connection (delegates to doDisconnect()).
void ClientOperate::disconnectTcp()
{
    doDisconnect();
}
// Begin a file transfer: open the file named by getFilePath(), then send a
// "start" frame (type 0x01) whose payload is the 4-byte big-endian file size
// followed by the UTF-8 file name. Data transmission starts only once the
// server acknowledges (handled in operateReceiveData()).
void ClientOperate::startFileTransfer()
{
    // Release any file left open by a previous attempt.
    doCloseFile();
    if(!socket->isValid())
        return;
    const QString file_path=getFilePath();
    // Reject empty or non-existent paths.
    if(file_path.isEmpty() || !QFile::exists(file_path)){
        emit logMessage("无效的文件路径"+file_path);
        return;
    }
    file=new QFile(this);
    file->setFileName(file_path);
    // Open failed.
    if(!file->open(QIODevice::ReadOnly)){
        doCloseFile();
        emit logMessage("打开文件失败"+file_path);
        return;
    }
    sendSize=0;
    fileSize=file->size();
    if(fileSize<0)
        fileSize=0;
    // Encode the size as 4 bytes, most significant byte first.
    // Fix: this previously read "data_size>>0%0x100" etc.; because % binds
    // tighter than >>, the "%0x100" was a no-op and the code only produced the
    // right bytes by accident through char truncation. The masks are explicit now.
    char file_size[4]={0};
    const quint64 data_size=fileSize; // signed-to-unsigned; sizes >4GB are truncated by the 4-byte field
    file_size[3]=char((data_size>>0)&0xFF);
    file_size[2]=char((data_size>>8)&0xFF);
    file_size[1]=char((data_size>>16)&0xFF);
    file_size[0]=char((data_size>>24)&0xFF);
    // Send size + name to the server, then wait for the 0x01 acknowledgement.
    QFileInfo info(file_path);
    sendData(0x01,QByteArray(file_size,4)+info.fileName().toUtf8());
}
// Abort an in-progress transfer locally, then tell the server to stop
// (frame type 0x04, empty payload).
void ClientOperate::cancelFileTransfer()
{
    // Stop the timer and close the file.
    doCancel();
    // Send the "cancel transfer" command.
    sendData(0x04,QByteArray());
}
// One-time setup: create the socket and the pacing timer, and wire their signals.
void ClientOperate::initOperate()
{
    socket=new QTcpSocket(this);
    // readyRead fires whenever data arrives.
    connect(socket,&QTcpSocket::readyRead,[this]{
        // Nothing readable -> return.
        if(socket->bytesAvailable()<=0)
            return;
        // Read everything available and feed it to the frame parser.
        operateReceiveData(socket->readAll());
    });
    // Connection state changes.
    connect(socket,&QTcpSocket::connected,[this]{
        setConnected(true);
        emit connectStateChanged(true);
        emit logMessage(QString("已连接服务器 [%1:%2]")
                        .arg(socket->peerAddress().toString())
                        .arg(socket->peerPort()));
    });
    connect(socket,&QTcpSocket::disconnected,[this]{
        setConnected(false);
        emit connectStateChanged(false);
        // NOTE(review): peerAddress()/peerPort() may already be cleared once
        // the socket has disconnected — confirm the logged endpoint is useful here.
        emit logMessage(QString("与服务器连接已断开 [%1:%2]")
                        .arg(socket->peerAddress().toString())
                        .arg(socket->peerPort()));
    });
    timer=new QTimer(this);
    // The timer paces chunked transmission of the file data.
    connect(timer,&QTimer::timeout,[this]{
        if(!socket->isValid()){
            doCancel();
            emit logMessage("Socket不可操作,发送终止");
            return;
        }
        if(!file||!file->isOpen()){
            doCancel();
            emit logMessage("文件操作失败,发送终止");
            return;
        }
        // Read up to 4 KiB and send it as one type-0x02 frame.
        const qint64 read_size=file->read(fileBuffer,4096);
        //socket->write(fileBuffer,read_size);
        sendFile(fileBuffer,read_size);
        sendSize+=read_size;
        file->seek(sendSize);
        if(!socket->waitForBytesWritten()){
            doCancel();
            emit logMessage("文件发送超时,发送终止");
            return;
        }
        // Avoid division by zero for empty files.
        if(fileSize>0){
            emit progressChanged(sendSize*100/fileSize);
        }
        if(sendSize>=fileSize){
            doCancel();
            emit logMessage("文件发送完成");
            emit progressChanged(100);
            // Tell the server the transfer is finished (type 0x03).
            sendData(0x03,QByteArray());
            return;
        }
    });
}
// Abort the socket connection immediately and release resources,
// including any open file.
void ClientOperate::doDisconnect()
{
    // abort() drops the connection at once and discards pending data.
    socket->abort();
    doCloseFile();
}
// Close and delete the QFile if one is open. Safe to call repeatedly.
void ClientOperate::doCloseFile()
{
    if(file){
        file->close();
        delete file;
        file=nullptr;
    }
}
// Stop the pacing timer and release the file handle, aborting any
// transfer currently in flight.
void ClientOperate::doCancel()
{
    timer->stop();
    // doCloseFile() already checks for a null file, so no guard is needed here.
    doCloseFile();
}
// Send one protocol frame of the given type with the given payload.
//
// Wire protocol (no checksum field):
//   frame = head(4) + length(2) + type(1) + data(N) + tail(2)
//   head:   4 fixed bytes 0x0F 0xF0 0x00 0xFF
//   length: 2-byte payload length, big-endian (arr[4]*0x100 + arr[5])
//   type:   1 byte
//     - 0x01 start of transfer; payload = 4-byte file size + UTF-8 file name
//            (size encoded big-endian, like the length field)
//     - 0x02 file data
//     - 0x03 transfer finished
//     - 0x04 cancel transfer
//     (the server acknowledges 0x01 and 0x03 by echoing the same type code
//      with no payload)
//   tail:   2 fixed bytes 0x0D 0x0A
void ClientOperate::sendData(char type,const QByteArray &data)
{
    if(!socket->isValid())
        return;
    frameHead[6]=type;
    const quint64 data_size=data.count();
    frameHead[5]=data_size%0x100;
    frameHead[4]=data_size/0x100;
    // Write head + payload + tail.
    socket->write(frameHead,7);
    socket->write(data);
    socket->write(frameTail,2);
}
// Send one chunk of file content as a type-0x02 frame
// (see sendData() for the frame layout).
void ClientOperate::sendFile(const char *data, int size)
{
    if(!socket->isValid())
        return;
    frameHead[6]=(char)0x02;
    const quint64 data_size=size;
    frameHead[5]=data_size%0x100;
    frameHead[4]=data_size/0x100;
    // Write head + payload + tail.
    socket->write(frameHead,7);
    socket->write(data,size);
    socket->write(frameTail,2);
}
// Accumulate incoming bytes and parse complete frames (see sendData() for the
// frame layout), dispatching on the frame type.
void ClientOperate::operateReceiveData(const QByteArray &data)
{
    // First 4 bytes of frameHead form the fixed frame header.
    static QByteArray frame_head=QByteArray(frameHead,4);
    // Simple buffering: append everything and parse from the front.
    dataTemp+=data;
    // Frame-parsing loop.
    while(true){
        // Resynchronize: drop bytes until the buffer starts with the header.
        while(!dataTemp.startsWith(frame_head)&&dataTemp.size()>4){
            dataTemp.remove(0,1); // drop one byte from the left
        }
        // Shorter than the minimum frame (7-byte head section + 2-byte tail).
        if(dataTemp.size()<7+2)
            return;
        // Payload length, big-endian. NOTE(review): the value is not sanity-checked.
        const int data_size=uchar(dataTemp[4])*0x100+uchar(dataTemp[5]);
        if(dataTemp.size()<7+2+data_size)
            return;
        // Tail mismatch -> corrupt data; discard the whole buffer (no checksum).
        if(memcmp(dataTemp.constData()+7+data_size,frameTail,2)!=0){
            dataTemp.clear();
            return;
        }
        // Frame type.
        const char type=dataTemp[6];
        switch(type)
        {
        case 0x01: // server acknowledged transfer start -> begin sending chunks
            timer->start(0);
            emit logMessage("服务器已准备好接收数据,开始发送"+getFilePath());
            break;
        case 0x03: // server acknowledged transfer completion
        {
            // Payload byte: 1 = success, 0 = failure.
            const bool result=(dataTemp[7]==(char)0x01);
            emit logMessage(QString("服务器文件接收完毕,发送")+(result?"成功":"失败"));
        }
            break;
        case 0x04: // server cancelled the transfer
            doCancel();
            emit logMessage("服务器取消发送,发送终止");
            break;
        default: break;
        }
        // Drop the frame just processed.
        dataTemp.remove(0,7+2+data_size);
    }
}
|
def gcd(x, y):
    """Return the greatest common divisor of x and y (recursive Euclid).

    Args:
        x: first non-negative integer.
        y: second non-negative integer.

    Returns:
        The greatest common divisor; ``gcd(0, y) == y``.
    """
    if x == 0:
        return y
    return gcd(y % x, x)


# Driver code.
# Fix: the original called gcd(x, y) with x and y never defined, raising
# NameError at import time. Define sample values and guard the demo so that
# importing this module has no side effects.
if __name__ == "__main__":
    x, y = 12, 18
    print(gcd(x, y))
/// Simple arithmetic helpers.
class Calculator {
    /// Returns the sum of all values in `numbers` (0 for an empty array).
    static func total(numbers: [Int]) -> Int {
        var sum = 0
        for value in numbers {
            sum += value
        }
        return sum
    }
}

let arr = [1, 2, 3, 4, 5]
let total = Calculator.total(numbers: arr)
print("Total: \(total)")
#include <iostream>
// Function to print the possible numbers combination
// Prints `num` lines; each line shows the current contents of an n-element
// array that starts as 1..n and is advanced by a lexicographic
// "next combination" step between prints.
//
// NOTE(review): the roles of `n` and `num` look swapped relative to a
// textbook k-combination generator — the bound check uses `num - n + m`
// (negative for most m when n > num) and all n elements are printed rather
// than `num` of them. Verify the intended output against the caller before
// relying on this function.
void printCombination(int n, int num)
{
    int nums[n]; // variable-length array: GCC/Clang extension, not standard C++
    // Fill the array with the numbers from 1 to n
    for (int i = 0; i < n; i++)
        nums[i] = i + 1;
    // Print all possible numbers combination
    for (int i = 0; i < num; i++) {
        for (int j = 0; j < n; j++)
            std::cout << nums[j] << " ";
        std::cout << std::endl;
        // Generate next combination: find the rightmost position that can
        // still be incremented...
        int m = n - 1;
        while (m >= 0 && nums[m] == num - n + m)
            m--;
        if (m >= 0) {
            nums[m]++;
            // ...then reset everything to its right to an increasing run.
            for (int k = m + 1; k < n; k++)
                nums[k] = nums[k - 1] + 1;
        }
    }
}
// Demo entry point: print combinations for a fixed example input.
int main()
{
    const int elementCount = 5;
    const int combinationSize = 3;
    printCombination(elementCount, combinationSize);
    return 0;
}
<gh_stars>0
import React from 'react';
import PropTypes from 'prop-types';
import HtmlToReact, { Parser } from 'html-to-react';
import Barcode from 'react-barcode';
import {
buildTemplate,
} from '../../../utils';
class ComponentToPrint extends React.Component {
static propTypes = {
dataSource: PropTypes.object.isRequired,
template: PropTypes.string.isRequired,
};
constructor(props) {
super(props);
const processNodeDefinitions = new HtmlToReact.ProcessNodeDefinitions(React);
this.rules = [
{
replaceChildren: true,
shouldProcessNode: node => node.name === 'barcode',
processNode: (node, children) => (<Barcode value={children[0] ? children[0].trim() : ' '} />),
},
{
shouldProcessNode: () => true,
processNode: processNodeDefinitions.processDefaultNode,
}
];
this.parser = new Parser();
this.template = buildTemplate(props.template);
}
render() {
const {
dataSource,
} = this.props;
const componentStr = this.template(dataSource);
return this.parser.parseWithInstructions(componentStr, () => true, this.rules) || null;
}
}
export default ComponentToPrint;
|
<filename>src/main/java/com/infinities/skyport/proxy/network/SubnetProxy.java
package com.infinities.skyport.proxy.network;
import java.io.Serializable;
import java.util.Map;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.xml.bind.annotation.XmlTransient;
import org.dasein.cloud.network.AllocationPool;
import org.dasein.cloud.network.IPVersion;
import org.dasein.cloud.network.RawAddress;
import org.dasein.cloud.network.Subnet;
import org.dasein.cloud.network.SubnetState;
import com.infinities.skyport.distributed.DistributedAtomicLong;
public class SubnetProxy extends Subnet implements Serializable {
/**
*
*/
private static final long serialVersionUID = 1L;
@XmlTransient
private volatile Subnet subnet;
private String configName;
private String configId;
private final DistributedAtomicLong isLocked;
public SubnetProxy(Subnet subnet, String configName, String configId, DistributedAtomicLong isLocked) {
super();
this.subnet = subnet;
this.configName = configName;
this.configId = configId;
this.isLocked = isLocked;
}
public Subnet getSubnet() {
return subnet;
}
public void setSubnet(Subnet subnet) {
this.subnet = subnet;
}
@Override
public @Nonnull AllocationPool[] getAllocationPools() {
return getSubnet().getAllocationPools();
}
@Override
public @Nonnegative int getAvailableIpAddresses() {
return getSubnet().getAvailableIpAddresses();
}
@Override
public String getCidr() {
return getSubnet().getCidr();
}
@Override
public SubnetState getCurrentState() {
return getSubnet().getCurrentState();
}
@Override
public @Nonnull String getDescription() {
return getSubnet().getDescription();
}
@Override
public @Nullable RawAddress getGateway() {
return getSubnet().getGateway();
}
@Override
public @Nonnull String getName() {
return getSubnet().getName();
}
@Override
public @Nullable String getProviderDataCenterId() {
return getSubnet().getProviderDataCenterId();
}
@Override
public @Nonnull String getProviderOwnerId() {
return getSubnet().getProviderOwnerId();
}
@Override
public @Nonnull String getProviderRegionId() {
return getSubnet().getProviderRegionId();
}
@Override
public @Nonnull String getProviderSubnetId() {
return getSubnet().getProviderSubnetId();
}
@Override
public @Nonnull String getProviderVlanId() {
return getSubnet().getProviderVlanId();
}
@Override
public @Nonnull IPVersion[] getSupportedTraffic() {
return getSubnet().getSupportedTraffic();
}
@Override
public @Nonnull Subnet havingAllocationPools(@Nonnull AllocationPool... pools) {
return getSubnet().havingAllocationPools(pools);
}
@Override
public @Nonnull Map<String, String> getTags() {
return getSubnet().getTags();
}
@Override
public void setTag(@Nonnull String key, @Nonnull String value) {
getSubnet().setTag(key, value);
}
@Override
public void setTags(@Nonnull Map<String, String> tags) {
getSubnet().setTags(tags);
}
@Override
public Subnet supportingTraffic(@Nonnull IPVersion... traffic) {
return getSubnet().supportingTraffic(traffic);
}
@Override
public @Nonnull Subnet usingGateway(@Nonnull RawAddress gatewayIp) {
return getSubnet().usingGateway(gatewayIp);
}
@Override
public @Nonnull Subnet withAvailableIpAddresses(int count) {
return getSubnet().withAvailableIpAddresses(count);
}
public String getConfigName() {
return configName;
}
public void setConfigName(String configName) {
this.configName = configName;
}
public String getConfigId() {
return configId;
}
public void setConfigId(String configId) {
this.configId = configId;
}
public DistributedAtomicLong getIsLocked() {
return isLocked;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((configId == null) ? 0 : configId.hashCode());
result = prime * result + ((configName == null) ? 0 : configName.hashCode());
result = prime * result + ((subnet == null) ? 0 : subnet.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
SubnetProxy other = (SubnetProxy) obj;
if (configId == null) {
if (other.configId != null)
return false;
} else if (!configId.equals(other.configId))
return false;
if (configName == null) {
if (other.configName != null)
return false;
} else if (!configName.equals(other.configName))
return false;
if (subnet == null) {
if (other.subnet != null)
return false;
} else if (!subnet.equals(other.subnet))
return false;
return true;
}
}
|
<reponame>michaelwoodruff/bs-grrrrid<filename>grid.js
// Bookmarklet: defines window.grrrridlet(), which toggles a 12-column layout
// grid overlay on the page. Opacity and container width are adjustable via
// shift+arrow keys or shift(+ctrl)+mousewheel; Esc removes the overlay.
(function() {
  (function() {
    var initGrrrridlet = function() {
      return (window.grrrridlet = function() {
        // Fix: $grid was previously assigned without being declared
        // (an implicit global); it is now in the var list.
        var el, $body, $style, $grid, $grrrrid, $grrrrid_container,
          $counter, $grrrrid_counter, class_list, opacity, viewport, width, hide_counter, delta;
        el = document;
        $body = el.body;
        if ($body.querySelector(".grrrrid") !== null) {
          // Toggle off: remove the overlay, its stylesheet and the counter.
          // Fix: the counter span was previously left behind here, leaking
          // one <span> per on/off cycle.
          $body.removeChild(el.querySelector(".grrrrid"));
          $body.removeChild(el.querySelector("#grrrrid-style"));
          $body.removeChild(el.querySelector(".grrrrid-counter"));
        } else {
          $grid = el.createElement("div");
          $grid.setAttribute("class", "grrrrid");
          $grid.innerHTML = '<div id="grrrrid-container" class="grrrrid-container"><div class="grrrrid-row"><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div><div class="grrrrid-col"><div></div></div></div></div>';
          $counter = el.createElement("span");
          $counter.setAttribute("class", "grrrrid-counter");
          $style = el.createElement("style");
          $style.setAttribute("id", "grrrrid-style");
          $style.textContent = ".grrrrid-counter{position:fixed;z-index:2147483647;top:10px;right:10px;font:1em/1 monospace;background:#FFF;padding:2px;color:#000;visibility:hidden;}.grrrrid *{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.grrrrid{position:fixed;top:0;left:0;z-index:2147483646;width:100%;height:100%;margin:0 auto;opacity:.5}.grrrrid div{height:100%}.grrrrid-container,.grrrrid-container-fluid{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}@media (min-width:768px){.grrrrid-container{width:750px}}@media (min-width:992px){.grrrrid-container{width:970px}}@media (min-width:1200px){.grrrrid-container{width:1170px}}.grrrrid-row{margin-left:-15px;margin-right:-15px}.grrrrid-col{float:left;width:8.33333333%;position:relative;min-height:1px;padding-left:15px;padding-right:15px}.grrrrid-col div{background:#FFA;height:100%}";
          $body.appendChild($style);
          $body.appendChild($grid);
          $body.appendChild($counter);
          $grrrrid = el.querySelector(".grrrrid");
          $grrrrid_container = el.querySelector(".grrrrid-container");
          $grrrrid_counter = el.querySelector(".grrrrid-counter");
          // Ensure focus
          function grab_focus() {
            $grrrrid.focus();
          }
          grab_focus();
          // Fix: the event is named "focus", not "onfocus" — the old name never fired.
          window.addEventListener("focus", grab_focus, false);
          // handle mousewheel events
          // IE9, Chrome, Safari, Opera
          $body.addEventListener("mousewheel", mousewheel_handler, false);
          // Firefox
          $body.addEventListener("DOMMouseScroll", mousewheel_handler, false);
          function mousewheel_handler(e) {
            if (e.shiftKey || e.altKey) {
              delta = Math.max(-1, Math.min(1, (e.wheelDelta || -e.detail)));
              opacity = parseFloat(getComputedStyle($grrrrid)["opacity"]);
              viewport = parseFloat($body.clientWidth);
              width = parseFloat(getComputedStyle($grrrrid_container)["width"]);
              if (e.shiftKey && e.ctrlKey) {
                if (delta > 0) {
                  // scroll up: widen the container
                  if (width > 0) {
                    $grrrrid_container.style.width = width + 5 + "px";
                    $grrrrid_counter.style.visibility = "visible";
                    $grrrrid_counter.textContent = getComputedStyle($grrrrid_container)["width"];
                  }
                } else {
                  // scroll down: narrow the container
                  if (width < viewport) {
                    $grrrrid_container.style.width = width - 5 + "px";
                    $grrrrid_counter.style.visibility = "visible";
                    $grrrrid_counter.textContent = getComputedStyle($grrrrid_container)["width"];
                  }
                }
                return false;
              }
              if (e.shiftKey) {
                if (delta > 0) {
                  // scroll up: increase overlay opacity
                  if (opacity < 1) {
                    $grrrrid.style.opacity = opacity + 0.1;
                  }
                } else {
                  // scroll down: decrease overlay opacity
                  if (opacity > 0) {
                    $grrrrid.style.opacity = opacity - 0.1;
                  }
                }
                return false;
              }
            }
            return false;
          }
          // handle key press events
          el.addEventListener("keyup", onkeyup, true);
          el.addEventListener("keydown", onkeydown, true);
          function onkeyup(e) {
            // Hide the width counter shortly after the last key release.
            hide_counter = setTimeout(function() {
              $grrrrid_counter.style.visibility = "hidden";
            }, 750)
            return false;
          }
          function onkeydown(e) {
            opacity = parseFloat(getComputedStyle($grrrrid)["opacity"]);
            viewport = parseFloat($body.clientWidth);
            width = parseFloat(getComputedStyle($grrrrid_container)["width"]);
            clearTimeout(hide_counter);
            if (e.keyCode === 37 && e.shiftKey) { // left
              if (width > 0) {
                $grrrrid_container.style.width = width - 5 + "px";
                $grrrrid_counter.style.visibility = "visible";
                $grrrrid_counter.textContent = getComputedStyle($grrrrid_container)["width"];
              }
              return false;
            }
            if (e.keyCode === 39 && e.shiftKey) { // right
              if (width < viewport) {
                $grrrrid_container.style.width = width + 5 + "px";
                $grrrrid_counter.style.visibility = "visible";
                $grrrrid_counter.textContent = getComputedStyle($grrrrid_container)["width"];
              }
              return false;
            }
            if (e.keyCode === 38 && e.shiftKey) { // up
              if (opacity < 1) {
                $grrrrid.style.opacity = opacity + 0.1;
              }
              return false;
            }
            if (e.keyCode === 40 && e.shiftKey) { // down
              if (opacity > 0) {
                $grrrrid.style.opacity = opacity - 0.1;
              }
              return false;
            }
            if (e.keyCode === 70 && e.shiftKey) { // reset and toggle fluid
              $grrrrid_counter.style.visibility = "hidden";
              if ($grrrrid_container.getAttribute('style') != null) {
                $grrrrid_container.removeAttribute('style');
              } else {
                class_list = el.querySelector("#grrrrid-container").classList;
                class_list.toggle("grrrrid-container");
                class_list.toggle("grrrrid-container-fluid");
              }
              return false;
            }
            if (e.keyCode === 27) { // exit
              // remove nodes
              $body.removeChild(el.querySelector(".grrrrid"));
              $body.removeChild(el.querySelector("#grrrrid-style"));
              // Fix: the counter has a class, not an id; the old "#grrrrid-counter"
              // selector returned null and removeChild(null) threw on exit.
              $body.removeChild(el.querySelector(".grrrrid-counter"));
              // remove event listeners
              el.removeEventListener("keyup", onkeyup, true);
              el.removeEventListener("keydown", onkeydown, true);
              // Fix: matches the corrected "focus" registration above.
              window.removeEventListener("focus", grab_focus, false);
              $body.removeEventListener("mousewheel", mousewheel_handler, false);
              $body.removeEventListener("DOMMouseScroll", mousewheel_handler, false);
              return false;
            }
          }
        }
      })();
    };
    return initGrrrridlet();
  })();
}).call(this);
|
'use strict';
// Integration tests for the /categories and /products CRUD routes, run
// against an in-memory Express app via @code-fellows/supergoose.
const express = require('express');
const server = express();
const supergoose = require('@code-fellows/supergoose');
const mockRequest = supergoose(server);
const route = require('../routes/v1.js');
server.use(express.urlencoded({ extended: false }));
server.use(express.json());
server.use('/api/v1', route);
describe('categories and products', () => {
  // Shared id captured from each POST and reused by the get/put/delete tests.
  let theId = null;
  it('get /categories', () => {
    return mockRequest
      .get('/api/v1/categories')
      .then(results => {
        expect(results.status).toBe(200);
      });
  });
  it('post /categories', () => {
    let testObj = { 'name': 'test', description: 'test' };
    return mockRequest
      .post('/api/v1/categories')
      .send(testObj)
      .then(results => {
        theId = results.body._id;
        expect(results.status).toBe(201);
        Object.keys(testObj).forEach(key => {
          expect(results.body[key]).toEqual(testObj[key]);
        });
      });
  });
  it('get /categories/:id', () => {
    return mockRequest
      .get(`/api/v1/categories/${theId}`)
      .then(results => {
        expect(results.status).toBe(200);
      });
  });
  it('put /categories/:id', () => {
    let testObj = { 'name': 'test', description: 'test' };
    // Fix: .put() takes only the URL; the payload goes through .send().
    // The stray second argument was ignored by supertest.
    return mockRequest
      .put(`/api/v1/categories/${theId}`)
      .send(testObj)
      .then(results => {
        expect(results.status).toBe(200);
        Object.keys(testObj).forEach(key => {
          expect(results.body[key]).toEqual(testObj[key]);
        });
      });
  });
  it('delete /categories/:id', () => {
    return mockRequest
      .delete(`/api/v1/categories/${theId}`)
      .then(results => {
        expect(results.status).toBe(200);
      });
  });
  // Fix: this test existed twice verbatim; the duplicate has been removed.
  it('get /products', () => {
    return mockRequest
      .get('/api/v1/products')
      .then(results => {
        expect(results.status).toBe(200);
      });
  });
  it('post /products', () => {
    let testObj = { 'category': 'test', 'name': 'test', description: 'test' };
    return mockRequest
      .post('/api/v1/products')
      .send(testObj)
      .then(results => {
        theId = results.body._id;
        expect(results.status).toBe(201);
        Object.keys(testObj).forEach(key => {
          expect(results.body[key]).toEqual(testObj[key]);
        });
      });
  });
  it('get /products/:id', () => {
    return mockRequest
      .get(`/api/v1/products/${theId}`)
      .then(results => {
        expect(results.status).toBe(200);
      });
  });
  it('put /products/:id', () => {
    let testObj = { 'category': 'test', 'name': 'test', description: 'test' };
    return mockRequest
      .put(`/api/v1/products/${theId}`)
      .send(testObj)
      .then(results => {
        expect(results.status).toBe(200);
        Object.keys(testObj).forEach(key => {
          expect(results.body[key]).toEqual(testObj[key]);
        });
      });
  });
  it('delete /api/v1/products/:id', () => {
    return mockRequest
      .delete(`/api/v1/products/${theId}`)
      .then(results => {
        expect(results.status).toBe(200);
      });
  });
  it('get wrong', () => {
    return mockRequest
      .get('/api/v1/wrong')
      .then(results => {
        expect(results.status).toBe(500);
      });
  });
});
|
#!/bin/bash
# loads the genemania identifier lookup and network databases
# Quote the sourced path and $TOP so paths containing spaces work, and bail
# out if the cd fails instead of silently running python in the wrong dir.
source "$( dirname "${BASH_SOURCE[0]}" )/setenv.sh"
cd "$TOP" || exit 1
python -m app.genemania --id
|
#!/bin/bash
# Emits a Play-framework evolution (!Ups / !Downs) by diffing two
# schema-only pg_dump outputs with apgdiff.
echo "# --- !Ups"
# NOTE(review): new.sql is dumped from 'brief' while original.sql comes from
# 'newbrief' — the database-to-filename mapping looks inverted; confirm which
# database is the current schema and which is the target.
PGPASSWORD=postgres pg_dump --schema-only --no-owner -U postgres -h localhost -s -f new.sql brief
PGPASSWORD=postgres pg_dump --schema-only --no-owner -U postgres -h localhost -s -f original.sql newbrief 2>/dev/null
# JAVA_TOOL_OPTIONS is unset so the JVM banner does not pollute the SQL output.
unset JAVA_TOOL_OPTIONS && java -jar apgdiff-2.4.jar --ignore-start-with original.sql new.sql
echo "# --- !Downs"
unset JAVA_TOOL_OPTIONS && java -jar apgdiff-2.4.jar --ignore-start-with new.sql original.sql
|
<reponame>ngCommerce/core<filename>src/services/home/home.service.js
import { Injectable, Inject } from "@angular/core";
import { Http } from '@angular/http';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/toPromise';
import { CorService } from "../../core.service";
import { API_URL } from "../../models/core.model";
/**
 * Service wrapping the home-screen HTTP endpoints. All remote calls share
 * the same shape: authorized GET, JSON-decoded, errors funneled through
 * handleError.
 */
export class HomeService {
    constructor(apiURL, http, corService) {
        this.http = http;
        this.corService = corService;
        this._apiURL = apiURL;
    }
    /**
     * Shared helper: issue an authorized GET against `path` (relative to the
     * API base URL) and resolve with the parsed JSON body.
     */
    _getJson(path) {
        let headers = this.corService.createAuthorizationHeader();
        return this.http.get(this._apiURL + path, { headers: headers })
            .toPromise()
            .then(response => response.json())
            .catch(this.handleError);
    }
    /** Fetch the home-screen payload. */
    getHome() {
        return this._getJson('homes/');
    }
    /** Fetch the seller home payload for the given shop. */
    getHomeSeller(shopId) {
        return this._getJson('homeseller/' + shopId);
    }
    /** Fetch the full product listing for the given name. */
    seeAllProduct(name) {
        return this._getJson('seeallproduct/' + name);
    }
    /** Fetch the full shop listing for the given name. */
    seeAllShop(name) {
        return this._getJson('seeallshop/' + name);
    }
    /** Read the locally stored last-visit list; empty array when absent. */
    getLastVisit() {
        const stored = window.localStorage.getItem('gLastVisit');
        return stored ? JSON.parse(stored) : [];
    }
    /** Normalize any error into a rejected promise carrying its message. */
    handleError(error) {
        return Promise.reject(error.message || error);
    }
}
// Angular DI metadata emitted by the compiler (JIT/AoT support).
HomeService.decorators = [
    { type: Injectable },
];
/** @nocollapse */
// Constructor parameter metadata: API base URL (injected via the API_URL
// token), the HTTP client, and the shared CorService.
HomeService.ctorParameters = () => [
    { type: String, decorators: [{ type: Inject, args: [API_URL,] },] },
    { type: Http, },
    { type: CorService, },
];
//# sourceMappingURL=home.service.js.map |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.incubator.net.resource.label;
import com.google.common.annotations.Beta;
import org.onosproject.net.DeviceId;
/**
 * Admin service for managing label resource pools, both per-device and global.
 */
@Beta
public interface LabelResourceAdminService {

    /**
     * Creates the label resource pool of a specific device id, ranging from
     * the begin label to the end label.
     *
     * @param deviceId device identifier
     * @param beginLabel represents for the first label id in the range of label
     *            pool
     * @param endLabel represents for the last label id in the range of label
     *            pool
     * @return success or fail
     */
    boolean createDevicePool(DeviceId deviceId, LabelResourceId beginLabel,
                             LabelResourceId endLabel);

    /**
     * Creates the only global label resource pool, ranging from the begin
     * label to the end label.
     *
     * @param beginLabel represents for the first label id in the range of label
     *            pool
     * @param endLabel represents for the last label id in the range of label
     *            pool
     * @return success or fail
     */
    boolean createGlobalPool(LabelResourceId beginLabel,
                             LabelResourceId endLabel);

    /**
     * Destroys the label resource pool of a specific device id.
     *
     * @param deviceId device identifier
     * @return success or fail
     */
    boolean destroyDevicePool(DeviceId deviceId);

    /**
     * Destroys the global label resource pool.
     *
     * @return success or fail
     */
    boolean destroyGlobalPool();
}
|
docker run -d -p 127.0.0.1:5000:5000 covid-microservice-app |
/**
* Certificate Transparency Utilities
*
* By <NAME> <<EMAIL>>
* @module ctutils
*/
import CertHelper from './CertHelper';
import CompactMerkleTree from './CompactMerkleTree';
import CTLog from './CTLog';
import CTLogHelper from './CTLogHelper';
import CTMonitor from './CTMonitor';
import { setFetch, getFetch, setWebCrypto, getWebCrypto } from './Engines';
import { Version, LogEntryType, MerkleLeafType, SignatureType } from './Enums';
import MerkleTreeLeaf from './MerkleTreeLeaf';
import PreCert from './PreCert';
import SignedCertificateTimestamp from './SignedCertificateTimestamp';
import SignedTreeHead from './SignedTreeHead';
import TimestampedEntry from './TimestampedEntry';

// Re-export the whole public API surface as one named-export list.
export {
    CertHelper,
    CompactMerkleTree,
    CTLog,
    CTLogHelper,
    CTMonitor,
    setFetch,
    getFetch,
    setWebCrypto,
    getWebCrypto,
    Version,
    LogEntryType,
    MerkleLeafType,
    SignatureType,
    MerkleTreeLeaf,
    PreCert,
    SignedCertificateTimestamp,
    SignedTreeHead,
    TimestampedEntry,
};
|
#!/usr/bin/env bash
# Build a conda environment for every (torchaudio, python) version pair
# declared in common.sh and install the matching torchaudio release into it.
set -e
# Run from the script's own directory so relative paths resolve.
cd "$( dirname "${BASH_SOURCE[0]}" )"
. "common.sh"
install_conda
init_conda
# Install torchaudio environments
for torchaudio in "${TORCHAUDIO_VERSIONS[@]}" ; do
    for python in "${PYTHON_VERSIONS[@]}" ; do
        create_env "${torchaudio}" "${python}"
        activate_env "${torchaudio}" "${python}"
        install_release "${torchaudio}"
    done
done
|
def visualize_camera_image(event, subarray):
    """Build a FastCameraDisplay showing raw (R0) data for one telescope.

    NOTE(review): indexes ``event.r0.tel[0]`` — ctapipe keys ``tel`` by
    telescope id, so this assumes a telescope with id 0 exists; confirm
    against the subarray in use.  Likewise ``image[0]`` presumably selects
    the first channel/sample of the R0 waveform — verify the container
    layout for this data source.
    """
    # Imported lazily so bokeh is only required when visualization is used.
    from ctapipe.visualization.bokeh import FastCameraDisplay
    # Create a FastCameraDisplay object
    camera_display = FastCameraDisplay(subarray=subarray)
    # Add the event data to the camera display
    camera_display.image = event.r0.tel[0].image[0]
    # Add additional visualization settings if needed
    # camera_display.add_colorbar()
    # Return the visualization of the camera image
    return camera_display
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.logic.manage.freegen.table.json;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.NoSuchFileException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Scanner;
import java.util.Set;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import org.dbflute.exception.DfIllegalPropertySettingException;
import org.dbflute.helper.message.ExceptionMessageBuilder;
import org.dbflute.logic.manage.freegen.DfFreeGenResource;
import org.dbflute.util.DfCollectionUtil;
import org.dbflute.util.Srl;
/**
* @author jflute
* @author p1us2er0
*/
public class DfJsonFreeAgent {
// ===================================================================================
// Decode JSON
// ===========
public <RESULT> RESULT decodeJsonMap(String requestName, String resourceFile) throws DfJsonUrlCannotRequestException {
final String json;
if (resourceFile.startsWith("http://")) { // JSON response
json = requestJsonResponse(resourceFile);
} else { // relative path to local file
try (Scanner scanner = new Scanner(Paths.get(resourceFile), "UTF-8")) {
json = scanner.useDelimiter("\\Z").next();
} catch (NoSuchFileException e) {
throwJsonFileNotFoundException(requestName, resourceFile, e);
return null; // unreachable
} catch (IOException e) {
throwJsonFileCannotAccessException(requestName, resourceFile, e);
return null; // unreachable
}
}
return fromJson(requestName, resourceFile, json);
}
    /**
     * Parse JSON text by evaluating it with the JVM's JavaScript engine and
     * return the result converted to plain Java lists/maps.
     * A body that starts with neither '{' nor '[' is wrapped in braces, so a
     * bare "key: value" file still parses as a map.
     * NOTE(review): getEngineByName("javascript") relies on Nashorn, which was
     * removed in JDK 15 and returns null there — confirm the supported JDK range.
     */
    protected <RESULT> RESULT fromJson(String requestName, String resourceFile, String json) {
        final ScriptEngineManager manager = new ScriptEngineManager();
        final ScriptEngine engine = manager.getEngineByName("javascript");
        try {
            final String realExp;
            if (json.startsWith("{") || json.startsWith("[")) { // map, list style
                realExp = json;
            } else { // map omitted?
                realExp = "{" + json + "}";
            }
            engine.eval("var result = " + realExp);
        } catch (ScriptException e) {
            throwJsonParseFailureException(requestName, resourceFile, e);
        }
        @SuppressWarnings("unchecked")
        final RESULT result = (RESULT) engine.get("result");
        return filterJavaScriptObject(result);
    }
    /**
     * Recursively copy the script engine's object graph into plain Java
     * collections. Lists are rebuilt element-by-element; maps whose keys form
     * the sequence "0","1","2",... are converted to lists via challengeList()
     * (presumably how the engine represents JSON arrays); anything else is
     * returned as-is.
     */
    @SuppressWarnings("unchecked")
    protected <RESULT> RESULT filterJavaScriptObject(RESULT result) {
        if (result instanceof List<?>) {
            final List<Object> srcList = (List<Object>) result;
            final List<Object> destList = new ArrayList<Object>(srcList.size());
            for (Object element : srcList) {
                destList.add(filterJavaScriptObject(element));
            }
            return (RESULT) destList;
        } else if (result instanceof Map<?, ?>) {
            final Map<Object, Object> srcMap = (Map<Object, Object>) result;
            final List<Object> challengedList = challengeList(srcMap);
            if (challengedList != null) { // index-keyed map: treat as an array
                return (RESULT) filterJavaScriptObject(challengedList);
            } else {
                final Map<Object, Object> destMap = new LinkedHashMap<Object, Object>(srcMap.size());
                for (Entry<Object, Object> entry : srcMap.entrySet()) {
                    destMap.put(entry.getKey(), filterJavaScriptObject(entry.getValue()));
                }
                return (RESULT) destMap;
            }
        } else {
            return result;
        }
    }
    /**
     * If the map's keys are exactly the half-width number strings
     * "0","1","2",... in iteration order, return its values as a list;
     * otherwise return null (meaning: not an array-like map).
     */
    protected List<Object> challengeList(Map<Object, Object> map) {
        int index = 0;
        final Set<Object> keySet = map.keySet();
        for (Object key : keySet) {
            final String strKey = key.toString();
            if (Srl.isNumberHarfAll(strKey) && Integer.parseInt(strKey) == index) {
                ++index;
                continue;
            }
            return null; // any gap or non-numeric key disqualifies it
        }
        return new ArrayList<Object>(map.values());
    }
    // Throws when the local JSON file for a FreeGen request does not exist.
    protected void throwJsonFileNotFoundException(String requestName, String resourceFile, IOException cause) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("Not found the JSON file for FreeGen.");
        br.addItem("FreeGen Request");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resourceFile);
        final String msg = br.buildExceptionMessage();
        throw new IllegalStateException(msg, cause);
    }

    // Throws when the local JSON file exists but cannot be read.
    protected void throwJsonFileCannotAccessException(String requestName, String resourceFile, IOException cause) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("Cannot access the JSON file for FreeGen.");
        br.addItem("FreeGen Request");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resourceFile);
        final String msg = br.buildExceptionMessage();
        throw new IllegalStateException(msg, cause);
    }

    // Throws when the JSON text cannot be evaluated by the script engine.
    protected void throwJsonParseFailureException(String requestName, String resourceFile, Exception cause) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("Failed to parse the JSON file for FreeGen.");
        br.addItem("FreeGen Request");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resourceFile);
        final String msg = br.buildExceptionMessage();
        throw new IllegalStateException(msg, cause);
    }
// -----------------------------------------------------
// Request URL
// -----------
protected String requestJsonResponse(String resourceFile) {
BufferedReader reader = null;
try {
final URL url = new URL(resourceFile);
final URLConnection uc = url.openConnection();
final InputStream ins = uc.getInputStream();
reader = new BufferedReader(new InputStreamReader(ins, "UTF-8"));
final StringBuilder sb = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
sb.append(line).append("\n");
}
return sb.toString();
} catch (IOException e) {
throw new DfJsonUrlCannotRequestException("Failed to access to the URL: " + resourceFile, e);
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException ignored) {}
}
}
}
    /** Thrown when a JSON resource URL cannot be requested or read. */
    public static class DfJsonUrlCannotRequestException extends RuntimeException {

        private static final long serialVersionUID = 1L;

        public DfJsonUrlCannotRequestException(String msg) {
            super(msg);
        }

        public DfJsonUrlCannotRequestException(String msg, Throwable cause) {
            super(msg, cause);
        }
    }
// ===================================================================================
// Trace KeyList
// =============
    /**
     * Walk the decoded JSON root map along a "->"-separated key path and
     * collect a list of string keys. The first path element is a root-map key;
     * subsequent elements are either "map.*" navigations (map.keys, map.values,
     * map.&lt;key&gt;) or "list.*" terminals (list.elements, list.map.&lt;key&gt;).
     * @param requestName The FreeGen request name, for exception messages. (NotNull)
     * @param resource The FreeGen resource, for exception messages. (NotNull)
     * @param rootMap The decoded JSON root map to traverse. (NotNull)
     * @param keyPath The whole path expression, for exception messages. (NotNull)
     * @param pathList The already-split path elements. (NotNull)
     * @return The collected keys. (NotNull: an exception is thrown when none found)
     */
    public List<String> traceKeyList(String requestName, DfFreeGenResource resource, Map<String, Object> rootMap, String keyPath,
            List<String> pathList) {
        // e.g.
        //  keyPath = categories -> map.keys
        //  keyPath = categories -> map.values -> list.elements
        //  keyPath = categories -> map.values -> list.map.foo
        //  keyPath = categories -> map.foo -> map.keys
        List<String> keyList = null;
        Object current = null;
        for (String pathElement : pathList) {
            if (current == null) { // first element: look up in the root map
                current = rootMap.get(pathElement);
                if (current == null) {
                    throwRootMapKeyNotFoundException(requestName, resource, keyPath, pathElement);
                }
                continue;
            }
            if (pathElement.startsWith("map.")) {
                if (!(current instanceof Map<?, ?>)) {
                    if (current instanceof List<?> && ((List<?>) current).isEmpty()) {
                        current = DfCollectionUtil.emptyMap(); // if 'emptyKey: {}', empty List...
                    } else {
                        throwKeyPathExpectedMapButNotMapException(requestName, resource, keyPath, pathElement, current);
                    }
                }
                @SuppressWarnings("unchecked")
                final Map<String, Object> currentMap = (Map<String, Object>) current;
                if (pathElement.equals("map.keys")) { // found
                    keyList = new ArrayList<String>(currentMap.keySet());
                    break;
                } else if (pathElement.equals("map.values")) { // keep walking over the values
                    current = new ArrayList<Object>(currentMap.values());
                    continue;
                } else { // map.<key>: descend into the named entry
                    final String nextKey = Srl.substringFirstRear(pathElement, "map.");
                    current = currentMap.get(nextKey);
                    continue;
                }
            } else if (pathElement.startsWith("list.")) {
                if (!(current instanceof List<?>)) {
                    throwKeyPathExpectedListButNotListException(requestName, resource, keyPath, pathElement, current);
                }
                @SuppressWarnings("unchecked")
                final List<Object> currentList = (List<Object>) current;
                if (pathElement.equals("list.elements")) { // found
                    keyList = new ArrayList<String>();
                    for (Object element : currentList) {
                        if (!(element instanceof String)) {
                            throwKeyPathExpectedStringListButNotStringException(requestName, resource, keyPath, pathElement, currentList,
                                    element);
                        }
                        keyList.add((String) element);
                    }
                    break;
                } else if (pathElement.startsWith("list.map.")) { // found
                    final String elementKey = Srl.substringFirstRear(pathElement, "list.map.");
                    keyList = new ArrayList<String>();
                    for (Object element : currentList) {
                        if (!(element instanceof Map<?, ?>)) {
                            throwKeyPathExpectedMapListButNotMapException(requestName, resource, keyPath, pathElement, currentList, element);
                        }
                        @SuppressWarnings("unchecked")
                        final Map<String, Object> elementMap = (Map<String, Object>) element;
                        final String elementValue = (String) elementMap.get(elementKey);
                        if (elementValue != null) { // entries without the key are skipped
                            keyList.add(elementValue);
                        }
                    }
                    break;
                } else {
                    throwIllegalKeyPathElementException(requestName, resource, keyPath, pathElement);
                }
            } else {
                throwIllegalKeyPathElementException(requestName, resource, keyPath, pathElement);
            }
        }
        if (keyList == null) { // path never reached a terminal element
            String msg = "Not found the keys: keyPath=" + keyPath;
            throw new DfIllegalPropertySettingException(msg);
        }
        return keyList;
    }
    // Throws when the first key-path element is missing from the root map.
    protected void throwRootMapKeyNotFoundException(String requestName, DfFreeGenResource resource, String keyPath, String rootMapKey) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("Not found the key in the root map. (FreeGen)");
        br.addItem("Request Name");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resource.getResourceFile());
        br.addItem("keyPath");
        br.addElement(keyPath);
        br.addItem("RootMap Key");
        br.addElement(rootMapKey);
        final String msg = br.buildExceptionMessage();
        throw new DfIllegalPropertySettingException(msg);
    }

    // Throws when a "map.*" path element is applied to a non-map value.
    protected void throwKeyPathExpectedMapButNotMapException(String requestName, DfFreeGenResource resource, String keyPath,
            String targetPath, Object current) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("The key path expects map type but not map. (FreeGen)");
        br.addItem("Request Name");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resource.getResourceFile());
        br.addItem("keyPath");
        br.addElement(keyPath);
        br.addItem("Target Path Element");
        br.addElement(targetPath);
        br.addItem("Actual Object");
        br.addElement(current != null ? current.getClass().getName() : null);
        br.addElement(current);
        final String msg = br.buildExceptionMessage();
        throw new DfIllegalPropertySettingException(msg);
    }

    // Throws when a "list.*" path element is applied to a non-list value.
    protected void throwKeyPathExpectedListButNotListException(String requestName, DfFreeGenResource resource, String keyPath,
            String targetPath, Object current) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("The key path expects list type but not list. (FreeGen)");
        br.addItem("Request Name");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resource.getResourceFile());
        br.addItem("keyPath");
        br.addElement(keyPath);
        br.addItem("Target Path Element");
        br.addElement(targetPath);
        br.addItem("Actual Object");
        br.addElement(current != null ? current.getClass().getName() : null);
        br.addElement(current);
        final String msg = br.buildExceptionMessage();
        throw new DfIllegalPropertySettingException(msg);
    }

    // Throws when "list.elements" meets a non-string list element.
    protected void throwKeyPathExpectedStringListButNotStringException(String requestName, DfFreeGenResource resource, String keyPath,
            String targetPath, List<Object> currentList, Object element) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("The key path expects string type in list but not string. (FreeGen)");
        br.addItem("Request Name");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resource.getResourceFile());
        br.addItem("keyPath");
        br.addElement(keyPath);
        br.addItem("Target Path Element");
        br.addElement(targetPath);
        br.addItem("List Object");
        br.addElement(currentList != null ? currentList.getClass().getName() : null);
        br.addElement(currentList);
        br.addItem("Actual Element");
        br.addElement(element != null ? element.getClass().getName() : null);
        br.addElement(element);
        final String msg = br.buildExceptionMessage();
        throw new DfIllegalPropertySettingException(msg);
    }
protected void throwKeyPathExpectedMapListButNotMapException(String requestName, DfFreeGenResource resource, String keyPath,
String targetPath, List<Object> currentList, Object element) {
final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
br.addNotice("The key path expects string type in list but not string. (FreeGen)");
br.addItem("Request Name");
br.addElement(requestName);
br.addItem("JSON File");
br.addElement(resource.getResourceFile());
br.addItem("keyPath");
br.addElement(keyPath);
br.addItem("Target Path Element");
br.addElement(targetPath);
br.addItem("List Object");
br.addElement(currentList != null ? currentList.getClass().getName() : null);
br.addElement(currentList);
br.addItem("Actual Element");
br.addElement(element != null ? element.getClass().getName() : null);
br.addElement(element);
final String msg = br.buildExceptionMessage();
throw new DfIllegalPropertySettingException(msg);
}
    // Throws when a key-path element matches no known navigation form.
    protected void throwIllegalKeyPathElementException(String requestName, DfFreeGenResource resource, String keyPath,
            String illegalPathElement) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("Illegal key path was found. (FreeGen)");
        br.addItem("Request Name");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resource.getResourceFile());
        br.addItem("keyPath");
        br.addElement(keyPath);
        br.addItem("Illegal Path Element");
        br.addElement(illegalPathElement);
        final String msg = br.buildExceptionMessage();
        throw new DfIllegalPropertySettingException(msg);
    }
// ===================================================================================
// Trace Map
// =========
    /**
     * Walk the decoded JSON root map along a "->"-separated trace path and
     * return the map reached; the terminal element "map" stops the walk.
     * @param requestName The FreeGen request name, for exception messages. (NotNull)
     * @param resource The FreeGen resource, for exception messages. (NotNull)
     * @param rootMap The decoded JSON root map to traverse. (NotNull)
     * @param tracePath The path expression, e.g. "schema -> tables -> map". (NotNull)
     * @return The map located at the end of the path. (NotNull)
     */
    public Map<String, Object> traceMap(String requestName, DfFreeGenResource resource, Map<String, Object> rootMap, String tracePath) {
        // e.g.
        //  jsonPath = map
        //  jsonPath = tables -> map
        //  jsonPath = schema -> tables -> map
        final List<String> pathList = Srl.splitListTrimmed(tracePath, "->");
        Map<String, Object> currentMap = rootMap;
        for (String pathElement : pathList) {
            if ("map".equals(pathElement)) { // terminal marker: current map is the answer
                break;
            }
            final Object obj = currentMap.get(pathElement);
            if (obj == null) {
                throwJsonMapKeyNotFoundException(requestName, resource, tracePath, currentMap, pathElement);
            }
            if (!(obj instanceof Map<?, ?>)) {
                throwJsonTracePathNotMapException(requestName, resource, tracePath, pathElement, obj);
            }
            @SuppressWarnings("unchecked")
            final Map<String, Object> nextMap = (Map<String, Object>) obj;
            currentMap = nextMap;
        }
        return currentMap;
    }
    // Throws when a trace-path element is missing from the current map.
    protected void throwJsonMapKeyNotFoundException(String requestName, DfFreeGenResource resource, String tracePath,
            Map<String, Object> currentMap, String pathElement) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("Not found the key in the map. (FreeGen)");
        br.addItem("Request Name");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resource.getResourceFile());
        br.addItem("Trace Path");
        br.addElement(tracePath);
        br.addItem("Current Map keySet()");
        br.addElement(currentMap.keySet());
        br.addItem("Path Element");
        br.addElement(pathElement);
        final String msg = br.buildExceptionMessage();
        throw new DfIllegalPropertySettingException(msg);
    }

    // Throws when a trace-path element resolves to a non-map value.
    protected void throwJsonTracePathNotMapException(String requestName, DfFreeGenResource resource, String tracePath, String pathElement,
            Object current) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("The trace path expects map type but not map. (FreeGen)");
        br.addItem("Request Name");
        br.addElement(requestName);
        br.addItem("JSON File");
        br.addElement(resource.getResourceFile());
        br.addItem("Trace Path");
        br.addElement(tracePath);
        br.addItem("Path Element");
        br.addElement(pathElement);
        br.addItem("Actual Object");
        br.addElement(current != null ? current.getClass().getName() : null);
        br.addElement(current);
        final String msg = br.buildExceptionMessage();
        throw new DfIllegalPropertySettingException(msg);
    }
}
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Provisions the quiz demo project: App Engine app, media bucket, Datastore
# entities, Pub/Sub topic, Spanner instance/database/table, and a Cloud
# Function. Requires $DEVSHELL_PROJECT_ID to be set (Cloud Shell provides it).
echo "Creating App Engine app"
gcloud app create --region "us-central"
echo "Making bucket: gs://$DEVSHELL_PROJECT_ID-media"
gsutil mb gs://$DEVSHELL_PROJECT_ID-media
echo "Exporting GCLOUD_PROJECT and GCLOUD_BUCKET"
export GCLOUD_PROJECT=$DEVSHELL_PROJECT_ID
export GCLOUD_BUCKET=$DEVSHELL_PROJECT_ID-media
echo "Installing dependencies"
npm install -g npm@6.11.3
npm update
echo "Installing Open API generator"
npm install -g api2swagger
echo "Creating Datastore entities"
node setup/add_entities.js
echo "Creating Cloud Pub/Sub topic"
gcloud pubsub topics create feedback
echo "Creating Cloud Spanner Instance, Database, and Table"
gcloud spanner instances create quiz-instance --config=regional-us-central1 --description="Quiz instance" --nodes=1
gcloud spanner databases create quiz-database --instance quiz-instance --ddl "CREATE TABLE Feedback ( feedbackId STRING(100) NOT NULL, email STRING(100), quiz STRING(20), feedback STRING(MAX), rating INT64, score FLOAT64, timestamp INT64 ) PRIMARY KEY (feedbackId);"
echo "Enabling Cloud Functions API"
gcloud services enable cloudfunctions.googleapis.com
echo "Creating Cloud Function"
gcloud functions deploy process-feedback --runtime nodejs12 --trigger-topic feedback --source ./function --stage-bucket $GCLOUD_BUCKET --entry-point subscribe
echo "Project ID: $DEVSHELL_PROJECT_ID"
|
#!/usr/bin/env bash
# Print a message and abort the script with the given exit status.
# Fix: the status is now quoted and defaults to 1 — a bare `exit $2` with no
# second argument would exit with the status of the previous command.
function bailout() {
    echo "${1}: Exiting"
    exit "${2:-1}"
}
# Create application directory
/bin/mkdir -p /opt/patchserver || bailout "Unable to create /opt/patchserver" 1
# Move required application files
# NOTE(review): only mkdir, the service-file copy, and virtualenv creation
# bail out on failure — the other copy/chown/pip steps continue on error;
# confirm that is intentional.
/bin/cp -r ../../{requirements.txt,patchserver} /opt/patchserver
/bin/cp ./{config.py,wsgi.py} /opt/patchserver
/bin/chown -R apache:apache /opt/patchserver
/bin/cp ./patchserver.service /etc/systemd/system || bailout "Unable to copy patchserver.service" 2
/bin/chown root:root /etc/systemd/system/patchserver.service
/bin/chmod 644 /etc/systemd/system/patchserver.service
# Create application virtual environment
/bin/virtualenv -p python2.7 -q /usr/local/patchserver-venv || bailout "Unable to create virtual environment" 3
# Install Python dependencies
/usr/local/patchserver-venv/bin/pip install futures gunicorn -r /opt/patchserver/requirements.txt
# Enable and start the service
/usr/bin/systemctl enable patchserver.service
/usr/bin/systemctl start patchserver.service
# Verify the service has started
/usr/bin/systemctl status patchserver.service
|
#!/bin/bash
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# BEGIN environment bootstrap section
# Do not edit between here and END as this section should stay identical in all scripts
findpath () {
myname=${0}
mypath=${myname%/*}
myname=${myname##*/}
if [ "$mypath" ] && [ -d "$mypath" ]; then
return
fi
mypath=$(pwd)
if [ -f "${mypath}/${myname}" ]; then
return
fi
echo "FATAL: Could not figure out the path where $myname lives from $0"
exit 1
}
COMMON_ENV=libexec/vespa/common-env.sh
source_common_env () {
if [ "$VESPA_HOME" ] && [ -d "$VESPA_HOME" ]; then
export VESPA_HOME
common_env=$VESPA_HOME/$COMMON_ENV
if [ -f "$common_env" ]; then
. $common_env
return
fi
fi
return 1
}
findroot () {
source_common_env && return
if [ "$VESPA_HOME" ]; then
echo "FATAL: bad VESPA_HOME value '$VESPA_HOME'"
exit 1
fi
if [ "$ROOT" ] && [ -d "$ROOT" ]; then
VESPA_HOME="$ROOT"
source_common_env && return
fi
findpath
while [ "$mypath" ]; do
VESPA_HOME=${mypath}
source_common_env && return
mypath=${mypath%/*}
done
echo "FATAL: missing VESPA_HOME environment variable"
echo "Could not locate $COMMON_ENV anywhere"
exit 1
}
findhost () {
if [ "${VESPA_HOSTNAME}" = "" ]; then
VESPA_HOSTNAME=$(vespa-detect-hostname || hostname -f || hostname || echo "localhost") || exit 1
fi
validate="${VESPA_HOME}/bin/vespa-validate-hostname"
if [ -f "$validate" ]; then
"$validate" "${VESPA_HOSTNAME}" || exit 1
fi
export VESPA_HOSTNAME
}
findroot
findhost
# END environment bootstrap section
# Strip any trailing slash from VESPA_HOME and work from there.
ROOT=${VESPA_HOME%/}
export ROOT
cd $ROOT || { echo "Cannot cd to $ROOT" 1>&2; exit 1; }
# Pidfiles for the two runserver-managed daemons.
P_SENTINEL=var/run/sentinel.pid
P_CONFIG_PROXY=var/run/configproxy.pid
export P_SENTINEL P_CONFIG_PROXY
LOGDIR="$ROOT/logs/vespa"
LOGFILE="$LOGDIR/vespa.log"
VESPA_LOG_TARGET="file:$LOGFILE"
VESPA_LOG_CONTROL_DIR="$ROOT/var/db/vespa/logcontrol"
# Classpath for the config proxy (patches dir first so it can override jars).
cp="libexec/vespa/patches/configproxy:lib/jars/config-proxy-jar-with-dependencies.jar"
VESPA_LOG_LEVEL="all -debug -spam"
export VESPA_LOG_TARGET VESPA_LOG_LEVEL VESPA_LOG_CONTROL_DIR
export VESPA_SENTINEL_PORT
mkdir -p "$LOGDIR"
mkdir -p "$VESPA_LOG_CONTROL_DIR"
hname=$(vespa-print-default hostname)
CONFIG_ID="hosts/$hname"
export CONFIG_ID
export MALLOC_ARENA_MAX=1 #Does not need fast allocation
export LD_LIBRARY_PATH="$VESPA_HOME/lib64"
case $1 in
start)
    nohup sbin/vespa-retention-enforcer > ${LOGDIR}/vre-start.log 2>&1 </dev/null &
    configsources=`bin/vespa-print-default configservers_rpc`
    # Explicit JVM args win; fall back to the services config value.
    userargs=$VESPA_CONFIGPROXY_JVMARGS
    if [ "$userargs" == "" ]; then
        userargs=$services__jvmargs_configproxy
    fi
    jvmopts="-Xms32M -Xmx256M -XX:ThreadStackSize=256 -XX:MaxJavaStackTraceDepth=1000000"
    VESPA_SERVICE_NAME=configproxy
    export VESPA_SERVICE_NAME
    echo "Starting config proxy using $configsources as config source(s)"
    vespa-runserver -r 10 -s configproxy -p $P_CONFIG_PROXY -- \
        java ${jvmopts} \
        -XX:+ExitOnOutOfMemoryError $(getJavaOptionsIPV46) \
        -Dproxyconfigsources="${configsources}" ${userargs} \
        -cp $cp com.yahoo.vespa.config.proxy.ProxyServer 19090
    echo "Waiting for config proxy to start"
    fail=true
    # Poll up to 600 * 0.1s = 60s for the pidfile plus a successful ping.
    for ((sleepcount=0;$sleepcount<600;sleepcount=$sleepcount+1)) ; do
        sleep 0.1
        if [ -f $P_CONFIG_PROXY ] && kill -0 `cat $P_CONFIG_PROXY` && vespa-ping-configproxy -s $hname 2>/dev/null
        then
            echo "config proxy started (runserver pid `cat $P_CONFIG_PROXY`)"
            fail=false
            break
        fi
    done
    if $fail ; then
        echo "Failed to start config proxy!" 1>&2
        echo "look for reason in vespa.log, last part follows..."
        tail -n 15 $LOGFILE | vespa-logfmt -
        exit 1
    fi
    # Only after the proxy answers do we start the sentinel.
    VESPA_SERVICE_NAME=config-sentinel
    export VESPA_SERVICE_NAME
    vespa-runserver -s config-sentinel -r 10 -p $P_SENTINEL -- \
        sbin/vespa-config-sentinel -c "$CONFIG_ID"
    ;;
stop)
    # Stop in reverse order: sentinel first, then the config proxy.
    vespa-runserver -s config-sentinel -p $P_SENTINEL -S
    vespa-runserver -s configproxy -p $P_CONFIG_PROXY -S
    ;;
*)
    echo "Unknown option $@" 1>&2
    exit 1
    ;;
esac
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import media_feature_symbol
import in_generator
import template_expander
import name_utilities
import sys
class MakeMediaFeaturesWriter(in_generator.Writer):
    """Generates MediaFeatures.h from a Blink .in definition file by
    expanding the MediaFeatures.h.tmpl Jinja template."""

    # Per-entry attribute defaults applied by the base Writer when parsing
    # the .in file.
    defaults = {
        'Conditional': None,  # FIXME: Add support for Conditional.
        'RuntimeEnabled': None,
        'ImplementedAs': None,
    }
    # Extra Jinja filters made available to the template.
    filters = {
        'symbol': media_feature_symbol.getMediaFeatureSymbolWithSuffix(''),
        'to_macro_style': name_utilities.to_macro_style,
    }
    default_parameters = {
        'namespace': '',
        'export': '',
    }

    def __init__(self, in_file_path):
        super(MakeMediaFeaturesWriter, self).__init__(in_file_path)
        # Map of output filename -> generator callback, consumed by the
        # base Writer's output machinery.
        self._outputs = {
            ('MediaFeatures.h'): self.generate_header,
        }
        self._template_context = {
            'namespace': '',
            'export': '',
            'entries': self.in_file.name_dictionaries,
        }

    @template_expander.use_jinja('MediaFeatures.h.tmpl', filters=filters)
    def generate_header(self):
        # The decorator renders the template with this context dict.
        return self._template_context


if __name__ == '__main__':
    in_generator.Maker(MakeMediaFeaturesWriter).main(sys.argv)
|
#!/usr/bin/env bash
set -eu -o pipefail
source ./utilities/run-cmd.sh
source ./projects/projects.sh
echo
echo "Get Projects"
echo "= = ="
echo
if [ -z ${ESC_HOME:-} ]; then
echo "ESC_HOME is not set; aborting"
echo
exit 1
fi
if [ ! -z ${GIT_AUTHORITY_PATH:-} ]; then
echo "The GIT_AUTHORITY_PATH environment variable is set: $GIT_AUTHORITY_PATH. It will be used for this script."
remote_authority_path=$GIT_AUTHORITY_PATH
else
remote_authority_path="git@github.com:esc-rb"
fi
if [ ! -z ${GIT_REMOTE_NAME:-} ]; then
echo "The GIT_REMOTE_NAME environment variable is set: $GIT_REMOTE_NAME. It will be used for this script."
remote_name=$GIT_REMOTE_NAME
else
remote_name=origin
fi
# Clone a project repository into $ESC_HOME.
#   $1 = project name, $2 = branch (default: main), $3 = authority (default:
#   $remote_authority_path). Delegates execution to run-cmd.
function clone-project {
  # Declare locals so repeated calls don't leak/clobber caller variables.
  local name=$1
  local branch=${2:-main}
  local authority=${3:-$remote_authority_path}
  local remote_repository_url="$authority/$name.git"

  echo "Cloning: $remote_repository_url"

  local clone_cmd="git -C $ESC_HOME clone -b $branch $remote_repository_url"
  run-cmd "$clone_cmd"
}
# Update an already-cloned project: rebase-pull if the target branch is
# checked out, otherwise fast-forward the local ref without a checkout.
#   $1 = project name, $2 = branch (default: main), $3 = authority (unused
#   here but accepted for a uniform interface with clone-project).
function pull-project {
  local name=$1
  local branch=${2:-main}
  local authority=${3:-$remote_authority_path}

  echo "Pulling: $name ($branch branch only)"

  # Quote the repo path so values containing spaces don't word-split.
  local current_branch
  current_branch=$(git -C "$ESC_HOME/$name" symbolic-ref --short HEAD)

  # Quote both operands: an empty expansion previously broke the `[` test.
  if [ "$current_branch" = "$branch" ]; then
    run-cmd "git -C $ESC_HOME/$name pull --rebase $remote_name $branch"
  else
    run-cmd "git -C $ESC_HOME/$name fetch $remote_name $branch:$branch"
  fi
}
# Clone the project if it is not present locally, otherwise pull it.
# All arguments are forwarded verbatim to clone-project / pull-project.
function get-project {
  local name=$1

  echo "$name"
  echo "- - -"

  local dir="$ESC_HOME/$name"

  # Forward as "$@" (previously unquoted $@), so optional branch/authority
  # arguments survive word splitting intact.
  if [ ! -d "$dir/.git" ]; then
    clone-project "$@"
  else
    pull-project "$@"
  fi

  echo
}
echo
echo "Getting code from $remote_authority_path ($remote_name)"
echo "= = ="
echo

# Quote each entry (previously unquoted) so project names are passed intact.
for project in "${projects[@]}"; do
  get-project "$project"
done
|
# For each FFT length, run rawspec over both test files and rename the
# produced filterbank so the FFT length is encoded in the file name.
# Order matches the original expansion: 1024 (0,1), 2048 (0,1), 4096 (0,1).
for fftlen in 1024 2048 4096; do
  for i in 0 1; do
    rawspec -f $fftlen -t 2 -d /datax/scratch/bbrzycki/data/raw_files/ /datax/scratch/bbrzycki/data/raw_files/test_snr_$i
    mv /datax/scratch/bbrzycki/data/raw_files/test_snr_$i.rawspec.0000.fil /datax/scratch/bbrzycki/data/raw_files/test_snr_${i}_$fftlen.rawspec.0000.fil
  done
done
|
import {AbstractSkeletosComponent, ISkeletosProps} from "./AbstractSkeletosComponent";
import {AbstractSkeletosState, ISkeletosDbListener} from "../../core";
/**
* The Root component listens to any changes in the Skeletos database and updates accordingly.
*/
/**
 * The Root component listens to any changes in the Skeletos database and
 * updates accordingly: each database change event bumps a counter in local
 * state, which forces a re-render of the tree.
 *
 * Listeners are only attached when not server-rendering
 * (process.env.RENDER_ENV !== "server"), since a server render is one-shot.
 */
export abstract class AbstractRootComponent<SkeletosStateType extends AbstractSkeletosState, ExtraPropsType={}>
    extends AbstractSkeletosComponent<SkeletosStateType, ExtraPropsType, IAppContainerState> {

    /** Bound listener kept so add/removeListener receive the same reference. */
    private _onDbChange: ISkeletosDbListener;

    componentWillMount(): void {
        super.componentWillMount();
        this.state = {
            numberOfTimesChanged: 0
        };
        // Bind once so removeListener() can later match this exact callback.
        this._onDbChange = this.onDbChange.bind(this);
    }

    componentDidMount(): void {
        super.componentDidMount();
        if (process.env.RENDER_ENV !== "server") {
            this.skeletosState.cursor.db.addListener(this._onDbChange);
        }
    }

    componentWillUnmount(): void {
        super.componentWillUnmount();
        if (process.env.RENDER_ENV !== "server") {
            this.skeletosState.cursor.db.removeListener(this._onDbChange);
        }
    }

    componentWillReceiveProps(nextProps: Readonly<ISkeletosProps<SkeletosStateType> & ExtraPropsType>,
                              nextContext: any): void {
        super.componentWillReceiveProps(nextProps, nextContext);
        if (process.env.RENDER_ENV !== "server") {
            // The incoming props may carry a different database instance:
            // migrate the listener from the current db to the new one.
            this.skeletosState.cursor.db.removeListener(this._onDbChange);
            nextProps.skeletosState.cursor.db.addListener(this._onDbChange);
        }
    }

    shouldComponentUpdate(nextProps: Readonly<ISkeletosProps<SkeletosStateType> & ExtraPropsType>,
                          nextState: IAppContainerState,
                          nextContext: any): boolean {
        /**
         * If we are in development mode, then we expect the ClientBootstrap.tsx to hot-reload this component, in which case,
         * we also want to update the root component (remember that hot-reload doesn't change state, so our state
         * hash will be the same leading the super.shouldComponentUpdate() to always return false here...which is not
         * what we want for hot-reload).
         */
        if (process.env.NODE_ENV !== "production") {
            return true;
        } else {
            // Bug fix: the third argument was previously `nextState`, passing
            // the state object where the context is expected.
            return super.shouldComponentUpdate(nextProps, nextState, nextContext);
        }
    }

    abstract render(): JSX.Element;

    /**
     * Overridden
     *
     * @param nextState
     * @param currentState
     * @returns {boolean}
     */
    protected isLocalStateIdentical(nextState: IAppContainerState,
                                    currentState: IAppContainerState): boolean {
        return nextState.numberOfTimesChanged === currentState.numberOfTimesChanged;
    }

    /** Database change handler: bump the counter to trigger a re-render. */
    private onDbChange(): void {
        this.setState({
            numberOfTimesChanged: this.state.numberOfTimesChanged + 1
        });
    }
}
/**
 * Just a dummy state so that we can update the root component whenever the
 * database supplied as props changes.
 */
export interface IAppContainerState {
    // Incremented on every database change event; compared in
    // isLocalStateIdentical to decide whether a re-render is needed.
    numberOfTimesChanged: number;
}
#include <iostream>
#include "XMLParser.h" // Include the header file for XMLParser
/**
 * Concrete event delegate that logs every XML callback to stdout.
 * Useful as a tracing/debugging implementation of abstract_delegate.
 */
class XMLParser : public abstract_delegate {
public:
    // Opening tag seen. atts is a null-terminated array of alternating
    // attribute name/value pointers.
    void onStartElement(const XML_Char *fullname, const XML_Char **atts) override {
        std::cout << "Start Element: " << fullname << std::endl;
        // Attributes arrive as {name, value, name, value, ..., nullptr}.
        if (atts != nullptr) {
            for (int i = 0; atts[i]; i += 2) {
                std::cout << "Attribute: " << atts[i] << " = " << atts[i + 1] << std::endl;
            }
        }
    }

    // Closing tag seen.
    void onEndElement(const XML_Char *fullname) override {
        std::cout << "End Element: " << fullname << std::endl;
    }

    // Character data run; pBuf is NOT NUL-terminated, hence the explicit len.
    void onCharacterData(const char *pBuf, int len) override {
        std::cout << "Character Data: " << std::string(pBuf, len) << std::endl;
    }

    // Processing instruction such as <?target data?>.
    void onProcessingInstruction(const XML_Char *target, const XML_Char *data) override {
        std::cout << "Processing Instruction: " << target << " " << data << std::endl;
    }

    // Unparsed (NDATA) entity declaration.
    // NOTE(review): expat's native handler also receives publicId and
    // notationName — confirm abstract_delegate intentionally omits them.
    void onUnparsedEntityDecl(const XML_Char *entityName, const XML_Char *base, const XML_Char *systemId) override {
        std::cout << "Unparsed Entity Declaration: " << entityName << " " << base << " " << systemId << std::endl;
    }
};
// Drives the parser with a fixed sequence of simulated XML events.
int main() {
    XMLParser parser;

    // Simulate XML events to exercise each callback.
    const XML_Char *fullname = "element";
    const XML_Char *atts[] = { "attr1", "value1", "attr2", "value2", nullptr };
    parser.onStartElement(fullname, atts);

    // Bug fix: "Sample character data" is 21 characters, but 19 was passed,
    // silently truncating the last two. Derive the length from the literal.
    const char sample[] = "Sample character data";
    parser.onCharacterData(sample, static_cast<int>(sizeof(sample) - 1));

    parser.onProcessingInstruction("target", "data");
    parser.onUnparsedEntityDecl("entity", "base", "systemId");
    parser.onEndElement("element");
    return 0;
}
/* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.gui.j2d.entity;
import games.stendhal.client.IGameScreen;
import games.stendhal.client.ZoneInfo;
import games.stendhal.client.entity.ActionType;
import games.stendhal.client.entity.BreakableRing;
import games.stendhal.client.sprite.Sprite;
import games.stendhal.client.sprite.SpriteStore;
/**
* The 2D view of a ring.
*/
/**
 * The 2D view of a ring that can break: renders either a "working" or a
 * "broken" sprite depending on the entity's current state.
 */
class BreakableRing2DView extends Item2DView<BreakableRing> {
    /**
     * The working sprite.
     */
    private Sprite working;

    /**
     * The broken sprite.
     */
    private Sprite broken;

    /**
     * The state changed. Volatile: set from entity change notifications and
     * consumed by update() on the drawing side.
     */
    private volatile boolean stateChanged;

    /**
     * Create a 2D view of a chest.
     */
    public BreakableRing2DView() {
        super();
        stateChanged = false;
    }

    @Override
    public void initialize(final BreakableRing entity) {
        super.initialize(entity);
        // Make the initial sprite match the entity's current state.
        setSprite(getStateSprite());
    }

    //
    // Entity2DView
    //

    /**
     * Populate named state sprites.
     *
     */
    @Override
    protected void buildRepresentation(BreakableRing entity) {
        final SpriteStore store = SpriteStore.get();
        Sprite tiles;

        if (isContained()) {
            // Inside a slot: no zone coloring applied.
            tiles = store.getSprite(translate(getClassResourcePath()));
        } else {
            // On the ground: tint with the zone's color settings.
            ZoneInfo info = ZoneInfo.get();
            tiles = store.getModifiedSprite(translate(getClassResourcePath()),
                    info.getZoneColor(), info.getColorMethod());
        }

        // Tile sheet layout: y=0 holds the working frame, the next
        // SIZE_UNIT_PIXELS row holds the broken frame.
        working = store.getTile(tiles, 0, 0, IGameScreen.SIZE_UNIT_PIXELS,
                IGameScreen.SIZE_UNIT_PIXELS);
        broken = store.getTile(tiles, 0, IGameScreen.SIZE_UNIT_PIXELS,
                IGameScreen.SIZE_UNIT_PIXELS, IGameScreen.SIZE_UNIT_PIXELS);

        setSprite(getStateSprite());
        stateChanged = false;
    }

    /**
     * Get the appropriate sprite for the current state.
     *
     * @return A sprite.
     */
    private Sprite getStateSprite() {
        if (entity.isWorking()) {
            return working;
        } else {
            return broken;
        }
    }

    //
    // Entity2DView
    //

    /**
     * Determines on top of which other entities this entity should be drawn.
     * Entities with a high Z index will be drawn on top of ones with a lower Z
     * index.
     *
     * Also, players can only interact with the topmost entity.
     *
     * @return The drawing index.
     */
    @Override
    public int getZIndex() {
        return 5000;
    }

    /**
     * Handle updates. Swaps the sprite on the drawing side once a state
     * change has been flagged by entityChanged().
     */
    @Override
    protected void update() {
        super.update();

        if (stateChanged) {
            stateChanged = false;
            setSprite(getStateSprite());
        }
    }

    @Override
    void entityChanged(final Object property) {
        super.entityChanged(property);

        // Only flag here; the actual sprite swap happens in update().
        if (property == BreakableRing.PROP_WORKING) {
            stateChanged = true;
        }
    }

    //
    // EntityView
    //

    /**
     * Perform the default action.
     */
    @Override
    public void onAction() {
        onAction(ActionType.LOOK);
    }
}
|
#include <iostream>
#include <map>
#include <vector>
#include <algorithm>
// Demonstrates std::for_each over a map, then conditionally scales vector
// elements indexed by the map's keys.
int main()
{
    std::map<int, int> myMap = { {4, 40}, {5, 50}, {6, 60} };

    // Qualify for_each explicitly instead of relying on ADL through the
    // map iterator types (the original bare `for_each` call).
    std::for_each(myMap.cbegin(), myMap.cend(),
        [](const std::pair<int, int>& p) { std::cout << p.first << "->" << p.second << std::endl; });

    std::vector<int> vec;
    vec.push_back(1);

    // Multiply vec[key] by the mapped value for every key that is a valid
    // index. The explicit cast removes the signed/unsigned comparison; the
    // >= 0 guard keeps negative keys out (the unsigned wrap previously
    // rejected them implicitly). With the current data (keys 4..6, one
    // element) no element is modified.
    std::for_each(myMap.begin(), myMap.end(), [&vec](const std::pair<int, int>& p) {
        if (p.first >= 0 && p.first < static_cast<int>(vec.size())) {
            vec[p.first] *= p.second;
        }
    });

    // Print the (possibly) modified vector.
    for (int val : vec) {
        std::cout << val << " ";
    }
    return 0;
}
#!/bin/bash
# Builds and installs all third-party dependencies for mapd under
# $MAPD_PATH (default /usr/local/mapd-deps/<YYYYMMDD>), optionally producing
# a compressed tarball of the tree. Helper functions (download_make_install,
# makej, install_llvm, ...) come from common-functions.sh.
set -e
set -x
# Parse inputs
TSAN=false
COMPRESS=false
while (( $# )); do
  case "$1" in
    --compress)
      COMPRESS=true
      ;;
    --tsan)
      TSAN=true
      ;;
    *)
      break
      ;;
  esac
  shift
done
SUFFIX=${SUFFIX:=$(date +%Y%m%d)}
PREFIX=${MAPD_PATH:="/usr/local/mapd-deps/$SUFFIX"}
# Escalate only if the install root's parent is not writable by this user.
if [ ! -w $(dirname $PREFIX) ] ; then
  SUDO=sudo
fi
$SUDO mkdir -p $PREFIX
$SUDO chown -R $USER $PREFIX
# Make the freshly built toolchain visible to the rest of the script.
export PATH=$PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$PREFIX/lib64:$PREFIX/lib:$LD_LIBRARY_PATH
# Needed to find xmltooling and xml_security_c
export PKG_CONFIG_PATH=$PREFIX/lib/pkgconfig:$PREFIX/lib64/pkgconfig:$PKG_CONFIG_PATH
SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source $SCRIPTS_DIR/common-functions.sh
# System packages (yum) required to bootstrap the builds below.
sudo yum groupinstall -y "Development Tools"
sudo yum install -y \
    zlib-devel \
    epel-release \
    libssh \
    openssl-devel \
    ncurses-devel \
    git \
    maven \
    java-1.8.0-openjdk-devel \
    java-1.8.0-openjdk-headless \
    gperftools \
    gperftools-devel \
    gperftools-libs \
    python-devel \
    wget \
    curl \
    openldap-devel
sudo yum install -y \
    jq \
    pxz
# gmp, mpc, mpfr, autoconf, automake
# note: if gmp fails on POWER8:
# wget https://gmplib.org/repo/gmp/raw-rev/4a6d258b467f
# patch -p1 < 4a6d258b467f
# https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz
download_make_install ${HTTP_DEPS}/gmp-6.1.2.tar.xz "" "--enable-fat"
# http://www.mpfr.org/mpfr-current/mpfr-3.1.5.tar.xz
download_make_install ${HTTP_DEPS}/mpfr-4.0.1.tar.xz "" "--with-gmp=$PREFIX"
download_make_install ftp://ftp.gnu.org/gnu/mpc/mpc-1.1.0.tar.gz "" "--with-gmp=$PREFIX"
download_make_install ftp://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.xz # "" "--build=powerpc64le-unknown-linux-gnu"
download_make_install ftp://ftp.gnu.org/gnu/automake/automake-1.16.1.tar.xz
# gcc — built from source so the whole dependency tree uses one compiler.
VERS=8.4.0
download ftp://ftp.gnu.org/gnu/gcc/gcc-$VERS/gcc-$VERS.tar.xz
extract gcc-$VERS.tar.xz
pushd gcc-$VERS
export CPPFLAGS="-I$PREFIX/include"
./configure \
    --prefix=$PREFIX \
    --disable-multilib \
    --enable-bootstrap \
    --enable-shared \
    --enable-threads=posix \
    --enable-checking=release \
    --with-system-zlib \
    --enable-__cxa_atexit \
    --disable-libunwind-exceptions \
    --enable-gnu-unique-object \
    --enable-languages=c,c++ \
    --with-tune=generic \
    --with-gmp=$PREFIX \
    --with-mpc=$PREFIX \
    --with-mpfr=$PREFIX #replace '--with-tune=generic' with '--with-tune=power8' for POWER8
makej
make install
popd
# All subsequent builds use the just-installed gcc.
export CC=$PREFIX/bin/gcc
export CXX=$PREFIX/bin/g++
install_ninja
install_cmake
download_make_install ftp://ftp.gnu.org/gnu/libtool/libtool-2.4.6.tar.gz
# http://zlib.net/zlib-1.2.8.tar.xz
download_make_install ${HTTP_DEPS}/zlib-1.2.8.tar.xz
VERS=1.0.6
# http://bzip.org/$VERS/bzip2-$VERS.tar.gz
download ${HTTP_DEPS}/bzip2-$VERS.tar.gz
extract bzip2-$VERS.tar.gz
pushd bzip2-$VERS
makej
make install PREFIX=$PREFIX
popd
# https://www.openssl.org/source/openssl-1.0.2u.tar.gz
download_make_install ${HTTP_DEPS}/openssl-1.0.2u.tar.gz "" "linux-$(uname -m) no-shared no-dso -fPIC"
# libarchive
download_make_install ${HTTP_DEPS}/xz-5.2.4.tar.xz "" "--disable-shared"
download_make_install ${HTTP_DEPS}/libarchive-3.3.2.tar.gz "" "--without-openssl --disable-shared"
CFLAGS="-fPIC" download_make_install ftp://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.1.tar.gz # "" "--build=powerpc64le-unknown-linux-gnu"
download_make_install ftp://ftp.gnu.org/gnu/bison/bison-3.4.2.tar.xz # "" "--build=powerpc64le-unknown-linux-gnu"
# https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/flexpp-bisonpp/bisonpp-1.21-45.tar.gz
download_make_install ${HTTP_DEPS}/bisonpp-1.21-45.tar.gz bison++-1.21
CFLAGS="-fPIC" download_make_install ftp://ftp.gnu.org/gnu/readline/readline-7.0.tar.gz
VERS=1_72_0
# http://downloads.sourceforge.net/project/boost/boost/${VERS//_/.}/boost_$VERS.tar.bz2
download ${HTTP_DEPS}/boost_$VERS.tar.bz2
extract boost_$VERS.tar.bz2
pushd boost_$VERS
./bootstrap.sh --prefix=$PREFIX
# NOTE(review): `|| true` swallows b2 failures — presumably to tolerate
# known non-fatal target errors; confirm this is still intended.
./b2 cxxflags=-fPIC install --prefix=$PREFIX || true
popd
VERS=3.1.5
download https://github.com/google/double-conversion/archive/v$VERS.tar.gz
extract v$VERS.tar.gz
mkdir -p double-conversion-$VERS/build
pushd double-conversion-$VERS/build
cmake -DCMAKE_CXX_FLAGS="-fPIC" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$PREFIX ..
makej
make install
popd
VERS=2.2.2
download https://github.com/gflags/gflags/archive/v$VERS.tar.gz
extract v$VERS.tar.gz
mkdir -p gflags-$VERS/build
pushd gflags-$VERS/build
cmake -DCMAKE_CXX_FLAGS="-fPIC" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$PREFIX ..
makej
make install
popd
VERS=0.3.5
CXXFLAGS="-fPIC" download_make_install https://github.com/google/glog/archive/v$VERS.tar.gz glog-$VERS "--enable-shared=no" # --build=powerpc64le-unknown-linux-gnu"
# folly
VERS=2.1.10
download_make_install https://github.com/libevent/libevent/releases/download/release-$VERS-stable/libevent-$VERS-stable.tar.gz
VERS=2019.04.29.00
download https://github.com/facebook/folly/archive/v$VERS.tar.gz
extract v$VERS.tar.gz
pushd folly-$VERS/build/
CXXFLAGS="-fPIC -pthread" cmake -DCMAKE_INSTALL_PREFIX=$PREFIX ..
makej
make install
popd
# llvm
# http://thrysoee.dk/editline/libedit-20170329-3.1.tar.gz
download_make_install ${HTTP_DEPS}/libedit-20170329-3.1.tar.gz
# (see common-functions.sh)
install_llvm
VERS=7.69.0
# https://curl.haxx.se/download/curl-$VERS.tar.xz
download_make_install ${HTTP_DEPS}/curl-$VERS.tar.xz "" "--disable-ldap --disable-ldaps"
# thrift
VERS=0.13.0
# http://apache.claz.org/thrift/$VERS/thrift-$VERS.tar.gz
download ${HTTP_DEPS}/thrift-$VERS.tar.gz
extract thrift-$VERS.tar.gz
pushd thrift-$VERS
# With --tsan, thrift is compiled with ThreadSanitizer instrumentation.
if [ "$TSAN" = "false" ]; then
  THRIFT_CFLAGS="-fPIC"
  THRIFT_CXXFLAGS="-fPIC"
elif [ "$TSAN" = "true" ]; then
  THRIFT_CFLAGS="-fPIC -fsanitize=thread -fPIC -O1 -fno-omit-frame-pointer"
  THRIFT_CXXFLAGS="-fPIC -fsanitize=thread -fPIC -O1 -fno-omit-frame-pointer"
fi
CFLAGS="$THRIFT_CFLAGS" CXXFLAGS="$THRIFT_CXXFLAGS" JAVA_PREFIX=$PREFIX/lib ./configure \
    --prefix=$PREFIX \
    --with-lua=no \
    --with-python=no \
    --with-php=no \
    --with-ruby=no \
    --with-qt4=no \
    --with-qt5=no \
    --with-java=no \
    --with-boost-libdir=$PREFIX/lib
makej
make install
popd
# librdkafka
install_rdkafka static
# backend rendering
VERS=1.6.21
# http://download.sourceforge.net/libpng/libpng-$VERS.tar.xz
download_make_install ${HTTP_DEPS}/libpng-$VERS.tar.xz
VERS=3.0.2
download https://github.com/cginternals/glbinding/archive/v$VERS.tar.gz
extract v$VERS.tar.gz
BDIR="glbinding-$VERS/build"
mkdir -p $BDIR
pushd $BDIR
cmake \
    -DCMAKE_BUILD_TYPE=Release \
    -DOPTION_BUILD_DOCS=OFF \
    -DOPTION_BUILD_EXAMPLES=OFF \
    -DOPTION_BUILD_TESTS=OFF \
    -DOPTION_BUILD_TOOLS=OFF \
    -DOPTION_BUILD_WITH_BOOST_THREAD=OFF \
    -DBUILD_SHARED_LIBS=OFF \
    -DCMAKE_INSTALL_PREFIX=$PREFIX \
    ..
makej
make install
popd
install_snappy
# c-blosc
VERS=1.14.4
download https://github.com/Blosc/c-blosc/archive/v$VERS.tar.gz
extract v$VERS.tar.gz
BDIR="c-blosc-$VERS/build"
rm -rf "$BDIR"
mkdir -p "$BDIR"
pushd "$BDIR"
cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$PREFIX -DBUILD_BENCHMARKS=off -DBUILD_TESTS=off -DPREFER_EXTERNAL_SNAPPY=off -DPREFER_EXTERNAL_ZLIB=off -DPREFER_EXTERNAL_ZSTD=off ..
makej
make install
popd
# Geo Support
install_gdal
install_geos
# TBB
install_tbb static
# Apache Arrow (see common-functions.sh)
install_arrow
# Go
install_go
# install AWS core and s3 sdk
install_awscpp -j $(nproc)
# glslang (with spirv-tools)
VERS=7.12.3352 # 8/20/19
rm -rf glslang
mkdir -p glslang
pushd glslang
wget --continue https://github.com/KhronosGroup/glslang/archive/$VERS.tar.gz
tar xvf $VERS.tar.gz
pushd glslang-$VERS
./update_glslang_sources.py
mkdir build
pushd build
cmake \
    -DCMAKE_BUILD_TYPE=RelWithDebInfo \
    -DCMAKE_INSTALL_PREFIX=$PREFIX \
    ..
make -j $(nproc)
make install
popd # build
popd # glslang-$VERS
popd # glslang
# spirv-cross
VERS=2019-09-04
rm -rf spirv-cross
mkdir -p spirv-cross
pushd spirv-cross
wget --continue https://github.com/KhronosGroup/SPIRV-Cross/archive/$VERS.tar.gz
tar xvf $VERS.tar.gz
pushd SPIRV-Cross-$VERS
mkdir build
pushd build
cmake \
    -DCMAKE_BUILD_TYPE=RelWithDebInfo \
    -DCMAKE_INSTALL_PREFIX=$PREFIX \
    -DCMAKE_POSITION_INDEPENDENT_CODE=on \
    -DSPIRV_CROSS_ENABLE_TESTS=off \
    ..
make -j $(nproc)
make install
popd # build
popd # SPIRV-Cross-$VERS
popd # spirv-cross
# Vulkan
# Custom tarball which excludes the spir-v toolchain
VERS=1.1.126.0 # 11/1/19
rm -rf vulkan
mkdir -p vulkan
pushd vulkan
wget --continue ${HTTP_DEPS}/vulkansdk-linux-x86_64-no-spirv-$VERS.tar.gz -O vulkansdk-linux-x86_64-no-spirv-$VERS.tar.gz
tar xvf vulkansdk-linux-x86_64-no-spirv-$VERS.tar.gz
rsync -av $VERS/x86_64/* $PREFIX
popd # vulkan
# install opensaml and its dependencies
VERS=3.2.2
download ${HTTP_DEPS}/xerces-c-$VERS.tar.gz
extract xerces-c-$VERS.tar.gz
XERCESCROOT=$PWD/xerces-c-$VERS
mkdir $XERCESCROOT/build
pushd $XERCESCROOT/build
cmake -DCMAKE_INSTALL_PREFIX=$PREFIX -DBUILD_SHARED_LIBS=off -Dnetwork=off -DCMAKE_BUILD_TYPE=release ..
makej
make install
popd
download_make_install ${HTTP_DEPS}/xml-security-c-2.0.2.tar.gz "" "--without-xalan --enable-static --disable-shared"
download_make_install ${HTTP_DEPS}/xmltooling-3.0.4-nolog4shib.tar.gz "" "--enable-static --disable-shared"
download_make_install ${HTTP_DEPS}/opensaml-3.0.1-nolog4shib.tar.gz "" "--enable-static --disable-shared"
# Generate the environment/module files that consumers source to use this tree.
sed -e "s|%MAPD_DEPS_ROOT%|$PREFIX|g" mapd-deps.modulefile.in > mapd-deps-$SUFFIX.modulefile
sed -e "s|%MAPD_DEPS_ROOT%|$PREFIX|g" mapd-deps.sh.in > mapd-deps-$SUFFIX.sh
cp mapd-deps-$SUFFIX.sh mapd-deps-$SUFFIX.modulefile $PREFIX
# Optionally package the whole prefix into a (tsan-tagged) tarball.
if [ "$COMPRESS" = "true" ] ; then
  if [ "$TSAN" = "false" ]; then
    TARBALL_TSAN=""
  elif [ "$TSAN" = "true" ]; then
    TARBALL_TSAN="tsan-"
  fi
  tar --use-compress-program=pxz -acvf mapd-deps-${TARBALL_TSAN}${SUFFIX}.tar.xz -C $(dirname $PREFIX) $SUFFIX
fi
|
#!/usr/bin/env bash
# Registers (without executing: --no-exec) a DVC stage that evaluates the
# trained CAD model. Dependencies (-d) are the source tree, dataset and model
# checkpoint directory; the metrics directory (-o) is the tracked output.
dvc run -f dvc/cad_experiment/evaluate_model.dvc --no-exec \
    -d ucsgnet/ucsgnet \
    -d ucsgnet/dataset.py \
    -d ucsgnet/utils.py \
    -d ucsgnet/common.py \
    -d data/cad \
    -d models/cad_main \
    -o paper-stuff/metrics/cad_experiment \
    python -m ucsgnet.ucsgnet.cad.eval_cad \
    --data_path data/cad/cad.h5 \
    --weights_path "models/cad_main/initial/ckpts/model.ckpt" \
    --out_dir paper-stuff/metrics/cad_experiment
|
use App\Http\Controllers\Controller;
use App\Mail\OrderShipped;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Mail;
class OrderController extends Controller
{
    /**
     * Ship the given order and send an email notification to the customer.
     *
     * @param Request $request
     * @param int $orderId
     * @return \Illuminate\Http\JsonResponse
     */
    public function ship(Request $request, $orderId)
    {
        // NOTE(review): Order is not imported at the top of this file —
        // confirm the correct FQCN (e.g. App\Models\Order) is in a `use`.
        $order = Order::find($orderId);

        // Guard against an unknown order id instead of fataling on a null
        // object when reading ->customer below.
        if ($order === null) {
            return response()->json(['message' => 'Order not found'], 404);
        }

        $customerEmail = $order->customer->email;
        $shippingDetails = $order->shippingDetails;

        // Send email notification to the customer
        Mail::to($customerEmail)->send(new OrderShipped($shippingDetails));

        // Update the order status or perform any other necessary actions
        return response()->json(['message' => 'Order shipped successfully']);
    }
}
<filename>commands/team.js
const Discord = require("discord.js");
exports.run = async (client, message, args) => {
let logo = client.user.avatarURL;
let members = message.guild.roles
.find(r => r.name === "Developer")
.members.array();
let mods = message.guild.roles
.find(r => r.name === "Moderator")
.members.array()
.filter(m => !m.user.bot);
let team = new Discord.RichEmbed()
.setAuthor("TripleA Development")
.setTitle("Current status of the team")
.setColor(client.config.colors.primary)
.setTimestamp()
.setThumbnail(logo)
.setDescription(
`**Team Members: [${members.length}]**\n${members.join(
" • "
)}\n\n**Community Moderators: [${mods.length}]**\n${mods.join(" • ")}`
);
message.channel.send(team);
};
// Command metadata consumed by the bot's command loader/help system.
exports.help = {
  name: "team",
  description: "Get information about the team.",
  cooldown: 0,        // no per-user cooldown
  usage: "team"
};
|
<filename>app/scripts/application/components/country-description/index.js
'use strict';
var Vuex = require('vuex');
module.exports = {
template: require('./template.html'),
data: function() {
return {
isLoaded: false,
description: ''
};
},
methods: Vuex.mapActions([
'getCountryDescription'
]),
created: function() {
var that = this;
that.getCountryDescription().then(function(description) {
that.$data.description = description;
that.$data.isLoaded = true;
});
}
};
|
#!/bin/bash -e
# Tears down the CI environment: relayer, nearcore and the docker network.
# With --clean, the relayer docker images are removed as well.
CI_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
source $CI_DIR/controls/common.sh

# Option parsing.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --clean)
      remove_images=true
      ;;
    *)
      echo "bad argument $1" && exit 1
      ;;
  esac
  shift
done

# Stop/remove in dependency order.
$CI_DIR/controls/relayer/stop.sh
$CI_DIR/controls/relayer/remove.sh
$CI_DIR/controls/nearcore/stop.sh
$CI_DIR/controls/network/remove.sh

if [[ -n $remove_images ]]; then
  $CI_DIR/controls/relayer/remove_images.sh
fi
|
<reponame>active9/screencap<gh_stars>10-100
// Records the screen to test.mp4 with the given encoder settings.
var screencap = require('../index.js');

// First argument: encoder options; second: output file name.
var screen = screencap({
    audioCodec: "aac",
    videoCodec: "libx264",
    videoBitrate: "1000k",
    audioBitrate: "96k",
    format: "mp4"
},'test.mp4');

// NOTE(review): the duration is passed as the string '30' — presumably
// seconds; confirm against the screencap API.
screen.capture('30');
import pandas as pd
# Bug fix: train_test_split, RandomForestRegressor and r2_score were used
# below without ever being imported, so this script raised NameError.
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import r2_score
from sklearn.model_selection import train_test_split

# Load data
data = pd.read_csv('nyc_rental_data.csv')

# Create train and test set: predict average_monthly_rent from all
# remaining columns, with a fixed split for reproducibility.
X = data.drop(['average_monthly_rent'], axis=1)
y = data['average_monthly_rent']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Create and train the model
model = RandomForestRegressor(n_estimators=100, max_depth=10, random_state=0)
model.fit(X_train, y_train)

# Make predictions
y_pred = model.predict(X_test)

# Evaluate predictions. Print the R^2 score so the value is not silently
# discarded when run as a script (the bare expression only displayed in a
# notebook).
print(r2_score(y_test, y_pred))
import { Component, OnInit } from '@angular/core';
import { QueryBuilderConfig } from 'angular2-query-builder';
import { Router } from '@angular/router';
import { SharedService } from '../services/shared.service';
import { error } from '@angular/compiler/src/util';
import { StoreService } from '../../store-management/services/store.service';
import * as moment from 'moment';
import { ToastrService } from 'ngx-toastr';
@Component({
  selector: 'ngx-rules',
  templateUrl: './rules.component.html',
  styleUrls: ['./rules.component.scss']
})
/**
 * Add/edit screen for shipping rules. A rule couples a query-builder rule
 * set (conditions) with a list of actions, scoped to a store and an
 * optional date range. Edit mode is entered when localStorage holds a
 * 'rulesCode' key.
 */
export class RulesComponent implements OnInit {
  // Query-builder model (condition + rule rows) bound to the template.
  query = {
    condition: 'and',
    rules: [
    ]
  };
  // Store options for the store dropdown: {value, label} pairs.
  stores = [];
  // Whether the typed rule code already exists server-side.
  codeExits: boolean;
  // Form model for the rule being created/edited.
  rules = {
    id: '',
    enabled: false,
    code: '',
    name: '',
    // timeBased: '',
    startDate: new Date(),
    endDate: new Date(),
    order: 0,
    store: '',
    selected_result: '',
  }
  // All available actions, and the subset the user has picked.
  actionsData: Array<any> = [];
  selectedActionsData: Array<any> = [];
  // Field/operator configuration for the query builder.
  config: QueryBuilderConfig;
  rules_time: boolean = false;
  loadingList: boolean = false
  shippingResult: Array<any> = [];
  resultData: Array<any> = [];
  selectedResult: any;
  // UI strings: flipped to update-mode values when editing.
  title: any = 'Add Rules'
  buttonText: any = 'Submit'

  constructor(
    private sharedService: SharedService,
    private storeService: StoreService,
    public router: Router,
    private toastr: ToastrService
  ) {
    this.getStoreList();
    //this.getShippingCondition()
    this.getRulesCriterias()
    this.getRulesActions()
  }

  ngOnInit() {
    // Edit mode: load the existing rule identified by localStorage code.
    if (localStorage.getItem('rulesCode')) {
      // setTimeout(() => {
      this.sharedService.getShippingRulesDetails(localStorage.getItem('rulesCode'))
        .subscribe(rulesData => {
          // let rulesData = JSON.parse(localStorage.getItem('rulesCode'))
          console.log(rulesData)
          this.title = 'Update Rules'
          this.buttonText = 'Update'
          let j = this.stores.find(x => x.code === rulesData.store);
          this.rules = rulesData
          // NOTE(review): fixed 3s delay presumably waits for the async
          // store list to populate the dropdown — fragile; consider
          // sequencing on getStoreList() completing instead.
          setTimeout(() => {
            console.log(rulesData.store)
            this.rules.store = rulesData.store
          }, 3000);
          this.rules.endDate = rulesData.endDate && new Date(rulesData.endDate)
          this.rules.startDate = rulesData.startDate && new Date(rulesData.startDate)
          this.query = rulesData.ruleSets[0];
          // console.log(this.actionsData);\
          // Pre-select the actions referenced by the loaded rule, copying
          // their saved values onto the action definitions.
          let array1 = this.actionsData
          var array3 = array1.filter(function (obj) {
            return rulesData.actions.find((a) => {
              if (a.value) {
                obj.value = a.value
                return a.code === obj.code
              }
            });
          });
          this.selectedActionsData = array3;
        }, error => {
        });
      // }, 3000);
    }
  }

  // Populate the store dropdown from the merchant store names endpoint.
  getStoreList() {
    this.storeService.getListOfMerchantStoreNames({ 'store': '' })
      .subscribe(res => {
        // console.log(res);
        res.forEach((store) => {
          this.stores.push({ value: store.code, label: store.code });
        });
      });
  }

  // Check whether the typed rule code is already taken.
  // NOTE(review): success sets codeExits=true and error sets false —
  // confirm the endpoint's success/error semantics match this reading.
  onChangeCode(e) {
    this.sharedService.checkCode(e.target.value)
      .subscribe(res => {
        console.log(res);
        this.codeExits = true;
      }, error => {
        console.log(error);
        this.codeExits = false;
      });
  }

  // Build the query-builder field config from the rule criteria endpoint.
  getRulesCriterias() {
    let fields = {}
    this.sharedService.getRulesCriterias()
      .subscribe(data => {
        // console.log(data)
        data.map((value) => {
          fields[value.code] = {
            "name": value.name,
            // "type": value.options.length > 0 ? 'category' : value.format == 'DECIMAL' || value.format == 'NUMERIC' ? 'number' : value.format.toLowerCase(),
            "type": value.criteriaType == 'text' ? 'string' : 'string',
            "operators": value.operators,
            "options": []
          }
          // value.options.map((opt) => {
          //   fields[value.code].options.push({ name: opt.name, value: opt.value })
          // })
        });
        this.config = { fields };
      });
  }

  // Load the list of all available rule actions.
  getRulesActions() {
    this.sharedService.getRulesActions()
      .subscribe(data => {
        // console.log(data);
        this.actionsData = data
      });
  }

  goToback() {
    this.router.navigate(['/pages/shipping/rules']);
  }

  // Add an action to the selected list with an empty value to fill in.
  onAddActions(actions) {
    console.log('onAddActions')
    console.log(actions)
    actions.value = ''
    this.selectedActionsData.push(actions)
  }

  onDeleteIcon(index) {
    this.selectedActionsData.splice(index, 1);
  }

  // Assemble the rule payload and create or update it depending on mode.
  onSubmit() {
    this.loadingList = true;
    console.log(this.query)
    let actions = [];
    // NOTE(review): this serializes ALL actionsData, not just
    // selectedActionsData — verify whether unselected actions should be
    // submitted.
    this.actionsData.map((value) => {
      actions.push({ code: value.code, value: value.value })
    });
    // Normalize rule values: single strings are wrapped in an array.
    let querys = { condition: this.query.condition, rules: [] };
    this.query.rules.map((q) => {
      if (typeof q.value === 'string' || q.value instanceof String) {
        querys.rules.push({ field: q.field, operator: q.operator, value: [q.value] })
      } else {
        querys.rules.push({ field: q.field, operator: q.operator, value: q.value })
      }
    });
    let param = {
      "name": this.rules.name,
      "code": this.rules.code,
      "store": this.rules.store,
      "enabled": this.rules.enabled,
      "startDate": moment(this.rules.startDate).utc(),
      "endDate": moment(this.rules.endDate).utc(),
      "actions": actions,
      "ruleSets": [
        querys
      ]
    }
    console.log(param);
    if (this.buttonText === 'Submit') {
      this.sharedService.createShippingRules(param)
        .subscribe(data => {
          console.log(data);
          this.loadingList = false;
          this.toastr.success('Rules has been added successfully');
          this.goToback()
        }, error => {
          this.toastr.error('Rules has been added fail.');
          this.loadingList = false;
        });
    } else {
      this.sharedService.updateShippingRules(this.rules.id, param)
        .subscribe(data => {
          console.log(data);
          this.loadingList = false;
          this.toastr.success('Rules has been updated successfully');
          this.goToback()
        }, error => {
          this.toastr.error('Rules has been updated fail.');
          this.loadingList = false;
        });
    }
  }

  // getShippingCondition() {
  //   this.loadingList = true;
  //   let fields = {}
  //   this.sharedService.getRulesCondition()
  //     .subscribe(data => {
  //       // console.log(data)
  //       data.map((value) => {
  //         fields[value.code] = {
  //           "name": value.name,
  //           "type": value.options.length > 0 ? 'category' : value.format == 'DECIMAL' || value.format == 'NUMERIC' ? 'number' : value.format.toLowerCase(),
  //           "operators": value.operators,
  //           "options": []
  //         }
  //         value.options.map((opt) => {
  //           fields[value.code].options.push({ name: opt.name, value: opt.value })
  //         })
  //       });
  //       // config: QueryBuilderConfig = { fields: {} }
  //       // console.log(fields);
  //       this.config = { fields };
  //       this.loadingList = false;
  //       // this.source.load(data);
  //     }, error => {
  //       this.loadingList = false;
  //     });
  //   this.getShippingResult();
  // }
  // getShippingResult() {
  //   this.sharedService.getRulesResult()
  //     .subscribe(data => {
  //       // console.log(data);
  //       this.shippingResult = data;
  //     }, error => {
  //     });
  //   this.getShippingRulesDetails()
  // }
  // getShippingRulesDetails() {
  //   if (localStorage.getItem('rulesCode')) {
  //     this.sharedService.getShippingRulesDetails(localStorage.getItem('rulesCode'))
  //       .subscribe(data => {
  //         console.log(data)
  //         this.rules = data;
  //         this.rules.startDate = new Date(data.startDate)
  //         this.rules.endDate = new Date(data.endDate)
  //         this.resultData = data.results;
  //         this.query = data.conditions[0]
  //       }, error => {
  //       });
  //   }
  // }

  onClickConfigure() {
    // console.log(this.selected_result);
    this.selectedResult = this.rules.selected_result
  }
}
|
<filename>aura-modules/src/test/components/secureModuleTest/bootstrap/bootstrapController.js
({
    /**
     * Verify that a component, which has an equivalent module that has opted
     * into locker, is not forcefully lockerized.
     * @param cmp
     */
    testCmpLockerStateNotAffectedByEquivalentModule: function(cmp) {
        var testUtils = cmp.get("v.testUtils");
        // A lockerized window stringifies with "SecureWindow"; the raw
        // window must not contain that marker.
        testUtils.assertTrue(window.toString().indexOf("SecureWindow") === -1, "Expected window to"
            + " return raw window in component");
    }
})
/**
 * Clears any inline bottom/right offsets on an element so the
 * stylesheet-defined values apply again.
 * @param {HTMLElement} el - element whose inline offsets are reset
 */
function removeStaticOffset(el) {
    var style = el.style;
    style.bottom = '';
    style.right = '';
}
// exports.handler = async (event) => {
// // TODO implement
// const response = {
// statusCode: 200,
// body: JSON.stringify('Hello from Lambda!')
// };
// return response;
// };
'use strict';
var AWS = require('aws-sdk'),
documentClient = new AWS.DynamoDB.DocumentClient();
/**
@summary retrieves the training for a given event, or if omited, all trainings
probably in the future we'll want to reduce that to just the current user or something idk, or introduce pagination
@note the return json format is because of the lambda proxy option in the API gateway
*/
exports.handler = function(event, context) {
console.log("params: ", event.queryStringParameters)
let eventName = ""
if (event.queryStringParameters && 'event' in event.queryStringParameters) {
eventName = event.queryStringParameters.event
}
console.log(eventName)
let params = eventName ? {
TableName : process.env.TABLE_NAME,
Key: { event: eventName },
ExpressionAttributeValues: {
':event': eventName
},
KeyConditionExpression: 'event = :event',
FilterExpression: 'contains(event, :event)'
} : {
TableName : process.env.TABLE_NAME
}
return new Promise((resolve, reject) => {
documentClient.scan(params, function(err, data){
if(err){
console.error("error: ", err)
reject({
"isBase64Encoded": false,
"statusCode": 500,
"headers": event.headers,
"body": err.errorMessage
})
}else{
resolve({
"isBase64Encoded": false,
"statusCode": 200,
"headers": event.headers,
"body": JSON.stringify(data.Items)
})
}
})
})
} |
#!/bin/bash
# Deploys deploy/ROOT.war into Tomcat 7, backing up the current webapp first.
# Any failed command aborts the whole deployment via the ERR trap.
trap "echo ERROR && exit 1" ERR
echo "DEPLOYMENT BEGIN"
echo "stopping tomcat service."
service tomcat7 stop
maindir="$(pwd -P)"
hostdir='/var/lib/tomcat7'
if [ -d "$hostdir/webapps/ROOT" ]; then
  echo "CREATING BACKUP OF CURRENT DEPLOYMENT."
  # Content-addressed backup name: md5 of a tar of the deployed tree, so two
  # identical deployments yield the same backup file name.
  # FIX: a failed `cd` is no longer hidden by &>/dev/null (it used to let tar
  # run in the wrong directory), and the useless trailing `cd $maindir` inside
  # the subshell (a subshell cannot change the caller's cwd) is gone.
  dstnt_digest="$(cd "$hostdir/webapps/ROOT" && tar -cf - ./* | openssl md5 | cut -d' ' -f2-)"
  backup_name="ROOT-$dstnt_digest.tgz"
  # Make sure the backup directory exists.
  mkdir -p "$hostdir/backups"
  # Snapshot the current deployment into a tarball.
  cd "$hostdir/webapps"
  tar -czf "$hostdir/backups/$backup_name" ROOT/
  cd "$maindir"
  echo "removing old installation."
  rm -rf "$hostdir"/webapps/ROOT*   # remove outdated version (dir and war)
else
  echo "no current deployment found. Continuing."
fi
echo "installing new application."
# If ROOT.war still exists here, the removal above failed.
if [ -f "$hostdir/webapps/ROOT.war" ]; then
  echo "ROOT.war already exists. Exiting."
  exit 1
fi
if [ -f deploy/ROOT.war ]; then
  cp -v deploy/ROOT.war "$hostdir/webapps/"   # add new version
else
  echo "No warfile found in local dir. Have you compiled?"
  echo "exiting"
  exit 1
fi
echo "restarting tomcat."
service tomcat7 start
echo "DEPLOYMENT DONE"
<reponame>Subodh22/made_test<filename>jube.py
import os
import json
import unicodedata
import multiprocessing
import concurrent.futures
import re
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from tqdm import tqdm
import pandas as pd
from multiprocessing import Pool
from concurrent.futures import TimeoutError
from multiprocessing import cpu_count
from neo4j import GraphDatabase
graphe=GraphDatabase.driver("bolt://localhost/:7687",auth=("neo4j","mathers22"))
def Get_youtube(lister):
    """Scrape the first 15 YouTube search results for a topic and persist
    them to Neo4j as ``Videos`` nodes attached to the matching topic node.

    Args:
        lister: sequence of ``(subject, topic, _unused, degree/rank)``.

    Failures are reported and swallowed so one bad topic cannot kill the
    process pool that maps over this function.
    """
    try:
        topic = lister[1]
        subject = lister[0]
        # Headless Chrome flags suitable for containers/CI environments.
        options = webdriver.ChromeOptions()
        options.headless = True
        options.add_argument("--window-size=1920,1080")
        options.add_argument('--ignore-certificate-errors')
        options.add_argument('--allow-running-insecure-content')
        options.add_argument("--disable-extensions")
        options.add_argument("--proxy-server='direct://'")
        options.add_argument("--proxy-bypass-list=*")
        options.add_argument("--start-maximized")
        options.add_argument('--disable-gpu')
        options.add_argument('--disable-dev-shm-usage')
        options.add_argument('--no-sandbox')
        driver = webdriver.Chrome(executable_path="../chromedriver", options=options)
        urle = "https://www.youtube.com/results?search_query=" + topic
        driver.get(urle)
        rank = lister[3]
        # Wait until result tiles exist, then scroll to trigger lazy loading.
        elemente = WebDriverWait(driver, 10).until(
            EC.presence_of_all_elements_located((By.ID, "dismissible"))
        )
        driver.execute_script("window.scrollTo(0, 800)")
        driver.execute_script("window.scrollTo(0, 1980)")
        for i in tqdm(range(15)):
            vid_det = {}
            vid_det["degree"] = lister[3]
            vid_det["video_id"] = re.findall(r"watch\?v=(\S{11})", (elemente[i].find_element_by_id("thumbnail").get_attribute("href")))
            # aria-label text holds the spoken duration between "ago" and "views".
            toni = elemente[i].find_element_by_tag_name("yt-formatted-string").get_attribute("aria-label")
            result = re.search('ago(.*)views', toni)
            ioi = " ".join((result.group(1)).split(" ")[:-2])
            tolir = elemente[i].find_element_by_id("metadata-line")
            toli = tolir.find_elements_by_tag_name("span")
            vid_det["duration"] = ioi
            vid_det["views"] = toli[0].get_attribute("innerHTML")
            vid_det["age"] = toli[1].get_attribute("innerHTML")
            vid_det["title"] = elemente[i].find_element_by_tag_name("yt-formatted-string").get_attribute("innerHTML")
            vid_det["img"] = elemente[i].find_element_by_id("img").get_attribute("src")
            vid_det["topic"] = topic
            vid_det["subject"] = subject
            sess = graphe.session()
            # BUG FIX: the original query used `duration=row.duration` (etc.);
            # Cypher property maps require `key: value`, so the CREATE always
            # failed. UNWIND also expects a list, so the dict is wrapped.
            query = ("UNWIND $mer_list as row "
                     "MATCH(x:topic{name:row.topic}) "
                     "CREATE(y:Videos{title:row.title,duration:row.duration,views:row.views,"
                     "age:row.age,id:row.video_id,img:row.img}),(y)-[:videos_of]->(x)")
            sess.run(query, mer_list=[vid_det])
            sess.close()
            print("okok")
        print(subject + "donezo")
        driver.quit()
    except Exception as exc:
        # Swallow (the pool must keep going) but report what went wrong
        # instead of a bare fixed string.
        print("not_work", exc)
def openJson(name_tag):
    """Load and return the JSON payload stored at ./amc/<name_tag>.json."""
    file_path = './amc/{}.json'.format(name_tag)
    with open(file_path, 'r') as handle:
        return json.load(handle)
def june_bug(name_tag):
    # Reads ./amc/<name_tag>.json (a list of major names); for each major it
    # loads that major's own JSON file, seeds an empty CSV in ./pending/, and
    # scrapes YouTube for every topic of the major in a process pool.
    work_data={}
    majors=[]
    pathe='./amc/'+name_tag+'.json'
    with open(pathe,'r') as college_data:
        majors = json.load(college_data)
    if(majors!=[]):
        for i in range(len(majors)):
            data_json=openJson(majors[i])
            # Seed an empty CSV with the expected column layout for this major.
            df = pd.DataFrame(columns=["degree","video_id","duration","views","age","title","img","topic","subject"])
            df.to_csv('./pending/'+majors[i]+'.csv')
            if(data_json!=[]):
                work_data=data_json[0]
                # NOTE(review): when the file holds more than one record the
                # second entry wins — presumably the newer one; confirm.
                if(len(data_json)>1):
                    work_data=data_json[1]
                # One task per topic: (subject, topic, 0, topic-index-as-degree).
                item=((majors[i],s,0,work_data["topics"].index(s))for s in work_data["topics"])
                with concurrent.futures.ProcessPoolExecutor() as executor:
                    executor.map(Get_youtube,item)
                # with multiprocessing.get_context('spawn').Pool() as pool:
                #     pool.map(Get_youtube, item)
if __name__=="__main__":
    # Kick off the scrape from the seed file, then delete it so a re-run does
    # not repeat the same batch.
    june_bug('start_here')
    os.remove('./amc/start_here.json')
|
<reponame>simplebam/GlideDemo<filename>app/src/main/java/com/yueyue/glidedemo/module/chapter_7/Chapter7_Fragment.java
package com.yueyue.glidedemo.module.chapter_7;
import android.content.Context;
import android.graphics.drawable.GradientDrawable;
import android.os.Build;
import android.support.v4.widget.SwipeRefreshLayout;
import android.widget.Button;
import android.widget.ImageView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.RequestOptions;
import com.yueyue.glidedemo.R;
import com.yueyue.glidedemo.base.App;
import com.yueyue.glidedemo.base.BaseFragment;
import com.yueyue.glidedemo.utils.ConvertUtil;
import butterknife.BindView;
import butterknife.OnClick;
/**
* author : yueyue on 2018/4/21 20:53
* desc : Android图片加载框架最全解析(七),实现带进度的Glide图片加载功能 - CSDN博客
* <p> https://blog.csdn.net/guolin_blog/article/details/78357251</p>
*/
public class Chapter7_Fragment extends BaseFragment {
    private static final String TAG = Chapter7_Fragment.class.getSimpleName();
    @BindView(R.id.swipe_refresh)
    SwipeRefreshLayout mSwipeRefresh;
    @BindView(R.id.iv_image)
    ImageView mIvImage;
    @BindView(R.id.btn_glide_progress)
    Button mBtnGlideProgress;
    // @BindView(R.id.btn_blur_transformations)
    // Button mBtnBlurTransformations;

    /**
     * Loads the demo image with Glide, with the disk cache disabled so the
     * download happens on every click (needed to demonstrate progress).
     * NOTE(review): the refresh spinner is switched off immediately after
     * into() returns, but Glide loads asynchronously — the spinner likely
     * never shows; confirm whether a request listener was intended here.
     */
    @OnClick(R.id.btn_glide_progress)
    void glide3Progress() {
        unsubscribe();
        changeSwipeRefreshState(true);
        // Fall back to the application context if the fragment is detached.
        Context context = getContext() == null ? App.getContext() : getContext();
        RequestOptions options = new RequestOptions()
                .placeholder(R.drawable.placeholder)
                .error(R.drawable.error)
                .diskCacheStrategy(DiskCacheStrategy.NONE);
        String url = "http://guolin.tech/book.png";
        Glide.with(context).load(url).apply(options).into(mIvImage);
        changeSwipeRefreshState(false);
    }

    /** Toggles the pull-to-refresh spinner. */
    private void changeSwipeRefreshState(boolean swipeRefresh) {
        mSwipeRefresh.setRefreshing(swipeRefresh);
    }

    @Override
    protected int initLayoutId() {
        return R.layout.fragment_chapter7;
    }

    @Override
    protected void initViews() {
        initButtonBg();
    }

    /**
     * Builds a rounded grey background for the button, using the deprecated
     * setBackgroundDrawable API on pre-Jelly-Bean devices.
     */
    private void initButtonBg() {
        GradientDrawable gd = new GradientDrawable();
        gd.setCornerRadius(ConvertUtil.dp2px(2));
        gd.setColor(getResources().getColor(R.color.color_d6d7d7));
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1) {
            mBtnGlideProgress.setBackgroundDrawable(gd);
        } else {
            mBtnGlideProgress.setBackground(gd);
        }
    }

    @Override
    protected void initData() {
    }

    /** Factory method used by the host activity. */
    public static Chapter7_Fragment launch() {
        return new Chapter7_Fragment();
    }
}
|
<gh_stars>1-10
/*
Speculative Contacts – a continuous collision engine
Adapted from a C# tutorial by <NAME>
http://www.wildbunny.co.uk/blog/2011/03/25/speculative-contacts-an-continuous-collision-engine-approach-part-1/
*/
! function() {
"use strict";
var screen = ge1doot.screen.init("screen", function () {
PHY2D.deleteStatic();
PHY2D.rectangle(screen.width / 2, screen.height + 10, screen.width, 30, 0, 0);
}, false);
var ctx = screen.ctx, rec;
var pointer = screen.pointer.init({
down: function () {
rec = {x0: pointer.pos.x, y0: pointer.pos.y, x1: pointer.pos.x, y1: pointer.pos.y}
},
move: function () {
if (rec) {
rec.x1 = pointer.pos.x;
rec.y1 = pointer.pos.y;
}
},
up: function () {
PHY2D.up();
if (rec) {
var w = Math.abs(rec.x1 - rec.x0);
var h = Math.abs(rec.y1 - rec.y0)
if (w > 0 && h > 0) {
PHY2D.rectangle(Math.min(rec.x0, rec.x1) + w / 2, Math.min(rec.y0, rec.y1) + h / 2, w, h, Math.sqrt(w * h) / 10, 0);
rec = null;
}
}
}
});
// vectors 2D prototype (does not create/return new objects at runtime)
function Vector (x, y) {
this.x = x || 0.0;
this.y = y || 0.0;
}
Vector.prototype = {
set: function (x, y) {
this.x = x;
this.y = y;
return this;
},
dot: function (v) {
return this.x * v.x + this.y * v.y;
},
lenSqr: function () {
return this.x * this.x + this.y * this.y;
},
transform: function (v, m) {
this.x = m.cos * v.x - m.sin * v.y + m.pos.x;
this.y = m.sin * v.x + m.cos * v.y + m.pos.y;
return this;
},
rotate: function (v, m) {
this.x = m.cos * v.x - m.sin * v.y;
this.y = m.sin * v.x + m.cos * v.y;
return this;
},
normal: function (a, b) {
var x = a.x - b.x,
y = a.y - b.y,
len = Math.sqrt(x * x + y * y);
this.x = -y / len;
this.y = x / len;
return this;
},
project: function (a, b, n) {
var x = a.x - b.x,
y = a.y - b.y,
len = Math.sqrt(x * x + y * y);
return (-y / len) * n.x + (x / len) * n.y;
},
addScale: function (v1, v2, s) {
this.x = v1.x + (v2.x * s);
this.y = v1.y + (v2.y * s);
return this;
},
subScale: function (v1, v2, s) {
this.x = v1.x - (v2.x * s);
this.y = v1.y - (v2.y * s);
return this;
},
add: function (v1, v2) {
this.x = v1.x + v2.x;
this.y = v1.y + v2.y;
return this;
},
sub: function (v1, v2) {
this.x = v1.x - v2.x;
this.y = v1.y - v2.y;
return this;
},
scale: function (v1, s) {
this.x = v1.x * s;
this.y = v1.y * s;
return this;
},
perp: function () {
var x = this.x;
this.x = -this.y;
this.y = x;
return this;
},
inv: function (v1) {
this.x = -v1.x;
this.y = -v1.y;
return this;
},
clamp: function (v, min, max) {
if (v > max) v = max; else if (v < min) v = min;
return v;
},
rotateIntoSpaceOf: function (a, m) {
var dx = -a.x, dy = -a.y;
this.x = dx * m.cos + dy * m.sin;
this.y = dx * -m.sin + dy * m.cos;
return this;
},
// SIMD array vectors
array: function (n, values) {
var array = new Array(n);
array.min = new Vector();
array.max = new Vector();
for (var i = 0; i < n; i++) {
array[i] = new Vector(
values ? values[i * 2 + 0] : 0.0,
values ? values[i * 2 + 1] : 0.0
);
}
array.transform = function (v, m) {
for (var i = 0, len = this.length; i < len; i++) {
var vi = v[i], elem = this[i];
var x = m.cos * vi.x - m.sin * vi.y + m.pos.x;
var y = m.sin * vi.x + m.cos * vi.y + m.pos.y;
if (x < this.min.x) this.min.x = x;
if (y < this.min.y) this.min.y = y;
if (x > this.max.x) this.max.x = x;
if (y > this.max.y) this.max.y = y;
elem.x = x;
elem.y = y;
}
return this;
}
array.rotate = function (v, m) {
for (var i = 0, len = this.length; i < len; i++) {
var vi = v[i], elem = this[i];
elem.x = m.cos * vi.x - m.sin * vi.y;
elem.y = m.sin * vi.x + m.cos * vi.y;
}
return this;
}
array.resetMinmax = function () {
this.min.x = 100000.0;
this.min.y = 100000.0;
this.max.x = -100000.0;
this.max.y = -100000.0;
}
array.normal = function (points) {
for (var i = 0; i < this.length; i++ ) {
this[i].normal(
points[(i + 1) % this.length],
points[i]
);
}
return this;
}
return array;
}
}
// Matrix container
function Matrix () {
this.cos = 0.0;
this.sin = 0.0;
this.pos = new Vector();
this.ang = 0.0;
}
Matrix.prototype = {
set: function (a, x, y) {
this.cos = Math.cos(a);
this.sin = Math.sin(a);
this.ang = a;
this.pos.x = x;
this.pos.y = y;
return this;
},
copy: function (matrix) {
this.cos = matrix.cos;
this.sin = matrix.sin;
this.ang = matrix.ang;
this.pos.x = matrix.pos.x;
this.pos.y = matrix.pos.y;
return this;
},
integrate: function (va, vx, vy, kTimeStep) {
this.pos.x += vx * kTimeStep;
this.pos.y += vy * kTimeStep;
this.ang += va * kTimeStep;
this.cos = Math.cos(this.ang);
this.sin = Math.sin(this.ang);
return this;
}
}
// Main PHY2D code
var PHY2D = function (ctx, pointer, Vector, Matrix) {
var kGravity = 5;
var kTimeStep = 1 / 60;
var kFriction = 0.3;
var objects = [];
var drag = false;
// temporary working vectors (TODO: need to get this managed by the vector module)
var v0 = new Vector();
var v1 = new Vector();
var v2 = new Vector();
var v3 = new Vector();
var v4 = new Vector();
var v5 = new Vector();
// contacts list
var contacts = [];
contacts.index = 0;
contacts.create = function (A, B, pa, pb, nx, ny) {
if (!this[this.index]) this[this.index] = new Contact();
this[this.index++].set(A, B, pa, pb, nx, ny);
}
// AABB container constructor
function AABB () {
this.x = 0.0;
this.y = 0.0;
this.w = 0.0;
this.h = 0.0;
}
// Polygon constructor
function Polygon (x, y, w, h, vertices, invMass, angle) {
this.vel = new Vector();
this.angularVel = 0.0;
this.invMass = invMass;
this.matrix = new Matrix().set(angle,x,y);
this.matrixNextFrame = new Matrix();
this.aabb = new AABB();
this.drag = false;
this.static = false;
this.length = (vertices.length / 2) | 0;
// vertices
this.localSpacePoints = new Vector().array(this.length, vertices);
this.localSpaceNormals = new Vector().array(this.length).normal(this.localSpacePoints);
this.worldSpaceNormals = new Vector().array(this.length);
this.worldSpacePoints = new Vector().array(this.length);
// calculate inverse inertia tensor
this.invI = (invMass > 0) ? 1 / ((1 / invMass) * (w * w + h * h) / 3) : 0
// contact points
this.c1 = new Vector();
this.c0 = new Vector();
// add rigid body
objects.push(this);
}
Polygon.prototype = {
// aabb motion box
motionAABB: function () {
this.worldSpacePoints.resetMinmax();
this.worldSpacePoints.transform(this.localSpacePoints, this.matrixNextFrame);
this.worldSpacePoints.transform(this.localSpacePoints, this.matrix);
this.worldSpaceNormals.rotate(this.localSpaceNormals, this.matrix);
var min = this.worldSpacePoints.min;
var max = this.worldSpacePoints.max;
this.aabb.x = (min.x + max.x) * 0.5;
this.aabb.y = (min.y + max.y) * 0.5;
this.aabb.w = (max.x - min.x) * 0.5;
this.aabb.h = (max.y - min.y) * 0.5;
},
// contact points
contact: function (that) {
var face, vertex, vertexRect, faceRect, fp, va, vb, vc, nx, ny, wsN, wdV0, wdV1, wsV0, wsV1;
// generate contacts for this pair
mostSeparated.set(100000, -1, -1, 0, 100000);
mostPenetrating.set(-100000, -1, -1, 0, 100000);
// face of A, vertices of B
this.featurePairJudgement(that, 2);
// faces of B, vertices of A
that.featurePairJudgement(this, 1);
if (mostSeparated.dist > 0 && mostSeparated.fpc !== 0) {
// objects are separated
face = mostSeparated.edge;
vertex = mostSeparated.closestI;
fp = mostSeparated.fpc;
} else if (mostPenetrating.dist <= 0) {
// objects are penetrating
face = mostPenetrating.edge;
vertex = mostPenetrating.closestI;
fp = mostPenetrating.fpc;
}
if (fp === 1) vertexRect = this, faceRect = that; else vertexRect = that, faceRect = this;
// world space vertex
wsN = faceRect.worldSpaceNormals[face];
// other vertex adjacent which makes most parallel normal with the collision normal
va = vertexRect.worldSpacePoints[(vertex - 1 + vertexRect.length) % vertexRect.length];
vb = vertexRect.worldSpacePoints[vertex];
vc = vertexRect.worldSpacePoints[(vertex + 1) % vertexRect.length];
if (v0.project(vb, va, wsN) < v1.project(vc, vb, wsN)) {
wdV0 = va;
wdV1 = vb;
} else {
wdV0 = vb;
wdV1 = vc;
}
// world space edge
wsV0 = faceRect.worldSpacePoints[face];
wsV1 = faceRect.worldSpacePoints[(face + 1) % faceRect.length];
// form contact
if (fp === 1) {
// project vertex onto edge
this.projectPointOntoEdge(wsV0, wsV1, wdV0, wdV1);
that.projectPointOntoEdge(wdV1, wdV0, wsV0, wsV1);
// normal is negated because it always needs to point from A->B
nx = -wsN.x;
ny = -wsN.y;
} else {
this.projectPointOntoEdge(wdV1, wdV0, wsV0, wsV1);
that.projectPointOntoEdge(wsV0, wsV1, wdV0, wdV1);
nx = wsN.x;
ny = wsN.y;
}
// create contacts
contacts.create(this, that, this.c0, that.c0, nx, ny);
contacts.create(this, that, this.c1, that.c1, nx, ny);
},
featurePairJudgement: function (that, fpc) {
var wsN, closestI, closest, dist;
for (var edge = 0; edge < this.length; edge++) {
// get support vertices
wsN = this.worldSpaceNormals[edge];
// rotate into RigidBody space
v5.rotateIntoSpaceOf(wsN, that.matrix);
var closestI = -1, closestD = -100000;
// Get the vertex most in the direction of the given vector
for (var i = 0; i < that.length; i++) {
var d = v5.dot(that.localSpacePoints[i]);
if (d > closestD) {
closestD = d;
closestI = i;
}
}
var closest = that.worldSpacePoints[closestI];
v0.sub(closest, this.worldSpacePoints[edge]);
// distance from origin to face
var dist = v0.dot(wsN);
if (dist > 0) {
// recompute distance to clamped edge
v1.sub(closest, this.worldSpacePoints[(edge + 1) % this.length]);
// project onto minkowski edge
dist = this.projectPointOntoEdgeZero(v0, v1).lenSqr();
// track separation
if (dist < mostSeparated.dist) {
mostSeparated.set(dist, closestI, edge, fpc);
}
} else {
// track penetration
if (dist > mostPenetrating.dist) {
mostPenetrating.set(dist, closestI, edge, fpc);
}
}
}
return true;
},
projectPointOntoEdge: function (p0, p1, e0, e1) {
var l = v2.sub(e1, e0).lenSqr() + 0.0000001;
this.c0.addScale(e0, v2, v3.clamp(v3.sub(p0, e0).dot(v2) / l, 0, 1));
this.c1.addScale(e0, v2, v3.clamp(v3.sub(p1, e0).dot(v2) / l, 0, 1));
},
projectPointOntoEdgeZero: function (e0, e1) {
var l = v2.sub(e1, e0).lenSqr() + 0.0000001;
return this.c0.addScale(e0, v2, v3.clamp(v3.inv(e0).dot(v2) / l, 0, 1));
},
// integration
integrate: function () {
if (this.drag) {
// dragging object
this.vel.x = (pointer.pos.x - this.matrix.pos.x) * 10;
this.vel.y = (pointer.pos.y - this.matrix.pos.y) * 10;
} else {
// gravity
if (this.invMass > 0) this.vel.y += kGravity;
}
// update position
this.matrix.integrate(this.angularVel, this.vel.x, this.vel.y, kTimeStep);
this.matrixNextFrame.copy(this.matrix).integrate(this.angularVel, this.vel.x, this.vel.y, kTimeStep);
// compute motion AABB
if (!this.static) this.motionAABB();
else {
if (this.invMass === 0) {
this.static = true;
this.motionAABB();
}
}
},
draw: function() {
ctx.beginPath();
for (var j = 0; j < this.length; j++ ) {
var a = this.worldSpacePoints[j];
ctx.lineTo(a.x, a.y);
}
ctx.closePath();
ctx.fillStyle = "rgb(255,255,255)";
ctx.fill();
/*if (pointer.active && !drag && this.invMass) {
if (ctx.isPointInPath(pointer.pos.x, pointer.pos.y)) {
this.drag = true;
drag = true;
}
}*/
}
}
// feature pair container
function FeaturePair () {
this.dist = 0;
this.closestI = 0;
this.edge = 0;
this.fpc = 0;
}
FeaturePair.prototype.set = function (dist, closestI, edge, fpc) {
this.dist = dist;
this.closestI = closestI;
this.edge = edge;
this.fpc = fpc;
}
var mostSeparated = new FeaturePair();
var mostPenetrating = new FeaturePair();
// contacts constructor
function Contact () {
this.a = null;
this.b = null;
this.normal = new Vector();
this.normalPerp = new Vector();
this.ra = new Vector();
this.rb = new Vector();
this.dist = 0;
this.impulseN = 0;
this.impulseT = 0;
this.invDenom = 0;
this.invDenomTan = 0;
}
Contact.prototype = {
// reusing existing contact objects
set: function (A, B, pa, pb, nx, ny) {
var ran, rbn;
this.a = A;
this.b = B;
this.normal.set(nx, ny);
this.normalPerp.set(-ny, nx);
this.dist = v1.sub(pb, pa).dot(this.normal);
this.impulseN = 0;
this.impulseT = 0;
// calculate radius arms
this.ra.sub(pa, A.matrix.pos).perp();
this.rb.sub(pb, B.matrix.pos).perp();
// compute denominator in impulse equation
ran = this.ra.dot(this.normal);
rbn = this.rb.dot(this.normal);
this.invDenom = 1 / (A.invMass + B.invMass + (ran * ran * A.invI) + (rbn * rbn * B.invI));
ran = this.ra.dot(this.normalPerp);
rbn = this.rb.dot(this.normalPerp);
this.invDenomTan = 1 / (A.invMass + B.invMass + (ran * ran * A.invI) + (rbn * rbn * B.invI));
},
applyImpulse: function (imp) {
// linear
this.a.vel.addScale(this.a.vel, imp, this.a.invMass);
this.b.vel.subScale(this.b.vel, imp, this.b.invMass);
// angular
this.a.angularVel += imp.dot(this.ra) * this.a.invI;
this.b.angularVel -= imp.dot(this.rb) * this.b.invI;
},
// speculative contact solver
solve: function () {
var newImpulse, absMag, dv = v1;
// get all of relative normal velocity
dv.sub(
v2.addScale(this.b.vel, this.rb, this.b.angularVel),
v3.addScale(this.a.vel, this.ra, this.a.angularVel)
);
// accumulated impulse
newImpulse = (dv.dot(this.normal) + this.dist / kTimeStep) * this.invDenom + this.impulseN;
// push only
if (newImpulse > 0) newImpulse = 0;
// apply impulse
this.applyImpulse(v2.scale(this.normal, newImpulse - this.impulseN));
this.impulseN = newImpulse;
// friction
absMag = Math.abs(this.impulseN) * kFriction;
newImpulse = v2.clamp(dv.dot(this.normalPerp) * this.invDenomTan + this.impulseT, -absMag, absMag);
// apply friction impulse
this.applyImpulse(v3.scale(this.normalPerp, newImpulse - this.impulseT));
this.impulseT = newImpulse;
}
}
// main render loop
function render () {
// brute force aabb broadphase
contacts.index = 0;
for (var i = 0, len = objects.length; i < len - 1; i++) {
var A = objects[i];
for (var j = i + 1; j < len; j++) {
var B = objects[j];
if (A.invMass || B.invMass) {
var a = A.aabb, b = B.aabb;
if (
Math.abs(b.x - a.x) - (a.w + b.w) < 0 &&
Math.abs(b.y - a.y) - (a.h + b.h) < 0
) A.contact(B);
}
}
}
// solver loop
var len = contacts.index;
for (var j = 0; j < 5; j++) {
for (var i = 0; i < len; i++) {
contacts[i].solve();
}
}
// integration loop
for (var i = 0, len = objects.length; i < len; i++) {
objects[i].integrate();
}
// draw
for (var i = 0; i < len; i++) {
var rb = objects[i];
rb.draw();
// delete lost bodies
if (rb.matrix.pos.y > screen.height * 2) {
objects.splice(i, 1);
len--;
i--;
}
}
}
	return {
		// public interface
		render: render,
		// Release any dragged body (wired to the pointer-up event).
		up: function () {
			for (var i = 0; i < objects.length; i++) objects[i].drag = false;
			drag = false;
		},
		// create new rectangles centered at (x, y); mass 0 makes the body static
		rectangle : function (x, y, w, h, mass, angle) {
			var vertices = [
				w/2, -h/2,
				-w/2, -h/2,
				-w/2, h/2,
				w/2, h/2
			];
			var invMass = mass ? 1 / mass : 0;
			return new Polygon(x, y, w, h, vertices, invMass, angle);
		},
		// delete static objects (invMass 0), e.g. before a resize rebuilds
		// the ground rectangle
		deleteStatic: function () {
			var k = objects.length;
			while (k--) {
				var p = objects[k];
				if (!p.invMass) {
					objects.splice(k, 1);
				}
			}
		}
	}
} (ctx, pointer, Vector, Matrix); // injection
// create the pile 'O boxes
screen.resize();
var w = screen.width / 20;
for (var i = 0; i < 10; i++) {
for (var j = 0; j < 10; j++) {
PHY2D.rectangle(0.5 * w + w * 5 + i * w, j * w, w * 0.75, w * 0.75, 1, 0);
}
}
// ==== main loop ====
function run() {
requestAnimationFrame(run);
ctx.clearRect(0, 0, screen.width, screen.height);
if (rec) {
ctx.beginPath();
ctx.moveTo(rec.x0, rec.y0);
ctx.lineTo(rec.x1, rec.y0);
ctx.lineTo(rec.x1, rec.y1);
ctx.lineTo(rec.x0, rec.y1);
ctx.closePath();
ctx.fillStyle = "rgb(128,128,128)";
ctx.fill();
}
PHY2D.render();
}
// ==== start animation ====
requestAnimationFrame(run);
}(); |
#!/bin/sh
# CocoaPods "Copy Pods Resources" build phase.
# NOTE(review): the shebang says /bin/sh but the script uses bash-isms
# (`function`, `[[ ]]`, `set -o pipefail`, the ERR trap, arrays); it relies on
# Xcode invoking it under a bash-compatible shell — confirm before porting.
set -e
set -u
set -o pipefail
# Print the failing line number when any command errors.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
  # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
  # resources to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain-file resources handed to a single rsync at the end.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Map TARGETED_DEVICE_FAMILY to the ibtool/actool --target-device flags.
case "${TARGETED_DEVICE_FAMILY:-}" in
  1,2)
    TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
    ;;
  1)
    TARGET_DEVICE_ARGS="--target-device iphone"
    ;;
  2)
    TARGET_DEVICE_ARGS="--target-device ipad"
    ;;
  3)
    TARGET_DEVICE_ARGS="--target-device tv"
    ;;
  4)
    TARGET_DEVICE_ARGS="--target-device watch"
    ;;
  *)
    TARGET_DEVICE_ARGS="--target-device mac"
    ;;
esac
# Copy (or compile) one resource into the product, dispatching on extension:
#  - storyboards/xibs are compiled with ibtool
#  - frameworks are rsync'd into the frameworks folder
#  - Core Data models are compiled with momc/mapc
#  - asset catalogs are queued for a single actool invocation at the end
#  - everything else is appended to the rsync manifest
# Each branch echoes the command first (|| true keeps set -e happy) for logs.
install_resource()
{
  if [[ "$1" = /* ]] ; then
    RESOURCE_PATH="$1"
  else
    RESOURCE_PATH="${PODS_ROOT}/$1"
  fi
  if [[ ! -e "$RESOURCE_PATH" ]] ; then
    cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
    exit 1
  fi
  case $RESOURCE_PATH in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.framework)
      echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
      xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    *)
      echo "$RESOURCE_PATH" || true
      echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# GoogleMaps ships its resource bundle inside the framework; copy it for every
# build configuration that links the pod.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "${PODS_ROOT}/GoogleMaps/Maps/Frameworks/GoogleMaps.framework/Resources/GoogleMaps.bundle"
fi
if [[ "$CONFIGURATION" == "Beta" ]]; then
  install_resource "${PODS_ROOT}/GoogleMaps/Maps/Frameworks/GoogleMaps.framework/Resources/GoogleMaps.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "${PODS_ROOT}/GoogleMaps/Maps/Frameworks/GoogleMaps.framework/Resources/GoogleMaps.bundle"
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find -L "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # BUG FIX: the glob star must sit OUTSIDE the quotes. The original
    # `"${PODS_ROOT}*"` compared against a literal asterisk, so the filter
    # never matched and Pods-owned catalogs were added (and compiled) twice.
    if [[ $line != "${PODS_ROOT}"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"
  if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  else
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
  fi
fi
|
/** Dictionary interface, representing and old school word-lookup dictionary */
package spelling;
/** @author Christine */
public interface Dictionary {
    /**
     * Add this word to the dictionary.
     *
     * @param word The word to add
     * @return true if the word was added to the dictionary (it wasn't already there).
     */
    public abstract boolean addWord(String word);

    /**
     * Is this a word according to this dictionary?
     *
     * @param s the candidate string to look up
     * @return true if s is a word in this dictionary
     */
    public abstract boolean isWord(String s);

    /**
     * Return the number of words in the dictionary.
     *
     * @return the current word count
     */
    public abstract int size();
}
|
#include <stdio.h>
#include <stdlib.h>
/* Hook defined elsewhere; invoked after every reported error so the
 * harness can restore its state and continue. */
extern void reset();
void __VERIFIER_error(int);
/*
 * Verifier error stub: prints "error_<i> " (trailing space, no newline) to
 * stderr, then calls reset().
 */
void __VERIFIER_error(int i) {
	fprintf(stderr, "error_%d ", i);
	reset();
}
|
<reponame>tx0c/logbook
import { utils } from "ethers";
// Human-readable ABI fragments for the Logbook contract (an ERC-721-style
// token: Transfer/Approval events, ownerOf/tokenURI, plus logbook-specific
// publish/fork/donate mechanics).  Grouped: constructor, errors, events,
// functions — each entry uses ethers' human-readable ABI syntax.
export const logbookABI = [
  "constructor(string name_, string symbol_)",
  // --- custom errors ---
  "error InsufficientAmount(uint256 available, uint256 required)",
  "error InsufficientBalance(uint256 available, uint256 required)",
  "error InsufficientLogs(uint32 maxEndAt)",
  "error InvalidBPS(uint256 min, uint256 max)",
  "error InvalidTokenId(uint256 min, uint256 max)",
  "error PublicSaleNotStarted()",
  "error TokenNotExists()",
  "error Unauthorized()",
  "error ZeroAmount()",
  // --- events ---
  "event Approval(address indexed owner, address indexed approved, uint256 indexed tokenId)",
  "event ApprovalForAll(address indexed owner, address indexed operator, bool approved)",
  "event Content(address indexed author, bytes32 indexed contentHash, string content)",
  "event Donate(uint256 indexed tokenId, address indexed donor, uint256 amount)",
  "event Fork(uint256 indexed tokenId, uint256 indexed newTokenId, address indexed owner, uint32 end, uint256 amount)",
  "event OwnershipTransferred(address indexed previousOwner, address indexed newOwner)",
  "event Pay(uint256 indexed tokenId, address indexed sender, address indexed recipient, uint256 amount, uint8 purpose)",
  "event Publish(uint256 indexed tokenId, bytes32 indexed contentHash)",
  "event SetDescription(uint256 indexed tokenId, string description)",
  "event SetForkPrice(uint256 indexed tokenId, uint256 amount)",
  "event SetTitle(uint256 indexed tokenId, string title)",
  "event Transfer(address indexed from, address indexed to, uint256 indexed tokenId)",
  "event Withdraw(address indexed account, uint256 amount)",
  // --- functions ---
  "function approve(address to, uint256 tokenId)",
  "function balanceOf(address owner) view returns (uint256)",
  "function claim(address to_, uint256 logrsId_)",
  "function donate(uint256 tokenId_) payable",
  "function donateWithCommission(uint256 tokenId_, address commission_, uint256 commissionBPS_) payable",
  "function fork(uint256 tokenId_, uint32 endAt_) payable returns (uint256 tokenId)",
  "function forkWithCommission(uint256 tokenId_, uint32 endAt_, address commission_, uint256 commissionBPS_) payable returns (uint256 tokenId)",
  "function getApproved(uint256 tokenId) view returns (address)",
  "function getBalance(address account_) view returns (uint256 amount)",
  "function getLogbook(uint256 tokenId_) view returns (tuple(uint32 endAt, uint32 logCount, uint32 transferCount, uint160 createdAt, uint256 from, uint256 forkPrice, bytes32[] contentHashes) book)",
  "function getLogs(uint256 tokenId_) view returns (bytes32[] contentHashes, address[] authors)",
  "function isApprovedForAll(address owner, address operator) view returns (bool)",
  "function logs(bytes32) view returns (address author, uint256 tokenId)",
  "function multicall(bytes[] data) payable returns (bytes[] results)",
  "function name() view returns (string)",
  "function owner() view returns (address)",
  "function ownerOf(uint256 tokenId) view returns (address)",
  "function publicSale() view returns (uint256)",
  "function publicSaleMint() payable returns (uint256 tokenId)",
  "function publicSalePrice() view returns (uint256)",
  "function publish(uint256 tokenId_, string content_)",
  "function renounceOwnership()",
  "function safeTransferFrom(address from, address to, uint256 tokenId)",
  "function safeTransferFrom(address from, address to, uint256 tokenId, bytes _data)",
  "function setApprovalForAll(address operator, bool approved)",
  "function setDescription(uint256 tokenId_, string description_)",
  "function setForkPrice(uint256 tokenId_, uint256 amount_)",
  "function setPublicSalePrice(uint256 price_)",
  "function setTitle(uint256 tokenId_, string title_)",
  "function supportsInterface(bytes4 interfaceId) view returns (bool)",
  "function symbol() view returns (string)",
  "function tokenURI(uint256 tokenId_) view returns (string)",
  "function transferFrom(address from, address to, uint256 tokenId)",
  "function transferOwnership(address newOwner)",
  "function turnOffPublicSale()",
  "function turnOnPublicSale()",
  "function withdraw()",
];
// Pre-built Interface for encoding calls / decoding logs against the ABI above.
export const logbookInterface = new utils.Interface(logbookABI);
|
#include <stdio.h>
#include <math.h>

/*
 * Prints all Armstrong numbers (three-digit sense: a number equal to the sum
 * of the cubes of its decimal digits) in the range [0, 1000].
 */
int main()
{
    int num, dig, temp, sum;
    printf("Armstrong numbers from 0 to 1000:\n");
    for (num = 0; num <= 1000; num++) {
        temp = num;
        sum = 0; /* reset per candidate (was reset at the loop tail before) */
        while (temp != 0) {
            dig = temp % 10;
            /* Bug fix: pow(dig, 3) computes in floating point; on some libm
             * implementations it returns e.g. 124.999... which truncates to
             * the wrong integer when added to `sum`.  Exact integer
             * multiplication avoids that entirely. */
            sum = sum + dig * dig * dig;
            temp = temp / 10;
        }
        if (sum == num)
            printf("%d ", num);
    }
    return 0;
}
<reponame>StakedBlockchain/0x-starter-project
import Web3 from 'web3';

// Wrap the provider injected into the page, but only when it is MetaMask.
// Remains `null` when a non-MetaMask provider is injected.
let web3 = null;

if (typeof window.web3 === 'undefined') {
  // No injected provider at all — prompt the user to install MetaMask.
  alert('MetaMaskをインストールしてください');
} else if (window.web3.currentProvider.isMetaMask === true) {
  web3 = new Web3(window.web3.currentProvider);
}

export default web3;
import React, {Component} from 'react'
import {createStore} from "redux";
import { createRenderer } from 'react-test-renderer/shallow';
import { createAPI, reducer, trace } from '../src';
const renderer = createRenderer();
const apiSpec = {
redactions: {
increment: (amount) => ({
count: {set: (state) => state.count + (amount || 1)}
})
},
selectors: {
count: (state) => state.count
}
}
//trace.log = (X)=>{console.log(X)};
describe('Component Testing', () => {
it('works end to end in a function', () => {
const api = createAPI(apiSpec).mount(createStore(reducer, {count: 34}));
const renderCount = 0;
const Counter = () => {
const {count, increment} = api({}, Counter);
return (
<button onClick={()=>increment(2)}>{count}</button>
)
}
renderer.render(<Counter />);
let output = renderer.getRenderOutput();
expect(output.props.children).toBe(34);
output.props.onClick({});
output = renderer.getRenderOutput();
expect(output.props.children).toBe(36);
})
it('works end to end in a class', () => {
const api = createAPI(apiSpec).mount(createStore(reducer, {count: 34}));
const renderCount = 0;
class Counter extends Component {
render () {
const {count, increment} = api({}, this);
return (
<button onClick={()=>increment(2)}>{count}</button>
)
}
}
renderer.render(<Counter />);
let output = renderer.getRenderOutput();
expect(output.props.children).toBe(34);
output.props.onClick({});
output = renderer.getRenderOutput();
expect(output.props.children).toBe(36);
})
})
|
import { expect } from "chai";
import { Constants } from "discord.js";
import { SinonSandbox, createSandbox } from "sinon";
import { CustomMocks } from "@lambocreeper/mock-discord.js";
import EventHandler from "../../../src/abstracts/EventHandler";
import LogMessageSingleDeleteHandler from "../../../src/event/handlers/LogMessageSingleDeleteHandler";

// Fixed suite/test names: they previously said "LogMessageDeleteHandler" and
// "MESSAGE_Delete", neither of which matches the class or event under test.
describe("LogMessageSingleDeleteHandler", () => {
	describe("constructor()", () => {
		it("creates a handler for MESSAGE_DELETE", () => {
			const handler = new LogMessageSingleDeleteHandler();

			expect(handler.getEvent()).to.equal(Constants.Events.MESSAGE_DELETE);
		});
	});

	describe("handle()", () => {
		let sandbox: SinonSandbox;
		let handler: EventHandler;

		beforeEach(() => {
			sandbox = createSandbox();
			handler = new LogMessageSingleDeleteHandler();
		});

		it("sends a message in logs channel when a message is deleted", async () => {
			const message = CustomMocks.getMessage({
				content: "message content"
			});
			// Stubbing channel lookup is enough: the handler resolves the logs
			// channel via guild.channels.cache.find before sending.
			const messageMock = sandbox.stub(message.guild.channels.cache, "find");

			await handler.handle(message);

			expect(messageMock.calledOnce).to.be.true;
		});

		it("does not send a message in logs channel when message is deleted but content is empty - only image", async () => {
			const message = CustomMocks.getMessage({
				content: ""
			});
			const messageMock = sandbox.stub(message.guild.channels.cache, "find");

			await handler.handle(message);

			expect(messageMock.calledOnce).to.be.false;
		});

		afterEach(() => {
			sandbox.restore();
		});
	});
});
|
<filename>src/test/java/org/thymeleaf/engine/ElementProcessorIteratorTest.java
/*
* =============================================================================
*
* Copyright (c) 2011-2016, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.engine;
import java.util.Collections;
import java.util.Set;
import org.junit.Assert;
import org.junit.Test;
import org.thymeleaf.IEngineConfiguration;
import org.thymeleaf.context.ITemplateContext;
import org.thymeleaf.context.TestTemplateEngineConfigurationBuilder;
import org.thymeleaf.dialect.IDialect;
import org.thymeleaf.dialect.IProcessorDialect;
import org.thymeleaf.model.IOpenElementTag;
import org.thymeleaf.templatemode.TemplateMode;
import org.thymeleaf.templateparser.markup.HTMLTemplateParser;
import org.thymeleaf.templateparser.markup.XMLTemplateParser;
import org.thymeleaf.templateresource.StringTemplateResource;
/**
 * Tests for {@code ElementProcessorIterator}: verifies that element processors
 * are visited in precedence order, and that the iterator recomputes its
 * pending-processor set when attributes are added to or removed from the tag
 * mid-iteration.
 */
public final class ElementProcessorIteratorTest {
    // Shared parsers (pool size 2, buffer 4096) used by the compute*Tag helpers.
    private static final HTMLTemplateParser HTML_PARSER = new HTMLTemplateParser(2, 4096);
    // NOTE(review): XML_PARSER is only reachable via computeXmlTag, which no
    // visible test calls — presumably kept for symmetry / future tests.
    private static final XMLTemplateParser XML_PARSER = new XMLTemplateParser(2, 4096);
    // Processors are returned lowest-precedence-number first (5 before 10).
    @Test
    public void testProcessorIteration01() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final ElementProcessorIterator iterator = handler.iter;
        final OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(handler.tag).toString());
        Assert.assertEquals("N-ELEMENT-10-null-{th:src,data-th-src}", iterator.next(handler.tag).toString());
        Assert.assertNull(iterator.next(tag));
    }
    // Adding an attribute mid-iteration makes its processor eligible; a
    // precedence-15 processor still comes after the already-pending 10.
    @Test
    public void testProcessorIteration02() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-15-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-10-null-{th:src,data-th-src}", iterator.next(tag).toString());
        Assert.assertEquals("N-ELEMENT-15-null-{th:one,data-th-one}", iterator.next(tag).toString());
        Assert.assertNull(iterator.next(tag));
    }
    // A processor added mid-iteration with precedence 7 slots in before the 10.
    @Test
    public void testProcessorIteration03() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-7-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-7-null-{th:one,data-th-one}", iterator.next(tag).toString());
        Assert.assertEquals("N-ELEMENT-10-null-{th:src,data-th-src}", iterator.next(tag).toString());
        Assert.assertNull(iterator.next(tag));
    }
    // A precedence-2 processor added after the 5 already executed is still run
    // next (lower precedence numbers never re-run processors already executed).
    @Test
    public void testProcessorIteration04() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        Assert.assertEquals("N-ELEMENT-10-null-{th:src,data-th-src}", iterator.next(tag).toString());
        Assert.assertNull(iterator.next(tag));
    }
    // Attribute added before any iteration: full precedence order 2, 5, 10.
    @Test
    public void testProcessorIteration05() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        Assert.assertEquals("N-ELEMENT-10-null-{th:src,data-th-src}", iterator.next(tag).toString());
        Assert.assertNull(iterator.next(tag));
    }
    // Removing the only matched attribute before iterating yields nothing.
    @Test
    public void testProcessorIteration06() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        tag = tag.removeAttribute("th:src");
        Assert.assertNull(iterator.next(tag));
    }
    @Test
    public void testProcessorIteration07() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        tag = tag.removeAttribute("th:src");
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        Assert.assertNull(iterator.next(tag));
    }
    // Removing via the data-th-* synonym must also drop the src processors.
    @Test
    public void testProcessorIteration08() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        tag = tag.removeAttribute("data-th-src");
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        Assert.assertNull(iterator.next(tag));
    }
    @Test
    public void testProcessorIteration09() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        tag = tag.removeAttribute("th:src");
        Assert.assertNull(iterator.next(tag));
    }
    @Test
    public void testProcessorIteration10() {
        // This one checks that iteration also works OK for tags using a non-standard implementation
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        tag = tag.removeAttribute("th:src");
        Assert.assertNull(iterator.next(tag));
    }
    // From here on, the template contains an earlier tag (<div>/<p>) so the
    // handler's iterator is left half-consumed before the target <a> arrives
    // (see handleOpenElement below) — iteration must still work correctly.
    @Test
    public void testProcessorIteration11() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<div class='one'><a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        tag = tag.removeAttribute("th:src");
        Assert.assertNull(iterator.next(tag));
    }
    @Test
    public void testProcessorIteration12() {
        // This one checks that iteration also works OK for tags using a non-standard implementation
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-null-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<div class='one'><a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-null-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        tag = tag.removeAttribute("th:src");
        Assert.assertNull(iterator.next(tag));
    }
    // "*a" restricts the precedence-5 processor to <a> elements; the expected
    // toString shows the element restriction as "{a}".
    @Test
    public void testProcessorIteration13() {
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-*a-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<div class='one'><p th:src='uuuh'><a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-{a}-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        tag = tag.removeAttribute("th:src");
        Assert.assertNull(iterator.next(tag));
    }
    @Test
    public void testProcessorIteration14() {
        // This one checks that iteration also works OK for tags using a non-standard implementation
        final IProcessorDialect dialect =
                ProcessorAggregationTestDialect.buildHTMLDialect("standard", "th",
                        "N-ELEMENT-10-null-src,N-ELEMENT-5-*a-src,N-ELEMENT-2-null-one");
        final TagObtentionTemplateHandler handler = computeHtmlTag("<div class='one'><p th:src='uuuh'><a th:src='hello'>", dialect);
        final AttributeDefinitions attributeDefinitions = handler.attributeDefinitions;
        final ElementProcessorIterator iterator = handler.iter;
        OpenElementTag tag = handler.tag;
        Assert.assertEquals("N-ELEMENT-5-{a}-{th:src,data-th-src}", iterator.next(tag).toString());
        tag = tag.setAttribute(attributeDefinitions, null, "th:one", "somevalue", null);
        Assert.assertEquals("N-ELEMENT-2-null-{th:one,data-th-one}", iterator.next(tag).toString());
        tag = tag.removeAttribute("th:src");
        Assert.assertNull(iterator.next(tag));
    }
    /** Parses {@code input} as HTML with a single dialect and returns the capturing handler. */
    private static TagObtentionTemplateHandler computeHtmlTag(final String input, final IDialect dialect) {
        return computeHtmlTag(input, Collections.singleton(dialect));
    }
    private static TagObtentionTemplateHandler computeHtmlTag(final String input, final Set<IDialect> dialects) {
        final String templateName = "test";
        final TagObtentionTemplateHandler handler = new TagObtentionTemplateHandler();
        final IEngineConfiguration templateEngineContext = TestTemplateEngineConfigurationBuilder.build(dialects);
        handler.attributeDefinitions = templateEngineContext.getAttributeDefinitions();
        HTML_PARSER.parseStandalone(templateEngineContext, templateName, templateName, null, new StringTemplateResource(input), TemplateMode.HTML, false, handler);
        return handler;
    }
    /** XML counterpart of {@link #computeHtmlTag} (currently unused by the tests above). */
    private static TagObtentionTemplateHandler computeXmlTag(final String input, final IDialect dialect) {
        return computeXmlTag(input, Collections.singleton(dialect));
    }
    private static TagObtentionTemplateHandler computeXmlTag(final String input, final Set<IDialect> dialects) {
        final String templateName = "test";
        final TagObtentionTemplateHandler handler = new TagObtentionTemplateHandler();
        final IEngineConfiguration templateEngineContext = TestTemplateEngineConfigurationBuilder.build(dialects);
        handler.attributeDefinitions = templateEngineContext.getAttributeDefinitions();
        XML_PARSER.parseStandalone(templateEngineContext, templateName, templateName, null, new StringTemplateResource(input), TemplateMode.XML, false, handler);
        return handler;
    }
    /**
     * Template handler that captures the LAST open tag seen plus a processor
     * iterator over it, so tests can drive iteration manually afterwards.
     */
    private static class TagObtentionTemplateHandler extends AbstractTemplateHandler {
        AttributeDefinitions attributeDefinitions;
        OpenElementTag tag;
        ElementProcessorIterator iter = new ElementProcessorIterator();
        TagObtentionTemplateHandler() {
            super();
        }
        @Override
        public void setContext(final ITemplateContext context) {
            super.setContext(context);
            this.attributeDefinitions = context.getConfiguration().getAttributeDefinitions();
        }
        @Override
        public void handleOpenElement(final IOpenElementTag openElementTag) {
            final OpenElementTag oetag = (OpenElementTag) openElementTag;
            if (this.tag != null) {
                this.iter.next(this.tag); // Force the creation and computation of the iterator, and leave it not-completed for more thorough testing
            }
            this.tag = oetag;
            this.iter.reset();
        }
    }
}
|
#!/bin/bash
# Bug fix: the shebang was #!/bin/sh, but the script uses bash-only features
# (`source` and the ${var:0:1} substring expansion), which fail under a POSIX sh.
#
# Deletes (in parallel) every Cloud Foundry service instance listed in
# PCFServices.list, then polls until no deletion is still in progress.
source ./commons.sh
IFS=' '
delete()
{
	# -f: delete without confirmation prompt.
	cf delete-service -f "$1"
}
file="./PCFServices.list"
# Each line: <service> <plan> <service-instance>; lines starting with # are skipped.
while read service plan si
do
	if [ ! "${service:0:1}" == "#" ]
	then
		delete "$si" &
	fi
done < "$file"
# Wait for all backgrounded delete requests to be submitted.
wait
IN_PROGRESS=$(cf s | grep progress | wc -l | xargs )
while [ "$IN_PROGRESS" -gt 0 ]
do
	echo "Pausing to allow Services to Delete - Remaining services to delete $IN_PROGRESS"
	sleep 10
	IN_PROGRESS=$(cf s | grep progress | wc -l | xargs )
done
summaryOfServices
exit 0
|
// MT19937 Mersenne Twister PRNG, ported to JavaScript.  All bit constants are
// written in decimal: MATRIX_A = 0x9908b0df, UPPER_MASK = 0x80000000,
// LOWER_MASK = 0x7fffffff.  `>>> 0` coerces results to unsigned 32-bit.
// Seeds from the current time when no seed is given.
var MersenneTwister = function (a) {
    if (a == undefined) {
        a = new Date().getTime()
    }
    this.N = 624;
    this.M = 397;
    this.MATRIX_A = 2567483615;
    this.UPPER_MASK = 2147483648;
    this.LOWER_MASK = 2147483647;
    this.mt = new Array(this.N);    // state vector
    this.mti = this.N + 1;          // mti == N+1 means mt[] is uninitialised
    this.init_genrand(a)
};
// Initialise the state vector from a single 32-bit seed.
MersenneTwister.prototype.init_genrand = function (a) {
    this.mt[0] = a >>> 0;
    for (this.mti = 1; this.mti < this.N; this.mti++) {
        var a = this.mt[this.mti - 1] ^ (this.mt[this.mti - 1] >>> 30);
        // Split 1812433253 multiply into high/low 16-bit halves to stay exact
        // within JS double-precision arithmetic.
        this.mt[this.mti] = (((((a & 4294901760) >>> 16) * 1812433253) << 16) + (a & 65535) * 1812433253) + this.mti;
        this.mt[this.mti] >>>= 0
    }
};
// Initialise from an array seed `a` of length `f` (init_by_array in the
// reference C implementation).
MersenneTwister.prototype.init_by_array = function (a, f) {
    var d, c, b;
    this.init_genrand(19650218);
    d = 1;
    c = 0;
    b = (this.N > f ? this.N : f);
    for (; b; b--) {
        var e = this.mt[d - 1] ^ (this.mt[d - 1] >>> 30);
        this.mt[d] = (this.mt[d] ^ (((((e & 4294901760) >>> 16) * 1664525) << 16) + ((e & 65535) * 1664525))) + a[c] + c;
        this.mt[d] >>>= 0;
        d++;
        c++;
        if (d >= this.N) {
            this.mt[0] = this.mt[this.N - 1];
            d = 1
        }
        if (c >= f) {
            c = 0
        }
    }
    for (b = this.N - 1; b; b--) {
        var e = this.mt[d - 1] ^ (this.mt[d - 1] >>> 30);
        this.mt[d] = (this.mt[d] ^ (((((e & 4294901760) >>> 16) * 1566083941) << 16) + (e & 65535) * 1566083941)) - d;
        this.mt[d] >>>= 0;
        d++;
        if (d >= this.N) {
            this.mt[0] = this.mt[this.N - 1];
            d = 1
        }
    }
    // MSB set guarantees a non-zero initial state.
    this.mt[0] = 2147483648
};
// Core generator: returns a uniformly distributed unsigned 32-bit integer.
MersenneTwister.prototype.genrand_int32 = function () {
    var c;
    var b = new Array(0, this.MATRIX_A);   // mag01 lookup: x * MATRIX_A for x in {0,1}
    if (this.mti >= this.N) {
        // Regenerate the whole state block of N words at once.
        var a;
        if (this.mti == this.N + 1) {
            this.init_genrand(5489)        // default seed, as in the reference C code
        }
        for (a = 0; a < this.N - this.M; a++) {
            c = (this.mt[a] & this.UPPER_MASK) | (this.mt[a + 1] & this.LOWER_MASK);
            this.mt[a] = this.mt[a + this.M] ^ (c >>> 1) ^ b[c & 1]
        }
        for (; a < this.N - 1; a++) {
            c = (this.mt[a] & this.UPPER_MASK) | (this.mt[a + 1] & this.LOWER_MASK);
            this.mt[a] = this.mt[a + (this.M - this.N)] ^ (c >>> 1) ^ b[c & 1]
        }
        c = (this.mt[this.N - 1] & this.UPPER_MASK) | (this.mt[0] & this.LOWER_MASK);
        this.mt[this.N - 1] = this.mt[this.M - 1] ^ (c >>> 1) ^ b[c & 1];
        this.mti = 0
    }
    c = this.mt[this.mti++];
    // Tempering (constants 0x9d2c5680 and 0xefc60000 in decimal).
    c ^= (c >>> 11);
    c ^= (c << 7) & 2636928640;
    c ^= (c << 15) & 4022730752;
    c ^= (c >>> 18);
    return c >>> 0
};
// Random integer in [0, 2^31 - 1].
MersenneTwister.prototype.genrand_int31 = function () {
    return (this.genrand_int32() >>> 1)
};
// Random real in [0, 1] (both endpoints inclusive: divisor 2^32 - 1).
MersenneTwister.prototype.genrand_real1 = function () {
    return this.genrand_int32() * (1 / 4294967295)
};
// Random real in [0, 1) — drop-in replacement for Math.random().
MersenneTwister.prototype.random = function () {
    return this.genrand_int32() * (1 / 4294967296)
};
// Random real in (0, 1) (both endpoints exclusive).
MersenneTwister.prototype.genrand_real3 = function () {
    return (this.genrand_int32() + 0.5) * (1 / 4294967296)
};
// Random real in [0, 1) with full 53-bit resolution.
MersenneTwister.prototype.genrand_res53 = function () {
    var d = this.genrand_int32() >>> 5,
        c = this.genrand_int32() >>> 6;
    return (d * 67108864 + c) * (1 / 9007199254740992)
};
const router = require("express").Router();
const { Park } = require('../../models');

// GET / — list every park (plain objects, not Sequelize instances).
router.get("/", async (req, res) => {
    try {
        const parkData = await Park.findAll();
        const parks = parkData.map((park) => park.get({ plain: true }));
        res.status(200).json(parks);
    }
    catch (err) {
        res.status(400).json(err);
    }
});

// GET /:id — fetch a single park by its `code`; 404 when unknown.
router.get("/:id", async (req, res) => {
    try {
        const parkData = await Park.findOne({ where: { code: req.params.id } });
        if (!parkData) {
            // Bug fix: previously the success branch fell through into
            // `res.status(404)` after already sending the 200 response, and the
            // not-found path only set a status without ever ending the
            // response, leaving the request hanging.
            return res.status(404).json({ message: "No park found with that code" });
        }
        const park = parkData.get({ plain: true });
        res.status(200).json(park);
    }
    catch (err) {
        res.status(400).json(err);
    }
});

// POST /bulk — insert many parks at once.
router.post("/bulk", async (req, res) => {
    try {
        const parksIn = req.body;
        const parks = await Park.bulkCreate(parksIn);
        res.status(200).json(parks);
    }
    catch (err) {
        res.status(400).json(err);
    }
});

module.exports = router;
package io.eventuate.tram.messaging.proxy.consumer;
import io.eventuate.tram.commands.common.Command;
/**
 * Minimal {@link Command} implementation used by the messaging-proxy consumer
 * tests: a single string payload with JavaBean accessors.
 */
public class TestCommand implements Command {
  // The command's only payload field.
  private String someImportantData;
  // No-args constructor — presumably required by the JSON (de)serialization
  // used for command messages; TODO confirm against the framework's mapper.
  public TestCommand() {
  }
  public TestCommand(String someImportantData) {
    this.someImportantData = someImportantData;
  }
  public String getSomeImportantData() {
    return someImportantData;
  }
  public void setSomeImportantData(String someImportantData) {
    this.someImportantData = someImportantData;
  }
}
|
<reponame>Skitionek/alpha-vantage<filename>src/index.js
/* DOCUMENT INFORMATION
- Author: <NAME>
- Email: <EMAIL>
- Created: 2019-05-01
*/
const { RESTDataSource } = require('apollo-datasource-rest/dist/index');
require('dotenv').config();

// Name of the environment variable holding the API key when none is passed in config.
const apiKey = 'AV_KEY';
const path = require('path');

/**
 * The Alpha Vantage core module.
 *
 * An Apollo REST data source for https://www.alphavantage.co.  Endpoint
 * groups (data, crypto, forex, ...) are attached as prototype namespaces via
 * {@link AlphaVantageAPI.extend} and bound to each instance in the constructor.
 */
class AlphaVantageAPI extends RESTDataSource {
    /**
     * @param {Object} [config]       data-source configuration
     * @param {string} [config.key]   Alpha Vantage API key; falls back to the
     *                                AV_KEY environment variable
     * @throws {Error} when no API key can be resolved
     */
    constructor(config = {}) {
        super();
        this.initialize(config || {});
        this.apikey = config.key || process.env[apiKey];
        // Check for config errors.
        if (!this.apikey) {
            throw new Error(`Missing Alpha Vantage API key`);
        }
        this.baseURL = `https://www.alphavantage.co/query`;
        // Extensions are plain objects of functions hung on the prototype (see
        // extend() below), so `this` inside them would otherwise be the
        // namespace object — bind every namespaced function to this instance.
        const self = this;
        Object.keys(AlphaVantageAPI.prototype).forEach(namespace => {
            if (self[namespace] instanceof Object) {
                Object.keys(self[namespace]).forEach(fnName => {
                    self[namespace][fnName] = self[namespace][fnName].bind(self);
                });
            }
        });
    }

    /**
     * Attach a submodule to the prototype under `alias` (defaults to the
     * extension file's basename).  A submodule may export either an object of
     * functions or a factory taking the class.
     */
    static extend(extensionPath, alias = path.parse(extensionPath).name) {
        let extension = require(extensionPath);
        if (typeof extension === 'function') extension = extension(this);
        AlphaVantageAPI.prototype[alias] = extension;
    }
}

// Include all the submodules.
AlphaVantageAPI.extend('./lib/util');
AlphaVantageAPI.extend('./lib/data');
AlphaVantageAPI.extend('./lib/crypto');
AlphaVantageAPI.extend('./lib/forex');
AlphaVantageAPI.extend('./lib/technical');
AlphaVantageAPI.extend('./lib/performance');

export default AlphaVantageAPI;
import typing
from datetime import datetime

GUILD_IDS = [12345, 67890, 54321]  # Example list of enabled guild IDs


# NOTE(review): this fragment references names that are not defined/imported
# here: `lightbulb` (missing `import lightbulb`), `enabled_guilds()` and
# `respond_menu()` — presumably provided by the surrounding module; verify.
async def callback(ctx: lightbulb.context.Context) -> None:
    """Respond with the day-appropriate menu in every enabled guild."""
    guild_ids = enabled_guilds()  # Retrieve the enabled guilds' IDs
    # Monday == 0 ... Sunday == 6 (datetime.weekday() convention).
    current_weekday = datetime.today().weekday()  # Get the current day of the week
    for guild_id in guild_ids:
        await respond_menu(ctx, guild_id, current_weekday)  # Respond to the user for each enabled guild based on the current day
import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { CreateCommentMangaDto } from './dto/create-comment-manga.dto';
import { UpdateCommentMangaDto } from './dto/update-comment-manga.dto';
import { CommentMangaEntity } from './entities/comment-manga.entity';
@Injectable()
export class CommentMangaService {
  constructor(
    @InjectRepository(CommentMangaEntity)
    private repository: Repository<CommentMangaEntity>,
  ) {}

  /** Create a comment authored by `userId` on the manga named in the DTO. */
  create(createCommentMangaDto: CreateCommentMangaDto, userId: number) {
    return this.repository.save({
      ...createCommentMangaDto,
      manga: { id: createCommentMangaDto.mangaId },
      user: { id: userId },
    });
  }

  findAll() {
    return `This action returns all commentManga`;
  }

  /** All comments belonging to one manga. */
  getCommentsForManga(id: number) {
    return this.repository.find({ where: { manga: { id } } });
  }

  /** Update text/spoiler flag and return the refreshed entity. */
  async update(id: number, updateCommentMangaDto: UpdateCommentMangaDto) {
    await this.repository.update(id, {
      commentText: updateCommentMangaDto.commentText,
      spoiler: updateCommentMangaDto.spoiler,
    });
    return this.repository.findOne({ where: { id } });
  }

  /** Delete a comment, but only when `userId` is its author. */
  async remove(id: number, userId: number) {
    // Bug fix: the ownership check below reads `comment.user.id`, but the
    // `user` relation was never loaded, so (unless the entity is configured
    // eager) `comment.user` was undefined and this method crashed instead of
    // returning 403.
    const comment = await this.repository.findOne({
      where: { id },
      relations: { user: true },
    });
    if (!comment) {
      throw new HttpException('Комментарий не найден', HttpStatus.NOT_FOUND);
    }
    if (comment.user.id !== userId) {
      throw new HttpException(
        'Вы не можете удалить эту статью',
        HttpStatus.FORBIDDEN,
      );
    }
    await this.repository.delete(id);
    return comment;
  }
}
|
#!/bin/bash
# Remove the pay-to-sudoku binary from both Cargo build profiles
# (fails silently only if neither file exists? no — rm without -f errors;
# presumably run only after a build, TODO confirm).
rm target/{debug,release}/pay-to-sudoku
|
<filename>src/models/Stat/Mana.java
package models.Stat;
public class Mana extends Secondary {
// currently based off Intellect and Level
public Mana(Stat stat_one, Stat stat_two) {
setName("Mana");
setStatOne(stat_one);
setStatTwo(stat_two);
calculate();
}
//TODO Fix this, causes a crash when initiating a pet
public void calculate() {
// setValue(((0.5 * stat_two.value()) + 50 + stat_one.value()) * stat_two.value());
setValue(400);
}
@Override
public void print() {System.out.println(name() + ": " + (int)value());}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.