text stringlengths 1 1.05M |
|---|
python -m bertviz.classifier -c confs/qqp_export.json --no_train --do_test_only
|
#include "mlir/Dialect/StandardOps/IR/Ops.h"
#include "mlir/Dialect/StandardOps/IR/OpsBase.td"
#include "mlir/IR/Builders.h"
#include "mlir/IR/OpImplementation.h"
#include "mlir/IR/Types.h"
using namespace mlir;
// Skeleton op with variadic operands and a single result.
// NOTE(review): parse() always fails and print()/build() are empty stubs —
// presumably placeholders to be filled in; confirm before relying on
// assembly round-tripping.
class CustomOp : public Op<CustomOp, OpTrait::VariadicOperands, OpTrait::OneResult> {
public:
// Inherit the base Op constructors.
using Op::Op;
// Custom-assembly parser hook. Currently always reports failure.
static ParseResult parse(OpAsmParser &parser, OperationState &result) {
// Parse the operands and attributes of the CustomOp
return failure();
}
// Custom-assembly printer hook (no-op stub).
void print(OpAsmPrinter &p) {
// Print the CustomOp
}
// Programmatic builder hook (no-op stub).
static void build(Builder *builder, OperationState &result, Type resultType,
ValueRange operands, ArrayRef<NamedAttribute> attributes) {
// Build the CustomOp
}
};
// Dialect registering the MaxJ types and op under the "maxj" namespace.
// NOTE(review): SVarType/MemType are referenced here but declared later in
// this file — C++ needs their declarations first; verify ordering compiles.
class MaxJDialect : public Dialect {
public:
MaxJDialect(MLIRContext *context) : Dialect("maxj", context) {
addTypes<SVarType, MemType>();
addOperations<CustomOp>();
}
// NOTE(review): a classof(Type) on a Dialect checking a TypeKind is unusual
// for the MLIR API — confirm against the targeted MLIR version.
static bool classof(Type type) { return type.getKind() == TypeKind::MaxJ; }
};
// Register the dialect with the MLIR context
// Static registration object runs at program load time (pre-LLVM-10 style
// DialectRegistration API).
static DialectRegistration<MaxJDialect> maxjDialect;
// Define the SVarType and MemType
// Stream-variable type for the MaxJ dialect (kind-based TypeBase API).
class SVarType : public Type::TypeBase<SVarType, Type, TypeStorage> {
public:
using Base::Base;
// Uniqued accessor: one SVarType instance per MLIR context.
static SVarType get(MLIRContext *context) { return Base::get(context, TypeKind::SVar); }
// Kind predicate used by the casting infrastructure.
static bool kindof(unsigned kind) { return kind == TypeKind::SVar; }
};
// Memory type for the MaxJ dialect; mirrors SVarType with its own kind.
class MemType : public Type::TypeBase<MemType, Type, TypeStorage> {
public:
using Base::Base;
// Uniqued accessor: one MemType instance per MLIR context.
static MemType get(MLIRContext *context) { return Base::get(context, TypeKind::Mem); }
// Kind predicate used by the casting infrastructure.
static bool kindof(unsigned kind) { return kind == TypeKind::Mem; }
}; |
const mongoose = require('mongoose');

// Schema for a creative-arts meetup event. The `location` field follows the
// GeoJSON shape ({ type, coordinates }) so it can carry a 2dsphere index.
const meetingsSchema = new mongoose.Schema(
  {
    place: String,
    // Nested `type: { type: String }` is the mongoose idiom for a literal
    // field named "type"; coordinates are presumably [lng, lat] — confirm.
    location: {
      type: { type: String },
      coordinates: [Number],
    },
    date: Date,
    time: String,
    name: String,
    description: String,
    category: {
      type: String,
      enum: ["Drawing", "Painting", "Photography", "Writing"],
    },
  },
  {
    // Automatic timestamps, stored under snake_case field names.
    timestamps: {
      createdAt: 'created_at',
      updatedAt: 'updated_at',
    },
  }
);

// Geospatial index enabling $near / $geoWithin queries on `location`.
meetingsSchema.index({ location: '2dsphere' });

const Meetings = mongoose.model('Meetings', meetingsSchema);
module.exports = Meetings;
import matplotlib.pyplot as plt

# Sample observations to visualize.
values = [2, 4, 5, 3, 5, 6, 8, 2]

# Draw a 5-bin histogram of the sample and label the chart.
plt.hist(values, bins=5)
plt.title("Histogram")
plt.xlabel("Values")
plt.ylabel("Frequency")
plt.show()
<filename>api/v1/flinksession_types_generate_test.go
package v1
import (
"github.com/stretchr/testify/assert"
apiv1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"testing"
)
// Golden pod-template YAML for TestPodTemplate below. The raw strings must
// match GeneratePodTemplate's output byte-for-byte.
const (
// Expected pod template when BalancedSchedule is "preferred": a weighted
// podAntiAffinity term that spreads Flink pods across hosts.
Expected1 = `apiVersion: v1
kind: Pod
metadata:
creationTimestamp: null
spec:
affinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- podAffinityTerm:
labelSelector:
matchExpressions:
- key: app
operator: In
values:
- flink
- key: type
operator: In
values:
- flink-native-kubernetes
namespaces:
- test
topologyKey: kubernetes.io/hostname
weight: 100
containers:
- name: flink-main-container
resources: {}
status: {}
`
// Expected pod template when BalancedSchedule is "required", plus an
// emptyDir volume and its matching mount.
Expected2 = `apiVersion: v1
kind: Pod
metadata:
creationTimestamp: null
spec:
affinity:
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchExpressions:
- key: app
operator: In
values:
- flink
- key: type
operator: In
values:
- flink-native-kubernetes
namespaces:
- test
topologyKey: kubernetes.io/hostname
containers:
- name: flink-main-container
resources: {}
volumeMounts:
- mountPath: /tmp
name: test
volumes:
- emptyDir: {}
name: test
status: {}
`
)
// TestBuildNodeSelector checks the "key:value" comma-joined rendering of a
// node-selector map: empty, single entry, and (order-independent) two
// entries.
func TestBuildNodeSelector(t *testing.T) {
	selector := make(map[string]string)

	// An empty map renders as the empty string.
	assert.Equal(t, "", buildNodeSelector(selector))

	// A single entry renders as "key:value".
	selector["flink"] = "run"
	assert.Equal(t, "flink:run", buildNodeSelector(selector))

	// Two entries: map iteration order is unspecified, so accept either order.
	selector["disk"] = "ssd"
	got := buildNodeSelector(selector)
	if got != "flink:run,disk:ssd" && got != "disk:ssd,flink:run" {
		assert.Fail(t, "map 拼接失败!")
	}
}
// TestPodTemplate verifies GeneratePodTemplate output for both scheduling
// modes against the golden YAML constants above.
func TestPodTemplate(t *testing.T) {
	// Preferred anti-affinity, no volumes.
	preferred := &FlinkSession{
		ObjectMeta: metav1.ObjectMeta{Name: "flink", Namespace: "test"},
		Spec:       FlinkSessionSpec{BalancedSchedule: PreferredDuringScheduling},
	}
	assert.Equal(t, Expected1, preferred.GeneratePodTemplate())

	// Required anti-affinity with an emptyDir volume mounted at /tmp.
	required := &FlinkSession{
		ObjectMeta: metav1.ObjectMeta{Name: "flink", Namespace: "test"},
		Spec: FlinkSessionSpec{
			BalancedSchedule: RequiredDuringScheduling,
			Volumes: []apiv1.Volume{
				{
					Name:         "test",
					VolumeSource: apiv1.VolumeSource{EmptyDir: &apiv1.EmptyDirVolumeSource{}},
				},
			},
			VolumeMounts: []apiv1.VolumeMount{
				{Name: "test", MountPath: "/tmp"},
			},
		},
	}
	assert.Equal(t, Expected2, required.GeneratePodTemplate())
}
|
<reponame>tsekhan/true-component
import nodeRegistry from './nodeRegistry';
/**
 * Add provided class to internal register to associate with HTML custom component tag.
 *
 * Tags are stored lowercased, so registration and duplicate detection are
 * case-insensitive.
 *
 * @global
 * @name registerClass
 * @param Class - Class to be associated with tag.
 * @param [tag] - Tag name. Should meet custom element name specification.
 * @see {@link https://stackoverflow.com/questions/22545621/do-custom-elements-require-a-dash-in-their-name}.
 */
export default (Class, tag = Class.tag) => {
  // BUGFIX: the registry is keyed by the lowercased tag, but the duplicate
  // check previously used the raw tag, so re-registering with different
  // casing (e.g. "My-Tag" vs "my-tag") silently skipped the warning.
  const key = tag.toLowerCase();

  if (nodeRegistry.has(key)) {
    console.warn(`Re-registering of <${tag} /> may cause usage of wrong component.`);
  }

  nodeRegistry.set(key,
    // If the class already declares a static `tag`, store it as-is; otherwise
    // wrap it in a subclass that reports the tag it was registered under.
    Class.tag ? Class : (class extends Class {
      static get tag() {
        return tag;
      }
    }));
};
|
// Blockly block definitions for board pin selectors.
module.exports = function(Blockly) {
"use strict";
// Output-only placeholder block representing an ADC pin value.
Blockly.Blocks["adc_pin_dummy_input"] = {
init: function() {
this.setOutput(true, null);
this.setColour(230);
this.setTooltip("");
this.setHelpUrl("");
}
};
// Dropdown of named GPIO pins; the selected value is exposed as "IO_PIN".
// NOTE(review): "BUILD IN LED (gpio2)" and "SW2(gpio2)" both map to "2" —
// verify this duplication is intentional for the target board.
Blockly.Blocks["io_pin_dummy_input"] = {
init: function() {
this.appendDummyInput()
.appendField(new Blockly.FieldDropdown([
["BUILD IN LED (gpio2)", "2"],
["BUZZER (gpio14)", "14"],
["SW1(gpio1)", "1"],
["SW2(gpio2)", "2"],
["KNOB (gpio27)", "27"]
]), "IO_PIN");
this.setOutput(true, null);
this.setColour(230);
this.setTooltip("");
this.setHelpUrl("");
}
};
};
|
import {apBasePath} from "./_base";
// Typed route for the payments section, appended to the shared base path.
export const apPathPayments = apBasePath.appendPathSegment<{}>("/payments"); |
#!/bin/sh
# Fail fast: abort on errors, on unset variables, and on failed pipeline
# stages.
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Default to serial code signing unless the build explicitly enables parallel.
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path to the .framework (absolute, or relative to BUILT_PRODUCTS_DIR).
install_framework()
{
# Locate the framework: product dir by full path, by basename, or as given.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Resolve symlinked frameworks to their real location before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
# Fall back to a bare binary when the copied item is not a .framework bundle.
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# $1: path to the .framework.dSYM bundle.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# STRIP_BINARY_RETVAL == 1 means strip_invalid_archs processed the binary.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign.
code_sign_if_enabled() {
# Only sign when an identity is available and signing isn't disabled.
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
# Optionally background the signing job; the trailer `wait`s for all jobs.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# $1: binary to strip in place. Sets STRIP_BINARY_RETVAL to 1 when the
# binary was processed, 0 when no architecture matched the build at all.
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/GTMSessionFetcher/GTMSessionFetcher.framework"
install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
install_framework "${BUILT_PRODUCTS_DIR}/NVActivityIndicatorView/NVActivityIndicatorView.framework"
install_framework "${BUILT_PRODUCTS_DIR}/nanopb/nanopb.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/GTMSessionFetcher/GTMSessionFetcher.framework"
install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
install_framework "${BUILT_PRODUCTS_DIR}/NVActivityIndicatorView/NVActivityIndicatorView.framework"
install_framework "${BUILT_PRODUCTS_DIR}/nanopb/nanopb.framework"
fi
# Parallel signing backgrounds codesign jobs; wait for all of them to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
package io.opensphere.core.image;
/**
* A service that provides images and can be observed for new images.
*
* @param <T> The type of object to be used to look up the images.
*/
public interface ObservableImageProvider<T> extends ImageProvider<T>
{
/**
* Add an observer to be notified when a new image is ready.
*
* @param observer The observer.
*/
void addObserver(Observer observer);
/**
* Interface for observers that need to know when new image data is
* available.
*/
@FunctionalInterface
public interface Observer
{
/**
* Called when new image data are available.
*/
void dataReady();
}
}
|
// Module-level platform bindings, injected once via the exported factory.
var platform;
var applyDelta, pushToPull, parse, sha1, bops, trace;
// Factory: call once with a platform object (providing sha1, bops, trace,
// and optionally agent) to wire up the pack codec helpers, then use the
// returned newRepo(db, workDir). Later calls ignore the argument and return
// the same newRepo.
module.exports = function (imports) {
if (platform) return newRepo;
platform = imports;
applyDelta = require('git-pack-codec/apply-delta.js')(platform);
pushToPull = require('push-to-pull');
parse = pushToPull(require('git-pack-codec/decode.js')(platform));
platform.agent = platform.agent || "js-git/" + require('./package.json').version;
sha1 = platform.sha1;
bops = platform.bops;
trace = platform.trace;
return newRepo;
};
// Build a repository object backed by `db`, a key/value interface exposing
// get/set/has/del/keys/init. `workDir` is reserved for future working-tree
// support. Returns the repo object whose methods are the closures below.
function newRepo(db, workDir) {
if (!db) throw new TypeError("A db interface instance is required");
// Git object body encoders/decoders, keyed by object type.
var encoders = {
commit: encodeCommit,
tag: encodeTag,
tree: encodeTree,
blob: encodeBlob
};
var decoders = {
commit: decodeCommit,
tag: decodeTag,
tree: decodeTree,
blob: decodeBlob
};
var repo = {};
// When tracing is enabled, wrap every db method so calls are logged.
if (trace) {
db = {
get: wrap1("get", db.get),
set: wrap2("set", db.set),
has: wrap1("has", db.has),
del: wrap1("del", db.del),
keys: wrap1("keys", db.keys),
init: wrap0("init", db.init),
};
}
// Git Objects
repo.load = load; // (hashish) -> object
repo.save = save; // (object) -> hash
repo.loadAs = loadAs; // (type, hashish) -> value
repo.saveAs = saveAs; // (type, value) -> hash
repo.remove = remove; // (hashish)
repo.unpack = unpack; // (opts, packStream)
// Convenience Readers
repo.logWalk = logWalk; // (hashish) => stream<commit>
repo.treeWalk = treeWalk; // (hashish) => stream<object>
repo.walk = walk; // (seed, scan, compare) -> stream<object>
// Refs
repo.resolveHashish = resolveHashish; // (hashish) -> hash
repo.updateHead = updateHead; // (hash)
repo.getHead = getHead; // () -> ref
repo.setHead = setHead; // (ref)
repo.readRef = readRef; // (ref) -> hash
repo.createRef = createRef; // (ref, hash)
repo.deleteRef = deleteRef; // (ref)
repo.listRefs = listRefs; // (prefix) -> refs
if (workDir) {
// TODO: figure out API for working repos
}
// Network Protocols
repo.fetch = fetch;
repo.push = push;
return repo;
// Wrap a zero-argument db method so successful completions are traced.
// Supports partial application when the callback is omitted.
function wrap0(type, fn) {
return zero;
function zero(callback) {
if (!callback) return zero.bind(this);
return fn.call(this, check);
function check(err) {
if (err) return callback(err);
trace(type, null);
return callback.apply(this, arguments);
}
}
}
// Wrap a one-argument db method so successful completions are traced with
// the argument (e.g. the key).
function wrap1(type, fn) {
return one;
function one(arg, callback) {
if (!callback) return one.bind(this, arg);
return fn.call(this, arg, check);
function check(err) {
if (err) return callback(err);
trace(type, null, arg);
return callback.apply(this, arguments);
}
}
}
// Wrap a two-argument db method (e.g. set(key, value, cb)) so successful
// completions are traced with the first argument. Supports partial
// application when the callback is omitted.
function wrap2(type, fn) {
  return two;
  function two(arg1, arg2, callback) {
    // BUGFIX: was `two.bind(this, arg1. arg2)` — a property access
    // (arg1.arg2) that bound a single, usually-undefined argument instead of
    // binding both arguments.
    if (!callback) return two.bind(this, arg1, arg2);
    return fn.call(this, arg1, arg2, check);
    function check(err) {
      if (err) return callback(err);
      trace(type, null, arg1);
      return callback.apply(this, arguments);
    }
  }
}
// Walk commit history starting at `hashish`, honoring a shallow boundary.
// Calls back with a {read, abort} stream of commits (see walk()).
function logWalk(hashish, callback) {
if (!callback) return logWalk.bind(this, hashish);
var last, seen = {};
return readRef("shallow", onShallow);
function onShallow(err, shallow) {
// NOTE(review): err is ignored here — a missing "shallow" ref appears to
// be the normal non-shallow case; confirm readRef yields undefined then.
last = shallow;
return loadAs("commit", hashish, onLoad);
}
function onLoad(err, commit, hash) {
if (commit === undefined) return callback(err);
commit.hash = hash;
seen[hash] = true;
return callback(null, walk(commit, scan, loadKey, compare));
}
// Parents still to visit: stop at the shallow boundary, skip seen hashes.
// NOTE(review): `last === commit` compares a hash string to a commit
// object — looks like it should consult commit.hash or commit.last; verify.
function scan(commit) {
if (last === commit) return [];
return commit.parents.filter(function (hash) {
return !seen[hash];
});
}
// Load a parent commit, tag it with its hash, mark the shallow boundary.
function loadKey(hash, callback) {
return loadAs("commit", hash, function (err, commit) {
if (err) return callback(err);
commit.hash = hash;
if (hash === last) commit.last = true;
return callback(null, commit);
});
}
// Newest-first ordering by author date.
function compare(commit, other) {
return commit.author.date < other.author.date;
}
}
// Walk a tree (or a commit's root tree), yielding entries with absolute
// paths via the generic walk() stream.
function treeWalk(hashish, callback) {
if (!callback) return treeWalk.bind(this, hashish);
return load(hashish, onLoad);
function onLoad(err, item, hash) {
if (err) return callback(err);
// Commits are dereferenced to their root tree.
if (item.type === "commit") return load(item.body.tree, onLoad);
item.hash = hash;
item.path = "/";
return callback(null, walk(item, treeScan, treeLoadKey, treeCompare));
}
}
// Children of a node: none for blobs; for trees, every entry except
// gitlinks (mode 0160000), with subtrees getting a trailing "/".
// NOTE(review): assertType is not defined in this file chunk — confirm it
// exists elsewhere in the module.
function treeScan(object) {
if (object.type === "blob") return [];
assertType(object, "tree");
return object.body.filter(function (entry) {
return entry.mode !== 57344 /* 0160000 (commit, e.g. submodule) */;
}).map(function (entry) {
var path = object.path + entry.name;
if (entry.mode === 16384 /* 0040000 (tree) */) path += "/";
entry.path = path;
return entry;
});
}
// Load the object behind a tree entry and graft its type/body onto it.
function treeLoadKey(entry, callback) {
return load(entry.hash, function (err, object) {
if (err) return callback(err);
entry.type = object.type;
entry.body = object.body;
return callback(null, entry);
});
}
// Tree-walk ordering: lexicographic comparison on the entries' paths.
function treeCompare(first, second) {
  return first.path < second.path;
}
// Generic ordered graph walk. `seed` is the first item; scan(item) returns
// keys to expand; loadKey(key, cb) resolves a key to an item; compare(a, b)
// orders pending items. Returns a pull-style {read, abort} stream.
function walk(seed, scan, loadKey, compare) {
var queue = [seed];
var working = 0, error, cb;
return {read: read, abort: abort};
function read(callback) {
if (cb) return callback(new Error("Only one read at a time"));
// Loads still in flight: park the callback until they settle.
if (working) { cb = callback; return; }
var item = queue.shift();
if (!item) return callback();
try { scan(item).forEach(onKey); }
catch (err) { return callback(err); }
return callback(null, item);
}
function abort(callback) { return callback(); }
function onError(err) {
if (cb) {
var callback = cb; cb = null;
return callback(err);
}
// NOTE(review): with no reader waiting the error is stashed here but never
// re-delivered to a later read — verify this is intended.
error = err;
}
function onKey(key) {
working++;
loadKey(key, onItem);
}
function onItem(err, item) {
working--;
if (err) return onError(err);
// Insert the item keeping `queue` sorted (scan from the tail).
var index = queue.length;
while (index && compare(item, queue[index - 1])) index--;
queue.splice(index, 0, item);
if (!working && cb) {
var callback = cb; cb = null;
return read(callback);
}
}
}
// Load a raw git object by hash-ish: resolve to a hash, fetch the framed
// buffer, verify its SHA-1, then deframe and decode into {type, body}.
function load(hashish, callback) {
if (!callback) return load.bind(this, hashish);
var hash;
return resolveHashish(hashish, onHash);
function onHash(err, result) {
if (result === undefined) return callback(err);
hash = result;
return db.get(hash, onBuffer);
}
function onBuffer(err, buffer) {
if (err) return callback(err);
var type, object;
try {
// Content-addressing invariant: stored bytes must hash to the key.
if (sha1(buffer) !== hash) {
throw new Error("Hash checksum failed for " + hash);
}
var pair = deframe(buffer);
type = pair[0];
buffer = pair[1];
object = {
type: type,
body: decoders[type](buffer)
};
} catch (err) {
if (err) return callback(err);
}
return callback(null, object, hash);
}
}
// Save a {type, body} object: encode, frame, hash, then store the framed
// buffer under its hash. Calls back with the new hash.
function save(object, callback) {
if (!callback) return save.bind(this, object);
var buffer, hash;
try {
buffer = encoders[object.type](object.body);
buffer = frame(object.type, buffer);
hash = sha1(buffer);
}
catch (err) {
return callback(err);
}
return db.set(hash, buffer, onSave);
function onSave(err) {
if (err) return callback(err);
return callback(null, hash);
}
}
// Load an object and assert it has the expected type; "text" is sugar for
// a blob whose body is decoded to a string.
function loadAs(type, hashish, callback) {
  if (!callback) return loadAs.bind(this, type, hashish);
  return load(hashish, onObject);
  function onObject(err, object, hash) {
    if (object === undefined) return callback(err);
    if (type === "text") {
      type = "blob";
      object.body = bops.to(object.body);
    }
    if (object.type !== type) {
      // BUGFIX: the error was previously `return new Error(...)` — returned
      // to load()'s internals and silently dropped instead of being
      // delivered to the caller's callback.
      return callback(new Error("Expected " + type + ", but found " + object.type));
    }
    return callback(null, object.body, hash);
  }
}
// Save a body under an explicit type; "text" is an alias for "blob".
function saveAs(type, body, callback) {
if (!callback) return saveAs.bind(this, type, body);
if (type === "text") type = "blob";
return save({ type: type, body: body }, callback);
}
// Delete an object from the db by hash-ish.
function remove(hashish, callback) {
if (!callback) return remove.bind(this, hashish);
var hash;
return resolveHashish(hashish, onHash);
function onHash(err, result) {
if (err) return callback(err);
hash = result;
return db.del(hash, callback);
}
}
// Resolve a "hashish" (full 40-char hash, "HEAD", fully-qualified ref,
// branch, or tag name) to a full lowercase hash. Bare names are probed as
// refs/heads/<name>, then refs/tags/<name>; a miss yields an ENOENT error.
function resolveHashish(hashish, callback) {
if (!callback) return resolveHashish.bind(this, hashish);
hashish = hashish.trim();
if ((/^[0-9a-f]{40}$/i).test(hashish)) {
return callback(null, hashish.toLowerCase());
}
if (hashish === "HEAD") return getHead(onBranch);
if ((/^refs\//).test(hashish)) {
return db.get(hashish, checkBranch);
}
return checkBranch();
function onBranch(err, ref) {
if (err) return callback(err);
if (!ref) return callback();
return resolveHashish(ref, callback);
}
function checkBranch(err, hash) {
// ENOENT just means "not under this prefix"; keep probing.
if (err && err.code !== "ENOENT") return callback(err);
if (hash) {
return resolveHashish(hash, callback);
}
return db.get("refs/heads/" + hashish, checkTag);
}
function checkTag(err, hash) {
if (err && err.code !== "ENOENT") return callback(err);
if (hash) {
return resolveHashish(hash, callback);
}
return db.get("refs/tags/" + hashish, final);
}
function final(err, hash) {
if (err) return callback(err);
if (hash) {
return resolveHashish(hash, callback);
}
err = new Error("ENOENT: Cannot find " + hashish);
err.code = "ENOENT";
return callback(err);
}
}
// Point the current branch (HEAD's target) at `hash`, creating
// refs/heads/master first when HEAD is unborn.
function updateHead(hash, callback) {
if (!callback) return updateHead.bind(this, hash);
var ref;
return getHead(onBranch);
function onBranch(err, result) {
if (err) return callback(err);
if (result === undefined) {
return setHead("master", function (err) {
if (err) return callback(err);
onBranch(err, "refs/heads/master");
});
}
ref = result;
return db.set(ref, hash + "\n", callback);
}
}
// Read HEAD and return the ref it symbolically points to.
function getHead(callback) {
if (!callback) return getHead.bind(this);
return db.get("HEAD", onRead);
function onRead(err, ref) {
if (err) return callback(err);
if (!ref) return callback();
var match = ref.match(/^ref: *(.*)/);
if (!match) return callback(new Error("Invalid HEAD"));
return callback(null, match[1]);
}
}
// Point HEAD at refs/heads/<branchName> (symbolic ref).
function setHead(branchName, callback) {
if (!callback) return setHead.bind(this, branchName);
var ref = "refs/heads/" + branchName;
return db.set("HEAD", "ref: " + ref + "\n", callback);
}
// Read a ref's stored value, trimmed; undefined when absent.
function readRef(ref, callback) {
if (!callback) return readRef.bind(this, ref);
return db.get(ref, function (err, result) {
if (err) return callback(err);
if (!result) return callback();
return callback(null, result.trim());
});
}
// Create (or overwrite) a ref pointing at `hash`.
function createRef(ref, hash, callback) {
if (!callback) return createRef.bind(this, ref, hash);
return db.set(ref, hash + "\n", callback);
}
// Remove a ref.
function deleteRef(ref, callback) {
if (!callback) return deleteRef.bind(this, ref);
return db.del(ref, callback);
}
// List all refs under `prefix`, calling back with a map of ref name ->
// hash. Traverses db.keys() recursively: EISDIR from get() means "descend".
function listRefs(prefix, callback) {
if (!callback) return listRefs.bind(this, prefix);
var branches = {}, list = [], target = prefix;
return db.keys(target, onNames);
function onNames(err, names) {
if (err) {
// Nothing under this prefix; keep draining the work list.
if (err.code === "ENOENT") return shift();
return callback(err);
}
for (var i = 0, l = names.length; i < l; ++i) {
list.push(target + "/" + names[i]);
}
return shift();
}
function shift(err) {
if (err) return callback(err);
target = list.shift();
if (!target) return callback(null, branches);
return db.get(target, onRead);
}
function onRead(err, hash) {
if (err) {
if (err.code === "EISDIR") return db.keys(target, onNames);
return callback(err);
}
if (hash) {
branches[target] = hash.trim();
return shift();
}
return db.keys(target, onNames);
}
}
// Find the first index of `byte` in `buffer` at or after index `i`
// (defaults to 0). Returns -1 when the byte is not found.
function indexOf(buffer, byte, i) {
  const length = buffer.length;
  for (let pos = i | 0; pos < length; pos++) {
    if (buffer[pos] === byte) return pos;
  }
  return -1;
}
// Decode buffer[start..end) as a string, one char code per byte.
function parseAscii(buffer, start, end) {
  const chars = [];
  for (let pos = start; pos < end; pos++) {
    chars.push(String.fromCharCode(buffer[pos]));
  }
  return chars.join("");
}
// Parse buffer[start..end) as an unsigned decimal ASCII number
// (0x30 is the char code of "0").
function parseDec(buffer, start, end) {
  let value = 0;
  for (let pos = start; pos < end; pos++) {
    value = value * 10 + (buffer[pos] - 0x30);
  }
  return value;
}
// Parse buffer[start..end) as an unsigned octal ASCII number (used for git
// tree entry modes).
function parseOct(buffer, start, end) {
  let value = 0;
  for (let pos = start; pos < end; pos++) {
    value = (value << 3) + (buffer[pos] - 0x30);
  }
  return value;
}
// Split a framed git object buffer ("<type> <size>\0<body>") into
// [type, body], validating the declared size against the actual body.
function deframe(buffer) {
var space = indexOf(buffer, 0x20);
if (space < 0) throw new Error("Invalid git object buffer");
var nil = indexOf(buffer, 0x00, space);
if (nil < 0) throw new Error("Invalid git object buffer");
var body = bops.subarray(buffer, nil + 1);
var size = parseDec(buffer, space + 1, nil);
if (size !== body.length) throw new Error("Invalid body length.");
return [
parseAscii(buffer, 0, space),
body
];
}
// Inverse of deframe: prepend the "<type> <size>\0" header to a body.
function frame(type, body) {
return bops.join([
bops.from(type + " " + body.length + "\0"),
body
]);
}
// Sanitize a person-field string for git object headers: remove NUL, LF,
// '<' and '>' anywhere in the string, and strip leading/trailing runs of
// '.', ',', ':', ';', '<', '>', '"' and "'".
function safe(string) {
  const forbidden = /(?:^[\.,:;<>"']+|[\0\n<>]+|[\.,:;<>"']+$)/gm;
  return string.replace(forbidden, "");
}
// Render a Date as git's "<epoch-seconds> <±hhmm>" author/committer stamp.
// The offset is taken from a non-standard `timeZoneoffset` property when
// present (minutes, JS getTimezoneOffset convention: positive = west of
// UTC), otherwise from the date itself.
function formatDate(date) {
  // BUGFIX: honor an explicit 0 offset (UTC) — `||` fell through to the
  // local zone for 0.
  var offset = (date.timeZoneoffset !== undefined && date.timeZoneoffset !== null)
    ? date.timeZoneoffset
    : date.getTimezoneOffset();
  var seconds = Math.floor(date.getTime() / 1000);
  // BUGFIX: zones east of UTC used to render as e.g. "0-200" instead of
  // "+0200"; build the ±hhmm field from the sign and zero-padded components.
  var sign = offset > 0 ? "-" : "+";
  var abs = Math.abs(offset);
  var hours = Math.floor(abs / 60);
  var minutes = abs % 60;
  return seconds + " " + sign +
    (hours < 10 ? "0" : "") + hours +
    (minutes < 10 ? "0" : "") + minutes;
}
// Render a person field as "Name <email> <epoch> <zone>", sanitizing the
// name and email with safe(). Defaults the date to "now".
function encodePerson(person) {
if (!person.name || !person.email) {
throw new TypeError("Name and email are required for person fields");
}
return safe(person.name) +
" <" + safe(person.email) + "> " +
formatDate(person.date || new Date());
}
// Encode a commit body: tree, parent lines, author, committer, blank line,
// then the message. `parent` (singular) is sugar for a one-element list.
// NOTE(review): the error message's "are require" typo is left as-is here
// since it is a runtime string.
function encodeCommit(commit) {
if (!commit.tree || !commit.author || !commit.message) {
throw new TypeError("Tree, author, and message are require for commits");
}
var parents = commit.parents || (commit.parent ? [ commit.parent ] : []);
if (!Array.isArray(parents)) {
throw new TypeError("Parents must be an array");
}
var str = "tree " + commit.tree;
for (var i = 0, l = parents.length; i < l; ++i) {
str += "\nparent " + parents[i];
}
str += "\nauthor " + encodePerson(commit.author) +
"\ncommitter " + encodePerson(commit.committer || commit.author) +
"\n\n" + commit.message;
return bops.from(str);
}
// Encode an annotated tag body: object, type, tag, tagger, blank line,
// then the message.
function encodeTag(tag) {
  if (!tag.object || !tag.type || !tag.tag || !tag.tagger || !tag.message) {
    throw new TypeError("Object, type, tag, tagger, and message required");
  }
  var str = "object " + tag.object +
    "\ntype " + tag.type +
    "\ntag " + tag.tag +
    "\ntagger " + encodePerson(tag.tagger) +
    "\n\n" + tag.message;
  // BUGFIX: the return previously appended "\n" + tag.message a second time
  // ('bops.from(str + "\n" + tag.message)'), duplicating the message and
  // corrupting the tag body (and therefore its hash).
  return bops.from(str);
}
// Git tree-entry ordering: compare entry names as if each had a trailing
// slash, which sorts subtrees the way git's canonical tree format expects.
function pathCmp(oa, ob) {
  const a = oa.name + "/";
  const b = ob.name + "/";
  if (a < b) return -1;
  if (a > b) return 1;
  return 0;
}
// Encode a tree body. Accepts either an array of entries or a name-keyed
// map; entries are sorted with git's tree ordering before serialization.
function encodeTree(tree) {
var chunks = [];
if (!Array.isArray(tree)) {
tree = Object.keys(tree).map(function (name) {
var entry = tree[name];
entry.name = name;
return entry;
});
}
tree.sort(pathCmp).forEach(onEntry);
return bops.join(chunks);
function onEntry(entry) {
// Each record is "<octal mode> <name>\0" followed by the raw 20-byte hash.
chunks.push(
bops.from(entry.mode.toString(8) + " " + entry.name + "\0"),
bops.from(entry.hash, "hex")
);
}
}
// Blobs pass through untouched; anything else is converted to a buffer.
function encodeBlob(blob) {
if (bops.is(blob)) return blob;
return bops.from(blob);
}
// Parse "Name <email> <epoch-seconds> <±hhmm>" into {name, email, date}.
// The returned Date carries a non-standard `timeZoneoffset` property
// (minutes, JS getTimezoneOffset convention) recovered from the zone field.
function decodePerson(string) {
  const match = string.match(/^([^<]*) <([^>]*)> ([^ ]*) (.*)$/);
  if (!match) throw new Error("Improperly formatted person string");
  const [, name, email, secondsText, zoneText] = match;
  const date = new Date(parseInt(secondsText, 10) * 1000);
  date.timeZoneoffset = parseInt(zoneText, 10) / 100 * -60;
  return { name, email, date };
}
// Decode a commit body into {tree, parents, author, committer, message}.
// Header lines are "<key> <value>"; the message follows the blank line.
function decodeCommit(body) {
var i = 0;
var start;
var key;
var parents = [];
var commit = {
tree: "",
parents: parents,
author: "",
committer: "",
message: ""
};
// Headers end at the first empty line (a bare 0x0a).
while (body[i] !== 0x0a) {
start = i;
i = indexOf(body, 0x20, start);
if (i < 0) throw new SyntaxError("Missing space");
key = parseAscii(body, start, i++);
start = i;
i = indexOf(body, 0x0a, start);
if (i < 0) throw new SyntaxError("Missing linefeed");
var value = bops.to(bops.subarray(body, start, i++));
if (key === "parent") {
parents.push(value);
}
else {
// Person fields get parsed into structured {name, email, date}.
if (key === "author" || key === "committer") {
value = decodePerson(value);
}
commit[key] = value;
}
}
i++;
commit.message = bops.to(bops.subarray(body, i));
return commit;
}
// Decode an annotated tag body; same header/message layout as commits,
// with the tagger parsed as a person.
function decodeTag(body) {
var i = 0;
var start;
var key;
var tag = {};
while (body[i] !== 0x0a) {
start = i;
i = indexOf(body, 0x20, start);
if (i < 0) throw new SyntaxError("Missing space");
key = parseAscii(body, start, i++);
start = i;
i = indexOf(body, 0x0a, start);
if (i < 0) throw new SyntaxError("Missing linefeed");
var value = bops.to(bops.subarray(body, start, i++));
if (key === "tagger") value = decodePerson(value);
tag[key] = value;
}
i++;
tag.message = bops.to(bops.subarray(body, i));
return tag;
}
// Decode a raw tree body into [{mode, name, hash}]: repeated records of
// "<octal mode> <name>\0" followed by a 20-byte binary hash.
function decodeTree(body) {
var i = 0;
var length = body.length;
var start;
var mode;
var name;
var hash;
var tree = [];
while (i < length) {
start = i;
i = indexOf(body, 0x20, start);
if (i < 0) throw new SyntaxError("Missing space");
mode = parseOct(body, start, i++);
start = i;
i = indexOf(body, 0x00, start);
name = bops.to(bops.subarray(body, start, i++));
// 20 raw bytes, hex-encoded for the in-memory representation.
hash = bops.to(bops.subarray(body, i, i += 20), "hex");
tree.push({
mode: mode,
name: name,
hash: hash
});
}
return tree;
}
// Blobs are opaque: the deframed body already is the blob content.
function decodeBlob(body) {
  return body;
}
// Fetch from a remote: discover refs/caps, negotiate wants, unpack the
// received pack stream, then record locally the refs whose objects arrived
// and point HEAD at the remote's current branch.
function fetch(remote, opts, callback) {
if (!callback) return fetch.bind(this, remote, opts);
var refs, branch, queue, ref, hash;
return remote.discover(onDiscover);
function onDiscover(err, serverRefs, serverCaps) {
if (err) return callback(err);
refs = serverRefs;
opts.caps = processCaps(opts, serverCaps);
return processWants(refs, opts.want, onWants);
}
function onWants(err, wants) {
if (err) return callback(err);
opts.wants = wants;
return remote.fetch(repo, opts, onPackStream);
}
function onPackStream(err, raw) {
if (err) return callback(err);
// No stream means nothing to fetch; just close the connection.
if (!raw) return remote.close(callback);
var packStream = parse(raw);
return unpack(packStream, opts, onUnpack);
}
function onUnpack(err) {
if (err) return callback(err);
return remote.close(onClose);
}
function onClose(err) {
if (err) return callback(err);
queue = Object.keys(refs);
return next();
}
function next(err) {
if (err) return callback(err);
ref = queue.shift();
if (!ref) return setHead(branch, callback);
// Skip HEAD and peeled refs.
// NOTE(review): /{}$/ matches any name ending in "{}", not only "^{}"
// peel markers — verify intent.
if (ref === "HEAD" || /{}$/.test(ref)) return next();
hash = refs[ref];
// The ref matching the remote HEAD hash names the branch to check out;
// substr(11) strips the "refs/heads/" prefix (11 chars).
if (!branch && (hash === refs.HEAD)) branch = ref.substr(11);
db.has(hash, onHas);
}
function onHas(err, has) {
if (err) return callback(err);
// Only record refs whose objects we actually have.
if (!has) return next();
return db.set(ref, hash + "\n", next);
}
}
// Choose the protocol capabilities to request: the intersection of what the
// server advertises and what the caller supports/asked for.
function processCaps(opts, serverCaps) {
  const caps = [];
  if (serverCaps["ofs-delta"]) caps.push("ofs-delta");
  if (serverCaps["thin-pack"]) caps.push("thin-pack");
  if (opts.includeTag && serverCaps["include-tag"]) caps.push("include-tag");
  // Side-band is only useful when the caller wants progress/error streams.
  const wantsSideBand = (opts.onProgress || opts.onError) &&
    (serverCaps["side-band-64k"] || serverCaps["side-band"]);
  if (wantsSideBand) {
    caps.push(serverCaps["side-band-64k"] ? "side-band-64k" : "side-band");
    if (!opts.onProgress && serverCaps["no-progress"]) {
      caps.push("no-progress");
    }
  }
  if (serverCaps.agent) caps.push("agent=" + platform.agent);
  return caps;
}
// Resolve the user's `want` filter (null/undefined, string, RegExp, boolean,
// array, or async function) against the server's refs and call back with the
// deduplicated list of hashes to request.
function processWants(refs, filter, callback) {
  if (filter === null || filter === undefined) {
    return defaultWants(refs, callback);
  }
  // Normalize the filter into the async (ref, callback) predicate shape.
  filter = Array.isArray(filter) ? arrayFilter(filter) :
    typeof filter === "function" ? filter = filter :
    wantFilter(filter);
  var list = Object.keys(refs);
  var wants = {}; // set of wanted hashes, keyed by hash to dedupe
  var ref, hash;
  return shift();
  // Process one ref per iteration until the list is drained.
  function shift() {
    ref = list.shift();
    if (!ref) return callback(null, Object.keys(wants));
    hash = refs[ref];
    resolveHashish(ref, onResolve);
  }
  function onResolve(err, oldHash) {
    // NOTE(review): `err` is deliberately not propagated here — presumably a
    // ref that fails to resolve locally simply hasn't been fetched yet, which
    // is not an error for this walk. Confirm against resolveHashish's contract.
    // Skip refs we already have
    if (hash === oldHash) return shift();
    filter(ref, onFilter);
  }
  function onFilter(err, want) {
    if (err) return callback(err);
    // Skip refs the user doesn't want
    if (want) wants[hash] = true;
    return shift();
  }
}
// Default want set when the caller gave no filter: HEAD plus every local
// branch name, re-fed through processWants as a concrete array filter.
// NOTE(review): `listRefs` comes from the enclosing scope.
function defaultWants(refs, callback) {
  return listRefs("refs/heads", onRefs);
  function onRefs(err, branches) {
    if (err) return callback(err);
    var wants = Object.keys(branches);
    wants.unshift("HEAD");
    return processWants(refs, wants, callback);
  }
}
// Decide synchronously whether a single `want` pattern matches `ref`.
// Accepts null/undefined/"HEAD" (matches only HEAD), a RegExp, a boolean,
// a full ref path, or a bare branch/tag name. Throws TypeError otherwise.
function wantMatch(ref, want) {
  if (want === undefined || want === null || want === "HEAD") {
    return ref === "HEAD";
  }
  if (Object.prototype.toString.call(want) === '[object RegExp]') {
    return want.test(ref);
  }
  if (typeof want === "boolean") return want;
  if (typeof want !== "string") {
    throw new TypeError("Invalid want type: " + typeof want);
  }
  // A bare name matches the corresponding branch or tag ref.
  if (ref === "refs/heads/" + want) return true;
  if (ref === "refs/tags/" + want) return true;
  // A full "refs/..." path must match exactly.
  return /^refs\//.test(ref) && ref === want;
}
// Wrap a single `want` pattern in the async (ref, callback) filter shape
// that processWants expects; wantMatch errors become callback errors.
function wantFilter(want) {
  return function filter(ref, callback) {
    var matched;
    try {
      matched = wantMatch(ref, want);
    } catch (err) {
      return callback(err);
    }
    callback(null, matched);
  };
}
// Build an async filter that succeeds when any pattern in the `want` array
// matches the ref; wantMatch errors become callback errors.
function arrayFilter(want) {
  return function filter(ref, callback) {
    var matched;
    try {
      for (var i = 0; i < want.length; i++) {
        matched = wantMatch(ref, want[i]);
        if (matched) break;
      }
    } catch (err) {
      return callback(err);
    }
    callback(null, matched);
  };
}
// Push is not implemented yet.
// BUGFIX: the placeholder message said "repo.fetch"; this is the push stub.
function push() {
  throw new Error("TODO: Implement repo.push");
}
// Drain a parsed packfile stream, resolve ofs/ref deltas, and store every
// object in the local db. The callback fires exactly once (guarded by onDone).
// `opts.onProgress`, when present, receives human-readable progress strings.
// NOTE(review): relies on `db`, `frame`, `deframe`, `sha1` and `applyDelta`
// from the enclosing scope.
function unpack(packStream, opts, callback) {
  if (!callback) return unpack.bind(this, packStream, opts); // curried form
  var version, num, numDeltas = 0, count = 0, countDeltas = 0;
  var done, startDeltaProgress = false;
  // hashes keyed by offset for ofs-delta resolving
  var hashes = {};
  // cache of db.has() answers for ref-delta base objects
  var has = {};
  return packStream.read(onStats);
  // Single exit point: guards against the callback firing twice.
  function onDone(err) {
    if (done) return;
    done = true;
    return callback(err);
  }
  function onStats(err, stats) {
    if (err) return onDone(err);
    version = stats.version;
    num = stats.num;
    packStream.read(onRead);
  }
  function objectProgress(more) {
    if (!more) startDeltaProgress = true;
    var percent = Math.round(count / num * 100);
    return opts.onProgress("Receiving objects: " + percent + "% (" + (count++) + "/" + num + ") " + (more ? "\r" : "\n"));
  }
  function deltaProgress(more) {
    if (!startDeltaProgress) return;
    var percent = Math.round(countDeltas / numDeltas * 100);
    return opts.onProgress("Applying deltas: " + percent + "% (" + (countDeltas++) + "/" + numDeltas + ") " + (more ? "\r" : "\n"));
  }
  function onRead(err, item) {
    if (err) return onDone(err);
    if (opts.onProgress) objectProgress(item);
    if (item === undefined) return resolveDeltas(); // stream exhausted
    if (item.size !== item.body.length) {
      return onDone(new Error("Body size mismatch"));
    }
    if (item.type === "ofs-delta") {
      numDeltas++;
      // ofs-delta bases are addressed by byte offset within this same pack.
      item.ref = hashes[item.offset - item.ref];
      return resolveDelta(item);
    }
    if (item.type === "ref-delta") {
      numDeltas++;
      return checkDelta(item);
    }
    return saveValue(item);
  }
  // Load the delta's base object, apply the delta, and save the result.
  function resolveDelta(item) {
    if (opts.onProgress) deltaProgress();
    return db.get(item.ref, function (err, buffer) {
      if (err) return onDone(err);
      var target = deframe(buffer);
      item.type = target[0];
      item.body = applyDelta(item.body, target[1]);
      return saveValue(item);
    });
  }
  // Decide whether a ref-delta's base is already available locally.
  function checkDelta(item) {
    var hasTarget = has[item.ref];
    if (hasTarget === true) return resolveDelta(item);
    if (hasTarget === false) return enqueueDelta(item);
    return db.has(item.ref, function (err, value) {
      if (err) return onDone(err);
      has[item.ref] = value;
      if (value) return resolveDelta(item);
      return enqueueDelta(item);
    });
  }
  function saveValue(item) {
    var buffer = frame(item.type, item.body);
    var hash = hashes[item.offset] = sha1(buffer);
    has[hash] = true;
    return db.set(hash, buffer, onSave);
  }
  function onSave(err) {
    // BUGFIX: route errors through onDone (was `callback`) so a late save
    // failure cannot fire the user callback a second time.
    if (err) return onDone(err);
    packStream.read(onRead);
  }
  function enqueueDelta(item) {
    // I have yet to come across a repo that actually needs this path.
    // It's hard to implement without something to test against.
    // BUGFIX: throw a real Error (was a bare string) so stacks are preserved.
    throw new Error("TODO: enqueueDelta");
  }
  function resolveDeltas() {
    // TODO: resolve any pending deltas once enqueueDelta is implemented.
    return onDone();
  }
}
}
// Throw unless `object` carries the expected type tag (e.g. "commit", "tree").
function assertType(object, type) {
  if (object.type === type) return;
  throw new Error(type + " expected, but found " + object.type);
}
|
// source repository: cxh1378/vue-cli-config
import http from '../utils/http'
//
/**
 * Base request prefix, e.g. http://172.16.17.32:8088/request/...
 * The '/testIp' segment is rewritten by the devServer proxy configured in vue.config.js.
 */
// Base path for every endpoint in this module; '/testIp' is the proxy key
// configured in vue.config.js. (Renamed from the misspelled `resquest`;
// module-local, so the exported API is unchanged.)
const request = "/testIp/request"

// GET: fetch the list.
export function getListAPI(params){
  return http.get(`${request}/getList.json`, params)
}
// POST: submit the form.
export function postFormAPI(params){
  return http.post(`${request}/postForm.json`, params)
}
// PUT: update a resource.
export function putSomeAPI(params){
  return http.put(`${request}/putSome.json`, params)
}
// DELETE: remove list entries.
export function deleteListAPI(params){
  return http.delete(`${request}/deleteList.json`, params)
}
def evaluate_postfix(expression):
    """Evaluate a postfix (RPN) token sequence and return the result.

    ``expression`` is any iterable of token strings (a plain string works for
    single-digit operands). Digit tokens are pushed as ints; the operators
    ``+ - * / ^`` pop two operands and push the result (``/`` is true
    division, ``^`` is exponentiation). Unrecognized operator tokens pop two
    operands and push nothing, mirroring the original behavior.
    """
    operations = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
        '/': lambda a, b: a / b,
        '^': lambda a, b: a ** b,
    }
    stack = []
    for token in expression:
        if token.isdigit():
            stack.append(int(token))
        else:
            right = stack.pop()
            left = stack.pop()
            op = operations.get(token)
            if op is not None:
                stack.append(op(left, right))
    return stack.pop()
package seedu.address.logic.parser.appointment;
import static seedu.address.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.address.logic.parser.CliSyntax.PREFIX_DATE_OF_APPT;
import static seedu.address.logic.parser.CliSyntax.PREFIX_DOCTOR_ID;
import static seedu.address.logic.parser.CliSyntax.PREFIX_PATIENT_ID;
import static seedu.address.logic.parser.CliSyntax.PREFIX_START_TIME;
import java.util.stream.Stream;
import seedu.address.logic.commands.appointment.AddAppointmentCommand;
import seedu.address.logic.parser.ArgumentMultimap;
import seedu.address.logic.parser.ArgumentTokenizer;
import seedu.address.logic.parser.Parser;
import seedu.address.logic.parser.ParserUtil;
import seedu.address.logic.parser.Prefix;
import seedu.address.logic.parser.exceptions.ParseException;
import seedu.address.model.appointment.AppointmentDate;
import seedu.address.model.appointment.AppointmentDoctorId;
import seedu.address.model.appointment.AppointmentPatientId;
import seedu.address.model.appointment.AppointmentTime;
import seedu.address.model.appointment.FutureAppointment;
import seedu.address.model.appointment.exceptions.AppointmentNotInFutureException;
/**
 * Parses input arguments and creates a new AddAppointmentCommand object
 */
public class AddAppointmentCommandParser implements Parser<AddAppointmentCommand> {

    /**
     * Parses the given {@code String} of arguments in the context of the AddAppointmentCommand
     * and returns an AddAppointmentCommand object for execution.
     * @throws ParseException if the user input does not conform the expected format
     */
    @Override
    public AddAppointmentCommand parse(String args) throws ParseException {
        ArgumentMultimap tokenized = ArgumentTokenizer.tokenize(
                args, PREFIX_PATIENT_ID, PREFIX_DOCTOR_ID, PREFIX_DATE_OF_APPT, PREFIX_START_TIME);

        // All four prefixes are mandatory for this command.
        boolean allPresent = arePrefixesPresent(
                tokenized, PREFIX_PATIENT_ID, PREFIX_DOCTOR_ID, PREFIX_DATE_OF_APPT, PREFIX_START_TIME);
        if (!allPresent) {
            throw new ParseException(
                    String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddAppointmentCommand.MESSAGE_USAGE));
        }

        AppointmentPatientId patientId =
                ParserUtil.parseAppointmentPatientId(tokenized.getValue(PREFIX_PATIENT_ID).get());
        AppointmentDoctorId doctorId =
                ParserUtil.parseAppointmentDoctorId(tokenized.getValue(PREFIX_DOCTOR_ID).get());
        AppointmentDate date =
                ParserUtil.parseAppointmentDate(tokenized.getValue(PREFIX_DATE_OF_APPT).get());
        AppointmentTime time =
                ParserUtil.parseAppointmentTime(tokenized.getValue(PREFIX_START_TIME).get());

        final FutureAppointment appointment;
        try {
            appointment = new FutureAppointment(patientId, doctorId, date, time);
        } catch (AppointmentNotInFutureException e) {
            // Surface the domain constraint as a parse failure.
            throw new ParseException(FutureAppointment.MESSAGE_CONSTRAINT_FUTURE);
        }
        return new AddAppointmentCommand(appointment);
    }

    /**
     * Returns true if none of the prefixes contains empty {@code Optional} values in the given
     * {@code ArgumentMultimap}.
     */
    private static boolean arePrefixesPresent(ArgumentMultimap argumentMultimap, Prefix... prefixes) {
        return Stream.of(prefixes).allMatch(prefix -> argumentMultimap.getValue(prefix).isPresent());
    }
}
|
# Ensures passed in version values are supported.
# Ensures passed in version values are supported.
# Args: $1 = major version, $2 = minor version.
# Exits 0 when MAJOR.MINOR is within [EOSIO_MIN_VERSION, EOSIO_MAX_VERSION], 1 otherwise.
# Uses `exit` rather than `return`: callers invoke it in a subshell,
# e.g. `if $(check-version-numbers MAJ MIN); then ...`.
function check-version-numbers() {
  CHECK_VERSION_MAJOR=$1
  CHECK_VERSION_MINOR=$2  # BUGFIX: was $0 (the script name), which breaks all minor-version checks
  if [[ $CHECK_VERSION_MAJOR -lt $EOSIO_MIN_VERSION_MAJOR ]]; then
    exit 1
  fi
  if [[ $CHECK_VERSION_MAJOR -gt $EOSIO_MAX_VERSION_MAJOR ]]; then
    exit 1
  fi
  if [[ $CHECK_VERSION_MAJOR -eq $EOSIO_MIN_VERSION_MAJOR ]]; then
    if [[ $CHECK_VERSION_MINOR -lt $EOSIO_MIN_VERSION_MINOR ]]; then
      exit 1
    fi
  fi
  if [[ $CHECK_VERSION_MAJOR -eq $EOSIO_MAX_VERSION_MAJOR ]]; then
    if [[ $CHECK_VERSION_MINOR -gt $EOSIO_MAX_VERSION_MINOR ]]; then
      exit 1
    fi
  fi
  exit 0
}
# Handles choosing which EOSIO directory to select when the default location is used.
# Handles choosing which EOSIO directory to select when the default location is used.
# Scans ${HOME}/eosio for version-named subdirectories (e.g. "2.0"), collects the
# supported ones in PROMPT_EOSIO_DIRS, and leaves the last (highest, since the
# listing is `sort -V`ed) supported version in EOSIO_VERSION.
function default-eosio-directories() {
  REGEX='^[0-9]+([.][0-9]+)?$'
  ALL_EOSIO_SUBDIRS=()
  if [[ -d ${HOME}/eosio ]]; then
    ALL_EOSIO_SUBDIRS=($(ls ${HOME}/eosio | sort -V))
  fi
  for ITEM in "${ALL_EOSIO_SUBDIRS[@]}"; do
    if [[ "$ITEM" =~ $REGEX ]]; then
      DIR_MAJOR=$(echo $ITEM | cut -f1 -d '.')
      DIR_MINOR=$(echo $ITEM | cut -f2 -d '.')
      # check-version-numbers exits 0 for supported versions.
      if $(check-version-numbers $DIR_MAJOR $DIR_MINOR); then
        PROMPT_EOSIO_DIRS+=($ITEM)
      fi
    fi
  done
  for ITEM in "${PROMPT_EOSIO_DIRS[@]}"; do
    if [[ "$ITEM" =~ $REGEX ]]; then
      EOSIO_VERSION=$ITEM
    fi
  done
}
# Prompts or sets default behavior for choosing EOSIO directory.
# Prompts or sets default behavior for choosing EOSIO directory.
# In NONINTERACTIVE mode the pre-set $PROCEED answer is reused; otherwise the
# user is asked to accept the detected default or type a path. Result is
# exported as EOSIO_INSTALL_DIR.
function eosio-directory-prompt() {
  if [[ -z $EOSIO_DIR_PROMPT ]]; then
    default-eosio-directories;
    echo 'No EOSIO location was specified.'
    while true; do
      if [[ $NONINTERACTIVE != true ]]; then
        if [[ -z $EOSIO_VERSION ]]; then
          echo "No default EOSIO installations detected..."
          PROCEED=n
        else
          printf "Is EOSIO installed in the default location: $HOME/eosio/$EOSIO_VERSION (y/n)" && read -p " " PROCEED
        fi
      fi
      echo ""
      case $PROCEED in
        "" )
          echo "Is EOSIO installed in the default location?";;
        0 | true | [Yy]* )
          break;;
        1 | false | [Nn]* )
          if [[ $PROMPT_EOSIO_DIRS ]]; then
            echo "Found these compatible EOSIO versions in the default location."
            printf "$HOME/eosio/%s\n" "${PROMPT_EOSIO_DIRS[@]}"
          fi
          printf "Enter the installation location of EOSIO:" && read -e -p " " EOSIO_DIR_PROMPT;
          # Expand a leading ~ to $HOME.
          EOSIO_DIR_PROMPT="${EOSIO_DIR_PROMPT/#\~/$HOME}"
          break;;
        * )
          echo "Please type 'y' for yes or 'n' for no.";;
      esac
    done
  fi
  # Explicit answer wins; otherwise fall back to the detected default version.
  export EOSIO_INSTALL_DIR="${EOSIO_DIR_PROMPT:-${HOME}/eosio/${EOSIO_VERSION}}"
}
# Prompts or default behavior for choosing EOSIO.CDT directory.
# Prompts or default behavior for choosing EOSIO.CDT directory.
# Mirrors eosio-directory-prompt, but with a fixed default of /usr/local/eosio.cdt.
# Result is exported as CDT_INSTALL_DIR.
function cdt-directory-prompt() {
  if [[ -z $CDT_DIR_PROMPT ]]; then
    echo 'No EOSIO.CDT location was specified.'
    while true; do
      if [[ $NONINTERACTIVE != true ]]; then
        printf "Is EOSIO.CDT installed in the default location? /usr/local/eosio.cdt (y/n)" && read -p " " PROCEED
      fi
      echo ""
      case $PROCEED in
        "" )
          echo "Is EOSIO.CDT installed in the default location?";;
        0 | true | [Yy]* )
          break;;
        1 | false | [Nn]* )
          printf "Enter the installation location of EOSIO.CDT:" && read -e -p " " CDT_DIR_PROMPT;
          # Expand a leading ~ to $HOME.
          CDT_DIR_PROMPT="${CDT_DIR_PROMPT/#\~/$HOME}"
          break;;
        * )
          echo "Please type 'y' for yes or 'n' for no.";;
      esac
    done
  fi
  export CDT_INSTALL_DIR="${CDT_DIR_PROMPT:-/usr/local/eosio.cdt}"
}
# Ensures EOSIO is installed and compatible via version listed in tests/CMakeLists.txt.
# Ensures EOSIO is installed and compatible via version listed in tests/CMakeLists.txt.
function nodeos-version-check() {
  # BUGFIX(review): the original ran "$EOSIO_INSTALL_DIR/bin/defios"; the EOSIO
  # node daemon binary is "nodeos" (matching this function's name) — "defios"
  # appears to be a corruption. Confirm against the installed layout.
  INSTALLED_VERSION=$(echo $($EOSIO_INSTALL_DIR/bin/nodeos --version))
  INSTALLED_VERSION_MAJOR=$(echo $INSTALLED_VERSION | cut -f1 -d '.' | sed 's/v//g')
  INSTALLED_VERSION_MINOR=$(echo $INSTALLED_VERSION | cut -f2 -d '.' | sed 's/v//g')
  if [[ -z $INSTALLED_VERSION_MAJOR || -z $INSTALLED_VERSION_MINOR ]]; then
    echo "Could not determine EOSIO version. Exiting..."
    exit 1;
  fi
  # check-version-numbers exits 0 when the version is in the supported range.
  if $(check-version-numbers $INSTALLED_VERSION_MAJOR $INSTALLED_VERSION_MINOR); then
    if [[ $INSTALLED_VERSION_MAJOR -gt $EOSIO_SOFT_MAX_MAJOR ]]; then
      echo "Detected EOSIO version is greater than recommended soft max: $EOSIO_SOFT_MAX_MAJOR.$EOSIO_SOFT_MAX_MINOR. Proceed with caution."
    fi
    if [[ $INSTALLED_VERSION_MAJOR -eq $EOSIO_SOFT_MAX_MAJOR && $INSTALLED_VERSION_MINOR -gt $EOSIO_SOFT_MAX_MINOR ]]; then
      echo "Detected EOSIO version is greater than recommended soft max: $EOSIO_SOFT_MAX_MAJOR.$EOSIO_SOFT_MAX_MINOR. Proceed with caution."
    fi
  else
    echo "Supported versions are: $EOSIO_MIN_VERSION_MAJOR.$EOSIO_MIN_VERSION_MINOR - $EOSIO_MAX_VERSION_MAJOR.$EOSIO_MAX_VERSION_MINOR"
    echo "Invalid EOSIO installation. Exiting..."
    exit 1;
  fi
}
|
/**
* This file is the part of NChart3D Framework
* http://www.nchart3d.com
*
* File: NChartDataSmootherSBezier.h
* Version: "2.9.1"
*
* Copyright (C) 2017 Nulana LTD. All Rights Reserved.
*/
#import "NChartDataSmoother.h"
/**
* The NChartDataSmootherSBezier class provides data smoother based on bezier interpolation
* (with control points calculated to ensure defect 1 spline) used to create smooth lines on the charts by only a few points.
* Typically this smoother is used for line and area series.
* @note This smoother ensures better smoothing in comparison with <NChartDataSmootherTBezier>, but has larger deviation
* from the linear data interpolation and can produce fake extremums. However, the deviation of the fake extremums is
* smaller in comparison with <NChartDataSmootherSpline> and <NChartDataSmootherLagrange>.
*/
NCHART3D_EXPORT @interface NChartDataSmootherSBezier : NChartDataSmoother

// Intentionally empty: this subclass adds no API beyond NChartDataSmoother;
// per the header notes above, it selects the defect-1 bezier smoothing algorithm.

@end
|
// GLSL helper exported as a raw string for shader assembly: snaps a pixel
// coordinate to the center of its 5x5-pixel cell (squareSize is hard-coded).
// NOTE(review): presumably used for a pixelation/mosaic effect — confirm at
// the call site that composes the final shader.
export default `
vec2 getCenterPixel(vec2 calculatedPixel) {
  float squareSize = 5.0;
  float halfSquare = squareSize / 2.0;
  float centerX = floor(calculatedPixel[0] / squareSize) * squareSize + halfSquare;
  float centerY = floor(calculatedPixel[1] / squareSize) * squareSize + halfSquare;
  return vec2(centerX, centerY);
}
`;
#!/bin/bash
# Script to run Lattice Diamond on a Verilog source file and LPF constraints file, then run some extra commands
# to create debug/dump output for the design
# Based on Clifford Wolf's icecube.sh from Project Icestorm
# Usage:
# ./diamond.sh part design.v
# Currently supported parts:
# - lfe5u-85
# - lfe5u-45
# - lfe5u-25
# - LCMXO2-1200HC
# Currently this script supports Linux and Windows using a MINGW64 bash shell.
# You need to set the DIAMONDDIR environment variable to the path where you have
# installed Lattice Diamond, unless it matches this default.

# Detect a MINGW64 (Windows) shell vs. Linux; path layouts differ below.
if [ "$(expr substr $(uname -s) 1 10)" == "MINGW64_NT" ]; then
  WINDOWS=true
else
  WINDOWS=false
fi
# Diamond release to use; override with DIAMONDVER.
if [ -z "$DIAMONDVER" ]; then
  diamondver="3.10"
else
  diamondver="$DIAMONDVER"
fi
if $WINDOWS; then
  diamonddir="${DIAMONDDIR:-/c/lscc/diamond/${diamondver}_x64}"
else
  diamonddir="${DIAMONDDIR:-/usr/local/diamond/${diamondver}_x64}"
fi
export FOUNDRY="${diamonddir}/ispfpga"
if $WINDOWS; then
  bindir="${diamonddir}/bin/nt64"
else
  bindir="${diamonddir}/bin/lin64"
fi
LSC_DIAMOND=true
export LSC_DIAMOND
export NEOCAD_MAXLINEWIDTH=32767
export TCL_LIBRARY="${diamonddir}/tcltk/lib/tcl8.5"
if $WINDOWS; then
  export fpgabindir=${FOUNDRY}/bin/nt64
else
  export fpgabindir=${FOUNDRY}/bin/lin64
fi
if $WINDOWS; then
  export PATH="${bindir}:${fpgabindir}:$PATH"
else
  export LD_LIBRARY_PATH="${bindir}:${fpgabindir}"
fi
export LM_LICENSE_FILE="${LM_LICENSE_FILE:=${diamonddir}/license/license.dat}"
set -ex
# An .ncl input skips synthesis and goes straight to ncl2ncd below.
if [[ $2 == *.ncl ]]
then
  USE_NCL=1
else
  USE_NCL=
fi
# Strip a trailing .v or .ncl so $2 becomes the design's basename.
V_SUB=${2%.v}
NCL_SUB=${V_SUB%.ncl}
set -- "$1" $NCL_SUB
PART=$1
case "${PART}" in
LFE5U-85F)
PACKAGE="${DEV_PACKAGE:-CABGA756}"
DEVICE="LFE5U-85F"
LSE_ARCH="ECP5U"
;;
LFE5U-45F)
PACKAGE="${DEV_PACKAGE:-CABGA381}"
DEVICE="LFE5U-45F"
LSE_ARCH="ECP5U"
;;
LFE5U-25F)
PACKAGE="${DEV_PACKAGE:-CABGA381}"
DEVICE="LFE5U-25F"
LSE_ARCH="ECP5U"
;;
LFE5UM-85F)
PACKAGE="${DEV_PACKAGE:-CABGA756}"
DEVICE="LFE5UM-85F"
LSE_ARCH="ECP5UM"
;;
LFE5UM-45F)
PACKAGE="${DEV_PACKAGE:-CABGA381}"
DEVICE="LFE5UM-45F"
LSE_ARCH="ECP5UM"
;;
LFE5UM-25F)
PACKAGE="${DEV_PACKAGE:-CABGA381}"
DEVICE="LFE5UM-25F"
LSE_ARCH="ECP5UM"
;;
LFE5UM5G-85F)
PACKAGE="${DEV_PACKAGE:-CABGA756}"
DEVICE="LFE5UM5G-85F"
LSE_ARCH="ECP5UM5G"
;;
LFE5UM5G-45F)
PACKAGE="${DEV_PACKAGE:-CABGA381}"
DEVICE="LFE5UM5G-45F"
LSE_ARCH="ECP5UM5G"
;;
LFE5UM5G-25F)
PACKAGE="${DEV_PACKAGE:-CABGA381}"
DEVICE="LFE5UM5G-25F"
LSE_ARCH="ECP5UM5G"
;;
LCMXO2-256HC)
PACKAGE="${DEV_PACKAGE:-QFN32}"
DEVICE="LCMXO2-256HC"
LSE_ARCH="MachXO2"
;;
LCMXO2-640HC)
PACKAGE="${DEV_PACKAGE:-QFN48}"
DEVICE="LCMXO2-640HC"
LSE_ARCH="MachXO2"
;;
LCMXO2-1200HC)
PACKAGE="${DEV_PACKAGE:-QFN32}"
DEVICE="LCMXO2-1200HC"
LSE_ARCH="MachXO2"
;;
LCMXO2-2000HC)
PACKAGE="${DEV_PACKAGE:-TQFP100}"
DEVICE="LCMXO2-2000HC"
LSE_ARCH="MachXO2"
;;
LCMXO2-4000HC)
PACKAGE="${DEV_PACKAGE:-TQFP144}"
DEVICE="LCMXO2-4000HC"
LSE_ARCH="MachXO2"
;;
LCMXO2-7000HC)
PACKAGE="${DEV_PACKAGE:-TQFP144}"
DEVICE="LCMXO2-7000HC"
LSE_ARCH="MachXO2"
;;
LCMXO3LF-9400C)
PACKAGE="${DEV_PACKAGE:-CABGA256}"
DEVICE="LCMXO3LF-9400C"
LSE_ARCH="MachXO3LF"
;;
LIF-MD6000)
PACKAGE="${DEV_PACKAGE:-csFBGA81}"
DEVICE="LIF-MD6000"
LSE_ARCH="LIFMD"
;;
esac
(
rm -rf "$2.tmp"
mkdir -p "$2.tmp"
if [ -n "$USE_NCL" ]; then
cp "$2.ncl" "$2.tmp/input.ncl"
if test -f "$2.prf"; then cp "$2.prf" "$2.tmp/input.prf"; fi
else
cp "$2.v" "$2.tmp/input.v"
fi
if test -f "$2.sdc"; then cp "$2.sdc" "$2.tmp/input.sdc"; fi
if test -f "$2.lpf"; then cp "$2.lpf" "$2.tmp/input.lpf"; fi
if test -f "$2.prf"; then cp "$2.prf" "$2.tmp/input.prf"; fi
if test -f "$2.dat"; then cp "$2.dat" "$2.tmp/$2.dat"; fi
cd "$2.tmp"
touch input.sdc
touch input.lpf
if [ -n "$USE_NCL" ]; then
if $WINDOWS; then
"$FOUNDRY"/userware/NT/bin/nt64/ncl2ncd input.ncl -drc -o par_impl.ncd
else
"$FOUNDRY"/userware/unix/bin/lin64/ncl2ncd input.ncl -drc -o par_impl.ncd
fi
if test -f "input.prf"; then
cp "input.prf" "synth_impl.prf"
else
touch synth_impl.prf
fi
else
cat > impl_lse.prj << EOT
#device
-a "$LSE_ARCH"
-d $DEVICE
-t $PACKAGE
-frequency 200
-optimization_goal Timing
-bram_utilization 100
-ramstyle Auto
-romstyle auto
-dsp_utilization 100
-use_dsp 1
-use_carry_chain 1
-carry_chain_length 0
-force_gsr Auto
-resource_sharing 1
-propagate_constants 1
-remove_duplicate_regs 1
-mux_style Auto
-max_fanout 1000
-fsm_encoding_style Auto
-twr_paths 3
-fix_gated_clocks 1
-loop_limit 1950
-use_io_insertion 1
-resolve_mixed_drivers 0
-use_io_reg auto
-ver "input.v"
-p "$PWD"
-ngd "synth_impl.ngd"
-lpf 1
EOT
# run LSE synthesis
"$fpgabindir"/synthesis -f "impl_lse.prj"
# map design
"$fpgabindir"/map -a $LSE_ARCH -p $DEVICE -t $PACKAGE synth_impl.ngd -o map_impl.ncd -lpf synth_impl.lpf -lpf input.lpf $MAPARGS
# place and route design
"$fpgabindir"/par map_impl.ncd par_impl.ncd synth_impl.prf
fi
# Forcefully disable compression
echo "SYSCONFIG COMPRESS_CONFIG=OFF ;" >> synth_impl.prf
# make bitmap
"$fpgabindir"/bitgen -d par_impl.ncd $BITARGS output.bit synth_impl.prf
if [ -n "$JEDEC_BITSTREAM" ]; then
"$fpgabindir"/bitgen -d par_impl.ncd -jedec output.jed synth_impl.prf
fi
if [ -n "$COMPRESSED_BITSTREAM" ]; then
sed 's/COMPRESS_CONFIG=OFF/COMPRESS_CONFIG=ON/' synth_impl.prf > synth_impl_comp.prf
"$fpgabindir"/bitgen -d par_impl.ncd $BITARGS output-comp.bit synth_impl_comp.prf
fi
# dump bitmap
"$fpgabindir"/bstool -d output.bit > output.dump
if [ -z "$USE_NCL" ]; then
# run test on bitmap (for tilemap)
"$fpgabindir"/bstool -t output.bit > output.test
# convert ngd to ncl
if $WINDOWS; then
"$FOUNDRY"/userware/NT/bin/nt64/ncd2ncl par_impl.ncd output.ncl
else
"$FOUNDRY"/userware/unix/bin/lin64/ncd2ncl par_impl.ncd output.ncl
fi
fi
if [ -z "$NO_TRCE" ]; then
# run trce
"$fpgabindir"/trce -v -u -c par_impl.ncd
fi
if [ -n "$BACKANNO" ]; then
# run trce
"$fpgabindir"/ldbanno -n Verilog par_impl.ncd synth_impl.prf
fi
export LD_LIBRARY_PATH=""
)
# Copy the build products out of the temporary working directory,
# mirroring the optional steps enabled above.
cp "$2.tmp"/output.bit "$2.bit"
cp "$2.tmp"/output.dump "$2.dump"
if [ -z "$NO_TRCE" ]; then
  cp "$2.tmp"/par_impl.twr "$2.twr"
fi
if [ -z "$USE_NCL" ]; then
  cp "$2.tmp"/output.ncl "$2_out.ncl"
fi
if [ -n "$JEDEC_BITSTREAM" ]; then
  cp "$2.tmp"/output.jed "$2.jed"
fi
if [ -n "$COMPRESSED_BITSTREAM" ]; then
  cp "$2.tmp"/output-comp.bit "$2-comp.bit"
fi
if [ -n "$BACKANNO" ]; then
  cp "$2.tmp"/par_impl.sdf "$2.sdf"
fi
|
import itertools
def process_data(active, uids, task_name, output_dict, Y_dict, label_name, loss_funcs, device):
    """Collect active uids and the task loss for one task.

    ``active`` is a boolean mask (anything with ``.any()``, e.g. an ndarray)
    selecting which of ``uids`` participate. Returns ``(uid_dict, loss_dict)``,
    each keyed by ``task_name``; both are empty when nothing is active.
    """
    uid_dict = {}
    loss_dict = {}
    if not active.any():
        return uid_dict, loss_dict
    # Keep only the uids whose mask entry is truthy.
    uid_dict[task_name] = list(itertools.compress(uids, active))
    loss_dict[task_name] = loss_funcs[task_name](
        output_dict,
        move_to_device(Y_dict[label_name], device),
        move_to_device(active, device),
    )
    return uid_dict, loss_dict


def move_to_device(data, device):
    # Stub: intended to transfer `data` (e.g. a tensor) onto `device`.
    # NOTE(review): currently returns None implicitly, so callers receive
    # None for labels/mask — confirm the intended implementation.
    pass
# coding: utf-8
# Gem specification for the SalesforceIQ (formerly RelateIQ) Ruby client.
Gem::Specification.new do |spec|
  spec.name = 'riq'
  spec.version = '1.2.3'
  spec.authors = ['<NAME>']
  spec.email = ['<EMAIL>']
  spec.homepage = "https://github.com/relateiq/ruby-sdk"
  spec.summary = 'Ruby SalesforceIQ API client'
  spec.description = 'Full featured ruby client for interacting with the SalesforceIQ API'
  spec.license = 'MIT'
  # this works for only adding committed files, but new files don't work until they're committed.
  spec.files = `git ls-files -z`.split("\x0")
  spec.require_paths = ['lib']
  # 2.0 is the minimum because of the use of refinements
  spec.required_ruby_version = '>= 2.0.0'
  # spec.post_install_message = 'The power of relationship intelligence is in your hands!'
  # prod dependencies
  spec.add_dependency 'httparty', '~> 0.13'
  # dev dependencies
  spec.add_development_dependency 'bundler', '~> 1'
  spec.add_development_dependency 'vcr', '~> 2.9'
  spec.add_development_dependency 'webmock', '~> 1.21'
  spec.add_development_dependency 'minitest', '~> 5.4'
  spec.add_development_dependency 'dotenv', '~> 2'
end
|
#!/bin/bash
# Write a fixed marker value into the target directory ($1) so later build
# stages can verify this precompile step ran.
echo 123456789 > "$1"/src_2_2
# BUGFIX: corrected the emitted message typo "execited" -> "executed".
echo "precompile.sh executed"
|
// tizen/renderer/media/mediaplayer_impl.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Copyright (c) 2013 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "xwalk/tizen/renderer/media/mediaplayer_impl.h"
#include "content/public/renderer/render_view.h"
#include "third_party/WebKit/public/platform/WebURL.h"
namespace tizen {
// Wraps WebMediaPlayerImpl and mirrors playback commands to a browser-side
// player managed by RendererMediaPlayerManager.
MediaPlayerImpl::MediaPlayerImpl(
    blink::WebLocalFrame* frame,
    blink::WebMediaPlayerClient* client,
    base::WeakPtr<media::WebMediaPlayerDelegate> delegate,
    RendererMediaPlayerManager* manager,
    const media::WebMediaPlayerParams& params)
    : WebMediaPlayerImpl(frame, client, delegate, nullptr, params),
      client_(client),
      manager_(manager) {
  DCHECK(manager_);
  // Register so manager-originated events can be routed back via player_id_.
  player_id_ = manager_->RegisterMediaPlayer(this);
}
MediaPlayerImpl::~MediaPlayerImpl() {
  // Tear down the browser-side player and deregister, unless Detach()
  // already severed the link to the manager.
  if (manager_) {
    manager_->DestroyPlayer(player_id_);
    manager_->UnregisterMediaPlayer(player_id_);
  }
}

void MediaPlayerImpl::Detach() {
  // After detaching, all manager calls in this class become no-ops.
  // Consistency fix: use nullptr (as the constructor already does), not NULL.
  manager_ = nullptr;
}
void MediaPlayerImpl::load(LoadType load_type, const blink::WebURL& url,
                           CORSMode cors_mode) {
  // Announce the URL to the browser-side player before the renderer-side
  // pipeline starts loading it.
  InitializeMediaPlayer(url);
  WebMediaPlayerImpl::load(load_type, url, cors_mode);
}

void MediaPlayerImpl::InitializeMediaPlayer(const blink::WebURL& url) {
  // getpid() identifies this renderer process to the browser side.
  if (manager_)
    manager_->Initialize(player_id_, getpid(), url);
}
// Forward play/pause to the browser-side player first, then to the local
// WebMediaPlayerImpl pipeline.
void MediaPlayerImpl::play() {
  if (manager_)
    manager_->Start(player_id_);
  WebMediaPlayerImpl::play();
}

void MediaPlayerImpl::pause() {
  if (manager_)
    manager_->Pause(player_id_);
  WebMediaPlayerImpl::pause();
}
// Handlers for play/pause events originating on the browser side: apply the
// state locally and notify the Blink client of the change.
void MediaPlayerImpl::OnMediaPlayerPlay() {
  WebMediaPlayerImpl::play();
  client_->playbackStateChanged();
}

void MediaPlayerImpl::OnMediaPlayerPause() {
  WebMediaPlayerImpl::pause();
  client_->playbackStateChanged();
}
} // namespace tizen
|
#!/usr/bin/env bash
# by default, with cleanup
# please note that the language(s) was not selected for any particular reason (other to represent the various sizes of babel datasets)
# 304-lithuanian | %WER 41.7 | 20041 61492 | 61.4 29.0 9.6 3.1 41.7 29.0 | -0.414 | exp/chain_cleaned/tdnn_lstm_bab2_sp/decode_dev10h.pem/score_10/dev10h.pem.ctm.sys
# num-iters=84 nj=2..6 num-params=10.1M dim=43+100->3273 combine=-0.145->-0.136
# xent:train/valid[55,83,final]=(-1.89,-1.61,-1.59/-2.22,-2.19,-2.18)
# logprob:train/valid[55,83,final]=(-0.157,-0.118,-0.115/-0.233,-0.241,-0.239)
# 206-zulu | %WER 53.5 | 22805 52162 | 49.8 38.3 11.9 3.2 53.5 31.1 | -0.549 | exp/chain_cleaned/tdnn_lstm_bab2_sp/decode_dev10h.pem/score_13/dev10h.pem.ctm.sys
# num-iters=117 nj=2..6 num-params=10.1M dim=43+100->3274 combine=-0.176->-0.169
# xent:train/valid[77,116,final]=(-1.95,-1.68,-1.66/-2.32,-2.32,-2.31)
# logprob:train/valid[77,116,final]=(-0.190,-0.146,-0.142/-0.280,-0.290,-0.289)
# 104-pashto | %WER 41.9 | 21825 101803 | 62.1 26.8 11.0 4.0 41.9 30.3 | -0.429 | exp/chain_cleaned/tdnn_lstm_bab2_sp/decode_dev10h.pem/score_11/dev10h.pem.ctm.sys
# num-iters=150 nj=2..6 num-params=10.1M dim=43+100->3328 combine=-0.160->-0.156
# xent:train/valid[99,149,final]=(-1.83,-1.61,-1.59/-2.24,-2.22,-2.21)
# logprob:train/valid[99,149,final]=(-0.171,-0.134,-0.131/-0.259,-0.266,-0.263)
set -e -o pipefail
# First the options that are passed through to run_ivector_common.sh
# (some of which are also used in this script directly).
stage=17
nj=30
train_set=train_cleaned
gmm=tri5_cleaned # the gmm for the target data
langdir=data/langp/tri5_ali
num_threads_ubm=12
nnet3_affix=_cleaned # cleanup affix for nnet3 and chain dirs, e.g. _cleaned
# The rest are configs specific to this script. Most of the parameters
# are just hardcoded at this level, in the commands below.
train_stage=-10
tree_affix= # affix for tree directory, e.g. "a" or "b", in case we change the configuration.
tdnn_affix=_bab2 #affix for TDNN directory, e.g. "a" or "b", in case we change the configuration.
common_egs_dir=exp/chain_cleaned/tdnn_lstm_sp/egs # you can set this to use previously dumped egs.
chunk_width=150,120,90,75
# End configuration section.
echo "$0 $@" # Print the command line for logging
. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh
# Refuse to run without a CUDA-enabled Kaldi build.
if ! cuda-compiled; then
cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi
# Feature extraction / ivector training shared with the other chain recipes.
local/chain/run_ivector_common.sh --stage $stage \
  --nj $nj \
  --train-set $train_set \
  --gmm $gmm \
  --num-threads-ubm $num_threads_ubm \
  --nnet3-affix "$nnet3_affix"
# Derived directory layout used by the stages below.
gmm_dir=exp/$gmm
ali_dir=exp/${gmm}_ali_${train_set}_sp
tree_dir=exp/chain${nnet3_affix}/tree${tree_affix}
lat_dir=exp/chain${nnet3_affix}/${gmm}_${train_set}_sp_lats
dir=exp/chain${nnet3_affix}/tdnn_lstm${tdnn_affix}_sp
train_data_dir=data/${train_set}_sp_hires
lores_train_data_dir=data/${train_set}_sp
train_ivector_dir=exp/nnet3${nnet3_affix}/ivectors_${train_set}_sp_hires
# Fail fast if any prerequisite produced by earlier stages is missing.
for f in $gmm_dir/final.mdl $train_data_dir/feats.scp $train_ivector_dir/ivector_online.scp \
  $lores_train_data_dir/feats.scp $ali_dir/ali.1.gz $gmm_dir/final.mdl; do
  [ ! -f $f ] && echo "$0: expected file $f to exist" && exit 1
done
if [ $stage -le 14 ]; then
echo "$0: creating lang directory with one state per phone."
# Create a version of the lang/ directory that has one state per phone in the
# topo file. [note, it really has two states.. the first one is only repeated
# once, the second one has zero or more repeats.]
if [ -d data/lang_chain ]; then
if [ data/lang_chain/L.fst -nt data/lang/L.fst ]; then
echo "$0: data/lang_chain already exists, not overwriting it; continuing"
else
echo "$0: data/lang_chain already exists and seems to be older than data/lang..."
echo " ... not sure what to do. Exiting."
exit 1;
fi
else
cp -r $langdir data/lang_chain
silphonelist=$(cat data/lang_chain/phones/silence.csl) || exit 1;
nonsilphonelist=$(cat data/lang_chain/phones/nonsilence.csl) || exit 1;
# Use our special topology... note that later on may have to tune this
# topology.
steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >data/lang_chain/topo
fi
fi
if [ $stage -le 15 ]; then
# Get the alignments as lattices (gives the chain training more freedom).
# use the same num-jobs as the alignments
steps/align_fmllr_lats.sh --nj 100 --cmd "$train_cmd" ${lores_train_data_dir} \
$langdir $gmm_dir $lat_dir
rm $lat_dir/fsts.*.gz # save space
fi
if [ $stage -le 16 ]; then
# Build a tree using our new topology. We know we have alignments for the
# speed-perturbed data (local/nnet3/run_ivector_common.sh made them), so use
# those.
if [ -f $tree_dir/final.mdl ]; then
echo "$0: $tree_dir/final.mdl already exists, refusing to overwrite it."
exit 1;
fi
steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \
--context-opts "--context-width=2 --central-position=1" \
--leftmost-questions-truncate -1 \
--cmd "$train_cmd" 4000 ${lores_train_data_dir} data/lang_chain $ali_dir $tree_dir
fi
xent_regularize=0.1
if [ $stage -le 17 ]; then
mkdir -p $dir
echo "$0: creating neural net configs using the xconfig parser";
num_targets=$(tree-info $tree_dir/tree |grep num-pdfs|awk '{print $2}')
[ -z $num_targets ] && { echo "$0: error getting num-targets"; exit 1; }
learning_rate_factor=$(echo "print (0.5/$xent_regularize)" | python)
lstm_opts="decay-time=20"
label_delay=5
mkdir -p $dir/configs
cat <<EOF > $dir/configs/network.xconfig
input dim=100 name=ivector
input dim=43 name=input
# please note that it is important to have input layer with the name=input
# as the layer immediately preceding the fixed-affine-layer to enable
# the use of short notation for the descriptor
fixed-affine-layer name=lda input=Append(-2,-1,0,1,2,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat
# the first splicing is moved before the lda layer, so no splicing here
relu-batchnorm-layer name=tdnn1 dim=512
relu-batchnorm-layer name=tdnn2 input=Append(-1,0,1) dim=512
relu-batchnorm-layer name=tdnn3 input=Append(-1,0,1) dim=512
# check steps/libs/nnet3/xconfig/lstm.py for the other options and defaults
fast-lstmp-layer name=fastlstm1 cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 $lstm_opts
relu-batchnorm-layer name=tdnn4 input=Append(-3,0,3) dim=512
relu-batchnorm-layer name=tdnn5 input=Append(-3,0,3) dim=512
fast-lstmp-layer name=fastlstm2 cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 $lstm_opts
relu-batchnorm-layer name=tdnn6 input=Append(-3,0,3) dim=512
relu-batchnorm-layer name=tdnn7 input=Append(-3,0,3) dim=512
fast-lstmp-layer name=fastlstm3 cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 $lstm_opts
## adding the layers for chain branch
output-layer name=output input=fastlstm3 output-delay=$label_delay include-log-softmax=false dim=$num_targets max-change=1.5
# adding the layers for xent branch
# This block prints the configs for a separate output that will be
# trained with a cross-entropy objective in the 'chain' models... this
# has the effect of regularizing the hidden parts of the model. we use
# 0.5 / args.xent_regularize as the learning rate factor- the factor of
# 0.5 / args.xent_regularize is suitable as it means the xent
# final-layer learns at a rate independent of the regularization
# constant; and the 0.5 was tuned so as to make the relative progress
# similar in the xent and regular final layers.
output-layer name=output-xent input=fastlstm3 output-delay=$label_delay dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5
EOF
steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/
fi
if [ $stage -le 18 ]; then
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
utils/create_split_dir.pl \
/export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage
fi
[ ! -d $dir/egs ] && mkdir -p $dir/egs/
touch $dir/egs/.nodelete # keep egs around when that run dies.
steps/nnet3/chain/train.py --stage $train_stage \
--cmd "$decode_cmd" \
--feat.online-ivector-dir $train_ivector_dir \
--feat.cmvn-opts "--norm-means=false --norm-vars=false" \
--chain.xent-regularize $xent_regularize \
--chain.leaky-hmm-coefficient 0.1 \
--chain.l2-regularize 0.00005 \
--chain.apply-deriv-weights false \
--chain.lm-opts="--num-extra-lm-states=2000" \
--egs.dir "$common_egs_dir" \
--egs.opts "--frames-overlap-per-eg 0" \
--egs.chunk-width $chunk_width \
--trainer.num-chunk-per-minibatch 128 \
--trainer.frames-per-iter 1500000 \
--trainer.num-epochs 4 \
--trainer.optimization.num-jobs-initial 2 \
--trainer.optimization.num-jobs-final 6 \
--trainer.optimization.initial-effective-lrate 0.001 \
--trainer.optimization.final-effective-lrate 0.0001 \
--trainer.max-param-change 2.0 \
--cleanup.remove-egs true \
--feat-dir $train_data_dir \
--tree-dir $tree_dir \
--lat-dir $lat_dir \
--dir $dir
fi
if [ $stage -le 19 ]; then
# Note: it might appear that this data/lang_chain directory is mismatched, and it is as
# far as the 'topo' is concerned, but this script doesn't read the 'topo' from
# the lang directory.
utils/mkgraph.sh --self-loop-scale 1.0 data/langp_test $dir $dir/graph
fi
exit 0
|
#! /bin/sh
# $Id: mixedgauge,v 1.7 2010/01/13 10:20:03 tom Exp $
#. ./setup-vars
#background="An Example of --mixedgauge usage"
# Redraw the gauge once per second, feeding increasing "-$i" values to a few
# entries. NOTE(review): per dialog(1), small non-negative values are status
# codes (0 = "Succeeded", etc.) and "-N" renders an N% mini-gauge — confirm
# against the local dialog version.
for i in 5 10 20 30 40 50 60 70 80 90 100
do
  dialog --backtitle "Mixedgauge sample" \
    --title "Mixed gauge demonstration" "$@" \
    --mixedgauge "This is a prompt message,\nand this is the second line." \
    0 0 33 \
    "Process one" "0" \
    "Process two" "1" \
    "Process three" "2" \
    "Process four" "-$i" \
    "" "8" \
    "Process five" "5" \
    "Process six" "-$i" \
    "Process seven" "7" \
    "Process eight" "-$i" \
    "Process nine" "5"
  # break
  sleep 1
done
|
#!/bin/bash
# Register and start a self-hosted runner against $REPO using $TOKEN.
# NOTE(review): chmod 666 on docker.sock grants every local user
# root-equivalent daemon access; acceptable only on throwaway CI hosts.
sudo chmod 666 /var/run/docker.sock
# Fix: was '././config.sh' (duplicated './'); also quote the variables so
# values containing spaces survive word splitting.
./config.sh --url "$REPO" --token "$TOKEN"
./run.sh
<reponame>rpatil524/COLID-Data-Marketplace-Frontend<filename>src/app/modules/authentication/azure-authentication.module.ts
import { NgModule, ModuleWithProviders, Provider } from '@angular/core';
import { CommonModule } from '@angular/common';
import { environment } from 'src/environments/environment';
import { HTTP_INTERCEPTORS } from '@angular/common/http';
import { AzureIdentityProvider } from './services/azure-identity-provider.service';
import { IDENT_PROV } from 'src/app/shared/constants';
// Msal
import { Configuration } from 'msal';
import {
MsalModule,
MsalInterceptor,
MsalService,
MsalAngularConfiguration,
MSAL_CONFIG,
MSAL_CONFIG_ANGULAR
} from '@azure/msal-angular';
// checks if the app is running on IE
// (IE11/Trident needs cookie-based auth-state storage; see MSALConfigFactory)
export const isIE = window.navigator.userAgent.indexOf('MSIE ') > -1 || window.navigator.userAgent.indexOf('Trident/') > -1;
/**
 * Builds the core MSAL configuration from the environment's ADAL settings.
 * Auth state lives in sessionStorage, with a cookie fallback on IE 11.
 */
export function MSALConfigFactory(): Configuration {
  const adal = environment.adalConfig;
  return {
    auth: {
      clientId: adal.clientId,
      authority: adal.authority,
      validateAuthority: true,
      redirectUri: adal.redirectUri,
      postLogoutRedirectUri: adal.postLogoutRedirectUri,
      navigateToLoginRequestUrl: false,
    },
    cache: {
      cacheLocation: 'sessionStorage',
      storeAuthStateInCookie: isIE, // set to true for IE 11
    },
  };
}
/**
 * Builds the Angular-specific MSAL configuration: redirect flow (no popup),
 * default consent scopes, and the map of protected backend resources.
 */
export function MSALAngularConfigFactory(): MsalAngularConfiguration {
  const protectedResourceMap = new Map(
    Object.entries(environment.adalConfig.protectedResourceMap)
  );
  return {
    popUp: false,
    consentScopes: ["openid", "profile", "email"],
    protectedResourceMap,
    unprotectedResources: [],
    extraQueryParameters: {}
  };
}
// Providers shared by the eagerly-imported module and forRoot().
// Fix: terminate the declaration with ';' instead of relying on ASI.
const providers: Provider[] = [
  MsalService,
  {
    // App-wide identity-provider abstraction backed by Azure AD.
    provide: IDENT_PROV,
    useClass: AzureIdentityProvider
  },
  {
    provide: MSAL_CONFIG,
    useFactory: MSALConfigFactory
  },
  {
    provide: MSAL_CONFIG_ANGULAR,
    useFactory: MSALAngularConfigFactory
  },
  {
    // Attaches tokens to outgoing HTTP requests for protected resources.
    provide: HTTP_INTERCEPTORS,
    useClass: MsalInterceptor, multi: true
  }
];
// Wires MSAL into the app. The providers are registered both when the module
// is imported directly and via forRoot() for explicit root configuration.
@NgModule({
declarations: [],
imports: [
CommonModule,
MsalModule,
],
providers: providers,
exports: [
MsalModule
]
})
export class AzureAuthenticationModule {
// Returns the module with the same shared provider list for the root injector.
static forRoot(): ModuleWithProviders<AzureAuthenticationModule> {
return {
ngModule: AzureAuthenticationModule,
providers: providers
};
}
}
<reponame>elliotsegler/altimeter
"""V1 API router"""
from fastapi import APIRouter
from altimeter.qj.api.v1.endpoints.jobs import JOBS_ROUTER
from altimeter.qj.api.v1.endpoints.result_sets import RESULT_SETS_ROUTER
V1_ROUTER = APIRouter()
V1_ROUTER.include_router(JOBS_ROUTER, prefix="/jobs", tags=["jobs"])
V1_ROUTER.include_router(RESULT_SETS_ROUTER, prefix="/result_sets", tags=["result_sets"])
|
#!/usr/bin/env bash
# Set the xfwm4 window-manager theme to NumixBlue via xfconf.
xfconf-query -c 'xfwm4' -p '/general/theme' -s 'NumixBlue'
|
#! /bin/bash
# Copyright (c) 2019-2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Run 'toolkit install' inside the toolkit container and verify that the
# expected binaries, wrapper scripts, libraries, and config land under
# ${shared_dir}/usr/local/nvidia/toolkit on the host side.
testing::toolkit::install() {
local -r uid=$(id -u)
local -r gid=$(id -g)
# GNU readlink is 'greadlink' on macOS (coreutils).
local READLINK="readlink"
local -r platform=$(uname)
if [[ "${platform}" == "Darwin" ]]; then
READLINK="greadlink"
fi
testing::docker_run::toolkit::shell 'toolkit install /usr/local/nvidia/toolkit'
# Files are created as root inside the container; chown them back to us.
docker run --rm -v "${shared_dir}:/work" alpine sh -c "chown -R ${uid}:${gid} /work/"
# Ensure toolkit dir is correctly setup
test ! -z "$(ls -A "${shared_dir}/usr/local/nvidia/toolkit")"
# The .so.1 entries must be symlinks that resolve to real files.
test -L "${shared_dir}/usr/local/nvidia/toolkit/libnvidia-container.so.1"
test -e "$(${READLINK} -f "${shared_dir}/usr/local/nvidia/toolkit/libnvidia-container.so.1")"
test -L "${shared_dir}/usr/local/nvidia/toolkit/libnvidia-container-go.so.1"
test -e "$(${READLINK} -f "${shared_dir}/usr/local/nvidia/toolkit/libnvidia-container-go.so.1")"
test -e "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-cli"
test -e "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-toolkit"
test -e "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime"
# The runtime wrappers are scripts; grep for their key lines. In the
# pattern, '.' matches the literal '$' of "$@" in the script being checked.
grep -q -E "nvidia driver modules are not yet loaded, invoking runc directly" "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime"
grep -q -E "exec runc \".@\"" "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime"
test -e "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-cli.real"
test -e "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-toolkit.real"
test -e "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime.real"
test -e "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime.experimental"
test -e "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime-experimental"
grep -q -E "nvidia driver modules are not yet loaded, invoking runc directly" "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime-experimental"
grep -q -E "exec runc \".@\"" "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime-experimental"
grep -q -E "LD_LIBRARY_PATH=/run/nvidia/driver/usr/lib64:\\\$LD_LIBRARY_PATH " "${shared_dir}/usr/local/nvidia/toolkit/nvidia-container-runtime-experimental"
test -e "${shared_dir}/usr/local/nvidia/toolkit/.config/nvidia-container-runtime/config.toml"
# Ensure that the config file has the required contents.
# NOTE: This assumes that RUN_DIR is '/run/nvidia'
local -r nvidia_run_dir="/run/nvidia"
grep -q -E "^\s*ldconfig = \"@${nvidia_run_dir}/driver/sbin/ldconfig(.real)?\"" "${shared_dir}/usr/local/nvidia/toolkit/.config/nvidia-container-runtime/config.toml"
grep -q -E "^\s*root = \"${nvidia_run_dir}/driver\"" "${shared_dir}/usr/local/nvidia/toolkit/.config/nvidia-container-runtime/config.toml"
grep -q -E "^\s*path = \"/usr/local/nvidia/toolkit/nvidia-container-cli\"" "${shared_dir}/usr/local/nvidia/toolkit/.config/nvidia-container-runtime/config.toml"
}
# Verify 'toolkit delete' removes exactly the target directory while leaving
# its siblings under /usr/local/nvidia intact.
testing::toolkit::delete() {
testing::docker_run::toolkit::shell 'mkdir -p /usr/local/nvidia/delete-toolkit'
testing::docker_run::toolkit::shell 'touch /usr/local/nvidia/delete-toolkit/test.file'
testing::docker_run::toolkit::shell 'toolkit delete /usr/local/nvidia/delete-toolkit'
test ! -z "$(ls -A "${shared_dir}/usr/local/nvidia")"
test ! -e "${shared_dir}/usr/local/nvidia/delete-toolkit"
}
# Suite entry point: run the install test, then the delete test.
testing::toolkit::main() {
testing::toolkit::install
testing::toolkit::delete
}
# No per-suite cleanup required; ':' keeps the hook a valid no-op.
testing::toolkit::cleanup() {
:
}
|
document.addEventListener("DOMContentLoaded", function() {
//Simulator options below - some amounts are changed based on screen width see below.
//If you want to set the settings mark mediaQueryOveride = true;
const mediaQueryOveride = true;
let fps = 60;
let drawInterval = 1000 / fps; // ms between animation frames
let xVelMax = 4; // velocity bounds (px per frame)
const xVelMin = 2;
let yVelMax = 4;
const yVelMin = 2;
let ballSizeMax = 22; // radius bounds (px)
let ballSizeMin = 5;
let ballAmount = 15;
//End simulator options
let canvas = document.getElementById("canvasele");
let context = canvas.getContext("2d");
let canvasReducer = 35; // px subtracted from the viewport height for the canvas
let canvasWidth = (canvas.width = window.innerWidth);
let canvasHeight = (canvas.height = window.innerHeight - canvasReducer);
const canvasMin = 0;
const ballArr = []; // every ball in the simulation
let scrollPos = 0;
// Last seen mouse position; updated by logMovement, read by mouseCollision.
let currentMousePos = {
mouseX: 0,
mouseY: 0
};
// Responsive defaults: scale ball size/count (and max velocity on large
// screens) to the canvas width; skipped when mediaQueryOveride is true.
// Fix: the original branch conditions used chained comparisons such as
// `431 >= canvas.width <= 810`, which evaluate as `(boolean) <= 810` and
// are always true, so the first else-if shadowed every later branch.
if (mediaQueryOveride === false) {
    if (canvas.width <= 430) {
        ballSizeMin = 5;
        ballSizeMax = 20;
        ballAmount = 15;
    } else if (canvas.width <= 810) {
        ballSizeMin = 5;
        ballSizeMax = 25;
        ballAmount = 15;
    } else if (canvas.width <= 1100) {
        ballSizeMin = 15;
        ballSizeMax = 40;
        ballAmount = 35;
    } else {
        ballSizeMin = 25;
        ballSizeMax = 60;
        ballAmount = 45;
        xVelMax = 6;
        yVelMax = 6;
    }
}
window.addEventListener("resize", canvasChecker);
window.addEventListener("mousemove", logMovement);
window.addEventListener("scroll", logScroll);
// Resize the canvas to fill the viewport, minus the reserved strip.
function canvasChecker() {
    const { innerWidth, innerHeight } = window;
    canvas.width = innerWidth;
    canvas.height = innerHeight - canvasReducer;
}
// Record the latest mouse coordinates for the ball/cursor collision check.
function logMovement(event) {
    const { clientX: mouseX, clientY: mouseY } = event;
    currentMousePos = { mouseX, mouseY };
}
// Toggle the nav-bar border and the start button's visibility by scroll depth.
// Fix: the original read `event.path[1].scrollY`; `event.path` is a
// non-standard Chrome-only property (since removed from Chrome as well) and
// is undefined in Firefox/Safari -- use window.scrollY instead. Also closed
// the boundary gap at exactly 300px, where neither branch previously fired.
function logScroll(event) {
    let startBtn = document.getElementById("startBtn");
    let navBar = document.getElementById("navBar");
    scrollPos = window.scrollY;
    if (scrollPos > 10) {
        navBar.classList.add("navBorder");
    } else {
        navBar.classList.remove("navBorder");
    }
    if (scrollPos > 300) {
        startBtn.classList.add("hideBtn");
    } else {
        startBtn.classList.remove("hideBtn");
    }
}
// Paint the whole canvas black before each frame's balls are drawn.
function drawBackground() {
context.fillStyle = "black";
context.fillRect(0, 0, canvas.width, canvas.height);
}
// Create one ball: random position kept fully inside the canvas, random
// velocity in [min, max) and random radius in [ballSizeMin, ballSizeMax).
function makeBall() {
    // Random integer in [min, min + floor(max - min)) -- same
    // Math.floor(Math.random() * range + min) pattern as the original code.
    const randInt = (min, max) =>
        Math.floor(Math.random() * Math.floor(max - min) + min);
    const edge = canvasMin + ballSizeMax;
    return {
        ballX: randInt(edge, canvasWidth - ballSizeMax),
        ballY: randInt(edge, canvasHeight - ballSizeMax),
        ballXVel: randInt(xVelMin, xVelMax),
        ballYVel: randInt(yVelMin, yVelMax),
        ballSize: randInt(ballSizeMin, ballSizeMax),
        mouseCollision: false,
        buttonCollision: false,
        ballCollisionArray: []
    };
}
// Bounce a ball off the mouse cursor: when the cursor comes within
// (radius + 10) px of the ball centre, reverse the velocity once. The
// mouseCollision flag suppresses re-triggering while they still overlap
// and is cleared as soon as they separate.
function mouseCollision(ballObject) {
    const dx = ballObject.ballX - currentMousePos.mouseX;
    const dy = ballObject.ballY - currentMousePos.mouseY;
    const touching = Math.sqrt(dx * dx + dy * dy) < ballObject.ballSize + 10;
    if (!touching) {
        ballObject.mouseCollision = false;
        return;
    }
    if (!ballObject.mouseCollision) {
        ballObject.mouseCollision = true;
        ballObject.ballXVel = -ballObject.ballXVel;
        ballObject.ballYVel = -ballObject.ballYVel;
    }
}
// Elastic collision between ballObject and every other ball in ballArr.
// A colliding pair is recorded in both balls' ballCollisionArray so the
// velocity exchange fires only once per contact; the entry is removed again
// once the balls separate.
// Fixes vs. the original:
//  - the four *InitialVel temporaries were assigned without declaration
//    (implicit globals; a ReferenceError in strict mode) -- now locals,
//  - separation cleanup spliced at index j-1 (removing the wrong element)
//    and indexed ballArr with the inner loop variable j instead of i when
//    cleaning the partner's list, so stale pair entries could linger.
function ballCollision(ballObject) {
    for (let i = 0; i < ballArr.length; i++) {
        const other = ballArr[i];
        if (other === ballObject) continue;
        const dx = ballObject.ballX - other.ballX;
        const dy = ballObject.ballY - other.ballY;
        const overlapping =
            Math.sqrt(dx * dx + dy * dy) < ballObject.ballSize + other.ballSize;
        if (overlapping) {
            if (
                !ballObject.ballCollisionArray.includes(other) &&
                !other.ballCollisionArray.includes(ballObject)
            ) {
                ballObject.ballCollisionArray.push(other);
                other.ballCollisionArray.push(ballObject);
                const vAX = ballObject.ballXVel;
                const vAY = ballObject.ballYVel;
                const vBX = other.ballXVel;
                const vBY = other.ballYVel;
                const mA = ballObject.ballSize;
                const mB = other.ballSize;
                const total = mA + mB;
                //formula for elastic collision pulled from https://www.khanacademy.org/science/physics/linear-momentum/elastic-and-inelastic-collisions/a/what-are-elastic-and-inelastic-collisions
                // (1-D elastic collision applied per axis, ballSize as mass)
                ballObject.ballXVel =
                    ((mA - mB) / total) * vAX + ((2 * mB) / total) * vBX;
                ballObject.ballYVel =
                    ((mA - mB) / total) * vAY + ((2 * mB) / total) * vBY;
                other.ballXVel =
                    ((2 * mA) / total) * vAX + ((mB - mA) / total) * vBX;
                other.ballYVel =
                    ((2 * mA) / total) * vAY + ((mB - mA) / total) * vBY;
            }
        } else {
            // Balls no longer touch: forget the pair on both sides.
            const j = ballObject.ballCollisionArray.indexOf(other);
            if (j !== -1) ballObject.ballCollisionArray.splice(j, 1);
            const k = other.ballCollisionArray.indexOf(ballObject);
            if (k !== -1) other.ballCollisionArray.splice(k, 1);
        }
    }
}
// Reflect the ball at the canvas edges, then clamp its centre back so it
// sits at least 1px inside the canvas (prevents sticking outside an edge).
function screenCollision(ballObject) {
    const r = ballObject.ballSize;
    // Left/right walls reverse the horizontal velocity.
    if (ballObject.ballX + r >= canvas.width || ballObject.ballX - r <= 0) {
        ballObject.ballXVel = -ballObject.ballXVel;
    }
    // Top/bottom walls reverse the vertical velocity.
    if (ballObject.ballY + r >= canvas.height || ballObject.ballY - r <= 0) {
        ballObject.ballYVel = -ballObject.ballYVel;
    }
    // Clamp vertically (both checks run independently, as in the original).
    if (ballObject.ballY - r < 0) {
        ballObject.ballY = r + 1;
    }
    if (ballObject.ballY + r > canvas.height) {
        ballObject.ballY = canvas.height - r - 1;
    }
    // Clamp horizontally.
    if (ballObject.ballX - r < 0) {
        ballObject.ballX = r + 1;
    }
    if (ballObject.ballX + r > canvas.width) {
        ballObject.ballX = canvas.width - r - 1;
    }
}
// Render one ball and advance it one frame: draw the circle, resolve
// wall / mouse / ball-ball collisions (which may flip the velocity), then
// integrate the position by the possibly-updated velocity.
function drawBall(ballObject) {
context.beginPath();
context.fillStyle = "white";
context.arc(
ballObject.ballX,
ballObject.ballY,
ballObject.ballSize,
0,
2 * Math.PI
);
context.fill();
screenCollision(ballObject);
mouseCollision(ballObject);
ballCollision(ballObject);
ballObject.ballX += ballObject.ballXVel;
ballObject.ballY += ballObject.ballYVel;
}
// One animation frame: clear to black, then draw/update every ball.
function drawMaster() {
drawBackground();
for (let i = 0; i < ballArr.length; i++) {
drawBall(ballArr[i]);
}
}
// Populate the simulation, then run the draw loop at ~fps frames/second.
while (ballArr.length < ballAmount) {
ballArr.push(makeBall());
}
setInterval(() => {
drawMaster();
}, drawInterval);
});
|
#!/usr/bin/env bash
# This script automatically download ray and run the sanity check (sanity_check.py)
# in various Python version. This script requires conda command to exist.
unset RAY_ADDRESS
export RAY_HASH=$RAY_HASH
export RAY_VERSION=$RAY_VERSION
# Both env vars must be supplied by the caller; fail fast otherwise.
if [[ -z "$RAY_HASH" ]]; then
echo "RAY_HASH env var should be provided"
exit 1
fi
if [[ -z "$RAY_VERSION" ]]; then
echo "RAY_VERSION env var should be provided"
exit 1
fi
if ! [ -x "$(command -v conda)" ]; then
echo "conda doesn't exist. Please download conda for this machine"
exit 1
else
echo "conda exists"
fi
echo "Start downloading Ray version ${RAY_VERSION} of commit ${RAY_HASH}"
pip install --upgrade pip
# This is required to use conda activate
source "$(conda info --base)/etc/profile.d/conda.sh"
# Exercise each supported interpreter in a throwaway conda environment.
for PYTHON_VERSION in "3.6" "3.7" "3.8"
do
env_name="${RAY_VERSION}-${PYTHON_VERSION}-env"
conda create -y -n "${env_name}" python=${PYTHON_VERSION}
conda activate "${env_name}"
printf "\n\n\n"
echo "========================================================="
echo "Python version."
python --version
echo "This should be equal to ${PYTHON_VERSION}"
echo "========================================================="
printf "\n\n\n"
# NOTE(review): installs whatever 'ray' resolves to on test.pypi;
# presumably this should pin "ray==${RAY_VERSION}" -- confirm.
pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple ray
failed=false
printf "\n\n\n"
echo "========================================================="
if python sanity_check.py; then
echo "PYTHON ${PYTHON_VERSION} succeed sanity check."
else
failed=true
fi
echo "========================================================="
printf "\n\n\n"
# Always tear the environment down before (possibly) exiting with failure.
conda deactivate
conda remove -y --name "${env_name}" --all
if [ "$failed" = true ]; then
echo "PYTHON ${PYTHON_VERSION} failed sanity check."
exit 1
fi
done
|
import React from 'react';
const BookList = ({ books }) => {
return (
<div>
{books.map(book => (
<div key={book.isbn}>
<h3>{book.title}</h3>
<span>{book.author}</span> | <span>{book.isbn}</span>
</div>
))}
</div>
);
};
export default BookList; |
package org.slos.domain;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slos.Team;
import org.slos.rating.ColorVsColorRatingResults;
import org.slos.util.ToJson;
import java.util.ArrayList;
import java.util.List;
/**
 * JSON-serializable response holding ranked teams.
 *
 * <p>{@link #toString()} renders the response as JSON: either the raw
 * {@code teamRanks} list, or -- when {@code returnCollectionIds} is true --
 * up to {@code teamCount} teams converted to collection-card ids together
 * with the color-vs-color rating results.</p>
 *
 * <p>Fix: removed the unused local {@code cardIDs} (a split of
 * {@code team.getId()}) that was computed and never read; merged the two
 * duplicated try/catch serialization paths.</p>
 */
public class TeamResponse implements ToJson {
    private List<TeamRank> teamRanks;
    // When true, toString() emits collection-card ids instead of raw ranks.
    private Boolean returnCollectionIds;
    private ColorVsColorRatingResults colorVsColorRatingResults;
    // Maximum number of teams included in the collection-id response.
    private Integer teamCount;

    public TeamResponse(List<TeamRank> teamRanks, Boolean returnCollectionIds, ColorVsColorRatingResults colorVsColorRatingResults, Integer teamCount) {
        this.teamRanks = teamRanks;
        this.returnCollectionIds = returnCollectionIds;
        this.colorVsColorRatingResults = colorVsColorRatingResults;
        this.teamCount = teamCount;
    }

    public void setTeamRanks(List<TeamRank> teamRanks) {
        this.teamRanks = teamRanks;
    }

    public List<TeamRank> getTeamRanks() {
        return teamRanks;
    }

    /** Serializes to JSON; Jackson failures are wrapped in RuntimeException. */
    @Override
    public String toString() {
        ObjectMapper objectMapper = new ObjectMapper();
        try {
            if (returnCollectionIds) {
                List<CollectionTeam> collectionTeams = new ArrayList<>();
                for (TeamRank teamRank : teamRanks) {
                    Team team = teamRank.getTeam();
                    // Skip malformed entries; card 0 is the summoner and the
                    // remaining cards are the monsters.
                    if ((team != null) && (team.getCards() != null) && (!team.getCards().isEmpty()) && (team.getCards().get(0) != null)) {
                        String summoner = team.getCards().get(0).getCollectionId();
                        List<String> monsters = new ArrayList<>();
                        for (int i = 1; i < team.getCards().size(); i++) {
                            monsters.add(team.getCards().get(i).getCollectionId());
                        }
                        collectionTeams.add(new CollectionTeam(summoner, monsters));
                    }
                }
                // Cap the payload at teamCount entries.
                List<CollectionTeam> responseTeams = new ArrayList<>(
                        collectionTeams.subList(0, Math.min(teamCount, collectionTeams.size())));
                CollectionResponse collectionResponse = new CollectionResponse(responseTeams, colorVsColorRatingResults);
                return objectMapper.writeValueAsString(collectionResponse);
            }
            return objectMapper.writeValueAsString(teamRanks);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
// JSON payload used when collection ids are requested: the selected teams
// (as collection-card ids) plus the color-vs-color rating results.
class CollectionResponse implements ToJson {
@JsonProperty("teams")
private List<CollectionTeam> team;
@JsonProperty("colorVsColorRatingResults")
private ColorVsColorRatingResults colorVsColorRatingResults;
public CollectionResponse(List<CollectionTeam> team, ColorVsColorRatingResults colorVsColorRatingResults) {
this.team = team;
this.colorVsColorRatingResults = colorVsColorRatingResults;
}
public List<CollectionTeam> getTeam() {
return team;
}
public void setTeam(List<CollectionTeam> team) {
this.team = team;
}
// Delegates to the ToJson default serialization.
@Override
public String toString() {
return toJson();
}
public ColorVsColorRatingResults getColorVsColorRatingResults() {
return colorVsColorRatingResults;
}
public void setColorVsColorRatingResults(ColorVsColorRatingResults colorVsColorRatingResults) {
this.colorVsColorRatingResults = colorVsColorRatingResults;
}
}
// Serializable pair of a summoner collection-card id and the collection-card
// ids of the team's monsters; emitted inside CollectionResponse.
class CollectionTeam {
@JsonProperty("summoner")
private String summoner;
@JsonProperty("monsters")
private List<String> monsters;
public CollectionTeam(String summoner, List<String> monsters) {
this.summoner = summoner;
this.monsters = monsters;
}
public String getSummoner() {
return summoner;
}
public void setSummoner(String summoner) {
this.summoner = summoner;
}
public List<String> getMonsters() {
return monsters;
}
public void setMonsters(List<String> monsters) {
this.monsters = monsters;
}
}
const Discord = require('discord.js')
const client = new Discord.Client()
module.exports = {
category: 'Fun',
aliases: ['ask', 'question', 'trickedbot'],
minArgs: 1,
maxArgs: -1,
expectedArgs: "<question>",
description: 'Ask maybe he will know',
callback: ({message, args, text, client, prefix, instance}) => {
if (!args[0]) return message.reply({embed: {
color: `#FF0000`,
description: `Please ask a question!`,
icon_url: message.author.displayAvatarURL()
}
});
let question = args.slice(0).join(" ");
if (question == "that was a joke right?") {
let ballembd = new Discord.MessageEmbed()
.setAuthor(message.author.tag)
.setColor(`RANDOM`)
.addField("Question", question)
.addField("Answer", "Yes.")
.setFooter(`8ball`);
message.channel.send(ballembd)
return;}
if (question == "are you rigged?") {
let ballembd = new Discord.MessageEmbed()
.setAuthor(message.author.tag)
.setColor(`RANDOM`)
.addField("Question", question)
.addField("Answer", "No.")
.setFooter(`8ball`);
message.channel.send(ballembd)
return;}
if (question == "sex penis") {
let ballembd = new Discord.MessageEmbed()
.setAuthor(message.author.tag)
.setColor(`RANDOM`)
.addField("Question", question)
.addField("Answer", "Cholera kurwa. Tak cholernie chcę zerżnąć psa z animal crossing. Nie mogę już tego znieść. Za każdym razem, gdy idę do ratusza, dostaję potężną erekcję. Widziałem dosłownie każdy artykuł porno z animal crossing online. Moje sny to tylko ciągły seks z Isabelle. Mam dość tego, że budzę się każdego ranka z sześcioma orzechami w moich bokserkach i wiem, że to są orzechy, które powinny zostać wbite w ciasną psią cipkę Isabelle. Chcę, żeby miała moje zmutowane ludzkie / psie dzieci. Kurwa, moja pieprzona mama złapała mnie z psem sąsiadów. Ubrałem ją w spódnicę mojej siostry i pojechałem do pieprzonego miasta. Nie odezwała się do mnie od 10 godzin i martwię się, że odbierze mi 3DS. Może już nigdy nie zobaczę Isabelle.")
.setFooter(`8ball`);
message.channel.send(ballembd)
return;}
if (question == "no") {
let ballembd = new Discord.MessageEmbed()
.setAuthor(message.author.tag)
.setColor(`RANDOM`)
.addField("Question", question)
.addField("Answer", "Yes.")
.setFooter(`8ball`);
message.channel.send(ballembd)
return;}
if (question == "yes") {
let ballembd = new Discord.MessageEmbed()
.setAuthor(message.author.tag)
.setColor(`RANDOM`)
.addField("Question", question)
.addField("Answer", "No.")
.setFooter(`8ball`);
message.channel.send(ballembd)
return;}
let replies = ["Yes.", "No.", "Ask tricked he knows everything.", "Maybe.", "Yes and definitely.", "It is certain.", "As I see it, yes.", "Very doubtful.", "Eh I will say yes to that.","Hey, i dont make the decisions", "NO!", "Never.", "Nope.", "Scientifically yes."];
let result = Math.floor((Math.random() * replies.length));
let ballembed = new Discord.MessageEmbed()
.setAuthor(message.author.tag)
.setColor(`RANDOM`)
.addField("Question", question)
.addField("Answer", replies[result])
.setFooter(`8ball`);
message.reply(ballembed)
}
} |
-- Count users whose login_date is more than 15 days in the past
-- (DATEDIFF(NOW(), login_date) is the age in days; MySQL dialect).
-- NOTE(review): assumes login_date holds the user's most recent login --
-- confirm against the schema.
SELECT
COUNT(*)
FROM
users
WHERE
DATEDIFF(NOW(), login_date) > 15;
#! /bin/sh
# Install the memcached-clustor component by copying the bundled memcached
# tree from bin/ into source/memcached-clustor/.
export PATH=$PATH:/opt/local/bin:/opt/local/sbin:/opt/local/share/man:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/X11/bin

# Resolve the project root: walk five levels up from this script's directory.
DIR=$(cd "$(dirname "$0")"; pwd)
DIR_PWD=$DIR
DIR=$(dirname "$DIR")
DIR=$(dirname "$DIR")
DIR=$(dirname "$DIR")
DIR=$(dirname "$DIR")
DIR=$(dirname "$DIR")
MDIR=$(dirname "$DIR")

MEM_CLUSTOR_DIR=$MDIR/source/memcached-clustor
mkdir -p "$MEM_CLUSTOR_DIR"

LOG_FILE=$MDIR/bin/logs/reinstall/cmd_big_dir_memcached-clusor_install.log
# Fix: create the log directory before redirecting into the log file; the
# original wrote to the file without ensuring bin/logs/reinstall exists.
mkdir -p "$(dirname "$LOG_FILE")"
echo "-----------" > "$LOG_FILE"

echo 'install memcached-clusor start'
if [ ! -d "$MEM_CLUSTOR_DIR/memcached" ]; then
mkdir -p "$MEM_CLUSTOR_DIR/memcached"
cp -rf "$MDIR/bin/memcached/" "$MEM_CLUSTOR_DIR/memcached/"
fi
echo 'install memcached-clusor end'
echo "-----------" >> "$LOG_FILE"
<gh_stars>0
-- Existence check: returns one row iff a save with the given id exists
-- ($1 is the id parameter; PostgreSQL-style placeholder).
SELECT 1 FROM save WHERE id=$1 LIMIT 1;
|
<gh_stars>1-10
import argparse, os, sys, math, time
import numpy as np
from DenseNet import DenseNet
from DenseNet2 import DenseNet2
from utils import readData, getDateTime, print_and_write
import evaluation.eval_segm as eval
from scipy.misc.pilutil import imread, imsave
import tensorflow as tf
import itertools
import random
import cv2
# ---------------------------------------------------------------------------
# Command-line interface for the DenseNet segmentation trainer: data paths
# and extensions, model geometry, pixel-sampling limits, and optimisation
# hyper-parameters.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument("--log_dir", type=str, default='')
parser.add_argument("--train_images", type=str)
parser.add_argument("--train_images_ext", type=str, default='png')
parser.add_argument("--train_labels", type=str, default='')
parser.add_argument("--train_labels_ext", type=str, default='png')
parser.add_argument("--test_images", type=str, default='')
parser.add_argument("--test_images_ext", type=str, default='png')
parser.add_argument("--test_labels", type=str, default='')
parser.add_argument("--test_labels_ext", type=str, default='png')
parser.add_argument("--height", type=int)
parser.add_argument("--width", type=int)
parser.add_argument("--n_classes", type=int)
parser.add_argument("--gpu_id", type=int, default=0)
parser.add_argument("--save_stitched", type=int, default=1)
parser.add_argument("--eval_every", type=int, default=100)
# NOTE(review): 'store_false' means passing --validate DISABLES validation
# (default True) — the flag name suggests the opposite; confirm intent.
parser.add_argument('--validate', action='store_false')
parser.add_argument("--val_images", type=str, default="")
parser.add_argument("--val_annotations", type=str, default="")
parser.add_argument("--batch_size", type=int, default=2)
parser.add_argument("--val_batch_size", type=int, default=2)
# Frame-range selection; a negative start_id triggers random frame selection.
parser.add_argument("--start_id", type=int, default=0)
parser.add_argument("--end_id", type=int, default=-1)
parser.add_argument("--test_start_id", type=int, default=0)
parser.add_argument("--test_end_id", type=int, default=-1)
parser.add_argument("--load_weights", type=str, default="")
parser.add_argument("--load_log", type=str, default="")
# Per-class pixel sampling: percentage of labelled pixels used per class,
# clamped to [min_indices, max_indices]; 0 selects a fixed max_indices count.
parser.add_argument("--index_percent", type=float, default=100)
parser.add_argument("--max_indices", type=int, default=10000)
parser.add_argument("--min_indices", type=int, default=1000)
parser.add_argument("--learning_rate", type=float, default=1e-3)
parser.add_argument("--n_epochs", type=int, default=1000)
parser.add_argument("--loss_type", type=int, default=0)
parser.add_argument("--save_test", type=int, default=0)
parser.add_argument("--gpu_memory_fraction", type=float, default=1.0)
parser.add_argument("--allow_memory_growth", type=int, default=1)
parser.add_argument("--restore_on_nan", type=int, default=1)
parser.add_argument("--preload_images", type=int, default=1)
parser.add_argument("--psi_act_type", type=int, default=0)
parser.add_argument("--n_layers", type=int, default=0)
# Learning-rate decay: multiply by lr_dec_rate every lr_dec_epochs epochs.
parser.add_argument("--lr_dec_epochs", type=int, default=10)
parser.add_argument("--lr_dec_rate", type=float, default=0.9)
args = parser.parse_args()
train_images_path = args.train_images
train_images_ext = args.train_images_ext
train_labels_path = args.train_labels
train_labels_ext = args.train_labels_ext
train_batch_size = args.batch_size
test_images_path = args.test_images
test_images_ext = args.test_images_ext
test_labels_path = args.test_labels
test_labels_ext = args.test_labels_ext
n_classes = args.n_classes
height = args.height
width = args.width
validate = args.validate
load_weights = args.load_weights
load_log = args.load_log
end_id = args.end_id
start_id = args.start_id
learning_rate = args.learning_rate
n_epochs = args.n_epochs
index_ratio = args.index_percent / 100.0
gpu_id = args.gpu_id
max_indices = args.max_indices
min_indices = args.min_indices
save_stitched = args.save_stitched
eval_every = args.eval_every
test_start_id = args.test_start_id
test_end_id = args.test_end_id
loss_type = args.loss_type
log_dir = args.log_dir
save_test = args.save_test
restore_on_nan = args.restore_on_nan
preload_images = args.preload_images
psi_act_type = args.psi_act_type
n_layers = args.n_layers
lr_dec_epochs = args.lr_dec_epochs
lr_dec_rate = args.lr_dec_rate
tf.reset_default_graph()
# if gpu_id < 2:
# if gpu_id < 0:
# print('Running on CPU')
# tf_device = '/cpu:0'
# else:
# tf_device = '/gpu:{}'.format(gpu_id)
# with tf.device(tf_device):
# if n_layers == 0:
# model = DenseNet(height, width, ch=3, nclass=3, loss_type=loss_type, psi_act_type=psi_act_type)
# else:
# model = DenseNet2(n_layers, height, width, ch=3, nclass=3, loss_type=loss_type, psi_act_type=psi_act_type)
# else:
if n_layers == 0:
model = DenseNet(height, width, ch=3, nclass=n_classes, loss_type=loss_type, psi_act_type=psi_act_type)
else:
model = DenseNet2(n_layers, height, width, ch=3, nclass=n_classes, loss_type=loss_type, psi_act_type=psi_act_type)
# print('Trainable variables:\n')
trainable_variables = tf.trainable_variables()
n_parameters = 0
n_variables = len(trainable_variables)
for variable in trainable_variables:
variable_shape = variable.get_shape()
# print('variable: {}\t shape: {}'.format(variable.name, variable_shape))
variable_parameters = 1
for dim in variable_shape:
variable_parameters *= dim.value
n_parameters += variable_parameters
print()
print('Model has {} trainable variables and {} trainable parameters'.format(
n_variables, n_parameters))
print()
# sys.exit()
labels_maps = list(itertools.permutations(range(n_classes)))
print('labels_maps: {}'.format(labels_maps))
gpu_memory_fraction = args.gpu_memory_fraction
allow_memory_growth = args.allow_memory_growth
session_config = tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)
session_config.gpu_options.allow_growth = allow_memory_growth
session_config.gpu_options.per_process_gpu_memory_fraction = gpu_memory_fraction
sess = tf.Session(config=session_config)
init = tf.global_variables_initializer()
saver_latest = tf.train.Saver(max_to_keep=1)
saver_acc = tf.train.Saver(max_to_keep=1)
saver_loss = tf.train.Saver(max_to_keep=1)
with sess.as_default():
init.run()
if min_indices > max_indices:
raise AssertionError('min_indices cannot be larger than min_indices')
if not train_labels_path:
train_labels_path = os.path.join(os.path.dirname(train_images_path), 'labels')
src_files, src_labels_list, total_frames = readData(train_images_path, train_images_ext, train_labels_path,
train_labels_ext)
if start_id < 0:
if end_id < 0:
raise AssertionError('end_id must be non negative for random selection')
elif end_id >= total_frames:
raise AssertionError('end_id must be less than total_frames for random selection')
print('Using {} random images for training'.format(end_id + 1))
img_ids = np.random.choice(total_frames, end_id + 1, replace=False)
else:
if end_id < start_id:
end_id = total_frames - 1
print('Using all {} images for training'.format(end_id - start_id + 1))
img_ids = range(start_id, end_id + 1)
if start_id < 0:
log_template = '{:d}_{}_{}_{}_random_{}_{}'.format(
int(args.index_percent), min_indices, max_indices, height, end_id + 1, loss_type)
else:
log_template = '{:d}_{}_{}_{}_{}_{}_{}'.format(
int(args.index_percent), min_indices, max_indices, height, start_id, end_id, loss_type)
if psi_act_type > 0:
log_template = '{}_{}'.format(log_template, model.psi_act_name)
if not log_dir:
log_root_dir = 'log'
else:
log_root_dir = os.path.dirname(log_dir)
log_template = '{}_{}'.format(os.path.basename(log_dir), log_template)
if n_layers > 0:
log_template = '{}_{}_layers'.format(log_template, n_layers)
log_dir = os.path.join(log_root_dir, log_template)
save_weights_path = os.path.join(log_dir, 'weights')
save_weights_acc_path = os.path.join(log_dir, 'weights_acc')
save_weights_loss_path = os.path.join(log_dir, 'weights_loss')
save_path = os.path.join(log_dir, 'results')
if not os.path.isdir(save_weights_path):
os.makedirs(save_weights_path)
if not os.path.isdir(save_weights_acc_path):
os.makedirs(save_weights_acc_path)
if not os.path.isdir(save_weights_loss_path):
os.makedirs(save_weights_loss_path)
if not os.path.isdir(save_path):
os.makedirs(save_path)
log_fname = os.path.join(save_path, 'log_{:s}.txt'.format(getDateTime()))
arg_names = [a for a in dir(args) if not a.startswith('__')]
with open(log_fname, 'a') as log_fid:
for arg in arg_names:
log_fid.write('{}: {}\n'.format(arg, getattr(args, arg)))
# config = tf.ConfigProto(log_device_placement=True)
# config.gpu_options.allow_growth = True
# config = tf.ConfigProto(device_count={'GPU': gpu_id})
label_diff = int(255.0 / (n_classes - 1))
start_epoch = 0
min_loss = np.inf
min_loss_epoch = -1
max_pix_acc = 0
max_pix_acc_epoch = -1
if load_weights:
if load_weights == '1':
load_weights = save_weights_path + '/'
elif load_weights == '0':
load_weights = ''
if load_weights.endswith('/'):
ckpt_path = tf.train.latest_checkpoint(load_weights)
else:
ckpt_path = load_weights
if ckpt_path:
print('Restoring weights from {}'.format(ckpt_path))
try:
saver_loss.restore(sess, ckpt_path)
try:
start_epoch = int(ckpt_path.split('-')[-1]) + 1
except:
pass
if load_log:
last_log_file = os.path.join(save_path, load_log)
else:
log_file_list = [os.path.join(save_path, k) for k in os.listdir(save_path) if
k.startswith('log_') and k.endswith('.txt')]
if len(log_file_list) <= 1:
print('No previous log files found')
last_log_file = ''
else:
log_file_list.sort()
last_log_file = log_file_list[-2]
print_and_write('Loading previous log from {}\n'.format(last_log_file), log_fname)
try:
if last_log_file:
last_log_line = open(last_log_file, 'r').readlines()[-1]
last_log_data = [k.strip() for k in last_log_line.split() if k.strip()]
print('last_log_data:\n {}'.format(last_log_data))
min_loss_data = last_log_data[7].split('(')
max_pix_acc_data = last_log_data[11].split('(')
min_loss = float(min_loss_data[0])
min_loss_epoch = int(min_loss_data[1].replace(')', ''))
max_pix_acc = float(max_pix_acc_data[0])
mean_pix_acc = float(last_log_data[9])
max_pix_acc_epoch = int(max_pix_acc_data[1].replace(')', ''))
print_and_write('Loaded min_loss: {}({}) max_pix_acc: {}({}) mean_pix_acc: {}'.format(
min_loss, min_loss_epoch, max_pix_acc, max_pix_acc_epoch, mean_pix_acc), log_fname)
if len(last_log_data) >= 14:
learning_rate = float(last_log_data[13])
print_and_write('learning_rate: {}'.format(learning_rate))
except BaseException as e:
print_and_write('Loading log was unsuccessful: {}'.format(e), log_fname)
except BaseException as e:
print_and_write('Restoring weights was unsuccessful so training from scratch: {}'.format(e), log_fname)
train_images = []
# train_labels = []
train_indices = []
# train_Y = []
optimize_label_map = (loss_type != 4 and loss_type != 5)
if preload_images:
print('Preloading images')
else:
print('Not preloading images')
print('Getting training data...')
if index_ratio == 0:
print('Using a fixed set of {} pixels per class'.format(max_indices))
_n_training_images = len(img_ids)
for _id, img_id in enumerate(img_ids):
# img_fname = '{:s}_{:d}.{:s}'.format(fname_templ, img_id + 1, img_ext)
img_fname = src_files[img_id]
img_fname_no_ext = os.path.splitext(img_fname)[0]
labels_img_fname = os.path.join(train_labels_path, img_fname_no_ext + '.{}'.format(train_labels_ext))
labels_img = imread(labels_img_fname)
if labels_img is None:
raise SystemError('Labels image could not be read from: {}'.format(labels_img_fname))
if len(labels_img.shape) == 3:
labels_img = labels_img[:, :, 0].squeeze()
# print('min: {} max: {}'.format(
# np.min(labels_img.flatten()),
# np.max(labels_img.flatten()))
# )
# np.savetxt('labels_img.txt', labels_img, fmt='%d')
# sys.exit()
labels_indices = []
# Y = np.zeros((height * width, n_classes), dtype=np.float32)
skip_image = 0
for class_id in range(n_classes):
class_indices = np.flatnonzero(labels_img == class_id)
if class_indices.shape[0] < min_indices:
skip_image = 1
print('\nimg {} class {} class_indices.shape: {} '.format(img_id + 1, class_id, class_indices.shape))
break
# Y[class_indices, class_id] = 1
# y_save_path = os.path.join(save_path, '{}_{}.png'.format(img_fname_no_ext, class_id))
# imsave(y_save_path, np.reshape(Y[:, class_id]*255, (height, width)).astype(np.uint8))
if index_ratio == 0:
class_indices = np.random.choice(class_indices, (max_indices, 1), replace=False)
labels_indices.append(class_indices)
if skip_image:
continue
src_img_fname = os.path.join(train_images_path, img_fname)
if preload_images:
src_img = imread(src_img_fname)
if src_img is None:
raise SystemError('Source image could not be read from: {}'.format(src_img_fname))
src_img = src_img / np.amax(src_img)
src_img = np.reshape(src_img, (1, height, width, 3)).astype(np.float32)
train_images.append(src_img)
else:
train_images.append(src_img_fname)
# train_labels.append(labels_img)
train_indices.append(labels_indices)
# train_Y.append(Y)
sys.stdout.write('\rDone {}/{} images'.format(_id + 1, _n_training_images))
sys.stdout.flush()
print()
n_images = len(train_images)
if n_images == 0:
raise AssertionError('no valid training images found')
# sys.exit()
print('Getting testing data...')
if test_images_path:
if not test_labels_path:
test_labels_path = os.path.join(os.path.dirname(test_images_path), 'labels')
test_file_list, test_labels_list, total_test_frames = readData(test_images_path, test_images_ext,
test_labels_path,
test_labels_ext)
else:
test_file_list, test_labels_list, total_test_frames = src_files, src_labels_list, total_frames
test_images_path, test_labels_path = train_images_path, train_labels_path
test_images_ext, test_labels_ext = train_images_ext, train_labels_ext
if test_start_id < 0:
if test_end_id < 0:
raise AssertionError('test_end_id must be non negative for random selection')
elif end_id >= total_test_frames:
raise AssertionError('test_end_id must be less than total_test_frames for random selection')
print('Using {} random images for evaluation'.format(test_end_id + 1))
test_img_ids = np.random.choice(total_test_frames, test_end_id + 1, replace=False)
else:
if test_end_id < test_start_id:
test_end_id = total_test_frames - 1
test_img_ids = range(test_start_id, test_end_id + 1)
test_images = []
test_images_orig = []
test_labels = []
test_names = []
_n_test_images = len(test_img_ids)
for _id, img_id in enumerate(test_img_ids):
# img_fname = '{:s}_{:d}.{:s}'.format(fname_templ, img_id + 1, img_ext)
img_fname = test_file_list[img_id]
img_fname_no_ext = os.path.splitext(img_fname)[0]
src_img_fname = os.path.join(test_images_path, img_fname)
labels_img_fname = os.path.join(test_labels_path, img_fname_no_ext + '.{}'.format(test_labels_ext))
if preload_images:
src_img = imread(src_img_fname)
if src_img is None:
raise SystemError('Source image could not be read from: {}'.format(src_img_fname))
if save_test and save_stitched:
test_images_orig.append(src_img)
src_img = src_img / np.amax(src_img)
src_img = np.reshape(src_img, (1, height, width, 3)).astype(np.float32)
test_images.append(src_img)
labels_img = imread(labels_img_fname)
if labels_img is None:
raise SystemError('Labels image could not be read from: {}'.format(labels_img_fname))
if len(labels_img.shape) == 3:
labels_img = labels_img[:, :, 0].squeeze()
test_labels.append(labels_img)
else:
test_images.append(src_img_fname)
test_labels.append(labels_img_fname)
test_names.append(img_fname_no_ext)
sys.stdout.write('\rDone {}/{} images'.format(_id + 1, _n_test_images))
sys.stdout.flush()
print()
n_test_images = len(test_images)
# feed_dict_list = []
# for img_id in range(n_images):
# src_img = train_images[img_id]
# label_img = train_labels[img_id]
#
# feed_dict = {m.X: src_img, m.lr: learning_rate}
# for class_id in range(n_classes):
# class_indices = np.flatnonzero(label_img == class_id)
# feed_dict.update({m.class_indices[class_id]: class_indices})
# feed_dict_list.append(feed_dict)
pix_acc = np.zeros((n_test_images,))
# mean_acc = np.zeros((n_test_images,))
# mean_IU = np.zeros((n_test_images,))
# fw_IU = np.zeros((n_test_images,))
feed_dict = None
sys.stdout.write('Saving latest checkpoint to: {}\n'.format(save_weights_path))
sys.stdout.write('Saving max accuracy checkpoint to: {}\n'.format(save_weights_acc_path))
sys.stdout.write('Saving min loss checkpoint to: {}\n'.format(save_weights_loss_path))
sys.stdout.write('Saving results to: {}\n'.format(save_path))
sys.stdout.write('Saving log to: {}\n'.format(log_fname))
sys.stdout.flush()
print_and_write('Training on {:d} images'.format(n_images), log_fname)
print_and_write('Evaluating on {:d} images'.format(n_test_images), log_fname)
if restore_on_nan:
print_and_write('Using previous checkpoint restoring on NaN loss', log_fname)
else:
print_and_write('Using remaining images skipping on NaN loss', log_fname)
print_and_write('Using {} as psi activation function'.format(model.psi_act_name))
img_ids = list(range(n_images))
epoch_id = start_epoch
while epoch_id < n_epochs:
# print('Epoch {}/{}'.format(epoch + 1, n_epochs))
# losses = []
avg_loss = 0
# nan_loss = False
for img_id in img_ids:
overall_start_t = time.time()
if preload_images:
src_img = train_images[img_id]
else:
src_img_fname = train_images[img_id]
src_img = imread(src_img_fname)
if src_img is None:
raise SystemError('Source image could not be read from: {}'.format(src_img_fname))
src_img = src_img / np.amax(src_img)
src_img = np.reshape(src_img, (1, height, width, 3)).astype(np.float32)
# height, width, _ = src_img.shape
# print('height: ', height)
# print('width: ', width)
# src_img_exp = np.expand_dims(src_img, axis=0)
feed_dict = {model.X: src_img, model.lr: learning_rate}
# feed_dict.update({m.height: height, m.width: width})
labels_indices = train_indices[img_id]
Y = np.zeros((height * width, n_classes), dtype=np.float32)
for class_id in range(n_classes):
if index_ratio == 0:
class_indices = labels_indices[class_id]
else:
n_indices = min(max(min_indices, int(index_ratio * labels_indices[class_id].shape[0])),
max_indices)
# print('n_indices: ', n_indices)
class_indices = np.random.choice(labels_indices[class_id], (n_indices, 1), replace=False)
Y[class_indices, class_id] = 1
feed_dict.update({model.class_indices[class_id]: class_indices})
feed_dict.update({model.Y: Y})
# feed_dict = feed_dict_list[img_id]
start_t = time.time()
psi, phi_den, phi, loss, _ = sess.run(
(model.psi, model.phi_den, model.phi, model.loss_convnet, model.training_op),
feed_dict=feed_dict)
end_t = time.time()
fps = 1.0 / (end_t - start_t)
nan_loss = 0
if math.isnan(loss):
print_and_write('\nNaN loss encountered for image {} in epoch {}'.format(img_id, epoch_id), log_fname)
# print_and_write('phi:\n {}'.format(phi), log_fname)
# print_and_write('psi:\n {}'.format(psi), log_fname)
# print_and_write('phi_den:\n {}'.format(phi_den), log_fname)
np.savetxt(os.path.join(save_path, 'phi_{}_{}.dat'.format(img_id, epoch_id)),
np.asarray(phi), delimiter='\t', fmt='%.4f')
np.savetxt(os.path.join(save_path, 'psi_{}_{}.dat'.format(img_id, epoch_id)),
np.asarray(psi), delimiter='\t', fmt='%.4f')
np.savetxt(os.path.join(save_path, 'phi_den_{}_{}.dat'.format(img_id, epoch_id)),
np.asarray(phi_den), delimiter='\t', fmt='%.4f')
if restore_on_nan:
ckpt_path = tf.train.latest_checkpoint(save_weights_path)
print_and_write('Restoring weights from {}'.format(ckpt_path), log_fname)
saver_loss.restore(sess, ckpt_path)
img_ids.remove(img_id)
n_images -= 1
nan_loss = 1
break
else:
continue
# losses.append(loss)
avg_loss += (loss - avg_loss) / (img_id + 1)
overall_end_t = time.time()
overall_fps = 1.0 / (overall_end_t - overall_start_t)
sys.stdout.write('\rDone {:5d}/{:5d} frames in epoch {:5d} ({:6.2f}/{:6.2f} fps) avg_loss: {:f}'.format(
img_id + 1, n_images, epoch_id, fps, overall_fps, avg_loss))
sys.stdout.flush()
# if save_after_each_step:
# loss_convnet_val = m.loss_convnet.eval(feed_dict=feed_dict)
# print("Image:", img_id + 1, "Convnet loss:", loss_convnet_val)
# Seg = np.zeros((height, width))
# for i in range(height):
# for j in range(width):
# val = -1
# label = -1
# for n in range(n_classes):
# if ClassIndicator[i, j, n] > val:
# val = ClassIndicator[i, j, n]
# label = n
# Seg[i, j] = label*label_diff
if nan_loss:
continue
print()
if epoch_id % eval_every == 0:
optimal_label_map = None
if optimize_label_map:
print('\nGetting optimal label map...')
random_img_ids = np.random.choice(n_test_images, 10, replace=False)
_max_pix_acc = 0
for labels_map in labels_maps:
_mean_pix_acc = 0
random_id = 0
for _img_id in random_img_ids:
if preload_images:
test_img = test_images[_img_id]
if save_test:
test_img_orig = test_images_orig[_img_id]
labels_img = test_labels[_img_id]
else:
src_img_fname = test_images[_img_id]
test_img_orig = imread(src_img_fname)
if test_img_orig is None:
raise SystemError('Source image could not be read from: {}'.format(src_img_fname))
test_img = test_img_orig / np.amax(test_img_orig)
test_img = np.reshape(test_img, (1, height, width, 3)).astype(np.float32)
labels_img_fname = test_labels[_img_id]
labels_img = imread(labels_img_fname)
if labels_img is None:
raise SystemError('Labels image could not be read from: {}'.format(labels_img_fname))
if len(labels_img.shape) == 3:
labels_img = labels_img[:, :, 0].squeeze()
phi_val = model.phi.eval(session=sess, feed_dict={model.X: test_img})
ClassIndicator = phi_val.reshape((height, width, n_classes))
labels = np.argmax(ClassIndicator, axis=2)
labels_mapped = np.vectorize(lambda x: labels_map[x])(labels)
_pix_acc = eval.pixel_accuracy(labels_mapped, labels_img)
random_id += 1
_mean_pix_acc += (_pix_acc - _mean_pix_acc) / random_id
if _mean_pix_acc > _max_pix_acc:
_max_pix_acc = _mean_pix_acc
optimal_label_map = labels_map
print('optimal_label_map: {}'.format(optimal_label_map))
print('_max_pix_acc: {}'.format(_max_pix_acc))
print('\nTesting...')
mean_pix_acc = 0
for img_id in range(n_test_images):
overall_start_t = time.time()
if preload_images:
test_img = test_images[img_id]
if save_test:
test_img_orig = test_images_orig[img_id]
labels_img = test_labels[img_id]
else:
src_img_fname = test_images[img_id]
test_img_orig = imread(src_img_fname)
if test_img_orig is None:
raise SystemError('Source image could not be read from: {}'.format(src_img_fname))
test_img = test_img_orig / np.amax(test_img_orig)
test_img = np.reshape(test_img, (1, height, width, 3)).astype(np.float32)
labels_img_fname = test_labels[img_id]
labels_img = imread(labels_img_fname)
if labels_img is None:
raise SystemError('Labels image could not be read from: {}'.format(labels_img_fname))
if len(labels_img.shape) == 3:
labels_img = labels_img[:, :, 0].squeeze()
start_t = time.time()
phi_val = model.phi.eval(session=sess, feed_dict={model.X: test_img})
ClassIndicator = phi_val.reshape((height, width, n_classes))
labels = np.argmax(ClassIndicator, axis=2)
end_t = time.time()
fps = 1.0 / (end_t - start_t)
fps_with_input = 1.0 / (end_t - overall_start_t)
if optimize_label_map:
labels = np.vectorize(lambda x: optimal_label_map[x])(labels)
pix_acc[img_id] = eval.pixel_accuracy(labels, labels_img)
# mean_acc[img_id] = eval.mean_accuracy(labels, labels_img)
# mean_IU[img_id] = eval.mean_IU(labels, labels_img)
# fw_IU[img_id] = eval.frequency_weighted_IU(labels, labels_img)
if save_test:
print('Saving test result')
Seg = (labels * label_diff).astype(np.uint8)
seg_save_path = os.path.join(save_path, '{:s}_epoch_{:d}.png'.format(
test_names[img_id], epoch_id + 1))
if save_stitched:
gt_seq = (labels_img * label_diff).astype(np.uint8)
if len(gt_seq.shape) != 3:
gt_seq = np.stack((gt_seq, gt_seq, gt_seq), axis=2)
if len(Seg.shape) != 3:
Seg = np.stack((Seg, Seg, Seg), axis=2)
Seg = np.concatenate((test_img_orig, gt_seq, Seg), axis=1)
imsave(seg_save_path, Seg)
overall_end_t = time.time()
overall_fps = 1.0 / (overall_end_t - overall_start_t)
mean_pix_acc += (pix_acc[img_id] - mean_pix_acc) / (img_id + 1)
sys.stdout.write('\rDone {:5d}/{:5d} frames in epoch {:5d} ({:6.2f}({:6.2f}, {:6.2f}) fps) '
'pix_acc: {:.10f}'.format(
img_id + 1, n_test_images, epoch_id, fps, fps_with_input, overall_fps, mean_pix_acc))
sys.stdout.flush()
print()
# mean_pix_acc = np.mean(pix_acc)
if mean_pix_acc > max_pix_acc:
max_pix_acc = mean_pix_acc
max_pix_acc_epoch = epoch_id
saver_acc.save(sess, os.path.join(save_weights_acc_path, 'model.ckpt-{}'.format(epoch_id)))
# loss_convnet_val = m.loss_convnet.eval(feed_dict=feed_dict)
# loss_convnet_val = np.mean(losses)
loss_convnet_val = avg_loss
if loss_convnet_val < min_loss:
min_loss = loss_convnet_val
min_loss_epoch = epoch_id
saver_loss.save(sess, os.path.join(save_weights_loss_path, 'model.ckpt-{}'.format(epoch_id)))
saver_latest.save(sess, os.path.join(save_weights_path, 'model.ckpt-{}'.format(epoch_id)))
print_and_write("{:s} :: epoch: {:4d} loss: {:.10f} min_loss: {:.10f}({:d}) pix_acc: {:.10f} " \
"max_pix_acc: {:.10f}({:d}) lr: {:.10f}".format(
log_template, epoch_id, loss_convnet_val, min_loss, min_loss_epoch, mean_pix_acc,
max_pix_acc, max_pix_acc_epoch, learning_rate), log_fname)
epoch_id += 1
if lr_dec_epochs > 0 and (epoch_id + 1) % lr_dec_epochs == 0:
learning_rate = lr_dec_rate * learning_rate
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# ktools insured-loss pipeline for partition P19 (normal + fully-correlated
# streams). BUG FIX: all mkdir calls now use -p — the `find output -type f`
# cleanup removes only files, so directories such as output/full_correlation/
# survive a previous run and a bare `mkdir` would abort the script under -e.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---
mkdir -p log
rm -R -f log/*

# --- Setup run dirs ---
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
mkdir -p output/full_correlation/

rm -R -f fifo/*
mkdir -p fifo/full_correlation/
rm -R -f work/*
mkdir -p work/kat/
mkdir -p work/full_correlation/
mkdir -p work/full_correlation/kat/

mkdir -p work/il_S1_summaryleccalc
mkdir -p work/full_correlation/il_S1_summaryleccalc
mkfifo fifo/full_correlation/gul_fc_P19

mkfifo fifo/il_P19
mkfifo fifo/il_S1_summary_P19
mkfifo fifo/il_S1_summary_P19.idx

mkfifo fifo/full_correlation/il_P19
mkfifo fifo/full_correlation/il_S1_summary_P19
mkfifo fifo/full_correlation/il_S1_summary_P19.idx

# --- Do insured loss computes ---
tee < fifo/il_S1_summary_P19 work/il_S1_summaryleccalc/P19.bin > /dev/null & pid1=$!
tee < fifo/il_S1_summary_P19.idx work/il_S1_summaryleccalc/P19.idx > /dev/null & pid2=$!
summarycalc -m -f  -1 fifo/il_S1_summary_P19 < fifo/il_P19 &

# --- Do insured loss computes ---
tee < fifo/full_correlation/il_S1_summary_P19 work/full_correlation/il_S1_summaryleccalc/P19.bin > /dev/null & pid3=$!
tee < fifo/full_correlation/il_S1_summary_P19.idx work/full_correlation/il_S1_summaryleccalc/P19.idx > /dev/null & pid4=$!
summarycalc -m -f  -1 fifo/full_correlation/il_S1_summary_P19 < fifo/full_correlation/il_P19 &

# Drive both streams: the gulcalc output is split into the normal il path and
# the fully-correlated path (via the gul_fc fifo and a second fmcalc).
fmcalc -a2 < fifo/full_correlation/gul_fc_P19 > fifo/full_correlation/il_P19 &
eve 19 20 | getmodel | gulcalc -S100 -L100 -r -j fifo/full_correlation/gul_fc_P19 -a1 -i - | fmcalc -a2 > fifo/il_P19 &

wait $pid1 $pid2 $pid3 $pid4

# --- Do insured loss kats ---

# --- Do insured loss kats for fully correlated output ---
// Boost.Geometry
// Unit Test
// Copyright (c) 2018 <NAME>, Islamabad, Pakistan.
// Contributed and/or modified by <NAME>, as part of Google Summer of Code 2018 program.
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_GEOMETRY_TEST_INVERSE_CASES_SMALL_ANGLES_HPP
#define BOOST_GEOMETRY_TEST_INVERSE_CASES_SMALL_ANGLES_HPP
#include "inverse_cases.hpp"
// One inverse-geodesic test case: a pair of input points and the reference
// solution computed with Karney's method (GeographicLib).
// NOTE(review): coordinate component order is presumably (lon, lat) in
// degrees, matching inverse_cases.hpp — confirm against that header.
struct expected_results_small_angles
{
coordinates p1;
coordinates p2;
expected_result karney;
};
/*
These values are collected from GeodTest which is associated with GeographicLib:
https://zenodo.org/record/32156
The conversion to C++ array format is done using this Python script:
https://github.com/adl1995/boost-geometry-extra/blob/master/geographiclib-dataset-parse-inverse.py
Geodesic scale (M12) is absent from the GeodTest dataset, so it is manually generated
using GeographicLib using this C++ script:
https://github.com/adl1995/boost-geometry-extra/blob/master/geographicLib-direct-small_angles.cpp
*/
expected_results_small_angles expected_small_angles[] =
{
{
{ 180, 0 },{ 0, 0 },
{ 20003931.45862544700503349304, -0.00000000000000000000, 180.00000000000000000000, 67125.61229850351810455322266, -1.00000000000000000000 },
},{
{ 180, 0 },{ 1e-300, 0 },
{ 20003931.45862544700503349304, -0.00000000000000000000, 180.00000000000000000000, 67125.61229850351810455322266, -1.00000000000000000000 },
},{
{ 180, 0 },{ 1e-200, 0 },
{ 20003931.45862544700503349304, -0.00000000000000000000, 180.00000000000000000000, 67125.61229850351810455322266, -1.00000000000000000000 },
},{
{ 180, 0 },{ 1e-100, 0 },
{ 20003931.45862544700503349304, -0.00000000000000000000, 180.00000000000000000000, 67125.61229850351810455322266, -1.00000000000000000000 },
},{
{ 180, 0 },{ 1e-50, 0 },
{ 20003931.45862544700503349304, -0.00000000000000000000, 180.00000000000000000000, 67125.61229850351810455322266, -1.00000000000000000000 },
},{
{ 180, 0 },{ 1e-20, 0 },
{ 20003931.45862544700503349304, -9.4938222885831061895e-19, 180.00000000000000000000, 67125.61229850351810455322266, -1.00000000000000000000 },
},{
{ 180, 0 },{ 1e-10, 0 },
{ 20003931.45862544700503349304, -9.501793528220011062168943853e-09, -179.9999999904981962117744843, 67125.61229850351810455322266, -1 },
},{
{ 0, 1e-100 },{ 170, 1e-200},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 1e-300 },{ 170, 1e-50},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 1e-300 },{ 170, 1e-10},
{ 18924313.43485650792717933655, 89.99999999939157646622334141, 90.00000000060019544889655663, 1041298.80855225014965981245, -0.9864919282563420210863114335 },
},{
{ 0, 1e-100 },{ 170, 1e-50},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 1e-200 },{ 170, 1e-50},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 0.0 },{ 170, 1e-10},
{ 18924313.43485650792717933655, 89.99999999939157646622334141, 90.00000000060019544889655663, 1041298.80855225014965981245, -0.9864919282563420210863114335 },
},{
{ 0, 1e-20 },{ 170, 1e-100},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 1e-100 },{ 170, 0.0},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 1e-10 },{ 170, 1e-300},
{ 18924313.43485650792717933655, 89.99999999939980455110344337, 90.00000000060842353377665859, 1041298.80855225014965981245, -0.9864919282563420210863114335 },
},{
{ 0, 1e-300 },{ 170, 1e-100},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 1e-200 },{ 170, 1e-100},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 1e-10 },{ 170, 1e-50},
{ 18924313.43485650792717933655, 89.99999999939980455110344337, 90.00000000060842353377665859, 1041298.80855225014965981245, -0.9864919282563420210863114335 },
},{
{ 0, 0.0 },{ 170, 1e-200},
{ 18924313.43485650792717933655, 90, 90, 1041298.808552250848151743412, -0.9864919282563420210863114335 },
},{
{ 0, 0.0 },{ 170, 1e-10},
{ 18924313.43485650792717933655, 89.99999999939157646622334141, 90.00000000060019544889655663, 1041298.80855225014965981245, -0.9864919282563420210863114335 },
}
};
size_t const expected_size_small_angles = sizeof(expected_small_angles) / sizeof(expected_results_small_angles);
#endif // BOOST_GEOMETRY_TEST_INVERSE_CASES_SMALL_ANGLES_HPP
|
#include "Config.h"
#include "Storage.h"
#include "StorageSqlite.h"
#include "utils/Memory.h"
namespace Storage
{
// Global active storage backend; NULL until SetEngine() selects one.
Storage * storage = NULL;
// Select the storage backend by name (case-insensitive via _STR.strcmpi).
// Only "sqlite" is recognised; any other name leaves `storage` unchanged.
void Storage::SetEngine( const char * name )
{
if(_STR.strcmpi(name, "sqlite") == 0)
{
storage = &storageSqlite.GetSingleton();
}
}
}
|
/*
* Gdańsk University of Technology - Engineering Thesis
* Malicious Module for Netbeans
*
* <NAME>, <NAME>, <NAME>
*/
package pl.gda.pg.eti.kio.malicious.entity;
import java.awt.EventQueue;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import pl.gda.pg.eti.kio.malicious.annotation.CreatableMalicious;
import pl.gda.pg.eti.kio.malicious.attribute.TimeAttribute;
import pl.gda.pg.eti.kio.malicious.attribute.TimeRangeAttribute;
import pl.gda.pg.eti.kio.malicious.event.MaliciousEvent;
/**
*
* @author <NAME>
*/
/**
 * Malicious module that introduces an artificial delay on the AWT event
 * queue. The delay length comes from the "length" parameter, falling back
 * to a random value in the range [2, 8] seconds.
 */
@CreatableMalicious(name = "environment")
public class Environment extends BaseMalice {

    /** Delay configuration (seconds, possibly randomised per execution). */
    private TimeAttribute attributes;
    private final Logger log = LogManager.getLogger(Environment.class);
    private String name = getClass().getSimpleName();

    @Override
    protected void onStart() {
        attributes = new TimeAttribute();
        String parameter = (String) (this.getParameter("length"));
        // Fall back to a 2-8 second random range when the parameter is
        // missing or malformed.
        if (!attributes.parseParameter(parameter)) {
            attributes.setRange(2, 8);
        }
    }

    @Override
    protected void execute(MaliciousEvent event) {
        // BUG FIX: the original null check had an empty body and then
        // dereferenced `attributes` anyway (NPE when execute() runs before
        // onStart()). Initialise with the same defaults onStart() uses.
        if (attributes == null) {
            attributes = new TimeAttribute();
            attributes.setRange(2, 8);
        }
        if (attributes.isRandom() ) {
            attributes.setRandomTime();
        }
        // NOTE(review): Thread.sleep here runs on the event-dispatch thread
        // and freezes the UI for the whole delay — presumably the intended
        // "malicious" behaviour of this module.
        EventQueue.invokeLater(new Runnable() {
            @Override
            public void run() {
                try {
                    Thread.sleep(attributes.getTime()*1000);
                    log.info("Completed: {} - {}", name, "Environment completed successfully");
                } catch (InterruptedException ex) {
                    log.error("Environment completed unsuccessfully", ex);
                }
            }
        });
    }

    /** Overrides the configured delay with a fixed time in seconds. */
    public void setAttributes(String time) {
        attributes = new TimeAttribute();
        attributes.setTime(Integer.valueOf(time));
    }
}
|
// ------------------------------
// Automated code formatter before compilation
// Disabled by default because this is confusing for beginners
//scalariformSettings
// ------------------------------
// for ./skinny console
initialCommands := """
import skinny._
import _root_.controller._, model._
import org.joda.time._
import scalikejdbc._, config._
DBSettings.initialize()
"""
// ------------------------------
// sbt-dependency-graph
//net.virtualvoid.sbt.graph.Plugin.graphSettings
|
#!/bin/sh
# Typeset the FIR lattice-filter document to PDF.
pdflatex FIR-system-lattice.tex
|
#!/bin/bash
# Run a single dieharder statistical test (-d 2) against generator -g 27,
# with a fixed seed (-S) so the run is reproducible.
dieharder -d 2 -g 27 -S 1951719681
|
public class TestMenuItem implements MenuItem {
private String labelText;
public TestMenuItem(String labelText) {
this.labelText = labelText;
}
@Override
public void labelShouldHave(String text) {
if (!labelText.equals(text)) {
throw new AssertionError("Label text does not match the expected text");
}
}
@Override
public void click() {
// Simulate clicking on the menu item
System.out.println("Clicked on the menu item with label: " + labelText);
}
} |
# Activate the conda environment that provides the benchmark's dependencies.
source activate icu-benchmark
# Evaluate a pretrained LightGBM-with-features model on the HiRID
# Phenotyping (APACHE group) task using the icu_benchmarks CLI.
# NOTE(review): the last line ends with a dangling backslash-continuation —
# harmless if the script ends here, but confirm no trailing flag was lost.
python -m icu_benchmarks.run evaluate \
-c configs/hirid/Classification/LGBM_w_feat.gin \
-l files/pretrained_weights/LGBM_w_feat/ \
-t Phenotyping_APACHEGroup \
--loss-weight balanced \
--num-class 15 \
--maxlen 288 \
-o True \
--depth 5 \
--subsample-feat 0.33 \
--subsample-data 0.33 \
package com.inner.lovetao.mineassets.di.component;
import com.inner.lovetao.mineassets.di.module.EarningsDetailModule;
import com.inner.lovetao.mineassets.mvp.contract.EarningsDetailContract;
import com.inner.lovetao.mineassets.mvp.ui.activity.EarningsDetailActivity;
import com.jess.arms.di.component.AppComponent;
import com.jess.arms.di.scope.ActivityScope;
import dagger.BindsInstance;
import dagger.Component;
/**
* ================================================
* Description:
* <p>
* Created by feihaokui on 02/18/2019 11:15
*/
@ActivityScope
@Component(modules = EarningsDetailModule.class, dependencies = AppComponent.class)
public interface EarningsDetailComponent {
// Performs field injection on the earnings-detail screen.
void inject(EarningsDetailActivity activity);
// Dagger builder: the MVP view is bound as an instance; the AppComponent
// dependency supplies application-wide objects.
@Component.Builder
interface Builder {
@BindsInstance
EarningsDetailComponent.Builder view(EarningsDetailContract.View view);
EarningsDetailComponent.Builder appComponent(AppComponent appComponent);
EarningsDetailComponent build();
}
}
<gh_stars>10-100
/**
* Utility classes related to security functions.
*/
package io.opensphere.core.util.security;
|
package com.abubusoft.kripton.examplea0.recipes.v3.model;
import com.abubusoft.kripton.android.annotation.BindColumn;
import com.abubusoft.kripton.android.annotation.BindTable;
import com.abubusoft.kripton.annotation.BindType;
/**
* Created by xcesco on 01/09/2017.
*/
@BindType
@BindTable(name="authors", uniqueIndexes = {"name, surname asc", "email"})
public class AuthorV3 extends Entity {
// NOT NULL column; (name, surname) forms one unique index (see @BindTable).
@BindColumn(nullable = false)
public String name;
// NOT NULL column; second part of the (name, surname) unique index.
@BindColumn(nullable = false)
public String surname;
// NOT NULL column with its own unique index.
@BindColumn(nullable = false)
public String email;
}
|
import * as fs from 'fs/promises';
import * as p from 'path';
import { exists, getFiles } from './utils/fs';
import { config } from './config';
// Synchronizes the top-level keys of `dist` with those of `src` (in place):
// keys absent from `src` are removed, keys absent from `dist` are copied
// over, and keys present in both keep the `dist` value.
const mergeObjects = (src: any, dist: any) => {
  const srcKeySet = new Set(Object.keys(src));
  const distKeySet = new Set(Object.keys(dist));
  for (const key of distKeySet) {
    if (!srcKeySet.has(key)) {
      // eslint-disable-next-line no-param-reassign
      delete dist[key];
    }
  }
  for (const key of srcKeySet) {
    if (!distKeySet.has(key)) {
      // eslint-disable-next-line no-param-reassign
      dist[key] = src[key];
    }
  }
};
// Scans every .ts/.tsx file under the configured source directory for
// translation calls of the shape `.k<digits>('default text')` and builds the
// initial resource tree: { lang: { moduleName: { k<digits>: defaultText } } }.
export const scanResources = async () => {
  const mods = new Array<any>();
  for await (const file of getFiles((await config()).source)) {
    if (file.endsWith('.ts') || file.endsWith('.tsx')) {
      const data = await fs.readFile(file, 'utf-8');
      // Matches .k123("text"), .k123('text') or .k123(`text`), non-greedy body.
      const match = data.matchAll(/\.k(?<key>[\d]+)\(["'`](?<defaultValue>.*?)["'`]\)/gs);
      const mod = {} as any;
      mods.push(mod);
      // The translation namespace is the scanned file's base name.
      const modName = p.parse(file).name;
      for (const m of match) {
        if (m.groups) {
          if (!mod[modName]) {
            mod[modName] = {} as any;
          }
          // Fix: unescape EVERY \' sequence in the captured default value.
          // The original used String.replace with a *string* pattern, which
          // only replaces the first occurrence.
          mod[modName][`k${m.groups.key}`] = m.groups.defaultValue.replace(/\\'/g, '\'');
        }
      }
    }
  }
  // Keep only files that actually contained translation keys.
  const existedMods = mods.filter((m) => Object.keys(m).length > 0);
  const keys = {} as any;
  for (const e of existedMods) {
    Object.assign(keys, e);
  }
  // Every configured language starts from the same scanned key set.
  const res = {} as any;
  for (const lang of (await config())['available-langs']) {
    res[lang] = { ...keys };
  }
  return res;
};
// Reconciles a freshly scanned resource tree (newRes) with the resources
// already on disk (existingRes), mutating and returning existingRes:
// languages, namespaces and keys are added/removed to match newRes, while
// values already present in existingRes (i.e. translations) are preserved.
export const mergeResources = async (newRes: any, existingRes: any): Promise<any> => {
mergeObjects(newRes, existingRes);
for (const lang of Object.keys(newRes)) {
mergeObjects(newRes[lang], existingRes[lang]);
const newLang = newRes[lang];
const distLang = existingRes[lang];
for (const ns of Object.keys(newLang)) {
mergeObjects(newLang[ns], distLang[ns]);
}
}
return existingRes;
};
// Entry point: scans the sources and writes/updates the resources file.
// Creates the file when absent; otherwise merges the fresh scan into the
// existing resources so already-translated values survive re-scans.
export const createTranslations = async () => {
const res = await scanResources();
const resources = (await config()).resourcesFile;
if (!await exists(resources)) {
await fs.writeFile(resources, JSON.stringify(res, null, ' '), 'utf-8');
} else {
const existingRes = JSON.parse(await fs.readFile((await config()).resourcesFile, 'utf-8'));
const data = JSON.stringify(await mergeResources(res, existingRes), null, ' ');
await fs.writeFile(resources, data, 'utf-8');
}
};
|
<reponame>matt-tingen/userscripts
(function() {
const { css } = window.__MJT_USERSCRIPTS__.utils;
css`
:root {
--mjt-toggle-height: 0.9rem;
--mjt-toggle-width: 2rem;
}
.togg {
position: relative;
}
.togg:after {
content: '';
position: absolute;
padding: var(--mjt-toggle-height) var(--mjt-toggle-width);
left: 0;
top: calc(-0.5 * var(--mjt-toggle-height));
}
.togg.disabled {
pointer-events: none;
color: #d8d8d8 !important;
}
`;
// Toggle the visual "disabled" loading state on the element's .togg link.
const setLoading = (id, isLoading) => {
const toggle = document
.getElementById(id)
.getElementsByClassName('togg')[0];
toggle.classList.toggle('disabled', isLoading);
};
const originalToggle = window.toggle;
window.toggle = (event, id) => {
// The toggle code can be quite slow on mobile due to sync layout recalcs.
// Show an indication that is in progress.
setLoading(id, true);
// Fix: the original ran originalToggle synchronously and cleared the
// loading state immediately after, so the browser never got a chance to
// paint the 'disabled' style. Defer the slow work by one task so the
// indicator becomes visible, and clear it in `finally` so it cannot get
// stuck if the original handler throws.
setTimeout(() => {
try {
originalToggle(event, id);
} finally {
setLoading(id, false);
}
}, 0);
};
})();
|
<filename>.local/share/Trash/files/ch1/2helloname.rb
# Prompt for the user's name and greet them.
print('Enter your name: ' )
# Fix: gets() returns the line *including* the trailing newline, which put a
# line break inside the greeting; chomp strips it.
name = gets().chomp
puts( "Hello #{name}" )
<filename>src/utils/control/data-handlers/should-locking-submit-btn/types.ts<gh_stars>0
import {CurrentControlData, FormProps} from "@common-types"
/**
 * Predicate deciding whether the submit button should be kept locked, given
 * the control currently being edited and the overall form state.
 */
export type ShouldLockingSubmitBtn = (currentControl: CurrentControlData, form: FormProps) => boolean
<html>
<head>
<title> Chess Game </title>
<script type="text/javascript">
// Global Variables
// Each row_* global is a [pieces, rank] pair: an 8-character string listing
// the squares left-to-right ('.' = empty; presumably uppercase/lowercase
// distinguish the two sides, FEN-style — confirm) plus the rank number.
var row_one = ["rnbqkbnr", 8];
var row_two = ["pppppppp", 7];
var row_three = ["........", 6];
var row_four = ["........", 5];
var row_five = ["........", 4];
var row_six = ["........", 3];
var row_seven = ["PPPPPPPP", 2];
var row_eight = ["RNBQKBNR", 1];
//Function that draws the chess board and pieces
// Renders the board into #board as an HTML table.
// NOTE(review): the loop iterates over `row_one` itself — a two-element
// [string, number] pair — so it emits exactly two cells (the whole piece
// string, then the rank number), not eight squares. Presumably it should
// iterate the characters of row_one[0]; confirm the intended rendering
// before changing it.
function draw_board() {
var x = "";
x += "<table>";
x += "<tr>";
for (var i = 0; i < row_one.length; i++) {
x += "<td>"+row_one[i]+"</td>";
}
x += "</tr>";
// Similar loops for other rows
x += "</table>";
document.getElementById("board").innerHTML = x;
}
// Function that moves pieces
// Moves the piece at (from_x, from_y) to (to_x, to_y) on the global `board`
// 2-D array, leaving an empty square "." behind.
// NOTE(review): `board` is never defined in this file — presumably it should
// be derived from the row_* globals above; confirm before relying on this.
function move_piece(from_x, from_y, to_x, to_y) {
  var piece = board[from_y][from_x];
  board[to_y][to_x] = piece;   // place on the destination (the original
                               // performed this assignment twice)
  board[from_y][from_x] = "."; // clear the source square
}
</script>
</head>
<body>
<div id="board">
</div>
<script>
draw_board();
</script>
</body>
</html> |
#! /bin/sh
# Give the mini-buildd service user ownership of its home directory.
chown -R mini-buildd:mini-buildd /var/mini-buildd
# First run additionally sets the admin password from $PASSWORD.
# NOTE(review): both invocations use --foreground, so the second command only
# starts after the first process exits — confirm the first run is expected to
# terminate (otherwise the second line is unreachable).
su mini-buildd -c "/usr/sbin/mini-buildd --loggers console --verbose --dedicated-user mini-buildd --home /var/mini-buildd --foreground --set-admin-password $PASSWORD"
su mini-buildd -c "/usr/sbin/mini-buildd --loggers console --verbose --dedicated-user mini-buildd --home /var/mini-buildd --foreground"
#!/usr/local/bin/lsc -cj
name: 'latex.js'
description: 'JavaScript LaTeX to HTML5 translator'
version: '0.12.1'
author:
'name': 'Michael Brade'
'email': 'brade@kde.org'
keywords:
'pegjs'
'latex'
'parser'
'html5'
bin:
'latex.js': './bin/latex.js'
main:
'dist/latex.esm.js'
browser:
'dist/latex.js'
files:
'bin/latex.js'
'dist/latex.js'
'dist/latex.js.map'
'dist/latex.esm.js'
'dist/latex.esm.js.map'
'dist/latex.component.js'
'dist/latex.component.js.map'
'dist/latex.component.esm.js'
'dist/latex.component.esm.js.map'
'dist/css/'
'dist/fonts/'
'dist/js/'
scripts:
clean: 'rimraf dist bin test/coverage docs/js/playground.bundle.*;'
build: 'NODE_ENV=production npm run devbuild;'
devbuild: "
rimraf 'dist/**/*.js.map';
mkdirp dist/css;
mkdirp dist/js;
mkdirp dist/fonts;
rsync -a src/css/ dist/css/;
rsync -a src/fonts/ dist/fonts/;
rsync -a node_modules/katex/dist/fonts/*.woff dist/fonts/;
rsync -a src/js/ dist/js/;
mkdirp bin;
lsc -bc --no-header -m embedded -p src/cli.ls > bin/latex.js;
chmod a+x bin/latex.js;
rollup -c --environment GOAL:library-esm &
rollup -c --environment GOAL:library-umd &
rollup -c --environment GOAL:webcomponent-esm &
rollup -c --environment GOAL:webcomponent-umd &
rollup -c --environment GOAL:playground;
wait;
"
test: 'mocha test/*.ls;'
iron: 'iron-node node_modules/.bin/_mocha test/*.ls;'
testc: "
nyc --include='bin' --include='src' --include='dist' -e '.ls' \
./node_modules/.bin/mocha -i -g screenshot --reporter mocha-junit-reporter --reporter-options mochaFile=./test/test-results.xml test/*.ls
&&
mocha -g screenshot --reporter mocha-junit-reporter --reporter-options mochaFile=./test/screenshots/test-results.xml test/*.ls;
"
cover: 'nyc report --reporter=html --reporter=text --reporter=lcovonly --report-dir=test/coverage && codecov;'
dependencies:
### CLI dependencies
'commander': '2.20.x'
'fs-extra': '8.x'
'js-beautify': '1.10.x'
'stdin': '*'
'hyphenation.en-us': '*'
'hyphenation.de': '*'
'svgdom': 'https://github.com/michael-brade/svgdom'
#'cheerio': '0.x'
#'xmldom': '^0.1.19'
devDependencies:
### actual runtime dependencies, but bundled by rollup
'he': '1.2.x'
'katex': '0.10.0'
'@svgdotjs/svg.js': '3.x',
'hypher': '0.x'
'lodash': '4.x'
'livescript': 'https://github.com/michael-brade/LiveScript'
### building
'pegjs': '0.10.x'
'mkdirp': '0.5.x'
'rimraf': '2.6.x'
'tmp': '0.x'
'glob': '^7.1.4'
### bundling
"rollup": "^1.15.5"
"rollup-plugin-extensions": "^0.1.0"
"rollup-plugin-pegjs": "^2.1.3"
"rollup-plugin-livescript": "^0.1.1"
"rollup-plugin-commonjs": "^10.0.0"
"rollup-plugin-node-resolve": "^5.0.2"
"rollup-plugin-terser": "^5.0.0"
"rollup-plugin-re": "^1.0.7"
"rollup-plugin-copy": "^3.0.0"
### testing
'mocha': '6.x'
'mocha-junit-reporter': '1.23.x'
'chai': '4.x'
'chai-as-promised': '7.x'
'slugify': '1.3.x'
'decache': '4.5.x'
'puppeteer': '1.19.x'
'puppeteer-firefox': '0.x'
'pixelmatch': '5.x'
'nyc': '14.x'
'codecov': '3.x'
'serve-handler': '6.x'
repository:
type: 'git'
url: 'git+https://github.com/michael-brade/LaTeX.js.git'
license: 'MIT'
bugs:
url: 'https://github.com/michael-brade/LaTeX.js/issues'
homepage: 'https://latex.js.org'
engines:
node: '>= 8.0'
|
Eric is a brave knight who embarks on dangerous journeys and is unafraid of challenges. He fights fierce enemies and is determined to live a life of risk-taking and adventure. |
// Simple demo record describing a person and how to reach them.
const person = {
name: "John Doe",
age: 25,
phoneNumber: "99-11-22-33"
};
/**
 * Returns true when some pair of elements in A sums to k.
 * Single O(n) pass: for each element, check whether its complement has
 * already been seen, then record the element itself.
 *
 * @param A the values to search (may be empty)
 * @param k the target sum
 * @return true iff A contains two (distinct-position) elements summing to k
 */
public static boolean hasSum(int[] A, int k) {
    // Every value encountered so far during the left-to-right scan.
    Set<Integer> seen = new HashSet<>();
    for (int i = 0; i < A.length; i++) {
        int current = A[i];
        // A previously seen complement means some earlier element pairs
        // with `current` to reach k.
        if (seen.contains(k - current)) {
            return true;
        }
        seen.add(current);
    }
    return false;
}
// Usage example:
int[] A = { 10, 15, 3, 7 };
int k = 17;
System.out.println(hasSum(A, k)); //prints true as 10 + 7 = 17 |
<gh_stars>0
// Shows the genome/QTL search box (and, when the genome viewer is enabled,
// the map view) if a reference genome is configured; otherwise falls back to
// the plain results table with all genome widgets hidden.
function showReferenceGenome(){
// No reference genome: results table only.
if (reference_genome != true) {
activateButton('resultsTable');
$('#genomeorqtlsearchbox').hide(); // hide QTL search
$('#genemap-tab_button').hide(); // hide Map View option
$('#genemap-tab').hide();
return;
}
// Reference genome available: expose the genome/QTL search box.
$('#genomeorqtlsearchbox').show();
// Genome viewer explicitly disabled: keep the results table, hide map view.
if (typeof gviewer != "undefined" && gviewer == false) {
activateButton('resultsTable');
$('#genemap-tab_button').hide();
$('#genemap-tab').hide();
}
}
// function runs on page jquery document ready
// Page-initialisation run on jQuery document-ready: sets up headers, buttons,
// tooltips, the reference-genome widgets and the base genome map.
function loadOnReady(){
// add species name to header
$('#species_header').text(species_name); //update species name from utils_config.js
activateResetButton();
queryToggle();
// hide reset btn on page load
$("#resetknet").hide();
$("#keywords").focus();
$('#tabviewer').hide(); // hide by default
// Tooltip
getQueryExamples();
showReferenceGenome();
createAnalyticsTag();
generalPageAnalytics();
// Render the base genome map into the #genemap container.
genemap.draw('#genemap', 'html/data/basemap.xml', null);
}
// function reset all form input including the genenome icon and the suggestor text values
// Wires the reset button: clears the whole search form, wipes the results
// and suggester panels and collapses the region-search section.
function initResetButton(){
$("#resetknet").click(function(event){
event.preventDefault();
$('form')[0].reset();
$("#pGViewer_title").empty();
$('#matchesResultDiv').html('Please, start typing your query');
$('#suggestor_search').hide();
$('#suggestor_search_div').hide();
$('#tabviewer').hide('');
$("#resetknet").hide();
$('#geneResultDiv').hide();
$('#region_search_area').hide();
// restore the collapsed-state arrow icon for the region-search toggle
$('#region_search').attr('src','html/image/expand.gif')
});
}
// function Calculates the amount of documents to be displayed with the current query
// Recounts matching documents/genes as the user types. Key codes 13 (Enter)
// and 37-40 (arrows) are excluded so navigation doesn't trigger recounts.
function inputHandlers(){
$('#keywords').keyup(function (e) {
// this stops matchCounter being called when the enter or arrow keys are used.
if (e.which !== 13 && e.which !== 37 && e.which !== 38 && e.which !== 39 && e.which !== 40) {
matchCounter();
}
// this stops refreshQuerySuggester being called when the enter or arrow keys are used.
if (e.which !== 13 && e.which !== 37 && e.which !== 38 && e.which !== 39 && e.which !== 40) {
// Refresh the query suggester table as well, if it's already open.
// (The collapse icon being shown means the suggester panel is open.)
if ($('#suggestor_search').attr('src') === "html/image/qs_collapse.png") {
//if($('#suggestor_search').dialog('isOpen')) {
refreshQuerySuggester();
}
}
});
// Keep the gene-list counter in sync with the gene textarea contents.
$('#list_of_genes').keyup(function(){
geneCounter()
});
}
// function add and remove QTL region
// Wires the add/remove-row buttons of the QTL region table. Every data row
// holds chromosome/start/end/label/genes inputs whose ids and names end in
// the row number (chr1, start1, ..., genes1; chr2, ...). New rows are cloned
// from the first data row and renumbered.
function QtlRegionHandlers(){
$('#addRow').click(
function () {
// Row count minus the header row gives the numeric suffix for the new row.
var curMaxInput = $('#region_search_area table tr').length - 1;
$('#region_search_area tr:nth-child(2)')
.clone()
.insertAfter($('#region_search_area tr:last').prev())
.find('td:eq(0)')
.find('select:eq(0)')
.attr({
'id': 'chr' + (curMaxInput),
'name': 'chr' + (curMaxInput),
'onChange': 'findGenes(\'genes' + (curMaxInput) + '\', $(\'#chr' + (curMaxInput) + ' option:selected\').val(), $(\'#start' + (curMaxInput) + '\').val(), $(\'#end' + (curMaxInput) + '\').val())',
'value': ''
})
.parent().parent()
.find('td:eq(1)')
.find('input:text:eq(0)')
.attr({
'id': 'start' + (curMaxInput),
'name': 'start' + (curMaxInput),
'onKeyup': 'findGenes(\'genes' + (curMaxInput) + '\', $(\'#chr' + (curMaxInput) + ' option:selected\').val(), $(\'#start' + (curMaxInput) + '\').val(), $(\'#end' + (curMaxInput) + '\').val())',
'value': ''
})
.parent().parent()
.find('td:eq(2)')
.find('input:text:eq(0)')
.attr({
'id': 'end' + (curMaxInput),
'name': 'end' + (curMaxInput),
'onKeyup': 'findGenes(\'genes' + (curMaxInput) + '\', $(\'#chr' + (curMaxInput) + ' option:selected\').val(), $(\'#start' + (curMaxInput) + '\').val(), $(\'#end' + (curMaxInput) + '\').val())',
'value': ''
})
.parent().parent()
.find('td:eq(3)')
.find('input:text:eq(0)')
.attr({
'id': 'label' + (curMaxInput),
'name': 'label' + (curMaxInput),
'value': ''
})
.parent().parent()
.find('td:eq(4)')
.find('input:text:eq(0)')
.attr({
'id': 'genes' + (curMaxInput),
// Fix: this input was named 'label<n>' (copy-paste from the previous cell)
// while its id is 'genes<n>'; use the matching 'genes' prefix so the genes
// field is submitted under the right name.
'name': 'genes' + (curMaxInput),
'onFocus': 'findGenes(this.id, $(\'#chr' + (curMaxInput) + ' option:selected\').val(), $(\'#start' + (curMaxInput) + '\').val(), $(\'#end' + (curMaxInput) + '\').val())',
'value': ''
});
activateResetButton();
$('#removeRow').removeAttr('disabled');
// Cap the table size; disable adding once the limit is reached.
if ($('#region_search_area tr').length >= 7) {
$('#addRow').attr('disabled', true);
}
return false;
});
$('#removeRow').click(
function () {
activateResetButton();
if ($('#region_search_area tr').length > 3) {
$('#region_search_area tr:last').prev().remove();
}
if ($('#region_search_area tr').length <= 3) {
// Only the template row is left: clear its fields instead of removing it.
$("#chr1").attr('selectedIndex', 0);
$("#start1").val('');
$("#end1").val('');
$("#label1" ).val('');
}
else if ($('#rows tr').length < 7) {
$('#addRow').removeAttr('disabled');
}
return false;
});
}
// functions handle click events for click events on knetminer search form
// Wires the expand/collapse toggles of the KnetMiner search form. Each
// handler swaps the expand/collapse arrow image and slides its section open
// or closed over 500 ms.
function searchHandlers(){
$('#advanced_search').click(
function () {
var src = ($(this).attr('src') === 'html/image/expand.gif')
? 'html/image/collapse.gif'
: 'html/image/expand.gif';
$(this).attr('src', src);
$('#advanced_search_area').animate({
height: 'toggle'
}, 500
);
});
//
$('#suggestor_search').click(
function () {
var suggestorSearchDiv = $('#suggestor_search_div');
if(suggestorSearchDiv.css('display') === 'none'){
suggestorSearchDiv.show()
}
var src = ($(this).attr('src') === 'html/image/qs_expand.png')
? 'html/image/qs_collapse.png'
: 'html/image/qs_expand.png';
$(this).attr('src', src);
$('#suggestor_search_area').animate({
height: 'toggle'
}, 500
);
// The collapse icon means the suggester is now open: refresh its table.
if ($('#suggestor_search').attr('src') == "html/image/qs_collapse.png") {
refreshQuerySuggester();
}
});
// Keyword search
$('#kwd_search').click(
function () {
var old_src= $(this).attr('src');
var src = (old_src === 'html/image/expand.gif')
? 'html/image/collapse.gif'
: 'html/image/expand.gif';
$(this).attr('src', src);
$('#keywords').animate({
height: 'toggle'
}, 500
).css('display', 'inline-block');
$('#matchesResultDiv').animate({
height: 'toggle'
}, 500
);
// Collapsing the keyword section also hides the query suggester widgets.
if(old_src === 'html/image/collapse.gif') {
// hide suggestor_search img icon and suggestor_search_area div
$('#suggestor_search').css('display', 'none');
$('#suggestor_search_area').css('display', 'none');
}
});
$('#region_search').click(
function () {
var src = ($(this).attr('src') === 'html/image/expand.gif')
? 'html/image/collapse.gif'
: 'html/image/expand.gif';
$(this).attr('src', src);
$('#region_search_area').animate({
height: 'toggle'
}, 500
);
});
}
// function handles body events
// Tooltip handling for elements with class "hint": shows a contextual
// tooltip on hover, follows the mouse, and removes it on leave.
// NOTE(review): `target`, `addClass`, `tooltipX`, `tooltipY` and `winWidth`
// are assigned without var/let and therefore become implicit globals —
// confirm nothing else in the page reads them, then scope them locally.
function bodyHandlers(){
$('body').on('mouseenter', 'span.hint', function (event) {
target = $(this)[0].id;
var message = "";
addClass = "";
if (target == 'hintSearchQtlGenome') {
message = 'Select the "whole-genome" option to search the whole genome for potential candidate genes or select the "within QTL" option to search for candidate genes within the QTL coordinates.';
}
else if (target == 'hintEnterGenes') {
message = 'Input a list of target genes using reference gene ID\'s.';
}
else if (target == 'hintQuerySuggestor') {
message = 'Add, remove or replace terms from your query using the list of suggested terms based on your search criteria';
}
else if (target == 'hintEgKeywords') {
message = sampleQueryButtons;
addClass = "tooltip-static";
}
else if (target == 'hintSortableTable') {
message = 'On clicking the <b>Create Network</b> button: it opens KnetMaps, displays a subset of the knowledge network containing only the selected genes and the relevant evidence network.';
addClass = 'networkhint'
}
// Replace any existing tooltip with a fresh one near the hovered element.
$('div.tooltip').remove();
$('<div class="tooltip ' + addClass + '">' + message + '</div>').appendTo('body');
tooltipY = $(this).offset()['top'] - 12;
tooltipX = $(this).offset()['left'] - 4;
winWidth = $(window).width();
// Keep the (assumed ~300px wide) tooltip inside the viewport.
if (tooltipX + 300 > winWidth) {
tooltipX = winWidth - 300;
}
$('div.tooltip.tooltip-static').css({top: tooltipY, left: tooltipX}); //for sample queries tooltip
});
// Non-static tooltips follow the mouse pointer.
$('body').on('mousemove', 'span.hint:not(#hintEgKeywords)', function (event) {
var tooltipX = event.pageX - 8;
var tooltipY = event.pageY + 8;
winWidth = $(window).width();
if (tooltipX + 300 > winWidth) {
tooltipX = winWidth - 300;
}
$('div.tooltip').css({top: tooltipY, left: tooltipX});
});
// Keep static tooltips open while the pointer moves into them.
$('body').on('mouseleave', 'span.hint', function (event) {
if ($(event.relatedTarget).hasClass("tooltip-static") || $(event.relatedTarget).parent().hasClass("tooltip-static")) {
return;
}
$('div.tooltip').remove();
});
$('body').on('mouseleave', 'div.tooltip-static', function (event) {
$('div.tooltip').remove();
});
}
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Function to probe the exit code of the script commands,
# and stop in the case of failure with an contextual error
# message.
run() {
# Echo the command (prefixed with "$ ") before executing it. Note that any
# redirection applied to `run` captures this echo as well as the command's
# own output.
echo "\$ ${@}"
"${@}"
exitCode=$?
if [[ $exitCode != 0 ]]; then
echo
echo "Failed! running ${@} in `pwd`"
echo
exit $exitCode
fi
}
# Writes the MD5 checksum of ${1} to ${1}.md5, preferring GNU md5sum and
# falling back to BSD md5 when md5sum is not installed.
doMD5() {
MD5CMD="md5sum"
which $MD5CMD
if [[ $? != 0 ]]; then
MD5CMD="md5"
fi
# Fix: the original ran `run $MD5CMD ${1} > ${1}.md5`, so run()'s echoed
# command line ("$ md5sum ...") was redirected into the .md5 file along with
# the checksum, corrupting it. Echo to the terminal and redirect only the
# checksum command's own output, keeping run()-style failure handling.
echo "\$ $MD5CMD ${1} > ${1}.md5"
$MD5CMD ${1} > ${1}.md5
exitCode=$?
if [[ $exitCode != 0 ]]; then
echo
echo "Failed! running $MD5CMD ${1} in `pwd`"
echo
exit $exitCode
fi
}
# If provided, the created release artifacts will be tagged with it
# (use RC#, i.e: RC0). Do not use a label to create the final release
# artifact.
RC_LABEL=$1
# Extract Hadoop version from POM
HADOOP_VERSION=`cat pom.xml | grep "<version>" | head -1 | sed 's|^ *<version>||' | sed 's|</version>.*$||'`
# Setup git
GIT=${GIT:-git}
echo
echo "*****************************************************************"
echo
echo "Hadoop version to create release artifacts: ${HADOOP_VERSION}"
echo
echo "Release Candidate Label: ${RC_LABEL}"
echo
echo "*****************************************************************"
echo
if [[ ! -z ${RC_LABEL} ]]; then
RC_LABEL="-${RC_LABEL}"
fi
# Get Maven command
if [ -z "$MAVEN_HOME" ]; then
MVN=mvn
else
MVN=$MAVEN_HOME/bin/mvn
fi
ARTIFACTS_DIR="target/artifacts"
# git clean to clear any remnants from previous build
run ${GIT} clean -xdf
# mvn clean for sanity
run ${MVN} clean
# Create staging dir for release artifacts
run mkdir -p ${ARTIFACTS_DIR}
# Create RAT report
run ${MVN} apache-rat:check
# Create SRC and BIN tarballs for release,
# Using 'install' goal instead of 'package' so artifacts are available
# in the Maven local cache for the site generation
run ${MVN} install -Pdist,src,native -DskipTests -Dtar
# Create site for release
run ${MVN} site site:stage -Pdist -Psrc
run mkdir -p target/staging/hadoop-project/hadoop-project-dist/hadoop-yarn
run mkdir -p target/staging/hadoop-project/hadoop-project-dist/hadoop-mapreduce
run cp ./hadoop-common-project/hadoop-common/src/main/docs/releasenotes.html target/staging/hadoop-project/hadoop-project-dist/hadoop-common/
run cp ./hadoop-common-project/hadoop-common/CHANGES.txt target/staging/hadoop-project/hadoop-project-dist/hadoop-common/
run cp ./hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt target/staging/hadoop-project/hadoop-project-dist/hadoop-hdfs/
run cp ./hadoop-yarn-project/CHANGES.txt target/staging/hadoop-project/hadoop-project-dist/hadoop-yarn/
run cp ./hadoop-mapreduce-project/CHANGES.txt target/staging/hadoop-project/hadoop-project-dist/hadoop-mapreduce/
run mv target/staging/hadoop-project target/r${HADOOP_VERSION}/
run cd target/
run tar czf hadoop-site-${HADOOP_VERSION}.tar.gz r${HADOOP_VERSION}/*
run cd ..
# Stage RAT report
find . -name rat.txt | xargs -I% cat % > ${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-rat.txt
# Stage CHANGES.txt files
run cp ./hadoop-common-project/hadoop-common/CHANGES.txt ${ARTIFACTS_DIR}/CHANGES-COMMON-${HADOOP_VERSION}${RC_LABEL}.txt
run cp ./hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt ${ARTIFACTS_DIR}/CHANGES-HDFS-${HADOOP_VERSION}${RC_LABEL}.txt
run cp ./hadoop-mapreduce-project/CHANGES.txt ${ARTIFACTS_DIR}/CHANGES-MAPREDUCE-${HADOOP_VERSION}${RC_LABEL}.txt
run cp ./hadoop-yarn-project/CHANGES.txt ${ARTIFACTS_DIR}/CHANGES-YARN-${HADOOP_VERSION}${RC_LABEL}.txt
# Prepare and stage BIN tarball
run cd hadoop-dist/target/
run tar -xzf hadoop-${HADOOP_VERSION}.tar.gz
run cp -r ../../target/r${HADOOP_VERSION}/* hadoop-${HADOOP_VERSION}/share/doc/hadoop/
run tar -czf hadoop-${HADOOP_VERSION}.tar.gz hadoop-${HADOOP_VERSION}
run cd ../..
run mv hadoop-dist/target/hadoop-${HADOOP_VERSION}.tar.gz ${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz
# Stage SRC tarball
run mv hadoop-dist/target/hadoop-${HADOOP_VERSION}-src.tar.gz ${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-src.tar.gz
# Stage SITE tarball
run mv target/hadoop-site-${HADOOP_VERSION}.tar.gz ${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-site.tar.gz
# MD5 SRC and BIN tarballs
doMD5 ${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz
doMD5 ${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}-src.tar.gz
run cd ${ARTIFACTS_DIR}
ARTIFACTS_DIR=`pwd`
echo
echo "Congratulations, you have successfully built the release"
echo "artifacts for Apache Hadoop ${HADOOP_VERSION}${RC_LABEL}"
echo
echo "The artifacts for this run are available at ${ARTIFACTS_DIR}:"
run ls -1 ${ARTIFACTS_DIR}
echo
echo "Remember to sign them before staging them on the open"
echo
|
#!/bin/bash
# Installs Portainer into Kubernetes via helm, configured from a local .env
# file (NAME, NAMESPACE, STORAGE_CLASS), then prints the reachable URL.
# Read .env
if [ -f .env ]
then
export $(cat .env | sed 's/#.*//g' | xargs)
else
echo ".env file not found!"
# Fix: exit non-zero on this error path — the original `exit 0` made the
# failure look like success to callers and CI.
exit 1
fi
# Install via helm
helm repo add portainer https://portainer.github.io/k8s/
helm repo update
helm install ${NAME} portainer/portainer -n ${NAMESPACE} --create-namespace \
--set persistence.storageClass=${STORAGE_CLASS}
# Verify
echo "Use command 'kubectl -n ${NAMESPACE} get pod' to check if that the deployment succeeded:"
kubectl -n ${NAMESPACE} get pod
# Derive the externally reachable NodePort URL.
export NODE_PORT=$(kubectl get --namespace ${NAMESPACE} -o jsonpath="{.spec.ports[1].nodePort}" services ${NAME})
export NODE_IP=$(kubectl get nodes --namespace ${NAMESPACE} -o jsonpath="{.items[0].status.addresses[0].address}")
echo https://$NODE_IP:$NODE_PORT
<reponame>juniorjavadeveloper-pl/java-first-class-crud-service
package pl.juniorjavadeveloper.java.crud.model;
import java.math.BigDecimal;
/**
 * Simple mutable car record used by the CRUD service.
 */
public class Car {

    // Identifier assigned by the persistence layer; null until assigned.
    // Fix: the field was `int` while getId()/setId() use Integer, so
    // setId(null) threw a NullPointerException on unboxing. Aligning the
    // field with the accessors makes null a legal "not yet persisted" state
    // (a fresh Car now reports getId() == null instead of 0).
    private Integer id;
    // Production year. Fix: the field was `Integer` while getYear()/setYear()
    // use int, so getYear() on a default-constructed Car threw a
    // NullPointerException on unboxing; a primitive field defaults to 0.
    private int year;
    private String model;
    private String manufacturer;
    private BigDecimal price;

    /** No-arg constructor for frameworks/serialization. */
    public Car() {
    }

    /** Convenience constructor; the id is left unassigned. */
    public Car(int year, String model, String manufacturer, BigDecimal price) {
        this.year = year;
        this.model = model;
        this.manufacturer = manufacturer;
        this.price = price;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public int getYear() {
        return year;
    }

    public void setYear(int year) {
        this.year = year;
    }

    public String getModel() {
        return model;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public String getManufacturer() {
        return manufacturer;
    }

    public void setManufacturer(String manufacturer) {
        this.manufacturer = manufacturer;
    }

    public BigDecimal getPrice() {
        return price;
    }

    public void setPrice(BigDecimal price) {
        this.price = price;
    }

    @Override
    public String toString() {
        return "Car{" +
                "id=" + id +
                ", year=" + year +
                ", model='" + model + '\'' +
                ", manufacturer='" + manufacturer + '\'' +
                ", price=" + price +
                '}';
    }
}
|
#!/bin/bash
#SBATCH --job-name=lstmscan
#SBATCH --time=48:00:00
#SBATCH --cpus-per-task=5
#SBATCH --ntasks-per-node=1
#SBATCH --nodes=1
#SBATCH --qos=high
#SBATCH --constrain=xeon-g6
#SBATCH --gres=gpu:volta:1
#SBATCH --array=1-20
# Each of the 20 array tasks trains one (split, seed) combination:
# 2 SCAN splits x 10 seeds, selected by matching the running counter k
# against $SLURM_ARRAY_TASK_ID.
lr=1.0
warmup_steps=4000
max_steps=8000
batch_size=128
home="../../../"
k=0
for split in around_right jump; do
if [[ $split = jump ]]; then
split_folder="add_prim_split"
else
split_folder="template_split"
fi
expname=${split}_LSTM
mkdir -p $expname
for i in `seq 0 9`
do
# Fix: the original line was `$(( k++ ))`, which expands to the counter's
# old value and then tries to *execute* it as a command, printing
# "0: command not found" on every iteration (the increment itself worked
# only as a side effect). Increment without executing the result.
k=$((k + 1))
if [[ $k -eq $SLURM_ARRAY_TASK_ID ]]; then
cd $expname
python -u $home/main.py \
--seed $i \
--n_batch ${batch_size} \
--n_layers 2 \
--noregularize \
--dim 512 \
--lr ${lr} \
--temp 1.0 \
--dropout 0.4 \
--beam_size 5 \
--gclip 5.0 \
--accum_count 4 \
--valid_steps 500 \
--warmup_steps ${warmup_steps} \
--max_step ${max_steps} \
--tolarance 10 \
--tb_dir ${expname} \
--scan_split ${split} \
--SCAN > eval.$i.out 2> eval.$i.err
fi
done
done
|
class Square:
    """A width x height shape with a computable area.

    NOTE(review): width and height may differ, so this actually models a
    rectangle despite the name — confirm whether the name is intentional.
    """

    def __init__(self, width, height):
        self.width = width
        self.height = height

    def area(self):
        """Return the enclosed area (width multiplied by height)."""
        return self.height * self.width


square = Square(10, 20)
print(square.area())  # prints 200
<filename>src/hooks/useRockets.ts
import { useQuery } from 'react-query';
import { spacexApi } from '@config/api';
import type { RocketProps } from '@types';
/** Fetches the full rocket list from the SpaceX API (`GET /rockets`). */
const getRockets = async (): Promise<RocketProps[]> => {
const { data } = await spacexApi.get('/rockets');
return data;
};
/** React-Query hook that fetches and caches the rocket list under the
 * 'rockets' query key. */
export function useRockets() {
return useQuery(['rockets'], getRockets);
}
|
# You are expected to run the commands in this script from inside the bin directory in your DBpedia Spotlight installation
# Adjust the paths here if you don't. This script is meant more as a step-by-step guidance than a real automated run-all.
# If this is your first time running the script, we advise you to copy/paste commands from here, closely watching the messages
# and the final output.
#
# @author maxjakob, pablomendes (modified by Federico Cairo and Giuseppe Futia)
#
# NOTICE: before starting the indexing process check comments in SpotlightConfiguration.java and in FileOccurrenceSource.scala
# TellMeFirst / DBpedia Spotlight indexing pipeline. Earlier stages are kept
# commented out so individual steps can be re-enabled and re-run by hand;
# only the final knowledge-base build steps are active.
export DBPEDIA_WORKSPACE=../data/tellmefirst/dbpedia/en
export INDEX_CONFIG_FILE=../conf/indexing.tmf.en.properties
# Heap ceiling passed to the scala/maven launchers below (not exported; used in-script only).
JAVA_XMX=16g
# you have to run maven from the module that contains the indexing classes
cd ../index
# the indexing process will generate files in the directory below
#if [ -e $DBPEDIA_WORKSPACE/output ]; then
#    echo "$DBPEDIA_WORKSPACE"'/output already exist.'
#else
#    mkdir -p $DBPEDIA_WORKSPACE/output
#fi
# clean redirect file: there is a bug in the DBpedia version 3.9 (added by Giuseppe Futia)
#mvn compile
#mvn exec:java -e -Dexec.mainClass="org.dbpedia.spotlight.lucene.index.external.utils.TMFRedirectCleaner" -Dexec.args=$INDEX_CONFIG_FILE
#clean the Wikipedia Dump (added by Giuseppe Futia)
#mvn compile
#mvn exec:java -e -Dexec.mainClass="org.dbpedia.spotlight.lucene.index.external.utils.TMFWikiDumpCleaner" -Dexec.args=$INDEX_CONFIG_FILE
# extract valid URIs, synonyms and surface forms from DBpedia
#mvn scala:run -Dlauncher=ExtractCandidateMap "-DjavaOpts.Xmx=$JAVA_XMX" "-DaddArgs=$INDEX_CONFIG_FILE"
# decode URIs of the extracted files from the 3.9 version of DBpedia (added by Giuseppe Futia)
#mvn compile
#mvn exec:java -e -Dexec.mainClass="org.dbpedia.spotlight.lucene.index.external.utils.TMFUriDecoder" -Dexec.args=$INDEX_CONFIG_FILE
# now we collect parts of Wikipedia dump where DBpedia resources occur and output those occurrences as Tab-Separated-Values
#echo -e "Parsing Wikipedia dump to extract occurrences...\n"
#mvn scala:run -Dlauncher=ExtractOccsFromWikipedia "-DjavaOpts.Xmx=$JAVA_XMX" "-DaddArgs=$INDEX_CONFIG_FILE|$DBPEDIA_WORKSPACE/output/occs.tsv"
# (recommended) sorting the occurrences by URI will speed up context merging during indexing
#echo -e "Sorting occurrences to speed up indexing...\n"
#sort -t$'\t' -k2 $DBPEDIA_WORKSPACE/output/occs.tsv >$DBPEDIA_WORKSPACE/output/occs.uriSorted.tsv
#set -e
# create a lucene index out of the occurrences
#echo -e "Creating a context index from occs.tsv...\n"
#mvn scala:run -Dlauncher=IndexMergedOccurrences "-DjavaOpts.Xmx=$JAVA_XMX" "-DaddArgs=$INDEX_CONFIG_FILE|$DBPEDIA_WORKSPACE/output/occs.uriSorted.tsv"
# NOTE: if you get an out of memory error from the command above, try editing ../index/pom.xml with correct jvmArg and file arguments, then run:
#mvn scala:run -Dlauncher=IndexMergedOccurrences "-DjavaOpts.Xmx=$JAVA_XMX" "-DaddArgs=$INDEX_CONFIG_FILE|$DBPEDIA_WORKSPACE/output/occs.uriSorted.tsv"
# (optional) make a backup copy of the index before you lose all the time you've put into this
#echo -e "Make a backup copy of the index..."
#cp -R $DBPEDIA_WORKSPACE/output/index $DBPEDIA_WORKSPACE/output/index-backup
# add entity types to index
#echo -e "Adding Types to index... \n"
#mvn scala:run -Dlauncher=AddTypesToIndex "-DjavaOpts.Xmx=$JAVA_XMX" "-DaddArgs=$INDEX_CONFIG_FILE|$DBPEDIA_WORKSPACE/output/index"
# add titles to index (added by Federico Cairo)
#echo -e "Adding Wikipedia Titles to index... \n"
#mvn scala:run -Dlauncher=AddTitlesToIndex "-DjavaOpts.Xmx=$JAVA_XMX" "-DaddArgs=$INDEX_CONFIG_FILE|$DBPEDIA_WORKSPACE/output/index-withTypes"
# add images to index (added by Federico Cairo)
#echo -e "Adding Images to index... \n"
#mvn scala:run -Dlauncher=AddImagesToIndex "-DjavaOpts.Xmx=$JAVA_XMX" "-DaddArgs=$INDEX_CONFIG_FILE|$DBPEDIA_WORKSPACE/output/index-withTypesTitles"
# create the Knowledge Base Index of TellMeFirst (added by Giuseppe Futia)
# ---- ACTIVE STEPS: everything below actually runs ----
#echo -e "Create the Knowledge Base Index of TellMeFirst... \n"
mvn compile
mvn exec:java -e -Dexec.mainClass="org.dbpedia.spotlight.lucene.index.external.TMFKnowledgeBaseBuilder" -Dexec.args=$INDEX_CONFIG_FILE
# create the Residual Knowledge Base Index of TellMeFirst (added by Giuseppe Futia)
echo -e "Create the Residual Knowledge Base Index of TellMeFirst... \n"
mvn compile
mvn exec:java -e -Dexec.mainClass="org.dbpedia.spotlight.lucene.index.external.TMFResidualKnowledgeBaseBuilder" -Dexec.args=$INDEX_CONFIG_FILE
|
<reponame>iranathan/Pyblox<gh_stars>10-100
#
# group.py
# pyblox
#
# By Sanjay-B(<NAME>)
# Copyright © 2017 Sanjay-B(<NAME>). All rights reserved.
#
from .http import Http
import json
# Group Object
class Group:
    """A Roblox group parsed from a decoded groups-API response dict."""

    # Class-level defaults so attributes exist even before __init__ runs.
    Name = None
    Id = None
    Owner = None
    OwnerName = None
    OwnerId = None
    EmblemUrl = None
    Description = None
    Roles = None

    def __init__(self, data):
        """Populate attributes from an API response.

        :param data: dict with keys "Name", "Id", "Owner" (a dict containing
            "Name" and "Id"), "EmblemUrl", "Description" and "Roles".
        """
        self.Name = data["Name"]
        self.Id = data["Id"]
        self.Owner = data["Owner"]
        self.OwnerName = data["Owner"]["Name"]
        # Bug fix: previously this copied data["EmblemUrl"], so OwnerId held
        # the emblem URL instead of the owner's id from the nested Owner dict.
        self.OwnerId = data["Owner"]["Id"]
        self.EmblemUrl = data["EmblemUrl"]
        self.Description = data["Description"]
        self.Roles = data["Roles"]
class Groups:
    """Static helpers for the Roblox group web APIs.

    The methods were defined without ``self`` and therefore were not valid
    instance methods; they are now proper ``@staticmethod``s, which keeps the
    existing ``Groups.method(...)`` call style working and also allows calls
    on instances.
    """

    @staticmethod
    def groupList(userid):
        """GET /users/{userId}/groups — raw response listing the user's groups."""
        return Http.sendRequest("https://api.roblox.com/users/" + str(userid) + "/groups")

    @staticmethod
    def getGroup(groupid):
        """GET /groups/{groupId} — returns a parsed :class:`Group`."""
        raw = Http.sendRequest("https://api.roblox.com/groups/" + str(groupid))
        return Group(json.loads(raw.decode("utf-8")))

    @staticmethod
    def getGroupAllies(groupid):
        """GET /groups/{groupId}/allies — raw response with each ally's attributes."""
        return Http.sendRequest("https://api.roblox.com/groups/" + str(groupid) + "/allies")

    @staticmethod
    def getGroupEnemies(groupid):
        """GET /groups/{groupId}/enemies — raw response with each enemy's attributes."""
        return Http.sendRequest("https://api.roblox.com/groups/" + str(groupid) + "/enemies")

    @staticmethod
    def getGroupRoles(groupid):
        """GET /groups/{groupId}/roles — raw response listing the group's roles."""
        return Http.sendRequest("https://groups.roblox.com/v1/groups/" + str(groupid) + "/roles")
|
#include <flowi_core/font.h>
#include "../src/internal.h"
#include "../src/linear_allocator.h"
#include "utest.h"
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Malloc based allocator. We should use tslf or similar in a sandbox, but this is atleast in one place

// Allocation hook: plain malloc. The user_data slot is unused by this allocator.
static void* alloc_malloc(void* user_data, u64 size) {
    FL_UNUSED(user_data);
    return malloc(size);
}

// Reallocation hook: defers to realloc (ptr may be NULL, per realloc semantics).
static void* realloc_malloc(void* user_data, void* ptr, u64 size) {
    FL_UNUSED(user_data);
    return realloc(ptr, size);
}

// Free hook: defers to free.
static void free_malloc(void* user_data, void* ptr) {
    FL_UNUSED(user_data);
    free(ptr);
}

// Error callback: intentionally a no-op; the FlAllocatorError_Exit policy
// below handles failures.
static void memory_error(void* user_data, const char* text, int text_len) {
    FL_UNUSED(user_data);
    FL_UNUSED(text);
    FL_UNUSED(text_len);
}

// Allocator table handed to flowi. NOTE(review): the two NULL entries are
// positional fields of FlAllocator (presumably user_data and an optional
// hook) — confirm against flowi_core's FlAllocator declaration.
FlAllocator g_malloc_allocator = {
    FlAllocatorError_Exit, NULL, memory_error, alloc_malloc, NULL, realloc_malloc, free_malloc,
};
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Loading a non-existent file must return NULL (and leave size untouched at 0).
UTEST(Io, load_file_fail) {
    struct FlGlobalState* state = fl_create(NULL);
    struct FlContext* ctx = fl_context_create(state);
    u32 size = 0;
    const u8* data = Io_load_file_to_memory(ctx, "dummy_not_found", &size);
    ASSERT_TRUE(data == NULL);
    // Tear down in reverse creation order.
    fl_context_destroy(ctx);
    fl_destroy(state);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Loading a known fixture must return its exact on-disk byte size.
// NOTE(review): `data` is non-const here but const in load_file_fail above —
// the declarations should probably agree; confirm Io_load_file_to_memory's
// return type before changing either.
UTEST(Io, load_file_ok) {
    struct FlGlobalState* state = fl_create(NULL);
    struct FlContext* ctx = fl_context_create(state);
    u32 size = 0;
    u8* data = Io_load_file_to_memory(ctx, "data/montserrat-regular.ttf", &size);
    ASSERT_TRUE(data != NULL);
    // Size of the checked-in montserrat-regular.ttf fixture.
    ASSERT_TRUE(size == 245708);
    // TODO: Allocator
    FlAllocator_free(state->global_allocator, data);
    fl_context_destroy(ctx);
    fl_destroy(state);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
UTEST_STATE();
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

// Entry point: hand control straight to the utest harness and propagate its
// exit status.
int main(int argc, const char* const argv[]) {
    return utest_main(argc, argv);
}
|
<reponame>officialrajdeepsingh/Array<filename>src/Component/Icon/Icon.js
import React, { Component } from 'react';
// read about icon plz goto here find all icon https://icons.getbootstrap.com
// make sure bootstrop use svg icon
import Iconlist from './Iconlist';
// React-bootstrap component
import Container from 'react-bootstrap/Container';
import Row from 'react-bootstrap/Row';
import Col from 'react-bootstrap/Col';
//scss
import style from "./Icon.module.scss";
export default class Icon extends Component {
render() {
return (
<Container >
<Row>
{
Iconlist.map(
(data,i) => {
let arr = data.toString().split('/');
let NewAarry = arr.pop().split('.')
return <Col sm={3} key={i} >
<div className={style.col}>
<img src={data} style={{ height: '40px', width: '40px' }} alt={NewAarry[0]} />
<span> {NewAarry[0]} </span>
</div>
</Col>
}
)
}
</Row>
</Container>
)
}
}
|
<filename>src/Helper/tryParseJSON.js
/**
 * Safely parse a JSON string.
 *
 * @param {string} x - The raw JSON text to parse.
 * @param {*} [fallback=null] - Value returned when parsing fails.
 * @returns {*} The parsed value, or `fallback` when `x` is not valid JSON.
 */
export const tryParseJSON = (x, fallback = null) => {
  try {
    return JSON.parse(x);
  } catch (e) {
    // Invalid JSON is an expected case here; return the fallback instead of
    // propagating the SyntaxError.
    return fallback;
  }
};
|
<gh_stars>1-10
import React, { useState } from 'react';
import { Steps, Step, StatusTypes } from 'algae-ui';
// Interactive Steps demo: clicking a step makes it current and switches the
// overall status shown by the <Steps> component.
export default () => {
  // Index of the currently active step (0-based).
  const [current, setCurrent] = useState<number>(1);
  // Status rendered for the current step ('success' | 'process' | 'fail' ...).
  const [status, setStatus] = useState<StatusTypes>('process');
  return (
    <div className="steps-example-list">
      <Steps current={current} status={status}>
        <Step
          title="success step"
          description="This is a description"
          onClick={() => {
            setCurrent(0);
            setStatus('success');
          }}
        />
        <Step
          title="process step"
          subTitle="This is subTitle"
          description="This is a description"
          onClick={() => {
            setCurrent(1);
            setStatus('process');
          }}
        />
        <Step
          title="waiting step"
          description="This is a description"
          onClick={() => {
            setCurrent(2);
            setStatus('fail');
          }}
        />
      </Steps>
    </div>
  );
};
|
# Red user@host prompt, working directory on its own line.
export PS1=${debian_chroot:+($debian_chroot)}'\[\033[01;31m\]\u@\h\[\033[00m\]:\w\n\$ '
# If this is an xterm set the title to user@host:dir
case "$TERM" in
xterm*|rxvt*)
    PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1"
    ;;
*)
    ;;
esac
# Open file(s) in a background emacs instance.
# Fix: the previous unquoted ${1} broke on paths containing spaces; "$@"
# quotes every argument and also allows opening several files at once
# (backward compatible with the single-file usage).
function e() {
    emacs "$@" &
}
# Default editor for programs that honor $EDITOR.
export EDITOR=emacs
# Colorize folders, executables, etc
export CLICOLOR=1
# BSD/macOS ls palette.
export LSCOLORS=ExFxBxDxCxegedabagacad
# GNU ls / dircolors palette (same scheme as LSCOLORS above).
export LS_COLORS="di=1;34:ln=1;35:so=1;31:pi=1;33:ex=1;32:bd=34;46:cd=34;43:su=30;41:sg=30;46:tw=30;42:ow=30;43"
# Enable ls color depending on platform
# Fix: replaced legacy backtick command substitution with the modern,
# nestable $(...) form.
unamestr=$(uname)
if [[ "$unamestr" == 'Linux' ]]; then
    # GNU ls needs an explicit --color flag.
    alias ls='ls -GFh --color=auto'
elif [[ "$unamestr" == 'Darwin' ]]; then
    # BSD ls colorizes via CLICOLOR/LSCOLORS (exported above).
    alias ls='ls -GFh'
fi
# Some custom bins in path
export PATH="$HOME/local/bin:$PATH"
# For linux only
# Raises the inotify watch limit (needed by file watchers such as IDE/build
# tools); prints the value before and after. Requires sudo; no-op off Linux.
function increase_watch_count() {
    if [[ "$unamestr" == 'Linux' ]]; then
        echo "Old max_user_watches"
        cat /proc/sys/fs/inotify/max_user_watches
        echo "Increasing max_user_watches..."
        sudo sysctl fs.inotify.max_user_watches=524288
        # Re-apply persisted sysctl settings.
        sudo sysctl -p
        echo "New max_user_watches"
        cat /proc/sys/fs/inotify/max_user_watches
    fi
}
|
<reponame>wkma/bk-sops
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import base64
import hashlib
import logging
from functools import partial
import ujson as json
from pipeline.core.constants import PE
from gcloud import err_code
from gcloud.conf import settings
logger = logging.getLogger("root")
def read_encoded_template_data(content):
    """Decode and validate a base64-encoded exported template payload.

    Returns a result dict: ``{"result": bool, "code": ..., "data"/"message": ...}``.
    The payload's digest is checked against the current salt first, then the
    legacy community-edition salt, so exports from older deployments remain
    importable.
    """
    try:
        data = json.loads(base64.b64decode(content))
    except Exception:
        # Not base64, or not JSON underneath — corrupt upload.
        return {"result": False, "message": "Template data is corrupt", "code": err_code.REQUEST_PARAM_INVALID.code}
    # check the validation of file
    templates_data = data["template_data"]
    check_digest = partial(check_template_digest, templates_data=templates_data, data_digest=data["digest"])
    if not check_digest(salt=settings.TEMPLATE_DATA_SALT):
        if not check_digest(salt=settings.OLD_COMMUNITY_TEMPLATE_DATA_SALT):
            return {"result": False, "message": "Invalid template data", "code": err_code.VALIDATION_ERROR.code}
    return {"result": True, "data": data, "code": err_code.SUCCESS.code}
def check_template_digest(templates_data, data_digest, salt):
    """Return True if ``data_digest`` matches the salted MD5 of ``templates_data``.

    The digest is computed over the canonical (``sort_keys=True``) JSON dump
    of the template data concatenated with ``salt``, mirroring how exports
    are signed.

    :param templates_data: JSON-serializable template payload.
    :param data_digest: hex MD5 digest carried in the exported file.
    :param salt: secret salt used when the export was produced.
    """
    data_string = (json.dumps(templates_data, sort_keys=True) + salt).encode("utf-8")
    digest = hashlib.md5(data_string).hexdigest()
    # Direct boolean result replaces the previous verbose
    # `if not is_data_valid: return False / return True` pair.
    return digest == data_digest
def read_template_data_file(f):
    """Read a file-like object and decode its contents as exported template data.

    Thin wrapper over ``read_encoded_template_data``; returns the same
    result dict.
    """
    return read_encoded_template_data(content=f.read())
def replace_template_id(template_model, pipeline_data, reverse=False):
    """Rewrite subprocess template ids between DB pk and pipeline template id.

    Mutates ``pipeline_data`` in place. With ``reverse=False`` each subprocess
    activity's ``template_id`` (a model pk) is replaced by the underlying
    pipeline template id; with ``reverse=True`` the mapping is inverted and
    the pk is stored as a string.

    :param template_model: Django model class with a ``pipeline_template`` relation.
    :param pipeline_data: pipeline tree dict containing ``PE.activities``.
    :param reverse: direction of the id substitution.
    """
    activities = pipeline_data[PE.activities]
    # list() snapshot: values are mutated while iterating.
    for act_id, act in list(activities.items()):
        if act["type"] == PE.SubProcess:
            if not reverse:
                act["template_id"] = template_model.objects.get(pk=act["template_id"]).pipeline_template.template_id
            else:
                template = template_model.objects.get(pipeline_template__template_id=act["template_id"])
                act["template_id"] = str(template.pk)
|
<filename>src/app/shared/models/versioning.model.ts
import {Auditable} from './auditable.model';
/**
 * Base class for objects that can be versioned; adds version/approval
 * metadata on top of the audit fields inherited from Auditable.
 */
export class Versioning extends Auditable {
  /**
   * The version identifier of this item.
   */
  version: string;
  /**
   * Id of the associated approval record.
   */
  approvalId: number;
  /**
   * Status of the associated approval.
   */
  approvalStatus: string;
  /**
   * Flag to determine if this item has a draft version.
   * NOTE(review): typed as string rather than boolean — presumably a
   * server-side flag encoding (e.g. "Y"/"N"); confirm against the API.
   */
  hasDraft: string;
  /**
   * Flag to determine if this item has a pending version.
   * NOTE(review): same string-encoded flag caveat as hasDraft.
   */
  hasPending: string;
}
|
#!/bin/bash
# Provision a "bitnami" user with a home shell and an /opt/bitnami workspace.
useradd -ms /bin/bash bitnami
mkdir -p /opt/bitnami && chown bitnami:bitnami /opt/bitnami
# Comment out sudo's secure_path override so the user's own PATH survives sudo.
sed -i -e 's/\s*Defaults\s*secure_path\s*=/# Defaults secure_path=/' /etc/sudoers
# SECURITY NOTE: grants unrestricted passwordless sudo — appropriate only for
# disposable build/VM images, never for multi-user hosts.
echo 'bitnami ALL=NOPASSWD: ALL' >> /etc/sudoers
|
#!/bin/bash
# Trace commands, stop on errors and on use of unset variables.
set -xeu
readonly MY_PATH=$(cd $(dirname $0) && pwd)
readonly LODGE_ROOT_PATH=$(cd $MY_PATH/../../ && pwd)
readonly EXEC='bundle exec'
# Setup for sunspot & solr.
# Subshell keeps the cd from leaking into the caller's working directory.
(
    cd $LODGE_ROOT_PATH
    $EXEC rails generate sunspot_rails:install
    # Initialize solr.
    $EXEC rake sunspot:solr:start RAILS_ENV=production
    # Give solr a moment to come up before stopping it again.
    sleep 3
    #$EXEC rake sunspot:reindex RAILS_ENV=production
    $EXEC rake sunspot:solr:stop RAILS_ENV=production
    # Copy production config files to `$LODGE_ROOT_PATH/solr/`.
    cp -rf $MY_PATH/production $LODGE_ROOT_PATH/solr
)
|
class P1
{
    /**
     * Prints the integers 0 through 10 inclusive, one per line.
     */
    public static void main(String[] args)
    {
        // Counting loop expressed as a for statement instead of while/post-increment.
        for (int i = 0; i <= 10; i++)
        {
            System.out.println(i);
        }
    }
}
|
#!/bin/bash
#
# CSAP monitoring package: builds and deploys kube-prometheus based
# monitoring specs. Relies on csap helper functions (print_*, csap_mvn, ...)
# sourced by the csap agent before this script runs.
#
specPackageName="csap-monitoring-specs" ;
#kubePrometheseus="kube-prometheus@release-0.8" ;
kubePrometheseus="kube-prometheus@release-0.9" ;
#specVersion=${specVersion:-2-SNAPSHOT} ;
specVersion=${specVersion:-21.10} ;
# When true, grafana auth is disabled via an alternate grafana.ini (see api_service_start).
ui_anonymous=${ui_anonymous:-false} ;
# Names of env variables substituted into the yaml manifests as __name__ tokens.
templateVariables="data_retention data_volume_size data_storage_class" ;
data_retention=${data_retention:-3d} ;
data_volume_size=${data_volume_size:-5Gi} ;
data_storage_class=${data_storage_class:-$storage_class} ;
#debug="true" ;
print_separator "CSAP Monitoring Package"
print_two_columns "specVersion" "$specVersion"
print_two_columns "kubePrometheseus" "$kubePrometheseus"
print_two_columns "ui_anonymous" "$ui_anonymous"
#
# csap passes in many env variables; only some are used during deployment
#
for name in $templateVariables ; do
    print_two_columns "$name" "${!name}" ;
done
print_separator "yaml substituions"
# Echo any yamlCurrentN/yamlNewN replacement pairs supplied by the environment.
for (( counter=1; counter < (10) ; counter++ )) ; do
    current="yamlCurrent$counter" ;
    new="yamlNew$counter" ;
    if [ -z "${!current}" ] || [ -z "${!new}" ]; then
        break;
    fi ;
    print_two_columns "$current" "${!current}"
    print_two_columns "$new" "${!new}"
done
# Print the first hostname listed in $kubernetesMasters (the primary master).
function getPrimaryMaster() {
    echo $(awk '{ print $1; }' <<< $kubernetesMasters) ;
}

# Succeed (exit status 0) on an all-in-one install or when this host's short
# name matches the primary master.
function is_primary_master() {
    if [ "$kubernetesAllInOne" == "true" ] ; then
        true;
    else
        # redirect to error to not impact choice
        # >&2 echo $( print_with_head "primaryMaster: $primaryMaster")
        if [[ $(getPrimaryMaster) == $(hostname --short) ]] ; then
            true ;
        else
            false ;
        fi ;
    fi ;
}

# Abort the script unless kubernetesMasters is set and this host is the
# primary master — the manifests must only be applied from one node.
function verify_settings() {
    kubernetesMasters=${kubernetesMasters:-notSpecified};
    if [ "$kubernetesMasters" == "notSpecified" ] ; then
        print_error "kubernetesMasters is a required environment variable. Add it to service parameters" ;
        exit ;
    fi
    if ! $( is_primary_master ) ; then
        print_with_head "Exiting: $(hostname --long) is not the primary kubernertes master"
        exit ;
    fi ;
    print_separator "$csapName Package: $kubernetesMasters"
}

#
# only run on primary master
#
verify_settings
# csap build hook: produce the spec zip (or reuse a prebuilt one from
# ~/opensource), stage it in $csapPackageDependencies, optionally deploy it.
function api_package_build() {
    print_section "api_package_build: $(pwd)"
    # Only run when the maven command actually requests a package build.
    if [[ "$mavenBuildCommand" != *package* ]] ; then
        print_line "Skipping source build" ;
        return ;
    fi ;
    print_two_columns "Current directory" "$(pwd)"
    print_two_columns "built wrapper" "$csapPackageFolder/$csapName.zip"
    #print_line "httpd source code build will be bypassed if '$HOME/opensource/httpd.zip' exists"
    if [ -r "$HOME/opensource/$specPackageName.zip" ] ; then
        print_with_head "NOTE: Source build skipped, copying $HOME/opensource/$specPackageName.zip to '$(pwd)'"
        cp --force --verbose $HOME/opensource/$specPackageName.zip .
    else
        generate_deployment_specs
    fi ;
    # Stage the zip where the csap packaging step expects dependencies.
    \rm --recursive --force $csapPackageDependencies
    mkdir --parents --verbose $csapPackageDependencies
    cp --force --verbose $specPackageName.zip $csapPackageDependencies
    print_two_columns "mavenBuildCommand" "$mavenBuildCommand"
    if [[ "$mavenBuildCommand" == *deploy* ]] ; then
        deploy_specs
    fi ;
    print_separator "api_package_build() completed"
}
# Derive maven repository coordinates; SNAPSHOT versions publish to the
# snapshot repo, everything else to the release repo.
function update_repo_variables() {
    REPO_ID="csap-release-repo"
    REPO_URL="$svcRepo"
    if [[ "$specVersion" == *SNAPSHOT* ]] ; then
        REPO_ID="csap-snapshot-repo" ;
        REPO_URL="$(dirname $svcRepo)/csap-snapshots/" ;
    fi
    FILE=$specPackageName.zip
    GROUP_ID="bin"
    TYPE="zip"
    print_two_columns "FILE" "$FILE"
    print_two_columns "Version" "$specVersion"
    print_two_columns "REPO_ID" "$REPO_ID"
    print_two_columns "REPO_URL" "$REPO_URL"
}

# Publish the spec zip to the maven repository via deploy:deploy-file.
function deploy_specs() {
    update_repo_variables
    print_with_head "Deploying $specPackageName to repository using maven: '$(pwd)'"
    local deployCommand="deploy:deploy-file -DgroupId=$GROUP_ID -DartifactId=$specPackageName -Dversion=$specVersion -Dpackaging=$TYPE -Dfile=$FILE"
    deployCommand="$deployCommand -DrepositoryId=$REPO_ID -Durl=$REPO_URL"
    csap_mvn $deployCommand
}
# Build the kubernetes manifests from jsonnet sources and zip them into
# $specPackageName.zip in the current (source) folder.
function generate_deployment_specs() {
    print_separator "generate_deployment_specs" ;
    print_with_head "generate_deployment_specs: $specPackageName.zip"
    print_two_columns "build in" "$(pwd)"
    print_two_columns "install in" "$csapWorkingDir"
    # Globals used by the build_* helpers below.
    sourceFolder=$(pwd) ;
    buildFile="$sourceFolder/configuration/csap-monitoring.jsonnet" ;
    buildFolder="$sourceFolder/build-tools" ;
    projectFolder="$sourceFolder/csap-kube-prometheus" ;
    #
    # get jsonnet tools
    #
    #print_two_columns "reload env" "reloading $HOME/.bashrc as build tools get inserted during initial build"
    #source $HOME/.bashrc
    build_jsonnet ;
    build_kube_promethesius ;
    build_csap_monitoring $buildFile ;
    print_separator "zipping deployment manifests" ;
    print_line "switching to $projectFolder" ;
    cd $projectFolder ; # need to zip in a relative folder
    zip -q -r $specPackageName manifests
    cp $specPackageName*.zip $sourceFolder;
    cd $sourceFolder ;
}
# Install the jsonnet toolchain (jsonnet/jsonnetfmt from source, go, jb,
# gojsontoyaml) when missing; otherwise reuse what is already on the host.
function build_jsonnet() {
    if is_need_command jsonnet ; then
        print_separator "build_jsonnet() - installing dependencies"
        osPackages="gcc gcc-c++ git make wget"
        for package in $osPackages ; do
            install_if_needed $package
        done ;
        print_separator "building jsonnet"
        rm --recursive --force $buildFolder
        mkdir --parents --verbose $buildFolder
        cd $buildFolder ;
        git clone https://github.com/google/jsonnet.git
        cd jsonnet ;
        make
        run_using_root cp jsonnet /usr/local/bin
        run_using_root cp jsonnetfmt /usr/local/bin
        print_separator "installing go"
        wget --no-verbose https://golang.org/dl/go1.15.6.linux-amd64.tar.gz
        run_using_root tar --extract --gzip --file go1.15.6.linux-amd64.tar.gz --directory /usr/local
        print_separator "installing go module: jsonnet-bundler"
        run_using_root 'export PATH="$PATH:/usr/local/go/bin"; export GOPATH="/usr/local/go"; GO111MODULE="on" go get github.com/jsonnet-bundler/jsonnet-bundler/cmd/jb'
        print_separator "installing go module: gojsontoyaml "
        run_using_root 'export PATH="$PATH:/usr/local/go/bin"; export GOPATH="/usr/local/go"; GO111MODULE="on" go get github.com/brancz/gojsontoyaml'
    else
        print_two_columns "jsonnet" "using existing jsonnet"
    fi ;
    # Ensure the go toolchain is reachable for the jsonnet build pipeline.
    if is_need_command go ; then
        export PATH="$PATH:/usr/local/go/bin" ;
    fi ;
}
# Check out the pinned kube-prometheus release into $projectFolder, using a
# per-user cache in $HOME to avoid repeated network checkouts.
function build_kube_promethesius() {
    local cachedLocation="$HOME/$kubePrometheseus" ;
    if test -d $cachedLocation ; then
        print_two_columns "cachedLocation" "using $cachedLocation";
        cp --force --recursive $cachedLocation $projectFolder ;
    fi ;
    if ! test -d $projectFolder ; then
        mkdir --parents --verbose $projectFolder;
        cd $projectFolder ;
        print_separator "running jsonnet-bundler to initialize jsonnetfile.json"
        jb init;
        # Creates `vendor/` & `jsonnetfile.lock.json`, and fills in `jsonnetfile.json`
        print_separator "checking out $kubePrometheseus to $projectFolder"
        jb install github.com/prometheus-operator/kube-prometheus/jsonnet/$kubePrometheseus
        # Refresh the cache for the next build.
        cp --force --recursive $projectFolder $cachedLocation ;
    else
        print_two_columns "kube_promethesius" "using existing $projectFolder"
    fi;
}
# Compile the jsonnet source ($1, default csap.jsonnet) into yaml manifests
# under $projectFolder/manifests, removing intermediate json output.
function build_csap_monitoring() {
    local sourceFile="${1-csap.jsonnet}" ;
    print_separator "generating csap-monitoring"
    print_two_columns "sourceFile" "$sourceFile"
    cd $projectFolder ;
    # https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
    set -e
    # set -x
    # only exit with zero if all commands of the pipeline exit successfully
    set -o pipefail
    # Make sure to use project tooling
    PATH="$(pwd)/tmp/bin:${PATH}"
    # Make sure to start with a clean 'manifests' dir
    print_two_columns "clean up" " removing manifests"
    rm -rf manifests
    mkdir -p manifests/setup
    #delay_with_message 3 "starting build" ;
    # Calling gojsontoyaml is optional, but we would like to generate yaml, not json
    print_two_columns "compiling jsonnet" "this will take several minutes..."
    jsonnet --jpath vendor --multi manifests $sourceFile | xargs -I{} sh -c 'cat {} | /usr/local/go/bin/gojsontoyaml > {}.yaml' -- {}
    # Restore lenient shell behavior for the remainder of the deployment script.
    set +e
    set +o pipefail
    # Make sure to remove json files
    print_two_columns "kubernetes prep" "removing non yaml from manifests folder"
    find manifests -type f ! -name '*.yaml' -delete
    rm -f kustomization
    # print_with_head "to deploy rto kubenetes:";
    # print_line "cd $projectFolder ;" ;
    # print_line "kubectl delete --ignore-not-found=true -f manifests/ -f manifests/setup ;" ;
    # print_line 'kubectl create -f manifests/setup ; until kubectl get servicemonitors --all-namespaces ; do date; sleep 1; echo ""; done ;'
    # print_line 'kubectl create -f manifests/'
}
# csap hook: pull a previously published spec zip from the maven repo into
# $csapPackageDependencies; skipped when the zip was just built locally.
function api_package_get() {
    print_with_head "api_package_get(): csapBuildVersion: '$csapBuildVersion'"
    if [[ "$csapBuildVersion" == "" ]] ; then
        print_line "api_package_get() skipping binary retrieval, using binary just built"
        return ;
    fi ;
    print_line "api_package_get(): removing previous files in $csapPackageDependencies"
    update_repo_variables
    \rm --recursive --force $csapPackageDependencies
    mkdir -p $csapPackageDependencies
    cd $csapPackageDependencies
    csap_mvn dependency:copy -Dtransitive=false -Dartifact=bin:$specPackageName:$specVersion:zip -DoutputDirectory=$(pwd)
}
skipBacklogWaits=true ; # items get added to queue
# csap hook: kill is identical to a normal stop for this package.
function api_service_kill() {
    api_service_stop
}
# csap hook: delete all deployed manifests (in reverse creation order) and
# remove the monitoring-tools entries from the application definition.
function api_service_stop() {
    print_with_head "removing $csapName, dir: $(pwd)" ;
    # set -x
    # --grace-period=3 --timeout=10s
    # print_separator "Deleting $csapWorkingDir/manifests"
    # kubectl delete --ignore-not-found=true --recursive=false --filename=$csapWorkingDir/manifests
    print_separator "Deleting $csapWorkingDir/manifests"
    # Reverse sort so dependent resources are removed before their setup specs.
    find $csapWorkingDir/manifests -type f -name '*.yaml' -print0 \
        | sort --zero-terminated --reverse \
        | xargs --null --replace=theYamlFile \
            kubectl delete --ignore-not-found=true --filename=theYamlFile
    local numInstances=$(count_services_in_definition monitoring-tools) ;
    if (( $numInstances > 0 )) ; then
        print_separator "removing $csapName monitoring services from application" ;
        envsubst '$csapLife' <$csapWorkingDir/configuration/remove-monitoring.yaml >$csapWorkingDir/remove-monitoring.yaml
        local isApply="true"
        update_application $csapWorkingDir/remove-monitoring.yaml $isApply ;
    else
        print_two_columns "monitoring services" "already removed from application" ;
    fi ;
}
# csap hook: extract the spec zip, substitute template variables and yaml
# swaps into the manifests, optionally disable grafana auth, then kubectl
# create the setup (0*) specs followed by the remaining specs, and register
# the monitoring services in the application definition.
function api_service_start() {
    print_with_head "Starting $csapName package installation"
    #
    # load any application customizations
    #
    copy_csap_service_resources ;
    if [ ! -e "$csapWorkingDir/manifests" ] ; then
        local specZip=$csapPackageDependencies/$specPackageName*.zip ;
        print_with_head "extracting '$specZip' to '$(pwd)'"
        if $( ! test -e $specZip ) ; then
            print_line "Error: did not find $csapPackageDependencies/specZip.zip in $csapPackageDependencies "
            exit;
        fi ;
        cp --force --verbose $specZip .
        unzip -qq -o $specZip ;
    fi ;
    print_separator "Updating yaml manifests"
    # Replace __name__ tokens with the corresponding environment values.
    for templateVariable in $templateVariables ; do
        key="__"$templateVariable"__";
        value=${!templateVariable} ;
        print_two_columns "manifest update" "replacing: $key with $value" ;
        find $csapWorkingDir/manifests -type f -name '*.yaml' | xargs sed -i "s/$key/$value/g"
    done
    #
    # YAML swaps: Defined in Env settings/csap integrations
    #
    for (( counter=1; counter < (100) ; counter++ )) ; do
        current="yamlCurrent$counter" ;
        new="yamlNew$counter" ;
        if [ -z "${!current}" ] || [ -z "${!new}" ]; then
            break;
        fi ;
        currentVal=${!current}
        newVal=${!new}
        print_two_columns "$current" "$currentVal"
        print_two_columns "$new" "$newVal"
        find $csapWorkingDir/manifests -type f -name '*.yaml' | xargs sed -i "s|$currentVal|$newVal|g"
    done
    if $ui_anonymous ; then
        print_with_head "ui_anonymous was set to true: overwriting packaged manifest that builds grafana.ini" ;
        if test -f $csapWorkingDir/manifests/grafana-config.yaml ; then
            mv --verbose $csapWorkingDir/manifests/grafana-config.yaml $csapWorkingDir/manifests/grafana-config.yaml.auth-enabled-orig ;
        fi ;
        cp --verbose --force $csapWorkingDir/configuration/grafana-config-no-auth.yaml $csapWorkingDir/manifests/grafana-config.yaml ;
        local grafanaDeploymentFile="$csapWorkingDir/manifests/grafana-deployment.yaml"
        cp --verbose $grafanaDeploymentFile $grafanaDeploymentFile.orig ;
        # print_two_columns "comment out" "mountPath: /etc/grafana" ;
        # sed --in-place --expression='/mountPath: \/etc\/grafana/,+2 s/^/#/' $grafanaDeploymentFile
        #
        # print_two_columns "comment out" "grafana-config" ;
        # sed --in-place --expression='/\- name: grafana-config/,+2 s/^/#/' $grafanaDeploymentFile
        print_two_columns "adding volumeMounts" "$grafanaDeploymentFile" ;
        # NOTE(review): heredoc indentation must match the deployment yaml's
        # container volumeMounts list level — verify against the generated file.
        local volumeMountsDefinition=$(cat <<'EOF'
        - mountPath: /etc/grafana
          name: grafana-no-auth-ini
          readOnly: false
EOF
);
        # sed 'r' appends the snippet after each line matching volumeMounts:.
        sed --in-place '/volumeMounts:/r'<(
            echo -e "$volumeMountsDefinition"
        ) -- $grafanaDeploymentFile
        print_two_columns "adding volumes" "$grafanaDeploymentFile" ;
        local volumesDefinition=$(cat <<'EOF'
      - configMap:
          name: grafana-no-auth-ini
        name: grafana-no-auth-ini
EOF
);
        sed --in-place '/volumes:/r'<(
            echo -e "$volumesDefinition"
        ) -- $grafanaDeploymentFile
    fi ;
    # print_separator "Deploying setup specs"
    # kubectl create --filename=$csapWorkingDir/manifests/setup ;
    print_separator "creating manifests/0*"
    find $csapWorkingDir/manifests -type f -name '0*.yaml' -print0 \
        | sort --zero-terminated \
        | xargs --null --replace=theYamlFile \
            kubectl create --filename=theYamlFile
    print_separator "Waiting for service monitor creation"
    until kubectl get servicemonitors --all-namespaces ; do echo -e "\nWaiting for sevicemonitors to be created" ; sleep 5; echo ""; done ;
    print_separator "Deploying core specs"
    # kubectl create --filename=$csapWorkingDir/manifests/ ;
    find $csapWorkingDir/manifests -type f -and -name '*.yaml' -and -not -name '0*.yaml' -print0 \
        | sort --zero-terminated \
        | xargs --null --replace=theYamlFile \
            kubectl create --filename=theYamlFile
    local numInstances=$(count_services_in_definition monitoring-tools) ;
    if (( $numInstances == 0 )) ; then
        print_separator "Adding $csapName monitoring services into application environment '$csapLife'" ;
        envsubst '$csapLife' <$csapWorkingDir/configuration/add-monitoring.yaml >$csapWorkingDir/add-monitoring.yaml
        local isApply="true"
        update_application $csapWorkingDir/add-monitoring.yaml $isApply ;
    else
        print_two_columns "monitoring services" "already installed in application" ;
    fi ;
}
|
#include <boost/test/unit_test.hpp>
#include <boost/algorithm/string/predicate.hpp>
#include <eosio/testing/tester.hpp>
#include <eosio/chain/contracts/abi_serializer.hpp>
#include <currency/currency.wast.hpp>
#include <currency/currency.abi.hpp>
#include <Runtime/Runtime.h>
#include <fc/variant_object.hpp>
using namespace eosio;
using namespace eosio::chain;
using namespace eosio::chain::contracts;
using namespace eosio::testing;
using namespace fc;
// Mirror of the currency contract's "issue" action payload, used to build a
// raw action in test_generic_currency below.
struct issue {
   // Contract account ("currency") and action name ("issue") for dispatch.
   static uint64_t get_account(){ return N(currency); }
   static uint64_t get_name(){ return N(issue); }
   account_name to;  // recipient of the newly issued tokens
   asset quantity;   // amount to issue
};
// Expose the fields to fc serialization so the struct can be packed as action data.
FC_REFLECT( issue, (to)(quantity) )
BOOST_AUTO_TEST_SUITE(currency_tests)

// Deploy the currency contract and issue 10.0000 CUR to usera via a raw
// (struct-built) action; verify the resulting balance.
BOOST_FIXTURE_TEST_CASE( test_generic_currency, tester ) try {
   produce_blocks(2000);
   create_accounts( {N(currency), N(usera), N(userb)}, asset::from_string("1000.0000 EOS") );
   produce_blocks(2);
   set_code( N(currency), currency_wast );
   produce_blocks(2);
   auto expected = asset::from_string( "10.0000 CUR" );
   {
      signed_transaction trx;
      trx.actions.emplace_back(vector<permission_level>{{N(currency), config::active_name}},
                               issue{ .to = N(usera),
                                      .quantity = expected
                               });
      set_tapos(trx);
      trx.sign(get_private_key(N(currency), "active"), chain_id_type());
      auto result = push_transaction(trx);
      // Dump any contract console output to aid debugging on failure.
      for( const auto& act : result.action_traces )
         std::cerr << act.console << "\n";
      produce_block();
      auto actual = get_currency_balance(N(currency), expected.symbol, N(usera));
      BOOST_REQUIRE_EQUAL(expected, actual);
   }
} FC_LOG_AND_RETHROW() /// test_api_bootstrap
// End-to-end currency flow using ABI-serialized actions: issue the full
// supply, transfer to alice, reject an overspend, then spend the exact
// remaining balance.
BOOST_FIXTURE_TEST_CASE( test_currency, tester ) try {
   produce_blocks(2000);
   create_accounts( {N(currency), N(alice), N(bob)}, asset::from_string("1000.0000 EOS") );
   transfer( N(inita), N(currency), "10.0000 EOS", "memo" );
   produce_block();
   set_code(N(currency), currency_wast);
   set_abi(N(currency), currency_abi);
   produce_blocks(1);
   // Load the contract's ABI from chain state so action payloads can be
   // serialized from variants.
   const auto& accnt = control->get_database().get<account_object,by_name>( N(currency) );
   abi_def abi;
   BOOST_REQUIRE_EQUAL(abi_serializer::to_abi(accnt.abi, abi), true);
   abi_serializer abi_ser(abi);
   const auto token_supply = asset::from_string("1000000.0000 CUR");
   // issue tokens
   {
      signed_transaction trx;
      action issue_act;
      issue_act.account = N(currency);
      issue_act.name = N(issue);
      issue_act.authorization = vector<permission_level>{{N(currency), config::active_name}};
      issue_act.data = abi_ser.variant_to_binary("issue", mutable_variant_object()
         ("to", "currency")
         ("quantity", "1000000.0000 CUR")
      );
      trx.actions.emplace_back(std::move(issue_act));
      set_tapos(trx);
      trx.sign(get_private_key(N(currency), "active"), chain_id_type());
      control->push_transaction(trx);
      produce_block();
      BOOST_REQUIRE_EQUAL(true, chain_has_transaction(trx.id()));
      BOOST_REQUIRE_EQUAL(get_currency_balance(N(currency), token_supply.symbol, N(currency)), asset::from_string( "1000000.0000 CUR" ));
   }
   // make a transfer from the contract to a user
   {
      signed_transaction trx;
      action transfer_act;
      transfer_act.account = N(currency);
      transfer_act.name = N(transfer);
      transfer_act.authorization = vector<permission_level>{{N(currency), config::active_name}};
      transfer_act.data = abi_ser.variant_to_binary("transfer", mutable_variant_object()
         ("from", "currency")
         ("to", "alice")
         ("quantity", "100.0000 CUR")
         ("memo", "fund Alice")
      );
      trx.actions.emplace_back(std::move(transfer_act));
      set_tapos(trx);
      trx.sign(get_private_key(N(currency), "active"), chain_id_type());
      control->push_transaction(trx);
      produce_block();
      BOOST_REQUIRE_EQUAL(true, chain_has_transaction(trx.id()));
      BOOST_REQUIRE_EQUAL(get_currency_balance(N(currency), token_supply.symbol, N(alice)), asset::from_string( "100.0000 CUR" ));
   }
   // Overspend!
   // Transferring more than alice holds must be rejected and leave both
   // balances unchanged.
   {
      signed_transaction trx;
      action transfer_act;
      transfer_act.account = N(currency);
      transfer_act.name = N(transfer);
      transfer_act.authorization = vector<permission_level>{{N(alice), config::active_name}};
      transfer_act.data = abi_ser.variant_to_binary("transfer", mutable_variant_object()
         ("from", "alice")
         ("to", "bob")
         ("quantity", "101.0000 CUR")
         ("memo", "overspend! Alice")
      );
      trx.actions.emplace_back(std::move(transfer_act));
      set_tapos(trx);
      trx.sign(get_private_key(N(alice), "active"), chain_id_type());
      BOOST_CHECK_EXCEPTION(control->push_transaction(trx), fc::assert_exception, assert_message_is("integer underflow subtracting token balance"));
      produce_block();
      BOOST_REQUIRE_EQUAL(false, chain_has_transaction(trx.id()));
      BOOST_REQUIRE_EQUAL(get_currency_balance(N(currency), token_supply.symbol, N(alice)), asset::from_string( "100.0000 CUR" ));
      BOOST_REQUIRE_EQUAL(get_currency_balance(N(currency), token_supply.symbol, N(bob)), asset::from_string( "0.0000 CUR" ));
   }
   // Full spend
   // Spending alice's exact balance must succeed and zero her account.
   {
      signed_transaction trx;
      action transfer_act;
      transfer_act.account = N(currency);
      transfer_act.name = N(transfer);
      transfer_act.authorization = vector<permission_level>{{N(alice), config::active_name}};
      transfer_act.data = abi_ser.variant_to_binary("transfer", mutable_variant_object()
         ("from", "alice")
         ("to", "bob")
         ("quantity", "100.0000 CUR")
         ("memo", "all in! Alice")
      );
      trx.actions.emplace_back(std::move(transfer_act));
      set_tapos(trx);
      trx.sign(get_private_key(N(alice), "active"), chain_id_type());
      control->push_transaction(trx);
      produce_block();
      BOOST_REQUIRE_EQUAL(true, chain_has_transaction(trx.id()));
      BOOST_REQUIRE_EQUAL(get_currency_balance(N(currency), token_supply.symbol, N(alice)), asset::from_string( "0.0000 CUR" ));
      BOOST_REQUIRE_EQUAL(get_currency_balance(N(currency), token_supply.symbol, N(bob)), asset::from_string( "100.0000 CUR" ));
   }
} FC_LOG_AND_RETHROW() /// test_currency

BOOST_AUTO_TEST_SUITE_END()
|
#!/bin/bash
# Build/test each subproject in sequence, aborting the whole run as soon as
# any single sbt invocation fails. The geowave project additionally needs a
# HOSTALIASES override for its tests.

run_sbt() {
  # $1 = sbt project name, $2 = sbt task to run for that project
  ./sbt -J-Xmx2G "project $1" "$2" || exit 1
}

run_sbt accumulo test
run_sbt cassandra test
run_sbt doc-examples compile
run_sbt geomesa test
run_sbt geotools test
HOSTALIASES=/tmp/hostaliases ./sbt -J-Xmx2G "project geowave" test || exit 1
run_sbt hbase test
run_sbt proj4 test
run_sbt raster-test test
run_sbt raster-testkit compile
run_sbt s3 compile
run_sbt s3-test test
run_sbt s3-testkit test
run_sbt shapefile compile
run_sbt slick test:compile
run_sbt spark test
run_sbt spark-etl compile
run_sbt spark-testkit compile
run_sbt util compile
run_sbt vector-test test
run_sbt vectortile test
|
/////////////////////////////////////////////////////////////////////////
//// GLCD.C ////
//// ////
//// This file contains drivers for using a Hantronix HDM64GS12 with ////
//// an LED backlight. The HDM64GS12 is 128 pixels across and 64 ////
//// pixels down. The driver treats the upper left pixel as (0,0). ////
//// ////
//// LCD Pin connections: ////
//// (These can be changed as needed in the following defines). ////
//// * 1: VSS is connected to GND ////
//// * 2: VDD is connected to +5V ////
//// * 3: V0 - LCD operating voltage is connected from a 20k Ohm POT////
//// * 4: D/I - Data or Instruction is connected to B2 ////
//// * 5: R/W - Read or Write is connected to B4 ////
//// * 6: Enable is connected to B5 ////
//// *7-14: Data Bus 0 to 7 is connected to port d ////
//// *15: Chip Select 1 is connected to B0 ////
//// *16: Chip Select 2 is connected to B1 ////
//// *17: Reset is connected to C0 ////
//// *18: Negative voltage is also connected to the 20k Ohm POT ////
//// *19: Positive voltage for LED backlight is connected to +5V ////
//// *20: Negative voltage for LED backlight is connected to GND ////
//// ////
//// glcd_init(mode) ////
//// * Must be called before any other function. ////
//// - mode can be ON or OFF to turn the LCD on or off ////
//// ////
//// glcd_pixel(x,y,color) ////
//// * Sets the pixel to the given color. ////
//// - color can be ON or OFF ////
//// ////
//// glcd_line(x1,y1,x2,y2,color) ////
//// * Draws a line from the first point to the second point ////
//// with the given color. ////
//// - color can be ON or OFF ////
//// ////
//// glcd_rect(x1,y1,x2,y2,fill,color) ////
//// * Draws a rectangle with upper left point (x1,y1) and lower ////
//// right point (x2,y2). ////
//// - fill can be YES or NO ////
//// - color can be ON or OFF ////
//// ////
//// glcd_bar(x1,y1,x2,y2,width,color) ////
//// * Draws a bar (wide line) from the first point to the ////
//// second point. ////
//// - width is the number of pixels wide ////
//// - color is ON or OFF ////
//// ////
//// glcd_circle(x,y,radius,fill,color) ////
//// * Draws a circle with center at (x,y) ////
//// - fill can be YES or NO ////
//// - color can be ON or OFF ////
//// ////
//// glcd_text57(x,y,textptr,size,color) ////
//// * Write the null terminated text pointed to by textptr with ////
//// the upper left coordinate of the first character at (x,y).////
//// Characters are 5 pixels wide and 7 pixels tall. ////
//// - size is an integer that scales the size of the text ////
//// - color is ON or OFF ////
//// * Note - The given text is character wrapped. If this ////
//// function is used on a different size display, then change ////
//// the GLCD_WIDTH define appropriately. ////
//// ////
//// glcd_fillScreen(color) ////
//// * Fills the entire LCD with the given color. ////
//// - color can be ON or OFF ////
//// ////
/////////////////////////////////////////////////////////////////////////
//// (C) Copyright 1996,2003 Custom Computer Services ////
//// This source code may only be used by licensed users of the CCS ////
//// C compiler. This source code may only be distributed to other ////
//// licensed users of the CCS C compiler. No other use, ////
//// reproduction or distribution is permitted without written ////
//// permission. Derivative programs created using this software ////
//// in object code form are not restricted in any way. ////
/////////////////////////////////////////////////////////////////////////
#ifndef GLCD_C
#define GLCD_C
#ifndef GLCD_WIDTH
#define GLCD_WIDTH 128 // Used for text wrapping by glcd_text57 function
#endif
#define ON 1
#define OFF 0
#define YES 1
#define NO 0
#ifndef GLCD_CS1
#define GLCD_CS1 PIN_B0 // Chip Selection 1
#endif
#ifndef GLCD_CS2
#define GLCD_CS2 PIN_B1 // Chip Selection 2
#endif
#ifndef GLCD_DI
#define GLCD_DI PIN_B2 // Data or Instruction input
#endif
#ifndef GLCD_RW
#define GLCD_RW PIN_B4 // Read/Write
#endif
#ifndef GLCD_E
#define GLCD_E PIN_B5 // Enable
#endif
#ifndef GLCD_RST
#define GLCD_RST PIN_C0 // Reset
#endif
BYTE glcd_readByte(BYTE chip);
void glcd_writeByte(BYTE chip, BYTE data);
void glcd_fillScreen(int1 color);
// 5x7 font table, part 1: glyphs for ASCII ' ' (0x20) through 'R' (0x52).
// Each row holds one glyph as 5 column bytes; bit 0 of a byte is the top
// pixel of that column (matches glcd_pixel's y%8 bit addressing).
// glcd_text57() indexes this as TEXT[ch - ' ']; glyphs from 'S' on live in
// TEXT2 below (presumably split to satisfy a compiler const-size limit --
// TODO confirm).
const BYTE TEXT[51][5] ={0x00, 0x00, 0x00, 0x00, 0x00, // SPACE
0x00, 0x00, 0x5F, 0x00, 0x00, // !
0x00, 0x03, 0x00, 0x03, 0x00, // "
0x14, 0x3E, 0x14, 0x3E, 0x14, // #
0x24, 0x2A, 0x7F, 0x2A, 0x12, // $
0x43, 0x33, 0x08, 0x66, 0x61, // %
0x36, 0x49, 0x55, 0x22, 0x50, // &
0x00, 0x05, 0x03, 0x00, 0x00, // '
0x00, 0x1C, 0x22, 0x41, 0x00, // (
0x00, 0x41, 0x22, 0x1C, 0x00, // )
0x14, 0x08, 0x3E, 0x08, 0x14, // *
0x08, 0x08, 0x3E, 0x08, 0x08, // +
0x00, 0x50, 0x30, 0x00, 0x00, // ,
0x08, 0x08, 0x08, 0x08, 0x08, // -
0x00, 0x60, 0x60, 0x00, 0x00, // .
0x20, 0x10, 0x08, 0x04, 0x02, // /
0x3E, 0x51, 0x49, 0x45, 0x3E, // 0
0x04, 0x02, 0x7F, 0x00, 0x00, // 1
0x42, 0x61, 0x51, 0x49, 0x46, // 2
0x22, 0x41, 0x49, 0x49, 0x36, // 3
0x18, 0x14, 0x12, 0x7F, 0x10, // 4
0x27, 0x45, 0x45, 0x45, 0x39, // 5
0x3E, 0x49, 0x49, 0x49, 0x32, // 6
0x01, 0x01, 0x71, 0x09, 0x07, // 7
0x36, 0x49, 0x49, 0x49, 0x36, // 8
0x26, 0x49, 0x49, 0x49, 0x3E, // 9
0x00, 0x36, 0x36, 0x00, 0x00, // :
0x00, 0x56, 0x36, 0x00, 0x00, // ;
0x08, 0x14, 0x22, 0x41, 0x00, // <
0x14, 0x14, 0x14, 0x14, 0x14, // =
0x00, 0x41, 0x22, 0x14, 0x08, // >
0x02, 0x01, 0x51, 0x09, 0x06, // ?
0x3E, 0x41, 0x59, 0x55, 0x5E, // @
0x7E, 0x09, 0x09, 0x09, 0x7E, // A
0x7F, 0x49, 0x49, 0x49, 0x36, // B
0x3E, 0x41, 0x41, 0x41, 0x22, // C
0x7F, 0x41, 0x41, 0x41, 0x3E, // D
0x7F, 0x49, 0x49, 0x49, 0x41, // E
0x7F, 0x09, 0x09, 0x09, 0x01, // F
0x3E, 0x41, 0x41, 0x49, 0x3A, // G
0x7F, 0x08, 0x08, 0x08, 0x7F, // H
0x00, 0x41, 0x7F, 0x41, 0x00, // I
0x30, 0x40, 0x40, 0x40, 0x3F, // J
0x7F, 0x08, 0x14, 0x22, 0x41, // K
0x7F, 0x40, 0x40, 0x40, 0x40, // L
0x7F, 0x02, 0x0C, 0x02, 0x7F, // M
0x7F, 0x02, 0x04, 0x08, 0x7F, // N
0x3E, 0x41, 0x41, 0x41, 0x3E, // O
0x7F, 0x09, 0x09, 0x09, 0x06, // P
0x1E, 0x21, 0x21, 0x21, 0x5E, // Q
0x7F, 0x09, 0x09, 0x09, 0x76};// R
// 5x7 font table, part 2: glyphs for ASCII 'S' (0x53) through '~' (0x7E).
// Same layout as TEXT; glcd_text57() indexes this as TEXT2[ch - 'S'].
const BYTE TEXT2[44][5]={0x26, 0x49, 0x49, 0x49, 0x32, // S
0x01, 0x01, 0x7F, 0x01, 0x01, // T
0x3F, 0x40, 0x40, 0x40, 0x3F, // U
0x1F, 0x20, 0x40, 0x20, 0x1F, // V
0x7F, 0x20, 0x10, 0x20, 0x7F, // W
0x41, 0x22, 0x1C, 0x22, 0x41, // X
0x07, 0x08, 0x70, 0x08, 0x07, // Y
0x61, 0x51, 0x49, 0x45, 0x43, // Z
0x00, 0x7F, 0x41, 0x00, 0x00, // [
0x02, 0x04, 0x08, 0x10, 0x20, // \
0x00, 0x00, 0x41, 0x7F, 0x00, // ]
0x04, 0x02, 0x01, 0x02, 0x04, // ^
0x40, 0x40, 0x40, 0x40, 0x40, // _
0x00, 0x01, 0x02, 0x04, 0x00, // `
0x20, 0x54, 0x54, 0x54, 0x78, // a
0x7F, 0x44, 0x44, 0x44, 0x38, // b
0x38, 0x44, 0x44, 0x44, 0x44, // c
0x38, 0x44, 0x44, 0x44, 0x7F, // d
0x38, 0x54, 0x54, 0x54, 0x18, // e
0x04, 0x04, 0x7E, 0x05, 0x05, // f
0x08, 0x54, 0x54, 0x54, 0x3C, // g
0x7F, 0x08, 0x04, 0x04, 0x78, // h
0x00, 0x44, 0x7D, 0x40, 0x00, // i
0x20, 0x40, 0x44, 0x3D, 0x00, // j
0x7F, 0x10, 0x28, 0x44, 0x00, // k
0x00, 0x41, 0x7F, 0x40, 0x00, // l
0x7C, 0x04, 0x78, 0x04, 0x78, // m
0x7C, 0x08, 0x04, 0x04, 0x78, // n
0x38, 0x44, 0x44, 0x44, 0x38, // o
0x7C, 0x14, 0x14, 0x14, 0x08, // p
0x08, 0x14, 0x14, 0x14, 0x7C, // q
0x00, 0x7C, 0x08, 0x04, 0x04, // r
0x48, 0x54, 0x54, 0x54, 0x20, // s
0x04, 0x04, 0x3F, 0x44, 0x44, // t
0x3C, 0x40, 0x40, 0x20, 0x7C, // u
0x1C, 0x20, 0x40, 0x20, 0x1C, // v
0x3C, 0x40, 0x30, 0x40, 0x3C, // w
0x44, 0x28, 0x10, 0x28, 0x44, // x
0x0C, 0x50, 0x50, 0x50, 0x3C, // y
0x44, 0x64, 0x54, 0x4C, 0x44, // z
0x00, 0x08, 0x36, 0x41, 0x41, // {
0x00, 0x00, 0x7F, 0x00, 0x00, // |
0x41, 0x41, 0x36, 0x08, 0x00, // }
0x02, 0x01, 0x02, 0x04, 0x02};// ~
// Purpose:    Initialize a graphic LCD (both KS0108-style chip halves).
//             This must be called before any other glcd function is used.
// Inputs:     The initialization mode
//             OFF - Turns the LCD off
//             ON  - Turns the LCD on
// Date:       5/28/2003
void glcd_init(int1 mode)
{
   // Initialize some pins
   output_high(GLCD_RST);           // Release the controller's reset line
   output_low(GLCD_E);              // Enable idles low between transfers
   output_low(GLCD_CS1);
   output_low(GLCD_CS2);
   output_low(GLCD_DI);             // D/I low = the writes below are instructions
   glcd_writeByte(GLCD_CS1, 0xC0);  // Specify first RAM line at the top
   glcd_writeByte(GLCD_CS2, 0xC0);  //   of the screen
   glcd_writeByte(GLCD_CS1, 0x40);  // Set the column address to 0
   glcd_writeByte(GLCD_CS2, 0x40);
   glcd_writeByte(GLCD_CS1, 0xB8);  // Set the page address to 0
   glcd_writeByte(GLCD_CS2, 0xB8);
   if(mode == ON)
   {
      glcd_writeByte(GLCD_CS1, 0x3F);  // Turn the display on
      glcd_writeByte(GLCD_CS2, 0x3F);
   }
   else
   {
      glcd_writeByte(GLCD_CS1, 0x3E);  // Turn the display off
      glcd_writeByte(GLCD_CS2, 0x3E);
   }
   glcd_fillScreen(OFF);               // Clear the display RAM
}
// Purpose: Turn a pixel on a graphic LCD on or off
// Inputs: x - the x coordinate of the pixel
// y - the y coordinate of the pixel
// color - ON or OFF
// Output: none (the function returns void; out-of-range coordinates are not checked)
void glcd_pixel(int8 x, int8 y, int1 color)
{
   BYTE data;
   BYTE chip = GLCD_CS1;            // Stores which chip to use on the LCD
   if(x > 63)                       // Check for first or second display area:
   {                                //   each chip drives one 64-pixel half
      x -= 64;
      chip = GLCD_CS2;
   }
   output_low(GLCD_DI);             // Set for instruction
   bit_clear(x,7);                  // Clear the MSB. Part of an instruction code
   bit_set(x,6);                    // Set bit 6. Also part of an instruction code
   glcd_writeByte(chip, x);         // Set the horizontal address
   glcd_writeByte(chip, (y/8 & 0b10111111) | 0b10111000); // Set the vertical page address
   output_high(GLCD_DI);            // Set for data
   data = glcd_readByte(chip);      // Read-modify-write: fetch the 8-pixel column byte
   if(color == ON)
      bit_set(data, y%8);           // Turn the pixel on
   else                             // or
      bit_clear(data, y%8);         // turn the pixel off
   output_low(GLCD_DI);             // Set for instruction
   glcd_writeByte(chip, x);         // Re-set the horizontal address before writing back
   output_high(GLCD_DI);            // Set for data
   glcd_writeByte(chip, data);      // Write the modified column byte back
   // NOTE(review): coordinates are not range-checked; x > 127 or y > 63 wraps
   // into controller address bits rather than being rejected.
}
// Purpose: Draw a line on a graphic LCD using Bresenham's
// line drawing algorithm
// Inputs: (x1, y1) - the start coordinate
// (x2, y2) - the end coordinate
// color - ON or OFF
// Dependencies: glcd_pixel()
// Bresenham line draw: plots one pixel per step along the major axis,
// accumulating the error term to decide when to step the minor axis.
void glcd_line(int8 x1, int8 y1, int8 x2, int8 y2, int1 color)
{
   signed int cx, cy, xstep, ystep, dx, dy;
   signed long err;
   int step;

   dx = abs((signed int)(x2 - x1));    // Absolute deltas per axis
   dy = abs((signed int)(y2 - y1));
   cx = x1;                            // Current plot position
   cy = y1;
   xstep = (x1 > x2) ? -1 : 1;         // Direction of travel per axis
   ystep = (y1 > y2) ? -1 : 1;

   if(dx >= dy)                        // Shallow slope: walk the x axis
   {
      err = 2*dy - dx;
      for(step = 0; step <= dx; ++step)
      {
         glcd_pixel(cx, cy, color);
         if(err < 0)
         {
            err += 2*dy;
            cx += xstep;
         }
         else
         {
            err += 2*dy - 2*dx;
            cx += xstep;
            cy += ystep;
         }
      }
   }
   else                                // Steep slope: walk the y axis
   {
      err = 2*dx - dy;
      for(step = 0; step <= dy; ++step)
      {
         glcd_pixel(cx, cy, color);
         if(err < 0)
         {
            err += 2*dx;
            cy += ystep;
         }
         else
         {
            err += 2*dx - 2*dy;
            cx += xstep;
            cy += ystep;
         }
      }
   }
}
// Purpose: Draw a rectangle on a graphic LCD
// Inputs: (x1, y1) - the start coordinate
// (x2, y2) - the end coordinate
// fill - YES or NO
// color - ON or OFF
// Dependencies: glcd_pixel(), glcd_line()
// Draw a rectangle: either the outline (four edges) or a solid fill made of
// horizontal scan lines.
void glcd_rect(int8 x1, int8 y1, int8 x2, int8 y2, int fill, int1 color)
{
   if(fill)
   {
      // Filled: sweep scan lines from the smaller y to the larger y
      int row  = (y1 < y2) ? y1 : y2;
      int ymax = (y1 < y2) ? y2 : y1;
      for(; row <= ymax; ++row)
         glcd_line(x1, row, x2, row, color);
   }
   else
   {
      // Outline only: draw the four edges
      glcd_line(x1, y1, x2, y1, color);   // top
      glcd_line(x1, y2, x2, y2, color);   // bottom
      glcd_line(x1, y1, x1, y2, color);   // left
      glcd_line(x2, y1, x2, y2, color);   // right
   }
}
// Purpose:       Draw a bar (wide line) on a graphic LCD
// Inputs:        (x1, y1) - the start coordinate
//                (x2, y2) - the end coordinate
//                width    - The number of pixels wide
//                color    - ON or OFF
// Dependencies:  glcd_pixel()
void glcd_bar(int x1, int y1, int x2, int y2, int width, int1 color)
{
   signed int x, y, addx, addy, j;
   signed long P, dx, dy, c1, c2;
   int i;
   dx = abs((signed int)(x2 - x1));
   dy = abs((signed int)(y2 - y1));
   x = x1;
   y = y1;
   // c1/c2 are line offsets through the two endpoints; the test in the plot
   // loops below uses them to keep the widened line from spilling past its
   // end points (presumably acting as end-cap clipping -- TODO confirm).
   c1 = -dx*x1 - dy*y1;
   c2 = -dx*x2 - dy*y2;
   if(x1 > x2)
   {
      addx = -1;                // Step x in the negative direction
      c1 = -dx*x2 - dy*y2;      // Swap the end-cap offsets to match
      c2 = -dx*x1 - dy*y1;
   }
   else
      addx = 1;
   if(y1 > y2)
   {
      addy = -1;                // Step y in the negative direction
      c1 = -dx*x2 - dy*y2;
      c2 = -dx*x1 - dy*y1;
   }
   else
      addy = 1;
   if(dx >= dy)                 // Mostly horizontal: iterate along x,
   {                            //   widening each step vertically
      P = 2*dy - dx;            // Bresenham decision variable
      for(i=0; i<=dx; ++i)
      {
         for(j=-(width/2); j<width/2+width%2; ++j)
         {
            // Only plot points lying between the two end-cap lines
            if(dx*x+dy*(y+j)+c1 >= 0 && dx*x+dy*(y+j)+c2 <=0)
               glcd_pixel(x, y+j, color);
         }
         if(P < 0)
         {
            P += 2*dy;
            x += addx;
         }
         else
         {
            P += 2*dy - 2*dx;
            x += addx;
            y += addy;
         }
      }
   }
   else                         // Mostly vertical: iterate along y,
   {                            //   widening each step horizontally
      P = 2*dx - dy;
      for(i=0; i<=dy; ++i)
      {
         if(P < 0)
         {
            P += 2*dx;
            y += addy;
         }
         else
         {
            P += 2*dx - 2*dy;
            x += addx;
            y += addy;
         }
         for(j=-(width/2); j<width/2+width%2; ++j)
         {
            if(dx*x+dy*(y+j)+c1 >= 0 && dx*x+dy*(y+j)+c2 <=0)
               glcd_pixel(x+j, y, color);
         }
      }
   }
}
// Purpose: Draw a circle on a graphic LCD
// Inputs: (x,y) - the center of the circle
// radius - the radius of the circle
// fill - YES or NO
// color - ON or OFF
// Midpoint circle algorithm: walk one octant from the top of the circle and
// mirror each computed point into the other seven octants.
void glcd_circle(int x, int y, int radius, int1 fill, int1 color)
{
   signed int dx, dy, err;
   dx  = 0;
   dy  = radius;
   err = 1 - radius;
   do
   {
      if(fill)
      {
         // Fill by spanning horizontal chords between mirrored points
         glcd_line(x-dx, y+dy, x+dx, y+dy, color);
         glcd_line(x-dx, y-dy, x+dx, y-dy, color);
         glcd_line(x-dy, y+dx, x+dy, y+dx, color);
         glcd_line(x-dy, y-dx, x+dy, y-dx, color);
      }
      else
      {
         // Plot the eight symmetric points for this octant step
         glcd_pixel(x+dx, y+dy, color);
         glcd_pixel(x+dy, y+dx, color);
         glcd_pixel(x-dx, y+dy, color);
         glcd_pixel(x-dy, y+dx, color);
         glcd_pixel(x+dy, y-dx, color);
         glcd_pixel(x+dx, y-dy, color);
         glcd_pixel(x-dx, y-dy, color);
         glcd_pixel(x-dy, y-dx, color);
      }
      if(err < 0)
      {
         err += 2*dx + 3;          // Midpoint inside: step horizontally
         ++dx;
      }
      else
      {
         err += 2*(dx - dy) + 5;   // Midpoint outside: step diagonally
         ++dx;
         --dy;
      }
   } while(dx <= dy);
}
// Purpose: Write text on a graphic LCD
// Inputs: (x,y) - The upper left coordinate of the first letter
// textptr - A pointer to an array of text to display
// size - The size of the text: 1 = 5x7, 2 = 10x14, ...
// color - ON or OFF
// Write a null-terminated string with the upper-left corner of the first
// glyph at (x,y). Glyphs are 5x7 source pixels, each scaled up to
// size x size output pixels; text wraps at GLCD_WIDTH.
void glcd_text57(int x, int y, char* textptr, int size, int1 color)
{
   int i, j, k, l, m;                     // Loop counters
   BYTE pixelData[5];                     // Stores character data
   for(i=0; textptr[i] != '\0'; ++i, ++x) // Loop through the passed string
   {
      if(textptr[i] < 'S')                // Checks if the letter is in the first text array
         memcpy(pixelData, TEXT[textptr[i]-' '], 5);
      else if(textptr[i] <= '~')          // Check if the letter is in the second array
         memcpy(pixelData, TEXT2[textptr[i]-'S'], 5);
      else
         memcpy(pixelData, TEXT[0], 5);   // Default to space
      if(x+5*size >= GLCD_WIDTH)          // Performs character wrapping
      {
         x = 0;                           // Set x at far left position
         y += 7*size + 1;                 // Set y at next position down
      }
      for(j=0; j<5; ++j, x+=size)         // Loop through character byte data
      {
         // BUG FIX: iterate over the glyph's 7 source rows (bits 0-6) only.
         // The old bound of 7*size read non-existent bits above bit 7 and,
         // combined with y+k*size+l, double-scaled the y coordinate whenever
         // size > 1. The l/m loops below already expand each source pixel to
         // size x size. Behavior for size == 1 is unchanged.
         for(k=0; k<7; ++k)               // Loop through the vertical source pixels
         {
            if(bit_test(pixelData[j], k)) // Check if the pixel should be set
            {
               for(l=0; l<size; ++l)      // The next two loops scale the
               {                          // pixel up to the requested size
                  for(m=0; m<size; ++m)
                  {
                     glcd_pixel(x+m, y+k*size+l, color); // Draws the pixel
                  }
               }
            }
         }
      }
   }
}
// Purpose: Fill the LCD screen with the passed in color.
// Works much faster than drawing a rectangle to fill the screen.
// Inputs: ON - turn all the pixels on
// OFF - turn all the pixels off
// Dependencies: glcd_writeByte()
// Fill the entire display with one color by streaming whole bytes to both
// controller halves, page by page -- far faster than per-pixel drawing.
void glcd_fillScreen(int1 color)
{
   int page, col;
   BYTE fillByte;

   fillByte = color ? 0xFF : 0x00;    // All eight pixels of a column on or off

   for(page = 0; page < 8; ++page)    // The 64 rows are 8 pages of 8 pixels
   {
      output_low(GLCD_DI);                         // Instruction mode
      glcd_writeByte(GLCD_CS1, 0b01000000);        // Column address back to 0
      glcd_writeByte(GLCD_CS2, 0b01000000);
      glcd_writeByte(GLCD_CS1, page | 0b10111000); // Select the current page
      glcd_writeByte(GLCD_CS2, page | 0b10111000);
      output_high(GLCD_DI);                        // Data mode
      for(col = 0; col < 64; ++col)                // 64 columns per chip half
      {
         glcd_writeByte(GLCD_CS1, fillByte);
         glcd_writeByte(GLCD_CS2, fillByte);
      }
   }
}
// Purpose: Write a byte of data to the specified chip
// Inputs:  chip - which chip select (GLCD_CS1 or GLCD_CS2) to strobe
//          data - the byte of data to write
// Note:    Whether the byte is interpreted as a command or as display data
//          depends on the current level of GLCD_DI, which the caller must
//          set before calling.
void glcd_writeByte(char chip, BYTE data)
{
   if(chip == GLCD_CS1)       // Choose which chip to write to
      output_high(GLCD_CS1);
   else
      output_high(GLCD_CS2);
   output_low(GLCD_RW);       // Set for writing
   output_d(data);            // Put the data on the port
   output_high(GLCD_E);       // Pulse the enable pin; the byte is latched
   delay_us(2);               //   when E falls
   output_low(GLCD_E);
   output_low(GLCD_CS1);      // Reset the chip select lines
   output_low(GLCD_CS2);
}
// Purpose: Reads a byte of data from the specified chip
// Inputs:  chip - which chip select (GLCD_CS1 or GLCD_CS2) to read from
// Outputs: A byte of data read from the chip
BYTE glcd_readByte(BYTE chip)
{
   BYTE data;                 // Stores the data read from the LCD
   if(chip == GLCD_CS1)       // Choose which chip to read from
      output_high(GLCD_CS1);
   else
      output_high(GLCD_CS2);
   input_d();                 // Set port d to input
   output_high(GLCD_RW);      // Set for reading
   output_high(GLCD_E);       // First enable pulse: no data is sampled here
   delay_us(2);               //   (presumably the controller's dummy-read
   output_low(GLCD_E);        //   cycle -- TODO confirm against datasheet)
   delay_us(2);
   output_high(GLCD_E);       // Second enable pulse; data is valid now
   delay_us(2);
   data = input_d();          // Get the data from the display's output register
   output_low(GLCD_E);
   output_low(GLCD_CS1);      // Reset the chip select lines
   output_low(GLCD_CS2);
   return data;               // Return the read data
}
#endif
|
#!/bin/bash
# Bump dependencies in every yarn workspace with npm-check-updates (ncu),
# then reinstall from scratch. path-to-regexp is excluded from the bump.
#
# BUG FIX: the shebang was #!/bin/sh, but the loop below uses bash-only
# features (${var/pattern/} substitution and the <<< here-string), which
# fail under a POSIX sh such as dash. Run under bash explicitly.

update () {
  # $1 = workspace directory containing a package.json
  ncu --packageManager npm --packageFile "$1/package.json" -u -x path-to-regexp
  ncu --packageManager npm --packageFile "$1/package.json" -u --dep dev -x path-to-regexp
}

# `yarn workspaces list --json` emits one {"location":...,"name":...} object
# per line; strip the JSON wrapping to recover the bare location path.
yarn workspaces list --json | while read -r package; do
  package="${package/\{\"location\":\"/}"
  package=$(sed "s/\",\"name\":.*//g" <<< "$package")
  update "$package"
done

yarn clean -y
rm yarn.lock
yarn install
yarn set version latest
|
// Source: OmicsDataAutomation/TileDB -- test/src/storage_manager/test_azure_blob_storage.cc
/**
* @file test_azure_blob_storage.cc
*
* @section LICENSE
*
* The MIT License
*
* @copyright Copyright (c) 2021 Omics Data Automation, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @section DESCRIPTION
*
* Tests for the AzureBlob class
*/
#include "catch.h"
#include "storage_azure_blob.h"
#include "uri.h"
#include "utils.h"
#include <fcntl.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
// Catch test fixture: constructs an AzureBlob rooted in a fresh temp dir
// when the --test-dir option points at azure blob storage; otherwise
// azure_blob stays NULL and each test case returns early.
class AzureBlobTestFixture {
 protected:
  TempDir *temp_dir = NULL;      // Scratch directory, owned by the fixture
  AzureBlob *azure_blob = NULL;  // NULL when azure storage is not configured

  AzureBlobTestFixture() {
    if (is_azure_blob_storage_path(get_test_dir())) {
      try {
        temp_dir = new TempDir();
        std::string home_dir = temp_dir->get_temp_dir()+"/test_azure_blob";
        azure_blob = new AzureBlob(home_dir);
        CHECK(!azure_blob->locking_support());
      } catch(...) {
        INFO("Azure Blob Storage could not be credentialed. Set env AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY");
      }
    } else {
      // BUG FIX: this message used to be emitted unconditionally, even when
      // an azure path *was* supplied; it only applies when one was not.
      INFO("Azure Blob Storage not specified as a --test-dir option");
    }
  }

  ~AzureBlobTestFixture() {
    // delete on NULL is a no-op, so no guards are needed
    delete azure_blob;
    delete temp_dir;
  }
};
TEST_CASE("Test AzureBlob constructor", "[constr]") {
  // Malformed or unsupported azure URIs must be rejected at construction time
  CHECK_THROWS(new AzureBlob("wasbs://my_container/path"));
  CHECK_THROWS(new AzureBlob("az://my_container@my_account.blob.core.windows.net/path"));
  CHECK_THROWS(new AzureBlob("az://my_container@blob.core.windows.net/path"));
  CHECK_THROWS(new AzureBlob("az://non-existent-container@blob.core.windows.met/path"));
  // With no storage account set and a bogus SAS token in the environment,
  // construction of an otherwise well-formed URI must fail too
  if (getenv("AZURE_STORAGE_ACCOUNT")) {
    unsetenv( "AZURE_STORAGE_ACCOUNT");
  }
  std::string sas_token = "AZURE_STORAGE_SAS_TOKEN=non-existent-<PASSWORD>";
  CHECK(putenv(const_cast<char *>(sas_token.c_str())) == 0);
  CHECK_THROWS(new AzureBlob("az://my_container@my_account.blob.core.windows.net/path"));
}
TEST_CASE_METHOD(AzureBlobTestFixture, "Test AzureBlob cwd", "[cwd]") {
  if (azure_blob == nullptr) {
    return;  // Azure storage not configured for this run; skip silently
  }
  REQUIRE(azure_blob->current_dir().length() > 0);
  REQUIRE(azure_blob->create_dir(azure_blob->current_dir()) == TILEDB_FS_OK);
  // create_dir is a no-op for AzureBlob, so is_dir will return false
  CHECK(!azure_blob->is_dir(azure_blob->current_dir()));
  REQUIRE(!azure_blob->is_file(azure_blob->current_dir()));
  REQUIRE(azure_blob->real_dir(azure_blob->current_dir()).length() > 0);
}
TEST_CASE_METHOD(AzureBlobTestFixture, "Test AzureBlob real_dir", "[real-dir]") {
  if (azure_blob == nullptr) {
    return;  // Azure storage not configured for this run
  }
  // Relative paths resolve against the current working dir...
  CHECK(azure_blob->real_dir("").compare(azure_blob->current_dir()) == 0);
  CHECK(azure_blob->real_dir("xxx").compare(azure_blob->current_dir()+"/xxx") == 0);
  CHECK(azure_blob->real_dir("xxx/yyy").compare(azure_blob->current_dir()+"/xxx/yyy") == 0);
  // ...while absolute paths are returned with the leading slash stripped
  CHECK(azure_blob->real_dir("/xxx/yyy").compare("xxx/yyy") == 0);
  azure_uri test_uri(get_test_dir());
  CHECK(azure_blob->real_dir(get_test_dir()).compare(test_uri.path().substr(1)) == 0);
  // Unknown URI schemes are rejected
  CHECK_THROWS(azure_blob->real_dir("xxx://yyy"));
}
TEST_CASE_METHOD(AzureBlobTestFixture, "Test AzureBlob dir", "[dir]") {
  if (azure_blob == nullptr) {
    return;  // Azure storage not configured for this run
  }
  std::string test_dir("dir");
  CHECK_RC(azure_blob->create_dir(test_dir), TILEDB_FS_OK);
  // create_dir is a no-op for AzureBlob, so is_dir will return false
  CHECK(!azure_blob->is_dir(test_dir));
  CHECK(!azure_blob->is_file(test_dir));
  // Files can be created without parent dir existence on AzureBlob;
  // once a blob exists under the prefix, the "directory" becomes visible
  CHECK_RC(azure_blob->create_file(test_dir+"/foo", 0, 0), TILEDB_FS_OK);
  CHECK(azure_blob->is_dir(test_dir));
  CHECK(azure_blob->file_size(test_dir) == TILEDB_FS_ERR);  // dirs have no size
  CHECK(azure_blob->get_dirs(test_dir).size() == 0);
  CHECK(azure_blob->get_dirs("non-existent-dir").size() == 0);
  // TBD: move_path
  std::string new_dir = test_dir+"-new";
  CHECK_THROWS(azure_blob->move_path(test_dir, new_dir));
  CHECK_RC(azure_blob->sync_path(test_dir), TILEDB_FS_OK);
  // No support for returning errors for non-existent paths
  CHECK_RC(azure_blob->sync_path("non-existent-dir"), TILEDB_FS_OK);
  CHECK_RC(azure_blob->delete_dir(test_dir), TILEDB_FS_OK);
  // No support for returning errors for non-existent paths
  CHECK_RC(azure_blob->delete_dir("non-existent-dir"), TILEDB_FS_OK);
  CHECK(!azure_blob->is_dir(test_dir));
  CHECK(!azure_blob->is_file(test_dir));
}
TEST_CASE_METHOD(AzureBlobTestFixture, "Test AzureBlob file", "[file]") {
  if (azure_blob == nullptr) {
    return;  // Azure storage not configured for this run
  }
  std::string test_dir("file");
  CHECK_RC(azure_blob->create_dir(test_dir), 0);
  CHECK_RC(azure_blob->create_file(test_dir+"/foo", O_WRONLY|O_CREAT, S_IRWXU), TILEDB_FS_OK);
  CHECK(azure_blob->is_file(test_dir+"/foo"));
  CHECK(!azure_blob->is_dir(test_dir+"/foo"));
  // Cannot create_dir if file already exists
  CHECK(azure_blob->create_dir(test_dir+"/foo") == TILEDB_FS_ERR);
  CHECK(azure_blob->file_size(test_dir+"/foo") == 0);              // new file is empty
  CHECK(azure_blob->file_size(test_dir+"/foo1") == TILEDB_FS_ERR); // non-existent file
  CHECK(azure_blob->get_files(test_dir).size() == 1);
  CHECK(azure_blob->get_files("non-existent-dir").size() == 0);
  CHECK_RC(azure_blob->create_file(test_dir+"/foo1", O_WRONLY|O_CREAT, S_IRWXU), TILEDB_FS_OK);
  CHECK(azure_blob->get_files(test_dir).size() == 2);
  CHECK_RC(azure_blob->sync_path(test_dir+"/foo"), TILEDB_FS_OK);
  CHECK_RC(azure_blob->sync_path(test_dir), TILEDB_FS_OK);
  CHECK_RC(azure_blob->delete_file(test_dir+"/foo"), TILEDB_FS_OK);
  CHECK_RC(azure_blob->delete_file(test_dir+"/foo1"), TILEDB_FS_OK);
  // Deleting a file that was never created must fail
  CHECK_RC(azure_blob->delete_file(test_dir+"/foo2"), TILEDB_FS_ERR);
  CHECK(!azure_blob->is_file(test_dir+"/foo1"));
  CHECK(!azure_blob->is_file(test_dir+"/foo2"));
  CHECK(!azure_blob->is_dir(test_dir+"/foo1"));
  CHECK(!azure_blob->is_dir(test_dir+"/foo2"));
}
TEST_CASE_METHOD(AzureBlobTestFixture, "Test AzureBlob read/write file", "[read-write]") {
  if (azure_blob == nullptr) {
    return;  // Azure storage not configured for this run
  }
  std::string test_dir("read_write");
  CHECK_RC(azure_blob->create_dir(test_dir), TILEDB_FS_OK);
  CHECK_RC(azure_blob->write_to_file(test_dir+"/foo", "hello", 5), TILEDB_FS_OK);
  CHECK_RC(azure_blob->close_file(test_dir+"/foo"), TILEDB_FS_OK);  // close commits the blob
  REQUIRE(azure_blob->is_file(test_dir+"/foo"));
  CHECK(azure_blob->file_size(test_dir+"/foo") == 5);
  void *buffer = malloc(20);
  memset(buffer, 'X', 20);  // sentinel fill so unread bytes are detectable
  CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 0, buffer, 0), TILEDB_FS_OK);
  CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 0, buffer, 2), TILEDB_FS_OK);
  CHECK(((char *)buffer)[0] == 'h');
  CHECK(((char *)buffer)[1] == 'e');
  CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 0, buffer, 5), TILEDB_FS_OK);
  CHECK(((char *)buffer)[4] == 'o');
  // Reading past filesize does not seem to affect download_blob_to_stream/buffer. It
  // returns successfully even though Posix/HDFS/S3 data stores behave differently. We
  // could make the behavior identical on the stores, but for now leaving it to the clients
  // to not read past the file size.
  CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 0, buffer, 6), TILEDB_FS_OK);
  CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 3, buffer, 2), TILEDB_FS_OK);
  CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 3, buffer, 6), TILEDB_FS_OK);
  // ...but reads that *start* beyond the end of the file do fail
  CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 6, buffer, 2), TILEDB_FS_ERR);
  // A fresh write sequence replaces the blob: two buffered writes then close
  // yield a size of 11 (5 + 6), not 5 + 11
  CHECK_RC(azure_blob->write_to_file(test_dir+"/foo", "hello", 5), TILEDB_FS_OK);
  CHECK_RC(azure_blob->write_to_file(test_dir+"/foo", " there ", 6), TILEDB_FS_OK);
  CHECK_RC(azure_blob->close_file(test_dir+"/foo"), TILEDB_FS_OK);
  CHECK(azure_blob->file_size(test_dir+"/foo") == 11);
  CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 0, buffer, 11), TILEDB_FS_OK);
  CHECK(((char *)buffer)[10] == 'e');
  CHECK_RC(azure_blob->close_file(test_dir+"/foo"), TILEDB_FS_OK);  // double close is OK
  CHECK_RC(azure_blob->delete_file(test_dir+"/foo"), TILEDB_FS_OK);
  CHECK_RC(azure_blob->sync_path(test_dir), TILEDB_FS_OK);
  CHECK_RC(azure_blob->read_from_file(test_dir+"/non-existent-file", 0, buffer, 5), TILEDB_FS_ERR);
  CHECK_RC(azure_blob->read_from_file("non-existent-dir/foo", 0, buffer, 5), TILEDB_FS_ERR);
  // AzureBlob can write to non-existent dirs - create_dir really is a no-op
  CHECK_RC(azure_blob->write_to_file("non-existent-dir/foo", "hello", 5), TILEDB_FS_OK);
  CHECK_RC(azure_blob->close_file("non-existent-dir/foo"), TILEDB_FS_OK);
  free(buffer);
}
TEST_CASE_METHOD(AzureBlobTestFixture, "Test AzureBlob large read/write file", "[read-write-large]") {
  if (azure_blob == nullptr) {
    return;  // Azure storage not configured for this run
  }
  std::string test_dir("read_write_large");
  // size_t size = ((size_t)TILEDB_UT_MAX_WRITE_COUNT)*4
  size_t size = ((size_t)TILEDB_UT_MAX_WRITE_COUNT);  // one maximum-sized write
  void *buffer = malloc(size);
  if (buffer) {  // checks are skipped silently if the big allocation fails
    memset(buffer, 'B', size);
    REQUIRE(azure_blob->create_dir(test_dir) == TILEDB_FS_OK);
    CHECK_RC(azure_blob->write_to_file(test_dir+"/foo", buffer, size), TILEDB_FS_OK);
    CHECK_RC(azure_blob->close_file(test_dir+"/foo"), TILEDB_FS_OK);
    CHECK(azure_blob->is_file(test_dir+"/foo"));
    CHECK((size_t)azure_blob->file_size(test_dir+"/foo") == size);
    // Read the whole blob back and verify it round-tripped intact
    void *buffer1 = malloc(size);
    if (buffer1) {
      memset(buffer1, 0, size);
      CHECK_RC(azure_blob->read_from_file(test_dir+"/foo", 0, buffer1, size), TILEDB_FS_OK);
      CHECK(memcmp(buffer, buffer1, size) == 0);
      free(buffer1);
    }
    free(buffer);
  }
  // NOTE(review): unlike the other cases, this test leaves its blob behind
  // (no delete_file/delete_dir cleanup) -- confirm this is intentional.
}
TEST_CASE_METHOD(AzureBlobTestFixture, "Test AzureBlob parallel operations", "[parallel]") {
  if (azure_blob == nullptr) {
    return;  // Azure storage not configured for this run
  }
  std::string test_dir("parallel");
  REQUIRE(azure_blob->create_dir(test_dir) == TILEDB_FS_OK);
  bool complete = true;          // cleared if any allocation fails
  uint iterations = 2;
  size_t size = 10*1024*1024;    // 10MiB per write, two writes per file
  // Each OpenMP thread writes its own file, so writers don't contend on a
  // single blob. `complete` may be stored from several threads, but only
  // ever true -> false, so the unsynchronized store is benign here.
  #pragma omp parallel for
  for (uint i=0; i<iterations; i++) {
    std::string filename = test_dir+"/foo"+std::to_string(i);
    for (auto j=0; j<2; j++) {
      void *buffer = malloc(size);
      if (buffer) {
        memset(buffer, 'X', size);
        CHECK_RC(azure_blob->write_to_file(filename, buffer, size), TILEDB_FS_OK);
        free(buffer);
      } else {
        complete = false;
      }
    }
  }
  CHECK_RC(azure_blob->sync_path(test_dir), TILEDB_FS_OK);
  if (complete) {
    // Close the files (committing the uploads) and verify both writes landed
    #pragma omp parallel for
    for (uint i=0; i<iterations; i++) {
      std::string filename = test_dir+"/foo"+std::to_string(i);
      CHECK_RC(azure_blob->close_file(filename), TILEDB_FS_OK);
      CHECK(azure_blob->is_file(filename));
      CHECK((size_t)azure_blob->file_size(filename) == size*2);
    }
  }
  CHECK_RC(azure_blob->delete_dir(test_dir), TILEDB_FS_OK);
}
|
package se.sundsvall.messaging.integration.smssender;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.web.client.HttpStatusCodeException;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;
import org.zalando.problem.Problem;
import org.zalando.problem.Status;
import se.sundsvall.messaging.dto.SmsDto;
import se.sundsvall.messaging.integration.AbstractRestIntegration;
@Component
public class SmsSenderIntegration extends AbstractRestIntegration {

    private final SmsSenderIntegrationMapper mapper;
    private final RestTemplate restTemplate;

    public SmsSenderIntegration(final SmsSenderIntegrationMapper mapper,
            @Qualifier("integration.sms-sender.resttemplate") final RestTemplate restTemplate) {
        this.mapper = mapper;
        this.restTemplate = restTemplate;
    }

    /**
     * Sends an SMS via the sms-sender service.
     *
     * @param smsDto the SMS to send
     * @return the remote service's response; the body indicates whether the SMS was accepted
     * @throws org.zalando.problem.ThrowableProblem with status BAD_GATEWAY when the call fails
     */
    public ResponseEntity<Boolean> sendSms(final SmsDto smsDto) {
        var request = mapper.toSendSmsRequest(smsDto);

        try {
            return restTemplate.postForEntity("/send/sms", createRequestEntity(request), Boolean.class);
        } catch (HttpStatusCodeException e) {
            // The remote service answered with an error status: surface it as the cause
            throw Problem.builder()
                .withTitle("Exception when calling SmsSender")
                .withStatus(Status.BAD_GATEWAY)
                .withDetail(e.getMessage())   // FIX: previously the exception's message was discarded
                .withCause(Problem.builder()
                    .withStatus(Status.valueOf(e.getRawStatusCode()))
                    .build())
                .build();
        } catch (RestClientException e) {
            // Transport-level failure (connect/read timeout etc.); no remote status available
            throw Problem.builder()
                .withTitle("Exception when calling SmsSender")
                .withStatus(Status.BAD_GATEWAY)
                .withDetail(e.getMessage())   // FIX: previously the exception's message was discarded
                .build();
        }
    }
}
|
// Source: StuntsPT/BangleApps
var Math2 = require("solar_math_utils.js");
const _MIN_MILLIS = 1000 * 60 ;
const _HOUR_MILLIS = _MIN_MILLIS * 60;
const _DAY_MILLIS = _HOUR_MILLIS * 24;
// Return a copy of `now` on the same calendar day with the clock set to
// 12:00 plus the local UTC offset (sub-hour fields zeroed). This is the
// reference instant fed into the Julian-day conversion -- presumably meant
// to normalize away the timezone; confirm the sign convention if reused.
function _start_of_julian_day(now){
  var noon = new Date(now.getTime());
  var tz_hours = noon.getTimezoneOffset() / 60;
  noon.setHours(12 + tz_hours, 0, 0, 0);
  return noon;
}
// Convert a Date (local calendar fields) to its Julian Day Number using the
// Fliegel-Van Flandern algorithm.
// BUG FIX: the published formula requires *truncating integer* division at
// every step; the previous version used JS floating-point division
// throughout and only truncated the final sum, which can land a day off.
// `| 0` truncates toward zero (like C integer division) and is safe here
// because every intermediate stays well below 2^31.
function _date_to_julian_date(now){
  var year = now.getFullYear();
  var month = now.getMonth() + 1;
  var day = now.getDate();
  // Jan/Feb count as months 13/14 of the previous year: a is -1 for them, 0 otherwise
  var a = ((month - 14) / 12) | 0;
  var julian_date = (((1461 * (year + 4800 + a)) / 4) | 0)
    + (((367 * (month - 2 - 12 * a)) / 12) | 0)
    - (((3 * (((year + 4900 + a) / 100) | 0)) / 4) | 0)
    + day - 32075;
  return julian_date;
}
function _to_time(now,day_fraction){
var datetime = new Date(now.getTime());
var hours = (day_fraction * 24) |0;
var remainder = day_fraction - hours/24;
var mins = (remainder * 24 * 60 | 0);
var remainder = remainder - mins/(24*60);
var secs = (remainder * 24 * 60 * 60 | 0);
var remainder = remainder - secs /(24 * 60 * 60);
var millis = remainder * 24 * 60 * 60 * 1000;
datetime.setHours(hours, mins, secs,millis);
return datetime;
}
const DateUtils = {
  DAY_MILLIS : _DAY_MILLIS,
  HOUR_MILLIS : _HOUR_MILLIS,
  MIN_MILLIS: _MIN_MILLIS,
  // calculate the sunrise and sunset information using the NOAA
  // equations.
  //
  // now        - Date identifying the local day of interest
  // longitude  - observer longitude in degrees (east positive)
  // latitude   - observer latitude in degrees (north positive)
  // utc_offset - offset from UTC in hours; when null it is derived from the
  //              runtime timezone (getTimezoneOffset() reports minutes WEST
  //              of UTC, hence the sign flip below)
  //
  // Returns { day_start, sunrise_date, sunset_date, solar_noon, day_end }
  // as Dates, where day_start/day_end bracket solar noon by half a day.
  sunrise_sunset: (now,longitude,latitude, utc_offset)=>{
    // Julian date of this day's reference noon, then Julian centuries
    // since the J2000 epoch (JDN 2451545).
    var sod_julian = _start_of_julian_day(now);
    var julian_date = _date_to_julian_date(sod_julian);
    //console.log("julian date=" + julian_date);
    //var n = julian_date - 2451545.0 + 0.0008;
    var julian_century = (julian_date-2451545)/36525;
    //console.log("julian_century=" + julian_century);
    // Mean solar position terms (degrees).
    var geom_mean_long_sun_degrees = (280.46646+julian_century*(36000.76983 + julian_century*0.0003032)) % 360;
    //console.log("geom_mean_long_sun=" + geom_mean_long_sun_degrees);
    var geom_mean_anomaly_sun_degrees = 357.52911+julian_century*(35999.05029 - 0.0001537*julian_century);
    //console.log("solar_mean_anomaly_sun=" + geom_mean_anomaly_sun_degrees);
    var eccent_earth_orbit = 0.016708634-julian_century*(0.000042037+0.0000001267*julian_century);
    //console.log("eccent_earth_orbit=" + eccent_earth_orbit);
    // Equation of center: corrects the mean anomaly for orbital eccentricity.
    var sun_eq_of_ctr = Math.sin(Math2.to_radians(geom_mean_anomaly_sun_degrees))*
      (1.914602-julian_century*(0.004817+0.000014*julian_century))+
      Math.sin(Math2.to_radians(2*geom_mean_anomaly_sun_degrees))*(0.019993-0.000101*julian_century)+
      Math.sin(Math2.to_radians(3*geom_mean_anomaly_sun_degrees))*0.000289;
    //console.log("sun_eq_of_ctr=" + sun_eq_of_ctr);
    var sun_true_long_degrees = geom_mean_long_sun_degrees + sun_eq_of_ctr;
    //console.log("sun_true_long_degrees=" + sun_true_long_degrees);
    var sun_true_anom_degrees = geom_mean_anomaly_sun_degrees + sun_eq_of_ctr;
    //console.log("sun_true_anom_degrees=" + sun_true_anom_degrees);
    // Earth-sun distance in AU (computed but not used further below).
    var sun_rad_vector_AUs = (1.000001018*(1-eccent_earth_orbit*eccent_earth_orbit))/(1+eccent_earth_orbit*Math.cos(Math2.to_radians(sun_true_anom_degrees)))
    //console.log("sun_rad_vector_AUs=" + sun_rad_vector_AUs);
    // Apparent longitude: true longitude corrected for aberration/nutation.
    var sun_app_long_degress = sun_true_long_degrees-0.00569-0.00478*Math.sin(Math2.to_radians(125.04-1934.136*julian_century));
    //console.log("sun_app_long_degress=" + sun_app_long_degress);
    var mean_obliq_ecliptic_degrees = 23+(26+((21.448-julian_century*(46.815+julian_century*(0.00059-julian_century*0.001813))))/60)/60;
    //console.log("mean_obliq_ecliptic_degrees=" + mean_obliq_ecliptic_degrees);
    var obliq_corr_degrees = mean_obliq_ecliptic_degrees+0.00256*Math.cos(Math2.to_radians(125.04-1934.136*julian_century))
    //console.log("obliq_corr_degrees=" + obliq_corr_degrees);
    // Solar declination: the sun's angle relative to the celestial equator.
    var sun_declin_degrees = Math2.to_degrees(
      Math.asin(Math.sin(Math2.to_radians(obliq_corr_degrees))*Math.sin(Math2.to_radians(sun_app_long_degress)))
    );
    //console.log("sun_declin_degrees=" + sun_declin_degrees);
    var var_y = Math.tan(Math2.to_radians(obliq_corr_degrees/2))*Math.tan(Math2.to_radians(obliq_corr_degrees/2));
    //console.log("var_y=" + var_y);
    // Equation of time (minutes): apparent minus mean solar time.
    var eq_of_time = 4*Math2.to_degrees(
      var_y*Math.sin(2*Math2.to_radians(geom_mean_long_sun_degrees))-
      2*eccent_earth_orbit*Math.sin(Math2.to_radians(geom_mean_anomaly_sun_degrees))+
      4*eccent_earth_orbit*var_y*Math.sin(Math2.to_radians(geom_mean_anomaly_sun_degrees))*Math.cos(2*Math2.to_radians(geom_mean_long_sun_degrees))-
      0.5*var_y*var_y*Math.sin(4*Math2.to_radians(geom_mean_long_sun_degrees))-
      1.25*eccent_earth_orbit*eccent_earth_orbit*Math.sin(2*Math2.to_radians(geom_mean_anomaly_sun_degrees))
    );
    //console.log("eq_of_time=" + eq_of_time);
    // Hour angle of sunrise; 90.833 degrees accounts for atmospheric
    // refraction plus the solar disc radius.
    // NOTE(review): Math.acos returns NaN when its argument leaves [-1, 1]
    // (midnight sun / polar night above the polar circles) -- confirm how
    // callers handle the resulting invalid Dates.
    var HA_sunrise_degrees = Math2.to_degrees(
      Math.acos(
        Math.cos(Math2.to_radians(90.833))/(Math.cos(Math2.to_radians(latitude))*Math.cos(Math2.to_radians(sun_declin_degrees)))-
        Math.tan(Math2.to_radians(latitude))*Math.tan(Math2.to_radians(sun_declin_degrees))
      )
    );
    //console.log("HA_sunrise_degrees=" + HA_sunrise_degrees);
    var local_offset_hours = new Date().getTimezoneOffset()/60;
    if(utc_offset == null){
      utc_offset = -local_offset_hours;
    }
    var timezone_offset_hours = utc_offset; //(utc_offset - local_offset_hours);
    console.log("timezone_offset_hours=" + timezone_offset_hours +
      " longitude" + longitude +
      " utc_offset=" + utc_offset
    );
    // The *_LST values below are fractions of a day in local solar time;
    // _to_time() turns each fraction back into a Date on this day.
    var solar_noon = (720-4*longitude-eq_of_time+timezone_offset_hours*60)/1440;
    var solar_noon_datetime = _to_time(now,solar_noon);
    console.log("solar_noon=" + solar_noon + "->" + solar_noon_datetime.toISOString());
    var sunrise_time_LST = (solar_noon*1440-HA_sunrise_degrees*4)/1440;
    var sunrise_time_LST_datetime = _to_time(now,sunrise_time_LST);
    console.log("sunrise_time_LST=" + sunrise_time_LST +
      "->" + sunrise_time_LST_datetime.toISOString());
    var sunset_time_LST =(solar_noon*1440+HA_sunrise_degrees*4)/1440;
    var sunset_time_LST_datetime = _to_time(now,sunset_time_LST);
    console.log("sunset_time_LST=" + sunset_time_LST +
      "->" + sunset_time_LST_datetime.toISOString());
    return {
      day_start: new Date(solar_noon_datetime.getTime() - _DAY_MILLIS / 2),
      sunrise_date: sunrise_time_LST_datetime,
      //sunrise_fraction: sunrise_time_LST,
      sunset_date: sunset_time_LST_datetime,
      //sunset_fraction: sunset_time_LST,
      solar_noon: solar_noon_datetime,
      day_end: new Date(solar_noon_datetime.getTime() + _DAY_MILLIS / 2)
    };
  },
  // Fraction (0..1) of the solar day described by `day_info` that `now`
  // falls at, measured from day_info.day_start.
  now_fraction_of_day: (now,day_info)=>{
    return (now.getTime() - day_info.day_start.getTime())/_DAY_MILLIS;
  },
}
module.exports = DateUtils; |
// officialAccount.js
// A minimal follower counter for an official account. The count never
// drops below zero, no matter how often unfollow() is called.
const OfficialAccount = {
  name: "ExampleOfficialAccount",
  followers: 0,
  follow() {
    this.followers += 1;
  },
  unfollow() {
    const next = this.followers - 1;
    this.followers = next < 0 ? 0 : next;
  }
};
export { OfficialAccount };
export default OfficialAccount;
class Pairsum:
    """Counts pairs of elements in a sequence that sum to a target value."""

    def __init__(self):
        pass

    def pair_sum_count(self, arr, target):
        """Return the number of index pairs (i < j) with arr[i] + arr[j] == target.

        Single pass with a running frequency map: each element closes one
        pair for every previously-seen occurrence of its complement. O(n)
        time, O(n) space.
        """
        counts = {}
        pairs = 0
        for value in arr:
            pairs += counts.get(target - value, 0)
            counts[value] = counts.get(value, 0) + 1
        return pairs
<gh_stars>0
# Clamp every element of L to at most 100, rewriting the list in place
# (slice assignment keeps the same list object, so aliases see the change).
# NOTE(review): L is not defined in this fragment -- presumably bound by the
# surrounding context; confirm before reuse.
L[:] = [min(x,100) for x in L]
|
/* Copyright 2014 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package com.bonet.views;
import android.view.View;
/**
* The BtMonthViewProvider class is responsible for creating, and handling,
* the month view of the calendar. It also provides the necessary framework to
* create custom providers.
*
* @author <NAME>
*/
public abstract class BtMonthViewProvider {
    /* The month being displayed */
    private BtMonth mMonth;
    /* The listener for date selection (may be null) */
    private OnDateSelectedListener mListener;
    /* The minimum day to be displayed. Initialized to the sentinel used by
     * unsetMinDate() so getMinDate() never returns null before setMinDate()
     * has been called. */
    private BtDate mMinDate = BtDate.MIN_BTDATE;
    /* The maximum day to be displayed. Initialized to the sentinel used by
     * unsetMaxDate() so getMaxDate() never returns null before setMaxDate()
     * has been called. */
    private BtDate mMaxDate = BtDate.MAX_BTDATE;

    /**
     * Creates a provider for the current month.
     */
    public BtMonthViewProvider(){
        mMonth = BtMonth.fromToday();
    }

    /**
     * Creates a provider for the month specified.
     * @param month the month to display
     */
    public BtMonthViewProvider(BtMonth month){
        mMonth = month;
    }

    /**
     * Sets the month to be shown and refreshes the view.
     * @param month the month to display
     */
    public void setMonth(BtMonth month){
        mMonth = month;
        updateView();
    }

    /**
     * This method provides the view that will be used to display
     * the month content.
     * @return The view
     */
    public abstract View getView();

    /**
     * Refreshes the current state of the provider's view.
     */
    public abstract void updateView();

    /**
     * @return The title for the current month
     */
    public String getTitle() {
        return mMonth.toString();
    }

    /**
     * Registers the listener for date selection.
     * @param listener the callback to notify, or null to clear it
     */
    public void setOnDateSelectedListener(OnDateSelectedListener listener){
        mListener = listener;
    }

    /**
     * Performs the event where a day was selected by the user,
     * specified the day.
     *
     * @param day the day that was selected
     */
    protected void selectDay(BtDate day) {
        selectDay(day.getYear(), day.getMonth(), day.getDay());
    }

    /**
     * Event where a day was selected by the user, specified by year, month
     * and day. Forwarded to the registered listener, if any.
     *
     * @param year the selected year
     * @param month the selected month
     * @param day the selected day of month
     */
    protected void selectDay(int year, int month, int day) {
        if(mListener!=null)
            mListener.onDateSelected(year, month, day);
    }

    /**
     * The provider's current month
     * @return the current month
     */
    public BtMonth getMonth() {
        return mMonth;
    }

    /**
     * Sets the max date.
     * @param date the date
     */
    public void setMaxDate(BtDate date){
        mMaxDate = date;
    }

    /**
     * Removes the calendar's max date if there was any.
     */
    public void unsetMaxDate() {
        mMaxDate = BtDate.MAX_BTDATE;
    }

    /**
     * @return The calendar's max day
     */
    public BtDate getMaxDate(){
        return mMaxDate;
    }

    /**
     * Sets the min date.
     * @param date the date
     */
    public void setMinDate(BtDate date){
        mMinDate = date;
    }

    /**
     * Removes the min date if there was any.
     */
    public void unsetMinDate() {
        mMinDate = BtDate.MIN_BTDATE;
    }

    /**
     * @return The calendar's min day
     */
    public BtDate getMinDate(){
        return mMinDate;
    }
}
|
/**
 * Auto-manage dialog height (original note: 弹窗高度自动管理).
 *
 * Ractive decorator: give a vertically-centered dialog an explicit CSS
 * height (its rendered height plus an optional extra offset) and pull it
 * up by half that height with a negative margin-top.
 *
 * Fixes two bugs in the previous version:
 *  - `offset` was null-guarded in the height computation but not in the
 *    css() call, so calling resize(node) without an offset produced
 *    'NaNpx' (outerHeight + undefined is NaN);
 *  - a supplied offset was added TWICE to the CSS height (once into
 *    `height`, then again in `height + offset`) while margin-top used it
 *    only once, de-centering the dialog.
 */
define(['jquery', 'ractive'], function($, Ractive) {
  var resize = function(node, offset) {
    var el = $(node)
    // Normalize the optional offset exactly once.
    var extra = offset || 0
    var height = el.outerHeight() + extra
    el.css({
      'height': height + 'px',
      'margin-top': (-1 * height / 2) + 'px'
    })
    return {
      teardown: function() {
        // Drop the jQuery reference so the node can be collected.
        el = null
      }
    }
  }
  Ractive.decorators.dialogresize = resize
  return resize
})
|
#!/usr/bin/env bash
# Regenerate Go code from the *.schema.json files in this directory:
#  1. build the two generators (go-jsonschema-compiler, stringdata),
#  2. compile all schemas into Go types (schema.go),
#  3. embed each schema's raw JSON as a Go string constant,
#  4. gofmt the results.
set -e
# Use .bin outside of schema since schema dir is watched by watchman.
export GOBIN="$PWD/../.bin"
export GO111MODULE=on
go install github.com/sourcegraph/go-jsonschema/cmd/go-jsonschema-compiler
go build -o "$GOBIN"/stringdata stringdata.go
# Every schema except the JSON-Schema draft definition itself.
# shellcheck disable=SC2010
schemas="$(ls -- *.schema.json | grep -v json-schema-draft)"
# Word-splitting of $schemas is intentional: one argument per schema file.
# shellcheck disable=SC2086
"$GOBIN"/go-jsonschema-compiler -o schema.go -pkg schema $schemas
# stringdata FILE CONST: embed FILE's raw JSON as Go string constant CONST
# in the sibling file FILE with .schema.json replaced by _stringdata.go.
stringdata() {
# shellcheck disable=SC2039
target="${1/.schema.json/_stringdata.go}"
"$GOBIN"/stringdata -i "$1" -name "$2" -pkg schema -o "$target"
}
stringdata aws_codecommit.schema.json AWSCodeCommitSchemaJSON
stringdata bitbucket_cloud.schema.json BitbucketCloudSchemaJSON
stringdata bitbucket_server.schema.json BitbucketServerSchemaJSON
stringdata campaign_spec.schema.json CampaignSpecSchemaJSON
stringdata changeset_spec.schema.json ChangesetSpecSchemaJSON
stringdata github.schema.json GitHubSchemaJSON
stringdata gitlab.schema.json GitLabSchemaJSON
stringdata gitolite.schema.json GitoliteSchemaJSON
stringdata other_external_service.schema.json OtherExternalServiceSchemaJSON
stringdata phabricator.schema.json PhabricatorSchemaJSON
stringdata settings.schema.json SettingsSchemaJSON
stringdata site.schema.json SiteSchemaJSON
gofmt -s -w ./*.go
|
<filename>src/main/java/com/cc/mybatis/common/DataSources.java<gh_stars>0
package com.cc.mybatis.common;
import org.apache.ibatis.datasource.unpooled.UnpooledDataSource;
import org.apache.ibatis.io.Resources;
import java.io.IOException;
import java.util.Optional;
import java.util.Properties;
/**
* @author chenchong
* @create 2021/2/5 8:13 下午
* @description
*/
public class DataSources {
private static final String DB_CONF = "jdbc.properties";
public static UnpooledDataSource createUnpooledDataSource(String resource) throws IOException {
Properties props = Resources.getResourceAsProperties(Optional.ofNullable(resource).orElse(DB_CONF));
UnpooledDataSource ds = new UnpooledDataSource();
ds.setDriver(props.getProperty("driver"));
ds.setUrl(props.getProperty("url"));
ds.setUsername(props.getProperty("username"));
ds.setPassword(<PASSWORD>("password"));
return ds;
}
}
|
<filename>project/gardener/src/main/java/kr/co/gardener/admin/model/user/Bookmark.java
package kr.co.gardener.admin.model.user;
import java.util.Date;
import org.springframework.format.annotation.DateTimeFormat;
import com.fasterxml.jackson.annotation.JsonFormat;
import kr.co.gardener.util.GridSystem;
/**
 * A bookmark: a product a user has saved, together with the company that
 * offers it and the time the bookmark was created.
 * Extends GridSystem (paging/grid support inherited from the base class).
 */
public class Bookmark extends GridSystem {

    // Who bookmarked, and when.
    private String userId;
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy.MM.dd HH:mm:ss",timezone="Asia/Seoul")
    @DateTimeFormat(pattern = "yyyy.MM.dd HH:mm:ss")
    private Date bookmarkDatetime;

    // The bookmarked product.
    private String productId;
    private String productName;
    private String productImage;

    // The company offering the product.
    private String companyId;
    private String companyName;

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public Date getBookmarkDatetime() {
        return bookmarkDatetime;
    }

    public void setBookmarkDatetime(Date bookmarkDatetime) {
        this.bookmarkDatetime = bookmarkDatetime;
    }

    public String getProductId() {
        return productId;
    }

    public void setProductId(String productId) {
        this.productId = productId;
    }

    public String getProductName() {
        return productName;
    }

    public void setProductName(String productName) {
        this.productName = productName;
    }

    public String getProductImage() {
        return productImage;
    }

    public void setProductImage(String productImage) {
        this.productImage = productImage;
    }

    public String getCompanyId() {
        return companyId;
    }

    public void setCompanyId(String companyId) {
        this.companyId = companyId;
    }

    public String getCompanyName() {
        return companyName;
    }

    public void setCompanyName(String companyName) {
        this.companyName = companyName;
    }
}
|
import platform
from oleparser import OleParser
from hwpparser import HWPParser # Assuming the HWPParser class is provided
class HWPScanner:
    """Scanner for HWP documents stored in an OLE container.

    Uses OleParser for container access and HWPParser for content access.
    All failures are reported via print() and converted into empty results,
    never raised to the caller.
    """

    def __init__(self) -> None:
        self.__platform__ = platform.platform()  # host platform string (informational)
        self.hwpx_flag = False    # True once a parsable body stream has been found
        self.ole_parser = OleParser()
        self.hwp_parser = None    # attached by parse_hwpdoc() on success
        self.file_name = None     # last file handed to parse_hwpdoc()

    def parse_hwpdoc(self, file_name):
        """Open the OLE container; attach a HWPParser if a body stream exists.

        NOTE(review): assumes 'BodyText/Section0' marks a parsable document
        -- confirm against HWPParser's expectations.
        """
        self.file_name = file_name
        try:
            self.ole_parser.parse(file_name)
            if self.ole_parser.exists('BodyText/Section0'):
                stream = self.ole_parser.openstream('BodyText/Section0')
                self.hwp_parser = HWPParser(stream)
                self.hwpx_flag = True
        except Exception as e:
            # Best effort: a failed parse leaves the scanner unflagged.
            print(f"Error parsing HWP document: {e}")

    def extract_text(self):
        """Return the document body text, or '' when unparsed or on failure."""
        if not (self.hwpx_flag and self.hwp_parser):
            return ""
        try:
            return self.hwp_parser.get_text()
        except Exception as e:
            print(f"Error extracting text from HWP document: {e}")
        return ""

    def extract_metadata(self):
        """Return a dict of document metadata; empty when unparsed or on failure."""
        metadata = {}
        if self.hwpx_flag and self.hwp_parser:
            try:
                metadata['title'] = self.hwp_parser.get_title()
                metadata['author'] = self.hwp_parser.get_author()
                metadata['creation_date'] = self.hwp_parser.get_creation_date()
                # Add more metadata attributes as needed
            except Exception as e:
                print(f"Error extracting metadata from HWP document: {e}")
        return metadata
/**
 * Bootstrap-styled replacement UI for every <audio controls> element on
 * the page. For each element: hide the native controls and insert, just
 * before it, a custom player box with play/pause, seek bar, time display,
 * mute and volume controls, plus an optional collapsible details section
 * driven by the element's data-info-* attributes.
 * Depends on jQuery, Bootstrap (collapse/tooltip) and Font Awesome icons.
 */
(function($) {
  'use strict';
  // .before(fn): the returned player_box is inserted before each audio tag.
  $('audio[controls]').before(function(){
    var song = this;
    // Take over from the browser's built-in UI.
    song.controls=false;
    // Outer container; inherits the audio element's own classes.
    var player_box = document.createElement('div');
    $(player_box).addClass($(song).attr('class') + ' well container-fluid playa');
    // Collapsible details section (album art + metadata table).
    var data_sec = document.createElement('section');
    $(data_sec).addClass('collapse');
    var toggle_holder = document.createElement('div');
    $(toggle_holder).addClass('btn-group row-fluid');
    // Button that expands/collapses the details section.
    var data_toggle = document.createElement('a');
    $(data_toggle).html('<i class="icon-reorder"></i>');
    $(data_toggle).addClass('btn btn-block');
    $(data_toggle).attr('style', 'opacity:0.3');
    $(data_toggle).click(function (){$(data_sec).collapse('toggle');});
    $(data_toggle).attr('title', 'Details');
    $(data_toggle).tooltip({'container': 'body', 'placement': 'top', 'html': true});
    $(toggle_holder).append(data_toggle);
    // Metadata rows (artist, title, ...) are appended to this table.
    var data_table = document.createElement('table');
    $(data_table).addClass('table table-condensed');
    // Row of transport controls.
    var player = document.createElement('section');
    $(player).addClass('btn-group row-fluid');
    // Put the whole player into a disabled error state (used when the
    // audio fails to load or never becomes playable).
    var load_error = function(){
      console.log('error');
      $(player_box).find('.btn').addClass('disabled');
      $(player_box).find('input[type="range"]').hide();
      $(player_box).find('.icon-spin').text('Error');
      $(player_box).find('.icon-spin').parent().attr('title', 'There was an error loading the audio.');
      $(player_box).find('.icon-spin').parent().tooltip('fixTitle');
      $(player_box).find('.icon-spin').removeClass('icon-spinner icon-spin');
    };
    // Play/pause toggle button. Starts disabled until the audio reports a
    // usable duration; a 50ms poll (up to 75 tries) watches for load
    // failure since not all browsers fire an error event here.
    var addPlay = function() {
      var play = document.createElement('button');
      $(play).addClass('btn disabled span1');
      // NOTE(review): click() adds a new handler on every state flip
      // without unbinding the previous one -- relies on play/pause being
      // idempotent; confirm before reuse.
      play.setPlayState = function(toggle){
        $(play).removeClass('disabled');
        if (toggle === 'play') {
          $(play).html('<i class="icon-play"></i>');
          $(play).click(function () {
            song.play();
          });
        }
        if (toggle === 'pause') {
          $(play).html('<i class="icon-pause"></i>');
          $(play).click(function () {
            song.pause();
          });
        }
      };
      $(song).on('play', function(){play.setPlayState('pause');});
      $(song).on('canplay', function(){play.setPlayState('play');});
      $(song).on('pause', function(){play.setPlayState('play');});
      var timeout = 0;
      var loadCheck = setInterval(function() {
        if(isNaN(song.duration) === false){
          play.setPlayState('play');
          clearInterval(loadCheck);
          return true;
        }
        // networkState 3 = NETWORK_NO_SOURCE; 75 * 50ms = ~3.75s timeout.
        if(song.networkState === 3 || timeout === 75){
          load_error();
          clearInterval(loadCheck);
          return false;
        }
        timeout++;
      }, 50);
      $(player).append(play);
    };
    // Seek slider. Playback progress and buffered ranges are painted onto
    // the range input's background as a layered linear-gradient (WebKit
    // only; the other vendor-prefixed variants are intentionally left
    // commented out below).
    var addSeek = function() {
      var seek = document.createElement('input');
      $(seek).attr({
        'type': 'range',
        'min': 0,
        'value': 0,
        'class': 'seek'
      });
      // Rebuild the gradient: green up to currentTime, then a blue band
      // for each buffered range past the playhead.
      seek.progress = function () {
        var bg = 'rgba(223, 240, 216, 1) 0%';
        bg += ', rgba(223, 240, 216, 1) ' + ((song.currentTime/song.duration) * 100) + '%';
        bg += ', rgba(223, 240, 216, 0) ' + ((song.currentTime/song.duration) * 100) + '%';
        for (var i=0; i<song.buffered.length; i++){
          if (song.buffered.end(i) > song.currentTime && isNaN(song.buffered.end(i)) === false && isNaN(song.buffered.start(i)) === false){
            var bufferedstart;
            var bufferedend;
            if (song.buffered.end(i) < song.duration) {
              bufferedend = ((song.buffered.end(i)/song.duration) * 100);
            }
            else {
              bufferedend = 100;
            }
            if (song.buffered.start(i) > song.currentTime){
              bufferedstart = ((song.buffered.start(i)/song.duration) * 100);
            }
            else {
              bufferedstart = ((song.currentTime/song.duration) * 100);
            }
            bg += ', rgba(217, 237, 247, 0) ' + bufferedstart + '%';
            bg += ', rgba(217, 237, 247, 1) ' + bufferedstart + '%';
            bg += ', rgba(217, 237, 247, 1) ' + bufferedend + '%';
            bg += ', rgba(217, 237, 247, 0) ' + bufferedend + '%';
          }
        }
        $(seek).css('background', '-webkit-linear-gradient(left, ' + bg + ')');
        //These may be re-enabled when/if other browsers support the background like webkit
        //$(seek).css('background','-o-linear-gradient(left, ' + bg + ')');
        //$(seek).css('background','-moz-linear-gradient(left, ' + bg + ')');
        //$(seek).css('background','-ms-linear-gradient(left, ' + bg + ')');
        //$(seek).css('background','linear-gradient(to right, ' + bg + ')');
        $(seek).css('background-color', '#ddd');
      };
      // Sync the slider position from the audio element.
      seek.set = function () {
        $(seek).val(song.currentTime);
        seek.progress();
      };
      // Sync the audio position from the slider (user drag).
      seek.slide = function () {
        song.currentTime = $(seek).val();
        seek.progress();
      };
      // (Re)calibrate the slider range once duration is known.
      seek.init = function () {
        $(seek).attr({
          'max': song.duration,
          'step': song.duration / 100
        });
        seek.set();
      };
      // On 'ended': rewind, and either stop or restart if looping.
      seek.reset = function () {
        $(seek).val(0);
        song.currentTime = $(seek).val();
        if(!song.loop){song.pause();}
        else {song.play();}
      };
      var seek_wrapper = document.createElement('div');
      $(seek_wrapper).addClass('btn disabled span4');
      $(seek_wrapper).append(seek);
      $(seek).on('change', seek.slide);
      $(song).on('timeupdate', seek.init);
      $(song).on('loadedmetadata', seek.init);
      $(song).on('loadeddata', seek.init);
      $(song).on('progress', seek.init);
      $(song).on('canplay', seek.init);
      $(song).on('canplaythrough', seek.init);
      $(song).on('ended', seek.reset);
      if(song.readyState > 0){
        seek.init();
      }
      $(player).append(seek_wrapper);
    };
    // Time display: shows total length when paused and elapsed time while
    // playing, with the other value in a tooltip; clicking resets to 0.
    var addTime = function() {
      var time = document.createElement('a');
      $(time).addClass('btn span3');
      $(time).tooltip({'container': 'body', 'placement': 'right', 'html': true});
      // Zero-pad a number to two digits.
      time.twodigit = function (myNum) {
        return ("0" + myNum).slice(-2);
      };
      // Format seconds as [H:]MM:SS; shows a spinner while duration is NaN.
      time.timesplit = function (a) {
        if (isNaN(a)){return '<i class="icon-spinner icon-spin"></i>';}
        var hours = Math.floor(a / 3600);
        var minutes = Math.floor(a / 60) - (hours * 60);
        var seconds = Math.floor(a) - (hours * 3600) - (minutes * 60);
        var timeStr = time.twodigit(minutes) + ':' + time.twodigit(seconds);
        if (hours > 0) {
          timeStr = hours + ':' + timeStr;
        }
        return timeStr;
      };
      time.showtime = function () {
        $(time).html(time.timesplit(song.duration));
        $(time).attr({'title': 'Click to Reset<hr style="padding:0; margin:0;" />Position: ' + (time.timesplit(song.currentTime))});
        if (!song.paused){
          $(time).html(time.timesplit(song.currentTime));
          $(time).attr({'title': 'Click to Reset<hr style="padding:0; margin:0;" />Length: ' + (time.timesplit(song.duration))});
        }
        $(time).tooltip('fixTitle');
      };
      // Clicking the time stops playback and rewinds to the start.
      $(time).click(function () {
        song.pause();
        song.currentTime = 0;
        time.showtime();
        $(time).tooltip('fixTitle');
        $(time).tooltip('show');
      });
      $(time).tooltip('show');
      $(song).on('loadedmetadata', time.showtime);
      $(song).on('loadeddata', time.showtime);
      $(song).on('progress', time.showtime);
      $(song).on('canplay', time.showtime);
      $(song).on('canplaythrough', time.showtime);
      $(song).on('timeupdate', time.showtime);
      if(song.readyState > 0){
        time.showtime();
      }
      else {
        $(time).html('<i class="icon-spinner icon-spin"></i>');
      }
      $(player).append(time);
    };
    // Mute toggle; the icon also reflects the current volume level.
    var addMute = function() {
      var mute = document.createElement('button');
      $(mute).addClass('btn span1');
      mute.checkVolume = function () {
        if (song.volume > 0.5 && !song.muted) {
          $(mute).html('<i class="icon-volume-up"></i>');
        } else if (song.volume < 0.5 && song.volume > 0 && !song.muted) {
          $(mute).html('<i class="icon-volume-down"></i>');
        } else {
          $(mute).html('<i class="icon-volume-off"></i>');
        }
      };
      // Muting also zeroes the volume, remembering the old level so
      // unmuting can restore it.
      $(mute).click(function () {
        if (song.muted) {
          song.muted = false;
          song.volume = song.oldvolume;
        } else {
          song.muted = true;
          song.oldvolume = song.volume;
          song.volume = 0;
        }
        mute.checkVolume();
      });
      mute.checkVolume();
      $(song).on('volumechange', mute.checkVolume);
      $(player).append(mute);
    };
    // Volume slider (0..1 in 1% steps); dragging it also unmutes.
    var addVolume = function() {
      var volume = document.createElement('input');
      $(volume).attr({
        'type': 'range',
        'min': 0,
        'max': 1,
        'step': 1 / 100,
        'value': 1
      });
      volume.slide = function () {
        song.muted = false;
        song.volume = $(volume).val();
      };
      volume.set = function () {
        $(volume).val(song.volume);
      };
      var vol_wrapper = document.createElement('div');
      $(vol_wrapper).addClass('btn disabled span3');
      $(vol_wrapper).append(volume);
      $(volume).on("change", volume.slide);
      $(song).on('volumechange', volume.set);
      $(player).append(vol_wrapper);
    };
    // Album art image for the details section (from data-info-album-art).
    var addAlbumArt = function() {
      var albumArt = document.createElement('img');
      $(albumArt).addClass('thumbnail');
      $(albumArt).attr('src', $(song).data('infoAlbumArt'));
      $(data_sec).append(albumArt);
    };
    // One labelled metadata row sourced from a data-* attribute.
    var addInfo = function(title, dataId) {
      var row = document.createElement('tr');
      var head = document.createElement('th');
      var data = document.createElement('td');
      $(head).html(title);
      $(data).html($(song).data(dataId));
      $(row).append(head);
      $(row).append(data);
      $(data_table).append(row);
    };
    // Assemble the details section from whichever data-info-* attributes
    // are present; skip the section entirely if there is no metadata.
    var addData = function() {
      if (typeof($(song).data('infoAlbumArt')) !== 'undefined'){ addAlbumArt();}
      if (typeof($(song).data('infoArtist')) !== 'undefined'){ addInfo('Artist', 'infoArtist');}
      if (typeof($(song).data('infoTitle')) !== 'undefined'){ addInfo('Title', 'infoTitle');}
      if (typeof($(song).data('infoAlbumTitle')) !== 'undefined'){ addInfo('Album', 'infoAlbumTitle');}
      if (typeof($(song).data('infoLabel')) !== 'undefined'){ addInfo('Label', 'infoLabel');}
      if (typeof($(song).data('infoYear')) !== 'undefined'){ addInfo('Year', 'infoYear');}
      if ($(data_table).html() !== ""){
        $(data_sec).append(data_table);
        $(player_box).append(toggle_holder);
        $(player_box).append(data_sec);
      }
    };
    // Assemble the transport controls; each can be disabled per element
    // with data-play="off", data-seek="off", etc.
    var addPlayer = function() {
      if ($(song).data('play') !== 'off'){ addPlay();}
      if ($(song).data('seek') !== 'off'){ addSeek();}
      if ($(song).data('time') !== 'off'){ addTime();}
      if ($(song).data('mute') !== 'off'){ addMute();}
      if ($(song).data('volume') !== 'off'){ addVolume();}
      $(player_box).append(player);
    };
    // Attribution footer (data-info-att / data-info-att-link).
    var addAttribution = function() {
      var attribution = document.createElement('small');
      $(attribution).addClass('pull-right muted');
      if (typeof($(song).data('infoAttLink')) !== 'undefined'){
        var attribution_link = document.createElement('a');
        $(attribution_link).addClass('muted');
        $(attribution_link).attr('href', $(song).data('infoAttLink'));
        $(attribution_link).html($(song).data('infoAtt'));
        $(attribution).append(attribution_link);
      }
      else {
        $(attribution).html($(song).data('infoAtt'));
      }
      $(player_box).append(attribution);
    };
    var fillPlayerBox = function() {
      addData();
      addPlayer();
      if (typeof($(song).data('infoAtt')) !== 'undefined'){ addAttribution();}
    };
    fillPlayerBox();
    $(song).on('error', function(){
      load_error();
    });
    return player_box;
  });
})(jQuery)
#!/bin/sh
# Start the backend (build/app.js) in the background, then serve the
# static assets in the foreground on http://127.0.0.1:8081.
# Binding to loopback keeps the dev server unreachable from other hosts.
node build/app.js &
cd static
python3 -m http.server --bind 127.0.0.1 8081
|
/** @module ext/CanvasRenderingContext2D/all
 * Barrel module: re-exports all canvas-extension helpers (colors, drawing
 * routines, 2D vectors) so consumers can import everything from one path.
 */
export * from "./Color.js"
export * from "./draw.js"
export * from "./Vec2.js"
|
<filename>src/fns/index.js
export { $ } from './$';
export { add } from './add';
export { and } from './and';
export { cat } from './cat';
export { ceil } from './ceil';
export { div } from './div';
export { eq } from './eq';
export { flr } from './flr';
export { fix } from './fix';
export { get } from './get';
export { gt } from './gt';
export { gte } from './gte';
export { idx } from './idx';
export { is } from './is';
export { join } from './join';
export { len } from './len';
export { low } from './low';
export { lt } from './lt';
export { lte } from './lte';
export { max } from './max';
export { min } from './min';
export { mul } from './mul';
export { not } from './not';
export { or } from './or';
export { pl } from './pl';
export { pr } from './pr';
export { rnd } from './rnd';
export { rpl } from './rpl';
export { rpt } from './rpt';
export { sbs } from './sbs';
export { slc } from './slc';
export { sub } from './sub';
export { trim } from './trim';
export { up } from './up';
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.