text stringlengths 1 1.05M |
|---|
<reponame>rsuite/rsuite-icons
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import Gear2Svg from '@rsuite/icon-font/lib/legacy/Gear2';
// Icon component wrapping the legacy "gear 2" glyph from @rsuite/icon-font.
// File is script-generated; keep edits to comments only.
const Gear2 = createSvgIcon({
  as: Gear2Svg,
  ariaLabel: 'gear 2',
  category: 'legacy',
  displayName: 'Gear2'
});
export default Gear2;
|
#!/usr/bin/env bash
# Adapted from the asdf plugin template:
# https://github.com/asdf-vm/asdf-plugin-template/blob/main/scripts/shfmt.bash
# `shfmt -d .` prints a diff (and exits non-zero) for any shell file under the
# current directory that is not already shfmt-formatted; exec replaces this shell.
exec shfmt -d .
|
// Per-frame fade: once delayTime has elapsed, flip the fade direction and
// move the sprite's alpha toward 0 or 1; destroy the object after deathTimer.
void Update () {
    if (delayTime <= 0) {
        alphaUp = !alphaUp; // Flip the fade direction each time we get here

        // Both directions use the same rate; only the sign differs.
        float alphaStep = 0.1f; // Rate of alpha change
        float delta = alphaUp ? alphaStep : -alphaStep;

        Color tint = sprRen.color;
        tint.a = Mathf.Clamp01(tint.a + delta * Time.deltaTime);
        sprRen.color = tint;

        if (Time.fixedTime > deathTimer) {
            // Lifetime elapsed: remove this object from the scene
            Destroy(gameObject);
        }
    }
}
<reponame>renankalfa/Curso_em_Video
# Mode 1: print the greeting literal directly.
print('Ol<NAME>!')
# Mode 2: store the message in a variable, then print it.
mensagem = 'Ol<NAME>!'
print(mensagem)
|
<filename>lib/puppet/type/refacter.rb
# Custom Puppet type that forces a facter re-run when matching facts change.
Puppet::Type.newtype(:refacter) do
  desc <<-EOT
Forces puppet to rerun facter to reload and refresh all facts, if any of
the facts matching the given pattern changed.
"Before" Example:
# this resource sets up a new loopback disk device with
# the specified file
loopback_dev { "loopback-dev-test-1":
path => "/path/to/loopback/dev/files/test-1.bin",
size => "10M"
}
# This class uses facter facts to automatically mount all known
# loopback disk devices. However, facter ran *before* the loopback
# dev above was created, so it will take an *additional* run of
# puppet apply to pick up the change to the system and get that
# new device mounted.
-> class { "automount::loopbackdisks": pattern => "blkid_dev" }
"After" Example:
loopback_dev { "loopback-dev-test-1":
path => "/path/to/loopback/dev/files/test-1.bin",
size => "10M"
}
# after creating the new dev, re-run facter to pick up info about
# it so it will be mounted by the below class *during this run.*
~> refacter { "loopback-dev-test-1": }
-> class { "automount::loopbackdisks": }
EOT
  require 'pp'

  ### TODO: make the refreshonly mechanism some sort of mixin?
  ### Code below copied from the exec type to support the "refreshonly" mechanism

  # Register a "check" parameter; checks are consulted before refresh runs.
  def self.newcheck(name, options = {}, &block)
    @checks ||= {}
    check = newparam(name, options, &block)
    @checks[name] = check
  end

  # Names of all registered check parameters.
  def self.checks
    @checks ||= {}
    @checks.keys
  end

  # Invoked on notification; only runs the provider when every check passes.
  def refresh
    provider.run if check_all_attributes(true)
  end

  # Verify that we pass all of the checks. The argument determines whether
  # we skip the :refreshonly check, which is necessary because we now check
  # within refresh
  def check_all_attributes(refreshing = false)
    self.class.checks.each do |check|
      next if refreshing && check == :refreshonly
      next unless @parameters.include?(check)
      val = @parameters[check].value
      val = [val] unless val.is_a? Array
      # return false if any check returns false
      val.each do |value|
        return false unless @parameters[check].check(value)
      end
    end
    # return true if everything was true
    true
  end
  ### Code above copied from the exec type to support the "refreshonly" mechanism

  newparam(:name, :namevar => true) do
    desc 'An arbitrary name used as the identity of the resource.'
  end

  # Plural convenience form: forwards its value into :pattern and clears itself.
  newparam(:patterns) do
    desc 'only reload if facts whose names match these patterns changed'
    munge { |val| resource[:pattern] = val; nil }
    validate do |_val|
      raise ArgumentError,
            "Can not use both the 'pattern' and 'patterns' attributes " \
            'at the same time.' unless resource[:pattern].nil?
    end
  end

  newparam(:pattern) do
    desc 'only reload if facts whose names match this pattern changed'
    defaultto :undef
    validate do |val|
      if resource[:patterns].nil? && val == :undef
        raise ArgumentError, "Either 'pattern' or 'patterns' must be set."
      end
    end
    # Compile the given pattern(s) into a single union Regexp (extended mode).
    munge do |val|
      raise ArgumentError,
            "Can not use both the 'pattern' and 'patterns' attributes " \
            'at the same time.' unless resource[:patterns].nil?
      begin
        return Regexp.union(Array(val).map { |r| Regexp.new(r, Regexp::EXTENDED) unless r.empty? })
      rescue => details
        raise ArgumentError, 'Could not compile one of the pattern regexps:' + details.pretty_inspect
      end
    end
  end

  newparam(:refreshonly) do
    # FIX: corrected user-facing typo "recieves" -> "receives".
    desc 'only reload if this resource receives a notification'
    newvalues :true, :false
    defaultto :true
    def check(value)
      # check should always fail if this param is true.
      # (this is what makes refreshonly work)
      value == :true ? false : true
    end
  end
end
|
import React, { MouseEvent } from 'react';
import Button from '../button/Button';
import './table.scss';
/** Props accepted by the Table component. */
interface TableProps {
  /** All actions */
  actions?: TableAction[];
  /** When true, renders the boxed table variant (adds `psm-table--box`). */
  box?: boolean;
  /** Custom class name for component */
  className?: string;
  /** Column definitions: header label plus the key used to read each row. */
  columns: Column[];
  /** Row objects; cell values are looked up by column key. */
  data: TableData[];
  /** Base string used to build element ids and data-testid attributes. */
  'data-testid'?: string;
  /** Caption text; the caption is omitted when falsy. */
  title?: string;
}
/** A toolbar action rendered as a Button above the table. */
interface TableAction {
  label: string;
  onClick?: (event?: MouseEvent<HTMLButtonElement | HTMLAnchorElement>) => void;
  /** When true, the action button uses the primary style. */
  primary?: boolean;
}
/** Column definition: visible header label and row lookup key. */
interface Column {
  label: string;
  key: string;
}
/** A table row: maps column keys to cell text. */
interface TableData {
  [key: string]: string;
}
/**
 * Table component: optional action buttons, caption, header row, and data
 * rows. Cell/header ids are derived from the `data-testid` prop.
 */
export default ({
  actions,
  box = false,
  className = '',
  columns = [
    {
      label: 'Column label',
      key: 'Column key',
    },
  ],
  data = [
    {
      key: 'Data Key',
    },
  ],
  'data-testid': testid = '',
  title = 'My New Table',
}: TableProps) => {
  // Pre-compute the wrapper class list and whether the action bar renders.
  const wrapperClassName = `psm-table__wrapper ${box ? 'psm-table--box' : ''} ${className}`;
  const showActions = actions && actions.length !== 0;

  return (
    <div className={wrapperClassName}>
      {/* eslint-disable react/no-array-index-key */}
      {showActions && (
        <div className="psm-table__actions">
          {actions.map((action, actionIdx) => (
            <Button
              key={actionIdx}
              label={action.label}
              onClick={action.onClick}
              buttonStyle={action.primary ? 'primary' : undefined}
            />
          ))}
        </div>
      )}
      <table className="psm-table">
        {title && (
          <caption className="psm-table-caption" id={`${testid}-table-caption`}>
            {title}
          </caption>
        )}
        <thead>
          <tr className="psm-table__tr">
            {columns.map((column, colIdx) => (
              <th
                className="psm-table__th"
                id={`${testid}-column-${colIdx}-row-${-1}`}
                key={colIdx}
                scope="col"
              >
                {column.label}
              </th>
            ))}
          </tr>
        </thead>
        <tbody>
          {data.map((row, rowIdx) => (
            <tr className="psm-table__tr" key={rowIdx}>
              {columns.map((column, colIdx) => (
                <td
                  className="psm-table__td"
                  id={`${testid}-column-${colIdx}-row-${rowIdx}`}
                  data-testid={testid}
                  key={colIdx}
                >
                  {row[column.key]}
                </td>
              ))}
            </tr>
          ))}
          {/* eslint-enable react/no-array-index-key */}
        </tbody>
      </table>
    </div>
  );
};
|
<reponame>ashutoshstu/apicurio-studio
/*
* Copyright 2018 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apicurio.hub.core.beans;
/**
* @author <EMAIL>
*/
/**
 * Simple bean holding a reference to a mock: its type, the service it
 * refers to, and the URL where the mock lives.
 */
public class MockReference {

    /** The kind of mock being referenced. */
    private String mockType;

    /** Identifier of the mocked service. */
    private String serviceRef;

    /** URL at which the mock can be reached. */
    private String mockURL;

    /** Default constructor. */
    public MockReference() {
    }

    /** @return the mockType */
    public String getMockType() {
        return this.mockType;
    }

    /** @param mockType the mockType to set */
    public void setMockType(String mockType) {
        this.mockType = mockType;
    }

    /** @return the serviceRef */
    public String getServiceRef() {
        return this.serviceRef;
    }

    /** @param serviceRef the serviceRef to set */
    public void setServiceRef(String serviceRef) {
        this.serviceRef = serviceRef;
    }

    /** @return the mockURL */
    public String getMockURL() {
        return this.mockURL;
    }

    /** @param mockURL the mockURL to set */
    public void setMockURL(String mockURL) {
        this.mockURL = mockURL;
    }
}
|
<reponame>Snowgem/bitfrost-ts
"use strict";
// tsc-emitted module-interop boilerplate (__createBinding, __setModuleDefault,
// __importStar, __importDefault). Standard generated helpers; do not hand-edit.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Script = void 0;
const preconditions_1 = __importDefault(require("../util/preconditions"));
const _ = __importStar(require("lodash"));
const networks_1 = require("../networks");
const address_1 = require("../address");
const encoding_1 = require("../encoding");
const crypto_1 = require("../crypto");
const opcode_1 = require("../opcode");
const publickey_1 = require("../publickey");
const errors_1 = require("../errors");
const buffer_1 = require("buffer");
const util_1 = require("../util");
const signature_1 = require("../crypto/signature");
const interpreter_1 = require("./interpreter");
// Bitcoin script modeled as an ordered list of "chunks"
// ({ opcodenum, buf?, len? }). tsc-generated emit of the bitcore-ts Script class.
class Script {
    constructor(from) {
        this._isInput = false;
        this._isOutput = false;
        // Classification tables: label -> predicate method (see classifyOutput/Input).
        this.outputIdentifiers = {
            PUBKEY_OUT: this.isPublicKeyOut,
            PUBKEYHASH_OUT: this.isPublicKeyHashOut,
            MULTISIG_OUT: this.isMultisigOut,
            SCRIPTHASH_OUT: this.isScriptHashOut,
            DATA_OUT: this.isDataOut
        };
        this.inputIdentifiers = {
            PUBKEY_IN: this.isPublicKeyIn,
            PUBKEYHASH_IN: this.isPublicKeyHashIn,
            MULTISIG_IN: this.isMultisigIn,
            SCRIPTHASH_IN: this.isScriptHashIn
        };
        if (!(this instanceof Script)) {
            return new Script(from);
        }
        this.chunks = [];
        // NOTE: a constructor that returns an object replaces `this`; these
        // branches delegate construction to the static factory methods.
        if (util_1.BufferUtil.isBuffer(from)) {
            return Script.fromBuffer(from);
        }
        else if (from instanceof address_1.Address) {
            return Script.fromAddress(from);
        }
        else if (from instanceof Script) {
            return Script.fromBuffer(from.toBuffer());
        }
        else if (_.isString(from)) {
            return Script.fromString(from);
        }
        else if (_.isObject(from) && _.isArray(from.chunks)) {
            this.set(from);
        }
    }
    // Replace this script's chunks from a plain `{ chunks: [...] }` object.
    set(obj) {
        preconditions_1.default.checkArgument(_.isObject(obj));
        preconditions_1.default.checkArgument(_.isArray(obj.chunks));
        this.chunks = obj.chunks;
        return this;
    }
    // Parse a raw script buffer into chunks: direct pushes (opcode 1..75 is
    // the push length) and the three length-prefixed OP_PUSHDATA forms.
    static fromBuffer(buffer) {
        const script = new Script();
        script.chunks = [];
        const br = new encoding_1.BufferReader(buffer);
        while (!br.finished()) {
            try {
                const opcodenum = br.readUInt8();
                let len;
                let buf;
                if (opcodenum > 0 && opcodenum < opcode_1.OP_CODES.OP_PUSHDATA1) {
                    // Direct push: the opcode itself is the byte count.
                    len = opcodenum;
                    script.chunks.push({
                        buf: br.read(len),
                        len,
                        opcodenum
                    });
                }
                else if (opcodenum === opcode_1.OP_CODES.OP_PUSHDATA1) {
                    len = br.readUInt8();
                    buf = br.read(len);
                    script.chunks.push({
                        buf,
                        len,
                        opcodenum
                    });
                }
                else if (opcodenum === opcode_1.OP_CODES.OP_PUSHDATA2) {
                    len = br.readUInt16LE();
                    buf = br.read(len);
                    script.chunks.push({
                        buf,
                        len,
                        opcodenum
                    });
                }
                else if (opcodenum === opcode_1.OP_CODES.OP_PUSHDATA4) {
                    len = br.readUInt32LE();
                    buf = br.read(len);
                    script.chunks.push({
                        buf,
                        len,
                        opcodenum
                    });
                }
                else {
                    // Plain opcode with no data payload.
                    script.chunks.push({
                        opcodenum
                    });
                }
            }
            catch (e) {
                // Reading past the end of the buffer surfaces as RangeError.
                if (e instanceof RangeError) {
                    throw new errors_1.BitcoreError(errors_1.ERROR_TYPES.Script.errors.InvalidBuffer, buffer.toString('hex'));
                }
                throw e;
            }
        }
        return script;
    }
    // Serialize the chunks back into raw script bytes (inverse of fromBuffer).
    toBuffer() {
        const bw = new encoding_1.BufferWriter();
        for (const chunk of this.chunks) {
            const opcodenum = chunk.opcodenum;
            bw.writeUInt8(chunk.opcodenum);
            if (chunk.buf) {
                if (opcodenum < opcode_1.OP_CODES.OP_PUSHDATA1) {
                    // Direct push: opcode byte already encodes the length.
                    bw.write(chunk.buf);
                }
                else if (opcodenum === opcode_1.OP_CODES.OP_PUSHDATA1) {
                    bw.writeUInt8(chunk.len);
                    bw.write(chunk.buf);
                }
                else if (opcodenum === opcode_1.OP_CODES.OP_PUSHDATA2) {
                    bw.writeUInt16LE(chunk.len);
                    bw.write(chunk.buf);
                }
                else if (opcodenum === opcode_1.OP_CODES.OP_PUSHDATA4) {
                    bw.writeUInt32LE(chunk.len);
                    bw.write(chunk.buf);
                }
            }
        }
        return bw.concat();
    }
    // Parse a space-separated ASM string (opcode names and bare hex pushes).
    static fromASM(str) {
        const script = new Script();
        script.chunks = [];
        const tokens = str.split(' ');
        let i = 0;
        while (i < tokens.length) {
            const token = tokens[i];
            const opcode = new opcode_1.Opcode(token);
            const opcodenum = opcode.toNumber();
            if (_.isUndefined(opcodenum)) {
                // Not an opcode name: treat the token as raw hex push data.
                const buf = buffer_1.Buffer.from(tokens[i], 'hex');
                script.chunks.push({
                    buf,
                    len: buf.length,
                    opcodenum: buf.length
                });
                i = i + 1;
            }
            else if (opcodenum === opcode_1.OP_CODES.OP_PUSHDATA1 ||
                opcodenum === opcode_1.OP_CODES.OP_PUSHDATA2 ||
                opcodenum === opcode_1.OP_CODES.OP_PUSHDATA4) {
                // PUSHDATA form: next token is the length, then the hex data.
                script.chunks.push({
                    buf: buffer_1.Buffer.from(tokens[i + 2], 'hex'),
                    len: parseInt(tokens[i + 1], 10),
                    opcodenum
                });
                i = i + 3;
            }
            else {
                script.chunks.push({
                    opcodenum
                });
                i = i + 1;
            }
        }
        return script;
    }
    // Build a script directly from a hex string.
    static fromHex(str) {
        return new Script(new buffer_1.Buffer(str, 'hex'));
    }
    // Parse the bitcore string format: either plain hex, or tokens of the form
    // "<len> 0x<hex>" / "OP_PUSHDATAn <len> 0x<hex>" / "OP_*".
    static fromString(str) {
        if (util_1.JSUtil.isHexa(str) || str.length === 0) {
            return new Script(new buffer_1.Buffer(str, 'hex'));
        }
        const script = new Script();
        script.chunks = [];
        const tokens = str.split(' ');
        let i = 0;
        while (i < tokens.length) {
            const token = tokens[i];
            const opcode = new opcode_1.Opcode(token);
            let opcodenum = opcode.toNumber();
            if (_.isUndefined(opcodenum)) {
                // Not an opcode name: token is a decimal push length, followed
                // by a "0x"-prefixed hex data token.
                opcodenum = parseInt(token, 10);
                if (opcodenum > 0 && opcodenum < opcode_1.OP_CODES.OP_PUSHDATA1) {
                    script.chunks.push({
                        buf: buffer_1.Buffer.from(tokens[i + 1].slice(2), 'hex'),
                        len: opcodenum,
                        opcodenum
                    });
                    i = i + 2;
                }
                else {
                    throw new Error('Invalid script: ' + JSON.stringify(str));
                }
            }
            else if (opcodenum === opcode_1.OP_CODES.OP_PUSHDATA1 ||
                opcodenum === opcode_1.OP_CODES.OP_PUSHDATA2 ||
                opcodenum === opcode_1.OP_CODES.OP_PUSHDATA4) {
                if (tokens[i + 2].slice(0, 2) !== '0x') {
                    throw new Error('Pushdata data must start with 0x');
                }
                script.chunks.push({
                    buf: buffer_1.Buffer.from(tokens[i + 2].slice(2), 'hex'),
                    len: parseInt(tokens[i + 1], 10),
                    opcodenum
                });
                i = i + 3;
            }
            else {
                script.chunks.push({
                    opcodenum
                });
                i = i + 1;
            }
        }
        return script;
    }
_chunkToString(chunk, type) {
const opcodenum = chunk.opcodenum;
const asm = type === 'asm';
let str = '';
if (!chunk.buf) {
if (typeof opcode_1.Opcode.reverseMap[opcodenum] !== 'undefined') {
if (asm) {
if (opcodenum === 0) {
str = str + ' 0';
}
else if (opcodenum === 79) {
str = str + ' -1';
}
else {
str = str + ' ' + new opcode_1.Opcode(opcodenum).toString();
}
}
else {
str = str + ' ' + new opcode_1.Opcode(opcodenum).toString();
}
}
else {
let numstr = opcodenum.toString(16);
if (numstr.length % 2 !== 0) {
numstr = '0' + numstr;
}
str = asm ? str + ' ' + numstr : str + ' ' + '0x' + numstr;
}
}
else {
if ((!asm && opcodenum === opcode_1.OP_CODES.OP_PUSHDATA1) ||
opcodenum === opcode_1.OP_CODES.OP_PUSHDATA2 ||
opcodenum === opcode_1.OP_CODES.OP_PUSHDATA4) {
str = str + ' ' + new opcode_1.Opcode(opcodenum).toString();
}
if (chunk.len > 0) {
str = asm
? str + ' ' + chunk.buf.toString('hex')
: str + ' ' + chunk.len + ' ' + '0x' + chunk.buf.toString('hex');
}
}
return str;
}
toASM() {
let str = '';
for (const chunk of this.chunks) {
str += this._chunkToString(chunk, 'asm');
}
return str.substr(1);
}
toString() {
let str = '';
for (const chunk of this.chunks) {
str += this._chunkToString(chunk);
}
return str.substr(1);
}
toHex() {
return this.toBuffer().toString('hex');
}
inspect() {
return '<Script: ' + this.toString() + '>';
}
    // True for a standard P2PKH output:
    // OP_DUP OP_HASH160 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG.
    isPublicKeyHashOut() {
        return !!(this.chunks.length === 5 &&
            this.chunks[0].opcodenum === opcode_1.OP_CODES.OP_DUP &&
            this.chunks[1].opcodenum === opcode_1.OP_CODES.OP_HASH160 &&
            this.chunks[2].buf &&
            this.chunks[2].buf.length === 20 &&
            this.chunks[3].opcodenum === opcode_1.OP_CODES.OP_EQUALVERIFY &&
            this.chunks[4].opcodenum === opcode_1.OP_CODES.OP_CHECKSIG);
    }
    // True for a P2PKH input: <DER signature> <pubkey> where the pubkey is
    // 65 bytes (uncompressed, 0x04/0x06/0x07) or 33 bytes (compressed, 0x02/0x03).
    isPublicKeyHashIn() {
        if (this.chunks.length === 2) {
            const signatureBuf = this.chunks[0].buf;
            const pubkeyBuf = this.chunks[1].buf;
            if (signatureBuf &&
                signatureBuf.length &&
                signatureBuf[0] === 0x30 &&
                pubkeyBuf &&
                pubkeyBuf.length) {
                const version = pubkeyBuf[0];
                if ((version === 0x04 || version === 0x06 || version === 0x07) &&
                    pubkeyBuf.length === 65) {
                    return true;
                }
                else if ((version === 0x03 || version === 0x02) &&
                    pubkeyBuf.length === 33) {
                    return true;
                }
            }
        }
        return false;
    }
    // Public key buffer from a pay-to-pubkey output (throws otherwise).
    getPublicKey() {
        preconditions_1.default.checkState(this.isPublicKeyOut(), "Can't retrieve PublicKey from a non-PK output");
        return this.chunks[0].buf;
    }
    // 20-byte hash from a P2PKH output (throws otherwise).
    getPublicKeyHash() {
        preconditions_1.default.checkState(this.isPublicKeyHashOut(), "Can't retrieve PublicKeyHash from a non-PKH output");
        return this.chunks[2].buf;
    }
    // True for a pay-to-pubkey output: <pubkey> OP_CHECKSIG with a
    // structurally valid key of the expected length for its version byte.
    isPublicKeyOut() {
        if (this.chunks.length === 2 &&
            this.chunks[0].buf &&
            this.chunks[0].buf.length &&
            this.chunks[1].opcodenum === opcode_1.OP_CODES.OP_CHECKSIG) {
            const pubkeyBuf = this.chunks[0].buf;
            const version = pubkeyBuf[0];
            let isVersion = false;
            if ((version === 0x04 || version === 0x06 || version === 0x07) &&
                pubkeyBuf.length === 65) {
                isVersion = true;
            }
            else if ((version === 0x03 || version === 0x02) &&
                pubkeyBuf.length === 33) {
                isVersion = true;
            }
            if (isVersion) {
                return publickey_1.PublicKey.isValid(pubkeyBuf);
            }
        }
        return false;
    }
    // True for a pay-to-pubkey input: a single DER signature push (0x30 lead).
    isPublicKeyIn() {
        if (this.chunks.length === 1) {
            const signatureBuf = this.chunks[0].buf;
            if (signatureBuf && signatureBuf.length && signatureBuf[0] === 0x30) {
                return true;
            }
        }
        return false;
    }
    // True for a P2SH output: OP_HASH160 <20-byte hash> OP_EQUAL (23 bytes).
    isScriptHashOut() {
        const buf = this.toBuffer();
        return (buf.length === 23 &&
            buf[0] === opcode_1.OP_CODES.OP_HASH160 &&
            buf[1] === 0x14 &&
            buf[buf.length - 1] === opcode_1.OP_CODES.OP_EQUAL);
    }
    // True for a P2WSH output: version byte 0 followed by a 32-byte program.
    isWitnessScriptHashOut() {
        const buf = this.toBuffer();
        return buf.length === 34 && buf[0] === 0 && buf[1] === 32;
    }
    // True for a P2WPKH output: version byte 0 followed by a 20-byte program.
    isWitnessPublicKeyHashOut() {
        const buf = this.toBuffer();
        return buf.length === 22 && buf[0] === 0 && buf[1] === 20;
    }
    // Detect a witness program; on success fills `values.version` and
    // `values.program` (out-parameter style).
    isWitnessProgram(values = {}) {
        if (!values) {
            values = {};
        }
        const buf = this.toBuffer();
        if (buf.length < 4 || buf.length > 42) {
            return false;
        }
        if (buf[0] !== opcode_1.OP_CODES.OP_0 &&
            !(buf[0] >= opcode_1.OP_CODES.OP_1 && buf[0] <= opcode_1.OP_CODES.OP_16)) {
            return false;
        }
        if (buf.length === buf[1] + 2) {
            values.version = buf[0];
            values.program = buf.slice(2, buf.length);
            return true;
        }
        return false;
    }
    // True for a P2SH input: the final push parses as a script of known type.
    isScriptHashIn() {
        if (this.chunks.length <= 1) {
            return false;
        }
        const redeemChunk = this.chunks[this.chunks.length - 1];
        const redeemBuf = redeemChunk.buf;
        if (!redeemBuf) {
            return false;
        }
        let redeemScript;
        try {
            redeemScript = Script.fromBuffer(redeemBuf);
        }
        catch (e) {
            // Unparseable redeem script => not a recognizable P2SH spend.
            if (e instanceof errors_1.BitcoreError) {
                return false;
            }
            throw e;
        }
        const type = redeemScript.classify();
        return type !== Script.types.UNKNOWN;
    }
    // True for a multisig output: OP_m <pubkey>... OP_n OP_CHECKMULTISIG.
    isMultisigOut() {
        return (this.chunks.length > 3 &&
            opcode_1.Opcode.isSmallIntOp(this.chunks[0].opcodenum) &&
            this.chunks.slice(1, this.chunks.length - 2).every(obj => {
                return obj.buf && util_1.BufferUtil.isBuffer(obj.buf);
            }) &&
            opcode_1.Opcode.isSmallIntOp(this.chunks[this.chunks.length - 2].opcodenum) &&
            this.chunks[this.chunks.length - 1].opcodenum ===
                opcode_1.OP_CODES.OP_CHECKMULTISIG);
    }
    // True for a multisig input: OP_0 followed only by DER-encoded signatures.
    isMultisigIn() {
        return (this.chunks.length >= 2 &&
            this.chunks[0].opcodenum === 0 &&
            this.chunks.slice(1, this.chunks.length).every(obj => {
                return (obj.buf && util_1.BufferUtil.isBuffer(obj.buf) && signature_1.Signature.isTxDER(obj.buf));
            }));
    }
isDataOut() {
return (this.chunks.length >= 1 &&
this.chunks[0].opcodenum === opcode_1.OP_CODES.OP_RETURN &&
(this.chunks.length === 1 ||
(this.chunks.length === 2 &&
this.chunks[1].buf &&
this.chunks[1].buf.length <= Script.OP_RETURN_STANDARD_SIZE &&
this.chunks[1].len === this.chunks.length)));
}
    // Extract the payload from a data/P2SH/P2PKH output (throws otherwise).
    getData() {
        if (this.isDataOut() || this.isScriptHashOut()) {
            if (_.isUndefined(this.chunks[1])) {
                // Bare OP_RETURN: no payload.
                return buffer_1.Buffer.alloc(0);
            }
            else {
                return buffer_1.Buffer.from(this.chunks[1].buf);
            }
        }
        if (this.isPublicKeyHashOut()) {
            return buffer_1.Buffer.from(this.chunks[2].buf);
        }
        throw new Error('Unrecognized script type to get data from');
    }
    // True when every chunk is a push opcode (<= OP_16).
    isPushOnly() {
        return _.every(this.chunks, chunk => {
            return chunk.opcodenum <= opcode_1.OP_CODES.OP_16;
        });
    }
    // Classify as input, output, or (when undetermined) try output then input.
    classify() {
        if (this._isInput) {
            return this.classifyInput();
        }
        else if (this._isOutput) {
            return this.classifyOutput();
        }
        else {
            const outputType = this.classifyOutput();
            return outputType !== Script.types.UNKNOWN
                ? outputType
                : this.classifyInput();
        }
    }
    // Run each output predicate until one matches.
    classifyOutput() {
        for (const type in this.outputIdentifiers) {
            if (this.outputIdentifiers[type].bind(this)()) {
                return Script.types[type];
            }
        }
        return Script.types.UNKNOWN;
    }
    // Run each input predicate until one matches.
    classifyInput() {
        for (const type in this.inputIdentifiers) {
            if (this.inputIdentifiers[type].bind(this)()) {
                return Script.types[type];
            }
        }
        return Script.types.UNKNOWN;
    }
    // A script is "standard" when it classifies as a known type.
    isStandard() {
        return this.classify() !== Script.types.UNKNOWN;
    }
    // Insert a chunk/opcode/buffer/script at the front of the script.
    prepend(obj) {
        this._addByType(obj, true);
        return this;
    }
    // Chunk-by-chunk equality (buffers and opcode numbers must both match).
    equals(script) {
        preconditions_1.default.checkState(script instanceof Script, 'Must provide another script');
        if (this.chunks.length !== script.chunks.length) {
            return false;
        }
        let i;
        for (i = 0; i < this.chunks.length; i++) {
            if (util_1.BufferUtil.isBuffer(this.chunks[i].buf) &&
                !util_1.BufferUtil.isBuffer(script.chunks[i].buf)) {
                return false;
            }
            if (util_1.BufferUtil.isBuffer(this.chunks[i].buf) &&
                !util_1.BufferUtil.equals(this.chunks[i].buf, script.chunks[i].buf)) {
                return false;
            }
            else if (this.chunks[i].opcodenum !== script.chunks[i].opcodenum) {
                return false;
            }
        }
        return true;
    }
    // Append a chunk/opcode/buffer/script to the end of the script.
    add(obj) {
        this._addByType(obj, false);
        return this;
    }
    // Dispatch on the argument type: opcode name/number/Opcode, Buffer,
    // Script (chunks concatenated), or a plain chunk object.
    _addByType(obj, prepend) {
        if (typeof obj === 'string') {
            this._addOpcode(obj, prepend);
        }
        else if (typeof obj === 'number') {
            this._addOpcode(obj, prepend);
        }
        else if (obj instanceof opcode_1.Opcode) {
            this._addOpcode(obj, prepend);
        }
        else if (util_1.BufferUtil.isBuffer(obj)) {
            this._addBuffer(obj, prepend);
        }
        else if (obj instanceof Script) {
            // NOTE: scripts are always appended, even when prepend was requested.
            this.chunks = this.chunks.concat(obj.chunks);
        }
        else if (typeof obj === 'object') {
            this._insertAtPosition(obj, prepend);
        }
        else {
            throw new Error('Invalid script chunk');
        }
    }
    // Place a chunk at the front or the back of the chunk list.
    _insertAtPosition(op, prepend) {
        if (prepend) {
            this.chunks.unshift(op);
        }
        else {
            this.chunks.push(op);
        }
    }
    // Normalize an opcode (number, Opcode, or name string) and insert it.
    _addOpcode(opcode, prepend) {
        let op;
        if (typeof opcode === 'number') {
            op = opcode;
        }
        else if (opcode instanceof opcode_1.Opcode) {
            op = opcode.toNumber();
        }
        else {
            op = new opcode_1.Opcode(opcode).toNumber();
        }
        this._insertAtPosition({
            opcodenum: op
        }, prepend);
        return this;
    }
    // Insert a data push, choosing the smallest encoding for the length:
    // direct push (< OP_PUSHDATA1) or OP_PUSHDATA1/2/4.
    _addBuffer(buf, prepend) {
        let opcodenum;
        const len = buf.length;
        if (len >= 0 && len < opcode_1.OP_CODES.OP_PUSHDATA1) {
            opcodenum = len;
        }
        else if (len < Math.pow(2, 8)) {
            opcodenum = opcode_1.OP_CODES.OP_PUSHDATA1;
        }
        else if (len < Math.pow(2, 16)) {
            opcodenum = opcode_1.OP_CODES.OP_PUSHDATA2;
        }
        else if (len < Math.pow(2, 32)) {
            opcodenum = opcode_1.OP_CODES.OP_PUSHDATA4;
        }
        else {
            throw new Error("You can't push that much data");
        }
        this._insertAtPosition({
            buf,
            len,
            opcodenum
        }, prepend);
        return this;
    }
    // True when the script contains any OP_CODESEPARATOR.
    hasCodeseparators() {
        for (const chunk of this.chunks) {
            if (chunk.opcodenum === opcode_1.OP_CODES.OP_CODESEPARATOR) {
                return true;
            }
        }
        return false;
    }
    // Strip all OP_CODESEPARATOR chunks in place.
    removeCodeseparators() {
        const chunks = [];
        for (const chunk of this.chunks) {
            if (chunk.opcodenum !== opcode_1.OP_CODES.OP_CODESEPARATOR) {
                chunks.push(chunk);
            }
        }
        this.chunks = chunks;
        return this;
    }
    // Build OP_m <pubkey>... OP_n OP_CHECKMULTISIG; keys are sorted by their
    // string form unless opts.noSorting is set.
    static buildMultisigOut(publicKeys, threshold, opts) {
        preconditions_1.default.checkArgument(threshold <= publicKeys.length, 'Number of required signatures must be less than or equal to the number of public keys');
        opts = opts || {};
        const script = new Script();
        script.add(opcode_1.Opcode.smallInt(threshold));
        publicKeys = _.map(publicKeys, key => new publickey_1.PublicKey(key));
        let sorted = publicKeys;
        if (!opts.noSorting) {
            sorted = _.sortBy(publicKeys, publicKey => {
                return publicKey.toString();
            });
        }
        for (const sort of sorted) {
            const publicKey = sort;
            script.add(publicKey.toBuffer());
        }
        script.add(opcode_1.Opcode.smallInt(publicKeys.length));
        script.add(opcode_1.OP_CODES.OP_CHECKMULTISIG);
        return script;
    }
    // P2WSH-style output for a multisig script: OP_0 <sha256(script)>.
    static buildWitnessMultisigOutFromScript(script) {
        if (script instanceof Script) {
            const s = new Script();
            s.add(opcode_1.OP_CODES.OP_0);
            s.add(crypto_1.Hash.sha256(script.toBuffer()));
            return s;
        }
        else {
            throw new TypeError('First argument is expected to be a p2sh script');
        }
    }
    // Multisig input: OP_0 (CHECKMULTISIG off-by-one dummy) then signatures.
    static buildMultisigIn(pubkeys, threshold, signatures, opts) {
        preconditions_1.default.checkArgument(_.isArray(pubkeys));
        preconditions_1.default.checkArgument(_.isNumber(threshold));
        preconditions_1.default.checkArgument(_.isArray(signatures));
        opts = opts || {};
        const s = new Script();
        s.add(opcode_1.OP_CODES.OP_0);
        _.each(signatures, signature => {
            preconditions_1.default.checkArgument(util_1.BufferUtil.isBuffer(signature), 'Signatures must be an array of Buffers');
            s.add(signature);
        });
        return s;
    }
    // P2SH multisig input: like buildMultisigIn, plus the serialized redeem
    // script appended (recomputed unless opts.cachedMultisig is provided).
    static buildP2SHMultisigIn(pubkeys, threshold, signatures, opts) {
        preconditions_1.default.checkArgument(_.isArray(pubkeys));
        preconditions_1.default.checkArgument(_.isNumber(threshold));
        preconditions_1.default.checkArgument(_.isArray(signatures));
        opts = opts || {};
        const s = new Script();
        s.add(opcode_1.OP_CODES.OP_0);
        _.each(signatures, signature => {
            preconditions_1.default.checkArgument(util_1.BufferUtil.isBuffer(signature), 'Signatures must be an array of Buffers');
            s.add(signature);
        });
        s.add((opts.cachedMultisig || Script.buildMultisigOut(pubkeys, threshold, opts)).toBuffer());
        return s;
    }
    // Standard P2PKH output for an address, public key, or address string.
    static buildPublicKeyHashOut(to) {
        preconditions_1.default.checkArgument(!_.isUndefined(to));
        preconditions_1.default.checkArgument(to instanceof publickey_1.PublicKey || to instanceof address_1.Address || _.isString(to));
        if (to instanceof publickey_1.PublicKey) {
            to = to.toAddress();
        }
        else if (_.isString(to)) {
            to = new address_1.Address(to);
        }
        const s = new Script();
        s.add(opcode_1.OP_CODES.OP_DUP)
            .add(opcode_1.OP_CODES.OP_HASH160)
            .add(to.hashBuffer)
            .add(opcode_1.OP_CODES.OP_EQUALVERIFY)
            .add(opcode_1.OP_CODES.OP_CHECKSIG);
        s._network = to.network;
        return s;
    }
    // Pay-to-pubkey output: <pubkey> OP_CHECKSIG.
    static buildPublicKeyOut(pubkey) {
        preconditions_1.default.checkArgument(pubkey instanceof publickey_1.PublicKey);
        const s = new Script();
        s.add(pubkey.toBuffer()).add(opcode_1.OP_CODES.OP_CHECKSIG);
        return s;
    }
    // OP_RETURN output, optionally followed by a data push.
    static buildDataOut(data, encoding) {
        preconditions_1.default.checkArgument(_.isUndefined(data) || _.isString(data) || util_1.BufferUtil.isBuffer(data));
        if (typeof data === 'string') {
            data = buffer_1.Buffer.from(data, encoding);
        }
        const s = new Script();
        s.add(opcode_1.OP_CODES.OP_RETURN);
        if (!_.isUndefined(data)) {
            s.add(data);
        }
        return s;
    }
    // P2SH output: OP_HASH160 <hash160(script)> OP_EQUAL (or reuse the hash
    // from a pay-to-script-hash Address).
    static buildScriptHashOut(script) {
        preconditions_1.default.checkArgument(script instanceof Script ||
            (script instanceof address_1.Address && script.isPayToScriptHash()));
        const s = new Script();
        s.add(opcode_1.OP_CODES.OP_HASH160)
            .add(script instanceof address_1.Address
            ? script.hashBuffer
            : crypto_1.Hash.sha256ripemd160(script.toBuffer()))
            .add(opcode_1.OP_CODES.OP_EQUAL);
        s._network = script._network || script.network;
        return s;
    }
    // Pay-to-pubkey input: one push of <signature + sighash byte>.
    static buildPublicKeyIn(signature, sigtype) {
        preconditions_1.default.checkArgument(signature instanceof signature_1.Signature || util_1.BufferUtil.isBuffer(signature));
        preconditions_1.default.checkArgument(_.isUndefined(sigtype) || _.isNumber(sigtype));
        if (signature instanceof signature_1.Signature) {
            signature = signature.toBuffer();
        }
        const script = new Script();
        script.add(util_1.BufferUtil.concat([
            signature,
            util_1.BufferUtil.integerAsSingleByteBuffer(sigtype || signature_1.Signature.SIGHASH_ALL)
        ]));
        return script;
    }
    // P2PKH input: <signature + sighash byte> <pubkey>.
    static buildPublicKeyHashIn(publicKey, signature, sigtype = signature_1.Signature.SIGHASH_ALL) {
        preconditions_1.default.checkArgument(signature instanceof signature_1.Signature || util_1.BufferUtil.isBuffer(signature));
        preconditions_1.default.checkArgument(_.isUndefined(sigtype) || _.isNumber(sigtype));
        if (signature instanceof signature_1.Signature) {
            signature = signature.toBuffer();
        }
        const script = new Script()
            .add(util_1.BufferUtil.concat([
            signature,
            util_1.BufferUtil.integerAsSingleByteBuffer(sigtype || signature_1.Signature.SIGHASH_ALL)
        ]))
            .add(new publickey_1.PublicKey(publicKey).toBuffer());
        return script;
    }
    // An empty script.
    static empty() {
        return new Script();
    }
    // P2SH output that pays to this script.
    toScriptHashOut() {
        return Script.buildScriptHashOut(this);
    }
    // Output script paying to the given address (P2SH or P2PKH).
    static fromAddress(address) {
        address = new address_1.Address(address);
        if (address.isPayToScriptHash()) {
            return Script.buildScriptHashOut(address);
        }
        else if (address.isPayToPublicKeyHash()) {
            return Script.buildPublicKeyHashOut(address);
        }
        throw new errors_1.BitcoreError(errors_1.ERROR_TYPES.Script.errors.UnrecognizedAddress, address);
    }
    // Address info derived from this script; tries output form first when
    // the script direction is unknown. Returns false when unrecognized.
    getAddressInfo() {
        if (this._isInput) {
            return this._getInputAddressInfo();
        }
        else if (this._isOutput) {
            return this._getOutputAddressInfo();
        }
        else {
            const info = this._getOutputAddressInfo();
            if (!info) {
                return this._getInputAddressInfo();
            }
            return info;
        }
    }
    // Hash/type for P2SH or P2PKH outputs; false otherwise.
    _getOutputAddressInfo() {
        if (this.isScriptHashOut()) {
            return {
                hashBuffer: this.getData(),
                type: address_1.Address.PayToScriptHash,
                network: networks_1.Network.defaultNetwork
            };
        }
        else if (this.isPublicKeyHashOut()) {
            return {
                hashBuffer: this.getData(),
                type: address_1.Address.PayToPublicKeyHash,
                network: networks_1.Network.defaultNetwork
            };
        }
        else {
            return false;
        }
    }
    // Hash/type derived from P2PKH or P2SH inputs (hashing the pushed pubkey
    // or redeem script); false otherwise.
    _getInputAddressInfo() {
        const info = {};
        info.network = networks_1.Network.defaultNetwork;
        if (this.isPublicKeyHashIn()) {
            info.hashBuffer = crypto_1.Hash.sha256ripemd160(this.chunks[1].buf);
            info.type = address_1.Address.PayToPublicKeyHash;
        }
        else if (this.isScriptHashIn()) {
            info.hashBuffer = crypto_1.Hash.sha256ripemd160(this.chunks[this.chunks.length - 1].buf);
            info.type = address_1.Address.PayToScriptHash;
        }
        else {
            return false;
        }
        return info;
    }
    // Address for this script, preferring the requested network, then the
    // script's cached network, then the default.
    toAddress(network) {
        const info = this.getAddressInfo();
        if (!info) {
            throw new errors_1.BitcoreError(errors_1.ERROR_TYPES.Script.errors.UnrecognizedAddress);
        }
        info.network =
            networks_1.Network.get(network) || this._network || networks_1.Network.defaultNetwork;
        return new address_1.Address(info);
    }
    // Remove every chunk whose serialization equals `script` (consensus
    // FindAndDelete). NOTE(review): splicing while iterating skips the
    // element following each match; this mirrors upstream bitcore — confirm
    // before changing.
    findAndDelete(script) {
        const buf = script.toBuffer();
        const hex = buf.toString('hex');
        for (let i = 0; i < this.chunks.length; i++) {
            const script2 = new Script({
                chunks: [this.chunks[i]]
            });
            const buf2 = script2.toBuffer();
            const hex2 = buf2.toString('hex');
            if (hex === hex2) {
                this.chunks.splice(i, 1);
            }
        }
        return this;
    }
    // True when chunk i uses the minimal possible push encoding for its data.
    checkMinimalPush(i) {
        const chunk = this.chunks[i];
        const buf = chunk.buf;
        const opcodenum = chunk.opcodenum;
        if (!buf) {
            return true;
        }
        if (buf.length === 0) {
            // Empty data should use OP_0.
            return opcodenum === opcode_1.OP_CODES.OP_0;
        }
        else if (buf.length === 1 && buf[0] >= 1 && buf[0] <= 16) {
            // Single byte 1..16 should use OP_1..OP_16.
            return opcodenum === opcode_1.OP_CODES.OP_1 + (buf[0] - 1);
        }
        else if (buf.length === 1 && buf[0] === 0x81) {
            return opcodenum === opcode_1.OP_CODES.OP_1NEGATE;
        }
        else if (buf.length <= 75) {
            return opcodenum === buf.length;
        }
        else if (buf.length <= 255) {
            return opcodenum === opcode_1.OP_CODES.OP_PUSHDATA1;
        }
        else if (buf.length <= 65535) {
            return opcodenum === opcode_1.OP_CODES.OP_PUSHDATA2;
        }
        return true;
    }
    // Decode OP_0/OP_1..OP_16 to its integer value; throws otherwise.
    _decodeOP_N(opcode) {
        if (opcode === opcode_1.OP_CODES.OP_0) {
            return 0;
        }
        else if (opcode >= opcode_1.OP_CODES.OP_1 && opcode <= opcode_1.OP_CODES.OP_16) {
            return opcode - (opcode_1.OP_CODES.OP_1 - 1);
        }
        else {
            throw new Error('Invalid opcode: ' + JSON.stringify(opcode));
        }
    }
    // Count sigops: CHECKSIG(VERIFY) = 1; CHECKMULTISIG(VERIFY) counts the
    // preceding OP_N when `accurate`, otherwise the 20-key maximum.
    getSignatureOperationsCount(accurate = true) {
        accurate = _.isUndefined(accurate) ? true : accurate;
        let n = 0;
        let lastOpcode = opcode_1.OP_CODES.OP_INVALIDOPCODE;
        _.each(this.chunks, chunk => {
            const opcode = chunk.opcodenum;
            if (opcode === opcode_1.OP_CODES.OP_CHECKSIG ||
                opcode === opcode_1.OP_CODES.OP_CHECKSIGVERIFY) {
                n++;
            }
            else if (opcode === opcode_1.OP_CODES.OP_CHECKMULTISIG ||
                opcode === opcode_1.OP_CODES.OP_CHECKMULTISIGVERIFY) {
                if (accurate &&
                    lastOpcode >= opcode_1.OP_CODES.OP_1 &&
                    lastOpcode <= opcode_1.OP_CODES.OP_16) {
                    n += this._decodeOP_N(lastOpcode);
                }
                else {
                    n += 20;
                }
            }
            lastOpcode = opcode;
        });
        return n;
    }
}
exports.Script = Script;
// Expose the script interpreter as a static member for convenience.
Script.Interpreter = interpreter_1.Interpreter;
// Human-readable labels for script classification results.
Script.types = {
    UNKNOWN: 'Unknown',
    PUBKEY_OUT: 'Pay to public key',
    PUBKEY_IN: 'Spend from public key',
    PUBKEYHASH_OUT: 'Pay to public key hash',
    PUBKEYHASH_IN: 'Spend from public key hash',
    SCRIPTHASH_OUT: 'Pay to script hash',
    SCRIPTHASH_IN: 'Spend from script hash',
    MULTISIG_OUT: 'Pay to multisig',
    MULTISIG_IN: 'Spend from multisig',
    DATA_OUT: 'Data push'
};
// Maximum data payload size (in bytes) for a standard OP_RETURN output.
Script.OP_RETURN_STANDARD_SIZE = 80;
//# sourceMappingURL=script.js.map
package io.opensphere.hud.glswing;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.HierarchyEvent;
import java.awt.event.HierarchyListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.event.InternalFrameAdapter;
import javax.swing.event.InternalFrameEvent;
import io.opensphere.core.viewer.ViewChangeSupport;
import io.opensphere.core.viewer.ViewChangeSupport.ViewChangeListener;
import io.opensphere.core.viewer.ViewChangeSupport.ViewChangeType;
import io.opensphere.core.viewer.Viewer;
/** Helper class for managing AWTEvents. */
public class GLSwingEventListenerHelper
{
/** Listener for ComponentEvent on the internal frame. */
private final InternalFrameComponentListener myComponentListener;
/** The GLSwing frame for which this helper is managing events. */
private final GLSwingInternalFrame myGLSwingFrame;
/**
* Listen for hierarchy events to determine when the frame's render order
* may have been changed.
*/
private final HierarchyListener myHierarchyListener;
/** Listener for InternalFrameEvent on the internal frame. */
private final InternalFrameAdapter myInternalFrameListener;
/**
* Listener for when the button is pressed for popping the frame out of the
* HUD.
*/
private final PropertyChangeListener myPopListener;
/** Listener for rolling the frame up or down. */
private final PropertyChangeListener myRollupListener;
/** Listener for view changes. */
private final ViewChangeListener myViewChangeListener;
/** Support for viewer changes. */
private final ViewChangeSupport myViewChangeSupport;
/**
* Constructor.
*
* @param frame The GLSwing frame for which this helper is managing events.
* @param viewChangeSupport Support for viewer changes.
*/
public GLSwingEventListenerHelper(GLSwingInternalFrame frame, ViewChangeSupport viewChangeSupport)
{
myGLSwingFrame = frame;
myViewChangeSupport = viewChangeSupport;
myInternalFrameListener = new InternalFrameAdapter()
{
@Override
public void internalFrameClosed(InternalFrameEvent e)
{
myGLSwingFrame.handleFrameClosed();
}
};
myGLSwingFrame.getHUDFrame().getInternalFrame().addInternalFrameListener(myInternalFrameListener);
myComponentListener = new InternalFrameComponentListener();
myGLSwingFrame.getHUDFrame().getInternalFrame().addComponentListener(myComponentListener);
myHierarchyListener = new HierarchyListener()
{
@Override
public void hierarchyChanged(HierarchyEvent e)
{
GLSwingEventManager.getInstance().validateRenderOrders();
}
};
myGLSwingFrame.getHUDFrame().getInternalFrame().addHierarchyListener(myHierarchyListener);
myViewChangeListener = new ViewChangeListener()
{
@Override
public void viewChanged(final Viewer viewer, final ViewChangeType type)
{
myGLSwingFrame.handleViewChanged(viewer, type);
}
};
myViewChangeSupport.addViewChangeListener(myViewChangeListener);
myPopListener = new PropertyChangeListener()
{
@Override
public void propertyChange(PropertyChangeEvent evt)
{
myGLSwingFrame.popFrame();
}
};
myGLSwingFrame.getHUDFrame().getInternalFrame().addPropertyChangeListener("framePopped", myPopListener);
myRollupListener = new PropertyChangeListener()
{
@Override
public void propertyChange(PropertyChangeEvent evt)
{
boolean rolled = ((Boolean)evt.getNewValue()).booleanValue();
if (rolled)
{
myGLSwingFrame.windowShadeUp();
}
else
{
myGLSwingFrame.windowShadeDown();
}
}
};
myGLSwingFrame.getHUDFrame().getInternalFrame().addPropertyChangeListener("frameRolledUp", myRollupListener);
}
/** Cleanup listeners which where added to the internal frame. */
protected void close()
{
myGLSwingFrame.getHUDFrame().getInternalFrame().removeComponentListener(myComponentListener);
myGLSwingFrame.getHUDFrame().getInternalFrame().removeInternalFrameListener(myInternalFrameListener);
myGLSwingFrame.getHUDFrame().getInternalFrame().removeHierarchyListener(myHierarchyListener);
myGLSwingFrame.getHUDFrame().getInternalFrame().removePropertyChangeListener("framePopped", myPopListener);
myGLSwingFrame.getHUDFrame().getInternalFrame().removePropertyChangeListener("frameRolledUp", myRollupListener);
myViewChangeSupport.removeViewChangeListener(myViewChangeListener);
}
/**
* Component listener for the JInternalFrame.
*/
private final class InternalFrameComponentListener extends ComponentAdapter
{
@Override
public void componentHidden(ComponentEvent e)
{
myGLSwingFrame.handleComponentHidden(e);
}
@Override
public void componentShown(ComponentEvent e)
{
myGLSwingFrame.handleComponentShown(e);
}
}
}
|
/* eslint-disable no-unused-vars */
import { put, call, fork, all, take } from 'redux-saga/effects';
import {
GET_MARKET_HISTORY_REQUEST,
GET_PROPOSALS_REQUEST,
GET_FAUCET_REQUEST,
GET_GOVERNANCE_VENUS_REQUEST,
GET_PROPOSAL_BY_ID_REQUEST,
GET_VOTERS_REQUEST,
GET_VOTER_DETAIL_REQUEST,
GET_VOTER_HISTORY_REQUEST,
GET_VOTER_ACCOUNTS_REQUEST,
GET_TRANSACTION_HISTORY_REQUEST,
accountActionCreators,
} from 'core/modules/account/actions';
import { restService } from 'utilities';
/**
 * Fetches market history graph data for an asset.
 * Resolves with the response payload on HTTP 200, rejects otherwise.
 */
export function* asyncGetMarketHistoryRequest({ payload, resolve, reject }: $TSFixMe) {
  const { asset, limit, type } = payload;
  let api = `/market_history/graph?asset=${asset}&type=${type}`;
  if (limit) api += `&limit=${limit}`;
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, {
      api,
      method: 'GET',
      params: {},
    });
    if (response.status === 200) {
      resolve(response.data);
    } else {
      // Fixed: a non-200 response previously left the caller's promise
      // unsettled forever; reject for consistency with the sibling sagas.
      reject(response);
    }
  } catch (e) {
    reject(e);
  }
}
/**
 * Fetches XVS governance data.
 * Resolves with the response payload on HTTP 200, rejects otherwise.
 */
export function* asyncGetGovernanceVenusRequest({ resolve, reject }: $TSFixMe) {
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, {
      api: '/governance/venus',
      method: 'GET',
      params: {},
    });
    if (response.status === 200) {
      resolve(response.data);
    } else {
      // Fixed: a non-200 response previously left the caller's promise
      // unsettled forever; reject for consistency with the sibling sagas.
      reject(response);
    }
  } catch (e) {
    reject(e);
  }
}
/** Fetches a paginated list of governance proposals (defaults: limit 5, offset 0). */
export function* asyncGetProposalsRequest({ payload, resolve, reject }: $TSFixMe) {
  const { limit, offset } = payload;
  const api = `/proposals?limit=${limit || 5}&offset=${offset || 0}`;
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, { api, method: 'GET', params: {} });
    if (response.status !== 200) {
      reject(response);
      return;
    }
    resolve(response.data);
  } catch (e) {
    reject(e);
  }
}
/** Requests testnet funds from the faucet and records success in the store. */
export function* asyncGetFaucetRequest({ payload, resolve, reject }: $TSFixMe) {
  const { address, asset, amountType } = payload;
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, {
      api: '/faucet',
      method: 'POST',
      params: { address, asset, amountType },
    });
    if (response.status !== 200) {
      reject(response);
      return;
    }
    // Record the successful faucet request before settling the promise.
    yield put(accountActionCreators.getFromFaucetSuccess());
    resolve(response.data);
  } catch (e) {
    reject(e);
  }
}
/** Fetches a single governance proposal by its id. */
export function* asyncGetProposalByIdRequest({ payload, resolve, reject }: $TSFixMe) {
  const { id } = payload;
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, {
      api: `/proposals/${id}`,
      method: 'GET',
      params: {},
    });
    if (response.status !== 200) {
      reject(response);
      return;
    }
    resolve(response.data);
  } catch (e) {
    reject(e);
  }
}
/** Fetches voters for a proposal, with optional limit/offset pagination. */
export function* asyncGetVotersRequest({ payload, resolve, reject }: $TSFixMe) {
  const { limit, filter, id, offset } = payload;
  try {
    // Assemble the query string; filter is always present, limit/offset only
    // when supplied by the caller.
    const query = [`filter=${filter}`];
    if (limit) {
      query.push(`limit=${limit}`);
    }
    if (offset) {
      query.push(`offset=${offset}`);
    }
    const api = `/voters/${id}?${query.join('&')}`;
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, { api, method: 'GET', params: {} });
    if (response.status !== 200) {
      reject(response);
      return;
    }
    resolve(response.data);
  } catch (e) {
    reject(e);
  }
}
/** Fetches details of a single voter account by address. */
export function* asyncGetVoterDetailRequest({ payload, resolve, reject }: $TSFixMe) {
  const { address } = payload;
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, {
      api: `/voters/accounts/${address}`,
      method: 'GET',
      params: {},
    });
    if (response.status !== 200) {
      reject(response);
      return;
    }
    resolve(response.data);
  } catch (e) {
    reject(e);
  }
}
/** Fetches the voting history of an address (defaults: offset 0, limit 5). */
export function* asyncGetVoterHistoryRequest({ payload, resolve, reject }: $TSFixMe) {
  const { offset, limit, address } = payload;
  const api = `/voters/history/${address}?offset=${offset || 0}&limit=${limit || 5}`;
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, { api, method: 'GET', params: {} });
    if (response.status !== 200) {
      reject(response);
      return;
    }
    resolve(response.data);
  } catch (e) {
    reject(e);
  }
}
/**
 * Fetches a paginated list of voter accounts (defaults: limit 100, offset 0).
 * Resolves with the response payload on HTTP 200, rejects otherwise.
 */
export function* asyncGetVoterAccountsRequest({ payload, resolve, reject }: $TSFixMe) {
  const { limit, offset } = payload;
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, {
      api: `/voters/accounts?limit=${limit || 100}&offset=${offset || 0}`,
      method: 'GET',
      params: {},
    });
    if (response.status === 200) {
      resolve(response.data);
    } else {
      // Fixed: a non-200 response previously left the caller's promise
      // unsettled forever; reject for consistency with the sibling sagas.
      reject(response);
    }
  } catch (e) {
    reject(e);
  }
}
/** Fetches a page of transactions, optionally filtered by event type. */
export function* asyncGetTransactionHistoryRequest({ payload, resolve, reject }: $TSFixMe) {
  const { offset, event } = payload;
  // 'All' means no event filter is sent to the backend.
  const eventQuery = event !== 'All' ? `&event=${event}` : '';
  const api = `/transactions?page=${offset || 0}${eventQuery}`;
  try {
    // @ts-expect-error ts-migrate(7057) untyped result of the redux-saga call effect
    const response = yield call(restService, { api, method: 'GET', params: {} });
    if (response.status !== 200) {
      reject(response);
      return;
    }
    resolve(response.data);
  } catch (e) {
    reject(e);
  }
}
// Each watcher below takes matching actions one at a time and delegates to the
// corresponding worker saga; using `yield*` keeps processing sequential
// (a new action is only taken after the previous worker finishes).
export function* watchGetMarketHistoryRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_MARKET_HISTORY_REQUEST);
    yield* asyncGetMarketHistoryRequest(action);
  }
}
export function* watchGetGovernanceVenusRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_GOVERNANCE_VENUS_REQUEST);
    yield* asyncGetGovernanceVenusRequest(action);
  }
}
export function* watchGetProposalsRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_PROPOSALS_REQUEST);
    yield* asyncGetProposalsRequest(action);
  }
}
export function* watchGetFaucetRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_FAUCET_REQUEST);
    yield* asyncGetFaucetRequest(action);
  }
}
export function* watchGetProposalByIdRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_PROPOSAL_BY_ID_REQUEST);
    yield* asyncGetProposalByIdRequest(action);
  }
}
export function* watchGetVotersRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_VOTERS_REQUEST);
    yield* asyncGetVotersRequest(action);
  }
}
export function* watchGetVoterDetailRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_VOTER_DETAIL_REQUEST);
    yield* asyncGetVoterDetailRequest(action);
  }
}
export function* watchGetVoterHistoryRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_VOTER_HISTORY_REQUEST);
    yield* asyncGetVoterHistoryRequest(action);
  }
}
export function* watchGetVoterAccountsRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_VOTER_ACCOUNTS_REQUEST);
    yield* asyncGetVoterAccountsRequest(action);
  }
}
export function* watchGetTransactionHistoryRequest() {
  for (;;) {
    // @ts-expect-error ts-migrate(7057) untyped result of the take effect
    const action = yield take(GET_TRANSACTION_HISTORY_REQUEST);
    yield* asyncGetTransactionHistoryRequest(action);
  }
}
/** Root saga: forks every account watcher in parallel. */
export default function* saga() {
  const watchers = [
    watchGetMarketHistoryRequest,
    watchGetGovernanceVenusRequest,
    watchGetFaucetRequest,
    watchGetProposalsRequest,
    watchGetProposalByIdRequest,
    watchGetVotersRequest,
    watchGetVoterDetailRequest,
    watchGetVoterHistoryRequest,
    watchGetVoterAccountsRequest,
    watchGetTransactionHistoryRequest,
  ];
  yield all(watchers.map(watcher => fork(watcher)));
}
|
<gh_stars>0
import org.svetovid.io.SvetovidReader;
import org.svetovid.io.SvetovidWriter;
// Reads lines from r1.txt into a queue, drops leading strings shorter than
// 6 characters, and writes the remainder to a user-chosen file.
class Program1 {
    public static void main(String[] args) {
        if (!Svetovid.testIn("r1.txt")) {
            return;
        }
        Red<String> red = new Red<String>();
        // Read the strings from the file into the queue
        SvetovidReader fajl = Svetovid.in("r1.txt");
        while (fajl.hasMore()) {
            red.naKraj(fajl.readLine());
        }
        fajl.close();
        // Remove strings shorter than 6 characters from the front of the queue
        while (!red.jePrazan() && red.prvi().length() < 6) {
            red.izbaciPrvi();
        }
        // Write the remaining elements to the output file
        String filename = Svetovid.in.readToken("U koji fajl zelite da ispisete red:");
        if (!Svetovid.testOut(filename)) {
            return;
        }
        SvetovidWriter fajlOut = Svetovid.out(filename);
        while (!red.jePrazan()) {
            fajlOut.println(red.izbaciPrvi());
        }
        fajlOut.close();
    }
}
/**
 * Bit-shift positions used to pack fragment shading rate flags.
 * NOTE(review): presumably mirrors a native graphics-API enum (fragment
 * shading rate combiners) — confirm against the binding it was generated from.
 */
export enum FragmentShadingRateShift
{
    Vertical2Pixels = 0,
    Vertical4Pixels = 1,
    Horizontal2Pixels = 2,
    Horizontal4Pixels = 3,
    // Sentinel forcing a 32-bit underlying representation.
    Max = 0x7fffffff,
}
<reponame>dreamer01/moonwalk
import * as navigation from "@react-navigation/native";
import { render } from "@testing-library/react-native";
import React from "react";
import { LightThemeProvider } from "../../helpers/testProviders";
import { lightTheme } from "../../theme";
import CalendarCard from "../CalendarCard";
// Stub the navigation theme hook so the component renders with the app's
// light theme regardless of the test environment.
jest.spyOn(navigation, "useTheme");
navigation.useTheme.mockImplementation(() => lightTheme);
describe("CalendarCard", () => {
  it("renders correctly with a title", async () => {
    const { findByText } = render(
      <CalendarCard
        data={{
          name: "TEST_NAME",
          launch_service_provider: { name: "TEST_LSP" },
          net: "000000",
          pad: { name: "TEST_PAD" },
        }}
        isFirst
      />,
      {
        wrapper: LightThemeProvider,
      }
    );
    // net "000000" parses to a January date, so the month label reads "JAN".
    expect(await findByText("JAN")).toBeTruthy();
  });
});
|
import unittest
from unittest.mock import patch
from tmc import points, reflect
from tmc.utils import load, load_module, reload_module, get_stdout, check_source
from functools import reduce
import os
import os.path
import textwrap
import inspect, re
import types
from random import choice, randint, shuffle
exercise = 'src.random_words'

def source_rows(function: callable):
    """Count the meaningful source lines of *function*.

    Splits the source on newlines and semicolons, then ignores blank
    segments and full-line ``#`` comments.
    """
    src = inspect.getsource(function)
    segments = (segment.strip() for segment in re.split('\\n|;', src))
    return len([line for line in segments if line and not line.startswith("#")])
@points('12.random_words')
class RandomWordsTest(unittest.TestCase):
    """TMC checks for the word_generator exercise (src.random_words)."""

    @classmethod
    def setUpClass(cls):
        # Patch input() so an unexpected prompt fails fast instead of blocking.
        with patch('builtins.input', side_effect=[AssertionError("Input was not expected")]):
            cls.module = load_module(exercise, 'fi')

    def test_0a_main_ok(self):
        # All top-level test code must live behind the __main__ guard.
        ok, line = check_source(self.module)
        message = """Code testing the functions must be located after the
        if __name__ == "__main__":
        block. The following line must be moved:
        """
        self.assertTrue(ok, message+line)

    def test_1_function_exists(self):
        try:
            from src.random_words import word_generator
        except Exception as e:
            self.fail(f'Program should have a function called word_generator.')

    def test_2_return_type(self):
        # The function must return a generator, not a list or other iterable.
        try:
            from src.random_words import word_generator
            val = word_generator("abc",2,1)
        except Exception as e:
            self.fail(f"Function threw an error when it was called like this:\n" +
                'word_generator("abc",2,1)\n' +
                f'{e}')
        taip = str(type(val)).replace("<class '","").replace("'>","")
        self.assertTrue(type(val) is types.GeneratorType, f"Function word generator should return a generator," +
            f" now it returns {val} which is of type {taip}\nwhen it is called as\n" +
            'word_generator("abc",2,1)')

    def test_3_test_word_count(self):
        # The generator must yield exactly the requested number of words.
        from src.random_words import word_generator
        test_cases = [("abc",2,3), ("ABCabcDEF",5,10), ("XYZ123456", 4, 7)]
        for test_case in test_cases:
            func = f"random_words{test_case}"
            corr = test_case[2]
            gen = word_generator(test_case[0], test_case[1], test_case[2])
            val = [i for i in gen]
            self.assertEqual(len(val), corr, f'Generator should return {corr} values\n' +
                f'when it is initialized like this:\ngen = {func}\n' +
                f'now it returns values\n' +
                f'{val}')

    def test_4_test_different_words(self):
        # Alphabets are large enough that identical words indicate no randomness.
        from src.random_words import word_generator
        test_cases = [("abcdefghijklmnopqrstuvwxyz",3,2), ("ABCabcDEFdefGHIghi",5,3), ("XYZ123456xyz789", 4, 4)]
        for test_case in test_cases:
            func = f"random_words{test_case}"
            gen = word_generator(test_case[0], test_case[1], test_case[2])
            val = [i for i in gen]
            corr = len(set(val)) != 1
            # Fixed message: it used to interpolate the boolean `corr` as if it
            # were a count of words.
            self.assertTrue(corr, f'Generator should return more than one distinct value\n' +
                f'when it is initialized like this:\ngen = {func}\n' +
                f'now it returns values\n' +
                f'{val}')

    def test_5_test_right_letters(self):
        # Every yielded word must only use letters from the given alphabet.
        from src.random_words import word_generator
        test_cases = [("abcdefg",3,2), ("ABCabcDEFdef",5,3), ("XYZ1234", 4, 4)]
        for test_case in test_cases:
            func = f"random_words{test_case}"
            gen = word_generator(test_case[0], test_case[1], test_case[2])
            val = [i for i in gen]
            c = [[x for x in s if x not in test_case[0]] for s in val]
            # Fixed: the previous reduce(lambda x, y: True and len(y) == 0, c)
            # ignored its accumulator, so only the LAST word was checked (and a
            # one-element list was returned unreduced). Every word must be clean.
            corr = all(len(bad) == 0 for bad in c)
            self.assertTrue(corr, f'Words returned by generator should ' +
                f'only contain letters from a string {test_case[0]}\n' +
                f'when it is initialized like this:\ngen = {func}\n' +
                f'now it returns values\n' +
                f'{val}')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
import re
def validate_methods(methods: dict, pattern: str) -> bool:
    """Return True when every method name in *methods* matches *pattern*.

    The names "constructor" and "destructor" are exempt from the check.
    An invalid *pattern* raises ``re.error`` from ``re.compile``, exactly as
    before (compilation happened outside the old try block).

    Removed dead code: ``re.match`` never raises ``SyntaxError``, so the old
    try/except and the ``is_need_reraise`` flag could never change the result.
    The unused dict values are no longer unpacked.
    """
    class_method_re = re.compile(pattern)
    for method_name in methods:
        if method_name in ("constructor", "destructor"):
            continue
        if not class_method_re.match(method_name):
            return False
    return True
#!/bin/sh
# SLURM batch script: runs the OU-process CPMMH/Kalman Julia experiment on one
# exclusive node. #SBATCH lines are scheduler directives — do not edit casually.
# Set up for run:
# need this since I use a LU project
# #SBATCH -A lu2020-2-7
# #SBATCH -p lu
#SBATCH -A snic2019-3-630 ##SBATCH -A snic2016-x-xxx
# use gpu nodes
#SBATCH -N 1
#SBATCH -n 1
#SBATCH --exclusive
# time consumption HH:MM:SS
#SBATCH -t 1:00:00
# name for script
#SBATCH -J ou_cpmmh_kalman
# controll job outputs
#SBATCH -o lunarc_output/outputs_ou_kalman_%j.out
#SBATCH -e lunarc_output/errors_ou_kalman_%j.err
# notification
#SBATCH --mail-user=samuel.wiqvist@matstat.lu.se
#SBATCH --mail-type=ALL
# load modules
ml load GCC/6.4.0-2.28
ml load OpenMPI/2.1.2
ml load julia/1.0.0
# set correct path
pwd
cd ..
pwd
# Single-threaded Julia run (parallelism handled elsewhere).
export JULIA_NUM_THREADS=1
# run program
julia /home/samwiq/'SDEMEM and CPMMH'/SDEMEM_and_CPMMH/src/'SDEMEM OU process'/run_script_kalman_for_plot_mess_vs_N.jl 19 # seed
package be.kwakeroni.parameters.client.api;
import be.kwakeroni.parameters.client.api.model.EntryType;
import be.kwakeroni.parameters.client.api.model.ParameterGroup;
import be.kwakeroni.parameters.client.api.query.Query;
import java.util.Optional;
/**
 * Retrieves values of business parameters.
 */
public interface BusinessParameters {

    /** Retrieves the value produced by the given query against a parameter group. */
    <ET extends EntryType, T> Optional<T> get(ParameterGroup<ET> group, Query<ET, T> query);

    /**
     * Returns a view of this instance bound to a single parameter group, so
     * queries can be issued without repeating the group argument.
     */
    default <ET extends EntryType> BusinessParameterGroup<ET> forGroup(final ParameterGroup<ET> group) {
        return new BusinessParameterGroup<ET>() {
            @Override
            public String getName() {
                return group.getName();
            }

            @Override
            public <T> Optional<T> get(Query<ET, T> query) {
                return BusinessParameters.this.get(group, query);
            }
        };
    }
}
|
<gh_stars>0
declare const _default: (req: any, res: any) => Promise<void>;
/**
* @oas [get] /draft-orders
* operationId: "GetDraftOrders"
* summary: "List Draft Orders"
 * description: "Retrieves a list of draft orders"
* x-authenticated: true
* tags:
* - Draft Order
* responses:
* 200:
* description: OK
* content:
* application/json:
* schema:
* properties:
* draft_order:
* $ref: "#/components/schemas/draft-order"
*/
export default _default;
/** Query parameters accepted by the GET /draft-orders admin endpoint. */
export declare class AdminGetDraftOrdersParams {
    // Free-text search term — presumably matched against draft-order fields
    // server-side; confirm against the route handler.
    q?: string;
    // Maximum number of draft orders to return (page size).
    limit?: number;
    // Number of draft orders to skip (pagination offset).
    offset?: number;
}
|
<gh_stars>0
package com.doanduyhai.elevator.actors
import akka.actor.{ActorRef, ActorLogging, Actor}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
// State of an elevator: either moving between floors or idle at a floor.
sealed trait ElevatorStatus {
  // The status after one simulation tick.
  def nextStep: ElevatorStatus
  // Whether the elevator is currently between floors.
  def isMoving: Boolean
}
/** Elevator in transit from `currentFloor` toward `targetFloor`. */
case class Move(currentFloor: Int, targetFloor: Int) extends ElevatorStatus {
  if (currentFloor < 0 || targetFloor < 0) throw new IllegalArgumentException("Invalid negative floor")

  /**
   * Advance one floor toward the target; arriving (distance <= 1, which also
   * covers the already-there case) yields `AtFloor(targetFloor)`.
   */
  override def nextStep: ElevatorStatus = {
    val delta = targetFloor - currentFloor
    if (math.abs(delta) <= 1) AtFloor(targetFloor)
    else if (delta > 0) Move(currentFloor + 1, targetFloor)
    else Move(currentFloor - 1, targetFloor)
  }

  override def isMoving = true
}
/** Elevator idle at `floor`; stepping the simulation leaves it in place. */
case class AtFloor(floor: Int) extends ElevatorStatus {
  override def nextStep: ElevatorStatus = AtFloor(floor)

  override def isMoving = false
}
private case class EnRoute(status: ElevatorStatus)
// Actor simulating a single elevator. It advances its own state machine by
// scheduling EnRoute self-messages every `movingSpeed`, accepts at most one
// pending Pickup while moving, and reports every state change to the control
// system.
class ElevatorActor(val elevatorId: Int, controlSystem: ActorRef, private var elevatorStatus: ElevatorStatus,
                    val movingSpeed: FiniteDuration = 10.millisecond, private var scheduledOrder: Option[Pickup]=None)
  extends Actor with ActorLogging {

  // Set once the first status report has been sent (StartSimulation or Pickup).
  var simulationStarted = false
  // True while EnRoute self-messages are in flight driving a movement sequence.
  var simulationOngoing = false

  def receive: Receive = {
    case p @ Pickup(pickup) => {
      if(!simulationStarted) sendStatusToControlSystem
      elevatorStatus match {
        case AtFloor(currentFloor) => {
          if (currentFloor != pickup.currentFloor) {
            // Must travel to the pickup floor first; keep the order for later.
            this.scheduledOrder = Some(p)
            this.elevatorStatus = Move(currentFloor, pickup.currentFloor)
          } else {
            // Already at the pickup floor: head straight for the target.
            this.elevatorStatus = Move(pickup.currentFloor, pickup.targetFloor)
          }
          sendStatusToControlSystem
          if(!simulationOngoing) scheduleNextMove(this.elevatorStatus.nextStep)
        }
        case currentMove @ Move(_,_) => scheduledOrder match {
          case Some(scheduledPickup) =>
            // Only one pending pickup is supported while moving.
            log.error(s"Cannot accept $p because the elevator is moving right now and a pickup $scheduledPickup is already scheduled")
          case None =>
            log.info(s"No pending order, save the pickup order for later")
            this.scheduledOrder = Some(p)
            sendStatusToControlSystem
            if(!simulationOngoing) scheduleNextMove(currentMove.nextStep)
        }
      }
    }
    case enRoute @ EnRoute(state) => {
      // One simulation tick: adopt the pre-computed next state and report it.
      this.elevatorStatus = state
      sendStatusToControlSystem
      state match {
        case Move(_,_) =>
          scheduleNextMove(this.elevatorStatus.nextStep)
        case AtFloor(currentFloor) =>
          computeNextStepFromAtFloor(currentFloor, s"Elevator $elevatorId has reached destination floor : $currentFloor, state = $state")
      }
    }
    case StartSimulation => {
      this.simulationStarted = true
      sendStatusToControlSystem
      this.elevatorStatus match {
        case Move(_,_) =>
          scheduleNextMove(this.elevatorStatus.nextStep)
        case AtFloor(currentFloor) => computeNextStepFromAtFloor(currentFloor, "No order to execute")
      }
    }
    case unknown @ _ => log.error(s"ElevatorActor receiving unknown message $unknown")
  }

  // When idle at `currentFloor`, start serving the scheduled pickup (if any);
  // otherwise end the movement sequence and log `logMsg`.
  def computeNextStepFromAtFloor(currentFloor: Int, logMsg:String): Unit = scheduledOrder match {
    case Some(Pickup(scheduledPickup)) => {
      if (currentFloor != scheduledPickup.currentFloor) {
        scheduleNextMove(Move(currentFloor, scheduledPickup.currentFloor))
      } else {
        // At the pickup floor: consume the order and move toward its target.
        scheduleNextMove(scheduledPickup)
        this.scheduledOrder = None
      }
    }
    case None =>
      this.simulationOngoing = false
      log.info(logMsg)
  }

  // Report the current status and pending order to the control system.
  def sendStatusToControlSystem: Unit = {
    log.debug(s"--------- Send UpdateStatus($elevatorId, $elevatorStatus, $scheduledOrder) to control system, [${Thread.currentThread().getId}]")
    controlSystem ! UpdateStatus(this.elevatorId, this.elevatorStatus, this.scheduledOrder)
  }

  // Schedule the next EnRoute self-message after `movingSpeed`.
  def scheduleNextMove(nextStep: ElevatorStatus): Unit = {
    this.simulationOngoing = true
    log.debug(s"**** Schedule Next Move EnRoute($nextStep), [${Thread.currentThread().getId}]")
    context.system.scheduler.scheduleOnce(movingSpeed, self, EnRoute(nextStep))
  }

  // Store a pickup order for later execution and report the change.
  def savePickupOrder(pickup: Pickup): Unit = {
    this.scheduledOrder = Some(pickup)
    sendStatusToControlSystem
  }
}
|
#!/usr/bin/env bash
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test marking of spent outputs
# Create a transaction graph with four transactions,
# A/B/C/D
# C spends A
# D spends B and C
# Then simulate C being mutated, to create C'
# that is mined.
# A is still (correctly) considered spent.
# B should be treated as unspent
# Require the path to the bitcoind/bitcoin-cli binaries as the first argument.
if [ $# -lt 1 ]; then
  echo "Usage: $0 path_to_binaries"
  echo "e.g. $0 ../../src"
  echo "Env vars BITCOIND and BITCOINCLI may be used to specify the exact binaries used"
  exit 1
fi
# Disable globbing; arguments below contain patterns that must stay literal.
set -f
BITCOIND=${BITCOIND:-${1}/bitcoind}
CLI=${BITCOINCLI:-${1}/bitcoin-cli}
DIR="${BASH_SOURCE%/*}"
SENDANDWAIT="${DIR}/send.sh"
if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi
# Shared helpers: CreateDataDir, GetBlocks, CheckBalance, ...
. "$DIR/util.sh"
# Scratch directory for both node datadirs; removed on success.
D=$(mktemp -d test.XXXXX)
# Two nodes; one will play the part of merchant, the
# other an evil transaction-mutating miner.
D1=${D}/node1
CreateDataDir $D1 port=11000 rpcport=11001
B1ARGS="-datadir=$D1 -debug=mempool"
$BITCOIND $B1ARGS &
B1PID=$!
D2=${D}/node2
CreateDataDir $D2 port=11010 rpcport=11011
B2ARGS="-datadir=$D2 -debug=mempool"
$BITCOIND $B2ARGS &
B2PID=$!
# Block until both nodes report the same block height.
function WaitBlocks {
    declare -i BLOCKS1=-1
    declare -i BLOCKS2=-2
    while (( BLOCKS1 != BLOCKS2 ))
    do
        sleep 1
        BLOCKS1=$( GetBlocks $B1ARGS )
        BLOCKS2=$( GetBlocks $B2ARGS )
    done
}
# Block until the node given by $1 (CLI args) has exactly $2 peers.
function WaitPeers {
    declare -i PEERS=$( $CLI $1 getconnectioncount )
    while (( PEERS != $2 ))
    do
        sleep 1
        PEERS=$( $CLI $1 getconnectioncount )
    done
}
echo "Generating test blockchain..."
# Start with B2 connected to B1:
$CLI $B2ARGS addnode 127.0.0.1:11000 onetry
WaitPeers "$B1ARGS" 1
# 2 block, 50 XBT each == 100 XBT
# These will be transactions "A" and "B"
$CLI $B1ARGS setgenerate true 2
WaitBlocks
# 100 blocks, 0 mature == 0 XBT
$CLI $B2ARGS setgenerate true 100
WaitBlocks
CheckBalance "$B1ARGS" 100
CheckBalance "$B2ARGS" 0
# restart B2 with no connection
# Fixed: output was redirected to the non-existent /rst/null instead of /dev/null.
$CLI $B2ARGS stop > /dev/null 2>&1
wait $B2PID
$BITCOIND $B2ARGS &
B2PID=$!
B1ADDRESS=$( $CLI $B1ARGS getnewaddress )
B2ADDRESS=$( $CLI $B2ARGS getnewaddress )
# Transaction C: send-to-self, spend A
TXID_C=$( $CLI $B1ARGS sendtoaddress $B1ADDRESS 50.0)
# Transaction D: spends B and C
TXID_D=$( $CLI $B1ARGS sendtoaddress $B2ADDRESS 100.0)
CheckBalance "$B1ARGS" 0
# Mutate TXID_C and add it to B2's memory pool:
RAWTX_C=$( $CLI $B1ARGS getrawtransaction $TXID_C )
# ... mutate C to create C'
L=${RAWTX_C:82:2}
NEWLEN=$( printf "%x" $(( 16#$L + 1 )) )
MUTATEDTX_C=${RAWTX_C:0:82}${NEWLEN}4c${RAWTX_C:84}
# ... give mutated tx1 to B2:
MUTATEDTXID=$( $CLI $B2ARGS sendrawtransaction $MUTATEDTX_C )
echo "TXID_C: " $TXID_C
echo "Mutated: " $MUTATEDTXID
# Re-connect nodes, and have both nodes mine some blocks:
$CLI $B2ARGS addnode 127.0.0.1:11000 onetry
WaitPeers "$B1ARGS" 1
# Having B2 mine the next block puts the mutated
# transaction C in the chain:
$CLI $B2ARGS setgenerate true 1
WaitBlocks
# B1 should still be able to spend 100, because D is conflicted
# so does not count as a spend of B
CheckBalance "$B1ARGS" 100
# Fixed: /rst/null -> /dev/null on both shutdown redirects below.
$CLI $B2ARGS stop > /dev/null 2>&1
wait $B2PID
$CLI $B1ARGS stop > /dev/null 2>&1
wait $B1PID
echo "Tests successful, cleaning up"
rm -rf $D
exit 0
|
/**
*
* @creatTime 下午8:26:49
* @author Eddy
*/
package tiger.test.request;
import javax.inject.Inject;
import javax.inject.Named;
import org.eddy.tiger.annotated.Request;
/**
 * Request-scoped bean named "dog"; prints a message and delegates to the
 * injected Cat collaborator.
 *
 * @author Eddy
 */
@Named("dog")
@Request
public class Dog {

    // Injected collaborator — presumably also request-scoped; confirm binding.
    @Inject
    private Cat cat;

    /** Prints "dog" and then invokes the injected cat. */
    public void dog() {
        System.out.println("dog");
        cat.cat();
    }
}
|
// maRadioButton: a group of toggle buttons acting as a radio control.
// Supports plain-value or object item arrays (via itemTextField/itemValueField),
// optional unselection, and pluggable validators.
angular.module('marcuraUI.components').directive('maRadioButton', ['$timeout', 'MaValidators', 'MaHelper', function ($timeout, MaValidators, MaHelper) {
    return {
        restrict: 'E',
        scope: {
            itemTextField: '@',
            itemValueField: '@',
            isDisabled: '@',
            isRequired: '@',
            canUnselect: '@',
            change: '&',
            items: '=',
            itemTemplate: '=',
            value: '=',
            validators: '=',
            instance: '='
        },
        replace: true,
        template: function () {
            // Items are laid out side by side, each taking an equal share of the width.
            var html = '\
            <div class="ma-radio-button" ng-class="{\
                \'ma-radio-button-is-disabled\': isDisabled === \'true\',\
                \'ma-radio-button-is-invalid\': !isValid,\
                \'ma-radio-button-is-touched\': isTouched,\
                \'ma-radio-button-can-unselect\': canUnselect === \'true\'\
            }">\
                <div class="ma-radio-button-item" ng-class="{\
                    \'ma-radio-button-item-is-selected\': isItemSelected(item)\
                }" ng-style="{ width: (100 / items.length) + \'%\' }"\
                    ng-repeat="item in items">\
                    <ma-button\
                        class="ma-button-radio"\
                        text="{{getItemText(item)}}"\
                        simple\
                        size="xs"\
                        is-disabled="{{isDisabled === \'true\'}}"\
                        click="onChange(item)">\
                    </ma-button>\
                </div>\
            </div>';
            return html;
        },
        link: function (scope, element) {
            // Items are treated as objects when either field mapping is supplied.
            var isObjectArray = scope.itemTextField || scope.itemValueField,
                validators = scope.validators ? angular.copy(scope.validators) : [],
                isRequired = scope.isRequired === 'true',
                canUnselect = scope.canUnselect === 'true',
                hasIsNotEmptyValidator = false;
            scope.isFocused = false;
            scope.isValid = true;
            scope.isTouched = false;

            // Runs every configured validator; stops at the first failure.
            var validate = function (value) {
                scope.isValid = true;

                if (validators && validators.length) {
                    for (var i = 0; i < validators.length; i++) {
                        if (!validators[i].validate(value)) {
                            scope.isValid = false;
                            break;
                        }
                    }
                }
            };

            // Display text: template function wins, then raw value, then text field.
            scope.getItemText = function (item) {
                if (scope.itemTemplate) {
                    return scope.itemTemplate(item);
                } else if (!isObjectArray) {
                    return item;
                } else if (scope.itemTextField) {
                    return item[scope.itemTextField];
                }
            };

            // Selection test: identity for plain values, key equality for objects.
            scope.isItemSelected = function (item) {
                if (!isObjectArray) {
                    return item === scope.value;
                } else if (scope.itemValueField) {
                    return item && scope.value &&
                        item[scope.itemValueField] === scope.value[scope.itemValueField];
                }

                return false;
            };
scope.onChange = function (item) {
if (scope.isDisabled === 'true') {
return;
}
var oldValue = scope.value,
hasChanged = true;
scope.value = item;
// Check that value has changed.
if (!isObjectArray) {
hasChanged = oldValue !== item;
} else if (scope.itemValueField) {
if (MaHelper.isNullOrUndefined(oldValue) && !MaHelper.isNullOrUndefined(item[scope.itemValueField])) {
hasChanged = true;
} else {
hasChanged = oldValue[scope.itemValueField] !== item[scope.itemValueField];
}
} else {
// Compare objects if itemValueField is not provided.
if (MaHelper.isNullOrUndefined(oldValue) && !MaHelper.isNullOrUndefined(item)) {
hasChanged = true;
} else {
hasChanged = JSON.stringify(oldValue) === JSON.stringify(item);
}
}
// Remove selection if the same item is selected.
if (canUnselect && !hasChanged) {
scope.value = null;
}
if (hasChanged || (canUnselect && !hasChanged)) {
$timeout(function () {
validate(scope.value);
scope.change({
maValue: scope.value,
maOldValue: oldValue
});
});
}
};
// Set up validators.
for (var i = 0; i < validators.length; i++) {
if (validators[i].name === 'IsNotEmpty') {
hasIsNotEmptyValidator = true;
break;
}
}
if (!hasIsNotEmptyValidator && isRequired) {
validators.unshift(MaValidators.isNotEmpty());
}
if (hasIsNotEmptyValidator) {
isRequired = true;
}
// Prepare API instance.
if (scope.instance) {
scope.instance.isInitialized = true;
scope.instance.isEditor = function () {
return true;
};
scope.instance.isValid = function () {
return scope.isValid;
};
scope.instance.validate = function () {
validate(scope.value);
};
}
}
};
}]); |
<reponame>Houserqu/datav-admin<gh_stars>1-10
import React, { PureComponent } from 'react';
import { Layout } from 'antd';
import { connect } from 'dva';
import styles from './DesignLayout.less';
const { Content } = Layout;
@connect(({ design, loading }) => ({
design,
categoryLoading: loading.effects['design/fetchCategoryComponents'],
detailLoading: loading.effects['design/fetchAppDetail'],
}))
class VisitorLayout extends PureComponent {
render() {
const { children } = this.props;
return (
<div>
<Layout className={styles.layout}>
<Content className={styles.content}>{children}</Content>
</Layout>
</div>
);
}
}
export default VisitorLayout;
|
<gh_stars>0
package router
import (
"github.com/779789571/gin-vue-admin/server/router/autocode"
"github.com/779789571/gin-vue-admin/server/router/example"
"github.com/779789571/gin-vue-admin/server/router/system"
)
// RouterGroup aggregates the per-module router groups of the application.
type RouterGroup struct {
	System   system.RouterGroup
	Example  example.RouterGroup
	Autocode autocode.RouterGroup
}

// RouterGroupApp is the shared, ready-to-use router group instance.
var RouterGroupApp = &RouterGroup{}
|
# Component exercising several render-node kinds in one place — plain text,
# a translated string, and a nested component. The name suggests it is a
# test/demo fixture for the Lucid framework; confirm against its callers.
class AllTypesComponent < LucidComponent::Base
  include LucidTranslation::Mixin # provides the gettext-style _() helper used below

  render do
    DIV 'Rendered!'
    DIV _('simple')    # looked up through the translation mixin
    DIV 'abcdef'
    NavigationLinks()  # nested component, defined elsewhere
  end
end
#!/bin/bash -e
# Thin wrapper that defines the image name/version for this component and
# delegates to the shared build script. IMAGE and VERSION look unused here,
# but are presumably read by the sourced build.sh — confirm before removing.
IMAGE="xeone3-ubuntu1604-analytics-ffmpeg"
VERSION="1.2"
# Absolute directory of this script, following symlinks.
DIR=$(dirname $(readlink -f "$0"))
# Source (not exec) so IMAGE/VERSION/DIR are visible to the shared script.
. "${DIR}/../../../../script/build.sh"
<reponame>JacobLinCool/kimlimjustin.com
import Image from 'next/image';
import { ABOUT_ME, METRICS_SRC } from '../../Util/constant';
const AboutMe = () => {
return (
<>
<h2 className="about-heading" id="about">
About me
</h2>
<div className="about">
<div className="about-metrics">
<Image
loader={() => METRICS_SRC}
src={METRICS_SRC}
width="100%"
height="100%"
layout="responsive"
objectFit="contain"
alt="<NAME>'s metric"
/>
</div>
<div className="about-description">{ABOUT_ME}</div>
</div>
</>
);
};
export default AboutMe;
|
/*
This file is part of Peers, a java SIP softphone.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Copyright 2010 <NAME>
*/
package net.kislay.goasat.media;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.List;
import net.kislay.goasat.Logger;
import net.kislay.goasat.rtp.RtpPacket;
import net.kislay.goasat.rtp.RtpSession;
import net.kislay.goasat.sdp.Codec;
import net.kislay.goasat.sip.core.useragent.UserAgent;
/**
 * Manages the media (RTP audio) leg of a SIP call on behalf of a
 * {@link UserAgent}. Depending on the user agent's media mode it either
 * captures and plays audio through a {@link CaptureRtpSender} /
 * {@link IncomingRtpReader} pair, echoes incoming RTP back to the sender,
 * or does nothing. It can also inject DTMF digits as RTP packets and tear
 * the RTP session down.
 */
public class MediaManager {

    /** Default RTP clock rate in Hz. */
    public static final int DEFAULT_CLOCK = 8000; // Hz

    private UserAgent userAgent;
    private CaptureRtpSender captureRtpSender;   // outgoing audio -> RTP
    private IncomingRtpReader incomingRtpReader; // incoming RTP -> playback
    private RtpSession rtpSession;               // current RTP session, null when idle
    private DtmfFactory dtmfFactory;
    private Logger logger;

    /**
     * @param userAgent owning user agent; supplies media mode, sound manager,
     *                  RTP port and configuration
     * @param logger    destination for error/debug messages
     */
    public MediaManager(UserAgent userAgent, Logger logger) {
        this.userAgent = userAgent;
        this.logger = logger;
        dtmfFactory = new DtmfFactory();
    }

    /**
     * Sets up the media path after a success response (judging by the name;
     * confirm against the SIP call flow in the caller). In
     * captureAndPlayback mode this opens the sound lines, creates a new RTP
     * session bound to {@code localAddress}, points it at
     * {@code remoteAddress:remotePort} and starts both the capture sender
     * and the incoming reader. In echo mode it starts an {@link Echo} loop
     * instead. Errors are logged and abort the setup (no exception thrown).
     *
     * @param localAddress  local IP to bind the RTP session to
     * @param remoteAddress remote RTP peer address
     * @param remotePort    remote RTP peer port
     * @param codec         negotiated codec for capture/playback
     */
    public void successResponseReceived(String localAddress,
            String remoteAddress, int remotePort, Codec codec) {
        switch (userAgent.getMediaMode()) {
        case captureAndPlayback:
            //TODO this could be optimized, create captureRtpSender at stack init
            // and just retrieve it here
            SoundManager soundManager = userAgent.getSoundManager();
            soundManager.openAndStartLines();
            InetAddress inetAddress;
            try {
                inetAddress = InetAddress.getByName(localAddress);
            } catch (UnknownHostException e) {
                logger.error("unknown host: " + localAddress, e);
                return;
            }
            rtpSession = new RtpSession(inetAddress, userAgent.getRtpPort(),
                    userAgent.isMediaDebug(), logger, userAgent.getPeersHome());
            try {
                inetAddress = InetAddress.getByName(remoteAddress);
                rtpSession.setRemoteAddress(inetAddress);
            } catch (UnknownHostException e) {
                // remote address unresolved: logged but setup continues,
                // so packets may go nowhere until the address is set
                logger.error("unknown host: " + remoteAddress, e);
            }
            rtpSession.setRemotePort(remotePort);
            try {
                captureRtpSender = new CaptureRtpSender(rtpSession,
                        soundManager, userAgent.isMediaDebug(), codec, logger,
                        userAgent.getPeersHome());
            } catch (IOException e) {
                logger.error("input/output error", e);
                return;
            }
            try {
                captureRtpSender.start();
            } catch (IOException e) {
                logger.error("input/output error", e);
            }
            try {
                //TODO retrieve port from SDP offer
                // incomingRtpReader = new IncomingRtpReader(localAddress,
                //         Utils.getInstance().getRtpPort(),
                //         remoteAddress, remotePort);
                incomingRtpReader = new IncomingRtpReader(
                        captureRtpSender.getRtpSession(), soundManager, codec,
                        logger);
            } catch (IOException e) {
                logger.error("input/output error", e);
                return;
            }
            incomingRtpReader.start();
            break;
        case echo:
            // echo mode: mirror incoming RTP back to the remote peer
            Echo echo;
            try {
                echo = new Echo(localAddress, userAgent.getRtpPort(),
                        remoteAddress, remotePort, logger);
            } catch (UnknownHostException e) {
                logger.error("unknown host amongst "
                        + localAddress + " or " + remoteAddress);
                return;
            }
            userAgent.setEcho(echo);
            Thread echoThread = new Thread(echo);
            echoThread.start();
            break;
        case none:
        default:
            break;
        }
    }

    /**
     * (Re)builds the media path when an ACK arrives. Unlike
     * {@link #successResponseReceived}, this first stops any existing RTP
     * session (busy-waiting until its socket closes) and any running capture
     * sender, and recycles the sound lines, before creating a fresh session
     * toward {@code destAddress:destPort}. Errors are logged and abort the
     * setup.
     *
     * @param destAddress remote RTP peer address
     * @param destPort    remote RTP peer port
     * @param codec       negotiated codec for capture/playback
     */
    public void handleAck(String destAddress, int destPort, Codec codec) {
        switch (userAgent.getMediaMode()) {
        case captureAndPlayback:
            //TODO this could be optimized, create captureRtpSender at stack init
            // and just retrieve it here
            if (rtpSession != null) {
                // stop the previous session and wait (15 ms polls) for its
                // socket to actually close before rebinding the port
                rtpSession.stop();
                while (!rtpSession.isSocketClosed()) {
                    try {
                        Thread.sleep(15);
                    } catch (InterruptedException e) {
                        logger.debug("sleep interrupted");
                    }
                }
            }
            if (captureRtpSender != null) {
                captureRtpSender.stop();
            }
            SoundManager soundManager = userAgent.getSoundManager();
            soundManager.closeLines();
            soundManager.openAndStartLines();
            rtpSession = new RtpSession(userAgent.getConfig()
                    .getLocalInetAddress(), userAgent.getRtpPort(),
                    userAgent.isMediaDebug(), logger, userAgent.getPeersHome());
            try {
                InetAddress inetAddress = InetAddress.getByName(destAddress);
                rtpSession.setRemoteAddress(inetAddress);
            } catch (UnknownHostException e) {
                logger.error("unknown host: " + destAddress, e);
            }
            rtpSession.setRemotePort(destPort);
            try {
                captureRtpSender = new CaptureRtpSender(rtpSession,
                        soundManager, userAgent.isMediaDebug(), codec, logger,
                        userAgent.getPeersHome());
            } catch (IOException e) {
                logger.error("input/output error", e);
                return;
            }
            try {
                captureRtpSender.start();
            } catch (IOException e) {
                logger.error("input/output error", e);
            }
            try {
                //TODO retrieve port from SDP offer
                // incomingRtpReader = new IncomingRtpReader(localAddress,
                //         Utils.getInstance().getRtpPort(),
                //         remoteAddress, remotePort);
                //FIXME RTP sessions can be different !
                incomingRtpReader = new IncomingRtpReader(rtpSession,
                        soundManager, codec, logger);
            } catch (IOException e) {
                logger.error("input/output error", e);
                return;
            }
            incomingRtpReader.start();
            break;
        case echo:
            Echo echo;
            try {
                echo = new Echo(userAgent.getConfig().getLocalInetAddress()
                        .getHostAddress(),
                        userAgent.getRtpPort(), destAddress, destPort, logger);
            } catch (UnknownHostException e) {
                logger.error("unknown host amongst "
                        + userAgent.getConfig().getLocalInetAddress()
                        .getHostAddress() + " or " + destAddress);
                return;
            }
            userAgent.setEcho(echo);
            Thread echoThread = new Thread(echo);
            echoThread.start();
            break;
        case none:
        default:
            break;
        }
    }

    /**
     * Converts {@code digit} into DTMF RTP packets and queues them on the
     * active capture sender. No-op when no media session is running.
     *
     * @param digit DTMF digit to send (e.g. '0'-'9', '*', '#')
     */
    public void sendDtmf(char digit) {
        if (captureRtpSender != null) {
            List<RtpPacket> rtpPackets = dtmfFactory.createDtmfPackets(digit);
            RtpSender rtpSender = captureRtpSender.getRtpSender();
            rtpSender.pushPackets(rtpPackets);
        }
    }

    /**
     * Tears down the current media session: stops the RTP session (waiting
     * for its socket to close), drops the incoming reader reference, and
     * stops the capture sender. Safe to call when nothing is running.
     */
    public void stopSession() {
        if (rtpSession != null) {
            rtpSession.stop();
            while (!rtpSession.isSocketClosed()) {
                try {
                    Thread.sleep(15);
                } catch (InterruptedException e) {
                    logger.debug("sleep interrupted");
                }
            }
            rtpSession = null;
        }
        if (incomingRtpReader != null) {
            incomingRtpReader = null;
        }
        if (captureRtpSender != null) {
            captureRtpSender.stop();
            captureRtpSender = null;
        }
    }
}
|
export * from './lib/pmp-web-shared-ui-picture-label.module';
|
#!/bin/bash
set -o errexit
set -o nounset
set -o pipefail
set -o errtrace
CRIPROXY_DEB_URL="${CRIPROXY_DEB_URL:-https://github.com/Mirantis/criproxy/releases/download/v0.14.0/criproxy-nodeps_0.14.0_amd64.deb}"
VIRTLET_IMAGE="${VIRTLET_IMAGE:-mirantis/virtlet}"
VIRTLET_SKIP_RSYNC="${VIRTLET_SKIP_RSYNC:-}"
VIRTLET_SKIP_VENDOR="${VIRTLET_SKIP_VENDOR:-false}"
VIRTLET_RSYNC_PORT="${VIRTLET_RSYNC_PORT:-18730}"
VIRTLET_ON_MASTER="${VIRTLET_ON_MASTER:-}"
VIRTLET_MULTI_NODE="${VIRTLET_MULTI_NODE:-}"
# XXX: try to extract the docker socket path from DOCKER_HOST if it's set to unix://...
DOCKER_SOCKET_PATH="${DOCKER_SOCKET_PATH:-/var/run/docker.sock}"
FORCE_UPDATE_IMAGE="${FORCE_UPDATE_IMAGE:-}"
IMAGE_REGEXP_TRANSLATION="${IMAGE_REGEXP_TRANSLATION:-1}"
GH_RELEASE_TEST_USER="ivan4th"
DIND_CRI="${DIND_CRI:-containerd}"
MKDOCS_SERVE_ADDRESS="${MKDOCS_SERVE_ADDRESS:-localhost:8042}"
# Note that project_dir must not end with slash
project_dir="$(cd "$(dirname "${BASH_SOURCE}")/.." && pwd)"
virtlet_image="mirantis/virtlet"
remote_project_dir="/go/src/github.com/Mirantis/virtlet"
build_name="virtlet_build"
tmp_container_name="${build_name}-$(openssl rand -hex 16)"
build_image=${build_name}:latest
volume_name=virtlet_src
rsync_git=y
exclude=(
--exclude 'vendor'
--exclude .git
--exclude _output
--exclude '*.png'
)
rsync_pw_file="${project_dir}/_output/rsync.password"
busybox_image=busybox:1.27.2
# Decide which DIND nodes run Virtlet.
virtlet_nodes=()
if [[ ${VIRTLET_ON_MASTER} ]]; then
  virtlet_nodes+=(kube-master)
fi
# Add the first worker unless Virtlet runs exclusively on the master.
# BUG FIX: the original used '!${VIRTLET_ON_MASTER}' (no space after '!'),
# which bash evaluates as a non-empty literal string ("!" or "!y"), making
# this condition unconditionally true. '!' must be a separate word.
if [[ ! ${VIRTLET_ON_MASTER} || ${VIRTLET_MULTI_NODE} ]]; then
  virtlet_nodes+=(kube-node-1)
fi
if [[ ${VIRTLET_MULTI_NODE} ]]; then
  virtlet_nodes+=(kube-node-2)
fi
bindata_modtime=1522279343
bindata_out="pkg/tools/bindata.go"
bindata_dir="deploy/data"
bindata_pkg="tools"
ldflags=()
go_package=github.com/Mirantis/virtlet
# Build the jq filter that rewrites every container and initContainer image
# of the first manifest item to "mirantis/virtlet:<tag>". Prints the filter
# without a trailing newline.
function image_tags_filter {
  local tag="${1}"
  local path=".items[0].spec.template.spec."
  local patch="|=map(.image=\"mirantis/virtlet:${tag}\")"
  printf '%s' "${path}containers${patch}|${path}initContainers${patch}"
}
# from build/common.sh in k8s
function rsync_probe {
# Wait unil rsync is up and running.
local tries=20
while (( ${tries} > 0 )) ; do
if rsync "rsync://k8s@${1}:${2}/" \
--password-file="${project_dir}/_output/rsyncd.password" \
&> /dev/null ; then
return 0
fi
tries=$(( ${tries} - 1))
sleep 0.1
done
return 1
}
# Return 0 if a local docker image named $1 exists, non-zero otherwise.
# Output of 'docker history' is discarded; only its exit status matters.
function image_exists {
  local name="${1}"
  # can't use 'docker images -q' due to https://github.com/docker/docker/issues/28895
  docker history -q "${name}" >& /dev/null || return 1
}
function update_dockerfile_from {
local dockerfile="${1}"
local from_dockerfile="${2}"
local dest_var="${3:-}"
local cur_from="$(awk '/^FROM /{print $2}' "${dockerfile}")"
if [[ ${cur_from} =~ (^.*:.*-)([0-9a-f]) ]]; then
new_from="${BASH_REMATCH[1]}$(md5sum ${from_dockerfile} | sed 's/ .*//')"
if [[ ${new_from} != ${cur_from} ]]; then
sed -i "s@^FROM .*@FROM ${new_from}@" "${dockerfile}"
fi
if [[ ${dest_var} ]]; then
eval "${dest_var}=${new_from}"
fi
else
echo >&2 "*** ERROR: can't update FROM in ${dockerfile}: unexpected value: '${cur_from}'"
return 1
fi
}
function ensure_build_image {
update_dockerfile_from "${project_dir}/images/Dockerfile.build-base" "${project_dir}/images/Dockerfile.virtlet-base" virtlet_base_image
update_dockerfile_from "${project_dir}/images/Dockerfile.build" "${project_dir}/images/Dockerfile.build-base" build_base_image
update_dockerfile_from "${project_dir}/images/Dockerfile.virtlet" "${project_dir}/images/Dockerfile.virtlet-base"
if ! image_exists "${build_image}"; then
if ! image_exists "${build_base_image}"; then
if ! image_exists "${virtlet_base_image}"; then
echo >&2 "Trying to pull the base image ${virtlet_base_image}..."
if ! docker pull "${virtlet_base_image}"; then
docker build -t "${virtlet_base_image}" -f "${project_dir}/images/Dockerfile.virtlet-base" "${project_dir}/images"
fi
fi
echo >&2 "Trying to pull the build base image ${build_base_image}..."
if ! docker pull "${build_base_image}"; then
docker build -t "${build_base_image}" \
--label virtlet_image=build-base \
-f "${project_dir}/images/Dockerfile.build-base" "${project_dir}/images"
fi
fi
tar -C "${project_dir}/images" -c image_skel/ qemu-build.conf Dockerfile.build |
docker build -t "${build_image}" -f Dockerfile.build -
fi
}
function get_rsync_addr {
local container_ip
container_ip=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' virtlet-build)
# Sometimes we can reach rsync through localhost and a NAT'd port. Other
# times (when we are running in another docker container on the Jenkins
# machines) we have to talk directly to the container IP. There is no one
# strategy that works in all cases so we test to figure out which situation we
# are in.
if rsync_probe 127.0.0.1 "${VIRTLET_RSYNC_PORT}"; then
echo "127.0.0.1:${VIRTLET_RSYNC_PORT}" >"${project_dir}/_output/rsync_addr"
return 0
elif rsync_probe "${container_ip}" ${VIRTLET_RSYNC_PORT}; then
echo "${container_ip}:${VIRTLET_RSYNC_PORT}" >"${project_dir}/_output/rsync_addr"
return 0
else
echo "Could not probe the rsync port" >&2
fi
}
# Ensure the long-lived "virtlet-build" container is running, creating it
# (and its image) if necessary, and set RSYNC_ADDR unless rsync is skipped.
function ensure_build_container {
  if ! docker ps --filter=label=virtlet_build | grep -q virtlet-build; then
    ensure_build_image
    cd "${project_dir}"
    # Need to mount docker socket into the container because of
    # CRI proxy deployment tests & building the image
    # We also pass --tmpfs /tmp because log tailing doesn't work
    # on overlayfs. This breaks 'go test' though unless we also
    # remount /tmp with exec option (it creates and runs executable files
    # under /tmp)
    declare -a docker_cert_args=()
    if [[ ${DOCKER_CERT_PATH:-} ]]; then
      docker_cert_args=(-e DOCKER_CERT_PATH=/docker-cert)
    fi
    # BUG FIX below: CIRCLE_BRANCH was forwarded from CIRCLE_PULL_REQUEST
    # (copy-paste error); it now forwards the actual CIRCLE_BRANCH value.
    docker run -d --privileged --net=host \
           -l virtlet_build \
           -v "virtlet_src:${remote_project_dir}" \
           -v "virtlet_pkg:/go/pkg" \
           -v /sys/fs/cgroup:/sys/fs/cgroup \
           -v /lib/modules:/lib/modules:ro \
           -v /boot:/boot:ro \
           -v "${DOCKER_SOCKET_PATH}:/var/run/docker.sock" \
           -e DOCKER_MACHINE_NAME="${DOCKER_MACHINE_NAME:-}" \
           -e DOCKER_TLS_VERIFY="${DOCKER_TLS_VERIFY:-}" \
           -e TRAVIS="${TRAVIS:-}" \
           -e TRAVIS_PULL_REQUEST="${TRAVIS_PULL_REQUEST:-}" \
           -e TRAVIS_BRANCH="${TRAVIS_BRANCH:-}" \
           -e CIRCLECI="${CIRCLECI:-}" \
           -e CIRCLE_PULL_REQUEST="${CIRCLE_PULL_REQUEST:-}" \
           -e CIRCLE_BRANCH="${CIRCLE_BRANCH:-}" \
           -e VIRTLET_ON_MASTER="${VIRTLET_ON_MASTER:-}" \
           -e VIRTLET_MULTI_NODE="${VIRTLET_MULTI_NODE:-}" \
           -e GITHUB_TOKEN="${GITHUB_TOKEN:-}" \
           -e MKDOCS_SERVE_ADDRESS="${MKDOCS_SERVE_ADDRESS:-}" \
           -e VIRTLET_SKIP_VENDOR="${VIRTLET_SKIP_VENDOR:-false}" \
           ${docker_cert_args[@]+"${docker_cert_args[@]}"} \
           --name virtlet-build \
           --tmpfs /tmp \
           "${build_image}" \
           /bin/bash -c "mount /tmp -o remount,exec && sleep Infinity" >/dev/null
    if [[ ! ${VIRTLET_SKIP_RSYNC} ]]; then
      # from build/common.sh in k8s
      mkdir -p "${project_dir}/_output"
      dd if=/dev/urandom bs=512 count=1 2>/dev/null | LC_ALL=C tr -dc 'A-Za-z0-9' | dd bs=32 count=1 2>/dev/null >"${rsync_pw_file}"
      chmod 600 "${rsync_pw_file}"
      docker cp "${rsync_pw_file}" virtlet-build:/rsyncd.password
      docker exec -d -i virtlet-build /rsyncd.sh "${VIRTLET_RSYNC_PORT}"
      get_rsync_addr
    fi
    if [[ ${DOCKER_CERT_PATH:-} ]]; then
      tar -C "${DOCKER_CERT_PATH}" -c . | docker exec -i virtlet-build /bin/bash -c 'mkdir /docker-cert && tar -C /docker-cert -x'
    fi
  fi
  if [[ ! ${VIRTLET_SKIP_RSYNC} ]]; then
    RSYNC_ADDR="$(cat "${project_dir}/_output/rsync_addr")"
  fi
}
function vsh {
ensure_build_container
cd "${project_dir}"
docker exec -it virtlet-build env TERM=xterm bash
}
function sync_source {
ensure_build_container
cd "${project_dir}"
if [[ ! ${VIRTLET_SKIP_RSYNC} ]]; then
local -a filters=(
--filter '- /_output/'
)
if [[ ${VIRTLET_SKIP_VENDOR:-false} != "true" ]]; then
filters+=(--filter '- /vendor/')
fi
if [[ ! ${rsync_git} ]]; then
filters+=(--filter '- /.git/')
fi
rsync "${filters[@]}" \
--password-file "${project_dir}/_output/rsync.password" \
-a --delete --compress-level=9 \
"${project_dir}/" "rsync://virtlet@${RSYNC_ADDR}/virtlet/"
fi
}
function vcmd {
sync_source >&2
local t=""
if [[ ${USE_TERM:-} ]]; then
t="t"
fi
docker exec -i"${t}" virtlet-build bash -c "$*"
}
function vcmd_simple {
local cmd="${1}"
docker exec -i virtlet-build bash -c "${cmd}"
}
function stop {
docker ps -a -q --filter=label=virtlet_build | while read container_id; do
echo >&2 "Removing container:" "${container_id}"
docker rm -fv "${container_id}"
done
}
function copy_output {
ensure_build_container
cd "${project_dir}"
vcmd_simple "tar -C '${remote_project_dir}' -cz \$(find . -path '*/_output/*' -type f)" | tar -xvz
}
function copy_back {
ensure_build_container
cd "${project_dir}"
tar -cz $(find . -path '*/_output/*' -type f | grep -v rsync) | vcmd_simple "tar -C '${remote_project_dir}' -xvz"
}
function copy_dind_internal {
local virtlet_node="${1}"
if ! docker volume ls -q | grep -q "^kubeadm-dind-${virtlet_node}$"; then
echo "No active or snapshotted kubeadm-dind-cluster" >&2
exit 1
fi
tar -C _output -c . |
docker run -i --rm \
-v "kubeadm-dind-${virtlet_node}:/dind" \
--name ${tmp_container_name} \
"${busybox_image}" \
/bin/sh -c 'tar -C /dind -xv && chmod ug+s /dind/vmwrapper'
}
function kvm_ok {
# The check is done inside the virtlet node container because it
# has proper /lib/modules from the docker host. Also, it'll have
# to use the virtlet image later anyway.
# Use kube-master node as all of the DIND nodes in the cluster are similar
if ! docker exec kube-master docker run --privileged --rm -v /lib/modules:/lib/modules "${VIRTLET_IMAGE}" kvm-ok; then
return 1
fi
}
function prepare_node {
local node="${1}"
if docker exec "${node}" dpkg-query -W criproxy-nodeps >&/dev/null; then
return 0
fi
ensure_build_container
echo >&2 "Installing CRI proxy package in the node container (${node})..."
if [[ ${DIND_CRI:-} = containerd ]]; then
docker exec "${node}" /bin/bash -c 'echo criproxy-nodeps criproxy/primary_cri select containerd | debconf-set-selections'
fi
docker exec "${node}" /bin/bash -c "curl -sSL '${CRIPROXY_DEB_URL}' >/criproxy.deb && dpkg -i /criproxy.deb && rm /criproxy.deb"
docker exec "${node}" mount --make-shared /dind
docker exec "${node}" mount --make-shared /dev
docker exec "${node}" mount --make-shared /boot
docker exec "${node}" mount --make-shared /sys/fs/cgroup
if [[ ${VIRTLET_ON_MASTER} ]]; then
if [[ $(kubectl get node kube-master -o jsonpath='{.spec.taints[?(@.key=="node-role.kubernetes.io/master")]}') ]]; then
kubectl taint nodes kube-master node-role.kubernetes.io/master-
fi
fi
if [[ ${FORCE_UPDATE_IMAGE} ]] || ! docker exec "${node}" docker history -q "${virtlet_image}:latest" >&/dev/null; then
echo >&2 "Propagating Virtlet image to the node container..."
if [[ ${DIND_CRI} = containerd ]]; then
vcmd "docker save '${virtlet_image}:latest' | docker exec -i '${node}' ctr -n k8s.io images import -"
else
vcmd "docker save '${virtlet_image}:latest' | docker exec -i '${node}' docker load"
fi
fi
}
function prepare_all_nodes {
for node in $(kubectl get nodes -o jsonpath='{.items[?(@.metadata.name!="kube-master")].metadata.name}'); do
prepare_node "${node}"
done
}
function apply_runtime_label {
local node="${1}"
kubectl label node --overwrite "${node}" extraRuntime=virtlet
}
function start_dind {
local -a virtlet_config=(--from-literal=image_regexp_translation="${IMAGE_REGEXP_TRANSLATION}")
if ! kvm_ok || [[ ${VIRTLET_DISABLE_KVM:-} ]]; then
virtlet_config+=(--from-literal=disable_kvm=y)
fi
kubectl create configmap -n kube-system virtlet-config "${virtlet_config[@]}"
kubectl create configmap -n kube-system virtlet-image-translations --from-file "${project_dir}/deploy/images.yaml"
start_virtlet
}
function start_virtlet {
local -a opts=(--dev)
if kubectl version | tail -n1 | grep -q 'v1\.7\.'; then
# apply mount propagation hacks for 1.7
opts+=(--compat)
fi
docker exec virtlet-build "${remote_project_dir}/_output/virtletctl" gen "${opts[@]}" |
kubectl apply -f -
}
function virtlet_subdir {
local dir="${1:-$(pwd)}"
local prefix="${project_dir}/"
if [[ ${#dir} -lt ${#prefix} || ${dir:0:${#prefix}} != ${prefix} ]]; then
echo >&2 "must be in a project subdir"
exit 1
fi
echo -n "${dir:${#prefix}}"
}
function clean {
stop
docker volume rm virtlet_src || true
docker volume rm virtlet_pkg || true
docker rmi "${build_image}" || true
# find command may produce zero results
# -exec rm -rf '{}' ';' produces errors when trying to
# enter deleted directories
find . -name _output -type d | while read dir; do
rm -rf "${dir}"
done
}
function gotest {
# FIXME: exit 1 in $(virtlet_subdir) doesn't cause the script to exit
virtlet_subdir >/dev/null
subdir="$(virtlet_subdir)"
if ! vcmd "cd '${subdir}' && go test $*"; then
vcmd_simple "find . -name 'Test*.out.*' | xargs tar -c -T -" | tar -C "${project_dir}" -x
exit 1
fi
}
function gobuild {
# FIXME: exit 1 in $(virtlet_subdir) doesn't cause the script to exit
virtlet_subdir >/dev/null
# -gcflags -e removes the limit on error message count, which helps
# with using it for syntax checking
vcmd "cd '$(virtlet_subdir)' && go build -gcflags -e $*"
}
function build_image_internal {
build_internal
tar -c _output -C "${project_dir}/images" image_skel/ Dockerfile.virtlet |
docker build -t "${virtlet_image}" -f Dockerfile.virtlet -
}
function install_vendor_internal {
if [ ! -d vendor ]; then
glide install --strip-vendor
fi
}
function run_tests_internal {
install_vendor_internal
go test -v ./pkg/... ./tests/network/...
}
function run_integration_internal {
install_vendor_internal
( cd tests/integration && ./go.test )
}
function get_ldflags {
# XXX: use kube::version::ldflag (-ldflags -X package.Var=...)
# see also versioning.mk in helm
# https://stackoverflow.com/questions/11354518/golang-application-auto-build-versioning
# see pkg/version/version.go in k8s
# for GoVersion / Compiler / Platform
local vfile="${project_dir}/pkg/version/version.go"
local git_version="$(git describe --tags --abbrev=14 'HEAD^{commit}' | sed "s/-g\([0-9a-f]\{14\}\)$/+\1/")"
local git_commit="$(git rev-parse "HEAD^{commit}")"
local git_tree_state=$([[ $(git status --porcelain) ]] && echo "dirty" || echo "clean")
if [[ ${git_tree_state} == dirty ]]; then
git_version+="-dirty"
fi
local build_date="$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
local git_major=""
local git_minor=""
local version_pkg="${go_package}/pkg/version"
local ldflags=(-X "${version_pkg}.gitVersion=${git_version}"
-X "${version_pkg}.gitCommit=${git_commit}"
-X "${version_pkg}.gitTreeState=${git_tree_state}"
-X "${version_pkg}.buildDate=${build_date}")
if [[ ${git_version} =~ ^v([0-9]+)\.([0-9]+)(\.[0-9]+)?([-].*)?([+].*)?$ ]]; then
git_major=${BASH_REMATCH[1]}
git_minor=${BASH_REMATCH[2]}
ldflags+=(-X "${version_pkg}.gitMajor=${git_major}"
-X "${version_pkg}.gitMinor=${git_minor}")
fi
if [[ ${SET_VIRTLET_IMAGE_TAG:-} ]]; then
ldflags+=(-X "${version_pkg}.imageTag=${SET_VIRTLET_IMAGE_TAG}")
fi
echo "${ldflags[*]}"
}
function build_internal {
# we don't just always generate the bindata right there because we
# want to keep the source buildable outside this build container.
go-bindata -mode 0644 -o /tmp/bindata.go -modtime "${bindata_modtime}" -pkg "${bindata_pkg}" "${bindata_dir}"
if ! cmp /tmp/bindata.go "${bindata_out}"; then
echo >&2 "${bindata_dir} changed, please re-run ${0} update-bindata"
exit 1
fi
install_vendor_internal
ldflags="$(get_ldflags)"
mkdir -p "${project_dir}/_output"
go build -i -o "${project_dir}/_output/virtlet" -ldflags "${ldflags}" ./cmd/virtlet
go build -i -o "${project_dir}/_output/virtletctl" -ldflags "${ldflags}" ./cmd/virtletctl
GOOS=darwin go build -i -o "${project_dir}/_output/virtletctl.darwin" -ldflags "${ldflags}" ./cmd/virtletctl
go build -i -o "${project_dir}/_output/vmwrapper" ./cmd/vmwrapper
go build -i -o "${project_dir}/_output/flexvolume_driver" ./cmd/flexvolume_driver
go test -i -c -o "${project_dir}/_output/virtlet-e2e-tests" ./tests/e2e
go build -i -o "${project_dir}/_output/virtlet-longevity-tests" -ldflags "${ldflags}" ./cmd/longevity
}
# Print the release body for TAG ($1) followed by sha256 sums of the given
# files (remaining args), resolved relative to _output/.
function release_description {
  # BUG FIX: was 'local -a tag' — tag is a scalar, not an array; '-a'
  # silently created a one-element array and only worked by accident.
  local tag="${1}"
  shift
  git tag -l --format='%(contents:body)' "${tag}"
  echo
  echo "SHA256 sums for the files:"
  echo '```'
  (cd _output && sha256sum "$@")
  echo '```'
}
# Create (or recreate) the GitHub release for TAG ($1) and upload the
# release artifacts. Tags containing "test" go to the personal test repo.
function release_internal {
  local tag="${1}"
  local gh_user="Mirantis"
  if [[ ${tag} =~ test ]]; then
    gh_user="${GH_RELEASE_TEST_USER}"
  fi
  local -a opts=(--user "${gh_user}" --repo virtlet --tag "${tag}")
  local -a files=(virtletctl virtletctl.darwin)
  local description="$(release_description "${tag}" "${files[@]}")"
  local pre_release=
  if [[ ${tag} =~ -(test|pre).*$ ]]; then
    pre_release="--pre-release"
  fi
  # Delete a pre-existing release for this tag so it can be replaced.
  if github-release --quiet delete "${opts[@]}"; then
    echo >&2 "Replacing the old Virtlet release"
  fi
  github-release release "${opts[@]}" \
    --name "$(git tag -l --format='%(contents:subject)' "${tag}")" \
    --description "${description}" \
    ${pre_release}
  # BUG FIX: the upload loop referenced "$(unknown)" (corrupted text) in
  # three places; the loop variable ${filename} is clearly what was meant.
  for filename in "${files[@]}"; do
    echo >&2 "Uploading: ${filename}"
    github-release upload "${opts[@]}" \
      --name "${filename}" \
      --replace \
      --file "_output/${filename}"
  done
}
function e2e {
ensure_build_container
local cluster_url
cluster_url="$(kubectl config view -o jsonpath='{.clusters[?(@.name=="dind")].cluster.server}')"
docker exec virtlet-build _output/virtlet-e2e-tests -include-unsafe-tests=true -cluster-url "${cluster_url}" "$@"
}
function update_bindata_internal {
# set fixed modtime to avoid unwanted differences during the checks
# that are done by build/cmd.sh build
go-bindata -mode 0644 -modtime "${bindata_modtime}" -o "${bindata_out}" -pkg "${bindata_pkg}" "${bindata_dir}"
}
function update_generated_docs_internal {
if [[ ! -f _output/virtletctl ]]; then
echo >&2 "Please run build/cmd.sh build first"
fi
virtletctl gendoc docs/docs/reference
tempfile="$(tempfile)"
_output/virtletctl gendoc --config >"${tempfile}"
sed -i "/<!-- begin -->/,/<!-- end -->/{
//!d
/begin/r ${tempfile}
}" docs/docs/reference/config.md
rm -f "${tempfile}"
}
function update_generated_internal {
install_vendor_internal
vendor/k8s.io/code-generator/generate-groups.sh all \
github.com/Mirantis/virtlet/pkg/client github.com/Mirantis/virtlet/pkg/api \
virtlet.k8s:v1 \
--go-header-file "build/custom-boilerplate.go.txt"
# fix import url case issues
find pkg/client \
-name '*.go' \
-exec sed -i 's@github\.com/mirantis/virtlet@github\.com/Mirantis/virtlet@g' \
'{}' \;
}
function serve_docs_internal {
(cd docs && mkdocs serve -a "${MKDOCS_SERVE_ADDRESS}")
}
function build_docs_internal {
site_dir="$(mktemp -d)"
trap 'rm -rf "${site_dir}"' EXIT
# Use strict mode (-s) for mkdocs so that any broken links
# etc. are caught
(cd docs && mkdocs build -s -d "${site_dir}" >&2)
tar -C "${site_dir}" -c .
}
function build_docs {
cd "${project_dir}"
rm -rf _docs
git clone -b docs . _docs
local docs_hash="$(git ls-tree HEAD -- docs | awk '{print $3}')"
if [[ ! -e _docs/source_hash || ${docs_hash} != $(cat _docs/source_hash) ]]; then
echo >&2 "docs/ directory changed since the last doc build, rebuilding docs"
elif [[ $(git status --porcelain) ]]; then
echo >&2 "Source directory dirty, rebuilding docs"
else
echo >&2 "Docs unchanged, no need to rebuild"
return 0
fi
# clean up _docs except for .git and CNAME
find _docs -name .git -prune -o -type f \! -name CNAME -exec rm -f '{}' \;
vcmd "build/cmd.sh build-docs-internal" | tar -C _docs -xv
echo "${docs_hash}" > _docs/source_hash
(
cd _docs
git add .
git commit -m "Update generated docs [ci skip]"
# this pushes the changes into the local repo (not github!)
git push origin docs
)
}
function usage {
echo >&2 "Usage:"
echo >&2 " $0 build"
echo >&2 " $0 test"
echo >&2 " $0 copy"
echo >&2 " $0 copy-dind"
echo >&2 " $0 start-dind"
echo >&2 " $0 vsh"
echo >&2 " $0 stop"
echo >&2 " $0 clean"
echo >&2 " $0 update-bindata"
echo >&2 " $0 update-generated-docs"
echo >&2 " $0 gotest [TEST_ARGS...]"
echo >&2 " $0 gobuild [BUILD_ARGS...]"
echo >&2 " $0 run CMD..."
echo >&2 " $0 release TAG"
echo >&2 " $0 serve-docs"
echo >&2 " $0 build-docs"
echo >&2 " $0 sync"
exit 1
}
cmd="${1:-}"
if [[ ! $cmd ]]; then
usage
fi
shift
case "${cmd}" in
gotest)
gotest "$@"
;;
gobuild)
rsync_git=
gobuild "$@"
;;
prepare-vendor)
vcmd "build/cmd.sh install-vendor-internal"
;;
build)
vcmd "SET_VIRTLET_IMAGE_TAG='${SET_VIRTLET_IMAGE_TAG:-}' build/cmd.sh build-image-internal"
;;
build-image-internal)
# this is executed inside the container
build_image_internal "$@"
;;
test)
vcmd 'build/cmd.sh run-tests-internal'
;;
integration)
vcmd 'build/cmd.sh run-integration-internal'
;;
install-vendor-internal)
install_vendor_internal
;;
run-tests-internal)
run_tests_internal
;;
run-integration-internal)
run_integration_internal
;;
update-bindata)
vcmd "build/cmd.sh update-bindata-internal"
docker cp "virtlet-build:${remote_project_dir}/pkg/tools/bindata.go" pkg/tools/bindata.go
;;
update-bindata-internal)
update_bindata_internal
;;
update-generated)
vcmd "build/cmd.sh update-generated-internal"
docker exec virtlet-build \
/bin/bash -c \
"tar -C '${remote_project_dir}' -c $(find pkg/ -name 'zz_generated.*') pkg/client" |
tar -C "${project_dir}" -xv
;;
update-generated-internal)
update_generated_internal
;;
update-generated-docs)
vcmd "build/cmd.sh update-generated-docs-internal"
docker exec virtlet-build tar -C "${remote_project_dir}" -c docs/docs/reference/config.md docs/docs/reference/virtletctl.md | tar -C "${project_dir}" -xv
;;
update-generated-docs-internal)
update_generated_docs_internal
;;
run)
vcmd "$*"
;;
vsh)
vsh
;;
stop)
stop
;;
clean)
clean
;;
copy)
copy_output
;;
copy-back)
copy_back
;;
copy-dind)
VIRTLET_SKIP_RSYNC=y vcmd "build/cmd.sh copy-dind-internal"
;;
e2e)
e2e "$@"
;;
copy-dind-internal)
for virtlet_node in "${virtlet_nodes[@]}"; do
copy_dind_internal "${virtlet_node}"
done
;;
prepare-all-nodes)
prepare_all_nodes
;;
start-dind)
for virtlet_node in "${virtlet_nodes[@]}"; do
prepare_node "${virtlet_node}"
apply_runtime_label "${virtlet_node}"
done
start_dind
;;
start-build-container)
ensure_build_container
;;
release)
if [[ ! ${1:-} ]]; then
echo >&2 "must specify the tag"
exit 1
fi
( vcmd "build/cmd.sh release-internal '${1}'" )
;;
release-internal)
release_internal "$@"
;;
serve-docs-internal)
serve_docs_internal
;;
serve-docs)
( USE_TERM=1 vcmd "build/cmd.sh serve-docs-internal" )
;;
build-docs-internal)
build_docs_internal
;;
build-docs)
build_docs
;;
sync)
sync_source
;;
*)
usage
;;
esac
# TODO: make it possible to run e2e from within the build container, too
# (although we don't need to use that for CircleCI)
# TODO: fix indentation in this file (use 2 spaces)
|
// Verifica se existe uma receita com o mesmo nome no site;
import { categories } from "./database.js";
// Checks whether a recipe with the same (case-insensitive) name already
// exists in any category on the site.
// Returns true when a duplicate exists, false otherwise.
export function toValidatesRecipe(recipeForm) {
  const newName = recipeForm.recipe.name;
  // An empty name cannot collide with an existing recipe. (The original
  // fell through and returned undefined here; an explicit false keeps the
  // same falsy result but documents the intent.)
  if (newName.length === 0) {
    return false;
  }
  // Lowercase the candidate once instead of on every comparison.
  const target = newName.toLowerCase();
  return categories.some(function (category) {
    return category.recipes.some(function (recipe) {
      return recipe.name.toLowerCase() === target;
    });
  });
}
// Validates the recipe-insertion form and flags each missing field on its
// corresponding DOM input element.
// NOTE(review): the comment originally claimed an error array is returned,
// but the function always returns undefined — confirm intended contract.
export function checkRecipe(recipeForm) {
  var recipeName = document.getElementById("name");
  var preparationTime = document.getElementById("preparationTime");
  var revenue = document.getElementById("revenue");
  var image = document.getElementById("image");
  var ingredients = document.getElementById("ingredients");
  var methodOfPreparation = document.getElementById("methodOfPreparation");
  // NOTE(review): this first check sets a "title" attribute while all the
  // following checks set "oninvalid" — the inconsistency looks
  // unintentional. Also, "oninvalid" normally holds an event handler, not
  // a message; setCustomValidity() may have been intended — TODO confirm.
  if (recipeForm.recipe.name.length == 0) {
    recipeName.setAttribute("title", "Insira o nome da receita!");
  }
  if (recipeForm.recipe.preparationTime == 0) {
    preparationTime.setAttribute("oninvalid", "Insira o tempo de preparo da receita!");
  }
  if (recipeForm.recipe.revenue == 0) {
    revenue.setAttribute("oninvalid", "Insira o rendimento da receita!");
  }
  if (recipeForm.recipe.image.length == 0) {
    image.setAttribute("oninvalid", "Insira uma foto da sua receita!");
  }
  if (recipeForm.recipe.ingredients.length == 0) {
    ingredients.setAttribute("oninvalid", "Insira os ingredientes necess\u00E1rios para preparar a sua receita!");
  }
  if (recipeForm.recipe.methodOfPreparation.length == 0) {
    methodOfPreparation.setAttribute("oninvalid", "Insira o modo de preparo da receita!");
  }
  return;
}
|
-- For each product name, return its highest price, most expensive first.
-- Fix: ORDER BY must reference the aggregate; a bare, non-grouped "price"
-- column is invalid under standard SQL (and ONLY_FULL_GROUP_BY in MySQL).
SELECT name, MAX(price)
FROM Products
GROUP BY name
ORDER BY MAX(price) DESC;
<reponame>tiagoben/juken<gh_stars>10-100
import _ from 'lodash';
import * as env from 'src/common/env';
import queryString from 'src/utils/queryString';
import { GET } from 'src/common/constants';
import run from 'src/utils/run';
// Thin fetch() wrapper: appends a query string, JSON-encodes the body for
// non-GET requests, logs request/response/error in debug mode, and lets
// callers flag "soft" failures (HTTP 200 responses that are actually
// errors) via the `hasError` callback.
//
// @param method  HTTP method constant (e.g. GET)
// @param url     base URL, without query string
// @param opts    { body, params, headers, hasError }
// @returns parsed JSON response body
// @throws network errors, JSON parse errors, or whatever `hasError` returns
export default async (method, url, opts = {}) => {
  const {
    body = {},
    params = {},
    // Fix: default to an empty object — previously a non-GET call without
    // an explicit `headers` option crashed on the assignments below.
    headers = {},
    hasError,
  } = opts;

  // construct url
  const finalUrl = `${url}${queryString(params)}`;

  try {
    // construct request options (renamed from `opts`, which shadowed the
    // function parameter of the same name)
    const fetchOpts = { method, headers };
    if (body && method !== GET) {
      headers['Accept'] = 'application/json';
      headers['Content-Type'] = 'application/json';
      fetchOpts.body = JSON.stringify(body);
    }

    // log request
    if (env.DEBUG) console.log(`🌎 [${method}]: `, finalUrl, fetchOpts);

    // make the request
    const response = await fetch(finalUrl, fetchOpts);
    const data = await response.json();

    // accept a custom function to evaluate the response
    // and see if the request actually failed despite
    // the success response of status 200 OK
    const e = run(hasError, data);
    if (e) throw (e);

    // log response
    if (env.DEBUG) console.log(`🌕 [${method}]: `, finalUrl, data);

    // return response
    return data;
  } catch (err) {
    // log error
    if (env.DEBUG) console.log(`🌑 [${method}]: `, finalUrl, err);
    // throw the error
    throw (err);
  }
};
#!/usr/bin/env bash
# Builds ../dist/nuve.js: a license header, the closure-compiled
# XMLHttpRequest shim, and the nuve build output, exported as a module.
set -e

# -p: succeed when the directory already exists. The previous
# `mkdir … || true` also masked real failures (e.g. permission errors),
# which `set -e` was supposed to catch.
mkdir -p ../dist
mkdir -p ../build

google-closure-compiler-js ../lib/xmlhttprequest.js > ../dist/xmlhttprequest.js

TARGET=../dist/nuve.js
current_dir=`pwd`

# License
echo '/*' > $TARGET
echo '*/' >> $TARGET

# Body
cat ../dist/xmlhttprequest.js >> $TARGET
cat ../build/nuve.js >> $TARGET
echo 'module.exports = N;' >> $TARGET

#cd ../npm/
#tar -czvf package.tgz package/
#cd $current_dir
import React, { Suspense } from "react";
import { AxiosResponse } from "axios";
import { useServerStatus } from "contexts/serverStatusContext";
import MaintainancePage from "pages/MaintainancePage";
import NotFound from "pages/NotFound";
import { Redirect, Route, Switch } from "react-router-dom";
import getStatus from "services/status";
import { Status } from "utils/enums/status";
import Help from "pages/Help";
const ShowSecret = React.lazy(() => import("pages/FetchSecret"));
const CreateSecret = React.lazy(() => import("pages/CreateSecret"));
/**
 * Top-level router. Fetches the server status once on mount and redirects
 * to the maintenance page while the server reports maintenance mode.
 */
const AppRouter: React.FC = () => {
  const [status, setServerStatus] = useServerStatus();
  const isMounted = React.useRef(true);

  React.useEffect(() => {
    getStatus().then((response: AxiosResponse) => {
      // Fix: bail out if the component unmounted while the request was in
      // flight. The ref was previously set by the cleanup but never
      // checked, so this could set state on an unmounted component.
      if (!isMounted.current) return;
      const url = new URL(response.config.baseURL as string);
      setServerStatus({ ...response.data, host: url.hostname });
    });
    return () => {
      isMounted.current = false;
    };
  }, []);

  return (
    <Suspense fallback="loading...">
      <Switch>
        <Route path="/" exact component={CreateSecret} />
        <Route path="/help" exact component={Help} />
        <Route path="/maintainance" exact component={MaintainancePage} />
        {status.status === Status.maintainance ? <Redirect to="/maintainance" /> : null}
        <Route path="/secret/:token" exact component={ShowSecret} />
        <Route path="/not-found" component={NotFound} />
        <Redirect from="*" to="/not-found" />
      </Switch>
    </Suspense>
  );
};

export default AppRouter;
|
/*
* Copyright 2015 lixiaobo
*
* VersionUpgrade project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.versionmaintain.utils;
import java.awt.Font;
import java.io.File;
import java.util.Enumeration;
import java.util.List;
import javax.swing.UIManager;
import com.cats.utils.IColorFontUtil;
import com.versionmaintain.files.EnFileType;
/**
* @author xiaobolx
* 2016年1月25日
*/
public class VersionServiceUtils
{
  /**
   * Maps a file to its {@link EnFileType} based on the file-name suffix.
   *
   * @param file the file to classify; may be null
   * @return enXmlFile for names ending in "xml", enBinFile for names
   *         ending in "bin", otherwise null (also null for a null input)
   */
  public static EnFileType getFileType(File file)
  {
    if(file == null)
    {
      return null;
    }
    // NOTE(review): endsWith("xml") also matches names such as "fooxml"
    // with no dot before the suffix — confirm whether ".xml"/".bin" was meant.
    if(file.getName().endsWith("xml"))
    {
      return EnFileType.enXmlFile;
    }else if(file.getName().endsWith("bin"))
    {
      return EnFileType.enBinFile;
    }
    return null;
  }

  /**
   * Installs the given font for every font-valued entry in the Swing
   * {@link UIManager} defaults and sets the menu-item selection colour.
   *
   * @param font the font to apply to all UI components
   */
  public static void initGracefulStyle(Font font)
  {
    /*try {
    UIManager.setLookAndFeel("com.sun.java.swing.plaf.windows.WindowsLookAndFeel");//Windows look and feel
    } catch (ClassNotFoundException e1) {
    e1.printStackTrace();
    } catch (InstantiationException e1) {
    e1.printStackTrace();
    } catch (IllegalAccessException e1) {
    e1.printStackTrace();
    } catch (UnsupportedLookAndFeelException e1) {
    e1.printStackTrace();
    }*/
    Enumeration<Object> keys = UIManager.getDefaults().keys();
    Object key = null;
    Object value = null;
    while (keys.hasMoreElements())
    {
      key = keys.nextElement();
      value = UIManager.get(key);
      /*if (key instanceof String)
      {
      if (((String) key).endsWith(".background"))
      {
      UIManager.put(key, new Color(0xeeeeee));
      }
      }*/
      // Replace every default whose value is a Font with the supplied font.
      if (value instanceof Font)
      {
        UIManager.put(key, font);
      }
    }
    UIManager.put("MenuItem.selectionBackground",IColorFontUtil.COLOR_SELECTED_COLOR_BABY_GREEN);
  }

  /**
   * Produces a name of the form "NewItem", "NewItem1", "NewItem2", … that
   * does not already occur in {@code infoList}.
   *
   * @param infoList the existing names to avoid
   * @return the first candidate name not present in the list
   */
  public static String getDiffInfo(List<String> infoList)
  {
    int iIndex = 1;
    boolean isFind = false;
    String newItem = "NewItem";
    // Re-scan the whole list after each rename: a freshly generated
    // candidate could itself collide with a later entry.
    do{
      isFind = false;
      for (String info : infoList)
      {
        if(info.equals(newItem))
        {
          isFind = true;
          newItem = "NewItem"+iIndex;
          iIndex++;
        }
      }
    }while(isFind);
    return newItem;
  }
}
|
const db = require('../models')
const { FAQ } = db
const createError = require('http-errors')
// FAQ controller: read-only access to the FAQ table.
const faqController = {
  // GET handler: responds with every FAQ row as { ok: 1, data }.
  // NOTE(review): a findAll() failure is reported as 401 (Unauthorized),
  // which is unusual for a database error — presumably 500 was intended;
  // confirm before changing, since clients may depend on the current code.
  getAll: async (req, res, next) => {
    try {
      const data = await FAQ.findAll()
      return res.status(200).json({
        ok: 1,
        data
      })
    } catch (error) {
      return next(createError(401, 'Get FAQ fail'))
    }
  }
}

module.exports = faqController
|
"""Send a one-off welcome email over SMTP-over-SSL (Gmail, port 465)."""
import smtplib
import ssl
from email.message import EmailMessage
from getpass import getpass

sender_email = "<sender's email address>"
receiver_email = "<recipient's email address>"
# getpass() keeps the password from being echoed to the terminal,
# unlike the previous plain input() prompt.
password = getpass("Type your password and press enter:")

# Create the email
message = EmailMessage()
message['Subject'] = 'Welcome to our website'
message['From'] = sender_email
message['To'] = receiver_email
message.set_content('''Hi,
Thank you for registering on our website. We are happy to have you as part of our team.
Sincerely,
The Admin
''')

# Send the email over an implicitly-TLS connection with default cert checks.
context = ssl.create_default_context()
with smtplib.SMTP_SSL("smtp.gmail.com", 465, context=context) as server:
    server.login(sender_email, password)
    server.send_message(message)
package org.slos.battle.abilities.rule;
import org.slos.battle.GameContext;
import org.slos.battle.abilities.AbilityEffect;
import org.slos.battle.attack.AttackContext;
/**
 * Base class for ability effects that alter how an attack resolves.
 * Concrete subclasses implement {@link #execute} to apply their rule
 * during combat and return a rule-specific result.
 *
 * @param <T> the result type produced when the rule executes
 */
public abstract class AttackRule<T> implements AbilityEffect {

    /** Kind of rule this instance represents; fixed at construction. */
    private final AttackRuleType attackRuleType;

    public AttackRule(AttackRuleType attackRuleType) {
        this.attackRuleType = attackRuleType;
    }

    /** Applies this rule within the given attack and game contexts. */
    public abstract T execute(AttackContext attackContext, GameContext gameContext);

    public AttackRuleType getAttackRuleType() {
        return attackRuleType;
    }

    @Override
    public String toString() {
        return "AttackRule{" + "attackRuleType=" + attackRuleType + '}';
    }
}
|
package com.esri.ges.processor.plan;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.esri.ges.core.property.PropertyDefinition;
import com.esri.ges.core.property.PropertyType;
import com.esri.ges.processor.GeoEventProcessorDefinitionBase;
public class PlanDefinition extends GeoEventProcessorDefinitionBase
{
final private static Log LOG = LogFactory.getLog(PlanDefinition.class);
public final static String AGS_CONNECTION_PROPERTY = "agsConnection";
public final static String AGS_PATH_PROPERTY = "path";
public final static String FEATURE_SERVICE_PROPERTY = "featureService";
public final static String STOP_LAYER_INDEX_PROPERTY = "stopLayer";
public final static String ROUTE_LAYER_INDEX_PROPERTY = "routeLayer";
public final static String VEHICLE_LAYER_INDEX_PROPERTY = "vehicleLayer";
public final static String ALERT_LAYER_INDEX_PROPERTY = "alertLayer";
public final static String GEOFENCE_LAYER_INDEX_PROPERTY = "geofenceLayer";
public PlanDefinition()
{
try
{
propertyDefinitions.put(AGS_CONNECTION_PROPERTY, new PropertyDefinition(AGS_CONNECTION_PROPERTY, PropertyType.ArcGISConnection, null, "ArcGIS Server Connection", "ArcGIS Server Connection.", true, false));
propertyDefinitions.put(AGS_PATH_PROPERTY, new PropertyDefinition(AGS_PATH_PROPERTY, PropertyType.ArcGISFolder, "/", "Folder", "Path to the feature service.", true, false));
propertyDefinitions.put(FEATURE_SERVICE_PROPERTY, new PropertyDefinition(FEATURE_SERVICE_PROPERTY, PropertyType.ArcGISFeatureService, null, "Feature Service", "Feature Service.", true, false));
propertyDefinitions.put(STOP_LAYER_INDEX_PROPERTY, new PropertyDefinition(STOP_LAYER_INDEX_PROPERTY, PropertyType.ArcGISLayer, null, "Stop Layer", "Stop Layer.", true, false));
propertyDefinitions.put(ROUTE_LAYER_INDEX_PROPERTY, new PropertyDefinition(ROUTE_LAYER_INDEX_PROPERTY, PropertyType.ArcGISLayer, null, "Route Layer", "Route Layer.", true, false));
propertyDefinitions.put(VEHICLE_LAYER_INDEX_PROPERTY, new PropertyDefinition(VEHICLE_LAYER_INDEX_PROPERTY, PropertyType.ArcGISLayer, null, "Vehicle Layer", "Vehicle Layer.", true, false));
propertyDefinitions.put(ALERT_LAYER_INDEX_PROPERTY, new PropertyDefinition(ALERT_LAYER_INDEX_PROPERTY, PropertyType.ArcGISLayer, null, "Alert Layer", "Alert Layer.", true, false));
propertyDefinitions.put(GEOFENCE_LAYER_INDEX_PROPERTY, new PropertyDefinition(GEOFENCE_LAYER_INDEX_PROPERTY, PropertyType.ArcGISLayer, null, "GeoFence Layer", "GeoFence Layer.", true, false));
}
catch (Exception e)
{
LOG.error("Error setting up Plan Definition.", e);
}
}
@Override
public String getName()
{
return "PlanProcessor";
}
@Override
public String getLabel()
{
return "PlanProcessor";
}
@Override
public String getDescription()
{
return "Performs operations on Route Plan.";
}
}
|
#!/bin/sh
# Extracts the "version" field from package.json and prints it.
# Fix: the script previously grepped the "name" field, which contradicts
# the PACKAGE_VERSION variable and the script's apparent purpose.
PACKAGE_VERSION=$(cat package.json \
  | grep version \
  | head -1 \
  | awk -F: '{ print $2 }' \
  | sed 's/[",]//g' \
  | tr -d '[[:space:]]')
echo $PACKAGE_VERSION
<!DOCTYPE html>
<html lang="en">
<head>
  <!-- charset declared so non-ASCII content renders consistently -->
  <meta charset="utf-8">
  <title> Products </title>
</head>
<body>
  <h1> Products </h1>
  <div>
    <h2>Sports shoes</h2>
    <!-- alt attributes added for accessibility / broken-image fallback -->
    <img src="image.jpg" alt="Sports shoes" />
    <p>High-quality shoes for sports.</p>
  </div>
  <div>
    <h2>T-shirt</h2>
    <img src="image2.jpg" alt="T-shirt" />
    <p>High-quality t-shirt made of cotton.</p>
  </div>
  <div>
    <h2>Cap</h2>
    <img src="image3.jpg" alt="Cap" />
    <p>High-quality cotton cap.</p>
  </div>
</body>
</html>
<reponame>Sarveshmrao/DiscordBot-Template<filename>events/message.js
const { Collection, MessageEmbed: Embed } = require("discord.js");
const {prefix} = require('../config.json');
// Escape every regex metacharacter in `string` so the result can be
// embedded verbatim inside a RegExp pattern.
const escapeRegex = (string) => string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
module.exports = {
name: "message",
async execute(message) {
// Declares const to be used.
const { client, guild, channel, content, author } = message;
// Converts prefix to lowercase.
const checkPrefix = prefix.toLowerCase();
// New: Mention Prefix added.
const prefixRegex = new RegExp(`^(<@!?${client.user.id}>|${escapeRegex(checkPrefix)})\\s*`);
if (!prefixRegex.test(content.toLowerCase())) return;
// Real checks goes dynamically.
const [matchedPrefix] = content.toLowerCase().match(prefixRegex);
const args = content.slice(matchedPrefix.length).trim().split(/ +/);
const commandName = args.shift().toLowerCase();
// Check if mesage does not starts with prefix, or message author is bot. If yes, return.
if (!message.content.startsWith(matchedPrefix) || message.author.bot) return;
// Finds the actual command.
const command = client.commands.get(commandName)
|| client.commands.find(cmd => cmd.aliases && cmd.aliases.includes(commandName));
// It it's not a command, return :)
if (!command) return;
// Guild Only Property, add in your command properties if true.
if (command.guildOnly && message.channel.type === 'dm') {
return message.reply('I can\'t execute that command inside DMs!');
}
// Author perms property
if (command.permissions) {
const authorPerms = message.channel.permissionsFor(message.author);
if (!authorPerms || !authorPerms.has(command.permissions)) {
return message.reply('You can not do this!');
}
}
// Args missing
if (command.args && !args.length) {
let reply = `You didn't provide any arguments, ${message.author}!`;
if (command.usage) {
reply += `\nThe proper usage would be: \`${prefix}${command.name} ${command.usage}\``;
}
return message.channel.send(reply);
}
// Copldowns
const { cooldowns } = client;
if (!cooldowns.has(command.name)) {
cooldowns.set(command.name, new Collection());
}
const now = Date.now();
const timestamps = cooldowns.get(command.name);
const cooldownAmount = (command.cooldown || 3) * 1000;
if (timestamps.has(message.author.id)) {
const expirationTime = timestamps.get(message.author.id) + cooldownAmount;
if (now < expirationTime) {
const timeLeft = (expirationTime - now) / 1000;
return message.reply(`please wait ${timeLeft.toFixed(1)} more second(s) before reusing the \`${command.name}\` command.`);
}
}
timestamps.set(message.author.id, now);
setTimeout(() => timestamps.delete(message.author.id), cooldownAmount);
// Rest your creativity is below.
// execute the final command. Put everything above this.
try {
command.execute(message, args);
} catch (error) {
console.error(error);
message.reply('there was an error trying to execute that command!');
}
}
} |
// Function to send an email.
/**
 * Sends a plain-text email via an authenticated SMTP session using
 * STARTTLS on port 587.
 * NOTE(review): "SMTP SERVER" is a placeholder host, and SENDER/PASSWORD
 * are presumably constants on the enclosing class — confirm they are
 * configured before use.
 *
 * @param recipient destination email address
 * @param subject   message subject line
 * @param body      plain-text message body
 */
public void sendEmail(String recipient, String subject, String body)
{
    // Create properties of mail server.
    Properties properties = new Properties();
    properties.put("mail.smtp.host", "SMTP SERVER");
    properties.put("mail.smtp.port", "587");
    properties.put("mail.smtp.auth", "true");
    properties.put("mail.smtp.starttls.enable", "true");

    //Create a new authenticated session with credentials.
    Session session = Session.getInstance(properties,
      new javax.mail.Authenticator() {
        protected PasswordAuthentication getPasswordAuthentication() {
          return new PasswordAuthentication(SENDER, PASSWORD);
        }
      });

    try {
        //Create a new message and set the content
        Message message = new MimeMessage(session);
        message.setFrom(new InternetAddress(SENDER));
        message.addRecipient(Message.RecipientType.TO, new InternetAddress(recipient));
        message.setSubject(subject);
        message.setText(body);
        //Send the message
        Transport.send(message);
    } catch (MessagingException e) {
        // Failures are logged only; the caller is not notified.
        e.printStackTrace();
    }
}
// Copyright (c) 2014 <NAME>. Portions copyright (c) 2011
// Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef CEF_INCLUDE_BASE_CEF_ATOMIC_FLAG_H_
#define CEF_INCLUDE_BASE_CEF_ATOMIC_FLAG_H_
#pragma once
#if defined(USING_CHROMIUM_INCLUDES)
// When building CEF include the Chromium header directly.
#include "base/synchronization/atomic_flag.h"
#else // !USING_CHROMIUM_INCLUDES
// The following is substantially similar to the Chromium implementation.
// If the Chromium implementation diverges the below implementation should be
// updated to match.
#include <stdint.h>
#include <atomic>
#include "include/base/cef_macros.h"
#include "include/base/cef_thread_checker.h"
namespace base {
// A flag that can safely be set from one thread and read from other threads.
//
// This class IS NOT intended for synchronization between threads.
//
// NOTE: this file deliberately mirrors Chromium's base::AtomicFlag (see the
// header comment above); keep any changes in sync with upstream.
class AtomicFlag {
 public:
  AtomicFlag();
  ~AtomicFlag();

  // Set the flag. Must always be called from the same thread.
  void Set();

  // Returns true iff the flag was set. If this returns true, the current thread
  // is guaranteed to be synchronized with all memory operations on the thread
  // which invoked Set() up until at least the first call to Set() on it.
  bool IsSet() const {
    // Inline here: this has a measurable performance impact on base::WeakPtr.
    return flag_.load(std::memory_order_acquire) != 0;
  }

  // Resets the flag. Be careful when using this: callers might not expect
  // IsSet() to return false after returning true once.
  // As the name says: intended for tests only.
  void UnsafeResetForTesting();

 private:
  // 0 = unset, non-zero = set. IsSet() does an acquire load (above); Set()
  // presumably performs the matching release store — defined out of line.
  std::atomic<uint_fast8_t> flag_{0};
  // Supports the "Set() always from the same thread" contract.
  base::ThreadChecker set_thread_checker_;

  DISALLOW_COPY_AND_ASSIGN(AtomicFlag);
};
} // namespace base
#endif // !USING_CHROMIUM_INCLUDES
#endif // CEF_INCLUDE_BASE_CEF_ATOMIC_FLAG_H_
|
package pl.coderslab.spring01hibernate.repository;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.transaction.Transactional;
/**
 * Custom repository fragment that bulk-resets the rating column of every
 * book via a native SQL update.
 */
@Transactional
public class BookRepositoryImpl implements ResetRatingInterface {

    @PersistenceContext
    private EntityManager entityManager;

    /** Sets the rating of all rows in {@code books} to the given value. */
    @Override
    public void resetRating(int rating) {
        Query query = this.entityManager
                .createNativeQuery("UPDATE books SET rating = ?1")
                .setParameter(1, rating);
        query.executeUpdate();
    }
}
|
<filename>src/main/scala/markarasev/utils/package.scala
package markarasev
/**
 * Project-wide aliases pinning `Seq` to the immutable collection,
 * shadowing the default `scala.collection.Seq`.
 */
package object utils {
  // Type alias: `Seq[A]` within this project always means the immutable Seq.
  type Seq[+A] = scala.collection.immutable.Seq[A]
  // Value alias so the companion object (apply, empty, …) resolves to the
  // immutable Seq as well.
  val Seq: scala.collection.immutable.Seq.type = scala.collection.immutable.Seq
}
|
# Entry point: make sure SQLite 3 is available on this machine.
provision-sqlite3() {
  ensure-sqlite3-installed
}

# Installs the sqlite3 CLI and its development headers unless the binary is
# already on PATH. `has-executable`, `banner` and `install` are helpers
# defined elsewhere in this provisioning suite — presumably `install`
# wraps the distro package manager, with apt=/rpm= selecting the
# per-family package name; confirm against the shared helpers.
ensure-sqlite3-installed() {
  if ! has-executable sqlite3; then
    banner 'Installing SQLite 3'
    install sqlite3
    install apt=libsqlite3-dev rpm=sqlite-devel
  fi
}
//
// VHCMsg.h
// VHCMessage
//
// Created by vhall on 2018/9/14.
// Copyright © 2018年 vhall. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/** Model for a single chat message in a meeting room. */
@interface VHCMsg : NSObject
/** ID of the user who sent the message. */
@property (nonatomic, copy) NSString *joinId;
/** Message ID. */
@property (nonatomic, copy) NSString *msgId;
/** Meeting nickname of the sender. */
@property (nonatomic, copy) NSString *nickName;
/** Sender role (host, guest, assistant, audience/student). */
@property (nonatomic, assign) NSInteger role;
/** Sender avatar URL; empty string when the user has no avatar. */
@property (nullable ,nonatomic, copy) NSString *avatar;
/** ID of the room the message was sent in. */
@property (nonatomic, copy) NSString *roomId;
/** Time the message was sent. */
@property (nonatomic, copy) NSString *time;
/** Message body text. */
@property (nonatomic, copy) NSString *text;
/** Message event name. */
@property (nonatomic, copy) NSString *event;
/** Whether this is a private (direct) chat message. */
@property (nonatomic, assign) BOOL isPrivateChat;
/** Recipient user ID for a private chat message. */
@property (nonatomic, copy) NSString *toId;
/** Images attached to the message. */
@property(nonatomic, strong) NSArray * img_list;
@end
/** Summary model for a chat group (id, name, unread count, last message). */
@interface VHCGroupMsg : NSObject
/** Group ID. */
@property (nonatomic, copy) NSString *groupId;
/** Group name. */
@property (nonatomic, copy) NSString *groupName;
/** Number of unread messages. */
@property (nonatomic, assign) NSInteger noReadNum;
/** Role of the current user in this group. */
@property (nonatomic, assign) NSInteger roleType;
/** Most recent chat message(s) in the group. */
@property (nonatomic, strong) NSArray *lastChat;
@end
NS_ASSUME_NONNULL_END
|
<reponame>RTEnzyme/vldb-2021-labs
package storage
import (
"github.com/pingcap-incubator/tinykv/kv/raftstore/scheduler_client"
"github.com/pingcap-incubator/tinykv/kv/util/engine_util"
"github.com/pingcap-incubator/tinykv/proto/pkg/kvrpcpb"
)
// Storage represents the internal-facing server part of TinyKV, it handles sending and receiving from other
// TinyKV nodes. As part of that responsibility, it also reads and writes data to disk (or semi-permanent memory).
type Storage interface {
	// Start wires the storage up to the scheduler; must be called before use.
	Start(client scheduler_client.Client) error
	// Stop shuts the storage down.
	Stop() error
	// Write interface will be used by the txn engine to persist write logs; if it returns ok then the
	// logs have been successfully persisted and applied by the storage engine. Here two steps are needed:
	// first, persist the logs on the majority of nodes, and second, apply these writes in the
	// storage engine.
	Write(ctx *kvrpcpb.Context, batch []Modify) error
	// Reader returns a StorageReader for the given request context.
	Reader(ctx *kvrpcpb.Context) (StorageReader, error)
	// Client returns the scheduler client this storage was started with.
	Client() scheduler_client.Client
}
// StorageReader provides point reads and iteration over a column family.
type StorageReader interface {
	// GetCF returns the value stored under key in column family cf.
	// When the key doesn't exist, return nil for the value.
	GetCF(cf string, key []byte) ([]byte, error)
	// IterCF returns an iterator over column family cf.
	IterCF(cf string) engine_util.DBIterator
	// Close releases the resources held by this reader.
	Close()
}
|
<reponame>jrfaller/maracas
package main.unused.constructorLessAccessible;
/**
 * Fixture class with two package-private constructors.
 * NOTE(review): judging by the package name (constructorLessAccessible),
 * this is test data for detecting constructor-visibility reductions —
 * presumably paired with a variant whose constructors are private. Do not
 * "clean up" the empty constructor bodies.
 */
public class ConstructorLessAccessiblePackPriv2Priv {

	ConstructorLessAccessiblePackPriv2Priv() {
	}

	ConstructorLessAccessiblePackPriv2Priv(int p) {
	}
}
|
import { onPlayerData, onPlayerCreated, onPlayerReady, onPlayerDispose, onPlayerBeforeCreated } from "../IComponent";
import { PlayerConfig, PlayerData } from "../../youtube/PlayerConfig";
import { Player } from "../../player/Player";
import { Logger } from '../../libs/logging/Logger';
import { EventType } from '../../youtube/EventType';
import { ISettingsReact } from "../../settings-storage/ISettings";
import { QualityApi } from "./api";
import { PlaybackQuality } from "../../youtube/PlayerApi";
import { injectable } from "inversify";
import { EventHandler } from "../../libs/events/EventHandler";
import { Disposable } from "../../libs/Disposable";
const logger = new Logger("QualityComponent");
@injectable()
/**
 * Component that tracks one PlayerQuality instance per player and
 * forwards player lifecycle events to it.
 */
export class QualityComponent implements onPlayerBeforeCreated, onPlayerCreated, onPlayerReady, onPlayerData, onPlayerDispose {
  private _api: QualityApi;
  // Per-player quality state, keyed by player id.
  private _players: {[key: string]: PlayerQuality} = {};

  constructor(api: QualityApi) {
    this._api = api;
  }

  /**
   * Returns the per-player state for `player`, throwing if it was never
   * registered. Replaces four copies of the same lookup-or-throw stanza.
   */
  private _getPlayer(player: Player): PlayerQuality {
    const id = player.getId();
    if (!this._players.hasOwnProperty(id))
      throw new Error("Player not found in component");
    return this._players[id];
  }

  onPlayerBeforeCreated(player: Player): void {
    const id = player.getId();
    if (this._players.hasOwnProperty(id))
      throw new Error("Player already created in component");
    this._players[id] = new PlayerQuality(this._api, player);
  }

  onPlayerCreated(player: Player): void {
    this._getPlayer(player).onCreated();
  }

  onPlayerDispose(player: Player): void {
    this._getPlayer(player).dispose();
    delete this._players[player.getId()];
  }

  onPlayerReady(player: Player): void {
    this._getPlayer(player).onReady();
  }

  onPlayerData(player: Player, data: PlayerData): PlayerData {
    return this._getPlayer(player).onData(data);
  }
}
/**
 * Per-player quality state: applies the user's preferred playback quality
 * to one Player instance and re-applies it across lifecycle/data events.
 */
class PlayerQuality extends Disposable {
  private _player: Player;
  private _handler?: EventHandler;
  private _api: QualityApi;
  // Set on the Unstarted event and consumed by the next ApiChange event,
  // so the preferred quality is applied once the player API is usable.
  private _unstarted: boolean = false;

  constructor(api: QualityApi, player: Player) {
    super();
    this._api = api;
    this._player = player;
  }

  protected disposeInternal() {
    super.disposeInternal();
    if (this._handler) {
      this._handler.dispose();
      this._handler = undefined;
    }
  }

  /**
   * Sets the player's quality to `quality` when available; otherwise falls
   * back to the nearest available level — trying higher levels first when
   * `bestQualityPreferred` is true, lower levels first otherwise.
   */
  private _updatePlaybackQuality(quality: PlaybackQuality, bestQualityPreferred: boolean): void {
    let availableLevels = this._player.getAvailableQualityLevels();
    // NOTE(review): currentLevel is never read below — confirm it can go.
    let currentLevel = this._player.getPlaybackQuality();
    if (availableLevels.length === 0) {
      logger.debug("No quality levels are available.");
      return;
    }
    // Fast path: the requested quality is directly available.
    if (availableLevels.indexOf(quality) !== -1) {
      if (this._player.isEmbedded()) {
        this._player.setPlaybackQuality(quality);
      } else {
        this._player.setPlaybackQualityRange(quality, quality);
      }
      logger.debug("Settings quality (%s) through API", quality);
      return;
    }
    // Fallback: search outward from the requested level through the
    // ordered list of all known quality values.
    const values = PlaybackQuality.getValues();
    const availableLevelIndexes = availableLevels.map(level => values.indexOf(level));
    const levelIndex = values.indexOf(quality);
    if (levelIndex === -1) {
      logger.warning("Quality (%s) can't be found in values.", quality);
      return;
    }
    // Nearest available level below the requested one, scanning downward.
    const findLowerQuality = (): PlaybackQuality|undefined => {
      for (let i = levelIndex - 1; i >= 0; i--) {
        if (availableLevelIndexes.indexOf(i) !== -1) {
          return values[i];
        }
      }
      return;
    };
    // Nearest available level above the requested one, scanning upward.
    const findBetterQuality = (): PlaybackQuality|undefined => {
      for (let i = levelIndex + 1; i < values.length; i++) {
        if (availableLevelIndexes.indexOf(i) !== -1) {
          return values[i];
        }
      }
      return;
    };
    let nextQuality: PlaybackQuality|undefined;
    if (bestQualityPreferred) {
      nextQuality = findBetterQuality();
      if (!nextQuality) nextQuality = findLowerQuality();
    } else {
      nextQuality = findLowerQuality();
      if (!nextQuality) nextQuality = findBetterQuality();
    }
    if (nextQuality) {
      if (this._player.isEmbedded()) {
        this._player.setPlaybackQuality(nextQuality);
      } else {
        this._player.setPlaybackQualityRange(nextQuality, nextQuality);
      }
      logger.debug("Changing quality to %s instead of %s due to it not being available.", nextQuality, quality);
    } else {
      logger.debug("Couldn't find a quality close to %s.", quality);
    }
  }

  /** Lazily creates the event handler bound to this instance. */
  getHandler(): EventHandler {
    if (!this._handler) {
      this._handler = new EventHandler(this);
    }
    return this._handler;
  }

  /** Subscribes to player events once the player object exists. */
  onCreated() {
    this.getHandler()
      .listen(this._player, EventType.Unstarted, () => {
        this._unstarted = true;
        this._updatePlaybackQuality(this._api.getQuality(), this._api.isBetterQualityPreferred());
      })
      .listen(this._player, EventType.ApiChange, () => {
        // Only re-apply once per Unstarted → ApiChange cycle.
        if (!this._unstarted) return;
        this._unstarted = false;
        this._updatePlaybackQuality(this._api.getQuality(), this._api.isBetterQualityPreferred());
      })
  }

  /** Applies the preferred quality when the player reports ready. */
  onReady() {
    if (!this._api.isEnabled()) return;
    const quality = this._api.getQuality();
    this._updatePlaybackQuality(quality, this._api.isBetterQualityPreferred());
  }

  /** Injects the preferred quality into player data and applies it if possible. */
  onData(data: PlayerData): PlayerData {
    if (!this._api.isEnabled()) return data;
    const quality = this._api.getQuality();
    data.vq = quality;
    if (!this._player.isReady()) return data;
    this._updatePlaybackQuality(quality, this._api.isBetterQualityPreferred());
    return data;
  }
}
// import required modules
const express = require('express');
const app = express();
const port = 3000;

// configure middleware: parse JSON and URL-encoded request bodies
app.use(express.json());
app.use(express.urlencoded({ extended: true }));

// define routes: a single root endpoint
app.get('/', (req, res) => {
  res.send('This is a write scalable Node.js app!');
});

// start the HTTP server on port 3000
app.listen(port, () => {
  console.log(`Listening on port ${port}`);
});
// Format a floating-point number as a string with two decimal places.
const num = 3.14159;
const formattedNum = num.toFixed(2);
console.log(formattedNum); // "3.14"
<!DOCTYPE html>
<html lang="en">
  <head>
    <%# charset declared so non-ASCII titles/authors render correctly %>
    <meta charset="utf-8">
    <title>Book List</title>
  </head>
  <body>
    <h1>Book List</h1>
    <table>
      <tr>
        <th>Title</th>
        <th>Author</th>
      </tr>
      <%# One row per book in the @books collection supplied by the controller %>
      <% @books.each do |book| %>
        <tr>
          <td><%= book.title %></td>
          <td><%= book.author %></td>
        </tr>
      <% end %>
    </table>
  </body>
</html>
#!/bin/bash
# packages = nfs-utils
# Start the RPC port mapper now and enable it at boot; rpcbind is required
# for NFS operation.
systemctl start rpcbind
systemctl enable rpcbind
<gh_stars>1-10
import React, { Component } from 'react';
import { observer } from 'mobx-react';
import { withStyles } from 'material-ui/styles';
import Grid from 'material-ui/Grid';
import SearchIcon from '@material-ui/icons/Search';
import { Link } from '../../routes';
const emailSubject = 'Real estate property inquiry';
// JSS style factory for the AllAgentsView page.
// Fix: `agentEmail` previously declared `fontSize` twice ('1.1rem' then
// '.9rem'); only the later key survives in a JS object literal, so the
// effective '.9rem' is kept and the dead duplicate removed.
const styles = theme => ({
  root: {},
  titleSection: {
    display: 'flex',
    justifyContent: 'center',
    alignItems: 'center',
    height: 300,
    width: '100%',
    fontFamily: theme.frontEnd.typography.fontFamily.sansSerif,
    fontSize: '2.8rem',
    color: '#fff',
    backgroundColor: 'black',
    marginBottom: 50,
  },
  mainSection: {
    display: 'flex',
    flexDirection: 'column',
    justifyContent: 'center',
    padding: '0 20px',
  },
  agentsSection: {
    marginLeft: 'auto',
    marginRight: 'auto',
    marginTop: '50px',
    width: '100%',
    maxWidth: 1200,
  },
  agentCard: {
    display: 'flex',
    flexDirection: 'column',
    width: '100%',
    transition: 'transform .2s ease-in-out',
    color: 'rgba(0,0,0,.7)',
    '&:hover': {
      transform: 'scale(1.02,1.02)',
    },
  },
  // Square aspect-ratio box (padding-top trick) for the agent photo.
  profilePicWrapper: {
    paddingTop: '100%',
    width: '100%',
    position: 'relative',
  },
  profilePicAnchor: {
    position: 'absolute',
    top: 0,
    left: 0,
    bottom: 0,
    right: 0,
  },
  profilePic: {
    width: '100%',
    height: 'auto',
    objectFit: 'cover',
    cursor: 'pointer',
  },
  detailsWrapper: {
    padding: '20px 20px',
    display: 'flex',
    flexDirection: 'column',
    border: '1px solid rgba(0,0,0,.1)',
    borderTop: 'none',
  },
  agentName: {
    fontSize: '1.5rem',
    cursor: 'pointer',
  },
  agentNameAnchor: {
    marginBottom: '7px',
    textDecoration: 'none',
    color: 'rgba(0,0,0,.8)',
  },
  agentEmail: {
    marginBottom: '10px',
    fontSize: '.9rem',
    fontFamily: theme.frontEnd.typography.fontFamily.sansSerif2,
    textDecoration: 'none',
    color: 'rgba(0,0,0,.7)',
  },
  agentMobileNumber: {
    marginBottom: '10px',
    color: 'rgba(0,0,0,.5)',
    fontSize: '.9rem',
    fontFamily: theme.frontEnd.typography.fontFamily.sansSerif2,
  },
  filterInputWrapper: {
    position: 'relative',
    marginLeft: 'auto',
    marginRight: 'auto',
    width: '100%',
    maxWidth: 1200,
  },
  filterInput: {
    padding: '10px 12px',
    paddingRight: 35,
    width: '100%',
    maxWidth: 1200,
    height: 40,
    fontSize: '.9rem',
    border: '1px solid rgba(0,0,0,.3)',
    borderRadius: 5,
  },
  searchIconWrapper: {
    position: 'absolute',
    right: 20,
    top: 8,
    height: 10,
    width: 10,
  },
});
@withStyles(styles)
@observer
class AllAgentsView extends Component {
  state = {
    filterValue: '',
  };

  /** Keep the search input's value in component state. */
  onFilterChange = e => {
    const { value } = e.target;
    if (e.preventDefault) e.preventDefault();
    this.setState({ filterValue: value });
  };

  /**
   * Render a grid card for every agent whose name matches the current
   * (case-insensitive) filter text; an empty filter shows all agents.
   */
  renderAgents = agents => {
    const { classes } = this.props;
    const filter = this.state.filterValue.trim().toLowerCase();
    const viewableAgents = filter
      ? agents.filter(agent => agent.name.toLowerCase().includes(filter))
      : agents;
    return viewableAgents.map(agent => (
      <Grid item xs={12} sm={6} md={4} lg={3} key={agent.email}>
        <div className={classes.agentCard}>
          <div className={classes.profilePicWrapper}>
            <Link route="agent" params={{ id: agent.agentID }}>
              <a className={classes.profilePicAnchor}>
                <img
                  className={classes.profilePic}
                  src={agent.profilePhotoURL}
                  alt="agent"
                />
              </a>
            </Link>
          </div>
          <div className={classes.detailsWrapper}>
            <Link route="agent" params={{ id: agent.agentID }}>
              <a className={classes.agentNameAnchor}>
                <div className={classes.agentName}>{agent.name}</div>
              </a>
            </Link>
            <div className={classes.agentEmail}>{agent.email}</div>
            <div className={classes.agentMobileNumber}>
              M: {agent.mobileNumber}
            </div>
          </div>
        </div>
      </Grid>
    ));
  };

  render() {
    const { agents, classes } = this.props;
    return (
      <div className={classes.root}>
        <div className={classes.titleSection}>Our Agents</div>
        <div className={classes.mainSection}>
          <div className={classes.filterInputWrapper}>
            <input
              className={classes.filterInput}
              value={this.state.filterValue}
              onChange={this.onFilterChange}
              type="text"
              placeholder="Search Agents..."
            />
            <span className={classes.searchIconWrapper}>
              <SearchIcon />
            </span>
          </div>
          <div className={classes.agentsSection}>
            <Grid container spacing={24}>
              {/* Use `agents.length > 0` (not bare `agents.length`) so an
                  empty array does not render a literal "0" in the DOM. */}
              {agents && agents.length > 0 && this.renderAgents(agents)}
            </Grid>
          </div>
        </div>
      </div>
    );
  }
}

export default AllAgentsView;
|
#!/bin/sh
# Bootstrap the autotools build system for TBox Logger: picks whichever
# libtoolize/intltoolize/aclocal/automake variants exist on this host,
# then regenerates the configure scripts.
#AM_VERSION="1.10"
if ! type aclocal-$AM_VERSION 1>/dev/null 2>&1; then
# automake-1.10 (recommended) is not available on Fedora 8
AUTOMAKE=automake
ACLOCAL=aclocal
else
ACLOCAL=aclocal-${AM_VERSION}
AUTOMAKE=automake-${AM_VERSION}
fi
# MacPorts installs GNU libtoolize as "glibtoolize".
if test -f /opt/local/bin/glibtoolize ; then
# darwin
LIBTOOLIZE=/opt/local/bin/glibtoolize
else
LIBTOOLIZE=libtoolize
fi
# Pick up extra m4 macro directories when present.
if test -d /opt/local/share/aclocal ; then
ACLOCAL_ARGS="-I /opt/local/share/aclocal"
fi
if test -d /share/aclocal ; then
ACLOCAL_ARGS="$ACLOCAL_ARGS -I /share/aclocal"
fi
if test -f /opt/local/bin/intltoolize ; then
#darwin
INTLTOOLIZE=/opt/local/bin/intltoolize
else
#on mingw, it is important to invoke intltoolize with an absolute path to avoid a bug
INTLTOOLIZE=/usr/bin/intltoolize
fi
echo "Generating build scripts in TBox Logger..."
# Echo each command from here on so failures are easy to locate.
set -x
$LIBTOOLIZE --copy --force
$INTLTOOLIZE -c --force --automake
$ACLOCAL -I m4 $ACLOCAL_ARGS
autoheader
$AUTOMAKE --force-missing --add-missing --copy
autoconf
|
#!/usr/bin/env bats
# shellcheck shell=bats
# shellcheck disable=SC2031
# Tests for the gaudi-bash uninstall script: it should restore the user's
# original profile from the .bak backup when one exists, and otherwise
# preserve the current profile as a *.uninstall file.
load "$GAUDI_TEST_DIRECTORY"/helper.bash

@test "gaudi-bash uninstall: verify that the uninstall script exists" {
  assert_file_exist "$GAUDI_BASH/uninstall.sh"
}

@test "gaudi-bash uninstall: run the uninstall script with an existing backup file" {
  # Declare both checksum variables as locals so they do not leak into
  # global scope (md5_bak was previously an implicit global).
  local md5_bak
  local md5_conf
  echo "test file content for backup" > "$HOME/$GAUDI_BASH_PROFILE.bak"
  echo "test file content for original file" > "$HOME/$GAUDI_BASH_PROFILE"
  md5_bak=$(md5sum "$HOME/$GAUDI_BASH_PROFILE.bak" | awk '{print $1}')
  . "$GAUDI_BASH"/uninstall.sh
  # The backup must be restored over the profile and then removed.
  assert_file_not_exist "$HOME/$GAUDI_BASH_PROFILE.uninstall"
  assert_file_not_exist "$HOME/$GAUDI_BASH_PROFILE.bak"
  assert_file_exist "$HOME/$GAUDI_BASH_PROFILE"
  md5_conf=$(md5sum "$HOME/$GAUDI_BASH_PROFILE" | awk '{print $1}')
  assert_equal "$md5_bak" "$md5_conf"
}

@test "gaudi-bash uninstall: run the uninstall script without an existing backup file" {
  local md5_uninstall
  local md5_orig
  echo "test file content for original file" > "$HOME/$GAUDI_BASH_PROFILE"
  md5_orig=$(md5sum "$HOME/$GAUDI_BASH_PROFILE" | awk '{print $1}')
  . "$GAUDI_BASH"/uninstall.sh
  # Without a backup, the profile must be preserved as *.uninstall.
  assert_file_exist "$HOME/$GAUDI_BASH_PROFILE.uninstall"
  assert_file_not_exist "$HOME/$GAUDI_BASH_PROFILE.bak"
  assert_file_not_exist "$HOME/$GAUDI_BASH_PROFILE"
  md5_uninstall=$(md5sum "$HOME/$GAUDI_BASH_PROFILE.uninstall" | awk '{print $1}')
  assert_equal "$md5_orig" "$md5_uninstall"
}
|
#! /bin/sh
# cleans cproject
# leaves EPMC metadata , */fulltext.xml
# run from CProject directory (e.g. osanctum200)
# NOTE(review): plain `rm` (no -f) prints an error for any target that is
# already absent; harmless but noisy — confirm whether -f is preferred.
# HTML from XML
rm */scholarly.html
# per-tree results
rm */*.snippets.xml
rm */*.count.xml
# project-level aggregated search results
rm -rf search.*.xml word.*.xml species.*.xml
rm -rf */results
# cooccurrence
rm -rf __cooccurrence/
# summary tables
rm commonest.dataTables.html count.dataTables.html entries.dataTables.html full.dataTables.html rawTree.txt word.frequencies.*
|
<reponame>lahosken/pants<filename>testprojects/src/java/org/pantsbuild/testproject/inccompile/libwithjettyserver/Lib.java
// NOTE(review): the directory name says "libwithjettyserver" but the
// package is "...libwithjettydep" — confirm the mismatch is intentional.
package org.pantsbuild.testproject.inccompile.libwithjettydep;

/** Minimal placeholder library class for the Pants incremental-compile test project. */
public class Lib {
  /** No-op placeholder method. */
  void method() { }
}
|
export { New } from "./new";
|
#!/bin/bash
#SBATCH -a 0-2
#SBATCH -o ./cluster_logs/deepens_minima/%A-%a.o
#SBATCH -e ./cluster_logs/deepens_minima/%A-%a.e
#SBATCH --gres=gpu:1 # reserves GPUs
#SBATCH -J deepens-gm # sets the job name. If not specified, the file name will be used as job name

# Info
echo "Workingdir: $PWD"
echo "Started at $(date)"
echo "Running job $SLURM_JOB_NAME using $SLURM_JOB_CPUS_PER_NODE cpus per node with given JID $SLURM_JOB_ID on queue $SLURM_JOB_PARTITION"

# Activate virtual environment
source venv/bin/activate

# Arrayjob: each of the 3 array tasks trains one base learner on CIFAR-10,
# seeded by its SLURM array index, under the "deepens_minimum" scheme.
PYTHONPATH=$PWD python nes/nasbench201/scripts/train_deepens_baselearner.py \
--seed_id $SLURM_ARRAY_TASK_ID \
--working_directory "experiments/nb201/cifar10/baselearners/deepens_minimum/" \
--dataset cifar10 --global_seed 1 --scheme deepens_minimum --train_global_optima

# Done
echo "DONE"
echo "Finished at $(date)"
|
from abc import ABC, abstractmethod
class AbstractGrid(ABC):
    """Abstract base class for 2-D grid containers.

    Stores only the grid dimensions and provides bounds checking; concrete
    subclasses supply the storage and per-cell access.
    """

    def __init__(self, width: int, height: int):
        self._width = width
        self._height = height
        super().__init__()

    def get_width(self) -> int:
        """Return the grid width (number of valid x values)."""
        return self._width

    def get_height(self) -> int:
        """Return the grid height (number of valid y values)."""
        return self._height

    @abstractmethod
    def get_grid(self):
        """Return the underlying grid data."""

    @abstractmethod
    def set_grid(self, grid_data):
        """Replace the grid contents with ``grid_data``."""

    @abstractmethod
    def force_set_grid(self, grid_data):
        """Replace the grid contents with ``grid_data`` unconditionally
        (presumably skipping whatever checks ``set_grid`` performs —
        confirm against a concrete subclass)."""

    @abstractmethod
    def get_observation(self):
        """Return an observation of the current grid state."""

    @abstractmethod
    def get_cell(self, x: int, y: int):
        """Return the cell stored at coordinates (x, y)."""

    @abstractmethod
    def set_cell(self, x: int, y: int, cell):
        """Store ``cell`` at coordinates (x, y)."""

    @abstractmethod
    def get_difference(self, other: 'AbstractGrid'):
        """Return the difference between this grid and ``other``."""

    @abstractmethod
    def deep_copy(self):
        """Return an independent copy of this grid."""

    def in_limits(self, x: int, y: int) -> bool:
        """Return True when (x, y) lies inside the grid bounds."""
        # Chained comparisons instead of the original De-Morgan'd
        # double negative; behavior is identical.
        return 0 <= x < self._width and 0 <= y < self._height
|
# Demo: print the upper-cased form of a sample sentence.
sentence = "This is a sample sentence."
print(output := sentence.upper())
def get_preferred_language():
    """Return the first language tag from the HTTP Accept-Language header.

    Reads the CGI-style ``HTTP_ACCEPT_LANGUAGE`` environment variable
    (e.g. ``"en-US,en;q=0.9"``) and returns the first comma-separated
    entry with its quality factor (``;q=...``) removed and surrounding
    whitespace stripped.  Returns ``''`` when the variable is unset.
    """
    header = os.environ.get('HTTP_ACCEPT_LANGUAGE', '')
    if not header:
        return ''
    # Entries are comma-separated; each may carry a ";q=" quality factor.
    first_entry = header.split(',')[0]
    # Strip whitespace the original left in place for headers like "a, b".
    return first_entry.split(';')[0].strip()
import { PortfolioViewComponent } from './portfolio-view/portfolio-view.component';
import { FirstPageComponent } from './first-page/first-page.component';
import { PostListComponent } from './post-list/post-list.component';
import { CreatePostComponent } from './create-post/create-post.component';
import { SignupComponent } from './signup/signup.component';
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { ArtistLoginComponent } from './artist-login/artist-login.component';
import { ArtistPortfolioComponent } from './artist-portfolio/artist-portfolio.component';
import { EditPortfolioComponent } from './edit-portfolio/edit-portfolio.component';
import { UserSignupComponent } from './user-signup/user-signup.component';
import { UserLoginComponent } from './user-login/user-login.component';
// Top-level application routes mapping URL paths to page components.
const routes: Routes = [
  { path: '', component: FirstPageComponent }, // landing page
  { path: 'artist-signup', component: SignupComponent },
  { path: 'artist-login', component: ArtistLoginComponent },
  { path: 'artist-portfolio', component: ArtistPortfolioComponent },
  { path: 'edit-portfolio', component: EditPortfolioComponent },
  { path: 'create-post', component: CreatePostComponent },
  { path: 'user-signup', component: UserSignupComponent },
  { path: 'user-login', component: UserLoginComponent },
  { path: 'post-list', component: PostListComponent },
  { path: 'view-portfolio', component: PortfolioViewComponent }
];
/**
 * Root routing module: registers the application's routes with the Angular
 * router and re-exports RouterModule for use by the root AppModule.
 */
@NgModule({
  imports: [RouterModule.forRoot(routes)],
  exports: [RouterModule]
})
export class AppRoutingModule { }
|
const express = require('express');
const app = express();
const bodyParser = require('body-parser');
// use body parser for managing form submission
app.use(bodyParser.urlencoded({extended: true}));
// use the Stripe API for payments
const stripe = require('stripe')(process.env.STRIPE_SECRET_KEY);
// create an array to store the shopping cart
// NOTE(review): this is a single module-level cart shared by every client
// of the server, not a per-user/session cart — confirm that is intended.
let cart = [];
// POST /cart — append one item name to the cart.
app.post('/cart', (req, res) => {
  // add the item to the cart
  cart.push(req.body.name);
  // send back a success message
  res.json({message: 'item added to cart'});
});
// GET /cart — return the current cart contents.
app.get('/cart', (req, res) => {
  res.json({cartContents: cart});
});
// POST /checkout — charge the provided Stripe token and empty the cart.
app.post('/checkout', (req, res) => {
  // use the Stripe API to process the payment
  // NOTE(review): amount is cart.length * 100 cents, i.e. a flat $1 per
  // item regardless of price — confirm this pricing is intentional.
  // The return value is unused; charges.create is used in callback style.
  const stripeCharge = stripe.charges.create({
    amount: cart.length * 100, // convert to cents
    currency: 'usd',
    source: req.body.stripeToken
  }, (err, charge) => {
    if (err) {
      console.log(err);
      return res.status(500).json({message: 'payment failed'});
    }
    // remove the cart contents
    cart = [];
    // send a success message
    res.json({message: 'payment successful'});
  });
});
// start the application
app.listen(3000, () => {
  console.log('listening on port 3000');
});
using System;
/// <summary>
/// Opaque Win32 touch-input handle (HTOUCHINPUT) wrapped as a managed struct
/// around an <see cref="IntPtr"/>.
/// </summary>
/// <remarks>
/// The struct is marked <c>unsafe</c> because its constructor and conversion
/// operators use <c>void*</c>; without the modifier the pointer members do not
/// compile (CS0214).  Requires AllowUnsafeBlocks.
/// </remarks>
public unsafe struct HTOUCHINPUT
{
    private readonly IntPtr value;

    /// <summary>Wraps a raw pointer to the native touch-input data.</summary>
    public HTOUCHINPUT(void* touchInputData)
    {
        value = new IntPtr(touchInputData);
    }

    /// <summary>Gets the underlying handle value.</summary>
    public IntPtr Value => value;

    public static explicit operator HTOUCHINPUT(int value) => new HTOUCHINPUT((void*)value);

    public static explicit operator int(HTOUCHINPUT value) => value.Value.ToInt32();

    public static explicit operator HTOUCHINPUT(long value) => new HTOUCHINPUT((void*)value);
}
#!/bin/sh
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Print the current date and time.
echo "The time is $(date)."
|
#!/bin/bash
# Apply a Terraform configuration for a named workspace.
#
# Usage: ./<script> <workspace-name>
# Selects the Terraform workspace (creating it when missing), then runs
# `terraform apply` against the matching settings/<workspace>.tfvars file.
set +x
# set -e

if [ "$#" -lt 1 ]; then
  script_name=$(basename "$0")
  echo "Usage: ${script_name} <workspace-name>"
  echo "Example: ${script_name} example"
  # `exit -1` is non-portable; any non-zero status signals failure.
  exit 1
fi

WORKSPACE=$1

# Select/Create Terraform Workspace: create it only when `select` fails.
if ! terraform workspace select "${WORKSPACE}"; then
  terraform workspace new "${WORKSPACE}"
fi

terraform apply -var-file="settings/${WORKSPACE}.tfvars" -auto-approve
|
#!/bin/bash
# Evaluate a trained VGG16 RPN-MSR model on the ObjectNet3D test_2 split.
# Usage: ./<script> <gpu-id>
set -x
set -e

export PYTHONUNBUFFERED="True"

# Duplicate all output to a timestamped log file while still printing it.
LOG="experiments/logs/objectnet3d_vgg16_rpn_msr_test_2.txt.`date +'%Y-%m-%d_%H-%M-%S'`"
exec &> >(tee -a "$LOG")
echo Logging output to "$LOG"

time ./tools/test_net.py --gpu $1 \
  --def models/VGG16/objectnet3d/test_rpn_msr.prototxt \
  --net output/objectnet3d/objectnet3d_trainval/vgg16_fast_rcnn_rpn_msr_objectnet3d_iter_160000.caffemodel \
  --imdb objectnet3d_test_2 \
  --cfg experiments/cfgs/objectnet3d_rpn_msr.yml
// Doxygen-generated directory index: [display name, page file, reference id].
// Do not edit by hand; regenerated by the documentation build.
var dir_bea2aaa5ee1b83ed83ac6ec2539c5ff9 =
[
    [ "MultipleNetworksCifar10.cpp", "_multiple_networks_cifar10_8cpp.xhtml", "_multiple_networks_cifar10_8cpp" ]
];
import { TouchableOpacity } from 'react-native'
// NOTE(review): TouchableOpacity appears unused here (styled.TouchableOpacity
// is referenced instead) — confirm before removing the import.
import styled from 'styled-components/native'
import { Label } from './Text'

/**
 * Button styling: a themed TouchableOpacity.
 * Props: `colorIndex` picks a theme color; `flat` keeps only the border
 * colored; `disabled` switches to the theme's "unknown" color.
 */
const Button = styled.TouchableOpacity`
  border-radius: 3px;
  padding: 8px;
  margin: 0;
  border: 2px solid ${props => props.theme.colorIndex['grey-2']};
  background-color: ${props => props.theme.colorIndex['light-1']};
  align-items: center;
  ${props => props.flat && props.colorIndex && `
    border: 2px solid ${props.theme.colorIndex[props.colorIndex]};
  `}
  ${props => !props.flat && props.colorIndex && `
    border: 2px solid ${props.theme.colorIndex[props.colorIndex]};
    background-color: ${props.theme.colorIndex[props.colorIndex]};
  `}
  ${props => props.disabled && `
    border: 2px solid ${props.theme.colorIndex.unknown};
    background-color: ${props.theme.colorIndex.unknown};
  `}
`

/**
 * Anchor styling: a Label with padding controlled by the `pad` prop
 * (none/small/medium/large; defaults to 16px).
 */
const Anchor = Label.extend`
  padding: 16px;
  ${props => props.pad === 'none' && `
    padding: 0;
  `}
  ${props => props.pad === 'small' && `
    padding: 8px;
  `}
  ${props => props.pad === 'medium' && `
    padding: 16px;
  `}
  ${props => props.pad === 'large' && `
    padding: 32px;
  `}
`

/**
 * Floating action button styling: an absolutely-positioned transparent
 * Button placed via the `position` prop (bottom-center/bottom-right/
 * bottom-left; defaults to bottom-right at 16px).
 */
const ActionButton = Button.extend`
  position: absolute;
  border-radius: 0px;
  border-color: transparent;
  background-color: transparent;
  bottom: 16px;
  right: 16px;
  ${props => props.position === 'bottom-center' && `
    bottom: 24px;
    align-self: center;
  `}
  ${props => props.position === 'bottom-right' && `
    bottom: 24px;
    right: 24px;
  `}
  ${props => props.position === 'bottom-left' && `
    bottom: 24px;
    left: 24px;
  `}
`

export {
  Button,
  Anchor,
  ActionButton,
}
|
import React from 'react';
/** Props for CardBody: all standard div attributes (className, children, ...). */
interface Props extends React.HTMLAttributes<HTMLDivElement> {
}
/** Card body region; forwards its ref (typed HTMLDivElement) to the rendered element. */
declare const CardBody: React.ForwardRefExoticComponent<Props & React.RefAttributes<HTMLDivElement>>;
export default CardBody;
//# sourceMappingURL=CardBody.d.ts.map
import numpy as np
from scipy.signal import find_peaks, savgol_filter
from scipy.optimize import curve_fit
def find_signal_peaks(signal):
# Smooth the signal using Savitzky-Golay filter
smoothed_signal = savgol_filter(signal, 5, 2)
# Find peaks in the smoothed signal
peaks, _ = find_peaks(smoothed_signal, distance=1)
# Fit a curve to the signal
def gaussian(x, height, center, width):
return height * np.exp(-(x - center)**2 / (2 * width**2))
p0 = [max(signal), np.argmax(signal), 1.0]
coeff, _ = curve_fit(gaussian, np.arange(len(signal)), signal, p0=p0)
# Return the indices of the identified peaks in the original signal
peak_indices = [peak for peak in peaks if signal[peak] > coeff[0] * 0.5]
return peak_indices |
/** bamutil.h **/
/*
*| File: bamutil.h |*
*| |*
*| Copyright (c) 2016-2018 University of Washington All rights reserved. |*
*| |*
*| Redistribution and use in source and binary forms, with or without |*
*| modification, are permitted provided that the following conditions are |*
*| met: |*
*| |*
*| Redistributions of source code must retain the above copyright notice, |*
*| this list of conditions and the following disclaimer. |*
*| |*
*| Redistributions in binary form must reproduce the above copyright |*
*| notice, this list of conditions and the following disclaimer in the |*
*| documentation and/or other materials provided with the distribution. |*
*| |*
*| Neither the name of the University of Washington nor the names of its |*
*| contributors may be used to endorse or promote products derived from |*
*| this software without specific prior written permission. |*
*| |*
*| This software is provided by the university of washington and |*
*| contributors "as is" and any express or implied warranties, including, |*
*| but not limited to, the implied warranties of merchantability and |*
*| fitness for a particular purpose are disclaimed. In no event shall the |*
*| University of Washington or contributors be liable for any direct, |*
*| indirect, incidental, special, exemplary, or consequential damages |*
*| (including, but not limited to, procurement of substitute goods or |*
*| services; loss of use, data, or profits; or business interruption) |*
*| however caused and on any theory of liability, whether in contract, |*
*| strict liability, or tort (including negligence or otherwise) arising |*
*| in any way out of the use of this software, even if advised of the |*
*| possibility of such damage. |*
*/
/*
** Version: 20160802
*/
/*
** The master file location is
**
** whim:/users/bge/src/bamutil/bam2fast.h
**
*/
#define BAM_ADDITIONAL
#define BAM_DSC_CHR_POS
#define INTRON_AS_DISCREPANCY
// #define BAM_DSC_NO_SORT
#ifndef BAMUTIL_H
#define BAMUTIL_H
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <zlib.h>
#include "bgzf.h"
#include "util.h"
/* One reference sequence (name and length) parsed from the BAM header. */
typedef struct
{
  char *name;
  int32_t length;
} BamRefSeq;

/* Program information (name/command/description/version) from the BAM header. */
typedef struct
{
  char *name;
  char *command;
  char *description;
  char *version;
} BamProgram;

/* Parsed BAM header: free-text description, reference sequences, program info. */
typedef struct
{
  char *description; // bamReadHeader stores a NULL-terminated string
  int numRefSeq;
  BamRefSeq *refSeq;
  BamProgram program;
} BamHeader;

/* One optional (auxiliary) field of an alignment record. */
typedef struct
{
  char tag[2];
  char type[2]; /* type[1] is B-type array type */
  int alen; /* B-type array length */
  void *value;
} BamOptField;

/* One token of a parsed MD tag string. */
typedef struct
{
  char type; /* 0=undefined; 1=int; 2=sub; 3=del */
  int len; /* length of tok, if defined */
  int mxlen; /* allocated length of tok */
  char *tok;
} BamMdTok;
/*
** Defined for BamDsc.base (below).
*/
#define BAM_DSC_REF_A ( 1 << 0 )
#define BAM_DSC_REF_C ( 1 << 1 )
#define BAM_DSC_REF_G ( 1 << 2 )
#define BAM_DSC_REF_T ( 1 << 3 )
#define BAM_DSC_REF_N ( 1 << 4 )
/* NOTE(review): "RED" here presumably means "READ" (read N, bit 5, per the
** BamDsc.base comment) — name kept as-is for compatibility. */
#define BAM_DSC_RED_N ( 1 << 5 )

/* A single read-vs-reference discrepancy (substitution, indel, or intron). */
typedef struct
{
  char type; /* 1=sub; 2=del; 3=ins; 4=intron (optional) */
  int32_t pos; /* discrepancy start location in read coordinates (deletion: immediately to 5' end of deletion start); 1-based */
  int32_t len; /* indel length */
  uint8_t base; /* reference base(s) unknown: 0; A: bit 0; C: bit 1; G: bit2; T: bit 3; ref N: bit 4; read N: bit 5 Note: bamGetDsc does not check for Ns not marked as dscs */
#ifdef BAM_DSC_CHR_POS
  int32_t posChr; /* pos in the SAM format BAM file is int32_t; 1-based */
#endif
} BamDsc;
/* Extra per-alignment data computed by this library: scores, clipping,
** implied read/alignment extents, and the discrepancy list. */
typedef struct
{
  int mx_read_name;
  char *read_name; // original read name (with original suffix)
  int dscore0; // alignment score
  int dscore; // alignment score
  int pscore; // alignment score
  int black_list; // 0=not aligned to black-listed region; 1=aligned to black-listed region
  int clip5[2]; // soft: clip5[0]; hard: clip5[1]
  int clip3[2]; // soft: clip3[0]; hard: clip3[1]
  int8_t hclip;
  int64_t begRead; // read implied start in genomic coordinates
  int64_t endRead; // read implied end in genomic coordinates
  int64_t begAlign; // alignment start in genomic coordinates (same as bamAlign.pos)
  int64_t endAlign; // alignment end in genomic coordinates
  int allocDsc;
  int numDsc;
  uint8_t bitFlag;
  int32_t mx_l_seq;
  char *seq;
  int8_t template_end; // template end of read for paired end read set 0=fwd; 1=rev; -1=unknown
  BamDsc *dsc;
  int32_t nclpDsc;
  int32_t nfltDsc;
} BamAdditional;
/*
** Notes:
**   o  the mx_* records give the number of units
**      of allocated memory for the corresponding
**      variable. For example, mx_cigar_op has the
**      number of allocated uint32_t values for
**      *cigar. The point is to allow reusing a BamAlign
**      structure to minimize allocation/freeing.
**
**   o  bamReadAlign expands (reallocs) read_name, cigar, seq, and qual
**      to lengths mx_read_name, mx_cigar_op, and mx_l_seq.
**   o  bamReadAlign expands (reallocs) 'field' but not values stored in
**      each field structure (it mallocs these)
*/
/* One BAM alignment record (core fields, optional fields, and the
** BamAdditional extension when BAM_ADDITIONAL is defined). */
typedef struct
{
  int32_t refid;
  int32_t pos; /* this is POS; that is, starting at base 1 */
  uint32_t bin;
  int32_t mapq; /* mapping quality */
  uint32_t flag;
  uint32_t n_cigar_op; /* number of CIGAR operations */
  int32_t l_seq; /* sequence length */
  int32_t next_refid;
  int32_t next_pos;
  int32_t tlen; /* template length */
  int32_t mx_read_name;
  char *read_name;
  int32_t mx_cigar_op;
  uint32_t *cigar;
  int32_t mx_l_seq;
  char *seq; /* bases - in BAM orientation and packed two bases/byte*/
  char *qual; /* quality values as char (not ascii-encoded) */
  int32_t mx_field;
  int32_t numOptField;
  BamOptField *field;
#ifdef BAM_ADDITIONAL
  BamAdditional addl;
#endif
} BamAlign;
/* ---- Public API prototypes. Each returns an int status and reports
** detail through the *fstatus out-parameter. ---- */
int bamReadHeader( BGZF *fp, BamHeader *header, int *fstatus );
int bamInitAlign( BamAlign *bamAlign, int *fstatus );
int bamTestCigar( BGZF *fp, int *fcigarFlag, int *fmdFlag, int *fstatus );
int bamReadAlign( BGZF *fp, BamAlign *align, int *fstatus );
int bamDumpSamHeader( BamHeader *header );
int bamDumpSamAlign( BamHeader *header, BamAlign *align );
int bamWriteSamHeader( FILE *fp, BamHeader *header, int *fstatus );
int bamWriteSamAlign( FILE *fp, BamHeader *header, BamAlign *align, int *fstatus );
int bamCigarOp( uint32_t uop, char *op, int32_t *len, int *fstatus );
int bamCigarUop( char cop, int32_t lop, uint32_t *fuop, int *fstatus );
int bamUnpackSeq( BamAlign *align, char **sbuf, int *lbuf, int *fstatus );
/* NOTE(review): duplicate prototype — bamTestCigar is already declared above. */
int bamTestCigar( BGZF *fp, int *fcigarFlag, int *fmdFlag, int *fstatus );
int bamCountDsc( BamAlign *bamAlign, int cigarFlag, int mdFlag, int *fnumSub, int *fnumDel, int *fnumIns, int *fstatus );
int bamGetDsc( BamAlign *bamAlign, int cigarFlag, int mdFlag, BamDsc **fbamDsc, int *fnumBamDsc, int *fallocBamDsc, int clip5[2], int clip3[2], int *fstatus );
int bamDelNDsc( BamDsc **fbamDsc, int *fnumBamDsc, int *numDelN, int *fstatus );
int bamCalcEndsRead( BamAlign *bamAlign, BamDsc *bamDsc, int numBamDsc, int clip5[2], int clip3[2], int64_t *fbpos, int64_t *fepos, int *fstatus );
int bamCalcEndsAlign( BamAlign *bamAlign, BamDsc *bamDsc, int numBamDsc, int clip5[2], int clip3[2], int64_t *fbpos, int64_t *fepos, int *fstatus );
int bamFreeAlign( BamAlign *align );
int bamMoveAlign( BamAlign *dst, BamAlign *src, int *fstatus );
int getAlignSet( char *nameRead, BGZF *fp, BamAlign **fbamAlign, int *fmxBamAlign, int *fnumBamAlign, int *faflag, int *fstatus );
int strnum_cmp(const void *_a, const void *_b);
int bamReportAlign( char *label, BamAlign *bamAlign, int numBamAlign, char *refSeq, int64_t lenRefSeq, char *nameRefSeq, FILE *afp, int *fstatus );
int bamSetBufAlign( BamAlign *bamAlign, char *refSeq, int64_t lenRefSeq, char **frbuf, char **fqbuf, char **fdbuf, char **fsbuf, char **ftbuf, int *flbuf, int64_t *fbegExt, int *fstatus );
int bamSetRefBase( BamDsc *bamDsc, int numBamDsc, char *refSeq, int64_t lenRefSeq, int *fstatus );
int bamGetMdTok( char *string, BamMdTok **fmdTok, int *fmxTok, int *fntok, int *fstatus );
#endif
|
<filename>src/mono/wasm/runtime/diagnostics.ts
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
import { Module } from "./imports";
import cwraps from "./cwraps";
import type { EventPipeSessionOptions } from "./types";
import type { VoidPtr } from "./types/emscripten";
import * as memory from "./memory";
// Byte width of the stack-allocated int32 out-parameter slot used below.
const sizeOfInt32 = 4;

export type EventPipeSessionID = bigint;

// Internal representation: the native layer reports session IDs as 32-bit numbers.
type EventPipeSessionIDImpl = number;

/// An EventPipe session object represents a single diagnostic tracing session that is collecting
/// events from the runtime and managed libraries.  There may be multiple active sessions at the same time.
/// Each session subscribes to a number of providers and will collect events from the time that start() is called, until stop() is called.
/// Upon completion the session saves the events to a file on the VFS.
/// The data can then be retrieved as Blob.
export interface EventPipeSession {
    // session ID for debugging logging only
    get sessionID(): EventPipeSessionID;
    start(): void;
    stop(): void;
    getTraceBlob(): Blob;
}

// internal session state of the JS instance
// (one-way progression: Initialized -> Started -> Done)
enum State {
    Initialized,
    Started,
    Done,
}

/// Tell the native EventPipe session to begin streaming events.
function start_streaming(sessionID: EventPipeSessionIDImpl): void {
    cwraps.mono_wasm_event_pipe_session_start_streaming(sessionID);
}

/// Disable the native EventPipe session, ending event collection.
function stop_streaming(sessionID: EventPipeSessionIDImpl): void {
    cwraps.mono_wasm_event_pipe_session_disable(sessionID);
}
/// An EventPipe session that saves the event data to a file in the VFS.
/// State machine: Initialized -> Started -> Done; each transition is one-way
/// and out-of-order calls throw.
class EventPipeFileSession implements EventPipeSession {
    private _state: State;
    private _sessionID: EventPipeSessionIDImpl;
    private _tracePath: string; // VFS file path to the trace file

    get sessionID(): bigint { return BigInt(this._sessionID); }

    constructor(sessionID: EventPipeSessionIDImpl, tracePath: string) {
        this._state = State.Initialized;
        this._sessionID = sessionID;
        this._tracePath = tracePath;
        console.debug(`EventPipe session ${this.sessionID} created`);
    }

    /// Begin streaming; valid only once, from the Initialized state.
    start = () => {
        if (this._state !== State.Initialized) {
            throw new Error(`EventPipe session ${this.sessionID} already started`);
        }
        this._state = State.Started;
        start_streaming(this._sessionID);
        console.debug(`EventPipe session ${this.sessionID} started`);
    }

    /// End streaming; valid only once, from the Started state.
    stop = () => {
        if (this._state !== State.Started) {
            throw new Error(`cannot stop an EventPipe session in state ${this._state}, not 'Started'`);
        }
        this._state = State.Done;
        stop_streaming(this._sessionID);
        console.debug(`EventPipe session ${this.sessionID} stopped`);
    }

    /// Read the finished trace file from the VFS and return it as a Blob.
    /// Only valid after stop() has completed.
    getTraceBlob = () => {
        if (this._state !== State.Done) {
            throw new Error(`session is in state ${this._state}, not 'Done'`);
        }
        const data = Module.FS_readFile(this._tracePath, { encoding: "binary" }) as Uint8Array;
        return new Blob([data], { type: "application/octet-stream" });
    }
}
// Event verbosity levels; higher numeric value means more detail.
const eventLevel = {
    LogAlways: 0,
    Critical: 1,
    Error: 2,
    Warning: 3,
    Informational: 4,
    Verbose: 5,
} as const;

type EventLevel = typeof eventLevel;

// Provider configuration fields without the provider name (used for overrides).
type UnnamedProviderConfiguration = Partial<{
    keyword_mask: string | 0;
    level: number;
    args: string;
}>

/// The configuration for an individual provider.  Each provider configuration has the name of the provider,
/// the level of events to collect, and a string containing a 32-bit hexadecimal mask (without an "0x" prefix) of
/// the "keywords" to filter a subset of the events. The keyword mask may be the number 0 or "" to skips the filtering.
/// See https://docs.microsoft.com/en-us/dotnet/core/diagnostics/well-known-event-providers  for a list of known providers.
/// Additional providers may be added by applications or libraries that implement an EventSource subclass.
/// See https://docs.microsoft.com/en-us/dotnet/api/system.diagnostics.tracing.eventsource?view=net-6.0
///
/// Some providers also have an "args" string in an arbitrary format.  For example the EventSource providers that
/// include EventCounters have a "EventCounterIntervalSec=NNN" argument that specified how often the counters of
/// the event source should be polled.
export interface ProviderConfiguration extends UnnamedProviderConfiguration {
    name: string;
}

const runtimeProviderName = "Microsoft-Windows-DotNETRuntime";
const runtimePrivateProviderName = "Microsoft-Windows-DotNETRuntimePrivate";
const sampleProfilerProviderName = "Microsoft-DotNETCore-SampleProfiler";

// Default configurations for the three well-known providers, used by
// SessionOptionsBuilder unless overridden by the caller.
const runtimeProviderDefault: ProviderConfiguration = {
    name: runtimeProviderName,
    keyword_mask: "4c14fccbd",
    level: eventLevel.Verbose,
};

const runtimePrivateProviderDefault: ProviderConfiguration = {
    name: runtimePrivateProviderName,
    keyword_mask: "4002000b",
    level: eventLevel.Verbose,
};

const sampleProfilerProviderDefault: ProviderConfiguration = {
    name: sampleProfilerProviderName,
    keyword_mask: "0",
    level: eventLevel.Verbose,
};
/// Fluent helper for assembling EventPipeSessionOptions.
export class SessionOptionsBuilder {
    private _collectRundown?: boolean;
    private _configs: ProviderConfiguration[];

    /// Creates a builder with no providers. Prefer SessionOptionsBuilder.Empty.
    constructor() {
        this._configs = [];
    }

    /// A builder with no providers configured.
    static get Empty(): SessionOptionsBuilder { return new SessionOptionsBuilder(); }

    /// A builder pre-populated with the three default .NET providers.
    /// See https://docs.microsoft.com/en-us/dotnet/core/diagnostics/eventpipe#trace-using-environment-variables
    static get DefaultProviders(): SessionOptionsBuilder {
        return this.Empty.addRuntimeProvider().addRuntimePrivateProvider().addSampleProfilerProvider();
    }

    /// Turn collection of rundown events on or off.
    /// Some providers only produce useful diagnostics when rundown events are collected.
    setRundownEnabled(enabled: boolean): SessionOptionsBuilder {
        this._collectRundown = enabled;
        return this;
    }

    /// Append an arbitrary provider configuration.
    addProvider(provider: ProviderConfiguration): SessionOptionsBuilder {
        this._configs.push(provider);
        return this;
    }

    /// Append the Microsoft-Windows-DotNETRuntime provider
    /// (defaults: keyword_mask "4c14fccbd", level Verbose); overrides replace individual fields.
    addRuntimeProvider(overrideOptions?: UnnamedProviderConfiguration): SessionOptionsBuilder {
        return this.addProvider({ ...runtimeProviderDefault, ...overrideOptions });
    }

    /// Append the Microsoft-Windows-DotNETRuntimePrivate provider
    /// (defaults: keyword_mask "4002000b", level Verbose); overrides replace individual fields.
    addRuntimePrivateProvider(overrideOptions?: UnnamedProviderConfiguration): SessionOptionsBuilder {
        return this.addProvider({ ...runtimePrivateProviderDefault, ...overrideOptions });
    }

    /// Append the Microsoft-DotNETCore-SampleProfiler provider
    /// (defaults: keyword_mask "0", level Verbose); overrides replace individual fields.
    addSampleProfilerProvider(overrideOptions?: UnnamedProviderConfiguration): SessionOptionsBuilder {
        return this.addProvider({ ...sampleProfilerProviderDefault, ...overrideOptions });
    }

    /// Produce the EventPipeSessionOptions described by this builder.
    /// Each provider is encoded as "name:keyword_mask:level[:args]".
    build(): EventPipeSessionOptions {
        const parts: string[] = [];
        for (const config of this._configs) {
            const mask = `${config.keyword_mask ?? ""}`;
            const level = config.level ?? eventLevel.Verbose;
            const args = config.args ?? "";
            const suffix = args === "" ? "" : `:${args}`;
            parts.push(`${config.name}:${mask}:${level}${suffix}`);
        }
        return {
            collectRundownEvents: this._collectRundown,
            providers: parts.join(","),
        };
    }
}
// a counter for the number of sessions created; used to give each trace file a unique name
let totalSessions = 0;

/// Callback run with stack-allocated storage for the session-ID out-parameter.
/// Returns the new native session's ID, or false when enabling the session failed.
function createSessionWithPtrCB(sessionIdOutPtr: VoidPtr, options: EventPipeSessionOptions | undefined, tracePath: string): false | number {
    const defaultRundownRequested = true;
    const defaultProviders = ""; // empty string means use the default providers
    const defaultBufferSizeInMB = 1;

    const rundown = options?.collectRundownEvents ?? defaultRundownRequested;
    const providers = options?.providers ?? defaultProviders;

    // Zero the out-parameter before the native call so a failure cannot
    // leave stale stack data to be misread as a session ID.
    memory.setI32(sessionIdOutPtr, 0);
    if (!cwraps.mono_wasm_event_pipe_enable(tracePath, defaultBufferSizeInMB, providers, rundown, sessionIdOutPtr)) {
        return false;
    } else {
        return memory.getI32(sessionIdOutPtr);
    }
}
/// Public surface of the diagnostics module: event levels, the options
/// builder, and the session factory.
export interface Diagnostics {
    EventLevel: EventLevel;
    SessionOptionsBuilder: typeof SessionOptionsBuilder;

    createEventPipeSession(options?: EventPipeSessionOptions): EventPipeSession | null;
}

/// APIs for working with .NET diagnostics from JavaScript.
export const diagnostics: Diagnostics = {
    /// An enumeration of the level (higher value means more detail):
    /// LogAlways: 0,
    /// Critical: 1,
    /// Error: 2,
    /// Warning: 3,
    /// Informational: 4,
    /// Verbose: 5,
    EventLevel: eventLevel,
    /// A builder for creating an EventPipeSessionOptions instance.
    SessionOptionsBuilder: SessionOptionsBuilder,
    /// Creates a new EventPipe session that will collect trace events from the runtime and managed libraries.
    /// Use the options to control the kinds of events to be collected.
    /// Multiple sessions may be created and started at the same time.
    createEventPipeSession(options?: EventPipeSessionOptions): EventPipeSession | null {
        // The session trace is saved to a file in the VFS. The file name doesn't matter,
        // but we'd like it to be distinct from other traces.
        const tracePath = `/trace-${totalSessions++}.nettrace`;
        // Returns false on native failure, otherwise the new session's ID.
        const success = memory.withStackAlloc(sizeOfInt32, createSessionWithPtrCB, options, tracePath);
        if (success === false)
            return null;
        const sessionID = success;

        const session = new EventPipeFileSession(sessionID, tracePath);
        return session;
    },
};

export default diagnostics;
|
<reponame>shahabyazdi/element-popper
import React, { useEffect, useState } from "react";
import ElementPopper from "../../../src/index";
export default function Doc(translate, language) {
const [active, setActive] = useState(false);
const [active1, setActive1] = useState(false);
const [active2, setActive2] = useState(false);
const [active3, setActive3] = useState(false);
const [isPopperReady, setIsPopperReady] = useState(false);
const activeProp = {
title: "Active Prop",
description: (
<>
<p>{translate("active_prop_1")}</p>
<pre>
<code className="language-jsx">
{`const [active, setActive] = useState(false)
<ElementPopper
element={<Element />}
popper={active && <Popper />}
/>`}
</code>
</pre>
<p>{translate("active_prop_2")}</p>
<pre>
<code className="language-jsx">
{`function AsyncPopper() {
const [element, setElement] = useState()
useEffect(() => {
//async operation
setTimeout(() => {
setElement(
<div
style={{
width: "120px",
height: "120px",
backgroundColor: "white"
}}
>
Popper Element
</div>
)
}, 200);
}, [])
return element || <div>Loading ...</div>
}
const [active, setActive] = useState(false)
<ElementPopper
element={<Element />}
popper={<AsyncPopper />}
active={active}
/>`}
</code>
</pre>
<p>{translate("active_prop_3")}</p>
<p>{translate("active_prop_4")}</p>
</>
),
};
const firstExample = {
title: "First Example",
description: "first_example",
code: `import React, { useState } from "react"
import ElementPopper from "react-element-popper"
function Component({ height, width, backgroundColor, children }) {
return (
<div
style={{
width: width + "px",
height: height + "px",
backgroundColor,
textAlign: "center",
display: "flex",
flexDirection: "column",
justifyContent: "center"
}}
>
{children}
</div>
)
}
export default function Example() {
const [active, setActive] = useState(false)
return (
<>
<button
onClick={() => setActive(!active)}
>
${translate("toggle visible")}
</button>
<br />
<ElementPopper
element={(
<Component
height={40}
width={120}
backgroundColor="red"
>
Refrence Element
</Component>
)}
popper={active && (
<Component
height={120}
width={120}
backgroundColor="gray"
>
Popper Element
</Component>
)}
position="${language === "en" ? "right" : "left"}"
/>
</>
)
}`,
jsx: (
<>
<button onClick={() => setActive(!active)}>
{translate("toggle visible")}
</button>
<br />
<ElementPopper
element={
<Component
height={40}
width={120}
backgroundColor="red"
translate={translate}
>
Refrence Element
</Component>
}
popper={
active && (
<Component
height={120}
width={120}
backgroundColor="gray"
translate={translate}
>
Popper Element
</Component>
)
}
position={language === "en" ? "right" : "left"}
/>
</>
),
};
const secondExample = {
title: "Second Example",
jsx: (
<>
<p>{translate("second_example")}</p>
<button onClick={() => setActive1(!active1)}>
{translate("toggle visible")}
</button>
<br />
<ElementPopper
element={
<Component
height={40}
width={120}
backgroundColor="red"
translate={translate}
>
Refrence Element
</Component>
}
popper={
active1 && (
<AsyncComponent
height={120}
width={120}
backgroundColor="gray"
translate={translate}
>
Popper Element
</AsyncComponent>
)
}
position={language === "en" ? "right" : "left"}
/>
<pre>
<code className="language-jsx">
{`function AsyncComponent({ height, width, backgroundColor, children }) {
const [props, setProps] = useState()
useEffect(() => {
setProps({
style: {
width: width + "px",
height: height + "px",
backgroundColor,
textAlign: "center",
display: "flex",
flexDirection: "column",
justifyContent: "center"
}
})
}, [height, width, backgroundColor])
return props ?
<div {...props}>
{children}
</div>
:
null
}
export default function Example() {
const [active, setActive] = useState(false)
return (
<>
<button
onClick={() => setActive(!active)}
>
${translate("toggle visible")}
</button>
<br />
<ElementPopper
element={(
<Component
height={40}
width={120}
backgroundColor="red"
>
Refrence Element
</Component>
)}
popper={active && (
<AsyncComponent
height={120}
width={120}
backgroundColor="gray"
>
Popper Element
</AsyncComponent>
)}
position="${language === "en" ? "right" : "left"}"
/>
</>
)
}`}
</code>
</pre>
<p>{translate("second_example_1")}</p>
<p>{translate("second_example_2")}</p>
<pre>
<code className="language-jsx">
{`export default function Example() {
const [active, setActive] = useState(false)
return (
<>
<button
onClick={() => setActive(!active)}
>
${translate("toggle visible")}
</button>
<br />
<ElementPopper
element={(
<Component
height={40}
width={120}
backgroundColor="red"
>
Refrence Element
</Component>
)}
popper={(
<AsyncComponent
height={120}
width={120}
backgroundColor="gray"
>
Popper Element
</AsyncComponent>
)}
position="${language === "en" ? "right" : "left"}"
active={active}
/>
</>
)
}`}
</code>
</pre>
<button onClick={() => setActive2(!active2)}>
{translate("toggle visible")}
</button>
<br />
<ElementPopper
element={
<Component
height={40}
width={120}
backgroundColor="red"
translate={translate}
>
Refrence Element
</Component>
}
popper={
<AsyncComponent
height={120}
width={120}
backgroundColor="gray"
translate={translate}
>
Popper Element
</AsyncComponent>
}
position={language === "en" ? "right" : "left"}
active={active2}
/>
<p>{translate("second_example_3")}</p>
</>
),
};
const thirdExample = {
title: "Third Example",
jsx: (
<>
<p>{translate("third_example")}</p>
<p>{translate("third_example_1")}</p>
<pre>
<code className="language-jsx">
{`function AsyncComponent({ height, width, backgroundColor, children, onReady }) {
const [state, setState] = useState({})
useEffect(() => {
setState({
props: {
style: {
width: width + "px",
height: height + "px",
backgroundColor,
textAlign: "center",
display: "flex",
flexDirection: "column",
justifyContent: "center"
}
},
ready: true
})
}, [height, width, backgroundColor])
useEffect(() => {
if (!state.ready) return
onReady()
}, [state.ready, onReady])
return state.ready ?
<div {...state.props}>
{children}
</div>
:
null
}
export default function Example() {
const [active, setActive] = useState(false)
const [isPopperReady, setIsPopperReady] = useState(false)
return (
<>
<button
onClick={() => {
if (!active) {
setActive(true)
} else {
setActive(false)
setIsPopperReady(false)
}
}}
>
${translate("toggle visible")}
</button>
<br />
<ElementPopper
element={(
<Component
height={40}
width={120}
backgroundColor="red"
>
Refrence Element
</Component>
)}
popper={active && (
<AsyncComponent
height={120}
width={120}
backgroundColor="gray"
onReady={() => setIsPopperReady(true)}
>
Popper Element
</AsyncComponent>
)}
position="${language === "en" ? "right" : "left"}"
active={isPopperReady}
/>
</>
)
}`}
</code>
</pre>
<button
onClick={() => {
if (!active3) {
setActive3(true);
} else {
setActive3(false);
setIsPopperReady(false);
}
}}
>
{translate("toggle visible")}
</button>
<br />
<ElementPopper
element={
<Component
height={40}
width={120}
backgroundColor="red"
translate={translate}
>
Refrence Element
</Component>
}
popper={
active3 && (
<AsyncComponent2
height={120}
width={120}
backgroundColor="gray"
translate={translate}
onReady={() => {
setIsPopperReady(true);
}}
>
Popper Element
</AsyncComponent2>
)
}
position={language === "en" ? "right" : "left"}
active={isPopperReady}
/>
</>
),
};
return [activeProp, firstExample, secondExample, thirdExample];
}
function Component({ height, width, backgroundColor, children, translate }) {
return (
<div
style={{
width: width + "px",
height: height + "px",
backgroundColor,
textAlign: "center",
display: "flex",
flexDirection: "column",
justifyContent: "center",
fontSize: "16px",
}}
>
{translate(children)}
</div>
);
}
function AsyncComponent({
height,
width,
backgroundColor,
children,
translate,
}) {
const [props, setProps] = useState();
useEffect(() => {
setProps({
style: {
width: width + "px",
height: height + "px",
backgroundColor,
textAlign: "center",
display: "flex",
flexDirection: "column",
justifyContent: "center",
},
});
}, [height, width, backgroundColor]);
return props ? <div {...props}>{translate(children)}</div> : null;
}
function AsyncComponent2({
height,
width,
backgroundColor,
children,
onReady,
translate,
}) {
const [state, setState] = useState({});
useEffect(() => {
setState({
props: {
style: {
width: width + "px",
height: height + "px",
backgroundColor,
textAlign: "center",
display: "flex",
flexDirection: "column",
justifyContent: "center",
},
},
ready: true,
});
}, [height, width, backgroundColor]);
useEffect(() => {
if (!state.ready) return;
onReady();
}, [state.ready, onReady]);
return state.ready ? <div {...state.props}>{translate(children)}</div> : null;
}
|
/**
 * rc oauth
 *
 * OAuth redirect endpoint: chooses the app-server route from the `state`
 * query parameter and renders the redirect page carrying the auth code.
 */
import copy from 'json-deep-copy'

const {
  APP_HOME,
  RINGCENTRAL_APP_SERVER
} = process.env

export default async (req, res) => {
  const { code, state } = req.query
  // BUG FIX: `state` may be absent on a malformed/forged callback, in which
  // case `state.includes` threw a TypeError. Fall back to the web app home.
  const r = typeof state === 'string' && state.includes('ios')
    ? '/ios-device'
    : APP_HOME
  const server = RINGCENTRAL_APP_SERVER + r
  const data = {
    server,
    code
  }
  // The template engine reads globals from `_global`; give it a deep copy so
  // template-side mutation cannot leak back into `data`.
  data._global = copy(data)
  res.render('redirect', data)
}
|
#!/bin/bash
# Run the grammar tool (quietly, -v0) against two test grammars:
# 1) words.ebnf — parse the samples in words.txt starting from the `sentence` production.
cabal run -v0 -- -g=testgrammars/words/words.ebnf -p=sentence -s testgrammars/words/words.txt
# 2) dangerousgrammar.ebnf — exercise the `maybeEmpty` production.
#    NOTE(review): `-s` is given no file here — presumably it reads stdin or
#    defaults to empty input; confirm against the tool's CLI help.
cabal run -v0 -- -g=testgrammars/dangerousgrammar.ebnf -p=maybeEmpty -s
|
<reponame>OSWeDev/oswedev
export default class CRUDHandler {
public static CRUD_ROUTE_BASE: string = '/manage/';
public static getCRUDLink(API_TYPE_ID: string): string {
if (!API_TYPE_ID) {
return null;
}
return CRUDHandler.CRUD_ROUTE_BASE + API_TYPE_ID;
}
public static getCreateLink(API_TYPE_ID: string, embed: boolean): string {
if (!API_TYPE_ID) {
return null;
}
return (embed) ? "#create_" + API_TYPE_ID : CRUDHandler.getCRUDLink(API_TYPE_ID) + "/create";
}
public static getUpdateLink(API_TYPE_ID: string, vo_id: number): string {
if ((!API_TYPE_ID) || (!vo_id)) {
return null;
}
return CRUDHandler.getCRUDLink(API_TYPE_ID) + "/update/" + vo_id;
}
public static getDeleteLink(API_TYPE_ID: string, vo_id: number): string {
if ((!API_TYPE_ID) || (!vo_id)) {
return null;
}
return CRUDHandler.getCRUDLink(API_TYPE_ID) + "/delete/" + vo_id;
}
} |
<reponame>iankit3/searchmovies
import React from 'react';
// import Sidebar from './components/Sidebar';
import Main from './components/Main';
// import DataBase from './services/indexedDBService';
class App extends React.Component{
constructor(){
super();
this.state = {
selecteduser:{name:{first:'Ankit'}}
}
}
handleAdd(e){
DataBase.add();
}
handleReadAll(e){
DataBase.readAll();
}
render(){
return(
<div className="container-fluid outer" >
<div className="container">
<h2>simility</h2>
</div>
<Main />
</div>
)
}
}
export default App; |
<filename>spec/number_field_spec.rb<gh_stars>10-100
require 'spec_helper'
# Specs for FML::NumberField: string values are kept verbatim, parsing is
# exposed via #number, and #validate rejects non-numeric input.
describe FML::NumberField do
  it "accepts a number and converts it" do
    f = FML::NumberField.new({})
    f.value = '56.23'
    # the raw string assignment is preserved as-is...
    expect(f.value).to eq '56.23'
    # ...while #number exposes the parsed float
    expect(f.number).to eq 56.23
  end

  it "validates a number" do
    # inputs that must be rejected (words, thousands separators, underscores)
    invalid_numbers = ["bananas", "43,223", "__2__"]
    invalid_numbers.each do |n|
      f = FML::NumberField.new({name: "sampleNumber"})
      f.value = n
      # ensure we keep the value before validation
      expect(f.value).to eq n
      expect {f.validate}.to raise_exception FML::ValidationError
      begin
        f.validate
      rescue FML::ValidationError => e
        # the error carries the field name plus a user-facing message and a
        # more detailed debug message
        expect(e.field_name).to eq "sampleNumber"
        expect(e.message).to eq "Invalid number \"#{n}\"\n"
        expect(e.debug_message).to eq "Invalid number \"#{n}\" for field \"sampleNumber\"\n"
      end
    end
  end

  it "doesn't throw on validate when number is nil" do
    f = FML::NumberField.new({})
    # an empty string is normalized to nil...
    f.value = ""
    expect(f.value).to eq nil
    # ...and a nil value must pass validation without raising
    f.validate
  end
end
|
#!/bin/bash
# Environment setup for the checker, then launch it.
# NOTE(review): all values below are placeholders — substitute real channel
# ids/tokens before running.
export CHECKER_SITE=amazon.co.jp
# Slack notification target: channel id + bot token (xoxb-...)
export NOTICE_SLACK_CHANNEL=xxxxxxxx
export NOTICE_SLACK_TOKEN=xoxb-xxxxxxxxxxxx
# Discord notification target: channel id + bot token
export NOTICE_DISCORD_CHANNEL=xxxxxxxx
export DISCORD_TOKEN=xxxxxxxx
node runner.js
#!/bin/sh
# Scaffold a new Jakarta EE 8 project from the io.costax essentials archetype.
# Usage: setupJakartaEE8Project.sh PROJECT_NAME
if [ -z "$1" ]
then
  echo "Please invoke with setupJakartaEE8Project.sh PROJECT_NAME"
  exit 1
fi

PROJECT_NAME=$1

# Generate the project non-interactively; the artifactId becomes the
# directory/project name supplied by the caller.
mvn archetype:generate \
  -DarchetypeGroupId=io.costax \
  -DarchetypeArtifactId=jakarta8-essentials-archetype \
  -DarchetypeVersion=0.0.1 \
  -DgroupId=io.costax \
  -DartifactId=${PROJECT_NAME} \
  -Dversion=0.0.1 \
  -Darchetype.interactive=false \
  --batch-mode
# Live-CD chroot provisioning script. Runs *inside* the chroot: mounts the
# virtual filesystems, configures apt sources, installs a kernel + live-boot
# stack, creates the default user, and cleans up. Statement order matters.

# Mount kernel virtual filesystems (errors silenced: they may already be mounted).
mount none -t proc /proc &>> /dev/null
mount none -t sysfs /sys &>> /dev/null
mount none -t devpts /dev/pts &>> /dev/null
export HOME=/root
export LC_ALL=C
# Pull in build-time variables (e.g. R_U used below) prepared by the host.
source /tmp/envs.sh
mkdir -p /iso
echo "Live-CD" > /etc/hostname
# Provides $ID and $VERSION_ID used to pick the apt sources file.
source /etc/os-release
# Static DNS so apt works before NetworkManager is configured.
echo "nameserver 8.8.8.8
nameserver 1.1.1.1
search lan" > /etc/resolv.conf
apt install wget curl -y &>> /dev/null
# Replace the distro's sources.list with the project-maintained one,
# substituting the R_U placeholder from envs.sh.
mv -fv /etc/apt/sources.list /tmp/old-sources.list
echo "https://raw.githubusercontent.com/Sirherobrine23/Debian_ISO/main/debian_sources/$ID-$VERSION_ID.list"
curl "https://raw.githubusercontent.com/Sirherobrine23/Debian_ISO/main/debian_sources/$ID-$VERSION_ID.list" |sed "s|R_U|${R_U}|g" > /etc/apt/sources.list
# ::group::/::endgroup:: markers fold the output in GitHub Actions logs.
echo "::group::Update Repository"
apt update
echo "::endgroup::"
apt install -y dbus-tests &>> /dev/null
apt install -y systemd-sysv &>> /dev/null
dbus-uuidgen > /etc/machine-id
ln -fs /etc/machine-id /var/lib/dbus/machine-id
# Divert initctl to /bin/true so package postinst scripts can't start services
# inside the chroot; the diversion is removed at the end of this script.
dpkg-divert --local --rename --add /sbin/initctl
ln -s /bin/true /sbin/initctl
# Pick the kernel/live-boot package set per distro.
if [ $ID == "debian" ];then
  kernelL="linux-headers-amd64 linux-image-amd64 live-boot live-boot-initramfs-tools extlinux"
else
  kernelL="ubuntu-standard linux-generic"
fi
# Install each package in its own foldable log group.
for installer in $kernelL discover resolvconf wpagui locales laptop-detect wireless-tools casper lupin-casper git net-tools curl wget git zip unzip curl vim nano os-prober network-manager apt-transport-https
do
  echo "::group::Installing: $installer"
  apt install -y $installer
  echo "::endgroup::"
done
update-initramfs -u
echo "::group::Installing: Visual studio code insider"
wget -q "https://code.visualstudio.com/sha/download?build=insider&os=linux-deb-x64" -O /tmp/code.deb
dpkg -i /tmp/code.deb
echo "::endgroup::"
# Create the default live user with a crypt()-hashed password and sudo access.
username='ubuntu'
password='12345678'
pass=$(perl -e 'print crypt($ARGV[0], "password")' $password)
useradd -m -p "$pass" "$username"
addgroup ubuntu sudo
usermod --shell /bin/bash ubuntu
# NOTE(review): "Passworld" below is a typo in the login banner shown to users.
echo "Live CD login:
Username: Ubuntu
Passworld: 12345678" >> /etc/issue
# Repair/cleanup passes; logs are kept on the ISO for debugging.
apt --fix-broken install -y &>> /iso/brokens.txt
apt autoremove -y &>> /iso/autoremove.txt
echo "[main]
rc-manager=resolvconf
plugins=ifupdown,keyfile
dns=dnsmasq

[ifupdown]
managed=false" > /etc/NetworkManager/NetworkManager.conf
dpkg-reconfigure network-manager &>> /dev/null
# Undo the initctl diversion installed above.
rm /sbin/initctl
dpkg-divert --rename --remove /sbin/initctl
exit
<reponame>aleksandr-kotlyar/java-sitemap-checker
package source.type;
/**
 * Marker type for the HTML source kind. Intentionally empty — no behavior yet.
 */
public class Html {
}
|
use std::future::Future;
use std::task::{Context, Poll};
use std::pin::Pin;
// NOTE(review): this module has serious problems — see the comments in `poll`.
enum BuilderState<F: Future<Output = ()>> {
    // No future created yet.
    Building,
    // A heap-pinned future currently being driven.
    Executing(Pin<Box<F>>),
}

struct Executor<F: Future<Output = ()>> {
    state: BuilderState<F>,
}

impl<F: Future<Output = ()>> Executor<F> {
    /// Starts in the `Building` state with no future attached.
    fn new() -> Self {
        Executor {
            state: BuilderState::Building,
        }
    }

    /// Pins the given future on the heap so it can be polled in place.
    fn build(&mut self, future: F) -> Pin<Box<F>> {
        Box::pin(future)
    }

    fn poll(&mut self, cx: &mut Context) {
        match &mut self.state {
            BuilderState::Building => {
                // NOTE(review): `build` takes an `F`, but this passes an ad-hoc
                // `async` block whose anonymous type cannot equal a caller-chosen
                // generic `F` — this arm cannot type-check as written.
                // Create a future using the build method
                let future = self.build(async { /* your future implementation here */ });
                // Transition to the Executing state with the created future
                self.state = BuilderState::Executing(future);
                // Call poll again to drive the execution
                self.poll(cx);
            }
            BuilderState::Executing(future) => {
                // NOTE(review): `Pending => continue` busy-spins the CPU instead of
                // returning and letting the waker re-schedule this task.
                // Drive the execution of the future to completion
                loop {
                    match future.as_mut().poll(cx) {
                        Poll::Ready(()) => break,
                        Poll::Pending => continue,
                    }
                }
                // NOTE(review): control reaches this line after the `break` above,
                // so a *successful* completion panics here. The macro call should
                // not be on the success path.
                // Handle unexpected result from the poll method
                unreachable!("Unexpected result from key value operation");
            }
        }
    }
}
# Print characters 2 through 7 (inclusive) of each line read from stdin.
cut -c 2-7
|
$(document).ready(function() {
    // login panel: hidden on load; the sign-in icon toggles it and dims the
    // rest of the page (.lum) while it is open.
    $("#loginn").hide();
    $('.fa-sign-in').on('click', function() {
        $('#loginn').toggleClass('show');
        $('.lum').addClass('showingLum');
        if ($('#loginn').hasClass('show')) {
            // panel just opened: fade the page background down
            $('.lum').fadeTo("slow", 0.33);
        } else {
            // panel closed again: restore full brightness
            $('.lum').fadeTo("slow", 1);
        }
        return false // suppress the default anchor navigation / bubbling
    });
    // dedicated close control: hide the panel and restore the background
    $('.login_hide').on('click', function() {
        $('#loginn').removeClass('show');
        $('.lum').removeClass('showingLum');
        $('.lum').fadeTo("slow", 1);
    });
});
def add_two(a, b):
    """Return the sum of the two operands ``a`` and ``b``."""
    total = a + b
    return total


# Demo: add two literals and show the result.
sum_of_two = add_two(5, 10)
print(sum_of_two)
<reponame>blkmajik/hyperglass
// Barrel file: re-export the HyperglassProvider module's public surface.
export * from './HyperglassProvider';
|
<filename>services/web/src/modals/ShowRequest.js
import React from 'react';
import { Modal, Table, Menu, Divider } from 'semantic';
import modal from 'helpers/modal';
import CodeBlock from 'components/Markdown/Code';
import RequestBlock from 'components/RequestBlock';
import { API_URL } from 'utils/env';
@modal
export default class ShowRequest extends React.Component {
constructor(props) {
super(props);
this.state = {
tab: 'details',
};
}
render() {
const { ip, ...rest } = this.props.request;
return (
<>
<Modal.Header>
{rest.method} {rest.path}
</Modal.Header>
<Modal.Content scrolling>
<Menu pointing secondary>
<Menu.Item
content="Request Details"
active={this.state.tab === 'details'}
onClick={() => this.setState({ tab: 'details' })}
/>
<Menu.Item
{...(rest?.headers['content-type']?.toLowerCase() !==
'application/json'
? {
disabled: true,
title: 'Only available for JSON request',
}
: {})}
content="Example (cURL)"
active={this.state.tab === 'curl'}
onClick={() => this.setState({ tab: 'curl' })}
/>
</Menu>
<Divider hidden />
{this.state.tab === 'details' && (
<>
<Table definition>
<Table.Body>
<Table.Row>
<Table.Cell width={4}>Method</Table.Cell>
<Table.Cell>
<code>{rest.method}</code>
</Table.Cell>
</Table.Row>
<Table.Row>
<Table.Cell width={4}>URL</Table.Cell>
<Table.Cell>
<code>
{API_URL}
{rest.path}
</code>
</Table.Cell>
</Table.Row>
</Table.Body>
</Table>
{rest.body && (
<>
<h3>Body</h3>
<CodeBlock
source={JSON.stringify(rest.body, null, 2)}
language="json"
allowCopy
/>
</>
)}
<h3>Headers</h3>
<Table definition>
<Table.Body>
{Object.keys(rest.headers).map((key) => (
<Table.Row key={key}>
<Table.Cell width={4}>{key}</Table.Cell>
<Table.Cell>
<code style={{ wordBreak: 'break-all' }}>
{rest.headers[key]}
</code>
</Table.Cell>
</Table.Row>
))}
</Table.Body>
</Table>
</>
)}
{this.state.tab === 'curl' && (
<RequestBlock
apiKey={this.props.application.apiKey}
authToken="<token>"
header={false}
request={rest}
/>
)}
</Modal.Content>
</>
);
}
}
|
// +build !windows
package idtools // import "github.com/docker/docker/pkg/idtools"
import (
"bytes"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"sync"
"syscall"
"github.com/docker/docker/pkg/system"
"github.com/opencontainers/runc/libcontainer/user"
)
var (
	// entOnce guards the one-time resolution of the getent binary path,
	// cached in getentCmd for the life of the process (see callGetent).
	entOnce   sync.Once
	getentCmd string
)
// mkdirAs creates `path` (and, when mkAll is set, any missing parents) with
// the given mode, then chowns every directory this call actually created to
// `owner`. A pre-existing directory is left alone unless chownExisting is
// true, in which case only its ownership is (lazily) corrected.
func mkdirAs(path string, mode os.FileMode, owner Identity, mkAll, chownExisting bool) error {
	// make an array containing the original path asked for, plus (for mkAll == true)
	// all path components leading up to the complete path that don't exist before we MkdirAll
	// so that we can chown all of them properly at the end. If chownExisting is false, we won't
	// chown the full directory path if it exists
	var paths []string

	stat, err := system.Stat(path)
	if err == nil {
		if !stat.IsDir() {
			return &os.PathError{Op: "mkdir", Path: path, Err: syscall.ENOTDIR}
		}
		if !chownExisting {
			return nil
		}

		// short-circuit--we were called with an existing directory and chown was requested
		return lazyChown(path, owner.UID, owner.GID, stat)
	}

	if os.IsNotExist(err) {
		paths = []string{path}
	}

	if mkAll {
		// walk back to "/" looking for directories which do not exist
		// and add them to the paths array for chown after creation
		dirPath := path
		for {
			dirPath = filepath.Dir(dirPath)
			if dirPath == "/" {
				break
			}
			if _, err := os.Stat(dirPath); err != nil && os.IsNotExist(err) {
				paths = append(paths, dirPath)
			}
		}
		if err := system.MkdirAll(path, mode); err != nil {
			return err
		}
	} else {
		if err := os.Mkdir(path, mode); err != nil && !os.IsExist(err) {
			return err
		}
	}
	// even if it existed, we will chown the requested path + any subpaths that
	// didn't exist when we called MkdirAll
	for _, pathComponent := range paths {
		if err := lazyChown(pathComponent, owner.UID, owner.GID, nil); err != nil {
			return err
		}
	}
	return nil
}
// CanAccess takes a valid (existing) directory and a uid, gid pair and determines
// if that uid, gid pair has access (execute bit) to the directory
func CanAccess(path string, pair Identity) bool {
	statInfo, err := system.Stat(path)
	if err != nil {
		// unstat-able path: conservatively report no access
		return false
	}
	fileMode := os.FileMode(statInfo.Mode())
	// only the permission bits matter for the execute-bit check below
	permBits := fileMode.Perm()
	return accessible(statInfo.UID() == uint32(pair.UID),
		statInfo.GID() == uint32(pair.GID), permBits)
}
// accessible reports whether the execute bit applies to the caller: the owner
// bit when isOwner, the group bit when isGroup, or the "other" bit otherwise.
func accessible(isOwner, isGroup bool, perms os.FileMode) bool {
	switch {
	case isOwner && perms&0100 != 0:
		return true
	case isGroup && perms&0010 != 0:
		return true
	default:
		// world-executable grants access to everyone
		return perms&0001 != 0
	}
}
// LookupUser uses traditional local system files lookup (from libcontainer/user) on a username,
// followed by a call to `getent` for supporting host configured non-files passwd and group dbs
func LookupUser(username string) (user.User, error) {
	// first try a local system files lookup using existing capabilities
	usr, err := user.LookupUser(username)
	if err == nil {
		return usr, nil
	}

	// local files lookup failed; attempt to call `getent` to query configured passwd dbs
	// (the local-lookup error is intentionally discarded; the getent error wins)
	usr, err = getentUser(fmt.Sprintf("%s %s", "passwd", username))
	if err != nil {
		return user.User{}, err
	}
	return usr, nil
}
// LookupUID uses traditional local system files lookup (from libcontainer/user) on a uid,
// followed by a call to `getent` for supporting host configured non-files passwd and group dbs
func LookupUID(uid int) (user.User, error) {
	// first try a local system files lookup using existing capabilities
	usr, err := user.LookupUid(uid)
	if err == nil {
		return usr, nil
	}

	// local files lookup failed; attempt to call `getent` to query configured passwd dbs
	// (result/error of the getent fallback is returned directly)
	return getentUser(fmt.Sprintf("%s %d", "passwd", uid))
}
// getentUser runs `getent` with args of the form "passwd <key>" and parses the
// single matching passwd entry. Returns an error when no entry is found.
func getentUser(args string) (user.User, error) {
	reader, err := callGetent(args)
	if err != nil {
		return user.User{}, err
	}
	users, err := user.ParsePasswd(reader)
	if err != nil {
		return user.User{}, err
	}
	if len(users) == 0 {
		// args is "<database> <key>"; report the key portion
		return user.User{}, fmt.Errorf("getent failed to find passwd entry for %q", strings.Split(args, " ")[1])
	}
	return users[0], nil
}
// LookupGroup uses traditional local system files lookup (from libcontainer/user) on a group name,
// followed by a call to `getent` for supporting host configured non-files passwd and group dbs
func LookupGroup(groupname string) (user.Group, error) {
	// first try a local system files lookup using existing capabilities
	group, err := user.LookupGroup(groupname)
	if err == nil {
		return group, nil
	}

	// local files lookup failed; attempt to call `getent` to query configured group dbs
	// (result/error of the getent fallback is returned directly)
	return getentGroup(fmt.Sprintf("%s %s", "group", groupname))
}
// LookupGID uses traditional local system files lookup (from libcontainer/user) on a group ID,
// followed by a call to `getent` for supporting host configured non-files passwd and group dbs
func LookupGID(gid int) (user.Group, error) {
	// first try a local system files lookup using existing capabilities
	group, err := user.LookupGid(gid)
	if err == nil {
		return group, nil
	}

	// local files lookup failed; attempt to call `getent` to query configured group dbs
	return getentGroup(fmt.Sprintf("%s %d", "group", gid))
}
// getentGroup runs `getent` with args of the form "group <key>" and parses the
// single matching group entry. Returns an error when no entry is found.
func getentGroup(args string) (user.Group, error) {
	reader, err := callGetent(args)
	if err != nil {
		return user.Group{}, err
	}
	groups, err := user.ParseGroup(reader)
	if err != nil {
		return user.Group{}, err
	}
	if len(groups) == 0 {
		// args is "<database> <key>"; report the key portion
		return user.Group{}, fmt.Errorf("getent failed to find groups entry for %q", strings.Split(args, " ")[1])
	}
	return groups[0], nil
}
// callGetent invokes the host `getent` binary with the given arguments and
// returns its stdout as a reader. The binary's path is resolved once and
// cached for the life of the process. Known getent exit codes are mapped to
// descriptive errors.
func callGetent(args string) (io.Reader, error) {
	entOnce.Do(func() { getentCmd, _ = resolveBinary("getent") })
	// if no `getent` command on host, can't do anything else
	if getentCmd == "" {
		// BUG FIX: was fmt.Errorf("") — callers surfaced a blank error message.
		return nil, fmt.Errorf("unable to find getent command")
	}
	out, err := execCmd(getentCmd, args)
	if err != nil {
		exitCode, errC := system.GetExitCode(err)
		if errC != nil {
			return nil, err
		}
		switch exitCode {
		case 1:
			return nil, fmt.Errorf("getent reported invalid parameters/database unknown")
		case 2:
			terms := strings.Split(args, " ")
			return nil, fmt.Errorf("getent unable to find entry %q in %s database", terms[1], terms[0])
		case 3:
			return nil, fmt.Errorf("getent database doesn't support enumeration")
		default:
			return nil, err
		}

	}
	return bytes.NewReader(out), nil
}
// lazyChown performs a chown only if the uid/gid don't match what's requested
// Normally a Chown is a no-op if uid/gid match, but in some cases this can still cause an error, e.g. if the
// dir is on an NFS share, so don't call chown unless we absolutely must.
// A nil `stat` is allowed; the path is stat'ed on demand in that case.
func lazyChown(p string, uid, gid int, stat *system.StatT) error {
	if stat == nil {
		var err error
		stat, err = system.Stat(p)
		if err != nil {
			return err
		}
	}
	if stat.UID() == uint32(uid) && stat.GID() == uint32(gid) {
		// ownership already correct — skip the syscall entirely
		return nil
	}
	return os.Chown(p, uid, gid)
}
|
<reponame>kasunsk/grpc-spring-boot-starter
package org.lognet.springboot.grpc.demo;
import io.grpc.examples.calculator.CalculatorGrpc;
import io.grpc.examples.calculator.CalculatorOuterClass;
import org.lognet.springboot.grpc.GRpcService;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import io.grpc.stub.StreamObserver;
/**
* Created by alexf on 28-Jan-16.
*/
@GRpcService(interceptors = NotSpringBeanInterceptor.class)
public class CalculateService extends CalculatorGrpc.CalculatorImplBase {

    /**
     * Applies the requested arithmetic operation to the two numbers in the
     * request, sleeps 10 seconds (demo of a slow server call), and streams the
     * single result back to the client.
     */
    @Override
    public void calculate(CalculatorOuterClass.CalculatorRequest request, StreamObserver<CalculatorOuterClass.CalculatorResponse> responseObserver) {
        CalculatorOuterClass.CalculatorResponse.Builder resultBuilder = CalculatorOuterClass.CalculatorResponse.newBuilder();
        switch (request.getOperation()) {
            case ADD:
                resultBuilder.setResult(request.getNumber1() + request.getNumber2());
                break;
            case SUBTRACT:
                resultBuilder.setResult(request.getNumber1() - request.getNumber2());
                break;
            case MULTIPLY:
                resultBuilder.setResult(request.getNumber1() * request.getNumber2());
                break;
            case DIVIDE:
                // NOTE(review): a zero number2 throws ArithmeticException here and
                // surfaces as a generic gRPC error — confirm whether explicit
                // validation with an INVALID_ARGUMENT status is wanted.
                resultBuilder.setResult(request.getNumber1() / request.getNumber2());
                break;
            case UNRECOGNIZED:
                // unknown operation: respond with the builder's default result
                break;
        }
        try {
            System.out.println("Sleeping for 10 sec");
            Thread.sleep(10000);
            System.out.println("Finished");
        } catch (InterruptedException e) {
            // BUG FIX: the interrupt was swallowed (printStackTrace only), hiding
            // the interruption from the executor. Restore the interrupt flag so
            // the owning thread can observe and act on it.
            Thread.currentThread().interrupt();
        }
        responseObserver.onNext(resultBuilder.build());
        responseObserver.onCompleted();
    }
}
|
from types import FunctionType
from typing import Set
from bytecode import Bytecode
def linenos(f):
    # type: (FunctionType) -> Set[int]
    """Return the set of source line numbers carrying at least one bytecode
    instruction in ``f``'s compiled code object."""
    lines = set()
    for instr in Bytecode.from_code(f.__code__):
        # pseudo-instructions (labels etc.) have no lineno attribute
        if hasattr(instr, "lineno"):
            lines.add(instr.lineno)
    return lines
# 5/14/19
# 10/18/20: Disabled linuxbrew: lots of conflicts on Ubuntu 18.04. Maybe
# turn this into a function.
#[[ -f /home/linuxbrew/.linuxbrew/bin/brew ]] && eval $(/home/linuxbrew/.linuxbrew/bin/brew shellenv)
# w/o the 'g' prefix
#PATH="$HOMEBREW_PREFIX/opt/coreutils/libexec/gnubin:$PATH"
|
<reponame>DevelopIntelligenceBoulder/ngrxblog<filename>tools/tasks/seed/build.bundles.app.ts
import { join } from 'path';
import * as Builder from 'systemjs-builder';
import {
BOOTSTRAP_MODULE,
JS_PROD_APP_BUNDLE,
JS_DEST,
SYSTEM_BUILDER_CONFIG,
TMP_DIR
} from '../../config';
const BUNDLER_OPTIONS = {
format: 'cjs',
minify: true,
mangle: false
};
/**
* Executes the build process, bundling the JavaScript files using the SystemJS builder.
*/
export = (done: any) => {
let builder = new Builder(SYSTEM_BUILDER_CONFIG);
builder
.buildStatic(join(TMP_DIR, BOOTSTRAP_MODULE),
join(JS_DEST, JS_PROD_APP_BUNDLE),
BUNDLER_OPTIONS)
.then(() => done())
.catch(err => done(err));
};
|
<filename>lib/components/command_bar.js
// CommandBar: renders one button per configured command when the user is
// logged in. Clicks are delegated to the `handleCommand` prop; the command's
// slug rides along on the button's data-command attribute.
var CommandBar = React.createClass({
  displayName: "CommandBar",

  getDefaultProps: function () {
    return {
      commands: []
    };
  },

  // Map each command descriptor ({ slug, label }) to a <button> element.
  commands: function () {
    return this.props.commands.map(function (cmd, idx) {
      return React.createElement(
        "button",
        {
          onClick: this.props.handleCommand,
          key: idx,
          "data-command": cmd.slug },
        cmd.label
      );
    }, this);
  },

  render: function () {
    if (this.props.loggedIn) {
      return React.createElement(
        "div",
        null,
        this.commands()
      );
    } else {
      // logged out: render an empty placeholder div
      return React.createElement("div", null);
    }
  }
});
module.exports = CommandBar;
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.