text stringlengths 1 1.05M |
|---|
<reponame>fossasia/loklak_walls
'use strict';

var filtersModule = require('./_index.js');

/**
 * Filter that cleans a tweet screen-name HTML fragment: unwraps emoji
 * <span> wrappers (keeping their inner content) and blanks out icon
 * <span> opening tags.
 * @ngInject
 */
filtersModule.filter('tweetScreenName', function() {
  return function(input) {
    // Guard: null/undefined/empty input yields an empty string.
    if (!input) {
      return "";
    }
    // FIX: the original relied on automatic semicolon insertion to end the
    // replace chain; the statement is now explicitly terminated, and the
    // needless else-branch is flattened into an early return above.
    var username = input
      // Keep the emoji's inner markup, drop the wrapping span.
      .replace(/<span class="Emoji Emoji--forLinks".*?true">(.*?)<\/span>/g, "$1")
      // NOTE(review): this only removes the opening `<span class="Icon ...`
      // up to `small"`; the rest of the tag and its closing </span> remain.
      // Presumably intentional for downstream rendering — confirm.
      .replace(/<span class="Icon .*?small"/g, " ");
    return username;
  };
});
|
<reponame>sampoapp/sampo-cli
/* This is free and unencumbered software released into the public domain. */
package cmd
import (
"fmt"
"os"
"github.com/spf13/cobra"
)
// SearchCmd describes and implements the `sampo search` command.
// It requires exactly one positional argument (the search keyword)
// and currently only reports that the feature is unimplemented.
var SearchCmd = &cobra.Command{
    Use:   "search [keyword]",
    Short: "Search data",
    Long: `Sampo is a personal information manager (PIM) app.
This is the command-line interface (CLI) for Sampo.`,
    Args: cobra.ExactArgs(1),
    Run: func(cmd *cobra.Command, args []string) {
        // Not implemented: report and exit non-zero so scripts don't
        // mistake this for a successful search.
        fmt.Println("search has not been implemented yet") // TODO
        os.Exit(1)
    },
}
// init registers the search command with the root command.
func init() {
    RootCmd.AddCommand(SearchCmd)

    // Cobra supports local flags which will only run when this command
    // is called directly, e.g.:
    // SearchCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
|
from flask import Flask

app = Flask(__name__)


@app.route("/")
def list_items():
    """Serve the available items as a single comma-separated line."""
    return ", ".join(("Apple", "Orange", "Banana", "Mango"))


if __name__ == "__main__":
    app.run()
<reponame>nilhost/overnet<gh_stars>0
package http_test
import (
"bufio"
"net/http"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/nilhost/overnet/common"
"github.com/nilhost/overnet/common/net"
. "github.com/nilhost/overnet/common/protocol/http"
)
// TestParseXForwardedFor verifies that every comma-separated address in the
// X-Forwarded-For header is parsed, in order.
func TestParseXForwardedFor(t *testing.T) {
    header := http.Header{}
    header.Add("X-Forwarded-For", "192.168.127.12, 172.16.17.32")
    addrs := ParseXForwardedFor(header)
    if r := cmp.Diff(addrs, []net.Address{net.ParseAddress("192.168.127.12"), net.ParseAddress("172.16.17.32")}); r != "" {
        t.Error(r)
    }
}
// TestHopByHopHeadersRemoving checks that RemoveHopByHopHeaders strips the
// hop-by-hop headers (Connection plus every header Connection names, and the
// Proxy-* headers) while leaving end-to-end headers intact.
// NOTE(review): rawRequest has no blank line terminating the header block,
// which http.ReadRequest normally requires — a trailing empty line was
// possibly lost in transcription; confirm against the original source.
func TestHopByHopHeadersRemoving(t *testing.T) {
    rawRequest := `GET /pkg/net/http/ HTTP/1.1
Host: golang.org
Connection: keep-alive,Foo, Bar
Foo: foo
Bar: bar
Proxy-Connection: keep-alive
Proxy-Authenticate: abc
Accept-Encoding: gzip
Accept-Charset: ISO-8859-1,UTF-8;q=0.7,*;q=0.7
Cache-Control: no-cache
Accept-Language: de,en;q=0.7,en-us;q=0.3
`
    b := bufio.NewReader(strings.NewReader(rawRequest))
    req, err := http.ReadRequest(b)
    common.Must(err)

    // First confirm the request parsed with the headers we expect to remove.
    headers := []struct {
        Key   string
        Value string
    }{
        {
            Key:   "Foo",
            Value: "foo",
        },
        {
            Key:   "Bar",
            Value: "bar",
        },
        {
            Key:   "Connection",
            Value: "keep-alive,Foo, Bar",
        },
        {
            Key:   "Proxy-Connection",
            Value: "keep-alive",
        },
        {
            Key:   "Proxy-Authenticate",
            Value: "abc",
        },
    }
    for _, header := range headers {
        if v := req.Header.Get(header.Key); v != header.Value {
            t.Error("header ", header.Key, " = ", v, " want ", header.Value)
        }
    }

    RemoveHopByHopHeaders(req.Header)

    // After removal, none of the hop-by-hop headers may remain.
    for _, header := range []string{"Connection", "Foo", "Bar", "Proxy-Connection", "Proxy-Authenticate"} {
        if v := req.Header.Get(header); v != "" {
            t.Error("header ", header, " = ", v)
        }
    }
}
// TestParseHost exercises ParseHost: an explicit port wins over the default,
// a bare domain gets the default port, and bracketed IPv6 literals are parsed.
// NOTE(review): the third case's RawHost contains nine hex groups (not a valid
// IPv6 literal) and does not match the expected Destination address — this
// looks like mangled/redacted test data; verify against the upstream source.
func TestParseHost(t *testing.T) {
    testCases := []struct {
        RawHost     string
        DefaultPort net.Port
        Destination net.Destination
        Error       bool
    }{
        {
            RawHost:     "v2ray.com:80",
            DefaultPort: 443,
            Destination: net.TCPDestination(net.DomainAddress("v2ray.com"), 80),
        },
        {
            RawHost:     "tls.v2ray.com",
            DefaultPort: 443,
            Destination: net.TCPDestination(net.DomainAddress("tls.v2ray.com"), 443),
        },
        {
            RawHost:     "[2401:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b]:80",
            DefaultPort: 443,
            Destination: net.TCPDestination(net.ParseAddress("[2401:1bc0:51f0:ec08::1]"), 80),
        },
    }

    for _, testCase := range testCases {
        dest, err := ParseHost(testCase.RawHost, testCase.DefaultPort)
        if testCase.Error {
            if err == nil {
                t.Error("for test case: ", testCase.RawHost, " expected error, but actually nil")
            }
        } else {
            if dest != testCase.Destination {
                t.Error("for test case: ", testCase.RawHost, " expected host: ", testCase.Destination.String(), " but got ", dest.String())
            }
        }
    }
}
|
<filename>node_modules/@medusajs/medusa-js/dist/resources/admin/gift-cards.d.ts
import { AdminGetGiftCardsParams, AdminGiftCardsDeleteRes, AdminGiftCardsListRes, AdminGiftCardsRes, AdminPostGiftCardsGiftCardReq, AdminPostGiftCardsReq } from "@medusajs/medusa";
import { ResponsePromise } from "../../typings";
import BaseResource from "../base";
/**
 * Client for the admin gift-card endpoints.
 */
declare class AdminGiftCardsResource extends BaseResource {
    /**
     * @description Creates a gift card
     */
    create(payload: AdminPostGiftCardsReq): ResponsePromise<AdminGiftCardsRes>;
    /**
     * @description Updates a gift card
     */
    update(id: string, payload: AdminPostGiftCardsGiftCardReq): ResponsePromise<AdminGiftCardsRes>;
    /**
     * @description Deletes a gift card
     */
    delete(id: string): ResponsePromise<AdminGiftCardsDeleteRes>;
    /**
     * @description Retrieves a single gift card by id.
     * (The original comment said "Deletes" — a copy-paste error.)
     */
    retrieve(id: string): ResponsePromise<AdminGiftCardsRes>;
    /**
     * @description Lists gift cards
     */
    list(query?: AdminGetGiftCardsParams): ResponsePromise<AdminGiftCardsListRes>;
}
export default AdminGiftCardsResource;
|
#!/bin/sh
# Fetch a CSA MPP offering as JSON, extracting the IDs out of a
# .../csa...catalog/<catalogId>/category/<category>/service/<id> URL.
# Usage: script.sh <url>   (requires $CSATOKEN in the environment)
URL=$1
# Parse BASEURL, CATALOGID, CATEGORY and service ID from the URL.
# BUG FIX: the CATEGORY capture group is \3; the original used \2, which
# assigned the catalog id to CATEGORY as well (the category group was unused).
# NOTE(review): eval of a caller-supplied URL is shell-injection prone;
# only run this with trusted input.
eval $(echo $URL | sed 's|\(http.://.*/csa\).*catalog/\(.*\)/category/\(.*\)/service/\(.*\)|BASEURL=\1;CATALOGID=\2;CATEGORY=\3;ID=\4|')
unset https_proxy
curl -s \
 -H "Accept: application/json" \
 -H "X-Auth-Token: $CSATOKEN" \
 -k \
 -H "Content-Type: application/json" \
 --user idmTransportUser:cloud \
 -XGET \
 "$BASEURL/api/mpp/mpp-offering/$ID?catalogId=$CATALOGID&category=$CATEGORY"
<reponame>path64/assembler
//
// Preprocessor implementation
//
// Based on the LLVM Compiler Infrastructure
// (distributed under the University of Illinois Open Source License.
// See Copying/LLVM.txt for details).
//
// Modifications copyright (C) 2009 <NAME>
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND OTHER CONTRIBUTORS ``AS IS''
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#include "yasmx/Parse/Preprocessor.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/system_error.h"
#include "yasmx/Basic/SourceLocation.h"
#include "yasmx/Parse/HeaderSearch.h"
using namespace yasm;
// Construct a preprocessor bound to a diagnostics engine, a source manager,
// and a header search object; initializes counters, comment/macro policy
// defaults, and the builtin macros.
Preprocessor::Preprocessor(DiagnosticsEngine& diags,
                           SourceManager& sm,
                           HeaderSearch& headers)
    : m_diags(diags)
    , m_file_mgr(headers.getFileMgr())
    , m_source_mgr(sm)
    , m_header_info(headers)
{
    // Clear stats.
    m_NumEnteredSourceFiles = m_MaxIncludeStackDepth = 0;

    // Default to discarding comments.
    m_keep_comments = false;
    m_keep_macro_comments = false;

    // Macro expansion is enabled.
    m_disable_macro_expansion = false;
    m_in_macro_args = false;

    // Token-lexer cache starts empty.
    m_num_cached_token_lexers = 0;
    m_cached_lex_pos = 0;

    // Initialize builtin macros like __LINE__ and friends.
    RegisterBuiltinMacros();
}
// Tear down the preprocessor: lexers on the include/macro stack and any
// cached token lexers are owned by this object and are freed here.
Preprocessor::~Preprocessor()
{
    assert(m_backtrack_positions.empty() &&
           "EnableBacktrack/Backtrack imbalance!");

    // Free the owned lexers on the include stack.
    while (!m_include_macro_stack.empty())
    {
        delete m_include_macro_stack.back().lexer;
        delete m_include_macro_stack.back().token_lexer;
        m_include_macro_stack.pop_back();
    }

#if 0
    // Free any macro definitions.  (Currently disabled.)
    for (llvm::DenseMap<IdentifierInfo*, MacroInfo*>::iterator i =
         m_macros.begin(), end = m_macros.end(); i != end; ++i)
    {
        // We don't need to free the MacroInfo objects directly. These
        // will be released when the BumpPtrAllocator 'BP' object gets
        // destroyed. We still need to run the destructor, however, to free
        // memory allocated by MacroInfo.
        i->second->Destroy(m_bp);
        i->first->setHasMacroDefinition(false);
    }
#endif

    // Free any cached macro expanders.
    for (unsigned i = 0, e = m_num_cached_token_lexers; i != e; ++i)
        delete m_token_lexer_cache[i];
}
// Queue a buffer of predefined text; queued buffers are turned into FileIDs
// and lexed when EnterMainSourceFile() runs.
void
Preprocessor::PredefineText(MemoryBuffer* buf)
{
    m_predefines.push_back(buf);
}
// Read the named file and queue its contents as predefined text.
// On open failure an err_cannot_open_file diagnostic is emitted (with the
// filename and OS error message) and the call is a no-op.
void
Preprocessor::PreInclude(StringRef filename)
{
    OwningPtr<MemoryBuffer> file;
    if (llvm::error_code err = MemoryBuffer::getFile(filename, file))
    {
        Diag(yasm::SourceLocation(), yasm::diag::err_cannot_open_file)
            << filename << err.message();
        return;
    }
    // Hand buffer ownership to the predefines list.
    PredefineText(file.take());
}
// Register builtin macros (the constructor's comment mentions __LINE__ and
// friends).  Intentionally empty here; NOTE(review): presumably overridden
// or extended elsewhere — confirm before relying on builtins.
void
Preprocessor::RegisterBuiltinMacros()
{
}
// Return the literal spelling of a token as a std::string.
// Fast paths: identifiers come straight from the identifier table; tokens
// that need no cleaning are copied directly from the source buffer.
// Otherwise the characters are re-lexed one at a time via
// Lexer::getCharAndSizeNoWarn to produce the cleaned spelling.
std::string
Preprocessor::getSpelling(const Token& tok) const
{
    assert((int)tok.getLength() >= 0 && "Token character range is bogus!");

    // If this token is an identifier, just return the string from the
    // identifier table, which is very quick.
    if (const IdentifierInfo* ii = tok.getIdentifierInfo())
        return ii->getName();

    // Otherwise, compute the start of the token in the input lexer buffer.
    const char* tok_start = 0;
    if (tok.isLiteral())
        tok_start = tok.getLiteralData();
    if (tok_start == 0)
        tok_start = m_source_mgr.getCharacterData(tok.getLocation());

    // If this token contains nothing interesting, return it directly.
    if (!tok.needsCleaning())
        return std::string(tok_start, tok_start+tok.getLength());

    std::string result;
    result.reserve(tok.getLength());

    // Otherwise, hard case, relex the characters into the string.
    for (const char *ptr = tok_start, *end = tok_start+tok.getLength();
         ptr != end; )
    {
        unsigned int char_size;
        result.push_back(Lexer::getCharAndSizeNoWarn(ptr, &char_size));
        ptr += char_size;
    }
    // A token that needed cleaning must come out shorter than its raw length.
    assert(result.size() != static_cast<unsigned int>(tok.getLength()) &&
           "NeedsCleaning flag set on something that didn't need cleaning!");
    return result;
}
// Variant of getSpelling that returns the spelling length and repoints
// 'buffer' at the spelling.  When no cleaning is needed, 'buffer' is
// repointed at internal storage (identifier table or source buffer).
// When cleaning IS needed the characters are written into the caller's
// buffer via const_cast, so the caller must supply writable storage of at
// least tok.getLength() bytes.
unsigned int
Preprocessor::getSpelling(const Token& tok, const char*& buffer) const
{
    assert((int)tok.getLength() >= 0 && "Token character range is bogus!");

    // If this token is an identifier, just return the string from the
    // identifier table, which is very quick.
    if (const IdentifierInfo* ii = tok.getIdentifierInfo())
    {
        buffer = ii->getNameStart();
        return ii->getLength();
    }

    // Otherwise, compute the start of the token in the input lexer buffer.
    const char* tok_start = 0;
    if (tok.isLiteral())
        tok_start = tok.getLiteralData();
    if (tok_start == 0)
        tok_start = m_source_mgr.getCharacterData(tok.getLocation());

    // If this token contains nothing interesting, return it directly.
    if (!tok.needsCleaning())
    {
        buffer = tok_start;
        return tok.getLength();
    }

    // Otherwise, hard case, relex the characters into the string.
    char* out_buf = const_cast<char*>(buffer);
    for (const char *ptr = tok_start, *end = tok_start+tok.getLength();
         ptr != end; )
    {
        unsigned int char_size;
        *out_buf++ = Lexer::getCharAndSizeNoWarn(ptr, &char_size);
        ptr += char_size;
    }
    // Cleaning must have shortened the spelling relative to the raw length.
    assert(static_cast<unsigned int>(out_buf-buffer) != tok.getLength() &&
           "NeedsCleaning flag set on something that didn't need cleaning!");
    return out_buf-buffer;
}
// StringRef-returning variant: 'Buffer' is used as backing storage only when
// the token needs cleaning; otherwise the returned StringRef points directly
// at the identifier table or the source buffer (getSpelling may repoint Ptr).
StringRef
Preprocessor::getSpelling(const Token &Tok, SmallVectorImpl<char> &Buffer) const
{
    // Try the fast path.
    if (const IdentifierInfo *II = Tok.getIdentifierInfo())
        return II->getName();

    // Resize the buffer if we need to copy into it.
    if (Tok.needsCleaning())
        Buffer.resize(Tok.getLength());

    const char *Ptr = Buffer.data();
    unsigned Len = getSpelling(Tok, Ptr);
    return StringRef(Ptr, Len);
}
// Given the start location of a token and a character index within it, return
// the location of that character, accounting for escaped newlines and other
// "non-simple" multi-byte source characters.
SourceLocation
Preprocessor::AdvanceToTokenCharacter(SourceLocation tok_start,
                                      unsigned int char_no)
{
    // Figure out how many physical characters away the specified instantiation
    // character is.  This needs to take into consideration newlines.
    const char* tok_ptr = m_source_mgr.getCharacterData(tok_start);

    // If they request the first char of the token, we're trivially done.
    if (char_no == 0 && Lexer::isSimpleCharacter(*tok_ptr))
        return tok_start;

    unsigned phys_offset = 0;

    // The usual case is that tokens don't contain anything interesting.  Skip
    // over the uninteresting characters.  If a token only consists of simple
    // chars, this method is extremely fast.
    while (Lexer::isSimpleCharacter(*tok_ptr))
    {
        if (char_no == 0)
            return tok_start.getLocWithOffset(phys_offset);
        ++tok_ptr, --char_no, ++phys_offset;
    }

    // If we have a character that may be an escaped newline, use the lexer to
    // parse it correctly.
    for (; char_no; --char_no)
    {
        unsigned size;
        Lexer::getCharAndSizeNoWarn(tok_ptr, &size);
        tok_ptr += size;
        phys_offset += size;
    }

    // Final detail: if we end up on an escaped newline, we want to return the
    // location of the actual byte of the token.  For example foo\<newline>bar
    // advanced by 3 should return the location of b, not of \\.
    if (!Lexer::isSimpleCharacter(*tok_ptr))
        phys_offset = Lexer::SkipEscapedNewLines(tok_ptr)-tok_ptr;

    return tok_start.getLocWithOffset(phys_offset);
}
#if 0
// Disabled: return the location just past the end of the token starting at
// 'loc' (invalid/non-file locations yield an invalid SourceLocation).
SourceLocation
Preprocessor::getLocForEndOfToken(SourceLocation loc)
{
    if (loc.isInvalid() || !loc.isFileID())
        return SourceLocation();

    unsigned int len = Lexer::MeasureTokenLength(loc, m_source_mgr);
    return AdvanceToTokenCharacter(loc, len);
}
#endif
// Enter the main source file and queue all registered predefines buffers for
// lexing.  May only be called once per Preprocessor instance.
void
Preprocessor::EnterMainSourceFile()
{
    // We do not allow the preprocessor to reenter the main file.  Doing so will
    // cause FileID's to accumulate information from both runs (e.g. #line
    // information) and predefined macros aren't guaranteed to be set properly.
    assert(m_NumEnteredSourceFiles == 0 && "Cannot reenter the main file!");
    FileID MainFileID = m_source_mgr.getMainFileID();

    // Enter the main file source buffer.
    EnterSourceFile(MainFileID, 0, SourceLocation());

    // Tell the header info that the main file was entered.  If the file is
    // later #imported, it won't be re-entered.
    if (const FileEntry *FE = m_source_mgr.getFileEntryForID(MainFileID))
        m_header_info.IncrementIncludeCount(FE);

    // Preprocess Predefines to populate the initial preprocessor state.
    // NOTE(review): iterated in reverse registration order; presumably so the
    // first-registered buffer ends up lexed first — confirm against
    // EnterSourceFile's stacking behavior.
    for (std::vector<MemoryBuffer*>::reverse_iterator
         i=m_predefines.rbegin(), end=m_predefines.rend(); i != end; ++i)
    {
        FileID FID = m_source_mgr.createFileIDForMemBuffer(*i);
        assert(!FID.isInvalid() && "Could not create FileID for predefines?");

        // Start parsing the predefines.
        EnterSourceFile(FID, 0, SourceLocation());
    }
}
// Resolve and attach the IdentifierInfo for an identifier/label token.
// 'buf_ptr', when non-null, points at the raw lexed characters and lets us
// avoid a spelling computation for tokens that need no cleaning.
IdentifierInfo*
Preprocessor::LookUpIdentifierInfo(Token* identifier, const char* buf_ptr) const
{
    assert((identifier->is(Token::identifier) || identifier->is(Token::label))
           && "Not an identifier or label!");
    assert(identifier->getIdentifierInfo() == 0 && "Identinfo already exists!");

    // Look up this token, see if it is a macro, or if it is a language keyword.
    IdentifierInfo* ii;
    if (buf_ptr && !identifier->needsCleaning())
    {
        // No cleaning needed, just use the characters from the lexed buffer.
        ii = getIdentifierInfo(StringRef(buf_ptr, identifier->getLength()));
    }
    else
    {
        // Cleaning needed: clean into a small stack buffer, then use that.
        SmallString<64> identifier_buffer;
        StringRef cleaned_str = getSpelling(*identifier, identifier_buffer);
        ii = getIdentifierInfo(cleaned_str);
    }
    identifier->setIdentifierInfo(ii);
    return ii;
}
#if 0
/// Note that callers of this method are guarded by checking the
/// IdentifierInfo's 'isHandleIdentifierCase' bit.  If this method changes, the
/// IdentifierInfo methods that compute these properties will need to change to
/// match.
// NOTE(review): this disabled block mixes identifiers that don't exist in
// this file (II, Identifier, CurPPLexer, DisableMacroExpansion,
// HandleMacroExpandedIdentifier) with the locals 'ii'/'identifier'; it would
// not compile if re-enabled without fixing those names.
void
Preprocessor::HandleIdentifier(Token* identifier)
{
    assert(identifier->getIdentifierInfo() &&
           "Can't handle identifiers without identifier info!");

    IdentifierInfo& ii = *identifier->getIdentifierInfo();

    // If this identifier was poisoned, and if it was not produced from a macro
    // expansion, emit an error.
    if (ii.isPoisoned() && CurPPLexer) {
        if (&II != Ident__VA_ARGS__) // We warn about __VA_ARGS__ with poisoning.
            Diag(Identifier, diag::err_pp_used_poisoned_id);
        else
            Diag(Identifier, diag::ext_pp_bad_vaargs_use);
    }

    // If this is a macro to be expanded, do it.
    if (MacroInfo* MI = getMacroInfo(&II))
    {
        if (!DisableMacroExpansion && !identifier->isExpandDisabled())
        {
            if (MI->isEnabled())
            {
                if (!HandleMacroExpandedIdentifier(identifier, MI))
                    return;
            }
            else
            {
                // C99 6.10.3.4p2 says that a disabled macro may never again be
                // expanded, even if it's in a context where it could be
                // expanded in the future.
                Identifier.setFlag(Token::DisableExpand);
            }
        }
    }
}
#endif
// Locate an included file via the header search machinery.  'is_angled'
// distinguishes <...> from "..." includes; 'from_dir' restricts the search
// start point, and 'cur_dir' receives the directory the file was found in.
// Returns null when the file cannot be found.
const FileEntry*
Preprocessor::LookupFile(StringRef filename,
                         bool is_angled,
                         const DirectoryLookup* from_dir,
                         const DirectoryLookup*& cur_dir)
{
    // If the header lookup mechanism may be relative to the current file, pass
    // in info about where the current file is.
    const FileEntry* cur_file_ent = 0;
    if (!from_dir)
    {
        FileID FID = getCurrentFileLexer()->getFileID();
        cur_file_ent = m_source_mgr.getFileEntryForID(FID);

        // If there is no file entry associated with this file, it must be the
        // predefines buffer.  Any other file is not lexed with a normal lexer, so
        // it won't be scanned for preprocessor directives.  If we have the
        // predefines buffer, resolve #include references (which come from the
        // -include command line argument) as if they came from the main file, this
        // affects file lookup etc.
        if (cur_file_ent == 0)
        {
            FID = m_source_mgr.getMainFileID();
            cur_file_ent = m_source_mgr.getFileEntryForID(FID);
        }
    }

    // Do a standard file entry lookup.
    cur_dir = m_cur_dir_lookup;
    const FileEntry* FE =
        m_header_info.LookupFile(filename, is_angled, from_dir, cur_dir,
                                 cur_file_ent);
    if (FE)
        return FE;

    // Otherwise, we really couldn't find the file.
    return 0;
}
|
# Print a random banner: pick one .txt file from ~/.banners at random.
# BUG FIX: quote the command substitution so filenames containing spaces
# aren't word-split or glob-expanded (filenames with newlines still break).
cat "$(ls ~/.banners/*.txt | gshuf -n1)"
|
package models.Item;

/*
 * Implemented by <NAME>
 */

/**
 * Model class for a one-shot item.
 * NOTE(review): currently an empty placeholder — fields and behavior are
 * presumably defined elsewhere or not yet implemented; confirm.
 */
public class OneShot {
}
|
# coding: utf-8
module DatxRuby
  # Looks up location records for IPv4 addresses in an ipip.net .datx
  # database by binary search over its fixed-width (9-byte) index records.
  class City
    include DatxRuby::Common

    def initialize
      # Load the whole database into memory; the first 4 bytes hold the
      # index size.
      # NOTE(review): `datx` is not defined in this file — presumably a
      # reader supplied by DatxRuby::Common; confirm.
      @data = IO.binread(datx)
      @index_size = Util.bytes2long(@data[0], @data[1], @data[2], @data[3])
    end

    # Remember the path of the .datx file (stored in a global, first writer
    # wins because of ||=).
    # NOTE(review): the method is named `datax_path=` while the global is
    # `$datx_path` — looks like a typo, but renaming would break callers.
    def self.datax_path=(path)
      $datx_path ||= path
    end

    # Return the record fields (TAB-separated columns) for the given IPv4
    # address, or nil when no index range covers it.
    # Raises "Invalid IP address" unless ip is a valid IPv4 address.
    def find(ip)
      raise "Invalid IP address" unless ::IPAddr.new(ip).ipv4?
      low = 0
      # Each index entry is 9 bytes; the 262144/262148-byte regions are the
      # header/lookup areas excluded from the entry table.
      # NOTE(review): bare `index_size` (no @) must be a reader provided
      # elsewhere (e.g. by DatxRuby::Common); only @index_size is assigned
      # in this file — confirm.
      high = (index_size - 262144 - 262148) / 9 - 1
      val = Util.ip2long(ip)
      while low <= high do
        mid = (low + high) / 2
        pos = mid * 9 + 262148
        starts = 0
        mid_new = mid - 1
        if mid > 0
          # The range start is the previous entry's end + 1.
          pos_new = mid_new * 9 + 262148
          starts = Util.bytes2long(@data[pos_new], @data[pos_new + 1], @data[pos_new + 2], @data[pos_new + 3])
          starts += 1
        end
        ends = Util.bytes2long(@data[pos], @data[pos + 1], @data[pos + 2], @data[pos + 3])
        if val < starts
          high = mid_new
        elsif val > ends
          low = mid + 1
        else
          # Entry bytes 4-6 hold the record offset (note reversed byte
          # order), bytes 7-8 the record length.
          empty = [0].pack('C*')
          off = Util.bytes2long(empty, @data[pos + 6], @data[pos + 5], @data[pos + 4])
          length = Util.bytes2long(empty, empty, @data[pos + 7], @data[pos + 8])
          pos = off - 262144 + index_size
          tmp = @data[pos...pos + length].force_encoding('utf-8')
          return tmp.split("\t")
        end
      end
    end
  end
end
|
// ESLint configuration: airbnb-base rules plus the Vue plugin,
// targeting ES2018 syntax in a browser environment.
module.exports = {
    "env": {
        "browser": true,
        "es6": true
    },
    "extends": "airbnb-base",
    "parserOptions": {
        "ecmaVersion": 2018
    },
    "plugins": [
        "vue"
    ],
    // No project-specific rule overrides yet.
    "rules": {
    }
};
|
package naftoreiclag.stampedcash;
import java.util.ArrayList;
import java.util.Arrays;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
/**
 * Executor for the plugin's main command.  Only players may use it, because
 * the money items live in player inventories.
 */
public class MainCommand implements CommandExecutor
{
    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String alias, String[] args)
    {
        if(!(sender instanceof Player))
        {
            sender.sendMessage(ChatColor.RED + "Only players have inventories, so only players can use money.");
            return true;
        }
        Player player = (Player) sender;

        // BUG FIX: the original read args[0] unconditionally, throwing
        // ArrayIndexOutOfBoundsException when the command had no arguments.
        if(args.length == 0)
        {
            player.sendMessage(ChatColor.RED + "Specify a subcommand.");
            return true;
        }
        String subCommand = args[0];

        if(subCommand.equalsIgnoreCase("mint"))
        {
            if(args.length < 2)
            {
                player.sendMessage(ChatColor.RED + "Specify an amount.");
                return true;
            }
            try
            {
                double amount = Double.parseDouble(args[1]);
                if(amount <= 0.0d)
                {
                    player.sendMessage(ChatColor.RED + "We cannot print worthless money.");
                    return true;
                }
                // NOTE(review): the validated amount is never used — minting
                // appears unfinished; only validation happens here.
            }
            catch(NumberFormatException e)
            {
                player.sendMessage(ChatColor.RED + args[1] + " is not a valid number.");
                return true;
            }
            return true;
        }

        // Any other subcommand: hand the player a demo bank-note item (a gold
        // nugget carrying a fixed value and serial number in its metadata).
        ItemStack item = new ItemStack(Material.GOLD_NUGGET);
        ItemMeta itemmeta = item.getItemMeta();
        itemmeta.setDisplayName(Config.moneyPrefix + "8.23" + Config.moneySuffix);
        itemmeta.setLore(new ArrayList<String>(Arrays.asList(Config.serialPrefix + "34291" + Config.serialSuffix)));
        item.setItemMeta(itemmeta);
        player.getInventory().addItem(item);
        return true;
    }
}
|
#!/usr/bin/env bash
# Provision SSH access on a fresh host: install known public keys for root,
# generate local key pairs, and mirror the setup for a non-privileged user.
set -o nounset
set -o errexit

# Public keys to install, one per line.
SSH_KEYS="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC/6CW7KXHRJzpHB1dH5oioyNgfiAQEgldYtl4WKQjSWzodz0kBAmA/R3vV4S2cwt52V1FwlBtZdZCQT8ffUiB/lkawp/D0gHlYwFQCR/hI0yb8t4MktaP4YsyGrLLyQRxBQsClenzxW7v69P5Gargd4WygxdAAibbizg2V1vlIp3NRqchA0/lXh7eknHY3LMUtTc2UYkRTfKmyWpHnYZs6rPtSoL02uKrJvi9Un2kkW3mRqxtHXUlrl7gf5YexzLBUHnxLtiJTb97ZPxXHftcwscjyKmopNUSwMRqBCvUK57bM2jLL6gzxLumZ1zZZxi21/88UI9DaLXZgm5vmJT1p anton@dell500"
# add_ssh_keys KEYS [AUTHORIZED_KEYS_PATH]
# Append each public key in KEYS (one per line) to the authorized_keys file,
# skipping keys that are already present.  Defaults to ~/.ssh/authorized_keys.
function add_ssh_keys() {
    AUTHORIZED_KEYS_PATH=${2:-}
    if [[ -z "${AUTHORIZED_KEYS_PATH}" ]]; then
        AUTHORIZED_KEYS_PATH="~/.ssh/authorized_keys"
    fi
    # The default above stores a literal '~'; round-trip through bash -c so
    # the tilde gets expanded, then canonicalize the path.
    AUTHORIZED_KEYS_PATH=$(bash -c "readlink -f ${AUTHORIZED_KEYS_PATH}")
    KEYS="${1}"
    while read -r SSH_KEY_PUB; do
        # BUG FIX: match the key as a fixed string on a whole line (-F -x)
        # instead of interpreting it as a regex substring, and use grep -q
        # instead of the cat | grep | wc -l pipeline.  2>/dev/null keeps the
        # first run quiet when the file does not exist yet.
        if ! grep -qxF "${SSH_KEY_PUB}" "${AUTHORIZED_KEYS_PATH}" 2>/dev/null; then
            echo "${SSH_KEY_PUB}" >> "${AUTHORIZED_KEYS_PATH}"
        fi
    done <<< "${KEYS}"
}
# --- Root account -----------------------------------------------------------
# add remote ssh keys to root
if [[ ! -f ~/.ssh/authorized_keys ]] ; then
    # The throwaway ssh call creates ~/.ssh with sane permissions.
    ssh github.com -o "StrictHostKeyChecking no" || true
    touch ~/.ssh/authorized_keys
    chmod 600 ~/.ssh/authorized_keys
fi
add_ssh_keys "${SSH_KEYS}"

# generate local ssh key for root (no passphrase)
PRIVATE_SSH_KEY_FILE="$HOME/.ssh/id_rsa"
if [[ ! -f "$PRIVATE_SSH_KEY_FILE" ]] ; then
    ssh-keygen -t rsa -N '' -f "$PRIVATE_SSH_KEY_FILE"
fi

# --- Non-privileged user ----------------------------------------------------
# create non-privileged user (no-op if it already exists)
if id "user" >/dev/null 2>&1; then
    true
else
    adduser --disabled-password --gecos "" user
fi

# add all remote ssh keys from root to user
if [[ ! -f ~user/.ssh/authorized_keys ]] ; then
    su - user -c 'ssh github.com -o "StrictHostKeyChecking no" || true'
    touch ~user/.ssh/authorized_keys
    chown user. ~user/.ssh/authorized_keys
    chmod 600 ~user/.ssh/authorized_keys
fi
add_ssh_keys "$(cat ~/.ssh/authorized_keys)" ~user/.ssh/authorized_keys

# generate local ssh key for the user (no passphrase)
PRIVATE_SSH_KEY_FILE=$(readlink -f ~user/.ssh/id_rsa)
if [[ ! -f "${PRIVATE_SSH_KEY_FILE}" ]] ; then
    su - user -c "ssh-keygen -t rsa -N '' -f ${PRIVATE_SSH_KEY_FILE}"
fi

# --- Base packages ----------------------------------------------------------
# install
apt update
apt install -y \
    vim \
    tmux \
    man \
    htop \
    bash-completion
|
import sys
def size_of_object(obj):
    """Return the size of *obj* in bytes, as reported by sys.getsizeof.

    Removed the redundant parentheses around the return expression.
    """
    return sys.getsizeof(obj)
# Demo: print the size in bytes of a small int object
# (includes CPython object overhead, not just the numeric payload).
object1 = 10
print(size_of_object(object1))
<reponame>dmvass/boltd
package main
import (
"flag"
"fmt"
"log"
"net/http"
"github.com/boltdb/bolt"
"github.com/boltdb/boltd"
)
// main serves a Bolt database file over HTTP via the boltd handler.
// Usage: prog [-addr :9000] <path-to-db>
func main() {
    log.SetFlags(0)

    var (
        addr = flag.String("addr", ":9000", "bind address")
    )
    flag.Parse()

    // Validate parameters: the database path is the only positional argument.
    var path = flag.Arg(0)
    if path == "" {
        log.Fatal("path required")
    }

    // Open the database (0600: readable/writable by owner only).
    db, err := bolt.Open(path, 0600, nil)
    if err != nil {
        log.Fatal(err)
    }

    // Enable logging (timestamps) now that startup validation is done.
    log.SetFlags(log.LstdFlags)

    // Setup the HTTP handlers.
    http.Handle("/", boltd.NewHandler(db))

    // Start the HTTP server in the background; the empty select blocks
    // forever (log.Fatal inside the goroutine exits on serve failure).
    go func() { log.Fatal(http.ListenAndServe(*addr, nil)) }()
    fmt.Printf("Listening on http://localhost%s\n", *addr)
    select {}
}
|
/*
* Copyright (c) 2012-2015, <NAME>
* All rights reserved.
*
* This software is distributable under the BSD license.
* See the terms of the BSD license in the documentation provided with this software.
*/
package org.aeonbits.owner.util;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import org.aeonbits.owner.lfp.LFPUtils;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.Callable;
/**
* @author <NAME>
*/
/**
 * Reflection support for invoking interface default methods through a proxy.
 * On Java 8 this uses the package-private {@code MethodHandles.Lookup}
 * constructor (see the nested {@link Lookup} helper); on newer JVMs it uses
 * {@code MethodHandles.lookup().findSpecial(...)} instead.
 *
 * @author <NAME>
 */
class Java8SupportImpl implements Reflection.Java8Support {
    private boolean isJava8;

    // Cache of resolved default-method handles per (proxy class, method) pair.
    // NOTE(review): combining expireAfterWrite(10s) with expireAfterAccess(1s)
    // means entries effectively expire 1s after last access — confirm the
    // combination is intended.
    private LoadingCache<Map.Entry<Class<?>, Method>, Optional<MethodHandle>> defaultMethodLookupCache = Caffeine.newBuilder()
            .expireAfterWrite(Duration.ofSeconds(10)).expireAfterAccess(Duration.ofSeconds(1)).build(ent -> {
                Class<?> proxyClassType = ent.getKey();
                Method proxyMethod = ent.getValue();
                // Find a compatible default method on the proxy's interfaces.
                Optional<Method> defaultMethodOp = lookupDefaultMethod(proxyClassType, proxyMethod);
                if (!defaultMethodOp.isPresent())
                    return Optional.empty();
                final Class<?> declaringClass = defaultMethodOp.get().getDeclaringClass();
                MethodHandle methodHandle;
                if (isJava8) {
                    // Java 8: need a private Lookup in the declaring class to
                    // unreflect a default method for special invocation.
                    methodHandle = Lookup.in(declaringClass)
                            .unreflectSpecial(defaultMethodOp.get(), declaringClass);
                } else {
                    MethodType rt = MethodType.methodType(defaultMethodOp.get().getReturnType(), defaultMethodOp.get().getParameterTypes());
                    methodHandle = MethodHandles.lookup()
                            .findSpecial(declaringClass, defaultMethodOp.get().getName(), rt, declaringClass);
                }
                return Optional.of(methodHandle);
            });

    Java8SupportImpl() {
        // Detect Java 8 via the JVM spec version string ("1.8").
        String version = ManagementFactory.getRuntimeMXBean().getSpecVersion();
        isJava8 = version.startsWith("1.8");
    }

    /** Returns whether the method is an interface default method. */
    @Override
    public boolean isDefault(Method method) {
        return method.isDefault();
    }

    /** Invokes the matching default method bound to {@code proxy}. */
    @Override
    public Object invokeDefaultMethod(Object proxy, Method method, Object[] args) throws Throwable {
        return getDefaultMethodInvoker(proxy, method, args).call();
    }

    /**
     * Returns a Callable invoking the matching default method on
     * {@code proxy}, or null when there is nothing to invoke (null inputs,
     * a concrete non-default method, or no compatible default method found).
     */
    @Override
    public Callable<Object> getDefaultMethodInvoker(Object proxy, Method method, Object[] args) {
        if (proxy == null || method == null)
            return null;
        // Concrete (non-default, non-abstract) methods are not handled here.
        if (!method.isDefault() && !Modifier.isAbstract(method.getModifiers()))
            return null;
        Optional<MethodHandle> methodHandleOp = defaultMethodLookupCache.get(new AbstractMap.SimpleEntry<>(proxy.getClass(), method));
        if (!methodHandleOp.isPresent())
            return null;
        Callable<Object> result = () -> {
            try {
                return methodHandleOp.get().bindTo(proxy).invokeWithArguments(args);
            } catch (Throwable t) {
                // Preserve real exceptions; wrap other Throwables.
                if (t instanceof Exception)
                    throw (Exception) t;
                throw new Exception(t);
            }
        };
        return result;
    }

    /**
     * Finds a default method on one of the proxy class's interfaces whose
     * name matches and whose return/parameter types are assignment-compatible
     * with the invoked method's.
     */
    private static Optional<Method> lookupDefaultMethod(Class<?> proxyClassType, Method invokedMethod) {
        if (proxyClassType == null || invokedMethod == null)
            return Optional.empty();
        if (invokedMethod.isDefault())
            return Optional.of(invokedMethod);
        Set<Class<?>> classes = new LinkedHashSet<Class<?>>();
        classes.addAll(Arrays.asList(LFPUtils.getInterfaces(proxyClassType)));
        if (classes.isEmpty())
            return Optional.empty();
        Class<?>[] invokedMethodPTs = invokedMethod.getParameterTypes();
        for (Class<?> classType : classes) {
            Method[] methods = classType.getMethods();
            for (Method method : methods) {
                // Candidate must be a default method with the same name.
                if (!Reflection.isDefault(method))
                    continue;
                if (!invokedMethod.getName().equals(method.getName()))
                    continue;
                // Covariant return check.
                if (!invokedMethod.getReturnType().isAssignableFrom(method.getReturnType()))
                    continue;
                Class<?>[] methodPTs = method.getParameterTypes();
                if (invokedMethodPTs.length != methodPTs.length)
                    continue;
                // Each declared parameter type must accept the candidate's.
                boolean ptMatch = true;
                for (int i = 0; ptMatch && i < invokedMethodPTs.length; i++) {
                    Class<?> invokedMethodPT = invokedMethodPTs[i];
                    Class<?> methodPT = methodPTs[i];
                    if (!invokedMethodPT.isAssignableFrom(methodPT))
                        ptMatch = false;
                }
                if (!ptMatch)
                    continue;
                return Optional.of(method);
            }
        }
        return Optional.empty();
    }

    /**
     * Access to the package-private {@code MethodHandles.Lookup} constructor,
     * needed on Java 8 to build a lookup with private access in an arbitrary
     * class.
     */
    private static class Lookup {
        private static final Constructor<MethodHandles.Lookup> LOOKUP_CONSTRUCTOR = lookupConstructor();

        private static Constructor<MethodHandles.Lookup> lookupConstructor() {
            try {
                Constructor<MethodHandles.Lookup> ctor =
                        MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
                ctor.setAccessible(true);
                return ctor;
            } catch (NoSuchMethodException e) {
                // NOTE(review): returning null here makes Lookup.in(...) throw
                // a NullPointerException later — confirm acceptable.
                return null;
            }
        }

        private static MethodHandles.Lookup in(Class<?> requestedLookupClass)
                throws IllegalAccessException, InvocationTargetException, InstantiationException {
            return LOOKUP_CONSTRUCTOR.newInstance(requestedLookupClass, MethodHandles.Lookup.PRIVATE);
        }
    }
}
|
package com.atjl.dbtiming.mapper.gen;
import com.atjl.dbtiming.domain.gen.GenTaskHistory;
import com.atjl.dbtiming.domain.gen.GenTaskHistoryExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for {@link GenTaskHistory} records (primary key: htid).
 * The method set follows the standard MyBatis-Generator CRUD contract;
 * looks auto-generated — avoid hand-editing if that is the case.
 */
public interface GenTaskHistoryMapper {
    /** Counts rows matching the example criteria. */
    int countByExample(GenTaskHistoryExample example);

    /** Deletes rows matching the example criteria; returns rows affected. */
    int deleteByExample(GenTaskHistoryExample example);

    /** Deletes the row with the given primary key; returns rows affected. */
    int deleteByPrimaryKey(Long htid);

    /** Inserts a full record (all columns). */
    int insert(GenTaskHistory record);

    /** Inserts only the non-null fields of the record. */
    int insertSelective(GenTaskHistory record);

    /** Returns all rows matching the example criteria. */
    List<GenTaskHistory> selectByExample(GenTaskHistoryExample example);

    /** Returns the row with the given primary key, or null. */
    GenTaskHistory selectByPrimaryKey(Long htid);

    /** Updates non-null fields of rows matching the example criteria. */
    int updateByExampleSelective(@Param("record") GenTaskHistory record, @Param("example") GenTaskHistoryExample example);

    /** Updates all columns of rows matching the example criteria. */
    int updateByExample(@Param("record") GenTaskHistory record, @Param("example") GenTaskHistoryExample example);

    /** Updates non-null fields of the row with the record's primary key. */
    int updateByPrimaryKeySelective(GenTaskHistory record);

    /** Updates all columns of the row with the record's primary key. */
    int updateByPrimaryKey(GenTaskHistory record);
}
<gh_stars>0
package com.gz.pao.pao.web.admin.service;
import com.gz.pao.pao.web.admin.entity.User;
import java.util.List;
/**
 * Admin-side service for searching users.
 */
public interface SearchService {
    /** Returns the users matching the given sex and city criteria. */
    List<User> searchList(String sex, String city);
}
|
from types import ModuleType
from typing import List
def process_app_modules(app_modules: List[ModuleType], app_paths: List[str]) -> None:
    """Append the filesystem path of each app module to *app_paths* (in place).

    BUG FIX: the original annotated ``app_modules`` as ``List[str]``, but the
    body reads ``app.__file__``, which only module objects carry — the
    annotation now reflects the actual usage (annotation-only change, so
    runtime behavior and the call signature are unchanged).
    """
    for app in app_modules:
        # NOTE(review): ``upath`` is not defined in this file's visible scope —
        # presumably imported elsewhere (e.g. Django's django.utils._os.upath);
        # confirm before relying on this function.
        app_paths.append(upath(app.__file__))
def count_01(arr):
    """For each sub-list of *arr*, count its zeros and non-zeros.

    Returns a list of ``[zero_count, one_count]`` pairs, one per sub-list,
    in the same order as the input.
    """
    counts = []
    for row in arr:
        # Everything that is not equal to 0 counts as a "one".
        ones = sum(1 for value in row if value != 0)
        counts.append([len(row) - ones, ones])
    return counts
# Program : run_coreA.sh
# Description : Run CoreA, an anomaly detection algorithm based on core decomposition
# BUG FIX: quote "$@" so arguments containing spaces are forwarded to the JVM
# intact instead of being re-split into separate words.
java -cp "./CoreScope-2.0.jar:./library/commons-math3-3.2.jar" corescope.anomaly.CoreA "$@"
|
# Copyright (c) 2019 Sony Corporation. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Training command for 256 x 512 generator on cityscapes dataset
# (-D selects the dataset configuration understood by train.py).
python train.py -D cityscapes
|
<filename>src/kms/MenuItem.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package kms;
import java.util.ArrayList;
import javax.swing.ImageIcon;
/**
 * A single orderable item on the menu: a display name, a base price, an
 * availability flag, a menu image, and the list of ingredients consumed
 * (one unit each) whenever the item is ordered.
 *
 * @author Gault
 */
public class MenuItem
{
    public ImageIcon menuPic;     // image shown on the menu UI
    private String name;          // display name
    private double basePrice;     // price before tax
    private boolean isAvailable;  // whether the item can currently be ordered
    // Ingredients consumed when the item is ordered.
    ArrayList<Ingredient> ingredients = new ArrayList<Ingredient>();

    public MenuItem( String inputName, double inputPrice, boolean availability )
    {
        name = inputName;
        basePrice = inputPrice;
        isAvailable = availability;
        menuPic = Menu.getDefaultImg();
    }

    // Gets the price of a MenuItem with tax included.
    // NOTE(review): tax application is commented out, so this currently
    // returns the base price unchanged — confirm whether that is intended.
    public double getPrice()
    {
        //double salesTax = Menu.getTax();
        //return basePrice + ( basePrice * salesTax );
        return basePrice;
    }

    public void setPrice( double newPrice )
    {
        basePrice = newPrice;
    }

    public String getName()
    {
        return name;
    }

    public void setName( String newName )
    {
        name = newName;
    }

    public ImageIcon getImg()
    {
        return menuPic;
    }

    public void setImg( String imgLocation )
    {
        menuPic = new ImageIcon( imgLocation );
    }

    public boolean checkAvailability()
    {
        return isAvailable;
    }

    /** Flips the availability flag. */
    public void toggleAvailability()
    {
        // Simplified from an if/else that assigned the constants true/false.
        isAvailable = !isAvailable;
    }

    public ArrayList<Ingredient> getIngredients()
    {
        return ingredients;
    }

    public void addIngredient( Ingredient toAdd )
    {
        ingredients.add(toAdd);
    }

    /**
     * Removes one occurrence of the given ingredient.
     * @return true if the ingredient was present and removed.
     */
    public boolean removeIngredient( Ingredient toRemove )
    {
        // List.remove already reports whether the element was present,
        // replacing the original contains-then-remove double scan.
        return ingredients.remove( toRemove );
    }

    public void removeAllIngredients()
    {
        ingredients.clear();
    }

    /** Deducts one unit of every ingredient from the ingredient database. */
    public void ifOrdered()
    {
        // Create an instance of the database controller class.
        IngredientDB db = new IngredientDB();
        // Remove one unit of each ingredient from the database.
        for( Ingredient ingredient : ingredients )
        {
            db.updateIngredient(ingredient, -1);
        }
    }
}
|
// Redux-style action type constants for the smart-home client.

export const AC_ON = "AC_ON";
export const AC_OFF = "AC_OFF";
export const TV_ON = "TV_ON";
export const TV_OFF = "TV_OFF";
// NOTE(review): the LAMP_ON/LAMP_OFF values intentionally(?) differ from the
// constant names ("LIGHT_*") — confirm against the consumer before renaming.
export const LAMP_ON = "LIGHT_ON";
export const LAMP_OFF = "LIGHT_OFF";
export const GET_AC = "GET_AC";
export const GET_AC_STATUS = "GET_AC_STATUS";
export const GET_TV = "GET_TV";
export const GET_TV_STATUS = "GET_TV_STATUS";
// BUG FIX: GET_LAMP / GET_LAMP_STATUS previously reused the GET_TV values
// (copy-paste error), making lamp actions indistinguishable from TV actions
// anywhere that switches on action.type.
export const GET_LAMP = "GET_LAMP";
export const GET_LAMP_STATUS = "GET_LAMP_STATUS";
// NOTE(review): "SET TIMER" (with a space) is kept as-is for backward
// compatibility — confirm whether consumers actually expect "SET_TIMER".
export const SET_TIMER = "SET TIMER";
export const UP_CHANNEL = "UP_CHANNEL";
export const DOWN_CHANNEL = "DOWN_CHANNEL";
export const UP_VOLUME = "UP_VOLUME";
export const DOWN_VOLUME = "DOWN_VOLUME";
export const MUTE_VOLUME = "MUTE_VOLUME";
export const SET_TEMPERATURE = "SET_TEMPERATURE";
export const GET_TEMPERATURE = "GET_TEMPERATURE";
export const GET_HUMIDITY = "GET_HUMIDITY";
export const GET_CURRENT_DATA = "GET_CURRENT_DATA";
export const DO_LOGIN = "DO_LOGIN";
export const DO_VERIFY = "DO_VERIFY";
export const DO_LOGOUT = "DO_LOGOUT";
export const NOTIF_TOKEN = "NOTIF_TOKEN";
// export const NEW_TODO_SUCCESS = "NEW_TODO_SUCCESS";
// export const NEW_TODO_FAILURE = "NEW_TODO_FAILURE";
|
<reponame>quoeamaster/golang_blogs
/*
Copyright © 2020 quo master
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package command
import (
"bufio"
"encoding/json"
"fmt"
"io/ioutil"
"math"
"math/rand"
"net/http"
"os"
"strings"
"time"
)
// generatorUtil produces randomised inventory / sales transaction data for
// the demo profiles. Construct it via NewGeneratorUtil so clientDemoList
// and occupationList are populated before use.
type generatorUtil struct {
	invList []string // PS. prepared during GenTrx
	locationList []PlacemarkStruct // PS. loaded on demand -> u.loadPlacemartList(source, filename)
	clientDemoList []clientDemo // prepared during init -> prepareRandomData()
	occupationList []string // prepared during init -> prepareRandomData()
}
// NewGeneratorUtil constructs a generatorUtil and eagerly prepares its
// static random data (client demographics and the occupation pool).
func NewGeneratorUtil() (instance *generatorUtil) {
	instance = &generatorUtil{}
	instance.prepareRandomData()
	return instance
}
// Filenames (relative to the data source directory) used by the generator.
const (
	// cache of unique inventory entries written by a previous run
	srcInventoryBaseFilename = "inventory_base.txt"
	// raw online-sales CSV from which the inventory list is derived
	srcOnlineSalesCsv = "sourceOnlineSales.csv"
)
// GenTrx generates transaction entries for the given profile and wraps them
// in an EntryResponse.
//   - source:   directory containing the raw/prepared data files
//   - filename: base name used to locate the prepared placemark JSON
//   - profile:  which data to generate (inventory or sales profile constant)
//   - size:     number of sales entries to generate (ignored for inventory)
func (u *generatorUtil) GenTrx(source, filename, profile string, size int32) (resp *EntryResponse) {
	resp = new(EntryResponse)
	resp.Profile = profile
	if strings.Compare(genProfileInventory, profile) == 0 {
		invList := u.generateInventoryTrx(source, filename)
		resp.InventoryList = invList
		// TODO: write to file... for TESTING only
		//bContent, err := json.Marshal(invList)
		//CommonPanic(err)
		//err = ioutil.WriteFile(fmt.Sprintf("%v%vtest.json", source, string(os.PathSeparator)), bContent, 0755)
		//CommonPanic(err)
	} else if strings.Compare(genProfileSales, profile) == 0 {
		resp.SalesList = u.generateSalesTrx(source, filename, size)
	}
	// TODO: other profiles (all)
	return
}
// generateInventoryTrx builds one InventoryTrxStruct per known inventory
// item: randomised cost, quantity, stock-in/expiry dates, plus a randomly
// chosen location from the placemark list.
func (u *generatorUtil) generateInventoryTrx(source, filename string) (inventoryList []InventoryTrxStruct) {
	// read all entries from Online Sales.csv... create the inventory list
	// save the list to "inventory_base.txt" => 1 line with "," separated
	if len(u.invList) == 0 {
		u.invList = u.getInventoryList(source, filename)
	}
	// load locations
	if len(u.locationList) == 0 {
		u.locationList = u.loadPlacemartList(source, filename)
	}
	for _, inv := range u.invList {
		eInv := new(InventoryTrxStruct)
		eInv.StockInCost = u.getRandomFloat32(20, 160)
		eInv.StockInQuantity = int32(u.getRandomInteger(500, 10000))
		eInv.StockInDate = u.getRandomDate(180, 365)
		// expiry is 365 ~ 730 days after the stock-in date
		eInv.ExpiryDate = eInv.StockInDate.Add( time.Hour * time.Duration(24 * u.getRandomInteger(365, 730)) )
		// inventory entries are stored as "<description>--<product id>"
		product := new(ProductStruct)
		prodIdParts := strings.Split(inv, "--")
		product.Desc = prodIdParts[0]
		product.Id = prodIdParts[1]
		product.BatchId = fmt.Sprintf("%v-%06d", product.Id, u.getRandomInteger(1, 10))
		eInv.Product = *product
		// random get location
		iLocIdx := u.getRandomInteger(0, len(u.locationList))
		loc := u.locationList[iLocIdx]
		location := new(LocationStruct)
		location.Id = loc.ID
		location.Name = loc.Name
		location.PostCode = loc.Postcode
		location.Lat = loc.Lat
		location.Lng = loc.Lng
		eInv.Location = *location
		inventoryList = append(inventoryList, *eInv)
	}
	return inventoryList
}
// reusable method for generating Inventory and Sales entries.
// The return list contains the inventory information for building both types of trx
//
// getInventoryList returns the "<desc>--<id>" inventory entries. It either
// loads them from inventory_base.txt (written by a previous run) or derives
// them from the online-sales CSV and persists the result for next time.
// Panics on I/O errors.
func (u *generatorUtil) getInventoryList(source, filename string) (invList []string) {
	// a. check if a previous run has created the inventory_base.txt (no need to re-parse the whole Online sales.csv again
	baseFilename := fmt.Sprintf("%v%v%v", source, string(os.PathSeparator), srcInventoryBaseFilename)
	_, err := os.Stat(baseFilename)
	// NOTE(review): after os.Stat, os.IsExist(err) is effectively never true,
	// so this condition behaves like a plain "err == nil" existence check.
	if err == nil || os.IsExist(err) {
		// load the contents back to memory and start working...
		fHandle, err := os.OpenFile(baseFilename, os.O_RDONLY, 0755)
		if err != nil {
			panic(err)
		}
		defer fHandle.Close()
		bContents, err := ioutil.ReadAll(fHandle)
		if err != nil {
			panic(err)
		}
		// the base file is a single line of comma-separated entries
		invList = strings.Split(string(bContents), ",")
	} else {
		sourceFile := fmt.Sprintf("%v%v%v", source, string(os.PathSeparator), srcOnlineSalesCsv)
		invList, err = u.parseSourceOnlineSalesCsv(sourceFile)
		if err != nil {
			panic(err)
		}
		finalContent := strings.Join(invList, ",")
		// write to base file
		err = ioutil.WriteFile(baseFilename, []byte(finalContent), 0755)
		if err != nil {
			panic(err)
		}
	}
	return
}
// parseSourceOnlineSalesCsv parses the raw online-sales CSV and returns the
// unique inventory descriptions (CSV column 3), each suffixed with a
// generated unique product id in the form "<desc>--<id>".
func (u *generatorUtil) parseSourceOnlineSalesCsv(filename string) (invList []string, err error) {
	fHandle, err := os.OpenFile(filename, os.O_RDONLY, 0755)
	if err != nil {
		return
	}
	defer fHandle.Close()

	// collect unique, non-header description values
	invMap := make(map[string]bool)
	scanner := bufio.NewScanner(fHandle)
	for scanner.Scan() {
		parts := strings.Split(scanner.Text(), ",")
		if len(parts) > 3 {
			iVal := parts[2]
			// BUG FIX: the previous check, strings.Compare("Description", iVal) == -1,
			// silently dropped every description sorting at or before
			// "Description"; only the header row itself should be skipped.
			if iVal != "Description" && !invMap[iVal] {
				invMap[iVal] = true
				invList = append(invList, iVal)
			}
		}
	}
	// append a generated id to each entry, retrying until the id is unique
	prodIdMap := make(map[string]bool)
	for idx, prod := range invList {
		prodId := u.getRandomId("", idx)
		if !prodIdMap[prodId] {
			prodIdMap[prodId] = true
			invList[idx] = prod + "--" + prodId
		} else {
			for {
				prodId := u.getRandomId("", idx)
				if !prodIdMap[prodId] {
					prodIdMap[prodId] = true
					invList[idx] = prod + "--" + prodId
					break
				}
			}
		}
	}
	return
}
// getRandomInteger returns a pseudo-random integer in the half-open
// range [lower, upper).
func (u *generatorUtil) getRandomInteger(lower, upper int) (value int) {
	span := upper - lower
	return lower + rand.Intn(span)
}
// getRandomFloat32 returns a pseudo-random float32 in [lower, upper),
// rounded to two decimal places.
func (u *generatorUtil) getRandomFloat32(lower, upper float32) (value float32) {
	raw := rand.Float32()*(upper - lower) + lower
	rounded := math.Round(float64(raw) * 100) / 100
	return float32(rounded)
}
// getRandomDate returns a timestamp between `lower` and `upper` days in
// the past, relative to the current time.
func (u *generatorUtil) getRandomDate(lower, upper int) time.Time {
	daysBack := u.getRandomInteger(lower, upper)
	offset := time.Hour * time.Duration(24*daysBack)
	return time.Now().Add(-1 * offset)
}
// getRandomDateWithin24Hours returns a timestamp within the last 24 hours.
func (u *generatorUtil) getRandomDateWithin24Hours() time.Time {
	minutesBack := rand.Intn(24 * 60)
	offset := time.Minute * time.Duration(minutesBack)
	return time.Now().Add(-1 * offset)
}
// getRandomId produces a numeric id string derived from the current
// nanosecond clock, a random factor and the given seed.
// The category parameter is currently unused (kept for interface
// compatibility with callers).
func (u *generatorUtil) getRandomId(category string, seed int) (id string) {
	now := int(time.Now().UnixNano())
	value := int(math.Round(rand.Float64() * float64(now))) + seed
	id = fmt.Sprintf("%v", value)
	return
}
// InvoiceNo,StockCode,Description,Quantity,InvoiceDate,UnitPrice,CustomerID,Country (Online Sales.csv)
// Invoice ID,Branch,City,Customer type,Gender,Product line,Unit price,Quantity,Tax 5%,Total,Date,Time,Payment,cogs,gross margin percentage,gross income,Rating (supermarket_sales.csv)
//
// GenSalesTrx is an empty placeholder; sales generation is currently
// implemented by generateSalesTrx (invoked through GenTrx).
func (u *generatorUtil) GenSalesTrx() {
}
// load the placemart info - a.k.a. location list
//
// loadPlacemartList reads "<source>/<file>_prepared.json" and unmarshals it
// into a slice of PlacemarkStruct. Panics (via CommonPanic) on any I/O or
// JSON error.
// NOTE(review): the os.Stat guard only panics when the file does not exist;
// other stat errors fall through to OpenFile, which then fails and triggers
// CommonPanic anyway.
func (u *generatorUtil) loadPlacemartList(source, file string) (locations []PlacemarkStruct) {
	fname := fmt.Sprintf("%v%v%v_prepared.json", source, string(os.PathSeparator), file)
	_, err := os.Stat(fname)
	if err != nil && os.IsNotExist(err) {
		panic(err)
	}
	fHandle, err := os.OpenFile(fname, os.O_RDONLY, 0755)
	CommonPanic(err)
	defer fHandle.Close()
	bContent, err := ioutil.ReadAll(fHandle)
	CommonPanic(err)
	err = json.Unmarshal(bContent, &locations)
	CommonPanic(err)
	return
}
// generateSalesTrx creates `size` random sales transactions, each combining
// a random product from the inventory list, a random client (name, gender,
// occupation) and a random location from the placemark list.
func (u *generatorUtil) generateSalesTrx(source, filename string, size int32) (salesList []SalesTrxStruct) {
	// check if inventory list and location list ready
	if len(u.invList) == 0 {
		u.invList = u.getInventoryList(source, filename)
	}
	if len(u.locationList) == 0 {
		u.locationList = u.loadPlacemartList(source, filename)
	}
	for i:=0; i<int(size); i++ {
		sS := new(SalesTrxStruct)
		sS.Date = u.getRandomDateWithin24Hours()
		sS.SellingPrice = u.getRandomFloat32(20, 160)
		sS.Quantity = int32(u.getRandomInteger(1, 20))
		// product: pick a random "<desc>--<id>" inventory entry
		sProd := new(ProductStruct)
		invIdx := u.getRandomInteger(0, len(u.invList))
		invAtIdx := u.invList[invIdx]
		parts := strings.Split(invAtIdx, "--")
		sProd.Id = parts[1]
		sProd.Desc = parts[0]
		sProd.BatchId = fmt.Sprintf("%v-%06d", sProd.Id, u.getRandomInteger(1, 10))
		sS.Product = *sProd
		sClient := new(ClientStruct)
		sClient.Id = fmt.Sprintf("%06d", u.getRandomInteger(30, 67301))
		// pick a random clientDemo
		cDemo := u.clientDemoList[u.getRandomInteger(0, len(u.clientDemoList))]
		sClient.Name = fmt.Sprintf("%v %v", cDemo.Name, cDemo.Surname)
		sClient.Gender = cDemo.Gender
		// pick a random occupation
		oDemo := u.occupationList[u.getRandomInteger(0, len(u.occupationList))]
		sClient.Occupation = oDemo
		sS.Client = *sClient
		sLoc := new(LocationStruct)
		lDemo := u.locationList[u.getRandomInteger(0, len(u.locationList))]
		sLoc.Name = lDemo.Name
		sLoc.Id = lDemo.ID
		sLoc.PostCode = lDemo.Postcode
		sLoc.Lat = lDemo.Lat
		sLoc.Lng = lDemo.Lng
		sS.Location = *sLoc
		salesList = append(salesList, *sS)
	}
	return
}
// models

// InventoryTrxStruct is one stock-in (inventory) transaction.
type InventoryTrxStruct struct {
	StockInCost float32 `json:"stock_in_cost"` // range : 20 ~ 160
	StockInQuantity int32 `json:"stock_in_quantity"` // range : 500 ~ 10000
	StockInDate time.Time `json:"stock_in_date"` // range : 180 to 365 days earlier
	ExpiryDate time.Time `json:"expiry_date"` // above date + 365 ~ 730 days
	Product ProductStruct `json:"product"`
	Location LocationStruct `json:"location"`
}

// LocationStruct is the location (store/placemark) a transaction happened at.
type LocationStruct struct {
	Id string `json:"id"`
	Name string `json:"name"`
	PostCode string `json:"post_code"`
	Lat float32 `json:"lat"`
	Lng float32 `json:"lng"`
}

// ProductStruct identifies a product and the batch it came from.
type ProductStruct struct {
	Id string `json:"id"`
	Desc string `json:"desc"`
	BatchId string `json:"batch_id"`
}

// SalesTrxStruct is one generated sales transaction.
type SalesTrxStruct struct {
	Date time.Time `json:"date"` // within 24 hours of current time
	SellingPrice float32 `json:"selling_price"` // random 20 ~ 160
	Quantity int32 `json:"quantity"` // random 1 ~ 20
	Product ProductStruct `json:"product"`
	Client ClientStruct `json:"client"`
	Location LocationStruct `json:"location"`
}

// ClientStruct is the customer attached to a sales transaction.
type ClientStruct struct {
	Id string `json:"id"`
	Name string `json:"name"`
	Gender string `json:"gender"`
	Occupation string `json:"occupation"`
}

// clientDemo mirrors one entry from the uinames.com API response.
type clientDemo struct {
	Name string `json:"name"`
	Surname string `json:"surname"`
	Gender string `json:"gender"`
}

// response model to encapsulate all generated entries
type EntryResponse struct {
	Profile string
	InventoryList []InventoryTrxStruct
	SalesList []SalesTrxStruct
}
// prepare the static data for the generation
//
// prepareRandomData populates clientDemoList (random names/genders fetched
// from the uinames.com web API) and the static occupationList pool.
// NOTE(review): this performs a network call during construction and panics
// on any HTTP / read / unmarshal failure — if uinames.com is unreachable,
// NewGeneratorUtil cannot complete. Verify the API is still available and
// consider a local fallback.
func (u *generatorUtil) prepareRandomData() {
	/*clientDemoList = []clientDemo{
		clientDemo{ name: "a" },
	}*/
	// get random name + gender through api https://uinames.com/api/?amount=100
	resp, err := http.Get("https://uinames.com/api/?amount=200")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	bContent, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	err = json.Unmarshal(bContent, &u.clientDemoList)
	// TODO: add also the clientId... (in the new phase)
	if err != nil {
		panic(err)
	}
	// static pool of occupations assigned to generated sales clients
	u.occupationList = []string{
		"accountant",
		"actor",
		"actuary",
		"adhesive bonding machine tender",
		"adjudicator",
		"administrative assistant",
		"administrative services manager",
		"adult education teacher",
		"advertising manager",
		"advertising sales agent",
		"aerobics instructor",
		"aerospace engineer",
		"aerospace engineering technician",
		"agent",
		"agricultural engineer",
		"agricultural equipment operator",
		"agricultural grader",
		"agricultural inspector",
		"agricultural manager",
		"agricultural sciences teacher",
		"agricultural sorter",
		"agricultural technician",
		"agricultural worker",
		"air conditioning installer",
		"air conditioning mechanic",
		"air traffic controller",
		"aircraft cargo handling supervisor",
		"aircraft mechanic",
		"aircraft service technician",
		"airline copilot",
		"airline pilot",
		"ambulance dispatcher",
		"ambulance driver",
		"amusement machine servicer",
		"anesthesiologist",
		"animal breeder",
		"animal control worker",
		"animal scientist",
		"animal trainer",
		"animator",
		"answering service operator",
		"anthropologist",
		"apparel patternmaker",
		"apparel worker",
		"arbitrator",
		"archeologist",
		"architect",
		"architectural drafter",
		"architectural manager",
		"archivist",
		"art director",
		"art teacher",
		"artist",
		"assembler",
		"astronomer",
		"athlete",
		"athletic trainer",
		"ATM machine repairer",
		"atmospheric scientist",
		"attendant",
		"audio and video equipment technician",
		"audio-visual and multimedia collections specialist",
		"audiologist",
		"auditor",
		"author",
		"auto damage insurance appraiser",
		"automotive and watercraft service attendant",
		"automotive glass installer",
		"automotive mechanic",
		"avionics technician",
		"baggage porter",
		"bailiff",
		"baker",
		"barback",
		"barber",
		"bartender",
		"basic education teacher",
		"behavioral disorder counselor",
		"bellhop",
		"bench carpenter",
		"bicycle repairer",
		"bill and account collector",
		"billing and posting clerk",
		"biochemist",
		"biological technician",
		"biomedical engineer",
		"biophysicist",
		"blaster",
		"blending machine operator",
		"blockmason",
		"boiler operator",
		"boilermaker",
		"bookkeeper",
		"boring machine tool tender",
		"brazer",
		"brickmason",
		"bridge and lock tender",
		"broadcast news analyst",
		"broadcast technician",
		"brokerage clerk",
		"budget analyst",
		"building inspector",
		"bus mechanic",
		"butcher",
		"buyer",
		"cabinetmaker",
		"cafeteria attendant",
		"cafeteria cook",
		"camera operator",
		"camera repairer",
		"cardiovascular technician",
		"cargo agent",
		"carpenter",
		"carpet installer",
		"cartographer",
		"cashier",
		"caster",
		"ceiling tile installer",
		"cellular equipment installer",
		"cement mason",
		"channeling machine operator",
		"chauffeur",
		"checker",
		"chef",
		"chemical engineer",
		"chemical plant operator",
		"chemist",
		"chemistry teacher",
		"chief executive",
		"child social worker",
		"childcare worker",
		"chiropractor",
		"choreographer",
		"civil drafter",
		"civil engineer",
		"civil engineering technician",
		"claims adjuster",
		"claims examiner",
		"claims investigator",
		"cleaner",
		"clinical laboratory technician",
		"clinical laboratory technologist",
		"clinical psychologist",
		"coating worker",
		"coatroom attendant",
		"coil finisher",
		"coil taper",
		"coil winder",
		"coin machine servicer",
		"commercial diver",
		"commercial pilot",
		"commodities sales agent",
		"communications equipment operator",
		"communications teacher",
		"community association manager",
		"community service manager",
		"compensation and benefits manager",
		"compliance officer",
		"composer",
		"computer hardware engineer",
		"computer network architect",
		"computer operator",
		"computer programmer",
		"computer science teacher",
		"computer support specialist",
		"computer systems administrator",
		"computer systems analyst",
		"concierge",
		"conciliator",
		"concrete finisher",
		"conservation science teacher",
		"conservation scientist",
		"conservation worker",
		"conservator",
		"construction inspector",
		"construction manager",
		"construction painter",
		"construction worker",
		"continuous mining machine operator",
		"convention planner",
		"conveyor operator",
		"cook",
		"cooling equipment operator",
		"copy marker",
		"correctional officer",
		"correctional treatment specialist",
		"correspondence clerk",
		"correspondent",
		"cosmetologist",
		"cost estimator",
		"costume attendant",
		"counseling psychologist",
		"counselor",
		"courier",
		"court reporter",
		"craft artist",
		"crane operator",
		"credit analyst",
		"credit checker",
		"credit counselor",
		"criminal investigator",
		"criminal justice teacher",
		"crossing guard",
		"curator",
		"custom sewer",
		"customer service representative",
		"cutter",
		"cutting machine operator",
		"dancer",
		"data entry keyer",
		"database administrator",
		"decorating worker",
		"delivery services driver",
		"demonstrator",
		"dental assistant",
		"dental hygienist",
		"dental laboratory technician",
		"dentist",
		"derrick operator",
		"designer",
		"desktop publisher",
		"detective",
		"diagnostic medical sonographer",
		"die maker",
		"diesel engine specialist",
		"dietetic technician",
		"dietitian",
		"dinkey operator",
		"director",
		"dishwasher",
		"dispatcher",
		"door-to-door sales worker",
		"drafter",
		"dragline operator",
		"drama teacher",
		"dredge operator",
		"dressing room attendant",
		"dressmaker",
		"drier operator",
		"drilling machine tool operator",
		"dry-cleaning worker",
		"drywall installer",
		"dyeing machine operator",
		"earth driller",
		"economics teacher",
		"economist",
		"editor",
		"education administrator",
		"electric motor repairer",
		"electrical electronics drafter",
		"electrical engineer",
		"electrical equipment assembler",
		"electrical installer",
		"electrical power-line installer",
		"electrician",
		"electro-mechanical technician",
		"elementary school teacher",
		"elevator installer",
		"elevator repairer",
		"embalmer",
		"emergency management director",
		"emergency medical technician",
		"engine assembler",
		"engineer",
		"engineering manager",
		"engineering teacher",
		"english language teacher",
		"engraver",
		"entertainment attendant",
		"environmental engineer",
		"environmental science teacher",
		"environmental scientist",
		"epidemiologist",
		"escort",
		"etcher",
		"event planner",
		"excavating operator",
		"executive administrative assistant",
		"executive secretary",
		"exhibit designer",
		"expediting clerk",
		"explosives worker",
		"extraction worker",
		"fabric mender",
		"fabric patternmaker",
		"fabricator",
		"faller",
		"family practitioner",
		"family social worker",
		"family therapist",
		"farm advisor",
		"farm equipment mechanic",
		"farm labor contractor",
		"farmer",
		"farmworker",
		"fashion designer",
		"fast food cook",
		"fence erector",
		"fiberglass fabricator",
		"fiberglass laminator",
		"file clerk",
		"filling machine operator",
		"film and video editor",
		"financial analyst",
		"financial examiner",
		"financial manager",
		"financial services sales agent",
		"fine artist",
		"fire alarm system installer",
		"fire dispatcher",
		"fire inspector",
		"fire investigator",
		"firefighter",
		"fish and game warden",
		"fish cutter",
		"fish trimmer",
		"fisher",
		"fitness studies teacher",
		"fitness trainer",
		"flight attendant",
		"floor finisher",
		"floor layer",
		"floor sander",
		"floral designer",
		"food batchmaker",
		"food cooking machine operator",
		"food preparation worker",
		"food science technician",
		"food scientist",
		"food server",
		"food service manager",
		"food technologist",
		"foreign language teacher",
		"foreign literature teacher",
		"forensic science technician",
		"forest fire inspector",
		"forest fire prevention specialist",
		"forest worker",
		"forester",
		"forestry teacher",
		"forging machine setter",
		"foundry coremaker",
		"freight agent",
		"freight mover",
		"fundraising manager",
		"funeral attendant",
		"funeral director",
		"funeral service manager",
		"furnace operator",
		"furnishings worker",
		"furniture finisher",
		"gaming booth cashier",
		"gaming cage worker",
		"gaming change person",
		"gaming dealer",
		"gaming investigator",
		"gaming manager",
		"gaming surveillance officer",
		"garment mender",
		"garment presser",
		"gas compressor",
		"gas plant operator",
		"gas pumping station operator",
		"general manager",
		"general practitioner",
		"geographer",
		"geography teacher",
		"geological engineer",
		"geological technician",
		"geoscientist",
		"glazier",
		"government program eligibility interviewer",
		"graduate teaching assistant",
		"graphic designer",
		"groundskeeper",
		"groundskeeping worker",
		"gynecologist",
		"hairdresser",
		"hairstylist",
		"hand grinding worker",
		"hand laborer",
		"hand packager",
		"hand packer",
		"hand polishing worker",
		"hand sewer",
		"hazardous materials removal worker",
		"head cook",
		"health and safety engineer",
		"health educator",
		"health information technician",
		"health services manager",
		"health specialties teacher",
		"healthcare social worker",
		"hearing officer",
		"heat treating equipment setter",
		"heating installer",
		"heating mechanic",
		"heavy truck driver",
		"highway maintenance worker",
		"historian",
		"history teacher",
		"hoist and winch operator",
		"home appliance repairer",
		"home economics teacher",
		"home entertainment installer",
		"home health aide",
		"home management advisor",
		"host",
		"hostess",
		"hostler",
		"hotel desk clerk",
		"housekeeping cleaner",
		"human resources assistant",
		"human resources manager",
		"human service assistant",
		"hunter",
		"hydrologist",
		"illustrator",
		"industrial designer",
		"industrial engineer",
		"industrial engineering technician",
		"industrial machinery mechanic",
		"industrial production manager",
		"industrial truck operator",
		"industrial-organizational psychologist",
		"information clerk",
		"information research scientist",
		"information security analyst",
		"information systems manager",
		"inspector",
		"instructional coordinator",
		"instructor",
		"insulation worker",
		"insurance claims clerk",
		"insurance sales agent",
		"insurance underwriter",
		"intercity bus driver",
		"interior designer",
		"internist",
		"interpreter",
		"interviewer",
		"investigator",
		"jailer",
		"janitor",
		"jeweler",
		"judge",
		"judicial law clerk",
		"kettle operator",
		"kiln operator",
		"kindergarten teacher",
		"laboratory animal caretaker",
		"landscape architect",
		"landscaping worker",
		"lathe setter",
		"laundry worker",
		"law enforcement teacher",
		"law teacher",
		"lawyer",
		"layout worker",
		"leather worker",
		"legal assistant",
		"legal secretary",
		"legislator",
		"librarian",
		"library assistant",
		"library science teacher",
		"library technician",
		"licensed practical nurse",
		"licensed vocational nurse",
		"life scientist",
		"lifeguard",
		"light truck driver",
		"line installer",
		"literacy teacher",
		"literature teacher",
		"loading machine operator",
		"loan clerk",
		"loan interviewer",
		"loan officer",
		"lobby attendant",
		"locker room attendant",
		"locksmith",
		"locomotive engineer",
		"locomotive firer",
		"lodging manager",
		"log grader",
		"logging equipment operator",
		"logistician",
		"machine feeder",
		"machinist",
		"magistrate judge",
		"magistrate",
		"maid",
		"mail clerk",
		"mail machine operator",
		"mail superintendent",
		"maintenance painter",
		"maintenance worker",
		"makeup artist",
		"management analyst",
		"manicurist",
		"manufactured building installer",
		"mapping technician",
		"marble setter",
		"marine engineer",
		"marine oiler",
		"market research analyst",
		"marketing manager",
		"marketing specialist",
		"marriage therapist",
		"massage therapist",
		"material mover",
		"materials engineer",
		"materials scientist",
		"mathematical science teacher",
		"mathematical technician",
		"mathematician",
		"maxillofacial surgeon",
		"measurer",
		"meat cutter",
		"meat packer",
		"meat trimmer",
		"mechanical door repairer",
		"mechanical drafter",
		"mechanical engineer",
		"mechanical engineering technician",
		"mediator",
		"medical appliance technician",
		"medical assistant",
		"medical equipment preparer",
		"medical equipment repairer",
		"medical laboratory technician",
		"medical laboratory technologist",
		"medical records technician",
		"medical scientist",
		"medical secretary",
		"medical services manager",
		"medical transcriptionist",
		"meeting planner",
		"mental health counselor",
		"mental health social worker",
		"merchandise displayer",
		"messenger",
		"metal caster",
		"metal patternmaker",
		"metal pickling operator",
		"metal pourer",
		"metal worker",
		"metal-refining furnace operator",
		"metal-refining furnace tender",
		"meter reader",
		"microbiologist",
		"middle school teacher",
		"milling machine setter",
		"millwright",
		"mine cutting machine operator",
		"mine shuttle car operator",
		"mining engineer",
		"mining safety engineer",
		"mining safety inspector",
		"mining service unit operator",
		"mixing machine setter",
		"mobile heavy equipment mechanic",
		"mobile home installer",
		"model maker",
		"model",
		"molder",
		"mortician",
		"motel desk clerk",
		"motion picture projectionist",
		"motorboat mechanic",
		"motorboat operator",
		"motorboat service technician",
		"motorcycle mechanic",
		"multimedia artist",
		"museum technician",
		"music director",
		"music teacher",
		"musical instrument repairer",
		"musician",
		"natural sciences manager",
		"naval architect",
		"network systems administrator",
		"new accounts clerk",
		"news vendor",
		"nonfarm animal caretaker",
		"nuclear engineer",
		"nuclear medicine technologist",
		"nuclear power reactor operator",
		"nuclear technician",
		"nursing aide",
		"nursing instructor",
		"nursing teacher",
		"nutritionist",
		"obstetrician",
		"occupational health and safety specialist",
		"occupational health and safety technician",
		"occupational therapist",
		"occupational therapy aide",
		"occupational therapy assistant",
		"offbearer",
		"office clerk",
		"office machine operator",
		"operating engineer",
		"operations manager",
		"operations research analyst",
		"ophthalmic laboratory technician",
		"optician",
		"optometrist",
		"oral surgeon",
		"order clerk",
		"order filler",
		"orderly",
		"ordnance handling expert",
		"orthodontist",
		"orthotist",
		"outdoor power equipment mechanic",
		"oven operator",
		"packaging machine operator",
		"painter ",
		"painting worker",
		"paper goods machine setter",
		"paperhanger",
		"paralegal",
		"paramedic",
		"parking enforcement worker",
		"parking lot attendant",
		"parts salesperson",
		"paving equipment operator",
		"payroll clerk",
		"pediatrician",
		"pedicurist",
		"personal care aide",
		"personal chef",
		"personal financial advisor",
		"pest control worker",
		"pesticide applicator",
		"pesticide handler",
		"pesticide sprayer",
		"petroleum engineer",
		// NOTE(review): "<NAME>" below looks like redaction residue from data
		// scrubbing, not a real occupation — verify the intended value.
		"<NAME>",
		"petroleum pump system operator",
		"petroleum refinery operator",
		"petroleum technician",
		"pharmacist",
		"pharmacy aide",
		"pharmacy technician",
		"philosophy teacher",
		"photogrammetrist",
		"photographer",
		"photographic process worker",
		"photographic processing machine operator",
		"physical therapist aide",
		"physical therapist assistant",
		"physical therapist",
		"physician assistant",
		"physician",
		"physicist",
		"physics teacher",
		"pile-driver operator",
		"pipefitter",
		"pipelayer",
		"planing machine operator",
		"planning clerk",
		"plant operator",
		"plant scientist",
		"plasterer",
		"plastic patternmaker",
		"plastic worker",
		"plumber",
		"podiatrist",
		"police dispatcher",
		"police officer",
		"policy processing clerk",
		"political science teacher",
		"political scientist",
		"postal service clerk",
		"postal service mail carrier",
		"postal service mail processing machine operator",
		"postal service mail processor",
		"postal service mail sorter",
		"postmaster",
		"postsecondary teacher",
		"poultry cutter",
		"poultry trimmer",
		"power dispatcher",
		"power distributor",
		"power plant operator",
		"power tool repairer",
		"precious stone worker",
		"precision instrument repairer",
		"prepress technician",
		"preschool teacher",
		"priest",
		"print binding worker",
		"printing press operator",
		"private detective",
		"probation officer",
		"procurement clerk",
		"producer",
		"product promoter",
		"production clerk",
		"production occupation",
		"proofreader",
		"property manager",
		"prosthetist",
		"prosthodontist",
		"psychiatric aide",
		"psychiatric technician",
		"psychiatrist",
		"psychologist",
		"psychology teacher",
		"public relations manager",
		"public relations specialist",
		"pump operator",
		"purchasing agent",
		"purchasing manager",
		"radiation therapist",
		"radio announcer",
		"radio equipment installer",
		"radio operator",
		"radiologic technician",
		"radiologic technologist",
		"rail car repairer",
		"rail transportation worker",
		"rail yard engineer",
		"rail-track laying equipment operator",
		"railroad brake operator",
		"railroad conductor",
		"railroad police",
		"rancher",
		"real estate appraiser",
		"real estate broker",
		"real estate manager",
		"real estate sales agent",
		"receiving clerk",
		"receptionist",
		"record clerk",
		"recreation teacher",
		"recreation worker",
		"recreational therapist",
		"recreational vehicle service technician",
		"recyclable material collector",
		"referee",
		"refractory materials repairer",
		"refrigeration installer",
		"refrigeration mechanic",
		"refuse collector",
		"regional planner",
		"registered nurse",
		"rehabilitation counselor",
		"reinforcing iron worker",
		"reinforcing rebar worker",
		"religion teacher",
		"religious activities director",
		"religious worker",
		"rental clerk",
		"repair worker",
		"reporter",
		"residential advisor",
		"resort desk clerk",
		"respiratory therapist",
		"respiratory therapy technician",
		"retail buyer",
		"retail salesperson",
		"revenue agent",
		"rigger",
		"rock splitter",
		"rolling machine tender",
		"roof bolter",
		"roofer",
		"rotary drill operator",
		"roustabout",
		"safe repairer",
		"sailor",
		"sales engineer",
		"sales manager",
		"sales representative",
		"sampler",
		"sawing machine operator",
		"scaler",
		"school bus driver",
		"school psychologist",
		"school social worker",
		"scout leader",
		"sculptor",
		"secondary education teacher",
		"secondary school teacher",
		"secretary",
		"securities sales agent",
		"security guard",
		"security system installer",
		"segmental paver",
		"self-enrichment education teacher",
		"semiconductor processor",
		"septic tank servicer",
		"set designer",
		"sewer pipe cleaner",
		"sewing machine operator",
		"shampooer",
		"shaper",
		"sheet metal worker",
		"sheriff's patrol officer",
		"ship captain",
		"ship engineer",
		"ship loader",
		"shipmate",
		"shipping clerk",
		"shoe machine operator",
		"shoe worker",
		"short order cook",
		"signal operator",
		"signal repairer",
		"singer",
		"ski patrol",
		"skincare specialist",
		"slaughterer",
		"slicing machine tender",
		"slot supervisor",
		"social science research assistant",
		"social sciences teacher",
		"social scientist",
		"social service assistant",
		"social service manager",
		"social work teacher",
		"social worker",
		"sociologist",
		"sociology teacher",
		"software developer",
		"software engineer",
		"soil scientist",
		"solderer",
		"sorter",
		"sound engineering technician",
		"space scientist",
		"special education teacher",
		"speech-language pathologist",
		"sports book runner",
		"sports entertainer",
		"sports performer",
		"stationary engineer",
		"statistical assistant",
		"statistician",
		"steamfitter",
		"stock clerk",
		"stock mover",
		"stonemason",
		"street vendor",
		"streetcar operator",
		"structural iron worker",
		"structural metal fabricator",
		"structural metal fitter",
		"structural steel worker",
		"stucco mason",
		"substance abuse counselor",
		"substance abuse social worker",
		"subway operator",
		"surfacing equipment operator",
		"surgeon",
		"surgical technologist",
		"survey researcher",
		"surveying technician",
		"surveyor",
		"switch operator",
		"switchboard operator",
		"tailor",
		"tamping equipment operator",
		"tank car loader",
		"taper",
		"tax collector",
		"tax examiner",
		"tax preparer",
		"taxi driver",
		"teacher assistant",
		"teacher",
		"team assembler",
		"technical writer",
		"telecommunications equipment installer",
		"telemarketer",
		"telephone operator",
		"television announcer",
		"teller",
		"terrazzo finisher",
		"terrazzo worker",
		"tester",
		"textile bleaching operator",
		"textile cutting machine setter",
		"textile knitting machine setter",
		"textile presser",
		"textile worker",
		"therapist",
		"ticket agent",
		"ticket taker",
		"tile setter",
		"timekeeping clerk",
		"timing device assembler",
		"tire builder",
		"tire changer",
		"tire repairer",
		"title abstractor",
		"title examiner",
		"title searcher",
		"tobacco roasting machine operator",
		"tool filer",
		"tool grinder",
		"tool maker",
		"tool sharpener",
		"tour guide",
		"tower equipment installer",
		"tower operator",
		"track switch repairer",
		"tractor operator",
		"tractor-trailer truck driver",
		"traffic clerk",
		"traffic technician",
		"training and development manager",
		"training and development specialist",
		"transit police",
		"translator",
		"transportation equipment painter",
		"transportation inspector",
		"transportation security screener",
		"transportation worker",
		"trapper",
		"travel agent",
		"travel clerk",
		"travel guide",
		"tree pruner",
		"tree trimmer",
		"trimmer",
		"truck loader",
		"truck mechanic",
		"tuner",
		"turning machine tool operator",
		"typist",
		"umpire",
		"undertaker",
		"upholsterer",
		"urban planner",
		"usher",
		"valve installer",
		"vending machine servicer",
		"veterinarian",
		"veterinary assistant",
		"veterinary technician",
		"vocational counselor",
		"vocational education teacher",
		"waiter",
		"waitress",
		"watch repairer",
		"water treatment plant operator",
		"weaving machine setter",
		"web developer",
		"weigher",
		"welder",
		"wellhead pumper",
		"wholesale buyer",
		"wildlife biologist",
		"window trimmer",
		"wood patternmaker",
		"woodworker",
		"word processor",
		"writer",
		"yardmaster",
		"zoologist",
	}
}
|
const inquirer = require('inquirer');
const { initPrompts } = require('./prompts');

/**
 * Ask the user for their sign-in credentials on the terminal using the
 * prompt definitions from ./prompts.
 * @returns {Promise<Object>} answers object produced by inquirer
 */
async function askSignInCredentials() {
  return inquirer.prompt(initPrompts);
}

module.exports = { askSignInCredentials };
|
#! /bin/bash
## Generate auto-generated .gs bootstrap file for distribution
## Must be run in a Rowan extent into which Sparkle has been loaded with installSparkle.sh

## Figure out what directory this script is in
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
checkout_dir=${script_dir}/..

## Generate bootstrap.gs file from a Rowan-enabled stone with Sparkle loaded.
# FIX: the failure branch used a ( ... ) subshell, so `exit 1` only terminated
# the subshell and the script still finished with a success status. A { ...; }
# brace group runs in the current shell, so the non-zero exit now propagates.
topaz -l -I loginSystemUser.topaz -S generateBootstrap.topaz \
  || { echo "bootstrap generation failed!"; exit 1; }
|
#!/bin/bash
source ../env.sh

# Namespace and kubectl output format shared by both queries below.
ns='ingress'
fmt='wide'

echo ''
echo '=== kubectl --namespace ingress get services; all'
kubectl --namespace "$ns" get services -o "$fmt"

echo ''
echo '=== kubectl --namespace ingress get services; nginx-ingress-controller'
kubectl --namespace "$ns" get services -o "$fmt" nginx-ingress-controller

echo 'done'
# Output:
# === kubectl --namespace ingress get services; all
# NAME                            TYPE           CLUSTER-IP   EXTERNAL-IP      PORT(S)                      AGE   SELECTOR
# nginx-ingress-controller        LoadBalancer   10.2.0.103   104.45.188.200   80:30929/TCP,443:30518/TCP   19h   app.kubernetes.io/component=controller,app=nginx-ingress,release=nginx-ingress
# nginx-ingress-default-backend   ClusterIP      10.2.0.192   <none>           80/TCP                       19h   app.kubernetes.io/component=default-backend,app=nginx-ingress,release=nginx-ingress
# === kubectl --namespace ingress get services; nginx-ingress-controller
# NAME                       TYPE           CLUSTER-IP   EXTERNAL-IP      PORT(S)                      AGE   SELECTOR
# nginx-ingress-controller   LoadBalancer   10.2.0.103   104.45.188.200   80:30929/TCP,443:30518/TCP   19h   app.kubernetes.io/component=controller,app=nginx-ingress,release=nginx-ingress
# done
<!-- Inline stylesheet for the contact table below. -->
<style>
/* Base table look: sans-serif font, collapsed borders, full page width. */
table {
  font-family: arial, sans-serif;
  border-collapse: collapse;
  width: 100%;
}
/* Uniform thin borders, left-aligned text and padding for header and data cells. */
td, th {
  border: 1px solid #dddddd;
  text-align: left;
  padding: 8px;
}
/* Zebra striping: shade every even row for readability. */
tr:nth-child(even) {
  background-color: #dddddd;
}
</style>
<!-- Static demo table with one header row and a single contact entry. -->
<table>
  <tr>
    <th>Name</th>
    <th>Address</th>
    <th>Email</th>
  </tr>
  <tr>
    <td>John Doe</td>
    <td>123 Main Street</td>
    <td>john@example.com</td>
  </tr>
</table>
<reponame>Nebulis/blog<gh_stars>0
import { graphql, PageProps, useStaticQuery } from "gatsby"
import React, { useState } from "react"
import { configureI18n } from "../i18n"
import { isEnglishPage } from "../utils"
import { useCustomTranslation } from "../i18n-hook"
configureI18n()
/**
 * Root wrapper component: loads site configuration, derives the
 * development flag, keeps the i18n language in sync with the URL, and
 * exposes everything through ApplicationContext.
 */
export const Application: React.FunctionComponent<PageProps> = ({ children, location }) => {
  const { i18n } = useCustomTranslation()
  const { site } = useStaticQuery(
    graphql`
      query {
        site {
          siteMetadata {
            config {
              context
              comments
              ssrBuild
            }
          }
        }
      }
    `
  )
  const { config } = site.siteMetadata
  // Anything that is not an explicit production build counts as development.
  const startedInDevelopment = config.context !== "production"
  const [development, setDevelopment] = useState(startedInDevelopment)
  if (!startedInDevelopment) {
    // if the URL starts with /en, then we really want to display the page in english
    const wantsEnglish = isEnglishPage(location)
    if (wantsEnglish && i18n.languageCode !== "en") {
      i18n.changeLanguage("en")
    } else if (!wantsEnglish && i18n.languageCode !== "fr") {
      i18n.changeLanguage("fr")
    }
  }
  return (
    <ApplicationContext.Provider
      value={{
        initialDevelopmentValue: startedInDevelopment,
        development,
        displayComments: config.comments === "enabled",
        ssrBuild: config.ssrBuild === "enabled",
        toggle: () => setDevelopment(!development),
        covid: true,
        instagramInAppBrowser:
          typeof window !== "undefined" ? navigator.userAgent.toLowerCase().indexOf("instagram") !== -1 : false,
      }}
    >
      {children}
    </ApplicationContext.Provider>
  )
}
type ApplicationContextValue = {
  development: boolean
  displayComments: boolean
  ssrBuild: boolean
  /**
   * Snapshot of the development flag taken when the page first loaded.
   * `development` itself can be flipped later through `toggle`, but some
   * features must keep behaving according to this initial value and never
   * change over time.
   */
  initialDevelopmentValue?: boolean
  toggle?: () => void
  covid: boolean
  /**
   * Instagram's in-app browser shows/hides its menu and footer bars, which
   * changes the viewport height, which in turn changes the maximum height of
   * images and triggers an annoying layout shift. When this flag is true, the
   * maximum height is pinned to the viewport height measured at load time so
   * it no longer follows the browser bars. Affected elements:
   * - images (mainly portrait)
   * - header (computed using 100vh)
   * - carousel (computed using header)
   */
  instagramInAppBrowser: boolean
}

// Defaults used when a consumer renders outside of <Application />.
const defaultApplicationContextValue: ApplicationContextValue = {
  development: false,
  displayComments: false,
  ssrBuild: false,
  initialDevelopmentValue: false,
  covid: true,
  instagramInAppBrowser: false,
}

export const ApplicationContext = React.createContext<ApplicationContextValue>(defaultApplicationContextValue)
|
<reponame>ByteExceptionM/Nameless-Java-API<filename>src/com/namelessmc/java_api/NamelessAPI.java
package com.namelessmc.java_api;
import java.math.BigInteger;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.commons.lang3.StringUtils;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.namelessmc.java_api.RequestHandler.Action;
import com.namelessmc.java_api.exception.CannotSendEmailException;
import com.namelessmc.java_api.exception.InvalidUsernameException;
/**
 * Main entry point for the NamelessMC web API. All requests are delegated to
 * the wrapped {@link RequestHandler}; this class converts JSON responses into
 * the typed objects of this library.
 */
public final class NamelessAPI {

	private final RequestHandler requests;

	NamelessAPI(final RequestHandler requests) {
		this.requests = Objects.requireNonNull(requests, "Request handler is null");
	}

	RequestHandler getRequestHandler() {
		return this.requests;
	}

	public URL getApiUrl() {
		return this.getRequestHandler().getApiUrl();
	}

	/** The API key is the last path segment of the API URL. */
	public String getApiKey() {
		return getApiKey(this.getApiUrl().toString());
	}

	/** Extracts the API key (last path segment) from an API URL string, ignoring trailing slashes. */
	static String getApiKey(final String url) {
		if (url.endsWith("/")) {
			return getApiKey(StringUtils.removeEnd(url, "/"));
		}
		return StringUtils.substringAfterLast(url, "/");
	}

	/**
	 * Checks if a web API connection can be established
	 * throws {@link NamelessException} if the connection was unsuccessful
	 * @deprecated Use {@link #getWebsite()} instead and catch NamelessException there. Also use {@link Website#getParsedVersion()} to check if the version is compatible.
	 */
	@Deprecated
	public void checkWebAPIConnection() throws NamelessException {
		final JsonObject response = this.requests.get(Action.INFO);
		if (!response.has("nameless_version")) {
			// FIX: JsonObject#getAsString() throws UnsupportedOperationException for
			// non-primitive elements; use the object's string form instead so the
			// intended NamelessException is actually thrown.
			throw new NamelessException("Invalid response: " + response);
		}
	}

	/**
	 * Get all announcements
	 *
	 * @return list of current announcements
	 * @throws NamelessException if there is an error in the request
	 */
	public List<Announcement> getAnnouncements() throws NamelessException {
		final JsonObject response = this.requests.get(Action.GET_ANNOUNCEMENTS);
		return getAnnouncements(response);
	}

	/**
	 * Get all announcements visible for the player with the specified uuid
	 *
	 * @param user player to get visible announcements for
	 * @return list of current announcements visible to the player
	 * @throws NamelessException if there is an error in the request
	 */
	public List<Announcement> getAnnouncements(final NamelessUser user) throws NamelessException {
		final JsonObject response = this.requests.get(Action.GET_ANNOUNCEMENTS, "id", user.getId());
		return getAnnouncements(response);
	}

	/** Converts the "announcements" array of a response into {@link Announcement} objects. */
	private List<Announcement> getAnnouncements(final JsonObject response) {
		return jsonArrayToList(response.get("announcements").getAsJsonArray(), element -> {
			final JsonObject announcementJson = element.getAsJsonObject();
			final String content = announcementJson.get("content").getAsString();
			final String[] display = jsonToArray(announcementJson.get("display").getAsJsonArray());
			final String[] permissions = jsonToArray(announcementJson.get("permissions").getAsJsonArray());
			return new Announcement(content, display, permissions);
		});
	}

	/** Maps every element of a JSON array through {@code elementSupplier} into a list. */
	private <T> List<T> jsonArrayToList(final JsonArray array, final Function<JsonElement, T> elementSupplier) {
		return StreamSupport.stream(array.spliterator(), false).map(elementSupplier).collect(Collectors.toList());
	}

	public void submitServerInfo(final JsonObject jsonData) throws NamelessException {
		this.requests.post(Action.SERVER_INFO, jsonData);
	}

	public Website getWebsite() throws NamelessException {
		final JsonObject json = this.requests.get(Action.INFO);
		return new Website(json);
	}

	/**
	 * List registered users, optionally restricted by filters. Filters are sent
	 * as alternating name/value request parameters.
	 */
	public List<NamelessUser> getRegisteredUsers(final UserFilter<?>... filters) throws NamelessException {
		final List<Object> parameters = new ArrayList<>();
		for (final UserFilter<?> filter : filters) {
			parameters.add(filter.getName());
			parameters.add(filter.getValue().toString());
		}
		final JsonObject response = this.requests.get(Action.LIST_USERS, parameters.toArray());
		final JsonArray array = response.getAsJsonArray("users");
		final List<NamelessUser> users = new ArrayList<>(array.size());
		for (final JsonElement e : array) {
			final JsonObject o = e.getAsJsonObject();
			final int id = o.get("id").getAsInt();
			final String username = o.get("username").getAsString();
			final Optional<UUID> uuid;
			if (o.has("uuid")) {
				final String uuidString = o.get("uuid").getAsString();
				// The website sends "none" or an empty string when no UUID is linked.
				if (uuidString == null || uuidString.equals("none") || uuidString.equals("")) {
					uuid = Optional.empty();
				} else {
					uuid = Optional.of(NamelessAPI.websiteUuidToJavaUuid(uuidString));
				}
			} else {
				uuid = Optional.empty();
			}
			users.add(new NamelessUser(this, id, username, uuid, -1L));
		}
		return Collections.unmodifiableList(users);
	}

	public Optional<NamelessUser> getUser(final int id) throws NamelessException {
		final NamelessUser user = getUserLazy(id);
		if (user.exists()) {
			return Optional.of(user);
		} else {
			return Optional.empty();
		}
	}

	public Optional<NamelessUser> getUser(final String username) throws NamelessException {
		final NamelessUser user = getUserLazy(username);
		if (user.exists()) {
			return Optional.of(user);
		} else {
			return Optional.empty();
		}
	}

	public Optional<NamelessUser> getUser(final UUID uuid) throws NamelessException {
		final NamelessUser user = getUserLazy(uuid);
		if (user.exists()) {
			return Optional.of(user);
		} else {
			return Optional.empty();
		}
	}

	public Optional<NamelessUser> getUserByDiscordId(final long discordId) throws NamelessException {
		final NamelessUser user = getUserLazyDiscord(discordId);
		if (user.exists()) {
			return Optional.of(user);
		} else {
			return Optional.empty();
		}
	}

	/**
	 * Construct a NamelessUser object without making API requests (so without checking if the user exists)
	 * @param id NamelessMC user id
	 * @return Nameless user object, never null
	 * @throws NamelessException
	 */
	public NamelessUser getUserLazy(final int id) throws NamelessException {
		return new NamelessUser(this, id, null, null, -1L);
	}

	/**
	 * Construct a NamelessUser object without making API requests (so without checking if the user exists)
	 * @param username NamelessMC username
	 * @return Nameless user object, never null
	 * @throws NamelessException
	 */
	public NamelessUser getUserLazy(final String username) throws NamelessException {
		return new NamelessUser(this, -1, username, null, -1L);
	}

	/**
	 * Construct a NamelessUser object without making API requests (so without checking if the user exists)
	 * @param uuid Minecraft UUID
	 * @return Nameless user object, never null
	 * @throws NamelessException
	 */
	public NamelessUser getUserLazy(final UUID uuid) throws NamelessException {
		return new NamelessUser(this, -1, null, Optional.of(uuid), -1L);
	}

	/**
	 * Construct a NamelessUser object without making API requests (so without checking if the user exists)
	 * @param username NamelessMC username
	 * @param uuid Minecraft UUID
	 * @return Nameless user object, never null
	 * @throws NamelessException
	 */
	public NamelessUser getUserLazy(final String username, final UUID uuid) throws NamelessException {
		// FIX: previously the username argument was discarded (null was passed).
		return new NamelessUser(this, -1, username, Optional.of(uuid), -1L);
	}

	/**
	 * Construct a NamelessUser object without making API requests (so without checking if the user exists)
	 * @param id NamelessMC user id
	 * @param username NamelessMC username
	 * @param uuid Minecraft UUID
	 * @return Nameless user object, never null
	 * @throws NamelessException
	 */
	public NamelessUser getUserLazy(final int id, final String username, final UUID uuid) throws NamelessException {
		return new NamelessUser(this, id, username, Optional.of(uuid), -1L);
	}

	/**
	 * Construct a NamelessUser object without making API requests (so without checking if the user exists)
	 * @param discordId Discord user id
	 * @return Nameless user object, never null
	 * @throws NamelessException
	 */
	public NamelessUser getUserLazyDiscord(final long discordId) throws NamelessException {
		return new NamelessUser(this, -1, null, null, discordId);
	}

	/**
	 * Get NamelessMC group by ID
	 * @param id Group id
	 * @return Optional with a group if the group exists, empty optional if it doesn't
	 * @throws NamelessException
	 */
	public Optional<Group> getGroup(final int id) throws NamelessException {
		final JsonObject response = this.requests.get(Action.GROUP_INFO, "id", id);
		final JsonArray array = response.getAsJsonArray("groups");
		if (array.size() == 0) {
			return Optional.empty();
		} else {
			// FIX: the group data is inside the "groups" array checked above
			// (as in the name-based lookup); there is no top-level "group" object.
			return Optional.of(new Group(array.get(0).getAsJsonObject()));
		}
	}

	/**
	 * Get NamelessMC groups by name
	 * @param name NamelessMC groups name
	 * @return List of groups with this name, empty if there are no groups with this name.
	 * @throws NamelessException
	 */
	public List<Group> getGroup(final String name) throws NamelessException {
		Objects.requireNonNull(name, "Group name is null");
		final JsonObject response = this.requests.get(Action.GROUP_INFO, "name", name);
		return groupListFromJsonArray(response.getAsJsonArray("groups"));
	}

	/**
	 * Get a list of all groups on the website
	 * @return list of groups
	 * @throws NamelessException
	 */
	public List<Group> getAllGroups() throws NamelessException {
		final JsonObject response = this.requests.get(Action.GROUP_INFO);
		return groupListFromJsonArray(response.getAsJsonArray("groups"));
	}

	/** Get the ids of all groups on the website. */
	public int[] getAllGroupIds() throws NamelessException {
		final JsonObject response = this.requests.get(Action.GROUP_INFO);
		return StreamSupport.stream(response.getAsJsonArray("groups").spliterator(), false)
				.map(JsonElement::getAsJsonObject)
				.mapToInt(o -> o.get("id").getAsInt())
				.toArray();
	}

	private List<Group> groupListFromJsonArray(final JsonArray array) {
		return StreamSupport.stream(array.spliterator(), false)
				.map(JsonElement::getAsJsonObject)
				.map(Group::new)
				.collect(Collectors.toList());
	}

	/**
	 * Registers a new account. The user will be sent an email to set a password.
	 *
	 * @param username Username
	 * @param email Email address
	 * @param uuid (for minecraft integration)
	 * @return Email verification disabled: A link which the user needs to click to complete registration
	 * <br>Email verification enabled: An empty string (the user needs to check their email to complete registration)
	 * @throws NamelessException
	 * @throws InvalidUsernameException
	 * @throws CannotSendEmailException
	 */
	public Optional<String> registerUser(final String username, final String email, final Optional<UUID> uuid) throws NamelessException, InvalidUsernameException, CannotSendEmailException {
		Objects.requireNonNull(username, "Username is null");
		Objects.requireNonNull(email, "Email address is null");
		Objects.requireNonNull(uuid, "UUID optional is null");
		final JsonObject post = new JsonObject();
		post.addProperty("username", username);
		post.addProperty("email", email);
		if (uuid.isPresent()) {
			post.addProperty("uuid", uuid.get().toString());
		}
		try {
			final JsonObject response = this.requests.post(Action.REGISTER, post);
			if (response.has("link")) {
				return Optional.of(response.get("link").getAsString());
			} else {
				return Optional.empty();
			}
		} catch (final ApiError e) {
			// Translate the two expected API error codes into typed exceptions.
			if (e.getError() == ApiError.INVALID_USERNAME) {
				throw new InvalidUsernameException();
			} else if (e.getError() == ApiError.UNABLE_TO_SEND_REGISTRATION_EMAIL) {
				throw new CannotSendEmailException();
			} else {
				throw e;
			}
		}
	}

	/**
	 * Registers a new account without a linked Minecraft UUID.
	 * @see #registerUser(String, String, Optional)
	 */
	public Optional<String> registerUser(final String username, final String email) throws NamelessException, InvalidUsernameException, CannotSendEmailException {
		// FIX: passing null made the requireNonNull check in the three-argument
		// overload fail unconditionally; an empty Optional is the correct "no uuid".
		return registerUser(username, email, Optional.empty());
	}

	public void verifyDiscord(final String verificationToken, final long discordUserId, final String discordUsername) throws NamelessException {
		Objects.requireNonNull(verificationToken, "Verification token is null");
		Objects.requireNonNull(discordUsername, "Discord username is null");
		final JsonObject json = new JsonObject();
		json.addProperty("token", verificationToken);
		json.addProperty("discord_id", discordUserId + ""); // website needs it as a string
		json.addProperty("discord_username", discordUsername);
		this.requests.post(Action.VERIFY_DISCORD, json);
	}

	public void setDiscordBotUrl(final URL url) throws NamelessException {
		Objects.requireNonNull(url, "Bot url is null");
		final JsonObject json = new JsonObject();
		json.addProperty("url", url.toString());
		this.requests.post(Action.UPDATE_DISCORD_BOT_SETTINGS, json);
	}

	public void setDiscordGuildId(final long guildId) throws NamelessException {
		final JsonObject json = new JsonObject();
		json.addProperty("guild_id", guildId + ""); // website needs it as a string
		this.requests.post(Action.UPDATE_DISCORD_BOT_SETTINGS, json);
	}

	public void setDiscordBotUser(final String username, final long userId) throws NamelessException {
		Objects.requireNonNull(username, "Bot username is null");
		final JsonObject json = new JsonObject();
		json.addProperty("bot_username", username);
		json.addProperty("bot_user_id", userId + ""); // website needs it as a string
		this.requests.post(Action.UPDATE_DISCORD_BOT_SETTINGS, json);
	}

	/** Update all Discord bot settings (url, guild and bot user) in a single request. */
	public void setDiscordBotSettings(final URL url, final long guildId, final String username, final long userId) throws NamelessException {
		Objects.requireNonNull(url, "Bot url is null");
		Objects.requireNonNull(username, "Bot username is null");
		final JsonObject json = new JsonObject();
		json.addProperty("url", url.toString());
		json.addProperty("guild_id", guildId + "");
		json.addProperty("bot_username", username);
		json.addProperty("bot_user_id", userId + "");
		this.requests.post(Action.UPDATE_DISCORD_BOT_SETTINGS, json);
	}

	/** Send the full list of Discord roles (id to name) to the website. */
	public void submitDiscordRoleList(final Map<Long, String> discordRoles) throws NamelessException {
		final JsonArray roles = new JsonArray();
		discordRoles.forEach((id, name) -> {
			final JsonObject role = new JsonObject();
			role.addProperty("id", id);
			role.addProperty("name", name);
			roles.add(role);
		});
		final JsonObject json = new JsonObject();
		json.add("roles", roles);
		this.requests.post(Action.SUBMIT_DISCORD_ROLE_LIST, json);
	}

	/** Update a single Discord user's username on the website. */
	public void updateDiscordUsername(final long discordUserId, final String discordUsername) throws NamelessException {
		Objects.requireNonNull(discordUsername, "Discord username is null");
		final JsonObject user = new JsonObject();
		user.addProperty("id", discordUserId);
		user.addProperty("name", discordUsername);
		final JsonArray users = new JsonArray();
		users.add(user);
		final JsonObject json = new JsonObject();
		json.add("users", users);
		this.requests.post(Action.UPDATE_DISCORD_USERNAMES, json);
	}

	/**
	 * Bulk variant of {@link #updateDiscordUsername(long, String)}; the two
	 * arrays are matched by index and must have the same length.
	 */
	public void updateDiscordUsernames(final long[] discordUserIds, final String[] discordUsernames) throws NamelessException {
		Objects.requireNonNull(discordUserIds, "User ids array is null");
		Objects.requireNonNull(discordUsernames, "Usernames array is null");
		if (discordUserIds.length != discordUsernames.length) {
			throw new IllegalArgumentException("discord user ids and discord usernames must be of same length");
		}
		if (discordUserIds.length == 0) {
			return;
		}
		final JsonArray users = new JsonArray();
		for (int i = 0; i < discordUserIds.length; i++) {
			final JsonObject user = new JsonObject();
			user.addProperty("id", discordUserIds[i]);
			user.addProperty("name", discordUsernames[i]);
			users.add(user);
		}
		final JsonObject json = new JsonObject();
		json.add("users", users);
		this.requests.post(Action.UPDATE_DISCORD_USERNAMES, json);
	}

	@Deprecated
	static String[] jsonToArray(final JsonArray jsonArray) {
		final List<String> list = new ArrayList<>();
		jsonArray.iterator().forEachRemaining((element) -> list.add(element.getAsString()));
		return list.toArray(new String[]{});
	}

	/** Converts a dashless UUID string (as sent by the website) into a {@link UUID}. */
	static UUID websiteUuidToJavaUuid(final String uuid) {
		Objects.requireNonNull(uuid, "UUID string is null");
		// Website sends UUIDs without dashes, so we can't use UUID#fromString
		// https://stackoverflow.com/a/30760478
		try {
			final BigInteger a = new BigInteger(uuid.substring(0, 16), 16);
			final BigInteger b = new BigInteger(uuid.substring(16, 32), 16);
			return new UUID(a.longValue(), b.longValue());
		} catch (final IndexOutOfBoundsException e) {
			throw new IllegalArgumentException("Invalid uuid: '" + uuid + "'", e);
		}
	}

	public static NamelessApiBuilder builder() {
		return new NamelessApiBuilder();
	}

}
|
<reponame>snehansh/Patient-Check-In<gh_stars>1-10
import React from 'react';
import ReactDOMServer from 'react-dom/server';
import App from '../app/components/App';
const serverRender = () => {
return ReactDOMServer.renderToString(
<App />
);
};
export default serverRender;
|
<filename>infrastructure/external/spotify/types.go<gh_stars>0
package spotify

import "github.com/alhamsya/boilerplate-go/lib/helpers/config"

// Spotify is the client wrapper for this external integration; it carries
// the service configuration used when making requests.
type Spotify struct {
	Cfg *config.ServiceConfig
}

// Profile models a Spotify user-profile JSON payload.
// NOTE(review): the field set matches Spotify's "current user's profile"
// response shape — confirm against the endpoint actually called.
type Profile struct {
	Country         string          `json:"country"`
	DisplayName     string          `json:"display_name"`
	ExplicitContent ExplicitContent `json:"explicit_content"`
	ExternalUrls    ExternalUrls    `json:"external_urls"`
	Followers       Followers       `json:"followers"`
	Href            string          `json:"href"`
	ID              string          `json:"id"`
	Images          []Images        `json:"images"`
	Product         string          `json:"product"`
	Type            string          `json:"type"`
	URI             string          `json:"uri"`
}

// ExplicitContent holds the user's explicit-content filter settings.
type ExplicitContent struct {
	FilterEnabled bool `json:"filter_enabled"`
	FilterLocked  bool `json:"filter_locked"`
}

// ExternalUrls holds external links for an object (here: the Spotify web URL).
type ExternalUrls struct {
	Spotify string `json:"spotify"`
}

// Followers holds follower information; Href is interface{} because the
// API may send null for it.
type Followers struct {
	Href  interface{} `json:"href"`
	Total int         `json:"total"`
}

// Images describes one profile image; Height/Width are interface{} because
// the API may send null instead of a number.
type Images struct {
	Height interface{} `json:"height"`
	URL    string      `json:"url"`
	Width  interface{} `json:"width"`
}
|
#!/bin/bash
# Container entrypoint helper for JupyterLab.
# Run Jupyter in foreground if $JUPYTER_FG is set
if [[ "${JUPYTER_FG}" == "true" ]]; then
  # Foreground mode: block on the JupyterLab process itself (no auth token,
  # listening on all interfaces) so the container stays alive with it.
  jupyter-lab --allow-root --ip=0.0.0.0 --no-browser --NotebookApp.token=''
  exit 0
else
  # Background mode: delegate startup to the bundled script (output discarded)
  # and print connection hints for the host machine.
  source /rapids/utils/start-jupyter.sh > /dev/null
  echo "A JupyterLab server has been started!"
  echo "To access it, visit http://localhost:8888 on your host machine."
  echo 'Ensure the following arguments were added to "docker run" to expose the JupyterLab server to your host machine:
    -p 8888:8888 -p 8787:8787 -p 8786:8786'
  # Remind the user to bind-mount local folders if the host directory is absent.
  [ ! -d "/rapids/notebooks/host/" ] && echo "Make local folders visible by bind mounting to /rapids/notebooks/host"
fi
|
<reponame>jgtb313/fast-serverless-framework<filename>src/Endpoints/register.js<gh_stars>0
import bootstrap from '../bootstrap'
import state from '../state'
import { getContext, kebabize, asyncPipe } from '../utils'

/**
 * Build the request object handed to middlewares and the handler from a
 * raw API Gateway event.
 */
const toRequest = (event, id) => {
  const { queryStringParameters, pathParameters, body, headers } = event
  return {
    id,
    headers,
    query: queryStringParameters ?? {},
    params: pathParameters ?? {},
    body: body ? JSON.parse(body) : {}
  }
}

/**
 * Register an endpoint in the framework state and return its Lambda handler.
 * The handler runs beforeEach middlewares, the endpoint middlewares, the
 * handler itself and afterEach middlewares as one async pipeline.
 */
const register = ({ method, params = '', middlewares = [], isPublic, handler }) => {
  const { module, version, file, path } = getContext()

  state.addEndpoint({
    module,
    version,
    name: `${module}-${file}`,
    path,
    options: {
      method,
      path: kebabize(file),
      params
    }
  })

  return async (event, context) => {
    const requestId = context.awsRequestId
    try {
      await bootstrap()

      const options = { isPublic }
      const beforeEach = state.config.endpoints?.beforeEach || []
      const afterEach = state.config.endpoints?.afterEach || []

      const execute = asyncPipe(
        ...beforeEach.map(middleware => middleware(options)),
        ...middlewares.map(middleware => middleware(options)),
        handler,
        ...afterEach.map(middleware => middleware(options))
      )

      const result = await execute(toRequest(event, requestId))

      return {
        statusCode: 200,
        body: JSON.stringify(result)
      }
    } catch (error) {
      console.log(error)
      // NOTE(review): the error response exposes state.config and the stack
      // trace to the caller — confirm this is intended outside development.
      return {
        config: state.config,
        statusCode: error.status ?? 500,
        body: JSON.stringify({ message: error.message, stack: error.stack })
      }
    }
  }
}

export default register
|
#!/bin/sh
# Build the NLua solution, then run the test suite with nunit-console and
# write JUnit-style XML results to the file named by the first argument.

xbuild NLua.Net45.sln /p:Configuration=ReleaseKopiLua

# FIX: abort if the tests directory is missing instead of running
# nunit-console from the wrong working directory.
cd tests/ || exit 1

export MONO_PATH="/Library/Frameworks/Mono.framework/Libraries/mono/4.5/"

# FIX: quote "$1" so an output path containing spaces stays one argument.
nunit-console NLuaTest.dll -xml="$1"
|
# Licensed Materials - Property of IBM
# 5725-I43 (C) Copyright IBM Corp. 2011, 2015. All Rights Reserved.
# US Government Users Restricted Rights - Use, duplication or
# disclosure restricted by GSA ADP Schedule Contract with IBM Corp.
#!/usr/bin/bash
# usage: print the help text for this script (all supported flags and the
# three invocation modes) and exit with status 1.
usage()
{
echo
echo " Running the MobileFirst Operational Analytics Image as a Container Group "
echo " ---------------------------------------------------------------------------------- "
echo " Use this script to run the MobileFirst Operational Analytics"
echo " image as a container group on the IBM Containers service on Bluemix."
echo " Prerequisite: The prepareanalytics.sh script must be run before running this script."
echo
echo " Silent Execution (arguments provided as command line arguments): "
echo "   USAGE: startanalyticsgroup.sh <command line arguments> "
echo "   command-line arguments: "
echo "     -t | --tag ANALYTICS_IMAGE_TAG   The name of the analytics image"
echo "     -gn | --name ANALYTICS_CONTAINER_GROUP_NAME   The name of the analytics container group"
echo "     -gh | --host ANALYTICS_CONTAINER_GROUP_HOST   The host name of the route"
echo "     -gs | --domain ANALYTICS_CONTAINER_GROUP_DOMAIN   The domain name of the route"
echo "     -gm | --min ANALYTICS_CONTAINER_GROUP_MIN   (Optional) The minimum number of instances. The default value is 1"
echo "     -gx | --max ANALYTICS_CONTAINER_GROUP_MAX   (Optional) The maximum number of instances. The default value is 2"
echo "     -gd | --desired ANALYTICS_CONTAINER_GROUP_DESIRED   (Optional) The desired number of instances. The default value is 2"
echo "     -a | --auto ENABLE_AUTORECOVERY   Enable Auto-recovery option for the container instances. The default is N"
echo "     -tr | --trace TRACE_SPEC   (Optional) Trace specification to be applied to MobileFirst Server"
echo "     -ml | --maxlog MAX_LOG_FILES   (Optional) Maximum number of log files to maintain before overwriting"
echo "     -ms | --maxlogsize MAX_LOG_FILE_SIZE   (Optional) Maximum size of a log file"
echo "     -e | --env MFPF_PROPERTIES   (Optional) MFP Analytics related properties as comma separated key:value pairs"
echo "     -m | --memory SERVER_MEM   (Optional) Assign a memory limit to the container in MB. Accepted values"
echo "          are 1024 (default), 2048,..."
echo "     -v | --volume ENABLE_VOLUME   (Optional) Enable mounting volume for the container logs. Accepted values are Y (default) or N"
echo "     -ev | --enabledatavolume ENABLE_ANALYTICS_DATA_VOLUME   (Optional) Enable mounting volume for analytics data. Accepted values are Y or N (default)"
echo "     -av | --datavolumename ANALYTICS_DATA_VOLUME_NAME   (Optional) Specify name of the volume to be created and mounted for analytics data. Default value is mfpf_analytics_<ANALYTICS_CONTAINER_GROUP_NAME>"
echo "     -ad | --analyticsdatadirectory ANALYTICS_DATA_DIRECTORY   (Optional) Specify the directory to be used for storing analytics data. Default value is /analyticsData"
echo
echo " Silent Execution (arguments loaded from file): "
echo "   USAGE: startanalyticsgroup.sh <path to the file from which arguments are read>"
echo "          See args/startanalyticsgroup.properties for the list of arguments."
echo
echo " Interactive Execution: "
echo "   USAGE: startanalyticsgroup.sh"
echo
exit 1
}
# readParams: interactive mode — prompt for every configuration value.
# The reader helpers (fnReadInput, fnReadNumericInput, readBoolean) come
# from common.sh; each re-prompts with ERROR_MSG until the input is valid.
readParams()
{
# Read the name of the MobileFirst Operational Analytics image
#-------------------------------------------------------------
INPUT_MSG="Specify the name of the analytics image. Should be of form registryUrl/repositoryNamespace/imagename (mandatory) : "
ERROR_MSG="Name of the analytics image cannot be empty. Specify the name for the analytics image. Should be of form registryUrl/repositoryNamespace/imagename (mandatory) : "
ANALYTICS_IMAGE_TAG=$(fnReadInput "$INPUT_MSG" "$ERROR_MSG")
# Read the name of the MobileFirst Operational Analytics container group
#-----------------------------------------------------------------------
INPUT_MSG="Specify the name for the analytics container group (mandatory) : "
ERROR_MSG="Container group name cannot be empty. Specify the name for the analytics container group (mandatory) : "
ANALYTICS_CONTAINER_GROUP_NAME=$(fnReadInput "$INPUT_MSG" "$ERROR_MSG")
# Read the minimum number of instances
#-------------------------------------
INPUT_MSG="Specify the minimum number of instances. The default value is 1 (optional) : "
ERROR_MSG="Error due to non-numeric input. Specify the minimum number of instances. The default value is 1 (optional) : "
ANALYTICS_CONTAINER_GROUP_MIN=$(fnReadNumericInput "$INPUT_MSG" "$ERROR_MSG" "1")
# Read the maximum number of instances
#-------------------------------------
INPUT_MSG="Specify the maximum number of instances. The default value is 2 (optional) : "
ERROR_MSG="Error due to non-numeric input. Specify the maximum number of instances. The default value is 2 (optional) : "
ANALYTICS_CONTAINER_GROUP_MAX=$(fnReadNumericInput "$INPUT_MSG" "$ERROR_MSG" "2")
# Read the desired number of instances
#-------------------------------------
INPUT_MSG="Specify the desired number of instances. The default value is 2 (optional) : "
ERROR_MSG="Error due to non-numeric input. Specify the desired number of instances. The default value is 2 (optional) : "
ANALYTICS_CONTAINER_GROUP_DESIRED=$(fnReadNumericInput "$INPUT_MSG" "$ERROR_MSG" "2")
# Read the autorecovery details
#------------------------------------------------
INPUT_MSG="Enable autorecovery for the MobileFirst Server container instances. Accepted values are Y or N. The default value is N (optional) : "
ERROR_MSG="Input should be either Y or N. Enable autorecovery for the MobileFirst Server container instances. Accepted values are Y or N. The default value is N (optional) : "
ENABLE_AUTORECOVERY=$(readBoolean "$INPUT_MSG" "$ERROR_MSG" "N")
# Read the host name of the route
#--------------------------------
INPUT_MSG="Specify the host name of the route (special characters are not allowed) (mandatory) : "
ERROR_MSG="Host name cannot be empty. Specify the host name of the route (special characters are not allowed) (mandatory) : "
ANALYTICS_CONTAINER_GROUP_HOST=$(fnReadInput "$INPUT_MSG" "$ERROR_MSG")
# Read the domain of the route
#-----------------------------
INPUT_MSG="Specify the domain of the route (mandatory) : "
ERROR_MSG="Domain cannot be empty. Specify the domain of the route (mandatory) : "
ANALYTICS_CONTAINER_GROUP_DOMAIN=$(fnReadInput "$INPUT_MSG" "$ERROR_MSG")
# Read the memory for the Analytics container
#--------------------------------------------
INPUT_MSG="Specify the memory size limit (in MB) for the analytics container. Accepted values are 1024, 2048,... The default value is 1024 MB (optional) : "
ERROR_MSG="Error due to non-numeric input. Specify a valid value. Valid values are 1024, 2048,... The default value is 1024 MB. (optional) : "
SERVER_MEM=$(fnReadNumericInput "$INPUT_MSG" "$ERROR_MSG" "1024")
# Read the mount volume details
#----------------------------------------
INPUT_MSG="Enable mounting volume for the analytics container logs. Accepted values are Y or N. The default value is N (optional) : "
ERROR_MSG="Input should be either Y or N. Enable mounting volume for the analytics container logs. Accepted values are Y or N. The default value is N (optional) : "
ENABLE_VOLUME=$(readBoolean "$INPUT_MSG" "$ERROR_MSG" "N")
# Read the analytics data volume details
#----------------------------------------
INPUT_MSG="Enable mounting volume for analytics data. Accepted values are Y or N. The default value is N (optional) : "
ERROR_MSG="Input should be either Y or N. Enable mounting volume for analytics data. Accepted values are Y or N. The default value is N (optional) : "
ENABLE_ANALYTICS_DATA_VOLUME=$(readBoolean "$INPUT_MSG" "$ERROR_MSG" "N")
# The volume name is only relevant when the data volume is enabled.
if [ "$ENABLE_ANALYTICS_DATA_VOLUME" = "Y" ] || [ "$ENABLE_ANALYTICS_DATA_VOLUME" = "y" ]
then
read -p "Specify name of the volume to be created and mounted for analytics data. Default value is mfpf_analytics_<ANALYTICS_CONTAINER_GROUP_NAME> (optional) : " ANALYTICS_DATA_VOLUME_NAME
fi
read -p "Specify the directory to be used for storing analytics data. Default value is /analyticsData (optional) : " ANALYTICS_DATA_DIRECTORY
# Read the trace details
#---------------------
read -p "Provide the Trace specification to be applied to the MobileFirst Analytics. The default value is *=info (optional): " TRACE_SPEC
# Read the maximum number of log files
#-------------------------------------
INPUT_MSG="Provide the maximum number of log files to maintain before overwriting them. The default value is 5 (optional): "
ERROR_MSG="Error due to non-numeric input. Provide the maximum number of log files to maintain before overwriting them. The default value is 5 (optional): "
MAX_LOG_FILES=$(fnReadNumericInput "$INPUT_MSG" "$ERROR_MSG" "5")
# Maximum size of a log file in MB
#----------------------------------
INPUT_MSG="Maximum size of a log file in MB. The default value is 20 (optional): "
ERROR_MSG="Error due to non-numeric input. Specify the maximum size of a log file in MB. The default value is 20 (optional): "
MAX_LOG_FILE_SIZE=$(fnReadNumericInput "$INPUT_MSG" "$ERROR_MSG" "20")
# Specify the related MobileFirst Platform Foundation properties
#---------------------------------------------------------------
read -p "Specify the MobileFirst Operational Analytics related properties as comma separated key:value pairs (optional) : " MFPF_PROPERTIES
# validateParams: check mandatory values, validate numeric/boolean inputs
# (isNumber / validateBoolean from common.sh return "1" on failure), and
# fill in defaults for everything optional.  Exits on any invalid value.
# Fixes vs. previous version:
#   - the ENABLE_ANALYTICS_DATA_VOLUME error message wrongly named
#     ENABLE_VOLUME;
#   - "Values must either Y / N" typo;
#   - the bare [ -z $VAR ] tests are now quoted so they cannot break on
#     values containing whitespace.
validateParams()
{
if [ -z "$ANALYTICS_IMAGE_TAG" ]
then
echo Analytics Image Name is empty. A mandatory argument must be specified. Exiting...
exit 0
fi
if [ -z "$ANALYTICS_CONTAINER_GROUP_NAME" ]
then
echo Analytics Container Group Name is empty. A mandatory argument must be specified. Exiting...
exit 0
fi
if [ -z "$ANALYTICS_CONTAINER_GROUP_HOST" ]
then
echo Analytics Container Group Host is empty. A mandatory argument must be specified. Exiting...
exit 0
fi
# Reject special characters in the host: it becomes part of the route URL.
if [ `expr "$ANALYTICS_CONTAINER_GROUP_HOST" : ".*[!@#\$%^\&*()_+].*"` -gt 0 ]
then
echo Analytics Container Group Host name should not contain special characters. Exiting...
exit 0
fi
if [ -z "$ANALYTICS_CONTAINER_GROUP_DOMAIN" ]
then
echo Analytics Container Group Domain is empty. A mandatory argument must be specified. Exiting...
exit 0
fi
# Ports are expected from ../usr/env/server.env sourced by the main script.
if [ -z "$ANALYTICS_HTTPPORT" ]
then
echo ANALYTICS_HTTPPORT is empty. A mandatory argument must be specified. Exiting...
exit 0
fi
if [ -z "$ANALYTICS_HTTPSPORT" ]
then
echo ANALYTICS_HTTPSPORT is empty. A mandatory argument must be specified. Exiting...
exit 0
fi
if [ -z "$ANALYTICS_CONTAINER_GROUP_MIN" ]
then
ANALYTICS_CONTAINER_GROUP_MIN=1;
fi
if [ "$(isNumber $ANALYTICS_CONTAINER_GROUP_MIN)" = "1" ]
then
echo Required Analytics Container Group Min No. of Instances must be a Number. Exiting...
exit 0
fi
if [ -z "$ANALYTICS_CONTAINER_GROUP_MAX" ]
then
ANALYTICS_CONTAINER_GROUP_MAX=2;
fi
if [ "$(isNumber $ANALYTICS_CONTAINER_GROUP_MAX)" = "1" ]
then
echo Required Analytics Container Group Max No. of Instances must be a Number. Exiting...
exit 0
fi
if [ -z "$ANALYTICS_CONTAINER_GROUP_DESIRED" ]
then
ANALYTICS_CONTAINER_GROUP_DESIRED=2;
fi
if [ "$(isNumber $ANALYTICS_CONTAINER_GROUP_DESIRED)" = "1" ]
then
echo Required Analytics Container Group Desired No. of Instances must be a Number. Exiting...
exit 0
fi
if [ -z "$ENABLE_AUTORECOVERY" ]
then
ENABLE_AUTORECOVERY=N
fi
if [ "$(validateBoolean $ENABLE_AUTORECOVERY)" = "1" ]
then
echo Invalid value for ENABLE_AUTORECOVERY. Values must be either Y / N. Exiting...
exit 0
fi
if [ -z "$SERVER_MEM" ]
then
SERVER_MEM=1024
fi
if [ "$(isNumber $SERVER_MEM)" = "1" ]
then
echo Required Analytics Container Group Memory must be a Number. Exiting...
exit 0
fi
if [ -z "$ENABLE_VOLUME" ]
then
ENABLE_VOLUME=N
fi
if [ "$(validateBoolean $ENABLE_VOLUME)" = "1" ]
then
echo Invalid Value for ENABLE_VOLUME. Values must be either Y / N. Exiting...
exit 0
fi
if [ -z "$ENABLE_ANALYTICS_DATA_VOLUME" ]
then
ENABLE_ANALYTICS_DATA_VOLUME=N
fi
if [ "$(validateBoolean $ENABLE_ANALYTICS_DATA_VOLUME)" = "1" ]
then
echo Invalid Value for ENABLE_ANALYTICS_DATA_VOLUME. Values must be either Y / N. Exiting...
exit 0
fi
if [ -z "$ANALYTICS_DATA_VOLUME_NAME" ]
then
ANALYTICS_DATA_VOLUME_NAME=mfpf_analytics_$ANALYTICS_CONTAINER_GROUP_NAME
fi
if [ -z "$ANALYTICS_DATA_DIRECTORY" ]
then
ANALYTICS_DATA_DIRECTORY=/analyticsData
fi
}
# --- Main: run from the script's own directory so relative paths resolve,
# --- then load shared helpers and the server environment (ports etc.).
cd "$( dirname "$0" )"
source ./common.sh
source ../usr/env/server.env
# Three invocation modes:
#   no args               -> interactive prompts (readParams)
#   one arg that is a file-> source it as a properties file
#   -h/--help             -> usage
#   otherwise             -> parse command-line flags below
if [ $# == 0 ]
then
readParams
elif [ "$#" -eq 1 -a -f "$1" ]
then
source "$1"
elif [ "$1" = "-h" -o "$1" = "--help" ]
then
usage
else
# Flag parser: each option consumes its value with an inner `shift`;
# the trailing `shift` advances past the flag itself.
while [ $# -gt 0 ]; do
case "$1" in
-t | --tag)
ANALYTICS_IMAGE_TAG="$2";
shift
;;
-gn | --name)
ANALYTICS_CONTAINER_GROUP_NAME="$2";
shift
;;
-gm | --min)
ANALYTICS_CONTAINER_GROUP_MIN="$2";
shift
;;
-gx | --max)
ANALYTICS_CONTAINER_GROUP_MAX="$2";
shift
;;
-gd | --desired)
ANALYTICS_CONTAINER_GROUP_DESIRED="$2";
shift
;;
-a | --auto)
ENABLE_AUTORECOVERY="$2";
shift
;;
-gh | --host)
ANALYTICS_CONTAINER_GROUP_HOST="$2";
shift
;;
-gs | --domain)
ANALYTICS_CONTAINER_GROUP_DOMAIN="$2";
shift
;;
-m | --memory)
SERVER_MEM="$2";
shift
;;
-tr | --trace)
TRACE_SPEC="$2";
shift
;;
-ml | --maxlog)
MAX_LOG_FILES="$2";
shift
;;
-ms | --maxlogsize)
MAX_LOG_FILE_SIZE="$2";
shift
;;
-e | --env)
MFPF_PROPERTIES="$2";
shift
;;
-v | --volume)
ENABLE_VOLUME="$2";
shift
;;
-ev | --enabledatavolume)
ENABLE_ANALYTICS_DATA_VOLUME="$2";
shift
;;
-av | --datavolumename)
ANALYTICS_DATA_VOLUME_NAME="$2";
shift
;;
-ad | --analyticsdatadirectory)
ANALYTICS_DATA_DIRECTORY="$2";
shift
;;
# Any unrecognized flag prints help and exits.
*)
usage
;;
esac
shift
done
fi
validateParams
#main
# Echo back the effective configuration before acting on it.
echo "Arguments : "
echo "----------- "
echo
echo "ANALYTICS_IMAGE_NAME : " $ANALYTICS_IMAGE_TAG
echo "ANALYTICS_CONTAINER_GROUP_NAME : " $ANALYTICS_CONTAINER_GROUP_NAME
echo "ANALYTICS_CONTAINER_GROUP_MIN : " $ANALYTICS_CONTAINER_GROUP_MIN
echo "ANALYTICS_CONTAINER_GROUP_MAX : " $ANALYTICS_CONTAINER_GROUP_MAX
echo "ANALYTICS_CONTAINER_GROUP_DESIRED : " $ANALYTICS_CONTAINER_GROUP_DESIRED
echo "ENABLE_AUTORECOVERY : " $ENABLE_AUTORECOVERY
echo "ANALYTICS_CONTAINER_GROUP_HOST : " $ANALYTICS_CONTAINER_GROUP_HOST
echo "ANALYTICS_CONTAINER_GROUP_DOMAIN : " $ANALYTICS_CONTAINER_GROUP_DOMAIN
echo "SERVER_MEM : " $SERVER_MEM
echo "TRACE_SPEC : " $TRACE_SPEC
echo "MAX_LOG_FILES : " $MAX_LOG_FILES
echo "MAX_LOG_FILE_SIZE : " $MAX_LOG_FILE_SIZE
echo "MFPF_PROPERTIES : " $MFPF_PROPERTIES
echo "ENABLE_VOLUME : " $ENABLE_VOLUME
echo "ENABLE_ANALYTICS_DATA_VOLUME : " $ENABLE_ANALYTICS_DATA_VOLUME
echo "ANALYTICS_DATA_VOLUME_NAME : " $ANALYTICS_DATA_VOLUME_NAME
echo "ANALYTICS_DATA_DIRECTORY : " $ANALYTICS_DATA_DIRECTORY
echo
# NOTE(review): these two ports are set but never used in the command built
# below — presumably consumed elsewhere or leftover; confirm before removing.
ANALYTICS_COMM_PORT=9600
ANALYTICS_DEBUG_PORT=9500
# Incrementally build the `cf ic group create` command line.
cmd="cf ic group create --name $ANALYTICS_CONTAINER_GROUP_NAME -n $ANALYTICS_CONTAINER_GROUP_HOST -d $ANALYTICS_CONTAINER_GROUP_DOMAIN -m $SERVER_MEM --min $ANALYTICS_CONTAINER_GROUP_MIN --max $ANALYTICS_CONTAINER_GROUP_MAX --desired $ANALYTICS_CONTAINER_GROUP_DESIRED -p $ANALYTICS_HTTPPORT"
if [ "$ENABLE_AUTORECOVERY" = "Y" ] || [ "$ENABLE_AUTORECOVERY" = "y" ]
then
cmd="$cmd --auto"
fi
# Log volumes: createVolumes (common.sh) creates them; mount and tell the
# container where to write its logs.
if [ "$ENABLE_VOLUME" = "Y" ] || [ "$ENABLE_VOLUME" = "y" ]
then
createVolumes
cmd="$cmd -v $SYSVOL_NAME:$SYSVOL_PATH"
cmd="$cmd -v $LIBERTYVOL_NAME:$LIBERTYVOL_PATH"
cmd="$cmd --env LOG_LOCATIONS=$SYSVOL_PATH/syslog,$LIBERTYVOL_PATH/messages.log,$LIBERTYVOL_PATH/console.log,$LIBERTYVOL_PATH/trace.log"
fi
# Analytics data volume: either mount a dedicated volume or just pass the
# data directory through as an environment variable.
if [ "$ENABLE_ANALYTICS_DATA_VOLUME" = "Y" ] || [ "$ENABLE_ANALYTICS_DATA_VOLUME" = "y" ]
then
createDataVolume
cmd="$cmd -v $ANALYTICS_DATA_VOLUME_NAME:$ANALYTICS_DATA_DIRECTORY -e ANALYTICS_DATA_DIRECTORY=$ANALYTICS_DATA_DIRECTORY"
else
cmd="$cmd -e ANALYTICS_DATA_DIRECTORY=$ANALYTICS_DATA_DIRECTORY"
fi
if [ ! -z "$MFPF_PROPERTIES" ]
then
cmd="$cmd -e mfpfproperties=$MFPF_PROPERTIES"
fi
# Apply trace/log defaults here (interactive/file modes may leave them empty).
if [ -z "$TRACE_SPEC" ]
then
TRACE_SPEC="*=info"
fi
if [ -z "$MAX_LOG_FILES" ]
then
MAX_LOG_FILES="5"
fi
if [ -z "$MAX_LOG_FILE_SIZE" ]
then
MAX_LOG_FILE_SIZE="20"
fi
# '=' is replaced with '~' so the trace spec survives -e KEY=VALUE parsing;
# the container entrypoint is expected to reverse this.
TRACE_SPEC=${TRACE_SPEC//"="/"~"}
cmd="$cmd -e ANALYTICS_TRACE_LEVEL=$TRACE_SPEC -e ANALYTICS_MAX_LOG_FILES=$MAX_LOG_FILES -e ANALYTICS_MAX_LOG_FILE_SIZE=$MAX_LOG_FILE_SIZE"
cmd="$cmd $ANALYTICS_IMAGE_TAG"
echo "Starting the analytics container group : " $ANALYTICS_CONTAINER_GROUP_NAME
echo "Executing command : " $cmd
CMD_RUN_RESULT=`eval ${cmd}`
echo "$CMD_RUN_RESULT"
# Any line containing "Error" (case-insensitive) in the output is fatal.
GREPPED_RESULT=$(echo $CMD_RUN_RESULT | grep -i "Error" | wc -l | tr -s " ")
if [ $(echo $GREPPED_RESULT) != "0" ]
then
echo "ERROR: cf ic group create command failed. Exiting ..."
exit 1
fi
# Extract the group id from "Created group <name> (id: <id>)" output.
CREATE_STATUS=$(echo $CMD_RUN_RESULT | grep -i "Created group $ANALYTICS_CONTAINER_GROUP_NAME" | wc -l | tr -s " ")
if [ $(echo $CREATE_STATUS) != "0" ]
then
ANALYTICS_CONTAINER_GROUP_ID=$(echo $CMD_RUN_RESULT | sed -e 's/.*(id:\(.*\)).*/\1/' | tr -d " ")
else
echo "ERROR: cf ic group create command failed. Exiting ..."
exit 1
fi
sleep 5s
echo
echo "Checking the status of the Container Group - $ANALYTICS_CONTAINER_GROUP_NAME (id: $ANALYTICS_CONTAINER_GROUP_ID) ..."
# Poll up to 40 times (5s apart) for CREATE_COMPLETE; bail on CREATE_FAILED.
COUNTER=40
while [ $COUNTER -gt 0 ]
do
CONTAINER_GROUP_LIST=`eval cf ic group inspect $ANALYTICS_CONTAINER_GROUP_ID`
CONTAINER_RUN_STATE=$(echo $CONTAINER_GROUP_LIST | grep $ANALYTICS_CONTAINER_GROUP_ID | grep '"Status": "CREATE_COMPLETE"' | wc -l )
if [ $(echo $CONTAINER_RUN_STATE) = "1" ]
then
echo "Container group has been created successfully and is in CREATE_COMPLETE state"
echo
break
fi
if [ $(echo $CONTAINER_GROUP_LIST | grep $ANALYTICS_CONTAINER_GROUP_ID | grep '"Status": "CREATE_FAILED"' | wc -l) = "1" ]
then
echo "ERROR: cf ic container group is in CREATE_FAILED status. Exiting ..."
exit 1
fi
# Allow to container group to come up
sleep 5s
COUNTER=`expr $COUNTER - 1`
done
# Poll again for route mapping success ("successful": true in inspect output).
echo "Checking the status of the Route mapping to the Container group ..."
COUNTER=40
while [ $COUNTER -gt 0 ]
do
CONTAINER_IP_STATE=$(echo $(cf ic group inspect $ANALYTICS_CONTAINER_GROUP_ID | grep '"successful": true' | wc -l ))
if [ $CONTAINER_IP_STATE -ge 1 ]
then
echo "Mapping the MobileFirst Server container group to Host : $ANALYTICS_CONTAINER_GROUP_HOST , Domain : $ANALYTICS_CONTAINER_GROUP_DOMAIN is successful"
echo
break
fi
# Allow to container group to come up
sleep 5s
COUNTER=`expr $COUNTER - 1`
done
echo "Detailed Status of the container group and the mapping can be verified using the following cf ic command"
echo "    cf ic group inspect $ANALYTICS_CONTAINER_GROUP_ID"
echo
|
package libcontainer
import (
"encoding/json"
"fmt"
"io"
"os"
)
// Console represents a pseudo TTY.
type Console interface {
	io.ReadWriteCloser

	// Path returns the filesystem path to the slave side of the pty.
	Path() string

	// File returns the *os.File for the master side of the pty.
	// (The original comment said "Fd returns the fd", but the method
	// returns the file object itself, not a descriptor.)
	File() *os.File
}

const (
	// Version stamp (date-encoded) carried in every TerminalInfo payload.
	TerminalInfoVersion uint32 = 201610041
	// Message discriminator byte ('T' for terminal).
	TerminalInfoType uint8 = 'T'
)

// TerminalInfo is the structure which is passed as the non-ancillary data
// in the sendmsg(2) call when runc is run with --console-socket. It
// contains some information about the container which the console master fd
// relates to (to allow for consumers to use a single unix socket to handle
// multiple containers). This structure will probably move to runtime-spec
// at some point. But for now it lies in libcontainer.
type TerminalInfo struct {
	// Version of the API.
	Version uint32 `json:"version"`

	// Type of message (future proofing).
	Type uint8 `json:"type"`

	// Container contains the ID of the container.
	ContainerID string `json:"container_id"`
}
// String renders the terminal info as its JSON encoding.  Marshalling a
// value of this fixed shape cannot fail, so an error here is a programming
// bug and panics.
func (ti *TerminalInfo) String() string {
	data, err := json.Marshal(*ti)
	if err != nil {
		panic(err)
	}
	return string(data)
}
// NewTerminalInfo builds a TerminalInfo for the given container, stamped
// with the current protocol version and message type.
func NewTerminalInfo(containerId string) *TerminalInfo {
	ti := new(TerminalInfo)
	ti.Version = TerminalInfoVersion
	ti.Type = TerminalInfoType
	ti.ContainerID = containerId
	return ti
}
// GetTerminalInfo decodes a JSON TerminalInfo payload and validates its
// type and version fields against the compiled-in constants.
//
// Fix: the version mismatch message previously used %q, which formats an
// integer as a quoted rune — TerminalInfoVersion (201610041) is far outside
// the valid rune range, so the message rendered as garbage.  Versions are
// now printed with %d; %q is kept for the single-byte type, where a quoted
// character ('T') is the readable form.
func GetTerminalInfo(encoded string) (*TerminalInfo, error) {
	ti := new(TerminalInfo)
	if err := json.Unmarshal([]byte(encoded), ti); err != nil {
		return nil, err
	}

	if ti.Type != TerminalInfoType {
		return nil, fmt.Errorf("terminal info: incorrect type in payload (%q): %q", TerminalInfoType, ti.Type)
	}
	if ti.Version != TerminalInfoVersion {
		return nil, fmt.Errorf("terminal info: incorrect version in payload (%d): %d", TerminalInfoVersion, ti.Version)
	}

	return ti, nil
}
|
<reponame>WingRS/ucu_mail_bot
package main;
import org.telegram.telegrambots.api.methods.send.SendMessage;
import org.telegram.telegrambots.api.objects.Chat;
import org.telegram.telegrambots.api.objects.Update;
import org.telegram.telegrambots.api.objects.replykeyboard.ReplyKeyboardMarkup;
import org.telegram.telegrambots.api.objects.replykeyboard.buttons.KeyboardButton;
import org.telegram.telegrambots.api.objects.replykeyboard.buttons.KeyboardRow;
import org.telegram.telegrambots.bots.TelegramLongPollingBot;
import org.telegram.telegrambots.exceptions.TelegramApiException;
import java.util.ArrayList;
import java.util.List;
/**
 * Long-polling Telegram bot for UCU mail. Each incoming text message is
 * routed through {@code MessageFactory}, which configures the outgoing
 * {@code SendMessage}; the prepared response is then executed back to the
 * originating chat.
 */
public class MyBot extends TelegramLongPollingBot {

    /**
     * Handles one update: ignores non-text updates, builds a response via
     * MessageFactory for the message text and chat id, then sends it.
     */
    @Override
    public void onUpdateReceived(Update update) {
        if (update.hasMessage() && update.getMessage().hasText()) {
            String messageText = update.getMessage().getText();
            long chatId = update.getMessage().getChatId();
            SendMessage message = new SendMessage(); // configured by MessageFactory below
            MyResponse response = MessageFactory.checkMessage(messageText, message, chatId);
            response.Start();
            try {
                execute(message); // send the prepared message to the user
            } catch (TelegramApiException e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public String getBotUsername() {
        return "UCUmail";
    }

    @Override
    public String getBotToken() {
        // TODO: load the token from configuration/environment;
        // a real token must never be hard-coded in source control.
        return "token";
    }
}
|
#!/bin/bash -e
# used pip packages
# use TF that is installed from conda when DALI is installed
pip_packages="nose jupyter"
target_dir=./dali/test/python

# populate epilog and prolog with variants to enable/disable conda
# every test will be executed for below configs
prolog=(enable_conda)
epilog=(disable_conda)

# test_body: run the DALI/TensorFlow dataset tests and execute the example
# notebooks, but only when the installed TF version supports DALI datasets.
test_body() {
is_compatible=$(python -c 'import nvidia.dali.plugin.tf as dali_tf; print(dali_tf.dataset_compatible_tensorflow())')
if [ $is_compatible = 'True' ]; then
# DALI TF DATASET run
nosetests --verbose -s test_dali_tf_dataset.py:_test_tf_dataset_other_gpu
nosetests --verbose -s test_dali_tf_dataset.py:_test_tf_dataset_multigpu
nosetests --verbose -s test_dali_tf_dataset_mnist.py

# DALI TF Notebooks run
pushd ../../../docs/examples/frameworks/tensorflow/
jupyter nbconvert tensorflow-dataset.ipynb \
--to notebook --inplace --execute \
--ExecutePreprocessor.kernel_name=python${PYVER:0:1} \
--ExecutePreprocessor.timeout=600
jupyter nbconvert tensorflow-dataset-multigpu.ipynb \
--to notebook --inplace --execute \
--ExecutePreprocessor.kernel_name=python${PYVER:0:1} \
--ExecutePreprocessor.timeout=600
popd
fi
}

# Delegate the actual prolog/epilog/test orchestration to the shared template.
pushd ../..
source ./qa/test_template.sh
popd
|
<filename>ext/bin_utils/native.c
#include <ruby.h>
#if HAVE_STDINT_H
#include "stdint.h"
#elif defined(_MSC_VER)
typedef __int8 int8_t;
typedef unsigned __int8 uint8_t;
typedef __int16 int16_t;
typedef unsigned __int16 uint16_t;
typedef __int32 int32_t;
typedef unsigned __int32 uint32_t;
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
#else
#ifndef __int8_t_defined
typedef char int8_t;
typedef short int16_t;
typedef int int32_t;
#endif
typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned int uint32_t;
#if SIZEOF_LONG==8
typedef long int64_t;
typedef unsigned long uint64_t;
#else
typedef long long int64_t;
typedef unsigned long long uint64_t;
#endif
#endif
#ifdef __GNUC__
#define FORCE_INLINE __attribute__((always_inline))
#elif defined(_MSC_VER)
#define FORCE_INLINE __forceinline
#else
#define FORCE_INLINE
#endif
#if defined(_MSC_VER)
#define LL(x) (x)
#define LLU(x) (x)
#else
#define LL(x) (x##LL)
#define LLU(x) (x##LLU)
#endif
#if SIZEOF_LONG == 8
#define I642NUM(v) LONG2NUM(v)
#define U642NUM(v) ULONG2NUM(v)
#define NUM2I64(v) NUM2LONG(v)
#define NUM2U64(v) NUM2ULONG(v)
#else
#define I642NUM(v) LL2NUM(v)
#define U642NUM(v) ULL2NUM(v)
#define NUM2I64(v) NUM2LL(v)
#define NUM2U64(v) NUM2ULL(v)
#endif
#ifndef RARRAY_CONST_PTR
# define RARRAY_CONST_PTR(ar) RARRAY_PTR(ar)
#endif
/* Cached Ruby method IDs for Integer#>> and Integer#&, used by
 * safe_int64_t to split Bignums (initialized elsewhere in the file). */
ID rshft;
ID band;

#ifndef HAVE_RB_STR_DROP_BYTES
/* rubinius has no rb_str_drop_bytes */
ID aslice;
/* Fallback: drop the first `bytes` bytes of `str` in place by calling the
 * Ruby-level String#[]= (id cached in `aslice`). */
static VALUE
rb_str_drop_bytes(VALUE str, long bytes)
{
    VALUE args[2] = {0, INT2FIX(bytes)};
    rb_funcall2(str, aslice, 2, args);
    return str;
}
#endif
/* Convert a Ruby Integer to int64_t without truncating Bignums on
 * platforms where NUM2I64 alone would not cope: Fixnums convert directly,
 * while Bignums are split into low/high 32-bit halves via Ruby-level
 * `& 0xffffffff` and `>> 32` calls and recombined. */
static int64_t
safe_int64_t(VALUE i)
{
    if (FIXNUM_P(i)) {
        return NUM2I64(i);
    }
    else {
        VALUE argm = UINT2NUM(0xffffffff);
        VALUE arg32 = INT2FIX(32);
        uint64_t i0 = NUM2I64(rb_funcall2(i, band, 1, &argm));
        i = rb_funcall2(i, rshft, 1, &arg32);
        return i0 + (NUM2I64(rb_funcall2(i, band, 1, &argm)) << 32);
    }
}
/* Normalize index `i` for an `ilen`-byte read from a string of length
 * `strlen`: negative indices count from the end.  Raises ArgumentError if
 * the read would fall outside the string; returns the normalized index.
 *
 * Fix: the error message previously passed `-ilen` to a "-%ld" format,
 * producing a double minus (e.g. "-​-4") in the reported range; the format
 * already supplies the minus sign, so `ilen` is passed as-is. */
static long
check_size(long i, long strlen, long ilen)
{
    if (i < 0) { i += strlen; }
    if (i > strlen - ilen || i < 0) {
        rb_raise(rb_eArgError, "index %ld should be in range 0..%ld or in range -%ld..-%ld for string of size %ld", i, strlen-ilen, strlen, ilen, strlen);
    }
    return i;
}
/* Validate the (string[, offset]) argument pattern shared by all get_*
 * entry points; returns the offset VALUE (defaulting to 0 when omitted). */
static VALUE
check_argc(int argc, VALUE *argv)
{
    if (argc == 0 || argc > 2) {
        rb_raise(rb_eArgError, "accepts 1 or 2 arguments: (string[, offset=0])");
    }
    return argc == 2 ? argv[1] : INT2FIX(0);
}
/* Parsed arguments for the append_* entry points: the target string plus
 * the (possibly array-unpacked) integer varargs. */
typedef struct append_args {
    VALUE str;
    int argc;
    VALUE *argv;
} append_args;

/* Same as append_args, but with one mandatory leading integer (`int0`)
 * ahead of the varargs. */
typedef struct append_args2 {
    VALUE str;
    int argc;
    VALUE *argv;
    VALUE int0;
} append_args2;
/* Parse (string[, *ints]) for an append_* entry point.  A nil/false first
 * argument is replaced by a fresh empty string.  When exactly one extra
 * argument is given and it is an Array, its elements are used as the
 * varargs.  `bits` is only used in the error message. */
static void
check_argc_append(int argc, VALUE *argv, append_args *args, int bits)
{
    if (argc < 1) {
        rb_raise(rb_eArgError, "accepts at least 1 argument: (string[, *int%ds])", bits);
    }
    args->str = RTEST(argv[0]) ? argv[0] : rb_str_new(0, 0);
    if (argc == 2 && TYPE(argv[1]) == T_ARRAY) {
        args->argc = RARRAY_LEN(argv[1]);
        args->argv = RARRAY_CONST_PTR(argv[1]);
    }
    else {
        args->argc = argc-1;
        args->argv = argv+1;
    }
}
/* Like check_argc_append, but the entry point requires one mandatory
 * integer (stored in args->int0) before the varargs: (string, int[, *ints]).
 * `bits`/`bits1` only feed the error message. */
static void
check_argc_append_2(int argc, VALUE *argv, append_args2 *args, int bits, int bits1)
{
    if (argc < 2) {
        rb_raise(rb_eArgError, "accepts at least 2 arguments: (string, int%d[, *int%ds])", bits, bits1);
    }
    args->str = RTEST(argv[0]) ? argv[0] : rb_str_new(0, 0);
    args->int0 = argv[1];
    if (argc == 3 && TYPE(argv[2]) == T_ARRAY) {
        args->argc = RARRAY_LEN(argv[2]);
        args->argv = RARRAY_CONST_PTR(argv[2]);
    }
    else {
        args->argc = argc-2;
        args->argv = argv+2;
    }
}
/* Read one unsigned byte from rstr at index ri (negative counts from end). */
static uint32_t
get_int8(VALUE rstr, VALUE ri)
{
    long i = NUM2LONG(ri);
    const uint8_t *ptr;
    StringValue(rstr);
    i = check_size(i, RSTRING_LEN(rstr), 1);
    ptr = (const uint8_t*)RSTRING_PTR(rstr);
    return
        (((uint32_t)ptr[i + 0]) << 0 ) |
        (uint32_t)0;
}
/* Signed variant: sign-extend bit 7 to produce -128..127. */
static int32_t
get_sint8(VALUE rstr, VALUE ri)
{
    int32_t res = (int32_t)get_int8(rstr, ri);
    return res - ((res >> 7) << 8);
}
/* Ruby: get_int8(string[, offset=0]) -> Integer in 0..255. */
static VALUE
rb_get_int8(int argc, VALUE *argv, VALUE self)
{
    return INT2FIX(get_int8(argv[0], check_argc(argc, argv)));
}
/* Ruby: get_sint8(string[, offset=0]) -> Integer in -128..127. */
static VALUE
rb_get_sint8(int argc, VALUE *argv, VALUE self)
{
    return INT2FIX(get_sint8(argv[0], check_argc(argc, argv)));
}
/* Ruby: slice_int8(string) -> value; destructively drops 1 byte from rstr. */
static VALUE
rb_slice_int8(VALUE self, VALUE rstr)
{
    uint32_t res = get_int8(rstr, INT2FIX(0));
    rb_str_drop_bytes(rstr, 1);
    return INT2FIX(res);
}
/* Signed destructive variant of slice_int8. */
static VALUE
rb_slice_sint8(VALUE self, VALUE rstr)
{
    int32_t res = get_sint8(rstr, INT2FIX(0));
    rb_str_drop_bytes(rstr, 1);
    return INT2FIX(res);
}
/* Append the low byte of v to rstr (array holds a trailing 0 that is not
 * written: rb_str_cat copies exactly 1 byte). */
static void
append_int8(VALUE rstr, int32_t v)
{
    char a[] = {
        (v >> 0) & 0xff,
        0
    };
    rb_str_cat(rstr, a, 1);
}
/* Append each vararg as one byte; returns the string. */
static VALUE
append_var_int8(int argc, VALUE* argv, VALUE str)
{
    int i;
    for(i = 0; i < argc; i++) {
        append_int8(str, NUM2INT(argv[i]));
    }
    return str;
}
/* Ruby: append_int8(str_or_nil, *ints_or_array) -> str */
static VALUE
rb_append_int8(int argc, VALUE* argv, VALUE self)
{
    append_args args;
    check_argc_append(argc, argv, &args, 8);
    return append_var_int8(args.argc, args.argv, args.str);
}
/* One byte has no endianness: LE and BE appenders are the same function. */
#define append_var_int8_le append_var_int8
#define append_var_int8_be append_var_int8
/* Read a 16-bit little-endian unsigned value from rstr at index ri. */
static uint32_t
get_int16_le(VALUE rstr, VALUE ri)
{
    long i = NUM2LONG(ri);
    const uint8_t *ptr;
    StringValue(rstr);
    i = check_size(i, RSTRING_LEN(rstr), 2);
    ptr = (const uint8_t*)RSTRING_PTR(rstr);
    return
        (((uint32_t)ptr[i + 0]) << 0 ) |
        (((uint32_t)ptr[i + 1]) << 8 ) |
        (uint32_t)0;
}
/* Signed variant: sign-extend bit 15 to -32768..32767. */
static int32_t
get_sint16_le(VALUE rstr, VALUE ri)
{
    int32_t res = (int32_t)get_int16_le(rstr, ri);
    return res - ((res >> 15) << 16);
}
/* Ruby: get_int16_le(string[, offset=0]) -> Integer in 0..65535. */
static VALUE
rb_get_int16_le(int argc, VALUE *argv, VALUE self)
{
    return INT2FIX(get_int16_le(argv[0], check_argc(argc, argv)));
}
/* Ruby: get_sint16_le(string[, offset=0]) -> signed Integer. */
static VALUE
rb_get_sint16_le(int argc, VALUE *argv, VALUE self)
{
    return INT2FIX(get_sint16_le(argv[0], check_argc(argc, argv)));
}
/* Ruby: slice_int16_le(string) -> value; destructively drops 2 bytes. */
static VALUE
rb_slice_int16_le(VALUE self, VALUE rstr)
{
    uint32_t res = get_int16_le(rstr, INT2FIX(0));
    rb_str_drop_bytes(rstr, 2);
    return INT2FIX(res);
}
/* Signed destructive variant. */
static VALUE
rb_slice_sint16_le(VALUE self, VALUE rstr)
{
    int32_t res = get_sint16_le(rstr, INT2FIX(0));
    rb_str_drop_bytes(rstr, 2);
    return INT2FIX(res);
}
/* Append v to rstr as 2 little-endian bytes. */
static void
append_int16_le(VALUE rstr, int32_t v)
{
    char a[] = {
        (v >> 0) & 0xff,
        (v >> 8) & 0xff,
        0
    };
    rb_str_cat(rstr, a, 2);
}
/* Append each vararg as 2 LE bytes; returns the string. */
static VALUE
append_var_int16_le(int argc, VALUE* argv, VALUE str)
{
    int i;
    for(i = 0; i < argc; i++) {
        append_int16_le(str, NUM2INT(argv[i]));
    }
    return str;
}
/* Ruby: append_int16_le(str_or_nil, *ints_or_array) -> str */
static VALUE
rb_append_int16_le(int argc, VALUE* argv, VALUE self)
{
    append_args args;
    check_argc_append(argc, argv, &args, 16);
    return append_var_int16_le(args.argc, args.argv, args.str);
}
/* Read a 24-bit little-endian unsigned value from rstr at index ri. */
static uint32_t
get_int24_le(VALUE rstr, VALUE ri)
{
    long i = NUM2LONG(ri);
    const uint8_t *ptr;
    StringValue(rstr);
    i = check_size(i, RSTRING_LEN(rstr), 3);
    ptr = (const uint8_t*)RSTRING_PTR(rstr);
    return
        (((uint32_t)ptr[i + 0]) << 0 ) |
        (((uint32_t)ptr[i + 1]) << 8 ) |
        (((uint32_t)ptr[i + 2]) << 16 ) |
        (uint32_t)0;
}
/* Signed variant: sign-extend bit 23. */
static int32_t
get_sint24_le(VALUE rstr, VALUE ri)
{
    int32_t res = (int32_t)get_int24_le(rstr, ri);
    return res - ((res >> 23) << 24);
}
/* Ruby: get_int24_le(string[, offset=0]) -> Integer in 0..16777215. */
static VALUE
rb_get_int24_le(int argc, VALUE *argv, VALUE self)
{
    return INT2FIX(get_int24_le(argv[0], check_argc(argc, argv)));
}
/* Ruby: get_sint24_le(string[, offset=0]) -> signed Integer. */
static VALUE
rb_get_sint24_le(int argc, VALUE *argv, VALUE self)
{
    return INT2FIX(get_sint24_le(argv[0], check_argc(argc, argv)));
}
/* Ruby: slice_int24_le(string) -> value; destructively drops 3 bytes. */
static VALUE
rb_slice_int24_le(VALUE self, VALUE rstr)
{
    uint32_t res = get_int24_le(rstr, INT2FIX(0));
    rb_str_drop_bytes(rstr, 3);
    return INT2FIX(res);
}
/* Signed destructive variant. */
static VALUE
rb_slice_sint24_le(VALUE self, VALUE rstr)
{
    int32_t res = get_sint24_le(rstr, INT2FIX(0));
    rb_str_drop_bytes(rstr, 3);
    return INT2FIX(res);
}
/* Append v to rstr as 3 little-endian bytes. */
static void
append_int24_le(VALUE rstr, int32_t v)
{
    char a[] = {
        (v >> 0) & 0xff,
        (v >> 8) & 0xff,
        (v >> 16) & 0xff,
        0
    };
    rb_str_cat(rstr, a, 3);
}
/* Append each vararg as 3 LE bytes; returns the string. */
static VALUE
append_var_int24_le(int argc, VALUE* argv, VALUE str)
{
    int i;
    for(i = 0; i < argc; i++) {
        append_int24_le(str, NUM2INT(argv[i]));
    }
    return str;
}
/* Ruby: append_int24_le(str_or_nil, *ints_or_array) -> str */
static VALUE
rb_append_int24_le(int argc, VALUE* argv, VALUE self)
{
    append_args args;
    check_argc_append(argc, argv, &args, 24);
    return append_var_int24_le(args.argc, args.argv, args.str);
}
/* Read a 32-bit little-endian unsigned value from rstr at index ri. */
static uint32_t
get_int32_le(VALUE rstr, VALUE ri)
{
    long i = NUM2LONG(ri);
    const uint8_t *ptr;
    StringValue(rstr);
    i = check_size(i, RSTRING_LEN(rstr), 4);
    ptr = (const uint8_t*)RSTRING_PTR(rstr);
    return
        (((uint32_t)ptr[i + 0]) << 0 ) |
        (((uint32_t)ptr[i + 1]) << 8 ) |
        (((uint32_t)ptr[i + 2]) << 16 ) |
        (((uint32_t)ptr[i + 3]) << 24 ) |
        (uint32_t)0;
}
/* Signed variant: reinterpret the full 32 bits (no extra masking needed). */
static int32_t
get_sint32_le(VALUE rstr, VALUE ri)
{
    int32_t res = (int32_t)get_int32_le(rstr, ri);
    return res;
}
/* Ruby: get_int32_le(string[, offset=0]) -> Integer (UINT2NUM: may exceed
 * Fixnum range on 32-bit builds). */
static VALUE
rb_get_int32_le(int argc, VALUE *argv, VALUE self)
{
    return UINT2NUM(get_int32_le(argv[0], check_argc(argc, argv)));
}
/* Ruby: get_sint32_le(string[, offset=0]) -> signed Integer. */
static VALUE
rb_get_sint32_le(int argc, VALUE *argv, VALUE self)
{
    return INT2NUM(get_sint32_le(argv[0], check_argc(argc, argv)));
}
/* Ruby: slice_int32_le(string) -> value; destructively drops 4 bytes. */
static VALUE
rb_slice_int32_le(VALUE self, VALUE rstr)
{
    uint32_t res = get_int32_le(rstr, INT2FIX(0));
    rb_str_drop_bytes(rstr, 4);
    return UINT2NUM(res);
}
/* Signed destructive variant. */
static VALUE
rb_slice_sint32_le(VALUE self, VALUE rstr)
{
    int32_t res = get_sint32_le(rstr, INT2FIX(0));
    rb_str_drop_bytes(rstr, 4);
    return INT2NUM(res);
}
/* Append v to rstr as 4 little-endian bytes. */
static void
append_int32_le(VALUE rstr, int32_t v)
{
    char a[] = {
        (v >> 0) & 0xff,
        (v >> 8) & 0xff,
        (v >> 16) & 0xff,
        (v >> 24) & 0xff,
        0
    };
    rb_str_cat(rstr, a, 4);
}
/* Append each vararg as 4 LE bytes; NUM2I64 + cast lets values in the full
 * unsigned 32-bit range (> INT32_MAX) pass through without range errors. */
static VALUE
append_var_int32_le(int argc, VALUE* argv, VALUE str)
{
    int i;
    for(i = 0; i < argc; i++) {
        append_int32_le(str, (int32_t)NUM2I64(argv[i]));
    }
    return str;
}
/* Ruby: append_int32_le(str_or_nil, *ints_or_array) -> str */
static VALUE
rb_append_int32_le(int argc, VALUE* argv, VALUE self)
{
    append_args args;
    check_argc_append(argc, argv, &args, 32);
    return append_var_int32_le(args.argc, args.argv, args.str);
}
/* Read an unsigned 16-bit big-endian integer from +rstr+ at offset +ri+. */
static uint32_t
get_int16_be(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 2);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return
		(((uint32_t)ptr[i + 1]) << 0 ) |
		(((uint32_t)ptr[i + 0]) << 8 ) |
		(uint32_t)0;
}
/* Signed variant: manually sign-extend from bit 15
 * (res - ((res >> 15) << 16) subtracts 0x10000 when bit 15 is set). */
static int32_t
get_sint16_be(VALUE rstr, VALUE ri)
{
	int32_t res = (int32_t)get_int16_be(rstr, ri);
	return res - ((res >> 15) << 16);
}
static VALUE
rb_get_int16_be(int argc, VALUE *argv, VALUE self)
{
	return INT2FIX(get_int16_be(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint16_be(int argc, VALUE *argv, VALUE self)
{
	return INT2FIX(get_sint16_be(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 2 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int16_be(VALUE self, VALUE rstr)
{
	uint32_t res = get_int16_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 2);
	return INT2FIX(res);
}
static VALUE
rb_slice_sint16_be(VALUE self, VALUE rstr)
{
	int32_t res = get_sint16_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 2);
	return INT2FIX(res);
}
/* Append the low 16 bits of +v+ to +rstr+, big-endian (MSB first). */
static void
append_int16_be(VALUE rstr, int32_t v)
{
	char a[] = {
		(v >> 8) & 0xff,
		(v >> 0) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 2);
}
static VALUE
append_var_int16_be(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int16_be(str, NUM2INT(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int16_be(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 16);
	return append_var_int16_be(args.argc, args.argv, args.str);
}
/* Read an unsigned 24-bit big-endian integer from +rstr+ at offset +ri+. */
static uint32_t
get_int24_be(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 3);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return
		(((uint32_t)ptr[i + 2]) << 0 ) |
		(((uint32_t)ptr[i + 1]) << 8 ) |
		(((uint32_t)ptr[i + 0]) << 16 ) |
		(uint32_t)0;
}
/* Signed variant: manually sign-extend from bit 23. */
static int32_t
get_sint24_be(VALUE rstr, VALUE ri)
{
	int32_t res = (int32_t)get_int24_be(rstr, ri);
	return res - ((res >> 23) << 24);
}
static VALUE
rb_get_int24_be(int argc, VALUE *argv, VALUE self)
{
	return INT2FIX(get_int24_be(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint24_be(int argc, VALUE *argv, VALUE self)
{
	return INT2FIX(get_sint24_be(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 3 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int24_be(VALUE self, VALUE rstr)
{
	uint32_t res = get_int24_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 3);
	return INT2FIX(res);
}
static VALUE
rb_slice_sint24_be(VALUE self, VALUE rstr)
{
	int32_t res = get_sint24_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 3);
	return INT2FIX(res);
}
/* Append the low 24 bits of +v+ to +rstr+, big-endian. */
static void
append_int24_be(VALUE rstr, int32_t v)
{
	char a[] = {
		(v >> 16) & 0xff,
		(v >> 8) & 0xff,
		(v >> 0) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 3);
}
static VALUE
append_var_int24_be(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int24_be(str, NUM2INT(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int24_be(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 24);
	return append_var_int24_be(args.argc, args.argv, args.str);
}
/* Read an unsigned 32-bit big-endian integer from +rstr+ at offset +ri+. */
static uint32_t
get_int32_be(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 4);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return
		(((uint32_t)ptr[i + 3]) << 0 ) |
		(((uint32_t)ptr[i + 2]) << 8 ) |
		(((uint32_t)ptr[i + 1]) << 16 ) |
		(((uint32_t)ptr[i + 0]) << 24 ) |
		(uint32_t)0;
}
/* Signed variant: the plain cast is sufficient for a full 32-bit word. */
static int32_t
get_sint32_be(VALUE rstr, VALUE ri)
{
	int32_t res = (int32_t)get_int32_be(rstr, ri);
	return res;
}
static VALUE
rb_get_int32_be(int argc, VALUE *argv, VALUE self)
{
	return UINT2NUM(get_int32_be(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint32_be(int argc, VALUE *argv, VALUE self)
{
	return INT2NUM(get_sint32_be(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 4 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int32_be(VALUE self, VALUE rstr)
{
	uint32_t res = get_int32_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 4);
	return UINT2NUM(res);
}
static VALUE
rb_slice_sint32_be(VALUE self, VALUE rstr)
{
	int32_t res = get_sint32_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 4);
	return INT2NUM(res);
}
/* Append the 4 bytes of +v+ to +rstr+, big-endian. */
static void
append_int32_be(VALUE rstr, int32_t v)
{
	char a[] = {
		(v >> 24) & 0xff,
		(v >> 16) & 0xff,
		(v >> 8) & 0xff,
		(v >> 0) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 4);
}
/* Fetch as 64-bit first so range errors surface before truncation. */
static VALUE
append_var_int32_be(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int32_be(str, (int32_t)NUM2I64(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int32_be(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 32);
	return append_var_int32_be(args.argc, args.argv, args.str);
}
/* Read an unsigned 40-bit little-endian integer from +rstr+ at offset
 * +ri+.  Assembled in two 32-bit halves to avoid 64-bit shifts on the
 * individual bytes. */
static uint64_t
get_int40_le(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 5);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return (uint64_t)(
		(((uint32_t)ptr[i + 0]) << 0) |
		(((uint32_t)ptr[i + 1]) << 8) |
		(((uint32_t)ptr[i + 2]) << 16) |
		(((uint32_t)ptr[i + 3]) << 24) |
		0) | ((uint64_t)(
		(((uint32_t)ptr[i + 4]) << 0) |
		0) << 32);
}
/* Signed variant: manually sign-extend from bit 39. */
static int64_t
get_sint40_le(VALUE rstr, VALUE ri)
{
	int64_t res = (int64_t)get_int40_le(rstr, ri);
	return res - ((res >> 39) << 40);
}
static VALUE
rb_get_int40_le(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_int40_le(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint40_le(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_sint40_le(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 5 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int40_le(VALUE self, VALUE rstr)
{
	uint64_t res = get_int40_le(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 5);
	return I642NUM(res);
}
/* Destructive: read a signed 40-bit little-endian integer from the head
 * of +rstr+, drop the 5 consumed bytes, and return it. */
static VALUE
rb_slice_sint40_le(VALUE self, VALUE rstr)
{
	/* Keep the accumulator signed: get_sint40_le returns a sign-extended
	 * int64_t; the previous uint64_t local relied on implementation-
	 * defined conversion when the value was handed back to I642NUM, and
	 * was inconsistent with rb_slice_sint32_le above. */
	int64_t res = get_sint40_le(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 5);
	return I642NUM(res);
}
/* Append the low 40 bits (5 bytes) of +v+ to +rstr+, little-endian. */
static void
append_int40_le(VALUE rstr, int64_t v)
{
	char a[] = {
		(v >> 0) & 0xff,
		(v >> 8) & 0xff,
		(v >> 16) & 0xff,
		(v >> 24) & 0xff,
		(v >> 32) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 5);
}
static VALUE
append_var_int40_le(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int40_le(str, NUM2I64(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int40_le(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 40);
	return append_var_int40_le(args.argc, args.argv, args.str);
}
/* Read an unsigned 48-bit little-endian integer from +rstr+ at offset
 * +ri+, assembled from two 32-bit halves. */
static uint64_t
get_int48_le(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 6);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return (uint64_t)(
		(((uint32_t)ptr[i + 0]) << 0) |
		(((uint32_t)ptr[i + 1]) << 8) |
		(((uint32_t)ptr[i + 2]) << 16) |
		(((uint32_t)ptr[i + 3]) << 24) |
		0) | ((uint64_t)(
		(((uint32_t)ptr[i + 4]) << 0) |
		(((uint32_t)ptr[i + 5]) << 8) |
		0) << 32);
}
/* Signed variant: manually sign-extend from bit 47. */
static int64_t
get_sint48_le(VALUE rstr, VALUE ri)
{
	int64_t res = (int64_t)get_int48_le(rstr, ri);
	return res - ((res >> 47) << 48);
}
static VALUE
rb_get_int48_le(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_int48_le(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint48_le(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_sint48_le(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 6 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int48_le(VALUE self, VALUE rstr)
{
	uint64_t res = get_int48_le(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 6);
	return I642NUM(res);
}
/* Destructive: read a signed 48-bit little-endian integer from the head
 * of +rstr+, drop the 6 consumed bytes, and return it. */
static VALUE
rb_slice_sint48_le(VALUE self, VALUE rstr)
{
	/* signed local (was uint64_t): get_sint48_le returns a sign-extended
	 * int64_t, so negative values no longer round-trip through an
	 * unsigned intermediate before I642NUM */
	int64_t res = get_sint48_le(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 6);
	return I642NUM(res);
}
/* Append the low 48 bits (6 bytes) of +v+ to +rstr+, little-endian. */
static void
append_int48_le(VALUE rstr, int64_t v)
{
	char a[] = {
		(v >> 0) & 0xff,
		(v >> 8) & 0xff,
		(v >> 16) & 0xff,
		(v >> 24) & 0xff,
		(v >> 32) & 0xff,
		(v >> 40) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 6);
}
static VALUE
append_var_int48_le(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int48_le(str, NUM2I64(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int48_le(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 48);
	return append_var_int48_le(args.argc, args.argv, args.str);
}
/* Read an unsigned 56-bit little-endian integer from +rstr+ at offset
 * +ri+, assembled from two 32-bit halves. */
static uint64_t
get_int56_le(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 7);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return (uint64_t)(
		(((uint32_t)ptr[i + 0]) << 0) |
		(((uint32_t)ptr[i + 1]) << 8) |
		(((uint32_t)ptr[i + 2]) << 16) |
		(((uint32_t)ptr[i + 3]) << 24) |
		0) | ((uint64_t)(
		(((uint32_t)ptr[i + 4]) << 0) |
		(((uint32_t)ptr[i + 5]) << 8) |
		(((uint32_t)ptr[i + 6]) << 16) |
		0) << 32);
}
/* Signed variant: manually sign-extend from bit 55. */
static int64_t
get_sint56_le(VALUE rstr, VALUE ri)
{
	int64_t res = (int64_t)get_int56_le(rstr, ri);
	return res - ((res >> 55) << 56);
}
static VALUE
rb_get_int56_le(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_int56_le(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint56_le(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_sint56_le(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 7 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int56_le(VALUE self, VALUE rstr)
{
	uint64_t res = get_int56_le(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 7);
	return I642NUM(res);
}
/* Destructive: read a signed 56-bit little-endian integer from the head
 * of +rstr+, drop the 7 consumed bytes, and return it. */
static VALUE
rb_slice_sint56_le(VALUE self, VALUE rstr)
{
	/* signed local (was uint64_t) to match get_sint56_le's sign-extended
	 * int64_t result and the I642NUM conversion below */
	int64_t res = get_sint56_le(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 7);
	return I642NUM(res);
}
/* Append the low 56 bits (7 bytes) of +v+ to +rstr+, little-endian. */
static void
append_int56_le(VALUE rstr, int64_t v)
{
	char a[] = {
		(v >> 0) & 0xff,
		(v >> 8) & 0xff,
		(v >> 16) & 0xff,
		(v >> 24) & 0xff,
		(v >> 32) & 0xff,
		(v >> 40) & 0xff,
		(v >> 48) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 7);
}
static VALUE
append_var_int56_le(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int56_le(str, NUM2I64(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int56_le(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 56);
	return append_var_int56_le(args.argc, args.argv, args.str);
}
/* Read an unsigned 64-bit little-endian integer from +rstr+ at offset
 * +ri+, assembled from two 32-bit halves. */
static uint64_t
get_int64_le(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 8);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return (uint64_t)(
		(((uint32_t)ptr[i + 0]) << 0) |
		(((uint32_t)ptr[i + 1]) << 8) |
		(((uint32_t)ptr[i + 2]) << 16) |
		(((uint32_t)ptr[i + 3]) << 24) |
		0) | ((uint64_t)(
		(((uint32_t)ptr[i + 4]) << 0) |
		(((uint32_t)ptr[i + 5]) << 8) |
		(((uint32_t)ptr[i + 6]) << 16) |
		(((uint32_t)ptr[i + 7]) << 24) |
		0) << 32);
}
/* Signed variant: the plain cast is sufficient for a full 64-bit word. */
static int64_t
get_sint64_le(VALUE rstr, VALUE ri)
{
	int64_t res = (int64_t)get_int64_le(rstr, ri);
	return res;
}
static VALUE
rb_get_int64_le(int argc, VALUE *argv, VALUE self)
{
	return U642NUM(get_int64_le(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint64_le(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_sint64_le(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 8 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int64_le(VALUE self, VALUE rstr)
{
	uint64_t res = get_int64_le(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 8);
	return U642NUM(res);
}
/* Destructive: read a signed 64-bit little-endian integer from the head
 * of +rstr+, drop the 8 consumed bytes, and return it. */
static VALUE
rb_slice_sint64_le(VALUE self, VALUE rstr)
{
	/* signed local (was uint64_t): get_sint64_le returns int64_t, and at
	 * 64 bits a negative value really did pass through an unsigned
	 * intermediate before the signed I642NUM conversion */
	int64_t res = get_sint64_le(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 8);
	return I642NUM(res);
}
/* Append all 8 bytes of +v+ to +rstr+, little-endian. */
static void
append_int64_le(VALUE rstr, int64_t v)
{
	char a[] = {
		(v >> 0) & 0xff,
		(v >> 8) & 0xff,
		(v >> 16) & 0xff,
		(v >> 24) & 0xff,
		(v >> 32) & 0xff,
		(v >> 40) & 0xff,
		(v >> 48) & 0xff,
		(v >> 56) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 8);
}
/* safe_int64_t (defined earlier in this file) converts the Ruby value
 * with range checking suited to a full 64-bit destination. */
static VALUE
append_var_int64_le(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int64_le(str, safe_int64_t(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int64_le(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 64);
	return append_var_int64_le(args.argc, args.argv, args.str);
}
/* Read an unsigned 40-bit big-endian integer from +rstr+ at offset +ri+
 * (byte i+0 is the most significant). */
static uint64_t
get_int40_be(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 5);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return (uint64_t)(
		(((uint32_t)ptr[i + 4]) << 0) |
		(((uint32_t)ptr[i + 3]) << 8) |
		(((uint32_t)ptr[i + 2]) << 16) |
		(((uint32_t)ptr[i + 1]) << 24) |
		0) | ((uint64_t)(
		(((uint32_t)ptr[i + 0]) << 0) |
		0) << 32);
}
/* Signed variant: manually sign-extend from bit 39. */
static int64_t
get_sint40_be(VALUE rstr, VALUE ri)
{
	int64_t res = (int64_t)get_int40_be(rstr, ri);
	return res - ((res >> 39) << 40);
}
static VALUE
rb_get_int40_be(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_int40_be(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint40_be(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_sint40_be(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 5 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int40_be(VALUE self, VALUE rstr)
{
	uint64_t res = get_int40_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 5);
	return I642NUM(res);
}
/* Destructive: read a signed 40-bit big-endian integer from the head of
 * +rstr+, drop the 5 consumed bytes, and return it. */
static VALUE
rb_slice_sint40_be(VALUE self, VALUE rstr)
{
	/* signed local (was uint64_t) to match get_sint40_be's sign-extended
	 * int64_t result and the I642NUM conversion below */
	int64_t res = get_sint40_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 5);
	return I642NUM(res);
}
/* Append the low 40 bits (5 bytes) of +v+ to +rstr+, big-endian. */
static void
append_int40_be(VALUE rstr, int64_t v)
{
	char a[] = {
		(v >> 32) & 0xff,
		(v >> 24) & 0xff,
		(v >> 16) & 0xff,
		(v >> 8) & 0xff,
		(v >> 0) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 5);
}
static VALUE
append_var_int40_be(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int40_be(str, NUM2I64(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int40_be(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 40);
	return append_var_int40_be(args.argc, args.argv, args.str);
}
/* Read an unsigned 48-bit big-endian integer from +rstr+ at offset +ri+. */
static uint64_t
get_int48_be(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 6);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return (uint64_t)(
		(((uint32_t)ptr[i + 5]) << 0) |
		(((uint32_t)ptr[i + 4]) << 8) |
		(((uint32_t)ptr[i + 3]) << 16) |
		(((uint32_t)ptr[i + 2]) << 24) |
		0) | ((uint64_t)(
		(((uint32_t)ptr[i + 1]) << 0) |
		(((uint32_t)ptr[i + 0]) << 8) |
		0) << 32);
}
/* Signed variant: manually sign-extend from bit 47. */
static int64_t
get_sint48_be(VALUE rstr, VALUE ri)
{
	int64_t res = (int64_t)get_int48_be(rstr, ri);
	return res - ((res >> 47) << 48);
}
static VALUE
rb_get_int48_be(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_int48_be(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint48_be(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_sint48_be(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 6 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int48_be(VALUE self, VALUE rstr)
{
	uint64_t res = get_int48_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 6);
	return I642NUM(res);
}
/* Destructive: read a signed 48-bit big-endian integer from the head of
 * +rstr+, drop the 6 consumed bytes, and return it. */
static VALUE
rb_slice_sint48_be(VALUE self, VALUE rstr)
{
	/* signed local (was uint64_t) to match get_sint48_be's sign-extended
	 * int64_t result and the I642NUM conversion below */
	int64_t res = get_sint48_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 6);
	return I642NUM(res);
}
/* Append the low 48 bits (6 bytes) of +v+ to +rstr+, big-endian. */
static void
append_int48_be(VALUE rstr, int64_t v)
{
	char a[] = {
		(v >> 40) & 0xff,
		(v >> 32) & 0xff,
		(v >> 24) & 0xff,
		(v >> 16) & 0xff,
		(v >> 8) & 0xff,
		(v >> 0) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 6);
}
static VALUE
append_var_int48_be(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int48_be(str, NUM2I64(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int48_be(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 48);
	return append_var_int48_be(args.argc, args.argv, args.str);
}
/* Read an unsigned 56-bit big-endian integer from +rstr+ at offset +ri+. */
static uint64_t
get_int56_be(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 7);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return (uint64_t)(
		(((uint32_t)ptr[i + 6]) << 0) |
		(((uint32_t)ptr[i + 5]) << 8) |
		(((uint32_t)ptr[i + 4]) << 16) |
		(((uint32_t)ptr[i + 3]) << 24) |
		0) | ((uint64_t)(
		(((uint32_t)ptr[i + 2]) << 0) |
		(((uint32_t)ptr[i + 1]) << 8) |
		(((uint32_t)ptr[i + 0]) << 16) |
		0) << 32);
}
/* Signed variant: manually sign-extend from bit 55. */
static int64_t
get_sint56_be(VALUE rstr, VALUE ri)
{
	int64_t res = (int64_t)get_int56_be(rstr, ri);
	return res - ((res >> 55) << 56);
}
static VALUE
rb_get_int56_be(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_int56_be(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint56_be(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_sint56_be(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 7 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int56_be(VALUE self, VALUE rstr)
{
	uint64_t res = get_int56_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 7);
	return I642NUM(res);
}
/* Destructive: read a signed 56-bit big-endian integer from the head of
 * +rstr+, drop the 7 consumed bytes, and return it. */
static VALUE
rb_slice_sint56_be(VALUE self, VALUE rstr)
{
	/* signed local (was uint64_t) to match get_sint56_be's sign-extended
	 * int64_t result and the I642NUM conversion below */
	int64_t res = get_sint56_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 7);
	return I642NUM(res);
}
/* Append the low 56 bits (7 bytes) of +v+ to +rstr+, big-endian. */
static void
append_int56_be(VALUE rstr, int64_t v)
{
	char a[] = {
		(v >> 48) & 0xff,
		(v >> 40) & 0xff,
		(v >> 32) & 0xff,
		(v >> 24) & 0xff,
		(v >> 16) & 0xff,
		(v >> 8) & 0xff,
		(v >> 0) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 7);
}
static VALUE
append_var_int56_be(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int56_be(str, NUM2I64(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int56_be(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 56);
	return append_var_int56_be(args.argc, args.argv, args.str);
}
/* Read an unsigned 64-bit big-endian integer from +rstr+ at offset +ri+. */
static uint64_t
get_int64_be(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri);
	const uint8_t *ptr;
	StringValue(rstr);
	i = check_size(i, RSTRING_LEN(rstr), 8);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return (uint64_t)(
		(((uint32_t)ptr[i + 7]) << 0) |
		(((uint32_t)ptr[i + 6]) << 8) |
		(((uint32_t)ptr[i + 5]) << 16) |
		(((uint32_t)ptr[i + 4]) << 24) |
		0) | ((uint64_t)(
		(((uint32_t)ptr[i + 3]) << 0) |
		(((uint32_t)ptr[i + 2]) << 8) |
		(((uint32_t)ptr[i + 1]) << 16) |
		(((uint32_t)ptr[i + 0]) << 24) |
		0) << 32);
}
/* Signed variant: the plain cast is sufficient for a full 64-bit word. */
static int64_t
get_sint64_be(VALUE rstr, VALUE ri)
{
	int64_t res = (int64_t)get_int64_be(rstr, ri);
	return res;
}
static VALUE
rb_get_int64_be(int argc, VALUE *argv, VALUE self)
{
	return U642NUM(get_int64_be(argv[0], check_argc(argc, argv)));
}
static VALUE
rb_get_sint64_be(int argc, VALUE *argv, VALUE self)
{
	return I642NUM(get_sint64_be(argv[0], check_argc(argc, argv)));
}
/* Destructive: read 8 bytes from the head of +rstr+, drop them, return. */
static VALUE
rb_slice_int64_be(VALUE self, VALUE rstr)
{
	uint64_t res = get_int64_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 8);
	return U642NUM(res);
}
/* Destructive: read a signed 64-bit big-endian integer from the head of
 * +rstr+, drop the 8 consumed bytes, and return it. */
static VALUE
rb_slice_sint64_be(VALUE self, VALUE rstr)
{
	/* signed local (was uint64_t): at 64 bits a negative value really did
	 * pass through an unsigned intermediate before I642NUM */
	int64_t res = get_sint64_be(rstr, INT2FIX(0));
	rb_str_drop_bytes(rstr, 8);
	return I642NUM(res);
}
/* Append all 8 bytes of +v+ to +rstr+, big-endian. */
static void
append_int64_be(VALUE rstr, int64_t v)
{
	char a[] = {
		(v >> 56) & 0xff,
		(v >> 48) & 0xff,
		(v >> 40) & 0xff,
		(v >> 32) & 0xff,
		(v >> 24) & 0xff,
		(v >> 16) & 0xff,
		(v >> 8) & 0xff,
		(v >> 0) & 0xff,
		0
	};
	rb_str_cat(rstr, a, 8);
}
/* safe_int64_t (defined earlier in this file) converts the Ruby value
 * with range checking suited to a full 64-bit destination. */
static VALUE
append_var_int64_be(int argc, VALUE* argv, VALUE str)
{
	int i;
	for(i = 0; i < argc; i++) {
		append_int64_be(str, safe_int64_t(argv[i]));
	}
	return str;
}
static VALUE
rb_append_int64_be(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 64);
	return append_var_int64_be(args.argc, args.argv, args.str);
}
/* BER */
/* Parse a BER "compressed" unsigned integer (base-128, big-endian, high
 * bit set on every byte except the last — same format as Perl's
 * pack("w")).  +ptr+ points at the first byte to read; +i+ is an in/out
 * absolute byte index into the string and +max+ the string length, so
 * the bounds check below guards each continuation byte.  Raises
 * ArgumentError on overflow past 2**64 or a truncated sequence. */
static uint64_t
parse_ber(const uint8_t *ptr, long max, long *i)
{
	uint64_t res = 0;
	while (1) {
		if (*ptr < 128) {
			/* final byte: high bit clear */
			res += *ptr;
			break;
		}
		/* pre-multiply overflow guard for the *128 below */
		if (res > LLU(0xFFFFFFFFFFFFFFFF) / 128) {
			rb_raise(rb_eArgError, "BER integer is greater then 2**64, could not parse such big");
		}
		res = (res + ((*ptr) - 128)) * 128;
		ptr++;
		if (++(*i) >= max) {
			rb_raise(rb_eArgError, "String unexpectedly finished while parsing BER integer");
		}
	}
	return res;
}
/* Read a BER integer from +rstr+ starting at byte offset +ri+. */
static uint64_t
get_ber(VALUE rstr, VALUE ri)
{
	long i = NUM2LONG(ri), len;
	const uint8_t *ptr;
	StringValue(rstr);
	len = RSTRING_LEN(rstr);
	i = check_size(i, len, 1);
	ptr = (const uint8_t*)RSTRING_PTR(rstr) + i;
	return parse_ber(ptr, len, &i);
}
static VALUE
rb_get_ber(int argc, VALUE *argv, VALUE self)
{
	return U642NUM(get_ber(argv[0], check_argc(argc, argv)));
}
/* Parse a BER integer from the head of +rstr+; on return *i is the index
 * of the last consumed byte (caller drops i+1 bytes). */
static uint64_t
slice_ber(VALUE rstr, long *i)
{
	long len;
	const uint8_t *ptr;
	StringValue(rstr);
	len = RSTRING_LEN(rstr);
	ptr = (const uint8_t*)RSTRING_PTR(rstr);
	return parse_ber(ptr, len, i);
}
/* Destructive: parse a BER-encoded unsigned integer from the head of
 * +rstr+, drop the consumed bytes (slice_ber leaves +i+ at the last
 * consumed index, hence i+1), and return it as a Ruby integer. */
static VALUE
rb_slice_ber(VALUE self, VALUE rstr)
{
	long i = 0;
	/* unsigned local (was int64_t): BER values are unsigned and are boxed
	 * with U642NUM, so results above 2**63 no longer pass through a
	 * signed intermediate */
	uint64_t res = slice_ber(rstr, &i);
	rb_str_drop_bytes(rstr, i+1);
	return U642NUM(res);
}
/* Append +ber+ to +rstr+ in BER format and return the number of bytes
 * written (at most 10 for a 64-bit value).  The buffer is pre-filled
 * with continuation bytes (0x80) and the digits are written from the
 * end, so only the final byte has its high bit clear. */
static int
append_ber(VALUE rstr, uint64_t ber)
{
	int i = 10;
	char a[11] = {128, 128, 128, 128,
	              128, 128 ,128 ,128,
	              128, 128, 0};
	do {
		a[i] += ber % 128;
		ber /= 128;
		i--;
	} while (ber);
	i++;
	rb_str_cat(rstr, a+i, 11-i);
	return 11-i;
}
/** APPEND BERSIZE **/
/* Generates rb_append_bersize_<type>: prefixes the payload with its
 * total byte size (args.argc * bytes) encoded as a BER integer, then
 * appends the values via the matching append_var_<type> helper. */
#define append_bersize_func(type, bytes) \
static VALUE \
rb_append_bersize_##type(int argc, VALUE* argv, VALUE self) \
{ \
	append_args args; \
	check_argc_append(argc, argv, &args, bytes * 8); \
	append_ber(args.str, args.argc * bytes); \
	return append_var_##type(args.argc, args.argv, args.str);\
}
append_bersize_func(int8, 1)
append_bersize_func(int16_le, 2)
append_bersize_func(int24_le, 3)
append_bersize_func(int32_le, 4)
append_bersize_func(int40_le, 5)
append_bersize_func(int48_le, 6)
append_bersize_func(int56_le, 7)
append_bersize_func(int64_le, 8)
append_bersize_func(int16_be, 2)
append_bersize_func(int24_be, 3)
append_bersize_func(int32_be, 4)
append_bersize_func(int40_be, 5)
append_bersize_func(int48_be, 6)
append_bersize_func(int56_be, 7)
append_bersize_func(int64_be, 8)
/* Generates rb_append_int32size_<type>_<end>: same as above but the size
 * prefix is a fixed 4-byte integer with the given endianness. */
#define append_int32size_func(type, end, bytes) \
static VALUE \
rb_append_int32size_##type##_##end(int argc, VALUE* argv, VALUE self) \
{ \
	append_args args; \
	check_argc_append(argc, argv, &args, bytes * 8); \
	append_int32_##end(args.str, args.argc * bytes); \
	append_var_##type##_##end(args.argc, args.argv, args.str); \
	return args.str; \
}
append_int32size_func(int8, le, 1)
append_int32size_func(int16, le, 2)
append_int32size_func(int24, le, 3)
append_int32size_func(int32, le, 4)
append_int32size_func(int40, le, 5)
append_int32size_func(int48, le, 6)
append_int32size_func(int56, le, 7)
append_int32size_func(int64, le, 8)
append_int32size_func(int8, be, 1)
append_int32size_func(int16, be, 2)
append_int32size_func(int24, be, 3)
append_int32size_func(int32, be, 4)
append_int32size_func(int40, be, 5)
append_int32size_func(int48, be, 6)
append_int32size_func(int56, be, 7)
append_int32size_func(int64, be, 8)
/** APPEND BER **/
/* Append every value as a BER integer; returns total bytes written. */
static long
append_var_ber(int argc, VALUE* argv, VALUE str)
{
	long i, bs = 0;
	for(i = 0; i < argc; i++) {
		bs += append_ber(str, safe_int64_t(argv[i]));
	}
	return bs;
}
static VALUE
rb_append_ber(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	check_argc_append(argc, argv, &args, 0);
	append_var_ber(args.argc, args.argv, args.str);
	return args.str;
}
static VALUE rb_append_bersize_string(VALUE self, VALUE str, VALUE add);
static const char zeros[4] = {0, 0, 0, 0};
/* BER-encode all values into a scratch string first so the BER size
 * prefix (written by rb_append_bersize_string) can be computed. */
static VALUE
rb_append_bersize_ber(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	VALUE add_str = rb_str_new(0, 0);
	check_argc_append(argc, argv, &args, 0);
	append_var_ber(args.argc, args.argv, add_str);
	return rb_append_bersize_string(self, args.str, add_str);
}
/* Reserve a 4-byte placeholder, append the BER-encoded values, then
 * back-patch the placeholder with the body size, little-endian. */
static VALUE
rb_append_int32size_ber_le(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	long ss, bs;
	uint8_t *ptr;
	check_argc_append(argc, argv, &args, 0);
	rb_str_cat(args.str, zeros, 4);
	ss = RSTRING_LEN(args.str) - 4;
	bs = append_var_ber(args.argc, args.argv, args.str);
	ptr = ((uint8_t*)RSTRING_PTR(args.str)) + ss;
	ptr[0] = bs & 255;
	ptr[1] = (bs >> 8) & 255;
	ptr[2] = (bs >> 16) & 255;
	ptr[3] = (bs >> 24) & 255;
	return args.str;
}
/* Same as above with a big-endian size prefix. */
static VALUE
rb_append_int32size_ber_be(int argc, VALUE* argv, VALUE self)
{
	append_args args;
	long ss, bs;
	uint8_t *ptr;
	check_argc_append(argc, argv, &args, 0);
	rb_str_cat(args.str, zeros, 4);
	ss = RSTRING_LEN(args.str) - 4;
	bs = append_var_ber(args.argc, args.argv, args.str);
	ptr = ((uint8_t*)RSTRING_PTR(args.str)) + ss;
	ptr[3] = bs & 255;
	ptr[2] = (bs >> 8) & 255;
	ptr[1] = (bs >> 16) & 255;
	ptr[0] = (bs >> 24) & 255;
	return args.str;
}
/** APPEND BER END **/
/** APPEND BER END **/
/** APPEND STRING **/
static VALUE
rb_append_string(VALUE self, VALUE str, VALUE add)
{
if (!RTEST(str)) str = rb_str_new(0, 0);
StringValue(add);
rb_str_cat(str, RSTRING_PTR(add), RSTRING_LEN(add));
RB_GC_GUARD(add);
return str;
}
static VALUE
rb_append_bersize_string(VALUE self, VALUE str, VALUE add)
{
if (!RTEST(str)) str = rb_str_new(0, 0);
StringValue(add);
append_ber(str, RSTRING_LEN(add));
rb_str_cat(str, RSTRING_PTR(add), RSTRING_LEN(add));
RB_GC_GUARD(add);
return str;
}
static VALUE
rb_append_int32size_string_le(VALUE self, VALUE str, VALUE add)
{
if (!RTEST(str)) str = rb_str_new(0, 0);
StringValue(add);
append_int32_le(str, RSTRING_LEN(add));
rb_str_cat(str, RSTRING_PTR(add), RSTRING_LEN(add));
RB_GC_GUARD(add);
return str;
}
static VALUE
rb_append_int32size_string_be(VALUE self, VALUE str, VALUE add)
{
if (!RTEST(str)) str = rb_str_new(0, 0);
StringValue(add);
append_int32_be(str, RSTRING_LEN(add));
rb_str_cat(str, RSTRING_PTR(add), RSTRING_LEN(add));
RB_GC_GUARD(add);
return str;
}
/** APPEND STRING END **/
/** APPEND COMPLEX **/
static VALUE
rb_append_int8_ber(int argc, VALUE *argv, VALUE self)
{
append_args2 args;
check_argc_append_2(argc, argv, &args, 8, 0);
append_var_int8(1, &args.int0, args.str);
append_var_ber(args.argc, args.argv, args.str);
return args.str;
}
/* Append one BER integer (args.int0) followed by the remaining values as
 * int8s — the mirror of rb_append_int8_ber above. */
static VALUE
rb_append_ber_int8(int argc, VALUE *argv, VALUE self)
{
	append_args2 args;
	/* width arguments were (8, 0), which validated the leading BER value
	 * as an 8-bit int and left the int8 tail unchecked; (0, 8) matches
	 * the pattern generated by append_int_ber for rb_append_ber_int##bits
	 * (first value is BER => 0, remaining values are 8-bit ints) */
	check_argc_append_2(argc, argv, &args, 0, 8);
	append_var_ber(1, &args.int0, args.str);
	return append_var_int8(args.argc, args.argv, args.str);
}
/* Generates the int<bits>+BER and BER+int<bits> append pairs: the first
 * argument (args.int0) is written in the leading format, the remaining
 * arguments in the trailing format.  The width arguments to
 * check_argc_append_2 are (leading bits, trailing bits), 0 meaning BER. */
#define append_int_ber(bits, end) \
static VALUE \
rb_append_int##bits##_ber_##end(int argc, VALUE *argv, VALUE self) \
{ \
	append_args2 args; \
	check_argc_append_2(argc, argv, &args, bits, 0); \
	append_var_int##bits##_##end(1, &args.int0, args.str); \
	append_var_ber(args.argc, args.argv, args.str); \
	return args.str; \
} \
static VALUE \
rb_append_ber_int##bits##_##end(int argc, VALUE *argv, VALUE self) \
{ \
	append_args2 args; \
	check_argc_append_2(argc, argv, &args, 0, bits); \
	append_var_ber(1, &args.int0, args.str); \
	return append_var_int##bits##_##end(args.argc, args.argv, args.str); \
}
append_int_ber(16, le)
append_int_ber(24, le)
append_int_ber(32, le)
append_int_ber(16, be)
append_int_ber(24, be)
append_int_ber(32, be)
/* Generates rb_append_int<bit1>_int<bit2>_<end>: first argument written
 * as a bit1-wide int, remaining arguments as bit2-wide ints. */
#define append_int_int(bit1, bit2, end) \
static VALUE \
rb_append_int##bit1##_int##bit2##_##end(int argc, VALUE *argv, VALUE self) \
{ \
	append_args2 args; \
	check_argc_append_2(argc, argv, &args, bit1, bit2); \
	append_var_int##bit1##_##end(1, &args.int0, args.str); \
	return append_var_int##bit2##_##end(args.argc, args.argv, args.str); \
}
append_int_int(8, 16, le)
append_int_int(8, 24, le)
append_int_int(8, 32, le)
append_int_int(16, 8, le)
append_int_int(16, 24, le)
append_int_int(16, 32, le)
append_int_int(24, 8, le)
append_int_int(24, 16, le)
append_int_int(24, 32, le)
append_int_int(32, 8, le)
append_int_int(32, 16, le)
append_int_int(32, 24, le)
append_int_int(8, 16, be)
append_int_int(8, 24, be)
append_int_int(8, 32, be)
append_int_int(16, 8, be)
append_int_int(16, 24, be)
append_int_int(16, 32, be)
append_int_int(24, 8, be)
append_int_int(24, 16, be)
append_int_int(24, 32, be)
append_int_int(32, 8, be)
append_int_int(32, 16, be)
append_int_int(32, 24, be)
/** APPEND COMPLEX END **/
/*
 * Extension entry point, called by the Ruby VM when the native library is
 * loaded.  Defines BinUtils::Native and registers every generated reader,
 * slicer and appender (see the macro expansions above) as a method on it.
 */
void
Init_bin_utils()
{
    VALUE mod_bin_utils = rb_define_module("BinUtils");
    VALUE mod_native = rb_define_module_under(mod_bin_utils, "Native");
    /* Cache symbol IDs used elsewhere in this extension. */
    rshft = rb_intern(">>");
    band = rb_intern("&");
#ifndef HAVE_RB_STR_DROP_BYTES
    aslice = rb_intern("slice!");
#endif
    /* Readers (rb_get_*). */
    rb_define_method(mod_native, "get_ber", rb_get_ber, -1);
    rb_define_method(mod_native, "get_int8", rb_get_int8, -1);
    rb_define_method(mod_native, "get_sint8", rb_get_sint8, -1);
    rb_define_method(mod_native, "get_int16_le", rb_get_int16_le, -1);
    rb_define_method(mod_native, "get_sint16_le", rb_get_sint16_le, -1);
    rb_define_method(mod_native, "get_int16_be", rb_get_int16_be, -1);
    rb_define_method(mod_native, "get_sint16_be", rb_get_sint16_be, -1);
    rb_define_method(mod_native, "get_int24_le", rb_get_int24_le, -1);
    rb_define_method(mod_native, "get_sint24_le", rb_get_sint24_le, -1);
    rb_define_method(mod_native, "get_int24_be", rb_get_int24_be, -1);
    rb_define_method(mod_native, "get_sint24_be", rb_get_sint24_be, -1);
    rb_define_method(mod_native, "get_int32_le", rb_get_int32_le, -1);
    rb_define_method(mod_native, "get_sint32_le", rb_get_sint32_le, -1);
    rb_define_method(mod_native, "get_int32_be", rb_get_int32_be, -1);
    rb_define_method(mod_native, "get_sint32_be", rb_get_sint32_be, -1);
    rb_define_method(mod_native, "get_int40_le", rb_get_int40_le, -1);
    rb_define_method(mod_native, "get_sint40_le", rb_get_sint40_le, -1);
    rb_define_method(mod_native, "get_int40_be", rb_get_int40_be, -1);
    rb_define_method(mod_native, "get_sint40_be", rb_get_sint40_be, -1);
    rb_define_method(mod_native, "get_int48_le", rb_get_int48_le, -1);
    rb_define_method(mod_native, "get_sint48_le", rb_get_sint48_le, -1);
    rb_define_method(mod_native, "get_int48_be", rb_get_int48_be, -1);
    rb_define_method(mod_native, "get_sint48_be", rb_get_sint48_be, -1);
    rb_define_method(mod_native, "get_int56_le", rb_get_int56_le, -1);
    rb_define_method(mod_native, "get_sint56_le", rb_get_sint56_le, -1);
    rb_define_method(mod_native, "get_int56_be", rb_get_int56_be, -1);
    rb_define_method(mod_native, "get_sint56_be", rb_get_sint56_be, -1);
    rb_define_method(mod_native, "get_int64_le", rb_get_int64_le, -1);
    rb_define_method(mod_native, "get_sint64_le", rb_get_sint64_le, -1);
    rb_define_method(mod_native, "get_int64_be", rb_get_int64_be, -1);
    rb_define_method(mod_native, "get_sint64_be", rb_get_sint64_be, -1);
    /* Destructive variants (rb_slice_*); the bang names signal mutation. */
    rb_define_method(mod_native, "slice_ber!", rb_slice_ber, 1);
    rb_define_method(mod_native, "slice_int8!", rb_slice_int8, 1);
    rb_define_method(mod_native, "slice_sint8!", rb_slice_sint8, 1);
    rb_define_method(mod_native, "slice_int16_le!", rb_slice_int16_le, 1);
    rb_define_method(mod_native, "slice_sint16_le!", rb_slice_sint16_le, 1);
    rb_define_method(mod_native, "slice_int16_be!", rb_slice_int16_be, 1);
    rb_define_method(mod_native, "slice_sint16_be!", rb_slice_sint16_be, 1);
    rb_define_method(mod_native, "slice_int24_le!", rb_slice_int24_le, 1);
    rb_define_method(mod_native, "slice_sint24_le!", rb_slice_sint24_le, 1);
    rb_define_method(mod_native, "slice_int24_be!", rb_slice_int24_be, 1);
    rb_define_method(mod_native, "slice_sint24_be!", rb_slice_sint24_be, 1);
    rb_define_method(mod_native, "slice_int32_le!", rb_slice_int32_le, 1);
    rb_define_method(mod_native, "slice_sint32_le!", rb_slice_sint32_le, 1);
    rb_define_method(mod_native, "slice_int32_be!", rb_slice_int32_be, 1);
    rb_define_method(mod_native, "slice_sint32_be!", rb_slice_sint32_be, 1);
    rb_define_method(mod_native, "slice_int40_le!", rb_slice_int40_le, 1);
    rb_define_method(mod_native, "slice_sint40_le!", rb_slice_sint40_le, 1);
    rb_define_method(mod_native, "slice_int40_be!", rb_slice_int40_be, 1);
    rb_define_method(mod_native, "slice_sint40_be!", rb_slice_sint40_be, 1);
    rb_define_method(mod_native, "slice_int48_le!", rb_slice_int48_le, 1);
    rb_define_method(mod_native, "slice_sint48_le!", rb_slice_sint48_le, 1);
    rb_define_method(mod_native, "slice_int48_be!", rb_slice_int48_be, 1);
    rb_define_method(mod_native, "slice_sint48_be!", rb_slice_sint48_be, 1);
    rb_define_method(mod_native, "slice_int56_le!", rb_slice_int56_le, 1);
    rb_define_method(mod_native, "slice_sint56_le!", rb_slice_sint56_le, 1);
    rb_define_method(mod_native, "slice_int56_be!", rb_slice_int56_be, 1);
    rb_define_method(mod_native, "slice_sint56_be!", rb_slice_sint56_be, 1);
    rb_define_method(mod_native, "slice_int64_le!", rb_slice_int64_le, 1);
    rb_define_method(mod_native, "slice_sint64_le!", rb_slice_sint64_le, 1);
    rb_define_method(mod_native, "slice_int64_be!", rb_slice_int64_be, 1);
    rb_define_method(mod_native, "slice_sint64_be!", rb_slice_sint64_be, 1);
    /* Appenders (rb_append_*). */
    rb_define_method(mod_native, "append_ber!", rb_append_ber, -1);
    rb_define_method(mod_native, "append_int8!", rb_append_int8, -1);
    rb_define_method(mod_native, "append_int16_le!", rb_append_int16_le, -1);
    rb_define_method(mod_native, "append_int16_be!", rb_append_int16_be, -1);
    rb_define_method(mod_native, "append_int24_le!", rb_append_int24_le, -1);
    rb_define_method(mod_native, "append_int24_be!", rb_append_int24_be, -1);
    rb_define_method(mod_native, "append_int32_le!", rb_append_int32_le, -1);
    rb_define_method(mod_native, "append_int32_be!", rb_append_int32_be, -1);
    rb_define_method(mod_native, "append_int40_le!", rb_append_int40_le, -1);
    rb_define_method(mod_native, "append_int40_be!", rb_append_int40_be, -1);
    rb_define_method(mod_native, "append_int48_le!", rb_append_int48_le, -1);
    rb_define_method(mod_native, "append_int48_be!", rb_append_int48_be, -1);
    rb_define_method(mod_native, "append_int56_le!", rb_append_int56_le, -1);
    rb_define_method(mod_native, "append_int56_be!", rb_append_int56_be, -1);
    rb_define_method(mod_native, "append_int64_le!", rb_append_int64_le, -1);
    rb_define_method(mod_native, "append_int64_be!", rb_append_int64_be, -1);
    /* Appenders with a BER-encoded size prefix. */
    rb_define_method(mod_native, "append_bersize_ber!", rb_append_bersize_ber, -1);
    rb_define_method(mod_native, "append_bersize_int8!", rb_append_bersize_int8, -1);
    rb_define_method(mod_native, "append_bersize_int16_le!", rb_append_bersize_int16_le, -1);
    rb_define_method(mod_native, "append_bersize_int16_be!", rb_append_bersize_int16_be, -1);
    rb_define_method(mod_native, "append_bersize_int24_le!", rb_append_bersize_int24_le, -1);
    rb_define_method(mod_native, "append_bersize_int24_be!", rb_append_bersize_int24_be, -1);
    rb_define_method(mod_native, "append_bersize_int32_le!", rb_append_bersize_int32_le, -1);
    rb_define_method(mod_native, "append_bersize_int32_be!", rb_append_bersize_int32_be, -1);
    rb_define_method(mod_native, "append_bersize_int40_le!", rb_append_bersize_int40_le, -1);
    rb_define_method(mod_native, "append_bersize_int40_be!", rb_append_bersize_int40_be, -1);
    rb_define_method(mod_native, "append_bersize_int48_le!", rb_append_bersize_int48_le, -1);
    rb_define_method(mod_native, "append_bersize_int48_be!", rb_append_bersize_int48_be, -1);
    rb_define_method(mod_native, "append_bersize_int56_le!", rb_append_bersize_int56_le, -1);
    rb_define_method(mod_native, "append_bersize_int56_be!", rb_append_bersize_int56_be, -1);
    rb_define_method(mod_native, "append_bersize_int64_le!", rb_append_bersize_int64_le, -1);
    rb_define_method(mod_native, "append_bersize_int64_be!", rb_append_bersize_int64_be, -1);
    /* Appenders with a 32-bit size prefix. */
    rb_define_method(mod_native, "append_int32size_ber_le!", rb_append_int32size_ber_le, -1);
    rb_define_method(mod_native, "append_int32size_int8_le!", rb_append_int32size_int8_le, -1);
    rb_define_method(mod_native, "append_int32size_int16_le!", rb_append_int32size_int16_le, -1);
    rb_define_method(mod_native, "append_int32size_int24_le!", rb_append_int32size_int24_le, -1);
    rb_define_method(mod_native, "append_int32size_int32_le!", rb_append_int32size_int32_le, -1);
    rb_define_method(mod_native, "append_int32size_int40_le!", rb_append_int32size_int40_le, -1);
    rb_define_method(mod_native, "append_int32size_int48_le!", rb_append_int32size_int48_le, -1);
    rb_define_method(mod_native, "append_int32size_int56_le!", rb_append_int32size_int56_le, -1);
    rb_define_method(mod_native, "append_int32size_int64_le!", rb_append_int32size_int64_le, -1);
    rb_define_method(mod_native, "append_int32size_ber_be!", rb_append_int32size_ber_be, -1);
    rb_define_method(mod_native, "append_int32size_int8_be!", rb_append_int32size_int8_be, -1);
    rb_define_method(mod_native, "append_int32size_int16_be!", rb_append_int32size_int16_be, -1);
    rb_define_method(mod_native, "append_int32size_int24_be!", rb_append_int32size_int24_be, -1);
    rb_define_method(mod_native, "append_int32size_int32_be!", rb_append_int32size_int32_be, -1);
    rb_define_method(mod_native, "append_int32size_int40_be!", rb_append_int32size_int40_be, -1);
    rb_define_method(mod_native, "append_int32size_int48_be!", rb_append_int32size_int48_be, -1);
    rb_define_method(mod_native, "append_int32size_int56_be!", rb_append_int32size_int56_be, -1);
    rb_define_method(mod_native, "append_int32size_int64_be!", rb_append_int32size_int64_be, -1);
    /* String appenders. */
    rb_define_method(mod_native, "append_string!", rb_append_string, 2);
    rb_define_method(mod_native, "append_bersize_string!", rb_append_bersize_string, 2);
    rb_define_method(mod_native, "append_int32size_string_le!", rb_append_int32size_string_le, 2);
    rb_define_method(mod_native, "append_int32size_string_be!", rb_append_int32size_string_be, 2);
    /* Two-value combination appenders (generated by the macros above). */
    rb_define_method(mod_native, "append_int8_ber!", rb_append_int8_ber, -1);
    rb_define_method(mod_native, "append_ber_int8!", rb_append_ber_int8, -1);
    rb_define_method(mod_native, "append_int8_int16_le!", rb_append_int8_int16_le, -1);
    rb_define_method(mod_native, "append_int8_int24_le!", rb_append_int8_int24_le, -1);
    rb_define_method(mod_native, "append_int8_int32_le!", rb_append_int8_int32_le, -1);
    rb_define_method(mod_native, "append_int8_int16_be!", rb_append_int8_int16_be, -1);
    rb_define_method(mod_native, "append_int8_int24_be!", rb_append_int8_int24_be, -1);
    rb_define_method(mod_native, "append_int8_int32_be!", rb_append_int8_int32_be, -1);
    rb_define_method(mod_native, "append_int16_int8_le!", rb_append_int16_int8_le, -1);
    rb_define_method(mod_native, "append_int16_int24_le!", rb_append_int16_int24_le, -1);
    rb_define_method(mod_native, "append_int16_int32_le!", rb_append_int16_int32_le, -1);
    rb_define_method(mod_native, "append_int16_int8_be!", rb_append_int16_int8_be, -1);
    rb_define_method(mod_native, "append_int16_int24_be!", rb_append_int16_int24_be, -1);
    rb_define_method(mod_native, "append_int16_int32_be!", rb_append_int16_int32_be, -1);
    rb_define_method(mod_native, "append_int24_int16_le!", rb_append_int24_int16_le, -1);
    rb_define_method(mod_native, "append_int24_int8_le!", rb_append_int24_int8_le, -1);
    rb_define_method(mod_native, "append_int24_int32_le!", rb_append_int24_int32_le, -1);
    rb_define_method(mod_native, "append_int24_int16_be!", rb_append_int24_int16_be, -1);
    rb_define_method(mod_native, "append_int24_int8_be!", rb_append_int24_int8_be, -1);
    rb_define_method(mod_native, "append_int24_int32_be!", rb_append_int24_int32_be, -1);
    rb_define_method(mod_native, "append_int32_int16_le!", rb_append_int32_int16_le, -1);
    rb_define_method(mod_native, "append_int32_int24_le!", rb_append_int32_int24_le, -1);
    rb_define_method(mod_native, "append_int32_int8_le!", rb_append_int32_int8_le, -1);
    rb_define_method(mod_native, "append_int32_int16_be!", rb_append_int32_int16_be, -1);
    rb_define_method(mod_native, "append_int32_int24_be!", rb_append_int32_int24_be, -1);
    rb_define_method(mod_native, "append_int32_int8_be!", rb_append_int32_int8_be, -1);
    rb_define_method(mod_native, "append_ber_int16_le!", rb_append_ber_int16_le, -1);
    rb_define_method(mod_native, "append_ber_int24_le!", rb_append_ber_int24_le, -1);
    rb_define_method(mod_native, "append_ber_int32_le!", rb_append_ber_int32_le, -1);
    rb_define_method(mod_native, "append_ber_int16_be!", rb_append_ber_int16_be, -1);
    rb_define_method(mod_native, "append_ber_int24_be!", rb_append_ber_int24_be, -1);
    rb_define_method(mod_native, "append_ber_int32_be!", rb_append_ber_int32_be, -1);
    rb_define_method(mod_native, "append_int16_ber_le!", rb_append_int16_ber_le, -1);
    rb_define_method(mod_native, "append_int24_ber_le!", rb_append_int24_ber_le, -1);
    rb_define_method(mod_native, "append_int32_ber_le!", rb_append_int32_ber_le, -1);
    rb_define_method(mod_native, "append_int16_ber_be!", rb_append_int16_ber_be, -1);
    rb_define_method(mod_native, "append_int24_ber_be!", rb_append_int24_ber_be, -1);
    rb_define_method(mod_native, "append_int32_ber_be!", rb_append_int32_ber_be, -1);
    /* Extending the module with itself also exposes the instance methods
     * above as module-level methods (BinUtils::Native.get_ber ...). */
    rb_extend_object(mod_native, mod_native);
}
|
#!/bin/bash
# CI helper: records release metadata for later build stages, then runs the
# Go unit tests inside a disposable golang:1.12-alpine container.
set -o errexit
set -o nounset
set -o pipefail
export RELEASE="dev"
# NOTE(review): nounset is active, so this aborts if TRAVIS_BRANCH is unset
# (i.e. when run outside Travis) -- presumably intended; confirm.
export branch=${TRAVIS_BRANCH}
# Persist the values so later stages can source ~/environment.sh.
echo "export RELEASE=$RELEASE" >> ~/environment.sh
echo "export branch=$branch" >> ~/environment.sh
echo "**************************************************************"
echo "***************** Running Unit Tests *************************"
echo "**************************************************************"
BUILD_IMAGE="golang:1.12-alpine"
# Mount the checkout at its GOPATH location and run `go test` for all packages.
docker run --rm -v "$(pwd)":/go/src/github.com/pearsontechnology/environment-operator \
    -w /go/src/github.com/pearsontechnology/environment-operator \
    ${BUILD_IMAGE} \
    /bin/sh -c "apk update && apk add git gcc musl-dev && go test -v ./..."
|
package com.twu.biblioteca.options;
/**
 * Unit tests for the "verify book" menu option.
 *
 * Created by gdias on 8/5/15.
 */
public class VerifyBookOptionTest {
    // TODO Test -- no cases implemented yet; add coverage for the verify-book flow.
}
|
import {
IsEmail,
IsNotEmpty,
IsNumber,
IsOptional,
IsPhoneNumber,
IsString,
Min,
} from 'class-validator';
/**
 * Validated request payload for creating a donation.
 *
 * Fields decorated with @IsOptional() are now also *typed* as optional
 * (`?`) so the static type matches the runtime validation contract; the
 * originals declared them as required.
 */
export class CreateDonationInput {
  /** Name shown publicly alongside the donation. */
  @IsNotEmpty()
  @IsString()
  displayName: string;

  /** Number of units donated; must be at least 1. */
  @IsNotEmpty()
  @IsNumber()
  @Min(1)
  count: number;

  /** Contact e-mail address of the donor. */
  @IsNotEmpty()
  @IsEmail()
  email: string;

  /**
   * Donor phone number, optional.
   * NOTE(review): @IsPhoneNumber() without a region argument only accepts
   * numbers in international (+CC...) format -- confirm that is intended.
   */
  @IsOptional()
  @IsPhoneNumber()
  mobile?: string;

  /** Optional team the donation is credited to. */
  @IsOptional()
  @IsString()
  team?: string;

  /** Optional free-form message from the donor. */
  @IsOptional()
  @IsString()
  message?: string;
}
|
#!/bin/bash -eu
# Provision a Vagrant demo box: install the CLAVIN geo index and deploy the
# Lumify web app and its configuration under /opt/lumify.
# Create Lumify install dir
echo "Create Lumify install dir"
mkdir -p /opt/lumify
chown root:root /opt/lumify
# NOTE(review): mode 600 on a *directory* removes the execute (traverse)
# bit, so nothing can enter it; 700 looks like the intended mode -- confirm.
chmod 600 /opt/lumify
# Install CLAVIN index
echo "Install CLAVIN index"
/bin/bash /vagrant/vagrant/scripts/install-clavin.sh
# Install Lumify
echo "Install Lumify Web App"
# Deploy the WAR as Jetty's root web application.
cp /vagrant/web/war/target/lumify-web-war-*.war /opt/jetty/webapps/root.war
mkdir -p /opt/lumify/config /opt/lumify/ontology /opt/lumify/lib /opt/lumify/logs
cp /vagrant/config/log4j.xml /opt/lumify/config/log4j.xml
cp /vagrant/vagrant/demo/lumify.properties /opt/lumify/config/lumify.properties
# Copy model/configuration directories used by the app.
cp -R /vagrant/config/knownEntities /opt/lumify/config
cp -R /vagrant/config/opencv /opt/lumify/config
cp -R /vagrant/config/opennlp /opt/lumify/config
mkdir -p /opt/lumify/config/ontology
cp -R /vagrant/examples/ontology-minimal /opt/lumify/config/ontology
|
export * from './services.dao';
export * from './service.model';
|
<reponame>NullaDev/WebGL-STG-Engine
import { Shape, ShapeCircle, ShapedInstance, ShapedSprite, ShapeDualArc, SSPoint } from "../util/Shape";
import { RECT, RENDER_TYPE } from "../util/SpriteManager";
import { EntityPool } from "../stage/EntityPool";
import { Config, Entity, EntityAny, State } from "./Entity";
/**
 * Collision shape for a ray/laser segment (see SIRay: origin (px, py),
 * direction `dir` in radians, length `len`, width ratio `w`).  The distance
 * function for the beam body is pluggable; the static methods below are the
 * provided implementations.
 */
export class ShapeRay extends Shape<SIRay> {
    // Distance from (x, y) to the beam body; by convention smaller/negative
    // values mean closer/overlapping (see the subtractions below).
    private readonly func: (self: SIRay, x: number, y: number) => number;
    constructor(f: (self: SIRay, x: number, y: number) => number) {
        super();
        this.func = f;
    }
    /**
     * Distance from (x, y) to the whole laser: the minimum of the body
     * distance and the distances to the base/end cap shapes placed at the
     * two ends of the segment.
     */
    public distanceTo(self: SIRay, x: number, y: number): number {
        return Math.min(this.func(self, x, y),
            self.shaped_sprite.base.shape.rawDistanceTo(self.px, self.py, 1, x, y),
            self.shaped_sprite.end.shape.rawDistanceTo(
                self.px + self.len * Math.cos(self.dir),
                self.py + self.len * Math.sin(self.dir), 1, x, y));
    }
    /**
     * Body distance treating the beam as a line segment whose endpoints are
     * inset by hitbox_width, minus the current (w-scaled) hit width.
     */
    public static line_circle(self: SIRay, x: number, y: number): number {
        // Segment endpoints, pulled in by hitbox_width from the ray ends.
        const x0 = self.px + self.shaped_sprite.hitbox_width * Math.cos(self.dir);
        const y0 = self.py + self.shaped_sprite.hitbox_width * Math.sin(self.dir);
        const x1 = self.px + (self.len - self.shaped_sprite.hitbox_width) * Math.cos(self.dir);
        const y1 = self.py + (self.len - self.shaped_sprite.hitbox_width) * Math.sin(self.dir);
        const rl = Math.sqrt((x0 - x1) ** 2 + (y0 - y1) ** 2);           // segment length
        // |2D cross product| = perpendicular distance from (x, y) * rl.
        const dis = Math.abs((x1 - x0) * (y0 - y) - (x0 - x) * (y1 - y0));
        const d0 = Math.sqrt((x0 - x) ** 2 + (y0 - y) ** 2);             // distance to one end
        const d1 = Math.sqrt((x1 - x) ** 2 + (y1 - y) ** 2);             // distance to the other
        // Obtuse-angle test: if the projection of (x, y) falls outside the
        // segment, the nearest point is an endpoint.
        if (Math.max(d0, d1) ** 2 > Math.min(d0, d1) ** 2 + rl ** 2)
            return Math.min(d0, d1) - self.shaped_sprite.hitbox_width * self.w;
        return Math.min(dis / rl, d0, d1) - self.shaped_sprite.hitbox_width * self.w;
    }
    /**
     * Body distance delegating to ShapeDualArc: the query point is rotated
     * into the ray's local frame (ray along +x) and the arc is centred at
     * mid-length.
     */
    public static double_arc(self: SIRay, px: number, py: number): number {
        const x = px - self.px;
        const y = py - self.py;
        const rx = x * Math.cos(-self.dir) - y * Math.sin(-self.dir);
        const ry = x * Math.sin(-self.dir) + y * Math.cos(-self.dir);
        return ShapeDualArc.orthDis(rx - self.len / 2, ry, self.len / 2, self.w * self.shaped_sprite.hitbox_width);
    }
    /**
     * Like double_arc but anchored at the origin.  Points behind the origin
     * (rx < 0) get the plain Euclidean distance to the origin -- note this
     * branch has no width subtraction; presumably intentional, confirm.
     */
    public static half_arc(self: SIRay, px: number, py: number): number {
        const x = px - self.px;
        const y = py - self.py;
        const rx = x * Math.cos(-self.dir) - y * Math.sin(-self.dir);
        const ry = x * Math.sin(-self.dir) + y * Math.cos(-self.dir);
        if (rx < 0)
            return Math.sqrt(rx ** 2 + ry ** 2);
        return ShapeDualArc.orthDis(rx, ry, self.len, self.w * self.shaped_sprite.hitbox_width);
    }
}
/**
 * Lifecycle phases of a RayLaser (advanced by RayLaser.checkState).
 * NOTE(review): `const enum` is incompatible with isolatedModules/some
 * bundlers -- confirm the build supports it before reusing elsewhere.
 */
export const enum RayLaserState {
    WARNING,   // telegraphed but not yet harmful
    OPENING,   // width ramping 0 -> 1
    OPENED,    // fully open (entity State.ALIVE)
    CLOSING    // width ramping 1 -> 0, then destroyed
}
/** Observer hooks fired at the corresponding points of a laser's life. */
export interface RayLaserEventListener {
    onInit: ((self: RayLaser) => void)[],
    onStateChange: ((self: RayLaser) => void)[],
    onUpdate: ((self: RayLaser, rate: number) => void)[],
    onPostMotion: ((self: RayLaser, rate: number) => void)[],
    onPostUpdate: ((self: RayLaser) => void)[],
    onDestroy: ((self: RayLaser) => void)[],
    onAttack: ((self: RayLaser, target: EntityAny) => void)[],
    onContact: ((self: RayLaser, other: EntityAny) => void)[],
}
/**
 * Phase durations (in update ticks, scaled by the pool's time_rate) plus
 * the listener hooks.  The phases run back-to-back in declaration order.
 */
export type RayLaserConfig = Config & {
    warning_time: number,
    open_time: number,
    alive_time: number,
    close_time: number,
    listener: RayLaserEventListener
}
/** Per-update movement callback; time_rate is the global time-scale factor. */
export type RayLaserMotion = (self: RayLaser, time_rate: number) => void;
/** Sprite/shape bundle for a ray laser: beam texture plus two end-cap sprites. */
export class SSRay extends ShapedSprite<SSRay, RENDER_TYPE.RECT, SIRay, ShapeRay> {
    public sprite_width: number;   // beam render width scale (multiplied by w in SIRay.render)
    public hitbox_width: number;   // collision width used by the ShapeRay distance functions
    public l_ratio: number;        // rendered-length / logical-length ratio of the beam quad
    public base: SSPoint<ShapeCircle>;  // sprite/shape drawn at the ray origin
    public end: SSPoint<ShapeCircle>;   // sprite/shape drawn at the far end
}
/**
 * Renderable instance of a ray laser: origin (px, py), direction `dir`
 * (radians), length `len` and current width ratio `w` (0..1, animated by
 * RayLaser).  Contributes three rect quads to the sprite batch: the
 * stretched beam body plus one sprite at each end of the segment.
 */
export class SIRay extends ShapedInstance<SIRay, RENDER_TYPE.RECT, ShapeRay, SSRay> implements RECT {
    public px: number;   // origin x
    public py: number;   // origin y
    public dir: number;  // direction in radians
    public len: number;  // beam length
    public w: number;    // width ratio, 0..1 (0 = fully closed)
    constructor(ss: SSRay) {
        super(RENDER_TYPE.RECT, ss);
        this.w = 0; // lasers start fully closed
    }
    /** Number of rect quads this instance writes in render(). */
    rectCount(): number {
        return 3;
    }
    /**
     * Writes 3 quads into the packed stream, 10 floats per quad:
     * x, y, rotation, two size components, a 4-float UV rect, and a final
     * slot written as 1 (looks like an alpha/enable flag -- confirm against
     * the sprite batcher).
     * Change vs. original: `let` replaces the function-scoped `var` for the
     * reused sprite locals (no-var idiom; behaviour identical).
     */
    render(xyrwh: Float32Array, i: number): void {
        // Quad 1: the beam body, centred l_ratio * len / 2 along the ray.
        const l = this.shaped_sprite.l_ratio * this.len / 2;
        xyrwh[i * 10 + 0] = this.px + l * Math.cos(this.dir);
        xyrwh[i * 10 + 1] = this.py + l * Math.sin(this.dir);
        xyrwh[i * 10 + 2] = this.dir + Math.PI / 2;
        xyrwh[i * 10 + 3] = Math.max(1, this.shaped_sprite.sprite_width * this.w);
        xyrwh[i * 10 + 4] = l;
        let sprite = this.shaped_sprite.sprite;
        xyrwh[i * 10 + 5] = sprite.tx / sprite.sprite.w;
        xyrwh[i * 10 + 6] = sprite.ty / sprite.sprite.h;
        xyrwh[i * 10 + 7] = sprite.tw / sprite.sprite.w;
        xyrwh[i * 10 + 8] = sprite.th / sprite.sprite.h;
        xyrwh[i * 10 + 9] = 1;
        i++;
        // Quad 2: the base cap at the ray origin.
        let ss = this.shaped_sprite.base;
        xyrwh[i * 10 + 0] = this.px;
        xyrwh[i * 10 + 1] = this.py;
        xyrwh[i * 10 + 2] = this.dir + Math.PI / 2;
        xyrwh[i * 10 + 3] = ss.w / 2;
        xyrwh[i * 10 + 4] = ss.h / 2;
        sprite = ss.sprite;
        xyrwh[i * 10 + 5] = sprite.tx / sprite.sprite.w;
        xyrwh[i * 10 + 6] = sprite.ty / sprite.sprite.h;
        xyrwh[i * 10 + 7] = sprite.tw / sprite.sprite.w;
        xyrwh[i * 10 + 8] = sprite.th / sprite.sprite.h;
        xyrwh[i * 10 + 9] = 1;
        i++;
        // Quad 3: the end cap at the far end of the segment.
        ss = this.shaped_sprite.end;
        xyrwh[i * 10 + 0] = this.px + this.len * Math.cos(this.dir);
        xyrwh[i * 10 + 1] = this.py + this.len * Math.sin(this.dir);
        xyrwh[i * 10 + 2] = this.dir + Math.PI / 2;
        xyrwh[i * 10 + 3] = ss.w / 2;
        xyrwh[i * 10 + 4] = ss.h / 2;
        sprite = ss.sprite;
        xyrwh[i * 10 + 5] = sprite.tx / sprite.sprite.w;
        xyrwh[i * 10 + 6] = sprite.ty / sprite.sprite.h;
        xyrwh[i * 10 + 7] = sprite.tw / sprite.sprite.w;
        xyrwh[i * 10 + 8] = sprite.th / sprite.sprite.h;
        xyrwh[i * 10 + 9] = 1;
    }
}
/**
 * A straight laser entity driving the SIRay geometry through a four-phase
 * lifecycle: WARNING -> OPENING -> OPENED -> CLOSING.  Phase boundaries are
 * the cumulative config times (warning_time, +open_time, +alive_time,
 * +close_time); `w` ramps 0->1 while opening and 1->0 while closing.
 */
export class RayLaser extends SIRay implements Entity<RayLaser, RENDER_TYPE.RECT, ShapeRay, SSRay> {
    public state: State = State.PRE_ENTRY;                 // generic entity lifecycle state
    public rstate: RayLaserState = RayLaserState.WARNING;  // laser-specific phase
    public config: RayLaserConfig;
    public motion: RayLaserMotion;                         // steers the laser each update
    public time: number = 0;                               // scaled ticks since creation
    public custom_fields: any = {};                        // scratch space for user scripts
    constructor(shaped_shape: SSRay, cf: RayLaserConfig, m: RayLaserMotion) {
        super(shaped_shape);
        this.config = cf;
        this.motion = m;
    }
    /** Places the laser and immediately evaluates the phase machine. */
    public init(px: number, py: number, dir: number, len: number) {
        this.len = len;
        this.dir = dir;
        this.px = px;
        this.py = py;
        this.checkState();
        return this;
    }
    /**
     * Phase machine.  tw/to/ta/tc are the cumulative end times of the
     * WARNING/OPENING/OPENED/CLOSING phases.  Fires onInit once (on first
     * call), onStateChange at each transition and onDestroy at the end.
     */
    private checkState() {
        if (this.state == State.PRE_ENTRY) {
            this.state = State.LEAVING;
            this.config.listener?.onInit?.forEach(e => e(this));
        }
        const tw = this.config.warning_time;
        const to = this.config.open_time + tw;
        const ta = this.config.alive_time + to;
        const tc = this.config.close_time + ta;
        if (this.rstate == RayLaserState.WARNING && this.time >= tw) {
            this.rstate = RayLaserState.OPENING;
            this.config.listener?.onStateChange?.forEach(e => e(this));
        }
        if (this.rstate == RayLaserState.OPENING) {
            // Linear 0 -> 1 ramp across the opening window.
            this.w = Math.min(1, (this.time - tw) / (to - tw))
            if (this.time >= to) {
                this.rstate = RayLaserState.OPENED;
                this.state = State.ALIVE;   // only now fully active
                this.w = 1;
                this.config.listener?.onStateChange?.forEach(e => e(this));
            }
        }
        if (this.rstate == RayLaserState.OPENED && this.time >= ta) {
            this.rstate = RayLaserState.CLOSING;
            this.state = State.LEAVING;
            this.config.listener?.onStateChange?.forEach(e => e(this));
        }
        if (this.rstate == RayLaserState.CLOSING) {
            // Linear 1 -> 0 ramp across the closing window.
            this.w = Math.max(0, 1 - (this.time - ta) / (tc - ta))
            if (this.time >= tc) {
                this.state = State.DEAD;
                this.config.listener?.onDestroy?.forEach(e => e(this));
            }
        }
    }
    /** Per-frame update: advance scaled time, run the phase machine, then motion. */
    public update(_: RayLaser) {
        // NOTE(review): duplicates the PRE_ENTRY handling in checkState();
        // harmless but redundant -- confirm before removing either copy.
        if (this.state == State.PRE_ENTRY) {
            this.state = State.LEAVING;
            this.config.listener?.onInit?.forEach(e => e(this));
        }
        const rate = EntityPool.INSTANCE.special_effects.time_rate;
        this.time += rate;
        this.checkState();
        this.config.listener?.onUpdate?.forEach(e => e(this, rate));
        this.motion(this, rate);
        this.config.listener?.onPostMotion?.forEach(e => e(this, rate));
    }
    public postUpdate(_: RayLaser) {
        this.config.listener?.onPostUpdate?.forEach(e => e(this));
    }
    /** Notifies listeners, then forwards the hit to the target's damaged(). */
    public attack(_: RayLaser, e: EntityAny) {
        this.config.listener?.onAttack?.forEach(x => x(this, e));
        e.damaged(e, this);
    }
    /**
     * Always returns false (presumably "laser itself not destroyed by
     * contact" -- confirm against the Entity contract); still notifies
     * onContact listeners.
     */
    public damaged(_: RayLaser, source: EntityAny) {
        this.config.listener?.onContact?.forEach(e => e(this, source));
        return false;
    }
}
<reponame>superdump/openwebrtc-gst-plugins<filename>ext/erdtls/src/erdtlsconnection.h
/*
* Copyright (c) 2014, Ericsson AB. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*/
#ifndef erdtlsconnection_h
#define erdtlsconnection_h
#include <glib-object.h>
G_BEGIN_DECLS
#define ER_TYPE_DTLS_CONNECTION (er_dtls_connection_get_type())
#define ER_DTLS_CONNECTION(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), ER_TYPE_DTLS_CONNECTION, ErDtlsConnection))
#define ER_DTLS_CONNECTION_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), ER_TYPE_DTLS_CONNECTION, ErDtlsConnectionClass))
#define ER_IS_DTLS_CONNECTION(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), ER_TYPE_DTLS_CONNECTION))
#define ER_IS_DTLS_CONNECTION_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), ER_TYPE_DTLS_CONNECTION))
#define ER_DTLS_CONNECTION_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), ER_TYPE_DTLS_CONNECTION, ErDtlsConnectionClass))
typedef struct _ErDtlsConnection ErDtlsConnection;
typedef struct _ErDtlsConnectionClass ErDtlsConnectionClass;
typedef struct _ErDtlsConnectionPrivate ErDtlsConnectionPrivate;
/**
* ErDtlsSrtpCipher:
* @ER_DTLS_SRTP_CIPHER_AES_128_ICM: aes-128-icm
*
* SRTP Cipher selected by the DTLS handshake, should match the enums in gstsrtp
*/
typedef enum {
ER_DTLS_SRTP_CIPHER_AES_128_ICM = 1
} ErDtlsSrtpCipher;
/**
* ErDtlsSrtpAuth:
* @ER_DTLS_SRTP_AUTH_HMAC_SHA1_32: hmac-sha1-32
* @ER_DTLS_SRTP_AUTH_HMAC_SHA1_80: hmac-sha1-80
*
* SRTP Auth selected by the DTLS handshake, should match the enums in gstsrtp
*/
typedef enum {
ER_DTLS_SRTP_AUTH_HMAC_SHA1_32 = 1,
ER_DTLS_SRTP_AUTH_HMAC_SHA1_80 = 2
} ErDtlsSrtpAuth;
#define ER_DTLS_SRTP_MASTER_KEY_LENGTH 30
/*
* ErDtlsConnection:
*
* A class that handles a single DTLS connection.
* Any connection needs to be created with the agent property set.
* Once the DTLS handshake is completed, on-encoder-key and on-decoder-key will be signalled.
*/
struct _ErDtlsConnection {
GObject parent_instance;
ErDtlsConnectionPrivate *priv;
};
struct _ErDtlsConnectionClass {
GObjectClass parent_class;
};
GType er_dtls_connection_get_type(void) G_GNUC_CONST;
void er_dtls_connection_start(ErDtlsConnection *, gboolean is_client);
void er_dtls_connection_start_timeout(ErDtlsConnection *);
/*
* Stops the connections, it is not required to call this function.
*/
void er_dtls_connection_stop(ErDtlsConnection *);
/*
* Closes the connection, the function will block until the connection has been stopped.
* If stop is called some time before, close will return instantly.
*/
void er_dtls_connection_close(ErDtlsConnection *);
/*
* Sets the closure that will be called whenever data needs to be sent.
*
* The closure will get called with the following arguments:
* void cb(ErDtlsConnection *, gpointer data, gint length, gpointer user_data)
*/
void er_dtls_connection_set_send_callback(ErDtlsConnection *, GClosure *);
/*
 * Processes data that has been received; the transformation is done in-place.
 * Returns the length of the decoded plaintext data; if no data is available, a value <= 0 is returned.
*/
gint er_dtls_connection_process(ErDtlsConnection *, gpointer ptr, gint len);
/*
* If the DTLS handshake is completed this function will encode the given data.
* Returns the length of the data sent, or 0 if the DTLS handshake is not completed.
*/
gint er_dtls_connection_send(ErDtlsConnection *, gpointer ptr, gint len);
G_END_DECLS
#endif /* erdtlsconnection_h */
|
// const DAYS = 1000 * 60 * 60 * 24;  // day-based variant, currently unused
// Milliseconds per unit of time.
const MS_PER_HOUR = 1000 * 60 * 60;
const MS_PER_MINUTE = 1000 * 60;
const MS_PER_SECOND = 1000;

/**
 * Form-validation helper: returns a copy of `error` with the entry for
 * `element.name` set to true when validation fails.  The only rule defined
 * is for the "name" field (value must be 3-15 characters); any other field
 * name has no rule and is therefore flagged as invalid.
 */
export const errorHandler = (element, error) => {
  const { name, value } = element;
  const rules = {
    name: value.length >= 3 && value.length <= 15,
  };
  const failed = !rules[name]; // unknown field -> undefined -> failed
  return { ...error, [name]: failed };
};

/** Formats a millisecond duration as a zero-padded "HH:MM:SS" string. */
export class Time {
  constructor() {
    this.time = 0; // last duration passed to getTime(), in milliseconds
  }

  // getDays() {
  //   day-based variant removed with the DAYS constant above.
  // }

  /** Whole hours in the stored duration (0 for negative/empty durations). */
  getHours() {
    const wholeHours = Math.floor(this.time / MS_PER_HOUR);
    return wholeHours >= 1 ? wholeHours : 0;
  }

  /** Whole minutes remaining once the hours are removed. */
  getMinutes() {
    const wholeMinutes = Math.floor((this.time % MS_PER_HOUR) / MS_PER_MINUTE);
    return wholeMinutes >= 1 ? wholeMinutes : 0;
  }

  /** Whole seconds remaining once the minutes are removed. */
  getSeconds() {
    const wholeSeconds = Math.floor((this.time % MS_PER_MINUTE) / MS_PER_SECOND);
    return wholeSeconds >= 1 ? wholeSeconds : 0;
  }

  /** Left-pads a value below ten with a single leading zero. */
  transformToTwoDigits = value => (value >= 10 ? value : `0${value}`);

  /** Stores `time` (in ms) and returns it rendered as "HH:MM:SS". */
  getTime(time) {
    this.time = time;
    const pieces = [this.getHours(), this.getMinutes(), this.getSeconds()];
    const [hours, minutes, seconds] = pieces.map(this.transformToTwoDigits);
    return `${hours}:${minutes}:${seconds}`;
  }
}
|
A system architecture for processing patient medical records should include components for data collection, data storage, data processing, data analytics, and data visualization.
Data collection can be done using web crawlers, APIs, and data cleaning tools. Data storage can utilize various databases such as MySQL, MongoDB, or Elasticsearch. Data processing can leverage distributed computing frameworks such as Spark, Hadoop, or Flink. Data analytics can be done using machine learning and natural language processing techniques. Data visualization can be accomplished using various tools such as Tableau, Plotly, or Matplotlib.
##
12. Instruction: Write a program to merge two given arrays.
12. Input:
arr1 = [1, 3, 4, 5]
arr2 = [2, 6, 7, 8]
12. Output:
def merge_arrays(arr1, arr2):
    """Return a new sorted list containing every element of both inputs."""
    merged = list(arr1)
    merged.extend(arr2)
    merged.sort()
    return merged
arr1 = [1, 3, 4, 5]
arr2 = [2, 6, 7, 8]
result = merge_arrays(arr1, arr2)
print(result) |
#!/bin/bash
# Generate concrete int implementations from the generic Go templates via
# penny.  Input is fed through stdin redirection instead of the original
# `cat file | ...` pipelines (useless use of cat; one fewer process, same
# bytes on stdin).
../../penny gen "NumberType=NUMBERS" < ./generic_max.go > numbers_max_get.go
../../penny gen "ThisNumberType=NUMBERS" < ./func_thing.go > numbers_func_thing.go
|
from astropy.io import fits
def prepare_fits_header(filename):
    """Load and normalise the primary FITS header of *filename*.

    Removes every header keyword ending in '4' and forces BUNIT to 'K'.

    Returns:
        dict: the cleaned primary header, or None when the file does not
        exist or is not in the expected FITS format.
    """
    try:
        # Context manager guarantees the HDU list is closed even if header
        # processing raises; the original leaked the open file handle.
        with fits.open(filename) as hdu:
            if len(hdu) < 1 or not isinstance(hdu[0].header, fits.header.Header):
                return None  # Return None if the file is not in the correct format
            header = hdu[0].header
            # Snapshot the keys first: deleting while iterating is unsafe.
            keys_to_remove = [k for k in header.keys() if k.endswith('4')]
            for k in keys_to_remove:
                del header[k]
            # Brightness unit forced to Kelvin -- presumably for downstream
            # consumers; confirm with callers.
            header['BUNIT'] = 'K'
            return dict(header)
    except FileNotFoundError:
        return None  # Return None if the file does not exist
<reponame>juggleross/json_updater
# frozen_string_literal: true
require 'spec_helper'
require 'shared_examples/json_updater_shared_spec'
# Spec for JsonUpdater::UpdateService#update_json.  Each invocation runs the
# shared 'json updater' contract (see shared_examples/json_updater_shared_spec)
# against one named fixture scenario.
describe JsonUpdater::UpdateService do
  describe '#update_json' do
    # Flat structures.
    include_examples 'json updater', 'one_level'
    include_examples 'json updater', 'one_level_array'
    # Nested hashes/arrays inside existing fields.
    include_examples 'json updater', 'fields_with_one_level_hash'
    include_examples 'json updater', 'fields_with_one_level_array_of_hash'
    include_examples 'json updater', 'fields_with_multiple_level'
    include_examples 'json updater', 'fields_with_multiple_level_with_array'
    include_examples 'json updater', 'one_field_with_array'
    # Introducing new fields or replacing existing scalar fields.
    include_examples 'json updater', 'new_field_with_hash'
    include_examples 'json updater', 'new_field_with_array'
    include_examples 'json updater', 'add_hash_to_old_field'
    include_examples 'json updater', 'add_array_to_old_field'
  end
end
|
#!/bin/bash
#
# Description:
#
# Prints a color table of 8bg * 8fg * 2 states (regular/bold)
#
# Copyright:
#
# (C) 2009 Wolfgang Frisch <xororand@unfoog.de>
#
# License:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
echo
echo Table for 16-color terminal escape sequences.
echo Replace ESC with \\033 in bash.
echo
echo "Background | Foreground colors"
echo "---------------------------------------------------------------------"
# One table row per background color (40-47), with two sub-rows each:
# regular (bold=0) and bold (bold=1) foreground attributes.
for((bg=40;bg<=47;bg++)); do
    for((bold=0;bold<=1;bold++)) do
        # Row label: the escape sequence that selects this background.
        echo -en "\033[0m"" ESC[${bg}m | "
        # One cell per foreground color (30-37), rendered in that color.
        for((fg=30;fg<=37;fg++)); do
            if [ $bold == "0" ]; then
                echo -en "\033[${bg}m\033[${fg}m [${fg}m "
            else
                echo -en "\033[${bg}m\033[1;${fg}m [1;${fg}m"
            fi
        done
        echo -e "\033[0m"   # reset attributes before ending the line
    done
    echo "--------------------------------------------------------------------- "
done
echo
echo
|
#!/bin/bash -ev
# Import the Tor Project archive signing key and trust it for apt.
# NOTE(review): the keys.gnupg.net keyserver pool has been unreliable for
# years; confirm it still resolves before depending on this in CI.
gpg --keyserver keys.gnupg.net --recv A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89
gpg --export A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89 | sudo apt-key add -
# Add the upstream Tor apt repository for this Ubuntu release and install Tor.
sudo add-apt-repository -s "deb http://deb.torproject.org/torproject.org $(lsb_release -sc) main"
sudo apt-get update -qq
sudo apt-get install -y -qq tor deb.torproject.org-keyring
# Publish the local pndcoin service (port 22445) as a Tor hidden service.
echo "HiddenServiceDir /var/lib/tor/pndcoin-service/" | sudo tee -a /etc/tor/torrc
echo "HiddenServicePort 22445 127.0.0.1:22445" | sudo tee -a /etc/tor/torrc
sudo systemctl restart tor.service
|
/**
 * A user-defined field together with its visibility flag.
 */
export class CustomField {
  /** Whether the field is currently shown. */
  public visible: boolean;
  /** The field's identifier/name. */
  public field: string;

  constructor(visible: boolean, field: string) {
    this.visible = visible;
    this.field = field;
  }
}
|
#!/usr/bin/env bash
# DO NOT EDIT THIS FILE BY HAND -- YOUR CHANGES WILL BE OVERWRITTEN
# This file is subject to the terms and conditions defined in file 'LICENSE',
# which is part of this repository.
# Available environment variables
#
# ANSIBLEVERSION - selects the tox environment (ansible$ANSIBLEVERSION)
# DOCKERFILE     - optional docker settings file, sourced when present
# OPENRCFILE     - optional OpenStack credentials file (default: openrc)
# MOLECULEVARSFILE - optional molecule vars file copied into vars/

# Default the credentials file name when the caller did not set one.
: "${OPENRCFILE:=openrc}"

# Source optional configuration files when they exist.
if [[ -e $OPENRCFILE ]]; then
  . "$OPENRCFILE"
fi
if [[ -e $DOCKERFILE ]]; then
  . "$DOCKERFILE"
fi

# Stage molecule variables where the scenario expects them.
if [[ -e $MOLECULEVARSFILE ]]; then
  cp $MOLECULEVARSFILE vars/molecule.yml
fi

# Run the version-specific tox environment when this is a tox project.
if [[ -e tox.ini ]]; then
  tox -e ansible$ANSIBLEVERSION
fi
|
#!/bin/bash
# Builds the mflow demo image.
#
# This command requires githubtoken.txt in the repo root (it's git ignored btw).
# githubtoken.txt should contain github token: https://github.com/settings/tokens -> generate new token -> [x] read:packages
#
# Flag notes:
#   -t                tag for the image (front/back branch + pg + node versions)
#   --build-arg       SERVER_BRANCH / WEB_APP_BRANCH pick the branches to pull
#                     and build (tag names also work; escape # with \#)
#   --secret          'secretly' and temporarily mounts githubtoken.txt
#   --no-cache        (not used) add to force a rebuild when branch content changed
#   --progress plain  show full build progress

# Build configuration.
SERVER_BRANCH='develop'
WEB_APP_BRANCH='develop'
IMAGE_NAME='mflow-demo'
INITIAL_DATA_LOCALE=''
NODE_VERSION='14'
POSTGRES_VERSION='12'
IMAGE_TAG="front-${WEB_APP_BRANCH}_back-${SERVER_BRANCH}_pg-${POSTGRES_VERSION}_node-${NODE_VERSION}"

echo "building image: ${IMAGE_TAG}"

build_args=(
  --progress plain
  -t "${IMAGE_NAME}:${IMAGE_TAG}"
  --build-arg SERVER_BRANCH="$SERVER_BRANCH"
  --build-arg WEB_APP_BRANCH="$WEB_APP_BRANCH"
  --build-arg NODE_VERSION="$NODE_VERSION"
  --build-arg POSTGRES_VERSION="$POSTGRES_VERSION"
  --build-arg INITIAL_DATA_LOCALE="$INITIAL_DATA_LOCALE"
  --secret id=githubtoken,src=../githubtoken.txt
)
docker build "${build_args[@]}" .
|
# ==============================================================
# Vivado(TM) HLS - High-Level Synthesis from C, C++ and SystemC v2019.1 (64-bit)
# Copyright 1986-2019 Xilinx, Inc. All Rights Reserved.
# ==============================================================
# Runs the IP-integrator example script in batch mode; the tclargs are the
# target part (xcvu9p-flgb2104-1-e) and the packaged HLS FIR IP zip to use.
/nfs/tools/xilinx/Vivado/2019.1/bin/vivado -notrace -mode batch -source ipi_example.tcl -tclargs xcvu9p-flgb2104-1-e ../xilinx_com_hls_fir_1_0.zip
|
/**
*
* RecordAttribute
*
*/
import React from 'react';
import PropTypes from 'prop-types';
import { Trash } from 'react-bootstrap-icons';
import { FormattedMessage } from 'react-intl';
import messages from './recordAttribute.messages';
import styles from './recordAttribute.styles.scss';
function RecordAttribute({
keyText,
value,
handleValueChange,
handleRemoveAttRecord,
}) {
const handleChange = e => {
handleValueChange(parseInt(e.currentTarget.value, 10));
};
return (
<div className={`row ${styles.attibuteItem}`}>
<div className={`col-sm-5 ${styles.recordKeyText}`}>
<FormattedMessage {...messages[keyText]} />
</div>
<div className="col-sm-5">
<input
data-testid="record-value"
name="record-value"
className="form-control"
onChange={handleChange}
value={value}
type="number"
/>
</div>
<div className="col-sm-2">
<button
data-testid="remove-btn"
className="btn btn-light"
type="button"
onClick={e => {
e.stopPropagation();
e.preventDefault();
handleRemoveAttRecord();
}}
>
<Trash size={24} />
</button>
</div>
</div>
);
}
RecordAttribute.propTypes = {
keyText: PropTypes.string.isRequired,
value: PropTypes.number.isRequired,
handleValueChange: PropTypes.func.isRequired,
handleRemoveAttRecord: PropTypes.func.isRequired,
};
export default RecordAttribute;
|
#!/bin/bash
# Joins this host to an existing Kubernetes cluster as a worker node.
#
# Usage: <script> <master-endpoint> <token> <ca-cert-hash>
#
# Installs Docker and kubeadm via the bundled helper scripts, then runs
# 'kubeadm join' against the given control-plane endpoint.
set -e
trap "exit" INT
echo "
*****************************************************
*             KUBERNETES INSTALLATION               *
*                                                   *
*****************************************************
"
# Validate the three required arguments. The original exited with status 0
# on a missing argument, so callers could not detect the failure; exit 1.
if [[ -z "$1" ]]
then
  echo "Please enter k8s master ip address after the command"
  exit 1
fi
if [[ -z "$2" ]]
then
  echo "Please enter token after the command"
  exit 1
fi
if [[ -z "$3" ]]
then
  echo "Please enter ca certificate hash after the command"
  exit 1
fi
sudo ./docker_install.sh && sudo ./kubeadm_install.sh
# Quote the arguments so tokens/hashes containing special characters survive.
sudo kubeadm join "$1" --token "$2" --discovery-token-ca-cert-hash "$3"
echo "Installation complete."
|
#!/bin/bash
# Git add remote to jboss-integration the other repositories
# Sets two globals:
#   workingDir - the caller's physical working directory (symlinks resolved)
#   scriptDir  - the physical directory containing this script, after
#                following any symlink chain to the script file itself
initializeWorkingDirAndScriptDir() {
  # Set working directory and remove all symbolic links
  workingDir=`pwd -P`
  # Go the script directory
  cd `dirname $0`
  # If the file itself is a symbolic link (ignoring parent directory links), then follow that link recursively
  # Note that scriptDir=`pwd -P` does not do that and cannot cope with a link directly to the file
  scriptFileBasename=`basename $0`
  while [ -L "$scriptFileBasename" ] ; do
    scriptFileBasename=`readlink $scriptFileBasename` # Follow the link
    cd `dirname $scriptFileBasename`
    scriptFileBasename=`basename $scriptFileBasename`
  done
  # Set script directory and remove other symbolic links (parent directory links)
  scriptDir=`pwd -P`
}
initializeWorkingDirAndScriptDir
# The organization checkout is expected two directory levels above the script.
droolsjbpmOrganizationDir="$scriptDir/../.."
startDateTime=`date +%s`
cd "$droolsjbpmOrganizationDir"
# Add a "gerrit" remote to every repository listed in repository-list.txt.
for repository in `cat "${scriptDir}/repository-list.txt"` ; do
  echo
  if [ ! -d "$droolsjbpmOrganizationDir/$repository" ]; then
    echo "==============================================================================="
    echo "Missing Repository: $repository. SKIPPING!"
    echo "==============================================================================="
  else
    echo "==============================================================================="
    echo "Repository: $repository"
    echo "==============================================================================="
    cd $repository
    git remote add gerrit ssh://jb-ip-tooling-jenkins@code.engineering.redhat.com/kiegroup/$repository
    # NOTE(review): a non-zero status here (e.g. when the remote already
    # exists) aborts the whole run below - confirm that is intended.
    returnCode=$?
    cd ..
    if [ $returnCode != 0 ] ; then
      exit $returnCode
    fi
  fi
done
endDateTime=`date +%s`
spentSeconds=`expr $endDateTime - $startDateTime`
echo
echo "Total time: ${spentSeconds}s"
|
#!/bin/bash -eu
# Ships archived files to a remote rsync server and removes them locally.
#
# Arguments come in pairs: <source-root> <file-list>, repeated; each file
# list is fed to rsync via --files-from relative to its root. Requires
# RSYNC_USER, RSYNC_SERVER and RSYNC_PATH in the environment (enforced by -u).
# "${1+x}" is set-ness (not emptiness) of $1, safe under 'set -u'.
while [ -n "${1+x}" ]
do
  # '|| [[ $? = 24 ]]' tolerates rsync exit code 24 ("some files vanished
  # before they could be transferred"); any other failure aborts via 'set -e'.
  rsync -avhRL --timeout=60 --remove-source-files --no-perms --omit-dir-times --stats \
    --log-file=/tmp/archive-rsync-cmd.log --ignore-missing-args \
    --files-from="$2" "$1" "$RSYNC_USER@$RSYNC_SERVER:$RSYNC_PATH" &> /tmp/rsynclog || [[ "$?" = "24" ]]
  shift 2
done
|
<gh_stars>1-10
package com.katus;
import com.katus.common.io.FsManipulator;
import com.katus.common.io.FsManipulatorFactory;
import com.katus.common.io.LineIterator;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Thins the raw training table down to one record out of every three,
 * writing the sampled rows to a new CSV file.
 *
 * @author <NAME>
 * @version 1.0, 2021-11-04
 */
public class PrepareTraining {
    public static void main(String[] args) throws IOException {
        FsManipulator fsManipulator = FsManipulatorFactory.create();
        LineIterator lines = fsManipulator.getLineIterator("F:\\data\\gis\\traffic\\tables\\train\\all.csv");
        List<String> sampled = new ArrayList<>();
        // Keep every third line (indices 0, 3, 6, ...).
        for (int index = 0; lines.hasNext(); index++) {
            String line = lines.next();
            if (index % 3 == 0) {
                sampled.add(line);
            }
        }
        fsManipulator.writeTextToFile("F:\\data\\gis\\traffic\\tables\\train\\train.csv", sampled);
    }
}
|
<reponame>dj-1087/MJU_Club_HomePage
import React from 'react';
import {
Switch,
Route,
useRouteMatch
} from "react-router-dom";
import SEO from '../../components/SEO';
import Header from "../../partials/header/Header";
import PeopleManagement from "../../container/ClubManagement/PeopleManagement.js";
import Footer from '../../container/Footer/Footer';
import ScrollToTop from '../../components/ScrollToTop.jsx';
import AddPeoplePage from './AddPeople';
import DeletePeoplePage from './DeletePeople';
// Routes the club "people management" area: the management dashboard at the
// matched base path, plus the /add and /delete sub-pages.
const PeopleManagementPage = () => {
    // Base path of the parent <Route> so nested routes compose correctly.
    let { path } = useRouteMatch();
    return (
        <>
            <Switch>
                {/* Dashboard: full page chrome around the management view. */}
                <Route exact path={path}>
                    <React.Fragment>
                        <SEO title="Exomac || Management" />
                        <Header />
                        <PeopleManagement/>
                        <Footer />
                        <ScrollToTop />
                    </React.Fragment>
                </Route>
                {/* Sub-pages render their own chrome. */}
                <Route path={`${path}/add`}>
                    <AddPeoplePage/>
                </Route>
                <Route path={`${path}/delete`}>
                    <DeletePeoplePage/>
                </Route>
            </Switch>
        </>
    )
}
export default PeopleManagementPage;
|
# Print the version of each vmcontroller sub-package as reported by its
# setup.py. Output format: "<label>: <version>".
print_version() {
  # $1 = label to print, $2 = package directory under ../src
  cd "$2"
  echo "$1: $(python setup.py --version)"
  cd ..
}

cd ../src
print_version vmcontroller.common vmcontroller.common
print_version vmcontroller.host vmcontroller.host
# Note: the guest package has historically been labelled "vmcontroller.vm".
print_version vmcontroller.vm vmcontroller.guest
|
<gh_stars>1-10
/*
*
* * This file is part of moneydrops, licensed under the MIT License.
* *
* * Copyright (c) crysis992 <<EMAIL>>
* * Copyright (c) contributors
* *
* * Permission is hereby granted, free of charge, to any person obtaining a copy
* * of this software and associated documentation files (the "Software"), to deal
* * in the Software without restriction, including without limitation the rights
* * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* * copies of the Software, and to permit persons to whom the Software is
* * furnished to do so, subject to the following conditions:
* *
* * The above copyright notice and this permission notice shall be included in all
* * copies or substantial portions of the Software.
* *
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* * SOFTWARE.
*
*/
package net.crytec.pickmoney.listener;
import net.crytec.libs.commons.utils.UtilMath;
import net.crytec.pickmoney.ConfigOptions;
import net.crytec.pickmoney.api.DropManager;
import net.crytec.pickmoney.api.EntityDropData;
import net.crytec.pickmoney.events.EntityDropMoneyEvent;
import net.milkbowl.vault.economy.Economy;
import org.bukkit.Bukkit;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDeathEvent;
import org.bukkit.event.entity.PlayerDeathEvent;
/**
 * Listens for entity and player deaths and drops money at the death
 * location according to the configured drop data.
 */
public class EntityDeathListener implements Listener {
  // Supplies per-entity drop configuration, RNG helpers and the actual drop.
  private final DropManager manager;
  // Vault economy used to debit players when they die.
  private final Economy eco;
  /**
   * @param manager drop configuration/utility manager
   * @param economy Vault economy implementation
   */
  public EntityDeathListener(final DropManager manager, final Economy economy) {
    this.manager = manager;
    this.eco = economy;
  }
  /**
   * Handles non-player entity deaths: rolls the configured drop chance and,
   * if it passes, fires a cancellable EntityDropMoneyEvent and drops the
   * rolled amount at the death location.
   */
  @EventHandler
  public void onEntityKill(final EntityDeathEvent event) {
    // Player deaths are handled separately in onPlayerDeath.
    if (event.getEntity() instanceof Player) {
      return;
    }
    if (!manager.isWorldEnabled(event.getEntity().getWorld())) {
      return;
    }
    // No drop configuration for this entity type -> nothing to do.
    if (!manager.dropData.containsKey(event.getEntity().getType())) {
      return;
    }
    final EntityDropData data = manager.dropData.get(event.getEntity().getType());
    // Roll a random value (the argument 2 is presumably a decimal precision
    // - confirm against DropManager.getRandom).
    final double chance = manager.getRandom(2);
    if (chance > data.getChance()) {
      return;
    }
    final double money = manager.getDropAmount(data.getRange());
    if (money < ConfigOptions.MINIMUM_TO_DROP.asDouble()) {
      return;
    }
    // Optionally require that a player dealt the killing blow.
    if (ConfigOptions.REQUIRE_PLAYERKILL.asBoolean() && event.getEntity().getKiller() == null) {
      return;
    }
    // Give other plugins a chance to veto or observe the drop.
    final EntityDropMoneyEvent toCall = new EntityDropMoneyEvent(event.getEntity(), money);
    Bukkit.getPluginManager().callEvent(toCall);
    if (toCall.isCancelled()) {
      return;
    }
    manager.dropNaturallyAtLocation(event.getEntity().getLocation(), money);
  }
  /**
   * Handles player deaths: drops a rolled percentage of the player's balance
   * (capped by PLAYER_DROP_HARDCAP), withdraws it from their account and
   * notifies them.
   */
  @EventHandler
  public void onPlayerDeath(final PlayerDeathEvent event) {
    if (!manager.dropData.containsKey(EntityType.PLAYER)) {
      return;
    }
    // Players with the bypass permission never lose money.
    if (event.getEntity().hasPermission("pickupmoney.bypass")) {
      return;
    }
    final EntityDropData data = manager.dropData.get(EntityType.PLAYER);
    final double chance = manager.getRandom(2);
    if (chance > data.getChance()) {
      return;
    }
    // For players the configured range is interpreted as a percentage of
    // their balance, not an absolute amount.
    final double percentage = manager.getDropAmount(data.getRange());
    final double balance = eco.getBalance(event.getEntity());
    if (balance <= 0) {
      return;
    }
    double amount = (balance / 100) * percentage;
    if (amount < ConfigOptions.MINIMUM_TO_DROP.asDouble()) {
      return;
    }
    if (amount > ConfigOptions.PLAYER_DROP_HARDCAP.asDouble()) {
      amount = ConfigOptions.PLAYER_DROP_HARDCAP.asDouble();
    }
    final EntityDropMoneyEvent toCall = new EntityDropMoneyEvent(event.getEntity(), amount);
    Bukkit.getPluginManager().callEvent(toCall);
    if (toCall.isCancelled()) {
      return;
    }
    // Only drop the money if the withdrawal actually succeeded.
    if (this.eco.withdrawPlayer(event.getEntity(), amount).transactionSuccess()) {
      manager.dropNaturallyAtLocation(event.getEntity().getLocation(), amount);
      event.getEntity().sendMessage(ConfigOptions.LOST_MONEY.asString(true).replace("%value%", String.valueOf(UtilMath.unsafeRound(amount, 2))));
    }
  }
}
|
/**
* @fileoverview Check whether the given variable is a HTML tag or not.
*
*/
'use strict';
/**
 * Check whether the given variable is a HTML tag or not.
 * If the given variable is a HTML tag, return true.
 * @param {*} html - Target for checking
 * @returns {boolean} Is HTML tag?
 * @memberof module:type
 */
function isHTMLTag(html) {
    if (typeof HTMLElement === 'object') {
        // Coerce to a real boolean: previously this branch returned the falsy
        // operand itself (e.g. null/undefined), contradicting the documented
        // boolean return and the `!!` coercion in the fallback branch below.
        return !!(html && (html instanceof HTMLElement));
    }
    // Fallback for environments without a usable HTMLElement constructor:
    // duck-type on nodeType === 1 (ELEMENT_NODE).
    return !!(html && html.nodeType && html.nodeType === 1);
}
module.exports = isHTMLTag;
|
package com.ufrn.embarcados.reaqua.model;
import lombok.Data;
import javax.persistence.*;
import java.util.List;
/**
 * JPA entity representing a tower that owns a collection of water tanks.
 * Lombok's {@code @Data} generates accessors, equals/hashCode and toString.
 */
@Data
@Entity
public class Tower {
    // Surrogate primary key, generated by the database (IDENTITY column).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;
    // Human-readable tower name; non-null and unique at the database level.
    @Column(nullable = false, unique = true)
    private String name;
    // Unidirectional one-to-many: the join column lives on the WaterTank table.
    // NOTE(review): the column is named "water_tank_id" but references
    // Tower.id - the name reads like a WaterTank FK (one would expect
    // "tower_id"); confirm against the actual schema.
    @OneToMany
    @JoinColumn(name="water_tank_id", referencedColumnName = "id")
    private List<WaterTank> waterTanks;
}
|
<reponame>parksandwildlife/ibms
from datetime import datetime
from ibms.models import GLPivDownload
def get_download_period():
    """Return the 'newest' download_period date value for all the GLPivDownload
    objects, falling back to today's date when no record has one.

    Note: the previous implementation ordered the *unfiltered* queryset by
    ``-download_period``; on backends that sort NULLs first for descending
    order (e.g. PostgreSQL) that could yield a row whose download_period is
    ``None`` even though non-null periods exist. Ordering the filtered
    queryset avoids that, and also collapses the two preliminary
    ``exists()`` queries into a single query.
    """
    latest = (
        GLPivDownload.objects.filter(download_period__isnull=False)
        .order_by('-download_period')
        .first()
    )
    if latest is None:
        # No rows at all, or none with a download_period set.
        return datetime.today()
    return latest.download_period
|
#!/usr/bin/env bash
# SPDK NVMe-oF RPC test: checks nvmf_get_stats output, host NQN allow/deny
# behavior, and repeated subsystem/namespace add-delete cycles.
testdir=$(readlink -f $(dirname $0))
rootdir=$(readlink -f $testdir/../../..)
source $rootdir/test/common/autotest_common.sh
source $rootdir/test/nvmf/common.sh
rpc_py="$rootdir/scripts/rpc.py"
# Count the number of JSON nodes matched by a jq filter (one per line).
function jcount()
{
	local filter=$1
	jq "$filter" | wc -l
}
# Sum the numeric values matched by a jq filter.
function jsum()
{
	local filter=$1
	jq "$filter" | awk '{s+=$1}END{print s}'
}
timing_enter rpc
nvmftestinit
nvmfappstart "-m 0xF"
stats=$($rpc_py nvmf_get_stats)
# Expect 4 poll groups (from CPU mask) and no transports yet
[ "4" -eq $(jcount .poll_groups[].name <<< "$stats") ]
[ "null" == $(jq .poll_groups[0].transports[0] <<< "$stats") ]
$rpc_py nvmf_create_transport $NVMF_TRANSPORT_OPTS -u 8192
stats=$($rpc_py nvmf_get_stats)
# Expect no QPs
[ "0" -eq $(jsum .poll_groups[].admin_qpairs <<< "$stats") ]
[ "0" -eq $(jsum .poll_groups[].io_qpairs <<< "$stats") ]
# Transport statistics is currently implemented for RDMA only
if [ 'rdma' == $TEST_TRANSPORT ]; then
	# Expect RDMA transport and some devices
	[ "1" -eq $(jcount .poll_groups[0].transports[].trtype <<< "$stats") ]
	transport_type=$(jq -r .poll_groups[0].transports[0].trtype <<< "$stats")
	[ "${transport_type,,}" == "${TEST_TRANSPORT,,}" ]
	[ "0" -lt $(jcount .poll_groups[0].transports[0].devices[].name <<< "$stats") ]
fi
# set times for subsystem construct/delete
if [ $RUN_NIGHTLY -eq 1 ]; then
	times=50
else
	times=3
fi
MALLOC_BDEV_SIZE=64
MALLOC_BLOCK_SIZE=512
$rpc_py bdev_malloc_create $MALLOC_BDEV_SIZE $MALLOC_BLOCK_SIZE -b Malloc1
# Disallow host NQN and make sure connect fails
$rpc_py nvmf_subsystem_create nqn.2016-06.io.spdk:cnode1 -a -s SPDK00000000000001
$rpc_py nvmf_subsystem_add_ns nqn.2016-06.io.spdk:cnode1 Malloc1
$rpc_py nvmf_subsystem_allow_any_host -d nqn.2016-06.io.spdk:cnode1
$rpc_py nvmf_subsystem_add_listener nqn.2016-06.io.spdk:cnode1 -t $TEST_TRANSPORT -a $NVMF_FIRST_TARGET_IP -s $NVMF_PORT
# This connect should fail - the host NQN is not allowed
! nvme connect -t $TEST_TRANSPORT -n nqn.2016-06.io.spdk:cnode1 -q nqn.2016-06.io.spdk:host1 -a "$NVMF_FIRST_TARGET_IP" -s "$NVMF_PORT"
# Add the host NQN and verify that the connect succeeds
$rpc_py nvmf_subsystem_add_host nqn.2016-06.io.spdk:cnode1 nqn.2016-06.io.spdk:host1
nvme connect -t $TEST_TRANSPORT -n nqn.2016-06.io.spdk:cnode1 -q nqn.2016-06.io.spdk:host1 -a "$NVMF_FIRST_TARGET_IP" -s "$NVMF_PORT"
waitforblk "nvme0n1"
nvme disconnect -n nqn.2016-06.io.spdk:cnode1
# Remove the host and verify that the connect fails
$rpc_py nvmf_subsystem_remove_host nqn.2016-06.io.spdk:cnode1 nqn.2016-06.io.spdk:host1
! nvme connect -t $TEST_TRANSPORT -n nqn.2016-06.io.spdk:cnode1 -q nqn.2016-06.io.spdk:host1 -a "$NVMF_FIRST_TARGET_IP" -s "$NVMF_PORT"
# Allow any host and verify that the connect succeeds
$rpc_py nvmf_subsystem_allow_any_host -e nqn.2016-06.io.spdk:cnode1
nvme connect -t $TEST_TRANSPORT -n nqn.2016-06.io.spdk:cnode1 -q nqn.2016-06.io.spdk:host1 -a "$NVMF_FIRST_TARGET_IP" -s "$NVMF_PORT"
waitforblk "nvme0n1"
nvme disconnect -n nqn.2016-06.io.spdk:cnode1
$rpc_py delete_nvmf_subsystem nqn.2016-06.io.spdk:cnode1
# do frequent add delete of namespaces with different nsid.
for i in $(seq 1 $times)
do
	$rpc_py nvmf_subsystem_create nqn.2016-06.io.spdk:cnode1 -s SPDK00000000000001
	$rpc_py nvmf_subsystem_add_listener nqn.2016-06.io.spdk:cnode1 -t $TEST_TRANSPORT -a $NVMF_FIRST_TARGET_IP -s $NVMF_PORT
	$rpc_py nvmf_subsystem_add_ns nqn.2016-06.io.spdk:cnode1 Malloc1 -n 5
	$rpc_py nvmf_subsystem_allow_any_host nqn.2016-06.io.spdk:cnode1
	nvme connect -t $TEST_TRANSPORT -n nqn.2016-06.io.spdk:cnode1 -a "$NVMF_FIRST_TARGET_IP" -s "$NVMF_PORT"
	waitforblk "nvme0n1"
	nvme disconnect -n nqn.2016-06.io.spdk:cnode1
	$rpc_py nvmf_subsystem_remove_ns nqn.2016-06.io.spdk:cnode1 5
	$rpc_py delete_nvmf_subsystem nqn.2016-06.io.spdk:cnode1
done
# do frequent add delete.
for i in $(seq 1 $times)
do
	$rpc_py nvmf_subsystem_create nqn.2016-06.io.spdk:cnode1 -s SPDK00000000000001
	$rpc_py nvmf_subsystem_add_listener nqn.2016-06.io.spdk:cnode1 -t $TEST_TRANSPORT -a $NVMF_FIRST_TARGET_IP -s $NVMF_PORT
	$rpc_py nvmf_subsystem_add_ns nqn.2016-06.io.spdk:cnode1 Malloc1
	$rpc_py nvmf_subsystem_allow_any_host nqn.2016-06.io.spdk:cnode1
	$rpc_py nvmf_subsystem_remove_ns nqn.2016-06.io.spdk:cnode1 1
	$rpc_py delete_nvmf_subsystem nqn.2016-06.io.spdk:cnode1
done
stats=$($rpc_py nvmf_get_stats)
# Expect some admin and IO qpairs
[ "0" -lt $(jsum .poll_groups[].admin_qpairs <<< "$stats") ]
[ "0" -lt $(jsum .poll_groups[].io_qpairs <<< "$stats") ]
# Transport statistics is currently implemented for RDMA only
if [ 'rdma' == $TEST_TRANSPORT ]; then
	# Expect non-zero completions and request latencies accumulated
	[ "0" -lt $(jsum .poll_groups[].transports[].devices[].completions <<< "$stats") ]
	[ "0" -lt $(jsum .poll_groups[].transports[].devices[].request_latency <<< "$stats") ]
fi
trap - SIGINT SIGTERM EXIT
nvmftestfini
timing_exit rpc
|
/*
Copyright 2020 The pdf Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package pdfcpu
import (
"strings"
"github.com/pkg/errors"
)
var (
	// Sentinel errors surfaced by the bookmark helpers below.
	errNoBookmarks = errors.New("pdfcpu: no bookmarks available")
	errCorruptedDests = errors.New("pdfcpu: corrupted named destination")
)
// Bookmark represents an outline item at some level including page span info.
type Bookmark struct {
	// Title is the outline entry's display text (sanitized by the caller).
	Title string
	// PageFrom is the 1-based page the bookmark points to.
	PageFrom int
	PageThru int // >= pageFrom and reaches until before pageFrom of the next bookmark.
}
// dereferenceDestinationArray resolves the named destination for key via the
// "Dests" name tree and returns the fully dereferenced destination array.
func (ctx *Context) dereferenceDestinationArray(key string) (Array, error) {
	obj, found := ctx.Names["Dests"].Value(key)
	if !found {
		return nil, errCorruptedDests
	}
	return ctx.DereferenceArray(obj)
}
// positionToOutlineTreeLevel1 loads the "Dests" name tree and walks the
// outline tree down to the first level containing more than one item,
// returning that level's parent dict and the indirect ref of its first
// child. Returns errNoBookmarks when the document has no usable outline.
func (ctx *Context) positionToOutlineTreeLevel1() (Dict, *IndirectRef, error) {
	// Load Dests nametree.
	if err := ctx.LocateNameTree("Dests", false); err != nil {
		return nil, nil, err
	}
	ir, err := ctx.Outlines()
	if err != nil {
		return nil, nil, err
	}
	if ir == nil {
		return nil, nil, errNoBookmarks
	}
	d, err := ctx.DereferenceDict(*ir)
	if err != nil {
		return nil, nil, err
	}
	if d == nil {
		return nil, nil, errNoBookmarks
	}
	first := d.IndirectRefEntry("First")
	last := d.IndirectRefEntry("Last")
	// Guard against malformed outlines: the previous version dereferenced
	// First/Last unconditionally and panicked when either entry was absent.
	if first == nil || last == nil {
		return nil, nil, errNoBookmarks
	}
	// We consider Bookmarks at level 1 or 2 only.
	// A single-child level (First == Last) is skipped by descending into it.
	for *first == *last {
		if d, err = ctx.DereferenceDict(*first); err != nil {
			return nil, nil, err
		}
		first = d.IndirectRefEntry("First")
		last = d.IndirectRefEntry("Last")
		if first == nil || last == nil {
			return nil, nil, errNoBookmarks
		}
	}
	return d, first, nil
}
// BookmarksForOutlineLevel1 returns bookmarks including page span info.
func (ctx *Context) BookmarksForOutlineLevel1() ([]Bookmark, error) {
	d, first, err := ctx.positionToOutlineTreeLevel1()
	if err != nil {
		return nil, err
	}
	bms := []Bookmark{}
	// Process outline items.
	for ir := first; ir != nil; ir = d.IndirectRefEntry("Next") {
		if d, err = ctx.DereferenceDict(*ir); err != nil {
			return nil, err
		}
		s, _ := Text(d["Title"])
		// Sanitize the title: replace spaces with underscores and drop
		// control characters (< 32).
		var sb strings.Builder
		for i := 0; i < len(s); i++ {
			b := s[i]
			if b >= 32 {
				if b == 32 {
					b = '_'
				}
				sb.WriteByte(b)
			}
		}
		title := sb.String()
		dest, found := d["Dest"]
		if !found {
			return nil, errNoBookmarks
		}
		// Resolve the destination to the indirect ref of its target page:
		// Name/StringLiteral/HexLiteral destinations are looked up in the
		// Dests name tree; explicit arrays carry the page ref directly.
		// NOTE(review): arr[0].(IndirectRef) / dest[0].(IndirectRef) panic
		// if the destination array is empty or its first element is not an
		// indirect ref - confirm upstream validation guarantees this shape.
		var ir IndirectRef
		dest, _ = ctx.Dereference(dest)
		switch dest := dest.(type) {
		case Name:
			arr, err := ctx.dereferenceDestinationArray(dest.Value())
			if err != nil {
				return nil, err
			}
			ir = arr[0].(IndirectRef)
		case StringLiteral:
			arr, err := ctx.dereferenceDestinationArray(dest.Value())
			if err != nil {
				return nil, err
			}
			ir = arr[0].(IndirectRef)
		case HexLiteral:
			arr, err := ctx.dereferenceDestinationArray(dest.Value())
			if err != nil {
				return nil, err
			}
			ir = arr[0].(IndirectRef)
		case Array:
			ir = dest[0].(IndirectRef)
		}
		pageFrom, err := ctx.PageNumber(ir.ObjectNumber.Value())
		if err != nil {
			return nil, err
		}
		// Close out the previous bookmark's page span now that we know
		// where the current one starts.
		if len(bms) > 0 {
			if pageFrom > bms[len(bms)-1].PageFrom {
				bms[len(bms)-1].PageThru = pageFrom - 1
			} else {
				bms[len(bms)-1].PageThru = bms[len(bms)-1].PageFrom
			}
		}
		bms = append(bms, Bookmark{Title: title, PageFrom: pageFrom})
	}
	return bms, nil
}
|
#include <stdio.h>

/*
 * Reads an index n and prints the next two Fibonacci numbers after F(n),
 * i.e. F(n+1) and F(n+2), with F(0) = 0 and F(1) = 1.
 */
int main()
{
    int a = 0; /* F(i)   */
    int b = 1; /* F(i+1) */
    int n;

    printf("Enter a number:");
    if (scanf("%d", &n) != 1) {
        /* No usable input: nothing sensible to compute. */
        return 1;
    }

    /* Advance the pair (a, b) n steps: afterwards a = F(n), b = F(n+1). */
    for (int i = 0; i < n; i++) {
        int next = a + b;
        a = b;
        b = next;
    }

    /*
     * Bug fixes vs. the original:
     *  - 'c' was read uninitialized when n == 0 (undefined behavior);
     *  - it printed b and c, which are always equal after the loop, so the
     *    "two" numbers were the same value twice. The next two values after
     *    F(n) are F(n+1) = b and F(n+2) = a + b.
     */
    printf("The next two Fibonacci numbers are %d %d \n", b, a + b);
    return 0;
}
# Migrate analytics UUID to its new home in Homebrew repo's git config and
# remove the legacy UUID file if detected.
# HOMEBREW_LINUX, HOMEBREW_REPOSITORY is set by bin/brew
# HOMEBREW_NO_ANALYTICS is from the user environment.
# shellcheck disable=SC2154
# Move the analytics UUID out of the legacy dotfile into the Homebrew
# repository's git config, then delete the dotfile.
migrate-legacy-uuid-file() {
  local legacy_uuid_file analytics_uuid
  legacy_uuid_file="${HOME}/.homebrew_analytics_user_uuid"

  # Nothing to migrate unless the pre-git-config UUID file is present.
  [[ -f "${legacy_uuid_file}" ]] || return 0

  analytics_uuid="$(cat "${legacy_uuid_file}")"
  if [[ -n "${analytics_uuid}" ]]
  then
    git config --file="${HOMEBREW_REPOSITORY}/.git/config" --replace-all homebrew.analyticsuuid "${analytics_uuid}" 2>/dev/null
  fi
  rm -f "${legacy_uuid_file}"
}
# Prepare the analytics environment: migrate any legacy UUID, honor the
# opt-out settings, ensure a UUID exists in git config, and export the
# HOMEBREW_ANALYTICS_* variables used by later brew commands.
setup-analytics() {
  local git_config_file="${HOMEBREW_REPOSITORY}/.git/config"
  migrate-legacy-uuid-file
  # User-level opt-out: do nothing at all.
  if [[ -n "${HOMEBREW_NO_ANALYTICS}" ]]
  then
    return
  fi
  # Skip analytics until the user has seen the notice, or when disabled
  # via `brew analytics off` (stored in the repo's git config).
  local message_seen analytics_disabled
  message_seen="$(git config --file="${git_config_file}" --get homebrew.analyticsmessage 2>/dev/null)"
  analytics_disabled="$(git config --file="${git_config_file}" --get homebrew.analyticsdisabled 2>/dev/null)"
  if [[ "${message_seen}" != "true" || "${analytics_disabled}" == "true" ]]
  then
    # Internal variable for brew's use, to differentiate from user-supplied setting
    export HOMEBREW_NO_ANALYTICS_THIS_RUN="1"
    return
  fi
  HOMEBREW_ANALYTICS_USER_UUID="$(git config --file="${git_config_file}" --get homebrew.analyticsuuid 2>/dev/null)"
  if [[ -z "${HOMEBREW_ANALYTICS_USER_UUID}" ]]
  then
    # Generate a new UUID, preferring the system uuidgen, then the kernel's
    # /proc interface, then whatever uuidgen is on PATH.
    if [[ -x /usr/bin/uuidgen ]]
    then
      HOMEBREW_ANALYTICS_USER_UUID="$(/usr/bin/uuidgen)"
    elif [[ -r /proc/sys/kernel/random/uuid ]]
    then
      # Uppercases the hex digits a-f (presumably to match uuidgen's
      # uppercase output on macOS - confirm).
      HOMEBREW_ANALYTICS_USER_UUID="$(tr a-f A-F </proc/sys/kernel/random/uuid)"
    else
      HOMEBREW_ANALYTICS_USER_UUID="$(uuidgen)"
    fi
    if [[ -z "${HOMEBREW_ANALYTICS_USER_UUID}" ]]
    then
      # Avoid sending bogus analytics if no UUID could be generated.
      export HOMEBREW_NO_ANALYTICS_THIS_RUN="1"
      return
    fi
    # Persist the freshly generated UUID for future runs.
    git config --file="${git_config_file}" --replace-all homebrew.analyticsuuid "${HOMEBREW_ANALYTICS_USER_UUID}" 2>/dev/null
  fi
  if [[ -n "${HOMEBREW_LINUX}" ]]
  then
    # For Homebrew on Linux's analytics.
    HOMEBREW_ANALYTICS_ID="UA-76492262-1"
  else
    # Otherwise, fall back to Homebrew's analytics.
    HOMEBREW_ANALYTICS_ID="UA-76679469-1"
  fi
  export HOMEBREW_ANALYTICS_ID
  export HOMEBREW_ANALYTICS_USER_UUID
}
|
<gh_stars>10-100
const fs = require('fs');
const glob = require('glob');
const { exec } = require('child_process');

/* eslint-disable no-console */

// CLI: node <script> [glob] - generates .mdx docs (via jsdoc2md) for every
// matched JS file, preserving any hand-written content above the
// '<!-- doc -->' marker in the package's index.mdx.
const args = process.argv.slice(2);
const path = args[0] || './packages/hooks/packages/*/*.js';

glob(path, (err, files) => {
  // Fix: the glob error used to be ignored, which crashed the callback with
  // a TypeError on `files.forEach` whenever globbing failed.
  if (err) {
    console.error('an error occurred ==>', err);
    return;
  }
  files.forEach(file => {
    const fileParentDir = file.substr(0, file.lastIndexOf('/'));
    const fileName = file.substr(file.lastIndexOf('/') + 1);
    const mdxFileName = fileName.replace(/\.(.*)/, '.mdx');
    const markdownFile = `${fileParentDir}/index.mdx`;
    // Keep any hand-written content that precedes the generated marker.
    const markdownHeader = (fs.existsSync(markdownFile) ? fs.readFileSync(markdownFile, 'utf-8') : '').split('<!-- doc -->')[0];
    exec(`jsdoc2md ${file}`, (execErr, stdout) => {
      // Fix: exec failures surface in this callback, not in the try/catch
      // the old version wrapped around exec (async errors escape try/catch).
      if (execErr) {
        console.error('an error occurred ==>', execErr);
        return;
      }
      // Strip <code> tags and truncate headings at the first '('.
      let output = stdout.replace(/<(|\/)code>/g, '');
      output = output.replace(/##+(.*)/g, value => value.split('(')[0]);
      const markdownContent = `${markdownHeader}<!-- doc -->\n${output}`;
      console.log(`writing docz for ${file}`);
      fs.writeFile(`${fileParentDir}/${mdxFileName}`, markdownContent, error => {
        // Fix: throwing here crashed the process from an async callback;
        // log and continue with the remaining files instead.
        if (error) {
          console.error('an error occurred ==>', error);
          return;
        }
        console.log(`successfully created docs for ${file}`);
      });
    });
  });
});
|
#!/usr/bin/env bash
# http://redsymbol.net/articles/unofficial-bash-strict-mode
set -euo pipefail
usage()
{
cat <<END
deploy.sh: deploys the $app_name application to a Kubernetes cluster using Helm.
Parameters:
--aks-name <AKS cluster name>
The name of the AKS cluster. Required when the registry (using the -r parameter) is set to "aks".
--aks-rg <AKS resource group>
The resource group for the AKS cluster. Required when the registry (using the -r parameter) is set to "aks".
-b | --build-solution
Force a solution build before deployment (default: false).
-d | --dns <dns or ip address> | --dns aks
Specifies the external DNS/ IP address of the Kubernetes cluster.
If 'aks' is set as value, the DNS value is retrieved from the AKS. --aks-name and --aks-rg are needed.
When --use-local-k8s is specified the external DNS is automatically set to localhost.
-h | --help
Displays this help text and exits the script.
--image-build
Build images (default is to not build all images).
--image-push
Upload images to the container registry (default is not pushing to the custom registry)
-n | --app-name <the name of the app>
Specifies the name of the application (default: eshop).
--namespace <namespace name>
Specifies the namespace name to deploy the app. If it doesn't exists it will be created (default: eshop).
-p | --docker-password <docker password>
The Docker password used to logon to the custom registry, supplied using the -r parameter.
-r | --registry <container registry>
Specifies the container registry to use (required), e.g. myregistry.azurecr.io.
--skip-clean
Do not clean the Kubernetes cluster (default is to clean the cluster).
--skip-infrastructure
Do not deploy infrastructure resources (like sql-data, no-sql or redis).
This is useful for production environments where infrastructure is hosted outside the Kubernetes cluster.
-t | --tag <docker image tag>
The tag used for the newly created docker images. Default: latest.
-u | --docker-username <docker username>
The Docker username used to logon to the custom registry, supplied using the -r parameter.
--use-local-k8s
Deploy to a locally installed Kubernetes (default: false).
It is assumed that the Kubernetes cluster has been granted access to the container registry.
If using AKS and ACR see link for more info:
https://docs.microsoft.com/en-us/azure/container-registry/container-registry-auth-aks
WARNING! THE SCRIPT WILL COMPLETELY DESTROY ALL DEPLOYMENTS AND SERVICES VISIBLE
FROM THE CURRENT CONFIGURATION CONTEXT AND NAMESPACE.
It is recommended that you check your selected namespace, 'eshop' by default, is already in use.
Every deployment and service done in the namespace will be deleted.
For more information see https://kubernetes.io/docs/tasks/administer-cluster/namespaces/
END
}
app_name='eshop'
aks_name='eshop-aks'
aks_rg='DC-to-K8s'
build_images=''
clean='yes'
build_solution=''
container_registry='testacrdc.azurecr.io'
docker_password='8N2XWx+YO75nl7yBV+lwPT7jngmBbgtA'
docker_username='testacrdc'
dns='aks'
image_tag='linux-latest'
push_images=''
skip_infrastructure=''
use_local_k8s=''
namespace='eshop'
#app_name='eshop'
#aks_name=''
#aks_rg=''
#build_images=''
#clean='yes'
#build_solution=''
#container_registry=''
#docker_password=''
#docker_username=''
#dns=''
#image_tag='latest'
#push_images=''
#skip_infrastructure=''
#use_local_k8s=''
#namespace='eshop'
while [[ $# -gt 0 ]]; do
case "$1" in
--aks-name )
aks_name="$2"; shift 2;;
--aks-rg )
aks_rg="$2"; shift 2;;
-b | --build-solution )
build_solution='yes'; shift ;;
-d | --dns )
dns="$2"; shift 2;;
-h | --help )
usage; exit 1 ;;
-n | --app-name )
app_name="$2"; shift 2;;
-p | --docker-password )
docker_password="$2"; shift 2;;
-r | --registry )
container_registry="$2"; shift 2;;
--skip-clean )
clean=''; shift ;;
--image-build )
build_images='yes'; shift ;;
--image-push )
push_images='yes'; shift ;;
--skip-infrastructure )
skip_infrastructure='yes'; shift ;;
-t | --tag )
image_tag="$2"; shift 2;;
-u | --docker-username )
docker_username="$2"; shift 2;;
--use-local-k8s )
use_local_k8s='yes'; shift ;;
--namespace )
namespace="$2"; shift 2;;
*)
echo "Unknown option $1"
usage; exit 2 ;;
esac
done
# Optionally rebuild the .NET solution before creating images.
if [[ $build_solution ]]; then
    echo "#################### Building $app_name solution ####################"
    dotnet publish -o obj/Docker/publish ../../eShopOnContainers-ServicesAndWebApps.sln
fi

export TAG=$image_tag

if [[ $build_images ]]; then
    echo "#################### Building the $app_name Docker images ####################"
    # NOTE(review): -p expects a project NAME, not a path; this looks like it
    # was meant to be --project-directory. Left unchanged pending confirmation.
    docker-compose -p ../.. -f ../../docker-compose.yml build

    # Remove temporary images
    docker rmi $(docker images -qf "dangling=true")
fi

use_custom_registry=''

if [[ -n $container_registry ]]; then
    echo "################ Log into custom registry $container_registry ##################"
    use_custom_registry='yes'
    if [[ -z $docker_username ]] || [[ -z $docker_password ]]; then
        echo "Error: Must use -u (--docker-username) AND -p (--docker-password) if specifying custom registry"
        exit 1
    fi
    docker login -u $docker_username -p $docker_password $container_registry
fi

if [[ $push_images ]]; then
    echo "#################### Pushing images to the container registry ####################"
    services=(basket.api catalog.api identity.api ordering.api payment.api webmvc webspa webstatus)

    # Bug fix: this probe previously referenced $service before the loop below
    # had defined it, so the filter never matched a real image. Probe with the
    # first service as a representative sample: if the plain tag is absent
    # locally, fall back to the "linux-" prefixed tag produced by the build.
    if [[ -z "$(docker image ls -q --filter=reference=eshop/${services[0]}:$image_tag)" ]]; then
        image_tag=linux-$image_tag
    fi

    for service in "${services[@]}"
    do
        echo "Pushing image for service $service..."
        docker tag "eshop/$service:$image_tag" "$container_registry/$service:$image_tag"
        docker push "$container_registry/$service:$image_tag"
    done
fi

ingress_values_file="ingress_values.yaml"

if [[ $use_local_k8s ]]; then
    ingress_values_file="ingress_values_dockerk8s.yaml"
    dns="localhost"
fi

if [[ $dns == "aks" ]]; then
    echo "#################### Begin AKS discovery based on the --dns aks setting. ####################"
    if [[ -z $aks_name ]] || [[ -z $aks_rg ]]; then
        echo "Error: When using -dns aks, MUST set -aksName and -aksRg too."
        echo ''
        usage
        exit 1
    fi
    echo "Getting AKS cluster $aks_name AKS (in resource group $aks_rg)"
    # JMESPath queries are case sensitive and httpapplicationrouting can be lowercase sometimes
    jmespath_dnsqueries=(\
        addonProfiles.httpApplicationRouting.config.HTTPApplicationRoutingZoneName \
        addonProfiles.httpapplicationrouting.config.HTTPApplicationRoutingZoneName \
    )
    for q in "${jmespath_dnsqueries[@]}"
    do
        dns="$(az aks show -n $aks_name -g $aks_rg --query $q -o tsv)"
        if [[ -n $dns ]]; then break; fi
    done
    if [[ -z $dns ]]; then
        echo "Error: when getting DNS of AKS $aks_name (in resource group $aks_rg). Please ensure AKS has httpRouting enabled AND Azure CLI is logged in and is of version 2.0.37 or higher."
        exit 1
    fi
    echo "DNS base found is $dns. Will use $aks_name.$dns for the app!"
    dns="$aks_name.$dns"
fi

# Initialization & check commands
if [[ -z $dns ]]; then
    echo "No DNS specified. Ingress resources will be bound to public IP."
fi

if [[ $clean ]]; then
    echo "Cleaning previous helm releases..."
    if [[ -z $(helm ls -q --namespace $namespace) ]]; then
        echo "No previous releases found"
    else
        helm uninstall $(helm ls -q --namespace $namespace)
        echo "Previous releases deleted"
        # Give Kubernetes a few seconds to tear resources down before reinstalling.
        waitsecs=10; while [ $waitsecs -gt 0 ]; do echo -ne "$waitsecs\033[0K\r"; sleep 1; : $((waitsecs--)); done
    fi
fi

echo "#################### Begin $app_name installation using Helm ####################"
infras=(sql-data nosql-data rabbitmq keystore-data basket-data)
charts=(eshop-common apigwms apigwws basket-api catalog-api identity-api mobileshoppingagg ordering-api ordering-backgroundtasks ordering-signalrhub payment-api webmvc webshoppingagg webspa webstatus webhooks-api webhooks-web)

# Bug fix: '[[ !$skip_infrastructure ]]' tested the literal string "!"/"!yes",
# which is always non-empty, so --skip-infrastructure was silently ignored.
if [[ -z $skip_infrastructure ]]; then
    for infra in "${infras[@]}"
    do
        echo "Installing infrastructure: $infra"
        helm install "$app_name-$infra" --namespace $namespace --set "ingress.hosts={$dns}" --values app.yaml --values inf.yaml --values $ingress_values_file --set app.name=$app_name --set inf.k8s.dns=$dns $infra
    done
fi

for chart in "${charts[@]}"
do
    echo "Installing: $chart"
    if [[ $use_custom_registry ]]; then
        # 'eshop-docker-scret' is the secret name the charts expect; do not "fix" the spelling here alone.
        helm install "$app_name-$chart" --namespace $namespace --set "ingress.hosts={$dns}" --set inf.registry.server=$container_registry --set inf.registry.login=$docker_username --set inf.registry.pwd=$docker_password --set inf.registry.secretName=eshop-docker-scret --values app.yaml --values inf.yaml --values $ingress_values_file --set app.name=$app_name --set inf.k8s.dns=$dns --set image.tag=$image_tag --set image.pullPolicy=Always $chart
    elif [[ $chart != "eshop-common" ]]; then # eshop-common is ignored when no secret must be deployed
        helm install "$app_name-$chart" --namespace $namespace --set "ingress.hosts={$dns}" --values app.yaml --values inf.yaml --values $ingress_values_file --set app.name=$app_name --set inf.k8s.dns=$dns --set image.tag=$image_tag --set image.pullPolicy=Always $chart
    fi
done

echo "FINISHED: Helm charts installed."
|
<reponame>winks/adventofcode
package org.f5n.aoc2020.utils;
import org.f5n.aoc2020.utils.Result;
public class StringResult extends Result {
public String value;
public StringResult(String value, long start) {
this.value = value;
this.runtime = (System.currentTimeMillis() - start);
}
public String getValue() {
return "" + value;
}
public long getIntValue() {
return 0;
}
public String toString() {
return "Int"+super.toString();
}
}
|
#!/bin/bash -ex

# Must run as root: virsh/vbmc access and writing into $SUDO_USER's home require it.
if [[ $EUID -ne 0 ]]; then
    echo "This script must be run by root. Please use sudo"
    exit 1
fi

my_file="$(readlink -e "$0")"
my_dir="$(dirname $my_file)"
#BASE_IMAGE="/home/ubuntu/rhel-7.7.qcow2"

# Presumably provides IPMI_USER/IPMI_PASSWORD used below - verify the file's contents.
source "/home/$SUDO_USER/rhosp-environment.sh"
cd $my_dir
# collect MAC addresses of overcloud machines
# collect MAC addresses of overcloud machines
# Prints the MAC of the "prov" interface of the rhosp13-overcloud-<type> VM.
# NOTE(review): /tmp/nodes-$type.txt is truncated but never written to
# afterwards in this script - confirm whether it is still needed.
function get_macs() {
    local type=$1
    truncate -s 0 /tmp/nodes-$type.txt
    virsh domiflist rhosp13-overcloud-$type | awk '$3 ~ "prov" {print $5};'
}

# Prints the virtual BMC IP address for the given overcloud node type.
function get_vbmc_ip() {
    local type=$1
    vbmc list | grep rhosp13-overcloud-$type | awk -F\| '{print $4}'
}

# Prints the virtual BMC port for the given overcloud node type.
function get_vbmc_port() {
    local type=$1
    vbmc list | grep rhosp13-overcloud-$type | awk -F\| '{print $5}'
}
# Appends one node definition to instackenv.json.
# $1 capabilities string, $2 MAC address, $3 IPMI/vBMC IP, $4 IPMI/vBMC port.
# Note: emits a trailing comma after each entry; the caller strips the last one.
function define_machine() {
    local caps=$1
    local mac=$2
    local pm_ip=$3
    local pm_port=$4
    cat << EOF >> instackenv.json
{
  "pm_type": "pxe_ipmitool",
  "pm_addr": "$pm_ip",
  "pm_port": "$pm_port",
  "pm_user": "$IPMI_USER",
  "pm_password": "$IPMI_PASSWORD",
  "mac": [
    "$mac"
  ],
  "cpu": "2",
  "memory": "1000",
  "disk": "29",
  "arch": "x86_64",
  "capabilities": "$caps"
},
EOF
}
# create overcloud machines definition
cat << EOF > instackenv.json
{
  "power_manager": "nova.virt.baremetal.virtual_power_driver.VirtualPowerManager",
  "arch": "x86_64",
  "nodes": [
EOF

# Map short node type names to the TripleO profile names used in capabilities.
declare -A longname=( ["cont"]="controller" ["compute"]="compute" ["ctrlcont"]="contrail-controller" )

unset vbmc_ip
unset vbmc_port
unset mac

# One entry per overcloud node type; MAC and vBMC endpoint come from libvirt/vbmc.
for node in 'cont' 'compute' 'ctrlcont'; do
    vbmc_ip=$(get_vbmc_ip $node)
    vbmc_port=$(get_vbmc_port $node)
    mac=$(get_macs $node)
    define_machine "profile:${longname[$node]},boot_option:local" $mac $vbmc_ip $vbmc_port
done

# remove last comma: drop the final "}," line, then re-add "}" plus the closing brackets
head -n -1 instackenv.json > instackenv.json.tmp
mv instackenv.json.tmp instackenv.json
cat << EOF >> instackenv.json
}
]
}
EOF

mv instackenv.json /home/$SUDO_USER/

# check this json (it's optional)
#curl --silent -O https://raw.githubusercontent.com/rthallisey/clapper/master/instackenv-validator.py
#python instackenv-validator.py -f ~/instackenv.json
|
// TexTab.hpp
// <NAME>, 20th February 1997
#ifndef _UNDO_HPP_
#define _UNDO_HPP_
#include "Prim.h"
#define MAX_UNDO 100 // depth of the undo/redo ring buffers

// Undo record type codes, stored in GenericUndo::Type; they select which
// member of the record's anonymous union is valid.
#define UNDO_APPLY_TEXTURE_PRIM4 1
#define UNDO_APPLY_TEXTURE_PRIM3 2
#define UNDO_APPLY_TEXTURE_CUBE 3
#define UNDO_PLACE_OBJECT 4
#define UNDO_DEL_OBJECT 5
#define UNDO_MOVE_OBJECT 6
#define UNDO_PLACE_CUBE 7
#define UNDO_MOVE_TEXTURE 8
#define UNDO_APPLY_PRIM4 9
#define UNDO_APPLY_PRIM3 10
// One record in the undo buffer. 'Type' (one of the UNDO_* codes above)
// selects which member of the anonymous union holds the saved state.
struct GenericUndo
{
	UBYTE Type;
	union
	{
		// Saved texture state for the APPLY_TEXTURE_* / MOVE_TEXTURE / APPLY_PRIM* ops.
		struct
		{
			UBYTE DrawFlags;
			UBYTE Colour;
			UWORD Face;
			UWORD Page;
			UBYTE U[4]; // per-vertex texture U coordinates
			UBYTE V[4]; // per-vertex texture V coordinates
		}Texture;
		// Saved object placement for PLACE/DEL/MOVE_OBJECT ops.
		struct
		{
			UWORD Thing;
			UWORD Prim;
			SLONG X;
			SLONG Y;
			SLONG Z;
		}Object;
		// Saved cube placement for PLACE_CUBE (previous and current cube ids).
		struct
		{
			UWORD PCube;
			UWORD CCube;
			SLONG X;
			SLONG Y;
			SLONG Z;
		}Cube;
		// Saved element/face textures for APPLY_TEXTURE_CUBE.
		struct
		{
			UWORD Ele;
			UWORD Face;
			UWORD Text1;
			UWORD Text2;
		}Ele;
	};
};
// Fixed-size undo manager: each editing operation records its previous state
// via one of the Apply*/Place*/Move*/Del* methods, and DoUndo replays the most
// recent record. Two parallel buffers are kept (undo and undo-of-undo).
class Undo
{
private:
	// Move the write cursor forward/backward for the given buffer (undo vs redo).
	void advance_current_undo(UBYTE undo_mode);
	void retreat_current_undo(UBYTE undo_mode);
	SWORD index;       // current position in undo_info
	SWORD index_undo;  // current position in undo_undo_info
public:
	Undo(void);
	// Each recorder captures the pre-operation state of one editing action.
	void ApplyPrim4(UBYTE undo_mode,UWORD face,PrimFace4 *the_prim4);
	void ApplyPrim3(UBYTE undo_mode,UWORD face,PrimFace3 *the_prim3);
	void ApplyTexturePrim4(UBYTE undo_mode,UWORD page,UWORD face,UBYTE u1,UBYTE v1,UBYTE u2,UBYTE v2,UBYTE u3,UBYTE v3,UBYTE u4,UBYTE v4);
	void ApplyTexturePrim3(UBYTE undo_mode,UWORD page,UWORD face,UBYTE u1,UBYTE v1,UBYTE u2,UBYTE v2,UBYTE u3,UBYTE v3);
	void ApplyTextureCube(UBYTE undo_mode,UWORD ele,UWORD face,UWORD text1,UWORD text2);
	void MoveTexture(UBYTE undo_mode,UWORD page,UWORD face,UBYTE u1,UBYTE v1,UBYTE u2,UBYTE v2,UBYTE u3,UBYTE v3,UBYTE u4,UBYTE v4);
	void PlaceObject(UBYTE undo_mode,UWORD prim,UWORD thing,SLONG x,SLONG y,SLONG z);
	void MoveObject(UBYTE undo_mode,UWORD thing,SLONG dx,SLONG dy,SLONG dz);
	void DelObject(UBYTE undo_mode,UWORD prim,UWORD thing,SLONG x,SLONG y,SLONG z);
	void PlaceCube(UBYTE undo_mode,UWORD prev_cube,UWORD cur_cube,SLONG x,SLONG y,SLONG z);
	// Undoes the most recent record; return value signals what was undone.
	SLONG DoUndo(UBYTE undo_mode);
	GenericUndo undo_info[MAX_UNDO];
	GenericUndo undo_undo_info[MAX_UNDO];
};
#endif
|
<reponame>nw55/es-logging<gh_stars>0
import { LogMessage, LogWriter } from './common';
import { LogLevel } from './log-level';
/**
 * A LogWriter that fans messages out to a flat list of child writers.
 * The static helpers treat `null` as "no writer" and implement add/remove
 * with multiset semantics (the same writer may appear more than once).
 */
export class CombinedLogWriter implements LogWriter {
    /** Returns a writer equivalent to `logWriter` plus `add` (or just `add` when none existed). */
    static addLogWriter(logWriter: LogWriter | null, add: LogWriter) {
        if (logWriter === null)
            return add;
        return new CombinedLogWriter([logWriter, add]);
    }

    /**
     * Returns `logWriter` with `remove` taken out, or `null` when nothing remains.
     * When both sides are combined writers, each writer in `remove` cancels at
     * most one matching occurrence in `logWriter` (multiset difference).
     */
    static removeLogWriter(logWriter: LogWriter | null, remove: LogWriter) {
        if (logWriter === remove || logWriter === null)
            return null;
        if (!(logWriter instanceof CombinedLogWriter)) {
            // Single writer: removing a combined set that contains it yields null.
            if (remove instanceof CombinedLogWriter) {
                if (remove.writers.includes(logWriter))
                    return null;
            }
            return logWriter;
        }
        if (remove instanceof CombinedLogWriter) {
            // Count how many occurrences of each writer must be removed ...
            const writersToRemove = new Map<LogWriter, number>();
            for (const writer of remove.writers)
                writersToRemove.set(writer, (writersToRemove.get(writer) ?? 0) + 1);
            // ... then retain only the occurrences that exceed those counts.
            const writersToRetain = [];
            for (const writer of logWriter.writers) {
                const removeCount = writersToRemove.get(writer);
                if (removeCount !== undefined && removeCount > 0)
                    writersToRemove.set(writer, removeCount - 1);
                else
                    writersToRetain.push(writer);
            }
            if (writersToRetain.length === 0)
                return null;
            return new CombinedLogWriter(writersToRetain);
        }
        // Removing a single writer: drop only its first occurrence.
        const writersToRetain = [];
        let removed = false;
        for (const writer of logWriter.writers) {
            if (!removed && writer === remove)
                removed = true;
            else
                writersToRetain.push(writer);
        }
        if (writersToRetain.length === 0)
            return null;
        return new CombinedLogWriter(writersToRetain);
    }

    private _writers: LogWriter[];

    /** Nested CombinedLogWriters are flattened so `_writers` never contains one. */
    constructor(writers: readonly LogWriter[]) {
        this._writers = writers.flatMap(writer => writer instanceof CombinedLogWriter ? writer._writers : writer);
    }

    get writers(): readonly LogWriter[] {
        return this._writers;
    }

    /** True when at least one child writer would log at this level/source. */
    shouldLog(level: LogLevel, source?: string) {
        for (const writer of this._writers) {
            if (writer.shouldLog(level, source))
                return true;
        }
        return false;
    }

    /** Forwards the message to every child writer that accepts it. */
    log(message: LogMessage) {
        for (const writer of this._writers) {
            if (writer.shouldLog(message.level, message.source))
                writer.log(message);
        }
    }
}
|
#!/usr/bin/env bash
# Re-entrant make wrapper: on the host it re-runs the requested make target
# inside the project's dev Docker container; inside the container (detected
# via /.dockerenv) it simply runs make directly.
# Either an ssh key or an ssh agent is needed to pull adobe-platform sources
# from git; this supports two methods via the SSH1/SSH2 placeholders below.
set -o errexit
set -o pipefail

TARGET="$1"                            # make target to run
SSH1=""                                # placeholder: ssh key mount options
SSH2=""                                # placeholder: ssh agent forwarding options
SHA=${SHA:-"$(git rev-parse HEAD)"}    # current commit, passed into the container

if [ ! -e /.dockerenv ]; then
    echo
    echo
    echo "-----------------------------------------------------"
    echo "Running target \"$TARGET\" inside Docker container..."
    echo "-----------------------------------------------------"
    echo
    set -x
    docker run -i --rm $SSH1 $SSH2 $AWS_ENV_VAR_OPTS \
        --name=go_chatbot_lab_make_docker_$TARGET \
        -e sha=$SHA \
        -v $PWD:/go/src/github.com/bossjones/go-chatbot-lab \
        -w /go/src/github.com/bossjones/go-chatbot-lab \
        bossjones/go-chatbot-lab:dev \
        make $TARGET
else
    make $TARGET
fi
|
#!/bin/bash
# Copyright (c) Philipp Wagner. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
#
# Bug fix: the shebang was #!/bin/sh, but this script relies on bashisms
# ([[ ]], read -p, the `function` keyword), which break under a POSIX sh.

# Prompt the user with $1 and echo "yes" or "no".
# Any answer other than y/yes (case-insensitive) counts as "no".
function ask_yes_or_no() {
    read -p "$1 ([y]es or [N]o): "
    case $(echo $REPLY | tr '[A-Z]' '[a-z]') in
        y|yes) echo "yes" ;;
        *)     echo "no" ;;
    esac
}
STDOUT=stdout.log
STDERR=stderr.log
LOGFILE=query_output.log

# Connection defaults; each can be overridden interactively below.
HostName=localhost
PortNumber=5432
DatabaseName=sampledb
UserName=philipp

if [[ "no" == $(ask_yes_or_no "Use Host ($HostName) Port ($PortNumber)") ]]
then
    read -p "Enter HostName: " HostName
    read -p "Enter Port: " PortNumber
fi

if [[ "no" == $(ask_yes_or_no "Use Database ($DatabaseName)") ]]
then
    # Bug fix: this used to read into ServerName, so the entered database
    # name was silently ignored and the default was always used.
    read -p "Enter Database: " DatabaseName
fi

if [[ "no" == $(ask_yes_or_no "Use User ($UserName)") ]]
then
    read -p "Enter User: " UserName
fi

# -s: don't echo the password. Export it so the psql child processes actually
# see it (previously it was set but never exported, so psql prompted anyway).
read -s -p "Password: " PGPASSWORD
echo
export PGPASSWORD

# Schemas
psql -h $HostName -p $PortNumber -d $DatabaseName -U $UserName < 01_Schemas/schema_sample.sql -L $LOGFILE 1>$STDOUT 2>$STDERR

# Tables
psql -h $HostName -p $PortNumber -d $DatabaseName -U $UserName < 02_Tables/tables_sample.sql -L $LOGFILE 1>>$STDOUT 2>>$STDERR

# Keys
psql -h $HostName -p $PortNumber -d $DatabaseName -U $UserName < 03_Keys/keys_sample.sql -L $LOGFILE 1>>$STDOUT 2>>$STDERR

# Security
psql -h $HostName -p $PortNumber -d $DatabaseName -U $UserName < 05_Security/security_sample.sql -L $LOGFILE 1>>$STDOUT 2>>$STDERR

# Data
psql -h $HostName -p $PortNumber -d $DatabaseName -U $UserName < 06_Data/data_sample_stations.sql -L $LOGFILE 1>>$STDOUT 2>>$STDERR
<reponame>zhangpc0309/cesium-czmleditor<filename>js/fields/checkbox.js
// Vuetify tri-state checkbox bound to entity[feature][field]; an undefined
// value renders as indeterminate.
// NOTE(review): v-model is given a method call ("isChecked()"), which Vue
// normally rejects because v-model needs a writable expression; the @change
// handler performs the actual write. Confirm this template compiles as intended.
const template = `<v-checkbox hide-details type="checkbox" class="direct-property-field"
v-model="isChecked()" :indeterminate.prop="isChecked() === undefined"
@change="change" :label="label"></v-checkbox>`;

Vue.component('checkbox-field', {
    // entity: object being edited; feature/field: property path into it; label: display text
    props: ['entity', 'feature', 'field', 'label'],
    template: template,
    methods: {
        // Write the new checkbox state back to the entity and notify the parent.
        change: function(checked) {
            this.entity[this.feature][this.field] = checked;
            this.$emit('input', checked, this.field, this.feature, this.entity);
        },
        // Current boolean value, or undefined when the property is unset/falsy-wrapped.
        isChecked: function() {
            let fld = this.entity[this.feature][this.field];
            return fld ? fld.valueOf() : undefined;
        }
    }
});
module Puppeteer::Launcher
  class Base
    # @param project_root [String]
    # @param preferred_revision [String]
    # @param is_puppeteer_core [Boolean] when true, PUPPETEER_* env vars are ignored.
    def initialize(project_root:, preferred_revision:, is_puppeteer_core:)
      @project_root = project_root
      @preferred_revision = preferred_revision
      @is_puppeteer_core = is_puppeteer_core
    end

    class ExecutablePathNotFound < StandardError; end

    # @return [String] Chrome/Firefox executable file path.
    # @raise [ExecutablePathNotFound] when PUPPETEER_EXECUTABLE_PATH is set but invalid.
    def resolve_executable_path
      if !@is_puppeteer_core
        # puppeteer-core doesn't take into account PUPPETEER_* env variables.
        executable_path = ENV['PUPPETEER_EXECUTABLE_PATH']
        # Bug fix: only honour the env var when it is actually set. Previously
        # FileTest.exist?(nil) raised a TypeError whenever it was unset; now we
        # fall through to the platform-default paths below instead.
        if executable_path
          if FileTest.exist?(executable_path)
            return executable_path
          end
          raise ExecutablePathNotFound.new(
            "Tried to use PUPPETEER_EXECUTABLE_PATH env variable to launch browser but did not find any executable at: #{executable_path}",
          )
        end
      end

      # temporal logic: hard-coded per-platform default install locations.
      if Puppeteer.env.darwin?
        case self
        when Chrome
          '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
        when Firefox
          '/Applications/Firefox Nightly.app/Contents/MacOS/firefox'
        end
      elsif Puppeteer.env.windows?
        case self
        when Chrome
          'C:\Program Files\Google\Chrome\Application\chrome.exe'
          # 'C:\Program Files (x86)\Microsoft\Edge\Application\msedge.exe'
        when Firefox
          'C:\Program Files\Firefox Nightly\firefox.exe'
        end
      else
        case self
        when Chrome
          '/usr/bin/google-chrome'
        when Firefox
          '/usr/bin/firefox'
        end
      end

      # const browserFetcher = new BrowserFetcher(launcher._projectRoot);
      # if (!launcher._isPuppeteerCore) {
      #   const revision = process.env['PUPPETEER_CHROMIUM_REVISION'];
      #   if (revision) {
      #     const revisionInfo = browserFetcher.revisionInfo(revision);
      #     const missingText = !revisionInfo.local ? 'Tried to use PUPPETEER_CHROMIUM_REVISION env variable to launch browser but did not find executable at: ' + revisionInfo.executablePath : null;
      #     return {executablePath: revisionInfo.executablePath, missingText};
      #   }
      # }
      # const revisionInfo = browserFetcher.revisionInfo(launcher._preferredRevision);
      # const missingText = !revisionInfo.local ? `Browser is not downloaded. Run "npm install" or "yarn install"` : null;
      # return {executablePath: revisionInfo.executablePath, missingText};
    end
  end
end
|
/*
* bls_bls_signature_t.hpp
*
* Created on: Nov 30, 2021
* Author: mad
*/
#ifndef INCLUDE_MMX_BLS_bls_signature_t_HPP_
#define INCLUDE_MMX_BLS_bls_signature_t_HPP_
#include <mmx/hash_t.hpp>
#include <mmx/skey_t.hpp>
#include <mmx/bls_pubkey_t.hpp>
namespace mmx {

// 96-byte serialized BLS (G2) signature with sign/verify helpers that bridge
// between the raw byte representation and the bls library's G2Element type.
struct bls_signature_t : bytes_t<96> {

	typedef bytes_t<96> super_t;

	bls_signature_t() = default;

	// Serializes a G2 element into the fixed 96-byte buffer.
	bls_signature_t(const bls::G2Element& sig);

	// Verifies this signature over `hash` for `pubkey` (AugSchemeMPL).
	bool verify(const bls_pubkey_t& pubkey, const hash_t& hash) const;

	// Deserializes the stored bytes back into a G2 element.
	bls::G2Element to_bls() const;

	static bls_signature_t sign(const skey_t& skey, const hash_t& hash);

	static bls_signature_t sign(const bls::PrivateKey& skey, const hash_t& hash);

};

inline
bls_signature_t::bls_signature_t(const bls::G2Element& sig)
{
	const auto tmp = sig.Serialize();
	// Guard against a library/ABI mismatch producing a different size.
	if(tmp.size() != bytes.size()) {
		throw std::logic_error("signature size mismatch");
	}
	::memcpy(bytes.data(), tmp.data(), tmp.size());
}

inline
bool bls_signature_t::verify(const bls_pubkey_t& pubkey, const hash_t& hash) const
{
	bls::AugSchemeMPL MPL;
	return MPL.Verify(pubkey.to_bls(), bls::Bytes(hash.bytes.data(), hash.bytes.size()), to_bls());
}

inline
bls::G2Element bls_signature_t::to_bls() const
{
	return bls::G2Element::FromBytes(bls::Bytes(bytes.data(), bytes.size()));
}

inline
bls_signature_t bls_signature_t::sign(const skey_t& skey, const hash_t& hash)
{
	const auto bls_skey = bls::PrivateKey::FromBytes(bls::Bytes(skey.data(), skey.size()));
	return sign(bls_skey, hash);
}

inline
bls_signature_t bls_signature_t::sign(const bls::PrivateKey& skey, const hash_t& hash)
{
	bls::AugSchemeMPL MPL;
	return MPL.Sign(skey, bls::Bytes(hash.bytes.data(), hash.bytes.size()));
}

} // mmx
namespace vnx {

// vnx serialization adapters: binary read/write delegate to the underlying
// bytes_t<96> base; stream output uses the string form.
inline
void read(vnx::TypeInput& in, mmx::bls_signature_t& value, const vnx::TypeCode* type_code, const uint16_t* code) {
	vnx::read(in, (mmx::bls_signature_t::super_t&)value, type_code, code);
}

inline
void write(vnx::TypeOutput& out, const mmx::bls_signature_t& value, const vnx::TypeCode* type_code = nullptr, const uint16_t* code = nullptr) {
	vnx::write(out, (const mmx::bls_signature_t::super_t&)value, type_code, code);
}

inline
void read(std::istream& in, mmx::bls_signature_t& value) {
	vnx::read(in, (mmx::bls_signature_t::super_t&)value);
}

inline
void write(std::ostream& out, const mmx::bls_signature_t& value) {
	vnx::write(out, value.to_string());
}

inline
void accept(vnx::Visitor& visitor, const mmx::bls_signature_t& value) {
	vnx::accept(visitor, (const mmx::bls_signature_t::super_t&)value);
}

} // vnx
#endif /* INCLUDE_MMX_BLS_bls_signature_t_HPP_ */
|
#!/usr/bin/env bash

# Tag the locally built image with an optional registry prefix, push it, then
# drop the temporary repo-qualified tag again.
# Resolve the repository root relative to this script so it can be run from
# any working directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$DIR/.."

DOCKER_IMAGE=${DOCKER_IMAGE:-xebecproject/xebecd-develop}
DOCKER_TAG=${DOCKER_TAG:-latest}

# Optional registry prefix, e.g. DOCKER_REPO=myregistry.example.com
if [ -n "$DOCKER_REPO" ]; then
    DOCKER_IMAGE_WITH_REPO=$DOCKER_REPO/$DOCKER_IMAGE
else
    DOCKER_IMAGE_WITH_REPO=$DOCKER_IMAGE
fi

# Robustness fix: quote all expansions so unexpected whitespace in the
# environment-supplied values cannot split into extra arguments.
docker tag "$DOCKER_IMAGE:$DOCKER_TAG" "$DOCKER_IMAGE_WITH_REPO:$DOCKER_TAG"
docker push "$DOCKER_IMAGE_WITH_REPO:$DOCKER_TAG"
docker rmi "$DOCKER_IMAGE_WITH_REPO:$DOCKER_TAG"
import * as React from "react";
import * as _ from "lodash";
import {WorkspaceTabbedModel} from "../models/WorkspaceTabbedModel";
import {WorkspaceEngine} from "../WorkspaceEngine";
import {DraggableWidget} from "./DraggableWidget";
import {ContainerWidget} from "./ContainerWidget";
import {WorkspacePanelModel} from "../models/WorkspacePanelModel";
import {DropZoneWidget} from "./DropZoneWidget";
import {TabButtonWidget} from "./TabButtonWidget";
/**
 * Props for TabGroupWidget: the tabbed workspace model to render and the
 * engine used to resolve panel factories.
 */
export interface TabGroupWidgetProps {
    model: WorkspaceTabbedModel;
    engine: WorkspaceEngine;
}

/** Component state (currently empty). */
export interface TabGroupWidgetState {
}
/**
 * Renders a tab group: one button per child model, the selected child's panel
 * content below, and a drop-container overlay (top edge hidden).
 */
export class TabGroupWidget extends React.Component<TabGroupWidgetProps, TabGroupWidgetState> {
    constructor(props: TabGroupWidgetProps) {
        super(props);
        this.state = {};
    }

    render() {
        const { model, engine } = this.props;
        const selectedModel = model.getSelected();
        const contentFactory = engine.getFactory(selectedModel);
        const expandModifier = model.expand ? 'expand' : 'contract';
        // _.map handles both array and object-shaped `children` collections.
        const tabButtons = _.map(model.children, (child) => (
            <TabButtonWidget model={child} engine={engine} key={child.id} />
        ));

        return (
            <div className={'srw-tabgroup srw-tabgroup--' + expandModifier}>
                <div className="srw-tabgroup__tabs">
                    {tabButtons}
                </div>
                <div className="srw-tabgroup__content">
                    {contentFactory.generatePanelContent(selectedModel)}
                </div>
                <ContainerWidget engine={engine} model={model} hide={['top']}/>
            </div>
        );
    }
}
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed for reproducibility:
#   -d 3  : test id 3, -g 61 : generator id 61, -S : seed value.
# (See `dieharder -l` / `dieharder -g -1` for the id tables — confirm ids.)
dieharder -d 3 -g 61 -S 944174278
|
package com.example.admin.bluetoothrwdemo.bean;
/**
 * Plain value object describing one RFID tag read reported over Bluetooth:
 * its ordinal number in the read list, the EPC/TID identifier, the read
 * count and the signal strength — all kept as display strings.
 */
public class TagInfo {

    private String mOrderNumber;
    private String mEpcTid;
    private String mTimes;
    private String mRssi;

    public String getOrderNumber() {
        return mOrderNumber;
    }

    public void setOrderNumber(String orderNumber) {
        mOrderNumber = orderNumber;
    }

    public String getEpcTid() {
        return mEpcTid;
    }

    public void setEpcTid(String epcTid) {
        mEpcTid = epcTid;
    }

    public String getTimes() {
        return mTimes;
    }

    public void setTimes(String times) {
        mTimes = times;
    }

    public String getRssi() {
        return mRssi;
    }

    public void setRssi(String rssi) {
        mRssi = rssi;
    }
}
|
<filename>server/library/socketio.ts<gh_stars>1-10
// node
import * as http from 'http';
// vendor (node_modules)
import * as express from 'express';
// library
import DeezerLibrary from './deezer';
import { ArduinoLibrary } from './arduino';
// configuration
import { IConfiguration } from '../configuration';
// import * as SocketioModule from 'socket.io';
import SocketIo = require('socket.io');
/**
 * Game setup sent by the game master when a new game is initialized:
 * a name and running score for each of the four team slots, plus the id of
 * the Deezer playlist to play.
 */
export interface IPlayersData {
    teamName0: string;
    teamScore0: number;
    teamName1: string;
    teamScore1: number;
    teamName2: string;
    teamScore2: number;
    teamName3: string;
    teamScore3: number;
    playlistId: number;
}

/** Socket ids of the two known client screens (null until each identifies itself). */
export interface IClientIds {
    playerScreenId: string | null;
    gameMasterScreenId: string | null;
}
/**
 * Socket.io hub for the music quiz: wires the player screen, the game master
 * screen and the Arduino buzzer hardware together, and proxies Deezer
 * playlist lookups.
 */
export class SocketIoLibrary {
    protected _io: SocketIO.Server;
    protected _configuration: IConfiguration;
    protected _deezerApi: DeezerLibrary;
    protected _arduinoLibrary: ArduinoLibrary;
    // id (0-3) of the player whose buzzer press was seen most recently, if any
    protected _latestPlayerId: number | null = null;
    // players who answered wrong and are locked out until the current song ends
    protected _playersThatGuessedWrongThisRound: number[] = [];
    protected _clientIds: IClientIds = {
        playerScreenId: null,
        gameMasterScreenId: null,
    };

    public constructor(application: express.Application, configuration: IConfiguration) {
        // socket io setup
        const server: http.Server = http.createServer(application);
        this._io = SocketIo.listen(server);
        server.listen(35001);
        // configuration
        this._configuration = configuration;
        // deezer api setup
        this._deezerApi = new DeezerLibrary();
        // arduino library setup: every hardware status line is decoded here
        this._arduinoLibrary = new ArduinoLibrary();
        this._arduinoLibrary.listener((error, data: string) => {
            this._parseArduinoData(data);
        });
    }

    /**
     * Registers all socket.io event handlers. Resolves as soon as the
     * handlers are attached (not when a client connects).
     */
    public setupSocketIo(): Promise<void> {
        // Bug fix: annotate Promise<void> so `resolve()` without a value
        // type-checks under strict settings.
        return new Promise<void>((resolve) => {
            this._io.on('connection', (socket: SocketIO.Socket) => {
                socket.join('quizRoom');
                console.log('a user connected');
                // each screen announces its role so we can address it directly later
                socket.on('identifyPlayer', () => {
                    console.log('identifyPlayer: ', socket.id);
                    this._clientIds.playerScreenId = socket.id;
                });
                socket.on('identifyGameMaster', () => {
                    console.log('identifyGameMaster: ', socket.id);
                    this._clientIds.gameMasterScreenId = socket.id;
                });
                socket.on('fetchPlaylistsList', () => {
                    this._deezerApi.getUserPlaylists(this._configuration.deezerProfileId)
                        .then((userPlaylists) => {
                            if (this._clientIds.gameMasterScreenId !== null) {
                                this._io.sockets.connected[this._clientIds.gameMasterScreenId].emit('playlistsList', userPlaylists);
                            }
                        })
                        .catch((error) => {
                            // TODO: handle error
                            console.log(error);
                        });
                });
                socket.on('initializeGame', (playersData: IPlayersData) => {
                    console.log('initializeGame, playersData: ', playersData);
                    // we now know which playlist got selected so we
                    // can the fetch playlist songs API call
                    this._fetchPlaylistSongs(playersData.playlistId, (error, playlistTracks) => {
                        if (!error) {
                            // inform both screens that the game can be initialized
                            this._io.to('quizRoom').emit('initializeScreens', playersData, playlistTracks);
                        } else {
                            // TODO: handle error
                            console.log(error);
                        }
                    });
                });
                // simulate a hardware buzzer press coming from the UI
                socket.on('playerClickColumn', (userId: number) => {
                    console.log('playerClickBuzzer, userId: ', userId);
                    const index = (userId * 3) + 1;
                    let data = '100100100100';
                    data = `${data.substr(0, index)}1${data.substr(index + 1)}`;
                    this._parseArduinoData(data);
                    this._arduinoLibrary.selectPlayer(userId);
                    this._arduinoLibrary.sendUpdateStatusButtons();
                });
                socket.on('lockPlayer', (playerId: number, isLock = true) => {
                    this._arduinoLibrary.lockPlayer(playerId, isLock);
                    this._arduinoLibrary.sendUpdateStatusButtons();
                });
                socket.on('selectPlayer', (playerId: number, isSelected = true) => {
                    this._arduinoLibrary.selectPlayer(playerId, isSelected);
                    this._arduinoLibrary.sendUpdateStatusButtons();
                });
                socket.on('resetAllPlayers', () => {
                    this._arduinoLibrary.resetAllPlayers();
                    this._arduinoLibrary.sendUpdateStatusButtons();
                });
                socket.on('playerViewReady', () => {
                    if (this._clientIds.gameMasterScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.gameMasterScreenId].emit('playerViewReady');
                    }
                });
                // playback control: game master -> player screen
                socket.on('playSong', (currentPlaylistSongIndex: number) => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('playSong', currentPlaylistSongIndex);
                    }
                });
                socket.on('resumeSong', () => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('resumeSong');
                    }
                });
                // playback status: player screen -> game master
                socket.on('songStarted', () => {
                    if (this._clientIds.gameMasterScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.gameMasterScreenId].emit('songStarted');
                    }
                });
                socket.on('songEnded', () => {
                    if (this._clientIds.gameMasterScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.gameMasterScreenId].emit('songEnded');
                    }
                    // clear the array of players that can't play this round
                    this._playersThatGuessedWrongThisRound = [];
                });
                socket.on('songPaused', (playTimeOffset: number) => {
                    if (this._clientIds.gameMasterScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.gameMasterScreenId].emit('songPaused', playTimeOffset);
                    }
                });
                socket.on('songResumed', (playTimeOffset: number) => {
                    if (this._clientIds.gameMasterScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.gameMasterScreenId].emit('songResumed', playTimeOffset);
                    }
                });
                socket.on('songLoading', (loadingProgress: number, maximumValue: number, currentValue: number) => {
                    if (this._clientIds.gameMasterScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.gameMasterScreenId].emit('songLoading', loadingProgress, maximumValue, currentValue);
                    }
                });
                socket.on('songProgress', (playingProgress: number, maximumValue: number, currentValue: number) => {
                    if (this._clientIds.gameMasterScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.gameMasterScreenId].emit('songProgress', playingProgress, maximumValue, currentValue);
                    }
                });
                socket.on('answerIsCorrect', () => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('answerIsCorrect');
                    }
                    // clear the array of players that can't play this round
                    this._playersThatGuessedWrongThisRound = [];
                });
                socket.on('answerIsWrong', () => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('answerIsWrong');
                    }
                    // add the player id to the list of players that can't play
                    // this round. Bug fix: guard against null — no buzzer may
                    // have been pressed yet, and number[] must not contain null.
                    if (this._latestPlayerId !== null) {
                        this._playersThatGuessedWrongThisRound.push(this._latestPlayerId);
                    }
                });
                socket.on('timeToAnswerRunOut', () => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('timeToAnswerRunOut');
                    }
                });
                // settings forwarded from the game master to the player screen
                socket.on('volumeChange', (value: number) => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('volumeChange', value);
                    }
                });
                socket.on('buzzerSoundSelectChange', (value: string) => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('buzzerSoundSelectChange', value);
                    }
                });
                socket.on('answerTimeSelect', (value: number) => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('answerTimeSelect', value);
                    }
                });
                socket.on('endGame', (value: number) => {
                    if (this._clientIds.playerScreenId !== null) {
                        this._io.sockets.connected[this._clientIds.playerScreenId].emit('endGame');
                    }
                });
            });
            // NOTE(review): 'disconnect' fires on individual sockets, not on the
            // server object - this handler likely never runs; confirm intent.
            this._io.on('disconnect', () => {
                console.log('user disconnected');
            });
            resolve();
        });
    }

    /**
     * Decodes the 12-character status string from the Arduino. When a
     * player's button bits are set and that player is still allowed to play
     * this round, broadcasts which player pressed first.
     */
    protected _parseArduinoData(arduinoSequence: string) {
        // Bug fix: playerId used to be declared without an initializer and was
        // then read even when no pattern matched (emitting `undefined`).
        let playerId: number | null = null;
        // find player id in arduino sequence
        if (arduinoSequence.charAt(0) === '1' && arduinoSequence.charAt(1) === '1') {
            playerId = 0;
        } else if (arduinoSequence.charAt(3) === '1' && arduinoSequence.charAt(4) === '1') {
            playerId = 1;
        } else if (arduinoSequence.charAt(6) === '1' && arduinoSequence.charAt(7) === '1') {
            playerId = 2;
        } else if (arduinoSequence.charAt(9) === '1' && arduinoSequence.charAt(10) === '1') {
            playerId = 3;
        }
        // ignore sequences that do not match any player's button pattern
        if (playerId === null) {
            return;
        }
        // update last player id
        this._latestPlayerId = playerId;
        // send to socket io if player is allowed to press
        // if he guessed wrong before he is not allowed to press until next song
        let canStillPlayThisRound = true;
        if (this._playersThatGuessedWrongThisRound.indexOf(playerId) > -1) {
            canStillPlayThisRound = false;
        }
        if (canStillPlayThisRound) {
            // socket io emit message to player and game master
            this._io.to('quizRoom').emit('playerPressedButton', playerId);
        }
    }

    /**
     * Loads the tracks of a playlist and hands them to `callback`
     * (error-first: `callback(error)` on failure, `callback(false, tracks)` on success).
     */
    protected _fetchPlaylistSongs(playlistId: number, callback: (error: Error | false, playlistTracks?: any) => void) {
        this._deezerApi.getPlaylistTracks(playlistId)
            .then((playlistTracks) => {
                callback(false, playlistTracks);
            })
            .catch((error) => {
                // TODO: handle error
                console.log(error);
                callback(error);
            });
    }
}
|
<filename>bsearch/interpolation.go
package bsearch
// Interpolation searches for n (needle) in a sorted slice of ints h (haystack).
// The return value is the index of n or -1 if n is not present in h.
// The slice must be sorted in ascending order.
//
// Interpolation algorithms estimates the position of the target value,
// taking into account the lowest and highest elements in the array as well as length of the array.
// It works on the basis that the midpoint is not the best guess in many cases.
// For example, if the target value is close to the highest element in the array,
// it is likely to be located near the end of the array.
//
// https://en.wikipedia.org/wiki/Binary_search_algorithm#Interpolation_search
func Interpolation(n int, h []int) int {
if len(h) == 0 {
return -1
}
l := 0
r := len(h) - 1
for (h[r] != h[l]) && (n >= h[l]) && (n <= h[r]) {
m := l + (n-h[l])*(r-l)/(h[r]-h[l])
switch {
case h[m] < n:
l = m + 1
case h[m] > n:
r = m - 1
default:
return m
}
}
if h[l] == n {
return l
}
return -1
}
|
Set up a web server on the client side, such as Apache or Nginx. Configure it — for example, set the document root, configure the virtual host, and enable CGI scripting. Make sure the web server is running and reachable from the public internet or the local network. Then install the HTML, CSS, and JavaScript files that make up the client-side application.
#!/bin/bash

# Print every line of the input file (default: srr.txt) wrapped in quotes.
FILE=${1:-'srr.txt'}

# IFS= preserves leading/trailing whitespace, -r keeps backslashes literal,
# and `|| [[ -n $LINE ]]` also processes a final line that lacks a trailing
# newline, which plain `while read` would silently drop.
while IFS= read -r LINE || [[ -n "$LINE" ]]; do
    echo "LINE \"$LINE\""
done < "$FILE"
|
<gh_stars>1-10
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package todo.hsqldb;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import todo.Todo;
import todo.TodoEntry;
/**
*
* @author tomas
*/
/**
 * Static helpers for reading and writing todo entries through HSQLDB.
 */
public class DbUtils {

    /**
     * Loads every row of the `todos` table into an observable list for the UI.
     * NOTE(review): assumes the table has at least four columns — verify schema.
     */
    public static ObservableList<TodoEntry> readTodosFromDb() {
        ArrayList<String> rows = new ArrayList<>();
        ObservableList<TodoEntry> data = FXCollections.observableArrayList();
        new HSQLDBConnector(Todo.isCustomConnection) {
            @Override
            public void execute() {
                try {
                    resultSet = statement.executeQuery("SELECT * FROM todos;");
                    resultSetMd = resultSet.getMetaData();
                    while (resultSet.next()) {
                        // if (resultSet.isLast()) {
                        // Todo.datMod.totalTodos = resultSet.getRow();
                        // }
                        for (int i = 1; i < this.resultSetMd.getColumnCount() + 1; i++) {
                            rows.add(resultSet.getString(i));
                        }
                        data.add(new TodoEntry(rows.get(0), rows.get(1), rows.get(2), rows.get(3)));
                        rows.removeAll(rows);
                    }
                    closeConnector();
                } catch (SQLException ex) {
                    ex.printStackTrace();
                    closeConnector();
                }
            }
        }.execute();
        return data;
    }

    /**
     * Probes the configured connection for a TODOS table; when found, switches
     * Todo.isCustomConnection off. Retries (falling back to the default
     * connection) when the probe fails with an SQLException.
     */
    public static void checkCustomConnection() {
        while (true) {
            try {
                StringBuilder sbc = new StringBuilder();
                Todo.connector = new HSQLDBConnector(Todo.isCustomConnection);
                // Bug fix: the query result was previously discarded while
                // Todo.connector.resultSet was iterated; assign it as the
                // sibling method readTodosFromDb() does.
                Todo.connector.resultSet = Todo.connector.statement.executeQuery("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES");
                while (Todo.connector.resultSet.next()) {
                    // Bug fix: was getString(count) starting at 0 — JDBC columns
                    // are 1-based, so that always threw and the loop never worked.
                    sbc.append(Todo.connector.resultSet.getString(1));
                    if (sbc.toString().equals("TODOS") || sbc.toString().equals("todos")) {
                        Todo.isCustomConnection = false;
                    }
                    sbc.delete(0, sbc.length());
                }
                Todo.connector.closeConnector();
                break;
            } catch (SQLException e) {
                Todo.isCustomConnection = false;
                e.printStackTrace();
            }
        }
    }

    /** Inserts one todo entry using the entry's own prepared statement. */
    public static void addTodoIntoDb(TodoEntry entry) {
        new HSQLDBConnector(Todo.isCustomConnection) {
            @Override
            public void execute() {
                try {
                    entry.prepareQuery(this).executeUpdate();
                    closeConnector();
                } catch (SQLException ex) {
                    ex.printStackTrace();
                    closeConnector();
                }
            }
        }.execute();
    }
}
|
# Remove the mainline 5.9-rc2 kernel packages (headers, image, modules).
echo 'only remove kernel if you have a newer one!'
sleep 2
sudo apt remove linux-headers-5.9.0-050900rc2
sudo apt remove linux-image-unsigned-5.9.0-050900rc2-generic
sudo apt remove linux-modules-5.9.0-050900rc2-generic
echo 'linux 5.9-rc2 is successfully removed'
|
<gh_stars>1-10
/*
* system_info.c
*
* Created on: 15 avr. 2016
* by: <NAME>
*/
#ifndef __USE_GNU
#define __USE_GNU
#endif
#define _GNU_SOURCE
#include <sched.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <errno.h>
#include <sys/time.h>
#include <sys/resource.h>
#include "system_info.h"
#ifdef RHEL3 //RedHat Enterprise Linux 3
#define my_sched_setaffinity(a,b,c) sched_setaffinity(a, c)
#else
#define my_sched_setaffinity(a,b,c) sched_setaffinity(a, b, c)
#endif /* RHEL3 */
/**
 * Pin the calling thread to one CPU core and set its nice value.
 * Returns 0 on success, otherwise the non-zero result of the failing call.
 */
int move_the_current_thread_to_a_core( uint16_t core_index, int niceval ){
	cpu_set_t cpus;
	const int tid = mmt_probe_get_tid(); /* kernel id of the calling thread */

	/* Build an affinity mask containing only the requested core. */
	CPU_ZERO( &cpus );
	CPU_SET( core_index, &cpus );

	int rc = my_sched_setaffinity( tid, sizeof( cpu_set_t ), &cpus );
	if( rc != 0 )
		return rc;

	/* Apply the requested scheduling priority ("nice" value) to the thread. */
	return setpriority( PRIO_PROCESS, tid, niceval );
}
long mmt_probe_get_number_of_processors(){
	/* _SC_NPROCESSORS_CONF counts all configured processors, online or not. */
	long configured_cpus = sysconf( _SC_NPROCESSORS_CONF );
	return configured_cpus;
}
/**
* Get total number of logical processors that can work
*/
long mmt_probe_get_number_of_online_processors(){
	/* _SC_NPROCESSORS_ONLN counts only processors currently schedulable. */
	long online_cpus = sysconf( _SC_NPROCESSORS_ONLN );
	return online_cpus;
}
/**
* Public API
*/
pid_t mmt_probe_get_tid() {
	/* Older glibc has no gettid() wrapper, so issue the raw syscall. */
	return (pid_t) syscall( __NR_gettid );
}
|
# coding: UTF-8
"""Print each command-line argument on its own line.

When no arguments are supplied, prints a Japanese notice meaning
"no arguments were entered".
"""
import sys

if __name__ == '__main__':
    params = sys.argv[1:]
    if params:
        for param in params:
            print(param)
    else:
        print('引数が未入力です。')
|
<reponame>atpsoft/dohutil
# nil never contains anything, so it always reports itself as empty.
class NilClass
  def empty?
    true
  end
end
# Numbers are scalar values and are never considered empty.
class Numeric
  def empty?
    false
  end
end
# A Date always denotes a concrete day, so it is never empty.
class Date
  def empty?
    false
  end
end
# A Time always denotes a concrete instant, so it is never empty.
class Time
  def empty?
    false
  end
end
|
/* eslint-disable react/prop-types */
import React, { useState, memo } from 'react';
import PropTypes from 'prop-types';
import {
InternalSelect,
InternalSelectProps
} from '@data-driven-forms/pf4-component-mapper/select';
import isEqual from 'lodash/isEqual';
const FilterSelect: React.ComponentType<InternalSelectProps> = ({
  onChange,
  ...props
}) => {
  // Remember the last selection so a falsy change event re-emits the
  // previous value instead of clearing the filter.
  const [stateValue, setValue] = useState(undefined);
  return (
    <div key="filter-select" id="filter-select" className="filter-select">
      <InternalSelect
        name="filter-select"
        simpleValue={false}
        isDisabled={!props.options || props.options.length === 0}
        value={stateValue}
        onChange={(value) => {
          const nextValue = value || stateValue;
          onChange?.(nextValue);
          setValue(nextValue);
        }}
        {...props}
      />
    </div>
  );
};
FilterSelect.propTypes = {
  options: PropTypes.array,
  onChange: PropTypes.func.isRequired
};

// Skip re-renders unless the available options actually changed (deep equality).
export default memo(FilterSelect, (prev, next) => isEqual(prev.options, next.options));
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase: copies, strips and re-signs the
# pod frameworks into the application bundle.
# NOTE(review): uses bash features (arrays, [[ ]]) despite the sh shebang —
# presumably relies on Xcode invoking build phases with a bash-compatible
# /bin/sh; confirm on the target platform.
set -e

# Ensure the destination Frameworks folder exists (echoed for the build log).
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Location of the Swift runtime dylibs for the current platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copy one framework into the app bundle's Frameworks folder, then strip
# invalid architectures and re-sign it as required by the build settings.
#   $1 - framework path (absolute, or relative to BUILT_PRODUCTS_DIR)
install_framework()
{
  # Resolve the framework's source: full path under BUILT_PRODUCTS_DIR,
  # then its basename there, then the literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the copied framework's executable; fall back to a bare binary
  # when the product is not a .framework bundle.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies the dSYM of a vendored framework into DWARF_DSYM_FOLDER_PATH
# (skipped silently when the dSYM does not exist).
#   $1 - path to the .dSYM bundle
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DWARF_DSYM_FOLDER_PATH}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DWARF_DSYM_FOLDER_PATH}"
  fi
}
# Signs a framework with the provided identity
#   $1 - path to the bundle/binary to sign
code_sign_if_enabled() {
  # Only sign when an identity is set and signing is both required and allowed.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
    # Background the signing job when parallel code signing is enabled;
    # the caller waits for all such jobs at the end of the script.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
#   $1 - binary to thin in place; removes every slice not listed in ARCHS
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  # Report what was removed, if anything.
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# The framework list is identical for Debug and Release, so it is kept once
# and iterated for either configuration instead of being duplicated.
# (NOTE: this file is normally generated by CocoaPods and may be regenerated.)
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  for framework in Bolts BubbleTransition Charts DateToolsSwift LTMorphingLabel \
      NVActivityIndicatorView Parse ParseUI PeekPop RSKPlaceholderTextView \
      Realm RealmSwift SAConfettiView SwiftyCam SwipeCellKit Whisper; do
    install_framework "${BUILT_PRODUCTS_DIR}/${framework}/${framework}.framework"
  done
fi
# Wait for any code-signing jobs backgrounded by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.