text
stringlengths 1
1.05M
|
|---|
<gh_stars>1-10
import { Column } from './../models/column.interface';
import { Formatter } from './../models/formatter.interface';
// Grid formatter that renders a clickable trash icon used to delete the row.
export const deleteIconFormatter: Formatter = (row: number, cell: number, value: any, columnDef: Column, dataContext: any) => {
  return `<i class="fa fa-trash pointer delete-icon" aria-hidden="true"></i>`;
};
|
// Webpack-generated async chunk (do not edit by hand): registers module 853,
// which exports the i18n message table { page: "page" }, into the app's
// webpackJsonp chunk registry under chunk id 4.
(this["webpackJsonp@react-bratus/app"]=this["webpackJsonp@react-bratus/app"]||[]).push([[4],{853:function(p,a){p.exports={messages:{page:"page"}}}}]);
//# sourceMappingURL=4.c9a26e93.chunk.js.map
|
<reponame>hikair/keyserver
/*
Navicat Premium Data Transfer
Source Server : localhost
Source Server Type : MySQL
Source Server Version : 80019
Source Host : localhost:3306
Source Schema : test
Target Server Type : MySQL
Target Server Version : 80019
File Encoding : 65001
Date: 06/01/2021 18:44:24
*/
-- Use 4-byte UTF-8 for the session so multi-byte (CJK) menu names import intact.
SET NAMES utf8mb4;
-- Disable foreign-key checks while tables are dropped and recreated.
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for menu
-- ----------------------------
-- Menu tree for the admin UI: `parent_id` = 0 marks a top-level entry,
-- `auth_name` is the display name and `path` the route to navigate to.
DROP TABLE IF EXISTS `menu`;
CREATE TABLE `menu` (
`id` int(0) NOT NULL,
`auth_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '菜单名',
`parent_id` int(0) NOT NULL COMMENT '父级id',
`path` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '跳转路径',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of menu
-- ----------------------------
-- 1xx rows are top-level menus (parent_id = 0); 2xx rows are their children.
INSERT INTO `menu` VALUES (101, '按键管理', 0, NULL);
INSERT INTO `menu` VALUES (102, '按键统计', 0, NULL);
INSERT INTO `menu` VALUES (201, '用户管理', 101, 'users');
INSERT INTO `menu` VALUES (202, '按键分配', 101, 'keys');
INSERT INTO `menu` VALUES (203, '模板管理', 101, 'templates');
INSERT INTO `menu` VALUES (204, '分析统计', 102, 'echarts');
INSERT INTO `menu` VALUES (205, '日志统计', 102, 'logs');
-- Re-enable foreign-key checks now that all data is loaded.
SET FOREIGN_KEY_CHECKS = 1;
|
import rgbw = basic.rgbw;
serial.writeLine("I2C");
// send to 0x44, register 0x00, value 0x46 (RESET ISL29125)
pins.i2cWriteNumber(0x44, 0x0046, NumberFormat.UInt16BE);
// send to 0x44, register 0x01, value 0x05 (GRB SAMPLING)
pins.i2cWriteNumber(0x44, 0x0105, NumberFormat.UInt16BE);
// Once a second: point the sensor at each colour's high-byte register, read it
// back, echo "[g,r,b]" over serial, and mirror the colour on the LED.
basic.forever(() => {
    serial.writeString("[");
    // Green high byte (register 0x0A).
    pins.i2cWriteNumber(0x44, 0x0A, NumberFormat.Int8BE);
    let g = pins.i2cReadNumber(0x44, NumberFormat.UInt8BE);
    // Bug fix: this printed `r`, which was not declared yet (use before
    // `let` declaration) and also mislabeled the channel.
    serial.writeNumber(g);
    serial.writeString(",");
    // Red high byte (register 0x0C).
    pins.i2cWriteNumber(0x44, 0x0C, NumberFormat.UInt8BE);
    let r = pins.i2cReadNumber(0x44, NumberFormat.UInt8BE);
    serial.writeNumber(r);
    serial.writeString(",");
    // Blue high byte (register 0x0E).
    pins.i2cWriteNumber(0x44, 0x0E, NumberFormat.UInt8BE);
    // Consistency: read a single byte big-endian like the other channels
    // (was UInt8LE; for one byte the value is the same).
    let b = pins.i2cReadNumber(0x44, NumberFormat.UInt8BE);
    serial.writeNumber(b);
    serial.writeLine("]");
    basic.setLedColor(basic.rgbw(r, g, b, 0));
    basic.pause(1000);
});
|
/*
* Copyright (c) 2020, <NAME> <<EMAIL>>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Atomic.h>
#include <Kernel/FileSystem/FileBackedFileSystem.h>
#include <Kernel/FileSystem/Inode.h>
#include <Kernel/KBufferBuilder.h>
namespace Kernel {
class Plan9FSInode;
// Client filesystem for the 9P (Plan 9) protocol, carried over the backing
// FileDescription. Requests are identified by 16-bit tags; replies are
// matched back to the waiting thread via ReceiveCompletion records.
class Plan9FS final : public FileBackedFileSystem {
    friend class Plan9FSInode;

public:
    virtual ~Plan9FS() override;
    static NonnullRefPtr<Plan9FS> create(FileDescription&);

    virtual KResult initialize() override;

    virtual bool supports_watchers() const override { return false; }

    virtual Inode& root_inode() override;

    // Tags identify in-flight requests; fids identify files on the server.
    // Both simply wrap around on overflow — NOTE(review): assumes no two
    // simultaneously outstanding requests ever share a tag; confirm.
    u16 allocate_tag() { return m_next_tag++; }
    u32 allocate_fid() { return m_next_fid++; }

    enum class ProtocolVersion {
        v9P2000,
        v9P2000u,
        v9P2000L
    };

    // Server-side file identity (9P "qid"): type byte, version, and path.
    struct qid {
        u8 type;
        u32 version;
        u64 path;
    };

    class Message;

private:
    Plan9FS(FileDescription&);

    class Blocker;

    // Block condition shared by all threads waiting for a reply message.
    class Plan9FSBlockCondition : public Thread::BlockCondition {
    public:
        Plan9FSBlockCondition(Plan9FS& fs)
            : m_fs(fs)
        {
        }

        void unblock_completed(u16);
        void unblock_all();
        void try_unblock(Blocker&);

    protected:
        virtual bool should_add_blocker(Thread::Blocker&, void*) override;

    private:
        Plan9FS& m_fs;
        mutable Spinlock<u8> m_lock;
    };

    // Per-request completion slot: filled in with the reply (or an error
    // result) for `tag` once the response arrives.
    struct ReceiveCompletion : public RefCounted<ReceiveCompletion> {
        mutable Spinlock<u8> lock;
        bool completed { false };
        const u16 tag;
        OwnPtr<Message> message;
        KResult result { KSuccess };

        ReceiveCompletion(u16 tag);
        ~ReceiveCompletion();
    };

    // Blocks the requesting thread until its ReceiveCompletion is completed.
    class Blocker final : public Thread::Blocker {
    public:
        Blocker(Plan9FS& fs, Message& message, NonnullRefPtr<ReceiveCompletion> completion)
            : m_fs(fs)
            , m_message(message)
            , m_completion(move(completion))
        {
            set_block_condition(fs.m_completion_blocker);
        }
        virtual StringView state_string() const override { return "Waiting"sv; }
        virtual Type blocker_type() const override { return Type::Plan9FS; }
        virtual void not_blocking(bool) override;

        const NonnullRefPtr<ReceiveCompletion>& completion() const { return m_completion; }
        u16 tag() const { return m_completion->tag; }
        bool is_completed() const;

        bool unblock()
        {
            unblock_from_blocker();
            return true;
        }

        bool unblock(u16 tag);

    private:
        Plan9FS& m_fs;
        Message& m_message;
        NonnullRefPtr<ReceiveCompletion> m_completion;
        bool m_did_unblock { false };
    };
    friend class Blocker;

    virtual StringView class_name() const override { return "Plan9FS"sv; }

    bool is_complete(const ReceiveCompletion&);
    KResult post_message(Message&, RefPtr<ReceiveCompletion>);
    KResult do_read(u8* buffer, size_t);
    KResult read_and_dispatch_one_message();
    KResult post_message_and_wait_for_a_reply(Message&);
    KResult post_message_and_explicitly_ignore_reply(Message&);

    ProtocolVersion parse_protocol_version(const StringView&) const;
    size_t adjust_buffer_size(size_t size) const;

    // Receive loop run on m_thread (see ensure_thread()).
    void thread_main();
    void ensure_thread();

    RefPtr<Plan9FSInode> m_root_inode;
    Atomic<u16> m_next_tag { (u16)-1 };
    Atomic<u32> m_next_fid { 1 };

    ProtocolVersion m_remote_protocol_version { ProtocolVersion::v9P2000 };
    size_t m_max_message_size { 4 * KiB };

    // Serializes writes to the server; completions are keyed by request tag.
    Mutex m_send_lock { "Plan9FS send" };
    Plan9FSBlockCondition m_completion_blocker;
    HashMap<u16, NonnullRefPtr<ReceiveCompletion>> m_completions;

    Spinlock<u8> m_thread_lock;
    RefPtr<Thread> m_thread;
    Atomic<bool> m_thread_running { false };
    Atomic<bool, AK::MemoryOrder::memory_order_relaxed> m_thread_shutdown { false };
};
// Inode living on a Plan9FS mount. The inode index doubles as the 9P fid.
class Plan9FSInode final : public Inode {
    friend class Plan9FS;

public:
    virtual ~Plan9FSInode() override;

    // The server-side file handle (fid) for this inode.
    u32 fid() const { return index().value(); }

    // ^Inode
    virtual InodeMetadata metadata() const override;
    virtual void flush_metadata() override;
    virtual KResultOr<size_t> read_bytes(off_t, size_t, UserOrKernelBuffer& buffer, FileDescription*) const override;
    virtual KResultOr<size_t> write_bytes(off_t, size_t, const UserOrKernelBuffer& data, FileDescription*) override;
    virtual KResult traverse_as_directory(Function<bool(FileSystem::DirectoryEntryView const&)>) const override;
    virtual KResultOr<NonnullRefPtr<Inode>> lookup(StringView name) override;
    virtual KResultOr<NonnullRefPtr<Inode>> create_child(StringView name, mode_t, dev_t, uid_t, gid_t) override;
    virtual KResult add_child(Inode&, const StringView& name, mode_t) override;
    virtual KResult remove_child(const StringView& name) override;
    virtual KResult chmod(mode_t) override;
    virtual KResult chown(uid_t, gid_t) override;
    virtual KResult truncate(u64) override;

private:
    Plan9FSInode(Plan9FS&, u32 fid);
    static NonnullRefPtr<Plan9FSInode> create(Plan9FS&, u32 fid);

    // Attribute request/valid bit masks — presumably mirror the 9P2000.L
    // getattr wire masks; confirm against the protocol specification.
    enum class GetAttrMask : u64 {
        Mode = 0x1,
        NLink = 0x2,
        UID = 0x4,
        GID = 0x8,
        RDev = 0x10,
        ATime = 0x20,
        MTime = 0x40,
        CTime = 0x80,
        Ino = 0x100,
        Size = 0x200,
        Blocks = 0x400,
        BTime = 0x800,
        Gen = 0x1000,
        DataVersion = 0x2000,
        Basic = 0x7ff,
        All = 0x3fff
    };

    // Bit masks selecting which attributes a setattr request modifies.
    enum class SetAttrMask : u64 {
        Mode = 0x1,
        UID = 0x2,
        GID = 0x4,
        Size = 0x8,
        ATime = 0x10,
        MTime = 0x20,
        CTime = 0x40,
        ATimeSet = 0x80,
        MTimeSet = 0x100
    };

    // Mode in which the file is already open, using SerenityOS constants.
    int m_open_mode { 0 };
    KResult ensure_open_for_mode(int mode);

    Plan9FS& fs() { return reinterpret_cast<Plan9FS&>(Inode::fs()); }
    Plan9FS& fs() const
    {
        return const_cast<Plan9FS&>(reinterpret_cast<const Plan9FS&>(Inode::fs()));
    }
};
}
|
import { IsString, IsPhoneNumber } from 'class-validator';
/**
 * Request payload for login: validated by class-validator decorators.
 */
export class LoginDto {
  // Must be a valid phone number for the 'UA' (Ukraine) region.
  @IsPhoneNumber('UA')
  phone: string;

  // Only checked to be a string here — NOTE(review): confirm length/strength
  // rules are enforced elsewhere.
  @IsString()
  password: string;
}
|
#!/bin/bash
# Build every service module with Maven, aborting on the first failure.
# Fix: the original did `cd dir` without checking it succeeded, so a missing
# directory would silently run `mvn` in the wrong place.
set -euo pipefail

for module in shopfront productcatalogue stockmanager; do
  # Subshell keeps the cd local, so each build starts from the script's cwd.
  (cd "$module" && mvn clean install)
done
|
<filename>bootstrapped/ideal/runtime/texts/test_plain_text.java
// Autogenerated from runtime/texts/test_plain_text.i
package ideal.runtime.texts;
import ideal.library.elements.*;
import ideal.library.texts.*;
import ideal.runtime.elements.*;
import ideal.library.channels.output;
import ideal.machine.channels.string_writer;
// Tests for plain-text rendering: each test builds a tree of text elements,
// renders it through a plain_formatter into a string_writer, and compares the
// produced string against a literal expectation.
public class test_plain_text {

  // Run every test in this class, bracketing each with the runtime's
  // start_test/end_test bookkeeping.
  public void run_all_tests() {
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_writer_trivial");
    test_writer_trivial();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_writer_indent0");
    test_writer_indent0();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_writer_indent1");
    test_writer_indent1();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_writer_indent2");
    test_writer_indent2();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_self_closing_tag");
    test_self_closing_tag();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_writer_fragment");
    test_writer_fragment();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_underline_tag");
    test_underline_tag();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_underline2_tag");
    test_underline2_tag();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_two_underlines");
    test_two_underlines();
    ideal.machine.elements.runtime_util.end_test();
    ideal.machine.elements.runtime_util.start_test("test_plain_text.test_blank_line");
    test_blank_line();
    ideal.machine.elements.runtime_util.end_test();
  }

  // Shared string fixtures used across the tests.
  public static final base_string FOO = (base_string) new base_string("foo");
  public static final base_string BAR = (base_string) new base_string("bar");
  public static final base_string BAZ = (base_string) new base_string("baz");
  public static final base_string WYZZY = (base_string) new base_string("wyzzy");

  // A single paragraph renders as its text plus a trailing newline.
  public void test_writer_trivial() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    the_formatter.write(base_element.make(text_library.P, FOO));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("foo\n"), the_writer.elements());
  }

  // INDENT around a bare string indents that single line.
  public void test_writer_indent0() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    the_formatter.write(base_element.make(text_library.P, FOO));
    the_formatter.write(base_element.make(text_library.INDENT, BAR));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("foo\n  bar\n"), the_writer.elements());
  }

  // INDENT around a list of paragraphs indents each; following text is flush.
  public void test_writer_indent1() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    the_formatter.write(base_element.make(text_library.P, FOO));
    final text_element bar = base_element.make(text_library.P, BAR);
    final text_element baz = base_element.make(text_library.P, BAZ);
    the_formatter.write(base_element.make(text_library.INDENT, base_list_text_node.make(bar, baz)));
    the_formatter.write(base_element.make(text_library.P, WYZZY));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("foo\n  bar\n  baz\nwyzzy\n"), the_writer.elements());
  }

  // INDENT around a multi-line string indents every line of it.
  public void test_writer_indent2() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    the_formatter.write(base_element.make(text_library.P, FOO));
    the_formatter.write(base_element.make(text_library.INDENT, (base_string) new base_string("bar\nbaz")));
    the_formatter.write(base_element.make(text_library.P, WYZZY));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("foo\n  bar\n  baz\nwyzzy\n"), the_writer.elements());
  }

  // A BR element (with attributes) renders as a bare newline in plain text.
  public void test_self_closing_tag() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    the_formatter.write(FOO);
    the_formatter.write(base_element.make(text_library.BR, text_library.CLEAR, (base_string) new base_string("all"), null));
    the_formatter.write((base_string) new base_string("bar\n"));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("foo\nbar\n"), the_writer.elements());
  }

  // A text_entity renders via its plain-text form ("*"), not its display form.
  public void test_writer_fragment() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    final text_entity fragment = new text_entity(text_library.IDEAL_TEXT, new base_string("*"), new base_string("·"));
    the_formatter.write((base_string) new base_string("one"));
    the_formatter.write(fragment);
    the_formatter.write((base_string) new base_string("two"));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("one*two"), the_writer.elements());
  }

  // UNDERLINE renders a '^' marker line under the underlined span.
  public void test_underline_tag() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    the_formatter.write((base_string) new base_string("hello "));
    final base_string world_string = (base_string) new base_string("world");
    final text_element br = base_element.make(text_library.BR, null);
    the_formatter.write(base_element.make(text_library.UNDERLINE, base_list_text_node.make(world_string, br, FOO)));
    the_formatter.write((base_string) new base_string(" bar"));
    the_formatter.write(base_element.make(text_library.BR, null));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("hello world\n      ^^^^^\nfoo bar\n^^^\n"), the_writer.elements());
  }

  // UNDERLINE2 uses '-' instead of '^' for the marker line.
  public void test_underline2_tag() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    the_formatter.write((base_string) new base_string("hello "));
    final base_string world_string = (base_string) new base_string("world");
    final text_element br = base_element.make(text_library.BR, null);
    the_formatter.write(base_element.make(text_library.UNDERLINE2, base_list_text_node.make(world_string, br, FOO)));
    the_formatter.write((base_string) new base_string(" bar"));
    the_formatter.write(base_element.make(text_library.BR, null));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("hello world\n      -----\nfoo bar\n---\n"), the_writer.elements());
  }

  // Nested underlines on one line combine into a single mixed marker line.
  public void test_two_underlines() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    final text_element hi = base_element.make(text_library.UNDERLINE, (base_string) new base_string("hi"));
    final text_element mid = new base_element(text_library.UNDERLINE2, new list_dictionary<attribute_id, string>(), base_list_text_node.make((base_string) new base_string("start "), hi, (base_string) new base_string(" end")));
    the_formatter.write(text_util.join((base_string) new base_string("foo "), mid, (base_string) new base_string(" bar")));
    the_formatter.write(base_element.make(text_library.BR, null));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("foo start hi end bar\n    ------^^----\n"), the_writer.elements());
  }

  // An attribute-less BR between two DIVs produces a blank line.
  public void test_blank_line() {
    final string_writer the_writer = new string_writer();
    final plain_formatter the_formatter = new plain_formatter(the_writer);
    the_formatter.write(base_element.make(text_library.DIV, FOO));
    the_formatter.write(new base_element(text_library.BR));
    the_formatter.write(base_element.make(text_library.DIV, (base_string) new base_string("bar")));
    assert ideal.machine.elements.runtime_util.values_equal(new base_string("foo\n\nbar\n"), the_writer.elements());
  }
}
|
#!/bin/bash
# Copy the default rviz config to /tmp and rewrite the uav name inside it to
# the current $UAV_NAME.
set -e

# Fail early with a clear message instead of silently producing a config that
# still references "uavN" when UAV_NAME is unset or empty.
: "${UAV_NAME:?UAV_NAME must be set}"

PACKAGE_PATH=$( rospack find mrs_uav_testing )

# Quote expansions so paths with spaces don't word-split.
cp "$PACKAGE_PATH/rviz/default_random_simulation.rviz" /tmp/default_random_simulation.rviz
sed -i "s/uav[0-9]/$UAV_NAME/g" /tmp/default_random_simulation.rviz
|
<filename>blingfirecompile.library/inc/FARegexpLexer_wre.h
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
#ifndef _FA_REGEXPLEXER_WRE_H_
#define _FA_REGEXPLEXER_WRE_H_
#include "FAConfig.h"
#include "FARegexpLexer_triv.h"
namespace BlingFire
{
///
/// The lexer assumes that the input is the combination of the following:
///
/// <token> ::= <word-list>[/<regexp-list>][/<dict-list>][/<pos-list>]
/// <token> ::= [<word-list>/]<regexp-list>[/<dict-list>][/<pos-list>]
/// <token> ::= [<word-list>/][<regexp-list>/]<dict-list>[/<pos-list>]
/// <token> ::= [<word-list>/][<regexp-list>/][<dict-list>/]<pos-list>
///
/// <word-list> ::= <word-or-list>
/// <word-list> ::= <word-not-list>
/// <word-or-list> ::= "text"
/// <word-or-list> ::= "text"|<word-or-list>
/// <word-not-list> ::= !"text"
/// <word-not-list> ::= !"text"<word-not-list>
///
/// <tag-list> ::= <tag-or-list>
/// <tag-list> ::= <tag-not-list>
/// <tag-or-list> ::= TAG
/// <tag-or-list> ::= TAG|<tag-or-list>
/// <tag-not-list> ::= !TAG
/// <tag-not-list> ::= !TAG<tag-not-list>
///
/// TAG ::= /[A-Za-z][A-Za-z0-9_]*/
///
/// <regexp-list> ::= <regexp-or-list>
/// <regexp-list> ::= <regexp-and-list>
/// <regexp-or-list> ::= [!]'regexp'
/// <regexp-or-list> ::= [!]'regexp'|<regexp-or-list>
/// <regexp-and-list> ::= [!]'regexp'
/// <regexp-and-list> ::= [!]'regexp'<regexp-and-list>
///
/// <dict-list> ::= <dict-or-list>
/// <dict-list> ::= <dict-and-list>
/// <dict-or-list> ::= @[!]<dictname>|<dict-or-list>
/// <dict-and-list> ::= @[!]<dictname><dict-and-list>
///
/// <dictname> ::= /[A-Za-z][A-Za-z0-9_]*/
///
/// Notes:
/// 1. No empty tokens allowed
/// 2. The spaces in tokens are only allowed within quotations
///
/// Lexer for WRE (word regular expression) tokens; see the grammar described
/// in the comment above for the token syntax it recognizes.
class FARegexpLexer_wre : public FARegexpLexer_triv {
public:
    FARegexpLexer_wre ();

private:
    /// overridden
    const int FindToken (const int Pos, int * pLength) const;
    /// returns true if any of ' ', '\t', '\n', '\r' is encountered at Pos
    inline const bool IsSpace (const int Pos) const;
    /// returns true if symbol at Pos can be a part of WRE token
    inline const bool IsToken (const int Pos) const;
    /// returns true if symbol at Pos can be a start of WRE token
    inline const bool IsTokenStart (const int Pos) const;

private:
    // Character-class flag bits stored per ASCII character in m_char2info.
    enum {
        InfoMaskSpace = 1,
        InfoMaskToken = 2,
        InfoMaskTokenStart = 4,
    };
    /// mapping from character into its class-flag bitmask
    unsigned int m_char2info [AsciiSize];
};
}
#endif
|
#!/bin/bash
# Launch prob_fitinc with a range of probability arguments in parallel, then
# wait for all runs to complete.
source script_common.sh
echo "Using build directory '${HN}' for executables"
# Run several evolves in parallel.
./${HN}/sim_supp/prob_fitinc 0.02 &
./${HN}/sim_supp/prob_fitinc 0.05 &
./${HN}/sim_supp/prob_fitinc 0.1 &
./${HN}/sim_supp/prob_fitinc 0.2 &
./${HN}/sim_supp/prob_fitinc 0.3 &
./${HN}/sim_supp/prob_fitinc 0.4 &
./${HN}/sim_supp/prob_fitinc 0.5 &
wait
# NOTE(review): there is no matching pushd in this script — presumably
# script_common.sh pushes a directory; confirm, otherwise this popd fails.
popd
|
<filename>sentrycli/group.py
import logging
import re
from collections import Counter
from datetime import datetime
from itertools import izip_longest
from argh import arg
from sentrycli.event import load_from_file
from sentrycli.table import Table
from sentrycli.utils import check_required_keys_present
logging.basicConfig(level=logging.INFO)
T_HEADER = 'header'
T_CONTEXT = 'context'
T_PARAM = 'param'
T_VAR = 'var'
T_TAG = 'tag'
ORDER_META_KEY = ('breadcrumb', 'breadcrumbs in order')
def get_keys(prop, events):
    """
    Get all distinct keys from events' property.
    :rtype: set
    """
    collected = set()
    for item in events:
        value = getattr(item, prop)
        # Skip events where the property is missing/unset.
        if value is not None:
            collected.update(value)
    return sorted(collected)
def print_options(events):
    """
    Print available aggregation options (headers, context, tags etc.)
    :param events: list of events from which gather attributes
    :type: list
    """
    headers = get_keys('headers', events)
    context = get_keys('context', events)
    params = get_keys('params', events)
    variables = get_keys('vars', events)
    tags = get_keys('tags', events)

    table = Table(['Headers', 'Context', 'Params', 'Vars', 'Tags'])
    # Pad shorter columns with '' so rows line up; relies on Python 2's eager
    # map() to actually add the rows.
    map(table.add_row, izip_longest(headers, context, params, variables, tags,
                                    fillvalue=''))
    print table
@arg('pathname', help='path to input file')
@arg('--headers', help='headers', nargs='+')
@arg('--context', help='context', nargs='+')
@arg('--params', help='params', nargs='+')
@arg('--variables', help='variables', nargs='+')
@arg('--ctime', help='creation time', choices=('daily', 'monthly'))
@arg('--breadcrumbs', nargs='+',
     help='analyze if events order of breadcrumbs categories is fullfiled. '
          'Order should be in Python regex format. Use `.*` for any number of'
          'categories between and ` ` for strict order.')
@arg('--tags', help='tags', nargs='+')
@arg('--top', type=int, help='show only top x results')
@arg('-o', '--options', help='list possible grouping options')
def group(pathname, headers=None, context=None, params=None, breadcrumbs=None,
          variables=None, tags=None, options=False, ctime=None, top=None):
    """
    Load events and either list the available grouping options, group them by
    creation time, or count events per combination of the requested
    header/context/param/var/tag values (plus breadcrumb-order flag).
    """
    events = load_from_file(pathname)

    if options:
        print_options(events)
        return

    check_required_keys_present([
        'headers', 'context', 'params', 'variables', 'tags', 'ctime',
        'breadcrumbs'], locals())

    # Normalize the optional list arguments to empty lists.
    headers = headers or []
    context = context or []
    params = params or []
    variables = variables or []
    tags = tags or []
    # Breadcrumb order specs are regular expressions.
    breadcrumbs = [re.compile(breadcrumb) for breadcrumb in breadcrumbs or []]

    if ctime is not None:
        group_by_ctime(events, ctime)
        return

    # Build the ordered list of (type, name) keys that define one group.
    keys = []
    keys.extend([(T_HEADER, header) for header in headers])
    keys.extend([(T_CONTEXT, var) for var in context])
    keys.extend([(T_PARAM, param) for param in params])
    keys.extend([(T_VAR, var) for var in variables])
    keys.extend([(T_TAG, tag) for tag in tags])

    if len(breadcrumbs):
        keys.extend([ORDER_META_KEY])

    values = Counter()
    for event in events:
        # Collect this event's value for every requested key.
        meta = {}
        for header in headers:
            meta[(T_HEADER, header)] = event.headers.get(header)
        for var in context:
            meta[(T_CONTEXT, var)] = event.context.get(var)
        for param in params:
            meta[(T_PARAM, param)] = event.params.get(param)
        for tag in tags:
            meta[(T_TAG, tag)] = event.tags.get(tag)
        for var in variables:
            # Take the value from the first stack frame that defines it.
            for frame in event.frames:
                res = frame['vars'].get(var)
                if res is not None:
                    meta[(T_VAR, var)] = res
                    break
        if len(breadcrumbs):
            # True only when every breadcrumb pattern's order is preserved
            # (for-else: the else runs when no pattern failed).
            meta[ORDER_META_KEY] = False
            for breadcrumb in breadcrumbs:
                if not event.is_breadcrumbs_order_preserved(breadcrumb):
                    break
            else:
                meta[ORDER_META_KEY] = True

        value = tuple(meta.get(key) for key in keys)
        values[value] += 1

    print_grouping([key[1] for key in keys], values, top)
def group_by_ctime(events, mode):
    """
    Group events by creation time.
    :param events: events to group
    :type: list
    :param mode: grouping mode (daily or monthly)
    :type: str
    """
    counter = Counter()
    total = 0

    for event in events:
        if mode == 'daily':
            day = event.ctime.day
        elif mode == 'monthly':
            # Collapse every day of the month onto the 1st.
            day = 1

        key = datetime(event.ctime.year, event.ctime.month, day)
        counter[key] += 1
        total += 1

    if mode == 'daily':
        fmt = '%Y-%m-%d'
        title = 'day'
    elif mode == 'monthly':
        fmt = '%Y-%m'
        title = 'month'

    table = Table([title, 'count', '%'])
    # NOTE(review): this recomputation makes the in-loop `total` tally above
    # redundant — confirm one of the two can be removed.
    total = sum(counter.values())

    # Rows sorted chronologically; percentage is of all counted events.
    for item in sorted(counter.items()):
        table.add_row((item[0].strftime(fmt),
                       item[1],
                       item[1] * 100.0 / total))
    print table
def print_grouping(attributes, grouping, top):
    """
    Print computed groups.
    :param attributes: list of grouped attributes
    :type: list(str)
    :param grouping: counter for each combination of attributes' values
    :type: Counter
    :type top: int
    """
    total = sum(grouping.values())
    table = Table(attributes + ['count', '%'])
    # most_common(None) returns all rows, so `top` may be None for "no limit".
    table.add_rows(total, grouping.most_common(top))
    # Rows sorted by count, then the grand total underneath.
    print '\n' + table.by_count()
    print 'Total:', total
|
#!/bin/sh
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Google SQL Service command line tool.
# Examples:
#   $0 instance [database]

# Directory containing this script (and the bundled jar). Quoted so paths
# with spaces survive.
SQL_SH_DIR="$(cd "$(dirname "$0")"; pwd)"
JAR="${SQL_SH_DIR}/google_sql.jar"
JAVA="${JAVA_HOME}/bin/java"

# Print an error message to stderr and abort.
die() {
  # Fix: quote the message so it is not word-split, and send it to stderr.
  echo "$1" >&2
  exit 2
}

main() {
  # Fall back to the java on PATH when JAVA_HOME is unset or not executable.
  if [ ! -x "${JAVA}" ]; then
    JAVA=$(command -v "java")
  fi
  [ -x "${JAVA}" ] || die "Unable to find JVM. Please set JAVA_HOME"
  ${JAVA} -jar "${JAR}" "$@"
  exit
}
main "$@"
|
require 'rails_helper'

# View spec: rendering games/categories/show with a game and a category
# assigned should render its own template.
RSpec.describe 'games/categories/show' do
  it 'renders the show template' do
    # The view reads @game and @category; build both via factories.
    assign(:game, FactoryBot.create(:game))
    assign(:category, FactoryBot.create(:category))
    render
    expect(view).to render_template('games/categories/show')
  end
end
|
<reponame>Aarushi21/Projects
const express = require("express");
const mongoose = require("mongoose");
const passport = require("passport");

const router = express.Router();

// Kick off the Google OAuth flow, requesting profile + email scopes.
router.get(
  "/google",
  passport.authenticate("google", {
    scope: ["profile", "email"],
  })
);

///Callback route for google to redirect
router.get(
  "/google/redirect",
  passport.authenticate("google"),
  (req, res, next) => {
    console.log("req.user:", req.user);
    // Persist the authenticated user on the session.
    req.session.context = req.user;
    // URL-encode so the values are safe to embed in a query string.
    // Fix: removed the unused `x` local and replaced `var` with `const`.
    const token = encodeURIComponent(req.user.token);
    const name = encodeURIComponent(req.user.name);
    res.status(200).json({
      success: true,
      token: token,
      name: name,
    });
  }
);

module.exports = router;
|
<filename>bitmaplib/src/main/java/com/xingen/bitmaplib/internal/BitmapRequest.java<gh_stars>1-10
package com.xingen.bitmaplib.internal;
import android.graphics.Bitmap;
import com.xingen.bitmaplib.common.constants.Constants;
import com.xingen.bitmaplib.common.utils.MD5Utils;
/**
* Author by ${xinGen}, Date on 2018/5/28.
*/
/**
 * A single bitmap load request: identifies the image, the desired target
 * size, and the listener that receives the resulting Bitmap or error.
 */
public class BitmapRequest {
    // Whether this request was cancelled; guarded by synchronized accessors.
    private boolean cancel=false;
    private String imageId;
    private BitmapRequestQueue requestQueue;
    private BitmapListener bitmapListener;
    private int targetWidth,targetHeight;
    // Normalized image id (see createOriginId) reported back to the listener.
    private String originId;
    private String tag;

    // Convenience constructor: 0/0 target size means no size constraint here.
    public BitmapRequest(String imageId,BitmapListener bitmapListener){
        this(imageId,0,0,bitmapListener);
    }

    public BitmapRequest(String imageId, int targetWidth, int targetHeight,BitmapListener bitmapListener) {
        this.imageId = imageId;
        this.targetWidth = targetWidth;
        this.targetHeight = targetHeight;
        this.bitmapListener=bitmapListener;
        originId=createOriginId();
    }

    public synchronized boolean isCancel() {
        return cancel;
    }

    public synchronized void setCancel(boolean cancel) {
        this.cancel = cancel;
    }

    // Deliver the decoded bitmap, unless cancelled or there is no listener.
    public void deliverResult(Bitmap bitmap){
        if (bitmapListener==null||isCancel()){
            return ;
        }
        bitmapListener.result(originId,bitmap);
    }

    // Deliver a failure, unless cancelled or there is no listener.
    public void deliverError(Exception e){
        if (bitmapListener==null||isCancel()){
            return ;
        }
        bitmapListener.error(originId,e);
    }

    // NOTE(review): requestQueue is only set via setRequestQueue — confirm it
    // is always set before finish() is called, otherwise this throws NPE.
    public void finish(){
        this.requestQueue.finishAll(this);
    }

    public void setRequestQueue(BitmapRequestQueue requestQueue) {
        this.requestQueue = requestQueue;
    }

    public String getImageId() {
        return imageId;
    }

    public int getTargetHeight() {
        return targetHeight;
    }

    public int getTargetWidth() {
        return targetWidth;
    }

    /**
     * Get the cache key for this request.
     *
     * @return key derived from the image id and target size (MD5-hashed for disk use)
     */
    public String getCacheKey() {
        return CacheKeyUtils.createKey(imageId,targetWidth,targetHeight);
    }

    public static class CacheKeyUtils {
        // Builds "#W<w>#H<h><imageId>" and hashes it for the disk cache.
        public static String createKey(String imageId,int targetWidth,int targetHeight){
            String s= new StringBuilder(imageId.length() + 12)
                    .append("#W")
                    .append(targetWidth)
                    .append("#H")
                    .append(targetHeight)
                    .append(imageId)
                    .toString();
            return MD5Utils.hashImageUrlForDisk(s);
        }
    }

    // Normalize the image id: http ids pass through unchanged, everything
    // else is resolved through the local path prefix helper.
    private String createOriginId(){
        if (imageId.contains(Constants.PathPrefix.Prefix_Http)) {
            return imageId;
        }else {
            return Constants.PathPrefix.getActualImageId(imageId);
        }
    }

    public void setTag(String tag) {
        this.tag = tag;
    }

    public String getTag() {
        return tag;
    }
}
|
import {
BoxCollider,
Collision,
GameObject,
RectPainter,
Subject,
Timer,
Vector,
} from '../../core';
import { GameUpdateArgs, Tag } from '../../game';
import {
EditorMapInputContext,
InputHoldThrottle,
InputHoldThrottleOptions,
} from '../../input';
import * as config from '../../config';
import { EditorBrush } from './EditorBrush';
// Seconds between visibility toggles of the selected brush.
const BLINK_DELAY = 0.2;

// Movement auto-repeat tuning while a direction input is held.
const HOLD_THROTTLE_OPTIONS: InputHoldThrottleOptions = {
  activationDelay: 0.12,
  delay: 0.024,
};

/**
 * Cursor-like tool the user moves over the editor map to draw or erase tiles
 * with the currently selected brush. Emits `draw`/`erase` events; the brush
 * blinks so it stays visible over the map.
 */
export class EditorTool extends GameObject {
  public collider = new BoxCollider(this, true);
  public painter = new RectPainter(null, config.COLOR_RED);
  public zIndex = config.EDITOR_TOOL_Z_INDEX;
  // Notified when the user requests a draw/erase at the current position.
  public draw = new Subject();
  public erase = new Subject();
  private brushes: EditorBrush[] = [];
  private selectedBrush: EditorBrush = null;
  // Displacement applied this frame; undone again when movement is blocked.
  private velocity = new Vector(0, 0);
  private holdThrottles: InputHoldThrottle[] = [];
  private blinkTimer = new Timer();
  private isBlinkVisible = true;

  constructor() {
    super();

    // Holding a direction repeats the move using the throttle options above.
    this.holdThrottles = [
      new InputHoldThrottle(
        EditorMapInputContext.MoveUp,
        this.moveUp,
        HOLD_THROTTLE_OPTIONS,
      ),
      new InputHoldThrottle(
        EditorMapInputContext.MoveDown,
        this.moveDown,
        HOLD_THROTTLE_OPTIONS,
      ),
      new InputHoldThrottle(
        EditorMapInputContext.MoveLeft,
        this.moveLeft,
        HOLD_THROTTLE_OPTIONS,
      ),
      new InputHoldThrottle(
        EditorMapInputContext.MoveRight,
        this.moveRight,
        HOLD_THROTTLE_OPTIONS,
      ),
    ];
  }

  public setBrushes(brushes: EditorBrush[]): void {
    this.brushes = brushes;
    this.selectBrush(0);
  }

  public getSelectedBrush(): EditorBrush {
    return this.selectedBrush;
  }

  protected setup({ collisionSystem }: GameUpdateArgs): void {
    collisionSystem.register(this.collider);
  }

  protected update(updateArgs: GameUpdateArgs): void {
    this.dirtyPaintBox();
    this.updatePosition(updateArgs);
    this.updateBlinking(updateArgs);

    const { inputManager } = updateArgs;
    const inputMethod = inputManager.getActiveMethod();

    if (inputMethod.isDownAny(EditorMapInputContext.Draw)) {
      this.draw.notify(null);
    }
    if (inputMethod.isDownAny(EditorMapInputContext.Erase)) {
      this.erase.notify(null);
    }
    if (inputMethod.isDownAny(EditorMapInputContext.NextBrush)) {
      this.selectNextBrush();
    }
    if (inputMethod.isDownAny(EditorMapInputContext.PrevBrush)) {
      this.selectPrevBrush();
    }

    this.collider.update();
  }

  protected collide(collision: Collision): void {
    // Undo this frame's move when we ran into a tile that blocks editor movement.
    const blockMoveContacts = collision.contacts.filter((contact) => {
      return contact.collider.object.tags.includes(Tag.EditorBlockMove);
    });
    if (blockMoveContacts.length > 0) {
      this.position.sub(this.velocity);
      this.updateMatrix(true);
    }
  }

  private updatePosition(updateArgs: GameUpdateArgs): void {
    const { deltaTime, inputManager } = updateArgs;
    const inputMethod = inputManager.getActiveMethod();

    this.velocity.set(0, 0);

    if (inputMethod.isDownAny(EditorMapInputContext.MoveUp)) {
      this.moveUp();
    } else if (inputMethod.isDownAny(EditorMapInputContext.MoveDown)) {
      this.moveDown();
    } else if (inputMethod.isDownAny(EditorMapInputContext.MoveLeft)) {
      this.moveLeft();
    } else if (inputMethod.isDownAny(EditorMapInputContext.MoveRight)) {
      this.moveRight();
    }

    for (const holdThrottle of this.holdThrottles) {
      holdThrottle.update(inputMethod, deltaTime);
    }

    if (this.velocity.x !== 0 || this.velocity.y !== 0) {
      this.position.add(this.velocity);
      this.updateMatrix(true);
    }
  }

  // The tool moves in whole-brush-sized steps.
  private moveUp = (): void => {
    this.velocity.set(0, -this.size.height);
  };
  private moveDown = (): void => {
    this.velocity.set(0, this.size.height);
  };
  private moveLeft = (): void => {
    this.velocity.set(-this.size.width, 0);
  };
  private moveRight = (): void => {
    this.velocity.set(this.size.width, 0);
  };

  private updateBlinking({ deltaTime }: GameUpdateArgs): void {
    if (this.blinkTimer.isDone()) {
      this.isBlinkVisible = !this.isBlinkVisible;
      this.blinkTimer.reset(BLINK_DELAY);
    } else {
      this.blinkTimer.update(deltaTime);
    }
    // Bug fix: was `this.selectBrush !== null`, which compared the method
    // (always truthy) instead of the currently selected brush instance.
    if (this.selectedBrush !== null) {
      this.selectedBrush.setVisible(this.isBlinkVisible);
    }
  }

  private selectNextBrush(): void {
    const selectedBrushIndex = this.brushes.indexOf(this.selectedBrush);
    let nextBrushIndex = selectedBrushIndex + 1;
    // Wrap around to the first brush.
    if (nextBrushIndex > this.brushes.length - 1) {
      nextBrushIndex = 0;
    }
    this.selectBrush(nextBrushIndex);
  }

  private selectPrevBrush(): void {
    const selectedBrushIndex = this.brushes.indexOf(this.selectedBrush);
    let prevBrushIndex = selectedBrushIndex - 1;
    // Wrap around to the last brush.
    if (prevBrushIndex < 0) {
      prevBrushIndex = this.brushes.length - 1;
    }
    this.selectBrush(prevBrushIndex);
  }

  private selectBrush(index: number): void {
    // Clear previous brush
    if (this.selectedBrush !== null) {
      // Restore visibility
      this.selectedBrush.setVisible(true);
      this.remove(this.selectedBrush);
    }

    if (this.brushes[index] === undefined) {
      // Bug fix: was `this.selectBrush = null`, overwriting this method
      // instead of clearing the selected brush.
      this.selectedBrush = null;
      return;
    }

    this.selectedBrush = this.brushes[index];
    this.selectedBrush.setVisible(this.isBlinkVisible);
    this.size.copyFrom(this.selectedBrush.size);

    // Snap the tool position onto the new brush's grid.
    this.position.x -= this.position.x % this.size.width;
    this.position.y -= this.position.y % this.size.height;

    this.add(this.selectedBrush);
    this.updateMatrix(true);
  }
}
|
<filename>simpleApp/functions/getCountFunction.js
'use strict';
const {getCount} = require('./db/dynamodb');
exports.handler = async (event, context) => {
let count = 1;
try {
count = parseInt(await getCount());
} catch (e) {
// no count record in the database
count = 1;
}
console.log('getCount:', count);
return sendResponse(200, count, context);
};
/**
 * Build a Lambda proxy-style response object.
 * @param statusCode HTTP status code.
 * @param message    Payload; serialized to JSON for the body.
 * @param context    Lambda context (unused; kept for call-site compatibility).
 * @returns {{statusCode: number, body: string}}
 */
function sendResponse(statusCode, message, context) {
  return {
    statusCode: statusCode,
    body: JSON.stringify(message)
  };
}
|
#!/bin/bash
# Docker entrypoint: source the ROS 2 environment, then run the container command.
set -e

# setup ros2 environment: base Dashing install, then the workspace overlay.
source /opt/ros/dashing/setup.bash
source /root/ros2_ws/install/setup.bash

# Replace this shell with the command passed to the container.
exec "$@"
|
use quote::quote;
use syn::{Data, DeriveInput, Fields, Ident, parse_macro_input};
use proc_macro::TokenStream;
/// Derives a `sum_fields(&self) -> u32` method that adds up all named fields
/// of a struct. Non-struct types and structs without named fields get a stub
/// returning 0.
///
/// Bug fix: the original generated `let sum = 0;` (immutable) and then
/// repeated `#(sum += self.#field_name;)*`, but `field_name` was only bound
/// inside the map closure — the iterator of `self.<field>` expressions
/// (previously bound to `sum` and never used) is what must be interpolated.
/// The expanded code could not compile.
#[proc_macro_derive(SumFields)]
pub fn sum_fields_derive(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let struct_name = &input.ident;
    let sum_fields = match input.data {
        Data::Struct(ref data) => {
            match data.fields {
                Fields::Named(ref fields) => {
                    // One `self.<field>` expression per named field.
                    let field_terms = fields.named.iter().map(|field| {
                        let field_name = field.ident.as_ref().unwrap();
                        quote! { self.#field_name }
                    });
                    quote! {
                        fn sum_fields(&self) -> u32 {
                            let mut sum = 0;
                            #(sum += #field_terms;)*
                            sum
                        }
                    }
                }
                _ => {
                    quote! {
                        fn sum_fields(&self) -> u32 {
                            0
                        }
                    }
                }
            }
        }
        _ => {
            quote! {
                fn sum_fields(&self) -> u32 {
                    0
                }
            }
        }
    };
    let expanded = quote! {
        impl #struct_name {
            #sum_fields
        }
    };
    TokenStream::from(expanded)
}
|
#!/bin/bash
# Bootstrap Go tooling and install a pinned Go version through goenv.
brew install go
# Fetch goenv into its default location.
git clone https://github.com/syndbg/goenv.git ~/.goenv

# Pinned Go version; -s skips the install if it is already present.
VERSION="1.11.13"
#VERSION="1.12.9"
goenv install -s $VERSION
goenv global $VERSION
|
#!/bin/bash
# ysoftman
# Demo of the shell's special parameter variables.

# Exit status of the previously executed command
echo '$?:'$?

# Current shell option flags (those enabled via set -o)
echo '$-:'$-

# Current process ID
echo '$$:'$$

# Process ID of the most recent background job
echo '$!:'$!

# Number of positional parameters
echo '$#:'$#

# All positional parameters
echo '$*:'$*

# All positional parameters (each preserved as a separate word)
echo '$@:'$@

# Argument 0 (the script name)
echo "\${0}:" ${0}

# First argument
echo "\${1}:" ${1}

# Parameters with two-digit numbers (10 and above) must be wrapped in {} as below.
echo '$0 $1 $2 $3 $4 $5 $6 $7 $8 $9 ${10} ${11}:'$0 $1 $2 $3 $4 $5 $6 $7 $8 $9 ${10} ${11}
|
#!/usr/bin/env bash
#
# SPDX-License-Identifier: Apache-2.0
#
# Generates a .env file with Fabric credentials and connection profiles for
# the sample REST server.
#
# Bug fix: the AS_LOCAL_HOST default was written as a bare
# `${AS_LOCAL_HOST:=true}` — without the leading `:` null command the shell
# executes the expanded value as a command. Prefixed with `: "..."` like the
# other defaults below.
: "${AS_LOCAL_HOST:=true}"
: "${TEST_NETWORK_HOME:=../..}"
: "${CONNECTION_PROFILE_FILE_ORG1:=${TEST_NETWORK_HOME}/organizations/peerOrganizations/org1.example.com/connection-org1.json}"
: "${CERTIFICATE_FILE_ORG1:=${TEST_NETWORK_HOME}/organizations/peerOrganizations/org1.example.com/users/User1@org1.example.com/msp/signcerts/User1@org1.example.com-cert.pem}"
: "${PRIVATE_KEY_FILE_ORG1:=${TEST_NETWORK_HOME}/organizations/peerOrganizations/org1.example.com/users/User1@org1.example.com/msp/keystore/priv_sk}"
: "${CONNECTION_PROFILE_FILE_ORG2:=${TEST_NETWORK_HOME}/organizations/peerOrganizations/org2.example.com/connection-org2.json}"
: "${CERTIFICATE_FILE_ORG2:=${TEST_NETWORK_HOME}/organizations/peerOrganizations/org2.example.com/users/User1@org2.example.com/msp/signcerts/User1@org2.example.com-cert.pem}"
: "${PRIVATE_KEY_FILE_ORG2:=${TEST_NETWORK_HOME}/organizations/peerOrganizations/org2.example.com/users/User1@org2.example.com/msp/keystore/priv_sk}"

# Common settings. Certificates and keys are flattened onto one line with
# literal \n escapes so they survive dotenv parsing.
cat << ENV_END > .env
# Generated .env file
# See src/config.ts for details of all the available configuration variables
#
LOG_LEVEL=info
PORT=3000
HLF_CERTIFICATE_ORG1="$(cat ${CERTIFICATE_FILE_ORG1} | sed -e 's/$/\\n/' | tr -d '\r\n')"
HLF_PRIVATE_KEY_ORG1="$(cat ${PRIVATE_KEY_FILE_ORG1} | sed -e 's/$/\\n/' | tr -d '\r\n')"
HLF_CERTIFICATE_ORG2="$(cat ${CERTIFICATE_FILE_ORG2} | sed -e 's/$/\\n/' | tr -d '\r\n')"
HLF_PRIVATE_KEY_ORG2="$(cat ${PRIVATE_KEY_FILE_ORG2} | sed -e 's/$/\\n/' | tr -d '\r\n')"
REDIS_PORT=6379
ORG1_APIKEY=$(uuidgen)
ORG2_APIKEY=$(uuidgen)
ENV_END

# Connection profiles: local runs use the profiles as-is and reach services on
# localhost; containerized runs rewrite the peer/CA URLs to service hostnames.
if [ "${AS_LOCAL_HOST}" = "true" ]; then
  cat << LOCAL_HOST_END >> .env
AS_LOCAL_HOST=true
HLF_CONNECTION_PROFILE_ORG1=$(cat ${CONNECTION_PROFILE_FILE_ORG1} | jq -c .)
HLF_CONNECTION_PROFILE_ORG2=$(cat ${CONNECTION_PROFILE_FILE_ORG2} | jq -c .)
REDIS_HOST=localhost
LOCAL_HOST_END
elif [ "${AS_LOCAL_HOST}" = "false" ]; then
  cat << WITH_HOSTNAME_END >> .env
AS_LOCAL_HOST=false
HLF_CONNECTION_PROFILE_ORG1=$(cat ${CONNECTION_PROFILE_FILE_ORG1} | jq -c '.peers["peer0.org1.example.com"].url = "grpcs://peer0.org1.example.com:7051" | .certificateAuthorities["ca.org1.example.com"].url = "https://ca.org1.example.com:7054"')
HLF_CONNECTION_PROFILE_ORG2=$(cat ${CONNECTION_PROFILE_FILE_ORG2} | jq -c '.peers["peer0.org2.example.com"].url = "grpcs://peer0.org2.example.com:9051" | .certificateAuthorities["ca.org2.example.com"].url = "https://ca.org2.example.com:8054"')
REDIS_HOST=redis
WITH_HOSTNAME_END
fi
|
#!/bin/bash
# Remove Python bytecode artifacts from the parent tree.

# Delete __pycache__ directories ('+' batches paths into few rm invocations).
find .. -type d -name "__pycache__" -exec rm -rf {} +;
# Delete stray compiled .pyc files.
find .. -type f -name "*.pyc" -delete;
|
import * as React from "react";
/**
 * Static documentation page describing the framework's execution model.
 * Renders fixed markup only, so no state or props handling is needed.
 * Fix: removed the useless constructor that only called super(props)
 * (a no-op; flagged by the no-useless-constructor lint rule).
 */
export default class ExecutionModel extends React.Component {
  public render() {
    return (
      <div>
        <h1>Execution Model</h1>
        <hr/>
        <h3>Services and Controllers</h3>
        <p>Services and Controllers act as singletons. In other words, only one instance exists per service or controller in a given environment.</p>
        <ol>
          <li>All modules are loaded using <code>require()</code> at the start of runtime.</li>
          <li>All modules have properties and methods "injected" via metatable.</li>
          <li>Each <code>Init</code> method on the modules are invoked one-by-one synchronously.</li>
          <li>Each <code>Start</code> method on the modules are invoked asynchronously.</li>
          <li>The module remains in memory for the remainder of runtime.</li>
        </ol>
        <hr/>
        <h3>Modules and Shared</h3>
        <ol>
          <li>A module (in Modules or Shared) is loaded using <code>require()</code> the first time it is referenced (i.e. lazy-loaded).</li>
          <li>The module has properties and methods "injected" via metatable.</li>
          <li>The module's <code>Init</code> method is invoked synchronously.</li>
          <li>The module's <code>Start</code> method is invoked immediately and asynchronously after the <code>Init</code> method is completed.</li>
        </ol>
        <hr/>
        <h3>Notes and Best Practices</h3>
        <ul>
          <li>The <code>Init</code> and <code>Start</code> methods are always optional, but it is good practice to always include them.</li>
          <li>The <code>Init</code> method should be used to set up the individual module.</li>
          <li>The <code>Init</code> method should try to do as minimal work as possible, as other modules are blocked until it is completed.</li>
          <li>The <code>Init</code> method should <i>not</i> be used to invoke methods from other modules in the framework (that should be done in or after <code>Start</code>)</li>
          <li>Events <i>must</i> be registered in the <code>Init</code> method.</li>
          <li>Events should <i>never</i> be connected or fired within the <code>Init</code> method. Do this within the <code>Start</code> method.</li>
          <li>Because Modules and Shared modules are lazy-loaded, their <code>Init</code> methods are executed after other modules have been started.</li>
        </ul>
      </div>
    );
  }
}
|
# Read an Elixir program from stdin into program.exs, then execute it.
cat > program.exs
elixir program.exs
|
import { NativeModules } from 'react-native'

// Re-export the native RNAdMobPlus module as this package's default export.
const { RNAdMobPlus } = NativeModules

export default RNAdMobPlus
|
<filename>src/main/java/com/xstudio/dao/sys/SysUserMapperExtend.java
package com.xstudio.dao.sys;
import com.xstudio.controllers.framework.IList;
import com.xstudio.models.sys.SysUser;
import org.apache.ibatis.annotations.Select;
import org.springframework.stereotype.Repository;
import java.util.HashMap;
import java.util.List;
/**
 * Extra SQL-user queries that supplement the generated mapper.
 */
@Repository
public interface SysUserMapperExtend extends IList{
    /**
     * Loads the authority/permission rows for the given user id.
     * NOTE(review): no @Select here — presumably mapped in an XML mapper file; confirm.
     */
    List<HashMap<String,Object>> loadUserAuth(Integer id);

    /**
     * Finds all users assigned to the given role id via the sys_role_user join table.
     */
    @Select("select a.* from sys_user as a " +
            "LEFT JOIN sys_role_user as b " +
            "on a.id=b.userid where b.roleid=#{id}")
    List<SysUser> findUserByRoleId(Integer id);
}
|
#!/usr/bin/env sh
# First argument: Identifier
# Second argument: Operating system
#
# Assembles a self-contained OpenVPN client profile (.ovpn) by concatenating
# the OS-specific config with the CA cert, client cert/key and TLS-auth key.
#
# Bug fix: the original used bash-only process substitution `<(echo -e ...)`
# and non-POSIX `echo -e` under a `sh` shebang, which fails on dash/POSIX sh.
# Rewritten with command grouping and printf; the output bytes are identical.
identifier="$1"
os="$2"

mkdir -p ovpns

{
  cat "$os.conf"
  printf '\n<ca>\n'
  cat pki/ca.crt
  printf '</ca>\n<cert>\n'
  cat pki/issued/"$identifier".crt
  printf '</cert>\n<key>\n'
  cat pki/private/"$identifier".key
  printf '</key>\n<tls-auth>\n'
  cat ta.key
  printf '</tls-auth>\n'
} > ovpns/"$identifier".ovpn
|
<gh_stars>0
package machine
import (
"bufio"
"bytes"
"io/ioutil"
"runtime"
"strconv"
"github.com/coreos/fleet/resource"
)
const (
memInfoPath = "/proc/meminfo"
)
// readLocalResources reports this machine's resources: CPU capacity in
// hundredths of a core and total memory in MB (parsed from /proc/meminfo).
// On a memory read error the partially filled tuple is returned with the error.
func readLocalResources() (resource.ResourceTuple, error) {
	res := resource.ResourceTuple{Cores: 100 * runtime.NumCPU()}

	// TODO(uwedeportivo): determine disk space

	mem, err := readMeminfo()
	if err != nil {
		return res, err
	}
	res.Memory = mem

	return res, nil
}
// parseMeminfo extracts the total amount of memory from /proc/meminfo
// contents and returns it in MB. The value following the "MemTotal:" token
// is read as kB and shifted down by 10 bits (kB -> MB). When no MemTotal
// entry is present, 0 is returned without error.
func parseMeminfo(memstr []byte) (int, error) {
	scanner := bufio.NewScanner(bytes.NewBuffer(memstr))
	scanner.Split(bufio.ScanWords)

	totalMB := 0
	awaitingValue := false
	for scanner.Scan() {
		word := scanner.Text()
		if awaitingValue {
			// The word right after "MemTotal:" is the amount in kB.
			kb, err := strconv.Atoi(word)
			if err != nil {
				return 0, err
			}
			totalMB = kb >> 10
			break
		}
		if word == "MemTotal:" {
			awaitingValue = true
		}
	}
	if err := scanner.Err(); err != nil {
		return 0, err
	}

	return totalMB, nil
}
// readMeminfo reads /proc/meminfo and returns
// the total amount of memory in MB available on the system.
func readMeminfo() (int, error) {
	contents, readErr := ioutil.ReadFile(memInfoPath)
	if readErr != nil {
		return 0, readErr
	}
	return parseMeminfo(contents)
}
|
# Nix builder prologue: stdenv helpers plus the mirror list definitions.
source $stdenv/setup

source $mirrorsFile

curlVersion=$(curl -V | head -1 | cut -d' ' -f2)

# Curl flags to handle redirects, not use EPSV, handle cookies for
# servers to need them during redirects, and work on SSL without a
# certificate (this isn't a security problem because we check the
# cryptographic hash of the output anyway).
curl=(
    curl
    --location
    --max-redirs 20
    --retry 3
    --disable-epsv
    --cookie-jar cookies
    --insecure
    --user-agent "curl/$curlVersion Nixpkgs/$nixpkgsVersion"
    $curlOpts
    $NIX_CURL_FLAGS
)

# Download straight into $out by default; with $downloadToTemp set, stage the
# file in $TMPDIR so the postFetch hook can transform it first.
downloadedFile="$out"
if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
# tryDownload URL
# Attempts to download $1 into $downloadedFile, resuming partial transfers
# (curl exit code 18) until curl either succeeds or fails for another reason.
# Sets the global $success to 1 on success, clears it otherwise.
tryDownload() {
    local url="$1"
    echo
    header "trying $url"
    local curlexit=18;

    success=

    # if we get error code 18, resume partial download
    while [ $curlexit -eq 18 ]; do
        # keep this inside an if statement, since on failure it doesn't abort the script
        if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then
            success=1
            break
        else
            curlexit=$?;
        fi
    done
}
# finish [skipPostFetch]
# Post-download bookkeeping: restore globbing, mark the file executable when
# $executable is set, run the postFetch hook (unless the first argument is
# non-empty), and exit the builder successfully.
finish() {
    local skipPostFetch="$1"

    set +o noglob

    if [[ $executable == "1" ]]; then
        chmod +x $downloadedFile
    fi

    if [ -z "$skipPostFetch" ]; then
        runHook postFetch
    fi

    exit 0
}
# tryHashedMirrors
# Probes each content-addressed ("hashed") mirror for $outputHashAlgo/$outputHash
# with a cheap HEAD request and downloads from the first mirror that has it.
# $NIX_HASHED_MIRRORS overrides the built-in mirror list.
tryHashedMirrors() {
    if test -n "$NIX_HASHED_MIRRORS"; then
        hashedMirrors="$NIX_HASHED_MIRRORS"
    fi

    for mirror in $hashedMirrors; do
        url="$mirror/$outputHashAlgo/$outputHash"
        if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
            --fail --silent --show-error --head "$url" \
            --write-out "%{http_code}" --output /dev/null > code 2> log; then
            tryDownload "$url"

            # We skip postFetch here, because hashed-mirrors are
            # already content addressed. So if $outputHash is in the
            # hashed-mirror, changes from ‘postFetch’ would already be
            # made. So, running postFetch will end up applying the
            # change /again/, which we don’t want.
            if test -n "$success"; then finish skipPostFetch; fi
        else
            # Be quiet about 404 errors, which we interpret as the file
            # not being present on this particular mirror.
            if test "$(cat code)" != 404; then
                echo "error checking the existence of $url:"
                cat log
            fi
        fi
    done
}
# --- Expand mirror:// URLs into the concrete mirror list ---

# URL list may contain ?. No glob expansion for that, please
set -o noglob

urls2=
for url in $urls; do
    if test "${url:0:9}" != "mirror://"; then
        urls2="$urls2 $url"
    else
        # Split "mirror://site/path" into the site name and the file path.
        url2="${url:9}"; echo "${url2/\// }" > split; read site fileName < split
        #varName="mirror_$site"
        varName="$site" # !!! danger of name clash, fix this
        if test -z "${!varName}"; then
            echo "warning: unknown mirror:// site \`$site'"
        else
            mirrors=${!varName}

            # Allow command-line override by setting NIX_MIRRORS_$site.
            varName="NIX_MIRRORS_$site"
            if test -n "${!varName}"; then mirrors="${!varName}"; fi

            for url3 in $mirrors; do
                urls2="$urls2 $url3$fileName";
            done
        fi
    fi
done
urls="$urls2"

# Restore globbing settings
set +o noglob

# With $showURLs set, just emit the candidate URLs instead of downloading.
if test -n "$showURLs"; then
    echo "$urls" > $out
    exit 0
fi

if test -n "$preferHashedMirrors"; then
    tryHashedMirrors
fi

# URL list may contain ?. No glob expansion for that, please
set -o noglob

# --- Try each concrete URL in order until one succeeds ---
success=
for url in $urls; do
    if [ -z "$postFetch" ]; then
        case "$url" in
            https://github.com/*/archive/*)
                echo "warning: archives from GitHub revisions should use fetchFromGitHub"
                ;;
            https://gitlab.com/*/-/archive/*)
                echo "warning: archives from GitLab revisions should use fetchFromGitLab"
                ;;
        esac
    fi
    tryDownload "$url"
    if test -n "$success"; then finish; fi
done

# Restore globbing settings
set +o noglob

# Fall back to the hashed mirrors last when they were not preferred.
if test -z "$preferHashedMirrors"; then
    tryHashedMirrors
fi

echo "error: cannot download $name from any mirror"
exit 1
|
<filename>python_modules/dagster/dagster/core/snap/mode.py
# Contains mode, resources, loggers
from collections import namedtuple
from dagster import check
from dagster.config.snap import ConfigFieldSnap, snap_from_field
from dagster.core.definitions import LoggerDefinition, ModeDefinition, ResourceDefinition
from dagster.serdes import whitelist_for_serdes
def build_mode_def_snap(mode_def, root_config_key):
    """Build a serializable ModeDefSnap from a ModeDefinition.

    Resource and logger snapshots are sorted by name so the snapshot is
    deterministic regardless of definition order.
    """
    check.inst_param(mode_def, "mode_def", ModeDefinition)
    check.str_param(root_config_key, "root_config_key")

    resource_snaps = [
        build_resource_def_snap(name, rd) for name, rd in mode_def.resource_defs.items()
    ]
    resource_snaps.sort(key=lambda snap: snap.name)

    logger_snaps = [build_logger_def_snap(name, ld) for name, ld in mode_def.loggers.items()]
    logger_snaps.sort(key=lambda snap: snap.name)

    return ModeDefSnap(
        name=mode_def.name,
        description=mode_def.description,
        resource_def_snaps=resource_snaps,
        logger_def_snaps=logger_snaps,
        root_config_key=root_config_key,
    )
@whitelist_for_serdes
class ModeDefSnap(
    namedtuple(
        "_ModeDefSnap", "name description resource_def_snaps logger_def_snaps root_config_key"
    )
):
    """Serializable snapshot of a ModeDefinition: its name, description,
    resource/logger snapshots and the root config key."""

    def __new__(
        cls,
        name,
        description,
        resource_def_snaps,
        logger_def_snaps,
        # root_config_key was added after pipeline snapshots started getting persisted
        root_config_key=None,
    ):
        return super(ModeDefSnap, cls).__new__(
            cls,
            name=check.str_param(name, "name"),
            description=check.opt_str_param(description, "description"),
            resource_def_snaps=check.list_param(
                resource_def_snaps, "resource_def_snaps", of_type=ResourceDefSnap
            ),
            logger_def_snaps=check.list_param(
                logger_def_snaps, "logger_def_snaps", of_type=LoggerDefSnap
            ),
            root_config_key=check.opt_str_param(root_config_key, "root_config_key"),
        )
def build_resource_def_snap(name, resource_def):
    """Build a serializable ResourceDefSnap from a named ResourceDefinition."""
    check.str_param(name, "name")
    check.inst_param(resource_def, "resource_def", ResourceDefinition)

    # Only snapshot the config field when the resource actually declares one.
    if resource_def.has_config_field:
        config_snap = snap_from_field("config", resource_def.config_field)
    else:
        config_snap = None

    return ResourceDefSnap(
        name=name,
        description=resource_def.description,
        config_field_snap=config_snap,
    )
@whitelist_for_serdes
class ResourceDefSnap(namedtuple("_ResourceDefSnap", "name description config_field_snap")):
    """Serializable snapshot of a ResourceDefinition (name, description, config field)."""

    def __new__(cls, name, description, config_field_snap):
        return super(ResourceDefSnap, cls).__new__(
            cls,
            name=check.str_param(name, "name"),
            description=check.opt_str_param(description, "description"),
            config_field_snap=check.opt_inst_param(
                config_field_snap, "config_field_snap", ConfigFieldSnap
            ),
        )
def build_logger_def_snap(name, logger_def):
    """Build a serializable LoggerDefSnap from a named LoggerDefinition."""
    check.str_param(name, "name")
    check.inst_param(logger_def, "logger_def", LoggerDefinition)

    # Only snapshot the config field when the logger actually declares one.
    if logger_def.has_config_field:
        config_snap = snap_from_field("config", logger_def.config_field)
    else:
        config_snap = None

    return LoggerDefSnap(
        name=name,
        description=logger_def.description,
        config_field_snap=config_snap,
    )
@whitelist_for_serdes
class LoggerDefSnap(namedtuple("_LoggerDefSnap", "name description config_field_snap")):
    """Serializable snapshot of a LoggerDefinition (name, description, config field)."""

    def __new__(cls, name, description, config_field_snap):
        return super(LoggerDefSnap, cls).__new__(
            cls,
            name=check.str_param(name, "name"),
            description=check.opt_str_param(description, "description"),
            config_field_snap=check.opt_inst_param(
                config_field_snap, "config_field_snap", ConfigFieldSnap
            ),
        )
|
<filename>cogboard-webapp/src/components/widgets/types/SonarQubeWidget.js
import React from 'react';
import { number, string, object } from 'prop-types';
import { Caption, WidgetButton } from '../../styled';
const SonarQubeWidget = props => {
const { metrics, url, version, date } = props;
const ts = date ? new Date(Date.parse(date)).toLocaleString() : '';
return (
<>
<Caption>{ts}</Caption>
{version === '-' ? null : <Caption>Version: {version}</Caption>}
{Object.entries(metrics).map(([metric, val]) => (
<Caption key={metric}>
{metric.replace('_', ' ')}: {val}
</Caption>
))}
<WidgetButton href={url}>OPEN DASHBOARD</WidgetButton>
</>
);
};
// Runtime prop validation for the widget.
SonarQubeWidget.propTypes = {
  metrics: object,
  id: number.isRequired,
  url: string.isRequired,
  version: string,
  date: string
};

// Fallbacks keep the widget rendering when optional data is missing.
SonarQubeWidget.defaultProps = {
  metrics: {},
  version: '-',
  date: ''
};

export default SonarQubeWidget;
|
//
// FMEncryptHelper.h
// FmdbDemo
//
// Created by ZhengXiankai on 15/8/26.
// Copyright (c) 2015年 ZhengXiankai. All rights reserved.
//
#import <Foundation/Foundation.h>
/// Helper for encrypting, decrypting and re-keying SQLite database files.
@interface FMEncryptHelper : NSObject

/** encrypt sqlite database (same file) */
+ (BOOL)encryptDatabase:(NSString *)path encryptKey:(NSString *)encryptKey;

/** decrypt sqlite database (same file) */
+ (BOOL)unEncryptDatabase:(NSString *)path encryptKey:(NSString *)encryptKey;

/** encrypt sqlite database to new file */
+ (BOOL)encryptDatabase:(NSString *)sourcePath targetPath:(NSString *)targetPath encryptKey:(NSString *)encryptKey;

/** decrypt sqlite database to new file */
+ (BOOL)unEncryptDatabase:(NSString *)sourcePath targetPath:(NSString *)targetPath encryptKey:(NSString *)encryptKey;

/** change secretKey for sqlite database */
+ (BOOL)changeKey:(NSString *)dbPath originKey:(NSString *)originKey newKey:(NSString *)newKey;

@end
|
<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Command line interface for the DCI agents
"""
import click
import dciagent
import dciagent.agents
@click.group(invoke_without_command=True)
@click.option(
    "-C",
    "--config-dir",
    metavar="PATH",
    envvar="DCI_CONFIG_DIR",
    help="Path to the configuration directory, default is different per agent.",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
)
@click.option(
    "-P",
    "--prefix",
    metavar="STRING",
    envvar="DCI_PREFIX",
    help="String to prepend to all config files to be loaded.",
)
@click.option(
    "-S",
    "--settings",
    metavar="FILE",
    envvar="DCI_SETTINGS",
    help="Path to settings file, overrides config-dir/prefix auto-discovery.",
    type=click.Path(exists=True, file_okay=True, dir_okay=False),
)
@click.option(
    "-I",
    "--inventory",
    metavar="FILE",
    envvar="DCI_INVENTORY",
    help="Ansible inventory file, overrides config-dir/prefix auto-discovery.",
    type=click.Path(exists=True, file_okay=True, dir_okay=False),
)
@click.option("-D", "--debug", is_flag=True, help="Enable debugging output.")
@click.option("-V", "--version", is_flag=True, help="Print program version.")
@click.pass_context
def cli(ctx, config_dir, prefix, settings, inventory, debug, version):
    """Root agent control script"""
    # -V/--version prints the package version and exits without a subcommand.
    if version:
        click.echo(dciagent.__version__)
        return

    # Share the global options with subcommands through the click context object.
    ctx.ensure_object(dict)
    ctx.obj["config_dir"] = config_dir
    ctx.obj["prefix"] = prefix
    ctx.obj["settings"] = settings
    ctx.obj["inventory"] = inventory
    ctx.obj["debug"] = debug
@cli.command()
@click.option(
    "-i",
    "--image",
    envvar="DCI_CONTAINER_IMAGE",
    default="quay.io/thekad/alpine-ansible:3",
    help="Container image used to run the agent.",
)
@click.argument(
    "playbook",
    type=click.Path(exists=True, file_okay=True, dir_okay=False),
)
@click.option(
    "-xa",
    "--extra-ansible-opts",
    envvar="DCI_EXTRA_ANSIBLE_OPTS",
    help="Extra options passed to ansible.",
)
@click.option(
    "-xp",
    "--extra-podman-opts",
    envvar="DCI_EXTRA_PODMAN_OPTS",
    help="Extra options passed to podman.",
)
@click.pass_context
def rhel(ctx, image, playbook, extra_ansible_opts, extra_podman_opts):
    "Red Hat Enterprise Linux Containerized Agent"
    # Build the containerized agent from the shared context plus command options.
    a = dciagent.agents.RHELAgent(
        ctx.obj, playbook, image, extra_ansible_opts, extra_podman_opts
    )
    # a.validate()
    a.run()
@cli.command()
@click.option(
    "-xa",
    "--extra-ansible-opts",
    envvar="DCI_EXTRA_ANSIBLE_OPTS",
    help="Extra options passed to ansible.",
)
@click.argument(
    "playbook",
    type=click.Path(exists=True, file_okay=True, dir_okay=False),
)
@click.pass_context
def ocp(ctx, playbook, extra_ansible_opts):
    "OpenShift Ansible Agent"
    # Unlike rhel, the ansible options travel via the shared context object.
    ctx.obj["extra_ansible_opts"] = extra_ansible_opts
    a = dciagent.agents.OCPAgent(ctx.obj, playbook)
    # a.validate()
    a.run()
if __name__ == "__main__":
    # Entry point when executed directly; start with an empty context object.
    cli(obj={})
|
<filename>website/static/src/components/App.js
import React from 'react';
import {
BrowserRouter as Router,
Switch,
Route
} from 'react-router-dom';
import Project from './Project';
import CreateProject from './CreateProject';
// Top-level router: maps URL paths to page components.
function App(props) {
  return (
    <div className="app">
      <Router>
        <Switch>
          <Route exact path="/">
            Homepage
          </Route>
          <Route path="/project/:projectCode" component={Project} />
          <Route path="/create" component={CreateProject} />
        </Switch>
      </Router>
    </div>
  )
}

export default App
|
#!/bin/bash
# Install system and Python dependencies, then build the project's virtualenv.
echo "DEPLOYING WEB"

# Absolute directory containing this script (robust to relative invocation).
DOCKER_SCRIPTS="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"

"${DOCKER_SCRIPTS}/install-system-libs.sh"
"${DOCKER_SCRIPTS}/install-python-libs.sh"

# Project root is one level above the scripts directory.
PROJECT_DIR="$(cd "${DOCKER_SCRIPTS}/.." >/dev/null 2>&1 && pwd)"
cd "${PROJECT_DIR}" || exit 1

# Create a Python 3 pipenv environment and build the venv via the Makefile.
pipenv --three
make venv

echo "DONE: DEPLOYING WEB"
|
<filename>src/app/application-list/student-create-application/student-create-application.component.ts<gh_stars>0
import {Component, Inject, OnDestroy, OnInit} from '@angular/core'
import {MAT_DIALOG_DATA, MatDialog, MatDialogRef, MatOptionSelectionChange} from '@angular/material'
import {LabworkAtom} from '../../models/labwork.model'
import {FormControl, FormGroup} from '@angular/forms'
import {FormInput} from '../../shared-dialogs/forms/form.input'
import {FormInputOption} from '../../shared-dialogs/forms/form.input.option'
import {User} from '../../models/user.model'
import {invalidChoiceKey} from '../../utils/form.validator'
import {formatUser} from '../../utils/component.utils'
import {BuddyResult, UserService} from '../../services/user.service'
import {LabworkApplicationAtom, LabworkApplicationProtocol} from '../../models/labwork.application.model'
import {isOption} from '../../utils/form-control-utils'
import {isUser} from '../../utils/type.check.utils'
import {Subscription} from 'rxjs'
import {subscribe} from '../../utils/functions'
@Component({
  selector: 'lwm-student-create-application',
  templateUrl: './student-create-application.component.html',
  styleUrls: ['./student-create-application.component.scss']
})
export class StudentCreateApplicationComponent implements OnInit, OnDestroy {

  // Reactive form backing the dialog; controls are added dynamically in ngOnInit.
  formGroup: FormGroup
  // One entry per rendered input, paired with an optional buddy-check hint.
  optionControls: { input: FormInput, hint: string | undefined }[]
  // Open subscriptions; drained in ngOnDestroy.
  private subs: Subscription[] = []

  /**
   * Opens this component as a material dialog for creating or editing a
   * labwork application. Pass `app` to prefill an existing application.
   */
  static instance(
    dialog: MatDialog,
    labwork: LabworkAtom,
    applicantId: string,
    app: LabworkApplicationAtom | undefined
  ): MatDialogRef<StudentCreateApplicationComponent, LabworkApplicationProtocol> {
    return dialog.open<StudentCreateApplicationComponent, any, LabworkApplicationProtocol>(StudentCreateApplicationComponent, {
      minWidth: '600px',
      data: [labwork, app, applicantId],
      panelClass: 'studentApplicationDialog'
    })
  }

  constructor(
    private dialogRef: MatDialogRef<StudentCreateApplicationComponent, LabworkApplicationProtocol>,
    private readonly userService: UserService,
    @Inject(MAT_DIALOG_DATA) public payload: [LabworkAtom, LabworkApplicationAtom, string]
  ) {
    this.formGroup = new FormGroup({})
    this.optionControls = []
  }

  // Build one form control per input description and register it on the form.
  ngOnInit(): void {
    this.optionControls = this.inputData().map(i => ({input: i, hint: undefined}))

    this.optionControls.forEach(d => {
      const fc = new FormControl(d.input.data.value, d.input.data.validator)
      this.formGroup.addControl(d.input.formControlName, fc)

      if (isOption(d.input.data)) {
        d.input.data.onInit(this.formGroup)
      }
    })
  }

  ngOnDestroy(): void {
    this.subs.forEach(_ => _.unsubscribe())
  }

  // The FormInputOption behind an input, if it is an option-typed input.
  optionControl = (input: FormInput): FormInputOption<User> | undefined =>
    isOption(input.data) ? input.data : undefined

  headerTitle = () => {
    const labworkLabel = this.labwork().label
    return this.isEditDialog() ?
      `Bearbeitung der Anmeldung für ${labworkLabel}` :
      `Anmeldung für ${labworkLabel}`
  }

  buttonTitle = () =>
    this.isEditDialog() ? 'Aktualisieren' : 'Anmelden'

  // Edit mode iff an existing application was supplied in the payload.
  isEditDialog = () =>
    this.existingApplication() !== undefined

  // Collects the chosen friends and closes the dialog with the protocol object.
  onSubmit = () => {
    if (!this.formGroup.valid) {
      return
    }

    const extractFriends = () => {
      const users: string[] = []

      this.optionControls.forEach(c => {
        const user = this.formGroup.controls[c.input.formControlName].value
        if (isUser(user)) {
          users.push(user.id)
        }
      })

      return users
    }

    const p: LabworkApplicationProtocol = {
      applicant: this.applicantId(),
      labwork: this.labwork().id,
      friends: extractFriends()
    }

    this.dialogRef.close(p)
  }

  onCancel = () =>
    this.dialogRef.close()

  // Payload accessors: [labwork, existing application, applicant id].
  private labwork = (): LabworkAtom =>
    this.payload[0]

  private existingApplication = (): LabworkApplicationAtom | undefined =>
    this.payload[1]

  private applicantId = (): string =>
    this.payload[2]

  // TODO reuse instead of copy
  // Builds the two optional "friend" inputs, prefilled from an existing
  // application when available. Candidates are fellow students of the same degree.
  inputData = (): FormInput[] => {
    const fellowStudents$ = this.userService.getAllWithFilter(
      {attribute: 'status', value: 'student'},
      {attribute: 'degree', value: this.labwork().degree.id}
    )

    const friendFormInputAt = (i: 0 | 1) => {
      const controlName = i === 0 ? 'friends1' : 'friends2'
      const app = this.existingApplication()
      const displayUser = (u: User) => u.systemId

      if (app && app.friends.length >= i + 1) {
        return new FormInputOption<User>(
          controlName,
          invalidChoiceKey,
          false,
          displayUser,
          fellowStudents$,
          0,
          opts => opts.find(_ => _.id === app.friends[i].id)
        )
      } else {
        return new FormInputOption<User>(
          controlName,
          invalidChoiceKey,
          false,
          displayUser,
          fellowStudents$,
          0
        )
      }
    }

    return [
      {
        formControlName: 'friends1',
        displayTitle: 'Partnerwunsch 1 (Optional)',
        isDisabled: false,
        data: friendFormInputAt(0)
      },
      {
        formControlName: 'friends2',
        displayTitle: 'Partnerwunsch 2 (Optional)',
        isDisabled: false,
        data: friendFormInputAt(1)
      }
    ]
  }

  // When a friend is picked, ask the server whether the buddy request matches
  // and show the result as a hint next to the input.
  onSelectionChange = (c: MatOptionSelectionChange, input: { input: FormInput, hint: string | undefined }) => {
    const user = c.source.value

    if (!(c.isUserInput && isUser(user))) {
      return
    }

    const updateHint = (res: BuddyResult) => {
      input.hint = res.message
    }

    this.subs.push(subscribe(
      this.userService.buddy(this.labwork().id, this.applicantId(), user.systemId),
      updateHint
    ))
  }
}
|
#!/bin/bash
# Copyright 2018 The Vitess Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# this scripts brings up zookeeper and all the vitess components
# required for a single shard deployment.

set -e

# Directory of this script (NOTE(review): unused below — kept for consistency
# with the sibling scripts; confirm before removing).
script_root=`dirname "${BASH_SOURCE}"`

# Drop the commerce tables and remove the customer/corder blacklist controls
# from every tablet type (presumably cleanup after a table move — confirm).
./lvtctl.sh ApplySchema -sql-file drop_commerce_tables.sql commerce
./lvtctl.sh SetShardTabletControl -blacklisted_tables=customer,corder -remove commerce/0 rdonly
./lvtctl.sh SetShardTabletControl -blacklisted_tables=customer,corder -remove commerce/0 replica
./lvtctl.sh SetShardTabletControl -blacklisted_tables=customer,corder -remove commerce/0 master

# Detach background jobs from this shell so they survive it.
disown -a
|
import numpy as np
def extractSpadData(data, mode):
    """Extract and/or sum detector-element traces from SPAD-FCS data.

    ========== ===============================================================
    Input      Meaning
    ---------- ---------------------------------------------------------------
    data       Array with 26 columns (25 detector elements plus a sum column
               at index 25), one row per time point.
    mode       Either
                - an int 0..25: return the trace of that detector element
                - an int -1..-26: total (column 25) minus element |mode|
                - a string: named summation, dispatched via switchModeString
                  ("sum", "sum3", "sum5", "outer", "chess0", "chess1",
                  "upperleft", "lowerright", ...)
    ========== ===============================================================

    ========== ===============================================================
    Output     Meaning
    ---------- ---------------------------------------------------------------
    data       Nx1 vector (N time points) with the extracted/summed data.
    ========== ===============================================================
    """
    if isinstance(mode, str):
        return switchModeString(data, mode)
    if mode >= 0:
        # Single detector element.
        return data[:, mode]
    # Negative mode: subtract element (-mode mod 25) from the sum column.
    return np.subtract(data[:, 25], data[:, np.mod(-mode, 25)])
def sumAll(data):
    # Total photon count: sum of the precomputed "sum" column (25) over time.
    return data[:, 25].sum()
def central(data):
    # Trace of the central detector element of the 5x5 array (index 12).
    CENTER_INDEX = 12
    return data[:, CENTER_INDEX]
def sum3(data):
    # Sum of the central 3x3 block of detector elements.
    central9 = [6, 7, 8, 11, 12, 13, 16, 17, 18]
    return np.sum(data[:, central9], axis=1)
def sum3_5D(data):
    # Collapse the time-bin axis (axis 3) of a [z, y, x, t, c] dataset,
    # then sum the central 3x3 detector elements.
    central9 = [6, 7, 8, 11, 12, 13, 16, 17, 18]
    collapsed = np.sum(data, axis=3)
    return np.sum(collapsed[:, :, :, central9], axis=3)
def allbuthot(data):
    # Sum all 25 detector elements except element 1 (the hot pixel).
    print("Not pixel 1")
    keep = [i for i in range(25) if i != 1]
    return np.sum(data[:, keep], axis=1)
def sum5(data):
    # Sum across all 26 columns (25 elements plus the sum column), matching
    # the original behavior.
    # datasum = data[:, 25]
    return data.sum(axis=1)
def allbuthot_5D(data):
    # Collapse the time-bin axis, then sum all detector elements except
    # 1, 4 and 15 (presumably hot pixels — confirm against the detector map).
    keep = [i for i in range(25) if i not in (1, 4, 15)]
    collapsed = np.sum(data, 3)
    return np.sum(collapsed[:, :, :, keep], 3)
def outer(data):
    # Sum the 16 detector elements on the edge of the 5x5 array.
    edge = [0, 1, 2, 3, 4, 5, 9, 10, 14, 15, 19, 20, 21, 22, 23, 24]
    return np.sum(data[:, edge], axis=1)
def chess0(data):
    # Sum the even-numbered detector elements (one chessboard color).
    even_cells = slice(0, 25, 2)
    return data[:, even_cells].sum(axis=1)
def chess1(data):
    # Sum the odd-numbered detector elements (the other chessboard color).
    odd_cells = slice(1, 25, 2)
    return data[:, odd_cells].sum(axis=1)
def chess3a(data):
    # Even-numbered elements within the central 3x3 block.
    cells = [6, 8, 12, 16, 18]
    return data[:, cells].sum(axis=1)
def chess3b(data):
    """Sum the odd-numbered elements of the central 3x3 detector block."""
    odd_central = [7, 11, 13, 17]
    return data[:, odd_central].sum(axis=1)
def upperLeft(data):
    """Sum the 12 detector elements in the upper-left half of the array."""
    upper_left = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 15]
    return data[:, upper_left].sum(axis=1)
def lowerRight(data):
    """Sum the 12 detector elements in the lower-right half of the array."""
    lower_right = [9, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24]
    return data[:, lower_right].sum(axis=1)
def switchModeString(data, mode):
    """Dispatch a string `mode` to the matching extraction/summing function.

    Raises ValueError for an unknown mode.  (The original passed the string
    "Invalid mode" as the dict default and then called it, which crashed
    with an unhelpful "'str' object is not callable" TypeError.)
    """
    switcher = {
        "central": central,
        "center": central,
        "sum3": sum3,
        "sum3_5D": sum3_5D,
        "allbuthot": allbuthot,
        "allbuthot_5D": allbuthot_5D,
        "sum5": sum5,
        "sum": sumAll,
        "outer": outer,
        "chess0": chess0,
        "chess1": chess1,
        "chess3a": chess3a,
        "chess3b": chess3b,
        "upperleft": upperLeft,
        "lowerright": lowerRight,
    }
    # Get the function from switcher dictionary; None marks an unknown mode.
    func = switcher.get(mode)
    if func is None:
        raise ValueError("Invalid mode: %r" % (mode,))
    # Execute the function
    return func(data)
|
<reponame>OfficialDarkCometklr5x/mfvanek
/*
* Copyright (c) 2019-2021. <NAME> and others.
* https://github.com/mfvanek/pg-index-health
*
* This file is a part of "pg-index-health" - a Java library for
* analyzing and maintaining indexes health in PostgreSQL databases.
*
* Licensed under the Apache License 2.0
*/
package io.github.mfvanek.pg.model.table;
import io.github.mfvanek.pg.utils.Validators;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.Immutable;
/**
* A base representation of database table.
*
* @author <NAME>
*/
@Immutable
public class Table implements TableNameAware, TableSizeAware, Comparable<Table> {

    private final String tableName;
    private final long tableSizeInBytes;

    /**
     * Creates a table representation after validating its arguments.
     *
     * @param tableName        non-blank table name
     * @param tableSizeInBytes non-negative size of the table in bytes
     */
    @SuppressWarnings("WeakerAccess")
    protected Table(@Nonnull final String tableName, final long tableSizeInBytes) {
        this.tableName = Validators.tableNameNotBlank(tableName);
        this.tableSizeInBytes = Validators.sizeNotNegative(tableSizeInBytes, "tableSizeInBytes");
    }

    /** {@inheritDoc} */
    @Override
    @Nonnull
    public String getTableName() {
        return tableName;
    }

    /** {@inheritDoc} */
    @Override
    public long getTableSizeInBytes() {
        return tableSizeInBytes;
    }

    /**
     * Renders the fields shared with subclasses; reused by their toString() implementations.
     */
    @SuppressWarnings("WeakerAccess")
    protected String innerToString() {
        return String.format("tableName='%s', tableSizeInBytes=%d", tableName, tableSizeInBytes);
    }

    @Override
    public String toString() {
        return Table.class.getSimpleName() + '{' + innerToString() + '}';
    }

    /**
     * Equality is based on the table name only; the size is deliberately ignored.
     */
    @Override
    public final boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        return (o instanceof Table) && Objects.equals(tableName, ((Table) o).tableName);
    }

    @Override
    public final int hashCode() {
        return Objects.hash(tableName);
    }

    /**
     * Orders tables alphabetically by name.
     */
    @Override
    public int compareTo(@Nonnull Table other) {
        Objects.requireNonNull(other, "other");
        return tableName.compareTo(other.tableName);
    }

    /**
     * Constructs a {@code Table} object.
     *
     * @param tableName        table name; should be non blank.
     * @param tableSizeInBytes table size in bytes; should be positive or zero.
     * @return {@code Table}
     */
    @Nonnull
    public static Table of(@Nonnull final String tableName, final long tableSizeInBytes) {
        return new Table(tableName, tableSizeInBytes);
    }
}
|
<reponame>BelgianSalamander/MoreBundles<gh_stars>0
package me.salamander.morebundles.common.gen;
import me.salamander.morebundles.common.Common;
import net.minecraft.client.Minecraft;
import net.minecraft.resources.ResourceLocation;
import org.jetbrains.annotations.Nullable;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Runtime texture generator: builds bundle item textures by re-colouring the
 * base bundle texture with a 6-colour palette extracted from a material
 * item's texture.
 */
public class TextureGen {
    // NOTE(review): assigned but never read inside this class — confirm before removing.
    private static boolean isClient = true;
    // The six ARGB colours present in the base bundle textures; every pixel
    // matching one of these is replaced by the corresponding palette entry.
    private static final int[] BUNDLE_COLORS = new int[]{0xFF421E01, 0xFF4F2B10, 0xFF623220, 0xFF7D4034, 0xFFA6572C, 0xFFCD7B46};
    // Base textures: vanilla bundle (empty/filled) and this mod's large variants.
    private static final BufferedImage BUNDLE = getMinecraftTexture("bundle");
    private static final BufferedImage BUNDLE_FILLED = getMinecraftTexture("bundle_filled");
    private static final BufferedImage LARGE_BUNDLE = getImage(TextureGen.class.getResourceAsStream("/assets/large_base.png"));
    private static final BufferedImage LARGE_BUNDLE_FILLED = getImage(TextureGen.class.getResourceAsStream("/assets/large_base_filled.png"));
    // Manual smoke test: generates an iron-ingot bundle texture repeatedly and
    // writes the result to run/test.png. The 20 s sleep gives time to attach tools.
    public static void main(String[] args) {
        System.out.println("Starting in 20 seconds");
        try {
            Thread.sleep(20000);
        }catch(InterruptedException e){
            throw new RuntimeException(e);
        }
        System.out.println("Starting");
        for(int i = 0; i < 20; i++) {
            saveImage(createBundle("iron_ingot", false, false), Path.of("run", "test.png"));
        }
    }
    // Convenience overload: loads the material texture from the resource path
    // implied by its ResourceLocation, then delegates to the BufferedImage overload.
    public static BufferedImage createBundle(ResourceLocation materialIdentifier, boolean large, boolean filled) {
        //long start = System.currentTimeMillis();
        BufferedImage bundle = createBundle(getImage(Minecraft.class.getResourceAsStream("/assets/" + materialIdentifier.getNamespace() + "/textures/item/" + materialIdentifier.getPath() + ".png")), large, filled);
        //System.out.println("Created bundle of " + materialIdentifier + " in " + (System.currentTimeMillis() - start) + "ms");
        return bundle;
    }
    // Core algorithm: extract a palette from the material texture, then recolour
    // the base bundle texture with it.
    public static BufferedImage createBundle(BufferedImage material, boolean large, boolean filled){
        // Collect every fully opaque colour in the material texture.
        Set<Integer> colors = new HashSet<>();
        for(int x = 0; x < material.getWidth(); x++){
            for (int y = 0; y < material.getHeight(); y++) {
                int color = material.getRGB(x, y);
                if(ColorModel.getRGBdefault().getAlpha(color) == 255){ //Alpha = 255
                    colors.add(color);
                }
            }
        }
        //System.out.println("Num Colors: " + colors.size());
        // Sort by brightness (HSV value), darkest first.
        List<HSVColor> hsvColors = colors.stream().map(HSVColor::fromRGB).collect(Collectors.toList());
        hsvColors.sort(Comparator.comparingDouble(color -> color.v));
        // Group colours whose hue is close (< 0.5 rad) to the group's last member,
        // so the chosen palette stays in one hue family.
        List<List<HSVColor>> colorGroups = new ArrayList<>();
        colorGroups.add(new ArrayList<>());
        colorGroups.get(0).add(hsvColors.get(0));
        for (int i = 1; i < hsvColors.size(); i++) {
            HSVColor color = hsvColors.get(i);
            boolean foundGroup = false;
            for(List<HSVColor> group: colorGroups){
                if(hueDiff(color, group.get(group.size() - 1)) < 0.5f){
                    group.add(color);
                    foundGroup = true;
                    break;
                }
            }
            if(!foundGroup){
                List<HSVColor> newGroup = new ArrayList<>();
                newGroup.add(color);
                colorGroups.add(newGroup);
            }
        }
        // Prefer the first group with at least 6 colours; if no group is large
        // enough, fall back to the full colour list.
        List<HSVColor> colorGroup;
        if(Collections.max(colorGroups, Comparator.comparingInt(List::size)).size() < 6){
            //throw new IllegalStateException("Couldn't find enough colors");
            colorGroup = hsvColors;
        }else{
            colorGroup = colorGroups.stream().filter(l -> l.size() >= 6).findFirst().get();
        }
        //System.out.println("Num Groups: " + colorGroups.size());
        // Thin the group down to exactly BUNDLE_COLORS.length entries.
        spread(colorGroup, BUNDLE_COLORS.length);
        int[] palette = colorGroup.stream().mapToInt(HSVColor::toRGB).toArray();
        return createBundleFromPalette(palette, large, filled);
    }
    // Convenience overload: looks the material texture up by its vanilla item name.
    public static BufferedImage createBundle(String materialName, boolean large, boolean filled){
        return createBundle(getMinecraftTexture(materialName), large, filled);
    }
    // Removes entries from a brightness-sorted colour list until only `target`
    // remain: drops a near-white brightest colour first, then removes evenly
    // spaced entries across the list.
    public static void spread(List<HSVColor> colors, int target){
        int remove = colors.size() - target;
        if(remove <= 0) return;
        if(colors.get(colors.size() - 1).v >= 0.95f){
            colors.remove(colors.size() - 1);
            remove--;
        }
        if(remove <= 0) return;
        float step = colors.size() / (float) remove;
        for(int i = remove - 1; i >= 0; i--){
            colors.remove((int) (step / 2 + step * i));
        }
    }
    // Copies the base texture and substitutes each BUNDLE_COLORS entry with the
    // palette colour of the same index.
    public static BufferedImage createBundleFromPalette(int[] palette, boolean large, boolean filled){
        BufferedImage bundleImage = large ? (filled ? LARGE_BUNDLE_FILLED : LARGE_BUNDLE) : (filled ? BUNDLE_FILLED : BUNDLE);
        BufferedImage outBundle = new BufferedImage(bundleImage.getWidth(), bundleImage.getHeight(), bundleImage.getType());
        bundleImage.copyData(outBundle.getRaster());
        for (int x = 0; x < outBundle.getWidth(); x++) {
            for (int y = 0; y < outBundle.getHeight(); y++) {
                int color = outBundle.getRGB(x, y);
                for (int i = 0; i < 6; i++) {
                    if(color == BUNDLE_COLORS[i]){
                        outBundle.setRGB(x, y, palette[i]);
                        break;
                    }
                }
            }
        }
        return outBundle;
    }
    // Smallest angular distance between two hues (hues are in radians, 0..2π).
    public static float hueDiff(HSVColor colorOne, HSVColor colorTwo){
        float diff1 = Math.abs(colorOne.h - colorTwo.h);
        float newH1 = colorOne.h, newH2 = colorTwo.h;
        if(colorOne.h > colorTwo.h){
            newH1 -= Math.PI * 2;
        }else{
            newH2 -= Math.PI * 2;
        }
        float diff2 = Math.abs(newH1 - newH2);
        return Math.min(diff1, diff2);
    }
    // Reads an image from a stream; returns null (after logging) on failure.
    public static @Nullable BufferedImage getImage(InputStream is){
        try {
            return ImageIO.read(is);
        }catch (IOException e){
            e.printStackTrace();
            return null;
        }
    }
    // Writes a PNG, creating parent directories and the file if needed.
    public static void saveImage(BufferedImage img, Path path){
        try {
            if(!Files.exists(path)){
                path.getParent().toFile().mkdirs();
                Files.createFile(path);
            }
            ImageIO.write(img, "PNG", path.toFile());
        }catch (IOException e){
            e.printStackTrace();
        }
    }
    // Loads a vanilla item texture by name from the Minecraft jar resources.
    private static BufferedImage getMinecraftTexture(String name){
        return getImage(Minecraft.class.getResourceAsStream("/assets/minecraft/textures/item/" + name + ".png"));
    }
    // Immutable HSV colour; h is in radians (0..2π), s and v in 0..1.
    private static record HSVColor(float h, float s, float v){
        private static final float SIXTY_DEGREES = (float) (Math.PI * 2 / 6);
        // Standard sector-based HSV -> packed ARGB conversion (alpha forced to 0xFF).
        public int toRGB(){
            float c = v * s;
            float x = c * (1 - Math.abs((h / SIXTY_DEGREES) % 2 - 1));
            float m = v - c;
            float red, green, blue;
            if(h < SIXTY_DEGREES){
                red = c;
                green = x;
                blue = 0;
            }else if(h < SIXTY_DEGREES * 2){
                red = x;
                green = c;
                blue = 0;
            }else if(h < SIXTY_DEGREES * 3){
                red = 0;
                green = c;
                blue = x;
            }else if(h < SIXTY_DEGREES * 4){
                red = 0;
                green = x;
                blue = c;
            }else if(h < SIXTY_DEGREES * 5){
                red = x;
                green = 0;
                blue = c;
            }else{
                red = c;
                green = 0;
                blue = x;
            }
            int r = (int) ((red + m) * 255);
            int g = (int) ((green + m) * 255);
            int b = (int) ((blue + m) * 255);
            return 0xff000000 + (r << 16) + (g << 8) + b;
        }
        // Standard packed ARGB -> HSV conversion.
        public static HSVColor fromRGB(int rgb){
            int r = ColorModel.getRGBdefault().getRed(rgb);
            int g = ColorModel.getRGBdefault().getGreen(rgb);
            int b = ColorModel.getRGBdefault().getBlue(rgb);
            float red = r / 255.f;
            float green = g / 255.f;
            float blue = b / 255.f;
            float cmax = Math.max(red, Math.max(blue, green));
            float cmin = Math.min(red, Math.min(blue, green));
            float delta = cmax - cmin;
            float hue, saturation, value;
            if(delta == 0){
                hue = 0;
            }else if(cmax == red){
                hue = ((green - blue) / delta) % 6;
            }else if(cmax == green){
                hue = ((blue - red) / delta) + 2;
            }else{
                hue = ((red - green) / delta) + 4;
            }
            hue *= SIXTY_DEGREES; //Multiply by 60 degrees
            if(cmax == 0) saturation = 0;
            else saturation = delta / cmax;
            value = cmax;
            return new HSVColor(hue, saturation, value);
        }
    }
    // Fail fast if loaded on a dedicated server: the texture constants above
    // require client-side assets.
    static {
        if(!Common.IS_CLIENT){
            throw new RuntimeException("This class is only meant to be used on the client!");
        }
    }
}
|
/**
 * Scans the array left to right.
 *
 * @param arr array to search
 * @param x   value to look for
 * @return the index of the first occurrence of {@code x}, or -1 if absent
 */
public static int linearSearch(int[] arr, int x)
{
    for (int idx = 0; idx < arr.length; idx++) {
        if (arr[idx] == x) {
            return idx;
        }
    }
    return -1;
}
|
<reponame>andrewvo89/tempnote
import admin = require('firebase-admin');
const firestore = admin.firestore();
/**
* A class representing Firestore.
*
* @class Firestore
*/
/**
 * A static convenience wrapper around the Firebase Admin Firestore API.
 *
 * @class Firestore
 */
class Firestore {
  /**
   * Generate a document ID for any collection without writing anything.
   * @static
   * @param {string} collectionName
   * @returns {string} a freshly generated document id
   * @memberof Firestore
   */
  static generateDocumentId = (collectionName: string): string => {
    const doc = firestore.collection(collectionName).doc();
    return doc.id;
  };
  /**
   * Create a document in a collection.
   * Fails if a document with the supplied id already exists.
   * @static
   * @param {string} collectionName
   * @param {Record<string, unknown>} data
   * @param {string} [docId] optional explicit document id
   * @return {*} {Promise<void>}
   * @memberof Firestore
   */
  static async create(
    collectionName: string,
    data: Record<string, unknown>,
    docId?: string,
  ): Promise<void> {
    const collectionRef = firestore.collection(collectionName);
    // If document id is supplied, run create() function
    if (docId) {
      const docRef = collectionRef.doc(docId);
      await docRef.create(data);
      return;
    }
    // Otherwise run add() function which generates a docId for you
    await collectionRef.add(data);
  }
  /**
   * Update a document in a collection (the document must exist).
   * @static
   * @param {string} collectionName
   * @param {string} docId
   * @param {Record<string, unknown>} data
   * @return {*} {Promise<void>}
   * @memberof Firestore
   */
  static async update(
    collectionName: string,
    docId: string,
    data: Record<string, unknown>,
  ): Promise<void> {
    const collectionRef = firestore.collection(collectionName);
    const docRef = collectionRef.doc(docId);
    await docRef.update(data);
  }
  /**
   * Set a document in a collection, optionally merging into existing data.
   * @static
   * @param {string} collectionName
   * @param {string} docId
   * @param {Record<string, unknown>} data
   * @param {boolean} merge when true, merges fields instead of overwriting
   * @return {*} {Promise<void>}
   * @memberof Firestore
   */
  static async set(
    collectionName: string,
    docId: string,
    data: Record<string, unknown>,
    merge: boolean,
  ): Promise<void> {
    const collectionRef = firestore.collection(collectionName);
    const docRef = collectionRef.doc(docId);
    await docRef.set(data, { merge });
  }
  /**
   * Get a collection reference.
   * @static
   * @param {string} collectionName
   * @return {*} {FirebaseFirestore.CollectionReference<FirebaseFirestore.DocumentData>}
   * @memberof Firestore
   */
  static get(
    collectionName: string,
  ): FirebaseFirestore.CollectionReference<FirebaseFirestore.DocumentData> {
    return firestore.collection(collectionName);
  }
  /**
   * Get a document reference.
   * @static
   * @param {string} collectionName
   * @param {string} docId
   * @return {*} {FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>}
   * @memberof Firestore
   */
  static getDocRef(
    collectionName: string, // fixed typo: parameter was named "colletionName"
    docId: string,
  ): FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData> {
    return this.get(collectionName).doc(docId);
  }
  /**
   * Delete a document from a collection.
   * @static
   * @param {string} collectionName
   * @param {string} documentId
   * @return {*} {Promise<void>}
   * @memberof Firestore
   */
  static async delete(
    collectionName: string,
    documentId: string,
  ): Promise<void> {
    await firestore.collection(collectionName).doc(documentId).delete();
  }
  /**
   * Atomically increment a numeric field in a document.
   * @static
   * @param {string} collectionName
   * @param {string} documentId
   * @param {string} fieldName
   * @param {number} amount may be negative to decrement
   * @return {*} {Promise<void>}
   * @memberof Firestore
   */
  static async increment(
    collectionName: string,
    documentId: string,
    fieldName: string,
    amount: number,
  ): Promise<void> {
    await firestore
      .collection(collectionName)
      .doc(documentId)
      .update({
        [fieldName]: admin.firestore.FieldValue.increment(amount),
      });
  }
  /**
   * Get an increment sentinel value for use inside a write payload.
   * @static
   * @param {number} amount
   * @return {*} {FirebaseFirestore.FieldValue}
   * @memberof Firestore
   */
  static incrementFieldValue(amount: number): FirebaseFirestore.FieldValue {
    return admin.firestore.FieldValue.increment(amount);
  }
  /**
   * Get a new firestore write batch.
   * @static
   * @return {*} {FirebaseFirestore.WriteBatch}
   * @memberof Firestore
   */
  static getBatch(): FirebaseFirestore.WriteBatch {
    return admin.firestore().batch();
  }
}
export default Firestore;
|
const os = require("os");
const EventEmitter = require("events");
var CPUMonitor = {
    // ----- state & default options -----
    isStarted: false,      // interval handle while running, false otherwise
    startedAt: false,      // epoch ms when start() was called
    timespan: 500,         // sampling window (ms) used to compute an average
    delay: 1000,           // time between monitor cycles (ms); must be > timespan
    alertValue: 85,        // emit an 'alert' event when cpu usage (%) reaches this
    alertDisabled: false,  // suppress 'alert' events when true
    monitor: null,         // EventEmitter handed back to the caller of start()
    // Restore every option and state field to its default.
    resetOptions: function(){
        this.timespan = 500;
        this.delay = 1000;
        this.isStarted = false;
        this.startedAt = false;
        this.monitor = null;
        this.alertValue = 85;
        this.alertDisabled = false;
    },
    // Snapshot cumulative CPU times since boot, summed over all cores.
    // lifeTime includes idleTime, so lifeTime >= idleTime always holds.
    getCurrentUsage: function(){
        let cpuLt = 0, idleLt = 0, cpus = os.cpus();
        for(var x in cpus){
            let cpu = cpus[x];
            idleLt += cpu.times.idle;
            for(var k in cpu.times){
                cpuLt += cpu.times[k];
            }
        }
        return {
            lifeTime: cpuLt,
            idleTime: idleLt
        }
    },
    // Average CPU usage (%) over `timespan` ms, computed from two snapshots.
    getAverageUsage: function(){
        return new Promise((resolve, reject) => {
            var startMeasure = this.getCurrentUsage();
            setTimeout(() => {
                var endMeasure = this.getCurrentUsage();
                var totalTime = endMeasure.lifeTime - startMeasure.lifeTime;
                var idleTime = endMeasure.idleTime - startMeasure.idleTime;
                var cpuUsage = (1 - (idleTime/totalTime)) * 100;
                resolve(cpuUsage);
            }, this.timespan)
        })
    },
    // Apply user-supplied options.
    // BUG FIX: the original used `o.x || this.x`, which silently discarded
    // legitimate falsy values (timespan: 0, alertValue: 0, alertDisabled: false).
    // Explicit undefined checks honour them.
    setOptions: function(o){
        if(o.timespan !== undefined) this.timespan = o.timespan;
        if(o.delay !== undefined) this.delay = o.delay;
        if(o.alertValue !== undefined) this.alertValue = o.alertValue;
        if(o.alertDisabled !== undefined) this.alertDisabled = o.alertDisabled;
        if(this.delay < this.timespan){
            this.resetOptions();
            throw new Error('Delay should be greater than timespan');
        }
    },
    // Begin periodic monitoring. Returns the event emitter ('update'/'alert'
    // events) and a closure reporting whether the alert threshold is exceeded.
    start: function(options){
        if(this.isStarted) throw new Error('Cpu monitoring is already running!');
        if(options) this.setOptions(options);
        // Log start time
        this.startedAt = new Date().getTime();
        var cpuLimitReached = false
        // Create a event emitter to emit various monitor events;
        this.monitor = new EventEmitter();
        this.isStarted = setInterval(async () => {
            let cpuUsage = await this.getAverageUsage();
            this.monitor.emit('update', cpuUsage);
            if(!this.alertDisabled && cpuUsage >= this.alertValue){
                cpuLimitReached = true;
                this.monitor.emit('alert', {cpuUsage, alertValue: this.alertValue});
            }else{
                cpuLimitReached = false;
            }
        }, this.delay)
        return {
            monitor: this.monitor,
            isCpuLimitReached: function(){
                return cpuLimitReached;
            }
        }
    },
    // Stop monitoring, drop all listeners and reset to defaults.
    stop: function(){
        if(!this.isStarted) throw new Error('Cpu monitoring is not active!');
        clearInterval(this.isStarted);
        this.monitor.removeAllListeners();
        this.resetOptions();
    },
    // Milliseconds since start(), or 0 when not running.
    getActiveTime: function(){
        if(!this.isStarted) return 0;
        return new Date().getTime() - this.startedAt;
    }
}
module.exports = CPUMonitor;
|
#!/bin/sh
# Kill any spawned node-stub/tunnel processes and remove the scratch directory.
# Uses the POSIX function-definition form: the `function` keyword is a bashism
# and is not valid under the #!/bin/sh shebang on many systems.
cleanup() {
    cd "$my_dir"
    if [ -n "$node_0_pid" ]; then
        kill $node_0_pid 2> /dev/null
    fi
    if [ -n "$node_1_pid" ]; then
        kill $node_1_pid 2> /dev/null
    fi
    if [ -n "$tunnel_0_pid" ]; then
        kill $tunnel_0_pid 2> /dev/null
    fi
    if [ -n "$tunnel_1_pid" ]; then
        kill $tunnel_1_pid 2> /dev/null
    fi
    rmdir "$work_dir"
}
# Abort the script (after cleanup) when the previous command failed.
# POSIX function syntax — the `function` keyword is invalid under #!/bin/sh.
check_retval() {
    if [ $? -ne 0 ]; then
        echo "FAILED"
        cleanup
        exit 1
    fi
}
# Resolve the directory this script lives in and the caller's working directory.
script_dir=$(dirname "$(readlink -e "$0")")
my_dir=$(pwd)
# Per-invocation scratch directory (named after the shell PID for uniqueness).
work_dir="/tmp/ratfist_$$"
mkdir -p "$work_dir"
check_retval
echo "BUILDING NODE STUB"
cd "${script_dir}/../node_stub"
check_retval
cargo build --release
check_retval
echo "BUILDING SERVER APP"
cd "${script_dir}/../server"
check_retval
cargo build --release --features spinner,meteo --bin ratfist_server
check_retval
cd $my_dir
check_retval
echo "STARTING SERIAL TUNNELS"
# Each socat call creates a linked PTY pair: one end for the server, one for a node stub.
socat pty,raw,echo=0,link="${work_dir}/server_serial_0" pty,raw,echo=0,link="${work_dir}/node_serial_0" & tunnel_0_pid=$!
check_retval
socat pty,raw,echo=0,link="${work_dir}/server_serial_1" pty,raw,echo=0,link="${work_dir}/node_serial_1" & tunnel_1_pid=$!
check_retval
# Give socat a moment to create the PTY links before the stubs open them.
sleep 1
echo "STARTING NODE STUBS"
RUST_LOG=ratfist_node_stub=trace "${script_dir}/../target/release/ratfist_node_stub" "${work_dir}/node_serial_0" > "${my_dir}/node_0.log" 2>&1 & node_0_pid=$!
check_retval
RUST_LOG=ratfist_node_stub=trace "${script_dir}/../target/release/ratfist_node_stub" "${work_dir}/node_serial_1" > "${my_dir}/node_1.log" 2>&1 & node_1_pid=$!
check_retval
echo "STARTING SERVER"
# Export every variable from server/.env that is not already set in the environment.
for env_var_line in $(cat ${script_dir}/../server/.env | grep -v "^#"); do
    env_var_name=$(echo $env_var_line | cut -d '=' -f 1)
    env_var_val=$(echo $env_var_line | cut -d '=' -f 2)
    set | cut -d '=' -f 1 | grep "$env_var_name"
    if [ $? -ne 0 ]; then
        export ${env_var_name}="${env_var_val}"
    fi
done
# Tear down child processes on Ctrl-C.
trap cleanup INT
SERIAL_PORT_0_PATH="${work_dir}/server_serial_0" SERIAL_PORT_1_PATH="${work_dir}/server_serial_1" "${script_dir}/../target/release/ratfist_server"
|
<filename>examples/src/App.js
import React, { Component } from "react";
import * as eva from "eva-icons";
import styled from "styled-components";
import { ToastContainer } from "react-toastify";
import "react-toastify/dist/ReactToastify.min.css";
import Body from "./components/Body/Body";
import Header from "./components/Header/Header";
import Footer from "./components/Footer/Footer";
import ANIMATION_TYPES from "./constants/animationTypes";
import { blue, light } from "./utils/colors";
// Page-level layout container: light background, generous padding,
// constrained to the viewport width.
const Wrapper = styled.div`
  padding: 2em 4em;
  background-color: ${light};
  max-width: 100vw;
`;
class App extends Component {
constructor(props) {
super(props);
this.state = {
icons: [],
size: "large",
};
}
componentWillMount() {
const icons = Object.keys(eva.icons);
const newIcons = icons.map((icon) => ({ name: icon }));
this.setState({ icons: newIcons });
}
handleSize = (size) => this.setState({ size });
render() {
const { icons, size } = this.state;
const config = {
fill: blue,
size,
animation: {
type: ANIMATION_TYPES.SHAKE,
hover: true,
infinite: false,
},
};
return (
<Wrapper>
<Header />
<Body icons={icons} config={config} />
<Footer />
<ToastContainer autoClose={2000} hideProgressBar closeButton={false} />
</Wrapper>
);
}
}
export default App;
|
<filename>SCRIPTS/space_rp.sql
REM FILE NAME: space_rp.sql
REM LOCATION: Object Management\Tables\Reports
REM FUNCTION: Execute SPACE procedure
REM TESTED ON: 7.3.3.5, 8.0.4.1, 8.1.5, 8.1.7, 9.0.1
REM PLATFORM: non-specific
REM REQUIRES: space.sql script to be run first
REM
REM This is a part of the Knowledge Xpert for Oracle Administration library.
REM Copyright (C) 2001 Quest Software
REM All rights reserved.
REM
REM******************** Knowledge Xpert for Oracle Administration ********************
REM
SET heading off feedback off verify off echo off
SET serveroutput on size 10000
COLUMN dbname new_value db noprint
SELECT NAME dbname
FROM v$database;
SPOOL rep_out/space_rp
EXECUTE space;
SPOOL off
SET heading on feedback on verify on
CLEAR columns
|
<gh_stars>0
package ovo
import (
"strconv"
"time"
)
// CreateRequest is request model for create push-to-pay transaction.
//
// NOTE(review): CreateRequest, CreateReversalRequest, VoidRequest and
// GetStatusRequest carry identical fields — consider a shared embedded type.
type CreateRequest struct {
	Amount          float64 `validate:"required,gt=0"`
	Phone           string  `validate:"e164" mod:"no_space,e164"`
	MerchantInvoice string  `validate:"required" mod:"no_space"`
	ReferenceNumber int     `validate:"required,gt=0,lt=1000000"`
	BatchNo         int     `validate:"required,gt=0,lt=1000000"`
}
// CreateReversalRequest is request model for create reversal push-to-pay transaction.
type CreateReversalRequest struct {
	Amount          float64 `validate:"required,gt=0"`
	Phone           string  `validate:"e164" mod:"no_space,e164"`
	MerchantInvoice string  `validate:"required" mod:"no_space"`
	ReferenceNumber int     `validate:"required,gt=0,lt=1000000"`
	BatchNo         int     `validate:"required,gt=0,lt=1000000"`
}
// VoidRequest is request model for void push-to-pay transaction.
type VoidRequest struct {
	Amount          float64 `validate:"required,gt=0"`
	Phone           string  `validate:"e164" mod:"no_space,e164"`
	MerchantInvoice string  `validate:"required" mod:"no_space"`
	ReferenceNumber int     `validate:"required,gt=0,lt=1000000"`
	BatchNo         int     `validate:"required,gt=0,lt=1000000"`
}
// InquiryPhoneRequest is request model for inquiry phone number.
type InquiryPhoneRequest struct {
	Phone string `validate:"e164" mod:"no_space,e164"`
}
// GetStatusRequest is request model for get transaction data & status.
type GetStatusRequest struct {
	Amount          float64 `validate:"required,gt=0"`
	Phone           string  `validate:"e164" mod:"no_space,e164"`
	MerchantInvoice string  `validate:"required" mod:"no_space"`
	ReferenceNumber int     `validate:"required,gt=0,lt=1000000"`
	BatchNo         int     `validate:"required,gt=0,lt=1000000"`
}
// request is the raw JSON payload sent to the upstream OVO API; the Type and
// ProcessingCode fields select the operation.
type request struct {
	Type                   string                 `json:"type"`
	ProcessingCode         string                 `json:"processingCode"`
	Amount                 float64                `json:"amount,omitempty"`
	Date                   string                 `json:"date"`
	ReferenceNumber        string                 `json:"referenceNumber,omitempty"`
	TID                    string                 `json:"tid"`
	MID                    string                 `json:"mid"`
	MerchantID             string                 `json:"merchantId"`
	StoreCode              string                 `json:"storeCode"`
	AppSource              string                 `json:"appSource"`
	TransactionRequestData transactionRequestData `json:"transactionRequestData"`
}
// transactionRequestData is the nested payload carrying batch/phone/invoice data.
type transactionRequestData struct {
	BatchNo         string `json:"batchNo,omitempty"`
	Phone           string `json:"phone"`
	MerchantInvoice string `json:"merchantInvoice,omitempty"`
}
// prepareCreateRequest maps a CreateRequest onto the raw API payload
// (message type 0200, processing code 040000).
func (c *Client) prepareCreateRequest(req CreateRequest) request {
	now := time.Now().Format("2006-01-02 15:04:05.000")
	refNo := strconv.Itoa(req.ReferenceNumber)
	payload := request{
		Type:            "0200",
		ProcessingCode:  "040000",
		Amount:          req.Amount,
		Date:            now,
		ReferenceNumber: refNo,
		TID:             c.tid,
		MID:             c.mid,
		MerchantID:      c.merchantID,
		StoreCode:       c.storeCode,
		AppSource:       "POS",
	}
	payload.TransactionRequestData = transactionRequestData{
		BatchNo:         strconv.Itoa(req.BatchNo),
		Phone:           req.Phone,
		MerchantInvoice: req.MerchantInvoice,
	}
	return payload
}
// prepareCreateReversalRequest maps a CreateReversalRequest onto the raw API
// payload (message type 0400, processing code 040000).
func (c *Client) prepareCreateReversalRequest(req CreateReversalRequest) request {
	now := time.Now().Format("2006-01-02 15:04:05.000")
	refNo := strconv.Itoa(req.ReferenceNumber)
	payload := request{
		Type:            "0400",
		ProcessingCode:  "040000",
		Amount:          req.Amount,
		Date:            now,
		ReferenceNumber: refNo,
		TID:             c.tid,
		MID:             c.mid,
		MerchantID:      c.merchantID,
		StoreCode:       c.storeCode,
		AppSource:       "POS",
	}
	payload.TransactionRequestData = transactionRequestData{
		BatchNo:         strconv.Itoa(req.BatchNo),
		Phone:           req.Phone,
		MerchantInvoice: req.MerchantInvoice,
	}
	return payload
}
// prepareVoidRequest maps a VoidRequest onto the raw API payload
// (message type 0200, processing code 020040).
func (c *Client) prepareVoidRequest(req VoidRequest) request {
	now := time.Now().Format("2006-01-02 15:04:05.000")
	refNo := strconv.Itoa(req.ReferenceNumber)
	payload := request{
		Type:            "0200",
		ProcessingCode:  "020040",
		Amount:          req.Amount,
		Date:            now,
		ReferenceNumber: refNo,
		TID:             c.tid,
		MID:             c.mid,
		MerchantID:      c.merchantID,
		StoreCode:       c.storeCode,
		AppSource:       "POS",
	}
	payload.TransactionRequestData = transactionRequestData{
		BatchNo:         strconv.Itoa(req.BatchNo),
		Phone:           req.Phone,
		MerchantInvoice: req.MerchantInvoice,
	}
	return payload
}
// prepareInquiryPhoneRequest maps an InquiryPhoneRequest onto the raw API
// payload (message type 0100, processing code 050000); only the phone number
// is carried in the nested data.
func (c *Client) prepareInquiryPhoneRequest(req InquiryPhoneRequest) request {
	now := time.Now().Format("2006-01-02 15:04:05.000")
	payload := request{
		Type:           "0100",
		ProcessingCode: "050000",
		Date:           now,
		TID:            c.tid,
		MID:            c.mid,
		MerchantID:     c.merchantID,
		StoreCode:      c.storeCode,
		AppSource:      "POS",
	}
	payload.TransactionRequestData = transactionRequestData{
		Phone: req.Phone,
	}
	return payload
}
// prepareGetStatusRequest maps a GetStatusRequest onto the raw API payload
// (message type 0100, processing code 040000).
func (c *Client) prepareGetStatusRequest(req GetStatusRequest) request {
	now := time.Now().Format("2006-01-02 15:04:05.000")
	refNo := strconv.Itoa(req.ReferenceNumber)
	payload := request{
		Type:            "0100",
		ProcessingCode:  "040000",
		Amount:          req.Amount,
		Date:            now,
		ReferenceNumber: refNo,
		TID:             c.tid,
		MID:             c.mid,
		MerchantID:      c.merchantID,
		StoreCode:       c.storeCode,
		AppSource:       "POS",
	}
	payload.TransactionRequestData = transactionRequestData{
		BatchNo:         strconv.Itoa(req.BatchNo),
		Phone:           req.Phone,
		MerchantInvoice: req.MerchantInvoice,
	}
	return payload
}
|
<filename>Test-Peers/peer2.py
import socket
import threading
import errno
import time
import glob
import os
import hashlib
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import Queue
import struct
from Tkinter import *
import math
import tkMessageBox
from random import randint
import random
import sqlite3
#**************************************Global functions**********************************************************
# Try to change the working directory into the peer's folder.
def go_peer_dictionary():
    # `dictionary` is a module-level global holding the peer folder name.
    # If we are already on the Desktop, chdir by folder name alone;
    # otherwise build the absolute Desktop path. Windows-only (%userprofile%).
    global dictionary
    if os.path.join(os.path.expandvars("%userprofile%"),"Desktop")==os.getcwd():
        os.chdir(dictionary)
    else:
        os.chdir(os.path.join(os.path.expandvars("%userprofile%"),"Desktop",dictionary))
# Does the database file already exist?
def open_db(database_name):
    # Move into the peer folder first
    go_peer_dictionary()
    # Check whether the database file exists in the peer folder, so the
    # schema is not re-created on subsequent runs.
    if os.path.isfile(database_name):
        return True
    return False
# Open (or create on first use) the SQLite database inside the peer folder.
# Returns (cursor, connection).
# NOTE(review): if sqlite3.Error is raised in the else-branch, `cur`/`con`
# may be unbound at the final return and a NameError follows — confirm intent.
def create_database(database_name):
    if open_db(database_name):
        # Database file exists: just connect and grab a cursor.
        con = sqlite3.connect(database_name,check_same_thread = False)
        with con:
            cur = con.cursor()
    else:
        try:
            con = sqlite3.connect(database_name,check_same_thread = False)
            print "yok"
            with con:
                cur = con.cursor()
                # First run: create the md5sums and chunk_numbers tables.
                cur.execute("CREATE TABLE [md5sums] ([md5_id] INTEGER PRIMARY KEY NULL,[md5sum] TEXT NULL,[file_name] TEXT NULL,[chunk_number_size] INTEGER NULL)")
                cur.execute("CREATE TABLE chunk_numbers (def_id INTEGER PRIMARY KEY, chunk_number INTEGER,"
                "md5_def INTEGER, FOREIGN KEY(md5_def) REFERENCES md5sums(md5_id))")
        except sqlite3.Error:
            print "Error open db.\n"
    return cur,con
# Build {md5sum: [chunk-number rows]} for every file tracked in the database.
def get_mdssum_chunklist():
    """Populate and return the global `liste_chunk` mapping.

    BUG FIX: the original created a single list object (`chk_nbr=[]`) and
    assigned that same object to every key, so every md5sum aliased one
    shared chunk list and received every file's chunk numbers. Each key now
    gets its own fresh list.
    """
    global liste_chunk
    cur,con=create_database(database_name)
    cur.execute("SELECT md5sum FROM md5sums")
    data=cur.fetchall()
    if len(data)!=0:
        for item in data:
            # A distinct list per md5sum (was a shared list object).
            liste_chunk[item[0]] = []
        for item_chunk in liste_chunk:
            md5_id=get_md5_id(item_chunk,cur )
            for item_chunks in get_chunk_number(md5_id,cur):
                liste_chunk[item_chunk].append(item_chunks)
    con.close()
    return liste_chunk
# Fetch the md5sums row id for a given md5sum.
def get_md5_id(file_md5sum,cur):
    """Return the `md5_id` of the md5sums row whose md5sum matches.

    Uses a parameterized query: the original interpolated the value into the
    SQL string with %, which was vulnerable to SQL injection.
    """
    cur.execute("SELECT md5_id FROM md5sums WHERE md5sum=?", (file_md5sum,))
    data=cur.fetchall()
    return data[0][0]
# Fetch the downloaded chunk numbers recorded for a given md5sums row.
def get_chunk_number(md5_id,cur):
    """Return all (chunk_number,) rows whose md5_def references `md5_id`."""
    result = cur.execute("SELECT chunk_number FROM chunk_numbers WHERE md5_def=?", (md5_id,))
    return result.fetchall()
#************************************************ DATABASE LAYER ************************************************
class Database():
    """Thin wrapper around the peer's SQLite database.

    All queries are now parameterized: the original built several statements
    by %-interpolating user-controlled md5sums into the SQL text, which was
    vulnerable to SQL injection. Return values and method signatures are
    unchanged.
    """
    def __init__(self):
        # Database file created/opened inside the peer folder.
        self.database_name='torrent_db2.db'
    # Establish the database connection and cache (cursor, connection).
    def connect(self):
        self.cur,self.con =create_database(self.database_name)
        return self.cur,self.con
    # Register a new download: md5sum, file name and total chunk count.
    def insert_data_md5(self,file_md5sum,file_name,chunk_number_size):
        self.cur.execute("""INSERT INTO md5sums VALUES(NULL, ?,?,?)""",(file_md5sum,file_name,chunk_number_size))
        self.con.commit()
        return "Suc"
    # Record that a chunk of the given file has been received.
    def insert_data_chunk_number(self,chunk_number,file_md5sum):
        self.cur.execute("""INSERT INTO chunk_numbers VALUES(NULL, ?,?)""",(int(chunk_number), self.get_md5sum_id(file_md5sum)))
        self.con.commit()
        return "Suc"
    # Primary key of the md5sums row for the given md5sum.
    def get_md5sum_id(self,file_md5sum):
        self.cur.execute("SELECT md5_id from md5sums where md5sum=?", (file_md5sum,))
        data=self.cur.fetchall()
        return data[0][0]
    # File name stored for the given md5sum.
    def get_file_name(self,file_md5sum):
        self.cur.execute("SELECT file_name from md5sums where md5sum=?", (file_md5sum,))
        data=self.cur.fetchall()
        return data[0][0]
    # Total chunk count for the given md5sum ("Q" when the row is malformed).
    def get_chunk_number_size(self,file_md5sum):
        self.cur.execute("SELECT chunk_number_size from md5sums where md5sum=?", (file_md5sum,))
        data=self.cur.fetchall()
        if len(data[0])==1:
            return data[0][0]
        return "Q"
    # How many chunks of this file have been downloaded so far.
    def get_chunk_number_list(self,file_md5sum):
        self.cur.execute("SELECT count(*) FROM chunk_numbers AS c JOIN md5sums AS md5 ON c.md5_def=md5.md5_id where md5.md5sum=?", (file_md5sum,))
        data=self.cur.fetchone()[0]
        return int(data)
    # (file_name, md5sum) pairs for every tracked download.
    def get_list_file_name(self):
        self.cur.execute("SELECT file_name,md5sum from md5sums " )
        data=self.cur.fetchall()
        return data
    # True if the md5sum is present in the md5sum_list table.
    def check_md5sum_number_md5list(self,file_md5sum):
        self.cur.execute(" SELECT id from md5sum_list where md5=?", (file_md5sum,))
        data=self.cur.fetchall()
        if len(data)==0:
            return False
        else:
            return True
    # True if the md5sum is present in the md5sums table.
    def check_md5sum_number(self,file_md5sum):
        self.cur.execute(" SELECT md5_id from md5sums where md5sum=?", (file_md5sum,))
        data=self.cur.fetchall()
        if len(data)==0:
            return False
        else:
            return True
    # True if the given chunk of the given file is already recorded.
    def check_chunk_number(self,chunk_number,file_md5sum):
        chunk_number=int(chunk_number)
        t=(file_md5sum,chunk_number,)
        self.cur.execute("SELECT c.def_id FROM chunk_numbers AS c JOIN md5sums AS md5 ON c.md5_def=md5.md5_id where md5.md5sum=? and c.chunk_number=?",t )
        data=self.cur.fetchall()
        if len(data)==0:
            return False
        else:
            return True
    # After a download completes, remove the file's row and all of its chunks.
    def delete_md5sum_number(self,file_md5sum):
        md5_id=self.get_md5sum_id(file_md5sum)
        t=(md5_id,)
        self.cur.execute("DELETE FROM md5sums WHERE md5sum=?", (file_md5sum,))
        self.cur.execute("DELETE FROM chunk_numbers WHERE md5_def=? ",t)
        self.con.commit()
#*******************************************Peerin server tarafı**********************************************
class ServerThread(threading.Thread):
    """Listening side of the peer.

    Binds to (peer_ip, peer_port), accepts inbound connections forever and
    hands each accepted socket to its own ServerReadThread.
    """
    def __init__(self, name, peer_ip, peer_port):
        threading.Thread.__init__(self)
        self.name = name
        self.peer_ip = peer_ip
        self.peer_port = peer_port
    def run(self):
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        listener.bind((self.peer_ip, self.peer_port))
        listener.listen(5)
        accepted = 0
        while True:
            client_sock, client_addr = listener.accept()
            accepted += 1
            # One reader thread per accepted connection.
            worker = ServerReadThread("ServerReadThread" + str(accepted), client_sock, client_addr)
            worker.start()
#******************************************Peerin server tarafı okuması**************************************
class ServerReadThread (threading.Thread):
    """Server-side handler for one accepted peer connection.

    Reads fixed-prefix protocol commands (HELLO, CLOSE, REGME, GETNL,
    FINDF, FINDM, FINDC, GETCH, "NLIST BEGIN", ...) and answers on the
    same socket. The payload, when present, follows after one space.
    """
    def __init__(self, name, cSocket, address):
        threading.Thread.__init__(self)
        self.name = name
        self.cSocket = cSocket
        self.address = address
        self.peer_ip=""
        self.peer_port=""
        self.connect_point={}
        # Set by parser() to signal run() which command sequence completed.
        self.check_command_nummber=""
        self.msg_list=""
        self.file_name_list=""
        self.user_add=""
        self.running=True
    def parser(self,data) :
        """Dispatch one incoming protocol message and send the reply."""
        global connect_point_list
        global connect_point_temp_list
        global CHUNK_SIZE
        global dictionary
        global liste_chunk
        global md5_folder
        if data[0:5]=="HELLO" :
            response = "SALUT"
            self.cSocket.send(response)
        elif data[0:5]=="CLOSE":
            response = "BUBYE"
            self.cSocket.send(response)
            self.cSocket.close()
        elif data[0:5]=="REGME":
            # Registration request; payload is "ip:port".
            self.peer_ip=str(data[6:data.index(":")])
            self.peer_port=data[data.index(":")+1:]
            if self.peer_ip+str(self.peer_port) in connect_point_temp_list.keys():
                # Peer is already known: just acknowledge.
                response = "REGOK"
                try:
                    self.cSocket.send(response)
                except socket.error:
                    self.cSocket.close()
                self.check_command_nummber=2
            else:
                # New peer: record it with a registration timestamp.
                t=time.ctime()
                self.connect_point["time"]=t
                self.connect_point["peer_port"]= self.peer_port
                self.connect_point["peer_ip"]=self.peer_ip
                self.user_add=self.peer_ip+":"+str(self.peer_port)+":"+str(t)
                connect_point_list.append(self.user_add)
                connect_point_temp_list[self.peer_ip+str(self.peer_port)]="True"
                try:
                    self.cSocket.send("REGWA")
                except socket.error:
                    self.cSocket.close()
                time.sleep(0.4)
                try:
                    self.cSocket.send("REGOK "+str(t))
                except socket.error:
                    self.cSocket.close()
                self.check_command_nummber=1
                self.cSocket.close()
                print(connect_point_list)
        elif data[0:5]=="GETNL":
            # Peer-list request; "GETNL 5" (any payload) caps the answer at 5.
            if len(data)>5:
                if len(connect_point_list)>5:
                    size_list=5
                else:
                    size_list=len(connect_point_list)
            else:
                size_list=len(connect_point_list)
            for user in connect_point_list[0: size_list] :
                self.msg_list=self.msg_list+user+"\n"
            self.cSocket.send("NLIST BEGIN"+"\n"+self.msg_list+"NLIST END")
            self.cSocket.close()
            print("NLIST BEGIN"+"\n"+self.msg_list+"NLIST END")
            print(self.msg_list)
            print("Listeyi gönderdim")
        elif data[0:11]=="NLIST BEGIN":
            # Received a full peer list: replace our local copy.
            print("GELEN"+data)
            connect_point_list=[]
            connect_point_list=data.split("\n")
            connect_point_list=connect_point_list[1:-1]
            print(connect_point_list)
            connect_point_temp_list.clear()
            for i in connect_point_list:
                tempdiz=i.split(":")
                connect_point_temp_list[tempdiz[0]+tempdiz[1]]="True"
            self.cSocket.close()
            print("Guncel listeyi aldım")
        elif data[0:5]=="FINDF":
            # File-name search; matches on the base name (before the dot).
            flag=False
            file_name=data[6:].split(".")
            # Move into the peer's shared-files directory
            go_peer_dictionary()
            for file_t in glob.glob("*.*"):
                if file_name[0] in file_t:
                    flag=True
            if flag:
                # Reply lines are "name:md5:size".
                for file_t in glob.glob(file_name[0]+".*"):
                    self.file_name_list=self.file_name_list+file_t+":"+str(self.md5_func(str(file_t)))+":"+str(os.path.getsize(str(file_t)))+"\n"
                self.cSocket.send("NAMEY BEGIN"+"\n"+self.file_name_list+"NAMEY END")
                self.cSocket.close()
            else:
                self.cSocket.send("NAMEN "+data[6:])
        elif data[0:5]=="FINDM":
            md5sum_file=data[6:]
            # The whole file may already be downloaded
            if md5sum_file in md5_folder:
                self.cSocket.send("MSUMY "+md5sum_file)
            # The file may still be downloading
            elif md5sum_file in liste_chunk.keys():
                self.cSocket.send("MSUMY "+md5sum_file)
            # File not present
            else:
                try:
                    self.cSocket.send("MSUMN "+md5sum_file)
                    self.cSocket.close()
                    self.stop()
                except socket.error:
                    self.cSocket.close()
        elif data[0:5]=="FINDC":
            # Chunk availability query; payload is "md5sum:chunk_number".
            print("folder")
            print (md5_folder)
            md5sum_file=data[6:data.index(":")]
            chunk_number=data[data.index(":")+1:]
            # The whole file may already be downloaded
            if md5sum_file in md5_folder:
                self.cSocket.send("CHNKY "+data[6:])
            # The file may still be downloading
            elif chunk_number in liste_chunk[md5sum_file]:
                try:
                    self.cSocket.send("CHNKY "+data[6:])
                except socket.error:
                    print("")
            # File not present
            else:
                try:
                    self.cSocket.send("CHNKN "+data[6:])
                    self.cSocket.close()
                    self.stop()
                except socket.error:
                    self.cSocket.close()
        elif data[0:5]=="GETCH":
            # Chunk download request: send a "CHUNK ..." header, pause,
            # then send the raw chunk bytes.
            md5sum_file=data[6:data.index(":")]
            chunk_number=data[data.index(":")+1:]
            md5={}
            # Move into the peer's shared-files directory
            go_peer_dictionary()
            for file in glob.glob("*.*"):
                md5[self.md5_func(file)]=file
            # The whole file may already be downloaded
            if md5sum_file in md5:
                file_nam=md5[md5sum_file]
                self.cSocket.send("CHUNK "+data[6:])
                time.sleep(0.5)
                self.cSocket.send(self.chunk_read_file(file_nam,chunk_number))
            # The file may still be downloading
            elif chunk_number in liste_chunk[md5sum_file]:
                self.cSocket.send("CHUNK "+data[6:])
                time.sleep(0.5)
                self.cSocket.send(self.chunk_read(md5sum_file,chunk_number))
            # File not present
            else:
                try:
                    self.cSocket.send("CHNKN "+data[6:])
                    self.cSocket.close()
                except socket.error:
                    self.cSocket.close()
        else:
            # Unknown command: reply with the error marker.
            response = "REGER"
            try:
                self.cSocket.send(response)
                self.cSocket.close()
            except socket.error:
                self.cSocket.close()
    def stop(self):
        """Ask the run() read loop to finish after the current iteration."""
        self.running = False
    # Function used to compute the md5 of a downloaded file
    def md5_func(self,file_name):
        """Return the hex md5 digest of `file_name`, read in 8 KiB blocks."""
        md5 = hashlib.md5()
        with open(file_name,'rb') as filb:
            for ch in iter(lambda: filb.read(8192), b''):
                md5.update(ch)
        filb.close()
        return str(md5.hexdigest())
    # Function used to read one chunk from a partially downloaded file
    def chunk_read(self,file_md5sum,chunk_number):
        """Read 1-based chunk `chunk_number` from the hidden .<md5>.chunk file."""
        # Move into the peer's shared-files directory
        go_peer_dictionary()
        # read bytes from the file
        with open("."+file_md5sum+"."+"chunk","rb") as filr:
            filr.seek((int(chunk_number)-1)*int(CHUNK_SIZE))
            data=filr.read(int(CHUNK_SIZE))
        filr.close()
        return data
    # Function used to read one chunk from a fully downloaded file
    def chunk_read_file(self,file_name,chunk_number):
        """Read 1-based chunk `chunk_number` from a completed file."""
        # Move into the peer's shared-files directory
        go_peer_dictionary()
        # read bytes from the file
        with open(file_name,"rb") as fila:
            fila.seek((int(chunk_number)-1)*int(CHUNK_SIZE))
            data=fila.read(int(CHUNK_SIZE))
        fila.close()
        return data
    def run(self):
        """Read loop: receive messages and route them through parser()
        until the handled command sequence signals completion."""
        while self.running:
            try:
                incoming_data=self.cSocket.recv(8192)
            except socket.error ,e:
                err=e.args[0]
                if err == errno.EAGAIN or err == errno.EWOULDBLOCK:
                    time.sleep(1)
                    print 'No data available'
                    continue
            self.parser(incoming_data)
            if incoming_data=="CLOSE":
                break
            if incoming_data[0:5]=="REGME":
                if self.check_command_nummber==1:
                    break
            elif incoming_data[0:5]=="GETNL":
                if self.check_command_nummber==2:
                    break
            elif incoming_data[0:5]=="FINDF":
                if self.check_command_nummber==2:
                    break
            elif incoming_data[0:5]=="GETCH":
                break
            elif incoming_data[0:5]=="REGER":
                break
            elif incoming_data[0:11]=="NLIST BEGIN":
                break
#----------------------------------------------------------------Clientın okuma threadi----------------------------------------
class ClientReadThread (threading.Thread):
    """Client-side reader for one outbound connection.

    Handles the replies this peer receives after contacting the negotiator
    or another peer: registration acks, peer lists, file/chunk search
    results, and the two-part CHUNK header + raw-bytes payload.
    """
    def __init__(self, name, cSocket,list_queue):
        threading.Thread.__init__(self)
        self.name = name
        self.cSocket = cSocket
        self.file_name_list=""
        self.list_queue=list_queue
        self.count=0
        self.connect_point_temp={}
        self.chk_nbr=""
        self.flag_random=True
        self.count2=0
        # True after a "CHUNK md5:n" header; the next payload is raw bytes.
        self.flag_chunk=False
        self.file_name=""
        self.chunk_number=""
        self.file_mdsum=""
    def parser(self,data):
        """Dispatch one reply from the remote peer/negotiator."""
        global command
        global connect_point_list
        global peer_host
        global peer_port
        global count
        global file_name_list
        global dictionary
        global connect_point_temp_list
        global liste_chunk
        if data[0:11]=="NAMEY BEGIN":
            # Search hits ("name:md5:size" lines): forward them to the UI queue.
            self.file_name_list=data.split("\n")
            self.file_name_list=self.file_name_list[1:-1]
            print(self.file_name_list)
            file_name_list=self.file_name_list
            self.list_queue.put(self.file_name_list)
            self.cSocket.close()
        elif data[0:5]=="NAMEN":
            self.cSocket.close()
            print(data)
        elif data[0:5]=="REGWA":
            print("REGWA")
        elif data[0:5]=="MSUMY":
            # Remote peer has the file: pick a chunk number we do not own yet.
            # Loop guards against repeating the same random chunk number.
            while self.flag_random:
                database=Database()
                cur,con=database.connect()
                if data[6:] in liste_chunk.keys() :
                    valeur=database.get_chunk_number_size(data[6:])
                    # The "Q" sentinel guards against a database error
                    if valeur!="Q":
                        self.chk_nbr=str( random.randint(1, valeur))
                        # Check against the DB whether this chunk was already fetched
                        if self.chk_nbr in liste_chunk[data[6:]] :
                            self.flag_random=True
                        else:
                            self.flag_random=False
                            print("Random:"+self.chk_nbr)
                            print("FINDC "+str(data[6:])+":"+str(self.chk_nbr))
                            self.cSocket.send("FINDC "+str(data[6:])+":"+str(self.chk_nbr))
                else:
                    self.flag_random=False
        elif data[0:5]=="MSUMN":
            self.cSocket.close()
            print(data)
        elif data[0:5]=="CHNKY":
            print(data)
            self.cSocket.send("GETCH "+data[6:])
        elif data[0:5]=="CHNKN":
            self.cSocket.close()
            print(data)
        elif data[0:5]=="CHUNK":
            # Header preceding the raw chunk bytes: remember which chunk follows.
            self.flag_chunk=True
            chunk_data=data[6:].split(":")
            self.chunk_number=chunk_data[1]
            self.file_mdsum=chunk_data[0]
            print(chunk_data)
        # Once the flag is set, the next payload handed to parser() lands here:
        # `data` is the raw chunk bytes announced by the previous CHUNK header.
        elif self.flag_chunk :
            database=Database()
            cur,con=database.connect()
            if database.check_md5sum_number(self.file_mdsum) :
                chunks_numbers=database.get_chunk_number_size(self.file_mdsum)
                if database.check_chunk_number(self.chunk_number,self.file_mdsum):
                    con.close()
                    self.cSocket.close()
                else:
                    database.insert_data_chunk_number(self.chunk_number,self.file_mdsum)
                    print(self.chunk_write(self.file_mdsum,self.chunk_number,data))
                    liste_chunk[self.file_mdsum].append(self.chunk_number)
                    self.cSocket.close()
                # Check whether the downloaded chunk count now equals the total
                if database.get_chunk_number_list(self.file_mdsum)==chunks_numbers:
                    print(database.get_chunk_number_list(self.file_mdsum))
                    file_name=database.get_file_name(self.file_mdsum)
                    self.rename_file(file_name,self.file_mdsum)
                    print(file_name)
                    # Everything about the finished file is removed from the database
                    database.delete_md5sum_number(self.file_mdsum)
                    del liste_chunk[self.file_mdsum]
                    print("Dosya indirildi")
        elif data[0:5]=="REGOK":
            if len(data)>6:
                if count==0:
                    # First registration at the negotiator succeeded:
                    # reconnect and request the peer list.
                    print("REGOK")
                    self.cSocket.close()
                    self.check_comman=0
                    time.sleep(1)
                    # IP and port of the negotiator
                    server_host="127.0.0.1"
                    server_port=12345
                    # The peer's client first connects to the negotiator
                    se=socket.socket()
                    se.connect((server_host,server_port))
                    se.send("REGME "+peer_host+":"+str(peer_port))
                    command="GETNL"
                    time.sleep(0.3)
                    count=1
                    # Reader thread for the peer's client side
                    client_read=ClientReadThread("PeerServerReadThread",se,self.list_queue)
                    client_read.start()
                elif command[0:5]=="FINDF":
                    # Broadcast the pending file search to every known peer.
                    for connection in connect_point_list:
                        connection=connection.split(":")
                        per_ip=connection[0]
                        per_port=connection[1]
                        if per_port!=str(peer_port):
                            s=socket.socket()
                            s.connect((per_ip,int(per_port)))
                            time.sleep(0.3)
                            s.send("REGME "+peer_host+":"+str(peer_port))
                            # Reader thread for the peer's client side
                            client_read=ClientReadThread("PeerServerReadThread",s,self.list_queue)
                            client_read.start()
                else:
                    print(command)
                    try:
                        self.cSocket.send(command)
                    except socket.error:
                        self.cSocket.close()
        elif data[0:5]=="REGER":
            print("REGER")
        elif data[0:11]=="NLIST BEGIN":
            # Fresh peer list from another node: adopt it.
            connect_point_list=data.split("\n")
            connect_point_list=connect_point_list[1:-1]
            print(connect_point_list)
            for i in connect_point_list:
                tempdiz=i.split(":")
                connect_point_temp_list[tempdiz[0]+tempdiz[1]]="True"
            self.cSocket.close()
            print("güncelipeerden aldım")
    # Function used to write chunk bytes into the placeholder file
    def chunk_write(self,file_md5sum,chunk_number,data):
        """Write `data` at the offset of 1-based `chunk_number` inside the
        hidden .<md5>.chunk placeholder file."""
        # Move into the peer's shared-files directory
        go_peer_dictionary()
        with open("."+file_md5sum+"."+"chunk","rb+") as fils:
            fils.seek((int(chunk_number)-1)*int(CHUNK_SIZE))
            fils.write(data)
        fils.close()
        return "Succesful Chunk"
    # Gives a different name to files that were downloaded more than once
    def rename_file(self,file_name,file_md5):
        """Rename .<md5>.chunk to `file_name`, appending (1), (2), ... if taken."""
        flag=True
        count=0
        while flag:
            if not os.path.isfile(file_name):
                os.rename("."+file_md5+".chunk",file_name)
                flag=False
            else:
                count+=1
                file_name=file_name[:file_name.index(".")]+"("+str(count)+")"+file_name[file_name.index("."):];
    def run(self):
        """Read replies and route them through parser(); a CHUNK header is
        followed by a second recv() that carries the raw chunk bytes."""
        while True:
            try:
                incoming_data=self.cSocket.recv(4096)
            except socket.error ,e:
                err=e.args[0]
                if err == errno.EAGAIN or err == errno.EWOULDBLOCK:
                    time.sleep(1)
                    print 'No data available'
                    continue
            if incoming_data[0:5]!="CHUNK":
                self.parser(incoming_data)
            else:
                self.parser(incoming_data)
                try:
                    self.parser(self.cSocket.recv(4096))
                    self.flag_chunk=False
                    break
                except socket.error:
                    self.cSocket.close()
            if str(incoming_data[0:11])=="NLIST BEGIN":
                break
            if str(incoming_data[0:5])=="REGOK":
                if len(incoming_data)>5:
                    break
            if incoming_data[0:11]=="NAMEY BEGIN":
                break
            if incoming_data[0:5]=="CHNKN":
                break
            if incoming_data[0:5]=="NAMEN":
                break
            if incoming_data[0:5]=="MSUMN":
                break
#Arama sonuçlarını listeler
class interface_list_show (threading.Thread):
    """Consumes search-result lists from a queue and appends each entry
    to the given Tkinter listbox. Runs until the process exits."""
    def __init__(self, name, Lb1,threadQueue):
        threading.Thread.__init__(self)
        self.name = name
        self.Lb1=Lb1
        self.list_queue = threadQueue
    def run(self):
        while True:
            # BUG FIX: the original tested `self.list_queue.qsize > 0`,
            # comparing the *method object* (missing call parentheses) with
            # an int — always truthy on Python 2 and a TypeError on Python 3.
            # A blocking get() does the intended job directly.
            msg_queu = self.list_queue.get()
            for count_item, item in enumerate(msg_queu, start=1):
                self.Lb1.insert(count_item, item)
# Thread used to download one file
class download_file_Thread (threading.Thread):
    """Repeatedly asks every known peer (FINDM <md5>) for the file until
    its md5 entry disappears from the database, i.e. the download is done.

    `down_file_name` is the "name:md5:size" string taken from the UI.
    """
    def __init__(self, name,down_file_name,list_queue):
        threading.Thread.__init__(self)
        self.down_file_name = down_file_name
        self.list_queue=list_queue
        self.name_file=""
        self.counter=0
    def run(self):
        global connect_point_list
        global peer_host
        global peer_port
        global command
        database=Database()
        cur,con=database.connect()
        # Split "name:md5:size"; index 1 is the file's md5sum.
        self.name_file=self.down_file_name.split(":")
        while True:
            # Keep asking while the md5 is still registered as in-progress.
            if database.check_md5sum_number(self.name_file[1]):
                for user in connect_point_list:
                    user_ip_port=user.split(":")
                    if peer_port!=int(user_ip_port[1]):
                        # The peer list may still contain nodes that already
                        # left; refused connections are simply skipped.
                        try:
                            sok=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                            sok.connect((user_ip_port[0],int(user_ip_port[1])))
                            sok.send("FINDM "+self.name_file[1])
                        except socket.error as e:
                            if e.errno == errno.ECONNREFUSED:
                                sok.close()
                        self.counter+=1
                        client_read=ClientReadThread("PeerServerReadThread"+str(self.counter),sok,self.list_queue)
                        client_read.start()
                        time.sleep(1)
            else:
                break
# This thread periodically asks the other peers for the current peer list
class ClientGetUpdateList (threading.Thread):
    """Every UPDATE_INTERVAL seconds, registers with each known peer and
    requests either the first five list entries ("GETNL 5") or the full
    list ("GETNL "), chosen at random."""
    def __init__(self, name,list_queue):
        threading.Thread.__init__(self)
        self.name = name
        self.list_queue=list_queue
    def run(self):
        global command
        global UPDATE_INTERVAL
        while True:
            time.sleep(UPDATE_INTERVAL)
            # Randomly pick the capped or the full list request.
            number= random.randint(1, 2)
            if number==1:
                command="GETNL "+"5"
            else:
                command="GETNL "
            for connection in connect_point_list:
                connection=connection.split(":")
                per_ip=connection[0]
                per_port=connection[1]
                print(per_port)
                # Skip our own entry in the peer list.
                if per_port!=str(peer_port):
                    print("Güncel listeyi peer")
                    s=socket.socket()
                    s.connect((per_ip,int(per_port)))
                    s.send("REGME "+peer_host+":"+str(peer_port))
                    client_read=ClientReadThread("PeerServerReadThread",s,self.list_queue)
                    client_read.start()
# This thread maintains the md5sum list of the files that are fully
# downloaded and shared from the peer's directory
class Md5sumList (threading.Thread):
    """Refreshes the global `md5_folder` list (md5 of every shared file)
    every 25 seconds."""
    def __init__(self, name):
        threading.Thread.__init__(self)
        self.name = name
    def run(self):
        global md5_folder
        while True:
            md5_folder=self.md5_list_folder()
            print(md5_folder)
            time.sleep(25)
    def md5_list_folder(self):
        """Return the md5 digest of every file in the peer's directory."""
        md5_diz=[]
        go_peer_dictionary()
        for file in glob.glob("*.*"):
            md5_diz.append(self.md5_func(file))
        return md5_diz
    # Function used to compute the md5 of a downloaded file
    def md5_func(self,file_name):
        """Return the hex md5 digest of `file_name`, read in 8 KiB blocks."""
        md5 = hashlib.md5()
        with open(file_name,'rb') as filb:
            for ch in iter(lambda: filb.read(8192), b''):
                md5.update(ch)
        filb.close()
        return str(md5.hexdigest())
#***********************************************User Interface**************************************************
class interface:
    """Tkinter UI for the peer: a search box with a BUL (find) button,
    a listbox of search results (Lb1), and a listbox of unfinished
    downloads (Lb2) that can be resumed by clicking."""
    def __init__(self, parent,queue):
        self.list_queue=queue
        self.root=parent
        GUIFrame =Frame(parent,width= 600, height=500)
        GUIFrame.pack(expand = True, anchor = CENTER)
        self.entry = Entry(text="enter your choice",font = "Helvetica 16 bold",justify="left",width=34,bd=1)
        self.entry.place(x=20, y = 10)
        self.test = StringVar()
        self.test.set('''Arama Sonuçları''')
        self.Label3 = Label(parent, textvariable = self.test)
        self.Label3.place(x = 20, y = 40)
        self.Lb1 = Listbox(parent,width=68,height=10)
        self.Lb1.place(x=20,y=60)
        self.Lb2 = Listbox(parent,width=68,height=10)
        self.Lb2.place(x=20,y=250)
        self.test = StringVar()
        self.test.set('''Tamamlanmayan dosyalar ''')
        self.Label1 = Label(parent, textvariable = self.test)
        self.Label1.place(x = 20, y = 230)
        self.Button2 = Button(parent, text='BUL',command=self.Find_File,padx=50,pady=2)
        self.Button2.place(x= 450, y = 10)
        self.Button3 = Button(parent, text='ÇIKIŞ', command= self.exit_program,padx=50,pady=2)
        self.Button3.place(x= 450, y = 470)
        # Clicking a row in either listbox triggers a download action.
        self.Lb1.bind('<<ListboxSelect>>', self.immediately)
        self.Lb2.bind('<<ListboxSelect>>', self.immediately2)
        self.listbox_items()
        self.listbox_not_complete()
    def immediately(self,e):
        """Search-result click handler: start downloading the selection."""
        w = e.widget
        index = int(w.curselection()[0])
        value = w.get(index)
        print 'You selected item %d: "%s"' % (index, value)
        self.download_file(value)
    def immediately2(self,e):
        """Unfinished-download click handler: resume the selection."""
        w = e.widget
        index = int(w.curselection()[0])
        value = w.get(index)
        print 'You selected item %d: "%s"' % (index, value)
        self.download_file_continue(" "+":"+value)
    def download_file_continue(self,dow_file_name):
        """Resume an interrupted download after user confirmation."""
        result=tkMessageBox.askquestion("Download", "Are you sure?", icon='warning')
        if result=='yes':
            down_fil=download_file_Thread("DownThread",dow_file_name,self.list_queue)
            down_fil.start()
        else:
            print "Vazgeçti"
    def download_file(self,dow_file_name):
        """Start a fresh download: create the placeholder file and DB rows,
        then spawn the worker thread."""
        global liste_chunk
        result=tkMessageBox.askquestion("Download", "Are you sure?", icon='warning')
        if result=='yes':
            database=Database()
            cur,con=database.connect()
            # Move into the peer's shared-files directory
            go_peer_dictionary()
            # dow_file_name is "name:md5:size"
            name_file=dow_file_name.split(":")
            self.create_empty_file(name_file[1],name_file[2])
            liste_chunk[name_file[1]]=[]
            database.insert_data_md5(name_file[1],name_file[0],self.calculation_chunk_number(name_file[2]))
            print "Download"
            con.close()
            time.sleep(2)
            down_fil=download_file_Thread("DownThread",dow_file_name,self.list_queue)
            down_fil.start()
        else:
            print "Vazgeçti"
    def listbox_items(self):
        """Start the thread that feeds search results into Lb1."""
        list_box_show=interface_list_show("ListBox",self.Lb1,self.list_queue)
        list_box_show.start()
    def listbox_not_complete(self):
        """Populate Lb2 with the md5s of downloads still in progress."""
        if len(liste_chunk.keys())!=0:
            count_item=0
            for item in liste_chunk.keys():
                print(item)
                count_item+=1
                self.Lb2.insert(count_item, item)
    def calculation_chunk_number(self,file_size):
        """Return the chunk count for `file_size` bytes (ceiling division)."""
        global CHUNK_SIZE
        chunk_numbers=float(file_size)/float(CHUNK_SIZE)
        chunk_numbers=math.ceil(chunk_numbers)
        return int(chunk_numbers)
    def exit_program(self):
        """Close the window and terminate the process."""
        self.root.destroy()
        sys.exit()
    # Function used to create an empty placeholder file
    def create_empty_file(self,file_md5,file_size):
        """Create the hidden .<md5>.chunk file the chunks are written into."""
        with open("."+file_md5+"."+"chunk","wb") as file_create:
            file_create.close()
    def Find_File(self):
        """Broadcast a FINDF search for the typed name to all known peers."""
        global command
        test = self.entry.get()
        command="FINDF "+test
        for connection in connect_point_list:
            connection=connection.split(":")
            per_ip=connection[0]
            per_port=connection[1]
            print(per_port)
            if per_port!=str(peer_port):
                print("okey")
                s=socket.socket()
                s.connect((per_ip,int(per_port)))
                s.send("REGME "+peer_host+":"+str(peer_port))
                client_read=ClientReadThread("PeerServerReadThread",s,self.list_queue)
                client_read.start()
#************************************************Main*********************************************************
# Interval (seconds) at which this peer asks the others for a fresh list.
UPDATE_INTERVAL=30
# Directory holding this peer's shared files
dictionary="peer2"
# SQLite database for the chunk list and md5sum list
database_name='torrent_db2.db'
# This peer's known-users list
connect_point_list=[]
connect_point_temp_list={}
# Pending command for the client side of the peer
command=""
temp=""
# Flag used during registration at the negotiator.
count=0
# Size of each chunk in bytes.
CHUNK_SIZE=4096
# md5sums of fully-downloaded, shared files in this peer's directory
md5_folder=[]
md5_list_thread=Md5sumList("Md5sumlist")
md5_list_thread.start()
file_name_list=[]
# The chunk list is pulled from the database: for every unfinished
# download, its md5 and already-present chunk numbers are loaded into
# liste_chunk so the download can be resumed after a restart.
liste_chunk={}
liste_chunk=get_mdssum_chunklist()
print(liste_chunk)
# Queue used to hand search results to the UI
list_queue=Queue.Queue()
q=Queue.Queue()
# This peer's IP and port
peer_host="127.0.0.1"
peer_port=12332
# The negotiator's IP and port
server_host="127.0.0.1"
server_port=12345
# The server side of the peer starts listening
server_peer=ServerThread("ServerThread",peer_host,peer_port)
server_peer.start()
# The peer's client first connects to the negotiator
s=socket.socket()
s.connect((server_host,server_port))
# Right after connecting, the peer automatically sends its REGME
# registration; the GETNL request follows about two seconds later.
s.send("REGME "+peer_host+":"+str(peer_port))
# Reader for the peer's client side
client_read=ClientReadThread("PeerServerReadThread",s,list_queue)
client_read.start()
#--------------------------------------------------------------Client side of the peer-------------------------------------------------
root = Tk()
root.resizable(width=FALSE, height=FALSE)
MainFrame =interface(root,list_queue)
root.title('Torrent2')
root.mainloop()
|
package org.sonatype.nexus.repository.protop.internal;
import org.sonatype.nexus.repository.Format;
import javax.inject.Named;
import javax.inject.Singleton;
/**
 * {@link Format} implementation identifying repositories that speak the
 * protop package format.
 */
@Named(ProtopFormat.NAME)
@Singleton
public class ProtopFormat
    extends Format
{
  /** Canonical format name; also used as the component name. */
  public static final String NAME = "protop";

  public ProtopFormat() {
    super(NAME);
  }
}
|
<gh_stars>1-10
/*!
* qnfs - lib/stat.js
*
* Impl fs.Stat all APIs: http://nodejs.org/docs/latest/api/fs.html#fs_class_fs_stats
*
* stats.isFile()
* stats.isDirectory()
* stats.isBlockDevice()
* stats.isCharacterDevice()
* stats.isSymbolicLink() (only valid with fs.lstat())
* stats.isFIFO()
* stats.isSocket()
*
* Copyright(c) 2013 fengmk2 <<EMAIL>> (http://fengmk2.github.com)
* MIT Licensed
*/
"use strict";
/**
* Module dependencies.
*/
/**
 * Minimal fs.Stats-compatible wrapper built from a qiniu file-info object
 * ({ putTime, fsize }). Only regular-file semantics are supported.
 */
function Stat(info) {
  // putTime is divided by 10000 to obtain a millisecond timestamp.
  var created = new Date(info.putTime / 10000);
  this.size = info.fsize;
  this.atime = new Date();
  this.mtime = created;
  this.ctime = created;
}

// Remote objects are always treated as regular files.
Stat.prototype.isFile = function () {
  return true;
};

// All other node-type checks answer false. They deliberately share one
// function object, matching the original chained assignment.
Stat.prototype.isSocket = function () {
  return false;
};
Stat.prototype.isDirectory = Stat.prototype.isSocket;
Stat.prototype.isBlockDevice = Stat.prototype.isSocket;
Stat.prototype.isCharacterDevice = Stat.prototype.isSocket;
Stat.prototype.isSymbolicLink = Stat.prototype.isSocket;
Stat.prototype.isFIFO = Stat.prototype.isSocket;

module.exports = Stat;
|
<reponame>Goxiaoy/go-saas-kit
package authorization
import (
"context"
"github.com/go-kratos/kratos/v2/errors"
"github.com/goxiaoy/go-saas-kit/pkg/authn/jwt"
)
// Result is the outcome of an authorization check.
type Result struct {
	// Allowed reports whether access was granted.
	Allowed bool
	// Requirements lists the requirements attached to a denial
	// (populated by NewDisallowAuthorizationResult).
	Requirements []Requirement
}
// NewAllowAuthorizationResult returns a Result that grants access.
func NewAllowAuthorizationResult() Result {
	res := Result{}
	res.Allowed = true
	return res
}
// NewDisallowAuthorizationResult returns a denying Result that carries the
// requirements which were not satisfied.
func NewDisallowAuthorizationResult(requirements []Requirement) Result {
	res := Result{Requirements: requirements}
	res.Allowed = false
	return res
}
// FormatError converts a failed authorization Result into a transport error:
// Forbidden when the caller is authenticated (JWT claims present in ctx),
// Unauthorized otherwise. A successful Result yields nil.
func FormatError(ctx context.Context, result Result) error {
	if result.Allowed {
		return nil
	}
	if _, authenticated := jwt.FromClaimsContext(ctx); authenticated {
		//TODO format error
		return errors.Forbidden("", "")
	}
	// no claims present in the context
	return errors.Unauthorized("", "")
}
|
#!/bin/bash
# One-shot container init: start MySQL, create the application database
# and its Doctrine schema, then shut the server down again.

# Start MySQL in the background via the safe wrapper; output is discarded.
/usr/bin/mysqld_safe > /dev/null 2>&1 &

# Poll until the server accepts connections (the "status" query succeeds).
RET=1
while [[ RET -ne 0 ]]; do
echo "=> Waiting for confirmation of MySQL service startup"
sleep 5
mysql -uroot -e "status" > /dev/null 2>&1
RET=$?
done

# Create the application database.
mysql -uroot -e "CREATE DATABASE scrum_online"
# Create database schema
./vendor/bin/doctrine orm:schema-tool:create
./vendor/bin/doctrine orm:generate-proxies

# Stop the server; the image entrypoint starts it again normally.
mysqladmin shutdown
|
<filename>options/train_options.py
from .base_options import BaseOptions, boolstr
class TrainOptions(BaseOptions):
    """Training-specific command-line options, layered on top of BaseOptions."""
    def __init__(self):
        # BUG FIX: the original called super(BaseOptions).__init__(), which
        # creates an *unbound* super object of the wrong class and never runs
        # BaseOptions.__init__ on this instance.
        super(TrainOptions, self).__init__()

    def get_arguments(self, parser):
        """Register all training arguments on `parser` and return it."""
        parser = BaseOptions.get_arguments(self, parser)

        parser.add_argument('--epochs', type=int, help='number of total epochs to run', default=30)
        parser.add_argument('--learning_rate', type=float, help='initial learning rate (default: 1e-4)', default=1e-4)
        parser.add_argument('--adjust_lr', type=boolstr, help='apply learning rate decay or not', default=True)
        parser.add_argument('--lr_mode', type=str, help='Which learning rate scheduler [plateau|polynomial|step]', default='plateau')
        parser.add_argument('--optimizer', type=str, help='Optimizer to use [adam|sgd|rmsprop]', default='adam')
        parser.add_argument('--batch_size', type=int, help='mini-batch size (default: 8)', default=8)

        # Loss component weights.
        parser.add_argument('--img_loss_l1_w', type=float, help='Weight of the L1 loss component', default=0.25)
        parser.add_argument('--img_loss_ssim_w', type=float, help='Weight of the SSIM loss component', default=0.25)
        parser.add_argument('--lr_loss_w', type=float, help='left-right consistency weight', default=0.25)
        parser.add_argument('--disp_grad_loss_w', type=float, help='disparity smoothness loss weight', default=0.25)

        # Specific to CoV-Weighting
        parser.add_argument('--mean_sort', type=str, help='full or decay', default='full')
        parser.add_argument('--mean_decay_param', type=float, help='What decay to use with mean decay', default=1.0)

        # Specific to GradNorm
        parser.add_argument('--init_gamma', type=float, help='which alpha to start', default=1.5)

        # Other params
        parser.add_argument('--do_augmentation', type=boolstr, help='do augmentation of images or not', default=True)
        # NOTE(review): default is a list but type=str, so a value supplied on
        # the command line arrives as a single string — confirm intended usage.
        parser.add_argument('--augment_parameters', type=str, help='lowest and highest values for gamma, brightness and color respectively',
                            default=[0.8, 1.2, 0.8, 1.2, 0.8, 1.2])
        parser.add_argument('--norm_layer', type=str, help='defines if a normalization layer is used', default='')
        parser.add_argument('--train_ratio', type=float, help='How much of the training data to use', default=1.0)

        return parser
|
// Copyright 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal.services;
import org.apache.tapestry5.internal.plastic.PlasticInternalUtils;
import org.apache.tapestry5.internal.plastic.asm.ClassWriter;
import org.apache.tapestry5.internal.plastic.asm.MethodVisitor;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.UUID;
import static org.apache.tapestry5.internal.plastic.asm.Opcodes.*;
/**
 * Test helper that generates synthetic classes with ASM and writes them to a
 * temporary classpath directory, so class-loading/reloading behavior can be
 * exercised against real .class files.
 */
public class ClassCreationHelper
{
    /** Root directory into which generated .class files are written. */
    public final String tempDir;

    public ClassCreationHelper()
    {
        this(String.format("%s/tapestry-test-classpath/%s",
                System.getProperty("java.io.tmpdir"),
                UUID.randomUUID().toString()));
    }

    public ClassCreationHelper(String tempDir)
    {
        this.tempDir = tempDir;
    }

    /**
     * Writes the class bytes to {tempDir}/{internal-name}.class, creating
     * parent directories as needed.
     */
    public void writeFile(ClassWriter writer, String className) throws Exception
    {
        File classFile = toFile(className);

        classFile.getParentFile().mkdirs();

        // try-with-resources: the original leaked the stream when write() threw.
        try (OutputStream os = new BufferedOutputStream(new FileOutputStream(classFile)))
        {
            os.write(writer.toByteArray());
        }
    }

    /**
     * Creates a ClassWriter for a public class with the given super class and
     * interfaces (all names in source form, e.g. "java.lang.Object").
     */
    public ClassWriter createWriter(String className, String superClassName, String... interfaceNames)
    {
        String[] interfaceInternalNames = new String[interfaceNames.length];

        for (int i = 0; i < interfaceNames.length; i++)
        {
            interfaceInternalNames[i] = PlasticInternalUtils.toInternalName(interfaceNames[i]);
        }

        // The flags form a bit mask; '|' expresses the intent better than '+'
        // (same numeric result for these two constants).
        ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);

        cw.visit(V1_5, ACC_PUBLIC, PlasticInternalUtils.toInternalName(className), null,
                PlasticInternalUtils.toInternalName(superClassName), interfaceInternalNames);

        return cw;
    }

    /**
     * Adds a public no-args constructor that simply invokes the super-class
     * constructor.
     */
    public void implementPublicConstructor(ClassWriter cw, String superClassName)
    {
        MethodVisitor mv = cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null);
        mv.visitCode();
        mv.visitVarInsn(ALOAD, 0);
        mv.visitMethodInsn(INVOKESPECIAL, PlasticInternalUtils.toInternalName(superClassName), "<init>", "()V");
        mv.visitInsn(RETURN);
        // BUG FIX: visitMaxs() is required by ASM after the instructions even
        // when COMPUTE_MAXS is set (the arguments are then ignored); the
        // original omitted it.
        mv.visitMaxs(0, 0);
        mv.visitEnd();
    }

    /**
     * Returns the last-modified timestamp of the generated .class file.
     */
    public long readDTM(String className) throws Exception
    {
        // File.toURL() is deprecated (does not escape special characters);
        // go through the URI instead.
        URL url = toFile(className).toURI().toURL();

        return readDTM(url);
    }

    private File toFile(String className)
    {
        String path = String.format("%s/%s.class",
                tempDir,
                PlasticInternalUtils.toInternalName(className));

        return new File(path);
    }

    private long readDTM(URL url) throws Exception
    {
        URLConnection connection = url.openConnection();

        connection.connect();

        return connection.getLastModified();
    }
}
|
<reponame>allison-knauss/arkweb
from base import Base
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
from sqlalchemy.orm import relationship
from marshmallow import Schema, fields, post_load
from datetime import datetime
class BlogPost(Base):
    """ORM model for a single post belonging to a Blog (many-to-one)."""
    __tablename__ = 'blog_posts'

    id = Column(Integer, primary_key=True)
    title = Column(String)
    date = Column(DateTime)
    content = Column(String)
    # Many posts belong to one blog.
    blog_id = Column(Integer, ForeignKey('blogs.id'))
    blog = relationship("Blog", back_populates="posts")

    def __repr__(self):
        # BUG FIX: the original referenced PageSchema, which is undefined in
        # this module (NameError at runtime); serialize with BlogPostSchema.
        return str(BlogPostSchema().dumps(self).data)
class BlogPostSchema(Schema):
    """Marshmallow (de)serialization schema for BlogPost."""
    id = fields.Int()
    title = fields.String()
    date = fields.Date()
    content = fields.String()

    @post_load
    def make_blog(self, data):
        # BUG FIX: the original returned Page(**data) — an undefined name;
        # deserializing this schema must produce a BlogPost.
        return BlogPost(**data)
|
def draw_triangle(n):
    """Print a left-aligned triangle of '*' characters with n rows.

    Row k (1-based) contains k stars; each row is terminated by a
    carriage return followed by the newline that print() appends.
    """
    for row in range(1, n + 1):
        # Emit the whole row in one call instead of star-by-star.
        print("*" * row, end="")
        # Keep the original terminator: "\r" plus print's implicit "\n".
        print("\r")
# Demo: draw a 5-row triangle.
n = 5
draw_triangle(n)
# output (the original comment showed the rows incorrectly):
# *
# **
# ***
# ****
# *****
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = exports.selectUnpublishedEntriesByStatus = exports.selectUnpublishedEntry = void 0;
var _immutable = require("immutable");
var _publishModes = require("../constants/publishModes");
var _editorialWorkflow = require("../actions/editorialWorkflow");
var _config = require("../actions/config");
/**
 * Babel helper: shallow-merges every source argument into `target`,
 * including enumerable symbol keys, then returns `target`.
 */
function _objectSpread(target) {
  for (var i = 1; i < arguments.length; i++) {
    // null/undefined sources are treated as empty objects.
    var source = arguments[i] != null ? arguments[i] : {};
    var ownKeys = Object.keys(source);
    if (typeof Object.getOwnPropertySymbols === 'function') {
      // Only copy symbols that are enumerable on the source.
      ownKeys = ownKeys.concat(
        Object.getOwnPropertySymbols(source).filter(function (sym) {
          return Object.getOwnPropertyDescriptor(source, sym).enumerable;
        })
      );
    }
    ownKeys.forEach(function (key) {
      _defineProperty(target, key, source[key]);
    });
  }
  return target;
}
/**
 * Babel helper: assigns `value` to `obj[key]`. Existing keys are redefined
 * via Object.defineProperty (enumerable/configurable/writable); new keys use
 * plain assignment. Returns `obj`.
 */
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true,
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
// Redux reducer for the editorial-workflow slice (compiled output).
// State shape (Immutable.js):
//   entities: Map keyed by "collection.slug" -> entry Map
//   pages:    Map of pagination info plus an `ids` List of slugs
const unpublishedEntries = function unpublishedEntries() {
  let state = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : (0, _immutable.Map)();
  let action = arguments.length > 1 ? arguments[1] : undefined;
  switch (action.type) {
    case _config.CONFIG_SUCCESS:
      {
        const publishMode = action.payload && action.payload.get('publish_mode');
        if (publishMode === _publishModes.EDITORIAL_WORKFLOW) {
          // Editorial workflow state is explicitly initiated after the config.
          return (0, _immutable.Map)({
            entities: (0, _immutable.Map)(),
            pages: (0, _immutable.Map)()
          });
        }
        return state;
      }
    // Entity keys below are always the "collection.slug" composite.
    case _editorialWorkflow.UNPUBLISHED_ENTRY_REQUEST:
      return state.setIn(['entities', `${action.payload.collection}.${action.payload.slug}`, 'isFetching'], true);
    case _editorialWorkflow.UNPUBLISHED_ENTRY_REDIRECT:
      return state.deleteIn(['entities', `${action.payload.collection}.${action.payload.slug}`]);
    case _editorialWorkflow.UNPUBLISHED_ENTRY_SUCCESS:
      return state.setIn(['entities', `${action.payload.collection}.${action.payload.entry.slug}`], (0, _immutable.fromJS)(action.payload.entry));
    case _editorialWorkflow.UNPUBLISHED_ENTRIES_REQUEST:
      return state.setIn(['pages', 'isFetching'], true);
    case _editorialWorkflow.UNPUBLISHED_ENTRIES_SUCCESS:
      // Store every fetched entry and rebuild the pagination id list.
      return state.withMutations(map => {
        action.payload.entries.forEach(entry => map.setIn(['entities', `${entry.collection}.${entry.slug}`], (0, _immutable.fromJS)(entry).set('isFetching', false)));
        map.set('pages', (0, _immutable.Map)(_objectSpread({}, action.payload.pages, {
          ids: (0, _immutable.List)(action.payload.entries.map(entry => entry.slug))
        })));
      });
    case _editorialWorkflow.UNPUBLISHED_ENTRY_PERSIST_REQUEST:
      // Update Optimistically
      return state.withMutations(map => {
        map.setIn(['entities', `${action.payload.collection}.${action.payload.entry.get('slug')}`], (0, _immutable.fromJS)(action.payload.entry));
        map.setIn(['entities', `${action.payload.collection}.${action.payload.entry.get('slug')}`, 'isPersisting'], true);
        map.updateIn(['pages', 'ids'], (0, _immutable.List)(), list => list.push(action.payload.entry.get('slug')));
      });
    case _editorialWorkflow.UNPUBLISHED_ENTRY_PERSIST_SUCCESS:
      // Update Optimistically
      return state.deleteIn(['entities', `${action.payload.collection}.${action.payload.entry.get('slug')}`, 'isPersisting']);
    case _editorialWorkflow.UNPUBLISHED_ENTRY_STATUS_CHANGE_REQUEST:
      // Update Optimistically
      return state.withMutations(map => {
        map.setIn(['entities', `${action.payload.collection}.${action.payload.slug}`, 'metaData', 'status'], action.payload.newStatus);
        map.setIn(['entities', `${action.payload.collection}.${action.payload.slug}`, 'isUpdatingStatus'], true);
      });
    case _editorialWorkflow.UNPUBLISHED_ENTRY_STATUS_CHANGE_SUCCESS:
    case _editorialWorkflow.UNPUBLISHED_ENTRY_STATUS_CHANGE_FAILURE:
      return state.setIn(['entities', `${action.payload.collection}.${action.payload.slug}`, 'isUpdatingStatus'], false);
    case _editorialWorkflow.UNPUBLISHED_ENTRY_PUBLISH_REQUEST:
      return state.setIn(['entities', `${action.payload.collection}.${action.payload.slug}`, 'isPublishing'], true);
    case _editorialWorkflow.UNPUBLISHED_ENTRY_PUBLISH_SUCCESS:
    case _editorialWorkflow.UNPUBLISHED_ENTRY_PUBLISH_FAILURE:
      // Published (or failed) entries leave the unpublished collection.
      return state.withMutations(map => {
        map.deleteIn(['entities', `${action.payload.collection}.${action.payload.slug}`]);
      });
    case _editorialWorkflow.UNPUBLISHED_ENTRY_DELETE_SUCCESS:
      return state.deleteIn(['entities', `${action.payload.collection}.${action.payload.slug}`]);
    default:
      return state;
  }
};
// Selector: returns the unpublished entry record stored under
// "collection.slug", or the falsy `state` itself when the slice is missing.
const selectUnpublishedEntry = (state, collection, slug) => {
  if (!state) {
    return state;
  }
  return state.getIn(['entities', `${collection}.${slug}`]);
};
exports.selectUnpublishedEntry = selectUnpublishedEntry;
// Selector: all unpublished entries whose workflow status matches `status`,
// as an Immutable value sequence; null when the slice is missing.
const selectUnpublishedEntriesByStatus = (state, status) => {
  if (!state) return null;
  const matchesStatus = entry => entry.getIn(['metaData', 'status']) === status;
  return state.get('entities').filter(matchesStatus).valueSeq();
};
exports.selectUnpublishedEntriesByStatus = selectUnpublishedEntriesByStatus;
// Default export is the reducer itself; selectors are named exports.
var _default = unpublishedEntries;
exports.default = _default;
//# sourceMappingURL=editorialWorkflow.js.map
|
#!/usr/bin/python
import tkinter
import uuid
from tkinter import N, S, E, W, ttk, messagebox
class CanvasHelper(object):
    """Static drawing/query helpers for a tkinter Canvas used as a gameboard.

    All methods are @staticmethod and take the target canvas as their first
    argument; dimensions are read live via winfo_width()/winfo_height() so
    the helpers keep working after the canvas is resized.
    """

    class OnGridException(Exception):
        """Raised when a queried point lands on an item tagged 'grid'."""
        pass

    @staticmethod
    def draw_grid(canvas, colour="black", thickness=2, rows=3, cols=3, outer=False, caps=tkinter.PROJECTING, tags=()):
        """Draws a grid with the specified number of rows and columns on the
        gameboard with the specified colour and line thickness.  Each line of
        the grid is tagged with 'resize', 'grid' and anything specified in
        tags.  The line caps for each line are those specified by caps.  If
        outer is set to True then an outer border will also be drawn."""
        width = canvas.winfo_width()
        height = canvas.winfo_height()
        # Horizontal lines; the top border (r == 0) only when outer is set.
        for r in range(rows):
            if outer or 0 != r:
                y_coord = int(r * height / rows)
                canvas.create_line(
                    0,
                    y_coord,
                    width,
                    y_coord,
                    fill=colour,
                    width=thickness,
                    capstyle=caps,
                    tags=("resize", "grid") + tags)
        # Vertical lines; the left border (c == 0) only when outer is set.
        for c in range(cols):
            if outer or 0 != c:
                x_coord = int(c * width / cols)
                canvas.create_line(
                    x_coord,
                    0,
                    x_coord,
                    height,
                    fill=colour,
                    width=thickness,
                    capstyle=caps,
                    tags=("resize", "grid") + tags)
        if outer:
            # Bottom and right borders close the outer frame.
            canvas.create_line(
                0,
                height,
                width,
                height,
                fill=colour,
                width=thickness,
                capstyle=caps,
                tags=("resize", "grid") + tags)
            canvas.create_line(
                width,
                0,
                width,
                height,
                fill=colour,
                width=thickness,
                capstyle=caps,
                tags=("resize", "grid") + tags)

    @staticmethod
    def get_midpoint(canvas, row, col, totalrows=9, totalcols=9):
        """Returns a point (x, y) at the centre of the square at row, col
        in the gameboard divided into totalrows rows and totalcols columns."""
        square_nw = (
            col * canvas.winfo_width() / totalcols,
            row * canvas.winfo_height() / totalrows)
        dimensions = (
            canvas.winfo_width() / totalcols,
            canvas.winfo_height() / totalrows)
        middle = (square_nw[0] + (dimensions[0] / 2),
                  square_nw[1] + (dimensions[1] / 2))
        return middle

    @staticmethod
    def get_bbox(canvas, row, col, totalrows=9, totalcols=9, size=1):
        """Returns a bounding box (x0, y0, x1, y1) for the square at row, col
        in the gameboard divided into totalrows rows and totalcols columns,
        scaled by size with the same centre point."""
        middle = CanvasHelper.get_midpoint(canvas, row, col, totalrows, totalcols)
        dimensions = (
            canvas.winfo_width() / totalcols,
            canvas.winfo_height() / totalrows)
        bbox = (
            middle[0] - (dimensions[0] * size * 0.5),
            middle[1] - (dimensions[1] * size * 0.5),
            middle[0] + (dimensions[0] * size * 0.5),
            middle[1] + (dimensions[1] * size * 0.5)
        )
        return bbox

    @staticmethod
    def clear_square(canvas, type, x0, y0, x1, y1, exclude_tags=("grid",)):
        """Deletes all items on the gameboard either overlapping or enclosed by
        the square x0, y0, x1, y1 (determined by the type parameter, which must
        be one of 'enclosed' or 'overlapping'), except those tagged with at
        least one of the exclude_tags."""
        # Tag the doomed items with a throwaway unique tag, strip the tag off
        # anything excluded, then delete whatever still carries it.
        deltag = uuid.uuid4().hex
        if "overlapping" == type:
            canvas.addtag_overlapping(deltag, x0, y0, x1, y1)
        elif "enclosed" == type:
            canvas.addtag_enclosed(deltag, x0, y0, x1, y1)
        else:
            raise TypeError(
                "Argument 'type' must be one of: 'enclosed', 'overlapping'")
        for t in exclude_tags:
            canvas.dtag(t, deltag)
        canvas.delete(deltag)

    @staticmethod
    def clear_overlapping(canvas, x0, y0, x1, y1, exclude_tags=("grid",)):
        """Deletes all items on the gameboard overlapping the square
        x0, y0, x1, y1 except those tagged with at least one of the exclude_tags."""
        return CanvasHelper.clear_square(canvas, "overlapping", x0, y0, x1, y1, exclude_tags)

    @staticmethod
    def clear_enclosed(canvas, x0, y0, x1, y1, exclude_tags=("grid",)):
        """Deletes all items on the gameboard enclosed by the square
        x0, y0, x1, y1 except those tagged with at least one of the exclude_tags."""
        return CanvasHelper.clear_square(canvas, "enclosed", x0, y0, x1, y1, exclude_tags)

    @staticmethod
    def clear_board(canvas, exclude_tags=("grid",)):
        """Deletes all items on the gameboard except those tagged with at least
        one of the exclude_tags."""
        return CanvasHelper.clear_square(
            canvas,
            "overlapping",
            0,
            0,
            canvas.winfo_width(),
            canvas.winfo_height(),
            exclude_tags)

    @staticmethod
    def get_square(canvas, x, y, totalrows=9, totalcols=9):
        """Returns the (row, col) on the gameboard, divided into totalrows rows
        and totalcols columns, which contains (x, y). Throws
        CanvasHelper.OnGridException if (x, y) falls on an element tagged 'grid'."""
        # Bug fix: tkinter Canvas has no .height/.width attributes -- sizes
        # must be read via winfo_height()/winfo_width(), as everywhere else
        # in this class.
        row = int(y // (canvas.winfo_height() / totalrows))
        col = int(x // (canvas.winfo_width() / totalcols))
        overlapping = canvas.find_overlapping(x, y, x, y)
        if len(overlapping) > 0:
            for e in overlapping:
                tags = canvas.gettags(e)
                if "grid" in tags:
                    raise CanvasHelper.OnGridException()
        return (row, col)

    @staticmethod
    def draw_o(canvas, row, col, totalrows=9, totalcols=9, thickness=5, size=0.5, tags=()):
        """Draws an O on the canvas at row, col based on a board with
        totalrows rows and totalcols columns, with thickness as specified. The
        O will be size * the size of the box and will be tagged 'resize', 'o'
        and whatever is specified in tags."""
        bbox = CanvasHelper.get_bbox(canvas, row, col, totalrows, totalcols, size)
        canvas.create_oval(
            bbox[0],
            bbox[1],
            bbox[2],
            bbox[3],
            outline="red",
            width=thickness,
            tag=("resize", "o") + tags)

    @staticmethod
    def draw_x(canvas, row, col, totalrows=9, totalcols=9, thickness=5, size=0.5, tags=()):
        """Draws an X on the canvas at row, col based on a board with
        totalrows rows and totalcols columns, with thickness as specified. The
        X will be size * the size of the box and will be tagged 'resize', 'x'
        and whatever is specified in tags."""
        bbox = CanvasHelper.get_bbox(canvas, row, col, totalrows, totalcols, size)
        # Two diagonals of the scaled bounding box.
        canvas.create_line(
            bbox[0],
            bbox[1],
            bbox[2],
            bbox[3],
            fill="blue",
            width=thickness,
            capstyle=tkinter.ROUND,
            tag=("resize", "x") + tags)
        canvas.create_line(
            bbox[0],
            bbox[3],
            bbox[2],
            bbox[1],
            fill="blue",
            width=thickness,
            capstyle=tkinter.ROUND,
            tag=("resize", "x") + tags)

    @staticmethod
    def higlight_available_boards(canvas, available_boards):
        """Highlights each (row, col) in available_boards on the 3x3 meta-board
        with a stippled yellow rectangle tagged 'resize' and 'available',
        keeping the highlights below the grid lines."""
        canvas.delete("available")
        for b in available_boards:
            bbox = CanvasHelper.get_bbox(canvas, b[0], b[1], 3, 3)
            canvas.create_rectangle(
                bbox[0],
                bbox[1],
                bbox[2],
                bbox[3],
                fill="yellow",
                width=0,
                stipple="gray12",
                tags=("resize", "available")
            )
        canvas.tag_lower("available", "grid")
|
#ifndef _iic_h_
#define _iic_h_

#include <reg52.h>

/* Bit-banged (software) I2C bus on 8051 port 2. */
sbit SCL=P2^1;  /* serial clock line */
sbit SDA=P2^0;  /* serial data line */

/* Crude busy-wait delay of roughly x microseconds (depends on MCU clock). */
void delayus(unsigned char x);
/* Generate an I2C START condition. */
void iic_start(void);
/* Generate an I2C STOP condition. */
void iic_stop(void);
/* Drive the ACK/NACK bit after a received byte (polarity defined by the
   implementation in the .c file -- confirm before relying on it). */
void iic_ask(bit askbit);
/* Wait for the slave's acknowledge; return value semantics defined in the
   .c file -- presumably nonzero on NACK/timeout; verify. */
unsigned char iic_waitask(void);
/* Shift one byte out on the bus. */
void iic_sendbyte(unsigned char byte);
/* Shift one byte in from the bus. */
unsigned char iic_readbyte(void);
/* Write `byte` to the AT24C02 EEPROM at slave address `saddr`,
   byte address `baddr`. */
void AT2402_SendByte(unsigned char saddr, unsigned char baddr, unsigned char byte);
/* Read one byte from the AT24C02 EEPROM at slave address `saddr`,
   byte address `baddr`. */
unsigned char AT2402_ReceiveByte(unsigned char saddr,unsigned char baddr);

#endif
|
// Firebase Admin bootstrap: initialises the Admin SDK from a local
// service-account key plus the AUTH_DOMAIN environment variable, then exposes
// `auth` and `database` handles for ad-hoc admin commands (examples below).
const admin = require("firebase-admin");
const dotenv = require("dotenv");
// Load .env into process.env before AUTH_DOMAIN is read.
dotenv.config();
const serviceAccount = require("./service-account.json");
const authDomain = process.env.AUTH_DOMAIN;
admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  databaseURL: `https://${authDomain}`
});
// Handles used by the (currently commented-out) admin commands below.
const auth = admin.auth();
const database = admin.database();
// admin commands begin here
// auth.createUser({
//   displayName: "Phillmont",
//   email: "<EMAIL>",
//   emailVerified: true,
//   password: "<PASSWORD>"
// }).then(console.log).catch(console.error);
|
#!/usr/bin/env bash
#
# Copyright (c) 2021 VMware, Inc.
# SPDX-License-Identifier: BSD-2-Clause
# This script bootstraps a debian/bullseye64 Vagrant box
# to demo building containers with SBoMs

# Update repos and upgrade installed packages.
# Fix: the script runs as root during provisioning (every other command below
# assumes so), and sudo may not even be installed yet -- drop the stray
# "sudo" that used to prefix the upgrade.
apt-get update && apt-get -y upgrade

# Install system dependencies
apt-get install -y python3 python3-pip python3-venv attr buildah podman git curl debootstrap jq

# Install tern from source
git clone https://github.com/tern-tools/tern
pushd tern
python3 setup.py sdist
pip3 install dist/tern*
popd

# Install oras
curl -LO https://github.com/oras-project/oras/releases/download/v0.12.0/oras_0.12.0_linux_amd64.tar.gz
mkdir -p oras-install/
tar -zxf oras_0.12.0_*.tar.gz -C oras-install/
mv oras-install/oras /usr/local/bin/
rm -rf oras_0.12.0_*.tar.gz oras-install/

# Install cosign
curl -LO https://github.com/sigstore/cosign/releases/download/v1.1.0/cosign-linux-amd64
mv cosign-linux-amd64 /usr/local/bin/cosign
chmod +x /usr/local/bin/cosign

# Add /home/vagrant/.local/bin and /usr/sbin to $PATH
echo "export PATH=/home/vagrant/.local/bin:/usr/sbin:$PATH" >> /home/vagrant/.bashrc

# Switch to the "vfs" storage driver for buildah and podman so we can get a
# consistent mount point for this version of buildah
echo "export STORAGE_DRIVER=vfs" >> /home/vagrant/.bashrc

# Create a debian rootfs pinned to a snapshot archive for reproducibility
mkdir debian
debootstrap --variant=minbase stable debian http://snapshot.debian.org/archive/debian/20210914T205414Z
cd debian/ && tar cf ../containers-with-sboms/debian.tar .
|
<filename>lib/ketting.js
var Resource = require('./resource');
var representor = require('./representor');
var base64 = require('./utils/base64');
var oauth = require('./utils/oauth');
var fetch = require('./utils/fetch');
var url = require('./utils/url');
/**
* The main Ketting client object.
*
* @constructor
* @class
* @param {string} bookMark - Bookmark or 'base' uri.
* @param {object} options - List of options
*/
/**
 * The main Ketting client object.
 *
 * @constructor
 * @class
 * @param {string} bookMark - Bookmark or 'base' uri.
 * @param {object} options - List of options
 */
var Ketting = function(bookMark, options) {
  var opts = (typeof options === 'undefined') ? {} : options;

  // Per-client cache of Resource objects, keyed by resolved uri.
  this.resourceCache = {};

  // Supported mime-types, in order of preference, with their representors.
  this.contentTypes = [
    { mime: 'application/hal+json', representor: 'hal' },
    { mime: 'application/json', representor: 'hal' },
    { mime: 'text/html', representor: 'html' }
  ];

  if (opts.auth) {
    this.auth = opts.auth;
    // OAuth2 needs an extra helper object wired up front.
    if (opts.auth.type == 'oauth2') {
      this.auth.oauth = oauth.setupOAuthObject(this, opts.auth);
    }
  }
  if (opts.fetchInit) {
    this.fetchInit = opts.fetchInit;
  }
  this.bookMark = bookMark;
};
Ketting.prototype = {

  /**
   * Here we store all the resources that were ever requested. This will
   * ensure that if the same resource is requested twice, the same object is
   * returned.
   */
  resourceCache : null,

  /**
   * Authentication settings.
   *
   * If set, must have at least a `type` property.
   * If type=basic, userName and password must be set.
   */
  auth: null,

  /**
   * Content-Type settings and mappings.
   *
   * See the constructor for an example of the structure.
   */
  contentTypes: [],

  /**
   * A list of settings passed to the Fetch API.
   *
   * It's effectively a list of defaults that are passed as the 'init' argument.
   * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/Request
   */
  fetchInit : {},

  /**
   * This function is a shortcut for getResource().follow(x);
   *
   * @async
   * @param {string} rel - Relationship
   * @param {object} variables - Templated variables for templated links.
   * @returns {Resource}
   */
  follow: function(rel, variables) {
    return this.getResource().follow(rel, variables);
  },

  /**
   * Returns a resource by its uri.
   *
   * This function doesn't do any HTTP requests. The uri is optional. If it's
   * not specified, it will return the bookmark resource.
   *
   * @param {string} uri - Optional uri.
   * @return {Resource}
   */
  getResource: function(uri) {
    if (typeof uri === 'undefined') {
      uri = '';
    }
    // Resolve relative uris against the bookmark so cache keys are canonical.
    uri = url.resolve(this.bookMark, uri);
    if (!this.resourceCache[uri]) {
      this.resourceCache[uri] = new Resource(this, uri);
    }
    return this.resourceCache[uri];
  },

  /**
   * This function does an arbitrary request using the fetch API.
   *
   * Every request in ketting is routed through here so it can be initialized
   * with some useful defaults.
   *
   * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/GlobalFetch}
   * @param {string|Request} input - Uri or Request object.
   * @param {object} init - A list of settings.
   * @return {Response}
   */
  fetch : function(input, init) {
    var newInit = {};
    if (init) {
      // Per-call settings win over the client-wide fetchInit defaults.
      Object.assign(newInit, this.fetchInit, init);
    } else {
      newInit = this.fetchInit;
    }
    var request = new fetch.Request(input, newInit);
    if (!request.headers.has('User-Agent')) {
      request.headers.set('User-Agent', 'Ketting/' + require('../package.json').version);
    }
    if (!request.headers.has('Accept')) {
      // Advertise every configured mime-type, in preference order.
      var accept = this.contentTypes
        .map( function(contentType) { return contentType.mime; } )
        .join(',');
      request.headers.set('Accept', accept);
    }
    if (!request.headers.has('Content-Type')) {
      // Default to the most-preferred content type.
      request.headers.set('Content-Type', this.contentTypes[0].mime);
    }
    if (!request.headers.has('Authorization') && this.auth) {
      switch(this.auth.type) {

      case 'basic' :
        request.headers.set('Authorization', 'Basic ' + base64.encode(this.auth.userName + ':' + this.auth.password));
        break;
      case 'bearer' :
        request.headers.set('Authorization', 'Bearer ' + this.auth.token);
        break;
      case 'oauth2' :
        // OAuth2 handles token refresh itself and performs the fetch.
        return oauth.fetch(this, request, init);

      }
    }
    return fetch(request);
  },

  /**
   * This function returns a representor constructor for a mime type.
   *
   * For example, given text/html, this function might return the constructor
   * stored in representor/html.
   *
   * @param {String} contentType
   * @return {Function}
   */
  getRepresentor : function(contentType) {
    // Strip any parameters, e.g. "application/json; charset=utf-8".
    if (contentType.indexOf(';') !== -1) {
      contentType = contentType.split(';')[0];
    }
    contentType = contentType.trim();
    var result = this.contentTypes.find(function(item) {
      return item.mime === contentType;
    });

    if (!result) {
      throw new Error('Could not find a representor for contentType: ' + contentType);
    }

    switch(result.representor) {
    case 'html' :
      return representor.html;
    case 'hal' :
      return representor.hal;
    default :
      throw new Error('Unknown representor: ' + result.representor);
    }
  }

};

module.exports = Ketting;
|
<reponame>zhanglei1949/fastFFI
/*
* Copyright 1999-2021 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FULL_FEATURE
#define FULL_FEATURE

#include "jni.h"
#include <string>

// Test fixture for the fastFFI binding generator: exercises overloaded
// primitive echoes, field accessors, template parameters/methods and
// exception propagation across the JNI boundary.
template<typename P1, typename P2>
class FullFeature {
private:
    int field;  // backing state for setField/getField

public:
    // Overloaded identity ("echo") methods, one per JNI primitive type.
    jbyte echo(jbyte i);
    jboolean echo(jboolean i);
    jshort echo(jshort i);
    jint echo(jint i);
    jlong echo(jlong i);
    jfloat echo(jfloat i);
    jdouble echo(jdouble i);
    std::string echo(std::string i);

    void setField(int i) { field = i; }
    int getField() { return field; }

    // Exercises the class-level template parameters.
    void templateCall(P1 i, P2 j);
    // Throws an exception of a type unknown to the binding layer.
    void unknownException();
    // Factory returning a heap-allocated instance.
    FullFeature* make();
    // Exercises a method-level template parameter.
    template<typename T>
    void templateMethod(T t);
    // Exercises return-by-value of the class type.
    FullFeature rvTest();
};

// Free function exported alongside the class.
std::string libraryApi();

#endif // FULL_FEATURE
|
package com.ervin.litepal.ui.widget;
/**
* Created by Ervin on 2015/12/22.
*/
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.TypedArray;
import android.database.DataSetObserver;
import android.graphics.Canvas;
import android.os.Build;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.View;
import android.widget.Adapter;
import android.widget.AdapterView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import com.ervin.litepal.R;
/**
 * Created by dionysis_lorentzos on 5/8/14 for package com.lorentzos.swipecards
 * and project Swipe cards. Use with caution dinosaurs might appear!
 *
 * A Tinder-style stacked-card container: renders up to MAX_VISIBLE adapter
 * views on top of each other and lets the top card be dragged/flung left or
 * right via {@link DragCardListener}.
 */
public class DragCardsView extends AdapterView {

    // Maximum number of cards laid out on screen at once.
    private int MAX_VISIBLE = 3;
    // When fewer than this many items remain, onAdapterAboutToEmpty fires.
    private int MIN_ADAPTER_STACK = 6;
    // Maximum rotation applied to the top card while dragging.
    private float ROTATION_DEGREES = 15.f;
    public Adapter mAdapter;
    // Child index of the top-most (interactive) card.
    private int LAST_OBJECT_IN_STACK = 0;
    private final String TAG = "DragCardsView";
    private onDragListener mFlingListener;
    private AdapterDataSetObserver mDataSetObserver;
    // Guards against re-entrant requestLayout() while we are laying out.
    private boolean mInLayout = false;
    private View mActiveCard = null;
    private OnItemClickListener mOnItemClickListener;
    private DragCardListener dragCardListener;
    // Vertical offset (px) between successive cards in the stack.
    private int CARDS_SHIFT;
    private int childLeft;
    private int childTop;
    public View mActiveCard2;
    public View mActiveCard3;
    // Cached measure specs, re-used when measuring children on demand.
    private int widthMeasureSpec;
    private int heightMeasureSpec;
    private int CARDS_WIDTH;
    private int CARDS_HEIGHT;

    public DragCardsView(Context context) {
        this(context, null);
    }

    public DragCardsView(Context context, AttributeSet attrs) {
        this(context, attrs, R.attr.SwipeFlingStyle);
    }

    public DragCardsView(Context context, AttributeSet attrs,
                         int defStyle) {
        super(context, attrs, defStyle);
        // Read styleable attributes, falling back to the field defaults.
        TypedArray a = context.obtainStyledAttributes(attrs,
                R.styleable.DragCardsView, defStyle, 0);
        MAX_VISIBLE = a.getInt(R.styleable.DragCardsView_max_visible,
                MAX_VISIBLE);
        MIN_ADAPTER_STACK = a.getInt(
                R.styleable.DragCardsView_min_adapter_stack,
                MIN_ADAPTER_STACK);
        ROTATION_DEGREES = a.getFloat(
                R.styleable.DragCardsView_rotation_degrees,
                ROTATION_DEGREES);
        CARDS_SHIFT = a.getInt(R.styleable.DragCardsView_cards_shift,
                10);
        CARDS_WIDTH = a.getDimensionPixelSize(R.styleable.DragCardsView_cards_width, 300);
        CARDS_HEIGHT = a.getDimensionPixelSize(R.styleable.DragCardsView_cards_height, 330);
        a.recycle();
    }

    /**
     * Wires the host context up as the drag/click listener and installs the
     * adapter. The context MUST implement {@link onDragListener}.
     */
    public void init(final Context context, Adapter mAdapter) {
        if (context instanceof onDragListener) {
            mFlingListener = (onDragListener) context;
        } else {
            throw new RuntimeException(
                    "Activity does not implement SwipeFlingAdapterView.onFlingListener");
        }
        if (context instanceof OnItemClickListener) {
            mOnItemClickListener = (OnItemClickListener) context;
        }
        setAdapter(mAdapter);
    }

    public View getFirstCard() {
        return mActiveCard;
    }

    public View getSecondCard() {
        return mActiveCard2;
    }

    public View getThirdCard() {
        return mActiveCard3;
    }

    // Programmatic fling of the top card to the left, if a listener exists.
    public void rotationLeft(){
        if(dragCardListener!=null){
            dragCardListener.rotationLeft();
        }
    }

    // Programmatic fling of the top card to the right, if a listener exists.
    public void rotationtRight(){
        if(dragCardListener!=null){
            dragCardListener.rotationtRight();
        }
    }

    /**
     * Call this when something has changed which has invalidated the layout of
     * this view. Called when the view decides it no longer fits its current
     * area; requestLayout() will in turn trigger onMeasure() and onLayout().
     */
    @Override
    public void requestLayout() {
        if (!mInLayout) {
            super.requestLayout();
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // Cache the specs so makeAndAddView can measure children later.
        this.widthMeasureSpec = widthMeasureSpec;
        this.heightMeasureSpec = heightMeasureSpec;
    }

    public int getWidthMeasureSpec() {
        return widthMeasureSpec;
    }

    public int getHeightMeasureSpec() {
        return heightMeasureSpec;
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right,
                            int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        // if we don't have an adapter, we don't need to do anything
        if (mAdapter == null) {
            return;
        }
        mInLayout = true;
        final int adapterCount = mAdapter.getCount();
        if (adapterCount == 0) {
            removeAllViewsInLayout();
        } else {
            // Rebuild the visible stack from scratch on every layout pass.
            removeAllViewsInLayout();
            layoutChildren(0, adapterCount);
            setTopView();
        }
        mInLayout = false;
    }

    /**
     * Inflates and positions up to MAX_VISIBLE adapter views, remembering the
     * index of the last (top-most) one in LAST_OBJECT_IN_STACK.
     */
    public void layoutChildren(int startingIndex, int adapterCount) {
        while (startingIndex < Math.min(adapterCount, MAX_VISIBLE)) {
            AntiAliasView newUnderChild = (AntiAliasView)mAdapter.getView(startingIndex, null, this);
            if (newUnderChild.getVisibility() != GONE) {
                makeAndAddView(newUnderChild, startingIndex);
                LAST_OBJECT_IN_STACK = startingIndex;
            }
            startingIndex++;
        }
    }

    public Adapter getmAdapter() {
        return mAdapter;
    }

    public void setmAdapter(Adapter mAdapter) {
        this.mAdapter = mAdapter;
    }

    /**
     * Sizes, measures and positions one card. Deeper cards (higher index) are
     * shrunk by CARDS_SHIFT per level, capped at level 2, to create the
     * stacked-deck look; cards are added at child index 0 so earlier (top)
     * cards stay drawn last.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
    private void makeAndAddView(View child, int index) {

        LinearLayout.LayoutParams lp = (android.widget.LinearLayout.LayoutParams) child.getLayoutParams();

        if (index < 3) {
            lp.width = CARDS_WIDTH - (index * CARDS_SHIFT * 2);
            lp.height = CARDS_HEIGHT - (index * CARDS_SHIFT * 2);
        } else {
            lp.width = CARDS_WIDTH - (2 * CARDS_SHIFT * 2);
            lp.height = CARDS_HEIGHT - (2 * CARDS_SHIFT * 2);
        }

        addViewInLayout(child, 0, lp, true);

        final boolean needToMeasure = child.isLayoutRequested();
        if (needToMeasure) {
            int childWidthSpec = getChildMeasureSpec(getWidthMeasureSpec(),
                    getPaddingLeft() + getPaddingRight() + lp.leftMargin
                            + lp.rightMargin, lp.width);
            int childHeightSpec = getChildMeasureSpec(getHeightMeasureSpec(),
                    getPaddingTop() + getPaddingBottom() + lp.topMargin
                            + lp.bottomMargin, lp.height);
            child.measure(childWidthSpec, childHeightSpec);
        } else {
            cleanupLayoutState(child);
        }

        int w = child.getMeasuredWidth();
        int h = child.getMeasuredHeight();

        int gravity = lp.gravity;
        if (gravity == -1) {
            gravity = Gravity.TOP | Gravity.START;
        }

        int layoutDirection = getLayoutDirection();
        final int absoluteGravity = Gravity.getAbsoluteGravity(gravity,
                layoutDirection);
        final int verticalGravity = gravity & Gravity.VERTICAL_GRAVITY_MASK;

        // Resolve the child's horizontal position from its gravity.
        switch (absoluteGravity & Gravity.HORIZONTAL_GRAVITY_MASK) {
            case Gravity.CENTER_HORIZONTAL:
                childLeft = (getWidth() + getPaddingLeft() - getPaddingRight() - w)
                        / 2 + lp.leftMargin - lp.rightMargin;
                break;
            case Gravity.END:
                childLeft = getWidth() + getPaddingRight() - w - lp.rightMargin;
                break;
            case Gravity.START:
            default:
                childLeft = getPaddingLeft() + lp.leftMargin;
                break;
        }
        // Resolve the child's vertical position from its gravity.
        switch (verticalGravity) {
            case Gravity.CENTER_VERTICAL:
                childTop = (getHeight() + getPaddingTop() - getPaddingBottom() - h)
                        / 2 + lp.topMargin - lp.bottomMargin;
                break;
            case Gravity.BOTTOM:
                childTop = getHeight() - getPaddingBottom() - h - lp.bottomMargin;
                break;
            case Gravity.TOP:
            default:
                childTop = getPaddingTop() + lp.topMargin;
                break;
        }

        // Deeper cards are shifted down by CARDS_SHIFT per level (capped at 2)
        // so their top edges peek out from under the card above.
        if (index < 3) {
            child.layout(childLeft,
                    childTop + index * CARDS_SHIFT + CARDS_HEIGHT - h, childLeft + w,
                    childTop + index * CARDS_SHIFT + CARDS_HEIGHT);
        } else {
            child.layout(childLeft, childTop + 2 * CARDS_SHIFT + CARDS_HEIGHT - h,
                    childLeft + w, childTop + 2 * CARDS_SHIFT + CARDS_HEIGHT);
        }
        if (index == 1) {
            this.mActiveCard2 = child;
        }
        if (index == 2) {
            this.mActiveCard3 = child;
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // No custom drawing; the stacked children render themselves.
        super.onDraw(canvas);
    }

    /**
     * Set the top view and add the fling listener
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    private void setTopView() {
        if (getChildCount() > 0) {

            mActiveCard = getChildAt(LAST_OBJECT_IN_STACK);
            // Like/dislike badges faded in/out as the card is dragged.
            final ImageView iv_dislike = (ImageView) mActiveCard.findViewById(R.id.iv_dislike);
            final ImageView iv_like = (ImageView) mActiveCard.findViewById(R.id.iv_like);
            if (mActiveCard != null) {

                dragCardListener = new DragCardListener(this,
                        mAdapter.getItem(0), ROTATION_DEGREES,
                        new DragCardListener.FlingListener() {

                            @Override
                            public void onCardExited(boolean isLeft) {
                                mActiveCard = null;
                                if (mFlingListener != null) {
                                    mFlingListener.removeFirstObjectInAdapter(isLeft);
                                    if (getChildCount() < MAX_VISIBLE) {
                                        mFlingListener.onAdapterAboutToEmpty(getChildCount());
                                    }
                                }
                            }

                            @Override
                            public void onCardClick(Object dataObject) {
                                if (mOnItemClickListener != null)
                                    mOnItemClickListener.onItemClicked(0,dataObject);
                            }

                            @Override
                            public void onSelectLeft(double distance) {
                                // Dragging left: show only the dislike badge.
                                iv_dislike.setVisibility(View.VISIBLE);
                                iv_like.setVisibility(View.INVISIBLE);
                                if (mFlingListener != null) {
                                    mFlingListener.onSelectLeft(distance);
                                }
                            }

                            @Override
                            public void onSelectRight(double distance) {
                                // Dragging right: show only the like badge.
                                iv_like.setVisibility(View.VISIBLE);
                                iv_dislike.setVisibility(View.INVISIBLE);
                                if (mFlingListener != null) {
                                    mFlingListener.onSelectRight(distance);
                                }
                            }

                            @Override
                            public void onCardMoveDistance(double distance) {
                                // Fade the visible badge in proportion to drag distance.
                                if (iv_like.isShown()) {
                                    iv_like.setAlpha((float) distance /dragCardListener.MAX_DISTANCE);
                                } else if (iv_dislike.isShown()) {
                                    iv_dislike.setAlpha((float) distance / dragCardListener.MAX_DISTANCE);
                                }
                                if (mFlingListener != null) {
                                    mFlingListener.onCardMoveDistance(distance);
                                }
                            }

                            @Override
                            public void onCardReturn() {
                                // Card snapped back: hide both badges.
                                iv_dislike.setVisibility(View.INVISIBLE);
                                iv_like.setVisibility(View.INVISIBLE);
                                if (mFlingListener != null) {
                                    mFlingListener.onCardReturn();
                                }
                            }
                        });
                dragCardListener.setViewGroudWidth(this.getWidth());
                dragCardListener.setViewGroudHeight(this.getHeight());
                mActiveCard.setOnTouchListener(dragCardListener);
            }
        }
    }

    public int getCARDS_SHIFT() {
        return CARDS_SHIFT;
    }

    public void setCARDS_SHIFT(int cARDS_SHIFT) {
        CARDS_SHIFT = cARDS_SHIFT;
    }

    public void setMaxVisible(int MAX_VISIBLE) {
        this.MAX_VISIBLE = MAX_VISIBLE;
    }

    public void setMinStackInAdapter(int MIN_ADAPTER_STACK) {
        this.MIN_ADAPTER_STACK = MIN_ADAPTER_STACK;
    }

    @Override
    public Adapter getAdapter() {
        return mAdapter;
    }

    @Override
    public void setAdapter(Adapter adapter) {
        // Swap the data-set observer over to the new adapter.
        if (mAdapter != null && mDataSetObserver != null) {
            mAdapter.unregisterDataSetObserver(mDataSetObserver);
            mDataSetObserver = null;
        }

        mAdapter = adapter;

        if (mAdapter != null && mDataSetObserver == null) {
            mDataSetObserver = new AdapterDataSetObserver();
            mAdapter.registerDataSetObserver(mDataSetObserver);
        }
    }

    public void setFlingListener(onDragListener onFlingListener) {
        this.mFlingListener = onFlingListener;
    }

    public void setOnItemClickListener(OnItemClickListener onItemClickListener) {
        this.mOnItemClickListener = onItemClickListener;
    }

    @Override
    public LayoutParams generateLayoutParams(AttributeSet attrs) {
        return new LinearLayout.LayoutParams(getContext(), attrs);
    }

    private class AdapterDataSetObserver extends DataSetObserver {
        /**
         * This method is called when the entire data set has changed.
         */
        @Override
        public void onChanged() {
            requestLayout();
        }

        /**
         * This method is called when the entire data set becomes invalid.
         */
        @Override
        public void onInvalidated() {
            requestLayout();
        }
    }

    public interface OnItemClickListener {
        public void onItemClicked(int itemPosition, Object dataObject);
    }

    public interface onDragListener {
        public void removeFirstObjectInAdapter(boolean isLeft);
        public void onSelectLeft(double distance);
        public void onSelectRight(double distance);
        public void onAdapterAboutToEmpty(int itemsInAdapter);
        public void onCardMoveDistance(double distance);
        public void onCardReturn();
    }

    @Override
    public void setSelection(int position) {
        // Selection is meaningless for a drag stack; intentionally a no-op.
    }

    @Override
    public View getSelectedView() {
        // The "selected" view is always the interactive top card.
        return getFirstCard();
    }
}
|
#!/bin/bash
# Copyright 2016 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# runs the given command in a container with iso build dependencies
set -e && [ -n "$DEBUG" ] && set -x
# Runs "$*" (everything after the first argument) inside the vic-build-image
# container matching the repo's package manager.
#   $1   -- repo/distro name; used to read .packagemanager from its repo-spec
#   rest -- the command line to execute inside the container
function main {
    # Container tag comes from the repo spec (defaults to tdnf below).
    PKGMGR=$(cat isos/base/repos/$1/repo-spec.json | jq -r '.packagemanager')
    shift
    # Keep the container around for post-mortem debugging when DEBUG is set.
    rmArg=""
    if [ -z "$DEBUG" ]; then
        rmArg="--rm"
    fi

    docker run \
        -it \
        ${rmArg} \
        -v $GOPATH/bin:/go/bin:ro \
        -v $GOPATH/src/github.com/vmware/vic:/go/src/github.com/vmware/vic:ro \
        -v $GOPATH/src/github.com/vmware/vic/bin:/go/src/github.com/vmware/vic/bin \
        -e DEBUG=${DEBUG} \
        -e BUILD_NUMBER=${BUILD_NUMBER} \
        gcr.io/eminent-nation-87317/vic-build-image:${PKGMGR:-tdnf} "$*"
}
REPO="photon-2.0"

# Find the dependency manager. The d stands for distro.
while getopts ':d:' flag; do
    case "${flag}" in
        d) REPO="${OPTARG}" ;;
    esac
done
shift $((OPTIND-1))

# Check if jq is available - we need this on either path.
# Fix: the original ran a bare `which jq` (which aborts the script under
# `set -e` before the check runs) and then tried to execute the string
# "Echo please install 'jq'..." as a command, so neither the message nor
# the `exit 1` was ever reached.
if ! command -v jq >/dev/null 2>&1; then
    echo "Please install 'jq' to continue..."
    exit 1
fi

echo "building $REPO"

# Check if docker installed
if ! docker info >/dev/null 2>&1; then
    /bin/bash -c "$*" # prevent docker in docker
else
    main "${REPO}" "$@"
fi
|
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.apiomat.helper.mvnnmhelper.mojos;
import org.apache.http.client.utils.URIBuilder;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.shared.utils.StringUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
/**
* Goal to upload a native module artifact
*
* @author thum
*/
@Mojo( name = "upload", defaultPhase = LifecyclePhase.DEPLOY )
public class UploadNMMojo extends AbstractRequestMojo
{
/**
* the path where the jar should be stored (build directory)
*/
@Parameter( defaultValue = "${project.build.directory}", property = "moduleJarPath", required = true )
protected File moduleJarPath;
/**
* whether to update or to overwrite the changes on yambas instance
*/
@Parameter( defaultValue = "overwrite", property = "update", required = true )
protected String update;
/**
* whether to update or to overwrite the changes on yambas instance
*/
@Parameter( defaultValue = "false", property = "noDownload" )
protected boolean noDownload;
/**
* Directory containing the generated JAR.
*/
@Parameter( defaultValue = "${project.build.directory}", required = true )
private File outputDirectory;
/**
* Name of the generated JAR.
*/
@Parameter( defaultValue = "${project.build.finalName}" )
private String finalName;
@Override
public void executeRequest( ) throws MojoExecutionException, IOException
{
final URI hostUrl = buildHostUrl( );
final URL url = hostUrl.toURL( );
final HttpURLConnection connection = ( HttpURLConnection ) url.openConnection( );
connection.setDoOutput( true );
connection.setRequestProperty( "Content-Type", "application/octet-stream" );
connection.setRequestMethod( "POST" );
if ( this.system != null )
{
connection.addRequestProperty( "X-apiomat-system", this.system );
}
// Allow Outputs
connection.setDoOutput( true );
// Don't use a cached copy.
connection.setUseCaches( false );
connection.setRequestProperty( "Authorization", getUserAuthHeaderValue( ) );
final File jarFile = getJarFile( this.moduleJarPath, this.finalName, "NM" );
if ( jarFile.exists( ) == false )
{
throw new MojoExecutionException( "Can't find module jar in " + jarFile.getAbsolutePath( ) );
}
final OutputStream writer = connection.getOutputStream( );
final FileInputStream fis = new FileInputStream( jarFile );
final byte[] buf = new byte[ 1024 ];
System.out.print( "Writing bytes " );
for ( int c = fis.read( buf ); c != -1; c = fis.read( buf ) )
{
writer.write( buf, 0, c );
System.out.print( "." );
}
writer.flush( );
writer.close( );
fis.close( );
System.out.println( "" );
final int responseCode = connection.getResponseCode( );
if ( responseCode != HttpURLConnection.HTTP_CREATED )
{
try (final InputStream is =
( 200 <= responseCode && responseCode <= 299 ) ? connection.getInputStream( )
: connection.getErrorStream( ))
{
String reason = "";
if ( is != null )
{
final StringBuilder inputStringBuilder = new StringBuilder( );
final BufferedReader bufferedReader =
new BufferedReader( new InputStreamReader( is, "UTF-8" ) );
String line = bufferedReader.readLine( );
while ( line != null )
{
inputStringBuilder.append( line );
inputStringBuilder.append( '\n' );
line = bufferedReader.readLine( );
}
reason = inputStringBuilder.toString( );
bufferedReader.close( );
}
throw new MojoExecutionException(
"Return code did not match 201: " + connection.getResponseMessage( ) + "(" +
connection.getResponseCode( ) + ") Reason: " + reason );
}
}
else if ( this.noDownload == false )
{
executeGoal( "download" );
}
}
private URI buildHostUrl( )
{
try
{
final StringBuilder sb = new StringBuilder( );
sb.append( this.host ).append( "/yambas/rest/modules/asset" );
final URIBuilder bldr = new URIBuilder( sb.toString( ) );
if ( StringUtils.isNotBlank( this.system ) )
{
bldr.addParameter( "usedSystem", this.system );
}
if ( StringUtils.isNotBlank( this.update ) )
{
bldr.addParameter( "update", this.update );
}
return bldr.build( );
}
catch ( final Exception e )
{
getLog( ).error( "Error building url", e );
}
return null;
}
/**
* Returns the Jar file to generate, based on an optional classifier.
*
* @param basedir the output directory
* @param resultFinalName the name of the ear file
* @param classifier an optional classifier
* @return the file to generate
*/
protected static File getJarFile( final File basedir, final String resultFinalName, final String classifier )
{
if ( basedir == null )
{
throw new IllegalArgumentException( "basedir is not allowed to be null" );
}
if ( resultFinalName == null )
{
throw new IllegalArgumentException( "finalName is not allowed to be null" );
}
final StringBuilder fileName = new StringBuilder( resultFinalName );
fileName.append( "-" ).append( classifier );
fileName.append( ".jar" );
return new File( basedir, fileName.toString( ) );
}
}
|
#!/bin/bash
# Print only the tmux version number: `tmux -V` emits "tmux X.Y";
# cut drops the 5-character "tmux " prefix.
tmux -V | cut -c 6-
|
def simulate_cog_init():
    """Simulate the cog initialisation sequence.

    Runs power check -> communication init -> calibration, where each step
    only runs if the previous one succeeded, logs every outcome, prints the
    log via log_initialization_process(), and returns the log entries.
    """
    log = []
    # Step 1: power supply must be OK before anything else is attempted.
    power_status = check_power_supply()
    log.append(f"Power supply check: {power_status}")
    if power_status == "OK":
        # Step 2: bring up the communication interface.
        comm_status = initialize_communication()
        log.append(f"Communication interface initialization: {comm_status}")
        if comm_status == "Success":
            # Step 3: calibrate sensors and actuators.
            log.append(f"Sensor and actuator calibration: {calibrate_sensors_actuators()}")
    # Step 4: the log is always emitted, even after an early failure.
    log_initialization_process(log)
    return log
def check_power_supply():
    """Simulated power-supply check.

    Always reports "OK" here; a real implementation would return
    "Insufficient power" when the supply is out of spec.
    """
    status = "OK"
    return status
def initialize_communication():
    """Simulated communication-interface bring-up.

    Always reports "Success" here; a real implementation would return
    "Communication error" on failure.
    """
    status = "Success"
    return status
def calibrate_sensors_actuators():
    """Simulated sensor/actuator calibration.

    Always reports "Calibration successful" here; a real implementation
    would return "Calibration failed" on failure.
    """
    result = "Calibration successful"
    return result
def log_initialization_process(log):
    """Simulated logger: print each entry of the initialisation log to stdout."""
    for line in log:
        print(line)
|
<filename>lib/util/Logger.js
"use strict";
var logger;
function debug() {
if (logger) {
_callLog(logger.debug, arguments);
}
}
function info() {
if (logger) {
_callLog(logger.info, arguments);
}
}
function error() {
if (logger) {
_callLog(logger.error, arguments);
}
}
function overrideLogger(log) {
if (log && typeof log.debug === "function" && typeof log.info === "function" && typeof log.error === "function") {
logger = log;
}
}
function _callLog(func, funcArguments) {
/*jshint validthis:true */
var args = Array.prototype.slice.call(funcArguments);
func.apply(this, args);
}
module.exports = {
overrideLogger: overrideLogger,
debug: debug,
info: info,
error: error
};
|
exports.Temperature = function(kTemp, setTemp) {
this.kTemp = kTemp;
this.setTemp = setTemp;
};
exports.Temperature.prototype.convertTemp = function(kTemp) {
var c = kTemp - 273.15;
var f = c * 1.8 + 32;
return f.toFixed(2);
};
exports.Temperature.prototype.tempAlarm = function(kTemp, setTemp) {
if (kTemp >= setTemp) {
return true;
} else {
return false;
}
};
//C = K -273.15
//F = C*1.8 + 32
|
<filename>transform/path.go<gh_stars>0
package transform
import (
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
"github.com/cuducos/go-cnpj"
)
var nonDigits = regexp.MustCompile(`\D`)
// PathsForSource lists files for a given `sourceType` in a directory `dir`.
// A file matches when its name contains the source type, case-insensitively;
// sub-directories are skipped.
func PathsForSource(t sourceType, dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	needle := strings.ToLower(string(t))
	var paths []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		if strings.Contains(strings.ToLower(e.Name()), needle) {
			paths = append(paths, filepath.Join(dir, e.Name()))
		}
	}
	return paths, nil
}
// PathForCNPJ creates the file path for a JSON file related to a CNPJ.
// e.g. "12.345.678/0001-90" -> "12/345/678/000190.json".
func PathForCNPJ(c string) (string, error) {
	if !cnpj.IsValid(c) {
		return "", fmt.Errorf("error finding file path for %s: invalid cnpj", c)
	}
	c = cnpj.Mask(c)
	// NOTE(review): Mask is applied again below to the already-masked value;
	// presumably idempotent, so one of the two calls is redundant — confirm
	// against go-cnpj before simplifying.
	// Split the masked CNPJ on its punctuation into 5 digit groups.
	p := nonDigits.Split(cnpj.Mask(c), 5)
	// File name joins the branch number and check digits: "0001" + "90" + ".json".
	n := p[3] + p[4] + ".json"
	return strings.Join(append(p[:3], n), string(os.PathSeparator)), nil
}
func pathForBaseCNPJ(s string) (string, error) {
if len(s) != 8 {
return "", fmt.Errorf("invalid base cnpj: %s", s)
}
return filepath.Join(s[:2], s[2:5], s[5:]), nil
}
// CNPJForPath creates a CNPJ from a path of a JSON file related to a CNPJ.
// It is the inverse of PathForCNPJ: the last four path segments
// (NN/NNN/NNN/NNNNNN.json) are concatenated and the extension dropped.
func CNPJForPath(f string) (string, error) {
	p := strings.Split(f, string(os.PathSeparator))
	if len(p) < 4 {
		return "", fmt.Errorf("error finding the cnpj for %s: invalid path", f)
	}
	// Join the trailing four segments and strip ".json".
	c := strings.TrimSuffix(strings.Join(p[len(p)-4:], ""), filepath.Ext(f))
	if !cnpj.IsValid(c) {
		return "", fmt.Errorf("error finding the cnpj for %s: invalid resulting cnpj", f)
	}
	return c, nil
}
|
// Mocha suite for the `span` duration helper: parses human-readable duration
// strings (English and German units) to milliseconds, and formats millisecond
// values back into composite strings like "3d 2h".
var should = require('should');
var span = require('../index');
describe('Span', function() {
  describe('interface', function() {
    it('should export a function', function() {
      span.should.be.an.instanceOf(Function);
    });
    // Round-tripping: span() of its own output returns the original input.
    it('should parse itself', function() {
      span(span('3d 2h')).should.equal('3d 2h');
      span(span(266400000)).should.equal(266400000);
    })
  });
  describe('str to ms', function() {
    it('should ignore whitespace', function() {
      span('1 day').should.equal(span('1day'));
    });
    it('should speak english', function() {
      span('1 second').should.equal(1000);
      span('1 minute').should.equal(60000);
      span('1 hour').should.equal(3600000);
      span('1 day').should.equal(86400000);
      span('1 week').should.equal(604800000);
    });
    it('should speak german', function() {
      span('1 sekunde').should.equal(1000);
      span('1 minute').should.equal(60000);
      span('1 stunde').should.equal(3600000);
      span('1 tag').should.equal(86400000);
      span('1 woche').should.equal(604800000);
    });
    it('should understand short forms', function() {
      span('2 seconds').should.equal(2000);
      span('1 second').should.equal(1000);
      span('2 secs').should.equal(2000);
      span('1 sec').should.equal(1000);
      span('1 sec').should.equal(1000);
    });
    // A purely numeric string is treated as milliseconds and formatted.
    it('should recognize numbers as strings', function() {
      span('1000').should.equal('1s');
    });
    it('should understand composited values', function() {
      span('3d 2h').should.equal(266400000);
      span('3d 2m').should.equal(259320000);
    });
  });
  describe('ms to str', function() {
    it('should output milliseconds', function() {
      span(100).should.equal('100ms');
    });
    // Sub-second remainders are truncated once the value reaches 1s.
    it('shouldn\'t output milliseconds if >=1s', function() {
      span(1000).should.equal('1s');
      span(1001).should.equal('1s');
    });
    it('should output composited values', function() {
      span(266400000).should.equal('3d 2h');
      span(259320000).should.equal('3d 2m');
    });
    it('should support big values', function() {
      span(1000).should.equal('1s')
      span(60000).should.equal('1m')
      span(3600000).should.equal('1h')
      span(86400000).should.equal('1d')
      span(604800000).should.equal('1w')
      span(31557600000).should.equal('1y')
    })
  });
});
|
#!/bin/bash -e
# Build the test-app image, tagged with the Conjur namespace so parallel
# CI runs in different namespaces don't clobber each other's images.
docker build -t test-app:$CONJUR_NAMESPACE_NAME .
|
#!/bin/bash -e
# Builds rcn-ee console rootfs tarballs (Debian/Ubuntu armhf) and prepares the
# board-specific image options used by the generated gift-wrap script below.
time=$(date +%Y-%m-%d)
mirror_dir="/var/www/html/rcn-ee.us/rootfs/"
DIR="$PWD"
export apt_proxy=apt-proxy:3142/
# Start from a clean deploy directory.
if [ -d ./deploy ] ; then
sudo rm -rf ./deploy || true
fi
# Without the jenkins.build marker, build all four rootfs flavours now;
# under Jenkins the tarballs are supplied externally, so only make deploy/.
if [ ! -f jenkins.build ] ; then
./RootStock-NG.sh -c rcn-ee_console_debian_stretch_armhf
./RootStock-NG.sh -c rcn-ee_console_debian_buster_armhf
./RootStock-NG.sh -c rcn-ee_console_ubuntu_xenial_armhf
./RootStock-NG.sh -c rcn-ee_console_ubuntu_bionic_armhf
else
mkdir -p ${DIR}/deploy/ || true
fi
# Rootfs base names, suffixed with today's date.
debian_stable="debian-9.4-console-armhf-${time}"
debian_testing="debian-buster-console-armhf-${time}"
ubuntu_stable="ubuntu-16.04.4-console-armhf-${time}"
ubuntu_testing="ubuntu-bionic-console-armhf-${time}"
# xz flags: single-threaded -8 for images, two threads for tarballs.
xz_img="xz -z -8"
xz_tar="xz -T2 -z -8"
# Per-board setup_sdcard.sh option sets.
beaglebone="--dtb beaglebone --rootfs_label rootfs --enable-cape-universal --enable-uboot-cape-overlays"
omap3_beagle_xm="--dtb omap3-beagle-xm --rootfs_label rootfs"
omap5_uevm="--dtb omap5-uevm --rootfs_label rootfs"
am57xx_beagle_x15="--dtb am57xx-beagle-x15 --rootfs_label rootfs"
cat > ${DIR}/deploy/gift_wrap_final_images.sh <<-__EOF__
#!/bin/bash
# Generated by the image-build job: wraps rootfs tarballs into per-board
# images and archives both onto the rootfs mirror.
wait_till_Xgb_free () {
memory=4096
free_memory=\$(free --mega | grep Mem | awk '{print \$7}')
until [ "\$free_memory" -gt "\$memory" ] ; do
free_memory=\$(free --mega | grep Mem | awk '{print \$7}')
echo "have [\$free_memory] need [\$memory]"
sleep 10
done
}
copy_base_rootfs_to_mirror () {
wait_till_Xgb_free
if [ -d ${mirror_dir}/ ] ; then
if [ ! -d ${mirror_dir}/${time}/\${blend}/ ] ; then
mkdir -p ${mirror_dir}/${time}/\${blend}/ || true
fi
if [ -d ${mirror_dir}/${time}/\${blend}/ ] ; then
if [ ! -f ${mirror_dir}/${time}/\${blend}/\${base_rootfs}.tar.xz ] ; then
cp -v \${base_rootfs}.tar ${mirror_dir}/${time}/\${blend}/
cd ${mirror_dir}/${time}/\${blend}/
${xz_tar} \${base_rootfs}.tar && sha256sum \${base_rootfs}.tar.xz > \${base_rootfs}.tar.xz.sha256sum &
cd -
fi
fi
fi
}
archive_base_rootfs () {
if [ -d ./\${base_rootfs} ] ; then
rm -rf \${base_rootfs} || true
fi
if [ -f \${base_rootfs}.tar ] ; then
copy_base_rootfs_to_mirror
fi
}
extract_base_rootfs () {
if [ -d ./\${base_rootfs} ] ; then
rm -rf \${base_rootfs} || true
fi
if [ -f \${base_rootfs}.tar.xz ] ; then
tar xf \${base_rootfs}.tar.xz
fi
if [ -f \${base_rootfs}.tar ] ; then
tar xf \${base_rootfs}.tar
fi
}
copy_img_to_mirror () {
wait_till_Xgb_free
if [ -d ${mirror_dir} ] ; then
if [ ! -d ${mirror_dir}/${time}/\${blend}/ ] ; then
mkdir -p ${mirror_dir}/${time}/\${blend}/ || true
fi
if [ -d ${mirror_dir}/${time}/\${blend}/ ] ; then
# FIX: the guard used to test "\${wfile}.img.zx" (typo for .xz), so it
# never detected an already-archived image and always re-copied.
if [ ! -f ${mirror_dir}/${time}/\${blend}/\${wfile}.img.xz ] ; then
mv -v \${wfile}.img ${mirror_dir}/${time}/\${blend}/
sync
if [ -f \${wfile}.img.xz.job.txt ] ; then
mv -v \${wfile}.img.xz.job.txt ${mirror_dir}/${time}/\${blend}/
sync
fi
cd ${mirror_dir}/${time}/\${blend}/
${xz_img} \${wfile}.img && sha256sum \${wfile}.img.xz > \${wfile}.img.xz.sha256sum &
cd -
fi
fi
fi
}
archive_img () {
if [ -f \${wfile}.img ] ; then
copy_img_to_mirror
fi
}
generate_img () {
if [ -d \${base_rootfs}/ ] ; then
cd \${base_rootfs}/
sudo ./setup_sdcard.sh \${options}
sudo chown 1000:1000 *.img || true
sudo chown 1000:1000 *.job.txt || true
mv *.img ../ || true
mv *.job.txt ../ || true
cd ..
fi
}
#Debian Stable
base_rootfs="${debian_stable}" ; blend="elinux" ; extract_base_rootfs
options="--img BBB-eMMC-flasher-\${base_rootfs} ${beaglebone} --emmc-flasher" ; generate_img
options="--img bone-\${base_rootfs} ${beaglebone}" ; generate_img
options="--img bbxm-\${base_rootfs} ${omap3_beagle_xm}" ; generate_img
options="--img bbx15-eMMC-flasher-\${base_rootfs} ${am57xx_beagle_x15} --emmc-flasher" ; generate_img
options="--img bbx15-\${base_rootfs} ${am57xx_beagle_x15}" ; generate_img
options="--img omap5-uevm-\${base_rootfs} ${omap5_uevm}" ; generate_img
#Ubuntu Stable
base_rootfs="${ubuntu_stable}" ; blend="elinux" ; extract_base_rootfs
options="--img BBB-eMMC-flasher-\${base_rootfs} ${beaglebone} --emmc-flasher" ; generate_img
options="--img bone-\${base_rootfs} ${beaglebone}" ; generate_img
options="--img bbxm-\${base_rootfs} ${omap3_beagle_xm}" ; generate_img
options="--img bbx15-eMMC-flasher-\${base_rootfs} ${am57xx_beagle_x15} --emmc-flasher" ; generate_img
options="--img bbx15-\${base_rootfs} ${am57xx_beagle_x15}" ; generate_img
options="--img omap5-uevm-\${base_rootfs} ${omap5_uevm}" ; generate_img
#Archive tar:
base_rootfs="${debian_stable}" ; blend="elinux" ; archive_base_rootfs
base_rootfs="${ubuntu_stable}" ; blend="elinux" ; archive_base_rootfs
base_rootfs="${debian_testing}" ; blend="elinux" ; archive_base_rootfs
base_rootfs="${ubuntu_testing}" ; blend="elinux" ; archive_base_rootfs
#Archive img:
base_rootfs="${debian_stable}" ; blend="microsd"
wfile="bone-\${base_rootfs}-2gb" ; archive_img
wfile="bbxm-\${base_rootfs}-2gb" ; archive_img
wfile="bbx15-\${base_rootfs}-2gb" ; archive_img
wfile="omap5-uevm-\${base_rootfs}-2gb" ; archive_img
base_rootfs="${ubuntu_stable}" ; blend="microsd"
wfile="bone-\${base_rootfs}-2gb" ; archive_img
wfile="bbxm-\${base_rootfs}-2gb" ; archive_img
wfile="bbx15-\${base_rootfs}-2gb" ; archive_img
wfile="omap5-uevm-\${base_rootfs}-2gb" ; archive_img
base_rootfs="${debian_stable}" ; blend="flasher"
wfile="BBB-eMMC-flasher-\${base_rootfs}-2gb" ; archive_img
wfile="bbx15-eMMC-flasher-\${base_rootfs}-2gb" ; archive_img
base_rootfs="${ubuntu_stable}" ; blend="flasher"
wfile="BBB-eMMC-flasher-\${base_rootfs}-2gb" ; archive_img
wfile="bbx15-eMMC-flasher-\${base_rootfs}-2gb" ; archive_img
__EOF__
chmod +x ${DIR}/deploy/gift_wrap_final_images.sh
image_prefix="elinux"
#node: farm share not mounted locally — copy over NFS.
if [ ! -d /var/www/html/farm/images/ ] ; then
if [ ! -d /mnt/farm/images/ ] ; then
#nfs mount...
sudo mount -a
fi
if [ -d /mnt/farm/images/ ] ; then
mkdir -p /mnt/farm/images/${image_prefix}-${time}/ || true
echo "Copying: *.tar to server: images/${image_prefix}-${time}/"
cp -v ${DIR}/deploy/*.tar /mnt/farm/images/${image_prefix}-${time}/ || true
cp -v ${DIR}/deploy/gift_wrap_final_images.sh /mnt/farm/images/${image_prefix}-${time}/gift_wrap_final_images.sh || true
chmod +x /mnt/farm/images/${image_prefix}-${time}/gift_wrap_final_images.sh || true
fi
fi
#x86: farm share is local.
# NOTE(review): unlike the NFS branch, this branch does not copy *.tar —
# presumably the tarballs are already on this host; confirm intended.
if [ -d /var/www/html/farm/images/ ] ; then
mkdir -p /var/www/html/farm/images/${image_prefix}-${time}/ || true
echo "Copying: *.tar to server: images/${image_prefix}-${time}/"
cp -v ${DIR}/deploy/gift_wrap_final_images.sh /var/www/html/farm/images/${image_prefix}-${time}/gift_wrap_final_images.sh || true
chmod +x /var/www/html/farm/images/${image_prefix}-${time}/gift_wrap_final_images.sh || true
sudo chown -R apt-cacher-ng:apt-cacher-ng /var/www/html/farm/images/${image_prefix}-${time}/ || true
fi
|
<gh_stars>0
import { NodeSelection } from "prosemirror-state"
// Returns true when `nodeType` can be inserted at the current selection:
// walks up from the selection's start position, testing each ancestor depth
// for a legal replacement at that index.
export function canInsert(state, nodeType) {
    const $from = state.selection.$from
    let depth = $from.depth
    while (depth >= 0) {
        const index = $from.index(depth)
        if ($from.node(depth).canReplaceWith(index, index, nodeType)) {
            return true
        }
        depth -= 1
    }
    return false
}
// Replaces the current selection with `node`, then selects the freshly
// inserted node and dispatches the resulting transaction.
export function insertAndSelectNode(node, state, dispatch) {
    const tr = state.tr
    // Insert the node over the existing selection.
    tr.replaceSelectionWith(node)
    // The insertion leaves the cursor after the node, so step back over the
    // node's size to resolve the position right before it.
    // (https://discuss.prosemirror.net/t/how-to-select-a-node-immediately-after-inserting-it/1566)
    const posBeforeNode = tr.doc.resolve(
        tr.selection.anchor - tr.selection.$anchor.nodeBefore.nodeSize
    )
    tr.setSelection(new NodeSelection(posBeforeNode))
    dispatch(tr)
}
|
<filename>src/app/entry1/entry1.component.ts
import { Component, OnInit, Renderer2, ApplicationRef, Injector, ComponentFactoryResolver, AfterContentInit } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Entry2Component } from '../entry2/entry2.component';
@Component({
  selector: 'scm-entry1',
  templateUrl: './entry1.component.html',
  styleUrls: ['./entry1.component.less']
})
export class Entry1Component implements OnInit, AfterContentInit {
  constructor(
    private http: HttpClient,
    private render: Renderer2,
    private appRef: ApplicationRef,
    private injector: Injector,
    private componentFactoryResolver: ComponentFactoryResolver
  ) { }
  ngOnInit() {
  }
  // Create the child component after content projection so the
  // '.entry2-container' element exists in the DOM.
  ngAfterContentInit() {
    this.creatEntryComp();
  }
  // Dynamically instantiates Entry2Component and attaches it both to the DOM
  // (under '.entry2-container') and to Angular's change detection.
  // (Translated from the original Chinese comment: "create the Item
  // component and add @input".)
  private creatEntryComp() {
    const container = document.querySelector('.entry2-container');
    const entry2Comp = this.componentFactoryResolver
      .resolveComponentFactory(Entry2Component)
      .create(this.injector);
    this.render.appendChild(container, entry2Comp.location.nativeElement);
    // Register the detached view so it participates in change detection.
    this.appRef.attachView(entry2Comp.hostView);
  }
}
|
# select loop:
# `select` prints a numbered menu of the item list and prompts the user
# (default prompt: "#"). Entering a menu number assigns the matching item to
# the loop variable and runs the loop body.
# It is commonly combined with `case` to run a command per choice; the loop
# only ends when `break` is executed (or input is exhausted).
# (Comments translated from the original Chinese.)
# select string in item-list
# do
# command
# done
echo selcet 测试
select i in 星期一 星期二 星期三 星期四 星期五 星期六 星期日
do
{
case $i in
星期一) echo 您选择的是周一;break;;
星期二) echo 今天是星期二;;
星期三) echo 您选择的是周三; break;;
*) echo 今天既不是周一也不是周二更不是周三;;
esac
}
done
|
Thanks for your question! Our restaurant serves a variety of delicious cuisines, including Italian, American, Mexican, and Chinese. We also offer many vegan and vegetarian options. Let us know if you have any other questions!
|
import { connect } from 'react-redux'
import { push } from 'connected-react-router'
import { get, omit } from 'lodash/fp'
import { postsUrl, postUrl } from 'util/navigation'
// Builds dispatch props for the post editor: computes the URL to return to
// when the editor closes (the post detail page when a postId is in the route,
// otherwise the posts stream) and exposes it as hidePostEditor().
export const mapDispatchToProps = (dispatch, props) => {
  const routeParams = get('match.params', props)
  // Rendered outside a matched route: expose no handlers.
  if (!routeParams) return {}
  const { postId, slug } = routeParams
  // NOTE(review): context is inferred from "public" appearing in the URL —
  // confirm this matches what postUrl/postsUrl expect.
  const context = props.match.url.includes('public') ? 'public' : ''
  const urlParams = {
    communitySlug: slug,
    ...omit(['postId', 'action', 'slug'], routeParams),
    context
  }
  const closeUrl = postId
    ? postUrl(postId, urlParams)
    : postsUrl(urlParams)
  return {
    hidePostEditor: () => dispatch(push(closeUrl))
  }
}
export default connect(null, mapDispatchToProps)
|
<reponame>nihalpandey4/Live_Video_Broadcasting_Service<gh_stars>1-10
import React from 'react';
import ReactDOM from "react-dom";
import {Link } from "react-router-dom";
import history from "../history";
// Modal rendered through a React portal into the `#modal` DOM node.
// Clicking the dimmed backdrop navigates to props.cancelUrl; clicks inside
// the dialog call stopPropagation so they do not dismiss it.
// Props: header, content, actionText, onActionClicked, cancelUrl.
const Modal = props=>{
    return ReactDOM.createPortal(
        <div className = "ui dimmer modals visible active" onClick = {()=>history.push(props.cancelUrl)}>
            <div className="ui standard modal visible active" onClick={(e)=>e.stopPropagation()}>
                <div className="header">
                    {props.header}
                </div>
                <div className = "content">
                    {props.content}
                </div>
                <div className = "actions">
                    <button className="ui button red" onClick={props.onActionClicked}>{props.actionText}</button>
                    <Link to={props.cancelUrl} className="ui button">Cancel</Link>
                </div>
            </div>
        </div>,
        document.querySelector("#modal")
    );
}
export default Modal;
|
#!/bin/bash
# GATK3 pre-processing pipeline for one sample against hg38:
# add read groups -> mark duplicates -> indel realignment -> base quality
# score recalibration (BQSR). Steps are chained with && so any failure
# stops the pipeline.
sampleName="SRR3083838"
picard="/fh/fast/sun_w/bin/picard.jar"
gatk="/fh/fast/sun_w/bin/GenomeAnalysisTK-3.6/GenomeAnalysisTK.jar"
gatkBundle="/fh/fast/sun_w/research/data/GATK_bundle/hg38"
projDir="/fh/fast/sun_w/research/Immuno/data/Hugo_2016"
javaTmpDir="/fh/fast/sun_w/tmp4java/"
reference="/fh/fast/sun_w/research/data/human/hg38/Homo_sapiens_assembly38.fasta"
# Load the java environment module.
ml java
# Add read groups, sort by coordinate, save as a bam file and index the bam file
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
-jar ${picard} AddOrReplaceReadGroups \
INPUT=${projDir}/sams/${sampleName}.sam \
OUTPUT=${projDir}/bams/${sampleName}_sorted_rg.bam \
SORT_ORDER=coordinate \
CREATE_INDEX=true \
ID=${sampleName}.LANE001 SM=${sampleName} LB=${sampleName} \
PL=ILLUMINA PU=${sampleName} &&
#
# Mark duplicated reads
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
-jar ${picard} MarkDuplicates \
I=${projDir}/bams/${sampleName}_sorted_rg.bam \
O=${projDir}/bams/${sampleName}_sorted_dedup.bam \
M=${projDir}/bams/${sampleName}_sorted_dedup_metric.txt \
ASSUME_SORT_ORDER=coordinate \
CREATE_INDEX=true &&
#
# RealignerTargetCreator: find intervals that need indel realignment
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
-jar ${gatk} \
-T RealignerTargetCreator \
-R ${reference} \
-I ${projDir}/bams/${sampleName}_sorted_dedup.bam \
-o ${projDir}/bams/${sampleName}_sorted_dedup.bam.intervals \
-known ${gatkBundle}/Homo_sapiens_assembly38.known_indels.vcf.gz \
-known ${gatkBundle}/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz &&
#
# Realign around the intervals computed above
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
-jar ${gatk} \
-T IndelRealigner \
-R ${reference} \
-I ${projDir}/bams/${sampleName}_sorted_dedup.bam \
-targetIntervals ${projDir}/bams/${sampleName}_sorted_dedup.bam.intervals \
-o ${projDir}/bams/${sampleName}_sorted_dedup_realigned.bam \
-known ${gatkBundle}/Homo_sapiens_assembly38.known_indels.vcf.gz \
-known ${gatkBundle}/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz \
--filter_bases_not_stored &&
#
# Build index
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
-jar ${picard} BuildBamIndex \
INPUT=${projDir}/bams/${sampleName}_sorted_dedup_realigned.bam &&
#
# Base quality recalibration: create table
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
-jar ${gatk} \
-T BaseRecalibrator \
-R ${reference} \
-knownSites ${gatkBundle}/dbsnp_146.hg38.vcf.gz \
-knownSites ${gatkBundle}/Homo_sapiens_assembly38.known_indels.vcf.gz \
-knownSites ${gatkBundle}/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz \
-I ${projDir}/bams/${sampleName}_sorted_dedup_realigned.bam \
-o ${projDir}/bams/${sampleName}_sorted_dedup_realigned_recal_table.txt &&
#
# Base quality recalibration: apply the table
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
-jar ${gatk} \
-T PrintReads \
-R ${reference} \
-I ${projDir}/bams/${sampleName}_sorted_dedup_realigned.bam \
-BQSR ${projDir}/bams/${sampleName}_sorted_dedup_realigned_recal_table.txt \
-o ${projDir}/bams/${sampleName}_sorted_dedup_realigned_recaled.bam &&
#
# Build index
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
-jar ${picard} BuildBamIndex \
INPUT=${projDir}/bams/${sampleName}_sorted_dedup_realigned_recaled.bam
|
<reponame>godjackhoo/fptu-fe
// Redux action types for the "Code Dao" article fetch lifecycle.
export const GET_CODEDAO_ARTICLE_SUCCESS = "GET_CODEDAO_ARTICLE_SUCCESS";
export const GET_CODEDAO_ARTICLE_FAILURE = "GET_CODEDAO_ARTICLE_FAILURE";
|
#!/bin/bash
# Smoke test for the distributed object store: PUT with/without a SHA-256
# digest header, GET, locate-by-digest, version listing and DELETE.
curl -v 10.29.2.2:12345/objects/test3 -XPUT -d"this is object test3"
echo -n "this is object test3" | openssl dgst -sha256 -binary | base64
curl -v 10.29.2.2:12345/objects/test3 -XPUT -d"this is object test3" -H "Digest: SHA-256=GYqqAdFPt+CScnUDc0/Gcu3kwcWmOADKNYpiZtdbgsM="
curl 10.29.2.1:12345/objects/test3
echo
echo -n "this is object test3 version 2" | openssl dgst -sha256 -binary | base64
curl -v 10.29.2.1:12345/objects/test3 -XPUT -d"this is object test3 version 2" -H "Digest: SHA-256=cAPvsxZe1PR54zIESQy0BaxC1pYJIvaHSF3qEOZYYIo="
curl 10.29.2.1:12345/objects/test3
echo
curl 10.29.2.1:12345/objects/test3
echo
# Locate the v1 blob by its digest ('/' URL-encoded as %2F).
# FIX: the URL was missing the trailing "M" of the digest printed above,
# so this locate could never match the stored object.
curl 10.29.2.1:12345/locate/GYqqAdFPt+CScnUDc0%2FGcu3kwcWmOADKNYpiZtdbgsM=
echo
curl 10.29.2.1:12345/locate/cAPvsxZe1PR54zIESQy0BaxC1pYJIvaHSF3qEOZYYIo=
echo
curl 10.29.2.1:12345/versions/test3
echo
curl 10.29.2.1:12345/objects/test3?version=1
echo
curl -v 10.29.2.1:12345/objects/test3 -XDELETE
curl -v 10.29.2.1:12345/objects/test3
echo
curl 10.29.2.1:12345/versions/test3
echo
curl 10.29.2.1:12345/objects/test3?version=1
echo
curl 10.29.2.1:12345/objects/test3?version=2
echo
|
<reponame>sgururajan/hyperledger-tictactoe<gh_stars>0
/**
 * Application-level configuration shape.
 */
export interface AppConfig {
    // Display name of the product.
    productName:string
    // Map of service key -> service address (presumably endpoint URLs — confirm).
    services:{
        [apiKey:string]:string
    }
}
|
<?php
/**
 * Return the sum of two numbers.
 */
function find_sum($num1, $num2) {
    $total = $num1 + $num2;
    return $total;
}

$a = 5;
$b = 10;
$sum = find_sum($a, $b);
// Interpolated output, identical to the original: "The sum of 5 and 10 is: 15"
echo "The sum of $a and $b is: $sum";
?>
<gh_stars>1-10
/**
 * The well-known name of the cookie used to save the current preview session's
 * ref (shared contract with the Prismic toolbar — do not rename).
 */
export const preview = 'io.prismic.preview'
|
import json
import os.path
import re
from typing import Any, Dict, List, Optional, Set, Union
from urllib.parse import quote
from urllib.request import urlopen
import yaml
import pkg_resources
# IDS (ideographic description sequence) prefixes and koseki glyph names.
_re_ids = re.compile(r"u2ff[\dab]-")
_re_koseki = re.compile(r"^koseki-\d{6}$")


def isKanji(name: str):
    """Heuristically decide whether a glyph name denotes a kanji.

    IDS-composed names always count; UCS names are checked against the
    unified/compatibility ideograph ranges; koseki-9xxxxx is the non-kanji
    block; anything else defaults to True.
    """
    if _re_ids.match(name):
        return True
    prefix = name.split("-")[0]
    if isUcs(prefix):
        return isTogoKanji(prefix) or isGokanKanji(prefix)
    if _re_koseki.match(name):
        # koseki-9xxxxx (8th character "9") marks non-kanji entries.
        return name[7] != "9"
    return True
_re_togo_f = re.compile(
    r"""^u(
    4d[c-f][0-9a-f]| # Ext A
    9ff[d-f]| # URO
    2a6d[ef]|2a6[ef][0-9a-f]| # Ext B
    2b73[5-9a-f]| # Ext C
    2b81[ef]| # Ext D
    2cea[2-9a-f]| # Ext E
    2ebe[1-9a-f]|2ebf[0-9a-f]| # Ext F
    3134[b-f]|313[5-9a-f][0-9a-f] # Ext G
    )$""",
    re.X)
_re_togo_t1 = re.compile(
    r"""^u(
    3[4-9a-f]| # Ext A
    [4-9][0-9a-f]| # URO
    2[0-9a-d][0-9a-f]| # Ext B, C, D, E, F
    2e[0-9ab]| # Ext F
    30[0-9a-f]|31[0-3] # Ext G
    )[\da-f]{2}$""",
    re.X)
_re_togo_t2 = re.compile(r"^ufa(0[ef]|1[134f]|2[134789])$")


def isTogoKanji(name: str):
    """True when `name` ("uXXXX") is a unified (togo) CJK ideograph.

    _re_togo_f carves out codepoints just past each block's end; _re_togo_t1
    whitelists the unified ranges and _re_togo_t2 the unified codepoints that
    live inside the compatibility block.
    """
    if _re_togo_f.match(name):
        return False
    return bool(_re_togo_t1.match(name) or _re_togo_t2.match(name))


_re_gokan_f = re.compile(r"^ufa(6[ef]|d[a-f]|[ef][\da-f])$")
_re_gokan_t1 = re.compile(r"^uf[9a][\da-f]{2}$")
_re_gokan_t2 = re.compile(r"^u2f([89][\da-f]{2}|a0[\da-f]|a1[\da-d])$")


def isGokanKanji(name: str):
    """True when `name` ("uXXXX") is a compatibility (gokan) ideograph.

    Excludes the unassigned tail of the uFAxx block and the unified
    exceptions matched by _re_togo_t2.
    """
    if _re_gokan_f.match(name) or _re_togo_t2.match(name):
        return False
    return bool(_re_gokan_t1.match(name) or _re_gokan_t2.match(name))
# "uXXXX" – a UCS codepoint glyph name with 4-6 hex digits.
_re_ucs = re.compile(r"^u[\da-f]{4,6}$")
# Region suffixes usable in glyph names (g/t/tv/v/vv/h/m/i/s/k/kp/kv/u/uk/us/j/ja/js/jv).
RE_REGIONS = r"(?:[gtv]v?|[hmis]|k[pv]?|u[ks]?|j[asv]?)"
def isUcs(name: str):
    # Returns the match object (truthy/falsy), not a strict bool —
    # callers rely only on truthiness.
    return _re_ucs.match(name)
def isYoko(x0: int, y0: int, x1: int, y1: int) -> bool:
    """Return True when the segment (x0, y0)->(x1, y1) is "yoko"
    (horizontal-ish: presumably a horizontal stroke — the direction matters,
    since dy must lie strictly inside (-dx, dx), requiring dx > 0).
    """
    dx = x1 - x0
    dy = y1 - y0
    if dy == 0:
        # Exactly level: yoko unless the segment has zero length.
        return dx != 0
    return -dx < dy < dx
# Splits the wiki edit page on its <textarea> tags; the group source sits
# between the opening and closing tag.
_re_textarea = re.compile(r"</?textarea(?: [^>]*)?>")
# GlyphWiki link syntax: [[optional label glyphname]] — captures the glyph name.
_re_gwlink = re.compile(r"\[\[(?:[^]]+\s)?([0-9a-z_-]+(?:@\d+)?)\]\]")
def getGlyphsInGroup(groupname: str) -> List[str]:
    """Fetch a GlyphWiki Group page (network I/O) and return the glyph names
    linked from it, in page order."""
    url = "https://glyphwiki.org/wiki/Group:{}?action=edit".format(
        quote(groupname.encode("utf-8")))
    f = urlopen(url, timeout=60)
    data = f.read().decode("utf-8")
    f.close()
    # Content of the first <textarea> is the wiki source of the group page.
    s = _re_textarea.split(data)[1]
    return [m.group(1) for m in _re_gwlink.finditer(s)]
class GWGroupLazyLoader:
    """Lazily fetches a GlyphWiki group's glyph list on first access.

    The network request only happens when get_data() is first called; the
    result is cached on `self.data` as a list (page order) or, when
    `isset` is true, as a set for membership tests.
    """
    def __init__(self, groupname: str, isset: bool = False):
        # groupname: GlyphWiki group page to load.
        self.groupname = groupname
        # isset: store the glyphs as a set instead of an ordered list.
        self.isset = isset
        self.data: Union[List[str], Set[str]]
    def load(self):
        # Performs the actual fetch (network I/O).
        glyphs = getGlyphsInGroup(self.groupname)
        if self.isset:
            self.data = set(glyphs)
        else:
            self.data = glyphs
    def get_data(self):
        # `data` is only assigned by load(), so hasattr doubles as a
        # "not yet loaded" flag.
        if not hasattr(self, "data"):
            self.load()
        return self.data
def load_package_data(name: str) -> Any:
    """Load a data file bundled inside the `gwv` package.

    The extension selects the decoder: .json, .yaml/.yml (safe_load) or raw
    bytes for .txt. Any other extension raises ValueError.
    """
    ext = os.path.splitext(name)[1]
    with pkg_resources.resource_stream("gwv", name) as f:
        if ext == ".json":
            return json.load(f)
        if ext in (".yaml", ".yml"):
            return yaml.safe_load(f)
        if ext == ".txt":
            return f.read()
    raise ValueError("Unknown data file extension: {!r}".format(ext))
class CJKSources:
    """Lazy accessor for the bundled CJK source data (cjksrc.json).

    Each record is a fixed-width row indexed by the COLUMN_* constants; rows
    are keyed by "uXXXX" codepoint names and loaded on first get().
    """

    # Column positions of the per-region source references plus the
    # compatibility-variant column.
    COLUMN_G = 0
    COLUMN_T = 1
    COLUMN_J = 2
    COLUMN_K = 3
    COLUMN_KP = 4
    COLUMN_V = 5
    COLUMN_H = 6
    COLUMN_M = 7
    COLUMN_U = 8
    COLUMN_S = 9
    COLUMN_UK = 10
    COLUMN_COMPATIBILITY_VARIANT = 11

    # Glyph-name region suffix -> column index.
    region2index = {
        "g": COLUMN_G,
        "t": COLUMN_T,
        "j": COLUMN_J,
        "k": COLUMN_K,
        "kp": COLUMN_KP,
        "v": COLUMN_V,
        "h": COLUMN_H,
        "m": COLUMN_M,
        "u": COLUMN_U,
        "s": COLUMN_S,
        "uk": COLUMN_UK,
    }

    def __init__(self):
        # Populated lazily by load(); absence of the attribute means
        # "not loaded yet".
        self.data: Dict[str, List[Optional[str]]]

    def load(self):
        # Reads the packaged JSON data file.
        self.data = load_package_data("data/3rd/cjksrc.json")

    def get(self, ucs: str, column: int) -> Optional[str]:
        """Return the source reference for `ucs` at `column`, or None when
        the codepoint has no record."""
        if not hasattr(self, "data"):
            self.load()
        row = self.data.get(ucs)
        return None if row is None else row[column]


cjk_sources = CJKSources()
|
def get_output_volume_size(model):
    """Return the number of scalars in the first layer's output volume.

    Multiplies the output-shape dimensions of model.layers[0], skipping the
    leading entry (presumably the batch dimension — confirm against the
    framework's output_shape convention).
    """
    dims = model.layers[0].output_shape[1:]
    size = 1
    for extent in dims:
        size *= extent
    return size
|
import Proxy from './Proxy'
// HTTP proxy for the Tickets API (base endpoint: api/Tickets).
class TicketProxy extends Proxy {
  /**
   * The constructor for the TicketProxy.
   *
   * @param {Object} parameters The query parameters.
   */
  constructor (parameters = {}) {
    super('api/Tickets', parameters)
  }
  /**
   * Method used to get all unassigned tickets.
   *
   * @returns {Promise} The result in a promise.
   */
  getUnassignedTickets () {
    return this.submit('get', `${this.endpoint}/UnassignedTickets`)
  }
  /**
   * Method used to get all unsolved tickets assigned to the solver.
   *
   * @returns {Promise} The result in a promise.
   */
  getSolverTickets () {
    return this.submit('get', `${this.endpoint}/AssignedUnsolved`)
  }
  /**
   * Method used to assign a ticket to the solver.
   *
   * @param {String} id The ticket id.
   *
   * @returns {Promise} The result in a promise.
   */
  assignTicket (id) {
    return this.submit('put', `${this.endpoint}/AssignTicket/${id}`)
  }
}
export default TicketProxy
|
// Check for an own property "name" (the prototype chain is ignored).
let obj = {
    name: "John",
    age: 30
};
let hasKey = Object.prototype.hasOwnProperty.call(obj, 'name');
console.log(hasKey);
|
# Usage: script PROGRAM TIMES WAIT RANGE
# Repeatedly (TIMES iterations) sleeps a random interval and then force-kills
# every process matching PROGRAM.
program="$1"
times=$2
wait=$3
range=$4
function justDoIt() {
for ((n=0;n<$times;n++));
do
# sleep for a random duration, between $wait and ($wait + $range) minutes
# (formula: random 0..range*60-1 seconds, plus wait*60, plus 1)
sleep_time=$(((RANDOM % (range * 60)) + ($wait * 60) + 1));
# echo "sleeping for $sleep_time"
sleep $sleep_time
# find and shut off anything that matches $program
pkill -9 "$program"
done
}
# Run the kill loop in the background so the script returns immediately.
justDoIt &
echo "F***ing $program keeps crashing every $wait to $(($wait+$range)) minutes! trolologlodyte..."
package com.badlogic.gdx.scenes.scene2d.ui;
/**
 * Layout binding for {@link Container} actors: builds a Container sized
 * according to the layout definition supplied by the base class.
 */
public class ContainerLayout extends ActorLayout<Container> {
    public ContainerLayout(){
        super(Container.class);
    }
    @Override
    protected Container createActor(Skin skin, StageLayoutListener listener) {
        // Create the actor and apply the dimensions from the layout spec.
        Container actor = new Container();
        actor.setSize(layout.width, layout.height);
        return actor;
    }
}
|
def format_phone_number(phone_num):
    """Format a 10-digit US phone number as ``(AAA) BBB-CCCC``.

    Non-digit characters (spaces, dashes, dots, parentheses) are stripped
    first, so separator-laden inputs like ``"123-456-7890"`` are accepted.
    The original version silently produced malformed output for inputs that
    were not exactly 10 bare digits; this now fails loudly instead.

    Args:
        phone_num (str): The phone number; must contain exactly 10 digits.

    Returns:
        str: The formatted number.

    Raises:
        ValueError: If the input does not contain exactly 10 digits.
    """
    digits = "".join(ch for ch in phone_num if ch.isdigit())
    if len(digits) != 10:
        raise ValueError(f"expected 10 digits, got {len(digits)}: {phone_num!r}")
    return f"({digits[:3]}) {digits[3:6]}-{digits[6:]}"

number = "1234567890"
formatted_number = format_phone_number(number)
print(formatted_number)
|
def sum_two_numbers(a, b):
    """Return the sum of ``a`` and ``b``, printing it as a side effect."""
    total = a + b
    print(total)  # echo the result before handing it back
    return total
|
<gh_stars>1-10
import * as React from 'react';
import block from 'bem-cn';
import { bind } from 'decko';
import { ISortInfo } from 'shared/types/ui';
import {
IAsset, IAssetColumnData, IAssetNonColumnData, IAssetColumns, IAssetsInfoMap, ICurrencyPair,
} from 'shared/types/models';
import { Table } from 'shared/view/components';
import { Icon, Tooltip, CoinCell } from 'shared/view/elements';
import { ITranslateProps, i18nConnect } from 'services/i18n';
import { formatAssetWithCommas } from 'shared/helpers/number';
import { getTableRowHoverColor } from 'shared/view/styles/themes';
import { transformAssetName } from 'shared/helpers/converters';
import { makeActionButtons } from './makeActionButtons/makeActionButtons';
import './AssetsTable.scss';
/** Props supplied by the parent screen (everything except i18n). */
interface IOwnProps {
  /** Asset records rendered as table rows. */
  assets: IAsset[];
  /** Per-asset metadata keyed by asset code (deposit/withdraw flags, formatting). */
  assetsInfo: IAssetsInfoMap;
  /** Currency code used for the converted-total column header and values. */
  conversionCurrency: string;
  /** Invoked when a trade-menu entry (currency pair) is chosen for an asset. */
  onTradeMenuEntrySelect(x: ICurrencyPair): void;
  /** Returns the currency pairs tradable for the given asset code. */
  getAssetCurrencyPairs(asset: string): ICurrencyPair[];
  onDepositButtonClick(code: string): void;
  onWithdrawButtonClick(code: string): void;
  /** Handler for the Simplex (fiat purchase) button — assumed from naming; confirm. */
  onSimplexButtonClick(code: string): void;
}
const b = block('assets-table');
type IProps = IOwnProps & ITranslateProps;
const BalanceTable = Table as new () => Table<IAssetColumnData, IAssetNonColumnData, 'actions'>;
/**
 * Sortable balances table: one row per asset with total/available/in-order
 * amounts, a converted-total column, and per-row action buttons
 * (Simplex / deposit / withdraw / trade).
 */
class AssetsTable extends React.PureComponent<IProps> {
  // Action-button renderers are built once from the initial props;
  // they close over this.props via makeActionButtons.
  private actionButtons = makeActionButtons(this.props);
  // Column definitions: title factory + optional custom cell renderer.
  // Falsy amounts (undefined/null/0) are rendered as 0 via `|| 0`.
  private columns: IAssetColumns = {
    code: {
      title: () => 'Coin',
      renderCell: ({ code, iconSrc }: IAsset) => <CoinCell code={code} iconSrc={iconSrc} />,
    },
    name: {
      title: () => 'Coin name',
    },
    total: {
      title: () => 'Total balance',
      renderCell: ({ total, code }: IAsset) => {
        return <span>{formatAssetWithCommas(code, total || 0, this.props.assetsInfo)}</span>;
      },
    },
    available: {
      title: () => 'Available balance',
      renderCell: ({ available, code }: IAsset) => {
        return <span>{formatAssetWithCommas(code, available || 0, this.props.assetsInfo)}</span>;
      },
    },
    inOrder: {
      title: () => 'In order',
      renderCell: ({ inOrder, code }: IAsset) => {
        return <span>{formatAssetWithCommas(code, inOrder || 0, this.props.assetsInfo)}</span>;
      },
    },
    convertedTotal: {
      title: () => `${transformAssetName(this.props.conversionCurrency)} value`,
      renderCell: ({ convertedTotal }: IAsset) => {
        const { conversionCurrency } = this.props;
        // Em dash (\u2014) marks "no conversion available".
        return convertedTotal === null
          ? '\u2014'
          : <span>{formatAssetWithCommas(conversionCurrency, convertedTotal, this.props.assetsInfo)}</span >;
      },
    },
  };
  // Non-sortable trailing column holding the action buttons.
  private extraColumns = {
    actions: {
      title: () => '',
      isSortable: false,
      width: 19,
      renderCell: this.renderActions,
    },
  };
  // Initial sort: ascending by asset code.
  private sortInfo: ISortInfo<IAssetColumnData> = {
    column: 'code',
    kind: 'simple',
    direction: 'ascend',
  };
  // Wrapper element ref, passed to the trade button renderer
  // (presumably for dropdown positioning — confirm in makeActionButtons).
  private table: HTMLDivElement;
  public render() {
    const { assets } = this.props;
    return (
      <div className={b()} ref={this.initTableRef}>
        <BalanceTable
          columns={this.columns}
          records={assets}
          extraColumns={this.extraColumns}
          sortInfo={this.sortInfo}
          getRowHoverColor={getTableRowHoverColor}
          minWidth={94}
          recordIDColumn="code"
        />
      </div>
    );
  }
  @bind
  private initTableRef(x: HTMLDivElement) {
    this.table = x;
  }
  // Renders the per-row action cell; shows a warning tooltip when the
  // asset's deposit or withdrawal capability is disabled.
  @bind
  private renderActions(record: IAsset) {
    const { assetsInfo } = this.props;
    const assetInfo = assetsInfo[record.code];
    // Disabled when either capability is off; falsy if info is missing.
    const areActionsDisabled = assetInfo && (!assetInfo.canDeposit || !assetInfo.canWithdrawal);
    const tooltipMessage = (() => {
      let message = '';
      if (areActionsDisabled) {
        message += 'Deposit or/and withdraw temporary disabled';
      }
      return message;
    })();
    return (
      <div className={b('action-column')()}>
        <div>
          {areActionsDisabled &&
            <Tooltip text={tooltipMessage} position="bottom">
              <Icon className={b('warning-icon')()} src={require('./img/error-inline.svg')} />
            </Tooltip>
          }
        </div>
        <div className={b('action-buttons')()}>
          <div className={b('action-button-simplex')()}>
            {this.actionButtons.renderSimplex(record)}
          </div>
          <div className={b('action-button')()}>
            {this.actionButtons.renderDeposit(record)}
          </div>
          <div className={b('action-button')()}>
            {this.actionButtons.renderWithdraw(record)}
          </div>
          <div className={b('action-button')()}>
            {this.actionButtons.renderTrade(record, this.table)}
          </div>
        </div>
      </div>
    );
  }
}
export default (
i18nConnect(
AssetsTable,
)
);
|
import java.util.Scanner;
/**
 * HackerRank "Funny String": a string is funny when, for every position, the
 * absolute difference between adjacent code points equals the corresponding
 * difference in the reversed string.
 */
public class FunnyString
{
    public static void main(String[] args)
    {
        // try-with-resources closes the scanner even if parsing throws
        try (Scanner stdin = new Scanner(System.in))
        {
            int tests = Integer.parseInt(stdin.nextLine());
            while (tests-- > 0)
            {
                System.out.println(isFunny(stdin.nextLine()) ? "Funny" : "Not Funny");
            }
        }
    }
    /** Returns true when adjacent code-point gaps mirror those of the reversed string. */
    private static boolean isFunny(String s)
    {
        int len = s.length();
        for (int idx = 0; idx + 1 < len; idx++)
        {
            // Gap between neighbors reading forward...
            int forward = Math.abs(Character.codePointAt(s, idx + 1) - Character.codePointAt(s, idx));
            // ...must equal the gap at the mirrored position reading backward.
            int backward = Math.abs(Character.codePointAt(s, len - idx - 2) - Character.codePointAt(s, len - idx - 1));
            if (forward != backward)
            {
                return false;
            }
        }
        return true;
    }
}
|
TERMUX_PKG_HOMEPAGE=http://jodies.de/ipcalc
TERMUX_PKG_DESCRIPTION="Calculates IP broadcast, network, Cisco wildcard mask, and host ranges"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_DEPENDS="perl"
TERMUX_PKG_VERSION=0.41
TERMUX_PKG_SRCURL=http://jodies.de/ipcalc-archive/ipcalc-${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=dda9c571ce3369e5b6b06e92790434b54bec1f2b03f1c9df054c0988aa4e2e8a
# Install step: upstream ships a ready-to-run perl script, so installation
# is just copying it into the prefix's bin directory (no build needed).
termux_step_make_install() {
	cp $TERMUX_PKG_SRCDIR/ipcalc $TERMUX_PREFIX/bin/
}
|
<reponame>cryptofriend/incognito-chain
package blockchain
import (
"time"
"github.com/incognitochain/incognito-chain/common"
"github.com/incognitochain/incognito-chain/metadata"
)
// BFTBlockInterface abstracts a block handled by the BFT consensus layer.
// No methods are currently required; the JSON hook is kept for reference.
type BFTBlockInterface interface {
	// UnmarshalJSON(data []byte) error
}
// ShardToBeaconPool buffers shard-to-beacon blocks, keyed by shard ID,
// until the beacon chain consumes them.
type ShardToBeaconPool interface {
	RemoveBlock(map[byte]uint64)
	//GetFinalBlock() map[byte][]ShardToBeaconBlock
	AddShardToBeaconBlock(*ShardToBeaconBlock) (uint64, uint64, error)
	//ValidateShardToBeaconBlock(ShardToBeaconBlock) error
	GetValidBlockHash() map[byte][]common.Hash
	GetValidBlock(map[byte]uint64) map[byte][]*ShardToBeaconBlock
	GetValidBlockHeight() map[byte][]uint64
	GetLatestValidPendingBlockHeight() map[byte]uint64
	GetBlockByHeight(shardID byte, height uint64) *ShardToBeaconBlock
	SetShardState(map[byte]uint64)
	GetAllBlockHeight() map[byte][]uint64
}
// CrossShardPool buffers cross-shard blocks per source shard and exposes
// height-based lookup and pruning.
type CrossShardPool interface {
	AddCrossShardBlock(*CrossShardBlock) (map[byte]uint64, byte, error)
	GetValidBlock(map[byte]uint64) map[byte][]*CrossShardBlock
	GetLatestValidBlockHeight() map[byte]uint64
	GetValidBlockHeight() map[byte][]uint64
	GetBlockByHeight(_shardID byte, height uint64) *CrossShardBlock
	RemoveBlockByHeight(map[byte]uint64)
	UpdatePool() map[byte]uint64
	GetAllBlockHeight() map[byte][]uint64
}
// ShardPool buffers blocks for a single shard chain.
type ShardPool interface {
	RemoveBlock(uint64)
	AddShardBlock(block *ShardBlock) error
	GetValidBlockHash() []common.Hash
	GetValidBlock() []*ShardBlock
	GetValidBlockHeight() []uint64
	GetLatestValidBlockHeight() uint64
	SetShardState(uint64)
	GetAllBlockHeight() []uint64
	// Start runs the pool's background processing until the channel closes.
	Start(chan struct{})
}
// BeaconPool buffers blocks for the beacon chain; mirrors ShardPool.
type BeaconPool interface {
	RemoveBlock(uint64)
	AddBeaconBlock(block *BeaconBlock) error
	GetValidBlockHash() []common.Hash
	GetValidBlock() []*BeaconBlock
	GetValidBlockHeight() []uint64
	GetLatestValidBlockHeight() uint64
	SetBeaconState(uint64)
	GetAllBlockHeight() []uint64
	Start(chan struct{})
}
// TxPool is the mempool abstraction consumed by block production.
type TxPool interface {
	// LastUpdated returns the last time a transaction was added to or
	// removed from the source pool.
	LastUpdated() time.Time
	// MiningDescs returns a slice of mining descriptors for all the
	// transactions in the source pool.
	MiningDescs() []*metadata.TxDesc
	// HaveTransaction returns whether or not the passed transaction hash
	// exists in the source pool.
	HaveTransaction(hash *common.Hash) bool
	// RemoveTx remove tx from tx resource
	RemoveTx(txs []metadata.Transaction, isInBlock bool)
	RemoveCandidateList([]string)
	RemoveTokenIDList([]string)
	EmptyPool() bool
	MaybeAcceptTransactionForBlockProducing(metadata.Transaction) (*metadata.TxDesc, error)
	ValidateTxList(txs []metadata.Transaction) error
	//CheckTransactionFee
	// CheckTransactionFee(tx metadata.Transaction) (uint64, error)
	// Check tx validate by it self
	// ValidateTxByItSelf(tx metadata.Transaction) bool
}
// FeeEstimator records accepted blocks to refine fee estimates.
type FeeEstimator interface {
	RegisterBlock(block *ShardBlock) error
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.