text stringlengths 1 1.05M |
|---|
// Return a new array containing each element of `arr` multiplied by two.
// The input is not modified. (Array.from mirrors the original for-of walk:
// it also works on any iterable and treats holes as undefined.)
function doubleArrayElements(arr) {
  return Array.from(arr, (element) => element * 2);
}

let result = doubleArrayElements([3, 4, 5]);
console.log(result); // [6, 8, 10]
package cn.zhangjingyao.service.demo;
import cn.zhangjingyao.entity.PageData;
import com.github.pagehelper.PageInfo;
import java.util.List;
/**
 * Class name: DemoService
 * Created: 2019-04-11
 *
 * Generic CRUD service contract for the demo module. All operations work on
 * loosely-typed PageData maps rather than concrete entity classes.
 *
 * @author
 */
// NOTE(review): @Service on an interface does not register a bean with Spring
// component scanning; the stereotype normally belongs on the implementing
// class — confirm this is intentional.
@org.springframework.stereotype.Service
public interface DemoService {
    /**
     * Create a single record.
     * @param pd PageData
     * @throws Exception Exception
     */
    public void save(PageData pd) throws Exception;

    /**
     * Create records in batch.
     * @param list PageData List
     * @throws Exception Exception
     */
    public void save(List<PageData> list) throws Exception;

    /**
     * Delete a single record.
     * @param pd PageData
     * @throws Exception Exception
     */
    public void delete(PageData pd) throws Exception;

    /**
     * Delete records in batch.
     * @param list PageData List
     * @throws Exception Exception
     */
    public void delete(List<PageData> list) throws Exception;

    /**
     * Update a single record.
     * @param pd PageData
     * @throws Exception Exception
     */
    public void edit(PageData pd) throws Exception;

    /**
     * Update records in batch.
     * @param list PageData List
     * @throws Exception Exception
     */
    public void edit(List<PageData> list) throws Exception;

    /**
     * Paged query.
     * @param pd PageData
     * @return PageInfo
     * @throws Exception Exception
     */
    public PageInfo<PageData> listPage(PageData pd) throws Exception;

    /**
     * Query all records (no paging).
     * @param pd PageData
     * @return PageData List
     * @throws Exception Exception
     */
    public List<PageData> listAll(PageData pd) throws Exception;

    /**
     * Fetch a single record by its id.
     * @param pd PageData
     * @return PageData
     * @throws Exception Exception
     */
    public PageData findById(PageData pd) throws Exception;

    /**
     * Delete records in batch by an array of ids.
     * @param arrayDataIds array of ids
     * @throws Exception Exception
     */
    public void deleteAll(String[] arrayDataIds) throws Exception;
}
|
#!/bin/bash
# Install dependencies and launch the API server in development mode.
# -e: abort on the first failing command.
set -e
cd /api-server
# List the directory contents (useful in CI logs for debugging).
ls -la
echo 'try to run npm install'
npm install
npm run start:dev
module SampleGemHelper
  # Rails integration hook: registers an initializer that runs during Rails
  # boot to expose the gem's helpers to views.
  class Railtie < Rails::Railtie
    initializer "SampleGemHelper.view_helpers" do
      # NOTE(review): this mixes the SampleGem module (not SampleGemHelper)
      # into ActionView::Base — confirm SampleGem is the intended helper
      # module. Also note modern Rails prefers
      # ActiveSupport.on_load(:action_view) to defer the include until
      # ActionView is actually loaded.
      ActionView::Base.send :include, SampleGem
    end
  end
end
|
package testingdock
import (
"context"
"sync"
"testing"
"time"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/filters"
"github.com/docker/docker/client"
)
// NetworkOpts is used when creating a new network.
type NetworkOpts struct {
	// Name is the docker network name to create.
	Name string
}
// Network is a struct representing a docker network configuration.
// This should usually not be created directly but via the NewNetwork
// function or in the Suite.
type Network struct {
	t        testing.TB
	cli      *client.Client // docker API object to talk to the docker daemon
	id, name string         // docker network ID (set by start()) and name
	gateway  string         // gateway IP, read back after network creation
	cancel   func()         // removes the network; only set once start() succeeds
	children []*Container   // containers attached to (and torn down with) this network
	closed   bool           // set by close(); makes cancel() a no-op afterwards
	labels   map[string]string // testingdock ownership labels used for cleanup
}
// newNetwork creates a new docker network configuration with the given
// options. No docker resources are allocated until start() is called.
func newNetwork(t testing.TB, c *client.Client, opts NetworkOpts) *Network {
	return &Network{
		t:      t,
		cli:    c,
		name:   opts.Name,
		labels: createTestingLabel(),
	}
}
// start creates the actual docker network and also starts the containers
// that are part of the network. Any failure is reported via n.t.Fatalf.
func (n *Network) start(ctx context.Context) {
	// Remove leftovers of a previous run (same-named network + its containers).
	n.initialCleanup(ctx)
	res, err := n.cli.NetworkCreate(ctx, n.name, types.NetworkCreate{
		Labels: n.labels,
	})
	if err != nil {
		n.t.Fatalf("testingdock: network creation failure: %s", err.Error())
	}
	n.id = res.ID
	// cancel removes the network again; no-op once close() has already run.
	n.cancel = func() {
		if n.closed {
			return
		}
		if err := n.cli.NetworkRemove(ctx, n.id); err != nil {
			n.t.Fatalf("testingdock: network removal failure: %s", err.Error())
		}
		printf("(cancel) %-25s (%s) - network removed", n.name, n.id)
	}
	printf("(setup ) %-25s (%s) - network created", n.name, n.id)
	// Read back the gateway IP docker assigned to this network.
	ni, err := n.cli.NetworkInspect(ctx, n.id, types.NetworkInspectOptions{
		Verbose: false,
	})
	if err != nil {
		n.cancel()
		n.t.Fatalf("testingdock: network inspect failure: %s", err.Error())
	}
	n.gateway = ni.IPAM.Config[0].Gateway
	printf("(setup ) %-25s (%s) - network got gateway ip: %s", n.name, n.id, n.gateway)
	// start child containers, either one after another or all at once
	if SpawnSequential {
		for _, cont := range n.children {
			cont.Start(ctx)
		}
	} else {
		printf("(setup ) %-25s (%s) - network is spawning %d child containers in parallel", n.name, n.id, len(n.children))
		var wg sync.WaitGroup
		wg.Add(len(n.children))
		for _, cont := range n.children {
			go func(cont *Container) {
				defer wg.Done()
				cont.Start(ctx)
			}(cont)
		}
		wg.Wait()
	}
}
// initialCleanup removes any pre-existing network with the same name,
// together with all containers attached to it, so start() begins from a
// clean slate. It fails the test if a matching network or container exists
// that was not created by testingdock (ownership is checked via labels).
func (n *Network) initialCleanup(ctx context.Context) {
	networkListArgs := filters.NewArgs()
	networkListArgs.Add("name", n.name)
	networks, err := n.cli.NetworkList(ctx, types.NetworkListOptions{Filters: networkListArgs})
	if err != nil {
		n.t.Fatalf("testingdock: network listing failure: %s", err.Error())
	}
	if len(networks) == 0 {
		return
	}
	// The full container list does not depend on which network we inspect,
	// so fetch it once instead of once per matching network (was inside the
	// networks loop before).
	containers, err := n.cli.ContainerList(ctx, types.ContainerListOptions{All: true})
	if err != nil {
		n.t.Fatalf("testingdock: container list failure: %s", err.Error())
	}
	for _, nn := range networks {
		for _, cc := range containers {
			for _, nnn := range cc.NetworkSettings.Networks {
				if nnn.NetworkID != nn.ID {
					continue
				}
				if !isOwnedByTestingdock(cc.Labels) {
					// fixed typo in message: "tesingdock" -> "testingdock"
					n.t.Fatalf("testingdock: container with ID %s already exists, but wasn't started by testingdock, aborting!", cc.ID)
				}
				// Stop with a grace period, then force-remove incl. volumes.
				timeout := time.Second * 10
				err = n.cli.ContainerStop(ctx, cc.ID, &timeout)
				if err != nil {
					n.t.Fatalf("testingdock: container stop failure: %s", err.Error())
				}
				if err = n.cli.ContainerRemove(ctx, cc.ID, types.ContainerRemoveOptions{
					RemoveVolumes: true,
					Force:         true,
				}); err != nil {
					n.t.Fatalf("testingdock: container removal failure: %s", err.Error())
				}
				printf("(setup ) %-25s (%s) - network endpoint removed: %s", nn.Name, nn.ID, cc.Names[0])
			}
		}
		if !isOwnedByTestingdock(nn.Labels) {
			// fixed typo in message: "tesingdock" -> "testingdock"
			n.t.Fatalf("testingdock: network with name %s already exists, but wasn't started by testingdock, aborting!", n.name)
		}
		if err = n.cli.NetworkRemove(ctx, nn.ID); err != nil {
			n.t.Fatalf("testingdock: network removal failure: %s", err.Error())
		}
		printf("(setup ) %-25s (%s) - network removed", nn.Name, nn.ID)
	}
}
// close closes the docker network. This also closes the
// children containers if any are set in the Network struct.
// Implements io.Closer interface.
func (n *Network) close() error {
	// Tear down children first, sequentially or in parallel depending on
	// the package-level SpawnSequential setting (mirrors start()).
	if SpawnSequential {
		for _, cont := range n.children {
			cont.close() // nolint: errcheck
		}
	} else {
		var wg sync.WaitGroup
		wg.Add(len(n.children))
		for _, cont := range n.children {
			go func(cont *Container) {
				defer wg.Done()
				cont.close() // nolint: errcheck
			}(cont)
		}
		wg.Wait()
	}
	// if the network failed to start n.cancel will not be set
	if n.cancel != nil {
		n.cancel()
	}
	// Set the flag only after cancel() has run, so cancel's n.closed guard
	// still observes the pre-close state here.
	n.closed = true
	return nil
}
// remove tears down every container in the network concurrently and waits
// until all of them have finished. Child errors are intentionally ignored.
func (n *Network) remove() error {
	var wg sync.WaitGroup
	for _, child := range n.children {
		wg.Add(1)
		go func(c *Container) {
			defer wg.Done()
			c.remove() // nolint: errcheck
		}(child)
	}
	wg.Wait()
	return nil
}
// After adds a child container to the current network configuration.
// These containers then kind of "depend" on the network and will
// be closed when the network closes.
func (n *Network) After(c *Container) {
	// Link both directions: the container records its network, the network
	// tracks the container as a child for later start/close/remove.
	c.network = n
	n.children = append(n.children, c)
}
// reset resets every child container of the network and logs the total time
// the operation took.
func (n *Network) reset(ctx context.Context) {
	now := time.Now()
	for _, c := range n.children {
		c.reset(ctx)
	}
	// fixed log grammar: "reseted" -> "reset"
	printf("(reset ) %-25s (%s) - network reset in %s", n.name, n.id, time.Since(now))
}
// ID returns the docker network ID (empty until start() has run).
func (n *Network) ID() string {
	return n.id
}
|
#include <cstdint>
namespace game {
// Item identifier. NOTE(review): the original was an empty placeholder
// struct, which made the static_assert below fail (an empty struct occupies
// 1 byte, giving sizeof(AmmoData) == 12, not 0x18). The placeholder is sized
// here as two 64-bit words so the documented 0x18-byte layout holds —
// confirm against the game's real ItemID definition.
struct ItemID {
    std::uint64_t raw[2]{};
};

// Ammunition bookkeeping for one item type: how much is in reserve
// (available) and how much is currently equipped.
class AmmoData {
public:
    AmmoData(ItemID id, int32_t available, int32_t equipped)
        : id(id), available(available), equipped(equipped) {}

    // Accessors for the identifier and the two counters.
    ItemID getId() const { return id; }
    void setId(ItemID newId) { id = newId; }
    int32_t getAvailable() const { return available; }
    void setAvailable(int32_t newAvailable) { available = newAvailable; }
    int32_t getEquipped() const { return equipped; }
    void setEquipped(int32_t newEquipped) { equipped = newEquipped; }

    // True while any reserve ammunition remains.
    bool isAmmoAvailable() const {
        return available > 0;
    }

    // Move `amount` rounds from reserve to equipped. Requests for more than
    // is available are ignored. Non-positive amounts are now rejected too:
    // previously a negative amount passed the `amount <= available` check
    // and corrupted both counters.
    void equipAmmo(int32_t amount) {
        if (amount > 0 && amount <= available) {
            equipped += amount;
            available -= amount;
        }
    }

private:
    ItemID id;
    int32_t available;
    int32_t equipped;
};

// Layout guard: the game expects this exact size (0x18 = 24 bytes).
static_assert(sizeof(AmmoData) == 0x18, "AmmoData size mismatch");
} // namespace game
<gh_stars>1-10
package app;
import java.util.ArrayList;
import java.util.List;
public class Choice {
    // One arranged hand split into three rows. The original comments were
    // mojibake (encoding-corrupted); the meanings below are inferred from
    // the field names — presumably a Chinese-poker style split, TODO confirm.
    public List<Card> head = new ArrayList<Card>(); // front row
    public List<Card> mid = new ArrayList<Card>(); // middle row
    public List<Card> end = new ArrayList<Card>(); // back row
    // Hand-type label for each row (package-private, set elsewhere).
    String headType, midType, endType;
}
|
<reponame>Matt-1985/choicely
import styled from "styled-components";
// Top-level page layout: a full-width three-row CSS grid with a fixed-height
// header (85px) and footer (100px) and the content row taking the remaining
// space. Children are centered in their cells and can be positioned via the
// named grid areas "header" / "content" / "footer".
export const PageContainer = styled.div`
  width: 100%;
  height: inherit;
  display: grid;
  grid-template-rows: 85px 1fr 100px;
  place-items: center;
  grid-template-areas:
    "header"
    "content"
    "footer";
`;
|
#!/bin/bash
# Publish the package to the npm registry (credentials come from the
# environment / ~/.npmrc).
npm publish
<html>
  <head>
    <title>Countdown Timer</title>
    <style>
      #timer {
        font-size: 30px;
      }
    </style>
  </head>
  <body>
    <div id="timer"> </div>
    <script>
      // Countdown target. NOTE(review): this date is already in the past —
      // presumably it should be updated per deployment.
      let endDate = new Date("12/31/2020 23:59:59");
      const countDownDate = endDate.getTime();

      // Render the remaining time once; returns false once expired.
      function renderCountdown() {
        let distance = countDownDate - new Date().getTime();
        // Check for expiry BEFORE formatting: the original rendered
        // negative numbers first and only then overwrote them with EXPIRED.
        if (distance < 0) {
          document.getElementById("timer").innerHTML = "EXPIRED";
          return false;
        }
        let days = Math.floor(distance / (1000 * 60 * 60 * 24));
        let hours = Math.floor(
          (distance % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60)
        );
        let minutes = Math.floor((distance % (1000 * 60 * 60)) / (1000 * 60));
        let seconds = Math.floor((distance % (1000 * 60)) / 1000);
        document.getElementById("timer").innerHTML =
          days + "d " + hours + "h " + minutes + "m " + seconds + "s ";
        return true;
      }

      // Render immediately (the original left the timer blank for the first
      // second), then update once per second until the countdown expires.
      renderCountdown();
      let x = setInterval(function() {
        if (!renderCountdown()) {
          clearInterval(x);
        }
      }, 1000);
    </script>
  </body>
</html>
<filename>03_rabbitmq/admin.py
#!/usr/bin/env python3
import pika
import sys
import datetime
from interactive_server import InteractiveServer
import common
from common import errprint
class Admin(InteractiveServer):
    """Interactive admin console: broadcasts typed lines on the 'info' key
    and logs every message seen on any two-word routing key."""

    def __init__(self):
        # matching 2-word keys filters out 'info'
        bindings = [('*.*', self._log)]
        super().__init__(self._handle_line, bindings)

    def _handle_line(self, line):
        # Publish each console line to the shared exchange under 'info'.
        self.channel.basic_publish(
            exchange=common.EXCHANGE, routing_key='info', body=line)

    def _log(self, ch, method, properties, body):
        # Timestamped dump of every observed message.
        timestamp = datetime.datetime.now()
        print('[{}] key: {}: {}'.format(
            timestamp, method.routing_key, body.decode()))


if __name__ == '__main__':
    Admin().start()
|
<reponame>muddessir/framework
#!/usr/bin/env python
# SPDX-License-Identifier: GPL-2.0+
# Copyright 2019 Google LLC
# Written by <NAME> <<EMAIL>>
"""Tests for cbfs_util
These create and read various CBFSs and compare the results with expected
values and with cbfstool
"""
import io
import os
import shutil
import struct
import tempfile
import unittest
from binman import cbfs_util
from binman.cbfs_util import CbfsWriter
from binman import elf
from patman import test_util
from patman import tools
# Fixed payloads used as file contents throughout the CBFS tests
U_BOOT_DATA = b'1234'
U_BOOT_DTB_DATA = b'udtb'
COMPRESS_DATA = b'compress xxxxxxxxxxxxxxxxxxxxxx data'
class TestCbfs(unittest.TestCase):
"""Test of cbfs_util classes"""
#pylint: disable=W0212
@classmethod
def setUpClass(cls):
# Create a temporary directory for test files
cls._indir = tempfile.mkdtemp(prefix='cbfs_util.')
tools.SetInputDirs([cls._indir])
# Set up some useful data files
TestCbfs._make_input_file('u-boot.bin', U_BOOT_DATA)
TestCbfs._make_input_file('u-boot.dtb', U_BOOT_DTB_DATA)
TestCbfs._make_input_file('compress', COMPRESS_DATA)
# Set up a temporary output directory, used by the tools library when
# compressing files
tools.PrepareOutputDir(None)
cls.have_cbfstool = True
try:
tools.Run('which', 'cbfstool')
except:
cls.have_cbfstool = False
cls.have_lz4 = True
try:
tools.Run('lz4', '--no-frame-crc', '-c',
tools.GetInputFilename('u-boot.bin'), binary=True)
except:
cls.have_lz4 = False
    @classmethod
    def tearDownClass(cls):
        """Remove the temporary input directory and its contents"""
        if cls._indir:
            shutil.rmtree(cls._indir)
        cls._indir = None
        # Release the tools library's output directory as well
        tools.FinaliseOutputDir()
    @classmethod
    def _make_input_file(cls, fname, contents):
        """Create a new test input file, creating directories as needed

        Args:
            fname: Filename to create
            contents: File contents to write in to the file

        Returns:
            Full pathname of file created
        """
        pathname = os.path.join(cls._indir, fname)
        tools.WriteFile(pathname, contents)
        return pathname
    def _check_hdr(self, data, size, offset=0, arch=cbfs_util.ARCHITECTURE_X86):
        """Check that the CBFS has the expected header

        Args:
            data: Data to check
            size: Expected ROM size
            offset: Expected offset to first CBFS file
            arch: Expected architecture

        Returns:
            CbfsReader object containing the CBFS
        """
        # Parse the image and validate every master-header field
        cbfs = cbfs_util.CbfsReader(data)
        self.assertEqual(cbfs_util.HEADER_MAGIC, cbfs.magic)
        self.assertEqual(cbfs_util.HEADER_VERSION2, cbfs.version)
        self.assertEqual(size, cbfs.rom_size)
        self.assertEqual(0, cbfs.boot_block_size)
        self.assertEqual(cbfs_util.ENTRY_ALIGN, cbfs.align)
        self.assertEqual(offset, cbfs.cbfs_offset)
        self.assertEqual(arch, cbfs.arch)
        return cbfs
    def _check_uboot(self, cbfs, ftype=cbfs_util.TYPE_RAW, offset=0x38,
                     data=U_BOOT_DATA, cbfs_offset=None):
        """Check that the U-Boot file is as expected

        Args:
            cbfs: CbfsReader object to check
            ftype: Expected file type
            offset: Expected offset of file
            data: Expected data in file
            cbfs_offset: Expected CBFS offset for file's data

        Returns:
            CbfsFile object containing the file
        """
        self.assertIn('u-boot', cbfs.files)
        cfile = cbfs.files['u-boot']
        self.assertEqual('u-boot', cfile.name)
        self.assertEqual(offset, cfile.offset)
        # Only check the CBFS offset when the caller asked for it
        if cbfs_offset is not None:
            self.assertEqual(cbfs_offset, cfile.cbfs_offset)
        self.assertEqual(data, cfile.data)
        self.assertEqual(ftype, cfile.ftype)
        self.assertEqual(cbfs_util.COMPRESS_NONE, cfile.compress)
        self.assertEqual(len(data), cfile.memlen)
        return cfile
def _check_dtb(self, cbfs, offset=0x38, data=U_BOOT_DTB_DATA,
cbfs_offset=None):
"""Check that the U-Boot dtb file is as expected
Args:
cbfs: CbfsReader object to check
offset: Expected offset of file
data: Expected data in file
cbfs_offset: Expected CBFS offset for file's data
"""
self.assertIn('u-boot-dtb', cbfs.files)
cfile = cbfs.files['u-boot-dtb']
self.assertEqual('u-boot-dtb', cfile.name)
self.assertEqual(offset, cfile.offset)
if cbfs_offset is not None:
self.assertEqual(cbfs_offset, cfile.cbfs_offset)
self.assertEqual(U_BOOT_DTB_DATA, cfile.data)
self.assertEqual(cbfs_util.TYPE_RAW, cfile.ftype)
self.assertEqual(cbfs_util.COMPRESS_NONE, cfile.compress)
self.assertEqual(len(U_BOOT_DTB_DATA), cfile.memlen)
    def _check_raw(self, data, size, offset=0, arch=cbfs_util.ARCHITECTURE_X86):
        """Check that two raw files are added as expected

        Args:
            data: Data to check
            size: Expected ROM size
            offset: Expected offset to first CBFS file
            arch: Expected architecture
        """
        # Validate the header, then both standard test files
        cbfs = self._check_hdr(data, size, offset=offset, arch=arch)
        self._check_uboot(cbfs)
        self._check_dtb(cbfs)
    def _get_expected_cbfs(self, size, arch='x86', compress=None, base=None):
        """Get the file created by cbfstool for a particular scenario

        Args:
            size: Size of the CBFS in bytes
            arch: Architecture of the CBFS, as a string
            compress: Compression to use, e.g. cbfs_util.COMPRESS_LZMA
            base: Base address of file, or None to put it anywhere

        Returns:
            Resulting CBFS file, or None if cbfstool is not available
        """
        if not self.have_cbfstool or not self.have_lz4:
            return None
        cbfs_fname = os.path.join(self._indir, 'test.cbfs')
        cbfs_util.cbfstool(cbfs_fname, 'create', '-m', arch, '-s', '%#x' % size)
        if base:
            # cbfstool expects addresses relative to the top of 4GB
            base = [(1 << 32) - size + b for b in base]
        cbfs_util.cbfstool(cbfs_fname, 'add', '-n', 'u-boot', '-t', 'raw',
                           '-c', compress and compress[0] or 'none',
                           '-f', tools.GetInputFilename(
                               compress and 'compress' or 'u-boot.bin'),
                           base=base[0] if base else None)
        cbfs_util.cbfstool(cbfs_fname, 'add', '-n', 'u-boot-dtb', '-t', 'raw',
                           '-c', compress and compress[1] or 'none',
                           '-f', tools.GetInputFilename(
                               compress and 'compress' or 'u-boot.dtb'),
                           base=base[1] if base else None)
        return cbfs_fname
    def _compare_expected_cbfs(self, data, cbfstool_fname):
        """Compare against what cbfstool creates

        This compares what binman creates with what cbfstool creates for what
        is proportedly the same thing.

        Args:
            data: CBFS created by binman
            cbfstool_fname: CBFS created by cbfstool
        """
        if not self.have_cbfstool or not self.have_lz4:
            return
        expect = tools.ReadFile(cbfstool_fname)
        if expect != data:
            # Leave the two images in /tmp and print a ready-to-run diff
            # command, to make debugging mismatches easier
            tools.WriteFile('/tmp/expect', expect)
            tools.WriteFile('/tmp/actual', data)
            print('diff -y <(xxd -g1 /tmp/expect) <(xxd -g1 /tmp/actual) | colordiff')
            self.fail('cbfstool produced a different result')
    def test_cbfs_functions(self):
        """Test global functions of cbfs_util"""
        self.assertEqual(cbfs_util.ARCHITECTURE_X86, cbfs_util.find_arch('x86'))
        self.assertIsNone(cbfs_util.find_arch('bad-arch'))

        self.assertEqual(cbfs_util.COMPRESS_LZMA, cbfs_util.find_compress('lzma'))
        self.assertIsNone(cbfs_util.find_compress('bad-comp'))

    def test_cbfstool_failure(self):
        """Test failure to run cbfstool"""
        if not self.have_cbfstool:
            self.skipTest('No cbfstool available')
        try:
            # In verbose mode this test fails since stderr is not captured. Fix
            # this by turning off verbosity.
            old_verbose = cbfs_util.VERBOSE
            cbfs_util.VERBOSE = False
            with test_util.capture_sys_output() as (_stdout, stderr):
                with self.assertRaises(Exception) as e:
                    cbfs_util.cbfstool('missing-file', 'bad-command')
        finally:
            cbfs_util.VERBOSE = old_verbose
        # cbfstool reports the bad command; the wrapper raises its own error
        self.assertIn('Unknown command', stderr.getvalue())
        self.assertIn('Failed to run', str(e.exception))
    def test_cbfs_raw(self):
        """Test base handling of a Coreboot Filesystem (CBFS)"""
        size = 0xb0
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        cbw.add_file_raw('u-boot-dtb', U_BOOT_DTB_DATA)
        data = cbw.get_data()
        self._check_raw(data, size)
        # Cross-check against the image cbfstool would build
        cbfs_fname = self._get_expected_cbfs(size=size)
        self._compare_expected_cbfs(data, cbfs_fname)

    def test_cbfs_invalid_file_type(self):
        """Check handling of an invalid file type when outputting a CBFS"""
        size = 0xb0
        cbw = CbfsWriter(size)
        cfile = cbw.add_file_raw('u-boot', U_BOOT_DATA)

        # Change the type manually before generating the CBFS, and make sure
        # that the generator complains
        cfile.ftype = 0xff
        with self.assertRaises(ValueError) as e:
            cbw.get_data()
        self.assertIn('Unknown type 0xff when writing', str(e.exception))

    def test_cbfs_invalid_file_type_on_read(self):
        """Check handling of an invalid file type when reading the CBFS"""
        size = 0xb0
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        data = cbw.get_data()

        # Read in the first file header
        cbr = cbfs_util.CbfsReader(data, read=False)
        with io.BytesIO(data) as fd:
            self.assertTrue(cbr._find_and_read_header(fd, len(data)))
            pos = fd.tell()
            hdr_data = fd.read(cbfs_util.FILE_HEADER_LEN)
            magic, size, ftype, attr, offset = struct.unpack(
                cbfs_util.FILE_HEADER_FORMAT, hdr_data)

        # Create a new CBFS with a change to the file type
        ftype = 0xff
        newdata = data[:pos]
        newdata += struct.pack(cbfs_util.FILE_HEADER_FORMAT, magic, size, ftype,
                               attr, offset)
        newdata += data[pos + cbfs_util.FILE_HEADER_LEN:]

        # Read in this CBFS and make sure that the reader complains
        with self.assertRaises(ValueError) as e:
            cbfs_util.CbfsReader(newdata)
        self.assertIn('Unknown type 0xff when reading', str(e.exception))
    def test_cbfs_no_space(self):
        """Check handling of running out of space in the CBFS"""
        size = 0x60
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        with self.assertRaises(ValueError) as e:
            cbw.get_data()
        self.assertIn('No space for header', str(e.exception))

    def test_cbfs_no_space_skip(self):
        """Check handling of running out of space in CBFS with file header"""
        size = 0x5c
        cbw = CbfsWriter(size, arch=cbfs_util.ARCHITECTURE_PPC64)
        cbw._add_fileheader = True
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        with self.assertRaises(ValueError) as e:
            cbw.get_data()
        self.assertIn('No space for data before offset', str(e.exception))

    def test_cbfs_no_space_pad(self):
        """Check handling of running out of space in CBFS with file header"""
        size = 0x70
        cbw = CbfsWriter(size)
        cbw._add_fileheader = True
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        with self.assertRaises(ValueError) as e:
            cbw.get_data()
        self.assertIn('No space for data before pad offset', str(e.exception))

    def test_cbfs_bad_header_ptr(self):
        """Check handling of a bad master-header pointer"""
        size = 0x70
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        data = cbw.get_data()

        # Add one to the pointer to make it invalid
        newdata = data[:-4] + struct.pack('<I', cbw._header_offset + 1)

        # We should still be able to find the master header by searching
        with test_util.capture_sys_output() as (stdout, _stderr):
            cbfs = cbfs_util.CbfsReader(newdata)
        self.assertIn('Relative offset seems wrong', stdout.getvalue())
        self.assertIn('u-boot', cbfs.files)
        self.assertEqual(size, cbfs.rom_size)

    def test_cbfs_bad_header(self):
        """Check handling of a bad master header"""
        size = 0x70
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        data = cbw.get_data()

        # Drop most of the header and try reading the modified CBFS
        newdata = data[:cbw._header_offset + 4]
        with test_util.capture_sys_output() as (stdout, _stderr):
            with self.assertRaises(ValueError) as e:
                cbfs_util.CbfsReader(newdata)
        self.assertIn('Relative offset seems wrong', stdout.getvalue())
        self.assertIn('Cannot find master header', str(e.exception))
    def test_cbfs_bad_file_header(self):
        """Check handling of a bad file header"""
        size = 0x70
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        data = cbw.get_data()

        # Read in the CBFS master header (only), then stop
        cbr = cbfs_util.CbfsReader(data, read=False)
        with io.BytesIO(data) as fd:
            self.assertTrue(cbr._find_and_read_header(fd, len(data)))
            pos = fd.tell()

        # Remove all but 4 bytes of the file header and try to read the file
        newdata = data[:pos + 4]
        with test_util.capture_sys_output() as (stdout, _stderr):
            with io.BytesIO(newdata) as fd:
                fd.seek(pos)
                self.assertEqual(False, cbr._read_next_file(fd))
        self.assertIn('File header at 0x0 ran out of data', stdout.getvalue())

    def test_cbfs_bad_file_string(self):
        """Check handling of an incomplete filename string"""
        size = 0x70
        cbw = CbfsWriter(size)
        # Filename is exactly 16 characters so it fills one alignment unit
        cbw.add_file_raw('16-characters xx', U_BOOT_DATA)
        data = cbw.get_data()

        # Read in the CBFS master header (only), then stop
        cbr = cbfs_util.CbfsReader(data, read=False)
        with io.BytesIO(data) as fd:
            self.assertTrue(cbr._find_and_read_header(fd, len(data)))
            pos = fd.tell()

        # Create a new CBFS with only the first 16 bytes of the file name, then
        # try to read the file
        newdata = data[:pos + cbfs_util.FILE_HEADER_LEN + 16]
        with test_util.capture_sys_output() as (stdout, _stderr):
            with io.BytesIO(newdata) as fd:
                fd.seek(pos)
                self.assertEqual(False, cbr._read_next_file(fd))
        self.assertIn('String at %#x ran out of data' %
                      cbfs_util.FILE_HEADER_LEN, stdout.getvalue())

    def test_cbfs_debug(self):
        """Check debug output"""
        size = 0x70
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        data = cbw.get_data()

        # Make sure the DEBUG flag is restored even if the check fails
        try:
            cbfs_util.DEBUG = True
            with test_util.capture_sys_output() as (stdout, _stderr):
                cbfs_util.CbfsReader(data)
            self.assertEqual('name u-boot\ndata %s\n' % U_BOOT_DATA,
                             stdout.getvalue())
        finally:
            cbfs_util.DEBUG = False
    def test_cbfs_bad_attribute(self):
        """Check handling of bad attribute tag"""
        if not self.have_lz4:
            self.skipTest('lz4 --no-frame-crc not available')
        size = 0x140
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', COMPRESS_DATA, None,
                         compress=cbfs_util.COMPRESS_LZ4)
        data = cbw.get_data()

        # Search the CBFS for the expected compression tag
        with io.BytesIO(data) as fd:
            while True:
                pos = fd.tell()
                tag, = struct.unpack('>I', fd.read(4))
                if tag == cbfs_util.FILE_ATTR_TAG_COMPRESSION:
                    break

        # Create a new CBFS with the tag changed to something invalid
        newdata = data[:pos] + struct.pack('>I', 0x123) + data[pos + 4:]
        with test_util.capture_sys_output() as (stdout, _stderr):
            cbfs_util.CbfsReader(newdata)
        self.assertEqual('Unknown attribute tag 123\n', stdout.getvalue())

    def test_cbfs_missing_attribute(self):
        """Check handling of an incomplete attribute tag"""
        if not self.have_lz4:
            self.skipTest('lz4 --no-frame-crc not available')
        size = 0x140
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', COMPRESS_DATA, None,
                         compress=cbfs_util.COMPRESS_LZ4)
        data = cbw.get_data()

        # Read in the CBFS master header (only), then stop
        cbr = cbfs_util.CbfsReader(data, read=False)
        with io.BytesIO(data) as fd:
            self.assertTrue(cbr._find_and_read_header(fd, len(data)))
            pos = fd.tell()

        # Create a new CBFS with only the first 4 bytes of the compression tag,
        # then try to read the file
        tag_pos = pos + cbfs_util.FILE_HEADER_LEN + cbfs_util.FILENAME_ALIGN
        newdata = data[:tag_pos + 4]
        with test_util.capture_sys_output() as (stdout, _stderr):
            with io.BytesIO(newdata) as fd:
                fd.seek(pos)
                self.assertEqual(False, cbr._read_next_file(fd))
        self.assertIn('Attribute tag at %x ran out of data' % tag_pos,
                      stdout.getvalue())

    def test_cbfs_file_master_header(self):
        """Check handling of a file containing a master header"""
        size = 0x100
        cbw = CbfsWriter(size)
        cbw._add_fileheader = True
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        data = cbw.get_data()

        # The image should read back cleanly
        cbr = cbfs_util.CbfsReader(data)
        self.assertIn('u-boot', cbr.files)
        self.assertEqual(size, cbr.rom_size)

    def test_cbfs_arch(self):
        """Test on non-x86 architecture"""
        size = 0x100
        cbw = CbfsWriter(size, arch=cbfs_util.ARCHITECTURE_PPC64)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        cbw.add_file_raw('u-boot-dtb', U_BOOT_DTB_DATA)
        data = cbw.get_data()
        self._check_raw(data, size, offset=0x40,
                        arch=cbfs_util.ARCHITECTURE_PPC64)

        # Compare against what cbfstool creates
        cbfs_fname = self._get_expected_cbfs(size=size, arch='ppc64')
        self._compare_expected_cbfs(data, cbfs_fname)
    def test_cbfs_stage(self):
        """Tests handling of a Coreboot Filesystem (CBFS) stage file"""
        if not elf.ELF_TOOLS:
            self.skipTest('Python elftools not available')
        # Build a small ELF to act as the stage payload
        elf_fname = os.path.join(self._indir, 'cbfs-stage.elf')
        elf.MakeElf(elf_fname, U_BOOT_DATA, U_BOOT_DTB_DATA)

        size = 0xb0
        cbw = CbfsWriter(size)
        cbw.add_file_stage('u-boot', tools.ReadFile(elf_fname))

        data = cbw.get_data()
        cbfs = self._check_hdr(data, size)
        # Load/entry addresses baked into the ELF by MakeElf
        load = 0xfef20000
        entry = load + 2

        cfile = self._check_uboot(cbfs, cbfs_util.TYPE_STAGE, offset=0x28,
                                  data=U_BOOT_DATA + U_BOOT_DTB_DATA)

        self.assertEqual(entry, cfile.entry)
        self.assertEqual(load, cfile.load)
        self.assertEqual(len(U_BOOT_DATA) + len(U_BOOT_DTB_DATA),
                         cfile.data_len)

        # Compare against what cbfstool creates
        if self.have_cbfstool:
            cbfs_fname = os.path.join(self._indir, 'test.cbfs')
            cbfs_util.cbfstool(cbfs_fname, 'create', '-m', 'x86', '-s',
                               '%#x' % size)
            cbfs_util.cbfstool(cbfs_fname, 'add-stage', '-n', 'u-boot',
                               '-f', elf_fname)
            self._compare_expected_cbfs(data, cbfs_fname)

    def test_cbfs_raw_compress(self):
        """Test base handling of compressing raw files"""
        if not self.have_lz4:
            self.skipTest('lz4 --no-frame-crc not available')
        size = 0x140
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', COMPRESS_DATA, None,
                         compress=cbfs_util.COMPRESS_LZ4)
        cbw.add_file_raw('u-boot-dtb', COMPRESS_DATA, None,
                         compress=cbfs_util.COMPRESS_LZMA)
        data = cbw.get_data()

        cbfs = self._check_hdr(data, size)
        # The reader should decompress transparently, so .data matches the
        # original payload while .compress records the algorithm used
        self.assertIn('u-boot', cbfs.files)
        cfile = cbfs.files['u-boot']
        self.assertEqual(cfile.name, 'u-boot')
        self.assertEqual(cfile.offset, 56)
        self.assertEqual(cfile.data, COMPRESS_DATA)
        self.assertEqual(cfile.ftype, cbfs_util.TYPE_RAW)
        self.assertEqual(cfile.compress, cbfs_util.COMPRESS_LZ4)
        self.assertEqual(cfile.memlen, len(COMPRESS_DATA))

        self.assertIn('u-boot-dtb', cbfs.files)
        cfile = cbfs.files['u-boot-dtb']
        self.assertEqual(cfile.name, 'u-boot-dtb')
        self.assertEqual(cfile.offset, 56)
        self.assertEqual(cfile.data, COMPRESS_DATA)
        self.assertEqual(cfile.ftype, cbfs_util.TYPE_RAW)
        self.assertEqual(cfile.compress, cbfs_util.COMPRESS_LZMA)
        self.assertEqual(cfile.memlen, len(COMPRESS_DATA))

        cbfs_fname = self._get_expected_cbfs(size=size, compress=['lz4', 'lzma'])
        self._compare_expected_cbfs(data, cbfs_fname)
    def test_cbfs_raw_space(self):
        """Test files with unused space in the CBFS"""
        size = 0xf0
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        cbw.add_file_raw('u-boot-dtb', U_BOOT_DTB_DATA)
        data = cbw.get_data()
        self._check_raw(data, size)
        cbfs_fname = self._get_expected_cbfs(size=size)
        self._compare_expected_cbfs(data, cbfs_fname)

    def test_cbfs_offset(self):
        """Test a CBFS with files at particular offsets"""
        size = 0x200
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA, 0x40)
        cbw.add_file_raw('u-boot-dtb', U_BOOT_DTB_DATA, 0x140)

        data = cbw.get_data()
        cbfs = self._check_hdr(data, size)
        self._check_uboot(cbfs, ftype=cbfs_util.TYPE_RAW, offset=0x40,
                          cbfs_offset=0x40)
        self._check_dtb(cbfs, offset=0x40, cbfs_offset=0x140)

        cbfs_fname = self._get_expected_cbfs(size=size, base=(0x40, 0x140))
        self._compare_expected_cbfs(data, cbfs_fname)

    def test_cbfs_invalid_file_type_header(self):
        """Check handling of an invalid file type when outputting a header"""
        size = 0xb0
        cbw = CbfsWriter(size)
        cfile = cbw.add_file_raw('u-boot', U_BOOT_DATA, 0)

        # Change the type manually before generating the CBFS, and make sure
        # that the generator complains
        cfile.ftype = 0xff
        with self.assertRaises(ValueError) as e:
            cbw.get_data()
        self.assertIn('Unknown file type 0xff', str(e.exception))

    def test_cbfs_offset_conflict(self):
        """Test a CBFS with files that want to overlap"""
        size = 0x200
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA, 0x40)
        cbw.add_file_raw('u-boot-dtb', U_BOOT_DTB_DATA, 0x80)

        with self.assertRaises(ValueError) as e:
            cbw.get_data()
        self.assertIn('No space for data before pad offset', str(e.exception))

    def test_cbfs_check_offset(self):
        """Test that we can discover the offset of a file after writing it"""
        size = 0xb0
        cbw = CbfsWriter(size)
        cbw.add_file_raw('u-boot', U_BOOT_DATA)
        cbw.add_file_raw('u-boot-dtb', U_BOOT_DTB_DATA)
        data = cbw.get_data()

        cbfs = cbfs_util.CbfsReader(data)
        self.assertEqual(0x38, cbfs.files['u-boot'].cbfs_offset)
        self.assertEqual(0x78, cbfs.files['u-boot-dtb'].cbfs_offset)
# Allow running this test module directly
if __name__ == '__main__':
    unittest.main()
|
package Ransom_Note;
import java.util.HashMap;
public class Solution {
    /**
     * Returns true if {@code ransomNote} can be assembled from the letters
     * of {@code magazine}, using each magazine character at most once.
     *
     * @param ransomNote the text that must be constructed
     * @param magazine   the pool of available characters
     * @return true when every character of the note is covered
     */
    public boolean canConstruct(String ransomNote, String magazine) {
        // Count how many of each character the magazine supplies
        HashMap<Character, Integer> available = new HashMap<>();
        for (char ch : magazine.toCharArray()) {
            available.merge(ch, 1, Integer::sum);
        }
        // Consume one count per note character; fail when a character runs out
        for (char ch : ransomNote.toCharArray()) {
            Integer remaining = available.get(ch);
            if (remaining == null || remaining == 0) {
                return false;
            }
            available.put(ch, remaining - 1);
        }
        return true;
    }

    public static void main(String[] args) {
        Solution s = new Solution();
    }
}
/*
*
*/
package net.community.chest.jms.framework.queue.impl;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.BlockingQueue;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.QueueConnection;
import net.community.chest.jms.framework.queue.AbstractQueueConnectionFactory;
/**
* <P>Copyright 2010 as per GPLv2</P>
*
* @author <NAME>.
* @since Jun 8, 2010 4:13:05 PM
*/
/**
 * In-memory queue connection factory: queues and their message data live in
 * case-insensitive maps shared with every connection it creates. Optional
 * username/password credentials, when configured, must match exactly.
 */
public class SimpleQueueConnectionFactoryImpl extends AbstractQueueConnectionFactory {
    private final Map<String,javax.jms.Queue> _queuesMap=
        new TreeMap<String,javax.jms.Queue>(String.CASE_INSENSITIVE_ORDER);
    protected final Map<String,javax.jms.Queue> getQueuesMap ()
    {
        return _queuesMap;
    }

    private final Map<String,BlockingQueue<Message>> _qDataMap=
        new TreeMap<String,BlockingQueue<Message>>(String.CASE_INSENSITIVE_ORDER);
    protected final Map<String,BlockingQueue<Message>> getQueueDataMap ()
    {
        return _qDataMap;
    }

    public SimpleQueueConnectionFactoryImpl ()
    {
        super();
    }

    private String _username;
    public String getUsername ()
    {
        return _username;
    }

    public void setUsername (String username)
    {
        _username = username;
    }

    private String _password;
    public String getPassword ()
    {
        return _password;
    }

    public void setPassword (String password)
    {
        _password = password;
    }

    /**
     * Validates one credential value against its configured counterpart.
     * When nothing is configured, the caller must supply nothing (null or
     * empty); otherwise the values must be equal. (Extracted: this logic was
     * previously duplicated for username and password.)
     *
     * @param configured the value configured on the factory (may be null/empty)
     * @param provided the value supplied by the caller (may be null/empty)
     * @throws JMSException if the values do not match
     */
    private static void validateCredential (String configured, String provided)
        throws JMSException
    {
        if ((null == configured) || (configured.length() <= 0))
        {
            if ((provided != null) && (provided.length() > 0))
                throw new JMSException("createQueueConnection() bad credentials");
        }
        else if (!configured.equals(provided))
            throw new JMSException("createQueueConnection() bad credentials");
    }

    /*
     * @see javax.jms.QueueConnectionFactory#createQueueConnection(java.lang.String, java.lang.String)
     */
    @Override
    public QueueConnection createQueueConnection (String userName, String password)
        throws JMSException
    {
        validateCredential(getUsername(), userName);
        validateCredential(getPassword(), password);
        // Connections share the factory's queue definitions and message data
        return new SimpleQueueConnectionImpl(getQueuesMap(), getQueueDataMap());
    }
}
|
package com.accounts;
public abstract class BankAccount {
private int accNum;
private String accHolder;
private double accBalance;
protected BankAccount(int accNum, String accHolder, double accBalance) {
this.accNum = accNum;
this.accHolder = accHolder;
this.accBalance = accBalance;
}
public int getAccNum() {
return accNum;
}
public String getAccHolder() {
return accHolder;
}
public double getAccBalance() {
return accBalance;
}
public void setAccBalance(double accBalance) {
this.accBalance = accBalance;
}
public abstract double withdraw(double amount);
public abstract double deposit(double amount);
@Override
public String toString() {
return "[accNum=" + accNum + ", accHolder=" + accHolder + ", accBalance=" + accBalance + "]";
}
} |
import { Component, Inject, OnInit } from '@angular/core';
import { Apollo } from 'apollo-angular';
import { NgxSpinnerService } from 'ngx-spinner';
import {
GetDefinitionListQueryDefinitionsFieldItemInterface,
GetDefinitionListQueryInterface,
getDefinitionListQueryGql,
} from './get-definition-list.query';
@Component({
  selector: 'app-definition-list',
  templateUrl: './definition-list.component.html',
  styles: [],
})
export class DefinitionListComponent implements OnInit {
  /** Definitions currently rendered by the template. */
  definitions: GetDefinitionListQueryDefinitionsFieldItemInterface[];

  constructor(
    protected apollo: Apollo,
    protected spinner: NgxSpinnerService,
  ) {}

  ngOnInit() {
    this.getDefinitions();
  }

  /**
   * Loads the definition list via GraphQL, showing the spinner while the
   * request is in flight and hiding it once data arrives.
   */
  protected getDefinitions() {
    this.spinner.show();

    const query = this.apollo.watchQuery<GetDefinitionListQueryInterface>({
      query: getDefinitionListQueryGql,
    });

    query.valueChanges.subscribe(({ data }) => {
      this.definitions = data.definitions;
      this.spinner.hide();
    });
  }
}
|
import React, {Fragment, useState, useEffect} from 'react';
import { makeStyles } from '@material-ui/core/styles';
import { Avatar, IconButton, Button, Typography, Container, InputLabel, MenuItem, FormControl, Select, Grid } from '@material-ui/core';
import { ArrowBack as BackIcon, Videocam as VideocamIcon } from '@material-ui/icons';
import Footer from './Footer';
import TopBar from './TopBar';
import Video from './Video';
import { withContext } from '../contexts/AppContext';
// Material-UI styles for this screen (standard "sign-in"-style layout).
const useStyles = makeStyles(theme => ({
  paper: {
    marginTop: theme.spacing(8),
    display: 'flex',
    flexDirection: 'column',
    alignItems: 'center',
  },
  avatar: {
    margin: theme.spacing(1),
    backgroundColor: theme.palette.secondary.main,
  },
  form: {
    width: '100%', // Fix IE 11 issue.
    marginTop: theme.spacing(1),
  },
  submit: {
    margin: theme.spacing(3, 0, 2),
  },
  formControl: {
    margin: theme.spacing(1),
    minWidth: 120,
    display: 'flex'
  },
  selectEmpty: {
    marginTop: theme.spacing(2),
  },
}));
/**
 * Enumerates the browser's media devices and buckets them by kind
 * (videoinput / audioinput / audiooutput) into deviceId -> label Maps.
 * Logs the error and returns undefined if enumeration fails.
 */
async function getMediaDevices() {
  try {
    const allDevices = await navigator.mediaDevices.enumerateDevices();
    const byKind = {
      videoinput: new Map(),
      audioinput: new Map(),
      audiooutput: new Map()
    };
    for (const device of allDevices) {
      byKind[device.kind].set(device.deviceId, device.label);
    }
    return byKind;
  } catch (err) {
    console.log(err);
  }
}
function devicesList(devicesList) {
let list = [];
if (devicesList) {
devicesList.forEach((deviceName, key) => {
list.push(<MenuItem value={key} key={key}>{deviceName}</MenuItem>);
})
return list;
} else {
return <MenuItem>No Devices</MenuItem>
}
}
/**
 * Media settings screen: lets the user choose audio/video input devices and
 * shows a live, muted preview of the currently selected camera.
 *
 * Props (injected via withContext / router):
 * - dispatch: context reducer dispatch used to persist device choices
 * - chosenAudioInput / chosenVideoInput: currently selected deviceIds
 * - history: router history, used for back navigation and form submit
 */
function MediaSettings(props) {
    let { dispatch, chosenAudioInput, chosenVideoInput, history } = props;
    const classes = useStyles();
    // Categorised device Maps from getMediaDevices(); null until enumerated.
    const [devices, setDevices] = useState(null);
    // Current preview MediaStream; null until acquired.
    const [stream, setStream] = useState(null);

    // On mount: request a default stream first, then enumerate devices.
    // NOTE(review): presumably getUserMedia is called before enumeration so
    // the browser exposes device labels once permission is granted -- confirm.
    useEffect(() => {
        async function getDevices() {
            let stream = await navigator.mediaDevices.getUserMedia({audio: true, video: true});
            let devices = await getMediaDevices();
            setDevices(devices);
            setStream(stream);
        }
        getDevices();
    }, []);

    // Re-acquire the preview stream whenever the chosen devices change.
    useEffect(() => {
        async function getStream() {
            let constraints = {
                audio: true,
                video: {
                    width: { min: 640, ideal: 1920, max: 3840 },
                    height: { min: 400, ideal: 1080, max: 2160 },
                    aspectRatio: { ideal: 1.7777777778 }
                }
            }
            // Pin the exact devices when the user has made a selection.
            if (chosenAudioInput) {
                constraints.audio = {
                    deviceId: { exact: chosenAudioInput }
                }
            }
            if (chosenVideoInput) {
                constraints.video.deviceId = { exact: chosenVideoInput };
            }
            let stream = await navigator.mediaDevices.getUserMedia(constraints);
            setStream(stream);
        }
        getStream();
    }, [chosenAudioInput, chosenVideoInput]);

    // Stop all tracks of the previous stream when it is replaced or on unmount,
    // releasing the camera/microphone.
    useEffect(() => {
        return function cleanup() {
            if(stream && stream.getTracks) {
                stream.getTracks().forEach((track) => {
                    track.stop();
                });
            }
        }
    }, [stream]);

    // Persist a device selection into app context; the action is keyed by the
    // name attribute of the <Select> that fired the change.
    const handleChange = (event) => {
        let ACTION_NAME;
        switch(event.target.name) {
            case 'audioinput':
                ACTION_NAME = 'UPDATE_CHOSEN_MIC';
                break;
            case 'videoinput':
                ACTION_NAME = 'UPDATE_CHOSEN_CAMERA';
                break;
            case 'audiooutput':
                ACTION_NAME = 'UPDATE_CHOSEN_OUTPUT';
                break;
            default:
                break;
        }
        dispatch({ type: ACTION_NAME, deviceId: event.target.value});
    };

    return (
        <Fragment>
            <TopBar>
                <IconButton edge='end' color='inherit' aria-label='Settings' onClick={() => history.goBack()}>
                    <BackIcon />
                </IconButton>
            </TopBar>
            <Container maxWidth='lg'>
                <div className={classes.paper}>
                    <Avatar className={classes.avatar}>
                        <VideocamIcon />
                    </Avatar>
                    <Typography component='h1' variant='h5'>
                        Media Settings
                    </Typography>
                </div>
                <Grid container spacing={3}>
                    <Grid item xs={12} md={6}>
                        <form className={classes.form} noValidate onSubmit={() => {
                            history.push('/');
                        }}>
                            <FormControl className={classes.formControl}>
                                <InputLabel id='audio-input-select-label'>Audio</InputLabel>
                                <Select
                                    labelId='audio-input-select-label'
                                    id='audioInputSelect'
                                    name="audioinput"
                                    autoWidth
                                    value={chosenAudioInput}
                                    onChange={handleChange}
                                >
                                    {devicesList(devices && devices.audioinput)}
                                </Select>
                            </FormControl>
                            <FormControl className={classes.formControl}>
                                <InputLabel id='video-input-select-label'>Video</InputLabel>
                                <Select
                                    labelId='video-input-select-label'
                                    id='videoInputSelect'
                                    name="videoinput"
                                    autoWidth
                                    value={chosenVideoInput}
                                    onChange={handleChange}
                                >
                                    {devicesList(devices && devices.videoinput)}
                                </Select>
                            </FormControl>
                            {/* <FormControl className={classes.formControl}>
                                <InputLabel id='audio-output-select-label'>Audio Output</InputLabel>
                                <Select
                                    labelId='audio-output-select-label'
                                    id='audioOutputSelect'
                                    name="audiooutput"
                                    autoWidth
                                    value={chosenAudioOutput}
                                    onChange={handleChange}
                                >
                                    {devicesList(devices && devices.audiooutput)}
                                </Select>
                            </FormControl> */}
                            <Button
                                type='submit'
                                fullWidth
                                variant='contained'
                                color='primary'
                                className={classes.submit}
                            >
                                Finished
                            </Button>
                        </form>
                    </Grid>
                    <Grid item xs={12} md={6}>
                        <Video stream={stream} muted={true} />
                    </Grid>
                </Grid>
            </Container>
            <Footer />
        </Fragment>
    );
}
export default withContext(MediaSettings); |
<gh_stars>1-10
// Button
// Remove the ugly outlines around the buttons automatically.
// Strips the focus outline from every .btn element by blurring it whenever
// the pointer enters, releases, or a touch ends on the button.
function button() {
  const unfocus = function () {
    this.blur(); // `this` is the button the listener fired on
  };
  const buttons = document.querySelectorAll('.btn');
  /* See: https://www.w3schools.com/jquery/tryit.asp
     ?filename=tryjquery_event_mouseenter_mouseover */
  for (let index = 0; index < buttons.length; index++) {
    buttons[index].addEventListener('mouseenter', unfocus);
    buttons[index].addEventListener('mouseup', unfocus);
    buttons[index].addEventListener('touchend', unfocus);
  }
}
|
import gzip
import os.path
from bisect import bisect_left
from whoosh.compat import permutations
from whoosh.compat import xrange
from whoosh.automata import fsa, glob, lev
from whoosh.support.levenshtein import levenshtein
def test_nfa():
    """NFA with an epsilon edge from the start straight to the final state."""
    nfa = fsa.NFA(0)
    nfa.add_transition(0, "a", 1)
    nfa.add_transition(0, fsa.EPSILON, 4)
    nfa.add_transition(0, "b", 1)
    nfa.add_transition(1, "c", 4)
    nfa.add_final_state(4)

    for word in ("", "ac", "bc"):
        assert nfa.accept(word)
    assert not nfa.accept("c")
def test_empty_string():
    """A machine whose start state is final accepts exactly the empty string."""
    nfa = fsa.NFA(1)
    nfa.add_final_state(1)

    for machine in (nfa, nfa.to_dfa()):
        assert machine.accept("")
        assert not machine.accept("a")
def test_nfa2():
    """Epsilon cycles and multiple paths; the NFA and derived DFA must agree."""
    nfa = fsa.NFA(1)
    nfa.add_transition(1, "a", 2)
    nfa.add_transition(1, "c", 4)
    nfa.add_transition(2, "b", 3)
    nfa.add_transition(2, fsa.EPSILON, 1)
    nfa.add_transition(3, "a", 2)
    nfa.add_transition(4, "c", 3)
    nfa.add_transition(4, fsa.EPSILON, 3)
    nfa.add_final_state(3)

    accepted = ("ab", "abab", "cc", "c", "ccab", "ccacc", "ccac", "abacab")
    rejected = ("b", "a", "cb", "caa")
    for machine in (nfa, nfa.to_dfa()):
        for word in accepted:
            assert machine.accept(word)
        for word in rejected:
            assert not machine.accept(word)
def test_insert():
    """insert() splices one NFA between two states of another."""
    inner = fsa.NFA(1)
    inner.add_transition(1, "a", 2)
    inner.add_transition(2, "b", 3)
    inner.add_final_state(3)

    outer = fsa.NFA(4)
    outer.add_transition(4, "x", 5)
    outer.add_transition(4, "y", 5)
    outer.insert(4, inner, 5)
    outer.add_final_state(5)

    for word in ("x", "y", "ab"):
        assert outer.accept(word)
    assert not outer.accept("a")
def test_to_dfa():
    """Subset construction preserves the language of the source NFA."""
    nfa = fsa.NFA(0)
    nfa.add_transition(0, "a", 1)
    nfa.add_transition(0, fsa.EPSILON, 4)
    nfa.add_transition(0, "b", 1)
    nfa.add_transition(1, "c", 4)
    nfa.add_final_state(4)
    assert nfa.accept("")

    dfa = nfa.to_dfa()
    for word in ("", "ac", "bc"):
        assert dfa.accept(word)
    assert not dfa.accept("c")
def test_glob_star():
    """'*' matches any (possibly empty) run of characters."""
    nfa = glob.glob_automaton("a*c")
    for word in ("a", "c"):
        assert not nfa.accept(word)
    for word in ("ac", "abc", "abcc", "abcac", "aaaaaaaaaac"):
        assert nfa.accept(word)
    assert not nfa.accept("abb")

    dfa = nfa.to_dfa()
    for word in ("a", "c"):
        assert not dfa.accept(word)
    for word in ("ac", "abc", "abcc", "abcac"):
        assert dfa.accept(word)
    assert not dfa.accept("abb")
def test_glob_question():
    """'?' matches exactly one character."""
    nfa = glob.glob_automaton("?")
    assert not nfa.accept("")
    assert nfa.accept("a")
    assert not nfa.accept("aa")

    nfa = glob.glob_automaton("a?c")
    for word in ("a", "ac"):
        assert not nfa.accept(word)
    assert nfa.accept("abc")
    assert not nfa.accept("aba")
def test_glob_range():
    """'[..]' matches any single character drawn from the set."""
    nfa = glob.glob_automaton("[ab][cd]")
    for word in ("", "a", "c", "acc"):
        assert not nfa.accept(word)
    for word in ("ac", "bc", "ad", "bd"):
        assert nfa.accept(word)
# def test_glob_negate_range():
# nfa = glob.glob_automaton("a[!ab]a")
# assert not nfa.accept("aaa")
# assert not nfa.accept("aba")
# assert nfa.accept("aca")
# assert not nfa.accept("bcb")
class Skipper(object):
    """Callable cursor over a sorted word list, used as the word source for
    fsa.find_all_matches(): given a target word, returns the first list entry
    >= that word at or after the current position, or None when exhausted.
    """

    def __init__(self, data):
        self.data = data  # sorted sequence of words
        self.i = 0        # current scan position in self.data

    def __call__(self, w):
        if self.data[self.i] == w:
            return w
        # NOTE(review): the position is advanced before bisecting; this
        # presumes find_all_matches never asks for a word at or before the
        # current entry again -- confirm against whoosh.automata.fsa.
        self.i += 1
        pos = bisect_left(self.data, w, self.i)
        if pos < len(self.data):
            return self.data[pos]
        else:
            return None
def test_levenshtein():
    """Levenshtein automaton finds the same words as brute-force distance."""
    path = os.path.join(os.path.dirname(__file__), "english-words.10.gz")
    # Close the word file deterministically (it was previously left open).
    with gzip.open(path, "rb") as wordfile:
        words = sorted(line.decode("latin1").strip().lower() for line in wordfile)

    def find_brute(target, k):
        # Reference implementation: scan every word.
        for w in words:
            if levenshtein(w, target, k) <= k:
                yield w

    def find_auto(target, k):
        # Implementation under test: Levenshtein DFA walked over the word list.
        dfa = lev.levenshtein_automaton(target, k).to_dfa()
        sk = Skipper(words)
        return fsa.find_all_matches(dfa, sk)

    assert set(find_brute("look", 2)) == set(find_auto("look", 2))
    assert set(find_brute("bend", 1)) == set(find_auto("bend", 1))
    assert set(find_brute("puck", 1)) == set(find_auto("puck", 1))
    assert set(find_brute("zero", 1)) == set(find_auto("zero", 1))
def test_levenshtein_prefix():
    """Prefix-constrained Levenshtein automaton matches brute force."""
    path = os.path.join(os.path.dirname(__file__), "english-words.10.gz")
    # Close the word file deterministically (it was previously left open).
    with gzip.open(path, "rb") as wordfile:
        words = sorted(line.decode("latin1").strip().lower() for line in wordfile)
    prefixlen = 1

    def find_brute(target, k):
        # Reference implementation: distance check plus exact-prefix filter.
        for w in words:
            d = levenshtein(w, target, k)
            if d <= k and w[:prefixlen] == target[:prefixlen]:
                yield w

    def find_auto(target, k):
        # Implementation under test: automaton built with a fixed prefix.
        dfa = lev.levenshtein_automaton(target, k, prefix=prefixlen).to_dfa()
        sk = Skipper(words)
        return fsa.find_all_matches(dfa, sk)

    assert set(find_brute("look", 2)) == set(find_auto("look", 2))
    assert set(find_brute("bend", 1)) == set(find_auto("bend", 1))
    assert set(find_brute("puck", 1)) == set(find_auto("puck", 1))
    assert set(find_brute("zero", 1)) == set(find_auto("zero", 1))
def test_basics():
    """Primitive builders: epsilon, single-symbol, and dot machines."""
    machine = fsa.epsilon_nfa()
    assert machine.accept("")
    assert not machine.accept("a")

    machine = fsa.basic_nfa("a")
    assert not machine.accept("")
    assert machine.accept("a")
    assert not machine.accept("b")

    machine = fsa.dot_nfa()
    assert not machine.accept("")
    assert machine.accept("a")
    assert machine.accept("b")
def test_concat():
    """concat_nfa of 'a' and 'b' accepts exactly 'ab'."""
    machine = fsa.concat_nfa(fsa.basic_nfa("a"), fsa.basic_nfa("b"))
    for word in ("", "a", "aa", "b", "bb", "ba", "abc"):
        assert not machine.accept(word)
    assert machine.accept("ab")
def test_choice():
    """Nested choice_nfa accepts any single symbol of a|b|c."""
    machine = fsa.choice_nfa(fsa.basic_nfa("a"),
                             fsa.choice_nfa(fsa.basic_nfa("b"),
                                            fsa.basic_nfa("c")))
    assert not machine.accept("")
    for word in ("a", "b", "c"):
        assert machine.accept(word)
    for word in ("d", "aa", "ab", "abc"):
        assert not machine.accept(word)
def test_star():
    """star_nfa accepts zero or more repetitions of its symbol."""
    machine = fsa.star_nfa(fsa.basic_nfa("a"))
    for word in ("", "a", "aaaaaa"):
        assert machine.accept(word)
    for word in ("b", "ab"):
        assert not machine.accept(word)
def test_optional():
    """optional_nfa makes the 'b' in 'ab?' optional."""
    machine = fsa.concat_nfa(fsa.basic_nfa("a"), fsa.optional_nfa(fsa.basic_nfa("b")))
    for word in ("a", "ab"):
        assert machine.accept(word)
    for word in ("", "b", "ba", "bab"):
        assert not machine.accept(word)
def test_reverse_nfa():
    """Reversing the 'ab' machine yields one accepting only 'ba'."""
    forward = fsa.concat_nfa(fsa.basic_nfa("a"), fsa.basic_nfa("b"))
    backward = fsa.reverse_nfa(forward)
    for word in ("", "a", "aa", "b", "bb", "ab", "abc"):
        assert not backward.accept(word)
    assert backward.accept("ba")
def test_regular():
    """Composite machines built from the primitive combinators."""
    # (a|b)*
    machine = fsa.star_nfa(fsa.choice_nfa(fsa.basic_nfa("a"), fsa.basic_nfa("b")))
    for word in ("", "a", "aaaa", "b", "bbbb", "abab", "babb"):
        assert machine.accept(word)

    # ab?c
    machine = fsa.concat_nfa(
        fsa.basic_nfa("a"),
        fsa.concat_nfa(
            fsa.optional_nfa(fsa.basic_nfa("b")),
            fsa.basic_nfa("c")
        )
    )
    for word in ("ac", "abc"):
        assert machine.accept(word)
    for word in ("ab", "bc"):
        assert not machine.accept(word)
def test_minimize_dfa():
    """Minimization collapses equivalent states to the known-minimal DFA.

    Example from www.cs.odu.edu/~toida/nerzic/390teched/regular/fa/min-fa.html
    """
    dfa = fsa.DFA(1)
    for src, label, dst in ((1, "a", 3), (1, "b", 2),
                            (2, "a", 4), (2, "b", 1),
                            (3, "a", 5), (3, "b", 4),
                            (4, "a", 4), (4, "b", 4),
                            (5, "a", 3), (5, "b", 2)):
        dfa.add_transition(src, label, dst)
    dfa.add_final_state(1)
    dfa.add_final_state(5)

    expected = fsa.DFA(1)
    for src, label, dst in ((1, "a", 3), (1, "b", 2),
                            (2, "b", 1), (3, "a", 1)):
        expected.add_transition(src, label, dst)
    expected.add_final_state(1)

    dfa.minimize()
    assert dfa == expected
def test_strings_dfa():
    """strings_dfa generates exactly its input words, in sorted order."""
    strings = "able alfa alpha apple bar bear beat boom boot".split()
    dfa = fsa.strings_dfa(strings)
    assert list(dfa.generate_all()) == strings

    # All permutations of every prefix of "abcd", sorted.
    domain = "abcd"
    words = set()
    for size in xrange(1, len(domain) + 1):
        words.update("".join(p) for p in permutations(domain[:size]))
    words = sorted(words)
    dfa = fsa.strings_dfa(words)
    assert list(dfa.generate_all()) == words
|
#!/bin/sh
# CGI endpoint: emit the hourly recording directories under /tmp/sd/record
# (names matching *H*, exactly 14 characters) as a JSON array.
# Fixed: the script used bash-only constructs (`==` inside `[`,
# `${var:offset:length}` substrings) under a /bin/sh shebang, and compared
# the numeric loop counter with a string operator.
printf "Content-type: application/json\r\n\r\n"
printf "{\"records\":[\n"
COUNT=$(ls -r /tmp/sd/record | grep -c H)
IDX=1
for f in $(ls -r /tmp/sd/record | grep H); do
    if [ "${#f}" -eq 14 ]; then
        printf "{\n"
        # Directory names encode the timestamp, e.g. YYYY_MM_DD_HH_...
        printf "\"%s\":\"%s\",\n" "datetime" "Date: $(echo "$f" | cut -c1-4)-$(echo "$f" | cut -c6-7)-$(echo "$f" | cut -c9-10) Time: $(echo "$f" | cut -c12-13):00"
        printf "\"%s\":\"%s\"\n" "dirname" "$f"
        # No trailing comma after the last record (JSON validity).
        if [ "$IDX" -eq "$COUNT" ]; then
            printf "}\n"
        else
            printf "},\n"
        fi
        IDX=$((IDX+1))
    fi
done
printf "]}\n"
|
<filename>src/client/app/models/query.ts
/** One page of a paginated people-search result. */
export interface PaginatedReturnQuery {
  /** Total number of matching records across all pages. */
  count: number;
  this_page?: ShortProfile[];
  /** URL of the next page, when there is one. */
  next_page?: string;
  /** URL of the previous page, when there is one. */
  previous_page?: string;
}

/** Summary of a person, as returned inside list results. */
export interface ShortProfile {
  name: Name;
  link: string; // /api/people/id
  keywords?: string[];
  email?: string;
  faculty?: string;
  department?: string;
}

/** Full person profile. */
export interface Profile {
  name: Name;
  department?: string;
  campus?: string;
  faculty?: string;
  building?: string;
  room?: string;
  email?: string;
  website?: string;
  // NOTE(review): typed `any` here but `string[]` in ShortProfile -- confirm
  // the API shape and narrow the type if possible.
  keywords: any;
  publications?: string[];
}

/** Structured person name. */
export interface Name {
  first : string;
  last : string;
  title? : string;
  initials? : string;
  alias? : string;
}

/** A publication together with its (short-profile) authors. */
export interface Publication {
  title: string;
  abstract: string;
  date: string;
  authors: ShortProfile[];
}
|
<gh_stars>0
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fantasy.nacos.common.http.client.request;
import com.fantasy.nacos.common.constant.HttpHeaderConsts;
import com.fantasy.nacos.common.http.HttpClientConfig;
import com.fantasy.nacos.common.http.HttpUtils;
import com.fantasy.nacos.common.http.client.response.HttpClientResponse;
import com.fantasy.nacos.common.http.client.response.JdkHttpClientResponse;
import com.fantasy.nacos.common.http.param.Header;
import com.fantasy.nacos.common.http.param.MediaType;
import com.fantasy.nacos.common.model.RequestHttpEntity;
import com.fantasy.nacos.common.utils.JacksonUtils;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
/**
* JDK http client request implement.
*
* @author mai.jh
*/
public class JdkHttpClientRequest implements HttpClientRequest {

    // Default configuration; may be replaced per request (see replaceDefaultConfig).
    private HttpClientConfig httpClientConfig;

    public JdkHttpClientRequest(HttpClientConfig httpClientConfig) {
        this.httpClientConfig = httpClientConfig;
    }

    /**
     * Performs a single HTTP exchange over a {@link HttpURLConnection}.
     *
     * @param uri               target URI
     * @param httpMethod        HTTP method name
     * @param requestHttpEntity headers, optional body and optional per-request config
     * @return a response wrapper around the opened connection
     * @throws Exception on connection or serialization failure
     */
    @Override
    public HttpClientResponse execute(URI uri, String httpMethod, RequestHttpEntity requestHttpEntity)
            throws Exception {
        final Object body = requestHttpEntity.getBody();
        final Header headers = requestHttpEntity.getHeaders();
        // Per-request config, when present, overrides the default for this
        // instance (and subsequent requests).
        replaceDefaultConfig(requestHttpEntity.getHttpClientConfig());
        HttpURLConnection conn = (HttpURLConnection) uri.toURL().openConnection();
        Map<String, String> headerMap = headers.getHeader();
        if (headerMap != null && headerMap.size() > 0) {
            for (Map.Entry<String, String> entry : headerMap.entrySet()) {
                conn.setRequestProperty(entry.getKey(), entry.getValue());
            }
        }
        conn.setConnectTimeout(this.httpClientConfig.getConTimeOutMillis());
        conn.setReadTimeout(this.httpClientConfig.getReadTimeOutMillis());
        conn.setRequestMethod(httpMethod);
        if (body != null) {
            // Body is serialized to JSON first; form posts are then re-encoded
            // as URL-encoded key/value pairs.
            String contentType = headers.getValue(HttpHeaderConsts.CONTENT_TYPE);
            String bodyStr = JacksonUtils.toJson(body);
            if (MediaType.APPLICATION_FORM_URLENCODED.equals(contentType)) {
                Map<String, String> map = JacksonUtils.toObj(bodyStr, HashMap.class);
                bodyStr = HttpUtils.encodingParams(map, headers.getCharset());
            }
            if (bodyStr != null) {
                conn.setDoOutput(true);
                // NOTE(review): getBytes() uses the platform default charset,
                // not headers.getCharset() -- confirm this matches the server's
                // expectation before changing.
                byte[] b = bodyStr.getBytes();
                conn.setRequestProperty("Content-Length", String.valueOf(b.length));
                conn.getOutputStream().write(b, 0, b.length);
                conn.getOutputStream().flush();
                conn.getOutputStream().close();
            }
        }
        conn.connect();
        return new JdkHttpClientResponse(conn);
    }

    /**
     * Replace the HTTP config created by default with the HTTP config specified in the request.
     *
     * @param replaceConfig http config
     */
    private void replaceDefaultConfig(HttpClientConfig replaceConfig) {
        if (replaceConfig == null) {
            return;
        }
        this.httpClientConfig = replaceConfig;
    }

    @Override
    public void close() throws IOException {
        // Nothing to release here: each execute() opens its own connection,
        // whose lifetime is managed by the response wrapper.
    }
}
|
class SliceConceptualGraph:
    """Conceptual-graph representation of a network slice."""

    #: Keys read from the ``parameters`` dict at construction time.
    _PARAMETER_KEYS = ('best_qos', 'worst_qos', 'radius')

    def __init__(self, name, midhaul_qos, backhaul_qos, parameters):
        """Store the slice identity, QoS settings and graph parameters.

        :param name: slice name
        :param midhaul_qos: QoS description for the midhaul segment
        :param backhaul_qos: QoS description for the backhaul segment
        :param parameters: dict containing 'best_qos', 'worst_qos', 'radius'
        """
        self.name = name
        self.midhaul_qos = midhaul_qos
        self.backhaul_qos = backhaul_qos
        for key in self._PARAMETER_KEYS:
            setattr(self, key, parameters[key])

    def create_conceptual_graph(self):
        """Return a dict describing the slice's conceptual graph."""
        fields = ('name', 'midhaul_qos', 'backhaul_qos') + self._PARAMETER_KEYS
        return {field: getattr(self, field) for field in fields}
import { NbStepperComponent } from './stepper.component';
import { Directive, HostBinding, HostListener, Input } from '@angular/core';
@Directive({
  selector: 'button[nbStepperNext]',
})
export class NbStepperNextDirective {
  /** Button type attribute; defaults to 'submit' so it can submit step forms. */
  @Input() @HostBinding('attr.type') type: string = 'submit';

  constructor(private stepper: NbStepperComponent) {
  }

  /** Advances the host stepper to the next step when the button is clicked. */
  @HostListener('click')
  onClick() {
    this.stepper.next();
  }
}
@Directive({
  selector: 'button[nbStepperPrevious]',
})
export class NbStepperPreviousDirective {
  /** Button type attribute; defaults to 'button' so it never submits a form. */
  @Input() @HostBinding('attr.type') type: string = 'button';

  constructor(private stepper: NbStepperComponent) {
  }

  /** Moves the host stepper back to the previous step when clicked. */
  @HostListener('click')
  onClick() {
    this.stepper.previous();
  }
}
|
Minimize travelling cost
subject to
for every city pair (ci, cj):
minCost(ci, cj) <= costTravelled(ci, cj)
where
minCost(ci, cj) is the minimum possible cost of travelling between the two cities ci and cj.
costTravelled(ci, cj) is the cost actually incurred when travelling from city ci to city cj.
`CONSTANTS for BEYOND ZORK:
Copyright (C)1987 Infocom, Inc. All rights reserved.`
const EOL = 13;
const LF = 10;
const SP = 32;
const EXCLAM = 33;
const QUOTATION = 34;
const PER_ = 46;
const COMMA = 44;
const DEC_20 = 1;
const APPLE_2E = 2;
const MACINTOSH = 3;
const AMIGA = 4;
const ATARI_ST = 5;
const IBM = 6;
const C128 = 7;
const C64 = 8;
const APPLE_2C = 9;
const APPLE_2GS = 10;
const MACHINES
= PLTABLE("DEC-20"
, "Apple //e"
, "Macintosh"
, "Amiga"
, "Atari ST"
, "IBM/MS-DOS"
, "Commodore 128"
, "C64"
, "Apple //c"
, "Apple //gs"
, "Tandy Color Computer");
const F_OLD = 0;
const F_DEFAULT = 1;
const F_PICTURES = 2;
const F_NEWFONT = 3;
const S_TEXT = 0;
const S_WINDOW = 1;
const S_BEEP = 1;
const S_BOOP = 2;
const H_NORMAL = 0;
const H_INVERSE = 1;
const H_BOLD = 2;
const H_ITALIC = 4;
const H_MONO = 8;
const D_SCREEN_ON = 1;
const D_SCREEN_OFF = -1;
const D_PRINTER_ON = 2;
const D_PRINTER_OFF = -2;
const D_TABLE_ON = 3;
const D_TABLE_OFF = -3;
const D_RECORD_ON = 4;
const D_RECORD_OFF = -4;
"Color constants"
const C_SAME = 0;
const C_DEFAULT = 1;
const C_BLACK = 2;
const C_RED = 3;
const C_GREEN = 4;
const C_YELLOW = 5;
const C_BLUE = 6;
const C_MAGENTA = 7;
const C_CYAN = 8;
const C_WHITE = 9;
const BWWW = [C_BLUE, C_WHITE, C_WHITE, C_WHITE];
const BWCR = [C_BLACK, C_WHITE, C_CYAN, C_RED];
const WBBB = [C_WHITE, C_BLACK, C_BLACK, C_BLACK];
const DWWW = [C_BLACK, C_WHITE, C_WHITE, C_WHITE];
const DEFCOLORS
= [C_DEFAULT, C_DEFAULT, C_DEFAULT, C_DEFAULT];
const ST_MONO = PLTABLE(DWWW, WBBB);
const MACHINE_COLORS
= PLTABLE(0/*"DEC-20"*/
, 0/*"Apple //e"*/
, 0/*"Macintosh"*/
, PLTABLE(BWCR, DWWW, BWWW, WBBB)/*"Amiga"*/
//, PLTABLE(BWCR, DWWW, BWWW, WBBB)/*"Atari ST"*/
, PLTABLE(BWCR, DWWW, BWWW, WBBB,[C_BLUE, C_WHITE, C_WHITE, C_GREEN],[C_BLACK, C_WHITE, C_YELLOW, C_CYAN])/*"Atari ST More Colors"*/
, PLTABLE(DEFCOLORS
, BWWW
, [C_BLUE, C_WHITE, C_WHITE, C_GREEN]
, BWCR, DWWW, WBBB)/*"IBM"*/
, PLTABLE([C_BLACK, C_WHITE, C_YELLOW, C_CYAN]
, DWWW, WBBB, BWWW)/*"C128"*/
, 0/*"C64"*/
, 0/*"Apple //c"*/
, PLTABLE(BWCR, DWWW, BWWW, WBBB)/*"Apple //gs"*/
);
"Apple //c MouseText characters."
const APPLE_LEFT = 95;
const APPLE_RIGHT = 90;
const APPLE_HORZ = 76;
"IBM graphics chars."
const IBM_TRC = 191;
const IBM_BLC = 192;
const IBM_BRC = 217;
const IBM_TLC = 218;
const IBM_HORZ = 196;
const IBM_VERT = 179;
const DIR_HACKS = [-7, -6, 1, 8, 7, 6, -1, -8];
const I_NORTH = 0;
const I_NE = 1;
const I_EAST = 2;
const I_SE = 3;
const I_SOUTH = 4;
const I_SW = 5;
const I_WEST = 6;
const I_NW = 7;
const I_U = 8;
const I_D = 9;
const DIR_NAMES
= [VOC("NORTH", null), VOC("NORTHEAST", null)
, VOC("EAST", null), VOC("SOUTHEAST", null)
, VOC("SOUTH", null), VOC("SOUTHWEST", null)
, VOC("WEST", null), VOC("NORTHWEST", null)
, VOC("UP", null), VOC("DOWN", null)];
const PDIR_LIST
= ["NORTH", "NE", "EAST", "SE"
, "SOUTH", "SW", "WEST", "NW"
, "UP", "DOWN", "IN", "OUT"];
const XPDIR_LIST
= ["SOUTH", "SW", "WEST", "NW"
, "NORTH", "NE", "EAST", "SE"
, "DOWN", "UP", "OUT", "IN"];
const UP_ARROW = 38;//129;
const DOWN_ARROW = 40;//130;
const LEFT_ARROW = 37;//131;
const RIGHT_ARROW = 39;//132;
const F1 = 133;
const F2 = 134;
const F3 = 135;
const F4 = 136;
const F5 = 137;
const F6 = 138;
const F7 = 139;
const F8 = 140;
const F9 = 141;
const F10 = 142;
const F11 = 143;
const F12 = 144;
const PAD0 = 145;
const PAD1 = 146;
const PAD2 = 147;
const PAD3 = 148;
const PAD4 = 149;
const PAD5 = 150;
const PAD6 = 151;
const PAD7 = 152;
const PAD8 = 153;
const PAD9 = 154;
const CLICK1 = 254;
const CLICK2 = 253;
const MAC_DOWN_ARROW = '\/'.charCodeAt(0);
const MAC_UP_ARROW = '\\'.charCodeAt(0);
const TCHARS
= [[KERNEL, BYTE]
, UP_ARROW, DOWN_ARROW, LEFT_ARROW, RIGHT_ARROW
, F1, F2, F3, F4, F5, F6, F7, F8, F9, F10
, PAD0, PAD1, PAD2, PAD3, PAD4, PAD5, PAD6, PAD7, PAD8, PAD9
, CLICK1, CLICK2, 0, 0, 0];
const FIRST_MAC_ARROW = 26;
const PAD_NAMES
= [VOC("SOUTHWEST", null)
, VOC("SOUTH", null)
, VOC("SOUTHEAST", null)
, VOC("WEST", null)
, VOC("AROUND", null)
, VOC("EAST", null)
, VOC("NORTHWEST", null)
, VOC("NORTH", null)
, VOC("NORTHEAST", null)];
const C_TABLE_LENGTH = 100;
var C_TABLE = [];
const C_INTLEN = 4;"Length of an interrupt entry."
const C_RTN = 0;"Offset of routine name."
const C_TICK = 1;"Offset of count."
const REXIT = 0;
const UEXIT = 2;
const NEXIT = 3;
const FEXIT = 4;
const CEXIT = 5;
const DEXIT = 6;
const NEXITSTR = 0;
const FEXITFCN = 0;
const CEXITFLAG = 4;
const CEXITSTR = 1;
const DEXITOBJ = 1;
const DEXITSTR = 2;
const NEW_STATS = ITABLE(8, [BYTE], 0);
const NORMAL_RATE = 2;
const BLESSED_RATE = (NORMAL_RATE * 2);
const MIN_HIT_PROB = 50;
const MAX_HIT_PROB = 95;
const STSTR = ["EN", "ST", "DX", "IQ", "CM", "LK", "AC"];
const KEY_LABELS
= [" F1", " F2", " F3", " F4", " F5"
, " F6", " F7", " F8", " F9", "F10"];
const APPLE_LABELS
= ["[1]", "[2]", "[3]", "[4]", "[5]"
, "[6]", "[7]", "[8]", "[9]", "[0]"];
const SOFT_LEN = 36;
const NSOFT_LEN = -36;
const SOFT_KEYS
= [ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)
, ITABLE((SOFT_LEN + 2), [BYTE], 0)];
const KEY_DEFAULTS
= [PLTABLE([STRING], "look around|")
, PLTABLE([STRING], "inventory|")
, PLTABLE([STRING], "status|")
, PLTABLE([STRING], "examine")
, PLTABLE([STRING], "take")
, PLTABLE([STRING], "drop")
, PLTABLE([STRING], "attack monster|")
, PLTABLE([STRING], "again|")
, PLTABLE([STRING], "undo|")
, PLTABLE([STRING], "oops")];
const FUMBLE_NUMBER = 6;
const LOAD_ALLOWED = 30;
const NORMAL_ATTACK = 0;
const PARRYING = 1;
const THRUSTING = 2;
const YAWNS
= LTABLE(2, "unusual", "interesting", "extraordinary", "special");
const HO_HUM
= LTABLE(2
, "n't do anything useful"
, " accomplish nothing"
, " have no desirable effect"
, "n't be very productive"
, " serve no purpose"
, " be pointless");
const YUKS
= LTABLE(2
, "That's impossible"
, "What a ridiculous concept"
, "You can't be serious");
const FIRMS
= LTABLE(2, "firm", "permanent", "immovab", "secure");
const ATTACHES
= LTABLE(2, "attached", "affixed");
const POINTLESS
= LTABLE(2
, "There's no point in doing that"
, "That would be pointless"
, "That's a pointless thing to do");
const PUZZLES
= LTABLE(2, "puzzl", "bewilder", "confus", "perplex");
const UNKNOWN_MSGS
= LTABLE(2
, ["The word \""
, "\" isn't in the vocabulary that you can use."]
, ["You don't need to use the word \""
, "\" to complete this story."]
, ["This story doesn't recognize the word \""
, ".\""]);
const LIKELIES
= LTABLE(2
, " isn't likely"
, " seems doubtful"
, " seems unlikely"
, "'s unlikely"
, "'s not likely"
, "'s doubtful");
"List of words to be capitalized."
const CAPS
= LTABLE(-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
, VOC("MR", null), VOC("MRS", null), VOC("MISS", null)
, VOC("I", null), VOC("N", null), VOC("S", null)
, VOC("E", null), VOC("W", null), VOC("ZORK", null)
, VOC("ZORKMID", null), VOC("ZORKMIDS", null), VOC("ZM", null)
, VOC("CHRISTMAS", null), VOC("XMAS", null)
, VOC("FROON", null), VOC("FROTZEN", null)
, VOC("GUILD", null), VOC("ACCARDI", null), VOC("GRUBBO", null)
, VOC("THRIFF", null), VOC("ALEXIS", null), VOC("PHEE", null)
, VOC("PHEEBOR", null), VOC("PHEEHELM", null)
, VOC("QUEEN", null), VOC("GROTE", null), VOC("CLUTCHCAKE", null)
, VOC("BOK", null), VOC("YABBA", null), VOC("SMEE", null)
, VOC("SQUIRP", null), VOC("STELLA", null), VOC("BLARN", null)
, VOC("PROSSER", null), VOC("YQUEM", null), VOC("WATKIN", null)
, VOC("JUKES", null), VOC("MACUGA", null)
, VOC("GRUESLAYER", NOUN), VOC("SKYCAR", null)
, VOC("SKYWAY", null), VOC("Y\'GAEL", null));
const VOCAB2 = {};
"Game-specific constants."
const BASE_CHAR = 79;"Base (0) character of bargraph charset."
const MARKBIT = 128;
const SHOWING_ROOM = 1;
const SHOWING_INV = 2;
const SHOWING_ALL = (SHOWING_ROOM + SHOWING_INV);
const SHOWING_STATS = 4;
const SLINE_LENGTH = 82;
const SLINE = ITABLE(SLINE_LENGTH, [BYTE], 0);
const MAX_HEIGHT = 25;
const MAX_DWIDTH = 62;
const DBOX_LENGTH = ((MAX_HEIGHT * MAX_DWIDTH) + 2);
const DBOX = ITABLE(DBOX_LENGTH, [BYTE], 0);
const MWIDTH = 17;
const MHEIGHT = 11;
const MAP_SIZE = (MWIDTH * MHEIGHT);
const MAP = ITABLE(MAP_SIZE, [BYTE], 0);
const CENTERX = Math.floor(MWIDTH / 2);
const NCENTERX = (0 - CENTERX);
const CENTERY = Math.floor(MHEIGHT / 2);
const NCENTERY = (0 - CENTERY);
const ROOMS_MAPPED_LENGTH = 46;
const ROOMS_MAPPED = ITABLE(ROOMS_MAPPED_LENGTH, [BYTE], 0);
const MAP_TOP = 1;
const MAP_BOT = (MHEIGHT - 2);
const MAP_LEFT = 256;
const MAP_RIGHT = [15, 0];
const NGVERBS = 33;"Number of GAME-VERBS."
const GAME_VERBS
= ["INVENTORY", "STATUS", "TELL"
, "SAVE", "RESTORE", "RESTART", "UNDO", "TIME", "SCORE", "DIAGNOSE"
, "SCRIPT", "UNSCRIPT", "HELP", "MONITOR", "CASH"
, "VERSION", "QUIT", "MODE", "SETTINGS", "DEFINE"
, "VERBOSE", "BRIEF", "SUPER_BRIEF", "NOTIFY", "NAME"
, "PRIORITY_ON", "PRIORITY_OFF", "ZOOM"
, "REFRESH", "COLOR", "$VERIFY", "SPELLS", "$CREDITS"
, /*"V?$RECORD V?$UNRECORD V?$COMMAND V?$RANDOM V?$CHEAT V?$SUSS"*/];
"These verbs reverse the order of PRSO and PRSI."
const NR_VERBS = 19;
const R_VERBS
= ["STOUCH_TO", "SASK_FOR"
, "SGIVE", "SSHOW", "SFEED", "SSELL-TO", "SBUY"
, "SHIT", "SPOINT_AT", "STHROW"
, "SWRAP", "COVER", "DIG", "DIG-UNDER", "SDIG", "SLOOK_THRU"
, "WEDGE", "SFIRE_AT", "SWING"];
// Verb-class tables ported from ZIL. Hyphenated source names are normalized to
// the underscore form used at runtime (e.g. "PUT-ON" -> "PUT_ON").
// Fix: use /-/g so EVERY hyphen is replaced — the original /\-/ (no global
// flag) replaced only the first occurrence. Identical behavior for today's
// single-hyphen names, but no longer fragile for multi-hyphen ones.
// Each N* count constant must stay in sync with its table's length.
const NHAVES = 23;"Number of HAVEVERBS."
const HAVEVERBS
    = ["DROP", "PUT", "PUT-ON", "GIVE", "SHOW", "FEED", "THROW", "HIT"
    , "PUT-UNDER", "PUT-BEHIND", "THROW-OVER", "RELEASE"
    , "TAKE-WITH", "TOUCH-TO", "OPEN", "OPEN-WITH", "CLOSE", "COVER"
    , "ERASE-WITH", "POINT-AT", "SUBMERGE", "WIELD", "UNWIELD"].map(x => x.replace(/-/g, "_"));
const NTVERBS = 17;"Number of TALKVERBS."
const TALKVERBS
    = ["TELL", "TELL-ABOUT", "ASK-ABOUT", "ASK-FOR", "WHAT", "WHERE", "WHO"
    , "ALARM", "HELLO", "GOODBYE", "SAY", "YELL", "THANK", "QUESTION", "REPLY"
    , "LAUGH", "REQUEST"].map(x => x.replace(/-/g, "_"));
const NTOUCHES = 81;"Number of TOUCHVERBS"
const TOUCHVERBS
    = ["TAKE", "TAKE-OFF", "TAKE-WITH"
    , "PUT", "PUT-ON", "PUT-UNDER", "PUT-BEHIND"
    , "COVER", "EMPTY-INTO", "REACH-IN", "TOUCH-TO", "TOUCH", "HIT", "THRUST"
    , "PARRY", "PUNCH", "KICK", "MOVE", "PUSH", "PUSH-TO", "PUSH-UP", "PUSH-DOWN"
    , "PULL", "LOWER", "RAISE", "LOOSEN", "TURN-TO", "ADJUST", "SPIN", "TURN"
    , "SHAKE", "SWING", "OPEN", "OPEN-WITH", "CLOSE", "LOCK", "UNLOCK"
    , /*"V?SCREW V?UNSCREW"*/ "UPROOT"
    , "PLUG-IN", "UNPLUG", "TIE", "UNTIE", "FOLD", "LAMP-ON", "LAMP-OFF"
    , "WRAP-AROUND", "CUT", "RIP", "MUNG", "DIG", "DIG-UNDER"
    , "FILL", "FILL-FROM"
    , "DEFLATE", "BURN-WITH", "CLEAN", "CLEAN-OFF", "BLOW-INTO", "DETONATE"
    , "WIND", "REPAIR", "REPLACE", "PICK", "MELT", "SQUEEZE", /*"PLAY"*/
    /*"V?UNSCREW-FROM V?SCREW-WITH"*/ "GIVE", "FEED", "STAND-ON"
    , "SIT", "LIE-DOWN", "EAT", "BITE", "TASTE", "DRINK", "DRINK-FROM", "POP"
    , "CRANK", "SCRATCH", "SCRAPE-ON", "PEEL", "SUBMERGE"].map(x => x.replace(/-/g, "_"));
// NOTE(review): "KICK" appears twice below; both occurrences count toward
// NHVERBS = 17 — confirm the duplicate is intentional.
const NHVERBS = 17;"Number of HURTVERBS."
const HURTVERBS
    = ["HIT", "PUNCH", "KICK", "MUNG", "KNOCK", "KICK", "SQUEEZE", "CUT"
    , "RIP", "BITE", "RAPE", "SHAKE", "UNDRESS", "DETONATE", "PUSH", "PUSH-TO"
    , "PULL"].map(x => x.replace(/-/g, "_"));
const NUMPUTS = 10;"# PUTVERBS."
const PUTVERBS
    = ["DROP", "PUT", "PUT-ON", "PUT-UNDER", "PUT-BEHIND", "THROW"
    , "THROW-OVER", "EMPTY", "EMPTY-INTO", "HANG-ON"].map(x => x.replace(/-/g, "_"));
const NMVERBS = 28;"Number of MOVEVERBS."
const MOVEVERBS
    = ["TAKE", "TAKE-OFF", "MOVE", "PULL", "PUSH", "PUSH-TO", "PUSH-UP"
    , "PUSH-DOWN", "TURN", "RAISE", "UPROOT"
    , "LOWER", "SPIN", "SHAKE", /*"PLAY"*/ "OPEN", "OPEN-WITH", "CLOSE", "ADJUST"
    , "TURN-TO", "POINT-AT", "SWING", "UNPLUG", "BOUNCE"
    , "PUT-UNDER", "PUT-BEHIND", "LOOK-UNDER", "LOOK-BEHIND", "CRANK"].map(x => x.replace(/-/g, "_"));
const NSVERBS = 19;"Number of SEEVERBS"
const SEEVERBS
    = ["EXAMINE", "LOOK", "LOOK-INSIDE", "LOOK-ON", "READ", "FIND"
    , "SEARCH", "SHOW", "LOOK-UNDER", "LOOK-BEHIND", "LOOK-THRU"
    , "LOOK-DOWN", "LOOK-UP", "READ-TO", "LOOK-OUTSIDE", "COUNT"
    , "ADJUST", "POINT", "EXAMINE-IN"].map(x => x.replace(/-/g, "_"));
// E_VERBS: first ENTER_VERBS entries are "enter" verbs; entries up to
// CLIMB_ON_VERBS also cover climbing onto things.
const ENTER_VERBS = 5;
const CLIMB_ON_VERBS = 13;
const E_VERBS
    = ["WALK-TO", "ENTER", "THROUGH", "FOLLOW", "USE"
    , "CLIMB-ON", "CLIMB-UP", "CLIMB-OVER", "SIT", "RIDE"
    , "STAND-ON", "LIE-DOWN", "CROSS"].map(x => x.replace(/-/g, "_"));
const EXIT_VERBS = 3;
const CLIMB_DOWN_VERBS = 5;
const X_VERBS
    = ["EXIT", "LEAVE", "ESCAPE"
    , "CLIMB-DOWN", "LEAP"].map(x => x.replace(/-/g, "_"));
// Compass-direction bit flags, one bit per direction, N..NW clockwise.
const D_N = 1/*1*/;
const D_NE = 2/*10*/;
const D_E = 4/*100*/;
const D_SE = 8/*1000*/;
const D_S = 16/*10000*/;
const D_SW = 32/*100000*/;
const D_W = 64/*1000000*/;
const D_NW = 128/*10000000*/;
const DBIT_LIST
    = [D_N, D_NE, D_E, D_SE, D_S, D_SW, D_W, D_NW];
// Complements of the direction bits (all bits set EXCEPT that direction),
// used to mask a single exit out of a border byte.
const XD_N = 254/*11111110*/;
const XD_NE = 253/*11111101*/;
const XD_E = 251/*11111011*/;
const XD_SE = 247/*11110111*/;
const XD_S = 239/*11101111*/;
const XD_SW = 223/*11011111*/;
const XD_W = 191/*10111111*/;
const XD_NW = 127/*01111111*/;
// Twelve entries: the 8 directions plus the first 4 repeated, so code can
// index past the end (dir + 1..4) without wrapping manually.
const XDBIT_LIST
    = [XD_N, XD_NE, XD_E, XD_SE
    , XD_S, XD_SW, XD_W, XD_NW
    , XD_N, XD_NE, XD_E, XD_SE];
const D_ALL = 255;
// Edge masks: the exits that can exist for a cell on a given border of the map
// (a left-edge cell has no westward exits, etc.).
const D_LEFT = (D_N + D_NE + D_E + D_SE + D_S);
const D_RIGHT = (D_N + D_S + D_SW + D_W + D_NW);
const D_TOP = (D_E + D_SE + D_S + D_SW + D_W);
const D_BOTTOM = (D_N + D_NE + D_E + D_W + D_NW);
"Pure border data (copied into BORDERS)."
// 7x7 grid of border bytes: corner cells, edge runs, and a fully-open
// interior (D_ALL). Copied into the mutable BORDERS table at runtime.
const DEFAULT_BORDERS
    = PLTABLE([BYTE]
    , (D_E + D_SE + D_S)
    , D_TOP, D_TOP, D_TOP, D_TOP, D_TOP
    , (D_S + D_SW + D_W)
    , D_LEFT, D_ALL, D_ALL, D_ALL, D_ALL, D_ALL, D_RIGHT
    , D_LEFT, D_ALL, D_ALL, D_ALL, D_ALL, D_ALL, D_RIGHT
    , D_LEFT, D_ALL, D_ALL, D_ALL, D_ALL, D_ALL, D_RIGHT
    , D_LEFT, D_ALL, D_ALL, D_ALL, D_ALL, D_ALL, D_RIGHT
    , D_LEFT, D_ALL, D_ALL, D_ALL, D_ALL, D_ALL, D_RIGHT
    , (D_N + D_NE + D_E)
    , D_BOTTOM, D_BOTTOM, D_BOTTOM, D_BOTTOM, D_BOTTOM
    , (D_N + D_W + D_NW));
// Display names for the 12 exit slots (8 compass + Up/Down/In/Out).
const XLIST_NAMES
    = ["N", "NE", "E", "SE", "S", "SW", "W", "NW"
    , "Up", "Down", "In", "Out"];
const NORMAL_DHEIGHT = 9;
// Per-direction x/y cell offsets, indexed in DBIT_LIST order (N..NW).
const XOFFS = [0, 1, 1, 1, 0, -1, -1, -1];
const YOFFS = [-1, -1, 0, 1, 1, 1, 0, -1];
// ASCII fallback glyphs per direction: | / - \ repeated for the 8 directions.
const SHITCHARS
    = [124, 47, 45, 92, 124, 47, 45, 92];
const QDIRS = [0, 2, 4, 6, 0];   // cardinal direction indices (wraps to N)
const ZDIRS = [3, 5, 7, 1, 3, 5]; // diagonal direction indices (with wrap)
// Settings screen: per-entry cursor offsets and menu labels (9 entries each).
const SETOFFS
    = [4, 4, 3, 3, 8, 0, 2, 19, 19];
const SNAMES
    = [" Display Mode "
    , " Descriptions "
    , " Transcripting "
    , " Status Notify "
    , " Map View "
    , " Display Priority "
    , " Combat Monitor "
    , " Restore Defaults "
    , " Exit "];
"Character set data."
const QMARK = 96;
const IQMARK = 126;
const TRCORNER = 71;
const BRCORNER = 72;
const BLCORNER = 73;
const TLCORNER = 74;
const TOPEDGE = 75;
const BOTEDGE = 76;
const LEDGE = 77;
const REDGE = 78;
const RDIAG = 35;
const LDIAG = 36;
const SOLID = 32;//37;
const BOT = 38;
const TOP = 39;
const LSID = 40;
const RSID = 41;
const NCON = 42;
const SCON = 43;
const ECON = 44;
const WCON = 45;
const BLC = 46;
const TLC = 47;
const TRC = 48;
const BRC = 49;
const SWCON = 50;
const NWCON = 51;
const NECON = 52;
const SECON = 53;
const ISOLID = 54;
const XCROSS = 90;
const HVCROSS = 91;
const UARROW = 92;
const DARROW = 93;
const UDARROW = 94;
const SMBOX = 95;
const MCHARS
= [LSID, RDIAG, BOT, LDIAG, LSID, RDIAG, BOT, LDIAG];
const XCHARS
= [NCON, NECON, ECON, SECON, SCON, SWCON, WCON, NWCON];
const NXCHARS
= [TOP, TRC, LSID, BRC, BOT, BLC, RSID, TLC];
const IUARROW = 123;
const IDARROW = 124;
const IUDARROW = 125;
const LCAP = 88;
const RCAP = 89;
// Character-statistic indices into the stats array, each with a short alias.
const ENDURANCE = 0;
const HP = 0;
const STRENGTH = 1;
const STR = 1;
const DEXTERITY = 2;
const DEX = 2;
const INTELLIGENCE = 3;
const IQ = 3;
const COMPASSION = 4;
const COM = 4;
const LUCK = 5;
const ARMOR_CLASS = 6;
const AC = 6;
const EXPERIENCE = 7;
const EXP = 7;
const NSTATS = 8;"Number of statistics."
// Starting values for a newly created character.
const BEGINNERS_ENDURANCE = 16;
const BEGINNERS_STRENGTH = 8;
const BEGINNERS_DEXTERITY = 8;
const BEGINNERS_INTELLIGENCE = 8;
const BEGINNERS_COMPASSION = 1;
const BEGINNERS_LUCK = 25;
const NAKED_ARMOR_CLASS = 1;
const BEGINNERS_EXPERIENCE = 0;
const STATMAX = 99;"High as any statistic can go."
const READING_IQ = 40;          // minimum IQ needed to read
const WINNING_COMPASSION = -45; // NOTE(review): negative despite COMPASSION min of 1 — confirm semantics
// Starting stat vector, indexed by the stat constants above (0..7).
const DEFAULT_STATS
    = [BEGINNERS_ENDURANCE
    , BEGINNERS_STRENGTH
    , BEGINNERS_DEXTERITY
    , BEGINNERS_INTELLIGENCE
    , BEGINNERS_COMPASSION
    , BEGINNERS_LUCK
    , NAKED_ARMOR_CLASS
    , BEGINNERS_EXPERIENCE];
// Character-class presets shown at creation time: names plus one row of six
// starting stats (END, STR, DEX, INT, COM, LUCK) per class.
const CNAME_LEN = 7;
const BARMAR = (CNAME_LEN + 3);
const APPBOX = 18;"Width of Apple stat box."
const CNAMES
    = ["Lucky "
    , "Tank "
    , "Muscles"
    , "Nimble "
    , "Genius "
    , "Saint "];
const CSTATS
    = [[16, 8, 8, 8, 1, 25]
    , [30, 16, 8, 1, 1, 10]
    , [18, 20, 18, 4, 1, 5]
    , [12, 14, 20, 4, 1, 15]
    , [12, 10, 8, 16, 5, 15]
    , [12, 10, 8, 8, 16, 12]
    ];
// Total "point budget" of the default build: sum of the six tunable beginner
// stats minus 6 (one point per stat appears reserved — confirm).
const INIT_POTENTIAL
    = (-6 + BEGINNERS_LUCK + BEGINNERS_COMPASSION
    + BEGINNERS_INTELLIGENCE + BEGINNERS_DEXTERITY
    + BEGINNERS_STRENGTH + BEGINNERS_ENDURANCE);
const AVERAGE = (INIT_POTENTIAL / 6);  // 60/6 = 10, exact for the shipped values
const SPREAD = (AVERAGE / 2);
// Experience thresholds per level (9 entries, levels 0..MAX_LEVEL).
const THRESHOLDS = [9, 29, 59, 99, 149, 209, 279, 359, 449];
const MAX_LEVEL = 8;
// NOTE(review): only 3 rank names for MAX_LEVEL = 8 — list looks truncated
// (or "Cheater" is a deliberate catch-all); confirm.
const RANK_NAMES = ["Peasant", "Novice", "Cheater"];
/*const SAVE_NAME = PLTABLE([STRING], "BEYONDZ");*/
// Player-name buffer; `var` because it is the one mutable table here.
const CHARNAME_LENGTH = 24;
var CHARNAME = ITABLE((CHARNAME_LENGTH + 1), [BYTE], 0);
const NAMES_LENGTH = 12;
const DEFAULT_NAME_LENGTH = 13;
// Length-prefixed, zero-terminated default name. NOTE(review): "<NAME>" is a
// dataset redaction placeholder — the prefix 11 implies an 11-character name;
// restore the original before shipping.
const DEFAULT_NAME
    = [11, ...("<NAME>".split("")), 0];
// Stat-bar labels: one string of fixed-width (LABEL_WIDTH-char) segments.
// The leading array hole contributes "" to the join, padding index 0.
const LABEL_WIDTH = 12;
const BAR_LABELS
    = [
    , "   Endurance"
    , "    Strength"
    , "   Dexterity"
    , "Intelligence"
    , "  Compassion"
    , "        Luck"
    , " Armor Class"].join("");
// Lowercase stat names, indexed by the stat constants (0..7).
const STAT_NAMES
    = ["endurance"
    , "strength"
    , "dexterity"
    , "intelligence"
    , "compassion"
    , "luck"
    , "armor class"
    , "experience"];
// Mutable runtime tables and assorted game tuning values.
const BORDERS = ITABLE(50, [BYTE], 0);      // working copy of DEFAULT_BORDERS
const MAZE_ROOMS = ITABLE(51, [BYTE], 0);
const STORAGE_SPACE = 1024;
const FREE_STORAGE = ITABLE(STORAGE_SPACE, [BYTE], 0);
const PRESENT = 6;
const MAX_ATIME = 11;
const FULL = 5;
// Debug "suss" window dimensions and its stat subset.
const SUSS_WIDTH = 18;
const SUSS_HEIGHT = 5;
const SUSSY = 7;
const SUSS_STATS = ["ENDURANCE", "STRENGTH", "DEXTERITY"];
<gh_stars>100-1000
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.stargate.web.restapi.models;
import io.swagger.annotations.ApiModelProperty;
import java.util.List;
/**
 * Request payload for the REST "query rows" endpoint: which columns to
 * project, the filters to apply, the clustering order, and paging controls.
 */
public class Query {
  private List<String> columnNames;
  private List<Filter> filters;
  private ClusteringExpression orderBy;
  private Integer pageSize;
  private String pageState;

  @ApiModelProperty(
      value =
          "A list of column names to return in the result set. An empty array returns all columns.")
  public List<String> getColumnNames() {
    return columnNames;
  }

  public void setColumnNames(List<String> columnNames) {
    this.columnNames = columnNames;
  }

  @ApiModelProperty(
      required = true,
      value =
          "An array of filters to return results for, separated by `AND`. For example, `a > 1 AND b != 1`.")
  public List<Filter> getFilters() {
    return filters;
  }

  public void setFilters(List<Filter> filters) {
    this.filters = filters;
  }

  @ApiModelProperty(value = "The clustering order for rows returned.")
  public ClusteringExpression getOrderBy() {
    return orderBy;
  }

  public void setOrderBy(ClusteringExpression orderBy) {
    this.orderBy = orderBy;
  }

  @ApiModelProperty(value = "The size of the page to return in the result set.")
  public Integer getPageSize() {
    return pageSize;
  }

  public void setPageSize(Integer pageSize) {
    this.pageSize = pageSize;
  }

  @ApiModelProperty(
      value = "A string returned from previous query requests representing the paging state.")
  public String getPageState() {
    return pageState;
  }

  public void setPageState(String pageState) {
    this.pageState = pageState;
  }
}
|
import React from "react";
import Readme from "./readme.mdx";
import { MDXProvider } from "@mdx-js/react";
import CodeBlock from "./code-block";
// MDX component overrides for the README: let CodeBlock own code rendering
// (the <pre> wrapper is replaced by a plain <div> so CodeBlock controls the
// markup), style blockquotes inline, and give h1–h3 anchor ids via H.
const components = {
  pre: props => <div {...props} />,
  code: CodeBlock,
  blockquote: props => (
    <blockquote
      {...props}
      style={{
        borderLeft: "3px solid #999",
        marginLeft: "20px",
        paddingLeft: "10px"
      }}
    />
  ),
  h1: props => <H as="h1" {...props} />,
  h2: props => <H as="h2" {...props} />,
  h3: props => <H as="h3" {...props} />
};
// Heading renderer that adds a slug id (derived from the heading text) so
// in-page anchor links work. `props.as` selects the tag ("h1" | "h2" | "h3").
function H(props) {
  // Pull `as` and `children` out so `as` is NOT forwarded to the DOM element
  // (forwarding it triggered React's unknown-attribute warning).
  const { as, children, ...rest } = props;
  // assumes children is a plain string — TODO confirm for headings containing JSX
  const id =
    children &&
    children
      .split(" ")
      .join("-")
      .toLowerCase();
  return React.createElement(as, { ...rest, id }, children);
}
export default function Docs() {
return (
<div style={{ display: "flex", justifyContent: "center" }}>
<div
style={{
maxWidth: "800px",
width: "90%",
paddingTop: "1px",
position: "relative"
}}
>
<iframe
src="https://ghbtns.com/github-btn.html?user=pomber&repo=code-surfer&type=star&count=true&size=large"
frameborder="0"
scrolling="0"
width="160px"
height="30px"
style={{ right: 0, position: "absolute", top: -40, zIndex: 2 }}
></iframe>
<MDXProvider components={components}>
<Readme />
</MDXProvider>
</div>
</div>
);
}
|
<gh_stars>10-100
package io.opensphere.core.hud.framework.layout;
import io.opensphere.core.hud.framework.LayoutConstraints;
import io.opensphere.core.model.ScreenBoundingBox;
/**
 * Grid bounds within the layout. The grid cells occupied are inclusive. For
 * example, if the bounds are (0, 0) to (0, 0), the component will be placed so
 * that it fully occupies the cell (0, 0).
 */
public class GridLayoutConstraints implements LayoutConstraints
{
    /**
     * Box which defines which cells the associated component will occupy within
     * the layout. Boundaries are inclusive.
     */
    private final ScreenBoundingBox myGridBox;

    /**
     * Construct me.
     *
     * @param gridBox grid cells to occupy (inclusive on all boundaries).
     */
    public GridLayoutConstraints(ScreenBoundingBox gridBox)
    {
        myGridBox = gridBox;
    }

    /**
     * Get the gridBox.
     *
     * @return the gridBox
     */
    public ScreenBoundingBox getGridBox()
    {
        return myGridBox;
    }
}
|
package hudson.plugins.accurev.cmd;
import com.cloudbees.plugins.credentials.common.StandardUsernamePasswordCredentials;
import hudson.EnvVars;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.TaskListener;
import hudson.plugins.accurev.AccurevLauncher;
import hudson.plugins.accurev.AccurevSCM;
import hudson.plugins.accurev.AccurevSCM.AccurevServer;
import hudson.plugins.accurev.parsers.output.ParseInfoToLoginName;
import hudson.util.ArgumentListBuilder;
import hudson.util.Secret;
import java.io.IOException;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Logger;
import jenkins.model.Jenkins;
import org.apache.commons.lang.StringUtils;
/**
 * Helpers around {@code accurev login} / {@code accurev info}: determines
 * whether this node already holds a session for the configured user and logs
 * in when necessary, serialized through the SCM's lock so concurrent builds
 * do not race on the shared AccuRev session.
 */
public class Login extends Command {
  private static final Logger logger = Logger.getLogger(Login.class.getName());

  /**
   * @return The currently logged in user "Principal" name, which may be "(not logged in)" if not
   *     logged in.<br>
   *     Returns null on failure.
   */
  private static String getLoggedInUsername( //
      String accurevTool,
      final AccurevServer server, //
      final EnvVars accurevEnv, //
      final FilePath workspace, //
      final TaskListener listener, //
      final Launcher launcher)
      throws IOException {
    final String commandDescription = "info command";
    final ArgumentListBuilder cmd = new ArgumentListBuilder();
    cmd.add("info");
    addServer(cmd, server);
    // "accurev info" reports the current principal; ParseInfoToLoginName
    // extracts just the login name from that output.
    return AccurevLauncher.runCommand(
        commandDescription,
        accurevTool,
        launcher,
        cmd,
        null,
        accurevEnv,
        workspace,
        listener,
        logger,
        new ParseInfoToLoginName(),
        null);
  }

  /**
   * Ensures this node is authenticated against {@code server} as the configured
   * username. With "minimise logins" enabled the current session is probed
   * first and the login is skipped when the right user is already
   * authenticated. The probe-then-login sequence runs under the SCM's
   * mandatory lock so parallel builds cannot interleave sessions.
   *
   * @return true when already (or now) logged in; false on any failure
   * @throws IOException on process-launch failure
   */
  public static boolean ensureLoggedInToAccurev(
      AccurevSCM scm,
      AccurevServer server,
      EnvVars accurevEnv,
      FilePath pathToRunCommandsIn,
      TaskListener listener,
      Launcher launcher)
      throws IOException {
    String accurevTool = scm == null ? null : scm.getAccurevTool();
    if (server == null) {
      listener.getLogger().println("Authentication failure - Server is empty");
      return false;
    }
    final String requiredUsername = server.getUsername();
    if (StringUtils.isBlank(requiredUsername)) {
      listener.getLogger().println("Authentication failure - Username blank");
      return false;
    }
    // Fall back to the global lock when there is no SCM (e.g. calls coming
    // from the global configuration screen).
    ReentrantLock lock =
        (scm == null) ? AccurevSCM.MASTER_LOCK : scm.getMandatoryLock(pathToRunCommandsIn);
    lock.lock();
    try {
      final boolean loginRequired;
      if (server.isMinimiseLogins()) {
        final String currentUsername =
            getLoggedInUsername(
                accurevTool, server, accurevEnv, pathToRunCommandsIn, listener, launcher);
        if (StringUtils.isEmpty(currentUsername)) {
          loginRequired = true;
          listener.getLogger().println("Not currently authenticated with AccuRev server");
        } else {
          loginRequired = !currentUsername.equals(requiredUsername);
          listener
              .getLogger()
              .println(
                  "Currently authenticated with AccuRev server as '"
                      + currentUsername
                      + (loginRequired ? "', login required" : "', not logging in again."));
        }
      } else {
        loginRequired = true;
      }
      if (loginRequired) {
        return accurevLogin(
            accurevTool, server, accurevEnv, pathToRunCommandsIn, listener, launcher);
      }
    } finally {
      lock.unlock();
    }
    return true;
  }

  /**
   * Runs {@code accurev login} with the server's credentials.
   *
   * <p>NOTE(review): the username is read from the credentials object while the
   * password passed to the command comes from {@code server.getPassword()} —
   * confirm both always describe the same credential.
   *
   * @return true on successful authentication
   * @throws IOException on process-launch failure
   */
  private static boolean accurevLogin(
      String accurevTool,
      final AccurevServer server, //
      final EnvVars accurevEnv, //
      final FilePath workspace, //
      final TaskListener listener, //
      final Launcher launcher)
      throws IOException {
    StandardUsernamePasswordCredentials credentials = server.getCredentials();
    if (credentials == null) {
      listener.getLogger().println("Credentials not found");
      return false;
    }
    if (StringUtils.isBlank(credentials.getUsername())) {
      listener.getLogger().println("Credentials username cannot be blank");
      return false;
    }
    listener.getLogger().println("Authenticating with AccuRev server...");
    final ArgumentListBuilder cmd = new ArgumentListBuilder();
    cmd.add("login");
    addServer(cmd, server);
    if (server.isUseNonexpiringLogin()) {
      cmd.add("-n");
    }
    cmd.add(credentials.getUsername());
    // An empty password must still be passed as an explicit masked argument;
    // the Unix and Windows launchers require different quoting for it.
    if (StringUtils.isEmpty(Secret.toString(credentials.getPassword()))) {
      if (launcher.isUnix()) {
        cmd.add("", true);
      } else {
        cmd.addQuoted("", true);
      }
    } else {
      cmd.add(server.getPassword(), true);
    }
    final boolean success =
        AccurevLauncher.runCommand(
            "login", accurevTool, launcher, cmd, null, accurevEnv, workspace, listener, logger);
    if (success) {
      listener.getLogger().println("Authentication completed successfully.");
      return true;
    } else {
      return false;
    }
  }

  /**
   * Logs in using only globally-configured server settings. Called from
   * doFillStreams and doFillDepots while configuring the job.
   *
   * @param server Accurev Server
   * @return whether the login was successful
   * @throws IOException on failing IO
   */
  public static boolean accurevLoginFromGlobalConfig( //
      final AccurevServer server) throws IOException {
    Jenkins jenkins = Jenkins.get();
    TaskListener listener = TaskListener.NULL;
    Launcher launcher = jenkins.createLauncher(listener);
    EnvVars accurevEnv = new EnvVars();
    return ensureLoggedInToAccurev(
        null, server, accurevEnv, jenkins.getRootPath(), listener, launcher);
  }
}
|
package org.nem.core.test;
import org.nem.core.model.*;
import org.nem.core.serialization.*;
import org.nem.core.time.TimeInstant;
/**
 * A mock VerifiableEntity implementation with a single serializable
 * {@code customField} payload, for use in tests.
 */
public class MockVerifiableEntity extends VerifiableEntity {
    public static final int TYPE = 12;
    public static final int VERSION = 24;
    public static final TimeInstant TIMESTAMP = new TimeInstant(127435);

    private int customField;

    /**
     * Creates a mock verifiable entity.
     *
     * @param signer The owner's account.
     */
    public MockVerifiableEntity(final Account signer) {
        this(signer, 0);
    }

    /**
     * Creates a mock verifiable entity.
     *
     * @param signer The owner's account.
     * @param customField The initial custom field value.
     */
    public MockVerifiableEntity(final Account signer, final int customField) {
        super(TYPE, VERSION, TIMESTAMP, signer);
        this.customField = customField;
    }

    /**
     * Deserializes a mock verifiable entity using VERIFIABLE options.
     *
     * @param deserializer The deserializer to use.
     */
    public MockVerifiableEntity(final Deserializer deserializer) {
        this(DeserializationOptions.VERIFIABLE, deserializer);
    }

    /**
     * Deserializes a mock verifiable entity.
     *
     * @param options The deserialization options.
     * @param deserializer The deserializer to use.
     */
    public MockVerifiableEntity(final DeserializationOptions options, final Deserializer deserializer) {
        super(deserializer.readInt("type"), options, deserializer);
        this.customField = deserializer.readInt("customField");
    }

    /**
     * Gets the custom field value.
     *
     * @return The custom field value.
     */
    public int getCustomField() {
        return this.customField;
    }

    /**
     * Sets the custom field value.
     *
     * @param customField The desired custom field value.
     */
    public void setCustomField(final int customField) {
        this.customField = customField;
    }

    @Override
    protected void serializeImpl(final Serializer serializer) {
        serializer.writeInt("customField", this.customField);
    }
}
|
import React from 'react';
import styles from './index.module.scss';
import { ICommand } from '../../types';
import Command from '../Command';
// Ordered list of commands; in editable mode each <Command> reports edits and
// deletes back through the optional callbacks.
export default function CommandsList({
  commands,
  onEdit,
  onDelete,
  editable,
}: {
  commands: ICommand[];
  onEdit?: (index: number, command: ICommand) => void;
  onDelete?: (index: number) => void;
  editable?: boolean;
}) {
  // Forward an edit to the owner. Optional chaining replaces the previous
  // non-null assertion (`onEdit!`), which threw at runtime whenever a
  // Command fired a change while no onEdit handler was supplied.
  function handleChange(index: number, command: ICommand) {
    onEdit?.(index, command);
  }
  return (
    <div className={styles.wrapper}>
      <ol className={styles.commands}>
        {commands.map((command, index) => (
          <Command
            key={command.trigger}
            command={command}
            index={index}
            onChange={handleChange.bind(null, index)}
            onDelete={onDelete}
            editable={editable}
          />
        ))}
      </ol>
    </div>
  );
}
|
from LightPipes import *
import matplotlib.pyplot as plt
import numpy as np
"""
LightPipes for Python
*********************
LaserModeTransformer.py
Demonstrates the transformation of a Hermite Gauss resonator mode
into a Laguerre Gauss mode with a pair of cylindrical lenses.
Reference:
<NAME>, <NAME>, <NAME> and <NAME>,
Astigmatic laser mode converters and transfer of orbital angular momentum,
Optics Comm. 96 (1993) 123.
cc <NAME>, June 2020.
"""
labda=632.8*nm
size=5*mm
N=500
dz=-30*mm # tune position lens f2
d1=525*mm
d2=306*mm
d3=225*mm + dz #225*mm
d4=176*mm - dz #176*mm
d5=27*mm #27*mm (re-derived below from the mode-matching condition)
df2=12*mm # tune focal length of f2
R1=600*mm
R2=437*mm
f1=20*mm
f2=160*mm +df2 #160*mm
f3=19*mm
f4=19*mm
f5=20*mm
angle=45.0*deg
m_=5
n_=3
# Resonator geometry: cavity length and g-parameters.
L=d1+d2
g1=1-L/R1
g2=1-L/R2
# TEM waist of the stable (R1, R2, L) resonator, per Kogelnik & Li,
# Appl. Opt. 5, 1550 (1966):
#     w0^2 = (lambda*L/pi) * sqrt(g1*g2*(1-g1*g2)) / |g1 + g2 - 2*g1*g2|
w0=np.sqrt(labda*L/np.pi)
w0*=(g1*g2*(1-g1*g2))**0.25
# Fixed: the denominator was (g1+g2-2*g2), i.e. g1-g2, which is not the
# resonator formula; abs() keeps the root real for negative-branch cavities.
w0/=abs(g1+g2-2*g1*g2)**0.5
d5=f4*np.sqrt(2)  # overrides the 27 mm value above (mode-matching: d5 = f4*sqrt(2))
fM2=-R2/(1.5-1) #lensmakers formula with refractive index = 1.5, focal length outcoupler
# Propagate the HG mode from the waist through the converter train.
F=Begin(size,labda,N)
F=GaussBeam(F,w0,m=m_,n=n_) # Hermite Gauss beam in waist
F=Forvard(F,d2) # propagate d2
F=Lens(F,fM2) #outcoupler as lens
F=Forvard(F,d3) #propagate d3
I0=Intensity(F) # intensity input beam
F=Lens(F,f2) # lens f2
F=Forvard(F,d4) # propagate d4
F=CylindricalLens(F,f3,angle=angle) # cylindrical lens f3
F=Forvard(F,d5) # propagate d5
F=CylindricalLens(F,f4,angle=angle) # cylindrical lens f4
F=Lens(F,f5) # lens f5
F=Forvard(F,200*mm) # propagate to have sufficient large beam size
I1=Intensity(F) #intensity output beam
# Annotation strings for the figure.
s1 = r'LightPipes for Python,' + '\n'\
     r'LaserModeTransformer.py'+ '\n\n'\
     f'size = {size/mm:4.2f} mm' + '\n'\
     f'$\\lambda$ = {labda/nm:4.2f} nm' + '\n'\
     f'N = {N:d}' + '\n' +\
     f'm = {m_:d}, n = {n_}' + '\n'\
     f'w0 = {w0/mm:4.3f} mm' + '\n'\
     f'd1 = {d1/mm:4.1f} mm' + '\n'\
     f'd2 = {d2/mm:4.1f} mm' + '\n'\
     f'd3 = {d3/mm:4.1f} mm (225 mm)' + '\n'\
     f'd4 = {d4/mm:4.1f} mm (176 mm)'+ '\n'\
     f'd5 = {d5/mm:4.1f} mm (27 mm)' + '\n\n'\
     r'${\copyright}$ <NAME>, June 2020'
s2 = f'f1 = {f1/mm:4.1f} mm' + '\n'\
     f'f2 = {f2/mm:4.1f} mm (160 mm)' + '\n'\
     f'f3 = {f3/mm:4.1f} mm' + '\n'\
     f'f4 = {f4/mm:4.1f} mm' + '\n'\
     f'f5 = {f5/mm:4.1f} mm' + '\n'\
     f'R1 = {R1/mm:4.1f} mm' + '\n'\
     f'R2 = {R2/mm:4.1f} mm' + '\n\n'
s3 = r'Reference:' + '\n'\
     r'<NAME>, <NAME>, <NAME> and <NAME>,'+ '\n'+\
     r'Astigmatic laser mode converters and transfer of orbital angular momentum,' + '\n'\
     r'Optics Comm. 96 (1993) 123.'
# Figure: input HG intensity, output intensity, and the parameter listing.
fig=plt.figure(figsize=(11,6))
ax1 = fig.add_subplot(221);ax1.axis('off')
ax2 = fig.add_subplot(222);ax2.axis('off')
ax3 = fig.add_subplot(223);ax3.axis('off')
ax1.imshow(I0,cmap='jet');ax1.set_title(f'input intensity, HG$_{m_}$$_{n_}$')
ax2.imshow(I1,cmap='jet');ax2.set_title('output intensity')
ax3.text(0.0,0.0,s1);ax3.text(0.5,0.0,s2);ax3.text(1.0,0.0,s3)
plt.show()
|
package com.java.study.algorithm.zuo.abasic.basic_class_04;
/**
* <Description>
*
* @author hushiye
* @since 3/28/21 23:31
*/
public class ParentNode {
    // Node payload.
    public int value;
    // Left child; null when absent.
    public ParentNode left;
    // Right child; null when absent.
    public ParentNode right;
    // Back-reference to the parent node, enabling upward traversal
    // (e.g. successor search without a root pointer); null at the root.
    public ParentNode parent;

    public ParentNode(int data) {
        this.value = data;
    }
}
|
#!/usr/bin/env bash
# =============================================================================
# Copyright 2022 Hewlett Packard Enterprise
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================

# Stage 1 of provisioning: validates the chosen provider (from the `providers`
# file), generates the controller SSH keypair, writes the provider's tfvars
# from user_settings.sh, and runs `terraform init` plus the provider init hook.
USAGE="Usage: ${0} $(paste -s -d '|' providers)"
PROVIDERS=($(<providers))
if ! [ $# -gt 0 ] || ! (echo ${PROVIDERS[@]} | grep -w -q ${1}); then
  echo $USAGE
  exit 1
fi

set -euo pipefail

# One-time controller SSH keypair (PEM RSA, no passphrase), kept in ./generated.
[[ -d "./generated" ]] || mkdir generated
if [[ ! -f "./generated/controller.prv_key" ]]; then
  ssh-keygen -m pem -t rsa -N "" -f "./generated/controller.prv_key"
  mv "./generated/controller.prv_key.pub" "./generated/controller.pub_key"
  chmod 600 "./generated/controller.prv_key"
fi
# NOTE(review): these three are not referenced below — presumably consumed by
# a provider's init.sh or a sourced script; confirm before removing.
# NOTE(review): `base64 -i` means --ignore-garbage on GNU but "input file" on
# BSD/macOS; works here by coincidence of the file operand — confirm intent.
SSH_PUB_KEY=$(cat ./generated/controller.pub_key)
SSH_PRV_KEY=$(cat ./generated/controller.prv_key)
SSH_PRV_KEY_B64=$(base64 -i ./generated/controller.prv_key)

. ./user_settings.sh

# Render the provider's tfvars from the user settings.
# NOTE(review): string values (user, project_id, admin_password) are emitted
# unquoted — confirm user_settings.sh supplies them already quoted for HCL.
cat > ${1}/my.tfvars <<EOF
user = ${USER_ID}
project_id = ${PROJECT_ID// /_}
is_runtime = ${IS_RUNTIME}
is_mlops = ${IS_MLOPS}
is_ha = ${IS_HA}
is_mapr = ${IS_MAPR}
is_mapr_ha = ${IS_MAPR_HA}
install_ad = ${INSTALL_AD}
admin_password = ${ADMIN_PASSWORD}
extra_tags = ${EXTRA_TAGS}
EOF
if [[ "${IS_GPU}" == "true" ]]; then
  echo "gworker_count = 1" >> ${1}/my.tfvars
fi

pushd "${1}" > /dev/null
TF_IN_AUTOMATION=1 terraform init ${EZWEB_TF:-}
### Init hook-up for targets
[ -f "./init.sh" ] && "./init.sh"
popd > /dev/null

echo "Stage 1 complete"

# Apply Terratag only if extra_tags are set in user settings and infra is aws or azure.
if [[ "$1" == "aws" || "$1" == "azure" ]] && [[ ! -z "${EXTRA_TAGS}" && "${EXTRA_TAGS}" != "{}" ]]
then
  echo "Applying Additional Tags: ${EXTRA_TAGS} to cloud resources via terratag"
  terratag -dir=$1 -tags="${EXTRA_TAGS}" -rename
fi

exit 0
|
<reponame>sunkencity999/cyberdecks
# Forum user: owns posts and comments, authenticates with has_secure_password,
# and carries a member/admin role.
class User < ApplicationRecord
  has_many :posts, dependent: :destroy
  has_many :comments, dependent: :destroy

  # Normalize the email's case and default the role before every save.
  before_save { self.email = email.downcase if email.present? }
  before_save { self.role ||= :member }

  validates :name, length: { minimum: 1, maximum: 100 }, presence: true
  validates :password, presence: true, length: { minimum: 6 }
  # Fixed misspelled option key (`minumum` -> `minimum`); the typo made
  # ActiveModel raise ArgumentError ("Unknown key: :minumum") the first time
  # this validation ran.
  # NOTE(review): this rule overlaps the one above (presence: true vs
  # allow_blank: true) — consider consolidating into a single validation.
  validates :password, length: { minimum: 6, maximum: 30 }, allow_blank: true
  validates :email,
            presence: true,
            uniqueness: { case_sensitive: false },
            length: { minimum: 3, maximum: 254 }

  has_secure_password

  enum role: [:member, :admin]

  # Gravatar image URL for this user's email at the given pixel size.
  # NOTE(review): served over plain http — consider https to avoid
  # mixed-content warnings.
  def avatar_url(size)
    gravatar_id = Digest::MD5::hexdigest(self.email).downcase
    "http://gravatar.com/avatar/#{gravatar_id}.png?s=#{size}"
  end
end
|
<reponame>cyberdevnet/mer-hacker
import React from "react";
import Dialog from '@material-ui/core/Dialog';
import "../styles/AlertsModal.css";
// Error dialog shown when a tools-template action fails. `ac` is the app
// context object; the error text arrives via ac.dc.AlertModalError.
export default function AlertsModal(ac) {
  // Close this modal and return the user to the tools template view.
  const handleAlertsModal = () => {
    ac.dc.setswitchAlertModal(false);
    ac.dc.setswitchToolsTemplate(true);
  };
  return (
    <Dialog
      open={true}
    >
      <div >
        <div className="modal-dialog modal-confirm">
          <div >
            <div className="modal-header">
              {/* Three empty material-icon slots — presumably filled/styled
                  by AlertsModal.css; confirm before removing. */}
              <div className="icon-box">
                <i className="material-icons"></i>
                <i className="material-icons"></i>
                <i className="material-icons"></i>
              </div>
              <button
                onClick={handleAlertsModal}
                type="button"
                className="close"
                data-dismiss="modal"
                aria-hidden="true"
              >
                <span>&times;</span>
              </button>
            </div>
            <div className="modal-body text-center">
              <h4>Something went wrong</h4>
              <span>{ac.dc.AlertModalError}</span>
            </div>
          </div>
        </div>
      </div>
    </Dialog>
  );
}
<filename>src/Inicio.java
import javax.swing.JOptionPane;
public class Inicio extends javax.swing.JFrame {
    /** Builds the start screen: fixed-size, centered on screen, titled "INICIO". */
    public Inicio() {
        initComponents();
        setTitle("INICIO");
        setResizable(false);
        setLocationRelativeTo(null);
    }
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jPanel2 = new javax.swing.JPanel();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
jPanel1 = new javax.swing.JPanel();
jButton3 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jButton4 = new javax.swing.JButton();
jButton1 = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
setBackground(new java.awt.Color(51, 51, 255));
jPanel2.setBackground(new java.awt.Color(255, 255, 255));
jLabel1.setFont(new java.awt.Font("Tahoma", 0, 36)); // NOI18N
jLabel1.setText("Bienvenido/a");
jLabel2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/hospital.png"))); // NOI18N
jLabel2.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(0, 0, 0), 2));
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(266, 266, 266)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(11, 11, 11)
.addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 218, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(jLabel2, javax.swing.GroupLayout.Alignment.TRAILING))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(64, 64, 64)
.addComponent(jLabel1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jLabel2)
.addContainerGap(117, Short.MAX_VALUE))
);
jPanel1.setBackground(new java.awt.Color(51, 51, 255));
jButton3.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton3.setForeground(new java.awt.Color(255, 255, 255));
jButton3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/cuentagotas.png"))); // NOI18N
jButton3.setText("Ingresar como Paciente");
jButton3.setBorder(null);
jButton3.setBorderPainted(false);
jButton3.setContentAreaFilled(false);
jButton3.setFocusPainted(false);
jButton3.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton3ActionPerformed(evt);
}
});
jButton2.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton2.setForeground(new java.awt.Color(255, 255, 255));
jButton2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/doctor.png"))); // NOI18N
jButton2.setText("Ingresar como Doctor");
jButton2.setBorder(null);
jButton2.setBorderPainted(false);
jButton2.setContentAreaFilled(false);
jButton2.setFocusPainted(false);
jButton2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton2ActionPerformed(evt);
}
});
jButton4.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton4.setForeground(new java.awt.Color(255, 255, 255));
jButton4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/mas.png"))); // NOI18N
jButton4.setText("Farmacia");
jButton4.setBorder(null);
jButton4.setBorderPainted(false);
jButton4.setContentAreaFilled(false);
jButton4.setFocusPainted(false);
jButton4.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton4ActionPerformed(evt);
}
});
jButton1.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
jButton1.setForeground(new java.awt.Color(255, 255, 255));
jButton1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/salir-al-boton-de-la-aplicacion.png"))); // NOI18N
jButton1.setText("Salir");
jButton1.setBorder(null);
jButton1.setBorderPainted(false);
jButton1.setContentAreaFilled(false);
jButton1.setFocusPainted(false);
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jButton3)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jButton2, javax.swing.GroupLayout.PREFERRED_SIZE, 217, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 140, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 77, Short.MAX_VALUE)
.addComponent(jButton1)
.addGap(50, 50, 50))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(jButton3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jButton2, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
.addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 48, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(1, 1, 1)))
.addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 49, javax.swing.GroupLayout.PREFERRED_SIZE)
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
int salir=JOptionPane.showConfirmDialog(null, "¿Seguro que desea salir?");
if(salir==0){
System.exit(0);
}
}//GEN-LAST:event_jButton1ActionPerformed
    private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
        // "Ingresar como Doctor" button: open the doctors window.
        new InfoDoctores().setVisible(true);
    }//GEN-LAST:event_jButton2ActionPerformed
    private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
        // "Ingresar como Paciente" button: open the patients window.
        new InfoPaciente().setVisible(true);
    }//GEN-LAST:event_jButton3ActionPerformed
    private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
        // "Farmacia" button: open the pharmacy window.
        new Farmacia().setVisible(true);
    }//GEN-LAST:event_jButton4ActionPerformed
public static void main(String args[]) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(Inicio.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(Inicio.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(Inicio.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(Inicio.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new Inicio().setVisible(true);
}
});
}
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton jButton1; // "Salir" (exit)
    private javax.swing.JButton jButton2; // "Ingresar como Doctor"
    private javax.swing.JButton jButton3; // "Ingresar como Paciente"
    private javax.swing.JButton jButton4; // "Farmacia"
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JPanel jPanel1; // top bar holding the buttons
    private javax.swing.JPanel jPanel2;
    // End of variables declaration//GEN-END:variables
}
|
<filename>server/src/graphql/customScalars.js
import { gql } from 'apollo-server-express'
import { DateTimeResolver } from 'graphql-scalars'
// Schema definition for the custom `Date` scalar type.
const DateTypeDefs = gql`
  scalar Date
`
// Resolver map: delegate `Date` handling to graphql-scalars' DateTimeResolver.
const DateResolvers = {
  Date: DateTimeResolver,
}
export { DateTypeDefs, DateResolvers }
|
class Template:
    """Abstract interface for an ordered container.

    Method names (rank/select/successor/predecessor) suggest an
    order-statistics structure — confirm against concrete subclasses.
    Every method must be overridden; the base versions all raise
    ``NotImplementedError``.
    """
    # NOTE(review): mutable default argument; harmless here because the
    # method raises immediately, but subclasses should avoid copying it.
    def __init__(self, preload = []):
        """Initialise the container, optionally seeded with ``preload``."""
        raise NotImplementedError
    def add(self, element):
        """Insert ``element`` into the container."""
        raise NotImplementedError
    def delete(self, index):
        """Remove the element at position ``index``."""
        raise NotImplementedError
    def remove(self, element):
        """Remove ``element`` by value."""
        raise NotImplementedError
    def rank(self, element):
        """Return the position ``element`` would occupy in sorted order."""
        raise NotImplementedError
    def select(self, index):
        """Return the element with ordinal position ``index``."""
        raise NotImplementedError
    def iter(self):
        """Iterate over the elements (order defined by the subclass)."""
        raise NotImplementedError
    def reversed(self):
        """Iterate over the elements in reverse order."""
        raise NotImplementedError
    def count(self, value):
        """Return how many stored elements equal ``value``."""
        raise NotImplementedError
    def successor(self, value):
        """Return the smallest stored element greater than ``value``."""
        raise NotImplementedError
    def predecessor(self, value):
        """Return the largest stored element smaller than ``value``."""
        raise NotImplementedError
    def size(self):
        """Return the number of stored elements."""
        raise NotImplementedError
    def __len__(self):
        """Return the number of stored elements (``len(t)``)."""
        raise NotImplementedError
    def __getitem__(self, key):
        """Index into the container (``t[key]``)."""
        raise NotImplementedError
#!/bin/bash
# Scaffold the polished P7741 assembly against a reference genome and
# extract the reordered contig.

# Reference genome used to order/orient the contigs.
ref=genomes/Liflandii.fasta

# Quote the expansion so the script keeps working if the path ever
# contains spaces (the original unquoted $ref would word-split).
ragtag.py scaffold "$ref" P7741.polished.fasta -o P7741_reordered

# Extract the reordered contig with a custom python script.
# The script accepts the ragtag scaffolds file and the accession number of
# the reference genome (found in the first line of the reference fasta).
python extract_reordered.py P7741_reordered/ragtag.scaffolds.fasta NC_020133.1
class Person:
    """Lightweight record of a person's basic details."""

    def __init__(self, name, age, gender):
        """Store the supplied attributes on the new instance."""
        self.name, self.age, self.gender = name, age, gender
# Evaluate the 512+0+512-HPMI model on the WikiText-103 validation split,
# batch size 1, applying the shuffle-sentences/nouns-only augmentation and
# scoring with the last-element evaluation function.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-HPMI/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-HPMI/512+0+512-SS-N-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_remove_all_but_nouns_first_half_full --eval_function last_element_eval
a = 1
b = 10
# Only report when a exceeds b (it does not for these values).
if b < a:
    print("a is greater than b")
package dev.webfx.kit.mapper.peers.javafxgraphics.emul_coupling.base;
import com.sun.javafx.tk.TKSceneListener;
import javafx.scene.Scene;
import javafx.stage.Window;
import dev.webfx.kit.mapper.peers.javafxgraphics.emul_coupling.ScenePeer;
/**
* @author <NAME>
*/
public abstract class ScenePeerBase implements ScenePeer {

    /** The JavaFX scene this peer is bound to (never reassigned). */
    protected final Scene scene;
    /** Toolkit listener notified of size changes; null until registered. */
    public TKSceneListener listener;

    public ScenePeerBase(Scene scene) {
        this.scene = scene;
    }

    @Override
    public void setTKSceneListener(TKSceneListener listener) {
        this.listener = listener;
        Window window = scene.getWindow();
        // Robustness: the scene may not be attached to a window yet; the
        // unguarded call would throw a NullPointerException in that case.
        if (window != null) {
            changedWindowSize(window.getWidth(), window.getHeight());
        }
    }

    /**
     * Forwards the window size to the listener. Skips the notification when
     * no listener is registered or either dimension is NaN (not laid out yet).
     */
    public void changedWindowSize(double width, double height) {
        if (listener != null && !Double.isNaN(width) && !Double.isNaN(height)) {
            listener.changedSize((float) width, (float) height);
        }
    }

    public Scene getScene() {
        return scene;
    }
}
|
require 'spec_helper'
require 'support/sharedcontext'
require 'support/libvirt_context'
require 'vagrant-libvirt/action/shutdown_domain'
# Unit tests for the libvirt ShutdownDomain action: verifies that a direct
# shutdown is only attempted when the domain is running, that env[:result]
# reflects whether the target state was reached, and that the graceful-halt
# timeout is honoured.
describe VagrantPlugins::ProviderLibvirt::Action::ShutdownDomain do
  subject { described_class.new(app, env, target_state, current_state) }
  include_context 'unit'
  include_context 'libvirt'
  let(:driver) { double('driver') }
  let(:libvirt_domain) { double('libvirt_domain') }
  let(:servers) { double('servers') }
  let(:current_state) { :running }
  let(:target_state) { :shutoff }
  before do
    allow(machine.provider).to receive('driver').and_return(driver)
    allow(driver).to receive(:created?).and_return(true)
    allow(driver).to receive(:connection).and_return(connection)
  end
  describe '#call' do
    before do
      allow(connection).to receive(:servers).and_return(servers)
      allow(servers).to receive(:get).and_return(domain)
      allow(ui).to receive(:info).with('Attempting direct shutdown of domain...')
    end
    # Already shut off: the action must be a no-op that reports success.
    context "when state is shutoff" do
      before do
        allow(driver).to receive(:state).and_return(:shutoff)
      end
      it "should not shutdown" do
        expect(domain).not_to receive(:shutoff)
        subject.call(env)
      end
      it "should not print shutdown message" do
        expect(ui).not_to receive(:info)
        subject.call(env)
      end
      it "should provide a true result" do
        subject.call(env)
        expect(env[:result]).to be_truthy
      end
    end
    # Running: the action should attempt a direct shutdown.
    context "when state is running" do
      before do
        allow(driver).to receive(:state).and_return(:running)
      end
      it "should shutdown" do
        expect(domain).to receive(:wait_for)
        expect(domain).to receive(:shutdown)
        subject.call(env)
      end
      it "should print shutdown message" do
        expect(domain).to receive(:wait_for)
        expect(domain).to receive(:shutdown)
        expect(ui).to receive(:info).with('Attempting direct shutdown of domain...')
        subject.call(env)
      end
      # env[:result] mirrors whether the domain actually reached :shutoff.
      context "when final state is not shutoff" do
        before do
          expect(driver).to receive(:state).and_return(:running).exactly(3).times
          expect(domain).to receive(:wait_for)
          expect(domain).to receive(:shutdown)
        end
        it "should provide a false result" do
          subject.call(env)
          expect(env[:result]).to be_falsey
        end
      end
      context "when final state is shutoff" do
        before do
          expect(driver).to receive(:state).and_return(:running).exactly(2).times
          expect(driver).to receive(:state).and_return(:shutoff).exactly(1).times
          expect(domain).to receive(:wait_for)
          expect(domain).to receive(:shutdown)
        end
        it "should provide a true result" do
          subject.call(env)
          expect(env[:result]).to be_truthy
        end
      end
      # If the earlier middleware already consumed the whole graceful-halt
      # budget, no shutdown attempt should be made at all.
      context "when timeout exceeded" do
        before do
          expect(machine).to receive_message_chain('config.vm.graceful_halt_timeout').and_return(1)
          expect(app).to receive(:call) { sleep 1.5 }
          expect(driver).to receive(:state).and_return(:running).exactly(1).times
          expect(domain).to_not receive(:wait_for)
          expect(domain).to_not receive(:shutdown)
        end
        it "should provide a false result" do
          subject.call(env)
          expect(env[:result]).to be_falsey
        end
      end
      # Remaining budget must be passed down to wait_for, reduced by the
      # time the earlier middleware already used.
      context "when timeout not exceeded" do
        before do
          expect(machine).to receive_message_chain('config.vm.graceful_halt_timeout').and_return(2)
          expect(app).to receive(:call) { sleep 1 }
          expect(driver).to receive(:state).and_return(:running).exactly(3).times
          expect(domain).to receive(:wait_for) do |time|
            expect(time).to be < 1
            expect(time).to be > 0
          end
          expect(domain).to receive(:shutdown)
        end
        it "should wait for the reduced time" do
          subject.call(env)
          expect(env[:result]).to be_falsey
        end
      end
    end
  end
end
|
<filename>src/main/java/br/com/teste/controlles/EstoqueEntradaController.java
package br.com.teste.controlles;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import br.com.teste.models.EstoqueEntrada;
import br.com.teste.services.EstoqueEntradaService;
@Controller
@RequestMapping("/estoque_entrada")
public class EstoqueEntradaController {

    /** Service layer that performs the stock-entry CRUD operations. */
    @Autowired
    EstoqueEntradaService service;

    /** POST /estoque_entrada — persist a new stock entry (201 Created). */
    @PostMapping
    public ResponseEntity<EstoqueEntrada> create(@RequestBody EstoqueEntrada estoqueEntrada) {
        EstoqueEntrada created = service.create(estoqueEntrada);
        return new ResponseEntity<>(created, HttpStatus.CREATED);
    }

    /** GET /estoque_entrada/{id} — fetch a single stock entry by id. */
    @GetMapping("/{id}")
    public ResponseEntity<EstoqueEntrada> readId(@PathVariable Long id) {
        return new ResponseEntity<>(service.readId(id), HttpStatus.OK);
    }

    /** GET /estoque_entrada — list every stock entry. */
    @GetMapping
    public ResponseEntity<List<EstoqueEntrada>> readAll() {
        return new ResponseEntity<>(service.readAll(), HttpStatus.OK);
    }

    /** PUT /estoque_entrada — update an existing stock entry. */
    @PutMapping
    public ResponseEntity<EstoqueEntrada> update(@RequestBody EstoqueEntrada estoqueEntrada) {
        return new ResponseEntity<>(service.update(estoqueEntrada), HttpStatus.OK);
    }

    /** DELETE /estoque_entrada/{id} — remove a stock entry (200, empty body). */
    @DeleteMapping("/{id}")
    public ResponseEntity<EstoqueEntrada> delete(@PathVariable Long id) {
        service.delete(id);
        return new ResponseEntity<>(HttpStatus.OK);
    }
}
|
<filename>langs/php/core.js<gh_stars>100-1000
// Libraries
const config = require("../../config_php_lang.js");
const coreLib = require("../../core");
// Libraries init
const core = new coreLib();
module.exports = function() {
this.config = config; // exporting config file to outside
// Methods
this.connectToDBMS = function(){
if(config.IS_DBMS_USED){
if(config.DBMS == "mysql"){
try {
var mysql = require('sync-mysql');
var mysqlConnection = new mysql({
host : config.YOUR_WEBAPP_DBMS_SERVER_IP,
user : config.YOUR_WEBAPP_DBMS_USERNAME,
password : <PASSWORD>,
database : config.YOUR_WEBAPP_DBMS_DB_NAME
});
const result = mysqlConnection.query('SELECT 1 + 1 AS answer');
if(result[0].answer === 2){
return mysqlConnection;
}
else {
core.println("MySQL Config Error: MySQL not connected!", "red", "bold");
}
}
catch(e){
core.println("MySQL Config Error: MySQL not connected!", "red", "bold");
console.log(e);
}
}
else {
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("Other DBMS is used, make sure you implement it.");
}
}
}
else {
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("DBMS not enabled in the config");
}
}
return null;
}
this.getOneColumnValueFromDBMS = function(column, table, where = "where " + column + " != null or " + column + " != '' or " + column + " != ' ' or " + column + " != 'NULL' or " + column + " != 'null' or " + column + " != 'Null'", orderBy = " order by " + column + " DESC"){
var columnValue = null;
if(config.IS_DBMS_USED){
if(config.DBMS == "mysql"){
try {
var mysqlConnection = this.connectToDBMS();
if(mysqlConnection){
var queryResult = mysqlConnection.query('SELECT ' + column + ' from ' + table + ' ' + where + ' ' + orderBy + ' limit 1');
if(queryResult && queryResult.length && queryResult.length != 0 && queryResult[0][column]){
columnValue = queryResult[0][column];
}
else {
core.println("Please make sure the configurations are correct and your table " + table + " contains at least one row with real production data for tests!", "red", "bold");
}
}
else {
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("MySQL Connection Error!");
}
}
}
catch(e){
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("Error happened in Mysql query!");
console.log(e);
}
}
}
else {
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("Other DBMS is used, make sure you implement it.");
}
}
}
else {
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("DBMS not enabled in the config");
}
}
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log(columnValue);
}
return columnValue;
}
this.isAllColumnValuesRandomInDBMSTable = function(column, table, where = "where " + column + " != null or " + column + " != '' or " + column + " != ' ' or " + column + " != 'NULL' or " + column + " != 'null' or " + column + " != 'Null'", orderBy = " order by " + column + " DESC"){
if(config.IS_DBMS_USED){
if(config.DBMS == "mysql"){
try {
var mysqlConnection = this.connectToDBMS();
if(mysqlConnection){
var queryResult = mysqlConnection.query('SELECT ' + column + ' from ' + table + ' ' + where + ' ' + orderBy);
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log(queryResult);
}
if(queryResult && queryResult.length && queryResult.length > 0){
for(var i = 0; i < queryResult.length; i++){
var columnValue = queryResult[i][column];
for( j = i + 1; j < queryResult.length; j++){
column2Value = queryResult[j][column];
if(columnValue == column2Value){
return false;
}
}
}
}
else {
core.println("Please make sure the configurations are correct and your table " + table + " contains at least one row with real production data for tests!", "red", "bold");
}
}
else {
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("MySQL Connection Error!");
}
}
}
catch(e){
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("Error happened in Mysql query!");
console.log(e);
}
}
}
else {
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("Other DBMS is used, make sure you implement it.");
}
}
}
else {
if(core.config.IS_DEBUG_MODE_ENABLED){
console.log("DBMS not enabled in the config");
}
}
return true;
}
}
|
package api
import (
"encoding/json"
"fmt"
"gfuzz/pkg/gexec"
"gfuzz/pkg/oraclert/config"
"gfuzz/pkg/oraclert/output"
"path"
"path/filepath"
)
// Stage indicates how we treat/respond to an input and its corresponding output.
type Stage string

const (
	// InitStage simply runs the input empty, without any mutation.
	InitStage Stage = "init"
	// DeterStage creates inputs by tweaking select choices one by one.
	DeterStage Stage = "deter"
	// CalibStage chooses an input from the queue to run (prepare for rand).
	CalibStage Stage = "calib"
	// RandStage randomly mutates select choices.
	RandStage Stage = "rand"
	// ReplayStage runs with a custom/pre-prepared oracle runtime configuration.
	ReplayStage Stage = "replay"
)

// Input contains all information about a single execution
// (usually by the fuzzer).
type Input struct {
	// ID is the unique identifier for this execution.
	ID string
	// OracleRtConfig is the configuration for the oracle runtime.
	OracleRtConfig *config.Config
	// Exec is the command to trigger a program with oracle runtime.
	Exec gexec.Executable
	// OutputDir is the output directory for this execution.
	OutputDir string
	// Stage records which fuzzing stage produced this input.
	Stage Stage
}

// Output contains all useful information after a single execution.
type Output struct {
	OracleRtOutput *output.Output
	BugIDs []string
	Timeout bool
}
// GetOrtConfigFilePath returns the absolute path of the oracle runtime
// configuration file inside this input's output directory.
func (i *Input) GetOrtConfigFilePath() (string, error) {
	return filepath.Abs(path.Join(i.OutputDir, "ort_config"))
}

// GetOutputFilePath returns the absolute path of the captured stdout file.
func (i *Input) GetOutputFilePath() (string, error) {
	return filepath.Abs(path.Join(i.OutputDir, "stdout"))
}

// GetOrtOutputFilePath returns the absolute path of the oracle runtime
// output file.
func (i *Input) GetOrtOutputFilePath() (string, error) {
	return filepath.Abs(path.Join(i.OutputDir, "ort_output"))
}
// Serialize encodes an Input as JSON. A nil input yields an empty byte
// slice (not the JSON literal "null") and no error.
func Serialize(l *Input) ([]byte, error) {
	if l != nil {
		return json.Marshal(l)
	}
	return []byte{}, nil
}
// Deserilize decodes JSON bytes into an Input.
// (Name kept as-is — sic — for backward compatibility with callers.)
func Deserilize(data []byte) (*Input, error) {
	var l Input
	if err := json.Unmarshal(data, &l); err != nil {
		return nil, err
	}
	return &l, nil
}
// NewExecInput should be the only way to create an exec Input. It derives a
// unique input ID from the given IDs, stage and executable, and places the
// input's output directory under <outputDir>/exec/<inputID>.
func NewExecInput(ID uint32, fromID uint32, outputDir string, ge gexec.Executable,
	rtConfig *config.Config, stage Stage) *Input {
	inputID := fmt.Sprintf("%d-%s-%s-%d", ID, stage, ge.String(), fromID)
	return &Input{
		ID:             inputID,
		Exec:           ge,
		OracleRtConfig: rtConfig,
		OutputDir:      path.Join(outputDir, "exec", inputID),
		Stage:          stage,
	}
}
// NewInitExecInput creates an InitStage input for the given executable with
// a fresh oracle runtime configuration. The select-enforcement timeout is
// taken from the fuzzer context's configuration, and the input ID is drawn
// from the context's auto-incrementing global counter.
func NewInitExecInput(fctx *Context, ge gexec.Executable) *Input {
	ortCfg := config.NewConfig()
	ortCfg.SelEfcm.SelTimeout = fctx.Cfg.SelEfcmTimeout
	globalID := fctx.GetAutoIncGlobalID()
	return NewExecInput(globalID, 0, fctx.Cfg.OutputDir, ge, ortCfg, InitStage)
}
|
<gh_stars>0
define([
'../libs/buffers',
'./api_error',
'./file_flag',
'../libs/path',
'./util'
], function (buffers,api_error, file_flag, path, util) {
'use strict';
const { ApiError, ErrorCode } = api_error;
const { FileFlag, ActionType } = file_flag;
const { fail } = util;
/**
* Basic filesystem class. Most filesystems should extend this class, as it
* provides default implementations for a handful of methods.
*/
class BaseFileSystem {
    // Whether this filesystem supports symbolic links. Default: no.
    supportsLinks() {
        return false;
    }
    // Report disk space for path p; the default reports 0 total / 0 free.
    diskSpace(p, cb) {
        cb(0, 0);
    }
    /**
     * Opens the file at path p with the given flag. The file must exist.
     * Subclasses must override; the base version reports ENOTSUP.
     * @param p The path to open.
     * @param flag The flag to use when opening the file.
     */
    openFile(p, flag, cb) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    /**
     * Create the file at path p with the given mode. Then, open it with the given
     * flag. Subclasses must override; the base version reports ENOTSUP.
     */
    createFile(p, flag, mode, cb) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    /**
     * Asynchronously opens p, honoring the flag's exists/not-exists actions:
     * creating, truncating, failing, or simply opening as appropriate.
     * Implemented on top of stat/openFile/createFile.
     */
    open(p, flag, mode, cb) {
        // Invoked with the stat result; decides how to proceed.
        const mustBeFile = (e, stats) => {
            if (e) {
                // File does not exist.
                switch (flag.pathNotExistsAction()) {
                    case ActionType.CREATE_FILE:
                        // Ensure parent exists.
                        return this.stat(path.dirname(p), false, (e, parentStats) => {
                            if (e) {
                                cb(e);
                            }
                            else if (parentStats && !parentStats.isDirectory()) {
                                cb(ApiError.ENOTDIR(path.dirname(p)));
                            }
                            else {
                                this.createFile(p, flag, mode, cb);
                            }
                        });
                    case ActionType.THROW_EXCEPTION:
                        return cb(ApiError.ENOENT(p));
                    default:
                        return cb(new ApiError(ErrorCode.EINVAL, 'Invalid FileFlag object.'));
                }
            }
            else {
                // File exists.
                if (stats && stats.isDirectory()) {
                    return cb(ApiError.EISDIR(p));
                }
                switch (flag.pathExistsAction()) {
                    case ActionType.THROW_EXCEPTION:
                        return cb(ApiError.EEXIST(p));
                    case ActionType.TRUNCATE_FILE:
                        // NOTE: In a previous implementation, we deleted the file and
                        // re-created it. However, this created a race condition if another
                        // asynchronous request was trying to read the file, as the file
                        // would not exist for a small period of time.
                        return this.openFile(p, flag, (e, fd) => {
                            if (e) {
                                cb(e);
                            }
                            else if (fd) {
                                fd.truncate(0, () => {
                                    fd.sync(() => {
                                        cb(null, fd);
                                    });
                                });
                            }
                            else {
                                fail();
                            }
                        });
                    case ActionType.NOP:
                        return this.openFile(p, flag, cb);
                    default:
                        return cb(new ApiError(ErrorCode.EINVAL, 'Invalid FileFlag object.'));
                }
            }
        };
        this.stat(p, false, mustBeFile);
    }
    // Default stubs: not supported unless a subclass overrides them.
    rename(oldPath, newPath, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    renameSync(oldPath, newPath) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    stat(p, isLstat, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    statSync(p, isLstat) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    /**
     * Opens the file at path p with the given flag. The file must exist.
     * Subclasses must override; the base version reports ENOTSUP.
     * @param p The path to open.
     * @param flag The flag to use when opening the file.
     * @return A File object corresponding to the opened file.
     */
    openFileSync(p, flag, mode) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    /**
     * Create the file at path p with the given mode. Then, open it with the given
     * flag. Subclasses must override; the base version reports ENOTSUP.
     */
    createFileSync(p, flag, mode) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    /**
     * Synchronous counterpart of open(): honors the flag's exists/not-exists
     * actions on top of statSync/openFileSync/createFileSync.
     */
    openSync(p, flag, mode) {
        // Check if the path exists, and is a file.
        let stats;
        try {
            stats = this.statSync(p, false);
        }
        catch (e) {
            // File does not exist.
            switch (flag.pathNotExistsAction()) {
                case ActionType.CREATE_FILE:
                    // Ensure parent exists.
                    const parentStats = this.statSync(path.dirname(p), false);
                    if (!parentStats.isDirectory()) {
                        throw ApiError.ENOTDIR(path.dirname(p));
                    }
                    return this.createFileSync(p, flag, mode);
                case ActionType.THROW_EXCEPTION:
                    throw ApiError.ENOENT(p);
                default:
                    throw new ApiError(ErrorCode.EINVAL, 'Invalid FileFlag object.');
            }
        }
        // File exists.
        if (stats.isDirectory()) {
            throw ApiError.EISDIR(p);
        }
        switch (flag.pathExistsAction()) {
            case ActionType.THROW_EXCEPTION:
                throw ApiError.EEXIST(p);
            case ActionType.TRUNCATE_FILE:
                // Delete file.
                this.unlinkSync(p);
                // Create file. Use the same mode as the old file.
                // Node itself modifies the ctime when this occurs, so this action
                // will preserve that behavior if the underlying file system
                // supports those properties.
                return this.createFileSync(p, flag, stats.mode);
            case ActionType.NOP:
                return this.openFileSync(p, flag, mode);
            default:
                throw new ApiError(ErrorCode.EINVAL, 'Invalid FileFlag object.');
        }
    }
    // Default stubs: not supported unless a subclass overrides them.
    unlink(p, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    unlinkSync(p) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    rmdir(p, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    rmdirSync(p) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    mkdir(p, mode, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    mkdirSync(p, mode) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    readdir(p, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    readdirSync(p) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    // Existence check built on stat: any stat error is treated as "missing".
    // NOTE(review): passes null for isLstat here while existsSync passes
    // true — confirm whether symlink handling should differ between them.
    exists(p, cb) {
        this.stat(p, null, function (err) {
            cb(!err);
        });
    }
    // Synchronous existence check (lstat semantics: isLstat = true).
    existsSync(p) {
        try {
            this.statSync(p, true);
            return true;
        }
        catch (e) {
            return false;
        }
    }
realpath(p, cache, cb) {
if (this.supportsLinks()) {
// The path could contain symlinks. Split up the path,
// resolve any symlinks, return the resolved string.
const splitPath = p.split(path.sep);
// TODO: Simpler to just pass through file, find sep and such.
for (let i = 0; i < splitPath.length; i++) {
const addPaths = splitPath.slice(0, i + 1);
splitPath[i] = path.join.apply(null, addPaths);
}
}
else {
// No symlinks. We just need to verify that it exists.
this.exists(p, function (doesExist) {
if (doesExist) {
cb(null, p);
}
else {
cb(ApiError.ENOENT(p));
}
});
}
}
    /**
     * Synchronously resolves p to a canonical path. When links are supported
     * the path is rebuilt segment by segment; otherwise p is returned
     * unchanged after an existence check.
     */
    realpathSync(p, cache) {
        if (this.supportsLinks()) {
            // The path could contain symlinks. Split up the path,
            // resolve any symlinks, return the resolved string.
            const splitPath = p.split(path.sep);
            // TODO: Simpler to just pass through file, find sep and such.
            for (let i = 0; i < splitPath.length; i++) {
                const addPaths = splitPath.slice(0, i + 1);
                splitPath[i] = path.join.apply(path, addPaths);
            }
            return splitPath.join(path.sep);
        }
        else {
            // No symlinks. We just need to verify that it exists.
            if (this.existsSync(p)) {
                return p;
            }
            else {
                throw ApiError.ENOENT(p);
            }
        }
    }
    // Truncate the file at p to len bytes: open r+, truncate, then close.
    // The close error is reported only when truncate itself succeeded.
    truncate(p, len, cb) {
        this.open(p, FileFlag.getFileFlag('r+'), 0x1a4, (function (er, fd) {
            if (er) {
                return cb(er);
            }
            fd.truncate(len, (function (er) {
                fd.close((function (er2) {
                    cb(er || er2);
                }));
            }));
        }));
    }
truncateSync(p, len) {
const fd = this.openSync(p, FileFlag.getFileFlag('r+'), 0x1a4);
// Need to safely close FD, regardless of whether or not truncate succeeds.
try {
fd.truncateSync(len);
}
catch (e) {
throw e;
}
finally {
fd.closeSync();
}
}
    /**
     * Asynchronously reads the whole file into a Buffer (encoding === null)
     * or a decoded string. The descriptor is always closed before the
     * caller's callback fires; close errors are surfaced when the read
     * itself succeeded.
     */
    readFile(fname, encoding, flag, cb) {
        // Wrap cb in file closing code.
        const oldCb = cb;
        // Get file.
        this.open(fname, flag, 0x1a4, (err, fd) => {
            if (err) {
                return cb(err);
            }
            cb = function (err, arg) {
                fd.close(function (err2) {
                    if (!err) {
                        err = err2;
                    }
                    return oldCb(err, arg);
                });
            };
            fd.stat((err, stat) => {
                if (err) {
                    return cb(err);
                }
                // Allocate buffer.
                const buf = Buffer.alloc(stat.size);
                fd.read(buf, 0, stat.size, 0, (err) => {
                    if (err) {
                        return cb(err);
                    }
                    else if (encoding === null) {
                        return cb(err, buf);
                    }
                    try {
                        cb(null, buf.toString(encoding));
                    }
                    catch (e) {
                        cb(e);
                    }
                });
            });
        });
    }
readFileSync(fname, encoding, flag) {
// Get file.
const fd = this.openSync(fname, flag, 0x1a4);
try {
const stat = fd.statSync();
// Allocate buffer.
const buf = Buffer.alloc(stat.size);
fd.readSync(buf, 0, stat.size, 0);
fd.closeSync();
if (encoding === null) {
return buf;
}
return buf.toString(encoding);
}
finally {
fd.closeSync();
}
}
    /**
     * Asynchronously writes data (string or Buffer) to fname. Strings are
     * encoded with `encoding` first; the descriptor is always closed before
     * the caller's callback fires.
     */
    writeFile(fname, data, encoding, flag, mode, cb) {
        // Wrap cb in file closing code.
        const oldCb = cb;
        // Get file.
        this.open(fname, flag, 0x1a4, function (err, fd) {
            if (err) {
                return cb(err);
            }
            cb = function (err) {
                fd.close(function (err2) {
                    oldCb(err ? err : err2);
                });
            };
            try {
                if (typeof data === 'string') {
                    data = Buffer.from(data, encoding);
                }
            }
            catch (e) {
                return cb(e);
            }
            // Write into file.
            fd.write(data, 0, data.length, 0, cb);
        });
    }
    /**
     * Synchronous counterpart of writeFile. The descriptor is closed even
     * when the write throws.
     */
    writeFileSync(fname, data, encoding, flag, mode) {
        // Get file.
        const fd = this.openSync(fname, flag, mode);
        try {
            if (typeof data === 'string') {
                data = Buffer.from(data, encoding);
            }
            // Write into file.
            fd.writeSync(data, 0, data.length, 0);
        }
        finally {
            fd.closeSync();
        }
    }
    /**
     * Asynchronously appends data (string or Buffer) to fname. The null
     * write position means "current position", i.e. append semantics under
     * an append flag. The descriptor is always closed before the caller's
     * callback fires.
     */
    appendFile(fname, data, encoding, flag, mode, cb) {
        // Wrap cb in file closing code.
        const oldCb = cb;
        this.open(fname, flag, mode, function (err, fd) {
            if (err) {
                return cb(err);
            }
            cb = function (err) {
                fd.close(function (err2) {
                    oldCb(err ? err : err2);
                });
            };
            if (typeof data === 'string') {
                data = Buffer.from(data, encoding);
            }
            fd.write(data, 0, data.length, null, cb);
        });
    }
    /**
     * Synchronous counterpart of appendFile. The descriptor is closed even
     * when the write throws.
     */
    appendFileSync(fname, data, encoding, flag, mode) {
        const fd = this.openSync(fname, flag, mode);
        try {
            if (typeof data === 'string') {
                data = Buffer.from(data, encoding);
            }
            fd.writeSync(data, 0, data.length, null);
        }
        finally {
            fd.closeSync();
        }
    }
    // Default stubs: permissions, ownership, timestamps and links are not
    // supported unless a subclass overrides these.
    chmod(p, isLchmod, mode, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    chmodSync(p, isLchmod, mode) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    chown(p, isLchown, uid, gid, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    chownSync(p, isLchown, uid, gid) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    utimes(p, atime, mtime, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    utimesSync(p, atime, mtime) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    link(srcpath, dstpath, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    linkSync(srcpath, dstpath) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    symlink(srcpath, dstpath, type, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    symlinkSync(srcpath, dstpath, type) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
    readlink(p, cb) {
        cb(new ApiError(ErrorCode.ENOTSUP));
    }
    readlinkSync(p) {
        throw new ApiError(ErrorCode.ENOTSUP);
    }
}
/**
 * Implements the asynchronous API in terms of the synchronous API: each
 * async method simply calls its *Sync counterpart and routes the result —
 * or the thrown error — through the node-style callback.
 * @class SynchronousFileSystem
 */
class SynchronousFileSystem extends BaseFileSystem {
    supportsSynch() {
        return true;
    }
    rename(oldPath, newPath, cb) {
        try {
            this.renameSync(oldPath, newPath);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    stat(p, isLstat, cb) {
        try {
            cb(null, this.statSync(p, isLstat));
        }
        catch (e) {
            cb(e);
        }
    }
    open(p, flags, mode, cb) {
        try {
            cb(null, this.openSync(p, flags, mode));
        }
        catch (e) {
            cb(e);
        }
    }
    unlink(p, cb) {
        try {
            this.unlinkSync(p);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    rmdir(p, cb) {
        try {
            this.rmdirSync(p);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    mkdir(p, mode, cb) {
        try {
            this.mkdirSync(p, mode);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    readdir(p, cb) {
        try {
            cb(null, this.readdirSync(p));
        }
        catch (e) {
            cb(e);
        }
    }
    chmod(p, isLchmod, mode, cb) {
        try {
            this.chmodSync(p, isLchmod, mode);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    chown(p, isLchown, uid, gid, cb) {
        try {
            this.chownSync(p, isLchown, uid, gid);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    utimes(p, atime, mtime, cb) {
        try {
            this.utimesSync(p, atime, mtime);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    link(srcpath, dstpath, cb) {
        try {
            this.linkSync(srcpath, dstpath);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    symlink(srcpath, dstpath, type, cb) {
        try {
            this.symlinkSync(srcpath, dstpath, type);
            cb();
        }
        catch (e) {
            cb(e);
        }
    }
    readlink(p, cb) {
        try {
            cb(null, this.readlinkSync(p));
        }
        catch (e) {
            cb(e);
        }
    }
}
return {
BaseFileSystem: BaseFileSystem,
SynchronousFileSystem: SynchronousFileSystem
};
}); |
#!/bin/sh
# Create a named Docker volume (local driver) for the trader database's data.
docker volume create -d local --name trader-db-data
<reponame>leftjs/gym-api
package com.donler.gym.controller;
import com.donler.gym.expection.AttrValidateException;
import com.donler.gym.model.Business;
import com.donler.gym.model.dto.DeleteStatusModel;
import com.donler.gym.repo.BusinessRepo;
import com.donler.gym.util.NullCheckUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.util.List;
/**
 * REST controller exposing CRUD endpoints for gym "business" options.
 * Created by jason on 4/15/16.
 */
@RestController
@RequestMapping(value = "/business")
@Api(value = "business", tags = {"项目"})
public class BusinessController {
    // Repository providing persistence for Business entities.
    @Autowired
    private BusinessRepo businessRepo;
    /** Creates a new business option from the posted entity. */
    @ApiOperation(value = "添加一个项目选项", notes = "通过传入的选项信息生成一个项目选项")
    @RequestMapping(value = "", method = RequestMethod.POST, produces = {"application/json"}, consumes = {"application/json"})
    public ResponseEntity<Business> addBusiness(@Valid @RequestBody Business input) {
        Business business = businessRepo.save(input);
        return ResponseEntity.ok(business);
    }
    /** Deletes the business option with the given id; rejects unknown ids. */
    @ApiOperation(value = "删除指定的项目选项", notes = "删除指定的项目选项,并给出反馈")
    @RequestMapping(value = "/{id}", method = RequestMethod.DELETE, produces = {"application/json"})
    public ResponseEntity<DeleteStatusModel> deleteBusiness(@PathVariable("id") Long id) {
        Business business = businessRepo.findOne(id);
        if (NullCheckUtils.isNullOrEmpty(business)) {
            throw new AttrValidateException("请传入正确的id");
        }
        businessRepo.delete(id);
        return ResponseEntity.ok(new DeleteStatusModel());
    }
    /** Returns all business options. */
    @ApiOperation(value = "获取所有项目选项", notes = "获得所有的项目选项列表")
    @RequestMapping(value = "", method = RequestMethod.GET, produces = {"application/json"})
    public ResponseEntity<List<Business>> findAllBusiness() {
        return ResponseEntity.ok(businessRepo.findAll());
    }
    /** Updates price and month count of an existing option (path id wins over body id). */
    @ApiOperation(value = "更新指定项目选项", notes = "传入项目选项的信息更新指定的项目选项(ps.传入的实体中的id忽略,后台不予以识别)")
    @RequestMapping(value = "/{id}", method = RequestMethod.PUT, produces = {"application/json"}, consumes = {"application/json"})
    public ResponseEntity<Business> updateBusiness(@PathVariable("id") Long id, @Valid @RequestBody Business input) {
        Business business = businessRepo.findOne(id);
        if (NullCheckUtils.isNullOrEmpty(business)) {
            throw new AttrValidateException("请检查传入的id");
        }
        /**
         * Apply the update: only price and month count are taken from the body.
         */
        business.setPrice(input.getPrice());
        business.setMonthCount(input.getMonthCount());
        return ResponseEntity.ok(businessRepo.save(business));
    }
}
|
function formatTimestamp($time, $timeStamp, $dateFormat) {
    // Prefer a human-readable relative time when the helper provides one.
    // Fix: the original called relativeTime() twice (once for the null check,
    // once for the return); cache the result so side effects / work happen once.
    $relative = $time->relativeTime($timeStamp);
    if ($relative !== null) {
        return $relative;
    }
    // Fall back to an absolute date rendered with the supplied format.
    return $time->format($dateFormat, $timeStamp);
}
#!/bin/bash
# Bootstrap script for a WeCross + Hyperledger Fabric cross-chain demo network.
set -e
LANG=en_US.UTF-8
ROOT=$(pwd)

# Print an informational message in green.
LOG_INFO()
{
    local content=${1}
    echo -e "\033[32m[INFO] ${content}\033[0m"
}

# Print an error message in red.
LOG_ERROR()
{
    local content=${1}
    echo -e "\033[31m[ERROR] ${content}\033[0m"
}

# Load pinned component versions (e.g. WECROSS_VERSION) from profile_version.sh.
version_file="profile_version.sh"
[[ ! -f "${version_file}" ]] && {
    LOG_ERROR " ${version_file} not exist, please check if the demo is the latest. "
    exit 1
}
source "${version_file}"
LOG_INFO "source ${version_file}, WeCross Version=${WECROSS_VERSION}"

# Download a URL with curl unless the target file already exists locally.
Download()
{
    local url=${1}
    local file=$(basename ${url})
    if [ ! -e ${file} ]; then
        curl -#LO ${url}
    fi
}
# Portable in-place sed: BSD sed (macOS) needs an explicit empty backup
# suffix after -i, GNU sed does not. Fix: forward the arguments quoted
# ("$@") — the original unquoted $@ word-split patterns or file names
# containing spaces.
sed_i()
{
    if [ "$(uname)" == "Darwin" ]; then
        # Mac
        sed -i "" "$@"
    else
        sed -i "$@"
    fi
}
# Abort unless the given command is available on PATH.
check_command()
{
    local cmd=${1}
    if [ -z "$(command -v ${cmd})" ];then
        LOG_ERROR "${cmd} is not installed."
        exit 1
    fi
}

# Verify the docker daemon is reachable by the current user.
check_docker_service()
{
    set +e
    docker ps > /dev/null
    if [ "$?" -ne "0" ]; then
        LOG_INFO "Please install docker and add your user by:"
        echo -e "\033[32m sudo gpasswd -a ${USER} docker && su ${USER}\033[0m"
        exit 1
    fi
    set -e
}

# Abort if a TCP port is already in use; "name" is only used in the message.
check_port_avaliable()
{
    port=$1
    name=$2
    if [ "$(lsof -i:$port | wc -l)" -ne "0" ]; then
        LOG_ERROR "${name} port ${port} is not avaliable. Are there any other blockchain is running?"
        exit 1
    fi
}

# Ports required by the Fabric demo network.
check_fabric_avaliable()
{
    #7050,7051,8051,9051,10051
    check_port_avaliable 7050 Fabric-Orderer
    check_port_avaliable 7051 Fabric-Peer
    check_port_avaliable 8051 Fabric-Peer
    check_port_avaliable 9051 Fabric-Peer
    check_port_avaliable 10051 Fabric-Peer
}

# Ports required by the two WeCross routers.
check_wecross_avaliable()
{
    check_port_avaliable 8250 WeCross-8250-25500
    check_port_avaliable 25500 WeCross-8250-25500
    check_port_avaliable 8251 WeCross-8251-25501
    check_port_avaliable 25501 WeCross-8251-25501
}

# Verify required tooling and free ports before building anything.
check_env()
{
    LOG_INFO "Check environments"
    check_command java
    check_command docker
    check_command docker-compose
    check_docker_service
    check_fabric_avaliable
    check_wecross_avaliable
}
# Build the Fabric demo network and add an extra channel "yourchannel".
build_fabric()
{
    LOG_INFO "Build Fabric ..."
    cd ${ROOT}/fabric
    bash build.sh

    #add_channel
    CHANNEL_NAME=yourchannel
    chmod +x add_channel.sh
    chmod +x update-channel-tx.sh
    cp -f update-channel-tx.sh fabric-samples-1.4.4/first-network
    cd ./fabric-samples-1.4.4/first-network/
    ./update-channel-tx.sh $CHANNEL_NAME
    cd -
    docker cp add_channel.sh cli:/opt/gopath/src/github.com/hyperledger/fabric/peer/scripts/add_channel.sh
    docker exec cli scripts/add_channel.sh $CHANNEL_NAME
    cd ${ROOT}/
}

# Abort unless a process whose command line matches the pattern is running.
check_process()
{
    local process_name=${1}
    if [ -z "$(ps -ef |grep ${process_name} |grep -v grep)" ];then
        LOG_ERROR "Build demo failed: ${process_name} does not exist."
        exit 1
    fi
}

# Abort unless a docker container matching the name is running.
check_container()
{
    local container_name=${1}
    if [ -z "$(docker ps |grep ${container_name} |grep -v grep)" ];then
        LOG_ERROR "Build demo failed: ${container_name} does not exist."
        exit 1
    fi
}

# Verify all Fabric peer/orderer containers are up.
check_fabric()
{
    check_container peer0.org1.example.com
    check_container peer1.org1.example.com
    check_container peer0.org2.example.com
    check_container peer1.org2.example.com
    check_container orderer.example.com
}

# Verify both WeCross router processes are up.
check_wecross()
{
    check_process routers-payment/127.0.0.1-8250-25500
    check_process routers-payment/127.0.0.1-8251-25501
}

# Composite health check for the whole demo network.
check_wecross_network()
{
    check_wecross
}
# Ask before wiping an existing demo deployment; exits unless the user agrees.
clear_ask()
{
    # Clear history
    if [ -e ${ROOT}/routers-payment ];then
        read -p "Old demo network exist. Clear all and re-build? [Y/n]" ans
        case "$ans" in
        y | Y | "")
            LOG_INFO "Clear old network ..."
            bash clear.sh
            ;;
        *)
            exit 0
            ;;
        esac
    fi
}

# Offer to launch the WeCross console after a successful build.
console_ask()
{
    read -p "Start WeCross console? [Y/n]" ans
    case "$ans" in
    y | Y | "")
        cd ${ROOT}/WeCross-Console && ./start.sh
        ;;
    *)
        echo "To start WeCross console. Just: \"cd ./WeCross-Console && ./start.sh\""
        ;;
    esac
}
# Configure the router on port 8250: Fabric accounts, the chain stub,
# zone name payment1, and deploy the proxy system contract.
config_router_8250()
{
    router_dir=${1}
    fabric_demo_dir=${2}
    LOG_INFO "Configure router ${router_dir}"
    cd ${router_dir}

    # account
    bash add_account.sh -t Fabric1.4 -n fabric_admin -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_admin/* conf/accounts/fabric_admin/
    bash add_account.sh -t Fabric1.4 -n fabric_admin_org1 -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_admin_org1/* conf/accounts/fabric_admin_org1/
    bash add_account.sh -t Fabric1.4 -n fabric_admin_org2 -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_admin_org2/* conf/accounts/fabric_admin_org2/
    sed_i 's/Org1MSP/Org2MSP/g' conf/accounts/fabric_admin_org2/account.toml
    bash add_account.sh -t Fabric1.4 -n fabric_default_account -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_user1/* conf/accounts/fabric_default_account/
    bash add_account.sh -t Fabric1.4 -n fabric_user1 -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_user1/* conf/accounts/fabric_user1/

    # stubs
    bash add_chain.sh -t Fabric1.4 -n fabric -d conf/chains
    cp ${fabric_demo_dir}/certs/chains/fabric/* conf/chains/fabric/
    sed_i 's/payment/payment1/g' conf/wecross.toml

    # deploy proxy
    bash deploy_system_contract.sh -t Fabric1.4 -c chains/fabric -P
    cd -
}

# Configure the router on port 8251: like 8250, but targets the second
# channel (mychannel -> yourchannel) and zone name payment2.
config_router_8251()
{
    router_dir=${1}
    fabric_demo_dir=${2}
    LOG_INFO "Configure router ${router_dir}"
    cd ${router_dir}

    # account
    bash add_account.sh -t Fabric1.4 -n fabric_admin -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_admin/* conf/accounts/fabric_admin/
    bash add_account.sh -t Fabric1.4 -n fabric_admin_org1 -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_admin_org1/* conf/accounts/fabric_admin_org1/
    bash add_account.sh -t Fabric1.4 -n fabric_admin_org2 -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_admin_org2/* conf/accounts/fabric_admin_org2/
    sed_i 's/Org1MSP/Org2MSP/g' conf/accounts/fabric_admin_org2/account.toml
    bash add_account.sh -t Fabric1.4 -n fabric_default_account -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_user1/* conf/accounts/fabric_default_account/
    bash add_account.sh -t Fabric1.4 -n fabric_user2 -d conf/accounts
    cp ${fabric_demo_dir}/certs/accounts/fabric_user1/* conf/accounts/fabric_user2/

    # stubs
    bash add_chain.sh -t Fabric1.4 -n fabric -d conf/chains
    cp ${fabric_demo_dir}/certs/chains/fabric/* conf/chains/fabric/
    sed_i 's/mychannel/yourchannel/g' conf/chains/fabric/stub.toml
    sed_i 's/payment/payment2/g' conf/wecross.toml

    # deploy proxy
    bash deploy_system_contract.sh -t Fabric1.4 -c chains/fabric -P
    cd -
}
# Fetch the WeCross router distribution (uses a local script when present,
# otherwise streams the official download script from GitHub).
download_wecross()
{
    # Download
    LOG_INFO "Download WeCross ..."
    if [ -e download_wecross.sh ];then
        bash download_wecross.sh -t "${WECROSS_VERSION}"
    else
        bash <(curl -sL https://github.com/WebankBlockchain/WeCross/releases/download/resources/download_wecross.sh) -t "${WECROSS_VERSION}"
    fi
}

# Fetch the WeCross console distribution (same local/remote fallback).
download_wecross_console()
{
    LOG_INFO "Download WeCross Console ..."
    if [ -e download_console.sh ];then
        bash download_console.sh -t "${WECROSS_CONSOLE_VERSION}"
    else
        bash <(curl -sL https://github.com/WebankBlockchain/WeCross/releases/download/resources/download_console.sh) -t "${WECROSS_CONSOLE_VERSION}"
    fi
}
# Install and instantiate chaincode sacc1 on channel 1 via router 8250,
# then poll docker until the chaincode container appears (up to ~80s).
deploy_channel1_sample_resource()
{
    # deploy from 8250
    LOG_INFO "Deploy fabric chaincode sacc1 to channel1"
    cd ${ROOT}/WeCross-Console/
    bash start.sh <<EOF
fabricInstall payment1.fabric.sacc1 fabric_admin_org1 Org1 contracts/chaincode/sacc 1.0 GO_LANG
fabricInstall payment1.fabric.sacc1 fabric_admin_org2 Org2 contracts/chaincode/sacc 1.0 GO_LANG
fabricInstantiate payment1.fabric.sacc1 fabric_admin ["Org1","Org2"] contracts/chaincode/sacc 1.0 GO_LANG policy.yaml ["a","10"]
quit
EOF

    # wait the chaincode instantiate
    try_times=80
    i=0
    echo -e "\033[32msacc1 chaincode is instantiating ...\033[0m\c"
    while [ ! -n "$(docker ps |grep sacc1 |awk '{print $1}')" ]
    do
        sleep 1
        ((i=i+1))
        if [ $i -lt ${try_times} ]; then
            echo -e "\033[32m.\033[0m\c"
        else
            LOG_ERROR "Instantiate sacc1 timeout!"
            exit 1
        fi
    done
    cd -
}

# Same as above for sacc2 on channel 2: temporarily points the console at
# router 8251 and restores 8250 in the console config afterwards.
deploy_channel2_sample_resource()
{
    # deploy from 8250
    LOG_INFO "Deploy fabric chaincode sacc2 to channel2"
    cd ${ROOT}/WeCross-Console/
    sed_i 's/8250/8251/g' conf/application.toml
    bash start.sh <<EOF
fabricInstall payment2.fabric.sacc2 fabric_admin_org1 Org1 contracts/chaincode/sacc 1.0 GO_LANG
fabricInstall payment2.fabric.sacc2 fabric_admin_org2 Org2 contracts/chaincode/sacc 1.0 GO_LANG
fabricInstantiate payment2.fabric.sacc2 fabric_admin ["Org1","Org2"] contracts/chaincode/sacc 1.0 GO_LANG policy.yaml ["a","10"]
quit
EOF

    # wait the chaincode instantiate
    try_times=80
    i=0
    echo -e "\033[32msacc2 chaincode is instantiating ...\033[0m\c"
    while [ ! -n "$(docker ps |grep sacc2 |awk '{print $1}')" ]
    do
        sleep 1
        ((i=i+1))
        if [ $i -lt ${try_times} ]; then
            echo -e "\033[32m.\033[0m\c"
        else
            LOG_ERROR "Instantiate sacc2 timeout!"
            exit 1
        fi
    done
    sed_i 's/8251/8250/g' conf/application.toml
    cd -
}

# Deploy the demo chaincode to both channels.
deploy_sample_resource()
{
    deploy_channel1_sample_resource
    deploy_channel2_sample_resource
}
# End-to-end build: clear old state, check environment, download artifacts,
# build routers and Fabric, configure and start routers, deploy chaincode.
main()
{
    clear_ask
    check_env
    download_wecross
    download_wecross_console

    # Build Routers
    LOG_INFO "Build Routers ..."
    cat << EOF > ipfile
127.0.0.1:8250:25500
127.0.0.1:8251:25501
EOF
    bash ./WeCross/build_wecross.sh -n payment -o routers-payment -f ipfile

    # Build WeCross Console
    LOG_INFO "Build WeCross Console ..."
    cp routers-payment/cert/sdk/* ${ROOT}/WeCross-Console/conf/
    cp ${ROOT}/WeCross-Console/conf/application-sample.toml ${ROOT}/WeCross-Console/conf/application.toml
    cd ${ROOT}/WeCross-Console/
    bash start.sh <<EOF
quit
EOF
    cd ${ROOT}/

    # Build FABRIC
    build_fabric

    # config routers
    config_router_8250 ${ROOT}/routers-payment/127.0.0.1-8250-25500/ ${ROOT}/fabric
    config_router_8251 ${ROOT}/routers-payment/127.0.0.1-8251-25501/ ${ROOT}/fabric

    # Start up routers
    cd ${ROOT}/routers-payment/127.0.0.1-8250-25500/
    bash start.sh
    cd ${ROOT}/routers-payment/127.0.0.1-8251-25501/
    bash start.sh
    cd ${ROOT}

    check_wecross_network
    deploy_sample_resource

    LOG_INFO "Success! WeCross demo network is running. Framework:"
    echo -e "
            Hyperledger Fabric
     Channel 1             Channel 2
(payment1_Channel1)   (payment2_Channel2)
     (sacc1.go)            (sacc2.go)
         |                     |
         |                     |
         |                     |
  WeCross Router <----------> WeCross Router <----------> WeCross Account Manager
(127.0.0.1-8250-25500)    (127.0.0.1-8251-25501)             (127.0.0.1:8340)
      /  \\
     /    \\
    /      \\
WeCross WebApp  WeCross Console
"
}

main

# When launched with no argument, offer to start the console interactively.
if [ ! -n "$1" ] ;then
    console_ask
fi
|
# Chef cookbook metadata for the balanced-www cookbook.
name 'balanced-www'
maintainer 'Balanced'
maintainer_email '<EMAIL>'
license 'Apache License, Version 2.0'
description 'Installs/Configures balanced-www'
# Long description is sourced from the cookbook's README.
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '1.0.0'

# Cookbooks this one depends on.
depends "nodejs"
depends "git"
|
#! /usr/bin/env bash
# Download / build / run / clean driver for the LULESH proxy application.
source ./utils.sh
parse_args "$@"

# Benchmark name
target_name="LULESH"
target_name_tgz="${target_name}.tar.gz"
print_info ${target_name}

# Target application
lulesh_dir=lulesh
lulesh_patch_file=${CORRBENCH_mutate_file}

if [[ ${do_download} == "yes" ]]; then
    echo "Download ${target_name}"
    wget -O ${target_name_tgz} https://github.com/LLNL/LULESH/archive/2.0.3.tar.gz
    echo "Creating directories and unpacking"
    mkdir -p ${lulesh_dir} && cd ${lulesh_dir}
    tar xzf ../${target_name_tgz}
    cd ..
fi

# Creating directories, unpacking, configuring, building
if [[ ${do_build} == "yes" ]]; then
    echo "Building application"
    cd ${lulesh_dir}/LULESH-2.0.3
    cp ../../patches/Makefile.lulesh ./Makefile
    # apply patches and build the mutations
    if [[ -n "${lulesh_patch_file}" ]]; then
        # Init git repo, add everything, apply patch
        echo -e "Applying the patch file: " ${lulesh_patch_file}
        git init >${outputTo} 2>&1
        git add * >${outputTo} 2>&1
        git commit -m "Basic patched LULESH" >${outputTo} 2>&1
        git apply ${lulesh_patch_file}
    else
        echo "Applying no mutation"
    fi
    make MPICXX="${mpi_cxx} -DUSE_MPI=1" ${make_target} -j ${n_procs}
    # Fix: the build dir is two levels below the script root
    # (lulesh/LULESH-2.0.3), so two ".." return to it. The original
    # "cd ../../.." overshot by one level and broke the subsequent run step.
    cd ../..
fi

# "Running the target application"
if [[ "${do_run}" == "yes" ]]; then
    echo "Running application"
    cd ${lulesh_dir}/LULESH-2.0.3
    time ${mpi_run} -np ${mpi_procs} ./lulesh2.0 -s 40
    cd ../..
fi

if [[ "${do_clean}" == "yes" ]]; then
    # Fix: typo "directorie" in the user-facing message.
    echo "Removing ${target_name_tgz} and directory ${lulesh_dir}"
    rm -rf ${target_name_tgz} ${lulesh_dir}
fi
|
/*
* Java port of Bullet (c) 2008 <NAME> <<EMAIL>>
*
* Bullet Continuous Collision Detection and Physics Library
* Copyright (c) 2003-2008 <NAME> http://www.bulletphysics.com/
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from
* the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
package com.bulletphysics.demos.opengl;
import java.io.IOException;
import java.nio.FloatBuffer;
import java.util.HashMap;
import java.util.Map;
import com.bulletphysics.demos.opengl.FontRender.GLFont;
import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
import org.lwjgl.util.glu.Cylinder;
import org.lwjgl.util.glu.Disk;
import org.lwjgl.util.glu.GLU;
import org.lwjgl.util.glu.Sphere;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.util.glu.GLU.*;
/**
 * LWJGL-backed implementation of the IGL abstraction used by the Bullet
 * demos. Most methods are thin delegates to {@link GL11}/{@link GLU}; the
 * shape-drawing helpers cache compiled display lists keyed by geometry
 * parameters to avoid re-tessellating every frame.
 *
 * @author jezek2
 */
public class LwjglGL implements IGL {

    // Reusable direct NIO scratch buffer for handing float[] data to LWJGL,
    // which requires direct buffers for its OpenGL bindings.
    private static FloatBuffer floatBuf = BufferUtils.createFloatBuffer(16);

    // Bitmap font used by drawString(); remains null if loading fails.
    private GLFont font;

    /** Loads the bitmap font resource; failures are logged and leave font null. */
    public void init() {
        try {
            //font = FontRender.createFont("Dialog", 11, false, true);
            font = new GLFont(IGL.class.getResourceAsStream("DejaVu_Sans_11.fnt"));
        }
        catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void glLight(int light, int pname, float[] params) {
        // Fix: FloatBuffer.wrap() produces a heap (non-direct) buffer, which
        // LWJGL rejects at runtime; stage the values through the shared direct
        // scratch buffer instead (same technique as glMultMatrix below).
        floatBuf.clear();
        floatBuf.put(params).flip();
        GL11.glLight(light, pname, floatBuf);
    }

    public void glEnable(int cap) {
        GL11.glEnable(cap);
    }

    public void glDisable(int cap) {
        GL11.glDisable(cap);
    }

    public void glShadeModel(int mode) {
        GL11.glShadeModel(mode);
    }

    public void glDepthFunc(int func) {
        GL11.glDepthFunc(func);
    }

    public void glClearColor(float red, float green, float blue, float alpha) {
        GL11.glClearColor(red, green, blue, alpha);
    }

    public void glMatrixMode(int mode) {
        GL11.glMatrixMode(mode);
    }

    public void glLoadIdentity() {
        GL11.glLoadIdentity();
    }

    public void glFrustum(double left, double right, double bottom, double top, double zNear, double zFar) {
        GL11.glFrustum(left, right, bottom, top, zNear, zFar);
    }

    public void gluLookAt(float eyex, float eyey, float eyez, float centerx, float centery, float centerz, float upx, float upy, float upz) {
        GLU.gluLookAt(eyex, eyey, eyez, centerx, centery, centerz, upx, upy, upz);
    }

    public void glViewport(int x, int y, int width, int height) {
        GL11.glViewport(x, y, width, height);
    }

    public void glPushMatrix() {
        GL11.glPushMatrix();
    }

    public void glPopMatrix() {
        GL11.glPopMatrix();
    }

    public void gluOrtho2D(float left, float right, float bottom, float top) {
        GLU.gluOrtho2D(left, right, bottom, top);
    }

    public void glScalef(float x, float y, float z) {
        GL11.glScalef(x, y, z);
    }

    public void glTranslatef(float x, float y, float z) {
        GL11.glTranslatef(x, y, z);
    }

    public void glColor3f(float red, float green, float blue) {
        GL11.glColor3f(red, green, blue);
    }

    public void glClear(int mask) {
        GL11.glClear(mask);
    }

    public void glBegin(int mode) {
        GL11.glBegin(mode);
    }

    public void glEnd() {
        GL11.glEnd();
    }

    public void glVertex3f(float x, float y, float z) {
        GL11.glVertex3f(x, y, z);
    }

    public void glLineWidth(float width) {
        GL11.glLineWidth(width);
    }

    public void glPointSize(float size) {
        GL11.glPointSize(size);
    }

    public void glNormal3f(float nx, float ny, float nz) {
        GL11.glNormal3f(nx, ny, nz);
    }

    public void glMultMatrix(float[] m) {
        // Copy into the direct scratch buffer; LWJGL requires direct buffers.
        floatBuf.clear();
        floatBuf.put(m).flip();
        GL11.glMultMatrix(floatBuf);
    }

    ////////////////////////////////////////////////////////////////////////////

    /** Draws an axis-aligned cube of the given edge length centered at the origin. */
    public void drawCube(float extent) {
        extent = extent * 0.5f;

        GL11.glBegin(GL11.GL_QUADS);
        GL11.glNormal3f( 1f, 0f, 0f); GL11.glVertex3f(+extent,-extent,+extent); GL11.glVertex3f(+extent,-extent,-extent); GL11.glVertex3f(+extent,+extent,-extent); GL11.glVertex3f(+extent,+extent,+extent);
        GL11.glNormal3f( 0f, 1f, 0f); GL11.glVertex3f(+extent,+extent,+extent); GL11.glVertex3f(+extent,+extent,-extent); GL11.glVertex3f(-extent,+extent,-extent); GL11.glVertex3f(-extent,+extent,+extent);
        GL11.glNormal3f( 0f, 0f, 1f); GL11.glVertex3f(+extent,+extent,+extent); GL11.glVertex3f(-extent,+extent,+extent); GL11.glVertex3f(-extent,-extent,+extent); GL11.glVertex3f(+extent,-extent,+extent);
        GL11.glNormal3f(-1f, 0f, 0f); GL11.glVertex3f(-extent,-extent,+extent); GL11.glVertex3f(-extent,+extent,+extent); GL11.glVertex3f(-extent,+extent,-extent); GL11.glVertex3f(-extent,-extent,-extent);
        GL11.glNormal3f( 0f,-1f, 0f); GL11.glVertex3f(-extent,-extent,+extent); GL11.glVertex3f(-extent,-extent,-extent); GL11.glVertex3f(+extent,-extent,-extent); GL11.glVertex3f(+extent,-extent,+extent);
        GL11.glNormal3f( 0f, 0f,-1f); GL11.glVertex3f(-extent,-extent,-extent); GL11.glVertex3f(-extent,+extent,-extent); GL11.glVertex3f(+extent,+extent,-extent); GL11.glVertex3f(+extent,-extent,-extent);
        GL11.glEnd();
    }

    ////////////////////////////////////////////////////////////////////////////

    private static final Cylinder cylinder = new Cylinder();
    private static final Disk disk = new Disk();
    private static final Sphere sphere = new Sphere();

    // Cache key for sphere display lists (keyed by radius only).
    private static class SphereKey {
        public float radius;

        public SphereKey() {
        }

        public SphereKey(SphereKey key) {
            radius = key.radius;
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null || !(obj instanceof SphereKey)) return false;
            SphereKey other = (SphereKey)obj;
            return radius == other.radius;
        }

        @Override
        public int hashCode() {
            return Float.floatToIntBits(radius);
        }
    }

    private static Map<SphereKey,Integer> sphereDisplayLists = new HashMap<SphereKey,Integer>();
    private static SphereKey sphereKey = new SphereKey();

    public void drawSphere(float radius, int slices, int stacks) {
        // NOTE(review): the slices/stacks arguments are ignored — the cached
        // display list always tessellates at 8x8 and is keyed by radius only.
        // Confirm this is an intentional quality/performance trade-off.
        sphereKey.radius = radius;
        Integer glList = sphereDisplayLists.get(sphereKey);
        if (glList == null) {
            glList = glGenLists(1);
            glNewList(glList, GL_COMPILE);
            sphere.draw(radius, 8, 8);
            glEndList();
            sphereDisplayLists.put(new SphereKey(sphereKey), glList);
        }
        glCallList(glList);
    }

    ////////////////////////////////////////////////////////////////////////////

    // Cache key for cylinder display lists (radius + half height).
    private static class CylinderKey {
        public float radius;
        public float halfHeight;

        public CylinderKey() {
        }

        public CylinderKey(CylinderKey key) {
            radius = key.radius;
            halfHeight = key.halfHeight;
        }

        public void set(float radius, float halfHeight) {
            this.radius = radius;
            this.halfHeight = halfHeight;
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null || !(obj instanceof CylinderKey)) return false;
            CylinderKey other = (CylinderKey) obj;
            if (radius != other.radius) return false;
            if (halfHeight != other.halfHeight) return false;
            return true;
        }

        @Override
        public int hashCode() {
            int hash = 7;
            hash = 23 * hash + Float.floatToIntBits(radius);
            hash = 23 * hash + Float.floatToIntBits(halfHeight);
            return hash;
        }
    }

    private static Map<CylinderKey,Integer> cylinderDisplayLists = new HashMap<CylinderKey,Integer>();
    private static CylinderKey cylinderKey = new CylinderKey();

    public void drawCylinder(float radius, float halfHeight, int upAxis) {
        glPushMatrix();
        // Rotate so the requested axis becomes the GLU cylinder's z axis.
        switch (upAxis) {
            case 0:
                glRotatef(-90f, 0.0f, 1.0f, 0.0f);
                glTranslatef(0.0f, 0.0f, -halfHeight);
                break;
            case 1:
                glRotatef(-90.0f, 1.0f, 0.0f, 0.0f);
                glTranslatef(0.0f, 0.0f, -halfHeight);
                break;
            case 2:
                glTranslatef(0.0f, 0.0f, -halfHeight);
                break;
            default: {
                assert (false);
            }
        }

        // The gluCylinder subroutine draws a cylinder that is oriented along the z axis.
        // The base of the cylinder is placed at z = 0; the top of the cylinder is placed at z=height.
        // Like a sphere, the cylinder is subdivided around the z axis into slices and along the z axis into stacks.

        cylinderKey.set(radius, halfHeight);
        Integer glList = cylinderDisplayLists.get(cylinderKey);
        if (glList == null) {
            glList = glGenLists(1);
            glNewList(glList, GL_COMPILE);
            // Bottom cap, side wall, then top cap (flipped disk).
            disk.setDrawStyle(GLU_FILL);
            disk.setNormals(GLU_SMOOTH);
            disk.draw(0, radius, 15, 10);
            cylinder.setDrawStyle(GLU_FILL);
            cylinder.setNormals(GLU_SMOOTH);
            cylinder.draw(radius, radius, 2f * halfHeight, 15, 10);
            glTranslatef(0f, 0f, 2f * halfHeight);
            glRotatef(-180f, 0f, 1f, 0f);
            disk.draw(0, radius, 15, 10);
            glEndList();
            cylinderDisplayLists.put(new CylinderKey(cylinderKey), glList);
        }
        glCallList(glList);

        glPopMatrix();
    }

    ////////////////////////////////////////////////////////////////////////////

    /** Renders a string at pixel coordinates; no-op when font loading failed. */
    public void drawString(CharSequence s, int x, int y, float red, float green, float blue) {
        if (font != null) {
            FontRender.drawString(font, s, x, y, red, green, blue);
        }
    }

}
|
#!/bin/bash
# created at 21/07/17
# By Satmaxt Developer
# at Sukabumi, West Java, Indonesia
#
# Unattended LAMP installer (Apache2, MySQL, PHP 7.0, phpMyAdmin) with
# optional Webmin and Git/Composer.

if [[ $USER != 'root' ]]; then
    echo "Sorry.. Need root access for launch this script."
    exit
fi

cd /etc/apt
clear
echo "Before start the setup, i'll ask to you about features want to install."
echo -n "Do you want to install webmin? [Y/n]: "
read -r rwebmin
echo -n "Do you want to install Git and Composer? [Y/n]: "
read -r gitcomp

# Swap in the project's curated apt sources (old list kept as sources.txt).
mv sources.list sources.txt
wget -O sources.list "https://raw.githubusercontent.com/satriaajiputra/lamp-autoinstaller/master/src/ubuntu/sources.list"
cd

# update and install some app
apt-get update -y && apt-get upgrade -y
apt-get install pwgen apache2 zip unzip curl vim nano -y
apt-get install build-essential -y

# set password for mysql-server (random 16-character secure password)
root_password=$(pwgen -s 16 1)

# set webserver
a2enmod rewrite
cd /etc/apache2/sites-available
mv 000-default.conf 000-default.bak
wget -O 000-default.conf "https://raw.githubusercontent.com/satriaajiputra/lamp-autoinstaller/master/src/ubuntu/vhostconfig.conf"
service apache2 restart
cd /home
ln -s /var/www/html
cd

# install screenfetch
cd /usr/bin
wget -O screenfetch "https://raw.githubusercontent.com/KittyKatt/screenFetch/master/screenfetch-dev"
chmod +x screenfetch
cd
echo "clear" >> .profile
echo "screenfetch" >> .profile

# set password to mysql (pre-seed debconf so the install is non-interactive)
echo "mysql-server mysql-server/root_password password $root_password" | sudo debconf-set-selections
echo "mysql-server mysql-server/root_password_again password $root_password" | sudo debconf-set-selections
apt-get install mysql-server -y

# Secure-installation equivalent: drop anonymous users, remote root, test db.
# From Bert Van Vreckem <bert.vanvreckem@gmail.com>
mysql --user=root --password="$root_password" <<SETSQL
DELETE FROM mysql.user WHERE User='';
DELETE FROM mysql.user WHERE User='root' AND Host NOT IN ('localhost', '127.0.0.1', '::1');
DROP DATABASE IF EXISTS test;
DELETE FROM mysql.db WHERE Db='test' OR Db='test\\_%';
FLUSH PRIVILEGES;
SETSQL
# end mysql install

# select php
sudo add-apt-repository ppa:ondrej/php -y
apt-get update -y
apt-get install -y php7.0 libapache2-mod-php7.0 php7.0-cli php7.0-common php7.0-mbstring php7.0-gd php7.0-intl php7.0-xml php7.0-mysql php7.0-mcrypt php7.0-zip
echo "<?php phpinfo(); ?>" > /var/www/html/info.php
service apache2 restart
service mysql restart

cd /var/www/html
wget -O phpmyadmin.zip "https://files.phpmyadmin.net/phpMyAdmin/4.7.1/phpMyAdmin-4.7.1-all-languages.zip"
unzip phpmyadmin.zip
mv phpMyAdmin-4.7.1-all-languages phpMyAdmin

# install other softwre
# Fix: the prompts advertise [Y/n] (default yes) but the original only
# accepted a lowercase "y", and the unquoted [ $var == "y" ] test errored on
# empty input. Accept y/Y/empty via a quoted case match instead.
case "$rwebmin" in
    y|Y|"")
        clear
        echo "Installing Webmin..."
        echo "deb http://download.webmin.com/download/repository sarge contrib" > /etc/apt/sources.list.d/webmin.list
        echo "deb http://webmin.mirror.somersettechsolutions.co.uk/repository sarge contrib
" >> /etc/apt/sources.list.d/webmin.list
        wget -q http://www.webmin.com/jcameron-key.asc -O- | sudo apt-key add -
        apt-get update -y
        apt-get install webmin -y
        # turn off ssl of webmin
        sed -i 's/ssl=1/ssl=0/g' /etc/webmin/miniserv.conf
        service webmin restart
        ;;
esac

case "$gitcomp" in
    y|Y|"")
        clear
        echo "Installing Git and Composer..."
        curl -sS https://getcomposer.org/installer | php
        mv composer.phar /usr/local/bin/composer
        apt-get install git -y
        ;;
esac

cd
apt-get update -y
clear

# create log
echo "Script has ben installed!" | tee log.txt
echo "===============" | tee -a log.txt
echo "List installed:" | tee -a log.txt
echo "===============" | tee -a log.txt
echo "1. PHP7.0" | tee -a log.txt
echo "2. Apache2" | tee -a log.txt
echo "3. phpMyadmin" | tee -a log.txt
echo "4. MySQL Server" | tee -a log.txt
echo "5. Screenfetch" | tee -a log.txt
echo "6. Git and Composer" | tee -a log.txt
echo "===============" | tee -a log.txt
echo "MySQL Login:" | tee -a log.txt
echo "===============" | tee -a log.txt
echo "Username: root" | tee -a log.txt
echo "Password: $root_password" | tee -a log.txt
echo "===============" | tee -a log.txt
echo "Look at log.txt file" | tee -a log.txt
|
<filename>frontend/web/resources/preview/190328/1456437721/js/custom.js
(function($){
    "use strict";

    // Preloader: hide the loading overlay once all assets have loaded.
    jQuery(window).on('load', function() {
        jQuery("#status").fadeOut();
        jQuery("#preloader").delay(350).fadeOut("slow");
    });

    // on ready function
    jQuery(document).ready(function($) {

        // Back to Top js
        $(window).scroll(function(){
            if ($(this).scrollTop() > 100) {
                $('#scroll').fadeIn();
            } else {
                $('#scroll').fadeOut();
            }
        });
        $('#scroll').on("click", function(){
            $("html, body").animate({ scrollTop: 0 }, 600);
            return false;
        });

        // Menu show Hide (open/closed state tracked by a simple counter)
        var counter = 0;
        $('.wd_menu_btn').on("click", function(e){
            if( counter == '0') {
                $('.wd_main_menu_wrapper').addClass('wd_main_menu_hide');
                $(this).children().removeAttr('class');
                $(this).children().attr('class','fa fa-close');
                counter++;
            }
            else {
                $('.wd_main_menu_wrapper').removeClass('wd_main_menu_hide');
                $(this).children().removeAttr('class');
                $(this).children().attr('class','fa fa-bars');
                counter--;
            }
        });

        // Menu js for Position fixed
        $(window).scroll(function(){
            var window_top = $(window).scrollTop() + 1;
            if (window_top > 500) {
                $('.wd_header_wrapper').addClass('menu_fixed animated fadeInDown');
            } else {
                $('.wd_header_wrapper').removeClass('menu_fixed animated fadeInDown');
            }
        });

        // Guest Slider Js
        $('.wd_guest_slider .owl-carousel').owlCarousel({
            loop:true,
            margin:45,
            nav:false,
            responsive:{
                0:{
                    items:1
                },
                600:{
                    items:3
                },
                1000:{
                    items:3
                }
            }
        });

        // Testimonial Slider Js
        $('.wd_testimonial_slider .owl-carousel').owlCarousel({
            loop:true,
            margin:0,
            nav:false,
            autoplay:true,
            responsive:{
                0:{
                    items:1
                },
                600:{
                    items:1
                },
                1000:{
                    items:1
                }
            }
        });

        // Family Slider Js
        $('.wd_family_slider .owl-carousel').owlCarousel({
            loop:true,
            margin:10,
            nav:true,
            navText:["<i class='fa fa-angle-left'></i>" , "<i class='fa fa-angle-right'></i>"],
            responsive:{
                0:{
                    items:1
                },
                600:{
                    items:2
                },
                1000:{
                    items:4
                }
            }
        });

        // Gallery Slider js
        $('.wd_gallery_slider .owl-carousel').owlCarousel({
            animateOut: 'fadeOut',
            loop:true,
            margin:10,
            nav:false,
            responsive:{
                0:{
                    items:1
                },
                600:{
                    items:1
                },
                1000:{
                    items:1
                }
            }
        });

        // Magnific Popup js
        $('.popup-gallery').magnificPopup({
            delegate: '.ast_glr_overlay a',
            type: 'image',
            tLoading: 'Loading image #%curr%...',
            mainClass: 'mfp-img-mobile',
            gallery: {
                enabled: true,
                navigateByImgClick: true,
                preload: [0,1] // Will preload 0 - before current, and 1 after the current image
            },
            image: {
                tError: '<a href="%url%">The image #%curr%</a> could not be loaded.',
                titleSrc: function(item) {
                    return item.el.attr('title') + '<small></small>';
                }
            }
        });

        // Contact Form Submition
        $("#wd_submit").on("click", function() {
            var e = $("#uname").val();
            var t = $("#umail").val();
            var r = $("#msg").val();
            var n = $("#unum").val();
            $.ajax({
                type: "POST",
                url: "ajaxmail.php",
                data: {
                    username: e,
                    useremail: t,
                    mesg: r,
                    unum: n
                },
                // Fix: the success parameter was also named "n", shadowing the
                // phone-number value captured above; on failure the form then
                // refilled #unum with the raw server response instead of the
                // user's input. Renamed to "res" so the restore works.
                success: function(res) {
                    var i = res.split("#");
                    if (i[0] == "1") {
                        $("#uname").val("");
                        $("#umail").val("");
                        $("#msg").val("");
                        $("#unum").val("");
                        $("#err").html(i[1]);
                    } else {
                        $("#uname").val(e);
                        $("#umail").val(t);
                        $("#msg").val(r);
                        $("#unum").val(n);
                        $("#err").html(i[1]);
                    }
                }
            });
        });

        // Single page scroll menu
        $('.wd_single_index_menu ul li a').on('click' , function(e){
            $('.wd_single_index_menu ul li').removeClass('active');
            $(this).parent().addClass('active');
            var target = $('[section-scroll='+$(this).attr('href')+']');
            e.preventDefault();
            var targetHeight = target.offset().top-parseInt('83', 10);
            $('html, body').animate({
                scrollTop: targetHeight
            }, 1000);
        });
        $(window).scroll(function() {
            var windscroll = $(window).scrollTop();
            var target = $('.wd_single_index_menu ul li');
            if (windscroll >= 0) {
                $('[section-scroll]').each(function(i) {
                    // 83px offset compensates for the fixed header height.
                    if ($(this).position().top <= windscroll + 83) {
                        target.removeClass('active');
                        target.eq(i).addClass('active');
                    }
                });
            }else{
                target.removeClass('active');
                $('.wd_single_index_menu ul li:first').addClass('active');
            }
        });

        // CountDown Js
        var deadline = 'November 1 2018 11:59:00 GMT-0400';
        function time_remaining(endtime){
            var t = Date.parse(endtime) - Date.parse(new Date());
            var seconds = Math.floor( (t/1000) % 60 );
            var minutes = Math.floor( (t/1000/60) % 60 );
            var hours = Math.floor( (t/(1000*60*60)) % 24 );
            var days = Math.floor( t/(1000*60*60*24) );
            return {'total':t, 'days':days, 'hours':hours, 'minutes':minutes, 'seconds':seconds};
        }
        function run_clock(id,endtime){
            var clock = document.getElementById(id);

            // get spans where our clock numbers are held
            var days_span = clock.querySelector('.days');
            var hours_span = clock.querySelector('.hours');
            var minutes_span = clock.querySelector('.minutes');
            var seconds_span = clock.querySelector('.seconds');

            function update_clock(){
                var t = time_remaining(endtime);

                // update the numbers in each part of the clock
                days_span.innerHTML = t.days;
                hours_span.innerHTML = ('0' + t.hours).slice(-2);
                minutes_span.innerHTML = ('0' + t.minutes).slice(-2);
                seconds_span.innerHTML = ('0' + t.seconds).slice(-2);

                if(t.total<=0){ clearInterval(timeinterval); }
            }
            update_clock();
            var timeinterval = setInterval(update_clock,1000);
        }
        run_clock('clockdiv',deadline);

    });

})();
<reponame>omarefg/d-play-server
const axios = require('axios');
const qs = require('querystring');
const { config } = require('../../config');
class SpotifyAuthLib {
    constructor() {
        // Cached client-credentials token; empty string means "not fetched yet".
        // (Removed a dead `this._instance = null` here — singleton state lives
        // on the class itself, see getInstance().)
        this._accessToken = '';
    }

    // Lazily create and reuse a single shared instance; the instance is
    // stored as a static property on the class.
    static getInstance() {
        if (!this._instance) {
            this._instance = new SpotifyAuthLib();
        }
        return this._instance;
    }

    // Fetch (and cache) a Spotify client-credentials access token.
    // Throws the most specific axios failure object available.
    async getAccessToken() {
        if (!this._accessToken) {
            const axiosConf = {
                headers: {
                    'Content-Type': 'application/x-www-form-urlencoded',
                },
                auth: {
                    username: config.spotifyClientId,
                    // Fix: a redacted credential placeholder here left the file
                    // unparseable; basic-auth password is the client secret.
                    // NOTE(review): assumes the config key is spotifyClientSecret —
                    // confirm against ../../config.
                    password: config.spotifyClientSecret,
                },
            };
            const requestBody = {
                grant_type: 'client_credentials',
            };
            try {
                const { data: { access_token: accessToken } } = await axios.post(
                    'https://accounts.spotify.com/api/token',
                    qs.stringify(requestBody),
                    axiosConf,
                );
                this._accessToken = accessToken;
            } catch (error) {
                // Prefer the HTTP response, then the unanswered request,
                // then the raw error.
                if (error.response) {
                    throw error.response;
                } if (error.request) {
                    throw error.request;
                }
                throw error;
            }
        }
        return this._accessToken;
    }

    // Drop the cached token so the next getAccessToken() re-authenticates.
    restartAccessToken() {
        this._accessToken = '';
    }
}

module.exports = SpotifyAuthLib;
|
let {Transform} = require('stream');

// Transform stream that upper-cases every chunk flowing through it.
const upperCaser = new Transform({
    transform(chunk, encoding, callback) {
        // Passing the result to the callback is equivalent to push() + cb().
        callback(null, chunk.toString().toUpperCase());
    }
});

// Wire stdin through the transformer and out to stdout.
process.stdin.pipe(upperCaser).pipe(process.stdout);
from typing import TypeVar, Protocol, Iterator

# Covariant element type shared by the protocol and its implementation.
T = TypeVar("T", covariant=True)


class SizedIterable(Protocol[T]):
    """Protocol for containers that report a length and can be iterated."""

    def __len__(self) -> int:
        ...

    def __iter__(self) -> Iterator[T]:
        ...


class RestrictedList(SizedIterable[T]):
    """List-like container exposing only element addition, len() and iteration."""

    def __init__(self):
        # Backing storage for the contained elements.
        self.elements = []

    def add_element(self, element: T) -> None:
        """Append one element to the container."""
        self.elements.append(element)

    def __len__(self) -> int:
        """Number of stored elements."""
        return len(self.elements)

    def __iter__(self) -> Iterator[T]:
        """Iterate over stored elements in insertion order."""
        return iter(self.elements)
# Mapping of string keys to consecutive three-element integer lists.
my_dict = {
    f'key{i}': list(range(3 * i - 2, 3 * i + 1))
    for i in range(1, 4)
}
print(my_dict)
#!/bin/bash
set -e
# Basic template create, notifee install, link
# Builds a throwaway React Native app ("notifeedemo"), wires in the Notifee
# package, applies Android multidex/build tweaks, then builds and runs the
# app for iOS (macOS hosts only) and Android (release, then debug).
\rm -fr notifeedemo
echo "Testing react-native current + notifee current"
npx react-native init notifeedemo
cd notifeedemo
# I have problems in my country with the cocoapods CDN sometimes, use github directly
if [ "$(uname -m)" == "arm64" ]; then
echo "arm64 detected, disabling flipper"
sed -i -e 's/use_flipper/#&/' ios/Podfile
sed -i -e 's/flipper_post_install/#&/' ios/Podfile
else
sed -i -e $'s/def add_flipper_pods/source \'https:\/\/github.com\/CocoaPods\/Specs.git\'\\\n\\\ndef add_flipper_pods/' ios/Podfile
fi
# Remove the backup files BSD sed's -i leaves behind.
# NOTE(review): the cleanup pattern here uses "Podfile.??" while the gradle
# cleanups below use "gradle??" -- confirm both actually match the backups
# produced on this platform.
rm -f ios/Podfile.??
# This is the most basic integration
echo "Adding Notifee app package"
yarn add "@notifee/react-native"
# Set the Java application up for multidex (needed for API<21 w/Firebase)
echo "Configuring Android MultiDex for API<21 support - gradle toggle, library dependency, Application object inheritance"
sed -i -e $'s/defaultConfig {/defaultConfig {\\\n        multiDexEnabled true/' android/app/build.gradle
rm -f android/app/build.gradle??
sed -i -e $'s/dependencies {/dependencies {\\\n    implementation "androidx.multidex:multidex:2.0.1"/' android/app/build.gradle
rm -f android/app/build.gradle??
sed -i -e $'s/import android.app.Application;/import androidx.multidex.MultiDexApplication;/' android/app/src/main/java/com/notifeedemo/MainApplication.java
rm -f android/app/src/main/java/com/notifeedemo/MainApplication.java??
sed -i -e $'s/extends Application/extends MultiDexApplication/' android/app/src/main/java/com/notifeedemo/MainApplication.java
rm -f android/app/src/main/java/com/notifeedemo/MainApplication.java??
# Another Java build tweak - or gradle runs out of memory during the build in big projects
echo "Increasing memory available to gradle for android java build"
echo "org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8" >> android/gradle.properties
# In case we have any patches
echo "Running any patches necessary to compile successfully"
cp -rv ../patches .
npx patch-package
# Copy in our demonstrator App.js
echo "Copying demonstrator App.js"
rm ./App.js && cp ../NotifeeApp.js ./App.js
# Run the thing for iOS
if [ "$(uname)" == "Darwin" ]; then
echo "Installing pods and running iOS app"
if [ "$(uname -m)" == "arm64" ]; then
# Apple Silicon: run pod install under Rosetta (x86_64).
echo "Installing pods with prefix arch -arch x86_64"
cd ios && arch -arch x86_64 pod install && cd ..
else
cd ios && pod install --repo-update && cd ..
fi
npx react-native run-ios
# workaround for poorly setup Android SDK environments
USER=`whoami`
echo "sdk.dir=/Users/$USER/Library/Android/sdk" > android/local.properties
fi
echo "Configuring Android release build for ABI splits and code shrinking"
sed -i -e $'s/def enableSeparateBuildPerCPUArchitecture = false/def enableSeparateBuildPerCPUArchitecture = true/' android/app/build.gradle
rm -f android/app/build.gradle??
sed -i -e $'s/def enableProguardInReleaseBuilds = false/def enableProguardInReleaseBuilds = true/' android/app/build.gradle
rm -f android/app/build.gradle??
sed -i -e $'s/universalApk false/universalApk true/' android/app/build.gradle
rm -f android/app/build.gradle??
# Run it for Android (assumes you have an android emulator running)
echo "Running android app"
npx react-native run-android --variant release
# Let it start up, then uninstall it (otherwise ABI-split-generated version codes will prevent debug from installing)
sleep 10
pushd android
./gradlew uninstallRelease
popd
# may or may not be commented out, depending on if have an emulator available
# I run it manually in testing when I have one, comment if you like
npx react-native run-android
import {
RepositoryError,
UnknownRepositoryError,
} from "../../../../domain/repository/RepositoryError"
import { ChangeEventHandler } from "../../../ChangeEventHandler"
import { ILoginCredentialCommandRepository } from "../../../../domain/repository/command/LoginCredential"
import { LoginCredentialEntity } from "../../../../domain/entity/LoginCredential"
import { PrismaClient } from "@prisma/client"
import { prisma } from "../client"
export class LoginCredentialCommandRepository
extends ChangeEventHandler
implements ILoginCredentialCommandRepository
{
private _prisma: PrismaClient
constructor(transaction?: PrismaClient) {
super(LoginCredentialCommandRepository)
if (transaction) {
this._prisma = transaction
} else {
this._prisma = prisma
}
}
async add(credential: LoginCredentialEntity): Promise<boolean> {
if (credential instanceof LoginCredentialEntity !== true) {
throw new RepositoryError("`credential` must be an instance of LoginCredentialEntity")
}
try {
await this._prisma.loginCredential.create({
data: {
userId: credential.userId,
passwordHash: credential.passwordHash,
},
})
return true
} catch (error) {
if (error instanceof Error) {
throw new RepositoryError(
error.message,
error.stack,
"LoginCredentialCommandRepository::add"
)
} else {
throw new UnknownRepositoryError()
}
}
}
async delete(credential: LoginCredentialEntity): Promise<boolean> {
if (credential instanceof LoginCredentialEntity !== true) {
throw new RepositoryError("`credential` must be an instance of LoginCredentialEntity")
}
try {
await this._prisma.loginCredential.delete({
where: {
userId: credential.userId,
},
})
return true
} catch (error) {
if (error instanceof Error) {
throw new RepositoryError(
error.message,
error.stack,
"LoginCredentialCommandRepository::delete"
)
} else {
throw new UnknownRepositoryError()
}
}
}
async update(credential: LoginCredentialEntity): Promise<boolean> {
if (credential instanceof LoginCredentialEntity !== true) {
throw new RepositoryError("`credential` must be an instance of LoginCredentialEntity")
}
try {
await this._prisma.loginCredential.update({
where: {
userId: credential.userId,
},
data: {
passwordHash: <PASSWORD>,
},
})
return true
} catch (error) {
if (error instanceof Error) {
throw new RepositoryError(
error.message,
error.stack,
"LoginCredentialCommandRepository::update"
)
} else {
throw new UnknownRepositoryError()
}
}
}
}
|
#!/bin/bash
# Builds elmer-package for macOS inside a throwaway cabal sandbox and copies
# the resulting binary into ./macos-bin/.
# Fix: pushd/popd are bash builtins, not POSIX sh, so the shebang must be
# bash (the previous #!/bin/sh fails on systems where sh is not bash).
set -xe
mkdir macos-tmp
cp -R ./elm-compiler ./macos-tmp/
cp -R ./elm-package ./macos-tmp/elmer-package
pushd macos-tmp
cabal sandbox init
# Build elmer-package against the local elm-compiler source.
cabal sandbox add-source ./elm-compiler
cabal install -j --only-dependencies --ghc-options="-w" ./elmer-package
cabal install -j ./elmer-package
popd
cp ./macos-tmp/.cabal-sandbox/bin/elmer-package ./macos-bin/
rm -rf ./macos-tmp
|
const path = require('path');
const helpers = require('yeoman-test');
const assert = require('yeoman-assert');

// Spec for the app generator: runs it once with a fixed set of prompts and
// checks the generated project layout.
describe('App generator', function () {
  describe('generate a project', function () {
    before(function () {
      const generatorDir = path.join(__dirname, '../generators/app');
      const prompts = {
        appname: 'test-app',
        group: 'com.logicdee.sample',
        version: '0.0.1-SNAPSHOT',
        port: '9080',
        dbname: 'test',
        scaffold: true,
        entity: 'Book',
        addField: false
      };
      // Stub out the entity sub-generator so only the app generator runs.
      return helpers.run(generatorDir)
        .withArguments(['Book'])
        .withPrompts(prompts)
        .withGenerators([
          [helpers.createDummyGenerator(), 'spring-reactive-microservice:entity']
        ]);
    });

    it('generated common files', function () {
      assert.file(['build.gradle', 'settings.gradle', 'gradlew', 'gradlew.bat', 'Dockerfile', 'README.md', '.gitignore']);
      assert.file(['gradle/wrapper/gradle-wrapper.jar', 'gradle/wrapper/gradle-wrapper.properties']);
    });

    it('generated java source files', function () {
      assert.file(['src/main/java/com/logicdee/sample/testapp/Application.java']);
      assert.file(['src/main/java/com/logicdee/sample/testapp/config/AuditingConfig.java']);
      assert.file(['src/main/java/com/logicdee/sample/testapp/config/SwaggerConfig.java']);
    });

    it('generated resource files', function () {
      assert.file(['src/main/resources/application.yml']);
      assert.file(['src/main/resources/logback-spring.xml']);
    });
  });
});
#!/bin/bash
# execute_scripts.sh -- verify and run signed scripts.
#
# For every *.sh and *.py file in the given directory, verify it against the
# given public key via verify_file.sh; each verified script is run in the
# background and deleted (together with its .sig file) when it finishes.
HELPTEXT="Usage: execute_scripts.sh <public_key> <path_to_scripts>"
PUBLIC_KEY=$1
SCRIPT_PATH=$2
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Both arguments are required; either missing aborts with usage text.
if [ -z "$PUBLIC_KEY" ] || [ -z "$SCRIPT_PATH" ]; then
    echo "$HELPTEXT"
    logger "ATC-$$: Failed to start due to missing arguments"
    exit 1
fi

# Execute bash scripts.
# Expansions are quoted throughout so paths with spaces don't break.
for f in "$SCRIPT_PATH"/*.sh
do
    # If none are found the glob stays unexpanded, so $f == *.sh
    if [ "$(basename "$f")" == "*.sh" ]; then
        logger "ATC-$$: No shell scripts found."
        break
    fi
    # Fix: test the verify command's exit status directly instead of the
    # stringly "$?" != 0 comparison.
    if ! "$SCRIPT_DIR/verify_file.sh" "$PUBLIC_KEY" "$f"; then
        logger "ATC-$$: ERROR: Could not verify $f -- skipping execution"
    else
        logger "ATC-$$: Running $f in background..."
        (bash "$f"; logger "ATC-$$: Deleting $f"; rm -f "$f"; rm -f "$f.sig") &
    fi
done

# Execute python scripts
for f in "$SCRIPT_PATH"/*.py
do
    # If none are found the glob stays unexpanded, so $f == *.py
    if [ "$(basename "$f")" == "*.py" ]; then
        logger "ATC-$$: No python files found."
        break
    fi
    echo "$f"
    # Run the verification script against the file
    if ! "$SCRIPT_DIR/verify_file.sh" "$PUBLIC_KEY" "$f"; then
        logger "ATC-$$: ERROR: Could not verify $f -- skipping execution"
    else
        logger "ATC-$$: Running $f in background..."
        (python "$f"; logger "ATC-$$: Deleting $f"; rm -f "$f"; rm -f "$f.sig") &
    fi
done
|
const https = require('https'); // NOTE(review): unused -- fetch() is used below; consider removing.
const url = 'https://api.example.com/products';

// Fetch the product list, sort it by price (most expensive first) and print
// the top ten entries.
fetch(url)
  .then(response => response.json())
  .then(data => {
    // Sort in descending order based on price.
    // Assumes price is numeric -- consistent with the $${item.price} output
    // below; the original three-way comparator reduced to this for numbers.
    data.sort((a, b) => b.price - a.price);

    // get the top 10 most expensive items
    let topTen = data.slice(0, 10);

    // print the results
    console.log('Top 10 most expensive items:');
    topTen.forEach((item) => {
      console.log(`- ${item.name} - $${item.price}`);
    });
  })
  // Fix: without a catch, any network or JSON-parse failure becomes an
  // unhandled promise rejection.
  .catch(error => {
    console.error(`Failed to fetch products: ${error.message}`);
  });
package io.opensphere.server.serverprovider.http.requestors;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Map;
import java.util.function.Function;
import com.bitsys.common.http.client.HttpClient;
import com.bitsys.common.http.entity.MultipartEntity;
import com.bitsys.common.http.entity.content.FileBodyPart;
import com.bitsys.common.http.entity.content.StringBodyPart;
import com.bitsys.common.http.header.ContentType;
import com.bitsys.common.http.message.HttpRequest;
import com.bitsys.common.http.message.HttpRequestFactory;
import io.opensphere.core.NetworkConfigurationManager;
import io.opensphere.core.event.EventManager;
import io.opensphere.core.server.ResponseValues;
import io.opensphere.core.util.io.CancellableInputStream;
import io.opensphere.server.serverprovider.http.header.HeaderValues;
/**
 * Posts a file to the server as a multipart request, optionally preceded by
 * string metadata parts, and follows a redirect when the entity can be
 * re-sent.
 */
public class FilePostRequestorImpl extends BaseRequestor implements FilePostRequestor
{
    /**
     * Constructs a new file post requestor.
     *
     * @param client The HttpClient object to use to communicate with the
     *            server.
     * @param headerValues Contains the header values.
     * @param eventManager The manager through which events are sent.
     * @param networkConfigurationManager The network configuration manager.
     */
    public FilePostRequestorImpl(HttpClient client, HeaderValues headerValues, EventManager eventManager, NetworkConfigurationManager networkConfigurationManager)
    {
        super(client, headerValues, eventManager, networkConfigurationManager);
    }

    @Override
    public CancellableInputStream postFileToServer(URL postToURL, Map<String, String> metaDataParts, File fileToPost,
            ResponseValues response) throws IOException, URISyntaxException
    {
        MultipartEntity entity = new MultipartEntity();

        // Each metadata entry becomes its own plain-text part, added before
        // the file part.
        for (Map.Entry<String, String> entry : metaDataParts.entrySet())
        {
            entity.addPart(entry.getKey(), new StringBodyPart(entry.getValue(), ContentType.TEXT_PLAIN));
        }
        entity.addPart(fileToPost.getName(), new FileBodyPart(fileToPost));

        return postEntity(postToURL, entity, response);
    }

    @Override
    public CancellableInputStream postFileToServer(URL postToURL, File fileToPost,
            ResponseValues response) throws IOException, URISyntaxException
    {
        MultipartEntity entity = new MultipartEntity();
        entity.addPart(fileToPost.getName(), new FileBodyPart(fileToPost));

        return postEntity(postToURL, entity, response);
    }

    /**
     * Shared post/redirect logic for both overloads (previously duplicated):
     * executes the multipart POST and, on a redirect, re-issues the request
     * against the new URL when the entity is repeatable. A non-repeatable
     * entity cannot be sent again, so null is returned to abort the redirect.
     *
     * @param postToURL The url to post to.
     * @param entity The multipart entity to send.
     * @param response The response values from the request.
     * @return The stream of the response body.
     * @throws IOException If an error occurs communicating with the server.
     * @throws URISyntaxException If the URL could not be converted to a URI.
     */
    private CancellableInputStream postEntity(URL postToURL, final MultipartEntity entity, ResponseValues response)
            throws IOException, URISyntaxException
    {
        HttpRequest request = HttpRequestFactory.getInstance().post(postToURL.toURI(), entity);
        CancellableInputStream responseStream = executeRequest(request, response);
        return handleRedirect(responseStream, response,
                newUrlString -> entity.isRepeatable() ? HttpRequestFactory.getInstance().post(URI.create(newUrlString), entity) : null);
    }
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -ex

# R binary to use; override by exporting R_BIN.
: ${R_BIN:=R}

source_dir=${1}/r

pushd ${source_dir}

# Point the dynamic linker at the pre-built Arrow C++ libraries unless the
# build is configured to discover them via pkg-config.
if [ "$ARROW_USE_PKG_CONFIG" != "false" ]; then
export LD_LIBRARY_PATH=${ARROW_HOME}/lib:${LD_LIBRARY_PATH}
export R_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
fi
# Custom compiler flags would otherwise trip R CMD check's flag inspection.
if [ "$ARROW_R_CXXFLAGS" != "" ]; then
export _R_CHECK_COMPILATION_FLAGS_=FALSE
fi
export TEST_R_WITH_ARROW=TRUE
# Loosen CRAN-style check restrictions that don't apply in CI.
export _R_CHECK_TESTS_NLINES_=0
export _R_CHECK_CRAN_INCOMING_REMOTE_=FALSE
export _R_CHECK_LIMIT_CORES_=FALSE
export VERSION=$(grep ^Version DESCRIPTION | sed s/Version:\ //)
# Make sure we aren't writing to the home dir (CRAN _hates_ this but there is no official check)
BEFORE=$(ls -alh ~/)

${R_BIN} -e "rcmdcheck::rcmdcheck(build_args = '--no-build-vignettes', args = c('--no-manual', '--as-cran', '--ignore-vignettes', '--run-donttest'), error_on = 'warning', check_dir = 'check')"

# Fail the job if the check touched the home directory.
AFTER=$(ls -alh ~/)
if [ "$BEFORE" != "$AFTER" ]; then
ls -alh ~/.cmake/packages
exit 1
fi
popd
|
class DevelopmentConfig:
    """Configuration for local development."""
    SECRET_KEY = 'dev_secret'


class TestingConfig:
    """Configuration for running the test suite."""
    SECRET_KEY = 'test_secret'


class ProductionConfig:
    """Configuration for production deployments."""
    SECRET_KEY = 'prod_secret'
    DEBUG = False


# Registry mapping short environment names to config classes.
config_by_name = dict(
    dev=DevelopmentConfig,
    test=TestingConfig,
    prod=ProductionConfig
)

# Module-level debug flag. Fix: defined up front so it always exists;
# previously it was created only as a side effect of get_config_key(), so
# reading DEBUG before the first call raised NameError.
DEBUG = False


def get_config_key(environment: str) -> str:
    """Return the SECRET_KEY for `environment` ('dev', 'test' or 'prod').

    Side effect: updates the module-level DEBUG flag (True only for 'dev').
    Raises ValueError for an unknown environment name.
    """
    global DEBUG
    config_class = config_by_name.get(environment)
    if config_class is None:
        raise ValueError("Invalid environment name")
    DEBUG = (environment == 'dev')
    return config_class.SECRET_KEY


# Test the function
print(get_config_key('dev'))  # Output: 'dev_secret'
print(DEBUG)  # Output: True
print(get_config_key('test'))  # Output: 'test_secret'
print(DEBUG)  # Output: False
import socket

# Upper-casing UDP echo server: receives a datagram, converts its UTF-8 text
# to uppercase, and sends the result back to the sender. Runs forever.

# Create a UDP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('localhost', 8888))

while True:
    # 1024 bytes max per datagram; longer messages are truncated.
    data, addr = sock.recvfrom(1024)  # Receive data from the client
    message = data.decode('utf-8')  # Decode the received data
    response = message.upper()  # Process the message (convert to uppercase)
    sock.sendto(response.encode('utf-8'), addr)  # Send the processed message back to the client
#!/bin/bash
# Packages the fedora-releng-dash sources into dist/fedora-releng-dash-$VERSION.tar.gz,
# excluding the build and dist directories themselves.

# Enable negative glob
shopt -s extglob

VERSION=1.0

rm -rf build
mkdir -p build/fedora-releng-dash-$VERSION
mkdir -p dist
# !(build|dist) copies everything except the build/dist output directories.
cp -r !(build|dist) build/fedora-releng-dash-$VERSION/.
# Belt-and-braces: drop any build/dist dirs that slipped into the staging copy.
rm -rf build/fedora-releng-dash-$VERSION/{build,dist}
pushd build
tar -czvf ../dist/fedora-releng-dash-$VERSION.tar.gz fedora-releng-dash-$VERSION
popd
echo Wrote dist/fedora-releng-dash-$VERSION.tar.gz
|
#!/bin/bash -e
# Wrapper that sources the pipeline-provided stack environment and invokes
# the common log collector for an rhosp deployment.
set -o pipefail
# Enable command tracing when DEBUG=true (case-insensitive).
[ "${DEBUG,,}" == "true" ] && set -x

my_file="$(readlink -e "$0")"
my_dir="$(dirname $my_file)"
source "$my_dir/definitions"

# stackrc file is prepared by pipeline based on
# previous job's artifacts
export stackrc_file=${stackrc_file:-"deps.${JOB_NAME}.${JOB_RND}.env"}
source $WORKSPACE/$stackrc_file

${my_dir}/../common/collect_logs.sh rhosp
|
import tensorflow as tf

# Feed-forward binary classifier: 30 input features -> widening ReLU stack
# -> single sigmoid output.
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(32, input_shape=(30,), activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(128, activation='relu'))
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))

# Adam optimizer with binary cross-entropy loss, tracking accuracy.
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
import style from './item.scss';
import React, { Component } from 'react';
import {Link} from 'react-router-dom';
import Avatar from '../users/avatar.js';
class Item extends Component {
constructor(props) {
super(props);
this.state = {
group: props.group,
to: props.to
}
}
render() {
let group = this.state.group;
let user = group.user;
let membersView = group.users_count>1 ? <span className={style.count}>+{group.users_count-1 }</span> : '';
let link = `/groups/${group.group_id}`;
if (this.state.to == 'messages') {
link = `/groups/${group.group_id}/messages`
}
return (
<div className={style.group}>
<div className={style.head}>
<img src={group.cover_url} className={style.cover} />
<div className={style.title}>
<h2 className={style.name}>
<Link to={link}>{group.name}</Link>
</h2>
<div className={style.profile}>
<Avatar user={user} class="small" />
By {user.nickname}
{membersView}
</div>
</div>
</div>
</div>
)
}
}
export default Item;
|
#!/bin/sh
# Run the test_array_strings test module via Python's unittest runner.
python -m unittest test_array_strings
|
#!/bin/bash
# Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
# Force CPU-only execution by hiding all GPUs from the process.
export CUDA_VISIBLE_DEVICES=

# Download and unpack the released model package for this tag.
code_url="https://github.com/rapmetal/fedlearner_models/releases/download/${RELEASE_TAG}/${RELEASE_PKG}.zip"
wget $code_url
unzip "${RELEASE_PKG}.zip"

# Launch the role-specific entrypoint ($ROLE selects the script), wiring up
# the cluster spec and the local/peer/tf addresses from the pod environment.
python "${RELEASE_PKG}/${ROLE}.py" \
    --cluster-spec=$CLUSTER_SPEC \
    --tf-addr=$POD_IP:50052 \
    --local-addr=$POD_IP:50051 \
    --worker-rank=$WORKER_RANK \
    --peer-addr=$PEER_ADDR
|
from pulp import LpProblem, LpVariable, LpMinimize, lpSum, value

# Define the task durations and cost per minute
task_durations = [10, 15, 20, 8]  # durations in minutes
cost_per_minute = [5, 7, 6, 4]  # cost per minute for each task

# Linear program: choose non-negative task allocations minimizing total cost.
problem = LpProblem("TaskScheduling", LpMinimize)

# One continuous decision variable per task.
tasks = [
    LpVariable(f"task_{i}", lowBound=0, cat='Continuous')
    for i in range(len(task_durations))
]

# Objective: minimize the total cost per minute across all tasks.
problem += lpSum(t * c for t, c in zip(tasks, cost_per_minute)), "TotalCostPerMinute"

# Constraint: total weighted duration may not exceed 100.
problem += lpSum(t * d for t, d in zip(tasks, task_durations)) <= 100, "TotalDurationConstraint"

problem.solve()

# Report the optimal allocation and objective value.
print("Optimal Solution:")
for i, t in enumerate(tasks):
    print(f"Task {i+1}: {value(t)} minutes")
print(f"Minimized Total Cost Per Minute: {value(problem.objective)}")
import axios from 'axios';
// Action type constants, namespaced under 'privateURL/' to avoid collisions
// with other reducers.
const CREATE = 'privateURL/CREATE';
const LOAD = 'privateURL/LOAD';
const DELETE = 'privateURL/DELETE';
const CLEAR = 'privateURL/CLEAR';
const PERSIST_LOCALSTORAGE = 'privateURL/PERSIST_LOCALSTORAGE';
const TOGGLE_ERROR = 'privateURL/TOGGLE_ERROR';
const TOGGLE_LOADING = 'privateURL/TOGGLE_LOADING';

// Initial slice state: no URLs loaded, no error, not loading.
const initialState = {
  hashes: [],
  error: null,
  loading: false
};
// Thunk: push hashes kept in localStorage to the server in one batch, clear
// the local copy on success, and merge the created URLs into the store.
export const persistLocalStorage = hashes => async dispatch => {
  dispatch({ type: TOGGLE_ERROR }); // no payload clears any previous error
  dispatch({ type: TOGGLE_LOADING });
  try {
    const res = await axios.post('/api/urls/batch', { hashes });
    if (res.status >= 400) {
      throw new Error(res.message);
    }
    localStorage.setItem('hashes', '[]');
    dispatch({ type: PERSIST_LOCALSTORAGE, payload: res.data.newURLs });
  } catch (error) {
    // Fix: `error.response` only exists on axios HTTP errors; guard it so
    // network failures / plain Errors don't crash the handler.
    dispatch({ type: TOGGLE_ERROR, payload: error.response ? error.response.data : error.message });
  } finally {
    dispatch({ type: TOGGLE_LOADING });
  }
};
// Thunk: create a single shortened URL for `originalURL` and prepend the
// result to the store.
export const createPrivateURL = originalURL => async dispatch => {
  dispatch({ type: TOGGLE_ERROR }); // no payload clears any previous error
  dispatch({ type: TOGGLE_LOADING });
  try {
    const res = await axios.post('/api/urls', { originalURL });
    if (res.status >= 400) {
      throw new Error(res.message);
    }
    dispatch({ type: CREATE, payload: res.data.newURL });
  } catch (error) {
    // Fix: `error.response` only exists on axios HTTP errors; guard it so
    // network failures / plain Errors don't crash the handler.
    dispatch({ type: TOGGLE_ERROR, payload: error.response ? error.response.data : error.message });
  } finally {
    dispatch({ type: TOGGLE_LOADING });
  }
};
// Thunk: load all URLs belonging to `userId` and append them to the store.
export const loadPrivateURL = userId => async dispatch => {
  dispatch({ type: TOGGLE_ERROR }); // no payload clears any previous error
  dispatch({ type: TOGGLE_LOADING });
  try {
    const res = await axios.get(`/api/users/${userId}/urls`);
    if (res.status >= 400) {
      throw new Error(res.message);
    }
    dispatch({ type: LOAD, payload: res.data });
  } catch (error) {
    // Fix: `error.response` only exists on axios HTTP errors; guard it so
    // network failures / plain Errors don't crash the handler.
    dispatch({ type: TOGGLE_ERROR, payload: error.response ? error.response.data : error.message });
  } finally {
    dispatch({ type: TOGGLE_LOADING });
  }
};
// Thunk: remove all URLs from the store (state only; no server call).
export const clearPrivateURL = () => dispatch => {
  dispatch({ type: CLEAR });
};
// Thunk: delete the URL with `urlId` on the server, then drop it from the
// store.
export const deletePrivateURL = urlId => async dispatch => {
  dispatch({ type: TOGGLE_ERROR }); // no payload clears any previous error
  dispatch({ type: TOGGLE_LOADING });
  try {
    const res = await axios.delete(`/api/urls/${urlId}`);
    if (res.status >= 400) {
      throw new Error(res.message);
    }
    dispatch({ type: DELETE, payload: urlId });
  } catch (error) {
    // Fix: `error.response` only exists on axios HTTP errors; guard it so
    // network failures / plain Errors don't crash the handler.
    dispatch({ type: TOGGLE_ERROR, payload: error.response ? error.response.data : error.message });
  } finally {
    dispatch({ type: TOGGLE_LOADING });
  }
};
// Reducer for the private-URL slice: list loading, creation, deletion,
// localStorage persistence, plus error/loading toggles.
const userURLReducer = (state = initialState, action) => {
  switch (action.type) {
    case LOAD:
      // Freshly loaded URLs go after anything already in the store.
      return { ...state, hashes: [...state.hashes, ...action.payload] };
    case CREATE:
      // A newly created URL goes to the front of the list.
      return { ...state, hashes: [action.payload, ...state.hashes] };
    case PERSIST_LOCALSTORAGE:
      // URLs migrated from localStorage are prepended as a group.
      return { ...state, hashes: [...action.payload, ...state.hashes] };
    case DELETE:
      return { ...state, hashes: state.hashes.filter(hash => hash.id !== action.payload) };
    case TOGGLE_ERROR:
      // A payload sets the error; dispatching without one clears it.
      return { ...state, error: action.payload ? action.payload : null };
    case CLEAR:
      return { ...state, hashes: [] };
    case TOGGLE_LOADING:
      return { ...state, loading: !state.loading };
    default:
      return state;
  }
};

export default userURLReducer;
|
<reponame>LiuFang07/bk-cmdb
/*
* Tencent is pleased to support the open source community by making 蓝鲸 available.
* Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package service
import (
"encoding/json"
"net/http"
"github.com/emicklei/go-restful"
"configcenter/src/common"
"configcenter/src/common/blog"
"configcenter/src/common/metadata"
)
// Find decodes a SynchronizeFindInfoParameter from the request body, runs
// the find through the service logic layer, and writes the matching data
// wrapped in a QueryConditionResult.
func (s *Service) Find(req *restful.Request, resp *restful.Response) {
	srvData := s.newSrvComm(req.Request.Header)
	input := &metadata.SynchronizeFindInfoParameter{}
	// Malformed JSON is a client error: respond 400 with the common
	// unmarshal error code.
	if err := json.NewDecoder(req.Request.Body).Decode(input); err != nil {
		blog.Errorf("FindInstance , but decode body failed, err: %v,rid:%s", err, srvData.rid)
		resp.WriteError(http.StatusBadRequest, &metadata.RespError{Msg: srvData.ccErr.Error(common.CCErrCommJSONUnmarshalFailed)})
		return
	}

	data, err := srvData.lgc.Find(srvData.ctx, input)
	if err != nil {
		blog.Errorf("FindInstance error. error: %s,input:%#v,rid:%s", err.Error(), input, srvData.rid)
		resp.WriteError(http.StatusInternalServerError, &metadata.RespError{Msg: err})
		return
	}
	resp.WriteEntity(metadata.QueryConditionResult{
		BaseResp: metadata.SuccessBaseResp,
		Data:     *data,
	})
}
// SetIdentifierFlag set cmdb synchronize identifier flag.
// Decodes a SetIdenifierFlag payload (type name's spelling comes from the
// metadata package) and forwards it to the core synchronize service,
// writing the core service's response back to the caller.
func (s *Service) SetIdentifierFlag(req *restful.Request, resp *restful.Response) {
	srvData := s.newSrvComm(req.Request.Header)
	input := &metadata.SetIdenifierFlag{}
	// Malformed JSON is a client error: respond 400 with the common
	// unmarshal error code.
	if err := json.NewDecoder(req.Request.Body).Decode(input); err != nil {
		blog.Errorf("SetIdentifierFlag , but decode body failed, err: %v,rid:%s", err, srvData.rid)
		resp.WriteError(http.StatusBadRequest, &metadata.RespError{Msg: srvData.ccErr.Error(common.CCErrCommJSONUnmarshalFailed)})
		return
	}

	data, err := srvData.lgc.CoreAPI.CoreService().Synchronize().SetIdentifierFlag(srvData.ctx, srvData.header, input)
	if err != nil {
		blog.Errorf("SetIdentifierFlag error. error: %s,input:%#v,rid:%s", err.Error(), input, srvData.rid)
		resp.WriteError(http.StatusInternalServerError, &metadata.RespError{Msg: err})
		return
	}
	resp.WriteEntity(data)
}
|
package net.blay09.mods.cookingforblockheads.container.slot;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.blay09.mods.cookingforblockheads.client.ClientProxy;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.util.IIcon;
/**
 * Inventory slot for a single oven tool. Holds at most one item and renders
 * a tool-specific background icon when empty.
 */
public class SlotOvenTool extends Slot {

    /** Index into ClientProxy.ovenToolIcons selecting the background icon. */
    private final int iconIndex;

    /**
     * @param inventory the backing inventory
     * @param id the slot index within the inventory
     * @param x the slot's x position in the GUI
     * @param y the slot's y position in the GUI
     * @param iconIndex index of the background icon to show when empty
     */
    public SlotOvenTool(IInventory inventory, int id, int x, int y, int iconIndex) {
        super(inventory, id, x, y);
        this.iconIndex = iconIndex;
    }

    /** Oven tool slots accept only one item at a time. */
    @Override
    public int getSlotStackLimit() {
        return 1;
    }

    /** Client-only: the ghost icon drawn in the empty slot. */
    @Override
    @SideOnly(Side.CLIENT)
    public IIcon getBackgroundIconIndex() {
        return ClientProxy.ovenToolIcons[iconIndex];
    }
}
|
require 'awl_tags_twitter/version'

# Smoke test: the gem exposes a VERSION string constant.
RSpec.describe AwlTagsTwitter do
  context '#Version' do
    it 'has a version' do
      expect(AwlTagsTwitter::VERSION).to be_kind_of(String)
    end
  end
end
|
#!/usr/bin/env bats
load helpers
# Remove the per-run update JSON fixture, then tear down the busybox bundle
# created by setup().
function teardown() {
	rm -f "$BATS_RUN_TMPDIR"/runc-cgroups-integration-test.json
	teardown_bundle
}
# Prepare a busybox bundle with a cgroups path and known initial resource
# limits (memory, cpu, pids) that the tests below assert against and revert to.
function setup() {
	setup_busybox
	set_cgroups_path

	# Set some initial known values
	update_config ' .linux.resources.memory |= {"limit": 33554432, "reservation": 25165824}
		| .linux.resources.cpu |= {"shares": 100, "quota": 500000, "period": 1000000}
		| .linux.resources.pids |= {"limit": 20}'
}
# Tests whatever limits are (more or less) common between cgroup
# v1 and v2: memory/swap, pids, and cpuset.
@test "update cgroup v1/v2 common limits" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
requires cgroups_memory cgroups_pids cgroups_cpuset
init_cgroup_paths

# run a few busyboxes detached
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]

# Set a few variables to make the code below work for both v1 and v2
# (v1 and v2 use different knob file names; systemd property names also
# differ, with some v1 knobs having no systemd equivalent at all).
case $CGROUP_UNIFIED in
no)
MEM_LIMIT="memory.limit_in_bytes"
SD_MEM_LIMIT="MemoryLimit"
MEM_RESERVE="memory.soft_limit_in_bytes"
SD_MEM_RESERVE="unsupported"
MEM_SWAP="memory.memsw.limit_in_bytes"
SD_MEM_SWAP="unsupported"
SYSTEM_MEM=$(cat "${CGROUP_MEMORY_BASE_PATH}/${MEM_LIMIT}")
HAVE_SWAP="no"
if [ -f "${CGROUP_MEMORY_BASE_PATH}/${MEM_SWAP}" ]; then
HAVE_SWAP="yes"
fi
;;
yes)
MEM_LIMIT="memory.max"
SD_MEM_LIMIT="MemoryMax"
MEM_RESERVE="memory.low"
SD_MEM_RESERVE="MemoryLow"
MEM_SWAP="memory.swap.max"
SD_MEM_SWAP="MemorySwapMax"
SYSTEM_MEM="max"
HAVE_SWAP="yes"
;;
esac
# systemd < 227 reports "unlimited" as UINT64_MAX rather than "infinity".
SD_UNLIMITED="infinity"
SD_VERSION=$(systemctl --version | awk '{print $2; exit}')
if [ "$SD_VERSION" -lt 227 ]; then
SD_UNLIMITED="18446744073709551615"
fi

# check that initial values were properly set
check_cgroup_value $MEM_LIMIT 33554432
check_systemd_value $SD_MEM_LIMIT 33554432

check_cgroup_value $MEM_RESERVE 25165824
check_systemd_value $SD_MEM_RESERVE 25165824

check_cgroup_value "pids.max" 20
check_systemd_value "TasksMax" 20

# update cpuset if possible (i.e. we're running on a multicore cpu)
cpu_count=$(grep -c '^processor' /proc/cpuinfo)
if [ "$cpu_count" -gt 1 ]; then
runc update test_update --cpuset-cpus "1"
[ "$status" -eq 0 ]
check_cgroup_value "cpuset.cpus" 1
fi

# update memory limit
runc update test_update --memory 67108864
[ "$status" -eq 0 ]
check_cgroup_value $MEM_LIMIT 67108864
check_systemd_value $SD_MEM_LIMIT 67108864

runc update test_update --memory 50M
[ "$status" -eq 0 ]
check_cgroup_value $MEM_LIMIT 52428800
check_systemd_value $SD_MEM_LIMIT 52428800

# update memory soft limit
runc update test_update --memory-reservation 33554432
[ "$status" -eq 0 ]
check_cgroup_value "$MEM_RESERVE" 33554432
check_systemd_value "$SD_MEM_RESERVE" 33554432

# Run swap memory tests if swap is available
if [ "$HAVE_SWAP" = "yes" ]; then
# try to remove memory swap limit
runc update test_update --memory-swap -1
[ "$status" -eq 0 ]
check_cgroup_value "$MEM_SWAP" $SYSTEM_MEM
check_systemd_value "$SD_MEM_SWAP" $SD_UNLIMITED

# update memory swap
if [ "$CGROUP_UNIFIED" = "yes" ]; then
# for cgroupv2, memory and swap can only be set together
runc update test_update --memory 52428800 --memory-swap 96468992
[ "$status" -eq 0 ]
# for cgroupv2, swap is a separate limit (it does not include mem)
check_cgroup_value "$MEM_SWAP" $((96468992 - 52428800))
check_systemd_value "$SD_MEM_SWAP" $((96468992 - 52428800))
else
runc update test_update --memory-swap 96468992
[ "$status" -eq 0 ]
check_cgroup_value "$MEM_SWAP" 96468992
check_systemd_value "$SD_MEM_SWAP" 96468992
fi
fi

# try to remove memory limit
runc update test_update --memory -1
[ "$status" -eq 0 ]

# check memory limit is gone
check_cgroup_value $MEM_LIMIT $SYSTEM_MEM
check_systemd_value $SD_MEM_LIMIT $SD_UNLIMITED

# check swap memory limited is gone
if [ "$HAVE_SWAP" = "yes" ]; then
check_cgroup_value $MEM_SWAP $SYSTEM_MEM
check_systemd_value "$SD_MEM_SWAP" $SD_UNLIMITED
fi

# update pids limit
runc update test_update --pids-limit 10
[ "$status" -eq 0 ]
check_cgroup_value "pids.max" 10
check_systemd_value "TasksMax" 10

# unlimited
runc update test_update --pids-limit -1
[ "$status" -eq 0 ]
check_cgroup_value "pids.max" max
check_systemd_value "TasksMax" $SD_UNLIMITED

# Revert to the test initial value via json on stdin
runc update -r - test_update <<EOF
{
"memory": {
"limit": 33554432,
"reservation": 25165824
},
"cpu": {
"shares": 100,
"quota": 500000,
"period": 1000000,
"cpus": "0"
},
"pids": {
"limit": 20
}
}
EOF
[ "$status" -eq 0 ]
check_cgroup_value "cpuset.cpus" 0

check_cgroup_value $MEM_LIMIT 33554432
check_systemd_value $SD_MEM_LIMIT 33554432

check_cgroup_value $MEM_RESERVE 25165824
check_systemd_value $SD_MEM_RESERVE 25165824

check_cgroup_value "pids.max" 20
check_systemd_value "TasksMax" 20

# redo all the changes at once
runc update test_update \
--cpu-period 900000 --cpu-quota 600000 --cpu-share 200 \
--memory 67108864 --memory-reservation 33554432 \
--pids-limit 10
[ "$status" -eq 0 ]
check_cgroup_value $MEM_LIMIT 67108864
check_systemd_value $SD_MEM_LIMIT 67108864

check_cgroup_value $MEM_RESERVE 33554432
check_systemd_value $SD_MEM_RESERVE 33554432

check_cgroup_value "pids.max" 10
check_systemd_value "TasksMax" 10

# reset to initial test value via json file
cat <<EOF >"$BATS_RUN_TMPDIR"/runc-cgroups-integration-test.json
{
"memory": {
"limit": 33554432,
"reservation": 25165824
},
"cpu": {
"shares": 100,
"quota": 500000,
"period": 1000000,
"cpus": "0"
},
"pids": {
"limit": 20
}
}
EOF

runc update -r "$BATS_RUN_TMPDIR"/runc-cgroups-integration-test.json test_update
[ "$status" -eq 0 ]
check_cgroup_value "cpuset.cpus" 0

check_cgroup_value $MEM_LIMIT 33554432
check_systemd_value $SD_MEM_LIMIT 33554432

check_cgroup_value $MEM_RESERVE 25165824
check_systemd_value $SD_MEM_RESERVE 25165824

check_cgroup_value "pids.max" 20
check_systemd_value "TasksMax" 20

if [ "$HAVE_SWAP" = "yes" ]; then
# Test case for https://github.com/opencontainers/runc/pull/592,
# checking libcontainer/cgroups/fs/memory.go:setMemoryAndSwap.

runc update test_update --memory 30M --memory-swap 50M
[ "$status" -eq 0 ]

check_cgroup_value $MEM_LIMIT $((30 * 1024 * 1024))
check_systemd_value $SD_MEM_LIMIT $((30 * 1024 * 1024))

if [ "$CGROUP_UNIFIED" = "yes" ]; then
# for cgroupv2, swap does not include mem
check_cgroup_value "$MEM_SWAP" $((20 * 1024 * 1024))
check_systemd_value "$SD_MEM_SWAP" $((20 * 1024 * 1024))
else
check_cgroup_value "$MEM_SWAP" $((50 * 1024 * 1024))
check_systemd_value "$SD_MEM_SWAP" $((50 * 1024 * 1024))
fi

# Now, set new memory to more than old swap
runc update test_update --memory 60M --memory-swap 80M
[ "$status" -eq 0 ]

check_cgroup_value $MEM_LIMIT $((60 * 1024 * 1024))
check_systemd_value $SD_MEM_LIMIT $((60 * 1024 * 1024))

if [ "$CGROUP_UNIFIED" = "yes" ]; then
# for cgroupv2, swap does not include mem
check_cgroup_value "$MEM_SWAP" $((20 * 1024 * 1024))
check_systemd_value "$SD_MEM_SWAP" $((20 * 1024 * 1024))
else
check_cgroup_value "$MEM_SWAP" $((80 * 1024 * 1024))
check_systemd_value "$SD_MEM_SWAP" $((80 * 1024 * 1024))
fi
fi
}
@test "update cgroup cpu limits" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
# run a few busyboxes detached
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
# check that initial values were properly set
check_cpu_quota 500000 1000000 "500ms"
check_cpu_shares 100
# update cpu period
runc update test_update --cpu-period 900000
[ "$status" -eq 0 ]
check_cpu_quota 500000 900000 "560ms"
# update cpu quota
runc update test_update --cpu-quota 600000
[ "$status" -eq 0 ]
check_cpu_quota 600000 900000 "670ms"
# remove cpu quota
runc update test_update --cpu-quota -1
[ "$status" -eq 0 ]
check_cpu_quota -1 900000 "infinity"
# update cpu-shares
runc update test_update --cpu-share 200
[ "$status" -eq 0 ]
check_cpu_shares 200
# Revert to the test initial value via json on stding
runc update -r - test_update <<EOF
{
"cpu": {
"shares": 100,
"quota": 500000,
"period": 1000000
}
}
EOF
[ "$status" -eq 0 ]
check_cpu_quota 500000 1000000 "500ms"
# redo all the changes at once
runc update test_update \
--cpu-period 900000 --cpu-quota 600000 --cpu-share 200
[ "$status" -eq 0 ]
check_cpu_quota 600000 900000 "670ms"
check_cpu_shares 200
# remove cpu quota and reset the period
runc update test_update --cpu-quota -1 --cpu-period 100000
[ "$status" -eq 0 ]
check_cpu_quota -1 100000 "infinity"
# reset to initial test value via json file
cat <<EOF >"$BATS_RUN_TMPDIR"/runc-cgroups-integration-test.json
{
"cpu": {
"shares": 100,
"quota": 500000,
"period": 1000000
}
}
EOF
[ "$status" -eq 0 ]
runc update -r "$BATS_RUN_TMPDIR"/runc-cgroups-integration-test.json test_update
[ "$status" -eq 0 ]
check_cpu_quota 500000 1000000 "500ms"
check_cpu_shares 100
}
@test "set cpu period with no quota" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
update_config '.linux.resources.cpu |= { "period": 1000000 }'
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
check_cpu_quota -1 1000000 "infinity"
}
@test "set cpu quota with no period" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
update_config '.linux.resources.cpu |= { "quota": 5000 }'
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
check_cpu_quota 5000 100000 "50ms"
}
@test "update cpu period with no previous period/quota set" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
update_config '.linux.resources.cpu |= {}'
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
# update the period alone, no old values were set
runc update --cpu-period 50000 test_update
[ "$status" -eq 0 ]
check_cpu_quota -1 50000 "infinity"
}
@test "update cpu quota with no previous period/quota set" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
update_config '.linux.resources.cpu |= {}'
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
# update the quota alone, no old values were set
runc update --cpu-quota 30000 test_update
[ "$status" -eq 0 ]
check_cpu_quota 30000 100000 "300ms"
}
@test "update cgroup v2 resources via unified map" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
requires cgroups_v2
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
# check that initial values were properly set
check_cpu_quota 500000 1000000 "500ms"
# initial cpu shares of 100 corresponds to weight of 4
check_cpu_weight 4
check_systemd_value "TasksMax" 20
runc update -r - test_update <<EOF
{
"unified": {
"cpu.max": "max 100000",
"cpu.weight": "16",
"pids.max": "10"
}
}
EOF
# check the updated systemd unit properties
check_cpu_quota -1 100000 "infinity"
check_cpu_weight 16
check_systemd_value "TasksMax" 10
}
@test "update cpuset parameters via resources.CPU" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
requires smp cgroups_cpuset
local AllowedCPUs='AllowedCPUs' AllowedMemoryNodes='AllowedMemoryNodes'
# these properties require systemd >= v244
if [ "$(systemd_version)" -lt 244 ]; then
# a hack to skip checks, see check_systemd_value()
AllowedCPUs='unsupported'
AllowedMemoryNodes='unsupported'
fi
update_config ' .linux.resources.CPU |= {
"Cpus": "0",
"Mems": "0"
}'
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
# check that initial values were properly set
check_systemd_value "$AllowedCPUs" 0
check_systemd_value "$AllowedMemoryNodes" 0
runc update -r - test_update <<EOF
{
"CPU": {
"Cpus": "1"
}
}
EOF
[ "$status" -eq 0 ]
# check the updated systemd unit properties
check_systemd_value "$AllowedCPUs" 1
# More than 1 numa memory node is required to test this
file="/sys/fs/cgroup/cpuset.mems.effective"
if ! test -r $file || grep -q '^0$' $file; then
# skip the rest of it
return 0
fi
runc update -r - test_update <<EOF
{
"CPU": {
"Mems": "1"
}
}
EOF
[ "$status" -eq 0 ]
# check the updated systemd unit properties
check_systemd_value "$AllowedMemoryNodes" 1
}
@test "update cpuset parameters via v2 unified map" {
# This test assumes systemd >= v244
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
requires cgroups_v2 smp cgroups_cpuset
update_config ' .linux.resources.unified |= {
"cpuset.cpus": "0",
"cpuset.mems": "0"
}'
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
# check that initial values were properly set
check_systemd_value "AllowedCPUs" 0
check_systemd_value "AllowedMemoryNodes" 0
runc update -r - test_update <<EOF
{
"unified": {
"cpuset.cpus": "1"
}
}
EOF
[ "$status" -eq 0 ]
# check the updated systemd unit properties
check_systemd_value "AllowedCPUs" 1
# More than 1 numa memory node is required to test this
file="/sys/fs/cgroup/cpuset.mems.effective"
if ! test -r $file || grep -q '^0$' $file; then
# skip the rest of it
return 0
fi
runc update -r - test_update <<EOF
{
"unified": {
"cpuset.mems": "1"
}
}
EOF
[ "$status" -eq 0 ]
# check the updated systemd unit properties
check_systemd_value "AllowedMemoryNodes" 1
}
@test "update rt period and runtime" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
requires cgroups_v1 cgroups_rt no_systemd
local cgroup_cpu="${CGROUP_CPU_BASE_PATH}/${REL_CGROUPS_PATH}"
# By default, "${cgroup_cpu}/cpu.rt_runtime_us" is set to 0, which inhibits
# setting the container's realtimeRuntime. (#2046)
#
# When ${cgroup_cpu} is "/sys/fs/cgroup/cpu,cpuacct/runc-cgroups-integration-test/test-cgroup",
# we write the values of /sys/fs/cgroup/cpu,cpuacct/cpu.rt_{period,runtime}_us to:
# - sys/fs/cgroup/cpu,cpuacct/runc-cgroups-integration-test/cpu.rt_{period,runtime}_us
# - sys/fs/cgroup/cpu,cpuacct/runc-cgroups-integration-test/test-cgroup/cpu.rt_{period,runtime}_us
#
# Typically period=1000000 runtime=950000 .
#
# TODO: support systemd
mkdir -p "$cgroup_cpu"
local root_period root_runtime
root_period=$(cat "${CGROUP_CPU_BASE_PATH}/cpu.rt_period_us")
root_runtime=$(cat "${CGROUP_CPU_BASE_PATH}/cpu.rt_runtime_us")
# the following IFS magic sets dirs=("runc-cgroups-integration-test" "test-cgroup")
IFS='/' read -r -a dirs <<<"$REL_CGROUPS_PATH"
for ((i = 0; i < ${#dirs[@]}; i++)); do
local target="$CGROUP_CPU_BASE_PATH"
for ((j = 0; j <= i; j++)); do
target="${target}/${dirs[$j]}"
done
target_period="${target}/cpu.rt_period_us"
echo "Writing ${root_period} to ${target_period}"
echo "$root_period" >"$target_period"
target_runtime="${target}/cpu.rt_runtime_us"
echo "Writing ${root_runtime} to ${target_runtime}"
echo "$root_runtime" >"$target_runtime"
done
# run a detached busybox
runc run -d --console-socket "$CONSOLE_SOCKET" test_update_rt
[ "$status" -eq 0 ]
runc update -r - test_update_rt <<EOF
{
"cpu": {
"realtimeRuntime": 500001
}
}
EOF
[ "$status" -eq 0 ]
check_cgroup_value "cpu.rt_period_us" "$root_period"
check_cgroup_value "cpu.rt_runtime_us" 500001
runc update -r - test_update_rt <<EOF
{
"cpu": {
"realtimePeriod": 800001,
"realtimeRuntime": 500001
}
}
EOF
check_cgroup_value "cpu.rt_period_us" 800001
check_cgroup_value "cpu.rt_runtime_us" 500001
runc update test_update_rt --cpu-rt-period 900001 --cpu-rt-runtime 600001
[ "$status" -eq 0 ]
check_cgroup_value "cpu.rt_period_us" 900001
check_cgroup_value "cpu.rt_runtime_us" 600001
}
@test "update devices [minimal transition rules]" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
requires root
# Run a basic shell script that tries to read from /dev/kmsg, but
# due to lack of permissions, it prints the error message to /dev/null.
# If any data is read from /dev/kmsg, it will be printed to stdout, and the
# test will fail. In the same way, if access to /dev/null is denied, the
# error will be printed to stderr, and the test will also fail.
#
# "runc update" makes use of minimal transition rules, updates should not cause
# writes to fail at any point. For systemd cgroup driver on cgroup v1, the cgroup
# is frozen to ensure this.
update_config ' .linux.resources.devices = [{"allow": false, "access": "rwm"}, {"allow": false, "type": "c", "major": 1, "minor": 11, "access": "rwa"}]
| .linux.devices = [{"path": "/dev/kmsg", "type": "c", "major": 1, "minor": 11}]
| .process.capabilities.bounding += ["CAP_SYSLOG"]
| .process.capabilities.effective += ["CAP_SYSLOG"]
| .process.capabilities.inheritable += ["CAP_SYSLOG"]
| .process.capabilities.permitted += ["CAP_SYSLOG"]
| .process.args |= ["sh", "-c", "while true; do head -c 100 /dev/kmsg 2> /dev/null; done"]'
# Set up a temporary console socket and recvtty so we can get the stdio.
TMP_RECVTTY_DIR="$(mktemp -d "$BATS_RUN_TMPDIR/runc-tmp-recvtty.XXXXXX")"
TMP_RECVTTY_PID="$TMP_RECVTTY_DIR/recvtty.pid"
TMP_CONSOLE_SOCKET="$TMP_RECVTTY_DIR/console.sock"
CONTAINER_OUTPUT="$TMP_RECVTTY_DIR/output"
("$RECVTTY" --no-stdin --pid-file "$TMP_RECVTTY_PID" \
--mode single "$TMP_CONSOLE_SOCKET" &>"$CONTAINER_OUTPUT") &
retry 10 0.1 [ -e "$TMP_CONSOLE_SOCKET" ]
# Run the container in the background.
runc run -d --console-socket "$TMP_CONSOLE_SOCKET" test_update
cat "$CONTAINER_OUTPUT"
[ "$status" -eq 0 ]
# Trigger an update. This update doesn't actually change the device rules,
# but it will trigger the devices cgroup code to reapply the current rules.
# We trigger the update a few times to make sure we hit the race.
for _ in {1..30}; do
# TODO: Update "runc update" so we can change the device rules.
runc update --pids-limit 30 test_update
[ "$status" -eq 0 ]
done
# Kill recvtty.
kill -9 "$(<"$TMP_RECVTTY_PID")"
# There should've been no output from the container.
cat "$CONTAINER_OUTPUT"
[ -z "$(<"$CONTAINER_OUTPUT")" ]
}
@test "update paused container" {
[[ "$ROOTLESS" -ne 0 ]] && requires rootless_cgroup
requires cgroups_freezer
# Run the container in the background.
runc run -d --console-socket "$CONSOLE_SOCKET" test_update
[ "$status" -eq 0 ]
# Pause the container.
runc pause test_update
[ "$status" -eq 0 ]
# Trigger an unrelated update.
runc update --pids-limit 30 test_update
[ "$status" -eq 0 ]
# The container should still be paused.
testcontainer test_update paused
# Resume the container.
runc resume test_update
[ "$status" -eq 0 ]
}
|
<reponame>balovbohdan/fwd-ann<filename>dist/lib/weights/layers-pair-weights/Weights.d.ts
import { Matrix } from 'matrix-calculus';
/** Wrapper around the weight matrix connecting a pair of network layers. */
export declare class Weights {
constructor(weights: Matrix);
/** Returns the wrapped weight matrix. */
getMatrix(): Matrix;
private readonly weights;
}
|
import numpy as np
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import MultinomialNB
# Train and score a bag-of-words Naive Bayes sentiment classifier.

# Load the labelled comments.
df = pd.read_csv("data.csv")

# Turn each comment into a sparse token-count feature vector.
bow = CountVectorizer()
features = bow.fit_transform(df.comment)
labels = df.sentiment

# Hold out a third of the data for evaluation (fixed seed for reproducibility).
features_train, features_test, labels_train, labels_test = train_test_split(
    features, labels, test_size=0.33, random_state=42
)

# Fit the classifier and report hold-out accuracy.
classifier = MultinomialNB()
classifier.fit(features_train, labels_train)
print(classifier.score(features_test, labels_test))
package main.support;
import java.lang.management.ManagementFactory;
import java.lang.management.GarbageCollectorMXBean;
// Source: https://cruftex.net/2017/03/28/The-6-Memory-Metrics-You-Should-Track-in-Your-Java-Benchmarks.html
/**
 * Helpers for sampling JVM heap usage around explicit GC cycles.
 * Source: https://cruftex.net/2017/03/28/The-6-Memory-Metrics-You-Should-Track-in-Your-Java-Benchmarks.html
 */
public class MemoryStats {

    /**
     * Returns the summed collection count of all garbage collectors,
     * used as a cheap "has a GC happened since" counter.
     */
    public static long getGcCount() {
        long sum = 0;
        for (GarbageCollectorMXBean b : ManagementFactory.getGarbageCollectorMXBeans()) {
            long count = b.getCollectionCount();
            // -1 means this collector does not report a count; skip it.
            if (count != -1) { sum += count; }
        }
        return sum;
    }

    /** Returns the currently used heap memory in bytes, without forcing a GC. */
    public static long getCurrentlyUsedMemory() {
        return
        ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
    }

    /**
     * Forces a GC and returns the used heap measured after a collection ran.
     * Busy-waits on the collection counter because System.gc() is only a hint
     * and may return before any collection actually happens.
     */
    public static long getReallyUsedMemory() {
        long before = getGcCount();
        System.gc();
        while (getGcCount() == before);
        return getCurrentlyUsedMemory();
    }

    /**
     * Repeatedly GCs until measured heap usage stops shrinking, then returns
     * the last settled measurement. Exits the JVM if interrupted.
     */
    public static long getSettledUsedMemory() {
        long m;
        long m2 = getReallyUsedMemory();
        do {
            try {
                // Give finalizers / reference queues time to drain between samples.
                Thread.sleep(567);
            } catch (InterruptedException e) {
                System.out.println("Interrupted while sleeping");
                System.exit(1);
            }
            m = m2;
            m2 = getReallyUsedMemory();
        // Fixed: the loop previously compared m2 against yet another fresh
        // getReallyUsedMemory() call, forcing an extra GC per iteration and
        // looping while usage *grew*; "settled" means loop while it shrinks.
        } while (m2 < m);
        return m;
    }
}
|
package v1
// VulnerabilityMetadata represents all vulnerability data that is not necessary to perform package-to-vulnerability matching.
type VulnerabilityMetadata struct {
	ID           string   // The identifier of the vulnerability or advisory
	RecordSource string   // The source of the vulnerability information
	Severity     string   // How severe the vulnerability is (valid values are defined by upstream sources currently)
	Links        []string // URLs to get more information about the vulnerability or advisory
	Description  string   // Description of the vulnerability
	CvssV2       *Cvss    // Common Vulnerability Scoring System V2 values (nil when no V2 score is recorded)
	CvssV3       *Cvss    // Common Vulnerability Scoring System V3 values (nil when no V3 score is recorded)
}
// Cvss contains select Common Vulnerability Scoring System fields for a vulnerability.
type Cvss struct {
	BaseScore           float64 // Score from 0 to 10 for the qualities intrinsic to the vulnerability
	ExploitabilityScore float64 // Indicator of how easy it may be for an attacker to exploit the vulnerability
	ImpactScore         float64 // Effect of an exploited vulnerability on confidentiality, integrity, and availability
	Vector              string  // A textual representation of the metric values used to determine the score
}
|
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.dataelement.internal;
import org.hisp.dhis.android.core.arch.db.stores.internal.IdentifiableObjectStore;
import org.hisp.dhis.android.core.arch.handlers.internal.HandleAction;
import org.hisp.dhis.android.core.arch.handlers.internal.Handler;
import org.hisp.dhis.android.core.arch.handlers.internal.IdentifiableHandlerImpl;
import org.hisp.dhis.android.core.arch.handlers.internal.LinkHandler;
import org.hisp.dhis.android.core.attribute.Attribute;
import org.hisp.dhis.android.core.attribute.AttributeValueUtils;
import org.hisp.dhis.android.core.attribute.DataElementAttributeValueLink;
import org.hisp.dhis.android.core.dataelement.DataElement;
import org.hisp.dhis.android.core.legendset.DataElementLegendSetLink;
import org.hisp.dhis.android.core.legendset.LegendSet;
import java.util.List;
import javax.inject.Inject;
import dagger.Reusable;
@Reusable
final class DataElementHandler extends IdentifiableHandlerImpl<DataElement> {

    // Collaborators that persist the nested collections of a DataElement.
    private final Handler<Attribute> attributeHandler;
    private final LinkHandler<Attribute, DataElementAttributeValueLink> dataElementAttributeLinkHandler;
    private final Handler<LegendSet> legendSetHandler;
    private final LinkHandler<LegendSet, DataElementLegendSetLink> dataElementLegendSetLinkHandler;

    @Inject
    DataElementHandler(
            IdentifiableObjectStore<DataElement> programStageDataElementStore,
            Handler<Attribute> attributeHandler,
            LinkHandler<Attribute, DataElementAttributeValueLink> dataElementAttributeLinkHandler,
            Handler<LegendSet> legendSetHandler,
            LinkHandler<LegendSet, DataElementLegendSetLink> dataElementLegendSetLinkHandler
    ) {
        super(programStageDataElementStore);
        this.attributeHandler = attributeHandler;
        this.dataElementAttributeLinkHandler = dataElementAttributeLinkHandler;
        this.legendSetHandler = legendSetHandler;
        this.dataElementLegendSetLinkHandler = dataElementLegendSetLinkHandler;
    }

    /** After the data element itself is stored, persist its attribute values and legend sets. */
    @Override
    protected void afterObjectHandled(DataElement dataElement, HandleAction action) {
        handleAttributeValues(dataElement);
        handleLegendSets(dataElement);
    }

    // Stores the element's attributes and the element<->attribute link rows with their values.
    private void handleAttributeValues(DataElement dataElement) {
        if (dataElement.attributeValues() == null) {
            return;
        }
        final List<Attribute> attributes = AttributeValueUtils.extractAttributes(dataElement.attributeValues());
        attributeHandler.handleMany(attributes);
        dataElementAttributeLinkHandler.handleMany(dataElement.uid(), attributes,
                attribute -> DataElementAttributeValueLink.builder()
                        .dataElement(dataElement.uid())
                        .attribute(attribute.uid())
                        .value(AttributeValueUtils.extractValue(dataElement.attributeValues(), attribute.uid()))
                        .build());
    }

    // Stores the element's legend sets and the element<->legendSet link rows.
    private void handleLegendSets(DataElement dataElement) {
        if (dataElement.legendSets() == null) {
            return;
        }
        legendSetHandler.handleMany(dataElement.legendSets());
        dataElementLegendSetLinkHandler.handleMany(dataElement.uid(), dataElement.legendSets(),
                legendSet -> DataElementLegendSetLink.builder()
                        .dataElement(dataElement.uid()).legendSet(legendSet.uid()).build());
    }
}
|
/// Starts endlessly repeating scale ("pulsate") animations on the six pulse views.
func setupPulsatingAnimation() {
    // One grow phase lasts 1.5 s; autoreverse doubles that per full cycle.
    let pulsationDuration: TimeInterval = 1.5
    // Target scale shared by every view.
    let pulsationScale: CGFloat = 1.4
    // Each view paired with a per-view value consumed below as a start delay
    // in seconds. NOTE(review): the 1.2...2.2 values read like per-view target
    // *scales*, yet toValue stays fixed at pulsationScale while these are used
    // as *delays* -- confirm the intent before changing either.
    let pulsations: [(UIView, CGFloat)] = [
        (pulse1, 1.2),
        (pulse2, 1.4),
        (pulse3, 1.6),
        (pulse4, 1.8),
        (pulse5, 2.0),
        (pulse6, 2.2)
    ]
    for (pulse, delay) in pulsations {
        let animation = CABasicAnimation(keyPath: "transform.scale")
        animation.duration = pulsationDuration
        animation.autoreverses = true
        animation.repeatCount = .infinity
        animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
        animation.fromValue = 1.0
        animation.toValue = pulsationScale
        // Stagger each view's animation start relative to now.
        animation.beginTime = CACurrentMediaTime() + TimeInterval(delay)
        pulse.layer.add(animation, forKey: "pulsate")
    }
}
<gh_stars>1000+
package com.semmle.js.ast;
import java.util.List;
/** The body of a {@linkplain ClassDeclaration} or {@linkplain ClassExpression}. */
public class ClassBody extends Node {

    /** The member definitions of this class body, in source order. */
    private final List<MemberDefinition<?>> body;

    public ClassBody(SourceLocation loc, List<MemberDefinition<?>> body) {
        super("ClassBody", loc);
        this.body = body;
    }

    /** Returns the member definitions of this class body. */
    public List<MemberDefinition<?>> getBody() {
        return body;
    }

    /** Appends a member definition to this class body. */
    public void addMember(MemberDefinition<?> md) {
        body.add(md);
    }

    /** Returns the constructor member, or {@code null} if this body has none. */
    public MethodDefinition getConstructor() {
        for (MemberDefinition<?> member : body) {
            if (member.isConstructor()) {
                return (MethodDefinition) member;
            }
        }
        return null;
    }

    @Override
    public <C, R> R accept(Visitor<C, R> v, C c) {
        return v.visit(this, c);
    }
}
|
# Evaluate the 1024+0+512 N-VB-fill model on the wikitext-103 validation set,
# padding the first third/sixth and scoring only the penultimate sixth.
python transformers/examples/language-modeling/run_language_modeling.py \
    --model_name_or_path train-outputs/1024+0+512-N-VB-fill/model \
    --tokenizer_name model-configs/1536-config \
    --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw \
    --output_dir eval-outputs/1024+0+512-N-VB-fill/512+512+512-only-pad-first-256 \
    --do_eval \
    --per_device_eval_batch_size 1 \
    --dataloader_drop_last \
    --augmented \
    --augmentation_function pad_first_third_sixth \
    --eval_function penultimate_sixth_eval
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 24 17:01:08 2019
@author: sbae

Smoke test for the SGAN trajectory predictor: feeds a short two-pedestrian
observation history to a pretrained model and prints the predicted paths.
"""
import sys
import time
import numpy as np
# Make the bundled sgan package importable (hard-coded dev-machine path).
sys.path.insert(0,'/home/sbae/automotive-control-temporary/python/nnmpc/sgan/')
from sgan.predictor import Predictor

# Pretrained SGAN checkpoint (ETH dataset, 8-step observation model).
model_path = "/home/sbae/automotive-control-temporary/python/nnmpc/sgan/models/sgan-models/eth_8_model.pt"
predictor = Predictor(model_path)
# Observation history rows; columns appear to be [frame, pedestrian_id, x, y]
# (NOTE(review): column meaning inferred from the commented-out rows and the
# ETH dataset convention -- confirm against Predictor.predict's expectations).
data = np.array([
# [ 1, 1.000e+00, 8.460e+00, 3.590e+00],
# [ 1, 2.000e+00, 1.364e+01, 5.800e+00],
# [ 2, 1.000e+00, 9.570e+00, 3.790e+00],
# [ 2, 2.000e+00, 1.364e+01, 5.800e+00],
# [ 3, 1.000e+00, 1.067e+01, 3.990e+00],
# [ 3, 2.000e+00, 1.364e+01, 5.800e+00],
# [ 4, 1.000e+00, 1.173e+01, 4.320e+00],
# [ 4, 2.000e+00, 1.209e+01, 5.750e+00],
[ 5, 1.000e+00, 1.281e+01, 4.610e+00],
[ 5, 2.000e+00, 1.137e+01, 5.800e+00],
[ 6, 1.000e+00, 1.281e+01, 4.610e+00],
[ 6, 2.000e+00, 1.031e+01, 5.970e+00],
[ 7, 1.000e+00, 1.194e+01, 6.770e+00],
[ 7, 2.000e+00, 9.570e+00, 6.240e+00],
[ 8, 1.000e+00, 1.103e+01, 6.840e+00],
[ 8, 2.000e+00, 8.730e+00, 6.340e+00]])
# Time a single prediction call.
start_time = time.time()
pred_traj = predictor.predict(data)
print("time elapsed: {}\n".format(time.time()-start_time))
print("predicted trajectory: {}".format(pred_traj))
class LoginSimulator:
    """Simulates a three-step login flow: encrypt credentials, send, resolve."""

    def __init__(self):
        # Result of the most recent login attempt; None until a login runs.
        self._login_result = None

    def _generate_encrypted_login_info(self):
        # Implement the logic to generate and store encrypted login information
        pass  # Placeholder, replace with actual implementation

    def _send_login_info(self):
        # Implement the logic to simulate sending the login information
        pass  # Placeholder, replace with actual implementation

    def _resolve_login_response(self):
        # Implement the logic to simulate resolving the login response
        self._login_result = "Success"  # Placeholder, replace with actual implementation

    def get_login_response(self):
        """Run the full login sequence and print the outcome."""
        print("----------------")
        print("Step3: Logging and resolve response.")
        self._generate_encrypted_login_info()
        self._send_login_info()
        self._resolve_login_response()
        print("The login result is: " + str(self._login_result))
        print("----------------")

    # Backward-compatible alias: the method was originally published under a
    # misspelled name; existing callers keep working.
    get_login_responce = get_login_response
# Example usage: run one simulated login and print its outcome.
simulator = LoginSimulator()
simulator.get_login_responce()
#!/usr/bin/env bash
# if you execute these tests on Windows git bash, make sure you install jq via choco: chocolatey install jq
load $HOME/test/test_helper/bats-assert/load.bash
load $HOME/test/test_helper/bats-support/load.bash
# Bats hook: source the script under test fresh before every test case.
function setup(){
source "$BATS_TEST_DIRNAME/create_namespace.sh"
}
# Bats hook: drop the per-test kubectl mock so it cannot leak between tests.
function teardown(){
unset kubectl
}
@test "UT:create_namespace: should report when namespace already exists" {
local _namespace=default
kubectl (){
cat << EOF
NAME STATUS AGE
crux2086 Active 2d2h
$_namespace Active 3d1h
kube-node-lease Active 3d1h
kube-public Active 3d1h
kube-system Active 3d1h
EOF
}
export -f kubectl
run create_namespace "$_namespace"
assert_output --partial "Namespace $_namespace already present"
}
@test "UT:create_namespace: should create namespace if does not exists" {
local _namespace=test
kubectl (){
subcommand=$1
if [ "$subcommand" == "get" ];then
cat << EOF
NAME STATUS AGE
crux2086 Active 2d2h
kube-node-lease Active 3d1h
kube-public Active 3d1h
default Active 3d1h
EOF
fi
}
export -f kubectl
run create_namespace "$_namespace"
assert_output "Creating namespace $_namespace"
} |
<filename>packages/coinstac-ui/app/render/state/ducks/statePersist.js
/* eslint-disable import/prefer-default-export */
import { dirname, join } from 'path';
import { deepParseJson } from 'deep-parse-json';
import { API_TOKEN_KEY, setUser } from './auth';
// Module singletons injected via init(); the thunks below rely on them.
let electronStore;
let persistConfig;
let storePersistor;
// Action types the root reducer uses to wipe / restore persisted state.
export const CLEAR_STATE = 'CLEAR_STATE';
export const REHYDRATE = 'REHYDRATE';
// Action creators for the two state-persistence actions above.
export const clearState = state => ({ type: CLEAR_STATE, payload: state });
export const rehydrate = state => ({ type: REHYDRATE, payload: state });
// Wires in the module singletons; must be called once at startup before
// loadUserState/clearUserState are dispatched.
function init(store, config, persistor) {
  electronStore = store;
  persistConfig = config;
  storePersistor = persistor;
}
// Thunk: point the electron store at the user's private db file, restore any
// persisted redux state for that user, and mark them authenticated.
const loadUserState = (user, authTokenData) => async (dispatch) => {
  // Each user gets an isolated local-db JSON file next to the shared store.
  const electronStoreFolder = dirname(electronStore.path);
  electronStore.path = join(electronStoreFolder, `local-db-${user.id}.json`);
  const data = await persistConfig.storage.getItem('persist:root');
  storePersistor.persist();
  // Rehydrate is done only once by redux-persist, so we do it manually
  // for hydrating state on consecutive logins
  if (data) {
    const parsedState = deepParseJson(data);
    // redux-persist bookkeeping key; must not leak into application state
    delete parsedState._persist;
    dispatch(rehydrate(parsedState));
  }
  localStorage.setItem(API_TOKEN_KEY, JSON.stringify(authTokenData));
  dispatch(setUser(user));
};
// Thunk: stop persisting and blank out the per-user slices of state.
const clearUserState = () => (dispatch) => {
  // Pause first so the cleared state is not immediately written back to disk.
  storePersistor.pause();
  // clear persisted state
  dispatch(clearState({
    maps: null,
    localRunResults: null,
  }));
};
export {
  init,
  loadUserState,
  clearUserState,
};
|
<!DOCTYPE html>
<html lang="en">
<head>
    <!-- Declare encoding explicitly so the browser never has to guess -->
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>Board Game</title>
</head>
<body>
    <h1>Board Game</h1>
    <!-- Player action controls -->
    <div>
        <button id="roll-dice-button">Roll Dice</button>
        <button id="take-turn-button">Take Turn</button>
        <button id="buy-property-button">Buy Property</button>
        <button id="trade-property-button">Trade Property</button>
        <button id="end-turn-button">End Turn</button>
    </div>
    <h2>Game State:</h2>
    <div id="game-state-area">
        <!-- Game state will be displayed here -->
    </div>
    <script>
        // Game logic goes here
    </script>
</body>
</html>
# Fine-to-coarse CIFAR-100 run on GPU 1: presumably 15-class grouping, full
# data ratio, no extra layer -- confirm flag semantics against main_f2c_cifar100.py.
CUDA_VISIBLE_DEVICES=1 python main_f2c_cifar100.py --categories 15_classes --f2c 1 --data_ratio 1. --add_layer 0
|
def lcs(str1, str2, m=None, n=None):
    """Return the length of the longest common subsequence of str1[:m] and str2[:n].

    m and n default to the full lengths of str1 and str2, so existing
    four-argument callers keep working unchanged.
    """
    if m is None:
        m = len(str1)
    if n is None:
        n = len(str2)
    # dp[i][j] = LCS length of str1[:i] and str2[:j]
    dp = [[0 for i in range(n + 1)] for j in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if str1[i - 1] == str2[j - 1]:
                dp[i][j] = 1 + dp[i - 1][j - 1]
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
    # Fixed: was "returndp[m][n]" (a NameError-raising expression statement),
    # so the function implicitly returned None.
    return dp[m][n]
str1 = "abcdef"
str2 = "bcjklf"
m = len(str1)
n = len(str2)
print(lcs(str1, str2, m, n)) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.