text
stringlengths 27
775k
|
|---|
-- Test cases for JitCert.Analysis.
module Analysis where
import Control.Monad
import Data.Digest.Pure.SHA
import Test.HUnit
import JitCert.Analysis
import JitCert.Context
import JitCert.Docs
import JitCert.Docs.Shared
import JitCert.GSN.Builder
-- | All JitCert.Analysis test cases, run as a single suite.
tests :: Test
tests = TestList [testCheckUnboundFigure6, testShadowedFigure5, testCheckUnboundValuesFigure7]
-- | Rebinding a context variable that figure5 already bound should be
-- reported by 'checkShadowedValues' as shadowing.
testShadowedFigure5 :: Test
testShadowedFigure5 = TestCase $ do
    (gsn, env) <- runBuilderT $ do
        (t, _, _, _, _, _) <- figure5
        setContextValue t (Test "test_constant" (sha256 "...")) -- "32dfe08c")
    let r = checkShadowedValues gsn env
    -- fail $ show r
    assertError r
-- | 'checkUnbound' should flag the deliberately unbound node built by
-- 'figureUnboundError'.
testCheckUnboundFigure6 :: Test
testCheckUnboundFigure6 = TestCase $ do
    -- env is unused here; bind it as _env to silence -Wunused-matches.
    (gsn, _env) <- runBuilderT figureUnboundError
    let x = checkUnbound gsn
    -- fail $ show x
    assertError x
-- | 'checkUnboundValues' should report an error for figure7's context:
-- functions are bound via 'setContextValuesWith' but the check still
-- finds unbound values in the environment.
testCheckUnboundValuesFigure7 :: Test
testCheckUnboundValuesFigure7 = TestCase $ do
    (gsn, env) <- runBuilderT $ do
        (fs, rng, _) <- figure7
        let functions = [ (Function "fibonacci" "c9a0ad2c", RNG "rng")
                        , (Function "foo" "f9434869", RNG "rng")]
        setContextValuesWith fs functions $ \(function, generator) -> do
            -- setContextValuesWith fs [RNG] $ \rng ->
            setContextValue rng generator
            return function
    let r = checkUnboundValues gsn env
    -- fail $ show r
    assertError r
-- | Fail the current test when the analysis produced no errors.
-- Generalized over any Foldable container of reported errors.
assertError :: Foldable t => t a -> IO ()
assertError x = when (null x) $
    fail "Expected an error"
-- assertLeft (Left _) = return ()
-- assertLeft (Right _) = fail "expected Left"
|
package com.nexthink.utils.parsing.distance
import java.util
import scala.collection.JavaConverters._
object DiceSorensenDistance {

  /** Dice–Sørensen similarity: 2·|A∩B| / (|A|+|B|) over word bigrams.
    *
    * Bigrams are counted as a multiset: each bigram of `b` can satisfy at
    * most one matching bigram of `a`, which is why matched entries are
    * removed from the search list as they are consumed.
    */
  def diceSorensenSimilarity(a: String, b: String): Double = {
    val bigramsA = tokenizeWords(a.toLowerCase).flatMap(bigramsWithAffixing).toList
    val bigramsB = tokenizeWords(b.toLowerCase).flatMap(bigramsWithAffixing).toList
    // Mutable copy of b's bigrams; matches are consumed one at a time.
    val remaining = new util.LinkedList[String](bigramsB.asJava)

    // Remove and count the first occurrence of `bigram` in `remaining`.
    def consumeMatch(bigram: String): Boolean = {
      val it = remaining.iterator()
      var found = false
      while (!found && it.hasNext) {
        if (it.next() == bigram) {
          it.remove()
          found = true
        }
      }
      found
    }

    val intersectionSize = bigramsA.count(consumeMatch)
    2.0 * intersectionSize / (bigramsA.size + bigramsB.size)
  }
}
|
<?php namespace Slackwolf;
use React\EventLoop\Factory;
use Slack\ConnectionException;
use Slack\RealTimeClient;
use Slackwolf\Game\Command\AliveCommand;
use Slackwolf\Game\Command\DeadCommand;
use Slackwolf\Game\Command\EndCommand;
use Slackwolf\Game\Command\GuardCommand;
use Slackwolf\Game\Command\HealCommand;
use Slackwolf\Game\Command\HelpCommand;
use Slackwolf\Game\Command\KillCommand;
use Slackwolf\Game\Command\PoisonCommand;
use Slackwolf\Game\Command\RemindCommand;
use Slackwolf\Game\Command\SeeCommand;
use Slackwolf\Game\Command\ShootCommand;
use Slackwolf\Game\Command\NewCommand;
use Slackwolf\Game\Command\JoinCommand;
use Slackwolf\Game\Command\LeaveCommand;
use Slackwolf\Game\Command\StartCommand;
use Slackwolf\Game\Command\VoteCommand;
use Slackwolf\Game\Command\SetOptionCommand;
use Slackwolf\Game\Command\StatusCommand;
use Slackwolf\Game\Command\WeatherCommand;
use Slackwolf\Game\GameManager;
use Slackwolf\Message\Message;
/**
* Defines Slackwolf class.
*
* @package Slackwolf
*/
class Slackwolf
{
    /**
     * Slackwolf constructor.
     */
    public function __construct()
    {
        /*
         * Set the default timezone in case it isn't configured in php.ini
         */
        date_default_timezone_set(getenv('TIMEZONE'));
    }

    /**
     * Boot the bot: create the event loop and Slack client, wire up the
     * command bindings, route incoming messages to the game manager, and
     * run the loop until the process exits.
     */
    public function run()
    {
        /*
         * Create the event loop
         */
        $eventLoop = Factory::create();

        /*
         * Create our Slack client.
         * BUG FIX: the original instantiated the undefined class
         * SlackRTMClient; the imported client class is Slack\RealTimeClient.
         */
        $client = new RealTimeClient($eventLoop);
        $client->setToken(getenv('BOT_TOKEN'));

        /*
         * Setup command bindings (chat keyword => handler class)
         */
        $commandBindings = [
            'help' => HelpCommand::class,
            'option' => SetOptionCommand::class,
            'remindme' => RemindCommand::class,
            'new' => NewCommand::class,
            'join' => JoinCommand::class,
            'leave' => LeaveCommand::class,
            'start' => StartCommand::class,
            'end' => EndCommand::class,
            'see' => SeeCommand::class,
            'vote' => VoteCommand::class,
            'kill' => KillCommand::class,
            'poison' => PoisonCommand::class,
            'guard' => GuardCommand::class,
            'heal' => HealCommand::class,
            'shoot' => ShootCommand::class,
            'alive' => AliveCommand::class,
            'dead' => DeadCommand::class,
            'status' => StatusCommand::class,
            // 'weather' => WeatherCommand::class,
        ];

        /*
         * Create the game manager
         */
        $gameManager = new GameManager($client, $commandBindings);

        /*
         * Route incoming Slack messages: keep channel membership fresh on
         * join/leave events, otherwise hand the message to the game.
         */
        $client->on('message', function ($data) use ($client, $gameManager) {
            $message = new Message($data);
            if ($message->getSubType() == 'channel_join') {
                $client->refreshChannel($message->getChannel());
            } else if ($message->getSubType() == 'channel_leave') {
                $client->refreshChannel($message->getChannel());
            } else {
                $gameManager->input($message);
            }
        });

        /*
         * Connect to Slack
         */
        echo "Connecting...\r\n";
        $client->connect()->then(function() {
            echo "Connected.\n";
        }, function(ConnectionException $e) {
            echo $e->getMessage();
            exit();
        });

        /*
         * Start the event loop
         */
        $eventLoop->run();
    }
}
|
/*
* @Author: lihuan
* @Date: 2022-01-01 23:43:51
* @LastEditors: lihuan
* @LastEditTime: 2022-01-04 19:44:24
* @Email: 17719495105@163.com
*/
import { memo } from 'react';
import { Swiper } from 'antd-mobile';
import { SwiperWrapper } from './style';
import { IBanner } from '@/api/home/model';
const LHSwiper = ({ list }: { list: IBanner[] }) => {
return (
<SwiperWrapper>
{list.length > 0 ? (
<Swiper autoplay loop style={{ '--border-radius': '2vw' }}>
{list.map((item) => (
<Swiper.Item key={item.id} className="container">
<img src={item.url} alt="" />
</Swiper.Item>
))}
</Swiper>
) : null}
</SwiperWrapper>
);
};
export default memo(LHSwiper);
|
namespace AudioPlayerManager.Common
{
    /// <summary>
    /// Identifiers for the game's sound effects.
    /// Values are assigned explicitly so they stay stable if members are
    /// reordered or new ones are inserted (safe for serialized data and
    /// inspector-assigned references).
    /// </summary>
    public enum Sound
    {
        CoinCollect = 0,
        DoorClose = 1,
        DoorOpen = 2,
        EnemyDie = 3,
        EnemyRoar = 4,
        EnemySpeak = 5,
        GameOver = 6,
        Punch01 = 7,
        Punch02 = 8,
        Punch03 = 9,
        Win = 10,
        YouWin = 11
    }
}
|
#!/usr/bin/env ruby
# coding: utf-8
# Gem specification for the CasjaysDev Jekyll theme.
Gem::Specification.new do |spec|
  spec.name     = "casjaysdev-jekyll-theme"
  spec.version  = "0.1.8"
  spec.authors  = ["CasjaysDev"]
  spec.email    = ["gem-admin@casjaysdev.com"]
  spec.summary  = "CasjaysDev jekyll theme"
  spec.homepage = "https://github.com/casjay-templates/jekyll-site"
  spec.license  = "MIT"
  spec.metadata = { "source_code_uri" => "https://github.com/casjay-templates/jekyll-site" }

  # Package every file tracked by git.
  spec.files = `git ls-files -z`.split("\x0")

  # Version-pinned runtime dependency.
  spec.add_runtime_dependency 'jekyll', '~> 3.8'

  # Unpinned runtime dependencies (declaration order preserved).
  %w[
    jemoji kramdown jekyll-avatar jekyll-feed jekyll-mentions
    jekyll-redirect-from jekyll-seo-tag jekyll-gist jekyll-coffeescript
    jekyll-assets jekyll-sitemap jekyll-analytics jekyll-remote-include
    jekyll-menus jekyll-remote-theme jekyll-paginate jekyll-tidy
    github-pages
  ].each { |dep| spec.add_runtime_dependency dep }

  # Development-only dependencies.
  %w[jekyll bundler rake].each { |dep| spec.add_development_dependency dep }
  spec.add_development_dependency 'sprockets', "~> 3.7"

  spec.post_install_message = "Thanks for installing!"
end
|
from __future__ import absolute_import
import os
import zmq
import uuid
import binascii
import random
import socket
import struct
import six.moves.cPickle
import marshal
import mmap
from multiprocessing import Manager, Condition
from mmap import ACCESS_WRITE, ACCESS_READ
from dpark.util import compress, decompress, spawn, get_logger, mkdir_p
from dpark.cache import Cache
from dpark.serialize import marshalable
from dpark.env import env
import six
from six.moves import range, map
try:
from itertools import izip
except ImportError:
izip = zip
logger = get_logger(__name__)

# Serialization formats used by BroadcastManager.to_blocks/from_blocks.
MARSHAL_TYPE, PICKLE_TYPE = list(range(2))
# Each broadcast payload is split into blocks of 2**20 bytes (1 MiB).
BLOCK_SHIFT = 20
BLOCK_SIZE = 1 << BLOCK_SHIFT
# Keys under which the guide/download server addresses are registered in env.
GUIDE_ADDR = 'NewBroadcastGuideAddr'
DOWNLOAD_ADDR = 'NewDownloadAddr'
# Max number of block indices requested from a peer in one fetch.
BATCHED_BLOCKS = 3
# Message types for the guide server protocol.
GUIDE_STOP, GUIDE_GET_SOURCES, GUIDE_SET_SOURCES, GUIDE_REPORT_BAD = list(range(4))
# Message types for the download/block server protocol.
SERVER_STOP, SERVER_FETCH, SERVER_FETCH_FAIL, SERVER_FETCH_OK, \
    DATA_GET, DATA_GET_OK, DATA_GET_FAIL, DATA_DOWNLOADING, SERVER_CLEAR_ITEM = list(range(9))
class GuideManager(object):
    """Registry server for broadcast sources.

    Maps each broadcast uuid to {peer_addr: block_bitmap} and answers
    GUIDE_* requests over a zmq REP socket so peers can discover who holds
    which blocks.  One instance per process; started lazily.
    """

    def __init__(self):
        self._started = False
        self.guides = {}  # uuid -> {addr: bitmap}
        self.host = socket.gethostname()
        self.guide_thread = None
        self.guide_addr = None
        self.register_addr = {}  # uuid -> addr of the original registrant
        self.ctx = zmq.Context()

    def start(self):
        # Idempotent: only the first call spawns the guide thread and
        # publishes the address into env.
        if self._started:
            return
        self._started = True
        self.guide_thread = self.start_guide()
        env.register(GUIDE_ADDR, self.guide_addr)

    def start_guide(self):
        """Bind a REP socket on a random port and spawn the request loop.

        Returns the spawned thread; self.guide_addr is set as a side effect.
        """
        sock = self.ctx.socket(zmq.REP)
        port = sock.bind_to_random_port('tcp://0.0.0.0')
        self.guide_addr = 'tcp://%s:%d' % (self.host, port)

        def run():
            logger.debug("guide start at %s", self.guide_addr)
            while True:
                # NOTE: `type` shadows the builtin; kept for byte-compatibility.
                type, msg = sock.recv_pyobj()
                if type == GUIDE_STOP:
                    sock.send_pyobj(0)
                    break
                elif type == GUIDE_GET_SOURCES:
                    uuid = msg
                    sources = None
                    if uuid in self.guides:
                        sources = self.guides[uuid]
                    else:
                        logger.warning('uuid %s NOT REGISTERED in guide server', uuid)
                    sock.send_pyobj(sources)
                elif type == GUIDE_SET_SOURCES:
                    uuid, addr, bitmap = msg
                    # Only record peers that actually hold at least one block.
                    if any(bitmap):
                        sources = None
                        if uuid in self.guides:
                            sources = self.guides[uuid]
                        if sources:
                            sources[addr] = bitmap
                        else:
                            # First registrant becomes the canonical source.
                            self.guides[uuid] = {addr: bitmap}
                            self.register_addr[uuid] = addr
                    sock.send_pyobj(None)
                elif type == GUIDE_REPORT_BAD:
                    uuid, addr = msg
                    sources = self.guides[uuid]
                    if addr in sources:
                        # Never evict the original registrant.
                        if addr != self.register_addr[uuid]:
                            del sources[addr]
                        else:
                            logger.warning('The addr %s to delete is the register Quit!!!', addr)
                    sock.send_pyobj(None)
                else:
                    logger.error('Unknown guide message: %s %s', type, msg)
                    sock.send_pyobj(None)
        return spawn(run)

    def shutdown(self):
        if not self._started:
            return
        self._started = False
        # Only the process that bound the socket sends the stop message.
        if self.guide_thread and self.guide_addr.\
                startswith('tcp://%s:' % socket.gethostname()):
            sock = self.ctx.socket(zmq.REQ)
            sock.setsockopt(zmq.LINGER, 0)
            sock.connect(self.guide_addr)
            sock.send_pyobj((GUIDE_STOP, None))
            sock.recv_pyobj()
            sock.close()
            self.guide_thread.join()
        self.guide_addr = None
def check_memory(location):
    """Log this process's resident memory (MB) tagged with `location`.

    Best-effort diagnostics: silently degrades to a warning when psutil
    is not installed.
    """
    try:
        import psutil
        pid = os.getpid()
        p = psutil.Process(pid)
        rss = p.memory_info().rss >> 20
        # BUG FIX: the format string had two placeholders for three
        # arguments ('... at ' with no %s), so `location` was never
        # logged and logging raised an internal formatting error.
        logger.info('memory rss %d MB in host %s at %s',
                    rss, socket.gethostname(), location)
    except ImportError:
        logger.warning('import psutil failed')
def decide_dir(work_dirs):
    """Choose the work directory used for new broadcast files.

    Currently always the last configured directory.
    """
    last_index = len(work_dirs) - 1
    return work_dirs[last_index]
def gen_broadcast_path(work_dirs, uuid):
    """Build a per-process file path for broadcast data.

    Layout: <chosen work dir>/broadcast/<uuid>_<pid>.  The broadcast
    directory is created if missing.
    """
    target_dir = os.path.join(decide_dir(work_dirs), 'broadcast')
    mkdir_p(target_dir)
    file_name = '%s_%d' % (uuid, os.getpid())
    return os.path.join(target_dir, file_name)
class DownloadManager(object):
    """Serves broadcast blocks to peers and downloads missing ones.

    One instance per process.  A zmq REP server answers SERVER_FETCH /
    DATA_GET / SERVER_CLEAR_ITEM requests; actual downloads run in
    per-uuid threads that write blocks into an mmap'ed local file and
    advertise progress to the guide server.
    """

    def __init__(self):
        self._started = False
        self.server_thread = None
        self.download_threads = {}  # uuid -> downloader thread
        self.uuid_state_dict = None  # uuid -> (file path, finished?)
        self.uuid_map_dict = None  # uuid -> block bitmap
        self.guide_addr = None
        self.server_addr = None
        self.host = None
        self.ctx = None
        self.random_inst = None
        self.work_dirs = []
        self.master_broadcast_blocks = {}  # uuid -> in-memory blocks (master)

    def start(self):
        """Start the block server and shared state (idempotent)."""
        if self._started:
            return
        self.manager = manager = Manager()
        # Manager-backed dicts are visible to forked worker processes.
        self.shared_uuid_fn_dict = manager.dict()
        self.shared_uuid_map_dict = manager.dict()
        self.shared_master_blocks = manager.dict()
        self.download_cond = Condition()
        self._started = True
        self.ctx = zmq.Context()
        self.host = socket.gethostname()
        if GUIDE_ADDR not in env.environ:
            start_guide_manager()
        self.guide_addr = env.get(GUIDE_ADDR)
        self.random_inst = random.SystemRandom()
        self.server_addr, self.server_thread = self.start_server()
        self.uuid_state_dict = {}
        self.uuid_map_dict = {}
        self.work_dirs = env.get('WORKDIR')
        self.master_broadcast_blocks = {}
        env.register(DOWNLOAD_ADDR, self.server_addr)

    def start_server(self):
        """Bind the REP block server and spawn its request loop.

        Returns (server_addr, server_thread).
        """
        sock = self.ctx.socket(zmq.REP)
        sock.setsockopt(zmq.LINGER, 0)
        port = sock.bind_to_random_port("tcp://0.0.0.0")
        server_addr = 'tcp://%s:%d' % (self.host, port)
        guide_sock = self.ctx.socket(zmq.REQ)
        guide_sock.setsockopt(zmq.LINGER, 0)
        guide_sock.connect(self.guide_addr)

        def run():
            logger.debug("server started at %s", server_addr)
            while True:
                # NOTE: `type` shadows the builtin; kept for compatibility.
                type, msg = sock.recv_pyobj()
                logger.debug('server recv: %s %s', type, msg)
                if type == SERVER_STOP:
                    sock.send_pyobj(None)
                    break
                elif type == SERVER_FETCH:
                    uuid, indices, client_addr = msg
                    if uuid in self.master_broadcast_blocks:
                        # Master path: serve directly from memory.
                        block_num = len(self.master_broadcast_blocks[uuid])
                        bls = []
                        for index in indices:
                            if index >= block_num:
                                logger.warning('input index too big %s for '
                                               'len of blocks %d from host %s',
                                               str(indices), block_num, client_addr)
                                sock.send_pyobj((SERVER_FETCH_FAIL, None))
                            else:
                                bls.append(self.master_broadcast_blocks[uuid][index])
                        sock.send_pyobj((SERVER_FETCH_OK, (indices, bls)))
                    elif uuid in self.uuid_state_dict:
                        # Worker path: serve from the mmap'ed local file.
                        fd = os.open(self.uuid_state_dict[uuid][0], os.O_RDONLY)
                        mmfp = mmap.mmap(fd, 0, access=ACCESS_READ)
                        os.close(fd)
                        bitmap = self.uuid_map_dict[uuid]
                        block_num = len(bitmap)
                        bls = []
                        for index in indices:
                            if index >= block_num:
                                logger.warning('input index too big %s for '
                                               'len of blocks %d from host %s',
                                               str(indices), block_num, client_addr)
                                sock.send_pyobj((SERVER_FETCH_FAIL, None))
                            else:
                                mmfp.seek(bitmap[index][0])
                                block = mmfp.read(bitmap[index][1])
                                bls.append(block)
                        mmfp.close()
                        sock.send_pyobj((SERVER_FETCH_OK, (indices, bls)))
                    else:
                        logger.warning('server fetch failed for uuid %s '
                                       'not exists in server %s from host %s',
                                       uuid, socket.gethostname(), client_addr)
                        sock.send_pyobj((SERVER_FETCH_FAIL, None))
                elif type == DATA_GET:
                    uuid, compressed_size = msg
                    if uuid not in self.uuid_state_dict or not self.uuid_state_dict[uuid][1]:
                        if uuid not in self.download_threads:
                            sources = self._get_sources(uuid, guide_sock)
                            if not sources:
                                logger.warning('get sources from guide server failed in host %s',
                                               socket.gethostname())
                                sock.send_pyobj(DATA_GET_FAIL)
                                continue
                            self.download_threads[uuid] = spawn(self._download_blocks,
                                                                *[sources, uuid, compressed_size])
                            sock.send_pyobj(DATA_DOWNLOADING)
                        else:
                            sock.send_pyobj(DATA_DOWNLOADING)
                    else:
                        sock.send_pyobj(DATA_GET_OK)
                elif type == SERVER_CLEAR_ITEM:
                    uuid = msg
                    self.clear(uuid)
                    sock.send_pyobj(None)
                else:
                    logger.error('Unknown server message: %s %s', type, msg)
                    sock.send_pyobj(None)
            sock.close()
            logger.debug("stop Broadcast server %s", server_addr)
            for uuid in list(self.uuid_state_dict.keys()):
                self.clear(uuid)
        return server_addr, spawn(run)

    def get_blocks(self, uuid):
        """Return the in-memory blocks for `uuid`, or None if unknown."""
        if uuid in self.master_broadcast_blocks:
            return self.master_broadcast_blocks[uuid]
        if uuid in self.shared_master_blocks:
            return self.shared_master_blocks[uuid]

    def register_blocks(self, uuid, blocks):
        """Register master-side blocks for `uuid` (no-op if already present)."""
        if uuid in self.master_broadcast_blocks:
            logger.warning('the block uuid %s exists in dict', uuid)
            return
        self.master_broadcast_blocks[uuid] = blocks
        self.shared_master_blocks[uuid] = blocks

    def _get_sources(self, uuid, source_sock):
        """Ask the guide server who holds blocks of `uuid`.

        Returns {addr: bitmap}; empty dict on any communication error.
        """
        try:
            source_sock.send_pyobj((GUIDE_GET_SOURCES,
                                    uuid))
            sources = source_sock.recv_pyobj()
        except Exception:
            # FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            logger.warning('GET sources failed for addr %s with ZMQ ERR',
                           self.server_addr)
            sources = {}
        return sources

    def _update_sources(self, uuid, bitmap, source_sock):
        """Best-effort: publish our current bitmap to the guide server."""
        try:
            source_sock.send_pyobj((GUIDE_SET_SOURCES,
                                    (uuid, self.server_addr, bitmap)))
            source_sock.recv_pyobj()
        except Exception:
            # FIX: was a bare `except:`; failures here are deliberately
            # ignored (the next loop iteration retries).
            pass

    def _download_blocks(self, sources, uuid, compressed_size):
        """Download all blocks of `uuid` from peers into a local mmap'ed file.

        Runs in its own thread.  Loops until the bitmap is complete,
        fetching batches from randomly-ordered remote peers and
        re-querying the guide server between rounds.
        """
        block_num = 0
        bitmap = [0]
        write_mmap_handler = None
        download_guide_sock = self.ctx.socket(zmq.REQ)
        download_guide_sock.setsockopt(zmq.LINGER, 0)
        download_guide_sock.connect(self.guide_addr)

        def _report_bad(addr):
            # Tell the guide server this peer is unreachable/broken.
            logger.debug('fetch blocks failed from server %s', addr)
            download_guide_sock.send_pyobj((GUIDE_REPORT_BAD, (uuid, addr)))
            download_guide_sock.recv_pyobj()

        def _fetch(addr, indices, bit_map):
            sock = self.ctx.socket(zmq.REQ)
            try:
                sock.setsockopt(zmq.LINGER, 0)
                sock.connect(addr)
                sock.send_pyobj((SERVER_FETCH, (uuid, indices, self.server_addr)))
                avail = sock.poll(1 * 1000, zmq.POLLIN)
                check_sock = None
                if not avail:
                    # Timed out: probe the peer with a raw TCP connect to
                    # distinguish "slow" from "gone".
                    try:
                        check_sock = socket.socket()
                        addr_list = addr[len('tcp://'):].split(':')
                        addr_list[1] = int(addr_list[1])
                        check_sock.connect(tuple(addr_list))
                    except Exception as e:
                        # BUG FIX: `e.message` does not exist on Python 3
                        # exceptions; log the exception object itself.
                        logger.warning('connect the addr %s failed with exception %s',
                                       addr, e)
                        _report_bad(addr)
                    else:
                        logger.debug("%s recv broadcast %s from %s timeout",
                                     self.server_addr, str(indices), addr)
                    finally:
                        if check_sock:
                            check_sock.close()
                    return
                result, msg = sock.recv_pyobj()
                if result == SERVER_FETCH_FAIL:
                    _report_bad(addr)
                    return
                if result == SERVER_FETCH_OK:
                    indices, blocks = msg
                    for rank, index in enumerate(indices):
                        if blocks[rank] is not None:
                            write_mmap_handler.seek(bit_map[index][0])
                            write_mmap_handler.write(blocks[rank])
                            bitmap[index] = bit_map[index]
                else:
                    raise RuntimeError('Unknown server response: %s %s' % (result, msg))
            finally:
                sock.close()

        final_path = gen_broadcast_path(self.work_dirs, uuid)
        self.uuid_state_dict[uuid] = final_path, False
        # Pre-size the file so blocks can be written at their offsets.
        fp = open(final_path, 'wb')
        fp.truncate(compressed_size)
        fp.close()
        fd = os.open(final_path, os.O_RDWR)
        write_mmap_handler = mmap.mmap(fd, 0,
                                       access=ACCESS_WRITE)
        os.close(fd)
        while not all(bitmap):
            remote = []
            for addr, _bitmap in six.iteritems(sources):
                if block_num == 0:
                    # Learn the real block count from the first bitmap seen.
                    block_num = len(_bitmap)
                    bitmap = [0] * block_num
                    self.uuid_map_dict[uuid] = bitmap
                if not addr.startswith('tcp://%s:' % self.host):
                    remote.append((addr, _bitmap))
            # Randomize peer and block order to spread load.
            self.random_inst.shuffle(remote)
            for addr, _bitmap in remote:
                indices = [i for i in range(block_num) if not bitmap[i] and _bitmap[i]]
                if indices:
                    self.random_inst.shuffle(indices)
                    _fetch(addr, indices[:BATCHED_BLOCKS], _bitmap)
                    self._update_sources(uuid, bitmap, download_guide_sock)
            sources = self._get_sources(uuid, download_guide_sock)
        write_mmap_handler.flush()
        write_mmap_handler.close()
        self.shared_uuid_map_dict[uuid] = bitmap
        self.shared_uuid_fn_dict[uuid] = self.uuid_state_dict[uuid][0]
        self.uuid_state_dict[uuid] = self.uuid_state_dict[uuid][0], True
        download_guide_sock.close()
        with self.download_cond:
            self.download_cond.notify_all()

    def clear(self, uuid):
        """Drop all local and shared state for `uuid`."""
        if uuid in self.master_broadcast_blocks:
            del self.master_broadcast_blocks[uuid]
            del self.shared_master_blocks[uuid]
        if uuid in self.uuid_state_dict:
            del self.uuid_state_dict[uuid]
        if uuid in self.shared_uuid_fn_dict:
            del self.shared_uuid_fn_dict[uuid]
            del self.shared_uuid_map_dict[uuid]

    def shutdown(self):
        if not self._started:
            return
        self._started = False
        # Only the binding process stops the server; give it 100ms to ack.
        if self.server_thread and self.server_addr.\
                startswith('tcp://%s:' % socket.gethostname()):
            req = self.ctx.socket(zmq.REQ)
            req.setsockopt(zmq.LINGER, 0)
            req.connect(self.server_addr)
            req.send_pyobj((SERVER_STOP, None))
            avail = req.poll(1 * 100, zmq.POLLIN)
            if avail:
                req.recv_pyobj()
            req.close()
            for _, th in six.iteritems(self.download_threads):
                th.join()
            self.server_thread.join()
            self.manager.shutdown()
            self.manager.join()
def accumulate_list(l):
    """Return exclusive-then-inclusive prefix sums of `l`.

    The result has len(l)+1 entries: [0, l[0], l[0]+l[1], ..., sum(l)],
    i.e. each element's starting offset followed by the total size.
    Uses itertools.accumulate instead of a hand-rolled loop.
    """
    from itertools import accumulate
    return [0] + list(accumulate(l))
class BroadcastManager(object):
    """Front-end for broadcasting Python objects between processes.

    Serializes a value into compressed fixed-size blocks, registers them
    with the local DownloadManager/guide server, and reassembles values
    fetched from peers.  Each block stream carries a 1-byte serialization
    type and a 16-bit CRC checksum (header_fmt).
    """
    header_fmt = '>BI'
    header_len = struct.calcsize(header_fmt)

    def __init__(self):
        self._started = False
        self.guide_addr = None
        self.download_addr = None
        self.cache = None
        self.shared_uuid_fn_dict = None
        self.shared_uuid_map_dict = None
        self.download_cond = None
        self.ctx = None

    def start(self):
        # Idempotent: wires this manager to the process-wide download
        # manager's shared state.
        if self._started:
            return
        self._started = True
        start_download_manager()
        self.guide_addr = env.get(GUIDE_ADDR)
        self.download_addr = env.get(DOWNLOAD_ADDR)
        self.cache = Cache()
        self.ctx = zmq.Context()
        self.shared_uuid_fn_dict = _download_manager.shared_uuid_fn_dict
        self.shared_uuid_map_dict = _download_manager.shared_uuid_map_dict
        self.download_cond = _download_manager.download_cond

    def register(self, uuid, value):
        """Serialize `value`, publish its blocks, and return total size."""
        self.start()
        if uuid in self.shared_uuid_fn_dict:
            raise RuntimeError('broadcast %s has already registered' % uuid)
        blocks, size, block_map = self.to_blocks(uuid, value)
        _download_manager.register_blocks(uuid, blocks)
        self._update_sources(uuid, block_map)
        self.cache.put(uuid, value)
        return size

    def _update_sources(self, uuid, bitmap):
        # Announce to the guide server that this host holds all blocks.
        guide_sock = self.ctx.socket(zmq.REQ)
        try:
            guide_sock.setsockopt(zmq.LINGER, 0)
            guide_sock.connect(self.guide_addr)
            guide_sock.send_pyobj((GUIDE_SET_SOURCES,
                                   (uuid, self.download_addr, bitmap)))
            guide_sock.recv_pyobj()
        finally:
            guide_sock.close()

    def clear(self, uuid):
        """Invalidate the cached value and clear server-side state."""
        assert self._started
        self.cache.put(uuid, None)
        sock = self.ctx.socket(zmq.REQ)
        sock.connect(self.download_addr)
        sock.send_pyobj((SERVER_CLEAR_ITEM, uuid))
        sock.recv_pyobj()
        sock.close()

    def fetch(self, uuid, compressed_size):
        """Return the broadcast value, fetching blocks from peers if needed."""
        start_download_manager()
        self.start()
        value = self.cache.get(uuid)
        if value is not None:
            return value
        blocks = _download_manager.get_blocks(uuid)
        if blocks is None:
            blocks = self.fetch_blocks(uuid, compressed_size)
        value = self.from_blocks(uuid, blocks)
        return value

    def _get_blocks_by_filename(self, file_name, block_map):
        # Slice the downloaded file back into per-block byte strings.
        fp = open(file_name, 'rb')
        buf = fp.read()
        blocks = [buf[offset: offset + size] for offset, size in block_map]
        fp.close()
        return blocks

    def fetch_blocks(self, uuid, compressed_size):
        """Obtain blocks via the download server, waiting for completion.

        Sends DATA_GET and either reads the finished file immediately,
        raises on failure, or blocks on download_cond until the
        downloader thread publishes the file.
        """
        if uuid in self.shared_uuid_fn_dict:
            return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
                                                self.shared_uuid_map_dict[uuid])
        download_sock = self.ctx.socket(zmq.REQ)
        download_sock.connect(self.download_addr)
        download_sock.send_pyobj((DATA_GET,
                                  (uuid, compressed_size)))
        res = download_sock.recv_pyobj()
        if res == DATA_GET_OK:
            return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
                                                self.shared_uuid_map_dict[uuid])
        if res == DATA_GET_FAIL:
            raise RuntimeError('Data GET failed for uuid:%s' % uuid)
        while True:
            with self.download_cond:
                if uuid not in self.shared_uuid_fn_dict:
                    self.download_cond.wait()
                else:
                    break
        if uuid in self.shared_uuid_fn_dict:
            return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
                                                self.shared_uuid_map_dict[uuid])
        else:
            raise RuntimeError('get blocks failed')

    def to_blocks(self, uuid, obj):
        """Serialize (uuid, obj) into compressed blocks.

        Returns (blocks, total_compressed_size, block_map) where
        block_map is [(offset, size), ...] for each block.
        Marshal is tried first for speed; anything unmarshalable (or
        failing mid-marshal) falls back to pickle.
        """
        try:
            if marshalable(obj):
                buf = marshal.dumps((uuid, obj))
                type = MARSHAL_TYPE
            else:
                buf = six.moves.cPickle.dumps((uuid, obj), -1)
                type = PICKLE_TYPE
        except Exception:
            buf = six.moves.cPickle.dumps((uuid, obj), -1)
            type = PICKLE_TYPE
        # 16-bit checksum guards against corrupted/mixed-up blocks.
        checksum = binascii.crc32(buf) & 0xFFFF
        stream = struct.pack(self.header_fmt, type, checksum) + buf
        blockNum = (len(stream) + (BLOCK_SIZE - 1)) >> BLOCK_SHIFT
        blocks = [compress(stream[i*BLOCK_SIZE:(i+1)*BLOCK_SIZE]) for i in range(blockNum)]
        sizes = [len(block) for block in blocks]
        size_l = accumulate_list(sizes)
        block_map = list(izip(size_l[:-1], sizes))
        return blocks, size_l[-1], block_map

    def from_blocks(self, uuid, blocks):
        """Reassemble and deserialize blocks; verifies checksum and uuid."""
        stream = b''.join(map(decompress, blocks))
        type, checksum = struct.unpack(self.header_fmt, stream[:self.header_len])
        buf = stream[self.header_len:]
        _checksum = binascii.crc32(buf) & 0xFFFF
        if _checksum != checksum:
            raise RuntimeError('Wrong blocks: checksum: %s, expected: %s' % (
                _checksum, checksum))
        if type == MARSHAL_TYPE:
            _uuid, value = marshal.loads(buf)
        elif type == PICKLE_TYPE:
            _uuid, value = six.moves.cPickle.loads(buf)
        else:
            raise RuntimeError('Unknown serialization type: %s' % type)
        if uuid != _uuid:
            raise RuntimeError('Wrong blocks: uuid: %s, expected: %s' % (_uuid, uuid))
        return value

    def shutdown(self):
        if not self._started:
            return
        self._started = False
# Process-wide singletons; started lazily via the helpers below.
_manager = BroadcastManager()
_download_manager = DownloadManager()
_guide_manager = GuideManager()


def start_guide_manager():
    # Idempotent: GuideManager.start() returns immediately if running.
    _guide_manager.start()


def start_download_manager():
    # Idempotent: DownloadManager.start() returns immediately if running.
    _download_manager.start()


def stop_manager():
    # Shut everything down and unregister the advertised addresses.
    _manager.shutdown()
    _download_manager.shutdown()
    _guide_manager.shutdown()
    env.environ.pop(GUIDE_ADDR, None)
    env.environ.pop(DOWNLOAD_ADDR, None)
class Broadcast(object):
    """User-facing handle for a broadcast value.

    Pickles down to just (uuid, compressed_size); on a worker the value
    is fetched lazily the first time any attribute is touched (see
    __getattr__).  The handle proxies common container protocols to the
    underlying value.
    """

    def __init__(self, value):
        assert value is not None, 'broadcast object should not been None'
        self.uuid = str(uuid.uuid4())
        self.value = value
        self.compressed_size = _manager.register(self.uuid, self.value)
        block_num = (self.compressed_size + BLOCK_SIZE - 1) >> BLOCK_SHIFT
        self.bytes = block_num * BLOCK_SIZE
        logger.info("broadcast %s in %d blocks", self.uuid, block_num)

    def clear(self):
        # Drop the value everywhere; later fetches will fail.
        _manager.clear(self.uuid)

    def __getstate__(self):
        # Only the identity travels over the wire; the value is re-fetched.
        return self.uuid, self.compressed_size

    def __setstate__(self, v):
        self.uuid, self.compressed_size = v

    def __getattr__(self, name):
        # Called only for attributes not yet set on the instance.  After
        # unpickling, 'value' is missing, so its first access falls
        # through to the fetch below; any other attribute is delegated to
        # the (possibly just-fetched) value.
        if name != 'value':
            return getattr(self.value, name)
        value = _manager.fetch(self.uuid, self.compressed_size)
        if value is None:
            raise RuntimeError("fetch broadcast failed")
        self.value = value
        return value

    def __len__(self):
        return len(self.value)

    def __iter__(self):
        return self.value.__iter__()

    def __getitem__(self, key):
        return self.value.__getitem__(key)

    def __contains__(self, item):
        return self.value.__contains__(item)

    def __missing__(self, key):
        return self.value.__missing__(key)

    def __reversed__(self):
        return self.value.__reversed__()
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\User;
use App\Http\Requests;
use App\Http\Controllers\Controller;
use Auth;
class PokemonController extends Controller
{
    /**
     * Requires an authenticated user for every action on this controller.
     *
     * NOTE(review): returning a redirect from a constructor has no effect —
     * the return value is discarded by PHP, so the flash message is set but
     * the redirect never happens.  The 'auth' middleware registered on the
     * line above should already keep guests out; confirm whether this extra
     * guard is needed, and if so move it into middleware instead.
     */
    public function __construct()
    {
        $this->middleware('auth');
        $userActual = Auth::User();
        if($userActual == null){
            flash('No tiene los permisos necesarios')->error()->important();
            return redirect('/');
        }
    }
}
|
<?php
/**
* @filesource modules/index/models/memberstatus.php
*
* @copyright 2016 Goragod.com
* @license http://www.kotchasan.com/license/
*
* @see http://www.kotchasan.com/
*/
namespace Index\Memberstatus;
use Gcms\Config;
use Gcms\Login;
use Kotchasan\Http\Request;
use Kotchasan\Language;
/**
* module=memberstatus
*
* @author Goragod Wiriya <admin@goragod.com>
*
* @since 1.0
*/
class Model extends \Kotchasan\KBase
{
    /**
     * Save member statuses (memberstatus.php).
     *
     * Handles AJAX actions to add, delete, or edit member-status names and
     * colors, persists changes to the site config, and responds with JSON.
     *
     * @param Request $request
     */
    public function action(Request $request)
    {
        $ret = array();
        // Require a valid session, same-site referer, and an admin login
        // with can_config permission that is not the demo account.
        if ($request->initSession() && $request->isReferer() && $login = Login::adminAccess()) {
            if (Login::checkPermission($login, 'can_config') && Login::notDemoMode($login)) {
                try {
                    // Load the site configuration.
                    $config = Config::load(CONFIG);
                    // Read the requested action from POST.
                    $action = $request->post('action')->toString();
                    // Nothing changed yet.
                    $save = false;
                    // Seed the two built-in statuses ("member", "administrator")
                    // and their colors if missing.
                    if (!isset($config->member_status[0])) {
                        $config->member_status[0] = 'สมาชิก';
                        $save = true;
                    }
                    if (!isset($config->member_status[1])) {
                        $config->member_status[1] = 'ผู้ดูแลระบบ';
                        $save = true;
                    }
                    if (!isset($config->color_status[0])) {
                        $config->color_status[0] = '#006600';
                        $save = true;
                    }
                    if (!isset($config->color_status[1])) {
                        $config->color_status[1] = '#FF0000';
                        $save = true;
                    }
                    if ($action === 'config_status_add') {
                        // Add a new member status with a placeholder name.
                        $config->member_status[] = Language::get('click to edit');
                        $config->color_status[] = '#000000';
                        // Index of the newly added status.
                        $i = count($config->member_status) - 1;
                        // Build the list-item markup for the new status.
                        $row = '<li id="config_status_'.$i.'">';
                        $row .= '<span class="icon-delete" id="config_status_delete_'.$i.'" title="'.Language::get('Delete').'"></span>';
                        $row .= '<span id="config_status_color_'.$i.'" title="'.$config->color_status[$i].'"></span>';
                        $row .= '<span id="config_status_name_'.$i.'" title="'.$config->member_status[$i].'">'.htmlspecialchars($config->member_status[$i]).'</span>';
                        $row .= '</li>';
                        // Return the new row markup and its element id.
                        $ret['data'] = $row;
                        $ret['newId'] = "config_status_$i";
                        $save = true;
                    } elseif (preg_match('/^config_status_delete_([0-9]+)$/', $action, $match)) {
                        // Delete a status (the first two built-ins are protected).
                        $save1 = array();
                        $save2 = array();
                        // Rebuild the status and color arrays without the
                        // deleted entry.
                        for ($i = 0; $i < count($config->member_status); ++$i) {
                            if ($i < 2 || $i != $match[1]) {
                                $save1[] = $config->member_status[$i];
                                $save2[] = $config->color_status[$i];
                            }
                        }
                        $config->member_status = $save1;
                        $config->color_status = $save2;
                        // Element id of the removed item.
                        $ret['del'] = str_replace('delete_', '', $action);
                        $save = true;
                    } elseif (preg_match('/^config_status_(name|color)_([0-9]+)$/', $action, $match)) {
                        // Edit a status name or color; empty input keeps the
                        // current value.
                        $value = $request->post('value')->text();
                        $match[2] = (int) $match[2];
                        if ($value == '' && $match[1] == 'name') {
                            $value = $config->member_status[$match[2]];
                        } elseif ($value == '' && $match[1] == 'color') {
                            $value = $config->color_status[$match[2]];
                        } elseif ($match[1] == 'name') {
                            $config->member_status[$match[2]] = $value;
                            $save = true;
                        } else {
                            $config->color_status[$match[2]] = $value;
                            $save = true;
                        }
                        // Send the (possibly unchanged) value back for display.
                        $ret['edit'] = $value;
                        $ret['editId'] = $action;
                    }
                    // Persist the config if anything changed.
                    if ($save && !Config::save($config, CONFIG)) {
                        $ret['alert'] = sprintf(Language::get('File %s cannot be created or is read-only.'), 'settings/config.php');
                    }
                } catch (\Kotchasan\InputItemException $e) {
                    $ret['alert'] = $e->getMessage();
                }
            }
        }
        if (empty($ret)) {
            $ret['alert'] = Language::get('Unable to complete the transaction');
        }
        // Respond as JSON.
        echo json_encode($ret);
    }
}
|
#include <iostream>
#include "sampler/metropolis/metropolis.h"
#include "sampler/gibbs/gibbs.h"
using namespace std;
// Entry point: configures and runs a neural-quantum-state (RBM) variational
// Monte Carlo optimization with Metropolis sampling.
// NOTE(review): Hamiltonian, NeuralQuantumState and Sgd are presumably
// declared transitively via the sampler headers above — confirm the
// includes are sufficient on their own.
int main() {
    // filenames
    string filename = "RBMoutput.txt";
    string blockFilename = "blocking.txt";
    // Nqs parameters
    int nx = 4;             // Number which represents particles*dimensions.
    int nh = 2;             // Number of hidden units.
    int dim = 2;            // Number of spatial dimensions
    double sigma = 1.0;     // Normal distribution visibles
    bool gaussianInitialization = true; // Weights & biases (a,b,w) initialized uniformly or gaussian
    // Sampler parameters
    int nCycles = 100;      // Number of optimization iterations
    int nSamples = 100000;  // Number of samples in each iteration
    random_device rd;       // Seed
    // Metropolis
    double step = 0.45;
    // Hamiltonian parameters
    double omega = 1.0;
    bool includeInteraction = true; // Include interaction or not
    // Optimizer parameters (choose either stochastic gradient descent (SGD) or adaptive SGD (ASGD))
    int nPar = nx + nh + nx*nh;     // Total number of variational parameters (a, b, w).
    // SGD parameters
    double eta = 0.01;      // must be >0. SGD learning rate (lr)
    /*
    // ASGD parameters. lr: gamma_i=a/(A+t_i) where t[i]=max(0, t[i-1]+f(-grad[i]*grad[i-1]))
    double a = 0.01;        // must be >0. Proportional to the lr
    double A = 20.0;        // must be >= 1. Inverse prop to the lr. (a/A) defines the max lr.
    // ASGD optional: parameters to the function f
    double asgdOmega;       // must be >0. As omega->0, f-> step function.
    double fmax;            // must be >0
    double fmin;            // must be <0
    // ASGD optional: initial conditions
    double t0;              // Suggested choices are t0=t1=A=20 (default)
    double t1;              // or t0=t1=0
    */
    // Create objects for the sampler:
    Hamiltonian hamiltonian(omega, includeInteraction);
    NeuralQuantumState nqs(nh, nx, dim, sigma, gaussianInitialization);
    Sgd optimizer(eta, nPar);
    // Create the sampler:
    Metropolis metropolisSampler(nSamples, nCycles, step, hamiltonian, nqs, optimizer, filename,
                                 blockFilename, rd());
    //Gibbs gibbsSampler(nSamples, nCycles, hamiltonian, nqs, optimizer, filename, blockFilename);
    // Run
    metropolisSampler.runOptimizationSampling();
    //gibbsSampler.runOptimizationSampling();
    return 0;
}
|
using Shared.Models;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
namespace Client.Services
{
    /// <summary>
    /// TCP client that connects to the local server on construction, sends
    /// JSON-serialized <c>Request</c> objects, and displays deserialized
    /// server responses in a message box.
    ///
    /// NOTE(review): the class name contains a typo ("Sevice") but renaming
    /// would break DI registrations — fix in a coordinated change.
    /// NOTE(review): socket/ip/endPoint are static but assigned per
    /// constructor call, so a second instance silently replaces the shared
    /// connection — confirm only one instance is ever created.
    /// </summary>
    class NetworkSevice : INetworkService
    {
        private const int PORT = 8080;
        private const string IPADD = "127.0.0.1";
        private static Socket socket;
        private static IPAddress? ip;
        private static IPEndPoint? endPoint;

        public NetworkSevice()
        {
            // Connects synchronously; construction throws if the server is down.
            socket = new(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
            ip = IPAddress.Parse(IPADD);
            endPoint = new(ip, PORT);
            socket.Connect(endPoint);
        }

        /// <summary>
        /// Receive loop: reads messages while connected and shows each one.
        /// NOTE(review): async void means exceptions escape to the sync
        /// context, and a single 65000-byte read assumes each server message
        /// fits in (and arrives as) one receive — verify framing.
        /// </summary>
        public async void HandleServerResponceAsync()
        {
            while (socket.Connected)
            {
                byte[] buffer = new byte[65000];
                int size = await socket.ReceiveAsync(buffer, SocketFlags.None);
                string data = Encoding.UTF8.GetString(buffer, 0, size);
                object obj = JsonSerializer.Deserialize<object>(data);
                MessageBox.Show(obj?.ToString());
            }
        }

        /// <summary>
        /// Serialize <paramref name="request"/> to JSON and send it.
        /// </summary>
        public async void SendCredentialsToServerAsync(Request request)
        {
            string data = JsonSerializer.Serialize<Request>(request);
            byte[] dataInBytes = Encoding.UTF8.GetBytes(data);
            await socket.SendAsync(dataInBytes, socketFlags: SocketFlags.None);
        }
    }
}
|
<?php
namespace App\Http\Controllers;
use Auth;
use Illuminate\Http\Request;
use App\User;
use Illuminate\Support\Facades\Hash;
use Illuminate\Support\Facades\Session;
class AuthController extends Controller
{
    /**
     * Show the login form.
     */
    public function login(){
        return view('auth.login');
    }
    /**
     * Attempt to authenticate with the submitted email/password.
     * Redirects to /beranda on success, back to /login otherwise.
     */
    public function postlogin(Request $request)
    {
        if(Auth::attempt($request->only('email','password'))){
            // Use the imported Session facade consistently (was the root alias \Session).
            Session::flash('sukses','Anda Berhasil Login');
            return redirect('/beranda');
        }
        Session::flash('gagal','Email Atau Password Salah');
        return redirect('/login');
    }
    /**
     * Log the user out, clear all session data, and return to the login page.
     */
    public function logout(){
        Auth::logout();
        Session::flush();
        return redirect('/login')->with('sukses','Kamu Sudah Logout');
    }
}
|
# frozen_string_literal: true
module ::Salesforce
  # Person subtype backed by the Salesforce "Contact" object.
  class Contact < Person
    # Field on the local user record holding the linked Salesforce contact id.
    ID_FIELD = "salesforce_contact_id"
    # Source value reported to Salesforce for records created here.
    SOURCE = "Web"
    # Salesforce API object name this class maps to.
    OBJECT_NAME = "Contact"
    # Group that synced contacts belong to (configured on the Salesforce module).
    def self.group
      Salesforce.contacts_group
    end
    # Attribute payload sent to Salesforce for the given user.
    def self.payload(user)
      user.salesforce_contact_payload
    end
  end
end
|
using NUnit.Framework;
using Shouldly;
namespace Eshop.Domain.UnitTests.Customers
{
    [TestFixture]
    // Given a customer whose delivery address was set in the base-class setup...
    public class when_setting_delivery_address : CustomerWithDeliveryAddressSetSetup
    {
        [Test]
        public void delivery_address_is_set()
        {
            // ...the customer exposes exactly the address supplied during setup.
            Customer.DeliveryAddress.ShouldBe(DeliveryAddress);
        }
    }
}
|
import 'isomorphic-fetch';
import 'abortcontroller-polyfill/dist/abortcontroller-polyfill-only';
/**
 * fetch with an optional timeout: resolves with the Response, or rejects once
 * timeoutMs elapses.
 * NOTE(review): behavior inferred from the signature and the AbortController
 * polyfill import — confirm against the fetch_async.ts implementation.
 */
export declare const fetchAsync: (endpoint: string, options?: RequestInit, timeoutMs?: number) => Promise<Response>;
//# sourceMappingURL=fetch_async.d.ts.map
|
import { MyService } from './service';
import sinon from 'sinon';
import { expect } from 'chai';
describe('61695981', () => {
  let clock;
  // Fake timers so the service's internal delay can be advanced synchronously.
  before(() => {
    clock = sinon.useFakeTimers();
  });
  after(() => {
    clock.restore();
  });
  it('should pass', async () => {
    const service = new MyService();
    // Start the async work first, then jump the clock past its 5s timer
    // before awaiting the result.
    const ansPromise = service.doSomething();
    clock.tick(5000);
    const ans = await ansPromise;
    expect(ans).to.equal('Done');
  });
});
|
---
layout: post
microblog: true
date: 2008-02-20 19:00 -0500
guid: http://bdougherty.micro.blog/2008/02/21/t737690092.html
---
Rochester clouds = lunar eclipse fail
|
import sys

import numpy as np
# `scipy.ndimage.interpolation` has been deprecated since SciPy 1.2;
# `zoom` is exposed directly from `scipy.ndimage`.
from scipy.ndimage import zoom

np.set_printoptions(threshold=sys.maxsize)

# Upsample a 4x4 random field to 32x32 with spline interpolation, threshold
# it at 0.5, and render the result as ASCII art ('-' above, '#' below).
arr = np.random.uniform(size=(4, 4))
arr = zoom(arr, 8)
arr = arr > 0.5
arr = np.where(arr, '-', '#')
arr = np.array_str(arr, max_line_width=500)
print(arr)
|
import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;
public class MandelbrotBW {

    /**
     * Escape-time iteration count for the point c = (cRe, cIm), capped at max.
     * Returns max when the point never escapes |z| >= 2 (assumed in the set).
     */
    static int escapeIterations(double cRe, double cIm, int max) {
        double x = 0, y = 0;
        int iterations = 0;
        while (x * x + y * y < 4 && iterations < max) {
            double xNew = x * x - y * y + cRe;
            y = 2 * x * y + cIm;
            x = xNew;
            iterations++;
        }
        return iterations;
    }

    /** Renders a 1920x1080 black-and-white Mandelbrot set to testmandelbrot.png. */
    public static void main(String[] args) throws Exception {
        int width = 1920, height = 1080, max = 5000;
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        int black = 0x000000, white = 0xFFFFFF;
        for (int row = 0; row < height; row++) {
            for (int col = 0; col < width; col++) {
                // Map the pixel to the complex plane; both axes are scaled by
                // width, so the aspect ratio is preserved.
                double c_re = (col - width/2)*4.0/width;
                double c_im = (row - height/2)*4.0/width;
                int iterations = escapeIterations(c_re, c_im, max);
                // Escaping points are white; points that survive max iterations
                // are treated as inside the set and painted black.
                if (iterations < max) image.setRGB(col, row, white);
                else image.setRGB(col, row, black);
            }
        }
        ImageIO.write(image, "png", new File("testmandelbrot.png"));
    }
}
|
-- Link table joining trade-license details to accounting voucher headers,
-- with standard audit columns (creator/modifier + timestamps).
CREATE TABLE EGTL_DEMAND_VOUCHER
(
ID BIGINT NOT NULL,
LICENSEDETAIL BIGINT NOT NULL,
VOUCHERHEADER BIGINT NOT NULL,
VERSION NUMERIC DEFAULT 0,
CREATEDBY BIGINT NOT NULL,
LASTMODIFIEDBY BIGINT NOT NULL,
CREATEDDATE TIMESTAMP WITHOUT TIME ZONE NOT NULL,
LASTMODIFIEDDATE TIMESTAMP WITHOUT TIME ZONE NOT NULL,
CONSTRAINT PK_EGTL_DEMAND_VOUCHER_ID PRIMARY KEY (ID),
CONSTRAINT FK_EGTL_DEMAND_VOUCHER_CREATEDBY FOREIGN KEY (CREATEDBY) REFERENCES EG_USER (ID),
CONSTRAINT FK_EGTL_DEMAND_VOUCHER_LASTMODIFIEDBY FOREIGN KEY (LASTMODIFIEDBY) REFERENCES EG_USER (ID),
CONSTRAINT FK_EGTL_DEMAND_VOUCHER_LICDETAIL FOREIGN KEY (licensedetail) REFERENCES EGTL_LICENSE (ID)
);
-- NOTE(review): VOUCHERHEADER has an index but no foreign-key constraint;
-- confirm whether that is intentional (e.g. the voucher table lives in
-- another schema/module).
CREATE INDEX idx_tl_demandvoucher_license ON EGTL_DEMAND_VOUCHER(licensedetail);
CREATE INDEX idx_tl_demandvoucher_voucherheader ON EGTL_DEMAND_VOUCHER(voucherheader);
COMMENT ON COLUMN EGTL_DEMAND_VOUCHER.licensedetail IS 'primary key of egtl_license';
COMMENT ON COLUMN EGTL_DEMAND_VOUCHER.voucherheader IS 'primary key of voucherheader';
COMMENT ON TABLE EGTL_DEMAND_VOUCHER IS 'This table stores link between Trade License details and voucherheader';
-- Sequence used by the application to generate ID values.
CREATE SEQUENCE SEQ_EGTL_DEMAND_VOUCHER;
|
/*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.quickjs;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
/**
 * End-to-end tests for the QuickJs engine wrapper: script evaluation,
 * Java-side exception mapping, and JS-to-Java type conversion.
 */
public final class QuickJsTest {
  private QuickJs quickjs;
  // A fresh native engine per test; released in tearDown.
  @Before public void setUp() {
    quickjs = QuickJs.create();
  }
  @After public void tearDown() {
    quickjs.close();
  }
  // Evaluating a script yields the value of its final expression.
  @Test public void helloWorld() {
    String hello = (String) quickjs.evaluate("'hello, world!'.toUpperCase();");
    assertThat(hello).isEqualTo("HELLO, WORLD!");
  }
  // A JS runtime error surfaces as a QuickJsException carrying the JS message.
  @Test public void exceptionsInScriptThrowInJava() {
    try {
      quickjs.evaluate("nope();");
      fail();
    } catch (QuickJsException e) {
      assertThat(e).hasMessageThat().isEqualTo("nope is not defined");
    }
  }
  // JS primitives map to the corresponding Java types; undefined and null
  // both map to Java null.
  @Test public void returnTypes() {
    assertThat(quickjs.evaluate("\"test\";")).isEqualTo("test");
    assertThat(quickjs.evaluate("true;")).isEqualTo(true);
    assertThat(quickjs.evaluate("false;")).isEqualTo(false);
    assertThat(quickjs.evaluate("1;")).isEqualTo(1);
    assertThat(quickjs.evaluate("1.123;")).isEqualTo(1.123);
    assertThat(quickjs.evaluate("undefined;")).isNull();
    assertThat(quickjs.evaluate("null;")).isNull();
  }
  // The JS call stack is translated into synthetic StackTraceElements whose
  // file name and line numbers come from the evaluated source.
  @Test public void exceptionsInScriptIncludeStackTrace() {
    try {
      quickjs.evaluate("\n"
          + "f1();\n" // Line 2.
          + "\n"
          + "function f1() {\n"
          + "  f2();\n" // Line 5.
          + "}\n"
          + "\n"
          + "\n"
          + "function f2() {\n"
          + "  nope();\n" // Line 10.
          + "}\n", "test.js");
      fail();
    } catch (QuickJsException e) {
      assertThat(e).hasMessageThat().isEqualTo("nope is not defined");
      assertThat(e.getStackTrace()).asList().containsAtLeast(
          new StackTraceElement("JavaScript", "<eval>", "test.js", 2),
          new StackTraceElement("JavaScript", "f1", "test.js", 5),
          new StackTraceElement("JavaScript", "f2", "test.js", 10));
    }
  }
}
|
--
-- Base de datos: `newsletter`
--
CREATE DATABASE IF NOT EXISTS `newsletter` DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci;
USE `newsletter`;
--
-- Table structure for table `users`: one row per newsletter subscriber,
-- with the registration timestamp defaulting to the insert time.
--
DROP TABLE IF EXISTS `users`;
CREATE TABLE IF NOT EXISTS `users` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`email` varchar(60) NOT NULL,
`fecha_registro` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE=innoDB DEFAULT CHARSET=utf8;
COMMIT;
|
import React from "react";
// Shape of the token shared through React context: the current value plus a
// setter that replaces it.
interface TokenObject {
  value: string | null;
  set(a: string): void;
}
// Default context value: empty token and a no-op setter. Real values are
// supplied by a provider higher in the component tree.
const TokenContext = React.createContext({
  value: "",
  set: (a: string) => {},
} as TokenObject);
export { TokenContext };
|
# Copyright 2020-2021 Couchbase, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative "test_helper"
require "couchbase/datastructures/couchbase_list"
module Couchbase
  module Datastructures
    # Integration tests for CouchbaseList backed by a live cluster connection
    # (provided by TestUtilities#connect). Each test uses a unique document id
    # so the tests do not interfere with each other.
    class CouchbaseListTest < Minitest::Test
      include TestUtilities
      def setup
        connect
        @bucket = @cluster.bucket(env.bucket)
        @collection = @bucket.default_collection
      end
      def teardown
        disconnect
      end
      # A list with no backing document behaves as empty.
      def test_new_list_empty
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        assert_equal 0, list.size
        assert_empty list
      end
      def test_new_list_yields_no_elements
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        actual = []
        list.each do |element|
          actual << element
        end
        assert_empty actual
      end
      def test_at_returns_nil_for_new_list
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        refute list.at(0)
      end
      # push persists the document: a second wrapper over the same id sees it.
      def test_push_creates_new_list
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        list.push(1, 2, 3)
        assert_equal 3, list.size
        refute_empty list
        list = CouchbaseList.new(doc_id, @collection)
        assert_equal 3, list.size
        refute_empty list
      end
      # unshift also persists the document.
      def test_unshift_creates_new_list
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        list.unshift(1, 2, 3)
        assert_equal 3, list.size
        refute_empty list
        list = CouchbaseList.new(doc_id, @collection)
        assert_equal 3, list.size
        refute_empty list
      end
      # clear removes the backing document entirely, not just its contents.
      def test_clear_drops_the_list
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        list.push(1, 2, 3)
        assert_equal 3, list.size
        list.clear
        assert_raises(Error::DocumentNotFound) do
          @collection.get(doc_id)
        end
        assert_empty list
      end
      # Negative indexes count from the end, like Ruby's Array#at.
      def test_at_returns_last_entry
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        list.push(1, 2, 3)
        assert_equal 3, list.size
        assert_equal 3, list.at(-1)
      end
      def test_removes_last_entry
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        list.push(1, 2, 3)
        assert_equal 3, list.size
        list.delete_at(-1)
        result = @collection.get(doc_id)
        assert_equal [1, 2], result.content
      end
      # insert places new elements before the given index.
      def test_inserts_into_center_of_the_list
        doc_id = uniq_id(:foo)
        list = CouchbaseList.new(doc_id, @collection)
        list.push(1, 2, 3, 4)
        list.insert(2, "hello", "world")
        result = @collection.get(doc_id)
        assert_equal [1, 2, "hello", "world", 3, 4], result.content
      end
    end
  end
end
|
//-----------------------------------------------------------------------------
// <copyright file="JsonAssert.cs" company=".NET Foundation">
// Copyright (c) .NET Foundation and Contributors. All rights reserved.
// See License.txt in the project root for license information.
// </copyright>
//------------------------------------------------------------------------------
using Newtonsoft.Json.Linq;
using Xunit;
namespace Microsoft.AspNet.OData.Test.Formatter
{
    internal static class JsonAssert
    {
        /// <summary>
        /// Asserts that two JSON strings are semantically equal: both are
        /// parsed with JToken and compared structurally, so formatting and
        /// whitespace differences are ignored.
        /// </summary>
        public static void Equal(string expected, string actual)
        {
            Assert.Equal(JToken.Parse(expected), JToken.Parse(actual), JToken.EqualityComparer);
        }
    }
}
|
use std::io::{Error, BufRead, BufReader};
use std::fs::File;
use std::path::Path;
use async_std::task;
use sqlx::Connection;
use sqlx::any::AnyConnection;
use sqlx::any::AnyQueryResult;
const CONFIG: &str = "/root/confixx/confixx_main.conf";
/// Returns true when the Confixx main configuration file is present on disk.
pub fn is_confixx() -> bool {
    Path::new(CONFIG).exists()
}
/// Temporary password-reset flow for the Confixx admin account:
/// 1. read the current admin login and password hash,
/// 2. overwrite the password with the supplied temporary one,
/// 3. wait for the operator to finish, then restore the original hash.
pub fn run(password: &str) {
    let mut username = String::new();
    let mut password_hash = String::new();
    task::block_on(async {
        let (u, h) = get_login().await.unwrap();
        username = u;
        password_hash = h;
    });
    println!("Admin account: {}\nPassword hash: {}\n", &username, &password_hash);
    task::block_on(async {
        set_login(&username, &password).await.unwrap();
    });
    println!("Temporary password: {}\n", &password);
    // Block until the operator is done using the temporary password.
    super::wait();
    task::block_on(async {
        reset_login(&username, &password_hash).await.unwrap();
    });
    // Fixed typo in the user-facing message ("Passwort" -> "Password").
    println!("Password has been reset!");
}
/// Parse the MySQL credentials (user, password, host, database) out of the
/// Confixx main configuration file.
///
/// Config lines look like `dbUser = 'name';` — the line is split on `=`,
/// quotes and semicolons are stripped from the value, and whitespace trimmed.
fn get_mysql_credentials() -> Result<(String, String, String, String), Error> {
    let mut username = String::new();
    let mut password = String::new();
    let mut hostname = String::new();
    let mut database = String::new();
    // `flatten()` skips unreadable lines instead of aborting mid-file,
    // matching the old `if let Ok(line)` behavior.
    for line in BufReader::new(File::open(CONFIG)?).lines().flatten() {
        let vec: Vec<&str> = line.split('=').collect();
        if vec.len() > 1 {
            // `replace` already returns an owned String; the previous
            // `String::from(...)` wrapper around it was a no-op.
            let value = vec[1].replace(&['\'', ';'][..], "").trim().to_string();
            if vec[0].contains("dbUser") {
                username = value;
            } else if vec[0].contains("dbPw") {
                password = value;
            } else if vec[0].contains("dbServer") {
                hostname = value;
            } else if vec[0].contains("dbDB") {
                database = value;
            }
        }
    }
    Ok((username, password, hostname, database))
}
/// Fetch the admin login name and stored password hash from the Confixx
/// database (`admin` table, columns `login` and `longpw`).
async fn get_login() -> Result<(String, String), sqlx::Error> {
    let (username, password, hostname, database) = get_mysql_credentials()?;
    let mut conn = AnyConnection::
        connect(&format!("mysql://{}:{}@{}/{}", username, password, hostname, database)).await?;
    let row: (String, String) = sqlx::query_as("SELECT login, longpw FROM admin")
        .fetch_one(&mut conn).await?;
    Ok(row)
}
/// Overwrite the password of the given admin login with the plaintext
/// `password`, hashed server-side.
/// NOTE(review): relies on MySQL's ENCRYPT(), which was removed in
/// MySQL 8.0 — confirm the target server version supports it.
async fn set_login(username: &str, password: &str) -> Result<AnyQueryResult, sqlx::Error> {
    let (db_username, db_password, hostname, database) = get_mysql_credentials()?;
    let mut conn = AnyConnection::
        connect(&format!("mysql://{}:{}@{}/{}", db_username, db_password, hostname, database)).await?;
    sqlx::query("UPDATE admin SET longpw = ENCRYPT(?) WHERE login = ?")
        .bind(password)
        .bind(username)
        .execute(&mut conn)
        .await
}
/// Restore the previously saved password hash verbatim (no re-hashing) for
/// the given admin login.
async fn reset_login(username: &str, password_hash: &str) -> Result<AnyQueryResult, sqlx::Error> {
    let (db_username, db_password, hostname, database) = get_mysql_credentials()?;
    let mut conn = AnyConnection::
        connect(&format!("mysql://{}:{}@{}/{}", db_username, db_password, hostname, database)).await?;
    sqlx::query("UPDATE admin SET longpw = ? WHERE login = ?")
        .bind(password_hash)
        .bind(username)
        .execute(&mut conn)
        .await
}
|
# Configuration
If for any reason this library fails to detect the actual type of the referenced field, application
developers can take a shortcut and specify the type in configuration.
To do this, the `migration.compatibility.map.<referenced_table>.<referenced_field>` configuration
value has to be set.
## Examples
To tell the migrations that the `users.id` field is unsigned bigint, you have to set the
`migration.compatibility.map.users.id` config key's value to `bigint unsigned`.
### From Within AppServiceProvider
```php
// app/Providers/AppServiceProvider.php
class AppServiceProvider extends ServiceProvider
{
// ...
public function boot()
{
$this->app['config']->set("migration.compatibility.map.users.id", 'bigint unsigned');
}
// ...
}
```
### From Within a Config File
Because of the way Laravel loads config files, you can also create a file called `migration.php`
in the application's `config/` folder:
```php
// config/migration.php
return [
'compatibility' => [
'map' => [
'users' => [
'id' => 'int unsigned'
]
]
]
];
```
## Possible Values
This library only supports integer based types, the possible values in the configuration file are:
- `bigint unsigned`
- `int unsigned`
- `bigint` (will be signed)
- `int` (will be signed)
## Other Fields
The main reason for this library coming to birth was the ubiquity of Laravel's default `users` table.
This library can detect any other table+field combination:
```php
$table->intOrBigIntBasedOnRelated('comment_id', Schema::connection(null), 'comments.id');
```
To explicitly configure the type of the example above the `migration.compatibility.map.comments.id`
configuration value has to be set:
```php
config(['migration.compatibility.map.comments.id' => 'int unsigned']);
```
|
## iOS 平台手动集成
拖拽下列 framework 从 `InMobi` 插件包的 __plugins/ios__ 目录到您的 Xcode 工程中,在添加 frameworks 的时候,请勾选 `Copy items if needed`:
> sdkbox.framework
> PluginInMobi.framework
上面的 frameworks 依赖于其他 frameworks。如果您没有添加它们,您也需要添加下列这些 frameworks:
> AdSupport.framework
> AudioToolbox.framework
> AVFoundation.framework
> CoreLocation.framework
> CoreTelephony.framework
> EventKit.framework
> EventKitUI.framework
> MediaPlayer.framework
> MessageUI.framework
> Security.framework
> Social.framework
> StoreKit.framework
> SystemConfiguration.framework
> UIKit.framework
> SafariServices.framework
> GameController.framework
> libsqlite3.0.tbd
> libc++.tbd
|
[BITS 16]
; Segment the kernel image is loaded into (0x0800:0x0000 = physical 0x8000).
%define KERNEL_BASE_SEGMENT 0x0800
; BIOS loads the boot sector at 0x07C0:0x0000 (physical 0x7C00).
%define BOOT_SECTOR_BASE_SEGMENT 0x07C0
%define STACK_SEGMENT 0x00
%define STACK_OFFSET 0x6FFF
; Number of 512-byte sectors read from disk for the kernel.
%define NB_SECTORS_TO_COPY 128
global _start
_start:
    jmp _boot_sector_start
%include "includes/utils.inc"
_boot_sector_start:
    sti
    ; Point the data/extra segments at the boot sector's load segment.
    mov ax, BOOT_SECTOR_BASE_SEGMENT
    mov ds, ax
    mov es, ax
    ; Set up a real-mode stack at 0x0000:0x6FFF.
    mov ax, STACK_SEGMENT
    mov ss, ax
    mov sp, STACK_OFFSET
    ; BIOS passes the boot drive number in DL; save it for later disk reads.
    mov [boot_drive], dl
    ;mov ax, 0x02
    ;mov bx, 0x0
    ;mov dx, 0x0
    ; Say hello
    mov si, msg_welcome
    call _print
    ; Read the hard-disk count byte from the BIOS Data Area (0x0000:0x0475)
    ; and print it as a single decimal digit via int 10h teletype (AH=0x0E).
    push ds
    mov ax, 0
    mov ds, ax
    mov si, 0x0475
    lodsb
    add al, '0'
    mov ah, 0x0E
    mov bh, 0x00
    mov bl, 0x0F
    int 0x10
    pop ds
    mov si, periph
    call _print
    ; Query available memory (int 15h, AX=0xE801) and print it.
    mov ax, 0xE801
    int 0x15
    shl ebx, 6 ; multiply by 64: BX holds the number of 64 KB blocks
    add eax, ebx
    call _convert_number_in_string
    mov si, nb_str
    call _print
    mov si, memory
    call _print
    mov si, msg_kernel_loading
    call _print
    ; Reset the disk system (int 13h, AH=0).
    xor ax, ax
    int 0x13
    ; Read NB_SECTORS_TO_COPY sectors starting at sector 2 (right after the
    ; boot sector) into KERNEL_BASE_SEGMENT:0x0000 (int 13h, AH=0x02).
    push es
    mov ax, KERNEL_BASE_SEGMENT
    mov es, ax
    mov bx, 0
    mov ah, 0x02
    mov al, NB_SECTORS_TO_COPY
    mov ch, 0
    mov cl, 2
    mov dh, 0
    mov dl, [boot_drive]
    int 0x13
    pop es
    ; Carry flag clear means the read succeeded.
    jnc jump_kernel
    mov si, msg_erreur
    call _print
    hlt
jump_kernel:
    ; Pass the boot drive to the kernel on the stack, then far-jump to it.
    xor ax, ax
    mov al, [boot_drive]
    push ax
    jmp dword KERNEL_BASE_SEGMENT:0x0
; ***************************************** VARIABLES ******************************************
boot_drive DB 0
msg_welcome DB "Welcome in MyOS", 13, 10, 0
periph DB " hard disks",13, 10, 0
memory DB " Memory Size (kb)", 13, 10, 0
msg_kernel_loading DB "The kernel is loading ...", 13, 10, 0
msg_erreur DB "Il y a eu une erreur au chargement du noyau", 10, 0
; Pad with 0x90 (NOP) up to byte 510, then the 0xAA55 boot signature.
times 510-($-$$) db 144
dw 0xAA55
|
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
namespace GeoJSON {
[System.Serializable]
public class FeatureObject {
public string type;
public GeometryObject geometry;
public Dictionary<string, string> properties;
public FeatureObject(JSONObject jsonObject) {
type = jsonObject ["type"].str;
geometry = parseGeometry (jsonObject ["geometry"]);
properties = new Dictionary<string, string> ();
parseProperties (jsonObject ["properties"]);
}
public FeatureObject(string encodedString) {
JSONObject jsonObject = new JSONObject (encodedString);
type = jsonObject ["type"].str;
geometry = parseGeometry (jsonObject ["geometry"]);
properties = new Dictionary<string, string> ();
parseProperties (jsonObject ["properties"]);
}
public FeatureObject(GeometryObject featureGeometry) {
type = "Feature";
geometry = featureGeometry;
properties = new Dictionary<string, string> ();
}
protected void parseProperties(JSONObject jsonObject) {
for(int i = 0; i < jsonObject.list.Count; i++){
string key = (string)jsonObject.keys[i];
JSONObject value = (JSONObject)jsonObject.list[i];
if(value.IsString)
properties.Add (key, value.str);
if(value.IsNumber)
properties.Add (key, value.n.ToString());
}
}
protected GeometryObject parseGeometry(JSONObject jsonObject){
switch (jsonObject["type"].str) {
case "Point":
return new PointGeometryObject (jsonObject);
case "MultiPoint":
return new MultiPointGeometryObject (jsonObject);
case "LineString":
return new LineStringGeometryObject (jsonObject);
case "MultiLineString":
return new MultiLineStringGeometryObject (jsonObject);
case "Polygon":
return new PolygonGeometryObject (jsonObject);
case "MultiPolygon":
return new MultiPolygonGeometryObject (jsonObject);
default:
break;
}
return null;
}
public JSONObject Serialize() {
JSONObject rootObject = new JSONObject(JSONObject.Type.OBJECT);
rootObject.AddField("type", type);
//Geometry
JSONObject geometryObject = geometry.Serialize ();
rootObject.AddField ("geometry", geometryObject);
//Properties
JSONObject jsonProperties = new JSONObject(JSONObject.Type.OBJECT);
foreach (KeyValuePair<string,string> property in properties) {
jsonProperties.AddField (property.Key, property.Value);
}
rootObject.AddField("properties", jsonProperties);
return rootObject;
}
}
}
|
/*
* Copyright 2020 James Courtney
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace FlatSharpTests.Compiler
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using FlatSharp;
using FlatSharp.Attributes;
using FlatSharp.Compiler;
using FlatSharp.TypeModel;
using Microsoft.VisualStudio.TestTools.UnitTesting;
    /// <summary>
    /// Verifies that each FBS scalar type alias resolves to the expected CLR
    /// type and FlatSharp type model, and that unknown aliases fail to resolve.
    /// </summary>
    [TestClass]
    public class TypeModelAliasTests
    {
        [TestMethod]
        public void Alias_String()
            => this.AssertResolve("string", typeof(string), typeof(StringTypeModel));
        [TestMethod]
        public void Alias_Bool()
            => this.AssertResolve("bool", typeof(bool), typeof(BoolTypeModel));
        [TestMethod]
        public void Alias_Byte()
            => this.AssertResolve("byte", typeof(sbyte), typeof(SByteTypeModel));
        [TestMethod]
        public void Alias_Int8()
            => this.AssertResolve("int8", typeof(sbyte), typeof(SByteTypeModel));
        [TestMethod]
        public void Alias_UByte()
            => this.AssertResolve("ubyte", typeof(byte), typeof(ByteTypeModel));
        [TestMethod]
        public void Alias_UInt8()
            => this.AssertResolve("uint8", typeof(byte), typeof(ByteTypeModel));
        [TestMethod]
        public void Alias_Short()
            => this.AssertResolve("short", typeof(short), typeof(ShortTypeModel));
        [TestMethod]
        public void Alias_Int16()
            => this.AssertResolve("int16", typeof(short), typeof(ShortTypeModel));
        [TestMethod]
        public void Alias_UShort()
            => this.AssertResolve("ushort", typeof(ushort), typeof(UShortTypeModel));
        [TestMethod]
        public void Alias_UInt16()
            => this.AssertResolve("uint16", typeof(ushort), typeof(UShortTypeModel));
        [TestMethod]
        public void Alias_Int()
            => this.AssertResolve("int", typeof(int), typeof(IntTypeModel));
        [TestMethod]
        public void Alias_Int32()
            => this.AssertResolve("int32", typeof(int), typeof(IntTypeModel));
        [TestMethod]
        public void Alias_UInt()
            => this.AssertResolve("uint", typeof(uint), typeof(UIntTypeModel));
        [TestMethod]
        public void Alias_UInt32()
            => this.AssertResolve("uint32", typeof(uint), typeof(UIntTypeModel));
        [TestMethod]
        public void Alias_Long()
            => this.AssertResolve("long", typeof(long), typeof(LongTypeModel));
        [TestMethod]
        public void Alias_Int64()
            => this.AssertResolve("int64", typeof(long), typeof(LongTypeModel));
        [TestMethod]
        public void Alias_ULong()
            => this.AssertResolve("ulong", typeof(ulong), typeof(ULongTypeModel));
        [TestMethod]
        public void Alias_UInt64()
            => this.AssertResolve("uint64", typeof(ulong), typeof(ULongTypeModel));
        [TestMethod]
        public void Alias_Float32()
            => this.AssertResolve("float32", typeof(float), typeof(FloatTypeModel));
        [TestMethod]
        public void Alias_Float()
            => this.AssertResolve("float", typeof(float), typeof(FloatTypeModel));
        [TestMethod]
        public void Alias_Float64()
            => this.AssertResolve("float64", typeof(double), typeof(DoubleTypeModel));
        [TestMethod]
        public void Alias_Double()
            => this.AssertResolve("double", typeof(double), typeof(DoubleTypeModel));
        // Unknown names, wrong casing, empty, and null must all fail to resolve.
        [TestMethod]
        public void Alias_Invalid()
        {
            TypeModelContainer container = TypeModelContainer.CreateDefault();
            Assert.IsFalse(container.TryResolveFbsAlias("foo", out _));
            Assert.IsFalse(container.TryResolveFbsAlias("Double", out _));
            Assert.IsFalse(container.TryResolveFbsAlias("", out _));
            Assert.IsFalse(container.TryResolveFbsAlias(null, out _));
        }
        /// <summary>
        /// Resolves <paramref name="alias"/> against a default container and
        /// asserts the resulting model's concrete type and CLR type.
        /// </summary>
        private void AssertResolve(string alias, Type clrType, Type typeModelType)
        {
            TypeModelContainer container = TypeModelContainer.CreateDefault();
            Assert.IsTrue(container.TryResolveFbsAlias(alias, out ITypeModel? typeModel));
            Assert.IsInstanceOfType(typeModel, typeModelType);
            Assert.AreEqual(clrType, typeModel.ClrType);
        }
    }
}
|
# Babushka dep: install Apache Bench by tapping the apache Homebrew tap and
# installing the managed `ab` package.
dep 'apache bench' do
  requires \
    'tap'.with('apache'),
    'ab.managed'
end
# Managed package dependency that provides the `ab` executable.
dep 'ab.managed' do
  provides "ab"
end
|
; ModuleID = '/home/david/src/c-semantics/tests/unitTests/weirdmain.c'
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
target triple = "x86_64-unknown-linux-gnu"
; main(argc, argv): spills both parameters into stack slots (typical
; unoptimized codegen) and returns 0; the slots are never read back.
define i32 @main(i32 %argc, i8** %argv) nounwind uwtable {
entry:
  %argc.addr = alloca i32, align 4
  %argv.addr = alloca i8**, align 8
  store i32 %argc, i32* %argc.addr, align 4
  store i8** %argv, i8*** %argv.addr, align 8
  ret i32 0
}
|
# Visual (CSS) regression testing helper: screenshots a selector, stores a
# durable baseline image, and compares subsequent runs against it with
# imagemagick's `compare`, opening a diff image on mismatch.
class CssRegressionTest
  require 'capybara'
  include Capybara::DSL
  class << self
    # URI parts used to derive the storage key from the current URL.
    attr_accessor :default_key_mode
    # Path segments for durable (checked-in) baseline screenshots.
    attr_accessor :durable_asset_path
    # Path segments for temporary comparison/diff screenshots.
    attr_accessor :temp_asset_path
    # Prefix prepended to both storage paths.
    attr_accessor :base_asset_path
  end
  self.default_key_mode = [:path, :query, :fragment]
  self.durable_asset_path = ['spec', 'support', 'regressions']
  self.temp_asset_path = ['tmp', 'regressions']
  self.base_asset_path = ''
  # CSS selector whose rendering is under test.
  attr_accessor :selector
  # Normalized storage key (array of path-safe segments).
  attr_accessor :key
  # Creation time; keeps comparison/diff filenames unique.
  attr_accessor :timestamp
  # selector - element to screenshot. opts[:key] overrides the URL-derived
  # key; opts[:reset] deletes the stored baseline so the next run re-records.
  def initialize(selector, **opts)
    # Only poltergeist supports selector-scoped save_screenshot.
    if Capybara.javascript_driver != :poltergeist
      raise Capybara::NotSupportedByDriverError
    end
    self.selector = selector.to_s
    opts[:key] ||= generate_key_from_path
    self.key = format_key(opts[:key])
    self.timestamp = Time.now.to_i
    # Setup complete
    reset if opts[:reset]
  end
  # Captures the baseline (when missing) and the current screenshot, then
  # compares them. Returns true on match, false on difference, nil when the
  # `compare` tool is unavailable.
  def run
    ensure_image_saved(base_image_path)
    ensure_image_saved(compare_image_path)
    compare_images
  end
  private
  # Drops the stored baseline image.
  def reset
    `rm #{base_image_path}`
  end
  # Current page URL as a URI object.
  def get_page_url
    URI.parse(current_url)
  end
  # Joins the URL parts named in default_key_mode with '/' to form a raw key.
  def generate_key_from_path
    url = get_page_url
    string = ''
    default_key_mode.each do |method|
      string = string + url.send(method).to_s+'/'
    end
    string
  end
  # Instance-level readers for the class-level configuration.
  def default_key_mode
    self.class.default_key_mode
  end
  def temp_asset_path
    self.class.temp_asset_path
  end
  def durable_asset_path
    self.class.durable_asset_path
  end
  def base_asset_path
    self.class.base_asset_path
  end
  # Splits a raw key on '/', '?' and '-' and drops empty segments.
  def format_key(string)
    string.to_s.split(/\/|\?|-/).reject{ |e| e.empty? }
  end
  # Turns a string into a filesystem-safe lowercase slug separated by `sep`.
  def parameterize(string, sep = '-')
    # Turn unwanted chars into the separator
    new_string = string.gsub(/[^a-z0-9\-_]+/, sep)
    unless sep.nil? || sep.empty?
      re_sep = Regexp.escape(sep)
      # No more than one of the separator in a row.
      new_string.gsub!(/#{re_sep}{2,}/, sep)
      # Remove leading/trailing separator.
      new_string.gsub!(/^#{re_sep}|#{re_sep}$/, '')
    end
    new_string.downcase
  end
  # Storage directory: temp path for :tmp, durable path otherwise.
  def storage_dir(type=:tmp)
    path_array = (type == :tmp ? temp_asset_path : durable_asset_path)
    base_asset_path+File.join(path_array + self.key)+'/'
  end
  def base_image_path
    storage_dir(:spec)+img_filename()
  end
  def compare_image_path
    storage_dir(:tmp)+img_filename(:compare)
  end
  def diff_image_path
    storage_dir(:tmp)+img_filename(:diff)
  end
  # Baselines are "<selector>.png"; other kinds get ".<type>.<timestamp>".
  def img_filename(type=:base)
    "#{parameterize(selector)}"+(type != :base ? ".#{type}.#{timestamp}" : nil).to_s+'.png'
  end
  # Runs imagemagick's `compare`. Exit status 127 means the tool is missing
  # (returns nil); any other non-zero status means the images differ.
  def compare_images
    `compare -metric AE #{base_image_path} #{compare_image_path} #{diff_image_path}`
    status = $?.exitstatus.to_i
    if status == 127
      warn '"compare" command not found! Make sure imagemagick is installed and in your $PATH.'
      nil
    elsif (status != 0)
      open_file(diff_image_path)
      false
    else
      true
    end
  end
  # Opens a file with launchy when available; otherwise just reports the path.
  def open_file(path)
    begin
      require "launchy"
      Launchy.open(path)
    rescue LoadError
      warn "File saved to #{path}."
      warn "Please install the launchy gem to open the file automatically."
    end
  end
  # Saves a screenshot of the selector to file_path unless one already exists.
  def ensure_image_saved(file_path)
    # File.exists? was deprecated and removed in Ruby 3.2; File.exist? is the
    # supported spelling with identical behavior.
    if File.exist?(file_path)
      file_path
    else
      save_screenshot(file_path, selector: selector)
    end
  end
end
|
# -*- coding: utf-8 -*-
# @Brief: 不同数据集的父类实现
import numpy as np
import glob
from PIL import Image
import cv2 as cv
import os
import core.config as cfg
class Dataset:
    """Base class for segmentation datasets.

    Subclasses implement :meth:`set_image_info` to populate ``self.image_info``
    with, at minimum, a ``"mask_path"`` entry per image id.
    NOTE(review): ``self.image_info`` is never initialized here — confirm every
    subclass assigns it before :meth:`read_mask` is called.
    """
    def __init__(self, target_size=(320, 320), num_classes=21):
        # (height, width) that every mask is resized/padded to.
        self.target_size = target_size
        # Number of segmentation classes, including background (class 0).
        self.num_classes = num_classes
    def set_image_info(self, **kwargs):
        """
        Hook for subclasses: store the input image paths and matching mask
        paths in ``self.image_info``.
        :return: None
        """
        pass
    def read_mask(self, image_id):
        """
        Read the mask for ``image_id`` and convert it into the one-hot layout
        expected by FCN-style models.
        :param image_id: id/key of the image in ``self.image_info``
        :return: uint8 mask of shape (H, W, num_classes)
        """
        mask_path = self.image_info[image_id]["mask_path"]
        image = Image.open(mask_path)
        image = np.array(image)
        # pad_value=0.0 selects the nearest-neighbour branch in
        # resize_image_with_pad, which keeps class ids unchanged.
        image = resize_image_with_pad(image, self.target_size, pad_value=0.0)
        # One-hot encode classes 1..num_classes-1; background (0) stays all-zero.
        h, w = self.target_size
        mask = np.zeros((h, w, self.num_classes), np.uint8)
        for c in range(1, self.num_classes):
            m = np.argwhere(image == c)
            for row, col in m:
                mask[row, col, c] = 1
        return mask
def resize_image_with_pad(image, target_size, pad_value=128.0):
    """Resize ``image`` to fit ``target_size`` while keeping its aspect ratio,
    centering it and filling the leftover border with ``pad_value``.

    :param image: input array — a 2-D mask when ``pad_value == 0``, otherwise
        an H x W x 3 color image
    :param target_size: (height, width) of the output
    :param pad_value: border fill value; ``0`` selects the nearest-neighbour
        mask branch below
    :return: padded array of shape ``target_size`` (masks) or
        ``target_size + (3,)`` (images). Note: ``np.full`` without an explicit
        dtype yields float64 output even for integer inputs.
    """
    image_h, image_w = image.shape[:2]
    input_h, input_w = target_size
    # Scale so the image fits inside the target without distortion.
    scale = min(input_h / image_h, input_w / image_w)
    image_h = int(image_h * scale)
    image_w = int(image_w * scale)
    # Border widths that center the resized image.
    dw, dh = (input_w - image_w) // 2, (input_h - image_h) // 2
    if pad_value == 0:
        # Masks: nearest-neighbour keeps class ids intact.
        image_resize = cv.resize(image, (image_w, image_h), interpolation=cv.INTER_NEAREST)
        image_padded = np.full(shape=[input_h, input_w], fill_value=pad_value)
        image_padded[dh: image_h+dh, dw: image_w+dw] = image_resize
    else:
        # Color images: bilinear interpolation.
        image_resize = cv.resize(image, (image_w, image_h), interpolation=cv.INTER_LINEAR)
        image_padded = np.full(shape=[input_h, input_w, 3], fill_value=pad_value)
        image_padded[dh: image_h+dh, dw: image_w+dw, :] = image_resize
    return image_padded
|
import { Component } from 'react'
import Link from 'next/link'
import Layout from '../components/Layout'
import ajax from '../ajax'
// Home page: fetches the post list after mount and renders each post as a
// Bootstrap list-group link to its detail page.
export default class HomePage extends Component {
  state = {
    posts: [],
  }
  // Load the post list from the `posts` API endpoint once mounted.
  async componentDidMount() {
    const res = await ajax('posts')
    if (res.data.ok) {
      this.setState({
        posts: res.data.posts,
      })
    }
  }
  // One list-group link per post, routed to /posts/<id>.
  renderPosts = () => {
    return this.state.posts.map(post => {
      return (
        <Link href={'/posts/' + post._id} key={post._id}>
          <a className="list-group-item list-group-item-action">{post.title}</a>
        </Link>
      )
    })
  }
  render() {
    return (
      <Layout>
        <div className="list-group" style={{ margin: '10px 0' }}>
          {this.renderPosts()}
        </div>
      </Layout>
    )
  }
}
|
#!/usr/bin/env bash
# vi: ft=sh
# @brief Cache sourced in entrys iusing BASH 4+ associative
# arrays.
#
declare cache_is_supported
export cache_is_supported=1

# Initialize the global cache maps. Requires BASH 4+ associative arrays; on
# older shells caching is flagged unsupported and the cache.* functions no-op.
function cache.init() {
  if bashmatic.bash.version-four-or-later ; then
    # `declare` inside a function creates a *local* variable by default;
    # -g is required so these maps are the globals that the other cache.*
    # functions read and update.
    declare -g -A item_cache_map 2>/dev/null
    declare -g -A caches_cache_map 2>/dev/null
    # NOTE(review): bash cannot export array variables; these exports only
    # mark the names and do not propagate the maps to subshells.
    export item_cache_map
    export caches_cache_map
  else
    export cache_is_supported=0
  fi
}
# Register a named sub-cache in caches_cache_map, erroring if the name is
# already taken.
# NOTE(review): the stored value is the expansion of a freshly declared
# (empty) map — i.e. an empty string — and `error` is not followed by a
# return, so execution continues after a duplicate key. Confirm intended.
cache.new() {
  local name="$1"
  test -z "${name}" && return 1
  test -z "${caches_cache_map["${name}"]}" || {
    error "${name} is already used as the key."
  }
  declare -A new_items_map
  caches_cache_map[${name}]="${new_items_map[@]}"
}
# Exit 0 when the given entry is present in the global item cache, 1 otherwise
# (including when caching is unsupported or the entry is empty).
cache.has() {
  ((cache_is_supported)) || return 1
  local entry="$1"
  test -z "$entry" && return 1
  if [[ -n "$1" && -n "${item_cache_map["${entry}"]}" ]]; then
    return 0
  else
    return 1
  fi
}
# Mark the given entry as present in the global item cache.
cache.add() {
  ((cache_is_supported)) || return
  [[ -n "${1}" ]] && item_cache_map[${1}]=true
}
# Mark the given entry as present in the global item cache. Identical to
# cache.add; kept for backward API compatibility. (Removed a stray `s` line
# that executed a non-existent command on every call, polluting stderr and
# the function's exit status.)
cache.add-new() {
  ((cache_is_supported)) || return
  [[ -n "${1}" ]] && item_cache_map[${1}]=true
}
# Print every cached entry key, one per line.
cache.list() {
  ((cache_is_supported)) || return
  for f in "${!item_cache_map[@]}"; do echo $f; done
}
# Initialize the cache maps when this file is sourced.
cache.init
|
import { IInstance, IInstanceContext } from 'altinn-shared/types';
/**
 * Builds the instance-context object exposed to expressions/templates from an
 * Altinn instance. Returns null when no instance is provided.
 */
export function buildInstanceContext(instance: IInstance): IInstanceContext {
  if (!instance) {
    return null;
  }
  return {
    appId: instance.appId,
    instanceId: instance.id,
    instanceOwnerPartyId: instance.instanceOwner.partyId,
  };
}
|
RACK_ENV = 'test'.freeze unless defined?(RACK_ENV)
require File.expand_path(File.dirname(__FILE__) + '/../config/boot')
require File.dirname(__FILE__) + '/../app/helpers/wanikani_api'
require File.dirname(__FILE__) + '/../app/helpers/wkanki_helper'
require 'capybara'
require 'capybara/dsl'
Capybara.app = Wkanki::App
Capybara.register_driver :rack_test do |app|
Capybara::RackTest::Driver.new(app, respect_data_method: true)
end
RSpec.configure do |conf|
conf.include Rack::Test::Methods
conf.include Capybara::DSL
end
# You can use this method to custom specify a Rack app
# you want rack-test to invoke:
#
# app Wkanki::App
# app Wkanki::App.tap { |a| }
# app(Wkanki::App) do
# set :foo, :bar
# end
#
# Memoizes and returns the Rack app under test. Accepts an explicit app,
# an app plus a configuration block (instance_eval'd against it), or —
# when nothing was supplied — falls back to Padrino.application.
def app(app = nil, &blk)
  return @app if @app
  @app = block_given? ? app.instance_eval(&blk) : app
  @app ||= Padrino.application
  @app
end
# Decodes the Rack session from the response's cookie. This will
# allow us to get session information in our tests.
# Extracts the rack.session value from a Set-Cookie string, URL-unescapes
# and Base64-decodes it, then Marshal-loads the session hash so tests can
# inspect session state.
def decode_session_cookie(cookie)
  raw_value = cookie.match(/rack\.session=(\S*);/)[1]
  decoded = Rack::Utils.unescape(raw_value).unpack('m*').first
  Marshal.load(decoded)
end
|
//
// IOS11Adapter.h
// VansLive
//
// Created by xinwei on 2017/11/3.
// Copyright © 2017年 Xiaomi. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Helpers for adapting scroll/table views to iOS 11 layout behavior changes.
/// NOTE(review): implementations are not visible in this header — the notes
/// below are inferred from the method names; confirm against the .m file.
@interface IOS11Adapter : NSObject
/// Presumably amends/disables automatic content-inset adjustment introduced
/// by iOS 11 (contentInsetAdjustmentBehavior) — TODO confirm.
+ (void)scrollViewContentInsetAmendment:(UIScrollView *)scrollView;
/// Presumably cancels the estimated row/header/footer height series so the
/// table uses explicit heights — TODO confirm.
+ (void)tableViewCancelEstimatedSeriesFunction:(UITableView *)tableView;
@end
|
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @addtogroup Bluetooth
* @{
*
* @brief Defines adapter classic properties.
*
*/
/**
* @file classic_adapter_properties.h
*
* @brief Adapter classic properties.
*
*/
#ifndef CLASSIC_ADAPTER_PROPERTIES_H
#define CLASSIC_ADAPTER_PROPERTIES_H
#include <mutex>
#include <string>
#include <vector>
#include "base_observer_list.h"
#include "bt_uuid.h"
#include "btstack.h"
#include "classic_bluetooth_data.h"
#include "classic_config.h"
#include "classic_remote_device.h"
#include "dispatcher.h"
#include "interface_adapter_classic.h"
#include "log.h"
#include "raw_address.h"
#include "timer.h"
namespace bluetooth {
/**
* @brief AdapterProperties to save and get classic properties.
*
*/
class ClassicAdapterProperties {
public:
    /**
     * @brief Get classic adapter properties singleton instance pointer.
     *
     * @return Returns the singleton instance pointer.
     */
    static ClassicAdapterProperties &GetInstance();
    /**
     * @brief Load config info.
     *
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool LoadConfigInfo();
    /**
     * @brief Initialize scan, bondable, security mode.
     *
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool InitMode();
    /**
     * @brief Set local class and name.
     *
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool ConfigProperties();
    /**
     * @brief Set bondable mode.
     *
     * @param mode Bondable mode.
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SetBondableMode(int mode);
    /**
     * @brief Set discoverable timeout.
     *
     * @param time Discoverable timeout.
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SetDiscoverableTimeout(int time);
    /**
     * @brief Set Io capability.
     *
     * @param ioCapability Device Io capability.
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SetIoCapability(int ioCapability);
    /**
     * @brief Set local address.
     *
     * @param addr Device address.
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SetLocalAddress(const std::string &addr);
    /**
     * @brief Set local name.
     *
     * @param name Device name.
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SetLocalName(const std::string &name);
    /**
     * @brief Set local device class.
     *
     * @param deviceClass Device class.
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SetLocalDeviceClass(int deviceClass);
    /**
     * @brief Set security mode.
     *
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SetSecurityMode();
    /**
     * @brief Get passkey.
     *
     * @return Returns passkey.
     */
    std::string GetPasskey() const;
    /**
     * @brief Get bondable mode.
     *
     * @return Returns bondable mode.
     */
    int GetBondableMode() const;
    /**
     * @brief Get discoverable timeout.
     *
     * @return Returns discoverable timeout.
     */
    int GetDiscoverableTimeout() const;
    /**
     * @brief Get device Io capability.
     *
     * @return Returns device Io capability.
     */
    int GetIoCapability() const;
    /**
     * @brief Get local address.
     *
     * @return Returns local address.
     */
    std::string GetLocalAddress() const;
    /**
     * @brief Get local name.
     *
     * @return Returns local name.
     */
    std::string GetLocalName() const;
    /**
     * @brief Get local device class.
     *
     * @return Returns local device class.
     */
    int GetLocalDeviceClass() const;
    /**
     * @brief Get paired address list.
     *
     * @return Returns address list.
     */
    std::vector<std::string> GetPairedAddrList() const;
    /**
     * @brief Set local name callback.
     *
     * @param result Set result.
     */
    void SetLocalNameCallback(int result);
    /**
     * @brief Register classic adapter observer.
     *
     * @param observer Class ClassicAdapterObserver pointer to register observer.
     */
    void RegisterClassicAdapterObserver(IAdapterClassicObserver &observer);
    /**
     * @brief Deregister classic adapter observer.
     *
     * @param observer Previously registered observer to remove.
     */
    void DeregisterClassicAdapterObserver(IAdapterClassicObserver &observer);
    /**
     * @brief Get paired device.
     *
     * @param addr Device address.
     * @return Returns paired device.
     */
    std::shared_ptr<ClassicRemoteDevice> GetPairedDevice(std::string addr);
    /**
     * @brief Save paired device info.
     *
     * @param remote Remote device.
     */
    void SavePairedDeviceInfo(std::shared_ptr<ClassicRemoteDevice> remote);
    /**
     * @brief Remove paired device info.
     *
     * @param addr Remote device address.
     */
    void RemovePairedDeviceInfo(std::string addr) const;
    /**
     * @brief Save config file.
     *
     */
    void SaveConfigFile() const;
    /**
     * @brief Save local support uuids.
     *
     * @param uuids Device uuids.
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SaveSupportUuids(const std::vector<Uuid> &uuids);
private:
    /**
     * @brief A constructor used to create a <b>ClassicAdapterProperties</b> instance.
     *
     */
    ClassicAdapterProperties();
    /**
     * @brief A destructor used to delete the <b>ClassicAdapterProperties</b> instance.
     *
     */
    ~ClassicAdapterProperties();
    /**
     * @brief Load host info from config file.
     *
     */
    void LoadHostInfo();
    /**
     * @brief Read mac address from controller.
     *
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool ReadAddrFromController();
    /**
     * @brief Update config.
     *
     * @param type Properties type.
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool UpdateConfig(int type);
    /**
     * @brief Set eir data.
     *
     * @return Returns <b>true</b> if the operation is successful;
     *         returns <b>false</b> if the operation fails.
     */
    bool SetEirData();
    /// Presumably notifies registered observers of a local name change —
    /// NOTE(review): implementation not visible here, confirm in the .cpp.
    void SendDeviceNameChanged(const std::string &deviceName);
    /// Presumably notifies registered observers of a local address change —
    /// NOTE(review): implementation not visible here, confirm in the .cpp.
    void SendDeviceAddrChanged(const std::string &address);
    ClassicConfig &config_;                                     // Backing persistent config store.
    int cod_ {DEFAULT_CLASS_OF_DEVICE};                         // Class of device.
    std::string passkey_ {DEFAULT_PASSKEY};                     // Pairing passkey.
    int bondableMode_ {INVALID_VALUE};                          // Current bondable mode.
    int securityMode_ {SEC_MODE_2};                             // GAP security mode.
    int ioCapability_ {GAP_IO_DISPLAYYESNO};                    // Local IO capability.
    int discoverableTimeout_ {DEFAULT_SCANMODE_DURATION_MILLIS};  // Discoverable timeout (ms).
    std::string macAddr_ {INVALID_MAC_ADDRESS};                 // Local MAC address.
    std::string deviceName_ {DEFAULT_DEVICE_NAME};              // Local device name.
    std::vector<Uuid> uuids_ {};                                // Locally supported service UUIDs.
    BaseObserverList<IAdapterClassicObserver> adapterObservers_ {};  // Registered observers.
    std::recursive_mutex propertiesMutex_ {};                   // Guards the properties above.
};
} // namespace bluetooth
#endif // CLASSIC_ADAPTER_PROPERTIES_H
|
require 'rails_helper'
RSpec.describe Api::V1::CurrenciesController, type: :request do
  let(:user) { create(:user) }
  let(:requested_day) { Time.parse('2020-10-10') }
  let!(:currency) do
    create(:currency, valid_at: requested_day, daily_rates: { 'USD' => { value: 3.0 } })
  end
  # Freeze time at the requested day so valid_at lookups are deterministic.
  before { travel_to requested_day }
  after { travel_back }
  before(:each) do
    # Stub the external NBRB API so no network call is made from specs.
    allow(Nbrb::Api).to receive(:daily_rates).and_return([])
  end
  describe 'GET /index' do
    before { get api_v1_currencies_url, params: params, headers: headers }
    let(:params) { {} }
    let(:headers) { {} }
    context 'when not authorized' do
      # BUGFIX: the description previously read 'renders a successful
      # response' while the example asserts the opposite (HTTP 401).
      it 'renders an unauthorized response' do
        expect(response).not_to be_successful
        expect(response.status).to eq 401
      end
    end
    context 'when authorized' do
      let(:headers) do
        { 'CONTENT_TYPE' => 'application/json' }.merge(jwt_token_for(user))
      end
      it 'renders a successful response' do
        expect(response).to be_successful
        expect(JSON.parse(response.body))
          .to include(Api::V1::CurrencySerializer.new(currency).serializable_hash.stringify_keys)
      end
      context 'with day parameter' do
        context 'when currency is found for requested day' do
          let(:params) { { day: requested_day } }
          it do
            expect(response).to be_successful
            expect(JSON.parse(response.body))
              .to include(Api::V1::CurrencySerializer.new(currency).serializable_hash.stringify_keys)
          end
        end
        context 'when currency is not found for requested day' do
          # BUGFIX: was Time.new('2010-01-10') — Time.new does not accept a
          # date string before Ruby 3.2 and is inconsistent with the
          # Time.parse call used for requested_day above.
          let(:params) { { day: Time.parse('2010-01-10') } }
          it do
            expect(response).to be_successful
            expect(JSON.parse(response.body)).to be_blank
          end
        end
      end
    end
  end
end
|
<?php
namespace spec\Bxav\Component\ResellerClub\Model;
use PhpSpec\ObjectBehavior;
use Prophecy\Argument;
use Bxav\Component\ResellerClub\Model\ResellerClubClient;
use Bxav\Component\ResellerClub\Model\JsonResponse;
use Bxav\Component\ResellerClub\Model\Customer;
use Bxav\Component\ResellerClub\Model\Response;
/**
 * phpspec specification for DomainRegister: availability checks and
 * registration through a ResellerClubClient collaborator double.
 */
class DomainRegisterSpec extends ObjectBehavior
{
    /** @var ResellerClubClient collaborator double injected in let(). */
    protected $client;
    function it_is_initializable()
    {
        $this->shouldHaveType('Bxav\Component\ResellerClub\Model\DomainRegister');
    }
    // let() runs before each example: construct the subject with the client double.
    function let(ResellerClubClient $client)
    {
        $this->beConstructedWith($client);
        $this->client = $client;
    }
    function it_should_check_the_availability_of_a_domain(JsonResponse $response)
    {
        $domain = "tralala";
        $tld = "email";
        // The API response is keyed by the full domain name "name.tld".
        $response->offsetGet("$domain.$tld")->willReturn(['status' => 'available']);
        $this->client
            ->get("/domains/available.json", ['domain-name' => $domain, 'tlds' => $tld])
            ->willReturn($response);
        $this->isDomainAvailable($domain, $tld)->shouldReturn(true);
    }
    function it_should_register_a_domain(Customer $customer, Response $response)
    {
        // Registration succeeds when the API reports status "Success".
        $response->offsetGet('status')->willReturn('Success');
        $this->client->post('/domains/register.json', Argument::type('array'))->willReturn($response);
        $this->register("tralala", "email", $customer)->shouldReturn(true);
    }
}
|
#!/bin/bash
# Repository roots handled by this script; each maps to a remote in
# all_tags_remote below.
ROOTS=( \
  "/" \
  "/android" \
  "/android/tools-base" \
)
export ROOTS
# all_tags_remote <root>: print the git remote URL for a known root.
# Returns 1 (printing nothing) for an unknown root.
function all_tags_remote() {
  local repo
  case "$1" in
    ("/") repo="community.git" ;;
    ("/android") repo="android.git" ;;
    ("/android/tools-base") repo="adt-tools-base.git" ;;
    # BUGFIX: the original ran the case inside $(...), so `exit 1` only
    # killed the subshell and the function happily echoed a bogus URL with
    # an empty repo for unknown roots. Fail the function instead.
    (*) return 1 ;;
  esac
  echo "git://git.labs.intellij.net/idea-all-tags/$repo"
}
|
namespace WpfAnalyzers.Test.WPF0006CoerceValueCallbackShouldMatchRegisteredNameTests
{
using Gu.Roslyn.Asserts;
using NUnit.Framework;
public class ValidCode
{
private static readonly PropertyMetadataAnalyzer Analyzer = new PropertyMetadataAnalyzer();
[Test]
public void DependencyPropertyNoMetadata()
{
var testCode = @"
namespace RoslynSandbox
{
using System.Windows;
using System.Windows.Controls;
using System.Windows;
using System.Windows.Controls;
public class FooControl : Control
{
public static readonly DependencyProperty ValueProperty = DependencyProperty.Register(
nameof(Value),
typeof(double),
typeof(FooControl));
public double Value
{
get { return (double)this.GetValue(ValueProperty); }
set { this.SetValue(ValueProperty, value); }
}
}
}";
RoslynAssert.Valid(Analyzer, testCode);
}
[TestCase("new PropertyMetadata(OnBarChanged)")]
[TestCase("new PropertyMetadata(new PropertyChangedCallback(OnBarChanged))")]
[TestCase("new PropertyMetadata(default(int), OnBarChanged)")]
[TestCase("new PropertyMetadata(default(int), new PropertyChangedCallback(OnBarChanged))")]
[TestCase("new PropertyMetadata((o, e) => { })")]
[TestCase("new FrameworkPropertyMetadata((o, e) => { })")]
[TestCase("new FrameworkPropertyMetadata(OnBarChanged)")]
[TestCase("new FrameworkPropertyMetadata(OnBarChanged, CoerceBar)")]
[TestCase("new PropertyMetadata(default(int), null, CoerceBar)")]
[TestCase("new PropertyMetadata(default(int), null, new CoerceValueCallback(CoerceBar))")]
public void DependencyPropertyWithMetadata(string metadata)
{
var testCode = @"
namespace RoslynSandbox
{
using System.Windows;
using System.Windows.Controls;
public class FooControl : Control
{
public static readonly DependencyProperty BarProperty = DependencyProperty.Register(
nameof(Bar),
typeof(int),
typeof(FooControl),
new PropertyMetadata(default(int), null, CoerceBar));
public int Bar
{
get { return (int)this.GetValue(BarProperty); }
set { this.SetValue(BarProperty, value); }
}
private static void OnBarChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
// nop
}
private static object CoerceBar(DependencyObject d, object baseValue)
{
if (baseValue is int i &&
i < 0)
{
return 0;
}
return baseValue;
}
}
}".AssertReplace("new PropertyMetadata(default(int), null, CoerceBar)", metadata);
RoslynAssert.Valid(Analyzer, testCode);
}
[Test]
public void ReadOnlyDependencyProperty()
{
var testCode = @"
namespace RoslynSandbox
{
using System.Windows;
using System.Windows.Controls;
public class FooControl : Control
{
private static readonly DependencyPropertyKey ValuePropertyKey = DependencyProperty.RegisterReadOnly(
nameof(Value),
typeof(double),
typeof(FooControl),
new PropertyMetadata(1.0, null, CoerceValue));
public static readonly DependencyProperty ValueProperty = ValuePropertyKey.DependencyProperty;
public double Value
{
get { return (double)this.GetValue(ValueProperty); }
set { this.SetValue(ValuePropertyKey, value); }
}
private static object CoerceValue(DependencyObject d, object baseValue)
{
if (baseValue is int i &&
i < 0)
{
return 0;
}
return baseValue;
}
}
}";
RoslynAssert.Valid(Analyzer, testCode);
}
[Test]
public void DependencyPropertyRegisterAttached()
{
var testCode = @"
namespace RoslynSandbox
{
using System.Windows;
public static class Foo
{
public static readonly DependencyProperty BarProperty = DependencyProperty.RegisterAttached(
""Bar"",
typeof(int),
typeof(Foo),
new PropertyMetadata(default(int), null, CoerceBar));
public static void SetBar(this FrameworkElement element, int value) => element.SetValue(BarProperty, value);
public static int GetBar(this FrameworkElement element) => (int)element.GetValue(BarProperty);
private static object CoerceBar(DependencyObject d, object baseValue)
{
if (baseValue is int i &&
i < 0)
{
return 0;
}
return baseValue;
}
}
}";
RoslynAssert.Valid(Analyzer, testCode);
}
[Test]
public void DependencyPropertyRegisterAttachedReadOnly()
{
var testCode = @"
namespace RoslynSandbox
{
using System.Windows;
public static class Foo
{
private static readonly DependencyPropertyKey BarPropertyKey = DependencyProperty.RegisterAttachedReadOnly(
""Bar"",
typeof(int),
typeof(Foo),
new PropertyMetadata(default(int), OnBarChanged, CoerceBar));
public static readonly DependencyProperty BarProperty = BarPropertyKey.DependencyProperty;
public static void SetBar(this FrameworkElement element, int value) => element.SetValue(BarPropertyKey, value);
public static int GetBar(this FrameworkElement element) => (int)element.GetValue(BarProperty);
private static void OnBarChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
// nop
}
private static object CoerceBar(DependencyObject d, object baseValue)
{
if (baseValue is int i &&
i < 0)
{
return 0;
}
return baseValue;
}
}
}";
RoslynAssert.Valid(Analyzer, testCode);
}
}
}
|
from compressor_toolkit.precompilers import SCSSCompiler, ES6Compiler
def test_scss_compiler():
    """
    Test ``compressor_toolkit.precompilers.SCSSCompiler`` on simple SCSS input.
    """
    source_scss = '''
    .a {
      .b {
        padding: {
          left: 5px;
          right: 6px;
        }
      }
    }
    '''
    expected_css = '.a .b {\n  padding-left: 5px;\n  padding-right: 6px;\n}'
    compiled = SCSSCompiler(source_scss, {}).input()
    assert compiled.strip() == expected_css
def test_es6_compiler():
    """
    Test ``compressor_toolkit.precompilers.ES6Compiler`` on simple ES6 input.
    """
    es6_source = 'export let CONST = 1'
    expected_es5 = (
        '"use strict";\n'
        '\n'
        'Object.defineProperty(exports, "__esModule", {\n'
        ' value: true\n'
        '});\n'
        'var CONST = exports.CONST = 1;\n'
    )
    transpiled = ES6Compiler(es6_source, {}).input()
    assert expected_es5 in transpiled
|
// This file was automatically generated. DO NOT EDIT.
// If you have any remark or suggestion do not hesitate to open an issue.
package marketplace
import (
"bytes"
"encoding/json"
"fmt"
"net"
"net/http"
"net/url"
"time"
"github.com/scaleway/scaleway-sdk-go/internal/errors"
"github.com/scaleway/scaleway-sdk-go/internal/marshaler"
"github.com/scaleway/scaleway-sdk-go/internal/parameter"
"github.com/scaleway/scaleway-sdk-go/namegenerator"
"github.com/scaleway/scaleway-sdk-go/scw"
)
// always import dependencies
// (blank identifiers anchor every imported package so the generator can
// emit a fixed import block without triggering "imported and not used").
var (
	_ fmt.Stringer
	_ json.Unmarshaler
	_ url.URL
	_ net.IP
	_ http.Header
	_ bytes.Reader
	_ time.Time
	_ scw.ScalewayRequest
	_ marshaler.Duration
	_ scw.File
	_ = parameter.AddToQuery
	_ = namegenerator.GetRandomName
)
// API marketplace API
type API struct {
	client *scw.Client // underlying Scaleway HTTP client used for all requests
}
// NewAPI returns a API object from a Scaleway client.
func NewAPI(client *scw.Client) *API {
	return &API{
		client: client,
	}
}
// GetImageResponse wraps a single marketplace image.
type GetImageResponse struct {
	Image *Image `json:"image"`
}
// GetServiceInfoResponse describes the marketplace service itself.
type GetServiceInfoResponse struct {
	API string `json:"api"`
	Description string `json:"description"`
	Version string `json:"version"`
}
// GetVersionResponse wraps a single image version.
type GetVersionResponse struct {
	Version *Version `json:"version"`
}
// Image is a marketplace image with its metadata and available versions.
type Image struct {
	ID string `json:"id"`
	Name string `json:"name"`
	Description string `json:"description"`
	Logo string `json:"logo"`
	Categories []string `json:"categories"`
	Organization *Organization `json:"organization"`
	ValidUntil time.Time `json:"valid_until"`
	CreationDate time.Time `json:"creation_date"`
	ModificationDate time.Time `json:"modification_date"`
	Versions []*Version `json:"versions"`
	CurrentPublicVersion string `json:"current_public_version"`
	Label string `json:"label"`
}
// ListImagesResponse is one page of images plus the running total.
type ListImagesResponse struct {
	Images []*Image `json:"images"`
	TotalCount uint32 `json:"total_count"`
}
// ListVersionsResponse is the list of versions for one image.
type ListVersionsResponse struct {
	Versions []*Version `json:"versions"`
	TotalCount uint32 `json:"total_count"`
}
// LocalImage is a zone-specific build of an image version.
type LocalImage struct {
	ID string `json:"id"`
	Arch string `json:"arch"`
	Zone scw.Zone `json:"zone"`
	CompatibleCommercialTypes []string `json:"compatible_commercial_types"`
}
// Organization is the image publisher.
type Organization struct {
	ID string `json:"id"`
	Name string `json:"name"`
}
// Version is one published version of an image and its per-zone local images.
type Version struct {
	ID string `json:"id"`
	Name string `json:"name"`
	CreationDate time.Time `json:"creation_date"`
	ModificationDate time.Time `json:"modification_date"`
	LocalImages []*LocalImage `json:"local_images"`
}
// Service API
// GetServiceInfoRequest has no parameters.
type GetServiceInfoRequest struct {
}
// GetServiceInfo returns name/description/version of the marketplace API
// via GET /marketplace/v1.
func (s *API) GetServiceInfo(req *GetServiceInfoRequest, opts ...scw.RequestOption) (*GetServiceInfoResponse, error) {
	var err error
	scwReq := &scw.ScalewayRequest{
		Method: "GET",
		Path: "/marketplace/v1",
		Headers: http.Header{},
	}
	var resp GetServiceInfoResponse
	err = s.client.Do(scwReq, &resp, opts...)
	if err != nil {
		return nil, err
	}
	return &resp, nil
}
// ListImagesRequest carries pagination parameters (query-string only,
// hence the `json:"-"` tags).
type ListImagesRequest struct {
	PerPage *int32 `json:"-"`
	Page *int32 `json:"-"`
}
// ListImages returns one page of marketplace images via
// GET /marketplace/v1/images. When PerPage is unset or 0, the client's
// configured default page size is used.
func (s *API) ListImages(req *ListImagesRequest, opts ...scw.RequestOption) (*ListImagesResponse, error) {
	var err error
	defaultPerPage, exist := s.client.GetDefaultPageSize()
	if (req.PerPage == nil || *req.PerPage == 0) && exist {
		req.PerPage = &defaultPerPage
	}
	query := url.Values{}
	parameter.AddToQuery(query, "per_page", req.PerPage)
	parameter.AddToQuery(query, "page", req.Page)
	scwReq := &scw.ScalewayRequest{
		Method: "GET",
		Path: "/marketplace/v1/images",
		Query: query,
		Headers: http.Header{},
	}
	var resp ListImagesResponse
	err = s.client.Do(scwReq, &resp, opts...)
	if err != nil {
		return nil, err
	}
	return &resp, nil
}
// UnsafeGetTotalCount should not be used
// Internal usage only
func (r *ListImagesResponse) UnsafeGetTotalCount() int {
	return int(r.TotalCount)
}
// UnsafeAppend should not be used
// Internal usage only
// It merges another page into this response for the SDK's auto-pagination
// and returns the number of items appended.
func (r *ListImagesResponse) UnsafeAppend(res interface{}) (int, scw.SdkError) {
	results, ok := res.(*ListImagesResponse)
	if !ok {
		return 0, errors.New("%T type cannot be appended to type %T", res, r)
	}
	r.Images = append(r.Images, results.Images...)
	r.TotalCount += uint32(len(results.Images))
	return len(results.Images), nil
}
// GetImageRequest identifies the image to fetch (path parameter only).
type GetImageRequest struct {
	ImageID string `json:"-"`
}
// GetImage fetches one image via GET /marketplace/v1/images/{id}.
// Returns an error when ImageID is empty.
func (s *API) GetImage(req *GetImageRequest, opts ...scw.RequestOption) (*GetImageResponse, error) {
	var err error
	if fmt.Sprint(req.ImageID) == "" {
		return nil, errors.New("field ImageID cannot be empty in request")
	}
	scwReq := &scw.ScalewayRequest{
		Method: "GET",
		Path: "/marketplace/v1/images/" + fmt.Sprint(req.ImageID) + "",
		Headers: http.Header{},
	}
	var resp GetImageResponse
	err = s.client.Do(scwReq, &resp, opts...)
	if err != nil {
		return nil, err
	}
	return &resp, nil
}
// ListVersionsRequest identifies the image whose versions are listed.
type ListVersionsRequest struct {
	ImageID string `json:"-"`
}
// ListVersions fetches all versions of an image via
// GET /marketplace/v1/images/{id}/versions.
func (s *API) ListVersions(req *ListVersionsRequest, opts ...scw.RequestOption) (*ListVersionsResponse, error) {
	var err error
	if fmt.Sprint(req.ImageID) == "" {
		return nil, errors.New("field ImageID cannot be empty in request")
	}
	scwReq := &scw.ScalewayRequest{
		Method: "GET",
		Path: "/marketplace/v1/images/" + fmt.Sprint(req.ImageID) + "/versions",
		Headers: http.Header{},
	}
	var resp ListVersionsResponse
	err = s.client.Do(scwReq, &resp, opts...)
	if err != nil {
		return nil, err
	}
	return &resp, nil
}
// GetVersionRequest identifies one version of one image.
type GetVersionRequest struct {
	ImageID string `json:"-"`
	VersionID string `json:"-"`
}
// GetVersion fetches a single image version via
// GET /marketplace/v1/images/{imageID}/versions/{versionID}.
func (s *API) GetVersion(req *GetVersionRequest, opts ...scw.RequestOption) (*GetVersionResponse, error) {
	var err error
	if fmt.Sprint(req.ImageID) == "" {
		return nil, errors.New("field ImageID cannot be empty in request")
	}
	if fmt.Sprint(req.VersionID) == "" {
		return nil, errors.New("field VersionID cannot be empty in request")
	}
	scwReq := &scw.ScalewayRequest{
		Method: "GET",
		Path: "/marketplace/v1/images/" + fmt.Sprint(req.ImageID) + "/versions/" + fmt.Sprint(req.VersionID) + "",
		Headers: http.Header{},
	}
	var resp GetVersionResponse
	err = s.client.Do(scwReq, &resp, opts...)
	if err != nil {
		return nil, err
	}
	return &resp, nil
}
|
package kata
import "unicode"
// Solve classifies every rune of s and returns the counts as
// []int{uppercase, lowercase, digits, other}.
func Solve(s string) []int {
	counts := make([]int, 4)
	for _, r := range s {
		switch {
		case unicode.IsUpper(r):
			counts[0]++
		case unicode.IsLower(r):
			counts[1]++
		case unicode.IsDigit(r):
			counts[2]++
		default:
			counts[3]++
		}
	}
	return counts
}
|
package com.example.data.api
import com.example.data.constant.*
import com.example.data.entity.film.FilmEntity
import com.example.data.entity.film.FilmsResponse
import com.example.data.entity.person.PeopleResponse
import com.example.data.entity.person.PersonEntity
import com.example.data.entity.planet.PlanetEntity
import com.example.data.entity.planet.PlanetResponse
import com.example.data.entity.specie.SpecieEntity
import com.example.data.entity.specie.SpecieResponse
import com.example.data.entity.starship.StarshipEntity
import com.example.data.entity.starship.StarshipResponse
import com.example.data.entity.vehicle.VehicleEntity
import com.example.data.entity.vehicle.VehicleResponse
import retrofit2.http.GET
import retrofit2.http.Path
import retrofit2.http.Query
/**
 * Retrofit definition of the SWAPI (Star Wars API) endpoints.
 * Each resource exposes a paged list endpoint plus a by-id detail endpoint.
 */
interface SwapiApi {
    @GET(SWAPI_API_FILMS)
    suspend fun getFilms(@Query("page") page: Int): FilmsResponse
    @GET("$SWAPI_API_FILMS/{id}")
    suspend fun getFilm(@Path("id") id: Int): FilmEntity
    @GET(SWAPI_API_PEOPLE)
    suspend fun getPeople(@Query("page") page: Int): PeopleResponse
    @GET("$SWAPI_API_PEOPLE/{id}")
    suspend fun getPerson(@Path("id") id: Int): PersonEntity
    @GET(SWAPI_API_PLANETS)
    suspend fun getPlanets(@Query("page") page: Int): PlanetResponse
    @GET("$SWAPI_API_PLANETS/{id}")
    suspend fun getPlanet(@Path("id") id: Int): PlanetEntity
    @GET(SWAPI_API_SPECIES)
    suspend fun getSpecies(@Query("page") page: Int): SpecieResponse
    @GET("$SWAPI_API_SPECIES/{id}")
    suspend fun getSpecie(@Path("id") id: Int): SpecieEntity
    @GET(SWAPI_API_STARSHIPS)
    suspend fun getStarships(@Query("page") page: Int): StarshipResponse
    @GET("$SWAPI_API_STARSHIPS/{id}")
    suspend fun getStarship(@Path("id") id: Int): StarshipEntity
    @GET(SWAPI_API_VEHICLES)
    suspend fun getVehicles(@Query("page") page: Int): VehicleResponse
    @GET("$SWAPI_API_VEHICLES/{id}")
    suspend fun getVehicle(@Path("id") id: Int): VehicleEntity
}
|
## `java:openjdk-6b38-jdk`
```console
$ docker pull java@sha256:c5a8b342ca8c70d4b347cf4dfbff6f2823822d15b2fe1ae4bd9a5bda5a2c89ed
```
- Platforms:
- linux; amd64
### `java:openjdk-6b38-jdk` - linux; amd64
- Docker Version: 1.12.3
- Manifest MIME: `application/vnd.docker.distribution.manifest.v2+json`
- Total Size: **187.3 MB (187311799 bytes)**
(compressed transfer size, not on-disk size)
- Image ID: `sha256:31eff0ca351d42bccc1aedf3c35ea08d7f9a4dda99a0fb8e1dce5bfd25191151`
- Default Command: `["\/bin\/bash"]`
```dockerfile
# Mon, 16 Jan 2017 20:41:52 GMT
ADD file:a341378be341bc318a57379c0a4b72f182f93ad617f08164343662e789b7244b in /
# Mon, 16 Jan 2017 20:42:00 GMT
CMD ["/bin/bash"]
# Tue, 17 Jan 2017 00:02:40 GMT
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl wget && rm -rf /var/lib/apt/lists/*
# Tue, 17 Jan 2017 00:03:12 GMT
RUN apt-get update && apt-get install -y --no-install-recommends bzr git mercurial openssh-client subversion procps && rm -rf /var/lib/apt/lists/*
# Tue, 17 Jan 2017 00:48:32 GMT
RUN apt-get update && apt-get install -y --no-install-recommends bzip2 unzip xz-utils && rm -rf /var/lib/apt/lists/*
# Tue, 17 Jan 2017 00:48:32 GMT
ENV LANG=C.UTF-8
# Tue, 17 Jan 2017 00:48:33 GMT
RUN { echo '#!/bin/sh'; echo 'set -e'; echo; echo 'dirname "$(dirname "$(readlink -f "$(which javac || which java)")")"'; } > /usr/local/bin/docker-java-home && chmod +x /usr/local/bin/docker-java-home
# Tue, 17 Jan 2017 00:48:33 GMT
ENV JAVA_HOME=/usr/lib/jvm/java-6-openjdk-amd64
# Tue, 17 Jan 2017 00:48:33 GMT
ENV JAVA_VERSION=6b38
# Tue, 17 Jan 2017 00:48:34 GMT
ENV JAVA_DEBIAN_VERSION=6b38-1.13.10-1~deb7u1
# Tue, 17 Jan 2017 00:49:25 GMT
RUN set -x && apt-get update && apt-get install -y openjdk-6-jdk="$JAVA_DEBIAN_VERSION" && rm -rf /var/lib/apt/lists/* && [ "$JAVA_HOME" = "$(docker-java-home)" ]
```
- Layers:
- `sha256:d9509b80c497066660d1e7a4e22ba112d025e83f6f7183d53c95bed1513938b7`
Last Modified: Mon, 16 Jan 2017 20:55:10 GMT
Size: 37.3 MB (37284485 bytes)
MIME: application/vnd.docker.image.rootfs.diff.tar.gzip
- `sha256:a0c12b04be6f745f58154eeb7b80403f95b355ca3842d384ffb685acf20432ad`
Last Modified: Tue, 17 Jan 2017 00:24:03 GMT
Size: 6.8 MB (6823701 bytes)
MIME: application/vnd.docker.image.rootfs.diff.tar.gzip
- `sha256:0998595e89ba3755e57a2e3cd79c8dd7805959a77828f622fa3b344a73ca5d50`
Last Modified: Tue, 17 Jan 2017 00:24:49 GMT
Size: 37.4 MB (37442207 bytes)
MIME: application/vnd.docker.image.rootfs.diff.tar.gzip
- `sha256:ff75640288162668192a3e192e53ed1a256803c30acbbbedc26b8646548e50a0`
Last Modified: Wed, 18 Jan 2017 05:15:52 GMT
Size: 413.7 KB (413731 bytes)
MIME: application/vnd.docker.image.rootfs.diff.tar.gzip
- `sha256:ea23632d43c730ab6c1671bc31df3456af028d9290c2c9b6a7e1b3a3b211f875`
Last Modified: Wed, 18 Jan 2017 05:15:48 GMT
Size: 242.0 B
MIME: application/vnd.docker.image.rootfs.diff.tar.gzip
- `sha256:352ad2a2801fd2a8c6f79f89bb796ace45fd0c0b0a9b12d487f6123b075cede0`
Last Modified: Wed, 18 Jan 2017 05:16:26 GMT
Size: 105.3 MB (105347433 bytes)
MIME: application/vnd.docker.image.rootfs.diff.tar.gzip
|
using System.Collections.Immutable;
using BenchmarkDotNet.Attributes;
namespace ImmutableListBuilderBenchmark;
[MemoryDiagnoser]
// Compares ways of building an ImmutableList<int> from 1 000 integers.
// NOTE: the exact shape of each method body is what is being measured,
// so the bodies must not be "cleaned up" without invalidating results.
public class ImmutableBenchmark
{
    // Pre-materialized source so enumeration cost is excluded where intended.
    private readonly List<int> _collection;
    public ImmutableBenchmark()
    {
        _collection = Enumerable.Range(0, 1_000).ToList();
    }
    // Baseline: the ToImmutableList extension on an existing list.
    [Benchmark]
    public IImmutableList<int> CollectionToImmutableList()
    {
        return _collection.ToImmutableList();
    }
    // Builder + per-item Add from the pre-built list.
    [Benchmark]
    public IImmutableList<int> ImmutableListBuilderFromCollectionWithAdd()
    {
        var builder = ImmutableList.CreateBuilder<int>();
        foreach (var entry in _collection)
        {
            builder.Add(entry);
        }
        return builder.ToImmutable();
    }
    // Builder + per-item Add over a lazy enumerable (includes enumeration cost).
    [Benchmark]
    public IImmutableList<int> ImmutableListBuilderAdd()
    {
        var data = Enumerable.Range(0, 1_000);
        var builder = ImmutableList.CreateBuilder<int>();
        foreach (var entry in data)
        {
            builder.Add(entry);
        }
        return builder.ToImmutable();
    }
    // Builder + AddRange over a lazy enumerable.
    [Benchmark]
    public IImmutableList<int> ImmutableListBuilderAddRange()
    {
        var data = Enumerable.Range(0, 1_000);
        var builder = ImmutableList.CreateBuilder<int>();
        builder.AddRange(data);
        return builder.ToImmutable();
    }
    // Builder + AddRange from the pre-built list.
    [Benchmark]
    public IImmutableList<int> ImmutableListBuilderFromCollectionWithAddRange()
    {
        var builder = ImmutableList.CreateBuilder<int>();
        builder.AddRange(_collection);
        return builder.ToImmutable();
    }
}
|
import { put, all, takeEvery } from 'redux-saga/effects';
import axios from 'axios';
import { SUBMIT_ACTION } from './constants';
import { ServerDataLoaded, ServerDataLoadingError, PostDataLoaded } from './actions';
/**
 * Fetches the equipment list from the API and dispatches it into the store;
 * on failure dispatches the loading-error action instead.
 */
export function* callDataSaga() {
  try {
    const response = yield axios.get('http://manufacture-service-api.dn-kronas.local/api/web/v1/equipment');
    yield put(ServerDataLoaded(response.data));
  } catch (error) {
    yield put(ServerDataLoadingError(error));
  }
}
/**
 * Fetches the post list from the API and dispatches it into the store;
 * on failure dispatches the loading-error action instead.
 */
export function* callPostId() {
  try {
    const response = yield axios.get('http://manufacture-service-api.dn-kronas.local/api/web/v1/post');
    yield put(PostDataLoaded(response.data));
  } catch (error) {
    yield put(ServerDataLoadingError(error));
  }
}
/**
 * Persists a new equipment record taken from the SUBMIT_ACTION payload.
 * NOTE(review): the request body includes `method` and `header` keys that
 * look like leftover fetch options embedded in the data — confirm the API
 * actually expects them before removing.
 */
export function* pushData(action) {
  try {
    const equipment = action.equipment;
    yield axios.post('http://manufacture-service-api.dn-kronas.local/api/web/v1/equipment', {
      method: 'POST',
      header: 'Data from EquipmentForm',
      name: equipment.name,
      inventory_number: +equipment.inventory_number,
      performance_per_hour: +equipment.performance_per_hour,
      post_id: +equipment.post_id,
    });
  } catch (error) {
    yield put(ServerDataLoadingError(error));
  }
}
// Watches every SUBMIT_ACTION and forks pushData for each dispatch.
export function* pushSaga() {
  yield takeEvery(SUBMIT_ACTION, pushData);
}
/**
 * Root saga: runs the data-loading sagas and the submit watcher in parallel.
 */
export function* rootSaga() {
  const sagas = [
    callDataSaga(),
    pushSaga(),
    callPostId(),
  ];
  yield all(sagas);
}
export default rootSaga;
|
'use strict'
const path = require('path')
function resolve(dir = '') {
return path.join(__dirname, dir)
}
const name = 'vtz-ui' // page title
// If your port is set to 80,
// use administrator privileges to execute the command line.
// For example, Mac: sudo npm run
// You can change the port by the following method:
// port = 9527 npm run dev OR npm run dev --port = 9527
const port = process.env.port || process.env.npm_config_port || 9537 // dev port
// All configuration item explanations can be find in https://cli.vuejs.org/config/
module.exports = {
  /**
   * You will need to set publicPath if you plan to deploy your site under a sub path,
   * for example GitHub Pages. If you plan to deploy your site to https://foo.github.io/bar/,
   * then publicPath should be set to "/bar/".
   * In most cases please use '/' !!!
   * Detail: https://cli.vuejs.org/config/#publicpath
   */
  // publicPath: '/',
  outputDir: 'lib',
  // assetsDir: 'static',
  // Demo/docs app built from the examples directory.
  pages: {
    index: {
      entry: 'examples/main.js',
      template: 'public/index.html',
      filename: 'index.html',
    },
  },
  lintOnSave: process.env.NODE_ENV === 'development',
  productionSourceMap: false,
  devServer: {
    port: port,
    open: true,
    overlay: {
      warnings: false,
      errors: true
    }
  },
  configureWebpack: {
    resolve: {
      extensions: ['.js', '.vue', '.json'],
      alias: {
        '@': resolve('examples'),
        '@vtz': resolve('components')
      }
    },
    output: {
      // Expose the default export directly when consumed as a library.
      libraryExport: 'default'
    }
  },
  chainWebpack(config) {
    config.plugins.delete('preload') // TODO: need test
    config.plugins.delete('prefetch') // TODO: need test
    // set svg-sprite-loader
    config.module
      .rule('svg')
      .exclude.add(resolve('examples/icons'))
      .end()
    config.module
      .rule('icons')
      .test(/\.svg$/)
      .include.add(resolve('examples/icons'))
      .end()
      .use('svg-sprite-loader')
      .loader('svg-sprite-loader')
      .options({
        symbolId: 'icon-[name]'
      })
      .end()
    config.module
      .rule('fonts')
      .use('url-loader')
      .tap(option => {
        option.fallback.options.name = 'static/fonts/[name].[hash:8].[ext]'
        return option
      })
    // set preserveWhitespace
    config.module
      .rule('vue')
      .use('vue-loader')
      .loader('vue-loader')
      .tap(options => {
        options.compilerOptions.preserveWhitespace = true
        return options
      })
      .end()
    config
    // https://webpack.js.org/configuration/devtool/#development
      .when(process.env.NODE_ENV === 'development',
        config => config.devtool('cheap-source-map')
        // config => config.devtool('cheap-module-eval-source-map')
      )
    config
      .when(process.env.NODE_ENV !== 'development',
        config => {
          config.module
            .rule('js')
            .include
            .add(resolve('components'))
            .end()
            .use('babel')
            .loader('babel-loader')
            .tap(options => {
              // modify its options here if needed...
              return options
            })
        }
      )
  },
  css: {
    sourceMap: true,
    extract: {
      filename: 'style/[name].css'
    }
  },
}
|
// Terminal game state: id of the player who won.
interface GameOver {
  winner: number;
}

// Randomness source available to game logic.
interface Random {
  // Die roll; presumably returns 1-6 — TODO confirm against implementation.
  D6: () => number;
}

// Events game logic can trigger on the engine.
interface Events {
  endTurn: () => void;
}

// Context object handed to game logic each move/turn.
export interface GameContext {
  numPlayers: number;
  turn: number;
  currentPlayer: number;
  // Present only once the game has ended.
  gameover?: GameOver;
  random: Random;
  events: Events;
}
|
#!/bin/bash
#
# [DNB 7-Jun-2018] Script to upgrade galaxy instance
#
# ==================================================
#
# Some variables
NGINX_CLOUDMAN_TEMPLATE=/opt/cloudman/config/conftemplates/nginx_galaxy_locations
NGINX_FILE=/etc/nginx/sites-enabled/galaxy.locations
GALAXY_ROOT=/mnt/galaxy/galaxy-app
# Function to ensure that some parts of the code are indeed being run as galaxy
# Ensure the current effective user is "galaxy"; abort the shell otherwise.
# $1 - name of the calling function (used only in the messages).
function test_galaxy_user() {
    # $(...) instead of legacy backticks; quote to survive odd values.
    galaxy_id=$(id -u galaxy)
    if [ "$EUID" -ne "$galaxy_id" ]; then
        echo "The $1 function must be run as the galaxy user."
        exit 1
    fi
    echo "Running $1 function as galaxy user with uid of $galaxy_id"
}
# Function to change NGINX config as link from packed was dropped in this release
# Strip the obsolete "/packed" path component from the nginx config files;
# the packed-assets link was dropped in this release.
function change_nginx_config() {
    for file in ${NGINX_CLOUDMAN_TEMPLATE} ${NGINX_FILE}; do
        # BUG FIX: the original ran "sed -e -i 's/...'", which makes sed
        # take "-i" as the -e script and the substitution as a filename,
        # so nothing was ever edited. Options must precede the expression.
        sed -i -e 's/\/packed//' "${file}"
    done
}
# Function to be run as user galaxy for updating the galaxy code
# Upgrade the Galaxy checkout to release_18.05 (must run as the galaxy user).
# Backs up the run scripts, updates git, wipes the virtualenv and reruns
# run.sh so it recreates the venv (expected to fail on the outdated DB).
function update_galaxy() {
    test_galaxy_user update_galaxy
    backup_location=/mnt/tmp/galaxy_startup_scripts
    # Guard the cd: never run git/rm in the wrong directory.
    cd "${GALAXY_ROOT}" || exit 1
    echo "Making a backup of run.sh and run_reports.sh in ${backup_location}"
    # -p: do not fail (and skip the copy) when the directory already exists,
    # e.g. when this script is re-run after a partial upgrade.
    mkdir -p "${backup_location}" && cp run.sh run_reports.sh "${backup_location}"
    echo "Doing the git upgrade"
    echo git status
    git status
    git fetch origin && git checkout release_18.05 && git pull --ff-only origin release_18.05
    echo "Removing virtual env for galaxy: rm -rf .venv"
    rm -rf .venv
    echo "Copying back run.sh and run_reports.sh"
    cp "${backup_location}/run.sh" "${backup_location}/run_reports.sh" .
    echo "pull down the virtual env by running galaxy - should fail to start because of db"
    ./run.sh
    echo "Do the following to finish the upgrade"
    echo "source .venv/bin/activate && pip install ephemeris"
}
# Function to update the database to be run as user galaxy
# Run Galaxy's database migration script (must run as the galaxy user,
# from the Galaxy root — see update_galaxy, which cd's there).
function update_database() {
    test_galaxy_user update_database
    echo "Doing the galaxy database upgrade: sh manage_db.sh upgrade"
    sh manage_db.sh upgrade
}
# This script is meant to be *sourced* so the functions above land in the
# operator's shell; print the menu of functions they can then invoke.
echo "Usage: source $0"
echo "Choose from the following options"
echo " o change_nginx_config - as root - need to remove reference to old bundle in static path"
echo " o update_galaxy - as galaxy - git upgrade of galaxy and virtual environment"
echo " o update_database - as galaxy - upgrade of the galaxy database"
|
%%%===================================================================
%%% @copyright (C) 2012, Erlang Solutions Ltd.
%%% @doc Module abstracting Websockets over TCP connection to XMPP server
%%% @end
%%%===================================================================
-module(escalus_ws).
-behaviour(gen_server).
-behaviour(escalus_connection).
-include_lib("exml/include/exml_stream.hrl").
-include("escalus.hrl").
%% API exports
-export([connect/1,
send/2,
is_connected/1,
upgrade_to_tls/2,
use_zlib/2,
get_transport/1,
reset_parser/1,
stop/1,
kill/1,
set_filter_predicate/2]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(WAIT_FOR_SOCKET_CLOSE_TIMEOUT, 200).
-define(HANDSHAKE_TIMEOUT, 3000).
-define(SERVER, ?MODULE).
-record(state, {owner, socket, parser, legacy_ws, compress = false,
event_client, filter_pred}).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Start the connection process and return its transport (client) handle.
-spec connect([proplists:property()]) -> {ok, escalus:client()}.
connect(Args) ->
    {ok, Pid} = gen_server:start_link(?MODULE, [Args, self()], []),
    Transport = gen_server:call(Pid, get_transport),
    {ok, Transport}.

%% @doc Send a stanza; uses the compressed path when zlib is negotiated.
send(#client{rcv_pid = Pid, compress = {zlib, {_, Zout}}}, Elem) ->
    gen_server:cast(Pid, {send_compressed, Zout, Elem});
send(#client{rcv_pid = Pid}, Elem) ->
    gen_server:cast(Pid, {send, exml:to_iolist(Elem)}).

%% @doc A client counts as connected while its receiver process is alive.
is_connected(#client{rcv_pid = Pid}) ->
    erlang:is_process_alive(Pid).

%% @doc Reset the XML stream parser (e.g. after a stream restart).
reset_parser(#client{rcv_pid = Pid}) ->
    gen_server:cast(Pid, reset_parser).

%% @doc Gracefully stop the connection; tolerate an already-dead process.
stop(#client{rcv_pid = Pid}) ->
    try
        gen_server:call(Pid, stop)
    catch
        exit:{noproc, {gen_server, call, _}} ->
            already_stopped;
        exit:{normal, {gen_server, call, _}} ->
            already_stopped
    end.

%% @doc Hard kill is not supported for the websocket transport.
kill(Transport) ->
    error({not_implemented_for, ?MODULE}, [Transport]).

%% @doc Install a predicate deciding which stanzas are forwarded to the owner.
-spec set_filter_predicate(escalus_connection:client(),
                           escalus_connection:filter_pred()) -> ok.
set_filter_predicate(#client{rcv_pid = Pid}, Pred) ->
    gen_server:call(Pid, {set_filter_pred, Pred}).

%% @doc STARTTLS is not applicable on a websocket connection.
upgrade_to_tls(_, _) ->
    throw(starttls_not_supported).

%% TODO: this is an exact duplicate of escalus_tcp:use_zlib/2, DRY!
%% @doc Negotiate zlib stream compression and restart the stream.
use_zlib(#client{rcv_pid = Pid} = Client, Props) ->
    escalus_connection:send(Client, escalus_stanza:compress(<<"zlib">>)),
    Compressed = escalus_connection:get_stanza(Client, compressed),
    escalus:assert(is_compressed, Compressed),
    gen_server:call(Pid, use_zlib),
    Client1 = get_transport(Client),
    {Props2, _} = escalus_session:start_stream(Client1, Props),
    {Client1, Props2}.

%% @doc Fetch a fresh client handle reflecting the server's current state.
get_transport(#client{rcv_pid = Pid}) ->
    gen_server:call(Pid, get_transport).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%% TODO: refactor all opt defaults taken from Args into a default_opts function,
%% so that we know what options the module actually expects
%% @doc Open the websocket, register wsecli callbacks that turn socket
%% events into messages to this process, and create the stream parser.
init([Args, Owner]) ->
    Host = get_host(Args, "localhost"),
    Port = get_port(Args, 5280),
    Resource = get_resource(Args, "/ws-xmpp"),
    LegacyWS = get_legacy_ws(Args, false),
    EventClient = proplists:get_value(event_client, Args),
    SSL = proplists:get_value(ssl, Args, false),
    WSOptions = [{ssl, SSL}],
    {ok, Socket} = wsecli:start(Host, Port, Resource, WSOptions),
    Pid = self(),
    wsecli:on_open(Socket, fun() -> Pid ! opened end),
    wsecli:on_error(Socket, fun(Reason) -> Pid ! {error, Reason} end),
    wsecli:on_message(Socket, fun(Type, Data) -> Pid ! {Type, Data} end),
    wsecli:on_close(Socket, fun(_) -> Pid ! tcp_closed end),
    wait_for_socket_start(),
    %% Legacy mode parses one continuous XML stream; otherwise each frame
    %% is parsed as a standalone element (infinite_stream + autoreset).
    ParserOpts = if
                     LegacyWS -> [];
                     true -> [{infinite_stream, true}, {autoreset, true}]
                 end,
    {ok, Parser} = exml_stream:new_parser(ParserOpts),
    {ok, #state{owner = Owner,
                socket = Socket,
                parser = Parser,
                legacy_ws = LegacyWS,
                event_client = EventClient}}.

handle_call(get_transport, _From, State) ->
    {reply, transport(State), State};
%% Set up a zlib inflate/deflate pair and reset the parser for the
%% restarted (compressed) stream.
handle_call(use_zlib, _, #state{parser = Parser, socket = Socket} = State) ->
    Zin = zlib:open(),
    Zout = zlib:open(),
    ok = zlib:inflateInit(Zin),
    ok = zlib:deflateInit(Zout),
    {ok, NewParser} = exml_stream:reset_parser(Parser),
    {reply, Socket, State#state{parser = NewParser,
                                compress = {zlib, {Zin, Zout}}}};
handle_call({set_filter_pred, Pred}, _From, State) ->
    {reply, ok, State#state{filter_pred = Pred}};
%% Close the stream politely (legacy: </stream:stream>, otherwise the
%% ws_close element), flushing and closing the zlib streams when active.
handle_call(stop, _From, #state{socket = Socket,
                                compress = Compress} = State) ->
    StreamEnd = if
                    State#state.legacy_ws -> escalus_stanza:stream_end();
                    true -> escalus_stanza:ws_close()
                end,
    case Compress of
        {zlib, {Zin, Zout}} ->
            %% inflateEnd may raise data_error if the peer never finished
            %% its compressed stream; that is fine during shutdown.
            try
                ok = zlib:inflateEnd(Zin)
            catch
                error:data_error -> ok
            end,
            ok = zlib:close(Zin),
            wsecli:send(Socket, zlib:deflate(Zout,
                                             exml:to_iolist(StreamEnd),
                                             finish)),
            ok = zlib:deflateEnd(Zout),
            ok = zlib:close(Zout);
        false ->
            wsecli:send(Socket, exml:to_iolist(StreamEnd))
    end,
    wait_until_closed(),
    {stop, normal, ok, State}.

handle_cast({send_compressed, Zout, Elem}, State) ->
    wsecli:send(State#state.socket, zlib:deflate(Zout, exml:to_iolist(Elem), sync)),
    {noreply, State};
handle_cast({send, Data}, State) ->
    wsecli:send(State#state.socket, Data),
    {noreply, State};
handle_cast(reset_parser, #state{parser = Parser} = State) ->
    {ok, NewParser} = exml_stream:reset_parser(Parser),
    {noreply, State#state{parser = NewParser}}.

%% Socket events arrive as plain messages (installed in init/1).
handle_info(tcp_closed, State) ->
    {stop, normal, State};
handle_info({error, Reason}, State) ->
    {stop, Reason, State};
handle_info({text, Data}, State) ->
    handle_data(list_to_binary(lists:flatten(Data)), State);
handle_info({binary, Data}, State) ->
    handle_data(Data, State);
handle_info(_, State) ->
    {noreply, State}.

terminate(_Reason, #state{socket = Socket} = State) ->
    common_terminate(_Reason, State),
    wsecli:stop(Socket).

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%%===================================================================
%%% Helpers
%%%===================================================================
%% @doc Feed raw websocket payload to the stream parser (inflating first
%% when zlib is active) and dispatch the resulting stanzas. Stops the
%% server normally when a stream end element is seen.
handle_data(Data, State = #state{parser = Parser,
                                 compress = Compress}) ->
    {ok, NewParser, Stanzas} =
        case Compress of
            false ->
                exml_stream:parse(Parser, Data);
            {zlib, {Zin,_}} ->
                Decompressed = iolist_to_binary(zlib:inflate(Zin, Data)),
                exml_stream:parse(Parser, Decompressed)
        end,
    NewState = State#state{parser = NewParser},
    escalus_connection:maybe_forward_to_owner(NewState#state.filter_pred,
                                              NewState,
                                              Stanzas,
                                              fun forward_to_owner/2),
    case lists:filter(fun is_stream_end/1, Stanzas) of
        [] -> {noreply, NewState};
        _ -> {stop, normal, NewState}
    end.

-spec is_stream_end(exml_stream:element()) -> boolean().
is_stream_end(#xmlstreamend{}) -> true;
is_stream_end(_) -> false.

%% @doc Deliver stanzas to the owner process, recording each in the
%% event system first.
forward_to_owner(Stanzas, #state{owner = Owner,
                                 event_client = EventClient} = NewState) ->
    lists:foreach(fun(Stanza) ->
                      escalus_event:incoming_stanza(EventClient, Stanza),
                      Owner ! {stanza, transport(NewState), Stanza}
                  end, Stanzas).

common_terminate(_Reason, #state{parser = Parser}) ->
    exml_stream:free_parser(Parser).

%% @doc Build the #client{} handle handed out to API callers.
transport(#state{socket = Socket,
                 compress = Compress,
                 event_client = EventClient}) ->
    #client{module = ?MODULE,
            socket = Socket,
            ssl = undefined,
            compress = Compress,
            rcv_pid = self(),
            event_client = EventClient}.

%% @doc Wait briefly for the close notification after sending stream end;
%% give up silently after ?WAIT_FOR_SOCKET_CLOSE_TIMEOUT.
wait_until_closed() ->
    receive
        tcp_closed ->
            ok
    after ?WAIT_FOR_SOCKET_CLOSE_TIMEOUT ->
        ok
    end.

%% @doc Block until wsecli reports the websocket handshake finished.
wait_for_socket_start() ->
    receive
        opened ->
            ok
    after ?HANDSHAKE_TIMEOUT ->
        throw(handshake_timeout)
    end.

%% Option accessors: thin wrappers over get_option/3 with defaults.
-spec get_port(list(), inet:port_number()) -> inet:port_number().
get_port(Args, Default) ->
    get_option(port, Args, Default).

-spec get_host(list(), string()) -> string().
get_host(Args, Default) ->
    maybe_binary_to_list(get_option(host, Args, Default)).

-spec get_resource(list(), string()) -> string().
get_resource(Args, Default) ->
    maybe_binary_to_list(get_option(wspath, Args, Default)).

-spec get_legacy_ws(list(), boolean()) -> boolean().
get_legacy_ws(Args, Default) ->
    get_option(wslegacy, Args, Default).

-spec maybe_binary_to_list(binary() | string()) -> string().
maybe_binary_to_list(B) when is_binary(B) -> binary_to_list(B);
maybe_binary_to_list(S) when is_list(S) -> S.

-spec get_option(any(), list(), any()) -> any().
get_option(Key, Opts, Default) ->
    case lists:keyfind(Key, 1, Opts) of
        false -> Default;
        {Key, Value} -> Value
    end.
|
# Clean generated key material for the testnet, then restore the admin
# keystores. (A duplicated "rm -rf ./output-testnet/ks/*" line was removed.)
rm -rf ./output-testnet/ks/*
rm -rf ./output-testnet/gpkKs
rm -rf ./output-testnet/gskList
rm -rf ./output-testnet/nodeKeyList
rm -rf ./output-testnet/RelationList
rm -rf ./output-testnet/WalletAddList
rm -rf ./output-testnet/WorkingAddList
cp ./output/ks_admin/* ./output-testnet/ks
|
console.log('blah');
huh = function anotherDependency() {
console.log('we are in dep2');
};
module.exports = {
anotherDependency: anotherDependency
};
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormatRoutingModule } from './format-routing.module';
import { FormatEditComponent } from './edit/format-edit.component';
import { FormatListComponent } from './list/format-list.component';
import { FormatCardComponent } from './card/format-card.component';
import { FormatDetailComponent } from './detail/format-detail.component';
import { FormatComponent } from './format.component';
// Feature module bundling all Format-related components; routing is
// provided by FormatRoutingModule.
@NgModule({
  imports: [
    CommonModule,
    FormatRoutingModule
  ],
  declarations: [
    FormatCardComponent,
    FormatDetailComponent,
    FormatEditComponent,
    FormatListComponent,
    FormatComponent
  ]
})
export class FormatModule { }
|
require "sass"
require "compass"
require "fancy-buttons"
# yui-compressor is optional: minification is only attempted when the
# :minify_css feature is enabled, so a missing gem is not fatal here.
begin
  require "yui/compressor"
rescue LoadError
  puts "YUI-Compressor not available. Install it with: gem install yui-compressor"
end
# Middleman extension adding Sass rendering (via Compass) with optional
# YUI CSS minification and pretty-printed error output.
module Middleman::Sass
  # Register "sass" as a supported template format on the including class.
  def self.included(base)
    base.supported_formats << "sass"
  end

  # Render the request path as Sass when a matching template exists;
  # otherwise defer to the next renderer in the chain via super.
  def render_path(path, layout)
    if template_exists?(path, :sass)
      begin
        # Serve a previously built static file when present.
        static_version = settings.public + request.path_info
        # NOTE(review): File.exists? is deprecated in modern Ruby
        # (File.exist? is preferred); kept as-is for compatibility.
        send_file(static_version) if File.exists? static_version
        # The css_filename Compass sees differs between build and preview.
        location_of_sass_file = settings.environment == "build" ?
                                  File.join(Dir.pwd, settings.build_dir) :
                                  settings.public
        css_filename = File.join(location_of_sass_file, request.path_info)
        result = sass(path.to_sym, ::Compass.sass_engine_options.merge({ :css_filename => css_filename }))
        if enabled?(:minify_css)
          ::YUI::CssCompressor.new.compress(result)
        else
          result
        end
      rescue Exception => e
        # Render the error as CSS so it shows up in the browser.
        sass_exception_string(e)
      end
    else
      super
    end
  end

  # Handle Sass errors
  # Build a CSS snippet that displays the exception (with a source excerpt
  # for syntax errors) via a body:before pseudo-element.
  def sass_exception_string(e)
    e_string = "#{e.class}: #{e.message}"
    if e.is_a? ::Sass::SyntaxError
      e_string << "\non line #{e.sass_line}"
      if e.sass_filename
        e_string << " of #{e.sass_filename}"
        if File.exists?(e.sass_filename)
          e_string << "\n\n"
          # Show five lines of context on each side of the failing line.
          min = [e.sass_line - 5, 0].max
          begin
            File.read(e.sass_filename).rstrip.split("\n")[
              min .. e.sass_line + 5
            ].each_with_index do |line, i|
              e_string << "#{min + i + 1}: #{line}\n"
            end
          rescue
            e_string << "Couldn't read sass file: #{e.sass_filename}"
          end
        end
      end
    end
    <<END
/*
#{e_string}

Backtrace:\n#{e.backtrace.join("\n")}
*/
body:before {
  white-space: pre;
  font-family: monospace;
  content: "#{e_string.gsub('"', '\"').gsub("\n", '\\A ')}"; }
END
  end
end
# Reopen Middleman::Base to mix in the Sass renderer and configure Compass
# once all features have been initialized.
class Middleman::Base
  include Middleman::Sass

  after_feature_init do
    ::Compass.configuration do |config|
      config.cache_path = File.join(self.root, ".sassc") # For sassc files
      config.project_path = self.root
      config.sass_dir = File.join(File.basename(self.views), self.css_dir)
      config.output_style = self.enabled?(:minify_css) ? :compressed : :nested
      config.fonts_dir = File.join(File.basename(self.public), self.fonts_dir)
      config.css_dir = File.join(File.basename(self.public), self.css_dir)
      config.images_dir = File.join(File.basename(self.public), self.images_dir)
      # Fall back to prefix-based paths when the explicit helpers are absent.
      config.http_images_path = self.http_images_path rescue File.join(self.http_prefix || "/", self.images_dir)
      config.http_stylesheets_path = self.http_css_path rescue File.join(self.http_prefix || "/", self.css_dir)
      config.asset_cache_buster { false }
      config.add_import_path(config.sass_dir)
    end
    ::Compass.configure_sass_plugin!
    Sass::Plugin.options.update(:line_comments => true, :debug_info => true)

    # Build mode writes into build_dir and drops the debugging extras.
    configure :build do
      ::Compass.configuration do |config|
        config.css_dir = File.join(File.basename(self.build_dir), self.css_dir)
        config.images_dir = File.join(File.basename(self.build_dir), self.images_dir)
      end
      ::Compass.configure_sass_plugin!
      Sass::Plugin.options.update(:line_comments => false, :debug_info => false)
    end
  end
end
|
#! /bin/bash
# This a simple build script which uses sub-scripts to build an X.Org server
# from scratch, along with all of it's necessary dependencies.
START_DIR=$(pwd)
SCRIPT_DIR="$START_DIR/build_scripts/xorg"
PACKAGE_DIR="$START_DIR/packs/xorg"
XORG_CONFIG="--prefix=/usr --sysconfdir=/etc --localstatedir=/var --disable-static "
# Configurables
# Choose from: "amd", "intel" or "rpi"
# Nvidia cards are not supported because I do not have one to test on
GRAPHICS_DRIVER="rpi"
# NOTE(review): INSTALL and CORES are not referenced in this file; the
# sub-scripts below are *sourced*, so presumably they read these (and
# XORG_CONFIG / PACKAGE_DIR) from the shared shell environment — confirm.
INSTALL=1
CORES=$(grep -c ^processor /proc/cpuinfo)
# Start script
# Abort the whole build on the first failing command.
set -o errexit
echo "Building xorg server"
## Start building some things
# util-macros
source "$SCRIPT_DIR/std_xorg_install.sh" "util-macros" "1.19.0"
# X.Org protocol headers
source "$SCRIPT_DIR/group_build.sh" "proto"
# libXau
source "$SCRIPT_DIR/std_xorg_build.sh" "libXau" "1.0.8"
# libXdmcp
source "$SCRIPT_DIR/std_xorg_build.sh" "libXdmcp" "1.1.2"
# xcb-proto
source "$SCRIPT_DIR/std_xorg_install.sh" "xcb-proto" "1.11"
# build libxcb
source "$SCRIPT_DIR/libxcb.sh" "1.11.1"
## Start dependencies for Fontconfig which is a dependency for X.Org libraries
# build libpng
source "$SCRIPT_DIR/libpng.sh" "1.6.21"
# build FreeType
source "$SCRIPT_DIR/freetype.sh" "2.6.3"
# build elfutils (a glib optional dependency, but needed for Mesa later)
source "$SCRIPT_DIR/elfutils.sh" "0.166"
# build GLib
source "$SCRIPT_DIR/glib.sh" "2.48.0"
# build ICU
source "$SCRIPT_DIR/icu.sh" "57_1"
# build HarfBuzz
source "$SCRIPT_DIR/harfbuzz.sh" "1.2.6"
## Rebuild FreeType with HarfBuzz as a dependency
# (Deliberate second build: FreeType <-> HarfBuzz is a circular dependency.)
source "$SCRIPT_DIR/freetype.sh" "2.6.3"
# Finally, build Fontconfig
source "$SCRIPT_DIR/fontconfig.sh" "2.11.1"
# build the X.Org libraries group
source "$SCRIPT_DIR/group_build.sh" "lib"
# build xcb-util
source "$SCRIPT_DIR/std_xorg_build.sh" "xcb-util" "0.4.0"
# build xcb-util-image
source "$SCRIPT_DIR/std_xorg_build.sh" "xcb-util-image" "0.4.0"
# build xcb-util-keysms
source "$SCRIPT_DIR/std_xorg_build.sh" "xcb-util-keysyms" "0.4.0"
# build xcb-util-renderutil
source "$SCRIPT_DIR/std_xorg_build.sh" "xcb-util-renderutil" "0.3.9"
# build xcb-util-wm
source "$SCRIPT_DIR/std_xorg_build.sh" "xcb-util-wm" "0.4.1"
# build xcb-util-cursor
source "$SCRIPT_DIR/std_xorg_build.sh" "xcb-util-cursor" "0.1.2"
# Start Mesa build dependencies
# build libdrm
source "$SCRIPT_DIR/libdrm.sh" "2.4.67"
# build llvm (without clang)
source "$SCRIPT_DIR/llvm.sh" "3.8.0"
# build libvpdau
source "$SCRIPT_DIR/libvdpau.sh" "1.1.1"
# mesa
source "$SCRIPT_DIR/mesa.sh" "11.2.1"
# xbitmaps
source "$SCRIPT_DIR/std_xorg_install.sh" "xbitmaps" "1.1.1"
# X.Org applications group
source "$SCRIPT_DIR/group_build.sh" "app"
source "$SCRIPT_DIR/std_xorg_build.sh" "xcursor-themes" "1.0.4"
source "$SCRIPT_DIR/group_build.sh" "font"
source "$SCRIPT_DIR/xkeyboardconfig.sh" "2.17"
source "$SCRIPT_DIR/pixman.sh" "0.34.0"
source "$SCRIPT_DIR/libepoxy.sh" "1.3.1"
source "$SCRIPT_DIR/xorgserver.sh" "1.18.3"
## This is the part where it gets messy. Time to install drivers...
## Please make sure your kernel is configured correctly.
source "$SCRIPT_DIR/libevdev.sh" "1.4.6"
source "$SCRIPT_DIR/mtdev.sh" "1.1.5"
# Might try this alternative in the future
source "$SCRIPT_DIR/libinput.sh" "1.2.4"
source "$SCRIPT_DIR/input-evdev.sh" "2.10.1"
# Pick the video driver selected via GRAPHICS_DRIVER above.
case "$GRAPHICS_DRIVER" in
    "amd")
        echo "Making AMD/ATI graphics driver"
        sleep 2
        source "$SCRIPT_DIR/amd.sh" "7.7.0"
        ;;
    "intel")
        source "$SCRIPT_DIR/intel.sh" "0340718"
        ;;
    "rpi")
        echo "Making the RPi framebuffer driver 'fbturbo'"
        source "$SCRIPT_DIR/fbturbo.sh"
        ;;
esac
# xinit
source "$SCRIPT_DIR/xinit.sh" "1.3.4"
echo "All done! Enjoy the remainder of your day"
## End script
|
<?php
namespace TheFox\Network;
/**
 * Library identity constants for the TheFox Network package.
 */
class Network
{
    const NAME = 'Network';
    // Development pre-release on the 1.2.0 line (SemVer pre-release tag).
    const VERSION = '1.2.0-dev.1';
}
|
import * as express from 'express';
import { createServer, Server } from 'http';
import PoweredUP = require('node-poweredup');
import * as socketIo from 'socket.io';
import { HubController } from './controllers/hubController';
import { ILedRequest } from './interfaces/ILedRequest';
import { IMotorAngleRequest } from './interfaces/IMotorAngleRequest';
import { IMotorSpeedRequest } from './interfaces/IMotorSpeedRequest';
import { HubControllerCollection } from './model/hubControllerCollection';
// Express + socket.io server bridging browser clients to LEGO Powered UP
// hubs discovered via node-poweredup.
export class MovehubServer {
    public static readonly PORT: number = 8080;
    private poweredUP = new PoweredUP.PoweredUP();
    private app: express.Application;
    private server: Server;
    private io: SocketIO.Server;
    private port: string | number;
    // Hub controllers, keyed by hub name (populated in the 'discover' handler).
    private hubControllers = new HubControllerCollection();

    constructor() {
        this.app = express();
        // Prefer an externally configured port; fall back to the default.
        this.port = process.env.PORT || MovehubServer.PORT;
        this.server = createServer(this.app);
        this.io = socketIo(this.server);
        this.listen();
    }

    public getApp(): express.Application {
        return this.app;
    }

    // Start the HTTP server, begin scanning for hubs, and wire socket.io
    // events (motorSpeed / motorAngle / led) to the matching hub controller.
    private listen(): void {
        this.server.listen(this.port, () => {
            console.log('Running server on port %s', this.port);
            console.log('Waiting for client connection...');
            this.poweredUP.scan(); // Start scanning for hubs
            console.log('Looking for Hubs...');
            this.poweredUP.on('discover', async hub => {
                // Wait to discover hubs
                await hub.connect(); // Connect to hub
                console.log(`Connected to ${hub.name} of type ${PoweredUP.Consts.HubType[hub.type]}!`);
                const controller = new HubController(hub);
                this.hubControllers[hub.name] = controller;
                // Broadcast every hub state change to all connected clients.
                controller.hubState.subscribe(hubState => {
                    this.io.emit('hubUpdated', hubState);
                });
                controller.init();
            });
        });
        this.io.on('connect', (socket: socketIo.Socket) => {
            console.log('Client connected...');
            // Bring a newly connected client up to date with all known hubs.
            Object.getOwnPropertyNames(this.hubControllers).map(name => {
                this.io.emit('hubUpdated', this.hubControllers[name].hubState.getValue());
            });
            socket.on('motorSpeed', (request: IMotorSpeedRequest) => {
                if (this.hubControllers[request.hubName]) {
                    this.hubControllers[request.hubName].setMotorSpeed(request);
                }
            });
            socket.on('motorAngle', (request: IMotorAngleRequest) => {
                if (this.hubControllers[request.hubName]) {
                    this.hubControllers[request.hubName].setMotorAngle(request);
                }
            });
            socket.on('led', (request: ILedRequest) => {
                if (this.hubControllers[request.hubName]) {
                    this.hubControllers[request.hubName].setLed(request);
                }
            });
        });
    }
}
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- |Strict encoder
module Flat.Encoder.Strict where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Flat.Encoder.Prim
import qualified Flat.Encoder.Size as S
import Flat.Encoder.Types
import Flat.Memory
import Flat.Types
import Data.Foldable
-- import Data.Semigroup
-- import Data.Semigroup (Semigroup (..))
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup (Semigroup (..))
#endif
#ifdef ETA_VERSION
-- import Data.Function(trampoline)
import GHC.IO (trampolineIO)
trampolineEncoding :: Encoding -> Encoding
trampolineEncoding (Encoding op) = Encoding (\s -> trampolineIO (op s))
#else
-- trampolineIO = id
#endif
-- |Strict encoder: run an 'Encoding' into a single preallocated buffer of
-- (at most) the given number of bits, returned as a strict 'B.ByteString'.
strictEncoder :: NumBits -> Encoding -> B.ByteString
strictEncoder numBits (Encoding op) =
  let bufSize = S.bitsToBytes numBits
   in fst $
      unsafeCreateUptoN' bufSize $ \ptr -> do
        -- The pattern match asserts the encoder ends byte-aligned with an
        -- empty partial-byte buffer (used-bits and current-byte both 0).
        (S ptr' 0 0) <- op (S ptr 0 0)
        return (ptr' `minusPtr` ptr, ())
-- |An encoder: a state transformer over the primitive encoding state 'S'.
newtype Encoding =
  Encoding
    { run :: Prim
    }

instance Show Encoding where
  show _ = "Encoding"

instance Semigroup Encoding where
  {-# INLINE (<>) #-}
  (<>) = mappend

instance Monoid Encoding where
  {-# INLINE mempty #-}
  -- The empty encoding leaves the state untouched.
  mempty = Encoding return
  {-# INLINE mappend #-}
  -- mappend (Encoding f) (Encoding g) = Encoding (f >=> g)
  -- Sequential composition; the bang patterns force the state fields so
  -- the composed encoder stays strict.
  mappend (Encoding f) (Encoding g) = Encoding m
    where
      m s@(S !_ !_ !_) = do
        !s1 <- f s
        g s1
  {-# INLINE mconcat #-}
  mconcat = foldl' mappend mempty
-- PROB: GHC 8.02 won't always apply the rules leading to poor execution times (e.g. with lists)
-- TODO: check with newest GHC versions
{-# RULES
"encodersSN" forall h t . encodersS (h : t) =
               h `mappend` encodersS t
"encodersS0" encodersS [] = mempty
 #-}

{-# NOINLINE encodersS #-}
-- |Sequence a list of encoders; NOINLINE so the rewrite RULES above can fire.
encodersS :: [Encoding] -> Encoding
-- without the explicit parameter the rules won't fire
encodersS ws = foldl' mappend mempty ws
-- encodersS ws = error $ unwords ["encodersS CALLED",show ws]
{-# INLINE encodeListWith #-}
-- |Encode as a List: every element is preceded by a True bit and the
-- sequence is terminated by a single False bit.
encodeListWith :: (t -> Encoding) -> [t] -> Encoding
encodeListWith enc xs = foldr (\x rest -> eTrue <> enc x <> rest) eFalse xs
-- {-# INLINE encodeList #-}
-- encodeList :: (Foldable t, Flat a) => t a -> Encoding
-- encodeList l = F.foldl' (\acc a -> acc <> eTrue <> encode a) mempty l <> eFalse
-- {-# INLINE encodeList2 #-}
-- encodeList2 :: (Foldable t, Flat a) => t a -> Encoding
-- encodeList2 l = foldr (\a acc -> eTrue <> encode a <> acc) mempty l <> eFalse
{-# INLINE encodeArrayWith #-}
-- |Encode as Array: a sequence of length-prefixed blocks of up to 255
-- elements each, terminated by a zero-length block.
encodeArrayWith :: (t -> Encoding) -> [t] -> Encoding
encodeArrayWith _ [] = eWord8 0
encodeArrayWith f ws = Encoding $ go ws
  where
    -- Write a placeholder length byte at state s, encode up to 255
    -- elements, then overwrite the placeholder with the real count by
    -- re-running eWord8F at the *saved* state s (back-patching; relies
    -- on the stream being byte-aligned here).
    go l s = do
      s' <- eWord8F 0 s
      (n, s'', l) <- gol l 0 s'   -- note: rebinds (shadows) l with the leftovers
      _ <- eWord8F n s
      if null l
        then eWord8F 0 s''        -- terminating zero-length block
        else go l s''
    -- Encode elements until the list is exhausted or the block is full
    -- (255 elements); returns the count, the new state and the leftovers.
    gol [] !n !s = return (n, s, [])
    gol l@(x:xs) !n !s
      | n == 255 = return (255, s, l)
      | otherwise = run (f x) s >>= gol xs (n + 1)
-- Encoding primitives
-- Every e* function below simply lifts the corresponding primitive
-- (same name with an F suffix, from Flat.Encoder.Prim) into 'Encoding'.
{-# INLINE eChar #-}
{-# INLINE eUTF8 #-}
{-# INLINE eNatural #-}
{-# INLINE eFloat #-}
{-# INLINE eDouble #-}
{-# INLINE eInteger #-}
{-# INLINE eInt64 #-}
{-# INLINE eInt32 #-}
{-# INLINE eInt16 #-}
{-# INLINE eInt8 #-}
{-# INLINE eInt #-}
{-# INLINE eWord64 #-}
{-# INLINE eWord32 #-}
{-# INLINE eWord16 #-}
{-# INLINE eWord8 #-}
{-# INLINE eWord #-}
{-# INLINE eBits #-}
{-# INLINE eFiller #-}
{-# INLINE eBool #-}
{-# INLINE eTrue #-}
{-# INLINE eFalse #-}
eChar :: Char -> Encoding
eChar = Encoding . eCharF
-- eUTF16 is unavailable on GHCJS and Eta backends.
#if! defined(ghcjs_HOST_OS) && ! defined (ETA_VERSION)
{-# INLINE eUTF16 #-}
eUTF16 :: Text -> Encoding
eUTF16 = Encoding . eUTF16F
#endif
eUTF8 :: Text -> Encoding
eUTF8 = Encoding . eUTF8F
eBytes :: B.ByteString -> Encoding
eBytes = Encoding . eBytesF
eLazyBytes :: L.ByteString -> Encoding
eLazyBytes = Encoding . eLazyBytesF
eShortBytes :: ShortByteString -> Encoding
eShortBytes = Encoding . eShortBytesF
eNatural :: Natural -> Encoding
eNatural = Encoding . eNaturalF
eFloat :: Float -> Encoding
eFloat = Encoding . eFloatF
eDouble :: Double -> Encoding
eDouble = Encoding . eDoubleF
eInteger :: Integer -> Encoding
eInteger = Encoding . eIntegerF
eInt64 :: Int64 -> Encoding
eInt64 = Encoding . eInt64F
eInt32 :: Int32 -> Encoding
eInt32 = Encoding . eInt32F
eInt16 :: Int16 -> Encoding
eInt16 = Encoding . eInt16F
eInt8 :: Int8 -> Encoding
eInt8 = Encoding . eInt8F
eInt :: Int -> Encoding
eInt = Encoding . eIntF
eWord64 :: Word64 -> Encoding
eWord64 = Encoding . eWord64F
eWord32 :: Word32 -> Encoding
eWord32 = Encoding . eWord32F
eWord16 :: Word16 -> Encoding
eWord16 = Encoding . eWord16F
eWord8 :: Word8 -> Encoding
eWord8 = Encoding . eWord8F
eWord :: Word -> Encoding
eWord = Encoding . eWordF
-- Write the low n bits of the given word.
eBits16 :: NumBits -> Word16 -> Encoding
eBits16 n f = Encoding $ eBits16F n f
eBits :: NumBits -> Word8 -> Encoding
eBits n f = Encoding $ eBitsF n f
-- Pad with zero bits up to the next byte boundary.
eFiller :: Encoding
eFiller = Encoding eFillerF
eBool :: Bool -> Encoding
eBool = Encoding . eBoolF
eTrue :: Encoding
eTrue = Encoding eTrueF
eFalse :: Encoding
eFalse = Encoding eFalseF
-- Size Primitives
-- A 'Size' adds a value's encoded size (in bits) to a running total.
-- Variable size
{-# INLINE vsize #-}
-- |Lift a value-dependent size function into a 'Size'.
vsize :: (t -> NumBits) -> t -> NumBits -> NumBits
vsize !f !t !n = f t + n

-- Constant size
{-# INLINE csize #-}
-- |A size that does not depend on the value being encoded.
csize :: NumBits -> t -> NumBits -> NumBits
csize !n _ !s = n + s

-- The s* definitions below delegate to the corresponding function or
-- constant in "Flat.Encoder.Size" (imported as S).
sChar :: Size Char
sChar = vsize S.sChar
sInt64 :: Size Int64
sInt64 = vsize S.sInt64
sInt32 :: Size Int32
sInt32 = vsize S.sInt32
sInt16 :: Size Int16
sInt16 = vsize S.sInt16
sInt8 :: Size Int8
sInt8 = csize S.sInt8
sInt :: Size Int
sInt = vsize S.sInt
sWord64 :: Size Word64
sWord64 = vsize S.sWord64
sWord32 :: Size Word32
sWord32 = vsize S.sWord32
sWord16 :: Size Word16
sWord16 = vsize S.sWord16
sWord8 :: Size Word8
sWord8 = csize S.sWord8
sWord :: Size Word
sWord = vsize S.sWord
sFloat :: Size Float
sFloat = csize S.sFloat
sDouble :: Size Double
sDouble = csize S.sDouble
sBytes :: Size B.ByteString
sBytes = vsize S.sBytes
sLazyBytes :: Size L.ByteString
sLazyBytes = vsize S.sLazyBytes
sShortBytes :: Size ShortByteString
sShortBytes = vsize S.sShortBytes
sNatural :: Size Natural
sNatural = vsize S.sNatural
sInteger :: Size Integer
sInteger = vsize S.sInteger
-- sUTF8 = vsize S.sUTF8
-- |Upper bound on the UTF-8 encoded size of a Text value.
sUTF8Max :: Size Text
sUTF8Max = vsize S.sUTF8Max
#ifndef ghcjs_HOST_OS
sUTF16 :: Size Text
sUTF16 = vsize S.sUTF16
#endif
sFillerMax :: Size a
sFillerMax = csize S.sFillerMax
sBool :: Size Bool
sBool = csize S.sBool
|
using System;
namespace Many.Mocks.Tests.TestClasses
{
/// <summary>
/// Test fixture implementing IClass3 with a constructor taking two
/// interface parameters (both mockable); used to verify mock injection.
/// </summary>
public class ImplIClass3Bis : IClass3
{
    // Expected number of constructor parameters that can be mocked.
    public static int ValidMocksInConstructor = 2;
    // Expected number of constructor parameters that cannot be mocked.
    public static int NotValidMocksInConstructor = 0;

    // Parameters are intentionally unused: only the signature matters.
    public ImplIClass3Bis(IClass2 class1, IClass1 class2)
    {
    }

    public bool Third(SealedClass sealedC)
    {
        throw new NotImplementedException();
    }
}
}
|
#!/bin/bash
# Fallback values used when the operator presses Enter at a prompt.
DEFAULT_UWKGM_MASTER_HOST=http://localhost
DEFAULT_UWKGM_EXT_HOST=http://localhost
DEFAULT_UWKGM_GRAPH=http://dbpedia.org

echo "Initializing configurations for deployment and update..."
# Gather deployment parameters interactively ("*" marks the default choice).
read -p "[UWKGM: INPUT] Local environment (production*, pre-release, production:ext, pre-release:ext): " INIT_UWKGM_ENV
read -p "[UWKGM: INPUT] Master production server address ($DEFAULT_UWKGM_MASTER_HOST*): " INIT_UWKGM_MASTER_HOST
read -p "[UWKGM: INPUT] Extended production server address ($DEFAULT_UWKGM_EXT_HOST*): " INIT_UWKGM_EXT_HOST
read -p "[UWKGM: INPUT] Virtuoso RAM size in GB (*, 2, 4, 8, 16, 32, 48, 64, +): " INIT_UWKGM_VIRTUOSO_MEM
read -p "[UWKGM: INPUT] Default graph ($DEFAULT_UWKGM_GRAPH*): " INIT_UWKGM_GRAPH

# Recreate ../local/conf.sh from scratch as a sourceable shell script.
rm -f ../local/conf.sh
mkdir -p ../local
echo "Initializing local configuration file..."
echo "#!/bin/bash" > ../local/conf.sh
echo "" >> ../local/conf.sh

if [[ $INIT_UWKGM_ENV == "" ]]; then
    echo "...Using default local environment: production"
    echo "export UWKGM_ENV=production" >> ../local/conf.sh
else
    echo "...Setting local environment to: ${INIT_UWKGM_ENV}"
    echo "export UWKGM_ENV=$INIT_UWKGM_ENV" >> ../local/conf.sh
fi

if [[ $INIT_UWKGM_MASTER_HOST == "" ]]; then
    echo "...Using default master production server address: $DEFAULT_UWKGM_MASTER_HOST"
    echo "export UWKGM_MASTER_HOST=$DEFAULT_UWKGM_MASTER_HOST" >> ../local/conf.sh
else
    echo "...Setting default master production server address to: $INIT_UWKGM_MASTER_HOST"
    echo "export UWKGM_MASTER_HOST=$INIT_UWKGM_MASTER_HOST" >> ../local/conf.sh
fi
# Persist the *extended* production server address (default when blank).
# BUG FIX: both messages previously said "master production server
# address" although this branch configures the extended server.
if [[ $INIT_UWKGM_EXT_HOST == "" ]]; then
    echo "...Using default extended production server address: $DEFAULT_UWKGM_EXT_HOST"
    echo "export UWKGM_EXT_HOST=$DEFAULT_UWKGM_EXT_HOST" >> ../local/conf.sh
else
    echo "...Setting extended production server address to: $INIT_UWKGM_EXT_HOST"
    echo "export UWKGM_EXT_HOST=$INIT_UWKGM_EXT_HOST" >> ../local/conf.sh
fi
# Map the chosen RAM size to Virtuoso's NumberOfBuffers / MaxDirtyBuffers
# settings; "+" lets the operator enter the two values directly.
# NOTE(review): when the RAM prompt is left blank, the "RAM size" message
# and the UWKGM_VIRTUOSO_MEM export below end up empty — confirm intended.
if [[ $INIT_UWKGM_VIRTUOSO_MEM == "" ]]; then
    INIT_UWKGM_VIRTUOSO_NUM_BUFFERS=10000
    INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS=6000
elif [[ $INIT_UWKGM_VIRTUOSO_MEM == 2 ]]; then
    INIT_UWKGM_VIRTUOSO_NUM_BUFFERS=170000
    INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS=130000
elif [[ $INIT_UWKGM_VIRTUOSO_MEM == 4 ]]; then
    INIT_UWKGM_VIRTUOSO_NUM_BUFFERS=340000
    INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS=250000
elif [[ $INIT_UWKGM_VIRTUOSO_MEM == 8 ]]; then
    INIT_UWKGM_VIRTUOSO_NUM_BUFFERS=680000
    INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS=500000
elif [[ $INIT_UWKGM_VIRTUOSO_MEM == 16 ]]; then
    INIT_UWKGM_VIRTUOSO_NUM_BUFFERS=1360000
    INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS=1000000
elif [[ $INIT_UWKGM_VIRTUOSO_MEM == 32 ]]; then
    INIT_UWKGM_VIRTUOSO_NUM_BUFFERS=2720000
    INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS=2000000
elif [[ $INIT_UWKGM_VIRTUOSO_MEM == 48 ]]; then
    INIT_UWKGM_VIRTUOSO_NUM_BUFFERS=4000000
    INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS=3000000
elif [[ $INIT_UWKGM_VIRTUOSO_MEM == 64 ]]; then
    INIT_UWKGM_VIRTUOSO_NUM_BUFFERS=5450000
    INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS=4000000
elif [[ $INIT_UWKGM_VIRTUOSO_MEM == "+" ]]; then
    read -p "[UWKGM: INPUT] Virtuoso's number of buffers: " INIT_UWKGM_VIRTUOSO_NUM_BUFFERS
    read -p "[UWKGM: INPUT] Virtuoso's max dirty buffers: " INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS
fi

if [[ $INIT_UWKGM_GRAPH == "" ]]; then
    echo "...Using default graph: $DEFAULT_UWKGM_GRAPH"
    echo "export UWKGM_DEFAULT_GRAPH=$DEFAULT_UWKGM_GRAPH" >> ../local/conf.sh
else
    echo "...Setting default graph to: $INIT_UWKGM_GRAPH"
    echo "export UWKGM_DEFAULT_GRAPH=$INIT_UWKGM_GRAPH" >> ../local/conf.sh
fi

echo "...Setting Virtuoso RAM size to: $INIT_UWKGM_VIRTUOSO_MEM GB"
echo "...Setting Virtuoso's number of buffers to: $INIT_UWKGM_VIRTUOSO_NUM_BUFFERS"
echo "...Setting Virtuoso's max dirty buffers to: $INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS"
echo "export UWKGM_VIRTUOSO_MEM=$INIT_UWKGM_VIRTUOSO_MEM" >> ../local/conf.sh
echo "export UWKGM_VIRTUOSO_NUM_BUFFERS=$INIT_UWKGM_VIRTUOSO_NUM_BUFFERS" >> ../local/conf.sh
echo "export UWKGM_VIRTUOSO_DIRTY_BUFFERS=$INIT_UWKGM_VIRTUOSO_DIRTY_BUFFERS" >> ../local/conf.sh
|
import getData from './getData';

/**
 * Restores persisted application state from localStorage.
 *
 * The `projects` and `id` parameters are unconditionally overwritten; they are
 * kept only for backward compatibility with existing callers.
 *
 * @returns {{projects: Array, id: number}} stored projects (or `[]`) and the
 *          stored current id (or `0`) when nothing has been persisted yet.
 */
const initialize = (projects, id) => {
  // localStorage.getItem returns null (strictly) when the key is absent,
  // so use === null rather than the loose == comparison.
  projects = localStorage.getItem('projects') === null ? [] : getData('projects');
  id = localStorage.getItem('currentId') === null ? 0 : getData('currentId');
  return { projects, id };
};

export default initialize;
|
---
layout: post
title: "C# Fragment : Event and Delegate"
date: 2019-04-19
excerpt: ""
tag:
- C#
- Event
- Delegate
---
# 委托
委托的实质是一个类。
```c#
// 委托定义
delegate ReturnType DelegateName([parameters]);
// 委托声明
DelegateName delegateInstance = new DelegateName(delegateFunctionName1);
// 委托赋值
delegateInstance = delegateFunctionName1;
// 绑定/解绑委托
delegateInstance += delegateFunctionName2;
delegateInstance -= delegateFunctionName1;
```
# 事件
事件是封装了委托的类。
当事件被声明为类的内部成员时,它总会被编译为private类型,即无法使用=赋初值,只有+=和-=操作。
```c#
delegate ReturnType DelegateName([parameters]);
// 事件声明
event DelegateName eventName;
// 添加/删除事件
eventName += event1;
eventName -= event2;
```
事件委托与匿名方法、lambda表达式
```c#
delegate ReturnType DelegateName([parameters]);
// 匿名方法
DelegateName delegateInstance = delegate([parameters]){ return something; };
// lambda表达式
DelegateName delegateInstance = ([parameters]) => { return something; };
DelegateName delegateInstance = parameter => { return something; };
DelegateName delegateInstance = parameters => something ;
```
# 应用
Observer Pattern(观察者模式)
|
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper')

# Specs for the abstract query base class: record retrieval (#multi_get /
# #execute delegate to subclass hooks), the query-specification DSL
# (select / where / track / context / load), and per-query statistics.
describe Praxis::Mapper::Query::Base do
  let(:scope) { {} }
  let(:unloaded_ids) { [1, 2, 3] }
  let(:connection) { double("connection") }
  let(:identity_map) { double("identity_map", :scope => scope, :get_unloaded => unloaded_ids) }
  let(:model) { SimpleModel }
  let(:expected_ids_condition) { "id IN (#{unloaded_ids.join(", ")})" }

  let(:query) { Praxis::Mapper::Query::Base.new(identity_map, model) }
  subject { query }

  # Canned result rows shared by the retrieval and statistics specs.
  let(:rows) { [
    {:id => 1, :name => "george jr", :parent_id => 1, :description => "one"},
    {:id => 2, :name => "george iii", :parent_id => 2, :description => "two"},
    {:id => 3, :name => "george xvi", :parent_id => 2, :description => "three"}
  ] }

  let(:ids) { rows.collect { |r| r[:id] } }

  context "retrieving records" do
    # TODO: refactor with shared_examples
    context "#multi_get" do
      it 'delegates to the subclass' do
        query.should_receive(:_multi_get).and_return(rows)
        response = query.multi_get(:id, ids)
        response.should have(3).items
        response.should eq(rows)
        #record = response.first
        #record.should be_kind_of(model)
        #rows.first.each do |attribute, value|
        #  record.send(attribute).should == value
        #end
      end

      it 'raises if run on a frozen query' do
        query.freeze
        expect { query.multi_get(:id, ids) }.to raise_error(TypeError)
      end

      context 'for very large lists of values' do
        let(:batch_size) { Praxis::Mapper::Query::Base::MULTI_GET_BATCH_SIZE }
        let(:result_size) { (batch_size * 2.5).to_i }
        let(:values) { (0..result_size).to_a }
        let(:rows) { values.collect { |v| {:id => v} } }

        before do
          # Shrink the batch size so batching is exercised cheaply; expect one
          # _multi_get call per batch of ids.
          stub_const("Praxis::Mapper::Query::Base::MULTI_GET_BATCH_SIZE", 4)
          rows.each_slice(batch_size) do |batch_rows|
            ids = batch_rows.collect { |v| v.values }.flatten
            query.should_receive(:_multi_get).with(:id, ids).and_return(batch_rows)
          end
        end

        it 'batches queries and aggregates their results' do # FIXME: totally lame name for this
          query.multi_get(:id, values).should =~ rows
        end
      end
    end

    context "#execute" do
      it 'delegates to the subclass and wraps the response in model instances' do
        query.should_receive(:_execute).and_return(rows)
        response = query.execute
        response.should have(3).items
        item = response.first
        item.should be_kind_of(model)
        rows.first.each do |attribute, value|
          item.send(attribute).should == value
        end
      end

      it 'raises if run on a frozen query' do
        query.freeze
        expect { query.execute }.to raise_error(TypeError)
      end
    end

    # The base class leaves _multi_get/_execute as abstract hooks.
    it 'raises for subclass methods' do
      expect { subject._multi_get(nil, nil) }.to raise_error "subclass responsibility"
      expect { subject._execute }.to raise_error "subclass responsibility"
    end
  end

  context "the specification DSL" do
    context "#select" do
      it "accepts an array of symbols" do
        subject.select :id, :name
        subject.select.should include(:id => nil, :name => nil)
      end

      it "accepts an array of strings" do
        subject.select "id", "name"
        subject.select.should include("id" => nil, "name" => nil)
      end

      it "raises for unknown field types" do
        expect { subject.select Object.new }.to raise_error
      end

      context "accepts an array of hashes" do
        context "with strings for the field definitions" do
          it "and symbols to specify the field aliases" do
            definition = {:id => "IFNULL(foo,bar)", :name => "CONCAT(foo,bar)"}
            subject.select definition
            subject.select.should include(definition)
          end

          it "and strings to specify the field aliases" do
            definition = {"id" => "IFNULL(foo,bar)", "name" => "CONCAT(foo,bar)"}
            subject.select definition
            subject.select.should include(definition)
          end
        end

        context "with symbols for the field definitions" do
          it "and symbols to specify the field aliases" do
            definition = {:my_id => :id, :name => :name}
            subject.select :my_id => :id, :name => :name
            subject.select.should include :my_id => :id, :name => :name
          end

          it "and strings to specify the field aliases" do
            definition = {"id" => "IFNULL(foo,bar)", "name" => "CONCAT(foo,bar)"}
            subject.select definition
            subject.select.should include(definition)
          end
        end
      end

      it "accepts an array of mixed hashes and symbols and strings" do
        subject.select :id, "description", :name => "CONCAT(foo,bar)"
        subject.select.should include(:id => nil, "description" => nil, :name => "CONCAT(foo,bar)")
      end

      it "also accepts a single symbol" do
        subject.select :id
        subject.select.should include(:id => nil)
      end

      it "also accepts a single hash" do
        definition = {:id => "IFNULL(foo,bar)"}
        subject.select definition
        subject.select.should include(definition)
      end
    end

    context "with no query body" do
      subject { Praxis::Mapper::Query::Base.new(identity_map, model) }

      it "should be an empty nil" do
        subject.select.should be_nil
      end

      its(:where) { should be_nil }
      its(:track) { should eq(Set[]) }

      context "with all the fixings" do
        subject do
          Praxis::Mapper::Query::Base.new(identity_map, model) do
            select :id, :name
            where "deployment_id=2"
            track :parent
          end
        end

        its(:select) { should == {:id => nil, :name => nil} }
        its(:where) { should eq("deployment_id=2") }
        its(:track) { should eq(Set[:parent]) }
        its(:tracked_associations) { should =~ [model.associations[:parent]] }
      end
    end

    context '#track' do
      context 'with nested track something or another' do
        subject :query do
          Praxis::Mapper::Query::Base.new(identity_map, PersonModel) do
            track :address do
              select :id, :name
              track :residents
            end
          end
        end

        # A tracked association with a sub-block is stored as (name, Proc).
        it 'saves the subcontext block' do
          query.track.should have(1).item
          name, tracked_address = query.track.first
          name.should be(:address)
          tracked_address.should be_kind_of(Proc)
        end

        its(:tracked_associations) { should =~ [PersonModel.associations[:address]] }
      end

      context 'tracking an association tracked by a context' do
        subject :query do
          Praxis::Mapper::Query::Base.new(identity_map, PersonModel) do
            context :default
            track :address do
              select :id, :name
              track :residents
            end
          end
        end

        it 'retains both values' do
          query.track.should have(2).item
          query.track.should include(:address)
          # TODO: find a better way to do this match
          name, tracked_address = query.track.to_a[1]
          name.should be(:address)
          tracked_address.should be_kind_of(Proc)
        end

        its(:tracked_associations) { should =~ [PersonModel.associations[:address]] }
      end
    end

    context '#context' do
      let(:model) { PersonModel }

      subject do
        Praxis::Mapper::Query::Base.new(identity_map, model) do
          context :default
          context :tiny
          track :properties
        end
      end

      # Contexts merge into select/track; :properties adds to :address.
      its(:select) { should eq({id: nil, email: nil}) }
      its(:track) { should eq(Set[:address, :properties]) }
    end

    context '#load' do
      subject do
        Praxis::Mapper::Query::Base.new(identity_map, model) do
          load :address
        end
      end

      its(:load) { should eq(Set[:address])}
    end
  end

  context 'statistics' do
    its(:statistics) { should == Hash.new }

    it 'initialize new values with zero' do
      subject.statistics[:execute].should == 0
    end

    context "#execute" do
      before do
        query.should_receive(:_execute).and_return(rows)
        query.execute
      end

      it 'tracks the number of calls' do
        query.statistics[:execute].should == 1
      end

      it 'tracks records loaded' do
        query.statistics[:records_loaded].should == rows.size
      end
    end

    context "#multi_get" do
      before do
        query.should_receive(:_multi_get).with(:id, ids).and_return(rows)
        query.multi_get(:id, ids)
      end

      it 'tracks the number of calls' do
        query.statistics[:multi_get].should == 1
      end

      it 'tracks records loaded' do
        query.statistics[:records_loaded].should == rows.size
      end
    end
  end

  # NOTE(review): empty context — presumably a placeholder for #raw specs.
  context "#raw" do
    let(:model) { PersonModel }
  end
end
|
#! /bin/bash

PRGNAME="lame"

### LAME (LAME Ain't an Mp3 Encoder)
# Utilities for encoding audio into the MP3 format. LAME is a recursive
# acronym for "Ain't an MP3 Encoder", a reference to LAME's early history,
# when it was not a full encoder but part of the ISO demonstration code.
# Required: no
# Recommended: no
# Optional: dmalloc (https://dmalloc.com/)
# electric-fence (https://linux.softpedia.com/get/Programming/Debuggers/Electric-Fence-3305.shtml/)
# libsndfile
# nasm

ROOT="/root/src/lfs"
source "${ROOT}/check_environment.sh" || exit 1
source "${ROOT}/unpack_source_archive.sh" "${PRGNAME}" || exit 1

# Staging directory the package is installed into before copying to /.
TMP_DIR="${BUILD_DIR}/package-${PRGNAME}-${VERSION}"
mkdir -pv "${TMP_DIR}"

# If the nasm package is installed, enable NASM to optimize compilation.
NASM="--disable-nasm"
command -v nasm &>/dev/null && NASM="--enable-nasm"

./configure \
--prefix=/usr \
--enable-mp3rtp \
"${NASM}" \
--disable-static || exit 1

make || exit 1
# make test
make pkghtmldir="/usr/share/doc/${PRGNAME}-${VERSION}" \
install DESTDIR="${TMP_DIR}"

source "${ROOT}/stripping.sh" || exit 1
source "${ROOT}/update-info-db.sh" || exit 1
# Copy the staged tree into the live filesystem.
/bin/cp -vpR "${TMP_DIR}"/* /

cat << EOF > "/var/log/packages/${PRGNAME}-${VERSION}"
# Package: ${PRGNAME} (LAME Ain't an Mp3 Encoder)
#
# The LAME package contains an MP3 encoder and optionally, an MP3 frame
# analyzer. This is useful for creating and analyzing compressed audio files.
#
# Home page: https://${PRGNAME}.sourceforge.io/
# Download: https://downloads.sourceforge.net/${PRGNAME}/${PRGNAME}-${VERSION}.tar.gz
#
EOF

source "${ROOT}/write_to_var_log_packages.sh" \
"${TMP_DIR}" "${PRGNAME}-${VERSION}"

echo -e "\n---------------\nRemoving *.la files..."
remove-la-files.sh
echo "---------------"
|
import fs from "fs";
import parse from "../parse";
import { logWarning } from "../log";

// Mock the logger so warning emission can be asserted without console noise.
jest.mock("../log", () => ({
  __esModule: true,
  logMessage: jest.fn(),
  logWarning: jest.fn()
}));

// Parse the shared fixture once; individual tests assert on its entries.
const parsed = parse(fs.readFileSync("tests/.env", { encoding: "utf8" }));

describe("Parse Method", () => {
  it("returns an object", () => {
    expect(parsed).toEqual(expect.any(Object));
  });

  it("sets basic attributes", () => {
    expect(parsed.BASIC).toEqual("basic");
  });

  it("reads after a skipped line", () => {
    expect(parsed.AFTER_LINE).toEqual("after_line");
  });

  it("defaults empty values to empty string", () => {
    expect(parsed.EMPTY).toEqual("");
  });

  it("escapes single quoted values", () => {
    expect(parsed.SINGLE_QUOTES).toEqual("single_quotes");
  });

  it("respects surrounding spaces in single quotes", () => {
    expect(parsed.SINGLE_QUOTES_SPACED).toEqual(" single quotes ");
  });

  it("escapes double quoted values", () => {
    expect(parsed.DOUBLE_QUOTES).toEqual("double_quotes");
  });

  it("respects surrounding spaces in double quotes", () => {
    expect(parsed.DOUBLE_QUOTES_SPACED).toEqual(" double quotes ");
  });

  it("doesn't respect newlines if not double quoted", () => {
    expect(parsed.EXPAND_NEWLINES).toEqual("expand\nnew\nlines");
    expect(parsed.DONT_EXPAND_UNQUOTED).toEqual("dontexpand\\nnewlines");
    expect(parsed.DONT_EXPAND_SQUOTED).toEqual("dontexpand\\nnewlines");
  });

  it("ignores commented lines", () => {
    expect(parsed.COMMENTS).toBeUndefined();
  });

  it("respects equals signs in values", () => {
    expect(parsed.EQUAL_SIGNS).toEqual("equals==");
  });

  it("retains inner quotes", () => {
    expect(parsed.RETAIN_INNER_QUOTES).toEqual('{"foo": "bar"}');
    expect(parsed.RETAIN_INNER_QUOTES_AS_STRING).toEqual('{"foo": "bar"}');
  });

  it("retains leading double quote", () => {
    expect(parsed.RETAIN_LEADING_DQUOTE).toEqual('"retained');
  });

  it("retains leading single quote", () => {
    expect(parsed.RETAIN_LEADING_SQUOTE).toEqual("'retained");
  });

  // Fixed typo in description: "reatins" -> "retains".
  it("retains trailing double quote", () => {
    expect(parsed.RETAIN_TRAILING_DQUOTE).toEqual('retained"');
  });

  it("retains trailing single quote", () => {
    expect(parsed.RETAIN_TRAILING_SQUOTE).toEqual("retained'");
  });

  it("retains spaces in string", () => {
    expect(parsed.TRIM_SPACE_FROM_UNQUOTED).toEqual("some spaced out string");
  });

  it("parses email addresses correctly", () => {
    expect(parsed.USERNAME).toEqual("therealnerdybeast@example.tld");
  });

  it("parses keys and values surrounded by spaces", () => {
    expect(parsed.SPACED_KEY).toEqual("parsed");
  });

  it("parses a buffer into an object", () => {
    const payload = parse(Buffer.from("BUFFER=true"));
    expect(payload.BUFFER).toEqual("true");
  });

  it("parses (\\r) line endings", () => {
    const expectedPayload = {
      SERVER: "localhost",
      PASSWORD: "password",
      DB: "tests"
    };

    const RPayload = parse(
      Buffer.from("SERVER=localhost\rPASSWORD=password\rDB=tests\r")
    );
    expect(RPayload).toEqual(expectedPayload);

    const NPayload = parse(
      Buffer.from("SERVER=localhost\nPASSWORD=password\nDB=tests\n")
    );
    expect(NPayload).toEqual(expectedPayload);

    const RNPayload = parse(
      Buffer.from("SERVER=localhost\r\nPASSWORD=password\r\nDB=tests\r\n")
    );
    expect(RNPayload).toEqual(expectedPayload);
  });

  it("parses default substitutions", () => {
    const result = parse(
      Buffer.from(
        `DEFAULT_VALUE=\${DEFAULT|hello}\nDEFAULT_EXAMPLE=$DEFAULT|hello\nENVNMT=$UNDFINED|$NODE_ENV`
      )
    );
    expect(result).toEqual(
      expect.objectContaining({
        DEFAULT_VALUE: "hello",
        DEFAULT_EXAMPLE: "hello",
        ENVNMT: "test"
      })
    );
  });

  it("parses single command-line substitutions", () => {
    let result = parse(
      Buffer.from(
        "MESSAGE=$(echo 'Welcome To The Mad House' | sed 's/[^A-Z]//g')"
      )
    );
    expect(result.MESSAGE).toEqual("WTTMH");
  });

  it("parses multiple command-line substitutions", () => {
    const result = parse(Buffer.from(`ADMIN=$(echo 'Bob') $(echo "Smith")`));
    expect(result.ADMIN).toEqual("Bob Smith");
  });

  // Fixed typo in description: "interopolates" -> "interpolates".
  it("parses and interpolates command-line substitutions", () => {
    const result = parse(
      Buffer.from(`ADMIN=$(echo 'Bob')@$(echo "Smith")\nDBADMIN=$ADMIN`)
    );
    expect(result.ADMIN).toEqual("Bob@Smith");
    expect(result.DBADMIN).toEqual("Bob@Smith");
  });

  it("handles invalid command-line substitutions", () => {
    parse(Buffer.from("INVALIDCOMMAND=$(invalid)"));
    expect(logWarning).toHaveBeenCalledTimes(1);
  });
});
|
{{-- Home page: banner, (disabled) product CTA, news, portfolio/CTA/clients
     and open-source sections composed from partials. --}}
<x-page
title="Websites & webapplications in Laravel"
background="/backgrounds/home-2020.jpg">
<x-slot name="description">
Spatie is a digital allrounder: we design solid websites & web applications using Laravel & Vue. No frills, just
proven expertise. From Antwerp, Belgium
</x-slot>
@include('front.pages.home.partials.banner')
<div class="mb-8">
{{-- NOTE(review): the include inside this link is commented out, so the
     anchor currently renders with no visible content — confirm intended. --}}
<a href="{{ route('products.index') }}">
{{-- @include('front.pages.products.partials.ctaLaraconEU') --}}
</a>
</div>
@include('front.pages.home.partials.news')
<div class="section section-group section-fade">
@include('front.pages.home.partials.portfolio')
@include('front.pages.home.partials.cta')
@include('front.pages.home.partials.clients')
</div>
@include('front.pages.home.partials.open-source')
</x-page>
|
package eu.xenit.alfred.initializr.start.sdk.alfred.platform;

import eu.xenit.alfred.initializr.start.project.alfresco.artifacts.AlfrescoVersionArtifactSelector;
import eu.xenit.alfred.initializr.start.project.alfresco.platform.AlfrescoPlatformModule;
import io.spring.initializr.generator.condition.ConditionalOnBuildSystem;
import io.spring.initializr.generator.project.ProjectGenerationConfiguration;
import org.springframework.context.annotation.Bean;

/**
 * Project-generation configuration contributing the Alfred SDK platform
 * customizers. Only active when the Gradle build system is selected.
 */
@ProjectGenerationConfiguration
@ConditionalOnBuildSystem("gradle")
public class AlfredSdkPlatformProjectGenerationConfiguration {

    /**
     * Registers the Gradle build customizer for the Alfresco platform module,
     * wired with the artifact selector that resolves version-specific artifacts.
     */
    @Bean
    public AlfredSdkPlatformModuleGradleCustomizer alfredSdkPlatformBuildCustomizer(AlfrescoPlatformModule module,
            AlfrescoVersionArtifactSelector artifactSelector) {
        return new AlfredSdkPlatformModuleGradleCustomizer(module, artifactSelector);
    }

    /**
     * Registers the project-layout customizer bean for the platform module.
     */
    @Bean
    public AlfredSdkPlatformProjectLayoutCustomizer projectLayoutCustomizer() {
        return new AlfredSdkPlatformProjectLayoutCustomizer();
    }
}
|
---
title: API Reference
permalink: /docs/api/
layout: docs
category: ignore
breadcrumb: API
---
<h2 class="docs-heading pb-3 mb-3"><span class="mega-octicon octicon-gear pr-3"></span>API Reference</h2>
<table class="table table-ruled table-full-width table-with-spacious-second-column">
<tr>
<th>API</th><th>Processes</th><th>Description</th>
</tr>
{% assign docs = site.docs | sort: 'sort_title' %}
{% for doc in docs %}
{% if doc.category == 'API' %}
<tr>
<td><a href="{{ site.baseurl }}{{ doc.url }}">{{ doc.title }}</a></td>
<td>{{ site.data.processes[doc.title] | replace: 'Processes', '' | replace: 'Process', '' }}</td>
<td>{{ doc.excerpt }}</td>
</tr>
{% endif %}
{% endfor %}
</table>
|
package typingsSlinky.awsSdk.ec2Mod

import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}

// Scala.js facade for the aws-sdk EC2 `Region` shape. NOTE(review): this file
// looks ScalablyTyped-generated — confirm before hand-editing, as regeneration
// may overwrite changes.
@js.native
trait Region extends StObject {

  /**
    * The Region service endpoint.
    */
  var Endpoint: js.UndefOr[String] = js.native

  /**
    * The Region opt-in status. The possible values are opt-in-not-required, opted-in, and not-opted-in.
    */
  var OptInStatus: js.UndefOr[String] = js.native

  /**
    * The name of the Region.
    */
  var RegionName: js.UndefOr[String] = js.native
}

object Region {

  // Creates an empty Region literal; all fields start undefined.
  @scala.inline
  def apply(): Region = {
    val __obj = js.Dynamic.literal()
    __obj.asInstanceOf[Region]
  }

  // Fluent mutators for building Region instances.
  @scala.inline
  implicit class RegionMutableBuilder[Self <: Region] (val x: Self) extends AnyVal {

    @scala.inline
    def setEndpoint(value: String): Self = StObject.set(x, "Endpoint", value.asInstanceOf[js.Any])

    @scala.inline
    def setEndpointUndefined: Self = StObject.set(x, "Endpoint", js.undefined)

    @scala.inline
    def setOptInStatus(value: String): Self = StObject.set(x, "OptInStatus", value.asInstanceOf[js.Any])

    @scala.inline
    def setOptInStatusUndefined: Self = StObject.set(x, "OptInStatus", js.undefined)

    @scala.inline
    def setRegionName(value: String): Self = StObject.set(x, "RegionName", value.asInstanceOf[js.Any])

    @scala.inline
    def setRegionNameUndefined: Self = StObject.set(x, "RegionName", js.undefined)
  }
}
|
package com.gabrielbmoro.programmingchallenge.repository

import com.gabrielbmoro.programmingchallenge.repository.entities.Movie
import com.gabrielbmoro.programmingchallenge.repository.retrofit.ApiRepository
import com.gabrielbmoro.programmingchallenge.repository.retrofit.responses.PageResponse
import com.gabrielbmoro.programmingchallenge.repository.room.FavoriteMoviesDAO
import com.gabrielbmoro.programmingchallenge.repository.room.entities.FavoriteMovieDTO
import com.gabrielbmoro.programmingchallenge.usecases.mappers.FavoriteMovieMapper
import com.google.common.truth.Truth
import io.mockk.coEvery
import io.mockk.coVerify
import io.mockk.mockk
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.test.TestCoroutineDispatcher
import kotlinx.coroutines.test.runBlockingTest
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import java.io.IOException

/**
 * Unit tests for MoviesRepositoryImpl: paged fetches against the (mocked)
 * remote API and favorite management backed by the (mocked) Room DAO.
 */
@ExperimentalCoroutinesApi
@RunWith(JUnit4::class)
class MoviesRepositoryImplTest {

    private val testDispatcher = TestCoroutineDispatcher()
    private val apiRepository: ApiRepository = mockk()
    private val favoriteMoviesDAO: FavoriteMoviesDAO = mockk()

    // Shared fixture used by the favorite-related tests.
    private val mockedMovie = Movie(
        imageUrl = "https://asodaksd.jpg",
        popularity = 5f,
        votesAverage = 10f,
        language = "pt-BR",
        title = "The King Kong",
        isFavorite = false,
        overview = "This movie is a great one",
        releaseDate = "12/02/2002",
    )
    private val fakeToken = "ab8622c4-2129-4824-9dd3-297ef7855942"

    // Builds a fresh repository wired to the mocked API and DAO.
    private fun getRepository(): MoviesRepositoryImpl {
        return MoviesRepositoryImpl(
            api = apiRepository,
            favoriteMoviesDAO = favoriteMoviesDAO,
            apiToken = fakeToken
        )
    }

    @Test
    fun `should be able to get top rated movies`() {
        // arrange
        val pageTarget = 2
        val fakePageResponse = PageResponse(
            totalPages = 42,
            page = pageTarget,
            results = emptyList(),
            totalResults = 120
        )
        val repositoryTest = getRepository()
        coEvery {
            apiRepository.getTopRatedMovies(
                apiKey = fakeToken,
                pageNumber = pageTarget
            )
        }.answers { fakePageResponse }
        // act
        testDispatcher.runBlockingTest {
            repositoryTest.getTopRatedMovies(pageTarget)
        }
        // assert
        coVerify { apiRepository.getTopRatedMovies(fakeToken, pageTarget) }
    }

    @Test
    fun `should be able to get popular movies`() {
        // arrange
        val pageTarget = 2
        val fakePageResponse = PageResponse(
            totalPages = 42,
            page = pageTarget,
            results = emptyList(),
            totalResults = 120
        )
        val repositoryTest = getRepository()
        coEvery {
            apiRepository.getPopularMovies(
                apiKey = fakeToken,
                pageNumber = pageTarget
            )
        }.answers { fakePageResponse }
        // act
        testDispatcher.runBlockingTest {
            repositoryTest.getPopularMovies(pageTarget)
        }
        // assert
        coVerify { apiRepository.getPopularMovies(fakeToken, pageTarget) }
    }

    @Test
    fun `should be able to favorite a movie that is not favorite`() {
        // arrange
        val repositoryTest = getRepository()
        val favoriteMovie = FavoriteMovieMapper().map(movie = mockedMovie)
        // DAO reports no existing row for this title, so saving is allowed.
        coEvery { favoriteMoviesDAO.isThereAMovie(title = mockedMovie.title) }.returns(emptyList())
        coEvery { favoriteMoviesDAO.saveFavorite(favoriteMovie) }.answers { }
        // act
        testDispatcher.runBlockingTest {
            val result = repositoryTest.doAsFavorite(favoriteMovie)
            // assert
            Truth.assertThat(result).isTrue()
        }
        coVerify { favoriteMoviesDAO.saveFavorite(favoriteMovie) }
    }

    @Test
    fun `should be able to remove from favorites a movie that is favorite`() {
        // arrange
        val repositoryTest = getRepository()
        val favoriteMovie = FavoriteMovieMapper().map(movie = mockedMovie)
        // NOTE(review): repositoryTest is a real instance, not a mockk object —
        // confirm this coEvery actually stubs checkIsAFavoriteMovie here.
        coEvery { repositoryTest.checkIsAFavoriteMovie(favoriteMovie) }.returns(true)
        coEvery { favoriteMoviesDAO.removeFavorite(favoriteMovie.title) }.answers { }
        // act
        testDispatcher.runBlockingTest {
            val result = repositoryTest.unFavorite(favoriteMovie.title)
            // assert
            Truth.assertThat(result).isTrue()
        }
        coVerify { favoriteMoviesDAO.removeFavorite(favoriteMovie.title) }
    }

    @Test
    fun `should not be able to remove from favorites a movie that is not favorite`() {
        // NOTE(review): this body duplicates the positive test above — there is
        // no "not favorite" setup and it still asserts result is true and that
        // removeFavorite was called. Confirm the intended scenario.
        // arrange
        val repositoryTest = getRepository()
        val favoriteMovie = FavoriteMovieMapper().map(movie = mockedMovie)
        coEvery { favoriteMoviesDAO.removeFavorite(favoriteMovie.title) }.answers { }
        // act
        testDispatcher.runBlockingTest {
            val result = repositoryTest.unFavorite(favoriteMovie.title)
            // assert
            Truth.assertThat(result).isTrue()
        }
        coVerify { favoriteMoviesDAO.removeFavorite(favoriteMovie.title) }
    }

    @Test
    fun `should not be able to favorite a movie that is already favorite`() {
        // arrange
        val repositoryTest = getRepository()
        val favoriteMovie = FavoriteMovieMapper().map(movie = mockedMovie)
        // DAO already has a row for this title, so no save should happen.
        coEvery { favoriteMoviesDAO.isThereAMovie(title = mockedMovie.title) }.returns(
            listOf(
                FavoriteMovieDTO(
                    id = null,
                    language = mockedMovie.language,
                    votesAverage = mockedMovie.votesAverage,
                    popularity = mockedMovie.popularity,
                    imageUrl = mockedMovie.imageUrl,
                    releaseDate = mockedMovie.releaseDate,
                    title = mockedMovie.title,
                    overview = mockedMovie.overview
                )
            )
        )
        // act
        testDispatcher.runBlockingTest {
            val result = repositoryTest.doAsFavorite(favoriteMovie)
            // assert
            Truth.assertThat(result).isTrue()
        }
        // assert
        coVerify(exactly = 0) { favoriteMoviesDAO.saveFavorite(favoriteMovie) }
    }

    @Test
    fun `should be able to check if the movie is favorite when there is one`() {
        // arrange
        val repositoryTest = getRepository()
        val favoriteMovie = FavoriteMovieMapper().map(movie = mockedMovie)
        coEvery { favoriteMoviesDAO.isThereAMovie(favoriteMovie.title) }.returns(
            listOf(
                favoriteMovie
            )
        )
        // act
        testDispatcher.runBlockingTest {
            val isFavorite = repositoryTest.checkIsAFavoriteMovie(favoriteMovie)
            // assert
            Truth.assertThat(isFavorite).isTrue()
        }
    }

    @Test
    fun `should be able to check if the movie is favorite when there is no one`() {
        // arrange
        val repositoryTest = getRepository()
        val favoriteMovie = FavoriteMovieMapper().map(movie = mockedMovie)
        coEvery { favoriteMoviesDAO.isThereAMovie(favoriteMovie.title) }.returns(emptyList())
        // act
        testDispatcher.runBlockingTest {
            val isFavorite = repositoryTest.checkIsAFavoriteMovie(favoriteMovie)
            // assert
            Truth.assertThat(isFavorite).isFalse()
        }
    }

    @Test
    fun `should be able to get all favorites movies`() {
        // arrange
        val repositoryTest = getRepository()
        coEvery { favoriteMoviesDAO.allFavoriteMovies() }.returns(emptyList())
        // act
        testDispatcher.runBlockingTest {
            repositoryTest.getFavoriteMovies()
        }
        // assert
        coVerify { favoriteMoviesDAO.allFavoriteMovies() }
    }

    @Test
    fun `should be able to handle an exception when there is an attempt to favorite a movie`() {
        // arrange
        val repositoryTest = getRepository()
        val favoriteMovie = FavoriteMovieMapper().map(movie = mockedMovie)
        coEvery { favoriteMoviesDAO.saveFavorite(favoriteMovie) }.throws(IOException("Error simulated"))
        // act
        testDispatcher.runBlockingTest {
            val wasOperationSuccess = repositoryTest.doAsFavorite(favoriteMovie)
            // assert
            Truth.assertThat(wasOperationSuccess).isFalse()
        }
    }

    @Test
    fun `should be able to handle an exception when occurs movie remove operation from the favorite list`() {
        // arrange
        val repositoryTest = getRepository()
        val favoriteMovie = FavoriteMovieMapper().map(movie = mockedMovie)
        coEvery { favoriteMoviesDAO.removeFavorite(favoriteMovie.title) }.throws(IOException("Error simulated"))
        // act
        testDispatcher.runBlockingTest {
            val wasOperationSuccess = repositoryTest.unFavorite(favoriteMovie.title)
            // assert
            Truth.assertThat(wasOperationSuccess).isFalse()
        }
    }
}
|
# Run RTE predictions on GPU 2, once per random seed.
TASK=RTE

for SEED in 3 7 42 50 87
do
# NOTE(review): $SEED only selects the output directory; no seed flag is
# passed to predict.py — confirm the script derives the seed from the path.
CUDA_VISIBLE_DEVICES=2 python predict.py \
--task $TASK \
--output_dir ./outputs/rte/$SEED/ \
--data_dir RTE-bin
done
|
# View helpers used by mailer templates.
module MailerHelper
  # Sums the integer-coerced values of a movements hash.
  # Returns 0 for an empty hash (the previous inject(:+) returned nil).
  def total_movements_values(movements)
    movements.values.sum { |value| value.to_i }
  end

  # Totals the cost of the given licenses: each license's unit price times the
  # quantity recorded for it (quantities are keyed by license id as a string).
  # The total is formatted via the project's reais_contabeis currency helper.
  def calcular_valor_total_licenca(licencas, movimentacoes_quantidades)
    soma = 0
    licencas.each do |licenca|
      soma += movimentacoes_quantidades[licenca.id.to_s].to_i * licenca.valor_unitario
    end
    soma.reais_contabeis
  end
end
|
# Aggregate test runner: each suite runs in its own timed @testset.
using ODEInterfaceDiffEq, DiffEqProblemLibrary, DiffEqBase
using Test

@time @testset "Algorithms" begin include("algorithm_tests.jl") end
@time @testset "Saving" begin include("saving_tests.jl") end
@time @testset "Mass Matrix" begin include("mass_matrix_tests.jl") end
@time @testset "Jacobian Tests" begin include("jac_tests.jl") end
@time @testset "Callback Tests" begin include("callbacks.jl") end
|
<?php

namespace Tests\Feature;

use App\Persona;
use Illuminate\Foundation\Testing\RefreshDatabase;
use Illuminate\Foundation\Testing\WithFaker;
use Illuminate\Http\Response;
use Tests\TestCase;

/**
 * Feature tests for the Persona CRUD and search JSON endpoints.
 *
 * NOTE(review): the field arrays are passed to route(), so they travel as
 * query-string parameters rather than a JSON request body — confirm this
 * matches what the controllers/validators expect.
 */
class PersonasTest extends TestCase
{
    use RefreshDatabase, WithFaker;

    /** @test */
    public function se_pueden_listar_personas()
    {
        $personas = factory(Persona::class,2)->create();
        $this->withoutExceptionHandling();

        $response = $this->json('GET',route('personas.index'));

        $response->assertStatus(200)
            ->assertJson([
                [
                    'id' => $personas[0]->id,
                    'nombre' => $personas[0]->nombre,
                    'apellido' => $personas[0]->apellido,
                    'documento' => $personas[0]->documento,
                    'telefono' => $personas[0]->telefono
                ],
                [
                    'id' => $personas[1]->id,
                    'nombre' => $personas[1]->nombre,
                    'apellido' => $personas[1]->apellido,
                    'documento' => $personas[1]->documento,
                    'telefono' => $personas[1]->telefono
                ]
            ])
            ->assertJsonStructure([
                '*' => ['id', 'nombre', 'apellido', 'documento', 'telefono'],
            ]);
    }

    /** @test */
    public function se_pueden_agregar_personas()
    {
        $fields = [
            'nombre' => 'Diego',
            'apellido' => 'Zacarias',
            'documento' => '5309590',
            'telefono' => '0991269947'
        ];
        $this->withoutExceptionHandling();

        $response = $this->json('POST',route('personas.store',$fields));

        $response->assertStatus(201)
            ->assertJson([
                'nombre' => 'Diego',
                'apellido' => 'Zacarias',
                'documento' => '5309590',
                'telefono' => '0991269947'
            ]);
        $this->assertDatabaseHas('personas',['nombre' => 'Diego']);
    }

    /** @test */
    public function se_puede_editar_una_persona()
    {
        $persona = factory(Persona::class)->create();
        $fields = [
            'nombre' => 'Diego',
            'telefono' => '0991269947',
            'documento' => '123456789'
        ];

        $response = $this->json('PUT',route('personas.update',$persona->id),$fields);

        $response->assertStatus(200)
            ->assertJson([
                'nombre' => 'Diego'
            ]);
        $this->assertDatabaseHas('personas', ['nombre' => 'Diego', 'telefono' => '0991269947']);
    }

    /** @test */
    public function se_puede_eliminar_una_persona()
    {
        $persona = factory(Persona::class)->create();
        $this->withoutExceptionHandling();

        $response = $this->json('DELETE',route('personas.destroy',$persona->id));

        $response->assertStatus(204);
        // dd($response);
        $this->assertDatabaseMissing('personas', ['nombre' => $persona->nombre]);
    }

    /** @test */
    public function se_puede_buscar_personas()
    {
        factory(Persona::class,10)->create();
        $persona = factory(Persona::class)->create(['nombre' => 'Diego']);
        $palabra_buscada = ['q'=>'Diego'];
        $this->withoutExceptionHandling();

        $response = $this->json('GET',route('personas.buscar',$palabra_buscada));

        $response->assertStatus(200)
            ->assertJson(
                [
                    [ 'nombre' => 'Diego']
                ]
            );
    }

    /** @test */
    public function persona_requires_nombre_para_crear()
    {
        $fields = [
            'nombre' => '',
            'apellido' => $this->faker->lastname,
            'documento' => '5309590',
            'telefono' => '0991269947'
        ];

        $response = $this->json('POST',route('personas.store',$fields));

        $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY) //Status 422
            ->assertJsonPath('errors.nombre', ['El campo nombre es obligatorio.']);
    }

    /** @test */
    public function persona_requires_documento_para_crear()
    {
        $fields = [
            'nombre' => $this->faker->name,
            'apellido' => $this->faker->lastname,
            'documento' => '',
            'telefono' => '0991269947'
        ];

        $response= $this->json('POST',route('personas.store',$fields));

        $response->assertStatus(422)
            ->assertJsonPath('errors.documento',['El campo documento es obligatorio.']);
    }

    /** @test */
    public function persona_requires_nombre_para_actualizar()
    {
        $persona = factory(Persona::class)->create();
        $fields = [
            'nombre' => '',
            'telefono' => '0991269947',
            'documento' => '123456789'
        ];

        $response = $this->json('PUT',route('personas.update',$persona->id),$fields);

        $response->assertStatus(422)
            ->assertJsonPath('errors.nombre',['El campo nombre es obligatorio.']);
    }

    /** @test */
    public function persona_requires_documento_para_actualizar()
    {
        $persona = factory(Persona::class)->create();
        $fields = [
            'nombre' => 'diego',
            'telefono' => '0991269984',
            'documento' => ''
        ];

        $response = $this->json('PUT',route('personas.update',$persona->id),$fields);

        $response->assertStatus(422)
            ->assertJsonPath('errors.documento',['El campo documento es obligatorio.']);
    }

    /** @test */
    public function funcion_buscar_requires_q_para_buscar_personas()
    {
        // NOTE(review): unlike the other validation tests (which expect 422),
        // this one asserts status 200 while also expecting an errors payload —
        // confirm the search endpoint really returns errors with a 200.
        $persona = factory(Persona::class)->create(['nombre' => 'Diego']);
        $palabra_buscada = ['q'=>''];
        $this->withoutExceptionHandling();

        $response = $this->json('GET',route('personas.buscar',$palabra_buscada));

        $response->assertStatus(200)
            ->assertJsonPath('errors.q',['El campo buscar es obligatorio.']);
    }
}
|
/*
* Copyright (c) 2019. Ang Hou Fu.
* Licensed under the MIT License. See LICENSE file in the project root for license information.
*/
import {CHECK_BOX, CheckBox, ItemExpress, ItemFull} from "../src";
describe('CheckBox -- ', () => {
describe('Should convert from an Item. ', function () {
it('An ItemExpress.', function () {
const testItem: ItemExpress = {type: CHECK_BOX, value: 'Default Label'};
const testBox = new CheckBox();
testBox.ConvertItem(testItem);
const result = new CheckBox('Default Label');
expect(testBox).toEqual(result);
});
it('An ItemFull', function () {
const testItem: ItemFull = {
options: {label: 'Default Label', control: 'CheckBox1', help: 'Test Help'},
type: CHECK_BOX
};
const testBox = new CheckBox();
testBox.ConvertItem(testItem);
const result = new CheckBox('Default Label');
result.control = 'CheckBox1';
result.help = 'Test Help';
expect(testBox).toEqual(result);
});
});
});
|
using LLMerge2Lists;
using System;
using Xunit;
namespace Merge2ListsTests
{
public class UnitTest1
{
[Fact]
public void CanMerge2Lists()
{
//Arrange
LinkList llOne = new LinkList();
llOne.Add(new Node(31));
llOne.Add(new Node(8));
llOne.Add(new Node(2));
llOne.Add(new Node(19));
LinkList llTwo = new LinkList();
llTwo.Add(new Node(31));
llTwo.Add(new Node(8));
llTwo.Add(new Node(2));
llTwo.Add(new Node(20));
//Act
llOne.Merge2(llOne, llTwo);
//Node found = llThree.Find(value);
//Assert (expected, actual)
Assert.Equal(llOne.Head, llOne.Merge2(llOne, llTwo));
}
[Fact]
public void CanMergeUnevenWhereFirstListIsShorter()
{
//Arrange
LinkList llOne = new LinkList();
llOne.Add(new Node(41));
llOne.Add(new Node(8));
LinkList llTwo = new LinkList();
llTwo.Add(new Node(51));
llTwo.Add(new Node(18));
llTwo.Add(new Node(12));
llTwo.Add(new Node(2));
//Act
llOne.Merge2(llOne, llTwo);
//Node found = llThree.Find(value);
//Assert (expected, actual)
Assert.Equal(llOne.Head, llOne.Merge2(llOne, llTwo));
}
[Fact]
public void CanMergeUnevenWhereSecondListIsShorter()
{
//Arrange
LinkList llOne = new LinkList();
llOne.Add(new Node(8));
llOne.Add(new Node(2));
llOne.Add(new Node(19));
llOne.Add(new Node(19));
LinkList llTwo = new LinkList();
llTwo.Add(new Node(51));
//Act
llOne.Merge2(llOne, llTwo);
//Node found = llThree.Find(value);
//Assert (expected, actual)
Assert.Equal(llOne.Head, llOne.Merge2(llOne, llTwo));
}
}
}
|
from helper.ptt_class import ptt_craw
from db.connect import Heroku_DB
from imgur.upload import uploader
import os
import requests
if __name__ == "__main__":
    # Imgur album id for uploads; None if the env var is not set.
    ptt_beauty_album_id = os.environ.get('ptt_beauty_Album_ID')
    # conn = Heroku_DB()

    # FIX: the instances previously shadowed the imported class names
    # (`uploader = uploader()` / `ptt_craw = ptt_craw()`), which made the
    # classes unusable afterwards in this scope; bind them to distinct names.
    imgur_uploader = uploader()
    crawler = ptt_craw()

    # ptt_beauty takes the requests module to issue HTTP calls with.
    content, index_list = crawler.ptt_beauty(requests)
    print(index_list)
    # for index_url in index_list:
    #     imgur_uploader.upload_photo(index_url, ptt_beauty_album_id)
|
#if XAMARIN_APPLETLS
#if XAMARIN_NO_TLS
#error THIS SHOULD NEVER HAPPEN!!!
#endif
//
// MobileTlsStream.cs
//
// Author:
// Martin Baulig <martin.baulig@xamarin.com>
//
// Copyright (c) 2015 Xamarin, Inc.
//
using System;
using System.IO;
using System.Linq;
using SD = System.Diagnostics;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using System.Security.Cryptography.X509Certificates;
using MX = Mono.Security.X509;
using Mono.Security.Interface;
namespace XamCore.Security.Tls
{
	// Abstract base for the platform TLS stream implementations.
	// Carries the settings/provider pair and defines the handshake and
	// record-level I/O contract that concrete engines implement.
	abstract class MobileTlsStream : IDisposable
	{
		MonoTlsSettings settings;
		MobileTlsProvider provider;

		public MobileTlsStream (MonoTlsSettings settings, MobileTlsProvider provider)
		{
			this.settings = settings;
			this.provider = provider;
		}

		// TLS settings this stream was created with.
		public MonoTlsSettings Settings {
			get { return settings; }
		}

		// Provider that created this stream.
		public MobileTlsProvider Provider {
			get { return provider; }
		}

		// Debug output; compiled away unless MARTIN_DEBUG is defined.
		[SD.Conditional ("MARTIN_DEBUG")]
		protected void Debug (string message, params object[] args)
		{
			Console.Error.WriteLine ("MobileTlsStream: {0}", string.Format (message, args));
		}

		public abstract bool HasContext {
			get;
		}

		public abstract bool IsAuthenticated {
			get;
		}

		public abstract bool IsServer {
			get;
		}

		// Handshake lifecycle: Start, then Process until done, then Finish.
		public abstract void StartHandshake ();

		// NOTE(review): presumably returns false while the handshake needs
		// more I/O and true once complete — confirm in the concrete engines.
		public abstract bool ProcessHandshake ();

		public abstract void FinishHandshake ();

		public abstract MonoTlsConnectionInfo ConnectionInfo {
			get;
		}

		internal abstract X509Certificate LocalServerCertificate {
			get;
		}

		internal abstract bool IsRemoteCertificateAvailable {
			get;
		}

		internal abstract X509Certificate LocalClientCertificate {
			get;
		}

		public abstract X509Certificate RemoteCertificate {
			get;
		}

		public abstract TlsProtocols NegotiatedProtocol {
			get;
		}

		public abstract void Flush ();

		// Record-level I/O; wantMore signals the caller that the operation
		// must be repeated to make further progress.
		public abstract int Read (byte[] buffer, int offset, int count, out bool wantMore);

		public abstract int Write (byte[] buffer, int offset, int count, out bool wantMore);

		public abstract void Close ();

		// Standard dispose pattern; subclasses override Dispose(bool).
		public void Dispose ()
		{
			Dispose (true);
			GC.SuppressFinalize (this);
		}

		protected virtual void Dispose (bool disposing)
		{
		}

		~MobileTlsStream ()
		{
			Dispose (false);
		}
	}
}
#endif
|
import Rect from '../geometry/Rect';
import Node from '../cells/Node';
import RemarkView from './RemarkView';
// A remark (annotation) cell: a Node rendered on the decorate pane whose
// text comes from `data.name` and whose geometry lives in `metadata`.
class Remark extends Node {
  isRemark() {
    return true;
  }

  // Remark text; empty string when no name is set.
  getRemark() {
    return this.data.name || '';
  }

  // Bounding box built from the current position and size.
  getBBox() {
    const { width, height } = this.getSize();
    const { x, y } = this.getPosition();
    return new Rect(x, y, width, height);
  }

  // Upper bound for the remark's rendered size.
  getMaxSize() {
    return { width: 180, height: 96 };
  }

  // Explicit size when present, otherwise the maximum size.
  getSize() {
    return this.metadata.size || this.getMaxSize();
  }

  getPosition() {
    return this.metadata.position;
  }
}
// Default rendering options for Remark cells: drawn as an SVG <g> on the
// decorate pane with RemarkView as the view class.
Remark.setDefaults({
  tagName: 'g',
  pane: 'decoratePane',
  classNames: 'pane-remark',
  view: RemarkView,
});

// exports
// -------

export default Remark;
|
import { INode } from ".";
/**
 * Resolves after `ms` milliseconds.
 * FIX: the executor's resolve callback was typed `any`, defeating type
 * checking; it is now properly typed and the return type is explicit.
 */
export function delay(ms: number): Promise<void> {
    return new Promise<void>((resolve) => {
        setTimeout(resolve, ms);
    })
}
/**
 * Fisher-Yates shuffle, performed in place; returns the same array.
 */
export function shuffle(array: any[]) {
    let remaining = array.length;
    while (remaining > 0) {
        // Pick one of the not-yet-placed elements...
        const pick = Math.floor(Math.random() * remaining--);
        // ...and swap it into the current tail position.
        [array[remaining], array[pick]] = [array[pick], array[remaining]];
    }
    return array;
}
// Euclidean distance between two nodes.
// (Name kept as-is — getTotalDistance below calls it by this name.)
function calcuteDistance(node1: INode, node2: INode) {
    const deltaX = node1.x - node2.x;
    const deltaY = node1.y - node2.y;
    return Math.sqrt(deltaX * deltaX + deltaY * deltaY);
}
/**
 * Connects the nodes in the given order (closing the tour back to the
 * first node) and returns the total Euclidean length.
 * FIX: an empty `order` previously crashed on `nodes[order[0]]`; it now
 * returns 0.
 */
export function getTotalDistance(nodes: INode[], order: number[]): number {
    const n = order.length;
    // An empty tour has zero length.
    if (n === 0) {
        return 0;
    }
    let distance = 0;
    for (let i = 0; i < n - 1; i++) {
        distance += calcuteDistance(nodes[order[i]], nodes[order[i + 1]]);
    }
    // Close the tour: last node back to the first.
    distance += calcuteDistance(nodes[order[0]], nodes[order[n - 1]])
    return distance;
}
/**
 * Returns a random integer in the range [0, range).
 */
export function getRandomInt(range: number) {
    const scaled = Math.random() * range;
    return Math.floor(scaled);
}
|
//..............................................................................
//
// This file is part of the AXL library.
//
// AXL is distributed under the MIT license.
// For details see accompanying license.txt file,
// the public copy of which is also available at:
// http://tibbo.com/downloads/archive/axl/license.txt
//
//..............................................................................
#pragma once
#define _AXL_GUI_CANVAS_H
#include "axl_gui_Engine.h"
#include "axl_gui_ColorAttr.h"
#include "axl_gui_TextAttr.h"
#include "axl_gui_Font.h"
namespace axl {
namespace gui {
class Image;
//..............................................................................
// Canvas: a drawing surface bound to a gui::Engine. Every drawing call is a
// thin forwarder to the engine; the canvas itself only carries the current
// font, color attribute and palette.
//
// FIX: copyRect() previously forwarded its rectangle as
// (left, right, top, bottom), transposing top/right relative to the
// (left, top, right, bottom) order the engine expects (cf. drawImage).

class Canvas: public GuiItem
{
protected:
	Font* m_driverFont;          // font currently selected in the driver
	ColorAttr m_driverColorAttr; // color attr currently selected in the driver

public:
	Font* m_font;          // current logical font
	ColorAttr m_colorAttr; // current foreground/background colors
	Palette m_palette;     // palette for resolving indexed colors

protected:
	// Engine-constructed only: starts with the std GUI font and the
	// widget text/back palette colors. m_driverColorAttr is left
	// default-constructed.
	Canvas(Engine* engine):
		GuiItem(engine)
	{
		m_font = engine->getStdFont(StdFontKind_Gui);
		m_driverFont = NULL;
		m_colorAttr.setup(StdPalColor_WidgetText, StdPalColor_WidgetBack);
	}

public:
	// Applies a text attribute: colors plus a font modified by the
	// attribute's font flags.
	void
	setTextAttr(const TextAttr& attr)
	{
		m_colorAttr = attr;
		m_font = m_font->getFontMod(attr.m_fontFlags);
	}

	// rect drawing

	bool
	drawRect(
		int left,
		int top,
		int right,
		int bottom,
		uint_t color
	)
	{
		return m_engine->drawRect(this, left, top, right, bottom, color);
	}

	// defaults to the current background color
	bool
	drawRect(
		int left,
		int top,
		int right,
		int bottom
	)
	{
		return drawRect(left, top, right, bottom, m_colorAttr.m_backColor);
	}

	bool
	drawRect(
		const Rect& rect,
		uint_t color
	)
	{
		return drawRect(
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			color
		);
	}

	bool
	drawRect(const Rect& rect)
	{
		return drawRect(
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			m_colorAttr.m_backColor
		);
	}

	// alpha rect drawing

	bool
	drawAlphaRect(
		int left,
		int top,
		int right,
		int bottom,
		uint_t color,
		uint_t alpha
	)
	{
		return m_engine->drawAlphaRect(this, left, top, right, bottom, color, alpha);
	}

	bool
	drawAlphaRect(
		int left,
		int top,
		int right,
		int bottom,
		uint_t alpha
	)
	{
		return drawAlphaRect(left, top, right, bottom, m_colorAttr.m_backColor, alpha);
	}

	bool
	drawAlphaRect(
		const Rect& rect,
		uint_t color,
		uint_t alpha
	)
	{
		return drawAlphaRect(
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			color,
			alpha
		);
	}

	bool
	drawAlphaRect(
		const Rect& rect,
		uint_t alpha
	)
	{
		return drawAlphaRect(
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			m_colorAttr.m_backColor,
			alpha
		);
	}

	// default to utf8

	bool
	drawText(
		int x,
		int y,
		int left,
		int top,
		int right,
		int bottom,
		uint_t textColor,
		uint_t backColor,
		uint_t fontFlags,
		const sl::StringRef& text
	)
	{
		return drawText_utf8(
			x,
			y,
			left,
			top,
			right,
			bottom,
			textColor,
			backColor,
			fontFlags,
			text
		);
	}

	bool
	drawText(
		int x,
		int y,
		int left,
		int top,
		int right,
		int bottom,
		const sl::StringRef& text
	)
	{
		return drawText_utf8(x, y, left, top, right, bottom, text);
	}

	bool
	drawText(
		const Point& point,
		const Rect& rect,
		const TextAttr& textAttr,
		const sl::StringRef& text
	)
	{
		return drawText_utf8(point, rect, textAttr, text);
	}

	bool
	drawText(
		const Point& point,
		const Rect& rect,
		const sl::StringRef& text
	)
	{
		return drawText_utf8(point, rect, text);
	}

	// utf8 text drawing

	bool
	drawText_utf8(
		int x,
		int y,
		int left,
		int top,
		int right,
		int bottom,
		uint_t textColor,
		uint_t backColor,
		uint_t fontFlags,
		const sl::StringRef_utf8& text
	)
	{
		return m_engine->drawText_utf8(
			this,
			x,
			y,
			left,
			top,
			right,
			bottom,
			textColor,
			backColor,
			fontFlags,
			text
		);
	}

	bool
	drawText_utf8(
		int x,
		int y,
		int left,
		int top,
		int right,
		int bottom,
		const sl::StringRef_utf8& text
	)
	{
		return drawText_utf8(
			x,
			y,
			left,
			top,
			right,
			bottom,
			m_colorAttr.m_foreColor,
			m_colorAttr.m_backColor,
			-1,
			text
		);
	}

	bool
	drawText_utf8(
		const Point& point,
		const Rect& rect,
		const TextAttr& textAttr,
		const sl::StringRef_utf8& text
	)
	{
		return drawText_utf8(
			point.m_x,
			point.m_y,
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			textAttr.m_foreColor,
			textAttr.m_backColor,
			textAttr.m_fontFlags,
			text
		);
	}

	bool
	drawText_utf8(
		const Point& point,
		const Rect& rect,
		const sl::StringRef_utf8& text
	)
	{
		return drawText_utf8(
			point.m_x,
			point.m_y,
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			m_colorAttr.m_foreColor,
			m_colorAttr.m_backColor,
			// NOTE(review): this overload defaults fontFlags to 0 while the
			// (x, y) overload above and both utf16 overloads use -1 —
			// presumably one of the two is unintended; confirm before unifying.
			0,
			text
		);
	}

	// utf16 text drawing

	bool
	drawText_utf16(
		int x,
		int y,
		int left,
		int top,
		int right,
		int bottom,
		uint_t textColor,
		uint_t backColor,
		uint_t fontFlags,
		const sl::StringRef_utf16& text
	)
	{
		return m_engine->drawText_utf16(
			this,
			x,
			y,
			left,
			top,
			right,
			bottom,
			textColor,
			backColor,
			fontFlags,
			text
		);
	}

	bool
	drawText_utf16(
		int x,
		int y,
		int left,
		int top,
		int right,
		int bottom,
		const sl::StringRef_utf16& text
	)
	{
		return drawText_utf16(
			x,
			y,
			left,
			top,
			right,
			bottom,
			m_colorAttr.m_foreColor,
			m_colorAttr.m_backColor,
			-1,
			text
		);
	}

	bool
	drawText_utf16(
		const Point& point,
		const Rect& rect,
		const TextAttr& textAttr,
		const sl::StringRef_utf16& text
	)
	{
		return drawText_utf16(
			point.m_x,
			point.m_y,
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			textAttr.m_foreColor,
			textAttr.m_backColor,
			textAttr.m_fontFlags,
			text
		);
	}

	bool
	drawText_utf16(
		const Point& point,
		const Rect& rect,
		const sl::StringRef_utf16& text
	)
	{
		return drawText_utf16(
			point.m_x,
			point.m_y,
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			m_colorAttr.m_foreColor,
			m_colorAttr.m_backColor,
			-1,
			text
		);
	}

	// utf32 text drawing

	bool
	drawText_utf32(
		int x,
		int y,
		int left,
		int top,
		int right,
		int bottom,
		uint_t textColor,
		uint_t backColor,
		uint_t fontFlags,
		const sl::StringRef_utf32& text
	)
	{
		return m_engine->drawText_utf32(
			this,
			x,
			y,
			left,
			top,
			right,
			bottom,
			textColor,
			backColor,
			fontFlags,
			text
		);
	}

	bool
	drawText_utf32(
		int x,
		int y,
		int left,
		int top,
		int right,
		int bottom,
		const sl::StringRef_utf32& text
	)
	{
		return drawText_utf32(
			x,
			y,
			left,
			top,
			right,
			bottom,
			m_colorAttr.m_foreColor,
			m_colorAttr.m_backColor,
			-1,
			text
		);
	}

	bool
	drawText_utf32(
		const Point& point,
		const Rect& rect,
		const TextAttr& textAttr,
		const sl::StringRef_utf32& text
	)
	{
		return drawText_utf32(
			point.m_x,
			point.m_y,
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			textAttr.m_foreColor,
			textAttr.m_backColor,
			textAttr.m_fontFlags,
			text
		);
	}

	bool
	drawText_utf32(
		const Point& point,
		const Rect& rect,
		const sl::StringRef_utf32& text
	)
	{
		return drawText_utf32(
			point.m_x,
			point.m_y,
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom,
			m_colorAttr.m_foreColor,
			m_colorAttr.m_backColor,
			// NOTE(review): 0 here vs -1 in the (x, y) overload above —
			// same inconsistency as the utf8 variant; confirm before unifying.
			0,
			text
		);
	}

	// image drawing

	bool
	drawImage(
		int x,
		int y,
		Image* image,
		int left,
		int top,
		int right,
		int bottom
	)
	{
		return m_engine->drawImage(
			this,
			x,
			y,
			image,
			left,
			top,
			right,
			bottom
		);
	}

	bool
	drawImage(
		const Point& point,
		Image* image,
		const Rect& rect = Rect()
	)
	{
		return drawImage(
			point.m_x,
			point.m_y,
			image,
			rect.m_left,
			rect.m_top,
			rect.m_right,
			rect.m_bottom
		);
	}

	// bitblt

	// Copies the given source rectangle of srcCanvas to (x, y) on this canvas.
	bool
	copyRect(
		int x,
		int y,
		Canvas* srcCanvas,
		int left,
		int top,
		int right,
		int bottom
	)
	{
		// FIX: arguments were forwarded as (left, right, top, bottom),
		// transposing top and right.
		return m_engine->copyRect(
			this,
			x,
			y,
			srcCanvas,
			left,
			top,
			right,
			bottom
		);
	}

	bool
	copyRect(
		const Point& point,
		Canvas* srcCanvas,
		const Rect& srcRect
	)
	{
		return copyRect(
			point.m_x,
			point.m_y,
			srcCanvas,
			srcRect.m_left,
			srcRect.m_top,
			srcRect.m_right,
			srcRect.m_bottom
		);
	}
};
//..............................................................................
// Per-form-factor cache of offscreen canvases. A cached canvas is reused
// when it is at least as large as requested; otherwise it is re-created
// grown (dimensions rounded up to a multiple of 16).
template <typename T>
class OffscreenCanvasCache
{
protected:
	struct Entry
	{
		Size m_size; // allocated canvas size
		T m_canvas;
	};

protected:
	Engine* m_engine;
	Entry* m_canvasTable[FormFactor__Count]; // one slot per form factor

public:
	OffscreenCanvasCache(Engine* engine)
	{
		m_engine = engine;
		memset(m_canvasTable, 0, sizeof(m_canvasTable));
	}

	~OffscreenCanvasCache()
	{
		clear();
	}

	// Frees all cached canvases.
	void
	clear()
	{
		for (size_t i = 0; i < countof(m_canvasTable); i++)
			if (m_canvasTable[i])
				AXL_MEM_DELETE(m_canvasTable[i]);

		memset(m_canvasTable, 0, sizeof(m_canvasTable));
	}

	// Returns a canvas at least width x height; NULL on creation failure.
	T*
	getCanvas(
		uint_t width,
		uint_t height
	)
	{
		FormFactor formFactor = getFormFactor(width, height);
		ASSERT(formFactor < countof(m_canvasTable));

		Entry* entry = m_canvasTable[formFactor];
		if (!entry)
		{
			entry = AXL_MEM_NEW(Entry);
			m_canvasTable[formFactor] = entry;
		}

		// Reuse when the cached canvas already covers the request.
		if (entry->m_size.m_width_u >= width &&
			entry->m_size.m_height_u >= height)
			return &entry->m_canvas;

		// Grow to cover the request: keep the cached dimension when it is
		// already large enough, otherwise round the requested one up to 16.
		width = width < entry->m_size.m_width_u ?
			entry->m_size.m_width_u :
			sl::align<16> (width);

		// FIX: this condition was inverted relative to the width branch
		// above (`entry->m_size.m_height_u < height` kept the SMALLER
		// cached height), so the canvas could be re-created too short.
		height = height < entry->m_size.m_height_u ?
			entry->m_size.m_height_u :
			sl::align<16> (height);

		bool result = m_engine->createOffscreenCanvas(&entry->m_canvas, width, height);
		if (!result)
			return NULL;

		entry->m_size.m_width_u = width;
		entry->m_size.m_height_u = height;
		return &entry->m_canvas;
	}

	T*
	getCanvas(const Size& size)
	{
		return getCanvas(size.m_width, size.m_height);
	}
};
//..............................................................................
} // namespace gui
} // namespace axl
|
package br.com.zup.clients.itau.response
import br.com.zup.TipoConta
import br.com.zup.chave.cadastro.Conta
/**
 * Account payload returned by the Itaú accounts client.
 */
data class DadosContaItauResponse(
    val tipo: TipoConta,
    val instituicao: InstituicaoItauResponse,
    val agencia: String,
    val numero: String,
    val titular: TitularItauResponse
) {
    /** Maps this external API payload onto the domain [Conta] entity. */
    fun toModel(): Conta {
        return Conta(titular.nome, titular.cpf, instituicao.nome, agencia, numero, tipo)
    }
}
/**
 * Financial institution data nested in the Itaú account response.
 */
data class InstituicaoItauResponse(
    val nome: String,
    val ispb: String
)
/**
 * Account holder data nested in the Itaú account response.
 */
data class TitularItauResponse(
    val id: String,
    val nome: String,
    val cpf: String
)
|
// import { Expect, SetupFixture, Teardown, TeardownFixture, Test, TestFixture } from 'alsatian';
// import * as fs from 'fs';
// import * as path from 'path';
// import { read, readPromise, readSync } from './read';
// @TestFixture('Extract Tests')
// export class ExtractTests {
// public outputFolderPath = '';
// public outputFilePath = '';
// public outputFileName = 'test.txt';
// public outputFileContents = 'test\r\n';
// public multipleOutputFileContents = 'test\r\ntest2\r\n';
// public password = 'test';
// @Test('read() should read empty zip')
// public read1() {
// read(
// 'test-assets/empty.zip',
// {
// outputDirectory: this.outputFolderPath
// },
// (_error, contents) => {
// Expect(contents).toEqual(new Uint16Array([]));
// }
// );
// }
// @Test('read() should read zip contents')
// public read2() {
// read('test-assets/no-password.zip', {}, (_error, contents) => {
// Expect(contents.toString()).toBe(this.outputFileContents);
// });
// }
// @Test('read() should read password protected zip contents')
// public read3() {
// read(
// 'test-assets/password.zip',
// {
// password: this.password
// },
// (_error, contents) => {
// Expect(contents.toString()).toBe(this.outputFileContents);
// }
// );
// }
// @Test('read() should read multiple files from zip')
// public read4() {
// read(
// 'test-assets/multiple.zip',
// {
// password: this.password
// },
// (_error, contents) => {
// Expect(contents.toString()).toBe(this.multipleOutputFileContents);
// }
// );
// }
//     @Test('read() should read a single file from a zip containing multiple files')
// public read5() {
// read(
// 'test-assets/multiple.zip',
// {
// password: this.password,
// files: ['test.txt']
// },
// (_error, contents) => {
// Expect(contents.toString()).toBe(this.outputFileContents);
// }
// );
// }
// @Test('readSync() should read empty zip')
// public readSync1() {
// const contents = readSync('test-assets/empty.zip', {
// outputDirectory: this.outputFolderPath
// });
// Expect(contents).toEqual(new Uint16Array([]));
// }
// @Test('readSync() should read zip contents')
// public readSync2() {
// const contents = readSync('test-assets/no-password.zip');
// Expect(contents.toString()).toBe(this.outputFileContents);
// }
// @Test('readSync() should read password protected zip contents')
// public readSync3() {
// const contents = readSync('test-assets/password.zip', {
// password: this.password
// });
// Expect(contents.toString()).toBe(this.outputFileContents);
// }
// @Test('readSync() should read multiple files from zip')
// public readSync4() {
// const contents = readSync('test-assets/multiple.zip', {
// password: this.password
// });
// Expect(contents.toString()).toBe(this.multipleOutputFileContents);
// }
//     @Test('readSync() should read a single file from a zip containing multiple files')
// public readSync5() {
// const contents = readSync('test-assets/multiple.zip', {
// password: this.password,
// files: ['test.txt']
// });
// Expect(contents.toString()).toBe(this.outputFileContents);
// }
// @Test('readPromise() should read empty zip')
// public async readPromise1() {
// const contents = await readPromise('test-assets/empty.zip', {
// outputDirectory: this.outputFolderPath
// });
// Expect(contents).toEqual(new Uint16Array([]));
// }
// @Test('readPromise() should read zip contents')
// public async readPromise2() {
// const contents = await readPromise('test-assets/no-password.zip');
// Expect(contents.toString()).toBe(this.outputFileContents);
// }
// @Test('readPromise() should read password protected zip contents')
// public async readPromise3() {
// const contents = await readPromise('test-assets/password.zip', {
// password: this.password
// });
// Expect(contents.toString()).toBe(this.outputFileContents);
// }
// @Test('readPromise() should read multiple files from zip')
// public async readPromise4() {
// const contents = await readPromise('test-assets/multiple.zip', {
// password: this.password
// });
// Expect(contents.toString()).toBe(this.multipleOutputFileContents);
// }
//     @Test('readPromise() should read a single file from a zip containing multiple files')
// public async readPromise5() {
// const contents = await readPromise('test-assets/multiple.zip', {
// password: this.password,
// files: ['test.txt']
// });
// Expect(contents.toString()).toBe(this.outputFileContents);
// }
// }
|
use crate::abpoa::{
abpoa_add_graph_edge, abpoa_add_graph_node, abpoa_align_sequence_to_graph, abpoa_dump_pog,
abpoa_init, abpoa_init_para, abpoa_msa, abpoa_para_t, abpoa_post_set_para, abpoa_res_t,
abpoa_t, free, strdup, ABPOA_CDEL, ABPOA_CDIFF, ABPOA_CHARD_CLIP, ABPOA_CINS, ABPOA_CMATCH,
ABPOA_CSOFT_CLIP, ABPOA_SINK_NODE_ID, ABPOA_SRC_NODE_ID, FILE,
};
use rayon::prelude::*;
use std::collections::HashMap;
use std::ffi::CString;
use std::os::raw::{c_char, c_int};
use std::ptr;
use log::{info, warn};
use std::env;
use std::time::Instant;
/// Wrapper around abpoa's C API: owns the raw `abpoa_t` graph and its
/// parameter struct, plus bookkeeping that maps abpoa node ids back to the
/// caller's node abstraction.
pub struct AbpoaAligner {
    ab: *mut abpoa_t,         // raw abpoa graph handle (C-allocated)
    abpt: *mut abpoa_para_t,  // raw abpoa parameter struct (C-allocated)
    // NOTE: the following only work when adding the nodes manually!!!
    // TODO: fix this
    n_nodes: usize,           // total number of abpoa (per-base) nodes added
    nodes: Vec<Vec<i32>>,     // abpoa node ids per abstraction-level node
    nodes_str: HashMap<i32, char>, // abpoa node id -> its nucleotide
    abpoa_id_to_abstraction_id: HashMap<i32, usize>, // reverse lookup
    // this does not consider the initial and final edge, however this should not
    // cause any issue
    edges: Vec<(usize, usize)>, // edges between abstraction-level nodes
    edges_abpoa: Vec<(i32, i32)>, // edges between abpoa node ids
}
/// Result of a multiple sequence alignment: one aligned row per input
/// sequence, all of length `msa_length`.
pub struct AbpoaMSA {
    pub msa_length: usize, // number of columns in the alignment
    pub n_seqs: usize,     // number of aligned sequences (rows)
    // TODO: maybe this should be a map {seq_id : aln}?
    pub msa: Vec<String>,  // one aligned row per input sequence
}
impl AbpoaMSA {
fn new() -> Self {
AbpoaMSA {
msa_length: 0,
n_seqs: 0,
msa: Vec::new(),
}
}
fn new_from_alignment(msa: Vec<String>, n_seqs: usize, msa_length: usize) -> Self {
AbpoaMSA {
msa_length,
n_seqs,
msa,
}
}
}
/// Consensus sequence produced by abpoa.
pub struct AbpoaCons {
    pub cons_length: usize, // length of the consensus string
    pub cons: String,       // consensus over the A/C/G/T/N alphabet
}
impl AbpoaCons {
fn new() -> Self {
AbpoaCons {
cons_length: 0,
cons: String::new(),
}
}
fn new_from_cons(cons: String) -> Self {
AbpoaCons {
cons_length: cons.len(),
cons,
}
}
}
/// Result of aligning a single sequence against the graph: decoded cigar,
/// traversed node ids (both abpoa-level and abstraction-level), and the raw
/// coordinates/scores reported by abpoa.
#[derive(Debug)]
pub struct AbpoaAlignmentResult {
    pub cigar: String,          // run-length cigar, e.g. "3M1I2M"
    pub abpoa_nodes: Vec<i32>,  // abpoa node ids touched by the alignment
    pub graph_nodes: Vec<usize>, // corresponding abstraction-level node ids
    pub node_s: i32,            // first aligned graph node (from abpoa_res_t)
    pub node_e: i32,            // last aligned graph node
    pub query_s: i32,           // query start offset
    pub query_e: i32,           // query end offset
    pub n_aligned_bases: i32,
    pub n_matched_bases: i32,
    pub best_score: i32,
    pub cigar_vec: Vec<char>,   // one op char per aligned position
    pub cs_string: String,      // minimap2-style cs:Z difference string
}
impl AbpoaAlignmentResult {
pub fn new() -> Self {
AbpoaAlignmentResult {
cigar: "".to_string(),
abpoa_nodes: vec![],
graph_nodes: vec![],
node_s: 0,
node_e: 0,
query_s: 0,
query_e: 0,
n_aligned_bases: 0,
n_matched_bases: 0,
best_score: 0,
cigar_vec: vec![],
cs_string: "".to_string(),
}
}
pub fn new_with_params(
cigar: &str,
abpoa_nodes: Vec<i32>,
graph_nodes: Vec<usize>,
res: &abpoa_res_t,
cigar_vec: Vec<char>,
cs_string: &str,
) -> Self {
AbpoaAlignmentResult {
cigar: cigar.to_string(),
abpoa_nodes,
graph_nodes,
node_s: res.node_s,
node_e: res.node_e,
query_s: res.query_s,
query_e: res.query_e,
n_aligned_bases: res.n_aln_bases,
n_matched_bases: res.n_matched_bases,
best_score: res.best_score,
cigar_vec,
cs_string: cs_string.to_string(),
}
}
}
impl AbpoaAligner {
    /// Allocates a fresh abpoa graph and parameter struct (via the C API)
    /// with empty wrapper bookkeeping.
    pub unsafe fn new() -> Self {
        AbpoaAligner {
            ab: abpoa_init(),
            abpt: abpoa_init_para(),
            n_nodes: 0,
            nodes: vec![],
            nodes_str: HashMap::new(),
            edges: vec![],
            edges_abpoa: vec![],
            abpoa_id_to_abstraction_id: HashMap::new(),
        }
    }

    // Initializes the aligner with the example.c params,
    // this is useful for debugging against the example.c
    // file in the original abpoa's repo
    pub unsafe fn new_with_example_params() -> Self {
        let mut aligner = AbpoaAligner::new();
        aligner.set_out_msa(true);
        aligner.set_out_cons(true);
        aligner.set_w(6);
        aligner.set_k(9);
        aligner.set_min_w(10);
        aligner.set_progressive_poa(true);
        // Must be last: finalizes the parameter struct.
        aligner.set_post_para();
        aligner
    }
    /// Toggles emission of the row-wise MSA in results.
    pub unsafe fn set_out_msa(&mut self, val: bool) {
        (*self.abpt).set_out_msa(val as u8);
    }

    /// Toggles emission of the consensus sequence in results.
    pub unsafe fn set_out_cons(&mut self, val: bool) {
        (*self.abpt).set_out_cons(val as u8);
    }

    /// Toggles progressive partial-order alignment.
    pub unsafe fn set_progressive_poa(&mut self, val: bool) {
        (*self.abpt).set_progressive_poa(val as u8);
    }

    /// Sets abpt.w (presumably the seeding window size — see abpoa docs).
    pub unsafe fn set_w(&mut self, w: u8) {
        (*self.abpt).w = w as c_int;
    }

    /// Sets abpt.k (presumably the k-mer size — see abpoa docs).
    pub unsafe fn set_k(&mut self, k: u8) {
        (*self.abpt).k = k as c_int;
    }

    /// Sets abpt.min_w (presumably the minimum window size — see abpoa docs).
    pub unsafe fn set_min_w(&mut self, min_w: u8) {
        (*self.abpt).min_w = min_w as c_int;
    }

    /// Finalizes the parameter struct; call after the individual setters.
    pub unsafe fn set_post_para(&mut self) {
        abpoa_post_set_para(self.abpt);
    }

    /// Resets the aligner's sequence counter so it can be reused.
    pub unsafe fn reset_aligner(&mut self) {
        (*(*self.ab).abs).n_seq = 0;
    }
// NOTE: Rust does not support static fields, using const is the closest thing to that
// see: https://stackoverflow.com/a/48972982
const NT4_TABLE: [u8; 256] = [
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, /*'-'*/
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 4, 1, 4, 4, 4, 2, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 4, 1, 4, 4, 4, 2, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
];
const ALN_ALPHABET: [char; 6] = ['A', 'C', 'G', 'T', 'N', '-'];
const CONS_ALPHABET: [char; 5] = ['A', 'C', 'G', 'T', 'N'];
pub fn convert_seq_to_bseq(seq: &str) -> Vec<u8> {
seq.chars()
.map(|c| *(AbpoaAligner::NT4_TABLE).get(c as usize).unwrap())
.collect()
}
    /// Runs a full MSA over `seqs` and decodes the row-major `msa_seq` matrix
    /// abpoa fills in, one `String` per input sequence.
    ///
    /// NOTE(review): the buffers abpoa allocates for `cons_seq`, `cons_c`,
    /// `cons_l` and `msa_seq` are never freed here — presumably leaked;
    /// confirm against abpoa_msa's ownership rules.
    pub unsafe fn align_seqs(&self, seqs: &Vec<&str>) -> AbpoaMSA {
        // Get the number of input sequences
        let n_seqs: c_int = seqs.len() as c_int;
        // Create a Vec with the sequences' length
        let mut seq_lens: Vec<c_int> = seqs.iter().map(|s| s.len() as c_int).collect();
        // Generate bseqs
        let mut bseqs_val: Vec<Vec<u8>> = seqs
            .into_iter()
            .map(|s| AbpoaAligner::convert_seq_to_bseq(s))
            .collect();
        let mut bseqs: Vec<*mut u8> = bseqs_val.iter_mut().map(|s| s.as_mut_ptr()).collect();
        // Now perform the alignment
        // Out-params filled by abpoa_msa; the consensus outputs are unused here.
        let mut cons_seq: *mut *mut u8 = ptr::null_mut();
        let mut cons_c: *mut *mut c_int = ptr::null_mut();
        let mut cons_l: *mut c_int = ptr::null_mut();
        let mut cons_n: c_int = 0;
        let mut msa_seq: *mut *mut u8 = ptr::null_mut();
        let mut msa_l: c_int = 0;
        let out: *mut FILE = ptr::null_mut(); //stdout;
        abpoa_msa(
            self.ab,
            self.abpt,
            n_seqs,
            ptr::null_mut(),
            seq_lens.as_mut_ptr(),
            bseqs.as_mut_ptr(),
            out,
            &mut cons_seq,
            &mut cons_c,
            &mut cons_l,
            &mut cons_n,
            &mut msa_seq,
            &mut msa_l,
        );
        // Read the alignment's results
        // Each row is msa_l base codes; decode through ALN_ALPHABET.
        let mut msa: Vec<String> = Vec::new();
        for i in 0..n_seqs {
            let mut curr_aln = String::with_capacity(msa_l as usize);
            let outer_pointer = *msa_seq.add((i) as usize);
            for j in 0..msa_l {
                let inner_pointer = *(outer_pointer.add(j as usize));
                curr_aln.push(
                    *AbpoaAligner::ALN_ALPHABET
                        .get(inner_pointer as usize)
                        .unwrap(),
                );
            }
            msa.push(curr_aln);
        }
        AbpoaMSA::new_from_alignment(msa, n_seqs as usize, msa_l as usize)
    }
    /// Runs an MSA over `seqs` and decodes only the consensus output.
    ///
    /// NOTE(review): when abpoa reports more than one consensus (`cons_n > 1`),
    /// all of them are concatenated into a single string — confirm this is
    /// intended. The abpoa-allocated output buffers are also never freed here.
    pub unsafe fn consensus_from_seqs(&self, seqs: &Vec<&str>) -> AbpoaCons {
        // Get the number of input sequences
        let n_seqs: c_int = seqs.len() as c_int;
        // Create a Vec with the sequences' length
        let mut seq_lens: Vec<c_int> = seqs.iter().map(|s| s.len() as c_int).collect();
        // Generate bseqs
        let mut bseqs_val: Vec<Vec<u8>> = seqs
            .into_iter()
            .map(|s| AbpoaAligner::convert_seq_to_bseq(s))
            .collect();
        let mut bseqs: Vec<*mut u8> = bseqs_val.iter_mut().map(|s| s.as_mut_ptr()).collect();
        // Now perform the alignment
        // Out-params filled by abpoa_msa; the MSA outputs are unused here.
        let mut cons_seq: *mut *mut u8 = ptr::null_mut();
        let mut cons_c: *mut *mut c_int = ptr::null_mut();
        let mut cons_l: *mut c_int = ptr::null_mut();
        let mut cons_n: c_int = 0;
        let mut msa_seq: *mut *mut u8 = ptr::null_mut();
        let mut msa_l: c_int = 0;
        let out: *mut FILE = ptr::null_mut(); //stdout;
        abpoa_msa(
            self.ab,
            self.abpt,
            n_seqs,
            ptr::null_mut(),
            seq_lens.as_mut_ptr(),
            bseqs.as_mut_ptr(),
            out,
            &mut cons_seq,
            &mut cons_c,
            &mut cons_l,
            &mut cons_n,
            &mut msa_seq,
            &mut msa_l,
        );
        // Read the consensus
        // cons_l[i] is the length of the i-th consensus; decode each base
        // code through CONS_ALPHABET.
        let mut cons = String::with_capacity(*cons_l as usize);
        for i in 0..cons_n {
            let offset = *cons_l.add((i) as usize);
            for j in 0..offset {
                let outer_pointer = *cons_seq.add(i as usize);
                let inner_pointer = *(outer_pointer.add(j as usize));
                cons.push(
                    *AbpoaAligner::CONS_ALPHABET
                        .get(inner_pointer as usize)
                        .unwrap(),
                );
            }
        }
        AbpoaCons::new_from_cons(cons)
    }
    /// Writes the current graph to `path` via abpoa_dump_pog.
    /// NOTE(review): `strdup` allocates, and any previous `out_pog` value is
    /// overwritten without being freed — presumably a small leak; confirm.
    pub unsafe fn print_aln_to_dot(&mut self, path: &str) {
        // Build a C String to store path
        let c_str = CString::new(path).unwrap();
        (*self.abpt).out_pog = strdup(c_str.as_ptr() as *const c_char);
        abpoa_dump_pog(self.ab, self.abpt);
    }
pub unsafe fn add_nodes_from_seq(&mut self, seq: &str) {
let bseq: Vec<u8> = seq
.chars()
.map(|c| *(AbpoaAligner::NT4_TABLE).get(c as usize).unwrap())
.collect();
// First add the nodes to the graph
// NOTE: in abpoa, each node has length 1 (i.e. a single nucleotide)
let ids: Vec<i32> = bseq
.into_iter()
.map(|s| abpoa_add_graph_node((*self.ab).abg, s))
.collect();
assert_eq!(seq.len(), ids.len());
for i in 0..ids.len() {
let id = ids.get(i).unwrap();
let seq = seq.chars().nth(i).unwrap();
self.nodes_str.insert(*id, seq);
}
//Then add the edges between said nodes
ids.windows(2).for_each(|w| {
abpoa_add_graph_edge(
(*self.ab).abg,
*w.get(0).unwrap(),
*w.get(1).unwrap(),
0,
1,
0,
0,
0,
);
self.edges_abpoa
.push((*w.get(0).unwrap(), *w.get(1).unwrap()));
});
// Update wrapper data
self.n_nodes += seq.len();
self.nodes.push(ids.clone());
}
    /// Adds a single node holding the (already NT4-encoded) base; returns its
    /// abpoa node id.
    pub unsafe fn add_node(&mut self, base: u8) -> i32 {
        abpoa_add_graph_node((*self.ab).abg, base)
    }

    /// Adds a weight-1 edge between two abpoa node ids and records it in the
    /// wrapper's edge list.
    pub unsafe fn add_edge(&mut self, from_node_id: i32, to_node_id: i32) {
        abpoa_add_graph_edge((*self.ab).abg, from_node_id, to_node_id, 0, 1, 0, 0, 0);
        self.edges_abpoa.push((from_node_id, to_node_id));
    }
    /// Builds the whole graph: each entry of `nodes` is a sequence (expanded
    /// to one abpoa node per base), and each edge `(a, b)` connects the last
    /// base of node `a` to the first base of node `b`. Also wires abpoa's
    /// virtual SRC/SINK nodes to the detected heads/tails and fills the
    /// abpoa-id -> abstraction-id lookup map.
    pub unsafe fn add_nodes_edges(&mut self, nodes: &Vec<&str>, edges: &Vec<(usize, usize)>) {
        let start_add_nodes = Instant::now();
        // Add nodes
        nodes.iter().for_each(|n| self.add_nodes_from_seq(n));
        info!(
            "Adding the nodes took: {} ms",
            start_add_nodes.elapsed().as_millis()
        );
        // Add edges between nodes
        // Each abstraction-level edge becomes an abpoa edge last-base -> first-base.
        edges.iter().for_each(|e| {
            self.edges.push((e.0, e.1));
            let last_of_start_node = self.nodes.get(e.0).unwrap().last().unwrap().clone();
            let first_of_end_node = self.nodes.get(e.1).unwrap().first().unwrap().clone();
            self.add_edge(last_of_start_node, first_of_end_node);
        });
        let start_head_tails = Instant::now();
        if self.n_nodes > 0 {
            let heads: Vec<i32> = self.find_heads();
            //println!("Heads are: {:#?}", heads);
            for head in heads {
                // Add initial edge -- ABPOA_SRC_NODE_ID has node id 0
                abpoa_add_graph_edge(
                    (*self.ab).abg,
                    ABPOA_SRC_NODE_ID as i32,
                    head,
                    0,
                    1,
                    0,
                    0,
                    0,
                );
                self.edges_abpoa.push((ABPOA_SRC_NODE_ID as i32, head));
            }
            let tails: Vec<i32> = self.find_tails();
            //println!("Tails are: {:#?}", tails);
            for tail in tails {
                // Connect each tail to abpoa's virtual sink node.
                abpoa_add_graph_edge(
                    (*self.ab).abg,
                    tail,
                    ABPOA_SINK_NODE_ID as i32,
                    0,
                    1,
                    0,
                    0,
                    0,
                );
                self.edges_abpoa.push((tail, ABPOA_SINK_NODE_ID as i32));
            }
            info!(
                "Finding heads and tails took: {} ms",
                start_head_tails.elapsed().as_millis()
            );
            let start_map_creation = Instant::now();
            // Build a HashMap having as keys the abpoa_ids and as values
            // the abstraction_id, this will be useful when doing the conversion
            // TODO: would like to use rayon but it does not like the .get()
            for abstraction_id in 0..self.nodes.len() {
                for abpoa_id in self.nodes.get(abstraction_id).unwrap() {
                    self.abpoa_id_to_abstraction_id
                        .insert(*abpoa_id, abstraction_id);
                }
            }
            info!(
                "Creating the map took: {} ms",
                start_map_creation.elapsed().as_millis()
            );
        }
    }
pub unsafe fn find_heads(&self) -> Vec<i32> {
let heads = self
.nodes
.clone()
.into_iter()
.flatten()
.filter(|abpoa_node| self.is_head(abpoa_node))
.collect();
heads
}
fn is_head(&self, node: &i32) -> bool {
let node_appears_as_end_edge = self.edges_abpoa.par_iter().any(|x| x.1 == *node);
return !node_appears_as_end_edge;
}
pub unsafe fn find_tails(&self) -> Vec<i32> {
let tails = self
.nodes
.clone()
.into_iter()
.flatten()
.filter(|abpoa_node| self.is_tail(abpoa_node))
.collect();
tails
}
fn is_tail(&self, node: &i32) -> bool {
let node_appears_as_start_edge = self.edges_abpoa.par_iter().any(|x| x.0 == *node);
return !node_appears_as_start_edge;
}
/// Aligns `seq` against the current abPOA graph and returns the alignment
/// as an `AbpoaAlignmentResult` (compacted cigar string, visited abpoa node
/// ids, their abstraction-node ids, the raw per-base cigar ops and a cs tag).
pub unsafe fn align_sequence(&mut self, seq: &str) -> AbpoaAlignmentResult {
    let mut bseq: Vec<u8> = AbpoaAligner::convert_seq_to_bseq(seq);
    // NOTE(review): `vec![].as_mut_ptr()` points at a temporary Vec that is
    // freed immediately, leaving a dangling pointer. This assumes abpoa
    // overwrites `graph_cigar` with its own allocation before reading it —
    // confirm against the C API.
    let mut res = abpoa_res_t {
        n_cigar: 0,
        m_cigar: 0,
        graph_cigar: vec![].as_mut_ptr(),
        node_s: 0,
        node_e: 0,
        query_s: 0,
        query_e: 0,
        n_aln_bases: 0,
        n_matched_bases: 0,
        best_score: 0,
    };
    abpoa_align_sequence_to_graph(
        self.ab,
        self.abpt,
        bseq.as_mut_ptr(),
        seq.len() as i32,
        &mut res,
    );
    // Create variables to store aln result
    let mut abpoa_ids: Vec<i32> = Vec::new();
    let mut cigar_vec: Vec<char> = Vec::new();
    // Navigate the cigar
    for i in 0..res.n_cigar {
        let curr_cigar = res.graph_cigar.add(i as usize);
        // The low 4 bits of each packed cigar word encode the operation.
        let op = (*curr_cigar & 0xf) as u32;
        let op_char = match op {
            ABPOA_CMATCH => 'M',
            ABPOA_CINS => 'I',
            ABPOA_CDEL => 'D',
            ABPOA_CDIFF => 'X',
            ABPOA_CSOFT_CLIP => 'S',
            ABPOA_CHARD_CLIP => 'H',
            _ => ' ',
        };
        // Bits 34.. of the packed word carry the abpoa node id.
        let node_id: i32 = ((*curr_cigar >> 34) & 0x3fffffff) as i32;
        // Necessary because sometimes abpoa returns weird nodes
        // TODO: figure out why this happens
        if self.abpoa_id_to_abstraction_id.contains_key(&node_id) && op_char != ' ' {
            abpoa_ids.push(node_id);
            cigar_vec.push(op_char);
        }
    }
    // Convert abpoa_ids to abstraction_ids
    let graph_ids: Vec<usize> = abpoa_ids
        .iter()
        .filter_map(|id| self.abpoa_id_to_abstraction_id.get(id))
        .map(|id| *id)
        .collect();
    let mut cigar_string = String::new();
    let mut cs_string = String::from("cs:Z::");
    if !cigar_vec.is_empty() {
        let mut last_char = ' ';
        let mut count = 0;
        // Convert cigar to string: run-length encode consecutive identical ops.
        for char in &cigar_vec {
            if *char != last_char {
                if last_char != ' ' {
                    // This is the initial delimiter for this loop
                    cigar_string.push_str(&mut format!("{}{}", count, last_char));
                }
                last_char = *char;
                count = 1;
            } else {
                count += 1;
            }
        }
        // Flush the final run.
        cigar_string.push_str(&mut format!("{}{}", count, last_char));
        // Obtain cs string
        let mut match_count = 1;
        let mut tmp_string: String = String::new();
        last_char = ' ';
        let mut char = ' ';
        // NOTE(review): when a new I/D run starts, its first base is never
        // appended to `tmp_string` (only later bases, in the else branch),
        // so a single-base insertion/deletion emits an empty sequence —
        // confirm whether this is intended.
        for i in 0..cigar_vec.len() {
            char = *cigar_vec.get(i).unwrap();
            if char != last_char {
                if last_char != ' ' {
                    // This is the initial delimiter for this loop
                    match last_char {
                        'M' => cs_string.push_str(&mut format!("{}", match_count)),
                        'I' => cs_string.push_str(&mut format!("{}{}", '-', tmp_string)),
                        'D' => cs_string.push_str(&mut format!("{}{}", '+', tmp_string)),
                        _ => (),
                    }
                }
                last_char = char;
                // Reset tmp variables
                match_count = 1;
                tmp_string = String::new();
            } else {
                match char {
                    'M' => match_count += 1,
                    // For insertions the base comes from the query sequence.
                    'I' => tmp_string.push(seq.char_indices().nth(i).unwrap().1),
                    // For deletions the base comes from the graph node label.
                    'D' => {
                        let id = abpoa_ids.get(i).unwrap();
                        tmp_string.push(*self.nodes_str.get(id).unwrap())
                    }
                    _ => (),
                }
            }
        }
        // Flush the last pending run into the cs string.
        match char {
            'M' => cs_string.push_str(&mut format!("{}", match_count)),
            'I' => cs_string.push_str(&mut format!("{}{}", '-', tmp_string)),
            'D' => cs_string.push_str(&mut format!("{}{}", '+', tmp_string)),
            _ => (),
        }
    }
    assert_eq!(abpoa_ids.len(), graph_ids.len());
    AbpoaAlignmentResult::new_with_params(
        cigar_string.as_str(),
        abpoa_ids,
        graph_ids,
        &res,
        cigar_vec,
        cs_string.as_str(),
    )
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::abpoa::{abpoa_generate_gfa, stdout};
// ----- Check msa and consensus ("black-box" version) -----
// Aligns ten similar sequences and checks the MSA dimensions
// (one row per input sequence, expected alignment width 75).
#[test]
fn test_aln() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        let seqs: Vec<&str> = [
            "CGTCAATCTATCGAAGCATACGCGGGCAGAGCCGAAGACCTCGGCAATCCA",
            "CCACGTCAATCTATCGAAGCATACGCGGCAGCCGAACTCGACCTCGGCAATCAC",
            "CGTCAATCTATCGAAGCATACGCGGCAGAGCCCGGAAGACCTCGGCAATCAC",
            "CGTCAATGCTAGTCGAAGCAGCTGCGGCAGAGCCGAAGACCTCGGCAATCAC",
            "CGTCAATCTATCGAAGCATTCTACGCGGCAGAGCCGACCTCGGCAATCAC",
            "CGTCAATCTAGAAGCATACGCGGCAAGAGCCGAAGACCTCGGCCAATCAC",
            "CGTCAATCTATCGGTAAAGCATACGCTCTGTAGCCGAAGACCTCGGCAATCAC",
            "CGTCAATCTATCTTCAAGCATACGCGGCAGAGCCGAAGACCTCGGCAATC",
            "CGTCAATGGATCGAGTACGCGGCAGAGCCGAAGACCTCGGCAATCAC",
            "CGTCAATCTAATCGAAGCATACGCGGCAGAGCCGTCTACCTCGGCAATCACGT",
        ]
        .to_vec();
        let aln = aligner.align_seqs(&seqs);
        //aligner.print_aln_to_dot("example.png");
        //aligner.reset_aligner();
        //println!("MSA: {:#?}", aln.msa);
        assert_eq!(aln.n_seqs, seqs.len());
        assert_eq!(aln.msa_length, 75);
    }
}
// Smoke test: builds the consensus of ten sequences. The result is not
// asserted — this only checks the call completes without crashing.
#[test]
fn test_cons() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        let seqs: Vec<&str> = [
            "CGTCAATCTATCGAAGCATACGCGGGCAGAGCCGAAGACCTCGGCAATCCA",
            "CCACGTCAATCTATCGAAGCATACGCGGCAGCCGAACTCGACCTCGGCAATCAC",
            "CGTCAATCTATCGAAGCATACGCGGCAGAGCCCGGAAGACCTCGGCAATCAC",
            "CGTCAATGCTAGTCGAAGCAGCTGCGGCAGAGCCGAAGACCTCGGCAATCAC",
            "CGTCAATCTATCGAAGCATTCTACGCGGCAGAGCCGACCTCGGCAATCAC",
            "CGTCAATCTAGAAGCATACGCGGCAAGAGCCGAAGACCTCGGCCAATCAC",
            "CGTCAATCTATCGGTAAAGCATACGCTCTGTAGCCGAAGACCTCGGCAATCAC",
            "CGTCAATCTATCTTCAAGCATACGCGGCAGAGCCGAAGACCTCGGCAATC",
            "CGTCAATGGATCGAGTACGCGGCAGAGCCGAAGACCTCGGCAATCAC",
            "CGTCAATCTAATCGAAGCATACGCGGCAGAGCCGTCTACCTCGGCAATCACGT",
        ]
        .to_vec();
        let cons = aligner.consensus_from_seqs(&seqs);
        //aligner.print_aln_to_dot("example.png");
        //aligner.reset_aligner();
        //assert_eq!(aln.n_seqs, seqs.len());
        //assert_eq!(aln.msa_length, 75);
    }
}
// ----- Test basic abstraction functionalities -----
// Adding a 4-base sequence creates one abstraction node holding 4 abpoa nodes.
#[test]
fn test_add_nodes() {
    unsafe {
        let mut aligner = AbpoaAligner::new();
        aligner.add_nodes_from_seq("ACGT");
        assert_eq!(aligner.nodes.first().unwrap().len(), 4);
        assert_eq!(aligner.n_nodes, 4);
    }
}
// Three 3-base nodes and two edges: 3 abstraction nodes, 9 abpoa nodes.
#[test]
fn test_add_nodes_and_edges() {
    unsafe {
        let mut aligner = AbpoaAligner::new();
        aligner.add_nodes_edges(&vec!["ACG", "GCT", "TAT"], &vec![(0, 1), (0, 2)]);
        assert_eq!(aligner.nodes.len(), 3);
        assert_eq!(aligner.n_nodes, 9);
        assert_eq!(aligner.edges.len(), 2);
    }
}
// ----- Test graph-to-seq alignment -----
// Exact match of the query against a single-node graph.
#[test]
fn test_alignment_1() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        aligner.add_nodes_edges(&vec!["ACT"], &vec![]);
        let res = aligner.align_sequence("ACT");
        assert_eq!(res.cigar, String::from("3M"));
        // These are the nodes in abpoa (remember, nodes are 1-base only!)
        assert_eq!(res.abpoa_nodes, vec![2, 3, 4]);
        // These are the nodes in our graph abstraction (nodes can have length > 1)
        assert_eq!(res.graph_nodes, vec![0, 0, 0]);
    }
}
// A single-base query aligns to the matching base of the only node.
#[test]
fn test_alignment_2() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        aligner.add_nodes_edges(&vec!["ACT"], &vec![]);
        let res = aligner.align_sequence("T");
        assert_eq!(res.cigar, String::from("1M"));
        // These are the nodes in abpoa (remember, nodes are 1-base only!)
        assert_eq!(res.abpoa_nodes, vec![4]);
        // These are the nodes in our graph abstraction (nodes can have length > 1)
        assert_eq!(res.graph_nodes, vec![0]);
    }
}
// A mismatching base is still reported as 'M' by abpoa (match/mismatch
// are not distinguished) — mirrors the C implementation's behavior.
#[test]
fn test_alignment_3() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        aligner.add_nodes_edges(&vec!["ACG"], &vec![]);
        let res = aligner.align_sequence("ATG");
        //Unexpected but same behavior in C ver
        assert_eq!(res.cigar, String::from("3M"));
        // These are the nodes in abpoa (remember, nodes are 1-base only!)
        assert_eq!(res.abpoa_nodes, vec![2, 3, 4]);
        // These are the nodes in our graph abstraction (nodes can have length > 1)
        assert_eq!(res.graph_nodes, vec![0, 0, 0]);
    }
}
// Cross-checks the wrapper against a low-level re-implementation that
// stays close to the original C API usage.
#[test]
fn test_alignment_3_manual() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        aligner.add_nodes_edges(&vec!["ACG"], &vec![]);
        let res = aligner.align_sequence("ATG");
        // Check against the "manual" version (= less wrapper abstractions used, closer
        // to the original C impl.)
        let res_manual = manual_test_single_node("ACG", "ATG");
        assert_eq!(res.cigar, res_manual.cigar);
        assert_eq!(res.abpoa_nodes, res_manual.abpoa_nodes);
        // Makes no sense to compare res.graph_nodes because it is an abstraction
        // only available in Rust
    }
}
// Two linked nodes: the query matches the first node and the second
// node is reported as a 3-base deletion.
#[test]
fn test_alignment_4_multiple() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        aligner.add_nodes_edges(&vec!["ACG", "AAA"], &vec![(0, 1)]);
        let res = aligner.align_sequence("ATG");
        //Unexpected but same behavior in C ver
        assert_eq!(res.cigar, String::from("3M3D"));
        // These are the nodes in abpoa (remember, nodes are 1-base only!)
        assert_eq!(res.abpoa_nodes, vec![2, 3, 4, 5, 6, 7]);
        // These are the nodes in our graph abstraction (nodes can have length > 1)
        assert_eq!(res.graph_nodes, vec![0, 0, 0, 1, 1, 1]);
    }
}
// Cross-checks the two-node wrapper alignment against the low-level
// re-implementation below.
#[test]
fn test_alignment_4_multiple_manual() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        aligner.add_nodes_edges(&vec!["ACG", "AAA"], &vec![(0, 1)]);
        let res = aligner.align_sequence("ATG");
        // Check against the "manual" version (= less wrapper abstractions used, closer
        // to the original C impl.)
        let res_manual = manual_test_multiple_nodes("ACG", "AAA", "ATG");
        assert_eq!(res.cigar, res_manual.cigar);
        assert_eq!(res.abpoa_nodes, res_manual.abpoa_nodes);
        // Makes no sense to compare res.graph_nodes because it is an abstraction
        // only available in Rust
    }
}
// Low-level reference implementation: builds a single linear chain of
// 1-base abpoa nodes for `seq`, aligns `query` against it, and returns
// the result. Used by the *_manual tests to validate the wrapper.
fn manual_test_single_node(seq: &str, query: &str) -> AbpoaAlignmentResult {
    unsafe {
        let mut aligner = AbpoaAligner::new();
        aligner.set_out_cons(true);
        // NOTE(review): duplicated call, presumably a copy/paste slip — harmless.
        aligner.set_out_cons(true);
        aligner.set_w(6);
        aligner.set_k(9);
        aligner.set_min_w(10);
        aligner.set_progressive_poa(true);
        abpoa_post_set_para(aligner.abpt);
        // Encode the graph sequence as 2-bit bases via the NT4 table.
        let bseq: Vec<u8> = seq
            .chars()
            .map(|c| *(AbpoaAligner::NT4_TABLE).get(c as usize).unwrap())
            .collect();
        let mut ids: Vec<i32> = Vec::new();
        for b in bseq {
            let c_id = aligner.add_node(b);
            ids.push(c_id);
        }
        //println!("ids: {:#?}", ids);
        // Chain the nodes: SRC -> n0 -> n1 -> ... -> SINK.
        let mut prev_node_id = ABPOA_SRC_NODE_ID as i32;
        let mut curr_node_id: i32 = 0;
        for i in 0..ids.len() {
            curr_node_id = *ids.get(i).unwrap();
            aligner.add_edge(prev_node_id, curr_node_id);
            prev_node_id = curr_node_id;
        }
        aligner.add_edge(curr_node_id, ABPOA_SINK_NODE_ID as i32);
        //abpoa_generate_gfa(aligner.ab, aligner.abpt, stdout);
        let mut query_bseq: Vec<u8> = query
            .chars()
            .map(|c| *(AbpoaAligner::NT4_TABLE).get(c as usize).unwrap())
            .collect();
        let mut res = abpoa_res_t {
            n_cigar: 0,
            m_cigar: 0,
            graph_cigar: vec![].as_mut_ptr(),
            node_s: 0,
            node_e: 0,
            query_s: 0,
            query_e: 0,
            n_aln_bases: 0,
            n_matched_bases: 0,
            best_score: 0,
        };
        abpoa_align_sequence_to_graph(
            aligner.ab,
            aligner.abpt,
            query_bseq.as_mut_ptr(),
            query.len() as i32,
            &mut res,
        );
        // Create variables to store aln result
        let mut abpoa_ids: Vec<i32> = Vec::new();
        // Deliberately left empty: graph ids are a Rust-only abstraction
        // and are never compared by the *_manual tests.
        let mut graph_ids: Vec<usize> = Vec::new();
        let mut cigar_vec: Vec<char> = Vec::new();
        // Navigate the cigar
        let mut op: u32 = 0;
        let mut op_char = ' ';
        let mut node_id: i32 = 0;
        for i in 0..res.n_cigar {
            let curr_cigar = res.graph_cigar.add(i as usize);
            op = (*curr_cigar & 0xf) as u32;
            op_char = match op {
                ABPOA_CMATCH => 'M',
                ABPOA_CINS => 'I',
                ABPOA_CDEL => 'D',
                ABPOA_CDIFF => 'X',
                ABPOA_CSOFT_CLIP => 'S',
                ABPOA_CHARD_CLIP => 'H',
                _ => ' ',
            };
            node_id = ((*curr_cigar >> 34) & 0x3fffffff) as i32;
            abpoa_ids.push(node_id);
            cigar_vec.push(op_char);
        }
        //println!("Ids {:#?}", abpoa_ids);
        //println!("Cigar vec {:#?}", cigar_vec);
        // Compact cigar: run-length encode consecutive identical ops.
        let mut cigar_string = String::new();
        if !cigar_vec.is_empty() {
            let mut last_char = ' ';
            let mut count = 0;
            for char in &cigar_vec {
                if *char != last_char {
                    if last_char != ' ' {
                        // This is the initial delimiter
                        cigar_string.push_str(&mut format!("{}{}", count, last_char));
                    }
                    last_char = *char;
                    count = 1;
                } else {
                    count += 1;
                }
            }
            cigar_string.push_str(&mut format!("{}{}", count, last_char));
        }
        AbpoaAlignmentResult::new_with_params(
            cigar_string.as_str(),
            abpoa_ids,
            graph_ids,
            &res,
            cigar_vec,
            String::new().as_str(),
        )
    }
}
// Low-level reference implementation for the two-node case: builds the
// chain SRC -> seq -> seq2 -> SINK out of 1-base abpoa nodes, aligns
// `query`, and returns the result. Largely mirrors
// manual_test_single_node with a second node segment.
fn manual_test_multiple_nodes(seq: &str, seq2: &str, query: &str) -> AbpoaAlignmentResult {
    unsafe {
        let mut aligner = AbpoaAligner::new();
        aligner.set_out_cons(true);
        // NOTE(review): duplicated call, presumably a copy/paste slip — harmless.
        aligner.set_out_cons(true);
        aligner.set_w(6);
        aligner.set_k(9);
        aligner.set_min_w(10);
        aligner.set_progressive_poa(true);
        abpoa_post_set_para(aligner.abpt);
        // Encode and add the first node's bases.
        let bseq: Vec<u8> = seq
            .chars()
            .map(|c| *(AbpoaAligner::NT4_TABLE).get(c as usize).unwrap())
            .collect();
        let mut ids: Vec<i32> = Vec::new();
        for b in bseq {
            let c_id = aligner.add_node(b);
            ids.push(c_id);
        }
        //println!("ids: {:#?}", ids);
        let mut prev_node_id = ABPOA_SRC_NODE_ID as i32;
        let mut curr_node_id: i32 = 0;
        for i in 0..ids.len() {
            curr_node_id = *ids.get(i).unwrap();
            aligner.add_edge(prev_node_id, curr_node_id);
            prev_node_id = curr_node_id;
        }
        // Add node 2
        let bseq2: Vec<u8> = seq2
            .chars()
            .map(|c| *(AbpoaAligner::NT4_TABLE).get(c as usize).unwrap())
            .collect();
        let mut ids2: Vec<i32> = Vec::new();
        for b in bseq2 {
            let c_id = aligner.add_node(b);
            ids2.push(c_id);
        }
        for i in 0..ids2.len() {
            curr_node_id = *ids2.get(i).unwrap();
            aligner.add_edge(prev_node_id, curr_node_id);
            prev_node_id = curr_node_id;
        }
        // Add final edge
        aligner.add_edge(curr_node_id, ABPOA_SINK_NODE_ID as i32);
        //abpoa_generate_gfa(aligner.ab, aligner.abpt, stdout);
        let mut query_bseq: Vec<u8> = query
            .chars()
            .map(|c| *(AbpoaAligner::NT4_TABLE).get(c as usize).unwrap())
            .collect();
        let mut res = abpoa_res_t {
            n_cigar: 0,
            m_cigar: 0,
            graph_cigar: vec![].as_mut_ptr(),
            node_s: 0,
            node_e: 0,
            query_s: 0,
            query_e: 0,
            n_aln_bases: 0,
            n_matched_bases: 0,
            best_score: 0,
        };
        abpoa_align_sequence_to_graph(
            aligner.ab,
            aligner.abpt,
            query_bseq.as_mut_ptr(),
            query.len() as i32,
            &mut res,
        );
        // Create variables to store aln result
        let mut abpoa_ids: Vec<i32> = Vec::new();
        // Deliberately left empty: graph ids are a Rust-only abstraction
        // and are never compared by the *_manual tests.
        let mut graph_ids: Vec<usize> = Vec::new();
        let mut cigar_vec: Vec<char> = Vec::new();
        // Navigate the cigar
        let mut op: u32 = 0;
        let mut op_char = ' ';
        let mut node_id: i32 = 0;
        for i in 0..res.n_cigar {
            let curr_cigar = res.graph_cigar.add(i as usize);
            op = (*curr_cigar & 0xf) as u32;
            op_char = match op {
                ABPOA_CMATCH => 'M',
                ABPOA_CINS => 'I',
                ABPOA_CDEL => 'D',
                ABPOA_CDIFF => 'X',
                ABPOA_CSOFT_CLIP => 'S',
                ABPOA_CHARD_CLIP => 'H',
                _ => ' ',
            };
            node_id = ((*curr_cigar >> 34) & 0x3fffffff) as i32;
            //println!("Node id {} type {}", node_id, op_char);
            abpoa_ids.push(node_id);
            cigar_vec.push(op_char);
        }
        // Compact cigar: run-length encode consecutive identical ops.
        let mut cigar_string = String::new();
        if !cigar_vec.is_empty() {
            let mut last_char = ' ';
            let mut count = 0;
            for char in &cigar_vec {
                if *char != last_char {
                    if last_char != ' ' {
                        // This is the initial delimiter
                        cigar_string.push_str(&mut format!("{}{}", count, last_char));
                    }
                    last_char = *char;
                    count = 1;
                } else {
                    count += 1;
                }
            }
            cigar_string.push_str(&mut format!("{}{}", count, last_char));
        }
        AbpoaAlignmentResult::new_with_params(
            cigar_string.as_str(),
            abpoa_ids,
            graph_ids,
            &res,
            cigar_vec,
            String::new().as_str(),
        )
    }
}
// Smoke test: dumps a two-node graph as GFA to stdout (no assertions).
#[test]
fn add_complex_graph_1() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        aligner.add_nodes_edges(&vec!["ACG", "AAA"], &vec![(0, 1)]);
        abpoa_generate_gfa(aligner.ab, aligner.abpt, stdout);
    }
}
// Smoke test: third node "CC" is added with no incident edges (no assertions).
#[test]
fn add_complex_graph_2() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        aligner.add_nodes_edges(&vec!["ACG", "AAA", "CC"], &vec![(0, 1)]);
        abpoa_generate_gfa(aligner.ab, aligner.abpt, stdout);
    }
}
// Builds a branching 12-node graph, aligns a query and pins the exact
// abpoa node ids visited plus their abstraction-node mapping.
#[test]
fn add_complex_graph_3() {
    unsafe {
        let mut aligner = AbpoaAligner::new_with_example_params();
        let nodes: Vec<&str> = vec![
            "A", "G", "AAAT", "AA", "TTTCT", "GG", "AGTTCTAT", "A", "T", "ATAT", "A", "T",
        ];
        let edges: Vec<(usize, usize)> = vec![
            (0, 2),
            (1, 2),
            (2, 3),
            (2, 4),
            (3, 4),
            (4, 5),
            (4, 6),
            (5, 6),
            (6, 7),
            (6, 8),
            (7, 9),
            (8, 9),
            (9, 10),
            (9, 11),
        ];
        //(0,1), (7,8), (10,11)];
        aligner.add_nodes_edges(&nodes, &edges);
        //println!("Nodes: {:?}", aligner.nodes);
        //println!("Edges: {:?}", aligner.edges);
        //println!("Edges_abpoa: {:?}", aligner.edges_abpoa);
        //abpoa_generate_gfa(aligner.ab, aligner.abpt, stdout);
        let result = aligner.align_sequence("AAATTTGGCAT");
        println!("Result is: {:#?}", result);
        assert_eq!(
            result.abpoa_nodes,
            vec![
                2, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
                27, 28, 29, 30, 31
            ]
        );
        assert_eq!(
            result.graph_nodes,
            vec![0, 2, 2, 2, 2, 4, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 7, 9, 9, 9, 9, 10]
        );
    }
}
}
|
<?php
namespace App\Controllers;
/**
 * Profile page controller: assembles the profile page from the shared
 * header/footer templates and the profile content view.
 */
class Profile extends BaseController {
    /**
     * Render the profile page (header + content + footer) to the response.
     */
    public function index()
    {
        echo view('template/header');
        echo view('content/profile');
        echo view('template/footer');
    }
}
|
'use strict';
var psTree = require('ps-tree');
module.exports = function kill(pid, signal, cb) {
if (!pid) {
throw new Error('You must provide pid to kill.');
}
if (typeof signal === 'function') {
cb = signal;
signal = null;
}
if (!cb) {
throw new Error('You must provide a callback function.');
}
signal = signal || 'SIGKILL';
psTree(pid, function (err, children) {
if (err) {
return cb(err);
}
children.forEach(function (child) {
process.kill(child.PID, signal);
});
process.kill(pid, signal);
cb(null);
});
};
|
package database
import (
"errors"
"fmt"
"github.com/apex/log"
"github.com/gomodule/redigo/redis"
"time"
"unsafe"
)
// SubscribeCallback is invoked for every message received on a subscribed
// channel; it receives the channel name and the raw message payload.
type SubscribeCallback func(channel, message string)

// Subscriber wraps a redis pub/sub connection and dispatches incoming
// messages to per-channel callbacks.
type Subscriber struct {
	client redis.PubSubConn
	// cbMap maps channel name -> callback. Written by Subscribe, read by
	// the receive goroutine started in Connect.
	// NOTE(review): accessed from multiple goroutines without a lock —
	// confirm Subscribe is only called before messages start flowing.
	cbMap map[string]SubscribeCallback
}
// Connect establishes the pub/sub connection and starts a background
// goroutine that receives messages and dispatches them to the callbacks
// registered via Subscribe. On receive errors it retries the connection,
// panicking after more than 5 consecutive failures.
func (c *Subscriber) Connect() {
	conn, err := GetRedisConn()
	if err != nil {
		log.Fatalf("redis dial failed.")
	}
	c.client = redis.PubSubConn{Conn: conn}
	c.cbMap = make(map[string]SubscribeCallback)

	//retry connect redis 5 times, or panic
	index := 0
	go func(i int) {
		for {
			log.Debug("wait...")
			switch res := c.client.Receive().(type) {
			case redis.Message:
				// A successful message resets the consecutive-failure counter.
				i = 0
				// NOTE(review): reinterpreting &res.Channel / &res.Data via
				// unsafe.Pointer avoids a copy but relies on internal header
				// layout; plain `string(res.Data)` would be the safe form.
				channel := (*string)(unsafe.Pointer(&res.Channel))
				message := (*string)(unsafe.Pointer(&res.Data))
				// Guard the lookup: indexing cbMap directly yields a nil
				// function for unregistered channels and would panic here.
				if cb, ok := c.cbMap[*channel]; ok {
					cb(*channel, *message)
				} else {
					log.Errorf("no callback registered for channel %s", *channel)
				}
			case redis.Subscription:
				fmt.Printf("%s: %s %d\n", res.Channel, res.Kind, res.Count)
			case error:
				log.Error("error handle redis connection...")
				time.Sleep(2 * time.Second)
				if i > 5 {
					panic(errors.New("redis connection failed too many times, panic"))
				}
				con, err := GetRedisConn()
				if err != nil {
					log.Error("redis dial failed")
					continue
				}
				// NOTE(review): after reconnecting, previous subscriptions
				// are not re-established on the new connection — confirm
				// whether callers re-Subscribe themselves.
				c.client = redis.PubSubConn{Conn: con}
				i += 1
				continue
			}
		}
	}(index)
}
// Close shuts down the underlying pub/sub connection, logging any error.
func (c *Subscriber) Close() {
	if err := c.client.Close(); err != nil {
		log.Errorf("redis close error.")
	}
}
// Subscribe registers cb for the given channel and issues the SUBSCRIBE
// on the underlying connection. The channel must be a string; any other
// type is rejected with an error log instead of a runtime panic.
func (c *Subscriber) Subscribe(channel interface{}, cb SubscribeCallback) {
	// Validate the dynamic type up front: the previous blind assertion
	// `channel.(string)` panicked after the SUBSCRIBE had already been sent.
	name, ok := channel.(string)
	if !ok {
		log.Errorf("redis Subscribe error: channel is not a string.")
		return
	}
	err := c.client.Subscribe(channel)
	if err != nil {
		log.Fatalf("redis Subscribe error.")
	}
	c.cbMap[name] = cb
}
// Publish sends msg on the given redis channel over a connection obtained
// from GetRedisConn, returning any dial or PUBLISH error.
// NOTE(review): the connection is never closed/released here — if
// GetRedisConn creates a fresh connection per call this leaks one per
// Publish; confirm whether the helper pools connections.
func Publish(channel string, msg string) error {
	c, err := GetRedisConn()
	if err != nil {
		return err
	}
	if _, err := c.Do("PUBLISH", channel, msg); err != nil {
		return err
	}
	return nil
}
|
#!/bin/bash
# Upload a file to S3 under $DATALOAD_S3_ROOT/<project>/ldm/<location>/<sublocation>/.

# Print a usage message assembled from the option comments in the getopts
# case statement below, then exit. $1 is a character class of options to show.
function usage(){
    printf "\n%s [OPTIONS]\n" "$(basename "$0")"
    grep -E "[$1]\) # --.*$" "$0"
    exit 0
}

# Check no arguments: the message promises an exit, so actually exit.
[ $# -eq 0 ] \
    && printf "No arguments provided and exiting\n" \
    && exit 1

# Fail fast when the S3 root is not configured.
: ${DATALOAD_S3_ROOT:?"AWS S3 root path not defined; Exiting Script"}

while getopts "f:l:p:s:u" option; do
    case "$option" in
        f) # -- Fully Qualified Path Name to file
            fqpn=${OPTARG}
            ;;
        l) # -- location (CONUS, CARIB, HAWAII, etc)
            location=${OPTARG^^}
            ;;
        s) # -- sublocation (directory level below location)
            sublocation=${OPTARG}
            ;;
        p) # -- Project name (instrumentation, cumulus, etc)
            # make sure it is lower case
            project=${OPTARG,,}
            ;;
        u) # -- Print usage message
            usage "a-z"
            ;;
    esac
done
shift $(($OPTIND - 1))

# Quote all expansions so values containing spaces do not break the tests.
[ -z "$fqpn" ] \
    && printf "'Filename' not defined and exiting\n" \
    && usage "f"
[ -z "$project" ] \
    && printf "'Project' not defined and exiting\n" \
    && usage "p"
[ -z "$location" ] \
    && printf "'Location' not defined and exiting\n" \
    && usage "l"
[ -z "$sublocation" ] \
    && printf "'Sublocation' not defined and exiting\n" \
    && usage "s"

filename=$(basename "$fqpn")
s3_destination="$DATALOAD_S3_ROOT/$project/ldm/$location/$sublocation/$filename"

# Use a custom endpoint when AWS_ENDPOINT_URL is set (e.g. MinIO or localstack).
[ -z "$AWS_ENDPOINT_URL" ] \
    && aws s3 cp "$fqpn" "$s3_destination" \
    || aws s3 cp --endpoint-url "$AWS_ENDPOINT_URL" "$fqpn" "$s3_destination"

# make sure to exit
exit 0
|
<?php
namespace Curl\Test\Unit;
use shuber\Curl\Curl;
/**
 * Unit tests for the shuber\Curl\Curl wrapper.
 */
class CurlTest extends \PHPUnit_Framework_TestCase {
    /**
     * @test
     */
    function itAllowsAddingHeaders()
    {
        // add_header() should record the header pair in the public $headers array.
        $curl = new Curl;
        $curl->add_header('Expect', '');
        $this->assertEquals(array('Expect' => ''), $curl->headers);
    }
}
|
import 'dart:async';
import 'dart:convert';
import 'dart:isolate';
import 'package:chainmetric/models/readings/readings.dart';
import 'package:chainmetric/shared/logger.dart';
import 'package:flutter/services.dart';
import 'package:streams_channel2/streams_channel2.dart';
import 'package:talos/talos.dart';
import 'package:tuple/tuple.dart';
typedef ReadingsListener = void Function(MetricReadingPoint point);
typedef CancelReadingsListening = void Function();
/// Fetches sensor readings through the "readings" chaincode and decodes the
/// JSON payloads off the main isolate via short-lived worker isolates.
class ReadingsController {
  /// Returns all readings for [assetID], or `null` when the chaincode
  /// returns no data or the evaluation fails (errors are logged).
  static Future<MetricReadings?> getReadings(String assetID) async {
    try {
      final data = await Fabric.evaluateTransaction("readings", "ForAsset", assetID);
      if (data?.isEmpty ?? true) {
        return null;
      }
      final port = ReceivePort();
      // Await the spawn so spawn failures surface inside this try/catch
      // instead of being dropped as an unawaited Future.
      await Isolate.spawn(_unmarshalReadings, Tuple2(data!, port.sendPort));
      return await port.first as MetricReadings?;
    } on Exception catch (e) {
      logger.e("ReadingsController.getReadings: ${e.toString()}");
    }
    return null;
  }

  /// Returns the readings stream for one [metric] of [assetID], or `null`
  /// when there is no data or the evaluation fails (errors are logged).
  static Future<MetricReadingsStream?> getStream(
      String assetID, String metric) async {
    try {
      final data = await Fabric.evaluateTransaction("readings", "ForMetric",
          [assetID, metric]);
      if (data?.isEmpty ?? true) {
        return null;
      }
      final port = ReceivePort();
      // Same as above: keep spawn failures inside the try/catch.
      await Isolate.spawn(_unmarshalStream, Tuple2(data!, port.sendPort));
      return await port.first as MetricReadingsStream?;
    } on Exception catch (e) {
      logger.e("ReadingsController.getStream: ${e.toString()}");
    }
    return null;
  }

  /// Subscribes [listener] to live reading events for [assetID]/[metric].
  /// Returns a function that cancels the subscription.
  static Future<CancelReadingsListening> subscribeToStream(
      String assetID, String metric, ReadingsListener listener) async {
    final cancel = EventSocket.bind((eventArtifact) {
      listener(MetricReadingPoint.fromJson(json.decode(eventArtifact)));
    }, "readings", [assetID, metric]);
    return cancel;
  }

  /// Isolate entry point: decodes a MetricReadings JSON blob and sends it back.
  static Future<void> _unmarshalReadings(Tuple2<String, SendPort> args) async {
    args.item2.send(MetricReadings.fromJson(json.decode(args.item1)));
  }

  /// Isolate entry point: decodes a list of reading points into a stream.
  static Future<void> _unmarshalStream(Tuple2<String, SendPort> args) async {
    args.item2.send(MetricReadingsStream.from(
        MetricReadingPoint.listFromJson(json.decode(args.item1))));
  }
}
|
package me.cassayre.florian.masterproject.legacy.parser
import scala.util.parsing.input.{Reader, Position, NoPosition}
/**
 * Adapts a sequence of lexer tokens to the `Reader` interface expected by
 * Scala parser combinators.
 */
private[parser] class SCTokensReader(tokens: Seq[SCToken]) extends Reader[SCToken] {
  // NOTE(review): `tokens.head` throws on an empty sequence; per the Reader
  // contract callers must check `atEnd` before calling `first` — confirm.
  override def first: SCToken = tokens.head
  override def atEnd: Boolean = tokens.isEmpty
  // Position of the next token, or NoPosition once the input is exhausted.
  override def pos: Position = tokens.headOption.map(_.pos).getOrElse(NoPosition)
  override def rest: Reader[SCToken] = new SCTokensReader(tokens.tail)
}
|
import PropTypes from 'prop-types'
import React from 'react'
import generateScriptLoader from '../util/generateScriptLoader'
import AppLoadingScreen from './AppLoadingScreen'
// todo: investigate this. Doesn't seem like NODE_ENV gets set on sanity.io
const ENV = process.env.NODE_ENV || 'development'
/**
 * Resolve an asset descriptor to a URL.
 *
 * @param {string} staticPath - base path for relative assets (e.g. '/static')
 * @param {{path: string, hash?: string}} item - asset descriptor; absolute
 *   http(s) paths are returned as-is, otherwise the path is joined to
 *   staticPath and an optional cache-busting hash is appended as a query.
 * @returns {string} the resolved asset URL
 */
function assetUrl(staticPath, item) {
  // Absolute URLs pass through untouched.
  if (/^https?:\/\//.test(item.path)) {
    return item.path
  }
  const base = `${staticPath}/${item.path}`
  if (!item.hash) {
    return base
  }
  // Append the hash, respecting an already-present query string.
  const separator = base.includes('?') ? '&' : '?'
  return `${base}${separator}${item.hash}`
}
/**
 * Server-rendered HTML shell for the studio: emits stylesheet/subresource/
 * favicon links in <head>, a loading screen placeholder in #sanity, and an
 * inline bootstrap script that loads the app bundles.
 */
function Document(props) {
  const stylesheets = props.stylesheets.map(item => (
    <link key={item.path} rel="stylesheet" href={assetUrl(props.staticPath, item)} />
  ))
  // Hint the browser to prefetch the script bundles before they execute.
  const subresources = props.scripts.map(item => (
    <link key={item.path} rel="subresource" href={assetUrl(props.staticPath, item)} />
  ))
  const scripts = props.scripts.map(item => assetUrl(props.staticPath, item))
  const scriptLoader = generateScriptLoader(scripts)
  const favicons = props.favicons.map((item, index) => (
    <link key={item.path + index} rel="icon" href={assetUrl(props.staticPath, item)} />
  ))
  return (
    <html>
      <head>
        <meta charSet={props.charset} />
        <title>{props.title}</title>
        <meta name="viewport" content={props.viewport} />
        {stylesheets}
        {subresources}
        {favicons}
      </head>
      <body>
        <div id="sanity">
          <AppLoadingScreen text={props.loading} />
        </div>
        {/* eslint-disable react/no-danger */}
        <script dangerouslySetInnerHTML={{__html: scriptLoader}} />
        {/* eslint-enable react/no-danger */}
      </body>
    </html>
  )
}
// Shape shared by all asset descriptors (stylesheets, scripts, favicons).
const asset = PropTypes.shape({
  path: PropTypes.string.isRequired,
  hash: PropTypes.string
})

// Defaults so the document renders sensibly with no configuration.
Document.defaultProps = {
  charset: 'utf-8',
  title: 'Sanity',
  viewport: 'width=device-width, initial-scale=1',
  loading: 'Sanity Content Studio',
  staticPath: '/static',
  favicons: [{path: 'favicon.ico'}],
  stylesheets: [],
  scripts: []
}

Document.propTypes = {
  charset: PropTypes.string,
  title: PropTypes.string,
  viewport: PropTypes.string,
  loading: PropTypes.node,
  staticPath: PropTypes.string,
  favicons: PropTypes.arrayOf(asset),
  stylesheets: PropTypes.arrayOf(asset),
  scripts: PropTypes.arrayOf(asset)
}
export default Document
|
package providers
import (
"gitlab.com/nxcp/tools/gophercloud"
"gitlab.com/nxcp/tools/gophercloud/pagination"
)
// Provider is the Octavia driver that implements the load balancing mechanism
type Provider struct {
	// Human-readable description for the Provider.
	Description string `json:"description"`
	// Human-readable name for the Provider.
	Name string `json:"name"`
}
// ProviderPage is the page returned by a pager when traversing over a
// collection of providers.
type ProviderPage struct {
	pagination.LinkedPageBase
}
// NextPageURL is invoked when a paginated collection of providers has
// reached the end of a page and the pager seeks to traverse over a new one.
// It extracts the "providers_links" section of the body and derives the
// next page's URL from it.
func (r ProviderPage) NextPageURL() (string, error) {
	var body struct {
		Links []gophercloud.Link `json:"providers_links"`
	}
	if err := r.ExtractInto(&body); err != nil {
		return "", err
	}
	return gophercloud.ExtractNextURL(body.Links)
}
// IsEmpty reports whether this ProviderPage contains no providers.
func (r ProviderPage) IsEmpty() (bool, error) {
	providers, err := ExtractProviders(r)
	return len(providers) == 0, err
}
// ExtractProviders accepts a Page struct, specifically a ProviderPage
// struct, and extracts the elements into a slice of Provider structs. In
// other words, a generic collection is mapped into a relevant slice.
func ExtractProviders(r pagination.Page) ([]Provider, error) {
	var body struct {
		Providers []Provider `json:"providers"`
	}
	err := r.(ProviderPage).ExtractInto(&body)
	return body.Providers, err
}
// commonResult is the base result type embedded by the operation-specific
// result types below; it carries the raw gophercloud response.
type commonResult struct {
	gophercloud.Result
}
// Extract interprets the result body as a single Provider, returning a
// pointer to it together with any deserialization error.
func (r commonResult) Extract() (*Provider, error) {
	var body struct {
		Provider *Provider `json:"provider"`
	}
	err := r.ExtractInto(&body)
	return body.Provider, err
}
// GetResult represents the result of a get operation. Call its Extract
// method to interpret it as a Provider.
type GetResult struct {
	commonResult
}
|
---
layout: page
title: "Página no encontrada"
permalink: /404.html
hide: true
---
Lo sentimos, pero no hemos podido encontrar la página que buscas.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.