text
stringlengths 27
775k
|
|---|
import { Callback, State } from './typedef';
import { SignalIndicator } from '../WebSocket/typedef';
import { SerializerService } from '../Serializer';
const NAME = 'react-use-socket';
export class StorageService<T> {
  readonly #storage: Storage;
  readonly #name: string;
  // In-memory mirror of the persisted state; kept in sync on every write.
  #state: State<T> | null;

  /**
   * @param persist when true use localStorage (survives the browser
   *                session), otherwise sessionStorage.
   * @param name    storage key under which the whole state object lives.
   */
  constructor(persist?: boolean, name = NAME) {
    this.#storage = persist ? localStorage : sessionStorage;
    this.#name = name;
    this.#state = this.#getState();
  }

  // Reports whether the requested Web Storage backend exists in this env.
  static isSupported = (persist?: boolean) => {
    return Boolean(persist ? localStorage : sessionStorage);
  }

  // Returns the stored payload for the given signal, or null when absent.
  get = (signal: SignalIndicator): T | null => {
    if (!this.#state) return null;
    return this.#state[signal];
  };

  // Merges the payload into the existing entry (plain objects are
  // shallow-merged, everything else replaces) and persists the result.
  set = (query: SignalIndicator, payload: T, callback?: Callback) => {
    const comparedPayload = this.#comparePayload(this.#state?.[query], payload);
    const newState = { ...this.#state, [query]: comparedPayload };
    this.#saveState(newState);
    callback?.();
  };

  // Removes a single signal entry and persists the remaining state.
  remove = (signal: SignalIndicator, callback?: Callback) => {
    if (!this.#state) return;
    const { [signal]: removedSignal, ...newState } = this.#state;
    this.#saveState(newState);
    callback?.();
  };

  // Clears the persisted state entirely.
  wipe = () => {
    this.#storage.removeItem(this.#name);
    // BUGFIX: also drop the in-memory cache, otherwise `get` keeps
    // serving stale data after the backing storage has been cleared.
    this.#state = null;
  };

  // Shallow-merges object payloads; non-objects, arrays and mismatched
  // types replace the previous value outright.
  #comparePayload = (oldPayload: T = {} as T, newPayload: T) => {
    if (
      typeof oldPayload !== 'object' ||
      typeof newPayload !== 'object' ||
      typeof oldPayload !== typeof newPayload ||
      Array.isArray(newPayload)
    ) return newPayload;
    return { ...oldPayload, ...newPayload };
  };

  // Loads and deserializes the state from storage (null when unset).
  #getState = (): State<T> | null => {
    const state = this.#storage.getItem(this.#name);
    if (!state) return null;
    return SerializerService.deserializeJSON<State<T>>(state);
  };

  // Persists the state and updates the in-memory mirror.
  #saveState = (state: State<T>) => {
    this.#state = state;
    this.#storage.setItem(this.#name, SerializerService.serializeJSON(state));
  };
}
|
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Andre Anjos <andre.anjos@idiap.ch>
# Wed 11 Dec 15:14:08 2013 CET
#
# Copyright (C) 2011-2013 Idiap Research Institute, Martigny, Switzerland
"""Tests the IO functionality of bob.bio.base.score."""
import numpy
import tempfile
import os
import shutil
import pkg_resources
from .. import score
def test_load_scores():
    """Check that 4- and 5-column score files load identically from the
    plain-text and compressed variants, both through the explicit column
    loaders and through the auto-detecting ``score.scores`` helper."""
    loaders = {'4col': score.four_column, '5col': score.five_column}
    columns = {'4col': 4, '5col': 5}
    for variant, ncols in columns.items():
        plain_file = pkg_resources.resource_filename(
            'bob.bio.base.test', 'data/dev-%s.txt' % variant)
        plain = list(loaders[variant](plain_file))
        assert len(plain) == 910
        assert all(len(row) == ncols for row in plain)

        # The same loader must transparently handle the compressed copy.
        packed_file = pkg_resources.resource_filename(
            'bob.bio.base.test', 'data/dev-%s.tar.gz' % variant)
        packed = list(loaders[variant](packed_file))
        assert len(packed) == len(plain)
        assert all(len(row) == ncols for row in packed)
        assert all(p[i] == q[i] for p, q in zip(packed, plain)
                   for i in range(ncols))

        # Auto-detection of the file format must agree with the explicit
        # column loaders on the plain file...
        plain = list(score.scores(plain_file))
        assert len(plain) == 910
        assert all(len(row) == ncols for row in plain)

        # ...and on the compressed one.
        packed = list(score.scores(packed_file))
        assert len(packed) == len(plain)
        assert all(len(row) == ncols for row in packed)
        assert all(p[i] == q[i] for p, q in zip(packed, plain)
                   for i in range(ncols))
def test_split_vuln_scores():
    """Tests that vulnerability score files are loaded correctly."""
    csv_path = pkg_resources.resource_filename(
        'bob.bio.base.test', 'data/vuln/scores-dev.csv')
    groups = score.split_csv_vuln(csv_path)
    # The loader must produce all three licit/spoof groups, each as a
    # numpy array with the expected number of scores.
    for key in ("licit_neg", "licit_pos", "spoof"):
        assert key in groups
    for values in groups.values():
        assert isinstance(values, numpy.ndarray)
        assert len(values) == 5000
def test_split_scores():
    """Check that splitting score files into negatives/positives yields
    the expected counts for plain and compressed 4/5-column files."""
    splitters = {'4col': score.split_four_column,
                 '5col': score.split_five_column}

    def check(negatives, positives):
        # The dev files ship a fixed number of impostor/genuine scores.
        assert len(negatives) == 520, len(negatives)
        assert len(positives) == 390, len(positives)

    for variant in splitters:
        plain_file = pkg_resources.resource_filename(
            'bob.bio.base.test', 'data/dev-%s.txt' % variant)
        check(*splitters[variant](plain_file))

        # The explicit splitters must handle the compressed copy too.
        packed_file = pkg_resources.resource_filename(
            'bob.bio.base.test', 'data/dev-%s.tar.gz' % variant)
        check(*splitters[variant](packed_file))

        # The auto-detecting splitter must agree on both variants.
        check(*score.split(plain_file))
        check(*score.split(packed_file))
def test_load_score():
    """Check ``score.load_score`` on plain and compressed score files.

    Verifies record counts, the number of named dtype fields, field-wise
    equality between plain and compressed variants, and the reduced dtype
    produced by ``minimal=True``.
    """
    # Removed: an unused `scores = []` local and a duplicated
    # `compressed_score_file` assignment from the original.
    cols = {'4col': 4, '5col': 5}
    for variant in cols:
        normal_score_file = pkg_resources.resource_filename(
            'bob.bio.base.test', 'data/dev-%s.txt' % variant)
        normal_scores = score.load_score(normal_score_file, cols[variant])
        assert len(normal_scores) == 910
        assert len(normal_scores.dtype) == cols[variant]

        # The compressed copy must load to field-wise identical data.
        compressed_score_file = pkg_resources.resource_filename(
            'bob.bio.base.test', 'data/dev-%s.tar.gz' % variant)
        compressed_scores = score.load_score(compressed_score_file, cols[variant])
        assert len(compressed_scores) == len(normal_scores)
        assert len(compressed_scores.dtype) == cols[variant]
        for name in normal_scores.dtype.names:
            assert all(normal_scores[name] == compressed_scores[name])

        # Minimal loading keeps only claimed_id / real_id / score.
        minimal_scores = score.load_score(normal_score_file, minimal=True)
        assert len(minimal_scores) == 910
        assert len(minimal_scores.dtype) == 3
        assert minimal_scores.dtype.names == ('claimed_id', 'real_id', 'score')
def test_dump_score():
    """Check that dumping scores and re-loading them round-trips exactly."""
    # Removed: an unused `scores = []` local from the original.
    cols = {'4col': 4, '5col': 5}
    for variant in cols:
        normal_score_file = pkg_resources.resource_filename(
            'bob.bio.base.test', 'data/dev-%s.txt' % variant)
        normal_scores = score.load_score(normal_score_file, cols[variant])
        # Round-trip through an anonymous temporary file.
        with tempfile.TemporaryFile() as f:
            score.dump_score(f, normal_scores)
            f.seek(0)
            loaded_scores = score.load_score(f, cols[variant])
            for name in normal_scores.dtype.names:
                assert all(normal_scores[name] == loaded_scores[name])
def _check_binary_identical(name1, name2):
# see: http://www.peterbe.com/plog/using-md5-to-check-equality-between-files
from hashlib import md5
# tests if two files are binary identical
with open(name1, 'rb') as f1:
with open(name2, 'rb') as f2:
assert md5(f1.read()).digest() == md5(f2.read()).digest()
|
@file:JvmName("BunchCleanup")
package org.jetbrains.bunches.cleanup
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.flag
import com.github.ajalt.clikt.parameters.options.option
import org.jetbrains.bunches.file.BUNCH_FILE_NAME
import org.jetbrains.bunches.file.readExtensionsFromFile
import org.jetbrains.bunches.file.resultWithExit
import org.jetbrains.bunches.general.BunchSubCommand
import org.jetbrains.bunches.general.exitWithError
import org.jetbrains.bunches.git.*
import java.io.File
// Placeholder in commit titles substituted with the extension being removed.
const val EXT_PATTERN = "{ext}"
// Default commit-message template for cleanup commits.
const val DEFAULT_CLEANUP_COMMIT_TITLE = "~~~~ cleanup $EXT_PATTERN ~~~~"
// Help text shown for the `cleanup` sub-command.
const val CLEANUP_DESCRIPTION = "Removes bunch file from repository directory."
/**
 * Options controlling a single cleanup run.
 *
 * @property repoPath path to the git repository root to clean.
 * @property bunchPath path used to locate the bunch extensions file.
 * @property extension single extension to remove; when null, every
 *   extension listed in the bunch file is removed.
 * @property commitTitle commit message template; occurrences of
 *   [EXT_PATTERN] are substituted with the removed extension.
 * @property isNoCommit when true, files are deleted but nothing is committed.
 */
data class Settings(
    val repoPath: String,
    val bunchPath: String,
    val extension: String?,
    val commitTitle: String? = DEFAULT_CLEANUP_COMMIT_TITLE,
    val isNoCommit: Boolean
)
/**
 * CLI wiring for the `cleanup` sub-command: parses the options and
 * forwards them to [cleanup] as a [Settings] instance.
 */
class CleanUpCommand : BunchSubCommand(
    name = "cleanup",
    help = CLEANUP_DESCRIPTION
) {
    val repoPath by repoPathOption()

    // Optional single extension; when absent, every extension listed in the
    // bunch file is removed.
    val extension by option(
        "--ext",
        help = "Particular extension for remove. " +
                "All files with extensions found in '$BUNCH_FILE_NAME' file will be removed if not set."
    )

    private val isNoCommit by option("--no-commit", help = "Do not commit changes. -m option will be ignored").flag()

    private val commitTitle by option(
        "-m",
        help = "Title for the cleanup commit. " +
                "\"$DEFAULT_CLEANUP_COMMIT_TITLE\" is used by default."
    ).default(DEFAULT_CLEANUP_COMMIT_TITLE)

    override fun run() {
        val settings = Settings(
            repoPath = repoPath.toString(),
            // NOTE(review): bunchPath mirrors repoPath — the bunch file is
            // expected at the repository root.
            bunchPath = repoPath.toString(),
            extension = extension,
            commitTitle = commitTitle,
            isNoCommit = isNoCommit
        )
        process { cleanup(settings) }
    }
}
/** Standalone entry point: runs the cleanup sub-command directly. */
fun main(args: Array<String>) = CleanUpCommand().main(args)
/**
 * Deletes bunch files from the repository tree and, unless
 * [Settings.isNoCommit] is set, commits the removals.
 *
 * Exits with an error when the working tree is dirty (and a commit was
 * requested), when [Settings.repoPath] is not a git root, or when a
 * matching path turns out to be a directory.
 */
fun cleanup(settings: Settings) {
    if (!settings.isNoCommit) {
        // Refuse to create a cleanup commit on top of a dirty working tree.
        uncommittedChanges(settings.repoPath).checkAndExitIfNeeded {
            exitWithError("Can not commit changes for cleanup with uncommitted changes.")
        }
    }

    // Either the single requested extension or every extension from the
    // bunch file, normalized to ".ext" suffix form.
    val extensions = if (settings.extension != null) {
        setOf(settings.extension)
    } else {
        readExtensionsFromFile(settings.bunchPath).resultWithExit().toSet()
    }.map { ".$it" }

    val root = File(settings.repoPath)
    if (!isGitRoot(root)) {
        exitWithError("Repository directory with branch is expected")
    }

    val gitignoreParseResult = parseGitIgnore(root)

    // Walk the tree, skipping ignored directories, collecting every file
    // whose name ends with one of the bunch extensions.
    val filesWithExtensions = root
        .walkTopDown()
        .onEnter { dir -> !shouldIgnoreDir(dir, root, gitignoreParseResult) }
        .filter { child -> extensions.any { child.name.endsWith(it) } }
        .toList()

    val changedFiles = ArrayList<FileChange>()
    for (cleanupFile in filesWithExtensions) {
        if (cleanupFile.isDirectory) {
            exitWithError("Bunch directories are not supported: $cleanupFile")
        }
        cleanupFile.delete()
        changedFiles.add(FileChange(ChangeType.REMOVE, cleanupFile))
    }

    if (settings.isNoCommit) {
        return
    }

    val extValue = settings.extension ?: ""
    // FIX: replace the `!!` assertion with an explicit fallback to the
    // default template, so a null title can never crash the run.
    val commitTitle = (settings.commitTitle ?: DEFAULT_CLEANUP_COMMIT_TITLE)
        .replace(EXT_PATTERN, extValue)
    commitChanges(settings.repoPath, changedFiles, commitTitle)
}
|
package com.xbw.spring.boot.beetl;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.xbw.spring.boot.project.dao.UserDao;
import com.xbw.spring.boot.project.model.User;
import com.xbw.spring.boot.project.service.UserService;
import org.beetl.sql.core.SQLManager;
import org.beetl.sql.core.engine.PageQuery;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.List;
import java.util.Map;
@RunWith(SpringRunner.class)
@SpringBootTest
public class BeetlSqlTests {

    // Shared mapper: ObjectMapper is safe to reuse and relatively expensive
    // to construct, so avoid creating a new one per print statement.
    private final ObjectMapper mapper = new ObjectMapper();

    @Autowired
    SQLManager sqlManager;

    @Autowired
    private UserService userService;

    @Autowired
    private UserDao userDao;

    /** Inserts through DAO and service, with and without key backfill. */
    @Test
    public void testInsert() throws Exception {
        User user = new User("user", "user");
        userDao.insert(user);
        System.out.println(mapper.writeValueAsString(user));
        userService.insert(user);
        System.out.println(mapper.writeValueAsString(user));
        userDao.insert(user, true);
        System.out.println(mapper.writeValueAsString(user));
        userService.insert(user, true);
        System.out.println(mapper.writeValueAsString(user));
    }

    /** Loads a row by id and updates it via DAO and service. */
    @Test
    public void testUpdate() throws Exception {
        // Use the uppercase long suffix: `1l` is easily misread as `11`.
        User user = userDao.single(1L);
        // user = userDao.unique(1L);
        if (user != null) {
            user.setUserCode("test");
            user.setUserName("test");
            userDao.updateById(user);
            userService.updateById(user);
        }
    }

    @Test
    public void testFindAll() {
        userDao.all();
        userService.all();
    }

    /** Runs the markdown-defined SQL into beans and into raw maps. */
    @Test
    public void testMd() throws JsonProcessingException {
        List<User> users = sqlManager.select("user.findAll", User.class);
        System.out.println(mapper.writeValueAsString(users));
        List<Map> list = sqlManager.select("user.findAll", Map.class);
        System.out.println(mapper.writeValueAsString(list));
    }

    @Test
    public void testOthers() throws JsonProcessingException {
        System.out.println(mapper.writeValueAsString(userDao.findByUserCode("test")));
        userDao.deleteByUserCode("test1");
    }

    /** Exercises query-builder pagination. */
    @Test
    public void testPage() {
        PageQuery<User> page = sqlManager.query(User.class).page(1, 10);
        System.out.println(page.getList());
        System.out.println(page.getTotalRow());
        System.out.println(page.getTotalPage());
    }

    @Test
    public void testDelete() throws Exception {
        // Primary key is mapped via @Column("user_id").
        userDao.deleteById(9L);
        userDao.deleteByUserCode("beetlsql");
        userService.deleteById(9L);
    }
}
|
---
autoGroup-2: 基本概念
title: JVM内存结构
---
### JVM内存结构
**PS:从processon.com上借鉴是最高效的**
<img :src="$withBase('/assets/img/jvm/jvm-1-1-1.png')" width="1000" height="800">
#### 大致流程
1. 类加载子系统将.class文件进行一系列加载,加载到运行时数据区
2. 执行引擎执行运行时数据区的内容
3. 执行完成后，由垃圾回收器回收运行时数据区中不再使用的内存
|
v$session视图 中LAST_CALL_ET列的 重要性
好吧，我们都知道 v$session 视图在 Oracle 中的重要性。我们经常在此视图中查找状态、sid、sql_text、程序名称和其他与对象相关的信息。但是其中还有一个重要的列经常被忽略，即 LAST_CALL_ET。
如果会话STATUS当前处于活动状态,则该值表示自会话变为活动状态以来经过的时间(以秒为单位)。
如果会话状态当前为非活动状态,则该值表示自会话变为非活动状态起所经过的时间(以秒为单位)。
因此，这是一个很有用的列，可以用来了解系统上各会话正在执行的操作，例如：
select s.sid||','||s.serial# sess,
s.USERNAME,
s.last_call_et,
s.status,
s.sql_address,
s.program
from v$session s
where ( s.status = 'ACTIVE' and s.last_call_et > 10 ) or -- has been active for 10 seconds or more
( s.status != 'ACTIVE' and s.last_call_et > 1200 ); -- has been inactive for 20 mins or more
如果长时间无操作对于应用连接池来说应该应用没有正确关闭连接,发现连接池泄露时执行的SQL语句
select sid,serial#,username,trunc
(last_call_et/3600,2)||' hr'
last_call_et
from V$session where
last_call_et > 3600 and username is not null;
select
ses.username,
ses.machine,
ses.program,
ses.status,
ses.last_call_et,
sql.hash_value,
sql.sql_text
from
v$session ses,
v$sql sql
where
ses.sql_hash_value = sql.hash_value
and
ses.type = 'USER';
|
# 爬虫项目
> powered by
## TODO
+ 提供自定义模板,提供自定义规则
+ 使用 `electron` 打包并发布到release
|
struct Solution;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::iter::FromIterator;

impl Solution {
    /// LeetCode 1654: minimum number of jumps needed to reach `x` from 0.
    ///
    /// From position `pos` the bug may jump forward by `a`, or backward by
    /// `b` (never twice backward in a row, never below 0, never onto a
    /// forbidden position). Returns the minimum jump count, or -1 when `x`
    /// is unreachable.
    fn minimum_jumps(forbidden: Vec<i32>, a: i32, b: i32, x: i32) -> i32 {
        // BFS over states (position, arrived-by-back-jump): the first time
        // `x` is dequeued, `step` is the minimum jump count.
        let mut visited: HashSet<(i32, bool)> = HashSet::new();
        let mut queue: VecDeque<(i32, bool, i32)> = VecDeque::new();
        // Upper bound on useful positions: beyond this, extra forward jumps
        // can never help reach `x`.
        let max_pos = forbidden.iter().max().unwrap_or(&0) + a + b + x;
        let forbidden: HashSet<i32> = HashSet::from_iter(forbidden);
        visited.insert((0, false));
        queue.push_back((0, false, 0));
        while let Some((pos, backjump, step)) = queue.pop_front() {
            // FIX: removed a leftover `dbg!(pos)` debug print.
            if pos == x {
                return step;
            }
            // A forward jump is always permitted.
            let next_pos = pos + a;
            if next_pos <= max_pos
                && !forbidden.contains(&next_pos)
                && visited.insert((next_pos, false))
            {
                queue.push_back((next_pos, false, step + 1));
            }
            // A backward jump is allowed only if the previous jump was not
            // itself backward.
            if !backjump {
                let next_pos = pos - b;
                if next_pos >= 0
                    && !forbidden.contains(&next_pos)
                    && visited.insert((next_pos, true))
                {
                    queue.push_back((next_pos, true, step + 1));
                }
            }
        }
        -1
    }
}
#[test]
fn test() {
    // (forbidden, a, b, x, expected minimum jumps)
    let cases = vec![
        (vec![14, 4, 18, 1, 15], 3, 15, 9, 3),
        (vec![8, 3, 16, 6, 12, 20], 15, 13, 11, -1),
        (vec![1, 6, 2, 14, 5, 17, 4], 16, 9, 7, 2),
    ];
    for (forbidden, a, b, x, expected) in cases {
        assert_eq!(Solution::minimum_jumps(forbidden, a, b, x), expected);
    }
}
|
package com.jackwdev
object BinarySearch {

  /**
   * Classic binary search over a sorted array (recursive).
   *
   * The caller supplies the search window as [startIndex, endIndex]
   * (both inclusive), which lets the same array be reused across every
   * recursive call instead of slicing it.
   *
   * Time complexity: best O(1), average/worst O(log n).
   *
   * @return the index of `searchFor` within the window, or -1 if absent.
   */
  def binarySearch(arr: Array[Int], searchFor: Int, startIndex: Int, endIndex: Int): Int =
    if (endIndex < startIndex) -1 // window exhausted: value not present
    else {
      // Overflow-safe midpoint of the current window.
      val mid = startIndex + (endIndex - startIndex) / 2
      if (arr(mid) == searchFor) mid
      // Target smaller than the middle element: recurse into the left half;
      // otherwise it can only be in the right half.
      else if (arr(mid) > searchFor) binarySearch(arr, searchFor, startIndex, mid - 1)
      else binarySearch(arr, searchFor, mid + 1, endIndex)
    }
}
|
<?php namespace Kareem3d\PathManager;
abstract class Path {

    /**
     * Prepared path string for this instance.
     *
     * @var string
     */
    protected $path;

    /**
     * Base url.
     *
     * @var string
     */
    protected static $baseUrl;

    /**
     * Base server path.
     *
     * @var string
     */
    protected static $basePath;

    /**
     * Constructor.
     *
     * @param string $path
     * @return void
     */
    public function __construct( $path )
    {
        $this->path = $this->prepare($path);
    }

    /**
     * Initialize path class with the application's base url and path.
     * Must be called before any url/path conversion is attempted.
     *
     * @param string $baseUrl
     * @param string $basePath
     * @return void
     */
    public static function init( $baseUrl, $basePath )
    {
        static::$baseUrl = rtrim($baseUrl, '\\/');
        static::$basePath = static::prepare($basePath);
    }

    /**
     * Get base path.
     *
     * @throws PathException when init() has not been called.
     * @return string
     */
    public static function getBasePath()
    {
        if(! static::$basePath) throw new PathException("Path class not initialized.");
        return static::$basePath;
    }

    /**
     * Get base url.
     *
     * @throws PathException when init() has not been called.
     * @return string
     */
    public static function getBaseUrl()
    {
        if(! static::$baseUrl) throw new PathException("Path class not initialized.");
        return static::$baseUrl;
    }

    /**
     * Make a Path instance pointing at the configured base path.
     *
     * @return Path
     */
    public static function makeFromBase()
    {
        return static::make(static::getBasePath());
    }

    /**
     * Make a File or Directory instance from the given path or local url.
     *
     * @throws PathException when the path is neither file nor directory.
     * @param string $path
     * @return Path
     */
    public static function make( $path )
    {
        // Check if path is local url
        if(static::isLocalUrl($path)) return static::make(static::convertToPath($path));

        // Get path info to determine if it's a file or directory.
        $pathinfo = pathinfo($path);

        // If there's no extension or the directory does exists then return new Directory object
        if(! isset($pathinfo['extension']) || is_dir($path)) return new Directory( $path );

        // If there's an extension or the file does exists the return new File object
        if( isset($pathinfo['extension']) || is_file($path)) return new File( $path );

        throw new PathException('Given path is invalid.');
    }

    /**
     * Determine if the url is local (contains the configured base url).
     *
     * @param string $url
     * @return boolean
     */
    public static function isLocalUrl( $url )
    {
        // FIX: strpos() returns false when not found and 0 when the match is
        // at the start; the original `> -1` comparison relied on PHP's loose
        // bool/int coercion. Use a strict comparison instead.
        return strpos($url, static::getBaseUrl()) !== false;
    }

    /**
     * Convert to path from url.
     *
     * @param string $url
     * @return string
     */
    public static function convertToPath( $url )
    {
        return static::prepare(str_replace(static::getBaseUrl(), static::getBasePath(), $url));
    }

    /**
     * Convert to url from path (always forward-slash separated).
     *
     * @param string $path
     * @return string
     */
    public static function convertToUrl( $path )
    {
        return str_replace('\\', '/', str_replace(static::getBasePath(), static::getBaseUrl(), static::prepare($path)));
    }

    /**
     * Get server directory separator.
     *
     * @return string
     */
    public static function ds()
    {
        return DIRECTORY_SEPARATOR;
    }

    /**
     * Prepare path: normalize both separator styles to the server's
     * separator and strip any trailing separator.
     *
     * @param $path
     * @return string
     */
    protected static function prepare( $path )
    {
        $path = str_replace('\\', static::ds(), $path);
        $path = str_replace('/', static::ds(), $path);

        return rtrim($path, '\\/');
    }

    /**
     * Convert current path to url.
     *
     * @return string
     */
    public function toUrl()
    {
        return $this->convertToUrl($this);
    }

    /**
     * Get parent directory for this path.
     *
     * @return Directory
     */
    public function getParentDirectory()
    {
        return new Directory($this->getPathInfo('dirname'));
    }

    /**
     * Get path info with key ('' when the key is absent).
     *
     * @param string $key
     * @return string
     */
    public function getPathInfo( $key )
    {
        $pathinfo = pathinfo($this);

        return isset($pathinfo[$key]) ? $pathinfo[$key] : '';
    }

    /**
     * String representation for this class.
     *
     * @return string
     */
    public function __toString()
    {
        return $this->path;
    }

    /**
     * Copy this file or directory to the given directory.
     *
     * @param Path $path
     * @return void
     */
    public abstract function copy( Path $path );

    /**
     * Move this file or directory to the given directory.
     *
     * @param Path $path
     * @return void
     */
    public abstract function move( Path $path );

    /**
     * Delete this file or directory.
     *
     * @return void
     */
    public abstract function delete();

    /**
     * Make this path unique by changing its name if it exists.
     *
     * @return void
     */
    public abstract function makeUnique();

    /**
     * Make sure this path exists.
     * This method will create all directories along the way to
     * make sure this directory does exist.
     * This method might take some time.. Don't use a lot!
     *
     * @throws PathException
     * @param int $permissions
     * @return void
     */
    public abstract function makeSureItExists( $permissions = 0755 );
}
|
use std::sync::{Arc, RwLock};
use common::msg::Msg;
use common::types::LocalizedDetection;
use common::types::NavigatorState;
use common::types::RobotParams;
use common::types::SystemStatus;
use common::utils::LastValue;
use gdnative::api::*;
use gdnative::prelude::*;
use crate::components::Context;
use crate::ui_state::MirrorsState;
use tokio;
use tokio::sync::watch::Receiver;
use crate::components::traits::Updatable;
use crate::utils::find_node;
use crate::{watch_msg, watch_msg_once};
use common::types::TrackingState;
/// UI component that mirrors robot status channels into Godot signals.
pub struct Status {
    // Latest tracking state received from the robot (None until first msg).
    tracking_state: Receiver<Option<TrackingState>>,
    // Latest-value holders: `pop` yields the newest unseen message, if any.
    robot_params: Arc<RwLock<LastValue<RobotParams>>>,
    navigator_state: Arc<RwLock<LastValue<NavigatorState>>>,
    system_status: Arc<RwLock<LastValue<SystemStatus>>>,
    localized_detections: Arc<RwLock<LastValue<Vec<LocalizedDetection>>>>,
    // Full mirror of the UI state store.
    ui_state: tokio::sync::watch::Receiver<MirrorsState>,
    // Set to true once the first RobotParams message is observed; used to
    // stop the polling task spawned in `new`.
    got_first_robot_params_update: Arc<RwLock<bool>>,
    // Scale factor applied to positions before they are emitted to the UI.
    viz_scale: Receiver<f64>,
}
impl Status {
    /// Builds the component: subscribes to robot message channels, seeds the
    /// connection-address widgets from the stored UI state, and starts a
    /// polling task that requests `RobotParams` until the first reply.
    pub fn new(owner: TRef<Node>, _path: String, context: &mut Context) -> Self {
        let tracking_state = watch_msg!(context, Msg::TrackingState);
        let robot_params = watch_msg_once!(context, Msg::RobotParams);
        let navigator_state = watch_msg_once!(context, Msg::NavigatorState);
        let system_status = watch_msg_once!(context, Msg::SystemStatus);
        let localized_detections = watch_msg_once!(context, Msg::LocalizedDetections);
        let viz_scale = context.ui_state.watch(|s| s.map_to_viz_scale());
        // set initial value for connection address
        // NOTE(review): `first().unwrap()` assumes at least one stored robot
        // address — confirm this invariant is guaranteed by the state store.
        find_node::<LineEdit>(&*owner, "ConnectionAddress".into()).set_text(
            context
                .ui_state
                .state
                .read()
                .unwrap()
                .robot_addresses
                .first()
                .unwrap(),
        );
        // set initial values to connection history
        let connection_history = find_node::<MenuButton>(&*owner, "ConnectionHistory".into());
        for (idx, robot_address) in context
            .ui_state
            .state
            .read()
            .unwrap()
            .robot_addresses
            .iter()
            .enumerate()
        {
            // get_popup() returns a raw GDNative reference; assume_safe()
            // upgrades it for the duration of this call.
            unsafe {
                connection_history
                    .get_popup()
                    .unwrap()
                    .assume_safe()
                    .add_item(robot_address, idx as i64, 0);
            }
        }
        // HACK poll the robot until Grpc connection is established
        let got_first_robot_params_update = Arc::new(RwLock::new(false));
        {
            let publisher = context.broadcaster.publisher();
            let got_first_robot_params_update = Arc::clone(&got_first_robot_params_update);
            context.runtime().spawn(async move {
                // Re-send the request every 300 ms until `update` observes
                // the first RobotParams message and flips the flag.
                while !*got_first_robot_params_update.read().unwrap() {
                    publisher.send(Msg::RequestRobotParams).ok();
                    tokio::time::sleep(tokio::time::Duration::from_millis(300)).await;
                }
            });
        }
        Status {
            tracking_state,
            robot_params,
            navigator_state,
            system_status,
            localized_detections,
            ui_state: context.ui_state.watch_all(),
            viz_scale,
            got_first_robot_params_update,
        }
    }
}
impl Updatable for Status {
    /// Drains the newest message from each channel, rescales positions by
    /// `viz_scale`, and re-emits the data as Godot signals on `owner`.
    fn update(&self, owner: &Node) {
        let tracking_state = &*self.tracking_state.borrow();
        if let Some(tracking_state) = tracking_state {
            let track_label = find_node::<Label>(&*owner, "TrackingStateLabel".into());
            track_label.set_text(format!("{:?}", tracking_state));
        }
        let viz_scale = *self.viz_scale.borrow();
        if let Some(ref mut robot_params) = self.robot_params.write().unwrap().pop() {
            // First RobotParams reply: stops the polling task started in `new`.
            *self.got_first_robot_params_update.write().unwrap() = true;
            // Scale map waypoints into visualization coordinates.
            for map in &mut robot_params.maps {
                for waypoint in &mut map.waypoints {
                    waypoint.mul(viz_scale);
                }
            }
            owner.emit_signal("robot_params", &[robot_params.to_variant()]);
        }
        if let Some(mut navigator_state) = self.navigator_state.write().unwrap().pop() {
            // The navigation goal, if present, is scaled like the waypoints.
            if let Some(ref mut goal) = navigator_state.goal {
                goal.mul(viz_scale);
            }
            owner.emit_signal("navigator_state", &[navigator_state.to_variant()]);
        }
        if let Some(system_status) = self.system_status.write().unwrap().pop() {
            owner.emit_signal("system_status", &[system_status.to_variant()]);
        }
        if let Some(mut localized_detections) = self.localized_detections.write().unwrap().pop() {
            // Scale detection centers and their landmark points.
            for detection in localized_detections.iter_mut() {
                detection.center *= viz_scale;
                for lm in detection.landmarks.iter_mut() {
                    lm.point *= viz_scale;
                }
            }
            owner.emit_signal("localized_detections", &[localized_detections.to_variant()]);
        }
        // TODO only emit if changed
        let ui_state = self.ui_state.borrow();
        owner.emit_signal("ui_state", &[ui_state.to_variant()]);
    }
}
|
# Strip boilerplate from every UCR profile page and append the cleaned
# text to cleanedProfiles.txt.
for filename in UCRProfile-*.htm
do
    # FIX: quote the variable — unquoted $filename breaks on file names
    # containing spaces or glob characters.
    echo "$filename"
    # NOTE(review): 's/\ //g' deletes every space character; it looks like it
    # may have been meant to strip '&nbsp;' — confirm against the input files.
    sed "3,74d" "$filename" | sed -e 's/\ //g; s/<[^>]*>//g; /<\!/,/>;/d; /^[[:space:]]*$/d;' |
    sed "1,27d" | sed -e '/ choose a /d' -e '/Browse/d' |
    tac | sed "1,1167d" | tac >> 'cleanedProfiles.txt'
done
|
import { dispatchWrap } from '@/utils/dispatchUtil';
import { ForgetModelType, ForgetStateType } from './model';
import { ConnectState } from '@/typings/connect';
/** Props derived from the global store for the forget-password page. */
export interface ForgetPageModelState extends ForgetStateType {
  submitting: any
}

/** Maps the `forget` model slice plus the submit-effect loading flag. */
export function mapStateToProps(state: ConnectState<ForgetStateType>): ForgetPageModelState {
  const { forget, loading } = state;
  return {
    ...forget,
    submitting: loading.effects['forget/submit'],
  };
}
// Union of every effect/reducer name exposed by the forget model.
type KeyMap = keyof ForgetModelType['effects'] | keyof ForgetModelType['reducers'];

/** Payload for requesting an SMS verification code. */
export interface GetCaptchasPayload {
  mobile_number: number;
  usage_type: string;
  country_code: number | string;
}

/** Payload for submitting the password-reset form. */
export interface SubmitPayload {
  mobile_number: string;
  password: string;
  verification_code: string;
  confirm: string;
  prefix: string
}

// Maps each dispatchable action name to its payload type.
interface DispatchHandlerPayload {
  getCaptchas: GetCaptchasPayload;
  submit: SubmitPayload
}

// Dispatch results are currently untyped.
interface DispatchHandlerResult { }

export type ForgetPageDispatchProps = DispatchProps<KeyMap, DispatchHandlerPayload, DispatchHandlerResult>;
/** Wraps dva-style dispatch into promise-returning page callbacks. */
export function mapDispatchToProps(dispatch: Function): Partial<ForgetPageDispatchProps> {
  // Small factory so each action shares the identical wrapping logic.
  const wrap = (type: string) => (args: any) => dispatchWrap(dispatch, type, args);
  return {
    submit: wrap('forget/submit'),
    getCaptchas: wrap('forget/getCaptchas'),
  };
}
|
using System;
using System.Linq;
using System.Collections;
using System.Collections.Generic;
using NUnit.Framework.Api;
using NUnit.Framework;
namespace Sztorm.Collections.Tests
{
public partial class Array2DTests
{
public static class Index1DOf
{
/// <summary>
/// Index1DOf tests for the overloads that accept an unconstrained item type.
/// </summary>
public static class Any
{
    // Whole-array search; plain, equatable and comparable case sources
    // must all agree through the generic overload.
    [TestCaseSource(typeof(Index1DOf), nameof(AnyTestCases))]
    [TestCaseSource(typeof(Index1DOf), nameof(EquatableTestCases))]
    [TestCaseSource(typeof(Index1DOf), nameof(ComparableTestCases))]
    public static ItemRequestResult<int> Test<T>(Array2D<T> array, T item)
        => array.Index1DOf(item);

    // (startIndex, count) overload must reject an out-of-bounds start index.
    [TestCaseSource(typeof(Index1DOf), nameof(InvalidStartIndexCases))]
    public static void Index2DIntThrowsExceptionIfStartIndexIsOutOfBounds<T>(
        Array2D<T> array, Index2D startIndex)
        => Assert.Throws<ArgumentOutOfRangeException>(
            () => array.Index1DOf(default, startIndex, 0));

    // (startIndex, count) overload must reject a count exceeding the array.
    [TestCaseSource(typeof(Index1DOf), nameof(InvalidCountTestCases))]
    public static void Index2DIntThrowsExceptionIfCountExceedsArray2DCount<T>(
        Array2D<T> array, Index2D startIndex, int count)
        => Assert.Throws<ArgumentOutOfRangeException>(
            () => array.Index1DOf(default, startIndex, count));

    // Valid (startIndex, count) searches.
    [TestCaseSource(typeof(Index1DOf), nameof(AnyIndex2DIntTestCases))]
    [TestCaseSource(typeof(Index1DOf), nameof(EquatableIndex2DIntTestCases))]
    [TestCaseSource(typeof(Index1DOf), nameof(ComparableIndex2DIntTestCases))]
    public static ItemRequestResult<int> Test<T>(
        Array2D<T> array, T item, Index2D startIndex, int count)
        => array.Index1DOf(item, startIndex, count);

    // (startIndex, sector) overload must reject an out-of-bounds start index.
    [TestCaseSource(typeof(Index1DOf), nameof(InvalidStartIndexCases))]
    public static void Index2DBounds2DThrowsExceptionIfStartIndexIsOutOfBounds<T>(
        Array2D<T> array, Index2D startIndex)
        => Assert.Throws<ArgumentOutOfRangeException>(
            () => array.Index1DOf(default, startIndex, new Bounds2D()));

    // (startIndex, sector) overload must reject a sector leaving the array.
    [TestCaseSource(typeof(Index1DOf), nameof(InvalidSectorTestCases))]
    public static void Index2DBounds2DThrowsExceptionIfSectorIsOutOfBounds<T>(
        Array2D<T> array, Index2D startIndex, Bounds2D sector)
        => Assert.Throws<ArgumentOutOfRangeException>(
            () => array.Index1DOf(default, startIndex, sector));

    // Valid (startIndex, sector) searches.
    [TestCaseSource(typeof(Index1DOf), nameof(AnyIndex2DBounds2DTestCases))]
    [TestCaseSource(typeof(Index1DOf), nameof(EquatableIndex2DBounds2DTestCases))]
    [TestCaseSource(typeof(Index1DOf), nameof(ComparableIndex2DBounds2DTestCases))]
    public static ItemRequestResult<int> Test<T>(
        Array2D<T> array, T item, Index2D startIndex, Bounds2D sector)
        => array.Index1DOf(item, startIndex, sector);
}
/// <summary>
/// Index1DOfEquatable tests (item type constrained to IEquatable&lt;T&gt;).
/// </summary>
public static class Equatable
{
    // A null item must be rejected before any search is attempted.
    [Test]
    public static void ThrowsExceptionIfItemIsNull()
        => Assert.Throws<ArgumentNullException>(
            () => new Array2D<string>(0, 0).Index1DOfEquatable<string>(item: null));

    // Whole-array search with equatable items.
    [TestCaseSource(typeof(Index1DOf), nameof(EquatableTestCases))]
    public static ItemRequestResult<int> Test<T>(
        Array2D<T> array, T item)
        where T : IEquatable<T>
        => array.Index1DOfEquatable(item);

    // (startIndex, count) overload: null item must throw.
    [Test]
    public static void Index2DIntThrowsExceptionIfItemIsNull()
        => Assert.Throws<ArgumentNullException>(
            () => new Array2D<string>(1, 1).Index1DOfEquatable<string>(null, (0, 0), 0));

    // (startIndex, count) overload must reject an out-of-bounds start index.
    [TestCaseSource(typeof(Index1DOf), nameof(InvalidStartIndexCases))]
    public static void Index2DIntThrowsExceptionIfStartIndexIsOutOfBounds<T>(
        Array2D<T> array, Index2D startIndex)
        where T : IEquatable<T>, new()
        => Assert.Throws<ArgumentOutOfRangeException>(
            () => array.Index1DOfEquatable(new T(), startIndex, 0));

    // (startIndex, count) overload must reject a count exceeding the array.
    [TestCaseSource(typeof(Index1DOf), nameof(InvalidCountTestCases))]
    public static void Index2DIntThrowsExceptionIfCountExceedsArray2DCount<T>(
        Array2D<T> array, Index2D startIndex, int count)
        where T : IEquatable<T>, new()
        => Assert.Throws<ArgumentOutOfRangeException>(
            () => array.Index1DOfEquatable(new T(), startIndex, count));

    // Valid (startIndex, count) searches.
    [TestCaseSource(typeof(Index1DOf), nameof(EquatableIndex2DIntTestCases))]
    public static ItemRequestResult<int> Test<T>(
        Array2D<T> array, T item, Index2D startIndex, int count)
        where T : IEquatable<T>
        => array.Index1DOfEquatable(item, startIndex, count);

    // (startIndex, sector) overload: null item must throw.
    [Test]
    public static void Index2DBounds2DThrowsExceptionIfItemIsNull()
        => Assert.Throws<ArgumentNullException>(
            () => new Array2D<string>(1, 1).Index1DOfEquatable<string>(
                null, (0, 0), new Bounds2D(1, 1)));

    // (startIndex, sector) overload must reject an out-of-bounds start index.
    [TestCaseSource(typeof(Index1DOf), nameof(InvalidStartIndexCases))]
    public static void Index2DBounds2DThrowsExceptionIfStartIndexIsOutOfBounds<T>(
        Array2D<T> array, Index2D startIndex)
        where T : IEquatable<T>, new()
        => Assert.Throws<ArgumentOutOfRangeException>(
            () => array.Index1DOfEquatable(new T(), startIndex, new Bounds2D()));

    // (startIndex, sector) overload must reject a sector leaving the array.
    [TestCaseSource(typeof(Index1DOf), nameof(InvalidSectorTestCases))]
    public static void Index2DBounds2DThrowsExceptionIfSectorIsOutOfBounds<T>(
        Array2D<T> array, Index2D startIndex, Bounds2D sector)
        where T : IEquatable<T>, new()
        => Assert.Throws<ArgumentOutOfRangeException>(
            () => array.Index1DOfEquatable(new T(), startIndex, sector));

    // Valid (startIndex, sector) searches.
    [TestCaseSource(typeof(Index1DOf), nameof(EquatableIndex2DBounds2DTestCases))]
    public static ItemRequestResult<int> Test<T>(
        Array2D<T> array, T item, Index2D startIndex, Bounds2D sector)
        where T : IEquatable<T>
        => array.Index1DOfEquatable(item, startIndex, sector);
}
// Mirrors the Equatable fixture above, exercising the IComparable<T>-based
// Index1DOfComparable overloads instead: null-argument guards, bounds guards,
// and the happy-path searches driven by the shared test-case sources.
public static class Comparable
{
// Guard test: a null search item must raise ArgumentNullException.
[Test]
public static void ThrowsExceptionIfItemIsNull()
=> Assert.Throws<ArgumentNullException>(
() => new Array2D<string>(0, 0).Index1DOfComparable<string>(item: null));

// Happy path: parameterless-overload search verified against the case data.
[TestCaseSource(typeof(Index1DOf), nameof(ComparableTestCases))]
public static ItemRequestResult<int> Test<T>(
Array2D<T> array, T item)
where T : IComparable<T>
=> array.Index1DOfComparable(item);

[Test]
public static void Index2DIntThrowsExceptionIfItemIsNull()
=> Assert.Throws<ArgumentNullException>(
() => new Array2D<string>(1, 1).Index1DOfComparable<string>(
null, (0, 0), 0));

// Guard test: a start index outside the array must raise ArgumentOutOfRangeException.
[TestCaseSource(typeof(Index1DOf), nameof(InvalidStartIndexCases))]
public static void Index2DIntThrowsExceptionIfStartIndexIsOutOfBounds<T>(
Array2D<T> array, Index2D startIndex)
where T : IComparable<T>, new()
=> Assert.Throws<ArgumentOutOfRangeException>(
() => array.Index1DOfComparable(new T(), startIndex, 0));

// Guard test: a search count that overruns the array must raise ArgumentOutOfRangeException.
[TestCaseSource(typeof(Index1DOf), nameof(InvalidCountTestCases))]
public static void Index2DIntThrowsExceptionIfCountExceedsArray2DCount<T>(
Array2D<T> array, Index2D startIndex, int count)
where T : IComparable<T>, new()
=> Assert.Throws<ArgumentOutOfRangeException>(
() => array.Index1DOfComparable(new T(), startIndex, count));

// Happy path: result of the (startIndex, count) overload is checked against the case data.
[TestCaseSource(typeof(Index1DOf), nameof(ComparableIndex2DIntTestCases))]
public static ItemRequestResult<int> Test<T>(
Array2D<T> array, T item, Index2D startIndex, int count)
where T : IComparable<T>
=> array.Index1DOfComparable(item, startIndex, count);

[Test]
public static void Index2DBounds2DThrowsExceptionIfItemIsNull()
=> Assert.Throws<ArgumentNullException>(
() => new Array2D<string>(1, 1).Index1DOfComparable<string>(
null, (0, 0), new Bounds2D(1, 1)));

// Guard test: a start index outside the array must raise ArgumentOutOfRangeException.
[TestCaseSource(typeof(Index1DOf), nameof(InvalidStartIndexCases))]
public static void Index2DBounds2DThrowsExceptionIfStartIndexIsOutOfBounds<T>(
Array2D<T> array, Index2D startIndex)
where T : IComparable<T>, new()
=> Assert.Throws<ArgumentOutOfRangeException>(
() => array.Index1DOfComparable(new T(), startIndex, new Bounds2D()));

// Guard test: a search sector extending past the array must raise ArgumentOutOfRangeException.
[TestCaseSource(typeof(Index1DOf), nameof(InvalidSectorTestCases))]
public static void Index2DBounds2DThrowsExceptionIfSectorIsOutOfBounds<T>(
Array2D<T> array, Index2D startIndex, Bounds2D sector)
where T : IComparable<T>, new()
=> Assert.Throws<ArgumentOutOfRangeException>(
() => array.Index1DOfComparable(new T(), startIndex, sector));

// Happy path: result of the (startIndex, sector) overload is checked against the case data.
[TestCaseSource(typeof(Index1DOf), nameof(ComparableIndex2DBounds2DTestCases))]
public static ItemRequestResult<int> Test<T>(
Array2D<T> array, T item, Index2D startIndex, Bounds2D sector)
where T : IComparable<T>
=> array.Index1DOfComparable(item, startIndex, sector);
}
// Shared NUnit test-case generators. Yield order and the fixture values are
// part of the suite's contract and are reproduced exactly.

// Start indices that fall outside each array's bounds (incl. empty arrays).
private static IEnumerable<TestCaseData> InvalidStartIndexCases()
{
    Array2D<int> source = Array2D<int>.FromSystem2DArray(
        new int[,] { { 2, 3, 5 },
                     { 4, 9, 1 } });

    yield return new TestCaseData(source, new Index2D(-1, 0));
    yield return new TestCaseData(source, new Index2D(0, -1));
    yield return new TestCaseData(source, new Index2D(2, 0));
    yield return new TestCaseData(source, new Index2D(0, 3));
    yield return new TestCaseData(new Array2D<int>(0, 0), new Index2D(0, 0));
    yield return new TestCaseData(new Array2D<int>(1, 0), new Index2D(0, 0));
    yield return new TestCaseData(new Array2D<int>(0, 1), new Index2D(0, 0));
}

// Counts that are negative or run past the end of the search space.
private static IEnumerable<TestCaseData> InvalidCountTestCases()
{
    Array2D<int> source = Array2D<int>.FromSystem2DArray(
        new int[,] { { 2, 3, 5 },
                     { 4, 9, 1 } });

    yield return new TestCaseData(source, new Index2D(0, 0), -1);
    yield return new TestCaseData(source, new Index2D(1, 2), -1);
    yield return new TestCaseData(source, new Index2D(1, 1), 3);
    yield return new TestCaseData(source, new Index2D(0, 0), 7);
}

// Sectors whose extent exceeds the array from the given start index.
private static IEnumerable<TestCaseData> InvalidSectorTestCases()
{
    Array2D<int> source = Array2D<int>.FromSystem2DArray(
        new int[,] { { 2, 3, 5 },
                     { 4, 9, 1 } });

    yield return new TestCaseData(source, new Index2D(0, 0), new Bounds2D(3, 0));
    yield return new TestCaseData(source, new Index2D(0, 0), new Bounds2D(0, 4));
    yield return new TestCaseData(source, new Index2D(1, 1), new Bounds2D(2, 0));
    yield return new TestCaseData(source, new Index2D(1, 1), new Bounds2D(0, 3));
}

// Object-equality searches over an array that also contains nulls.
private static IEnumerable<TestCaseData> AnyTestCases()
{
    Array2D<object> source = Array2D<object>.FromSystem2DArray(
        new object[,] { { 2, 3 },
                        { 4, null },
                        { 9, null } });

    yield return new TestCaseData(source, 9).Returns(new ItemRequestResult<int>(4));
    yield return new TestCaseData(source, 3).Returns(new ItemRequestResult<int>(1));
    yield return new TestCaseData(source, 8).Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(source, null).Returns(new ItemRequestResult<int>(3));
}

// Object-equality searches bounded by (startIndex, count).
private static IEnumerable<TestCaseData> AnyIndex2DIntTestCases()
{
    Array2D<object> source = Array2D<object>.FromSystem2DArray(
        new object[,] { { 2, 3, 5 },
                        { 4, 9, 1 } });

    yield return new TestCaseData(source, 9, new Index2D(0, 0), 6)
        .Returns(new ItemRequestResult<int>(4));
    yield return new TestCaseData(source, 1, new Index2D(1, 2), 0)
        .Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(source, 10, new Index2D(1, 1), 2)
        .Returns(ItemRequestResult<int>.Fail);
}

// Object-equality searches bounded by (startIndex, sector).
private static IEnumerable<TestCaseData> AnyIndex2DBounds2DTestCases()
{
    Array2D<object> source = Array2D<object>.FromSystem2DArray(
        new object[,] { { 2, 3, 5 },
                        { 4, 9, 1 } });

    yield return new TestCaseData(source, 9, new Index2D(0, 0), new Bounds2D(2, 3))
        .Returns(new ItemRequestResult<int>(4));
    yield return new TestCaseData(source, 2, new Index2D(0, 0), new Bounds2D(0, 0))
        .Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(source, 10, new Index2D(1, 1), new Bounds2D(1, 2))
        .Returns(ItemRequestResult<int>.Fail);
}

// IEquatable<string> searches over the whole array.
private static IEnumerable<TestCaseData> EquatableTestCases()
{
    Array2D<string> source = Array2D<string>.FromSystem2DArray(
        new string[,] { { "2", "3", "5" },
                        { "4", "9", "1" } });

    yield return new TestCaseData(source, "9").Returns(new ItemRequestResult<int>(4));
    yield return new TestCaseData(source, "3").Returns(new ItemRequestResult<int>(1));
    yield return new TestCaseData(source, "8").Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(source, "7").Returns(ItemRequestResult<int>.Fail);
}

// IEquatable<string> searches bounded by (startIndex, count).
private static IEnumerable<TestCaseData> EquatableIndex2DIntTestCases()
{
    Array2D<string> source = Array2D<string>.FromSystem2DArray(
        new string[,] { { "2", "3", "5" },
                        { "4", "9", "1" } });

    yield return new TestCaseData(source, "9", new Index2D(0, 0), 6)
        .Returns(new ItemRequestResult<int>(4));
    yield return new TestCaseData(source, "1", new Index2D(1, 2), 0)
        .Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(source, "10", new Index2D(1, 1), 2)
        .Returns(ItemRequestResult<int>.Fail);
}

// IEquatable<string> searches bounded by (startIndex, sector).
private static IEnumerable<TestCaseData> EquatableIndex2DBounds2DTestCases()
{
    Array2D<string> source = Array2D<string>.FromSystem2DArray(
        new string[,] { { "2", "3", "5" },
                        { "4", "9", "1" } });

    yield return new TestCaseData(source, "9", new Index2D(0, 0), new Bounds2D(2, 3))
        .Returns(new ItemRequestResult<int>(4));
    yield return new TestCaseData(source, "2", new Index2D(0, 0), new Bounds2D(0, 0))
        .Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(
        source, "10", new Index2D(1, 1), new Bounds2D(1, 2))
        .Returns(ItemRequestResult<int>.Fail);
}

// IComparable<int> searches over the whole array; note the duplicated 3,
// whose *first* occurrence (index 1) must be reported.
private static IEnumerable<TestCaseData> ComparableTestCases()
{
    Array2D<int> source = Array2D<int>.FromSystem2DArray(
        new int[,] { { 2, 3 },
                     { 4, 9 },
                     { 3, 6 } });

    yield return new TestCaseData(source, 9).Returns(new ItemRequestResult<int>(3));
    yield return new TestCaseData(source, 3).Returns(new ItemRequestResult<int>(1));
    yield return new TestCaseData(source, 8).Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(source, 7).Returns(ItemRequestResult<int>.Fail);
}

// IComparable<int> searches bounded by (startIndex, count).
private static IEnumerable<TestCaseData> ComparableIndex2DIntTestCases()
{
    Array2D<int> source = Array2D<int>.FromSystem2DArray(
        new int[,] { { 2, 3, 5 },
                     { 4, 9, 1 } });

    yield return new TestCaseData(source, 9, new Index2D(0, 0), 6)
        .Returns(new ItemRequestResult<int>(4));
    yield return new TestCaseData(source, 1, new Index2D(1, 2), 0)
        .Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(source, 10, new Index2D(1, 1), 2)
        .Returns(ItemRequestResult<int>.Fail);
}

// IComparable<int> searches bounded by (startIndex, sector).
private static IEnumerable<TestCaseData> ComparableIndex2DBounds2DTestCases()
{
    Array2D<int> source = Array2D<int>.FromSystem2DArray(
        new int[,] { { 2, 3, 5 },
                     { 4, 9, 1 } });

    yield return new TestCaseData(source, 9, new Index2D(0, 0), new Bounds2D(2, 3))
        .Returns(new ItemRequestResult<int>(4));
    yield return new TestCaseData(source, 2, new Index2D(0, 0), new Bounds2D(0, 0))
        .Returns(ItemRequestResult<int>.Fail);
    yield return new TestCaseData(source, 10, new Index2D(1, 1), new Bounds2D(1, 2))
        .Returns(ItemRequestResult<int>.Fail);
}
}
}
}
|
package io.kixi.uom
import java.lang.RuntimeException
/**
 * Raised when a textual unit symbol cannot be mapped to a known unit.
 *
 * @param symbol the unrecognized unit symbol, embedded in the message.
 */
class NoSuchUnitException(symbol: String) :
    RuntimeException("Unit for symbol " + symbol + " is not recognized.")
|
module TabularData
  # Resolves table-display settings for a container, preferring a
  # client-specific section of the YAML file and falling back to "default".
  class ContainerConfig
    CONFIG_FILE_PATH = "#{Rails.root}/config/tables/"

    def initialize(container_name, client_name)
      @container_name = container_name
      @client_name = client_name
    end

    # Loads the container's YAML file and returns the client's section
    # (or the "default" section) with symbolized keys.
    def settings
      container_yaml = YAML.load_file(config_file)
      key = container_yaml.key?(client_name) ? client_name : "default"
      container_yaml[key].deep_symbolize_keys
    end

    private

    attr_reader :container_name, :client_name

    # Full path of this container's YAML configuration file.
    def config_file
      "#{CONFIG_FILE_PATH}#{container_name}.yml"
    end
  end
end
|
<!-- Vehicle section header -->
<h4
style="border-bottom: 1px solid #bbb; margin-bottom: 1rem;float: left;margin-top: 2rem; width:100%"
>Veículo: </h4>
<div class="form-group row">
<!-- License plate (required), e.g. "AAA0000" -->
<label for="placa_veiculo" class="col-md-1 col-form-label col-form-label-lg">Placa: </label>
<div class="col-md-3">
<input
type="text"
name="placa_veiculo"
id="placa_veiculo"
class="form-control form-control-lg"
required
placeholder="AAA0000"
>
</div>
<!-- Vehicle make (required) -->
<label for="marca_veiculo" class="col-md-1 col-form-label col-form-label-lg">Marca: </label>
<div class="col-md-3">
<input
type="text"
name="marca_veiculo"
id="marca_veiculo"
class="form-control form-control-lg"
required
placeholder="Digite a marca"
>
</div>
<!-- Vehicle model (required) -->
<label for="modelo_veiculo" class="col-md-1 col-form-label col-form-label-lg">Modelo: </label>
<div class="col-md-3">
<input
type="text"
name="modelo_veiculo"
id="modelo_veiculo"
class="form-control form-control-lg"
required
placeholder="Digite o modelo"
>
</div>
</div>
|
Old-skool tunnel effect.
Note that this is built at two different resolutions. Try 'em both.
This demonstrates:
- Repeating pixels to effectively lower resolution, saving memory and compute
time. In this case, pixels are handled in 2x2 groups, reducing 800x600 to
an effective resolution of 400x300, or 640x480 to 320x240.
- Subsampling a lookup table in ROM, and then upsampling it at runtime using
bilinear interpolation, to save space.
- Using half-precision floating point to store precomputed tables more
cheaply.
- Stupid (but *really useful*) constexpr tricks for generating lookup tables
at compile time and putting them in ROM.
- Rendering tricks handed down through the ages.
Notes
=====
This demo uses a bunch of tricks.
First: it uses a lookup table to avoid calling `atan2` or `sqrt` during
rendering. This lookup table is generated by a `constexpr` routine in
`table.cc`, so it runs *entirely inside the compiler*. The results are just
as if you had typed them in by hand, and can (and do!) go into ROM.
Second: the lookup table only describes one quarter of the screen, because
that's enough for `atan2`. We can generate the rest of the screen using
basic trig identities.
Third: the lookup table is subsampled, with each entry describing a 4x4
doubled-pixel area (so 8x8 monitor pixels). This dramatically reduces the size
of the table. We linearly interpolate between table entries to make up for the
missing data. It's technically wrong, but it looks fine.
Fourth: the lookup table is further compacted by being stored in half-precision
floating point.
Fifth: we only actually draw the *top half* of the screen. We use a special
rasterizer to mirror and reverse the top of the screen onto the bottom half
during scanout. This makes two pages of 8-bit framebuffer just barely
possible, since (at 800x600) we're storing a 400x150 framebuffer instead of
400x300.
All that processing is using about 60% of the available CPU. Increasing the
`config::sub` parameter raises the interpolation block size to 8x8, reduces the
CPU usage to 33%, but makes the tunnel in the distance appear slightly square.
Tradeoffs.
Moving from 400x300 to 800x600 would be difficult: you would not have room for
a framebuffer (at least in color), so you'd have to generate the pixels during
scanout with a rasterizer. Unfortunately, to get the vertical resolution
increase, you'd need to generate the pixels -- and thus perform the table
lookups and linear interpolation -- for every scanline. Without a breakthrough
in the algorithm or implementation, this would be computationally infeasible.
Because the 640x480 version has fewer pixels to push and a higher ratio of CPU
cycles to pixel clock cycles, it's only using about 38% of the CPU.
|
package com.test.tribal.rest.unsplashapi
import com.test.tribal.models.Photos
import retrofit2.Response
import retrofit2.http.GET
import retrofit2.http.Header
import retrofit2.http.Headers
import retrofit2.http.Query
/**
 * Retrofit API definition for the Unsplash endpoint used by this app.
 */
interface InterfaceUnsplash {
// GET /photos/random with the page number and API access key as query params.
@GET("/photos/random")
suspend fun getPhotos(@Query("page") page: Int, @Query("client_id") clientID: String): Response<Photos>
}
|
#include <stdio.h>
#include <stdlib.h>
#define NUM_NODES 9
#define INFINITY (-1)
#define TRUE 1
#define NIL (-1)
#define NOT_CONNTECTED (-1)
/* Reset the Prim bookkeeping arrays: every key becomes INFINITY and every
 * parent/finished entry becomes NIL, then the root's key is forced to 0 so
 * the first findMin() pass picks the root. */
void initialize( int key[], int p[], int finished[], int root ) {
    int node;

    for( node = 0; node < NUM_NODES; node++ ) {
        key[node] = INFINITY;
        p[node] = NIL;
        finished[node] = NIL;
    }

    key[root] = 0;
}
/* Read a directed, weighted adjacency matrix from stdin.
 * Every cell starts as NOT_CONNTECTED; the user then enters "a b w" triples
 * (edge a->b with weight w) until "-1 -1 -1" or NUM_NODES^2 edges. */
void formGraph( int (*connMatrix)[NUM_NODES] ) {
int i, j, a, b, w;

/* Mark every pair as unconnected before reading edges. */
for( i = 0; i < NUM_NODES; i++)
for( j = 0; j < NUM_NODES; j++ )
connMatrix[i][j] = NOT_CONNTECTED;

i = 0;
printf("Enter edges and their weight ( format: a b w ):\t\t\t-1 -1 -1 to stop\n");
/* Cap the loop at NUM_NODES^2 entries so malformed input cannot loop forever. */
while( i++ < NUM_NODES * NUM_NODES ) {
scanf("%d%d%d", &a, &b, &w);
getchar();

/* Sentinel -1 in any field ends input. */
if( a == -1 || b == -1 || w == -1 )
break;

connMatrix[a][b] = w;
/* NOTE(review): a and b are not range-checked; out-of-range indices would
 * write outside the matrix -- confirm input is trusted. */
}
}
/* Strict "less-than" for edge weights where INFINITY (-1) means unbounded:
 * INFINITY is never smaller than anything, and every finite weight is
 * smaller than INFINITY. Returns nonzero iff left < right. */
int isLeftLessRight( int left, int right ) {
    if( left == INFINITY )
        return 0;              /* INFINITY compares greater-or-equal to all */

    if( right == INFINITY )
        return 1;              /* any finite value < INFINITY */

    return left < right;       /* both finite: ordinary comparison */
}
/* Return the index of the unfinished node with the smallest key (using the
 * INFINITY-aware comparison), or NIL when every node is finished or all
 * remaining keys are INFINITY (i.e. unreachable). */
int findMin( int key[], int finished[] ) {
int i;
int minInd = NIL;
int min = INFINITY;

for( i = 0; i < NUM_NODES; i++ ) {
/* Only nodes not yet added to the tree are candidates. */
if( finished[i] == NIL && isLeftLessRight( key[i], min ) ) {
minInd = i;
min = key[minInd];
}
}

return minInd;
}
/* Prim's minimum-spanning-tree algorithm starting from `root`.
 * On return, p[v] holds each node's parent in the MST (NIL for the root and
 * unreachable nodes) and key[v] the weight of the edge joining v to the tree. */
void prim( int (*connMatrix)[NUM_NODES], int key[], int p[], int finished[], int root ) {
initialize( key, p, finished, root );

int v, u;
int isInfinityFlag;

/* Repeatedly extract the cheapest reachable unfinished node. */
while( NIL != ( u = findMin( key, finished ) ) ) {
finished[u] = TRUE;

for( v = 0; v < NUM_NODES; v++ )
/* check if 'v' is adjacent of 'u' which is not finished */
if( connMatrix[u][v] != NOT_CONNTECTED && finished[v] != TRUE ) {
isInfinityFlag = key[v] == INFINITY;

/* check if there is a smaller value for key[v] */
if( isInfinityFlag || connMatrix[u][v] < key[v] ) {
p[v] = u;
key[v] = connMatrix[u][v];
}
}
}
}
/* Driver: read a graph, prompt for a valid root index, run Prim's algorithm
 * and print the parents of three nodes used by the accompanying exercise. */
int main() {
    int root;
    int key[NUM_NODES];
    int p[NUM_NODES];
    int finished[NUM_NODES];
    int connMatrix[NUM_NODES][NUM_NODES];

    formGraph( connMatrix );

    while( 1 ) {
        printf("\nEnter index of the root node: \n");

        /* Bug fix: the scanf result was unchecked, so non-numeric input or
         * EOF left `root` unset and spun this loop forever. */
        if( scanf("%d", &root ) != 1 ) {
            fprintf( stderr, "Invalid input for root node.\n" );
            return EXIT_FAILURE;
        }
        getchar();

        if( root < 0 || root > NUM_NODES - 1 )
            printf("Index of root node is not in the interval [0,%d]\n", NUM_NODES - 1);
        else
            break;
    }

    prim( connMatrix, key, p, finished, root );

    /* Expected answers for the fixed sample graph of the exercise. */
    printf("\nc=2\tg=6\td=3\n");
    printf("p[i]=%d\tp[h]=%d\tp[e]=%d\n", p[8], p[7], p[4] );
    printf("p[i]=c\tp[h]=g\tp[e]=d\n");

    return 0;
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See LICENSE in the project root for
* license information.
*/
package com.microsoft.azure.spring.cloud.config;
import org.junit.Test;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import static com.microsoft.azure.spring.cloud.config.AzureCloudConfigAutoConfiguration.WATCH_TASK_SCHEDULER_NAME;
import static com.microsoft.azure.spring.cloud.config.TestConstants.*;
import static com.microsoft.azure.spring.cloud.config.TestUtils.propPair;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Verifies that {@link AzureCloudConfigAutoConfiguration} creates the config
 * watch (and its task scheduler) only when both the config store and the
 * watch feature are enabled, and that a user-supplied scheduler bean takes
 * precedence over the auto-configured one.
 */
public class AzureCloudConfigAutoConfigurationTest {
// Scheduler instance used to prove that user-defined beans win.
private static final TaskScheduler TEST_SCHEDULER = new ThreadPoolTaskScheduler();

// Context runner pre-wired with connection string and store name so the
// bootstrap configuration can start; each test toggles the feature flags.
private static final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withPropertyValues(propPair(CONN_STRING_PROP, TEST_CONN_STRING),
propPair(STORE_NAME_PROP, TEST_STORE_NAME))
.withConfiguration(AutoConfigurations.of(AzureConfigBootstrapConfiguration.class,
AzureCloudConfigAutoConfiguration.class));

// Watch flag absent: no watch bean and no scheduler should exist.
@Test
public void watchEnabledNotConfiguredShouldNotCreateWatch() {
contextRunner.withPropertyValues(propPair(CONFIG_ENABLED_PROP, "true")).run(context -> {
assertThat(context).doesNotHaveBean(AzureCloudConfigWatch.class);
assertThat(context).doesNotHaveBean(WATCH_TASK_SCHEDULER_NAME);
});
}

// Both flags off: nothing watch-related should be created.
@Test
public void configNotEnabledWatchNotEnabledShouldNotCreateWatch() {
contextRunner.withPropertyValues(propPair(CONFIG_ENABLED_PROP, "false"),
propPair(WATCH_ENABLED_PROP, "false")).run(context -> {
assertThat(context).doesNotHaveBean(AzureCloudConfigWatch.class);
assertThat(context).doesNotHaveBean(WATCH_TASK_SCHEDULER_NAME);
});
}

// Watch flag alone is not enough: the config store must also be enabled.
@Test
public void configNotEnabledWatchEnabledShouldNotCreateWatch() {
contextRunner.withPropertyValues(propPair(CONFIG_ENABLED_PROP, "false"),
propPair(WATCH_ENABLED_PROP, "true")).run(context -> {
assertThat(context).doesNotHaveBean(AzureCloudConfigWatch.class);
assertThat(context).doesNotHaveBean(WATCH_TASK_SCHEDULER_NAME);
});
}

// Config enabled but watch explicitly off: still no watch machinery.
@Test
public void configEnabledWatchNotEnabledShouldNotCreateWatch() {
contextRunner.withPropertyValues(propPair(CONFIG_ENABLED_PROP, "true"),
propPair(WATCH_ENABLED_PROP, "false")).run(context -> {
assertThat(context).doesNotHaveBean(AzureCloudConfigWatch.class);
assertThat(context).doesNotHaveBean(WATCH_TASK_SCHEDULER_NAME);
});
}

// Both flags on plus a watched key: watch bean and its scheduler appear.
@Test
public void configEnabledWatchEnabledShouldCreateWatch() {
contextRunner.withPropertyValues(propPair(CONFIG_ENABLED_PROP, "true"),
propPair(WATCH_ENABLED_PROP, "true"),
propPair(WATCHED_KEY_PROP, TEST_WATCH_KEY)).run(context -> {
assertThat(context).hasSingleBean(AzureCloudConfigWatch.class);
assertThat(context).hasBean(WATCH_TASK_SCHEDULER_NAME);
assertThat(context).hasSingleBean(TaskScheduler.class);
});
}

// A user-registered scheduler bean with the well-known name replaces the
// auto-configured one.
@Test
public void taskSchedulerCanBeCustomizedByUser() {
contextRunner.withPropertyValues(propPair(CONFIG_ENABLED_PROP, "true"),
propPair(WATCH_ENABLED_PROP, "true"),
propPair(WATCHED_KEY_PROP, TEST_WATCH_KEY))
.withUserConfiguration(TestConfiguration.class).run(context -> {
assertThat(context).hasSingleBean(AzureCloudConfigWatch.class);
assertThat(context).hasBean(WATCH_TASK_SCHEDULER_NAME);
assertThat(context).hasSingleBean(TaskScheduler.class);
assertThat(context.getBean(WATCH_TASK_SCHEDULER_NAME)).isEqualTo(TEST_SCHEDULER);
});
}

// User configuration contributing the custom scheduler under the watch name.
@Configuration
static class TestConfiguration {
@Bean(name = WATCH_TASK_SCHEDULER_NAME)
public TaskScheduler getTaskScheduler() {
return TEST_SCHEDULER;
}
}
}
|
## Описание
Библиотека для isEngine framework на JavaScript.
## Ссылки
* [Официальный сайт](https://isengine.org);
* [Telegram](https://t.me/isengine);
* [Youtube](https://www.youtube.com/channel/UCLMf_v8P2O5JQ8ec6zkquDw);
* [Composer](https://packagist.org/packages/isengine/);
* [Github](https://github.com/isengine).
|
/**
 * Base class for controllers. The permissive index signature allows
 * subclasses to attach arbitrary handler members dynamically.
 */
export default abstract class Controller {
  // deno-lint-ignore no-explicit-any
  [key: string]: any

  // Redundant empty constructor removed: the implicit default constructor
  // is identical, so subclasses are unaffected.

  /** Write a message to the console. */
  log(msg: string) {
    console.log(msg);
  }
}
|
@file:Suppress("UNCHECKED_CAST")
package io.github.matrixkt.models.events
import kotlinx.serialization.*
/**
 * Base type for all events exchanged with the homeserver. Subclasses add the
 * metadata specific to their delivery channel (room, ephemeral, account data).
 */
public sealed class Event<out Content> {
/**
* The fields in this object will vary depending on the type of event.
* When interacting with the REST API, this is the HTTP body.
*/
public abstract val content: Content

/**
* The type of event.
* This SHOULD be namespaced similar to Java package naming conventions e.g. 'com.example.subdomain.event.type'
*/
public abstract val type: String
}
/**
 * Factory for room events: produces a state event when [stateKey] is present
 * (state events are distinguished precisely by having a state key, possibly
 * empty-string) and a plain message event otherwise.
 */
public fun <Content : Any, UnsignedData : Any> RoomEvent(
type: String,
content: Content,
eventId: String,
sender: String,
originServerTimestamp: Long,
unsigned: UnsignedData? = null,
roomId: String,
stateKey: String? = null,
prevContent: Content? = null
): RoomEvent<Content, UnsignedData> {
return if (stateKey != null) {
StateEvent(type, content, eventId, sender, originServerTimestamp, unsigned, roomId, stateKey, prevContent)
} else {
MessageEvent(type, content, eventId, sender, originServerTimestamp, unsigned, roomId)
}
}
/**
 * Concrete room event without a state key (a message-like event); created
 * only through the [RoomEvent] factory function. [stateKey] and [prevContent]
 * are pinned to null because message events never carry state.
 *
 * NOTE(review): the class is not marked @Serializable; the @SerialName
 * annotations only matter if serialization is driven by the (out-of-view)
 * RoomEvent hierarchy -- confirm there.
 */
private data class MessageEvent<Content, UnsignedData>(
override val type: String,
override val content: Content,
@SerialName("event_id")
override val eventId: String,
override val sender: String,
@SerialName("origin_server_ts")
override val originServerTimestamp: Long,
override val unsigned: UnsignedData? = null,
@SerialName("room_id")
override val roomId: String
) : RoomEvent<Content, UnsignedData>() {
@SerialName("state_key")
override val stateKey: String? get() = null

@SerialName("prev_content")
override val prevContent: Content? get() = null
}
/**
 * Transient event (e.g. typing/receipts style data) that is not persisted in
 * room history; carries only the sending user's ID alongside the content.
 */
@Serializable
public data class EphemeralEvent<Content>(
override val type: String,
override val content: Content,

/**
* Contains the fully-qualified ID of the user who sent this event.
*/
val sender: String
) : Event<Content>()
// @Serializable
/**
 * Base type for account-data events. [roomId] is null when the data is
 * global rather than scoped to a single room.
 */
public sealed class AccountEvent<out Content> : Event<Content>() {
@SerialName("room_id")
public abstract val roomId: String?
}
/** Account-data event that applies globally; [roomId] is always null. */
@Serializable
public data class GlobalAccountEvent<out Content>(
override val type: String,
override val content: Content
) : AccountEvent<Content>() {
@SerialName("room_id")
override val roomId: String? get() = null
}
/** Account-data event scoped to the single room identified by [roomId]. */
@Serializable
public data class RoomAccountEvent<out Content>(
override val type: String,
override val content: Content,
@SerialName("room_id")
override val roomId: String
) : AccountEvent<Content>()
|
//this module holds the declaration for all functions client side
#pragma once
#include "Thread.h"
/* Bundle of per-client state passed to MainClient() and shared with its
 * worker threads. */
typedef struct Client_Arguments {
char logfile[MAX_PATH_FILE];    /* path of the client's log file */
int s_port;                     /* server port to connect to */
char username[MAX_USERNAME];    /* name this client identifies as */
char mode[6];                   /* short mode string -- NOTE(review): exact values not visible here, confirm in MainClient() */
const char file[MAX_PATH_FILE]; /* NOTE(review): a const array member cannot be written after creation; verify this qualifier is intentional */
HANDLE *MutexRcv;               /* mutex handle -- presumably guards receive-side state; confirm */
char buffer[256];               /* scratch buffer for message data */
FILE *f_pointer;                /* open file handle used by the transfer */
}c_args;

/* Client entry point: drives the connection described by c_item. */
void MainClient(c_args *c_item);
/* NOTE(review): `static` declarations in a header give every including .c
 * file its own internal symbol; confirm these threads are defined/used in a
 * single translation unit. */
static DWORD SendDataThread(LPVOID itemA);
static DWORD RecvDataThread(LPVOID itemA);
/* Allocates and populates a c_args from string-form program arguments. */
c_args *InitClientArgs(char logfile[], char server_port[], char username[], char mode[], char file[]);
static DWORD ListenThread(LPVOID itemA);
|
package com.copperleaf.kudzu.parser.expression
import com.copperleaf.kudzu.node.Node
import com.copperleaf.kudzu.node.expression.InfixOperatorNode
import com.copperleaf.kudzu.node.expression.InfixrOperatorNode
import com.copperleaf.kudzu.node.expression.PostfixOperatorNode
import com.copperleaf.kudzu.node.expression.PrefixOperatorNode
import com.copperleaf.kudzu.node.mapped.ValueNode
/**
 * Evaluates a parsed expression tree to a value of type [T] by dispatching on
 * node kind and resolving operator symbols against [operators].
 *
 * Uses [DeepRecursiveFunction] throughout so arbitrarily deep expression
 * trees are evaluated on the heap instead of the call stack.
 */
@ExperimentalStdlibApi
@Suppress("UNCHECKED_CAST")
internal class ExpressionEvaluatorImpl<T : Any>(
private val operators: List<Operator<T>>,
) : ExpressionEvaluator<T> {
// Top-level dispatcher: routes each node type to its dedicated evaluator;
// leaf ValueNodes are unwrapped directly (cast justified by construction).
override val evaluate = DeepRecursiveFunction<Node, T> { node ->
when (node) {
is InfixOperatorNode -> evaluateInfix.callRecursive(node)
is InfixrOperatorNode -> evaluateInfixr.callRecursive(node)
is PrefixOperatorNode -> evaluatePrefix.callRecursive(node)
is PostfixOperatorNode -> evaluatePostfix.callRecursive(node)
is ValueNode<*> -> node.value as T
else -> error("Unknown expression node")
}
}

// Left-associative chain: fold the operand list left-to-right, applying each
// operator to the accumulated result and the next operand's value.
private val evaluateInfix: DeepRecursiveFunction<InfixOperatorNode, T> = DeepRecursiveFunction {
with(it) {
var result = evaluate.callRecursive(leftOperand)
for (node in operationNodes) {
val rightOperatorResult = evaluate.callRecursive(node.operand)
result = node.operator.text.applyBinary(result, rightOperatorResult)
}
result
}
}

// Right-associative operator: at most one operation at this level; the
// right-hand operand subtree carries any further nesting.
private val evaluateInfixr: DeepRecursiveFunction<InfixrOperatorNode, T> = DeepRecursiveFunction {
with(it) {
var result = evaluate.callRecursive(leftOperand)
if (operation != null) {
val rightOperationResult = evaluate.callRecursive(operation.operand)
result = operation.operator.text.applyBinary(result, rightOperationResult)
}
result
}
}

// Prefix operators: evaluate the operand once, then apply each operator in
// the order the node stores them.
private val evaluatePrefix: DeepRecursiveFunction<PrefixOperatorNode, T> = DeepRecursiveFunction {
with(it) {
var result = evaluate.callRecursive(operand)
for (node in operatorNodes) {
result = node.text.applyUnary(result)
}
result
}
}

// Postfix operators: same shape as prefix, applied after the operand.
private val evaluatePostfix: DeepRecursiveFunction<PostfixOperatorNode, T> = DeepRecursiveFunction {
with(it) {
var result = evaluate.callRecursive(operand)
for (node in operatorNodes) {
result = node.text.applyUnary(result)
}
result
}
}

// Resolve this operator symbol (by name or alias) to a unary operator and
// apply it. Throws NoSuchElementException for an unregistered symbol.
private fun String.applyUnary(node: T): T {
return operators
.asSequence()
.filterIsInstance<Operator.UnaryOperator<T>>()
.first { it.name == this@applyUnary || this@applyUnary in it.aliases }
.applyFn(node)
}

// Binary counterpart of applyUnary; same resolution and failure behavior.
private fun String.applyBinary(left: T, right: T): T {
return operators
.asSequence()
.filterIsInstance<Operator.BinaryOperator<T>>()
.first { it.name == this@applyBinary || this@applyBinary in it.aliases }
.applyFn(left, right)
}
}
|
#pragma once
#include "common.h"
#include "interfaces.h"
// Samples a strapping pin to determine the machine's operating mode.
// The pin is configured as an input with pull-up at construction time;
// a high level selects water mode, a low level selects steam mode.
class ModeDetector
{
public:
    ModeDetector(IOPin *pin) : pin(pin)
    {
        pin->set_mode(IOPin::Modes::IN_PU);
    }

    // Read the pin and translate its level into a Machine::Mode.
    Machine::Mode get_mode()
    {
        if (pin->is_high())
        {
            return Machine::Mode::WATER_MODE;
        }
        return Machine::Mode::STEAM_MODE;
    }

private:
    IOPin *pin;
};
|
package de.debuglevel.streetdirectory.street.extraction
import de.debuglevel.streetdirectory.street.Street
/**
 * Strategy interface for sources that can enumerate [Street]s.
 */
interface StreetExtractor {
// Returns every street reachable with the given extractor settings.
fun getStreets(streetExtractorSettings: StreetExtractorSettings): List<Street>
}
|
using ImageSmooth
using Test, ReferenceTests, TestImages
using FileIO
using ImageTransformations, ImageQualityIndexes
using ImageBase
using OffsetArrays
# Shared helpers used by the algorithm test files.
include("testutils.jl")

# Top-level test suite; each algorithm contributes its own file.
@testset "ImageSmooth.jl" begin
include("algorithms/l0_smooth.jl")
end
|
#!/bin/bash
# Capture a batch of COVID-19 dashboard screenshots with index.js
# (-u page URL, -e CSS selector for a single element), then zip the
# resulting screenshots/ directory and stage the archive in /tmp.

echo "Screenshot John Hopkins Covid-19 ArcGIS page"
node index.js -u "https://www.arcgis.com/apps/opsdashboard/index.html#/bda7594740fd40299423467b48e9ecf6"
echo "Screenshot John Hopkins Covid-19 ArcGIS world map"
node index.js -u "https://www.arcgis.com/apps/opsdashboard/index.html#/bda7594740fd40299423467b48e9ecf6" -e ".map"
echo "Screenshot eleval observablehq div"
node index.js -u "https://observablehq.com/@elaval/coronavirus-worldwide-evolution" -e "div#el2"
echo "Screenshot eleval observablehq page"
node index.js -u "https://observablehq.com/@elaval/coronavirus-worldwide-evolution"
echo "Screenshot worldspandemic World card"
node index.js -u "https://worldspandemic.com/" -e ".cov19u-card"
echo "Screenshot worldspandemic Total World chart"
node index.js -u "https://worldspandemic.com/" -e ".covid19-ultimate-evolution-chart"
echo "Screenshot worldspandemic Daily New Cases World chart"
node index.js -u "https://worldspandemic.com/" -e ".covid19-ultimate-daily-chart"
echo "UW IHME - Total Deaths"
node index.js -u "https://covid19.healthdata.org/united-states-of-america" -e "#total-deaths"
echo "State scorecard test - AZ"
node index.js -u "https://projects.propublica.org/reopening-america/" -e "#state-row-AZ"

# Archive everything captured above and stage a copy in /tmp.
echo "zip the folder"
zip -r screenshots.zip screenshots/
cp screenshots.zip /tmp/.
|
#!/bin/bash
set -e

# Configure Kmod: install under /usr with binaries in /bin, configuration in
# /etc and runtime libraries in /lib, with xz and zlib support for
# compressed kernel modules.
./configure --prefix=/usr \
--bindir=/bin \
--sysconfdir=/etc \
--with-rootlibdir=/lib \
--with-xz \
--with-zlib

# Build Kmod.
make

# Install Kmod, then create symlinks compatible with Module-Init-Tools
# (the package formerly used to manage Linux kernel modules).
make install

for target in depmod insmod lsmod modinfo modprobe rmmod; do
ln -sfv ../bin/kmod /sbin/$target
done

ln -sfv kmod /bin/lsmod
|
import { Database } from '@vuex-orm/core'
import Token from '~/database/Token'
import tokenStore from '~/database/Token/store'
import WaffleLayer from '~/database/WaffleLayer'
import Waffle from '~/database/Waffle'
import waffleStore from '~/database/Waffle/store'
const database = new Database()
database.register(Token, tokenStore)
database.register(Waffle, waffleStore)
database.register(WaffleLayer)
export default database
|
package cz.cuni.mff.odcleanstore.conflictresolution.resolution.comparators;
import org.openrdf.model.Value;
import cz.cuni.mff.odcleanstore.conflictresolution.CRContext;
/**
* Comparator of literals by the length of their lexical representation.
* @author Jan Michelfeit
*/
public class LexicalLengthComparator implements BestSelectedLiteralComparator {
private static final LexicalLengthComparator INSTANCE = new LexicalLengthComparator();
/**
* Returns the shared default instance of this class.
* @return instance of this class
*/
public static final LexicalLengthComparator getInstance() {
return INSTANCE;
}
@Override
public boolean accept(Value object, CRContext crContext) {
return true;
}
@Override
public int compare(Value object1, Value object2, CRContext crContext) {
return object1.stringValue().length()
- object2.stringValue().length();
}
}
|
namespace Monolith.Unity.Examples.TicTacToe.Models
{
/// <summary>Identifies whose turn it currently is in a tic-tac-toe game.</summary>
public enum GameTurn : byte
{
None,     // no active turn / game not in progress
Player1,
Player2,
Ai,
}
}
---
title: "Digital Dreams"
site_link: "https://www.digitaldreamsgames.com/"
description: "Design and develop playful experiences."
location: "Utrecht"
active: true
active_from: "2010-01-01"
active_to: ""
tags: []
date: "2017-05-18T11:31:05Z"
---
|
const { readdirSync } = require("fs")
const path = require("path")
const config = require("../config.json")
module.exports = {
name: "도움말",
async execute(message) {
const embed = new (require("discord.js")).MessageEmbed()
.setTitle(`명령어 목록 | 접두사 : ${config.prefix}`)
.setColor("BLUE")
.setThumbnail(message.author.displayAvatarURL())
.setTimestamp()
const commands = readdirSync(path.resolve(__dirname, `../commands/`)).filter(file => file.endsWith(".js"))
for (const file of commands) {
const command = require(`../commands/${file}`)
embed.addFields({ name: `${command.name}`, value: `설명 : ${command.description || "없음"}`, inline: true })
}
const menu = new (require("discord.js")).MessageSelectMenu()
.setCustomId("select")
.setPlaceholder("옵션을 선택해 주세요")
.addOptions([
{ label: "일반 명령어", description: `${config.prefix}를(을) 사용하는 명령어`, value: "일반 명령어" },
])
const components = new (require("discord.js")).MessageActionRow().addComponents(menu)
await message.channel.send({ content: "**아래 옵션을 선택해 주세요**", components: [components] })
const collector = message.channel.createMessageComponentCollector({
componentType: "SELECT_MENU",
max: 1
})
collector.on("collect", collected => {
const value = collected.values[0]
if (collected.member.id !== message.author.id) return
if (value == "일반 명령어") {
return collected.update({ content: " ",embeds: [embed], components: [] })
}
})
}
}
|
using System.Collections.Generic;
using DAL.Common;
using Domain;
using Microsoft.EntityFrameworkCore;
namespace DAL.Repositories
{
/// <summary>EF Core repository for <see cref="Book"/> entities.</summary>
public class BookRepository : Repository<Book>, IBookRepository
{
    public BookRepository(BooksDbContext context) : base(context)
    {
    }

    protected override DbSet<Book> DbSet => Context.Books;

    /// <summary>
    /// Adds many books and persists them with a single SaveChanges call.
    /// Automatic change detection is suspended while adding so the insert
    /// does not degrade quadratically, then restored unconditionally.
    /// </summary>
    public void BulkAdd(IEnumerable<Book> books)
    {
        bool previousAutoDetect = Context.ChangeTracker.AutoDetectChangesEnabled;
        try
        {
            Context.ChangeTracker.AutoDetectChangesEnabled = false;
            foreach (Book entity in books)
            {
                DbSet.Add(entity);
            }
            Context.SaveChanges();
        }
        finally
        {
            Context.ChangeTracker.AutoDetectChangesEnabled = previousAutoDetect;
        }
    }
}
}
|
namespace SoundFingerprinting.Tests.Integration
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;

    using Moq;

    using NUnit.Framework;

    using SoundFingerprinting.Audio;
    using SoundFingerprinting.Audio.NAudio;
    using SoundFingerprinting.Builder;
    using SoundFingerprinting.InMemory;
    using SoundFingerprinting.Utils;

    /// <summary>
    /// Integration test for <see cref="TestRunner"/>: runs insert + query
    /// scenarios end-to-end and checks the emitted events and result files.
    /// </summary>
    [TestFixture]
    [Category("RequiresWindowsDLL")]
    public class TestRunnerTest : IntegrationWithSampleFilesTest
    {
        private readonly IModelService modelService = new InMemoryModelService();
        private readonly IAudioService audioService = new NAudioService();
        private readonly IFingerprintCommandBuilder fcb = new FingerprintCommandBuilder();
        private readonly IQueryCommandBuilder qcb = new QueryCommandBuilder();

        // Strict mocks for each TestRunner event: negative found / not found,
        // positive found / not found, and test-iteration finished.
        private readonly Mock<TestRunnerEvent> nfe = new Mock<TestRunnerEvent>(MockBehavior.Strict);
        private readonly Mock<TestRunnerEvent> nnfe = new Mock<TestRunnerEvent>(MockBehavior.Strict);
        private readonly Mock<TestRunnerEvent> pfe = new Mock<TestRunnerEvent>(MockBehavior.Strict);
        private readonly Mock<TestRunnerEvent> pnfe = new Mock<TestRunnerEvent>(MockBehavior.Strict);
        private readonly Mock<TestRunnerEvent> tife = new Mock<TestRunnerEvent>(MockBehavior.Strict);

        private readonly Mock<ITagService> tagService = new Mock<ITagService>(MockBehavior.Strict);

        [SetUp]
        public void SetUp()
        {
            // Every file gets the same synthetic tag info.
            tagService.Setup(service => service.GetTagInfo(It.IsAny<string>())).Returns(
                new TagInfo
                    {
                        Artist = "Chopin",
                        Album = string.Empty,
                        Title = "Nocturne C#",
                        ISRC = "USUR19980187",
                        Year = 1997,
                        Duration = 193.07d
                    });
        }

        [TearDown]
        public void TearDown()
        {
            nfe.VerifyAll();
            nnfe.VerifyAll();
            pfe.VerifyAll();
            pnfe.VerifyAll();
            tife.VerifyAll();
        }

        [Test]
        public void ShouldSuccessfullyRunTest()
        {
            // Start from a clean temp directory so file-count assertions hold.
            string results = Path.GetTempPath();
            Directory.GetFiles(results).Where(file => file.Contains("results_")).ToList().ForEach(File.Delete);
            Directory.GetFiles(results).Where(file => file.Contains("suite_")).ToList().ForEach(File.Delete);
            Directory.GetFiles(results).Where(file => file.Contains("insert_")).ToList().ForEach(File.Delete);

            // Positive matches must be perfect (precision = recall = F1 = 1).
            pfe.Setup(e => e(It.IsAny<TestRunner>(), It.IsAny<TestRunnerEventArgs>())).Callback(
                (object runner, EventArgs param) =>
                    {
                        var fScore = ((TestRunnerEventArgs)param).FScore;
                        Assert.AreEqual(1, fScore.F1);
                        Assert.AreEqual(1, fScore.Precision);
                        Assert.AreEqual(1, fScore.Recall);
                        Assert.AreEqual(1, ((TestRunnerEventArgs)param).Verified);
                    });

            // Negatives found degrade precision to 0.5 (F1 ~ 0.6666).
            nfe.Setup(e => e(It.IsAny<TestRunner>(), It.IsAny<TestRunnerEventArgs>())).Callback(
                (object runner, EventArgs param) =>
                    {
                        var fScore = ((TestRunnerEventArgs)param).FScore;
                        Assert.AreEqual(0.6666, fScore.F1, 0.001);
                        Assert.AreEqual(0.5, fScore.Precision, 0.001);
                        Assert.AreEqual(1, fScore.Recall);
                        Assert.AreEqual(2, ((TestRunnerEventArgs)param).Verified);
                    });

            tife.Setup(e => e(It.IsAny<TestRunner>(), It.IsAny<TestRunnerEventArgs>())).Verifiable();

            string path = TestContext.CurrentContext.TestDirectory;
            string scenario1 = string.Format("Insert,{0},IncrementalStatic,0,5115", path);
            string scenario2 = string.Format("Run,{0},{1},IncrementalRandom,256,512,10,10|30|50", path, path);
            string scenario3 = string.Format("Insert,{0},IncrementalStatic,0,2048", path);
            string scenario4 = string.Format("Run,{0},{1},IncrementalRandom,256,512,10,10|30|50", path, path);

            var testRunner = new TestRunner(
                new List<string> { scenario1, scenario2, scenario3, scenario4 }.ToArray(),
                modelService,
                audioService,
                tagService.Object,
                fcb,
                qcb,
                results);
            AttachEventHandlers(testRunner);

            testRunner.Run();

            // 2 "Run" scenarios x 3 thresholds = 6 iterations.
            pfe.Verify(e => e(It.IsAny<TestRunner>(), It.IsAny<TestRunnerEventArgs>()), Times.Exactly(6));
            // FIX: the original verified pfe twice (copy-paste duplicate); the
            // redundant line was removed. The remaining event mocks are checked
            // by VerifyAll in TearDown.
            tife.Verify(e => e(It.IsAny<TestRunner>(), It.IsAny<TestRunnerEventArgs>()), Times.Exactly(6));

            var testRuns = Directory.GetFiles(results).Where(file => file.Contains("results_")).ToList();
            Assert.AreEqual(6, testRuns.Count);
            var testSuite = Directory.GetFiles(results).Where(file => file.Contains("suite_")).ToList();
            Assert.AreEqual(1, testSuite.Count);
            var testInsert = Directory.GetFiles(results).Where(file => file.Contains("insert_")).ToList();
            Assert.AreEqual(2, testInsert.Count);
        }

        /// <summary>Wires the strict mocks to all TestRunner events.</summary>
        private void AttachEventHandlers(TestRunner testRunner)
        {
            testRunner.NegativeFoundEvent += nfe.Object;
            testRunner.NegativeNotFoundEvent += nnfe.Object;
            testRunner.PositiveFoundEvent += pfe.Object;
            testRunner.PositiveNotFoundEvent += pnfe.Object;
            testRunner.TestIterationFinishedEvent += tife.Object;
        }
    }
}
|
<?php
namespace Mgilet\NotificationBundle\Entity;
use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\ORM\Mapping as ORM;
/**
 * A single notification record: a subject plus an optional message and link,
 * stamped with its creation date. Delivery/read state per recipient is
 * tracked via the NotifiableNotification join entity (one row per recipient).
 *
 * NOTE(review): the original docblock called this "AbstractNotification" and
 * said app notifications "must implement this class", but the class is a
 * concrete, non-abstract entity — confirm the intended extension model.
 *
 * @ORM\Table(name="notification")
 * @ORM\Entity(repositoryClass="Mgilet\NotificationBundle\Entity\Repository\NotificationRepository")
 */
class Notification
{
    /**
     * Auto-generated surrogate key.
     *
     * @var integer $id
     *
     * @ORM\Column(type="integer")
     * @ORM\Id
     * @ORM\GeneratedValue
     */
    protected $id;

    /**
     * Creation time; initialized to "now" in the constructor.
     *
     * @var \DateTime
     * @ORM\Column(type="datetime")
     */
    protected $date;

    /**
     * Short headline of the notification.
     *
     * @var string
     * @ORM\Column(type="string", length=255)
     */
    protected $subject;

    /**
     * Optional longer body text.
     *
     * @var string
     * @ORM\Column(type="string", length=255, nullable=true)
     */
    protected $message;

    /**
     * Optional URL the user is redirected to when opening the notification.
     *
     * @var string
     * @ORM\Column(type="string", length=255, nullable=true)
     */
    protected $link;

    /**
     * Per-recipient delivery records for this notification.
     *
     * @var NotifiableNotification[]|ArrayCollection
     * @ORM\OneToMany(targetEntity="Mgilet\NotificationBundle\Entity\NotifiableNotification", mappedBy="notification")
     */
    protected $notifiableNotifications;

    /**
     * Notification constructor.
     * Sets the creation date to the current time and starts with no recipients.
     */
    public function __construct()
    {
        $this->date = new \DateTime();
        $this->notifiableNotifications = new ArrayCollection();
    }

    /**
     * @return int Notification Id
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * @return \DateTime
     */
    public function getDate()
    {
        return $this->date;
    }

    /**
     * @param \DateTime $date
     * @return $this
     */
    public function setDate($date)
    {
        $this->date = $date;

        return $this;
    }

    /**
     * @return string Notification subject
     */
    public function getSubject()
    {
        return $this->subject;
    }

    /**
     * @param string $subject Notification subject
     * @return $this
     */
    public function setSubject($subject)
    {
        $this->subject = $subject;

        return $this;
    }

    /**
     * @return string Notification message
     */
    public function getMessage()
    {
        return $this->message;
    }

    /**
     * @param string $message Notification message
     * @return $this
     */
    public function setMessage($message)
    {
        $this->message = $message;

        return $this;
    }

    /**
     * @return string Link to redirect the user
     */
    public function getLink()
    {
        return $this->link;
    }

    /**
     * @param string $link Link to redirect the user
     * @return $this
     */
    public function setLink($link)
    {
        $this->link = $link;

        return $this;
    }

    /**
     * @return ArrayCollection|NotifiableNotification[]
     */
    public function getNotifiableNotifications()
    {
        return $this->notifiableNotifications;
    }

    /**
     * Adds a delivery record and keeps the inverse side in sync
     * (only if not already present).
     *
     * @param NotifiableNotification $notifiableNotification
     *
     * @return $this
     */
    public function addNotifiableNotification(NotifiableNotification $notifiableNotification)
    {
        if (!$this->notifiableNotifications->contains($notifiableNotification)) {
            $this->notifiableNotifications[] = $notifiableNotification;
            $notifiableNotification->setNotification($this);
        }

        return $this;
    }

    /**
     * Removes a delivery record and clears its back-reference.
     *
     * @param NotifiableNotification $notifiableNotification
     *
     * @return $this
     */
    public function removeNotifiableNotification(NotifiableNotification $notifiableNotification)
    {
        if ($this->notifiableNotifications->contains($notifiableNotification)) {
            $this->notifiableNotifications->removeElement($notifiableNotification);
            $notifiableNotification->setNotification(null);
        }

        return $this;
    }

    /**
     * @return string
     */
    public function __toString()
    {
        return $this->getSubject() . ' - ' . $this->getMessage();
    }
}
|
package cn.xianyum.system.controller;
import cn.xianyum.common.utils.DataResult;
import cn.xianyum.system.entity.request.PushLogRequest;
import cn.xianyum.system.entity.response.PushLogResponse;
import cn.xianyum.system.service.PushLogService;
import com.baomidou.mybatisplus.core.metadata.IPage;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoints for the push-notification (message push center) log.
 */
@Api(tags = "消息推送中心日志接口")
@RestController
@RequestMapping(value = "/pushLog")
@Slf4j
public class PushLogController {

    /** Query service for push-log records. */
    private final PushLogService pushLogService;

    /**
     * Constructor injection (rather than field injection) so the dependency
     * is explicit, immutable, and easy to supply in unit tests.
     */
    @Autowired
    public PushLogController(PushLogService pushLogService) {
        this.pushLogService = pushLogService;
    }

    /**
     * Paged query over the push-center log.
     *
     * @param request paging and filter parameters
     * @return a page of {@link PushLogResponse} wrapped in the standard result envelope
     */
    @ApiOperation(value = "消息推送中心日志分页查询数据")
    @PostMapping(value = "/getPage")
    public DataResult getPage(@RequestBody PushLogRequest request) {
        IPage<PushLogResponse> response = pushLogService.getPage(request);
        return DataResult.success(response);
    }
}
|
-- Create a dedicated schema for the gradebook data and make it the default.
CREATE SCHEMA gradebook;
SHOW search_path;
SET search_path TO gradebook;

-- Students table: auto-generated id, name columns, and a numeric grade.
CREATE TABLE students (
    student_id SERIAL PRIMARY KEY,
    first_name VARCHAR(50),
    last_name  VARCHAR(50),
    grade      INT
);

-- Seed data; a single multi-row insert assigns ids in the same order
-- as the original one-row-per-statement inserts.
INSERT INTO students (first_name, last_name, grade) VALUES
    ('Rubetta', 'Arnault', 71),
    ('Pavlov', 'Gedney', 77),
    ('Pauline', 'Cruess', 61),
    ('Gustav', 'Lambeth', 81),
    ('Garey', 'Goulding', 96),
    ('Sidonnie', 'Lofthouse', 63),
    ('Vicki', 'Mardling', 87),
    ('Michal', 'Londsdale', 61),
    ('Hephzibah', 'Gealy', 76),
    ('Cherrita', 'Theuff', 66),
    ('Shandee', 'Dell Casa', 83),
    ('Pedro', 'Collister', 95),
    ('Adam', 'Petkovic', 68),
    ('Margaux', 'Drinkale', 95),
    ('Helene', 'Disbrow', 67),
    ('Sigismundo', 'Gomery', 93),
    ('Tobi', 'Loukes', 79),
    ('Lemar', 'Warburton', 78),
    ('Dorree', 'Sweedy', 98),
    ('Duke', 'Kendrew', 75);

-- Class average.
SELECT AVG(grade) FROM students;

-- Upper-cased name of one specific student.
SELECT UPPER(first_name), UPPER(last_name) FROM students WHERE student_id = 2;

-- Student(s) holding the lowest grade.
SELECT * FROM students WHERE grade = (SELECT MIN(grade) FROM students);

-- Grade distribution restricted to grades above 80, highest first.
SELECT grade, COUNT(grade) FROM students GROUP BY grade HAVING grade > 80 ORDER BY grade DESC;

-- Full roster sorted by name.
SELECT last_name, first_name FROM students ORDER BY last_name, first_name;

-- Extra near-duplicate rows to make the set-operation demos interesting.
INSERT INTO students (first_name, last_name, grade) VALUES
    ('Aubetta', 'Arnault', 71),
    ('Bubetta', 'Arnault', 71),
    ('Cubetta', 'Arnault', 71);

-- UNION: combines both result sets and removes duplicate rows.
SELECT * FROM students WHERE LENGTH(first_name) < 5
UNION
SELECT * FROM students WHERE grade > 80;

-- UNION ALL: combines both result sets without any duplicate check.
SELECT * FROM students WHERE LENGTH(first_name) > 5
UNION ALL
SELECT * FROM students WHERE grade > 80;

-- INTERSECT: only rows present in both result sets.
SELECT * FROM students WHERE LENGTH(first_name) > 5
INTERSECT
SELECT * FROM students WHERE grade > 80;

-- EXCEPT: rows in the first result set that are absent from the second.
SELECT * FROM students WHERE LENGTH(first_name) > 5
EXCEPT
SELECT * FROM students WHERE grade > 80;
using System.Collections.Generic;
using System.IO;
namespace Login_Viewer
{
    /// <summary>
    /// Loads and saves <see cref="Logins"/> objects as an XML file on disk.
    /// The file path is derived from the application settings held in
    /// <see cref="TmpInfo"/>.
    /// </summary>
    public class UseXML
    {
        private string path;
        private TmpInfo infos;

        public UseXML()
        {
            infos = TmpInfo.Instance;
            loadXML();
        }

        /// <summary>
        /// Builds the XML file path from the stored settings.
        /// Assumes settings[2] is a drive letter and settings[3] a relative
        /// path — TODO confirm against the code that writes SettingValues.
        /// </summary>
        private void loadXML()
        {
            List<string> settings = infos.SettingValues;
            path = $"{settings[2]}:\\{settings[3]}";
            //path = Directory.GetCurrentDirectory() + "\\XML_Files\\logins_save.xml";
        }

        /// <summary>
        /// Reads the XML file and deserializes it into a <see cref="Logins"/> object.
        /// </summary>
        public Logins getLoginObjects()
        {
            Serializer ser = new Serializer();
            // FIX: removed a stray empty statement ("; ;") and the redundant
            // string.Empty pre-initialization from the original.
            string xmlInputData = File.ReadAllText(path);
            return ser.Deserialize<Logins>(xmlInputData);
        }

        /// <summary>
        /// Serializes the current login list and version and writes it to the XML file.
        /// </summary>
        public void saveLoginObjects()
        {
            Serializer ser = new Serializer();
            Logins logs = new Logins();
            logs.LoginList = infos.LoginList;
            logs.VersionNo = infos.CurrentVersion;
            string xmlOutputData = ser.Serialize<Logins>(logs);
            File.WriteAllText(path, xmlOutputData);
        }
    }
}
|
%%%-------------------------------------------------------------------
%%% @author aurawing
%%% @copyright (C) 2016, <COMPANY>
%%% @doc
%%% Field-name and constant macros for the "file" collection.
%%% Keys are binaries, presumably document-store (e.g. MongoDB) field
%%% names — confirm against the data-access layer.
%%% @end
%%% Created : 20 July 2016, 10:30 AM
%%%-------------------------------------------------------------------
-author("aurawing").

%% --- Top-level document fields -------------------------------------
-define(F_ID, <<"_id">>).
-define(F_TYPE, <<"type">>).
-define(F_OWNER, <<"owner">>).
-define(F_NAME, <<"name">>).
-define(F_PID, <<"pid">>).
-define(F_APID, <<"apid">>).
-define(F_IDENTITY, <<"identity">>).
-define(F_MAXVER, <<"maxver">>).
-define(F_EXT_G, <<"ext_g">>).
-define(F_VERSIONS, <<"versions">>).

%% --- Per-version (entries of "versions") fields --------------------
-define(F_VERNO, <<"verno">>).
-define(F_FD, <<"fd">>).
-define(F_CREATE_TIME, <<"create_time">>).
-define(F_MODIFY_TIME, <<"modify_time">>).
-define(F_INSERT_TIME, <<"insert_time">>).
-define(F_SIZE, <<"size">>).
-define(F_DIGEST, <<"digest">>).
-define(F_CURVER, <<"curver">>).
-define(F_EXT_V, <<"ext_v">>).

%% --- Collection name and type/root constants -----------------------
-define(FILE_TBL, <<"file">>).
-define(FTYPE_D, <<"d">>).   % directory entry
-define(FTYPE_F, <<"f">>).   % file entry
-define(ROOT_PID, <<"1">>).
-define(ROOT_DIR_ID, <<"1">>).

%% Derives a stable identity as SHA-1 over "Type|Name|Pid".
-define(CREATE_IDENTITY(Type, Name, Pid), aurafs_mw_digest:sha1(<<Type/binary, $|, Name/binary, $|, Pid/binary>>)).
|
/*jshint globalstrict:false, strict:false */
/*global fail, assertTrue, assertEqual */
////////////////////////////////////////////////////////////////////////////////
/// @brief test the authentication (dual ldap)
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2021 ArangoDB GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Frank Celler
/// @author Copyright 2021, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
const jsunity = require("jsunity");
const {
FORBIDDEN,
OK,
createSuite
} = require('@arangodb/testutils/ldap-generic-failover.js');
// username, password, database, result
const dualLdapResults = {
User1NoPwLdap1: ["user1", "", "test", FORBIDDEN],
User1PwLdap1: ["user1", "abc", "test", OK],
User1NoPwLdap2: ["user1,dc=arangodb", "", "test", FORBIDDEN],
User1PwLdap2: ["user1,dc=arangodb", "abc", "test", FORBIDDEN],
DonaldNoPwLdap1: ["The Donald", "", "test", FORBIDDEN],
DonaldPwLdap1: ["The Donald", "abc", "test", OK],
DonaldNoPwLdap2: ["The Donald,dc=arangodb", "", "test", FORBIDDEN],
DonaldPwLdap: ["The Donald,dc=arangodb", "abc", "test", FORBIDDEN],
MerkelNoPwLdap1: ["Angela Merkel", "", "test", FORBIDDEN],
MerkelPwLdap1: ["Angela Merkel", "abc", "test", FORBIDDEN],
MerkelNoPwLdap2: ["Angela Merkel,dc=arangodb", "", "test", FORBIDDEN],
MerkelPwLdap2: ["Angela Merkel,dc=arangodb", "abc", "test", FORBIDDEN]
};
const DualLdapSuite = createSuite(dualLdapResults);
jsunity.run(DualLdapSuite);
return jsunity.done();
|
# Mozi.HttpEmbedded 嵌入式Web服务器
## 项目介绍
Mozi.HttpEmbedded是一个基于.Net构建的嵌入式Web服务器,为.Net App提供web服务功能。
> 嵌入式的目标不是单片机,而是.Net应用程序。
> 此项目并不会实现非常丰富的大型Web服务器功能
> 项目基于.Net Framework 4.0开发,也可转换为.Net Core/.Net Standard项目,亦可作为.Net5.0项目引用库
## 特点
Mozi.HttpEmbedded在Socket之上使用异步单线程模型,构建了一个HTTP服务器。
1. 嵌入式
本项目可作为.Net应用的内嵌Web服务器,亦可作为单独Web服务器
2. 轻量化
项目编译结果小,部署程序占用系统资源少
3. 可用性
开箱即用,配置少,可控性高。同时遵从.Net平台Web项目开发的典型规范。
4. 低耦合
不改变现有业务逻辑,无需对现有代码进行改造,即可实现现有功能面向HTTP提供服务器
5. 可控性
宿主程序对Web服务高度可控
## 典型应用场景
业务体系或功能已开发完成,期望在项目中加入一个Web服务器功能,同时不对现有代码进行大规模改动。
1. APP内嵌WEB服务
通过HTTP方式对应用终端数据或文件,进行主动推送、下载
2. 简易WEB服务器
仅用很少的系统资源即可快速搭建WEB服务器
> 在经典的Asp.Net开发中,Web服务的部署高度依赖于IIS,.Net Core项目则可基于Kestrel/IIS部署。
> 基于KESTREL或IIS部署的WEB项目,都基于Asp.Net体系。
## 原理及功能
1. HTTP协议
实现HTTP/1.1
2. 通讯认证
实现基本认证(Basic)
3. Cookie管理
支持标准Cookie
4. HTTP请求方法
GET、POST
5. 路由
实现了URL管理,参见Router模块
6. 引用与依赖关系
依赖于.Net Framework
7. Payload压缩
支持GZip,Deflate算法
8. 字符编码
字符编码使用UTF-8
## 功能与版本迭代
不定期对Mozi.HttpEmbedded的功能进行完善,解决各种BUG。HTTP标准功能繁多,需要一步步实现。
## 版权说明
本项目采用MIT开源协议,引用请注明出处。欢迎复制,引用和修改。意见建议疑问请联系软件作者,或提交ISSUE。
## 用例说明
~~~csharp
HttpServer hs = new HttpServer();
//配置端口并启动服务器
hs.SetPort(9000).Start();
//开启认证
hs.UseAuth(AuthorizationType.Basic).SetUser("admin", "admin");
//开启文件压缩
hs.UseGzip(new Compress.CompressOption() {
MinContentLength=1024,
CompressLevel=2
});
//开启静态文件支持
hs.UseStaticFiles("");
//配置虚拟目录 虚拟目录下的文件可以随意访问
hs.SetVirtualDirectory("config", AppDomain.CurrentDomain.BaseDirectory + @"Config\");
Router router = Router.Default;
//注入API
//1,此方法会扫描程序集内继承自BaseApi或属性标记为[BasicApi]的类
//2,Http通讯数据标准默认为xml,使用Router.Default.SetDataSerializer(ISerializer ser)更改序列化类型
router.Register($"{dllpath}");
router.SetDataSerializer(new JSONSerializer());
//路由映射
router.Map("services/{controller}/{action}");
Console.ReadLine();
~~~
### By [Jason][1] on Feb. 5, 2020
[1]:mailto:brotherqian@163.com
|
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Linq;
using UnityEditor;
using UnityEngine;
using Chisel.Core;
using Chisel.Components;
using Object = UnityEngine.Object;
using UnityEngine.SceneManagement;
namespace Chisel.Editors
{
// How a new pick/drag combines with the existing selection:
// replace it, add to it, or subtract from it.
[Serializable]
public enum SelectionType { Replace, Additive, Subtractive };
// TODO: rewrite
// Reflection wrapper around UnityEditor's internal RectSelection type so Chisel
// can read and drive the scene view's rubber-band (marquee) selection.
// All members are only meaningful when Valid is true (reflection succeeded);
// the internal member names targeted here may change between Unity versions.
internal static class ChiselRectSelection
{
    // True only when every internal field/method we need was resolved.
    public static bool Valid { get { return reflectionSucceeded; } }
    // Unity's hot-control id for rect selection (read from s_RectSelectionID).
    public static int RectSelectionID { get; private set; }

    static object rectSelection;   // the SceneView's private m_RectSelection instance
    static SceneView sceneView;    // scene view the cached rectSelection belongs to
    public static SceneView SceneView
    {
        get
        {
            return sceneView;
        }
        set
        {
            if (sceneView == value)
                return;
            sceneView = value;
            // Re-fetch the RectSelection instance whenever the scene view changes.
            rectSelection = rectSelectionField.GetValue(sceneView);
        }
    }

    // Mirrors of RectSelection's private drag state for the current scene view.
    public static bool RectSelecting { get { return (bool)rectSelectingField.GetValue(rectSelection); } }
    public static Vector2 SelectStartPoint { get { return (Vector2)selectStartPointField.GetValue(rectSelection); } }
    public static Vector2 SelectMousePoint { get { return (Vector2)selectMousePointField.GetValue(rectSelection); } }
    public static UnityEngine.Object[] SelectionStart { get { return (UnityEngine.Object[])selectionStartField.GetValue(rectSelection); } set { selectionStartField.SetValue(rectSelection, value); } }
    public static UnityEngine.Object[] CurrentSelection { get { return (UnityEngine.Object[])currentSelectionField.GetValue(rectSelection); } set { currentSelectionField.SetValue(rectSelection, value); } }
    public static Dictionary<GameObject, bool> LastSelection { get { return (Dictionary<GameObject, bool>)lastSelectionField.GetValue(rectSelection); } }

    // Invokes RectSelection.UpdateSelection (static, internal) to merge
    // newObjects into existingSelection according to the selection type.
    public static void UpdateSelection(Object[] existingSelection, Object[] newObjects, SelectionType type)
    {
        object selectionType;
        switch (type)
        {
            default:                        selectionType = selectionTypeNormal; break;
            case SelectionType.Additive:    selectionType = selectionTypeAdditive; break;
            case SelectionType.Subtractive: selectionType = selectionTypeSubtractive; break;
        }

        updateSelectionMethod.Invoke(null,
            new object[]
            {
                existingSelection,
                newObjects,
                selectionType,
                RectSelecting
            });
    }

    // Cached reflection handles into UnityEditor.RectSelection internals.
    static Type unityRectSelectionType;
    static Type unityEnumSelectionType;

    static object selectionTypeAdditive;
    static object selectionTypeSubtractive;
    static object selectionTypeNormal;

    static FieldInfo rectSelectionField;
    static FieldInfo rectSelectingField;
    static FieldInfo selectStartPointField;
    static FieldInfo selectMousePointField;
    static FieldInfo selectionStartField;
    static FieldInfo lastSelectionField;
    static FieldInfo currentSelectionField;
    static FieldInfo rectSelectionIDField;

    static MethodInfo updateSelectionMethod;

    static bool reflectionSucceeded = false;

    // Resolves all internal types/fields/methods once; any failure leaves
    // reflectionSucceeded false and Valid reporting false.
    static ChiselRectSelection()
    {
        reflectionSucceeded = false;
        unityRectSelectionType = ReflectionExtensions.GetTypeByName("UnityEditor.RectSelection");
        if (unityRectSelectionType == null)
            return;

        unityEnumSelectionType = ReflectionExtensions.GetTypeByName("UnityEditor.RectSelection+SelectionType");
        if (unityEnumSelectionType == null)
            return;

        rectSelectionField = typeof(SceneView).GetField("m_RectSelection", BindingFlags.NonPublic | BindingFlags.Instance);
        if (rectSelectionField == null) return;

        rectSelectionIDField = unityRectSelectionType.GetField("s_RectSelectionID", BindingFlags.NonPublic | BindingFlags.Static);
        if (rectSelectionIDField == null) return;

        RectSelectionID = (int)rectSelectionIDField.GetValue(null);
        rectSelectingField = unityRectSelectionType.GetField("m_RectSelecting", BindingFlags.NonPublic | BindingFlags.Instance);
        selectStartPointField = unityRectSelectionType.GetField("m_SelectStartPoint", BindingFlags.NonPublic | BindingFlags.Instance);
        selectionStartField = unityRectSelectionType.GetField("m_SelectionStart", BindingFlags.NonPublic | BindingFlags.Instance);
        lastSelectionField = unityRectSelectionType.GetField("m_LastSelection", BindingFlags.NonPublic | BindingFlags.Instance);
        currentSelectionField = unityRectSelectionType.GetField("m_CurrentSelection", BindingFlags.NonPublic | BindingFlags.Instance);
        selectMousePointField = unityRectSelectionType.GetField("m_SelectMousePoint", BindingFlags.NonPublic | BindingFlags.Instance);
        updateSelectionMethod = unityRectSelectionType.GetMethod("UpdateSelection", BindingFlags.NonPublic | BindingFlags.Static,
                                                                  null,
                                                                  new Type[] {
                                                                      typeof(UnityEngine.Object[]),
                                                                      typeof(UnityEngine.Object[]),
                                                                      unityEnumSelectionType,
                                                                      typeof(bool)
                                                                  },
                                                                  null);
        selectionTypeAdditive = Enum.Parse(unityEnumSelectionType, "Additive");
        selectionTypeSubtractive = Enum.Parse(unityEnumSelectionType, "Subtractive");
        selectionTypeNormal = Enum.Parse(unityEnumSelectionType, "Normal");

        reflectionSucceeded = rectSelectingField != null &&
                              selectStartPointField != null &&
                              selectionStartField != null &&
                              lastSelectionField != null &&
                              currentSelectionField != null &&
                              selectMousePointField != null &&
                              updateSelectionMethod != null &&
                              selectionTypeAdditive != null &&
                              selectionTypeSubtractive != null &&
                              selectionTypeNormal != null;
    }
}
// TODO: clean up, rename
internal static class ChiselRectSelectionManager
{
static HashSet<CSGTreeNode> rectFoundTreeNodes = new HashSet<CSGTreeNode>();
static HashSet<GameObject> rectFoundGameObjects = new HashSet<GameObject>();
static Vector2 prevStartGUIPoint;
static Vector2 prevMouseGUIPoint;
static Vector2 prevStartScreenPoint;
static Vector2 prevMouseScreenPoint;
static bool rectClickDown = false;
static bool mouseDragged = false;
static Vector2 clickMousePosition = Vector2.zero;
// TODO: put somewhere else
public static SelectionType GetCurrentSelectionType()
{
var selectionType = SelectionType.Replace;
// shift only
if ( Event.current.shift && !EditorGUI.actionKey && !Event.current.alt) { selectionType = SelectionType.Additive; } else
// action key only (Command on macOS, Control on Windows)
if (!Event.current.shift && EditorGUI.actionKey && !Event.current.alt) { selectionType = SelectionType.Subtractive; }
return selectionType;
}
static void RemoveGeneratedMeshesFromSelection()
{
var selectedObjects = Selection.objects;
if (selectedObjects != null)
{
var foundObjects = selectedObjects;
RemoveGeneratedMeshesFromArray(ref foundObjects);
if (foundObjects.Length != selectedObjects.Length)
Selection.objects = foundObjects;
}
}
static bool RemoveGeneratedMeshesFromArray(ref UnityEngine.Object[] selection)
{
var found = new List<UnityEngine.Object>();
for (int i = selection.Length - 1; i >= 0; i--)
{
var obj = selection[i];
if (ChiselGeneratedComponentManager.IsObjectGenerated(obj))
continue;
found.Add(obj);
}
if (selection.Length != found.Count)
{
selection = found.ToArray();
return true;
}
return false;
}
internal static void Update(SceneView sceneView)
{
if (!ChiselRectSelection.Valid)
{
prevStartGUIPoint = new Vector2(float.PositiveInfinity, float.PositiveInfinity);
prevMouseGUIPoint = prevStartGUIPoint;
prevStartScreenPoint = Vector2.zero;
prevMouseScreenPoint = Vector2.zero;
rectFoundGameObjects.Clear();
rectFoundTreeNodes.Clear();
return;
}
ChiselRectSelection.SceneView = sceneView;
var rectSelectionID = ChiselRectSelection.RectSelectionID;
var hotControl = GUIUtility.hotControl;
var areRectSelecting = hotControl == rectSelectionID;
var typeForControl = Event.current.GetTypeForControl(rectSelectionID);
// check if we're rect-selecting
if (areRectSelecting)
{
if ((typeForControl == EventType.Used || Event.current.commandName == "ModifierKeysChanged") &&
ChiselRectSelection.RectSelecting)
{
var selectStartPoint = ChiselRectSelection.SelectStartPoint;
var selectMousePoint = ChiselRectSelection.SelectMousePoint;
// determine if our frustum changed since the last time
bool modified = false;
bool needUpdate = false;
if (prevStartGUIPoint != selectStartPoint)
{
prevStartGUIPoint = selectStartPoint;
prevStartScreenPoint = Event.current.mousePosition;
needUpdate = true;
}
if (prevMouseGUIPoint != selectMousePoint)
{
prevMouseGUIPoint = selectMousePoint;
prevMouseScreenPoint = Event.current.mousePosition;
needUpdate = true;
}
if (needUpdate)
{
var rect = ChiselCameraUtility.PointsToRect(prevStartScreenPoint, prevMouseScreenPoint);
if (rect.width > 3 &&
rect.height > 3)
{
var frustum = ChiselCameraUtility.GetCameraSubFrustum(Camera.current, rect);
var selectionType = GetCurrentSelectionType();
if (selectionType == SelectionType.Replace)
{
rectFoundTreeNodes.Clear();
rectFoundGameObjects.Clear();
}
// Find all the brushes (and it's gameObjects) that are inside the frustum
if (!ChiselSceneQuery.GetNodesInFrustum(frustum, UnityEditor.Tools.visibleLayers, ref rectFoundTreeNodes))
{
if (rectFoundGameObjects != null &&
rectFoundGameObjects.Count > 0)
{
rectFoundTreeNodes.Clear();
rectFoundGameObjects.Clear();
modified = true;
}
} else
modified = true;
foreach(var treeNode in rectFoundTreeNodes)
{
var brush = (CSGTreeBrush)treeNode;
if (brush.Valid)
{
switch (selectionType)
{
case SelectionType.Additive:
{
ChiselSyncSelection.SelectBrushVariant(brush, uniqueSelection: false);
break;
}
case SelectionType.Subtractive:
{
ChiselSyncSelection.DeselectBrushVariant(brush);
break;
}
default:
{
ChiselSyncSelection.SelectBrushVariant(brush, uniqueSelection: true);
break;
}
}
}
var nodeComponent = ChiselNodeHierarchyManager.FindChiselNodeByTreeNode(treeNode);
if (!nodeComponent)
continue;
var gameObject = nodeComponent.gameObject;
rectFoundGameObjects.Add(gameObject);
}
}
}
UnityEngine.Object[] currentSelection = null;
var originalLastSelection = ChiselRectSelection.LastSelection;
var originalSelectionStart = ChiselRectSelection.SelectionStart;
if (modified &&
rectFoundGameObjects != null &&
rectFoundGameObjects.Count > 0)
{
foreach(var obj in rectFoundGameObjects)
{
// if it hasn't already been added, add the obj
if (!originalLastSelection.ContainsKey(obj))
{
originalLastSelection.Add(obj, false);
}
}
currentSelection = originalLastSelection.Keys.ToArray();
ChiselRectSelection.CurrentSelection = currentSelection;
} else
{
if (currentSelection == null || modified) { currentSelection = originalLastSelection.Keys.ToArray(); }
}
if (RemoveGeneratedMeshesFromArray(ref originalSelectionStart))
modified = true;
if (currentSelection != null && RemoveGeneratedMeshesFromArray(ref currentSelection))
modified = true;
if ((Event.current.commandName == "ModifierKeysChanged" || modified))
{
var foundObjects = currentSelection;
RemoveGeneratedMeshesFromArray(ref foundObjects);
// calling static method UpdateSelection of RectSelection
ChiselRectSelection.UpdateSelection(originalSelectionStart, foundObjects, GetCurrentSelectionType());
}
}
hotControl = GUIUtility.hotControl;
}
if (hotControl != rectSelectionID)
{
prevStartGUIPoint = Vector2.zero;
prevMouseGUIPoint = Vector2.zero;
rectFoundGameObjects.Clear();
rectFoundTreeNodes.Clear();
} /*else
if (ignoreRect)
{
hotControl = 0;
GUIUtility.hotControl = 0;
}
*/
bool click = false;
var evt = Event.current;
switch (typeForControl)
{
case EventType.MouseDown:
{
rectClickDown = (Event.current.button == 0 && areRectSelecting);
clickMousePosition = Event.current.mousePosition;
mouseDragged = false;
break;
}
case EventType.MouseUp:
{
if (!mouseDragged)
{
if ((UnityEditor.HandleUtility.nearestControl != 0 || evt.button != 0) &&
(GUIUtility.keyboardControl != 0 || evt.button != 2))
break;
click = true;
Event.current.Use();
}
rectClickDown = false;
break;
}
case EventType.MouseMove:
{
rectClickDown = false;
break;
}
case EventType.MouseDrag:
{
mouseDragged = true;
break;
}
case EventType.Used:
{
if (!mouseDragged)
{
var delta = Event.current.mousePosition - clickMousePosition;
if (Mathf.Abs(delta.x) > 4 || Mathf.Abs(delta.y) > 4) { mouseDragged = true; }
}
if (mouseDragged || !rectClickDown || Event.current.button != 0 || ChiselRectSelection.RectSelecting) { rectClickDown = false; break; }
click = true;
Event.current.Use();
break;
}
case EventType.KeyUp:
{
if (hotControl == 0 &&
Event.current.keyCode == ChiselKeyboardDefaults.kCancelKey)
{
if (GUIUtility.hotControl == 0 && // make sure we're not actively doing anything
Tools.current != Tool.Custom)
{
// This deselects everything and disables all tool modes
Selection.activeTransform = null;
Event.current.Use();
}
}
break;
}
case EventType.ValidateCommand:
{
if (Event.current.commandName != "SelectAll")
break;
Event.current.Use();
break;
}
case EventType.ExecuteCommand:
{
if (Event.current.commandName != "SelectAll")
break;
var transforms = new List<UnityEngine.Object>();
for (int sceneIndex = 0; sceneIndex < SceneManager.sceneCount; sceneIndex++)
{
var scene = SceneManager.GetSceneAt(sceneIndex);
if (!scene.isLoaded)
continue;
foreach (var gameObject in scene.GetRootGameObjects())
{
foreach (var transform in gameObject.GetComponentsInChildren<Transform>())
{
if ((transform.hideFlags & (HideFlags.NotEditable | HideFlags.HideInHierarchy)) == (HideFlags.NotEditable | HideFlags.HideInHierarchy))
continue;
transforms.Add(transform.gameObject);
}
}
}
var foundObjects = transforms.ToArray();
RemoveGeneratedMeshesFromArray(ref foundObjects);
Selection.objects = foundObjects;
Event.current.Use();
break;
}
/*
case EventType.ValidateCommand:
{
if (Event.current.commandName == "SelectAll")
{
Event.current.Use();
break;
}
if (Keys.HandleSceneValidate(EditModeManager.CurrentTool, true))
{
Event.current.Use();
HandleUtility.Repaint();
}
break;
}
case EventType.ExecuteCommand:
{
if (Event.current.commandName == "SelectAll")
{
var transforms = new List<UnityEngine.Object>();
for (int sceneIndex = 0; sceneIndex < SceneManager.sceneCount; sceneIndex++)
{
var scene = SceneManager.GetSceneAt(sceneIndex);
foreach (var gameObject in scene.GetRootGameObjects())
{
foreach (var transform in gameObject.GetComponentsInChildren<Transform>())
{
if ((transform.hideFlags & (HideFlags.NotEditable | HideFlags.HideInHierarchy)) == (HideFlags.NotEditable | HideFlags.HideInHierarchy))
continue;
transforms.Add(transform.gameObject);
}
}
}
Selection.objects = transforms.ToArray();
Event.current.Use();
break;
}
break;
}
case EventType.KeyDown:
{
if (Keys.HandleSceneKeyDown(EditModeManager.CurrentTool, true))
{
Event.current.Use();
HandleUtility.Repaint();
}
break;
}
case EventType.KeyUp:
{
if (Keys.HandleSceneKeyUp(EditModeManager.CurrentTool, true))
{
Event.current.Use();
HandleUtility.Repaint();
}
break;
}
*/
}
if (click)
{
// make sure GeneratedMeshes are not part of our selection
RemoveGeneratedMeshesFromSelection();
DoSelectionClick(sceneView, Event.current.mousePosition);
}
}
// TODO: make selecting variants work when selecting in hierarchy/rect-select too
// TODO: make selecting variants work when selecting in hierarchy/rect-select too
public static void DoSelectionClick(SceneView sceneView, Vector2 mousePosition)
{
    // Pick the closest object under the mouse cursor, together with the
    // brush intersection that produced the hit.
    ChiselIntersection intersection;
    var gameobject = ChiselClickSelectionManager.PickClosestGameObject(mousePosition, out intersection);
    // If we're a child of a composite that has a "handle as one" flag set, return that instead
    gameobject = ChiselSceneQuery.FindSelectionBase(gameobject);
    var selectionType = GetCurrentSelectionType();
    var selectedObjectsOnClick = new List<int>(Selection.instanceIDs);
    switch (selectionType)
    {
        case SelectionType.Additive:
        {
            // Clicked empty space: nothing to add to the selection.
            if (!gameobject)
                break;
            // NOTE(review): unlike the Subtractive/default branches, this branch does not
            // call Undo.RecordObject before mutating the sync-selection — confirm this
            // is intentional (undo support for additive clicks).
            ChiselSyncSelection.SelectBrushVariant(intersection.brushIntersection.brush, uniqueSelection: false);
            var instanceID = gameobject.GetInstanceID();
            selectedObjectsOnClick.Add(instanceID);
            // Suppress the selection-changed callback while we mutate the selection ourselves.
            ChiselClickSelectionManager.ignoreSelectionChanged = true;
            Selection.instanceIDs = selectedObjectsOnClick.ToArray();
            break;
        }
        case SelectionType.Subtractive:
        {
            if (!gameobject)
                break;
            Undo.RecordObject(ChiselSyncSelection.Instance, "Deselected brush variant");
            ChiselSyncSelection.DeselectBrushVariant(intersection.brushIntersection.brush);
            // Can only deselect the brush if all its synchronized brushes have also been deselected
            if (!ChiselSyncSelection.IsAnyBrushVariantSelected(intersection.brushIntersection.brush))
            {
                var instanceID = gameobject.GetInstanceID();
                selectedObjectsOnClick.Remove(instanceID);
            }
            ChiselClickSelectionManager.ignoreSelectionChanged = true;
            Selection.instanceIDs = selectedObjectsOnClick.ToArray();
            return;
        }
        default:
        {
            // Plain click: replace the current selection with the clicked object.
            Undo.RecordObject(ChiselSyncSelection.Instance, "Selected brush variant");
            ChiselSyncSelection.SelectBrushVariant(intersection.brushIntersection.brush, uniqueSelection: true);
            ChiselClickSelectionManager.ignoreSelectionChanged = true;
            Selection.activeGameObject = gameobject;
            break;
        }
    }
}
}
}
|
-- file:xml.sql ln:306 expect:true
SELECT XMLPARSE(DOCUMENT '<!DOCTYPE foo [<!ENTITY c SYSTEM "/etc/no.such.file">]><foo>&c
|
using System;
using System.Runtime.InteropServices;

namespace Klak.Ndi
{
    // Pixel-format codes used by the native NDI plugin.
    // The numeric values are the little-endian ASCII FourCC codes.
    public enum FourCC : uint
    {
        UYVY = 0x59565955, // ASCII "UYVY"
        UYVA = 0x41565955  // ASCII "UYVA"
    }

    // P/Invoke entry points into the native "KlakNDI" plugin library.
    public static class PluginEntry
    {
        #region Common functions

        // Returns the native render-event callback used to update textures.
        [DllImport("KlakNDI")]
        public static extern IntPtr NDI_GetTextureUpdateCallback();

        // Fills `destination` with up to `maxCount` source-name pointers;
        // returns the number of entries written.
        [DllImport("KlakNDI")]
        public static extern int NDI_RetrieveSourceNames(IntPtr[] destination, int maxCount);

        #endregion

        #region Sender functions

        // Creates a named NDI sender; the returned handle must be released
        // with NDI_DestroySender.
        [DllImport("KlakNDI")]
        public static extern IntPtr NDI_CreateSender(string name);

        [DllImport("KlakNDI")]
        public static extern void NDI_DestroySender(IntPtr sender);

        // Sends one video frame; `data` points to pixel data in the given FourCC format.
        [DllImport("KlakNDI")]
        public static extern void NDI_SendFrame(IntPtr sender, IntPtr data, int width, int height, FourCC fourCC);

        [DllImport("KlakNDI")]
        public static extern void NDI_SyncSender(IntPtr sender);

        #endregion

        #region Receiver functions

        // Opens a receiver for the first source whose name matches `clause`;
        // the returned handle must be released with NDI_DestroyReceiver.
        [DllImport("KlakNDI")]
        public static extern IntPtr NDI_TryOpenSourceNamedLike(string clause);

        [DllImport("KlakNDI")]
        public static extern void NDI_DestroyReceiver(IntPtr receiver);

        [DllImport("KlakNDI")]
        public static extern uint NDI_GetReceiverID(IntPtr receiver);

        [DllImport("KlakNDI")]
        public static extern int NDI_GetFrameWidth(IntPtr receiver);

        [DllImport("KlakNDI")]
        public static extern int NDI_GetFrameHeight(IntPtr receiver);

        [DllImport("KlakNDI")]
        public static extern FourCC NDI_GetFrameFourCC(IntPtr receiver);

        #endregion
    }
}
|
package net.crimsonwoods.easydatabinding.models
import android.content.Context
import android.content.res.Resources
import androidx.test.core.app.ApplicationProvider
import androidx.test.ext.junit.runners.AndroidJUnit4
import kotlin.test.BeforeTest
import kotlin.test.Test
import kotlin.test.assertFalse
import kotlin.test.assertTrue
import net.crimsonwoods.easydatabinding.testing.R
import org.junit.runner.RunWith
@RunWith(AndroidJUnit4::class)
class BoolTest {
    // Android resources used to resolve boolean resource IDs under test.
    private lateinit var resources: Resources

    @BeforeTest
    fun setUp() {
        resources = ApplicationProvider.getApplicationContext<Context>().resources
    }

    // Bool.of(resId) must resolve to the underlying boolean resource value.
    @Test
    fun testToBoolean_Res() {
        assertTrue(Bool.of(R.bool.test_positive).toBoolean(resources))
        assertFalse(Bool.of(R.bool.test_negative).toBoolean(resources))
    }

    // The Bool.TRUE / Bool.FALSE constants must map directly to Kotlin booleans,
    // without touching resources.
    @Test
    fun testToBoolean_Value() {
        assertTrue(Bool.TRUE.toBoolean(resources))
        assertFalse(Bool.FALSE.toBoolean(resources))
    }
}
|
use prettytable::{Table, Row, Cell};
use crate::server::detect::{self, VersionQuery};
use crate::server::options::Info;
use crate::server::package::Package;
use crate::server::linux;
use crate::server::macos;
use crate::server::init::find_distribution;
use crate::table;
/// Prints information about the locally installed server distribution.
///
/// With `--bin-path` only the server binary path is printed; otherwise a
/// table with installation method, versions and binary path is rendered.
/// Fails when the binary path is requested for a non-package installation.
pub fn info(options: &Info) -> anyhow::Result<()> {
    // Resolve the server binary location for a package slot, accounting for
    // the per-OS install layout. Hoisted here because it was previously
    // duplicated verbatim in both output branches.
    let server_path = |slot| {
        if cfg!(target_os="macos") {
            macos::get_server_path(slot)
        } else {
            linux::get_server_path(Some(slot))
        }
    };
    let version_query = VersionQuery::new(
        options.nightly, options.version.as_ref());
    let current_os = detect::current_os()?;
    let avail_methods = current_os.get_available_methods()?;
    let (distr, method, _) = find_distribution(
        &*current_os, &avail_methods,
        &version_query, &options.method)?;
    if options.bin_path {
        if let Some(pkg) = distr.downcast_ref::<Package>() {
            println!("{}", server_path(&pkg.slot).display());
        } else {
            // Only package-based installs have a well-defined binary path.
            anyhow::bail!("cannot print binary path for {} installation",
                method.option());
        }
    } else {
        let mut table = Table::new();
        table.add_row(Row::new(vec![
            Cell::new("Installation method"),
            Cell::new(method.title()),
        ]));
        table.add_row(Row::new(vec![
            Cell::new("Major version"),
            Cell::new(distr.major_version().title()),
        ]));
        table.add_row(Row::new(vec![
            Cell::new("Exact version"),
            Cell::new(distr.version().as_ref()),
        ]));
        if let Some(pkg) = distr.downcast_ref::<Package>() {
            let cmd = server_path(&pkg.slot);
            table.add_row(Row::new(vec![
                Cell::new("Binary path"),
                Cell::new(&cmd.display().to_string()),
            ]));
        }
        table.set_format(*table::FORMAT);
        table.printstd();
    }
    Ok(())
}
|
/*
* Copyright (c) 2021, dSPACE GmbH, Modelica Association and contributors
*
* Licensed under the 3-Clause BSD license(the \"License\");
* you may not use this software except in compliance with
* the License.
*
* This software is not fully developed or tested.
*
* THE SOFTWARE IS PROVIDED \"as is\", in the hope that it may
* be useful to other users, without any warranty of any
* kind, either express or implied.
*
* See the License for the specific language governing
* permissions and limitations under the License.
*/
using System;
using System.IO;
using eFMI.Misc;
namespace eFMI.ContainerManager
{
class EfmuContainerTools
{
    public delegate bool BoolReturnMethod();
    public delegate bool BoolReturnStringParamMethod(string param);

    /* TODO: refactoring + different solution */
    /// <summary>
    /// Invokes <paramref name="handler"/>, converting any thrown exception
    /// into a dumped diagnostic and a <c>false</c> return value.
    /// </summary>
    /// <returns>The handler's result, or <c>false</c> if it threw.</returns>
    public static bool PerformCallWithExceptionHandling(BoolReturnMethod handler)
    {
        try
        {
            return handler();
        }
        catch (IOException e)
        {
            // IO failures are reported separately for clearer diagnostics.
            EfmuConsoleWriter.DumpException(e, "Caught IO exception");
            return false;
        }
        catch (Exception e)
        {
            EfmuConsoleWriter.DumpException(e, "Caught exception");
            return false;
        }
    }

    /* TODO: refactoring + different solution */
    /// <summary>
    /// String-parameter variant; delegates to the parameterless overload so the
    /// exception-handling logic exists in exactly one place.
    /// </summary>
    public static bool PerformCallWithExceptionHandling(BoolReturnStringParamMethod handler,
                                                        string handlerParam)
    {
        return PerformCallWithExceptionHandling(() => handler(handlerParam));
    }

    /// <summary>
    /// Returns true when the given operation mutates the container and
    /// therefore requires a final write-back.
    /// </summary>
    public static bool DoesContainerOperationImplyFinalWrite(EfmuContainerOperations containerOperation)
    {
        switch (containerOperation)
        {
            case EfmuContainerOperations.CreateContainer:
            case EfmuContainerOperations.AddToContainer:
            case EfmuContainerOperations.ReplaceInContainer:
            case EfmuContainerOperations.DeleteFromContainer:
            case EfmuContainerOperations.UnpackFmu:
            case EfmuContainerOperations.TidyRoot:
            {
                return true;
            }
        }
        return false;
    }

    /// <summary>
    /// Returns true when the given operation needs the container to be read
    /// before it can run (i.e. it operates on existing content).
    /// </summary>
    public static bool DoesContainerOperationRequireInitialRead(EfmuContainerOperations containerOperation)
    {
        switch (containerOperation)
        {
            case EfmuContainerOperations.AddToContainer:
            case EfmuContainerOperations.ReplaceInContainer:
            case EfmuContainerOperations.DeleteFromContainer:
            case EfmuContainerOperations.ExtractFromContainer:
            case EfmuContainerOperations.ExtractSchemasFromContainer:
            case EfmuContainerOperations.UnpackFmu:
            case EfmuContainerOperations.TidyRoot:
            case EfmuContainerOperations.ListContainerContent:
            {
                return true;
            }
        }
        return false;
    }

    /// <summary>
    /// Returns true for operations that read the container but never write it.
    /// (Name kept as "ReadyOnly" — likely a typo for "ReadOnly" — because
    /// callers depend on it.)
    /// </summary>
    public static bool IsContainerOperationReadyOnly(EfmuContainerOperations containerOperation)
    {
        return DoesContainerOperationRequireInitialRead(containerOperation)
               && !DoesContainerOperationImplyFinalWrite(containerOperation);
    }

    /// <summary>
    /// Returns true for either of the two extraction operations.
    /// </summary>
    public static bool IsExtractFromContainerOperation(EfmuContainerOperations containerOperation)
    {
        return (EfmuContainerOperations.ExtractFromContainer == containerOperation
                || EfmuContainerOperations.ExtractSchemasFromContainer == containerOperation);
    }
}
}
|
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart' hide MessageHandler;
/// Wrapper around a raw APNs notification payload received from the
/// platform channel.
class ApnsRemoteMessage {
  ApnsRemoteMessage.fromMap(this.payload);

  /// Raw notification payload as delivered by iOS.
  final Map<String, dynamic> payload;

  /// The action identifier the user tapped, if any (see [UNNotificationAction]).
  String? get actionIdentifier => UNNotificationAction.getIdentifier(payload);
}
/// Callback invoked with an incoming APNs message (onMessage/onLaunch/onResume).
typedef ApnsMessageHandler = Future<void> Function(ApnsRemoteMessage);
/// Callback deciding whether a foreground notification should be shown.
typedef WillPresentHandler = Future<bool> Function(ApnsRemoteMessage);
/// iOS-only push connector talking to the native side over the
/// `flutter_apns` method channel.
class ApnsPushConnectorOnly {
  // Channel creation asserts the platform eagerly so misuse fails fast.
  final MethodChannel _channel = () {
    assert(Platform.isIOS,
        'ApnsPushConnectorOnly can only be created on iOS platform!');
    return const MethodChannel('flutter_apns');
  }();

  // Handlers registered via configureApns.
  ApnsMessageHandler? _onMessage;
  ApnsMessageHandler? _onLaunch;
  ApnsMessageHandler? _onResume;

  /// Asks iOS for notification permissions with the given settings.
  void requestNotificationPermissions(
      [IosNotificationSettings iosSettings = const IosNotificationSettings()]) {
    _channel.invokeMethod(
        'requestNotificationPermissions', iosSettings.toMap());
  }

  /// Requests the current authorization status; the result arrives
  /// asynchronously via the 'setAuthorizationStatus' channel callback.
  void getAuthorizationStatus() {
    _channel.invokeMethod('getAuthorizationStatus', []);
  }

  final StreamController<IosNotificationSettings> _iosSettingsStreamController =
      StreamController<IosNotificationSettings>.broadcast();

  Stream<IosNotificationSettings> get onIosSettingsRegistered {
    return _iosSettingsStreamController.stream;
  }

  /// Sets up [MessageHandler] for incoming messages.
  void configureApns({
    ApnsMessageHandler? onMessage,
    ApnsMessageHandler? onLaunch,
    ApnsMessageHandler? onResume,
    ApnsMessageHandler? onBackgroundMessage,
  }) {
    // NOTE(review): onBackgroundMessage is accepted but never stored or
    // used here — confirm whether it is intentionally unsupported.
    _onMessage = onMessage;
    _onLaunch = onLaunch;
    _onResume = onResume;
    _channel.setMethodCallHandler(_handleMethod);
    _channel.invokeMethod('configure');
  }

  // Dispatches calls coming FROM the native side onto the registered handlers.
  Future<dynamic> _handleMethod(MethodCall call) async {
    switch (call.method) {
      case 'onToken':
        token.value = call.arguments;
        return null;
      case 'onIosSettingsRegistered':
        final obj = IosNotificationSettings._fromMap(
            call.arguments.cast<String, bool>());
        // A denied alert permission is treated as "disabled by user".
        isDisabledByUser.value = obj.alert == false;
        return null;
      case 'setAuthorizationStatus':
        authorizationStatus.value = call.arguments;
        return null;
      case 'onMessage':
        return _onMessage?.call(_extractMessage(call));
      case 'onLaunch':
        return _onLaunch?.call(_extractMessage(call));
      case 'onResume':
        return _onResume?.call(_extractMessage(call));
      case 'willPresent':
        // Default to NOT presenting when no handler is installed.
        return shouldPresent?.call(_extractMessage(call)) ??
            Future.value(false);
      default:
        throw UnsupportedError('Unrecognized JSON message');
    }
  }

  ApnsRemoteMessage _extractMessage(MethodCall call) {
    // fix null safety errors
    // Round-trip through JSON to deep-convert the platform map into
    // Map<String, dynamic>.
    final Map<String, dynamic> map =
        json.decode(json.encode(call.arguments)) as Map<String, dynamic>;
    map.putIfAbsent('contentAvailable', () => false);
    map.putIfAbsent('mutableContent', () => false);
    map.putIfAbsent('data', () => map["aps"]);
    return ApnsRemoteMessage.fromMap(map);
  }

  /// Handler that returns true/false to decide if push alert should be displayed when in foreground.
  /// Returning true will delay onMessage callback until user actually clicks on it
  WillPresentHandler? shouldPresent;

  // Observable connector state exposed to the app.
  final isDisabledByUser = ValueNotifier(false);
  final authorizationStatus = ValueNotifier<String?>(null);
  final token = ValueNotifier<String?>(null);

  String get providerType => "APNS";

  void dispose() {
    _iosSettingsStreamController.close();
  }

  /// https://developer.apple.com/documentation/usernotifications/declaring_your_actionable_notification_types
  Future<void> setNotificationCategories(
      List<UNNotificationCategory> categories) {
    return _channel.invokeMethod(
      'setNotificationCategories',
      categories.map((e) => e.toJson()).toList(),
    );
  }

  /// Unregisters from APNs and clears the cached device token.
  Future<void> unregister() async {
    await _channel.invokeMethod('unregister');
    token.value = null;
  }
}
/// Requested/granted iOS notification permission flags.
/// Fields are nullable because the native side may omit entries.
class IosNotificationSettings {
  const IosNotificationSettings({
    this.sound = true,
    this.alert = true,
    this.badge = true,
  });

  // Built from the map delivered by the 'onIosSettingsRegistered' callback.
  IosNotificationSettings._fromMap(Map<String, bool> settings)
      : sound = settings['sound'],
        alert = settings['alert'],
        badge = settings['badge'];

  final bool? sound;
  final bool? alert;
  final bool? badge;

  /// Serializes for transport over the method channel.
  Map<String, dynamic> toMap() {
    return <String, bool?>{'sound': sound, 'alert': alert, 'badge': badge};
  }

  @override
  String toString() => 'PushNotificationSettings ${toMap()}';
}
/// https://developer.apple.com/documentation/usernotifications/unnotificationcategory
class UNNotificationCategory {
  /// Unique category identifier referenced by incoming pushes.
  final String identifier;
  final List<UNNotificationAction> actions;
  final List<String> intentIdentifiers;
  final List<UNNotificationCategoryOptions> options;

  /// Serializes for transport over the method channel.
  Map<String, dynamic> toJson() {
    return {
      'identifier': identifier,
      'actions': actions.map((e) => e.toJson()).toList(),
      'intentIdentifiers': intentIdentifiers,
      'options': _optionsToJson(options),
    };
  }

  UNNotificationCategory({
    required this.identifier,
    required this.actions,
    required this.intentIdentifiers,
    required this.options,
  });
}
/// https://developer.apple.com/documentation/usernotifications/UNNotificationAction
class UNNotificationAction {
  final String identifier;
  final String title;
  final List<UNNotificationActionOptions> options;

  /// Identifier iOS uses for the default (tap-on-notification) action.
  static const defaultIdentifier =
      'com.apple.UNNotificationDefaultActionIdentifier';

  /// Returns action identifier associated with this push.
  /// May be null, UNNotificationAction.defaultIdentifier, or value declared in setNotificationCategories
  static String? getIdentifier(Map<String, dynamic> payload) {
    // Prefer the nested 'data' map; fall back to the top-level key.
    final data = payload['data'] as Map?;
    return data?['actionIdentifier'] ?? payload['actionIdentifier'];
  }

  UNNotificationAction({
    required this.identifier,
    required this.title,
    required this.options,
  });

  /// Serializes for transport over the method channel.
  dynamic toJson() {
    return {
      'identifier': identifier,
      'title': title,
      'options': _optionsToJson(options),
    };
  }
}
/// https://developer.apple.com/documentation/usernotifications/unnotificationactionoptions
/// Mirrors the native UNNotificationActionOptions flags.
enum UNNotificationActionOptions {
  authenticationRequired,
  destructive,
  foreground,
}
/// https://developer.apple.com/documentation/usernotifications/unnotificationcategoryoptions
/// Mirrors the native UNNotificationCategoryOptions flags.
enum UNNotificationCategoryOptions {
  customDismissAction,
  allowInCarPlay,
  hiddenPreviewsShowTitle,
  hiddenPreviewsShowSubtitle,
  allowAnnouncement,
}
/// Serializes a list of enum-like option values to their string names.
List<String> _optionsToJson(List values) {
  return [for (final value in values) value.toString()];
}
|
# Bingo Game Project 2020
Author Name: Andrea Treacy
Compiler: Borland
Date: 13/05/2020
## Description
Bingo game program where the user selects from the menu:
(1) Enter player's 6 numbers
The user enters their own 6 numbers
(2) Generate game's 6 random numbers
The game generates its 6 numbers
(3) Display both player and game number sets
Show the user's numbers and the game's numbers
(4) Compare the player and game numbers
The user's numbers are compared to the game's numbers to check for a match. All numbers must match to get a Bingo
(5) Display frequency of the game's generated numbers
Display the number of times that each number was generated by the game
(6) End program
The program terminates
|
/*
* Copyright (C) 2011-2012 spray.cc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cc.spray
package directives
import typeconversion._
import http._
/**
 * Directives for converting request content to typed values (unmarshalling)
 * and typed values to response content (marshalling).
 */
private[spray] trait MarshallingDirectives extends DefaultMarshallers with DefaultUnmarshallers {
  this: BasicDirectives =>

  /**
   * Returns a Route that unmarshalls the request's content using the given unmarshaller and
   * passes it as an argument to the inner Route building function.
   * If the unmarshaller cannot unmarshal the request content the request is rejected with the [[cc.spray.Rejection]]
   * produced by the unmarshaller.
   */
  def content[A](unmarshaller: Unmarshaller[A]) = filter1 { ctx =>
    ctx.request.content.as(unmarshaller) match {
      case Right(a) => Pass.withTransform(a) {
        // A successful unmarshalling cancels content-related rejections
        // produced by sibling routes on the same request.
        _.cancelRejections {
          case RequestEntityExpectedRejection => true
          case _: UnsupportedContentType => true
          case _ => false
        }
      }
      case Left(problem) => Reject {
        // Map each unmarshalling failure onto its route-level rejection.
        problem match {
          case ContentExpected => RequestEntityExpectedRejection
          case UnsupportedContentType(supported) => UnsupportedRequestContentTypeRejection(supported)
          case MalformedContent(error) => MalformedRequestContentRejection(error)
        }
      }
    }
  }

  /**
   * Returns the in-scope Unmarshaller for the given type.
   */
  def as[A :Unmarshaller] = unmarshaller[A]

  /**
   * Returns a Route that uses the given marshaller to produce a completion function that is
   * passed to the inner route building function. You can use it do decouple marshaller resolution from the call
   * site of the RequestContexts {{complete}} function.
   */
  def produce[A](marshaller: Marshaller[A], status: StatusCode = StatusCodes.OK, headers: List[HttpHeader] = Nil) = {
    filter1 { ctx =>
      // Resolve the marshaller against the content types the client accepts.
      marshaller(ctx.request.acceptableContentType) match {
        case MarshalWith(converter) =>
          Pass.withTransform[A => Unit](converter(ctx.marshallingContext(status, headers))) {
            _.cancelRejectionsOfType[UnacceptedResponseContentTypeRejection]
          }
        case CantMarshal(onlyTo) => Reject(UnacceptedResponseContentTypeRejection(onlyTo))
      }
    }
  }

  /**
   * Returns the in-scope Marshaller for the given type.
   */
  def instanceOf[A :Marshaller] = marshaller[A]

  /**
   * Returns a Route that completes the request using the given function. The input to the function is produced with
   * the in-scope unmarshaller and the result value of the function is marshalled with the in-scope marshaller.
   */
  def handleWith[A :Unmarshaller, B: Marshaller](f: A => B): Route = {
    content(as[A]) { a => completeWith(f(a)) }
  }

  /**
   * Completes the request with status "200 Ok" and the response content created by marshalling the given object using
   * the in-scope marshaller for the type.
   */
  def completeWith[T :Marshaller](value: => T): Route = _.complete(value)
}
|
-----------------------------------------------------------------------------
-- |
-- Copyright : (c) Chris Moore, 2018
-- License : MIT
-- Maintainer : 0xCM00@gmail.com
-----------------------------------------------------------------------------
-- | Umbrella module re-exporting the Common submodules.
-- Fix: 'Common.Tuples' was re-exported twice; the duplicate has been
-- removed and the list is now sorted alphabetically.
module Alpha.Canonical.Common(module X) where
import Alpha.Canonical.Common.Asci as X
import Alpha.Canonical.Common.Cardinality as X
import Alpha.Canonical.Common.Concat as X
import Alpha.Canonical.Common.Container as X
import Alpha.Canonical.Common.Conversions as X
import Alpha.Canonical.Common.Executor as X
import Alpha.Canonical.Common.FiniteList as X
import Alpha.Canonical.Common.FiniteSeq as X
import Alpha.Canonical.Common.Format as X
import Alpha.Canonical.Common.Free as X
import Alpha.Canonical.Common.Indexing as X
import Alpha.Canonical.Common.Individual as X
import Alpha.Canonical.Common.Length as X
import Alpha.Canonical.Common.Orientation as X
import Alpha.Canonical.Common.Partnership as X
import Alpha.Canonical.Common.Root as X
import Alpha.Canonical.Common.Row as X
import Alpha.Canonical.Common.Sequence as X
import Alpha.Canonical.Common.Sequential as X
import Alpha.Canonical.Common.Setwise as X
import Alpha.Canonical.Common.Stateful as X
import Alpha.Canonical.Common.Stream as X
import Alpha.Canonical.Common.Synonyms as X
import Alpha.Canonical.Common.Table as X
import Alpha.Canonical.Common.Transcendental as X
import Alpha.Canonical.Common.Tuples as X
import Alpha.Canonical.Common.Types as X
import Alpha.Canonical.Common.Vector as X
|
@using HRMapp.Models
@model ProductionTask
@* Detail view for a single production task: name, id, duration,
   description and (when present) its required skillsets. *@
<h3>@Model.Name</h3>
<p>ID: @Model.Id</p>
<p>Duur: @Model.Duration.Hours uur en @Model.Duration.Minutes minuten</p>
<br />
<h4>Omschrijving:</h4>
<p>@Model.Description</p>
<br />
@* The skillset section is omitted entirely when the task needs none. *@
@if (Model.RequiredSkillsets.Count > 0)
{
    <h4>Nodige skillsets:</h4>
    <ul>
        @foreach (var skillset in Model.RequiredSkillsets)
        {
            <li>@skillset.Name</li>
        }
    </ul>
}
|
use std::ffi::{OsStr, OsString};
use std::io;
use std::os::unix::ffi::{OsStrExt, OsStringExt};
use heim_runtime as rt;
use crate::{Pid, ProcessError, ProcessResult};
/// Separator used between arguments in `/proc/<pid>/cmdline`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum Delimiter {
    Null,
    Space,
}

impl Delimiter {
    /// Returns the delimiter as the character it represents in the raw line.
    fn as_char(self) -> char {
        match self {
            Delimiter::Null => '\0',
            Delimiter::Space => ' ',
        }
    }
}
impl From<char> for Delimiter {
    // `man proc` documents `\0` as the separator between cmdline parts,
    // but some programs use an ASCII space instead. Anything else falls
    // back to `\0`, just in case.
    fn from(raw: char) -> Delimiter {
        if raw == ' ' {
            Delimiter::Space
        } else {
            Delimiter::Null
        }
    }
}
impl From<u8> for Delimiter {
    // Byte-level twin of `From<char>`: a space selects `Space`,
    // everything else (including the documented `\0`) selects `Null`.
    fn from(raw: u8) -> Delimiter {
        if raw == b' ' {
            Delimiter::Space
        } else {
            Delimiter::Null
        }
    }
}
/// A process command line as read from procfs, together with the
/// delimiter that separates its arguments.
#[derive(Debug)]
pub struct Command {
    line: OsString,
    delimiter: Delimiter,
}

impl Command {
    /// Returns a space-separated copy of the command line,
    /// leaving `self` untouched.
    pub fn to_os_string(&self) -> OsString {
        match self.delimiter {
            Delimiter::Space => self.line.clone(),
            Delimiter::Null => Self::with_spaces(self.line.clone()),
        }
    }

    /// Consumes the command and returns a space-separated command line.
    pub fn into_os_string(self) -> OsString {
        match self.delimiter {
            Delimiter::Space => self.line,
            Delimiter::Null => Self::with_spaces(self.line),
        }
    }

    /// Rewrites every NUL byte as an ASCII space and drops the
    /// trailing delimiter byte.
    fn with_spaces(line: OsString) -> OsString {
        let mut bytes = line.into_vec();
        bytes.iter_mut().for_each(|byte| {
            if *byte == b'\0' {
                *byte = b' ';
            }
        });
        // Dropping trailing delimiter
        let _ = bytes.pop();
        OsString::from_vec(bytes)
    }
}
impl<T> From<T> for Command
where
    T: Into<OsString>,
{
    /// Builds a `Command`, inferring the delimiter from the line's
    /// final byte (well-formed cmdlines end with their delimiter).
    /// An empty line defaults to the NUL delimiter.
    fn from(os_string: T) -> Command {
        let os_string = os_string.into();
        let delimiter = match os_string.as_bytes().last() {
            Some(chr) => Delimiter::from(*chr),
            None => Delimiter::Null,
        };
        Command {
            line: os_string,
            delimiter,
        }
    }
}
impl<'a> IntoIterator for &'a Command {
    type Item = &'a OsStr;
    type IntoIter = CommandIter<'a>;

    /// Iterates over the individual arguments, borrowing from `self`.
    fn into_iter(self) -> Self::IntoIter {
        CommandIter {
            line: self.line.as_os_str(),
            delimiter: self.delimiter,
            position: 0,
        }
    }
}
/// Iterator over the arguments of a [`Command`].
#[derive(Debug)]
pub struct CommandIter<'a> {
    line: &'a OsStr,
    delimiter: Delimiter,
    // Byte offset of the next argument within `line`.
    position: usize,
}
impl<'a> Iterator for CommandIter<'a> {
type Item = &'a OsStr;
fn next(&mut self) -> Option<Self::Item> {
if self.position >= self.line.len() {
return None;
}
let bytes = &self.line.as_bytes()[self.position..];
match memchr::memchr(self.delimiter.as_char() as u8, bytes) {
Some(offset) => {
let slice = &bytes[..offset];
// `+ 1` is for skipping the trailing delimiter of this argument slice
self.position += offset + 1;
Some(OsStr::from_bytes(slice))
}
None => None,
}
}
}
/// Reads `/proc/<pid>/cmdline` and parses it into a [`Command`].
///
/// A missing file is mapped to [`ProcessError::NoSuchProcess`]; any other
/// IO error is propagated unchanged.
pub async fn command(pid: Pid) -> ProcessResult<Command> {
    match rt::fs::read_to_string(format!("/proc/{}/cmdline", pid)).await {
        Ok(contents) => Ok(Command::from(contents)),
        Err(e) if e.kind() == io::ErrorKind::NotFound => Err(ProcessError::NoSuchProcess(pid)),
        Err(e) => Err(e.into()),
    }
}
#[cfg(test)]
mod tests {
    use std::ffi::{OsStr, OsString};

    use super::Command;

    // NUL-delimited line (the format documented in `man proc`).
    #[test]
    fn test_iter_with_nulls() {
        let line = OsString::from("/usr/bin/ntpd\0-g\0-u\0ntp:ntp\0");
        let command = Command::from(line);
        let iter = &mut command.into_iter();
        assert_eq!(Some(OsStr::new("/usr/bin/ntpd")), iter.next());
        assert_eq!(Some(OsStr::new("-g")), iter.next());
        assert_eq!(Some(OsStr::new("-u")), iter.next());
        assert_eq!(Some(OsStr::new("ntp:ntp")), iter.next());
        assert_eq!(None, iter.next());
    }

    // Space-delimited line, as written by some programs (e.g. browsers).
    #[test]
    fn test_iter_with_spaces() {
        let line = OsString::from("/opt/atom/atom --type=renderer --no-sandbox --lang=en-US ");
        let command = Command::from(line);
        let iter = &mut command.into_iter();
        assert_eq!(Some(OsStr::new("/opt/atom/atom")), iter.next());
        assert_eq!(Some(OsStr::new("--type=renderer")), iter.next());
        assert_eq!(Some(OsStr::new("--no-sandbox")), iter.next());
        assert_eq!(Some(OsStr::new("--lang=en-US")), iter.next());
        assert_eq!(None, iter.next());
    }

    // Empty cmdline (zombie/kernel threads) must yield nothing.
    #[test]
    fn test_iter_empty() {
        let command = Command::from("");
        let iter = &mut command.into_iter();
        assert_eq!(None, iter.next());
    }
}
|
# Computes the flight speed of a parrot depending on its type
# ("european_parrot", "african_parrot" or "norwegian_blue_parrot").
class Parrot
  def initialize type, number_of_coconuts, voltage, nailed
    @type = type
    @number_of_coconuts = number_of_coconuts
    @voltage = voltage
    @nailed = nailed
  end

  # Dispatches to the "<type>_speed" method; an unknown type raises
  # NoMethodError, just as before.
  def speed
    public_send("#{@type}_speed")
  end

  def european_parrot_speed
    base_speed
  end

  # Coconut load slows the bird down, but speed never goes negative.
  def african_parrot_speed
    [base_speed - load_factor * @number_of_coconuts, 0].max
  end

  # A nailed parrot doesn't move; otherwise speed scales with voltage.
  def norwegian_blue_parrot_speed
    return 0 if @nailed
    compute_base_speed_for_voltage(@voltage)
  end

  private

  # Voltage-boosted speed, capped at 24.0.
  def compute_base_speed_for_voltage voltage
    [voltage * base_speed, 24.0].min
  end

  def load_factor
    9.0
  end

  def base_speed
    12.0
  end
end
|
<?php
/**
 * CRUD controller for KTPS letters (dbsurat_ktps).
 *
 * Fix/refactor: the 11-field POST extraction that was duplicated verbatim
 * in buat() and update() is now a single private helper, and the session
 * guard uses an explicit boolean test instead of `== null`.
 */
class KtpsController extends CI_Controller
{
    public function __construct(){
        parent::__construct();
        $this->load->model('SuratModel');
        // Redirect anonymous visitors to the login page.
        if (!$this->session->has_userdata('session_id')) {
            redirect('login');
        }
    }

    /**
     * Collects the KTPS letter fields from the submitted form.
     * Used by both buat() and update().
     */
    private function collect_post_data() {
        return array(
            'ktps_nomor' => $this->input->post('nomor_surat'),
            'ktps_tanggal_lahir' => $this->input->post('tanggal_lahir'),
            'ktps_nama' => $this->input->post('nama_lengkap'),
            'ktps_tempat_lahir' => $this->input->post('tempat_lahir'),
            'ktps_wni' => $this->input->post('kewarganegaraan'),
            'ktps_jk' => $this->input->post('jenis_kelamin'),
            'ktps_agama' => $this->input->post('agama'),
            'ktps_kerja' => $this->input->post('pekerjaan'),
            'ktps_status' => $this->input->post('status'),
            'ktps_nik' => $this->input->post('nik'),
            'ktps_alamat' => $this->input->post('alamat'),
        );
    }

    /** Lists all KTPS letters. */
    public function index() {
        $data['ktps'] = $this->SuratModel->view_data('dbsurat_ktps');
        $this->load->view('templates/header');
        $this->load->view('ktps/index', $data);
        $this->load->view('templates/footer');
    }

    /** Creates a new letter from the submitted form. */
    public function buat() {
        if (isset($_POST['simpan'])){
            $data = $this->collect_post_data();
            $simpan = $this->SuratModel->insert('dbsurat_ktps', $data);
            if ($simpan > 0) {
                $this->session->set_flashdata('alert', 'berhasil_buat_surat');
            }
            redirect('ktps');
        }
    }

    /** Shows the detail page of one letter. */
    public function lihat($id){
        $data = array(
            'ktps' => $this->SuratModel->view_data_by_id($id,'ktps_id','dbsurat_ktps'),
            'ktps_detail' => $this->SuratModel->view_data_ktps_by_id($id)
        );
        $this->load->view('templates/header');
        $this->load->view('ktps/lihat', $data);
        $this->load->view('templates/footer');
    }

    /** Deletes a letter and reports success via flashdata. */
    public function hapus($id){
        $this->SuratModel->delete('ktps_id',$id,'dbsurat_ktps');
        $this->session->set_flashdata('alert','berhasil_menghapus_surat');
        redirect('ktps');
    }

    /** Shows the edit form for one letter. */
    public function edit($id) {
        $where = array('ktps_id' => $id);
        $data['ktps'] = $this->SuratModel->edit_ktps($where)->result();
        $this->load->view('templates/header');
        $this->load->view('ktps/edit', $data);
        $this->load->view('templates/footer');
    }

    /** Persists edits to an existing letter. */
    public function update($id) {
        $data = $this->collect_post_data();
        $this->SuratModel->update('ktps_id',$id,'dbsurat_ktps',$data);
        $this->session->set_flashdata('alert','berhasil_edit_ktps');
        redirect('ktps');
    }

    /** Marks a letter as approved (disposisi = Setuju). */
    public function setuju_surat(){
        $id = $this->input->post('id');
        $data_disposisi = array(
            'ktps_disposisi' => 'Setuju',
        );
        $this->SuratModel->update('ktps_id',$id,'dbsurat_ktps',$data_disposisi);
        $this->session->set_flashdata('alert','berhasil_menyetujui_surat');
        redirect('surat');
    }

    /** Renders the printable version of a letter. */
    public function cetak($id){
        $data = array(
            'ktps' => $this->SuratModel->view_data_by_id($id,'ktps_id','dbsurat_ktps'),
            'ktps_detail' => $this->SuratModel->view_data_ktps_by_id($id)
        );
        $this->load->view('templates/header');
        $this->load->view('ktps/cetak', $data);
        $this->load->view('templates/footer');
    }

    /** Date-range report: shows the form, or the archive when submitted. */
    public function laporan(){
        if (isset($_POST['lihat'])){
            $tanggal1 = $this->input->post('tanggal1');
            $tanggal2 = $this->input->post('tanggal2');
            $data['laporan'] = $this->SuratModel->view_laporan('dbsurat_ktps','ktps_tanggal',$tanggal1,$tanggal2);
            $data['tanggal1'] = $tanggal1;
            $data['tanggal2'] = $tanggal2;
            $this->load->view('templates/header');
            $this->load->view('ktps/arsip',$data);
            $this->load->view('templates/footer');
        } else {
            $this->load->view('templates/header');
            $this->load->view('ktps/laporan');
            $this->load->view('templates/footer');
        }
    }
}
|
/*
Copyright 2021 Sojan James
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
pub use crate::someip_codec::SomeIPCodec;
use std::{
io,
net::{Ipv4Addr, Ipv6Addr, SocketAddr},
};
use tokio::net::{TcpListener, TcpStream, UdpSocket, UnixStream};
use tokio_util::codec::Decoder;
use tokio_util::codec::Framed;
use tokio_util::udp::UdpFramed;
pub type TcpSomeIpConnection = Framed<TcpStream, SomeIPCodec>;
pub type UdpSomeIpConnection = UdpFramed<SomeIPCodec>;
pub type UdsSomeIpConnection = Framed<UnixStream, SomeIPCodec>;
impl SomeIPCodec {
    /// Connects to a SOME/IP peer over TCP and wraps the stream in this codec.
    pub async fn connect(self, addr: &SocketAddr) -> Result<TcpSomeIpConnection, io::Error> {
        let tcp_stream = TcpStream::connect(addr).await?;
        log::debug!("Connected to {}", addr);
        Ok(self.framed(tcp_stream))
    }

    /// Binds to `addr` and accepts exactly ONE incoming TCP connection,
    /// returning the framed stream together with the peer address.
    pub async fn listen(
        self,
        addr: &SocketAddr,
    ) -> Result<(TcpSomeIpConnection, SocketAddr), io::Error> {
        let listener = TcpListener::bind(addr).await?;
        match listener.accept().await {
            Ok((socket, addr)) => {
                let peer_addr = socket.peer_addr().unwrap();
                log::debug!("Connection accepted {:?} from {}", addr, peer_addr);
                Ok((self.framed(socket), peer_addr))
            }
            Err(e) => {
                log::error!("Error accepting {:?}", addr);
                Err(e)
            }
        }
    }

    /// Create a Stream for over UDP transport
    ///
    /// # Arguments
    ///
    /// * `addr` - The address to bind to
    /// * `multicast_v4` - A vector of multicast IPV4 (address,interface) tuples
    /// * `multicast_v6` - A vector of multicast IPV6 (address, interface_id) tuples.
    pub async fn create_udp_stream(
        addr: &SocketAddr,
        multicast_v4: Option<Vec<(Ipv4Addr, Ipv4Addr)>>,
        multicast_v6: Option<Vec<(Ipv6Addr, u32)>>,
    ) -> Result<UdpSomeIpConnection, io::Error> {
        if let Ok(socket) = UdpSocket::bind(addr).await {
            if let Some(multicast_v4) = multicast_v4 {
                for v4 in multicast_v4 {
                    socket.join_multicast_v4(v4.0, v4.1)?;
                }
            }
            if let Some(multicast_v6) = multicast_v6 {
                for v6 in multicast_v6 {
                    socket.join_multicast_v6(&v6.0, v6.1)?;
                }
            }
            // Maximum payload length for UDP is 1400 Bytes
            Ok(UdpFramed::new(socket, SomeIPCodec::new(1400)))
        } else {
            // NOTE(review): the bind error is discarded and replaced with a
            // generic message — consider propagating the original io::Error.
            Err(io::Error::new(
                io::ErrorKind::ConnectionRefused,
                "Cannot bind UDP socket",
            ))
        }
    }

    /// Converts a std UnixStream into a framed tokio SOME/IP stream.
    pub fn create_uds_stream(
        mut uds: std::os::unix::net::UnixStream,
    ) -> Result<UdsSomeIpConnection, io::Error> {
        //let uds = unsafe UnixStream::from_raw_fd(uds);
        // Tokio requires the fd to be non-blocking before registration.
        uds.set_nonblocking(true)?;
        // NOTE(review): `from_std` is a safe function in tokio 1.x, so the
        // `unsafe` block (and the `mut` bindings) look unnecessary — confirm
        // against the tokio version pinned by this crate before removing.
        let mut tokio_uds = unsafe { tokio::net::UnixStream::from_std(uds)? };
        Ok(Framed::new(tokio_uds, SomeIPCodec::new(1400)))
    }
}
#[cfg(test)]
mod tests {
    use core::panic;
    use std::fmt::Write;

    use crate::SomeIpPacket;

    use super::*;
    use bytes::BytesMut;
    use futures::{SinkExt, StreamExt};
    use someip_parse::SomeIpHeader;

    /// Round-trips one SOME/IP packet over a Unix-domain socket pair and
    /// checks that the header ids and payload survive framing.
    #[test]
    fn test_uds() {
        let rt = tokio::runtime::Runtime::new().unwrap();
        let _r = rt.block_on(async {
            let (tx, rx) = std::os::unix::net::UnixStream::pair().unwrap();
            let mut stream = SomeIPCodec::create_uds_stream(rx).unwrap();
            // Sender task: frame a single packet and push it through `tx`.
            rt.spawn(async {
                let mut sink = SomeIPCodec::create_uds_stream(tx).unwrap();
                let mut header = SomeIpHeader::default();
                header.set_service_id(42);
                header.set_method_or_event_id(67);
                let mut payload = BytesMut::with_capacity(10);
                payload.write_str("THIS IS A TEST").expect("payload write");
                let packet = SomeIpPacket::new(header, payload.freeze());
                let _res = sink.send(packet).await;
            });
            // Receiver side: the first decoded packet must match what was sent.
            loop {
                if let Some(pkt) = stream.next().await {
                    if let Ok(packet) = pkt {
                        println!("Packet received:{:?}", &packet);
                        assert_eq!(packet.header().service_id(), 42);
                        assert_eq!(packet.header().event_or_method_id(), 67);
                        let mut payload = BytesMut::with_capacity(10);
                        payload.write_str("THIS IS A TEST").expect("payload write");
                        assert_eq!(payload, packet.payload());
                        break;
                    } else {
                        panic!("Packet not received");
                    }
                }
            }
        });
    }

    /// Connects a TCP client to a local listener and verifies that one
    /// packet travels client -> server intact.
    #[test]
    fn test_loopback() {
        let rt = tokio::runtime::Runtime::new().unwrap();
        let _result = rt.block_on(async {
            // Client task: connect to the listener below and send one packet.
            rt.spawn(async {
                println!("Initiating connection");
                let addr = "127.0.0.1:8094".parse::<SocketAddr>().unwrap();
                let stream = SomeIPCodec::default().connect(&addr).await;
                let (mut sink, _ins) = stream.unwrap().split();
                println!("Connected");
                let mut header = SomeIpHeader::default();
                header.set_service_id(42);
                header.set_method_or_event_id(67);
                let mut payload = BytesMut::with_capacity(10);
                payload.write_str("THIS IS A TEST").expect("payload write");
                let packet = SomeIpPacket::new(header, payload.freeze());
                let _res = sink.send(packet).await;
            });
            // Server side: accept the connection and verify the packet.
            let addr = "127.0.0.1:8094".parse::<SocketAddr>().unwrap();
            let (stream, _addr) = SomeIPCodec::default().listen(&addr).await.unwrap();
            let (_sink, mut ins) = stream.split();
            println!("Connected!");
            let task = tokio::spawn(async move {
                loop {
                    if let Some(packet) = ins.next().await {
                        if let Ok(packet) = packet {
                            println!("Packet received:{:?}", &packet);
                            assert_eq!(packet.header().service_id(), 42);
                            assert_eq!(packet.header().event_or_method_id(), 67);
                            let mut payload = BytesMut::with_capacity(10);
                            payload.write_str("THIS IS A TEST").expect("payload write");
                            assert_eq!(payload, packet.payload());
                            break;
                        }
                    } else {
                        println!("Connection stopped");
                        break;
                    }
                }
            });
            task.await.unwrap();
        });
    }

    /// Sends one packet between two local UDP sockets and verifies header
    /// ids and payload on the receiving side.
    #[test]
    fn test_udp() {
        let rt = tokio::runtime::Runtime::new().unwrap();
        let _result = rt.block_on(async {
            // Sender task: bind port 4712 and send one packet to port 4713.
            rt.spawn(async {
                let addr = "0.0.0.0:4712".parse::<std::net::SocketAddr>().unwrap();
                let ipv4s = Vec::new();
                let ipv6 = Vec::new();
                let stream = SomeIPCodec::create_udp_stream(&addr, Some(ipv4s), Some(ipv6)).await;
                let (mut sink, _ins) = stream.unwrap().split();
                let mut header = SomeIpHeader::default();
                header.set_service_id(42);
                header.set_method_or_event_id(67);
                let mut payload = BytesMut::with_capacity(10);
                payload
                    .write_str("THIS IS A UDP TEST")
                    .expect("payload write");
                let packet = SomeIpPacket::new(header, payload.freeze());
                let dest_addr = "0.0.0.0:4713".parse::<SocketAddr>().unwrap();
                let res = sink.send((packet, dest_addr)).await;
                assert!(res.is_ok())
            });
            // Receiver side: UDP frames also carry the sender's address.
            let addr = "0.0.0.0:4713".parse::<SocketAddr>().unwrap();
            let stream = SomeIPCodec::create_udp_stream(&addr, None, None).await;
            let (_sink, mut ins) = stream.unwrap().split();
            println!("Connected!");
            loop {
                if let Some(packet) = ins.next().await {
                    if let Ok((packet, addr)) = packet {
                        println!("Packet received:{:?} from {}", &packet, &addr);
                        assert_eq!(packet.header().service_id(), 42);
                        assert_eq!(packet.header().event_or_method_id(), 67);
                        let mut payload = BytesMut::with_capacity(10);
                        payload
                            .write_str("THIS IS A UDP TEST")
                            .expect("payload write");
                        assert_eq!(payload, packet.payload());
                        break;
                    }
                } else {
                    println!("Connection stopped");
                    break;
                }
            }
        });
    }
}
|
(ns advent.2018.day4
"Advent of Code 2018, day 4: Repose Record"
(:require [advent.helpers :as h]))
(def puzzle-input (h/slurp-resource "2018/day4.txt" h/slurp-lines))
;; Matches "[YYYY-MM-DD hh:mm] <action>" and captures month, day, hour,
;; minute and the free-form action text.
(def row-pattern #"\[\d{4}-(\d{2})-(\d{2}) (\d{2}):(\d{2})] (.+)")
;; Captures the guard number from a "Guard #N begins shift" action.
(def guard-no-pattern #"Guard #(\d+) begins shift")
; XXX: No February dates or December 31 in the input file.
;; Last day number of each month, used to roll pre-midnight rows forward.
(def month-ends
  {1 31 2 28 3 31 4 30 5 31 6 30 7 31 8 31 9 30 10 31 11 30 12 31})
;; Parses a raw log row into [[month day] minute action].
;; Rows stamped before midnight (hour != "00") belong to the *next* day's
;; shift, so the date is rolled forward and the minute clamped to 0.
(defn parse-row [raw-row]
  (let [[_ mo d h mi action] (re-matches row-pattern raw-row)
        d (Integer/parseInt d)
        mo (Integer/parseInt mo)]
    (cond
      (= "00" h) [[mo d] (Integer/parseInt mi) action]
      ;; last day of the month -> first day of the next month
      (= (month-ends mo) d) [[(inc mo) 1] 0 action]
      :else [[mo (inc d)] 0 action])))
;; Converts alternating falls-asleep / wakes-up rows into ranges of the
;; minutes spent asleep. Each row is [[month day] minute action]; minutes
;; are paired up and expanded with `range` (end minute exclusive).
(defn asleep-ranges [sleep-actions]
  (map (fn [[start end]] (range start end))
       (partition 2 (map second sleep-actions))))
;; Extracts the numeric guard id from a "Guard #N begins shift" row.
;; `action` is a parsed row whose last element is the raw action string.
(defn guard-id [action]
  (let [[_ id] (re-matches guard-no-pattern (last action))]
    (Integer/parseInt id)))
;; Builds a map of guard-id -> collection of asleep-minute ranges.
;; Rows are sorted chronologically, parsed, and grouped by date (the first
;; element of each parsed row); the first row of each group is assumed to
;; be the shift-begin row — confirm against the input format.
(defn sleep-map [input]
  (->> (partition-by first (map parse-row (sort input)))
       (reduce (fn [sleep-map logs]
                 ;; `logs` = one day's rows; the rest after the shift-begin
                 ;; row alternate falls-asleep / wakes-up actions.
                 (if-let [actions (seq (rest logs))]
                   (update sleep-map
                           (guard-id (first logs))
                           into (asleep-ranges actions))
                   sleep-map))
               {})))
;;;
(defn most-asleep
  "Finds the id and the sleep ranges for the guard, who has slept the most."
  [sleep-map]
  ;; Sort guards by total minutes asleep, descending, and take the top entry.
  (first (sort-by #(apply + (map count (val %))) > sleep-map)))
;; Puzzle 1: (id of the sleepiest guard) * (minute they are most often asleep).
(defn puzzle1 [input]
  (let [[guard-id sleep-ranges] (most-asleep (sleep-map input))
        ;; Frequency-count every asleep minute and pick the most common one.
        minute-asleep (->> (flatten sleep-ranges)
                           (reduce (fn [acc i] (update acc i (fnil inc 0))) {})
                           (sort-by val >) ffirst)]
    (* guard-id minute-asleep)))
;;;
;; Returns [guard-id [minute times-asleep]] for the guard's most frequently
;; slept minute.
(defn sleep-frequencies [[id sleep-ranges]]
  [id (->> (flatten sleep-ranges)
           (reduce (fn [acc i] (update acc i (fnil inc 0))) {})
           (sort-by last >) first)])
;; Puzzle 2: for the guard most frequently asleep on the same minute,
;; answer is (guard id) * (that minute).
(defn puzzle2 [input]
  (->> (sleep-map input)
       (map sleep-frequencies)
       (sort-by #(-> % second last) >) first
       (#(* (first %) (-> % second first)))))
|
"""Vista de usuarios."""
# Django REST Framework
from rest_framework import status, mixins, viewsets
from rest_framework.decorators import action
from rest_framework.permissions import (
AllowAny,
IsAuthenticated,
)
from rest_framework.response import Response
# Modelos
from andromeda.users.models import User
# Permisos
from andromeda.users.permissions import IsAccountOwner
# Serializers
from andromeda.users.serializers import (
UserLoginSerializer,
UserModelSerializer,
UserSignUpSerializer,
AccountVerificationSerializer,
EmailResetPasswordSerializer,
ResetPasswordSerializer,
ChangePasswordSerializer,
ContactAndromedaSerializer,
)
class UserViewSet(mixins.RetrieveModelMixin,
                  mixins.UpdateModelMixin,
                  mixins.ListModelMixin,
                  mixins.DestroyModelMixin,
                  viewsets.GenericViewSet):
    """User view set.

    Groups the sign-up, login and account-verification views.
    """

    serializer_class = UserModelSerializer

    def get_queryset(self):
        # Only client accounts are managed through this view set.
        return User.objects.filter(
            is_client=True,
        )

    def perform_destroy(self, instance):
        """Deactivates the user (soft delete) instead of removing the row."""
        instance.is_active = False
        instance.save()

    def get_permissions(self):
        """Assigns permissions based on the current action."""
        if self.action in ['signup', 'login', 'verify', 'email_reset_password', 'reset_password', 'contact_andromeda']:
            permissions = [AllowAny]
        elif self.action in ['retrieve', 'update', 'partial_update', 'change_password']:
            permissions = [IsAuthenticated, IsAccountOwner]
        else:
            permissions = [IsAuthenticated]
        return [p() for p in permissions]

    @action(detail=False, methods=['post'])
    def login(self, request):
        """User login; returns the serialized user and an access token."""
        serializer = UserLoginSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user, token = serializer.save()
        data = {
            'user': UserModelSerializer(user).data,
            'access_token': token
        }
        return Response(data, status=status.HTTP_201_CREATED)

    @action(detail=False, methods=['post'])
    def signup(self, request):
        """User sign-up."""
        serializer = UserSignUpSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user = serializer.save()
        data = UserModelSerializer(user).data
        return Response(data, status=status.HTTP_201_CREATED)

    @action(detail=False, methods=['post'])
    def verify(self, request):
        """Account verification."""
        serializer = AccountVerificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        data = {'message': 'Felicitaciones tu cuenta fue verificada!'}
        return Response(data, status=status.HTTP_200_OK)

    @action(detail=True, methods=['get'])
    def profile(self, request, *args, **kwargs):
        """User profile."""
        user = self.get_object()
        data = UserModelSerializer(user).data
        return Response(data)

    @action(detail=False, methods=['post'])
    def email_reset_password(self, request):
        """Sends the password-reset request e-mail."""
        serializer = EmailResetPasswordSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        data = {'message': 'Mensaje enviado'}
        return Response(data, status=status.HTTP_200_OK)

    @action(detail=False, methods=['post'])
    def reset_password(self, request):
        """Resets a forgotten password."""
        serializer = ResetPasswordSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        data = {'message': 'Tu contraseña fue cambiada exitosamente'}
        return Response(data, status=status.HTTP_200_OK)

    @action(detail=False, methods=['post'])
    def change_password(self, request):
        """Changes the password of the authenticated user."""
        serializer = ChangePasswordSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        data = {'message': 'Tu contraseña fue cambiada exitosamente'}
        return Response(data, status=status.HTTP_200_OK)

    @action(detail=False, methods=['post'])
    def contact_andromeda(self, request):
        """Sends a contact-Andromeda e-mail."""
        serializer = ContactAndromedaSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        data = {'message': 'Gracias por contactarnos, pronto nos pondremos en contacto contigo'}
        return Response(data, status=status.HTTP_200_OK)
|
package net.apptronic.core.entity.extensions
import net.apptronic.core.base.SubscriptionHolder
import net.apptronic.core.base.SubscriptionHolders
import net.apptronic.core.entity.Entity
import net.apptronic.core.entity.base.Property
import net.apptronic.core.entity.base.Value
/**
 * Invokes [action] whenever any of [properties] changes, provided all of
 * them currently hold a value (checked via [ifAllIsSet]).
 *
 * NOTE(review): the subscriptions created by [Property.subscribe] are never
 * added to the returned [SubscriptionHolders], so unsubscribing via the
 * returned [SubscriptionHolder] appears to have no effect — confirm whether
 * each subscription should be registered with the holder.
 */
fun forEachChangeAnyOf(vararg properties: Property<*>, action: () -> Unit): SubscriptionHolder {
    val subscriptionHolders = SubscriptionHolders()
    properties.forEach { property ->
        property.subscribe { _ ->
            // Fire only when every observed property has a value set.
            ifAllIsSet(*properties) {
                action()
            }
        }
    }
    return subscriptionHolders
}
/**
 * Keeps this [Value] synchronised with [source]: every value emitted by
 * [source] is written into this instance via [Value.set].
 *
 * @return this [Value], allowing call chaining
 */
fun <T> Value<T>.copyValueFrom(source: Entity<T>): Value<T> = apply {
    source.subscribe { incoming -> set(incoming) }
}
|
# Render the oozie-site.xml configuration to stdout, expanding the
# environment variables USERNAME, HADOOP_HOME, HADOOP_NAME_SERVER,
# HADOOP_NAME_SERVER_PORT, OOZIE_DB_HOST and OOZIE_DB_PORT into the
# template below. The heredoc body must stay verbatim — it IS the output.
cat << EOF
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<configuration>
<property>
<name>oozie.service.ProxyUserService.proxyuser.${USERNAME}.hosts</name>
<value>*</value>
<description>
List of hosts the '#USER#' user is allowed to perform 'doAs'
operations.
The '#USER#' must be replaced with the username o the user who is
allowed to perform 'doAs' operations.
The value can be the '*' wildcard or a list of hostnames.
For multiple users copy this property and replace the user name
in the property name.
</description>
</property>
<property>
<name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
<value>*=${HADOOP_HOME}/etc/hadoop</value>
<description>
Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the HOST:PORT of
the Hadoop service (JobTracker, HDFS). The wildcard '*' configuration is
used when there is no exact match for an authority. The HADOOP_CONF_DIR contains
the relevant Hadoop *-site.xml files. If the path is relative is looked within
the Oozie configuration directory; though the path can be absolute (i.e. to point
to Hadoop client conf/ directories in the local filesystem.
</description>
</property>
<property>
<name>oozie.service.WorkflowAppService.system.libpath</name>
<value>hdfs://${HADOOP_NAME_SERVER}:${HADOOP_NAME_SERVER_PORT}/user/${USERNAME}/share/lib</value>
<description>
System library path to use for workflow applications.
This path is added to workflow application if their job properties sets
the property 'oozie.use.system.libpath' to true.
</description>
</property>
<property>
<name>oozie.service.ProxyUserService.proxyuser.${USERNAME}.groups</name>
<value>*</value>
<description>
List of groups the '#USER#' user is allowed to impersonate users
from to perform 'doAs' operations.
The '#USER#' must be replaced with the username o the user who is
allowed to perform 'doAs' operations.
The value can be the '*' wildcard or a list of groups.
For multiple users copy this property and replace the user name
in the property name.
</description>
</property>
<property>
<name>oozie.db.schema.name</name>
<value>oozie</value>
</property>
<property>
<name>oozie.service.JPAService.create.db.schema</name>
<value>false</value>
</property>
<property>
<name>oozie.service.JPAService.validate.db.connection</name>
<value>true</value>
</property>
<property>
<name>oozie.service.JPAService.jdbc.driver</name>
<value>com.mysql.jdbc.Driver</value>
<description>JDBC driver class.</description>
</property>
<property>
<name>oozie.test.db.port</name>
<value>3306</value>
</property>
<property>
<name>oozie.service.JPAService.jdbc.url</name>
<value>jdbc:mysql://${OOZIE_DB_HOST}:${OOZIE_DB_PORT}/oozie</value>
<description>JDBC URL.</description>
</property>
<property>
<name>oozie.service.JPAService.jdbc.username</name>
<value>oozie</value>
<description>DB user name.</description>
</property>
<property>
<name>oozie.service.JPAService.jdbc.password</name>
<value>oozie</value>
<description>
DB user password. IMPORTANT: if password is emtpy leave a 1 space string, the service trims the
value, if empty Configuration assumes it is NULL.
</description>
</property>
<property>
<name>oozie.service.JPAService.pool.max.active.conn</name>
<value>10</value>
</property>
</configuration>
EOF
|
// SPDX-License-Identifier: Apache-2.0
//! SSE related functions
use crate::interrupts::XSAVE_AREA_SIZE;
use crate::snp::cpuid_count;
use core::arch::asm;
use x86_64::registers::xcontrol::{XCr0, XCr0Flags};
use xsave::MxCsr;
/// Initialise SSE/AVX state.
///
/// Verifies that `XSAVE` and `XSAVEOPT` are available, enables the SSE
/// (and, when supported, AVX) state components in `XCR0`, checks that the
/// reserved interrupt save area is large enough for `xsave`, and masks all
/// SSE floating-point exceptions in `MXCSR`.
pub fn init_sse() {
    const XSAVE_SUPPORTED_BIT: u32 = 1 << 26;

    // CPUID.1:ECX bit 26 advertises XSAVE support.
    let xsave_supported = (cpuid_count(1, 0).ecx & XSAVE_SUPPORTED_BIT) != 0;
    assert!(xsave_supported);

    // CPUID.(EAX=0xD, ECX=1):EAX bit 0 advertises XSAVEOPT support.
    let xsaveopt_supported = (cpuid_count(0xD, 1).eax & 1) == 1;
    assert!(xsaveopt_supported);

    // Bits 0..=2 of CPUID.(EAX=0xD, ECX=0):EAX cover the x87/SSE/AVX
    // state components.
    let sse_extended_supported = (cpuid_count(0xd, 0).eax & 0b111) == 0b111;

    // Enable SSE always, and AVX additionally when the extended state
    // components are all reported; a single read-modify-write of XCR0.
    let mut xcr0 = XCr0::read();
    xcr0 |= XCr0Flags::SSE;
    if sse_extended_supported {
        xcr0 |= XCr0Flags::AVX;
    }
    unsafe { XCr0::write(xcr0) };

    let xsave_size = cpuid_count(0xD, 0).ebx;
    // Make sure that interrupts have enough room for xsave
    assert!(xsave_size <= XSAVE_AREA_SIZE);

    unsafe {
        // Mask every SSE floating-point exception so none of them traps.
        let mxcsr: u32 = (MxCsr::INVALID_OPERATION_MASK
            | MxCsr::DENORMAL_MASK
            | MxCsr::DIVIDE_BY_ZERO_MASK
            | MxCsr::OVERFLOW_MASK
            | MxCsr::UNDERFLOW_MASK
            | MxCsr::PRECISION_MASK)
            .bits();
        asm!("ldmxcsr [{}]", in(reg) &mxcsr, options(nostack));
    }
}
|
using Distributed
# A sum of terms distributed across workers; `data` holds one handle
# (e.g. a Future from `@spawnat`) per term.
struct DistributedSum{T}
    data::Vector{T}
end

# Construct a distributed sum from a
# function that returns the elements of the sum.
function DistributedSum(f::Function, n::Integer)
    # Assign term `ni = f(ni)` to the `ni`-th of the first `n` workers.
    return DistributedSum([@spawnat(workers()[ni], f(ni)) for ni in 1:n])
end
# Functions needed for @distributed loop and reduction;
# all simply forward to the underlying `data` vector.
Base.length(A::DistributedSum) = length(A.data)
Base.firstindex(A::DistributedSum) = firstindex(A.data)
Base.lastindex(A::DistributedSum) = lastindex(A.data)
Base.getindex(A::DistributedSum, args...) = getindex(A.data, args...)
Base.iterate(A::DistributedSum, args...) = iterate(A.data, args...)
# Apply the sum: fetch each stored term, apply it to `v`, and reduce the
# results with `+` across the @distributed loop.
function (A::DistributedSum)(v)
    return @distributed (+) for An in A
        fetch(An)(v)
    end
end
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.r
import org.apache.hadoop.fs.Path
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.ml.{Pipeline, PipelineModel}
import org.apache.spark.ml.attribute.AttributeGroup
import org.apache.spark.ml.feature.RFormula
import org.apache.spark.ml.regression.{IsotonicRegression, IsotonicRegressionModel}
import org.apache.spark.ml.util._
import org.apache.spark.sql.{DataFrame, Dataset}
private[r] class IsotonicRegressionWrapper private (
    val pipeline: PipelineModel,
    val features: Array[String]) extends MLWritable {

  // Stage 1 of the pipeline is the fitted isotonic-regression model
  // (stage 0 is the RFormula model).
  private val isotonicRegressionModel: IsotonicRegressionModel =
    pipeline.stages(1).asInstanceOf[IsotonicRegressionModel]

  // Boundaries of the fitted piecewise-linear function.
  lazy val boundaries: Array[Double] = isotonicRegressionModel.boundaries.toArray

  // Predictions associated with the boundaries, in the same order.
  lazy val predictions: Array[Double] = isotonicRegressionModel.predictions.toArray

  def transform(dataset: Dataset[_]): DataFrame = {
    // Drop the assembled features column so callers get their schema back.
    pipeline.transform(dataset).drop(isotonicRegressionModel.getFeaturesCol)
  }

  override def write: MLWriter = new IsotonicRegressionWrapper.IsotonicRegressionWrapperWriter(this)
}
private[r] object IsotonicRegressionWrapper
  extends MLReadable[IsotonicRegressionWrapper] {

  // Fits RFormula + IsotonicRegression as a two-stage pipeline and wraps
  // the fitted PipelineModel together with the resolved feature names.
  def fit(
      data: DataFrame,
      formula: String,
      isotonic: Boolean,
      featureIndex: Int,
      weightCol: String): IsotonicRegressionWrapper = {

    val rFormula = new RFormula()
      .setFormula(formula)
      .setFeaturesCol("features")
    RWrapperUtils.checkDataColumns(rFormula, data)
    val rFormulaModel = rFormula.fit(data)

    // get feature names from output schema
    val schema = rFormulaModel.transform(data).schema
    val featureAttrs = AttributeGroup.fromStructField(schema(rFormulaModel.getFeaturesCol))
      .attributes.get
    val features = featureAttrs.map(_.name.get)
    // Isotonic regression operates on a single feature.
    require(features.size == 1)

    // assemble and fit the pipeline
    val isotonicRegression = new IsotonicRegression()
      .setIsotonic(isotonic)
      .setFeatureIndex(featureIndex)
      .setWeightCol(weightCol)
      .setFeaturesCol(rFormula.getFeaturesCol)
    val pipeline = new Pipeline()
      .setStages(Array(rFormulaModel, isotonicRegression))
      .fit(data)

    new IsotonicRegressionWrapper(pipeline, features)
  }

  override def read: MLReader[IsotonicRegressionWrapper] = new IsotonicRegressionWrapperReader

  override def load(path: String): IsotonicRegressionWrapper = super.load(path)

  // Persists the wrapper as JSON metadata (class name + feature names)
  // next to the saved pipeline.
  class IsotonicRegressionWrapperWriter(instance: IsotonicRegressionWrapper) extends MLWriter {

    override protected def saveImpl(path: String): Unit = {
      val rMetadataPath = new Path(path, "rMetadata").toString
      val pipelinePath = new Path(path, "pipeline").toString

      val rMetadata = ("class" -> instance.getClass.getName) ~
        ("features" -> instance.features.toSeq)
      val rMetadataJson: String = compact(render(rMetadata))
      sc.parallelize(Seq(rMetadataJson), 1).saveAsTextFile(rMetadataPath)

      instance.pipeline.save(pipelinePath)
    }
  }

  // Restores the wrapper from JSON metadata plus the saved pipeline.
  class IsotonicRegressionWrapperReader extends MLReader[IsotonicRegressionWrapper] {

    override def load(path: String): IsotonicRegressionWrapper = {
      implicit val format = DefaultFormats
      val rMetadataPath = new Path(path, "rMetadata").toString
      val pipelinePath = new Path(path, "pipeline").toString

      val rMetadataStr = sc.textFile(rMetadataPath, 1).first()
      val rMetadata = parse(rMetadataStr)
      val features = (rMetadata \ "features").extract[Array[String]]

      val pipeline = PipelineModel.load(pipelinePath)
      new IsotonicRegressionWrapper(pipeline, features)
    }
  }
}
|
#ifndef CIRCLE_H
#define CIRCLE_H

#include "RCube/Core/Graphics/OpenGL/Mesh.h"
#include "glm/gtc/constants.hpp"

namespace rcube
{

/**
 * Creates mesh data representing a solid 2D disk
 * @param radius Radius of the disk
 * @param radial_segments Number of segments along the radius
 * @param theta_start Starting angle of the disk in radians (default: 0)
 * @param theta_end Ending angle of the disk in radians (default: 2pi)
 * @return A disk shaped mesh
 */
TriangleMeshData circle(float radius = 1, int radial_segments = 10, float theta_start = 0,
                        float theta_end = glm::two_pi<float>());

} // namespace rcube

#endif // CIRCLE_H
|
using UnityEngine;
using System.Collections;
/// <summary>
/// Moves the object along a looping sequence of waypoints at a constant
/// speed, advancing to the next waypoint once within 0.2 world units.
/// </summary>
public class FlyingMovement : MonoBehaviour
{
    // Waypoints to visit in order; the path wraps back to the first one.
    public Vector3[] waypoints;

    // Movement speed in world units per second. Defaults to 1 to preserve
    // the original behaviour (plain Time.deltaTime step).
    public float speed = 1f;

    private int waypointindex = 0;

    void Update()
    {
        // Guard against a missing or empty waypoint list: indexing or the
        // modulo below would otherwise throw every frame.
        if (waypoints == null || waypoints.Length == 0)
        {
            return;
        }

        transform.position = Vector3.MoveTowards(
            transform.position, waypoints[waypointindex], speed * Time.deltaTime);

        // Advance to the next waypoint once close enough, wrapping around.
        if (Vector3.Distance(transform.position, waypoints[waypointindex]) < .2f)
        {
            waypointindex = (waypointindex + 1) % waypoints.Length;
        }
    }
}
|
inherited ResourcesFrame: TResourcesFrame
object Splitter1: TSplitter [0]
Left = 149
Top = 0
Width = 5
Height = 280
ExplicitHeight = 342
end
object lblError: TLabel [1]
Left = 160
Top = 316
Width = 34
Height = 13
Caption = 'lblError'
end
object tvTree: TTreeView [2]
Left = 0
Top = 0
Width = 149
Height = 280
Align = alLeft
DoubleBuffered = True
HideSelection = False
HotTrack = True
Indent = 19
ParentDoubleBuffered = False
PopupMenu = pmTree
ReadOnly = True
RightClickSelect = True
StateImages = ilResources
TabOrder = 0
OnChange = tvTreeChange
OnClick = tvTreeClick
OnCollapsed = tvTreeCollapsed
OnContextPopup = tvTreeContextPopup
OnExpanded = tvTreeExpanded
end
object edStatus: TEdit [3]
AlignWithMargins = True
Left = 3
Top = 283
Width = 445
Height = 21
Margins.Bottom = 0
Align = alBottom
BorderStyle = bsNone
DoubleBuffered = True
ParentColor = True
ParentDoubleBuffered = False
ReadOnly = True
TabOrder = 1
end
object pResText: TPanel [4]
Left = 160
Top = 14
Width = 117
Height = 289
BevelOuter = bvNone
PopupMenu = pmData
TabOrder = 2
Visible = False
end
object pResImg: TPanel [5]
Left = 452
Top = 0
Width = 121
Height = 297
BevelOuter = bvNone
PopupMenu = pmData
TabOrder = 3
Visible = False
object lblImgInfo: TLabel
AlignWithMargins = True
Left = 3
Top = 3
Width = 115
Height = 13
Align = alTop
Caption = 'lblImgInfo'
ExplicitWidth = 48
end
object iImage: TImage
Left = 0
Top = 19
Width = 121
Height = 278
Align = alClient
Center = True
ExplicitLeft = 4
ExplicitTop = 72
ExplicitWidth = 105
ExplicitHeight = 105
end
end
object lbImgGroup: TListBox [6]
Left = 292
Top = 0
Width = 158
Height = 297
ItemHeight = 13
PopupMenu = pmData
TabOrder = 4
Visible = False
OnClick = lbImgGroupClick
end
inherited ilListView: TImageList
Bitmap = {
494C010102004000640008000800FFFFFFFFFF10FFFFFFFFFFFFFFFF424D3600
0000000000003600000028000000200000000800000001002000000000000004
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000424D3E000000000000003E000000
2800000020000000080000000100010000000000200000000000000000000000
000000000000000000000000FFFFFF00EFEF0000C7EF0000ABEF00006DEF0000
EF6D0000EFAB0000EFC70000EFEF000000000000000000000000000000000000
000000000000}
end
inherited alActions: TActionList
object aDataCopy: TAction [0]
Caption = 'Copy'
ShortCut = 16451
OnExecute = aDataCopyExecute
end
object aDataSelectAll: TAction [1]
Caption = 'Select All'
ShortCut = 16449
OnExecute = aDataSelectAllExecute
end
object aDataSaveAs: TAction [2]
Caption = 'Save As...'
ShortCut = 16467
OnExecute = aDataSaveAsExecute
end
object aDataViewProcessed: TAction [3]
Tag = 1
AutoCheck = True
Caption = 'Processed'
GroupIndex = 1
ShortCut = 16433
OnExecute = aDataViewProcessedExecute
end
object aDataViewText: TAction [4]
Tag = 2
AutoCheck = True
Caption = 'Text'
GroupIndex = 1
ShortCut = 16434
OnExecute = aDataViewProcessedExecute
end
object aDataViewBinaryText: TAction [5]
Tag = 3
AutoCheck = True
Caption = 'Binary text'
GroupIndex = 1
ShortCut = 16435
OnExecute = aDataViewProcessedExecute
end
object aDataViewHex: TAction [6]
Tag = 4
AutoCheck = True
Caption = 'Hex'
GroupIndex = 1
ShortCut = 16436
OnExecute = aDataViewProcessedExecute
end
end
object ilResources: TImageList
Left = 108
Top = 56
Bitmap = {
494C010109003800600010001000FFFFFFFFFF10FFFFFFFFFFFFFFFF424D3600
0000000000003600000028000000400000003000000001002000000000000030
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000925F5F008F5C
5C008C5959008754540086535300835050008350500083505000835050008350
5000804D4D00804D4D0000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000094616100FFEF
DE00FFEEDC00FFECDB00FFECD800FFEBD700FFEAD500FFE9D300FFE8D200FFE8
D000FFE6CF008250500000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000099666600FFF0
E100FFEFDF00FFEEDD00FFECDB00FFECD900FFEBD700FFEAD600FFE9D300FFE9
D200FFE7D0008857560000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000009C696700FFF2
E400FFF0E200FFEFE000FFEFDD00FFEDDC00FFECDA00FFEBD800FFEBD600FFEA
D400FFE9D2008B59580000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000A3706B00FFF3
E700FFF2E500FFF1E200403C3800FFEEDE00FFEEDD00403B3700FFEBD900FFEB
D700FFEAD5008D5D5B0000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000AE7B7000FFF4
EA00FFF4E700FFF2E500403C3900FFF0E100FFEFDF00403C3700FFECDB00FFEC
D900FFEBD7009464620000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000BB887700FFF5
EC00FFF5EA00FFF4E800403D3A00FFF1E400FFF0E200403C3800FFEFDE00FFEE
DC00FFECDA009869670000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000C5927D00FFF7
EF00FFF6EC00FFF5EB00100F0F00DFD5C900CFC4BA00403C3900FFF0E100FFEF
DE00FFEEDC009F716D0000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000D09D8100FFF8
F200FFF7F000FFF7EE00302E2C00605C5800504C4800CFC5BA00FFF1E300FFF0
E100FFEFDF00A275720000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000D8A58500FFF9
F400FFF9F200FFF8F000403E3C00FFF5EC00FFF4EA00FFF4E800FFF3E600FFF2
E400FFF0E200AA7F790000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000E0AD8A00FFFB
F700FFFAF500FFF9F3009F9B9600FFF7EF00FFF6ED00FFF5EB00FFF4E900FFF3
E700FFF2E500AE837E0000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000E7B48E00FFFC
F800FFFBF700FFFAF500FFF9F300FFF8F100FFF7F000FFF7EE00FFF5EB00BA91
8A00BA918A00B683740000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000F0BD9100FFFD
FB00FFFCF900FFFBF800FFFBF600FFF9F400FFF9F200FFF8F000FFF7EE00B683
7400F49E2B008E62560000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000F7C49500FFFE
FC00FFFDFB00FFFCFA00FFFCF800FFFBF600FFFAF400FFF9F300FFF8F100B683
74008E6256000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000FECB9900FECB
9900F3CDAB00ECC5A800E3BDA200DAB39D00D2AC9900C9A39400C9A39400B683
7400000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000925F5F008F5C
5C008C5959008754540086535300835050008350500083505000835050008350
5000804D4D00804D4D0000000000000000000000000000000000925F5F008F5C
5C008C5959008754540086535300835050008350500083505000835050008350
5000804D4D00804D4D0000000000000000000000000000000000925F5F008F5C
5C008C5959008754540086535300835050008350500083505000835050008350
5000804D4D00804D4D0000000000000000000000000000000000925F5F008F5C
5C008C5959008754540086535300835050008350500083505000835050008350
5000804D4D00804D4D000000000000000000000000000000000094616100FFEF
DE00FFEEDC00FFECDB00FFECD800FFEBD700FFEAD500FFE9D300FFE8D200FFE8
D000FFE6CF00825050000000000000000000000000000000000094616100FFEF
DE00FFEEDC00FFECDB00FFECD800FFEBD700FFEAD500FFE9D300FFE8D200FFE8
D000FFE6CF00825050000000000000000000000000000000000094616100FFEF
DE00FFEEDC00FFECDB00FFECD800FFEBD700FFEAD500FFE9D300FFE8D200FFE8
D000FFE6CF00825050000000000000000000000000000000000094616100FFE2
C600FFE1C200FFDFBF00FFDDBB00FFDBB800FFD9B500FFD8B200FFD6AE00FFD5
AC00FFD4A900825050000000000000000000000000000000000099666600FFF0
E100FFEFDF00FFEEDD00FFECDB00FFECD900FFEBD700FFEAD600FFE9D300FFE9
D200FFE7D000885756000000000000000000000000000000000099666600FFF0
E100FFEFDF00EBDDCF00CECDCB00B9B8B7008785840054525100CDBCAA00FFE9
D200FFE7D000885756000000000000000000000000000000000099666600FFF0
E100FFEFDF00FFEEDD00FFECDB00FFECD900FFEBD700FFEAD600FFE9D300FFE9
D200FFE7D000885756000000000000000000000000000000000099666600FFE5
CB00FFE3C700FFE1C400FFDFC000FFDDBD00FFDCB900FFDAB500FFD9B200FFD7
AF00FFD5AC0088575600000000000000000000000000000000009C696700FFF2
E400FFF0E200FFEFE000FFEFDD00FFEDDC00FFECDA00FFEBD800FFEBD600FFEA
D400FFE9D2008B595800000000000000000000000000000000009C696700FFF2
E400FFF0E200E6D9CE00F1F1F100E0DFDF009F9E9E0053535300C9B9A900FFEA
D400FFE9D2008B595800000000000000000000000000000000009C696700FFF2
E400FFF0E200FFEFE000FFEFDD00FFEDDC00FFECDA00FFEBD800E3D1BF006159
5100FFE9D2008B595800000000000000000000000000000000009C696700FFE7
D000E7CFB900796C5F0091817000C2AA9300857463009E88730085725F00F3CF
AB00FFD7B0008B59580000000000000000000000000000000000A3706B001B94
93001317160014231E001497970021737100216C6C001484830014C7C600137F
7D001B9593008D5D5B0000000000000000000000000000000000A3706B00FFF3
E700FFF2E500E8DDD100E7E8E800D4D3D300959494004B4B4A00CDBDAE00FFEB
D700FFEAD5008D5D5B0000000000000000000000000000000000A3706B00FFF3
E700FFF2E500FFF1E200FFF0E000FFEEDE008B827800DACBBB004A443F00E3D1
BF00FFEAD5008D5D5B0000000000000000000000000000000000A3706B00FFEA
D6009E908100E7D0BA00FFE4CA00FFE2C6009E8B7800FFDEBF00C2A88E00CEB1
9500FFD9B4008D5D5B0000000000000000000000000000000000AE7B700008CA
CA0000707700006E0000005D6B003C5C5E003C575800007F7F000087870000A4
A40008ABAA009464620000000000000000000000000000000000AE7B7000FFF4
EA00FFF4E700E0D7CE00DEDEDE00C0C0BF007F7E7E00413F3F00B8AA9E00FFEC
D900FFEBD7009464620000000000000000000000000000000000AE7B7000FFF4
EA00FFF4E700FFF2E500FFF1E300B0A69B00000000000E0D0C00DACABB00FFEC
D900FFEBD7009464620000000000000000000000000000000000AE7B7000FFEC
DA00AA9C8F00E7D3BE00F3DCC500CEB9A4009E8D7B00FFE1C300B69F8900DABD
A100FFDBB9009464620000000000000000000000000000000000BB8877000C82
82000148580000AB0000007D250000626800006767000071710000515100007F
7F00087E7D009869670000000000000000000000000000000000BB887700FFF5
EC00FFF5EA00DDD7D000F0F0F400DDDCD600A1A1A000625F5F00B2A79B00FFEE
DC00FFECDA009869670000000000000000000000000000000000BB887700FFF5
EC00FFF5EA00FFF4E800F6EADE00201E1D0000000000000000008B827900FFEE
DC00FFECDA009869670000000000000000000000000000000000BB887700FFEF
DF00F3E2D10091867A009E908300B6A594009E8E7E009E8D7C009E8B7900FFDF
C100FFDEBD009869670000000000000000000000000000000000C5927D00A8A3
9D000A4F090000B000000040000006600500B9B0A600CABEB300CBBFB300C9BC
AF00C1B4A7009F716D0000000000000000000000000000000000C5927D00FFF7
EF00FFF6EC00F6EBE200A1745300482F5B005274900048A6A700C7C0B600FFEF
DE00FFEEDC009F716D0000000000000000000000000000000000C5927D00FFF7
EF00FFF6EC00FFF5EB007D7872000000000025232100B5AAA000FFF0E100FFEF
DE00FFEEDC009F716D0000000000000000000000000000000000C5927D00FFF1
E400FFEFE000FFEEDC00FFECD900FFE9D500FFE7D100FFE6CD00FFE4C900FFE2
C500FFE0C2009F716D0000000000000000000000000000000000D09D81008F8E
8800008F000000890000172015007A8C7000FFF3E700BDB8AD004E6F6A00BDB6
AA00FFEFDF00A275720000000000000000000000000000000000D09D8100FFF8
F200FCF4ED009F8C78007E6A5500201FA0007D7F9100A4D4C80067898800C5B9
AE00FFEFDF00A275720000000000000000000000000000000000D09D8100FFF8
F200FFF7F000DFD8D00013121200827C7700F6EADF00FFF3E500FFF1E300FFF0
E100FFEFDF00A275720000000000000000000000000000000000D09D8100FFF3
E800918982009E958B00C2B5A8006D655D006D645B009E908200796D6200E7CF
B800FFE3C700A275720000000000000000000000000000000000D8A58500D7D2
CE001F4B1D0000910000B1ABA500FFF5EC00FFF4EA001E615F0000FFFF001E60
5F00FFF0E200AA7F790000000000000000000000000000000000D8A58500FFF9
F4009E9B99005E5D5500FAF2EB003533B6007C768200FFF4E800AFB3AD005B58
5600FFF0E200AA7F790000000000000000000000000000000000D8A58500FFF9
F400FFF9F2009E9A9500DFD8D000FFF5EC00FFF4EA00FFF4E800FFF3E600FFF2
E400FFF0E200AA7F790000000000000000000000000000000000D8A58500FFF6
EC00E7DDD30079736D0079726B00DACCBF009E938700C2B3A400857A6E00E7D1
BC00FFE5CB00AA7F790000000000000000000000000000000000E0AD8A00FFFB
F700D8D4D000336A3100DED8D200FFF7EF00FFF6ED0071817D00008D8D007180
7B00FFF2E500AE837E0000000000000000000000000000000000E0AD8A00FFFB
F700D5D2CE00EDE8E200FFF8F10099989E0084807C00FFF5EB00FDF2E700E1D8
CF00FFF2E500AE837E0000000000000000000000000000000000E0AD8A00FFFB
F700FFFAF500FFF9F300FFF8F100FFF7EF00FFF6ED00FFF5EB00FFF4E900FFF3
E700FFF2E500AE837E0000000000000000000000000000000000E0AD8A00FFF8
F000FFF6EE00857F7A00857E7800FFF1E3009E948B00CEBFB200857B7100FFE9
D400FFE8D000AE837E0000000000000000000000000000000000E7B48E00FFFC
F800FFFBF700FFFAF500FFF9F300FFF8F100FFF7F000FFF7EE00FFF5EB00BA91
8A00BA918A00B683740000000000000000000000000000000000E7B48E00FFFC
F800FFFBF700FFFAF500FDF7F100959390008E8A8600FFF7EE00FFF5EB00BA91
8A00BA918A00B683740000000000000000000000000000000000E7B48E00FFFC
F800FFFBF700FFFAF500FFF9F300FFF8F100FFF7F000FFF7EE00FFF5EB00BA91
8A00BA918A00B683740000000000000000000000000000000000E7B48E00FFFA
F400E7E1DA0085817C00B6AFA800FFF3E800857E770091888000B6AA9E00BA91
8A00BA918A00B683740000000000000000000000000000000000F0BD9100FFFD
FB00FFFCF900FFFBF800FFFBF600FFF9F400FFF9F200FFF8F000FFF7EE00B683
7400F49E2B008E62560000000000000000000000000000000000F0BD9100FFFD
FB00FFFCF900FFFBF800FFFBF600FFF9F400FFF9F200FFF8F000FFF7EE00B683
7400F49E2B008E62560000000000000000000000000000000000F0BD9100FFFD
FB00FFFCF900FFFBF800FFFBF600FFF9F400FFF9F200FFF8F000FFF7EE00B683
7400F49E2B008E62560000000000000000000000000000000000F0BD9100FFFB
F800FFFAF500FFF8F200FFF7F000FFF5EC00FFF4E900FFF2E500FFF0E100B683
7400F49E2B008E62560000000000000000000000000000000000F7C49500FFFE
FC00FFFDFB00FFFCFA00FFFCF800FFFBF600FFFAF400FFF9F300FFF8F100B683
74008E6256000000000000000000000000000000000000000000F7C49500FFFE
FC00FFFDFB00FFFCFA00FFFCF800FFFBF600FFFAF400FFF9F300FFF8F100B683
74008E6256000000000000000000000000000000000000000000F7C49500FFFE
FC00FFFDFB00FFFCFA00FFFCF800FFFBF600FFFAF400FFF9F300FFF8F100B683
74008E6256000000000000000000000000000000000000000000F7C49500FFFC
FB00FFFBF900FFFAF600FFF9F300FFF8F000FFF6ED00FFF4EA00FFF3E600B683
74008E6256000000000000000000000000000000000000000000FECB9900FECB
9900F3CDAB00ECC5A800E3BDA200DAB39D00D2AC9900C9A39400C9A39400B683
7400000000000000000000000000000000000000000000000000FECB9900FECB
9900F3CDAB00ECC5A800E3BDA200DAB39D00D2AC9900C9A39400C9A39400B683
7400000000000000000000000000000000000000000000000000FECB9900FECB
9900F3CDAB00ECC5A800E3BDA200DAB39D00D2AC9900C9A39400C9A39400B683
7400000000000000000000000000000000000000000000000000FECB9900FECB
9900F3CDAB00ECC5A800E3BDA200DAB39D00D2AC9900C9A39400C9A39400B683
7400000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000925F5F008F5C
5C008C5959008754540086535300835050008350500083505000835050008350
5000804D4D00804D4D0000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000008659700096293000A5F
8E000A5F8E000C5B87000C5B87000C5B87000C5B87000C5B87000C5B87000C5B
87000C5B87000C5B87000000000000000000000000000275B1000371AB00056D
A500066A9F0008669900096292000B5E8C000C5B87000C5B87000C5B87000C5B
87000C5B8700000000000000000000000000000000000000000094616100FFEF
DE00FFEEDC00FFECDB00FFECD800FFEBD700FFEAD500FFE9D300FFE8D200FFE8
D000FFE6CF008250500000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000066BA0008CE1FD0079CEF00072C5
E4006DBAD7006AB7D4006AB7D4006AB7D4006AB7D40063B1D40063B1D4005DAA
D200559FC7005097BD000C5B870000000000066BA0000178B50082DEF7007FDA
F2007BD7EF007BD7EF0078D3EC0078D3EC0073CCE80070C8E6006CC3E40067BC
DE0067BCDE000C5B87000000000000000000000000000000000099666600FFF0
E100FFEFDF00FFEEDD00FFECDB00FFECD900FFEBD700FFEAD600FFE9D300FFE9
D200FFE7D0008857560000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000056DA500BCEFFE0088DCF90084DE
FD0084DEFD0084DEFD0084DEFD0084DEFD007ED9FC007ED9FC0077D3FC0073CF
FC006FCCFB00549FC8000C5B870000000000056DA500007AB9008BE4FA008BE4
FA0083DFF4007EDBF0007EDBF0007EDBF0007AD7F20077D3F00073CFEE006DC4
E50066BBDD000C5B8700000000000000000000000000000000009C696700FFF2
E400FFF0E200FFEFE000FFEFDD00FFEDDC00FFECDA00FFEBD800FFEBD600FFEA
D400FFE9D2008B59580000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000471AA00BEF1FE0094E4FD0089E2
FD0089E2FD0089E2FD0089E2FD0089E2FD0082DCFD007ED9FC007ED9FC0076D2
FC0076D2FC005BAAD2000C5B8700000000000471AA0082D1F7000079B80091EB
FC008BE6F70086E8FB0086E8FB0086E8FB0081E1FA0081E1FA007ADAF90078D3
F4006CC3E50067BCE2000C5B8700000000000000000000000000A3706B00FFF3
E700FFF2E500FFF1E200FFF0E000FFEEDE00FFEEDD00FFEDDB00FFEBD900FFEB
D700FFEAD5008D5D5B0000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000274AF00C0F3FE009AE8FD008EE7
FD008EE7FD008EE7FD008EE7FD0089E3FD0089E3FD0082DCFD007ED9FC0079D5
FC0074D1FC005EAED5000C5B8700000000000274AF0082D2F700007AB90095EF
FD0095EFFD008AE9F8008AE9F80086E8FC0083E4FB007FE0FA007CDBF90079D7
F80072CBEC0068BFE3000C5B8700000000000000000000000000AE7B7000FFF4
EA00FFF4E700FFF2E500FFF1E300FFF0E100FFEFDF00FFEEDD00FFECDB00FFEC
D900FFEBD7009464620000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000176B300C2F5FE009EECFE0093EB
FE0093EBFE0093EBFE0093EBFE008DE6FD008DE6FD0085DFFD0080DBFD007CD7
FC0077D3FC0061AED5000C5B8700000000000176B30087D8F8007CD5F800007A
B9009AF4FE0095F1FD008FEFFD008CEBFC0087E6FB0083E2FA0083E2FA007EDA
F80077D2F3006CC3E80068BEE6000C5B87000000000000000000BB887700FFF5
EC00FFF5EA00FFF4E800FFF3E600FFF1E400FFF0E200FFEFE000FFEFDE00FFEE
DC00FFECDA009869670000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000007AB800C4F7FE00A4EFFE0098EF
FE0098EFFE0098EFFE0098EFFE0091E9FE008CE5FD0087E1FD0082DDFD007DD8
FC0078D4FC0062B1D7000C5B870000000000007AB80095E8FB008BE5FB00007A
B9009BF7FF009BF7FF0094F0FD008FEBFC008CE7FB0087E2FA0083DEF9007EDA
F80079D5F60073CCEF006BC3EC000C5B87000000000000000000C5927D00FFF7
EF00FFF6EC00FFF5EB00FFF4E900FFF3E600FFF2E500FFF0E200FFF0E100FFEF
DE00FFEEDC009F716D0000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000007AB900C5F8FF00ADF2FE009CF3
FE009CF3FE009CF3FE0098EFFE0093EBFE008EE7FD0089E2FD0084DEFD007ED9
FC0079D5FC0069BCE4000C5B870000000000007AB900C5F8FF00ADF2FE009CF3
FE00007AB900007AB9000274B0000471AA00056DA40007699E00086597000A61
91000B5E8B000C5B87000C5B87000C5B87000000000000000000D09D8100FFF8
F200FFF7F000FFF7EE00FFF5EB00FFF4EA00FFF3E700FFF3E500FFF1E300FFF0
E100FFEFDF00A275720000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000007AB900C7FAFF00B3F4FE00B3F4
FE00B1FAFF00AAF6FF00A6F2FE00A1EEFE009CEAFE0097E6FD0092E2FD008DDD
FC0081D6FA0071C7F0000C5B870000000000007AB900C7FAFF00B3F4FE00B3F4
FE00B1FAFF00AAF6FF00A6F2FE00A1EEFE009CEAFE0097E6FD0092E2FD008DDD
FC0081D6FA0071C7F0000C5B8700000000000000000000000000D8A58500FFF9
F400FFF9F200FFF8F000FFF7EE00FFF5EC00FFF4EA00FFF4E800FFF3E600FFF2
E400FFF0E200AA7F790000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000007AB900C5F8FF00C5F8FF00C5F8
FF00C5F8FF00C5F8FF00C0F3FE00C0F3FE00BCEFFE00BCEFFE00B8EBFD00B8EB
FD00B8EBFD007FD4FB000B5D8A0000000000007AB900C5F8FF00C5F8FF00C5F8
FF00C5F8FF00C5F8FF00C0F3FE00C0F3FE00BCEFFE00BCEFFE00B8EBFD00B8EB
FD00B8EBFD007FD4FB000B5D8A00000000000000000000000000E0AD8A00FFFB
F700FFFAF500FFF9F300FFF8F100FFF7EF00FFF6ED00FFF5EB00FFF4E900FFF3
E700FFF2E500AE837E0000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000007AB9009DF2FE009DF2FE009DF2
FE009DF2FE0098EDFE0098EDFE000373AD000470A900056DA400066AA000066A
A00008659700096293000000000000000000007AB9009DF2FE009DF2FE009DF2
FE009DF2FE0098EDFE0098EDFE000373AD000470A900056DA400066AA000066A
A000086597000962930000000000000000000000000000000000E7B48E00FFFC
F800FFFBF700FFFAF500FFF9F300FFF8F100FFF7F000FFF7EE00FFF5EB00BA91
8A00BA918A00B683740000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000007AB9009AEFFE009AEF
FE009AEFFE0096EBFE000178B600000000000000000000000000000000000000
00000000000000000000000000000000000000000000007AB9009AEFFE009AEF
FE009AEFFE0096EBFE000178B600000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000F0BD9100FFFD
FB00FFFCF900FFFBF800FFFBF600FFF9F400FFF9F200FFF8F000FFF7EE00B683
7400F49E2B008E62560000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000007AB900007A
B900007AB900007AB90000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000007AB900007A
B900007AB900007AB90000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000F7C49500FFFE
FC00FFFDFB00FFFCFA00FFFCF800FFFBF600FFFAF400FFF9F300FFF8F100B683
74008E6256000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000FECB9900FECB
9900F3CDAB00ECC5A800E3BDA200DAB39D00D2AC9900C9A39400C9A39400B683
740000000000000000000000000000000000424D3E000000000000003E000000
2800000040000000300000000100010000000000800100000000000000000000
000000000000000000000000FFFFFF00FFFF000000000000C003000000000000
C003000000000000C003000000000000C003000000000000C003000000000000
C003000000000000C003000000000000C003000000000000C003000000000000
C003000000000000C003000000000000C003000000000000C003000000000000
C007000000000000C00F000000000000FFFFFFFFFFFFFFFFC003C003C003C003
C003C003C003C003C003C003C003C003C003C003C003C003C003C003C003C003
C003C003C003C003C003C003C003C003C003C003C003C003C003C003C003C003
C003C003C003C003C003C003C003C003C003C003C003C003C003C003C003C003
C007C007C007C007C00FC00FC00FC00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFC003
FFFF80038007C003FFFF00010003C003FFFF00010003C003FFFF00010001C003
FFFF00010001C003FFFF00010000C003FFFF00010000C003FFFF00010000C003
FFFF00010001C003FFFF00010001C003FFFF00030003C003FFFF81FF81FFC003
FFFFC3FFC3FFC007FFFFFFFFFFFFC00F00000000000000000000000000000000
000000000000}
end
object pmData: TPopupMenu
OnPopup = pmDataPopup
Left = 12
Top = 56
object Copy1: TMenuItem
Action = aDataCopy
end
object SelectAll1: TMenuItem
Action = aDataSelectAll
end
object N1: TMenuItem
Caption = '-'
end
object SaveAs1: TMenuItem
Action = aDataSaveAs
end
object N2: TMenuItem
Caption = '-'
end
object Processed1: TMenuItem
Action = aDataViewProcessed
AutoCheck = True
end
object ext1: TMenuItem
Action = aDataViewText
AutoCheck = True
end
object BinaryText1: TMenuItem
Action = aDataViewBinaryText
AutoCheck = True
end
object Hex1: TMenuItem
Action = aDataViewHex
AutoCheck = True
end
end
object sdSaveAs: TSaveDialog
Options = [ofOverwritePrompt, ofHideReadOnly, ofPathMustExist, ofNoReadOnlyReturn, ofEnableSizing]
Left = 12
Top = 108
end
object pmTree: TPopupMenu
Left = 60
Top = 108
object miTreeSaveAs: TMenuItem
Action = aDataSaveAs
end
end
end
|
-- file:update.sql ln:118 expect:true
create table part_b_10_b_20 partition of range_parted for values from ('b', 10) to ('b', 20)
|
package com.matthiaslapierre.flyingbird.ui.game
import android.content.Context
import android.graphics.Canvas
import android.graphics.Paint
import android.graphics.PorterDuff
import android.view.SurfaceHolder
import androidx.annotation.WorkerThread
import com.matthiaslapierre.flyingbird.Constants
import com.matthiaslapierre.flyingbird.R
import com.matthiaslapierre.flyingbird.resources.Cache
import com.matthiaslapierre.flyingbird.resources.Scores
import com.matthiaslapierre.flyingbird.ui.game.sprite.*
import com.matthiaslapierre.flyingbird.util.Utils
/**
* Handles the game loop.
*/
class GameProcessor(
    private val context: Context,
    private val holder: SurfaceHolder,
    private val globalPaint: Paint,
    private val cache: Cache,
    private val scores: Scores,
    private var gameInterface: GameInterface?
): SplashSprite.SplashInterface, GameOverSprite.GameOverInterface {

    companion object {
        /** Minimum number of pipe obstacles kept queued ahead of the bird. */
        private const val MIN_PIPES = 60
        /**
         * Sleep interval used while the game is paused so the loop yields
         * the CPU instead of busy-spinning at 100% on one core.
         */
        private const val PAUSE_POLL_MS = 50L
    }

    // Current game state: STATUS_NOT_STARTED, STATUS_PLAY or STATUS_GAME_OVER.
    private var currentStatus: Int = Sprite.STATUS_NOT_STARTED
    private var isPaused: Boolean = false
    private var points: Int = 0
    // Sprites rendered each frame. Obstacles and rewards are inserted at
    // index 0 so the bird, score and overlays draw on top of them.
    private var workSprites: MutableList<Sprite> = mutableListOf()
    private var birdSprite: BirdSprite? = null
    private var scoreSprite: ScoreSprite? = null
    private var groundSprite: GroundSprite? = null
    private var splashSprite: SplashSprite? = null
    private var gameOverSprite: GameOverSprite? = null
    private var lastPipeSprite: PipeSprite? = null
    private var countPipes: Int = 0
    private val pipeWidth = Utils.getDimenInPx(context, R.dimen.pipe_width)
    private val coinWidth = Utils.getDimenInPx(context, R.dimen.coin_width)
    private val pipeInterval = Utils.getDimenInPx(context, R.dimen.pipe_interval)
    private var newBestScore: Boolean = false

    /**
     * Runs the game loop until the hosting thread is interrupted.
     *
     * Each iteration: render every live sprite on the SurfaceHolder's canvas,
     * drop dead sprites (scoring passed pipes), throttle to the target frame
     * rate, then update the sprite set according to the current game status.
     * Must be called from a worker thread (annotated @WorkerThread).
     */
    @WorkerThread
    fun execute() {
        /*
        In our GameProcessor, we loop as long as the Thread is active.
        First, we take care of the rendering of our game. To do this, we obtain a reference to the
        Canvas of our SurfaceHolder object by calling its lockCanvas method.
        We then empty the content of this Canvas before iterating on all the elements of our
        game that we want to return to the screen. These elements being our Sprites.
        */
        while (!Thread.interrupted()) {
            if (isPaused) {
                // Fix: the previous bare `continue` busy-waited while paused.
                // Sleep briefly so a paused game does not peg a CPU core.
                try {
                    Thread.sleep(PAUSE_POLL_MS)
                } catch (e: InterruptedException) {
                    break
                }
                continue
            }
            val startTime = System.currentTimeMillis()
            // Fix: lockCanvas() may return null (e.g. the surface is being
            // destroyed while the loop is still running) — skip the frame
            // instead of crashing with an NPE on canvas.width.
            val canvas = holder.lockCanvas() ?: continue
            val screenWidth = canvas.width.toFloat()
            try {
                cleanCanvas(canvas)
                /*
                This iteration is performed via an Iterator object and we use it to delete Sprites
                considered as no longer alive. This work is encapsulated within a try / finally block.
                In the finally part, we ask that the updates we have made on the Canvas be posted on
                the SurfaceHolder via a call to the unlockCanvasAndPost method with the current
                instance of Canvas passed as a parameter.
                */
                val iterator: MutableListIterator<Sprite> = workSprites.listIterator()
                while (iterator.hasNext()) {
                    val sprite = iterator.next()
                    if (sprite.isAlive()) {
                        sprite.onDraw(canvas, globalPaint, currentStatus)
                    } else {
                        if (sprite is PipeSprite) {
                            // A pipe dying means it scrolled off-screen:
                            // award a point and allow a new pipe to spawn.
                            scoreSprite?.currentScore = ++points
                            countPipes--
                        }
                        iterator.remove()
                    }
                }
            } finally {
                // Always post the frame, even if drawing threw.
                holder.unlockCanvasAndPost(canvas)
            }
            /*
            The rendering time is measured before comparing this time to a constant called GAP.
            This constant allows us to add, if necessary, a delay to avoid that the rendering
            phase of the Game Loop be too fast.
            */
            val duration = System.currentTimeMillis() - startTime
            val gap = Constants.MS_PER_FRAME - duration
            if (gap > 0) {
                try {
                    Thread.sleep(gap)
                } catch (e: Exception) {
                    break
                }
            }
            when (currentStatus) {
                Sprite.STATUS_NOT_STARTED -> {
                    // Show the home screen.
                    if (splashSprite == null || !splashSprite!!.isAlive()) {
                        splashSprite = SplashSprite(context, cache, this@GameProcessor)
                        workSprites.add(splashSprite!!)
                    }
                }
                Sprite.STATUS_GAME_OVER -> {
                    // Show the Game Over screen.
                    if (gameOverSprite == null || !gameOverSprite!!.isAlive()) {
                        gameOverSprite = GameOverSprite(
                            context,
                            cache,
                            points,
                            newBestScore,
                            scores.highScore(context),
                            this@GameProcessor
                        )
                        workSprites.add(gameOverSprite!!)
                    }
                }
                Sprite.STATUS_PLAY -> {
                    // Show the game: score, ground and bird sprites.
                    if (scoreSprite == null || !scoreSprite!!.isAlive()) {
                        scoreSprite = ScoreSprite(context, cache)
                        workSprites.add(scoreSprite!!)
                    }
                    if (groundSprite == null || !groundSprite!!.isAlive()) {
                        groundSprite = GroundSprite(context, cache)
                        workSprites.add(groundSprite!!)
                    }
                    if (birdSprite == null || !birdSprite!!.isAlive()) {
                        birdSprite = BirdSprite(context, cache)
                        workSprites.add(birdSprite!!)
                    }
                    // Spawn obstacles (pipes) and rewards (coins, clouds)
                    // until the minimum pipe count is queued again.
                    var nextPipeX = screenWidth
                    if (lastPipeSprite != null) {
                        nextPipeX = lastPipeSprite!!.x + pipeInterval
                    }
                    val cloudSprites = mutableListOf<CloudSprite>()
                    while (countPipes < MIN_PIPES) {
                        lastPipeSprite = PipeSprite(
                            context,
                            cache,
                            nextPipeX,
                            lastPipeSprite?.lastBlockY
                        )
                        // The coin sits halfway between this pipe and the next.
                        val lastCoinSprite = CoinSprite(
                            context,
                            cache,
                            nextPipeX + pipeWidth + pipeInterval / 2f - coinWidth / 2f
                        )
                        val cloudSprite = CloudSprite(
                            context,
                            cache,
                            nextPipeX
                        )
                        cloudSprites.add(cloudSprite)
                        // Insert at 0 so foreground sprites draw on top.
                        workSprites.add(0, lastPipeSprite!!)
                        workSprites.add(0, lastCoinSprite)
                        nextPipeX += pipeWidth + pipeInterval
                        countPipes++
                    }
                    workSprites.addAll(0, cloudSprites)
                    /*
                    We can now complete our Game Loop with game updates about Sprites.
                    We will add the following code to detect if the game should stop because
                    the player has lost.
                    */
                    val iterator: MutableListIterator<Sprite> = workSprites.listIterator()
                    while (iterator.hasNext()) {
                        val sprite = iterator.next()
                        if (sprite.isHit(birdSprite!!)) {
                            when (sprite) {
                                is PipeSprite, is GroundSprite -> {
                                    gameInterface?.onHit()
                                    newBestScore = scores.isNewBestScore(context, points)
                                    if (newBestScore) {
                                        scores.storeHighScore(context, points)
                                    }
                                    /*
                                    The game is over and we will have to display the end screen to
                                    the player the next time the Game Loop passes.
                                    */
                                    currentStatus = Sprite.STATUS_GAME_OVER
                                    gameInterface?.onGameOver()
                                }
                                is CoinSprite -> {
                                    points += sprite.getScore()
                                    scoreSprite?.currentScore = points
                                    gameInterface?.onGetPoint()
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Pauses the game (pauses the game loop).
     */
    fun pause() {
        isPaused = true
    }

    /**
     * Resumes the game (resumes the game loop).
     */
    fun resume() {
        isPaused = false
    }

    override fun onPlayBtnTapped() {
        startGame()
    }

    override fun onReplayBtnTapped() {
        startGame()
    }

    /**
     * The goal of Flying Bird is to allow the player to progress the bird by tapping on the screen,
     * so we must act within the onTouch method of the OnTouchListener interface that our main
     * activity inherits. It is in this method that we will interact with the player when they type
     * on the screen.
     */
    fun onTap(x: Float, y: Float) {
        when (currentStatus) {
            Sprite.STATUS_NOT_STARTED -> {
                /*
                If the game has not yet started, a first tap on the screen will allow you to
                change its status to the STATUS_PLAY constant. Have a good game!
                */
                splashSprite?.onTap(x, y)
            }
            Sprite.STATUS_PLAY -> {
                /*
                In case the game is in progress, we call the onTap method of the BirdSprite
                to increment its current speed which causes the bird to rise on the screen.
                This rise on the bird’s screen fights against the effect of gravity by preventing
                it from falling to the ground.
                */
                birdSprite?.jump()
                gameInterface?.onWing()
            }
            Sprite.STATUS_GAME_OVER -> {
                /*
                Finally, if the game is over, we call the onTap method of the GameOverSprite object
                which will return a constant as output allowing us to know which part of the end
                screen the player to touch in order to react accordingly either to start a new game
                or to share the successful score via social networks for example.
                */
                gameOverSprite?.onTap(x, y)
            }
        }
    }

    /**
     * Prevents memory leakage (some objects were not releasing memory).
     */
    fun release() {
        gameInterface = null
    }

    /**
     * Cleans all sprites. Resets the score.
     */
    private fun resetGame() {
        workSprites = mutableListOf()
        scoreSprite = null
        groundSprite = null
        birdSprite = null
        splashSprite = null
        gameOverSprite = null
        lastPipeSprite = null
        countPipes = 0
        points = 0
    }

    // Clears previous state and switches the loop into STATUS_PLAY.
    private fun startGame() {
        resetGame()
        currentStatus = Sprite.STATUS_PLAY
        gameInterface?.onGameStart()
    }

    // Erases the whole canvas with a transparent clear before each frame.
    private fun cleanCanvas(canvas: Canvas) {
        canvas.drawColor(0x00000000, PorterDuff.Mode.CLEAR)
    }

    /** Callbacks fired towards the hosting view for sounds/haptics/analytics. */
    interface GameInterface {
        fun onWing()
        fun onGameStart()
        fun onGetPoint()
        fun onHit()
        fun onGameOver()
    }
}
|
part of kcaa_controller;
/// Applies a ship-list update received from the server.
///
/// Rebuilds the entries of `model.shipMap` from [data], removes ships the
/// server no longer reports, reorders the visible ship list, and refreshes
/// the derived counters (filtered, warm-up, repair and under-repair ships).
void handleShipList(
    Assistant assistant, AssistantModel model, Map<String, dynamic> data) {
  // Detach every equipment from its ship; the links are rebuilt while each
  // ship is updated below.
  for (var equipment in model.equipmentMap.values) {
    equipment.ship = null;
  }
  var seenShipIds = new Set<int>();
  for (var shipData in (data["ships"] as Map).values) {
    var shipId = shipData["id"];
    // Reuse the existing Ship object when present so observers keep their
    // bindings; otherwise create a fresh one.
    var ship = model.shipMap.putIfAbsent(shipId, () => new Ship());
    ship.update(shipData, model.shipTypeDefinitionMap, model.fleets,
        model.equipmentMap);
    seenShipIds.add(shipId);
  }
  updateAvailableEquipments(model);
  // Drop ships that are no longer present in this update.
  var staleShipIds =
      new Set<int>.from(model.shipMap.keys).difference(seenShipIds);
  for (var shipId in staleShipIds) {
    model.shipMap.remove(shipId);
  }
  reorderShipList(assistant, model);
  model.numFilteredShips =
      model.ships.where((ship) => assistant.shipList.filter(ship)).length;
  model.numDamagedShipsToWarmUp = model.ships
      .where(Ship.filterCanWarmUp)
      .where(Ship.filterCanRepair)
      .length;
  model.numShipsToWarmUp =
      model.ships.where(Ship.filterCanWarmUp).length;
  model.numShipsUnderRepair =
      model.ships.where(Ship.filterUnderRepair).length;
  model.numShipsToRepair =
      model.ships.where(Ship.filterCanRepair).length;
  updateShipTags(model);
}
/// Re-sorts [model.ships] using the comparator and order inverter taken
/// from [assistant.shipList].
void reorderShipList(Assistant assistant, AssistantModel model) {
  var shipsLength = model.shipMap.length;
  resizeList(model.ships, shipsLength, () => new Ship());
  var sortedShips = model.shipMap.values.toList(growable: false);
  var inverter = assistant.shipList.shipOrderInverter;
  var comparer = assistant.shipList.shipComparer;
  sortedShips.sort((a, b) => inverter(comparer(a, b)));
  for (var i = 0; i < shipsLength; i++) {
    var ship = sortedShips[i];
    // Update the ship list only when the order has changed.
    // Seems like it requires tremendous amount of load to assign a value to
    // ObservableList, even if the value being assigned is the same as the
    // previous value.
    if (model.ships[i] != ship) {
      model.ships[i] = ship;
    }
  }
}
/// Refreshes each ship's belonging-fleet link.
///
/// Ships whose id appears in any fleet get their belonging fleet
/// recomputed from [model.fleets]; all other ships have the link cleared.
void notifyShipList(AssistantModel model) {
  var fleetMemberIds = new Set.from(model.fleets
      .expand((fleet) => fleet.ships)
      .map((member) => member.id));
  for (var ship in model.ships) {
    if (!fleetMemberIds.contains(ship.id)) {
      ship.belongingFleet = null;
    } else {
      ship.updateBelongingFleet(model.fleets);
    }
  }
}
/// Populates the static ship-type and ship definitions from a payload.
///
/// The ship type table is rebuilt from scratch; one skeleton [Ship]
/// (id, name, type name only) is stored per definition in
/// [model.shipDefinitionMap].
void handleShipDefinitionList(
    Assistant assistant, AssistantModel model, Map<String, dynamic> data) {
  model.shipTypeDefinitionMap.clear();
  for (var shipTypeData in (data["ship_types"] as Map).values) {
    model.shipTypeDefinitionMap[shipTypeData["id"]] = new ShipTypeDefinition(
        shipTypeData["id"], shipTypeData["name"],
        shipTypeData["loadable_equipment_types"], shipTypeData["sort_order"]);
  }
  for (var shipData in (data["ships"] as Map).values) {
    var ship = new Ship();
    ship.id = shipData["id"];
    ship.name = shipData["name"];
    ship.shipType = model.shipTypeDefinitionMap[shipData["ship_type"]].name;
    model.shipDefinitionMap[ship.id] = ship;
  }
}
|
require "faceted_search/railtie"
# Top-level namespace for the faceted_search gem.
module FacetedSearch
  module Rails
    # Load the Rails engine under the FacetedSearch::Rails namespace.
    require "faceted_search/engine"
  end
end
|
module ThreeScaleToolbox
  # Mixin that loads resources from several kinds of sources:
  # local file, URL, stdin ('-') or an in-memory StringIO.
  module ResourceReader
    ##
    # Load resource from different types of sources.
    # Supported types are: file, URL, stdin, StringIO
    # Content is parsed as YAML (JSON is a subset of YAML, so JSON
    # content is parsed as well) and the resulting object is returned.
    def load_resource(resource, verify_ssl)
      YAML.safe_load(read_content(resource, verify_ssl))
    rescue Psych::SyntaxError => e
      raise ThreeScaleToolbox::Error, "JSON/YAML validation failed: #{e.message}"
    end

    ##
    # Reads resources from different types of sources.
    # Supported types are: file, URL, stdin, StringIO
    # Resource raw content is returned
    def read_content(resource, verify_ssl)
      case resource
      when '-'
        read_stdin(resource)
      when /\A#{URI::DEFAULT_PARSER.make_regexp}\z/
        read_url(resource, verify_ssl)
      when StringIO
        read_stringio(resource)
      else
        read_file(resource)
      end
    end

    # Reads a local file, failing with a descriptive error when the file
    # is missing or unreadable.
    def read_file(filename)
      # BUGFIX: these messages previously lost the interpolated file name
      # (the "#{filename}" interpolation had been mangled).
      raise ThreeScaleToolbox::Error, "File not found: #{filename}" unless File.file?(filename)
      raise ThreeScaleToolbox::Error, "File not readable: #{filename}" unless File.readable?(filename)

      File.read(filename)
    end

    # Reads the whole standard input; the '-' resource argument is unused.
    def read_stdin(_resource)
      STDIN.read
    end

    # Downloads the resource over HTTP(S). Certificate verification is
    # disabled when verify_ssl is false.
    def read_url(resource, verify_ssl)
      endpoint = URI.parse(resource)
      http_client = Net::HTTP.new(endpoint.host, endpoint.port)
      http_client.use_ssl = endpoint.is_a?(URI::HTTPS)
      http_client.verify_mode = OpenSSL::SSL::VERIFY_NONE unless verify_ssl
      response = http_client.get(endpoint)
      case response
      when Net::HTTPSuccess then response.body
      else raise ThreeScaleToolbox::Error, "could not download resource: #{response.body}"
      end
    end

    # Returns the underlying string of an in-memory StringIO resource.
    def read_stringio(resource)
      resource.string
    end
  end
end
|
# Barclamp data migration: pre-computes STONITH attributes on pacemaker
# role data. ta/td are the template attributes/deployment (unused here);
# a/d are the attributes/deployment being migrated.
def upgrade(ta, td, a, d)
  # are we a role? If no, nothing to do.
  # - crowbar-committing is always true for roles, and generally false for
  #   proposals (except when applying)
  # - the pacemaker barclamp adds an required_post_chef_calls attribute to the
  #   deployment part of the role
  return [a, d] unless d["crowbar-committing"] && d.key?("required_post_chef_calls")

  # Resolve node objects for cluster members and pacemaker remotes.
  members = d["elements"]["pacemaker-cluster-member"] || []
  member_nodes = members.map { |n| NodeObject.find_node_by_name n }
  remotes = d["elements"]["pacemaker-remote"] || []
  remote_nodes = remotes.map { |n| NodeObject.find_node_by_name n }
  service = PacemakerService.new
  service.prepare_stonith_attributes(a, remote_nodes, member_nodes, remotes, members)
  return a, d
end
# Intentional no-op: the attributes added by the matching upgrade are
# harmless to keep on downgrade.
def downgrade(ta, td, a, d)
  # nothing; it doesn't hurt to keep the changes
  return a, d
end
|
#!/usr/bin/env python
"""
Saves all the Isophonics dataset into a jams. The structure of the Isophonics
dataset is:
/Isophonics
/Artist Annotations
/feature
/Artist
/Album
/lab (or text) files
Example:
/Isophonics
/The Beatles Annotations
/seglab
/The Beatles
/01_-_Please_Please_Me
/01_-_I_Saw_Her_Standing_There.lab
/beat
/The Beatles
/01_-_Please_Please_Me
/01_-_I_Saw_Her_Standing_There.txt
To parse the entire dataset, you simply need the path to the Isophonics dataset
and an output folder.
Example:
./isohpnics_parser.py ~/datasets/Isophonics -o ~/datasets/Isophonics/outJAMS
"""
__author__ = "Oriol Nieto"
__copyright__ = "Copyright 2014, Music and Audio Research Lab (MARL)"
__license__ = "GPL"
__version__ = "1.0"
__email__ = "oriol@nyu.edu"
import argparse
import glob
import json
import logging
import os
import time
from msaf import jams2
def fill_global_metadata(jam, lab_file):
    """Fill the global metadata of the JAMS ``jam``.

    The artist comes from the third-to-last path component of
    ``lab_file`` (the artist folder in the Isophonics layout); the title
    is the lab file name without its extension. Duration is unknown at
    parse time and stored as -1.
    """
    path_parts = lab_file.split("/")
    file_name = os.path.basename(lab_file)
    jam.metadata.artist = path_parts[-3]
    jam.metadata.title = file_name.replace(".lab", "")
    jam.metadata.duration = -1  # In seconds
    # TODO: extra info
    #jam.metadata.genre = metadata[14]
def fill_annotation_metadata(annot, attribute):
    """Fill the annotation-level metadata of ``annot``.

    ``attribute`` records which kind of annotation this is
    (e.g. "sections" or "beats").
    """
    meta = annot.annotation_metadata
    meta.attribute = attribute
    meta.corpus = "Isophonics"
    meta.version = "1.0"
    meta.annotation_tools = "Sonic Visualizer"
    meta.annotation_rules = "TODO"  # TODO
    meta.validation_and_reliability = "TODO"  # TODO
    meta.origin = "Centre for Digital Music"
    meta.annotator.name = "TODO"
    meta.annotator.email = "TODO"  # TODO
    #TODO:
    #time = "TODO"
def fill_section_annotation(lab_file, annot):
    """Fill the JAMS section annotation ``annot`` from an Isophonics lab file.

    Each line is expected to be tab-separated: start_time, end_time,
    <unused>, label.  Lines with fewer than four fields are skipped with
    a warning (previously they raised IndexError), as are segments whose
    end time does not come after their start time.
    """
    # Annotation Metadata
    fill_annotation_metadata(annot, "sections")

    # Open lab file
    try:
        f = open(lab_file, "r")
    except IOError:
        logging.warning("Annotation doesn't exist: %s", lab_file)
        return

    # Convert to JAMS. "with" guarantees the file is closed even when a
    # malformed line raises (the original leaked the handle in that case).
    with f:
        for line in f:
            section_raw = line.strip("\n").split("\t")
            if len(section_raw) < 4:
                # Blank or malformed line: skip instead of crashing.
                logging.warning("Malformed line in file %s: %r", lab_file, line)
                continue
            start_time = section_raw[0]
            end_time = section_raw[1]
            label = section_raw[3]
            if float(end_time) <= float(start_time):
                logging.warning("Start time is after end time in file %s" %
                                lab_file)
                continue
            section = annot.create_datapoint()
            section.start.value = float(start_time)
            section.start.confidence = 1.0
            section.end.value = float(end_time)
            section.end.confidence = 1.0
            section.label.value = label
            section.label.confidence = 1.0
            section.label.context = "function"  # Only function level of annotation
def fill_beat_annotation(txt_file, annot):
    """Fill the JAMS beat annotation ``annot`` from an Isophonics txt file.

    Each line holds a beat time and a beat position separated by a space
    or a tab.  Lines with neither separator (e.g. trailing blank lines)
    are skipped: previously they either raised NameError (first line) or
    silently reused ``time``/``downbeat`` from the preceding line.
    """
    # Annotation Metadata
    fill_annotation_metadata(annot, "beats")

    # Open txt file
    try:
        f = open(txt_file, "r")
    except IOError:
        logging.warning("Annotation doesn't exist: %s", txt_file)
        return

    # Convert to JAMS; "with" closes the file even on parse errors.
    with f:
        for line in f:
            line = line.strip("\n")
            if " " in line:
                time = line.split(" ")[0]
                downbeat = line.split(" ")[-1]
            elif "\t" in line:
                time = line.split("\t")[0]
                downbeat = line.split("\t")[-1]
            else:
                # BUGFIX: no separator on this line -- skip it instead of
                # using undefined or stale time/downbeat values.
                continue
            beat = annot.create_datapoint()
            try:
                # Problem with 11_-_When_I_Get_Home (starting with upbeat)
                beat.time.value = float(time)
            except ValueError:
                beat.time.value = float(time[0])
            beat.time.confidence = 1.0
            try:
                beat.label.value = int(downbeat)
            except ValueError:
                beat.label.value = int(-1)
            beat.label.confidence = 1.0
def create_JAMS(lab_file, out_file, parse_beats=False):
    """Create a JAMS file at ``out_file`` from the Isophonics ``lab_file``.

    Section annotations are always parsed; beat annotations are parsed
    from the matching txt file when ``parse_beats`` is True.
    """
    # Fresh JAMS object with the file-level metadata filled in.
    jam = jams2.Jams()
    fill_global_metadata(jam, lab_file)

    # Section annotations come from the lab file itself.
    section_annot = jam.sections.create_annotation()
    fill_section_annotation(lab_file, section_annot)

    # Beat annotations live in a parallel "beat" tree with .txt files.
    if parse_beats:
        beat_annot = jam.beats.create_annotation()
        txt_file = lab_file.replace("seglab", "beat").replace(".lab", ".txt")
        fill_beat_annotation(txt_file, beat_annot)

    # TODO: Create Chord and Key annotations

    # Serialize to disk.
    with open(out_file, "w") as f:
        json.dump(jam, f, indent=2)
def process(in_dir, out_dir):
    """Convert the original Isophonics files into the JAMS format and
    save them in the ``out_dir`` folder."""
    # Make sure the output folder exists.
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # Each top-level folder holds the annotations for one artist.
    for annot_dir in glob.glob(os.path.join(in_dir, "*")):
        if not os.path.isdir(annot_dir):
            continue
        if os.path.basename(annot_dir) in ("jams", "audio"):
            continue

        # Beats are only available when a "beat" subtree exists.
        parse_beats = os.path.isdir(os.path.join(annot_dir, "beat"))

        # Sections live under seglab/<artist>/<album>/*.lab
        seglab_dir = os.path.join(annot_dir, "seglab")
        artist_dir = glob.glob(os.path.join(seglab_dir, "*"))[0]
        for album_dir in glob.glob(os.path.join(artist_dir, "*")):
            if not os.path.isdir(album_dir) or \
                    os.path.basename(album_dir) == "audio":
                continue
            logging.info("Parsing album %s" % os.path.basename(album_dir))
            # One JAMS file per .lab track.
            for lab_file in glob.glob(os.path.join(album_dir, "*.lab")):
                out_file = os.path.join(
                    out_dir,
                    os.path.basename(lab_file).replace(".lab", "") + ".jams")
                create_JAMS(lab_file, out_file, parse_beats)
def main():
    """Parse command-line arguments and run the Isophonics -> JAMS conversion."""
    parser = argparse.ArgumentParser(
        description="Converts the Isophonics dataset to the JAMS format",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("in_dir",
                        action="store",
                        help="Isophonics main folder")
    parser.add_argument("-o",
                        action="store",
                        dest="out_dir",
                        default="outJAMS",
                        help="Output JAMS folder")
    args = parser.parse_args()

    # Log with timestamps and time the whole run.
    logging.basicConfig(format='%(asctime)s: %(message)s', level=logging.INFO)
    start_time = time.time()
    process(args.in_dir, args.out_dir)
    logging.info("Done! Took %.2f seconds." % (time.time() - start_time))
# Allow the parser to be run as a standalone script.
if __name__ == '__main__':
    main()
|
const io = require('socket.io-client');
const http = require('http');
const ioBack = require('socket.io');
export class ServerHandler {
  /**
   * Starts a throwaway socket.io server on an ephemeral port and
   * remembers the most recent server-side socket.
   */
  constructor() {
    this.httpServer = http.createServer().listen();
    this.ioServer = ioBack(this.httpServer);
    this.ioServer.on('connection', (serverSocket) => {
      this.serverSocket = serverSocket;
    });
  }

  /**
   * Connects a client socket to the test server.
   * @param {Function} done invoked once the socket has connected
   * @returns the client socket
   */
  setUpSocket(done) {
    this.httpServerAddr = this.httpServer.address();
    this.socket = io.connect(`http://[${this.httpServerAddr.address}]:${this.httpServerAddr.port}`, {
      'reconnection delay': 0,
      'reopen delay': 0,
      'force new connection': true,
      transports: ['websocket'],
    });
    this.socket.on('connect', () => {
      done();
    });
    return this.socket;
  }

  /**
   * Tears down client and server.
   * BUGFIX: the client socket is now disconnected too; previously it was
   * left open, which can keep the test process alive after cleanup.
   */
  cleanUp() {
    if (this.socket && this.socket.connected) {
      this.socket.disconnect();
    }
    this.ioServer.close();
    this.httpServer.close();
  }

  /** @returns the latest server-side socket (set on each connection) */
  getServerSocket() {
    return this.serverSocket;
  }

  /** @returns the client socket created by setUpSocket */
  getSocket() {
    return this.socket;
  }
}
|
DDEX-Deserialiser
=================
A DDEX XML batch deserialiser generated by [7digital/XsdToObject](https://github.com/7digital/XsdToObject)
# How To Update Nuget Package
1. Increment the AssemblyInfo.cs version
2. Rebuild the project in Release configuration
3. Run: .nuget\nuget pack src\DDEX-Deserialiser\DDEX-Deserialiser.csproj -IncludeReferencedProjects
4. Upload the package to https://www.nuget.org/packages/DDEX-Deserialiser
|
import {ConfigurationInterface} from './Configuration';
import {EventDispatcher, EventDispatcherInterface, EventInterface, ListenerCallbackInterface} from './Event';
/**
 * Base object interface
 *
 * Combines read-only configuration access with a simple event emitter.
 */
export interface AjaxifyObjectInterface
{
    /**
     * Run the object's cleanup code
     *
     * The object is not safe to use after it has been destroyed.
     */
    destroy: () => void;

    /**
     * See if an option is defined
     */
    hasOption: (name: string) => boolean;

    /**
     * Get value of an option
     *
     * Returns undefined when the option is not set.
     */
    getOption: (name: string) => any;

    /**
     * Get all options
     */
    getOptions: () => ConfigurationInterface;

    /**
     * Attach a callback for the given event
     *
     * NOTE(review): the effect of `priority` depends on the
     * EventDispatcher implementation (not in view) — confirm ordering.
     */
    listen: (eventName: string, callback: ListenerCallbackInterface, priority?: number) => void;

    /**
     * Emit an event
     *
     * Returns the event after dispatch.
     */
    emit: (event: EventInterface) => EventInterface;
}
/**
 * Base object with event and configuration support
 *
 * Both the option map and the event dispatcher are created lazily on
 * first use; the previous version duplicated the same null-check/init
 * sequence in five methods, now funneled through private accessors.
 */
export class AjaxifyObject implements AjaxifyObjectInterface
{
    private options: ConfigurationInterface = null;
    private eventDispatcher: EventDispatcherInterface = null;

    constructor() {}

    destroy(): void {
    }

    hasOption(name: string): boolean {
        return this.ensureOptions().hasOwnProperty(name);
    }

    getOption(name: string): any {
        return this.ensureOptions()[name];
    }

    getOptions(): ConfigurationInterface {
        return this.ensureOptions();
    }

    /**
     * Provide the option values (returns an empty map here; intended to
     * be overridden to supply defaults)
     */
    loadOptions(): ConfigurationInterface {
        return {};
    }

    /**
     * Option lazy-loading: load on first access, then reuse
     */
    private ensureOptions(): ConfigurationInterface {
        if (null === this.options) {
            this.options = this.loadOptions();
        }
        return this.options;
    }

    listen(eventName: string, callback: ListenerCallbackInterface, priority: number = 0): void {
        this.ensureEventDispatcher().addCallback(eventName, callback, priority);
    }

    emit(event: EventInterface): EventInterface {
        return this.ensureEventDispatcher().emit(event);
    }

    getEventDispatcher(): EventDispatcherInterface {
        return this.ensureEventDispatcher();
    }

    /**
     * Event dispatcher lazy-loading: create on first access, then reuse
     */
    private ensureEventDispatcher(): EventDispatcherInterface {
        if (null === this.eventDispatcher) {
            this.eventDispatcher = new EventDispatcher();
        }
        return this.eventDispatcher;
    }
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace _10FlipServer.Models
{
    /// <summary>
    /// A deck of cards, backed by a stack.
    /// </summary>
    public class Deck
    {
        // Initialized eagerly so a fresh Deck never exposes a null stack
        // (the field previously defaulted to null).
        public Stack<Card> Cards = new Stack<Card>();
    }
}
|
#!/bin/sh
# Read RPC credentials from local files and exec bitcoind with them,
# forwarding any extra arguments.
#
# SECURITY: 'set -x' was removed -- with tracing enabled, the exec line
# below printed the RPC password to stderr (and thus into captured logs).
set -e

USER=$(cat ./user)
PASSWORD=$(cat ./password)

exec bitcoind -rpcuser="$USER" -rpcpassword="$PASSWORD" "$@"
|
<#
.SYNOPSIS
Module loader: loads every function script found under the module root.
.DESCRIPTION
Recursively finds all *.ps1 files under $PSScriptRoot (excluding *.ps1xml
format files) and loads each one, either by dot-sourcing it or by invoking
its text through the execution context.
.EXAMPLE
Import the module; the functions defined in the .ps1 files become available.
.INPUTS
None.
.OUTPUTS
None.
#>
[CmdletBinding()]
param(
    # When $true, scripts are dot-sourced (easier to debug/step through).
    [bool]$DotSourceModule = $false
)
#Get function files
Get-ChildItem $PSScriptRoot\ -Recurse -Include "*.ps1" -Exclude "*.ps1xml" |
ForEach-Object {
    if ($DotSourceModule) {
        . $_.FullName
    } else {
        # Read the file as UTF8 and invoke its text via the execution
        # context (first argument $false = useLocalScope disabled --
        # confirm against InvokeScript's documented overloads).
        $ExecutionContext.InvokeCommand.InvokeScript(
            $false,
            (
                [scriptblock]::Create(
                    [io.file]::ReadAllText(
                        $_.FullName,
                        [Text.Encoding]::UTF8
                    )
                )
            ),
            $null,
            $null
        )
    }
}
# Module version, exposed to the module scope as ExternalVersion.
[System.Version]$Version = "0.0"
Set-Variable -Name ExternalVersion -Value $Version -Scope Script
|
# typed: false
require 'spec_helper'
require 'benchmark'
require 'datadog/core/buffer/random'
# Unit spec for Datadog::Core::Buffer::Random: bounded/unbounded push and
# concat semantics, close behavior, and opt-in local benchmarks.
RSpec.describe Datadog::Core::Buffer::Random do
  subject(:buffer) { described_class.new(max_size) }

  # A max_size of 0 means "no size limit" for this buffer.
  let(:max_size) { 0 }

  # Builds n distinct marker objects to push through the buffer.
  def get_test_items(n = 1)
    Array.new(n) { Object.new }
  end

  describe '#initialize' do
    it { is_expected.to be_a_kind_of(described_class) }
  end

  describe '#push' do
    let(:output) { buffer.pop }

    context 'given no limit' do
      let(:items) { get_test_items(4) }
      let(:max_size) { 0 }

      it 'retains all items' do
        items.each { |t| buffer.push(t) }
        expect(output.length).to eq(4)
      end
    end

    context 'given a max size' do
      let(:items) { get_test_items(max_size + 1) }
      let(:max_size) { 3 }

      it 'does not exceed it' do
        items.each { |t| buffer.push(t) }
        expect(output.length).to eq(max_size)
        expect(output).to include(items.last)
      end
    end

    context 'when closed' do
      let(:max_size) { 0 }
      let(:items) { get_test_items(6) }
      let(:output) { buffer.pop }

      it 'retains items up to close' do
        items.first(4).each { |t| buffer.push(t) }
        buffer.close
        items.last(2).each { |t| buffer.push(t) }
        expect(output.length).to eq(4)
        expect(output).to_not include(*items.last(2))
      end
    end
  end

  # #concat mirrors the #push contract for bulk insertion.
  describe '#concat' do
    let(:output) { buffer.pop }

    context 'given no limit' do
      let(:items) { get_test_items(4) }
      let(:max_size) { 0 }

      it 'retains all items' do
        buffer.concat(items)
        expect(output.length).to eq(4)
      end
    end

    context 'given a max size' do
      let(:items) { get_test_items(max_size + 1) }
      let(:max_size) { 3 }

      it 'does not exceed it' do
        buffer.concat(items)
        expect(output.length).to eq(max_size)
        expect(output).to include(items.last)
      end
    end

    context 'when closed' do
      let(:max_size) { 0 }
      let(:items) { get_test_items(6) }
      let(:output) { buffer.pop }

      it 'retains items up to close' do
        buffer.concat(items[0..3])
        buffer.close
        buffer.concat(items[4..5])
        expect(output.length).to eq(4)
        expect(output).to_not include(*items.last(2))
      end
    end
  end

  describe '#length' do
    subject(:length) { buffer.length }

    context 'given no items' do
      it { is_expected.to eq(0) }
    end

    context 'given an item' do
      before { buffer.push([1]) }

      it { is_expected.to eq(1) }
    end
  end

  describe '#empty?' do
    subject(:empty?) { buffer.empty? }

    context 'given no items' do
      it { is_expected.to be true }
    end

    context 'given an item' do
      before { buffer.push([1]) }

      it { is_expected.to be false }
    end
  end

  # #pop drains the buffer: contents are returned and the buffer empties.
  describe '#pop' do
    subject(:pop) { buffer.pop }

    let(:items) { get_test_items(2) }

    before do
      items.each { |t| buffer.push(t) }
    end

    it do
      expect(pop.length).to eq(items.length)
      expect(pop).to include(*items)
      expect(buffer.empty?).to be true
    end
  end

  describe '#close' do
    subject(:close) { buffer.close }

    it do
      expect { close }
        .to change { buffer.closed? }
        .from(false)
        .to(true)
    end
  end

  describe '#closed?' do
    subject(:closed?) { buffer.closed? }

    context 'when the buffer has not been closed' do
      it { is_expected.to be false }
    end

    context 'when the buffer is closed' do
      before { buffer.close }

      it { is_expected.to be true }
    end
  end

  # :nocov:
  # Manual benchmarks; always skipped via the `before` hook below and
  # excluded from coverage.
  describe 'performance' do
    require 'benchmark'
    let(:n) { 10_000 }
    let(:test_item_count) { 20 }

    before { skip('Performance test does not run in CI.') }

    context 'no max_size' do
      it do
        Benchmark.bmbm do |x|
          x.report('No max #push') do
            n.times do
              buffer = described_class.new(max_size)
              items = get_test_items(test_item_count)
              items.each { |item| buffer.push(item) }
            end
          end

          x.report('No max #concat') do
            n.times do
              buffer = described_class.new(max_size)
              items = get_test_items(test_item_count)
              buffer.concat(items)
            end
          end
        end
      end
    end

    context 'max size' do
      let(:max_size) { 20 }

      context 'no overflow' do
        let(:test_item_count) { max_size }

        it do
          Benchmark.bmbm do |x|
            x.report('Max no overflow #push') do
              n.times do
                buffer = described_class.new(max_size)
                items = get_test_items(test_item_count)
                items.each { |item| buffer.push(item) }
              end
            end

            x.report('Max no overflow #concat') do
              n.times do
                buffer = described_class.new(max_size)
                items = get_test_items(test_item_count)
                buffer.concat(items)
              end
            end
          end
        end
      end

      context 'partial overflow' do
        # super() is the outer test_item_count (20), so this is 40 items.
        let(:test_item_count) { max_size + super() }

        it do
          Benchmark.bmbm do |x|
            x.report('Max partial overflow #push') do
              n.times do
                buffer = described_class.new(max_size)
                items = get_test_items(test_item_count)
                items.each { |item| buffer.push(item) }
              end
            end

            x.report('Max partial overflow #concat') do
              n.times do
                buffer = described_class.new(max_size)
                items = get_test_items(test_item_count)
                buffer.concat(items)
              end
            end
          end
        end
      end

      context 'total overflow' do
        it do
          Benchmark.bmbm do |x|
            x.report('Max total overflow #push') do
              n.times do
                buffer = described_class.new(max_size)
                buffer.instance_variable_set(:@items, get_test_items(max_size))
                items = get_test_items(test_item_count)
                items.each { |item| buffer.push(item) }
              end
            end

            x.report('Max total overflow #concat') do
              n.times do
                buffer = described_class.new(max_size)
                buffer.instance_variable_set(:@items, get_test_items(max_size))
                items = get_test_items(test_item_count)
                buffer.concat(items)
              end
            end
          end
        end
      end
    end
  end
  # :nocov:
end
|
namespace Sharpen.Engine.SharpenSuggestions.CSharp50.AsyncAwait
{
    /// <summary>
    /// Suggestion to await Task.Delay() instead of calling the synchronous
    /// System.Threading.Thread.Sleep().
    /// </summary>
    internal sealed class AwaitTaskDelayInsteadOfCallingThreadSleep : BaseAwaitKnownAsynchronousEquivalentInsteadOfCallingSynchronousMember
    {
        private AwaitTaskDelayInsteadOfCallingThreadSleep() : base(new SynchronousMemberReplacementInfo
        {
            AsyncEquivalentDisplayName = "Task.Delay()",
            SynchronousMemberDisplayName = "Thread.Sleep()",
            SynchronousMemberName = "Sleep",
            SynchronousMemberTypeNamespace = "System.Threading",
            SynchronousMemberTypeName = "Thread"
        }) { }

        // Private constructor plus a single shared instance: the suggestion
        // carries no per-use state.
        public static readonly AwaitTaskDelayInsteadOfCallingThreadSleep Instance = new AwaitTaskDelayInsteadOfCallingThreadSleep();
    }
}
|
package `in`.xnnyygn.bittorrent.tracker
import java.lang.RuntimeException
/**
 * Announce event reported to the tracker; EMPTY means a regular
 * periodic announce.
 */
enum class PeerEvent {
    STARTED,
    COMPLETED,
    STOPPED,
    EMPTY
}
/**
 * Announce request sent to a tracker.
 *
 * [infoHash] and [peerId] are raw byte arrays, so [equals]/[hashCode]
 * are overridden to compare array *content* — the generated data-class
 * implementations compare arrays by reference identity.
 */
data class FindPeerRequest(
    val infoHash: ByteArray,
    val peerId: ByteArray,
    val ip: String? = null,
    val port: Int,
    val uploaded: Int,
    val downloaded: Int,
    val left: Int,
    val event: PeerEvent = PeerEvent.EMPTY
) {
    override fun equals(other: Any?): Boolean {
        if (this === other) return true
        if (other !is FindPeerRequest) return false
        return infoHash.contentEquals(other.infoHash) &&
            peerId.contentEquals(other.peerId) &&
            ip == other.ip &&
            port == other.port &&
            uploaded == other.uploaded &&
            downloaded == other.downloaded &&
            left == other.left &&
            event == other.event
    }

    override fun hashCode(): Int {
        var result = infoHash.contentHashCode()
        result = 31 * result + peerId.contentHashCode()
        result = 31 * result + (ip?.hashCode() ?: 0)
        result = 31 * result + port
        result = 31 * result + uploaded
        result = 31 * result + downloaded
        result = 31 * result + left
        result = 31 * result + event.hashCode()
        return result
    }
}
/**
 * A peer returned by the tracker. [id] is a raw byte array, so
 * [equals]/[hashCode] use content comparison instead of the reference
 * comparison a data class generates for array properties.
 */
data class Peer(val id: ByteArray, val ip: String, val port: Int) {
    override fun equals(other: Any?): Boolean {
        if (this === other) return true
        if (other !is Peer) return false
        return id.contentEquals(other.id) && ip == other.ip && port == other.port
    }

    override fun hashCode(): Int {
        var result = id.contentHashCode()
        result = 31 * result + ip.hashCode()
        result = 31 * result + port
        return result
    }
}
/** Tracker response: the re-announce interval (units per tracker protocol — confirm seconds) and the peers found. */
data class FindPeerResponse(val interval: Int, val peers: List<Peer>)
/**
 * Raised when a tracker announce fails.
 *
 * The secondary constructor was replaced by a default argument — the
 * idiomatic Kotlin equivalent with identical call sites.
 */
class FindPeerException(msg: String, cause: Throwable? = null) : RuntimeException(msg, cause)
/** Abstraction over a BitTorrent tracker. */
interface Tracker {
    /** Announces [request] to the tracker and returns the peers it reports. */
    fun findPeers(request: FindPeerRequest): FindPeerResponse
}
/** Test double that answers every announce with the fixed [response]. */
class MockTracker(private val response: FindPeerResponse) :
    Tracker {
    override fun findPeers(request: FindPeerRequest) = response
}
|
# Moderation Bot
## Installation:
### Acquire a discord token as shown here:
[How to get the bot token](https://github.com/reactiflux/discord-irc/wiki/Creating-a-discord-bot-&-getting-a-token)
### Fill in the config.yml
- Bot prefix
- Owner ID
- botToken
- Server ID (with Developer Mode enabled, right-click the server and select "Copy ID")
- moderation log channel name (You should only set this once)
### Create the bot:
[Create the bot here](https://discordapp.com/developers/applications/)
- Make it a bot user
- Not public bot
### Finish Installation
Open the folder preferably with Visual Studio Code Integrated terminal or git bash.
In the opened terminal:
Make sure you have npm installed:
- node -v
Install the required node modules:
- npm install --save request
- npm install --save discord.js
- npm install --save util
- npm install --save fs
- npm install --save path
When in the same folder as the index.js, run the bot with the command `node .`
The bot should be running.
## Add the bot to your server as shown here:
[Tutorial here](https://github.com/jagrosh/MusicBot/wiki/Adding-Your-Bot-To-Your-Server)
### Give administrative permissions to the bot
## How to use
Let the bot set up its channels on first launch and configure them as you wish, but do not change their names.
In any channel you can execute the following commands (default prefix: !):
### Command: ` ! clear <number> <userName>#<discriminator>`
The bot will clear that user's last <number> messages.
### Command: ` ! warn <userName>#<discriminator> <reason>`
The bot will warn the user, using the text that follows as the reason.
|
require 'pinball_wizard/configuration'
require 'pinball_wizard/feature'
require 'pinball_wizard/registry'
require 'pinball_wizard/dsl'
require 'pinball_wizard/railtie' if defined?(Rails)
|
using System;
namespace Agiil.Domain.Activity
{
    /// <summary>
    /// A service which gets the work log for a ticket.
    /// </summary>
    public interface IGetsTicketWorkLog
    {
        /// <summary>
        /// Gets the work log described by the request.
        /// NOTE(review): the parameter type is <c>AddWorkLogRequest</c>, which
        /// reads like a write-side type for a read operation — confirm the
        /// pairing is intended.
        /// </summary>
        GetWorklogResponse GetWorkLog(AddWorkLogRequest request);
    }
}
|
%% Rebar3 provider that exports the project's dependencies as a Nix
%% expression ("rebar3 nix lock").
-module(rebar3_nix_lock_prv).

-export([init/1, do/1, format_error/1]).

%% Template of the generated file: an overridable attribute set where
%% each dependency is rendered via a pluggable `builder` (defaults to
%% returning just the src).
-define(NIX_DEPS, "# Generated by rebar3_nix
let fetchOnly = { src, ... }: src;
in { builder ? fetchOnly, fetchHex, fetchgit, fetchFromGitHub, overrides ? (x: y: { }) }:
let
  self = packages // (overrides self packages);
  packages = with self; {~s
  };
in self
").

%% One entry in the generated set: name, version, src and beam deps.
-define(DEP, "
    ~s = builder {
      name = \"~s\";
      version = \"~s\";
      src = ~s;
      beamDeps = [ ~s];
    };").

%% Fetcher snippets for hex packages, plain git repos and GitHub repos.
-define(FETCH_HEX,
        "fetchHex {
          pkg = \"~s\";
          version = \"~s\";
          sha256 = \"~s\";
        }").
-define(FETCH_GIT,
        "fetchgit {
          url = \"~s\";
          rev = \"~s\";
          sha256 = \"~s\";
        }").
-define(FETCH_FROM_GITHUB,
        "fetchFromGitHub {
          owner = \"~s\";
          repo = \"~s\";
          rev = \"~s\";
          sha256 = \"~s\";
        }").

%% Default output file for the generated expression.
-define(DEFAULT_OUT, "rebar-deps.nix").
%% Provider registration: exposes "rebar3 nix lock", running after the
%% default lock provider so the lock data is up to date.
-spec init(rebar_state:t()) -> {ok, rebar_state:t()}.
init(State) ->
    Provider = providers:create([
        {namespace, nix},
        {name, lock},
        {module, ?MODULE},
        {bare, true},
        {deps, [{default, lock}]},
        {example, "rebar3 nix lock -o rebar-deps.nix"},
        {opts, [{out, $o, "out", {string, ?DEFAULT_OUT}, "Output file."}]},
        {short_desc, "Export rebar3 dependencies for nix"},
        {desc, "Export rebar3 dependencies for nix"},
        {profiles, [default, test, prod]}
    ]),
    {ok, rebar_state:add_provider(State, Provider)}.

%% Renders every app and plugin dependency into the Nix expression and
%% writes it to the configured output file.
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
do(State) ->
    JustApps = rebar_state:all_deps(State),
    JustPlugins = rebar_state:all_plugin_deps(State),
    Apps = deduplicate(JustApps ++ JustPlugins), % pick newer one
    AllDepsNames = [to_binary(rebar_app_info:name(A)) || A <- Apps],
    Deps = [to_nix(A, AllDepsNames) || A <- Apps],
    Drv = io_lib:format(?NIX_DEPS, [Deps]),
    ok = file:write_file(out_path(State), Drv),
    {ok, State}.
%% Wraps the key extractor F into a descending-order comparator for
%% lists:sort/2.
make_comparator(F) ->
    fun(A, B) ->
        F(A) > F(B)
    end.

%% Keeps one entry per app name, preferring the greater version.
%% NOTE(review): rebar_app_info:vsn/1 values are compared with the
%% generic '>' operator; for version strings this is lexicographic
%% ("1.9" > "1.10") -- confirm that ordering is acceptable here.
deduplicate(Apps) ->
    NewerFirstF = make_comparator(fun rebar_app_info:vsn/1),
    CheckVF =
        fun(App, Acc) ->
            Name = rebar_app_info:name(App),
            L = maps:get(Name, Acc, []),
            [Newer | _] = lists:sort(NewerFirstF, [App | L]),
            maps:put(Name, [Newer], Acc)
        end,
    lists:flatten(maps:values(lists:foldl(CheckVF, maps:new(), Apps))).

%% Output path from the -o/--out option (defaults to ?DEFAULT_OUT).
out_path(State) ->
    {Args, _} = rebar_state:command_parsed_args(State),
    proplists:get_value(out, Args, ?DEFAULT_OUT).

%% Provider callback: renders any error term for display.
-spec format_error(any()) -> iolist().
format_error(Reason) ->
    io_lib:format("~p", [Reason]).

%% Renders one dependency as a ?DEP entry; beamDeps is restricted to
%% deps that are themselves part of this lock set.
to_nix(AppInfo, AllDepsNames) ->
    Name = rebar_app_info:name(AppInfo),
    {Vsn, Src} = src(Name, rebar_app_info:source(AppInfo)),
    Deps = [[BinName, " "] || BinName <- app_deps(AppInfo),
                              lists:member(BinName, AllDepsNames)],
    io_lib:format(?DEP, [Name, Name, Vsn, Src, Deps]).
%% Renders the src snippet for a dependency, returning {Version, SrcNix}.
%% Hex packages reuse the recorded hash; git dependencies are prefetched
%% (nix-prefetch-git / nix-prefetch-url) to obtain one.
src(_, {pkg, PkgName, Vsn, _OldHash, Hash, _Repo}) ->
    {Vsn, io_lib:format(?FETCH_HEX, [PkgName, Vsn, to_sri(Hash)])};
src(_, {git, Url, {ref, Ref}}) ->
    case string:prefix(string:lowercase(Url), "https://github.com/") of
        nomatch ->
            %% Generic git host: prefetch and parse the JSON output for sha256.
            Prefetch = ["nix-prefetch-git --quiet ", Url, " ", Ref],
            {ok, Json} = rebar3_nix_utils:cmd(Prefetch),
            {ok, #{<<"sha256">> := Hash}, _} =
                rebar3_nix_jsone_decode_vendored:decode(unicode:characters_to_binary(Json)),
            {"git", io_lib:format(?FETCH_GIT, [Url, Ref, Hash])};
        Path ->
            %% GitHub URL: render a fetchFromGitHub call, hashing the tarball.
            [Owner, Repo0] = string:split(Path, "/", trailing),
            Repo = re:replace(Repo0, "\\.git$", "", [{return, list}]),
            Prefetch = ["nix-prefetch-url --unpack https://github.com/",
                        Owner, "/", Repo, "/tarball/", Ref],
            {ok, Hash} = rebar3_nix_utils:cmd(Prefetch),
            {"git", io_lib:format(?FETCH_FROM_GITHUB, [Owner, Repo, Ref, Hash])}
    end;
src(Name, Other) ->
    %% Unsupported dependency source: abort the whole run with context.
    rebar_api:abort("rebar3_nix: unsupported dependency type ~p for ~s~n", [Other, Name]),
    undefined.

%% Converts a hex-encoded (base16) sha256 into an SRI "sha256-<base64>" string.
to_sri(Sha256) when is_list(Sha256) ->
    to_sri(list_to_binary(Sha256));
to_sri(<<Sha256Base16:64/binary>>) ->
    Sha256 = binary_to_integer(Sha256Base16, 16),
    ["sha256-", base64:encode(<<Sha256:32/big-unsigned-integer-unit:8>>)].

%% All dependency names an app refers to: OTP applications plus rebar
%% deps from the default and prod profiles, deduplicated and normalized.
app_deps(AppInfo) ->
    Names = proplists:get_value(applications, rebar_app_info:app_details(AppInfo), []) ++
        rebar_state:deps_names(rebar_app_info:get(AppInfo, {deps, default}, []) ++
                               rebar_app_info:get(AppInfo, {deps, prod}, [])),
    lists:usort([to_binary(N) || N <- Names]).

%% Normalizes an atom / charlist / binary name to a binary.
to_binary(Atom) when is_atom(Atom) ->
    atom_to_binary(Atom, utf8);
to_binary(List) when is_list(List) ->
    unicode:characters_to_binary(List);
to_binary(Bin) ->
    Bin.
|
/**
 * A customer, including both the values as supplied and their verified
 * (read-only) counterparts.
 */
export interface Customer {
    /**
     * The Address of the customer
     */
    address: Address;
    /**
     * The email address of the customer
     */
    email: string;
    /**
     * The full name of the customer
     */
    name: string;
    /**
     * The phone number of the customer
     * NOTE: This includes the extension
     */
    phone: string;
    /**
     * Verified customer’s address
     */
    readonly verified_address: Address;
    /**
     * Verified customer’s email address
     */
    readonly verified_email: string;
    /**
     * Verified customer’s full name
     */
    readonly verified_name: string;
    /**
     * Verified customer’s phone number
     */
    readonly verified_phone: string;
}
// --- CUSTOMER ADDRESS --- //
/**
 * A postal address in the shape used by the Customer interface.
 */
export interface Address {
    /**
     * City/District/Suburb/Town/Village.
     */
    city: string;
    /**
     * Two-letter country code, capitalized
     * NOTE: The codes are specified by the ISO3166 alpha-2
     */
    country: string;
    /**
     * Address line 1 (Street address/PO Box/Company name).
     */
    line1: string;
    /**
     * Address line 2 (Apartment/Suite/Unit/Building).
     */
    line2: string;
    /**
     * ZIP or postal code
     */
    postal_code: string;
    /**
     * State/County/Province/Region.
     */
    state: string;
}
|
package leader
import (
"context"
"database/sql"
"fmt"
"time"
"github.com/benbjohnson/clock"
"github.com/google/uuid"
)
// MysqlOpt allows configuration of leader defaults.
type MysqlOpt func(leader *mysqlLeader)

// WithNodeName allows the node name to be specified.
// The default value is a random UUID.
func WithNodeName(name string) MysqlOpt {
	return func(l *mysqlLeader) {
		l.nodeName = name
	}
}

// WithTick allows the default election frequency to
// be specified. The default is 15 seconds.
func WithTick(tick time.Duration) MysqlOpt {
	return func(l *mysqlLeader) {
		l.tick = tick
	}
}

// WithAge allows the default lifespan of an election
// to be specified. The default is 60 seconds.
func WithAge(age time.Duration) MysqlOpt {
	return func(l *mysqlLeader) {
		l.age = age
	}
}

// mysqlLeader bundles the node identity, timing configuration and the
// database handle used for elections.
type mysqlLeader struct {
	db         *sql.DB
	leaderName string
	nodeName   string
	clock      clock.Clock
	tick       time.Duration
	age        time.Duration
}
// NewMysqlLeader provides an implementation of the Leader interface using
// MySQL as the point of coordination between nodes. It is not a perfect
// leadership election implementation but should be good enough providing
// that tasks that require leadership election do not run for longer than
// either the tick or age intervals.
func NewMysqlLeader(db *sql.DB, leaderName string, opts ...MysqlOpt) Leader {
	leader := &mysqlLeader{
		db:         db,
		leaderName: leaderName,
		clock:      clock.New(),
	}
	// Apply caller options first, then fill defaults for anything
	// left unset (or set to a non-positive duration).
	for _, opt := range opts {
		opt(leader)
	}
	if leader.nodeName == "" {
		leader.nodeName = uuid.New().String()
	}
	if leader.tick < time.Nanosecond {
		leader.tick = 15 * time.Second
	}
	if leader.age < time.Nanosecond {
		leader.age = 60 * time.Second
	}
	return leader
}
// RunElections repeatedly stands for election (once per tick) until ctx
// is cancelled. Election errors are handed to onError; if onError
// returns a non-nil error, the loop stops and returns it.
//
// NOTE(review): the first election attempt only happens after one full
// tick elapses -- confirm callers tolerate that initial window.
func (m *mysqlLeader) RunElections(ctx context.Context, onError OnError) error {
	election := m.election()
	ticker := m.clock.Ticker(m.tick)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			err := election(ctx)
			if err != nil {
				err = onError(err)
				if err != nil {
					return err
				}
			}
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}
// isLeaderSQL counts leader_election rows held by this node for the
// configured leader name (0 or 1, since leader_name is the primary key).
const isLeaderSQL = `
SELECT COUNT(*)
FROM leader_election
WHERE leader_name = ?
AND node_name = ?
`

// IsLeader reports whether this node currently holds the election row.
func (m *mysqlLeader) IsLeader(ctx context.Context) (bool, error) {
	var count int
	err := m.db.QueryRowContext(ctx, isLeaderSQL, m.leaderName, m.nodeName).Scan(&count)
	if err != nil {
		return false, err
	}
	return count > 0, nil
}
// electionSQL is an upsert implementing the election: a missing row is
// claimed outright; an existing row changes owner only when its
// last_update is older than the configured age, and only the current
// owner may refresh last_update. The age is substituted with Sprintf --
// it is an integer, so this is not an injection vector.
const electionSQL = `
INSERT INTO leader_election (leader_name, node_name, last_update) VALUES (?, ?, ?)
ON DUPLICATE KEY UPDATE
node_name = IF(last_update < DATE_SUB(VALUES(last_update), INTERVAL %d SECOND), VALUES(node_name), node_name),
last_update = IF(node_name = VALUES(node_name), VALUES(last_update), last_update)
`

// election binds the age into the statement once and returns a closure
// that performs a single election attempt.
func (m *mysqlLeader) election() func(context.Context) error {
	stmt := fmt.Sprintf(electionSQL, int64(m.age.Seconds()))
	return func(ctx context.Context) error {
		_, err := m.db.ExecContext(ctx, stmt, m.leaderName, m.nodeName, m.clock.Now())
		return err
	}
}
// CreateMysqlLeaderSQL is the create statement used by CreateMysqlLeaderTable.
// It's published so that it can be used in database migrations without needing
// to call the CreateMysqlLeaderTable function.
// The last_update column drives the age-based takeover in electionSQL.
const CreateMysqlLeaderSQL = `
CREATE TABLE IF NOT EXISTS leader_election (
	leader_name varchar(255) NOT NULL PRIMARY KEY,
	node_name varchar(255) NOT NULL,
	last_update datetime NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
`

// CreateMysqlLeaderTable sets up the leadership election table and its constraints.
// It is not part of the MysqlLeader object since in practice it's a bad idea to run
// services with permissions to create or modify database schemas.
func CreateMysqlLeaderTable(db *sql.DB) error {
	_, err := db.Exec(CreateMysqlLeaderSQL)
	return err
}
|
# Grafana SQLite Datasource
This is a Grafana backend plugin to allow using a SQLite database as a data source.
The plugin was built using the grafana plugin sdk and npx grafana toolkit. Information can be
found at:
- <https://grafana.com/tutorials/build-a-data-source-backend-plugin/>
- <https://github.com/grafana/grafana-plugin-sdk-go>
- <https://github.com/grafana/grafana/tree/master/packages/grafana-toolkit>
## Getting started
### Requirements
- nodejs
- yarn
- go
- docker and docker-compose
### (First Time) Installation
```BASH
# installing packages
make install
# optional: using git hooks
git config core.hooksPath githooks
```
### Start up Grafana
```BASH
make build # this builds the frontend and backend
make sign # sign the plugin, or allow unsigned plugins in the config
make bootstrap # credentials admin / admin123
```
## Testing
```BASH
make test
```
### Quick e2e tests with Selenium
First start the docker environment with `make selenium-test`. This will also run the tests.
Regardless of the tests passing the environment will stay up and running.
Now you can connect to the dockerized browser via a `VNC` client/viewer (like remmina)
|
# Openwebnet-webthings
A [webthings](https://iot.mozilla.org/docs/) service for [openwebnet](https://www.myopen-legrandgroup.com/)
# Dependencies
This depends on the reopenwebnet python library. It can be installed like this:
pip3 install reopenwebnet
# Usage
First edit the configuration file to describe your openwebnet-controlled devices.
There is an example in this directory: `openwebnet-webthings.example.yaml`
Next, start the webthings service:
python openwebnet-webthings.py <your-config-file>
Finally, import the webthings into your IoT gateway. The following screenshots document the process.
Depending on how you installed the IoT gateway, the process is more or less automatic.



|
// Copyright 2019 syzkaller project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
// syz-check does best-effort static correctness checking of the syscall descriptions in sys/os/*.txt.
// Use:
// $ go install ./tools/syz-check
// $ syz-check -obj /linux/vmlinux
// Currently it works only for linux and only for one arch at a time.
// The vmlinux files should include debug info and enable all relevant configs (since we parse dwarf).
// The results are produced in sys/os/*.warn files.
// On implementation level syz-check parses vmlinux dwarf, extracts struct descriptions
// and compares them with what we have (size, fields, alignment, etc).
package main
import (
"bytes"
"debug/dwarf"
"flag"
"fmt"
"os"
"path/filepath"
"runtime"
"runtime/pprof"
"sort"
"strings"
"github.com/google/syzkaller/pkg/ast"
"github.com/google/syzkaller/pkg/compiler"
"github.com/google/syzkaller/pkg/osutil"
"github.com/google/syzkaller/prog"
"github.com/google/syzkaller/sys/targets"
)
// main drives syz-check: it parses flags, optionally sets up CPU/memory
// profiling, runs the amd64 and 386 checks and writes the combined warnings
// to sys/os/*.warn files.
func main() {
	var (
		flagOS         = flag.String("os", runtime.GOOS, "OS")
		flagObjAMD64   = flag.String("obj-amd64", "", "amd64 kernel object file")
		flagObj386     = flag.String("obj-386", "", "386 kernel object file")
		flagCPUProfile = flag.String("cpuprofile", "", "write CPU profile to this file")
		flagMEMProfile = flag.String("memprofile", "", "write memory profile to this file")
	)
	// failf reports a fatal error to stderr and terminates the process.
	// Note: os.Exit skips deferred profile flushing, which is acceptable on
	// the error paths.
	failf := func(msg string, args ...interface{}) {
		fmt.Fprintf(os.Stderr, msg+"\n", args...)
		os.Exit(1)
	}
	flag.Parse()
	if *flagCPUProfile != "" {
		f, err := os.Create(*flagCPUProfile)
		if err != nil {
			failf("failed to create cpuprofile file: %v", err)
		}
		defer f.Close()
		if err := pprof.StartCPUProfile(f); err != nil {
			failf("failed to start cpu profile: %v", err)
		}
		defer pprof.StopCPUProfile()
	}
	if *flagMEMProfile != "" {
		defer func() {
			f, err := os.Create(*flagMEMProfile)
			if err != nil {
				failf("failed to create memprofile file: %v", err)
			}
			defer f.Close()
			// GC before snapshotting so the heap profile reflects live data.
			runtime.GC()
			if err := pprof.WriteHeapProfile(f); err != nil {
				failf("failed to write mem profile: %v", err)
			}
		}()
	}
	warnings1, err := check(*flagOS, "amd64", *flagObjAMD64)
	if err != nil {
		failf("%v", err)
	}
	// Free the (large) amd64 dwarf data before parsing the 386 object.
	runtime.GC()
	warnings2, err := check(*flagOS, "386", *flagObj386)
	if err != nil {
		failf("%v", err)
	}
	// Use failf here too for consistency with the other fatal-error paths
	// (previously this duplicated failf's logic inline).
	if err := writeWarnings(*flagOS, append(warnings1, warnings2...)); err != nil {
		failf("%v", err)
	}
}
// check parses the syscall descriptions for OS/arch, extracts struct layouts
// from the kernel object file obj, and returns all discrepancies found, with
// each warning tagged with the arch it was produced for.
func check(OS, arch, obj string) ([]Warn, error) {
	descs, locs, parseWarns, err := parseDescriptions(OS, arch)
	if err != nil {
		return nil, err
	}
	kernelStructs, err := parseKernelObject(obj)
	if err != nil {
		return nil, err
	}
	implWarns, err := checkImpl(kernelStructs, descs, locs)
	if err != nil {
		return nil, err
	}
	all := append(parseWarns, implWarns...)
	for i := range all {
		all[i].arch = arch
	}
	return all, nil
}
// Warning categories stored in Warn.typ and prefixed to each line of the
// generated .warn files.
const (
	WarnCompiler       = "compiler"         // error emitted by the descriptions compiler
	WarnNoSuchStruct   = "no-such-struct"   // struct not found in the kernel dwarf
	WarnBadStructSize  = "bad-struct-size"  // total size differs from the kernel
	WarnBadFieldNumber = "bad-field-number" // field count differs from the kernel
	WarnBadFieldSize   = "bad-field-size"   // a field's size differs from the kernel
	WarnBadFieldOffset = "bad-field-offset" // a field's byte offset differs from the kernel
	WarnBadBitfield    = "bad-bitfield"     // bitfield size/offset differs from the kernel
)

// Warn is a single discrepancy between the descriptions and the kernel.
type Warn struct {
	pos  ast.Pos // location in the .txt description the warning refers to
	arch string  // architecture the warning was produced for (set by check)
	typ  string  // one of the Warn* categories above
	msg  string  // human-readable details
}
// writeWarnings regenerates the per-file sys/OS/*.warn reports from warnings.
// Warnings are grouped by source .txt file, sorted, deduplicated across
// arches and written out; existing .warn files that received no warnings are
// removed.
func writeWarnings(OS string, warnings []Warn) error {
	allFiles, err := filepath.Glob(filepath.Join("sys", OS, "*.warn"))
	if err != nil {
		return err
	}
	// Start by assuming every existing .warn file is stale; files rewritten
	// below are removed from this set.
	toRemove := make(map[string]bool)
	for _, file := range allFiles {
		toRemove[file] = true
	}
	byFile := make(map[string][]Warn)
	for _, warn := range warnings {
		byFile[warn.pos.File] = append(byFile[warn.pos.File], warn)
	}
	for file, warns := range byFile {
		// Sort by line, then message, then arch so that duplicates differing
		// only in arch end up adjacent and the output is deterministic.
		sort.Slice(warns, func(i, j int) bool {
			w1, w2 := warns[i], warns[j]
			if w1.pos.Line != w2.pos.Line {
				return w1.pos.Line < w2.pos.Line
			}
			if w1.msg != w2.msg {
				return w1.msg < w2.msg
			}
			return w1.arch < w2.arch
		})
		buf := new(bytes.Buffer)
		for i := 0; i < len(warns); i++ {
			warn := warns[i]
			arch := warn.arch
			arches := []string{warn.arch}
			// Collapse consecutive warnings with the same message, collecting
			// the distinct arches they were reported on.
			for i < len(warns)-1 && warn.msg == warns[i+1].msg {
				if arch != warns[i+1].arch {
					arch = warns[i+1].arch
					arches = append(arches, arch)
				}
				i++
			}
			// Annotate with the arch list only when the warning did not fire
			// on both checked arches (amd64 and 386).
			archStr := ""
			if len(arches) < 2 {
				archStr = fmt.Sprintf(" [%v]", strings.Join(arches, ","))
			}
			fmt.Fprintf(buf, "%v: %v%v\n", warn.typ, warn.msg, archStr)
		}
		warnFile := filepath.Join("sys", OS, file+".warn")
		if err := osutil.WriteFile(warnFile, buf.Bytes()); err != nil {
			return err
		}
		delete(toRemove, warnFile)
	}
	// Whatever is left corresponds to files that no longer produce warnings.
	for file := range toRemove {
		os.Remove(file)
	}
	return nil
}
// checkImpl compares every fixed-size syzkaller struct that has a matching
// declaration in the parsed descriptions against the corresponding kernel
// dwarf struct, collecting all discrepancies.
func checkImpl(structs map[string]*dwarf.StructType, structDescs []*prog.KeyedStruct,
	locs map[string]*ast.Struct) ([]Warn, error) {
	var warnings []Warn
	seen := make(map[string]bool)
	for _, str := range structDescs {
		typ := str.Desc
		// Variable-length structs have no fixed layout to compare; each
		// struct name is checked at most once.
		if typ.Varlen() || seen[typ.Name()] {
			continue
		}
		seen[typ.Name()] = true
		name := templateName(typ.Name())
		astStruct, ok := locs[name]
		if !ok || astStruct == nil {
			continue
		}
		warns, err := checkStruct(typ, astStruct, structs[name])
		if err != nil {
			return nil, err
		}
		warnings = append(warnings, warns...)
	}
	return warnings, nil
}
// templateName strips a template-instantiation suffix ("name[args]") from a
// struct name, returning the bare name unchanged if no '[' is present.
func templateName(name string) string {
	if idx := strings.Index(name, "["); idx != -1 {
		return name[:idx]
	}
	return name
}
// checkStruct compares a single syzkaller struct description (typ) against
// the kernel's dwarf layout (str), returning discrepancies as warnings
// anchored at positions taken from the ast node astStruct.
func checkStruct(typ *prog.StructDesc, astStruct *ast.Struct, str *dwarf.StructType) ([]Warn, error) {
	var warnings []Warn
	warn := func(pos ast.Pos, typ, msg string, args ...interface{}) {
		warnings = append(warnings, Warn{pos: pos, typ: typ, msg: fmt.Sprintf(msg, args...)})
	}
	name := templateName(typ.Name())
	// str == nil means the kernel dwarf has no struct with this name.
	if str == nil {
		warn(astStruct.Pos, WarnNoSuchStruct, "%v", name)
		return warnings, nil
	}
	if typ.Size() != uint64(str.ByteSize) {
		warn(astStruct.Pos, WarnBadStructSize, "%v: syz=%v kernel=%v", name, typ.Size(), str.ByteSize)
	}
	// TODO: handle unions, currently we should report some false errors.
	if str.Kind == "union" {
		return warnings, nil
	}
	// TODO: we could also check enums (elements match corresponding flags in syzkaller).
	// TODO: we could also check values of literal constants (dwarf should have that, right?).
	// TODO: handle nested structs/unions, e.g.:
	// struct foo {
	//	union {
	//		...
	//	} bar;
	// };
	// should be matched with:
	// foo_bar [
	//	...
	// ]
	// TODO: consider making guesses about semantic types of fields,
	// e.g. if a name contains filedes/uid/pid/gid that may be the corresponding resource.
	// ai indexes kernel/ast fields; syzkaller-only padding fields advance the
	// running byte offset but not ai.
	ai := 0
	offset := uint64(0)
	for _, field := range typ.Fields {
		if prog.IsPad(field) {
			offset += field.Size()
			continue
		}
		if ai < len(str.Field) {
			fld := str.Field[ai]
			pos := astStruct.Fields[ai].Pos
			// desc names the field "struct.syzname", plus "/kernelname" when
			// the syzkaller and kernel names disagree.
			desc := fmt.Sprintf("%v.%v", name, field.FieldName())
			if field.FieldName() != fld.Name {
				desc += "/" + fld.Name
			}
			if field.UnitSize() != uint64(fld.Type.Size()) {
				warn(pos, WarnBadFieldSize, "%v: syz=%v kernel=%v",
					desc, field.UnitSize(), fld.Type.Size())
			}
			byteOffset := offset - field.UnitOffset()
			if byteOffset != uint64(fld.ByteOffset) {
				warn(pos, WarnBadFieldOffset, "%v: syz=%v kernel=%v",
					desc, byteOffset, fld.ByteOffset)
			}
			// How would you define bitfield offset?
			// Offset of the beginning of the field from the beginning of the memory location, right?
			// No, DWARF defines it as offset of the end of the field from the end of the memory location.
			bitOffset := fld.Type.Size()*8 - fld.BitOffset - fld.BitSize
			if fld.BitSize == 0 {
				// And to make things even more interesting this calculation
				// does not work for normal variables.
				bitOffset = 0
			}
			if field.BitfieldLength() != uint64(fld.BitSize) ||
				field.BitfieldOffset() != uint64(bitOffset) {
				warn(pos, WarnBadBitfield, "%v: size/offset: syz=%v/%v kernel=%v/%v",
					desc, field.BitfieldLength(), field.BitfieldOffset(),
					fld.BitSize, bitOffset)
			}
		}
		ai++
		offset += field.Size()
	}
	// Mismatch between the number of non-pad syzkaller fields and kernel fields.
	if ai != len(str.Field) {
		warn(astStruct.Pos, WarnBadFieldNumber, "%v: syz=%v kernel=%v", name, ai, len(str.Field))
	}
	return warnings, nil
}
// parseDescriptions parses and compiles all sys/OS/*.txt syscall descriptions
// for the given arch. It returns the compiled struct descriptions, a map from
// struct name to its ast node (used for warning positions), and any compiler
// warnings. A nil parse/compile result is turned into an error carrying the
// accumulated compiler messages.
func parseDescriptions(OS, arch string) ([]*prog.KeyedStruct, map[string]*ast.Struct, []Warn, error) {
	errorBuf := new(bytes.Buffer)
	var warnings []Warn
	// Compiler diagnostics are collected both as warnings (for the .warn
	// files) and into errorBuf (for the fatal-error messages below).
	eh := func(pos ast.Pos, msg string) {
		warnings = append(warnings, Warn{pos: pos, typ: WarnCompiler, msg: msg})
		fmt.Fprintf(errorBuf, "%v: %v\n", pos, msg)
	}
	top := ast.ParseGlob(filepath.Join("sys", OS, "*.txt"), eh)
	if top == nil {
		return nil, nil, nil, fmt.Errorf("failed to parse txt files:\n%s", errorBuf.Bytes())
	}
	consts := compiler.DeserializeConstsGlob(filepath.Join("sys", OS, "*_"+arch+".const"), eh)
	if consts == nil {
		return nil, nil, nil, fmt.Errorf("failed to parse const files:\n%s", errorBuf.Bytes())
	}
	prg := compiler.Compile(top, consts, targets.Get(OS, arch), eh)
	if prg == nil {
		return nil, nil, nil, fmt.Errorf("failed to compile descriptions:\n%s", errorBuf.Bytes())
	}
	prog.RestoreLinks(prg.Syscalls, prg.Resources, prg.StructDescs)
	// Index both plain structs and struct-typed typedefs by name so that
	// checkImpl can recover source positions for warnings.
	locs := make(map[string]*ast.Struct)
	for _, decl := range top.Nodes {
		switch n := decl.(type) {
		case *ast.Struct:
			locs[n.Name.Name] = n
		case *ast.TypeDef:
			if n.Struct != nil {
				locs[n.Name.Name] = n.Struct
			}
		}
	}
	return prg.StructDescs, locs, warnings, nil
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! A library to create "pseudo" file systems. These file systems are backed by in process
//! callbacks. Examples are: component configuration, debug information or statistics.
#![recursion_limit = "1024"]
// TODO Follow 2018 idioms
#![allow(elided_lifetimes_in_paths)]
#![allow(unused_extern_crates)]
use proc_macro_hack::proc_macro_hack;

// Public modules that make up the crate's API surface.
pub mod test_utils;

pub mod common;
pub mod execution_scope;
pub mod path;
pub mod registry;

pub mod directory;
pub mod file;
pub mod service;
pub mod tree_builder;

// --- pseudo_directory ---

// pseudo_directory! uses helper functions that live in this module. It needs to be accessible
// from the outside of this crate.
#[doc(hidden)]
pub mod pseudo_directory;
/// Builds a pseudo directory using a simple DSL, potentially containing files and nested pseudo
/// directories.
///
/// A directory is described using a sequence of rules of the following form:
///
/// <name> `=>` <something that implements DirectoryEntry>
///
/// separated by commas, with an optional trailing comma.
///
/// It generates a nested pseudo directory, using [`directory::immutable::simple()`] then adding
/// all the specified entries in it, by calling [`directory::Simple::add_entry`].
///
/// See [`mut_pseudo_directory!`] if you want the directory to be modifiable by the clients.
///
/// Note: Names specified as literals (both `str` and `[u8]`) are compared during compilation time,
/// so you should get a nice error message, if you specify the same entry name twice. As entry
/// names can be specified as expressions, you can easily work around this check - you will still
/// get an error, but it would be a `panic!` in this case. In any case the error message will
/// contain details of the location of the generating macro and the duplicate entry name.
///
/// # Examples
///
/// This will construct a small tree of read-only files:
/// ```
/// let root = pseudo_directory! {
/// "etc" => pseudo_directory! {
/// "fstab" => read_only_static(b"/dev/fs /"),
/// "passwd" => read_only_static(b"[redacted]"),
/// "shells" => read_only_static(b"/bin/bash"),
/// "ssh" => pseudo_directory! {
/// "sshd_config" => read_only_static(b"# Empty"),
/// },
/// },
/// "uname" => read_only_static(b"Fuchsia"),
/// };
/// ```
///
/// An example of a tree with a writable file:
/// ```
/// let write_count = &RefCell::new(0);
/// let root = pseudo_directory! {
/// "etc" => pseudo_directory! {
/// "sshd_config" => read_write(
/// || Ok(b"# Empty".to_vec()),
/// 100,
/// |content| {
/// let mut count = write_count.borrow_mut();
/// assert_eq!(*&content, format!("Port {}", 22 + *count).as_bytes());
/// *count += 1;
/// Ok(())
/// }),
/// },
/// };
/// ```
///
/// You can specify the POSIX attributes for the pseudo directory, by providing the attributes as
/// an expression, after a "protection_attributes" keyword followed by a comma, with a `;`
/// separating it from the entry definitions:
/// ```
/// let root = pseudo_directory! {
/// "etc" => pseudo_directory! {
/// protection_attributes: S_IXOTH | S_IROTH | S_IXGRP | S_IRGRP | S_IXUSR | S_IRUSR;
/// "fstab" => read_only_attr(S_IROTH | S_IRGRP | S_IRUSR,
/// || Ok(b"/dev/fs /".to_vec())),
/// "passwd" => read_only_attr(S_IRUSR, || Ok(b"[redacted]".to_vec())),
/// },
/// };
/// ```
#[proc_macro_hack(support_nested)]
pub use fuchsia_vfs_pseudo_fs_mt_macros::pseudo_directory;
/// This macro is identical to [`pseudo_directory!`], except that it constructs instances of
/// [`directory::mutable::simple()`], allowing the clients connected over the FIDL connection to
/// modify this directory. Clients operations are still checked against specific connection
/// permissions as specified in the `io.fidl` protocol.
#[proc_macro_hack(support_nested)]
pub use fuchsia_vfs_pseudo_fs_mt_macros::mut_pseudo_directory;
// This allows the pseudo_directory! macro to use absolute paths within this crate to refer to the
// helper functions. External crates that use pseudo_directory! will rely on the pseudo_directory
// export above.
extern crate self as fuchsia_vfs_pseudo_fs_mt;
|
using UnityEngine;
using System.Collections;
using System;
using UnityEngine.UI;
using System.Collections.Generic;
public class DragDrop : MonoBehaviour {

    // Latest mouse position, refreshed every frame (used by the currently
    // disabled drag code in OnMouseDrag).
    float x;
    float y;

    // The match object this component reacts to when clicked.
    public GameObject match;
    private txtGameOver txtGO;
    // All objects tagged "Match" at startup; indexed by the numeric name of
    // each match object.
    public GameObject[] GameObjectArray;

    private void Awake()
    {
        txtGO = GameObject.FindObjectOfType<txtGameOver>();
    }

    private void Start()
    {
        GameObjectArray = GameObject.FindGameObjectsWithTag("Match");
    }

    void Update(){
        x = Input.mousePosition.x;
        y = Input.mousePosition.y;
    }

    void OnMouseDrag(){
        // transform.position = Camera.main.ScreenToWorldPoint(new Vector3(x,y,10.0f));
    }

    void OnMouseDown()
    {
        var boxes = GameObject.FindGameObjectsWithTag("Match");
        SelectMatches(match);
        // When only one object still carries the "Match" tag the game is over.
        if (boxes.Length == 1)
        {
            txtGO.GameOverText();
        }
    }

    // Handles a click on match number m.name. Each index belongs to a fixed
    // group (0-4, 5-8, 9-11, 12-13, 14). Clicking a "Match" box (while the
    // global control flag is off) retags its whole group via ChangeTag;
    // clicking an already retagged "Destroy" box (flag on) hides it.
    // Refactored from five copy-pasted branches into one range lookup plus a
    // single shared action.
    void SelectMatches (GameObject m)
    {
        int n = Convert.ToInt32(m.name);

        // Determine the inclusive group boundaries for this index.
        int start, end;
        if (n >= 0 && n <= 4)       { start = 0;  end = 4;  }
        else if (n > 4 && n <= 8)   { start = 5;  end = 8;  }
        else if (n > 8 && n <= 11)  { start = 9;  end = 11; }
        else if (n > 11 && n <= 13) { start = 12; end = 13; }
        else if (n == 14)           { start = 14; end = 14; }
        else return; // indices outside 0..14 are ignored, as before

        if (GameObjectArray[n].CompareTag("Match") && GlobaVariables.Control == false)
        {
            ChangeTag(start, end, n);
        }
        else if (GameObjectArray[n].CompareTag("Destroy") && GlobaVariables.Control == true)
        {
            GameObjectArray[n].SetActive(false);
        }
    }

    // Retags every box in the inclusive range [p, z] as "Destroy", hides the
    // clicked box n and raises the global control flag.
    void ChangeTag(int p, int z,int n)
    {
        for (int i = p; i <z+1; i++)
        {
            GameObjectArray[i].tag = "Destroy";
            Debug.Log(GameObjectArray[i].tag + " " + GameObjectArray[i]);
        }
        GameObjectArray[n].SetActive(false);
        GlobaVariables.Control = true;
    }
}
|
package com.luqinx.xbinder
import android.os.Parcel
import android.os.Parcelable
import com.luqinx.xbinder.serialize.GenericAdapter
import com.luqinx.xbinder.serialize.ObjectAdapter
import java.lang.reflect.Type
/**
 * Parcelable container for the outcome of a cross-process (binder) channel
 * invocation: success flag, error info, the return value and its declared type.
 *
 * @author qinchao
 *
 * @since 2022/1/2
 */
class ChannelResult() : Parcelable {
    // Whether the remote invocation completed successfully.
    var succeed: Boolean = false
    // Human-readable error description, if any.
    var errMessage: String? = null
    // NOTE(review): errCode is never written in writeToParcel nor read in the
    // parcel constructor, so it does not survive IPC — confirm whether that
    // is intentional.
    var errCode: Int = BinderInvoker.ERROR_CODE_SUCCESS
    // Result of the remote invocation; only parceled when non-null.
    var returnValue: Any? = null
    // Declared type of returnValue; only initialized when a value is present,
    // so it must not be accessed when returnValue is null.
    lateinit var returnType: Type
    // Time consumed by the invocation — unit not evident here; TODO confirm.
    var invokeConsumer = 0L

    constructor(parcel: Parcel) : this() {
        succeed = parcel.readInt() == 1
        errMessage = parcel.readString()
        invokeConsumer = parcel.readLong()
        // A positive marker means a return type + value follow (mirrors the
        // 1 / -1 marker written by writeToParcel).
        if (parcel.readInt() > 0) {
            returnType = GenericAdapter.readInstance(parcel, Any::class.java)!!
            returnValue = ObjectAdapter.read(parcel, returnType)
        }
    }

    override fun writeToParcel(parcel: Parcel, flags: Int) {
        parcel.writeInt( if (succeed) 1 else 0 )
        parcel.writeString(errMessage)
        parcel.writeLong(invokeConsumer)
        if (returnValue != null) {
            // Marker 1: a return type and value follow.
            parcel.writeInt(1)
            // NOTE(review): returnType is passed as both the second and third
            // argument here — verify against GenericAdapter.writeInstance's
            // signature.
            GenericAdapter.writeInstance(parcel, returnType, returnType)
            ObjectAdapter.write(parcel, returnValue, returnType)
        } else {
            // Marker -1: no return value present.
            parcel.writeInt(-1)
        }
    }

    override fun describeContents(): Int {
        return 0
    }

    companion object CREATOR : Parcelable.Creator<ChannelResult> {
        override fun createFromParcel(parcel: Parcel): ChannelResult {
            return ChannelResult(parcel)
        }

        override fun newArray(size: Int): Array<ChannelResult?> {
            return arrayOfNulls(size)
        }
    }
}
|
; RUN: not llvm37-as -disable-output %s 2>&1 | FileCheck %s
; Negative test: the assembler must reject a bitcast between vectors of
; pointers in different address spaces (per the CHECK line below, it reports
; "invalid cast opcode" for this conversion).

target datalayout = "e-p:32:32:32-p1:16:16:16-p2:32:32:32-i1:8:32-i8:8:32-i16:16:32-i32:32:32-i64:32:64-f32:32:32-f64:32:64-v64:32:64-v128:32:128-a0:0:32-n8:16:32"

; CHECK: error: invalid cast opcode for cast from '<4 x i32*>' to '<4 x i32 addrspace(1)*>'
define <4 x i32 addrspace(1)*> @vector_illegal_bitcast_as_0_to_1(<4 x i32 addrspace(0)*> %p) {
	%cast = bitcast <4 x i32 addrspace(0)*> %p to <4 x i32 addrspace(1)*>
	ret <4 x i32 addrspace(1)*> %cast
}
|
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package os

// Readdirnames reads the directory associated with file via Readdir and
// returns just the entry names.
func (file *File) Readdirnames(n int) (names []string, err Error) {
	fis, err := file.Readdir(n)
	// With n > 0 a read error is fatal and nothing is returned; with n <= 0
	// whatever was collected is returned alongside any error.
	if n > 0 && err != nil {
		return nil, err
	}
	names = make([]string, 0, len(fis))
	for _, fi := range fis {
		names = append(names, fi.Name)
	}
	return names, err
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.