text
stringlengths 27
775k
|
|---|
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using OpenLiveWriter.PostEditor.ContentSources;
namespace OpenLiveWriter.PostEditor
{
public sealed class PostEditorInitializer
{
    /// <summary>
    /// Performs global initialization. This may show error dialogs or
    /// cause failure of the entire product to load.
    /// </summary>
    /// <returns>Always <c>true</c>; problems surface through dialogs.</returns>
    public static bool Initialize()
    {
        // May display an error dialog if a plugin has missing or
        // incorrect attributes.
        ContentSourceManager.Initialize();
        return true;
    }
}
|
{-# LANGUAGE TupleSections #-}
module Scan where
import qualified Data.Set as S
import qualified Data.Vector as V
import Data.List
import Data.Function
import Data.Maybe
import qualified Data.Map as M
import Control.Applicative
import Control.Monad
import Geom
-- | Every unordered pair of positions, paired with the componentwise
-- absolute difference of the two points.
diffs :: [Vec3] -> [(Vec3,Vec3,Vec3)]
diffs vs =
  [ (a, b, a `absdiff` b) | (a : rest) <- tails vs, b <- rest ]
-- | All candidate axis permutations that could map difference vector d1
-- onto d2: each assignment of pairwise-distinct component indices of d2
-- to the x, y and z components of d1.
similar :: Vec3 -> Vec3 -> [Transfo3]
similar d1 d2 =
  [ tran (ix, iy, iz)
  | ix <- d2 `indices` (d1 `get` xx)
  , iy <- d2 `indices` (d1 `get` yy)
  , iz <- d2 `indices` (d1 `get` zz)
  , ix /= iy
  , ix /= iz
  , iy /= iz
  ]
-- | Group the pairs by their second component and return the largest
-- group: the collected first components (in reverse insertion order)
-- together with the shared key.
--
-- Precondition: the input list is non-empty; 'maximumBy' raises an
-- error on an empty list, as before.
maxocc :: Eq b => [(a, b)] -> ([a], b)
maxocc l =
    -- foldl' instead of foldl: the accumulator is consumed in full at the
    -- end, so force it strictly and avoid building a thunk chain.
    maximumBy (compare `on` (length . fst)) $ foldl' group [] l
  where
    -- Fold one element into the accumulated groups.
    group acc (x, b) = add acc x b
    -- Prepend x to the group keyed by b', or start a new group at the end.
    add [] x b' = [([x], b')]
    add ((xs, b) : rest) x b'
      | b == b'   = (x : xs, b) : rest
      | otherwise = (xs, b) : add rest x b'
-- | Given matched difference pairs between two scans and a candidate axis
-- permutation t, refine t into a fully signed transformation plus the
-- translation vector mapping the second scan's points onto the first's.
--
-- NOTE(review): partial — 'find_next' has no case for an exhausted list and
-- 'make_up' has no fallthrough guard; callers must guarantee that a second
-- matched pair sharing a point with the first exists.
find_transfo :: [((Vec3,Vec3,Vec3),(Vec3,Vec3,Vec3))] -> Transfo3 -> (Transfo3,Vec3)
find_transfo (((x1,x2,_),(y1,y2,_)):xs) t =
  -- Find another matched pair sharing one endpoint with the first pair, so
  -- that three points can be put into one-to-one correspondence.
  -- (yy3 is bound but unused below.)
  let ((xx1,yy1),(xx2,yy2),(xx3,yy3)) = find_next (x1,x2) (y1,y2) xs in
  -- Direction of the shared edge in both frames (second frame already
  -- permuted by t).
  let dx12 = xx1 `diff` xx2
      dy12 = (yy1 /// t) `diff` (yy2 /// t) in
  -- Fix the sign of each axis so both direction vectors agree.
  let t' = concordant_directions t dx12 dy12 in
  (t', find_translation t' xx1 yy1)
  where -- Scan for a pair overlapping (x1,x2) in one point; reorder so the
        -- shared point comes first.
        find_next (x1,x2) (y1,y2) (((x3,x4,_),(y3,y4,_)):xs)
          | x1 == x3 = make_up x1 x2 x4 y1 y2 y3 y4
          | x1 == x4 = make_up x1 x2 x3 y1 y2 y3 y4
          | x2 == x3 = make_up x2 x1 x4 y1 y2 y3 y4
          | x2 == x4 = make_up x2 x1 x3 y1 y2 y3 y4
          | otherwise = find_next (x1,x2) (y1,y2) xs
        -- Align the three points of the second frame with x1 x2 x3 of the
        -- first frame, keyed on which y equals the shared point.
        make_up x1 x2 x3 y1 y2 y3 y4
          | y1 == y3 = ((x1,y1),(x2,y2),(x3,y4))
          | y1 == y4 = ((x1,y1),(x2,y2),(x3,y3))
          | y2 == y3 = ((x1,y2),(x2,y1),(x3,y4))
          | y2 == y4 = ((x1,y2),(x2,y1),(x3,y3))
        -- Flip each of the three axes of t whose direction disagrees
        -- between xx and yy.
        concordant_directions t xx yy =
          foldl (flip_if_different_signum xx yy) t [1..3]
        flip_if_different_signum xx yy tacc i
          | signum (xx `get` i) == signum (yy `get` i) = tacc
          | otherwise = tacc `opp` i
        -- Translation taking the transformed second-frame point onto the
        -- first-frame point.
        find_translation t x y =
          let y' = y /// t in
          x `diff` y'
-- | Match difference triples of a base scan against those of a new scan.
-- Each pair of triples with equal (permuted) differences votes for a
-- candidate transformation; return the pairs behind the most frequent
-- candidate, or Nothing when there are no matches at all.
find_similar :: [(Vec3,Vec3,Vec3)] -> [(Vec3,Vec3,Vec3)] -> Maybe ([((Vec3,Vec3,Vec3),(Vec3,Vec3,Vec3))],Transfo3)
find_similar base new
    | null candidates = Nothing
    | otherwise       = Just (maxocc candidates)
  where
    candidates =
      [ ((cbase, cnew), transfo)
      | cbase@(_, _, d)  <- base
      , cnew@(_, _, d')  <- new
      , transfo          <- similar d d'
      ]
-- | Decide whether two scans overlap.  Requires at least
-- thresh*(thresh-1)/2 matched difference pairs — the number of pairs
-- among thresh common points — and, if met, computes the transformation
-- and translation mapping the new scan onto the base scan.
find_common_points :: Int -> [Vec3] -> [Vec3] -> Maybe (Transfo3,Vec3)
find_common_points thresh base new =
  case find_similar (diffs base) (diffs new) of
    Nothing -> Nothing
    Just (matched, _) | length matched < dthresh -> Nothing
    Just (matched, transfo) ->
      -- Refine the axis permutation into signed axes plus a translation.
      -- (A set of the common points used to be built here but was never
      -- used; the dead binding has been removed.)
      let (transfo', trvec) = find_transfo matched transfo in
      Just (transfo', trvec)
  where
    -- Minimum number of point pairs formed by thresh common points.
    dthresh = (thresh * (thresh - 1)) `div` 2
-- | All overlapping pairs of sections: for every pair (i, j) with i < j
-- that shares at least thresh common points, the transformation and
-- translation mapping section j onto section i.
find_overlaps :: Int -> [[Vec3]] -> [((Int,Int),Transfo3,Vec3)]
find_overlaps thresh sections = go 0 sections
  where
    go _ []  = []
    go _ [_] = []
    go i (s : rest) = pair i s (i + 1) rest ++ go (i + 1) rest
    -- Compare section i against every later section.
    pair _ _ _ [] = []
    pair i s j (s' : rest) =
      case find_common_points thresh s s' of
        Nothing     -> pair i s (j + 1) rest
        Just (t, v) -> ((i, j), t, v) : pair i s (j + 1) rest
-- | Propagate the pairwise transformations into absolute ones: starting
-- from section 0 as the reference frame (identity permutation, zero
-- translation), walk the overlap graph and record, for every reachable
-- section, the transformation from frame 0 into that section's frame.
reduce :: [((Int,Int),Transfo3,Vec3)] -> M.Map Int (Transfo3,Vec3)
reduce trans =
    reduce1 (M.singleton 0 (tran (1,2,3),vec 0 0 0)) 0
  where -- NOTE(review): 'upper' is bound but never used below.
        upper = maximum $ concat $ map (\((a,b),_,_) -> [a,b]) trans
        -- Expand from section n: follow edges leaving n ('start') and
        -- edges arriving at n ('end'), then recurse into every section
        -- newly added to the accumulator.
        reduce1 acc n =
          let start = filter (\((n',_),_,_) -> n' == n) trans
              end = filter (\((_,n'),_,_) -> n' == n) trans in
          case M.lookup n acc of
            Nothing -> acc
            Just (t,v) ->
              let (acc',ns') = foldl (append_direct t v) (acc,S.empty) start in
              let (acc'',ns'') = foldl (append_indirect t v) (acc',ns') end in
              S.foldl reduce1 acc'' ns''
        -- add a new transformation; (t,v) is the transfo from 0 to n, (t',v') is the
        -- transfo from n to n'
        append_direct t v (acc,ns) ((n,n'),t',v')
          | n' `M.member` acc = (acc,ns)
          | otherwise =
              (M.insert n' (t' `comp` t,(v' /// t) `translate` v) acc, S.insert n' ns)
        -- add a new transformation; (t,v) is the transfo from 0 to n, (t',v') is the
        -- transfo from n' to n; the edge is inverted before composing.
        append_indirect t v (acc,ns) ((n',n),t',v')
          | n' `M.member` acc = (acc,ns)
          | otherwise =
              (M.insert n' ((invert t') `comp` t, v `diff` (v' /// ((invert t') `comp` t))) acc, S.insert n' ns)
-- | Merge all sections into one set of absolute positions.  The first
-- section is the reference frame; every later section n is mapped through
-- its accumulated transformation from 'reduce' before insertion.
--
-- The transformation map must contain an entry for every section index
-- from 1 upward ('fromJust' fails otherwise, as before).
accumulate_positions :: M.Map Int (Transfo3,Vec3) -> [[Vec3]] -> S.Set Vec3
accumulate_positions _ [] = S.empty  -- previously a pattern-match failure
accumulate_positions trans (sec:secs) =
    snd $ foldl' append_section (1, S.fromList sec) secs
  where
    append_section (n, acc) cur =
      let (t, v) = fromJust $ M.lookup n trans in
      (n + 1, foldl' (add_one t v) acc cur)
    -- Transform one point into the reference frame and insert it.
    -- (Renamed the parameter from 'vec', which shadowed Geom's 'vec'.)
    add_one t v acc p = S.insert ((p /// t) `translate` v) acc
|
# description
## package setup
1. run `mvn clean compile package`
2. copy the generated jar into a new `dstorage-v{currentDate}` directory
3. compress the finished directory
## directory name rule
```
example: dstorage-v2020.0313
explain: appName-v{currentDate}
```
## dist structure
| key |desc
|-----|------
| / |
| ├─ app/ | 程序内部资源,包含配置文件、GUI图片、帮助文档
| ├─ bin/ | 启动脚本,Linux/Windows的打开方式
| ├─ libs/ | 外部引用资源(maven打包后生成的jar包)
| ├─ logs/ | 日志文件夹
| ├─ webapp/ | 页面静态资源,包含文件上传本地存储files文件夹
| ├─ file-service.jar | 程序jar包
## open mode
- Linux
`
sh bin/dstorage-up.sh
`
- Windows
`
double click on bin/dstorage-up.bat
`
|
module Parser where
import Control.Monad
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import Text.ParserCombinators.Parsec.Language
import qualified Text.ParserCombinators.Parsec.Token as Token
import TinyPL
-- | Lexical definition of the language: C-style block and line comments,
-- alphanumeric identifiers starting with a letter, the reserved keywords
-- and the reserved operators.
languageDef =
  emptyDef { Token.commentStart    = "/*"
           , Token.commentEnd      = "*/"
           , Token.commentLine     = "//"
           , Token.identStart      = letter
           , Token.identLetter     = alphaNum
           , Token.reservedNames   = [ "if"
                                     , "else"
                                     , "while"
                                     , "func"
                                     , "return"
                                     ]
           , Token.reservedOpNames = [ "+", "-", "*", "/"
                                     , "="
                                     ]
           }
-- Token parsers derived from the language definition; each one skips
-- trailing whitespace and comments automatically.
lexer = Token.makeTokenParser languageDef
identifier = Token.identifier lexer
reserved = Token.reserved lexer
reservedOp = Token.reservedOp lexer
parens = Token.parens lexer
-- Integer literals, narrowed from 'Integer' via 'fromIntegral' to the
-- numeric type the AST expects.
int = fromIntegral <$> Token.integer lexer
semi = Token.semi lexer
whiteSpace = Token.whiteSpace lexer
braces = Token.braces lexer
comma = Token.comma lexer
-- | A program is leading whitespace followed by zero or more function
-- definitions.
program :: Parser Program
program = whiteSpace *> many function
-- | func name(arg, ...) statement
function :: Parser Function
function =
  Function
    <$> (reserved "func" *> identifier)
    <*> parens (identifier `sepBy` comma)
    <*> statement
-- | A single statement.  'ifElseStmt' must be attempted with backtracking:
-- it consumes "if (cond) stmt" before failing on a missing "else", and in
-- Parsec an alternative that has consumed input makes '<|>' skip the
-- remaining branches — so without 'try' a plain if-statement never parses.
-- ('assignment' is safe unwrapped because 'Token.identifier' fails without
-- consuming input on reserved words.)
statement :: Parser Stmt
statement = block <|> assignment <|> try ifElseStmt
        <|> ifStmt <|> while <|> returnStmt
-- | { statement* }
block :: Parser Stmt
block = fmap Block (braces (many statement))
-- | name = expr ;
assignment :: Parser Stmt
assignment =
  Assignment
    <$> identifier
    <*> (reservedOp "=" *> expr <* semi)
-- | if (expr) statement else statement
ifElseStmt :: Parser Stmt
ifElseStmt =
  IfElse
    <$> (reserved "if" *> parens expr)
    <*> statement
    <*> (reserved "else" *> statement)
-- | if (expr) statement  (no else branch)
ifStmt :: Parser Stmt
ifStmt =
  If
    <$> (reserved "if" *> parens expr)
    <*> statement
-- | while (expr) statement
while :: Parser Stmt
while =
  While
    <$> (reserved "while" *> parens expr)
    <*> statement
-- | return expr ;
returnStmt :: Parser Stmt
returnStmt = Return <$> (reserved "return" *> expr <* semi)
-- | An expression: either a function call or an operator expression.
-- 'try' is required because 'funCall' consumes the identifier before it
-- can fail on a missing argument list.
expr :: Parser LExpr
expr = try funCall <|> buildExpressionParser operators term
-- | name(expr, ...)
funCall :: Parser LExpr
funCall =
  FunCall
    <$> identifier
    <*> parens (expr `sepBy` comma)
-- Operator table, strongest binding first: unary minus, then the
-- multiplicative operators, then the additive ones; all binary operators
-- are left-associative.
operators = [ [Prefix (reservedOp "-" >> return Neg)]
            , [Infix (reservedOp "*" >> return Mult) AssocLeft,
               Infix (reservedOp "/" >> return Div) AssocLeft]
            , [Infix (reservedOp "+" >> return Add) AssocLeft,
               Infix (reservedOp "-" >> return Sub) AssocLeft]
            ]
-- | Atomic expression term.  Function calls must be tried before bare
-- identifiers: previously "1 + f(x)" failed because "f" parsed as a
-- variable and the argument list was left unconsumed.  'try' backtracks
-- when no argument list follows the identifier, so plain variables still
-- parse exactly as before (backward compatible).
term = parens expr
   <|> try funCall
   <|> Var <$> identifier
   <|> Num <$> int
-- | Parse source text into a 'Program'; aborts via 'error' with the
-- parser's message on failure.
parseStr :: String -> Program
parseStr = either (error . show) id . parse program ""
|
package mayacomms
import com.intellij.openapi.util.SystemInfo
import com.intellij.util.io.exists
import java.nio.file.Paths
// Platform-specific file names for the Maya GUI executable and the bundled
// mayapy Python interpreter.
private const val mayaExecutableNameWin = "maya.exe"
private const val mayaPyExecutableNameWin = "mayapy.exe"
private const val mayaExecutableNameMac = "Maya"
private const val mayaPyExecutableNameMac = "mayapy"
private const val mayaExecutableNameLinux = "maya"
private const val mayaPyExecutableNameLinux = "mayapy"
/**
 * Derive the path of the mayapy interpreter matching a Maya executable.
 * Returns null when the given path is not a Maya executable, the platform
 * is unrecognized, or the interpreter does not exist on disk.
 */
fun mayaPyFromMaya(path: String): String? {
    val mayaPath = Paths.get(path)
    if (mayaPath.fileName.toString() != mayaExecutableName) return null
    val candidate = when {
        // On macOS the GUI binary sits in Contents/MacOS while mayapy
        // lives in Contents/bin.
        SystemInfo.isMac -> mayaPath.parent.parent.resolve("bin/$mayaPyExecutableNameMac")
        SystemInfo.isWindows || SystemInfo.isLinux -> mayaPath.parent.resolve(mayaPyExecutableName)
        else -> return null
    }
    return if (candidate.exists()) candidate.toString() else null
}
/**
 * Derive the path of the Maya executable matching a mayapy interpreter.
 * Returns null when the given path is not a mayapy interpreter, the
 * platform is unrecognized, or the executable does not exist on disk.
 */
fun mayaFromMayaPy(path: String): String? {
    val mayaPyPath = Paths.get(path)
    if (mayaPyPath.fileName.toString() != mayaPyExecutableName) return null
    val candidate = when {
        // On macOS mayapy sits in Contents/bin while the GUI binary
        // lives in Contents/MacOS.
        SystemInfo.isMac -> mayaPyPath.parent.parent.resolve("MacOS/$mayaExecutableNameMac")
        SystemInfo.isWindows || SystemInfo.isLinux -> mayaPyPath.parent.resolve(mayaExecutableName)
        else -> return null
    }
    return if (candidate.exists()) candidate.toString() else null
}
/** Maya executable file name for the current platform; empty when unknown. */
val mayaExecutableName: String
    get() = when {
        SystemInfo.isWindows -> mayaExecutableNameWin
        SystemInfo.isMac -> mayaExecutableNameMac
        SystemInfo.isLinux -> mayaExecutableNameLinux
        else -> ""
    }
/** mayapy file name for the current platform; empty when unknown. */
val mayaPyExecutableName: String
    get() = when {
        SystemInfo.isWindows -> mayaPyExecutableNameWin
        SystemInfo.isMac -> mayaPyExecutableNameMac
        SystemInfo.isLinux -> mayaPyExecutableNameLinux
        else -> ""
    }
|
/*
* Public API Surface of vmts-lib
*/
/** Version of the vmts-lib public API surface. */
export const LIBRARY_VERSION = '0.1.0';

// Re-export the library's public modules.
export * from './lib/api';
export * from './lib/model-collection';
export * from './lib/model-property';
export * from './lib/named-property-model';
export * from './lib/visitor-helpers';
export * from './lib/visitor-limiters';
export * from './lib/visitors';
|
<img src="topf.svg" height="128" />
# Topf — Topological peak filtering
`topf` is a small library for Python 3 that permits the detection and
subsequent filtering of peaks in one-dimensional functions. The method
is based on a topological notion of *prominence* or *persistence* of a
peak with respect to all other peaks.
# Dependencies
- Python 3.7
- `numpy`
# Installation
- Clone the repository
- `poetry install` (for local development)
- `pip3 install .` (for global usage)
# Usage
Install the library using `pip3 install topf`. You can then access the
main class, `PersistenceTransformer`, by issuing `import topf`. As
a simple example, we load the file `example.txt`, depict its peaks,
and filter out the smallest ones:
```python
import topf
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
data = np.genfromtxt('example.txt') # load data
transformer = topf.PersistenceTransformer() # prepare transformer
peaks = transformer.fit_transform(data) # transform data into peaks
# First, let's plot the original data. We can see that there is
# quite a number of relatively small peaks.
plt.subplot(3, 1, 1)
sns.lineplot(x=data[:, 0], y=data[:, 1])
# Second, let's show the transformed data. Here, every non-zero
# point depicts the *prominence* of a peak.
plt.subplot(3, 1, 2)
sns.lineplot(x=peaks[:, 0], y=peaks[:, 1])
# Third, let's show an example of filtering. At present, there is
# no automated way of doing so.
filtered_data = data[peaks[:, 1] > 4] # only keep high peaks
plt.subplot(3, 1, 3)
sns.lineplot(x=data[:, 0], y=data[:, 1], alpha=0.5)
sns.scatterplot(
x=filtered_data[:, 0],
y=filtered_data[:, 1],
)
plt.tight_layout()
plt.show()
```
This file is also available as [`example.py`](examples/example.py) in this
repository (with some minor modifications to simplify usage).
It will result in the following output:

# Licence notice
The icon of this project was created by <a href="http://www.freepik.com"
title="Freepik">Freepik</a> from <a href="https://www.flaticon.com/"
title="Flaticon">www.flaticon.com</a> and is licensed by <a
href="http://creativecommons.org/licenses/by/3.0/" title="Creative
Commons BY 3.0" target="_blank">CC 3.0 BY</a>.
|
package net.ripe.db.whois.common.grs;
import net.ripe.db.whois.common.domain.CIString;
import net.ripe.db.whois.common.rpsl.ObjectType;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.HttpHeaders;
import java.util.concurrent.Executors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class RsngAuthoritativeResourceWorkerTest {

    @Mock Client client;
    @Mock WebTarget webTarget;
    @Mock WebTarget webTargetAsn;
    @Mock Invocation.Builder builderAsn;
    @Mock WebTarget webTargetIpv4;
    @Mock Invocation.Builder builderIpv4;
    @Mock WebTarget webTargetIpv6;
    @Mock Invocation.Builder builderIpv6;

    static Logger logger = LoggerFactory.getLogger(RsngAuthoritativeResourceWorkerTest.class);

    @BeforeEach
    public void setup() {
        when(client.target(eq("rsngBaseUrl"))).thenReturn(webTarget);
        // The three delegation endpoints are stubbed identically except for
        // path, mocks and payload; use one helper instead of three copies.
        stubDelegations(webTargetAsn, builderAsn, "/resource-services/asn-delegations", "/grs/asndelegations.json");
        stubDelegations(webTargetIpv4, builderIpv4, "/resource-services/ipv4-delegations", "/grs/ipv4delegations.json");
        stubDelegations(webTargetIpv6, builderIpv6, "/resource-services/ipv6-delegations", "/grs/ipv6delegations.json");
    }

    // Stub one delegations endpoint: path lookup on the base target, query
    // parameters, the request builder's Accept and API-key headers, and the
    // JSON payload loaded from the given classpath resource.
    private void stubDelegations(final WebTarget target, final Invocation.Builder builder, final String path, final String resource) {
        when(webTarget.path(eq(path))).thenReturn(target);
        when(target.queryParam(anyString(), any())).thenReturn(target);
        when(target.request()).thenReturn(builder);
        when(builder.header(eq(HttpHeaders.ACCEPT), eq("application/json"))).thenReturn(builder);
        when(builder.header(eq("X-API_KEY"), eq("apikey"))).thenReturn(builder);
        when(builder.get(String.class)).then(invocation ->
            IOUtils.toString(getClass().getResourceAsStream(resource))
        );
    }

    @Test
    public void load() {
        final AuthoritativeResource authoritativeResource = new RsngAuthoritativeResourceWorker(logger, "rsngBaseUrl", client, Executors.newCachedThreadPool(), "apikey").load();
        assertFalse(authoritativeResource.isEmpty());
        assertThat(authoritativeResource.getNrAutNums(), is(1));
        assertThat(authoritativeResource.getNrInet6nums(), is(1));
        assertThat(authoritativeResource.getNrInetnums(), is(1));
        assertTrue(authoritativeResource.isMaintainedInRirSpace(ObjectType.AUT_NUM, CIString.ciString("AS7")));
        assertTrue(authoritativeResource.isMaintainedInRirSpace(ObjectType.INET6NUM, CIString.ciString("2001:700::/25")));
        assertTrue(authoritativeResource.isMaintainedInRirSpace(ObjectType.INETNUM, CIString.ciString("1.178.224.0-1.178.255.255")));
    }
}
|
Rails.application.routes.draw do
  # Static pages.
  get 'page/dashboard', as: 'dashboard'
  get 'page/login', as: 'login'

  # People own their signatures, goals and contributions.
  resources :people do
    resources :signatures
    resources :goals
    resources :contributions
  end

  root 'page#login'
  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
|
---
layout: post
title: Tensor란 무엇인가?
tags: [ML]
comments: true
---
- Tensor
Tensor란 수학적인 개념의 하나로 인공지능에서의 Tensor는 데이터의 배열이라고 볼 수 있다. Tensor는 Rank에 따라서 다르게 불리며 Rank는 간단히 말해서 몇 차원으로 이루어진 배열인가를 나타낸다.
| RANK | TYPE | EXAMPLE |
| :------ |:--- | :--- |
| 0 | Scalar | 1 |
| 1 | Vector | [1, 1] |
| 2 | Matrix | [[1, 1], [1, 1]] |
| 3 | 3-Tensor | [[[1, 1], [1, 1]],[[1, 1], [1, 1]],[[1, 1], [1, 1]]] |
| n | n-Tensor | - |
Tensor는 Vector와 Matrix를 일반화한 것으로 배열의 집합이라고 할 수 있다.
|
require "ershou/engine"
require "cancan"
require "inherited_resources"
require "simple_form"
require "kaminari"
require "kaminari-bootstrap"
require "slim"
require "state_machine"
require "paperclip"
require 'redcarpet/compat'
require 'unread'
require 'acts_as_paranoid'
require 'acts_as_list'
require 'twitter-text'
# Top-level namespace for the Ershou engine; behaviour is provided by the
# required gems and the engine defined in ershou/engine.
module Ershou
end
|
# PLSudoku
A GUI for a sudoku solver written in Prolog.

|
//
// Copyright 2021 Free TON Wallet Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
import "serialization_exception.dart" show SerializationException;
import "auto_lock_delay.dart" show AutoLockDelay;
import "node_model.dart" show NodeModel;
import "seed_model.dart" show SeedModel;
/// Root application model: the configured nodes, stored seeds and
/// UI-level settings, serializable to and from JSON.
class AppModel {
  /// Default model: mainnet selected, both well-known nodes, no seeds.
  static final AppModel EMPTY = AppModel(
    autoLockDelay: AutoLockDelay.HalfMinute,
    selectedNodeId: NodeModel.NODE_MAINNET_NODEID,
    nodes: <NodeModel>[
      NodeModel.NODE_MAINNET,
      NodeModel.NODE_TESTNET,
    ],
    seeds: <SeedModel>[],
  );

  // Id of the currently selected node; must match a node in [nodes]
  // for [selectedNode] to succeed.
  String selectedNodeId;
  // Delay before the wallet auto-locks.
  AutoLockDelay autoLockDelay;
  final List<NodeModel> nodes;
  final List<SeedModel> seeds;

  /// The node whose id equals [selectedNodeId]; throws if there is not
  /// exactly one such node ('singleWhere').
  NodeModel get selectedNode => this
      .nodes
      .singleWhere((NodeModel node) => node.nodeId == this.selectedNodeId);

  /// Deserialize an [AppModel] from JSON.
  ///
  /// Throws [SerializationException] when a required field is missing or
  /// when node/seed ids are not unique.
  factory AppModel.fromJson(Map<String, dynamic> rawJson) {
    final String? selectedNodeId = rawJson[AppModel._SELECTEDNODE_PROPERTY];
    final String? autoLockDelayName = rawJson[AppModel._AUTOLOCKDELAY_PROPERTY];
    final List<dynamic>? nodes = rawJson[AppModel._NODES_PROPERTY];
    final List<dynamic>? seeds = rawJson[AppModel._SEEDS_PROPERTY];
    if (selectedNodeId == null) {
      throw SerializationException(
          "A field '${AppModel._SELECTEDNODE_PROPERTY}' is null.");
    }
    if (autoLockDelayName == null) {
      throw SerializationException(
          "A field '${AppModel._AUTOLOCKDELAY_PROPERTY}' is null.");
    }
    if (nodes == null) {
      throw SerializationException(
          "A field '${AppModel._NODES_PROPERTY}' is null.");
    }
    final List<NodeModel> nodeModels = nodes
        .cast<Map<String, dynamic>>()
        .map((Map<String, dynamic> nodeRawJson) =>
            NodeModel.fromJson(nodeRawJson))
        .toList(growable: true);
    {
      // local scope
      // Reject duplicate node ids: a set collapses duplicates, so a size
      // mismatch means at least one id occurs twice.
      final List<String> nodeIds = nodeModels
          .map((NodeModel nodeModel) => nodeModel.nodeId)
          .toList(growable: false);
      final Set<String> nodeUniqueIds = nodeIds.toSet();
      if (nodeIds.length > nodeUniqueIds.length) {
        throw SerializationException(
            "A field '${AppModel._NODES_PROPERTY}' is invalid. The nodes has duplicate id.");
      }
    }
    if (seeds == null) {
      throw SerializationException(
          "A field '${AppModel._SEEDS_PROPERTY}' is null.");
    }
    final List<SeedModel> seedModels = seeds
        .cast<Map<String, dynamic>>()
        .map((Map<String, dynamic> seedRawJson) =>
            SeedModel.fromJson(seedRawJson))
        .toList(growable: true);
    {
      // local scope
      // Same duplicate-id check for seeds.
      final List<int> seedIds = seedModels
          .map((SeedModel seedModel) => seedModel.seedId)
          .toList(growable: false);
      final Set<int> seedUniqueIds = seedIds.toSet();
      if (seedIds.length > seedUniqueIds.length) {
        throw SerializationException(
            "A field '${AppModel._SEEDS_PROPERTY}' is invalid. The seeds has duplicate id.");
      }
    }
    final AutoLockDelay autoLockDelay = AutoLockDelay.parse(autoLockDelayName);
    return AppModel(
      selectedNodeId: selectedNodeId,
      autoLockDelay: autoLockDelay,
      nodes: nodeModels,
      seeds: seedModels,
    );
  }

  /// Deep copy via a JSON round trip.
  AppModel clone() {
    return AppModel.fromJson(this.toJson());
  }

  /// Serialize to JSON; the inverse of [AppModel.fromJson].
  Map<String, dynamic> toJson() {
    return <String, dynamic>{
      AppModel._AUTOLOCKDELAY_PROPERTY: this.autoLockDelay.toString(),
      AppModel._SELECTEDNODE_PROPERTY: this.selectedNodeId,
      AppModel._NODES_PROPERTY: this
          .nodes
          .map((NodeModel node) => node.toJson())
          .toList(growable: false),
      AppModel._SEEDS_PROPERTY: this
          .seeds
          .map((SeedModel seed) => seed.toJson())
          .toList(growable: false),
    };
  }

  AppModel({
    required this.autoLockDelay,
    required this.selectedNodeId,
    required this.nodes,
    required this.seeds,
  });

  // JSON property names used by [toJson] / [AppModel.fromJson].
  static const String _SEEDS_PROPERTY = "seeds";
  static const String _NODES_PROPERTY = "nodes";
  static const String _SELECTEDNODE_PROPERTY = "selectedNodeId";
  static const String _AUTOLOCKDELAY_PROPERTY = "autoLockDelay";
}
|
package de.elektroniker.hippocloud.lib.console;
import de.elektroniker.hippocloud.lib.CloudLib;
import java.io.IOException;
/******************************************************************
* Urheberrechtshinweis
* Copyright © Thomas Michaelis 2019
* Erstellt: 23.09.2019 / 10:39
* Orginal Class: ConsoleReader
******************************************************************/
/**
 * Console input reader abstraction: a start/stop lifecycle, a prompt
 * prefix, and an attached {@link CloudLib} instance.
 */
public interface InternalConsoleReader {

    /** Start the reader. */
    void start();

    /** Stop the reader. */
    void stop();

    /** Print usage/help output. */
    void usage();

    /** @return whether the reader is currently running */
    boolean isAlive();

    /** Set the prompt prefix. */
    void setPrefix(String prefix);

    /** @return the current prompt prefix */
    String getPrefix();

    /** Attach the CloudLib instance this reader works with. */
    void setCloudLib(CloudLib cloudLib);

    /** @return the attached CloudLib instance */
    CloudLib getCloudLib();
}
|
---
pid: s7
label: 'A Covenant with Color: Race and Social Power in Brooklyn'
creator: Craig Steven Wilder
publisher: Columbia University Press
_date: '2000'
format: book
source: https://www.worldcat.org/title/covenant-with-color-race-and-social-power-in-brooklyn/oclc/469903160
clio: https://clio.columbia.edu/catalog/2832826
collection: undesign
citation: 'Wilder, Craig Steven. A Covenant with Color: Race and Social Power in Brooklyn.
New York, New York: Columbia University Press, 2000.'
description: Spanning three centuries of Brooklyn history from the colonial period
to the present, A Covenant with Color exposes the intricate relations of dominance
and subordination that have long characterized the relative social positions of
white and black Brooklynites. Craig Steven Wilder—examining both quantitative and
qualitative evidence and utilizing cutting-edge literature on race theory—demonstrates
how ideas of race were born, how they evolved, and how they were carried forth into
contemporary society. (Columbia Univ. Press)
order: '266'
layout: undesign_item
thumbnail: "/img/derivatives/simple/s7/thumbnail.jpg"
full: "/img/derivatives/simple/s7/fullwidth.jpg"
---
|
const URL = "https://caligari-api.herokuapp.com/api/v1/features";
const featuresWrapper = document.querySelector(".tags__wrapper");

/**
 * Fetch the feature list from the API and render each feature as
 * a checkbox tag inside the `.tags__wrapper` element.
 *
 * @throws {Error} when the HTTP response is not ok.
 */
export const getFeatures = async () => {
  const response = await fetch(URL);
  if (!response.ok) {
    // Fail loudly instead of trying to JSON-parse an error body.
    throw new Error(`Failed to load features: ${response.status}`);
  }
  const features = await response.json();
  // Attribute values are now quoted so ids containing spaces or special
  // characters stay inside the attribute.
  // NOTE(review): feature.id/feature.name come from a remote API and are
  // interpolated into innerHTML unescaped — consider escaping them.
  const tpl = features
    .map(
      (feature) => `
      <li>
        <input id="${feature.id}" name="${feature.id}" type="checkbox"/>
        <label class="tag" for="${feature.id}">${feature.name}</label>
      </li>
      `
    )
    .join("");
  featuresWrapper.innerHTML = tpl;
};

// Kick off the initial render; log failures instead of leaving an
// unhandled promise rejection.
getFeatures().catch(console.error);
|
subroutine header_reservoir
  !! Open the reservoir output files (daily / monthly / yearly /
  !! average-annual, plain text plus optional csv) and write their header
  !! lines.  Every opened file name is also registered on unit 9000.

  use basin_module
  use reservoir_module
  !use hydrograph_module, only : res, sp_ob
  use hydrograph_module

  implicit none

  !! RESERVOIR
  if (pco%res%d == "y" .and. sp_ob%res > 0 ) then
    open (2540,file="reservoir_day.txt",recl=1500)
    write (2540,*) bsn%name, prog
    write (9000,*) "RES                       reservoir_day.txt"
    write (2540,*) ch_wbod_hdr, hyd_stor_hdr, hyd_in_hdr, hyd_out_hdr
    write (2540,*) ch_wbod_hdr_units, hyd_hdr_units1, hyd_hdr_units1, hyd_hdr_units1
    if (pco%csvout == "y") then
      open (2544,file="reservoir_day.csv",recl=1500)
      write (2544,*) bsn%name, prog
      write (2544,'(*(G0.3,:","))') ch_wbod_hdr, hyd_stor_hdr, hyd_in_hdr, hyd_out_hdr
      write (2544,'(*(G0.3,:","))') ch_wbod_hdr_units, hyd_hdr_units1, hyd_hdr_units1, hyd_hdr_units1
      write (9000,*) "RES                       reservoir_day.csv"
    end if
  end if

  if (pco%res%m == "y" .and. sp_ob%res > 0 ) then
    open (2541,file="reservoir_mon.txt",recl=1500)
    write (2541,*) bsn%name, prog
    write (9000,*) "RES                       reservoir_mon.txt"
    write (2541,*) ch_wbod_hdr, hyd_stor_hdr, hyd_in_hdr, hyd_out_hdr
    write (2541,*) ch_wbod_hdr_units, hyd_hdr_units1, hyd_hdr_units1, hyd_hdr_units1
    if (pco%csvout == "y") then
      open (2545,file="reservoir_mon.csv",recl=1500)
      write (2545,*) bsn%name, prog
      write (2545,'(*(G0.3,:","))') ch_wbod_hdr, hyd_stor_hdr, hyd_in_hdr, hyd_out_hdr
      write (2545,'(*(G0.3,:","))') ch_wbod_hdr_units, hyd_hdr_units1, hyd_hdr_units1, hyd_hdr_units1
      !! bug fix: the registry line was written to unit 2545 (the csv file
      !! itself) instead of unit 9000 like every other section.
      write (9000,*) "RES                       reservoir_mon.csv"
    end if
  end if

  if (pco%res%y == "y" .and. sp_ob%res > 0 ) then
    open (2542,file="reservoir_yr.txt",recl=1500)
    write (2542,*) bsn%name, prog
    write (9000,*) "RES                       reservoir_yr.txt"
    write (2542,*) ch_wbod_hdr, hyd_stor_hdr, hyd_in_hdr, hyd_out_hdr
    write (2542,*) ch_wbod_hdr_units, hyd_hdr_units1, hyd_hdr_units1, hyd_hdr_units1
    if (pco%csvout == "y") then
      open (2546,file="reservoir_yr.csv",recl=1500)
      write (2546,*) bsn%name, prog
      write (2546,'(*(G0.3,:","))') ch_wbod_hdr, hyd_stor_hdr, hyd_in_hdr, hyd_out_hdr
      write (2546,'(*(G0.3,:","))') ch_wbod_hdr_units, hyd_hdr_units1, hyd_hdr_units1, hyd_hdr_units1
      write (9000,*) "RES                       reservoir_yr.csv"
    end if
  end if

  if (pco%res%a == "y" .and. sp_ob%res > 0) then
    open (2543,file="reservoir_aa.txt",recl = 1500)
    write (2543,*) bsn%name, prog
    write (2543,*) ch_wbod_hdr, hyd_stor_hdr, hyd_in_hdr, hyd_out_hdr
    write (2543,*) ch_wbod_hdr_units, hyd_hdr_units1, hyd_hdr_units1, hyd_hdr_units1
    write (9000,*) "RES                       reservoir_aa.txt"
    if (pco%csvout == "y") then
      open (2547,file="reservoir_aa.csv",recl=1500)
      write (2547,*) bsn%name, prog
      write (2547,'(*(G0.3,:","))') ch_wbod_hdr, hyd_stor_hdr, hyd_in_hdr, hyd_out_hdr
      write (2547,'(*(G0.3,:","))') ch_wbod_hdr_units, hyd_hdr_units1, hyd_hdr_units1, hyd_hdr_units1
      write (9000,*) "RES                       reservoir_aa.csv"
    end if
  end if

  return
end subroutine header_reservoir
|
/******************************************************************************
* Copyright (c) Intel Corporation - All rights reserved. *
* This file is part of the LIBXSMM library. *
* *
* For information on the license, see the LICENSE file. *
* Further information: https://github.com/hfp/libxsmm/ *
* SPDX-License-Identifier: BSD-3-Clause *
******************************************************************************/
/* Sasikanth Avancha, Dhiraj Kalamkar (Intel Corp.)
******************************************************************************/
#pragma once
#include <string>
#include <stdio.h>
#include "assert.h"
#include "Node.hpp"
#include "Engine.hpp"
#include "Params.hpp"
#include "proto/gxm.pb.h"
#include "common.hpp"
#include "PoolingImpl.hpp"
#include "PoolingXSMM.hpp"
using namespace std;
using namespace gxm;
// Parameter holder for pooling nodes: kernel geometry, strides, padding
// and the pooling-mode / compute-engine / data-type configuration parsed
// from the prototxt.
class PoolingParams : public NNParams
{
  public:
    PoolingParams(void) {}
    ~PoolingParams(void) {}

    // Fill the kernel dimensions with 'kdims' copies of 'ksize'.
    void set_kernel_dims(int kdims, int ksize)
    {
      for(int d = 0; d < kdims; d++)
        kernel_dim_.push_back(ksize);
    }

    // Explicit kernel height/width/depth (depth is 0 for 2D pooling).
    void set_kernel_dims(int kh, int kw, int kd)
    {
      kernel_dim_.push_back(kh);
      kernel_dim_.push_back(kw);
      kernel_dim_.push_back(kd);
    }

    vector<int>& get_kernel_dims() { return kernel_dim_; }

    // Fill the strides with 'sdims' copies of 'stride'.
    void set_strides(int sdims, int stride)
    {
      for(int d = 0; d < sdims; d++)
        strides_.push_back(stride);
    }

    // Explicit stride height/width/depth (depth is 0 for 2D pooling).
    void set_strides(int sh, int sw, int sd)
    {
      strides_.push_back(sh);
      strides_.push_back(sw);
      strides_.push_back(sd);
    }

    vector<int>& get_strides() { return strides_; }

    // Fill the pads with 'pdims' copies of 'pad'.
    void set_pads(int pdims, int pad)
    {
      for(int d = 0; d < pdims; d++)
        pads_.push_back(pad);
    }

    // Explicit pad height/width/depth (depth is 0 for 2D pooling).
    void set_pads(int ph, int pw, int pd)
    {
      pads_.push_back(ph);
      pads_.push_back(pw);
      pads_.push_back(pd);
    }

    vector<int>& get_pads() { return pads_; }

    void set_pool_mode(int m) { pool_mode_ = m; }
    int get_pool_mode() { return pool_mode_; }

    void set_compute_engine(int ce) { compute_engine_ = ce; }
    int get_compute_engine() { return compute_engine_; }

    void set_data_type(int t) { data_type_ = t; }
    int get_data_type() { return data_type_; }

    void set_algo_type(int at) { algotype_ = at; }
    int get_algo_type() { return algotype_; }

  protected:
    vector<int> kernel_dim_; // Order of dimensions is Height, Width, Depth (for 3D Pooling)
    vector<int> strides_;    // Order follows kernel dimension
    vector<int> pads_;       // Order follows kernel dimension
    int pool_mode_, compute_engine_, algotype_, data_type_;
};
// Build a PoolingParams object from a prototxt NodeParameter: node
// identity, tensor names, mode, and the kernel/stride/pad geometry in all
// the forms the proto allows (explicit h/w/d fields, or a repeated field
// of 1, 2 or 3 sizes).  Ownership of the returned object passes to the
// caller.
static MLParams* parsePoolingParams(NodeParameter* np)
{
  PoolingParams* pp = new PoolingParams();

  // Set name of node
  assert(!np->name().empty());
  pp->set_node_name(np->name());

  //Set node type (Convolution, FullyConnected, etc)
  assert(!np->type().empty());
  pp->set_node_type(np->type());

  //Set tensor names
  assert(np->bottom_size() == 1);
  assert(!np->bottom(0).empty());
  pp->set_bottom_names(np->bottom(0));

  assert(np->top_size() == 1);
  assert(!np->top(0).empty());
  pp->set_top_names(np->top(0));

  //Set Mode for the node
  assert((np->mode() == TRAIN) || (np->mode() == TEST));
  pp->set_mode(np->mode());

  //Set backprop needed/not needed flag for this node
  pp->set_bprop_flag(np->propagate_down());

  // kernel dimensions; dispatch on how many sizes the repeated field holds:
  //   0 -> explicit kernel_h/kernel_w(/kernel_d) fields
  //   1 -> one size shared by all spatial dims
  //   2 -> height and width (2D only)
  //   3 -> height, width, depth (3D only)
  PoolingParameter ppp = np->pooling_param();
  int kdims = ppp.kernel_size_size();

  switch(kdims)
  {
    int kh, kw, kd;

    case 0:
      kh = ppp.kernel_h();
      kw = ppp.kernel_w();
      if(ppp.ndims() == 3)
        kd = ppp.kernel_d();
      else
        kd = 0;
      assert((kh > 0) && (kw > 0));
      pp->set_kernel_dims(kh, kw, kd);
      break;

    case 1:
      kh = ppp.kernel_size(0);
      if(ppp.ndims() == 2)
        pp->set_kernel_dims(kh, kh, 0);
      else if(ppp.ndims() == 3)
        pp->set_kernel_dims(kh, kh, kh);
      break;

    case 2:
      kh = ppp.kernel_size(0);
      kw = ppp.kernel_size(1);
      assert(ppp.ndims() == 2);
      pp->set_kernel_dims(kh, kw, 0);
      break;

    case 3:
      kh = ppp.kernel_size(0);
      kw = ppp.kernel_size(1);
      kd = ppp.kernel_size(2);
      assert(ppp.ndims() == 3);
      pp->set_kernel_dims(kh, kw, kd);
      break;
  }

  // strides; same dispatch scheme as the kernel dimensions above
  int sdims = ppp.stride_size();
  switch(sdims)
  {
    int sh, sw, sd;

    case 0:
      sh = ppp.stride_h();
      sw = ppp.stride_w();
      if(ppp.ndims() == 3)
        sd = ppp.stride_d();
      else
        sd = 0;
      assert((sh > 0) && (sw > 0));
      pp->set_strides(sh, sw, sd);
      break;

    case 1:
      sh = ppp.stride(0);
      if(ppp.ndims() == 2)
        pp->set_strides(sh, sh, 0);
      else if(ppp.ndims() == 3)
        pp->set_strides(sh, sh, sh);
      break;

    case 2:
      sh = ppp.stride(0);
      sw = ppp.stride(1);
      assert(ppp.ndims() == 2);
      pp->set_strides(sh, sw, 0);
      break;

    case 3:
      sh = ppp.stride(0);
      sw = ppp.stride(1);
      sd = ppp.stride(2);
      assert(ppp.ndims() == 3);
      pp->set_strides(sh, sw, sd);
      break;
  }

  // pads; same dispatch scheme, but zero pads are legal (no assert)
  int pdims = ppp.pad_size();
  switch(pdims)
  {
    int ph, pw, pd;

    case 0:
      ph = ppp.pad_h();
      pw = ppp.pad_w();
      if(ppp.ndims() == 3)
        pd = ppp.pad_d();
      else
        pd = 0;
      pp->set_pads(ph, pw, pd);
      break;

    case 1:
      ph = ppp.pad(0);
      if(ppp.ndims() == 2)
        pp->set_pads(ph, ph, 0);
      else if(ppp.ndims() == 3)
        pp->set_pads(ph, ph, ph);
      break;

    case 2:
      ph = ppp.pad(0);
      pw = ppp.pad(1);
      assert(ppp.ndims() == 2);
      pp->set_pads(ph, pw, 0);
      break;

    case 3:
      ph = ppp.pad(0);
      pw = ppp.pad(1);
      pd = ppp.pad(2);
      assert(ppp.ndims() == 3);
      pp->set_pads(ph, pw, pd);
      break;
  }

  // Remaining scalar configuration.
  pp->set_pool_mode(ppp.pool());
  pp->set_data_type(ppp.data_type());
  pp->set_compute_engine(ppp.engine());
  pp->set_algo_type(ppp.algotype());

  return pp;
}
// Graph node implementing pooling; construction and the propagate/
// configure methods are defined out of line.
class PoolingNode : public NNNode
{
  public:
    PoolingNode(PoolingParams* p, MLEngine* e);
    virtual ~PoolingNode(void) {}

  protected:
    void forwardPropagate();
    void backPropagate();

    // Zero all dimension slots of a shape descriptor.
    void shape_setzero(Shape* s)
    {
      for(int i=0; i<MAX_DIMS; i++)
        s->dims[i] = 0;
    }

    void configure(int engine);
    // Widen a bf16 buffer of 'len' elements to f32.
    void convert_bf16_f32(libxsmm_bfloat16* in, float* out, int len);

    Tensor* tenTop_; // Output tensor pointer
    Tensor* tenBot_; // Input tensor pointer
    // Pooling argmax/index mask produced by the forward pass.
    int* tenMask_;
    PoolImplParams gparams_;
    TensorBuf *tenBotDiff_, *tenBotData_; // Input gradient/data buffers
    TensorBuf *tenTopData_, *tenTopDiff_; // Output data/gradient buffers
    TensorBuf *tenScratchData_;
    Shape ts_;
    int count_, in_dtype, out_dtype;
    // Compute engine of the producing (bottom) node.
    int bot_cengine_;
    bool first_fp=true;
    float *stptr=NULL, cbptr[16];
    PoolImpl* impl;
    MLEngine* eptr_;
};
|
#!/usr/bin/env bash
# Android cross-build driver: configures CMake with the NDK toolchain for the
# requested ABI, then builds with ninja.
set -e

BUILD_TYPE=Release
ARCH=arm64-v8a
OPENCL=OFF
ARCH_LIST="arm64-v8a armeabi-v7a"
READLINK=readlink
MAKEFILE_TYPE="Ninja"
OS=$(uname -s)

# Print usage and abort with a failure status.
function usage() {
    echo "$0 args1 .."
    echo "available args detail:"
    echo "-m : machine arch(arm64-v8a, armeabi-v7a)"
    echo "-l : enable build with opencl"
    echo "-h : show usage"
    echo "example: $0 -m armeabi-v7a"
    # BUG FIX: `exit -1` is not portable (exit takes 0-255); use a plain
    # failure code instead.
    exit 1
}

while getopts "lhm:" arg
do
    case $arg in
        m)
            echo "build with arch:$OPTARG"
            ARCH=$OPTARG
            ;;
        l)
            echo "build with opencl"
            OPENCL=ON
            ;;
        h)
            echo "show usage"
            usage
            ;;
    esac
done

# macOS ships a BSD readlink without -f; use GNU coreutils' greadlink there.
if [ "$OS" = "Darwin" ];then
    READLINK=greadlink
elif [[ $OS =~ "NT" ]]; then
    echo "BUILD in NT ..."
    # NOTE(review): "Unix" is not a valid CMake generator name ("Unix Makefiles"
    # is), and the build step below always invokes ninja -- confirm this branch.
    MAKEFILE_TYPE="Unix"
fi

SRC_DIR=$($READLINK -f "$(dirname "$0")/")

# Configure and build into build-<ARCH>/ under the source tree.
#   $1 = ANDROID_ABI value, $2 = minimum Android API level.
function cmake_build() {
    BUILD_DIR=$SRC_DIR/build-${ARCH}/
    BUILD_ABI=$1
    BUILD_NATIVE_LEVEL=$2
    echo "build dir: $BUILD_DIR"
    echo "build ARCH: $ARCH"
    echo "build ABI: $BUILD_ABI"
    echo "build native level: $BUILD_NATIVE_LEVEL"
    echo "BUILD MAKEFILE_TYPE: $MAKEFILE_TYPE"
    echo "create build dir"
    mkdir -p "$BUILD_DIR"
    cd "$BUILD_DIR"
    # Fail early with a clear message when the NDK location is not exported;
    # previously cmake failed later with a cryptic toolchain error.
    if [ -z "$NDK_ROOT" ]; then
        echo "NDK_ROOT is not set; export it to your Android NDK path"
        exit 1
    fi
    cmake -G "$MAKEFILE_TYPE" \
        "-B$BUILD_DIR" \
        "-S$SRC_DIR" \
        -DCMAKE_TOOLCHAIN_FILE="$NDK_ROOT/build/cmake/android.toolchain.cmake" \
        -DANDROID_NDK="$NDK_ROOT" \
        -DANDROID_ABI="$BUILD_ABI" \
        -DANDROID_NATIVE_API_LEVEL="$BUILD_NATIVE_LEVEL" \
        -DMEGPEAK_ENABLE_OPENCL=${OPENCL}
    # Target is never assigned in this script; default to empty so ninja
    # builds the default target. Callers may export Target to override.
    ninja ${Target:-}
}

api_level=16
abi="armeabi-v7a with NEON"
# Empty IFS keeps "armeabi-v7a with NEON" as a single argument below.
IFS=""
if [ "$ARCH" = "arm64-v8a" ]; then
    api_level=21
    abi="arm64-v8a"
elif [ "$ARCH" = "armeabi-v7a" ]; then
    api_level=16
    abi="armeabi-v7a with NEON"
else
    echo "ERR CONFIG ABORT NOW!!"
    exit 1
fi
cmake_build $abi $api_level
|
const path = require('path');
const gulp = require('gulp');
const less = require('gulp-less');
const rename = require('gulp-rename');
const cleanCss = require('gulp-clean-css');
const babel = require('gulp-babel');
const gulpif = require('gulp-if');
const injectEnvs = require('gulp-inject-envs');
const isProduction = process.env.NODE_ENV === 'production';
const dist = isProduction
  ? path.join(__dirname, '../es')
  : path.join(__dirname, '../demo/es');
const src = path.join(__dirname, '../src');
const extTypes = ['ts', 'less', 'json', 'axml', 'sjs'];
// BUG FIX: use slice() instead of splice() -- splice mutates process.argv in
// place, removing the real CLI arguments for anything else that reads them.
const isRpx = process.argv.slice(2)[0] === '--rpx';
const env = { jsUnitRpx: isRpx };

// Compile .less sources to .acss, minifying only in production builds.
gulp.task('less', () =>
  gulp
    .src(`${src}/**/*.less`)
    .pipe(
      less({
        modifyVars: {
          '@pixelSize': isRpx ? '1rpx' : '0.5px',
        },
      }),
    )
    .on('error', e => console.error(e))
    .pipe(gulpif(isProduction, cleanCss()))
    .pipe(
      rename({
        extname: '.acss',
      }),
    )
    .pipe(gulp.dest(dist)),
);

// Transpile TypeScript and inject build-time environment values.
gulp.task('ts', () =>
  gulp
    .src(`${src}/**/*.ts`)
    .pipe(babel())
    .pipe(injectEnvs(env))
    .on('error', (err) => {
      console.log(err);
    })
    .pipe(gulp.dest(dist)),
);

// Asset types that are copied through unchanged.
gulp.task('json', () => gulp.src(`${src}/**/*.json`).pipe(gulp.dest(dist)));
gulp.task('axml', () => gulp.src(`${src}/**/*.axml`).pipe(gulp.dest(dist)));
gulp.task('sjs', () => gulp.src(`${src}/**/*.sjs`).pipe(gulp.dest(dist)));

// Run every task once, then watch for changes in development builds.
const build = gulp.series(...extTypes);
build();

if (!isProduction) {
  extTypes.forEach((type) => {
    // BUG FIX: the watch glob needs the dot (`*.${type}`); `*${type}` also
    // matches any file whose name merely ends with those letters.
    const watcher = gulp.watch(`${src}/**/*.${type}`, gulp.series(type));
    watcher.on('change', (event) => {
      console.log(`File ${event} was changed`);
    });
    watcher.on('add', (event) => {
      console.log(`File ${event} was added`);
    });
    watcher.on('unlink', (event) => {
      console.log(`File ${event} was removed`);
    });
  });
}
|
Credit: Image by GregoryButler from Pixabay
URL: https://pixabay.com/photos/battleship-engine-room-historic-war-389274/
|
package com.auritylab.graphql.kotlin.toolkit.common.markers
/**
 * Marks a public API element as experimental: it may change incompatibly or
 * be removed entirely in a future release.
 */
@Retention(AnnotationRetention.SOURCE)
annotation class Experimental
|
package at.doml.fnc.lab1.controller
import at.doml.fnc.lab1.set.FuzzySet
class CenterOfAreaDefuzzifier extends Defuzzifier {

    /**
     * Defuzzifies a fuzzy set over a one-dimensional domain using the
     * centre-of-area method: the membership-weighted mean of the domain
     * values, truncated to Int.
     *
     * Throws IllegalArgumentException when the domain is not simple
     * (i.e. has more than one component).
     */
    override def defuzzify(set: FuzzySet): Int = {
        if (set.domain.numberOfComponents != 1) {
            throw new IllegalArgumentException("Set with simple domain is expected")
        }

        var weightedSum = 0.0
        var membershipSum = 0.0

        set.domain.foreach { element =>
            val membership = set.getValueAt(element)
            weightedSum += membership * element.getComponentValue(0)
            membershipSum += membership
        }

        (weightedSum / membershipSum).toInt
    }
}
|
#!/usr/bin/env bash
# Runs the xdebug helper inside the magento container as root.
# BUG FIX: resolve paths relative to this script so it also works when invoked
# from a different working directory (the relative source/cd previously
# depended on the caller's CWD).
cd "$(dirname "$0")" || exit 1
source ../../.env
cd ../docker || exit 1
# Quote the project name so values containing spaces survive word splitting.
docker-compose -p "$PROJECT_NAME" exec --user root magento xdebug
exit 0
|
import {
IExecuteFunctions,
} from 'n8n-core';
import {
IBinaryKeyData,
INodeExecutionData,
INodeType,
INodeTypeDescription,
NodeOperationError,
} from 'n8n-workflow';
import * as fflate from 'fflate';
import { promisify } from 'util';
// Promisified entry points for fflate, whose native API is callback-based.
const gunzip = promisify(fflate.gunzip);
const gzip = promisify(fflate.gzip);
const unzip = promisify(fflate.unzip);
const zip = promisify(fflate.zip);
import * as mime from 'mime-types';
// File extensions whose payloads are already compressed. When zipping, these
// entries are stored at compression level 0 to avoid wasting CPU (see the
// `compress` branch of Compression.execute).
const ALREADY_COMPRESSED = [
	'7z',
	'aifc',
	'bz2',
	'doc',
	'docx',
	'gif',
	'gz',
	'heic',
	'heif',
	'jpg',
	'jpeg',
	'mov',
	'mp3',
	'mp4',
	'pdf',
	'png',
	'ppt',
	'pptx',
	'rar',
	'webm',
	'webp',
	'xls',
	'xlsx',
	'zip',
];
/**
 * n8n node that compresses binary properties into zip/gzip archives or
 * decompresses zip/gz binary properties into separate binary outputs.
 */
export class Compression implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Compression',
		name: 'compression',
		icon: 'fa:file-archive',
		group: ['transform'],
		subtitle: '={{$parameter["operation"]}}',
		version: 1,
		description: 'Compress and uncompress files',
		defaults: {
			name: 'Compression',
			color: '#408000',
		},
		inputs: ['main'],
		outputs: ['main'],
		properties: [
			{
				displayName: 'Operation',
				name: 'operation',
				type: 'options',
				options: [
					{
						name: 'Compress',
						value: 'compress',
					},
					{
						name: 'Decompress',
						value: 'decompress',
					},
				],
				default: 'decompress',
			},
			{
				displayName: 'Binary Property',
				name: 'binaryPropertyName',
				type: 'string',
				default: 'data',
				required: true,
				displayOptions: {
					show: {
						operation: [
							'compress',
							'decompress',
						],
					},
				},
				placeholder: '',
				// Fixed grammar: "to be compress/decompress" -> "to be compressed/decompressed".
				description: 'Name of the binary property which contains the data for the file(s) to be compressed/decompressed. Multiple can be used separated by a comma (,)',
			},
			{
				displayName: 'Output Format',
				name: 'outputFormat',
				type: 'options',
				default: '',
				options: [
					{
						name: 'gzip',
						value: 'gzip',
					},
					{
						name: 'zip',
						value: 'zip',
					},
				],
				displayOptions: {
					show: {
						operation: [
							'compress',
						],
					},
				},
				description: 'Format of the output file',
			},
			{
				displayName: 'File Name',
				name: 'fileName',
				type: 'string',
				default: '',
				placeholder: 'data.zip',
				required: true,
				displayOptions: {
					show: {
						operation: [
							'compress',
						],
						outputFormat: [
							'zip',
						],
					},
				},
				// Fixed description: this names the output archive, not an input file.
				description: 'Name of the output zip file',
			},
			{
				displayName: 'Binary Property Output',
				name: 'binaryPropertyOutput',
				type: 'string',
				default: 'data',
				required: false,
				displayOptions: {
					show: {
						outputFormat: [
							'zip',
						],
						operation: [
							'compress',
						],
					},
				},
				placeholder: '',
				description: 'Name of the binary property to which to write the data of the compressed files.',
			},
			{
				displayName: 'Output Prefix',
				name: 'outputPrefix',
				type: 'string',
				default: 'data',
				required: true,
				displayOptions: {
					show: {
						operation: [
							'compress',
						],
						outputFormat: [
							'gzip',
						],
					},
				},
				// Fixed typos: "Prefix use for all gzip compresed files".
				description: 'Prefix used for all gzip compressed files',
			},
			{
				displayName: 'Output Prefix',
				name: 'outputPrefix',
				type: 'string',
				default: 'file_',
				required: true,
				displayOptions: {
					show: {
						operation: [
							'decompress',
						],
					},
				},
				description: 'Prefix used for all decompressed files',
			},
		],
	};

	async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
		const items = this.getInputData();
		const length = items.length as unknown as number;
		const returnData: INodeExecutionData[] = [];
		// NOTE(review): all parameters are read with item index 0, so per-item
		// expressions would not be re-evaluated -- confirm this is intended.
		const operation = this.getNodeParameter('operation', 0) as string;

		for (let i = 0; i < length; i++) {
			try {
				if (operation === 'decompress') {
					const binaryPropertyNames = (this.getNodeParameter('binaryPropertyName', 0) as string).split(',').map(key => key.trim());

					const outputPrefix = this.getNodeParameter('outputPrefix', 0) as string;

					const binaryObject: IBinaryKeyData = {};

					// zip archives can contain many files; number them across all
					// zip inputs with a single running index.
					let zipIndex = 0;

					for (const [index, binaryPropertyName] of binaryPropertyNames.entries()) {
						const itemBinary = items[i].binary;
						if (itemBinary === undefined) {
							throw new NodeOperationError(this.getNode(), 'No binary data exists on item!');
						}

						// Narrowed local replaces the previous //@ts-ignore; also fixes
						// the double-negative error message.
						if (itemBinary[binaryPropertyName] === undefined) {
							throw new NodeOperationError(this.getNode(), `The binary data property "${binaryPropertyName}" does not exist on item!`);
						}

						const binaryData = itemBinary[binaryPropertyName];
						const binaryDataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);

						if (binaryData.fileExtension === 'zip') {
							const files = await unzip(binaryDataBuffer);

							for (const key of Object.keys(files)) {
								// when files are compressed using MACOSX for some reason they are duplicated under __MACOSX
								if (key.includes('__MACOSX')) {
									continue;
								}

								const data = await this.helpers.prepareBinaryData(Buffer.from(files[key].buffer), key);

								binaryObject[`${outputPrefix}${zipIndex++}`] = data;
							}
						} else if (binaryData.fileExtension === 'gz') {
							const file = await gunzip(binaryDataBuffer);

							const fileName = binaryData.fileName?.split('.')[0];

							const propertyName = `${outputPrefix}${index}`;

							binaryObject[propertyName] = await this.helpers.prepareBinaryData(Buffer.from(file.buffer), fileName);
							// Re-derive the extension from the detected mime type of the
							// decompressed payload.
							const fileExtension = mime.extension(binaryObject[propertyName].mimeType) as string;
							binaryObject[propertyName].fileName = `${fileName}.${fileExtension}`;
							binaryObject[propertyName].fileExtension = fileExtension;
						}
					}

					returnData.push({
						json: items[i].json,
						binary: binaryObject,
					});
				}

				if (operation === 'compress') {
					const binaryPropertyNames = (this.getNodeParameter('binaryPropertyName', 0) as string).split(',').map(key => key.trim());

					const outputFormat = this.getNodeParameter('outputFormat', 0) as string;

					const zipData: fflate.Zippable = {};

					const binaryObject: IBinaryKeyData = {};

					for (const [index, binaryPropertyName] of binaryPropertyNames.entries()) {
						const itemBinary = items[i].binary;
						if (itemBinary === undefined) {
							throw new NodeOperationError(this.getNode(), 'No binary data exists on item!');
						}

						if (itemBinary[binaryPropertyName] === undefined) {
							throw new NodeOperationError(this.getNode(), `The binary data property "${binaryPropertyName}" does not exist on item!`);
						}

						const binaryData = itemBinary[binaryPropertyName];
						const binaryDataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);

						if (outputFormat === 'zip') {
							zipData[binaryData.fileName as string] = [
								binaryDataBuffer, {
									// Skip re-compressing formats that are already compressed.
									level: ALREADY_COMPRESSED.includes(binaryData.fileExtension as string) ? 0 : 6,
								},
							];
						} else if (outputFormat === 'gzip') {
							const outputPrefix = this.getNodeParameter('outputPrefix', 0) as string;

							const data = await gzip(binaryDataBuffer) as Uint8Array;

							const fileName = binaryData.fileName?.split('.')[0];

							binaryObject[`${outputPrefix}${index}`] = await this.helpers.prepareBinaryData(Buffer.from(data), `${fileName}.gzip`);
						}
					}

					if (outputFormat === 'zip') {
						const fileName = this.getNodeParameter('fileName', 0) as string;

						const binaryPropertyOutput = this.getNodeParameter('binaryPropertyOutput', 0) as string;

						const buffer = await zip(zipData);

						const data = await this.helpers.prepareBinaryData(Buffer.from(buffer), fileName);

						returnData.push({
							json: items[i].json,
							binary: {
								[binaryPropertyOutput]: data,
							},
						});
					}

					if (outputFormat === 'gzip') {
						returnData.push({
							json: items[i].json,
							binary: binaryObject,
						});
					}
				}
			} catch (error) {
				if (this.continueOnFail()) {
					returnData.push({ json: { error: error.message } });
					continue;
				}
				throw error;
			}
		}

		return this.prepareOutputData(returnData);
	}
}
|
import { Inject as I } from './Inject';
import { VendingMachineTest } from './VendingMachineTest';
// VendingMachineTest variant built through Inject with an empty dependency
// map. NOTE(review): presumably exercises the default wiring -- confirm
// against Inject's contract.
export class ExtendedVendingMachineTest extends I(VendingMachineTest, {})
{
}
|
/**
*
*/
package com.shivishbrahma.tilottama.test.util.weatherapp.api;
/**
 * Plain data holder for a city as returned by the weather API: coordinates,
 * country code and display name, plus a numeric id.
 *
 * @author Purbayan Chowdhury
 *         (<a href="shivishbrahma.github.io">shivishbrahma.github.io</a>)
 */
public class City {
    public Coordinate coord;
    public String country, name;
    private int id;

    /** No-arg constructor; all fields stay at their defaults (null / 0). */
    public City() {
    }

    /**
     * @param coord   geographic coordinates of the city
     * @param country country code
     * @param name    city display name
     * @param id      numeric city id
     */
    public City(Coordinate coord, String country, String name, int id) {
        this.coord = coord;
        this.country = country;
        this.name = name;
        this.id = id;
    }

    /**
     * @return the coord
     */
    public Coordinate getCoord() {
        return coord;
    }

    /**
     * @param coord the coord to set
     */
    public void setCoord(Coordinate coord) {
        this.coord = coord;
    }

    /**
     * @return the country
     */
    public String getCountry() {
        return country;
    }

    /**
     * @param country the country to set
     */
    public void setCountry(String country) {
        this.country = country;
    }

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the id
     */
    public int getId() {
        return id;
    }

    /**
     * @param id the id to set
     */
    public void setId(int id) {
        this.id = id;
    }

    /**
     * Prints the city's name, coordinates and country to stdout.
     * BUG FIX: guards against a null {@code coord} (possible after the no-arg
     * constructor), which previously caused a NullPointerException here.
     */
    public void details() {
        System.out.println("City: ");
        // System.out.println("Id: " + this.id);
        System.out.println("Name: " + this.name);
        if (coord != null) {
            coord.details();
        }
        System.out.println("Country: " + this.country);
    }
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using System.Threading.Tasks;
using IAM.Data;
using IAM.Models;
using IdentityModel;
using Microsoft.AspNetCore.Identity;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Serilog;
namespace IAM
{
/// <summary>
/// Seeds the identity database with two sample tenants and four sample users
/// (one regular + one admin per tenant). Idempotent: existing rows are left
/// untouched.
/// </summary>
public class SeedData
{
    // Sample tenants; Ids are generated fresh on every process start.
    private static List<TenantInfo> SAMPLE_TENANTS = new List<TenantInfo>()
    {
        new TenantInfo(){ Id = Guid.NewGuid(), Name = "one"},
        new TenantInfo(){ Id = Guid.NewGuid(), Name = "two"},
    };

    // Sample users, two per tenant; the *Admin users get the "admin" role.
    private static List<ApplicationUser> SAMPLE_USERS = new List<ApplicationUser>()
    {
        new ApplicationUser()
        {
            UserName = "UserOne",
            TenantId = SAMPLE_TENANTS.Find(x => x.Name == "one").Id,
        },
        new ApplicationUser()
        {
            UserName = "UserOneAdmin",
            TenantId = SAMPLE_TENANTS.Find(x => x.Name == "one").Id,
        },
        new ApplicationUser()
        {
            UserName = "UserTwo",
            TenantId = SAMPLE_TENANTS.Find(x => x.Name == "two").Id,
        },
        new ApplicationUser()
        {
            UserName = "UserTwoAdmin",
            TenantId = SAMPLE_TENANTS.Find(x => x.Name == "two").Id,
        }
    };

    private static string UNIVERSAL_PASSWORD_FOR_USERS = "Pass123$";

    /// <summary>
    /// Migrates both databases and inserts any missing sample tenants/users.
    /// </summary>
    /// <param name="connectionString">MySQL connection string for both contexts.</param>
    public static void EnsureSeedData(string connectionString)
    {
        var services = new ServiceCollection();
        services.AddLogging();
        services.AddDbContext<ApplicationDbContext>(options =>
            options.UseMySql(connectionString, ServerVersion.AutoDetect(connectionString))
        );
        services.AddDbContext<TenantContext>(options =>
            options.UseMySql(connectionString, ServerVersion.AutoDetect(connectionString))
        );

        services.AddIdentity<ApplicationUser, IdentityRole>()
            .AddEntityFrameworkStores<ApplicationDbContext>()
            .AddDefaultTokenProviders();

        using (var serviceProvider = services.BuildServiceProvider())
        {
            using (var scope = serviceProvider.GetRequiredService<IServiceScopeFactory>().CreateScope())
            {
                var context = scope.ServiceProvider.GetService<ApplicationDbContext>();
                context.Database.Migrate();

                // create user roles
                CreateUserRoles(serviceProvider).Wait();

                /*****************
                 * Seed Tenants
                 *****************/
                var tenantContext = scope.ServiceProvider.GetService<TenantContext>();
                tenantContext.Database.Migrate();

                EnsureTenant(tenantContext, "one");
                EnsureTenant(tenantContext, "two");

                /*****************
                 * Seed users
                 *****************/
                var userManager = scope.ServiceProvider.GetRequiredService<UserManager<ApplicationUser>>();

                EnsureUser(userManager, "UserOne", "user1", isAdmin: false);
                EnsureUser(userManager, "UserOneAdmin", "user1admin", isAdmin: true);
                EnsureUser(userManager, "UserTwo", "user2", isAdmin: false);
                EnsureUser(userManager, "UserTwoAdmin", "user2admin", isAdmin: true);
            }
        }
    }

    // Inserts the sample tenant with the given name unless it already exists.
    // BUG FIX: the original logged "Error when adding tenant two" in the
    // tenant-one error path; the message now always names the right tenant.
    private static void EnsureTenant(TenantContext tenantContext, string name)
    {
        var tenant = SAMPLE_TENANTS.Find(x => x.Name == name);
        var foundTenant = tenantContext.Tenant.FirstOrDefault(x => x.Name == tenant.Name);
        if (foundTenant == null)
        {
            try
            {
                tenantContext.Tenant.Add(tenant);
                tenantContext.SaveChanges();
                Log.Debug($"Tenant '{name}' created");
            }
            catch (Exception ex)
            {
                Log.Debug(ex, $"Error when adding tenant '{name}'", ex.Message);
            }
        }
        else
        {
            Log.Debug($"Tenant '{name}' already exists");
        }
    }

    // Creates the sample user with the given UserName unless it already
    // exists; optionally assigns the "admin" role. logName reproduces the
    // original log labels ("user1", "user1admin", ...).
    private static void EnsureUser(UserManager<ApplicationUser> userManager, string userName, string logName, bool isAdmin)
    {
        var user = SAMPLE_USERS.Find(x => x.UserName == userName);
        var foundUser = userManager.FindByNameAsync(user.UserName).Result;
        if (foundUser == null)
        {
            var result = userManager.CreateAsync(user, UNIVERSAL_PASSWORD_FOR_USERS).Result;
            if (!result.Succeeded)
            {
                throw new Exception(result.Errors.First().Description);
            }
            Log.Debug($"{logName} created");

            if (isAdmin)
            {
                var usr = userManager.FindByNameAsync(user.UserName).Result;
                userManager.AddToRoleAsync(usr, "admin").Wait();
                Log.Debug($"{logName} made admin");
            }
        }
        else
        {
            Log.Debug($"{logName} already exists");
        }
    }

    /// <summary>Creates the "admin" role when it does not exist yet.</summary>
    private static async Task CreateUserRoles(IServiceProvider provider)
    {
        var rolesManager = provider.GetRequiredService<RoleManager<IdentityRole>>();

        bool doesAdminUserRoleExist = await rolesManager.RoleExistsAsync("admin");
        if (!doesAdminUserRoleExist)
        {
            await rolesManager.CreateAsync(new IdentityRole("admin"));
        }
    }
}
}
|
using System.Collections.Generic;
using Avocado.DependenciesVisualizer.Base.Editor.Scripts.State;
using DependenciesVisualizer.Base.Editor.Scripts.ReorderList;
using UnityEditor;
using UnityEditorInternal;
using UnityEngine;
namespace Avocado.DependenciesVisualizer.Base.Editor.Scripts {
// Floating editor window that lists dependency layers and lets the user
// rename, recolour and (via the reorderable list) reprioritise them.
public class LayersWindow {
public const string DefaultLayerName = "Default";
public List<LayerData> Layers => _layers;
private readonly List<LayerData> _layers;
private readonly VisualizerState _state;
private ReorderableList _layersList;
private Rect _windowRect;
// Binds to the shared visualizer state; the layer list is aliased, not copied.
public LayersWindow(VisualizerState state) {
_state = state;
_layers = _state.layers;
_windowRect = new Rect(_state.LayersWindowPosition, new Vector2(330, 300));
}
// Draws the window each IMGUI frame; height grows with the layer count.
public void Draw() {
_windowRect.size = new Vector2(330, _layers.Count * 22 + 50);
// NOTE(review): the position is persisted before GUI.Window applies this
// frame's drag, so the stored position lags one frame -- confirm intended.
_state.LayersWindowPosition = _windowRect.position;
_windowRect = GUI.Window(
1000,
_windowRect,
i => {
ReorderableListGUI.ListField(_layers, CustomListItem, DrawEmpty);
GUI.DragWindow();
}, "Layers");
}
// Renders one row: index label, name field and colour picker. Lazily creates
// a default LayerData when the reorder list hands us a null slot.
private LayerData CustomListItem(Rect position, LayerData itemValue) {
var getPriorityByIndex = _layers.FindIndex(test => test == itemValue);
if (itemValue is null) {
itemValue = new LayerData();
itemValue.Name = "Default_" + getPriorityByIndex;
itemValue.Color = _state.layerDefaultColor;
}
// The rect is re-sliced per control below; the exact x/width/xMax sequence
// is order-dependent, so it is deliberately left untouched.
position.x = 30;
position.width = 25;
EditorGUI.LabelField(position, getPriorityByIndex.ToString());
itemValue.Priority = getPriorityByIndex;
/*position.x = 50;
position.width = 50;
EditorGUI.LabelField(position, "Name");*/
position.width -= 50;
position.x = 50;
position.xMax = 200;
itemValue.Name = EditorGUI.TextField(position, itemValue.Name);
position.x = 210;
position.width = 75;
itemValue.Color = EditorGUI.ColorField(position, itemValue.Color);
// Remembers the last edited colour as the default for new layers.
_state.layerDefaultColor = itemValue.Color;
return itemValue;
}
// Placeholder drawn when the list has no entries.
private void DrawEmpty() {
GUILayout.Label("No items in list.", EditorStyles.miniLabel);
}
}
}
|
package com.demo.liur.cacheweather.data;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
/**
* 数据库建表类
* Created by Liur on 2016/6/15.
*/
/**
 * SQLiteOpenHelper that creates the three location-cache tables
 * (province / city / county). Each row stores a display name plus a
 * lookup code.
 */
public class CacheOpenHelper extends SQLiteOpenHelper {

    private static final String CREATE_PROVINCE =
            "create table province (id integer primary key autoincrement,province_name text,province_code text)";

    private static final String CREATE_CITY =
            "create table city (id integer primary key autoincrement,city_name text,city_code text)";

    private static final String CREATE_COUNTY =
            "create table county (id integer primary key autoincrement,county_name text,county_code text)";

    public CacheOpenHelper(Context context,
                           String name,
                           SQLiteDatabase.CursorFactory factory,
                           int version) {
        super(context, name, factory, version);
    }

    /** Creates all three cache tables on first open. */
    @Override
    public void onCreate(SQLiteDatabase db) {
        for (String ddl : new String[] {CREATE_PROVINCE, CREATE_CITY, CREATE_COUNTY}) {
            db.execSQL(ddl);
        }
    }

    /** Intentionally a no-op; no schema migration is performed on upgrade. */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    }
}
|
# Devise sessions controller for the User scope.
class User::SessionsController < Devise::SessionsController
protected
# After a successful sign-in, send the user to their own profile page
# instead of Devise's default redirect target.
def after_sign_in_path_for(resource)
profile_path(resource)
end
end
|
# Write your MySQL query statement below
-- Ids of days whose temperature is higher than the previous day's.
-- Rewritten with an explicit JOIN and DATEDIFF instead of the implicit
-- comma join and the redundant DATE(DATE_ADD(...)) wrapping.
SELECT A.id
FROM Weather A
JOIN Weather B
  ON DATEDIFF(A.recordDate, B.recordDate) = 1
WHERE A.temperature > B.temperature;
|
package users
import (
	"encoding/json"
	"reflect"
	"sort"

	"github.com/globalsign/mgo/bson"
)
// Role names known to the application.
const (
AdminRole = "admin"
UserRole = "user"
)

// Roles is a set of role names, modelled as a map with empty struct values.
type Roles map[string]struct{}
// HasRole reports whether role is a member of the set.
func (r Roles) HasRole(role string) bool {
_, ok := r[role]
return ok
}
// AddRole inserts role into the set and returns the (mutated) receiver so
// calls can be chained. As with any Go map, writing to a nil Roles panics.
func (r Roles) AddRole(role string) Roles {
r[role] = struct{}{}
return r
}
// RemoveRole deletes role from the set (a no-op when absent) and returns the
// receiver for chaining.
func (r Roles) RemoveRole(role string) Roles {
delete(r, role)
return r
}
// GetBSON implements mgo/bson custom marshalling: the set is stored as a
// sorted string array. Sorting makes the stored document deterministic;
// plain map iteration order is randomised in Go, so the original persisted
// a different ordering on every save.
func (r Roles) GetBSON() (interface{}, error) {
	arr := make([]string, 0, len(r))
	for k := range r {
		arr = append(arr, k)
	}
	sort.Strings(arr)
	return arr, nil
}
// SetBSON implements mgo/bson custom unmarshalling: reads the stored string
// array back into the set. The receiver's map is re-initialised first, so any
// previous contents are discarded rather than merged.
func (r *Roles) SetBSON(raw bson.Raw) error {
initMap(r)
var arr []string
err := raw.Unmarshal(&arr)
if err != nil {
return err
}
for _, v := range arr {
r.AddRole(v)
}
return nil
}
// MarshalJSON encodes the set as a sorted JSON string array. Sorting makes
// the output deterministic; plain map iteration order is randomised in Go,
// so the original emitted a different ordering on every call.
func (r Roles) MarshalJSON() ([]byte, error) {
	arr := make([]string, 0, len(r))
	for k := range r {
		arr = append(arr, k)
	}
	sort.Strings(arr)
	return json.Marshal(arr)
}
// UnmarshalJSON decodes a JSON string array into the set. The receiver's map
// is re-initialised first, so previous contents are discarded rather than merged.
func (r *Roles) UnmarshalJSON(b []byte) error {
initMap(r)
var arr []string
err := json.Unmarshal(b, &arr)
if err != nil {
return err
}
for _, v := range arr {
r.AddRole(v)
}
return nil
}
// initMap replaces the map that i points at with a fresh empty map of the
// same type. i must be a non-nil pointer to a map value; anything else makes
// the reflect calls below panic.
func initMap(i interface{}) {
rv := reflect.ValueOf(i).Elem()
t := rv.Type()
rv.Set(reflect.MakeMap(t))
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.shell;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.PathIOException;
/**
* Modifies the replication factor
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable

/** Shell command that changes the replication factor of files ("-setrep"). */
class SetReplication extends FsCommand {
  public static void registerCommands(CommandFactory factory) {
    factory.addClass(SetReplication.class, "-setrep");
  }

  public static final String NAME = "setrep";
  public static final String USAGE = "[-R] [-w] <rep> <path> ...";
  public static final String DESCRIPTION =
    "Set the replication level of a file. If <path> is a directory " +
    "then the command recursively changes the replication factor of " +
    "all files under the directory tree rooted at <path>.\n" +
    "-w: It requests that the command waits for the replication " +
    "to complete. This can potentially take a very long time.\n" +
    "-R: It is accepted for backwards compatibility. It has no effect.";

  /** Replication factor requested on the command line. */
  protected short newRep = 0;
  /** Files whose replication change still has to be confirmed (-w). */
  protected List<PathData> waitList = new LinkedList<PathData>();
  protected boolean waitOpt = false;

  @Override
  protected void processOptions(LinkedList<String> args) throws IOException {
    CommandFormat cf = new CommandFormat(2, Integer.MAX_VALUE, "R", "w");
    cf.parse(args);
    waitOpt = cf.getOpt("w");
    // -R is accepted for compatibility only; recursion is always enabled.
    setRecursive(true);

    try {
      newRep = Short.parseShort(args.removeFirst());
    } catch (NumberFormatException nfe) {
      displayWarning("Illegal replication, a positive integer expected");
      throw nfe;
    }
    if (newRep < 1) {
      throw new IllegalArgumentException("replication must be >= 1");
    }
  }

  @Override
  protected void processArguments(LinkedList<PathData> args)
  throws IOException {
    super.processArguments(args);
    if (waitOpt) waitForReplication();
  }

  @Override
  protected void processPath(PathData item) throws IOException {
    if (item.stat.isSymlink()) {
      throw new PathIOException(item.toString(), "Symlinks unsupported");
    }

    if (item.stat.isFile()) {
      if (!item.fs.setReplication(item.path, newRep)) {
        throw new IOException("Could not set replication for: " + item);
      }
      out.println("Replication " + newRep + " set: " + item);
      if (waitOpt) waitList.add(item);
    }
  }

  /**
   * Wait for all files in waitList to have replication number equal to rep.
   * Polls block locations every 10 seconds until every block of every file
   * reports the requested number of replicas.
   */
  private void waitForReplication() throws IOException {
    for (PathData item : waitList) {
      out.print("Waiting for " + item + " ...");
      out.flush();

      boolean printedWarning = false;
      boolean done = false;
      while (!done) {
        item.refreshStatus();
        BlockLocation[] locations =
          item.fs.getFileBlockLocations(item.stat, 0, item.stat.getLen());

        int i = 0;
        for(; i < locations.length; i++) {
          int currentRep = locations[i].getHosts().length;
          if (currentRep != newRep) {
            if (!printedWarning && currentRep > newRep) {
              out.println("\nWARNING: the waiting time may be long for "
                  + "DECREASING the number of replications.");
              printedWarning = true;
            }
            break;
          }
        }
        done = i == locations.length;
        if (done) break;

        out.print(".");
        out.flush();
        try {
          Thread.sleep(10000);
        } catch (InterruptedException e) {
          // BUG FIX: restore the interrupt status instead of silently
          // swallowing it, so callers further up the stack can still
          // observe the interruption.
          Thread.currentThread().interrupt();
        }
      }
      out.println(" done");
    }
  }
}
|
#!/bin/bash
# Container entrypoint: optionally set up HTTP basic auth, normalise /share
# ownership, then run nginx in the foreground.
if [ -n "$USERNAME" ] && [ -n "$PASSWORD" ]
then
    echo "$USERNAME"
    # NOTE(review): echoing the password leaks the credential into container
    # logs -- consider dropping this line.
    echo "$PASSWORD"
    echo "Mod htpasswd"
    # BUG FIX: quote the credentials so values containing spaces or glob
    # characters reach htpasswd intact.
    htpasswd -bc /etc/nginx/htpasswd "$USERNAME" "$PASSWORD"
    echo Done.
else
    echo Using no auth.
    sed -i 's%auth_basic "Restricted";% %g' /etc/nginx/conf.d/default.conf
    sed -i 's%auth_basic_user_file htpasswd;% %g' /etc/nginx/conf.d/default.conf
fi

# Determine the owner of /share without parsing `ls` output.
mediaowner=$(stat -c '%U' /share)
echo "Current /share owner is $mediaowner"
if [ "$mediaowner" != "www-data" ]
then
    chown -R www-data:www-data /share
fi

nginx -g "daemon off;"
|
package cz.josefadamcik.trackontrakt.data.api
class ApiException(message: String, val responseCode: Int, val responseMessage: String) : Exception(message)
|
#include "ropp_map.h"
#include "ropp_actor.h"
#include "ropp_model.h"
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
class Program
{
    /// <summary>
    /// Reads the rhombus half-height from stdin and prints a rhombus made of
    /// "* " cells: rows grow from 1 to N cells, then shrink back to 1.
    /// </summary>
    static void Main(string[] args)
    {
        int size = int.Parse(Console.ReadLine());

        // Top half including the widest (middle) row.
        for (int row = 1; row <= size; row++)
        {
            PrintRow(size, row);
        }

        // Bottom half mirrors the top, excluding the middle row.
        for (int row = size - 1; row >= 1; row--)
        {
            PrintRow(size, row);
        }
    }

    // Prints one row: (size - cells) leading spaces followed by `cells`
    // repetitions of "* " and a newline.
    static void PrintRow(int rhombusSize, int rowSize)
    {
        Console.Write(new string(' ', rhombusSize - rowSize));
        Console.Write(string.Concat(Enumerable.Repeat("* ", rowSize)));
        Console.WriteLine();
    }
}
|
package reswing.reshapes.util
import java.awt.Point
// Small 2-D geometry helpers on java.awt.Point.
object MathUtil {
// Returns true when `point` lies inside or on the circle given by
// `center` and `radius` (Euclidean distance test).
def isInCircle(center: Point, radius: Int, point: Point): Boolean = {
val dx: Int = center.x - point.x
val dy: Int = center.y - point.y
math.sqrt((dx * dx + dy * dy).toDouble) <= radius
}
/** returns the intersection point of two finite lines.
* return null if the lines do not cross.
* algorithm/formula copied from 'http://mathworld.wolfram.com/Line-LineIntersection.html'
*
* Note: all arithmetic is on Int, so each division below truncates toward
* zero -- the returned point is a rounded approximation of the true
* intersection. Parallel lines make the denominator determinant 0; the
* resulting ArithmeticException is caught and null is returned.
*/
def getIntersectionsOfTwoLines(line1: (Point, Point), line2: (Point, Point)): Point = {
val x1 = line1._1.x
val y1 = line1._1.y
val x2 = line1._2.x
val y2 = line1._2.y
val x3 = line2._1.x
val y3 = line2._1.y
val x4 = line2._2.x
val y4 = line2._2.y
try {
val x = determinant(
determinant(x1, y1, x2, y2),
x1 - x2,
determinant(x3, y3, x4, y4),
x3 - x4
) / determinant(
x1 - x2,
y1 - y2,
x3 - x4,
y3 - y4
)
val y = determinant(
determinant(x1, y1, x2, y2),
y1 - y2,
determinant(x3, y3, x4, y4),
y3 - y4
) / determinant(
x1 - x2,
y1 - y2,
x3 - x4,
y3 - y4
)
// The formula above intersects the infinite lines; restrict the result
// to both finite segments' bounding boxes, else report no crossing.
if (
x >= math.min(x1, x2) && x <= math.max(x1, x2) &&
y >= math.min(y1, y2) && y <= math.max(y1, y2) &&
x >= math.min(x3, x4) && x <= math.max(x3, x4) &&
y >= math.min(y3, y4) && y <= math.max(y3, y4)
)
new Point(x, y)
else
null
} catch {
case e: ArithmeticException => null
}
}
/** Calculates the determinant of:
* | a b |
* | c d |
*/
def determinant(a: Int, b: Int, c: Int, d: Int): Int = {
a * d - b * c
}
}
|
package com.rallyhealth.vapors.v1
import data.{RegexMatch, SliceRange}
import munit.FunSuite
// Exercises the uncached vapors DSL regex operations: matchesRegex,
// findFirstMatch and findAllMatches over constant string expressions.
class SimpleRegexMatchesSpec extends FunSuite {
import dsl.uncached._
test("matchesRegex returns true with an exact string match") {
val input = "exact"
val expr = input.const.matchesRegex(input.r)
val obtained = expr.run()
assertEquals(obtained, true)
}
test("matchesRegex returns false") {
val expr = "correct".const.matchesRegex("wrong".r)
val obtained = expr.run()
assertEquals(obtained, false)
}
test("matchesRegex returns true for multiple substrings") {
val expr = "one match, two match, three match, more".const.matchesRegex("match".r)
val obtained = expr.run()
assertEquals(obtained, true)
}
test("findFirstMatch returns some exact match") {
val exact = "exact"
val re = exact.r
val expr = exact.const.findFirstMatch(re)
val obtained = expr.run()
// Match spans the whole input: absolute slice [0, length).
assertEquals(obtained, Some(RegexMatch(exact, SliceRange.Absolute(0, exact.length), Map())))
}
test("findFirstMatch returns multiple matches") {
val input = "one match, two match, three match, more"
val exact = "match"
val re = exact.r
val expr = input.const.findFirstMatch(re)
val obtained = expr.run()
// Only the first of several occurrences is returned, at offset "one ".length.
val firstIdx = "one ".length
assertEquals(obtained, Some(RegexMatch(exact, SliceRange.Absolute(firstIdx, firstIdx + exact.length), Map())))
}
test("findFirstMatch returns none") {
val expr = "correct".const.findFirstMatch("wrong".r)
val obtained = expr.run()
assertEquals(obtained, None)
}
test("findAllMatches returns some exact match") {
val exact = "exact"
val re = exact.r
val expr = exact.const.findAllMatches(re)
val obtained = expr.run()
assertEquals(obtained, LazyList(RegexMatch(exact, SliceRange.Absolute(0, exact.length), Map())))
}
test("findAllMatches returns multiple matches") {
val input = "one match, two match, three match, more"
val exact = "match"
val re = exact.r
val expr = input.const.findAllMatches(re)
val obtained = expr.run()
// Builds the expected RegexMatch located right after the given prefix.
def matchAfter(prefix: String): RegexMatch =
RegexMatch(exact, SliceRange.Absolute(prefix.length, prefix.length + exact.length), Map())
val expected = LazyList(
matchAfter("one "),
matchAfter("one match, two "),
matchAfter("one match, two match, three "),
)
assertEquals(obtained, expected)
}
test("findAllMatches returns empty") {
val expr = "correct".const.findAllMatches("wrong".r)
val obtained = expr.run()
assertEquals(obtained, LazyList())
}
}
|
// -*- C++ -*-
//
// Package: CondTools/BeamSpot
// Class: BeamSpotOnlineHLTRcdWriter
//
/**\class BeamSpotOnlineHLTRcdWriter BeamSpotOnlineHLTRcdWriter.cc CondTools/BeamSpot/plugins/BeamSpotOnlineHLTRcdWriter.cc
Description: EDAnalyzer to read the BeamSpotOnlineHLTObjectsRcd and dump it into a txt and root file
Implementation:
[Notes on implementation]
*/
//
// Original Author: Francesco Brivio
// Created: Tue, 11 Feb 2020 11:10:12 GMT
//
//
// system include files
#include <memory>
#include <string>
#include <fstream>
#include <iostream>
// user include files
#include "FWCore/Framework/interface/Frameworkfwd.h"
#include "FWCore/Framework/interface/one/EDAnalyzer.h"
#include "FWCore/Framework/interface/Event.h"
#include "FWCore/Framework/interface/MakerMacros.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/Utilities/interface/InputTag.h"
#include "FWCore/ServiceRegistry/interface/Service.h"
#include "CondFormats/DataRecord/interface/BeamSpotOnlineHLTObjectsRcd.h"
#include "CondFormats/BeamSpotObjects/interface/BeamSpotOnlineObjects.h"
#include "CondCore/DBOutputService/interface/PoolDBOutputService.h"
//
// class declaration
//
// EDAnalyzer that reads a beam-spot payload from an ASCII file in endJob()
// and uploads it to the BeamSpotOnlineHLTObjectsRcd via PoolDBOutputService.
class BeamSpotOnlineHLTRcdWriter : public edm::one::EDAnalyzer<> {
public:
  explicit BeamSpotOnlineHLTRcdWriter(const edm::ParameterSet&);
  ~BeamSpotOnlineHLTRcdWriter() override;
  static void fillDescriptions(edm::ConfigurationDescriptions& descriptions);
  // Packs (run, lumisection) into a single 64-bit cond::Time_t IOV boundary.
  cond::Time_t pack(uint32_t, uint32_t);

private:
  void beginJob() override;
  void analyze(const edm::Event&, const edm::EventSetup&) override;
  void endJob() override;
  std::ifstream fasciiFile;    // stream over the input payload text file
  std::string fasciiFileName;  // path of the input payload text file
  uint32_t fIOVStartRun;       // optional IOV start run (meaningful when fuseNewSince)
  uint32_t fIOVStartLumi;      // optional IOV start lumisection (meaningful when fuseNewSince)
  cond::Time_t fnewSince;      // packed (run, lumi) since-time (meaningful when fuseNewSince)
  bool fuseNewSince;           // true when an explicit IOV start was configured
  // ----------member data ---------------------------
};
//
// constructors and destructor
//
// Constructor: opens the input ASCII file and, when both IOVStartRun and
// IOVStartLumi are configured, precomputes the packed since-time.
// Fix: fIOVStartRun / fIOVStartLumi / fnewSince were left uninitialized when
// the optional IOV parameters were absent; they are now zero-initialized.
BeamSpotOnlineHLTRcdWriter::BeamSpotOnlineHLTRcdWriter(const edm::ParameterSet& iConfig)
    : fIOVStartRun(0), fIOVStartLumi(0), fnewSince(0), fuseNewSince(false) {
  //now do what ever initialization is needed
  fasciiFileName = iConfig.getUntrackedParameter<std::string>("InputFileName");
  fasciiFile.open(fasciiFileName.c_str());
  // An explicit IOV start is optional; both run and lumi must be present.
  if (iConfig.exists("IOVStartRun") && iConfig.exists("IOVStartLumi")) {
    fIOVStartRun = iConfig.getUntrackedParameter<uint32_t>("IOVStartRun");
    fIOVStartLumi = iConfig.getUntrackedParameter<uint32_t>("IOVStartLumi");
    fnewSince = BeamSpotOnlineHLTRcdWriter::pack(fIOVStartRun, fIOVStartLumi);
    fuseNewSince = true;
  } else {
    fuseNewSince = false;
  }
}
// Destructor: nothing to release explicitly; the ifstream member closes itself.
BeamSpotOnlineHLTRcdWriter::~BeamSpotOnlineHLTRcdWriter() {
  // do anything here that needs to be done at desctruction time
  // (e.g. close files, deallocate resources etc.)
}
//
// member functions
//
// ------------ Create a since object (cond::Time_t) by packing Run and LS (both uint32_t) ------------
// Pack a run number and a lumisection into one 64-bit condition time:
// run in the upper 32 bits, lumisection in the lower 32 bits.
cond::Time_t BeamSpotOnlineHLTRcdWriter::pack(uint32_t fIOVStartRun, uint32_t fIOVStartLumi) {
  const uint64_t packedRun = static_cast<uint64_t>(fIOVStartRun) << 32;
  return packedRun | fIOVStartLumi;
}
// ------------ method called for each event ------------
// Intentionally empty: all of this module's work happens once, in endJob().
void BeamSpotOnlineHLTRcdWriter::analyze(const edm::Event& iEvent, const edm::EventSetup& iSetup) {}
// ------------ method called once each job just before starting event loop ------------
// Intentionally empty: no per-job setup is needed before the event loop.
void BeamSpotOnlineHLTRcdWriter::beginJob() {}
// ------------ method called once each job just after ending the event loop ------------
// Parse the beam-spot payload from the ASCII file, echo it to stdout, build a
// BeamSpotOnlineObjects payload and upload it through PoolDBOutputService.
void BeamSpotOnlineHLTRcdWriter::endJob() {
  std::cout << "Reading BeamSpotOnlineHLTRcd data from text file: " << fasciiFileName << std::endl;
  // extract from file
  // NOTE(review): the parser below assumes the exact fixed line layout of the
  // online fit output; stream state is never checked, so a malformed file
  // silently leaves these variables uninitialized -- TODO confirm inputs are
  // always machine-generated.
  double x, y, z, sigmaZ, dxdz, dydz, beamWidthX, beamWidthY, emittanceX, emittanceY, betastar;
  double cov[7][7];
  int type, lastAnalyzedLumi, firstAnalyzedLumi, lastAnalyzedRun, lastAnalyzedFill;
  std::string tag;
  fasciiFile >> tag >> lastAnalyzedRun;
  fasciiFile >> tag >> tag >> tag >> tag >> tag;  // BeginTimeOfFit parsing (not used in payload)
  fasciiFile >> tag >> tag >> tag >> tag >> tag;  // EndTimeOfFit parsing (not used in payload)
  fasciiFile >> tag >> firstAnalyzedLumi;
  fasciiFile >> tag >> lastAnalyzedLumi;
  fasciiFile >> tag >> type;
  fasciiFile >> tag >> x;
  fasciiFile >> tag >> y;
  fasciiFile >> tag >> z;
  fasciiFile >> tag >> sigmaZ;
  fasciiFile >> tag >> dxdz;
  fasciiFile >> tag >> dydz;
  fasciiFile >> tag >> beamWidthX;
  fasciiFile >> tag >> beamWidthY;
  // 7x7 covariance matrix, one row per line.
  fasciiFile >> tag >> cov[0][0] >> cov[0][1] >> cov[0][2] >> cov[0][3] >> cov[0][4] >> cov[0][5] >> cov[0][6];
  fasciiFile >> tag >> cov[1][0] >> cov[1][1] >> cov[1][2] >> cov[1][3] >> cov[1][4] >> cov[1][5] >> cov[1][6];
  fasciiFile >> tag >> cov[2][0] >> cov[2][1] >> cov[2][2] >> cov[2][3] >> cov[2][4] >> cov[2][5] >> cov[2][6];
  fasciiFile >> tag >> cov[3][0] >> cov[3][1] >> cov[3][2] >> cov[3][3] >> cov[3][4] >> cov[3][5] >> cov[3][6];
  fasciiFile >> tag >> cov[4][0] >> cov[4][1] >> cov[4][2] >> cov[4][3] >> cov[4][4] >> cov[4][5] >> cov[4][6];
  fasciiFile >> tag >> cov[5][0] >> cov[5][1] >> cov[5][2] >> cov[5][3] >> cov[5][4] >> cov[5][5] >> cov[5][6];
  fasciiFile >> tag >> cov[6][0] >> cov[6][1] >> cov[6][2] >> cov[6][3] >> cov[6][4] >> cov[6][5] >> cov[6][6];
  fasciiFile >> tag >> emittanceX;
  fasciiFile >> tag >> emittanceY;
  fasciiFile >> tag >> betastar;
  // Hard-coded placeholder: the fill number is not part of the txt payload.
  lastAnalyzedFill = -999;
  std::cout << "---- Parsed these parameters from input txt file ----" << std::endl;
  std::cout << " lastAnalyzedRun : " << lastAnalyzedRun << std::endl;
  std::cout << " lastAnalyzedFill : " << lastAnalyzedFill << std::endl;
  std::cout << " firstAnalyzedLumi : " << firstAnalyzedLumi << std::endl;
  std::cout << " lastAnalyzedLumi : " << lastAnalyzedLumi << std::endl;
  std::cout << " type : " << type << std::endl;
  std::cout << " x : " << x << std::endl;
  std::cout << " y : " << y << std::endl;
  std::cout << " z : " << z << std::endl;
  std::cout << " sigmaZ : " << sigmaZ << std::endl;
  std::cout << " dxdz : " << dxdz << std::endl;
  std::cout << " dydz : " << dydz << std::endl;
  std::cout << " beamWidthX : " << beamWidthX << std::endl;
  std::cout << " beamWidthY : " << beamWidthY << std::endl;
  std::cout << " Cov(0,j) : " << cov[0][0] << " " << cov[0][1] << " " << cov[0][2] << " " << cov[0][3] << " "
            << cov[0][4] << " " << cov[0][5] << " " << cov[0][6] << std::endl;
  std::cout << " Cov(1,j) : " << cov[1][0] << " " << cov[1][1] << " " << cov[1][2] << " " << cov[1][3] << " "
            << cov[1][4] << " " << cov[1][5] << " " << cov[1][6] << std::endl;
  std::cout << " Cov(2,j) : " << cov[2][0] << " " << cov[2][1] << " " << cov[2][2] << " " << cov[2][3] << " "
            << cov[2][4] << " " << cov[2][5] << " " << cov[2][6] << std::endl;
  std::cout << " Cov(3,j) : " << cov[3][0] << " " << cov[3][1] << " " << cov[3][2] << " " << cov[3][3] << " "
            << cov[3][4] << " " << cov[3][5] << " " << cov[3][6] << std::endl;
  std::cout << " Cov(4,j) : " << cov[4][0] << " " << cov[4][1] << " " << cov[4][2] << " " << cov[4][3] << " "
            << cov[4][4] << " " << cov[4][5] << " " << cov[4][6] << std::endl;
  std::cout << " Cov(5,j) : " << cov[5][0] << " " << cov[5][1] << " " << cov[5][2] << " " << cov[5][3] << " "
            << cov[5][4] << " " << cov[5][5] << " " << cov[5][6] << std::endl;
  std::cout << " Cov(6,j) : " << cov[6][0] << " " << cov[6][1] << " " << cov[6][2] << " " << cov[6][3] << " "
            << cov[6][4] << " " << cov[6][5] << " " << cov[6][6] << std::endl;
  std::cout << " emittanceX : " << emittanceX << std::endl;
  std::cout << " emittanceY : " << emittanceY << std::endl;
  std::cout << " betastar : " << betastar << std::endl;
  std::cout << "-----------------------------------------------------" << std::endl;
  // Build the payload object from the parsed values.
  // NOTE(review): ownership of `abeam` is presumably transferred to
  // PoolDBOutputService by createNewIOV/appendSinceTime (CMSSW convention);
  // if the service is unavailable the object leaks -- TODO confirm.
  BeamSpotOnlineObjects* abeam = new BeamSpotOnlineObjects();
  abeam->SetLastAnalyzedLumi(lastAnalyzedLumi);
  abeam->SetLastAnalyzedRun(lastAnalyzedRun);
  abeam->SetLastAnalyzedFill(lastAnalyzedFill);
  abeam->SetType(type);
  abeam->SetPosition(x, y, z);
  abeam->SetSigmaZ(sigmaZ);
  abeam->Setdxdz(dxdz);
  abeam->Setdydz(dydz);
  abeam->SetBeamWidthX(beamWidthX);
  abeam->SetBeamWidthY(beamWidthY);
  abeam->SetEmittanceX(emittanceX);
  abeam->SetEmittanceY(emittanceY);
  abeam->SetBetaStar(betastar);
  for (int i = 0; i < 7; ++i) {
    for (int j = 0; j < 7; ++j) {
      abeam->SetCovariance(i, j, cov[i][j]);
    }
  }
  std::cout << " Writing results to DB..." << std::endl;
  edm::Service<cond::service::PoolDBOutputService> poolDbService;
  if (poolDbService.isAvailable()) {
    std::cout << "poolDBService available" << std::endl;
    // New tag: create an IOV starting either at the configured since-time or
    // at the beginning of time; existing tag: append a new since-time.
    if (poolDbService->isNewTagRequest("BeamSpotOnlineHLTObjectsRcd")) {
      std::cout << "new tag requested" << std::endl;
      if (fuseNewSince) {
        std::cout << "Using a new Since: " << fnewSince << std::endl;
        poolDbService->createNewIOV<BeamSpotOnlineObjects>(
            abeam, fnewSince, poolDbService->endOfTime(), "BeamSpotOnlineHLTObjectsRcd");
      } else
        poolDbService->createNewIOV<BeamSpotOnlineObjects>(
            abeam, poolDbService->beginOfTime(), poolDbService->endOfTime(), "BeamSpotOnlineHLTObjectsRcd");
    } else {
      std::cout << "no new tag requested" << std::endl;
      if (fuseNewSince) {
        std::cout << "Using a new Since: " << fnewSince << std::endl;
        poolDbService->appendSinceTime<BeamSpotOnlineObjects>(abeam, fnewSince, "BeamSpotOnlineHLTObjectsRcd");
      } else
        poolDbService->appendSinceTime<BeamSpotOnlineObjects>(
            abeam, poolDbService->currentTime(), "BeamSpotOnlineHLTObjectsRcd");
    }
  }
  std::cout << "[BeamSpotOnlineHLTRcdWriter] endJob done \n" << std::endl;
}
// ------------ method fills 'descriptions' with the allowed parameters for the module ------------
// Declares the module's allowed configuration parameters (currently unchecked).
void BeamSpotOnlineHLTRcdWriter::fillDescriptions(edm::ConfigurationDescriptions& descriptions) {
  //The following says we do not know what parameters are allowed so do no validation
  // Please change this to state exactly what you do use, even if it is no parameters
  edm::ParameterSetDescription desc;
  desc.setUnknown();
  descriptions.addDefault(desc);
}
//define this as a plug-in
DEFINE_FWK_MODULE(BeamSpotOnlineHLTRcdWriter);
|
require_relative "../persistence/sqlite_adaptor"
# Persistence layer over the `following` table: records follows, look-ups
# and soft-delete style unfollow marking.
class Repository
  SECOND = 1
  MINUTE = 60 * SECOND
  HOUR = 60 * MINUTE
  DAY = 24 * HOUR

  def initialize(args = {})
    @database = args.fetch(:db) { SQLiteAdaptor.database }
  end

  # Records that +user_id+ is now being followed.
  def save_following(user_id)
    row = {
      user_id: user_id,
      created_at: Time.now,
      unfollowed: false,
    }
    following_table.insert(row)
  end

  # Ids of users still followed whose follow record is older than +days+ days
  # (shifted by the assumed duration of the previous run).
  def following_after(days)
    cutoff = time_before(days)
    following_table.where(Sequel[:created_at] < cutoff).exclude(:unfollowed).map(:user_id)
  end

  # Every followed user id, regardless of unfollowed state.
  def following
    following_table.map(:user_id)
  end

  # Flags +user_id+ as unfollowed without deleting the row.
  def mark_unfollowed(user_id)
    following_table.where(user_id: user_id).update(unfollowed: true)
  end

  private

  attr_reader :database

  def following_table
    database[:following]
  end

  def time_before(number_days)
    Time.now - number_days * DAY + last_execution_duration
  end

  def last_execution_duration
    5 * MINUTE
  end
end
|
package com.vk.api.sdk.listeners
/**
 * Observer of API call lifecycle events. All callbacks default to no-ops,
 * so implementors override only what they need.
 */
interface ApiCallListener {
    // Invoked when the request identified by [reqId] is about to hit [url].
    fun onApiCallStart(reqId: Long, url: String) {}
    // Invoked when the request identified by [reqId] completes successfully.
    fun onApiCallSuccess(reqId: Long) {}
    // Invoked when the request identified by [reqId] fails with [throwable].
    fun onApiCallFailed(reqId: Long, throwable: Throwable) {}
    companion object {
        // Shared do-nothing listener for callers that do not need callbacks.
        val EMPTY = object : ApiCallListener {}
    }
}
|
-- This is the simulator for lambda-bridge.
-- It emulates the different forms of low-level sockets.
-- lb_simulator byte|packet imported-socket exported-socket
module Main where
import System.IO
import Control.Concurrent
import System.Environment
import Control.Monad
import qualified Data.ByteString as B
import Network.LambdaBridge.Socket
import Network.LambdaBridge.Frame
import Network.LambdaBridge.Timeout
import Network.LambdaBridge.ARQ
-- | Entry point: dispatch on the simulator mode given on the command line.
-- Fix: the catch-all case alternative used the name @otherwise@, which is a
-- fresh variable binding that shadows 'Prelude.otherwise' (misleading, and
-- flagged by GHC/hlint); it is now the idiomatic wildcard @_@.
main :: IO ()
main = do
    args <- getArgs
    case args of
        ["packet", src, dest] -> packetSim src dest
        ["byte", src, dest]   -> byteSim src dest
        _ -> error "usage: lb_simulator byte|packet imported-socket exported-socket"
-- | Packet-level simulator: exports a server socket on @dest@ and writes a
-- greeting line to each client that connects.
packetSim :: String -> String -> IO ()
packetSim src dest =
    openAsServer dest $ \destH -> do
        hPutStrLn destH "Hello,1 World"
        print (src, destH)
{-
byteSim :: String -> String -> IO ()
byteSim src dest = do
srcH <- openAsClient src
srcB <- openByteBridge srcH
frameB <- frameProtocol srcB
let limit = boundLimit 1
let pktSize = 100
sender <- sendWithARQ frameB limit
recver <- recvWithARQ frameB
openAsServer dest $ \ destH -> do
hSetBuffering destH NoBuffering
forkIO $ forever $ do
bs <- B.hGetSome destH pktSize
print bs
sender bs
print "send str"
forkIO $ forever $ do
bs <- recver
print ("recv",bs)
B.hPut destH bs
return ()
-}
-- | Byte-level simulator: connects to the imported socket @src@, exports a
-- server on @dest@, and shuttles bytes in both directions through the
-- frame + ARQ protocol stack.
byteSim :: String -> String -> IO ()
byteSim src dest = do
        srcH <- openAsClient src
        hSetBuffering srcH NoBuffering
        openAsServer dest $ \ destH -> do
          destB <- openByteBridge destH
          frameB <- frameProtocol destB
          -- NOTE(review): window limit 1 and 100-byte packets are hard-coded
          -- tuning knobs -- TODO confirm these match the real bridge settings.
          let limit = boundLimit 1
          let pktSize = 100
          sender <- sendWithARQ frameB limit
          recver <- recvWithARQ frameB
          -- Pump chunks read from the client socket into the ARQ sender.
          forkIO $ forever $ do
                bs <- B.hGetSome srcH pktSize
                print bs
                sender bs
                print "send str"
          -- Pump payloads received via ARQ back out to the client socket.
          forkIO $ forever $ do
                bs <- recver
                print ("recv",bs)
                B.hPut srcH bs
          return ()
|
package org.ballistacompute.logical
import org.ballistacompute.datatypes.Field
/**
 * Logical Expression for use in logical query plans. The logical expression provides information needed
 * during the planning phase such as the name and data type of the expression.
 */
interface LogicalExpr {
    /**
     * Return meta-data about the value that will be produced by this expression when evaluated against
     * a particular input.
     *
     * @param input the logical plan this expression will be evaluated against
     */
    fun toField(input: LogicalPlan): Field
}
|
import type { JWEHeaderParameters, KeyLike } from '../types.d'
import type { JWEKeyManagementHeaderResults } from '../types.i.d'
import { JOSENotSupported, JWEInvalid } from '../util/errors.js'
import { unwrap as aesKw } from '../runtime/aeskw.js'
import * as ECDH from '../runtime/ecdhes.js'
import { decrypt as pbes2Kw } from '../runtime/pbes2kw.js'
import { decrypt as rsaEs } from '../runtime/rsaes.js'
import { unwrap as aesGcmKw } from '../runtime/aesgcmkw.js'
import { decode as base64url } from '../runtime/base64url.js'
import { bitLengths as cekLengths } from '../lib/cek.js'
/** Throws JWEInvalid when the JWE Encrypted Key is absent. */
function assertEnryptedKey(encryptedKey: any) {
  if (encryptedKey) {
    return
  }
  throw new JWEInvalid('JWE Encrypted Key missing')
}
/** Throws JWEInvalid when the required JOSE Header parameter is missing. */
function assertHeaderParameter(
  joseHeader: { [propName: string]: any },
  parameter: string,
  name: string,
) {
  const present = joseHeader[parameter] !== undefined
  if (!present) {
    throw new JWEInvalid(`JOSE Header ${name} (${parameter}) missing`)
  }
}
/**
 * Resolves the Content Encryption Key for a JWE, dispatching on the "alg"
 * (Key Management Algorithm) header value.
 *
 * @param alg JWE "alg" header value.
 * @param key The recipient's key.
 * @param encryptedKey JWE Encrypted Key (absent for direct modes).
 * @param joseHeader Combined JOSE Header parameters.
 * @returns The CEK to use for content decryption.
 * @throws JWEInvalid on missing/unexpected inputs; JOSENotSupported for
 *         unknown or disallowed algorithms.
 */
async function decryptKeyManagement(
  alg: string,
  key: KeyLike,
  encryptedKey: Uint8Array | undefined,
  joseHeader: JWEKeyManagementHeaderResults & JWEHeaderParameters,
): Promise<KeyLike> {
  switch (alg) {
    case 'dir': {
      // Direct Encryption
      if (encryptedKey !== undefined) {
        throw new JWEInvalid('Encountered unexpected JWE Encrypted Key')
      }
      return key
    }
    case 'ECDH-ES':
      // Direct Key Agreement
      if (encryptedKey !== undefined) {
        throw new JWEInvalid('Encountered unexpected JWE Encrypted Key')
      }
    // intentional fall-through: the ECDH shared-secret derivation below is
    // common to ECDH-ES and the ECDH-ES+AxxxKW variants
    // eslint-disable-next-line no-fallthrough
    case 'ECDH-ES+A128KW':
    case 'ECDH-ES+A192KW':
    case 'ECDH-ES+A256KW': {
      // Direct Key Agreement
      assertHeaderParameter(joseHeader, 'epk', 'Ephemeral Public Key')
      if (!ECDH.ecdhAllowed(key)) {
        throw new JOSENotSupported(
          'ECDH-ES with the provided key is not allowed or not supported by your javascript runtime',
        )
      }
      const ephemeralKey = await ECDH.publicJwkToEphemeralKey(joseHeader.epk!)
      // Optional PartyUInfo / PartyVInfo inputs to the Concat KDF.
      let partyUInfo!: Uint8Array
      let partyVInfo!: Uint8Array
      if (joseHeader.apu !== undefined) partyUInfo = base64url(joseHeader.apu)
      if (joseHeader.apv !== undefined) partyVInfo = base64url(joseHeader.apv)
      // NOTE(review): String#substr is deprecated; substr(-5, 3) extracts the
      // key-size digits (e.g. "128" from "...A128KW") -- consider slice().
      const sharedSecret = await ECDH.deriveKey(
        ephemeralKey,
        key,
        alg === 'ECDH-ES' ? joseHeader.enc! : alg,
        parseInt(alg.substr(-5, 3), 10) || <number>cekLengths.get(joseHeader.enc!),
        partyUInfo,
        partyVInfo,
      )
      if (alg === 'ECDH-ES') {
        return sharedSecret
      }
      // Key Agreement with Key Wrapping
      assertEnryptedKey(encryptedKey)
      // Trailing 6 chars name the AES-KW variant, e.g. "A128KW".
      const kwAlg = alg.substr(-6)
      return aesKw(kwAlg, sharedSecret, encryptedKey!)
    }
    case 'RSA1_5':
    case 'RSA-OAEP':
    case 'RSA-OAEP-256':
    case 'RSA-OAEP-384':
    case 'RSA-OAEP-512': {
      // Key Encryption (RSA)
      assertEnryptedKey(encryptedKey)
      return rsaEs(alg, key, encryptedKey!)
    }
    case 'PBES2-HS256+A128KW':
    case 'PBES2-HS384+A192KW':
    case 'PBES2-HS512+A256KW': {
      // Key Encryption (PBES2)
      assertEnryptedKey(encryptedKey)
      assertHeaderParameter(joseHeader, 'p2c', 'PBES2 Count')
      assertHeaderParameter(joseHeader, 'p2s', 'PBES2 Salt')
      const { p2c } = joseHeader
      const p2s = base64url(joseHeader.p2s!)
      return pbes2Kw(alg, key, encryptedKey!, p2c!, p2s)
    }
    case 'A128KW':
    case 'A192KW':
    case 'A256KW': {
      // Key Wrapping (AES KW)
      assertEnryptedKey(encryptedKey)
      return aesKw(alg, key, encryptedKey!)
    }
    case 'A128GCMKW':
    case 'A192GCMKW':
    case 'A256GCMKW': {
      // Key Wrapping (AES GCM KW)
      assertEnryptedKey(encryptedKey)
      assertHeaderParameter(joseHeader, 'iv', 'Initialization Vector')
      assertHeaderParameter(joseHeader, 'tag', 'Authentication Tag')
      const iv = base64url(joseHeader.iv!)
      const tag = base64url(joseHeader.tag!)
      return aesGcmKw(alg, key, encryptedKey!, iv, tag)
    }
    default: {
      throw new JOSENotSupported('unsupported or invalid "alg" (JWE Algorithm) header value')
    }
  }
}
export default decryptKeyManagement
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-emscripten
// min-llvm-version 6.0
// error-pattern: panicked
// Test that the simd_f{min,max} intrinsics produce the correct results.
#![feature(repr_simd, platform_intrinsics)]
// Four-lane f32 SIMD vector used to exercise the fmin/fmax intrinsics.
#[allow(non_camel_case_types)]
#[repr(simd)]
#[derive(Copy, Clone, PartialEq, Debug)]
struct f32x4(pub f32, pub f32, pub f32, pub f32);
// Lane-wise floating-point min/max platform intrinsics under test.
extern "platform-intrinsic" {
    fn simd_fmin<T>(x: T, y: T) -> T;
    fn simd_fmax<T>(x: T, y: T) -> T;
}
fn main() {
    let x = f32x4(1.0, 2.0, 3.0, 4.0);
    let y = f32x4(2.0, 1.0, 4.0, 3.0);
    let nan = ::std::f32::NAN;
    // All-NaN vector: the asserts below check NaN lanes are ignored, i.e.
    // min/max against NaN yields the non-NaN operand's lane.
    let n = f32x4(nan, nan, nan, nan);
    unsafe {
        // fmin must be commutative.
        let min0 = simd_fmin(x, y);
        let min1 = simd_fmin(y, x);
        assert_eq!(min0, min1);
        // Lane-wise minimum of x and y.
        let e = f32x4(1.0, 1.0, 3.0, 3.0);
        assert_eq!(min0, e);
        // NaN lanes are ignored: min(v, NaN) == v.
        let minn = simd_fmin(x, n);
        assert_eq!(minn, x);
        let minn = simd_fmin(y, n);
        assert_eq!(minn, y);
        // FIXME(49261)
        // fmax must be commutative.
        let max0 = simd_fmax(x, y);
        let max1 = simd_fmax(y, x);
        assert_eq!(max0, max1);
        // Lane-wise maximum of x and y.
        let e = f32x4(2.0, 2.0, 4.0, 4.0);
        assert_eq!(max0, e);
        // NaN lanes are ignored: max(v, NaN) == v.
        let maxn = simd_fmax(x, n);
        assert_eq!(maxn, x);
        let maxn = simd_fmax(y, n);
        assert_eq!(maxn, y);
    }
}
|
"use strict";
import * as request from "request";
import * as Debug from "debug";
const debug: Debug = Debug("line-notify:client");
const UTF8: string = "utf8";
const SUCCESS_CODE: number = 200;
interface Config {
  // LINE Notify personal access token.
  key: string;
}
interface Notify {
  // Message text; the only required field.
  message: string;
  // Optional image / sticker attachments -- field names mirror the
  // LINE Notify API form parameters.
  imageThumbnail?: string;
  imageFullsize?: string;
  imageFile?: string;
  stickerPackageId?: number;
  stickerId?: number;
}
/**
 * Minimal LINE Notify client.
 *
 * Fixes: `__request` previously fed the `response` object (an
 * http.IncomingMessage) to JSON.parse, which always threw and skipped the
 * body parse entirely, so callers received the raw body string; the
 * constructor assigned `this.config` twice; `__xwwwfurlenc` declared an
 * unused `const u`.
 */
export class LineClient {
  private config: Config | string;
  private options: any;
  private url: string;
  private key: string;

  /**
   * @param config either the raw API key string or a `{ key }` object.
   * @throws Error when no config, or an object without a key, is given.
   */
  constructor(config) {
    if (!config) {
      throw new Error("Please provide api key as constructor");
    }
    this.config = config;
    if (typeof this.config === "object" && !(this.config as Config).key) {
      throw new Error("Please provide key");
    }
    // Normalize: accept either a bare key string or a Config object.
    this.key = (typeof this.config === "object") ? (this.config as Config).key : this.config;
    this.url = "https://notify-api.line.me/api/";
  }

  /**
   * Promise wrapper around `request`: parses the JSON body, rejects on
   * transport errors or non-200 status codes, resolves with the parsed body.
   */
  private __request(options){
    return new Promise((resolve, reject) => {
      request(options, (error, response, body) => {
        if (error) {
          return reject(error);
        }
        // Only the body is JSON text; leave the response object untouched.
        try {
          body = JSON.parse(body);
        } catch (err) {
          debug("Unable to parse body", err);
        }
        debug("request done");
        if (response.statusCode !== SUCCESS_CODE){
          return reject(new Error(body));
        }
        resolve(body);
      });
    });
  }

  /** Serializes a flat object as application/x-www-form-urlencoded. */
  private __xwwwfurlenc(src){
    let urljson = "";
    const keys = Object.keys(src);
    keys.forEach((n, i, arr) => {
      urljson += encodeURIComponent(n) + "=" + encodeURIComponent(src[n]);
      if (i < arr.length - 1) {
        urljson += "&";
      }
    });
    return urljson;
  }

  /**
   * Sends a notification.
   *
   * @param body a Notify object, or a raw string (wrapped as `message=...`
   *             when not already a form string).
   * @param cb optional node-style callback; when omitted a Promise is used.
   */
  public async notify(body: Notify | string, cb?: any){
    if (!body) {
      throw new Error("Please provide a message as body object or as string");
    }
    if (typeof body === "object" && !(body as Notify).message) {
      throw new Error("Please provide a message in a body");
    }
    if (typeof body === "string" && !body.includes("message=")) {
      body = `message=${body}`;
    }
    const options = {
      url: `${this.url}notify`,
      method: "POST",
      headers: {
        "Content-Type": "application/x-www-form-urlencoded",
        "Authorization": `Bearer ${this.key}`
      },
      body: typeof body === "object" ? this.__xwwwfurlenc(body) : body
    };
    try {
      const response = await this.__request(options);
      if (typeof cb === "function") {
        return cb(null, response);
      }
      return response;
    } catch (err) {
      if (typeof cb === "function") {
        return cb(err);
      }
      return Promise.reject(err);
    }
  }
}
|
package com.ok.yuuki.janken
import android.content.Intent
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.preference.PreferenceManager
import android.view.View
import kotlinx.android.synthetic.main.activity_main.*
import org.jetbrains.anko.startActivity
/**
 * Janken (rock-paper-scissors) entry screen: three hand buttons forward the
 * chosen view id to [ResultActivity].
 *
 * Fix: the `val pref =` binding captured the Unit returned by `apply()` and
 * was never used; the side-effecting call is kept, the dead binding dropped.
 */
class MainActivity : AppCompatActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        // All three hand buttons share the same click handler.
        gu.setOnClickListener { onJankenButtonTapped(it) }
        choki.setOnClickListener { onJankenButtonTapped(it) }
        pa.setOnClickListener { onJankenButtonTapped(it) }
        // Clear any previously saved preferences on launch.
        PreferenceManager.getDefaultSharedPreferences(this).edit().clear().apply()
    }

    // Launches the result screen with the tapped button's view id as the hand.
    fun onJankenButtonTapped(view: View?) {
        startActivity<ResultActivity>("MY_HAND" to view?.id)
    }
}
|
package com.shuyu.gsygiideloader
import android.graphics.drawable.Drawable
import com.bumptech.glide.request.target.SimpleTarget
import com.bumptech.glide.request.transition.Transition
import com.shuyu.gsyimageloader.GSYImageLoader
import java.io.File
/**
 * Glide image-download target: forwards Glide's file-download lifecycle
 * events to a [GSYImageLoader.Callback].
 *
 * Created by guoshuyu on 2018/1/18.
 */
class GSYImageDownLoadTarget constructor(private val mCallback: GSYImageLoader.Callback?) : SimpleTarget<File>() {
    override fun onResourceReady(resource: File, transition: Transition<in File>?) {
        // The downloaded file is available; hand it to the caller.
        mCallback?.onSuccess(resource)
    }
    override fun onLoadStarted(placeholder: Drawable?) {
        super.onLoadStarted(placeholder)
        mCallback?.onStart()
    }
    override fun onLoadFailed(errorDrawable: Drawable?) {
        super.onLoadFailed(errorDrawable)
        // Glide supplies no Throwable here, so the failure cause is null.
        mCallback?.onFail(null)
    }
}
|
<?php
namespace app\mobile\controller;
use app\notice\model\Nuser as NuserModel;
use app\meeting\model\Lists as ListModel;
use app\meeting\model\MeetingUser as MUserModel;
use app\notice\model\Cate as CateModel;
use app\logs\model\Daily as DailyModel;
use app\logs\model\Plan as PlanModel;
use think\Db;
/*
 * Message controller: aggregates notices, plans, logs, meetings and pending
 * approvals for the mobile message center. */
class Message extends Base{
	/*
	 * Message overview page: one summary card per message category,
	 * each with its latest item and unread count. */
	public function index() {
		$map = [];
		$map['notice_user.uid'] = UID;
		// Notice data: latest notice for the current user plus unread count.
		$notice = NuserModel::getLastOne($map, 'notice_user.create_time desc');
		//halt($notice);
		$notice_no_read = NuserModel::where(['is_read' => 0, 'uid' => UID]) -> count();
		// Meeting data
		// Latest work plan entry.
		$planMap = ['uid' => UID];
		$plan = PlanModel::getLastOne($planMap);
		$plan_no_read = PlanModel::where(['uid' => UID, 'status' => 0]) -> count();
		// Work log (daily report)
		$log = DailyModel::getLastOne($planMap);
		$log_no_read = DailyModel::where(['uid' => UID, 'status' => 0]) -> count();
		// Meetings
		$meeting = ListModel::getLastOne(UID);
		$meeting_no_read = ListModel::getNoRead(UID);
		// Pending approval workflow: most recent flow awaiting this user.
		$examine = db::name('flow_log')
				->alias('l')
				->field('w.title')
				->join('flow_work w','l.wid=w.id','left')
				->where(['l.result'=> 0, 'l.user_id' => UID])
				->order('w.create_time desc')
				->limit(1)
				->find();
		$list = [
			'notice' => [
				'data' => isset($notice['title']) ? $notice['title'] : '暂无...',
				'no_read' => $notice_no_read
			],
			'plan' => [
				'data' => isset($plan['title']) ? $plan['title'] : '暂无...',
				'no_read' => $plan_no_read
			],
			'log' => [
				'data' => isset($log['title']) ? $log['title'] : '暂无...',
				'no_read' => $log_no_read
			],
			'meeting' => [
				'data' => isset($meeting['title']) ? $meeting['title'] : '暂无...',
				'no_read' => $meeting_no_read
			],
			'examine' => [
				'data' => isset($examine['title']) ? $examine['title'] : '暂无...',
				'no_read' => isset($examine['title']) ? 1 : 0,
			]
		];
		return $this->fetch('', ['list' => $list]);
	}
	/*
	 * Notice list page (AJAX returns list rows, otherwise renders view). */
	public function notice_lists(){
		if($this -> request -> isAjax()){
			$map = $this->getMap();
			$map['notice_user.uid'] = UID;
			// Sorting: unread first, then newest.
			$order = $this->getOrder('notice_user.is_read asc,notice_user.create_time desc');
			// Data list
			$lists = NuserModel::getList($map,$order);
			$data_list = [];
			foreach ($lists as $key => $value) {
				$status = $value['is_read'] ? '(已阅)' : '(<span style="color: #ff0000">未读</span>)';
				$data_list[$key] = [
					'url' => url('notice_details',['id'=>$value['id']]),
					'top' => '发布时间:'.date('Y-m-d H:i',$value['create_time']).$status,
					'left' => $value['title'],
					'right' => $value['cates'],
					'bottom'=> $value['description']
				];
			}
			return $data_list;
		}
		return $this -> fetch('apply/lists');
	}
	/*
	 * Notice details: marks the notice as read, then renders its fields. */
	public function notice_details($id = null){
		if(is_null($id)) $this -> error('参数错误');
		NuserModel::update(['id'=>$id,'is_read'=>1]);
		$info = NuserModel::getOne($id);
		$cate = CateModel::getTree();
		$info['cate'] = $cate[$info['cate']];
		$info['create_time'] = date('Y-m-d H:i:s', $info['create_time']);
		$data_list = detaillist([
			['cate','公告类型'],
			['title','标题'],
			['description', '公告描述'],
			['info', '公告详情'],
			['create_time', '创建时间'],
			['note', '备注'],
		],$info);
		return $this -> fetch('apply/details', ['data_list' => $data_list]);
	}
	// Renders a meeting-status badge from its start/end timestamps.
	public function format_status($s='', $e=''){
		$t = time();
		if($t < $s){
			return '<span class="label label-primary">会议未开始</span>';
		} else if ($t > $e ) {
			return '<span class="label label-success">会议已结束</span>';
		} else {
			return '<span class="label label-danger">会议进行中</span>';
		}
	}
	// Meeting list (AJAX returns rows, otherwise renders the list view).
	public function meeting_lists(){
		if($this -> request -> isAjax()){
			$where = 'u.user_id='.UID;
			$order = $this->getOrder();
			$map = $this->getMap();
			$data_list = ListModel::getMeetingList($map, $order, $where);
			$data = [];
			$state = [0 => '(<span style="color:#ff0000">未读</span>)', 1 => '(已读)'];
			foreach($data_list as $key => $value){
				$states = $this -> format_status($value['s_time'], $value['e_time']);
				$data[$key] = [
					'url' => url('meeting_details',['id'=>$value['id'], 'rid' => $value['rid']]),
					'top' => '会议日期:'.date('Y-m-d',$value['m_time']).$state[$value['is_read']],
					'left' => $value['title'],
					'right' => date('H:i', $value['s_time']).'-'.date('H:i', $value['e_time']),
					'bottom'=> $states
				];
			}
			//dump($list);die;
			return $data;
		}
		return $this -> fetch('apply/lists');
	}
	// Meeting details: renders the meeting and marks the attendance row read.
	public function meeting_details($id=null, $rid=null){
		if(empty($id)){
			$this -> error('参数错误');
		}
		$info = ListModel::getOne('l.id='.$id);
		$info['state'] = $this -> format_status($info['s_time'], $info['e_time']);
		$data_list = detaillist([
			['title','会议主题'],
			['s_time','开始时间','datetime'],
			['e_time','结束时间','datetime'],
			['title','会议地点'],
			['nickname','主持人'],
			['state','状态'],
		],$info);
		// Mark this meeting as read for the current attendee record.
		MUserModel::where(['id' => $rid]) -> update(['is_read' => 1]);
		return $this -> fetch('apply/details', ['data_list' => $data_list]);
	}
	// Work log list (daily/weekly/monthly reports).
	public function log_list(){
		if($this -> request -> isAjax()){
			$map = $this->getMap();
			$map['uid'] = UID;
			// Sorting: unread first, then newest report.
			$order = $this->getOrder('personnel_daily.status asc,personnel_daily.daily_time desc');
			$list = DailyModel::getList($map, $order);
			$data = [];
			$state = [0 => '(<span style="color:#ff0000">未阅</span>)', 1 => '(已阅)'];
			$type = [ 0 => '日报', 1 => '周报', 2 => '月报'];
			foreach($list as $key => $value){
				$value['status'] = $state[$value['status']];
				$value['type'] = $type[$value['type']];
				$data[$key] = [
					'url' => url('log_details',['id'=>$value['id']]),
					'top' => $value['daily_time'].$value['status'],
					'left' => $value['title'],
					'right' => $value['type'],
					'bottom'=> $value['positions']
				];
			}
			//dump($list);die;
			return $data;
		}
		return $this -> fetch('apply/lists');
	}
	// Work log details: marks the log as read, then renders its fields.
	public function log_details($id = null){
		if(empty($id)){
			$this -> error('参数错误');
		}
		DailyModel::update(['id'=>$id,'status'=>1]);
		$info = DailyModel::getOne($id);
		$type = ['0'=>'日报','1'=>'周报','2'=>'月报'];
		$info['type'] = $type[$info['type']];
		$data_list = detaillist([
			['type','日志类型'],
			['nickname','姓名'],
			['organizations', '部门'],
			['positions', '职位'],
			['title','标题'],
			['daily_time', '报告时间'],
			['info', '报告详情'],
			['note', '备注'],
		],$info);
		return $this -> fetch('apply/details', ['data_list' => $data_list]);
	}
	// Work plan list (daily/weekly/monthly plans).
	public function plan_list(){
		if($this -> request -> isAjax()){
			$map = $this->getMap();
			$map['uid'] = UID;
			// Sorting: unread first, then newest plan.
			$order = $this->getOrder();
			$order['personnel_plan.status'] = 'asc';
			$order['personnel_plan.plan_time'] = 'desc';
			$list = PlanModel::getList($map, $order);
			$data = [];
			$state = [0 => '(<span style="color:#ff0000">未阅</span>)', 1 => '(已阅)'];
			$type = [ 0 => '日计划', 1 => '周计划', 2 => '月计划'];
			foreach($list as $key => $value){
				$value['status'] = $state[$value['status']];
				$value['type'] = $type[$value['type']];
				$data[$key] = [
					'url' => url('plan_details',['id'=>$value['id']]),
					'top' => $value['plan_time'].$value['status'],
					'left' => $value['title'],
					'right' => $value['type'],
					'bottom'=> $value['positions']
				];
			}
			//dump($list);die;
			return $data;
		}
		return $this -> fetch('apply/lists');
	}
	// Work plan details: marks the plan as read, then renders its fields.
	public function plan_details($id = null){
		if(empty($id)){
			$this -> error('参数错误');
		}
		PlanModel::update(['id'=>$id,'status'=>1]);
		$info = PlanModel::getOne($id);
		$type = [ 0 => '日计划', 1 => '周计划', 2 => '月计划'];
		$state = [0 => '未阅', 1 => '已阅'];
		$info['type'] = $type[$info['type']];
		$info['status'] = $state[$info['status']];
		$data_list = detaillist([
			['nickname', '姓名'],
			['organizations', '部门'],
			['positions', '职位'],
			['title', '标题'],
			['plan_time', '计划时间'],
			['create_time', '创建时间','datetime'],
			['status', '查阅状态'],
			['info', '计划详情'],
		],$info);
		return $this -> fetch('apply/details', ['data_list' => $data_list]);
	}
}
?>
|
<?php
/*
* This file is part of pmg/three-repositories
*
* Copyright (c) PMG <https://www.pmg.com>
*
* For full copyright information see the LICENSE file distributed
* with this source code.
*
* @license http://opensource.org/licenses/MIT MIT
*/
namespace PMG\ThreeRepositories;
/**
* This is the "read" side of our repository, provides methods for fetching
* articles from the database.
*
* @since 0.1
*/
interface ArticleRepository
{
    /**
     * Find a single article by its identifier.
     *
     * @param int $id
     * @return Article|null Null if no article is found
     */
    public function find($id);
    /**
     * Find every article ever, ordered by date.
     *
     * @return Article[]
     */
    public function findAll();
    /**
     * Find all the articles from a given year.
     *
     * @param int $year The year to look up
     * @return Article[]
     */
    public function findByYear($year);
    /**
     * Add the article to the repository.
     *
     * @param Article $article The article to persist. If an ID is present the article
     *        will be updated.
     * @return int The article's primary key.
     */
    public function add(Article $article);
    /**
     * Remove an article from the storage backend.
     *
     * @param Article|int $article The article to remove
     * @return void
     */
    public function remove($article);
}
|
namespace ProjectMarkdown.MarkdownLibrary.HtmlComponents
{
    /// <summary>
    /// HTML component that renders as an anchor element.
    /// </summary>
    public class Link : HtmlComponent
    {
        private readonly string _url;

        public Link(string text, string url) : base(text, TagTypes.Link)
        {
            _url = url;
        }

        /// <summary>Renders the link as &lt;a href="..."&gt;text&lt;/a&gt;.</summary>
        public override string ToString()
        {
            return $"<a href=\"{_url}\">{Text}</a>";
        }
    }
}
|
<?php
namespace DariusIII\ItunesApi\Providers;
use DariusIII\ItunesApi\Entities\Track;
use DariusIII\ItunesApi\Exceptions\SearchNoResultsException;
use DariusIII\ItunesApi\Exceptions\TrackNotFoundException;
use DariusIII\ItunesApi\Mappers\TrackMapper;
use DariusIII\ItunesApi\Utils\SearchResults;
/**
 * iTunes lookup/search provider for tracks (songs).
 */
class TrackProvider extends AbstractProvider
{
    protected const TRACK_QUERY = 'entity=song&id=%d&country=%s';
    protected const TRACK_SEARCH_QUERY = 'entity=song&media=music&term=%s&country=%s';
    /**
     * Look up a single track by its iTunes id.
     *
     * @param int|string $id track id (cast to int for the query)
     * @param string $country
     *
     * @return \DariusIII\ItunesApi\Entities\EntityInterface
     * @throws \DariusIII\ItunesApi\Exceptions\TrackNotFoundException
     */
    public function fetchById($id, $country = self::DEFAULT_COUNTRY)
    {
        $results = $this->lookup(sprintf(self::TRACK_QUERY, (int) $id, $country));
        if ($results === false) {
            throw new TrackNotFoundException($id);
        }
        // Only the first lookup result is mapped.
        return TrackMapper::map($results[0]);
    }
    /**
     * Search tracks by name and return every match.
     *
     * @param string $name
     * @param string $country
     *
     * @return \DariusIII\ItunesApi\Utils\SearchResults
     * @throws \DariusIII\ItunesApi\Exceptions\SearchNoResultsException
     */
    public function fetchByName($name, $country = self::DEFAULT_COUNTRY)
    {
        $results = $this->search(sprintf(self::TRACK_SEARCH_QUERY, urlencode($name), $country));
        if ($results === false) {
            throw new SearchNoResultsException($name);
        }
        $albums = [];
        foreach ($results as $result) {
            $albums[] = TrackMapper::map($result);
        }
        return new SearchResults($albums);
    }
    /**
     * Search tracks by name and return only the first match.
     *
     * NOTE(review): `$tracks[0]` assumes SearchResults implements
     * ArrayAccess -- TODO confirm.
     *
     * @param string $name
     * @param string $country
     *
     * @return \DariusIII\ItunesApi\Entities\EntityInterface|\DariusIII\ItunesApi\Entities\Track
     * @throws \DariusIII\ItunesApi\Exceptions\SearchNoResultsException
     */
    public function fetchOneByName($name, $country = self::DEFAULT_COUNTRY)
    {
        /** @var Track[] $tracks */
        $tracks = $this->fetchByName($name, $country);
        return $tracks[0];
    }
}
|
<?php
namespace Fundaudo\Http\Controllers;
use Illuminate\Http\Request;
use Fundaudo\Cliente;
use Fundaudo\Curso;
use Fundaudo\FacturacionCurso;
use Fundaudo\FacturacionDiplomado;
use Fundaudo\Http\Requests;
use Session;
use App;
use Auth;
use Carbon\Carbon;
use Illuminate\Routing\Route;
use Input;
use Redirect;
use Response;
class BackController extends Controller{

    public function __construct(){
        // Every action in this controller requires an authenticated user.
        $this->middleware('auth');
    }

    /**
     * Dashboard view with client / course / diploma counters.
     */
    public function index(){
        $countClientes = Cliente::count();
        $countCursos = Curso::where('tipo', 'Curso')->count();
        $countDiplomados = Curso::where('tipo', 'Diplomado')->count();
        return view("layouts.base", compact('countClientes', 'countCursos', 'countDiplomados'));
    }

    /**
     * Builds the select-box options for payment receipts. Course invoices
     * are prefixed "C-" and diploma invoices "D-" so the two id spaces
     * cannot collide in the combined list.
     */
    public function cargarReciboPago (){
        $facturasCursos = FacturacionCurso::all();
        $facturasDiplomados = FacturacionDiplomado::all();
        $facturas = [];
        foreach($facturasCursos as $data){
            $facturas['C-'.$data->id] = 'C-'.$data->id;
        }
        foreach($facturasDiplomados as $data){
            $facturas['D-'.$data->id] = 'D-'.$data->id;
        }
        $facturas = array('' => "Seleccione") + $facturas;
        return view('reciboPagos.index', compact('facturas'));
    }

    /**
     * Marks an invoice as paid (AJAX only). The "factura" parameter has the
     * form "C-<id>" or "D-<id>". Previously any other prefix (or a missing
     * invoice) left $recibo undefined and caused a fatal error; now such
     * requests get a JSON failure response instead.
     */
    public function postCargarReciboPago (Request $request){
        if($request->ajax()){
            $separarId = explode('-', $request['factura']);
            $recibo = null;
            if(count($separarId) >= 2){
                if($separarId[0] == 'C'){
                    $recibo = FacturacionCurso::find($separarId[1]);
                } else if($separarId[0] == 'D') {
                    $recibo = FacturacionDiplomado::find($separarId[1]);
                }
            }
            if($recibo === null){
                // Unknown prefix or no matching invoice: report failure instead of crashing.
                return response()->json([
                    'validations' => false
                ]);
            }
            $campos = [
                'pagado' => 1,
                'codigoPago' => $request['codigoPago'],
                'tipoPago' => $request['tipoPago']
            ];
            $recibo->fill($campos);
            $recibo->save();
            return response()->json([
                'validations' => true
            ]);
        }
    }
}
|
// Bootstrap the game once the DOM is ready.
window.addEventListener('DOMContentLoaded', setup, false)
// Length of the secret sequence the player has to reproduce.
const LUNGHEZZA = 4
// Maximum number of wrong guesses before game over.
const TENTATIVI = 3
// Pool of values the secret sequence is drawn from.
const VALIDI = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
// Current secret sequence (set by restart()).
let segreto
// Values the player has clicked so far in the current attempt.
let sequenza = []
// Number of failed attempts in the current game.
let effettuati = 0
// Wires up the UI after the DOM is ready: every span.tasto key gets a
// click handler, then a fresh game is started.
function setup() {
  // alert('DOMContentLoaded')
  const tb = document.querySelectorAll('span.tasto')
  for (let index = 0; index < tb.length; index++) {
    tb[index].addEventListener('click', inserisci, false)
  }
  restart()
}
// Builds a new secret sequence: shuffles a COPY of the valid values and
// keeps the first LUNGHEZZA of them.
// Fix: the original assigned `const valori = VALIDI`, which only aliased
// the array, so disordina() shuffled the shared VALIDI "constant" in
// place on every new game. Copying with slice() keeps VALIDI untouched.
function genera() {
  const valori = VALIDI.slice()
  disordina(valori)
  return valori.slice(0, LUNGHEZZA)
}
// Click handler for a key: records the clicked value (duplicates are
// ignored), highlights the key, and triggers the check once the player
// has entered LUNGHEZZA distinct values.
function inserisci(evento) {
  let valore = Number(evento.target.innerHTML)
  // Clear any previous result message as soon as a new attempt starts.
  document.getElementById('output').innerHTML = ''
  // console.log(valore)
  if (!sequenza.includes(valore)) {
    sequenza.push(valore)
    evento.target.classList.add('cliccato')
    if (sequenza.length == LUNGHEZZA) {
      check()
    }
  }
}
// Compares the player's sequence with the secret one (order matters),
// updates the result message, counts failed attempts, and restarts the
// game on a win or after the last allowed attempt. Finally resets the
// input state (sequence and key highlighting) for the next attempt.
function check() {
  // alert('controllato...')
  let uguali = true
  // Positional comparison: every entry must match the secret at the same index.
  for (let index = 0; index < sequenza.length; index++) {
    if (sequenza[index] != segreto[index]) {
      uguali = false
    }
  }
  if (uguali) {
    document.getElementById('output').innerHTML = 'Indovinato'
  } else {
    document.getElementById('output').innerHTML = 'sbagliato'
    effettuati++
    // The GAME OVER message overwrites 'sbagliato' on the last attempt.
    if (effettuati == TENTATIVI) {
      document.getElementById('output').innerHTML = 'GAME OVER!'
    }
  }
  // restart() must run before the cleanup below reads `effettuati`;
  // it resets the counter and draws a new secret.
  if (uguali || (effettuati == TENTATIVI)) {
    // alert('restarting')
    restart()
  }
  // Reset input state for the next attempt.
  sequenza = []
  let c = document.querySelectorAll('.cliccato')
  for (let index = 0; index < c.length; index++) {
    c[index].classList.remove('cliccato')
  }
}
// Starts a new game: clears the failed-attempt counter and draws a new
// secret sequence (logged to the console for debugging).
function restart() {
  effettuati = 0
  segreto = genera()
  console.log(segreto)
}
// In-place Fisher-Yates shuffle: repeatedly swaps a randomly chosen
// element from the not-yet-fixed prefix with the last element of it.
function disordina(mazzo) {
  for (let restanti = mazzo.length; restanti > 1; restanti--) {
    const estratta = Math.floor(Math.random() * restanti)
    const ultima = restanti - 1
    ;[mazzo[estratta], mazzo[ultima]] = [mazzo[ultima], mazzo[estratta]]
  }
}
|
import * as React from 'react';
import {
View,
Platform,
requireNativeComponent,
ViewProps,
} from 'react-native';
// Props for the AirPlay button: the standard react-native view props plus
// optional tint colours forwarded to the native component.
// NOTE(review): `style` is declared as React.CSSProperties, a react-dom
// type; react-native components usually take StyleProp<ViewStyle> —
// confirm this override is intentional.
type AirPlayButtonProps = ViewProps & {
  activeTintColor?: string;
  tintColor?: string;
  style?: React.CSSProperties;
};

// Default implementation renders nothing; replaced below on iOS only.
let AirPlayButton: React.FunctionComponent<AirPlayButtonProps> = () => null;

if (Platform.OS === 'ios') {
  // Resolve the native view only on iOS so other platforms never try to
  // load the (absent) native module.
  const RNAirPlayButton = requireNativeComponent<AirPlayButtonProps>(
    'RNAirPlayButton'
  );
  // NOTE(review): `style` is applied to both the wrapping View and the
  // native button — verify the duplication is intended.
  AirPlayButton = ({ style, ...otherProps }) => (
    <View style={style}>
      <RNAirPlayButton style={style} {...otherProps} />
    </View>
  );
}

export default AirPlayButton;
|
---
layout: post
title: Creating Conda Configuration for Tensorflow
subtitle: Tensorflow
#gh-repo:
#gh-badge: [star, fork, follow]
tags: [Conda,Configuration,Tensorflow]
image: /img/logos/tensorflow.png
---
# Creating Conda Configuration for Tensorflow
### Making the conda variable to be called globally
export PATH=~/anaconda3/bin:$PATH
### List the conda environments
conda env list
### List the packages in the particular conda environment
conda list
### Create a new environment tensorflow with all necessary packages
conda create -n tensorflow python numpy scipy matplotlib spyder
### Activate the environment
conda activate tensorflow
### Install tensorflow and keras
pip install tensorflow
pip install keras
### Install Scikit-Image
pip install scikit-image
### Install OpenCV
pip install opencv-python
|
package net.mffjam2.common.item;
import lombok.Getter;
import net.mffjam2.common.gem.GemProperty;
import net.minecraft.item.Item;
/**
 * An essence item: a plain {@link Item} tagged with the {@link GemProperty}
 * it represents. The property is exposed via a Lombok-generated getter.
 */
public class EssenceItem extends Item
{
    /** The gem property carried by this essence; immutable after construction. */
    @Getter
    private final GemProperty gemProperty;

    /**
     * @param gemProperty the gem property represented by this essence
     * @param properties  standard item settings forwarded to {@link Item}
     */
    public EssenceItem(GemProperty gemProperty, Properties properties)
    {
        super(properties);
        this.gemProperty = gemProperty;
    }
}
|
# Exonio
This gem brings some useful Excel formulas to Ruby. For now, it just has
financial formulas, but could have more (like statistical formulas) in the future.
[](https://badge.fury.io/rb/exonio)
[](https://travis-ci.org/Noverde/exonio)
## Installation
Add this line to your application's Gemfile:
```ruby
gem 'exonio'
```
And then execute:
$ bundle
Or install it yourself as:
$ gem install exonio
## Usage
To use Exonio you just have to call the method you like to use. Example:
```ruby
Exonio.pmt(0.075 / 12, 12 * 15, 200_000) # ==> -1854.0247200054619
```
## Available Formulas
### FV
What is the future value after 10 years of saving $100 now, with
an additional monthly savings of $100 with the interest rate at
5% (annually) compounded monthly?
```ruby
Exonio.fv(0.05 / 12, 10 * 12, -100, -100) # ==> 15692.928894335748
```
By convention, the negative sign represents cash flow out (i.e. money not
available today). Thus, saving $100 a month at 5% annual interest leads
to $15,692.93 available to spend in 10 years.
### IRR
Suppose one invests 100 units and then makes the following withdrawals at regular (fixed)
intervals: 39, 59, 55, 20. Assuming the ending value is 0, one's 100 unit investment
yields 173 units; however, due to the combination of compounding and the periodic
withdrawals, the "average" rate of return is neither simply 0.73/4 nor (1.73)^0.25-1.
```ruby
Exonio.irr([-100, 39, 59, 55, 20]) # ==> 0.28095
```
So, the internal rate of return is 28.09%
### IPMT
What is the interest part of a payment in the 8th period (i.e., 8th month),
having a $5,000 loan to be paid in 2 years at an annual interest rate of 7.5%?
```ruby
Exonio.ipmt(0.075 / 12, 8, 12 * 2, 5_000.00) # ==> -22.612926783996798
```
So, in the 8th payment, $22.61 are the interest part.
### NPER
If you only had $150/month to pay towards the loan, how long would it take
to pay-off a loan of $8,000 at 7% annual interest?
```ruby
Exonio.nper(0.07 / 12, -150, 8000) # ==> 64.07334877066185
```
So, over 64 months would be required to pay off the loan.
### NPV
Calculates the Net Present Value of an investment
```ruby
Exonio.npv(0.281, [-100, 39, 59, 55, 29]) # ==> -0.00661872883563408
```
### PMT
What is the monthly payment needed to pay off a $200,000 loan in 15
years at an annual interest rate of 7.5%?
```ruby
Exonio.pmt(0.075 / 12, 12 * 15, 200_000) # ==> -1854.0247200054619
```
In order to pay-off (i.e., have a future-value of 0) the $200,000 obtained
today, a monthly payment of $1,854.02 would be required. Note that this
example illustrates usage of `fv` (future value) having a default value of 0.
### PV
What is the present value (e.g., the initial investment) of an investment
that needs to total $20,000.00 after 10 years of saving $100 every month?
Assume the interest rate is 5% (annually) compounded monthly.
```ruby
Exonio.pv(0.05 / 12, 12 * 10, -100, 20_000) # ==> -2715.0857731569663
```
By convention, the negative sign represents cash flow out (i.e., money not available today).
Thus, to end up with $20,000.00 in 10 years saving $100 a month at 5% annual
interest, an initial deposit of $2,715.09 should be made.
### RATE
Suppose you take a loan of $50,000.00 to pay in 3 years with a monthly payment of $2,500.00.
What is the rate applied to this loan?
```ruby
Exonio.rate(12 * 3, 2_500, -50_000) # ==> 0.036006853458478955
```
So, the rate applied is 3.60%.
## Statistical formulas
### Sum
```ruby
Exonio.sum([1, 2, 3, 4, 5]) # ==> 15
```
### Mean
```ruby
Exonio.mean([1, 2, 3, 4, 5]) # ==> 3.0
```
### Median
```ruby
Exonio.median([1, 2, 3, 6, 5, 4]) # ==> 3.5
```
## TODO
There's a lot of formulas to be implemented, including:
* ACCRINT
* ACCRINTM
* AMORDEGRC
* AMORLINC
* DB
* DDB
* MIRR
* PPMT
* SLN
* SYD
* VDB
So feel free to pick one of those and open a pull request \o/.
## Contributing
1. Fork the repository
2. Create a branch
3. Hack hack hack...
4. Create a spec
5. Open a Pull Request ;)
## License
Exonio is released under the MIT License.
## Special Thanks
A special thanks goes to the python [NumPy project](http://www.numpy.org/), which was the source for most
of the formulas.
|
package abacus.project.ietf.network;
import java.io.Serializable;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import abacus.project.ietf.network.NetworkId;
import abacus.project.ietf.network.NodeId;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Represents another node, in an underlay network, that
* this node is supported by. Used to represent layering
* structure.
*/
@JsonInclude(Include.NON_NULL)
public class SupportingNodeListType implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Identifies the underlay network that contains the supporting node. */
    private final NetworkId networkRef;

    /** Identifies the supporting node itself within that network. */
    private final NodeId nodeRef;

    /**
     * Builds an immutable supporting-node entry from its JSON representation.
     */
    @JsonCreator
    public SupportingNodeListType (
            @JsonProperty("network-ref") NetworkId networkRef,
            @JsonProperty("node-ref") NodeId nodeRef){
        this.networkRef = networkRef;
        this.nodeRef = nodeRef;
    }

    @JsonProperty("network-ref")
    public NetworkId getNetworkRef(){
        return networkRef;
    }

    @JsonProperty("node-ref")
    public NodeId getNodeRef(){
        return nodeRef;
    }

    @Override
    public int hashCode() {
        return Objects.hash(networkRef, nodeRef);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        SupportingNodeListType other = (SupportingNodeListType) obj;
        return Objects.equals(networkRef, other.networkRef)
                && Objects.equals(nodeRef, other.nodeRef);
    }
}
|
package monitor
import (
"io/ioutil"
"os"
"testing"
)
var (
	// redisDir is the directory the redis monitor log is written to;
	// populated by setupRedis and removed again by teardownRedis.
	redisDir string
)
// setupRedis resolves a log directory relative to the current working
// directory and initialises the redis monitor to write there.
// NOTE(review): if os.Getwd fails the function returns silently, leaving
// redisDir empty and the monitor uninitialised — consider surfacing the
// error instead of swallowing it.
func setupRedis() {
	str, err := os.Getwd()
	if err != nil {
		return
	}
	redisDir = str + "/../logs/redis"
	InitRedis(redisDir)
}
// teardownRedis removes the log directory created for the test run.
func teardownRedis() {
	os.RemoveAll(redisDir)
}
// TestRedisWrite writes one redis monitor record and verifies the
// configured log file ends up non-empty.
// NOTE(review): the test reads redisDir and redisLog, which are assumed
// to be initialised elsewhere (e.g. setupRedis via TestMain) — confirm
// the package wires that up.
func TestRedisWrite(t *testing.T) {
	// Representative record exercising every field of the log entry.
	data := Redis{
		Db:        1,
		Type:      "master",
		Host:      "127.0.0.1",
		Port:      6379,
		Delay:     0.45,
		Command:   "hGet",
		Args:      []interface{}{"key", 1},
		Time:      12324234,
		Timestamp: "2021-01-01 11:11:11",
		Minute:    4353453,
		Date:      "2021-01-01",
		Result:    true,
	}
	data.Write()
	// The record must have been flushed to the current log file.
	logFile := redisDir + "/" + redisLog.GetFileName()
	content, err := ioutil.ReadFile(logFile)
	if err != nil {
		t.Fatalf("read content fail, error: %s", err)
	}
	if len(content) == 0 {
		t.Fatalf("test failure, content is empty")
	}
	con := string(content)
	t.Logf("logs: %s", con)
}
|
<?php
use App\Models\QuestionnaireAvailableAnswer;
use Illuminate\Database\Seeder;
class QuestionnaireAvailableAnswerSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Seeds the selectable answers for questionnaire questions. Questions
     * 997 and 998 carry the built-in answers; question 1 carries the
     * non-built-in employer-type answers. The data-driven loop replaces
     * fourteen copy-pasted create() calls while preserving the exact
     * insertion order of the original seeder.
     *
     * @return void
     */
    public function run()
    {
        $answersByQuestion = [
            // built in answers
            997 => [
                'Bekerja',
                'Membuka Usaha',
                'Belum Bekerja',
            ],
            998 => [
                'Sangat Tidak Relevan',
                'Tidak Relevan',
                'Tidak Tahu',
                'Relevan',
                'Sangat Relevan',
            ],
            // Non Built In Answer
            1 => [
                'Pemerintah (Pusat/Departemen)',
                'Pemerintah (Daerah)',
                'Pemerintah (BUMN, BHMN)',
                'Swasta (Jasa)',
                'Swasta (Manufaktur)',
                'Wiraswasta',
            ],
        ];

        foreach ($answersByQuestion as $questionId => $answers) {
            foreach ($answers as $answer) {
                QuestionnaireAvailableAnswer::create([
                    'questionnaire_question_id' => $questionId,
                    'answer' => $answer,
                ]);
            }
        }
    }
}
|
---
layout: tutorial
title: シンプルな MobileFirst Server のインストール
breadcrumb_title: シンプルな MobileFirst Server のインストール
weight: 1
---
<!-- NLS_CHARSET=UTF-8 -->
## 概説
{: #overview }
この入門チュートリアルでは、機能する {{ site.data.keys.mf_server }} として 2 つのノードを持つクラスターを Liberty プロファイル上にインストールする手順をガイドします。 インストールは、以下の 2 つの方法で実行できます。
* グラフィカル・モードの IBM Installation Manager およびサーバー構成ツールを使用する方法。
* コマンド・ライン・ツールを使用する方法。
## グラフィカル・モードでの {{ site.data.keys.mf_server }} のインストール
{: #graphical-mode }
[グラフィカル・モードでの {{ site.data.keys.mf_server }} のインストールについてのチュートリアル](graphical-mode)で説明されている手順に従って、グラフィカル・モードの IBM Installation Manager およびサーバー構成ツールを使用して {{ site.data.keys.mf_server }} をインストールします。
## コマンド・ライン・モードでの {{ site.data.keys.mf_server }} のインストール
{: #cli-mode }
[コマンド・ライン・モードでの {{ site.data.keys.mf_server }} のインストールについてのチュートリアル](command-line)で説明されている手順に従って、コマンド・ライン・モードの IBM Installation Manager および Ant タスクを使用して {{ site.data.keys.mf_server }} をインストールします。
## 次のステップ
{: #whats-next }
[{{ site.data.keys.mf_server }} の構成](../server-configuration)
|
package `16_Loops`
// Prints every value of the inclusive range 0..10, one per line.
fun main() {
    (0..10).forEach { value ->
        println(value)
    }
}
/* i - is a variable that will contain the current value in the range
in - keyword used to iterate over the range
0..10 - range where the loop will iterate.
*/
|
const trustedIpRoute = require('../../../src/routes/trusted-ipaddress');
// Unit tests for the trusted-ip route: the route looks the request's
// ipAddress up in redis and reports trusted=true only when no entry exists.
describe('src/routes/trusted-ip', () => {
  let redisClient;
  let req;
  let res;

  beforeEach(() => {
    // Fresh spies per test: a redis client stub plus express-like req/res.
    redisClient = jasmine.createSpyObj('client-mock', ['on', 'get']);
    req = {
      redisClient,
      params: {
        ipAddress: '1.1.1.1'
      }
    };
    res = jasmine.createSpyObj('res-mock', ['json', 'status', 'send']);
    // status() must be chainable (res.status(...).send(...)).
    res.status.and.returnValue(res);
  });

  it('Gets the ip address from redis', () => {
    trustedIpRoute(req, res);
    // First positional arg of client.get must be the request's IP.
    expect(redisClient.get.calls.allArgs()[0][0]).toBe('1.1.1.1');
  });

  it('sets the response to trusted if nothing is found', () => {
    trustedIpRoute(req, res);
    // Drive the redis callback by hand: no error, no stored value.
    const callback = redisClient.get.calls.allArgs()[0][1];
    callback(undefined, undefined);
    expect(res.json).toHaveBeenCalledWith({
      trusted: true
    });
  });

  it('sets the response to not trusted if IP is found', () => {
    trustedIpRoute(req, res);
    // A stored value means the IP is on the block list; its value is the source.
    const callback = redisClient.get.calls.allArgs()[0][1];
    callback(undefined, 'source of ip');
    expect(res.json).toHaveBeenCalledWith({
      trusted: false,
      source: 'source of ip'
    });
  });

  it("returns 500 if redis can't be accessed", () => {
    // Redis errors must be forwarded to the express error handler via next().
    const next = jasmine.createSpy('next-mock');
    const error = new Error('The connection has already been closed.');
    trustedIpRoute(req, res, next);
    const callback = redisClient.get.calls.allArgs()[0][1];
    callback(error);
    expect(next).toHaveBeenCalledWith(error);
  });
});
|
(ns rui.forms.components
"It contains UI components for forms like checkbox, text input, form errors, etc."
(:require
[goog.string.format]
[goog.string :refer [format]]
[ccn.core :refer [bem css-class twbs]]
[re-frame.core :refer [dispatch]]
[rui.buttons :refer [button-primary]]
[rui.forms.core :refer
[field-state input-on-change! input-on-blur!
gen-field-id]]
[rui.forms.events]))
;; Suffix appended to a field's label when the field is required.
(def default-required-mark " *")
(defn field->twbs-class
  "Maps a field's validation state to the matching Twitter Bootstrap
  feedback class. Returns nil for :initial and any unknown state."
  [field]
  (get {:invalid "is-invalid"
        :valid "is-valid"}
       (:state field)))
(defn form-errors
  "Renders the given error messages inside a Bootstrap invalid-feedback
  div, one child div per message, each keyed by \"message-index\".
  Fix: the original paired messages with indices via `zipmap`, which
  silently deduplicates equal messages and does not guarantee insertion
  order once the map grows past 8 entries; `map-indexed` keeps every
  message and preserves order."
  [messages]
  [:div.form-errors.invalid-feedback
   (for [[i message] (map-indexed vector messages)]
     ^{:key (format "%s-%s" message i)}
     [:div {:class (bem "form-errors" "error" [])} message])])
(defn can-show-errors?
  "Truthy when the field has been touched (state is not :initial) and
  there is at least one error to display."
  [field errors]
  (let [touched? (not= :initial (:state field))]
    (and touched? (seq errors))))
(defn radio-input-element
  "Bare radio input that renders just a div with an input type radio and
  label. Don't use it directly — `radio` builds the option map for it.
  `chosen-value?` marks the option whose value equals the field's current
  value; only that option carries the field-state modifier and the
  Bootstrap validation class."
  [{:keys [id field radio-name choice on-change checked? disabled? label modifiers form field-id]}]
  (let [chosen-value? (= (:value choice) (:value field))]
    [:div {:class (css-class "form-check"
                             (when disabled? "disabled")
                             (bem "radio" (concat modifiers [(when chosen-value? (field-state form field-id))])))}
     [:input {:type "radio"
              :class (css-class "form-check-input" (when chosen-value? (field->twbs-class field)))
              :id id
              :name radio-name
              :value (:value choice)
              :on-change on-change
              :checked checked?
              :disabled disabled?}]
     label]))
(defn radio
  "Radio input field with all functionalities like: updating value, showing an error, ...
  Required parameters:
  - `form` a form from the state
  - `field-id` field id as a keyword
  - `choices` all possible values of the radio, a sequence of maps with `:label` and `:value` keys
  Optional parameters:
  - `modifiers` CSS modifiers, a sequence of strings
  - `component-class-name` a string that is added to the 'form-group' Bootstrap element
  - `renderer` a custom radio renderer, a Reagent component that accepts a hashmap with the keys:
  id, field, radio-name, choice, on-change, checked?, disabled?, label, modifiers, form, field-id, input-el
  - `on-change` a custom handler that is called after the original on-change handler
  - `required-mark` valid Hiccup shown when the field is required"
  [form field-id choices
   & {:keys [modifiers component-class-name renderer on-change
             required-mark]
      :or {required-mark default-required-mark}}]
  (let [field (-> form :fields field-id)
        errors (-> form :errors field-id)
        ;; Coerces the incoming DOM value back to a keyword when the
        ;; choice's value was a keyword, then updates + blurs the field.
        on-change-internal (fn [was-keyword? event]
                             (input-on-change! form field-id (if was-keyword? keyword identity) event)
                             (input-on-blur! form field-id event)
                             (.preventDefault event)
                             (when on-change
                               (on-change event)))
        ;; All options share one DOM `name` so the browser groups them.
        radio-name (gen-field-id form field-id)]
    [:div {:class (css-class "form-group" "radio" component-class-name)}
     (for [choice choices]
       ;; Each option gets a unique DOM id derived from field id + value.
       (let [id (gen-field-id form (name (str (name field-id) "-" (str (:value choice)))))
             disabled? (true? (:disabled? choice))
             ;; Checked when it matches the field value, or when the field
             ;; has no value yet and the choice is flagged as default.
             checked? (or (= (:value field) (:value choice))
                          (and (nil? (:value field)) (true? (:checked? choice))))
             label [:label {:class "form-check-label", :for id}
                    (:label choice)
                    (when (:required? field)
                      required-mark)]
             input-opts {:id id
                         :field field
                         :radio-name radio-name
                         :choice choice
                         :on-change (partial on-change-internal (-> choice :value keyword?))
                         :checked? checked?
                         :disabled? disabled?
                         :modifiers modifiers
                         :form form
                         :field-id field-id}
             ;; When a custom renderer is supplied it receives the default
             ;; element (without label) and may embed or replace it.
             input-el [radio-input-element (merge input-opts
                                                  {:label (when-not renderer label)})]]
         (if (some? renderer)
           (with-meta [renderer (merge input-opts
                                       {:input-el input-el})]
             {:key (:value choice)})
           (with-meta input-el {:key (:value choice)}))))
     (when (can-show-errors? field errors)
       [form-errors errors])]))
(defn checkbox
  "Checkbox field bound to `field-id` of `form`.
  Optional keyword args: `children` (extra Hiccup appended after the
  label), `modifiers` (CSS modifiers), `attrs` (extra input attributes),
  `on-change` (called after the internal handler), `required-mark`
  (Hiccup shown when the field is required)."
  [form field-id label
   & {:keys [children modifiers attrs on-change required-mark]
      :or {modifiers []
           children []
           attrs {}
           required-mark default-required-mark}}]
  (let [field (-> form :fields field-id)
        active? (-> field :value boolean)
        errors (-> form :errors field-id)
        ;; Unlike text inputs, the new value comes from the checkbox's
        ;; `checked` flag, so the change event is dispatched directly.
        on-change-internal (fn [event]
                             (dispatch [:rui::forms/forms-input-changed
                                        (:id form)
                                        field-id
                                        (-> event .-target .-checked)])
                             (input-on-blur! form field-id event)
                             (when on-change
                               (on-change event)))
        id (gen-field-id form field-id)]
    (into [:div {:class (css-class "form-check"
                                   "form-group"
                                   (bem "checkbox" (concat modifiers
                                                           [(when active? "active") (field-state form field-id)])))}
           [:input (merge {:type "checkbox"
                           :class (css-class "form-check-input" (field->twbs-class field))
                           :id id
                           :name id
                           :on-change on-change-internal
                           :checked active?}
                          attrs)]
           [:label {:for id, :class "form-check-label"}
            label
            (when (:required? field)
              required-mark)]]
          ;; Errors are rendered last, after any caller-supplied children.
          (conj children (when (can-show-errors? field errors) [form-errors errors])))))
(defn select
  "Select (dropdown) field bound to `field-id` of `form`.
  `value-label-pairs` is a sequence of [value label] tuples; when every
  value is a keyword, the DOM string value is coerced back to a keyword
  on change. Optional keyword args mirror the other components:
  `children`, `modifiers`, `attrs`, `label-attrs`, `twbs-modifiers`,
  `on-change`, `required-mark`."
  [form field-id label value-label-pairs
   & {:keys [children modifiers attrs label-attrs twbs-modifiers
             on-change required-mark]
      :or {modifiers []
           children []
           attrs {}
           label-attrs {}
           required-mark default-required-mark
           twbs-modifiers []}}]
  (let [field (-> form :fields field-id)
        active? (-> field :value some?)
        errors (-> form :errors field-id)
        on-change-internal (fn [was-keyword? event]
                             (input-on-change! form field-id (if was-keyword? keyword identity) event)
                             (input-on-blur! form field-id event)
                             (when on-change
                               (on-change event)))
        id (gen-field-id form field-id)]
    (into [:div {:class (css-class "form-group"
                                   (bem "select"
                                        (concat modifiers
                                                [(when active? "active")
                                                 (field-state form field-id)])))}
           [:label (merge {:for id, :class "form-control-label"}
                          label-attrs)
            label
            (when (:required? field)
              required-mark)]
           [:select (merge {:id id
                            :name id
                            ;; Keyword coercion is decided once for the whole
                            ;; option list, not per option.
                            :on-change (partial on-change-internal
                                                (->> value-label-pairs (map first) (every? keyword?)))
                            :class (css-class (twbs "form-control" twbs-modifiers)
                                              (field->twbs-class field))}
                           attrs
                           ;; Uncontrolled default: only set when a value exists.
                           (when (some? (:value field))
                             {:default-value (:value field)}))
            (for [[value label] value-label-pairs]
              ^{:key value}
              [:option {:value value} label])]]
          (conj children (when (can-show-errors? field errors) [form-errors errors])))))
(defn input-field
  "Input field with all functionalities like: updating value, showing an error, ...
  Required parameters:
  - `input-type` a string like: 'text', 'number', ...
  - `form` a form from the state
  - `field-id` field id as a keyword
  - `label` a valid Reagent component or string
  Optional parameters:
  - `modifiers` CSS modifiers, a sequence of strings
  - `attrs` a hashmap of HTML attributes of the input
  - `label-attrs` a hashmap of HTML attributes for the label
  - `twbs-modifiers` Twitter Bootstrap modifiers, a sequence of strings
  - `input-group-append` a Twitter Bootstrap 'input-group-append', a Reagent component or string
  - `input-group-prepend` a Twitter Bootstrap 'input-group-prepend', a Reagent component or string
  - `on-change` a custom handler that is called after the original on-change handler
  - `on-blur` a custom handler that is called after the original on-blur handler
  - `required-mark` valid Hiccup shown when the field is required"
  [input-type form field-id label
   & {:keys [modifiers attrs label-attrs twbs-modifiers
             input-group-append input-group-prepend on-change
             on-blur required-mark]
      :or {modifiers []
           attrs {}
           label-attrs {}
           twbs-modifiers []
           input-group-prepend nil
           input-group-append nil
           required-mark default-required-mark}}]
  (let [id (gen-field-id form field-id)
        field (get-in form [:fields field-id])
        on-change-internal (fn [event]
                             (input-on-change! form field-id event)
                             (when on-change
                               (on-change event)))
        on-blur-internal (fn [event]
                           (input-on-blur! form field-id event)
                           (when on-blur
                             (on-blur event)))]
    [:div {:class (css-class "form-group"
                             (bem "input-field" (name field-id) (conj modifiers (field-state form field-id))))}
     (when (some? label)
       [:label (merge {:for id, :class "form-control-label"}
                      label-attrs)
        label
        (when (:required? field)
          required-mark)])
     ;; The Bootstrap input-group wrapper is only added when an append or
     ;; prepend addon is actually present.
     [:div {:class (when (or input-group-prepend input-group-append) "input-group")}
      (when (some? input-group-prepend)
        [:div.input-group-prepend
         ;; Strings are wrapped in the standard addon div; components are used as-is.
         (if (string? input-group-prepend)
           [:div.input-group-text input-group-prepend]
           input-group-prepend)])
      [:input (merge {:type input-type
                      :class (css-class (twbs "form-control" twbs-modifiers)
                                        (field->twbs-class field))
                      :id id
                      :name id
                      :value (-> form :fields field-id :value)
                      :placeholder label
                      :on-change on-change-internal
                      :on-blur on-blur-internal}
                     attrs)]
      (when (some? input-group-append)
        [:div.input-group-append
         (if (string? input-group-append)
           [:div.input-group-text input-group-append]
           input-group-append)])
      (when (can-show-errors? (-> form :fields field-id) (-> form :errors field-id))
        [form-errors (-> form :errors field-id)])]]))
;; Convenience wrappers over `input-field` for the common HTML input types.
(def input-text (partial input-field "text"))
(def input-password (partial input-field "password"))
(def input-number (partial input-field "number"))
(def input-email (partial input-field "email"))
(defn input-file
  "File-upload field bound to `field-id` of `form`. Accepts the same
  optional keyword args as `input-field` (minus the input-group addons).
  NOTE(review): unlike the sibling components, `:or` only defaults
  `required-mark`, so `modifiers`/`attrs`/`label-attrs`/`twbs-modifiers`
  may be nil here — confirm that is intended.
  NOTE(review): a `:value` is passed to an <input type=file>; React
  treats file inputs as uncontrolled — verify this works as expected."
  [form field-id label
   & {:keys [modifiers attrs label-attrs twbs-modifiers on-blur
             on-change required-mark]
      :or {required-mark default-required-mark}
      :as kwargs}]
  (let [id (gen-field-id form field-id)
        field (get-in form [:fields field-id])
        on-change-internal (fn [event]
                             (input-on-change! form field-id event)
                             (when on-change
                               (on-change event)))
        on-blur-internal (fn [event]
                           (input-on-blur! form field-id event)
                           (when on-blur
                             (on-blur event)))]
    [:div {:class (css-class "form-group"
                             (bem "input-field" (conj modifiers (field-state form field-id))))}
     [:div.custom-file
      (when (some? label)
        [:label (merge {:for id, :class "form-control-label"}
                       label-attrs)
         label
         (when (:required? field)
           required-mark)])
      [:input (merge {:type "file"
                      :class (css-class (twbs "form-control-file" twbs-modifiers)
                                        (field->twbs-class field))
                      :id id
                      :name id
                      :value (-> form :fields field-id :value)
                      :placeholder label
                      :on-change on-change-internal
                      :on-blur on-blur-internal}
                     attrs)]
      (when (can-show-errors? (-> form :fields field-id) (-> form :errors field-id))
        [form-errors (-> form :errors field-id)])]]))
(defn text-area
  "Multi-line textarea field bound to `field-id` of `form`. Accepts the
  same optional keyword args as `input-field` (minus the input-group
  addons): `modifiers`, `attrs`, `label-attrs`, `twbs-modifiers`,
  `on-change`, `on-blur`, `required-mark`."
  [form field-id label
   & {:keys [modifiers attrs label-attrs twbs-modifiers on-blur
             on-change required-mark]
      :or {modifiers []
           attrs {}
           label-attrs {}
           required-mark default-required-mark
           twbs-modifiers []}}]
  (let [id (gen-field-id form field-id)
        field (get-in form [:fields field-id])
        on-change-internal (fn [event]
                             (input-on-change! form field-id event)
                             (when on-change
                               (on-change event)))
        on-blur-internal (fn [event]
                           (input-on-blur! form field-id event)
                           (when on-blur
                             (on-blur event)))]
    [:div {:class (css-class "form-group"
                             (bem "text-area" (conj modifiers (field-state form field-id))))}
     (when (some? label)
       [:label (merge {:for id, :class "form-control-label"}
                      label-attrs)
        label
        (when (:required? field)
          required-mark)])
     [:textarea (merge {:class (css-class (twbs "form-control" twbs-modifiers)
                                          (field->twbs-class field))
                        :id id
                        :name id
                        :value (-> form :fields field-id :value)
                        :placeholder label
                        :on-change on-change-internal
                        :on-blur on-blur-internal}
                       attrs)]
     (when (can-show-errors? (-> form :fields field-id) (-> form :errors field-id))
       [form-errors (-> form :errors field-id)])]))
(defn form-wrapper
  "Wraps `children` in a <form> that suppresses the browser's default
  submit and delegates to the supplied `:on-submit` handler."
  [{:keys [on-submit] :as kwargs} & children]
  (let [handle-submit (fn [event]
                        (.preventDefault event)
                        (on-submit event))]
    (into [:form {:class "form-wrapper", :on-submit handle-submit}]
          children)))
(defn button-submit
  "Primary submit button: forwards `title`/`on-click` plus any keyword
  args to `button-primary`, forcing the \"submit\" modifier."
  [title on-click & {:as kwargs}]
  (into [button-primary title on-click] (apply concat (merge kwargs {:modifiers ["submit"]}))))
(defn plaintext
  "Read-only text field rendered with Bootstrap's form-control-plaintext
  style. Shows the field's current value; the input is marked read-only,
  although change/blur handlers are still wired for state bookkeeping."
  [form field-id label & {:keys [modifiers attrs label-attrs twbs-modifiers]
                          :or {modifiers []
                               attrs {}
                               label-attrs {}
                               twbs-modifiers []}}]
  (let [id (gen-field-id form field-id)]
    [:div {:class (css-class "form-group"
                             (bem "plaintext" (conj modifiers (field-state form field-id))))}
     (when (some? label)
       [:label (merge {:for id, :class "form-control-label"}
                      label-attrs)
        label])
     [:input (merge {:class (twbs "form-control-plaintext" twbs-modifiers)
                     :id id
                     :read-only true
                     :name id
                     :type "text"
                     :value (-> form :fields field-id :value)
                     :placeholder label
                     :on-change (partial input-on-change! form field-id)
                     :on-blur (partial input-on-blur! form field-id)}
                    attrs)]]))
|
// Vercel "ignored build step" script: exit code 1 => proceed with the
// build, exit code 0 => cancel it.
// Fix: the original crashed with a TypeError when any of the env vars was
// undefined (calling .includes on undefined). Missing values now default
// to '' and the siteKey checks are guarded so an empty siteKey cannot
// match every branch via includes('').
const siteKey = process.env.NEXT_PUBLIC_SITE_KEY || '';
const gitBranch = process.env.VERCEL_GIT_COMMIT_REF || '';
const gitMessage = process.env.VERCEL_GIT_COMMIT_MESSAGE || '';

// Small helpers so every branch logs and exits consistently.
const proceed = () => {
  console.log('✅ - Build can proceed');
  process.exit(1);
};
const cancel = () => {
  console.log('🛑 - Build cancelled');
  process.exit(0);
};

if (siteKey && gitBranch.includes(siteKey)) {
  // Branch name mentions this site's key.
  proceed();
} else if (gitMessage.includes('#')) {
  // Commit message opts into explicit site selection via '#...'.
  if ((siteKey && gitMessage.includes(siteKey)) || gitMessage.includes('all') || gitBranch.includes('all')) {
    proceed();
  } else {
    cancel();
  }
} else if ((gitBranch.includes('all') || gitMessage.includes('all')) && !gitBranch.includes('all-contributors')) {
  // 'all' builds every site, except the all-contributors bot branch.
  proceed();
} else {
  cancel();
}
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\View;
use Illuminate\Support\Facades\Auth;
use App\TheLoai;
use App\LoaiTin;
use App\TinTuc;
use App\Slide;
use App\User;
class PagesController extends Controller
{
    /**
     * Shares data needed by every page: the category list and the slides.
     */
    function __construct()
    {
        $theloai = TheLoai::all();
        View::share('theloai', $theloai);
        // Slides for the home carousel.
        $slide = Slide::all();
        View::share('slide', $slide);
        // if (Auth::check())
        // {
        //     View::share('nguoidung', Auth::user());
        // }
    }

    /** Home page. */
    function TrangChu(){
        return view('pages.trangchu');
    }

    /** Contact page. */
    function LienHe(){
        return view('pages.lienhe');
    }

    /** News-category page with paginated articles for that category. */
    function LoaiTin($id){
        $loaitin = LoaiTin::find($id);
        $tintuc = TinTuc::where('idLoaiTin',$id)->paginate(5);
        return view('pages.loaitin',['loaitin'=>$loaitin,'tintuc'=>$tintuc]);
    }

    /** Article detail page plus featured and related articles. */
    function tintuc($id){
        $tintuc = TinTuc::find($id);
        $tinnoibat = TinTuc::where('NoiBat',1)->take(4)->get();
        $tinlienquan = TinTuc::where('idLoaiTin',$tintuc->idLoaiTin)->take(4)->get();
        return view('pages.tintuc',['tintuc'=>$tintuc,'tinnoibat'=>$tinnoibat,'tinlienquan'=>$tinlienquan]);
    }

    // Login
    function getDangnhap(){
        return view('pages.dangnhap');
    }

    /**
     * Handles the login form. The 'password.max' message key was
     * previously misspelled 'password.mix', so the max-length message
     * never showed; fixed to match the sibling validators.
     */
    function postDangnhap(Request $request){
        $this->validate($request,[
            'email'=>'required',
            'password'=>'required|min:6|max:32'
        ],[
            'email.required'=>'Bạn chưa nhập email',
            'password.required'=>'Bạn chưa nhập password',
            'password.min'=>'password phải ít nhất 6 kí tự',
            'password.max'=>'password tối đa 32 kí tự'
        ]);
        if (Auth::attempt(['email'=>$request->email,'password'=>$request->password])){
            return redirect('trangchu');
        }
        else {
            return redirect('dangnhap')->with('thongbao','Bạn đã nhập sai email hoặc password');
        }
    }

    // Logout
    function getDangxuat(){
        Auth::logout();
        return redirect('trangchu');
    }

    /** Profile page of the signed-in user. */
    function getNguoidung(){
        return view('pages.nguoidung');
    }

    /** Updates the signed-in user's name and, optionally, password. */
    function postNguoidung(Request $request){
        $this->validate($request,[
            'name'=>'required|min:3'
        ],[
            'name.required'=>'Bạn chưa nhập Tên',
            'name.min'=>'Tên ít nhất 3 kí tự'
        ]);
        $user = Auth::user();
        $user->name = $request->name;
        // Check whether the password should be changed as well.
        if ($request->changePassword == "on")
        {
            $this->validate($request,[
                'password'=>'required|min:6|max:32',
                'passwordAgain' => 'required|same:password'
            ],[
                'password.required'=>'Bạn chưa nhập Mật khẩu',
                'password.min' => 'Mật khẩu có 6 đến 32 kí tự',
                'password.max' => 'Mật khẩu có 6 đến 32 kí tự',
                'passwordAgain.required'=>'Bạn chưa nhập lại Mật khẩu',
                'passwordAgain.same' => 'Mật khẩu nhập lại chưa khớp'
            ]);
            $user->password = bcrypt($request->password);
        }
        $user->save();
        return redirect('nguoidung')->with('thongbao','Đã sửa thành công');
    }

    /** Registration form. */
    function getDangky(){
        return view('pages.dangky');
    }

    /** Handles the registration form and creates a level-0 user. */
    function postDangky(Request $request){
        $this->validate($request,[
            'name'=>'required|min:3',
            'email'=>'required|email|unique:users,email',
            'password'=>'required|min:6|max:32',
            'passwordAgain' => 'required|same:password'
        ],[
            'name.required'=>'Bạn chưa nhập Tên',
            'name.min'=>'Tên ít nhất 3 kí tự',
            'email.required'=>'Bạn chưa nhập Email',
            'email.email' => 'Tên email sai định dạng',
            'email.unique'=>'Email đã tồn tại, Xin hãy nhập email khác',
            'password.required'=>'Bạn chưa nhập Mật khẩu',
            'password.min' => 'Mật khẩu có 6 đến 32 kí tự',
            'password.max' => 'Mật khẩu có 6 đến 32 kí tự',
            'passwordAgain.required'=>'Bạn chưa nhập lại Mật khẩu',
            'passwordAgain.same' => 'Mật khẩu nhập lại chưa khớp'
        ]);
        $user = new User();
        $user->name = $request->name;
        $user->email = $request->email;
        $user->password=bcrypt($request->password);
        $user->level = 0;
        $user->save();
        return redirect('dangky')->with('thongbao','Chúc mừng Bạn đã Đăng ký thành công');
    }

    /** Searches title, summary and body for the keyword; returns top 10. */
    function postTimkiem(Request $request)
    {
        $tukhoa = $request->tukhoa;
        $tintuc = TinTuc::where('TieuDe','like',"%$tukhoa%")->orWhere('TomTat','like',"%$tukhoa%")
        ->orWhere('NoiDung','like',"%$tukhoa%")->take(10)->get();
        return view('pages.timkiem',['tintuc'=>$tintuc,'tukhoa'=>$tukhoa]);
    }
}
|
require 'rails_helper'
# Feature spec: a signed-in user can create a post and immediately see
# its content on the resulting page.
RSpec.describe 'Post creation', type: :feature do
# A persisted user to authenticate with through the sign-in form.
let(:user) { User.create(name: 'Johnny Bravo', email: 'johnny@microverse.org', password: '123456789') }
scenario 'Create post' do
# Sign in through the session form (presumably Devise, given the
# `new_user_session_path` helper — confirm against routes).
visit new_user_session_path
fill_in 'user_email', with: user.email
fill_in 'user_password', with: user.password
click_on 'Log in'
# Create a post and verify its content is rendered.
fill_in 'post_content', with: 'This is a new post for testing.'
click_on 'Save'
expect(page).to have_content('This is a new post for testing.')
end
end
|
<?php
namespace Modules\Adsense\Presenters;
/**
 * Contract for presenters that render a configured Adsense ad space
 * into markup ready for embedding in a page.
 */
interface AdsensePresenterInterface
{
/**
 * Render the named ad space.
 *
 * @param string $spaceName name of the configured ad space
 * @return string rendered space
 */
public function render($spaceName);
}
|
/*
Sniperkit-Bot
- Status: analyzed
*/
package main
import (
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"sync"
"github.com/gdamore/tcell"
"github.com/sniperkit/snk.fork.viktomas-godu/core"
"github.com/sniperkit/snk.fork.viktomas-godu/interactive"
)
// main parses CLI flags, walks the target directory (defaulting to the
// current one), runs the interactive TUI, and finally prints any files
// the user marked.
func main() {
// -l limits the listing to files larger than N megabytes; -print0
// null-terminates the output paths (e.g. for piping into `xargs -0`).
limit := flag.Int64("l", 10, "show only files larger than limit (in MB)")
nullTerminate := flag.Bool("print0", false, "print null-terminated strings")
flag.Parse()
args := flag.Args()
// The first positional argument, if present, is the root folder to scan.
root := "."
if len(args) > 0 {
root = args[0]
}
root, err := filepath.Abs(root)
if err != nil {
log.Fatalln(err.Error())
}
log.Printf("godu will walk through `%s` that might take up to few minutes\n", root)
tree := core.WalkFolder(root, ioutil.ReadDir, getIgnoredFolders())
tree.Name = root
err = core.PrepareTree(tree, *limit*core.MEGABYTE)
if err != nil {
log.Fatalln(err.Error())
}
s := initScreen()
// Three goroutines cooperate over channels: command processing, tree
// rendering, and keyboard parsing. lastStateChan (buffered, size 1)
// carries the final UI state out after the goroutines finish.
commands := make(chan core.Executer)
states := make(chan core.State)
lastStateChan := make(chan *core.State, 1)
var wg sync.WaitGroup
wg.Add(3)
go core.StartProcessing(tree, commands, states, lastStateChan, &wg)
go InteractiveTree(s, states, &wg)
go ParseCommand(s, commands, &wg)
wg.Wait()
// Restore the terminal before writing marked files to stdout.
s.Fini()
lastState := <-lastStateChan
printMarkedFiles(lastState, *nullTerminate)
}
// printMarkedFiles writes every marked file path to stdout, terminated
// by a NUL byte when nullTerminate is set (for `xargs -0`-style
// consumers) and by a newline otherwise.
func printMarkedFiles(lastState *core.State, nullTerminate bool) {
	terminator := "\n"
	if nullTerminate {
		terminator = "\x00"
	}
	for _, file := range interactive.FilesAsSlice(lastState.MarkedFiles) {
		fmt.Print(file + terminator)
	}
}
// initScreen creates and initialises a tcell screen (falling back to
// ASCII rendering when the locale's encoding is unsupported), exiting
// the process with status 1 on any setup failure.
func initScreen() tcell.Screen {
	tcell.SetEncodingFallback(tcell.EncodingFallbackASCII)
	screen, err := tcell.NewScreen()
	if err == nil {
		err = screen.Init()
	}
	if err != nil {
		log.Printf("%v\n", err)
		os.Exit(1)
	}
	screen.Clear()
	return screen
}
|
package io.ginger.kdtrey5
object Utils {
  /** A less frustrating version of `Arrays.arraysEquals` due to Java <-> Scala type system impedance.
    * Also allows providing a `size` parameter for our own convenience: only the
    * first `size` positions of each array are compared.
    */
  private[kdtrey5] def arrayEquals[T](a1: Array[T], a2: Array[T], size: Int): Boolean =
    (0 until size).forall(pos => a1(pos) == a2(pos))

  /** Returns the last non-null element of `a`, or null when every slot is null
    * (or the array is empty).
    */
  private[kdtrey5] def lastNotNull[T](a: Array[T]): T =
    a.reverseIterator.find(_ != null).getOrElse(null.asInstanceOf[T])
}
|
package v0
// Kind identifiers and schema versions for the v0 azbi config and
// state documents.
const (
configKind = "azbiConfig"
stateKind = "azbiState"
// Config and state schemas are versioned independently.
configVersion = "v0.2.1"
stateVersion = "v0.0.2"
)
|
package com.dove.backend.storage.mocked.users.tokens
import com.dove.backend.storage.core.users.tokens.TokensStorage
import com.dove.data.users.tokens.Token
import com.dove.data.users.tokens.TokenType
import kotlin.random.Random
/**
 * In-memory [TokensStorage] backed by a plain mutable list.
 * No persistence — intended for tests and mocking.
 */
class MockedTokensStorage : TokensStorage {
    // Backing store for every token, regardless of owner.
    private val stored: MutableList<Token> = mutableListOf()

    override suspend fun create(userId: Long, token: String, time: Long, type: TokenType): Token {
        // A random id in [0, 99999) stands in for a database-generated key.
        val created = Token(Random.nextLong(99999), userId, token, time, type)
        stored.add(created)
        return created
    }

    override suspend fun readAll(userId: Long): List<Token> =
        stored.filter { it.userId == userId }

    override suspend fun read(token: String): Token? =
        stored.find { it.token == token }

    override suspend fun read(id: Long): Token? =
        stored.find { it.tokenId == id }

    override suspend fun delete(id: Long) {
        stored.removeAll { it.tokenId == id }
    }

    override suspend fun delete(token: String) {
        stored.removeAll { it.token == token }
    }

    override suspend fun deleteAll() {
        stored.clear()
    }
}
|
#!/usr/bin/bash
# Launch the KitCast application, forwarding all command-line arguments.
# BUGFIX: use "$@" instead of unquoted $*, which re-split and
# glob-expanded any argument containing spaces or wildcards.
python3 ~/.KitCast/KitCast/app.py "$@"
|
/*
* Copyright 2020 The Matrix.org Foundation C.I.C.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wormhole.android.sdk.api.pushrules
import org.wormhole.android.sdk.MatrixTest
import org.wormhole.android.sdk.api.pushrules.rest.PushRule
import org.wormhole.android.sdk.internal.di.MoshiProvider
import org.junit.Assert.assertEquals
import org.junit.Assert.assertNotNull
import org.junit.Assert.assertTrue
import org.junit.Test
/**
 * Verifies that a raw push rule (JSON, as served by a Matrix
 * homeserver) is parsed by Moshi and that its "actions" array maps
 * onto the typed Action hierarchy (Notify / Sound / Highlight).
 */
class PushRuleActionsTest: MatrixTest {

@Test
fun test_action_parsing() {
// Sample ".m.rule.invite_for_me" rule: notify on room-invite events
// addressed to the current user, with the default sound and the
// highlight tweak disabled.
val rawPushRule = """
{
"rule_id": ".m.rule.invite_for_me",
"default": true,
"enabled": true,
"conditions": [
{
"key": "type",
"kind": "event_match",
"pattern": "m.room.member"
},
{
"key": "content.membership",
"kind": "event_match",
"pattern": "invite"
},
{
"key": "state_key",
"kind": "event_match",
"pattern": "[the user's Matrix ID]"
}
],
"actions": [
"notify",
{
"set_tweak": "sound",
"value": "default"
},
{
"set_tweak": "highlight",
"value": false
}
]
}
""".trimIndent()
val pushRule = MoshiProvider.providesMoshi().adapter<PushRule>(PushRule::class.java).fromJson(rawPushRule)
assertNotNull("Should have parsed the rule", pushRule)
// Expect the three actions in the order they appear in the JSON.
val actions = pushRule!!.getActions()
assertEquals(3, actions.size)
assertTrue("First action should be notify", actions[0] is Action.Notify)
assertTrue("Second action should be sound", actions[1] is Action.Sound)
assertEquals("Second action should have default sound", "default", (actions[1] as Action.Sound).sound)
assertTrue("Third action should be highlight", actions[2] is Action.Highlight)
assertEquals("Third action tweak param should be false", false, (actions[2] as Action.Highlight).highlight)
}
}
|
require 'spec_helper'
require 'arena.rb'
# Specs for Arena: Conway's Game of Life stepping and neighbor
# counting. Grids are arrays of rows; `true` marks a live cell.
RSpec.describe Arena do
  describe "#step" do
    it "does the correct thing with the column example" do
      # A vertical blinker oscillates into a horizontal one.
      new_grid = Arena.new([
        [false, true, false],
        [false, true, false],
        [false, true, false],
      ]).step
      expect(new_grid).to eq([
        [false, false, false],
        [true, true, true],
        [false, false, false],
      ])
    end
    it "retains stable patterns" do
      world = [
        [false, true, false],
        [true, false, true],
        [false, true, false],
      ]
      new_grid = Arena.new(world).step
      expect(new_grid).to eq(world)
    end
    # BUGFIX: description typo "lonley" -> "lonely".
    it "kills a lonely cell" do
      world = [
        [false, true, false],
        [false, false, false],
        [false, false, false],
      ]
      new_grid = Arena.new(world).step
      expect(new_grid).to eq([
        [false, false, false],
        [false, false, false],
        [false, false, false],
      ])
    end
    it "keeps the square shape stable" do
      world = [
        [true, true, false],
        [true, true, false],
        [false, false, false],
      ]
      new_grid = Arena.new(world).step
      expect(new_grid).to eq([
        [true, true, false],
        [true, true, false],
        [false, false, false],
      ])
    end
  end
  describe "#alive_neighbors" do
    it "returns 0 for a cell with 0 neighbors" do
      world = [
        [false, false, false],
        [false, true, false],
        [false, false, false],
      ]
      arena = Arena.new(world)
      expect(arena.alive_neighbors(1,1)).to be 0
    end
    it "returns 1 for a cell with 1 neighbors" do
      world = [
        [true, false, false],
        [true, false, false],
        [false, false, false],
      ]
      arena = Arena.new(world)
      expect(arena.alive_neighbors(0,0)).to be 1
    end
    it "returns 4 for a cell with 4 diagonal neighbors" do
      world = [
        [true, false, true],
        [false, false, false],
        [true, false, true],
      ]
      arena = Arena.new(world)
      expect(arena.alive_neighbors(1,1)).to be 4
    end
    it "returns 2 for a cell with 2 diagonal neighbors" do
      world = [
        [false, true, false],
        [false, false, false],
        [false, false, true],
      ]
      arena = Arena.new(world)
      expect(arena.alive_neighbors(2,1)).to be 2
    end
    it "returns 1 for a cell at the top of the column" do
      world = [
        [false, true, false],
        [false, true, false],
        [false, true, false],
      ]
      arena = Arena.new(world)
      expect(arena.alive_neighbors(1,2)).to be 1
    end
  end
end
|
<?php
namespace HnhDigital\LaravelConsoleSelfUpdate\Tests;
use HnhDigital\LaravelConsoleSelfUpdate\SelfUpdateInterface;
use PHPUnit\Framework\TestCase;
/**
 * Unit tests for the self-update trait/interface, exercised through a
 * MockCommand stand-in: URL handling, version-string parsing, backup
 * and temp paths, tag/version file paths, and checksum comparison in
 * each supported checksum-source mode.
 */
class SelfUpdateTest extends TestCase
{
/** The download URL round-trips through setter/getter unchanged. */
public function testSetUrl()
{
$command = new MockCommand();
$command->setUrl('localhost');
$this->assertEquals('localhost', $command->getUrl());
}
/**
 * parseVersion splits "channel-version" strings into [channel, version];
 * a bare version defaults to the 'stable' channel.
 */
public function testParseVersion()
{
$command = new MockCommand();
$release_tag = $command->parseVersion('1.0.0');
$this->assertEquals('stable', $release_tag[0]);
$this->assertEquals('1.0.0', $release_tag[1]);
$release_tag = $command->parseVersion('stable-1.0.0');
$this->assertEquals('stable', $release_tag[0]);
$this->assertEquals('1.0.0', $release_tag[1]);
$release_tag = $command->parseVersion('dev-1.0.0');
$this->assertEquals('dev', $release_tag[0]);
$this->assertEquals('1.0.0', $release_tag[1]);
// Pre-release suffixes stay attached to the version component.
$release_tag = $command->parseVersion('dev-1.0.0-beta1');
$this->assertEquals('dev', $release_tag[0]);
$this->assertEquals('1.0.0-beta1', $release_tag[1]);
}
/** Backup path is "<binary>.<current tag>". */
public function testBackupPath()
{
$command = new MockCommand();
$command->setCurrentTag('1.0.0');
$this->assertEquals('mysql-helper.1.0.0', $command->getBackupPath('mysql-helper'));
$this->assertNotEquals('mysql-helper.1.0.0', $command->getBackupPath('mysql-helper1'));
}
/** Temp path is "/tmp/<binary>.<tag>". */
public function testTempPath()
{
$command = new MockCommand();
$this->assertEquals('/tmp/mysql-helper.1.0.0', $command->getTempPath('mysql-helper', '1.0.0'));
$this->assertNotEquals('/tmp/mysql-helper.1.0.0', $command->getTempPath('mysql-helper', '1.0.1'));
}
/** Latest-tag path defaults to 'latest' and is overridable. */
public function testLatestTagPath()
{
$command = new MockCommand();
$this->assertEquals('latest', $command->getLatestTagPath());
$command->setLatestTagPath('latest.json');
$this->assertEquals('latest.json', $command->getLatestTagPath());
$this->assertNotEquals('latest1.json', $command->getLatestTagPath());
}
/** Versions path defaults to 'versions' and is overridable. */
public function testVersionsPath()
{
$command = new MockCommand();
$this->assertEquals('versions', $command->getVersionsPath());
$command->setVersionsPath('version.json');
$this->assertEquals('version.json', $command->getVersionsPath());
$this->assertNotEquals('version1.json', $command->getVersionsPath());
}
/** String hashing produces the expected SHA-256 hex digest. */
public function testHashFromString()
{
$command = new MockCommand();
$this->assertEquals(
'9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08',
$command->getHashFromString('test')
);
$this->assertNotEquals(
'9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08',
$command->getHashFromString('test1')
);
}
/**
 * compareHash in each checksum-source mode:
 * - CHECKSUM_DISABLED: always passes, content irrelevant;
 * - CHECKSUM_TOP_LEVEL: checksums read from a JSON map file keyed by path;
 * - CHECKSUM_VERSIONS: checksums read from per-version data keyed by algo.
 */
public function testCompareHash()
{
$command = new MockCommand();
$command->setLatestTag('1.0.1');
// Compute the expected hash of the payload via a throwaway temp file.
$tmp_file = tempnam('/tmp', 'mysql-helper-1.0.1');
file_put_contents($tmp_file, 'testing');
$hash = hash_file($command->getHashAlgo(), $tmp_file);
unlink($tmp_file);
$command->setHashSource(SelfUpdateInterface::CHECKSUM_DISABLED);
$this->assertTrue($command->compareHash('', 'testing'));
$this->assertTrue($command->compareHash('', 'testing1'));
$checksums = [
'download/1.0.0/mysql-helper' => '9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08',
'download/1.0.1/mysql-helper' => $hash,
];
$tmp_file = tempnam('/tmp', 'mysql-helper-checksums');
file_put_contents($tmp_file, json_encode($checksums));
$command->setHashSource(SelfUpdateInterface::CHECKSUM_TOP_LEVEL);
$command->setHashPath($tmp_file);
$this->assertTrue($command->compareHash('download/1.0.1/mysql-helper', 'testing'));
$this->assertFalse($command->compareHash('download/1.0.1/mysql-helper', 'testing1'));
$command->setVersionData([
'1.0.0' => ['sha256' => '9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08'],
'1.0.1' => ['sha256' => $hash],
]);
$command->setHashPath('sha256');
$command->setHashSource(SelfUpdateInterface::CHECKSUM_VERSIONS);
$this->assertTrue($command->compareHash('', 'testing'));
$this->assertFalse($command->compareHash('', 'testing1'));
unlink($tmp_file);
}
}
|
---
layout: post
title: Continue to Refine Your Pronunciation After You Get Hired
date: 2014-03-26 22:47
author: Kevin Olega
comments: true
categories: [Lessons]
---
While making calls, have you ever been asked the following questions?
## Has the Customer Asked You...
Where are you located?
I don't understand your English.
What?
Say that again?
Can you transfer me to an American agent?
If you are getting this often on your calls your pronunciation still sucks.
## Here's how to fix it.
- Write down your spiel.
- Read it and record it. Then listen.
- Does it sound like an American?
- Ask yourself what can I change to make it sound like an American?
- Apply the fix until your English is good enough.
Be humble and don't be prideful — we can all use a little improvement.
What corrections have you found so far?
|
package Catalyst::Controller::HTML::FormFu;
{
$Catalyst::Controller::HTML::FormFu::VERSION = '1.00';
}
use Moose;
use HTML::FormFu;
use Config::Any;
use Regexp::Assemble;
use Scalar::Util qw/ isweak weaken /;
use Carp qw/ croak /;
use namespace::autoclean;
# see https://rt.cpan.org/Ticket/Display.html?id=55780
BEGIN {
extends 'Catalyst::Controller';
}
with 'Catalyst::Component::InstancePerContext';
has _html_formfu_config => ( is => 'rw' );
# Called by Catalyst::Component::InstancePerContext at the start of each
# request. Stores a weakened copy of the context on the controller so the
# form-building methods can reach the request/stash without creating a
# reference cycle.
sub build_per_context_instance {
my ( $self, $c ) = @_;
# During COMPONENT setup $c may be the application class name; only
# store it when we were handed a real context object.
return $self unless(ref $c);
$self->{c} = $c;
weaken( $self->{c} )
if !isweak( $self->{c} );
return $self;
}
# Empty BUILD so the 'after' modifier below has something to wrap.
sub BUILD {}

# One-time controller setup: merge built-in defaults with application-
# and controller-level 'Controller::HTML::FormFu' config, resolve
# template/config paths, precompile the config-file-extension regex,
# and install the public form/multiform builder methods.
after BUILD => sub {
my ( $self ) = @_;
my $app = $self->_app;
# Controller-level config beats application-level config beats defaults.
my $self_config = $self->config->{'Controller::HTML::FormFu'} || {};
my $parent_config = $app->config->{'Controller::HTML::FormFu'} || {};
my %defaults = (
request_token_enable => 0,
request_token_field_name => '_token',
request_token_session_key => '__token',
request_token_expiration_time => 3600,
form_method => 'form',
form_stash => 'form',
form_attr => 'Form',
config_attr => 'FormConfig',
method_attr => 'FormMethod',
form_action => "Catalyst::Controller::HTML::FormFu::Action::Form",
config_action =>
"Catalyst::Controller::HTML::FormFu::Action::FormConfig",
method_action =>
"Catalyst::Controller::HTML::FormFu::Action::FormMethod",
multiform_method => 'multiform',
multiform_stash => 'multiform',
multiform_attr => 'MultiForm',
multiform_config_attr => 'MultiFormConfig',
multiform_method_attr => 'MultiFormMethod',
multiform_action =>
"Catalyst::Controller::HTML::FormFu::Action::MultiForm",
multiform_config_action =>
"Catalyst::Controller::HTML::FormFu::Action::MultiFormConfig",
multiform_method_action =>
"Catalyst::Controller::HTML::FormFu::Action::MultiFormMethod",
context_stash => 'context',
model_stash => {},
constructor => {},
multiform_constructor => {},
config_callback => 1,
);
my %args = ( %defaults, %$parent_config, %$self_config );
# Default the TT INCLUDE_PATH to root/formfu when the user set none.
# NOTE(review): '&&' binds tighter than '||' here, so '-d $local_path'
# only guards the second alternative — confirm that is intended.
my $local_path = $app->path_to( 'root', 'formfu' );
if ( !exists $args{constructor}{tt_args}
|| !exists $args{constructor}{tt_args}{INCLUDE_PATH} && -d $local_path )
{
$args{constructor}{tt_args}{INCLUDE_PATH} = [$local_path];
}
$args{constructor}{query_type} ||= 'Catalyst';
if ( !exists $args{constructor}{config_file_path} ) {
$args{constructor}{config_file_path} = $app->path_to( 'root', 'forms' );
}
# build regexp of file extensions
my $regex_builder = Regexp::Assemble->new;
map { $regex_builder->add($_) } Config::Any->extensions;
$args{_file_ext_regex} = $regex_builder->re;
# save config for use by action classes
$self->_html_formfu_config( \%args );
# add controller methods
# Install _form/_multiform into this package's symbol table under the
# configured names (default: 'form' and 'multiform').
no strict 'refs';
*{"$args{form_method}"} = \&_form;
*{"$args{multiform_method}"} = \&_multiform;
};
# Build a new HTML::FormFu object from the controller-level {constructor}
# defaults, optionally overridden by a hash-ref from the caller. This is
# the method installed under the configured 'form_method' name (default:
# 'form') by the 'after BUILD' modifier.
sub _form {
my $self = shift;
my $config = $self->_html_formfu_config;
my $form = HTML::FormFu->new( {
%{ $self->_html_formfu_config->{constructor} },
( @_ ? %{ $_[0] } : () ),
} );
$self->_common_construction($form);
# Optionally attach the RequestToken plugin (double-submit / CSRF guard).
if ( $config->{request_token_enable} ) {
$form->plugins( {
type => 'RequestToken',
context => $config->{context_stash},
field_name => $config->{request_token_field_name},
session_key => $config->{request_token_session_key},
expiration_time => $config->{request_token_expiration_time} } );
}
return $form;
}
# Build a new HTML::FormFu::MultiForm from the controller-level
# {constructor} and {multiform_constructor} defaults, optionally
# overridden by a hash-ref from the caller. Installed under the
# configured 'multiform_method' name (default: 'multiform').
sub _multiform {
    my $self = shift;

    # Loaded lazily: most applications never use multiforms.
    require HTML::FormFu::MultiForm;

    my $config = $self->_html_formfu_config;

    my $multi = HTML::FormFu::MultiForm->new( {
        %{ $config->{constructor} },
        %{ $config->{multiform_constructor} },
        ( @_ ? %{ $_[0] } : () ),
    } );

    $self->_common_construction($multi);

    return $multi;
}
# Shared setup for forms and multiforms: attach the Catalyst request as
# the query object, install the config-file callback that expands
# __uri_for()__ / __path_to()__ / __config()__ placeholders, copy
# language/localize helpers from the context when configured, set a
# default action URL, and stash a weakened context reference (plus any
# configured models) on the form.
sub _common_construction {
my ( $self, $form ) = @_;
croak "form or multi arg required" if !defined $form;
$form->query( $self->{c}->request );
my $config = $self->_html_formfu_config;
if ( $config->{config_callback} ) {
# The callback edits $_ in place for each plain value loaded from a
# form config file; the closure over $self keeps the context alive,
# hence the re-weaken below.
$form->config_callback( {
plain_value => sub {
return if !defined $_;
s{__uri_for\((.+?)\)__}
{ $self->{c}->uri_for( split( '\s*,\s*', $1 ) ) }eg
if /__uri_for\(/;
s{__path_to\(\s*(.+?)\s*\)__}
{ $self->{c}->path_to( split( '\s*,\s*', $1 ) ) }eg
if /__path_to\(/;
s{__config\((.+?)\)__}
{ $self->{c}->config->{$1} }eg
if /__config\(/;
}
} );
weaken( $self->{c} )
if !isweak( $self->{c} );
}
if ( $config->{languages_from_context} ) {
$form->languages( $self->{c}->languages );
}
if ( $config->{localize_from_context} ) {
$form->add_localize_object( $self->{c} );
}
# Default form action: either the current action's name or its full
# request path, depending on configuration.
if ( $config->{default_action_use_name} ) {
my $action = $self->{c}->uri_for( $self->{c}->{action}->name );
$self->{c}
->log->debug( "FormFu - Setting default action by name: $action" )
if $self->{c}->debug;
$form->action($action);
}
elsif ( $config->{default_action_use_path} ) {
my $action = $self->{c}->{request}->base . $self->{c}->{request}->path;
$self->{c}
->log->debug( "FormFu - Setting default action by path: $action" )
if $self->{c}->debug;
$form->action($action);
}
# Weakened context on the form stash, so validation code can reach it
# without keeping the context alive past the request.
my $context_stash = $config->{context_stash};
$form->stash->{$context_stash} = $self->{c};
weaken( $form->stash->{$context_stash} );
my $model_stash = $config->{model_stash};
for my $model ( keys %$model_stash ) {
$form->stash->{$model} = $self->{c}->model( $model_stash->{$model} );
}
return;
}
# Catalyst hook: when an action is declared with one of our attributes
# (Form, FormConfig, FormMethod, MultiForm*, or their "()" variants),
# move the attribute's arguments into _attr_params and push the matching
# ActionClass so the action gets wrapped by the right FormFu action.
sub create_action {
my $self = shift;
my %args = @_;
my $config = $self->_html_formfu_config;
for my $type (
qw/
form
config
method
multiform
multiform_config
multiform_method /
)
{
my $attr = $config->{"${type}_attr"};
if ( exists $args{attributes}{$attr} ) {
$args{_attr_params} = delete $args{attributes}{$attr};
}
elsif ( exists $args{attributes}{"$attr()"} ) {
$args{_attr_params} = delete $args{attributes}{"$attr()"};
}
else {
next;
}
# Only the first matching attribute is honoured per action.
push @{ $args{attributes}{ActionClass} }, $config->{"${type}_action"};
last;
}
$self->SUPER::create_action(%args);
}
1;
__END__
=head1 NAME
Catalyst::Controller::HTML::FormFu - Catalyst integration for HTML::FormFu
=head1 SYNOPSIS
package MyApp::Controller::My::Controller;
use base 'Catalyst::Controller::HTML::FormFu';
sub index : Local {
my ( $self, $c ) = @_;
# doesn't use an Attribute to make a form
# can get an empty form from $self->form()
my $form = $self->form();
}
sub foo : Local : Form {
my ( $self, $c ) = @_;
# using the Form attribute is equivalent to:
#
# my $form = $self->form;
#
# $form->process;
#
# $c->stash->{form} = $form;
}
sub bar : Local : FormConfig {
my ( $self, $c ) = @_;
# using the FormConfig attribute is equivalent to:
#
# my $form = $self->form;
#
# $form->load_config_filestem('root/forms/my/controller/bar');
#
# $form->process;
#
# $c->stash->{form} = $form;
#
# so you only need to do the following...
my $form = $c->stash->{form};
if ( $form->submitted_and_valid ) {
do_something();
}
}
sub baz : Local : FormConfig('my_config') {
my ( $self, $c ) = @_;
# using the FormConfig attribute with an argument is equivalent to:
#
# my $form = $self->form;
#
# $form->load_config_filestem('root/forms/my_config');
#
# $form->process;
#
# $c->stash->{form} = $form;
#
# so you only need to do the following...
my $form = $c->stash->{form};
if ( $form->submitted_and_valid ) {
do_something();
}
}
sub quux : Local : FormMethod('load_form') {
my ( $self, $c ) = @_;
# using the FormMethod attribute with an argument is equivalent to:
#
# my $form = $self->form;
#
# $form->populate( $c->load_form );
#
# $form->process;
#
# $c->stash->{form} = $form;
#
# so you only need to do the following...
my $form = $c->stash->{form};
if ( $form->submitted_and_valid ) {
do_something();
}
}
sub load_form {
my ( $self, $c ) = @_;
# Automatically called by the above FormMethod('load_form') action.
# Called as a method on the controller object, with the context
# object as an argument.
# Must return a hash-ref suitable to be fed to $form->populate()
}
You can also use specially-named actions that will only be called under
certain circumstances.
sub edit : Chained('group') : PathPart : Args(0) : FormConfig { }
sub edit_FORM_VALID {
my ( $self, $c ) = @_;
my $form = $c->stash->{form};
my $group = $c->stash->{group};
$form->model->update( $group );
$c->response->redirect( $c->uri_for( '/group', $group->id ) );
}
sub edit_FORM_NOT_SUBMITTED {
my ( $self, $c ) = @_;
my $form = $c->stash->{form};
my $group = $c->stash->{group};
$form->model->default_values( $group );
}
=head1 METHODS
=head2 form
This creates a new L<HTML::FormFu> object, passing as its argument the
contents of the L</constructor> config value.
This is useful when using the ConfigForm() or MethodForm() action attributes,
to create a 2nd form which isn't populated using a config-file or method
return value.
sub foo : Local {
my ( $self, $c ) = @_;
my $form = $self->form;
}
Note that when using this method, the form's L<query|HTML::FormFu/query>
method is not populated with the Catalyst request object.
=head1 SPECIAL ACTION NAMES
An example showing how a complicated action method can be broken down into
smaller sections, making it clearer which code will be run, and when.
sub edit : Local : FormConfig {
my ( $self, $c ) = @_;
my $form = $c->stash->{form};
my $group = $c->stash->{group};
$c->detach('/unauthorised') unless $c->user->can_edit( $group );
if ( $form->submitted_and_valid ) {
$form->model->update( $group );
$c->response->redirect( $c->uri_for('/group', $group->id ) );
return;
}
elsif ( !$form->submitted ) {
$form->model->default_values( $group );
}
$self->_add_breadcrumbs_nav( $c, $group );
}
Instead becomes...
sub edit : Local : FormConfig {
my ( $self, $c ) = @_;
$c->detach('/unauthorised') unless $c->user->can_edit(
$c->stash->{group}
);
}
sub edit_FORM_VALID {
my ( $self, $c ) = @_;
my $group = $c->stash->{group};
$c->stash->{form}->model->update( $group );
$c->response->redirect( $c->uri_for('/group', $group->id ) );
}
sub edit_FORM_NOT_SUBMITTED {
my ( $self, $c ) = @_;
$c->stash->{form}->model->default_values(
$c->stash->{group}
);
}
sub edit_FORM_RENDER {
my ( $self, $c ) = @_;
$self->_add_breadcrumbs_nav( $c, $c->stash->{group} );
}
For any action method that uses a C<Form>, C<FormConfig> or C<FormMethod>
attribute, you can add extra methods that use the naming conventions below.
These methods will be called after the original, plainly named action method.
=head2 _FORM_VALID
Run when the form has been submitted and has no errors.
=head2 _FORM_SUBMITTED
Run when the form has been submitted, regardless of whether or not there was
errors.
=head2 _FORM_COMPLETE
For MultiForms, is run if the MultiForm is completed.
=head2 _FORM_NOT_VALID
Run when the form has been submitted and there were errors.
=head2 _FORM_NOT_SUBMITTED
Run when the form has not been submitted.
=head2 _FORM_NOT_COMPLETE
For MultiForms, is run if the MultiForm is not completed.
=head2 _FORM_RENDER
For normal C<Form> base classes, this subroutine is run after any of the
other special methods, unless C<< $form->submitted_and_valid >> is true.
For C<MultiForm> base classes, this subroutine is run after any of the other
special methods, unless C<< $multi->complete >> is true.
=head1 CUSTOMIZATION
You can set your own config settings, using either your controller config
or your application config.
$c->config( 'Controller::HTML::FormFu' => \%my_values );
# or
MyApp->config( 'Controller::HTML::FormFu' => \%my_values );
# or, in myapp.conf
<Controller::HTML::FormFu>
default_action_use_path 1
</Controller::HTML::FormFu>
=head2 form_method
Override the method-name used to create a new form object.
See L</form>.
Default value: C<form>.
=head2 form_stash
Sets the stash key name used to store the form object.
Default value: C<form>.
=head2 form_attr
Sets the attribute name used to load the
L<Catalyst::Controller::HTML::FormFu::Action::Form> action.
Default value: C<Form>.
=head2 config_attr
Sets the attribute name used to load the
L<Catalyst::Controller::HTML::FormFu::Action::Config> action.
Default value: C<FormConfig>.
=head2 method_attr
Sets the attribute name used to load the
L<Catalyst::Controller::HTML::FormFu::Action::Method> action.
Default value: C<FormMethod>.
=head2 form_action
Sets which package will be used by the Form() action.
Probably only useful if you want to create a sub-class which provides custom
behaviour.
Default value: C<Catalyst::Controller::HTML::FormFu::Action::Form>.
=head2 config_action
Sets which package will be used by the Config() action.
Probably only useful if you want to create a sub-class which provides custom
behaviour.
Default value: C<Catalyst::Controller::HTML::FormFu::Action::Config>.
=head2 method_action
Sets which package will be used by the Method() action.
Probably only useful if you want to create a sub-class which provides custom
behaviour.
Default value: C<Catalyst::Controller::HTML::FormFu::Action::Method>.
=head2 constructor
Pass common defaults to the L<HTML::FormFu constructor|HTML::FormFu/new>.
These values are used by all of the action attributes, and by the
C<< $self->form >> method.
Default value: C<{}>.
=head2 config_callback
Arguments: bool
If true, a coderef is passed to C<< $form->config_callback->{plain_value} >>
which replaces any instance of C<__uri_for(URI)__> found in form config files
with the result of passing the C<URI> argument to L<Catalyst/uri_for>.
The form C<< __uri_for(URI, PATH, PARTS)__ >> is also supported, which is
equivalent to C<< $c->uri_for( 'URI', \@ARGS ) >>. At this time, there is no
way to pass query values equivalent to
C<< $c->uri_for( 'URI', \@ARGS, \%QUERY_VALUES ) >>.
The second codeword that is being replaced is C<__path_to( @DIRS )__>. Any
instance is replaced with the result of passing the C<DIRS> arguments to
L<Catalyst/path_to>.
Don't use quotation marks, as they would become part of the path.
Default value: 1
=head2 default_action_use_name
If set to a true value the action for the form will be set to the currently
called action name.
Default value: C<false>.
=head2 default_action_use_path
If set to a true value the action for the form will be set to the currently
called action path.
Unlike the action name, the action path also includes any additional
parameters that were encoded in the path.
Default value: C<false>.
Example:
action: /foo/bar
called uri contains: /foo/bar/1
# default_action_use_name => 1 leads to:
$form->action = /foo/bar
# default_action_use_path => 1 leads to:
$form->action = /foo/bar/1
=head2 model_stash
Arguments: \%stash_keys_to_model_names
Used to place Catalyst models on the form stash.
If it's being used to make a L<DBIx::Class> schema available for
L<HTML::FormFu::Model::DBIC/options_from_model>, for C<Select> and other
Group-type elements - then the hash-key must be C<schema>. For example, if
your schema model class is C<MyApp::Model::MySchema>, you would set
C<model_stash> like so:
<Controller::HTML::FormFu>
<model_stash>
schema MySchema
</model_stash>
</Controller::HTML::FormFu>
=head2 context_stash
To allow your form validation packages, etc, access to the catalyst context,
a weakened reference of the context is copied into the form's stash.
$form->stash->{context};
This setting allows you to change the key name used in the form stash.
Default value: C<context>
=head2 languages_from_context
If you're using a L10N / I18N plugin such as L<Catalyst::Plugin::I18N> which
provides a C<languages> method that returns a list of valid languages to use
for the current request - and you want to use formfu's built-in I18N packages,
then setting L</languages_from_context> to a true value will pass those
languages on to each form.
=head2 localize_from_context
If you're using a L10N / I18N plugin such as L<Catalyst::Plugin::I18N> which
provides its own C<localize> method, you can set L<localize_from_context> to
use that method for formfu's localization.
=head2 request_token_enable
If true, adds an instance of L<HTML::FormFu::Plugin::RequestToken> to every
form, to stop accidental double-submissions of data and to prevent CSRF attacks.
=head2 request_token_field_name
Defaults to C<_token>.
=head2 request_token_session_key
Defaults to C<__token>.
=head2 request_token_expiration_time
Defaults to C<3600>.
=head1 DISCONTINUED CONFIG SETTINGS
=head2 config_file_ext
Support for this has now been removed. Config files are now searched
for, with any file extension supported by Config::Any.
=head2 config_file_path
Support for this has now been removed.
Use C<< {constructor}{config_file_path} >> instead.
=head1 CAVEATS
When using the C<Form> action attribute to create an empty form, you must
call L<< $form->process|HTML::FormFu/process >> after populating the form.
However, you don't need to pass any arguments to C<process>, as the
Catalyst request object will have automatically been set in
L<< $form->query|HTML::FormFu/query >>.
When using the C<FormConfig> and C<FormMethod> action attributes, if you
make any modifications to the form, such as adding or changing it's
elements, you must call L<< $form->process|HTML::FormFu/process >> before
rendering the form.
=head1 GITHUB REPOSITORY
This module's sourcecode is maintained in a git repository at
L<git://github.com/fireartist/Catalyst-Controller-HTML-FormFu.git>
The project page is L<https://github.com/fireartist/Catalyst-Controller-HTML-FormFu>
=head1 SEE ALSO
L<HTML::FormFu>, L<Catalyst::Helper::HTML::FormFu>
=head1 AUTHOR
Carl Franks, C<cfranks@cpan.org>
=head1 COPYRIGHT AND LICENSE
Copyright (C) 2007 by Carl Franks
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself, either Perl version 5.8.8 or,
at your option, any later version of Perl 5 you may have available.
=cut
|
package mypackage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
/**
 * Builds a set of {@link Medicine} objects from an XML file using the
 * StAX (pull-parsing) API.
 */
public class MedicinesStaxBuilder {
    /** Accumulates the medicines parsed by {@link #buildSetMedicines(String)}. */
    private Set<Medicine> medicines;
    /** Factory used to create StAX readers for input files. */
    private XMLInputFactory inputFactory;
    /** Delegate that fills in medicine fields for nested elements. */
    private MedicineBuilder medicineBuilder;

    public MedicinesStaxBuilder() {
        inputFactory = XMLInputFactory.newInstance();
        medicines = new HashSet<Medicine>();
        medicineBuilder = new MedicineBuilder();
    }

    /**
     * Returns the medicines collected by the most recent call to
     * {@link #buildSetMedicines(String)}.
     */
    public Set<Medicine> getMedicines() {
        return medicines;
    }

    /**
     * Parses the given XML file and adds every top-level medicine element
     * found to the internal set. Parsing and I/O errors are printed to the
     * error stream (behavior kept from the original implementation).
     *
     * @param filename path of the XML file to parse
     */
    public void buildSetMedicines(String filename) {
        try (FileInputStream inputStream = new FileInputStream(new File(filename))) {
            XMLStreamReader reader = inputFactory.createXMLStreamReader(inputStream);
            try {
                // StAX pull-parsing loop: advance event by event.
                while (reader.hasNext()) {
                    // Use the primitive event code directly; the previous
                    // Integer boxing plus equals() against an int constant
                    // was needless indirection.
                    int type = reader.next();
                    if (type == XMLStreamConstants.START_ELEMENT) {
                        String name = reader.getLocalName();
                        if (name.equals(MedicineXmlTag.MEDICINE.getValue())) {
                            Medicine medicine = buildMedicine(reader);
                            medicines.add(medicine);
                        }
                    }
                }
            } finally {
                // try-with-resources closes the stream but not the StAX
                // reader itself; release its resources explicitly.
                reader.close();
            }
        } catch (XMLStreamException | IOException e) {
            // FileNotFoundException is an IOException, so a single
            // IOException arm replaces the previous redundant catch.
            e.printStackTrace();
        }
    }

    /**
     * Builds a single {@link Medicine} from the reader, which must be
     * positioned on a medicine start element.
     *
     * @throws XMLStreamException if the document ends before the closing
     *                            medicine tag is reached
     */
    private Medicine buildMedicine(XMLStreamReader reader)
            throws XMLStreamException {
        Medicine medicine = new Medicine();
        medicine.setId(reader.getAttributeValue(null, MedicineXmlTag.ID.getValue()));
        // NOTE(review): getAttributeValue returns null when the attribute is
        // absent — confirm Medicine tolerates a null id/name.
        medicine.setName(reader.getAttributeValue(null,
                MedicineXmlTag.NAME.getValue()));
        String name;
        while (reader.hasNext()) {
            int type = reader.next();
            switch (type) {
                case XMLStreamConstants.START_ELEMENT:
                    name = reader.getLocalName();
                    MedicineXmlTag currentXmlTag = MedicineXmlTag.valueOf(name.toUpperCase());
                    medicine = medicineBuilder.buildBySTAX(medicine, currentXmlTag, reader);
                    break;
                case XMLStreamConstants.END_ELEMENT:
                    name = reader.getLocalName();
                    // Enum constants are singletons, so identity comparison is safe.
                    if (MedicineXmlTag.valueOf(name.toUpperCase()) == MedicineXmlTag.MEDICINE) {
                        return medicine;
                    }
                    break;
            }
        }
        throw new XMLStreamException("Unknown element in tag <medicine>");
    }
}
|
import pathlib

# Absolute path of the directory that contains the given file.
file_path = pathlib.Path("my_file.txt")
print(file_path.parent.absolute())

# Absolute path of the current working directory.
print(pathlib.Path().absolute())
|
// Copyright (C) 2018, Baking Bits Studios - All Rights Reserved
package configs
import (
"fmt"
"os"
log "github.com/sirupsen/logrus"
"github.com/spf13/viper"
)
const defaultEnv = "int"
// Configuration is the top level configuration data from
// the config file.
// Configuration is the top level configuration data from
// the config file.
type Configuration struct {
	// Backend holds the backend section of the config file.
	Backend BackendConfiguration
	// Authentication holds the authentication section of the config file.
	Authentication AuthenticationConfiguration
}
// LoadConfig loads the config file into the configuration
// objects and returns the top level configuration.
// LoadConfig loads the config file into the configuration
// objects and returns the top level configuration.
//
// The DNDTEXTAPI_ENV environment variable selects which config file
// (config-<env>) is read; it defaults to the integration environment.
// Any failure to read or decode the config terminates the process.
func LoadConfig() (configuration Configuration) {
	// TODO: What about pflags?
	// Use the DNDTEXTAPI_ENV variable to determine which config file to load up
	viper.SetEnvPrefix("DNDTEXTAPI")
	if err := viper.BindEnv("ENV"); err != nil {
		// Previously ignored; surfacing it keeps misconfiguration visible.
		log.WithError(err).Error("Unable to bind ENV variable.")
		os.Exit(-1)
	}
	env := viper.Get("ENV")
	// Default to the integration environment config since that works out of the box
	if env == nil {
		env = defaultEnv
	}

	// Load the config file and use it to populate the config structs.
	viper.SetConfigName(fmt.Sprintf("config-%s", env))
	viper.AddConfigPath(".")
	if err := viper.ReadInConfig(); err != nil {
		// log.Fatal exits with status 1, which made the original
		// os.Exit(-1) after it unreachable; log at Error level and
		// exit explicitly so the intended status is actually used.
		log.WithError(err).Error("Error reading config file.")
		os.Exit(-1)
	}
	if err := viper.Unmarshal(&configuration); err != nil {
		log.WithError(err).Error("Unable to decode into struct.")
		os.Exit(-1)
	}
	return
}
|
import { Counter } from './counter';

// Register the Counter class as the <ar-counter> custom element.
customElements.define('ar-counter', Counter);
|
module GroupTree
  # Renders the requested page of groups as HTML or JSON.
  #
  # With a +filter+ param, matching groups are returned together with
  # their ancestors; otherwise only children of +parent_id+ (root
  # groups when absent) are listed.
  def render_group_tree(groups)
    @groups =
      if params[:filter].present?
        Gitlab::GroupHierarchy
          .new(groups.search(params[:filter]))
          .base_and_ancestors
      else
        # Without a parent-id only the root groups are shown
        groups.where(parent_id: params[:parent_id])
      end

    @sort = params[:sort]
    @groups = @groups
      .with_selects_for_list(archived: params[:archived])
      .sort(@sort)
      .page(params[:page])

    respond_to do |format|
      format.html
      format.json do
        serializer = GroupChildSerializer
          .new(current_user: current_user)
          .with_pagination(request, response)
        # Filtered results are rendered with their full ancestor chain
        serializer.expand_hierarchy if params[:filter].present?
        render json: serializer.represent(@groups)
      end
    end
  end
end
|
Signaling Messages
===========================================================
These two fields are always present:
- *Full User id (with device id)*
- *Message Type*
SignalR sends it as **SIGNAL** message:
- **Full User id**
- **Message Type** (I_AM_HERE, HELLO, etc.)
- *Message Payload*
Client code connection is configured as:
\_signalRConnection.On\<string, string, string\>("**SIGNAL**",
(string senderId, string messageType, string messagePayload) =\>
{
});
**Messages (message types):**
**I_AM_HERE** (send it first time when connected or re-connected)
*User id*
*Payload:*
- **Timeout** (if not received any message from me within *Timeout* seconds,
consider me as offline)
**HELLO** (send it back to I_AM_HERE)
*User id*
*Payload:*
- **Timeout** (if not received any message from me within *Timeout* seconds,
consider me as offline)
- **Status (ONLINE, OFFLINE, BUSY, AWAY, DONOTDISTURB)**
(same as **I_AM_HERE**)
**STILL_HERE** (broadcast it every \~Timeout\*0.9 seconds)
*User id*
*Payload:*
- **Status (ONLINE, OFFLINE, BUSY, AWAY, DONOTDISTURB)**
>
**I_AM_OUTTA_HERE** (signing out on a device)
*User id*
*Custom types (examples)*
**WEBRTC** (WebRTC SDP/ICE, must be sent to a single user+device)
*User id*
*Payload:*
- **SDP/ICE message**
**MSGBOX** (plain text messages, for debugging purposes)
*User id*
*Payload:*
- **Message**
|
use point_collector_api::routes::{create, index};
use rocket::{
serde::{json::Json, Deserialize},
Data,
};
#[macro_use]
extern crate rocket;
/// Launch entry point: builds the Rocket instance and mounts the
/// `index` and `create` handlers under the `/api/v1` prefix.
#[launch]
fn rocket() -> _ {
    let api_routes = routes![index, create];
    rocket::build().mount("/api/v1", api_routes)
}
|
import "package:kt_dart/collection.dart";
import "package:kt_dart/src/collection/impl/iterable.dart";
import "package:test/test.dart";
import "../test/assert_dart.dart";
// Runs the shared KtIterable test suite against every concrete
// collection implementation in kt_dart, then checks `cast` behavior
// for single elements, empty collections and infinite sequences.
void main() {
  group("KtIterableExtensions", () {
    group("iterable", () {
      // TODO replace with Iterable.generate once implemented
      testIterable(<T>() => EmptyIterable<T>(),
          <T>(Iterable<T> iterable) => DartIterable(iterable));
    });
    group("list", () {
      testIterable(<T>() => emptyList<T>(),
          <T>(Iterable<T> iterable) => listFrom(iterable));
    });
    group("KtList", () {
      testIterable(<T>() => KtList<T>.empty(),
          <T>(Iterable<T> iterable) => KtList<T>.from(iterable));
    });
    group("mutableList", () {
      testIterable(<T>() => emptyList<T>(),
          <T>(Iterable<T> iterable) => mutableListFrom(iterable));
    });
    group("KtMutableList", () {
      testIterable(<T>() => KtMutableList<T>.empty(),
          <T>(Iterable<T> iterable) => KtMutableList<T>.from(iterable));
    });
    group("set", () {
      testIterable(<T>() => emptySet<T>(),
          <T>(Iterable<T> iterable) => setFrom(iterable));
    });
    group("KtSet", () {
      testIterable(<T>() => KtSet<T>.empty(),
          <T>(Iterable<T> iterable) => KtSet<T>.from(iterable));
    });
    // Hash-based sets give no iteration-order guarantee, so the
    // order-sensitive expectations are disabled via `ordered: false`.
    group("hashset", () {
      testIterable(<T>() => emptySet<T>(),
          <T>(Iterable<T> iterable) => hashSetFrom(iterable),
          ordered: false);
    });
    group("KtHashSet", () {
      testIterable(<T>() => KtHashSet<T>.empty(),
          <T>(Iterable<T> iterable) => KtHashSet<T>.from(iterable),
          ordered: false);
    });
    group("linkedSet", () {
      testIterable(<T>() => linkedSetOf<T>(),
          <T>(Iterable<T> iterable) => linkedSetFrom(iterable));
    });
    group("KtLinkedSet", () {
      testIterable(<T>() => KtLinkedSet<T>.empty(),
          <T>(Iterable<T> iterable) => KtLinkedSet<T>.from(iterable));
    });
    // The cast() views must still satisfy the full iterable contract.
    group("CastKtIterable", () {
      testIterable(<T>() => DartIterable([]).cast(),
          <T>(Iterable<T> iterable) => DartIterable(iterable).cast());
    });
    group("CastKtList", () {
      testIterable(<T>() => KtList<T>.empty().cast(),
          <T>(Iterable<T> iterable) => KtList<T>.from(iterable).cast());
    });
  });
  group("cast", () {
    test("cast single element", () {
      // Mixed-type input; only the single String survives the filter+cast.
      final dynamicIterable = DartIterable(["string", 1, null]);
      final KtList<String> stringOnly =
          // ignore: unnecessary_cast
          (dynamicIterable.filter((it) => it is String) as KtIterable<dynamic>)
              .cast<String>()
              .toList();
      expect(stringOnly.size, 1);
      expect(stringOnly, listOf("string"));
    });
    test("cast empty list", () {
      final dynamicIterable = DartIterable<int>([]);
      final KtList<String> stringOnly =
          // ignore: unnecessary_cast
          (dynamicIterable.filter((it) => it is String) as KtIterable<dynamic>)
              .cast<String>()
              .toList();
      expect(stringOnly.size, 0);
      expect(stringOnly, listOf());
      expect(stringOnly.getOrElse(0, (_) => "fallback"), "fallback");
    });
    group("cast infinity", () {
      // Lazily yields 0, 1, 2, ... forever; proves that cast() stays lazy.
      Iterable<num> infinityNums() sync* {
        int i = 0;
        // ignore: literal_only_boolean_expressions
        while (true) {
          yield i++;
        }
      }
      test("cast infinity iterable", () {
        // only testing this for iterable not lists because they try to return all elements
        final infinity = DartIterable(infinityNums());
        final KtIterable<int> ints = infinity.cast();
        expect(ints.take(101).drop(100).first(), 100);
      });
      test("cast infinity iterator", () {
        final infinity = DartIterable(infinityNums());
        final KtIterator<int> iterator = infinity.cast<int>().iterator();
        expect(iterator.hasNext(), isTrue);
        expect(iterator.next(), 0);
        expect(iterator.hasNext(), isTrue);
        expect(iterator.next(), 1);
        expect(iterator.hasNext(), isTrue);
        expect(iterator.next(), 2);
        expect(iterator.hasNext(), isTrue);
        expect(iterator.next(), 3);
      });
    });
    test("cast iterator with 0 elements", () {
      final noElements = DartIterable<String>([]).cast<int>();
      final KtIterator<int> iterator = noElements.iterator();
      expect(iterator.hasNext(), isFalse);
      expect(() => iterator.next(), throwsA(isA<NoSuchElementException>()));
    });
  });
}
void testIterable(KtIterable<T> Function<T>() emptyIterable,
KtIterable<T> Function<T>(Iterable<T> iterable) iterableOf,
{bool ordered = true}) {
group("all", () {
test("matches all", () {
final iterable = iterableOf(["abc", "bcd", "cde"]);
expect(iterable.all((e) => e.contains("c")), isTrue);
});
test("matches none", () {
final iterable = iterableOf(["abc", "bcd", "cde"]);
expect(iterable.all((e) => e.contains("x")), isFalse);
});
test("matches one", () {
final iterable = iterableOf(["abc", "bcd", "cde"]);
expect(iterable.all((e) => e.contains("a")), isFalse);
});
});
group("any", () {
test("matches single", () {
final iterable = iterableOf(["abc", "bcd", "cde"]);
expect(iterable.any((e) => e.contains("a")), isTrue);
});
test("matches all", () {
final iterable = iterableOf(["abc", "bcd", "cde"]);
expect(iterable.any((e) => e.contains("c")), isTrue);
});
test("is false when none matches", () {
final iterable = iterableOf(["abc", "bcd", "cde"]);
expect(iterable.any((e) => e.contains("x")), isFalse);
});
test("any without args returns true with items", () {
final iterable = iterableOf(["abc", "bcd", "cde"]);
expect(iterable.any(), isTrue);
});
test("any without args returns false for no items", () {
final iterable = emptyIterable();
expect(iterable.any(), isFalse);
});
});
group("associate", () {
test("associate", () {
final list = iterableOf(["a", "b", "c"]);
final result = list.associate((it) => KtPair(it.toUpperCase(), it));
final expected = mapFrom({"A": "a", "B": "b", "C": "c"});
expect(result, equals(expected));
});
test("associate on empty map", () {
final list = emptyIterable<String>();
final result = list.associate((it) => KtPair(it.toUpperCase(), it));
expect(result, equals(emptyMap()));
});
});
group("associateBy", () {
test("associateBy", () {
final list = iterableOf(["a", "b", "c"]);
final result = list.associateBy((it) => it.toUpperCase());
final expected = mapFrom({"A": "a", "B": "b", "C": "c"});
expect(result, equals(expected));
});
test("associateBy on empty map", () {
final list = emptyList<String>();
final result = list.associateWith((it) => it.toUpperCase());
expect(result, equals(emptyMap()));
});
test("when conflicting keys, use last ", () {
final list = iterableOf(["a", "b", "c"]);
final result = list.associateBy((it) => it.length);
expect(result.size, equals(1));
expect(result.containsKey(1), isTrue);
});
});
group("associateByTransform", () {
test("associateByTransform", () {
final list = iterableOf(["a", "bb", "ccc"]);
final result = list.associateByTransform(
(it) => it.length, (it) => it.toUpperCase());
final expected = mapFrom({1: "A", 2: "BB", 3: "CCC"});
expect(result, equals(expected));
});
test("associateByTransform on empty map", () {
final list = emptyList<String>();
final result = list.associateWith((it) => it.toUpperCase());
expect(result, equals(emptyMap()));
});
});
group("associateWith", () {
test("associateWith", () {
final iterable = iterableOf(["a", "b", "c"]);
final result = iterable.associateWith((it) => it.toUpperCase());
final expected = mapFrom({"a": "A", "b": "B", "c": "C"});
expect(result, equals(expected));
});
test("associateWith on empty map", () {
final iterable = emptyIterable<String>();
final result = iterable.associateWith((it) => it.toUpperCase());
expect(result, equals(emptyMap()));
});
});
group("associateWithTo", () {
test("associateWithTo same type", () {
final iterable = iterableOf(["a", "bb", "ccc"]);
final result = mutableMapFrom<String, int>();
final filtered = iterable.associateWithTo(result, (it) => it.length);
expect(identical(result, filtered), isTrue);
expect(result, mapFrom({"a": 1, "bb": 2, "ccc": 3}));
});
test("associateWithTo super type", () {
final iterable = iterableOf(["a", "bb", "ccc"]);
final result = mutableMapFrom<String, num>();
final filtered = iterable.associateWithTo(result, (it) => it.length);
expect(identical(result, filtered), isTrue);
expect(result, mapFrom({"a": 1, "bb": 2, "ccc": 3}));
});
test("associateWithTo wrong type throws", () {
final iterable = iterableOf(["a", "b", "c"]);
final result = mutableMapFrom<String, String>();
final e = catchException<ArgumentError>(
() => iterable.associateWithTo(result, (entry) => entry.length));
expect(
e.message,
allOf(
contains("associateWithTo"),
contains("destination"),
contains("<String, String>"),
contains("<String, int>"),
));
});
});
group("average", () {
test("average of ints", () {
final ints = iterableOf([1, 2, 3, 4]);
final result = ints.average();
expect(result, equals(2.5));
});
test("average of empty is NaN", () {
final ints = emptyIterable<num>();
final result = ints.average();
expect(identical(result, double.nan), isTrue);
});
test("average of nums", () {
final ints = iterableOf([1, 2.0, 3, 4]);
final result = ints.average();
expect(result, equals(2.5));
});
test("average of nums with NaN (nan is ignored)", () {
final ints = iterableOf([1, 2.0, double.nan, 3, double.nan, 4]);
final result = ints.average();
expect(result, equals(2.5));
});
test("average of nan only returns nan", () {
final ints = iterableOf([double.nan, double.nan, double.nan]);
final result = ints.average();
expect(identical(result, double.nan), isTrue);
});
});
group("averageBy", () {
test("averageBy of ints", () {
final ints = iterableOf([1, 2, 3, 4]);
final result = ints.averageBy((it) => it);
expect(result, equals(2.5));
});
test("averageBy of empty is NaN", () {
final ints = emptyIterable<num>();
final result = ints.averageBy((it) => it);
expect(identical(result, double.nan), isTrue);
});
test("averageBy of nums", () {
final ints = iterableOf([1, 2.0, 3, 4]);
final result = ints.averageBy((it) => it);
expect(result, equals(2.5));
});
test("averageBy of nums with NaN (nan is ignored)", () {
final ints = iterableOf([1, 2.0, double.nan, 3, double.nan, 4]);
final result = ints.averageBy((it) => it);
expect(result, equals(2.5));
});
test("average of nan only returns nan", () {
final ints = iterableOf([double.nan, double.nan, double.nan]);
final result = ints.averageBy((it) => it);
expect(identical(result, double.nan), isTrue);
});
});
group("distinct", () {
if (ordered) {
test("distinct elements", () {
final iterable = iterableOf(["a", "b", "c", "b"]);
expect(iterable.distinct(), equals(listOf("a", "b", "c")));
});
test("distinct by ordered", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
expect(iterable.distinctBy((it) => it.length),
equals(listOf("paul", "peter")));
});
} else {
test("distinct elements", () {
final iterable = iterableOf(["a", "b", "c", "b"]);
expect(iterable.distinct().toSet(), equals(setOf("a", "b", "c")));
});
test("distinct by unordered", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
final distinct = iterable.distinctBy((it) => it.length);
expect(distinct.contains("peter"), true);
expect(
distinct.contains("paul") ||
distinct.contains("john") ||
distinct.contains("lisa"),
true);
});
}
});
group("count", () {
test("count elements", () {
expect(iterableOf([1, 2, 3, 4, 5]).count(), 5);
});
test("count even", () {
expect(iterableOf([1, 2, 3, 4, 5]).count((it) => it % 2 == 0), 2);
});
});
group("chunked", () {
test("chunk", () {
final chunks = iterableOf([1, 2, 3, 4, 5]).chunked(3);
expect(chunks, listOf(listOf(1, 2, 3), listOf(4, 5)));
});
test("chunkedTransform", () {
final chunks =
iterableOf([1, 2, 3, 4, 5]).chunkedTransform(3, (it) => it.sum());
expect(chunks, listOf(6, 9));
});
});
group("dart property", () {
test("dart property returns a dart iterable", () {
final Iterable<String> iterable = iterableOf(["a", "b", "c"]).dart;
if (ordered) {
expect(iterable.first, "a");
expect(iterable.skip(1).first, "b");
expect(iterable.skip(2).first, "c");
}
expect(iterable.length, 3);
});
test("dart property returns empty as original", () {
final Iterable<String> iterable = emptyIterable<String>().dart;
expect(iterable.length, 0);
});
test('dart work on all objects', () {
// there was once a bug where it only worked for Comparable<T>
iterableOf(<dynamic>[]).dart;
iterableOf(<Object>[]).dart;
iterableOf(<num>[]).dart;
iterableOf(<RegExp>[]).dart;
iterableOf(<Future>[]).dart;
});
});
group("drop", () {
if (ordered) {
test("drop first value ordered", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.drop(1), equals(listOf("b", "c")));
});
} else {
test("drop on iterable returns a iterable", () {
final iterable = emptyIterable<int>();
expect(iterable.drop(1), const TypeMatcher<KtList<int>>());
});
}
test("drop empty does nothing", () {
final iterable = emptyIterable<int>();
expect(iterable.drop(1).toList(), equals(emptyList<int>()));
});
test("drop on iterable returns a iterable", () {
final iterable = emptyIterable<int>();
expect(iterable.drop(1), const TypeMatcher<KtList<int>>());
});
test("drop negative, drops nothing", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.drop(-10).toList(), iterable.toList());
});
});
group("dropWhile", () {
if (ordered) {
test("dropWhile two", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.dropWhile((it) => it != "c"), equals(listOf("c")));
});
test("dropWhile one", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.dropWhile((it) => it != "b"), equals(listOf("b", "c")));
});
} else {
test("dropWhile first value unordered", () {
final iterable = iterableOf(["a", "b", "c"]);
int i = 0;
expect(iterable.dropWhile((_) => ++i <= 2).size, 1);
});
}
test("dropWhile empty does nothing", () {
final iterable = emptyIterable<int>();
expect(
iterable.dropWhile((_) => false).toList(), equals(emptyList<int>()));
});
test("dropWhile all makes an empty list", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(
iterable.dropWhile((_) => true).toList(), equals(emptyList<int>()));
});
test("dropWhile on iterable returns a iterable", () {
final iterable = emptyIterable<int>();
expect(
iterable.dropWhile((_) => false), const TypeMatcher<KtList<int>>());
});
});
group("elementAt", () {
if (ordered) {
test("returns correct elements", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.elementAt(0), equals("a"));
expect(iterable.elementAt(1), equals("b"));
expect(iterable.elementAt(2), equals("c"));
});
} else {
test("returns all elements", () {
final iterable = iterableOf(["a", "b", "c"]);
final set = setOf(iterable.elementAt(0), iterable.elementAt(1),
iterable.elementAt(2));
expect(set.containsAll(iterable.toSet()), isTrue);
});
}
test("throws out of bounds exceptions", () {
final iterable = iterableOf(["a", "b", "c"]);
final eOver = catchException<IndexOutOfBoundsException>(
() => iterable.elementAt(3));
expect(eOver.message, allOf(contains("index"), contains("3")));
final eUnder = catchException<IndexOutOfBoundsException>(
() => iterable.elementAt(-1));
expect(eUnder.message, allOf(contains("index"), contains("-1")));
});
});
group("elementAtOrElse", () {
if (ordered) {
test("returns correct elements", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.elementAtOrElse(0, (i) => "x"), equals("a"));
expect(iterable.elementAtOrElse(1, (i) => "x"), equals("b"));
expect(iterable.elementAtOrElse(2, (i) => "x"), equals("c"));
});
} else {
test("returns all elements", () {
final iterable = iterableOf(["a", "b", "c"]);
final set = setOf(
iterable.elementAtOrElse(0, (i) => "x"),
iterable.elementAtOrElse(1, (i) => "x"),
iterable.elementAtOrElse(2, (i) => "x"));
expect(set.containsAll(iterable.toSet()), isTrue);
});
}
test("returns else case", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.elementAtOrElse(-1, (i) => "x"), equals("x"));
});
test("returns else case based on index", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.elementAtOrElse(-1, (i) => "$i"), equals("-1"));
expect(iterable.elementAtOrElse(10, (i) => "$i"), equals("10"));
});
});
group("elementAtOrNull", () {
if (ordered) {
test("returns correct elements", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.elementAtOrNull(0), equals("a"));
expect(iterable.elementAtOrNull(1), equals("b"));
expect(iterable.elementAtOrNull(2), equals("c"));
});
} else {
test("returns all elements", () {
final iterable = iterableOf(["a", "b", "c"]);
final set = setOf(iterable.elementAtOrNull(0),
iterable.elementAtOrNull(1), iterable.elementAtOrNull(2));
expect(set.containsAll(iterable.toSet()), isTrue);
});
}
test("returns null when out of range", () {
final iterable = iterableOf(["a", "b", "c"]);
expect(iterable.elementAtOrNull(-1), isNull);
expect(iterable.elementAtOrNull(10), isNull);
});
});
group("filter", () {
test("filter", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
expect(iterable.filter((it) => it.contains("a")).toSet(),
equals(setOf("paul", "lisa")));
});
});
group("filterTo", () {
test("filterTo same type", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<int>();
final filtered = iterable.filterTo(result, (it) => it < 10);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(4, -12));
} else {
expect(result.toSet(), setOf(4, -12));
}
});
test("filterTo super type", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<num>();
final filtered = iterable.filterTo(result, (it) => it < 10);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(4, -12));
} else {
expect(result.toSet(), equals(setOf(4, -12)));
}
});
test("filterTo wrong type throws", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<String>();
final e = catchException<ArgumentError>(
() => iterable.filterTo(result, (it) => it < 10));
expect(
e.message,
allOf(
contains("filterTo"),
contains("destination"),
contains("<int>"),
contains("<String>"),
));
});
});
group("filterIndexed", () {
test("filterIndexed", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
var i = 0;
expect(
iterable.filterIndexed((index, it) {
expect(index, i);
i++;
return it.contains("a");
}).toSet(),
equals(setOf("paul", "lisa")));
});
});
group("filterIndexedTo", () {
test("filterIndexedTo index is incrementing", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<int>();
var index = 0;
iterable.filterIndexedTo(result, (i, it) {
expect(i, index);
index++;
return true;
});
expect(index, 4);
});
test("filterIndexedTo same type", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<int>();
final filtered = iterable.filterIndexedTo(result, (i, it) => it < 10);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(4, -12));
} else {
expect(result.toSet(), setOf(4, -12));
}
});
test("filterIndexedTo super type", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<num>();
final filtered = iterable.filterIndexedTo(result, (i, it) => it < 10);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(4, -12));
} else {
expect(result.toSet(), equals(setOf(4, -12)));
}
});
test("filterIndexedTo wrong type throws", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<String>();
final e = catchException<ArgumentError>(
() => iterable.filterIndexedTo(result, (i, it) => it < 10));
expect(
e.message,
allOf(
contains("filterIndexedTo"),
contains("destination"),
contains("<int>"),
contains("<String>"),
));
});
});
group("filterNot", () {
test("filterNot", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
expect(iterable.filterNot((it) => it.contains("a")).toSet(),
equals(setOf("peter", "john")));
});
});
group("filterNotTo", () {
test("filterNotTo same type", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<int>();
final filtered = iterable.filterNotTo(result, (it) => it < 10);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(25, 10));
} else {
expect(result.toSet(), setOf(25, 10));
}
});
test("filterNotTo super type", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<num>();
final filtered = iterable.filterNotTo(result, (it) => it < 10);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(25, 10));
} else {
expect(result.toSet(), equals(setOf(25, 10)));
}
});
test("filterNotTo wrong type throws", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<String>();
final e = catchException<ArgumentError>(
() => iterable.filterNotTo(result, (it) => it < 10));
expect(
e.message,
allOf(
contains("filterNotTo"),
contains("destination"),
contains("<int>"),
contains("<String>"),
));
});
});
group("filterNotNull", () {
test("filterNotNull", () {
final KtIterable<String?> iterable =
iterableOf(["paul", null, "john", "lisa"]);
final KtSet<String> set = iterable.filterNotNull().toSet();
expect(set, equals(setOf("paul", "john", "lisa")));
});
});
group("filterNotNullTo", () {
test("filterNotNullTo same type", () {
final KtIterable<int?> iterable = iterableOf([4, 25, null, 10]);
final result = mutableListOf<int>();
final KtMutableList<int> filtered = iterable.filterNotNullTo(result);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(4, 25, 10));
} else {
expect(result.toSet(), setOf(4, 25, 10));
}
});
test("filterNotNullTo super type", () {
final iterable = iterableOf([4, 25, null, 10]);
final result = mutableListOf<num?>();
final filtered = iterable.filterNotNullTo(result);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(4, 25, 10));
} else {
expect(result.toSet(), equals(setOf(4, 25, 10)));
}
});
test("filterNotNullTo wrong type throws", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<String>();
final e =
catchException<ArgumentError>(() => iterable.filterNotNullTo(result));
expect(
e.message,
allOf(
contains("filterNotNullTo"),
contains("destination"),
contains("<int>"),
contains("<String>"),
));
});
});
group("filterIsInstance", () {
test("filterIsInstance", () {
final iterable = iterableOf<Object?>(["paul", null, "john", 1, "lisa"]);
expect(iterable.filterIsInstance<String>().toSet(),
equals(setOf("paul", "john", "lisa")));
});
});
group("find", () {
test("find item", () {
final iterable = iterableOf(["paul", "john", "max", "lisa"]);
final result = iterable.find((it) => it.contains("l"));
if (ordered) {
expect(result, "paul");
} else {
expect(result, anyOf("paul", "lisa"));
}
});
});
group("findLast", () {
test("findLast item", () {
final iterable = iterableOf(["paul", "john", "max", "lisa"]);
final result = iterable.findLast((it) => it.contains("l"));
if (ordered) {
expect(result, "lisa");
} else {
expect(result, anyOf("paul", "lisa"));
}
});
});
group("first", () {
if (ordered) {
test("get first element", () {
expect(iterableOf(["a", "b"]).first(), "a");
});
} else {
test("get random first element", () {
final result = iterableOf(["a", "b"]).first();
expect(result == "a" || result == "b", true);
});
}
test("first throws for no elements", () {
expect(() => emptyIterable().first(),
throwsA(const TypeMatcher<NoSuchElementException>()));
});
test("finds nothing throws", () {
expect(() => iterableOf<String>(["a"]).first((it) => it == "b"),
throwsA(const TypeMatcher<NoSuchElementException>()));
});
});
group("firstOrNull", () {
if (ordered) {
test("get first element", () {
expect(iterableOf(["a", "b"]).firstOrNull(), "a");
});
} else {
test("get random first element", () {
final result = iterableOf(["a", "b"]).firstOrNull();
expect(result == "a" || result == "b", true);
});
}
test("firstOrNull returns null for empty", () {
expect(emptyIterable().firstOrNull(), isNull);
});
test("finds nothing throws", () {
expect(iterableOf<String>(["a"]).firstOrNull((it) => it == "b"), isNull);
});
});
group("flatMap", () {
test("flatMap int to string", () {
final iterable = iterableOf([1, 2, 3]);
expect(
iterable.flatMap((it) => iterableOf([it, it + 1, it + 2])).toList(),
listOf(1, 2, 3, 2, 3, 4, 3, 4, 5));
});
});
group("flatten", () {
test("empty", () {
final KtIterable<KtIterable<int>> nested =
emptyIterable<KtIterable<int>>();
expect(nested.flatten(), emptyList());
});
test("flatten KtIterable<KtIterable<T>>", () {
final nested = iterableOf([
iterableOf([1, 2, 3]),
iterableOf([4, 5, 6]),
iterableOf([7, 8, 9]),
]);
if (ordered) {
expect(nested.flatten(), listFrom([1, 2, 3, 4, 5, 6, 7, 8, 9]));
} else {
expect(nested.flatten().toSet(), setFrom([1, 2, 3, 4, 5, 6, 7, 8, 9]));
}
});
});
group("fold", () {
if (ordered) {
test("fold division", () {
final iterable = iterableOf([
[1, 2],
[3, 4],
[5, 6]
]);
final result = iterable.fold(
listFrom<int>(), (KtList<int> acc, it) => acc + listFrom(it));
expect(result, listOf(1, 2, 3, 4, 5, 6));
});
}
});
group("foldIndexed", () {
if (ordered) {
test("foldIndexed division", () {
final iterable = iterableOf([
[1, 2],
[3, 4],
[5, 6]
]);
var i = 0;
final result =
iterable.foldIndexed(listFrom<int>(), (index, KtList<int> acc, it) {
expect(index, i);
i++;
return acc + listFrom(it);
});
expect(result, listOf(1, 2, 3, 4, 5, 6));
});
}
});
group("forEach", () {
test("forEach", () {
final result = mutableListOf<String>();
final iterable = iterableOf(["a", "b", "c", "d"]);
iterable.forEach((it) {
result.add(it);
});
if (ordered) {
expect(result, listOf("a", "b", "c", "d"));
} else {
expect(result.size, 4);
expect(result.toSet(), iterable.toSet());
}
});
});
group("forEachIndexed", () {
test("forEachIndexed", () {
final result = mutableListOf<String>();
final iterable = iterableOf(["a", "b", "c", "d"]);
iterable.forEachIndexed((index, it) {
result.add("$index$it");
});
if (ordered) {
expect(result, listOf("0a", "1b", "2c", "3d"));
} else {
expect(result.size, 4);
expect(result.toSet(),
iterable.mapIndexed((index, it) => "$index$it").toSet());
}
});
});
group("groupBy", () {
test("basic generic return type 100% matches", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
final grouped = iterable.groupBy((it) => it.length);
// Fixes https://github.com/passsy/kt.dart/issues/139
expect(grouped.runtimeType.toString(), contains("<int, KtList<String>"));
});
test("valuetransform generic return type 100% matches", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
final grouped = iterable.groupByTransform(
(it) => it.length, (it) => it.toUpperCase());
// Fixes https://github.com/passsy/kt.dart/issues/139
expect(grouped.runtimeType.toString(), contains("<int, KtList<String>"));
});
if (ordered) {
test("basic", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
expect(
iterable.groupBy((it) => it.length),
equals(mapFrom({
4: listOf("paul", "john", "lisa"),
5: listOf("peter"),
})));
});
test("valuetransform", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
expect(
iterable.groupByTransform(
(it) => it.length, (it) => it.toUpperCase()),
equals(mapFrom({
4: listOf("PAUL", "JOHN", "LISA"),
5: listOf("PETER"),
})));
});
} else {
test("basic", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
expect(
iterable
.groupBy((it) => it.length)
.mapValues((it) => it.value.toSet()),
equals(mapFrom({
4: setOf("paul", "john", "lisa"),
5: setOf("peter"),
})));
});
test("valuetransform", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
expect(
iterable
.groupByTransform((it) => it.length, (it) => it.toUpperCase())
.mapValues((it) => it.value.toSet()),
equals(mapFrom({
4: setOf("PAUL", "JOHN", "LISA"),
5: setOf("PETER"),
})));
});
}
});
group("groupByTo", () {
test("groupByTo same type", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
final result = mutableMapFrom<int, KtMutableList<String>>();
final grouped = iterable.groupByTo(result, (it) => it.length);
expect(identical(result, grouped), isTrue);
expect(
result,
mapFrom({
4: iterableOf(["paul", "john", "lisa"]).toList(),
5: listOf("peter"),
}));
});
test("groupByTo super type", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
final result = mutableMapFrom<int, KtMutableList<Pattern>>();
final grouped = iterable.groupByTo(result, (it) => it.length);
expect(identical(result, grouped), isTrue);
expect(
result,
mapFrom({
4: iterableOf(["paul", "john", "lisa"]).toList(),
5: listOf("peter"),
}));
});
test("groupByTo wrong type throws", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
final result = mutableMapFrom<int, KtMutableList<int>>();
final e = catchException<ArgumentError>(
() => iterable.groupByTo(result, (it) => it.length));
expect(
e.message,
allOf(
contains("groupByTo"),
contains("destination"),
contains("KtMutableList<int>"),
contains("KtMutableList<String>"),
));
});
});
group("groupByToTransform", () {
test("groupByToTransform same type", () {
final iterable = iterableOf(["paul", "peter", "john", "lisa"]);
final result = mutableMapFrom<int, KtMutableList<String>>();
final grouped = iterable.groupByToTransform(
result, (it) => it.length, (it) => it.toUpperCase());
expect(identical(result, grouped), isTrue);
if (ordered) {
expect(
result,
mapFrom({
4: listOf("PAUL", "JOHN", "LISA"),
5: listOf("PETER"),
}));
} else {
expect(result.size, 2);
expect(result[4]!.toSet(), setOf("PAUL", "JOHN", "LISA"));
expect(result[5], listOf("PETER"));
}
});
});
group("indexOf", () {
test("returns index", () {
final iterable = iterableOf(["a", "b", "c", "b"]);
final found = iterable.indexOf("b");
if (iterable.count() == 4) {
// ordered list
expect(found, 1);
} else {
// set, position is unknown
expect(found, isNot(-1));
}
});
});
group("indexOfFirst", () {
test("returns index", () {
final iterable = iterableOf(["a", "b", "c", "b"]);
final found = iterable.indexOfFirst((it) => it == "b");
if (iterable.count() == 4) {
// ordered list
expect(found, 1);
} else {
// set, position is unknown
expect(found, isNot(-1));
}
});
test("not found returns -1", () {
final iterable = iterableOf(["a", "b", "c", "b"]);
final found = iterable.indexOfFirst((it) => it == "x");
expect(found, -1);
});
});
group("indexOfLast", () {
test("returns index", () {
final iterable = iterableOf(["a", "b", "c", "b"]);
final found = iterable.indexOfLast((it) => it == "b");
if (iterable.count() == 4) {
// ordered list
expect(found, 3);
} else {
// set, position is unknown
expect(found, isNot(-1));
}
});
});
group("intersect", () {
test("remove one item", () {
final a = iterableOf(["paul", "john", "max", "lisa"]);
final b = iterableOf(["julie", "richard", "john", "lisa"]);
final result = a.intersect(b);
expect(result, setOf("john", "lisa"));
});
});
group("iter", () {
test("iterate using a for loop", () {
final items = KtMutableList<String>.empty();
for (final String s in iterableOf(["a", "b", "c"]).iter) {
items.add(s);
}
expect(items.size, 3);
if (ordered) {
expect(items, listOf("a", "b", "c"));
}
});
test('iter work on all objects', () {
iterableOf(<dynamic>[]).iter;
iterableOf(<Object>[]).iter;
iterableOf(<num>[]).iter;
iterableOf(<RegExp>[]).iter;
iterableOf(<Future>[]).iter;
});
});
group("joinToString", () {
if (ordered) {
test("joinToString", () {
final s = iterableOf(["a", "b", "c"]).joinToString();
expect(s, "a, b, c");
});
test("joinToString calls childs toString", () {
final s = iterableOf([listOf(1, 2, 3), const KtPair("a", "b"), "test"])
.joinToString();
expect(s, "[1, 2, 3], (a, b), test");
});
test("with transform", () {
final s = iterableOf(["a", "b", "c"])
.joinToString(transform: (it) => it.toUpperCase());
expect(s, "A, B, C");
});
test("custom separator", () {
final s = iterableOf(["a", "b", "c"]).joinToString(separator: "/");
expect(s, "a/b/c");
});
test("post and prefix", () {
final s =
iterableOf(["a", "b", "c"]).joinToString(prefix: "<", postfix: ">");
expect(s, "<a, b, c>");
});
test("limit length", () {
final s =
iterableOf([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).joinToString(limit: 7);
expect(s, "1, 2, 3, 4, 5, 6, 7, ...");
});
test("custom truncated", () {
final s = iterableOf([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
.joinToString(limit: 7, truncated: "(and many more)");
expect(s, "1, 2, 3, 4, 5, 6, 7, (and many more)");
});
}
});
group("last", () {
if (ordered) {
test("get last element", () {
expect(iterableOf(["a", "b"]).last(), "b");
});
} else {
test("get random last element", () {
final result = iterableOf(["a", "b"]).last();
expect(result == "a" || result == "b", true);
});
}
test("last throws for no elements", () {
expect(() => emptyIterable().last(),
throwsA(const TypeMatcher<NoSuchElementException>()));
});
test("finds nothing throws", () {
expect(() => iterableOf<String>(["a", "b", "c"]).last((it) => it == "x"),
throwsA(const TypeMatcher<NoSuchElementException>()));
});
test("finds nothing in empty throws", () {
expect(() => emptyIterable().last((it) => it == "x"),
throwsA(const TypeMatcher<NoSuchElementException>()));
});
test("returns null when null is the last element", () {
expect(listFrom([1, 2, null]).last(), null);
expect(listFrom([1, null, 2]).last(), 2);
});
});
group("lastOrNull", () {
if (ordered) {
test("get lastOrNull element", () {
expect(iterableOf(["a", "b"]).lastOrNull(), "b");
});
} else {
test("get random last element", () {
final result = iterableOf(["a", "b"]).lastOrNull();
expect(result == "a" || result == "b", true);
});
}
test("lastOrNull returns null for empty", () {
expect(emptyIterable().lastOrNull(), isNull);
});
test("finds nothing throws", () {
expect(iterableOf<String>(["a"]).lastOrNull((it) => it == "b"), isNull);
});
});
group("lastIndexOf", () {
test("returns last index", () {
final iterable = iterableOf(["a", "b", "c", "b"]);
final found = iterable.lastIndexOf("b");
if (iterable.count() == 4) {
// ordered list
expect(found, 3);
} else {
// set, position is unknown
expect(found, isNot(-1));
}
});
});
group("map", () {
test("map int to string", () {
final iterable = iterableOf([1, 2, 3]);
expect(
iterable.map((it) => it.toString()).toList(), listOf("1", "2", "3"));
});
});
group("map", () {
test("map int to string", () {
final iterable = iterableOf([1, 2, 3]);
expect(
iterable.map((it) => it.toString()).toList(), listOf("1", "2", "3"));
});
});
group("mapNotNull", () {
test("mapNotNull int to string", () {
final iterable = iterableOf([1, null, 2, null, 3]);
expect(iterable.mapNotNull((it) => it?.toString()).toList(),
listOf("1", "2", "3"));
});
});
group("mapNotNullTo", () {
test("mapNotNullTo int to string", () {
final list = mutableListOf<String>();
final iterable = iterableOf([1, null, 2, null, 3]);
iterable.mapNotNullTo(list, (it) => it?.toString());
expect(list, listOf("1", "2", "3"));
});
});
group("mapTo", () {
test("mapTo int to string", () {
final list = mutableListOf<String>();
final iterable = iterableOf([1, 2, 3]);
iterable.mapTo(list, (it) => it.toString());
expect(list, listOf("1", "2", "3"));
});
});
if (ordered) {
group("mapIndexedTo", () {
test("mapIndexedTo int to string", () {
final list = mutableListOf<String>();
final iterable = iterableOf(["a", "b", "c"]);
iterable.mapIndexedTo(list, (index, it) => "$index$it");
expect(list, listOf("0a", "1b", "2c"));
});
});
}
if (ordered) {
group("mapIndexed", () {
test("mapIndexed int to string", () {
final iterable = iterableOf(["a", "b", "c"]);
final result = iterable.mapIndexed((index, it) => "$index$it");
expect(result, listOf("0a", "1b", "2c"));
});
});
}
if (ordered) {
group("mapIndexedNotNull", () {
test("mapIndexedNotNull int to string", () {
final KtIterable<String?> iterable = iterableOf(["a", null, "b", "c"]);
final KtList<String> result = iterable.mapIndexedNotNull((index, it) {
if (it == null) return null;
return "$index$it";
}).toList();
expect(result, listOf("0a", "2b", "3c"));
});
});
}
if (ordered) {
group("mapIndexedNotNull", () {
test("mapIndexedNotNull int to string", () {
final set = linkedSetOf<String>();
final iterable = iterableOf(["a", null, "b", "c"]);
iterable.mapIndexedNotNullTo(set, (index, it) {
if (it == null) return null;
return "$index$it";
}).toList();
expect(set, setOf("0a", "2b", "3c"));
});
});
}
group("max", () {
test("gets max value int", () {
final iterable = iterableOf([1, 3, 2]);
final int? max = iterable.max();
expect(max, 3);
});
test("gets max value double", () {
final iterable = iterableOf([1.0, 3.2, 2.0]);
final double? max = iterable.max();
expect(max, 3.2);
});
test("gets max value comparable", () {
final iterable = iterableOf(["a", "x", "b"]);
final String? max = iterable.max();
expect(max, "x");
});
test("empty iterable return null", () {
final iterable = emptyIterable<int>();
final int? max = iterable.max();
expect(max, null);
});
});
group("maxBy", () {
test("gets max value", () {
final iterable = iterableOf(["1", "3", "2"]);
expect(iterable.maxBy((it) => num.parse(it)), "3");
});
test("empty iterable return null", () {
final iterable = emptyIterable<int>();
expect(iterable.maxBy<num>((it) => it), null);
});
});
group("maxWith", () {
int _intComparison(int value, int other) => value.compareTo(other);
int _doubleComparison(double value, double other) => value.compareTo(other);
test("gets max value int", () {
final iterable = iterableOf([2, 1, 3]);
expect(iterable.maxWith(_intComparison), 3);
});
test("gets max value double", () {
final iterable = iterableOf([2.0, 1.0, 3.2]);
expect(iterable.maxWith(_doubleComparison), 3.2);
});
test("empty iterable return null", () {
final iterable = emptyIterable<int>();
expect(iterable.maxWith(_intComparison), null);
});
});
group("min", () {
test("gets min int value", () {
final KtIterable<int> iterable = iterableOf([3, 1, 2]);
final int? min = iterable.min();
expect(min, 1);
});
test("gets min double value", () {
final KtIterable<double> iterable = iterableOf([3.2, 1.4, 2.2]);
final double? min = iterable.min();
expect(min, 1.4);
});
test("gets max value comparable", () {
final iterable = iterableOf(["x", "b", "a", "h"]);
final String? min = iterable.min();
expect(min, "a");
});
test("empty iterable return null", () {
final iterable = emptyIterable<int>();
final int? min = iterable.min();
expect(min, null);
});
});
group("minBy", () {
test("gets min value", () {
final iterable = iterableOf(["1", "3", "2"]);
expect(iterable.minBy((it) => int.parse(it)), "1");
expect(iterable.minBy((it) => num.parse(it)), "1");
});
test("empty iterable return null", () {
final iterable = emptyIterable<int>();
expect(iterable.minBy((it) => it), null);
// with generic type
expect(iterable.minBy<num>((it) => it), null);
});
});
group("minWith", () {
int _intComparison(int value, int other) => value.compareTo(other);
test("gets min value", () {
final iterable = iterableOf([2, 1, 3]);
expect(iterable.minWith(_intComparison), 1);
});
test("empty iterable return null", () {
final iterable = emptyIterable<int>();
expect(iterable.minWith(_intComparison), null);
});
});
group("minus", () {
if (ordered) {
test("remove iterable", () {
final result = iterableOf(["paul", "john", "max", "lisa"])
.minus(iterableOf(["max", "john"]));
expect(result, listOf("paul", "lisa"));
});
test("infix", () {
final result =
iterableOf(["paul", "john", "max", "lisa"]) - iterableOf(["max"]);
expect(result.toList(), listOf("paul", "john", "lisa"));
});
test("remove one item", () {
final result =
iterableOf(["paul", "john", "max", "lisa"]).minusElement("max");
expect(result.toList(), listOf("paul", "john", "lisa"));
});
} else {
test("remove iterable", () {
final result = iterableOf(["paul", "john", "max", "lisa"])
.minus(iterableOf(["max", "john"]));
expect(result.toSet(), setOf("paul", "lisa"));
});
test("infix", () {
final result =
iterableOf(["paul", "john", "max", "lisa"]) - iterableOf(["max"]);
expect(result.toSet(), setOf("paul", "john", "lisa"));
});
test("remove one item", () {
final result =
iterableOf(["paul", "john", "max", "lisa"]).minusElement("max");
expect(result.toSet(), setOf("paul", "john", "lisa"));
});
}
test("empty gets returned empty", () {
final result = emptyIterable() - iterableOf(["max"]);
expect(result.toList(), emptyList());
});
});
group("none", () {
test("no matching returns true", () {
final items = iterableOf(["paul", "john", "max", "lisa"]);
expect(items.none((it) => it.contains("y")), isTrue);
});
test("matching returns false", () {
final items = iterableOf(["paul", "john", "max", "lisa"]);
expect(items.none((it) => it.contains("p")), isFalse);
});
test("none without predicate returns false when iterable has items", () {
final items = iterableOf(["paul", "john", "max", "lisa"]);
expect(items.none(), isFalse);
});
test("empty returns always true", () {
expect(emptyIterable().none(), isTrue);
});
});
group("onEach", () {
test("onEach", () {
final iterable = iterableOf([
[1, 2],
[3, 4],
[5, 6]
]);
iterable.onEach((it) => it.add(0));
expect(iterable.map((it) => it.last).toList(), listOf(0, 0, 0));
});
test("chainable", () {
final list = KtMutableList.empty();
final result = listOf("a", "b", "c")
.onEach((it) => list.add(it))
.map((it) => it.toUpperCase())
.getOrNull(0); // prints: A
expect(result, "A");
expect(list, listOf("a", "b", "c"));
});
});
group("onEachIndexed", () {
test("pairs", () {
final indexes = KtMutableList<int>.empty();
final items = KtMutableList<int>.empty();
final iterable = iterableOf([1, 2, 3]);
iterable.onEachIndexed((index, item) {
indexes.add(index);
items.add(item);
});
expect(indexes, listOf(0, 1, 2));
if (ordered) {
expect(items, iterable.toList());
}
});
test("chainable", () {
final list = KtMutableList.empty();
final result = listOf("a", "b", "c")
.onEachIndexed((index, it) {
list.add(index);
list.add(it);
})
.map((it) => it.toUpperCase())
.getOrNull(0); // prints: A
expect(result, "A");
expect(list, listOf(0, "a", 1, "b", 2, "c"));
});
});
group("partition", () {
test("partition", () {
final result =
iterableOf([7, 31, 4, 3, 92, 32]).partition((it) => it > 10);
expect(result.first.toSet(), setOf(31, 92, 32));
expect(result.second.toSet(), setOf(7, 4, 3));
});
});
group("plus", () {
test("concat two iterables", () {
final result = iterableOf([1, 2, 3]).plus(iterableOf([4, 5, 6]));
expect(result.toList(), listOf(1, 2, 3, 4, 5, 6));
});
test("infix", () {
final result = iterableOf([1, 2, 3]) + iterableOf([4, 5, 6]);
expect(result.toList(), listOf(1, 2, 3, 4, 5, 6));
});
});
group("plusElement", () {
test("concat item", () {
final result = iterableOf([1, 2, 3]).plusElement(5);
expect(result.toList(), listOf(1, 2, 3, 5));
});
test("element can be null", () {
final result = iterableOf<int?>([1, 2, 3]).plusElement(null);
expect(result.toList(), listFrom([1, 2, 3, null]));
});
});
group("reduce", () {
test("reduce", () {
final result = iterableOf([1, 2, 3, 4]).reduce((int acc, it) => it + acc);
expect(result, 10);
});
test("empty throws", () {
expect(() => emptyIterable<int>().reduce((int acc, it) => it + acc),
throwsUnsupportedError);
});
});
group("reduceIndexed", () {
test("reduceIndexed", () {
var i = 1;
final result =
iterableOf([1, 2, 3, 4]).reduceIndexed((index, int acc, it) {
expect(index, i);
i++;
return it + acc;
});
expect(result, 10);
});
test("empty throws", () {
expect(
() => emptyIterable<int>()
.reduceIndexed((index, int acc, it) => it + acc),
throwsUnsupportedError);
});
});
group("requireNoNulls", () {
test("throw when nulls are found", () {
final e = catchException<ArgumentError>(
() => iterableOf(["paul", null, "john", "lisa"]).requireNoNulls());
expect(e.message, contains("null element found"));
});
test("chains", () {
iterableOf(["a", "b", "c"]).requireNoNulls().requireNoNulls().toList();
});
test("removes nullable types", () {
final KtIterable<int?> list = iterableOf<int?>([1, 2, 3]);
final KtIterable<int> nonNull = list.requireNoNulls();
expect(nonNull.toList().runtimeType.toString(), contains('<int>'));
});
});
group("reversed", () {
test("mutliple", () {
final result = iterableOf([1, 2, 3, 4]).reversed();
expect(result.toList(), listOf(4, 3, 2, 1));
});
test("empty", () {
expect(emptyIterable<int>().reversed().toList(), emptyList<int>());
});
test("one", () {
expect(iterableOf<int>([1]).reversed().toList(), listFrom<int>([1]));
});
});
group("single", () {
test("single", () {
expect(iterableOf([1]).single(), 1);
});
test("single throws when list has more elements", () {
final e =
catchException<ArgumentError>(() => iterableOf([1, 2]).single());
expect(e.message, contains("has more than one element"));
});
test("single throws for empty iterables", () {
final e = catchException<NoSuchElementException>(
() => emptyIterable().single());
expect(e.message, contains("is empty"));
});
test("single with predicate finds item", () {
final found = iterableOf(["paul", "john", "max", "lisa"])
.single((it) => it.contains("x"));
expect(found, "max");
});
test("single with predicate without match", () {
final e = catchException<NoSuchElementException>(() =>
iterableOf(["paul", "john", "max", "lisa"])
.single((it) => it.contains("y")));
expect(e.message, contains("no element matching the predicate"));
});
test("single with predicate multiple matches", () {
final e = catchException<ArgumentError>(() =>
iterableOf(["paul", "john", "max", "lisa"])
.single((it) => it.contains("l")));
expect(e.message, contains("more than one matching element"));
});
});
group("singleOrNull", () {
test("singleOrNull", () {
expect(iterableOf([1]).singleOrNull(), 1);
});
test("singleOrNull on multiple iterable returns null", () {
expect(iterableOf([1, 2]).singleOrNull(), null);
});
test("singleOrNull on empty iterable returns null", () {
expect(emptyIterable().singleOrNull(), null);
});
test("singleOrNull with predicate finds item", () {
final found = iterableOf(["paul", "john", "max", "lisa"])
.singleOrNull((it) => it.contains("x"));
expect(found, "max");
});
test("singleOrNull with predicate without match returns null", () {
final result = iterableOf(["paul", "john", "max", "lisa"])
.singleOrNull((it) => it.contains("y"));
expect(result, null);
});
test("singleOrNull with predicate multiple matches returns null", () {
final result = iterableOf(["paul", "john", "max", "lisa"])
.singleOrNull((it) => it.contains("l"));
expect(result, null);
});
});
group("sort", () {
test("sort", () {
final result = iterableOf([4, 2, 3, 1]).sorted();
expect(result.toList(), listOf(1, 2, 3, 4));
});
test("sortedDescending", () {
final result = iterableOf([4, 2, 3, 1]).sortedDescending();
expect(result.toList(), listOf(4, 3, 2, 1));
});
String lastChar(String it) {
final last = it.runes.last;
return String.fromCharCode(last);
}
test("sortedBy", () {
final result =
iterableOf(["paul", "john", "max", "lisa"]).sortedBy(lastChar);
expect(result, listOf("lisa", "paul", "john", "max"));
});
test("sortedBy for ints", () {
final result = iterableOf(["paul", "john", "max", "lisa"])
.sortedBy((it) => it.length);
if (ordered) {
expect(result, listOf("max", "paul", "john", "lisa"));
} else {
expect(result.first(), "max");
}
});
test("sortedBy with doubles", () {
final result = iterableOf(["paul", "john", "max", "lisa"])
.sortedBy((it) => it.length / it.indexOf("a"));
expect(result, listOf("john", "lisa", "max", "paul"));
});
test("sortedByDescending", () {
final result = iterableOf(["paul", "john", "max", "lisa"])
.sortedByDescending(lastChar);
expect(result, listOf("max", "john", "paul", "lisa"));
});
});
group("subtract", () {
test("remove one item", () {
final result = iterableOf(["paul", "john", "max", "lisa"])
.subtract(iterableOf(["max"]));
expect(result, setOf("paul", "john", "lisa"));
});
});
group("sum", () {
test("sum of ints", () {
expect(iterableOf([1, 2, 3, 4, 5]).sum(), 15);
});
test("sum of doubles", () {
final sum = iterableOf([1.0, 2.1, 3.2]).sum();
expect(sum, closeTo(6.3, 0.000000001));
});
});
group("sumBy", () {
test("int", () {
expect(iterableOf([1, 2, 3]).sumBy((i) => i * 2), 12);
});
test("double", () {
expect(iterableOf([1, 2, 3]).sumBy((i) => i * 1.5), 9.0);
// ignore: deprecated_member_use_from_same_package
expect(iterableOf([1, 2, 3]).sumByDouble((i) => i * 1.5), 9.0);
});
test("double as num", () {
const num factor = 1.5;
expect(iterableOf([1, 2, 3]).sumBy((i) => i * factor), 9.0);
});
test("double as num", () {
const num factor = 2;
expect(iterableOf([1, 2, 3]).sumBy((i) => i * factor), 12);
});
});
group("take", () {
test("take zero returns empty", () {
final iterable = iterableOf([1, 2, 3, 4]);
expect(iterable.take(0).toList(), emptyList());
});
test("take negative throws", () {
final iterable = iterableOf([1, 2, 3, 4]);
final e = catchException<ArgumentError>(() => iterable.take(-3));
expect(e.message, allOf(contains("-3"), contains("less than zero")));
});
test("take more than size returns full list", () {
final iterable = iterableOf([1, 2, 3, 4]);
expect(iterable.take(10).toList(), iterable.toList());
});
if (ordered) {
test("take smaller list size returns first elements", () {
final iterable = iterableOf([1, 2, 3, 4]);
expect(iterable.take(2).toList(), listOf(1, 2));
});
}
if (ordered) {
test("take first element which is null", () {
final iterable = iterableOf([null, 1]);
expect(iterable.take(1).toList(), listFrom([null]));
expect(iterable.take(2).toList(), listFrom([null, 1]));
});
}
});
group("takeWhile", () {
test("take no elements returns empty", () {
final iterable = iterableOf([1, 2, 3, 4]);
expect(iterable.takeWhile((it) => false), emptyList());
});
test("take all elements returns original list", () {
final iterable = iterableOf([1, 2, 3, 4]);
expect(iterable.takeWhile((it) => true), iterable.toList());
});
if (ordered) {
test("takeWhile smaller 3", () {
final iterable = iterableOf([1, 2, 3, 4]);
expect(iterable.takeWhile((it) => it < 3), listOf(1, 2));
});
}
});
group("toHashSet", () {
test("toHashSet", () {
final list = iterableOf(["a", "b", "c", "b"]);
expect(list.toHashSet().size, 3);
});
});
group("toCollection", () {
test("toCollection same type", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<int>();
final filtered = iterable.toCollection(result);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(4, 25, -12, 10));
} else {
expect(result.toSet(), setOf(4, 25, -12, 10));
}
});
test("toCollection super type", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<num>();
final filtered = iterable.toCollection(result);
expect(identical(result, filtered), isTrue);
if (ordered) {
expect(result, listOf(4, 25, -12, 10));
} else {
expect(result.toSet(), equals(setOf(4, 25, -12, 10)));
}
});
test("toCollection wrong type throws", () {
final iterable = iterableOf([4, 25, -12, 10]);
final result = mutableListOf<String>();
final e =
catchException<ArgumentError>(() => iterable.toCollection(result));
expect(
e.message,
allOf(
contains("toCollection"),
contains("destination"),
contains("<int>"),
contains("<String>"),
));
});
});
group("union", () {
test("concat two iterables", () {
final result = iterableOf([1, 2, 3]).union(iterableOf([4, 5, 6]));
expect(result.toList(), listOf(1, 2, 3, 4, 5, 6));
});
});
group("windowed", () {
test("default step", () {
expect(
iterableOf([1, 2, 3, 4, 5]).windowed(3),
listOf(
listOf(1, 2, 3),
listOf(2, 3, 4),
listOf(3, 4, 5),
));
});
test("larger step", () {
expect(
iterableOf([1, 2, 3, 4, 5]).windowed(3, step: 2),
listOf(
listOf(1, 2, 3),
listOf(3, 4, 5),
));
});
test("step doesn't fit length", () {
expect(
iterableOf([1, 2, 3, 4, 5, 6]).windowed(3, step: 2),
listOf(
listOf(1, 2, 3),
listOf(3, 4, 5),
));
});
test("window can be smaller than length", () {
expect(iterableOf([1]).windowed(3, step: 2), emptyList());
});
test("step doesn't fit length, partial", () {
expect(
iterableOf([1, 2, 3, 4, 5, 6])
.windowed(3, step: 2, partialWindows: true),
listOf(
listOf(1, 2, 3),
listOf(3, 4, 5),
listOf(5, 6),
));
});
test("partial doesn't crash on empty iterable", () {
expect(emptyIterable().windowed(3, step: 2, partialWindows: true),
emptyList());
});
test("window can be smaller than length, emitting partial only", () {
expect(iterableOf([1]).windowed(3, step: 2, partialWindows: true),
listOf(listOf(1)));
});
});
group("unzip", () {
test("empty", () {
final KtIterable<KtPair<String, int>> zipped =
emptyIterable<KtPair<String, int>>();
final unzipped = zipped.unzip();
expect(unzipped.first, emptyList());
expect(unzipped.second, emptyList());
});
test("unzip pairs", () {
final zipped = iterableOf([
const KtPair("a", 1),
const KtPair("b", 2),
const KtPair("c", 3),
]);
final unzipped = zipped.unzip();
if (ordered) {
expect(unzipped.first, listOf("a", "b", "c"));
expect(unzipped.second, listOf(1, 2, 3));
} else {
expect(unzipped.first.toSet(), setOf("a", "b", "c"));
expect(unzipped.second.toSet(), setOf(1, 2, 3));
}
});
});
group("windowedTransform", () {
test("default step", () {
expect(iterableOf([1, 2, 3, 4, 5]).windowedTransform(3, (l) => l.sum()),
listOf(6, 9, 12));
});
test("larger step", () {
expect(
iterableOf([1, 2, 3, 4, 5])
.windowedTransform(3, (l) => l.sum(), step: 2),
listOf(6, 12));
});
test("step doesn't fit length", () {
expect(
iterableOf([1, 2, 3, 4, 5, 6])
.windowedTransform(3, (l) => l.sum(), step: 2),
listOf(6, 12));
});
test("window can be smaller than length", () {
expect(iterableOf([1]).windowed(3, step: 2), emptyList());
});
test("step doesn't fit length, partial", () {
expect(
iterableOf([1, 2, 3, 4, 5, 6]).windowedTransform(3, (l) => l.sum(),
step: 2, partialWindows: true),
listOf(6, 12, 11));
});
test("partial doesn't crash on empty iterable", () {
expect(
emptyIterable().windowedTransform(
3, (l) => throw StateError("this gets never executed"),
step: 2, partialWindows: true),
emptyList());
});
test("window can be smaller than length, emitting partial only", () {
expect(
iterableOf([1]).windowedTransform(3, (l) => l.sum(),
step: 2, partialWindows: true),
listOf(1));
});
});
group("zip", () {
test("to pair", () {
final result = iterableOf([1, 2, 3, 4, 5]).zip(iterableOf(["a", "b"]));
expect(result, listFrom(const [KtPair(1, "a"), KtPair(2, "b")]));
});
test("transform", () {
final result = iterableOf([1, 2, 3, 4, 5])
.zipTransform(iterableOf(["a", "b"]), (a, b) => "$a$b");
expect(result, listOf("1a", "2b"));
});
});
group("zipWithNext", () {
test("zipWithNext", () {
final result = iterableOf([1, 2, 3]).zipWithNext();
expect(result, listOf(const KtPair(1, 2), const KtPair(2, 3)));
});
});
group("zipWithNextTransform", () {
test("zipWithNextTransform", () {
final result =
iterableOf([1, 2, 3, 4, 5]).zipWithNextTransform((a, b) => a + b);
expect(result, listOf(3, 5, 7, 9));
});
test("empty does nothing", () {
final result = emptyIterable().zipWithNextTransform((a, b) => a + b);
expect(result, emptyList());
});
});
}
|
use super::*;
use dscp_pallet_traits::{ProcessFullyQualifiedId, ProcessIO, ProcessValidator};
use sp_std::collections::btree_map::BTreeMap;
use crate::restrictions::Restriction;
use crate::{Process, ProcessModel, ProcessStatus};
#[test]
// Happy path: a stored, enabled process with no restrictions validates.
fn it_succeeds_when_process_exists() {
    new_test_ext().execute_with(|| {
        // Seed storage: process A, version 1, enabled, no restrictions.
        ProcessModel::<Test>::insert(
            ProcessIdentifier::A,
            1u32,
            Process {
                status: ProcessStatus::Enabled,
                restrictions: Vec::new()
            }
        );
        // Sender 0 with empty inputs/outputs should pass validation.
        assert!(ProcessValidation::validate_process(
            ProcessFullyQualifiedId {
                id: ProcessIdentifier::A,
                version: 1u32
            },
            &0u64,
            &Vec::new(),
            &Vec::new(),
        ));
    });
}
#[test]
// Validation must reject a process id (B) that was never inserted, even
// though another process (A) exists at the same version.
fn it_fails_when_process_id_doesnt_exist() {
    new_test_ext().execute_with(|| {
        ProcessModel::<Test>::insert(
            ProcessIdentifier::A,
            1u32,
            Process {
                status: ProcessStatus::Enabled,
                restrictions: Vec::new()
            }
        );
        // Querying id B — not in storage — must fail.
        assert!(!ProcessValidation::validate_process(
            ProcessFullyQualifiedId {
                id: ProcessIdentifier::B,
                version: 1u32
            },
            &0u64,
            &Vec::new(),
            &Vec::new(),
        ));
    });
}
#[test]
// Validation must reject an unknown version (2) of a known process id (A,
// stored only at version 1).
fn it_fails_when_process_version_doesnt_exist() {
    new_test_ext().execute_with(|| {
        ProcessModel::<Test>::insert(
            ProcessIdentifier::A,
            1u32,
            Process {
                status: ProcessStatus::Enabled,
                restrictions: Vec::new()
            }
        );
        // Same id, wrong version — must fail.
        assert!(!ProcessValidation::validate_process(
            ProcessFullyQualifiedId {
                id: ProcessIdentifier::A,
                version: 2u32
            },
            &0u64,
            &Vec::new(),
            &Vec::new(),
        ));
    });
}
#[test]
// A process that exists but is Disabled must not validate.
fn it_fails_when_process_disabled() {
    new_test_ext().execute_with(|| {
        // Same setup as the happy path, except status is Disabled.
        ProcessModel::<Test>::insert(
            ProcessIdentifier::A,
            1u32,
            Process {
                status: ProcessStatus::Disabled,
                restrictions: Vec::new()
            }
        );
        assert!(!ProcessValidation::validate_process(
            ProcessFullyQualifiedId {
                id: ProcessIdentifier::A,
                version: 1u32
            },
            &0u64,
            &Vec::new(),
            &Vec::new(),
        ));
    });
}
#[test]
// With multiple restrictions, validation passes only if every one passes:
// here None (always true) and SenderOwnsAllInputs with a matching owner.
fn it_succeeds_when_all_restrictions_succeed() {
    new_test_ext().execute_with(|| {
        ProcessModel::<Test>::insert(
            ProcessIdentifier::A,
            1u32,
            Process {
                status: ProcessStatus::Enabled,
                restrictions: vec![Restriction::None, Restriction::SenderOwnsAllInputs]
            }
        );
        // Input token's default role maps to account 0 — the sender below —
        // so SenderOwnsAllInputs is satisfied.
        let mut token_roles: BTreeMap<u32, u64> = BTreeMap::new();
        token_roles.insert(Default::default(), 0u64);
        assert!(ProcessValidation::validate_process(
            ProcessFullyQualifiedId {
                id: ProcessIdentifier::A,
                version: 1u32
            },
            &0u64,
            &vec![ProcessIO {
                roles: token_roles,
                metadata: BTreeMap::new(),
                parent_index: None
            }],
            &Vec::new(),
        ));
    });
}
#[test]
// Restrictions are conjunctive: one failing restriction fails the whole
// validation even when others (None) pass.
fn it_fails_when_one_restrictions_fails() {
    new_test_ext().execute_with(|| {
        ProcessModel::<Test>::insert(
            ProcessIdentifier::A,
            1u32,
            Process {
                status: ProcessStatus::Enabled,
                restrictions: vec![Restriction::None, Restriction::SenderOwnsAllInputs]
            }
        );
        // Input token is owned by account 1 while the sender is 0, so
        // SenderOwnsAllInputs must fail.
        let mut token_roles: BTreeMap<u32, u64> = BTreeMap::new();
        token_roles.insert(Default::default(), 1u64);
        assert!(!ProcessValidation::validate_process(
            ProcessFullyQualifiedId {
                id: ProcessIdentifier::A,
                version: 1u32
            },
            &0u64,
            &vec![ProcessIO {
                roles: token_roles,
                metadata: BTreeMap::new(),
                parent_index: None
            }],
            &Vec::new(),
        ));
    });
}
|
# Options
CLEAN_ALL_OPT="--clean-all"

# Remove the entire cache directory at $MJW_CACHE_PATH.
clean_all() {
  # Guard against an unset/empty path: an empty expansion would make
  # `rm -rf` a silent no-op at best and is one typo away from disaster.
  if [ -z "${MJW_CACHE_PATH:-}" ]; then
    echo "MJW_CACHE_PATH is not set; nothing to clean." >&2
    return 1
  fi
  # Quote the path (spaces/globs) and use `--` so a path starting with "-"
  # cannot be parsed as an rm option.
  rm -rf -- "$MJW_CACHE_PATH" &&
    echo "All cache cleaned successfully."
}
# Dispatch cache subcommand options. Scans all arguments; on the first
# case-insensitive match of --clean-all it cleans the cache and exits the
# shell (exit 0, not return). Unrecognized arguments are skipped.
# NOTE(review): ${1^^} (uppercase expansion) is bash 4+ only — confirm the
# script's shebang/targets before running under POSIX sh or macOS bash 3.
cache() {
  while [ "$#" -gt 0 ]; do
    case "${1^^}" in
      "${CLEAN_ALL_OPT^^}")
        clean_all
        exit 0
        ;;
      *)
        shift
        ;;
    esac
  done
}
|
namespace BusinessLayer.DTOs.Filter.Enums
{
    /// <summary>
    /// Achievement completion state used by filter DTOs.
    /// </summary>
    /// <remarks>
    /// NOTE(review): the distinction between <c>NotStarted</c> and
    /// <c>NonCompleted</c> is not evident from this file — confirm against
    /// the filtering logic that consumes this enum before documenting
    /// each member's exact semantics.
    /// </remarks>
    public enum AchievementType
    {
        Done,
        Partial,
        NotStarted,
        NonCompleted
    }
}
|
-module(oauth_uri).
-export([normalize/1, calate/2, encode/1]).
-export([params_from_string/1, params_to_string/1,
params_from_header_string/1, params_to_header_string/1]).
-import(lists, [concat/1]).
%% @doc Normalizes a URI: lowercases the host and (via normalize/5)
%% drops the query string and default ports. On a parse failure the
%% error term from http_uri:parse/1 is returned unchanged.
%% NOTE(review): http_uri:parse/1 was deprecated in OTP 21 and removed
%% in OTP 25; migrating to uri_string:parse/1 should be considered.
-spec normalize(iolist()) -> iolist().
normalize(URI) ->
case http_uri:parse(URI) of
%% R15B and later wrap the parse result in {ok, _}.
{ok, {Scheme, UserInfo, Host, Port, Path, _Query}} -> % R15B
normalize(Scheme, UserInfo, string:to_lower(Host), Port, [Path]);
%% Older releases return the bare tuple.
{Scheme, UserInfo, Host, Port, Path, _Query} ->
normalize(Scheme, UserInfo, string:to_lower(Host), Port, [Path]);
Else ->
Else
end.
%% Omits the default port from the normalized URI (80 for http,
%% 443 for https); any other scheme/port keeps an explicit ":Port".
normalize(http, UserInfo, Host, 80, Acc) ->
normalize(http, UserInfo, [Host|Acc]);
normalize(https, UserInfo, Host, 443, Acc) ->
normalize(https, UserInfo, [Host|Acc]);
normalize(Scheme, UserInfo, Host, Port, Acc) ->
normalize(Scheme, UserInfo, [Host, ":", Port|Acc]).
%% Assembles the final "scheme://[userinfo@]hostpart..." string;
%% the "userinfo@" section is only emitted when UserInfo is non-empty.
normalize(Scheme, [], Acc) ->
concat([Scheme, "://"|Acc]);
normalize(Scheme, UserInfo, Acc) ->
concat([Scheme, "://", UserInfo, "@"|Acc]).
%% Renders params as an OAuth Authorization header fragment:
%% comma-separated key="value" pairs, both sides percent-encoded.
-spec params_to_header_string([{string(), string()}]) -> string().
params_to_header_string(Params) ->
intercalate(", ", [concat([encode(K), "=\"", encode(V), "\""]) || {K, V} <- Params]).
%% Parses an Authorization-header parameter list: splits on commas
%% (plus trailing whitespace) and skips empty segments.
-spec params_from_header_string(string()) -> [{string(), string()}].
params_from_header_string(String) ->
[param_from_header_string(Param) || Param <- re:split(String, ",\\s*", [{return, list}]), Param =/= ""].
%% Parses a single key="value" header parameter. Splitting on "=" and
%% rejoining Rest preserves "=" characters inside the quoted value;
%% the substr call strips the surrounding double quotes.
%% NOTE(review): assumes the value is actually quoted — an unquoted
%% value loses its first and last character.
param_from_header_string(Param) ->
[Key|Rest] = string:tokens(Param, "="),
QuotedValue = string:join(Rest, "="),
Value = string:substr(QuotedValue, 2, length(QuotedValue) - 2),
{decode(Key), decode(Value)}.
%% Parses an URL-encoded query string ("k=v&k2=v2") into pairs.
-spec params_from_string(string()) -> [{string(), string()}].
params_from_string(Params) ->
[param_from_string(Param) || Param <- string:tokens(Params, "&")].
%% Decodes one "key=value" segment into a {Key, Value} tuple.
%% NOTE(review): a segment without "=" (or with an empty value) yields a
%% 1-tuple {Key} instead of {Key, ""} — callers pattern matching on
%% 2-tuples would crash; verify whether such input can occur.
param_from_string(Param) ->
list_to_tuple([decode(Value) || Value <- string:tokens(Param, "=")]).
%% Renders pairs as an URL-encoded query string ("k=v&k2=v2").
-spec params_to_string([{string(), string()}]) -> string().
params_to_string(Params) ->
intercalate("&", [calate("=", [K, V]) || {K, V} <- Params]).
%% Percent-encodes every element of Xs and joins the results with Sep.
-spec calate(string(), [string()]) -> string().
calate(Sep, Xs) ->
intercalate(Sep, lists:map(fun encode/1, Xs)).
%% Concatenates the elements of Xs with Sep between each adjacent pair.
intercalate(Sep, Xs) ->
concat(intersperse(Sep, Xs)).
%% Places Sep between consecutive list elements; lists:join/2 implements
%% exactly this (empty and singleton lists are returned unchanged).
intersperse(Sep, Xs) ->
lists:join(Sep, Xs).
%% Guard macro matching the RFC 3986 alphanumeric subset of the
%% "unreserved" character set.
-define(is_alphanum(C), C >= $A, C =< $Z; C >= $a, C =< $z; C >= $0, C =< $9).
%% Percent-encodes a term for OAuth. Integers and atoms are first
%% converted to their string form.
-spec encode(integer() | atom() | string()) -> string().
encode(Term) when is_integer(Term) ->
integer_to_list(Term);
encode(Term) when is_atom(Term) ->
encode(atom_to_list(Term));
encode(Term) when is_list(Term) ->
%% The input is reversed once up front; prepending to Acc then restores
%% the original order without a final reverse.
encode(lists:reverse(Term, []), []).
%% Unreserved characters (RFC 3986: ALPHA / DIGIT / "-" / "_" / "." / "~")
%% pass through unchanged.
encode([X | T], Acc) when ?is_alphanum(X); X =:= $-; X =:= $_; X =:= $.; X =:= $~ ->
encode(T, [X | Acc]);
%% Everything else becomes %XY with uppercase hex digits.
%% NOTE(review): each list element is encoded as a single byte — code
%% points above 255 would produce invalid output; assumes byte lists.
encode([X | T], Acc) ->
NewAcc = [$%, dec2hex(X bsr 4), dec2hex(X band 16#0f) | Acc],
encode(T, NewAcc);
encode([], Acc) ->
Acc.
%% Reverses encode/1: %XY triplets are decoded back to single bytes,
%% all other characters are copied through unchanged.
decode(Str) when is_list(Str) ->
decode(Str, []).
decode([$%, A, B | T], Acc) ->
decode(T, [(hex2dec(A) bsl 4) + hex2dec(B) | Acc]);
decode([X | T], Acc) ->
decode(T, [X | Acc]);
decode([], Acc) ->
%% Acc was built in reverse; restore the original order.
lists:reverse(Acc, []).
-compile({inline, [{dec2hex, 1}, {hex2dec, 1}]}).
%% Converts a value 0..15 to its uppercase hexadecimal digit character.
dec2hex(N) when N >= 10 andalso N =< 15 ->
N + $A - 10;
dec2hex(N) when N >= 0 andalso N =< 9 ->
N + $0.
%% Converts a hexadecimal digit character to its value 0..15.
%% Fix: lowercase digits are now accepted as well — RFC 3986 allows
%% lowercase hex in percent-encodings, and decode/2 previously crashed
%% with a function_clause error on input such as "%2f".
hex2dec(C) when C >= $A andalso C =< $F ->
C - $A + 10;
hex2dec(C) when C >= $a andalso C =< $f ->
C - $a + 10;
hex2dec(C) when C >= $0 andalso C =< $9 ->
C - $0.
|
% ------------------------------------------------------------------------------
%
% Copyright © 2018-2019, Lauri Moisio <l@arv.io>
%
% The ISC License
%
% Permission to use, copy, modify, and/or distribute this software for any
% purpose with or without fee is hereby granted, provided that the above
% copyright notice and this permission notice appear in all copies.
%
% THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES
% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
%
% ------------------------------------------------------------------------------
%
-module(dnsclass).
-export([builtin/0,from_to/3]).
-ifdef(EUNIT).
-include_lib("eunit/include/eunit.hrl").
-endif.
-include("include/pre_otp20_string_macro.hrl").
-callback atom() -> atom().
-callback value() -> 0..16#FFFF.
-callback masterfile_token() -> string().
-optional_callbacks([masterfile_token/0]).
-type class() :: atom() | 0..16#FFFF.
-export_type([class/0]).
% And do something like what was done with dnsrr and dnsrr_types?
%% Returns the list of modules shipped with this application that
%% implement the dnsclass behaviour callbacks (atom/0, value/0, ...).
builtin() ->
[
dnsclass_in,
dnsclass_cs,
dnsclass_ch,
dnsclass_hs,
dnsclass_none,
dnsclass_any
].
-ifdef(EUNIT).
%% Sanity check: every builtin class module must roundtrip between its
%% atom and numeric value via from_to/3.
builtin_modules_sanity_test() ->
Builtin = builtin(),
%% CheckFn returns true ("keep") only for modules whose roundtrip is
%% broken, so the filtered list must come out empty.
CheckFn = fun (FunMod) ->
FunAtom = FunMod:atom(),
FunValue = FunMod:value(),
not (
from_to(FunAtom, atom, value) =:= FunValue andalso
from_to(FunValue, value, atom) =:= FunAtom
)
end,
[] = lists:filter(CheckFn, Builtin).
-endif.
-spec from_to(
Value :: atom() | 0..16#FFFF | string(),
From :: 'value' | 'module' | 'atom' | 'masterfile_token',
To :: 'value' | 'module' | 'atom' | 'masterfile_token' | 'masterfile_token_generic'
) -> atom() | 0..16#FFFF | string().
%% Translates a DNS class between its representations. For unknown
%% classes maps:get/3 falls back to the input value, so the caller can
%% detect "no translation" by comparing result and input.
from_to(Value, value, module) ->
maps:get(Value, dnsclass_classes:value(), Value);
%% Known classes delegate to the module's masterfile_token/0; unknown
%% in-range values fall back to the generic "classNNN" form.
from_to(Value, value, masterfile_token) when Value >= 0, Value =< 16#FFFF ->
case maps:get(Value, dnsclass_classes:value(), Value) of
Value -> from_to(Value, value, masterfile_token_generic);
Module -> Module:masterfile_token()
end;
from_to(Value, value, masterfile_token_generic) when Value >= 0, Value =< 16#FFFF ->
"class" ++ integer_to_list(Value);
from_to(Value, atom, module) ->
maps:get(Value, dnsclass_classes:atom(), Value);
%% Master file tokens are matched case-insensitively. "classNNN" tokens
%% are parsed numerically; anything else is looked up in the token map.
%% (?LOWER is an OTP-version compatibility macro from the include.)
from_to(Value0, masterfile_token, To) ->
case string:(?LOWER)(Value0) of
[$c, $l, $a, $s, $s|Int] ->
try list_to_integer(Int) of
Value when To =:= value, Value >= 0, Value =< 16#FFFF -> Value;
Value when Value >= 0, Value =< 16#FFFF ->
%% If the value maps to nothing better, return the original
%% (un-lowercased) token unchanged.
case from_to(Value, value, To) of
Value -> Value0;
ToValue -> ToValue
end;
_ -> Value0
catch
%% Not an integer suffix: keep the token as-is.
error:badarg -> Value0
end;
Value ->
case maps:get(Value, dnsclass_classes:masterfile_token(), Value0) of
Value0 -> Value0;
Module when To =:= module -> Module;
Module -> from_to(Module, module, To)
end
end;
from_to(Module, module, value) ->
Module:value();
from_to(Module, module, atom) ->
Module:atom();
from_to(Module, module, masterfile_token) ->
Module:masterfile_token();
from_to(Module, module, masterfile_token_generic) ->
from_to(Module:value(), value, masterfile_token_generic);
%% Catch-all: translate via the module representation.
from_to(Value, From, To) when From =/= To ->
% If either From or To are not allowed, function_clause exception will result
case from_to(Value, From, module) of
Value -> Value;
Module -> from_to(Module, module, To)
end.
|
#' @title Install suggested packages only if missing
#'
#' @description Reads DESCRIPTION file and installs any
#' suggested packages that are missing.
#' Useful in R data packages, as this can be run in
#' a script to create the data to easily install packages
#' required for actually creating the data, but not required
#' for simply using the R package as an R package.
#'
#' Searches for DESCRIPTION file in working directory
#' and parent of working directory (useful for Rmd's).
#'
#' @param stop_on_error logical.
#' If \code{TRUE}, then an error is thrown if
#' the DESCRIPTION file cannot be found.
#' Default is \code{TRUE}.
#'
#' @param only_if_missing logical.
#' If \code{TRUE}, then only installs packages that
#' are not already installed.
#' Default is \code{TRUE}.
#'
#' @param ... arguments passed to \code{install.packages}.
#'
#' @details
#' Still need/unclear how to do the following:
#' - install packages specifically off BioConductor (probably
#' just check if packages are on CRAN first).
#' - allow installation from remotes.
#' - allow update of packages
#' Should we rename function to install_project_dependencies?
#' - This is not meant for R packages, but rather packages using
#' `bookdown`, and others where you don't really install the output.
#' @return Invisibly returns character vector of installed packages.
#' Helper: TRUE when `installed` is older than `required`.
#' Uses base R's package_version(), which compares version components
#' numerically (so "1.10" > "1.9"), unlike the previous substr arithmetic.
.pkg_version_lt <- function(installed, required) {
  package_version(installed) < package_version(required)
}

install_project_dependencies <- function(only_if_missing = TRUE,
                                         dependencies = c("Imports", "Suggests", "Depends"), # nolint
                                         ...,
                                         stop_on_error = TRUE) {
  # NOTE: only_if_missing is retained for interface compatibility; the
  # function only ever installs missing or out-of-date packages.
  # stop_on_error was previously referenced but never declared, which
  # made the missing-DESCRIPTION branch error with "object not found".
  # It is added after `...` so it can only be passed by name (fully
  # backward compatible for positional callers).
  fn <- c("DESCRIPTION", file.path(dirname(getwd()), "DESCRIPTION"))
  # Subset BEFORE indexing: `[[1]]` on an empty vector would raise a
  # subscript error and bypass the explicit handling below.
  fn <- fn[file.exists(fn)]
  if (length(fn) == 0) {
    msg <- "Could not find DESCRIPTION file in working directory or immediate parent." # nolint
    if (stop_on_error) {
      stop(msg, call. = FALSE)
    } else {
      warning(msg, call. = FALSE)
      return(invisible(character(0)))
    }
  }
  fn <- fn[[1]]
  desc_mat <- read.dcf(fn)
  # Collect "pkg" / "pkg (relation version)" entries from the requested
  # dependency fields. Version specs never contain commas, so splitting
  # on "," alone also handles dependencies written on a single line.
  pkg_version_vec <- NULL
  for (x in dependencies) {
    if (!x %in% colnames(desc_mat)) next
    entries <- trimws(strsplit(desc_mat[, x], split = ",")[[1]])
    pkg_version_vec <- c(pkg_version_vec, entries[nzchar(entries)])
  }
  if (length(pkg_version_vec) == 0) return(invisible(character(0)))
  pkg_mat <- installed.packages()
  # Split each entry into package name and optional "(relation version)".
  pkg_version_list <- strsplit(pkg_version_vec, " \\(|\\)")
  pkg_vec <- vapply(pkg_version_list, function(x) x[[1]], character(1))
  # "R (>= x.y)" in Depends is not an installable package.
  keep <- pkg_vec != "R"
  pkg_version_list <- pkg_version_list[keep]
  pkg_vec <- pkg_vec[keep]
  version_vec_number <- setNames(lapply(pkg_version_list, function(x) {
    if (length(x) == 1) return(NULL)
    strsplit(x[2], " ")[[1]][2]
  }), pkg_vec)
  version_vec_relation <- setNames(lapply(pkg_version_list, function(x) {
    if (length(x) == 1) return(NULL)
    strsplit(x[2], " ")[[1]][1]
  }), pkg_vec)
  pkg_vec_versioned <- names(Filter(Negate(is.null), version_vec_relation))
  # Packages not installed at all.
  pkg_vec_installed_n <- pkg_vec[!pkg_vec %in% pkg_mat[, "Package"]]
  pkg_vec_versioned_installed <- setdiff(pkg_vec_versioned, pkg_vec_installed_n)
  # Installed packages whose version constraint is not satisfied.
  pkg_vec_installed_n_greater <- NULL
  pkg_vec_installed_n_exact <- NULL
  for (pkg in pkg_vec_versioned_installed) {
    version_required <- version_vec_number[[pkg]]
    relation <- version_vec_relation[[pkg]]
    # Validate the relation up front. The previous if/else chain raised
    # "relation not recognised" whenever a ">=" constraint was already
    # satisfied, because the satisfied case fell through to the final else.
    if (!relation %in% c(">=", "==")) {
      stop("relation not recognised: ", relation)
    }
    # If the package is installed in several libraries, the first match
    # (library search order) is the one that wins.
    version_installed <- pkg_mat[pkg_mat[, "Package"] == pkg, "Version"][[1]]
    if (relation == ">=" && .pkg_version_lt(version_installed, version_required)) {
      pkg_vec_installed_n <- c(pkg_vec_installed_n, pkg)
      pkg_vec_installed_n_greater <- c(pkg_vec_installed_n_greater, pkg)
    } else if (relation == "==" && version_installed != version_required) {
      pkg_vec_installed_n_exact <- c(pkg_vec_installed_n_exact, pkg)
    }
  }
  # Install missing/outdated packages from CRAN.
  if (length(pkg_vec_installed_n) > 0) {
    install.packages(pkg_vec_installed_n, ...)
  }
  # Exact-version pins are installed via the `versions` package.
  if (!is.null(pkg_vec_installed_n_exact)) {
    if (!requireNamespace("versions", quietly = TRUE)) {
      install.packages("versions", ...)
    }
    versions::install.versions(
      pkgs = pkg_vec_installed_n_exact,
      versions = as.character(version_vec_number[pkg_vec_installed_n_exact]),
      ...
    )
  }
  # Re-check ">=" constraints after installation; CRAN may still serve a
  # version older than required.
  pkg_vec_incorrect_version <- NULL
  if (length(pkg_vec_installed_n_greater) > 0) {
    pkg_mat <- installed.packages()
    for (pkg in pkg_vec_installed_n_greater) {
      version_required <- version_vec_number[[pkg]]
      version_installed <- pkg_mat[pkg_mat[, "Package"] == pkg, "Version"]
      # Treat a package that failed to install at all as out of date.
      if (length(version_installed) == 0 ||
          .pkg_version_lt(version_installed[[1]], version_required)) {
        pkg_vec_incorrect_version <- c(pkg_vec_incorrect_version, pkg)
      }
    }
  }
  if (!is.null(pkg_vec_incorrect_version)) stop(
    paste0("The following packages have out of date versions after installing from CRAN: ",
           paste0(pkg_vec_incorrect_version, collapse = ", "))
  )
  invisible(pkg_vec)
}
|
*DECK DPRVEC
      DOUBLE PRECISION FUNCTION DPRVEC (M, U, V)
C***BEGIN PROLOGUE  DPRVEC
C***SUBSIDIARY
C***PURPOSE  Subsidiary to DBVSUP
C***LIBRARY   SLATEC
C***TYPE      DOUBLE PRECISION (PRVEC-S, DPRVEC-D)
C***AUTHOR  Watts, H. A., (SNLA)
C***DESCRIPTION
C
C  This subroutine computes the inner product of a vector U
C  with the imaginary product or mate vector corresponding to V.
C
C***SEE ALSO  DBVSUP
C***ROUTINES CALLED  DDOT
C***REVISION HISTORY  (YYMMDD)
C   750601  DATE WRITTEN
C   890831  Modified array declarations.  (WRB)
C   890831  REVISION DATE from Version 3.2
C   891214  Prologue converted to Version 4.0 format.  (BAB)
C   900328  Added TYPE section.  (WRB)
C   910722  Updated AUTHOR section.  (ALS)
C***END PROLOGUE  DPRVEC
C
      DOUBLE PRECISION DDOT
      INTEGER M, N, NP
      DOUBLE PRECISION U(*), V(*), VP
C***FIRST EXECUTABLE STATEMENT  DPRVEC
C     U and V are treated as two half-vectors of length N = M/2.
C     The returned value is
C        DPRVEC = DOT(U(N+1..2N), V(1..N)) - DOT(U(1..N), V(N+1..2N))
      N = M/2
      NP = N + 1
      VP = DDOT(N,U(1),1,V(NP),1)
      DPRVEC = DDOT(N,U(NP),1,V(1),1) - VP
      RETURN
      END
|
package csw.config.api.models
import java.nio.file.Path
/**
 * Contains information about a config file stored in the config service
 *
 * @param path the path of file sitting in config service
 * @param id the ConfigId representing unique id of the file
 * @param author the author associated with this file revision (as recorded by the config service)
 * @param comment the comment end user wants to provide while committing the file in config service
 */
case class ConfigFileInfo private[config] (path: Path, id: ConfigId, author: String, comment: String)
|
---
title: Soror
letter: S
permalink: "/definitions/bld-soror.html"
body: Lat. In the civil law. Sister; a sister. Inst. 3, 6, 1
published_at: '2018-07-07'
source: Black's Law Dictionary 2nd Ed (1910)
layout: post
---
|
/**
* \file
* \copyright
* Copyright (c) 2012-2022, OpenGeoSys Community (http://www.opengeosys.org)
* Distributed under a Modified BSD License.
* See accompanying file LICENSE.txt or
* http://www.opengeosys.org/project/license
*/
#include <algorithm>
#include <memory>
#include <string>
#include <vector>
// ThirdParty
#include <tclap/CmdLine.h>
#include "BaseLib/IO/readStringListFromFile.h"
#include "GeoLib/AABB.h"
#include "InfoLib/GitInfo.h"
#include "MathLib/Point3d.h"
#include "MeshLib/Elements/Element.h"
#include "MeshLib/IO/VtkIO/VtuInterface.h"
#include "MeshLib/IO/readMeshFromFile.h"
#include "MeshLib/Mesh.h"
#include "MeshLib/MeshEditing/ProjectPointOnMesh.h"
#include "MeshLib/MeshEditing/RemoveMeshComponents.h"
#include "MeshLib/MeshGenerators/MeshGenerator.h"
#include "MeshLib/MeshSearch/MeshElementGrid.h"
#include "MeshLib/Node.h"
static std::string mat_name = "MaterialIDs";
// Grows `extent` so it also covers the axis-aligned bounding box of all
// nodes of `mesh`; called once per input layer to accumulate the overall
// extent.
void adjustExtent(std::pair<MathLib::Point3d, MathLib::Point3d>& extent,
                  MeshLib::Mesh const& mesh)
{
    auto const& nodes = mesh.getNodes();
    GeoLib::AABB const bounding_box(nodes.cbegin(), nodes.cend());
    auto const& min_pnt = bounding_box.getMinPoint();
    auto const& max_pnt = bounding_box.getMaxPoint();
    for (std::size_t axis = 0; axis < 3; ++axis)
    {
        extent.first[axis] = std::min(extent.first[axis], min_pnt[axis]);
        extent.second[axis] = std::max(extent.second[axis], max_pnt[axis]);
    }
}
// Creates a voxel grid (regular hex mesh) covering the AABB of all layers.
// Also creates the "MaterialIDs" cell property, initialized to -1 for every
// cell; returns nullptr if the property vector cannot be created.
// Note: `extent` is adjusted in place so the grid is centred on the
// original extent.
std::unique_ptr<MeshLib::Mesh> generateInitialMesh(
    std::pair<MathLib::Point3d, MathLib::Point3d>& extent,
    std::array<double, 3> const& res)
{
    INFO("Creating initial mesh...");
    std::array<double, 3> mesh_range{{extent.second[0] - extent.first[0],
                                      extent.second[1] - extent.first[1],
                                      extent.second[2] - extent.first[2]}};
    // Round the cell count up so the grid fully covers the extent.
    std::array<std::size_t, 3> const n_cells{
        {static_cast<std::size_t>(std::ceil(mesh_range[0] / res[0])),
         static_cast<std::size_t>(std::ceil(mesh_range[1] / res[1])),
         static_cast<std::size_t>(std::ceil(mesh_range[2] / res[2]))}};
    // Distribute the rounding surplus evenly to both sides of each axis.
    for (std::size_t i = 0; i < 3; ++i)
    {
        double const ext_range = n_cells[i] * res[i];
        double const offset = (ext_range - mesh_range[i]) / 2.0;
        mesh_range[i] = ext_range;
        extent.first[i] -= offset;
        extent.second[i] += offset;
    }
    std::unique_ptr<MeshLib::Mesh> mesh(
        MeshLib::MeshGenerator::generateRegularHexMesh(
            mesh_range[0], mesh_range[1], mesh_range[2], n_cells[0], n_cells[1],
            n_cells[2], extent.first));
    auto mat_id = mesh->getProperties().createNewPropertyVector<int>(
        mat_name, MeshLib::MeshItemType::Cell);
    if (!mat_id)
    {
        return nullptr;
    }
    // -1 marks "no material assigned yet"; setMaterialIDs() fills this in.
    mat_id->insert(mat_id->end(), mesh->getNumberOfElements(), -1);
    return mesh;
}
// Returns the element of the layer mesh the given node projects onto
// (vertically), or nullptr if there is none. Candidate elements are taken
// from a search box limited to +/- max_edge in x/y but spanning the entire
// z range, since the projection is along z.
MeshLib::Element const* getProjectedElement(
    MeshLib::MeshElementGrid const& grid,
    MeshLib::Node const& node,
    double const max_edge)
{
    constexpr double max_val = std::numeric_limits<double>::max();
    MathLib::Point3d const min_vol{
        {node[0] - max_edge, node[1] - max_edge, -max_val}};
    MathLib::Point3d const max_vol{
        {node[0] + max_edge, node[1] + max_edge, max_val}};
    auto const& intersection_candidates =
        grid.getElementsInVolume(min_vol, max_vol);
    return MeshLib::ProjectPointOnMesh::getProjectedElement(
        intersection_candidates, node);
}
// Casts one vote for the given node: above the layer surface
// (upper_layer_cnt), at or below it (lower_layer_cnt), or not projectable
// onto the layer at all (nullptr_cnt). Exactly one counter is incremented.
void voteMatId(MeshLib::Node const& node, MeshLib::MeshElementGrid const& grid,
               double const max_edge, std::size_t& nullptr_cnt,
               std::size_t& upper_layer_cnt, std::size_t& lower_layer_cnt)
{
    auto const* projected_element = getProjectedElement(grid, node, max_edge);
    if (projected_element == nullptr)
    {
        ++nullptr_cnt;
    }
    else if (node[2] > MeshLib::ProjectPointOnMesh::getElevation(
                           *projected_element, node))
    {
        ++upper_layer_cnt;
    }
    else
    {
        ++lower_layer_cnt;
    }
}
// Sets material IDs for all elements depending on the layers they are
// located between. Layers are processed bottom-up (reverse index order);
// once an element is assigned to a layer interval it is skipped for all
// layers below (is_set).
void setMaterialIDs(MeshLib::Mesh& mesh,
                    std::vector<std::unique_ptr<MeshLib::Mesh>> const& layers,
                    bool const dilate)
{
    INFO("Setting material properties...");
    std::size_t const n_layers = layers.size();
    auto const& elems = mesh.getElements();
    std::size_t const n_elems = mesh.getNumberOfElements();
    auto mat_ids = mesh.getProperties().getPropertyVector<int>(mat_name);
    std::vector<bool> is_set(n_elems, false);
    for (int i = n_layers - 1; i >= 0; --i)
    {
        INFO("-> Layer {:d}", n_layers - i - 1);
        MeshLib::MeshElementGrid const grid(*layers[i]);
        double const max_edge(layers[i]->getMaxEdgeLength());
        for (std::size_t j = 0; j < n_elems; ++j)
        {
            if (is_set[j])
            {
                continue;
            }
            std::size_t nullptr_cnt(0);
            std::size_t upper_layer_cnt(0);
            std::size_t lower_layer_cnt(0);
            // First vote with the element's centre of gravity only.
            MeshLib::Node const node = MeshLib::getCenterOfGravity(*elems[j]);
            voteMatId(node, grid, max_edge, nullptr_cnt, upper_layer_cnt,
                      lower_layer_cnt);
            if (nullptr_cnt)
            {
                // if no element was found at centre point, vote via corners
                // (assumes hex elements, i.e. 8 corner nodes).
                for (std::size_t k = 0; k < 8; ++k)
                {
                    MeshLib::Node const& n = *elems[j]->getNode(k);
                    voteMatId(n, grid, max_edge, nullptr_cnt, upper_layer_cnt,
                              lower_layer_cnt);
                }
                // If the "dilate"-param is set, a mat ID will be assigned if at
                // least one node was voting for a specific layer. Without the
                // "dilate"-param, an absolute majority is needed. In case of a
                // tie, the lower layer will be favoured.
                if ((upper_layer_cnt == 0 && lower_layer_cnt == 0) ||
                    (!dilate && nullptr_cnt >= upper_layer_cnt &&
                     nullptr_cnt >= lower_layer_cnt))
                {
                    // Element not covered by this layer; try the next one.
                    continue;
                }
                if (upper_layer_cnt > lower_layer_cnt)
                {
                    (*mat_ids)[j] = n_layers - i - 1;
                }
                else
                {
                    // Below this layer: keep the previously assigned ID and
                    // exclude the element from further (lower) layers.
                    is_set[j] = true;
                }
                continue;
            }
            if (upper_layer_cnt)
            {
                (*mat_ids)[j] = n_layers - i - 1;
            }
            else
            {
                is_set[j] = true;
            }
        }
    }
    // set all elements above uppermost layer back to -1 so they are
    // subsequently cut
    std::replace(mat_ids->begin(), mat_ids->end(),
                 static_cast<int>(n_layers - 1), -1);
}
// Removes all elements that were never assigned a material ID (-1), i.e.
// elements not located between two layers. Returns nullptr if every
// element is unmarked (nothing would remain).
MeshLib::Mesh* removeUnusedElements(MeshLib::Mesh const& mesh)
{
    auto const& mat_ids = *MeshLib::materialIDs(mesh);
    std::vector<std::size_t> unmarked_elements;
    for (std::size_t idx = 0; idx < mat_ids.size(); ++idx)
    {
        if (mat_ids[idx] == -1)
        {
            unmarked_elements.push_back(idx);
        }
    }
    if (unmarked_elements.size() == mesh.getNumberOfElements())
    {
        return nullptr;
    }
    return MeshLib::removeElements(mesh, unmarked_elements, "mesh");
}
// Command-line tool: reads an ordered list of 2D layer meshes and voxelizes
// the space between them into a regular hex grid with per-cell MaterialIDs.
int main(int argc, char* argv[])
{
    TCLAP::CmdLine cmd(
        "Reads a list of 2D unstructured mesh layers and samples them onto a "
        "structured grid of the same extent. Note, that a large cube size may "
        "result in an undersampling of the original structure.\nCube sizes are "
        "defines by x/y/z-parameters. For equilateral cubes, only the "
        "x-parameter needs to be set.\n\n"
        "OpenGeoSys-6 software, version " +
            GitInfoLib::GitInfo::ogs_version +
            ".\n"
            "Copyright (c) 2012-2022, OpenGeoSys Community "
            "(http://www.opengeosys.org)",
        ' ', GitInfoLib::GitInfo::ogs_version);
    TCLAP::SwitchArg dilate_arg(
        "d", "dilate",
        "assign mat IDs based on single nodes instead of a majority of nodes, "
        "which can result in a slightly increased voxel grid extent",
        false);
    cmd.add(dilate_arg);
    TCLAP::ValueArg<double> z_arg("z", "cellsize-z",
                                  "edge length of cubes in z-direction (depth)",
                                  false, 1000, "floating point number");
    cmd.add(z_arg);
    TCLAP::ValueArg<double> y_arg(
        "y", "cellsize-y", "edge length of cubes in y-direction (latitude)",
        false, 1000, "floating point number");
    cmd.add(y_arg);
    TCLAP::ValueArg<double> x_arg(
        "x", "cellsize-x",
        "edge length of cubes in x-direction (longitude) or all directions, if "
        "y and z are not set",
        true, 1000, "floating point number");
    cmd.add(x_arg);
    TCLAP::ValueArg<std::string> output_arg(
        "o", "output", "name of output mesh (*.vtu)", true, "", "string");
    cmd.add(output_arg);
    TCLAP::ValueArg<std::string> input_arg(
        "i", "input",
        "name of the input file list containing the paths the all input layers "
        "in correct order from top to bottom",
        true, "", "string");
    cmd.add(input_arg);
    cmd.parse(argc, argv);
    // y and z must either both be given (cuboid cells) or both be omitted
    // (equilateral cubes derived from x).
    if ((y_arg.isSet() && !z_arg.isSet()) ||
        ((!y_arg.isSet() && z_arg.isSet())))
    {
        ERR("For equilateral cubes, only x needs to be set. For unequal "
            "cuboids, all three edge lengths (x/y/z) need to be specified.");
        return EXIT_FAILURE;
    }
    double const x_size = x_arg.getValue();
    double const y_size = (y_arg.isSet()) ? y_arg.getValue() : x_arg.getValue();
    double const z_size = (z_arg.isSet()) ? z_arg.getValue() : x_arg.getValue();
    std::array<double, 3> const cellsize = {x_size, y_size, z_size};
    std::string const input_name = input_arg.getValue();
    std::string const output_name = output_arg.getValue();
    auto const layer_names = BaseLib::IO::readStringListFromFile(input_name);
    if (layer_names.size() < 2)
    {
        ERR("At least two layers are required to create a 3D Mesh");
        return EXIT_FAILURE;
    }
    // Read all layers and accumulate their common bounding box.
    std::vector<std::unique_ptr<MeshLib::Mesh>> layers;
    layers.reserve(layer_names.size());
    constexpr double minval = std::numeric_limits<double>::max();
    constexpr double maxval = std::numeric_limits<double>::lowest();
    std::pair<MathLib::Point3d, MathLib::Point3d> extent(
        MathLib::Point3d{{minval, minval, minval}},
        MathLib::Point3d{{maxval, maxval, maxval}});
    for (auto const& layer : layer_names)
    {
        std::unique_ptr<MeshLib::Mesh> mesh(
            MeshLib::IO::readMeshFromFile(layer));
        if (mesh == nullptr)
        {
            ERR("Input layer '{:s}' not found. Aborting...", layer);
            return EXIT_FAILURE;
        }
        adjustExtent(extent, *mesh);
        layers.emplace_back(std::move(mesh));
    }
    // Voxelize: build the full grid, assign material IDs per layer interval,
    // then cut away cells that lie outside all layer intervals.
    std::unique_ptr<MeshLib::Mesh> mesh(generateInitialMesh(extent, cellsize));
    if (mesh == nullptr)
    {
        ERR("Error creating mesh...");
        return EXIT_FAILURE;
    }
    setMaterialIDs(*mesh, layers, dilate_arg.getValue());
    std::unique_ptr<MeshLib::Mesh> new_mesh(removeUnusedElements(*mesh));
    if (new_mesh == nullptr)
    {
        ERR("Error generating mesh...");
        return EXIT_FAILURE;
    }
    MeshLib::IO::VtuInterface vtu(new_mesh.get());
    vtu.writeToFile(output_name);
    return EXIT_SUCCESS;
}
|
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant.spark.fetchers
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStream}
import java.text.SimpleDateFormat
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
import java.util.{Calendar, Date, SimpleTimeZone}
import javax.ws.rs.client.WebTarget
import com.linkedin.drelephant.spark.fetchers.statusapiv1.StageStatus
import scala.concurrent.ExecutionContext
import scala.util.Try
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationAttemptInfoImpl, ApplicationInfoImpl, ExecutorSummaryImpl, JobDataImpl, StageDataImpl}
import javax.ws.rs.{GET, Path, PathParam, Produces}
import javax.ws.rs.core.{Application, MediaType, Response}
import javax.ws.rs.ext.ContextResolver
import com.google.common.io.Resources
import com.linkedin.drelephant.analysis.AnalyticJob
import com.ning.compress.lzf.LZFEncoder
import org.apache.spark.{JobExecutionStatus, SparkConf}
import org.glassfish.jersey.client.ClientConfig
import org.glassfish.jersey.server.ResourceConfig
import org.glassfish.jersey.test.{JerseyTest, TestProperties}
import org.scalatest.{AsyncFunSpec, Matchers}
import org.scalatest.compatible.Assertion
class SparkRestClientTest extends AsyncFunSpec with Matchers {
import SparkRestClientTest._
describe("SparkRestClient") {
it("returns the desired data from the Spark REST API for cluster mode application") {
import ExecutionContext.Implicits.global
val fakeJerseyServer = new FakeJerseyServer() {
override def configure(): Application = super.configure() match {
case resourceConfig: ResourceConfig =>
resourceConfig
.register(classOf[FetchClusterModeDataFixtures.ApiResource])
.register(classOf[FetchClusterModeDataFixtures.ApplicationResource])
.register(classOf[FetchClusterModeDataFixtures.JobsResource])
.register(classOf[FetchClusterModeDataFixtures.StagesResource])
.register(classOf[FetchClusterModeDataFixtures.ExecutorsResource])
.register(classOf[FetchClusterModeDataFixtures.LogsResource])
case config => config
}
}
fakeJerseyServer.setUp()
val historyServerUri = fakeJerseyServer.target.getUri
val sparkConf = new SparkConf().set("spark.yarn.historyServer.address", s"${historyServerUri.getHost}:${historyServerUri.getPort}")
val sparkRestClient = new SparkRestClient(sparkConf)
sparkRestClient.fetchData(FetchClusterModeDataFixtures.APP_ID) map { restDerivedData =>
restDerivedData.applicationInfo.id should be(FetchClusterModeDataFixtures.APP_ID)
restDerivedData.applicationInfo.name should be(FetchClusterModeDataFixtures.APP_NAME)
restDerivedData.jobDatas should not be (None)
restDerivedData.stageDatas should not be (None)
restDerivedData.executorSummaries should not be (None)
restDerivedData.logDerivedData should be(None)
} flatMap {
case assertion: Try[Assertion] => assertion
case _ =>
sparkRestClient.fetchData(FetchClusterModeDataFixtures.APP_ID, fetchLogs = true)
.map { _.logDerivedData.get.appConfigurationProperties should be(EXPECTED_PROPERTIES_FROM_LOG_1) }
} andThen { case assertion: Try[Assertion] =>
fakeJerseyServer.tearDown()
assertion
}
}
it("returns the desired SparkApplicationData using Spark REST API based eventlog for cluster mode application") {
import ExecutionContext.Implicits.global
val fakeJerseyServer = new FakeJerseyServer() {
override def configure(): Application = super.configure() match {
case resourceConfig: ResourceConfig =>
resourceConfig
.register(classOf[FetchClusterModeDataFixtures.ApiResource])
.register(classOf[FetchClusterModeDataFixtures.LogsResource])
case config => config
}
}
fakeJerseyServer.setUp()
val historyServerUri = fakeJerseyServer.target.getUri
val sparkConf = new SparkConf().set("spark.yarn.historyServer.address", s"${historyServerUri.getHost}:${historyServerUri.getPort}")
val sparkRestClient = new SparkRestClient(sparkConf)
val job = new AnalyticJob().setAppId(FetchClusterModeDataFixtures.APP_ID)
val sparkApplicationData = sparkRestClient.fetchEventLogAndParse(job)
sparkApplicationData.applicationInfo.id should be("application_1457600942802_0093")
sparkApplicationData.applicationInfo.name should be("PythonPi")
sparkApplicationData.jobDatas.size should be (1)
sparkApplicationData.stageDatas.size should be (1)
sparkApplicationData.executorSummaries.size should be(3)
sparkApplicationData.appConfigurationProperties.size should be(6)
sparkApplicationData.jobDatas(0).jobId should be(0)
sparkApplicationData.jobDatas(0).numCompletedTasks should be(10)
sparkApplicationData.jobDatas(0).numCompletedStages should be(1)
sparkApplicationData.jobDatas(0).status should be(JobExecutionStatus.SUCCEEDED)
sparkApplicationData.stageDatas(0).stageId should be(0)
sparkApplicationData.stageDatas(0).status should be(StageStatus.COMPLETE)
sparkApplicationData.stageDatas(0).numCompleteTasks should be(10)
sparkApplicationData.stageDatas(0).executorRunTime should be(2470)
sparkApplicationData.stageDatas(0).name should be("reduce at pi.py:39")
sparkApplicationData.executorSummaries(0).id should be("1")
sparkApplicationData.executorSummaries(1).id should be("2")
sparkApplicationData.executorSummaries(2).id should be("driver")
sparkApplicationData.executorSummaries(0).hostPort should be(".hello.com:38464")
sparkApplicationData.executorSummaries(1).hostPort should be(".hello.com:36478")
sparkApplicationData.executorSummaries(2).hostPort should be("10.20.0.71:58838")
sparkApplicationData.executorSummaries(0).maxMemory should be(2223023063L)
sparkApplicationData.executorSummaries(1).maxMemory should be(2223023063L)
sparkApplicationData.executorSummaries(2).maxMemory should be(1111794647L)
sparkApplicationData.executorSummaries(0).totalTasks should be(5)
sparkApplicationData.executorSummaries(1).totalTasks should be(5)
sparkApplicationData.executorSummaries(2).totalTasks should be(0)
sparkApplicationData.appConfigurationProperties should be(EXPECTED_PROPERTIES_FROM_LOG_1)
}
it("throws RunTimeException when eventlog name ends with .inprogress") {
import ExecutionContext.Implicits.global
val fakeJerseyServer = new FakeJerseyServer() {
override def configure(): Application = super.configure() match {
case resourceConfig: ResourceConfig =>
resourceConfig
.register(classOf[FetchClusterModeDataFixtures.ApiResource])
.register(classOf[FetchClusterModeDataFixtures.LogsResource])
case config => config
}
}
fakeJerseyServer.setUp()
val historyServerUri = fakeJerseyServer.target.getUri
val sparkConf = new SparkConf().set("spark.yarn.historyServer.address", s"${historyServerUri.getHost}:${historyServerUri.getPort}")
val sparkRestClient = new SparkRestClient(sparkConf) {
override def getApplicationLogs(logTarget: WebTarget): ZipInputStream = {
new ZipInputStream(newFakeLog(FetchClusterModeDataFixtures.APP_ID, None, ".inprogress"))
}
}
val job = new AnalyticJob().setAppId(FetchClusterModeDataFixtures.APP_ID)
val thrown = the[RuntimeException] thrownBy(sparkRestClient.fetchEventLogAndParse(job))
thrown.getMessage should be (s"Application for the log application_1.lzf.inprogress has not finished yet.")
}
// Verifies the full REST fetch path for a client-mode application (no attempt
// IDs in the REST paths) and, on a second pass, that the zipped event log is
// fetched and parsed into the expected configuration properties.
it("returns the desired data from the Spark REST API for client mode application") {
  import ExecutionContext.Implicits.global
  val fakeJerseyServer = new FakeJerseyServer() {
    override def configure(): Application = super.configure() match {
      case resourceConfig: ResourceConfig =>
        resourceConfig
          .register(classOf[FetchClientModeDataFixtures.ApiResource])
          .register(classOf[FetchClientModeDataFixtures.ApplicationResource])
          .register(classOf[FetchClientModeDataFixtures.JobsResource])
          .register(classOf[FetchClientModeDataFixtures.StagesResource])
          .register(classOf[FetchClientModeDataFixtures.ExecutorsResource])
          .register(classOf[FetchClientModeDataFixtures.LogsResource])
      case config => config
    }
  }
  fakeJerseyServer.setUp()
  val historyServerUri = fakeJerseyServer.target.getUri
  val sparkConf = new SparkConf().set("spark.yarn.historyServer.address", s"${historyServerUri.getHost}:${historyServerUri.getPort}")
  val sparkRestClient = new SparkRestClient(sparkConf)
  // Consistency fix: this is the client-mode test, so reference the
  // client-mode fixture constants throughout. The original referenced
  // FetchClusterModeDataFixtures here, which only worked because the two
  // objects define identical APP_ID/APP_NAME values.
  sparkRestClient.fetchData(FetchClientModeDataFixtures.APP_ID) map { restDerivedData =>
    restDerivedData.applicationInfo.id should be(FetchClientModeDataFixtures.APP_ID)
    restDerivedData.applicationInfo.name should be(FetchClientModeDataFixtures.APP_NAME)
    restDerivedData.jobDatas should not be(None)
    restDerivedData.stageDatas should not be(None)
    restDerivedData.executorSummaries should not be(None)
    // First pass does not request logs, so no log-derived data is present.
    restDerivedData.logDerivedData should be(None)
  } flatMap {
    case assertion: Try[Assertion] => assertion
    case _ =>
      // Second pass: fetch with logs and check the parsed Spark properties.
      sparkRestClient.fetchData(FetchClientModeDataFixtures.APP_ID, fetchLogs = true)
        .map { _.logDerivedData.get.appConfigurationProperties should be(EXPECTED_PROPERTIES_FROM_LOG_1) }
  } andThen { case assertion: Try[Assertion] =>
    // Tear the fake server down whether the assertions passed or failed.
    fakeJerseyServer.tearDown()
    assertion
  }
}
// Same cluster-mode round trip as the test above, but the history-server
// address is configured with an explicit "http://" scheme prefix, which the
// client must accept.
it("returns the desired data from the Spark REST API for cluster mode application when http in jobhistory address") {
  import ExecutionContext.Implicits.global
  val server = new FakeJerseyServer() {
    override def configure(): Application = super.configure() match {
      case resourceConfig: ResourceConfig =>
        resourceConfig
          .register(classOf[FetchClusterModeDataFixtures.ApiResource])
          .register(classOf[FetchClusterModeDataFixtures.ApplicationResource])
          .register(classOf[FetchClusterModeDataFixtures.JobsResource])
          .register(classOf[FetchClusterModeDataFixtures.StagesResource])
          .register(classOf[FetchClusterModeDataFixtures.ExecutorsResource])
      case config => config
    }
  }
  server.setUp()
  val uri = server.target.getUri
  val address = s"http://${uri.getHost}:${uri.getPort}"
  val client = new SparkRestClient(new SparkConf().set("spark.yarn.historyServer.address", address))
  client.fetchData(FetchClusterModeDataFixtures.APP_ID) map { restDerivedData =>
    restDerivedData.applicationInfo.id should be(FetchClusterModeDataFixtures.APP_ID)
    restDerivedData.applicationInfo.name should be(FetchClusterModeDataFixtures.APP_NAME)
    restDerivedData.jobDatas should not be(None)
    restDerivedData.stageDatas should not be(None)
    restDerivedData.executorSummaries should not be(None)
  } andThen { case assertion: Try[Assertion] =>
    // Always release the in-process Jersey container.
    server.tearDown()
    assertion
  }
}
// Constructing a client from a SparkConf that lacks
// spark.yarn.historyServer.address must be rejected up front.
it("throws an exception if spark.yarn.historyServer.address is missing") {
  val emptyConf = new SparkConf()
  an[IllegalArgumentException] should be thrownBy(new SparkRestClient(emptyConf))
}
// The REST object mapper must tolerate JSON fields it does not model, so
// newer history-server versions with extra fields do not break parsing.
it("handles unrecognized fields gracefully when parsing") {
  val mapper = SparkRestClient.SparkRestObjectMapper
  // Note the extra "unrecognized" key that ApplicationAttemptInfoImpl lacks.
  val json = """{
"startTime" : "2016-09-12T19:30:18.101GMT",
"endTime" : "1969-12-31T23:59:59.999GMT",
"sparkUser" : "foo",
"completed" : false,
"unrecognized" : "bar"
}"""
  val attemptInfo = mapper.readValue[ApplicationAttemptInfoImpl](json)
  attemptInfo.sparkUser should be("foo")
}
// Verifies fetchCompletedApplicationsData returns the applications whose
// attempts fall inside the requested [startTs, endTs] window.
it("returns the desired list of applications using Spark REST API") {
  import ExecutionContext.Implicits.global
  val fakeJerseyServer = new FakeJerseyServer() {
    override def configure(): Application = super.configure() match {
      case resourceConfig: ResourceConfig =>
        resourceConfig
          .register(classOf[FetchClusterModeDataFixtures.ApiResource])
          .register(classOf[FetchClusterModeDataFixtures.ApplicationsResource])
      case config => config
    }
  }
  fakeJerseyServer.setUp()
  try {
    val historyServerUri = fakeJerseyServer.target.getUri
    val sparkConf = new SparkConf().set(
      "spark.yarn.historyServer.address", s"${historyServerUri.getHost}:${historyServerUri.getPort}")
    val sparkRestClient = new SparkRestClient(sparkConf)
    // A window comfortably containing the fixture's two attempts, which end
    // at roughly "now".
    val startTs = System.currentTimeMillis - 200000
    val endTs = System.currentTimeMillis + 120000
    val sparkApplications = sparkRestClient.fetchCompletedApplicationsData(startTs, endTs)
    sparkApplications.size should be(1)
    sparkApplications.head.id should be(FetchClusterModeDataFixtures.APP_ID)
    sparkApplications.head.name should be(FetchClusterModeDataFixtures.APP_NAME)
    sparkApplications.head.attempts.size should be(2)
  } finally {
    // Fix: this test previously never tore the fake server down, leaking the
    // in-process Jersey container.
    fakeJerseyServer.tearDown()
  }
}
}
}
// Companion object holding the fixtures and helpers used by the
// SparkRestClientTest spec: an in-process Jersey server, fake JAX-RS
// resources that mimic the Spark history server's REST API for cluster-mode
// and client-mode applications, and builders for fake attempt info and
// zipped event logs.
object SparkRestClientTest {
// In-process Jersey test container. Binds to an ephemeral port
// (CONTAINER_PORT "0") so tests can run in parallel without collisions.
class FakeJerseyServer extends JerseyTest {
override def configure(): Application = {
forceSet(TestProperties.CONTAINER_PORT, "0")
enable(TestProperties.LOG_TRAFFIC)
enable(TestProperties.DUMP_ENTITY)
new ResourceConfig()
.register(classOf[FakeJerseyObjectMapperProvider])
}
// The test-side client must use the same mapper as the server so JSON
// round-trips symmetrically.
override def configureClient(clientConfig: ClientConfig): Unit = {
clientConfig.register(classOf[FakeJerseyObjectMapperProvider])
}
}
// Supplies Jackson's ObjectMapper to Jersey, configured with the Scala
// module and a GMT date format matching Spark's REST timestamp style
// (e.g. "2016-09-12T19:30:18.101GMT").
class FakeJerseyObjectMapperProvider extends ContextResolver[ObjectMapper] {
lazy val objectMapper = {
val objectMapper = new ObjectMapper()
objectMapper.registerModule(DefaultScalaModule)
objectMapper.setDateFormat(dateFormat)
objectMapper
}
lazy val dateFormat = {
val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'")
val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT"))
iso8601.setCalendar(cal)
iso8601
}
override def getContext(cls: Class[_]): ObjectMapper = objectMapper
}
// Fixtures for cluster-mode applications: REST paths carry both an appId
// and an attemptId segment. Sub-resources for attempts other than "2"
// throw, so a passing test proves the client selects the latest attempt.
object FetchClusterModeDataFixtures {
val APP_ID = "application_1"
val APP_NAME = "app"
// Root resource; dispatches each API path to its fake sub-resource.
@Path("/api/v1")
class ApiResource {
@Path("applications")
def getApplications(): ApplicationsResource = new ApplicationsResource()
@Path("applications/{appId}")
def getApplication(): ApplicationResource = new ApplicationResource()
@Path("applications/{appId}/{attemptId}/jobs")
def getJobs(): JobsResource = new JobsResource()
@Path("applications/{appId}/{attemptId}/stages")
def getStages(): StagesResource = new StagesResource()
@Path("applications/{appId}/{attemptId}/executors")
def getExecutors(): ExecutorsResource = new ExecutorsResource()
@Path("applications/{appId}/{attemptId}/logs")
def getLogs(): LogsResource = new LogsResource()
}
// Serves a single application with two attempts; attempt "2" (listed
// first) is the most recent.
@Produces(Array(MediaType.APPLICATION_JSON))
class ApplicationResource {
@GET
def getApplication(@PathParam("appId") appId: String): ApplicationInfoImpl = {
val t2 = System.currentTimeMillis
val t1 = t2 - 1
val duration = 8000000L
new ApplicationInfoImpl(
APP_ID,
APP_NAME,
Seq(
newFakeApplicationAttemptInfo(Some("2"), startTime = new Date(t2 - duration), endTime = new Date(t2)),
newFakeApplicationAttemptInfo(Some("1"), startTime = new Date(t1 - duration), endTime = new Date(t1))
)
)
}
}
// Serves the application list endpoint used by
// fetchCompletedApplicationsData.
@Produces(Array(MediaType.APPLICATION_JSON))
class ApplicationsResource {
@GET
def getApplications(): Seq[ApplicationInfoImpl] = {
val t2 = System.currentTimeMillis
// NOTE(review): here t1 = t2 + 1 whereas ApplicationResource uses
// t2 - 1 — presumably intentional for list-ordering behavior, but
// worth confirming against the client's sorting logic.
val t1 = t2 + 1
val duration = 8000000L
Seq(
new ApplicationInfoImpl(
APP_ID,
APP_NAME,
Seq(
newFakeApplicationAttemptInfo(Some("2"), startTime = new Date(t2 - duration), endTime = new Date(t2)),
newFakeApplicationAttemptInfo(Some("1"), startTime = new Date(t1 - duration), endTime = new Date(t1))
)
)
)
}
}
// Each of the following sub-resources only answers for attemptId "2";
// any other attempt throws, proving the client queried the newest one.
@Produces(Array(MediaType.APPLICATION_JSON))
class JobsResource {
@GET
def getJobs(@PathParam("appId") appId: String, @PathParam("attemptId") attemptId: String): Seq[JobDataImpl] =
if (attemptId == "2") Seq.empty else throw new Exception()
}
@Produces(Array(MediaType.APPLICATION_JSON))
class StagesResource {
@GET
def getStages(@PathParam("appId") appId: String, @PathParam("attemptId") attemptId: String): Seq[StageDataImpl] =
if (attemptId == "2") Seq.empty else throw new Exception()
}
@Produces(Array(MediaType.APPLICATION_JSON))
class ExecutorsResource {
@GET
def getExecutors(@PathParam("appId") appId: String, @PathParam("attemptId") attemptId: String): Seq[ExecutorSummaryImpl] =
if (attemptId == "2") Seq.empty else throw new Exception()
}
// Streams a fake zipped event log for the newest attempt only.
@Produces(Array(MediaType.APPLICATION_OCTET_STREAM))
class LogsResource {
@GET
def getLogs(@PathParam("appId") appId: String, @PathParam("attemptId") attemptId: String): Response = {
if (attemptId == "2") {
Response.ok(newFakeLog(appId, Some(attemptId))).build()
} else throw new Exception()
}
}
}
// Fixtures for client-mode applications: REST paths carry only an appId —
// there is no attemptId segment, and attempt infos have attemptId = None.
object FetchClientModeDataFixtures {
val APP_ID = "application_1"
val APP_NAME = "app"
@Path("/api/v1")
class ApiResource {
@Path("applications/{appId}")
def getApplication(): ApplicationResource = new ApplicationResource()
@Path("applications/{appId}/jobs")
def getJobs(): JobsResource = new JobsResource()
@Path("applications/{appId}/stages")
def getStages(): StagesResource = new StagesResource()
@Path("applications/{appId}/executors")
def getExecutors(): ExecutorsResource = new ExecutorsResource()
@Path("applications/{appId}/logs")
def getLogs(): LogsResource = new LogsResource()
}
// Two attempts, both without attempt IDs, as a client-mode history entry
// reports them.
@Produces(Array(MediaType.APPLICATION_JSON))
class ApplicationResource {
@GET
def getApplication(@PathParam("appId") appId: String): ApplicationInfoImpl = {
val t2 = System.currentTimeMillis
val t1 = t2 - 1
val duration = 8000000L
new ApplicationInfoImpl(
APP_ID,
APP_NAME,
Seq(
newFakeApplicationAttemptInfo(None, startTime = new Date(t2 - duration), endTime = new Date(t2)),
newFakeApplicationAttemptInfo(None, startTime = new Date(t1 - duration), endTime = new Date(t1))
)
)
}
}
// Unlike the cluster-mode fixtures, these always succeed — there is no
// attempt selection to verify in client mode.
@Produces(Array(MediaType.APPLICATION_JSON))
class JobsResource {
@GET
def getJobs(@PathParam("appId") appId: String): Seq[JobDataImpl] =
Seq.empty
}
@Produces(Array(MediaType.APPLICATION_JSON))
class StagesResource {
@GET
def getStages(@PathParam("appId") appId: String): Seq[StageDataImpl] =
Seq.empty
}
@Produces(Array(MediaType.APPLICATION_JSON))
class ExecutorsResource {
@GET
def getExecutors(@PathParam("appId") appId: String): Seq[ExecutorSummaryImpl] =
Seq.empty
}
@Produces(Array(MediaType.APPLICATION_OCTET_STREAM))
class LogsResource {
@GET
def getLogs(@PathParam("appId") appId: String): Response = {
Response.ok(newFakeLog(appId, None)).build()
}
}
}
// Builds an attempt-info record with fixed user/completed values; only the
// attempt id and time window vary between fixtures.
def newFakeApplicationAttemptInfo(
attemptId: Option[String],
startTime: Date,
endTime: Date
): ApplicationAttemptInfoImpl = new ApplicationAttemptInfoImpl(
attemptId,
startTime,
endTime,
sparkUser = "foo",
completed = true
)
// Raw bytes of the checked-in sample event log served by the fake servers.
private val EVENT_LOG_1 = Resources.toByteArray(
Resources.getResource("spark_event_logs/event_log_1"))
// The Spark properties the client is expected to extract from EVENT_LOG_1;
// asserted against logDerivedData.appConfigurationProperties in the tests.
private val EXPECTED_PROPERTIES_FROM_LOG_1 = Map(
"spark.serializer" -> "org.apache.spark.serializer.KryoSerializer",
"spark.storage.memoryFraction" -> "0.3",
"spark.driver.memory" -> "2G",
"spark.executor.instances" -> "900",
"spark.executor.memory" -> "1g",
"spark.shuffle.memoryFraction" -> "0.5"
)
// Zips EVENT_LOG_1 (LZF-compressed) under the history-server log naming
// scheme: "<appId>[_<attemptId>].lzf[<inProgress>]". Pass
// inProgress = ".inprogress" to simulate a log still being written.
def newFakeLog(appId: String, attemptId: Option[String], inProgress: String = ""): InputStream = {
val os = new ByteArrayOutputStream()
val zos = new ZipOutputStream(os)
val name = attemptId.map(id => s"${appId}_$id").getOrElse(appId) + ".lzf" + inProgress
zos.putNextEntry(new ZipEntry(name))
// LZFEncoder instead of Snappy, because of xerial/snappy-java#76.
zos.write(LZFEncoder.encode(EVENT_LOG_1))
zos.closeEntry()
zos.close()
new ByteArrayInputStream(os.toByteArray)
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.