text
stringlengths 27
775k
|
|---|
package org.ccci.gto.android.common.db;
import android.content.ContentValues;
import android.database.Cursor;
import androidx.annotation.NonNull;
/**
 * Maps model objects of type {@code T} to and from their SQLite row representation.
 *
 * @param <T> the model type handled by this mapper
 */
public interface Mapper<T> {
    /**
     * Converts {@code obj} into a {@link ContentValues} for insertion/update.
     * NOTE(review): presumably only the columns named in {@code projection}
     * are populated — confirm with implementations.
     */
    @NonNull
    ContentValues toContentValues(@NonNull T obj, @NonNull String[] projection);

    /**
     * Builds an object of type {@code T} from the current row of {@code c}.
     * NOTE(review): assumes the cursor is already positioned on a valid row —
     * confirm with callers.
     */
    @NonNull
    T toObject(@NonNull Cursor c);
}
|
---
layout: post
title: ์ฐ๋ถํฌ์์ node.js ์ค์น
author: teddy
categories: [ server, nodejs ]
date: 2020-10-06 16:00:00 +0900
image: assets/images/ubuntu+node.jpg
featured: true
---
์ฐ๋ถํฌ ํ๊ฒฝ์ ์์ฃผ ์ฌ์ฉํ๋๋ฐ, ๋งค๋ฒ ์๋ก์ด ํ๊ฒฝ์์ node.js ๋ฅผ ์ค์นํ ๋ ๋ง๋ค.. ์ฐพ์๋ณด๋ ๊ฒ ๊ฐ์ต๋๋ค.
๋ฒ์ ๋ง๋ค ๋ค๋ฅธ๋ฐ, ์ ๋ ์ฃผ๋ก LTS ๋ฒ์ ์ ์ด์ฉํ๊ธฐ ๋๋ฌธ์ Node.js LTS ๋ฅผ ์ค์นํ์ต๋๋ค.
(์ฐธ๊ณ , ํ
์คํธ ํ๊ฒฝ - Ubuntu 16.04, 18.04, 20.04) <br>
### **Node.js LTS ๋ฒ์ (v12.x)**:
```bash
curl -sL https://deb.nodesource.com/setup_lts.x | sudo -E bash -
sudo apt-get install -y nodejs
```
<br>
### **Node.js v14.x**:
```bash
curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -
sudo apt-get install -y nodejs
```
<br>
### **Node.js v12.x**:
```bash
curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash -
sudo apt-get install -y nodejs
```
<br>
### **Node.js v10.x**:
```bash
curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -
sudo apt-get install -y nodejs
```
<br>
## **Node.js ์ต์ ๋ฒ์ (v14.x)**:
```bash
curl -sL https://deb.nodesource.com/setup_current.x | sudo -E bash -
sudo apt-get install -y nodejs
```
<br>
#### ***์ ํ์ฌํญ***: ๋น๋ ํด ์ค์นํ๊ธฐ
npm์์ ๋ค์ดํฐ๋ธ addon๋ค์ ์ปดํ์ผํ๊ณ ์ค์นํ๊ธฐ ์ํด์, ๋น๋ ๋๊ตฌ๋ฅผ ์ค์น ํ ์ ๋ ์์ต๋๋ค. ์๋ ์ปค๋ฉ๋๋ฅผ ์ฐธ๊ณ ํ์ธ์.
```bash
apt-get install -y build-essential
```
<br>
---
## References
* **NodeSource Node.js Binary Distributions** (https://github.com/nodesource/distributions/blob/master/README.md)
|
package cn.ncu.edu.be.security.provider
import cn.ncu.edu.be.exception.UserNotFoundException
import cn.ncu.edu.be.security.authentication.JwtAuthenticationToken
import cn.ncu.edu.be.security.authentication.UserAuthenticationToken
import cn.ncu.edu.be.security.exception.UserNotFoundAuthenticationException
import cn.ncu.edu.be.user.UserService
import cn.ncu.edu.be.util.TokenUtil
import io.jsonwebtoken.JwtException
import org.springframework.security.authentication.AuthenticationProvider
import org.springframework.security.authentication.BadCredentialsException
import org.springframework.security.authentication.InternalAuthenticationServiceException
import org.springframework.security.core.Authentication
import org.springframework.util.Assert
/**
 * [AuthenticationProvider] that authenticates JWT bearer tokens.
 *
 * Extracts the user id from the token's credentials, loads the user through
 * [UserService], and exchanges the incoming [JwtAuthenticationToken] for a
 * fully authenticated [UserAuthenticationToken].
 *
 * @author Lin Lorry
 * @author linlorry.p@gmail.com
 * @see AuthenticationProvider
 */
class JwtAuthenticationProvider(private val userService: UserService) : AuthenticationProvider {

    override fun authenticate(authentication: Authentication?): Authentication {
        // Reject anything that is not a JWT authentication request up front.
        Assert.isInstanceOf(JwtAuthenticationToken::class.java, authentication)
        val jwtToken = authentication as JwtAuthenticationToken

        // Translate domain/JWT failures into Spring Security's exception hierarchy.
        val user = try {
            val userId = TokenUtil.getIdFromToken(jwtToken.credentials)
            userService.loadById(userId)
        } catch (ex: UserNotFoundException) {
            throw UserNotFoundAuthenticationException(ex)
        } catch (ex: JwtException) {
            throw BadCredentialsException(ex.message ?: "JwtAuthenticationProvider.authenticate: Jwt Exception")
        } catch (ex: Exception) {
            throw InternalAuthenticationServiceException(ex.message, ex)
        }

        return UserAuthenticationToken(user)
    }

    override fun supports(authentication: Class<*>): Boolean =
        JwtAuthenticationToken::class.java.isAssignableFrom(authentication)
}
|
// @dart=2.9
import 'package:business_banking/features/credit_card/ui/credit_card/credit_card_widget.dart';
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
void main() {
  Widget testWidget;

  // Fresh widget tree per test case.
  setUp(() {
    testWidget = MaterialApp(home: CreditCardWidget());
  });

  tearDown(() {
    testWidget = null;
  });

  testWidgets('CreditCardWidget initialization', (tester) async {
    // Render the widget and allow the initial frame(s) to settle.
    await tester.pumpWidget(testWidget);
    await tester.pump(const Duration(milliseconds: 500));

    // Exactly one CreditCardWidget should be present in the tree.
    expect(find.byType(CreditCardWidget), findsOneWidget);
  });
}
|
import { Neovim } from "neovim";
/**
 * Thin repository wrapping the Neovim commands used to open, close and list
 * buffers.
 */
export class BufferRepository {
  constructor(protected readonly vim: Neovim) {}

  // NOTE(review): in Vim, `:split` stacks windows vertically and `:vsplit`
  // places them side by side; the vertical/horizontal naming below mirrors
  // the original code — confirm the intended orientation with callers
  // before swapping the commands.
  public async verticalOpen(id: number): Promise<void> {
    await this.vim.command(`rightbelow split #${id}`);
  }

  public async horizontalOpen(id: number): Promise<void> {
    await this.vim.command(`rightbelow vsplit #${id}`);
  }

  public async tabOpen(id: number): Promise<void> {
    await this.vim.command(`tabedit #${id}`);
  }

  public async open(id: number): Promise<void> {
    await this.vim.command(`buffer ${id}`);
  }

  public async delete(id: number): Promise<void> {
    await this.vim.command(`bwipeout ${id}`);
  }

  public async getBufferIdsOnCurrentTab(): Promise<ReadonlyArray<number>> {
    return await this.vim.call("tabpagebuflist");
  }
}
|
<?php
namespace ImmoweltHH\Test\DependencyInjection\Fixtures\HasDependencies;
use ImmoweltHH\Test\DependencyInjection\Fixtures\NoDependencies\ClassANoConstructor;
class ClassAWithDependencies
{
    /**
     * Test fixture: a class with exactly one constructor dependency, exposed
     * publicly so dependency-injection tests can assert it was injected.
     *
     * @var ClassANoConstructor
     */
    public $classANoConstructor;

    /**
     * ClassAWithDependencies constructor.
     *
     * @param ClassANoConstructor $classANoConstructor the dependency to capture
     */
    public function __construct(ClassANoConstructor $classANoConstructor)
    {
        $this->classANoConstructor = $classANoConstructor;
    }
}
|
use crate::{
ast::{Expr, NaryOp},
context::{Context, InputWithContext},
real::Real,
};
use inari::dec_interval;
use nom::{
branch::alt,
bytes::complete::{tag, take, take_while},
character::complete::{char, digit0, digit1, one_of, satisfy, space0},
combinator::{
all_consuming, consumed, cut, map, map_opt, not, opt, peek, recognize, value, verify,
},
error::{ErrorKind as NomErrorKind, ParseError},
multi::{fold_many0, many0_count},
sequence::{delimited, pair, preceded, separated_pair, terminated},
Err, Finish, IResult, InputLength, Parser,
};
use rug::{Integer, Rational};
use std::ops::Range;
/// Kinds of parse errors surfaced to the user.
#[derive(Clone, Debug)]
enum ErrorKind<'a> {
    /// A specific character was expected at this position.
    ExpectedChar(char),
    /// The whole input should have been consumed, but trailing input remains.
    ExpectedEof,
    /// An expression was expected at this position.
    ExpectedExpr,
    /// An identifier was found that is not defined in any context.
    UnknownIdentifier(&'a str),
    /// Errors reported by nom's combinators that should not be exposed.
    OtherNomError,
}

/// A parse error: the remaining input at the error position plus its kind.
#[derive(Clone, Debug)]
struct Error<'a, I> {
    input: I,
    kind: ErrorKind<'a>,
}

impl<'a, I> Error<'a, I> {
    /// Creates an [`ErrorKind::ExpectedExpr`] error at `input`.
    fn expected_expr(input: I) -> Self {
        Self {
            input,
            kind: ErrorKind::ExpectedExpr,
        }
    }

    /// Creates an [`ErrorKind::UnknownIdentifier`] error for `name` at `input`.
    fn unknown_identifier(input: I, name: &'a str) -> Self {
        Self {
            input,
            kind: ErrorKind::UnknownIdentifier(name),
        }
    }
}

impl<'a, I> ParseError<I> for Error<'a, I> {
    fn append(_: I, _: NomErrorKind, other: Self) -> Self {
        // Only keep the first error.
        other
    }

    fn from_char(input: I, c: char) -> Self {
        Self {
            input,
            kind: ErrorKind::ExpectedChar(c),
        }
    }

    fn from_error_kind(input: I, kind: NomErrorKind) -> Self {
        Self {
            input,
            kind: match kind {
                // `all_consuming` reports `Eof` when trailing input remains.
                NomErrorKind::Eof => ErrorKind::ExpectedEof,
                _ => ErrorKind::OtherNomError,
            },
        }
    }
}

/// Result type shared by all parsers in this module.
type ParseResult<'a, O> = IResult<InputWithContext<'a>, O, Error<'a, InputWithContext<'a>>>;
// Based on `inari::parse::parse_dec_float`.
/// Parses a decimal literal such as `"123.456"` into an exact [`Rational`].
fn parse_decimal(mant: &str) -> Option<Rational> {
    // Returns `base^exp` as a rational; negative exponents become reciprocals.
    fn pow(base: u32, exp: i32) -> Rational {
        let i = Integer::from(Integer::u_pow_u(base, exp.unsigned_abs()));
        let mut r = Rational::from(i);
        if exp < 0 {
            r.recip_mut();
        }
        r
    }

    // Split into integer and (possibly empty) fractional parts.
    let mut parts = mant.split('.');
    let int_part = parts.next().unwrap();
    let frac_part = match parts.next() {
        Some(s) => s,
        _ => "",
    };
    // 123.456 -> 123456e-3 (ulp == 1e-3)
    let log_ulp = -(frac_part.len() as i32);
    let ulp = pow(10, log_ulp);
    let i_str = [int_part, frac_part].concat();
    let i = Integer::parse_radix(i_str, 10).unwrap();
    Some(Rational::from(i) * ulp)
}
/// Parses the source text of a decimal literal: `"12"`, `"12."`, `"12.3"` or `".3"`.
fn decimal_literal(i: InputWithContext) -> ParseResult<&str> {
    map(
        alt((
            // "12", "12." or "12.3"
            recognize(pair(digit1, opt(pair(char('.'), digit0)))),
            // ".3"
            recognize(pair(char('.'), digit1)),
        )),
        |i: InputWithContext| i.source,
    )(i)
}

/// Parses a decimal literal into a constant [`Expr`].
fn decimal_constant(i: InputWithContext) -> ParseResult<Expr> {
    map(decimal_literal, |s| {
        let x = if let Some(x_q) = parse_decimal(s) {
            Real::from(x_q)
        } else {
            // Fall back to evaluating the literal as a decorated interval.
            let interval_literal = ["[", s, "]"].concat();
            Real::from(dec_interval!(&interval_literal).unwrap())
        };
        Expr::constant(x)
    })(i)
}

/// First character of an identifier: any alphabetic character.
fn identifier_head(i: InputWithContext) -> ParseResult<char> {
    satisfy(|c| c.is_alphabetic())(i)
}

/// Remaining identifier characters: alphanumerics and primes (`'`).
fn identifier_tail(i: InputWithContext) -> ParseResult<&str> {
    map(
        recognize(many0_count(satisfy(|c| c.is_alphanumeric() || c == '\''))),
        |i: InputWithContext| i.source,
    )(i)
}

/// Parses an identifier and returns its source text.
fn identifier(i: InputWithContext) -> ParseResult<&str> {
    map(recognize(pair(identifier_head, identifier_tail)), |i| {
        i.source
    })(i)
}

/// Parses an identifier that is defined in some context on the stack,
/// returning the innermost defining context together with the name.
fn name_in_context(i: InputWithContext) -> ParseResult<(&Context, &str)> {
    let context_stack = i.context_stack;
    map_opt(identifier, move |name| {
        context_stack
            .iter()
            .rfind(|c| c.has(name))
            .map(|&c| (c, name))
    })(i)
}

/// Parses a named constant (e.g. `pi`) defined in a context.
fn named_constant(i: InputWithContext) -> ParseResult<Expr> {
    map_opt(name_in_context, |(ctx, name)| ctx.get_constant(name))(i)
}

/// Parses a name that refers to a function in its defining context.
fn function_name(i: InputWithContext) -> ParseResult<(&Context, &str)> {
    verify(name_in_context, |(ctx, name)| ctx.is_function(name))(i)
}
/// Nonempty, comma-separated list of expressions.
fn expr_list(i: InputWithContext) -> ParseResult<Vec<Expr>> {
    let (i, mut x) = expr(i)?;
    fold_many0(
        preceded(delimited(space0, char(','), space0), cut(expr)),
        // Seed the accumulator with the first (already-parsed) expression.
        move || vec![std::mem::take(&mut x)],
        |mut xs, x| {
            xs.push(x);
            xs
        },
    )(i)
}

/// Parses `f(x, ...)` and applies the named function to its arguments.
fn function_application(i: InputWithContext) -> ParseResult<Expr> {
    map(
        pair(
            function_name,
            delimited(
                delimited(space0, cut(char('(')), space0),
                cut(expr_list),
                preceded(space0, cut(char(')'))),
            ),
        ),
        |((ctx, name), args)| ctx.apply(name, args),
    )(i)
}

/// If an identifier is found, [`cut`]s with [`ErrorKind::UnknownIdentifier`]
/// (the position where the identifier is found is reported);
/// otherwise, fails in the same manner as [`identifier`].
fn fail_unknown_identifier(i: InputWithContext) -> ParseResult<Expr> {
    let (i, name) = peek(identifier)(i)?;
    Err(Err::Failure(Error::unknown_identifier(i, name)))
}

/// Fails with [`ErrorKind::ExpectedExpr`].
fn fail_expr(i: InputWithContext) -> ParseResult<Expr> {
    Err(Err::Error(Error::expected_expr(i)))
}
/// Parses the expression inside `|...|` (absolute value). This is tricky
/// because `|` is both the closing delimiter and part of nested `|...|`
/// expressions (and of `||`). The strategy: try successively longer
/// candidate spans, each ending just before a `|`, and accept the first
/// span that parses as a complete expression.
fn expr_within_bars(i: InputWithContext) -> ParseResult<Expr> {
    let mut o = recognize(take_while(|c| c != '|'))(i.clone())?;
    // A candidate is only well-formed when it contains an even number of
    // bars (all nested `|...|` pairs are balanced).
    let mut even_bars_taken = true;
    loop {
        let (rest, taken) = o;
        if even_bars_taken {
            if let Ok((_, x)) = all_consuming(expr)(taken.clone()) {
                return Ok((rest, x));
            }
        }
        if rest.input_len() == 0 {
            // Reached the end of input. All we can do is return a meaningful error.
            return expr(taken);
        }
        // Extend the candidate span past the next bar.
        o = recognize(pair(take(taken.input_len() + 1), take_while(|c| c != '|')))(i.clone())?;
        even_bars_taken = !even_bars_taken;
    }
}

/// The inverse operation of [`cut`]; converts [`Err::Failure`] back to [`Err::Error`].
fn decut<I, O, E: ParseError<I>, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, O, E>
where
    F: Parser<I, O, E>,
{
    move |input: I| match parser.parse(input) {
        Err(Err::Failure(e)) => Err(Err::Error(e)),
        rest => rest,
    }
}
/// Parses a primary expression: a literal, named constant, function
/// application, `(...)`, list `[...]`, absolute value `|...|`, ceiling
/// `⌈...⌉` or floor `⌊...⌋`.
fn primary_expr(i: InputWithContext) -> ParseResult<Expr> {
    // The built-in context is always at the bottom of the context stack.
    let builtin = i.context_stack.first().unwrap();
    map(
        consumed(alt((
            decimal_constant,
            named_constant,
            function_application,
            fail_unknown_identifier,
            delimited(
                terminated(char('('), space0),
                cut(expr),
                preceded(space0, cut(char(')'))),
            ),
            map(
                delimited(
                    terminated(char('['), space0),
                    cut(expr_list),
                    preceded(space0, cut(char(']'))),
                ),
                |xs| Expr::nary(NaryOp::List, xs),
            ),
            map(
                delimited(
                    delimited(char('|'), peek(not(char('|'))), space0),
                    // Not an OR expression (unless it's called from the case below).
                    // So we can cut when no expression is found.
                    cut(expr_within_bars),
                    preceded(space0, cut(char('|'))),
                ),
                move |x| builtin.apply("abs", vec![x]),
            ),
            map(
                delimited(
                    terminated(char('|'), space0),
                    // Possibly an OR expression. We must not cut when no expression is found.
                    // The above case is called recursively, so we also need to cancel cut.
                    decut(expr_within_bars),
                    preceded(space0, cut(char('|'))),
                ),
                move |x| builtin.apply("abs", vec![x]),
            ),
            map(
                delimited(
                    terminated(char('⌈'), space0),
                    cut(expr),
                    preceded(space0, cut(char('⌉'))),
                ),
                move |x| builtin.apply("ceil", vec![x]),
            ),
            map(
                delimited(
                    terminated(char('⌊'), space0),
                    cut(expr),
                    preceded(space0, cut(char('⌋'))),
                ),
                move |x| builtin.apply("floor", vec![x]),
            ),
            fail_expr,
        ))),
        // Attach the consumed source range to the parsed expression.
        |(i, x)| x.with_source_range(i.source_range),
    )(i)
}
// ^ is right-associative; x^y^z is equivalent to x^(y^z).
/// Parses exponentiation: `primary (^ | ^^) unary`.
fn power_expr(i: InputWithContext) -> ParseResult<Expr> {
    let builtin = i.context_stack.first().unwrap();
    map(
        pair(
            primary_expr,
            opt(pair(
                delimited(
                    space0,
                    // `^^` must be tried before its prefix `^`.
                    alt((value("^^", tag("^^")), value("^", char('^')))),
                    space0,
                ),
                // The right operand is a unary expression, so `x ^ -y` parses.
                cut(unary_expr),
            )),
        ),
        move |(x, op_y)| match op_y {
            Some((op, y)) => {
                let range = x.source_range.start..y.source_range.end;
                builtin.apply(op, vec![x, y]).with_source_range(range)
            }
            _ => x,
        },
    )(i)
}

/// Parses unary `+` (no-op), `~` (conjugate), `-`/`−` (negation) and
/// `!`/`¬` (logical not).
fn unary_expr(i: InputWithContext) -> ParseResult<Expr> {
    let builtin = i.context_stack.first().unwrap();
    alt((
        // Unary plus is dropped entirely.
        preceded(pair(char('+'), space0), cut(unary_expr)),
        map(
            consumed(separated_pair(
                alt((
                    value("~", char('~')),
                    value("-", one_of("-−")), // a hyphen-minus or a minus sign
                    value("!", one_of("!¬")),
                )),
                space0,
                cut(unary_expr),
            )),
            move |(i, (op, x))| builtin.apply(op, vec![x]).with_source_range(i.source_range),
        ),
        power_expr,
    ))(i)
}
/// Parses multiplication and division, including implicit multiplication
/// (`2x`, `x y`); left-associative.
fn multiplicative_expr(i: InputWithContext) -> ParseResult<Expr> {
    let builtin = i.context_stack.first().unwrap();
    let (i, mut x) = unary_expr(i)?;
    fold_many0(
        alt((
            // x * y
            // x / y
            pair(
                delimited(
                    space0,
                    alt((value("*", char('*')), value("/", char('/')))),
                    space0,
                ),
                cut(unary_expr),
            ),
            // 2x
            // x y
            pair(value("*", space0), power_expr),
        )),
        move || std::mem::take(&mut x),
        move |xs, (op, y)| {
            let range = xs.source_range.start..y.source_range.end;
            builtin.apply(op, vec![xs, y]).with_source_range(range)
        },
    )(i)
}

/// Parses addition and subtraction; left-associative.
fn additive_expr(i: InputWithContext) -> ParseResult<Expr> {
    let builtin = i.context_stack.first().unwrap();
    let (i, mut x) = multiplicative_expr(i)?;
    fold_many0(
        pair(
            delimited(
                space0,
                alt((
                    value("+", char('+')),
                    value("-", one_of("-−")), // a hyphen-minus or a minus sign
                )),
                space0,
            ),
            cut(multiplicative_expr),
        ),
        move || std::mem::take(&mut x),
        move |xs, (op, y)| {
            let range = xs.source_range.start..y.source_range.end;
            builtin.apply(op, vec![xs, y]).with_source_range(range)
        },
    )(i)
}
// Relational operators can be chained: x op1 y op2 z is equivalent to x op1 y ∧ y op2 z.
fn relational_expr(i: InputWithContext) -> ParseResult<Expr> {
    let builtin = i.context_stack.first().unwrap();
    let (i, mut side) = additive_expr(i)?;
    map(
        fold_many0(
            pair(
                delimited(
                    space0,
                    alt((
                        value("=", char('=')),
                        // Multi-character operators are tried before their prefixes.
                        value(">=", alt((tag(">="), tag("≥")))),
                        value(">", char('>')),
                        value("<=", alt((tag("<="), tag("≤")))),
                        value("<", char('<')),
                    )),
                    space0,
                ),
                cut(additive_expr),
            ),
            // Collect every operator and operand of the chain.
            move || (vec![], vec![std::mem::take(&mut side)]),
            |(mut ops, mut sides), (op, side)| {
                ops.push(op);
                sides.push(side);
                (ops, sides)
            },
        ),
        move |(ops, sides)| {
            assert_eq!(sides.len(), ops.len() + 1);
            if sides.len() == 1 {
                // Not a relation — just the additive expression itself.
                sides.into_iter().next().unwrap()
            } else {
                // Fold `x op1 y op2 z` into `(x op1 y) && (y op2 z)`.
                ops.iter()
                    .zip(sides.windows(2))
                    .map(|(op, sides)| {
                        let range = sides[0].source_range.start..sides[1].source_range.end;
                        builtin.apply(op, sides.to_vec()).with_source_range(range)
                    })
                    .reduce(|xs, y| {
                        let range = xs.source_range.start..y.source_range.end;
                        builtin.apply("&&", vec![xs, y]).with_source_range(range)
                    })
                    .unwrap()
            }
        },
    )(i)
}
/// Parses logical AND (`&&` or `∧`); left-associative.
fn and_expr(i: InputWithContext) -> ParseResult<Expr> {
    let builtin = i.context_stack.first().unwrap();
    let (i, mut x) = relational_expr(i)?;
    fold_many0(
        preceded(
            delimited(space0, alt((tag("&&"), tag("∧"))), space0),
            cut(relational_expr),
        ),
        move || std::mem::take(&mut x),
        move |xs, y| {
            let range = xs.source_range.start..y.source_range.end;
            builtin.apply("&&", vec![xs, y]).with_source_range(range)
        },
    )(i)
}

/// Parses logical OR (`||` or `∨`); left-associative.
fn or_expr(i: InputWithContext) -> ParseResult<Expr> {
    let builtin = i.context_stack.first().unwrap();
    let (i, mut x) = and_expr(i)?;
    fold_many0(
        preceded(
            delimited(space0, alt((tag("||"), tag("∨"))), space0),
            cut(and_expr),
        ),
        move || std::mem::take(&mut x),
        move |xs, y| {
            let range = xs.source_range.start..y.source_range.end;
            builtin.apply("||", vec![xs, y]).with_source_range(range)
        },
    )(i)
}

/// Entry point of the precedence chain (lowest precedence: OR).
fn expr(i: InputWithContext) -> ParseResult<Expr> {
    or_expr(i)
}
/// Parses an expression.
pub fn parse_expr(source: &str, context_stack: &[&Context]) -> Result<Expr, String> {
    let i = InputWithContext::new(source, context_stack);
    // Surrounding whitespace is allowed; any other leftover input is an error.
    match all_consuming(delimited(space0, expr, space0))(i.clone()).finish() {
        Ok((_, x)) => Ok(x),
        Err(e) => Err(convert_error(i, e)),
    }
}
/// Formats a clang-style diagnostic for `range` in `source`, underlining the
/// offending span with `~` (or pointing with `^` when the span is empty).
pub fn format_error(source: &str, range: Range<usize>, message: &str) -> String {
    assert!(range.start <= range.end && range.end <= source.len());
    let offset = |substr: &str| {
        use nom::Offset;
        source.offset(substr)
    };
    // Find the 0-based line number and text of the line containing `range.start`.
    let (line, source_line) = source
        .split('\n') // Do not use `.lines()` which ignores a final line ending.
        .enumerate()
        .take_while(|(_, line)| offset(*line) <= range.start)
        .last()
        .unwrap();
    let start_in_line = range.start - offset(source_line);
    // Clamp to the line end in case the range spans multiple lines.
    let end_in_line = (range.end - offset(source_line)).min(source_line.len());
    // Columns are counted in characters, not bytes.
    let col = source_line[..start_in_line].chars().count();
    let n_cols = source_line[start_in_line..end_in_line].chars().count();
    let decoration = match n_cols {
        0 => "^".to_owned(),
        _ => "~".repeat(n_cols),
    };
    format!(
        r"
input:{}:{}: error: {}
{}
{:col$}{}
",
        line + 1,
        col + 1,
        message,
        source_line,
        "",
        decoration
    )
}
/// Converts a parse [`Error`] into a human-readable diagnostic string.
fn convert_error(input: InputWithContext, e: Error<InputWithContext>) -> String {
    use nom::Offset;
    let source = input.source;
    // Byte offset of the error position within the original source.
    let offset = source.offset(e.input.source);
    // Underline the whole identifier for unknown-identifier errors;
    // otherwise just point at the error position.
    let len = match e.kind {
        ErrorKind::UnknownIdentifier(name) => name.len(),
        _ => 0,
    };
    let message = match e.kind {
        ErrorKind::ExpectedChar(c) => format!("expected '{}'", c),
        ErrorKind::ExpectedEof => "unexpected input".to_owned(),
        ErrorKind::ExpectedExpr => "expected expression".to_owned(),
        ErrorKind::UnknownIdentifier(name) => format!("'{}' is not defined", name),
        // `OtherNomError` is internal and must never reach this point.
        _ => panic!("unexpected error kind"),
    };
    format_error(source, offset..offset + len, &message)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::context::{Def, VarProps};
#[test]
fn parse_expr() {
let ctx = Context::new()
.def("a", Def::var("a", VarProps::default()))
.def("b", Def::var("b", VarProps::default()))
.def("k", Def::var("k", VarProps::default()))
.def("n", Def::var("n", VarProps::default()))
.def("z", Def::var("z", VarProps::default()));
let test = |input, expected| {
let f = super::parse_expr(input, &[Context::builtin(), &ctx]).unwrap();
assert_eq!(format!("{}", f.dump_short()), expected);
};
test("false", "False");
test("true", "True");
test("e", "@");
test("gamma", "@");
test("ฮณ", "@");
test("pi", "@");
test("ฯ", "@");
test("i", "(Complex 0 1)");
test("[x, y, z]", "(List x y z)");
test("|x|", "(Abs x)");
test("||x| + y|", "(Abs (Add (Abs x) y))");
test("|x + |y||", "(Abs (Add x (Abs y)))");
test(
"y = ||x|| || |||x|| + |||y|||| = y",
"(Or (Eq y (Abs (Abs x))) (Eq (Abs (Add (Abs (Abs x)) (Abs (Abs (Abs y))))) y))",
);
test("โxโ", "(Ceil x)");
test("โxโ", "(Floor x)");
test("abs(x)", "(Abs x)");
test("acos(x)", "(Acos x)");
test("acosh(x)", "(Acosh x)");
test("Ai(x)", "(AiryAi x)");
test("Ai'(x)", "(AiryAiPrime x)");
test("Bi(x)", "(AiryBi x)");
test("Bi'(x)", "(AiryBiPrime x)");
test("arg(x)", "(Arg x)");
test("asin(x)", "(Asin x)");
test("asinh(x)", "(Asinh x)");
test("atan(x)", "(Atan x)");
test("atanh(x)", "(Atanh x)");
test("ceil(x)", "(Ceil x)");
test("Chi(x)", "(Chi x)");
test("Ci(x)", "(Ci x)");
test("~x", "(Conj x)");
test("cos(x)", "(Cos x)");
test("cosh(x)", "(Cosh x)");
test("psi(x)", "(Digamma x)");
test("ฯ(x)", "(Digamma x)");
test("Ei(x)", "(Ei x)");
test("E(x)", "(EllipticE x)");
test("K(x)", "(EllipticK x)");
test("erf(x)", "(Erf x)");
test("erfc(x)", "(Erfc x)");
test("erfi(x)", "(Erfi x)");
test("exp(x)", "(Exp x)");
test("floor(x)", "(Floor x)");
test("C(x)", "(FresnelC x)");
test("S(x)", "(FresnelS x)");
test("Gamma(x)", "(Gamma x)");
test("ฮ(x)", "(Gamma x)");
test("Im(x)", "(Im x)");
test("erfinv(x)", "(InverseErf x)");
test("erfcinv(x)", "(InverseErfc x)");
test("li(x)", "(Li x)");
test("ln(x)", "(Ln x)");
test("lnGamma(x)", "(LnGamma x)");
test("lnฮ(x)", "(LnGamma x)");
test("-x", "(Neg x)"); // hyphen-minus
test("โx", "(Neg x)"); // minus sign
test("Re(x)", "(Re x)");
test("Shi(x)", "(Shi x)");
test("Si(x)", "(Si x)");
test("sgn(x)", "(Sign x)");
test("sign(x)", "(Sign x)");
test("sin(x)", "(Sin x)");
test("sinc(x)", "(Sinc x)");
test("sinh(x)", "(Sinh x)");
test("sqrt(x)", "(Sqrt x)");
test("tan(x)", "(Tan x)");
test("tanh(x)", "(Tanh x)");
test("zeta(x)", "(Zeta x)");
test("ฮถ(x)", "(Zeta x)");
test("atan2(y, x)", "(Atan2 y x)");
test("I(n, x)", "(BesselI n x)");
test("J(n, x)", "(BesselJ n x)");
test("K(n, x)", "(BesselK n x)");
test("Y(n, x)", "(BesselY n x)");
test("Gamma(a, x)", "(GammaInc a x)");
test("ฮ(a, x)", "(GammaInc a x)");
test("W(x)", "(LambertW 0 x)");
test("W(k, x)", "(LambertW k x)");
test("log(b, x)", "(Log b x)");
test("mod(x, y)", "(Mod x y)");
test("gcd(x, y, z)", "(Gcd (Gcd x y) z)");
test("lcm(x, y, z)", "(Lcm (Lcm x y) z)");
test("max(x, y, z)", "(Max (Max x y) z)");
test("min(x, y, z)", "(Min (Min x y) z)");
test("if(x = 0, y, z)", "(IfThenElse (Boole (Eq x 0)) y z)");
test("rankedMax([x, y, z], k)", "(RankedMax (List x y z) k)");
test("rankedMin([x, y, z], k)", "(RankedMin (List x y z) k)");
test("x ^ y ^ z", "(Pow x (Pow y z))");
test("-x ^ -y", "(Neg (Pow x (Neg y)))");
test("x ^^ y ^^ z", "(PowRational x (PowRational y z))");
test("-x ^^ -y", "(Neg (PowRational x (Neg y)))");
test("+x", "x");
test("2x", "(Mul 2 x)");
test("x y z", "(Mul (Mul x y) z)");
test("x * y * z", "(Mul (Mul x y) z)");
test("x / y / z", "(Div (Div x y) z)");
test("x + y + z", "(Add (Add x y) z)");
test("x - y - z", "(Sub (Sub x y) z)"); // hyphen-minus
test("x โ y โ z", "(Sub (Sub x y) z)"); // minus sign
test("x + y z", "(Add x (Mul y z))");
test("(x + y) z", "(Mul (Add x y) z)");
test("x = y", "(Eq x y)");
test("x >= y", "(Ge x y)");
test("x โฅ y", "(Ge x y)");
test("x > y", "(Gt x y)");
test("x <= y", "(Le x y)");
test("x โค y", "(Le x y)");
test("x < y", "(Lt x y)");
test("x = y = z", "(And (Eq x y) (Eq y z))");
test("!x", "(Not x)");
test("ยฌx", "(Not x)");
test("x && y", "(And x y)");
test("x โง y", "(And x y)");
test("x || y", "(Or x y)");
test("x โจ y", "(Or x y)");
test("x = y && y = z", "(And (Eq x y) (Eq y z))");
test("x = y || y = z", "(Or (Eq x y) (Eq y z))");
test(
"x = y || y = z && z = x",
"(Or (Eq x y) (And (Eq y z) (Eq z x)))",
);
test(
"(x = y || y = z) && z = x",
"(And (Or (Eq x y) (Eq y z)) (Eq z x))",
);
}
}
|
rem -----------------------------------------------------------------------
rem # File Name: mktable.sql
rem #
rem # Purpose: Script to dump table creation script
rem # for the username (schema) provided as
rem # the parameter.
rem #
rem # This is script is useful for cases where
rem # Reverse Engineering is required. The resulting
rem # SQL is sent to an output file:
rem #
rem # tbl_<SCHEMA_NAME>.lst
rem #
rem -----------------------------------------------------------------------
rem # SQL*Plus session settings for clean, script-friendly spool output.
set arraysize 1
set echo off
set heading off
set feedback off
set verify off
set pagesize 0
set linesize 79
rem # Capture the schema name once and open the spool file named after it.
define 1 = &&SCHEMA_NAME
spool tbl_&&SCHEMA_NAME
set termout off
rem # x and y are sort keys only; suppress them from the spooled output.
col x noprint
col y noprint
rem # Banner comment naming the schema whose tables are being dumped.
select 'rem **** Create Table DDL for '||chr(10)||
'rem **** '||username||''''||'s tables'||chr(10)||chr(10)
from dba_users
where username = upper ('&&1')
/
rem # Three UNIONed branches ordered by (table_name, column position):
rem #   x=0      -> the CREATE TABLE header,
rem #   x=1..n   -> one row per column definition,
rem #   x=999999 -> the storage/tablespace footer.
select table_name y,
0 x,
'create table ' ||
rtrim(table_name) ||
'('
from dba_tables
where owner = upper('&&1')
union
select tc.table_name y,
column_id x,
rtrim(decode(column_id,1,null,','))||
rtrim(column_name)|| ' ' ||
rtrim(data_type) ||
rtrim(decode(data_type,'DATE',null,'LONG',null,
'NUMBER',decode(to_char(data_precision),null,null,'('),
'(')) ||
rtrim(decode(data_type,
'DATE',null,
'CHAR',data_length,
'VARCHAR2',data_length,
'NUMBER',decode(to_char(data_precision),null,null,
to_char(data_precision) || ',' || to_char(data_scale)),
'LONG',null,
'******ERROR')) ||
rtrim(decode(data_type,'DATE',null,'LONG',null,
'NUMBER',decode(to_char(data_precision),null,null,')'),
')')) || ' ' ||
rtrim(decode(nullable,'N','NOT NULL',null))
from dba_tab_columns tc,
dba_objects o
where o.owner = tc.owner
and o.object_name = tc.table_name
and o.object_type = 'TABLE'
and o.owner = upper('&&1')
union
select table_name y,
999999 x,
')' || chr(10)
||' STORAGE(' || chr(10)
||' INITIAL ' || initial_extent || chr(10)
||' NEXT ' || next_extent || chr(10)
||' MINEXTENTS ' || min_extents || chr(10)
||' MAXEXTENTS ' || max_extents || chr(10)
||' PCTINCREASE '|| pct_increase || ')' ||chr(10)
||' INITRANS ' || ini_trans || chr(10)
||' MAXTRANS ' || max_trans || chr(10)
||' PCTFREE ' || pct_free || chr(10)
||' PCTUSED ' || pct_used || chr(10)
||' PARALLEL (DEGREE ' || DEGREE || ') ' || chr(10)
||' TABLESPACE ' || rtrim(tablespace_name) ||chr(10)
||'/'||chr(10)||chr(10)
from dba_tables
where owner = upper('&&1')
order by 1,2
|
//! Operator norm
use ndarray::*;
use super::error::*;
use super::layout::*;
use super::types::*;
pub use lapack_traits::NormType;
/// Operator norm using `*lange` LAPACK routines
///
/// [Wikipedia article on operator norm](https://en.wikipedia.org/wiki/Operator_norm)
pub trait OperationNorm {
    /// the value of norm
    type Output: RealScalar;

    /// Computes the operator norm of kind `t`.
    fn opnorm(&self, t: NormType) -> Result<Self::Output>;

    /// the one norm of a matrix (maximum column sum)
    fn opnorm_one(&self) -> Result<Self::Output> {
        self.opnorm(NormType::One)
    }

    /// the infinity norm of a matrix (maximum row sum)
    fn opnorm_inf(&self) -> Result<Self::Output> {
        self.opnorm(NormType::Infinity)
    }

    /// the Frobenius norm of a matrix (square root of sum of squares)
    fn opnorm_fro(&self) -> Result<Self::Output> {
        self.opnorm(NormType::Frobenius)
    }
}
impl<A, S> OperationNorm for ArrayBase<S, Ix2>
where
    A: Scalar,
    S: Data<Elem = A>,
{
    type Output = A::Real;

    fn opnorm(&self, t: NormType) -> Result<Self::Output> {
        // `?` propagates an error if the matrix layout is not LAPACK-compatible.
        let l = self.layout()?;
        // `?` propagates an error unless the data is held in one allocation.
        let a = self.as_allocated()?;
        // Safety: `l` and `a` were both just derived from `self`, so the
        // layout describes the slice passed to the FFI call — presumably the
        // `*lange` wrapper reads only within those bounds (TODO confirm in
        // the lapack_traits impl).
        Ok(unsafe { A::opnorm(t, l, a) })
    }
}
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
import time
import os.path
import subprocess
import shutil
# helpful for kernel development
debug = 0

# Kernel build matrix: each row is the sass template name, the op direction,
# and the remaining entries are -D defines (operand precisions / options)
# passed through to maxas.
gen_kernels = [
    [ "xgemm_blocksparse_32x32x32_xprop", "fprop", "A32", "B32", "C32" ],
    [ "xgemm_blocksparse_32x32x32_xprop", "fprop", "A10", "B10", "C10" ],
    [ "xgemm_blocksparse_32x32x32_xprop", "fprop", "A10", "B32", "C10" ],
    [ "xgemm_blocksparse_32x32x32_xprop", "fprop", "A7", "B7", "C7" ],
    [ "xgemm_blocksparse_32x32x32_xprop", "bprop", "A32", "B32", "C32" ],
    [ "xgemm_blocksparse_32x32x32_xprop", "bprop", "A10", "B10", "C10" ],
    [ "xgemm_blocksparse_32x32x32_xprop", "bprop", "A32", "B10", "C32" ],
    [ "xgemm_blocksparse_32x32x32_xprop", "bprop", "A7", "B7", "C7" ],
    [ "xgemm_blocksparse_32x32x32_xprop", "bprop", "A32", "B7", "C32" ],
    [ "xgemm_blocksparse_32x32x8_updat", "updat", "A32", "B32", "C32" ],
    [ "xgemm_blocksparse_32x32x8_updat", "updat", "A10", "B10", "C10" ],
    [ "xgemm_blocksparse_32x32x8_updat", "updat", "A10", "B32", "C10" ],
    [ "xgemm_blocksparse_32x32x8_updat", "updat", "A10", "B32", "C32" ],
    [ "xgemm_blocksparse_32x32x8_updat", "updat", "A7", "B7", "C7" ],
    [ "xgemm_blocksparse_32x32x8_updat", "updat", "A7", "B32", "C7" ],
    [ "xgemm_blocksparse_32x32x8_updat", "updat", "A7", "B32", "C32" ],
    [ "xconv_blocksparse_32x32x16_fprop", "fprop", "F32", "I32", "O32" ],
    [ "xconv_blocksparse_32x32x16_fprop", "fprop", "F16", "I16", "O16" ],
    [ "xconv_blocksparse_32x32x16_fprop", "fprop", "F16", "I32", "O32" ],
    [ "xconv_blocksparse_32x32x16_fprop", "fprop", "F32", "I32", "O32", "overlapK" ],
    [ "xconv_blocksparse_32x32x16_fprop", "fprop", "F16", "I16", "O16", "overlapK" ],
    [ "xconv_blocksparse_32x32x16_fprop", "fprop", "F16", "I32", "O32", "overlapK" ],
    [ "xconv_blocksparse_32x32x16_bprop", "bprop", "F32", "I32", "O32" ],
    [ "xconv_blocksparse_32x32x16_bprop", "bprop", "F16", "I16", "O16" ],
    [ "xconv_blocksparse_32x32x16_bprop", "bprop", "F16", "I32", "O32" ],
    [ "xconv_blocksparse_32x32x16_bprop", "bprop", "F32", "I32", "O32", "overlapC" ],
    [ "xconv_blocksparse_32x32x16_bprop", "bprop", "F16", "I16", "O16", "overlapC" ],
    [ "xconv_blocksparse_32x32x16_bprop", "bprop", "F16", "I32", "O32", "overlapC" ],
    [ "xconv_blocksparse_32x32x32_updat", "updat", "E32", "I32", "O32" ],
    [ "xconv_blocksparse_32x32x32_updat", "updat", "E16", "I16", "O16" ],
    [ "xconv_blocksparse_32x32x32_updat", "updat", "E32", "I16", "O16" ],
    [ "xconv_blocksparse_32x32x32_updat", "updat", "E16", "I32", "O16" ],
    [ "xconv_blocksparse_32x32x32_updat", "updat", "E32", "I16", "O32" ],
    [ "xconv_blocksparse_32x32x32_updat", "updat", "E16", "I32", "O32" ],
]

# Per-template metadata: base kernel name, key into _params below, CUDA
# thread count, and a shared-memory size expression in bytes that is
# evaluated when the PTX stub is generated.
kernel_specs = dict(
    xgemm_blocksparse_32x32x32_xprop=dict(basename="gemm_blocksparse_32x32x32", params="xprop_matmul", threads=128, share="(32*33)*4 + 4"),
    xgemm_blocksparse_32x32x32_updat=dict(basename="gemm_blocksparse_32x32x32", params="updat_matmul", threads=128, share="(32*32)*4 + 64"),
    xgemm_blocksparse_32x32x8_updat =dict(basename="gemm_blocksparse_32x32x8", params="updat_matmul", threads= 32, share="(32* 8)*4 + 64"),
    xconv_blocksparse_32x32x32_fprop=dict(basename="conv_blocksparse_32x32x32", params="xprop_conv", threads=128, share="(33+32)*32*2" ),
    xconv_blocksparse_32x32x16_fprop=dict(basename="conv_blocksparse_32x32x16", params="xprop_conv", threads= 64, share="(17+16)*32*2" ),
    xconv_blocksparse_32x32x16_bprop=dict(basename="conv_blocksparse_32x32x16", params="xprop_conv", threads= 64, share="(16+16)*32*2 + 64" ),
    xconv_blocksparse_32x32x32_updat=dict(basename="conv_blocksparse_32x32x32", params="updat_conv", threads=128, share="32*33*4 + 4" ),
)

# Kernel parameter declarations ("<c type> <name>") keyed by
# kernel_specs[...]["params"]; converted to PTX .param lines in get_ptx_file.
_params = {
    "xprop_matmul": [
        "unsigned* param_Layout",
        "float* param_C",
        "float* param_A",
        "float* param_B",
        "float param_alpha",
        "float param_beta",
        "unsigned param_cda",
        "unsigned param_cdc",
        "unsigned param_m",
    ],
    "updat_matmul": [
        "plist8 param_A",
        "plist8 param_B",
        "unsigned* param_Layout",
        "float* param_C",
        "float param_alpha",
        "float param_beta",
        "unsigned param_cda",
        "unsigned param_cdb",
        "unsigned param_k",
        "unsigned param_count",
    ],
    "xprop_conv": [
        "unsigned* param_Block",
        "unsigned* param_LutMPQ",
        "unsigned* param_LutCK",
        "float* param_O",
        "float* param_F",
        "float* param_I",
        "float param_alpha",
        "unsigned param_TRS",
        "unsigned param_magic_TRS",
        "unsigned param_shift_TRS",
        "unsigned param_CDHW",
        "unsigned param_KMPQ",
    ],
    "updat_conv": [
        "unsigned* param_Block",
        "unsigned* param_LutMPQ",
        "unsigned* param_LutCK",
        "float* param_O",
        "float* param_E",
        "float* param_I",
        "float param_alpha",
        "unsigned param_TRS",
        "unsigned param_magic_TRS",
        "unsigned param_shift_TRS",
        "unsigned param_CDHW",
        "unsigned param_KMPQ",
        "unsigned param_N",
        "unsigned param_sizeF",
    ],
}
def _get_cache_dir(subdir=None):
cache_dir = os.path.expanduser("~/.cache/blocksparse")
if subdir:
subdir = subdir if isinstance(subdir, list) else [subdir]
cache_dir = os.path.join(cache_dir, *subdir)
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
return cache_dir
# Directory layout: this module lives next to the vendored maxas assembler
# and the sass kernel sources.
base_dir = os.path.dirname(__file__)
maxas_dir = os.path.join(base_dir, "vendor", "maxas")
sass_dir = os.path.join(base_dir, "src", "sass")

# Splits "<c type> <name>" parameter specs from _params.
_space_re = re.compile(r"\s+")

# Shared-memory declaration inserted into the PTX stub; {0} is the byte count.
_share_template = r"""
.shared .align 4 .b32 share[{0}];
"""

# PTX stub template filled by get_ptx_file; the real machine code is later
# patched in by maxas, so the body is just the shared decl and `ret`.
# Placeholders: {0}=arch, {1}=kernel name, {2}=param decls, {3}=thread count
# (currently unused, see the commented .reqntid below), {4}=shared decl,
# {5}=args spec comment, {6}=PTX ISA version.
_kernel_template = r"""
.version {6}
.target {0}
.address_size 64
// args: {5}
.visible .entry {1}(
{2}
)
{{
{4}
ret;
}}
"""
#.reqntid {3}
def get_ptx_file(kernel_spec, args_spec, kernel_name, arch, ptx_ver):
    """Render (and cache) the PTX stub for a kernel.

    Builds the PTX .param declarations from the kernel's parameter spec,
    fills in the shared-memory declaration and the kernel template, and
    writes the result into the per-arch ptx cache directory -- but only when
    the generated text differs from what is already on disk, so the file's
    mtime stays stable for the cubin-rebuild check in get_kernel.

    Returns the path of the cached .ptx file.
    """
    ptx_dir = _get_cache_dir([arch, 'ptx'])

    thread_spec = kernel_spec["threads"]
    param_spec = _params[kernel_spec["params"]]

    # Map the C-like "<type> <name>" specs onto PTX parameter declarations.
    kernel_params = []
    for p in param_spec:
        ptype, pname = _space_re.split(p)
        if ptype == "plist8":
            # An 8-element pointer list passed as an aligned .b64 array.
            kernel_params.append(" .param .align 8 .b64 %s[8]" % pname)
        else:
            if ptype[-1] == '*':
                ptype = '.u64'
            elif ptype == 'float':
                ptype = '.f32'
            else:
                ptype = '.u32'
            kernel_params.append(" .param %s %s" % (ptype, pname))
    kernel_params = ",\n".join(kernel_params)

    if "share" in kernel_spec:
        # NOTE(review): eval of a trusted, module-local size expression from
        # kernel_specs -- never feed external input through "share".
        share = _share_template.format(eval(kernel_spec["share"]))
    else:
        share = ""

    kernel_text = _kernel_template.format(arch, kernel_name, kernel_params, thread_spec, share, args_spec, ptx_ver)
    kernel_ptx = os.path.join(ptx_dir, kernel_name + ".ptx")

    current_text = ""
    if os.path.exists(kernel_ptx):
        # Context managers close the handles even on error (the original
        # left files open if read()/write() raised).
        with open(kernel_ptx, "r") as f:
            current_text = f.read()
    # only write out the kernel if text has changed.
    if kernel_text != current_text:
        with open(kernel_ptx, "w") as f:
            f.write(kernel_text)

    return kernel_ptx
# Matches `<INCLUDE file="..."/>` directives at the start of a sass line.
include_re = re.compile(r'^<INCLUDE\s+file="([^"]+)"\s*/>')

def extract_includes(name, includes=None):
    """Recursively collect (path, mtime) pairs for a sass file and everything
    it transitively pulls in via <INCLUDE file="..."/> directives.

    If `includes` is provided, results are appended to it (and it is also
    returned). The original used `if not includes:`, which silently replaced
    a caller-supplied empty list; `is None` honors it instead.
    """
    if includes is None:
        includes = list()
    sass_file = os.path.join(sass_dir, name)
    includes.append((sass_file, os.path.getmtime(sass_file)))
    # `with` ensures the handle is closed (the bare `for line in open(...)`
    # form leaked the file object until garbage collection).
    with open(sass_file, "r") as f:
        for line in f:
            match = include_re.search(line)
            if match:
                extract_includes(match.group(1), includes)
    return includes
def run_command(cmdlist):
    """Run a shell command, echoing its output; raise on non-zero exit.

    Args:
        cmdlist: command fragments joined with spaces and run through the
            shell (callers rely on shell features such as ";").

    Raises:
        RuntimeError: with the command line and captured stderr when the
            command exits with a non-zero status.
    """
    cmd = " ".join(cmdlist)
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    # communicate() returns bytes on Python 3; decode so the logs and the
    # RuntimeError message are readable instead of printing b'...' reprs.
    if isinstance(out, bytes):
        out = out.decode(errors="replace")
    if isinstance(err, bytes):
        err = err.decode(errors="replace")
    if proc.returncode:
        raise RuntimeError("Error(%d):\n%s\n%s" % (proc.returncode, cmd, err))
    #if debug:
    print(cmd)
    if out: print(out)
    if err: print(err)
def get_kernel(kernel):
    """Build (or fetch from cache) the cubin for one kernel/options tuple.

    ``kernel`` is a tuple ``(sass_base_name, option1, option2, ...)``; each
    option becomes a ``-D<opt> 1`` define for maxas and is appended to the
    mangled kernel name.

    Returns:
        ``(kernel_name, cubin_file)`` — the mangled name and the path of the
        maxas-processed cubin in the cache directory.
    """
    major, minor = 5, 0
    arch = "sm_%d%d" % (major, minor)

    # Two maxas invocations: -i inserts the assembled SASS into the cubin,
    # -p only preprocesses (used for debug output below).
    libprefix = "PERL5LIB=%s" % maxas_dir
    maxas_i = [libprefix, os.path.join(maxas_dir, "maxas.pl") + " -i -w"]
    maxas_p = [libprefix, os.path.join(maxas_dir, "maxas.pl") + " -p"]

    sass_name = kernel[0]
    kernel_spec = kernel_specs[sass_name]
    kernel_name = kernel_spec["basename"]

    # Stringified options; embedded in the PTX as a comment so option
    # changes alter the PTX text and trigger a rebuild.
    args_spec = str(kernel[1:])

    for opt in kernel[1:]:
        maxas_i.append("-D%s 1" % opt)
        maxas_p.append("-D%s 1" % opt)
        kernel_name += "_" + opt

    maxas_i.insert(2, "-k " + kernel_name)

    sass_name += ".sass"
    cubin_name = kernel_name + ".cubin"
    cubin_dir = _get_cache_dir([arch, 'cubin'])

    ptx_version = "4.2" if major < 6 else "5.0"
    ptx_file = get_ptx_file(kernel_spec, args_spec, kernel_name, arch, ptx_version)
    sass_file = os.path.join(sass_dir, sass_name)
    cubin_file = os.path.join(cubin_dir, cubin_name)
    if not os.path.exists(sass_file):
        raise RuntimeError("Missing: %s for kernel: %s" % (sass_name, kernel_name))

    # Rebuild when the PTX or any (transitively) included SASS source is
    # newer than the cached cubin.
    ptx_mtime = os.path.getmtime(ptx_file)
    cubin_mtime = os.path.getmtime(cubin_file) if os.path.exists(cubin_file) else 0

    build_cubin = False
    if ptx_mtime > cubin_mtime:
        build_cubin = True

    includes = extract_includes(sass_name)
    for include, include_mtime in includes:
        if include_mtime > cubin_mtime:
            build_cubin = True
            break

    if build_cubin:
        # build the cubin and run maxas in the same command
        # we don't want the chance of a generated cubin not processed by maxas (in case user hits ^C in between these steps)
        run_command([ "ptxas -v -arch", arch, "-o", cubin_file, ptx_file, ";" ] + maxas_i + [sass_file, cubin_file])
        cubin_mtime = time.time()

    # output preprocessed and disassembled versions in debug mode
    if debug:
        pre_dir = _get_cache_dir([arch, 'pre'])
        dump_dir = _get_cache_dir([arch, 'dump'])
        pre_file = os.path.join(pre_dir, kernel_name + "_pre.sass")
        dump_file = os.path.join(dump_dir, kernel_name + "_dump.sass")
        pre_mtime = os.path.getmtime(pre_file) if os.path.exists(pre_file) else 0
        dump_mtime = os.path.getmtime(dump_file) if os.path.exists(dump_file) else 0
        for include, include_mtime in includes:
            if include_mtime > pre_mtime:
                run_command(maxas_p + [sass_file, pre_file])
                break
        # if cubin_mtime > dump_mtime:
        #     run_command(["nvdisasm -c", cubin_file, ">", dump_file])

    return kernel_name, cubin_file
def main():
    """Build every kernel listed in gen_kernels and emit a C++ header that
    embeds each cubin as a byte array plus a name -> (bytes, size) map."""
    header_file = os.path.join(base_dir, "build", "blocksparse_kernels.h")
    with open(header_file, "w") as output_file:
        kernel_map = "\n\nstd::map<std::string, std::pair<const uint8_t*, size_t>> kernel_map_ = {"
        for kernel in gen_kernels:
            kernel_name, cubin_file = get_kernel(kernel)
            kernel_text = "\n\nconst uint8_t %s[] = {" % kernel_name
            with open(cubin_file, 'rb') as input_file:
                count = 0
                byte = input_file.read(1)
                # Python 3 bytes objects have .hex(); otherwise fall back to
                # the Python 2 str.encode("hex") spelling.
                use_hex = 'hex' in dir(byte)
                while byte:
                    # 32 bytes per generated source line.
                    if count % 32 == 0:
                        kernel_text += "\n "
                    count += 1
                    if use_hex:
                        kernel_text += "0x" + byte.hex() + ","
                    else:
                        kernel_text += "0x" + byte.encode("hex") + ","
                    byte = input_file.read(1)
            kernel_text += "\n};"
            kernel_map += "\n { \"%s\", { %s, %d } }," % (kernel_name, kernel_name, count)
            output_file.write(kernel_text)
        kernel_map += "\n};"
        output_file.write(kernel_map)

if __name__ == '__main__':
    main()
|
# frozen_string_literal: true
# Scoping specs for #within: restricts all page interactions to a matched
# element.  Covers CSS/XPath/node scopes, nesting, page reloads inside a
# scope, error cases, and scope restoration after failures.
RSpec.feature '#within', helpers: [:scopes_page] do
  before do
    scopes_page.visit_page
  end

  it 'should allow to escape a within restriction using within_document' do
    scopes_page.within_element(:first_section) do
      scopes_page.should_not.have_content('First Name')
      scopes_page.within_document do
        scopes_page.should.have_content('First Name')
      end
      scopes_page.should_not.have_content('First Name')
    end
  end

  context 'with CSS selector' do
    it 'should click links in the given scope' do
      scopes_page.within(:first_item) {
        scopes_page.click_link('Go')
      }
      scopes_page.should.have_content('Bar')
    end

    it 'should assert content in the given scope' do
      scopes_page.first_section.within do
        scopes_page.should_not.have_content('First Name')
      end
      scopes_page.should.have_content('First Name')
    end

    it 'should accept additional options' do
      scopes_page.within(:css, :list_item, text: 'With Simple HTML') do
        scopes_page.click_link('Go')
      end
      scopes_page.should.have_content('Bar')
    end

    it 'should reload the node if the page is changed' do
      scopes_page.within(:first_section) do
        current_page.visit('/with_scope_other')
        scopes_page.should.have_content('Different text')
      end
    end

    it 'should reload multiple nodes if the page is changed' do
      scopes_page.within(:css, '#for_bar') do
        scopes_page.within(:css, 'form[action="/redirect"]') do
          scopes_page.refresh
          scopes_page.should.have_content('First Name')
        end
      end
    end

    it 'should error if the page is changed and a matching node no longer exists' do
      scopes_page.within(:first_section) do
        current_page.visit('/')
        expect { scopes_page.text }.to raise_error(StandardError)
      end
    end
  end

  context 'with XPath selector' do
    it 'should click links in the given scope' do
      scopes_page.within(:xpath, "//div[@id='for_bar']//li[contains(.,'With Simple HTML')]") do
        scopes_page.click_link('Go')
      end
      scopes_page.should.have_content('Bar')
    end
  end

  context 'with Node rather than selector' do
    it 'should click links in the given scope' do
      node_of_interest = scopes_page.find(:css, :list_item, text: 'With Simple HTML')
      scopes_page.within(node_of_interest) do
        scopes_page.click_link('Go')
      end
      scopes_page.should.have_content('Bar')
    end
  end

  context 'with the default selector set to CSS' do
    it 'should use CSS' do
      scopes_page.within(:list_item, text: 'With Simple HTML') do
        scopes_page.click_link('Go')
      end
      scopes_page.should.have_content('Bar')
    end
  end

  context 'with nested scopes' do
    it 'should respect the inner scope' do
      scopes_page.within(:xpath, "//div[@id='for_bar']") do
        scopes_page.within(:xpath, ".//li[contains(.,'Bar')]") do
          scopes_page.click_link('Go')
        end
      end
      scopes_page.should.have_content('Another World')
    end

    it 'should respect the outer scope' do
      scopes_page.within(:xpath, "//div[@id='another_foo']") do
        scopes_page.find(:xpath, ".//li[contains(.,'With Simple HTML')]").within do
          scopes_page.click_link('Go')
        end
      end
      scopes_page.should.have_content('Hello world')
    end
  end

  it 'should raise an error if the scope is not found on the page' do
    expect do
      scopes_page.within(:xpath, "//div[@id='doesnotexist']") do
      end
    end.to raise_error(Capybara::ElementNotFound)
  end

  # The relative has_xpath? must stay false while scoped to #for_bar and
  # flip back to true once the failed inner scope has been unwound.
  it 'should restore the scope when an error is raised' do
    expect do
      scopes_page.within(:xpath, "//div[@id='for_bar']") do
        expect do
          expect do
            scopes_page.within(:xpath, ".//div[@id='doesnotexist']") do
            end
          end.to raise_error(Capybara::ElementNotFound)
        end.not_to change { scopes_page.has_xpath?(".//div[@id='another_foo']") }.from(false)
      end
    end.not_to change { scopes_page.has_xpath?(".//div[@id='another_foo']") }.from(true)
  end

  it 'should fill in a field and click a button' do
    scopes_page.within(:xpath, "//li[contains(.,'Bar')]") do
      scopes_page.click_button('Go')
    end
    scopes_page.should.have_results('first_name', 'Peter')

    current_page.visit('/with_scope')
    scopes_page.within(:xpath, "//li[contains(.,'Bar')]") do
      scopes_page.fill_in('First Name', with: 'Dagobert')
      scopes_page.click_button('Go')
    end
    scopes_page.should.have_results('first_name', 'Dagobert')
  end
end
# #within_fieldset scopes interactions to a <fieldset>, locatable either by
# its id attribute or by its <legend> text.
RSpec.feature '#within_fieldset', helpers: [:scopes_page] do
  before do
    page.visit('/fieldsets')
  end

  it 'should restrict scope to a fieldset given by id' do
    scopes_page.within_fieldset('villain_fieldset') do
      scopes_page.fill_in('Name', with: 'Goldfinger')
      scopes_page.click_button('Create')
    end
    scopes_page.should.have_results('villain_name', 'Goldfinger')
  end

  it 'should restrict scope to a fieldset given by legend' do
    scopes_page.within_fieldset('Villain') do
      scopes_page.fill_in('Name', with: 'Goldfinger')
      scopes_page.click_button('Create')
    end
    scopes_page.should.have_results('villain_name', 'Goldfinger')
  end
end
# #within_table scopes interactions to a <table>, locatable either by its id
# attribute or by its caption text.
RSpec.feature '#within_table', helpers: [:scopes_page] do
  before do
    page.visit('/tables')
  end

  # Descriptions fixed: the originals were copy-pasted from the fieldset
  # specs and claimed to target fieldsets/legends.
  it 'should restrict scope to a table given by id' do
    scopes_page.within_table('girl_table') do
      scopes_page.fill_in('Name', with: 'Christmas')
      scopes_page.click_button('Create')
    end
    scopes_page.should.have_results('girl_name', 'Christmas')
  end

  it 'should restrict scope to a table given by caption' do
    scopes_page.within_table('Villain') do
      scopes_page.fill_in('Name', with: 'Quantum')
      scopes_page.click_button('Create')
    end
    scopes_page.should.have_results('villain_name', 'Quantum')
  end
end
|
import { writable } from "svelte/store";
/**
 * Creates a readable Svelte store for a JSON:API collection.
 *
 * The store holds an id -> item mapping; call `load()` to (re)populate it
 * from `/api/<cls>`.
 */
function createJSONAPIStore(cls: string) {
    const entries = writable({} as {[x:string]: JSONAPIItem});
    // Guards against overlapping fetches of the same collection.
    let loading = false;

    /**
     * Fetch all items and replace the store contents.  No-op while a fetch
     * is already in flight; a non-OK response leaves the store untouched;
     * network errors propagate to the caller.
     */
    async function load() {
        if (loading) {
            return;
        }
        loading = true;
        try {
            const url = '/api/' + cls;
            const response = await fetch(url);
            if (response.ok) {
                const data = (await response.json()).data as JSONAPIItem[];
                entries.set(Object.fromEntries(data.map((item) => {
                    return [item.id, item];
                })));
            }
        } finally {
            // Always clear the flag — the original duplicated this reset in
            // the success path and again in a catch block; "finally" covers
            // both and still rethrows errors.
            loading = false;
        }
    }

    return {
        subscribe: entries.subscribe,
        load,
    }
}

// Shared store for the "analysis-time-periods" JSON:API collection.
export const analysisTimePeriods = createJSONAPIStore('analysis-time-periods');
|
import { ApplicationCommandOptionTypes } from '../interfaces';
import SlashCommandOptionWithChoices from '../commons/SlashCommandOptionWithChoices';
/*
|--------------------------------------------------------------------------
| SlashCommandBuilder::Options -> SlashCommandNumberOption
|--------------------------------------------------------------------------
|
| ...
|
*/
/**
 * Slash-command option that accepts a numeric value.
 * Choice support is inherited from SlashCommandOptionWithChoices.
 */
export default class SlashCommandNumberOption extends SlashCommandOptionWithChoices<number> {
    // Discord application-command option type identifier for this option.
    public override readonly type = ApplicationCommandOptionTypes.Number;

    constructor() {
        super(ApplicationCommandOptionTypes.Number);
    }
}
|
---
numero: 370
titulo: Têm os santos do Senhor
---
1. Nós aqui estamos para, a Cristo, exaltar,
Por Sua obra redentora;
O Seu Nome não cessemos de glorificar;
Sua igreja O adora.

__Têm os santos do Senhor, alegria de louvar
A Jesus, que, por amor, veio à terra a salvar;
Nossas almas resgatou, com Seu sangue nos lavou;
Glória a Jesus, aleluia!__

2. A promessa cumprirá o eterno Rei dos reis
De nos fazer herdar a glória;
O Penhor da herança deu a todos os fiéis;
De Cristo, honramos a memória.

3. A esperança já nos deu da redenção final,
Que firmemente aguardamos;
Essa bem-aventurança eterna é real;
Ao Redentor nós exaltamos.
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
public class PlayerController : MonoBehaviour
{
    // Controls
    [Header("Controls")]
    [SerializeField] private float airResistance = 7f;    // Drag-force factor opposing current velocity
    [SerializeField] private float moveSpeed = 700f;      // WASD thrust force
    [SerializeField] private float freezeDuration = 10f;  // Seconds the asteroid freeze lasts
    [SerializeField] private Material frozenSky;          // Skybox shown while frozen
    private Material defaultSky;                          // Skybox restored after unfreezing

    // Audio
    [Header("Sounds")]
    [Range(0.05f, 1f)]
    [SerializeField] private float volume = 0.3f;
    [SerializeField] private AudioClip freeze;
    [SerializeField] private AudioClip actionDenied;
    [SerializeField] private AudioClip forceFieldHit;
    private AudioSource audioSource;

    // References
    private Rigidbody rb;
    private GameObject ship;
    private ShipController shipController;
    private HUDUtils HUDUtils;
    private MouseController mouse;

    // Cache component/scene references and remember the default skybox.
    void Start()
    {
        rb = GetComponent<Rigidbody>();
        mouse = GetComponent<MouseController>();
        HUDUtils = GameObject.Find("UI/HUD").GetComponent<HUDUtils>();
        ship = GameObject.Find("Spaceship");
        shipController = ship.GetComponent<ShipController>();
        audioSource = GetComponent<AudioSource>();
        defaultSky = RenderSettings.skybox;
    }

    // Per-frame input handling.
    void Update()
    {
        // Game Controls (only active when unpaused)
        if(!GameManager.Instance.paused){
            // Space: spend a full scrap bar to freeze asteroids for freezeDuration seconds
            if(Input.GetKeyDown(KeyCode.Space)) {
                if(HUDUtils.GetScrap() >= HUDUtils.GetScrapCap()) {
                    HUDUtils.SetScrap(0f);
                    GameManager.Instance.frozen = true;
                    RenderSettings.skybox = frozenSky;
                    StartCoroutine(Unfreeze());
                    PlaySound(freeze);
                } else {
                    PlaySound(actionDenied);
                }
            }
        }

        // Escape: pause & unpause (available even while paused)
        if(Input.GetKeyDown(KeyCode.Escape)){
            audioSource.Stop();
            HUDUtils.TogglePause();
        }
    }

    // Physics-step movement.  Uses Time.fixedDeltaTime throughout for
    // consistency — the original mixed deltaTime and fixedDeltaTime here;
    // inside FixedUpdate both return the fixed step, so behavior is the same.
    void FixedUpdate()
    {
        // Air resistance: force opposing the current velocity
        rb.AddForce(airResistance * -rb.velocity * Time.fixedDeltaTime);

        // WASD movement relative to the camera orientation
        Vector3 forwardInput = (Camera.main.transform.forward * Input.GetAxis("Vertical")).normalized;
        Vector3 rightInput = (Camera.main.transform.right * Input.GetAxis("Horizontal")).normalized;
        rb.AddForce(forwardInput * moveSpeed * Time.fixedDeltaTime);
        rb.AddForce(rightInput * moveSpeed * Time.fixedDeltaTime);
    }

    void OnCollisionEnter(Collision collision) {
        // Spaceship: stop dead instead of bouncing off
        if(collision.collider.name == "Spaceship") {
            rb.velocity = Vector3.zero;
        }

        // Asteroids: break them and count a combo hit
        AsteroidController asteroid = collision.gameObject.GetComponent<AsteroidController>();
        if(asteroid != null && !asteroid.IsDestroying()) {
            // Ramming the currently tracked target cancels tracking and bleeds off speed
            if(GameObject.ReferenceEquals(mouse.GetTarget(), collision.collider.transform)) {
                mouse.UntrackTarget();
                rb.velocity *= 0.4f;
            }
            HUDUtils.IncrementCombo();
            audioSource.Stop();
            asteroid.Break();
        }

        // Force Field
        if(collision.collider.name == "Force Field") {
            audioSource.PlayOneShot(forceFieldHit, volume);
        }
    }

    // Play a one-shot clip at the configured volume.
    public void PlaySound(AudioClip clip){
        audioSource.PlayOneShot(clip, volume);
    }

    // Ends the freeze after freezeDuration, restores the skybox and
    // compensates the ship's hit timer for the frozen period.
    IEnumerator Unfreeze() {
        yield return new WaitForSeconds(freezeDuration);
        GameManager.Instance.frozen = false;
        RenderSettings.skybox = defaultSky;
        shipController.IncrementHitTime(freezeDuration);
        HUDUtils.TickCombo();
    }
}
|
๏ปฟusing System;
namespace R5T.Frisia.Suebia.Construction
{
    /// <summary>
    /// Sample user-name constants used during construction.
    /// Note: public!!! Do not use any sensitive information!
    /// </summary>
    public static class Users
    {
        // Name of the first sample user.
        public const string User1Name = "User1";
    }
}
|
# zshrc for darwin

# When Homebrew is available (as reported by the custom get-env helper),
# source its optional integrations; trysource presumably ignores missing
# files — confirm against its definition.
[[ $(get-env homebrew) ]] && {
    # setup for brew-file
    trysource $(brew --prefix)/etc/brew-wrap
    # z (directory jumper) shipped via Homebrew
    trysource $(brew --prefix)/etc/profile.d/z.sh
}
|
--TEST--
usb_free_device_list() function
--SKIPIF--
<?php
if(!extension_loaded('usb')) die('skip ');
?>
--FILE--
<?php
// Holders populated by reference by usb_init()/usb_get_device_list().
$context; $devices;
if (USB_SUCCESS !== usb_init($context)) {
goto out;
}
// expect that existence of an usb device.
if (1 > usb_get_device_list($context, $devices)) {
goto out;
}
// Live device handle: resource of type "usb_device".
var_dump($devices[0]);
out:
if ($devices) usb_free_device_list($devices);
if ($context) usb_exit($context);
// After freeing the list the resource is invalidated (type "Unknown").
var_dump($devices[0]);
?>
--EXPECTF--
resource(%d) of type (usb_device)
resource(%d) of type (Unknown)
|
<script src="{{ asset('izitoast/iziToast.min.js') }}"></script>
{{-- Flash-message bridge: maps Laravel session flashes onto iziToast popups. --}}
@if ($message = Session::get('success'))
<script> iziToast.success({title: 'Sistema', message: '{{ $message }}', position: 'topCenter'}); </script>
@endif
@if ($message = Session::get('error'))
<script> iziToast.error({title: 'Sistema', message: '{{ $message }}', position: 'topCenter'}); </script>
@endif
@if ($message = Session::get('warning'))
<script> iziToast.warning({title: 'Sistema', message: '{{ $message }}', position: 'topCenter'}); </script>
@endif
@if ($message = Session::get('info'))
{{-- Fixed: use the dedicated info() style instead of the generic show(). --}}
<script> iziToast.info({title: 'Sistema', message: '{{ $message }}', position: 'topCenter'}); </script>
@endif
|
using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;
using Google.Protobuf;
using Microsoft.Extensions.Hosting;
using Proto.Remote.GrpcNet;
using Xunit;
namespace Proto.Remote.Tests
{
    /// <summary>
    /// Runs the shared <see cref="RemoteTests"/> suite over hosted Grpc.Net
    /// remotes that are configured with a custom System.Text.Json-based
    /// serializer (registered with a priority that outranks the defaults).
    /// </summary>
    public class HostedGrpcNetWithCustomSerializerTests
        : RemoteTests,
            IClassFixture<HostedGrpcNetWithCustomSerializerTests.Fixture>
    {
        public HostedGrpcNetWithCustomSerializerTests(Fixture fixture) : base(fixture)
        {
        }

        /// <summary>
        /// JSON serializer carrying the assembly-qualified type name as the
        /// wire type identifier; claims it can serialize anything.
        /// </summary>
        public class CustomSerializer : ISerializer
        {
            // Caches Type lookups per type name to avoid repeated Type.GetType calls.
            private readonly ConcurrentDictionary<string, Type> _types = new();

            public object Deserialize(ByteString bytes, string typeName)
            {
                var type = _types.GetOrAdd(typeName, name => Type.GetType(name));
                return System.Text.Json.JsonSerializer.Deserialize(bytes.ToStringUtf8(), type);
            }

            public string GetTypeName(object message) => message.GetType().AssemblyQualifiedName;

            public ByteString Serialize(object obj) =>
                ByteString.CopyFromUtf8(System.Text.Json.JsonSerializer.Serialize(obj));

            public bool CanSerialize(object obj) => true;
        }

        /// <summary>
        /// Hosts a client and a server remote (both using the custom
        /// serializer) and tears both hosts down on dispose.
        /// </summary>
        public class Fixture : RemoteFixture
        {
            private readonly IHost _clientHost;
            private readonly IHost _serverHost;

            public Fixture()
            {
                var clientConfig = ConfigureClientRemoteConfig(GrpcNetRemoteConfig.BindToLocalhost())
                    .WithSerializer(serializerId: 2, priority: 1000, new CustomSerializer());
                (_clientHost, Remote) = GetHostedGrpcNetRemote(clientConfig);
                var serverConfig = ConfigureServerRemoteConfig(GrpcNetRemoteConfig.BindToLocalhost())
                    .WithSerializer(serializerId: 2, priority: 1000, new CustomSerializer());
                (_serverHost, ServerRemote) = GetHostedGrpcNetRemote(serverConfig);
            }

            public override async Task DisposeAsync()
            {
                await _clientHost.StopAsync();
                _clientHost.Dispose();
                await _serverHost.StopAsync();
                _serverHost.Dispose();
            }
        }
    }
}
|
(function() {
'use strict';
var gulp = require('gulp');
var plug = require('gulp-load-plugins')();
var path = require('path');
var config = require('../config');
exports.task = function() {
return gulp.src(path.join(config.coverageDir, 'report-lcov/lcov.info'))
.pipe(plug.coveralls());
};
})();
|
<?php
namespace Cloudinary\Cloudinary\Block;
use Cloudinary\Cloudinary\Core\ConfigurationInterface;
use Magento\Framework\Json\EncoderInterface;
use Magento\Framework\View\Element\Template\Context;
class Lazyload extends \Magento\Framework\View\Element\Template
{
    /**
     * Module configuration accessor.
     *
     * @var ConfigurationInterface
     */
    private $configuration;

    /**
     * JSON encoder used to serialize the lazyload options for the frontend.
     *
     * @var EncoderInterface
     */
    private $jsonEncoder;

    /**
     * @param Context $context
     * @param ConfigurationInterface $configuration
     * @param EncoderInterface $jsonEncoder
     * @param array $data
     */
    public function __construct(
        Context $context,
        ConfigurationInterface $configuration,
        EncoderInterface $jsonEncoder,
        array $data = []
    ) {
        $this->configuration = $configuration;
        $this->jsonEncoder = $jsonEncoder;
        parent::__construct($context, $data);
    }

    /**
     * Whether the Cloudinary module and its lazyload feature are both enabled.
     *
     * @return boolean
     */
    public function isEnabledLazyload()
    {
        return $this->configuration->isEnabled() && $this->configuration->isEnabledLazyload();
    }

    /**
     * Lazyload frontend options (threshold, effect, placeholder), sourced
     * from the module configuration.
     *
     * @param boolean $json encode as JSON string (default) instead of array
     * @return string|array
     */
    public function getLazyloadOptions($json = true)
    {
        $options = [
            'threshold' => $this->configuration->getLazyloadThreshold(),
            'effect' => $this->configuration->getLazyloadEffect(),
            'placeholder' => $this->configuration->getLazyloadPlaceholder(),
        ];
        return $json ? $this->jsonEncoder->encode($options) : $options;
    }
}
|
package nl.suriani.jadeval.symbols.value;
/**
 * Fact value carrying a plain text ({@link String}) payload.
 */
public class TextValue extends FactValue<String> {
	public TextValue(String value) {
		super(value);
	}
}
|
#!/bin/bash

# Decompile (or disassemble) a Glulx game file with glulxtoc, build the
# generated C against a Glk library, and run its regression tests.

set -e

TESTDIR="$(dirname "$0")"

# --- Argument parsing -------------------------------------------------------
while [[ "$#" -gt 0 ]]; do
    case $1 in
        -d|--disassemble) DISASSEMBLE=1; ;;
        -f|--file) FILE="$2"; shift ;;
        -r|--rem) REM=1; ;;
        -s|--safe-funcs) SAFE_FUNCS="--safe-function-overrides=$2"; shift ;;
        --stack) STACK="--stack-size=$2"; shift ;;
        --stop-on-string) STOP_ON_STRING="--stop-on-string"; ;;
        -u|--unsafe-funcs) UNSAFE_FUNCS="--unsafe-function-overrides=$2"; shift ;;
        *) echo "Unknown parameter passed: $1"; exit 1 ;;
    esac
    shift
done

if [ "$DISASSEMBLE" ]; then
    OUTDIR="$PWD/$FILE.disassembled"
    DISFLAG="--disassemble"
else
    OUTDIR="$PWD/$FILE.decompiled"
fi

if [ -f "$FILE.gameinfo.dbg" ]; then
    DEBUG="--debug-file=$FILE.gameinfo.dbg"
fi

# Path expansions are quoted so paths with spaces survive; the optional flag
# variables are deliberately left unquoted so empty ones expand to nothing
# rather than to an empty argument.
cargo run --bin glulxtoc -- "$FILE" --out-dir="$OUTDIR" $DISFLAG $DEBUG $SAFE_FUNCS $STACK $STOP_ON_STRING $UNSAFE_FUNCS

if [ "$REM" ]; then
    GLKLIB="remglk"
    REMFLAG="-r"
else
    GLKLIB="cheapglk"
    BINFLAG="-u"
fi

BUILDDIR="$OUTDIR/$GLKLIB"
mkdir -p "$BUILDDIR"

export CC=clang
cmake -DGlkLibPath="$TESTDIR/$GLKLIB" -B"$BUILDDIR" -S"$OUTDIR"
make -C "$BUILDDIR" -j"$(nproc)" --no-print-directory

REGTEST="$TESTDIR/regtest.py"
BIN="$BUILDDIR/$(basename "${FILE%%.*}") $BINFLAG"
TESTFILE="$FILE.regtest"
echo "Running testfile $TESTFILE"
python "$REGTEST" -i "$BIN" "$TESTFILE" $REMFLAG -t 10
|
# Start the superadmin Node app on port 9999 in the background
# (--expose-gc makes manual garbage collection available to the app).
/usr/bin/node --nouse-idle-notification --expose-gc /www/superadmin/index.js 9999 --release > /dev/stdout &
# Run nginx in the foreground so the shell keeps a foreground process
# (typical container entrypoint pattern — presumably Docker; confirm).
nginx -g "daemon off;"
|
module.exports = function (app) {
const mongooseClient = app.get('mongooseClient');
const { Schema } = mongooseClient;
const pointSchema = new Schema({
type: {
type: String,
enum: ['Point'],
required: true
},
coordinates: {
type: [Number],
required: true
}
});
const polygonSchema = new Schema({
type: {
type: String,
enum: ['Polygon'],
required: true
},
coordinates: {
type: [[[Number]]], // Array of arrays of arrays of numbers
required: true
}
});
return {pointSchema, polygonSchema};
};
|
package io.github.resilience4j.circuitbreaker.utils;
import io.github.resilience4j.circuitbreaker.CircuitBreaker;
import io.github.resilience4j.circuitbreaker.CircuitBreaker.State;
import static io.github.resilience4j.circuitbreaker.CircuitBreaker.State.*;
public final class CircuitBreakerUtil {

    // Utility class — prevent instantiation.
    private CircuitBreakerUtil() {
    }

    /**
     * Indicates whether the Circuit Breaker currently allows calls, i.e. it is
     * in one of the call-permitting states (CLOSED, HALF_OPEN or DISABLED);
     * any other state rejects calls.
     *
     * @param circuitBreaker to test
     * @return call is permitted
     */
    public static boolean isCallPermitted(CircuitBreaker circuitBreaker) {
        State state = circuitBreaker.getState();
        return state == CLOSED || state == HALF_OPEN || state == DISABLED;
    }
}
|
# [TypeScript React Style Guide](https://www.npmjs.com/package/@qulix/tslint-config-react)
> Style guide under development.
|
๏ปฟusing System;
using System.IO;
using System.Text;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Globalization;
namespace Tylerian.Challenge08
{
class Program
{
const string TestInputFileName = "./testinput.txt";
const string SubmitInputFileName = "./submitinput.txt";
static void Main(string[] args)
{
try
{
var problems = ParseFileInput(SubmitInputFileName);
foreach (var problem in problems)
{
try
{
Console.WriteLine($"Case #{problem.Id}: {problem.TryNormalizeInputToHexNumber()}");
}
catch (Exception ex)
{
// Print N/A instead of ex.Message
// to match challenge's requirements!
Console.WriteLine($"Case #{problem.Id}: {ex.Message}");
}
}
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
Console.WriteLine(ex.StackTrace);
}
}
static List<Problem> ParseFileInput(string path)
{
var output = new List<Problem>();
var input = File.ReadAllLines(path, Encoding.Unicode);
// Is the input in the correct format?
// C (number of cases)
// F S (f: number of floors, s: number of shortcuts)
// [... S]
// A B Y (a: from shortcut, b: to shortcut, y: years taken)
// [/... S]
if (input.Length <= 1)
{
throw new Exception("Error while parsing input file.");
}
// Start at line 1, because line 0
// is the total number of cases
for (int caseId = 0, line = 1; line < input.Length;)
{
// Increase case ID
caseId++;
var unicode = input[line++];
output.Add(new Problem(caseId, unicode));
}
return output;
}
}
public class Problem
{
private static Regex Validator;
private const string ValidInputPattern = "^[\\p{Z}]*(?<numbers>[\\p{N}]+)[\\p{Z}]*$";
public int Id
{
get;
}
public string Unicode
{
get;
}
static Problem()
{
Validator = new Regex(
ValidInputPattern,
RegexOptions.Compiled
);
}
public Problem(int id, string unicode)
{
this.Id = id;
this.Unicode = unicode.Trim();
}
public string TryNormalizeInputToHexNumber()
{
// Is our input in a valid form?
if (!Validator.IsMatch(Unicode))
{
throw new Exception("Invalid input format.");
}
// get numeric block from my regex pattern and normalize it
// decomposing each unicode character to it's basic values
var match = Validator.Match(Unicode).Result("${numbers}");
match = match.Normalize(NormalizationForm.FormD);
var value = new StringBuilder();
foreach (var character in match)
{
value.Append(CharUnicodeInfo.GetDigitValue(character));
}
// return result in hex format.
return long.Parse(value.ToString()).ToString("x");
}
}
}
|
import { Component, OnInit, TemplateRef, ViewChild } from '@angular/core';
import { SocialService } from '@delon/auth';
import { _HttpClient } from '@delon/theme';
import { NzMessageService, NzModalService } from 'ng-zorro-antd';
import { STColumn, STComponent, STData, STReq, STRes } from '@delon/abc';
import { map, tap } from 'rxjs/operators';
import { BaseConfig } from '../../../app.config';
@Component({
selector: 'app-rechargeRecord',
templateUrl: './rechargeRecord.component.html',
providers: [ SocialService ]
})
export class RechargeRecordComponent implements OnInit{
constructor(
private http: _HttpClient,
public msg: NzMessageService,
) {
}
q: any = {
pi: 1,
ps: 10,
sorter: '',
status: null,
statusList: [],
};
data=BaseConfig.host+'/center/getRechargeRecord';
loading = false;
status = [
{ index: 0, text: 'ๆฏไปๅฎ', value: false, type: 'default', checked: false },
];
@ViewChild('st')
st: STComponent;
keyword:String;
//ๅฎไน่ฏทๆฑ็ๅๆฐ
req: STReq = {
reName: {
pi: 'page',
ps: 'size'
},
params:{
keyword:this.keyword
}
}
//ๅฎไน่ฟๅ็ๅๆฐ
res: STRes = {
reName: {
total: 'data.total',
list: 'data.list'
},
process:(data:STData[])=>
data.map(i => {
const statusItem = this.status[i.rechargeWay];
i.statusText = statusItem.text;
i.statusType = statusItem.type;
return i;
}),
}
columns: STColumn[] = [
{
title: 'ๅ
ๅผๆนๅผ',
index: 'rechargeWay',
render: 'status',
filter: {
menus: this.status,
fn: (filter: any, record: any) => record.status === filter.index,
},
},
{ title: 'ๅ
ๅผ้้ข', index: 'rechargeMoney' },
{
title: 'ๅ
ๅผๆถ้ด',
index: 'createTime',
type: 'date',
sort: {
compare: (a: any, b: any) => a.updatedAt - b.updatedAt,
},
}
];
selectedRows: STData[] = [];
totalCallNo = 0;
selectedValue=0;
selectData:any[]=[];
ngOnInit(): void {
//่ทๅๅฎขๆๅ่กจ
this.http.get('http://localhost:81/v1/imweb/kf/listall').subscribe((res:any)=>{
this.selectData=res.data;
}
)
}
checkboxChange(list: STData[]) {
list.map(i =>{
this.selectedRows.push(i.robotId);
} )
this.totalCallNo = this.selectedRows.reduce(
(total, cv) => total + cv.callNo,
0,
);
}
}
|
/*
* //
* // This file is part of the pika parser implementation allowing whitespace-sensitive syntax. It is based
* // on the Java reference implementation at:
* //
* // https://github.com/lukehutch/pikaparser
* //
* // The pika parsing algorithm is described in the following paper:
* //
* // Pika parsing: reformulating packrat parsing as a dynamic programming algorithm solves the left recursion
* // and error recovery problems. Luke A. D. Hutchison, May 2020.
* // https://arxiv.org/abs/2005.06444* //
* //
* // This software is provided under the MIT license:
* //
* // Copyright (c) 2020 Paul Blair
* // Based on pikaparser by Luke Hutchison, also licensed with the MIT license.
* //
* // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* // documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* // the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
* // and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* //
* // The above copyright notice and this permission notice shall be included in all copies or substantial portions
* // of the Software.
* //
* // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* // TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* // CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* // DEALINGS IN THE SOFTWARE.
* //
*
*/
package net.phobot.parser.clause.terminal
import net.phobot.parser.memotable.Match
import net.phobot.parser.memotable.Match.Companion.NO_SUBCLAUSE_MATCHES
import net.phobot.parser.memotable.MemoKey
import net.phobot.parser.memotable.MemoTable
import net.phobot.parser.utils.StringUtils
import java.util.*
/** Terminal clause that matches a character or sequence of characters. */
/** Terminal clause that matches a single character drawn from a (possibly inverted) set. */
class CharSet : Terminal {

    // Characters matched directly by this node.
    private val charSet: MutableSet<Char> = HashSet()

    // Nested CharSets; kept separate rather than flattened because each
    // sub-set may carry its own invertMatch flag.
    private val subCharSets: MutableList<CharSet> = ArrayList()

    // When true, this node matches any character NOT in charSet.
    private var invertMatch = false

    constructor(vararg chars: Char) : super() {
        chars.forEach { charSet.add(it) }
    }

    constructor(vararg charSets: CharSet) : super() {
        subCharSets.addAll(charSets)
    }

    /** Invert in-place, and return this. */
    fun invert(): CharSet {
        invertMatch = !invertMatch
        return this
    }

    // True when the character at memoKey.startPos is accepted by this set
    // or by any of its sub-sets.
    private fun matchesInput(memoKey: MemoKey, input: String): Boolean {
        if (memoKey.startPos >= input.length) {
            return false
        }
        val matches = (charSet.isNotEmpty() //
                && invertMatch xor charSet.contains(input[memoKey.startPos]))
        if (matches) {
            return true
        }
        if (subCharSets.isNotEmpty()) {
            // SubCharSets may be inverted, so need to test each individually for efficiency,
            // rather than producing a large Set<Character> for all chars of an inverted CharSet
            for (subCharSet in subCharSets) {
                if (subCharSet.matchesInput(memoKey, input)) {
                    return true
                }
            }
        }
        return false
    }

    // A CharSet consumes exactly one character, so it can never match zero chars.
    override fun determineWhetherCanMatchZeroChars() {}

    override fun match(memoTable: MemoTable, memoKey: MemoKey, input: String): Match? {
        return if (matchesInput(memoKey, input)) {
            // Terminals are not memoized (i.e. don't look in the memo table)
            Match(memoKey, length = 1, subClauseMatches = NO_SUBCLAUSE_MATCHES)
        } else null
    }

    // Collects this node and all descendants that carry direct characters,
    // for rendering the clause as a string.
    private fun getCharSets(charSets: MutableList<CharSet>) {
        if (charSet.isNotEmpty()) {
            charSets.add(this)
        }
        for (subCharSet in subCharSets) {
            subCharSet.getCharSets(charSets)
        }
    }

    // Renders this node's character set in grammar notation: a single quoted
    // character, or a [..] class with runs of consecutive codes collapsed to ranges.
    private fun toString(buf: StringBuilder) {
        val charsSorted = ArrayList(charSet)
        charsSorted.sort()
        val isSingleChar = !invertMatch && charsSorted.size == 1
        if (isSingleChar) {
            val c = charsSorted.iterator().next()
            buf.append('\'')
            buf.append(StringUtils.escapeQuotedChar(c))
            buf.append('\'')
        } else {
            if (charsSorted.isNotEmpty()) {
                buf.append('[')
                if (invertMatch) {
                    buf.append('^')
                }
                var i = 0
                while (i < charsSorted.size) {
                    val c = charsSorted[i]
                    buf.append(StringUtils.escapeCharRangeChar(c))
                    // Extend j over the run of consecutive character codes starting at c.
                    var j = i + 1
                    while (j < charsSorted.size && charsSorted[j].toInt() == c.toInt() + (j - i)) {
                        j++
                    }
                    if (j > i + 2) {
                        buf.append("-")
                        i = j - 1
                        // FIX: escape the range-end character the same way as
                        // the range start (the original appended it raw).
                        buf.append(StringUtils.escapeCharRangeChar(charsSorted[i]))
                    }
                    i++
                }
                buf.append(']')
            }
        }
    }

    override fun toString(): String {
        return updateStringCacheIfNecessary {
            val charSets = ArrayList<CharSet>()
            getCharSets(charSets)
            val buf = StringBuilder()
            if (charSets.size > 1) {
                buf.append('(')
            }
            val startLen = buf.length
            for (charSet in charSets) {
                if (buf.length > startLen) {
                    buf.append(" | ")
                }
                charSet.toString(buf)
            }
            if (charSets.size > 1) {
                buf.append(')')
            }
            buf.toString()
        }
    }
}
|
module Transformers where

import Control.Monad.State
import Control.Monad.Identity
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class

-- Demo: State layered over Identity.
-- Run with: runIdentity $ evalStateT floop 0
-- Puts 7, doubles it, then returns the final state (14).
floop :: StateT Int Identity Int
floop = do
  put 7
  modify (\n -> n * 2)
  get

-- :i StateT
-- newtype StateT s (m :: * -> *) a
-- Demo: two stacked State layers (Int on top of String); "lift" reaches
-- the inner State String layer.
-- Run with: evalState (evalStateT doop 0) "hello"
doop :: StateT Int (State String) (Int, String)
doop = do
  modify (+1)
  lift $ modify (++ " world")
  num <- get
  wrd <- lift get
  return (num, wrd)

-- :i ReaderT
-- newtype ReaderT r (m :: * -> *) a
-- Demo: Reader over IO; liftIO lifts the print into the ReaderT stack.
-- Run with: runReaderT woop [1..10]
-- (You could substitute Identity for IO and use
--  runIdentity $ runReaderT woop [1..10] — minus the print.)
woop :: ReaderT [Int] IO Int
woop = do
  xs <- ask
  liftIO $ print $ "orig list" ++ (show xs)
  return $ sum xs

-- Demo: State over IO — logs each intermediate state while mutating it,
-- then returns the final state.
boop :: StateT Int IO Int
boop = do
  s <- get
  liftIO $ print ("orig state is " ++ (show s))
  modify (\s -> s * 2)
  s1 <- get
  liftIO $ print ("doubled it's " ++ (show s1))
  modify (\s -> s + 10)
  s2 <- get
  liftIO $ (print ("and final result is " ++ (show s2)))
  get
|
// "Make 'Data' public" "true"
// ACTION: Make 'foo' private
// NOTE(review): IDE intention/quick-fix test data — the directive comments above
// and the <caret> marker below are consumed by the test framework; do not reformat.
private data class Data(val x: Int)
class First {
    val <caret>foo = Data(13)
}
|
๏ปฟnamespace Atata
{
/// <summary>
/// Represents the behavior to find an item of <see cref="OptionList{T, TOwner}"/> control by parent element content.
/// </summary>
public class FindItemByParentContentAttribute : FindItemByRelativeElementContentAttribute
{
    /// <summary>
    /// The XPath expression selecting the parent of the item element.
    /// </summary>
    public const string ParentElementXPath = "parent::*";

    /// <summary>
    /// Initializes a new instance of the <see cref="FindItemByParentContentAttribute"/> class.
    /// </summary>
    public FindItemByParentContentAttribute()
        : base(ParentElementXPath)
    {
    }

    /// <summary>
    /// Initializes a new instance with the specified term case applied to the parent content.
    /// </summary>
    public FindItemByParentContentAttribute(TermCase termCase)
        : base(ParentElementXPath, termCase)
    {
    }

    /// <summary>
    /// Initializes a new instance with the specified term match approach.
    /// </summary>
    public FindItemByParentContentAttribute(TermMatch match)
        : base(ParentElementXPath, match)
    {
    }

    /// <summary>
    /// Initializes a new instance with the specified term match approach and term case.
    /// </summary>
    public FindItemByParentContentAttribute(TermMatch match, TermCase termCase)
        : base(ParentElementXPath, match, termCase)
    {
    }
}
}
|
import { Vector } from "matter-js";
import { enumIncludes } from "./func";
import { msg } from "./msg";
/** KeyboardEvent.code values the game reacts to; any other key resets all state. */
export enum HandledKeys {
  ArrowUp = "ArrowUp",
  ArrowDown = "ArrowDown",
  ArrowLeft = "ArrowLeft",
  ArrowRight = "ArrowRight",
  KeyW = "KeyW",
  KeyS = "KeyS",
  KeyA = "KeyA",
  KeyD = "KeyD",
  Enter = "Enter",
  Space = "Space",
  ShiftLeft = "ShiftLeft",
}
/**
 * Global keyboard tracker: records press state for the handled keys and
 * exposes convenience getters for the movement directions.
 */
export class Keyboard {
  private static _initialized = false;
  static readonly state = new Map<string, boolean>();

  /** Idempotently attaches the window key listeners. */
  static initialize() {
    if (Keyboard._initialized) return;
    Keyboard._initialized = true;
    window.addEventListener("keydown", Keyboard.keyDown);
    window.addEventListener("keyup", Keyboard.keyUp);
  }

  /** Forgets every tracked key state. */
  static resetAll() {
    Keyboard.state.clear();
  }

  private static keyDown(e: KeyboardEvent): void {
    if (!enumIncludes(HandledKeys, e.code)) {
      // Any unhandled key wipes the tracked state.
      Keyboard.resetAll();
      return;
    }
    Keyboard.state.set(e.code, true);
    msg.emit("keydown", e.code);
  }

  private static keyUp(e: KeyboardEvent): void {
    if (!Keyboard.state.has(e.code)) return;
    Keyboard.state.set(e.code, false);
    msg.emit("keyup", e.code);
  }

  /** True-ish when either of the two key codes is currently pressed. */
  private static either(a: HandledKeys, b: HandledKeys) {
    return Keyboard.state.get(a) || Keyboard.state.get(b);
  }

  static get up() {
    return Keyboard.either(HandledKeys.ArrowUp, HandledKeys.KeyW);
  }

  static get down() {
    return Keyboard.either(HandledKeys.ArrowDown, HandledKeys.KeyS);
  }

  static get left() {
    return Keyboard.either(HandledKeys.ArrowLeft, HandledKeys.KeyA);
  }

  static get right() {
    return Keyboard.either(HandledKeys.ArrowRight, HandledKeys.KeyD);
  }

  static get isMoving() {
    return this.up || this.down || this.left || this.right;
  }

  /** Direction vector for the pressed movement keys, normalised by Matter.js. */
  static moveVector(): Vector {
    const dir = Vector.create(0, 0);
    if (Keyboard.up) dir.y -= 1;
    if (Keyboard.down) dir.y += 1;
    if (Keyboard.left) dir.x -= 1;
    if (Keyboard.right) dir.x += 1;
    return Vector.normalise(dir);
  }
}
// Attach the global listeners as soon as this module is loaded.
Keyboard.initialize();
|
---
name: backend.averageConnectTimeInSeconds
type: attribute
events:
- HAProxyBackendSample
---
Average connect time over the last 1024 requests, in milliseconds.
|
package de.jensklingenberg.mpapt.utils
/**
 * String names of the Kotlin/Native targets.
 *
 * Values mirror org.jetbrains.kotlin.konan.target.KonanTarget.
 */
class KonanTargetValues {
    companion object {
        // const val: these are compile-time String constants, inlined at call
        // sites — cheaper and more idiomatic than instance-backed vals.
        const val ANDROID_ARM32 = "android_arm32"
        const val ANDROID_ARM64 = "android_arm64"
        const val IOS_ARM32 = "ios_arm32"
        const val IOS_ARM64 = "ios_arm64"
        const val IOS_X64 = "ios_x64"
        const val LINUX_X64 = "linux_x64"
        const val MINGW_X86 = "mingw_x86"
        const val MINGW_X64 = "mingw_x64"
        const val MACOS_X64 = "macos_x64"
        const val LINUX_ARM32_HFP = "linux_arm32_hfp"
        const val LINUX_MIPS32 = "linux_mips32"
        const val LINUX_MIPSEL32 = "linux_mipsel32"
        const val WASM32 = "wasm32"
    }
}
|
from ._builtin import Page
from .models import Constants
from exp.util import Participant
class InstructionsPage(Page):
    """Instructions page, shown only in the configured instructions round."""

    def is_displayed(self):
        # oTree hook: render this page only for the instructions round.
        return self.round_number == Constants.INSTRUCTIONS_ROUND
class BidPage(Page):
    """Collects the player's bid for the current phase-three auction round."""

    form_model = 'player'
    form_fields = ['bid']

    def vars_for_template(self):
        # Template context describing this round's auction and signals.
        experiment = Participant.get_experiment(self.player)
        phase = experiment.phase_three
        auction = phase.auction(self.round_number)
        if auction.signal_is_percentage:
            signals = ", ".join("{}%".format(round(s * 100)) for s in auction.signals)
        else:
            signals = ", ".join(str(s) for s in auction.signals)
        return {
            'signal_is_percentage': auction.signal_is_percentage,
            'auction': auction,
            'signal': phase.signal(self.round_number),
            'low_update': phase.low_update(self.round_number),
            'high_update': phase.high_update(self.round_number),
            'signals': signals,
        }

    def bid_error_message(self, bid):
        # oTree validation hook: returning a string rejects the submitted bid.
        experiment = Participant.get_experiment(self.player)
        auction = experiment.phase_three.auction(self.round_number)
        lo, hi = auction.min_value, auction.max_value
        if not lo <= bid <= hi:
            return 'The bid value must be between {} and {}.'.format(lo, hi)

    def before_next_page(self):
        # Persist the bid plus the derived auction data on the player row.
        experiment = Participant.get_experiment(self.player)
        phase = experiment.phase_three
        phase.set_bid(self.round_number, float(self.player.bid))
        self.player.auction = phase.auction(self.round_number).aid
        self.player.signal = phase.signal(self.round_number)
        self.player.low_update = phase.low_update(self.round_number)
        self.player.high_update = phase.high_update(self.round_number)
# Order in which oTree presents the pages in each round.
page_sequence = [
    InstructionsPage, BidPage,
]
|
//
// DConnectManager.h
// dConnectManager
//
// Created by ๅฐๆ ไผธ้ on 2014/05/02.
// Copyright (c) 2014 NTT DOCOMO, INC. All Rights Reserved.
//
/*!
@mainpage
dConnectใฎ่ชฌๆใใผใธ
*/
/*! @file
@brief dConnectๆฌไฝใ
@author NTT DOCOMO
@date ไฝๆๆฅ(2014.5.14)
*/
#import <Foundation/Foundation.h>
#import <DConnectSDK/DConnectRequestMessage.h>
#import <DConnectSDK/DConnectResponseMessage.h>
#import <DConnectSDK/DConnectProfileProvider.h>

/*!
 @brief Notification name signaling that the application was suspended by the
        home button.

 For this event to be delivered, the host app's AppDelegate must implement:
 @code
 - (void)applicationDidEnterBackground:(UIApplication *)application
 {
     NSNotification* n = [NSNotification notificationWithName:DConnectApplicationDidEnterBackground object:self];
     [[NSNotificationCenter defaultCenter] postNotification:n];
 }
 @endcode
 */
extern NSString *const DConnectApplicationDidEnterBackground;

/*!
 @brief Notification name signaling that the application resumed to the
        foreground.

 For this event to be delivered, the host app's AppDelegate must implement:
 @code
 - (void)applicationWillEnterForeground:(UIApplication *)application
 {
     NSNotification* n = [NSNotification notificationWithName:DConnectApplicationWillEnterForeground object:self];
     [[NSNotificationCenter defaultCenter] postNotification:n];
 }
 @endcode
 */
extern NSString *const DConnectApplicationWillEnterForeground;

@class DConnectManager;

/*!
 @brief Delegate receiving events from DConnectManager.
 */
@protocol DConnectManagerDelegate <NSObject>

/*!
 @brief Receives an event from a device.
 @param[in] manager the manager
 @param[in] event   the event message
 */
- (void) manager:(DConnectManager *)manager didReceiveDConnectMessage:(DConnectMessage *)event;

@end

/*!
 @class DConnectManager
 @brief Central class managing dConnect.
 @code
 // Starting DConnectManager
 DConnectManager *mgr = [DConnectManager sharedManager];
 mgr.delegate = delegate;
 @endcode
 */
@interface DConnectManager : NSObject <DConnectProfileProvider>

/*!
 @brief Delegate that receives events coming from devices.

 NOTE: the d-Connect browser receives events over WebSocket instead, so the
 delegate may be left nil in that setup.
 */
@property (nonatomic, weak) id<DConnectManagerDelegate> delegate;

/*!
 @brief Returns the shared DConnectManager instance.
 @return the DConnectManager singleton
 */
+ (DConnectManager *) sharedManager;

/*!
 @brief Sends a request to dConnectManager asynchronously.

 The request is executed asynchronously and the corresponding response is
 delivered to the callback.<br/>
 Execution of this method can take a long time; callers must take care that
 this does not cause problems.<br/>
 @param[in] request  the request
 @param[in] callback the callback receiving the response
 */
- (void) sendRequest:(DConnectRequestMessage *) request callback:(DConnectResponseBlocks)callback;

/*!
 @brief Sends response data.

 When processing asynchronously, the response must be returned explicitly
 through this method.
 @param[in] response the response data
 */
- (void) sendResponse:(DConnectResponseMessage *)response;

@end
|
//
// OSCDevice.h
// DeviceServer3
//
// Created by charlie on 5/26/09.
// Copyright 2009 One More Muse. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "Device.h"
#import "lo.h"
/*!
 @brief A Device addressed over OSC (Open Sound Control) via liblo.
 */
@interface OSCDevice : Device {
    char *ipAddress; // Destination IP address as a raw C string (assign semantics — not copied).
    int port;        // Destination port number.
}

/*!
 @brief Processes an incoming OSC message.
 @param argv  array of message argument values
 @param count number of arguments in argv
 @param types OSC type tag string describing argv
 */
- (void) processMessageWithValues:(lo_arg **)argv count:(int)count types:(const char *)types;

@property (assign) char *ipAddress;
@property (assign) int port;

@end
|
package com.emajliramokade
package server.api
package rest
import api.model.EmailProvjera.{ Odgovor, Zahtjev }
import hr.ngs.patterns.ISerialization
import net.liftweb.http.{ LiftResponse, PlainTextResponse, PostRequest, Req }
import net.liftweb.http.rest.RestHelper
import org.slf4j.Logger
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.{ Failure, Success, Try }
import io.jvm.uuid._
import services.dispatchers.EmailSenderDispatcher
/**
 * Lift REST endpoint that accepts e-mail send requests ("Zahtjev") and
 * dispatches them to the EmailSenderDispatcher, returning an "Odgovor"
 * (response) serialized as JSON.
 *
 * Routes:
 *  - "ping"        -> plain-text "pong" liveness probe
 *  - POST anything -> parse a Zahtjev from the JSON body, falling back to
 *                     form parameters, then dispatch it
 */
class EmailListener(
  logger: Logger
, serialization: ISerialization[String]
, dispatcher: EmailSenderDispatcher
) extends RestHelper {
  serve {
    case req @ Req("ping" :: Nil, _, _) =>
      PlainTextResponse("pong")

    case req @ Req(x, _, PostRequest) =>
      // Body takes precedence; form parameters are the fallback.
      parseBody(req) orElse parseParams(req) match {
        case Success(zahtjev) =>
          // NOTE(review): blocks the request thread for up to 60 seconds
          // waiting on the dispatcher — confirm this is acceptable here.
          val resFut = dispatcher.dispatch(zahtjev)
          val res = Await.result(resFut, 60 seconds)
          odgovorToResponse(res)
        case Failure(e) =>
          odgovorToResponse(new Odgovor(false, e.toString))
      }
  }

  /** Serializes an Odgovor as JSON; HTTP 200 on success status, 400 otherwise. */
  def odgovorToResponse(odgovor: Odgovor): LiftResponse = {
    val body = serialization.serialize(odgovor)
    val code = if (odgovor.getStatus) 200 else 400
    // NOTE(review): Encoding is resolved from an enclosing scope not visible
    // here — confirm where it is defined.
    PlainTextResponse(body, List("Content-type" -> s"application/json; charset=$Encoding"), code)
  }

  // {"email":"\"Đoni Šiš\" <đonatan.ćevapčić@example.com>"}
  /** Parses a Zahtjev from the raw JSON request body. */
  def parseBody(req: Req): Try[Zahtjev] =
    Try {
      val body = req.body.openOrThrowException("Tried to open an empty body box")
      // Workaround: rename the "kada" JSON field to "kadaID" for
      // compatibility with the Haskell entrypoint.
      val strBody = body.fromUTF8
        .replace("\"kada\"", "\"kadaID\"")
      serialization.deserialize[Zahtjev](strBody, null)
    }

  // Map(email -> List("Đoni Šiš" <đonatan.ćevapčić@example.com>))
  /** Parses a Zahtjev from form parameters; kadaID is optional. */
  def parseParams(req: Req): Try[Zahtjev] =
    Try {
      val params = req._params
      val email = params("email").head
      val kadaID = Try {
        UUID(params("kadaID").head)
      }
      new Zahtjev()
        .setEmail(email)
        .setKadaID(kadaID getOrElse null)
    }
}
|
# Installing
* Explain the prerequisites
* Explain the requirements of the operator (RBAC, etc.)
* Explain the operator ConfigMap
* Explain how to perform the installation
|
import { Injectable, Autowired } from '@opensumi/di';
import { IRPCProtocol } from '@opensumi/ide-connection';
import { IEventBus, Disposable, ILogger } from '@opensumi/ide-core-browser';
import { IMainLayoutService, TabBarRegistrationEvent } from '@opensumi/ide-main-layout';
import { TabBarHandler } from '@opensumi/ide-main-layout/lib/browser/tabbar-handler';
import { IconType, IconShape, IIconService } from '@opensumi/ide-theme';
import { ExtHostSumiAPIIdentifier } from '../../common/sumi';
import { IMainThreadLayout, IExtHostLayout } from '../../common/sumi/layout';
@Injectable({ multiple: true })
export class MainThreadLayout extends Disposable implements IMainThreadLayout {
  @Autowired(IMainLayoutService)
  layoutService: IMainLayoutService;

  @Autowired(IIconService)
  private iconService: IIconService;

  // Tab-bar handlers indexed by container id, populated by bindHandleEvents.
  handlerMap = new Map<string, TabBarHandler>();

  // RPC proxy to the extension-host side of the layout API.
  proxy: IExtHostLayout;

  @Autowired(IEventBus)
  eventBus: IEventBus;

  @Autowired(ILogger)
  logger: ILogger;

  constructor(rpcProtocol: IRPCProtocol) {
    super();
    this.proxy = rpcProtocol.getProxy(ExtHostSumiAPIIdentifier.ExtHostLayout);
  }

  /** Updates the title of the tab identified by `id`. */
  $setTitle(id: string, title: string): void {
    this.getHandler(id).updateTitle(title);
  }

  /** Sets the tab icon from an icon path, registered as a background icon. */
  $setIcon(id: string, iconPath: string): void {
    const iconClass = this.iconService.fromIcon('', iconPath, IconType.Background, IconShape.Square);
    this.getHandler(id).setIconClass(iconClass!);
  }

  /** Resizes the tab's panel. */
  $setSize(id: string, size: number): void {
    this.getHandler(id).setSize(size);
  }

  /** Activates (focuses) the tab. */
  $activate(id: string): void {
    this.getHandler(id).activate();
  }

  /** Deactivates the tab. */
  $deactivate(id: string): void {
    this.getHandler(id).deactivate();
  }

  /** Shows a badge on the tab. */
  $setBadge(id: string, badge: string): void {
    this.getHandler(id).setBadge(badge);
  }

  /** Shows or hides the tab; hiding also deactivates it first if needed. */
  async $setVisible(id: string, visible: boolean) {
    if (visible) {
      this.getHandler(id).show();
    } else {
      if (this.getHandler(id).isActivated()) {
        this.getHandler(id).deactivate();
      }
      this.getHandler(id).hide();
    }
  }

  /**
   * Connects to the tab-bar handler for `id`, waiting for its registration
   * event if it is not available yet.
   */
  async $connectTabbar(id: string) {
    if (!this.handlerMap.has(id)) {
      const handle = this.layoutService.getTabbarHandler(id);
      if (handle) {
        this.bindHandleEvents(handle);
      } else {
        // Not registered yet: wait for the matching registration event,
        // bind once, then dispose the listener.
        const disposer = this.eventBus.on(TabBarRegistrationEvent, (e) => {
          if (e.payload.tabBarId === id) {
            const handle = this.layoutService.getTabbarHandler(id);
            this.bindHandleEvents(handle!);
            disposer.dispose();
          }
        });
        this.addDispose(disposer);
      }
    }
  }

  // The view may not be registered on the layout yet; in that case this
  // method returns false.
  async $isAttached(id: string) {
    return !!this.layoutService.getTabbarHandler(id);
  }

  /** Stores the handler and forwards its activate/deactivate events to the ext host. */
  private bindHandleEvents(handle: TabBarHandler) {
    this.handlerMap.set(handle.containerId, handle);
    handle.onActivate(() => {
      this.proxy.$acceptMessage(handle.containerId, 'activate');
    });
    handle.onInActivate(() => {
      this.proxy.$acceptMessage(handle.containerId, 'deactivate');
    });
  }

  // NOTE(review): logs a warning when no handler exists but still returns it
  // with a non-null assertion — callers will then throw on undefined.
  protected getHandler(id: string) {
    const handler = this.layoutService.getTabbarHandler(id);
    if (!handler) {
      this.logger.warn(`MainThreaLayout:没有找到${id}对应的handler`);
    }
    return handler!;
  }
}
|
package ca.ulaval.glo2003.transactions.rest.serializers;
import ca.ulaval.glo2003.interfaces.rest.serializers.DoubleDeserializer;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
/**
 * Jackson deserializer for monetary amounts: accepts double values with at
 * most two decimal places and converts any failure into the domain-specific
 * runtime exception E (raised by {@code throwException()} in the superclass).
 */
public abstract class PriceDeserializer<E extends RuntimeException> extends DoubleDeserializer<E> {

    // Maximum number of digits allowed after the decimal point.
    private static final int MAX_DECIMALS = 2;

    @Override
    public Class<?> getType() {
        return Double.class;
    }

    /**
     * Deserializes a price, rejecting values with more than two decimals.
     *
     * @throws E if the token is not a valid price
     */
    @Override
    public Double deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
            throws E {
        Double price = super.deserialize(jsonParser, deserializationContext);
        try {
            // Validate the textual form so e.g. "1.999" is rejected even
            // though it parses as a double.
            if (!hasGoodAmountOfDecimals(jsonParser.getText())) throwException();
            // NOTE(review): re-reads the value after super.deserialize();
            // presumably redundant — confirm before simplifying.
            price = jsonParser.getDoubleValue();
        } catch (Exception e) {
            // Broad catch: any parser failure is mapped to the domain exception.
            throwException();
        }
        return price;
    }

    // True when the textual value has no decimal point or at most MAX_DECIMALS digits after it.
    private boolean hasGoodAmountOfDecimals(String price) {
        return !price.contains(".") || price.substring(price.indexOf('.') + 1).length() <= MAX_DECIMALS;
    }
}
|
import dizzySymbolEmoji from 'emoji-datasource-apple/img/apple/64/1f4ab.png';
// Vue component state and handlers for entering an HD-wallet mnemonic.
export default {
  data() {
    return {
      dizzySymbolEmoji,
      mnemonic: '',
      error: false,
    };
  },
  methods: {
    // Validates the form, then creates the HD wallet from the mnemonic.
    async setMnemonic() {
      const valid = await this.$validator.validateAll();
      if (!valid) {
        return;
      }
      await this.$store.dispatch('accounts/hdWallet/createWallet', this.mnemonic);
    },
  },
};
|
/*
* PDate.java
*
* 02.12.2003
*
* (c) by O.Lieshoff
*
*/
package corent.dates;
import java.io.Serializable;
import java.util.Calendar;
import java.util.GregorianCalendar;
import corent.base.Utl;
import logging.Logger;
/**
 * Diese Klasse ist aus der ursprünglich im Rahmen diverser privater
 * Projekte entwickelten Klasse PDate hervorgegangen, die letztendlich im
 * corelib-Package gelandet ist. Diese Klasse weist jedoch einige Probleme in
 * bezug auf Sicherheit und Zuverlässigkeit auf, sodaß sie an dieser
 * Stelle neu implementiert worden ist. Die Ursprünge der Klasse liegen in
 * einem Modula-2-Modul namens PackedDate, das im Rahmen zahlreicher Mickey-D's
 * Programme seinen Dienst tat und tut.
 * <P>
 * Die Klasse repräsentiert ein Datum im Integer-Format (JJJJMMTT). Dies
 * ist zwar keine unbedingt natürliche Datumsdarstellung, hat jedoch einen
 * Wust an Vorteilen, der diese Art der Repräsentation rechtfertigt.<BR>
 * <P>
 * Die Methoden der Klasse sind stellenweise auf hohe Fehlertoleranz angelegt.
 * So errechnet das Verschieben von Daten um ganze Monate immer richtige
 * Datum-Werte, die aber eventuell nicht den Erwartungen entsprechen
 * können. Beispiel:
 *
 * <PRE>
 *
 * monthBefore(1) vom 31.03.2003 -> 28.02.2003.
 * </PRE>
 *
 * <P>
 * Über die Property <TT>corent.dates.debug</TT> läßt sich an
 * einigen Stellen eine zusätzliche Debug-Ausgabe auf die Konsole
 * einschalten.
 * <P>
 * Mit Hilfe der Properties <TT>corent.dates.PDate.undefined.string.regular</TT>
 * und <TT>corent.dates.PDate.undefined.string.short</TT> kann ein
 * alternativer String zur Ausgabe eines undefinierten PDates in der
 * regulären und der kurzen Schreibweise angegeben werden. Die Defaultwerte
 * sind "XX.XX.XXXX" und "XX.XX.XX".
 *
 * <P>
 * Mit Hilfe der Properties <I>corent.dates.debug</I> und <I>corent.debug</I>
 * können zusätzliche Debuginformationen auf der Konsole ausgegeben
 * werden.
 *
 * @author O.Lieshoff
 * <P>
 *
 * @changed OLI 14.11.2008 - Korrektur des Konstruktors
 *         <TT>PDate(java.util.Date)</TT>. Erweiterung um zuschaltbare
 *         Debugausgaben in diesem Konstruktor.
 *         <P>
 *         OLI 16.02.2009 - Erste Schritte zur Umstellung auf eine
 *         englischsprachige Version der Klasse.
 *         <P>
 *         OLI 14.04.2009 - Debugging an der Methode
 *         <TT>GetMontagVonWoche(int, int)</TT>.
 *         <P>
 *
 */
public class PDate implements Serializable {
private static final Logger log = Logger.getLogger(PDate.class);

/** The undefined date marker. */
public static final PDate UNDEFINIERT = new PDate(false);

/* Packed date value in YYYYMMDD format; -1 denotes the undefined date. */
private int d = -1;
/* Private constructor used only for the UNDEFINIERT marker; leaves d at -1. */
private PDate(boolean b) {
    super();
}

/** Creates a PDate holding the current date of the executing machine. */
public PDate() {
    super();
    Calendar dt = Calendar.getInstance();
    // Pack as YYYYMMDD; Calendar months are 0-based, hence the +1.
    d = (dt.get(Calendar.YEAR) * 10000) + ((dt.get(Calendar.MONTH) + 1) * 100) + dt.get(Calendar.DAY_OF_MONTH);
}
/**
 * Creates a PDate from the given int value, provided it encodes a valid
 * date.
 *
 * @param pd An int value in PDate format (YYYYMMDD).
 * @throws DateFormatException if the int value does not encode a valid
 *         date.
 */
public PDate(int pd) throws DateFormatException {
    this(pd % 100, (pd / 100) % 100, pd / 10000);
}

/**
 * Creates a PDate as a copy of the given PDate.
 *
 * @param pd The PDate to copy.
 * @throws DateFormatException if the content of the given PDate is
 *         invalid.
 */
public PDate(PDate pd) {
    this(pd.getTag(), pd.getMonat(), pd.getJahr());
}

/**
 * Creates a PDate from the given components.
 *
 * @param tag The day of the date.
 * @param monat The month of the date.
 * @param jahr The year of the date.
 * @throws DateFormatException if the components do not form a valid date.
 */
public PDate(int tag, int monat, int jahr) {
    super();
    if (CheckDate(tag, monat, jahr)) {
        // Pack as YYYYMMDD.
        this.d = jahr * 10000 + monat * 100 + tag;
    } else {
        throw new DateFormatException("date not valid: " + tag + "." + monat + "." + jahr);
    }
}
/**
 * Parses a date string in German notation "dd.mm.yyyy" into a PDate.
 *
 * @param s The string to parse: numeric day, month and year separated by
 *        dots; the year is terminated by the end of the string.
 * @return The parsed PDate.
 * @throws DateFormatException if the string contains a character other
 *         than a digit or a dot, has more than three components, or does
 *         not denote a valid calendar date.
 */
public static PDate valueOf(String s) throws DateFormatException {
    int a = 0;
    int jahr = 0;
    int monat = 0;
    int p = 0;
    int tag = 0;
    char c;
    for (int i = 0; i < s.length(); i++) {
        c = s.charAt(i);
        if ((c >= '0') && (c <= '9')) {
            // Accumulate the current numeric component digit by digit.
            a = a * 10;
            a += (c - '0');
        } else if (c == '.') {
            p++;
            switch (p) {
            case 1:
                tag = a;
                break;
            case 2:
                monat = a;
                break;
            case 3:
                jahr = a;
                break;
            default:
                // Fixed typo in the error message ("to many" -> "too many").
                throw new DateFormatException("too many arguments");
            }
            a = 0;
        } else {
            throw new DateFormatException("invalid digit: " + c);
        }
    }
    // The final component (the year) is terminated by end-of-string.
    jahr = a;
    if (!CheckDate(tag, monat, jahr)) {
        throw new DateFormatException("date not valid: " + tag + "." + monat + "." + jahr);
    }
    return new PDate(tag, monat, jahr);
}
/*
 * Validates whether the given day/month/year form a legal Gregorian
 * calendar date.
 *
 * Returns true when the components form a valid date, false otherwise.
 */
private static boolean CheckDate(int tag, int monat, int jahr) {
    if ((monat < 1) || (monat > 12) || (tag < 1)) {
        return false;
    }
    int maxTag;
    switch (monat) {
    case 4:
    case 6:
    case 9:
    case 11:
        maxTag = 30;
        break;
    case 2:
        // Gregorian leap-year rule: divisible by 4, except century years,
        // except those divisible by 400.
        boolean schaltjahr = (jahr % 400 == 0) || ((jahr % 4 == 0) && (jahr % 100 != 0));
        maxTag = schaltjahr ? 29 : 28;
        break;
    default:
        maxTag = 31;
        break;
    }
    return tag <= maxTag;
}
/**
 * Converts this PDate to its int representation.
 *
 * @return The int value (YYYYMMDD) of this PDate.
 */
public int toInt() {
    return d;
}

/** @return The day of the month (1-based). */
public int getTag() {
    return d % 100;
}

/** @return The month of the year (1-based). */
public int getMonat() {
    return (d % 10000) / 100;
}

/** @return The year of this date. */
public int getJahr() {
    return d / 10000;
}

/** @return A new date with the given day, keeping month and year. */
public PDate setTag(int tag) {
    return new PDate(tag, this.getMonat(), this.getJahr());
}

/** @return A new date with the given month, keeping day and year. */
public PDate setMonat(int monat) {
    return new PDate(this.getTag(), monat, this.getJahr());
}

/** @return A new date with the given year, keeping day and month. */
public PDate setJahr(int jahr) {
    return new PDate(this.getTag(), this.getMonat(), jahr);
}
/**
 * @return <TT>true</TT> if this date lies in a leap year.
 */
public boolean isSchaltjahr() {
    // Gregorian rule: every 400th year is a leap year, other century years
    // are not, every remaining 4th year is.
    if (this.getJahr() % 400 == 0) {
        return true;
    } else if (this.getJahr() % 100 == 0) {
        return false;
    } else if (this.getJahr() % 4 == 0) {
        return true;
    }
    return false;
}

@Override
public boolean equals(Object o) {
    // Two PDates are equal iff their packed int representations match.
    if (!(o instanceof PDate)) {
        return false;
    }
    PDate p = (PDate) o;
    return (this.toInt() == p.toInt());
}

@Override
public int hashCode() {
    // Consistent with equals: both are based on the packed int value.
    return this.toInt();
}
@Override
public String toString() {
    // Renders as "dd.mm.yyyy" with zero padding; the undefined date renders
    // as a configurable placeholder (default "XX.XX.XXXX").
    String s = "";
    if (this.toInt() == -1) {
        s = s + Utl.GetProperty("corent.dates.PDate.undefined.string.regular", "XX.XX.XXXX");
    } else {
        String h = "";
        h = new String("" + this.getTag());
        if (h.length() < 2) {
            // NOTE(review): assigns instead of appending; harmless only
            // because s is still empty at this point — confirm intent.
            s = "0";
        }
        s = s + h + ".";
        h = new String("" + this.getMonat());
        if (h.length() < 2) {
            s = s + "0";
        }
        s = s + h + ".";
        h = new String("" + this.getJahr());
        while (h.length() < 4) {
            h = "0" + h;
        }
        s = s + h;
    }
    return s;
}
/** @return The date of the day following this date. */
public PDate naechsterTag() {
    int day = this.getTag() + 1;
    int month = this.getMonat();
    int year = this.getJahr();
    // Only dates past the 28th can roll over into the next month.
    if (day > 28) {
        switch (month) {
        case 1:
        case 3:
        case 5:
        case 7:
        case 8:
        case 10:
        case 12:
            if (day > 31) {
                month++;
                day = 1;
            }
            break;
        case 2:
            // February: respect the Gregorian leap-year rule.
            int ld = 28;
            if (year % 400 == 0) {
                ld++;
            } else if (year % 100 == 0) {
            } else if (year % 4 == 0) {
                ld++;
            }
            if (day > ld) {
                month++;
                day = 1;
            }
            break;
        case 4:
        case 6:
        case 9:
        case 11:
            if (day > 30) {
                month++;
                day = 1;
            }
            break;
        default:
            // NOP
        }
        // Rolling past December advances the year.
        if (month > 12) {
            year++;
            month = 1;
            day = 1;
        }
    }
    return new PDate(day, month, year);
}
/**
 * Returns the date lying n days after this date.
 *
 * @param n Number of days (>= 0); n == 0 returns a copy of this date.
 * @return The date lying n days after this one.
 * @throws IllegalArgumentException if n is negative.
 */
PDate naechsterTag(int n) throws IllegalArgumentException {
    // Reject only negative counts. The previous condition (n <= 0) also
    // rejected 0, contradicting both the error message and the javadoc.
    if (n < 0) {
        throw new IllegalArgumentException("n muss groesser oder gleich 0 sein!");
    }
    PDate next = new PDate(this);
    for (int i = 0; i < n; i++) {
        next = next.naechsterTag();
    }
    return next;
}
/** @return The date of the day preceding this date. */
public PDate vorherigerTag() {
    int day = this.getTag() - 1;
    int month = this.getMonat();
    int year = this.getJahr();
    if (day == 0) {
        // Rolled back into the previous month; pick that month's last day.
        month--;
        switch (month) {
        case 0:
            // Rolled back into the previous year.
            year--;
            month = 12;
            day = 31;
            break;
        case 1:
        case 3:
        case 5:
        case 7:
        case 8:
        case 10:
            day = 31;
            break;
        case 2:
            // February: respect the Gregorian leap-year rule.
            day = 28;
            if (year % 400 == 0) {
                day++;
            } else if (year % 100 == 0) {
            } else if (year % 4 == 0) {
                day++;
            }
            break;
        case 4:
        case 6:
        case 9:
        case 11:
            day = 30;
            break;
        default:
            // NOP
        }
    }
    return new PDate(day, month, year);
}
/**
 * Returns the date lying n days before this date.
 *
 * @param n Number of days (>= 0); n == 0 returns a copy of this date.
 * @return The date lying n days before this one.
 * @throws IllegalArgumentException if n is negative.
 */
public PDate vorherigerTag(int n) throws IllegalArgumentException {
    // Reject only negative counts. The previous condition (n <= 0) also
    // rejected 0, contradicting both the error message and the javadoc.
    if (n < 0) {
        throw new IllegalArgumentException("n muss groesser oder gleich 0 sein!");
    }
    PDate previous = new PDate(this);
    for (int i = 0; i < n; i++) {
        previous = previous.vorherigerTag();
    }
    return previous;
}
/** @return The date of the first day of this date's month. */
public PDate monatsErster() {
    return new PDate(1, this.getMonat(), this.getJahr());
}

/** @return The date of the last day of this date's month. */
public PDate monatsLetzter() {
    // Start at the 28th (valid in every month) and walk forward until the
    // month changes, then step back one day.
    PDate d = new PDate(28, this.getMonat(), this.getJahr());
    int m = d.getMonat();
    while (d.getMonat() == m) {
        d = d.naechsterTag();
    }
    d = d.vorherigerTag();
    return d;
}
/**
 * Returns the date lying n months before this date. The day of month is
 * clamped to the target month's length (e.g. 31.03. minus 1 month gives
 * 28.02.).
 *
 * @param n Number of months.
 * @return The date n months before this one.
 * @throws IllegalArgumentException if n is less than or equal to zero.
 */
public PDate monateVorher(int n) throws IllegalArgumentException {
    if (n <= 0) {
        throw new IllegalArgumentException("n muss groesser oder gleich 0 sein!");
    }
    PDate d = new PDate(this);
    for (int i = 0; i < n; i++) {
        // Jump to the last day of the previous month.
        d = d.monatsErster();
        d = d.vorherigerTag();
    }
    // Walk back until the day of month no longer exceeds the original one.
    while (d.getTag() > this.getTag()) {
        d = d.vorherigerTag();
    }
    return d;
}

/**
 * Returns the date lying n months after this date. The day of month is
 * clamped to the target month's length.
 *
 * @param n Number of months.
 * @return The date n months after this one.
 * @throws IllegalArgumentException if n is less than or equal to zero.
 */
public PDate monateNachher(int n) throws IllegalArgumentException {
    if (n <= 0) {
        throw new IllegalArgumentException("n muss groesser oder gleich 0 sein!");
    }
    PDate d = new PDate(this);
    int lastday = 0;
    for (int i = 0; i < n; i++) {
        // Determine the following month and its last day ...
        d = d.monatsLetzter();
        d = d.naechsterTag();
        d = d.monatsLetzter();
        lastday = d.getTag();
        d = d.monatsErster();
        // ... then advance to the original day of month, capped at that
        // month's last day.
        while ((d.getTag() < this.getTag()) && (d.getTag() < lastday)) {
            d = d.naechsterTag();
        }
    }
    return d;
}
/**
 * Returns the date lying n years before this date.
 *
 * @param n Number of years.
 * @return The date n years before this one.
 * @throws IllegalArgumentException if n is less than or equal to zero.
 */
public PDate jahreVorher(int n) throws IllegalArgumentException {
    if (n <= 0) {
        throw new IllegalArgumentException("n muss groesser oder gleich 0 sein!");
    }
    return this.shiftJahr(0 - n);
}

/**
 * Returns the date lying n years after this date.
 *
 * @param n Number of years.
 * @return The date n years after this one.
 * @throws IllegalArgumentException if n is less than or equal to zero.
 */
public PDate jahreNachher(int n) throws IllegalArgumentException {
    if (n <= 0) {
        throw new IllegalArgumentException("n muss groesser oder gleich 0 sein!");
    }
    return this.shiftJahr(n);
}

/* Shifts the year by n, mapping Feb 29 to Feb 28 when the target year is not a leap year. */
private PDate shiftJahr(int n) {
    PDate d = new PDate(this);
    d = d.setJahr(d.getJahr() + n);
    if (this.isSchaltjahr() && (!d.isSchaltjahr()) && (this.getMonat() == 2) && (this.getTag() == 29)) {
        d = d.setTag(28);
    }
    return d;
}
/** @return The German name of this date's weekday ("UNBEKANNT" if unknown). */
public String getTagesnameDeutsch() {
    // Compute the weekday once instead of re-deriving it per comparison.
    Wochentag wt = this.getWochentag();
    if (wt == Wochentag.MONTAG) {
        return "Montag";
    }
    if (wt == Wochentag.DIENSTAG) {
        return "Dienstag";
    }
    if (wt == Wochentag.MITTWOCH) {
        return "Mittwoch";
    }
    if (wt == Wochentag.DONNERSTAG) {
        return "Donnerstag";
    }
    if (wt == Wochentag.FREITAG) {
        return "Freitag";
    }
    if (wt == Wochentag.SONNABEND) {
        return "Sonnabend";
    }
    if (wt == Wochentag.SONNTAG) {
        return "Sonntag";
    }
    return "UNBEKANNT";
}

/** @return The German name of this date's month ("UNBEKANNT" if out of range). */
public String getMonatsnameDeutsch() {
    final String[] namen = {
        "Januar", "Februar", "M\344rz", "April", "Mai", "Juni",
        "Juli", "August", "September", "Oktober", "November", "Dezember"
    };
    int m = this.getMonat();
    return ((m >= 1) && (m <= 12)) ? namen[m - 1] : "UNBEKANNT";
}
/**
 * Computes the distance in whole years between this date and the given one.
 *
 * @param d The date to compute the year distance to.
 * @return Number of whole years between this date and the given date.
 */
public int jahreabstandZu(PDate d) {
    // Walks day by day counting anniversary hits; cost is O(number of days
    // between the two dates).
    PDate d0 = new PDate(this);
    int erg = -1;
    if (d0.toInt() > d.toInt()) {
        // Normalize so that d0 <= d.
        PDate h = new PDate(d0);
        d0 = new PDate(d);
        d = h;
    }
    while (d0.toInt() <= d.toInt()) {
        if ((d0.getMonat() == d.getMonat()) && (d0.getTag() == d.getTag())) {
            erg++;
        }
        d0 = d0.naechsterTag();
    }
    return erg;
}

/**
 * Computes the distance in days between this date and the given one.
 *
 * @param d The date to compute the day distance to.
 * @return Number of days between this date and the given date.
 */
public int tagesabstandZu(PDate d) {
    // Walks day by day from the earlier to the later date; O(result).
    int erg = 0;
    PDate dh = new PDate(this);
    if (dh.toInt() > d.toInt()) {
        dh = new PDate(d);
        d = new PDate(this);
    }
    while (dh.toInt() < d.toInt()) {
        dh = dh.naechsterTag();
        erg++;
    }
    return erg;
}
/** @return The week-of-year number of this date. */
public int getWoche() {
    log.debug("\nEntrypoint: PDate -> getWoche()");
    // Delegates to GregorianCalendar, so week numbering follows the default
    // locale's first-day-of-week and minimal-days rules.
    GregorianCalendar dt = new GregorianCalendar();
    dt.set(this.getJahr(), this.getMonat() - 1, this.getTag());
    log.debug("  dt=" + dt);
    log.debug("  dt.get(Calendar.WEEK_OF_YEAR)=" + dt.get(Calendar.WEEK_OF_YEAR));
    return dt.get(Calendar.WEEK_OF_YEAR);
}
/**
 * Determines whether this date lies within the given period (boundaries
 * inclusive). If d0 lies after d1, the two boundaries are swapped.
 *
 * @param d0 Start of the period.
 * @param d1 End of the period.
 * @return <TT>true</TT> if this date lies within the given period.
 */
public boolean isImZeitraum(PDate d0, PDate d1) {
    int von = d0.toInt();
    int bis = d1.toInt();
    if (von > bis) {
        // Normalize the boundaries so that von <= bis.
        int h = von;
        von = bis;
        bis = h;
    }
    return (this.toInt() >= von) && (this.toInt() <= bis);
}
/** @return The weekday of this date. */
public Wochentag getWochentag() {
    GregorianCalendar dt = new GregorianCalendar();
    dt.set(this.getJahr(), this.getMonat() - 1, this.getTag());
    switch (dt.get(Calendar.DAY_OF_WEEK)) {
    case Calendar.MONDAY:
        return Wochentag.MONTAG;
    case Calendar.TUESDAY:
        return Wochentag.DIENSTAG;
    case Calendar.WEDNESDAY:
        return Wochentag.MITTWOCH;
    case Calendar.THURSDAY:
        return Wochentag.DONNERSTAG;
    case Calendar.FRIDAY:
        return Wochentag.FREITAG;
    case Calendar.SATURDAY:
        return Wochentag.SONNABEND;
    }
    // The only remaining Calendar value is SUNDAY.
    return Wochentag.SONNTAG;
}

/** @return The ordinal number of this date's day within its year. */
public int tagDesJahres() {
    GregorianCalendar dt = new GregorianCalendar();
    dt.set(this.getJahr(), this.getMonat() - 1, this.getTag());
    return dt.get(Calendar.DAY_OF_YEAR);
}
/**
 * Returns the Monday of the given week of the given year.
 *
 * @param woche The number of the week whose Monday is requested (should
 *        lie between 1 and 53).
 * @param jahr The year identifying the week unambiguously.
 * @return The date of that week's Monday as a PDate.
 * @throws IllegalArgumentException if the week number lies outside the
 *         valid range for that year.
 *
 * @changed OLI 14.04.2009 - Added.
 *          <P>
 *
 */
public static PDate GetMontagVonWoche(int woche, int jahr) throws IllegalArgumentException {
    // Determine the year's highest week number: step back week-wise from
    // Dec 31 while the calendar still reports week 1 (those days already
    // belong to week 1 of the following year).
    PDate pd = new PDate(31, 12, jahr);
    int maxwoche = pd.getWoche();
    while (maxwoche < 2) {
        pd = pd.vorherigerTag(7);
        maxwoche = pd.getWoche();
    }
    log.debug("\nEntrypoint: PDate.GetMontagVonWoche(" + woche + ", " + jahr + ")");
    log.debug("  maxwoche=" + maxwoche);
    if ((woche < 1) || (woche > maxwoche)) {
        throw new IllegalArgumentException("FEHLER: Die Wochennummer (" + woche + ") liegt "
                + "nicht innerhalb der erlaubten Parameter (1.." + maxwoche + ")");
    }
    // Locate the Monday of week 1 starting from Jan 1 ...
    pd = new PDate(1, 1, jahr);
    log.debug("  pd=" + pd);
    log.debug("  pd.getWoche()=" + pd.getWoche());
    if (pd.getWoche() == 1) {
        while (pd.getWochentag() != Wochentag.MONTAG) {
            pd = pd.vorherigerTag();
        }
    } else {
        while ((pd.getWochentag() != Wochentag.MONTAG) && (pd.getWoche() != 1)) {
            pd = pd.naechsterTag();
        }
    }
    // ... then advance in 7-day steps until the requested week is reached.
    while (woche > pd.getWoche()) {
        pd = pd.naechsterTag(7);
    }
    return pd;
}
/* New, English-named wrapper methods. */
/**
 * Returns the day-of-month of this date. Delegates to {@link #getTag()}.
 *
 * @return The day of the date.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public int getDay() {
    return this.getTag();
}
/**
 * Returns the month of this date. Delegates to {@link #getMonat()}.
 *
 * @return The month of the date.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public int getMonth() {
    return this.getMonat();
}
/**
 * Returns the year of this date. Delegates to {@link #getJahr()}.
 *
 * @return The year of the date.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public int getYear() {
    return this.getJahr();
}
/**
 * Returns the date of the last day of the month of this date. Delegates to
 * {@link #monatsLetzter()}.
 *
 * @return The date of the last day of this date's month.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public PDate lastOfMonth() {
    return this.monatsLetzter();
}
/**
 * Returns the date of the day following this date. Delegates to
 * {@link #naechsterTag()}.
 *
 * @return The date of the following day.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public PDate nextDay() {
    return this.naechsterTag();
}
/**
 * Returns the date of the n-th day following this date. Delegates to
 * {@link #naechsterTag(int)}.
 *
 * @param n The number of days to advance.
 * @return The date of the n-th following day.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public PDate nextDay(int n) {
    return this.naechsterTag(n);
}
/**
 * Sets the given day as the new day of this date. Delegates to
 * {@link #setTag(int)}.
 *
 * @param day The new day for the date.
 * @return A PDate with the changed day value.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public PDate setDay(int day) {
    return this.setTag(day);
}
/**
 * Sets the given month as the new month of this date. Delegates to
 * {@link #setMonat(int)}.
 *
 * @param month The new month for the date.
 * @return A PDate with the changed month value.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public PDate setMonth(int month) {
    return this.setMonat(month);
}
/**
 * Sets the given year as the new year of this date. Delegates to
 * {@link #setJahr(int)}.
 *
 * @param year The new year for the date.
 * @return A PDate with the changed year value.
 *
 * @changed OLI 16.02.2009 - Added
 * <P>
 *
 */
public PDate setYear(int year) {
    return this.setJahr(year);
}
}
|
"use strict";
var env = require('gitter-web-env');
var nconf = env.config;
var testRequire = require('../../test-require');
var fixtureLoader = require('gitter-web-test-utils/lib/test-fixtures');
var assertUtils = require('../../assert-utils')
var serialize = require('gitter-web-serialization/lib/serialize');
var serializeObject = require('gitter-web-serialization/lib/serialize-object');
var ForumStrategy = testRequire('./serializers/rest/forum-strategy');
var subscriberService = require('gitter-web-topic-notifications/lib/subscriber-service');
var ForumObject = require('gitter-web-topic-models/lib/forum-object');
var assert = require('assert');
var mongoUtils = require('gitter-web-persistence-utils/lib/mongo-utils');
var LONG_AGO = '2014-01-01T00:00:00.000Z';
// Integration tests for the nested REST forum serializer. Covers the full
// serialized shape with and without a current user, subscription flags at
// forum and topic level, the admin permission flag, and the `testOnly`
// option-parsing helpers.
describe('ForumStrategy #slow', function() {
  var blockTimer = require('../../block-timer');
  before(blockTimer.on);
  after(blockTimer.off);

  // Fixture: one forum with three categories (category1 is admin-only and
  // holds topic1; category2/category3 carry explicit `order` values so that
  // category3 sorts first) and two users. user1 is an extra admin of the
  // forum; user2 is not.
  var fixture = fixtureLoader.setup({
    user1: {
      accessToken: 'web-internal'
    },
    user2: {},
    forum1: {
      securityDescriptor: {
        extraAdmins: ['user1']
      }
    },
    category1: {
      forum: 'forum1',
      adminOnly: true
    },
    category2: {
      forum: 'forum1',
      order: 2
    },
    category3: {
      forum: 'forum1',
      order: 1
    },
    topic1: {
      user: 'user1',
      forum: 'forum1',
      category: 'category1',
      sent: new Date(LONG_AGO)
    }
  });

  it('should serialize a forum without a userId', function() {
    var strategy = ForumStrategy.nested();
    var user = fixture.user1;
    var forum = fixture.forum1;
    var category1 = fixture.category1;
    var category2 = fixture.category2;
    var category3 = fixture.category3;
    var topic = fixture.topic1;
    return serialize([forum], strategy)
      .then(function(s) {
        // Without a current user, no `subscribed`, `ownReactions` or
        // `permissions` fields appear in the output.
        assertUtils.assertSerializedEqual(s, [{
          id: forum.id,
          name: forum.name,
          uri: forum.uri,
          tags: [],
          // Categories come back sorted by their `order` field.
          categories: [{
            id: category3.id,
            name: category3.name,
            slug: category3.slug,
            adminOnly: false,
            v: 1
          }, {
            id: category2.id,
            name: category2.name,
            slug: category2.slug,
            adminOnly: false,
            v: 1
          }, {
            id: category1.id,
            name: category1.name,
            slug: category1.slug,
            adminOnly: true,
            v: 1
          }],
          topics: [{
            id: topic.id,
            title: topic.title,
            slug: topic.slug,
            body: {
              text: topic.text,
              html: topic.html,
            },
            sticky: topic.sticky,
            tags: [],
            category: {
              id: category1.id,
              name: category1.name,
              slug: category1.slug,
              adminOnly: true,
              v: 1
            },
            user: {
              id: user.id,
              username: user.username,
              displayName: user.displayName,
              avatarUrl: nconf.get('avatar:officialHost') + '/g/u/' + user.username,
            },
            repliesTotal: 0,
            replyingUsers: [],
            reactions: {},
            sent: LONG_AGO,
            editedAt: null,
            lastChanged: LONG_AGO,
            v: 1
          }]
        }])
      });
  });

  it('should serialize a forum with a userId', function() {
    var user = fixture.user1;
    var forum = fixture.forum1;
    var category1 = fixture.category1;
    var category2 = fixture.category2;
    var category3 = fixture.category3;
    var topic = fixture.topic1;
    var strategy = ForumStrategy.nested({ currentUserId: user._id });
    return serialize([forum], strategy)
      .then(function(s) {
        // With a current user the output additionally carries `subscribed`
        // and `ownReactions` on the topic, plus `subscribed` and
        // `permissions` at forum level (user1 is an extraAdmin).
        assertUtils.assertSerializedEqual(s, [{
          id: forum.id,
          name: forum.name,
          uri: forum.uri,
          tags: [],
          categories: [{
            id: category3.id,
            name: category3.name,
            slug: category3.slug,
            adminOnly: false,
            v: 1
          }, {
            id: category2.id,
            name: category2.name,
            slug: category2.slug,
            adminOnly: false,
            v: 1
          }, {
            id: category1.id,
            name: category1.name,
            slug: category1.slug,
            adminOnly: true,
            v: 1
          }],
          topics: [{
            id: topic.id,
            title: topic.title,
            slug: topic.slug,
            body: {
              text: topic.text,
              html: topic.html,
            },
            sticky: topic.sticky,
            tags: [],
            category: {
              id: category1.id,
              name: category1.name,
              slug: category1.slug,
              adminOnly: true,
              v: 1
            },
            user: {
              id: user.id,
              username: user.username,
              displayName: user.displayName,
              avatarUrl: nconf.get('avatar:officialHost') + '/g/u/' + user.username,
            },
            subscribed: false,
            repliesTotal: 0,
            replyingUsers: [],
            reactions: {},
            ownReactions: {},
            sent: LONG_AGO,
            editedAt: null,
            lastChanged: LONG_AGO,
            v: 1
          }],
          subscribed: false,
          permissions: {
            admin: true
          }
        }])
      });
  });

  it('should tell a user when they are subscribed to a forum', function() {
    // Subscribe user1 to the forum itself, then serialize as user1.
    var forumObject = ForumObject.createForForum(fixture.forum1._id);
    var userId = fixture.user1._id;
    return subscriberService.addSubscriber(forumObject, userId)
      .then(function() {
        var strategy = ForumStrategy.nested({
          currentUserId: userId
        });
        return serializeObject(fixture.forum1, strategy);
      })
      .then(function(serialized) {
        assert.strictEqual(serialized.subscribed, true);
      })
  });

  it('should tell a user when they are subscribed to a topic within a forum', function() {
    // Subscribe user1 to the topic (not the forum) and check that the flag
    // shows up on the nested topic.
    var forumObject = ForumObject.createForTopic(fixture.forum1._id, fixture.topic1._id);
    var userId = fixture.user1._id;
    return subscriberService.addSubscriber(forumObject, userId)
      .then(function() {
        var strategy = ForumStrategy.nested({
          currentUserId: userId
        });
        return serializeObject(fixture.forum1, strategy);
      })
      .then(function(serialized) {
        assert.strictEqual(serialized.topics[0].subscribed, true);
      })
  });

  it('should tell a user when they are an admin of the forum', function() {
    var strategy = ForumStrategy.permissions({
      // this one sends the whole user
      currentUser: fixture.user1
    });
    return serializeObject(fixture.forum1, strategy)
      .then(function(serialized) {
        assert.strictEqual(serialized.permissions.admin, true);
      });
  });

  it('should tell a user when they are NOT an admin of the forum', function() {
    var strategy = ForumStrategy.permissions({
      // this one sends the user id
      currentUserId: fixture.user2._id
    });
    return serializeObject(fixture.forum1, strategy)
      .then(function(serialized) {
        assert.strictEqual(serialized.permissions.admin, false);
      });
  });

  describe('getCurrentUserFromOptions', function() {
    var getCurrentUserFromOptions = ForumStrategy.testOnly.getCurrentUserFromOptions;
    it('should return currentUser if present', function() {
      assert.strictEqual(getCurrentUserFromOptions({ currentUser: fixture.user1}), fixture.user1);
    });
    it('should return undefined if currentUser is not present', function() {
      assert.strictEqual(getCurrentUserFromOptions(), undefined);
    });
  });

  describe('getCurrentUserIdFromOptions', function() {
    var getCurrentUserIdFromOptions = ForumStrategy.testOnly.getCurrentUserIdFromOptions;
    it('should return currentUserId if present', function() {
      var id = getCurrentUserIdFromOptions({ currentUserId: fixture.user1._id});
      assert(mongoUtils.objectIDsEqual(id, fixture.user1._id));
    });
    it("should return currentUser's id if currentUser is present", function() {
      var id = getCurrentUserIdFromOptions({ currentUser: fixture.user1});
      assert(mongoUtils.objectIDsEqual(id, fixture.user1._id));
    });
    it('should return undefined if neither are present', function() {
      assert.strictEqual(getCurrentUserIdFromOptions(), undefined);
    });
  });
});
|
use specs::{
prelude::*,
storage::HashMapStorage,
world::{Builder, WorldExt},
};
/// Test component holding a single `i8`, stored densely in a `VecStorage`.
#[derive(Clone, Debug, PartialEq)]
struct CompInt(i8);
impl Component for CompInt {
    type Storage = VecStorage<Self>;
}
/// Test component holding a single `bool`, stored sparsely in a
/// `HashMapStorage`.
#[derive(Clone, Debug, PartialEq)]
struct CompBool(bool);
impl Component for CompBool {
    type Storage = HashMapStorage<Self>;
}
/// Builds a fresh `World` with both test components registered.
fn create_world() -> World {
    let mut world = World::new();
    world.register::<CompInt>();
    world.register::<CompBool>();
    world
}
// A system that panics during `run` must propagate the panic out of
// `dispatch`.
#[should_panic]
#[test]
fn task_panics() {
    struct Sys;
    impl<'a> System<'a> for Sys {
        type SystemData = ();
        fn run(&mut self, _: ()) {
            panic!()
        }
    }
    let mut world = create_world();
    world
        .create_entity()
        .with(CompInt(7))
        .with(CompBool(false))
        .build();
    DispatcherBuilder::new()
        .with(Sys, "s", &[])
        .build()
        .dispatch(&mut world);
}
// Entities can be created from inside a running system, repeatedly, via the
// `Entities` resource.
#[test]
fn dynamic_create() {
    struct Sys;
    impl<'a> System<'a> for Sys {
        type SystemData = Entities<'a>;
        fn run(&mut self, entities: Self::SystemData) {
            entities.create();
        }
    }
    let mut world = create_world();
    let mut dispatcher = DispatcherBuilder::new().with(Sys, "s", &[]).build();
    for _ in 0..1_000 {
        dispatcher.dispatch(&mut world);
    }
}
// An entity created inside a system can be deleted again inside the same
// `run`, over many dispatches.
#[test]
fn dynamic_deletion() {
    struct Sys;
    impl<'a> System<'a> for Sys {
        type SystemData = Entities<'a>;
        fn run(&mut self, entities: Self::SystemData) {
            let e = entities.create();
            entities.delete(e).unwrap();
        }
    }
    let mut world = create_world();
    let mut dispatcher = DispatcherBuilder::new().with(Sys, "s", &[]).build();
    for _ in 0..1_000 {
        dispatcher.dispatch(&mut world);
    }
}
// Creating entities lazily via `create_iter` and deleting them before
// `maintain` must not panic.
#[test]
fn dynamic_create_and_delete() {
    let mut world = create_world();
    {
        let entities = &world.entities();
        let five: Vec<_> = entities.create_iter().take(5).collect();
        for e in five {
            entities.delete(e).unwrap();
        }
    }
    world.maintain();
}
// Mixing the immediate (`World::create_entity`) and atomic
// (`Entities::create`) creation paths must always yield unique entities,
// even across interleaved deletions and `maintain` calls.
#[test]
fn mixed_create_merge() {
    use std::collections::HashSet;
    let mut world = create_world();
    let mut set = HashSet::new();
    // Asserts the entity has never been handed out before.
    let add = |set: &mut HashSet<Entity>, e: Entity| {
        assert!(!set.contains(&e));
        set.insert(e);
    };
    let insert = |w: &mut World, set: &mut HashSet<Entity>, cnt: usize| {
        // Check to make sure there is no conflict between create_now
        // and create_pure
        for _ in 0..10 {
            for _ in 0..cnt {
                add(set, w.create_entity().build());
                let e = w.create_entity().build();
                w.delete_entity(e).unwrap();
                add(set, w.entities().create());
                // swap order
                add(set, w.entities().create());
                add(set, w.create_entity().build());
            }
            w.maintain();
        }
    };
    // Repeat with both deletion flavours (lazy via entities(), immediate
    // via delete_entity) between rounds.
    insert(&mut world, &mut set, 10);
    for e in set.drain() {
        world.entities().delete(e).unwrap();
    }
    insert(&mut world, &mut set, 20);
    for e in set.drain() {
        world.delete_entity(e).unwrap();
    }
    insert(&mut world, &mut set, 40);
}
// `delete_entity` on the World kills immediately, while `entities().delete`
// is deferred: the entity stays alive until `maintain` runs.
#[test]
fn is_alive() {
    let mut w = World::new();
    let e = w.create_entity().build();
    assert!(w.is_alive(e));
    w.delete_entity(e).unwrap();
    assert!(!w.is_alive(e));
    let e2 = w.create_entity().build();
    assert!(w.is_alive(e2));
    w.entities().delete(e2).unwrap();
    // Lazy deletion: still alive until maintain().
    assert!(w.is_alive(e2));
    w.maintain();
    assert!(!w.is_alive(e2));
}
// Checks whether entities are considered dead immediately after creation
#[test]
fn stillborn_entities() {
    // Tiny deterministic linear congruential generator so the test is
    // reproducible without an RNG dependency.
    struct LCG(u32);
    const RANDMAX: u32 = 32_767;
    impl LCG {
        fn new() -> Self {
            LCG(0xdead_beef)
        }
        // Signed variant: maps the raw value into i8 range.
        fn geni(&mut self) -> i8 {
            ((self.gen() as i32) - 0x7f) as i8
        }
        fn gen(&mut self) -> u32 {
            self.0 = self.0.wrapping_mul(214_013).wrapping_add(2_531_011);
            self.0 % RANDMAX
        }
    }
    // Shared resource holding the batch of random values produced each tick.
    #[derive(Debug, Default)]
    struct Rand {
        values: Vec<i8>,
    }
    // Refills `Rand.values` with 0..24 random i8s every dispatch.
    struct SysRand(LCG);
    impl<'a> System<'a> for SysRand {
        type SystemData = Write<'a, Rand>;
        fn run(&mut self, mut data: Self::SystemData) {
            let rng = &mut self.0;
            let count = (rng.gen() % 25) as usize;
            let values: &mut Vec<i8> = &mut data.values;
            values.clear();
            for _ in 0..count {
                values.push(rng.geni());
            }
        }
    }
    // Deletes up to `rand.values.len()` entities picked from a running-minima
    // scan over CompInt.
    struct Delete;
    impl<'a> System<'a> for Delete {
        type SystemData = (Entities<'a>, ReadStorage<'a, CompInt>, Read<'a, Rand>);
        fn run(&mut self, (entities, comp_int, rand): Self::SystemData) {
            let mut lowest = Vec::new();
            for (&CompInt(k), entity) in (&comp_int, &entities).join() {
                if lowest.iter().all(|&(n, _)| n >= k) {
                    lowest.push((k, entity));
                }
            }
            lowest.reverse();
            lowest.truncate(rand.values.len());
            for (_, eid) in lowest {
                entities.delete(eid).unwrap();
            }
        }
    }
    // Creates a fresh entity per random value and attaches CompInt at once;
    // insertion must not fail (i.e. the entity must not be "stillborn").
    struct Insert;
    impl<'a> System<'a> for Insert {
        type SystemData = (Entities<'a>, WriteStorage<'a, CompInt>, Read<'a, Rand>);
        fn run(&mut self, (entities, mut comp_int, rand): Self::SystemData) {
            for &i in &rand.values {
                let result = comp_int.insert(entities.create(), CompInt(i));
                if result.is_err() {
                    panic!("Couldn't insert {} into a stillborn entity", i);
                }
            }
        }
    }
    let mut rng = LCG::new();
    // Construct a bunch of entities
    let mut world = create_world();
    world.insert(Rand { values: Vec::new() });
    for _ in 0..100 {
        world.create_entity().with(CompInt(rng.geni())).build();
    }
    let mut dispatcher = DispatcherBuilder::new()
        .with(SysRand(rng), "rand", &[])
        .with(Delete, "del", &["rand"])
        .with(Insert, "insert", &["del"])
        .build();
    for _ in 0..100 {
        dispatcher.dispatch(&mut world);
    }
}
#[test]
fn register_idempotency() {
    // Test that repeated calls to `register` do not silently
    // stomp over the existing storage, but instead silently do nothing.
    let mut w = World::new();
    w.register::<CompInt>();
    let e = w.create_entity().with::<CompInt>(CompInt(10)).build();
    // At the time this test was written, a call to `register`
    // would blindly plough ahead and stomp the existing storage, so...
    w.register::<CompInt>();
    // ...this would end up trying to unwrap a `None`.
    let i = w.read_storage::<CompInt>().get(e).unwrap().0;
    assert_eq!(i, 10);
}
// Joining two storages must visit exactly the entities that have BOTH
// components: the CompInt(3)-only entity must not appear.
#[test]
fn join_two_components() {
    let mut world = create_world();
    world
        .create_entity()
        .with(CompInt(1))
        .with(CompBool(false))
        .build();
    world
        .create_entity()
        .with(CompInt(2))
        .with(CompBool(true))
        .build();
    // This entity has no CompBool and must be skipped by the join.
    world.create_entity().with(CompInt(3)).build();
    struct Iter;
    impl<'a> System<'a> for Iter {
        type SystemData = (ReadStorage<'a, CompInt>, ReadStorage<'a, CompBool>);
        fn run(&mut self, (int, boolean): Self::SystemData) {
            let (mut first, mut second) = (false, false);
            for (int, boolean) in (&int, &boolean).join() {
                if int.0 == 1 && !boolean.0 {
                    first = true;
                } else if int.0 == 2 && boolean.0 {
                    second = true;
                } else {
                    // Fixed typo in the panic message ("compent").
                    panic!(
                        "Entity with component values that shouldn't be: {:?} {:?}",
                        int, boolean
                    );
                }
            }
            assert!(
                first,
                "There should be entity with CompInt(1) and CompBool(false)"
            );
            assert!(
                second,
                "There should be entity with CompInt(2) and CompBool(true)"
            );
        }
    }
    let mut dispatcher = DispatcherBuilder::new().with(Iter, "iter", &[]).build();
    dispatcher.dispatch(&mut world);
}
// Parallel counterpart of `join_two_components`: `par_join` must visit the
// same entity set. Atomics/mutex collect results because the closure runs
// on multiple rayon threads.
#[test]
#[cfg(feature = "parallel")]
fn par_join_two_components() {
    use std::sync::{
        atomic::{AtomicBool, Ordering},
        Mutex,
    };
    let mut world = create_world();
    world
        .create_entity()
        .with(CompInt(1))
        .with(CompBool(false))
        .build();
    world
        .create_entity()
        .with(CompInt(2))
        .with(CompBool(true))
        .build();
    world.create_entity().with(CompInt(3)).build();
    let first = AtomicBool::new(false);
    let second = AtomicBool::new(false);
    // Records any unexpected (int, bool) pair seen during the join.
    let error = Mutex::new(None);
    struct Iter<'a>(
        &'a AtomicBool,
        &'a AtomicBool,
        &'a Mutex<Option<(i8, bool)>>,
    );
    impl<'a, 'b> System<'a> for Iter<'b> {
        type SystemData = (ReadStorage<'a, CompInt>, ReadStorage<'a, CompBool>);
        fn run(&mut self, (int, boolean): Self::SystemData) {
            use rayon::iter::ParallelIterator;
            let Iter(first, second, error) = *self;
            (&int, &boolean).par_join().for_each(|(int, boolean)| {
                if !first.load(Ordering::SeqCst) && int.0 == 1 && !boolean.0 {
                    first.store(true, Ordering::SeqCst);
                } else if !second.load(Ordering::SeqCst) && int.0 == 2 && boolean.0 {
                    second.store(true, Ordering::SeqCst);
                } else {
                    *error.lock().unwrap() = Some((int.0, boolean.0));
                }
            });
        }
    }
    let mut dispatcher = DispatcherBuilder::new()
        .with(Iter(&first, &second, &error), "iter", &[])
        .build();
    dispatcher.dispatch(&mut world);
    assert_eq!(
        *error.lock().unwrap(),
        None,
        "Entity shouldn't be in the join",
    );
    assert!(
        first.load(Ordering::SeqCst),
        "There should be entity with CompInt(1) and CompBool(false)"
    );
    assert!(
        second.load(Ordering::SeqCst),
        "There should be entity with CompInt(2) and CompBool(true)"
    );
}
// `par_join` combined with `.maybe()`: the CompBool column becomes optional,
// so the CompInt(3)-only entity is now included with `None`.
#[test]
#[cfg(feature = "parallel")]
fn par_join_with_maybe() {
    use std::sync::{
        atomic::{AtomicBool, Ordering},
        Mutex,
    };
    let mut world = create_world();
    world
        .create_entity()
        .with(CompInt(1))
        .with(CompBool(false))
        .build();
    world
        .create_entity()
        .with(CompInt(2))
        .with(CompBool(true))
        .build();
    world.create_entity().with(CompInt(3)).build();
    let first = AtomicBool::new(false);
    let second = AtomicBool::new(false);
    let third = AtomicBool::new(false);
    // Records any unexpected (int, Option<bool>) pair seen during the join.
    let error = Mutex::new(None);
    struct Iter<'a>(
        &'a AtomicBool,
        &'a AtomicBool,
        &'a AtomicBool,
        &'a Mutex<Option<(i8, Option<bool>)>>,
    );
    impl<'a, 'b> System<'a> for Iter<'b> {
        type SystemData = (ReadStorage<'a, CompInt>, ReadStorage<'a, CompBool>);
        fn run(&mut self, (int, boolean): Self::SystemData) {
            use rayon::iter::ParallelIterator;
            let Iter(first, second, third, error) = *self;
            (&int, boolean.maybe())
                .par_join()
                .for_each(|(int, boolean)| {
                    let boolean = boolean.map(|c| c.0);
                    if !first.load(Ordering::SeqCst) && int.0 == 1 && boolean == Some(false) {
                        first.store(true, Ordering::SeqCst);
                    } else if !second.load(Ordering::SeqCst) && int.0 == 2 && boolean == Some(true)
                    {
                        second.store(true, Ordering::SeqCst);
                    } else if !third.load(Ordering::SeqCst) && int.0 == 3 && boolean == None {
                        third.store(true, Ordering::SeqCst);
                    } else {
                        *error.lock().unwrap() = Some((int.0, boolean));
                    }
                });
        }
    }
    let mut dispatcher = DispatcherBuilder::new()
        .with(Iter(&first, &second, &third, &error), "iter", &[])
        .build();
    dispatcher.dispatch(&mut world);
    assert_eq!(
        *error.lock().unwrap(),
        None,
        "Entity shouldn't be in the join",
    );
    assert!(
        first.load(Ordering::SeqCst),
        "There should be entity with CompInt(1) and CompBool(false)"
    );
    assert!(
        second.load(Ordering::SeqCst),
        "There should be entity with CompInt(2) and CompBool(true)"
    );
    assert!(
        third.load(Ordering::SeqCst),
        "There should be entity with CompInt(3) and no CompBool"
    );
}
// Stress test: 255 sequential increment systems over 1000 entities; after
// the barrier every CompInt that started at -128 must have reached 127.
#[test]
#[cfg(feature = "parallel")]
fn par_join_many_entities_and_systems() {
    use rayon::iter::ParallelIterator;
    use std::sync::Mutex;
    let failed = Mutex::new(vec![]);
    let mut world = create_world();
    for _ in 0..1000 {
        world.create_entity().with(CompInt(-128)).build();
    }
    // Increments every entity's CompInt by one, in parallel.
    struct Incr;
    impl<'a> System<'a> for Incr {
        type SystemData = (Entities<'a>, WriteStorage<'a, CompInt>);
        fn run(&mut self, (entities, mut ints): Self::SystemData) {
            (&mut ints, &entities).par_join().for_each(|(int, _)| {
                int.0 += 1;
            });
        }
    }
    let mut builder = DispatcherBuilder::new();
    for _ in 0..255 {
        builder.add(Incr, "", &[]);
    }
    // Collects every entity whose counter did not reach 127.
    struct FindFailed<'a>(&'a Mutex<Vec<(u32, i8)>>);
    impl<'a, 'b> System<'a> for FindFailed<'b> {
        type SystemData = (Entities<'a>, ReadStorage<'a, CompInt>);
        fn run(&mut self, (entities, ints): Self::SystemData) {
            (&ints, &entities).par_join().for_each(|(int, entity)| {
                if int.0 != 127 {
                    self.0.lock().unwrap().push((entity.id(), int.0));
                }
            });
        }
    }
    // The barrier guarantees all Incr systems finish before the check runs.
    let mut dispatcher = builder
        .with_barrier()
        .with(FindFailed(&failed), "find_failed", &[])
        .build();
    dispatcher.dispatch(&mut world);
    for &(id, n) in &*failed.lock().unwrap() {
        panic!(
            "Entity with id {} failed to count to 127. Count was {}",
            id, n
        );
    }
}
// `Join::get` must reflect component removal and entity deletion: after the
// entity dies (and its slot is potentially reused), `get` on the old handle
// must yield None.
#[test]
fn getting_specific_entity_with_join() {
    let mut world = create_world();
    world
        .create_entity()
        .with(CompInt(1))
        .with(CompBool(true))
        .build();
    let entity = {
        let ints = world.read_storage::<CompInt>();
        let mut bools = world.write_storage::<CompBool>();
        let entity = world.entities().join().next().unwrap();
        assert_eq!(
            Some((&CompInt(1), &mut CompBool(true))),
            (&ints, &mut bools).join().get(entity, &world.entities())
        );
        // Removing one joined component makes `get` return None.
        bools.remove(entity);
        assert_eq!(
            None,
            (&ints, &mut bools).join().get(entity, &world.entities())
        );
        entity
    };
    world.delete_entity(entity).unwrap();
    // A new entity may reuse the slot, but the stale handle must stay dead.
    world
        .create_entity()
        .with(CompInt(2))
        .with(CompBool(false))
        .build();
    let ints = world.read_storage::<CompInt>();
    let mut bools = world.write_storage::<CompBool>();
    assert_eq!(
        None,
        (&ints, &mut bools).join().get(entity, &world.entities())
    );
}
// After a lazy deletion followed by `maintain`, joining over
// (entities, ints, bools) and over (ints, bools) must agree on the count —
// i.e. storages must be cleaned up consistently with the entity list.
#[test]
fn maintain_entity_deletion() {
    let mut world = World::new();
    // Deletes the configured entity (lazily) when run.
    struct DeleteSys {
        pub entity: Option<Entity>,
    }
    impl<'a> System<'a> for DeleteSys {
        type SystemData = Entities<'a>;
        fn run(&mut self, entities: Self::SystemData) {
            if let Some(entity) = self.entity {
                if let Err(err) = entities.delete(entity) {
                    println!("Failed deleting entity: {}", err);
                }
            }
            self.entity = None;
        }
    }
    let mut delete = DeleteSys { entity: None };
    // Asserts that both join flavours see the same number of entities.
    struct CheckSys;
    impl<'a> System<'a> for CheckSys {
        type SystemData = (
            Entities<'a>,
            ReadStorage<'a, CompInt>,
            ReadStorage<'a, CompBool>,
        );
        fn run(&mut self, (entities, ints, bools): Self::SystemData) {
            assert_eq!(
                (&entities, &ints, &bools).join().count(),
                (&ints, &bools).join().count()
            );
        }
    }
    let mut check = CheckSys;
    // setup registers the component storages the checker reads.
    System::setup(&mut check, &mut world);
    let _e1 = world
        .create_entity()
        .with(CompInt(12))
        .with(CompBool(true))
        .build();
    let e2 = world
        .create_entity()
        .with(CompInt(12))
        .with(CompBool(true))
        .build();
    let _e3 = world
        .create_entity()
        .with(CompInt(12))
        .with(CompBool(true))
        .build();
    world.maintain();
    check.run_now(&world);
    // Delete the middle entity, flush with maintain, and re-check.
    delete.entity = Some(e2);
    delete.run_now(&world);
    world.maintain();
    check.run_now(&world);
}
|
package com.devops.extraUtil;
import java.util.ResourceBundle;
import com.baomidou.mybatisplus.generator.AutoGenerator;
import com.baomidou.mybatisplus.generator.config.DataSourceConfig;
import com.baomidou.mybatisplus.generator.config.GlobalConfig;
import com.baomidou.mybatisplus.generator.config.PackageConfig;
import com.baomidou.mybatisplus.generator.config.StrategyConfig;
import com.baomidou.mybatisplus.generator.config.converts.MySqlTypeConvert;
import com.baomidou.mybatisplus.generator.config.rules.DbType;
import com.baomidou.mybatisplus.generator.config.rules.NamingStrategy;
/**
 * Driver for the Mybatis-Plus code generator: reads connection, output and
 * naming settings from the "Mybatis-Plus" resource bundle
 * (Mybatis-Plus.properties) and generates entity/mapper/XML/service/controller
 * sources for the configured table.
 */
public class AutoGeneratorHelper {
    /**
     * Entry point: configures and runs the Mybatis-Plus {@link AutoGenerator}.
     */
    public static void main(String[] args) {
        // Loads the configuration values from Mybatis-Plus.properties.
        ResourceBundle rb = ResourceBundle.getBundle("Mybatis-Plus");
        AutoGenerator mpg = new AutoGenerator();
        // Global configuration.
        GlobalConfig gc = new GlobalConfig();
        gc.setOutputDir(rb.getString("OutputDir"));
        gc.setFileOverride(true);
        gc.setActiveRecord(true);// enable activeRecord mode
        gc.setEnableCache(false);// XML second-level cache
        gc.setBaseResultMap(true);// XML ResultMap
        gc.setBaseColumnList(false);// XML columList
        gc.setAuthor(rb.getString("author"));
        mpg.setGlobalConfig(gc);
        // Data source configuration.
        DataSourceConfig dsc = new DataSourceConfig();
        dsc.setDbType(DbType.MYSQL);
        dsc.setTypeConvert(new MySqlTypeConvert());
        dsc.setDriverName("com.mysql.jdbc.Driver");
        dsc.setUsername(rb.getString("userName"));
        dsc.setPassword(rb.getString("passWord"));
        dsc.setUrl(rb.getString("url"));
        mpg.setDataSource(dsc);
        // Strategy configuration.
        StrategyConfig strategy = new StrategyConfig();
        //strategy.setTablePrefix(new String[] { "bmd_", "mp_" });// table-name prefixes to strip, if needed
        strategy.setNaming(NamingStrategy.underline_to_camel);// table naming strategy
        strategy.setInclude(new String[] { rb.getString("tableName")}); // tables to generate
        // Field naming strategy:
        // strategy.setFieldNaming(NamingStrategy.underline_to_camel);
        //strategy.setSuperServiceImplClass("com.baomidou.springwind.service.support.BaseServiceImpl");
        mpg.setStrategy(strategy);
        // Package configuration.
        PackageConfig pc = new PackageConfig();
        // pc.setModuleName("test");
        pc.setParent(rb.getString("parent"));// custom parent package path
        pc.setController("controller."+rb.getString("className"));// controller package name, defaults to web
        pc.setEntity("model."+rb.getString("className"));
        pc.setMapper("dao."+rb.getString("className"));
        pc.setXml("mapping."+rb.getString("className"));
        pc.setService("service."+rb.getString("className"));
        pc.setServiceImpl("service."+rb.getString("className")+".impl");
        mpg.setPackageInfo(pc);
        // Run the generation.
        mpg.execute();
    }
}
|
# DANSI
DANSI ็ๆๆๆฏ Draw ANSI๏ผๆฏ็จไพ็นช่ฃฝๆต่กๆผ PTT ็ ANSI ๅ็ซ็็นชๅ่ป้ซใ
## Environment
- Node.js 10.18.1 or above
- node-gyp 5.1.0 or above (Windows only)
## How to use
Install node_module packages
```sh
$ npm i
```
Rebuild node-sass
```sh
$ npm rebuild node-sass
```
If on Windows, compile the C++ library
```sh
$ npm run addon
```
Run
```sh
$ npm run start
```
Run with develop tool
```sh
$ npm run dev
```
Pack to exe or dmg
```sh
$ npm run pack
```
## License
MIT
|
module Main where
import PL0.CodeGen.StackMachine
import PL0.Lexer
import PL0.Parser
import PL0.StackMachine
import PL0.StackMachine.Linker
import PL0.StaticChecker
import PL0.SymbolTable.Scope
import Control.Lens
import Control.Monad
import Control.Monad.Except
import Control.Monad.State (evalStateT)
import qualified Data.ByteString as B
import Data.Serialize
import System.Environment
-- | Entry point: takes a PL0 source-file path on the command line, runs it
-- through the pipeline scan -> parse -> static check -> link -> code
-- generation, serialises the generated program to "test.vm", reads that file
-- back and executes it on the stack machine.
main = do
    args <- getArgs
    case args of
        [] -> putStrLn "Enter a file"
        (x:xs) -> do
            content <- readFile x
            -- Compose the compiler stages in the ExceptT error pipeline;
            -- pure stages are lifted with ExceptT . return.
            let full = ExceptT . return . scanTokens
                        >=> ExceptT . return . parseTokens
                        >=> checkProgram
                        >=> link
                        >=> generate
            flip evalStateT initialState . runExceptT $ do
                program <- full content
                liftIO $ B.writeFile "test.vm" (encode program)
                program' <- liftIO $ B.readFile "test.vm"
                liftIO $ runProgram program'
            return ()
|
<?php
namespace Jariff\DocumentBundle\Document;
use Doctrine\ODM\MongoDB\Mapping\Annotations as MongoDB;
/**
 * MongoDB document recording a single keyword search performed by a user:
 * the keyword, the identity it was searched under, and where the search
 * was made.
 *
 * NOTE(review): @MongoDB\Int and @MongoDB\String are legacy Doctrine
 * MongoDB ODM 1.x type annotations (replaced by @MongoDB\Field(type="...")
 * in ODM 2.x) — confirm the ODM version before upgrading.
 *
 * @MongoDB\Document(collection="keyword_history")
 */
class KeywordHistory
{
    /**
     * Auto-generated document identifier.
     *
     * @MongoDB\Id
     */
    private $id;
    /**
     * Id of the user who performed the search.
     *
     * @MongoDB\Int
     */
    private $user_id;
    /**
     * The searched keyword.
     *
     * @MongoDB\String
     */
    private $keyword;
    /**
     * @MongoDB\String
     */
    private $identity;
    /**
     * @MongoDB\String
     */
    private $search_on;
    /**
     * Get id
     *
     * @return id $id
     */
    public function getId()
    {
        return $this->id;
    }
    /**
     * Set userId
     *
     * @param int $userId
     * @return self
     */
    public function setUserId($userId)
    {
        $this->user_id = $userId;
        return $this;
    }
    /**
     * Get userId
     *
     * @return int $userId
     */
    public function getUserId()
    {
        return $this->user_id;
    }
    /**
     * Set keyword
     *
     * @param string $keyword
     * @return self
     */
    public function setKeyword($keyword)
    {
        $this->keyword = $keyword;
        return $this;
    }
    /**
     * Get keyword
     *
     * @return string $keyword
     */
    public function getKeyword()
    {
        return $this->keyword;
    }
    /**
     * Set identity
     *
     * @param string $identity
     * @return self
     */
    public function setIdentity($identity)
    {
        $this->identity = $identity;
        return $this;
    }
    /**
     * Get identity
     *
     * @return string $identity
     */
    public function getIdentity()
    {
        return $this->identity;
    }
    /**
     * Set searchOn
     *
     * @param string $searchOn
     * @return self
     */
    public function setSearchOn($searchOn)
    {
        $this->search_on = $searchOn;
        return $this;
    }
    /**
     * Get searchOn
     *
     * @return string $searchOn
     */
    public function getSearchOn()
    {
        return $this->search_on;
    }
}
|
๏ปฟusing ProtoBuf;
using Sandbox.Common.ObjectBuilders;
using Sandbox.Common.ObjectBuilders.Definitions;
using Sandbox.Common.ObjectBuilders.VRageData;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using VRageMath;
namespace Medieval.ObjectBuilders.Definitions
{
    /// <summary>
    /// Object-builder definition for a fire light block, carrying the light
    /// and particle tuning values for the block definition.
    /// NOTE(review): the MyBounds arguments look like (min, max, default) —
    /// confirm against the MyBounds constructor.
    /// </summary>
    [ProtoContract]
    [MyObjectBuilderDefinition]
    public class MyObjectBuilder_FireLightBlockDefinition : MyObjectBuilder_CubeBlockDefinition
    {
        /// <summary>Bounds for the emitted light intensity.</summary>
        [ProtoMember(1)]
        public SerializableBounds LightIntensity = new MyBounds(1.3f, 1.7f, 1.5f);

        /// <summary>Colour of the emitted light (Vector4, presumably RGBA).</summary>
        [ProtoMember(2)]
        public Vector4 LightColor = new Vector4(0.9f, 0.7f, 0.5f, 1);

        /// <summary>Bounds for the light radius.</summary>
        [ProtoMember(3)]
        public SerializableBounds LightRadius = new MyBounds(1, 2, 1.6f);

        /// <summary>Bounds for the light falloff.</summary>
        [ProtoMember(4)]
        public SerializableBounds LightFalloff = new MyBounds(1, 2, 1.3f);

        /// <summary>Scale applied to the fire particle effect.</summary>
        [ProtoMember(5)]
        public float ParticleScale = 0.1f;
    }
}
|
package inventory
import (
"context"
"encoding/json"
"testing"
"cloud.google.com/go/pubsub"
"github.com/stretchr/testify/require"
)
// TestHandleMessage is a manual development driver for the inventory message
// handler rather than an automated unit test: it talks to real GCS/Firestore
// resources and is skipped unless the Skip call below is commented out (see
// the embedded guide).
func TestHandleMessage(t *testing.T) {
	t.Skip("this is a test driver for dev rather than real unit test")
	/** Guide to running this test:
	Step 0: Close your IDE and set your GOOGLE_APPLICATION_CREDENTIALS
	Step 1: comment out t.Skip(...)
	Step 2: Create a bucket and add in your json file
	Step 3: Replace routerMessage's Bucket and Name with
	your new Bucket and Name
	Step 4: Check initial run uses EventType "OBJECT_FINALIZE"
	Step 5: Run test and check firestore (data should appear)
	Step 6: Replace EventType with "OBJECT_DELETE"
	Step 7: Run test and check firestore (data should be removed)
	*/
	// Simulated router message pointing at a JSON object in a GCS bucket.
	routerMessage := routerMessage{
		Bucket:            "philips-tempbucket",
		Name:              "aws-example.json",
		EventType:         "OBJECT_FINALIZE",
		MimosaType:        "new",
		MimosaTypeVersion: "something",
		Workspace:         "philip",
	}
	handler := build(convertAWS)
	data, err := json.Marshal(routerMessage)
	require.NoError(t, err)
	// Deliver the marshalled message as a pubsub payload; expect no error.
	err = handler(context.Background(), &pubsub.Message{Data: data})
	require.NoError(t, err)
}
|
// Each `*List` below is a readonly tuple (frozen via `as const`); the
// accompanying type alias is the string-literal union of that tuple's
// members, so the runtime list of valid values and the compile-time type
// can never drift apart.

/** Valid values for the move strategy option. */
export const moveStrategyList = ['default', 'insideSource'] as const;
export type MoveStrategy = typeof moveStrategyList[number];

/** Valid values for the reveal strategy option. */
export const revealStrategyList = [
  'select',
  'previousBuffer',
  'previousWindow',
  'sourceWindow',
  'path',
] as const;
export type RevealStrategy = typeof revealStrategyList[number];

/** Valid values for the open strategy option. */
export const openStrategyList = [
  'select',
  'split',
  'vsplit',
  'tab',
  'previousBuffer',
  'previousWindow',
  'sourceWindow',
] as const;
export type OpenStrategy = typeof openStrategyList[number];

/** Valid values for the preview strategy option. */
export const previewStrategyList = ['labeling'] as const;
export type PreviewStrategy = typeof previewStrategyList[number];

/** Valid values for the expand option. */
export const expandOptionList = [
  'recursive',
  'compact',
  'uncompact',
  'recursiveSingle',
] as const;
export type ExpandOption = typeof expandOptionList[number];

/** Valid values for the collapse option. */
export const collapseOptionList = ['recursive'] as const;
export type CollapseOption = typeof collapseOptionList[number];
|
๏ปฟusing CompanyWebApi.Contracts.Entities;
using CompanyWebApi.Persistence.Repositories.Base;
using System.Collections.Generic;
using System.Linq.Expressions;
using System.Threading.Tasks;
using System;
namespace CompanyWebApi.Persistence.Repositories
{
    /// <summary>
    /// Repository contract for <see cref="Company"/> entities on top of the
    /// generic <see cref="IBaseRepository{T}"/>. Read operations default to
    /// tracking = false while the add operation defaults to tracking = true;
    /// NOTE(review): tracking presumably toggles EF change tracking —
    /// confirm in the implementation.
    /// </summary>
    public interface ICompanyRepository : IBaseRepository<Company>
    {
        /// <summary>Adds a company and returns the stored entity.</summary>
        Task<Company> AddCompanyAsync(Company company, bool tracking = true);

        /// <summary>Returns all companies.</summary>
        Task<IList<Company>> GetCompaniesAsync(bool tracking = false);

        /// <summary>Returns the company with the given id.</summary>
        Task<Company> GetCompanyAsync(int id, bool tracking = false);

        /// <summary>Returns a company matching the given predicate.</summary>
        Task<Company> GetCompanyAsync(Expression<Func<Company, bool>> predicate, bool tracking = false);
    }
}
|
module Uwecode.Project.ProjectIOs where
import Uwecode.UweObj
import Uwecode.Conversion
import Uwecode.IO
import Uwecode.Project.Project
import System.IO
import Control.Monad.State
import Control.Exception
import System.Directory
-- | Default (empty) contents of the project IOs file:
-- (imports, IO declarations, closer source text).
defltIos = ([], [], "")
-- | Default (empty) contents of the project opts file: (imports, options).
defltOpts = ([], [])
-- | Runs a read action, capturing any 'IOError' (e.g. missing file) as 'Left'.
tryRead :: IO String -> IO (Either IOError String)
tryRead = try
-- | Reads the project IOs file, creating the project directory first if it
-- is missing. An absent or empty file yields 'defltIos'; otherwise the text
-- is parsed with 'read' (the file stores a 'show'n tuple, see 'writeIosFile').
readIosFile :: IO ([(String, String)], [String], String)
readIosFile = do
    createDirectoryIfMissing False $ projectLocation "."
    eitherText <- tryRead $ readFile $ iosLocation $ projectLocation "."
    return $ let text = either (const "") id eitherText in (if text == "" then defltIos else read text)
-- | Serialises the tuple to the project IOs file with a trailing newline.
writeIosFile :: ([(String, String)], [String], String) -> IO ()
writeIosFile contents = writeFile (iosLocation $ projectLocation ".") $ show contents ++ "\n"
-- | Prepends an import pair to the IOs file (read-modify-write).
addIosImport :: (String, String) -> IO ()
addIosImport imp = do
    (imps, ios, closer) <- readIosFile
    writeIosFile (imp : imps, ios, closer)
-- | Prepends an IO declaration to the IOs file.
addIo :: String -> IO ()
addIo io = do
    (imps, ios, closer) <- readIosFile
    writeIosFile (imps, io : ios, closer)
-- | Replaces the closer expression stored in the IOs file.
setIosCloser :: String -> IO ()
setIosCloser closer = do
    (imps, ios, _) <- readIosFile
    writeIosFile (imps, ios, closer)
-- | UweIO wrapper around 'addIosImport': converts the object to a tuple of
-- two strings -- presumably (module, name); confirm -- and signals
-- 'unsuccessful' when any conversion fails.
addIosImportIO :: UweObj -> UweIOMonad ()
addIosImportIO obj = maybe unsuccessful (lift . addIosImport) $ do
    (leftObj, rightObj) <- ignoringConversion objToTuple Nothing obj
    left <- ignoringConversion objToString Nothing leftObj
    right <- ignoringConversion objToString Nothing rightObj
    return (left, right)
-- | UweIO wrapper around 'addIo' for a Uwe string object.
addIoIO :: UweObj -> UweIOMonad ()
addIoIO = maybe unsuccessful (lift . addIo) . ignoringConversion objToString Nothing
-- | UweIO wrapper around 'setIosCloser' for a Uwe string object.
setIosCloserIO :: UweObj -> UweIOMonad ()
setIosCloserIO = maybe unsuccessful (lift . setIosCloser) . ignoringConversion objToString Nothing
-- | Reads the project opts file; mirrors 'readIosFile' (missing/empty file
-- yields 'defltOpts', otherwise contents are parsed with 'read').
readOptsFile :: IO ([(String, String)], [String])
readOptsFile = do
    createDirectoryIfMissing False $ projectLocation "."
    eitherText <- tryRead $ readFile $ optsLocation $ projectLocation "."
    return $ let text = either (const "") id eitherText in (if text == "" then defltOpts else read text)
-- | Serialises the tuple to the project opts file with a trailing newline.
writeOptsFile :: ([(String, String)], [String]) -> IO ()
writeOptsFile contents = writeFile (optsLocation $ projectLocation ".") $ show contents ++ "\n"
-- | Prepends an import pair to the opts file.
addOptsImport :: (String, String) -> IO ()
addOptsImport imp = do
    (imps, opts) <- readOptsFile
    writeOptsFile (imp : imps, opts)
-- | Prepends an option declaration to the opts file.
addOpt :: String -> IO ()
addOpt opt = do
    (imps, opts) <- readOptsFile
    writeOptsFile (imps, opt : opts)
-- | UweIO wrapper around 'addOptsImport'; same tuple-of-strings conversion
-- as 'addIosImportIO', failing with 'unsuccessful' on conversion errors.
addOptsImportIO :: UweObj -> UweIOMonad ()
addOptsImportIO obj = maybe unsuccessful (lift . addOptsImport) $ do
    (leftObj, rightObj) <- ignoringConversion objToTuple Nothing obj
    left <- ignoringConversion objToString Nothing leftObj
    right <- ignoringConversion objToString Nothing rightObj
    return (left, right)
-- | UweIO wrapper around 'addOpt' for a Uwe string object.
addOptIO :: UweObj -> UweIOMonad ()
addOptIO = maybe unsuccessful (lift . addOpt) . ignoringConversion objToString Nothing
-- | Project closer: re-reads both project files and, when their contents
-- equal the defaults (which the read functions also return when a file is
-- absent or empty), writes the defaults back -- presumably to materialise
-- the files on disk. NOTE(review): if the intent was instead to persist
-- non-default state, these conditions look inverted; confirm with callers.
projCloser :: UweIOMonad ()
projCloser = lift $ do
    ios <- readIosFile
    if ios == defltIos then writeIosFile defltIos else return ()
    opts <- readOptsFile
    if opts == defltOpts then writeOptsFile defltOpts else return ()
|
package dev.ssch.minijava.compiler.expressions
import dev.ssch.minijava.compiler.exception.InvalidBinaryOperationException
import dev.ssch.minijava.compiler.util.CompilerTest
import org.assertj.core.api.Assertions.assertThat
import org.assertj.core.api.Assertions.assertThatThrownBy
import org.junit.jupiter.api.Test
/**
 * End-to-end tests for the relational operators `<`, `<=`, `>`, `>=` of the
 * compiler under test.
 *
 * Each test compiles the embedded snippet, runs its main function, and
 * compares the printed lines. The trailing `""` in every expectation is the
 * empty string that follows the program's final newline when split by
 * [String.lines].
 */
class RelationalTest : CompilerTest() {
    @Test
    fun `lt two int values`() {
        val output = """
            Console.println(1 < 2);
            Console.println(2 < 1);
            Console.println(-1 < 15);
            Console.println(15 < -1);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("true", "false", "true", "false", "")
    }
    @Test
    fun `lt two float values`() {
        val output = """
            Console.println(1f < 2f);
            Console.println(2f < 1f);
            Console.println(-1f < 15f);
            Console.println(15f < -1f);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("true", "false", "true", "false", "")
    }
    // The <= / >= variants additionally exercise the equality boundary
    // (x <= x and negative self-comparison must both print true).
    @Test
    fun `lte two int values`() {
        val output = """
            Console.println(1 <= 2);
            Console.println(2 <= 1);
            Console.println(-1 <= 15);
            Console.println(15 <= -1);
            Console.println(1 <= 1);
            Console.println(-10 <= -10);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("true", "false", "true", "false", "true", "true", "")
    }
    @Test
    fun `lte two float values`() {
        val output = """
            Console.println(1f <= 2f);
            Console.println(2f <= 1f);
            Console.println(-1f <= 15f);
            Console.println(15f <= -1f);
            Console.println(1f <= 1f);
            Console.println(-10f <= -10f);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("true", "false", "true", "false", "true", "true", "")
    }
    @Test
    fun `gt two int values`() {
        val output = """
            Console.println(1 > 2);
            Console.println(2 > 1);
            Console.println(-1 > 15);
            Console.println(15 > -1);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("false", "true", "false", "true", "")
    }
    @Test
    fun `gt two float values`() {
        val output = """
            Console.println(1f > 2f);
            Console.println(2f > 1f);
            Console.println(-1f > 15f);
            Console.println(15f > -1f);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("false", "true", "false", "true", "")
    }
    @Test
    fun `gte two int values`() {
        val output = """
            Console.println(1 >= 2);
            Console.println(2 >= 1);
            Console.println(-1 >= 15);
            Console.println(15 >= -1);
            Console.println(1 >= 1);
            Console.println(-10 >= -10);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("false", "true", "false", "true", "true", "true", "")
    }
    @Test
    fun `gte two float values`() {
        val output = """
            Console.println(1f >= 2f);
            Console.println(2f >= 1f);
            Console.println(-1f >= 15f);
            Console.println(15f >= -1f);
            Console.println(1f >= 1f);
            Console.println(-10f >= -10f);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("false", "true", "false", "true", "true", "true", "")
    }
    // Comparing int with boolean is a type error: compilation must fail with
    // InvalidBinaryOperationException for every relational operator.
    @Test
    fun `lt two incompatible literals`() {
        assertThatThrownBy {
            """
                boolean a = 123 < false;
            """.compileAndRunInMainFunction()
        }.isInstanceOf(InvalidBinaryOperationException::class.java)
    }
    @Test
    fun `lte two incompatible literals`() {
        assertThatThrownBy {
            """
                boolean a = 123 <= false;
            """.compileAndRunInMainFunction()
        }.isInstanceOf(InvalidBinaryOperationException::class.java)
    }
    @Test
    fun `gt two incompatible literals`() {
        assertThatThrownBy {
            """
                boolean a = 123 > false;
            """.compileAndRunInMainFunction()
        }.isInstanceOf(InvalidBinaryOperationException::class.java)
    }
    @Test
    fun `gte two incompatible literals`() {
        assertThatThrownBy {
            """
                boolean a = 123 >= false;
            """.compileAndRunInMainFunction()
        }.isInstanceOf(InvalidBinaryOperationException::class.java)
    }
    // Relational results must compose with logical operators.
    @Test
    fun `complex expressions`() {
        val output = """
            Console.println(1 < 2 && 2 < 3);
            Console.println(1 > 2 && 2 > 3);
            Console.println(1 <= 2 && 2 <= 3);
            Console.println(1 >= 2 && 2 >= 3);
        """.compileAndRunInMainFunction()
        assertThat(output.lines()).containsExactly("true", "false", "true", "false", "")
    }
}
|
using N3O.Umbraco.Content;
namespace N3O.Umbraco.Analytics.Content {
    /// <summary>
    /// Strongly-typed wrapper around the Google Tag Manager settings content
    /// node, exposing the configured container ID.
    /// </summary>
    public class GoogleTagManagerSettingsContent : UmbracoContent<GoogleTagManagerSettingsContent> {
        /// <summary>The GTM container ID value stored on the content node.</summary>
        public string ContainerId {
            get { return GetValue(x => x.ContainerId); }
        }
    }
}
|
#!perl
use lib 't/lib';
use Test::More;
use Dancer::Test;
use MyApp;
use MyApp::Artist;
# GET /artists/1 must return the literal body 'artist object' -- presumably
# MyApp stringifies the Artist found by id; confirm against MyApp's routes.
response_content_is [GET => '/artists/1'],
    'artist object', '/ returned expected response';
done_testing;
|
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ :name => 'Chicago' }, { :name => 'Copenhagen' }])
# Mayor.create(:name => 'Daley', :city => cities.first)
# Builds a user with status "active" plus the Authorization row linking the
# account to its GitHub uid. The optional block can adjust the user (admin
# flag, status, ...) before it is persisted.
# NOTE(review): `save` (not `save!`) is used, so a validation failure is
# swallowed silently and the Authorization would get a nil user_id -- confirm
# that is acceptable for seed data.
def create_user(github_nickname)
  u = User.new(:github_nickname => github_nickname,
               :contact_email => "#{github_nickname}@example.com")
  u.status = "active"
  yield u if block_given?
  u.save
  Authorization.create(:user_id => u.id, :github_uid => github_nickname)
end
# ------------------------------------------------------------------------------
# Create an admin user
# ------------------------------------------------------------------------------
create_user("admin") { |u| u.admin = true }
# ------------------------------------------------------------------------------
# Create several active accounts
# ------------------------------------------------------------------------------
%w[user1 user2 user3].each do |e|
  create_user(e)
end
# ------------------------------------------------------------------------------
# Create an account for each other account status (overrides the default
# "active" set by create_user).
# ------------------------------------------------------------------------------
%w[authorized pending_confirmation confirmed disabled].each do |e|
  create_user(e) { |u| u.status = e }
end
|
/*
* Copyright 2015 herd contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.herd
import org.apache.commons.lang3.StringUtils
import org.apache.log4j.Logger
import org.apache.spark.sql.herd.PartitionFilter._
import scala.collection.JavaConverters._
import scala.util.{Failure, Success, Try}
import org.finra.herd.sdk.api._
import org.finra.herd.sdk.invoker.{ApiClient, ApiException}
import org.finra.herd.sdk.model.{PartitionValueFilter, _}
/** A subset of business object data statuses used by the custom data source. */
object ObjectStatus extends Enumeration {
  // Auto-named values: each member's toString equals its identifier, so the
  // wire representation is unchanged ("UPLOADING", "VALID", "INVALID").
  val UPLOADING, VALID, INVALID = Value
}
/** List all Herd APIs used by the custom data source */
trait HerdApi {
  /** Retrieve the business object definition by the namespace and business object values.
   *
   * @param namespace The namespace name
   * @param businessObjectName The business object definition name
   * @return The business object definition
   */
  def getBusinessObjectByName(namespace: String, businessObjectName: String): BusinessObjectDefinition
  /** Retrieve all business object definitions by namespace.
   *
   * @param namespace The namespace name
   * @return List of business object definition keys
   */
  def getBusinessObjectsByNamespace(namespace: String): BusinessObjectDefinitionKeys
  /** Create a business object definition
   *
   * @param namespace The namespace value
   * @param businessObjectName The business object definition
   * @param dataProvider The name of a valid data provider known by the system.
   */
  def registerBusinessObject(namespace: String, businessObjectName: String, dataProvider: String): Unit
  /** Retrieve list of business object formats based on the namespace and business object definition
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param latestBusinessObjectFormatVersion When true (the default), only the latest version of each format is returned.
   * @return list of business object formats
   */
  def getBusinessObjectFormats(namespace: String, businessObjectName: String, latestBusinessObjectFormatVersion: Boolean = true): BusinessObjectFormatKeys
  /** Retrieve a single business object format
   *
   * @param namespace The namespace value
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @return single business object format
   */
  def getBusinessObjectFormat(namespace: String, businessObjectName: String,
                              formatUsage: String, formatFileType: String,
                              formatVersion: Integer): BusinessObjectFormat
  /** Create a new business object format
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param partitionKey The business object format partition key.
   * @param schema An optional schema associated with this business object format.
   * @return business object format version
   */
  def registerBusinessObjectFormat(namespace: String, businessObjectName: String, formatUsage: String,
                                   formatFileType: String, partitionKey: String, schema: Option[Schema]): Integer
  /** Retrieve all available partitions
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @param partitionFilter the partition filter
   * @return tuples of (format version, partition value, sub-partition values, data version)
   */
  def getBusinessObjectPartitions(namespace: String, businessObjectName: String,
                                  formatUsage: String, formatFileType: String, formatVersion: Integer,
                                  partitionFilter: Option[PartitionFilter]): Seq[(Integer, String, Seq[String], Integer)]
  /** Get the business object data based on the specified parameters
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @param partitionKey The business object format partition key.
   * @param partitionValue The partition value that the data is associated with (e.g. a specific trade date such as 20140401).
   * @param subPartitionValues list of sub partition values
   * @param dataVersion The version of the business object data (e.g. 0).
   * @return the business object data
   */
  def getBusinessObjectData(namespace: String, businessObjectName: String,
                            formatUsage: String, formatFileType: String, formatVersion: Integer,
                            partitionKey: String, partitionValue: String,
                            subPartitionValues: Seq[String], dataVersion: Integer): BusinessObjectData
  /** Search the business object data based on the specified parameters
   *
   * @param businessObjectDataSearchRequest The search request
   * @param pageNum The page number
   * @param pageSize The page size
   * @return the business object data search result
   */
  def searchBusinessObjectData(businessObjectDataSearchRequest: BusinessObjectDataSearchRequest, pageNum: Integer = 1,
                               pageSize: Integer = 1000): BusinessObjectDataSearchResult
  /** Retrieves the DDL to initialize the specified type of the database system (e.g. Hive) to perform queries for a range of requested business object data
   * in the optionally specified storage of the S3 storage platform type.
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @param partitionKey The business object format partition key.
   * @param partitionValues The list of partition values that the data is associated with (e.g. a specific trade date such as 20140401).
   * @param dataVersion The version of the business object data (e.g. 0).
   * @return The business object data DDL
   */
  def getBusinessObjectDataGenerateDdl(namespace: String, businessObjectName: String,
                                       formatUsage: String, formatFileType: String, formatVersion: Integer,
                                       partitionKey: String, partitionValues: Seq[String],
                                       dataVersion: Integer): BusinessObjectDataDdl
  /** Retrieves the list of partition locations for a given business object data (or a range of partition values).
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @param partitionKey The business object format partition key.
   * @param partitionValues The list of partition values that the data is associated with (e.g. a specific trade date such as 20140401).
   * @param dataVersion The version of the business object data (e.g. 0).
   * @return The business object data partitions
   */
  def getBusinessObjectDataPartitions(namespace: String, businessObjectName: String,
                                      formatUsage: String, formatFileType: String, formatVersion: Integer,
                                      partitionKey: String, partitionValues: Seq[String],
                                      dataVersion: Integer): BusinessObjectDataPartitions
  /** Retrieves the business object data availability
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param partitionKey The business object format partition key.
   * @param firstPartitionValue The first partition value
   * @param lastPartitionValue The last partition value
   * @return The business object data availability
   */
  def getBusinessObjectDataAvailability(namespace: String, businessObjectName: String,
                                        formatUsage: String, formatFileType: String,
                                        partitionKey: String, firstPartitionValue: String,
                                        lastPartitionValue: String): BusinessObjectDataAvailability
  /** Create a business object data based on the specified parameters
   *
   * @param namespace namespace
   * @param businessObjectName business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @param partitionKey The business object format partition key.
   * @param partitionValue The partition value that the data is associated with (e.g. a specific trade date such as 20140401).
   * @param subPartitionValues list of sub partitions
   * @param status the status of the business object data(UPLOADING, VALID, INVALID...etc)
   * @param storageName the storage name
   * @param storageDirectory the storage directory
   * @return the new data version together with its storage units
   */
  def registerBusinessObjectData(namespace: String, businessObjectName: String, formatUsage: String,
                                 formatFileType: String, formatVersion: Integer, partitionKey: String,
                                 partitionValue: String, subPartitionValues: Seq[String],
                                 status: ObjectStatus.Value, storageName: String,
                                 storageDirectory: Option[String] = None): (Integer, Seq[StorageUnit])
  /** Add storage files to an existing storage unit in a business object data
   *
   * @param namespace namespace
   * @param businessObjectName business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @param partitionKey The business object format partition key.
   * @param partitionValue The partition value that the data is associated with (e.g. a specific trade date such as 20140401).
   * @param subPartitionValues The list of subpartition values of the business object data.
   * @param dataVersion The version of the business object data (e.g. 0).
   * @param storageName The storage name
   * @param files The list of storage files (path, size) that need to be added to the storage unit
   */
  def setStorageFiles(namespace: String, businessObjectName: String, formatUsage: String,
                      formatFileType: String, formatVersion: Integer, partitionKey: String,
                      partitionValue: String, subPartitionValues: Seq[String], dataVersion: Integer,
                      storageName: String, files: Seq[(String, Long)]): Unit
  /** Updates an existing business object data based on the specified parameters.
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @param partitionKey The business object format partition key.
   * @param partitionValue The partition value that the data is associated with (e.g. a specific trade date such as 20140401).
   * @param subPartitionValues The list of subpartition values of the business object data.
   * @param dataVersion The version of the business object data (e.g. 0).
   * @param status The business object data status
   */
  def updateBusinessObjectData(namespace: String, businessObjectName: String, formatUsage: String,
                               formatFileType: String, formatVersion: Integer, partitionKey: String,
                               partitionValue: String, subPartitionValues: Seq[String], dataVersion: Integer,
                               status: ObjectStatus.Value): Unit
  /** Deletes an existing business object data based on the specified parameters.
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   * @param partitionKey The business object format partition key.
   * @param partitionValue The partition value that the data is associated with (e.g. a specific trade date such as 20140401).
   * @param subPartitionValues The list of subpartition values of the business object data.
   * @param dataVersion The version of the business object data (e.g. 0).
   */
  def removeBusinessObjectData(namespace: String, businessObjectName: String, formatUsage: String,
                               formatFileType: String, formatVersion: Integer, partitionKey: String,
                               partitionValue: String, subPartitionValues: Seq[String], dataVersion: Integer): Unit
  /** Deletes an existing business object definition based on the specified parameters.
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   */
  def removeBusinessObjectDefinition(namespace: String, businessObjectName: String): Unit
  /** Deletes an existing business object format based on the specified parameters.
   *
   * @param namespace The namespace
   * @param businessObjectName The business object definition name
   * @param formatUsage The business object format usage (e.g. PRC).
   * @param formatFileType The business object format file type (e.g. GZ).
   * @param formatVersion The version of the business object format (e.g. 0).
   */
  def removeBusinessObjectFormat(namespace: String, businessObjectName: String, formatUsage: String,
                                 formatFileType: String, formatVersion: Integer): Unit
  /** Gets information about an existing storage.
   *
   * @param name storage name
   * @return The storage
   */
  def getStorage(name: String): Storage
  /** Retrieve namespace by namespace code.
   *
   * @param namespaceCode The namespace code
   * @return The namespace.
   */
  def getNamespaceByNamespaceCode(namespaceCode: String): Namespace
  /** Retrieve all namespace keys.
   *
   * @return List of namespace keys
   */
  def getAllNamespaces: NamespaceKeys
}
/** A simple interface that knows how to retry an action in case of error/failure. */
trait Retry {
  /** Logger supplied by the mixing-in class. */
  val log: Logger
  // Maximum number of retries for retryable API failures.
  private val MAX_TRIES = 3
  // Base back-off in milliseconds; the actual wait is WAIT * attempt number.
  private val WAIT = 100
  /** Executes `block`, retrying transient Herd failures.
   *
   * 4xx `ApiException`s are treated as fatal and rethrown immediately without
   * retrying. Other `ApiException`s are retried up to `MAX_TRIES` times with
   * a linearly increasing wait. Any failure that is not an `ApiException` is
   * rethrown unchanged (previously such failures escaped as a `MatchError`).
   */
  def withRetry[T](block: => T): T = {
    var tries = 0
    def runRecursively[S](block: => S): S = {
      Try(block) match {
        case Success(result) => result
        case Failure(ex: ApiException) =>
          if (ex.getCode >= 400 && ex.getCode < 500) {
            log.error(s"Encountered fatal error from Herd, will not retry. Status code: ${ex.getCode}, error message: ${ex.toString}", ex)
            throw new ApiException(ex.getCode, s"Encountered fatal error from Herd, will not retry. Status code: ${ex.getCode}, error message: ${ex.toString}")
          }
          else if (tries < MAX_TRIES) {
            log.error(s"Herd returned an error, will retry ${MAX_TRIES - tries} times. Status code: ${ex.getCode}, error message: ${ex.toString}", ex)
            tries += 1
            Thread.sleep(WAIT * tries)
            runRecursively(block)
          } else {
            log.error(s"Retried $MAX_TRIES times - aborting. Status code: ${ex.getCode}, error message: ${ex.toString}", ex)
            // Keep the abort message in sync with MAX_TRIES instead of hardcoding "3".
            throw new ApiException(ex.getCode, s"Retried $MAX_TRIES times. Aborting. Status code: ${ex.getCode}, error message: ${ex.toString}")
          }
        // Bug fix: propagate non-ApiException failures instead of MatchError.
        case Failure(ex) => throw ex
      }
    }
    runRecursively(block)
  }
}
class DefaultHerdApi(private val apiClient: ApiClient) extends HerdApi with Retry {
override val log: Logger = Logger.getLogger(classOf[DefaultHerdApi])
  // Factory methods for the Herd SDK API clients. Kept as overridable
  // methods -- presumably so tests can substitute mocks; confirm usage.
  def getBusinessObjectDefinitionApi(apiClient: ApiClient) : BusinessObjectDefinitionApi = {
    new BusinessObjectDefinitionApi(apiClient)
  }
  def getBusinessObjectDataApi(apiClient: ApiClient) : BusinessObjectDataApi = {
    new BusinessObjectDataApi(apiClient)
  }
  def getBusinessObjectDataStorageFileApi(apiClient: ApiClient) : BusinessObjectDataStorageFileApi = {
    new BusinessObjectDataStorageFileApi(apiClient)
  }
  def getBusinessObjectFormatApi(apiClient: ApiClient) : BusinessObjectFormatApi = {
    new BusinessObjectFormatApi(apiClient)
  }
  def getStorageApi(apiClient: ApiClient) : StorageApi = {
    new StorageApi(apiClient)
  }
  def getNamespaceApi(apiClient: ApiClient) : NamespaceApi = {
    new NamespaceApi(apiClient)
  }
  def getBusinessObjectDataStatusApi(apiClient: ApiClient) : BusinessObjectDataStatusApi = {
    new BusinessObjectDataStatusApi(apiClient)
  }
  /** Fetches a business object definition by name, with retry. The trailing
   * `false` flag is passed straight to the SDK -- presumably "do not include
   * definition update history"; confirm against the Herd SDK docs. */
  override def getBusinessObjectByName(namespace: String, businessObjectDefinitionName: String): BusinessObjectDefinition = {
    val api = getBusinessObjectDefinitionApi(apiClient)
    withRetry {
      api.businessObjectDefinitionGetBusinessObjectDefinition(namespace, businessObjectDefinitionName, false)
    }
  }
  /** Lists all business object definition keys in a namespace, with retry. */
  override def getBusinessObjectsByNamespace(namespace: String): BusinessObjectDefinitionKeys = {
    val api = getBusinessObjectDefinitionApi(apiClient)
    withRetry {
      api.businessObjectDefinitionGetBusinessObjectDefinitions1(namespace)
    }
  }
  /** Creates a business object definition from the given namespace, name and
   * data provider, with retry. The SDK response is discarded. */
  override def registerBusinessObject(namespace: String, businessObjectName: String, dataProvider: String): Unit = {
    val api = getBusinessObjectDefinitionApi(apiClient)
    val req = new BusinessObjectDefinitionCreateRequest()
    req.setNamespace(namespace)
    req.setBusinessObjectDefinitionName(businessObjectName)
    req.setDataProviderName(dataProvider)
    withRetry {
      api.businessObjectDefinitionCreateBusinessObjectDefinition(req)
    }
  }
  /** Lists format keys for a definition, with retry; by default only the
   * latest version of each format is returned. */
  override def getBusinessObjectFormats(namespace: String, businessObjectName: String,
                                        latestBusinessObjectFormatVersion: Boolean = true): BusinessObjectFormatKeys = {
    val api = getBusinessObjectFormatApi(apiClient)
    withRetry {
      api.businessObjectFormatGetBusinessObjectFormats(namespace, businessObjectName, latestBusinessObjectFormatVersion)
    }
  }
  /** Fetches a single format identified by usage, file type and version, with retry. */
  override def getBusinessObjectFormat(namespace: String, businessObjectDefinitionName: String,
                                       formatUsage: String, formatFileType: String,
                                       formatVersion: Integer): BusinessObjectFormat = {
    val api = getBusinessObjectFormatApi(apiClient)
    withRetry {
      api.businessObjectFormatGetBusinessObjectFormat(namespace, businessObjectDefinitionName, formatUsage,
        formatFileType, formatVersion)
    }
  }
  /** Creates a new format (schema optional -- `orNull` unwraps the Option for
   * the Java SDK) and returns the version assigned by Herd, with retry. */
  override def registerBusinessObjectFormat(namespace: String, businessObjectDefinitionName: String,
    formatUsage: String, formatFileType: String, partitionKey: String, schema: Option[Schema]): Integer = {
    val api = getBusinessObjectFormatApi(apiClient)
    val req = new BusinessObjectFormatCreateRequest()
    req.setNamespace(namespace)
    req.setBusinessObjectDefinitionName(businessObjectDefinitionName)
    req.setBusinessObjectFormatUsage(formatUsage)
    req.setBusinessObjectFormatFileType(formatFileType)
    req.setPartitionKey(partitionKey)
    req.setSchema(schema.orNull)
    withRetry {
      api.businessObjectFormatCreateBusinessObjectFormat(req).getBusinessObjectFormatVersion
    }
  }
  /** Resolves available partitions as (format version, partition value,
   * sub-partition values, data version) tuples.
   *
   * With no filter, availability is queried twice: first with the Herd
   * placeholder tokens `${maximum.partition.value}` / `${minimum.partition.value}`
   * to discover the partition range, then again over that full range. With a
   * caller-supplied filter a single availability call is made (the filter is
   * converted via the implicit from PartitionFilter._).
   */
  override def getBusinessObjectPartitions(namespace: String, businessObjectDefinitionName: String, formatUsage: String,
                                           formatFileType: String, formatVersion: Integer,
                                           partitionFilter: Option[PartitionFilter]): Seq[(Integer, String, Seq[String], Integer)] = {
    val api = getBusinessObjectDataApi(apiClient)
    partitionFilter match {
      case None =>
        val filter = new PartitionValueFilter()
        // Placeholder tokens resolved server-side to the min/max partition values.
        filter.setPartitionValues(List("${maximum.partition.value}", "${minimum.partition.value}").asJava)
        val req = new BusinessObjectDataAvailabilityRequest()
        req.setNamespace(namespace)
        req.setBusinessObjectDefinitionName(businessObjectDefinitionName)
        req.setBusinessObjectFormatUsage(formatUsage)
        req.setBusinessObjectFormatFileType(formatFileType)
        req.setPartitionValueFilters(null)
        req.setPartitionValueFilter(filter)
        req.setIncludeAllRegisteredSubPartitions(false)
        val range = withRetry {
          api.businessObjectDataCheckBusinessObjectDataAvailability(req)
        }.getAvailableStatuses.asScala.map { status =>
          status.getPartitionValue
        }
        // Reset the (still-referenced) filter before narrowing it to the
        // discovered range for the second availability call.
        filter.setPartitionValues(null)
        if (range.isEmpty) {
          return Seq.empty
        } else if (range.size == 1) {
          filter.setPartitionValues(List(range.head).asJava)
        } else {
          val filterRange = new PartitionValueRange()
          filterRange.setStartPartitionValue(range.head)
          filterRange.setEndPartitionValue(range.last)
          filter.setPartitionValueRange(filterRange)
        }
        // NOTE(review): this flag was already set to false above; the repeat
        // is redundant but harmless.
        req.setIncludeAllRegisteredSubPartitions(false)
        withRetry {
          api.businessObjectDataCheckBusinessObjectDataAvailability(req)
        }.getAvailableStatuses.asScala.map { status =>
          (status.getBusinessObjectFormatVersion,
            status.getPartitionValue,
            status.getSubPartitionValues.asScala,
            status.getBusinessObjectDataVersion)
        }
      case Some(filter) =>
        val req = new BusinessObjectDataAvailabilityRequest()
        req.setNamespace(namespace)
        req.setBusinessObjectDefinitionName(businessObjectDefinitionName)
        req.setBusinessObjectFormatUsage(formatUsage)
        req.setBusinessObjectFormatFileType(formatFileType)
        req.setPartitionValueFilters(null)
        req.setPartitionValueFilter(filter)
        req.setIncludeAllRegisteredSubPartitions(false)
        // NOTE(review): the two lines below repeat the implicit conversion and
        // setter call from just above; redundant but behaviourally identical.
        val convertedFilter: PartitionValueFilter = filter
        req.setPartitionValueFilter(convertedFilter)
        withRetry {
          api.businessObjectDataCheckBusinessObjectDataAvailability(req)
        }.getAvailableStatuses.asScala.map { status =>
          (status.getBusinessObjectFormatVersion,
            status.getPartitionValue,
            status.getSubPartitionValues.asScala,
            status.getBusinessObjectDataVersion
          )
        }
    }
  }
  /** Fetches one business object data entry, with retry. Sub-partition values
   * are joined with '|' for the SDK; only VALID data is requested, and the
   * two trailing `false` flags are passed through to the SDK as-is
   * (presumably "include business object data status history" and
   * "include storage unit status history" -- confirm against the SDK). */
  override def getBusinessObjectData(namespace: String, businessObjectName: String,
                                     formatUsage: String, formatFileType: String,
                                     formatVersion: Integer, partitionKey: String, partitionValue: String,
                                     subPartitionValues: Seq[String], dataVersion: Integer): BusinessObjectData = {
    val api = getBusinessObjectDataApi(apiClient)
    withRetry {
      api.businessObjectDataGetBusinessObjectData(
        namespace,
        businessObjectName,
        formatUsage,
        formatFileType,
        partitionKey,
        partitionValue,
        subPartitionValues.mkString("|"),
        formatVersion,
        dataVersion,
        ObjectStatus.VALID.toString,
        false,
        false
      )
    }
  }
  /** Runs a paged business object data search, with retry. */
  override def searchBusinessObjectData(businessObjectDataSearchRequest: BusinessObjectDataSearchRequest, pageNum: Integer = 1,
                                        pageSize: Integer = 1000): BusinessObjectDataSearchResult = {
    val api = getBusinessObjectDataApi(apiClient)
    withRetry {
      api.businessObjectDataSearchBusinessObjectData(businessObjectDataSearchRequest, pageNum, pageSize)
    }
  }
  /** Generates Hive 13 DDL for the given data coordinates, with retry.
   * The table name is fixed to "HerdSpark" and the partition values are sent
   * as a single-element filter list on partitionKey. */
  override def getBusinessObjectDataGenerateDdl(namespace: String, businessObjectName: String,
                                                formatUsage: String, formatFileType: String,
                                                formatVersion: Integer, partitionKey: String, partitionValues: Seq[String],
                                                dataVersion: Integer): BusinessObjectDataDdl = {
    val api = getBusinessObjectDataApi(apiClient)
    val businessObjectDataDdlRequest = new BusinessObjectDataDdlRequest()
    businessObjectDataDdlRequest.setNamespace(namespace)
    businessObjectDataDdlRequest.setBusinessObjectDefinitionName(businessObjectName)
    businessObjectDataDdlRequest.setBusinessObjectFormatUsage(formatUsage)
    businessObjectDataDdlRequest.setBusinessObjectFormatFileType(formatFileType)
    businessObjectDataDdlRequest.setBusinessObjectFormatVersion(formatVersion)
    val partitionValueFilter = new PartitionValueFilter()
    partitionValueFilter.setPartitionKey(partitionKey)
    partitionValueFilter.setPartitionValues(partitionValues.asJava)
    businessObjectDataDdlRequest.setPartitionValueFilters(List.fill(1)(partitionValueFilter).asJava)
    businessObjectDataDdlRequest.setOutputFormat(BusinessObjectDataDdlRequest.OutputFormatEnum.HIVE_13_DDL)
    businessObjectDataDdlRequest.setBusinessObjectDataVersion(dataVersion)
    businessObjectDataDdlRequest.setTableName("HerdSpark")
    withRetry {
      log.debug("businessObjectDataDdlRequest=" + businessObjectDataDdlRequest)
      val businessObjectDataDdl = api.businessObjectDataGenerateBusinessObjectDataDdl(businessObjectDataDdlRequest)
      log.debug("businessObjectDataDdl=" + businessObjectDataDdl)
      businessObjectDataDdl
    }
  }
/**
 * Fetch partition metadata for business object data matching a single
 * partition-value filter. Missing data is tolerated (allowMissingData=true),
 * so absent partitions are skipped rather than raising an error.
 */
override def getBusinessObjectDataPartitions(namespace: String, businessObjectName: String,
                                             formatUsage: String, formatFileType: String,
                                             formatVersion: Integer, partitionKey: String, partitionValues: Seq[String],
                                             dataVersion: Integer): BusinessObjectDataPartitions = {
  val api = getBusinessObjectDataApi(apiClient)
  val businessObjectDataPartitionsRequest = new BusinessObjectDataPartitionsRequest
  businessObjectDataPartitionsRequest.setNamespace(namespace)
  businessObjectDataPartitionsRequest.setBusinessObjectDefinitionName(businessObjectName)
  businessObjectDataPartitionsRequest.setBusinessObjectFormatUsage(formatUsage)
  businessObjectDataPartitionsRequest.setBusinessObjectFormatFileType(formatFileType)
  businessObjectDataPartitionsRequest.setBusinessObjectFormatVersion(formatVersion)
  businessObjectDataPartitionsRequest.setAllowMissingData(true)
  val partitionValueFilter = new PartitionValueFilter()
  partitionValueFilter.setPartitionKey(partitionKey)
  partitionValueFilter.setPartitionValues(partitionValues.asJava)
  // idiomatic single-element list; List.fill(1)(x) was equivalent but noisy
  businessObjectDataPartitionsRequest.setPartitionValueFilters(List(partitionValueFilter).asJava)
  businessObjectDataPartitionsRequest.setBusinessObjectDataVersion(dataVersion)
  withRetry {
    log.debug("businessObjectDataPartitionsRequest=" + businessObjectDataPartitionsRequest)
    val businessObjectDataPartitions = api.businessObjectDataGenerateBusinessObjectDataPartitions(businessObjectDataPartitionsRequest)
    log.debug("businessObjectDataPartitions=" + businessObjectDataPartitions)
    businessObjectDataPartitions
  }
}
/**
 * Check data availability over a partition-value window. With only a lower
 * bound a latest-after filter is used, with only an upper bound a
 * latest-before filter, and with both bounds a closed range.
 */
override def getBusinessObjectDataAvailability(namespace: String, businessObjectName: String,
                                               formatUsage: String, formatFileType: String,
                                               partitionKey: String, firstPartitionValue: String,
                                               lastPartitionValue: String): BusinessObjectDataAvailability = {
  val availabilityApi = getBusinessObjectDataApi(apiClient)
  val request = new BusinessObjectDataAvailabilityRequest
  request.setNamespace(namespace)
  request.setBusinessObjectDefinitionName(businessObjectName)
  request.setBusinessObjectFormatUsage(formatUsage)
  request.setBusinessObjectFormatFileType(formatFileType)
  val filter = new PartitionValueFilter
  filter.setPartitionKey(partitionKey)
  if (StringUtils.isEmpty(lastPartitionValue)) {
    // only a lower bound provided -> latest-after filter
    val latestAfter = new LatestAfterPartitionValue
    latestAfter.setPartitionValue(firstPartitionValue)
    filter.setLatestAfterPartitionValue(latestAfter)
  } else if (StringUtils.isEmpty(firstPartitionValue)) {
    // only an upper bound provided -> latest-before filter
    val latestBefore = new LatestBeforePartitionValue
    latestBefore.setPartitionValue(lastPartitionValue)
    filter.setLatestBeforePartitionValue(latestBefore)
  } else {
    // both bounds -> closed partition value range
    val range = new PartitionValueRange()
    range.setStartPartitionValue(firstPartitionValue)
    range.setEndPartitionValue(lastPartitionValue)
    filter.setPartitionValueRange(range)
  }
  request.setPartitionValueFilter(filter)
  withRetry(availabilityApi.businessObjectDataCheckBusinessObjectDataAvailability(request))
}
/**
 * Register a new business object data instance and return the version Herd
 * assigned plus the storage units echoed back in the response.
 * When a storage directory is given, Herd is asked to discover the files
 * beneath it; otherwise discovery is disabled.
 */
override def registerBusinessObjectData(namespace: String, businessObjectName: String, formatUsage: String,
                                        formatFileType: String, formatVersion: Integer, partitionKey: String,
                                        partitionValue: String, subPartitionValues: Seq[String],
                                        status: ObjectStatus.Value, storageName: String,
                                        storageDirectory: Option[String] = None): (Integer, Seq[StorageUnit]) = {
  val dataApi = getBusinessObjectDataApi(apiClient)
  val createRequest = new BusinessObjectDataCreateRequest()
  createRequest.setNamespace(namespace)
  createRequest.setBusinessObjectDefinitionName(businessObjectName)
  createRequest.setBusinessObjectFormatUsage(formatUsage)
  createRequest.setBusinessObjectFormatFileType(formatFileType)
  createRequest.setBusinessObjectFormatVersion(formatVersion)
  createRequest.setPartitionKey(partitionKey)
  createRequest.setPartitionValue(partitionValue)
  createRequest.setSubPartitionValues(subPartitionValues.toList.asJava)
  createRequest.setCreateNewVersion(true)
  createRequest.setStatus(status.toString)
  val unitRequest = new StorageUnitCreateRequest()
  unitRequest.setStorageName(storageName)
  storageDirectory match {
    case Some(path) =>
      val dir = new StorageDirectory()
      dir.setDirectoryPath(path)
      unitRequest.setStorageDirectory(dir)
      unitRequest.setDiscoverStorageFiles(true)
    case None =>
      unitRequest.setDiscoverStorageFiles(false)
  }
  createRequest.setStorageUnits(List(unitRequest).asJava)
  val response = withRetry(dataApi.businessObjectDataCreateBusinessObjectData(createRequest))
  // we only expect one path
  (response.getVersion, response.getStorageUnits.asScala)
}
/**
 * Attach (path, sizeInBytes) file entries to an already-registered data
 * instance under the given storage. Note: partitionKey is accepted for
 * signature symmetry with the sibling methods but the storage-files request
 * has no field for it.
 */
override def setStorageFiles(namespace: String, businessObjectName: String, formatUsage: String,
                             formatFileType: String, formatVersion: Integer, partitionKey: String,
                             partitionValue: String, subPartitionValues: Seq[String], dataVersion: Integer,
                             storageName: String, files: Seq[(String, Long)]): Unit = {
  val request = new BusinessObjectDataStorageFilesCreateRequest()
  request.setNamespace(namespace)
  request.setBusinessObjectDefinitionName(businessObjectName)
  request.setBusinessObjectFormatUsage(formatUsage)
  request.setBusinessObjectFormatFileType(formatFileType)
  request.setBusinessObjectFormatVersion(formatVersion)
  request.setPartitionValue(partitionValue)
  request.setSubPartitionValues(subPartitionValues.toList.asJava)
  request.setBusinessObjectDataVersion(dataVersion)
  request.setStorageName(storageName)
  val storageFiles = files.map { case (path, sizeBytes) =>
    val storageFile = new StorageFile()
    storageFile.setFilePath(path)
    storageFile.setFileSizeBytes(sizeBytes)
    storageFile
  }
  request.setStorageFiles(storageFiles.toList.asJava)
  val fileApi = getBusinessObjectDataStorageFileApi(apiClient)
  withRetry(fileApi.businessObjectDataStorageFileCreateBusinessObjectDataStorageFiles(request))
}
/**
 * Update the status of an existing business object data instance.
 * The generated Herd client exposes a distinct endpoint per sub-partition
 * arity (0..4), so we dispatch on the shape of subPartitionValues.
 */
override def updateBusinessObjectData(namespace: String, businessObjectName: String, formatUsage: String,
                                      formatFileType: String, formatVersion: Integer, partitionKey: String,
                                      partitionValue: String, subPartitionValues: Seq[String], dataVersion: Integer,
                                      status: ObjectStatus.Value): Unit = {
  val statusApi = getBusinessObjectDataStatusApi(apiClient)
  val statusUpdate = new BusinessObjectDataStatusUpdateRequest()
  statusUpdate.setStatus(status.toString)
  subPartitionValues match {
    case Seq() => withRetry(statusApi.businessObjectDataStatusUpdateBusinessObjectDataStatus(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, dataVersion, statusUpdate))
    case Seq(s1) => withRetry(statusApi.businessObjectDataStatusUpdateBusinessObjectDataStatus1(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, s1, dataVersion, statusUpdate))
    case Seq(s1, s2) => withRetry(statusApi.businessObjectDataStatusUpdateBusinessObjectDataStatus2(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, s1, s2, dataVersion, statusUpdate))
    case Seq(s1, s2, s3) => withRetry(statusApi.businessObjectDataStatusUpdateBusinessObjectDataStatus3(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, s1, s2, s3, dataVersion, statusUpdate))
    case Seq(s1, s2, s3, s4) => withRetry(statusApi.businessObjectDataStatusUpdateBusinessObjectDataStatus4(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, s1, s2, s3, s4, dataVersion, statusUpdate))
    case _ => sys.error("Cannot update object with more than 4 sub-partition values!")
  }
}
/**
 * Delete a business object data instance. As with status updates, the
 * generated client has one endpoint per sub-partition arity (0..4).
 * The trailing `false` flag mirrors the original calls (presumably
 * deleteFiles=false — confirm against the generated client signature).
 */
override def removeBusinessObjectData(namespace: String, businessObjectName: String, formatUsage: String,
                                      formatFileType: String, formatVersion: Integer, partitionKey: String,
                                      partitionValue: String, subPartitionValues: Seq[String],
                                      dataVersion: Integer): Unit = {
  val dataApi = getBusinessObjectDataApi(apiClient)
  subPartitionValues match {
    case Seq() => withRetry(dataApi.businessObjectDataDeleteBusinessObjectData(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, dataVersion, false))
    case Seq(s1) => withRetry(dataApi.businessObjectDataDeleteBusinessObjectData1(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, s1, dataVersion, false))
    case Seq(s1, s2) => withRetry(dataApi.businessObjectDataDeleteBusinessObjectData2(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, s1, s2, dataVersion, false))
    case Seq(s1, s2, s3) => withRetry(dataApi.businessObjectDataDeleteBusinessObjectData3(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, s1, s2, s3, dataVersion, false))
    case Seq(s1, s2, s3, s4) => withRetry(dataApi.businessObjectDataDeleteBusinessObjectData4(
      namespace, businessObjectName, formatUsage, formatFileType, formatVersion,
      partitionValue, s1, s2, s3, s4, dataVersion, false))
    case _ => sys.error("Cannot delete object with more than 4 sub-partition values!")
  }
}
/** Delete a business object definition by namespace and name. */
override def removeBusinessObjectDefinition(namespace: String, businessObjectName: String): Unit = {
  val definitionApi = getBusinessObjectDefinitionApi(apiClient)
  withRetry(definitionApi.businessObjectDefinitionDeleteBusinessObjectDefinition(namespace, businessObjectName))
}
/** Delete one version of a business object format. */
override def removeBusinessObjectFormat(namespace: String, businessObjectName: String, formatUsage: String,
                                        formatFileType: String, formatVersion: Integer): Unit = {
  val formatApi = getBusinessObjectFormatApi(apiClient)
  withRetry(formatApi.businessObjectFormatDeleteBusinessObjectFormat(namespace, businessObjectName, formatUsage, formatFileType, formatVersion))
}
/** Fetch a storage definition by name. */
override def getStorage(name: String): Storage = {
  val storageApi = getStorageApi(apiClient)
  withRetry(storageApi.storageGetStorage(name))
}
/** Fetch a single namespace by its code. */
override def getNamespaceByNamespaceCode(namespaceCode: String): Namespace = {
  val namespaceApi = getNamespaceApi(apiClient)
  withRetry(namespaceApi.namespaceGetNamespace(namespaceCode))
}
/** List the keys of every namespace registered in Herd. */
override def getAllNamespaces: NamespaceKeys = {
  val namespaceApi = getNamespaceApi(apiClient)
  withRetry(namespaceApi.namespaceGetNamespaces())
}
}
|
#!/bin/bash
#$ -cwd
#$ -m e
#$ -N makeBai-Sim
#$ -hold_jid sample*th
# Index accepted_hits.bam in every sample* directory with samtools.
module load samtools/1.1
for i in sample*; do
    echo "$i"
    # Subshell with a guarded cd: if the glob matches a non-directory the
    # original unguarded `cd $i; ...; cd ..` would index the wrong location.
    ( cd "$i" || exit 1
      pwd
      samtools index accepted_hits.bam )
done
|
import copy
import json
import itertools
from types import GeneratorType
from pathlib import PurePath
from datetime import datetime, date, time
from functools import wraps
from collections import deque, defaultdict
import idlib
import rdflib
import ontquery as oq
from idlib.formats import rdf as _bind_rdf # imported for side effect
from ttlser import CustomTurtleSerializer
from xlsx2csv import Xlsx2csv, SheetNotFoundException
from pysercomb.pyr.types import ProtcurExpression, Quantity, AJ, Measurement
from pyontutils.core import OntTerm as OTB, OntId as OIDB
from pyontutils.utils import isoformat
from pyontutils.namespaces import OntCuries, TEMP, sparc, NIFRID, definition
from pyontutils.namespaces import tech, asp, dim, unit, rdf, owl, rdfs
from sparcur import exceptions as exc
from sparcur.utils import log, logd # FIXME fix other imports
from sparcur.utils import is_list_or_tuple, register_type, IdentityJsonType
# Shorthand for the XSD namespace used for min/max bounds below.
xsd = rdflib.XSD
# Extend ttlser's global predicate ordering (module-level side effect) so
# name parts and value/unit predicates serialize in a stable order.
po = CustomTurtleSerializer.predicateOrder
po.extend((sparc.firstName,
           sparc.middleName,
           sparc.lastName,
           xsd.minInclusive,
           xsd.maxInclusive,
           TEMP.hasValue,
           TEMP.hasUnit,))
# Register the curie prefixes used throughout sparcur serialization
# (module-level side effect on the shared OntCuries registry).
OntCuries({'orcid':'https://orcid.org/',
           'ORCID':'https://orcid.org/',
           'DOI':'https://doi.org/',
           'ror':'https://ror.org/',
           'pio.api': 'https://www.protocols.io/api/v3/protocols/',
           # FIXME -> pennsieve.io XXX and that is why you always embed the expansion rule
           'dataset':'https://api.pennsieve.io/datasets/N:dataset:',
           'collection':'https://api.pennsieve.io/collections/N:collection:',
           'package':'https://api.pennsieve.io/packages/N:package:',
           'user':'https://api.pennsieve.io/users/N:user:',
           'bibo': 'http://purl.org/ontology/bibo/',  # crossref
           'prism.basic': 'http://prismstandard.org/namespaces/basic/2.1/',  # crossref
           'unit': str(unit),
           'dim': str(dim),
           'asp': str(asp),
           'protcur': 'https://uilx.org/tgbugs/u/protcur/',
           'hyp-protcur': 'https://uilx.org/tgbugs/u/hypothesis/protcur/',
           'aspect-raw': 'https://uilx.org/tgbugs/u/aspect-raw/',
           'verb': 'https://uilx.org/tgbugs/u/executor-verb/',
           'fuzzy': 'https://uilx.org/tgbugs/u/fuzzy-quantity/',
           'tech': str(tech),
           'awards':str(TEMP['awards/']),
           'sparc':str(sparc),})
def curies_runtime(base):
    """Build the per-dataset curie prefix table rooted at *base*.

    base is e.g. https://api.blackfynn.io/datasets/{dataset_id}/
    """
    suffixes = {
        'local': '',
        'contributor': 'contributors/',
        'subject': 'subjects/',
        'sample': 'samples/',
    }
    return {prefix: base + suffix for prefix, suffix in suffixes.items()}
class OntId(OIDB):
    # Local subclass hook so sparcur can customize identifier behavior
    # without touching the pyontutils base class.
    pass
    #def atag(self, **kwargs):
        #if 'curie' in kwargs:
            #kwargs.pop('curie')
        #return hfn.atag(self.iri, self.curie, **kwargs)
class OntTerm(OTB, OntId):
    """Instrumented ontology term with sparcur-specific rendering helpers
    (json blob, spreadsheet cell, simple rdf triples)."""

    # prefixes for which a missing label is expected and should not be logged
    _known_no_label = 'dataset', 'pio.private'
    #def atag(self, curie=False, **kwargs):
        #return hfn.atag(self.iri, self.curie if curie else self.label, **kwargs) # TODO schema.org ...

    # class-level set of identifiers whose missing label was already logged
    _logged = set()

    @classmethod
    def fromJson(cls, blob):
        # Rehydrate a term from the blob produced by asDict().
        assert blob['system'] == cls.__name__
        identifier = blob['id']
        if isinstance(identifier, cls):
            return identifier
        else:
            return cls(identifier, label=blob['label'])  # FIXME need the .fetch() impl

    @classmethod
    def _already_logged(cls, thing):
        # Return whether *thing* was seen before, recording it as seen.
        case = thing in cls._logged
        if not case:
            cls._logged.add(thing)

        return case

    def asType(self, _class):
        # Reinterpret this term's iri as another identifier type.
        return _class(self.iri)

    def asUri(self, asType=None):
        # Bare iri, optionally wrapped in *asType* (e.g. rdflib.URIRef).
        return (self.iri
                if asType is None else
                asType(self.iri))

    def asDict(self):
        # JSON-ready blob; note 'id' deliberately holds the term object
        # itself rather than the iri so later passes keep the instance.
        out = {
            'type': 'identifier',
            'system': self.__class__.__name__,
            #'id': self.iri,
            'id': self,  # XXX
            'label': self.label,
        }
        if hasattr(self, 'synonyms') and self.synonyms:
            if is_list_or_tuple(self.synonyms):
                out['synonyms'] = self.synonyms
            else:
                # normalize a lone synonym into a 1-tuple
                out['synonyms'] = self.synonyms,

        return out

    def asCell(self, sep='|'):
        # Spreadsheet rendering: "label|curie", or the bare identifier when
        # there is no label (the missing label is logged once per id).
        if self.label is None:
            _id = self.curie if self.curie else self.iri
            if self.prefix not in self._known_no_label:
                if not self._already_logged(_id):
                    log.error(f'No label {_id}')

            return _id

        return self.label + sep + self.curie

    def asCellHyperlink(self):
        # Spreadsheet HYPERLINK formula form of this term.
        return f'=HYPERLINK("{self.iri}", "{self.label}")'

    @property
    def triples_simple(self):
        # method name matches convention from neurondm
        # but I still don't really like this pattern
        # especially since this wouldn't be derived directly
        # from the json from but would/could hit the network again
        s = self.asUri(rdflib.URIRef)
        yield s, rdf.type, owl.Class
        if self.label:
            yield s, rdfs.label, rdflib.Literal(self.label)

        if self.definition:
            yield s, definition, rdflib.Literal(self.definition)

        if self.deprecated:
            yield s, owl.deprecated, rdflib.Literal(True)

        if is_list_or_tuple(self.synonyms):  # workaround for ontquery list vs string issue
            for syn in self.synonyms:
                yield s, NIFRID.synonym, rdflib.Literal(syn)

        elif self.synonyms:
            yield s, NIFRID.synonym, rdflib.Literal(self.synonyms)
# Finish static initialization of OntTerm and pick the default repr fields
# (module-level side effects from the ontquery machinery).
OntTerm._sinit()
OntTerm.set_repr_args('curie', 'label')
class HasErrors:
    """Mixin that accumulates (error, stage, blame, path, json_path) tuples
    and can embed them as an 'errors' list in a data blob."""

    # key used to pass "these keys had errors" hints between pipeline stages
    message_passing_key = 'keys_with_errors'
    # class-level: each distinct error is logged at most once per process
    _already_logged = set()

    def __init__(self, *args, pipeline_stage=None, **kwargs):
        try:
            super().__init__(*args, **kwargs)
        except TypeError as e:  # cooperative super may reject extra args
            super().__init__()

        self._pipeline_stage = pipeline_stage
        self._errors_set = set()

    @staticmethod
    def errorInKey(data, key):
        # Record that *key* had an error by appending to the message passing
        # list, creating it on first use.
        keylist = data.setdefault(HasErrors.message_passing_key, [])
        keylist.append(key)

    def addError(self, error, pipeline_stage=None, blame=None, path=None, json_path=None):
        """Add an error entry; return True only when the entry is new AND the
        error text has never been logged before (callers use this to decide
        whether to emit a log line)."""
        first_time = error not in self._already_logged
        if first_time:
            self._already_logged.add(error)

        if pipeline_stage is not None:
            stage = pipeline_stage
        elif self._pipeline_stage:
            stage = self._pipeline_stage
        else:
            stage = self.__class__.__name__

        before = len(self._errors_set)
        entry = (error, stage, blame, path, json_path)
        self._last_error = entry
        self._errors_set.add(entry)
        grew = len(self._errors_set) != before
        return grew and first_time

    def _render(self, e, stage, blame, path, json_path):
        # Convert one stored error tuple into its json blob form.
        out = {'pipeline_stage': stage,
               'blame': blame,}  # FIXME
        if path is not None:
            out['file_path'] = path

        if json_path is not None:
            out['path'] = json_path

        if isinstance(e, str):
            out['message'] = e
            out['type'] = None  # FIXME probably want our own error type?
        elif isinstance(e, BaseException):
            out['message'] = str(e)
            out['type'] = str(type(e))
        else:
            raise TypeError(repr(e))

        log.debug(out)
        return out

    @property
    def _errors(self):
        # Lazily render every accumulated error tuple.
        return (self._render(*entry) for entry in self._errors_set)

    def embedLastError(self, data):
        self.embedErrors(data, el=[self._render(*self._last_error)])

    def embedErrors(self, data, el=tuple()):
        # Append rendered errors (or the explicit *el*) to data['errors'].
        if not el:
            el = list(self._errors)

        if el:
            if 'errors' in data:
                data['errors'].extend(el)
            else:
                data['errors'] = el
def lj(j, limit=100):
    """Format json *j* for logging, eliding the middle past *limit* lines."""
    rendered = '\n' + json.dumps(j, indent=2, cls=JEncode)
    if rendered.count('\n') <= limit + 3:
        return rendered

    lines = rendered.split('\n')
    keep = limit // 2
    head = lines[:keep]
    tail = lines[-keep:]
    # better to calculate explicitly given potential weirdness with // 2
    skipped = len(lines) - (len(head) + len(tail))
    return '\n'.join(head + [f'\n ... Skipping {skipped} lines ... \n'] + tail)
def dereference_all_identifiers(obj, stage, *args, path=None, addError=None, **kwargs):
    """Tree-walk hook: attempt to expand *obj* as an identifier, converting
    remote/resolution failures into error records.

    NOTE(review): on success the expanded value is discarded and None is
    returned — confirm callers rely only on the error side effects / the
    exercise of the expansion, not its result.
    """
    try:
        dict_literal = _json_identifier_expansion(obj)
    except idlib.exc.RemoteError as e:
        if hasattr(obj, '_cooldown'):
            return obj._cooldown()  # trigger cooldown to simplify issues down the line

        error = dict(error=e,
                     pipeline_stage=stage.__class__.__name__,
                     blame='submission',
                     path=tuple(path))
        if addError:
            # only log the first time this error is recorded
            if addError(**error):
                log.exception(e)
                #logd.error(msg)
        else:
            return {'errors': [error]}
    except idlib.exc.ResolutionError as e:
        if hasattr(obj, '_cooldown'):
            return obj._cooldown()  # trigger cooldown to simplify issues down the line

        oops = json_export_type_converter(obj)
        msg = (f'{stage.lifters.id} could not resolve '  # FIXME lifters sigh
               f'{type(obj)}: {oops} {obj.asUri()}')
        error = dict(error=msg,
                     pipeline_stage=stage.__class__.__name__,
                     blame='submission',
                     path=tuple(path))
        if addError:
            if addError(**error):
                logd.error(msg)
        else:
            return {'errors': [error]}
    except Exception as e:
        # anything else is a stage bug, not bad submitted data
        log.critical(f'Unhandled exception {e} in {path}')
        error = dict(error=e,
                     pipeline_stage=stage.__class__.__name__,
                     blame='stage',
                     path=tuple(path))
        if addError:
            if addError(**error):
                log.exception(e)
                #logd.error(msg)
        else:
            return {'errors': [error]}
def _json_identifier_expansion(obj, *args, **kwargs):
    """Expand a single identifier-like object to its json dict form;
    non-identifier values pass through unchanged. May hit the network."""
    if not isinstance(obj, oq.OntTerm):
        # normalize raw URIRefs / plain OntIds up to instrumented terms first
        if isinstance(obj, rdflib.URIRef):
            obj = OntId(obj)

        if isinstance(obj, oq.OntId):
            obj = obj.asInstrumented()

    if isinstance(obj, oq.OntTerm):
        oc = obj.__class__
        obj.__class__ = OntTerm  # that this works is amazing/terrifying
        try:
            return obj.asDict()
        finally:
            # always restore the original class, even if asDict raises
            obj.__class__ = oc

    elif isinstance(obj, idlib.Stream):
        if obj._id_class is str:
            return obj.identifier
        else:
            try:
                return obj.asDict()
            except idlib.exc.RemoteError as e:
                logd.error(e)
                # we must return object here otherwise sanity destroying
                # None might creep in during expansion here, this really
                # shouldn't be able to happen i.e. it should be impossible
                # for asDict to raise a remote error at all, but we need
                # to catch it here as well
                return obj

    else:
        # passthrough for anything that is not an identifier
        return obj
def json_identifier_expansion(obj, *args, path=None, **kwargs):
    """ expand identifiers to json literal form """
    # The three failure modes all produced the same partially-expanded blob
    # with a different message; build it in one place.
    # NOTE(review): this function catches idlib.exceptions.* while
    # dereference_all_identifiers catches idlib.exc.* — confirm the two are
    # aliases of the same module.
    def _error_blob(msg):
        return {'id': obj,
                'type': 'identifier',
                'system': obj.__class__.__name__,
                'errors': [{'message': msg, 'path': path}]}

    try:
        return _json_identifier_expansion(obj, *args, **kwargs)
    except idlib.exceptions.RemoteError as e:
        oops = json_export_type_converter(obj)
        return _error_blob(f'remote error {e} for {type(obj)}: {oops}')
    except idlib.exceptions.ResolutionError as e:
        oops = json_export_type_converter(obj)
        return _error_blob(f'could not resolve {type(obj)}: {oops}')
    except Exception as e:
        oops = json_export_type_converter(obj)
        msg = f'Unhandled exception {e} in {path}'
        out = _error_blob(msg)
        log.critical(msg)
        return out
def json_export_type_converter(obj):
    """Convert known rich types to json-serializable values.

    Returns None (implicitly) when *obj* is not one of the recognized types;
    callers treat None as "no conversion available".
    """
    if isinstance(obj, deque):
        return list(obj)
    elif isinstance(obj, AJ):
        return obj.asJson()
    elif isinstance(obj, (ProtcurExpression, Quantity, Measurement)):
        # the pysercomb types all share the same json() serializer;
        # collapsing the three identical branches from the original
        return obj.json()
    elif isinstance(obj, PurePath):
        return obj.as_posix()
    elif isinstance(obj, oq.OntTerm):
        return obj.iri
        #return obj.asDict()  # FIXME need a no network/scigraph version
    elif isinstance(obj, idlib.Stream) and hasattr(obj, '_id_class'):
        if obj._id_class is str:
            return obj.identifier
        else:
            # nested identifier: convert recursively
            return json_export_type_converter(obj.identifier)
            #return obj.asDict()  # FIXME need a no network/scigraph version
    elif isinstance(obj, (datetime, date, time)):
        # all three datetime-ish types go through the same isoformat helper
        return isoformat(obj)
    elif isinstance(obj, BaseException):
        # FIXME hunt down where these are sneeking in from
        return repr(obj)
class JEncode(json.JSONEncoder):
    """JSONEncoder that understands sparcur's rich types via
    json_export_type_converter."""

    def default(self, obj):
        converted = json_export_type_converter(obj)
        if converted is not None:
            return converted

        # base class raises TypeError for anything still unknown
        return json.JSONEncoder.default(self, obj)
def JFixKeys(obj):
    """Recursively convert dict keys to their json-exportable form while
    leaving values to be fixed by recursion."""
    def _fix_key(k):
        converted = json_export_type_converter(k)
        return converted if converted else k

    if isinstance(obj, dict):
        return {_fix_key(k): JFixKeys(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return [JFixKeys(v) for v in obj]
    return obj
def get_nested_by_key(obj, key, *args, path=None, asExport=True,
                      join_lists=True, collect=tuple()):
    """Tree-walk hook: append values found under *key* to *collect*.

    NOTE(review): callers must pass a mutable list for collect — the
    tuple() default would raise AttributeError on .append, presumably on
    purpose to force an explicit accumulator; confirm.
    """
    if isinstance(obj, dict) and key in obj:
        value = obj[key]
        if is_list_or_tuple(value) and join_lists:
            # flatten list values into collect one element at a time
            for v in value:
                n = json_export_type_converter(v)
                collect.append(n if n is not None and asExport else v)
        else:
            n = json_export_type_converter(value)
            collect.append(n if n is not None and asExport else value)

    return obj  # have to return this otherwise somehow everything is turned to None?
def get_nested_by_type(obj, type, *args, path=None, collect=tuple()):
    # Tree-walk hook: append any node that is an instance of *type* to
    # *collect* (which must be a mutable list, see get_nested_by_key).
    # The parameter shadows the builtin `type` — kept for interface stability.
    if isinstance(obj, type):
        collect.append(obj)

    return obj
def JApplyRecursive(function, obj, *args,
                    condense=False,
                    skip_keys=tuple(),
                    preserve_keys=tuple(),
                    path=None,
                    **kwargs):
    """ *args, **kwargs, and path= are passed to the function """
    # Depth-first map of *function* over a json-like tree of dicts/lists.
    # skip_keys: subtrees dropped entirely; preserve_keys: subtrees passed
    # through untouched; condense: prune None and empty containers after
    # mapping children but before applying *function* to the container.
    def testx(v):
        # keep v unless it is None or an empty list/tuple/dict
        return (v is not None and
                not (not v and
                     (is_list_or_tuple(v) or isinstance(v, dict))))

    if path is None:
        path = []

    if isinstance(obj, dict):
        out = {k: JApplyRecursive(function, v, *args,
                                  condense=condense,
                                  skip_keys=skip_keys,
                                  preserve_keys=preserve_keys,
                                  path=path + [k],
                                  **kwargs)
               if k not in preserve_keys else v
               for k, v in obj.items() if k not in skip_keys}
        if condense:
            out = {k:v for k, v in out.items() if testx(v)}

        return function(out, *args, path=path, **kwargs)

    elif is_list_or_tuple(obj):
        out = [JApplyRecursive(function, v, *args,
                               condense=condense,
                               skip_keys=skip_keys,
                               preserve_keys=preserve_keys,
                               path=path + [i],
                               **kwargs)
               for i, v in enumerate(obj)]
        if condense:
            out = [v for v in out if testx(v)]

        return function(out, *args, path=path, **kwargs)

    else:
        # leaf node: apply the function directly
        return function(obj, *args, path=path, **kwargs)
def zipeq(*iterables):
    """Like zip, but raise LengthMismatchError if the iterables disagree in
    length (detected lazily, when the shortest one runs dry)."""
    fill = object()
    try:
        padded = itertools.zip_longest(*iterables, fillvalue=fill)
    except TypeError as e:
        msg = str(iterables)[:2000]
        raise TypeError(f'One of these is not iterable {msg}') from e

    for row in padded:
        # iterables may be unsized, so length mismatch only shows up as the
        # sentinel appearing in a row
        if fill in row:
            msg = str(iterables)[:2000]
            raise exc.LengthMismatchError('Lengths do not match! '
                                          'Did you remember to box your function?\n'
                                          f'{msg}')
        yield row
class JTList:
    # Marker base class: JT() instances built from a list derive from this.
    pass
class JTDict:
    # Marker base class: JT() instances built from a dict derive from this.
    pass
def JT(blob):
    """ this is not a class but is a function hacked to work like one """
    # Build, at runtime, an anonymous type whose attributes mirror the keys
    # and structure of *blob*, so json blobs can be navigated with dot access
    # (and via .query(...) for safe traversal).
    def _populate(blob, top=False):
        # Returns class-dict entries when top=True, otherwise a property (or
        # nested JT) to attach for a single key.
        if isinstance(blob, list) or isinstance(blob, tuple):
            # TODO alternatively if the schema is uniform, could use bc here ...
            def _all(self, l=blob):  # FIXME don't autocomplete?
                # columnar view: one list per key across all dict members
                keys = set(k for b in l
                           if isinstance(b, dict)
                           for k in b)
                obj = {k:[] for k in keys}
                _list = []
                _other = []
                for b in l:
                    if isinstance(b, dict):
                        for k in keys:
                            if k in b:
                                obj[k].append(b[k])
                            else:
                                obj[k].append(None)
                    elif any(isinstance(b, t) for t in (list, tuple)):
                        _list.append(JT(b))
                    else:
                        _other.append(b)
                        for k in keys:
                            obj[k].append(None)  # super inefficient

                if _list:
                    obj['_list'] = JT(_list)

                if obj:
                    j = JT(obj)
                else:
                    j = JT(blob)

                if _other:
                    #obj['_'] = _other  # infinite, though lazy
                    setattr(j, '_', _other)

                setattr(j, '_b', blob)
                #lb = len(blob)
                #setattr(j, '__len__', lambda: lb)  # FIXME len()
                return j

            def it(self, l=blob):
                # element-wise iteration, wrapping containers in JT
                for b in l:
                    if any(isinstance(b, t) for t in (dict, list, tuple)):
                        yield JT(b)
                    else:
                        yield b

            if top:
                # FIXME iter is non homogenous
                return [('__iter__', it), ('_all', property(_all))]
            #elif not [e for e in b if isinstance(self, dict)]:
                #return property(id)
            else:
                # FIXME this can render as {} if there are no keys
                return property(_all)
                #obj = {'_all': property(_all),
                       #'_l': property(it),}
                #j = JT(obj)
                #return j
                #nl = JT(obj)
                #nl._list = blob
                #return property(it)

        elif isinstance(blob, dict):
            if top:
                out = [('_keys', tuple(blob))]
                for k, v in blob.items():  # FIXME normalize keys ...
                    nv = _populate(v)
                    out.append((k, nv))
                    #setattr(cls, k, nv)
                return out
            else:
                return JT(blob)

        else:
            if top:
                raise exc.UnhandledTypeError('asdf')
            else:
                # scalar leaf: expose it as a read-only property
                @property
                def prop(self, v=blob):
                    return v

                return prop

    def _repr(self, b=blob):  # because why not
        return 'JT(\n' + lj(b) + '\n)'

    def query(self, *path):
        """ returns None at first failure """
        j = self
        for key in path:
            j = getattr(j, key, None)
            if j is None:
                return

        return j

    # additional thought required for how to integrate these into this
    # shambling abomination
    #adops
    #dt = DictTransformer

    #cd = {k:v for k, v in _populate(blob, True)}
    # populate the top level; flatten nested lists of (name, value) pairs
    cd = {k:v for k, v in ((a, b) for t in _populate(blob, True)
                           for a, b in (t if isinstance(t, list) else (t,)))}
    cd['__repr__'] = _repr
    cd['query'] = query
    if isinstance(blob, dict):
        type_ = JTDict
    elif isinstance(blob, list):
        type_ = JTList
    else:
        type_ = object

    nc = type('JT' + str(type(blob)), (type_,), cd)  # use object to prevent polution of ns
    #nc = type('JT' + str(type(blob)), (type(blob),), cd)
    return nc()
class AtomicDictOperations:
""" functions that modify dicts in place """
# note: no delete is implemented at the moment ...
# in place modifications means that delete can loose data ...
__empty_node_key = object()
@staticmethod
def apply(function, *args,
source_key_optional=False,
extra_error_types=tuple(),
failure_value=None):
error_types = (exc.NoSourcePathError,) + extra_error_types
try:
return function(*args)
except error_types as e:
if not source_key_optional:
raise e
else:
logd.debug(e)
return failure_value
except exc.LengthMismatchError as e:
raise e
@staticmethod
def add(data, target_path, value, fail_on_exists=True, update=False):
""" Note on semantics when target_path contains the type int.
Normally when adding a path all the parents are added because
we are expecting a direct path down. However, if the path
contains int then it implicitly expects the list to alread
exist. Therefore any failure on the way TO a list will
immediately abort and not add the keys to the non-existent list.
This is consistent with the approach where keys are not required
but if their value is a list it must not be empty. Thus we abort
so that we don't go around creating a bunch of empty lists that
will show up later as errors when validating the schema. """
# type errors can occur here ...
# e.g. you try to go to a string
if not [_ for _ in (list, tuple) if isinstance(target_path, _)]:
msg = f'target_path is not a list or tuple! {type(target_path)}'
raise TypeError(msg)
if False and target_path == ['@context', '@base']:
# use to debug TargetPathExistsError issues
if '@tracker' not in data:
data['@tracker'] = []
try:
raise BaseException('tracker')
except BaseException as e:
data['@tracker'].append(e)
if '@context' in data and '@base' in data['@context']:
log.critical(f'target present {data["id"]}')
else:
log.critical(f'target not present {data["id"]}')
target_prefixes = target_path[:-1]
target_key = target_path[-1]
target = data
is_subpath_add = int in target_path
for i, target_name in enumerate(target_prefixes):
if target_name is int: # add same value to all objects in list
if not is_list_or_tuple(target):
msg = (f'attempt to add to all elements of not a list '
f'{type(target)} target_path was {target_path} '
f'target_name was {target_name}')
raise TypeError(msg)
# LOL PYTHON namespaces
[AtomicDictOperations.add(subtarget, target_path[i + 1:], value)
for subtarget in target]
return # int terminates this level of an add
if target_name not in target: # TODO list indicies XXX that is really append though ...
if is_subpath_add:
# if we are targeting objects in a list for addition
# abort the first time we would have to create a key
# because we will eventually create an empty list
# which we won't be able to add anything to and will
# likely cause schema validation errors
return
target[target_name] = {}
target = target[target_name]
if update:
pass
elif fail_on_exists and target_key in target:
msg = f'A value already exists at path {target_path} in\n{lj(data)}'
raise exc.TargetPathExistsError(msg)
target[target_key] = value
@classmethod
def update(cls, data, target_path, value):
cls.add(data, target_path, value, update=True)
@classmethod
def get(cls, data, source_path, on_failure=None, on_failure_func=None):
""" get stops at lists because the number of possible issues explodes
and we don't hand those here, if you encounter that, use this
primitive to get the list, then use it again on the members in
the function making the call where you have the information needed
to figure out how to handle the error """
try:
source_key, node_key, source = cls._get_source(data, source_path)
return source[source_key]
except Exception as e:
if on_failure_func is not None:
return on_failure_func(source_path)
elif on_failure is not None:
return on_failure
else:
raise e
@classmethod
def pop(cls, data, source_path):
""" allows us to document removals """
source_key, node_key, source = cls._get_source(data, source_path)
if isinstance(source, tuple):
value = source[source_key]
new_node = tuple(v for i, v in enumerate(source) if i != source_key)
parent_source_key, parent_node_key, parent_source = cls._get_source(data, source_path[:-1])
assert node_key == parent_source_key
# FIXME will fail if parent_source is a tuple
parent_source[node_key] = new_node
return value
else:
return source.pop(source_key)
@classmethod
def copy(cls, data, source_path, target_path):
cls._copy_or_move(data, source_path, target_path)
@classmethod
def move(cls, data, source_path, target_path):
cls._copy_or_move(data, source_path, target_path, move=True)
    @staticmethod
    def _get_source(data, source_path):
        """ Generator yielding exactly three values, normally consumed as
            ``source_key, node_key, source = cls._get_source(data, path)``:

            1. the final key of ``source_path``
            2. the key under which ``source`` sits in its parent (used by
               move), or a private sentinel when the path has no prefix
            3. the validated container that holds ``source_key``
        """
        #print(source_path, target_path)
        source_prefixes = source_path[:-1]
        source_key = source_path[-1]
        yield source_key  # yield this because we don't know if move or copy
        source = data
        for node_key in source_prefixes:
            if isinstance(source, dict):
                if node_key in source:
                    source = source[node_key]
                else:
                    # don't move if no source
                    msg = f'did not find {node_key!r} in {tuple(source.keys())}'
                    raise exc.NoSourcePathError(msg)
            elif is_list_or_tuple(source):
                if not isinstance(node_key, int):
                    raise TypeError(f'Wrong type for node_key {type(node_key)} {node_key}. '
                                    'Expected int.')
                source = source[node_key]  # good good let the index errors flow through you
            else:
                raise TypeError(f'Unsupported type {type(source)} for {lj(source)}')

        # for move
        yield (node_key if source_prefixes else  # FIXME {'type': {'type': {'type': sigh}}}
               AtomicDictOperations.__empty_node_key)

        if isinstance(source, dict):
            if source_key not in source:
                try:
                    msg = f'did not find {source_key!r} in {tuple(source.keys())}'
                    raise exc.NoSourcePathError(msg)
                except AttributeError as e:
                    # source lacked .keys(), i.e. it was not actually a dict-like
                    raise TypeError(f'value at {source_path} has wrong type!{lj(source)}') from e
                #log.debug(f'{source_path}')
        elif is_list_or_tuple(source):
            if not isinstance(source_key, int):
                msg = (f'path {source_path} indicates that schema does not '
                       'expect an array here')
                raise exc.BadDataError(msg)
            try:
                # probe only; the caller does the real access
                source[source_key]
            except IndexError as e:
                msg = f'There is not a {source_key}th value in {lj(source)}'
                raise exc.NoSourcePathError(msg) from e
        else:
            raise TypeError(f'Unsupported type {type(source)} for {lj(source)}')

        yield source
    @classmethod
    def _copy_or_move(cls, data, source_path, target_path, move=False):
        """ if exists ...

        Shared implementation of copy and move: fetch the value at
        ``source_path`` (popping it when ``move=True``, deep copying
        otherwise) and add it at ``target_path``.
        """
        source_key, node_key, source = cls._get_source(data, source_path)
        # do not catch errors here, deal with that in the callers that people use directly
        if move:
            _parent = source  # incase something goes wrong
            source = source.pop(source_key)
        else:
            source = source[source_key]
            if source != data:  # this should .. always happen ???
                source = copy.deepcopy(source)  # FIXME this will mangle types e.g. OntId -> URIRef
                # copy first then modify means we need to deepcopy these
                # otherwise we would delete original forms that were being
                # saved elsewhere in the schema for later
            else:
                raise BaseException('should not happen?')

        try:
            cls.add(data, target_path, source)
        finally:
            # FIXME I have no idea why this was running here but it causes some
            # weird bugs
            # this will change key ordering but
            # that is expected, and if you were relying
            # on dict key ordering HAH
            #if move:
            #breakpoint()
            #if move and node_key is not AtomicDictOperations.__empty_node_key:
            #_parent[node_key] = source
            pass
adops = AtomicDictOperations()
class _DictTransformer:
    """ transformations from rules

    Namespace of static/class methods that apply declarative transformation
    rules (add / get / pop / delete / copy / move / update / derive / lift /
    subpipeline) to nested dict/list blobs. """
@staticmethod
def BOX(function):
""" Combinator that takes a function and returns a version of
that function whose return value is boxed as a tuple.
This makes it _much_ easier to understand what is going on
rather than trying to count commas when trying to count
how many return values are needed for a derive function """
@wraps(function)
def boxed(*args, **kwargs):
return function(*args, **kwargs),
return boxed
@staticmethod
def add(data, adds):
""" adds is a list (or tuples) with the following structure
[[target-path, value], ...]
"""
for target_path, value in adds:
adops.add(data, target_path, value)
@staticmethod
def update(data, updates, source_key_optional=False):
""" updates is a list (or tuples) with the following structure
[[path, function], ...]
"""
for path, function in updates:
if path == ['metadata_file', int, 'contents', 'manifest_records', int, 'software_rrid']:
breakpoint()
if int in path: # unlike adops.add, this has to be implemented in DT
pivot = path.index(int)
before = path[:pivot]
after = path[pivot + 1:]
try:
collection = adops.get(data, before)
except exc.NoSourcePathError as e:
if source_key_optional:
continue
else:
raise e
assert is_list_or_tuple(collection)
if pivot + 1 == len(path):
# have to update list in place not just its elements
# because int is the terminal element in the path
# NOTE this is NOT functional, this modifies in place
_DictTransformer.update(
collection,
# fun rewite here
[[[i], function] for i in range(len(collection))],
source_key_optional=source_key_optional)
else:
# nominally safe to run in parallel here
# if python could actually do it
for obj in collection:
_DictTransformer.update(
obj,
[[after, function]], # construct mini updates spec
source_key_optional=source_key_optional)
continue
try:
value = adops.get(data, path)
except exc.NoSourcePathError as e:
if source_key_optional:
continue
else:
raise e
new = function(value)
if isinstance(new, GeneratorType):
new = tuple(new)
adops.update(data, path, new) # this will fail if data is immutable
@staticmethod
def get(data, gets, source_key_optional=False, on_failure_func=None):
""" gets is a list with the following structure
[source-path ...] """
# FIXME we need a way to pass on_failure
for source_path in gets:
yield adops.apply(adops.get, data, source_path, None, on_failure_func,
source_key_optional=source_key_optional)
@staticmethod
def pop(data, pops, source_key_optional=False):
""" pops is a list with the following structure
[source-path ...] """
for source_path in pops:
yield adops.apply(adops.pop, data, source_path,
source_key_optional=source_key_optional)
@staticmethod
def delete(data, deletes, source_key_optional=False):
""" delets is a list with the following structure
[source-path ...]
THIS IS SILENT YOU SHOULD USE pop instead!
The tradeoff is that if you forget to express
the pop generator it will also be silent until
until schema catches it """
for source_path in deletes:
adops.pop(data, source_path)
@staticmethod
def copy(data, copies, source_key_optional=False): # put this first to clarify functionality
""" copies is a list wth the following structure
[[source-path, target-path] ...]
"""
for source_path, target_path in copies:
# don't need a base case for thing?
# can't lift a dict outside of itself
# in this context
adops.apply(adops.copy, data, source_path, target_path,
source_key_optional=source_key_optional)
    @staticmethod
    def move(data, moves, source_key_optional=False):
        """ moves is a list with the following structure
            [[source-path, target-path] ...]

            Paths may contain the type ``int`` as a pivot; when BOTH paths
            contain a pivot the move is applied pairwise between the two
            equal-length lists found at the pivot points.
        """
        for source_path, target_path in moves:
            if int in source_path and int in target_path:
                if source_path[-1] is int:
                    msg = ('It is not meaningful to move each element of a '
                           'source path individually.')
                    # FIXME we may not need to error here we may just
                    # be able to drop the int at the end and ignore it
                    raise TypeError(msg)

                # one way to implement this is to walk to the first
                # pivot in the source and get that list, then to walk
                # to the first pivot in the target and get that list
                # and then do pairwise moves between the lists, noting
                # that the lengths must match, which we will check
                # beforehand because we know we aren't dealing with an
                # arbitrary length stream
                pbac = []  # (pivot, before, after, collection) per path
                for i, path in enumerate((source_path, target_path)):
                    pivot = path.index(int)
                    before = path[:pivot]
                    after = path[pivot + 1:]
                    try:
                        collection = adops.get(data, before)
                        # FIXME behavior on missing keys in the target
                        # there isn't an obviously correct solution
                        # here but partial mutation is pretty much the
                        # only thing we can do and have to deal with
                        # fact that sometimes an error will leave the
                        # transformation in a partially transformed
                        # state
                    except exc.NoSourcePathError as e:
                        if i:  # target_path case
                            msg = ('TODO right now we only vary 1 element at '
                                   'a time but can eventually vary more')
                            raise NotImplementedError(msg)
                            # NOTE(review): the code below this raise is
                            # currently unreachable; kept pending a decision
                            # on auto-creating the target structure
                            # a this point we know we have to add all
                            # the target structure
                            if not after:
                                msg = ('How to move into a non-existent list? '
                                       'Have you considered using derive?')
                                # closer to derive I would think
                                raise NotImplementedError(msg)

                            #s_piv, s_bef, s_aft, source = pbac[0]
                            #collection = [
                            #_DictTransformer.pop(d, s_aft)
                            #]
                            # construct empty types matching the
                            # source collection types FIXME this is
                            # wrong, it should be the type of the
                            # first element of after
                            # FIXME pretty sure this is incorrect
                            _len_s = len(pbac[0][-1])
                            _nt = after[0]
                            if _nt is int:  # list in list case
                                raise NotImplementedError('please no')
                            elif isinstance(_nt, str):
                                collection = [{} for _ in range(_len_s)]
                            elif isinstance(_nt, int):
                                # I'm pretty sure indexed target lists
                                # are just bad all around in this case
                                # because it would overwrite a value
                                msg = 'indexed target lists must already exist!'
                                raise TypeError(msg)

                            adops.add(data, before, collection)
                        elif source_key_optional:
                            break  # break out of inner loop we are done here
                        else:
                            raise e

                    assert is_list_or_tuple(collection)
                    pbac.append((pivot, before, after, collection))
                else:
                    # only run if we don't break
                    ((s_piv, s_bef, s_aft, source),
                     (t_piv, t_bef, t_aft, target)) = pbac
                    assert len(source) == len(target)
                    s_key, t_key = object(), object()
                    # transform two lists from paired holes into a single
                    # list of dicts with a source and target key and
                    # rewrite the move rule to move the after from source
                    # key to target key, this of course mutates in place
                    log.debug(pbac)
                    _sigh = [_DictTransformer.move(
                        {s_key: s,
                         t_key: t},
                        [[[s_key, *s_aft],
                          [t_key, *t_aft]]],
                        source_key_optional=source_key_optional)
                             for s, t in zip(source, target)]
                    # NOTE we can't continue here, because there may be a
                    # move at this point of the path as well XXX TODO
                    # determing the semantics for multi-level moves in the
                    # presences of a pivot is more than I'm up for right
                    # now, so just move keys inside the same object and
                    # let's not teleport them between lists of objects for

                continue
            elif int in source_path or int in target_path:
                msg = ('source_path and target_path must have the same holes')
                raise TypeError(msg)

            adops.apply(adops.move, data, source_path, target_path,
                        source_key_optional=source_key_optional)
    @classmethod
    def derive(cls, data, derives, source_key_optional=True, empty='CULL'):
        """ [[[source-path, ...], function, [target-path, ...], on_failure_func], ...]

            ``empty`` selects handling of empty derived values:
            'CULL' (default), 'OK', or 'ERROR' (raise).
            NOTE(review): with empty='OK' the predicate below also returns
            True for non-empty values — verify the intended semantics. """
        # if you have source key option True and empty='OK' you will get loads of junk
        allow_empty = empty == 'OK' and not empty == 'CULL'
        error_empty = empty == 'ERROR'
        def empty(value):
            # intentionally shadows the ``empty`` mode string above; the
            # mode flags were already extracted into allow_empty/error_empty
            empty = (value is None or
                     hasattr(value, '__iter__')
                     and not len(value))
            if empty and error_empty:
                raise ValueError(f'value to add may not be empty!')
            return empty or allow_empty and not empty

        failure_value = tuple()
        class Sigh(Exception): """ SIGH """
        for source_paths, derive_function, target_paths, *on_failure_func in derives:
            # FIXME zipeq may cause adds to modify in place in error?
            # except that this is really a type checking thing on the function
            if on_failure_func:
                off, = on_failure_func
            else:
                def off(sp): raise Sigh(sp)

            def defer_get(*get_args):
                """ if we fail to get args then we can't gurantee that
                    derive_function will work at all so we wrap the lot """
                # we have to pass source_key_optional explicitly here because apply
                # used below captures source_key_optional and can't pass it along
                # because there is no way to tell that the callee function also wants it
                # however we know that we do want it here, on_failure_func allows us to
                # control cases where we want multi-arity functions to not fail immeidately
                try:
                    args = tuple(cls.get(
                        *get_args,
                        source_key_optional=source_key_optional,
                        on_failure_func=off))
                except Sigh as e:
                    # we log here because the default is to fail silently
                    # and continue of a source path is missing which we
                    # may not want for nairy function
                    if len(get_args[1]) > 1:
                        msg = ('nairy function failed to get source path! '
                               f'{e} {derive_function}')
                        logd.critical(msg)

                    raise exc.NoSourcePathError(source_paths) from e

                return derive_function(*args)

            def express_zip(*zip_args):
                return tuple(zipeq(*zip_args))

            try:
                if not target_paths:
                    # allows nesting
                    adops.apply(defer_get, data, source_paths,
                                source_key_optional=source_key_optional)
                    continue

                cls.add(data,
                        ((tp, v) for tp, v in
                         adops.apply(express_zip,
                                     target_paths,
                                     adops.apply(defer_get, data, source_paths,
                                                 source_key_optional=source_key_optional),
                                     source_key_optional=source_key_optional,
                                     extra_error_types=(TypeError,),
                                     failure_value=tuple())
                         if not empty(v)))
            except TypeError as e:
                log.error('wat')
                idmsg = data['id'] if 'id' in data else ''
                raise TypeError(f'derive failed\n{source_paths}\n'
                                f'{derive_function}\n{target_paths}\n'
                                f'{idmsg}\n') from e
@staticmethod
def _derive(data, derives, source_key_optional=True, allow_empty=False):
# OLD
""" derives is a list with the following structure
[[[source-path, ...], derive-function, [target-path, ...]], ...]
"""
# TODO this is an implementaiton of copy that has semantics for handling lists
for source_path, function, target_paths in derives:
source_prefixes = source_path[:-1]
source_key = source_path[-1]
source = data
failed = False
for i, node_key in enumerate(source_prefixes):
log.debug(lj(source))
if node_key in source:
source = source[node_key]
else:
msg = f'did not find {node_key} in {source.keys()}'
if not i:
log.error(msg)
failed = True
break
raise exc.NoSourcePathError(msg)
if isinstance(source, list) or isinstance(source, tuple):
new_source_path = source_prefixes[i + 1:] + [source_key]
new_target_paths = [tp[i + 1:] for tp in target_paths]
new_derives = [(new_source_path, function, new_target_paths)]
for sub_source in source:
_DictTransformer.derive(sub_source, new_derives,
source_key_optional=source_key_optional)
return # no more to do here
if failed:
continue # sometimes things are missing we continue to others
if source_key not in source:
msg = f'did not find {source_key} in {source.keys()}'
if source_key_optional:
return logd.info(msg)
else:
raise exc.NoSourcePathError(msg)
source_value = source[source_key]
new_values = function(source_value)
if len(new_values) != len(target_paths):
log.debug(f'{source_paths} {target_paths}')
raise TypeError(f'wrong number of values returned for {function}\n'
f'was {len(new_values)} expect {len(target_paths)}')
#temp = b'__temporary'
#data[temp] = {} # bytes ensure no collisions
for target_path, value in zip(target_paths, new_values):
if (not allow_empty and
(value is None or
hasattr(value, '__iter__') and not len(value))):
raise ValueError(f'value to add to {target_path} may not be empty!')
adops.add(data, target_path, value, fail_on_exists=True)
#heh = str(target_path)
#data[temp][heh] = value
#source_path = temp, heh # hah
#self.move(data, source_path, target_path)
#data.pop(temp)
@classmethod
def subpipeline(cls, data, runtime_context, subpipelines, update=True,
source_key_optional=True, lifters=None):
"""
[[[[get-path, add-path], ...], pipeline-class, target-path], ...]
NOTE: this function is a generator, you have to express it!
"""
class DataWrapper:
def __init__(self, data):
self.data = data
prepared = []
for get_adds, pipeline_class, target_path in subpipelines:
selected_data = {}
ok = True
for get_path, add_path in get_adds:
try:
value = adops.get(data, get_path)
if add_path is not None:
adops.add(selected_data, add_path, value)
else:
selected_data = value
except exc.NoSourcePathError as e:
if source_key_optional:
yield get_path, e, pipeline_class
ok = False
break # breaks the inner loop
else:
raise e
if not ok:
continue
log.debug(lj(selected_data))
prepared.append((target_path, pipeline_class, DataWrapper(selected_data),
lifters, runtime_context))
function = adops.update if update else adops.add
for target_path, pc, *args in prepared:
p = pc(*args)
if target_path is not None:
try:
function(data, target_path, p.data)
except Exception as e:
import inspect
if isinstance(pc, object):
pi, pc = pc, pc.__class__
try:
__file = inspect.getsourcefile(pc)
__line = ' line ' + inspect.getsourcelines(pc)[-1]
except TypeError as e2:
__file = f'<Thing that is not defined in a file: {pc}>'
__line = ''
if hasattr(p, 'path'):
__path = f'"{p.path}"'
else:
__path = 'unknown input'
raise exc.SubPipelineError(
f'Error while processing {p}.data for\n{__path}\n'
f'{__file}{__line}') from e
else:
p.data # trigger the pipeline since it is stateful
yield p
@staticmethod
def lift(data, lifts, source_key_optional=True):
"""
lifts are lists with the following structure
[[path, function], ...]
the only difference from derives is that lift
overwrites the underlying data (e.g. a filepath
would be replaced by the contents of the file)
"""
for path, function in lifts:
try:
old_value = adops.get(data, path)
except exc.NoSourcePathError as e:
if source_key_optional:
logd.exception(str(type(e)))
continue
else:
raise e
new_value = function(old_value)
adops.add(data, path, new_value, fail_on_exists=False)
DictTransformer = _DictTransformer()
def copy_all(source_parent, target_parent, *fields):
    """ Build copy specs [[source-path, target-path], ...] placing each
        field under both the source and target parent paths. """
    specs = []
    for field in fields:
        specs.append([source_parent + [field], target_parent + [field]])
    return specs
def normalize_tabular_format(project_path):
    """ Convert every .xlsx file under ``project_path`` to a sibling .tsv
        file using Xlsx2csv (tab delimited, utf-8, empty lines skipped). """
    kwargs = {
        'delimiter' : '\t',
        'skip_empty_lines' : True,
        'outputencoding': 'utf-8',
    }
    # NOTE(review): in xlsx2csv a sheetid of 0 selects all sheets rather
    # than the first — confirm this matches the intent here
    sheetid = 0
    for xf in project_path.rglob('*.xlsx'):
        xlsx2csv = Xlsx2csv(xf, **kwargs)
        with open(xf.with_suffix('.tsv'), 'wt') as f:
            try:
                xlsx2csv.convert(f, sheetid)
            except SheetNotFoundException as e:
                # conversion is best effort; log and keep going
                log.warning(f'Sheet weirdness in {xf}\n{e}')
def extract_errors(thing, path=None):
    """ recursively extract errors

        Walks dicts and lists yielding (path-tuple, error) pairs for every
        entry found under an 'errors' key anywhere in ``thing``. """
    path = [] if path is None else path
    if isinstance(thing, dict):
        for key, value in thing.items():
            if key == 'errors':
                for error in value:
                    yield tuple(path), error
            else:
                yield from extract_errors(value, path + [key])
    elif isinstance(thing, list):
        for index, value in enumerate(thing):
            yield from extract_errors(value, path + [index])
def get_all_errors(_with_errors):
    """ A better and easier to interpret measure of completeness.

        Returns every (path, error) pair found anywhere in the blob. """
    # TODO deduplicate by tracing causes
    # TODO if due to a missing required report expected value of missing steps
    return list(extract_errors(_with_errors))
def get_by_invariant_path(errors):
    """ Group errors by their path with list indexes lifted to the ``int``
        type so that paths with the same structure compare equal. """
    by_invariant_path = {}
    for error in errors:
        invariant = tuple(int if isinstance(element, int) else element
                          for element in error['path'])
        by_invariant_path.setdefault(invariant, []).append(error)
    return by_invariant_path
def make_path_error_report(by_invariant_path):
    """ Summarize grouped errors as {json-pointer: {error_count, messages}}.

        # FIXME to obtain the full final path you have to know where,
        # if anywhere, the schema being validated is located in the output
    """
    path_error_report = {}
    for invariant_path, these_errors in by_invariant_path.items():
        # -1 to indicate all instead of *
        pointer = JPointer.fromList(['-1' if element is int else str(element)
                                     for element in invariant_path])
        path_error_report[pointer] = {
            'error_count': len(these_errors),
            'messages': sorted(set(e['message'] for e in these_errors)),
        }
    return path_error_report
def hashable_converter(value):
    """ Recursively convert ``value`` into a hashable equivalent: dicts
        become frozensets of (key, converted-value) pairs and other
        unhashable iterables become tuples; hashable values pass through
        unchanged.  HACK: will fail on unhashable non-iterables. """
    try:
        hash(value)
    except TypeError:
        if isinstance(value, dict):
            return frozenset((key, hashable_converter(inner))
                             for key, inner in value.items())
        # HACK will fail on non iterables
        return tuple(hashable_converter(element) for element in value)
    return value
def condense_over_stage(errors):
    """ condense shadowed errors into their caster

        Errors that are identical except for their pipeline_stage are
        merged into a single error carrying the list of stages that
        produced it plus a total_stage_errors count. """
    dd = defaultdict(list)
    for error in errors:
        #log.critical(error)
        # hashable form so structurally equal errors collide in the dict
        new_error = {k:hashable_converter(v) for k, v in error.items()}
        #copy.deepcopy(error) # FIXME why the heck was this here !?
        new_error.pop('pipeline_stage', None)
        if 'schema_path' in new_error:  # XXX non json schema errors
            sp = new_error['schema_path']
            if 'inputs' in sp:
                # account for the fact that inputs are checked twice right now
                index = sp.index('inputs')
                new_error['schema_path'] = sp[index + 3:]
                # FIXME somehow the path and schema_path get misaligned

        dd[hashable_converter(new_error.items())].append(error)

    merged = dict(dd)
    compacted = []
    for frozen_new_error, these_errors in merged.items():
        if len(these_errors) > 1:
            # same error from multiple stages: keep one copy, record all stages
            stages = [e['pipeline_stage'] for e in these_errors]
            error = copy.deepcopy(these_errors[0])
            error['pipeline_stage'] = stages
            error['total_stage_errors'] = len(these_errors)
            compacted.append(error)
        else:
            compacted.extend(these_errors)

    return compacted, merged  # for maximum confusion
def merge_error_paths(path_errors):
    """ Prefix each error's own 'path' (if any) with the path it was
        extracted from; yields deep copies so the originals are never
        mutated. """
    for prefix, original in path_errors:
        error = copy.deepcopy(original)
        # XXX non json schema errors may not carry a 'path' at all
        suffix = list(error['path']) if 'path' in error else []
        error['path'] = list(prefix) + suffix
        yield error
def compact_errors(path_errors):
    """ compact repeated errors in lists by lifting the list index
        to the int type, making it constant over all indexes and
        making it possible to identify paths with the same structure """
    errors = list(merge_error_paths(path_errors))
    condensed, merged = condense_over_stage(errors)
    by_invariant_path = get_by_invariant_path(condensed)
    path_error_report = make_path_error_report(by_invariant_path)
    compacted = []
    for ipath, these_errors in by_invariant_path.items():
        if int in ipath:
            # per-index repeats collapse into one error whose path carries
            # the sorted list of affected indexes
            index = ipath.index(int)
            ints = sorted(set(e['path'][index] for e in these_errors))
            error = copy.deepcopy(these_errors[0])
            error['path'][index] = ints
            error['total_errors'] = len(these_errors)
            compacted.append(error)
        else:
            compacted.extend(these_errors)

    return compacted, path_error_report, by_invariant_path, errors
class JPointer(str):
    """ a class to mark json pointers for resolution

        Pointers look like '#/a/b' and can be dereferenced against or
        written into a nested blob via adops. """

    @staticmethod
    def pathToSchema(path):
        # search all possible valid paths for information about the
        # current path in the schema ... useful to check just a single
        # path in an object against a schema ... there is surely a better way
        raise NotImplementedError("This shouldn't be implemented here.")

    @staticmethod
    def pathFromSchema(schema_path):
        # generator converting a json-schema path (properties/items/oneOf...)
        # into a data path, lifting array positions to the int type
        gen = (e for e in schema_path)
        while True:
            try:
                element = next(gen)
                if element in ('oneOf', 'anyOf', 'allOf'):
                    # skip the branch index that follows the combinator
                    next(gen)
                    continue

                if element == 'properties':
                    yield next(gen)
                elif element == 'items':
                    # NOTE it is ok to use int in cases like this because even
                    # though it is in principle ambiguous with a case where
                    # someone happens to be using the python function int
                    # as a key in a dict they would never be able to serialize
                    # that to json without some major conversions, so I am
                    # ruling that it is safe to lift to type here since this
                    # is JPointer not rando dict pointer
                    yield int
            except StopIteration:
                break

    @classmethod
    def fromList(cls, iterable):
        # build a pointer like '#/a/b' from an iterable of string elements
        return cls('#/' + '/'.join(iterable))

    def asList(self):
        # split back into elements; the leading element is '#'
        return self.split('/')

    def dereference(self, blob):
        # return the value this pointer refers to inside blob
        sharp, *path = self.asList()
        assert sharp == '#'
        return adops.get(blob, path)

    def update(self, blob, value):
        # overwrite the value this pointer refers to inside blob
        sharp, *path = self.asList()
        assert sharp == '#'
        adops.update(blob, path, value)
def resolve_context_runtime(specs, *blobs):
    """ [source-path lambda-function target-path]

        For each spec, read the source pointer out of every blob, run the
        function over it, and write the result back at the target pointer. """
    # TODO see if we need multi source multi target
    for source, function, target in specs:
        source_pointer = JPointer(source)
        target_pointer = JPointer(target)
        for blob in blobs:
            new_value = function(source_pointer.dereference(blob))
            target_pointer.update(blob, new_value)
# register idlib classes for fromJson
# (plain loop instead of a side-effect-only list comprehension)
for _idlib_cls in (idlib.Ror, idlib.Doi, idlib.Orcid, idlib.Pio, idlib.Rrid, OntTerm):
    register_type(_idlib_cls, _idlib_cls.__name__)

# register other types for fromJson
register_type(IdentityJsonType, 'BlackfynnRemoteMetadata')  # legacy type string
register_type(IdentityJsonType, 'BlackfynnDatasetData')
register_type(IdentityJsonType, 'PennsieveDatasetData')  # FIXME TODO fully abstract this

# handle old json export that won't stop due to missing BRM
# XXX let this be a reminder to never allow foreign json in unless it
# comes wrapped in an outer blob that can properly type it or where we
# can reasonably inject a type so that we can stop further recursion
# because it is (for now) json raw json
register_type(None, 'publication')
register_type(None, 'revision')
register_type(None, 'embargo')
register_type(None, 'removal')

# FIXME bad that we need to call these here
# needed in derives and needed when loading from ir
register_type(None, 'SampleDirs')
register_type(None, 'SubjectDirs')
|
<?php
namespace App\Models;
trait HasRoles
{
    /**
     * Roles that belong to the current user.
     *
     * @return \Illuminate\Database\Eloquent\Relations\BelongsToMany
     */
    public function roles()
    {
        return $this->belongsToMany(Role::class);
    }

    /**
     * Determine whether the user has the given role.
     *
     * $user->hasRole('manager') or $user->hasRole($collections)
     *
     * @param \Illuminate\Database\Eloquent\Collection|string $role
     * @return boolean
     */
    public function hasRole($role)
    {
        if (is_string($role)) {
            return $this->roles->contains('name', $role);
        }

        // collection case: true when any given role intersects the user's roles
        return (bool) $role->intersect($this->roles)->count();
    }

    /**
     * Attach the role with the given name to the user.
     * Throws a ModelNotFoundException when no role matches the name.
     *
     * @param string $role
     * @return void
     */
    public function actAs(string $role)
    {
        $this->roles()->save(
            Role::whereName($role)->firstOrFail()
        );
    }

    /**
     * Attach the given role model to the user.
     *
     * @param Role $role
     * @return void
     */
    public function assignRole(Role $role)
    {
        $this->roles()->attach($role);
    }

    /**
     * Detach the given role model from the user.
     *
     * @param Role $role
     * @return void
     */
    public function cancelRole(Role $role)
    {
        $this->roles()->detach($role);
    }
}
|
<?php
/**
 * File upload field
 * @author abiusx
 *
 */
class jFormUpload extends jFormWidget
{
    /** Whether a Put() call has successfully stored the uploaded file. */
    private $fileIsSaved=false;

    /**
     * Move the uploaded file for this field to $Where.
     *
     * @param string $Where destination path for the uploaded file
     * @return bool|int true on success, false when nothing was uploaded or
     *                  the move failed, or a PHP UPLOAD_ERR_* code on error
     */
    function Put($Where)
    {
        if (!isset($_FILES[$this->Name()])) return false;
        $file=$_FILES[$this->Name()];
        if ($file["error"] > 0) return $file['error'];
        if ($this->MaxSize !== null && $file['size'] > $this->MaxSize) return UPLOAD_ERR_FORM_SIZE;
        $result = move_uploaded_file($file['tmp_name'], $Where);
        // Only mark the file as saved when the move actually succeeded;
        // previously the flag was set before any of the checks ran.
        $this->fileIsSaved = ($result === true);
        return $result;
    }

    /** Maximum allowed upload size in bytes (null means unlimited). */
    public $MaxSize=null;

    /**
     *
     * Note: you should explicitly call Put method of this object to put the uploaded file
     * @param jWidget $Parent
     * @param string $Label
     * @param int $MaxSize maximum allowed size in bytes (null for unlimited)
     *
     */
    function __construct(jWidget $Parent,$Label=null,$MaxSize=null)
    {
        parent::__construct($Parent,$Label);
        $this->MaxSize=$MaxSize;
        $Name=$this->Name();
        // Validation passes only when an upload arrived, reported no PHP
        // error, fits the size limit and the temp file really exists.
        // Uses $this->MaxSize consistently (the closure previously mixed
        // the captured constructor argument with the property).
        $this->SetValidation(function ($Data) use ($Name) {
            if (!isset($_FILES[$Name])) return false;
            $file=$_FILES[$Name];
            if ($file["error"] > 0) return $file['error'];
            if ($this->MaxSize !== null && $file['size'] > $this->MaxSize) return UPLOAD_ERR_FORM_SIZE;
            return file_exists($file['tmp_name']);
        });
    }

    /**
     * Client-supplied name of the uploaded file, or null when nothing was
     * uploaded for this field.
     */
    function Value()
    {
        if (isset($_FILES[$this->Name()]))
            return $_FILES[$this->Name()]['name'];
        else
            return null;
    }

    /**
     * Render the <input type="file"> element.
     * NOTE(review): maxlength on a file input does not limit upload size in
     * browsers; the effective limits are MAX_FILE_SIZE and the server checks.
     */
    function Present()
    {
        $this->DumpLabel();
        ?><input type="file" <?php $this->DumpAttributes();?> <?php if ($this->MaxSize!==null) echo " maxlength='".($this->MaxSize*1)."'";?> />
<?php $this->DumpDescription();
    }
}
|
#!/bin/bash
# Mount the VMware host shared folders at /mnt/hgfs.
#   allow_other: let non-root users access the mount
#   nonempty:    allow mounting over a non-empty directory
sudo vmhgfs-fuse .host:/ /mnt/hgfs/ -o allow_other,nonempty
# Propagate the mount's exit status instead of always reporting success
# (previously `exit 0` masked any failure from vmhgfs-fuse).
exit $?
|
// Copyright by Barry G. Becker, 2016-2017. Licensed under MIT License: http://www.opensource.org/licenses/MIT
package com.barrybecker4.simulation.liquid.model
/**
* Possible status of the cell. determined by what's in it.
* @author Barry Becker
*/
object CellStatus extends Enumeration {
  type CellStatus = Value
  /** No contents; rendered as ".". */
  val EMPTY: Value = Value(".")
  /** Surface cell; rendered as "S". */
  val SURFACE: Value = Value("S")
  /** Full cell; rendered as "F". */
  val FULL: Value = Value("F")
  /** Obstacle cell; rendered as "o". */
  val OBSTACLE: Value = Value("o")
  /** Isolated cell; rendered as "I". */
  val ISOLATED: Value = Value("I")
}
|
---
layout: default
title: Second box
box: true
summary: Here you can enter the contents of box 2
---
|
import turtle as patel
import random as coder
def star(x, y, color, side):
    """ Draw a filled star figure starting at (x, y): ten edges of length
        ``side`` with a 144-degree right turn after every other edge. """
    patel.color(color)
    patel.begin_fill()
    patel.up()
    patel.setposition(x, y)
    patel.down()
    for _ in range(5):
        patel.forward(side)
        patel.right(144)
        patel.forward(side)
    patel.end_fill()
def randoml():
    """ Random star side length in the half-open range [5, 25). """
    return coder.randrange(5, 25)
def randomll():
    """ Random (x, y) position with x in [-290, 290) and y in [-270, 270). """
    x = coder.randrange(-290, 290)
    y = coder.randrange(-270, 270)
    return x, y
# set up the drawing window
patel.title("star m")
patel.bgcolor("black")
patel.speed("fastest")
# palette each star's fill color is picked from
colors = ["red" , "orange" , "green" , "magenta" , "blue" , "yellow"]
stars= 60
# draw `stars` randomly placed, sized and colored stars
for k in range (stars):
    color = coder.choice(colors)
    side = randoml()
    x , y = randomll()
    star(x,y,color,side)
# keep the window open until the user closes it
patel.done()
|
/*
* Created by Lee Oh Hyung on 2020/10/17.
*/
package kr.ohyung.domain.entity
import kr.ohyung.domain.Entity
/**
 * Immutable value pairing a [LegalName] with its [Weather] forecast.
 *
 * @property legalName the legal (administrative) region name the forecast applies to
 * @property weather the weather information for that region
 */
data class Forecast(
    val legalName: LegalName,
    val weather: Weather
): Entity
|
/*
* Copyright 2021 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.showcase.client.selenium.locator;
import org.openqa.selenium.By;
/**
* Locators of nodes in DMN Decision navigator panel
*/
public class CommonCSSLocator implements DMNDesignerLocator {

    private static final String DECISION_NAVIGATOR_EXPAND = "docks-item-W-org.kie.dmn.decision.navigator";
    private static final String DECISION_NAVIGATOR_EXPANDED = "expanded-docks-bar-W";

    /** CSS class name matched by {@link #locator()}; immutable once constructed. */
    private final String cssLocator;

    private CommonCSSLocator(final String cssLocator) {
        this.cssLocator = cssLocator;
    }

    /**
     * Locates button for expanding the Decision Navigator panel
     * @return locator for the collapsed navigator dock item
     */
    public static CommonCSSLocator expandNavigator() {
        return new CommonCSSLocator(DECISION_NAVIGATOR_EXPAND);
    }

    /**
     * Locates the Decision Navigator dock bar once it has been expanded.
     * @return locator for the expanded navigator dock bar
     */
    public static CommonCSSLocator expandedNavigator() {
        return new CommonCSSLocator(DECISION_NAVIGATOR_EXPANDED);
    }

    /**
     * Locates the chevron button that collapses the Decision Navigator panel.
     * @return locator for the collapse button
     */
    public static CommonCSSLocator collapseDecisionNavigatorButton() {
        return new CommonCSSLocator("fa-chevron-left");
    }

    /**
     * Locates wrapper of the editor pages - Model, Documentation, Data Types.
     * @return locator for the multi page editor wrapper
     */
    public static CommonCSSLocator multiPageEditor() {
        return new CommonCSSLocator("uf-multi-page-editor");
    }

    @Override
    public By locator() {
        return By.className(cssLocator);
    }
}
|
"""
Split the hotkey string into the VK code sets.
"""
from typing import Sequence, Tuple, List, Union, Optional
from .....aid.std import i18n as _
from .....aid.std import (
ErrorReport,
ResultOrError,
create_user_error,
)
from ...general.hotkey import (
KeyCombo,
StandardKeyCode,
ModifierKeyCode,
create_modal_chain,
create_primary_chain,
)
from .keymap import (
MODIFIERS,
STR_VK_MAP,
VK_ALIASES,
)
def create_hotkey_format_error(hotkey: str, err: str, **args: Union[int, str]) -> ErrorReport:
    """ Construct the standard user-facing error report for a malformed
        hotkey expression. """
    message = _('invalid hotkey format for "{hotkey}": {err}')
    return create_user_error('parse_hotkeys', message, hotkey=hotkey, err=err, **args)
def create_master_modifier_hotkey_combo(
        master_modifier: Sequence[ModifierKeyCode],
        hotkey: str
) -> ResultOrError[Sequence[KeyCombo]]:
    """
    Parses the standard master modifier (e.g. Super+Shift) plus a key to go with it
    (e.g. up-arrow or ctrl+1).

    The master modifier keys must be pressed before any of the hotkey is pressed.
    """
    if hotkey.find('+') > 0:
        # at least one extra modifier precedes the final key
        parsed = parse_simple_modified_key(hotkey, hotkey)
        if isinstance(parsed, ErrorReport):
            return parsed
        extra_modifiers, final_key = parsed
        combined_modifiers = list(master_modifier) + list(extra_modifiers)
        return create_primary_chain(combined_modifiers, (final_key,))

    key = convert_standard_key_name(hotkey)
    if key is None:
        return create_hotkey_format_error(hotkey, 'expected zero or more modifiers plus a normal key')
    return create_primary_chain(master_modifier, (key,))
def create_master_mkey_and_sequence_combo(
        master_modifiers: Sequence[ModifierKeyCode],
        master_key: StandardKeyCode,
        hotkeys: str
) -> ResultOrError[Sequence[KeyCombo]]:
    """
    Create a modal key sequence: a master modifier + key (say, ctrl-a)
    followed by one or more normal keys.  This is similar to how the
    `screen` program works.
    """
    parsed_sequence = parse_key_sequence(hotkeys, hotkeys)
    if isinstance(parsed_sequence, ErrorReport):
        return parsed_sequence
    return create_modal_chain(master_modifiers, (master_key,), parsed_sequence)
def create_master_modifier(
        master: str
) -> ResultOrError[Sequence[ModifierKeyCode]]:
    """Creates a primary modifier combination master sequence, such as `super`
    or `super+shift`; delegates to parse_modifier_sequence."""
    return parse_modifier_sequence(master, master)
def create_master_mkey(
        modifier_key: str
) -> ResultOrError[Tuple[Sequence[ModifierKeyCode], StandardKeyCode]]:
    """Creates a master modifier key sequence, such as `super+a`;
    delegates to parse_simple_modified_key."""
    return parse_simple_modified_key(modifier_key, modifier_key)
def parse_modifier_sequence(
        original_expression: str, hotkey: str
) -> ResultOrError[Sequence[ModifierKeyCode]]:
    """
    Parses a simple sequence of modifier keys, separated by '+'.

    Returns the modifier VK codes, or an ErrorReport when the expression
    is empty or contains a non-modifier / unknown key.
    """
    keys = hotkey.split('+')
    # str.split never yields an empty list ('' -> ['']), so the original
    # `if not keys` branch was unreachable; guard the blank expression
    # explicitly so it reports the intended error instead of falling
    # through to a confusing per-key message.
    if not hotkey or not keys:
        return create_hotkey_format_error(
            original_expression, 'modifier sequence must have at least 1 modifier'
        )
    modifier_vks: List[ModifierKeyCode] = []
    for kname in keys:
        mvk = convert_modifier_key_name(kname)
        if mvk is None:
            return create_hotkey_format_error(
                original_expression,
                'modifier key must have at least 1 modifier; '
                'found normal key or unknown modifier key "{kname}"',
                kname=kname
            )
        modifier_vks.append(mvk)
    assert len(modifier_vks) >= 1
    return modifier_vks
def parse_simple_modified_key(
        original_expression: str, hotkey: str
) -> ResultOrError[Tuple[Sequence[ModifierKeyCode], StandardKeyCode]]:
    """
    Parses a simple sequence of `modifier '+' modifier '+' ... '+' key`.

    Used for the master sequence, or part of a sequence for other things.
    Returns (modifier-vk-list, final-key-vk) or an ErrorReport.
    """
    parts = hotkey.split('+')
    if len(parts) < 2:
        return create_hotkey_format_error(
            original_expression,
            'modifier key must have at least 1 modifier and exactly 1 normal key'
        )
    *modifier_names, final_name = parts
    final_vk = convert_standard_key_name(final_name)
    if final_vk is None:
        return create_hotkey_format_error(
            original_expression,
            'modifier key must have at least 1 modifier and exactly 1 normal key; '
            'found modifier key or unknown key "{key}" instead of a normal key',
            key=final_name
        )
    modifier_vks: List[ModifierKeyCode] = []
    for kname in modifier_names:
        mvk = convert_modifier_key_name(kname)
        if mvk is None:
            return create_hotkey_format_error(
                original_expression,
                # TODO localize
                'modifier key must have at least 1 modifier and exactly 1 normal key; '
                'found normal key or unknown key "{kname}" instead of a modifier',
                kname=kname
            )
        modifier_vks.append(mvk)
    assert len(modifier_vks) >= 1
    return modifier_vks, final_vk
def parse_key_sequence(
        original_expression: str, hotkey: str
) -> ResultOrError[Sequence[StandardKeyCode]]:
    """
    Parses a sequence of non-modifier keys: a '+'-separated list of key names.
    """
    names = hotkey.split('+')
    if not names:
        # str.split never returns an empty list; kept as a defensive guard.
        return create_hotkey_format_error(
            original_expression,
            'key sequence must have at least 1 normal key'
        )
    parsed: List[StandardKeyCode] = []
    for name in names:
        vk = convert_standard_key_name(name)
        if vk is None:
            return create_hotkey_format_error(
                original_expression,
                'key sequence must have at least 1 normal key; '
                'found modifier key or unknown key "{kname}" instead of a normal key',
                kname=name
            )
        parsed.append(vk)
    return parsed
def convert_modifier_key_name(name: str) -> Optional[ModifierKeyCode]:
    """Map a modifier key's text name to one VK code, or to the list of VK
    alternatives when the name is an alias.  Returns None for anything that
    is not a known modifier."""
    key = name.lower().strip()
    if key in VK_ALIASES:
        # An alias expands into every concrete VK it can stand for.
        return [STR_VK_MAP[alias] for alias in VK_ALIASES[key]]
    return STR_VK_MAP[key] if key in MODIFIERS else None
def convert_standard_key_name(name: str) -> Optional[StandardKeyCode]:
    """Map a normal (non-modifier) key's text name to its VK code; returns
    None when the name is an alias, a modifier, or unknown."""
    key = name.lower().strip()
    if key in VK_ALIASES or key in MODIFIERS:
        return None
    # .get returns None for unknown names, matching the original's behavior.
    return STR_VK_MAP.get(key)
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
/**
 * Serves the authentication-related views (client login and registration).
 *
 * NOTE(review): the class name keeps the existing "authentification" spelling
 * and lowercase first letter because routes elsewhere reference it by name.
 */
class authentificationController extends Controller
{
    /**
     * Render the client login page.
     *
     * @return \Illuminate\View\View
     */
    public function client_login(){
        return view('Authentification.client_login');
    }

    /**
     * Render the registration page.
     *
     * @return \Illuminate\View\View
     */
    public function inscription(){
        return view('Authentification.inscription');
    }
}
|
๏ปฟusing System;
namespace T4LogS.Core
{
    /// <summary>
    /// Base class for log writers: extends <see cref="T4LogSBase"/> and adds a
    /// no-op <see cref="IDisposable"/> implementation for subclasses to override.
    /// </summary>
    public class T4LogSWriteBase : T4LogSBase, IDisposable
    {
        // Presumably set when the owning writer/process has exited; internal so
        // other types in this assembly can observe it — TODO confirm usage.
        internal bool isExited = false;

        /// <summary>No-op by default; subclasses release their resources here.</summary>
        public virtual void Dispose() { }
    }
}
|
๏ปฟusing JDI.Light.Attributes;
using JDI.Light.Elements.Common;
using JDI.Light.Elements.Composite;
namespace JDI.Light.Tests.UIObjects.Sections
{
    /// <summary>
    /// Page object for the JDI site's search widget; locators target the search
    /// icon, its activated state, and the query input field.
    /// </summary>
    public class JdiSearch : Search
    {
        // Hides the base Search button with a locator specific to this page.
        [FindBy(Css = ".search>.icon-search")]
        public new Button SearchButton { get; set; }

        // The search icon once the search box has been expanded/activated.
        [FindBy(Css = ".icon-search.active")]
        public Button SearchButtonActive { get; set; }

        // Text input where the search query is typed.
        [FindBy(Css = ".search-field input")]
        public TextField SearchInput { get; set; }
    }
}
|
import React from 'react';
import {mount} from 'enzyme';
import {KeyValue} from './KeyValue';
import {Provider} from 'react-redux';
import configureStore from '../../store/configureStore';
// Real redux store; the component under test is mounted inside a Provider.
const store = configureStore();

// Minimal props satisfying KeyValue's contract; getValue is a jasmine spy so
// the mount-time fetch can be asserted.
const props = {
    keyObject: {
        id: 1,
        value: "Test value",
        key: "Test key",
        loading: false,
        edit: false
    },
    actions: {
        getValue: jasmine.createSpy()
    },
    namespace: "Namespace name",
    loading: false
};
describe('Components', () => {
describe('<KeyValue/>', () => {
it('should render KeyValue correctly', () => {
const wrapper = mount(
<Provider store={store}>
<KeyValue {...props}/>
</Provider>);
expect(wrapper.find('div .row').length).toBe(2);
expect(wrapper.find('div .col-lg-12').length).toBe(1);
expect(wrapper.find('div .container').length).toBe(1);
});
it('should get value on mount', () => {
const wrapper = mount(
<Provider store={store}>
<KeyValue {...props}/>
</Provider>
);
expect(wrapper.find(KeyValue).props().actions.getValue).toHaveBeenCalled();
});
});
});
|
package io.techery.mappery.test
import io.techery.mappery.Mappery
import io.techery.mappery.test.converter.*
import org.jetbrains.spek.api.Spek
import org.jetbrains.spek.api.dsl.describe
import org.jetbrains.spek.api.dsl.it
import org.junit.Assert.assertNotNull
import org.junit.platform.runner.JUnitPlatform
import org.junit.runner.RunWith
@RunWith(JUnitPlatform::class)
class MapperySpec : Spek({
describe("Mappery Testing") {
it("should map source class for converting") {
val mappery = Mappery.Builder()
.map(String::class.java)
.from(Int::class.java, IntToStringConverter())
.from(Double::class.java, DoubleToStringConverter())
.from(ABConverter.A::class.java, ABConverter())
.build()
var str = mappery.convert(Int.MAX_VALUE, String::class.java)
assertNotNull(str)
str = mappery.convert(ABConverter.B(), String::class.java)
assertNotNull(str)
str = mappery.convert(Double.MAX_VALUE, String::class.java)
assertNotNull(str)
}
it("should map target class for converting") {
val mappery = Mappery.Builder()
.map(String::class.java)
.to(Int::class.java, StringToIntConverter())
.to(Double::class.java, StringToDoubleConverter())
.build()
val string = "1"
val intValue = mappery.convert(string, Int::class.java)
assertNotNull(intValue)
val doubleValue = mappery.convert(string, Double::class.java)
assertNotNull(doubleValue)
}
it("should map target class to inherited classes") {
val mappery = Mappery.Builder()
.map(ModelA::class.java)
.to(ModelB::class.java, ModelAToModelBConverter())
.to(ModelC::class.java, ModelAToModelCConverter())
.build()
val a = ModelA()
val intValue = mappery.convert(a, ModelB::class.java)
assertNotNull(intValue)
val doubleValue = mappery.convert(a, ModelC::class.java)
assertNotNull(doubleValue)
}
}
})
|
import createLoadingPlugin from '@rematch/loading';
import createRematchPersist from '@rematch/persist';
import storageSession from 'redux-persist/lib/storage/session';
// Rematch loading plugin: tracks in-flight state only for the whitelisted
// calendar effect.
export const loading = createLoadingPlugin({
  whitelist: ['calendar/getHolidaysAsync'],
});

// Persists the calendar model to sessionStorage (cleared when the tab closes).
export const persistPlugin = createRematchPersist({
  whitelist: ['calendar'],
  storage: storageSession,
  version: 1,
});
|
๏ปฟangular.module("stopwatchApp", ["main.controller", "filters"])
.config(["localStorageServiceProvider", function (localStorageServiceProvider) {
localStorageServiceProvider
.setPrefix("stopwatchApp")
.setStorageType("localStorage");
}]);
|
use franklin_crypto::bellman::pairing::ff::PrimeField;
use zinc_build::ScalarType;
use crate::error::RuntimeError;
use crate::IEngine;
/// Type-level expectations used by the VM to validate scalar operand types
/// before executing an instruction.
pub trait ITypeExpectation: Sized {
    /// Returns the common type when `left` and `right` are equal, else a `TypeError`.
    fn expect_same(left: Self, right: Self) -> Result<Self, RuntimeError>;
    /// Fails with a `TypeError` unless `self` equals `expected`.
    fn assert_type(&self, expected: Self) -> Result<(), RuntimeError>;
    /// Fails unless `self` is an integer whose signedness matches `is_signed`.
    fn assert_signed(&self, is_signed: bool) -> Result<(), RuntimeError>;
    /// Bit width of this scalar type for the engine's prime field `E`.
    fn bitlength<E: IEngine>(&self) -> usize;
}
impl ITypeExpectation for ScalarType {
    /// Returns `left` when the two types are equal; `TypeError` otherwise.
    fn expect_same(left: Self, right: Self) -> Result<Self, RuntimeError> {
        if left != right {
            return Err(RuntimeError::TypeError {
                expected: left.to_string(),
                found: right.to_string(),
            });
        }
        Ok(left)
    }

    /// Fails with a `TypeError` unless `self` equals `expected`.
    fn assert_type(&self, expected: Self) -> Result<(), RuntimeError> {
        if self != &expected {
            return Err(RuntimeError::TypeError {
                expected: expected.to_string(),
                found: self.to_string(),
            });
        }
        Ok(())
    }

    /// Fails unless `self` is an integer whose signedness matches `is_signed`.
    fn assert_signed(&self, is_signed: bool) -> Result<(), RuntimeError> {
        // Whether this type is an integer matching the requested signedness.
        // Bug fix: the local previously shadowed the `is_signed` parameter, so
        // the error below always reported "unsigned integer" regardless of
        // what the caller actually required.
        let matches = match self {
            ScalarType::Field | ScalarType::Boolean => false,
            ScalarType::Integer(int_type) => int_type.is_signed == is_signed,
        };
        if !matches {
            return Err(RuntimeError::TypeError {
                expected: if is_signed {
                    "signed integer".to_owned()
                } else {
                    "unsigned integer".to_owned()
                },
                found: self.to_string(),
            });
        }
        Ok(())
    }

    /// Bit width: 1 for booleans, the declared width for integers, and the
    /// field's modulus width for `Field`.
    fn bitlength<E: IEngine>(&self) -> usize {
        match self {
            ScalarType::Boolean => 1,
            ScalarType::Integer(inner) => inner.bitlength,
            ScalarType::Field => E::Fr::NUM_BITS as usize,
        }
    }
}
|
package App::SD::CLI::Command;
use Any::Moose 'Role';
use Params::Validate qw(validate);
=head2 get_content %args
This is a helper routine for use in SD commands to enable getting records
in different ways such as from a file, on the commandline, or from an
editor. Returns the record content.
Valid keys in %args are type => str, default_edit => bool, and
prefill_props => $props_hash_ref, props_order => $order_array_ref,
footer => str, header => str.
Specifying props with prefill_props allows you to present lists of
key/value pairs (with possible default values) for a user to fill in.
If you need a specific ordering of props, specify it with
C<props_order>. Specifying C<header> and/or C<footer> allows you to
print some optional text before and/or after the list of props.
Note that you will have to post-process C<$content> from the routine
calling C<get_content> in order to extract the keys and values from
the returned text.
=cut
sub get_content {
    my $self = shift;
    my %args = @_;

    # Stop paging: this routine may spawn an editor or read STDIN directly.
    Prophet::CLI->end_pager();
    my $content;
    # Content source priority: --file argument, then --content argument, then
    # the user's editor (when requested or default_edit), otherwise STDIN.
    # Spawning an editor inside a test run is treated as fatal.
    if (my $file = $self->delete_arg('file')) {
        my ( $vol, $dir, $name ) = File::Spec->splitpath( $file );
        $content = Prophet::Util->slurp( $file );
        # Record the file's basename as the record's "name" property.
        $self->set_prop(name => $name);
    } elsif ($content = $self->delete_arg('content')) {
    } elsif ($args{default_edit} || $self->has_arg('edit')) {
        # Build the editor pre-fill: optional header, "key: value" prop lines
        # (ordered by props_order when given), optional footer.
        my $text = '';
        if (my $header = $args{header}) {
            $text .= $header;
        }
        if (my $props = $args{prefill_props}) {
            my $props_order;
            my @ordering = ($props_order = $args{props_order}) ?
                @$props_order : keys %$props;
            $text .= join "\n", map { "$_: $props->{$_}" } @ordering;
        }
        if (my $footer = $args{footer}) {
            $text .= $footer;
        }
        $content = $self->edit_text($text);

        # user aborted their text editor without changing anything; signify
        # this to the caller by returning nothing
        $content = '' if $content eq $text;
    } elsif ($ENV{IN_PROPHET_TEST_COMMAND}) {
        die "Tried to invoke an editor in a test script!";
    } else {
        print "Please type your $args{type} and press ctrl-d.\n";
        $content = do { local $/; <STDIN> };
    }

    chomp $content;
    return $content;
}
no Any::Moose;
1;
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package md.vmacari.messages;
/**
 * Sub-types of streamed messages exchanged with the device firmware.
 *
 * <p>Values are wire-encoded as their ordinal position, so the declaration
 * order must not change without a protocol change.</p>
 *
 * @author vmacari
 */
public enum MessageStreamSubtypes {

    ST_FIRMWARE_CONFIG_REQUEST, ST_FIRMWARE_CONFIG_RESPONSE, ST_FIRMWARE_RESPONSE, ST_SOUND, ST_IMAGE;

    /** Cached values() array; values() clones its array on every call. */
    private final static MessageStreamSubtypes[] enumValues = MessageStreamSubtypes.values();

    /**
     * Decodes a wire ordinal into its enum constant.
     *
     * @param value the received ordinal; may be null
     * @return the matching constant, or null when value is null or out of range
     */
    public static MessageStreamSubtypes parseInteger(Integer value) {
        // Bug fix: the original only checked the upper bound, so a negative
        // (or null) input threw instead of returning null.
        if (value == null || value < 0 || value >= enumValues.length) {
            return null;
        }
        return enumValues[value];
    }

    /**
     * Encodes an enum constant as its wire ordinal.
     *
     * @param value the constant to encode; may be null
     * @return the ordinal, or -1 when value is null (matches the original
     *         linear scan's behavior for an unmatched input)
     */
    public static Integer toInteger(MessageStreamSubtypes value) {
        // ordinal() is exactly the index within values(), so no scan is needed.
        return value == null ? -1 : value.ordinal();
    }
}
|
# frozen_string_literal: true
# Patches applied only when running under Opal (Ruby compiled to JavaScript):
# the stock parser gem relies on MRI string behaviors that differ under JS.
if RUBY_ENGINE == 'opal'
  class Parser::Lexer
    # Re-implemented so the source is unpacked into Unicode codepoints
    # up-front; JS strings are UTF-16/UCS-2, so indexing differs from MRI.
    def source_buffer=(source_buffer)
      @source_buffer = source_buffer

      if @source_buffer
        source = @source_buffer.source
        # Force UTF8 unpacking even if JS works with UTF-16/UCS-2
        # See: https://mathiasbynens.be/notes/javascript-encoding
        @source_pts = source.unpack('U*')
      else
        @source_pts = nil
      end
    end
  end

  class Parser::Lexer::Literal
    undef :extend_string

    # Opal strings cannot be mutated in place, so the original '<<' append
    # is replaced with '+=' (rebinding the buffer variable).
    def extend_string(string, ts, te)
      @buffer_s ||= ts
      @buffer_e = te

      # Patch for opal-parser, original:
      # @buffer << string
      @buffer += string
    end
  end

  class Parser::Source::Buffer
    # Memoized line splitting; appends an empty trailing line when the source
    # ends with a newline so line counts match MRI's behavior.
    def source_lines
      @lines ||= begin
        lines = @source.lines.to_a
        lines << '' if @source.end_with?("\n")

        lines.map { |line| line.chomp("\n") }
      end
    end
  end
end
|
import * as React from 'react';
import { FittedText, TextAlign } from '../../../.';
type Props = {
text: string;
};
export const H1: React.FC<Props> = ({ text }) => (
<h1>
<FittedText
text={text}
topMetric="upper"
bottomMetric="baseline"
align={TextAlign.middle}
></FittedText>
</h1>
);
export const H2: React.FC<Props> = ({ text }) => (
<h2>
<FittedText
text={text}
topMetric="upper"
bottomMetric="baseline"
align={TextAlign.middle}
></FittedText>
</h2>
);
export const H3: React.FC<Props> = ({ text }) => (
<h3>
<FittedText
text={text}
topMetric="upper"
bottomMetric="baseline"
align={TextAlign.start}
></FittedText>
</h3>
);
|
(ns pedestal-api.helpers
(:require [io.pedestal.interceptor.helpers]
[pedestal-api.swagger :as swagger]
[clojure.string :as string]))
(defmacro defhelper [helper-name]
  ;; e.g. "defhandler" -> "handler": the pedestal helper fn being wrapped.
  (let [helper-fn-name (symbol (string/replace helper-name "def" ""))]
    `(do (defmacro ~(symbol helper-name)
           ;; def-style form: (defhandler name swagger & fn-tail) defs an
           ;; interceptor annotated with the swagger metadata, named by the
           ;; current namespace plus the given name.
           [name# swagger# & args#]
           `(def ~name#
              (swagger/annotate ~swagger#
                                (@~(ns-resolve 'io.pedestal.interceptor.helpers '~helper-fn-name)
                                 (keyword (name (ns-name *ns*)) (name '~name#))
                                 (fn ~@args#)))))
         ;; function variant: (handler name swagger & args) -> annotated interceptor.
         (defn ~helper-fn-name [name# swagger# & args#]
           (swagger/annotate
            swagger#
            (apply @~(ns-resolve 'io.pedestal.interceptor.helpers helper-fn-name) name# args#))))))
;; shadows helper macros in io.pedestal.interceptor.helpers
;; adding swagger metadata as the second argument, e.g.
;; (defhandler create-pet
;; {:summary "Creates a pet"}
;; [request]
;; {:status 200
;; :body "Created pet"})
;;
;; also shadows helper functions in io.pedestal.interceptor.helpers,
;; again adding swagger metadata as the second argument, e.g.
;; (handler ::create-pet
;; {:summary "Creates a pet"}
;; (fn [request]
;; {:status 200
;; :body "Created pet"}))
;;
;; Note that pedestal recommends building interceptors directly,
;; to which you should add swagger metadata, e.g.
;; (swagger/annotate
;; {:summary "Creates a pet"}
;; (i/interceptor {:name ::create-pet
;; :enter (fn [context] {:status 200
;; :body "Created pet"})}))
;;
;; All these forms create equivalent interceptors.
;; Generate the swagger-aware variant of each pedestal interceptor helper.
(defhelper defbefore)
(defhelper defafter)
(defhelper defaround)
(defhelper defon-request)
(defhelper defon-response)
(defhelper defhandler)
(defhelper defmiddleware)
|
import struct
import pytest
import parsley
import message_types as mt
class TestParsley:
    """Unit tests for parsley's CAN message parsers.

    Each test builds raw message bytes with struct.pack and feeds them to the
    corresponding parse_* function, asserting individual decoded fields.
    """

    @pytest.fixture
    def timestamp(self):
        """Factory fixture: packs `val` as the 3-byte big-endian timestamp
        prefix most messages carry (32-bit pack with the low byte dropped)."""
        def _timestamp(val=0):
            return struct.pack(">I", val << 8)[:-1]
        return _timestamp

    def test_parse_timestamp(self, timestamp):
        msg_data = timestamp(12345)
        assert parsley._parse_timestamp(msg_data) == 12345

    def test_gen_cmd(self, timestamp):
        msg_data = timestamp(12345)
        msg_data += struct.pack(">b", mt.gen_cmd_hex["BUS_DOWN_WARNING"])
        res = parsley.parse_gen_cmd(msg_data)
        assert res["time"] == 12345
        assert res["command"] == "BUS_DOWN_WARNING"

    def test_valve_cmd(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">b", mt.valve_states_hex["VALVE_CLOSED"])
        res = parsley.parse_valve_cmd(msg_data)
        assert res["req_state"] == "VALVE_CLOSED"

    def test_valve_status(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">bb",
                                mt.valve_states_hex["VALVE_UNK"], mt.valve_states_hex["VALVE_CLOSED"])
        res = parsley.parse_valve_status(msg_data)
        assert res["req_state"] == "VALVE_CLOSED"
        assert res["cur_state"] == "VALVE_UNK"

    def test_arm_cmd(self, timestamp):
        # 0x17: high nibble 1 -> ARMED, low nibble 7 -> altimeter number.
        msg_data = timestamp() + b'\x17'
        res = parsley.parse_arm_cmd(msg_data)
        assert res["altimeter"] == 7
        assert res["state"] == "ARMED"

    def test_arm_status(self, timestamp):
        msg_data = timestamp() + struct.pack(">bHH", 0x04, 12345, 54321)
        res = parsley.parse_arm_status(msg_data)
        assert res["altimeter"] == 4
        assert res["state"] == "DISARMED"
        assert res["drogue_v"] == 12345
        assert res["main_v"] == 54321

    def test_debug_msg(self, timestamp):
        msg_data = timestamp() + b'\x61\x23' + b'ABC'
        res = parsley.parse_debug_msg(msg_data)
        assert res["level"] == 6
        assert res["line"] == 0x123
        assert res["data"] == b'ABC'

    def test_debug_printf(self):
        msg_data = b'ABCDEFGH'
        res = parsley.parse_debug_printf(msg_data)
        assert res["string"] == "ABCDEFGH"

    # Board status messages carry a status code plus code-specific payload.
    def test_board_status_nominal(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">b", mt.board_stat_hex["E_NOMINAL"])
        res = parsley.parse_board_status(msg_data)
        assert res["status"] == "E_NOMINAL"

    def test_board_status_current(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">bH", mt.board_stat_hex["E_BUS_OVER_CURRENT"], 12345)
        res = parsley.parse_board_status(msg_data)
        assert res["status"] == "E_BUS_OVER_CURRENT"
        assert res["current"] == 12345

    def test_board_status_voltage(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">bH", mt.board_stat_hex["E_BUS_OVER_VOLTAGE"], 12345)
        res = parsley.parse_board_status(msg_data)
        assert res["status"] == "E_BUS_OVER_VOLTAGE"
        assert res["voltage"] == 12345

    def test_board_status_dead(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">bb",
                                mt.board_stat_hex["E_BOARD_FEARED_DEAD"], mt.board_id_hex["RADIO"])
        res = parsley.parse_board_status(msg_data)
        assert res["status"] == "E_BOARD_FEARED_DEAD"
        assert res["board_id"] == "RADIO"

    def test_board_status_quiet(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">bH", mt.board_stat_hex["E_NO_CAN_TRAFFIC"], 12345)
        res = parsley.parse_board_status(msg_data)
        assert res["status"] == "E_NO_CAN_TRAFFIC"
        assert res["err_time"] == 12345

    def test_board_status_sensor(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">bb",
                                mt.board_stat_hex["E_SENSOR"], mt.sensor_id_hex["SENSOR_BARO"])
        res = parsley.parse_board_status(msg_data)
        assert res["status"] == "E_SENSOR"
        assert res["sensor_id"] == "SENSOR_BARO"

    def test_board_status_valve(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">bbb", mt.board_stat_hex["E_VALVE_STATE"],
                                mt.valve_states_hex["VALVE_CLOSED"], mt.valve_states_hex["VALVE_UNK"])
        res = parsley.parse_board_status(msg_data)
        assert res["status"] == "E_VALVE_STATE"
        assert res["req_state"] == "VALVE_CLOSED"
        assert res["cur_state"] == "VALVE_UNK"

    def test_sensor_analog(self):
        msg_data = struct.pack(">HbH", 12345, mt.sensor_id_hex["SENSOR_BARO"], 54321)
        res = parsley.parse_sensor_analog(msg_data)
        assert res["time"] == 12345
        assert res["sensor_id"] == "SENSOR_BARO"
        assert res["value"] == 54321

    def test_sensor_altitude(self, timestamp):
        msg_data = timestamp() + struct.pack(">i", -12345)
        res = parsley.parse_sensor_altitude(msg_data)
        assert res["altitude"] == -12345

    def test_sensor_temp(self, timestamp):
        # Temperature is a 24-bit fixed-point value with 10 fractional bits.
        msg_data = timestamp() + b'\x12'
        msg_data += struct.pack(">I", int(12.5 * 2**10))[1:]
        res = parsley.parse_sensor_temp(msg_data)
        assert res["sensor_id"] == 0x12
        assert res["temperature"] == 12.5

    def test_gps_timestamp(self, timestamp):
        msg_data = timestamp() + struct.pack(">bbbb", 12, 23, 34, 45)
        res = parsley.parse_gps_timestamp(msg_data)
        assert res["hrs"] == 12
        assert res["mins"] == 23
        assert res["secs"] == 34
        assert res["dsecs"] == 45

    def test_gps_latitude(self, timestamp):
        msg_data = timestamp() + struct.pack(">bbHc", 12, 23, 12345, b'N')
        res = parsley.parse_gps_latitude(msg_data)
        assert res["degs"] == 12
        assert res["mins"] == 23
        assert res["dmins"] == 12345
        assert res["direction"] == "N"

    def test_gps_longitude(self, timestamp):
        msg_data = timestamp() + struct.pack(">bbHc", 12, 23, 12345, b'W')
        res = parsley.parse_gps_longitude(msg_data)
        assert res["degs"] == 12
        assert res["mins"] == 23
        assert res["dmins"] == 12345
        assert res["direction"] == "W"

    def test_gps_altitude(self, timestamp):
        msg_data = timestamp() + struct.pack(">Hbc", 12345, 12, b'm')
        res = parsley.parse_gps_altitude(msg_data)
        assert res["altitude"] == 12345
        assert res["daltitude"] == 12
        assert res["unit"] == "m"

    def test_gps_info(self, timestamp):
        msg_data = timestamp() + struct.pack(">bb", 12, 23)
        res = parsley.parse_gps_info(msg_data)
        assert res["num_sats"] == 12
        assert res["quality"] == 23

    def test_fill_lvl(self, timestamp):
        msg_data = timestamp()
        msg_data += struct.pack(">bb", 9, mt.fill_direction_hex["FILLING"])
        res = parsley.parse_fill_lvl(msg_data)
        assert res["level"] == 9
        assert res["direction"] == "FILLING"

    # parse() dispatch: monkeypatch the per-type handler table and check that
    # the SID is split into message type + board id and data is delegated.
    def test_parse(self, monkeypatch):
        def parse_monkey(msg_data):
            return {"monkey": msg_data}
        monkeypatch.setitem(parsley._func_map, "LEDS_ON", parse_monkey)
        msg_sid = mt.msg_type_hex["LEDS_ON"] | mt.board_id_hex["ARMING"]
        msg_data = [1, 2, 3, 4]
        res = parsley.parse(msg_sid, msg_data)
        assert res["msg_type"] == "LEDS_ON"
        assert res["board_id"] == "ARMING"
        assert res["data"]["monkey"] == msg_data

    def test_parse_usb(self):
        msg_sid, msg_data = parsley.parse_usb_debug("$555:1,2,FF")
        assert msg_sid == 0x555
        assert msg_data == [1, 2, 0xFF]

    def test_parse_logger(self):
        msg_sid, msg_data = parsley.parse_logger("12345678 555 3: 01 02 FF 87654321")
        assert msg_sid == 0x555
        assert msg_data == [1, 2, 0xFF]
|
package com.example.diary_practice2
import android.content.ContentValues
import android.content.Context
import android.database.sqlite.SQLiteDatabase
import android.database.sqlite.SQLiteOpenHelper
import android.util.Log
/**
 * SQLite helper for the diary app: a single `users` table holding each diary
 * entry's title, day and photo path.
 */
class DatabaseHandler(context: Context) :
    SQLiteOpenHelper(context, DB_NAME, null, DB_VERSION) {

    override fun onCreate(db: SQLiteDatabase?) {
        // "Integer PRIMARY KEY" aliases SQLite's rowid, so ids auto-assign.
        val createTable = "CREATE TABLE $TABLE_NAME " +
                "($ID Integer PRIMARY KEY, $DIARY_TITLE TEXT, $DIARY_DAY TEXT, $PHOTO_PATH TEXT)"
        db?.execSQL(createTable)
    }

    override fun onUpgrade(db: SQLiteDatabase?, oldVersion: Int, newVersion: Int) {
        // Called when the database needs to be upgraded; nothing to migrate yet.
    }

    /**
     * Inserts a new diary row.
     *
     * @return true when the insert succeeded (SQLiteDatabase.insert returns
     *         -1 on failure).
     */
    fun addUser(user: Users): Boolean {
        val db = this.writableDatabase
        val values = ContentValues().apply {
            put(DIARY_TITLE, user.diaryTitle)
            put(DIARY_DAY, user.diaryDay)
            put(PHOTO_PATH, user.photoPath)
        }
        val rowId = db.insert(TABLE_NAME, null, values)
        db.close()
        Log.v("InsertedID", "$rowId")
        // Bug fix: Integer.parseInt("$rowId") overflows for rowids beyond
        // Int range; insert() returns a Long, so compare against -1L directly.
        return rowId != -1L
    }

    /** Reads every stored diary entry, in table order. */
    fun getAllUsers(): List<DiaryData> {
        val diaries = mutableListOf<DiaryData>()
        val db = readableDatabase
        // use {} guarantees the cursor is closed even if a read throws.
        db.rawQuery("SELECT * FROM $TABLE_NAME", null).use { cursor ->
            while (cursor.moveToNext()) {
                diaries.add(
                    DiaryData(
                        cursor.getString(cursor.getColumnIndexOrThrow(DIARY_TITLE)),
                        cursor.getString(cursor.getColumnIndexOrThrow(DIARY_DAY)),
                        cursor.getString(cursor.getColumnIndexOrThrow(PHOTO_PATH))
                    )
                )
            }
        }
        db.close()
        return diaries
    }

    companion object {
        private const val DB_NAME = "UsersDB"
        private const val DB_VERSION = 1 // was "DB_VERSIOM" (typo); private, safe to rename
        private const val TABLE_NAME = "users"
        private const val ID = "id"
        private const val DIARY_TITLE = "DiaryTitle"
        private const val DIARY_DAY = "DiaryDay"
        private const val PHOTO_PATH = "PhotoPath"
    }
}
|
require 'tty-spinner'

# Bug fix: the original constructed a throwaway default TTY::Spinner and then
# immediately overwrote it; only the configured spinner is needed.
spinner = TTY::Spinner.new("[:spinner] [*]Loading Rube-Saved-PCAP[*]...", format: :pulse_2)
spinner.auto_spin
sleep(5)
spinner.stop("Done!")
|
#!/bin/bash

# Copyright 2021 Adevinta

# set -e # Uncomment this to make the pipeline fail in case of a security vuln.

# Smoke tests for vulcan-local: start a deliberately vulnerable target app
# (Damn Small Vulnerable Web) and exercise several scan configurations, both
# natively and through the docker image. Each step echoes its exit code
# instead of aborting, so every variant runs even when vulns are reported.

echo "Start target app"
docker pull appsecco/dsvw
docker run -p 1234:8000 --restart unless-stopped --name dsvw -d appsecco/dsvw

# Give the target a moment to start listening.
sleep 5

echo "Test based on yaml config using lightweight policy"
./vulcan-local -c ./vulcan.yaml -c ./script/vulcan-policies.yaml -p lightweight
echo "exit=$?"

echo "Test local path as a git repository excluding the github check"
./vulcan-local -t . -e github -checktypes file://./script/checktypes-stable.json
echo "exit=$?"

# Add a path and a tag to bypass check target validations.
docker tag vulcan-local path/vulcan-local:xxx
echo "Test local docker image"
./vulcan-local -t path/vulcan-local:xxx -a DockerImage -i trivy -checktypes file://./script/checktypes-stable.json
echo "exit=$?"

echo "Docker test based on yaml config"
# The docker socket is mounted so checks can run as sibling containers.
docker run -i --rm -v /var/run/docker.sock:/var/run/docker.sock \
    -v "$PWD":/target -e TRAVIS_BUILD_DIR=/target \
    -e REGISTRY_SERVER -e REGISTRY_USERNAME -e REGISTRY_PASSWORD \
    vulcan-local -c /target/vulcan.yaml -i retirejs
echo "exit=$?"

echo "Docker test local app as a webaddress excluding nessus and zap"
docker run -i --rm -v /var/run/docker.sock:/var/run/docker.sock \
    -v "$PWD":/target \
    -e TRAVIS_BUILD_DIR=/target -e REGISTRY_SERVER -e REGISTRY_USERNAME -e REGISTRY_PASSWORD \
    vulcan-local -t http://localhost:1234 -e '(nessus|zap)' -checktypes file:///target/script/checktypes-stable.json
echo "exit=$?"

echo "Stopping target app"
docker rm -f dsvw
|
package com.displee.web.localhost.route
import com.displee.undertow.host.route.*
import com.displee.undertow.host.route.impl.TemplateRouteHandler
import com.google.gson.JsonObject
import io.undertow.server.HttpServerExchange
import io.undertow.util.Methods
/**
 * Sample GET /register endpoint: validates the expected query parameters and
 * replies with a JSON success payload.
 */
@RouteManifest("/register", Methods.GET_STRING)
class SampleRoute : TemplateRouteHandler() {

    override fun handleRequest(exchange: HttpServerExchange) {
        // Reject the request unless all required parameters are present and non-blank.
        val params = exchange.getQueryParametersAsMap()
        if (params.containsNullOrBlank("username", "first_name", "last_name")) {
            exchange.send("Error received not enough parameters.")
            return
        }
        val username = params["username"]
        val firstName = params["first_name"]
        val lastName = params["last_name"]

        // Placeholder for real work (e.g. persisting the registration).
        val response = JsonObject().apply {
            addProperty("error", -1)
            addProperty("message", "Successfully registered!")
        }
        exchange.send(response)
    }
}
|
package de.huddeldaddel.euler
import de.huddeldaddel.euler.math.Permutator
/**
* Solution for https://projecteuler.net/problem=24
*/
fun main() = println(Problem24().getMillionthPermutation())
class Problem24 {
    /** Returns the millionth lexicographic permutation of the digits 0-9. */
    fun getMillionthPermutation(): String =
        Permutator()
            .getPermutations("0123456789")
            .sorted()[999_999]
}
|
/*
* Copyright LIRIS-CNRS (2016)
* Contributors: Vincent Primault <vincent.primault@liris.cnrs.fr>
*
* This software is a computer program whose purpose is to study location privacy.
*
* This software is governed by the CeCILL-B license under French law and
* abiding by the rules of distribution of free software. You can use,
* modify and/ or redistribute the software under the terms of the CeCILL-B
* license as circulated by CEA, CNRS and INRIA at the following URL
* "http://www.cecill.info".
*
* As a counterpart to the access to the source code and rights to copy,
* modify and redistribute granted by the license, users are provided only
* with a limited warranty and the software's author, the holder of the
* economic rights, and the successive licensors have only limited liability.
*
* In this respect, the user's attention is drawn to the risks associated
* with loading, using, modifying and/or developing or reproducing the
* software by the user in light of its specific status of free software,
* that may mean that it is complicated to manipulate, and that also
* therefore means that it is reserved for developers and experienced
* professionals having in-depth computer knowledge. Users are therefore
* encouraged to load and test the software's suitability as regards their
* requirements in conditions enabling the security of their systems and/or
* data to be ensured and, more generally, to use and operate it in the
* same conditions as regards security.
*
* The fact that you are presently reading this means that you have had
* knowledge of the CeCILL-B license and that you accept its terms.
*/
package fr.cnrs.liris.common.util
object MathUtils {
def roundAt1(v: Double): Double = (v * 10).round.toDouble / 10
def roundAt2(v: Double): Double = (v * 100).round.toDouble / 100
def roundAt3(v: Double): Double = (v * 1000).round.toDouble / 1000
def roundAt4(v: Double): Double = (v * 10000).round.toDouble / 10000
def roundAt5(v: Double): Double = (v * 100000).round.toDouble / 100000
def roundAt6(v: Double): Double = (v * 1000000).round.toDouble / 1000000
def roundAt(v: Double, places: Int): Double = {
val factor = math.pow(10, places)
(v * factor).round.toDouble / factor
}
def nextPowerOf2(x : Int) : Int = {
val pos = math.log(x)
val c = math.ceil(pos)
math.pow(2,c).toInt
}
def hilbert_xy2d(n : Int, i : Int,j : Int) : Int = {
var x = j
var y = i
var rx = 0
var ry = 0
var d = 0
var s = n/2
while(s>0)
{
rx = if ((x & s) > 0) 1 else 0
ry = if ((y & s) > 0) 1 else 0
d += s * s * ((3 * rx) ^ ry)
val out = rotation(s, x, y, rx, ry)
x = out._1
y = out._2
s=s/2
}
d
}
//rotate/flip a quadrant appropriately (returns (x,y)
def rotation(n : Int , x : Int , y : Int, rx : Int, ry : Int): (Int,Int) = {
var outx = x
var outy = y
if (ry == 0) {
if (rx == 1) {
outx = n-1 - outx;
outy = n-1 - outy;
}
//Swap x and y
val t = outx
outx = outy
outy = t
}
(outx,outy)
}
}
|
import { EventCode } from './types';
/**
 * Lookup table from a key identifier string to its EventCode descriptor.
 */
export declare const EVENT_CODES: {
    [index: string]: EventCode;
};
|
import React from 'react'
import { mount } from 'enzyme'
import CollectionImage from './'
import IIIFImage from 'Components/Shared/IIIFImage'
// Mount-level check: the wrapper element renders and the underlying
// IIIFImage component is used for the image itself.
test('CollectionImage renders and image with alt text', () => {
  const wrapper = mount(<CollectionImage image='test.png' altText='Awesome Collection' />)
  expect(wrapper.find('.collectionImage').exists()).toBeTruthy()
  expect(wrapper.find(IIIFImage).exists()).toBeTruthy()
})
|
<?php
namespace App\Helpers;
use \MediactiveDigital\MedKit\Helpers\FormatHelper as MedKitFormatHelper;
/**
 * App-level format helper.
 *
 * Intentionally empty: inherits all behavior from the MedKit FormatHelper and
 * exists so the application can override formatting methods later without
 * touching vendor code.
 */
class FormatHelper extends MedKitFormatHelper{

}
|
import 'package:flutter/material.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:myapp/controller/menu_controller.dart';
import 'package:myapp/controller/user_controller.dart';
import 'package:myapp/page_util/Info.dart';
import 'package:myapp/user/user_ex.dart';
import 'package:myapp/view/components/button/yes_eating.dart';
import 'package:myapp/view/pages/subpages/rate_menu_page.dart';
import '../../date_functions.dart';
import 'button/not_eating.dart';
/// Card showing one meal's menu for a given date.  Tapping it opens
/// [RateMenuPage]; the border and shadow are highlighted in green when this
/// slot is the meal currently being served.
class MenuBox extends StatelessWidget {
  final String dateAndTime;
  final Map<String, List<String>> menuMap;
  final List<String> menuList;
  final UserController u = Get.find();
  final MenuController m = Get.find();

  MenuBox(this.dateAndTime, this.menuList, this.menuMap);

  @override
  Widget build(BuildContext context) {
    final bool isNow = validateNow(dateAndTime);
    final String time = getTimeFromDateAndTime(dateAndTime);
    return Padding(
      padding: EdgeInsets.symmetric(vertical: 8.h, horizontal: 4.w),
      child: InkWell(
        onTap: () {
          Get.to(
            () => RateMenuPage(
              dateAndTime,
              menuMap,
            ),
          );
        },
        child: Container(
          padding: EdgeInsets.all(2.w),
          height: (0.3).sw * 1.5,
          width: (0.22).sw * 1.5,
          decoration: BoxDecoration(
            color: Colors.white,
            // Green highlight marks the current meal.
            border: isNow
                ? Border.all(color: Colors.lightGreen[600]!, width: 2)
                : Border.all(color: Colors.black45, width: 2),
            borderRadius: BorderRadius.circular(5),
            boxShadow: [
              if (isNow)
                BoxShadow(
                  color: Colors.lightGreen,
                  blurRadius: 5,
                  spreadRadius: 0.01,
                )
              else
                BoxShadow()
            ],
          ),
          child: Column(
            crossAxisAlignment: CrossAxisAlignment.center,
            children: [
              // Date plus meal time (breakfast / lunch / dinner).
              _buildMenuHeader(time),
              Divider(color: Colors.grey),
              // Menu item list (allergy matches rendered in red).
              _buildMenuList(),
              SizedBox(height: 2),
              // Eating / not-eating indicator; hidden when no menu exists.
              if (menuList.isEmpty)
                Container()
              else if (_buildCheckIfEating(time))
                YesEating()
              else
                NotEating(),
            ],
          ),
        ),
      ),
    );
  }

  /// Caption above the menu: "month/day (weekday) time".
  Widget _buildMenuHeader(String time) {
    return Text(
      "${getMonthDayAndWeekdayInKorean(dateAndTime)} $time",
      style: TextStyle(fontSize: 8.sp),
    );
  }

  /// Scrollable list of menu names; items matching the user's allergies are
  /// shown in red.  Shows a placeholder when there is no registered menu.
  Widget _buildMenuList() {
    final Map<String, Map<String, dynamic>> menusAndAllergyMap =
        getMenusAndAllergyMap(m.menus);
    if (menuList.isEmpty) {
      return Center(
        child: Text(
          "๋ฑ๋ก๋ ๋ฉ๋ด ์ ๋ณด๊ฐ ์์ต๋๋ค.",
          textAlign: TextAlign.center,
          style: TextStyle(fontSize: 8.sp),
        ),
      );
    }
    return Expanded(
      child: ListView(
        children: [
          Column(
            children: List.generate(
              menuList.length,
              (index) => Text(
                menuList[index],
                style: TextStyle(
                    fontSize: 11.sp,
                    // "== true" kept on purpose: containAllergy may return a
                    // nullable bool — TODO confirm its signature.
                    color: containAllergy(
                                menuList[index],
                                menusAndAllergyMap,
                                u.principal.value.allergy ?? {}) ==
                            true
                        ? Colors.red
                        : Colors.black),
              ),
            ),
          )
        ],
      ),
    );
  }

  /// Whether the user is marked as eating this meal slot.
  bool _buildCheckIfEating(String time) {
    return checkIfEating(dateAndTime, time);
  }
}
|
package org.geepawhill.tsd.core
/**
 * Receiver for walking a tree-structured document (TSD): nested nodes are
 * bracketed by [open]/[close] calls and terminal values arrive via [leaf].
 */
interface TsdBuilder {
    /** Begins a nested node with the given name. */
    fun open(node: String)

    /** Emits a terminal node carrying a string value. */
    fun leaf(node: String, value: String)

    /** Ends the nested node previously started with [open]. */
    fun close(node: String)
}
|
# -*- coding: utf-8 -*-
from ... utilities.singleton import Singleton
from ultron.sentry.Analysis.SecurityValueHolders import SecurityLatestValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityCurrentValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityDiffValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySignValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityExpValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityLogValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySqrtValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAbsValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityNormInvValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityCeilValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityFloorValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityRoundValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySigmoidValueHolder
from ultron.sentry.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityTanhValueHolder
from ultron.sentry.Analysis.CrossSectionValueHolders import CSRankedSecurityValueHolder
from ultron.sentry.Analysis.CrossSectionValueHolders import CSZScoreSecurityValueHolder
from ultron.sentry.Analysis.CrossSectionValueHolders import CSPercentileSecurityValueHolder
from ultron.sentry.Analysis.CrossSectionValueHolders import CSResidueSecurityValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityAddedValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecuritySubbedValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityMultipliedValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityDividedValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityLtOperatorValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityLeOperatorValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityGtOperatorValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityGeOperatorValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityEqOperatorValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityNeOperatorValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityAndOperatorValueHolder
from ultron.sentry.Analysis.SecurityValueHolders import SecurityOrOperatorValueHolder
import six,pdb
@six.add_metaclass(Singleton)
class Accumulators(object):
    """Singleton registry mapping integer operator ids to sentry value-holder
    classes, plus helpers that turn ``c_``-encoded factor expressions into
    value-holder construction strings / objects.

    Three pools are exposed:

    * ``_accumulators_pool`` -- unary / stateless transforms (sign, exp, ...).
    * ``_mutated_pool``      -- currently identical to the accumulator pool;
      kept as a separate mapping, presumably so the two can diverge later.
    * ``_cross_pool``        -- binary / cross-sectional operators.
    """

    def __init__(self):
        # NOTE(review): ids 11 and 12 both map to SecurityRoundValueHolder --
        # this looks like a copy/paste slip; confirm whether 12 should be a
        # distinct holder before relying on it.
        self._accumulators_pool = {
            1:SecurityCurrentValueHolder,2:SecurityDiffValueHolder,
            3:SecuritySignValueHolder,4:SecurityExpValueHolder,
            5:SecurityLogValueHolder,6:SecuritySqrtValueHolder,
            7:SecurityAbsValueHolder,8:SecurityNormInvValueHolder,
            9:SecurityCeilValueHolder,10:SecurityFloorValueHolder,
            11:SecurityRoundValueHolder,12:SecurityRoundValueHolder,
            13:CSRankedSecurityValueHolder,
            14:CSZScoreSecurityValueHolder,15:CSPercentileSecurityValueHolder,
            16:SecuritySigmoidValueHolder,17:SecurityTanhValueHolder
        }
        # Byte-for-byte copy of _accumulators_pool (see class docstring).
        self._mutated_pool = {
            1:SecurityCurrentValueHolder,2:SecurityDiffValueHolder,
            3:SecuritySignValueHolder,4:SecurityExpValueHolder,
            5:SecurityLogValueHolder,6:SecuritySqrtValueHolder,
            7:SecurityAbsValueHolder,8:SecurityNormInvValueHolder,
            9:SecurityCeilValueHolder,10:SecurityFloorValueHolder,
            11:SecurityRoundValueHolder,12:SecurityRoundValueHolder,
            13:CSRankedSecurityValueHolder,
            14:CSZScoreSecurityValueHolder,15:CSPercentileSecurityValueHolder,
            16:SecuritySigmoidValueHolder,17:SecurityTanhValueHolder
        }
        # Binary arithmetic / comparison / logical operators and the
        # cross-sectional residue holder.
        self._cross_pool = {
            1:CSResidueSecurityValueHolder,2:SecurityAddedValueHolder,
            3:SecuritySubbedValueHolder,4:SecurityMultipliedValueHolder,
            5:SecurityDividedValueHolder,6:SecurityLtOperatorValueHolder,
            7:SecurityLeOperatorValueHolder,8:SecurityGtOperatorValueHolder,
            9:SecurityGeOperatorValueHolder,10:SecurityEqOperatorValueHolder,
            11:SecurityNeOperatorValueHolder,12:SecurityAndOperatorValueHolder,
            13:SecurityOrOperatorValueHolder
        }

    def transform(self, expression, is_formula=False):
        """Decode an expression of the form ``<field>c_<id>c_<id>...`` into a
        nested value-holder construction string.

        The first ``c_``-token is the raw field name (quoted as a string
        literal); each subsequent numeric token wraps the formula in the
        corresponding accumulator class. Tokens with id <= 1 are skipped;
        if no operator was applied at all, the identity holder (id 1) is
        wrapped around the field.

        :param expression: encoded expression string.
        :param is_formula: when True, evaluate the built string and return the
            value-holder object; otherwise return the string itself.
        """
        var_group = expression.split('c_')
        formula = ''
        is_acc = False
        for i in range(len(var_group)):
            if i == 0:
                # Raw field name, embedded as a quoted literal.
                formula = '\'' + var_group[i] + '\''
            elif int(var_group[i]) > 1:
                is_acc = True
                formula = self._accumulators_pool[int(var_group[i])].__name__ + '(' + formula + ')'
        if not is_acc:
            # No operator token present: fall back to the identity holder.
            formula = self._accumulators_pool[1].__name__ + '(' + formula + ')'
        # WARNING: eval() executes arbitrary code -- `expression` must come
        # from a trusted source only.
        return eval(formula) if is_formula else formula

    def dependency(self, expression_sets):
        """Map each expression string to the comma-joined names of the raw
        fields it depends on (as reported by the evaluated holder's
        ``_dependency`` attribute).

        WARNING: each expression is passed to eval(); trusted input only.
        """
        result = {}
        expression_list = list(expression_sets)
        for expression in expression_list:
            result[expression] = ','.join(eval(expression)._dependency)
        return result

    def calc_new_factor(self, mutated_cross_columns, factor_data):
        """Evaluate each encoded column expression against ``factor_data`` and
        assemble all results into one DataFrame with exactly the columns named
        in ``mutated_cross_columns``.

        Raises AttributeError if ``mutated_cross_columns`` is empty (there is
        nothing to assemble), matching the previous behavior.

        WARNING: each column expression is passed to eval(); trusted input only.
        """
        new_factors = None
        for columns in mutated_cross_columns:
            sub_data = eval(columns).transform(factor_data, name=str(columns),
                                               category_field='code', dropna=False)
            # BUG FIX: the original called set_index('trade_date', 'code'),
            # which passes 'code' as the positional `drop` argument (keyword-
            # only in modern pandas -> TypeError) instead of indexing by both
            # columns. A list is required to build the MultiIndex.
            sub_data = sub_data.reset_index().set_index(['trade_date', 'code'])
            if new_factors is None:
                new_factors = sub_data.copy()
            else:
                # Rows are assumed to be aligned across sub-results; values
                # are copied positionally, not joined on the index.
                new_factors[str(columns)] = sub_data[str(columns)].values
        return new_factors.reset_index()[mutated_cross_columns]

    def fetch_accumulators_pool(self):
        """Return the unary accumulator pool (id -> holder class)."""
        return self._accumulators_pool

    def fetch_mutated_pool(self):
        """Return the mutated pool (currently identical to the accumulator pool)."""
        return self._mutated_pool

    def fetch_cross_pool(self):
        """Return the binary / cross-sectional operator pool."""
        return self._cross_pool

    def get_accumulators_pool(self, index):
        """Return the accumulator class registered under ``index`` (KeyError if absent)."""
        return self._accumulators_pool[index]
# Module-level facade: Accumulators is a Singleton, so every Accumulators()
# call yields the same instance. Bind it once and derive all exports from it.
_accumulators = Accumulators()

accumulators_pool = _accumulators.fetch_accumulators_pool()
mutated_pool = _accumulators.fetch_mutated_pool()
cross_pool = _accumulators.fetch_cross_pool()
transform = _accumulators.transform
dependency = _accumulators.dependency
calc_new_factor = _accumulators.calc_new_factor
|
#if !defined(AFX_CDXCDYNAMICPROPSHEET_H__82427297_6456_11D3_802D_000000000000__INCLUDED_)
#define AFX_CDXCDYNAMICPROPSHEET_H__82427297_6456_11D3_802D_000000000000__INCLUDED_
#if _MSC_VER >= 1000
#pragma once
#endif // _MSC_VER >= 1000
// cdxCDynamicPropSheet.h : header file
//
#include "cdxCDynamicWndEx.h"
#pragma warning(disable: 4100)
class cdxCDynamicPropPage;
/*
* cdxCDynamicPropSheet
* ====================
* Dynamic property sheet.
*/
// Resizable property sheet: marries CPropertySheet with the cdxCDynamicWndEx
// dynamic-layout machinery so the sheet and its embedded pages follow resizes.
class cdxCDynamicPropSheet : public CPropertySheet, public cdxCDynamicWndEx
{
	DECLARE_DYNCREATE(cdxCDynamicPropSheet);

	// Default dynamic-window behavior flags for sheets (flicker-free painting,
	// resize grip icon, SWP copy-bits moves).
	enum { flDefault = flAntiFlicker|flSizeIcon|flSWPCopyBits };

	friend class cdxCDynamicPropPage;

private:
	Position m_PagePos;	// layout position applied to embedded pages -- presumably captured from the first page; verify in the .cpp
	bool m_bHasPos;		// true once m_PagePos holds a valid position

public:
	// Constructors mirror CPropertySheet's; the sheetAutoPosID overloads
	// additionally enable automatic window-position persistence via
	// ActivateAutoPos() (see inlines at the bottom of this header).
	cdxCDynamicPropSheet(Freedom fd = fdAll, UINT nFlags = flDefault);
	cdxCDynamicPropSheet(UINT nIDCaption, CWnd* pParentWnd = NULL, UINT iSelectPage = 0, Freedom fd = fdAll, UINT nFlags = flDefault);
	cdxCDynamicPropSheet(LPCTSTR pszCaption, CWnd* pParentWnd = NULL, UINT iSelectPage = 0, Freedom fd = fdAll, UINT nFlags = flDefault);
	cdxCDynamicPropSheet(UINT sheetAutoPosID, UINT nIDCaption, CWnd* pParentWnd = NULL, UINT iSelectPage = 0, Freedom fd = fdAll, UINT nFlags = flDefault);
	cdxCDynamicPropSheet(UINT sheetAutoPosID, LPCTSTR pszCaption, CWnd* pParentWnd = NULL, UINT iSelectPage = 0, Freedom fd = fdAll, UINT nFlags = flDefault);
	cdxCDynamicPropSheet(LPCTSTR lpszSheetAutoPosID, UINT nIDCaption, CWnd* pParentWnd = NULL, UINT iSelectPage = 0, Freedom fd = fdAll, UINT nFlags = flDefault);
	cdxCDynamicPropSheet(LPCTSTR lpszSheetAutoPosID, LPCTSTR pszCaption, CWnd* pParentWnd = NULL, UINT iSelectPage = 0, Freedom fd = fdAll, UINT nFlags = flDefault);

	virtual ~cdxCDynamicPropSheet() { DoOnDestroy(); }

	// ops
public:
	virtual void AddPage( cdxCDynamicPropPage & rPage );
	virtual void RemovePage( cdxCDynamicPropPage & rPage );
	// Pointer overloads simply forward to the reference versions.
	void AddPage( cdxCDynamicPropPage *pPage ) { ASSERT(pPage != NULL); AddPage(*pPage); }
	void RemovePage( cdxCDynamicPropPage *pPage ) { ASSERT(pPage != NULL); RemovePage(*pPage); }
	void RemovePage( int nPage );

	// True when the sheet runs in wizard mode (PSH_WIZARD set in m_psh).
	BOOL IsWizard() const { return (m_psh.dwFlags & PSH_WIZARD) != 0; }

	// events
protected:
	// Called when a page is first initialized (see OnInitDialog in the page).
	virtual void OnInitPage(cdxCDynamicPropPage & rPage);
	// Wizards re-run layout on page activation because the visible page changed.
	virtual void OnSetActive(cdxCDynamicPropPage & rPage, BOOL bStatus) { if(IsWindow() && IsWizard()) Layout(); }
	virtual void OnKillActive(cdxCDynamicPropPage & rPage, BOOL bStatus) {}

	// Overrides
	// ClassWizard generated virtual function overrides
	//{{AFX_VIRTUAL(cdxCDynamicPropSheet)
	public:
	virtual BOOL DestroyWindow();
	//}}AFX_VIRTUAL

	// Implementation
public:

	// Generated message map functions
protected:
	//{{AFX_MSG(cdxCDynamicPropSheet)
	virtual BOOL OnInitDialog();
	afx_msg void OnClose();
	afx_msg void OnDestroy();
	afx_msg int OnCreate(LPCREATESTRUCT lpCreateStruct);
	afx_msg void OnSize(UINT nType, int cx, int cy);
	afx_msg void OnSizing(UINT fwSide, LPRECT pRect);
	afx_msg void OnTimer(UINT nIDEvent);
	afx_msg void OnGetMinMaxInfo(MINMAXINFO FAR* lpMMI);
	//}}AFX_MSG

	DECLARE_MESSAGE_MAP();
	DECLARE_DYNAMIC_MAP();
};
/*
* cdxCDynamicPropPage
* ===================
* The page for our sheet.
*/
// A property page hosted by cdxCDynamicPropSheet; inherits dynamic layout
// from cdxCDynamicWnd so its controls can follow resizes.
class cdxCDynamicPropPage : public CPropertyPage, public cdxCDynamicWnd
{
	DECLARE_DYNCREATE(cdxCDynamicPropPage)
	friend class cdxCDynamicPropSheet;

	// Default dynamic-window behavior flags for pages.
	enum { flDefault = flAntiFlicker };

private:
	cdxCDynamicPropSheet *m_pSheet;	// owning sheet; set by the sheet when the page is added -- TODO confirm in the .cpp
	bool m_bFirstHit;				// presumably tracks the first activation; verify usage in the .cpp

public:
	cdxCDynamicPropPage() : cdxCDynamicWnd(fdAll,flDefault), m_pSheet(NULL), m_bFirstHit(false) {}
	cdxCDynamicPropPage(UINT nID, UINT nIDCaption = 0) : CPropertyPage(nID,nIDCaption), cdxCDynamicWnd(fdAll,flDefault), m_pSheet(NULL), m_bFirstHit(false) {}
	cdxCDynamicPropPage(LPCTSTR lpszID, UINT nIDCaption = 0) : CPropertyPage(lpszID,nIDCaption), cdxCDynamicWnd(fdAll,flDefault), m_pSheet(NULL), m_bFirstHit(false) {}
	virtual ~cdxCDynamicPropPage() { DoOnDestroy(); }

	// Owning sheet, or NULL if the page has not been added to one.
	cdxCDynamicPropSheet *GetSheet() const { return m_pSheet; }

	// Dialog Data
	//{{AFX_DATA(cdxCDynamicPropPage)
	// NOTE - ClassWizard will add data members here.
	//    DO NOT EDIT what you see in these blocks of generated code !
	//}}AFX_DATA

	// Overrides
	// ClassWizard generate virtual function overrides
	//{{AFX_VIRTUAL(cdxCDynamicPropPage)
	public:
	virtual BOOL OnSetActive();
	virtual BOOL OnKillActive();
	protected:
	virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
	//}}AFX_VIRTUAL

	// Implementation
protected:
	// Generated message map functions
	//{{AFX_MSG(cdxCDynamicPropPage)
	afx_msg void OnSize(UINT nType, int cx, int cy);
	afx_msg void OnTimer(UINT nIDEvent);
	virtual BOOL OnInitDialog();
	afx_msg void OnDestroy();
	afx_msg void OnSizing(UINT fwSide, LPRECT pRect);
	afx_msg void OnGetMinMaxInfo(MINMAXINFO FAR* lpMMI);
	afx_msg void OnParentNotify(UINT message, LPARAM lParam);
	//}}AFX_MSG
	DECLARE_MESSAGE_MAP()
};
//////////////////////////////////////////////////////////////////////
// inlines
//////////////////////////////////////////////////////////////////////
// ---------------------------------------------------------------------------
// Inline constructors. Each forwards caption/parent/page-selection arguments
// to the matching CPropertySheet constructor and the freedom/flags pair to
// cdxCDynamicWndEx. The *AutoPosID overloads additionally activate automatic
// window-position persistence when a non-empty id is supplied.
// ---------------------------------------------------------------------------

// Default-construct; no caption resource, no auto-position.
inline cdxCDynamicPropSheet::cdxCDynamicPropSheet(Freedom fd, UINT nFlags)
:	cdxCDynamicWndEx(fd,nFlags),
	m_bHasPos(false)
{
}

// Caption from a string resource id.
inline cdxCDynamicPropSheet::cdxCDynamicPropSheet(UINT nIDCaption, CWnd* pParentWnd, UINT iSelectPage, Freedom fd, UINT nFlags)
:	CPropertySheet(nIDCaption,pParentWnd,iSelectPage),
	cdxCDynamicWndEx(fd,nFlags),
	m_bHasPos(false)
{
}

// Caption from a literal string.
inline cdxCDynamicPropSheet::cdxCDynamicPropSheet(LPCTSTR pszCaption, CWnd* pParentWnd, UINT iSelectPage, Freedom fd, UINT nFlags)
:	CPropertySheet(pszCaption,pParentWnd,iSelectPage),
	cdxCDynamicWndEx(fd,nFlags),
	m_bHasPos(false)
{
}

// Resource-id caption + numeric auto-position profile (0 disables it).
inline cdxCDynamicPropSheet::cdxCDynamicPropSheet(UINT sheetAutoPosID, UINT nIDCaption, CWnd* pParentWnd, UINT iSelectPage, Freedom fd, UINT nFlags)
:	CPropertySheet(nIDCaption,pParentWnd,iSelectPage),
	cdxCDynamicWndEx(fd,nFlags),
	m_bHasPos(false)
{
	if(sheetAutoPosID)
		ActivateAutoPos(sheetAutoPosID);
}

// Literal caption + numeric auto-position profile (0 disables it).
inline cdxCDynamicPropSheet::cdxCDynamicPropSheet(UINT sheetAutoPosID, LPCTSTR pszCaption, CWnd* pParentWnd, UINT iSelectPage, Freedom fd, UINT nFlags)
:	CPropertySheet(pszCaption,pParentWnd,iSelectPage),
	cdxCDynamicWndEx(fd,nFlags),
	m_bHasPos(false)
{
	if(sheetAutoPosID)
		ActivateAutoPos(sheetAutoPosID);
}

// Resource-id caption + string auto-position profile (NULL/empty disables it).
inline cdxCDynamicPropSheet::cdxCDynamicPropSheet(LPCTSTR lpszSheetAutoPosID, UINT nIDCaption, CWnd* pParentWnd, UINT iSelectPage, Freedom fd, UINT nFlags)
:	CPropertySheet(nIDCaption,pParentWnd,iSelectPage),
	cdxCDynamicWndEx(fd,nFlags),
	m_bHasPos(false)
{
	if(lpszSheetAutoPosID && *lpszSheetAutoPosID)
		ActivateAutoPos(lpszSheetAutoPosID);
}

// Literal caption + string auto-position profile (NULL/empty disables it).
inline cdxCDynamicPropSheet::cdxCDynamicPropSheet(LPCTSTR lpszSheetAutoPosID, LPCTSTR pszCaption, CWnd* pParentWnd, UINT iSelectPage, Freedom fd, UINT nFlags)
:	CPropertySheet(pszCaption,pParentWnd,iSelectPage),
	cdxCDynamicWndEx(fd,nFlags),
	m_bHasPos(false)
{
	if(lpszSheetAutoPosID && *lpszSheetAutoPosID)
		ActivateAutoPos(lpszSheetAutoPosID);
}
#pragma warning(default: 4100)
//{{AFX_INSERT_LOCATION}}
// Microsoft Developer Studio will insert additional declarations immediately before the previous line.
#endif // !defined(AFX_CDXCDYNAMICPROPSHEET_H__82427297_6456_11D3_802D_000000000000__INCLUDED_)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.