text
stringlengths 27
775k
|
|---|
/**
* Clipboard Module
* @module Clipboard
*/
import { container } from "../../nodeshow.js"
import { Container } from "../../Container.js"
import { InputAccessManagerInstance as InputAccessManager } from "./InputAccessManager.mjs"
import { InputManager } from "../utils/InputManager.js"
let appId = null; //Temporary, think this up
export const EVENTS = {
'copy':'container.copy',
'paste':'container.paste',
'cut':'container.cut'
}
/**
 * Forward the browser's native clipboard events onto the NodeShow event bus.
 * Event names come from EVENTS so emitters and subscribers cannot drift apart
 * (previously the names were duplicated here as string literals).
 * @param {ClipboardEvent} e - original DOM event, passed through to listeners
 */
function copy(e) {
	container.emit(EVENTS.copy, { originalEvent: e });
}

function paste(e) {
	container.emit(EVENTS.paste, { originalEvent: e });
}

function cut(e) {
	container.emit(EVENTS.cut, { originalEvent: e });
}

// Bridge document-level clipboard events into the managed pipeline.
document.addEventListener('copy', copy);
document.addEventListener('paste', paste);
document.addEventListener('cut', cut);
let Manager = new InputManager(InputAccessManager, EVENTS);
/** @class
* @summary Component implementing consistent and managed access to mouse input.
* @description TODO
* */
/** @class
 * @summary Component implementing consistent and managed access to clipboard input.
 * @description Registers copy/cut/paste handlers with the shared clipboard
 * InputManager, which arbitrates access between application instances.
 * (Previous summary said "mouse input" — a copy-paste leftover from the
 * mouse module.)
 * */
export class Clipboard {
	#appId = null
	#handlers = {}
	#mmanager = Manager

	/**
	 * @param {string} appId - identifier of the application creating this handler
	 * @param {*} container - currently unused; kept for signature parity with other input components
	 */
	constructor(appId, container) {
		console.log(`NEW Clipboard handler instance created for ${appId}`)
		// Suffix a UUID so several instances from the same app remain distinct.
		this.#appId = `${appId} ${Container.generateUUID()}`
	}

	/** @returns {string} unique id of this handler instance */
	getId() {
		return this.#appId
	}

	/** @returns {Object} shallow copy of the registered handlers, keyed by event name */
	getEvents() {
		return { ...this.#handlers }
	}

	/** Start receiving clipboard events via the shared manager. */
	enable() {
		this.#mmanager.register(this)
	}

	/** Stop receiving clipboard events. */
	disable() {
		this.#mmanager.unregister(this)
	}

	/**
	 * Register a callback for a clipboard event.
	 * @param {string} event - one of the EVENTS values
	 * @param {Function} callback - invoked with the event payload
	 * @param {*} accessReq - access level requested from InputAccessManager
	 */
	setAction(event, callback, accessReq) {
		this.#handlers[event] = {
			callback: callback,
			access: accessReq
		}
	}
}
export { Manager as ClipboardManager }
|
package example09
import java.io.File
import java.io.FileInputStream
/** Reads a file fully into memory and prints it using the platform charset. */
object ReadBinFile {
  def main(args: Array[String]): Unit = {
    val file = new File("src/example09/README.md")
    val in = new FileInputStream(file)
    try {
      // FileInputStream.read may return fewer bytes than requested, so loop
      // until the whole file has been consumed (the original did one read).
      val bytes = new Array[Byte](file.length.toInt)
      var offset = 0
      while (offset < bytes.length) {
        val n = in.read(bytes, offset, bytes.length - offset)
        if (n < 0)
          throw new java.io.EOFException(s"unexpected end of $file at offset $offset")
        offset += n
      }
      println(new String(bytes))
    } finally {
      // Close even when reading throws (the original leaked on exception).
      in.close()
    }
  }
}
|
#!/usr/bin/env python3
import numpy as np
import cv2
class Box:
    """
    Axis-aligned rectangle following OpenCV's (x, y, width, height) convention.

    The top-left corner is ``(x, y)``. As per OpenCV, a point ``(x, y)``
    corresponds to the array index ``array[y, x]``, so :attr:`indexes`
    yields ``(slice_y, slice_x)`` suitable for ``array[box.indexes]``.

    Attributes
    ----------
    x, y : int
        Coordinates of the top-left corner.
    width, height : int
        Extent of the box along x and y respectively.
    """

    def __init__(self, x, y, width, height):
        self._x = x
        self._y = y
        self._width = width
        self._height = height

    @property
    def x(self):
        """x-coordinate of the top-left corner."""
        return self._x

    @property
    def y(self):
        """y-coordinate of the top-left corner."""
        return self._y

    @property
    def width(self):
        """Horizontal extent of the box."""
        return self._width

    @property
    def height(self):
        """Vertical extent of the box."""
        return self._height

    @property
    def tl(self):
        """Top-left corner as ``(x, y)``."""
        return (self._x, self._y)

    @property
    def br(self):
        """Bottom-right corner as ``(x, y)``."""
        return (self._x + self._width, self._y + self._height)

    @property
    def tr(self):
        """Top-right corner as ``(x, y)``."""
        return (self._x + self._width, self._y)

    @property
    def bl(self):
        """Bottom-left corner as ``(x, y)``."""
        return (self._x, self._y + self._height)

    @property
    def center(self):
        """Geometric center, with half-extents truncated to int."""
        half_w = int(self._width / 2)
        half_h = int(self._height / 2)
        return (self._x + half_w, self._y + half_h)

    @property
    def area(self):
        """Rectangular area (width * height)."""
        return self._width * self._height

    @property
    def aspect(self):
        """Aspect ratio in height/width form."""
        return self._height / self._width

    @property
    def indexes(self):
        """2-D index pair ``(slice_y, slice_x)`` for use as ``array[box.indexes]``."""
        return (slice(self._y, self._y + self._height),
                slice(self._x, self._x + self._width))

    def contains(self, point):
        """Return True when ``point`` lies inside the box (borders inclusive)."""
        px, py = point[0], point[1]
        within_x = self._x <= px <= self._x + self._width
        within_y = self._y <= py <= self._y + self._height
        return within_x and within_y

    def overlap(self, box):
        """Return the fraction of ``box``'s area that intersects this box."""
        left = max(self._x, box.x)
        right = min(self._x + self._width, box.x + box.width)
        top = max(self._y, box.y)
        bottom = min(self._y + self._height, box.y + box.height)
        inter_w = right - left if right > left else 0
        inter_h = bottom - top if bottom > top else 0
        return (inter_w * inter_h) / box.area

    def is_superset_of(self, box):
        """Return True when ``box`` fits entirely inside this box."""
        fits_x = self._x <= box.x <= self._x + self._width - box.width
        fits_y = self._y <= box.y <= self._y + self._height - box.height
        return fits_x and fits_y

    def __str__(self):
        lines = ["Box:",
                 f"tl = ({self._x}, {self._y})",
                 f"width = {self._width}",
                 f"height = {self._height}",
                 f"area = {self.area}",
                 f"aspect = {self.aspect}"]
        return "\n".join(lines)
def covering_box(boxes):
    """
    Construct the smallest box which covers a collection of boxes.

    Parameters
    ----------
    boxes : iterable collection of Box

    Returns
    -------
    cover : Box
    """
    left = np.amin([b.x for b in boxes])
    top = np.amin([b.y for b in boxes])
    right = np.amax([b.x + b.width for b in boxes])
    bottom = np.amax([b.y + b.height for b in boxes])
    return Box(left, top, right - left, bottom - top)
def bounding_box(points):
    """
    Construct the minimal bounding box for a given set of 2-D points.

    Parameters
    ----------
    points : iterable collection of (int, int)

    Returns
    -------
    bounding : Box
    """
    # cv2.boundingRect wants an array-like of points; materialize the iterable.
    x, y, w, h = cv2.boundingRect(np.array(list(points)))
    return Box(x, y, w, h)
def merge_overlapping(boxes, max_overlap=0.05):
    """
    Merge all sufficiently overlapping boxes in a collection of boxes.

    Parameters
    ----------
    boxes : iterable collection of Box
    max_overlap : float, default=0.05
        Merge any pair of boxes for which one of them overlaps the other more
        than this value.

    Returns
    -------
    boxes_merged : list of Box
    """
    def _overlapping(a, b):
        # Symmetric test: either direction of overlap may exceed the threshold.
        return a.overlap(b) >= max_overlap or b.overlap(a) >= max_overlap

    def _absorb(accepted, candidate):
        # Fold `candidate` into `accepted`; a merge can create new overlaps,
        # hence the recursion with the freshly covering box.
        hits = [b for b in accepted if _overlapping(candidate, b)]
        if not hits:
            return accepted + [candidate]
        untouched = [b for b in accepted if not _overlapping(candidate, b)]
        return _absorb(untouched, covering_box(hits + [candidate]))

    merged = []
    for box in boxes:
        merged = _absorb(merged, box)
    return merged
def otsu_separation(img_gray, box):
    """
    Calculate the Otsu separation of a single-channel image restricted to a box.

    Parameters
    ----------
    img_gray : 2-D array of int
        Single channel image.
    box : Box
        The 2-D restriction of `img_gray` for which the Otsu separation is
        calculated.

    Returns
    -------
    otsu_sep : float
        The separation between the black and white class means, after
        determining class by Otsu thresholding.
    """
    region = (img_gray[box.indexes]).astype(np.uint8)
    # With THRESH_OTSU the fixed threshold argument (128) is ignored; the
    # returned value is the computed Otsu threshold.
    threshold, _ = cv2.threshold(region, 128, 255, cv2.THRESH_OTSU)
    mean_white = np.average(region[region > threshold])
    mean_black = np.average(region[region <= threshold])
    return mean_white - mean_black
def otsu_separation_color(img, box):
    """
    Calculate the Otsu separation of a colour image restricted to a box.

    Parameters
    ----------
    img : 3-D array of int
        Color image with 3 colour channels.
    box : Box
        The 2-D restriction of `img` for which the Otsu separation is
        calculated.

    Returns
    -------
    min_otsu_sep : float
        The minimum of the Otsu separations calculated for each colour channel
        of `img` and its grayscale transformation.
    """
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    separations = [otsu_separation(gray, box)]
    for channel in range(3):
        separations.append(otsu_separation(img[:, :, channel], box))
    return np.amin(separations)
|
---
_id: cac44ed0-b748-11e6-9b81-0bc3350a75b6
_parent: /articles/jekyll-static-comments/
replying_to: '4'
name: Michael Rose
email: 1ce71bc10b86565464b612093d89707e
hidden: ''
date: '2016-11-30T22:03:15.286Z'
---
Staticman now supports
[threaded comments](https://github.com/eduardoboucas/staticman/issues/35). The
Liquid you need to craft can get messy, but it's manageable if you don't nest
too deep.
|
from django import template
from django.utils.html import format_html, mark_safe

from ..constants import Status
register = template.Library()
@register.filter
def status_badge(subscriber):
    """Render the subscriber's status as a Bootstrap pill badge.

    Uses ``format_html`` so the interpolated class and display text are
    escaped, instead of ``mark_safe`` on a %-formatted string which trusted
    both values wholesale.
    """
    css_classes = {
        Status.PENDING: 'badge-warning',
        Status.SUBSCRIBED: 'badge-primary',
        Status.UNSUBSCRIBED: 'badge-danger',
        Status.CLEANED: 'badge-secondary',
    }
    # A KeyError here means a new Status member is missing a badge mapping —
    # fail loudly rather than render a broken badge.
    badge_class = css_classes[subscriber.status]
    badge_text = subscriber.get_status_display()
    return format_html('<span class="badge {} badge-pill">{}</span>',
                       badge_class, badge_text)
|
module Problem012 where
import Problem011
-- | Expand a run-length encoded list back to its original form:
-- a 'Single' contributes one element, a 'Multiple' contributes n copies.
decodeModified :: Eq a => [ModifiedEncoding a] -> [a]
decodeModified = concatMap expand
  where
    expand (Single x)     = [x]
    expand (Multiple n x) = replicate n x
|
<?php
/**
 * Horde_Share_Exception - the exception type thrown by the Horde_Share
 * package.
 *
 * Adds no members of its own; it extends Horde_Exception_Wrapped so callers
 * can catch share-specific failures distinctly while the wrapped original
 * exception is preserved.
 */
class Horde_Share_Exception extends Horde_Exception_Wrapped
{
}
|
; A118639: Smallest number expressible using the next Roman-numeral symbol.
; Submitted by Jon Maiga
; 1,4,9,40,90,400,900,4000,9000,40000,90000,400000,900000
; Loop $0 (the input index) times, maintaining the recurrence in $1..$3.
; NOTE(review): register roles below are inferred from the LODA instruction
; set, not from documentation of this program — confirm against the OEIS entry.
lpb $0
  sub $0,1
  add $1,1
  mov $2,$3
  mul $2,5
  trn $2,$1 ; truncated subtraction: $2 = max($2 - $1, 0)
  add $2,3
  mov $3,$1
  add $3,$1
  add $1,$2
lpe
; Result is $2 + 1 after the loop finishes.
mov $0,$2
add $0,1
|
use itertools::Itertools;
use rayon::prelude::*;
use std::thread::spawn;
#[macro_use]
extern crate may;
// https://en.wikipedia.org/wiki/Bailey–Borwein–Plouffe_formula
/// The k-th term of the BBP series for π: the bracketed rational sum
/// scaled by 16^-k.
fn bbp(k: u32) -> f64 {
    // Factor out the repeated 8k so each denominator is written once.
    let base = f64::from(8 * k);
    let sum = 4.0 / (base + 1.0) - 2.0 / (base + 4.0) - 1.0 / (base + 5.0) - 1.0 / (base + 6.0);
    // 16f64.powi is the idiomatic spelling of the old `(16 as f64).powi`.
    sum / 16f64.powi(k as i32)
}
pub fn pi(n: u32) -> f64 {
let mut result: f64 = 0.0;
for i in 0..n {
result += bbp(i);
}
result
}
pub fn pi_with_thread(n: u32, num: usize) -> f64 {
let mut result: f64 = 0.0;
let mut thread_handlers = vec![];
for chunk in &(0..n).chunks(num) {
let worklist = chunk.collect::<Vec<_>>();
thread_handlers.push(spawn(move || {
let mut result_in_child: f64 = 0.0;
for i in worklist {
result_in_child += bbp(i);
}
result_in_child
}));
}
for handle in thread_handlers {
result += handle.join().unwrap();
}
result
}
/// Sum the first `n` BBP terms in parallel on rayon's work-stealing pool.
pub fn pi_with_rayon(n: u32) -> f64 {
    let indices: Vec<u32> = (0..n).collect();
    indices.par_iter().map(|&i| bbp(i)).sum()
}
/// Sum the first `n` BBP terms on `may` coroutines, one coroutine per term.
/// NOTE(review): `may::config()` mutates process-global scheduler settings on
/// every call — presumably intended to run once per process; confirm against
/// the `may` crate documentation.
pub fn pi_with_may(n: u32, num: usize) -> f64 {
    let mut result: f64 = 0.0;
    // `num` worker threads, no dedicated IO workers (this workload is pure CPU).
    may::config().set_workers(num).set_io_workers(0);
    // Spawn all coroutines up front; join() below blocks until each finishes.
    let v = (0..n).map(|i| go!(move || { bbp(i) })).collect::<Vec<_>>();
    for i in v {
        result += i.join().unwrap();
    }
    result
}
#[cfg(test)]
mod tests {
    use crate::*;

    /// Number of series terms used by every smoke test below.
    const ITER_NUM: u32 = 10000;

    #[test]
    fn test_pi() {
        println!("pi = {}", pi(ITER_NUM));
    }

    #[test]
    fn test_pi_with_thread() {
        println!("pi_with_thread = {}", pi_with_thread(ITER_NUM, 4));
    }

    #[test]
    fn test_pi_with_rayon() {
        println!("pi_with_rayon = {}", pi_with_rayon(ITER_NUM));
    }

    #[test]
    fn test_pi_with_may() {
        // Fixed label: this previously printed "pi_with_rayon" while calling
        // pi_with_may, which made test output misleading.
        println!("pi_with_may = {}", pi_with_may(ITER_NUM, 4));
    }
}
|
/// Placeholder for a platform-specific screen capture service.
///
/// NOTE(review): no implementation is visible in this file — [capture]
/// unconditionally throws [UnimplementedError].
class SystemScreenCapturer {
  /// Capture the screen and write the image to [imagePath].
  /// [silent] presumably suppresses capture UI/feedback — confirm once a
  /// concrete implementation exists.
  void capture({required String imagePath, bool silent = true}) {
    throw UnimplementedError();
  }
}
|
import * as chai from 'chai';
import 'mocha';
import * as sinon from 'sinon';
import Utils from '../src/utils';
const chaiAsPromised = require('chai-as-promised');
chai.use(chaiAsPromised);
const expect = chai.expect;
/* tslint:disable:no-unused-expression */
describe('Utils', () => {
it('can deepSort() nested objects', () => {
const arbitraryObjectSorted = {
attribut1: 'valeurC',
attribut2: {
attributa: {
i: 'valeur',
j: 'valeur',
k: 'valeur',
},
attributb: 'valeurB',
},
attribut3: [{ x: 10, y: 2, z: 3 }, { a: 3, b: 2, c: 1 }],
};
/* tslint:disable:object-literal-sort-keys */
const arbitraryObjectNotSorted = {
attribut1: 'valeurC',
attribut3: [{ z: 3, y: 2, x: 10 }, { c: 1, a: 3, b: 2 }],
attribut2: {
attributb: 'valeurB',
attributa: {
j: 'valeur',
i: 'valeur',
k: 'valeur',
},
},
};
/* tslint:enable:object-literal-sort-keys */
expect(
JSON.stringify(Utils.deepSort(arbitraryObjectNotSorted)),
'deepSort(arbitraryObject) error',
).to.be.equal(JSON.stringify(arbitraryObjectSorted));
});
it('can deepCopy() nested objects', () => {
const arbitraryObject = {
attribut1: 'valeurC',
attribut2: {
attributa: {
i: 'valeur',
j: 'valeur',
k: 'valeur',
},
attributb: 'valeurB',
},
attribut3: 'valeurA',
};
const arbitraryObjectDeepCopy = Utils.deepCopy(arbitraryObject);
expect(arbitraryObjectDeepCopy, 'deepCopy(arbitraryObject) error').to.be.deep.equal(
arbitraryObject,
);
arbitraryObjectDeepCopy.attribut1 = 'new value';
expect(arbitraryObjectDeepCopy, 'deepCopy(arbitraryObject) error').to.not.be.deep.equal(
arbitraryObject,
);
// witness reference copy
const arbitraryObjectRefCopy = arbitraryObject;
arbitraryObjectRefCopy.attribut1 = 'new value 2';
expect(arbitraryObjectRefCopy, 'deepCopy(arbitraryObject) error').to.be.deep.equal(
arbitraryObject,
);
});
  // Positive cases: both string literals and String(...) (still a primitive)
  // must be accepted.
  it('can return true if variable is String or string', () => {
    expect(Utils.isString('this is a string'), 'istring("") error').to.be.true;
    expect(Utils.isString(String('this is a string')), 'istring("") error').to.be.true;
  });
  // Negative cases: numbers and plain objects are rejected.
  // NOTE(review): the no-magic-numbers disable below is never re-enabled in
  // this file — confirm that is intentional.
  it('cannot return true if variable is not a string', () => {
    /* tslint:disable:no-magic-numbers */
    expect(Utils.isString(1234), 'istring("") error').to.be.false;
    expect(Utils.isString({ var: 'plop' }), 'istring("") error').to.be.false;
  });
it('getCurrentTimestampInSecond()', () => {
const time = Math.floor(Date.now() / 1000);
expect(Utils.getCurrentTimestampInSecond(), 'getCurrentTimestampInSecond() error').to.be.equal(
time,
);
});
describe('unique', () => {
it('can unique with different case in the values', () => {
const arbitraryArray = [
{ att1: 'value1', att2: 'value2' },
{ att1: 'value1', att2: 'Value2' },
{ att3: 'value3', att4: 'value4' },
{ att1: 'value1', att2: 'value2' },
];
/* tslint:disable:object-literal-sort-keys */
expect(Utils.unique(arbitraryArray), 'unique(arbitraryArray) error').to.deep.equal({
uniqueItems: [{ att1: 'value1', att2: 'value2' }, { att3: 'value3', att4: 'value4' }],
duplicates: [{ att1: 'value1', att2: 'Value2' }, { att1: 'value1', att2: 'value2' }],
});
});
it('can unique with different case in the key', () => {
const arbitraryArray = [
{ att1: 'value1', att2: 'value2' },
{ att1: 'value1', Att2: 'Value2' },
{ att3: 'value3', att4: 'value4' },
{ att1: 'value1', att2: 'value2' },
];
/* tslint:disable:object-literal-sort-keys */
expect(Utils.unique(arbitraryArray), 'unique(arbitraryArray) error').to.deep.equal({
uniqueItems: [
{ att1: 'value1', att2: 'value2' },
{ att1: 'value1', Att2: 'Value2' },
{ att3: 'value3', att4: 'value4' },
],
duplicates: [{ att1: 'value1', att2: 'value2' }],
});
});
it('can unique without duplication', () => {
const arbitraryArray = [
{ att1: 'value1', att2: 'value2' },
{ att1: 'value1', Att2: 'Value2' },
{ att3: 'value3', att4: 'value4' },
{ att5: 'value5', att6: 'value6' },
];
/* tslint:disable:object-literal-sort-keys */
expect(Utils.unique(arbitraryArray), 'unique(arbitraryArray) error').to.deep.equal({
uniqueItems: [
{ att1: 'value1', att2: 'value2' },
{ att1: 'value1', Att2: 'Value2' },
{ att3: 'value3', att4: 'value4' },
{ att5: 'value5', att6: 'value6' },
],
duplicates: [],
});
});
});
describe('uniqueByProperty', () => {
it('can uniqueByProperty with different case in the values', () => {
const arbitraryArray = [
{ att1: 'value1', att2: 'value2' },
{ att1: 'Value1', att2: 'value2' },
{ att1: 'value3', att4: 'value4' },
{ att1: 'value1', att2: 'value2' },
];
/* tslint:disable:object-literal-sort-keys */
expect(
Utils.uniqueByProperty(arbitraryArray, 'att1'),
'uniqueByProperty(arbitraryArray) error',
).to.deep.equal({
uniqueItems: [{ att1: 'value1', att2: 'value2' }, { att1: 'value3', att4: 'value4' }],
duplicates: [{ att1: 'Value1', att2: 'value2' }, { att1: 'value1', att2: 'value2' }],
});
});
it('can unique without duplication', () => {
const arbitraryArray = [
{ att1: 'value1', att2: 'value2' },
{ att1: 'value12', Att2: 'Value2' },
{ att1: 'value3', att4: 'value4' },
{ att1: 'value5', att6: 'value6' },
];
/* tslint:disable:object-literal-sort-keys */
expect(
Utils.uniqueByProperty(arbitraryArray, 'att1'),
'unique(arbitraryArray) error',
).to.deep.equal({
uniqueItems: [
{ att1: 'value1', att2: 'value2' },
{ att1: 'value12', Att2: 'Value2' },
{ att1: 'value3', att4: 'value4' },
{ att1: 'value5', att6: 'value6' },
],
duplicates: [],
});
});
});
describe('flatten2DimensionsArray', () => {
it('can flatten2DimensionsArray() 1 dimension array', () => {
const arbitraryArray: any[] = [1, 2, 3, 4, 5];
const flattenArray = Utils.flatten2DimensionsArray(arbitraryArray);
expect(flattenArray, 'flatten2DimensionsArray(twoDimensionsArray) error').to.be.deep.equal([
1,
2,
3,
4,
5,
]);
});
it('can flatten2DimensionsArray() 3 dimensions array', () => {
const arbitraryArray: any[] = [[1, 2], [3], [4, [5, 6]]];
const flattenArray = Utils.flatten2DimensionsArray(arbitraryArray);
expect(flattenArray, 'flatten2DimensionsArray(twoDimensionsArray) error').to.be.deep.equal([
1,
2,
3,
4,
[5, 6],
]);
});
it('can flatten2DimensionsArray() empty array', () => {
const emptyArray: any[] = [];
const flattenArray = Utils.flatten2DimensionsArray(emptyArray);
expect(flattenArray, 'flatten2DimensionsArray(twoDimensionsArray) error').to.be.deep.equal(
[],
);
});
it('can flatten2DimensionsArray() two dimensionals array', () => {
const twoDimensionsArray = [[1, 2], [3], [4, 5]];
const flattenArray = Utils.flatten2DimensionsArray(twoDimensionsArray);
expect(flattenArray, 'flatten2DimensionsArray(twoDimensionsArray) error').to.be.deep.equal([
1,
2,
3,
4,
5,
]);
});
});
describe('timeoutPromise', () => {
let clock: sinon.SinonFakeTimers;
beforeEach(async () => {
clock = sinon.useFakeTimers();
});
afterEach(async () => {
sinon.restore();
});
it('rejects with specified message if timeout is reached', (done) => {
const errorMessage = 'An error occured !';
let rejected = false;
Utils.timeoutPromise(1000, errorMessage).then(() => {
expect.fail('timeoutPromise should not be fulfilled');
}).catch((err) => {
rejected = true;
expect(err.toString()).to.contains(errorMessage);
done();
});
expect(rejected).to.be.false;
clock.tick(999);
expect(rejected).to.be.false;
clock.tick(1);
});
});
});
|
extern crate proc_macro;
use proc_macro::TokenStream;
use serde_json::Value;
/// Split a run of adjacent bracketed lists (e.g. `"[1][2,[3]]"`) into one
/// slice per top-level list, tracking bracket nesting depth.
fn split_to_vec(input: &str) -> Vec<&str> {
    let mut start = 0;
    let mut depth = 0;
    let mut lists = vec![];
    // char_indices yields BYTE offsets, which is what slicing requires.
    // The original used chars().enumerate(), whose char counts diverge from
    // byte offsets on any non-ASCII input, producing wrong slices or panics.
    for (pos, ch) in input.char_indices() {
        match ch {
            '[' => {
                if depth == 0 {
                    start = pos;
                }
                depth += 1;
            }
            ']' => {
                depth -= 1;
                if depth == 0 {
                    // `]` is a 1-byte char, so `..=pos` includes it exactly.
                    lists.push(&input[start..=pos]);
                }
            }
            _ => (),
        }
    }
    lists
}
/// Convert names to Rust-style naming: every uppercase letter found after the
/// first comma (i.e. in every name but the first, the type name) is replaced
/// by an underscore plus its lowercase form.
fn turn_to_legal_name(name: &str) -> String {
    let mut out = String::with_capacity(name.len());
    let mut past_first = false;
    for ch in name.chars() {
        if ch.is_ascii_uppercase() && past_first {
            out.push('_');
            out.push(ch.to_ascii_lowercase());
        } else {
            out.push(ch);
            if ch == ',' {
                past_first = true;
            }
        }
    }
    out
}
/// Render a JSON argument array as a Rust call-argument list.
trait ToArgs {
    fn to_args(&self) -> String;
}

impl ToArgs for Value {
    fn to_args(&self) -> String {
        let items = self.as_array().unwrap();
        // JSON strings Display with their quotes, so appending `.to_owned()`
        // yields a Rust String expression; everything else passes through.
        let rendered: Vec<String> = items
            .iter()
            .map(|v| {
                if v.is_string() {
                    format!("{}.to_owned()", v)
                } else {
                    v.to_string()
                }
            })
            .collect();
        // join on an empty Vec is "", matching the old empty-array branch.
        rendered.join(", ")
    }
}
/// Translate a leetcode-style test description — three adjacent JSON lists of
/// method names, argument lists and expected return values — into Rust
/// statements that build the object and assert each call's result.
fn json_to_code(input: TokenStream) -> String {
    let input = input.to_string();
    let v = split_to_vec(&input);
    // v[0]: method names, v[1]: argument lists, v[2]: expected returns.
    let (funcs, args, rets) = (v[0], v[1], v[2]);
    // Deserialize the JSON pieces. Pitfall: TokenStream::to_string renders
    // "-123" as "- 123", so undo the inserted space ("- " => "-") first.
    let funcs = serde_json::from_str::<Value>(&turn_to_legal_name(funcs)).unwrap();
    let args = serde_json::from_str::<Value>(&args.replace("- ", "-")).unwrap();
    let rets = serde_json::from_str::<Value>(&rets.replace("- ", "-")).unwrap();
    let mut code = String::new();
    // The first entry is by convention the constructor call.
    code.push_str(&format!(
        "let mut obj = {}::new({});\n",
        funcs[0].as_str().unwrap(),
        args[0].to_args(),
    ));
    // Remaining entries become method calls; when an expected value is
    // non-null the call is wrapped in assert_eq!, echoing the call text in
    // the panic message via a raw string.
    for i in 1..funcs.as_array().unwrap().len() {
        let mut stmt = format!("obj.{}({})", funcs[i].as_str().unwrap(), args[i].to_args());
        if !rets[i].is_null() {
            stmt = format!(
                r##"assert_eq!({}, {}, r#"{}"#)"##,
                stmt,
                rets[i].to_string(),
                stmt
            );
        }
        stmt.push_str(";\n");
        code.push_str(&stmt);
    }
    code
}
/// Generate code from leetcode json
///
/// # Example
///
/// ```ignore
/// leetcode_test!(
/// ["Trie", "insert", "search"]
/// [[], ["apple"], ["apple"]]
/// [null, null, true]
/// )
/// ```
///
/// this will be expaned to
///
/// ``` ignore
/// let mut obj = Trie::new();
/// obj.insert("apple".to_owned());
/// assert_eq!(obj.search("apple".to_owned()), true);
/// ```
#[proc_macro]
pub fn leetcode_test(input: TokenStream) -> TokenStream {
    // The generated statements are parsed back into tokens and expanded
    // directly at the macro call site.
    json_to_code(input).parse().unwrap()
}

/// Debug variant: expands to a raw string literal containing the generated
/// code instead of the code itself, so the expansion can be printed/inspected.
#[proc_macro]
pub fn leetcode_test_debug(input: TokenStream) -> TokenStream {
    format!(r###"r##"{}"##"###, json_to_code(input))
        .parse()
        .unwrap()
}
|
using UnityEngine;
namespace CommandPattern.Case1.Base1 {
    /// <summary>
    /// The 'Abstract Command' of the Command pattern: pairs a trigger key
    /// with a move operation on a Cube that can be executed and undone.
    /// </summary>
    // ? Use singleton
    public abstract class MoveCommand {
        public Cube Cube;
        public KeyCode KeyCode;

        public MoveCommand(KeyCode keyCode) {
            KeyCode = keyCode;
        }

        /// <summary>True on the frame the bound key is first pressed.</summary>
        public bool CanExecute {
            get { return Input.GetKeyDown(KeyCode); }
        }

        /// <summary>Perform the move.</summary>
        public abstract void Execute();

        /// <summary>Reverse a previously executed move.</summary>
        public abstract void Undo();
    }
}
|
#pragma once
#include "Event.h"
inline namespace MARS
{
// Base class for all keyboard events: stores the key code and tags the event
// with the Keyboard and Input categories.
class EXPORT_TYPE KeyEvent : public Event
{
public:
    // Key code of the key this event refers to.
    inline int32 GetKeyCode() const { return KeyCode; }

    EVENT_CLASS_CATEGORY(CategoryKeyboard | CategoryInput)

protected:
    // Protected: only concrete subclasses may construct a KeyEvent.
    KeyEvent(int32 InKeyCode)
        : KeyCode(InKeyCode) { }

    int32 KeyCode;
};
// Emitted when a key goes down; RepeatCount carries the OS auto-repeat count.
// NOTE(review): relies on <sstream> and the String alias being provided via
// Event.h — no direct include here; confirm.
class EXPORT_TYPE KeyPressedEvent : public KeyEvent
{
public:
    KeyPressedEvent(int32 InKeyCode, int32 InRepeatCount) : KeyEvent(InKeyCode), RepeatCount(InRepeatCount) {}

    inline int32 GetRepeatCount() const { return RepeatCount; }

    // Human-readable form used for logging/tracing.
    String ToString() const override
    {
        std::stringstream ss;
        ss << "KeyPressedEvent: " << KeyCode << " (" << RepeatCount << " repeats)";
        return ss.str();
    }

    EVENT_CLASS_TYPE(KeyPressed)

private:
    int32 RepeatCount;
};
// Emitted when a previously pressed key is released.
class EXPORT_TYPE KeyReleasedEvent : public KeyEvent
{
public:
    KeyReleasedEvent(int32 InKeyCode) : KeyEvent(InKeyCode) { }

    // Human-readable form used for logging/tracing.
    String ToString() const override
    {
        std::stringstream ss;
        ss << "KeyReleasedEvent " << KeyCode;
        return ss.str();
    }

    EVENT_CLASS_TYPE(KeyReleased)
};
// Emitted for character input (text typing) as opposed to raw key presses.
class EXPORT_TYPE KeyTypedEvent : public KeyEvent
{
public:
    // Parameter renamed from `KeyCode` to `InKeyCode`: the old name shadowed
    // the inherited KeyCode member and broke the `In`-prefix convention used
    // by the sibling event classes.
    KeyTypedEvent(int32 InKeyCode) : KeyEvent(InKeyCode) {}
    EVENT_CLASS_TYPE(KeyTyped)
};
}
|
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Text;
using ALinq.SqlClient;
namespace ALinq.SqlClient
{
internal class SqlColumnizer
{
// Fields
private ColumnDeclarer declarer;
private ColumnNominator nominator;
// Methods
        /// <summary>
        /// Builds the columnizer with an optional predicate deciding whether
        /// a given SqlExpression may be turned into a column.
        /// </summary>
        internal SqlColumnizer(Func<SqlExpression, bool> fnCanBeColumn)
        {
            this.nominator = new ColumnNominator(fnCanBeColumn);
            this.declarer = new ColumnDeclarer();
        }

        /// <summary>
        /// Two-phase columnization: Nominate collects the candidate
        /// expressions, Declare then wraps each candidate in a SqlColumn.
        /// </summary>
        internal SqlExpression ColumnizeSelection(SqlExpression selection)
        {
            return this.declarer.Declare(selection, this.nominator.Nominate(selection));
        }
// Nested Types
        /// <summary>
        /// Visitor that rewrites every nominated candidate expression into a
        /// SqlColumn, leaving existing Column/ColumnRef nodes untouched.
        /// </summary>
        private class ColumnDeclarer : SqlVisitor
        {
            // Candidate set produced by ColumnNominator for the current pass.
            private HashSet<SqlExpression> candidates;

            internal ColumnDeclarer()
            {
            }

            internal SqlExpression Declare(SqlExpression expression, HashSet<SqlExpression> candidates)
            {
                this.candidates = candidates;
                return (SqlExpression) this.Visit(expression);
            }

            internal override SqlNode Visit(SqlNode node)
            {
                SqlExpression item = node as SqlExpression;
                // Non-expressions and non-candidates recurse normally.
                if ((item == null) || !this.candidates.Contains(item))
                {
                    return base.Visit(node);
                }
                // Candidates that are not already columns get wrapped; the
                // wrapper inherits the candidate's CLR/SQL type and source.
                if ((item.NodeType != SqlNodeType.Column) && (item.NodeType != SqlNodeType.ColumnRef))
                {
                    return new SqlColumn(item.ClrType, item.SqlType, null, null, item, item.SourceExpression);
                }
                return item;
            }
        }
        /// <summary>
        /// Visitor that collects the set of expressions which can safely be
        /// projected as SQL columns ("nominated"), honoring an optional
        /// caller-supplied veto predicate.
        /// </summary>
        private class ColumnNominator : SqlVisitor
        {
            private HashSet<SqlExpression> candidates;
            private Func<SqlExpression, bool> fnCanBeColumn; // optional veto callback
            private bool isBlocked; // true while the current subtree cannot be columnized

            internal ColumnNominator(Func<SqlExpression, bool> fnCanBeColumn)
            {
                this.fnCanBeColumn = fnCanBeColumn;
            }

            // Node types whose children must NOT be visited for columnization.
            private static bool CanRecurseColumnize(SqlExpression expr)
            {
                switch (expr.NodeType)
                {
                    case SqlNodeType.Element:
                    case SqlNodeType.Exists:
                    case SqlNodeType.ClientQuery:
                    case SqlNodeType.Column:
                    case SqlNodeType.ColumnRef:
                    case SqlNodeType.AliasRef:
                    case SqlNodeType.Link:
                    case SqlNodeType.Multiset:
                    case SqlNodeType.ScalarSubSelect:
                    case SqlNodeType.Select:
                    case SqlNodeType.SharedExpressionRef:
                    case SqlNodeType.Value:
                    case SqlNodeType.Nop:
                        return false;
                }
                return true;
            }

            // Node types that only exist client-side and therefore can never
            // become SQL columns; OuterJoinedValue defers to its operand.
            private static bool IsClientOnly(SqlExpression expr)
            {
                switch (expr.NodeType)
                {
                    case SqlNodeType.DiscriminatedType:
                    case SqlNodeType.Element:
                    case SqlNodeType.Link:
                    case SqlNodeType.ClientArray:
                    case SqlNodeType.ClientCase:
                    case SqlNodeType.ClientQuery:
                    case SqlNodeType.AliasRef:
                    case SqlNodeType.Grouping:
                    case SqlNodeType.Multiset:
                    case SqlNodeType.Nop:
                    case SqlNodeType.SharedExpression:
                    case SqlNodeType.SharedExpressionRef:
                    case SqlNodeType.SimpleExpression:
                    case SqlNodeType.TypeCase:
                        return true;
                    case SqlNodeType.OuterJoinedValue:
                        return IsClientOnly(((SqlUnary) expr).Operand);
                }
                return false;
            }

            /// <summary>Run the visitor over the tree and return the candidate set.</summary>
            internal HashSet<SqlExpression> Nominate(SqlExpression expression)
            {
                this.candidates = new HashSet<SqlExpression>();
                this.isBlocked = false;
                this.Visit(expression);
                return this.candidates;
            }

            internal override SqlNode Visit(SqlNode node)
            {
                SqlExpression expr = node as SqlExpression;
                if (expr != null)
                {
                    // Save and clear the blocked flag so each subtree is
                    // judged on its own; restored via |= below so a blocked
                    // child also blocks its ancestors.
                    bool isBlocked = this.isBlocked;
                    this.isBlocked = false;
                    if (CanRecurseColumnize(expr))
                    {
                        base.Visit(expr);
                    }
                    if (!this.isBlocked)
                    {
                        // Nominate only expressions that are server-side, not
                        // already columns, SQL-typed as columns, and accepted
                        // by the optional predicate.
                        if (!IsClientOnly(expr) && (expr.NodeType != SqlNodeType.Column) && expr.SqlType.CanBeColumn &&
                            (this.fnCanBeColumn == null || this.fnCanBeColumn(expr)))
                        {
                            this.candidates.Add(expr);
                        }
                        else
                        {
                            this.isBlocked = true;
                        }
                    }
                    this.isBlocked |= isBlocked;
                }
                return node;
            }

            // The three overrides below visit only the discriminator/values
            // of each case expression (not every child the base visitor would
            // touch), restricting what can be nominated inside cases.
            internal override SqlExpression VisitClientCase(SqlClientCase c)
            {
                c.Expression = this.VisitExpression(c.Expression);
                int num = 0;
                int count = c.Whens.Count;
                while (num < count)
                {
                    c.Whens[num].Value = this.VisitExpression(c.Whens[num].Value);
                    num++;
                }
                return c;
            }

            internal override SqlExpression VisitSimpleCase(SqlSimpleCase c)
            {
                c.Expression = this.VisitExpression(c.Expression);
                int num = 0;
                int count = c.Whens.Count;
                while (num < count)
                {
                    c.Whens[num].Value = this.VisitExpression(c.Whens[num].Value);
                    num++;
                }
                return c;
            }

            internal override SqlExpression VisitTypeCase(SqlTypeCase tc)
            {
                tc.Discriminator = this.VisitExpression(tc.Discriminator);
                int num = 0;
                int count = tc.Whens.Count;
                while (num < count)
                {
                    tc.Whens[num].TypeBinding = this.VisitExpression(tc.Whens[num].TypeBinding);
                    num++;
                }
                return tc;
            }
        }
        // NOTE(review): pass-through visitor — VisitTable only defers to the
        // base implementation, so this class currently has no effect anywhere
        // in this file. Possibly a stub for behavior implemented elsewhere;
        // confirm before removing.
        private class ColumnAppendToTable : SqlVisitor
        {
            internal override SqlTable VisitTable(SqlTable tab)
            {
                return base.VisitTable(tab);
            }
        }
}
}
|
//
// MainAppViewController.h
// helloworld
//
// Created by chen on 14/7/13.
// Copyright (c) 2014年 chen. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "LeftViewController.h"
// Main application view controller. Adopts TCLeftListSelectDelegate —
// presumably to receive selection callbacks from LeftViewController's list;
// confirm against LeftViewController.h.
@interface MainAppViewController : UIViewController<TCLeftListSelectDelegate>
@end
|
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scouter.server.tagcnt.first;
import java.io.File
import java.io.RandomAccessFile
import scouter.io.DataInputX
import scouter.io.DataOutputX
import scouter.server.tagcnt.core.TagCountUtil
import scouter.util.IClose
class KeyDataFile(path: String) extends IClose {
class ITEM {
var deleted = false
var value: Array[Int]=null
var key: Array[Byte]=null
var link = 0L
var next = 0L
}
val file = new File(path + ".kfile");
var raf = new RandomAccessFile(file, "rw");
if (this.raf.length() == 0) {
this.raf.write(Array(0xCA.toByte, 0xFE.toByte));
}
  /** Read the full record stored at byte offset `pos`.
   *
   *  On-disk layout (mirrors write()): 1-byte deleted flag, 5-byte hash link,
   *  BUCKET_SIZE 4-byte values, then a length-prefixed key. `next` is set to
   *  the file position immediately after the record.
   *
   *  NOTE(review): the values are read here as Int while write()/getValue()
   *  treat the same 4-byte region as Float — identical width, but confirm
   *  which interpretation callers of getRecord expect.
   */
  def getRecord(pos: Long): ITEM = {
    this.synchronized {
      this.raf.seek(pos);
      val r = new ITEM();
      val in = new DataInputX(this.raf);
      // Bulk-read the fixed-size head of the record in a single call.
      val buf = in.read(1 + 5 + TagCountUtil.BUCKET_SIZE * 4);
      val in2 = new DataInputX(buf);
      r.deleted = in2.readBoolean();
      r.link = in2.readLong5();
      r.value = new Array[Int](TagCountUtil.BUCKET_SIZE);
      var inx = 0
      while (inx < TagCountUtil.BUCKET_SIZE) {
        r.value(inx) = in2.readInt();
        inx += 1
      }
      // The variable-length key follows the fixed head in the file itself.
      r.key = in.readShortBytes();
      r.next = this.raf.getFilePointer();
      return r;
    }
  }
def isDeleted(pos: Long): Boolean = {
this.synchronized {
this.raf.seek(pos);
return new DataInputX(this.raf).readBoolean();
}
}
def getHashLink(pos: Long): Long = {
this.synchronized {
this.raf.seek(pos + 1);
return new DataInputX(this.raf).readLong5();
}
}
def getKey(pos: Long): Array[Byte] = {
this.synchronized {
this.raf.seek(pos + 1 + 5 + TagCountUtil.BUCKET_SIZE * 4);
val in = new DataInputX(this.raf);
return in.readShortBytes();
}
}
def getValue(pos: Long): Array[Float] = {
this.synchronized {
this.raf.seek(pos + 1 + 5);
val bytes = new DataInputX(this.raf).read(TagCountUtil.BUCKET_SIZE * 4);
//
val in = new DataInputX(bytes);
val value = new Array[Float](TagCountUtil.BUCKET_SIZE)
var inx = 0
while (inx < TagCountUtil.BUCKET_SIZE) {
value(inx) = in.readFloat();
inx += 1
}
return value;
}
}
def setDelete(pos: Long, deleted: Boolean) {
this.synchronized {
this.raf.seek(pos);
new DataOutputX(this.raf).writeBoolean(deleted);
}
}
def setHashLink(pos: Long, link: Long) {
this.synchronized {
this.raf.seek(pos + 1);
new DataOutputX(this.raf).writeLong5(link);
}
}
def write(pos: Long, next: Long, key: Array[Byte], value: Array[Float]) {
this.synchronized {
val out = new DataOutputX();
out.writeBoolean(false);
out.writeLong5(next);
var inx = 0
while (inx < TagCountUtil.BUCKET_SIZE) {
out.writeFloat(value(inx));
inx += 1
}
out.writeShortBytes(key);
this.raf.seek(pos);
this.raf.write(out.toByteArray());
}
}
def update(pos: Long, hhmm: Int, value: Int) {
this.synchronized {
this.raf.seek(pos);
val bucketPos = TagCountUtil.getBucketPos(hhmm);
this.raf.seek(pos + 1 + 5 + bucketPos * 4);
this.raf.write(DataOutputX.toBytes(value));
}
}
def updateAdd(pos: Long, hhmm: Int, value: Float): Float = {
this.synchronized {
val bucketPos = TagCountUtil.getBucketPos(hhmm);
this.raf.seek(pos + 1 + 5 + bucketPos * 4);
val old = new DataInputX(this.raf).readFloat();
this.raf.seek(pos + 1 + 5 + bucketPos * 4);
this.raf.write(DataOutputX.toBytes(old + value));
return old + value;
}
}
def update(pos: Long, value: Array[Float]) {
this.raf.seek(pos + 1 + 5);
val out = new DataOutputX(this.raf);
var inx = 0
while (inx < TagCountUtil.BUCKET_SIZE) {
out.writeFloat(value(inx));
inx += 1
}
}
def updateAdd(pos: Long, value: Array[Int]): Int = {
this.synchronized {
this.raf.seek(pos + 1 + 5);
val oldbytes = new DataInputX(this.raf).read(value.length * 4);
val out = new DataOutputX();
var idx = 0
while (idx < value.length) {
val old = DataInputX.toInt(oldbytes, idx * 4);
out.writeInt(old + value(idx));
idx += 1
}
this.raf.seek(pos + 1 + 5);
this.raf.write(out.toByteArray());
return value.length;
}
}
def append(next: Long, key: Array[Byte], value: Array[Float]): Long = {
this.synchronized {
val pos = this.raf.length();
write(pos, next, key, value);
return pos;
}
}
def close() {
this.synchronized {
if (this.raf == null)
return ;
try {
this.raf.close();
} catch {
case _:Throwable =>
}
this.raf = null;
}
}
def getFirstPos() = 2L
def getLength() = if (raf == null) 0 else raf.length();
}
|
<?php
namespace forStubMockTesting;
class User {

    /** @var string|null Display name, set by createUser(). */
    public $name;

    /** @var string|null E-mail address, set by createUser(). */
    public $email;

    public function __construct()
    {
        // Demo side effect kept as-is; tests rely on observing construction.
        echo 'constructor was called!';
    }

    /**
     * Populate, validate and persist a user.
     *
     * Properties are now declared above: assigning undeclared ("dynamic")
     * properties is deprecated as of PHP 8.2.
     *
     * @param string $name
     * @param string $email
     * @return bool true when validation passed and save() succeeded
     */
    public function createUser($name, $email)
    {
        $this->name = $name;
        $this->email = $email;
        if ($this->validate())
        {
            return $this->save();
        }
        else
        {
            return false;
        }
    }

    /**
     * @return bool true when name is non-empty and email passes FILTER_VALIDATE_EMAIL
     */
    public function validate()
    {
        if (!empty($this->name) && filter_var($this->email, FILTER_VALIDATE_EMAIL))
            return true;
        else
            return false;
    }

    /**
     * Persist the user ("real" operation in this stub/mock demo).
     *
     * @return bool always true
     */
    public function save()
    {
        echo 'User was saved in database - real operation!';
        return true;
    }
}
|
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Maximum number of additional uncovered ("missing") lines a proposed change
# may introduce before this check fails.
ALLOWED_EXTRA_MISSING=4

# Print the first (header) line of a coverage report followed by a
# zero-context unified diff of the two reports (diff header stripped).
show_diff () {
    head -1 "$1"
    diff -U 0 "$1" "$2" | sed 1,2d
}

# Stash uncommitted changes, checkout previous commit and save coverage report
uncommitted=$(git status --porcelain | grep -v "^??")
[[ -n $uncommitted ]] && git stash > /dev/null
git checkout HEAD^

baseline_report=$(mktemp -t mistral_coverageXXXXXXX)
find . -type f -name "*.pyc" -delete && python setup.py testr --coverage --testr-args="$*"
coverage report -m > "$baseline_report"
baseline_missing=$(awk 'END { print $3 }' "$baseline_report")
previous_sha=$(git rev-parse HEAD)

# Checkout back and unstash uncommitted changes (if any)
git checkout -
[[ -n $uncommitted ]] && git stash pop > /dev/null

# Erase previously collected coverage data.
coverage erase

# Generate and save coverage report
current_report=$(mktemp -t mistral_coverageXXXXXXX)
find . -type f -name "*.pyc" -delete && python setup.py testr --coverage --testr-args="$*"
coverage report -m > "$current_report"
current_missing=$(awk 'END { print $3 }' "$current_report")

# Show coverage details
allowed_missing=$((baseline_missing+ALLOWED_EXTRA_MISSING))
echo "Allowed to introduce missing lines : ${ALLOWED_EXTRA_MISSING}"
# Typo fix: "Copmared" -> "Compared".
echo "Compared against ${previous_sha}"
echo "Missing lines in previous commit : ${baseline_missing}"
echo "Missing lines in proposed change : ${current_missing}"

if [ "$allowed_missing" -gt "$current_missing" ];
then
    if [ "$baseline_missing" -lt "$current_missing" ];
    then
        show_diff "$baseline_report" "$current_report"
        echo "I believe you can cover all your code with 100% coverage!"
    else
        echo "Thank you! You are awesome! Keep writing unit tests! :)"
    fi
    exit_code=0
else
    show_diff "$baseline_report" "$current_report"
    echo "Please write more unit tests, we should keep our test coverage :( "
    exit_code=1
fi

rm "$baseline_report" "$current_report"
exit $exit_code
|
use std::ops::{Add, Sub, Mul, Div, AddAssign};
///
/// Represents a scalar value which can either be single (f32) or double (f64) precision
///
/// The `private::Sealed` supertrait prevents code outside this module from
/// adding further implementations, so `f32` and `f64` are the only possible
/// `Scalar` types (the sealed-trait pattern).
///
pub trait Scalar:
    private::Sealed +
    Copy +
    Add<Self, Output=Self> +
    Sub<Self, Output=Self> +
    Mul<Self, Output=Self> +
    Div<Self, Output=Self> +
    AddAssign +
    PartialOrd {
    ///
    /// Computes the absolute value
    ///
    fn abs(self) -> Self;
    ///
    /// Compute the square root
    ///
    fn sqrt(self) -> Self;
    ///
    /// Indicates whether or not the value is finite
    ///
    fn is_finite(self) -> bool;
    ///
    /// Provides a representation of the number zero
    ///
    fn zero() -> Self;
    ///
    /// Provides a representation of the number one
    ///
    fn one() -> Self;
    ///
    /// Constructs a float from an f32 value
    ///
    fn from_f32(x: f32) -> Self;
    ///
    /// Constructs a float from an f64 value (may lose precision for f32)
    ///
    fn from_f64(x: f64) -> Self;
    ///
    /// Constructs a float from a usize value (may lose precision for large values)
    ///
    fn from_usize(x: usize) -> Self;
}
///
/// Implementation for f32 single-precision values.
/// Numeric methods delegate to the inherent `f32` operations.
///
impl Scalar for f32 {
    fn abs(self) -> Self {
        f32::abs(self)
    }
    fn sqrt(self) -> Self {
        f32::sqrt(self)
    }
    fn is_finite(self) -> bool {
        f32::is_finite(self)
    }
    fn zero() -> Self {
        0.0_f32
    }
    fn one() -> Self {
        1.0_f32
    }
    fn from_f32(x: f32) -> Self {
        x
    }
    fn from_f64(x: f64) -> Self {
        x as f32
    }
    fn from_usize(x: usize) -> Self {
        x as f32
    }
}
///
/// Implementation for f64 double-precision values.
/// Numeric methods delegate to the inherent `f64` operations.
///
impl Scalar for f64 {
    fn abs(self) -> Self {
        f64::abs(self)
    }
    fn sqrt(self) -> Self {
        f64::sqrt(self)
    }
    fn is_finite(self) -> bool {
        f64::is_finite(self)
    }
    fn zero() -> Self {
        0.0_f64
    }
    fn one() -> Self {
        1.0_f64
    }
    fn from_f32(x: f32) -> Self {
        x as f64
    }
    fn from_f64(x: f64) -> Self {
        x
    }
    fn from_usize(x: usize) -> Self {
        x as f64
    }
}
/// Holds the `Sealed` marker trait used to restrict which types may implement
/// `Scalar`: only types with a `Sealed` impl in this module qualify.
mod private {
    pub trait Sealed {}
    impl Sealed for f32 {}
    impl Sealed for f64 {}
}
|
#!/bin/sh
# © 2021 Qualcomm Innovation Center, Inc. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause

# Emit C preprocessor defines describing the current git revision.
# HYP_GIT_VERSION is the short commit hash, suffixed with "-dirty" when the
# work tree has uncommitted changes.
status=`git diff HEAD --quiet || echo '-dirty'`
echo "#define HYP_GIT_VERSION `git rev-parse --short HEAD`$status"
# Clean builds use the (reproducible) commit date; dirty builds fall back to
# the current wall-clock time.
if [ -z "$status" ]
then
echo "#define HYP_BUILD_DATE \"`TZ=UTC git show -s --pretty="%cd" --date=local HEAD` UTC\""
else
echo "#define HYP_BUILD_DATE \"`date -R`\""
fi
|
package net.degols.libs.election
import javax.inject.Singleton
/**
 * Merges the election-specific configuration file into the application
 * configuration. `filenames` is consumed by the project-defined
 * ConfigurationMerge base class — TODO confirm merge-order semantics there.
 */
@Singleton
class ElectionConfigurationMerge extends ConfigurationMerge {
  override val filenames: Seq[String] = Seq("application.election.conf")
}
|
# Command that synchronises the pantry database with the page and layout
# files on disk: new or updated files are (re)stocked, and rows whose
# backing file has been deleted are purged.
class MudPie::StockCommand
  MudPie::COMMANDS['stock'] = self

  def self.summary
    "Update pantry with pages and layouts"
  end

  def self.help
    "Usage: mudpie stock"
  end

  def self.call(argv, options)
    self.new(MudPie::Bakery.new).execute
  end

  def initialize(bakery)
    @pantry = bakery.pantry
  end

  # Purge stale rows first, then walk the layouts and pages trees.
  def execute
    purge_rows_for_missing_files
    scan_pages(Pathname.new('layouts')) { |path| stock_layout path }
    scan_pages(Pathname.new('pages')) { |path| stock_page path }
  end

  # Recursively walk `dir`, yielding every file that is new or modified since
  # it was last stocked. Warns (and returns) when `dir` is missing or not a
  # directory.
  def scan_pages(dir, &block)
    unless dir.directory?
      if dir.exist?
        $stderr.puts "WARNING: '#{dir}' is not a directory."
      else
        $stderr.puts "WARNING: directory '#{dir}' does not exist."
      end
      return
    end
    dir.each_child do |path|
      if path.directory?
        scan_pages(path, &block)
      else
        mtime = @pantry.mtime_for_path(path)
        # BUG FIX: Pathname#== is never true for a String, so the previous
        # `path.basename != '.DS_Store'` was always true and .DS_Store files
        # got stocked. Compare the basename as a String instead. The extra
        # `mtime &&` guard keeps a skipped nil-mtime file from reaching the
        # `mtime < path.mtime` comparison (which would raise on nil).
        if mtime.nil? && path.basename.to_s != '.DS_Store'
          puts "NEW #{path}"
          yield path
        elsif mtime && mtime < path.mtime
          puts "UPDA #{path}"
          yield path
        else
          puts "OK #{path}" if MudPie::OPTIONS[:debug]
        end
      end
    end
  end

  # Layouts are stocked under the name '#<basename-without-extension>'.
  def stock_layout(path)
    chain = MudPie::build_render_chain(path)
    name = '#' + path.basename.to_s.chomp(path.extname)
    @pantry.stock(path, name, chain.meta)
  end

  # Pages are stocked under the URL computed by their render chain.
  def stock_page(path)
    chain = MudPie::build_render_chain(path)
    @pantry.stock(path, chain.url, chain.meta)
  end

  # Remove pantry rows whose backing file no longer exists on disk.
  def purge_rows_for_missing_files
    ids_to_purge = []
    @pantry.each_path do |path, page_id|
      unless path.exist?
        puts "DELE #{path}"
        ids_to_purge << page_id
      end
    end
    @pantry.delete_pages_by_id(ids_to_purge) unless ids_to_purge.empty?
  end
end
|
import { Animal } from './Animal';
/**
 * A Sheep is an Animal with no additional state or behavior.
 *
 * The previous explicit constructor only forwarded its argument to `super`,
 * which is exactly what the implicit default constructor of a derived class
 * does — so it has been removed (ESLint: no-useless-constructor).
 */
export class Sheep extends Animal {
}
|
#!/bin/bash
# Convert every SystemVerilog module under $1 to Verilog in $2 using sv2v.
#   $1: source directory containing *.sv files
#   $2: output directory for converted ${module}.sv files
# All expansions are quoted so paths containing spaces do not word-split.
for file in "$1"/*.sv; do
    module=$(basename -s .sv "$file")
    # Package files are compiled alongside every module below, not converted
    # standalone.
    if echo "$module" | grep -q '_pkg$' ; then
        continue
    fi
    "${HOME}/Downloads/sv2v/bin/sv2v" \
        --define=SYNTHESIS \
        "$1"/*_pkg.sv \
        "$1"/../vendor/lowrisc_ip/ip/prim/rtl/prim_ram_1p_pkg.sv \
        -I"$1"/../vendor/lowrisc_ip/ip/prim/rtl \
        -I"$1"/../vendor/lowrisc_ip/dv/sv/dv_utils \
        "$file" > "$2/${module}.sv"
done
|
package soup.movie.theme
import android.widget.TextView
import androidx.databinding.BindingAdapter
/**
 * Data-binding adapter that renders the display label for a [ThemeOption].
 * A null (or otherwise unmatched) option falls back to the "system" label,
 * which also covers [ThemeOption.System] itself.
 */
@BindingAdapter("themeOptionLabel")
fun setThemeOptionLabel(textView: TextView, themeOption: ThemeOption?) {
    textView.setText(
        when (themeOption) {
            ThemeOption.Light -> R.string.theme_option_light
            ThemeOption.Dark -> R.string.theme_option_dark
            ThemeOption.Battery -> R.string.theme_option_battery_saver
            else -> R.string.theme_option_system
        }
    )
}
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<title><?= $site->title()->html() ?> | <?= $page->title()->html() ?></title>
<link rel="shortcut icon" type="image/png" href="<?= site()->url() ?>/assets/images/favicon.png"/>
<link href='https://fonts.googleapis.com/css?family=Lato:100,200,300,400' rel='stylesheet' type='text/css'>
<meta name="description" content="<?= $site->description()->html() ?>">
<meta name="keywords" content="<?= $site->keywords()->html() ?>">
<?= css('assets/styles/css/styles.css') ?>
<?= js('assets/js/jquery.min.js') ?>
<!-- Google Analytics page-view tracking -->
<script>
// Google Analytics
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-74784709-1', 'auto');
ga('send', 'pageview');
</script>
<!-- NOTE(review): this src is relative (no helper/site URL) unlike the asset
     includes above — confirm it resolves on nested page URLs -->
<script type="text/javascript" src="assets/js/html5shim.js"></script>
</head>
<body>
<!-- Site banner: title/subtitle linking home, plus contact phone -->
<header role="banner">
<div class="banner">
<div class="in_blk left">
<a class="overlay" href="<?= site()->url() ?>" title="Center for Sports Medicine home page">
<h1 class="h1">Center for Sports Medicine</h1>
<p class="subtitle">Saint Francis Orthopedic and Sports Medicine Institute</p>
</a>
</div>
<div class="in_blk right">
<span class="header-phone"><?= page('contact')->phone() ?></span>
</div>
</div>
<!-- Primary navigation: one entry per visible page; active page highlighted -->
<nav class="navigation" role="navigation">
<div class="title">
<a class="overlay" href="<?= site()->url() ?>" title="Center for Sports Medicine home page">
<h1 class="h1">Center for Sports Medicine</h1>
<p class="subtitle">Saint Francis Orthopedic and Sports Medicine Institute</p>
</a>
</div>
<ul id="navbar">
<?php foreach($pages->visible() as $p): ?>
<li><a <?= ($p->isOpen()) ? 'class="active"' : '' ?> href="<?= $p->url() ?>"><?= html($p->title()) ?></a></li>
<?php endforeach ?>
</ul>
<?= snippet('responsive_nav') ?>
<div class="clear"></div>
<?= js('assets/js/header.js') ?>
</nav>
</header>
|
/**
 * Mathematical (floored) modulo. Unlike the `%` remainder operator, the
 * result takes the sign of `divisor`, so e.g. `mod(-1, 3) === 2`.
 */
export function mod(value: number, divisor: number) {
  const shifted = (value % divisor) + divisor;
  return shifted % divisor;
}
|
// Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Helpers for triggering best-effort crash reports.
use anyhow::anyhow;
use fidl_fuchsia_feedback::{CrashReport, CrashReporterProxy};
use fuchsia_zircon as zx;
use futures::{channel::mpsc, future::LocalBoxFuture, prelude::*};
use log::{error, warn};
use omaha_client::time::TimeSource;
use std::time::Duration;
/// Deduplication window: InstallationError reports within this span are skipped.
const TWENTY_FOUR_HOURS: Duration = Duration::from_secs(60 * 60 * 24);
/// Upper bound on crash reports queued but not yet filed (enforced via the
/// control channel's capacity).
const MAX_PENDING_CRASH_REPORTS: usize = 10;
/// Threshold of consecutive failed update checks before a report is filed.
const MIN_CONSECUTIVE_FAILED_UPDATE_CHECKS: u32 = 5;
/// Parameters to `handle_crash_reports_impl` are extracted out into a separate type. Otherwise,
/// we'd have to pass the parameters in separately and it would be easy to mistake the ordering
/// of the integers.
struct HandleCrashReportsParams<T: TimeSource> {
    /// Proxy used to file crash reports with the CrashReporter FIDL service.
    proxy: CrashReporterProxy,
    /// A pending crash report is any crash report request that's successfully sent to the
    /// `monitor_control_requests` future, but has not yet completed (whether by completing the
    /// `file` call or deciding to skip it).
    max_pending_crash_reports: usize,
    /// The minimum number of consecutive update check failures required to file a crash report.
    min_consecutive_failed_update_checks: u32,
    /// Clock used for the InstallationError deduplication window (mockable in tests).
    time_source: T,
}
/// Create a crash-report handler using the production constants, returning a
/// control handle for submitting requests plus the future that services them.
pub fn handle_crash_reports<'a, T: TimeSource + 'a>(
    proxy: CrashReporterProxy,
    time_source: T,
) -> (CrashReportControlHandle, LocalBoxFuture<'a, ()>) {
    let params = HandleCrashReportsParams {
        proxy,
        max_pending_crash_reports: MAX_PENDING_CRASH_REPORTS,
        min_consecutive_failed_update_checks: MIN_CONSECUTIVE_FAILED_UPDATE_CHECKS,
        time_source,
    };
    handle_crash_reports_impl(params)
}
/// Wire up the bounded control channel and the monitor future.
fn handle_crash_reports_impl<'a, T: TimeSource + 'a>(
    params: HandleCrashReportsParams<T>,
) -> (CrashReportControlHandle, LocalBoxFuture<'a, ()>) {
    // `mpsc::channel(cap)` actually buffers cap + 1 messages, so subtract 1
    // to make `try_send` reject requests once max_pending_crash_reports are
    // outstanding.
    let capacity = params.max_pending_crash_reports - 1;
    let (sender, receiver) = mpsc::channel(capacity);
    let handle = CrashReportControlHandle(sender);
    let monitor = monitor_control_requests(params, receiver).boxed_local();
    (handle, monitor)
}
/// A future that receives crash report signatures over the channel, determines if the report should
/// be filed, and then uses the proxy to send a File FIDL request to the CrashReporter service.
///
/// We add this layer of abstraction so that we can:
/// 1. Deduplicate CrashReporter requests for InstallationErrors within the same 24 hour band. This
///    is important so that we don't spam the CrashReporter with redundant errors.
/// 2. Only file crash reports for ConsecutiveFailedUpdateChecks once we reach a given threshold.
/// 3. Rate-limit requests to the CrashReporter service. The CrashReporter FIDL API does not return
///    until the crash report has been fully generated, which can take many seconds. Rate-limiting
///    should help support filing crash reports in this window. Note: the maximum pending crash
///    reports are implicitly enforced by the channel capacity.
async fn monitor_control_requests<T: TimeSource>(
    params: HandleCrashReportsParams<T>,
    mut recv: mpsc::Receiver<ControlRequest>,
) {
    // Monotonic timestamp of the last InstallationError report filed; drives
    // the 24 hour deduplication window.
    let mut previous_report_filed_timestamp = None;
    loop {
        match recv.next().await {
            Some(ControlRequest::InstallationError) => {
                let now = params.time_source.now_in_monotonic();
                if let Some(prev) = previous_report_filed_timestamp {
                    // Do not file InstallationError crash reports within a 24 hour period.
                    if now < prev + TWENTY_FOUR_HOURS {
                        warn!("skipping report because we already filed one in the past 24 hours");
                        continue;
                    }
                }
                file_report(&params.proxy, "fuchsia-installation-error").await;
                previous_report_filed_timestamp = Some(now);
            }
            Some(ControlRequest::ConsecutiveFailedUpdateChecks(n)) => {
                let min = params.min_consecutive_failed_update_checks;
                // Exponential backoff past the threshold: file only when
                // (n - min + 1) is a power of two, i.e. n = min, min+1,
                // min+3, min+7, ...
                if (n >= min) && (n - min + 1).is_power_of_two() {
                    let signature = format!("fuchsia-{}-consecutive-failed-update-checks", n);
                    file_report(&params.proxy, &signature).await;
                }
            }
            None => {
                // All sender handles were dropped; nothing more to service.
                error!("Crash report handler failed to receive ControlRequest");
                return;
            }
        }
    }
}
/// File a crash report with the given signature. Crash-reporting is
/// best-effort: any error from the service is logged and otherwise ignored so
/// it never blocks update checks.
async fn file_report(proxy: &CrashReporterProxy, signature: &str) {
    let report = CrashReport {
        crash_signature: Some(signature.to_owned()),
        program_name: Some("system".to_owned()),
        // Need to do syscall because `std::time::Instant` cannot be converted into nanos.
        program_uptime: Some(zx::Time::get_monotonic().into_nanos()),
        is_fatal: Some(false),
        ..CrashReport::EMPTY
    };
    if let Err(e) = proxy.file(report).await {
        error!("Error filing crash report: {:#}", anyhow!(e));
    }
}
/// A handle to forward crash report requests to `monitor_control_requests` future.
/// Wraps the sender side of the bounded control channel; `try_send` failures
/// surface when the maximum number of pending crash reports is reached.
#[derive(Debug)]
pub struct CrashReportControlHandle(mpsc::Sender<ControlRequest>);
impl CrashReportControlHandle {
    /// Forward InstallationError alerts to the CrashReporter service.
    /// Fails with `TrySendError` when the pending-report limit is reached.
    pub fn installation_error(&mut self) -> Result<(), mpsc::TrySendError<ControlRequest>> {
        let request = ControlRequest::InstallationError;
        self.0.try_send(request)
    }

    /// Forward ConsecutiveFailedUpdateChecks alerts to the CrashReporter service.
    /// Fails with `TrySendError` when the pending-report limit is reached.
    pub fn consecutive_failed_update_checks(
        &mut self,
        consecutive_failed_update_checks: u32,
    ) -> Result<(), mpsc::TrySendError<ControlRequest>> {
        let request =
            ControlRequest::ConsecutiveFailedUpdateChecks(consecutive_failed_update_checks);
        self.0.try_send(request)
    }
}
/// The set of messages than can be sent to the `monitor_control_requests` future.
#[derive(Debug)]
pub enum ControlRequest {
    /// An installation attempt failed; deduplicated over a 24 hour window.
    InstallationError,
    /// Carries the current count of consecutive failed update checks.
    ConsecutiveFailedUpdateChecks(u32),
}
#[cfg(test)]
/// Verifies the signature of the CrashReport is what's expected.
/// Also asserts the fixed fields set by `file_report` (program "system",
/// non-fatal, uptime present); the trailing `..` ignores any other fields.
pub fn assert_signature(report: CrashReport, expected_signature: &str) {
    assert_matches::assert_matches!(
        report,
        CrashReport {
            crash_signature: Some(signature),
            program_name: Some(program),
            program_uptime: Some(_),
            is_fatal: Some(false),
            ..
        } if signature == expected_signature && program == "system"
    )
}
#[cfg(test)]
mod tests {
    use super::*;
    use assert_matches::assert_matches;
    use fuchsia_async::{self as fasync, Task};
    use mock_crash_reporter::{MockCrashReporterService, ThrottleHook};
    use omaha_client::time::MockTimeSource;
    use std::sync::Arc;

    /// Verifies the file function behaves as expected.
    async fn test_file_crash_report(res: Result<(), zx::Status>) {
        let (hook, mut recv) = ThrottleHook::new(res);
        let mock = Arc::new(MockCrashReporterService::new(hook));
        let (proxy, _crash_report_server) = mock.spawn_crash_reporter_service();
        let () = Task::local(async move {
            let () = file_report(&proxy, "foo").await;
        })
        .detach();
        assert_signature(recv.next().await.unwrap(), "foo");
    }

    #[fasync::run_singlethreaded(test)]
    async fn test_file_crash_report_success() {
        test_file_crash_report(Ok(())).await
    }

    /// We should ignore any errors from the CrashReporter service.
    #[fasync::run_singlethreaded(test)]
    async fn test_file_crash_report_error_ignored() {
        test_file_crash_report(Err(zx::Status::NOT_FOUND)).await
    }

    /// Verifies installation error reports are deduplicated over 24 hour periods.
    #[fasync::run_singlethreaded(test)]
    async fn test_installation_error() {
        let (hook, mut recv) = ThrottleHook::new(Ok(()));
        let mock = Arc::new(MockCrashReporterService::new(hook));
        let (proxy, _fidl_server) = mock.spawn_crash_reporter_service();
        let mut time_source = MockTimeSource::new_from_now();
        let (mut ch, fut) = handle_crash_reports_impl(HandleCrashReportsParams {
            proxy,
            max_pending_crash_reports: 10,
            min_consecutive_failed_update_checks: 0,
            time_source: time_source.clone(),
        });
        let _control_request_server = fasync::Task::local(fut);
        // On the first InstallationError, we file a report.
        let () = ch.installation_error().unwrap();
        assert_signature(recv.next().await.unwrap(), "fuchsia-installation-error");
        // Subsequent requests within 24 hours should not file a report. We know we don't file a
        // report because nothing is polled from the receiver.
        let () = ch.installation_error().unwrap();
        assert_matches!(recv.try_next(), Err(_));
        time_source.advance(TWENTY_FOUR_HOURS - Duration::from_secs(1));
        let () = ch.installation_error().unwrap();
        assert_matches!(recv.try_next(), Err(_));
        // When we hit 24 hrs, we'll file a new report.
        time_source.advance(Duration::from_secs(1));
        let () = ch.installation_error().unwrap();
        assert_signature(recv.next().await.unwrap(), "fuchsia-installation-error");
        // We'll also file a new report when we exceed 24 hours.
        time_source.advance(TWENTY_FOUR_HOURS + Duration::from_secs(1));
        let () = ch.installation_error().unwrap();
        assert_signature(recv.next().await.unwrap(), "fuchsia-installation-error");
    }

    /// Verifies the power-of-two backoff on consecutive-failure reports
    /// (with min = 1: file at n = 1, 2, 4, ... but not 3).
    #[fasync::run_singlethreaded(test)]
    async fn test_consecutive_failed_update_checks() {
        let (hook, mut recv) = ThrottleHook::new(Ok(()));
        let mock = Arc::new(MockCrashReporterService::new(hook));
        let (proxy, _fidl_server) = mock.spawn_crash_reporter_service();
        let (mut ch, fut) = handle_crash_reports_impl(HandleCrashReportsParams {
            proxy,
            max_pending_crash_reports: 2,
            min_consecutive_failed_update_checks: 1,
            time_source: MockTimeSource::new_from_now(),
        });
        let _control_request_server = fasync::Task::local(fut);
        // If num checks < min, we SHOULD NOT file a crash report.
        let () = ch.consecutive_failed_update_checks(0).unwrap();
        assert_matches!(recv.try_next(), Err(_));
        // If num checks >= min, we SHOULD file a crash report on a backoff (e.g. 1, 2, 4, etc).
        let () = ch.consecutive_failed_update_checks(1).unwrap();
        assert_signature(recv.next().await.unwrap(), "fuchsia-1-consecutive-failed-update-checks");
        let () = ch.consecutive_failed_update_checks(2).unwrap();
        assert_signature(recv.next().await.unwrap(), "fuchsia-2-consecutive-failed-update-checks");
        let () = ch.consecutive_failed_update_checks(3).unwrap();
        assert_matches!(recv.try_next(), Err(_));
        let () = ch.consecutive_failed_update_checks(4).unwrap();
        assert_signature(recv.next().await.unwrap(), "fuchsia-4-consecutive-failed-update-checks");
    }

    /// Tests that the number of pending crash reports is correctly bounded.
    #[fasync::run_singlethreaded(test)]
    async fn test_max_pending_crash_reports() {
        let (hook, mut recv) = ThrottleHook::new(Ok(()));
        let mock = Arc::new(MockCrashReporterService::new(hook));
        let (proxy, _fidl_server) = mock.spawn_crash_reporter_service();
        let (mut ch, fut) = handle_crash_reports_impl(HandleCrashReportsParams {
            proxy,
            max_pending_crash_reports: 2,
            min_consecutive_failed_update_checks: 0,
            time_source: MockTimeSource::new_from_now(),
        });
        let _control_request_server = fasync::Task::local(fut);
        // The first control request should go through, but hang when calling `file`.
        // After this call, we're guaranteed to have 1 pending crash report.
        let () = ch.consecutive_failed_update_checks(0).unwrap();
        // After this call, we're guaranteed to have 2 pending crash reports.
        let () = ch.installation_error().unwrap();
        // Since we're at the max pending crash reports, additional requests will fail.
        assert_matches!(ch.installation_error(), Err(mpsc::TrySendError::<ControlRequest> { .. }));
        // Complete a file call, so we now have 1 pending crash report.
        assert_signature(recv.next().await.unwrap(), "fuchsia-0-consecutive-failed-update-checks");
        // Now that the file call is unblocked, we can successfully make another request.
        let () = ch.consecutive_failed_update_checks(1).unwrap();
        // Drain remaining file calls.
        assert_signature(recv.next().await.unwrap(), "fuchsia-installation-error");
        assert_signature(recv.next().await.unwrap(), "fuchsia-1-consecutive-failed-update-checks");
    }

    /// Tests that when the control handle is dropped, the `handle_crash_reports_impl` future
    /// terminates.
    #[fasync::run_singlethreaded(test)]
    async fn test_ch_dropped() {
        let mock = Arc::new(MockCrashReporterService::new(|_| Ok(())));
        let (proxy, _fidl_server) = mock.spawn_crash_reporter_service();
        let (ch, fut) = handle_crash_reports_impl(HandleCrashReportsParams {
            proxy,
            max_pending_crash_reports: 2,
            min_consecutive_failed_update_checks: 0,
            time_source: MockTimeSource::new_from_now(),
        });
        drop(ch);
        let () = fut.await;
    }
}
|
import {cloneDeep} from "lodash";
import {ClientHelper, DesiredCapabilities, ServerConfig, WindowSize} from "../../..";
import {setBrowserStackSessionName, standardCapabilities, standardServerConfig} from "../../0_helper/config";
// Window-attribute tests: verify initial window sizing options and runtime
// resize/maximize behaviour of a newly created browser session.
describe(`creating a new Browser`, (): void => {
    const conf: ServerConfig = cloneDeep(standardServerConfig);
    const capabilities: DesiredCapabilities = cloneDeep(standardCapabilities);
    setBrowserStackSessionName(capabilities, `browser_attributes_spec.ts`);

    // Evaluated inside the browser: report the current viewport size.
    const windowSize = function (): {} {
        return {width: window.innerWidth, height: window.innerHeight};
    };

    beforeAll((): void => {
        jasmine.DEFAULT_TIMEOUT_INTERVAL = 30000;
    });

    afterEach((): Promise<void[]> => {
        return ClientHelper.cleanup()
    });

    describe(`with an initial window setSize`, (): void => {
        it(`it should be maximized when the config contains the "maximum" attribute
- (test case id: 8a0d9a58-9591-43c1-89bb-d848319c90f1)`, async (): Promise<void> => {
            const con: ServerConfig = cloneDeep(conf);
            const capa: DesiredCapabilities = cloneDeep(capabilities);
            capa.window = {
                setToMaxSize: true
            };
            // NOTE(review): create() is used without await here (as elsewhere
            // in this spec) — presumably executeScript resolves the session
            // lazily; confirm against ClientHelper's API.
            const browserInitialResize = ClientHelper.create(con, capa);
            const data = await browserInitialResize.executeScript(windowSize);
            // NOTE(review): this 2s wait runs AFTER the size was already
            // queried — confirm whether it was meant to precede executeScript.
            await new Promise((resolve) => setTimeout(resolve, 2000));
            const dataParsed: WindowSize = JSON.parse(JSON.stringify(data));
            // Only sanity-checks a minimum size; true maximization is not
            // asserted (actual maximum depends on the remote screen).
            expect(dataParsed.height).toBeGreaterThanOrEqual(100);
            expect(dataParsed.width).toBeGreaterThanOrEqual(100);
        });

        it(`it should be maximized when maximize() is called after browser creation
- (test case id: 1473a628-0347-41d9-b2f8-2c93f827f840)`, async (): Promise<void> => {
            const browserManualResize = ClientHelper.create(conf, capabilities);
            const dataBefore = await browserManualResize.executeScript(windowSize);
            const dataBeforeParsed: WindowSize = JSON.parse(JSON.stringify(dataBefore));
            await browserManualResize.window.maximize();
            const dataAfter = await browserManualResize.executeScript(windowSize);
            const dataAfterParsed: WindowSize = JSON.parse(JSON.stringify(dataAfter));
            // Maximizing must not shrink the window.
            expect(dataAfterParsed.width).toBeGreaterThanOrEqual(dataBeforeParsed.width);
            expect(dataAfterParsed.height).toBeGreaterThanOrEqual(dataBeforeParsed.height)
        });

        describe(`and changing the window size`, (): void => {
            it(`should resize the window to 500x500 pixel
- (test case id: 1b7451ac-0ca2-4bdc-8700-60b4098d5829)`, async (): Promise<void> => {
                const browserResize = ClientHelper.create(conf, capabilities);
                await browserResize.window.setSize({width: 500, height: 500});
                const size = await browserResize.window.getSize();
                // Allow up to 50px of chrome/DPI slack above the requested size.
                expect(size.width).toBeLessThan(550);
                expect(size.width).toBeGreaterThanOrEqual(500);
                expect(size.height).toBeLessThan(550);
                expect(size.height).toBeGreaterThanOrEqual(500);
            });
        });
    });
});
|
#!/bin/bash -eu
#
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Print usage information to stderr and exit with a failure status.
help() {
    echo "\
Download Certificate Revocation List (CRL) for each cert in the specified file
if it's available.
Usage: $(basename "${0}") -i input -o output
-i: input
File containing a list of certs in PEM format.
-o: output
Output file to write a list of CRLs in PEM format.
-h:
Print this message.
" >&2
    exit 1
}
# Parse -i/-o options, split the input bundle into individual certificates,
# download each cert's CRL from its distribution point(s), and concatenate the
# resulting PEM CRLs (each prefixed with issuer/subject comments) to the
# output file.
main() {
    local certs_file=
    local output_crl_file=
    while getopts ":i:o:h" option; do
        case "${option}" in
            i) certs_file="${OPTARG}";;
            o) output_crl_file="${OPTARG}";;
            h) help;;
            \?) echo "Invalid Option: -${OPTARG}" >&2; exit 1;;
            :) echo "Option -${OPTARG} requires an argument." >&2; exit 1;;
            *) echo "Unimplemented Option: -${OPTARG}" >&2; exit 1;;
        esac
    done
    if [[ -z "${certs_file}" || -z "${output_crl_file}" ]]; then
        echo "Missing input or output file." >&2
        exit 1
    fi
    certs_file="$(readlink -f "${certs_file}")"
    if [[ ! -e "${certs_file}" ]]; then
        echo "${certs_file} not found.">&2
        exit 1
    fi
    local cert_dir="$(mktemp -d)"
    # shellcheck disable=SC2064
    # NOTE(review): SIGKILL cannot be trapped — listing it here is inert; the
    # EXIT trap is what actually performs the cleanup on normal termination.
    trap "rm -rf '${cert_dir}'" SIGKILL SIGTERM SIGQUIT EXIT
    pushd "${cert_dir}" >/dev/null
    # Parse the certificate data from a file containing multiple certs.
    # Each cert is written to cert_<n>.pem in the temp directory.
    awk '
    BEGIN {
        cert_filename = "";
        cert_index = 0;
        print_cert = 0;
    }
    /-----BEGIN CERTIFICATE-----/ {
        cert_filename = "cert_" cert_index ".pem";
        print_cert = 1;
    }
    {
        if (print_cert) {
            print $0 > cert_filename;
        }
    }
    /-----END CERTIFICATE-----/ {
        print_cert = 0;
        cert_index ++;
    }' "${certs_file}"
    # For each cert, get the CRL distribution points and
    # download the CRL file.
    for cert in *.pem; do
        local cert_info=
        local distribution_point_uris=
        # Parse distribution point URIs from each cert.
        # NOTE(review): gensub() is gawk-specific; this script requires gawk.
        # The awk program prints shell `local` assignments that are eval'd.
        # shellcheck disable=SC1004
        eval "$(
            openssl x509 -noout -text -in "${cert}" | \
            awk '
            BEGIN {
                in_distribution_points = 0;
                in_distribution_point_name = 0;
                cert_info = "";
                num_distribution_point_uris = 0;
                cert_info_headers[0] = "Issuer: ";
                cert_info_headers[1] = "Subject: ";
            }
            {
                for (i in cert_info_headers) {
                    header=cert_info_headers[i]
                    re=".* " header
                    if ($0 ~ re) {
                        cert_info = cert_info header gensub(re, "", "", $0) "\n";
                    }
                }
                if (in_distribution_points) {
                    if (in_distribution_point_name) {
                        if ($0 ~ /URI:/) {
                            distribution_point_uris[num_distribution_point_uris] = \
                            gensub(/.*URI:/, "", "", $0);
                            num_distribution_point_uris ++;
                        }
                        in_distribution_point_name = 0;
                    } else if ($0 ~ /^$/) {
                        # Empty line between fields.
                    } else if ($0 ~ /Full Name:/) {
                        in_distribution_point_name = 1;
                    } else {
                        in_distribution_points = 0;
                    }
                }
            }
            /X509v3 CRL Distribution Points:/ {
                in_distribution_points = 1;
            }
            END {
                if (num_distribution_point_uris) {
                    print "local cert_info='\''" cert_info "'\'';";
                    printf "local distribution_point_uris=(";
                    for (i = 0; i < num_distribution_point_uris; i++) {
                        printf "'\''" distribution_point_uris[i] "'\'' ";
                    }
                    printf ");\n";
                }
            }'
        )"
        # If the cert has CRLs, download them.
        if [[ -n "${cert_info}" ]]; then
            local crl_index=0
            local downloaded_crl=0
            # Try each distribution point in order; the `break` below means we
            # stop after the first successful download.
            for uri in "${distribution_point_uris[@]}"; do
                # Download the CRL and convert to PEM format.
                local crl_basename="${cert}_crl_${crl_index}"
                local crl_der_filename="${crl_basename}.der"
                local crl_pem_filename="${crl_basename}.pem"
                wget -q -O "${crl_der_filename}" "${uri}" || continue
                openssl crl -inform DER -in "${crl_der_filename}" \
                    -outform PEM -out "${crl_pem_filename}"
                # Add cert information in comments at the start of the CRL.
                echo -e "${cert_info}\nX509v3 CRL Distribution Point: ${uri}" | \
                    sed 's/^/# /' > "${crl_pem_filename}.new"
                cat "${crl_pem_filename}" >> "${crl_pem_filename}.new"
                mv "${crl_pem_filename}.new" "${crl_pem_filename}"
                crl_index=$((crl_index+1))
                downloaded_crl=1
                break
            done
            if [[ $((downloaded_crl)) -eq 0 ]]; then
                echo "\
ERROR: Unable to download from any distribution points of cert.
${cert_info}
$(for uri in "${distribution_point_uris[@]}"; do echo "${uri}"; done)
" >&2
                exit 1
            fi
        fi
    done
    popd >/dev/null
    # Finally, generate a file of all concatenated CRL PEMs.
    cat "${cert_dir}/"*_crl_*.pem >> "${output_crl_file}"
}
main "$@"
|
package goopenzwave
// #include "gzw_manager.h"
// #include <stdlib.h>
import "C"
// GetPollInterval returns the time period between polls of a node's state.
// Presumably in milliseconds, matching SetPollInterval's parameter — confirm
// against the OpenZWave Manager documentation.
func GetPollInterval() int32 {
	return int32(C.manager_getPollInterval(cmanager))
}
// SetPollInterval will set the time period between polls of a node's state.
//
// Due to patent concerns, some devices do not report state changes
// automatically to the controller. These devices need to have their state
// polled at regular intervals. The length of the interval is the same for all
// devices. To even out the Z-Wave network traffic generated by polling,
// OpenZWave divides the polling interval by the number of devices that have
// polling enabled, and polls each in turn. It is recommended that if possible,
// the interval should not be set shorter than the number of polled devices in
// seconds (so that the network does not have to cope with more than one poll
// per second).
//
// intervalBetweenPolls is forwarded to OpenZWave unchanged; its exact
// semantics (interval between individual polls vs. whole poll cycles) should
// be confirmed against the OpenZWave Manager documentation.
func SetPollInterval(milliseconds int32, intervalBetweenPolls bool) {
	C.manager_setPollInterval(cmanager, C.int32_t(milliseconds), C.bool(intervalBetweenPolls))
}
// EnablePoll enables the polling of a device's state. Returns true if polling
// was enabled. The intensity parameter has the same meaning as in
// SetPollIntensity (0 = none, 1 = every cycle, 2 = every other cycle, ...).
func EnablePoll(homeID uint32, valueID uint64, intensity uint8) bool {
	// Temporary C valueid handle, released before return.
	cvalueid := C.valueid_create(C.uint32_t(homeID), C.uint64_t(valueID))
	defer C.valueid_free(cvalueid)
	return bool(C.manager_enablePoll(cmanager, cvalueid, C.uint8_t(intensity)))
}
// DisablePoll disables the polling of a device's state. Returns true if polling
// was disabled.
func DisablePoll(homeID uint32, valueID uint64) bool {
	// Temporary C valueid handle, released before return.
	cvalueid := C.valueid_create(C.uint32_t(homeID), C.uint64_t(valueID))
	defer C.valueid_free(cvalueid)
	return bool(C.manager_disablePoll(cmanager, cvalueid))
}
// IsPolled returns true if the device's state is being polled.
func IsPolled(homeID uint32, valueID uint64) bool {
	// Temporary C valueid handle, released before return.
	cvalueid := C.valueid_create(C.uint32_t(homeID), C.uint64_t(valueID))
	defer C.valueid_free(cvalueid)
	return bool(C.manager_isPolled(cmanager, cvalueid))
}
// SetPollIntensity sets the frequency of polling.
//
//   - 0 = none
//   - 1 = every time through the list
//   - 2 = every other time
//   - etc.
func SetPollIntensity(homeID uint32, valueID uint64, intensity uint8) {
	// Temporary C valueid handle, released before return.
	cvalueid := C.valueid_create(C.uint32_t(homeID), C.uint64_t(valueID))
	defer C.valueid_free(cvalueid)
	C.manager_setPollIntensity(cmanager, cvalueid, C.uint8_t(intensity))
}
// GetPollIntensity returns the polling intensity of a device's state
// (see SetPollIntensity for the meaning of the returned value).
func GetPollIntensity(homeID uint32, valueID uint64) uint8 {
	// Temporary C valueid handle, released before return.
	cvalueid := C.valueid_create(C.uint32_t(homeID), C.uint64_t(valueID))
	defer C.valueid_free(cvalueid)
	return uint8(C.manager_getPollIntensity(cmanager, cvalueid))
}
|
<?php
declare(strict_types=1);
namespace Shapin\TalkJS\Model\Conversation;
/**
 * Empty marker class for the TalkJS "conversation created or updated" webhook
 * event. It currently carries no state; presumably it exists so consumers can
 * dispatch on the event type — TODO confirm against its usages.
 */
class ConversationCreatedOrUpdated
{
}
|
# Tests Bukdu's prerequisite-plug registry: loading Bukdu registers one plug,
# re-`using` an already-loaded package does not repopulate an emptied registry
# (module init presumably runs only once — confirm against Bukdu's __init__),
# and `plug(Plug.Head)` registers it again.
module test_bukdu_plugs_prequisite_plugs

using Test
using Bukdu

@test length(Bukdu.bukdu_env[:prequisite_plugs]) == 1

# Clearing the registry and `using` Bukdu again must leave it empty.
empty!(Bukdu.bukdu_env[:prequisite_plugs])
using Bukdu
@test length(Bukdu.bukdu_env[:prequisite_plugs]) == 0

# Explicitly registering Plug.Head adds one entry back.
plug(Plug.Head)
@test length(Bukdu.bukdu_env[:prequisite_plugs]) == 1

end # module test_bukdu_plugs_prequisite_plugs
|
! Geometry implementations for the points_basic module.
submodule (points_basic) geo
implicit none
contains
  ! Euclidean distance between points a (ax, ay) and b (bx, by).
  ! hypot is used instead of sqrt(dx**2 + dy**2) because it avoids
  ! overflow/underflow of the intermediate squares.
  module procedure point_dist
  point_dist = hypot(ax - bx, ay - by)
  end procedure point_dist
end submodule geo
|
package Curses::Orrery;
use v5.12.0;
use Moo;
use Types::Standard qw(ArrayRef Bool InstanceOf Int Num Tuple);
use Astro::Coords::Angle;
use Astro::Coords::Planet;
use Astro::MoonPhase;
use Astro::Telescope;
use Curses;
use DateTime;
use DateTime::TimeZone;
use I18N::Langinfo qw(CODESET langinfo);
use Math::Trig qw(deg2rad pi);
use POSIX qw(round);
use Scalar::Util qw(looks_like_number);
use Switch;
=head1 NAME
Curses::Orrery - Plot the positions of the sun, moon, and planets in the sky
=head1 VERSION
Version 0.01
=cut
our $VERSION = '0.01';
=head1 SYNOPSIS
use Curses::Orrery;
$orrery = Curses::Orrery->new(lat => -15.75,
long => -69.42,
alt => 3812);
$orrery->run;
=head1 DESCRIPTION
C<Curses::Orrery> is a geocentric orrery using C<Curses>: given a latitude,
longitude, and optional altitude, it plots on the terminal the positions of the
sun, moon and planets in the sky.
When run, the user can select each individual body for information on its
position and rise/transit/set times, and can also step the movement
hour-by-hour.
=head1 METHODS
=head2 Constructor
=over 4
=item B<new>
C<lat> and C<long> are required: they can be decimal degrees, strings of format
C<'(-)DD:MM:SS'>, or L<Astro::Coords::Angle> objects. C<alt>, which is
optional, is the altitude in meters.
use Curses::Orrery;
$orrery = Curses::Orrery->new(lat => -15.75,
long => -69.42,
alt => 3812);
$orrery->run;
You may also specify initial values for the C<datetime>, C<range>,
C<selection_index>, and C<time_zone> attributes (described below).
By default, this constructor initializes L<Curses> on C<stdscr>.
Alternatively, you can specify a specific L<Curses::Screen>.
=cut
# Moo constructor hook: put the terminal into the interactive mode the UI
# expects (keypad escapes on, cursor hidden, no echo). $stdscr is the
# Curses package global for the default screen.
sub BUILD {
    my $self = shift;

    # configure curses
    $stdscr->keypad(1);
    curs_set 0;
    noecho;
}

# Destructor hook: restore the terminal to its normal state.
sub DEMOLISH {
    endwin;
}
=back
=head2 Accessor Methods
=over 4
=item B<lat>
An L<Astro::Coords::Angle> corresponding to the latitude of the viewer.
$lat = $orrery->lat;
=cut
# Viewer latitude; coerced from decimal degrees or 'DD:MM:SS' strings.
has 'lat' => (
    is       => 'ro',
    isa      => InstanceOf['Astro::Coords::Angle'],
    required => 1,
    coerce   => \&_latlong_coerce,
);
=item B<long>
An L<Astro::Coords::Angle> corresponding to the longitude of the viewer.
$long = $orrery->long;
=cut
# Viewer longitude; coerced like lat.
has 'long' => (
    is       => 'ro',
    isa      => InstanceOf['Astro::Coords::Angle'],
    required => 1,
    coerce   => \&_latlong_coerce,
);

# Coerce a plain value into an Astro::Coords::Angle in the range [-pi, pi]:
# numbers are treated as decimal degrees, anything else as sexagesimal
# ('DD:MM:SS'). Already-blessed values pass through untouched.
sub _latlong_coerce {
    my ($angle) = @_;
    if (!ref $angle) {
        $angle = Astro::Coords::Angle->new($angle,
                     range => 'PI',
                     units => looks_like_number($angle)
                                  ? 'degrees'
                                  : 'sexagesimal');
    }
    return $angle;
}
=item B<alt>
The altitude of the viewer in meters. Defaults to C<0> if not specified.
$alt = $orrery->alt;
=cut
# Viewer altitude in meters above sea level.
has 'alt' => (
    is      => 'ro',
    isa     => Num,
    default => 0,
);
=item B<datetime>, B<has_datetime>, B<clear_datetime>
Get/set a L<DateTime> specifying a specific date to plot. If unset, the current
date/time will be plotted.
$dt = $orrery->datetime;
$orrery->datetime(DateTime->new(year => 1985,
month => 10,
day => 26,
hour => 1,
minute => 21,
time_zone => 'America/Los_Angeles'));
$time_set = $orrery->has_datetime;
$orrery->clear_datetime;
=cut
# Optional time override; when unset, planets track the current time.
has 'datetime' => (
    is        => 'rw',
    isa       => InstanceOf['DateTime'],
    lazy      => 1,
    clearer   => 'clear_datetime',
    predicate => 'has_datetime',
    trigger   => \&_datetime_set,
);

# Trigger: propagate an explicitly-set time to every planet so later
# azel/rise/set queries are computed for that instant, not "now".
sub _datetime_set {
    my ($self, $dt) = @_;
    foreach my $planet (@{$self->planets}) {
        $planet->usenow(0);
        $planet->datetime($dt);
    }
}

# Clearing the override switches every planet back to wall-clock time.
after 'clear_datetime' => sub {
    my $self = shift;
    foreach my $planet (@{$self->planets}) {
        $planet->datetime(undef);
        $planet->usenow(1);
    }
};
=item B<time_zone>
A L<DateTime::TimeZone> corresponding to the viewer's local time zone,
for user output. Defaults to local time.
$tz = $orrery->time_zone;
$orrery->time_zone(DateTime::TimeZone->new(name => 'local'));
=cut
# Time zone used for all user-facing times; defaults to the local zone.
has 'time_zone' => (
    is      => 'rw',
    isa     => InstanceOf['DateTime::TimeZone'],
    default => sub { DateTime::TimeZone->new(name => 'local') },
);
=item B<range>
A four-element array reference specifying the range of positions to plot in
radians, of format C<[$min_azimuth, $max_azimuth, $min_elevation,
$max_elevation]>.
Defaults to C<[0, 2*pi, -pi/2, pi/2]> (putting due south in the center of the
plot) if the viewer is in the Northern Hemisphere, and C<[-pi, pi, -pi/2, pi/2]>
if the viewer is in the Southern (putting due north in the center of the plot).
($min_az, $max_az, $min_el, $max_el) = @{$orrery->range};
$orrery->range([0, 2*pi, 0, pi]);
=cut
# Plot range [min_az, max_az, min_el, max_el] in radians.
has 'range' => (
    is      => 'rw',
    isa     => Tuple[Num, Num, Num, Num],
    lazy    => 1,
    default => \&_range_default,
);

# Default range: center the plot on due south for northern-hemisphere
# viewers ([0, 2*pi]), on due north otherwise ([-pi, pi]); elevation always
# spans the full -90..+90 degrees.
sub _range_default {
    my $self = shift;
    return $self->lat > 0 ? [ 0, 2*pi, -pi/2, pi/2]
                          : [-pi, pi, -pi/2, pi/2];
}
=item B<planets>
An array reference of L<Astro::Coords::Planet> objects corresponding to the
seven non-Earth planets, Sun, and Moon. Each is populated with C<lat>, C<long>,
and C<alt>, and updated when C<datetime> is changed.
@planets = @{$orrery->planets};
=cut
# The nine tracked bodies (seven non-Earth planets plus Sun and Moon),
# built lazily and not settable from the constructor.
has 'planets' => (
    is       => 'ro',
    isa      => ArrayRef[InstanceOf['Astro::Coords::Planet']],
    lazy     => 1,
    init_arg => undef,
    builder  => '_planets_builder',
);

# Build one Astro::Coords::Planet per body, all sharing a telescope placed
# at the viewer's lat/long/alt, tracking either the time override or "now".
sub _planets_builder {
    my $self = shift;

    my $tel = Astro::Telescope->new(Name => 'orrery',
                                    Lat  => $self->lat,
                                    Long => $self->long,
                                    Alt  => $self->alt);

    my @planets;
    foreach my $planet_name (@Astro::Coords::Planet::PLANETS) {
        my $planet = Astro::Coords::Planet->new($planet_name);
        $planet->telescope($tel);
        if ($self->has_datetime) {
            $planet->datetime($self->datetime);
        }
        else {
            $planet->usenow(1);
        }
        push @planets, $planet;
    }
    return \@planets;
}

# Glyph used to draw a planet: an astronomical symbol on Unicode-capable
# terminals, a single ASCII letter otherwise.
sub _planet_symbol {
    my ($self, $planet) = @_;

    my %abbrevs = (sun    => 'S', mercury => 'M', venus   => 'v',
                   moon   => 'L', mars    => 'm', jupiter => 'j',
                   saturn => 's', uranus  => 'u', neptune => 'n');
    my %symbols = (sun     => "\x{2609}", mercury => "\x{263f}",
                   venus   => "\x{2640}", moon    => "\x{263D}",
                   mars    => "\x{2642}", jupiter => "\x{2643}",
                   saturn  => "\x{2644}", uranus  => "\x{2645}",
                   neptune => "\x{2646}");

    return $self->unicode
               ? $symbols{$planet->name}
               : $abbrevs{$planet->name};
}
=item B<selection_index>, B<has_selection>, B<clear_selection>
An optional index specifying which planet is selected in the user interface.
$n = $orrery->selection_index;
$orrery->selection_index(5);
$selected = $orrery->has_selection;
$orrery->clear_selection;
=cut
# Index of the planet highlighted in the UI; absent when nothing selected.
has 'selection_index' => (
    is        => 'rw',
    isa       => Int,
    clearer   => 'clear_selection',
    predicate => 'has_selection',
    trigger   => \&_selection_index_set,
);

# Trigger: keep the selection on a valid planet index — values past the end
# wrap forward (modulo), negative values wrap to the last planet. Re-setting
# the attribute here re-fires the trigger once with an in-range value.
sub _selection_index_set {
    my ($self, $index) = @_;
    my $num_planets = @{$self->planets};
    if ($index >= $num_planets) {
        $self->selection_index($index % $num_planets);
    }
    elsif ($index < 0) {
        $self->selection_index($num_planets - 1);
    }
}
# Advance the highlight to the next planet; with no current selection,
# start at the first body. Wrapping is handled by the attribute trigger.
sub select_next {
    my $self = shift;
    my $next = $self->has_selection ? $self->selection_index + 1 : 0;
    $self->selection_index($next);
}
# Move the highlight to the previous planet; with no current selection,
# start at the last body. Wrapping is handled by the attribute trigger.
sub select_prev {
    my $self = shift;
    my $prev = $self->has_selection
                   ? $self->selection_index - 1
                   : @{$self->planets} - 1;
    $self->selection_index($prev);
}
=item B<screen>
The C<Curses::Screen> being operated on.
$scr = $orrery->scr;
=cut
# The Curses screen in use; defaults to a fresh screen on stdin/stdout.
has 'screen' => (
    is      => 'ro',
    isa     => InstanceOf['Curses::Screen'],
    default => \&_screen_default,
);

sub _screen_default {
    # this is equivalent to initscr()
    return newterm($ENV{'TERM'}, *STDOUT, *STDIN);
}
=item B<unicode>
A Boolean value specifying whether to represent planets as planetary symbols if
true, or letters if false. Defaults to true if the user's locale is a Unicode
locale.
$using_unicode = $orrery->unicode;
$orrery->unicode(0);
=cut
# Whether to draw planets as Unicode astronomical symbols (true) or ASCII
# letters (false).
has 'unicode' => (
    is      => 'rw',
    isa     => Bool,
    default => \&_unicode_default,
);

# Guess Unicode support from the locale's codeset (UTF-* / UCS-*).
sub _unicode_default {
    return langinfo(CODESET) =~ /^utf|^ucs/i;
}
=back
=head2 General Methods
=over 4
=item B<advance_to_next>
Truncate B<datetime> to the given time component, and add to that component (or
subtract, if negative). For example, advance to the top of the next hour.
$orrery->advance_to_next('hour', 1);
=cut
# Truncate the plotted time to the given unit ('hour', 'day', ...) and step
# it by $magnitude units (negative goes back). Starts from the wall clock
# when no time override is active; storing the result re-fires the
# datetime trigger so every planet picks up the new instant.
sub advance_to_next {
    my ($self, $time_part, $magnitude) = @_;

    my $when = $self->has_datetime ? $self->datetime : DateTime->now;
    $when->truncate(to => $time_part)->add("${time_part}s", $magnitude);
    $self->datetime($when);
}
=item B<draw>
Plots the planets on the screen.
$orrery->draw;
=cut
# Redraw the whole display: axes first, then every planet glyph, then the
# selection detail pane and the status lines, then flush to the terminal.
sub draw {
    my $self = shift;

    clear;

    $self->_draw_axes;
    foreach my $planet (@{$self->planets}) {
        $self->_draw_planet($planet);
    }
    $self->_draw_selection;
    $self->_draw_status;

    refresh;
}
# Map an azimuth (radians, or an Angle object) to a screen column inside
# the configured azimuth range; values past the right edge wrap around by
# one full turn so they land back inside the plot.
sub _az_col {
    my ($self, $az) = @_;

    my ($min_az, $max_az) = @{$self->range}[0,1];

    $az = $az->radians if ref $az;
    $az = -2*pi + $az if $az > $max_az;

    return round($COLS * ($az - $min_az) / ($max_az - $min_az));
}
# Map an elevation (radians, or an Angle object) to a screen line. Screen
# y grows downward, hence the subtraction from $LINES.
sub _el_line {
    my ($self, $el) = @_;

    my ($min_el, $max_el) = @{$self->range}[2,3];

    $el = $el->radians if ref $el;

    return round($LINES - $LINES * ($el - $min_el) / ($max_el - $min_el));
}
# Draw the horizon line with compass tick marks (E/W labeled, others shown
# as crosses) and, where visible, vertical meridian axes at due north and
# due south with elevation labels every 30 degrees.
sub _draw_axes {
    my $self = shift;

    # x axis
    my $y_line = $self->_el_line(0);
    hline($y_line, 0, ACS_HLINE, $COLS);
    foreach my $az (45, 90, 135, 180, 225, 270, 315) {
        my $label_col = $self->_az_col(deg2rad($az));
        if ($az == 90) {
            addstring $y_line, $label_col, 'E';
        }
        elsif ($az == 270) {
            addstring $y_line, $label_col, 'W';
        }
        else {
            addch $y_line, $label_col, ACS_PLUS;
        }
    }

    # y axes (one each at due north and due south)
    foreach my $az (0, pi) {
        my $x_col = $self->_az_col($az);

        # skip an axis that coincides with the plot edge
        next if $az == $self->range->[0]
             || $az == $self->range->[1];

        vline 0, $x_col, ACS_VLINE, $LINES;
        addch $y_line, $x_col, ACS_PLUS;
        foreach my $el (-60, -30, 0, 30, 60) {
            my $label_line = $self->_el_line(deg2rad($el));
            if (!$el) {
                # the horizon crossing gets a plain cross, no label
                addch $label_line, $x_col, ACS_PLUS;
            } else {
                addstring $label_line, $x_col - 2, sprintf('% 2d', $el);
                addch $label_line, $x_col + 1, ACS_DEGREE;
            }
        }
    }
}
# Plot one planet's glyph at its current azimuth/elevation.
sub _draw_planet {
    my ($self, $planet) = @_;

    my ($az, $el) = $planet->azel;
    my $col  = $self->_az_col($az);
    my $line = $self->_el_line($el);
    addstring $line, $col, $self->_planet_symbol($planet);
}
# Highlight the selected planet (reverse video) and print its details in
# the top-left corner: name, az/el, and rise/transit/set times chosen from
# the transit cycle nearest to the plotted time.
sub _draw_selection {
    my $self = shift;

    return if !$self->has_selection;

    my $planet = $self->planets->[$self->selection_index];
    my ($az, $el) = $planet->azel;

    # redraw the planet in reverse video
    attron(A_REVERSE);
    $self->_draw_planet($planet);
    attroff(A_REVERSE);

    # top left display
    # name
    addstring 0, 0, $planet->name;

    # az/el
    addstring 2, 2, 'azimuth:';
    addstring 2, 12, sprintf('% 4d', $az->degrees);
    addch 2, 16, ACS_DEGREE;
    addstring 3, 0, 'elevation:';
    addstring 3, 12, sprintf('% 4d', $el->degrees);
    addch 3, 16, ACS_DEGREE;

    # rise/transit/set
    # first find the nearest transit
    my $transit = $planet->meridian_time(nearest => 1);
    my $transit_day = $transit->clone->truncate(to => 'day');

    # then pick the rise/set in the same cycle
    my $prev_rise = $planet->rise_time(event => -1);
    my $next_rise = $planet->rise_time(event => 1);
    my $prev_set  = $planet->set_time (event => -1);
    my $next_set  = $planet->set_time (event => 1);
    my $rise = $transit > $next_rise ? $next_rise : $prev_rise;
    my $set  = $transit < $prev_set  ? $prev_set  : $next_set;

    # local times from here forward
    # NOTE(review): the loop below tolerates undef rise/set ('never'), but
    # these unconditional set_time_zone calls would die first if rise_time/
    # set_time ever return undef for circumpolar objects — confirm.
    $transit->set_time_zone($self->time_zone);
    $rise->set_time_zone($self->time_zone);
    $set->set_time_zone($self->time_zone);

    addstring 5, 5, 'rise:';
    addstring 6, 2, 'transit:';
    addstring 7, 6, 'set:';

    my $line = 5;
    foreach my $event_time ($rise, $transit, $set) {
        if (!defined $event_time) {
            addstring $line++, 11, 'never';
            next;
        }

        addstring $line, 11, $event_time->strftime('%R');

        # print the date alongside the time if it isn't same-day
        # (truncate mutates $event_time in place, but it is not reused)
        if ($event_time->truncate(to => 'day') != $transit_day) {
            addstring $line, 17, $event_time->strftime('%d %b');
        }

        $line++;
    }
}
# Draw the status areas: the viewer's lat/long/alt bottom-left, and the
# moon phase plus the plotted date/time bottom-right. Uses the plotted
# time override when set, the wall clock otherwise.
#
# Fix: the "illumination unavailable" sprintf format was '%s --%', whose
# trailing lone '%' is an invalid conversion in Perl; a literal percent
# sign must be written '%%'.
sub _draw_status {
    my $self = shift;

    my $dt = $self->has_datetime
                 ? $self->datetime->clone
                 : DateTime->now;
    $dt->set_time_zone($self->time_zone);

    # bottom left: viewer lat, long, alt
    my ($lat_sign, $lat_d, $lat_m, $lat_s) =
        $self->lat->components;
    my ($long_sign, $long_d, $long_m, $long_s) =
        $self->long->components;
    my $alt = $self->alt;
    my $lower_left = sprintf(q{%3d %02d'%02d"%s %3d %02d'%02d"%s %4dm},
                             $lat_d, $lat_m, $lat_s,
                             $lat_sign eq '+' ? 'N' : 'S',
                             $long_d, $long_m, $long_s,
                             $long_sign eq '+' ? 'E' : 'W',
                             $alt);
    addstring $LINES - 1, 0, $lower_left;
    # overwrite the degree positions with the proper degree glyph
    addch $LINES - 1, 3, ACS_DEGREE;
    addch $LINES - 1, 15, ACS_DEGREE;

    # bottom right, line 1: moon phase name and illumination percentage
    my ($phase, $illum) = (phase($dt->epoch))[0..1];
    my $phase_name = $phase < 0.02 ? 'new' :
                     $phase < 0.24 ? 'waxing crescent' :
                     $phase < 0.26 ? 'first quarter' :
                     $phase < 0.49 ? 'waxing gibbous' :
                     $phase < 0.51 ? 'full' :
                     $phase < 0.74 ? 'waning gibbous' :
                     $phase < 0.76 ? 'last quarter' :
                     $phase < 0.99 ? 'waning crescent' :
                                     'new';
    my $lower_right2 = sprintf($illum < 1 ? '%s %2d%%' : '%s --%%',
                               $phase_name,
                               int($illum * 100));
    addstring $LINES - 2, $COLS - length($lower_right2), $lower_right2;

    # bottom right, line 2: the displayed date/time
    my $lower_right = sprintf('%s', $dt->strftime('%a %F %R'));
    addstring $LINES - 1, $COLS - length($lower_right), $lower_right;
    if ($self->has_datetime) {
        # bullet marks that a time override (not "now") is being shown
        addch $LINES - 1, $COLS - 23, ACS_BULLET;
    }
    if ($self->unicode) {
        # moon / sun symbols labeling the two status lines
        addstring $LINES - 2, $COLS - 22, "\x{263D}";
        addstring $LINES - 1, $COLS - 22, "\x{2609}";
    }
}
=item B<show_help>
Shows an informational dialog on the screen with a key to planetary symbols and
a list of key bindings. Waits for the user to press a key before returning.
$orrery->show_help;
=cut
# Display a centered modal help window — planet-symbol key on the left,
# key bindings on the right — and block until any key is pressed. Does
# nothing on terminals smaller than 60x13.
#
# Fix: the key-binding loop previously iterated indices 1..$#binding_help,
# so the first binding ('h/l  go back/forward in time') was never printed.
# It now prints every entry, offset one row below the column header.
sub show_help {
    my $self = shift;

    # terminal needs to be at least 60x13 to fit help on screen
    my ($help_lines, $help_cols) = (13, 60);
    return if $LINES < $help_lines || $COLS < $help_cols;

    # create the help window
    my $help = $stdscr->derwin($help_lines,
                               $help_cols,
                               $LINES / 2 - $help_lines / 2,
                               $COLS / 2 - $help_cols / 2);
    $help->clear;
    $help->box(ACS_VLINE, ACS_HLINE);

    # divide it with a vertical line 1/3 from the left
    my $divider_col = $help_cols / 3;
    $help->addch( 0, $divider_col, ACS_TTEE);
    $help->addch($help_lines - 1, $divider_col, ACS_BTEE);
    $help->vline( 1, $divider_col, ACS_VLINE, $help_lines - 2);

    # populate the left side with a key to the planet symbols
    my $syms = $help->derwin($help_lines - 2, $divider_col - 2, 1, 1);
    $syms->addstring(0, 1, 'planets:');
    for my $n (0 .. @{$self->planets} - 1) {
        my $planet = $self->planets->[$n];
        $syms->addstring($n+1, 2, $self->_planet_symbol($planet));
        $syms->addstring($n+1, 4, $planet->name);
    }

    # populate the right side with key-binding help
    my @binding_help = (['h/l', 'go back/forward in time'],
                        ['n',   'go to the present time'],
                        ['j/k', 'highlight next/previous planet'],
                        ['c',   'clear highlight'],
                        ['?',   'help'],
                        ['q',   'quit']);
    my $keys = $help->derwin($help_lines - 2,
                             $help_cols - $divider_col - 2,
                             1,
                             $divider_col + 1);
    $keys->addstring(0, 1, 'key bindings:');
    foreach my $n (0 .. @binding_help - 1) {
        $keys->addstring($n + 1, 2, $binding_help[$n]->[0]);
        $keys->addstring($n + 1, 7, $binding_help[$n]->[1]);
    }

    $help->addstring($help_lines - 2, $help_cols - 27,
                     'press any key to continue');

    # wait for any key
    $help->getchar;
}
=item B<run>
Draws the screen and waits for single-key commands from the user. Redraws the
screen after each user command, and at the top of each minute. Returns after
the user presses C<q>.
The available key bindings are:
h/l go back/forward in time
n go back to the present time
j/k highlight next/previous planet
c clear highlight
? help
q quit
=cut
# Main event loop: draw, block on getchar until a key arrives or an alarm
# fires at the top of the minute, then dispatch single-key commands.
# Returns (restoring nothing itself) when the user presses 'q'.
sub run {
    my $self = shift;

    # both signals will interrupt getchar, redraw immediately follows
    local $SIG{WINCH} = sub { endwin; };  # endwin required after SIGWINCH
    local $SIG{ALRM}  = sub { 0 };        # no-op handler; just interrupts

    while (1) {
        # draw the screen
        $self->draw;

        # wait for a key, or redraw at the top of each minute
        alarm (60 - time % 60);
        my ($ch, $key) = getchar;
        next if !defined $ch && !defined $key;

        # NOTE(review): $ch || $key would fall through to $key if getchar
        # ever returned the string "0" for $ch — confirm that cannot occur.
        switch ($ch || $key) {
            case ['h', KEY_LEFT]  { $self->advance_to_next('hour', -1); }
            case ['l', KEY_RIGHT] { $self->advance_to_next('hour',  1); }
            case 'n'              { $self->clear_datetime; }
            case ['j', KEY_DOWN]  { $self->select_next; }
            case ['k', KEY_UP]    { $self->select_prev; }
            case ['c', "\e"]      { $self->clear_selection; }
            case '?'              { alarm 0;     # no minute-redraw during help
                                    $self->show_help; }
            case 'q'              { return; }
        }
    }
}
=back
=head1 AUTHOR
Fran Rogers, C<< <fran at violuma.net> >>
=head1 SEE ALSO
L<Astro::Coords>; L<DateTime>
=head1 LICENSE AND COPYRIGHT
This software is Copyright (c) 2020 by Fran Rogers.
This is free software, licensed under:
The Artistic License 2.0 (GPL Compatible)
=cut
1; # End of Curses::Orrery
|
using Root.Coding.Code.Enums.E01D.Json.Reflection;
namespace Root.Coding.Code.Attributes.E01D.Json.Reflection
{
/// <summary>
/// Marks its target for JSON array serialization; the container kind is
/// fixed to <see cref="JsonContainerKind.Array"/>.
/// </summary>
public class JsonArrayAttribute:JsonContainerAttribute
{
	// Serializer-internal state bag.
	// NOTE(review): nothing visible here shows who populates Internals —
	// presumably the framework does; confirm before relying on it.
	public JsonArrayAttributeInternals Internals { get; set; }

	public override JsonContainerKind Kind => JsonContainerKind.Array;
}
}
|
<?php
namespace App\Http\Resources\Pegawai;
use Illuminate\Http\Resources\Json\JsonResource;
class listCollection extends JsonResource
{
    /**
     * Transform the Pegawai (employee) resource into a response array.
     *
     * NOTE(review): the source property casing is inconsistent (PegNip vs
     * pegNama vs pegnoHp) — presumably it mirrors the underlying column
     * names; verify against the model before normalizing.
     *
     * @param \Illuminate\Http\Request $request
     * @return array
     */
    public function toArray($request)
    {
        return [
            'nip' => $this->PegNip,
            'nama' => $this->pegNama,
            'noHp' => $this->pegnoHp,
            'nama_jurusan' => $this->jurusan->jurNamaResmi
        ];
    }
}
|
package com.github.sanctum.clans.construct.extra;
import com.github.sanctum.clans.construct.api.Insignia;
/**
 * Thrown when an {@link Insignia} cannot be instantiated. Carries the
 * registry key so callers can still look up the existing registration.
 */
public class InsigniaError extends InstantiationException {

	private static final long serialVersionUID = -2323418870626176815L;

	// Registry key identifying the insignia this error refers to.
	private final String key;

	public InsigniaError(String key, String message) {
		super(message);
		this.key = key;
	}

	/**
	 * @return the insignia resolved via {@code Insignia.get(key)}
	 *         (NOTE(review): may be null if nothing is registered under
	 *         the key — confirm Insignia.get's contract)
	 */
	public Insignia getRegistration() {
		return Insignia.get(this.key);
	}
}
|
const { JSDOM } = require("jsdom");
const { ExifImage } = require("exif");
const sharp = require("sharp");
const path = require("path")
const { promises: { readFile, writeFile } } = require("fs");
const { findByType } = require("../utils/file-search.util")
/**
 * Rebuilds the photo gallery index page: scans the gallery for JPEGs
 * (excluding generated *_thumb files), reads each image's EXIF caption,
 * produces a thumbnail, clones the template <figure> per image into the
 * gallery, and writes both the updated index and one detail page per photo.
 * Returns silently if the template page or its figure cannot be found.
 */
async function start() {
    const target = "./photographs/index.html";
    const images = (await findByType("./assets/images/gallery", ".jpg"))
        .filter(x => !x.includes("_thumb"));
    const galleryDom = await getDOM(target);
    if (!galleryDom) {
        return;
    }
    // The first existing figure serves as the template for every image.
    const galleryFigure = galleryDom.window.document.querySelector(".image-gallery figure");
    if (!galleryFigure) {
        return;
    }
    const imageGallery = galleryDom.window.document.querySelector(".image-gallery");
    imageGallery.innerHTML = "";
    for (const image of images) {
        // Promisify the callback-based ExifImage API.
        const exifData = await new Promise((resolve, reject) => {
            try {
                new ExifImage({
                    image
                }, (error, data) => {
                    if (error) {
                        reject(error);
                        return;
                    }
                    resolve(data);
                });
            } catch (error) {
                reject(error)
            }
        })
        const clonedGalleryFigure = galleryFigure.cloneNode(true);
        const img = clonedGalleryFigure.querySelector("img");
        const anchor = clonedGalleryFigure.querySelector("a");
        const caption = clonedGalleryFigure.querySelector("figcaption");
        // XPComment is an array of char codes with padding NULs; drop the
        // zeros and decode to a string.
        const captionText = exifData && exifData.image && exifData.image.XPComment
            ? exifData.image.XPComment.filter(x => x > 0).map(x => String.fromCharCode(x)).join("")
            : "No caption available.";
        const thumbPath = await resizeImage(image, "thumb", 500);
        img.src = thumbPath;
        img.alt = captionText;
        const linkName = path.basename(image, path.extname(image));
        anchor.href = `/photographs/${linkName}`
        caption.innerHTML = "";
        caption.appendChild(galleryDom.window.document.createTextNode(captionText));
        imageGallery.appendChild(clonedGalleryFigure);
        // Prefer the EXIF title; fall back to a title-cased file name.
        const title = exifData && exifData.image && exifData.image.XPTitle
            ? exifData.image.XPTitle.filter(x => x > 0).map(x => String.fromCharCode(x)).join("")
            : linkName.split("-").map(x => x.charAt(0).toUpperCase() + x.slice(1)).join(" ")
        await createImageView(linkName, title, captionText, image);
    }
    const result = galleryDom.serialize();
    await writeFile(target, result)
}
/**
 * Writes a resized copy of an image next to the original, named
 * `<name>_<nameDecoration><ext>`, and returns the new file's path.
 *
 * Fix: the returned path previously hard-coded the `_thumb` suffix while
 * the file itself was written as `_${nameDecoration}`, so any decoration
 * other than "thumb" returned a path to a file that was never created.
 *
 * @param {string} imagePath - source image path
 * @param {string} nameDecoration - suffix appended to the base name
 * @param {number} size - target width in pixels (sharp keeps aspect ratio)
 * @returns {Promise<string>} path of the resized image that was written
 */
async function resizeImage(imagePath, nameDecoration, size) {
    const image = await readFile(imagePath);
    const imageDir = path.dirname(imagePath);
    const imageExt = path.extname(imagePath);
    const imageName = path.basename(imagePath, imageExt);
    const newImagePath = path.join(imageDir, `${imageName}_${nameDecoration}${imageExt}`);
    await sharp(image)
        .resize(size)
        .toFile(newImagePath);
    return newImagePath;
}
/**
 * Renders a single-photograph detail page from the photograph.html
 * template and writes it to photographs/<linkName>.html. Returns silently
 * if the template, its figure, or its <h1> cannot be found.
 *
 * @param {string} linkName - URL slug / output file base name
 * @param {string} title - page heading
 * @param {string} description - figure caption text
 * @param {string} src - path of the full-size image
 */
async function createImageView(linkName, title, description, src) {
    const imageViewDom = await getDOM(path.join("./", "photographs", "photograph.html"));
    if (!imageViewDom) {
        return;
    }
    const imageViewFigure = imageViewDom.window.document.querySelector(".image-gallery figure");
    if (!imageViewFigure) {
        return;
    }
    const header = imageViewDom.window.document.querySelector("h1");
    if (!header) {
        return;
    }
    const img = imageViewFigure.querySelector("img");
    const caption = imageViewFigure.querySelector("figcaption");
    // Replace the template placeholders with this photograph's data;
    // text nodes are used so title/description are not parsed as HTML.
    header.innerHTML = "";
    header.appendChild(imageViewDom.window.document.createTextNode(title))
    img.src = src;
    caption.innerHTML = "";
    caption.appendChild(imageViewDom.window.document.createTextNode(description))
    const result = imageViewDom.serialize();
    const outputPath = path.join("photographs", `${linkName}.html`)
    return await writeFile(outputPath, result);
}
/**
 * Loads an HTML file and parses it into a JSDOM instance.
 *
 * Cleanup: `new JSDOM(...)` can never yield a falsy value, so the old
 * `if (!templateDom)` guard and the explicit `Promise.resolve` wrapper
 * were dead code; an unreadable file still rejects exactly as before.
 *
 * @param {string} templateName - path of the HTML file to parse
 * @returns {Promise<JSDOM>} parsed document wrapper
 */
async function getDOM(templateName) {
    const template = await readFile(templateName);
    return new JSDOM(template);
}
// Fix: the promise was left floating with an empty .then(), so any failure
// in start() became an unhandled rejection and the process still exited
// with code 0. Log the error and signal failure via the exit code instead.
start().catch((err) => {
    console.error(err);
    process.exitCode = 1;
});
|
<div class="m-default-index">
    <!-- Four quick-entry tiles. Only the first ("股票操盘" / stock trading)
         links anywhere; the other three have no anchor yet. -->
    <div class="row top-icon">
        <div class="col-sm-3 col-xs-3 top-icon-item"><a href="<?= \yii\helpers\Url::to(['/m/trade'])?>">
            <div class="bg-danger icon-wrap"><i class="glyphicon glyphicon-stats icon"></i></div>
            <p class="text">股票操盘</p>
        </a></div>
        <div class="col-sm-3 col-xs-3 top-icon-item">
            <div class="bg-success icon-wrap"><i class="glyphicon glyphicon-stats icon"></i></div>
            <p class="text">期货操盘</p>
        </div>
        <div class="col-sm-3 col-xs-3 top-icon-item">
            <div class="bg-info icon-wrap"><i class="glyphicon glyphicon-stats icon"></i></div>
            <p class="text">股票模拟</p>
        </div>
        <div class="col-sm-3 col-xs-3 top-icon-item">
            <div class="bg-warning icon-wrap"><i class="glyphicon glyphicon-stats icon"></i></div>
            <p class="text">期货模拟</p>
        </div>
    </div>
    <script>
        // Make each icon tile square (height = width) and vertically center
        // its glyph. Runs once on load and again on every window resize.
        function topIconHeightFix(){
            $('.top-icon-item .icon-wrap').each(function () {
                $(this).height($(this).width());
                var icon = $(this).find('.icon')
                icon.css('margin-top', ($(this).height() - icon.height()) / 2);
            });
        }
        topIconHeightFix();
        $(window).resize(topIconHeightFix);
    </script>
</div>
|
package disk
import (
"bytes"
"strconv"
"sync"
"time"
"github.com/akrylysov/pogreb"
)
// PogrebDB - represents a pogreb-backed key/value store.
// NOTE(review): the embedded RWMutex is only taken by Incr (to make its
// read-modify-write atomic); other methods do not lock — confirm callers
// need no stronger guarantee than pogreb itself provides.
type PogrebDB struct {
	db *pogreb.DB
	sync.RWMutex
}
// OpenPogrebDB - opens (or creates) a pogreb database at the given path
// with default options and wraps it in a PogrebDB.
func OpenPogrebDB(path string) (*PogrebDB, error) {
	db, err := pogreb.Open(path, nil)
	if err != nil {
		return nil, err
	}
	pdb := new(PogrebDB)
	pdb.db = db
	return pdb, nil
}
// Size - returns the on-disk size of the database in bytes, or 0 if the
// size cannot be determined (the underlying error is discarded).
func (pdb *PogrebDB) Size() int64 {
	size, err := pdb.db.FileSize()
	if err != nil {
		return 0
	}
	return size
}
// Close - closes the underlying database; any close error is discarded.
func (pdb *PogrebDB) Close() {
	pdb.db.Close()
}
// GC - compacts the database to reclaim disk space; the compaction
// statistics are discarded, only the error is reported.
func (pdb *PogrebDB) GC() error {
	_, err := pdb.db.Compact()
	return err
}
// Incr - increments the integer stored at k by the given amount and returns
// the new value. A missing or unparsable value is treated as 0. The mutex
// makes the get+set read-modify-write atomic relative to other Incr calls.
// NOTE(review): the rewrite stores with ttl -1, so Incr clears any TTL the
// key previously had — confirm that is intended.
func (pdb *PogrebDB) Incr(k string, by int64) (int64, error) {
	pdb.Lock()
	defer pdb.Unlock()

	val, err := pdb.get(k)
	if err != nil {
		val = []byte{}
	}

	valFloat, _ := strconv.ParseInt(string(val), 10, 64)
	valFloat += by

	err = pdb.set([]byte(k), intToByteSlice(valFloat), -1)
	if err != nil {
		return 0, err
	}
	return valFloat, nil
}
// set - stores v under k, prefixing the payload with its expiry as
// "<unix-expiry><expSeparator><value>". A non-positive ttl records an
// expiry of 0, meaning "never expires".
func (pdb *PogrebDB) set(k, v []byte, ttl time.Duration) error {
	var expires int64
	if ttl > 0 {
		expires = time.Now().Add(ttl).Unix()
	}
	expiresBytes := append(intToByteSlice(expires), expSeparator[:]...)
	v = append(expiresBytes, v...)
	return pdb.db.Put(k, v)
}
// Set - sets a key with the specified value and optional ttl
// (ttl <= 0 means no expiry). Public wrapper around set.
func (pdb *PogrebDB) Set(k string, v []byte, ttl time.Duration) error {
	return pdb.set([]byte(k), v, ttl)
}
// MSet - sets multiple key-value pairs with no expiry, stopping at the
// first error.
// Fix: this was an unimplemented no-op that silently discarded the data
// while reporting success.
func (pdb *PogrebDB) MSet(data map[string][]byte) error {
	for k, v := range data {
		if err := pdb.set([]byte(k), v, -1); err != nil {
			return err
		}
	}
	return nil
}
// get - fetches and decodes the record stored under k. Records are stored
// as "<unix-expiry><expSeparator><payload>"; an expiry of 0 means the
// entry never expires. Expired entries are deleted lazily here and
// reported as ErrNotFound (with nil data). A key pogreb does not hold at
// all yields empty data and a nil error.
//
// Fix (idiom): the local flag was named `delete`, shadowing Go's builtin;
// renamed to `expired`. Behavior is unchanged.
func (pdb *PogrebDB) get(k string) ([]byte, error) {
	var data []byte

	item, err := pdb.db.Get([]byte(k))
	if err != nil {
		return []byte{}, err
	}
	if len(item) == 0 {
		return []byte{}, nil
	}

	parts := bytes.SplitN(item, []byte(expSeparator), 2)
	expires, actual := parts[0], parts[1]

	expired := false
	if exp, _ := strconv.Atoi(string(expires)); exp > 0 && int(time.Now().Unix()) >= exp {
		expired = true
	} else {
		data = actual
	}

	if expired {
		pdb.db.Delete([]byte(k))
		return data, ErrNotFound
	}
	return data, nil
}
// Get - fetches the value of the specified key; see get for the expiry
// semantics (expired keys return ErrNotFound).
func (pdb *PogrebDB) Get(k string) ([]byte, error) {
	return pdb.get(k)
}
// MGet - fetches the values of the specified keys, preserving order.
// A key that errors (including expired keys) contributes an empty slice
// rather than aborting the batch.
func (pdb *PogrebDB) MGet(keys []string) [][]byte {
	var data [][]byte
	for _, key := range keys {
		val, err := pdb.get(key)
		if err != nil {
			data = append(data, []byte{})
			continue
		}
		data = append(data, val)
	}
	return data
}
// TTL - returns the remaining time to live, in seconds, of the value
// stored at key: -2 when the key is missing or already expired, -1 when
// it never expires.
//
// Fix: a missing key (pogreb returns an empty item without an error)
// previously decoded to an expiry of 0 and was reported as -1 ("never
// expires"); it is now reported as -2 ("not found"), matching Del/expiry
// semantics elsewhere.
func (pdb *PogrebDB) TTL(key string) int64 {
	item, err := pdb.db.Get([]byte(key))
	if err != nil || len(item) == 0 {
		return -2
	}

	parts := bytes.SplitN(item, []byte(expSeparator), 2)
	exp, _ := strconv.Atoi(string(parts[0]))
	if exp == 0 {
		return -1
	}

	now := time.Now().Unix()
	if now >= int64(exp) {
		return -2
	}
	return int64(exp) - now
}
// MDel - removes the given keys from the store, stopping at the first
// error.
// Fix: this was an unimplemented no-op that reported success without
// deleting anything.
func (pdb *PogrebDB) MDel(keys []string) error {
	for _, key := range keys {
		if err := pdb.db.Delete([]byte(key)); err != nil {
			return err
		}
	}
	return nil
}
// Del - removes a single key from the store.
func (pdb *PogrebDB) Del(key string) error {
	return pdb.db.Delete([]byte(key))
}
// Scan - iterates over the whole store (pogreb iteration order is
// unspecified), invoking scannerOpt.Handler for each key/value pair.
// Iteration stops early when a key fails the optional prefix filter or
// the handler returns a non-nil error; note the raw stored value
// (including the expiry prefix) is passed to the handler, not the
// decoded payload.
func (pdb *PogrebDB) Scan(scannerOpt ScannerOptions) error {
	// valid reports whether a key passes the prefix filter.
	valid := func(k []byte) bool {
		if k == nil {
			return false
		}
		if scannerOpt.Prefix != "" && !bytes.HasPrefix(k, []byte(scannerOpt.Prefix)) {
			return false
		}
		return true
	}

	it := pdb.db.Items()
	for {
		key, val, err := it.Next()
		if err == pogreb.ErrIterationDone {
			break
		}
		if err != nil {
			return err
		}
		if !valid(key) || scannerOpt.Handler(key, val) != nil {
			break
		}
	}
	return nil
}
|
const fs = require('fs');
const scrapeDigitalDebitCardActivatedEmail = require('../src/scrapeDigitalDebitCardActivatedEmail');
// Golden-file test (jest): scraping the saved activation email fixture
// must yield exactly this account-operation record.
test('scrapeDigitalDebitCardActivatedEmail', () => {
    const htmlEmail = fs.readFileSync('./test/emails/digital-debit-card-activated-email.html');
    const actual = scrapeDigitalDebitCardActivatedEmail(htmlEmail);
    expect(actual).toStrictEqual({
        recordType: 'ACCOUNT_OPERATION',
        emailType: 'DIGITAL_DEBIT_CARD_ACTIVATED',
        note: 'Activación de Tarjeta Digital asociada a tu Cuenta de Débito | CUENTA ENLACE PERSONAL ****0000',
        operationDate: '25/Ago/2019 13:07:48 horas'
    });
});
|
import React, { CSSProperties } from 'react'
import FormCheck from 'react-bootstrap/FormCheck'
/** Props accepted by the Switch component. */
interface Props {
  /** The id for the switch element (also associates the label with it) */
  id: string
  /** The label to render next to the switch */
  label: string
  /** Determines if the switch should be disabled or not. Defaults to false */
  disabled?: boolean
  /** The onChange listener, fired when the switch is toggled */
  onChange?: (event: React.ChangeEvent<HTMLInputElement>) => void
  /**
   * Defines the class of the switch.
   */
  className?: string
  /**
   * Defines the style of the switch.
   */
  style?: CSSProperties
}
/**
 * A labeled on/off toggle, rendered as a switch-type react-bootstrap
 * FormCheck wrapped in a div.
 */
const Switch = ({ id, label, disabled, onChange, className, style }: Props) => (
  <div>
    <FormCheck
      type="switch"
      id={id}
      label={label}
      disabled={disabled}
      onChange={onChange}
      className={className}
      style={style}
    />
  </div>
)

export { Switch }
|
# frozen_string_literal: true
require "rails_helper"
RSpec.describe "ImportGiasDataJob" do
  describe "#perform" do
    it "should run the GIAS data importer" do
      files = {
        school_data_file: "file.csv",
        school_links_file: "links.csv",
      }

      # Fix: the option was misspelled "transfer_nested_contants", so the
      # intended transfer_nested_constants behaviour of as_stubbed_const
      # was never applied to either stubbed class.
      fetch_gias_files = class_double("DataStage::FetchGiasDataFiles")
        .as_stubbed_const(transfer_nested_constants: true)
      update_staged_schools = class_double("DataStage::UpdateStagedSchools")
        .as_stubbed_const(transfer_nested_constants: true)

      # The fetcher yields the downloaded file paths, which must be passed
      # straight through to the staged-school updater.
      expect(fetch_gias_files).to receive(:call).and_yield(files)
      expect(update_staged_schools).to receive(:call).with(files)

      ImportGiasDataJob.new.perform
    end
  end
end
|
using hedCommon.extension.runtime;
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Reflection;
using UnityEditor;
using UnityEngine;
using static UnityEditor.EditorGUILayout;
namespace hedCommon.extension.editor
{
/// <summary>
/// Editor-only GUI helpers (dialogs and separator lines).
/// All of these functions must be called from OnSceneGUI / editor GUI code.
/// </summary>
public static class ExtGUI
{
    /// <summary>
    /// Shows a modal yes/no dialog and returns true when the user accepts.
    /// (Cleanup: the previous if/return-false/return-true expansion was
    /// equivalent to returning the dialog result directly.)
    /// </summary>
    /// <param name="mainTitle">Dialog window title.</param>
    /// <param name="content">Body text of the dialog.</param>
    /// <param name="accept">Label of the accept button.</param>
    /// <param name="no">Label of the cancel button.</param>
    /// <param name="replaceMousePositionAtTheEnd">Currently unused; kept
    /// for signature compatibility with existing callers.</param>
    public static bool DrawDisplayDialog(
        string mainTitle = "main title",
        string content = "what's the dialog box say",
        string accept = "Yes",
        string no = "Get me out of here",
        bool replaceMousePositionAtTheEnd = true)
    {
        return (EditorUtility.DisplayDialog(mainTitle, content, accept, no));
    }

    /// <summary>
    /// Create an horizontal line
    /// </summary>
    /// <param name="color">line color</param>
    /// <param name="thickness">2 pixels</param>
    /// <param name="paddingTop">1 = 10 pixels</param>
    /// <param name="paddingBottom">1 = 10 pixels</param>
    /// <param name="paddingLeft">percent of the width clamped 0-1</param>
    /// <param name="paddingRight">percent of the width clamped 0-1</param>
    /// <param name="autoWidth">explicit width; negative uses the layout width</param>
    public static void HorizontalLineThickness(Color color,
        int thickness = 2,
        float paddingTop = 1,
        float paddingBottom = 1,
        float paddingLeft = 0.1f,
        float paddingRight = 0.1f,
        float autoWidth = -1)
    {
        // padding is expressed in tenths: 1 => 10 pixels
        paddingTop *= 10;
        paddingBottom *= 10;

        Rect r = EditorGUILayout.GetControlRect(GUILayout.Height(paddingBottom + thickness));
        float width = r.width;
        if (autoWidth > 0)
        {
            width = autoWidth;
        }
        // clamp the percentages, then convert them to pixel offsets
        paddingLeft = ExtMathf.SetBetween(paddingLeft, 0, 1);
        paddingRight = ExtMathf.SetBetween(paddingRight, 0, 1);
        paddingLeft = paddingLeft * width / 1f;
        paddingRight = paddingRight * width / 1f;

        r.height = thickness;
        r.y += paddingTop / 2;
        // NOTE(review): the -2 / +6 pixel fudge compensates for the control
        // rect's default margins — confirm against the target editor skin.
        r.x -= 2 - paddingLeft;
        r.width += 6 - paddingLeft - paddingRight;
        EditorGUI.DrawRect(r, color);
    }
    //end class
}
//end nameSpace
}
|
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_sfx2.hxx"
#include <svl/stritem.hxx>
#ifndef GCC
#endif
#include <com/sun/star/util/URL.hpp>
#include <com/sun/star/util/XURLTransformer.hpp>
#include <com/sun/star/frame/XController.hpp>
#include <com/sun/star/lang/XUnoTunnel.hpp>
#include <com/sun/star/frame/status/ItemStatus.hpp>
#include <com/sun/star/frame/status/ItemState.hpp>
#include <com/sun/star/awt/MouseButton.hpp>
#include <vcl/status.hxx>
#include <sfx2/app.hxx>
#include "sfx2/stbitem.hxx"
#include "sfxtypes.hxx"
#include <sfx2/msg.hxx>
#include "arrdecl.hxx"
#include <sfx2/bindings.hxx>
#include <sfx2/msgpool.hxx>
#include <sfx2/module.hxx>
#include <sfx2/dispatch.hxx>
#include <sfx2/unoctitm.hxx>
#include <sfx2/objsh.hxx>
#include <sfx2/sfx.hrc>
#include <comphelper/processfactory.hxx>
#include <svl/eitem.hxx>
#include <svl/stritem.hxx>
#include <svl/intitem.hxx>
#ifndef _TOOLKIT_HELPER_VCLUNOHELPER_HXX_
#include <toolkit/helper/vclunohelper.hxx>
#endif
#include <toolkit/helper/convert.hxx>
using namespace ::com::sun::star;
//--------------------------------------------------------------------
// Translate a UNO AWT mouse-button bit set into the equivalent VCL
// MOUSE_* bit set; unknown bits are dropped.
sal_uInt16 SfxStatusBarControl::convertAwtToVCLMouseButtons( sal_Int16 nAwtMouseButtons )
{
    sal_uInt16 nVCLMouseButtons( 0 );

    if ( nAwtMouseButtons & awt::MouseButton::LEFT )
        nVCLMouseButtons |= MOUSE_LEFT;
    if ( nAwtMouseButtons & awt::MouseButton::RIGHT )
        nVCLMouseButtons |= MOUSE_RIGHT;
    if ( nAwtMouseButtons & awt::MouseButton::MIDDLE )
        nVCLMouseButtons |= MOUSE_MIDDLE;

    return nVCLMouseButtons;
}
//--------------------------------------------------------------------
// Factory entry point: creates an SfxStatusBarControl for the status bar
// field bound to aCommandURL, or NULL if no matching slot exists.
// Resolves the SfxObjectShell behind rFrame's model (via XUnoTunnel) to pick
// the module-specific slot pool; falls back to the global pool otherwise.
svt::StatusbarController* SAL_CALL SfxStatusBarControllerFactory(
    const uno::Reference< frame::XFrame >& rFrame,
    StatusBar* pStatusBar,
    unsigned short nID,
    const ::rtl::OUString& aCommandURL )
{
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    // Parse the command URL so its Path can be matched against UNO slots.
    util::URL aTargetURL;
    aTargetURL.Complete = aCommandURL;
    uno::Reference < util::XURLTransformer > xTrans( ::comphelper::getProcessServiceFactory()->createInstance(
        rtl::OUString::createFromAscii( "com.sun.star.util.URLTransformer" )), uno::UNO_QUERY );
    xTrans->parseStrict( aTargetURL );
    SfxObjectShell* pObjShell = NULL;
    uno::Reference < frame::XController > xController;
    uno::Reference < frame::XModel > xModel;
    if ( rFrame.is() )
    {
        xController = rFrame->getController();
        if ( xController.is() )
            xModel = xController->getModel();
    }
    if ( xModel.is() )
    {
        // Get tunnel from model to retrieve the SfxObjectShell pointer from it
        ::com::sun::star::uno::Reference < ::com::sun::star::lang::XUnoTunnel > xObj( xModel, uno::UNO_QUERY );
        ::com::sun::star::uno::Sequence < sal_Int8 > aSeq = SvGlobalName( SFX_GLOBAL_CLASSID ).GetByteSequence();
        if ( xObj.is() )
        {
            sal_Int64 nHandle = xObj->getSomething( aSeq );
            if ( nHandle )
                pObjShell = reinterpret_cast< SfxObjectShell* >( sal::static_int_cast< sal_IntPtr >( nHandle ));
        }
    }
    // Prefer the module's slot pool; NULL module selects the global pool.
    SfxModule* pModule = pObjShell ? pObjShell->GetModule() : NULL;
    SfxSlotPool* pSlotPool = 0;
    if ( pModule )
        pSlotPool = pModule->GetSlotPool();
    else
        pSlotPool = &(SfxSlotPool::GetSlotPool( NULL ));
    const SfxSlot* pSlot = pSlotPool->GetUnoSlot( aTargetURL.Path );
    if ( pSlot )
    {
        sal_uInt16 nSlotId = pSlot->GetSlotId();
        if ( nSlotId > 0 )
        {
            // Attach the ".uno:<name>" help id before creating the control.
            rtl::OString aCmd(".uno:");
            aCmd += pSlot->GetUnoName();
            pStatusBar->SetHelpId( nSlotId, aCmd );
            return SfxStatusBarControl::CreateControl( nSlotId, nID, pStatusBar, pModule );
        }
    }
    return NULL;
}
//--------------------------------------------------------------------
SfxStatusBarControl::SfxStatusBarControl
(
    sal_uInt16 nSlotID, /* Slot id this instance is bound to. If a
                           slot id != 0 was specified at registration
                           time, it is always that one. */
    sal_uInt16 nCtrlID, /* ID of this controller in the status bar */
    StatusBar& rBar /* Reference to the StatusBar for which this
                       control was created. */
)
/* [Description]
   Constructor of the SfxStatusBarControl class. Subclasses are created
   on demand by the SFx via factories.
   Instances of this base class are created for all status bar fields
   for which no special controls have been registered.
*/
: svt::StatusbarController(),
  nSlotId( nSlotID ),
  nId( nCtrlID ),
  pBar( &rBar )
{
}
//--------------------------------------------------------------------
SfxStatusBarControl::~SfxStatusBarControl()
/* [Description]
   Destructor of the SfxStatusBarControl class. Instances of this class
   and of its subclasses are destroyed by the SFx.
*/
{}
//--------------------------------------------------------------------
// XInterface
// XInterface: delegates interface lookup to the svt base class.
uno::Any SAL_CALL SfxStatusBarControl::queryInterface( const uno::Type & rType )
throw( uno::RuntimeException)
{
    return svt::StatusbarController::queryInterface( rType );
}
// XInterface: reference counting is handled by OWeakObject.
void SAL_CALL SfxStatusBarControl::acquire() throw()
{
    OWeakObject::acquire();
}
// XInterface: reference counting is handled by OWeakObject.
void SAL_CALL SfxStatusBarControl::release() throw()
{
    OWeakObject::release();
}
//--------------------------------------------------------------------
// XEventListener
// XEventListener: forwards the disposing notification to the base class.
void SAL_CALL SfxStatusBarControl::disposing( const lang::EventObject& aEvent )
throw( uno::RuntimeException )
{
    svt::StatusbarController::disposing( aEvent );
}
//--------------------------------------------------------------------
// XComponent
// XComponent: forwards disposal to the base class.
void SAL_CALL SfxStatusBarControl::dispose()
throw (uno::RuntimeException)
{
    svt::StatusbarController::dispose();
}
//--------------------------------------------------------------------
// XStatusListener
// XStatusListener: translates a UNO FeatureStateEvent into the old-style
// StateChanged(slot-id, SfxItemState, SfxPoolItem*) callback.  The event's
// Any payload is mapped onto the matching SfxPoolItem subclass.
void SAL_CALL SfxStatusBarControl::statusChanged( const frame::FeatureStateEvent& rEvent )
throw ( ::com::sun::star::uno::RuntimeException )
{
    SfxViewFrame* pViewFrame = NULL;
    uno::Reference < frame::XController > xController;
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    if ( m_xFrame.is() )
        xController = m_xFrame->getController();
    uno::Reference < frame::XDispatchProvider > xProvider( xController, uno::UNO_QUERY );
    if ( xProvider.is() )
    {
        // Tunnel through the dispatch to find the SfxViewFrame, which selects
        // the correct slot pool below.
        uno::Reference < frame::XDispatch > xDisp = xProvider->queryDispatch( rEvent.FeatureURL, ::rtl::OUString(), 0 );
        if ( xDisp.is() )
        {
            uno::Reference< lang::XUnoTunnel > xTunnel( xDisp, uno::UNO_QUERY );
            SfxOfficeDispatch* pDisp = NULL;
            if ( xTunnel.is() )
            {
                sal_Int64 nImplementation = xTunnel->getSomething(SfxOfficeDispatch::impl_getStaticIdentifier());
                pDisp = reinterpret_cast< SfxOfficeDispatch* >(sal::static_int_cast< sal_IntPtr >( nImplementation ));
            }
            if ( pDisp )
                pViewFrame = pDisp->GetDispatcher_Impl()->GetFrame();
        }
    }
    // Resolve the slot id belonging to the feature URL.
    sal_uInt16 nSlotID = 0;
    SfxSlotPool& rPool = SfxSlotPool::GetSlotPool( pViewFrame );
    const SfxSlot* pSlot = rPool.GetUnoSlot( rEvent.FeatureURL.Path );
    if ( pSlot )
        nSlotID = pSlot->GetSlotId();
    if ( nSlotID > 0 )
    {
        if ( rEvent.Requery )
            svt::StatusbarController::statusChanged( rEvent );
        else
        {
            // Map the event payload type onto the matching pool item.
            SfxItemState eState = SFX_ITEM_DISABLED;
            SfxPoolItem* pItem = NULL;
            if ( rEvent.IsEnabled )
            {
                eState = SFX_ITEM_AVAILABLE;
                uno::Type pType = rEvent.State.getValueType();
                if ( pType == ::getVoidCppuType() )
                {
                    pItem = new SfxVoidItem( nSlotID );
                    eState = SFX_ITEM_UNKNOWN;
                }
                else if ( pType == ::getBooleanCppuType() )
                {
                    sal_Bool bTemp = 0;
                    rEvent.State >>= bTemp ;
                    pItem = new SfxBoolItem( nSlotID, bTemp );
                }
                else if ( pType == ::getCppuType((const sal_uInt16*)0) )
                {
                    sal_uInt16 nTemp = 0;
                    rEvent.State >>= nTemp ;
                    pItem = new SfxUInt16Item( nSlotID, nTemp );
                }
                else if ( pType == ::getCppuType((const sal_uInt32*)0) )
                {
                    sal_uInt32 nTemp = 0;
                    rEvent.State >>= nTemp ;
                    pItem = new SfxUInt32Item( nSlotID, nTemp );
                }
                else if ( pType == ::getCppuType((const ::rtl::OUString*)0) )
                {
                    ::rtl::OUString sTemp ;
                    rEvent.State >>= sTemp ;
                    pItem = new SfxStringItem( nSlotID, sTemp );
                }
                else if ( pType == ::getCppuType((const ::com::sun::star::frame::status::ItemStatus*)0) )
                {
                    // ItemStatus carries its own SfxItemState value.
                    frame::status::ItemStatus aItemStatus;
                    rEvent.State >>= aItemStatus;
                    eState = aItemStatus.State;
                    pItem = new SfxVoidItem( nSlotID );
                }
                else
                {
                    // Unknown payload: let the slot's item type try to decode it.
                    if ( pSlot )
                        pItem = pSlot->GetType()->CreateItem();
                    if ( pItem )
                    {
                        pItem->SetWhich( nSlotID );
                        pItem->PutValue( rEvent.State );
                    }
                    else
                        pItem = new SfxVoidItem( nSlotID );
                }
            }
            // The item is only valid for the duration of this call.
            StateChanged( nSlotID, eState, pItem );
            delete pItem;
        }
    }
}
//--------------------------------------------------------------------
// XStatusbarController
::sal_Bool SAL_CALL SfxStatusBarControl::mouseButtonDown(
    const awt::MouseEvent& rMouseEvent )
throw ( uno::RuntimeException )
{
    // Convert the UNO mouse event into a VCL MouseEvent and forward it to
    // the legacy virtual handler, holding the solar mutex throughout.
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    ::Point aPosition( rMouseEvent.X, rMouseEvent.Y );
    ::MouseEvent aVCLEvent( aPosition,
                            (sal_uInt16)rMouseEvent.ClickCount,
                            0,
                            convertAwtToVCLMouseButtons( rMouseEvent.Buttons ),
                            0 );
    return MouseButtonDown( aVCLEvent );
}
//--------------------------------------------------------------------
::sal_Bool SAL_CALL SfxStatusBarControl::mouseMove(
    const awt::MouseEvent& rMouseEvent )
throw (uno::RuntimeException)
{
    // Convert the UNO mouse event into a VCL MouseEvent and forward it to
    // the legacy virtual handler, holding the solar mutex throughout.
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    ::Point aPosition( rMouseEvent.X, rMouseEvent.Y );
    ::MouseEvent aVCLEvent( aPosition,
                            (sal_uInt16)rMouseEvent.ClickCount,
                            0,
                            convertAwtToVCLMouseButtons( rMouseEvent.Buttons ),
                            0 );
    return MouseMove( aVCLEvent );
}
//--------------------------------------------------------------------
::sal_Bool SAL_CALL SfxStatusBarControl::mouseButtonUp(
    const ::awt::MouseEvent& rMouseEvent )
throw ( uno::RuntimeException )
{
    // Convert the UNO mouse event into a VCL MouseEvent and forward it to
    // the legacy virtual handler, holding the solar mutex throughout.
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    ::Point aPosition( rMouseEvent.X, rMouseEvent.Y );
    ::MouseEvent aVCLEvent( aPosition,
                            (sal_uInt16)rMouseEvent.ClickCount,
                            0,
                            convertAwtToVCLMouseButtons( rMouseEvent.Buttons ),
                            0 );
    return MouseButtonUp( aVCLEvent );
}
//--------------------------------------------------------------------
// XStatusbarController: converts a UNO command notification into a VCL
// CommandEvent and forwards it to the legacy Command() handler.
// NOTE(review): the bMouseEvent argument is ignored and sal_True is always
// passed to the CommandEvent — confirm this is intentional.
void SAL_CALL SfxStatusBarControl::command(
    const awt::Point& rPos,
    ::sal_Int32 nCommand,
    ::sal_Bool /*bMouseEvent*/,
    const ::com::sun::star::uno::Any& /*aData*/ )
throw (::com::sun::star::uno::RuntimeException)
{
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    ::Point aPos( rPos.X, rPos.Y );
    CommandEvent aCmdEvent( aPos, (sal_uInt16)nCommand, sal_True, NULL );
    Command( aCmdEvent );
}
//--------------------------------------------------------------------
// XStatusbarController: bridges a UNO paint request to the old-style
// Paint(UserDrawEvent&) callback.  Does nothing if no OutputDevice can be
// derived from the XGraphics reference.
void SAL_CALL SfxStatusBarControl::paint(
    const uno::Reference< awt::XGraphics >& xGraphics,
    const awt::Rectangle& rOutputRectangle,
    ::sal_Int32 nStyle )
throw ( ::uno::RuntimeException )
{
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    // Fixed: removed a stray empty statement (double semicolon).
    OutputDevice* pOutDev = VCLUnoHelper::GetOutputDevice( xGraphics );
    if ( pOutDev )
    {
        ::Rectangle aRect = VCLRectangle( rOutputRectangle );
        UserDrawEvent aUserDrawEvent( pOutDev, aRect, pBar->GetCurItemId(), (sal_uInt16)nStyle );
        Paint( aUserDrawEvent );
    }
}
//--------------------------------------------------------------------
// XStatusbarController: forwards a click notification to the legacy Click().
void SAL_CALL SfxStatusBarControl::click( const awt::Point& )
throw ( uno::RuntimeException )
{
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    Click();
}
//--------------------------------------------------------------------
// XStatusbarController: forwards a double-click to the legacy DoubleClick().
void SAL_CALL SfxStatusBarControl::doubleClick( const awt::Point& )
throw ( uno::RuntimeException )
{
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    DoubleClick();
}
//--------------------------------------------------------------------
// old sfx2 interface
//--------------------------------------------------------------------
void SfxStatusBarControl::StateChanged
(
    sal_uInt16 nSID,
    SfxItemState eState,
    const SfxPoolItem* pState /* Pointer to an SfxPoolItem which is only
                                 valid within this method call. It may be
                                 a null pointer, a pointer to an
                                 SfxVoidItem, or to the type for which the
                                 SfxStatusBarControl subclass is
                                 registered. */
)
/* [Description]
   The base implementation understands items of type SfxStringItem, whose
   text is written into the status bar field, and SfxVoidItem, which clears
   the field. The base implementation should not be called from overridden
   methods.
*/
{
    DBG_MEMTEST();
    DBG_ASSERT( pBar != 0, "setting state to dangling StatusBar" );
    const SfxStringItem* pStr = PTR_CAST( SfxStringItem, pState );
    if ( eState == SFX_ITEM_AVAILABLE && pStr )
        pBar->SetItemText( nSID, pStr->GetValue() );
    else
    {
        DBG_ASSERT( eState != SFX_ITEM_AVAILABLE || pState->ISA(SfxVoidItem),
                    "wrong SfxPoolItem subclass in SfxStatusBarControl" );
        pBar->SetItemText( nSID, String() );
    }
}
//--------------------------------------------------------------------
sal_Bool SfxStatusBarControl::MouseButtonDown( const MouseEvent & )
/* [Description]
   This virtual method forwards the StatusBar's MouseButtonDown() event
   when the mouse position lies within the area of the corresponding item,
   or when the mouse was captured by this control via
   <SfxStatusBarControl::CaptureMouse()>.
   The default implementation is empty and returns FALSE.
   [Return value]
   sal_Bool TRUE
       the event was handled and should not be forwarded to the StatusBar
   FALSE
       the event was not handled and should be forwarded to the StatusBar
*/
{
    return sal_False;
}
//--------------------------------------------------------------------
sal_Bool SfxStatusBarControl::MouseMove( const MouseEvent & )
/* [Description]
   This virtual method forwards the StatusBar's MouseMove() event when the
   mouse position lies within the area of the corresponding item, or when
   the mouse was captured by this control via
   <SfxStatusBarControl::CaptureMouse()>.
   The default implementation is empty and returns FALSE.
   [Return value]
   sal_Bool TRUE
       the event was handled and should not be forwarded to the StatusBar
   FALSE
       the event was not handled and should be forwarded to the StatusBar
*/
{
    return sal_False;
}
//--------------------------------------------------------------------
sal_Bool SfxStatusBarControl::MouseButtonUp( const MouseEvent & )
/* [Description]
   This virtual method forwards the StatusBar's MouseButtonUp() event when
   the mouse position lies within the area of the corresponding item, or
   when the mouse was captured by this control via
   <SfxStatusBarControl::CaptureMouse()>.
   The default implementation is empty and returns FALSE.
   [Return value]
   sal_Bool TRUE
       the event was handled and should not be forwarded to the StatusBar
   FALSE
       the event was not handled and should be forwarded to the StatusBar
*/
{
    return sal_False;
}
//--------------------------------------------------------------------
void SfxStatusBarControl::Command( const CommandEvent& )
/* [Description]
   This virtual method is called when a CommandEvent is detected for this
   SfxStatusBarControl.
   The default implementation is empty.
*/
{
}
//--------------------------------------------------------------------
void SfxStatusBarControl::Click()
/* [Description]
   This virtual method is called when the user clicks with the mouse into
   the status bar field belonging to this control.
   The default implementation is empty.
*/
{
}
//--------------------------------------------------------------------
void SfxStatusBarControl::DoubleClick()
/* [Description]
   This virtual method is called when the user double-clicks with the
   mouse into the status bar field belonging to this control.
   The default behavior dispatches the control's command with no arguments.
*/
{
    ::com::sun::star::uno::Sequence< ::com::sun::star::beans::PropertyValue > aArgs;
    execute( aArgs );
}
//--------------------------------------------------------------------
void SfxStatusBarControl::Paint
(
    const UserDrawEvent& /* Reference to a UserDrawEvent */
)
/* [Description]
   This virtual method is called to draw the content when the field in
   question is marked with SIB_USERDRAW. The output must be done on the
   OutputDevice obtained via rUDEvt.GetDevice(), within the rectangle
   given by rUDEvt.GetRect().
   The default implementation is empty.
*/
{
}
//--------------------------------------------------------------------
// Default implementation is empty; subclasses may capture the mouse here.
void SfxStatusBarControl::CaptureMouse()
{
}
//--------------------------------------------------------------------
// Default implementation is empty; counterpart to CaptureMouse().
void SfxStatusBarControl::ReleaseMouse()
{
}
//--------------------------------------------------------------------
// Creates the registered SfxStatusBarControl for the given slot id, first
// searching the module's factory list (if any), then the application-wide
// list. A factory matches when its item type equals the slot's type and its
// slot id is either 0 (wildcard) or the requested id. Returns NULL when the
// slot has no item type or no factory matches.
SfxStatusBarControl* SfxStatusBarControl::CreateControl
(
    sal_uInt16 nSlotID,
    sal_uInt16 nStbId,
    StatusBar* pBar,
    SfxModule* pMod
)
{
    ::vos::OGuard aGuard( Application::GetSolarMutex() );
    SfxApplication *pApp = SFX_APP();
    SfxSlotPool *pSlotPool;
    if ( pMod )
        pSlotPool = pMod->GetSlotPool();
    else
        pSlotPool = &SfxSlotPool::GetSlotPool();
    TypeId aSlotType = pSlotPool->GetSlotType(nSlotID);
    if ( aSlotType )
    {
        if ( pMod )
        {
            // Module-specific factories take precedence.
            SfxStbCtrlFactArr_Impl *pFactories = pMod->GetStbCtrlFactories_Impl();
            if ( pFactories )
            {
                SfxStbCtrlFactArr_Impl &rFactories = *pFactories;
                for ( sal_uInt16 nFactory = 0; nFactory < rFactories.Count(); ++nFactory )
                    if ( rFactories[nFactory]->nTypeId == aSlotType &&
                         ( ( rFactories[nFactory]->nSlotId == 0 ) ||
                           ( rFactories[nFactory]->nSlotId == nSlotID) ) )
                        return rFactories[nFactory]->pCtor( nSlotID, nStbId, *pBar );
            }
        }
        // Fall back to the application-wide factory list.
        SfxStbCtrlFactArr_Impl &rFactories = pApp->GetStbCtrlFactories_Impl();
        for ( sal_uInt16 nFactory = 0; nFactory < rFactories.Count(); ++nFactory )
            if ( rFactories[nFactory]->nTypeId == aSlotType &&
                 ( ( rFactories[nFactory]->nSlotId == 0 ) ||
                   ( rFactories[nFactory]->nSlotId == nSlotID) ) )
                return rFactories[nFactory]->pCtor( nSlotID, nStbId, *pBar );
    }
    return NULL;
}
//--------------------------------------------------------------------
// Registers a status bar control factory with the application, optionally
// scoped to a module.
void SfxStatusBarControl::RegisterStatusBarControl(SfxModule* pMod, SfxStbCtrlFactory* pFact)
{
    SFX_APP()->RegisterStatusBarControl_Impl( pMod, pFact );
}
//--------------------------------------------------------------------
|
#ifndef GUARD_median_h
#define GUARD_median_h
#include <vector>
// Returns the median of vec. Taken by value, so the implementation is free
// to reorder its copy without affecting the caller.
double median(std::vector<double> vec);
#endif
|
/*
* Copyright 2019 Google LLC.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef PRIVATE_JOIN_AND_COMPUTE_MESSAGE_SINK_H_
#define PRIVATE_JOIN_AND_COMPUTE_MESSAGE_SINK_H_
#include "absl/memory/memory.h"
#include "private_join_and_compute/private_join_and_compute.pb.h"
#include "private_join_and_compute/util/status.inc"
namespace private_join_and_compute {
// An interface for message sinks.
// Abstract sink for messages of type T; implementations decide how each
// message is processed (e.g. forwarded over a transport, buffered, logged).
template <typename T>
class MessageSink {
 public:
  virtual ~MessageSink() = default;

  // Subclasses should accept a message and process it appropriately.
  // Returns a non-OK Status if the message could not be handled.
  virtual Status Send(const T& message) = 0;

 protected:
  MessageSink() = default;
};
// A dummy message sink, that simply stores the last message received, and
// allows retrieval. Intended for testing.
template <typename T>
class DummyMessageSink : public MessageSink<T> {
 public:
  ~DummyMessageSink() override = default;

  // Simply copies the message, replacing any previously stored one.
  Status Send(const T& message) override {
    last_message_ = absl::make_unique<T>(message);
    return OkStatus();
  }

  // Returns the most recently received message.
  // Will fail (dereference null) if no message was received.
  const T& last_message() { return *last_message_; }

 private:
  std::unique_ptr<T> last_message_;  // owned copy of the last message
};
} // namespace private_join_and_compute
#endif // PRIVATE_JOIN_AND_COMPUTE_MESSAGE_SINK_H_
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
namespace PLC
{
// A button representing one ladder-diagram element on the PLC editor grid,
// with its grid position and its connections to neighboring elements.
public class BlockButton : Button
{
    public int row;            // grid row of this element
    public int column;         // grid column of this element
    public int type;           // element type code
    public string Block_Name;  // display name of the element
    public int ID;             // each element has its own identification number
    public IList<BlockButton> leftBlocks = new List<BlockButton>();   // elements connected on the left
    public IList<BlockButton> rightBlocks = new List<BlockButton>();  // elements connected on the right
    public bool IsAOVPoint = false;  // whether this element is an AOV graph node
    public IList<BlockButton> leftAOVs = new List<BlockButton>();     // AOV nodes to the left
    public IList<BlockButton> rightAOVs = new List<BlockButton>();    // AOV nodes to the right
    public int left_num = 0;   // after the change, only used for an AOV node's left connection count
    public int right_num = 0;  // right connection count of an AOV node
    public int AccessTime = 0; // visit counter used when converting to a binary tree
}
// A text box placed on the PLC editor grid, tagged with its grid position.
public class BlockTextBox : TextBox
{
    public int row;     // grid row of this text box
    public int column;  // grid column of this text box
}
}
|
/// The Fluent Assertions library written in Dart.
///
/// It uses Dart's Extension Functions to provide a fluent wrapper around test assertions.
library fluent_assertions;
export 'src/basic_assertions.dart';
export 'src/numerical_assertions.dart';
export 'src/string_assertions.dart';
|
require 'english/double_metaphone'
require 'test/unit'
#require 'fastercsv'
# 1218 tests, 2436 assertions
# Generates one test method per fixture line. Each line holds a word and its
# expected primary (and optional secondary) Double Metaphone codes as
# comma-separated values: word, primary[, secondary].
class TC_DoubleMetaphone < Test::Unit::TestCase
  DIR  = File.dirname(__FILE__)
  DATA = File.read(File.join(DIR,'fixture/double_metaphone.txt')).split(/\n/)

  DATA.each_with_index do |line, i|
    row = *line.split(', ')
    primary, secondary = English::DoubleMetaphone[row[0]]
    # Define test_<i> for this fixture row.
    define_method("test_#{i}") do
      assert_equal row[1], primary
      # When no secondary code is expected, it must equal the primary.
      assert_equal row[2], (secondary.nil?? primary : secondary)
    end
  end
end
|
package tictactoe
import(
"log"
"fmt"
"strings"
)
// GameNode describes one node in the game search: a board, the move being
// considered, and the player making it.
type GameNode struct {
	b    *Board
	i, j int    // where are they playing
	p    Player // whose turn is it
}
// CheckForWin reports the game's outcome: a pointer to the winning Player,
// a pointer to EMPTY on a draw (all nine cells filled), or nil if the game
// is still in progress. It aborts via log.Fatal if both O and X have a
// winning line on the same board.
// NOTE(review): a line of three EMPTY cells also lands in the winners map,
// but only the O and X entries are ever read, so it has no effect.
func (b *Board) CheckForWin() *Player {
	var winners map[Player]bool
	winners = make(map[Player]bool)
	// Check for a winner
	// rows
	for _, row := range b {
		if row[0] == row[1] && row[1] == row[2] {
			winners[row[0]] = true
		}
	}
	// cols
	for j := 0; j < 3; j++ {
		if b[0][j] == b[1][j] && b[1][j] == b[2][j] {
			winners[b[0][j]] = true
		}
	}
	// diags
	// top left to bottom right; top right to bottom left
	if (b[0][0] == b[1][1] && b[1][1] == b[2][2]) || (b[0][2] == b[1][1] && b[1][1] == b[2][0]) {
		winners[b[1][1]] = true
	}
	if winners[O] && winners[X] {
		log.Fatal("Multiple winners on board '" + b.ToString() + "'")
	} else if winners[O] {
		o := O
		return &o
	} else if winners[X] {
		x := X
		return &x
	}
	// Check for a draw
	filledIn := 0
	for _, row := range b {
		for _, space := range row {
			if space != EMPTY {
				filledIn++
			}
		}
	}
	if filledIn == 9 {
		e := EMPTY
		return &e
	}
	// Neither a winner nor a draw has happened yet
	return nil
}
// pickMoveHelper plays p at (i, j) on b (mutating b), then recursively
// explores all follow-up moves minimax-style, returning the best achievable
// outcome for p: the winning Player, or EMPTY for a tie. The result starts
// pessimistically as the opponent and is upgraded as better lines are found.
// depth is only used to indent the debug output printed on stdout.
func (b *Board) pickMoveHelper(p Player, i int, j int, depth int) (bestResult Player) {
	b[i][j] = p
	winner := b.CheckForWin()
	if winner != nil {
		return *winner
	}
	// Assume the worst: the opponent wins.
	if p == X {
		bestResult = O
	} else {
		bestResult = X
	}
	for i, row := range b {
		for j, space := range row {
			if space == EMPTY {
				// Explore each empty cell on a copy so b stays intact.
				b2 := b.deepCopyBoard()
				var result Player // EMPTY == TIE
				if p == O {
					result = b2.pickMoveHelper(X, i, j, depth+1)
				} else {
					result = b2.pickMoveHelper(O, i, j, depth+1)
				}
				if result == p { // this move lets us win
					fmt.Printf(strings.Repeat(" ", depth)+"%d can win at %d, %d on %q\n", p, i, j, b.ToString())
					bestResult = result
				} else if result == EMPTY && bestResult != p { // this moves gets us to tie
					bestResult = result
				}
			}
		}
	}
	return
}
// pickMove searches every empty cell and returns the coordinates of the
// best move for O: a winning move if one exists, otherwise a move that at
// least forces a tie. Returns (-1, -1) if no empty cell improves on the
// initial pessimistic assumption (X wins).
func (b *Board) pickMove() (bestR int, bestC int) {
	bestR, bestC = -1, -1
	bestResult := X
	for i, row := range b {
		for j, space := range row {
			if space == EMPTY {
				// Evaluate the move on a copy so b is left untouched.
				b2 := b.deepCopyBoard()
				fmt.Printf("Considering %d, %d on %q\n", i, j, b.ToString())
				var result Player // EMPTY == TIE
				result = b2.pickMoveHelper(O, i, j, 1)
				// we win or we draw (and don't have a way to win)
				if result == O || (result == EMPTY && bestResult == X) {
					bestR, bestC, bestResult = i, j, result
				}
			}
		}
	}
	return
}
// MakeMove places O at the position chosen by pickMove, mutating the board.
func (b *Board) MakeMove() {
	i, j := b.pickMove()
	b[i][j] = O // make the move we picked
}
|
/*
* Copyright (C) 2009-2016 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api
import org.specs2.mutable.Specification
// Specs for LoggerConfigurator.generateProperties: verifies which system
// properties are produced from the environment, the Play configuration, and
// the directly supplied property map (and how they interact).
class LoggerConfiguratorSpec extends Specification {

  "generateProperties" should {

    "generate in the simplest case" in {
      val env = Environment.simple()
      val config = Configuration.empty
      val properties = LoggerConfigurator.generateProperties(env, config, Map.empty)
      // Only application.home is generated by default.
      properties.size must beEqualTo(1)
      properties must havePair("application.home" -> env.rootPath.getAbsolutePath)
    }

    "generate in the case of including string config property" in {
      val env = Environment.simple()
      val config = Configuration(
        "play.logger.includeConfigProperties" -> true,
        "my.string.in.application.conf" -> "hello"
      )
      val properties = LoggerConfigurator.generateProperties(env, config, Map.empty)
      properties must havePair("my.string.in.application.conf" -> "hello")
    }

    "generate in the case of including integer config property" in {
      val env = Environment.simple()
      val config = Configuration(
        "play.logger.includeConfigProperties" -> true,
        "my.number.in.application.conf" -> 1
      )
      val properties = LoggerConfigurator.generateProperties(env, config, Map.empty)
      // Numeric config values are stringified.
      properties must havePair("my.number.in.application.conf" -> "1")
    }

    "generate in the case of including null config property" in {
      val env = Environment.simple()
      val config = Configuration(
        "play.logger.includeConfigProperties" -> true,
        "my.null.in.application.conf" -> null
      )
      val properties = LoggerConfigurator.generateProperties(env, config, Map.empty)
      // nulls are excluded, you must specify them directly
      // https://typesafehub.github.io/config/latest/api/com/typesafe/config/Config.html#entrySet--
      properties must not haveKey ("my.null.in.application.conf")
    }

    "generate in the case of direct properties" in {
      val env = Environment.simple()
      val config = Configuration.empty
      val optProperties = Map("direct.map.property" -> "goodbye")
      val properties = LoggerConfigurator.generateProperties(env, config, optProperties)
      properties.size must beEqualTo(2)
      properties must havePair("application.home" -> env.rootPath.getAbsolutePath)
      properties must havePair("direct.map.property" -> "goodbye")
    }

    "generate a null using direct properties" in {
      val env = Environment.simple()
      val config = Configuration.empty
      val optProperties = Map("direct.null.property" -> null)
      val properties = LoggerConfigurator.generateProperties(env, config, optProperties)
      // Unlike config values, direct properties may carry null.
      properties must havePair("direct.null.property" -> null)
    }

    "override config property with direct properties" in {
      val env = Environment.simple()
      val config = Configuration("some.property" -> "AAA")
      val optProperties = Map("some.property" -> "BBB")
      val properties = LoggerConfigurator.generateProperties(env, config, optProperties)
      // Direct properties win over config-derived ones.
      properties must havePair("some.property" -> "BBB")
    }
  }
}
|
#!/bin/bash
#
# Code-signs the Karabiner-DriverKit-VirtualHIDDeviceClient app bundle:
# copies the provisioning profile into the bundle, then signs it with the
# hardened runtime and the client entitlements.

# Replace with your identity
readonly CODE_SIGN_IDENTITY=C6DD0BCD24C737EA0505F1EB26B8BBEEDEC12F1B

set -e # forbid command failure

# Embed provisioning profile
cp \
    Karabiner-DriverKit-VirtualHIDDeviceClient/embedded.provisionprofile \
    build/Release/Karabiner-DriverKit-VirtualHIDDeviceClient.app/Contents/embedded.provisionprofile

# Fixed: quote $CODE_SIGN_IDENTITY to prevent word splitting / globbing
# (ShellCheck SC2086).
codesign \
    --sign "$CODE_SIGN_IDENTITY" \
    --entitlements Karabiner-DriverKit-VirtualHIDDeviceClient/entitlements.plist \
    --options runtime \
    --verbose \
    --force \
    build/Release/Karabiner-DriverKit-VirtualHIDDeviceClient.app
|
import itertools as it
def test_get_deleters(generic_case_data):
    """
    Test :meth:`.GenericFunctionalGroup.get_deleters`.

    Parameters
    ----------
    generic_case_data : :class:`.GenericCaseData`
        The test case, holding the functional group under test together
        with the expected deleter atoms.

    Returns
    -------
    None : :class:`NoneType`

    """
    case = generic_case_data
    _test_get_deleters(
        functional_group=case.functional_group,
        deleters=case.deleters,
    )
def _test_get_deleters(functional_group, deleters):
"""
Test :meth:`.GenericFunctionalGroup.get_deleters`.
Parameters
----------
functional_group : :class:`.GenericFunctionalGroup`
The functional group to test.
deleters : :class:`tuple` of :class:`.Atom`
The correct deleter atoms.
Returns
-------
None : :class:`NoneType`
"""
for atom1, atom2 in it.zip_longest(
functional_group.get_deleters(),
deleters,
):
assert atom1 is atom2
|
<?php
// Admin "delete" dispatcher: routes the ?action= request parameter to the
// matching include from a fixed whitelist; unknown or missing actions are
// redirected back to index.php. Requires an authenticated session.
// Fixed: replaced the short open tag "<?" with "<?php" so the script also
// runs when the short_open_tag ini directive is disabled.
enforce_login();

if ($_REQUEST['action']) {
    switch($_REQUEST['action']) {
        case 'email':
            include('delete_email.php');
            break;
        case 'takeemail':
            include('take_delete_email.php');
            break;
        case 'ip':
            include('delete_ip.php');
            break;
        case 'takeip':
            include('take_delete_ip.php');
            break;
        default:
            header('Location: index.php');
    }
} else {
    header('Location: index.php');
}
?>
|
<?php
/**
* Created by PhpStorm.
* User: Danil Baibak danil.baibak@gmail.com
* Date: 23/04/15
* Time: 11:58
*/
namespace Bundles\WidgetBundle\Tests\Service;
use Bundles\WidgetBundle\Service\ImageService;
use Bundles\WidgetBundle\Tests\Entity\UserFakeRepository;
// Unit tests for ImageService, using a mocked Doctrine EntityManager.
// The actual test bodies are currently commented out, so these tests are
// effectively disabled.
class ImageServiceTest extends \PHPUnit_Framework_TestCase
{
    private $imageService;

    // NOTE(review): overriding the TestCase constructor without calling
    // parent::__construct() is a PHPUnit antipattern — setUp() is the
    // conventional place for fixture creation; confirm before re-enabling.
    public function __construct()
    {
//        $emMock = $this->getEmMock();
//        $this->imageService = new ImageService($emMock);
    }

    /**
     * Mock for \Doctrine\ORM\EntityManager
     *
     * getRepository() returns a UserFakeRepository, getClassMetadata()
     * returns a stub metadata object, persist() and flush() are no-ops.
     *
     * @return \PHPUnit_Framework_MockObject_MockObject
     */
    protected function getEmMock()
    {
        $emMock = $this->getMock('\Doctrine\ORM\EntityManager',
            array('getRepository', 'getClassMetadata', 'persist', 'flush'), array(), '', false);
        $emMock->expects($this->any())
            ->method('getRepository')
            ->will($this->returnValue(new UserFakeRepository()));
        $emMock->expects($this->any())
            ->method('getClassMetadata')
            ->will($this->returnValue((object)array('name' => 'aClass')));
        $emMock->expects($this->any())
            ->method('persist')
            ->will($this->returnValue(null));
        $emMock->expects($this->any())
            ->method('flush')
            ->will($this->returnValue(null));
        return $emMock;
    }

    /**
     * Test function for getTextPosition (currently disabled).
     */
    public function testGetTextPosition()
    {
//        $textPosition = $this->imageService->getTextPosition(100, 100, 'Test text');
//
//        $this->assertEquals(array('marginLeft' => 0.5, 'marginTop' => 60), $textPosition);
    }

    /**
     * Check case with wrong userId (currently disabled).
     */
    public function testGetWidgetImage()
    {
//        $widgetImage = $this->imageService->getWidgetImage(1, 100, 100, '000', 'fff');
//
//        $this->assertFalse($widgetImage);
    }
}
|
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Objects;
import java.util.StringTokenizer;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author Gerasimov
*/
/**
 * Competitive-programming solution: for each test case, decides for every
 * dish whether it could already be sold out ("Y") or definitely is not
 * ("N"), given partial knowledge about the first m-1 requests.
 */
public class Main {

    // Shared console reader and buffered writer used by all test cases.
    static BufferedReader inp = new BufferedReader(new InputStreamReader(System.in));
    static PrintWriter output = new PrintWriter(
            new BufferedWriter(new OutputStreamWriter(
                    System.out)));

    /**
     * Reads the number of test cases t, then solves each case via Raschet().
     *
     * @param args the command line arguments
     */
    public static void main(String[] args) throws FileNotFoundException, IOException {
        StringTokenizer input = new StringTokenizer(inp.readLine());
        int t = Integer.parseInt(input.nextToken());
        for (int i = 0; i < t; i++) {
            inp.readLine(); // skip the blank line between test cases
            Raschet();
        }
        output.close();
    }

    /**
     * Solves one test case: reads m (requester index), k (number of dishes),
     * the remaining portions per dish, and the known facts about the first
     * m-1 requests, then prints one Y/N character per dish.
     */
    public static void Raschet() throws IOException {
        StringTokenizer input = new StringTokenizer(inp.readLine());
        int m = Integer.parseInt(input.nextToken());
        int k = Integer.parseInt(input.nextToken());
        // original array (remaining portions per dish)
        int[] mas = new int[k + 1];
        // snapshot of the array taken when the first failure ('1') is seen
        int[] mas2 = new int[k + 1];
        boolean[] mas3 = new boolean[k + 1];
        boolean[] mas4 = new boolean[k + 1];
        input = new StringTokenizer(inp.readLine());
        // whether a zero has already occurred in the array
        boolean is0 = false;
        // number of zeros (unknown choices) after the first failure
        int count0 = 0;
        // number of zeros (unknown choices) before the first failure
        int count0pre1 = 0;
        // read the array
        for (int i = 1; i <= k; i++) {
            mas[i] = Integer.parseInt(input.nextToken());
        }
        boolean p = false;
        // read what is known about the first m-1 requesters
        for (int i = 0; i < m - 1; i++) {
            input = new StringTokenizer(inp.readLine());
            // which dish this requester chose
            int t = Integer.parseInt(input.nextToken());
            // whether the choice failed
            int r = Integer.parseInt(input.nextToken());
            // if this is the first failure seen so far
            if (r == 1 && !is0) {
                p = true;
                // copy the array state at this moment
                mas2 = Arrays.copyOf(mas, k + 1);
                // the array definitely contains a zero now
                is0 = true;
                // remember the number of zeros seen so far
                count0pre1 = count0;
                // and reset the counter
                count0 = 0;
            }
            // if the dish number is known
            if (t > 0) {
                // decrease its remaining count by one
                mas[t]--;
                if (p){mas4[t]=true;}
                // if it reached zero, remember that
                if (mas[t] == 0) {
                    is0 = true;
                }
            } else {
                // unknown choice: bump the zero counter
                count0++;
            }
        }
        int min0 = 999999999;
        // find the minimal value in the snapshot (among dishes not chosen
        // after the failure) such that the resulting count is not zero there
        if (p) {
            for (int i = 1; i <= k; i++) {
                if (!mas4[i] && min0 >= mas2[i]) {
                    min0 = mas2[i];
                }
                if (!mas4[i] &&count0pre1 >= mas2[i]) {
                    mas3[i] = true;
                }
            }
        }
        // print Y if the remaining amount is no larger than the total number
        // of unknown requests minus the snapshot minimum, or if flagged above
        for (int i = 1; i <= k; i++) {
            if ((mas[i] <= count0 + count0pre1 - (min0 == 999999999 ? 0 : min0)) || mas3[i]) {
                output.print("Y");
            } else {
                output.print("N");
            }
        }
        output.println();
    }
}
|
use crate::Opt;
use notify::{
event::{Event as NEvent, EventKind as NEventKind},
immediate_watcher, RecursiveMode, Watcher,
};
use std::fs;
use std::fs::File;
use std::io::prelude::*;
use std::net::TcpStream;
use std::process::Command;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
/// Shared tray state: connection quality, latency, and the netctl profiles.
#[derive(Debug)]
pub struct State {
    pub link_quality: u8,                      // wireless link quality
    pub ping: f32,                             // last measured ping, ms
    pub all_profiles: Arc<Mutex<Vec<String>>>, // every profile found in /etc/netctl
    pub active_profile: Option<String>,        // currently active profile, if any
}
/// Watches `dir` for file creation/removal and keeps `all_profiles` in sync:
/// created files are appended as profile names, removed files are retained out.
/// File names that are not valid UTF-8 are reported on stderr and skipped.
///
/// NOTE(review): the watcher is a local and is dropped when this function
/// returns — with notify's immediate_watcher that typically stops event
/// delivery; confirm callers expect this or keep the watcher alive.
pub fn inotify_watch(
    all_profiles: Arc<Mutex<Vec<String>>>,
    dir: &str,
) -> Result<(), notify::Error> {
    // initialize the inotify watcher
    let mut watcher = immediate_watcher(move |res: Result<NEvent, _>| match res {
        Ok(event) => {
            match event.kind {
                NEventKind::Create(_) => {
                    // Add the new profile
                    for path in event.paths {
                        match path.file_name().unwrap().to_str() {
                            Some(p) => all_profiles.lock().unwrap().push(p.to_owned()),
                            None => {
                                eprintln!(
                                    "Can't convert OsStr to str: {:?}",
                                    path.file_name().unwrap()
                                );
                                continue;
                            }
                        };
                    }
                }
                NEventKind::Remove(_) => {
                    // Remove the profile
                    for path in event.paths {
                        match path.file_name().unwrap().to_str() {
                            Some(p) => {
                                all_profiles.lock().unwrap().retain(|x| *x != p);
                            }
                            None => {
                                eprintln!(
                                    "Can't convert OsStr to str: {:?}",
                                    path.file_name().unwrap()
                                );
                                continue;
                            }
                        };
                    }
                }
                _ => {}
            }
        }
        Err(e) => eprintln!("watch error: {:?}", e),
    })?;
    watcher.watch(dir, RecursiveMode::Recursive)?;
    Ok(())
}
/// Scans /etc/netctl and appends the name of every profile to `all_profiles`.
/// Each regular file directly under /etc/netctl is a profile; its file name
/// is the profile name. Directories (e.g. hooks/, interfaces/) are skipped.
pub fn scan_profiles(all_profiles: &mut Vec<String>) -> Result<(), std::io::Error> {
    for entry in fs::read_dir("/etc/netctl/")? {
        let path = entry?.path();
        // Only regular files are profile configurations.
        if !path.metadata()?.is_file() {
            continue;
        }
        // The profile name is the file name of its configuration file.
        if let Some(profile_name) = path.file_name().unwrap().to_str() {
            all_profiles.push(profile_name.to_owned());
        } else {
            eprintln!(
                "Can't convert OsStr to str: {:?}",
                path.file_name().unwrap()
            );
        }
    }
    Ok(())
}
/// Updates the netctl-tray state: the currently active profile, its wireless
/// link quality (from /proc/net/wireless) and the ping to `args.host`.
pub fn update_state(state: &mut State, args: &Opt) -> Result<(), std::io::Error> {
    // Ask netctl (or netctl-auto, depending on build features) for the
    // profile list; the active one is prefixed with "* ".
    #[cfg(not(feature = "auto"))]
    let raw_profiles = Command::new("netctl").arg("list").output()?;
    #[cfg(feature = "auto")]
    let raw_profiles = Command::new("netctl-auto").arg("list").output()?;
    // Iterate through each line
    let mut active_profile = None;
    for line in raw_profiles.stdout.split(|c| *c == b'\n') {
        if line.is_empty() {
            continue;
        }
        // If the line starts with an asterisk, then the profile is active;
        // its name starts after the two-character "* " prefix.
        if line[0] == b'*' {
            active_profile = match std::str::from_utf8(&line[2..]) {
                Ok(s) => Some(s.to_owned()),
                Err(e) => {
                    eprintln!("Can't read profile name from netctl list: {:?}", e);
                    break;
                }
            };
            break;
        }
    }
    state.active_profile = active_profile;
    if let Some(active_profile) = &state.active_profile {
        // Read the profile's configuration to find which interface it uses.
        let mut current_profile_file = File::open(&format!("/etc/netctl/{}", active_profile))?;
        let mut current_profile_contents = String::new();
        current_profile_file.read_to_string(&mut current_profile_contents)?;
        // iterate over lines to find the one specifying the interface
        let mut profile_interface = "";
        for line in current_profile_contents.split('\n') {
            if line.starts_with("Interface") {
                let mut interface = match line.split('=').nth(1) {
                    Some(i) => i,
                    None => {
                        eprintln!(
                            "Profile not properly configured! Corrupted file: /etc/netctl/{}",
                            active_profile
                        );
                        continue;
                    }
                }
                .trim();
                // Strip matching surrounding single or double quotes, if any.
                if (interface.starts_with('"') && interface.ends_with('"'))
                    || (interface.starts_with('\'') && interface.ends_with('\''))
                {
                    interface = &interface[1..interface.len() - 1];
                }
                profile_interface = interface;
                break;
            }
        }
        // BUGFIX: guard against an empty interface name. `starts_with("")` is
        // true for every line, so a profile without an Interface= entry used
        // to pick up the first listed interface's stats.
        if !profile_interface.is_empty() {
            // The link quality can be found in /proc/net/wireless.
            let mut file = File::open("/proc/net/wireless")?;
            let mut contents = String::new();
            file.read_to_string(&mut contents)?;
            // The first two lines are headers; scan for our interface.
            for line in contents.split('\n').skip(2) {
                if line.starts_with(profile_interface) {
                    // The third non-empty column is the link quality; it has
                    // a trailing dot that must be stripped before parsing.
                    let mut columns = line.split(' ').filter(|x| !x.is_empty());
                    let mut link_quality = columns.nth(2).unwrap();
                    link_quality = &link_quality[..link_quality.len() - 1];
                    let link_quality: u8 = link_quality.parse().unwrap();
                    state.link_quality = link_quality;
                }
            }
        }
    }
    // Measure the ping as the TCP connect time to the configured host with a
    // 500 ms timeout; unreachable hosts are reported as infinity.
    let now = Instant::now();
    if TcpStream::connect_timeout(&args.host, Duration::from_millis(500)).is_ok() {
        state.ping = now.elapsed().as_millis() as f32;
    } else {
        state.ping = f32::INFINITY;
    }
    Ok(())
}
|
/*
* Copyright 2013-2015 Vitalii Fedorchenko (nrecosite.com)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License version 3
* as published by the Free Software Foundation
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the NReco Recommender software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers as an ASP,
* making recommendations in a web application, shipping NReco Recommender with a closed
* source product.
*
* For more information, please contact: support@nrecosite.com
*
* Parts of this code are based on Apache Mahout and Apache Commons Mathematics Library that were licensed under the
* Apache 2.0 License (see http://www.apache.org/licenses/LICENSE-2.0).
*
* Unless required by applicable law or agreed to in writing, software distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
using System;
using System.Linq;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using NReco.Math3.Primes;
namespace NReco.CF {
/// <summary>
/// The source of random stuff for the whole project. This lets us make all randomness in the project
/// predictable, if desired, for when we run unit tests, which should be repeatable.
/// </summary>
public sealed class RandomUtils {
    /// The largest prime less than 2<sup>31</sup>-1 that is the smaller of a twin prime pair.
    public const int MAX_INT_SMALLER_TWIN_PRIME = 2147482949;

    // Registry of every RandomWrapper handed out, so useTestSeed() can
    // retroactively reset instances that were created before it was called.
    // NOTE(review): entries are never removed, so all wrappers stay reachable
    // for the process lifetime - confirm this is acceptable for long runs.
    private static IDictionary<RandomWrapper,Boolean> INSTANCES =
        new ConcurrentDictionary<RandomWrapper,Boolean>();

    // When true, every newly created wrapper starts from the fixed test seed.
    private static bool testSeed = false;

    // Static-only utility class; not instantiable.
    private RandomUtils() { }

    /// <summary>
    /// Makes all project randomness deterministic (for repeatable unit
    /// tests): resets every already-created wrapper to the test seed and
    /// marks all future wrappers to start from it too.
    /// </summary>
    public static void useTestSeed() {
        testSeed = true;
        lock (INSTANCES) {
            foreach (RandomWrapper rng in INSTANCES.Keys) {
                rng.resetToTestSeed();
            }
        }
    }

    /// <summary>Creates and registers a new RandomWrapper
    /// (test-seeded if useTestSeed() was called).</summary>
    public static RandomWrapper getRandom() {
        RandomWrapper random = new RandomWrapper();
        if (testSeed) {
            random.resetToTestSeed();
        }
        INSTANCES[random] = true;
        return random;
    }

    /// <summary>Creates and registers a new RandomWrapper with an explicit seed.</summary>
    public static RandomWrapper getRandom(long seed) {
        RandomWrapper random = new RandomWrapper(seed);
        INSTANCES[random] = true;
        return random;
    }

    /// @return what {@link Double#hashCode()} would return for the same value
    public static int hashDouble(double value) {
        return BitConverter.DoubleToInt64Bits(value).GetHashCode();
    }

    /// @return what {@link Float#hashCode()} would return for the same value
    public static int hashFloat(float value) {
        return BitConverter.ToInt32( BitConverter.GetBytes(value), 0); // float.floatToIntBits(value);
    }

    /// <p>
    /// Finds next-largest "twin primes": numbers p and p+2 such that both are prime. Finds the smallest such p
    /// such that the smaller twin, p, is greater than or equal to n. Returns p+2, the larger of the two twins.
    /// </p>
    public static int nextTwinPrime(int n) {
        if (n > MAX_INT_SMALLER_TWIN_PRIME) {
            throw new ArgumentException();
        }
        if (n <= 3) {
            // 5 is the larger of the smallest twin pair (3, 5).
            return 5;
        }
        int next = Primes.nextPrime(n);
        while (!Primes.isPrime(next + 2)) {
            // next is an odd prime, so next+2 (just shown composite) and
            // next+3 (even) cannot be the lower twin; skip to next+4.
            next = Primes.nextPrime(next + 4);
        }
        return next + 2;
    }
}
}
|
//
// Copyright (c) 2018, University of Edinburgh
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of nor the names of its contributors may be used to
// endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#include <exotica_core/exotica_core.h>
using namespace exotica;
// Builds a complete EXOTica setup from hardcoded generic Initializers (no
// XML/ROS-param configuration), then runs the IK solver at 500 Hz while the
// target traces a figure-eight, until ROS shuts down.
void run()
{
    Server::InitRos(std::shared_ptr<ros::NodeHandle>(new ros::NodeHandle("~")));

    // Scene using joint group 'arm'
    Initializer scene("Scene", {{"Name", std::string("MyScene")},
                                {"JointGroup", std::string("arm")},
                                {"URDF", std::string("{exotica_examples}/resources/robots/lwr_simplified.urdf")},
                                {"SRDF", std::string("{exotica_examples}/resources/robots/lwr_simplified.srdf")}});

    // End-effector task map with two position frames
    // NOTE(review): only one frame is actually listed below despite the
    // comment above - confirm whether a second frame was intended.
    Initializer map("exotica/EffFrame", {{"Name", std::string("Position")},
                                         {"EndEffector", std::vector<Initializer>({
                                              Initializer("Frame", {{"Link", std::string("lwr_arm_6_link")}, {"LinkOffset", Eigen::VectorTransform(0, 0, 0, 0.7071067811865476, -4.3297802811774664e-17, 0.7071067811865475, 4.3297802811774664e-17)}}),
                                          })}});
    Initializer cost("exotica/Task", {{"Task", std::string("Position")}});
    // Per-joint configuration-space weights (7-DoF arm).
    Eigen::VectorXd W(7);
    W << 7, 6, 5, 4, 3, 2, 1;

    Eigen::VectorXd start_state = Eigen::VectorXd::Zero(7);
    Eigen::VectorXd nominal_state = Eigen::VectorXd::Zero(7);

    Initializer problem("exotica/UnconstrainedEndPoseProblem", {
                                                                   {"Name", std::string("MyProblem")},
                                                                   {"PlanningScene", scene},
                                                                   {"Maps", std::vector<Initializer>({map})},
                                                                   {"Cost", std::vector<Initializer>({cost})},
                                                                   {"W", W},
                                                                   {"Tolerance", 1e-5},
                                                                   {"StartState", start_state},
                                                                   {"NominalState", nominal_state},
                                                               });
    Initializer solver("exotica/IKSolver", {
                                               {"Name", std::string("MySolver")},
                                               {"MaxIterations", 1},
                                           });
    HIGHLIGHT_NAMED("GenericLoader", "Loaded from a hardcoded generic initializer.");

    // Initialize
    PlanningProblemPtr any_problem = Setup::CreateProblem(problem);
    MotionSolverPtr any_solver = Setup::CreateSolver(solver);

    // Assign the problem to the solver
    any_solver->SpecifyProblem(any_problem);
    UnconstrainedEndPoseProblemPtr my_problem = std::static_pointer_cast<UnconstrainedEndPoseProblem>(any_problem);

    // Create the initial configuration
    Eigen::VectorXd q = Eigen::VectorXd::Zero(any_problem->N);
    Eigen::MatrixXd solution;

    HIGHLIGHT("Calling solve() in an infinite loop");

    ros::Rate loop_rate(500.0);
    ros::WallTime init_time = ros::WallTime::now();
    while (ros::ok())
    {
        // Update the goal if necessary
        // e.g. figure eight
        const double t = ros::Duration((ros::WallTime::now() - init_time).toSec()).toSec();
        my_problem->cost.y = {0.6,
                              -0.1 + sin(t * 2.0 * M_PI * 0.5) * 0.1,
                              0.5 + sin(t * M_PI * 0.5) * 0.2, 0, 0, 0};

        // Solve the problem using the IK solver
        my_problem->SetStartState(q);
        any_solver->Solve(solution);

        // Warm-start the next iteration from the last solution row.
        q = solution.row(solution.rows() - 1);

        my_problem->Update(q);
        my_problem->GetScene()->GetKinematicTree().PublishFrames();

        ros::spinOnce();
        loop_rate.sleep();
    }

    // All classes will be destroyed at this point.
}
// Entry point: initialize ROS, run the demo, then tear EXOTica down.
int main(int argc, char **argv)
{
    ros::init(argc, argv, "example_cpp_init_generic_node");
    HIGHLIGHT("Started");

    // Run demo code
    run();

    // Clean up
    // Run this only after all the exotica classes have been disposed of!
    Setup::Destroy();
}
|
/****************************************************************************
* Copyright (C) 2014 by Brendan Duncan. *
* *
* This file is part of DartRay. *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.*
* See the License for the specific language governing permissions and *
* limitations under the License. *
* *
* This project is based on PBRT v2 ; see http://www.pbrt.org *
* pbrt2 source code Copyright(c) 1998-2010 Matt Pharr and Greg Humphreys. *
****************************************************************************/
part of surface_integrators;
/**
 * Path tracing surface integrator (port of pbrt's PathIntegrator).
 *
 * Traces paths of up to [maxDepth] bounces using next-event estimation
 * (one light sample per vertex) plus BSDF sampling, with Russian roulette
 * termination after the third bounce.
 */
class PathIntegrator extends SurfaceIntegrator {
  PathIntegrator(int md) {
    maxDepth = md;
  }

  /// Returns the radiance along ray [r] for intersection [isect].
  Spectrum Li(Scene scene, Renderer renderer,
              RayDifferential r, Intersection isect, Sample sample,
              RNG rng) {
    // Declare common path integration variables
    Spectrum pathThroughput = new Spectrum(1.0);
    Spectrum L = new Spectrum(0.0);
    RayDifferential ray = new RayDifferential.fromRay(r);
    bool specularBounce = false;
    Intersection localIsect = new Intersection();
    Intersection isectP = isect;
    // Single-element lists emulate out-parameters for bsdf.sample_f.
    Vector wi = new Vector();
    List<double> pdf = [0.0];
    List<int> flags = [0];

    for (int bounces = 0; ; ++bounces) {
      // Possibly add emitted light at path vertex: only at the first vertex
      // or after a specular bounce (otherwise direct lighting covered it).
      if (bounces == 0 || specularBounce) {
        L += pathThroughput * isectP.Le(-ray.direction);
      }

      // Sample illumination from lights to find path contribution.
      // The first SAMPLE_DEPTH bounces use the precomputed sample offsets;
      // deeper bounces fall back to uniform random sampling.
      BSDF bsdf = isectP.getBSDF(ray);
      Point p = bsdf.dgShading.p;
      Normal n = bsdf.dgShading.nn;
      Vector wo = -ray.direction;
      if (bounces < SAMPLE_DEPTH) {
        L += pathThroughput *
             Integrator.UniformSampleOneLight(scene, renderer, p, n, wo,
                 isectP.rayEpsilon, ray.time, bsdf,
                 sample, rng,
                 lightNumOffset[bounces], lightSampleOffsets[bounces],
                 bsdfSampleOffsets[bounces]);
      } else {
        L += pathThroughput *
             Integrator.UniformSampleOneLight(scene, renderer, p, n, wo,
                 isectP.rayEpsilon, ray.time, bsdf, sample,
                 rng);
      }

      // Sample BSDF to get new path direction
      // Get _outgoingBSDFSample_ for sampling new path direction
      BSDFSample outgoingBSDFSample;
      if (bounces < SAMPLE_DEPTH) {
        outgoingBSDFSample = new BSDFSample.sample(sample,
            pathSampleOffsets[bounces], 0);
      } else {
        outgoingBSDFSample = new BSDFSample.random(rng);
      }

      Spectrum f = bsdf.sample_f(wo, wi, outgoingBSDFSample, pdf, BSDF_ALL,
                                 flags);
      // Black BSDF value or zero pdf: the path carries no more light.
      if (f.isBlack() || pdf[0] == 0.0) {
        break;
      }

      specularBounce = (flags[0] & BSDF_SPECULAR) != 0;
      pathThroughput *= f * Vector.AbsDot(wi, n) / pdf[0];
      ray = new RayDifferential.child(p, wi, ray, isectP.rayEpsilon);

      // Possibly terminate the path (Russian roulette after 3 bounces).
      if (bounces > 3) {
        double continueProbability = Math.min(0.5, pathThroughput.luminance());
        if (rng.randomFloat() > continueProbability) {
          break;
        }
        // Compensate surviving paths to keep the estimator unbiased.
        pathThroughput /= continueProbability;
      }

      if (bounces == maxDepth) {
        break;
      }

      // Find next vertex of path
      if (!scene.intersect(ray, localIsect)) {
        // The ray escaped the scene: infinite/area light emission is added
        // only after a specular bounce (see emission note above).
        if (specularBounce) {
          for (int i = 0; i < scene.lights.length; ++i) {
            L += pathThroughput * scene.lights[i].Le(ray);
          }
        }
        break;
      }

      pathThroughput *= renderer.transmittance(scene, ray, null, rng);
      isectP = localIsect;
    }

    return L;
  }

  /// Reserves per-bounce sample offsets for the first SAMPLE_DEPTH bounces.
  void requestSamples(Sampler sampler, Sample sample, Scene scene) {
    for (int i = 0; i < SAMPLE_DEPTH; ++i) {
      lightSampleOffsets[i] = new LightSampleOffsets(1, sample);
      lightNumOffset[i] = sample.add1D(1);
      bsdfSampleOffsets[i] = new BSDFSampleOffsets(1, sample);
      pathSampleOffsets[i] = new BSDFSampleOffsets(1, sample);
    }
  }

  /// Factory used by the scene parser; 'maxdepth' defaults to 5.
  static PathIntegrator Create(ParamSet params) {
    int maxDepth = params.findOneInt('maxdepth', 5);
    return new PathIntegrator(maxDepth);
  }

  // Maximum number of bounces before a path is cut off.
  int maxDepth;
  // Number of bounces that get dedicated precomputed sample offsets.
  static const int SAMPLE_DEPTH = 3;
  // Fixed-length per-bounce sample offset tables (length SAMPLE_DEPTH).
  List<LightSampleOffsets> lightSampleOffsets =
      new List<LightSampleOffsets>(SAMPLE_DEPTH);
  List<int> lightNumOffset = new List<int>(SAMPLE_DEPTH);
  List<BSDFSampleOffsets> bsdfSampleOffsets =
      new List<BSDFSampleOffsets>(SAMPLE_DEPTH);
  List<BSDFSampleOffsets> pathSampleOffsets =
      new List<BSDFSampleOffsets>(SAMPLE_DEPTH);
}
|
// ReSharper disable UnusedMember.Global
// ReSharper disable MemberCanBePrivate.Global
// ReSharper disable UnusedType.Global
namespace Caxapexac.Common.Sharp.Extensions
{
public static class ObjectExtensions
{
    /// <summary>
    /// Returns true if the runtime type of <paramref name="self"/> exposes
    /// at least one public method named <paramref name="methodName"/>.
    /// </summary>
    /// <remarks>
    /// BUGFIX: uses GetMethods() instead of GetMethod(name) because the
    /// latter throws AmbiguousMatchException when the name is overloaded
    /// (e.g. "abc".HasMethod("ToString")).
    /// </remarks>
    public static bool HasMethod(this object self, string methodName)
    {
        foreach (var method in self.GetType().GetMethods())
        {
            if (method.Name == methodName) return true;
        }
        return false;
    }

    /// <summary>Returns true if the runtime type of <paramref name="self"/>
    /// exposes a public field named <paramref name="fieldName"/>.</summary>
    public static bool HasField(this object self, string fieldName)
    {
        return self.GetType().GetField(fieldName) != null;
    }

    /// <summary>
    /// Returns true if the runtime type of <paramref name="self"/> exposes a
    /// public property named <paramref name="propertyName"/>.
    /// </summary>
    /// <remarks>
    /// Uses GetProperties() to avoid AmbiguousMatchException when a derived
    /// type hides a base property with <c>new</c>.
    /// </remarks>
    public static bool HasProperty(this object self, string propertyName)
    {
        foreach (var property in self.GetType().GetProperties())
        {
            if (property.Name == propertyName) return true;
        }
        return false;
    }
}
}
|
@* Top navigation bar partial for the Sillystringz factory manager. *@
@* NOTE(review): the <li> elements are direct children of a <div>, which is
   invalid HTML (they need a <ul>/<ol> parent). Confirm the CSS selectors for
   #nav-bar before restructuring the markup. *@
<div id="nav-bar">
<li class="nav-item" id="header">Sillystringz Factory Manager</li>
<li class="nav-item nav-link">@Html.ActionLink("Home", "Index", "Home")</li>
<li class="nav-item nav-link">@Html.ActionLink("Engineers", "Index", "Engineers")</li>
<li class="nav-item nav-link">@Html.ActionLink("Machines", "Index", "Machines")</li>
</div>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import chain
from operator import methodcaller
import regex as re
from six.moves import zip_longest
from dateparser.utils import normalize_unicode
PARSER_HARDCODED_TOKENS = [":", ".", " ", "-", "/"]
PARSER_KNOWN_TOKENS = ["am", "pm", "a", "p", "UTC", "GMT", "Z"]
ALWAYS_KEEP_TOKENS = ["+"] + PARSER_HARDCODED_TOKENS
class UnknownTokenError(Exception):
    """Error for tokens that cannot be resolved against a language Dictionary
    (not raised within this module itself)."""
    pass
class Dictionary(object):
    """Translation dictionary for a single language.

    Maps the language's known lowercase words to canonical parser tokens
    (e.g. a translated month name -> 'january'); words from the language's
    'skip'/'pertain' sections map to None.  Also splits input strings into
    known/unknown tokens via :meth:`split`.

    The two class-level caches are shared by every instance and are keyed by
    the settings registry key first, then by language name.
    """

    _split_regex_cache = {}
    _sorted_words_cache = {}

    def __init__(self, language_info, settings=None):
        """Builds the word -> canonical-token mapping from ``language_info``."""
        dictionary = {}
        self._settings = settings
        self.info = language_info

        # Words to be ignored entirely map to None.
        if 'skip' in language_info:
            skip = map(methodcaller('lower'), language_info['skip'])
            dictionary.update(zip_longest(skip, [], fillvalue=None))
        if 'pertain' in language_info:
            pertain = map(methodcaller('lower'), language_info['pertain'])
            dictionary.update(zip_longest(pertain, [], fillvalue=None))
        # Translated weekday/month/unit words map back to the English token.
        for word in ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday',
                     'january', 'february', 'march', 'april', 'may', 'june', 'july',
                     'august', 'september', 'october', 'november', 'december',
                     'year', 'month', 'week', 'day', 'hour', 'minute', 'second',
                     'ago', 'in']:
            translations = map(methodcaller('lower'), language_info[word])
            dictionary.update(zip_longest(translations, [], fillvalue=word))
        # Formatting tokens and parser-known tokens map to themselves.
        dictionary.update(zip_longest(ALWAYS_KEEP_TOKENS, ALWAYS_KEEP_TOKENS))
        dictionary.update(zip_longest(map(methodcaller('lower'),
                                          PARSER_KNOWN_TOKENS),
                                      PARSER_KNOWN_TOKENS))

        self._dictionary = dictionary
        self._no_word_spacing = language_info.get('no_word_spacing', False)

    def __contains__(self, key):
        if key in self._settings.SKIP_TOKENS:
            return True
        return self._dictionary.__contains__(key)

    def __getitem__(self, key):
        if key in self._settings.SKIP_TOKENS:
            return None
        return self._dictionary.__getitem__(key)

    def __iter__(self):
        return chain(self._settings.SKIP_TOKENS, iter(self._dictionary))

    def split(self, string, keep_formatting):
        """Recursively splits ``string`` by the words in this dictionary."""
        if not string:
            return string

        regex = self._get_split_regex_cache()
        match = regex.match(string)
        if not match:
            return [string] if self._should_capture(string, keep_formatting) else []

        unparsed, known, unknown = match.groups()
        splitted = [known] if self._should_capture(known, keep_formatting) else []
        if unparsed and self._should_capture(unparsed, keep_formatting):
            splitted = [unparsed] + splitted
        if unknown:
            splitted.extend(self.split(unknown, keep_formatting))

        return splitted

    def _should_capture(self, token, keep_formatting):
        # Capture a token if formatting is preserved, it is an always-keep
        # token, or it contains at least one "word" character.
        return (
            keep_formatting or
            (token in ALWAYS_KEEP_TOKENS) or
            re.match(r"^.*[^\W_].*$", token, re.U)
        )

    def _get_sorted_words_from_cache(self):
        # BUGFIX: the cache used to *replace* the whole per-registry-key dict
        # with a single-language dict on every miss, evicting every other
        # language's entry.  setdefault lets languages accumulate instead.
        cache = self._sorted_words_cache.setdefault(self._settings.registry_key, {})
        if self.info['name'] not in cache:
            # Longest words first so the split regex prefers the longest match.
            cache[self.info['name']] = sorted(self, key=len, reverse=True)
        return cache[self.info['name']]

    def _get_split_regex_cache(self):
        # Same per-language cache-eviction fix as _get_sorted_words_from_cache.
        cache = self._split_regex_cache.setdefault(self._settings.registry_key, {})
        if self.info['name'] not in cache:
            self._construct_split_regex()
        return cache[self.info['name']]

    def _construct_split_regex(self):
        known_words_group = u"|".join(map(re.escape, self._get_sorted_words_from_cache()))
        if self._no_word_spacing:
            # Languages written without word spacing: match anywhere.
            regex = r"^(.*?)({})(.*)$".format(known_words_group)
        else:
            # Otherwise a known word must sit on digit/underscore/non-word
            # boundaries on both sides.
            regex = r"^(.*?(?:\A|\d|_|\W))({})((?:\d|_|\W|\Z).*)$".format(known_words_group)
        self._split_regex_cache.setdefault(self._settings.registry_key, {})[
            self.info['name']] = re.compile(regex, re.UNICODE | re.IGNORECASE)
class NormalizedDictionary(Dictionary):
    """A Dictionary whose keys are unicode-normalized after construction."""

    def __init__(self, language_info, settings=None):
        super(NormalizedDictionary, self).__init__(language_info, settings)
        self._normalize()

    def _normalize(self):
        # Re-keys the dictionary under normalized (e.g. accent-stripped)
        # forms.  Keys whose normalized form collides with another existing
        # key are set aside first, then written back (overwriting the
        # colliding entry) only if they are skip/pertain words.
        new_dict = {}
        conflicting_keys = []
        for key, value in self._dictionary.items():
            normalized = normalize_unicode(key)
            if key != normalized and normalized in self._dictionary:
                conflicting_keys.append(key)
            else:
                new_dict[normalized] = value
        for key in conflicting_keys:
            normalized = normalize_unicode(key)
            if key in (self.info.get('skip', []) + self.info.get('pertain', [])):
                new_dict[normalized] = self._dictionary[key]
        self._dictionary = new_dict
|
'use strict';
const assert = require('assert');
const asyncUtils = exports;
/**
 * Applies `functionToBeApplied` to each element of `array`, one element at a
 * time, awaiting each returned Promise before moving to the next element.
 *
 * Note: only values returned as Promises are collected; synchronous return
 * values and non-array inputs contribute nothing to the result.
 *
 * @param array - candidate array of elements to process
 * @param functionToBeApplied - function invoked once per element (required)
 * @returns {Promise<Array>} resolved values of all Promise-returning calls
 */
asyncUtils.forEachSequential = async function(array, functionToBeApplied) {
    assert(functionToBeApplied instanceof Function, 'The second parameter has to be a function.');

    const collected = [];
    if (!Array.isArray(array))
    {
        return collected;
    }
    for (const element of array)
    {
        const maybePromise = functionToBeApplied(element);
        if (maybePromise instanceof Promise)
        {
            collected.push(await maybePromise);
        }
    }
    return collected;
};
/**
 * Applies `functionToBeApplied` to every element of `array` without waiting
 * between calls, then resolves once all returned Promises have settled.
 *
 * Note: only values returned as Promises are collected; synchronous return
 * values and non-array inputs contribute nothing to the result.
 *
 * @param array
 * @param functionToBeApplied
 * @returns {Promise} resolves to the array of awaited Promise results
 */
asyncUtils.forEachParallel = function(array, functionToBeApplied) {
    assert(functionToBeApplied instanceof Function, 'The second parameter has to be a function.');

    if (!Array.isArray(array))
    {
        return Promise.resolve([]);
    }
    const pendingResults = [];
    for (const element of array)
    {
        const maybePromise = functionToBeApplied(element);
        if (maybePromise instanceof Promise)
        {
            pendingResults.push(maybePromise);
        }
    }
    return pendingResults.length > 0
        ? Promise.all(pendingResults)
        : Promise.resolve([]);
};
|
---
title: "Open Source and ReScience"
collection: thesis
type: "Thesis topic"
permalink: /thesis/open_source_rescience
venue: "Osnabrück University, Institute of Cognitive Science"
date: 2019-09-20
location: "Osnabrück, Germany"
---
Ever read an exciting paper that you could not find any source code for?
It has long been recognized by the scientific community that transparency
and reproducibility of computational experiments are very important.
Journals like [ReScience](http://rescience.github.io/) encourage the
replication of existing results and publish them together with open
source code.
So why not implement the paper yourself and contribute your results back to the community?
This is the perfect scope for a bachelor thesis, but can also be part of a larger
master thesis.
Similarly, you might have always wanted to write an open source package for a scientific
problem you had to solve over and over again, because no good open source solution
exists. Your thesis might be the perfect opportunity to bridge this gap. Again journals
like [JOSS](https://joss.theoj.org) provide a great venue to share your results with the community.
|
require "test_helper"
class SetupGameTest < Minitest::Test
  # Builds the collaborators shared by the tests: a messages/validator pair,
  # a 5x5 board with five known bomb positions, a scripted CLI double and an
  # IO hash wired with test doubles so nothing is printed for real.
  def setup
    @messages = Minesweeper::Messages.new
    @validator = Minesweeper::InputValidator.new(@messages)
    bomb_positions = [10, 11, 12, 13, 14]
    @test_board = Minesweeper::Board.new(5, 5, bomb_positions)
    @mock_cli = Minesweeper::MockCli.new(@messages, @validator)
    @test_app = Minesweeper::App.new
    @test_io = {
      output: Minesweeper::Output.new,
      input: 'test',
      board_formatter: Minesweeper::CliBoardFormatter.new,
      board_printer: Minesweeper::MockBoardPrinter.new
    }
  end

  def test_that_run_can_setup_and_return_a_new_game
    # NOTE(review): @mock_output is assigned but never used in this test -
    # confirm whether it can be removed.
    @mock_output = Minesweeper::MockOutput.new
    @test_setup_game = Minesweeper::SetupGame.new
    result = @test_setup_game.run(@mock_cli, @test_io)
    assert_instance_of(Minesweeper::Game, result)
  end

  def test_that_initialize_can_set_the_formatter_type_and_rows_and_bomb_count
    @test_setup_game = Minesweeper::SetupGame.new
    game = @test_setup_game.run(@mock_cli, @test_io)
    # The expected 10 rows / 70 bombs presumably come from answers scripted
    # by MockCli - verify against the MockCli implementation.
    assert_equal(10, game.board.row_size)
    assert_equal(70, game.board.bomb_count)
    assert_instance_of(Minesweeper::CliBoardFormatter, game.formatter)
  end
end
|
# Tutti-Frutti
:kiwi_fruit: :strawberry: :kiwi_fruit:
An archive of half-baked projects, fire-and-forget hacks, and a shameless clipboard.
## Gallery



## Reports
[Witchcraft](./witchcraft/becerra_houses_all.pdf)
[Eco-Bikes](./eco-bikes/eda/becerra_describing_eco_bici_2017_v4.pdf)
|
# Features
* Add a mail body
* Support Multiple Attachments
* Supports ESMTP Authentication
* Supports StartTLS and SSL
* Send mail to a list of users
* Show SMTP server info
* Fixes [issues of mailsend](https://github.com/muquit/mailsend#known-issues)
|
require "utils.rb"
module Language
  module Python
    # Returns the major.minor Version of the given python executable
    # (e.g. 3.10), or nil if the version output cannot be parsed.
    def self.major_minor_version python
      # BUGFIX: \d+ is required on both sides of the dot - the old /\d\.\d/
      # pattern truncated "Python 3.10.1" to "3.1".
      version = /\d+\.\d+/.match `#{python} --version 2>&1`
      return unless version
      Version.new(version.to_s)
    end

    # Yields each enabled python ("python", "python3") together with its
    # major.minor version.  While the block runs, PYTHONPATH points at the
    # Homebrew site-packages unless the corresponding python formula is
    # installed; the original PYTHONPATH is restored afterwards.
    def self.each_python build, &block
      original_pythonpath = ENV["PYTHONPATH"]
      ["python", "python3"].each do |python|
        next if build.without? python
        version = self.major_minor_version python
        ENV["PYTHONPATH"] = if Formulary.factory(python).installed?
          nil
        else
          "#{HOMEBREW_PREFIX}/lib/python#{version}/site-packages"
        end
        block.call python, version if block
      end
      ENV["PYTHONPATH"] = original_pythonpath
    end
  end
end
|
/**
* Copyright 2017 Yahoo Holdings Inc.
* Licensed under the terms of the MIT license. See LICENSE file in project root for terms.
*/
/* eslint-env mocha */
/* eslint-disable no-unused-expressions */
import { Cerebro } from '../cerebro'
const expect = require('chai').expect
const FIXTURE_PATH = '../../test/fixtures/'
require('../../test/setup/server')
describe('Feature Flipper', function () {
// End-to-end checks: build-generated configuration files are fed through
// Cerebro and resolved against a concrete context.
describe('integrated test cases', function () {
  describe('feature', function () {
    // takes a generated file from the build and runs it through cerebro
    it('generates the correct output for features', function () {
      var context = {
        buckets: '1'
      }
      var configuration = require(FIXTURE_PATH + 'generated/feature.js')
      var cerebro = new Cerebro(configuration)
      var cerebroConfig = cerebro.resolveConfig(context)

      expect(cerebroConfig.isEnabled('optionA')).to.be.true
      expect(cerebroConfig.isEnabled('optionB')).to.be.false
      expect(cerebroConfig.isEnabled('optionC')).to.be.true
      expect(cerebroConfig.isEnabled('optionD')).to.be.true
    })
  })

  describe('setting', function () {
    // Settings resolve to arbitrary values (strings, numbers, arrays, maps)
    // rather than booleans, and may depend on context dimensions.
    it('generates the correct output for settings', function () {
      var context = {
        buckets: '2',
        intls: 'us'
      }
      var configuration = require(FIXTURE_PATH + 'generated/setting.js')
      var cerebro = new Cerebro(configuration)
      var cerebroConfig = cerebro.resolveConfig(context)

      expect(cerebroConfig.getValue('sender')).to.equal('foo@yahoo.com')
      expect(cerebroConfig.getValue('helpUrl')).to.equal('')
      expect(cerebroConfig.getValue('dateOverride')).to.equal(1428938850)
      expect(cerebroConfig.getValue('testIds')).to.deep.equal([
        '10',
        '20',
        '30',
        '40'
      ])
      expect(cerebroConfig.getValue('alienMap')).to.deep.equal({
        1: 'hello',
        2: 'world',
        3: 'from',
        4: 'the',
        5: 'aliens'
      })
      expect(cerebroConfig.getValue('assetMap')).to.deep.equal({})
      expect(cerebroConfig.getValue('foo')).to.equal('a')
      expect(cerebroConfig.getValue('version')).to.equal('v1.0')
    })
  })
})
describe('isolated test cases', function () {
// Minimal fixtures exercising the `enabled` flag in isolation.
context('simple features', function () {
  it('enables a feature when enabled is true', function () {
    var context = {}
    var configuration = require(FIXTURE_PATH + 'simple/enabled.js')
    var cerebro = new Cerebro(configuration)
    var cerebroConfig = cerebro.resolveConfig(context)

    expect(cerebroConfig.isEnabled('simple')).to.be.true
  })

  it('disables a feature when enabled is false', function () {
    var context = {}
    var configuration = require(FIXTURE_PATH + 'simple/disabled.js')
    var cerebro = new Cerebro(configuration)
    var cerebroConfig = cerebro.resolveConfig(context)

    expect(cerebroConfig.isEnabled('simple')).to.be.false
  })
})

// Settings whose value is a template interpolated from the context.
context('templates', function () {
  it('inserts context value into template', function () {
    var contextEnabled = {
      partner: 'bar'
    }
    var contextDisabled = {
      partner: 'baz'
    }
    var configuration = require(FIXTURE_PATH + 'template/simple.js')
    var cerebro = new Cerebro(configuration)
    var cerebroConfig = cerebro.resolveConfig(contextEnabled)

    expect(cerebroConfig.getValue('template')).to.equal('https://bar.com')

    cerebroConfig = cerebro.resolveConfig(contextDisabled)
    expect(cerebroConfig.getValue('template')).to.equal('https://foo.com')
  })
})

// Settings with a default value overridden by a matching except block.
context('simple settings', function () {
  it('changes the value when the entry is evaluated to true', function () {
    var context = {
      bucket: ['43225', '123']
    }
    var configuration = require(FIXTURE_PATH + 'settings/setting.js')
    var cerebro = new Cerebro(configuration)
    var cerebroConfig = cerebro.resolveConfig(context)

    expect(cerebroConfig.getValue('testSetting')).to.equal(777)
  })

  it('does not change the value when the entry is evaluated to false', function () {
    var context = {
      bucket: '14353'
    }
    var configuration = require(FIXTURE_PATH + 'settings/setting.js')
    var cerebro = new Cerebro(configuration)
    var cerebroConfig = cerebro.resolveConfig(context)

    expect(cerebroConfig.getValue('testSetting')).to.equal(42)
  })
})

// Several except blocks: the first matching block determines the value.
context('multiple except blocks', function () {
  it('chooses the right value when the option is first', function () {
    var context = {
      bucket: '123'
    }
    var configuration = require(FIXTURE_PATH +
      'multiple_except_blocks/multiple_except_blocks.js')
    var cerebro = new Cerebro(configuration)
    var cerebroConfig = cerebro.resolveConfig(context)

    expect(cerebroConfig.getValue('testSetting')).to.equal(777)
  })

  it('chooses the right value when the option is second', function () {
    var context = {
      bucket: '445'
    }
    var configuration = require(FIXTURE_PATH +
      'multiple_except_blocks/multiple_except_blocks.js')
    var cerebro = new Cerebro(configuration)
    var cerebroConfig = cerebro.resolveConfig(context)

    expect(cerebroConfig.getValue('testSetting')).to.equal(888)
  })
})

// A feature can be gated on another feature's resolved state.
context('when depending on a feature', function () {
  it('enables a feature when the independent feature is enabled', function () {
    var context = {}
    var configuration = require(FIXTURE_PATH + 'dependent/enabled.js')
    var options = {}
    var cerebro = new Cerebro(configuration, options)
    var cerebroConfig = cerebro.resolveConfig(context)

    expect(cerebroConfig.isEnabled('dependent')).to.be.true
  })

  it('does nothing when the independent feature is disabled', function () {
    var context = {}
    var configuration = require(FIXTURE_PATH + 'dependent/disabled.js')
    var options = {}
    var cerebro = new Cerebro(configuration, options)
    var cerebroConfig = cerebro.resolveConfig(context)

    expect(cerebroConfig.isEnabled('dependent')).to.be.false
  })
})
context('when the configuration is a list', function () {
context('default list scenarios', function () {
it('enables the feature when the context matches', function () {
var context = {
buckets: 'bucket1'
}
var configuration = require(FIXTURE_PATH + 'enums/default.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('list')).to.be.true
})
it('does not enable the feature when the context does not match', function () {
var context = {
buckets: ['bucket3']
}
var configuration = require(FIXTURE_PATH + 'enums/default.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('list')).to.be.false
})
})
context('and it contains `all`', function () {
it('enables the feature when the context contains a valid value', function () {
var context = {
buckets: ['bucket1']
}
var configuration = require(FIXTURE_PATH + 'enums/all.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('all')).to.be.true
})
it('enables the feature when the context contains a invalid value', function () {
// this functionality is debatable, but it appears to be this way in Storm.
var context = {
buckets: ['bucket3']
}
var configuration = require(FIXTURE_PATH + 'enums/all.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('all')).to.be.true
})
it('disables the feature when the context contains no value', function () {
var context = {}
var configuration = require(FIXTURE_PATH + 'enums/all.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('all')).to.be.false
})
})
context('and it contains `none`', function () {
it('disables the feature when the context contains a valid value', function () {
var context = {
buckets: ['bucket1']
}
var configuration = require(FIXTURE_PATH + 'enums/none.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('none')).to.be.false
})
it('disables the feature when the context contains a invalid value', function () {
// this functionality is debatable, but it appears to be this way in Storm.
var context = {
buckets: ['bucket3']
}
var configuration = require(FIXTURE_PATH + 'enums/none.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('none')).to.be.false
})
it('enables the feature when the context contains no value', function () {
var context = {}
var configuration = require(FIXTURE_PATH + 'enums/none.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('none')).to.be.true
})
})
})
context('when the configuration contains a range', function () {
it('enables the feature for a number within the range', function () {
var context = {
bucket: 1500
}
var configuration = require(FIXTURE_PATH + 'range/default.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.true
})
it('enables the feature when the range is inclusive', function () {
var context = {
bucket: 2000
}
var configuration = require(FIXTURE_PATH + 'range/default.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.true
})
it('does not enable the feature when the range is exclusive', function () {
var context = {
bucket: 2000
}
var configuration = require(FIXTURE_PATH + 'range/exclusive.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.false
})
it('does not enable the feature for a number greater than the range', function () {
var context = {
bucket: 2001
}
var configuration = require(FIXTURE_PATH + 'range/default.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.false
})
it('does not enable the feature for a number less than the range', function () {
var context = {
bucket: 999
}
var configuration = require(FIXTURE_PATH + 'range/default.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.false
})
it('enables the feature for a number within a negative range', function () {
var context = {
bucket: -1500
}
var configuration = require(FIXTURE_PATH + 'range/negative.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.true
})
it('enables the feature for a number within a reverse negative range', function () {
var context = {
bucket: -1500
}
var configuration = require(FIXTURE_PATH + 'range/reverse_negative.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.true
})
it('disables the feature for a number outside a negative range', function () {
var context = {
bucket: -999
}
var configuration = require(FIXTURE_PATH + 'range/negative.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.false
})
it('enables the feature for a number outside a reverse negative range', function () {
var context = {
bucket: -999
}
var configuration = require(FIXTURE_PATH + 'range/reverse_negative.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('range')).to.be.false
})
})
xcontext('when the configuration contains a percentage', function () {
it('enables the feature for the same person', function () {
// TODO: Not sure how this test will look like
})
it('enables the feature randomly', function () {
// TODO: Not sure how this test will look like
})
})
context(
'when there are multiple conditions in one except block',
function () {
it('enables the feature if all conditions are met', function () {
var context = {
env: 'alpha',
bucket: 'bucket1'
}
var configuration = require(FIXTURE_PATH + 'combined/combined.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('combined')).to.be.true
})
it('does not enable the feature if all conditions are not met', function () {
var context = {
env: 'alpha',
bucket: 'bucket2'
}
var configuration = require(FIXTURE_PATH + 'combined/combined.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('combined')).to.be.false
})
}
)
context('overrides', function () {
it('overrides the setting if provided', function () {
var context = {
bucket: ['43225', '123']
}
var options = {
overrides: {
testSetting: 888
}
}
var configuration = require(FIXTURE_PATH + 'settings/setting.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context, options)
expect(cerebroConfig.getValue('testSetting')).to.equal(888)
})
it('coerces the override to boolean if the setting is boolean', function () {
var context = {}
var options = {
overrides: {
simple: 0
}
}
var configuration = require(FIXTURE_PATH + 'simple/enabled.js')
var cerebro = new Cerebro(configuration)
var cerebroConfig = cerebro.resolveConfig(context, options)
expect(cerebroConfig.isEnabled('simple')).to.equal(false)
})
})
context('custom evaluators', function () {
beforeEach(function () {
this.customEvaluators = {
evaluateCondition: function (condition, testValue) {
if (testValue.indexOf(condition) !== -1) {
return true
}
return false
}
}
})
it('returns the new answer if the custom evaluator is fulfilled', function () {
var context = {
customCondition: 'en-US'
}
var options = {
customEvaluators: this.customEvaluators
}
var configuration = require(FIXTURE_PATH +
'custom_evaluator/custom_evaluator.js')
var cerebro = new Cerebro(configuration, options)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('customEvaluator')).to.be.true
})
it('returns the default answer if the custom evaluator is not fulfilled', function () {
var context = {
customCondition: 'no'
}
var options = {
customEvaluators: this.customEvaluators
}
var configuration = require(FIXTURE_PATH +
'custom_evaluator/custom_evaluator.js')
var cerebro = new Cerebro(configuration, options)
var cerebroConfig = cerebro.resolveConfig(context)
expect(cerebroConfig.isEnabled('customEvaluator')).to.be.false
})
})
})
})
|
<?php
/*
* This file is part of the MagmaCore package.
*
* (c) Ricardo Miller <ricardomiller@lava-studio.co.uk>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace MagmaCore\Http;
use Symfony\Component\HttpFoundation\Request;
class RequestHandler
{
    /**
     * Wrapper method for the Symfony HTTP request object.
     *
     * Builds a Request populated from the PHP superglobals
     * ($_GET, $_POST, $_COOKIE, $_FILES, $_SERVER).
     *
     * Fix: the previous implementation declared a `Request` return type but
     * could fall through to `return false`, which raises a TypeError under
     * PHP 7+ strict types; the `isset($request)` guard on a never-assigned
     * local was also dead logic. `createFromGlobals()` is a static factory
     * and never returns a falsy value, so the wrapper reduces to one call.
     *
     * @return Request
     */
    public function handler() : Request
    {
        return Request::createFromGlobals();
    }
}
|
/**
 * Non-sensitive summary of a stored credit card, suitable for display
 * (no full PAN or CVV is carried here).
 */
export interface ICreditCard {
    /** Card network/brand identifier (e.g. "Visa") — free-form string. */
    ccNetwork:string;
    /** Last four digits of the card number. */
    lastFourNumbers:number;
    /** Expiry month — presumably 1-12; TODO confirm with the data producer. */
    expiryMonth:number;
    /** Expiry year — presumably four-digit; TODO confirm with the data producer. */
    expiryYear:number;
    /** Whether the CVV was successfully verified for this card. */
    cvvVerified:boolean;
}
|
---
layout: post
title: "数据分析思维"
date: 2022-01-10
description: "数据分析思考"
tag: 数据分析
katex: true
---
```
数据分析的本质是解决问题,创造价值;而不是为了分析而分析,为了汇报而分析。
```
通过这段期间的数据分析工作,对这个岗位也有了一些思考。
首先是数据分析的sense:
## 目标思维
- **正确定义问题与目标**
做分析要想想自己分析是为了什么,要得到什么结果,解决什么问题。
- **合理分解问题**
通常一个项目,一个数据需求是一个比较大的问题,把大问题拆解为各个小问题并逐个击破,是数分必要的能力。拆解问题有一个非常出名的方法论:**MECE**,相互独立,完全穷尽。同时,费米问题的实质也是合理拆分一个抽象难解决的大问题。
- **抓住问题关键**
大问题拆解为多个小问题,这个时候就需要去判断那些问题是关键的,运用“二八原则”,时间有限的情况下先解决主要的问题。
## 对比思维
- **与目标对比**
- **环比**
- **同比**
- **拆分对比**
- **与竞争对手对比**
## 细分思维
- 按时间细分
年、月、日按时间细分看趋势,找差异
- 按空间划分
地理位置、不同人员、不同类别、不同渠道
- 按公式细分
	针对能继续拆解的公式:`GMV = 客单价 * 流量 * 转化率`
- 按过程细分
用户生命周期:AARRR
- 按模型细分
	波士顿矩阵、RFM
## 溯源思维
从之前的数据中发现问题规律
## 相关思维
寻找变量之间的相关性:散点图-计算相关系数
## 假设思维
提出假设 - 检验 - 做出判断:AB Test
```
对于数据分析的过程,首先定义问题明确目标,通过对比、假设等思维,找出问题原因,得出分析结论,提出可行意见,起到创造价值的作用。
```
|
      SUBROUTINE DT_DIAGR(Na,Nb,Ijproj,B,Js,Jt,Jnt,Inta,Intb,Idirec,
     &                    Nidx)

C***********************************************************************
C Based on the original version by Shmakov et al.                      *
C This version dated 21.04.95 is revised by S. Roesler                 *
C***********************************************************************
C Samples one Glauber interaction configuration for a projectile of NA *
C nucleons on a target of NB nucleons: the impact parameter B, the     *
C per-nucleon collision counters JS/JT, the total number of            *
C nucleon-nucleon interactions JNT and the colliding pairs             *
C (INTER1/INTER2, via common dtglif).  INTA/INTB return the numbers of *
C wounded projectile/target nucleons.  For photon projectiles          *
C (IJPROJ=7) IDIREC flags a direct (pointlike) photon process.         *
C Calling with NTARG=ABS(NIDX)=-1 prints accumulated statistics and    *
C returns.                                                             *
C***********************************************************************

      IMPLICIT NONE
      DOUBLE PRECISION afluc , ai , ALPHEM , AMP , AMP2 , AMRHO0 , amv ,
     &                 amv2 , ar , B , bdum , dcoh , DT_RNDM , DT_SAM2 ,
     &                 DT_SIGVP , dumzer , elab , fca , gam , GEV2FM
      DOUBLE PRECISION GEV2MB , ONE , p , PI , plab , qq1 , qq2 , rca ,
     &                 rpnt , s , sdum1 , sdum2 , sdum3 , sigel , sq2 ,
     &                 TINY10 , TWO , TWOPI , x
      DOUBLE PRECISION xnu , xy , ZERO , zero1
      INTEGER i , icnt , Idirec , idx , ifluk , Ijproj , ina , inb ,
     &        Inta , Intb , ipnt , j , ji1 , ji2 , Jnt , jnt0 , Js ,
     &        js0 , Jt , jt0
      INTEGER k , kint , kk1 , MAXINT , MAXNCL , MAXSQU , MAXVQU , Na ,
     &        Nb , ncall , Nidx , ntarg , ntargo , ntry , nwa , nwamax ,
     &        nwb , nwbmax
C SAVE keeps all locals (statistics counters, cached nucleon
C configuration, first-call flag) across calls.
      SAVE
      INCLUDE 'inc/dtflka'
      PARAMETER (ZERO=0.0D0,TINY10=1.0D-10,ONE=1.0D0,TWO=2.0D0)
C proton mass
C rho0 mass
      PARAMETER (TWOPI=6.283185307179586454D+00,PI=TWOPI/TWO,
     &           GEV2MB=0.38938D0,GEV2FM=0.1972D0,ALPHEM=ONE/137.0D0,
     &           AMP=0.938D0,AMP2=AMP**2,AMRHO0=0.77D0)
      COMPLEX*16 c , ca , ci
      PARAMETER (MAXNCL=260,MAXVQU=MAXNCL,MAXSQU=20*MAXVQU,
     &           MAXINT=MAXVQU+MAXSQU)
C particle properties (BAMJET index convention)
      INCLUDE 'inc/dtpart'
C emulsion treatment
      INCLUDE 'inc/dtcomp'
C Glauber formalism: parameters
      INCLUDE 'inc/dtglam'
C Glauber formalism: cross sections
      INCLUDE 'inc/dtglxs'
C VDM parameter for photon-nucleus interactions
      INCLUDE 'inc/dtvdmp'
C nucleon-nucleon event-generator
      INCLUDE 'inc/dtmodl'
C*PHOJET105a
C     COMMON /CUTOFF/ PTCUT(4),CUTMU(4),FPS(4),FPH(4),PSOMIN,XSOMIN
C*PHOJET112
C obsolete cut-off information
      INCLUDE 'inc/pocut1'
C*
C coordinates of nucleons
      INCLUDE 'inc/dtnuco'
C interface between Glauber formalism and DPM
      INCLUDE 'inc/dtglif'
C statistics: Glauber-formalism
      INCLUDE 'inc/dtsta3'
C n-n cross section fluctuations
      INCLUDE 'inc/dtxsfl'
C js0/jt0/ji1/ji2/jnt0: per-coherence-interval bookkeeping used only
C for photon projectiles (IJPROJ=7).
      DIMENSION Js(MAXNCL) , Jt(MAXNCL) , js0(MAXNCL) ,
     &          jt0(MAXNCL,MAXNCL) , ji1(MAXNCL,MAXNCL) ,
     &          ji2(MAXNCL,MAXNCL) , jnt0(MAXNCL)
C nwa/nwb: histograms of the number of wounded nucleons per event.
      DIMENSION nwa(0:MAXNCL) , nwb(0:MAXNCL)
      LOGICAL lfirst
      DATA lfirst/.TRUE./
      DATA ntargo , icnt/0 , 0/

      ntarg = ABS(Nidx)

C first call: reset the wounded-nucleon statistics (only when no
C emulsion components are defined, NCOMPO=0).
      IF ( lfirst ) THEN
         lfirst = .FALSE.
         IF ( NCOmpo.EQ.0 ) THEN
            ncall = 0
            nwamax = Na
            nwbmax = Nb
            DO i = 0 , MAXNCL
               nwa(i) = 0
               nwb(i) = 0
            END DO
         END IF
      END IF
C NTARG=-1 requests the final statistics printout instead of an event.
      IF ( ntarg.EQ.-1 ) THEN
         IF ( NCOmpo.EQ.0 ) THEN
            IF ( LPRi.GT.4 ) WRITE (LOUt,*)
     &            ' DIAGR: distribution of wounded nucleons'
            IF ( LPRi.GT.4 ) WRITE (LOUt,'(8X,A,3I7)')
     &            'NCALL,NWAMAX,NWBMAX = ' , ncall , nwamax , nwbmax
            DO i = 1 , MAX(nwamax,nwbmax)
               IF ( LPRi.GT.4 ) WRITE (LOUt,'(8X,2I7,E12.4,I7,E12.4)')
     &               i , nwa(i) , DBLE(nwa(i))/DBLE(ncall) , nwb(i) ,
     &               DBLE(nwb(i))/DBLE(ncall)
            END DO
         END IF
         RETURN
      END IF

C kinematic variables from the c.m. energy ECMNOW and virtuality Q2
C (x and the photon energy xnu; dcoh is the coherence length, set to
C "infinite" unless overwritten in the photon branch below).
      dcoh = 1.0D10
      ipnt = 0
      sq2 = Q2
      IF ( sq2.LE.ZERO ) sq2 = 0.0001D0
      s = ECMnow**2
      x = sq2/(s+sq2-AMP2)
      xnu = (s+sq2-AMP2)/(TWO*AMP)
C photon projectiles: recalculate photon-nucleon amplitude
      IF ( Ijproj.EQ.7 ) THEN

C VDM assumption: mass of V-meson
C (resampled until it is below the 2*PTCUT(1) threshold)
 50      amv2 = DT_SAM2(sq2,ECMnow)
         amv = SQRT(amv2)
         IF ( amv.GT.2.0D0*PTCut(1) ) GOTO 50
C check for pointlike interaction
         CALL DT_POILIK(Nb,ntarg,ECMnow,sq2,ipnt,rpnt,1)
C*sr 27.10.
C        SIGSH = DT_SIGVP(X,SQ2)/(AMV2+SQ2+RL2)/10.0D0
         SIGsh = (ONE-rpnt)*DT_SIGVP(x,sq2)/(amv2+sq2+RL2)/10.0D0
C*
         ROSh = 0.1D0
         BSLope = 2.0D0*(2.0D0+AMRHO0**2/(amv2+sq2)
     &            +0.25D0*LOG(s/(amv2+sq2)))
C coherence length
         IF ( ISHad(3).EQ.1 ) dcoh = TWO*xnu/(amv2+sq2)*GEV2FM
C hadron (and lepton-like id) projectiles: slope, rho and total cross
C section either from PHOJET (MCGENE=2) or from parametrizations.
      ELSE IF ( ((Ijproj.LE.40) .OR. ((Ijproj.GE.97) .AND. (Ijproj.LE.
     &          103)) .OR. (Ijproj.EQ.109) .OR. (Ijproj.EQ.115)) .AND.
     &          (Ijproj.NE.7) ) THEN
         IF ( MCGene.EQ.2 ) THEN
            zero1 = ZERO
            CALL DT_PHOXS(Ijproj,1,ECMnow,zero1,sdum1,sdum2,sdum3,
     &                    BSLope,0)
         ELSE
            BSLope = 8.5D0*(1.0D0+0.065D0*LOG(s))
         END IF
         IF ( ECMnow.LE.3.0D0 ) THEN
            ROSh = -0.43D0
         ELSE IF ( (ECMnow.GT.3.0D0) .AND. (ECMnow.LE.50.D0) ) THEN
            ROSh = -0.63D0 + 0.175D0*LOG(ECMnow)
         ELSE IF ( ECMnow.GT.50.0D0 ) THEN
            ROSh = 0.1D0
         END IF
         elab = (s-AAM(Ijproj)**2-AMP2)/(TWO*AMP)
         plab = SQRT((elab-AAM(Ijproj))*(elab+AAM(Ijproj)))
         IF ( MCGene.EQ.2 ) THEN
            zero1 = ZERO
            CALL DT_PHOXS(Ijproj,1,ECMnow,zero1,SIGsh,sdum2,sdum3,bdum,
     &                    0)
            SIGsh = SIGsh/10.0D0
         ELSE
C           SIGSH = DT_SHNTOT(IJPROJ,1,ZERO,PLAB)/10.0D0
            dumzer = ZERO
            CALL DT_XSHN(Ijproj,1,plab,dumzer,SIGsh,sigel)
            SIGsh = SIGsh/10.0D0
         END IF
C fallback for all remaining projectile ids.
      ELSE
         BSLope = 6.0D0*(1.0D0+0.065D0*LOG(s))
         ROSh = 0.01D0
         elab = (s-AAM(Ijproj)**2-AMP2)/(TWO*AMP)
         plab = SQRT((elab-AAM(Ijproj))*(elab+AAM(Ijproj)))
C        SIGSH = DT_SHNTOT(IJPROJ,1,ZERO,PLAB)/10.0D0
         dumzer = ZERO
         CALL DT_XSHN(Ijproj,1,plab,dumzer,SIGsh,sigel)
         SIGsh = SIGsh/10.0D0
      END IF
C parameters of the complex profile amplitude ca = rca + i*fca built
C from slope (GSH), cross section (SIGSH) and real/imag ratio (ROSH).
      GSH = 10.0D0/(TWO*BSLope*GEV2MB)
      gam = GSH
      rca = gam*SIGsh/TWOPI
      fca = -ROSh*rca
      ca = DCMPLX(rca,fca)
      ci = DCMPLX(ONE,ZERO)
C impact parameter
 100  IF ( MCGene.NE.3 ) CALL DT_MODB(B,Nidx)
      ntry = 0
 200  ntry = ntry + 1
C initializations
      Jnt = 0
      DO i = 1 , Na
         Js(i) = 0
      END DO
      DO i = 1 , Nb
         Jt(i) = 0
      END DO
      IF ( Ijproj.EQ.7 ) THEN
         DO i = 1 , MAXNCL
            js0(i) = 0
            jnt0(i) = 0
            DO j = 1 , Nb
               jt0(i,j) = 0
            END DO
         END DO
      END IF
C nucleon configuration
C     IF ((NTARG.NE.NTARGO).OR.(MOD(ICNT,5).EQ.0)) THEN
      IF ( (ntarg.NE.ntargo) .OR. (MOD(icnt,1).EQ.0) ) THEN
C        CALL DT_CONUCL(PKOO,NA,RASH,2)
C        CALL DT_CONUCL(TKOO,NB,RBSH(NTARG),1)
         IF ( Nidx.LE.-1 ) THEN
            CALL DT_CONUCL(PKOo,Na,RASh(1),0)
            CALL DT_CONUCL(TKOo,Nb,RBSh(ntarg),0)
         ELSE
            CALL DT_CONUCL(PKOo,Na,RASh(ntarg),0)
            CALL DT_CONUCL(TKOo,Nb,RBSh(1),0)
         END IF
         ntargo = ntarg
      END IF
      icnt = icnt + 1
C LEPTO: pick out one struck nucleon
      IF ( MCGene.EQ.3 ) THEN
         Jnt = 1
         Js(1) = 1
         idx = INT(DT_RNDM(x)*Nb) + 1
         Jt(idx) = 1
         B = ZERO
         GOTO 300
      END IF
C loop over all projectile-target nucleon pairs; each pair interacts
C with probability 1-|1-ca*exp(-gam*b_ij^2)|^2.
      DO ina = 1 , Na
C cross section fluctuations
         afluc = ONE
         IF ( IFLuct.EQ.1 ) THEN
            ifluk = INT((DT_RNDM(x)+0.001D0)*1000.0D0)
            afluc = FLUixx(ifluk)
         END IF
         kk1 = 1
         kint = 1
         DO inb = 1 , Nb
C photon-projectile: check for supression by coherence length
            IF ( Ijproj.EQ.7 ) THEN
               IF ( ABS(TKOo(3,inb)-TKOo(3,kk1)).GT.dcoh ) THEN
                  kk1 = inb
                  kint = kint + 1
               END IF
            END IF
C transverse distance between the two nucleons (b shifts projectile x)
            qq1 = B + TKOo(1,inb) - PKOo(1,ina)
            qq2 = TKOo(2,inb) - PKOo(2,ina)
            xy = gam*(qq1*qq1+qq2*qq2)
C beyond xy=15 the interaction probability is negligible; skip.
            IF ( xy.LE.15.0D0 ) THEN
               c = ci - ca*afluc*EXP(-xy)
               ar = DBLE(c)
               ai = DIMAG(c)
               p = ar*ar + ai*ai
               IF ( DT_RNDM(xy).GE.p ) THEN
C pair (ina,inb) interacts: record it, with separate bookkeeping per
C coherence interval for photon projectiles.
                  Jnt = Jnt + 1
                  IF ( Ijproj.EQ.7 ) THEN
                     jnt0(kint) = jnt0(kint) + 1
                     IF ( jnt0(kint).GT.MAXNCL ) THEN
                        IF ( LPRi.GT.4 ) WRITE (LOUt,99010) MAXNCL
99010                   FORMAT (1X,
     &                     'DIAGR: no. of requested interactions',
     &                     ' exceeds array dimensions ',I4)
                        STOP
                     END IF
                     js0(kint) = js0(kint) + 1
                     jt0(kint,inb) = jt0(kint,inb) + 1
                     ji1(kint,jnt0(kint)) = ina
                     ji2(kint,jnt0(kint)) = inb
                  ELSE
                     IF ( Jnt.GT.MAXINT ) THEN
                        IF ( LPRi.GT.4 ) WRITE (LOUt,99020) Jnt , MAXINT
99020                   FORMAT (1X,
     &                     'DIAGR: no. of requested interactions (',I4,
     &                     ') exceeds array dimensions (',I4,')')
                        STOP
                     END IF
                     Js(ina) = Js(ina) + 1
                     Jt(inb) = Jt(inb) + 1
                     INTer1(Jnt) = ina
                     INTer2(Jnt) = inb
                  END IF
               END IF
            END IF
         END DO
      END DO
C no interaction found: retry the nucleon configuration up to 500
C times for this impact parameter, then sample a new one.
      IF ( Jnt.EQ.0 ) THEN
C        WRITE(6,*) ' new impact parameter required (old= ',B,')'
         IF ( ntry.LT.500 ) GOTO 200
         GOTO 100
      END IF
      Idirec = 0
C photon projectiles: pick one coherence interval (at random, then the
C next non-empty one) and copy its interaction lists into Js/Jt/INTERx.
      IF ( Ijproj.EQ.7 ) THEN
         k = INT(ONE+DT_RNDM(x)*DBLE(kint))
 250     IF ( jnt0(k).EQ.0 ) THEN
            k = k + 1
            IF ( k.GT.kint ) k = 1
            GOTO 250
         END IF
C supress Glauber-cascade by direct photon processes
         CALL DT_POILIK(Nb,ntarg,ECMnow,sq2,ipnt,rpnt,2)
         IF ( ipnt.GT.0 ) THEN
C direct process: exactly one interaction with the first wounded
C target nucleon of the chosen interval.
            Jnt = 1
            Js(1) = 1
            DO inb = 1 , Nb
               Jt(inb) = jt0(k,inb)
               IF ( Jt(inb).GT.0 ) GOTO 260
            END DO
 260        INTer1(1) = 1
            INTer2(1) = inb
            Idirec = ipnt
         ELSE
            Jnt = jnt0(k)
            Js(1) = js0(k)
            DO inb = 1 , Nb
               Jt(inb) = jt0(k,inb)
            END DO
            DO i = 1 , Jnt
               INTer1(i) = ji1(k,i)
               INTer2(i) = ji2(k,i)
            END DO
         END IF
      END IF
C count wounded nucleons and update global Glauber statistics.
 300  Inta = 0
      Intb = 0
      DO i = 1 , Na
         IF ( Js(i).NE.0 ) Inta = Inta + 1
      END DO
      DO i = 1 , Nb
         IF ( Jt(i).NE.0 ) Intb = Intb + 1
      END DO
      ICWpg = Inta
      ICWtg = Intb
      ICIg = Jnt
      IPGlb = IPGlb + Inta
      ITGlb = ITGlb + Intb
      NGLb = NGLb + 1
      IF ( NCOmpo.EQ.0 ) THEN
         ncall = ncall + 1
         nwa(Inta) = nwa(Inta) + 1
         nwb(Intb) = nwb(Intb) + 1
      END IF
      END SUBROUTINE
|
package com.dataart.spreadsheetanalytics.demo.main;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import com.dataart.spreadsheetanalytics.api.engine.IAuditor;
import com.dataart.spreadsheetanalytics.api.engine.IEvaluator;
import com.dataart.spreadsheetanalytics.api.model.IDataModel;
import com.dataart.spreadsheetanalytics.api.model.IExecutionGraph;
import com.dataart.spreadsheetanalytics.demo.util.DemoUtil;
import com.dataart.spreadsheetanalytics.engine.Converters;
import com.dataart.spreadsheetanalytics.engine.SpreadsheetAuditor;
import com.dataart.spreadsheetanalytics.engine.SpreadsheetEvaluator;
import com.dataart.spreadsheetanalytics.engine.graph.ExecutionGraphConfig;
public class EvaluationWorksheetSavingToXlsDemo {

    /**
     * Demo entry point: evaluates a spreadsheet data model, writes the
     * evaluation result to a new xlsx file, then builds and prints the
     * execution graph of the model.
     *
     * Fix: the previous version swallowed the IOException's message when
     * saving failed, leaving no way to diagnose the error; the cause is
     * now included in the report. A stray semicolon in the resource list
     * was also removed.
     *
     * @param args args[0] = input xlsx path, args[1] = output xlsx path
     * @throws Exception if model conversion, evaluation or auditing fails
     */
    public static void main(String[] args) throws Exception {
        if (args.length < 2) {
            System.err.println("Input Excel file path, output Excel file path, please!");
            return;
        }

        final String excel = args[0];
        final String outputFile = args[1];

        final IDataModel model = Converters.toDataModel(new XSSFWorkbook(excel));
        DemoUtil.initCaches(model, excel);

        //create Evaluator
        final IEvaluator evaluator = new SpreadsheetEvaluator(model);

        // evaluate and persist the result; both resources close automatically
        try (FileOutputStream fileOut = new FileOutputStream(outputFile);
             Workbook output = Converters.toWorkbook(evaluator.evaluate().getResult())) {
            output.write(fileOut);
            fileOut.flush();
        } catch (IOException e) {
            // report the cause, then continue with the audit (same flow as before)
            System.err.println("Error saving output xlsx file: " + e.getMessage());
        }

        //create Auditor
        final IAuditor auditor = new SpreadsheetAuditor(model);

        //build graph
        final IExecutionGraph graph = auditor.buildExecutionGraph(ExecutionGraphConfig.JOIN_ALL_DUPLICATE_VERTICES);

        //print graph
        DemoUtil.generateVisJsData(graph);
        DemoUtil.plainprint(graph);
    }
}
|
// expect:be sure to finish!
// author:KercyLAN
// create at:2020-2-29 12:38
package ktime
import (
"testing"
"time"
)
// TestInterval logs Interval's human-readable output for timestamps at
// increasing distances into the past, starting from "now".
func TestInterval(t *testing.T) {
	now := time.Now().Unix()
	for _, offset := range []int64{0, 10, 100, 10000, 100000, 100000000} {
		t.Log(Interval(now - offset))
	}
}
|
import 'package:every_door/models/osm_element.dart';
import 'package:test/test.dart';
import 'package:every_door/helpers/snap_nodes.dart';
import 'package:latlong2/latlong.dart' show LatLng;
/// Builds a test way [OsmElement] whose node ids are the indices of
/// [points], with `nodeLocations` mapping each id to its coordinate.
OsmElement wayFromPoints(List<LatLng> points) {
  final nodeIds = [for (var i = 0; i < points.length; i++) i];
  return OsmElement(
    id: OsmId(OsmElementType.way, 0),
    version: 1,
    tags: {},
    timestamp: DateTime.now(),
    nodes: nodeIds,
    nodeLocations: points.asMap(),
  );
}
void main() {
  final snapper = Snapper();

  test('Simple case works', () {
    // Segment along a meridian from (0,0) to (1,0): the projection
    // parameter t equals the point's latitude (t=0 at a, t=1 at b),
    // and longitude (orthogonal to the segment) must not affect t.
    final a = LatLng(0.0, 0.0);
    final b = LatLng(1.0, 0.0);
    expect(snapper.testProject(LatLng(0.0, 0.0), a, b), equals(0.0));
    expect(snapper.testProject(LatLng(1.0, 0.0), a, b), equals(1.0));
    expect(snapper.testProject(LatLng(0.11, 0.0), a, b), equals(0.11));
    expect(snapper.testProject(LatLng(0.11, 10.0), a, b), equals(0.11));
    expect(snapper.testProject(LatLng(0.91, -9.0), a, b), equals(0.91));
    // Projections falling before a / after b yield t outside [0, 1].
    expect(snapper.testProject(LatLng(-10.0, 10.0), a, b), lessThan(0.0));
    expect(snapper.testProject(LatLng(10.0, -1.0), a, b), greaterThan(1.0));
  });

  test('Projects to segments correctly', () {
    // Slanted segment: check on which side of the endpoints a projection lands.
    final a = LatLng(1.0, 4.0);
    final b = LatLng(5.0, 1.0);
    double t = snapper.testProject(LatLng(0.0, 4.5), a, b);
    expect(t, lessThan(0.0));
    t = snapper.testProject(LatLng(0.0, 0.5), a, b);
    expect(t, inExclusiveRange(0.0, 1.0));
  });

  // L-shaped test way (0,0) -> (1,0) -> (1,2) with node ids 0, 1, 2.
  final way =
      wayFromPoints([LatLng(0.0, 0.0), LatLng(1.0, 0.0), LatLng(1.0, 2.0)]);

  test('We build ways properly', () {
    expect(way.nodes, equals([0, 1, 2]));
    expect(way.nodeLocations, isNotNull);
  });

  test('Calculates distance to way', () {
    // Distances are in meters (see the ~1.1 m note below).
    expect(snapper.distanceToWay(LatLng(10.0, -5.0), way), greaterThan(100.0));
    expect(snapper.distanceToWay(LatLng(0.3, 0.0), way), equals(0.0));
    expect(snapper.distanceToWay(LatLng(1.00001, 1.0), way),
        inExclusiveRange(1.0, 2.0));
    // 0.00001 degrees is ~1.1 meters
  });

  test('Ignores distance to edges', () {
    // With noEdges set, these points on/near the way produce no distance;
    // NOTE(review): exact noEdges semantics defined in snap_nodes.dart.
    expect(snapper.distanceToWay(LatLng(0.0, 0.0), way, noEdges: true), isNull);
    expect(snapper.distanceToWay(LatLng(1.01, -0.01), way, noEdges: true), isNull);
  });

  test('Does not match a way too far', () {
    // Second case: within default reach but outside an explicit 50 m limit.
    expect(snapper.closestWay(LatLng(0.1, 1.0), [way]), isNull);
    expect(
        snapper.closestWay(LatLng(0.1, 0.001), [way], maxDistance: 50), isNull);
  });

  test('Does not match vertices', () {
    // Both probe points sit exactly on way vertices.
    expect(snapper.closestWay(LatLng(1.0, 0.0), [way], noEdges: true), isNull);
    expect(snapper.closestWay(LatLng(1.0, 2.0), [way], noEdges: true), isNull);
  });

  test('Does not snap to vertices', () {
    expect(snapper.snap(-1, LatLng(1.0, 0.0), way), isNull);
  });

  test('Properly inserts a node', () {
    // Snapping node -1 onto the first segment inserts it between ids 0 and 1.
    final r = snapper.snap(-1, LatLng(0.5, 0.0), way);
    expect(r, isNotNull);
    if (r == null) return;
    expect(r.newLocation, equals(LatLng(0.5, 0.0)));
    expect(r.newElement.nodes, equals([0, -1, 1, 2]));
    expect(r.newElement.nodeLocations, isNotNull);
    expect(r.newElement.nodeLocations, contains(-1));
    expect(r.newElement.nodeLocations![-1], equals(LatLng(0.5, 0.0)));
  });

  test('Projects and inserts a node', () {
    // (1.1, 1.1) lies off the second segment; it is projected onto it at
    // (1.0, 1.1) and inserted between node ids 1 and 2.
    final r = snapper.snap(-1, LatLng(1.1, 1.1), way);
    expect(r, isNotNull);
    if (r == null) return;
    final loc = LatLng(1.0, 1.1);
    expect(r.newLocation, equals(loc));
    expect(r.newElement.nodes, equals([0, 1, -1, 2]));
    expect(r.newElement.nodeLocations![-1], equals(loc));
  });
}
|
# PIC LCD Driver Module Demonstration Circuit
In this directory is a KiCad project for the design of a minimal circuit to demonstrate driving
a segment LCD from a PIC MCU (in this case a PIC16LF19156) equipped with a built-in LCD driver module.
## Bill of Materials
Qty | Reference | Value | Description
--- | --------- | ----- | -----------
1 | IC1 | PIC16LF19156-I/SP or PIC16F19156-I/SP | microcontroller
1 | C1 | 100nF (0.1uF) | ceramic
1 | LCD1 | LCD-S401M16KR | 4-digit multiplexed 3V LCD 7-segment display
1 | D1 | 3V LED | (optional)
1 | J1 | 01x05 2.54mm pin header | for in-circuit programming
1 | BT1 | 3V | power source
|
{ **********************************************************************
* Unit FCOMP.PAS *
* Version 1.1 *
* (c) J. Debord, July 2000 *
**********************************************************************
Complex functions for TPMATH
(Based on CMPLX.ZIP by E.F. Glynn)
********************************************************************** }
unit FComp;
interface
uses
FMath;
{ **********************************************************************
Complex type
********************************************************************** }
type
ComplexForm = (Rec, Pol); { Rectangular or Polar form }
Complex = record
case Form : ComplexForm of
Rec : (X, Y : Float);
Pol : (R, Theta : Float);
end;
const
C_infinity : Complex = (Form : Rec; X : MAXNUM; Y : 0.0);
C_zero : Complex = (Form : Rec; X : 0.0; Y : 0.0);
C_one : Complex = (Form : Rec; X : 1.0; Y : 0.0);
C_i : Complex = (Form : Rec; X : 0.0; Y : 1.0);
C_pi : Complex = (Form : Rec; X : PI; Y : 0.0);
C_pi_div_2 : Complex = (Form : Rec; X : PIDIV2; Y : 0.0);
{ **********************************************************************
Complex number initialization and conversion
********************************************************************** }
procedure CSet(var Z : Complex; A, B : Float; F : ComplexForm);
{ ----------------------------------------------------------------------
Initializes a complex number according to the form specified by F
F = Rec ==> Z = A + i * B
F = Pol ==> Z = A * Exp(i * B)
---------------------------------------------------------------------- }
procedure CConvert(var Z : Complex; F : ComplexForm);
{ Converts the complex number Z to the form specified by F }
procedure CSwap(var X, Y : Complex);
{ Exchanges two complex numbers }
{ **********************************************************************
Complex functions
********************************************************************** }
function CReal(Z : Complex) : Float; { Re(Z) }
function CImag(Z : Complex) : Float; { Im(Z) }
function CAbs(Z : Complex) : Float; { |Z| }
function CArg(Z : Complex) : Float; { Arg(Z) }
function CSgn(Z : Complex) : Integer; { Complex sign }
procedure CNeg(A : Complex; var Z : Complex); { Z = -A }
procedure CConj(A : Complex; var Z : Complex); { Z = A* }
procedure CAdd(A, B : Complex; var Z : Complex); { Z = A + B }
procedure CSub(A, B : Complex; var Z : Complex); { Z = A - B }
procedure CDiv(A, B : Complex; var Z : Complex); { Z = A / B }
procedure CMult(A, B : Complex; var Z : Complex); { Z = A * B }
procedure CLn(A : Complex; var Z : Complex); { Z = Ln(A) }
procedure CExp(A : Complex; var Z : Complex); { Z = Exp(A) }
procedure CPower(A, B : Complex; var Z : Complex); { Z = A^B }
procedure CIntPower(A : Complex; N : Integer; var Z : Complex); { Z = A^N }
procedure CRealPower(A : Complex; X : Float; var Z : Complex); { Z = A^X }
procedure CSqrt(A : Complex; var Z : Complex); { Z = Sqrt(A) }
procedure CRoot(A : Complex; K, N : Integer; var Z : Complex); { Z = A^(1/N) }
procedure CSin(A : Complex; var Z : Complex); { Z = Sin(A) }
procedure CCos(A : Complex; var Z : Complex); { Z = Cos(A) }
procedure CTan(A : Complex; var Z : Complex); { Z = Tan(A) }
procedure CArcSin(A : Complex; var Z : Complex); { Z = ArcSin(A) }
procedure CArcCos(A : Complex; var Z : Complex); { Z = ArcCos(A) }
procedure CArcTan(A : Complex; var Z : Complex); { Z = ArcTan(A) }
procedure CSinh(A : Complex; var Z : Complex); { Z = Sinh(A) }
procedure CCosh(A : Complex; var Z : Complex); { Z = Cosh(A) }
procedure CTanh(A : Complex; var Z : Complex); { Z = Tanh(A) }
procedure CArcSinh(A : Complex; var Z : Complex); { Z = ArcSinh(A) }
procedure CArcCosh(A : Complex; var Z : Complex); { Z = ArcCosh(A) }
procedure CArcTanh(A : Complex; var Z : Complex); { Z = ArcTanh(A) }
procedure CLnGamma(A : Complex; var Z : Complex); { Z = Ln(Gamma(A)) }
implementation
{$IFDEF CPU387}
{$DEFINE USE_ASM}
{$ENDIF}
{$IFDEF CPUP2}
{$DEFINE USE_ASM}
{$ENDIF}
procedure CSet(var Z : Complex; A, B : Float; F : ComplexForm);
begin
Z.Form := F;
if F = Pol then
begin
Z.R := A;
Z.Theta := B;
end
else
begin
Z.X := A;
Z.Y := B;
end;
end;
function CAbs(Z : Complex) : Float;
begin
if Z.Form = Rec then
CAbs := Pythag(Z.X, Z.Y)
else
CAbs := Z.R;
end;
function CArg(Z : Complex) : Float;
begin
if Z.Form = Rec then
CArg := ArcTan2(Z.Y, Z.X)
else
CArg := Z.Theta;
end;
function CReal(Z : Complex) : Float;
begin
if Z.Form = Rec then
CReal := Z.X
else
CReal := Z.R * {$IFDEF USE_ASM}fCos{$ELSE}Cos{$ENDIF}(Z.Theta);
end;
function CImag(Z : Complex) : Float;
begin
if Z.Form = Rec then
CImag := Z.Y
else
CImag := Z.R * {$IFDEF USE_ASM}fSin{$ELSE}Sin{$ENDIF}(Z.Theta);
end;
function CSgn(Z : Complex) : Integer;
{ Complex signum: the sign of Re(Z); when Re(Z) = 0, the sign of Im(Z);
  0 only for Z = 0. The imaginary part is evaluated only when needed. }
var
  V : Float;
begin
  V := CReal(Z);
  if V = 0.0 then
    V := CImag(Z);
  if V > 0.0 then
    CSgn := 1
  else if V < 0.0 then
    CSgn := - 1
  else
    CSgn := 0;
end;
procedure CConvert(var Z : Complex; F : ComplexForm);
{ Converts Z in place to representation F; no-op when already in form F }
var
  A : Complex;
begin
  if Z.Form = F then Exit;
  if Z.Form = Pol then
    begin { Polar-to-rectangular conversion }
      A.Form := Rec;
      A.X := Z.R * {$IFDEF USE_ASM}fCos{$ELSE}Cos{$ENDIF}(Z.Theta);
      A.Y := Z.R * {$IFDEF USE_ASM}fSin{$ELSE}Sin{$ENDIF}(Z.Theta);
    end
  else
    begin { Rectangular-to-polar conversion }
      A.Form := Pol;
      { Pure-imaginary / zero inputs get the modulus directly, avoiding Pythag }
      if Z.X = 0.0 then
        if Z.Y = 0.0 then
          A.R := 0.0
        else if Z.Y > 0.0 then
          A.R := Z.Y
        else
          A.R := - Z.Y
      else
        A.R := CAbs(Z);
      A.Theta := ArcTan2(Z.Y, Z.X);
    end;
  Z := A;
end;
procedure CSwap(var X, Y : Complex);
{ Exchanges the contents of X and Y }
var
  Saved : Complex;
begin
  Saved := Y;
  Y := X;
  X := Saved;
end;
procedure CNeg(A : Complex; var Z : Complex);
{ Z = -A, keeping A's representation. Polar negation rotates by Pi,
  normalized back into range by FixAngle }
begin
  Z.Form := A.Form;
  if A.Form = Pol then
    begin
      Z.R := A.R;
      Z.Theta := FixAngle(A.Theta + PI)
    end
  else
    begin
      Z.X := - A.X;
      Z.Y := - A.Y
    end;
end;
procedure CConj(A : Complex; var Z : Complex);
{ Z = complex conjugate of A, keeping A's representation
  (negated angle in polar form, negated Y in rectangular form) }
begin
  Z.Form := A.Form;
  if A.Form = Pol then
    begin
      Z.R := A.R;
      Z.Theta := FixAngle(- A.Theta)
    end
  else
    begin
      Z.X := A.X;
      Z.Y := - A.Y
    end
end;
procedure CAdd(A, B : Complex; var Z : Complex);
{ Z = A + B. Both operands are converted to rectangular form,
  so the result is always rectangular }
begin
  CConvert(A, Rec);
  CConvert(B, Rec);
  Z.Form := Rec;
  Z.X := A.X + B.X;
  Z.Y := A.Y + B.Y;
end;
procedure CSub(A, B : Complex; var Z : Complex);
{ Z = A - B. Both operands are converted to rectangular form,
  so the result is always rectangular }
begin
  CConvert(A, Rec);
  CConvert(B, Rec);
  Z.Form := Rec;
  Z.X := A.X - B.X;
  Z.Y := A.Y - B.Y;
end;
procedure CMult(A, B : Complex; var Z : Complex);
{ Z = A * B, computed and returned in A's representation:
  moduli multiply / angles add in polar form, FOIL expansion in rectangular }
begin
  CConvert(B, A.Form); { arbitrarily convert one to type of other }
  Z.Form := A.Form;
  if A.Form = Pol then
    begin
      Z.R := A.R * B.R;
      Z.Theta := FixAngle(A.Theta + B.Theta)
    end
  else
    begin
      Z.X := A.X * B.X - A.Y * B.Y;
      Z.Y := A.X * B.Y + A.Y * B.X
    end;
end;
procedure CDiv(A, B : Complex; var Z : Complex);
{ Z = A / B in A's representation. Division by zero sets MathErr to
  FN_OVERFLOW and returns C_infinity instead of raising }
var
  Temp : Float;
begin
  if ((B.Form = Rec) and (B.X = 0.0) and (B.Y = 0.0)) or
     ((B.Form = Pol) and (B.R = 0.0)) then
    begin
      MathErr := FN_OVERFLOW;
      Z := C_infinity;
      Exit;
    end;
  CConvert(B, A.Form); { arbitrarily convert one to type of other }
  Z.Form := A.Form;
  if A.Form = Pol then
    begin
      Z.R := A.R / B.R;
      Z.Theta := FixAngle(A.Theta - B.Theta);
    end
  else
    begin
      { Multiply by conjugate of B; Temp = |B|^2 is nonzero here }
      Temp := Sqr(B.X) + Sqr(B.Y);
      Z.X := (A.X * B.X + A.Y * B.Y) / Temp;
      Z.Y := (A.Y * B.X - A.X * B.Y) / Temp;
    end;
end;
procedure CLn(A : Complex; var Z : Complex);
{ Principal natural log: Z = Ln|A| + i*Arg(A), returned rectangular.
  When Log fails (A.R = 0) MathErr carries the code and the real part
  saturates to -MAXNUM }
var
  LnR : Float;
begin
  CConvert(A, Pol);
  LnR := Log(A.R);
  if MathErr = FN_OK then
    CSet(Z, LnR, FixAngle(A.Theta), Rec)
  else
    CSet(Z, - MAXNUM, 0.0, Rec);
end;
procedure CExp(A : Complex; var Z : Complex);
{ Z = Exp(A) = Exp(X)*(Cos(Y) + i*Sin(Y)), returned rectangular.
  If Expo fails, the saturated Expo result is returned with zero
  imaginary part and MathErr holds the error code }
var
  ExpX, SinY, CosY : Float;
begin
  CConvert(A, Rec);
  ExpX := Expo(A.X);
  if MathErr = FN_OK then
    begin
      SinY := {$IFDEF USE_ASM}fSin{$ELSE}Sin{$ENDIF}(A.Y);
      CosY := {$IFDEF USE_ASM}fCos{$ELSE}Cos{$ENDIF}(A.Y);
      CSet(Z, ExpX * CosY, ExpX * SinY, Rec);
    end
  else
    CSet(Z, ExpX, 0.0, Rec);
end;
procedure CPower(A, B : Complex; var Z : Complex);
{ Z = A^B computed as Exp(B*Ln(A)) on the principal branch.
  Special cases: 0^0 -> 1 (limit convention), 0^B -> 0.
  NOTE(review): 0^B is returned as zero for every nonzero B, including
  exponents with negative real part where the true value is singular -
  confirm whether that case should set MathErr instead }
var
  BLnA, LnA : Complex;
begin
  CConvert(A, Rec);
  CConvert(B, Rec);
  if (A.X = 0.0) and (A.Y = 0.0) then
    if (B.X = 0.0) and (B.Y = 0.0) then
      Z := C_one { lim a^a = 1 as a -> 0 }
    else
      Z := C_zero { 0^b = 0, b > 0 }
  else
    begin
      CLn(A, LnA);
      CMult(B, LnA, BLnA);
      CExp(BLnA, Z);
    end;
end;
procedure CIntPower(A : Complex; N : Integer; var Z : Complex);
{ Z = A^N for integer N, returned in polar form.
  CIntPower directly applies DeMoivre's theorem to calculate an integer
  power of a complex number. The formula holds for both positive and
  negative values of N. 0^0 -> 1, 0^N -> 0 for N > 0; 0^N for N < 0 is
  singular (MathErr = FN_SING, Z = C_infinity) }
begin
  CConvert(A, Pol);
  if A.R = 0.0 then
    if N = 0 then
      Z := C_one
    else if N > 0 then
      Z := C_zero
    else
      begin
        MathErr := FN_SING;
        Z := C_infinity;
      end
  else
    CSet(Z, IntPower(A.R, N), FixAngle(N * A.Theta), Pol);
end;
procedure CRealPower(A : Complex; X : Float; var Z : Complex);
{ Z = A^X for real X, via DeMoivre in polar form.
  0^0 -> 1, 0^X -> 0 for X > 0; 0^X for X < 0 is singular
  (MathErr = FN_SING, Z = C_infinity) }
begin
  CConvert(A, Pol);
  if A.R = 0.0 then
    if X = 0.0 then
      Z := C_one
    else if X > 0.0 then
      Z := C_zero
    else
      begin
        MathErr := FN_SING;
        Z := C_infinity;
      end
  else
    CSet(Z, Power(A.R, X), FixAngle(X * A.Theta), Pol);
end;
procedure CRoot(A : Complex; K, N : Integer; var Z : Complex);
{ Z = the K-th of the N complex N-th roots of A (polar result).
  CRoot can calculate all 'N' roots of 'A' by varying 'K' from 0..N-1.
  This is another application of DeMoivre's theorem. See CIntPower.
  Invalid K/N sets MathErr = FN_DOMAIN and returns zero }
begin
  if (N <= 0) or (K < 0) or (K >= N) then
    begin
      MathErr := FN_DOMAIN;
      Z := C_zero;
      Exit;
    end;
  CConvert(A, Pol);
  if A.R = 0.0 then
    Z := C_zero
  else
    CSet(Z, Power(A.R, 1.0 / N), FixAngle((A.Theta + K * TWOPI) / N), Pol);
end;
procedure CSqrt(A : Complex; var Z : Complex);
{ Principal square root: Sqrt of the modulus, half the angle (polar result) }
begin
  CConvert(A, Pol);
  if A.R = 0.0 then
    Z := C_zero
  else
    CSet(Z, Sqrt(A.R), FixAngle(0.5 * A.Theta), Pol);
end;
procedure CCos(A : Complex; var Z : Complex);
{ Z = Cos(A) via Cos(x+iy) = Cos(x)Cosh(y) - i*Sin(x)Sinh(y) }
var
  SinX, CosX, SinhY, CoshY : Float;
begin
  CConvert(A, Rec);
  SinCos(A.X, SinX, CosX);
  SinhCosh(A.Y, SinhY, CoshY); { Called here to set MathErr }
  CSet(Z, CosX * CoshY, - SinX * SinhY, Rec)
end;
procedure CSin(A : Complex; var Z : Complex);
{ Z = Sin(A) via Sin(x+iy) = Sin(x)Cosh(y) + i*Cos(x)Sinh(y) }
var
  SinX, CosX, SinhY, CoshY : Float;
begin
  CConvert(A, Rec);
  SinCos(A.X, SinX, CosX);
  SinhCosh(A.Y, SinhY, CoshY); { Called here to set MathErr }
  CSet(Z, SinX * CoshY, CosX * SinhY, Rec)
end;
procedure CTan(A : Complex; var Z : Complex);
{ Z = Tan(A) via the double-angle form
  Tan(x+iy) = (Sin(2x) + i*Sinh(2y)) / (Cos(2x) + Cosh(2y)).
  A zero denominator (A = Pi/2 + k*Pi) sets MathErr = FN_SING }
var
  X2, Y2, SinX2, CosX2, SinhY2, CoshY2, Temp : Float;
begin
  CConvert(A, Rec);
  X2 := 2.0 * A.X;
  Y2 := 2.0 * A.Y;
  SinCos(X2, SinX2, CosX2);
  SinhCosh(Y2, SinhY2, CoshY2);
  { On SinhCosh overflow, Cosh dominates and Cos(2x) is dropped }
  if MathErr = FN_OK then
    Temp := CosX2 + CoshY2
  else
    Temp := CoshY2;
  if Temp <> 0.0 then
    CSet(Z, SinX2 / Temp, SinhY2 / Temp, Rec)
  else
    begin { A = Pi/2 + k*Pi }
      MathErr := FN_SING;
      CSet(Z, MAXNUM, 0.0, Rec);
    end;
end;
procedure CCosh(A : Complex; var Z : Complex);
{ Z = Cosh(A) via Cosh(x+iy) = Cosh(x)Cos(y) + i*Sinh(x)Sin(y) }
var
  SinhX, CoshX, SinY, CosY : Float;
begin
  CConvert(A, Rec);
  SinCos(A.Y, SinY, CosY);
  SinhCosh(A.X, SinhX, CoshX);
  CSet(Z, CoshX * CosY, SinhX * SinY, Rec)
end;
procedure CSinh(A : Complex; var Z : Complex);
{ Z = Sinh(A) via Sinh(x+iy) = Sinh(x)Cos(y) + i*Cosh(x)Sin(y) }
var
  SinhX, CoshX, SinY, CosY : Float;
begin
  CConvert(A, Rec);
  SinCos(A.Y, SinY, CosY);
  SinhCosh(A.X, SinhX, CoshX);
  CSet(Z, SinhX * CosY, CoshX * SinY, Rec)
end;
procedure CTanh(A : Complex; var Z : Complex);
{ Z = Tanh(A) via the double-angle form
  Tanh(x+iy) = (Sinh(2x) + i*Sin(2y)) / (Cosh(2x) + Cos(2y)).
  A zero denominator (A = i*(Pi/2 + k*Pi)) sets MathErr = FN_SING }
var
  X2, Y2, SinY2, CosY2, SinhX2, CoshX2, Temp : Float;
begin
  CConvert(A, Rec);
  X2 := 2.0 * A.X;
  Y2 := 2.0 * A.Y;
  SinCos(Y2, SinY2, CosY2);
  SinhCosh(X2, SinhX2, CoshX2);
  { On SinhCosh overflow, Cosh dominates and Cos(2y) is dropped }
  if MathErr = FN_OK then
    Temp := CoshX2 + CosY2
  else
    Temp := CoshX2;
  if Temp <> 0.0 then
    CSet(Z, SinhX2 / Temp, SinY2 / Temp, Rec)
  else
    begin { A = i * (Pi/2 + k*Pi) }
      MathErr := FN_SING;
      CSet(Z, 0.0, MAXNUM, Rec);
    end;
end;
procedure CArcSin(A : Complex; var Z : Complex);
{ Z = ArcSin(A) for A = X + iY, using the real-valued decomposition:
    Rp = |A + 1| / 2,  Rm = |A - 1| / 2   (half-distances to the branch
    points +/-1; note S + X2 = (X+1)^2 + Y^2, S - X2 = (X-1)^2 + Y^2)
    Re(Z) = ArcSin(Rp - Rm)
    Im(Z) = CSgn(Y - iX) * Ln(T + Sqrt(T^2 - 1)),  T = Rp + Rm >= 1
  B = Y - i*X supplies the sign choosing the principal branch }
var
  Rp, Rm, S, T, X2, XX, YY : Float;
  B : Complex;
begin
  CConvert(A, Rec);
  CSet(B, A.Y, - A.X, Rec); { Y - i*X }
  X2 := 2.0 * A.X;
  XX := Sqr(A.X);
  YY := Sqr(A.Y);
  S := XX + YY + 1.0;
  Rp := 0.5 * Sqrt(S + X2);
  Rm := 0.5 * Sqrt(S - X2);
  T := Rp + Rm;
  Z.Form := Rec;
  Z.X := ArcSin(Rp - Rm);
  Z.Y := CSgn(B) * Log(T + Sqrt(Sqr(T) - 1.0));
end;
procedure CArcCos(A : Complex; var Z : Complex);
{ Z = ArcCos(A), via the identity ArcCos(A) = Pi/2 - ArcSin(A) }
begin
  CArcSin(A, Z);
  CSub(C_pi_div_2, Z, Z); { Pi/2 - ArcSin(Z) }
end;
procedure CArcTan(A : Complex; var Z : Complex);
{ Z = ArcTan(A), computed with real ArcTan2/Log formulas.
  The branch points A = +/-i are singular: MathErr = FN_SING and the
  imaginary part saturates to +/-MAXNUM }
var
  XX, Yp1, Ym1 : Float;
begin
  CConvert(A, Rec);
  if (A.X = 0.0) and (Abs(A.Y) = 1.0) then { A = +/- i }
    begin
      MathErr := FN_SING;
      CSet(Z, 0.0, Sgn(A.Y) * MAXNUM, Rec);
      Exit;
    end;
  XX := Sqr(A.X);
  Yp1 := A.Y + 1.0;
  Ym1 := A.Y - 1.0;
  Z.Form := Rec;
  Z.X := 0.5 * (ArcTan2(A.X, - Ym1) - ArcTan2(- A.X, Yp1));
  Z.Y := 0.25 * Log((XX + Sqr(Yp1)) / (XX + Sqr(Ym1)));
end;
procedure CArcSinh(A : Complex; var Z : Complex);
{ ArcSinH(A) = -i*ArcSin(i*A) }
begin
  CMult(C_i, A, Z);
  CArcSin(Z, Z);
  CMult(C_i, Z, Z);
  CNeg(Z, Z);
end;
procedure CArcCosh(A : Complex; var Z : Complex);
{ ArcCosH(A) = CSgn(Y + i(1-X))*i*ArcCos(A) where A = X+iY.
  B carries the sign factor that selects the principal branch }
var
  B : Complex;
begin
  CArcCos(A, Z);
  CMult(C_i, Z, Z);
  CSet(B, A.Y, 1.0 - A.X, Rec); { Y + i*(1-X) }
  if CSgn(B) = -1 then CNeg(Z, Z);
end;
procedure CArcTanh(A : Complex; var Z : Complex);
{ ArcTanH(A) = -i*ArcTan(i*A).
  The branch points A = +/-1 are singular: MathErr = FN_SING and the
  real part saturates to +/-MAXNUM }
begin
  CConvert(A, Rec);
  if (Abs(A.X) = 1.0) and (A.Y = 0.0) then { A = +/- 1 }
    begin
      MathErr := FN_SING;
      CSet(Z, Sgn(A.X) * MAXNUM, 0.0, Rec);
      Exit;
    end;
  CMult(C_i, A, Z);
  CArcTan(Z, Z);
  CMult(C_i, Z, Z);
  CNeg(Z, Z);
end;
procedure CApproxLnGamma(Z : Complex; var Sum : Complex);
{ Asymptotic (Stirling-type) series for Ln(Gamma(Z)):
    Sum = (Z - 0.5)*Ln(Z) - Z + Ln(2*Pi)/2 + sum C[I]*Z^(-(2I-1))
  This is the approximation used in the National Bureau of
  Standards "Table of the Gamma Function for Complex Arguments,"
  Applied Mathematics Series 34, 1954. The NBS table was created
  using this approximation over the area 9 < Re(z) < 10 and
  0 < Im(z) < 10. Other table values were computed using the
  relationship:
      _                    _
  ln | (z+1) = ln z + ln | (z) }
const
  C : array[1..8] of Float =
    (8.33333333333333E-02, - 2.77777777777778E-03,
     7.93650793650794E-04, - 5.95238095238095E-04,
     8.41750841750842E-04, - 1.91752691752692E-03,
     6.41025641025641E-03, - 2.95506535947712E-02);
var
  I : Integer;
  Powers : array[1..8] of Complex;
  Temp1, Temp2 : Complex;
begin
  CConvert(Z, Rec);
  CLn(Z, Temp1); { Ln(Z) }
  CSet(Temp2, Z.X - 0.5, Z.Y, Rec); { Z - 0.5 }
  CMult(Temp1, Temp2, Sum); { (Z - 0.5)*Ln(Z) }
  CSub(Sum, Z, Sum); { (Z - 0.5)*ln(Z) - Z }
  Sum.X := Sum.X + LN2PIDIV2;
  Temp1 := C_one;
  CDiv(Temp1, Z, Powers[1]); { Z^(-1) }
  CMult(Powers[1], Powers[1], Temp2); { Z^(-2) }
  { Powers[I] = Z^(-(2I-1)); series summed smallest-term-first }
  for I := 2 to 8 do
    CMult(Powers[I - 1], Temp2, Powers[I]);
  for I := 8 downto 1 do
    begin
      CSet(Temp1, C[I] * Powers[I].X, C[I] * Powers[I].Y, Rec);
      CAdd(Sum, Temp1, Sum);
    end
end;
procedure CLnGamma(A : Complex; var Z : Complex);
{ Z = Ln(Gamma(A)).
  - Non-positive real integers are poles: MathErr = FN_SING, Z = C_infinity.
  - Lower half plane is handled by conjugate symmetry.
  - Re(A) < 9 is shifted up with Ln(Gamma(A)) = Ln(Gamma(A+1)) - Ln(A)
    until the NBS series range is reached. }
var
  LnA, Temp : Complex;
begin
  CConvert(A, Rec);
  if (A.X <= 0.0) and (A.Y = 0.0) then
    if (Int(A.X - 1E-8) - A.X) = 0.0 then { Negative integer? }
      begin
        MathErr := FN_SING;
        Z := C_infinity;
        Exit
      end;
  if A.Y < 0.0 then { 3rd or 4th quadrant? }
    begin
      CConj(A, A);
      CLnGamma(A, Z); { Try again in 1st or 2nd quadrant }
      CConj(Z, Z) { Left this out! 1/3/91 }
    end
  else
    begin
      if A.X < 9.0 then { "left" of NBS table range }
        begin
          CLn(A, LnA);
          CSet(A, A.X + 1.0, A.Y, Rec);
          CLnGamma(A, Temp);
          CSub(Temp, LnA, Z)
        end
      else
        CApproxLnGamma(A, Z) { NBS table range: 9 < Re(z) < 10 }
    end
end;
end.
|
# -*- coding: utf-8 -*-
"""fragments are block of html which can be dynamically added"""
from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied
from django.forms.models import modelformset_factory
from django.shortcuts import render
from django.utils.translation import ugettext as _
from colorbox.decorators import popup_close
from coop_cms.forms.fragments import AddFragmentForm, EditFragmentForm
from coop_cms import models
@login_required
@popup_close
def add_fragment(request):
    """Display (GET) or process (POST) the popup form adding a Fragment.

    Requires the ``add`` permission on the Fragment model; raises
    PermissionDenied otherwise. Returning None on success lets the
    popup_close decorator close the popup and refresh the page.
    """
    content_type = ContentType.objects.get_for_model(models.Fragment)
    required_perm = '{0}.add_{1}'.format(content_type.app_label, content_type.model)
    if not request.user.has_perm(required_perm):
        raise PermissionDenied

    if request.method == "POST":
        form = AddFragmentForm(request.POST)
        if form.is_valid():
            form.save()
            # popup_close decorator will close and refresh
            return None
    else:
        form = AddFragmentForm()

    # Invalid POST or initial GET: (re-)display the form in the popup.
    return render(
        request,
        'coop_cms/popup_add_fragment.html',
        {'form': form}
    )
@login_required
@popup_close
def edit_fragments(request):
    """edit fragments of the current template

    Shows a formset over every Fragment (extra=0: no blank rows); on a
    valid POST it saves and returns None so the popup_close decorator
    closes the popup and refreshes the page.
    """
    content_type = ContentType.objects.get_for_model(models.Fragment)
    # NOTE(review): mirrors add_fragment by checking the *add* permission;
    # confirm whether an edit view should require 'change_' instead.
    perm = '{0}.add_{1}'.format(content_type.app_label, content_type.model)
    if not request.user.has_perm(perm):
        raise PermissionDenied
    edit_fragment_formset = modelformset_factory(models.Fragment, EditFragmentForm, extra=0)
    if request.method == "POST":
        formset = edit_fragment_formset(request.POST, queryset=models.Fragment.objects.all())
        if formset.is_valid():
            formset.save()
            # popup_close decorator will close and refresh
            return None
    else:
        formset = edit_fragment_formset(queryset=models.Fragment.objects.all())
    context_dict = {
        'form': formset,
        'title': _("Edit fragments of this template?"),
    }
    return render(
        request,
        'coop_cms/popup_edit_fragments.html',
        context_dict
    )
|
using System;
namespace Serilog.Console
{
    /// <summary>
    /// Demo console application: configures a Serilog logger from app
    /// settings, then overrides the minimum level and adds console and
    /// daily-rolling file sinks before emitting two sample events.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            Log.Logger = new LoggerConfiguration().ReadFrom.AppSettings()
                .MinimumLevel.Debug()
                .WriteTo.Console()
                .WriteTo.File("logs\\logs.txt", rollingInterval: RollingInterval.Day)
                .CreateLogger();
            Log.Information("Starting up");
            Log.Information("Shutting down");
            // Flush buffered events before the process exits.
            Log.CloseAndFlush();
            // Keep the console window open until the user presses Enter.
            System.Console.ReadLine();
        }
    }
}
|
<?php
namespace Midata\Object;
use Midata\Object;
/**
 * Represents a database view.
 */
abstract class View extends Object
{
    /** Attribute name under which the view's definition is stored. */
    const ATTRIBUTE_DEFINITION = 'definition';

    /**
     * Returns the definition of this view.
     *
     * @return mixed
     */
    abstract public function definition();

    /**
     * Lists every attribute a view exposes.
     *
     * @return string[]
     */
    public static function allAttributes()
    {
        return array(
            self::ATTRIBUTE_DEFINITION
        );
    }
}
|
package com.mapbox.navigation.ui;
import androidx.annotation.NonNull;
import java.util.HashMap;
/**
 * Decides whether a wifi connection is fast based on its signal level.
 * Levels 3-5 are mapped to fast, 0-2 to slow; any unmapped level is
 * treated as slow.
 */
class WifiNetworkChecker {

  private final HashMap<Integer, Boolean> statusMap;

  /**
   * @param statusMap backing map; it is seeded in place with the default
   *                  level-to-speed entries (see {@link #initialize}).
   */
  WifiNetworkChecker(HashMap<Integer, Boolean> statusMap) {
    this.statusMap = statusMap;
    initialize(statusMap);
  }

  /**
   * @param wifiLevel signal level to look up (may be any Integer)
   * @return true when the level is mapped to fast; false for slow or
   *         unknown levels — never null
   */
  @NonNull
  Boolean isFast(Integer wifiLevel) {
    // getOrDefault collapses the original lookup + explicit null-check
    // into one call with identical behavior.
    return statusMap.getOrDefault(wifiLevel, false);
  }

  // Seeds the default mapping: levels 3-5 fast, 0-2 slow.
  private void initialize(HashMap<Integer, Boolean> statusMap) {
    statusMap.put(5, true);
    statusMap.put(4, true);
    statusMap.put(3, true);
    statusMap.put(2, false);
    statusMap.put(1, false);
    statusMap.put(0, false);
  }
}
|
<?php
namespace AppBundle\Controller;
use AppBundle\Entity\User;
use AppBundle\Form\UserType;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Route;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
class AdminController extends Controller
{
    /**
     * Admin landing page.
     *
     * NOTE(review): this action returns nothing (null); a Symfony
     * controller action normally returns a Response (or relies on an
     * @Template annotation) — confirm the intended behaviour.
     *
     * @Route("/admin", name="administrator")
     */
    public function principalAction()
    {
    }

    /**
     * Lists every User entity for administration.
     *
     * @Route("/admin/usuarios", name="gestion_usuarios")
     */
    public function userManagementAction()
    {
        $em = $this->getDoctrine()->getManager();
        $users = $em->getRepository('AppBundle:User')->findAll();
        return $this->render('admin/usuarios.html.twig', array('users' => $users));
    }
}
|
//------------------------------------------------------------------------------
// <auto-generated>
// Dieser Code wurde von einem Tool generiert.
// Laufzeitversion:4.0.30319.42000
//
// Änderungen an dieser Datei können falsches Verhalten verursachen und gehen verloren, wenn
// der Code erneut generiert wird.
// </auto-generated>
//------------------------------------------------------------------------------
using NMF.Collections.Generic;
using NMF.Collections.ObjectModel;
using NMF.Expressions;
using NMF.Expressions.Linq;
using NMF.Models;
using NMF.Models.Collections;
using NMF.Models.Expressions;
using NMF.Models.Meta;
using NMF.Models.Repository;
using NMF.Serialization;
using NMF.Utilities;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using TTC2017.SmartGrids.CIM;
using TTC2017.SmartGrids.CIM.IEC61968.Common;
using TTC2017.SmartGrids.CIM.IEC61970.Contingency;
using TTC2017.SmartGrids.CIM.IEC61970.Core;
using TTC2017.SmartGrids.CIM.IEC61970.Generation.Production;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.EnergyScheduling;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.Financial;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfERPSupport;
using TTC2017.SmartGrids.CIM.IEC61970.LoadModel;
using TTC2017.SmartGrids.CIM.IEC61970.Meas;
using TTC2017.SmartGrids.CIM.IEC61970.Wires;
namespace TTC2017.SmartGrids.CIM.IEC61970.Informative.MarketOperations
{
/// <summary>
/// The public interface for Flowgate
/// </summary>
[DefaultImplementationTypeAttribute(typeof(Flowgate))]
[XmlDefaultImplementationTypeAttribute(typeof(Flowgate))]
public interface IFlowgate : IModelElement, IPowerSystemResource
{
/// <summary>
/// The AfcUseCode property
/// </summary>
object AfcUseCode
{
get;
set;
}
/// <summary>
/// The IdcOperationalName property
/// </summary>
string IdcOperationalName
{
get;
set;
}
/// <summary>
/// The inServiceDate property
/// </summary>
DateTime InServiceDate
{
get;
set;
}
/// <summary>
/// The outOfServiceDate property
/// </summary>
DateTime OutOfServiceDate
{
get;
set;
}
/// <summary>
/// The coordinatedFlag property
/// </summary>
bool CoordinatedFlag
{
get;
set;
}
/// <summary>
/// The AtcFlag property
/// </summary>
bool AtcFlag
{
get;
set;
}
/// <summary>
/// The deletionDate property
/// </summary>
DateTime DeletionDate
{
get;
set;
}
/// <summary>
/// The reciprocalFlag property
/// </summary>
bool ReciprocalFlag
{
get;
set;
}
/// <summary>
/// The IdcAssignedId property
/// </summary>
int IdcAssignedId
{
get;
set;
}
/// <summary>
/// The positiveImpactValue property
/// </summary>
int PositiveImpactValue
{
get;
set;
}
/// <summary>
/// The counterFlowValue property
/// </summary>
int CounterFlowValue
{
get;
set;
}
/// <summary>
/// The coordinationStudyDate property
/// </summary>
DateTime CoordinationStudyDate
{
get;
set;
}
/// <summary>
/// The IdcType property
/// </summary>
object IdcType
{
get;
set;
}
/// <summary>
/// The managingEntityFlag property
/// </summary>
bool ManagingEntityFlag
{
get;
set;
}
/// <summary>
/// The TransmissionReliabilityMargin property
/// </summary>
ITransmissionReliabilityMargin TransmissionReliabilityMargin
{
get;
set;
}
/// <summary>
/// The ViolationLimits property
/// </summary>
IOrderedSetExpression<IViolationLimit> ViolationLimits
{
get;
}
/// <summary>
/// The CapacityBenefitMargin property
/// </summary>
IOrderedSetExpression<ICapacityBenefitMargin> CapacityBenefitMargin
{
get;
}
/// <summary>
/// The PowerTransormers property
/// </summary>
IOrderedSetExpression<IPowerTransformer> PowerTransormers
{
get;
}
/// <summary>
/// The FTRs property
/// </summary>
IOrderedSetExpression<IFTR> FTRs
{
get;
}
/// <summary>
/// The Lines property
/// </summary>
IOrderedSetExpression<ILine> Lines
{
get;
}
/// <summary>
/// The SubControlArea property
/// </summary>
ISubControlArea SubControlArea
{
get;
set;
}
/// <summary>
/// The TransmissionProvider property
/// </summary>
IOrderedSetExpression<ITransmissionProvider> TransmissionProvider
{
get;
}
/// <summary>
/// Gets fired before the AfcUseCode property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> AfcUseCodeChanging;
/// <summary>
/// Gets fired when the AfcUseCode property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> AfcUseCodeChanged;
/// <summary>
/// Gets fired before the IdcOperationalName property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> IdcOperationalNameChanging;
/// <summary>
/// Gets fired when the IdcOperationalName property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> IdcOperationalNameChanged;
/// <summary>
/// Gets fired before the InServiceDate property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> InServiceDateChanging;
/// <summary>
/// Gets fired when the InServiceDate property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> InServiceDateChanged;
/// <summary>
/// Gets fired before the OutOfServiceDate property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> OutOfServiceDateChanging;
/// <summary>
/// Gets fired when the OutOfServiceDate property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> OutOfServiceDateChanged;
/// <summary>
/// Gets fired before the CoordinatedFlag property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> CoordinatedFlagChanging;
/// <summary>
/// Gets fired when the CoordinatedFlag property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> CoordinatedFlagChanged;
/// <summary>
/// Gets fired before the AtcFlag property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> AtcFlagChanging;
/// <summary>
/// Gets fired when the AtcFlag property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> AtcFlagChanged;
/// <summary>
/// Gets fired before the DeletionDate property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> DeletionDateChanging;
/// <summary>
/// Gets fired when the DeletionDate property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> DeletionDateChanged;
/// <summary>
/// Gets fired before the ReciprocalFlag property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> ReciprocalFlagChanging;
/// <summary>
/// Gets fired when the ReciprocalFlag property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> ReciprocalFlagChanged;
/// <summary>
/// Gets fired before the IdcAssignedId property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> IdcAssignedIdChanging;
/// <summary>
/// Gets fired when the IdcAssignedId property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> IdcAssignedIdChanged;
/// <summary>
/// Gets fired before the PositiveImpactValue property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> PositiveImpactValueChanging;
/// <summary>
/// Gets fired when the PositiveImpactValue property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> PositiveImpactValueChanged;
/// <summary>
/// Gets fired before the CounterFlowValue property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> CounterFlowValueChanging;
/// <summary>
/// Gets fired when the CounterFlowValue property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> CounterFlowValueChanged;
/// <summary>
/// Gets fired before the CoordinationStudyDate property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> CoordinationStudyDateChanging;
/// <summary>
/// Gets fired when the CoordinationStudyDate property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> CoordinationStudyDateChanged;
/// <summary>
/// Gets fired before the IdcType property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> IdcTypeChanging;
/// <summary>
/// Gets fired when the IdcType property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> IdcTypeChanged;
/// <summary>
/// Gets fired before the ManagingEntityFlag property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> ManagingEntityFlagChanging;
/// <summary>
/// Gets fired when the ManagingEntityFlag property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> ManagingEntityFlagChanged;
/// <summary>
/// Gets fired before the TransmissionReliabilityMargin property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> TransmissionReliabilityMarginChanging;
/// <summary>
/// Gets fired when the TransmissionReliabilityMargin property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> TransmissionReliabilityMarginChanged;
/// <summary>
/// Gets fired before the SubControlArea property changes its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> SubControlAreaChanging;
/// <summary>
/// Gets fired when the SubControlArea property changed its value
/// </summary>
event System.EventHandler<ValueChangedEventArgs> SubControlAreaChanged;
}
}
|
(ns cmr.spatial.mbr
(:require
[cmr.spatial.math :as math :refer :all]
[primitive-math]
[cmr.spatial.point :as p]
[cmr.spatial.derived :as d]
[cmr.common.services.errors :as errors]
[cmr.common.validations.core :as v]
[pjstadig.assertions :as pj]
[cmr.spatial.validation :as sv]
[cmr.spatial.messages :as msg]
[cmr.common.dev.record-pretty-printer :as record-pretty-printer])
(:import cmr.spatial.point.Point))
;; Use unboxed primitive arithmetic operators throughout this namespace.
(primitive-math/use-primitive-operators)

;; MBR - Minimum Bounding Rectangle
(defrecord Mbr
  [
   ^double west
   ^double north
   ^double east
   ^double south

   ;; These are cached for performance improvement
   corner-points])

(record-pretty-printer/enable-record-pretty-printing Mbr)
(defn- mbr-wnes
  "Internal constructor: builds an Mbr with its four corner points
  (upper-left, upper-right, lower-right, lower-left) precomputed.
  Performs no antimeridian normalization - use `mbr` for that."
  [^double west ^double north ^double east ^double south]
  (let [corner-points [(p/point west north)
                       (p/point east north)
                       (p/point east south)
                       (p/point west south)]]
    (->Mbr west north east south corner-points)))
(defn mbr
  "Creates a new minimum bounding rectangle, normalizing edges that lie
  exactly on the antimeridian (+/-180) to a canonical representation."
  [^double west ^double north ^double east ^double south]
  ;; Handle west or east being on the antimeridian.
  (let [am-west? (or (= west 180.0) (= west -180.0))
        am-east? (or (= east 180.0) (= east -180.0))]
    (cond
      am-west?
      (if am-east?
        (if (= west east)
          ;; Zero-width MBR pinned to the antimeridian: keep as given.
          (mbr-wnes west north east south)
          ;; Both edges on the AM but different signs: full longitude span.
          (mbr-wnes -180.0 north 180.0 south))
        ;; east is not on antimeridian
        ;; West should always be -180.0 if east isn't on AM.
        (mbr-wnes -180.0 north east south))

      am-east?
      ;; East should always be positive 180.0 if west isn't on AM.
      (mbr-wnes west north 180.0 south)

      :else
      (mbr-wnes west north east south))))
(defn corner-points
  "Returns the corner points of the mbr as upper left, upper right, lower right, lower left.
  Reads the corner points cached at construction time."
  [^Mbr br]
  (.corner_points br))

(defn crosses-antimeridian?
  "True when the rectangle wraps across the antimeridian (west > east)."
  [^Mbr mbr]
  (> (.west mbr) (.east mbr)))

(def ^:const COVERS_TOLERANCE
  "Tolerance used for the covers method. Longitudes and latitudes technically outside the bounding rectangle
  but within this tolerance will be considered covered by the bounding rectangle"
  0.0000000001)
(defn point->mbr
  "Returns an mbr that covers only a single point (a degenerate,
  zero-area rectangle, except at the poles)."
  [point]
  (cond
    ;; It's important that this covers all longitudes at the pole. The function is used for creating
    ;; an MBR to represent a point in Elasticsearch. An MBR touching the north pole might miss the
    ;; point mbr if it didn't cover every longitude.
    (p/is-north-pole? point)
    (mbr -180 90 180 90)

    (p/is-south-pole? point)
    (mbr -180 -90 180 -90)

    :else
    (let [{:keys [lon lat]} point]
      (mbr lon lat lon lat))))
(defn geodetic-lon-range-covers-lon?
  "Returns true if lon is between west and east. Ranges wrapping the
  antimeridian (west > east after applying tolerance) are handled, and a
  longitude of exactly +/-180 is considered covered whenever either edge
  lies within tolerance of the antimeridian (both signs name the same
  meridian)."
  [^double west ^double east ^double lon ^double tolerance]
  (let [west (- west tolerance)
        east (+ east tolerance)
        crosses-antimeridian (> west east)]
    (cond
      crosses-antimeridian
      (or (>= lon west) (<= lon east))

      (= (abs lon) 180.0)
      (let [within-180 (- 180.0 tolerance)]
        (or (>= (abs west) within-180)
            (>= (abs east) within-180)))

      :else
      (and (>= lon west) (<= lon east)))))
(defn cartesian-lon-range-covers-lon?
  "Returns true if lon is between west and east."
  [^double west ^double east ^double lon ^double tolerance]
  (let [lo (- west tolerance)
        hi (+ east tolerance)]
    (if (> lo hi)
      ;; Range wraps the antimeridian: covered on either side of it.
      (or (>= lon lo) (<= lon hi))
      (and (>= lon lo) (<= lon hi)))))
(defn covers-lon?
  "Returns true if the mbr covers the given longitude. The 3-arity form
  applies the default COVERS_TOLERANCE."
  ([coord-sys mbr v]
   (covers-lon? coord-sys mbr v COVERS_TOLERANCE))
  ([coord-sys ^Mbr mbr ^double v tolerance]
   (let [west (.west mbr) east (.east mbr)]
     (if (= coord-sys :geodetic)
       (geodetic-lon-range-covers-lon? west east v tolerance)
       (cartesian-lon-range-covers-lon? west east v tolerance)))))

(defn covers-lat?
  "Returns true if the mbr covers the given latitude, expanded by the
  tolerance (default COVERS_TOLERANCE) on both sides."
  ([mbr v]
   (covers-lat? mbr v COVERS_TOLERANCE))
  ([^Mbr mbr ^double v ^double tolerance]
   (let [north (.north mbr) south (.south mbr)
         north (+ north tolerance)
         south (- south tolerance)]
     (and (>= v south) (<= v north)))))
(defn cartesian-covers-point?
  "True when mbr covers point p in a cartesian coordinate system.
  delta is an optional tolerance; nil falls back to COVERS_TOLERANCE."
  ([mbr ^Point p]
   (cartesian-covers-point? mbr p nil))
  ([^Mbr mbr ^Point p delta]
   (let [delta (or delta COVERS_TOLERANCE)]
     (and (covers-lat? mbr (.lat p) delta)
          (cartesian-lon-range-covers-lon? (.west mbr) (.east mbr) (.lon p) delta)))))

(defn geodetic-covers-point?
  "True when mbr covers point p in a geodetic coordinate system.
  Poles are covered whenever the corresponding latitude is, regardless of
  longitude. delta is an optional tolerance; nil falls back to
  COVERS_TOLERANCE."
  ([mbr ^Point p]
   (geodetic-covers-point? mbr p nil))
  ([^Mbr mbr ^Point p delta]
   (let [delta (or delta COVERS_TOLERANCE)]
     (or
       (and (p/is-north-pole? p)
            (covers-lat? mbr 90.0 delta))
       (and (p/is-south-pole? p)
            (covers-lat? mbr -90.0 delta))
       (and (covers-lat? mbr (.lat p) delta)
            (geodetic-lon-range-covers-lon? (.west mbr) (.east mbr) (.lon p) delta))))))
(defn covers-point?
  "Returns true if the mbr contains the given point. An optional numeric
  tolerance may be supplied as a fourth argument."
  [coord-sys mbr p & [delta]]
  ;; Destructuring the rest arg (& [delta]) fixes a bug: the original
  ;; bound `delta` to the raw rest *sequence*, so any call supplying a
  ;; tolerance passed e.g. (0.1) - a seq - to the helpers as the numeric
  ;; tolerance and failed in the primitive arithmetic. Calls without a
  ;; delta are unchanged (delta is nil and the helpers use the default).
  (if (= coord-sys :geodetic)
    (geodetic-covers-point? mbr p delta)
    (cartesian-covers-point? mbr p delta)))
(defn split-across-antimeridian
  "Splits MBRs across the antimeridian. Returns a sequence of the mbrs if it crosses the antimeridian
  or a sequence containing original mbr."
  [m]
  (if (crosses-antimeridian? m)
    (let [{:keys [west north east south]} m]
      ;; Western half runs up to +180, eastern half starts at -180.
      [(mbr west north 180.0 south)
       (mbr -180.0 north east south)])
    [m]))
(defn covers-mbr?
  "Returns true if the mbr completely covers the other-br."
  [coord-sys mbr other-br]
  ;; NOTE(review): when mbr does NOT cross the antimeridian but other-br
  ;; does, neither branch applies and nil (falsy) is returned - confirm
  ;; callers split other-br first when that case matters.
  (or (and (= (crosses-antimeridian? mbr)
              (crosses-antimeridian? other-br))
           (every? #(covers-point? coord-sys mbr %) (corner-points other-br)))

      ;; one crosses and one doesn't
      (and (crosses-antimeridian? mbr)
           (let [[c1 c2] (split-across-antimeridian mbr)]
             ;; Check to see if the mbr crosses the other br on either side of the antimeridian
             (or (covers-mbr? coord-sys c1 other-br)
                 (covers-mbr? coord-sys c2 other-br))))))
(defn center-point
  "Returns the midpoint of the MBR as a point (longitude midpoint via
  mid-lon, which accounts for antimeridian wrapping)."
  [m]
  (let [{^double n :north ^double s :south ^double e :east ^double w :west} m
        lat-center (mid s n)
        lon-center (mid-lon w e)]
    (p/point lon-center lat-center)))
(defn round-to-float-map
  "Converts a bounding rectangles values from double to float. It will round the bounding rectangle
  from double to float such that the bounding rectangle will slightly increase in size or decrease.
  If increase? is true if will round to a larger size. If false it will round to a smaller size. No
  rounding will occur if float is capable of representing the exact value.
  The values are returned in a map since the Mbr record fields are type hinted as double."
  [m increase?]
  (let [{:keys [west north east south]} m
        max-lon (float 180)
        min-lon (float -180)
        max-lat (float 90)
        min-lat (float -90)
        ;; A degenerate (zero-width/height) rectangle cannot shrink
        ;; further, so its edges are cast directly instead of rounded.
        [new-west new-east] (if (and (= east west) (not increase?))
                              ;; We can't shrink between west and east anymore
                              [(float west) (float east)]
                              [(double->float west (not increase?))
                               (double->float east increase?)])
        [new-south new-north] (if (and (= north south) (not increase?))
                                ;; We can't shrink between south and north anymore
                                [(float south) (float north)]
                                [(double->float south (not increase?))
                                 (double->float north increase?)])]
    ;; Clamp to valid lon/lat bounds after rounding.
    {:west (math/constrain ^float new-west min-lon max-lon)
     :north (math/constrain ^float new-north min-lat max-lat)
     :east (math/constrain ^float new-east min-lon max-lon)
     :south (math/constrain ^float new-south min-lat max-lat)}))
(def whole-world
  "an mbr that covers the whole world"
  ;; Argument order is west, north, east, south.
  (mbr -180 90 180 -90))
(defn whole-world?
  "Returns true if an mbr covers the whole world"
  [mbr]
  ;; Exact record equality against the canonical whole-world rectangle.
  (= whole-world mbr))
(def whole-world-square-degrees
  "The number of square degrees in the world"
  ;; 360 degrees of longitude by 180 degrees of latitude.
  ^double (* 360.0 180.0))
(defn single-point?
  "Returns true if the MBR only covers a single point."
  [mbr]
  ;; Degenerate rectangle: zero width and zero height.
  (let [{:keys [west east north south]} mbr]
    (and (= west east)
         (= north south))))
(defn percent-covering-world
  "Returns percentage in square lat lons that the MBR covers the world"
  ^double [^Mbr mbr]
  (let [w (.west mbr)
        n (.north mbr)
        e (.east mbr)
        s (.south mbr)
        lat-size (- n s)
        ;; When the mbr crosses the antimeridian its lon span is the sum of
        ;; the two pieces on either side of +/-180.
        lon-size (if (crosses-antimeridian? mbr)
                   (+ (- 180.0 w) (- e -180.0))
                   (- e w))
        square-degrees (* lat-size lon-size)]
    (* 100.0 (/ square-degrees ^double whole-world-square-degrees))))
(defn external-points
  "Returns 3 points that are external to the mbr."
  [^Mbr mbr]
  ;; A whole-world mbr has no exterior, so this is a precondition.
  (pj/assert (not (whole-world? mbr)))
  (let [w (.west mbr)
        n (.north mbr)
        e (.east mbr)
        s (.south mbr)
        ;; Finds three points within the area indicated
        points-in-area (fn [w n e s]
                         ; w n e s should define an area not crossing antimeridian
                         ;; Find mid lon of range then find mid lon on left and right
                         ;; use mid lat for all three points
                         (let [mid-lon (mid w e)
                               right-lon (mid w mid-lon)
                               left-lon (mid mid-lon e)
                               mid-lat (mid n s)]
                           [(p/point left-lon mid-lat)
                            (p/point mid-lon mid-lat)
                            (p/point right-lon mid-lat)]))
        crosses-antimeridian (crosses-antimeridian? mbr)
        ;; Find the biggest area around the MBR to use to find external points
        north-dist (- 90.0 n)
        south-dist (- s -90.0)
        ;; A crossing mbr wraps in longitude, so the east/west side strips are
        ;; not usable candidate areas; zero them out of the max.
        west-dist (if crosses-antimeridian 0.0 (- w -180.0))
        east-dist (if crosses-antimeridian 0.0 (- 180.0 e))
        biggest-dist (max north-dist south-dist west-dist east-dist)]
    (cond
      (= biggest-dist north-dist) (points-in-area -180.0 90.0 180.0 n)
      (= biggest-dist south-dist) (points-in-area -180.0 s 180.0 -90.0)
      (and (not crosses-antimeridian)
           (= biggest-dist west-dist)) (points-in-area -180.0 90.0 w -90.0)
      (and (not crosses-antimeridian)
           (= biggest-dist east-dist)) (points-in-area e 90.0 180.0 -90.0)
      ;; NOTE(review): recomputes (crosses-antimeridian? mbr) although the
      ;; value is already bound above as `crosses-antimeridian`.
      (crosses-antimeridian? mbr) (points-in-area e 90.0 w -90.0)
      :else (errors/internal-error!
             (str
              "Logic error: One of the other distances should have been largest it "
              "should have crossed the antimeridian: "
              (pr-str mbr))))))
(defn non-crossing-intersects-br?
  "Specialized version of intersects-br? for two mbrs that don't cross the antimeridian.
  Returns true if the mbr intersects the other bounding rectangle."
  [coord-sys ^Mbr m1 ^Mbr m2]
  (pj/assert (not (or (crosses-antimeridian? m1)
                      (crosses-antimeridian? m2))))
  (let [w1 (.west m1)
        n1 (.north m1)
        e1 (.east m1)
        s1 (.south m1)
        w2 (.west m2)
        n2 (.north m2)
        e2 (.east m2)
        s2 (.south m2)
        ;; Pole contact detected with a small tolerance on the edge latitude.
        m1-touches-north? (double-approx= n1 90.0 0.0000001)
        m1-touches-south? (double-approx= s1 -90.0 0.0000001)
        m2-touches-north? (double-approx= n2 90.0 0.0000001)
        m2-touches-south? (double-approx= s2 -90.0 0.0000001)]
    (or (and (range-intersects? w1 e1 w2 e2)
             (range-intersects? s1 n1 s2 n2))
        ;; In the geodetic system all longitudes meet at a pole, so two
        ;; rectangles touching the same pole intersect even with disjoint
        ;; longitude ranges.
        (and (= coord-sys :geodetic)
             (or (and m1-touches-north? m2-touches-north?)
                 (and m1-touches-south? m2-touches-south?))))))
(defn intersects-br?
  "Returns true if the mbr intersects the other bounding rectangle"
  [coord-sys ^Mbr mbr ^Mbr other-br]
  (if (and (not (crosses-antimeridian? mbr)) (not (crosses-antimeridian? other-br)))
    ;; optimized case for mbrs that don't cross the antimeridian
    (non-crossing-intersects-br? coord-sys mbr other-br)
    ;; Otherwise split any crossing mbr into halves and test every pairing.
    ;; NOTE(review): the names m1-east/m1-west appear swapped relative to the
    ;; order split-across-antimeridian returns its halves; harmless here since
    ;; all four combinations are tested -- confirm before relying on the names.
    (let [[m1-east m1-west] (split-across-antimeridian mbr)
          [m2-east m2-west] (split-across-antimeridian other-br)]
      ;; Second halves are nil for a non-crossing input, hence the guards.
      (or (non-crossing-intersects-br? coord-sys m1-east m2-east)
          (and m2-west (non-crossing-intersects-br? coord-sys m1-east m2-west))
          (and m1-west (non-crossing-intersects-br? coord-sys m1-west m2-east))
          (and m1-west m2-west (non-crossing-intersects-br? coord-sys m1-west m2-west))))))
(defn intersections
  "Returns the intersection of the two minimum bounding rectangles. This could return multiple mbrs
  if one crosses the antimeridian and the other intersects both sides."
  [^Mbr m1 ^Mbr m2]
  ;; Split both inputs so no piece crosses the antimeridian, intersect every
  ;; pairing, and drop the nil (non-intersecting) results.
  (filter identity
          (for [m1-sub (split-across-antimeridian m1)
                m2-sub (split-across-antimeridian m2)]
            (when (non-crossing-intersects-br? :cartesian m1-sub m2-sub)
              ;; Rectangle intersection: inner edges on each axis.
              (let [{^double w1 :west ^double n1 :north ^double e1 :east ^double s1 :south} m1-sub
                    {^double w2 :west ^double n2 :north ^double e2 :east ^double s2 :south} m2-sub
                    new-west (max w1 w2)
                    new-east (min e1 e2)
                    new-north (min n1 n2)
                    new-south (max s1 s2)]
                (mbr new-west new-north new-east new-south))))))
(defn union-not-crossing-antimeridian
  "A specialized union for mbrs that do not cross the antimeridian and are not
  allowed to cross the antimeridian."
  [^Mbr m1 ^Mbr m2]
  (pj/assert (not (or (crosses-antimeridian? m1)
                      (crosses-antimeridian? m2)))
             "allow-cross-antimeridian? was false and either m1 or m2 crossed the antimeridian")
  ;; min/max are commutative, so the previous branch on
  ;; (> (.west m2) (.west m1)) produced identical results on both paths;
  ;; the branch was dead weight and is removed.
  (mbr (min (.west m1) (.west m2))
       (max (.north m1) (.north m2))
       (max (.east m1) (.east m2))
       (min (.south m1) (.south m2))))
(defn union
  "Returns the union of the minimum bounding rectangles."
  [^Mbr m1 ^Mbr m2]
  (let [;; lon range union
        [w e] (cond
                ;; both cross antimeridian
                (and (crosses-antimeridian? m1) (crosses-antimeridian? m2))
                (let [w (min (.west m1) (.west m2))
                      e (max (.east m1) (.east m2))]
                  (if (<= w e)
                    ;; If the result covers the whole world then we'll set it to that.
                    [-180.0 180.0]
                    [w e]))

                ;; one crosses the antimeridian
                (or (crosses-antimeridian? m1) (crosses-antimeridian? m2))
                ;; Make m1 cross the antimeridian
                (let [[^Mbr m1 ^Mbr m2] (if (crosses-antimeridian? m2)
                                          [m2 m1]
                                          [m1 m2])
                      w1 (.west m1) e1 (.east m1)
                      w2 (.west m2) e2 (.east m2)
                      ;; We could expand m1 to the east or to the west. Pick the shorter of the two.
                      west-dist (- w1 w2)
                      east-dist (- e2 e1)
                      [^double w ^double e] (cond
                                              ;; m2's lon range already falls inside m1's.
                                              (or (<= west-dist 0.0) (<= east-dist 0.0)) [w1 e1]
                                              (< east-dist west-dist) [w1 e2]
                                              :else [w2 e1])]
                  (if (<= w e)
                    ;; If the result covers the whole world then we'll set it to that.
                    [-180.0 180.0]
                    [w e]))

                ;; none cross the antimeridian
                :else
                (let [;; Order the rectangles so m1 has the smaller west edge.
                      [^Mbr m1 ^Mbr m2] (if (> (.west m1) (.west m2))
                                          [m2 m1]
                                          [m1 m2])
                      w1 (.west m1) e1 (.east m1)
                      w2 (.west m2) e2 (.east m2)
                      w (min w1 w2)
                      e (max e1 e2)
                      ;; Check if it's shorter to cross the antimeridian
                      dist (- e w)
                      alt-west w2
                      alt-east e1
                      alt-dist (+ (- 180.0 alt-west) (- alt-east -180.0))]
                  (if (< alt-dist dist)
                    [alt-west alt-east]
                    [w e])))
        ;; lat range union
        n (max (.north m1) (.north m2))
        s (min (.south m1) (.south m2))]
    (mbr w n e s)))
;; An Mbr is already its own derived representation, so derivation is the
;; identity function.
(extend-protocol d/DerivedCalculator
  cmr.spatial.mbr.Mbr
  (calculate-derived
    ^Mbr [^Mbr mbr]
    mbr))
(defn- north-less-than-south-validation
  "Cross-field check: produces an error map when the rectangle's north edge
  lies below its south edge, nil otherwise."
  [field-path {:keys [^double north ^double south]}]
  (when (> south north)
    {field-path [(msg/br-north-less-than-south north south)]}))
(def validations
  ;; Per-field presence/number/range checks followed by the cross-field
  ;; north-vs-south check.
  [{:west [v/required v/validate-number (v/within-range -180.0 180.0)]
    :north [v/required v/validate-number (v/within-range -90.0 90.0)]
    :east [v/required v/validate-number (v/within-range -180.0 180.0)]
    :south [v/required v/validate-number (v/within-range -90.0 90.0)]}
   north-less-than-south-validation])
;; Hooks Mbr into the generic spatial validation protocol, converting
;; validation failures into human-readable error messages.
(extend-protocol sv/SpatialValidation
  cmr.spatial.mbr.Mbr
  (validate
    [record]
    (v/create-error-messages (v/validate validations record))))
|
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System.Collections.Generic;
using Microsoft.Python.Analysis.Analyzer;
using Microsoft.Python.Analysis.Diagnostics;
using Microsoft.Python.Core;
using Microsoft.Python.Parsing;
using Microsoft.Python.Parsing.Ast;
using ErrorCodes = Microsoft.Python.Analysis.Diagnostics.ErrorCodes;
namespace Microsoft.Python.Analysis.Linting.UndefinedVariables {
/// <summary>
/// AST walker that collects "undefined variable" diagnostics: right-hand
/// sides of assignments, call targets/arguments, if-conditions, and the
/// names listed in global/nonlocal statements.
/// </summary>
internal sealed class UndefinedVariablesWalker : LinterWalker {
    private readonly List<DiagnosticsEntry> _diagnostics = new List<DiagnosticsEntry>();

    public UndefinedVariablesWalker(IDocumentAnalysis analysis, IServiceContainer services)
        : base(analysis, services) { }

    /// <summary>Diagnostics accumulated so far.</summary>
    public IReadOnlyList<DiagnosticsEntry> Diagnostics => _diagnostics;

    public override bool Walk(AssignmentStatement node) {
        // Skip assignments whose right-hand side failed to parse.
        if (!(node.Right is ErrorExpression)) {
            node.Right?.Walk(new ExpressionWalker(this));
        }
        return false;
    }

    public override bool Walk(CallExpression node) {
        node.Target?.Walk(new ExpressionWalker(this));
        foreach (var argument in node.Args) {
            argument?.Expression?.Walk(new ExpressionWalker(this));
        }
        return false;
    }

    public override bool Walk(IfStatement node) {
        foreach (var conditional in node.Tests) {
            conditional.Test.Walk(new ExpressionWalker(this));
        }
        return true;
    }

    public override bool Walk(GlobalStatement node)
        => CheckNamesDefined(node.Names, LookupOptions.Global);

    public override bool Walk(NonlocalStatement node)
        => CheckNamesDefined(node.Names, LookupOptions.Nonlocal);

    public void ReportUndefinedVariable(NameExpression node) {
        var eval = Analysis.ExpressionEvaluator;
        _diagnostics.Add(new DiagnosticsEntry(
            Resources.UndefinedVariable.FormatInvariant(node.Name),
            eval.GetLocation(node).Span, ErrorCodes.UndefinedVariable, Severity.Warning, DiagnosticSource.Linter));
    }

    // Reports each listed name that cannot be resolved in the given scope;
    // always stops further traversal (returns false) like the original.
    private bool CheckNamesDefined(IEnumerable<NameExpression> names, LookupOptions options) {
        foreach (var nameExpression in names) {
            if (Eval.LookupNameInScopes(nameExpression.Name, out _, options) == null) {
                ReportUndefinedVariable(nameExpression);
            }
        }
        return false;
    }
}
}
|
package io.testaxis.intellijplugin.toolwindow.builds.views.testcasetabs
import com.intellij.ide.highlighter.HighlighterFactory
import com.intellij.openapi.editor.EditorFactory
import com.intellij.openapi.editor.LogicalPosition
import com.intellij.openapi.editor.colors.EditorColors
import com.intellij.openapi.editor.colors.EditorColorsManager
import com.intellij.openapi.editor.ex.EditorEx
import com.intellij.openapi.editor.markup.EffectType
import com.intellij.openapi.editor.markup.HighlighterTargetArea
import com.intellij.openapi.editor.markup.TextAttributes
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiDocumentManager
import com.intellij.psi.PsiElement
import com.intellij.psi.PsiFile
import com.intellij.psi.PsiMethod
import com.intellij.ui.JBColor
import com.intellij.ui.LanguageTextField
import java.awt.Color
// Markup-model layers for the highlighters below; larger values paint on top
// of smaller ones. Fragment highlights sit above all line highlights.
// NOTE(review): the 59xx values were presumably chosen to sit above the
// platform's built-in layers -- confirm against HighlighterLayer constants.
private const val LINE_HIGHLIGHT_LAYER = 5950
private const val COVERED_LINE_HIGHLIGHT_LAYER = 5951
private const val CHANGED_LINE_HIGHLIGHT_LAYER = 5952
private const val COVERED_AND_CHANGED_LINE_HIGHLIGHT_LAYER = 5953
private const val FRAGMENT_HIGHLIGHT_LAYER = 5960
/**
 * Editor-backed text field used to display test/production code with
 * per-line highlighting for coverage and VCS-changed lines, plus range
 * highlighting for individual PSI elements.
 */
@Suppress("TooManyFunctions")
class TestCodeEditorField(project: Project) : LanguageTextField(null, project, "PLACEHOLDER") {
    companion object {
        // Background colors for the three kinds of line highlights.
        val COVERED_LINE_COLOR = JBColor.YELLOW.darker()
        val CHANGED_LINE_COLOR = JBColor.GREEN.darker().darker()
        val COVERED_AND_CHANGED_LINE_COLOR = JBColor.MAGENTA.darker()
    }

    init {
        // Behave like a full multi-line editor, not a single-line field.
        isOneLineMode = false
        autoscrolls = true
    }

    /** Configures the created editor: scrollbars, caret, line numbers and syntax highlighting. */
    override fun createEditor(): EditorEx = super.createEditor().apply {
        setVerticalScrollbarVisible(true)
        setHorizontalScrollbarVisible(true)
        colorsScheme = EditorColorsManager.getInstance().globalScheme
        setCaretEnabled(true)
        setCaretVisible(true)
        settings.isLineNumbersShown = true
        settings.isAdditionalPageAtBottom = true
        isViewer = false
        highlighter = HighlighterFactory.createHighlighter(project, fileType)
    }

    /** Shows the given file's document, or a fallback message when the file is missing. */
    fun showFile(file: PsiFile?) {
        if (file == null) {
            document = EditorFactory.getInstance().createDocument("This file could not be found in the project.")
            return
        }

        setNewDocumentAndFileType(
            file.fileType,
            PsiDocumentManager.getInstance(project).getDocument(file)
        )
    }

    /** Shows the file containing the given test method and scrolls to the method. */
    fun showTestMethod(method: PsiMethod?) {
        if (method == null) {
            document = EditorFactory.getInstance().createDocument("Test method could not be found.")
            return
        }

        setNewDocumentAndFileType(
            method.containingFile.fileType,
            PsiDocumentManager.getInstance(project).getDocument(method.containingFile)
        )

        setCaretPosition(method.textOffset)
        scrollToCaretPosition()
    }

    /** Replaces the content with plain text. */
    fun showText(text: String) {
        document = EditorFactory.getInstance().createDocument(text)
    }

    // The highlight functions below take 1-based line numbers (hence the -1)
    // and are no-ops when the editor has not been created yet.

    fun highlightCoveredLine(lineNumber: Int) =
        editor?.markupModel?.addLineHighlighter(
            lineNumber - 1,
            COVERED_LINE_HIGHLIGHT_LAYER,
            createHighlightAttributes(COVERED_LINE_COLOR)
        )

    fun highlightChangedLine(lineNumber: Int) =
        editor?.markupModel?.addLineHighlighter(
            lineNumber - 1,
            CHANGED_LINE_HIGHLIGHT_LAYER,
            createHighlightAttributes(CHANGED_LINE_COLOR)
        )

    fun highlightCoveredAndChangedLine(lineNumber: Int) =
        editor?.markupModel?.addLineHighlighter(
            lineNumber - 1,
            COVERED_AND_CHANGED_LINE_HIGHLIGHT_LAYER,
            createHighlightAttributes(COVERED_AND_CHANGED_LINE_COLOR)
        )

    /** Highlights the lines spanned by the given PSI element using the scheme's modified-lines color. */
    fun highlightElement(element: PsiElement?) = editor?.markupModel?.addRangeHighlighter(
        element?.textRange?.startOffset ?: 0,
        element?.textRange?.endOffset ?: 0,
        LINE_HIGHLIGHT_LAYER,
        createHighlightAttributes(editor?.colorsScheme?.getColor(EditorColors.MODIFIED_LINES_COLOR)),
        HighlighterTargetArea.LINES_IN_RANGE
    )

    private fun createHighlightAttributes(color: Color?) = TextAttributes().apply {
        backgroundColor = color
        effectType = EffectType.ROUNDED_BOX
    }

    /** Moves the caret to the start of the given (0-based) logical line; requires a created editor. */
    fun moveCaretToLine(lineNumber: Int) =
        with(editor ?: throw IllegalStateException("Cannot set caret position when editor is not yet created.")) {
            caretModel.moveToLogicalPosition(LogicalPosition(lineNumber, 0))
        }

    /** Scrolls vertically so the caret's line is visible; requires a created editor. */
    fun scrollToCaretPosition() =
        with(editor ?: throw IllegalStateException("Cannot scroll to caret when editor is not yet created.")) {
            scrollingModel.scrollVertically(offsetToPoint2D(caretModel.offset).y.toInt())
        }
}
|
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Paths;
/**
* This class allow us to load a sudoku from a file and print it
* It's also used to merge the unsolved sudoku with the values from the CSP
*/
/**
 * Loads a sudoku grid from a file, prints it, and validates it.
 * Also used to merge the unsolved sudoku with the values produced by the CSP.
 */
public class Sudoku {
    private int[][] values;

    public static final int SUDOKU_SIZE = 9;

    public Sudoku() {
        values = new int[SUDOKU_SIZE][SUDOKU_SIZE];
    }

    /**
     * Merges the original grid with a CSP assignment: each cell takes the
     * assignment value unless the assignment left it empty (0), in which
     * case the original value is kept.
     *
     * @param val        the original (unsolved) grid
     * @param assignment the CSP solution grid, 0 meaning "unassigned"
     */
    public void loadArrays(int[][] val, int[][] assignment) {
        for (int i = 0; i < SUDOKU_SIZE; ++i) {
            for (int j = 0; j < SUDOKU_SIZE; ++j) {
                values[i][j] = assignment[i][j] == 0 ? val[i][j] : assignment[i][j];
            }
        }
    }

    /**
     * Loads a grid from a text file: one line per row, one digit per cell.
     * Rows and columns beyond the 9x9 grid are now ignored instead of
     * overflowing the backing array. I/O errors are reported to stderr.
     *
     * @param filename path of the file to read
     */
    public void loadFile(String filename) {
        try (InputStream in = Files.newInputStream(Paths.get(filename));
             BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
            String line;
            int lineNumber = 0;
            while ((line = reader.readLine()) != null && lineNumber < SUDOKU_SIZE) {
                int columns = Math.min(line.length(), SUDOKU_SIZE);
                for (int i = 0; i < columns; ++i) {
                    values[lineNumber][i] = Integer.parseInt(Character.toString(line.charAt(i)));
                }
                lineNumber++;
            }
        } catch (IOException x) {
            System.err.println(x);
        }
    }

    /** Prints the grid to stdout with 3x3 box separators. */
    public void print() {
        for (int i = 0; i < SUDOKU_SIZE; ++i) {
            // Horizontal separator before every third row (closing the
            // previous row with '|' except before the very first row).
            if (i % 3 == 0 && i != 0)
                System.out.println("|\n-------------------");
            else if (i % 3 == 0)
                System.out.println("\n-------------------");
            else
                System.out.println("|");
            for (int j = 0; j < SUDOKU_SIZE; ++j) {
                // Vertical separator before every third column.
                if (j % 3 == 0)
                    System.out.print("|");
                else
                    System.out.print(" ");
                System.out.print(values[i][j]);
            }
        }
        System.out.println("|\n-------------------");
    }

    /**
     * Returns true when no row, column, or 3x3 box contains a duplicate value.
     */
    public boolean isGood() {
        for (int i = 0; i < SUDOKU_SIZE; ++i) {
            for (int j = 0; j < SUDOKU_SIZE; ++j) {
                int v = values[i][j];
                // Row check: no other cell in row i may hold the same value.
                for (int column = 0; column < SUDOKU_SIZE; ++column) {
                    if (column != j && v == values[i][column])
                        return false;
                }
                // Column check: no other cell in column j may hold the same value.
                for (int row = 0; row < SUDOKU_SIZE; ++row) {
                    if (row != i && v == values[row][j])
                        return false;
                }
                // Box check. FIX: the previous exclusion (squareX != i &&
                // squareY != j) skipped every box cell sharing the row OR
                // column with (i, j); those duplicates happened to be caught
                // by the row/column checks above, but the exclusion is now
                // written as intended: skip only the cell itself.
                int minX = (i / 3) * 3;
                int minY = (j / 3) * 3;
                for (int squareX = minX; squareX < minX + 3; ++squareX) {
                    for (int squareY = minY; squareY < minY + 3; ++squareY) {
                        if ((squareX != i || squareY != j) && v == values[squareX][squareY]) {
                            return false;
                        }
                    }
                }
            }
        }
        return true;
    }

    public int[][] getValues() {
        return values;
    }

    public void setValues(int[][] values) {
        this.values = values;
    }
}
|
{# If our source override didn't take, this would be an error #}
select * from {{ source('my_source', 'my_table') }}
|
<?php
namespace App\Http\Controllers;
use App\Http\Model\Managers\WorkerManager;
use Illuminate\Http\Request;
use App\Http\Controllers\Auth;
use App\Http\Model\Managers\CompanyManager;
class HomeController extends Controller
{
    /**
     * Create a new controller instance.
     *
     * @return void
     */
    public function __construct()
    {
        $this->middleware('auth');
    }

    /**
     * Show the application dashboard with company/worker counters.
     * If the user is not logged in, redirect to the login form.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        if (auth()->id()) {
            // Normalize any falsy/invalid count from the managers to zero.
            $compManager = new CompanyManager();
            $countCompanies = max(0, (int) $compManager->returnCountAllCompanies());

            $workerManager = new WorkerManager();
            $countWorkers = max(0, (int) $workerManager->countWorkers());

            // Only query for inactive workers when there are workers at all.
            $countInactiveWorkers = $countWorkers ? $workerManager->countWorkers(null, 1) : 0;
            $countActiveWorkers = $countWorkers ? $countWorkers - $countInactiveWorkers : 0;

            return view('home', [
                'companies_count' => $countCompanies,
                'workers_count' => $countWorkers,
                'active_workers_count' => $countActiveWorkers,
            ]);
        }

        // FIX: route() expects a route *name*, not a URI; '/login' would throw
        // a RouteNotFoundException. Laravel's auth scaffolding names this
        // route 'login'.
        return redirect()->route('login');
    }
}
|
#!/bin/sh
# Container entrypoint: start the Celery worker (with embedded beat) using
# the Django database scheduler against the dev settings module.
set -e
export DJANGO_SETTINGS_MODULE=core.settings.dev
cd /code
# exec replaces the shell so Celery becomes PID 1 and receives container
# stop signals (SIGTERM) directly, allowing a graceful shutdown.
exec celery -A core worker --beat --scheduler django --loglevel=info
|
# obelisk
A lightweight service for data transformations and interfacing
for reflectance calculations
## Installation
### Docker
Obelisk requires docker. The easiest way to put docker on your
system is through docker desktop, which requires administrator
privileges. You can find installation instructions for
docker [here](https://docs.docker.com/desktop/windows/install/)
### Obelisk
Clone or download a `.zip` of the repository via github and
navigate to the repository directory.
## Usage
- Build the Obelisk docker image
- Run the Obelisk docker image
- Navigate to `127.0.0.1:8000`
|
module.exports = {
name: "join",
description: "Tham gia phòng",
aliases: ['connect'],
execute(client, message, args) {
const voiceChannel = message.member.voice.channel;
if (!voiceChannel) return;
voiceChannel.join();
}
}
|
<?php
namespace App\Http\Controllers;
use App\Category;
use App\News;
use App\User;
use Illuminate\Support\Facades\DB;
use function GuzzleHttp\Promise\all;
class MainController extends Controller
{
    /**
     * Render the landing page: all users/news/categories, the three latest
     * news items, and four randomly picked featured news items.
     *
     * @return \Illuminate\View\View
     */
    public function index()
    {
        $users = User::all();
        // Latest three news items, newest first.
        $threeNews = DB::table('news')->orderBy('created_at', 'DESC')->paginate(3);
        // FIX: News::all() was re-queried for every random pick (four extra
        // full-table loads). Reuse the collection loaded once above.
        $news = News::all();
        $randomNews1 = $news->random();
        $randomNews2 = $news->random();
        $randomNews3 = $news->random();
        $randomNews4 = $news->random();
        $categories = Category::all();

        return view('index', [
            'categories' => $categories,
            'threeNews' => $threeNews,
            'users' => $users,
            'news' => $news,
            'randomNews1' => $randomNews1,
            'randomNews2' => $randomNews2,
            'randomNews3' => $randomNews3,
            'randomNews4' => $randomNews4,
        ]);
    }
}
|
# Advanced Topics
This page presents advanced information in a not so structured manner. It is used as both a reference
for external and internal developers, and therefore rewards flexibility over structure.
## Examples
Multiple netius examples can be found in the [Examples](examples.md) page.
## Python 3
The migration to Python 3 is not easy and as such a compatibility layer was created under the name of
[legacy.py](../src/netius/base/legacy.py). This file should be the primary source of functionality related
to the compatibility between Python 2 and Python 3, and all code regarding the transition should
be stored there and used from there.
### WSGI
The WSGI specification is especially problematic regarding the Python 3 unicode vs bytes problem, and a common
specification for how to solve this is still pending; please refer to the links section for more information
about problems and solutions for Python 3 and WSGI.
### Links
* [Python3/WSGI](http://wsgi.readthedocs.org/en/latest/python3.html)
* [WSGI 2.0](http://wsgi.readthedocs.org/en/latest/proposals-2.0.html)
## Configuration
```json
"SSL_CONTEXTS" : {
"localhost" : {
"key_file" : "/secret.key",
"cer_file" : "/secret.cer"
}
}
```
## Benchmarks
### Apache Benchmark (ab)
To install `ab` run `scu install apache`.
Running `ab -n 20000 -c 5 -k http://localhost:9090/` should achieve the following results:
* HelloServer `PORT=9090 python -m netius.extra.hello` - 14.3 K req/sec
* WSGIServer `PORT=9090 python -m netius.servers.wsgi` - 9.6 K req/sec
### h2Load Benchmark
To install `h2load` run `scu install nghttp2`.
Running `h2load -n20000 -c5 -m100 --h1 http://localhost:9090` should achieve the following results:
* HelloServer `PORT=9090 python -m netius.extra.hello` - 17.8 K req/sec
* WSGIServer `PORT=9090 python -m netius.servers.wsgi` - 13.2 K req/sec
Using multiple children the results should increase in a linear way:
* HelloServer `CHILDREN=4 PORT=9090 python -m netius.extra.hello` - 44.6 K req/sec
* WSGIServer `CHILDREN=4 PORT=9090 python -m netius.servers.wsgi` - 33.0 K req/sec
* HelloServer PyPy `CHILDREN=4 PORT=9090 pypy -m netius.extra.hello` - 188.7 K req/sec
* WSGIServer PyPy `CHILDREN=4 PORT=9090 pypy -m netius.servers.wsgi` - 165.4 K req/sec
### Notes
These values have been verified for commit #008ba53 running in Python 2.7.11.
The results are a result of executing the benchmark on `servidor4.hive`.
## Compliance
### HTTP2
The `h2spec` tools allows proper verification of the RFC 5741 statements on a quick fashion.
```
scu install go
export GOPATH=~/go
mkdir -p ~/go
go get github.com/summerwind/h2spec/cmd/h2spec
~/go/bin/h2spec -h localhost -p 9090 -t -k
```
## Cryptography
Netius has some built-in cryptography utilities. The following are some
examples of RSA key operations that can be tested through the command line:
```bash
python -m netius.sh.rsa read_private private.key
python -m netius.sh.rsa read_public public.pub
python -m netius.sh.rsa private_to_public private.key public.pub
```
DKIM is an infra-structure for signing SMTP based messages which provides a way to avoid unwanted
SPAM tagging. Netius provides a series of utilities for DKIM processing, here are some examples:
```bash
python -m netius.sh.dkim generate hive.pt
python -m netius.sh.dkim sign hello.mail dkim.key 20140327175143 hive.pt
```
To generate a password protected by a cryptographic hash to be used with the netius
authentication/authorization infra-structure use:
```bash
python -m netius.sh.auth generate your_password
```
## IPv6
Netius is compatible with IPv6. To activate this mode set the `IPV6` configuration variable
to a valid value (eg: 1 or True), and an IPv6 socket will be used instead.
```python
IPV6=1 MESSAGE="Hello Netius" python -m netius.extra.hello
```
## Debugging
It's important to keep track of the memory leaks that may be created by any circular references or
unclosed resources associated with a netius server. For that purpose, a [special document](leak.md) has
been created, documenting the various tools and strategies that may be used to detect such leaks.
## Testing
### Edge triggered polling
Edge based polling is a bit tricky as it may easily end up in a data deadlock. The best way to test this
kind of problem is to change the `POLL_TIMEOUT` value to a negative value so that the loop blocks for data:
```bash
LEVEL=DEBUG POLL_TIMEOUT=-1 BASE_PATH=/ python -m netius.extra.file
```
Then try to extract a really large file from this server (eg: 1.0 GB) and see if it is able to serve it
without any problems.
|
# Deface override: injects a "cyo_price" checkbox (with label) into the Spree
# admin product edit form, immediately after the cost-currency field hook.
Deface::Override.new(
  virtual_path: 'spree/admin/products/_form',
  name: 'add cyo_price to edit page',
  insert_after: "[data-hook='admin_product_form_cost_currency']",
  text: '<div data-hook="admin_product_form_cyo_price">
<%= f.check_box :cyo_price %>
<%= f.label :cyo_price, Spree.t(:cyo_price) %>
</div>'
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.