text
stringlengths 27
775k
|
|---|
import unittest
from katas.kyu_7.zip_it import lstzip
class ZipItTestCase(unittest.TestCase):
def setUp(self):
self.a = [1, 2, 3, 4, 5]
self.b = ['a', 'b', 'c', 'd', 'e']
def test_equals(self):
self.assertEqual(lstzip(self.a, self.b, lambda c, d: str(c) + str(d)),
['1a', '2b', '3c', '4d', '5e'])
def test_equals_2(self):
self.assertEqual(lstzip(self.b, self.a, lambda e, f: str(e) + str(f)),
['a1', 'b2', 'c3', 'd4', 'e5'])
def test_equals_3(self):
self.assertEqual(lstzip(self.b, lstzip(
self.a, self.b, lambda g, h: g * ord(h[0])),
lambda i, j: str(i) + str(j)),
['a97', 'b196', 'c297', 'd400', 'e505'])
|
fun foo(vararg x: Char) {}
fun bar() {
foo(*<caret>charArrayOf('a', 'b'))
}
|
import {featherStart} from './test-head'
import {expect} from 'chai'
import * as sinon from 'sinon'
describe('XHR', () => {
let window, feather, sandbox, document
before(async () => featherStart().then((w: any) => (
window = w,
feather = w.feather,
document = w.document
)))
beforeEach(() => this.sinon = sandbox = sinon.sandbox.create())
afterEach(() => sandbox.restore())
describe('App loads', () => {
it('should show rendered', () => {
const ef = document.querySelector('.extra-features')
expect(ef.firstChild.textContent).to.be.equal('Rendered')
})
})
describe('Rest', () => {
it('GET fetches data', async () => {
const app = window.ef as demo.ExtraFeatures,
spy = this.sinon.spy(app.getData, 'original')
const data = await app.getData()
spy.should.have.been.calledOnce
expect(data).to.be.deep.equal({
response: true,
method: 'GET'
})
})
it('POST fetches data', async () => {
const app = window.ef as demo.ExtraFeatures
const spy = this.sinon.spy(app.postData, 'original')
const data = await app.postData()
spy.should.have.been.calledOnce
expect(data).to.be.deep.equal({
response: true,
method: 'POST'
})
})
})
})
|
using System;
using NUnit.Framework;
namespace CollabEdit.VersionControl.Tests
{
[TestFixture]
public class TestCommitMetadata
{
[TestCase("", "", ExpectedResult = true)]
[TestCase("text", "text", ExpectedResult = true)]
[TestCase("text", "", ExpectedResult = false)]
[TestCase("text", "text1", ExpectedResult = false)]
public bool Test_CommitMetadata_GetHashCode(string first, string second)
{
var firstMeta = new CommitMetadata(first);
var secondMeta = new CommitMetadata(second);
return firstMeta.GetHashCode() == secondMeta.GetHashCode();
}
[TestCase("", "", ExpectedResult = true)]
[TestCase("text", "text", ExpectedResult = true)]
[TestCase("text", "", ExpectedResult = false)]
[TestCase("text", "text1", ExpectedResult = false)]
public bool Test_CommitMetadata_Equals(string first, string second)
{
var firstMeta = new CommitMetadata(first);
var secondMeta = new CommitMetadata(second);
return firstMeta.Equals(secondMeta);
}
}
}
|
package hk.edu.polyu.datamining.pamap2.actor
/**
* Created by beenotung on 1/21/16.
*/
class ClassifyActor {
}
|
import datetime
import math
import numpy
import pathlib
import random
import string
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID
from typing import Tuple
def random_string(length=5):
multiplier = math.ceil(length/len(string.ascii_letters))
return "".join(random.sample(string.ascii_letters*multiplier, length))
def random_bytes(length=1024):
return bytes(random_string(length), "utf-8")
def random_dict(items=10):
return {random_string(): random_string() for _ in range(items)}
def random_array(length=10) -> list:
return numpy.random.uniform(0, 1, size=length).tolist()
def create_self_signed_cert(path: pathlib.Path) -> Tuple[pathlib.Path,
pathlib.Path]:
key = rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=default_backend(),
)
subject = issuer = x509.Name([
x509.NameAttribute(NameOID.COUNTRY_NAME, "US"),
x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "California"),
x509.NameAttribute(NameOID.LOCALITY_NAME, "San Francisco"),
x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Test Company"),
x509.NameAttribute(NameOID.COMMON_NAME, "test.org"),
])
cert = x509.CertificateBuilder().subject_name(
subject
).issuer_name(
issuer
).public_key(
key.public_key()
).serial_number(
x509.random_serial_number()
).not_valid_before(
datetime.datetime.utcnow()
).not_valid_after(
# Certificate will be valid for 10 days
datetime.datetime.utcnow() + datetime.timedelta(days=10)
).add_extension(
x509.SubjectAlternativeName([x509.DNSName(u"localhost")]),
critical=False, # Sign certificate with our private key.
).sign(key, hashes.SHA256(), default_backend())
keypath = path.joinpath("key.pem")
with open(path.joinpath(keypath), "wb") as key_pem:
key_pem.write(key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
))
certpath = path.joinpath("cert.pem")
with open(path.joinpath(certpath), "wb") as cert_pem:
cert_pem.write(cert.public_bytes(serialization.Encoding.PEM))
return keypath, certpath
|
use std::ops::Mul;
pub mod data_type {
pub fn execute() {
// let i8 = 0;
// let u8 = 0;
// let i16 = 0;
// let u16 = 0;
// let i32 = 0;
// let u32 = 0;
// let i64 = 0;
// let u64 = 0;
// let i128 = 0;
// let u128 = 0;
// let isize = 0;// arch 32sys equal i32,64sys equal i64
// let usize = 0;
// let f32 = 0;
// let f64 = 0;
// let f: bool = true;
// let c: char = 'c';
// number type
// let decimal = 98_222; // 10进制: 98222
// let hex = 0xff; // 16进制: 255
// let octal = 0o77;// 8进制: 63
// let bianry = 0b1111_0000;
// tuple type
// let tup = (1,2,3);
// let (x, y, z) = tup;
// list type
// let list = [1,2,3,4];
// let list1:[i32;5] = [1,2,3,4,5];
// let list2 = [3;5]; // [3,3,3,3,3]
// let first = list[0]; // 1
// let second = list[1]; // 2
// Bitwise operations
println!("0011 AND 0101 is {:04b}", 0b0011u32 & 0b0101);
println!("0011 OR 0101 is {:04b}", 0b0011u32 | 0b0101);
println!("0011 XOR 0101 is {:04b}", 0b0011u32 ^ 0b0101);
println!("1 << 5 is {}", 1u32 << 5);
println!("0x80 >> 2 is 0x{:x}", 0x80u32 >> 2);
// Use underscores to improve readability!
println!("One million is written as {}", 1_000_000u32);
let _guess: u32 = "42".parse().expect("The string is not integer");
// println!("decimal = {:?}", first);
}
}
pub mod functions {
pub fn execute() {
let str = String::from("hello");
let mut ma = 1;
execute1(&str);
execute2(&mut ma);
}
pub fn execute1(p: &String) {
println!("p = {}", p)
}
pub fn execute2(p: &mut i32) -> i32 {
*p = *p + 1;
*p
}
}
pub mod control_flow {
pub fn execute() {
let a = true;
if a {
println!("a = true")
} else {
println!("else")
}
let number = 10;
if number > 10 {
println!("number > 10")
} else if number > 5 && number <= 10 {
println!("number < 5 && number < 10")
} else {
println!("number else")
}
let cond = true;
let result = if cond { 5 } else { 6 };
println!("reuslt = {}", result);
loop {
println!("loop ...");
break;
}
let loop_r = loop {
break 2;
};
let mut i = 0;
while i < 10 {
i += 1;
}
println!("loop_r = {}", loop_r);
let list = [1, 2, 3];
for e in list.iter() {
print!("{}", e);
}
println!();
// 倒序
for e in (1..4).rev() {
print!("{}", e)
}
}
}
// 结构体
pub mod struction {
// 形状接口
pub trait Shape<T> {
fn area(&self) -> T;
fn show(&self) {
println!("show trait Shape<T>")
}
}
pub trait Animal {
fn new() -> Self;
}
struct Dog {}
impl Animal for Dog {
fn new() -> Dog {
Dog {}
}
}
// 正方形
struct Rectangle<T> {
with: T,
height: T,
}
// 圆形
struct Round<T> {
r: T,
}
impl<T: super::Mul<T, Output = T> + Copy> Shape<T> for Rectangle<T> {
fn area(&self) -> T {
return self.with * self.height;
}
}
impl<T: super::Mul<T, Output = T> + Copy> Shape<T> for Round<T> {
fn area(&self) -> T {
self.r * self.r
}
}
fn show<T>(s: &impl Shape<T>) {
s.show();
}
fn is_hello<T: AsRef<str>>(s: T) {
assert_eq!("hello", s.as_ref())
}
pub fn execute() {
let rect = Rectangle {
with: 1.0,
height: 1.0,
};
let round = Round { r: 10 };
let round1 = Round { r: 1.1 };
show(&rect);
show(&round);
show(&round1);
println!(
"rectangle area = {}, round area = {}, round1 area = {}",
rect.area(),
round.area(),
round1.area()
);
let hello = "hello";
is_hello(hello);
let hello = "hello".to_string();
is_hello(hello);
}
}
pub mod image {
#[derive(Debug)]
pub struct Point<'a> {
x: &'a f32,
y: &'a f32,
}
pub trait Pixel {
type Subpixel;
fn new(&self) -> Self::Subpixel;
}
pub struct P;
impl Pixel for P {
type Subpixel = f32;
fn new(&self) -> f32 {
1.0
}
}
impl Pixel for f32 {
type Subpixel = f32;
fn new(&self) -> f32 {
2.0
}
}
/// Iterate over pixel refs.
#[warn(dead_code)]
pub struct Pixels<'a, P: Pixel + 'a>
where
P::Subpixel: 'a,
{
chunks: &'a f32,
b: &'a P,
}
impl<'a, P: Pixel + 'a> Pixel for Pixels<'a, P>
where
P::Subpixel: 'a,
{
type Subpixel = Point<'a>;
fn new(&self) -> Point<'a> {
Point {
x: self.chunks,
y: self.chunks,
}
}
}
pub fn execute() {
let p = 2.0;
let a = Pixels {
chunks: &1.0,
b: &p,
};
println!("{:?}", a.new());
}
}
pub mod enum_struct {
/// 枚举类型
/// Option作为捕获程序失败信息,不实用panic!
pub fn div(a: i32, b: i32) -> Option<i32> {
if b == 0 {
None
} else {
Some(a / b)
}
}
#[derive(Debug)]
pub enum Why {
ZERO,
}
pub type DivResult = Result<i32, Why>;
pub fn div_r(a: i32, b: i32) -> DivResult {
if b == 0 {
Err(Why::ZERO)
} else {
Ok(a / b)
}
}
pub fn check_dev(a: i32, b: i32) -> DivResult {
match div(a, b) {
None => println!("{} / {} failed !", a, b),
Some(v) => println!("{} / {} = {}", a, b, v),
}
match div_r(a, b) {
Err(why) => println!("{} / {} failed! why = {:?}", a, b, why),
Ok(v) => println!("{} / {} = {}", a, b, v),
}
println!("div_r({},{})? = {:?}", a, b, div_r(a, b)?);
div_r(a, b)
}
pub fn execute() {
let _ = check_dev(1, 2);
let _ = check_dev(1, 0);
super::checked::op(1.0, 10.0);
}
}
mod checked {
#[derive(Debug)]
enum MathError {
DivisionByZero,
NegativeLogarithm,
NegativeSquareRoot,
}
type MathResult = Result<f64, MathError>;
fn div(x: f64, y: f64) -> MathResult {
if y == 0.0 {
Err(MathError::DivisionByZero)
} else {
Ok(x / y)
}
}
fn sqrt(x: f64) -> MathResult {
if x < 0.0 {
Err(MathError::NegativeSquareRoot)
} else {
Ok(x.sqrt())
}
}
fn ln(x: f64) -> MathResult {
if x < 0.0 {
Err(MathError::NegativeLogarithm)
} else {
Ok(x.ln())
}
}
// 中间函数
fn op_(x: f64, y: f64) -> MathResult {
// 如果 `div` “失败” 了,那么返回 `DivisionByZero`
let ratio = div(x, y)?;
// 如果 `ln` “失败” 了,那么返回 `NegativeLogarithm`
let ln = ln(ratio)?;
sqrt(ln)
}
pub fn op(x: f64, y: f64) -> String {
match op_(x, y) {
Err(why) => match why {
MathError::NegativeLogarithm => format!("logarithm of negative number"),
MathError::DivisionByZero => format!("division by zero"),
MathError::NegativeSquareRoot => format!("square root of negative number"),
},
Ok(value) => format!("{}", value),
}
}
}
pub mod oop {
use std::collections::HashMap;
pub fn execute() {
let mut screen = Screen::new();
screen.install(Box::new(Button));
screen.install(Box::new(Icon {}));
screen.install(Box::new(String::from("kkkk")));
screen.run();
}
pub trait Task {
fn run(&self) -> bool {
println!("{} run ..", self.name());
true
}
fn name(&self) -> String {
"kkk".to_string()
}
}
pub struct Timer {
pub name: String,
}
impl Task for Timer {
fn name(&self) -> String {
"timer".to_string()
}
}
pub struct Context<T: Task> {
plugs: HashMap<String, T>,
}
impl<T: Task> Context<T> {
pub fn run(&self) {
for (_, e) in &self.plugs {
e.run();
}
}
pub fn install(&mut self, plug: T) {
self.plugs.insert(plug.name(), plug);
}
}
pub trait Draw {
fn draw(&self);
}
pub struct Button;
pub struct Icon;
pub struct Screen {
pub components: Vec<Box<dyn Draw>>,
}
impl Draw for String {
fn draw(&self) {
println!("String draw");
}
}
impl Draw for Button {
fn draw(&self) {
println!("button draw");
}
}
impl Draw for Icon {
fn draw(&self) {
println!("icon draw");
}
}
impl Screen {
pub fn new() -> Self {
Screen {
components: Vec::new(),
}
}
pub fn run(&self) {
for e in self.components.iter() {
e.draw()
}
}
pub fn install(&mut self, component: Box<dyn Draw>) {
self.components.push(component)
}
}
}
pub mod async_test {
use std::{thread, time::Duration};
use futures::channel::mpsc;
use futures::executor; //standard executors to provide a context for futures and streams
use futures::executor::ThreadPool;
use futures::StreamExt;
pub fn execute() {
// let (mut main_sender, mut main_rec) = mpsc::unbounded::<i32>();
let (main_sender, main_rec) = std::sync::mpsc::channel();
let thread = thread::spawn(move || {
let pool = ThreadPool::new().expect("Failed to build pool");
let (tx, rx) = mpsc::unbounded::<i32>();
// Create a future by an async block, where async is responsible for an
// implementation of Future. At this point no executor has been provided
// to this future, so it will not be running.
let fut_values = async {
// Create another async block, again where the Future implementation
// is generated by async. Since this is inside of a parent async block,
// it will be provided with the executor of the parent block when the parent
// block is executed.
//
// This executor chaining is done by Future::poll whose second argument
// is a std::task::Context. This represents our executor, and the Future
// implemented by this async block can be polled using the parent async
// block's executor.
println!("first async block");
let fut_tx_result = async move {
println!("second async block");
loop {
thread::sleep(Duration::from_millis(1));
match main_rec.try_recv() {
Ok(_msg) => {
println!("future receive terminate !!");
tx.unbounded_send(0).expect("Failed to send");
break;
}
Err(_e) => {}
}
}
};
// Use the provided thread pool to spawn the generated future
// responsible for transmission
println!("before spawn_ok ");
pool.spawn_ok(fut_tx_result);
println!("after spawn_ok ");
let fut_values = rx.map(|v| v + 1).collect();
// Use the executor provided to this async block to wait for the
// future to complete.
fut_values.await
};
// Actually execute the above future, which will invoke Future::poll and
// subsequenty chain appropriate Future::poll and methods needing executors
// to drive all futures. Eventually fut_values will be driven to completion.
// thread::sleep(Duration::from_secs(3));
println!("before executor.");
let values: Vec<i32> = executor::block_on(fut_values);
println!("Values={:?}", values[0]);
});
let mut line = String::new();
let stdin = std::io::stdin();
use std::io::BufRead;
let _ = stdin.lock().read_line(&mut line);
main_sender.send(()).unwrap();
if let Ok(_) = thread.join() {};
println!("future thread finished!");
}
}
pub mod rc_refcell_test {
use std::cell::RefCell;
use std::rc::Rc;
use List::{Cons, Nil};
#[derive(Debug)]
enum List<'a> {
Cons(Rc<RefCell<i32>>, &'a List<'a>),
Nil,
}
pub fn execute() {
let value = Rc::new(RefCell::new(5));
let a = Cons(Rc::clone(&value), &Nil);
let b = Cons(Rc::new(RefCell::new(3)), &a);
let c = Cons(Rc::new(RefCell::new(4)), &a);
*value.borrow_mut() += 10;
println!("a after = {:?}", a);
println!("b after = {:?}", b);
println!("c after = {:?}", c);
let m = MockMessenger::new();
m.send("kkk");
}
pub trait Messenger {
fn send(&self, msg: &str);
}
struct MockMessenger {
sent_messages: Rc<RefCell<Data>>,
}
#[derive(Debug)]
struct Data {
data: Vec<String>,
}
impl Data {
fn log(&mut self) {
println!("data log = {:?}", self.data)
}
}
impl MockMessenger {
fn new() -> MockMessenger {
MockMessenger {
sent_messages: Rc::new(RefCell::new(Data { data: vec![] })),
}
}
}
impl Messenger for MockMessenger {
fn send(&self, message: &str) {
self.sent_messages
.borrow_mut()
.data
.push(String::from(message));
self.sent_messages
.borrow_mut()
.data
.push(String::from(message));
self.sent_messages.borrow_mut().log();
// let mut one_borrow = self.sent_messages.borrow_mut();
// let mut two_borrow = self.sent_messages.borrow_mut();
// one_borrow.data.push(String::from(message));
// two_borrow.data.push(String::from(message));
}
}
}
pub mod macro_test {
trait IntoSoa {
fn log(self);
}
macro_rules! soa {
($( $t: ident),*) => {
impl <$( $t),*> IntoSoa for ($( Vec<$t>, )*){
fn log(self) {
let (a, b) = self;
println!("{:?}", a.len());
println!("{:?}", b.len())
}
}
}
}
soa!(A, B);
pub fn execute() {
let a = (vec![1], vec![2]);
a.log();
let a = vec![1, 2];
a.into_iter();
let tup = (1.3, 1, 'c');
let slice: &[&dyn (::std::fmt::Display)] = &[&tup.0, &tup.1, &tup.2];
let parts: Vec<_> = slice.iter().map(|x| x.to_string()).collect();
let joined = parts.join(", ");
println!("{}", joined);
}
}
mod write_for_cursor {
use std::fs::File;
use std::io::prelude::*;
use std::io::{self, SeekFrom};
// a library function we've written
fn write_ten_bytes_at_end<W: Write + Seek>(writer: &mut W) -> io::Result<()> {
writer.seek(SeekFrom::End(-10))?;
for i in 0..10 {
writer.write(&[i])?;
}
// all went well
Ok(())
}
// Here's some code that uses this library function.
//
// We might want to use a BufReader here for efficiency, but let's
// keep this example focused.
// let mut file = File::create("foo.txt")?;
// write_ten_bytes_at_end(&mut file)?;
// now let's write a test
#[test]
fn test_writes_bytes() {
// setting up a real File is much slower than an in-memory buffer,
// let's use a cursor instead
use std::io::Cursor;
let mut buff = Cursor::new(vec![0; 15]);
write_ten_bytes_at_end(&mut buff).unwrap();
assert_eq!(&buff.get_ref()[5..15], &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
}
}
pub mod time_test {
extern crate chrono;
use chrono::prelude::DateTime;
use chrono::Utc;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
pub fn execute() {
// Creates a new SystemTime from the specified number of whole seconds
// 计算北京时间
let d = UNIX_EPOCH + Duration::from_secs(1615966582 + 3600 * 8);
// Create DateTime from SystemTime
let datetime = DateTime::<Utc>::from(d);
// Formats the combined date and time with the specified format string.
let timestamp_str = datetime.format("%Y-%m-%d %H:%M:%S").to_string();
println! {"{}",timestamp_str};
}
}
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Subject;
class SubjectController extends Controller
{
public function index(){
$data = Subject::all();
return view('subject.subject')->with('data', $data);
}
public function newForm(){
return view('subject.subject_new_form');
}
public function store(Request $request){
$newSubject = new Subject();
$newSubject->title = $request->title;
$newSubject->save();
return redirect('/subject');
}
public function editForm($id){
$subject = Subject::find($id);
return view('subject.subject_edit_form')->with('subject',$subject);
}
public function update(Request $request){
$subject = Subject::find($request->id);
$subject->title = $request->title;
$subject->save();
return redirect('/subject');
}
public function delete($id){
$subject = Subject::find($id);
$subject->delete();
return redirect('/subject');
}
}
|
require 'spec_helper'
describe LogicalQueryParser do
it 'has a version number' do
expect(LogicalQueryParser::VERSION).not_to be nil
end
it 'walks on tree' do
result = LogicalQueryParser.new.parse("aa AND bb")
LogicalQueryParser.walk_tree(result) do |node|
expect(node).to be_a_kind_of Treetop::Runtime::SyntaxNode
end
end
end
|
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;
using System.Web;
using ImageProcessor;
using Newtonsoft.Json;
using Umbraco.Core.Models;
namespace Gibe.Umbraco.IntelligentMedia.Azure
{
public class AzureVisionApi : IVisionApi
{
private readonly string _subscriptionKey;
private readonly string _region;
public AzureVisionApi(IntelligentMediaSettings settings)
{
_subscriptionKey = settings.AzureSubscriptionKey;
_region = settings.AzureRegion;
}
public async Task<IVisionResponse> MakeRequest(IMedia media)
{
var client = new HttpClient();
var queryString = HttpUtility.ParseQueryString(string.Empty);
client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", _subscriptionKey);
queryString["visualFeatures"] = "Tags,Description,Categories,Faces,Color";
queryString["language"] = "en";
var uri = $"https://{_region}.api.cognitive.microsoft.com/vision/v1.0/analyze?{queryString}";
var umbracoFileString = media.GetValue<string>("umbracoFile");
var umbracoFile = JsonConvert.DeserializeObject<UmbracoFileData>(umbracoFileString);
var byteData = GetImageAsByteArray(HttpContext.Current.Server.MapPath(umbracoFile.Src));
using (var content = new ByteArrayContent(byteData))
{
content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
return await client.PostAsync(uri, content).ContinueWith(ConvertResponse).Result;
}
}
public async Task<IVisionResponse> ConvertResponse(Task<HttpResponseMessage> httpResponse)
{
var json = await httpResponse.Result.Content.ReadAsStringAsync();
var response = JsonConvert.DeserializeObject<VisionResponse>(json);
return new AzureVisionResponse(response, json);
}
private static byte[] GetImageAsByteArray(string imageFilePath)
{
var fileStream = new FileStream(imageFilePath, FileMode.Open, FileAccess.Read);
using (var outStream = new MemoryStream())
{
using (var imageFactory = new ImageFactory())
{
imageFactory.Load(fileStream)
.Resize(new Size(1000, 1000))
.Save(outStream);
}
outStream.Position = 0;
return new BinaryReader(outStream).ReadBytes((int)outStream.Length);
}
}
public class UmbracoFileData
{
public string Src { get; set; }
}
public class VisionResponse
{
public Tag[] Tags { get; set; }
public Category[] Categories { get; set; }
public Description Description { get; set; }
public Colors Color { get; set; }
public IEnumerable<Face> Faces { get; set; }
}
public class Face
{
public int Age { get; set; }
public string Gender { get; set; }
}
public class Colors
{
public string DominantColorForeground { get; set; }
public string DominantColorBackground { get; set; }
}
public class Tag
{
public string Name { get; set; }
public decimal Confidence { get; set; }
}
public class Category
{
public string Name { get; set; }
public decimal Score { get; set; }
}
public class Description
{
public string[] Tags { get; set; }
public Caption[] Captions { get; set; }
}
public class Caption
{
public string Text { get; set; }
public decimal Confidence { get; set; }
}
}
}
|
// @flow
import "./setup";
import { app, Menu, ipcMain } from "electron";
import menu from "./menu";
import { createMainWindow, getMainWindow } from "./window-lifecycle";
import "./internal-lifecycle";
const gotLock = app.requestSingleInstanceLock();
if (!gotLock) {
app.quit();
} else {
app.on("second-instance", (event, commandLine, workingDirectory) => {
const w = getMainWindow();
if (w) {
if (w.isMinimized()) {
w.restore();
}
w.focus();
}
});
}
const showTimeout = setTimeout(() => {
const w = getMainWindow();
if (w) show(w);
}, 5000);
app.on("window-all-closed", () => {
app.quit();
});
app.on("activate", () => {
const w = getMainWindow();
if (w) {
w.focus();
}
});
app.on("ready", async () => {
if (__DEV__) {
await installExtensions();
}
Menu.setApplicationMenu(menu);
const w = await createMainWindow();
await clearSessionCache(w.webContents.session);
});
ipcMain.on("ready-to-show", () => {
const w = getMainWindow();
if (w) {
clearTimeout(showTimeout);
show(w);
}
});
async function installExtensions() {
const installer = require("electron-devtools-installer");
const forceDownload = true; // process.env.UPGRADE_EXTENSIONS
const extensions = ["REACT_DEVELOPER_TOOLS", "REDUX_DEVTOOLS"];
return Promise.all(
extensions.map(name => installer.default(installer[name], forceDownload)),
).catch(console.log);
}
function clearSessionCache(session) {
return new Promise(resolve => {
session.clearCache(resolve);
});
}
function show(win) {
win.show();
setImmediate(() => win.focus());
}
|
object exports:
class BitMap
class InkJet
class Printer {
type PrinterType
def print(bits: BitMap): Unit = ???
def status: List[String] = ???
}
class Scanner {
def scan(): BitMap = ???
def status: List[String] = ???
}
class Copier {
private val printUnit = new Printer { type PrinterType = InkJet }
private val scanUnit = new Scanner
export scanUnit.scan
export printUnit.{status => _, _}
def status: List[String] = printUnit.status ++ scanUnit.status
}
class C { type T }
object O { val c: C = ??? }
export O.c
def f: c.T = ???
class StringOps(x: String):
def *(n: Int): String = ???
def ::(c: Char) = c.toString + x
def capitalize: String = ???
extension (x: String)
def take(n: Int): String = x.substring(0, n)
def drop(n: Int): String = x.substring(n)
private def moreOps = new StringOps(x)
export moreOps.*
val s = "abc"
val t1 = (s.take(1) + s.drop(1)).capitalize * 2
val t2 = 'a' :: s
|
using System;
using DeltaEngine.Core;
using DeltaEngine.Datatypes;
using DeltaEngine.Extensions;
namespace DeltaEngine.Rendering3D.Particles
{
public struct ValueRange : Lerp<ValueRange>
{
public ValueRange(float value)
: this(value, value) {}
public ValueRange(float minimum, float maximum)
: this()
{
Start = minimum;
End = maximum;
}
public ValueRange(string rangeString)
: this()
{
float[] partitions = rangeString.SplitIntoFloats();
if(partitions.Length != 2)
throw new InvalidStringFormat();
Start = partitions[0];
End = partitions[1];
}
public class InvalidStringFormat : Exception{}
public float Start { get; set; }
public float End { get; set; }
public float GetRandomValue()
{
return Start.Lerp(End, Randomizer.Current.Get());
}
public ValueRange Lerp(ValueRange other, float interpolation)
{
return new ValueRange(Start.Lerp(other.Start, interpolation),
End.Lerp(other.End, interpolation));
}
public override string ToString()
{
return Start.ToInvariantString() + ", " + End.ToInvariantString();
}
}
}
|
using System;
using System.Net.Http;
using Bit.Owin;
using Microsoft.Owin.Testing;
using OpenQA.Selenium.Remote;
namespace Bit.Test.Server
{
public class OwinEmbeddedTestServer : TestServerBase
{
private TestServer _server;
public override void Dispose()
{
_server.Dispose();
}
public override void Initialize(string uri)
{
base.Initialize(uri);
_server = TestServer.Create<OwinAppStartup>();
}
protected override HttpMessageHandler GetHttpMessageHandler()
{
return _server.Handler;
}
public override RemoteWebDriver BuildWebDriver(RemoteWebDriverOptions options = null)
{
throw new NotSupportedException();
}
}
}
|
package dto;
import java.math.BigDecimal;
import java.util.Date;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
@ToString
@Getter
@Setter
public class Balance {
private String amountType;
private BigDecimal amount;
private String ccy;
private Date date;
private String description;
}
|
package org.json4s
private[json4s] object Segments {
private[json4s] var segmentSize: Int = ParserUtil.defaultSegmentSize
def apply(): Segment = {
Segment(new Array(segmentSize))
}
def release(s: Segment): Unit = ()
private[json4s] def clear(): Unit = ()
}
private[json4s] final case class Segment(seg: Array[Char])
|
<?php
use OpenConext\Component\EngineBlockMetadata\Entity\AbstractRole;
class EngineBlock_Corto_Module_Service_ProcessedAssertionConsumer extends EngineBlock_Corto_Module_Service_Abstract
{
public function serve($serviceName)
{
$response = $this->_server->getBindingsModule()->receiveResponse();
$receivedRequest = $this->_server->getReceivedRequestFromResponse($response);
if ($receivedRequest->getKeyId()) {
$this->_server->setKeyId($receivedRequest->getKeyId());
}
$remainingProcessingEntities = &$_SESSION['Processing'][$receivedRequest->getId()]['RemainingEntities'];
// @todo check if this is the correct place to flush log
// Flush log if SP or IdP has additional logging enabled
$sp = $this->_server->getRepository()->fetchServiceProviderByEntityId($receivedRequest->getIssuer());
$idp = $this->_server->getRepository()->fetchIdentityProviderByEntityId($response->getOriginalIssuer());
if (EngineBlock_SamlHelper::doRemoteEntitiesRequireAdditionalLogging(array($sp, $idp))) {
$application = EngineBlock_ApplicationSingleton::getInstance();
$application->flushLog('Activated additional logging for the SP or IdP');
$log = $application->getLogInstance();
$log->info('Raw HTTP request', array('http_request' => (string) $application->getHttpRequest()));
}
if (!empty($remainingProcessingEntities)) { // Moar processing!
/** @var AbstractRole $nextProcessingEntity */
$nextProcessingEntity = array_shift($remainingProcessingEntities);
$this->_server->setProcessingMode();
$newResponse = $this->_server->createEnhancedResponse($receivedRequest, $response);
// Change the destiny of the received response
$newResponse->setId($response->getId());
$newResponse->setDestination($nextProcessingEntity->responseProcessingService->location);
$newResponse->setDeliverByBinding($nextProcessingEntity->responseProcessingService->binding);
$newResponse->setReturn($this->_server->getUrl('processedAssertionConsumerService'));
$this->_server->getBindingsModule()->send($newResponse, $nextProcessingEntity);
return;
}
else { // Done processing! Send off to SP
$response->setDestination($_SESSION['Processing'][$receivedRequest->getId()]['OriginalDestination']);
$response->setDeliverByBinding($_SESSION['Processing'][$receivedRequest->getId()]['OriginalBinding']);
$response->setOriginalIssuer($_SESSION['Processing'][$receivedRequest->getId()]['OriginalIssuer']);
$this->_server->unsetProcessingMode();
$sentResponse = $this->_server->createEnhancedResponse($receivedRequest, $response);
$this->_server->sendResponseToRequestIssuer($receivedRequest, $sentResponse);
return;
}
}
}
|
require 'mgnu/sequence'
module MgNu
class Genbank
class Location
InvalidLocation = Class.new(StandardError)
LocationWithRemoteAccession = Class.new(StandardError)
BASERANGE_REGEX = /
(?<complement>complement)?\(?
(?<remote_accession>[A-Z\d\.]+:)?
(?<start_continues><)?
(?<start>\d+)\.\.
(?<stop_continues>>)?
(?<stop>\d+)
\)?/x
attr_accessor :raw_value, :start, :stop, :start_continues, :stop_continues
attr_accessor :complement, :type, :parts
# create a new Location object
def initialize(raw_value)
@raw_value = raw_value.gsub(/\s/, '')
parse_raw_value
end
# parsing the location from a loc line
def parse_raw_value
case raw_value
when /^complement\(join\((.+)\)/
@type = 'complement_with_join'
@complement = true
@parts = Regexp.last_match[1].split(/,/)
set_properties_for_join_types
when /^(?:join|order)\((.+)\)/
@type = 'join'
@parts = Regexp.last_match[1].split(/,/)
set_properties_for_join_types
when BASERANGE_REGEX
@type = 'standard'
set_basic_properties(raw_value)
when /^(\d+)\.(\d+)$/
@type = 'between_range'
@start, @stop = Regexp.last_match[1].to_i, Regexp.last_match[2].to_i
when /^(\d+)^(\d+)$/
@type = 'between_adjoining'
@start, @stop = Regexp.last_match[1].to_i, Regexp.last_match[2].to_i
when /^(complement)?\(?(\d+)\)?$/
@type = 'single'
@complement = !!Regexp.last_match[1]
@start = Regexp.last_match[2].to_i
else
fail InvalidLocation, 'This is not a valid Genbank location'
end
end
def set_properties_for_join_types
non_remote = parts.select { |part| part !~ /[A-Z\d\.]+:/ }
if non_remote.length == 1
set_basic_properties(non_remote.first)
else
@complement ||= !!(non_remote.first =~ /complement/)
# sets start and stop based on first and last non remote part, taking into account complement strand
stop_match, start_match = nil
if complement
stop_match = /(?<stop_continues><)?(?<stop>\d+)/.match(non_remote.first)
start_match = /\.\.(?<start_continues>>)?(?<start>\d+)/.match(non_remote.last)
else
start_match = /(?<start_continues><)?(?<start>\d+)/.match(non_remote.first)
stop_match = /\.\.(?<stop_continues>>)?(?<stop>\d+)/.match(non_remote.last)
end
@start = start_match[:start].to_i
@stop = stop_match[:stop].to_i
@start_continues = start_match[:start_continues]
@stop_continues = stop_match[:stop_continues]
end
end
def set_basic_properties(str)
md = BASERANGE_REGEX.match(str)
@complement ||= !!md[:complement]
@remote_accession = md[:remote_accession]
# start/stop continues takes into account the complement strand
@start = complement ? md[:stop].to_i : md[:start].to_i
@stop = complement ? md[:start].to_i : md[:stop].to_i
@start_continues = complement ? !!md[:stop_continues] : !!md[:start_continues]
@stop_continues = complement ? !!md[:start_continues] : !!md[:stop_continues]
end
def get_sequence(seq)
s = case type
when 'complement_with_join', 'join'
str = buidup_sequence_from_parts(seq)
str ? build_sequence(str) : nil
when 'standard'
if complement
build_sequence(seq[stop - 1 .. start - 1])
else
build_sequence(seq[start - 1 .. stop - 1])
end
when 'single'
build_sequence(seq[start - 1])
else
return nil
end
if s && ((%w(single standard).include?(type) && complement) || type == 'complement_with_join')
s.reverse_complement!
end
s
end
def buidup_sequence_from_parts(seq)
to_be_joined = ''
parts.each do |part|
md = BASERANGE_REGEX.match(part)
if md[:remote_accession]
return nil
else
temp = seq[md[:start].to_i - 1 .. md[:stop].to_i - 1]
temp.tr!('actg', 'tgac').reverse! if md[:complement]
to_be_joined += temp
end
end
to_be_joined
end
def build_sequence(str)
MgNu::Sequence.new(:value => str, :type => 'dna')
end
# string representation
def to_s
max = 79 - 21 # max length of location line
out = ''
if raw_value.length > max
split_str = raw_value.scan(/(.{1,#{max}})(,|$)/)
out += (split_str[0].first + split_str[0].last)
split_str[1 .. - 1].each do |a, b|
out << ("\n" + ' ' * 21 + a)
out << b unless b.empty?
end
else
out << raw_value
end
out
end
end # end of MgNu::Parser::Genbank::Location
end # end of MgNu::Parser::Genbank class
end # end of MgNu module
|
#
# autorake/configure.rb -- Configure scripts
#
require "autorake/directories"
require "yaml"
module Autorake
class Configuration
CONFIG_FILE = ".configure"
attr_reader :directories
attr_reader :features, :parameters
attr_reader :incdirs, :headers, :macros, :libdirs, :libs
def initialize environment, directories
@environment = {}
@environment.update environment
@directories = directories
@features = {}
@parameters = {}
@incdirs = []
@headers = []
@macros = {}
@libdirs = []
@libs = []
end
def do_env
@environment.each { |k,v| ENV[ k] = v }
end
def dump
puts "Environment:"
@environment.each { |k,v| puts " #{k}=#{v}" }
puts "Directories:"
@directories.keys.each { |k| puts " #{k}=#{@directories.expanded k}" }
puts "Features:"
@features.each { |k,v| puts " #{k}=#{v}" }
puts "Parameters:"
@parameters.each { |k,v| puts " #{k}=#{v}" }
puts "Include directories:"
@incdirs.each { |d| puts " #{d}" }
puts "Header files:"
@headers.each { |h| puts " #{h}" }
puts "C Macros:"
@macros.each { |k,v| puts " #{k}=#{v}" }
puts "Library directories:"
@libdirs.each { |d| puts " #{d}" }
puts "Libraries:"
@libs.each { |l| puts " #{l}" }
end
end
end
|
@rule Transition(:out, Marginalisation) (q_in::Categorical, q_a::MatrixDirichlet) = begin
a = clamp.(exp.(logmean(q_a) * probvec(q_in)), tiny, Inf)
return Categorical(a ./ sum(a))
end
@rule Transition(:out, Marginalisation) (m_in::Categorical, q_a::MatrixDirichlet) = begin
a = clamp.(exp.(logmean(q_a)) * probvec(m_in), tiny, Inf)
return Categorical(a ./ sum(a))
end
|
package groupnet.recomposition
import groupnet.euler.AbstractZone
import groupnet.euler.Description
import groupnet.euler.L
import groupnet.euler.Label
/**
* Single recomposition step.
* A valid step has following features:
*
* 1. The added contour data must be a single curve
* 2. Added curve must NOT be present in previous description and must be present in the next one
*
* Note: number of steps == number of curves.
*
* @author Almas Baimagambetov (almaslvl@gmail.com)
*/
class RecompositionStep(
/**
* @return description before this step
*/
val from: Description,
/**
* @return description after this step
*/
val to: Description,
/**
* The curve added at this step.
*/
val newLabel: Label,
/**
* The zones we split at this step. The zones are
* in the "from" abstract description.
*/
val splitZones: Set<AbstractZone>) {
// for GND cycle selection strategy
var connections: Map<Pair<AbstractZone, AbstractZone>, Int>? = null
init {
val l = newLabel
require(l !in L(from)) { "$l is already present in $from" }
require(l in L(to)) { "$l is not present in $to" }
}
override fun toString() = "R_Step[added=$newLabel,split=$splitZones,From=$from To=$to]"
/**
* @return true iff the added curve is nested (splits 1 zone)
*/
fun isNested() = splitZones.size == 1
fun isMaybeSinglePiercing() = splitZones.size == 2
fun isMaybeDoublePiercing() = splitZones.size == 4
fun isNotPiercing() = splitZones.size > 4
}
|
import 'package:cobble/domain/calendar/calendar_pin_convert.dart';
import 'package:cobble/domain/calendar/calendar_syncer.db.dart';
import 'package:cobble/domain/connection/connection_state_provider.dart';
import 'package:cobble/domain/db/dao/timeline_pin_dao.dart';
import 'package:cobble/domain/entities/pebble_device.dart';
import 'package:cobble/domain/logging.dart';
import 'package:cobble/domain/timeline/watch_timeline_syncer.dart';
import 'package:cobble/infrastructure/datasources/preferences.dart';
import 'package:cobble/infrastructure/pigeons/pigeons.g.dart';
import 'package:cobble/util/container_extensions.dart';
import 'package:flutter/widgets.dart';
import 'package:hooks_riverpod/all.dart';
import 'actions/master_action_handler.dart';
void main_background() {
WidgetsFlutterBinding.ensureInitialized();
BackgroundReceiver();
}
class BackgroundReceiver implements CalendarCallbacks, TimelineCallbacks {
final container = ProviderContainer();
CalendarSyncer calendarSyncer;
WatchTimelineSyncer watchTimelineSyncer;
Future<Preferences> preferences;
TimelinePinDao timelinePinDao;
MasterActionHandler masterActionHandler;
ProviderSubscription<WatchConnectionState> connectionSubscription;
BackgroundReceiver() {
init();
}
void init() async {
await BackgroundControl().notifyFlutterBackgroundStarted();
calendarSyncer = container.listen(calendarSyncerProvider).read();
watchTimelineSyncer = container.listen(watchTimelineSyncerProvider).read();
timelinePinDao = container.listen(timelinePinDaoProvider).read();
preferences = Future.microtask(() async {
final asyncValue =
await container.readUntilFirstSuccessOrError(preferencesProvider);
return asyncValue.data.value;
});
masterActionHandler = container.read(masterActionHandlerProvider);
connectionSubscription = container.listen(
connectionStateProvider.state,
mayHaveChanged: (sub) {
final currentConnectedWatch = sub.read().currentConnectedWatch;
if (isConnectedToWatch() && currentConnectedWatch.name.isNotEmpty) {
onWatchConnected(currentConnectedWatch);
}
},
);
CalendarCallbacks.setup(this);
TimelineCallbacks.setup(this);
}
@override
Future<void> doFullCalendarSync() async {
await calendarSyncer.syncDeviceCalendarsToDb();
await syncTimelineToWatch();
}
void onWatchConnected(PebbleDevice watch) async {
final lastConnectedWatch =
(await preferences).getLastConnectedWatchAddress();
if (lastConnectedWatch != watch.address) {
Log.d("Different watch connected than the last one. Resetting DB...");
await watchTimelineSyncer.clearAllPinsFromWatchAndResync();
} else if (watch.isUnfaithful) {
Log.d("Connected watch has beein unfaithful (tsk, tsk tsk). Reset DB...");
await watchTimelineSyncer.clearAllPinsFromWatchAndResync();
} else {
await syncTimelineToWatch();
}
(await preferences).setLastConnectedWatchAddress(watch.address);
}
Future syncTimelineToWatch() async {
if (isConnectedToWatch()) {
await watchTimelineSyncer.syncPinDatabaseWithWatch();
}
}
bool isConnectedToWatch() {
return connectionSubscription.read().isConnected;
}
@override
Future<void> deleteCalendarPinsFromWatch() async {
await timelinePinDao.markAllPinsFromAppForDeletion(calendarWatchappId);
await syncTimelineToWatch();
}
@override
Future<ActionResponsePigeon> handleTimelineAction(ActionTrigger arg) async {
return (await masterActionHandler.handleTimelineAction(arg)).toPigeon();
}
}
|
import {CommonModule} from '@angular/common';
import {NgModule} from '@angular/core';
import {RouterModule, Routes} from '@angular/router';
import {HomepageComponent} from './main/homepage.component';
const routes: Routes = [
{path: '', redirectTo: '/main', pathMatch: 'full'},
{path: 'main', component: HomepageComponent},
{path: 'demo', loadChildren: './demo/demo.module#DemoModule'},
{path: 'doc', loadChildren: './doc/doc.module#DocModule'},
];
@NgModule({
exports: [
RouterModule,
],
imports: [
CommonModule,
RouterModule.forRoot(routes),
],
declarations: [],
})
export class AppRoutingModule {
}
|
require "rails_helper"
RSpec.describe SendArticleUpdateJob do
it "sends an ArticleMailer" do
article = create(:article)
user = create(:user)
mailer = double("ArticleMailer", deliver: true)
expect(ArticleMailer).to receive(:send_updates_for)
.with(article, user).and_return(mailer)
described_class.perform_now(article.id, user.id)
end
end
|
module Crypto.Hash.Equihash
( Proof(..)
, checkProof
, findProof
) where
import Control.Exception
import Data.Vector.Storable (Vector)
import qualified Data.Vector.Storable as V
import Foreign.C.Types
import Foreign.ForeignPtr
import Foreign.Marshal.Alloc
import Foreign.Ptr
import Foreign.Storable
import Crypto.Hash.Equihash.Bindings
data Proof = Proof
{ proof_n :: CUInt
, proof_k :: CUInt
, proof_seed :: CUInt
, proof_nonce :: CUInt
, proof_inputs :: Vector CUInt
}
type CProof = Ptr ()
checkProof :: Proof -> IO Bool
checkProof (Proof n k seed nonce inputs) = V.unsafeWith inputs $ \ptr -> do
retval <- bracket (c_equihash_create_proof n k seed nonce l ptr)
c_equihash_destroy_proof
c_equihash_check_proof
return $! retval /= 0
where
l = fromIntegral (V.length inputs)
explodeProof :: CProof -> IO Proof
explodeProof p =
mask_ $
alloca $ \nptr ->
alloca $ \kptr ->
alloca $ \seedptr ->
alloca $ \nonceptr ->
alloca $ \sizeptr -> do
arr <- c_equihash_explode_proof p nptr kptr seedptr nonceptr sizeptr
arrptr <- newForeignPtr finalizerFree arr
n <- peek nptr
k <- peek kptr
seed <- peek seedptr
nonce <- peek nonceptr
sz <- peek sizeptr
let arrV = V.unsafeFromForeignPtr0 arrptr (fromIntegral sz)
return $! Proof n k seed nonce arrV
findProof :: CUInt -> CUInt -> CUInt -> IO Proof
findProof n k seed =
bracket (c_equihash_find_proof n k seed)
c_equihash_destroy_proof
explodeProof
|
module Horbits.Rematch where
import Control.Lens (Getting, (^.))
import Control.Rematch
import Control.Rematch.Formatting
import Control.Rematch.Run
import Test.QuickCheck
has :: (String, Getting a s a) -> Matcher a -> (String, Matcher s)
has (n, g) m = (n, on m ((^. g), n))
always :: Bool -> Matcher a
always b = Matcher (const b) ("const" ++ show b) (const $ "always " ++ show b)
allOf' :: [(String, Matcher a)] -> Matcher a
allOf' nm = Matcher {
match = and . fmap isMatch . matches,
description = describeList "all" $ map (description . snd) nm,
describeMismatch = join "," . foldr consMatch [] . matches
}
where
matches a = fmap (\ (n, m) -> (n, match m a, describeMismatch m a)) nm
consMatch (_, True, _) = id
consMatch (n, False, f) = ((n ++ " " ++ f) :)
isMatch (_, x, _) = x
matcherProperty :: Matcher a -> a -> Property
matcherProperty m x = case runMatch m x of MatchSuccess -> property True
MatchFailure f -> counterexample f $ property False
shouldBe :: a -> Matcher a -> Property
shouldBe = flip matcherProperty
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ApplicativeDo #-}
module Crik.Commands.Library.Info
(
libraryInfoCommandParser
) where
import Control.Applicative ((<|>))
import Data.Semigroup ((<>))
import Options.Applicative
import Crik.Commands.Library.Types
libraryInfoCommandParser :: Mod CommandFields LibrarySubCommand
libraryInfoCommandParser =
command "library-info" (info (LibraryInfo <$> libraryInfoParser)
(progDesc ("Info about a library")))
libraryInfoParser :: Parser (LibraryInfoOptions)
libraryInfoParser = do
name <-
(strOption $
long "name" <>
metavar "<name>" <>
help "library name") <|>
(strArgument $
metavar "<name>" <>
help "library name")
return $ LibraryInfoOptions name
|
package com.example.mdoming15.campusalma;
import android.content.Context;
import android.content.res.AssetManager;
import android.database.Cursor;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CursorAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.example.mdoming15.campusalma.Model.DAO;
import com.example.mdoming15.campusalma.Model.Etablissement;
import com.example.mdoming15.campusalma.R;
import java.io.IOException;
import java.io.InputStream;
/**
* Created by Wiiki on 22-11-15.
*/
class EtablissementCursorAdapter extends CursorAdapter {
public EtablissementCursorAdapter(Context context, Cursor c, boolean autoRequery) {
super(context, c, autoRequery);
}
@Override
public View newView(Context context, Cursor cursor, ViewGroup parent) {
View convertView = LayoutInflater.from(context)
.inflate(R.layout.layout_item_etablissement_listview, parent, false);
return configureView(context, convertView, cursor);
}
@Override
public void bindView(View view, Context context, Cursor cursor) {
configureView(context, view, cursor);
}
/**
* Construit une vue sur base du layout joueur_list_view_item et
* de l'élément courant du curseur
* @param context
* @param convertView
* @param cursor
* @return
*/
public View configureView(Context context, View convertView, Cursor cursor) {
Etablissement etablissement = DAO.getEtablissementFromCursor(cursor);
TextView nomE = (TextView)convertView.findViewById(R.id.item_nom_etablissement);
TextView noteE = (TextView)convertView.findViewById(R.id.item_note_etablissement);
//ImageView imageNote = (ImageView)convertView.findViewById(R.id.image_item);
nomE.setText(etablissement.getNom());
//nomE.setTextColor(Color.WHITE);et
noteE.setText(Integer.toString(etablissement.getNote()));
//noteE.setTextColor(Color.WHITE);
/* Prenons les images dans les assets
if (etablissement.getImage() != null){
AssetManager assetManager = context.getAssets();
try{
InputStream is = context.getAssets().open(etablissement.getImage());
Drawable image = Drawable.createFromStream(is, null); // createFromPath existe aussi
imageNote.setImageDrawable(image);
} catch (IOException e) {
// Ne rien faire, à la limitte, un log
e.printStackTrace();
}
}
else{
imageNote.setImageDrawable(null);
}
*/
return convertView;
}
}
|
using MemorizingWordsV2.Domain.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace MemorizingWordsV2.Infrastructure.Configurations
{
public class EnglishWordConfiguration : IEntityTypeConfiguration<EnglishWord>
{
public void Configure(EntityTypeBuilder<EnglishWord> entity)
{
entity.HasIndex(e => e.Word, "UQ__EnglishW__839740545E8AC8FF")
.IsUnique();
entity.Property(e => e.Id).HasColumnName("id");
entity.Property(e => e.CreatedAt)
.HasPrecision(0)
.HasColumnName("created_at");
entity.Property(e => e.PartOfSpeechId).HasColumnName("part_of_speech_id");
entity.Property(e => e.Word)
.IsRequired()
.HasMaxLength(50)
.IsUnicode(false)
.HasColumnName("word");
entity.HasOne(d => d.PartOfSpeech)
.WithMany(p => p.EnglishWords)
.HasForeignKey(d => d.PartOfSpeechId)
.OnDelete(DeleteBehavior.SetNull)
.HasConstraintName("FK__EnglishWo__part___276EDEB3");
}
}
}
|
#!/bin/env bash
INSTALL_BASE="/opt"
INSTALL_DIR="${INSTALL_BASE}/KSP_linux"
BASE_DIR="${HOME}/KSPSetup"
SCRIPT_DIR="${BASE_DIR}/scripts"
CKAN_LISTS="${BASE_DIR}/config"
FILES_DIR="${BASE_DIR}/files"
# Remove old installations
${SCRIPT_DIR}/header.sh "Removing old installations."
${SCRIPT_DIR}/rm_old_ksp.sh $INSTALL_DIR
sleep 3
# Unpack and setup KSP.
${SCRIPT_DIR}/header.sh "Installing KSP."
${SCRIPT_DIR}/install_ksp.sh ${FILES_DIR} ${INSTALL_BASE} ${INSTALL_DIR}
sleep 3
# Install and setup ckan
${SCRIPT_DIR}/header.sh "Installing and upgrading ckan."
${SCRIPT_DIR}/install_ckan.sh ${FILES_DIR} ${INSTALL_DIR}
# Install graphics mods.
${SCRIPT_DIR}/header.sh "Installing KSPRC."
${SCRIPT_DIR}/install_ksprc.sh ${INSTALL_DIR} ${FILES_DIR}
# Add basic mods.
${SCRIPT_DIR}/header.sh "Installing basic mods."
${SCRIPT_DIR}/install_mod.sh ${CKAN_LISTS} ${INSTALL_DIR} basic.ckan
# Add parts mods.
${SCRIPT_DIR}/header.sh "Installing parts mods."
${SCRIPT_DIR}/install_mod.sh ${CKAN_LISTS} ${INSTALL_DIR} parts.ckan
# Add control mods.
${SCRIPT_DIR}/header.sh "Installing control mods."
${SCRIPT_DIR}/install_mod.sh ${CKAN_LISTS} ${INSTALL_DIR} control.ckan
# Add control mods.
${SCRIPT_DIR}/header.sh "Installing graphics mods."
${SCRIPT_DIR}/install_mod.sh ${CKAN_LISTS} ${INSTALL_DIR} graphics.ckan
|
module Evaluator where
import Syntax
import Language
import PrettyPrinter
import Utils
import Compiler
import Data.Map as Map
import Data.List as List
import Control.Monad.State
import Text.PrettyPrint as PP
-- | Evaluates compiled program from initial state and returns all states.
eval :: TiState -> ([TiState], TiState)
eval st = (st : sts, finalSt)
where
(sts, finalSt)
| isFinalSt st = ([], st)
| otherwise = eval (step st)
isFinalSt :: TiState -> Bool
isFinalSt st
| dump st == []
&& (length (stack st) == 1) = or (List.map ($ stackNode) [isNumNode, isDataNode])
| stack st == [] = True
| otherwise = False
where
stackNode = hLookup (heap st) (head $ stack st)
isNumNode :: Node a -> Bool
isNumNode node = case node of
NNum n -> True
otherwise -> False
isDataNode :: Node a -> Bool
isDataNode node = case node of
NData tag arity -> True
otherwise -> False
-------- TRANSITIONS ------
-- | Performs state transition
step :: TiState -> TiState
step st = case hLookup (heap st) (head $ stack st) of
NAp a1 a2 -> apStep st a1 a2
NSupercomb sc args body -> scStep st sc args body
NNum n -> numStep st n
NInd a -> indStep st a
NPrim name prim -> primStep st prim
NData tag addrs -> dataStep st
------- DATA NODE TRANSITION -----------
dataStep :: TiState -> TiState
dataStep st
| length (stack st) == 1
&& length (dump st) > 0 = st {stack = head (dump st), dump = drop 1 (dump st)}
| otherwise = error "A data node has been applied as a function."
------- PRIMITIVE NODE TRANSITION ----------
primStep :: TiState -> Primitive -> TiState
primStep st prim = case prim of
Add -> primDyadic st (arithFunc (+))
Sub -> primDyadic st (arithFunc (-))
Mul -> primDyadic st (arithFunc (*))
Div -> primDyadic st (arithFunc (div))
LT_Prim -> primDyadic st (boolFunc (<))
LEQ_Prim -> primDyadic st (boolFunc (<=))
EQ_Prim -> primDyadic st (boolFunc (==))
GEQ_Prim -> primDyadic st (boolFunc (>=))
GT_Prim -> primDyadic st (boolFunc (>))
Neg -> primArith' st ((-) 0)
Cond -> primIf st
PrimConstr tag arity -> primConstr st tag arity
where
arithFunc f = (\(NNum x1) (NNum x2) -> NNum (x1 `f` x2))
boolFunc f = (\(NNum x1) (NNum x2) -> NData (cmp f x1 x2) [])
-- Assigns appropriate tag for boolean constructor.
cmp f a b
| f a b = 2
| otherwise = 1
primIf :: TiState -> TiState
primIf st = st {stack = stack', heap = heap', dump = dump'}
where
redexRootAddr = (head . drop 3) (stack st)
condAddr = getArgAddr (heap st) ((head . drop 1) (stack st))
condNode = hLookup (heap st) condAddr
(stack', dump', heap')
| isDataNode condNode
= (drop 3 (stack st), dump st, hUpdate (heap st) redexRootAddr (getResNode condNode (stack st) (heap st)))
| otherwise
= ([condAddr], (drop 1 (stack st)) : (dump st), heap st)
where
getApAddr stack tag
| tag == 1 = (head . drop 3) stack -- False
| otherwise = (head . drop 2) stack -- True
getResNode (NData tag _) stack heap
= (NInd . getArgAddr heap . getApAddr stack) tag
-- Checks that constructor is given enough arguments, and if so, it builds a
-- structured data object in the heap.
primConstr :: TiState -> Int -> Int -> TiState
primConstr st tag arity = st {stack = stack', heap = heap'}
where
stack' = drop arity (stack st)
redexRootAddr = head stack' -- should never result in exception
argAddrs = Prelude.map (getArgAddr (heap st)) (take arity . drop 1 $ stack st)
heap' = hUpdate (heap st) redexRootAddr (NData tag argAddrs)
-- | Pre: At least 3 addresses on stack with corresponding nodes in heap.
primDyadic :: TiState -> (Node Name -> Node Name -> Node Name) -> TiState
primDyadic st f = st {stack = stack', dump = dump', heap = heap'}
where
redexRootAddr = (stack st) !! 2
argAddrs = Prelude.map (getArgAddr (heap st)) (take 2 . drop 1 $ stack st)
argNodes = Prelude.map (hLookup (heap st)) argAddrs
(stack', dump', heap') = reduceArgNodes argNodes
where
stack'' = drop (length argNodes) (stack st)
reduceArgNodes []
= let res = f (argNodes !! 0) (argNodes !! 1)
in (stack'', dump st, hUpdate (heap st) redexRootAddr res)
reduceArgNodes (x:xs)
| isNumNode x = reduceArgNodes xs
--
| otherwise = ([argAddr'], dump'', heap st)
where
(argAddr':argAddrs') = drop (length argAddrs - length (x:xs)) argAddrs
dump'' = List.foldr (\addr acc -> [addr] : acc) (stack'' : (dump st)) argAddrs'
primArith' :: TiState -> (Int -> Int) -> TiState
primArith' st f = st {stack = stack', dump = dump', heap = heap'}
where
redexRootAddr = (stack st) !! 1
argAddr = getArgAddr (heap st) redexRootAddr
argNode = hLookup (heap st) argAddr
(stack', dump', heap')
| isNumNode argNode = (stack'', dump st, hUpdate (heap st) redexRootAddr (NNum res))
| otherwise = ([argAddr], stack'' : (dump st), heap st)
where
stack'' = drop 1 (stack st)
res = let (NNum x) = argNode in f x
---- NUMBER NODE TRANSITION -----
numStep :: TiState -> Int -> TiState
numStep st n
| length (stack st) == 1
&& (dump st /= []) = st {stack = head (dump st), dump = tail (dump st)}
| otherwise = error "A number has been applied as a function."
------- INDIRECTION NODE TRANSITION --------
indStep :: TiState -> Addr -> TiState
indStep st a = st {stack = a : drop 1 (stack st)}
------- APPLICATION NODE TRANSITION ---------
-- We reduce to the left since arguments of supercombinators
-- are always on the right-hand side of an application node.
apStep :: TiState -> Addr -> Addr -> TiState
apStep st a1 a2 = st {stack = stack', heap = heap'}
where
apRootAddr = head $ stack st
(stack', heap') = case hLookup (heap st) a2 of
NInd x -> (stack st, hUpdate (heap st) apRootAddr (NAp a1 x))
otherwise -> (a1 : (stack st), heap st)
------- SUPERCOMBINATOR NODE TRANSITION ---------
scStep :: TiState -> Name -> [Name] -> CoreExpr -> TiState
scStep st sc args body = st {stack = stack', heap = heap'}
where
redexRoot = (stack st) !! (length args)
heap' = instantiateAndUpdate body redexRoot (heap st) env
stack' = drop (length args) (stack st)
env = Map.union (Map.fromList argBindings) (globals st)
-- Bind argument names to addresses obtained from the stack and heap.
argBindings = zip args (getArgAddrs (heap st) (stack st))
where
getArgAddrs :: TiHeap -> TiStack -> [Addr]
getArgAddrs heap (scNameAddr : stack) = Prelude.map (getArgAddr heap) stack
getArgAddr :: TiHeap -> Addr -> Addr
getArgAddr heap addr = let (NAp funcAddr argAddr) = hLookup heap addr in argAddr
instantiateAndUpdate :: CoreExpr -> Addr -> TiHeap -> TiGlobals -> TiHeap
instantiateAndUpdate (EAp expr1 expr2) updAddr heap env
= hUpdate heap2 updAddr (NAp a1 a2)
where
(heap1, a1) = instantiate expr1 heap env
(heap2, a2) = instantiate expr2 heap1 env
instantiateAndUpdate (EBinApp binop expr1 expr2) updAddr heap env
= instantiateAndUpdate expr updAddr heap env
where
expr = EAp (EAp ((EVar . show) binop) expr1) expr2
instantiateAndUpdate (ENum n) updAddr heap env
= hUpdate heap updAddr (NNum n)
-- If the body is simply a variable, bound to the node stored at the variable's
-- address in the environment, then we should simply update the redex root with
-- an indirection to this node (or rather, the address of this node).
instantiateAndUpdate (EVar v) updAddr heap env
= case Map.lookup v env of
Nothing -> error $ "Undefined reference to variable: " ++ (show v)
Just varAddr -> hUpdate heap updAddr (NInd varAddr)
instantiateAndUpdate (ELet is_rec defns expr) updAddr heap env
= instantiateAndUpdate expr updAddr heap'' env''
where
(heap'', env'')
= instantiateDefs heap env defns
instantiateDefs heap env []
= (heap, env)
instantiateDefs heap env ((name, expr) : xs)
= let
{ (heap', defnAddr) = instantiate expr heap env
; env' = Map.insert name defnAddr env
} in instantiateDefs heap' env' xs
-- Takes an expression, heap and environment associating names to addresses and
-- creates an instance of the expression on the heap, returning the root of this
-- instance. This function performs the necessary expression reduction on the
-- graph.
instantiate :: CoreExpr -> TiHeap -> TiGlobals -> (TiHeap, Addr)
instantiate (ENum n) heap env = hAlloc heap (NNum n)
instantiate (EVar v) heap env
= case Map.lookup v env of
Nothing -> error $ "Undefined reference to variable: " ++ (show v)
Just a -> (heap, a)
instantiate (EAp expr1 expr2) heap env = hAlloc heap2 (NAp a1 a2)
where
(heap1, a1) = instantiate expr1 heap env
(heap2, a2) = instantiate expr2 heap1 env
instantiate (EBinApp binop expr1 expr2) heap env = instantiate expr heap env
where
expr = EAp (EAp ((EVar . show) binop) expr1) expr2
instantiate (EConstr tag arity) heap env
= instantiateConstr tag arity heap env
instantiate (ELet is_rec defns expr) heap env
= instantiateLet defns expr heap env
instantiate e heap env
= error "Can't instantiate binary expressions, case expressions or lambda expressions."
instantiateConstr :: Int -> Int -> TiHeap -> TiGlobals -> (TiHeap, Addr)
instantiateConstr tag arity heap env
= hAlloc heap (NPrim ("Constr" ++ (show tag)) (PrimConstr tag arity))
instantiateLet :: [CoreDefn] -> CoreExpr -> TiHeap -> TiGlobals -> (TiHeap, Addr)
instantiateLet defns expr heap env
= instantiate expr heap'' env''
where
(heap'', env'') = instantiateDefs heap env defns
instantiateDefs heap env [] = (heap, env)
instantiateDefs heap env ((name, expr):xs) = let {
(heap', defnAddr) = instantiate expr heap env'; -- Mutually recursive, to deal with recursive let bindings.
env' = Map.insert name defnAddr env;
} in instantiateDefs heap' env' xs
------------------------------ FOR SHOWING RESULTS -----------------------------
showResults :: [TiState] -> IO ()
showResults = print . PP.vcat . List.map showState
-- We only show the stack.
showState :: TiState -> Doc
showState (TiState stack dump heap globals stats)
= showStack heap stack <+> (PP.text ((show . length) dump)) <> (PP.text "\n")
showStack :: TiHeap -> TiStack -> Doc
showStack heap stack
= hcat
[ PP.text "Stack:\n"
, (PP.brackets . PP.nest 4 . PP.hcat) (PP.punctuate (PP.text "\n") (List.map showStackItem stack))
]
where
showStackItem addr
= PP.hcat [showAddr addr , PP.text ": ", showStackNode heap (hLookup heap addr)]
showAddr :: Addr -> Doc
showAddr addr = PP.text ("#" ++ (show addr))
-- We show the value of the value of the argument node and not the
-- function node
showStackNode :: Heap (Node a) -> Node a -> Doc
showStackNode heap (NAp func_addr arg_addr)
= PP.hsep
[ PP.text "NAp"
, showAddr func_addr
, showAddr arg_addr
, (PP.parens . showNode) (hLookup heap arg_addr)
]
showStackNode heap node = showNode node
-- Shows the value of a node
showNode :: Node a -> Doc
showNode (NAp a1 a2)
= PP.hsep [PP.text "NAp", showAddr a1, showAddr a2]
showNode (NSupercomb name args body)
= PP.hsep [PP.text "NSupercomb", PP.text name]
showNode (NNum n)
= PP.hsep [PP.text "NNum", PP.int n]
showNode (NInd a)
= PP.hsep [PP.text "NInd", showAddr a]
showNode (NPrim name prim)
= PP.hsep [PP.text "NPrim", PP.text name]
showNode (NData tag dataAddrs)
= PP.hsep
[ PP.text "NData"
, PP.int tag
, (PP.brackets . PP.hsep) (PP.punctuate PP.comma (List.map showAddr dataAddrs))
]
|
# react-native-copy-image
Copy images for Android and iOS in React Native, use image from local project.
## Install
- Create folder __scripts/__ from root project.
```bash
mkdir scripts
```
- Clone project:
```bash
cd scripts
git clone https://github.com/tuantvk/react-native-copy-image.git;
cd react-native-copy-image;rm -rf .git;
```
## Usage
Make sure folder images in __rootProject/assets/images/__
Add script in __package.json__
```json
"scripts": {
....
"copy-image": "bash scripts/react-native-copy-image/copy_image.sh" // add here
}
```
Finally, from root project run:
```bash
yarn run copy-image
# or
# npm run copy-image
```
Check all images in path:
- Android: __android/app/src/main/res/drawable/__
- iOS: __ios/[AppName]/Images.xcassets__
> Note:
>
> Rebuild app for update resource !
> Warning
>
> Images has format to **snake_case**
>
> Ex: **Apple.png** -> **apple.png** | **Apple Computer.jpg** -> **apple_computer.jpg**
## Example
This is example for image name _apple.png_
```js
import React from 'react';
import { View, Image } from 'react-native';
const Example = () => (
<View>
<Image
source={{ uri: 'apple' }}
style={{ width: 50, height: 50 }}
/>
</View>
);
export default Example;
```
## LICENSE
[MIT](LICENSE)
|
<?php
namespace App\Models;
use Core\{Model,DB,H};
class Harddelete extends Model{
protected static $_table = 'transactions';
protected static $_softDelete = true;
public static function findTransactionById($id){
return self::findFirst([
"conditions" => "id = ?",
"bind" => [$id]
]);
}
public static function hardDeleteItems($tx){
$tx->delete();
return;
}
}
|
#!/bin/bash
# the json command here is an npm package
#: here it is: https://www.npmjs.com/package/json
echo 'Please fill in youe data!'
read -p 'username: ' username
read -p 'email: ' email
read -sp 'password: ' pass1
read -sp 'password again: ' pass2
echo ''
if [ $pass1 != $pass2 ]; then
echo 'you passwrod_1 does not match password_2'
exit 1
fi
data="{username: \"$username\", password: \"$pass1\", email: \"$email\"}"
echo ''
echo 'saving data...'
all_users=$([ -e ./users.json ] && cat users.json || echo '[]')
if [[ -z $all_users ]]; then all_users='[]'; fi
count=$(echo $all_users | json length);
all_users=$(echo $all_users | json -A -e "this[$count] = $data" > users.json) &&
echo "new user \"$username\" added!"
|
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using Roslyn.Compilers.Internal.MetadataReader.PEFileFlags;
using Roslyn.Compilers.Internal.MetadataReader.UtilityDataStructures;
namespace Roslyn.Compilers.Internal.MetadataReader.PEFile
{
internal struct OptionalHeaderNTAdditionalFields
{
internal ulong ImageBase;
internal int SectionAlignment;
internal uint FileAlignment;
internal ushort MajorOperatingSystemVersion;
internal ushort MinorOperatingSystemVersion;
internal ushort MajorImageVersion;
internal ushort MinorImageVersion;
internal ushort MajorSubsystemVersion;
internal ushort MinorSubsystemVersion;
internal uint Win32VersionValue;
internal int SizeOfImage;
internal int SizeOfHeaders;
internal uint CheckSum;
internal Subsystem Subsystem;
internal DllCharacteristics DllCharacteristics;
internal ulong SizeOfStackReserve;
internal ulong SizeOfStackCommit;
internal ulong SizeOfHeapReserve;
internal ulong SizeOfHeapCommit;
internal uint LoaderFlags;
internal int NumberOfRvaAndSizes;
}
}
|
import Ajv from 'ajv'
test('solidarity.example.json fits local schema', () => {
const ajv = new Ajv()
const localSchema = require('../../solidaritySchema.json')
const solidarityExample = require('../../.solidarity.example.json')
const valid = ajv.validate(localSchema, solidarityExample)
expect(valid).toBe(true)
})
|
package org.firstinspires.ftc.teamcode.mechanisms;
import com.qualcomm.robotcore.hardware.Servo;
import org.firstinspires.ftc.robotcore.external.Telemetry;
class QQ_TestServo extends QQ_Test {
private double offLocation;
private double onLocation;
private Servo servo;
/**
* @param description string that describes the test
* @param offLocation location to move too when off
* @param onLocation location to move too when on
* @param servo servo to use during test
*/
QQ_TestServo(String description, double offLocation, double onLocation, Servo servo) {
super(description);
this.offLocation = offLocation;
this.onLocation = onLocation;
this.servo = servo;
}
/**
* determines whether to move the servo to the on location of the off location
*
* @param on determines which position the servo moves to
* @param telemetry allows sending back the position
*/
@Override
void run(boolean on, Telemetry telemetry) {
if (on) {
servo.setPosition(onLocation);
} else {
servo.setPosition(offLocation);
}
telemetry.addData("Location", servo.getPosition());
}
}
|
from utils import get_data, answers, print_answers
from operator import itemgetter
data = get_data(2020, 5).split('\n')
find_highest = []
for ticket in data:
count = 10
find_row = list(range(0, 128))
find_col = list(range(0, 8))
for t in ticket:
if count > 0:
r_ind = int(len(find_row)/2)
c_ind = int(len(find_col)/2)
if t == 'F':
find_row = find_row[:r_ind]
if t == 'B':
find_row = find_row[r_ind:]
if t == 'L':
find_col = find_col[:c_ind]
if t == 'R':
find_col = find_col[c_ind:]
count -= 1
find_highest.append(find_row[0]*8+find_col[0])
def find_missing(lst):
start = lst[0]
end = lst[-1]
return sorted(set(range(start, end + 1)).difference(lst))
my_seat = find_missing(sorted(find_highest))
answers['first'] = max(find_highest)
answers['second'] = my_seat[0]
print_answers(answers)
|
import {Forecast} from "./Forecast";
class SpeechForecast extends Forecast {
constructor(
text: string,
lang: number,
public url: string
) {
super(text, lang)
}
}
export { SpeechForecast }
|
# andrew roy chen's portfolio
Andrew R Chen's portfolio page circa 2015, aka Fun with Jekyll era.
WIP on new page!
<3
|
<?php
namespace App\Logs;
use Illuminate\Database\Eloquent\Model;
class ErrorLog extends Model
{
const UPDATED_AT = null;
protected $table = "error_log";
protected $connection = 'error';
}
|
using UnityEngine;
using System.Collections;
public class GameInfoScripts : MonoBehaviour {
public void setGenderMale()
{
GameInformation.SpriteString = "Base Male";
GameInformation.IsMale = true;
}
public void setGenderFemale()
{
GameInformation.SpriteString = "Base Female";
GameInformation.IsMale = false;
}
public void setFemaleSkin_1() { GameInformation.SpriteString = "NPCWMage_f"; }
public void setFemaleSkin_2() { GameInformation.SpriteString = "NPCRouge_f"; }
public void setFemaleSkin_3() { GameInformation.SpriteString = "NPCWFighter_f"; }
public void setMaleSkin_1() { GameInformation.SpriteString = "NPCRookie_m"; }
public void setMaleSkin_2() { GameInformation.SpriteString = "NPCRouge_m"; }
public void setMaleSkin_3() { GameInformation.SpriteString = "NPCWarrior_m"; }
public void setClassBase()
{
GameInformation.PlayerClass = BaseCharacterClass.CharacterClasses.BASE;
GameInformation.Strength = 3;
GameInformation.Agility = 2;
GameInformation.Stamina = 3;
GameInformation.Intelect = 3;
GameInformation.Vitality = 4;
GameInformation.Spirit = 2;
PointsHolder.Points = 5;
}
public void setClassWarrior()
{
GameInformation.PlayerClass = BaseCharacterClass.CharacterClasses.WARRIOR;
GameInformation.Strength = 9;
GameInformation.Agility = 2;
GameInformation.Stamina = 8;
GameInformation.Intelect = 3;
GameInformation.Vitality = 9;
GameInformation.Spirit = 2;
PointsHolder.Points = 5;
}
public void setClassRouge()
{
GameInformation.PlayerClass = BaseCharacterClass.CharacterClasses.ROUGE;
GameInformation.Strength = 7;
GameInformation.Agility = 5;
GameInformation.Stamina = 6;
GameInformation.Intelect = 4;
GameInformation.Vitality = 5;
GameInformation.Spirit = 6;
PointsHolder.Points = 5;
}
public void setClassBMage()
{
GameInformation.PlayerClass = BaseCharacterClass.CharacterClasses.BMAGE;
GameInformation.Strength = 3;
GameInformation.Agility = 2;
GameInformation.Stamina = 4;
GameInformation.Intelect = 9;
GameInformation.Vitality = 6;
GameInformation.Spirit = 9;
PointsHolder.Points = 5;
}
public void setClassWMage()
{
GameInformation.PlayerClass = BaseCharacterClass.CharacterClasses.WMAGE;
GameInformation.Strength = 5;
GameInformation.Agility = 1;
GameInformation.Stamina = 6;
GameInformation.Intelect = 7;
GameInformation.Vitality = 4;
GameInformation.Spirit = 10;
PointsHolder.Points = 5;
}
public void PointsUp() { PointsHolder.Points++; }
public void PointsDown() { PointsHolder.Points--; }
public void StrUp() { GameInformation.Strength++; }
public void StrDown() { GameInformation.Strength--; }
public void IntUp() { GameInformation.Intelect++; }
public void IntDown() { GameInformation.Intelect--; }
public void AgiUp() { GameInformation.Agility++; }
public void AgiDown() { GameInformation.Agility--; }
public void StmUp() { GameInformation.Stamina++; }
public void StmDown() { GameInformation.Stamina--; }
public void VitUp() { GameInformation.Vitality++; }
public void VitDown() { GameInformation.Vitality--; }
public void SprUp() { GameInformation.Spirit++; }
public void SprDown() { GameInformation.Spirit--; }
}
|
//! # boticordrs
//!
//! Crate for [Boticord](https://boticord.top/) API
//! ## Usage
//!
//! Add this to your `Cargo.toml`
//! ```toml
//! [dependencies]
//! boticordrs = "0.1.3"
//! ```
//!
//! ## Example
//!
//! ```no_run
//! use boticordrs::{BoticordClient};
//! use boticordrs::types::{BotStats};
//!
//! #[tokio::main]
//! async fn main() {
//! let client = BoticordClient::new("your token".to_string(), 2).expect("failed client");
//!
//! let stats = BotStats {servers: 2514, shards: 3, users: 338250};
//!
//! match client.post_bot_stats(stats).await {
//! Ok(_) => {
//! println!("Well Done!")
//! },
//! Err(e) => eprintln!("{}", e),
//! }
//! }
//! ```
#![doc(html_root_url = "https://docs.rs/boticordrs/0.1.3")]
use reqwest::header::AUTHORIZATION;
use reqwest::{Client as ReqwestClient, Response};
use reqwest::{Method};
macro_rules! api_url {
($e:expr) => {
concat!("https://api.boticord.top/v", $e)
};
($e:expr, $($rest:tt)*) => {
format!(api_url!($e), $($rest)*)
};
}
pub mod types;
mod errors;
use types::*;
pub use errors::BoticordError;
/// You can use it to make it much easier to use the Boticord API.
#[derive(Clone)]
pub struct BoticordClient {
client: ReqwestClient,
token: String,
version: u64
}
impl BoticordClient {
/// Constructs a new Client.
///
/// In BotiCord API v2 there are some changes with token.
/// [Read more here](https://docs.boticord.top/topics/v1vsv2/)
///
/// # Arguments
///
/// * `token` - Your BotiCord token
/// * `version` - Version of BotiCord API.
///
pub fn new(token: String, version: u64) -> Result<Self, BoticordError> {
let client = ReqwestClient::builder().build().map_err(errors::from)?;
Ok(BoticordClient { client, token, version })
}
/// Constructs a new Client with ReqwestClient specified by user.
///
/// In BotiCord API v2 there are some changes with token.
/// [Read more here](https://docs.boticord.top/topics/v1vsv2/)
///
/// # Arguments
///
/// * `client` - Your custom ReqwestClient
/// * `token` - Your BotiCord token
/// * `version` - Version of BotiCord API
///
pub fn new_with_client(client: ReqwestClient, token: String, version: u64) -> Self {
BoticordClient { client, token, version }
}
/// Get information about a specific bot.
///
/// # Arguments
///
/// * `bot` - Id of bot.
///
pub async fn get_bot_info(&self, bot: String) -> Result<Bot, BoticordError> {
let url = api_url!("{}/bot/{}", &self.version, bot);
get(self, url).await
}
/// Get information about a specific server.
///
/// # Arguments
///
/// * `server` - Id of server.
///
pub async fn get_server_info(&self, server: String) -> Result<Server, BoticordError> {
let url = api_url!("{}/server/{}", &self.version, server);
get(self, url).await
}
/// Get information about a specific user.
///
/// # Arguments
///
/// * `user` - Id of user.
///
pub async fn get_user_info(&self, user: String) -> Result<UserInformation, BoticordError> {
let url = api_url!("{}/profile/{}", &self.version, user);
get(self, url).await
}
/// Get Vec of bot's comments.
///
/// # Arguments
///
/// * `bot` - Id of bot.
///
pub async fn get_bot_comments(&self, bot: String) -> Result<Vec<SingleComment>, BoticordError> {
let url = api_url!("{}/bot/{}/comments", &self.version, bot);
get(self, url).await
}
/// Get Vec of server's comments.
///
/// # Arguments
///
/// * `server` - Id of server.
///
pub async fn get_server_comments(&self,
server: String
) -> Result<Vec<SingleComment>, BoticordError> {
let url = api_url!("{}/server/{}/comments", &self.version, server);
get(self, url).await
}
/// Get Vec of user's comments.
///
/// # Arguments
///
/// * `user` - Id of user.
///
pub async fn get_user_comments(&self,
user: String
) -> Result<UserComments, BoticordError> {
let url = api_url!("{}/profile/{}/comments", &self.version, user);
get(self, url).await
}
/// Get Vec of user's bots.
///
/// # Arguments
///
/// * `user` - Id of user.
///
pub async fn get_user_bots(&self,
user: String
) -> Result<Vec<SingleUserBot>, BoticordError> {
let url = api_url!("{}/bots/{}", &self.version, user);
get(self, url).await
}
/// Get Vec of shorted by current user links
pub async fn get_my_shorted_links(&self) -> Result<Vec<ShortedLink>, BoticordError> {
let url = api_url!("{}/links/get", &self.version,);
post_with_response(self, url, Some(EmptyBody{})).await
}
/// Get Vec of shorted by current user links with the provided code
///
/// # Arguments
///
/// * `shortener_body` - Short information about a link, that we will search.
///
pub async fn search_for_shorted_link(&self,
shortener_body: ShortenerBody
) -> Result<Vec<ShortedLink>, BoticordError> {
let url = api_url!("{}/links/get", &self.version,);
post_with_response(self, url, Some(shortener_body)).await
}
/// Creates new shorted link
///
/// # Arguments
///
/// * `shortener_body` - Information about link we will create.
///
pub async fn create_shorted_link(&self,
shortener_body: ShortenerBody
) -> Result<ShortedLink, BoticordError> {
let url = api_url!("{}/links/create", &self.version,);
post_with_response(self, url, Some(shortener_body)).await
}
/// Deletes shorted link
///
/// # Arguments
///
/// * `shortener_body` - Information about link we will delete.
///
pub async fn delete_shorted_link(&self,
shortener_body: ShortenerBody
) -> Result<(), BoticordError> {
let url = api_url!("{}/links/delete", &self.version);
post(self, url, Some(shortener_body)).await
}
/// Post current bot's stats.
/// # How to set BotStats? (example)
///
/// # Arguments
///
/// * `stats` - Stats that we will post
///
/// # Examples
///
/// ```no_run
/// use boticordrs::types::{BotStats};
///
/// let stats = BotStats{servers: 2514, shards: 3, users: 338250};
/// ```
pub async fn post_bot_stats(&self, stats: BotStats) -> Result<(), BoticordError> {
let url = api_url!("{}/stats", &self.version);
post(self, url, Some(stats)).await
}
/// Post Server Stats Method.
///
/// Remember, that only Boticord-Service Bots can do it in global,
/// other will get an 403 error.
/// (but it may works for custom bots, but you need a special API-token)
///
///
/// # Arguments
///
/// * `stats` - Stats that we will post
///
pub async fn post_server_stats(&self, stats: ServerStats) -> Result<(), BoticordError> {
let url = api_url!("{}/server ", &self.version);
post(self, url, Some(stats)).await
}
}
async fn request<T>(
client: &BoticordClient,
method: Method,
url: String,
data: Option<T>,
) -> Result<Response, BoticordError>
where
T: serde::Serialize + Sized,
{
let mut req = client
.client
.request(method, &url)
.header(AUTHORIZATION, &client.token);
if let Some(data) = data {
req = req.json(&data);
}
let resp = match req.send().await {
Ok(resp) => resp,
Err(e) => return Err(errors::from(e)),
};
match resp.status() {
_ => resp.error_for_status().map_err(errors::from),
}
}
async fn get<T>(client: &BoticordClient, url: String) -> Result<T, BoticordError>
where
T: serde::de::DeserializeOwned + Sized,
{
let resp = request(client, Method::GET, url, None::<()>).await?;
match resp.json().await {
Ok(data) => Ok(data),
Err(e) => Err(errors::from(e)),
}
}
async fn post<T>(client: &BoticordClient,
url: String,
data: Option<T>) -> Result<(), BoticordError>
where
T: serde::Serialize + Sized,
{
request(client, Method::POST, url, data).await?;
Ok(())
}
async fn post_with_response<T, R>(client: &BoticordClient,
url: String,
data: Option<T>) -> Result<R, BoticordError>
where
T: serde::Serialize + Sized,
R: serde::de::DeserializeOwned + Sized,
{
let resp = request(client, Method::POST, url, data).await?;
match resp.json().await {
Ok(data) => Ok(data),
Err(e) => Err(errors::from(e)),
}
}
|
module LambdaCoucou.Parser where
import qualified LambdaCoucou.Cancer as LC.Cancer
import qualified LambdaCoucou.Command as LC.Cmd
import qualified LambdaCoucou.Crypto as LC.C
import qualified LambdaCoucou.Help as LC.Hlp
import qualified LambdaCoucou.ParserUtils as LC.P
import qualified LambdaCoucou.Remind as LC.R
import qualified LambdaCoucou.UserSettings as LC.Settings
import RIO
import qualified RIO.Text as T
import qualified RIO.Partial as RIO'
import qualified RIO.Time as Time
import qualified Text.Megaparsec as M
import qualified Text.Megaparsec.Char as C
import Control.Applicative.Combinators (manyTill_, manyTill, someTill)
type Parser = M.Parsec Void Text
parseCommand :: Text -> Either (M.ParseErrorBundle Text Void) LC.Cmd.CoucouCmd
parseCommand = M.parse commandParser "cmdParser"
commandParser :: Parser LC.Cmd.CoucouCmd
commandParser =
M.try sedCommandParser
<|> prefix
*> M.choice
[ M.try urlCommandParser,
M.try cryptoCommandParser,
M.try dateCommandParser,
M.try cancerCommandParser,
M.try miscCoucouCommandParser,
M.try helpCommandParser,
M.try prCommandParser,
M.try jokeCommandParser,
M.try remindCommandParser,
M.try settingsCommandParser,
M.try ytSearchCommandParser,
M.try liveStreamsCommandParser
]
<|> pure LC.Cmd.Nop
prefix :: Parser Char
prefix = C.char 'λ' <|> C.char '&' <|> C.char 'Σ' -- Σ for sigma_g
-------------------- URL --------------------
urlCommandParser :: Parser LC.Cmd.CoucouCmd
urlCommandParser = do
C.string "url"
LC.Cmd.Url <$> (M.try offsetParser <|> pure 0) <*> targetParser
where
offsetParser = do
M.some C.spaceChar
d <- M.some C.digitChar
pure $ RIO'.read d
-------------------- Crypto --------------------
cryptoCommandParser :: Parser LC.Cmd.CoucouCmd
cryptoCommandParser = do
C.string "crypto"
LC.P.spaces
LC.Cmd.Crypto <$> cryptoCoin <*> targetParser
cryptoCoin :: Parser (Either Text LC.C.CryptoCoin)
cryptoCoin =
M.try ((C.string' "btc" <|> C.string' "xbt") $> Right LC.C.Bitcoin)
<|> M.try (C.string' "eth" $> Right LC.C.Ethereum)
<|> (Left <$> LC.P.word)
-------------------- Date --------------------
dateCommandParser :: Parser LC.Cmd.CoucouCmd
dateCommandParser = do
C.string "date"
LC.P.spaces
LC.Cmd.Date <$> targetParser
-------------------- Cancer --------------------
cancerCommandParser :: Parser LC.Cmd.CoucouCmd
cancerCommandParser = do
C.string "cancer"
cancerType <-
M.try
(M.some C.spaceChar *> (LC.Cancer.SpecificCancer <$> LC.P.utf8Word'))
<|> pure LC.Cancer.RandomCancer
LC.Cmd.Cancer cancerType <$> targetParser
-------------------- Coucou --------------------
miscCoucouCommandParser :: Parser LC.Cmd.CoucouCmd
miscCoucouCommandParser = do
C.string "coucou"
LC.P.spaces
(M.eof $> LC.Cmd.ShoutCoucou) <|> ((LC.P.utf8Word <* LC.P.spaces <* M.eof) $> LC.Cmd.HeyCoucou)
-------------------- Help --------------------
helpCommandParser :: Parser LC.Cmd.CoucouCmd
helpCommandParser = do
C.string "help"
M.choice
[ M.try (f "url" LC.Hlp.Url),
M.try (f "crypto" LC.Hlp.Crypto),
M.try (f "date" LC.Hlp.Date),
M.try (f "cancer" LC.Hlp.Cancer),
M.try (f "coucou" LC.Hlp.ShoutCoucou),
M.try (f "joke" LC.Hlp.Joke),
M.try (f "remind" LC.Hlp.Remind),
M.try (f "settings" LC.Hlp.Settings),
M.try (f "ytSearch" LC.Hlp.YTSearch),
M.try (f "yt_search" LC.Hlp.YTSearch),
M.try (f "live" LC.Hlp.LiveStreams),
M.try (LC.P.spaces *> (LC.Cmd.Help LC.Hlp.General <$> targetParser) <* M.eof),
LC.P.spaces *> (LC.Cmd.Help . LC.Hlp.Unknown <$> LC.P.utf8Word <*> targetParser)
]
where
f str cmd = LC.Cmd.Help <$> (M.some C.spaceChar *> (C.string' str $> cmd)) <*> targetParser
-------------------- PR --------------------
prCommandParser :: Parser LC.Cmd.CoucouCmd
prCommandParser = do
C.string "pr"
LC.Cmd.PR <$> targetParser
-------------------- Joke --------------------
jokeCommandParser :: Parser LC.Cmd.CoucouCmd
jokeCommandParser = do
C.string "joke"
LC.Cmd.Joke <$> targetParser
-------------------- Remind --------------------
remindCommandParser :: Parser LC.Cmd.CoucouCmd
remindCommandParser = do
C.string "remind"
C.space1
LC.Cmd.Remind <$> (M.try reminder <|> M.try remindList <|> remindDelete)
reminder :: Parser LC.R.RemindCmd
reminder = do
remindSpec <- timeAtParser <|> durationParser <|> tomorrowParser <|> weekdayParser
txt <- M.takeWhile1P (Just "text to remind") (const True)
pure $ LC.R.Reminder remindSpec txt
remindList :: Parser LC.R.RemindCmd
remindList = C.string "list" $> LC.R.RemindList
remindDelete :: Parser LC.R.RemindCmd
remindDelete = do
C.string "del"
C.space1
reminderId <- LC.P.int
pure $ LC.R.RemindDelete reminderId
timeAtParser :: Parser LC.R.RemindSpec
timeAtParser = do
C.string "at"
C.space1
mbDate <- optional (M.try dateP)
mbDateTime <- optional (dateTimeP <* C.space1)
if isNothing mbDate && isNothing mbDateTime
then fail "all fields are Nothing"
else
let (y, m, d) = case mbDate of
Nothing -> (Nothing, Nothing, Nothing)
Just (a, b, c) -> (Just a, Just b, Just c)
(h, min) = case mbDateTime of
Nothing -> (Nothing, Nothing)
Just (a, b) -> (Just a, Just b)
ts = LC.R.TimeSpec y m d h min
in pure $ LC.R.RemindTime ts
where
dateP = do
y <- LC.P.int
C.char '-'
m <- LC.P.int
C.char '-'
d <- LC.P.int
void (C.char 'T') <|> void (C.char ' ') <|> M.eof
pure (y, m, d)
dateTimeP :: Parser (Int, Int)
dateTimeP = do
h <- LC.P.int <* optional (C.char 'h')
m <- fromMaybe 0 <$> optional (C.char ':' *> LC.P.int)
pure (h, m)
durationParser :: Parser LC.R.RemindSpec
durationParser = do
C.string "in"
C.space1
y <- optional (M.try $ LC.P.int <* LC.P.spaces <* yearP)
m <- optional (M.try $ LC.P.int <* LC.P.spaces <* monthP)
d <- optional (M.try $ LC.P.int <* LC.P.spaces <* dayP)
(h, minutes) <-
( M.try hourMinP <|> do
h <- optional (M.try $ LC.P.int <* LC.P.spaces <* hourP)
minutes <- optional (M.try $ LC.P.int <* LC.P.spaces <* minP)
pure (h, minutes)
)
case (y <|> m <|> d <|> h <|> minutes) of
Nothing -> fail "all fields are Nothing"
Just _ ->
pure $
LC.R.RemindDuration $
LC.R.TimeSpec
{ LC.R.dsYear = y,
LC.R.dsMonth = m,
LC.R.dsDay = d,
LC.R.dsHour = h,
LC.R.dsMinute = minutes
}
where
yearP =
(void $ C.string "year" <* optional (C.char 's') <* end)
<|> (M.try (void $ C.char 'y') <* end)
monthP =
(void $ C.string "month" <* optional (C.char 's') <* end)
<|> (M.try (void $ C.char 'M') <* end)
dayP =
(void $ C.string "day" <* optional (C.char 's') <* end)
<|> (M.try (void $ C.char 'd') <* end)
hourP =
(void $ C.string "hour" <* optional (C.char 's') <* end)
<|> (M.try (void $ C.char 'h') <* end)
minP =
(void $ C.string "min" <* optional (C.char 's') <* end)
<|> (M.try (void $ C.char 'm') <* end)
hourMinP = do
h <- (LC.P.int <* (C.char 'h' <|> C.char 'H'))
m <- LC.P.int
optional (C.char 'm')
end
pure (Just h, Just m)
-- the eof shouldn't parse anything in a real message but is handy for testing
end = M.eof <|> C.space1
tomorrowParser :: Parser LC.R.RemindSpec
tomorrowParser = do
C.string "tomorrow" <|> C.string "demain"
C.space1 <|> M.eof
time <- optional (atTime *> C.space1 *> dateTimeP <* C.space1)
pure $ LC.R.RemindTomorrow time
weekdayParser :: Parser LC.R.RemindSpec
weekdayParser = do
d <- weekdayP
C.space1
time <- optional (atTime *> C.space1 *> dateTimeP <* C.space1)
pure $ LC.R.RemindWeekDay d time
where
weekdayP =
M.choice
[ (C.string' "monday" <|> C.string' "lundi") $> Time.Monday,
(C.string' "tuesday" <|> C.string' "mardi") $> Time.Tuesday,
(C.string' "wednesday" <|> C.string' "mercredi") $> Time.Wednesday,
(C.string' "thursday" <|> C.string' "jeudi") $> Time.Thursday,
(C.string' "friday" <|> C.string' "vendredi") $> Time.Friday,
(C.string' "saturday" <|> C.string' "samedi") $> Time.Saturday,
(C.string' "sunday" <|> C.string' "dimanche") $> Time.Sunday
]
atTime :: Parser Text
atTime = C.string "at" <|> C.string "à"
-------------------- User Settings --------------------
settingsCommandParser :: Parser LC.Cmd.CoucouCmd
settingsCommandParser = do
C.string "settings"
C.space1
cmd <- displayP <|> tzP
pure $ LC.Cmd.Settings cmd
where
displayP = C.string "show" $> LC.Settings.Display
tzP = do
isSet <- (C.string "set" $> True <|> C.string "unset" $> False)
C.space1
(C.string "tz" <|> C.string "timezone")
LC.Settings.UserTZ <$> if isSet
then Just <$> (C.space1 *> LC.P.utf8Word')
else pure Nothing
-------------------- Youtube Search --------------------
ytSearchCommandParser :: Parser LC.Cmd.CoucouCmd
ytSearchCommandParser = do
C.string "yt_search" <|> C.string' "ytSearch"
C.space1
(queryWords, target) <- (LC.P.utf8Word <* C.space) `manyTill_` targetParser
pure $ LC.Cmd.YTSearch queryWords target
-------------------- Sed --------------------
-- note that this parser only handle s/foo/bar(/?)
-- and ignores escaping
sedCommandParser :: Parser LC.Cmd.CoucouCmd
sedCommandParser = do
C.string "s/"
rawRegex <- T.pack <$> M.anySingle `someTill` C.char '/'
replacement <- T.pack <$> M.anySingle `manyTill` (C.char '/' *> LC.P.spaces *> M.eof)
pure $ LC.Cmd.Sed rawRegex replacement
-------------------- Sed --------------------
liveStreamsCommandParser :: Parser LC.Cmd.CoucouCmd
liveStreamsCommandParser = do
C.string "live"
LC.P.spaces
LC.Cmd.LiveStreams <$> targetParser
-------------------- Utils --------------------
targetParser :: Parser (Maybe Text)
targetParser = C.space *> (M.try t <|> (M.eof $> Nothing))
where
t = do
C.char '>'
C.space1
target <- LC.P.utf8Word'
C.space
M.eof
pure (Just target)
|
import React, { memo } from 'react';
import { makeStyles } from '@material-ui/core/styles';
import { useSnackbar } from 'notistack';
import { useFetchApproval } from '../../../redux/hooks';
import Button from 'components/CustomButtons/Button.js';
import { useConnectWallet } from 'features/home/redux/hooks';
import { refundABI } from 'features/configure/abi';
import styles from './styles';
const useStyles = makeStyles(styles);
const RefundButtons = ({ tokenAddress, refundAddress, index }) => {
const classes = useStyles();
const { web3, address } = useConnectWallet();
const { enqueueSnackbar } = useSnackbar();
const { fetchApproval } = useFetchApproval();
const onRefundApproval = () => {
fetchApproval({
address,
web3,
tokenAddress,
refundAddress,
index,
})
.then(() => enqueueSnackbar(`Approval success`, { variant: 'success' }))
.catch(error => enqueueSnackbar(`Approval error: ${error}`, { variant: 'error' }));
};
const onRefund = () => {
const vault = new web3.eth.Contract(refundABI, refundAddress);
vault.methods.refund().send({ from: address });
};
return (
<>
<Button
className={`${classes.showDetailButton} ${classes.showDetailButtonContained}`}
onClick={onRefundApproval}
>
Approve
</Button>
<Button
className={`${classes.showDetailButton} ${classes.showDetailButtonContained}`}
onClick={onRefund}
>
Refund
</Button>
</>
);
};
export default memo(RefundButtons);
|
module Parakeet.Linguistics.Misc (
isKanji
, isChoonpu
, isMacron
, fromMacron
, toMacron
, toMacron'
, isVowel
, separator
, isSeparator
) where
import Data.Char (ord)
import Data.List (find)
import Control.Monad.Choice
isKanji :: Char -> Bool
isKanji = (\x -> (x >= 0x4e00 && x <= 0x9fbf) || x == k) . ord
where k = ord '々'
isChoonpu :: Char -> Bool
isChoonpu = (==) 'ー'
macrons :: [(Choice Char, Char)]
macrons = [ (fromList "āâ", 'a')
, (fromList "īî", 'i')
, (fromList "ūû", 'u')
, (fromList "ēê", 'e')
, (fromList "ōô", 'o')
]
isMacron :: Char -> Bool
isMacron c = c `elem` concatMap (toList . fst) macrons
fromMacron :: Char -> Char
fromMacron c = case find (\(m, _) -> c `elem` toList m) macrons of
Just (_, b) -> b
Nothing -> c
toMacron :: Char -> Choice Char
toMacron c = case find (\(_, b) -> b == c) macrons of
Just (a, _) -> a
Nothing -> return c
toMacron' :: Char -> Choice Char
toMacron' = toMacron . fromMacron
isVowel :: Char -> Bool
isVowel c = c `elem` "aiueo"
separator :: Char
separator = '$'
isSeparator :: Char -> Bool
isSeparator = (==) separator
|
---
layout: page
---
Hello this is Helen's site!
You can read more [about me](/about/), have a look at my [blog](/blog/) or see my [work info](/work/)
|
<?php
declare(strict_types=1);
namespace QR\TeamleaderApiClient\Model\LevelTwoArea;
/**
* Class LevelTwoAreaListView.
*/
class LevelTwoAreaListView extends LevelTwoAreaBase
{
}
|
import warnings
class ShapeError(Exception):
def __init__(self, name, input_dims, expected_dims):
msg = "Data fed into '{}' has {} dims; should be {}-D array".format(
name, input_dims, expected_dims)
super().__init__(msg)
class DisconnectedError(Exception):
def __init__(self, missing_inputs):
base_msg = "The following inputs are not connected to your provided outputs: {}"
msg = base_msg.format(', '.join([node.name for node in missing_inputs]))
super().__init__(msg)
def ExtraInputsWarning(extra_inputs):
msg = "Some input nodes provided that aren't used in the graph: {}"
msg.format(', '.join([node.node_name for node in extra_inputs]))
warnings.warn(msg)
|
---
title: cheese
categories:
- lunch
- dinner
description: cheese for dinner
type: raw bar
price: '40'
---
|
#!/bin/bash
# This script attempts to build MIMIC on an Oracle instance.
# You will likely need to modify it to fit your own system, for example,
# you may need to change the authentication, e.g. replace '\ as SYSDBA' with 'myusername/mypassword'
# The script requires sqlldr and sqlplus
# Create the tables
sqlplus '\ as SYSDBA' << EOF
WHENEVER OSERROR EXIT 9;
WHENEVER SQLERROR EXIT SQL.SQLCODE;
ALTER SESSION SET CURRENT_SCHEMA = MIMICIII;
@oracle_create_tables.sql
EOF
# Alternatively, you could specify a username/password here, and use the below snippet
#db_username=
#db_password=
#sqlplus -s /nolog << EOF
#WHENEVER OSERROR EXIT 9;
#WHENEVER SQLERROR EXIT SQL.SQLCODE;
#CONNECT ${db_username}/${db_password};
#@oracle_create_tables.sql
#EOF
# Call sqlldr to load the data
sqlldr '\ as SYSDBA' control='controlfiles/admissions.ctl' log=admissions.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/callout.ctl' log=callout.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/caregivers.ctl' log=caregivers.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/chartevents.ctl' log=chartevents.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/cptevents.ctl' log=cptevents.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/d_cpt.ctl' log=d_cpt.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/d_icd_diagnoses.ctl' log=d_icd_diagnoses.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/d_icd_procedures.ctl' log=d_icd_procedures.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/d_items.ctl' log=d_items.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/d_labitems.ctl' log=d_labitems.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/datetimeevents.ctl' log=datetimeevents.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/diagnoses_icd.ctl' log=diagnoses_icd.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/drgcodes.ctl' log=drgcodes.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/icustays.ctl' log=icustays.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/inputevents_cv.ctl' log=inputevents_cv.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/inputevents_mv.ctl' log=inputevents_mv.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/labevents.ctl' log=labevents.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/microbiologyevents.ctl' log=microbiologyevents.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/noteevents_output.ctl' log=noteevents_output.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/outputevents.ctl' log=outputevents.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/patients.ctl' log=patients.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/prescriptions.ctl' log=prescriptions.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/procedureevents_mv.ctl' log=procedureevents_mv.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/procedures_icd.ctl' log=procedures_icd.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/services.ctl' log=services.log parallel=true direct=true multithreading=true skip_index_maintenance=true
sqlldr '\ as SYSDBA' control='controlfiles/transfers.ctl' log=transfers.log parallel=true direct=true multithreading=true skip_index_maintenance=true
# Now, create the indexes and constraints
sqlplus '\ as SYSDBA' << EOF
WHENEVER OSERROR EXIT 9;
WHENEVER SQLERROR EXIT SQL.SQLCODE;
ALTER SESSION SET CURRENT_SCHEMA = MIMICIII;
@oracle_add_indexes.sql
@oracle_add_constraints.sql
EOF
|
# PHP-библиотека DiskSpace
**\* Только для Unix-based систем**
Проверка свободного места на диске
## Установка
```bash
composer require lemurro/lib-diskspace
```
## Использование
```php
$disk_space = new \Lemurro\Lib\DiskSpace\DiskSpace();
// Лимит свободного места, в бинарной системе основанной на 1024-байтах в килобайте
// Суффиксы: KiB, MiB, GiB, TiB, PiB, …
// По умолчанию: '1GiB'
$result = $disk_space->check('50GiB');
//Array
//(
// [data] => Array
// (
// [limit_exceeded] => true
// [free_space] => 43.2 GiB
// [space_limit] => 50.0 GiB
// )
//)
$result = $disk_space->check('30GiB');
//Array
//(
// [data] => Array
// (
// [limit_exceeded] => false
// [free_space] => 43.2 GiB
// [space_limit] => 30.0 GiB
// )
//)
```
## Ошибки
В случае возникновения ошибок будет возвращён массив содержащий элемент `errors`
```
Array
(
[errors] => Array
(
[status] => '400 Bad Request'
[code] => 'warning'
[title] => 'Не удалось преобразовать указанный лимит в байты'
[meta] => Array
(
[space_limit_string] => '50 abc'
)
)
)
```
|
-- file:macaddr.sql ln:28 expect:true
SELECT a, b, trunc(b) FROM macaddr_data ORDER BY 2, 1
|
---
layout: post
title: "Further Testing"
date: 2021-08-16 06:11:07 -0400
permalink: testing
---
#Further Testing
|
@include('mage2-ecommerce::forms.text',['name' => 'name','label' => 'Name'])
@include('mage2-ecommerce::forms.text',['name' => 'slug','label' => 'Slug'])
@include('mage2-ecommerce::forms.textarea',['name' => 'content',
'label' => 'Content',
'attributes' => ['class' => 'ckeditor','id' => 'content']])
@include('mage2-ecommerce::forms.text',['name' => 'meta_title','label' => 'Meta Title'])
@include('mage2-ecommerce::forms.text',['name' => 'meta_description','label' => 'Meta Description'])
|
package tech.mlsql.plugins.canal.util
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import scala.util.control.NonFatal
object JacksonUtil {
private val _mapper = new ObjectMapper()
_mapper.registerModule(DefaultScalaModule)
def toJson[T](obj: T): String = {
_mapper.writeValueAsString(obj)
}
def fromJson[T](json: String, `class`: Class[T]): T = {
try {
_mapper.readValue(json, `class`)
} catch {
case NonFatal(e) =>
null.asInstanceOf[T]
}
}
def prettyPrint[T](obj: T): String = {
_mapper.writerWithDefaultPrettyPrinter().writeValueAsString(obj)
}
}
|
package edi
import (
"os"
"strings"
"testing"
)
func TestNewReader(t *testing.T) {
reader := NewReader(strings.NewReader(""))
if reader.Comma != '*' {
t.Errorf("Reader.Comma is %c, but should be ','", reader.Comma)
}
}
func TestNewWriter(t *testing.T) {
writer := NewWriter(os.Stdout)
if writer.Comma != '*' {
t.Errorf("Writer.Comma is %x, but should be '*'", writer.Comma)
}
}
|
<?php
namespace App\Helpers\Loggers;
/**
* Class VipLogger
*
* @package App\Helpers\Loggers
*/
class VipPostLogger extends Logger
{
protected static $postId = 0;
protected static $loggerName = 'vipPost';
protected static function config()
{
static::$path = storage_path() . '/logs/vip_posts.log';
}
public static function setPostId($postId)
{
static::$postId = $postId;
}
protected static function addEntry($level, $entry)
{
if ( static::$postId > 0) {
$entry = '[Post ID: ' . static::$postId . '] - ' . $entry;
}
parent::addEntry($level, $entry);
}
}
|
#define _GNU_SOURCE
#include <assert.h>
#include <ctype.h>
#include <stdarg.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdnoreturn.h>
#include <string.h>
#define STR(var) #var
enum {
TK_NUM = 256, // numeric token
TK_IDENT, // operator token
TK_EOF, // end token
TK_EQ, // ==
TK_NE, // !=
TK_LE, // <=
TK_GE, // >=
TK_LT, // <
TK_GT, // >
TK_RETURN, // return
TK_IF, // if
TK_WHILE, // while
TK_ELSE, // else
TK_FOR, // for
};
enum {
ND_NUM = 256, // node type of numeric
ND_IDENT, // node type of operator
ND_EQ, // ==
ND_NE, // !=
ND_LE, // <=
ND_GE, // >=
ND_LT, // <
ND_GT, // >
ND_RETURN, // return statement
ND_IF, // if statement
ND_WHILE, // while statement
ND_FOR, // for statement
ND_BLOCK // block
};
typedef struct {
void **data;
int capacity; // buffer allocated area
int len; // elements count
} Vector;
Vector *new_vector(void); // create new vector
void vec_push(Vector *vec, void *elem); // push back in vector
void show_vec(Vector *vec, char type);
typedef struct {
int ty; // token type
int val; // Numeric token
char *input; // error point (for error message)
char *name; // variable name
} Token;
// tokenize input expression
void tokenize(char *p);
int is_alnum(char c);
typedef struct Node {
int ty; // node type from token type
struct Node *lhs; // left hand side node
struct Node *rhs; // right hand side node
int val; // the value of this node (ND_NUM)
char *name; // use in case ty == ND_INDNET
// "if" ( cond ) then "else" els
struct Node *cond;
struct Node *then;
struct Node *els;
// "while" ( cond ) body
struct Node *body;
// "for" ( init; cond; inc ) body
struct Node *init;
struct Node *inc;
// "{" block "}"
struct Vector *block;
} Node;
Node *new_node(int ty, Node *lhs, Node *rhs);
Node *new_node_num(int ty);
Node *new_node_indent(char *name);
int consume(int ty);
// generate assembly functions
// details: syntax.txt
void program();
Node *stmt();
Node *assign();
Node *expr();
Node *equality();
Node *relational();
Node *add();
Node *mul();
Node *unary();
Node *term();
void gen_lval(Node *node);
void gen(Node *node); // generate assembly
typedef struct {
Vector *keys;
Vector *vals;
} Map;
Map *new_map();
void map_set(Map *map, char *key, void *val);
void *map_get(Map *map, char *key);
// error functions
void error(char *fmt, ...); // error output
void error_at(char *loc, char *msg); // error output with error point in input expression
// test function
int expect(int line, int expected, int actual);
void runtest();
void test_vector();
void test_map();
Vector *tokens;
Vector *code;
Map *map;
int pos, code_pos, offset_count;
|
#!/usr/bin/env bash
rm -rf lib
./node_modules/.bin/babel src --out-dir lib;
chmod +x lib/simtron-cli.js;
|
/*
MIT License
Copyright 2016 Comcast Cable Communications Management, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package packet
import (
"bytes"
"github.com/Comcast/gots/v2/"
)
// Iotas to track the state of the accumulator
const (
stateStarting = iota
stateAccumulating
stateDone
)
// Accumulator is used to gather multiple packets
// and return their concatenated payloads.
// Accumulator is not thread safe.
type Accumulator interface {
// WritePacket adds a packet to the accumulator and returns got.ErrAccumulatorDone if done
WritePacket(*Packet) (int, error)
// Bytes returns the payload bytes from the underlying buffer
Bytes() []byte
// Packets returns the packets used to fill the payload buffer
Packets() []*Packet
// Reset resets the accumulator state
Reset()
}
type accumulator struct {
f func([]byte) (bool, error)
buf *bytes.Buffer
packets []*Packet
state int
}
// NewAccumulator creates a new packet accumulator that is done when
// the provided function returns done as true.
func NewAccumulator(f func(data []byte) (done bool, err error)) Accumulator {
return &accumulator{
f: f,
buf: &bytes.Buffer{},
state: stateStarting}
}
// Add a packet to the accumulator. If the added packet completes
// the accumulation, based on the provided doneFunc, gots.ErrAccumulatorDone is returned.
// Returns an error if the packet is not valid.
func (a *accumulator) WritePacket(pkt *Packet) (int, error) {
switch a.state {
case stateStarting:
// need to check if the packet contains a payloadUnitStartIndicator to start
if !PayloadUnitStartIndicator(pkt) {
return PacketSize, gots.ErrNoPayloadUnitStartIndicator
}
a.packets = []*Packet{}
a.state = stateAccumulating
case stateAccumulating:
// need to check if the packet contains a payloadUnitStartIndicator so we know
// to drop old packets and start re-accumulation
if PayloadUnitStartIndicator(pkt) {
a.state = stateStarting
return a.WritePacket(pkt)
}
case stateDone:
return 0, gots.ErrAccumulatorDone
}
var cpyPkt = &Packet{}
copy(cpyPkt[:], pkt[:])
a.packets = append(a.packets, cpyPkt)
if b, err := Payload(pkt); err != nil {
return PacketSize, err
} else if _, err := a.buf.Write(b); err != nil {
return PacketSize, err
}
if done, err := a.f(a.buf.Bytes()); err != nil {
return PacketSize, err
} else if done {
a.state = stateDone
return PacketSize, gots.ErrAccumulatorDone
}
return PacketSize, nil
}
// Bytes returns the payload bytes from the underlying buffer
func (a *accumulator) Bytes() []byte {
return a.buf.Bytes()
}
// Packets returns the packets used to fill the payload buffer
// NOTE: Not thread safe
func (a *accumulator) Packets() []*Packet {
return a.packets
}
// Reset resets the accumulator state
func (a *accumulator) Reset() {
a.state = stateStarting
a.buf.Reset()
}
|
import { decorateVertexEvents } from '@/events/decorators';
import V from '@/utilities/Vector';
import SVG from 'svg.js';
import { GRAPHIC_TYPES, ISlideRenderer, IVertexRenderer, VERTEX_ROLES } from '../types';
type VertexRendererArgs = {
slide: ISlideRenderer;
scale: number;
role: VERTEX_ROLES;
center: V;
parentId: string;
};
class VertexRenderer implements IVertexRenderer {
public readonly type = GRAPHIC_TYPES.VERTEX;
public readonly parentId: string;
private _slide: ISlideRenderer;
private _svg: SVG.Ellipse | undefined;
private _role: VERTEX_ROLES;
private _scale: number;
private _width: number;
private _height: number;
private _center: V;
private _fillColor: string;
private _strokeColor: string;
private _strokeWidth: number;
constructor(args: VertexRendererArgs) {
this._slide = args.slide;
this._role = args.role;
this.parentId = args.parentId;
this._scale = args.scale;
this._center = args.center;
this._width = 8;
this._height = 8;
this._fillColor = '#400c8b';
this._strokeColor = 'none';
this._strokeWidth = 0;
}
public get isRendered(): boolean {
return this._svg !== undefined;
}
public get role(): VERTEX_ROLES {
return this._role;
}
public set center(center: V) {
this._center = center;
this._svg && this._svg.center(this._center.x, this._center.y);
}
public set scale(scale: number) {
this._scale = scale;
this._svg && this._svg.size(this._width * this._scale, this._height * this._scale);
}
public render(): void {
// Silently fail if the SVG is already rendered
if (this.isRendered) {
return;
}
this._svg = this._slide.canvas.ellipse(this._width * this._scale, this._height * this._scale)
.center(this._center.x, this._center.y)
.fill(this._fillColor)
.stroke({ color: this._strokeColor, width: this._strokeWidth });
decorateVertexEvents(this._svg, this._slide, this);
}
public unrender(): void {
this._svg && this._svg.remove();
this._svg = undefined;
}
}
export default VertexRenderer;
|
package com.devshed42.quient;
import android.app.Dialog;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.LinearLayout;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.Switch;
import android.widget.TextView;
import android.util.Log;
public class MainActivity extends AppCompatActivity {
private String LOG_TAG = MainActivity.class.getSimpleName();
LinearLayout actionLinearLayout;
TextView actionValue;
Switch statusSwitch;
RadioButton vibrateBtn;
RadioButton silentBtn;
final int VIBRATE = 1;
final int SILENT = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
actionLinearLayout = (LinearLayout) findViewById(R.id.action);
actionValue = (TextView) findViewById(R.id.actionValue);
statusSwitch = (Switch) findViewById(R.id.statusSwitch);
statusSwitch.setChecked(PreferencesManager.getSavedState(this));
final Dialog dialog = new Dialog(MainActivity.this);
dialog.setContentView(R.layout.alert_dialog);
final RadioGroup radioGroup = (RadioGroup) dialog.findViewById(R.id.alertRadioGroup);
vibrateBtn = (RadioButton) dialog.findViewById(R.id.vibrate);
silentBtn = (RadioButton) dialog.findViewById(R.id.silent);
final Intent intent = new Intent(getApplicationContext(), QuientService.class);
int actionId = PreferencesManager.getSavedAction(this);
switch (actionId) {
case 0:
actionValue.setText(R.string.Silent);
vibrateBtn.setChecked(false);
silentBtn.setChecked(true);
break;
case 1:
actionValue.setText(R.string.Vibrate);
vibrateBtn.setChecked(true);
silentBtn.setChecked(false);
break;
default:
break;
}
if (statusSwitch.isChecked()) {
getApplicationContext().startService(intent);
}
statusSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
Log.d(LOG_TAG, "Status: " + buttonView.isChecked());
PreferencesManager.saveState(MainActivity.this, buttonView.isChecked());
if (buttonView.isChecked()) {
// Start service
Log.d(LOG_TAG, "Starting Service");
getApplicationContext().startService(intent);
} else {
Log.d(LOG_TAG, "Stopping Service");
getApplicationContext().stopService(intent);
}
}
});
actionLinearLayout.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Button dialogBtnOk = (Button) dialog.findViewById(R.id.btnOk);
Button dialogBtnCancel = (Button) dialog.findViewById(R.id.btnCancel);
dialogBtnOk.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
switch (radioGroup.getCheckedRadioButtonId()) {
case R.id.silent:
Log.d(LOG_TAG, "Adding pref_action: 0");
actionValue.setText(R.string.Silent);
PreferencesManager.saveAction(MainActivity.this, SILENT);
if (QuientService.isQuientRunning()) {
Log.d(LOG_TAG, "Quient is Running; Restarting Service");
getApplicationContext().stopService(intent);
getApplicationContext().startService(intent);
}
break;
case R.id.vibrate:
Log.d(LOG_TAG, "Adding pref_action: 1");
actionValue.setText(R.string.Vibrate);
PreferencesManager.saveAction(MainActivity.this, VIBRATE);
if (QuientService.isQuientRunning()) {
Log.d(LOG_TAG, "Quient is running; Restarting Service");
getApplicationContext().stopService(intent);
getApplicationContext().startService(intent);
}
break;
default:
break;
}
dialog.dismiss();
}
});
dialogBtnCancel.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dialog.dismiss();
}
});
dialog.show();
}
});
}
}
|
CREATE INDEX [WorkQueueItem_SelectByQueueStatusPage]
ON [dbo].WorkQueueItem
(
[WorkQueueId] ASC,
[AssignedToSecurityAuthorityId] ASC,
[CompletionDate] ASC,
[MilestoneDate] ASC,
[DueDate] ASC,
[LastWorkedDate] ASC,
[AddedDate] ASC,
[WorkQueueItemId] ASC
)
|
# Armadillo
[](http://clojars.org/armadillo)
[](https://circleci.com/gh/Rumel/armadillo)
|
require 'spec_helper'
describe 'mysql::server::backup' do
let(:default_params) {
{ 'backupuser' => 'testuser',
'backuppassword' => 'testpass',
'backupdir' => '/tmp',
'backuprotate' => '25',
'delete_before_dump' => true,
}
}
context 'standard conditions' do
let(:params) { default_params }
it { should contain_mysql_user('testuser@localhost')}
it { should contain_mysql_grant('testuser@localhost/*.*').with(
:privileges => ["SELECT", "RELOAD", "LOCK TABLES", "SHOW VIEW"]
)}
it { should contain_cron('mysql-backup').with(
:command => '/usr/local/sbin/mysqlbackup.sh',
:ensure => 'present'
)}
it { should contain_file('mysqlbackup.sh').with(
:path => '/usr/local/sbin/mysqlbackup.sh',
:ensure => 'present'
) }
it { should contain_file('mysqlbackupdir').with(
:path => '/tmp',
:ensure => 'directory'
)}
it 'should have compression by default' do
verify_contents(subject, 'mysqlbackup.sh', [
' --all-databases | bzcat -zc > ${DIR}/${PREFIX}`date +%Y%m%d-%H%M%S`.sql.bz2',
])
end
it 'should have 25 days of rotation' do
# MySQL counts from 0 I guess.
should contain_file('mysqlbackup.sh').with_content(/.*ROTATE=24.*/)
end
end
context 'with compression disabled' do
let(:params) do
{ :backupcompress => false }.merge(default_params)
end
it { should contain_file('mysqlbackup.sh').with(
:path => '/usr/local/sbin/mysqlbackup.sh',
:ensure => 'present'
) }
it 'should be able to disable compression' do
verify_contents(subject, 'mysqlbackup.sh', [
' --all-databases > ${DIR}/${PREFIX}`date +%Y%m%d-%H%M%S`.sql',
])
end
end
context 'with database list specified' do
let(:params) do
{ :backupdatabases => ['mysql'] }.merge(default_params)
end
it { should contain_file('mysqlbackup.sh').with(
:path => '/usr/local/sbin/mysqlbackup.sh',
:ensure => 'present'
) }
it 'should have a backup file for each database' do
content = catalogue.resource('file','mysqlbackup.sh').send(:parameters)[:content]
content.should match(' mysql | bzcat -zc \${DIR}\\\${PREFIX}mysql_`date')
# verify_contents(subject, 'mysqlbackup.sh', [
# ' mysql | bzcat -zc ${DIR}/${PREFIX}mysql_`date +%Y%m%d-%H%M%S`.sql',
# ])
end
end
context 'with file per database' do
let(:params) do
default_params.merge({ :file_per_database => true })
end
it 'should loop through backup all databases' do
verify_contents(subject, 'mysqlbackup.sh', [
'mysql -s -r -N -e \'SHOW DATABASES\' | while read dbname',
'do',
' mysqldump -u${USER} -p${PASS} --opt --flush-logs --single-transaction \\',
' ${dbname} | bzcat -zc > ${DIR}/${PREFIX}${dbname}_`date +%Y%m%d-%H%M%S`.sql.bz2',
'done',
])
end
context 'with compression disabled' do
let(:params) do
default_params.merge({ :file_per_database => true, :backupcompress => false })
end
it 'should loop through backup all databases without compression' do
verify_contents(subject, 'mysqlbackup.sh', [
' ${dbname} > ${DIR}/${PREFIX}${dbname}_`date +%Y%m%d-%H%M%S`.sql',
])
end
end
end
end
|
/**
* Flags that can be applied to item prototypes.
*/
declare type ItemPrototypeFlag =
"draw-logistic-overlay" |
"hidden" |
"always-show" |
"hide-from-bonus-gui" |
"hide-from-fuel-tooltip" |
"not-stackable" |
"can-extend-inventory" |
"primary-place-result" |
"mod-openable" |
"only-in-cursor" |
"spawnable";
|
extern crate amd;
// A simple test that illustrates the use of the interface to AMD.
//
// Identical to order.rs, except that it operates on an input matrix
// that has unsorted columns and duplicate entries.
fn main() {
// The symmetric can_24 Harwell/Boeing matrix (jumbled, and not symmetric).
// Since AMD operates on A+A', only A(i,j) or A(j,i) need to be specified,
// or both. The diagonal entries are optional (some are missing).
// There are many duplicate entries, which must be removed. */
let n: usize = 24;
let a_p = vec![
0, 9, 14, 20, 28, 33, 37, 44, 53, 58, 63, 63, 66, 69, 72, 75, 78, 82, 86, 91, 97, 101, 112,
112, 116,
];
let a_i = vec![
0, 17, 18, 21, 5, 12, 5, 0, 13, // column: 0
14, 1, 8, 13, 17, // column: 1
2, 20, 11, 6, 11, 22, // column: 2
3, 3, 10, 7, 18, 18, 15, 19, // column: 3
7, 9, 15, 14, 16, // column: 4
5, 13, 6, 17, // column: 5
5, 0, 11, 6, 12, 6, 23, // column: 6
3, 4, 9, 7, 14, 16, 15, 17, 18, // column: 7
1, 9, 14, 14, 14, // column: 8
7, 13, 8, 1, 17, // column: 9
// column: 10
2, 12, 23, // column: 11
5, 11, 12, // column: 12
0, 13, 17, // column: 13
1, 9, 14, // column: 14
3, 15, 16, // column: 15
16, 4, 4, 15, // column: 16
13, 17, 19, 17, // column: 17
15, 17, 19, 9, 10, // column: 18
17, 19, 20, 0, 6, 10, // column: 19
22, 10, 20, 21, // column: 20
6, 2, 10, 19, 20, 11, 21, 22, 22, 22, 22, // column: 21
// column: 22
12, 11, 12, 23, // column: 23
];
let mut p_inv = vec![0; 24];
let control = amd::Control::default();
let mut a = [[""; 24]; 24];
println!("AMD demo, with a jumbled version of the 24-by-24");
println!("Harwell/Boeing matrix, can_24:");
amd::control(&control);
// Print the input matrix.
let nz = a_p[n];
println!(
"\nJumbled input matrix: {}-by-{}, with {} entries.
Note that for a symmetric matrix such as this one, only the
strictly lower or upper triangular parts would need to be
passed to AMD, since AMD computes the ordering of A+A'. The
diagonal entries are also not needed, since AMD ignores them.
This version of the matrix has jumbled columns and duplicate
row indices.",
n, n, nz
);
for j in 0..n {
print!(
"\nColumn: {}, number of entries: {}, with row indices in
Ai [{} ... {}]:
row indices:",
j,
a_p[j + 1] - a_p[j],
a_p[j],
a_p[j + 1] - 1
);
for pj in a_p[j]..a_p[j + 1] {
let i = a_i[pj as usize];
print!(" {}", i);
}
println!();
}
// Print a character plot of the input matrix. This is only reasonable
// because the matrix is small.
println!("\nPlot of (jumbled) input matrix pattern:");
for j in 0..n {
for i in 0..n {
a[i][j] = ".";
}
for pj in a_p[j]..a_p[j + 1] {
let i = a_i[pj as usize] as usize;
a[i][j] = "X";
}
}
print!(" ");
for j in 0..n {
print!(" {}", j % 10);
}
println!();
for i in 0..n {
print!("{}: ", i);
for j in 0..n {
print!(" {}", a[i][j]);
}
println!();
}
// Print a character plot of the matrix A+A'.
println!("\nPlot of symmetric matrix to be ordered by amd::order:");
for j in 0..n {
for i in 0..n {
a[i][j] = ".";
}
}
for j in 0..n {
a[j][j] = "X";
for pj in a_p[j]..a_p[j + 1] {
let i = a_i[pj as usize] as usize;
a[i][j] = "X";
a[j][i] = "X";
}
}
print!(" ");
for j in 0..n {
print!(" {}", j % 10);
}
println!();
for i in 0..n {
print!("{}: ", i);
for j in 0..n {
print!(" {}", a[i][j]);
}
println!();
}
// Order the matrix.
let (p, _p_inv, info) = amd::order(n, &a_p, &a_i, &control).unwrap();
println!(
"return value from amd::order: {:?} (should be {:?})",
info.status,
amd::Status::OkButJumbled
);
// Print the statistics.
amd::info(&info);
if info.status != amd::Status::OkButJumbled {
println!("AMD failed");
return;
}
// Print the permutation vector, P, and compute the inverse permutation.
println!("Permutation vector:");
for k in 0..n {
// Row/column j is the kth row/column in the permuted matrix.
let j = p[k];
p_inv[j as usize] = k as i32;
print!(" {}", j);
}
println!();
println!();
println!("Inverse permutation vector:");
for j in 0..n {
let k = p_inv[j];
print!(" {}", k);
}
println!();
println!();
// Print a character plot of the permuted matrix.
println!("\nPlot of (symmetrized) permuted matrix pattern:");
for j in 0..n {
for i in 0..n {
a[i][j] = ".";
}
}
for jnew in 0..n {
let j = p[jnew] as usize;
a[jnew][jnew] = "X";
for pi in a_p[j]..a_p[j + 1] {
let i = a_i[pi as usize];
let inew = p_inv[i as usize] as usize;
a[inew][jnew] = "X";
a[jnew][inew] = "X";
}
}
print!(" ");
for j in 0..n {
print!(" {}", j % 10);
}
println!();
for i in 0..n {
print!("{}: ", i);
for j in 0..n {
print!(" {}", a[i][j]);
}
println!();
}
}
|
var unit__test_8h =
[
[ "test_all", "unit__test_8h.html#aa1c715cac916839a95e25b65b4071c4a", null ]
];
|
import qualified Duce.Test.Suites.Transducer as TransducerSuite
import Test.QuickCheck.Instances
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import Prelude
main =
defaultMain . testGroup "All" $
[ testGroup "Transducer" TransducerSuite.tests
]
|
using System.Linq;
using Dapper;
using StudentFollowingSystem.Models;
namespace StudentFollowingSystem.Data.Repositories
{
public class CounselingRepository : RepositoryBase<Counseling>
{
/// <summary>
/// Get a counseling by the appointment id joined with the appointment.
/// </summary>
/// <param name="id">The id of an appointment.</param>
public Counseling GetByAppointment(int id)
{
using (var con = ConnectionFactory.GetOpenConnection())
{
string sql = @"
select * from Counselings c
join Appointments a on a.Id = c.AppointmentId
where c.AppointmentId = @Id";
return con.Query<Counseling, Appointment, Counseling>(sql,
(c, a) =>
{
c.Appointment = a;
return c;
},
new { Id = id }).FirstOrDefault();
}
}
}
}
|
package bsnlp
/**
* Copyright 2011 The Open Source Research Group,
* University of Erlangen-Nürnberg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import de.fau.cs.osr.ptk.common.AstVisitor
import de.fau.cs.osr.utils.StringUtils.strrep
import org.sweble.wikitext.engine.PageTitle
import org.sweble.wikitext.engine.config.WikiConfig
import org.sweble.wikitext.engine.nodes.EngPage
import org.sweble.wikitext.parser.nodes._
import org.sweble.wikitext.parser.parser.LinkTargetException
import scala.collection.mutable.ArrayBuffer
class PlainText(private val config: WikiConfig) extends AstVisitor[WtNode] {
var sb = new StringBuilder()
val line = new StringBuilder()
/** True once we are no longer at the beginning of the document */
var pastBeginning = false
var needNewlines = 0
val entities = ArrayBuffer.empty[(String, String)]
/** This method is called by go() after visitation has finished */
protected override def after(node: WtNode, result: AnyRef): AnyRef = {
finishLine()
null
}
/** Fallback for all nodes that are not explicitly handled below */
def visit(n: WtNode): Unit = {
write("<")
write(n.getNodeName)
write(" />")
}
def visit(n: WtTable): Unit = {}
def visit(n: WtNodeList): Unit = iterate(n)
def visit(e: WtUnorderedList): Unit = iterate(e)
def visit(e: WtOrderedList): Unit = iterate(e)
def visit(item: WtListItem): Unit = {
write("*")
iterate(item)
wantNewLine(1)
}
def visit(p: EngPage): Unit = iterate(p)
def visit(text: WtText): Unit = write(text.getContent)
def visit(w: WtWhitespace): Unit = write(" ")
def visit(b: WtBold): Unit = iterate(b)
def visit(i: WtItalics): Unit = iterate(i)
def visit(cr: WtXmlCharRef): Unit = write(Character.toChars(cr.getCodePoint))
def visit(er: WtXmlEntityRef): Unit =
er.getResolved match {
case null => write('&' + er.getName + ';')
case ch => write(ch)
}
def visit(wtUrl: WtUrl): Unit = {
if (wtUrl.getProtocol.nonEmpty) {
write(wtUrl.getProtocol)
write(":")
}
write(wtUrl.getPath)
}
def visit(link: WtExternalLink): Unit = iterate(link.getTitle)
def getText(contentNode: WtContentNode): String =
contentNode.get(0).asInstanceOf[WtText].getContent
def visit(link: WtInternalLink): Unit = {
try {
if (link.getTarget.isResolved) {
val page = PageTitle.make(config, link.getTarget.getAsString)
if (page.getNamespace == config.getNamespace("Category")) return
}
} catch { case _: LinkTargetException =>
}
// Most likely an image or a Wikimedia link (category)
if (!getText(link.getTarget).contains(":")) {
if (link.hasTitle)
entities.append((getText(link.getTitle), getText(link.getTarget)))
else
entities.append((getText(link.getTarget), getText(link.getTarget)))
write(link.getPrefix)
if (link.hasTitle) iterate(link.getTitle) else iterate(link.getTarget)
write(link.getPostfix)
}
}
def visit(s: WtSection): Unit = {
finishLine()
val saveSb = sb
sb = new StringBuilder()
iterate(s.getHeading)
finishLine()
val title = sb.toString.trim()
sb = saveSb
write((0 until s.getLevel).map(_ => '#').mkString + " " + title)
iterate(s.getBody)
}
def visit(p: WtParagraph): Unit = iterate(p)
def visit(hr: WtHorizontalRule): Unit = {
wantNewLine(1)
write("---")
wantNewLine(2)
}
def visit(e: WtXmlElement): Unit =
if (e.getName.equalsIgnoreCase("br")) wantNewLine(1) else iterate(e.getBody)
// Hide
def visit(n: WtImageLink): Unit = {}
def visit(n: WtIllegalCodePoint): Unit = {}
def visit(n: WtXmlComment): Unit = {}
def visit(n: WtTemplate): Unit = {}
def visit(n: WtTemplateArgument): Unit = {}
def visit(n: WtTemplateParameter): Unit = {}
def visit(n: WtTagExtension): Unit = {}
def visit(n: WtPageSwitch): Unit = {}
def wantNewLine(num: Int): Unit =
if (pastBeginning && num > needNewlines) needNewlines = num
def finishLine(): Unit = {
sb.append(line.toString)
line.setLength(0)
}
def writeNewlines(num: Int): Unit = {
finishLine()
sb.append(strrep('\n', num))
needNewlines = 0
}
def write(s: String): Unit = {
if (needNewlines > 0) writeNewlines(needNewlines)
pastBeginning = true
line.append(s)
}
def write(cs: Array[Char]): Unit = write(String.valueOf(cs))
}
|
abstract class TennisGame {
void wonPoint(String playerName);
String getScore();
}
|
using MonitoringApp.XF.Managers;
using MonitoringApp.XF.ViewModels;
using System;
using System.Threading.Tasks;
using Xamarin.Forms;
namespace MonitoringApp.XF.Components.PnL
{
public class PnLPageVM : BaseViewModel
{
private PnLViewModel pnl;
public PnLViewModel PnL
{
get { return pnl; }
set
{
pnl = value;
OnPropertyChanged(nameof(PnL));
}
}
private DateTime day;
public DateTime Day
{
get { return day; }
set
{
if (day != value)
{
day = value;
OnPropertyChanged(nameof(Day));
}
}
}
private DateTime newDay;
public DateTime NewDay
{
get { return newDay; }
set
{
if (newDay != value)
{
newDay = value;
OnPropertyChanged(nameof(NewDay));
}
}
}
public Command RefreshCommand { get; private set; }
private bool isRefreshing;
public bool IsRefreshing
{
get { return isRefreshing; }
set
{
if (value != isRefreshing)
{
isRefreshing = value;
OnPropertyChanged(nameof(IsRefreshing));
}
}
}
public Command ChangeDayCommand { get; private set; }
public PnLPageVM()
{
RefreshCommand = new Command(ExecuteRefreshCommand, () => !IsRefreshing);
ChangeDayCommand = new Command(ExecuteChangeDayCommand);
Day = DateTime.Today;
NewDay = Day;
}
private async void ExecuteChangeDayCommand()
{
if (NewDay == Day)
return;
Day = NewDay;
await RefreshPnL(false);
}
private async void ExecuteRefreshCommand()
{
await RefreshPnL(true);
IsRefreshing = false;
}
public async Task RefreshPnL(bool refresh)
{
await LoadPnL(refresh);
}
private async Task LoadPnL(bool refresh)
{
PnL = (await PnLManager.Instance.LoadPnL(Day, refresh)).ToPnLViewModel();
}
}
}
|
#!/bin/bash
python -m sagas.graph.graph_manager create_lang_feeds it /pi/ai/seq2seq/ita-eng/ita.txt ./data/graph/ita_eng_feed.json
python -m sagas.graph.graph_manager create_lang_feeds pt /pi/ai/seq2seq/por-eng/por.txt ./data/graph/por_eng_feed.json
|
using Test
using TerminalClock
using TerminalClock: n2d, COLON_LARGE, setup_timer
using Dates
using TOML
# Taken from Prefernces.jl/test/runtests.jl
function with_temp_depot(f::Function)
mktempdir() do dir
saved_depot_path = copy(Base.DEPOT_PATH)
empty!(Base.DEPOT_PATH)
push!(Base.DEPOT_PATH, dir)
try
f()
finally
empty!(Base.DEPOT_PATH)
append!(Base.DEPOT_PATH, saved_depot_path)
end
end
end
# Taken from Prefernces.jl/test/runtests.jl
function activate_and_run(project_dir::String, code::String; env::Dict=Dict())
mktempdir() do dir
open(joinpath(dir, "test_code.jl"), "w") do io
write(io, code)
end
out = Pipe()
cmd =
setenv(`$(Base.julia_cmd()) --project=$(project_dir) $(dir)/test_code.jl`,
env..., "JULIA_DEPOT_PATH" => Base.DEPOT_PATH[1])
p = run(pipeline(cmd, stdout=out, stderr=out); wait=false)
close(out.in)
wait(p)
output = String(read(out))
if !success(p)
println(output)
end
@test success(p)
return output
end
end
@testset "Dial" begin
for n in 0:9
@test length(split(n2d(n).str, "\n")) == 9
@test length(split(n2d(n), "\n")) == 9
end
@test length(split(COLON_LARGE, "\n")) == 9
end
@testset "hcat" begin
d1 = hcat(n2d(1), n2d(2))
@test length(split(d1, "\n")) == 9
d2 = hcat(n2d(1), n2d(2), COLON_LARGE, n2d(3), n2d(4))
@test length(split(d2, "\n")) == 9
end
@testset "clock" begin
dt = DateTime(2021, 11, 15, 12, 34, 56, 7)
str = clock(dt)
txt = joinpath("references", "ASCII", "clock.txt")
@test str == join(readlines(txt), "\n")
end
@testset "stopwatch" begin
t = Time(12, 34, 56, 789)
str = stopwatch(t)
txt = joinpath("references", "ASCII", "stopwatch.txt")
@test str == join(readlines(txt), "\n")
end
@testset "setup_timer" begin
@test setup_timer(hour=1) == Time(1, 0, 0)
@test setup_timer(minute=2) == Time(0, 2, 0)
@test setup_timer(second=3) == Time(0, 0, 3)
@test setup_timer(hour=1, minute=2, second=3) == Time(1, 2, 3)
end
@testset "preference" begin
local_prefs_toml =
joinpath(dirname(dirname(pathof(TerminalClock))), "LocalPreferences.toml")
rm(local_prefs_toml; force=true)
with_temp_depot() do
project_dir = joinpath(dirname(@__DIR__))
# test for set_dials
activate_and_run(
project_dir,
"""
using Pkg; Pkg.instantiate()
using TerminalClock
unicodebox = joinpath(dirname(pathof(TerminalClock)), "dials", "UnicodeBox.toml")
TerminalClock.set_dials(unicodebox)
""",
)
prefs = local_prefs_toml |> TOML.parsefile
@test haskey(prefs, "TerminalClock")
@test basename(prefs["TerminalClock"]["tomlfile"]) == "UnicodeBox.toml"
activate_and_run(
project_dir,
"""
using Test
using TerminalClock, Dates
dt = DateTime(2021, 11, 15, 12, 34, 56, 7)
str = clock(dt)
txt = joinpath("references", "UnicodeBox", "clock.txt")
@test str == join(readlines(txt), "\n")
t = Time(12, 34, 56, 789)
str = stopwatch(t)
txt = joinpath("references", "UnicodeBox", "stopwatch.txt")
@test str == join(readlines(txt), "\n")
""",
)
# test for clear_dials
activate_and_run(
project_dir,
"""
using TerminalClock
TerminalClock.clear_dials()
""",
)
activate_and_run(
project_dir,
"""
using Test
using TerminalClock, Dates
dt = DateTime(2021, 11, 15, 12, 34, 56, 7)
str = clock(dt)
txt = joinpath("references", "ASCII", "clock.txt")
@test str == join(readlines(txt), "\n")
t = Time(12, 34, 56, 789)
str = stopwatch(t)
txt = joinpath("references", "ASCII", "stopwatch.txt")
@test str == join(readlines(txt), "\n")
""",
)
end # with_temp_depot
end
|
# Tutorial: Versioning
The goal of this example is to give you some hands-on experience with a basic
machine learning version control scenario: working with multiple versions of
datasets and ML models using DVC commands. We'll work with a
[tutorial](https://blog.keras.io/building-powerful-image-classification-models-using-very-little-data.html)
that [François Chollet](https://twitter.com/fchollet) put together to show how
to build a powerful image classifier using a pretty small dataset.
 _Dataset to classify cats and dogs_
> We highly recommend reading the François' tutorial itself. It's a great
> demonstration of how a general pre-trained model can be leveraged to build a
> new highly performant model, with very limited resources.
We first train a classifier model using 1000 labeled images, then we double the
number of images (2000) and retrain our model. We capture both datasets and
classifier results and show how to use `dvc checkout` along with `git checkout`
to switch between different versions.
The specific algorithm used to train and validate the classifier is not
important, and no prior knowledge of Keras is required. We'll reuse the
[script](https://gist.github.com/fchollet/f35fbc80e066a49d65f1688a7e99f069) from
the original blog post as a _black box_ – it takes some data and produces a
model file.
## Preparation
> We have tested our tutorials and examples with Python 3. We don't recommend
> using earlier versions.
You'll need [Git](https://git-scm.com) to run the commands in this tutorial.
Also, if DVC is not installed, please follow these [instructions](/doc/install)
to do so.
> If you're using Windows, please review
> [Running DVC on Windows](/doc/user-guide/running-dvc-on-windows) for important
> tips to improve your experience.
Okay! Let's first download the code and set up a Git repository:
```dvc
$ git clone https://github.com/iterative/example-versioning.git
$ cd example-versioning
```
This command pulls a <abbr>DVC project</abbr> with a single script `train.py`
that will train the model.
Let's now install the requirements. But before we do that, we **strongly**
recommend creating a
[virtual environment](https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments):
```dvc
$ virtualenv -p python3 .env
$ source .env/bin/activate
$ pip install -r requirements.txt
```
<details>
### Expand to learn about DVC internals
The repository you cloned is already DVC-initialized. There should be a `.dvc/`
directory with the `config` and `.gitignore` files. These and other files and
directories are hidden from user, as typically there's no need to interact with
them directly. See
[DVC Files and Directories](/doc/user-guide/dvc-files-and-directories) to learn
more.
</details>
## First model version
Now that we're done with preparations, let's add some data and then train the
first model. We'll capture everything with DVC, including the input dataset and
model [metrics](/doc/command-reference/metrics).
```dvc
$ dvc get https://github.com/iterative/dataset-registry \
tutorial/ver/data.zip
$ unzip -q data.zip
$ rm -f data.zip
```
> `dvc get` can use any <abbr>DVC project</abbr> hosted on a Git repository to
> find the appropriate [remote storage](/doc/command-reference/remote) and
> download <abbr>data artifacts</abbr> from it. (It works like `wget`, but for
> DVC repositories.) In this case we use
> [dataset-registry](https://github.com/iterative/dataset-registry)) as the
> source project. (Refer to [Data Registries](/doc/use-cases/data-registries)
> for more info about this setup.)
This command downloads and extracts our raw dataset, consisting of 1000 labeled
images for training and 800 labeled images for validation. In total, it's a 43
MB dataset, with a directory structure like this:
```sh
data
├── train
│ ├── dogs
│ │ ├── dog.1.jpg
│ │ ├── ...
│ │ └── dog.500.jpg
│ └── cats
│ ├── cat.1.jpg
│ ├── ...
│ └── cat.500.jpg
└── validation
├── dogs
│ ├── dog.1001.jpg
│ ├── ...
│ └── dog.1400.jpg
└── cats
├── cat.1001.jpg
├── ...
└── cat.1400.jpg
```
_(Who doesn't love ASCII directory art?)_
Let's capture the current state of this dataset with `dvc add`:
```dvc
$ dvc add data
```
This command should be used instead of `git add` on files or directories that
are too large to be put into Git: usually input datasets, models, some
intermediate results, etc. It tells Git to ignore the directory and puts it into
the <abbr>cache</abbr> (while keeping a
[file link](/doc/user-guide/large-dataset-optimization#file-link-types-for-the-dvc-cache)
to it in the <abbr>workspace</abbr>, so you can continue working the same way as
before). This is achieved by creating a simple human-readable
[DVC-file](/doc/user-guide/dvc-file-format) that serves as a pointer to the
cache.
Next, we train our first model with `train.py`. Because of the small dataset,
this training process should be small enough to run on most computers in a
reasonable amount of time (a few minutes). This command <abbr>outputs</abbr> a
bunch of files, among them `model.h5` and `metrics.csv`, weights of the trained
model, and [metrics](/doc/command-reference/metrics) history. The simplest way
to capture the current version of the model is to use `dvc add` again:
```dvc
$ python train.py
$ dvc add model.h5
```
> We manually added the model output here, which isn't ideal. The recommended
> way of capturing command outputs is with `dvc run`. More on this later.
Let's commit the current state:
```dvc
$ git add .gitignore model.h5.dvc data.dvc metrics.csv
$ git commit -m "First model, trained with 1000 images"
$ git tag -a "v1.0" -m "model v1.0, 1000 images"
```
<details>
### Expand to learn more about DVC internals
As we mentioned briefly, DVC does not commit the `data/` directory and
`model.h5` file with Git. Instead, `dvc add` stores them in the cache (usually
in `.dvc/cache`) and adds them to `.gitignore`. We then `git commit` DVC-files
that contain pointers to the cached data.
In this case we created `data.dvc` and `model.h5.dvc`. Refer to
[DVC-File Format](/doc/user-guide/dvc-file-format) to learn more about how these
files work.
</details>
> Note that executing `train.py` produced other intermediate files. This is OK,
> we will use them later.
>
> ```dvc
> $ git status
> ...
> bottleneck_features_train.npy
> bottleneck_features_validation.npy`
> ```
## Second model version
Let's imagine that our image dataset doubles in size. The next command extracts
500 new cat images and 500 new dog images into `data/train`:
```dvc
$ dvc get https://github.com/iterative/dataset-registry \
tutorial/ver/new-labels.zip
$ unzip -q new-labels.zip
$ rm -f new-labels.zip
```
For simplicity's sake, we keep the validation subset the same. Now our dataset
has 2000 images for training and 800 images for validation, with a total size of
67 MB:
```sh
data
├── train
│ ├── dogs
│ │ ├── dog.1.jpg
│ │ ├── ...
│ │ └── dog.1000.jpg
│ └── cats
│ ├── cat.1.jpg
│ ├── ...
│ └── cat.1000.jpg
└── validation
├── dogs
│ ├── dog.1001.jpg
│ ├── ...
│ └── dog.1400.jpg
└── cats
├── cat.1001.jpg
├── ...
└── cat.1400.jpg
```
We will now want to leverage these new labels and retrain the model:
```dvc
$ dvc add data
$ python train.py
$ dvc add model.h5
```
> `dvc remove` is necessary here because `model.h5` was already added with
> `dvc add` earlier, but we want to do so again. Later we'll see how `dvc run`
> eliminates this extra step.
Let's commit the second version:
```dvc
$ git add model.h5.dvc data.dvc metrics.csv
$ git commit -m "Second model, trained with 2000 images"
$ git tag -a "v2.0" -m "model v2.0, 2000 images"
```
That's it! We have a second model and dataset saved and pointers to them
committed with Git. Let's now look at how DVC can help us go back to the
previous version if we need to.
## Switching between versions
The DVC command that helps get a specific committed version of data is designed
to be similar to `git checkout`. All we need to do in our case is to
additionally run `dvc checkout` to get the right data into the
<abbr>workspace</abbr>.

There are two ways of doing this: a full workspace checkout or checkout of a
specific data or model file. Let's consider the full checkout first. It's pretty
straightforward:
```dvc
$ git checkout v1.0
$ dvc checkout
```
These commands will restore the working tree to the first snapshot we made:
code, data files, model, all of it. DVC optimizes this operation to avoid
copying data or model files each time. So `dvc checkout` is quick even if you
have large datasets, data files, or models.
On the other hand, if we want to keep the current version of the code and go
back to the previous dataset only, we can do something like this (make sure that
you don't have uncommitted changes in `data.dvc`):
```dvc
$ git checkout v1.0 data.dvc
$ dvc checkout data.dvc
```
If you run `git status` you'll see that `data.dvc` is modified and currently
points to the `v1.0` of the dataset, while code and model files are from the
`v2.0` version.
<details>
### Expand to learn more about DVC internals
As we have learned already, DVC keeps data files out of Git (by adjusting
`.gitignore`) and puts them into the cache (usually it's a `.dvc/cache`
directory inside the repository). Instead, DVC creates
[DVC-files](/doc/user-guide/dvc-file-format). These text files serve as pointers
(MD5 hash) to the cache and are version controlled by Git.
When we run `git checkout` we restore pointers (DVC-files) first, then when we
run `dvc checkout` we use these pointers to put the right data in the right
place.
</details>
## Automating capturing
`dvc add` makes sense when you need to keep track of different versions of
datasets or model files that come from source projects. The `data/` directory
above (with cats and dogs images) is a good example.
On the other hand, there are files that are the result of running some code. In
our example, `train.py` produces binary files (e.g.
`bottlneck_features_train.npy`), the model file `model.h5`, and the
[metrics](/doc/command-reference/metrics) file `metrics.csv`.
When you have a script that takes some data as an input and produces other data
<abbr>outputs</abbr>, a better way to capture them is to use `dvc run`:
> If you tried the commands in the
> [Switching between versions](#switching-between-versions) section, go back to
> the master branch code and data with:
>
> ```dvc
> $ git checkout master
> $ dvc checkout
> ```
```dvc
$ dvc remove -pf model.h5.dvc
$ dvc run -f Dvcfile \
-d train.py -d data \
-M metrics.csv \
-o model.h5 -o bottleneck_features_train.npy -o bottleneck_features_validation.npy \
python train.py
```
Similar to `dvc add`, `dvc run` creates a
[DVC-file](/doc/user-guide/dvc-file-format) named `Dvcfile` (specified using the
`-f` option). It puts all outputs (`-o`) under DVC control the same way as
`dvc add` does. Unlike `dvc add`, `dvc run` also tracks dependencies (`-d`) and
the command (`python train.py`) that was run to produce the result. We call such
a DVC-file a "stage file".
> At this point you could run `git add .` and `git commit` to save the `Dvcfile`
> stage file and its changed output files to the repository.
`dvc repro` will run `Dvcfile` if any of its dependencies (`-d`) changed. For
example, when we added new images to built the second version of our model, that
was a dependency change. It also updates outputs and puts them into the
<abbr>cache</abbr>.
To make things a little simpler: if `dvc add` and `dvc checkout` provide a basic
mechanism to version control large data files or models, `dvc run` and
`dvc repro` provide a build system for ML models, which is similar to
[Make](https://www.gnu.org/software/make/) in software build automation.
## What's next?
In this example, our focus was on giving you hands-on experience with versioning
ML models and datasets. We specifically looked at the `dvc add` and
`dvc checkout` commands. We'd also like to outline some topics and ideas you
might be interested to try next to learn more about DVC and how it makes
managing ML projects simpler.
First, you may have noticed that the script that trains the model is written in
a monolithic way. It uses the `save_bottleneck_feature` function to
pre-calculate the bottom, "frozen" part of the net every time it is run.
Features are written into files, and the intention was probably that the
`save_bottleneck_feature` can be commented out after the first run. It's not
very convenient to remember to comment/uncomment it every time dataset is
changed.
Here's where the [pipelines](/doc/command-reference/pipeline) feature of DVC
comes in handy. We touched on it briefly when we described `dvc run` and
`dvc repro`. The next step would be splitting the script into two parts and
utilizing pipelines. See [this example](/doc/tutorials/pipelines) to get
hands-on experience with pipelines, and try to apply it here. Don't hesitate to
join our [community](/chat) and ask any questions!
Another detail we only brushed upon here is the way we captured the
`metrics.csv` metrics file with the `-M` option of `dvc run`. Marking this
<abbr>output</abbr> as a metric enables us to compare its values across Git tags
or branches (for example, representing different experiments). See `dvc metrics`
and [Compare Experiments](/doc/get-started/compare-experiments) to learn more
about managing metrics with DVC.
|
/*
* This file is part of the bladeRF project:
* http://www.github.com/nuand/bladeRF
*
* Copyright (C) 2014 Nuand LLC
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef BLADERF_FPGA_H_
#define BLADERF_FPGA_H_
#include "bladerf_priv.h"
#define FPGA_IS_CONFIGURED(dev) dev->fn->is_fpga_configured(dev)
/**
* Test if a sufficiently new enough FPGA is being used and log warnings
* if not.
*
* @param dev Device handle
*
* @return 0 on success,
* BLADERF_ERR_UPDATE_FPGA if the device firmware requires a new FPGA,
* BLADERF_ERR_UPDATE_FW if the FPGA version being used requires,
* newer firmware,
* BLADERF_ERR_* values on other failures
*/
int fpga_check_version(struct bladerf *dev);
/**
* Load an FPGA bitstream from the specified RBF
*
* @param dev Device handle
* @param fpga_file Path to an RBF file
*
* @return 0 on success,
* BLADERF_ERR_TIMEOUT generally occurs when attempting to load
* the wrong size FPGA image.
* BLADERF_ERR_* values on other failures
*/
int fpga_load_from_file(struct bladerf *dev, const char *fpga_file);
/**
* Write an FPGA bitstream to the device's SPI flash. This will cause the
* FPGA to be autoloaded the next time the device is powered on.
*
* @param dev Device handle
* @param fpga_file Path to an RBF file
*
* @return 0 on success,
* BLADERF_ERR_* values on other failure
*/
int fpga_write_to_flash(struct bladerf *dev, const char *fpga_file);
#endif
|
package it.app.cie.activity.pin
import android.app.Activity
import android.content.Context
import android.content.DialogInterface
import android.os.Bundle
import android.widget.Button
import android.widget.EditText
import androidx.appcompat.app.AlertDialog
import androidx.appcompat.app.AppCompatActivity
import it.app.cie.R
import it.app.cie.lib.utils
import it.ipzs.cieidsdk.util.ActivityInfo
import it.ipzs.cieidsdk.util.ActivityType
import it.ipzs.cieidsdk.util.CieIDSdkLogger
import it.ipzs.cieidsdk.util.variables
import it.ipzs.cieidsdk.util.variables.Companion.rubrica
import java.io.File
class askPinActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_ask_pin)
variables.activityList.add(ActivityInfo(this, ActivityType.PIN, this))
val button: Button = findViewById(R.id.button_askpin_continua)
button.setOnClickListener {
clickedButton()
}
val buttonLoadRubrica: Button = findViewById(R.id.button_pin_carica)
buttonLoadRubrica.setOnClickListener { loadRubrica() }
val buttonSalvaRubrica: Button = findViewById(R.id.button_pin_salva)
buttonSalvaRubrica.setOnClickListener { salvaRubrica() }
val buttonSvuotaRubrica: Button = findViewById(R.id.button_svuotarubrica)
buttonSvuotaRubrica.setOnClickListener { svuotaRubrica() }
}
private fun svuotaRubrica() {
val dialogClickListener =
DialogInterface.OnClickListener { dialog, which ->
when (which) {
DialogInterface.BUTTON_POSITIVE -> {
try {
val file = File(this.filesDir, utils.filename_rubrica)
file.delete()
} catch (e: Exception) {
CieIDSdkLogger.log(e, true)
}
rubrica = null
}
DialogInterface.BUTTON_NEGATIVE -> {}
}
}
val builder = AlertDialog.Builder(this)
builder.setMessage("Sei sicuro di svuotare la rubrica?")
.setPositiveButton("Sì", dialogClickListener)
.setNegativeButton("No", dialogClickListener).show()
}
private fun salvaRubrica() {
if (rubrica == null)
rubrica = hashMapOf()
val textEditPin: EditText = findViewById(R.id.editTextNumber_pin)
val textEditNomeRubrica: EditText = findViewById(R.id.edittext_nome_rubrica)
if (textEditNomeRubrica.text.toString().isNotEmpty()
&& textEditPin.text.toString().length == 8
) {
rubrica?.put(textEditNomeRubrica.text.toString(), textEditPin.text.toString())
}
try {
val filename = utils.filename_rubrica
variables.activityList.last().context.openFileOutput(filename, Context.MODE_PRIVATE)
.use {
if (it != null) {
val whatToWrite = getWhatToWrite()
for (line in whatToWrite) {
it.write(line.toByteArray())
it.write("\n".toByteArray())
}
}
}
} catch (e: Exception) {
CieIDSdkLogger.log(e, true)
}
}
private fun getWhatToWrite(): MutableList<String> {
val list: MutableList<String> = mutableListOf()
if (rubrica != null) {
for (key in rubrica!!.keys) {
val line = key + utils.rubrica_delimiter + rubrica!![key]
list.add(line)
}
}
return list
}
private fun loadRubrica() {
try {
val file = File(this.filesDir, utils.filename_rubrica)
val lines = file.readLines()
rubrica = HashMap()
for (line in lines) {
if (line.isNotEmpty()) {
val lineExtracted: Pair<String, String> = extract(line)
if (rubrica?.containsKey(lineExtracted.first) == true) {
rubrica?.set(lineExtracted.first, lineExtracted.second)
} else {
rubrica?.put(lineExtracted.first, lineExtracted.second)
}
}
}
} catch (e: Exception) {
CieIDSdkLogger.log(e, true)
}
val array: Array<String>? = rubrica?.keys?.toList()?.toTypedArray()
if (array != null && array.isNotEmpty()) {
val builder: AlertDialog.Builder = AlertDialog.Builder(this)
builder.setTitle("Scegli")
builder.setItems(array) { _, which ->
// the user clicked on array[which]
val key = array[which]
val value = rubrica?.get(key)
val textEditPin: EditText = findViewById(R.id.editTextNumber_pin)
textEditPin.setText(value)
val textEditNomeRubrica: EditText = findViewById(R.id.edittext_nome_rubrica)
textEditNomeRubrica.setText(key)
}
builder.show()
}
}
private fun extract(line: String): Pair<String, String> {
val split = line.split(utils.rubrica_delimiter)
return Pair(split[0].trim(), split[1].trim())
}
private fun clickedButton() {
val textEdit: EditText = findViewById(R.id.editTextNumber_pin)
val value = textEdit.text?.toString()
if (value != null && value.length == 8) {
variables.ciePin = value
variables.activityList.removeLast()
setResult(Activity.RESULT_OK)
this.finish()
}
}
}
|
package models
import java.util.Date
case class Employee(name: String, email: String, dob: Date, companyName: String, id: Option[Int] = None)
|
import * as React from "react";
import styles from "./Content.module.scss";
import { Panel } from "./Panel";
import { useStores } from "../../hooks/useStores";
import { useObserver } from "mobx-react-lite";
import { Button } from "react-bootstrap";
import { Icon } from "../Icon";
export const Content: React.FC = () => {
const { visualizationStore: store } = useStores();
return useObserver(() => {
if (store.bufferEmpty) {
return null;
}
const panels = Object.values(store.panels).map((panel) => (
<Panel key={panel.id} panel={panel} />
));
return (
<div className={styles.content}>
{panels}
<Button
className={styles.addButton}
variant="light"
onClick={() => store.addPanel()}
>
<Icon id="plus" />
</Button>
</div>
);
});
};
|
# Table of contents
* [Introduction](README.md)
## Support
* [Log Files](support/log-files.md)
* [Updating](support/update.md)
## Installation
* [Create Media](installation/create-media.md)
* [Add-Ons](installation/add-ons.md)
* [Containers](installation/docker.md)
## Hardware
* [Allwinner](hardware/allwinner.md)
* [Amlogic](hardware/amlogic.md)
* [NXP \(iMX6\)](hardware/nxp-imx6.md)
* [Intel x86-64 \(Generic\)](hardware/intel-x86-64-generic.md)
* [Qualcomm](hardware/qualcomm.md)
* [Raspberry Pi](hardware/raspberry-pi.md)
* [Rockchip](hardware/rockchip.md)
* [Samsung \(Exynos\)](hardware/samsung-exynos.md)
* [Virtual Image](hardware/virtual.md)
## Configuration
* [Blu-Ray Playback](configuration/blu-ray.md)
* [Config.txt](configuration/config_txt.md)
* [Dual Boot](configuration/dual-boot.md)
* [EDID](configuration/edid.md)
* [Hypercon](configuration/hypercon.md)
* [Hyperion](configuration/hyperion.md)
* [Infra-Red Remotes](configuration/ir-remotes.md)
* [LCDProc](configuration/lcdproc.md)
* [Network Boot](configuration/network-boot.md)
* [Pulseaudio](configuration/pulseaudio.md)
* [Startup & Shutdown](configuration/startup-shutdown.md)
* [Useful Scripts](configuration/useful_scripts.md)
* [WireGuard](configuration/wireguard.md)
## How To
* [Add Firmware](how-to/add-firmware.md)
* [Blacklist Kernel Module](how-to/blacklist-kernel-module.md)
* [Change Bootsplash](how-to/change-bootscreen.md)
* [Force Add-on Update](how-to/force-add-on-update.md)
## Development <a id="development-1"></a>
* [Building \(Basics\)](development-1/build-basics.md)
* [Building \(Advanced\)](development-1/build-advanced.md)
* [Beginners Guide to Git](development-1/git-tutorial.md)
* [Build Commands](development-1/build-commands/README.md)
* [Build Commands \(Add-ons\)](development-1/build-commands/build-addons.md)
* [Build Commands LE 10.0.x](development-1/build-commands/build-commands-le10.md)
* [Build Commands LE 9.2.x](development-1/build-commands/build-commands-le92.md)
* [Build Commands LE 9.0.x](development-1/build-commands/build-commands-le90.md)
* [Build Commands LE 8.2.x](development-1/build-commands/build-commands-le82.md)
* [Build Commands LE 8.0.x](development-1/build-commands/build-commands-le80.md)
* [Build Commands LE 7.0.x](development-1/build-commands/build-commands-le70.md)
* [Nightly Images](development-1/jenkins-ci.md)
## Project
* [Releases](project/releases.md)
* [Forks](project/forks.md)
* [Licenses](project/license/README.md)
* [Source Code](project/license/source-code.md)
* [Documentation](project/license/documentation.md)
* [Mirrors](project/mirrors.md)
|
package org.learning.server.entity
import com.fasterxml.jackson.annotation.JsonIgnore
import org.learning.server.entity.base.OrganizationBase
import java.io.Serializable
import java.util.*
import javax.persistence.*
/**
* 表示一个组织,一个组织下有若干的部门。
*/
@Entity
@Deprecated("")
class Organization: Serializable {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
var id: Int = -1
var name: String = ""
var description: String = ""
@OneToMany(targetEntity = Department::class, mappedBy = "organization")
var departments: MutableCollection<Department> = LinkedList()
@OneToMany(targetEntity = UserOrganization::class, mappedBy = "organization")
@JsonIgnore
var userOrganizations: MutableCollection<UserOrganization> = LinkedList()
@OneToMany(targetEntity = UserOrganizationInvitation::class, mappedBy = "organization")
@JsonIgnore
var userOrganizationInvitations: MutableCollection<UserOrganizationInvitation> = LinkedList()
// calculated property
val users get() = userOrganizations.map { it.user.toBase() }
// plus 各个部门的user,这里使用到了高阶函数
.plus(userOrganizations.flatMap { it.organization.departments }
.flatMap { it.users }).distinct()
val owner get() = userOrganizations.find { it.level == 2 }?.user?.toBase()
// partition function
fun toBase(): OrganizationBase {
return OrganizationBase().apply {
id = this@Organization.id
name = this@Organization.name
description = this@Organization.description
}
}
/**
* 获取结构信息(包括是否处于邀请状态)
*/
fun toStructInfo(user: User): OrganizationBase {
return OrganizationBase().apply {
id = this@Organization.id
name = this@Organization.name
description = this@Organization.description
owner = this@Organization.owner
departments = this@Organization.departments.map { it.toBase() }
state = this@Organization.userOrganizationInvitations.find { it.user.uid == user.uid && it.active }?.state
}
}
}
|
"""This file contains the custom exceptions necessary for the game."""
class Impossible(Exception):
"""Attempted the impossible"""
|
import {Page} from "ui/page";
import * as trace from "trace";
import tests = require("../testRunner");
trace.enable();
trace.addCategories(trace.categories.Test + "," + trace.categories.Error);
let page = new Page();
page.id = "mainPage";
page.on(Page.navigatedToEvent, onNavigatedTo);
function onNavigatedTo(args) {
args.object.off(Page.navigatedToEvent, onNavigatedTo);
setTimeout(function () {
tests.runAll();
}, 10);
}
export function createPage() {
return page;
}
|
<?php
/*
* This file is part of prooph/link.
* (c) prooph software GmbH <contact@prooph.de>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* Date: 06.12.14 - 21:26
*/
namespace Application;
use Application\Service\ActionControllerInitializer;
use Application\Service\ControllerTranslatorProvider;
use Zend\Mvc\Controller\ControllerManager;
use Zend\Mvc\ModuleRouteListener;
use Zend\Mvc\MvcEvent;
class Module
{
public function onBootstrap(MvcEvent $e)
{
$eventManager = $e->getApplication()->getEventManager();
$moduleRouteListener = new ModuleRouteListener();
$moduleRouteListener->attach($eventManager);
}
public function getConfig()
{
return include __DIR__ . '/config/module.config.php';
}
public function getAutoloaderConfig()
{
return array(
'Zend\Loader\StandardAutoloader' => array(
'namespaces' => array(
__NAMESPACE__ => __DIR__ . '/src',
),
),
);
}
}
|
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace PacmanWinForms
{
public partial class frmScores : Form
{
DataTable dtDriveFiles;
private int curScore, coinsAdded;
Difficulty difficulty;
public frmScores()
{
InitializeComponent();
}
private void frmHighScores_Load(object sender, EventArgs e)
{
txtName.Enabled = false;
btnAdd.Enabled = false;
dgvInit();
}
private void dgvInit()
{
dtDriveFiles = null;
dtDriveFiles = GTools.ToDataTable<HighScores>(HighScoreList.hsList);
dgvHighScores.DataSource = dtDriveFiles;
}
public void parseScore(int curScore, Difficulty dif, int coinsAdded)
{
if (InvokeRequired)
{
Invoke(new Action<int, Difficulty, int>(parseScore), new object[] { curScore, dif, coinsAdded});
return;
}
txtName.Enabled = true;
btnAdd.Enabled = true;
txtName.Focus();
difficulty = dif;
this.coinsAdded = coinsAdded;
this.curScore = curScore;
lblScore.Text = curScore.ToString();
lblCoins.Text = coinsAdded.ToString();
lblDifficulty.Text = difficulty.ToString();
lblLocationInit();
}
private void lblLocationInit()
{
lblScore.Location = new Point((pnlScore.Width - lblScore.Width) / 2, 26);
lblCoins.Location = new Point((pnlCoin.Width - lblCoins.Width) / 2, 26);
lblDifficulty.Location = new Point((pnlDiff.Width - lblDifficulty.Width) / 2, 26);
}
private void btnAdd_Click(object sender, EventArgs e)
{
HighScoreList.add(new HighScores(txtName.Text, curScore, coinsAdded, difficulty));
txtName.Enabled = false;
btnAdd.Enabled = false;
dgvInit();
this.Close();
}
private void frmScores_FormClosing(object sender, FormClosingEventArgs e)
{
if(btnAdd.Enabled) HighScoreList.add(new HighScores(txtName.Text, curScore, coinsAdded, difficulty));
GTools.saveHighScores(GTools.saveFile);
}
}
}
|
#!/bin/sh
# (C) Copyright 2005- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
#
# In applying this licence, ECMWF does not waive the privileges and immunities granted to it by
# virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction.
#
. ./include.sh
#Define a common label for all the tmp files
label="bufr_read_tempf_f"
tempOut=temp.${label}.txt
tempRef=temp.${label}.ref
rm -f $tempRef $tempOut
# The path to the BUFR file is hard coded in the example
${examples_dir}/eccodes_f_bufr_read_tempf > $tempOut
# Check the results
cat > $tempRef<<EOF
Statid: MISSING
Ob: 1 16 245 20151202 110419 41.670 12.450 35.0 36.0 114 64
WMO list lat, lon, ht: 41.670 12.450 0.0
Radiosonde number/software: J2063451 MW41 2.3.0
level dtime dlat dlon pressure geopotH airTemp dewPtT windDir windSp signif
2 44.0 -0.000 0.001 100000.0 243.0 286.16 5.60 147.00 0.20 65536
3 169.0 -0.001 0.000 92500.0 892.0 280.94 2.87 68.00 3.00 65536 tjump
7 316.0 -0.000 -0.002 85000.0 1593.0 283.60 14.37 60.00 4.00 65536
14 630.0 -0.003 0.003 70000.0 3187.0 274.99 30.49 296.00 5.40 65536
24 1117.0 -0.005 0.018 50000.0 5828.0 258.61 11.68 310.00 3.30 65536 tjump
29 1411.0 -0.011 0.030 40000.0 7482.0 247.70 27.79 273.00 2.20 65536
32 1761.0 -0.004 0.033 30000.0 9493.0 230.33 13.63 157.00 3.30 65536
37 1967.0 0.003 0.028 25000.0 10696.0 220.52 16.47 157.00 5.20 65536
42 2234.0 0.016 0.017 20000.0 12098.0 208.45 9.01 119.00 8.40 65536
53 2578.0 0.015 0.018 15000.0 13835.0 207.93 17.32 316.00 8.70 65536
64 3087.0 -0.021 0.036 10000.0 16310.0 208.29 24.46 340.00 9.60 79872
EOF
#diff -w $tempRef $tempOut
# Clean up
rm -f $tempRef $tempOut
|
package com.lzhlyle.leetcode.week.no189;
public class RotateArray_BruteForce {
public void rotate(int[] nums, int k) {
int len = nums.length;
k %= len;
while (k-- > 0) {
int forward = nums[len - 1];
System.arraycopy(nums, 0, nums, 1, len - 1);
nums[0] = forward;
}
}
}
|
curl -OL https://github.com/derailed/k9s/releases/download/v0.24.2/k9s_Linux_x86_64.tar.gz
mkdir -p tmp/ && tar -C tmp/ -xvf k9s_Linux_x86_64.tar.gz
rm k9s_Linux_x86_64.tar.gz
mv tmp/k9s /usr/local/bin
rm -rf tmp/
|
<?php
declare(strict_types=1);
namespace OpenIDConnect\Claims;
interface Claimable
{
/**
* @return string[]
*/
public function getClaims(): array;
}
|
#!/bin/bash
echo "Killing process on port 8080"
sudo kill -9 $(sudo lsof -t -i:8080)
echo "Done killing the process"
|
package me.hellofwy.v2ex.domain.interactors.impl;
import java.io.IOException;
import me.hellofwy.v2ex.domain.executor.Executor;
import me.hellofwy.v2ex.domain.executor.MainThread;
import me.hellofwy.v2ex.domain.interactors.GetMemberInfoInteractor;
import me.hellofwy.v2ex.domain.interactors.base.AbstractInteractor;
import me.hellofwy.v2ex.domain.model.MemberModel;
import me.hellofwy.v2ex.domain.repository.TopicRepository;
/**
* This is an interactor boilerplate with a reference to a model repository.
* <p/>
*/
public class GetMemberInfoInteractorImpl extends AbstractInteractor
implements GetMemberInfoInteractor {
private final String mQueryParameter;
private final QueryType mQueryType;
private Callback mCallback;
private TopicRepository mTopicRepository;
public GetMemberInfoInteractorImpl(Executor threadExecutor,
MainThread mainThread,
Callback callback,
TopicRepository topicRepository,
String queryParameter,
QueryType queryType) {
super(threadExecutor, mainThread);
mCallback = callback;
mTopicRepository = topicRepository;
mQueryParameter = queryParameter;
mQueryType = queryType;
}
@Override
public void run() {
try {
final MemberModel member;
if (mQueryType == QueryType.ID) {
member = mTopicRepository.getMemberInfoById(mQueryParameter);
} else {
member = mTopicRepository.getMemberInfoByName(mQueryParameter);
}
mMainThread.post(new Runnable() {
@Override
public void run() {
mCallback.onGetMemberInfo(member);
}
});
} catch (final IOException e) {
mMainThread.post(new Runnable() {
@Override
public void run() {
mCallback.onGetMemberInfoError(e.getMessage());
}
});
}
}
}
|
// +build !amd64
// Package etime provides extended time primitives for systems where available.
package etime
import "time"
// Now returns the current nanosecond.
func Now() int64 {
return time.Now().UnixNano()
}
// Duration returns the time duration between two etime.Now calls, ignoring the
// second argument.
func Duration(delta, _ int64) time.Duration {
return time.Duration(delta)
}
|
#!/usr/bin/perl
#
# tofascn.pl v1.7.4
# vim: ts=2 nowrap
#
#
# Usage: ./tofascn.pl <FASC-N in 200-bit format>
#
# Converts 200-bit format FASC-N to human-readble form
#
use strict;
use warnings;
use FindBin;
use lib $FindBin::Bin . '/../lib/perl/lib';
use Getopt::Long qw(GetOptions);
use LogParser qw(&cook &clean);
my $raw = "";
# Check args
GetOptions("--raw|r=s" => \$raw);
if (length $raw == 0) {
die "Usage: $0 --raw=<raw fascn>\n";
}
$raw = clean($raw);
my $length = length $raw;
die "Invalid input length ($length)\n" if (length $raw != 50);
my $fascn = cook($raw);
$length = length $fascn;
die "Invalid output length ($fascn)\n" if (length $fascn != 32);
print "$fascn\n";
exit 0;
__END__
|
/*
* @flow
*/
import { DateTime } from 'luxon';
import type { UUID } from 'lattice';
const SEARCH_PREFIX = 'entity';
const getSearchTerm = (
propertyTypeId :UUID,
searchString :string
) => `${SEARCH_PREFIX}.${propertyTypeId}:"${searchString}"`;
const getSearchTermNotExact = (
propertyTypeId :UUID,
searchString :string
) => `${SEARCH_PREFIX}.${propertyTypeId}:${searchString}`;
const getUTCDateRangeSearchString = (
propertyTypeId :UUID,
timeUnits :any,
startDate ? :DateTime,
endDate ? :DateTime
) => {
let start :string = '*';
if (startDate && startDate.isValid) start = startDate.startOf(timeUnits).toUTC().toISO();
let end :string = '*';
if (endDate && endDate.isValid) end = endDate.endOf(timeUnits).toUTC().toISO();
const dateRangeString = `[${start} TO ${end}]`;
return getSearchTermNotExact(propertyTypeId, dateRangeString);
};
export {
getSearchTerm,
getSearchTermNotExact,
getUTCDateRangeSearchString,
};
|
#!/bin/sh
g++ -fpic -Wall -O3 -o fastmerkle.sox fastmerkle.cc -std=c++11 -shared -lpthread && \
sudo cp fastmerkle.sox /usr/local/bin
|
#if !defined _grab_lib_hack_h
#define _grab_lib_hack_h
#include <basic/bool.h>
namespace omega {
bool lib_hack(Conjunct *c, DNF *d, Rel_Body *rb);
extern bool WANT_SEGM_FAULT;
class grab_lib_hack {
public:
grab_lib_hack() { if (WANT_SEGM_FAULT)
lib_hack(NULL, NULL,NULL);
}
};
static grab_lib_hack hack_hack;
} // end of namespace omega
#endif
|
/**
* Created by iamhosseindhv on 04/10/2017.
*/
$(document).ready(function () {
const newcol = $('.newcol');
const column4 = $('.column-4');
const column6Header = $('.column-6-header');
const listingScrollSummary = $('.listing-scroll-summary--inner');
const imageHeight = $('.listing-images').height() + $('.top-head').height();
const newcolHeight = newcol.outerHeight();
const column4Height = column4.height();
const windowWidth = window.innerWidth;
$(document).on('scroll', onScroll);
$(document).scroll(function () {
var newcolOffset = newcol.offset().top + newcolHeight;
var column4Offset = column4.offset().top + column4Height;
//for listing header
if (windowWidth <= 1127 && windowWidth > 744){
if (window.scrollY > column4Offset){ fixListingHeader() }
else { unfixListingHeader() }
} else {
if (window.scrollY > imageHeight){ fixListingHeader() }
else { unfixListingHeader() }
}
//for booking info window
if (window.innerWidth > 1128){
if (window.scrollY > imageHeight){
if (newcolOffset < column4Offset-1) {
newcol.addClass("column-4--fixed");
} else {
newcol.removeClass("column-4--fixed");
newcol.addClass('stick-bottom');
listingScrollSummary.css('padding-top', '12px').css('background-color', '#595959');
const windowScroll = $(window).scrollTop() + newcolHeight;
if (newcolOffset > windowScroll) {
newcol.removeClass('stick-bottom');
newcol.addClass("column-4--fixed");
listingScrollSummary.css('padding-top', '50px').css('background-color', '#fff');
}
}
} else {
newcol.removeClass("column-4--fixed");
newcol.removeClass('stick-bottom');
}
}
});
//when listing navbar item clicked
$('.header a').click(function() {
scrollToId(this);
});
//favourite button hovered
$('.heart-btn')
.mouseover(function() {
$('.heart').css('fill', 'red');
})
.mouseout(function() {
$('.heart').css('fill', 'transparent');
});
//share button hovered
$('.share-btn')
.mouseover(function() {
$('.share').css('fill', '#484848');
})
.mouseout(function() {
$('.share').css('fill', 'transparent');
});
//favourite button clicked
$('#favourite-listing').click(function(e){
e.preventDefault();
const listingid = $(".column-6").attr('id');
$.ajax({
type: 'POST',
url: 'https://rentaly.herokuapp.com/edit/add-listing-to-favourite',
data: {
listingid: listingid
},
success: function(data) {
if (data.presentLogin) {
$('.base-layer').css('display', 'flex'); //present login then
} else {
if (data.favourited) {
successfullyFavourited();
} else {
//already favourited
alert(data.message);
}
}
},
error: function(data) {
console.log(data)
}
});
});
//share button clicked
$('#share-listing').click(function(e){
e.preventDefault();
//you should display a window with some options for sharing
});
//date picker stuff
const customOptions = {
placeholder: "روز / ماه / سال",
twodigit: true,
closeAfterSelect: true,
buttonsColor: "blue",
forceFarsiDigits: true,
markToday: true,
markHolidays: true,
highlightSelectedDay: true,
sync: true,
gotoToday: true,
};
kamaDatepicker('checkin', customOptions);
kamaDatepicker('checkout', customOptions);
});
function fixListingHeader() {
$('.waste').css('display', 'block');
// $('.waste').css('opacity', '1');
$('.header').addClass('fixed');
$('.rest').css('padding-top', '58px');
}
function unfixListingHeader() {
$('.waste').css('display', 'none');
// $('.waste').css('opacity', '0');
$('.header').removeClass('fixed');
$('.rest').css('padding-top', '0');
}
function scrollToId(self) {
const goTo = $(self).attr('href');
const id = $(self).attr('id');
// Desired offset, in pixels
const offset = $('.header').height() + 25;
const scrollTime = 500;
if (goTo === '#details'){
$('html, body').animate({
scrollTop: $(goTo).offset().top - offset + 18
}, scrollTime);
} else {
$('html, body').animate({
scrollTop: $(goTo).offset().top - offset
}, scrollTime);
}
activateLink(id);
}
function successfullyFavourited() {
$('.heart-btn').css('fill', 'red !important');
$('#favourite-detail').text('Favourited');
$('.heart-btn')
.off('mouseover')
.off('mouseout');
}
function activateLink(id) {
$('.header-main a.active').removeClass('active');
$('#' + id).addClass('active');
}
var inview = [];
var prevView = 0;
var currentView = 0;
function onScroll(){
inview = [];
var docViewTop = $(window).scrollTop();
var docViewBottom = docViewTop + $(window).height();
prevView = currentView;
currentView = docViewTop;
var detailTop = $('#details').offset().top;
var detailBottom = detailTop + $('#details').height();
var hostTop = $('#host').offset().top;
var hostBottom = hostTop + $('#host').height();
var reviewTop = $('#reviews').offset().top;
var reviewBottom = reviewTop + $('#reviews').height();
var locationTop = $('#location').offset().top;
var locationBottom = locationTop + $('#location').height();
if ((detailBottom <= docViewBottom) && (detailTop >= docViewTop)) {
inview.push('first-link');
}
if ((hostBottom <= docViewBottom) && (hostTop >= docViewTop)) {
inview.push('second-link');
}
if ((reviewBottom <= docViewBottom) && (reviewTop >= docViewTop)) {
inview.push('third-link');
}
if ((locationBottom <= docViewBottom) && (locationTop >= docViewTop)) {
inview.push('fourth-link');
}
activateLink(inview[0]);
// console.log(inview);
}
var map;
function initMap() {
var styles = {
hide: [{
featureType: 'poi.business',
stylers: [{visibility: 'off'}]
},
{
featureType: 'transit',
elementType: 'labels.icon',
stylers: [{visibility: 'off'}]
}
]
};
map = new google.maps.Map(document.getElementById('map'), {
// center: {lat: 35.6891975, lng: 51.3889735},
zoom: 12,
mapTypeId: 'roadmap',
disableDefaultUI: true,
fullscreenControl: false,
mapTypeControl: true,
mapTypeControlOptions: {
style: google.maps.MapTypeControlStyle.HORIZONTAL_BAR,
mapTypeIds: ['roadmap','satellite'],
position: google.maps.ControlPosition.RIGHT_BOTTOM
},
zoomControl: true,
zoomControlOptions: {
position: google.maps.ControlPosition.RIGHT_TOP
}
});
map.setOptions({styles: styles['hide']});
}
function newMarker(lat, lng) {
const point = new google.maps.LatLng(
parseFloat(lat),
parseFloat(lng));
const cityCircle = new google.maps.Circle({
strokeColor: '#484848',
strokeOpacity: .8,
strokeWeight: 2,
fillColor: '#ad974f',
fillOpacity: .4,
map: map,
center: point,
radius: 3000
});
map.setCenter(point);
}
|
#
# Helpers for the cookbook
#
# Author:: Andrew Coulton (<andrew@ingenerator.com>)
#
module Ingenerator
module Mysql
# Assigned as the default privileges for a mysql_local_admin
# The ||= is to prevent warnings when chefspec reloads the helper in each run
DEFAULT_ADMIN_PRIVS ||= [
'CREATE TEMPORARY TABLES',
'DELETE',
'EXECUTE',
'FILE',
'INSERT',
'LOCK TABLES',
'PROCESS',
'SELECT',
'SHOW DATABASES',
'SHOW VIEW',
'UPDATE',
'USAGE'
].freeze
module Helpers
# Gets the current root account connection details as a hash
def mysql_root_connection
{
username: 'root',
socket: node['mysql']['default_server_socket']
}
end
end
end
end
# Make the helpers available in all recipes
Chef::Node.send(:include, Ingenerator::Mysql::Helpers)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.