text
stringlengths 27
775k
|
|---|
module Rubillow
  module Models
    # Common data for responses containing zpid's
    module Zpidable
      # @return [String] ZPID of property
      attr_accessor :zpid

      protected

      # Extracts the ZPID from a parsed XML API response and stores it.
      #
      # The most specific locations are probed first, falling back to any
      # +<zpid>+ element anywhere in the document.
      #
      # @param xml [#xpath] parsed XML document (Nokogiri-like interface)
      # @return [String] the extracted ZPID (also assigned to +@zpid+)
      # @private
      def extract_zpid(xml)
        selector = ['//response/zpid', '//result/zpid'].find { |path|
          !xml.xpath(path).empty?
        } || '//zpid'
        @zpid = xml.xpath(selector).first.text
      end
    end
  end
end
|
mod client;
pub mod code;
mod game;
pub mod shift_code;
pub use crate::{
client::Client,
code::Code,
game::Game,
shift_code::ShiftCode,
};
/// Library Result Type
pub type OrczResult<T> = Result<T, OrczError>;
/// Library Error Type
#[derive(Debug, thiserror::Error)]
pub enum OrczError {
    /// Reqwest HTTP Error
    #[error("{0}")]
    Reqwest(#[from] reqwest::Error),
    /// Invalid HTTP StatusCode
    ///
    /// Presumably raised when a fetch completes with an unexpected status —
    /// confirm at the construction site in `client`.
    #[error("invalid status '{0}'")]
    InvalidStatus(reqwest::StatusCode),
    /// Error Parsing a Table
    ///
    /// This is usually a library error; update this lib.
    #[error("invalid table")]
    TableParse,
    /// a tokio task failed
    #[error("{0}")]
    TokioJoin(#[from] tokio::task::JoinError),
}
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): these are live integration tests — each one constructs a
    // real `Client` and fetches shift codes over the network, so they will
    // fail when offline or when the upstream site changes. `dbg!` dumps the
    // fetched codes for manual inspection; the only assertion is `unwrap()`.

    #[tokio::test]
    async fn it_works_bl() {
        let client = Client::new();
        let codes = client.get_shift_codes(Game::Borderlands).await.unwrap();
        dbg!(codes);
    }

    #[tokio::test]
    async fn it_works_bl2() {
        let client = Client::new();
        let codes = client.get_shift_codes(Game::Borderlands2).await.unwrap();
        dbg!(codes);
    }

    #[tokio::test]
    async fn it_works_blps() {
        let client = Client::new();
        let codes = client
            .get_shift_codes(Game::BorderlandsPreSequel)
            .await
            .unwrap();
        dbg!(codes);
    }

    #[tokio::test]
    async fn it_works_bl3() {
        let client = Client::new();
        let codes = client.get_shift_codes(Game::Borderlands3).await.unwrap();
        dbg!(codes);
    }
}
|
## Signs a file with the current user's first code-signing certificate.
param([string] $file = $(throw "Please specify a filename."))

# Collect matching certificates up front so we can fail with a clear message
# instead of passing $null to Set-AuthenticodeSignature.
$certs = @(Get-ChildItem cert:\CurrentUser\My -CodeSigningCert)
if ($certs.Count -eq 0) {
    throw "No code-signing certificate found in cert:\CurrentUser\My."
}
Set-AuthenticodeSignature $file $certs[0]
|
#![no_std]
#![no_main]
#![feature(naked_functions)]
#![feature(alloc_error_handler)]
#![feature(llvm_asm)]
#![feature(asm)]
#![feature(global_asm)]
mod hal;
#[cfg(not(test))]
use core::alloc::Layout;
#[cfg(not(test))]
use core::panic::PanicInfo;
use linked_list_allocator::LockedHeap;
use rustsbi::{print, println};
use riscv::register::{
mcause::{self, Exception, Interrupt, Trap},
medeleg, mepc, mhartid, mideleg, mie, mip, misa::{self, MXL},
mstatus::{self, MPP},
mtval,
mtvec::{self, TrapMode},
};
#[global_allocator]
static ALLOCATOR: LockedHeap = LockedHeap::empty();
/// Panic handler: reports the panicking hart, then schedules a system
/// shutdown through the HAL reset device. The trailing loop satisfies the
/// `!` return type in case the reset call ever returns.
#[cfg(not(test))]
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
    let hart_id = mhartid::read();
    // Output looks like: "[rustsbi-panic] hart 0 panicked at ...".
    println!("[rustsbi-panic] hart {} {}", hart_id, info);
    println!("[rustsbi-panic] system shutdown scheduled due to RustSBI panic");
    use rustsbi::Reset;
    hal::Reset.system_reset(
        rustsbi::reset::RESET_TYPE_SHUTDOWN,
        rustsbi::reset::RESET_REASON_SYSTEM_FAILURE
    );
    loop { }
}
/// Allocation-failure handler: there is no way to recover inside the SBI
/// firmware, so spin forever.
#[cfg(not(test))]
#[alloc_error_handler]
fn oom(_layout: Layout) -> ! {
    loop {}
}
lazy_static::lazy_static! {
    // Highest hart id in the system; written only once at boot (from the
    // device tree in `main`), read when cross-hart software interrupts occur.
    pub static ref MAX_HART_ID: spin::Mutex<usize> =
        spin::Mutex::new(compiled_max_hartid());
}
// #[export_name = "_mp_hook"]
/// Multi-processor boot hook.
///
/// Returns `true` on hart 0 (the boot hart, which performs initialization).
/// Every other hart clears its pending IPI in the CLINT, then sleeps in
/// `wfi` until a machine software interrupt arrives, and returns `false`.
pub extern "C" fn mp_hook() -> bool {
    let hartid = mhartid::read();
    if hartid == 0 {
        true
    } else {
        use riscv::asm::wfi;
        use hal::Clint;
        unsafe {
            let mut clint = Clint::new(0x200_0000 as *mut u8);
            // Clear IPI
            clint.clear_soft(hartid);
            // Start listening for software interrupts
            mie::set_msoft();
            loop {
                wfi();
                if mip::read().msoft() {
                    break;
                }
            }
            // Stop listening for software interrupts
            mie::clear_msoft();
            // Clear IPI
            clint.clear_soft(hartid);
        }
        false
    }
}
/// Assembly entry point (`_start`).
///
/// Per the embedded asm: harts whose id exceeds `_max_hart_id` park in a
/// `wfi` loop (`_start_abort`); the rest carve out a per-hart stack of
/// `_hart_stack_size` bytes below `_stack_start` (multiplying by hart id,
/// using a shift-add loop when the M extension is absent), zero `mscratch`,
/// and jump to `main`.
#[export_name = "_start"]
#[link_section = ".text.entry"] // this is stable
#[naked]
// extern "C" for Rust ABI is by now unsupported for naked functions
unsafe extern "C" fn start() -> ! {
    asm!(
    "
    csrr a2, mhartid
    lui t0, %hi(_max_hart_id)
    add t0, t0, %lo(_max_hart_id)
    bgtu a2, t0, _start_abort
    la sp, _stack_start
    lui t0, %hi(_hart_stack_size)
    add t0, t0, %lo(_hart_stack_size)
    .ifdef __riscv_mul
    mul t0, a2, t0
    .else
    beqz a2, 2f // Jump if single-hart
    mv t1, a2
    mv t2, t0
    1:
    add t0, t0, t2
    addi t1, t1, -1
    bnez t1, 1b
    2:
    .endif
    sub sp, sp, t0
    csrw mscratch, zero
    j main
    _start_abort:
    wfi
    j _start_abort
    ", options(noreturn))
}
/// Rust-level entry point, jumped to from the assembly `_start`.
///
/// Hart 0 initializes the heap, legacy stdio, IPI/timer/reset devices and
/// prints the banner; every hart then delegates S-level traps, enables
/// external/software machine interrupts, and transfers control to supervisor
/// mode via `s_mode_start`.
#[export_name = "main"]
fn main() -> ! {
    // Ref: https://github.com/qemu/qemu/blob/aeb07b5f6e69ce93afea71027325e3e7a22d2149/hw/riscv/boot.c#L243
    // QEMU leaves the device-tree physical address in register a1.
    let dtb_pa = unsafe {
        let dtb_pa: usize;
        llvm_asm!("":"={a1}"(dtb_pa));
        dtb_pa
    };
    if mp_hook() {
        // init
    }
    /* setup trap */
    extern "C" {
        fn _start_trap();
    }
    unsafe {
        mtvec::write(_start_trap as usize, TrapMode::Direct);
    }
    /* main function start */
    // Linker-provided symbols marking the heap's start and size.
    extern "C" {
        static mut _sheap: u8;
        static _heap_size: u8;
    }
    if mhartid::read() == 0 {
        let sheap = unsafe { &mut _sheap } as *mut _ as usize;
        let heap_size = unsafe { &_heap_size } as *const u8 as usize;
        unsafe {
            ALLOCATOR.lock().init(sheap, heap_size);
        }
        // These parameters would not really need to be hand-written; they
        // could be generated directly from a PAC (peripheral access crate).
        let serial = hal::Ns16550a::new(0x10000000, 0, 11_059_200, 115200);
        // use through macro
        use rustsbi::legacy_stdio::init_legacy_stdio_embedded_hal;
        init_legacy_stdio_embedded_hal(serial);
        let clint = hal::Clint::new(0x2000000 as *mut u8);
        use rustsbi::init_ipi;
        init_ipi(clint);
        // todo: do not create two instances
        let clint = hal::Clint::new(0x2000000 as *mut u8);
        use rustsbi::init_timer;
        init_timer(clint);
        use rustsbi::init_reset;
        init_reset(hal::Reset);
    }
    // Delegate all of S level's interrupts (and the listed exceptions) to S mode.
    unsafe {
        mideleg::set_sext();
        mideleg::set_stimer();
        mideleg::set_ssoft();
        medeleg::set_instruction_misaligned();
        medeleg::set_breakpoint();
        medeleg::set_user_env_call();
        medeleg::set_instruction_page_fault();
        medeleg::set_load_page_fault();
        medeleg::set_store_page_fault();
        medeleg::set_instruction_fault();
        medeleg::set_load_fault();
        medeleg::set_store_fault();
        mie::set_mext();
        // deliberately not enabling mie::set_mtimer here
        mie::set_msoft();
    }
    if mhartid::read() == 0 {
        println!("[rustsbi] RustSBI version {}", rustsbi::VERSION);
        println!("{}", rustsbi::LOGO);
        println!("[rustsbi] Platform: QEMU (Version {})", env!("CARGO_PKG_VERSION"));
        let isa = misa::read();
        if let Some(isa) = isa {
            let mxl_str = match isa.mxl() {
                MXL::XLEN32 => "RV32",
                MXL::XLEN64 => "RV64",
                MXL::XLEN128 => "RV128",
            };
            print!("[rustsbi] misa: {}", mxl_str);
            for ext in 'A'..='Z' {
                if isa.has_extension(ext) {
                    print!("{}", ext);
                }
            }
            println!("");
        }
        println!("[rustsbi] mideleg: {:#x}", mideleg::read().bits());
        println!("[rustsbi] medeleg: {:#x}", medeleg::read().bits());
        // Count harts from the device tree and publish the result globally.
        let mut guard = MAX_HART_ID.lock();
        *guard = unsafe { count_harts(dtb_pa) };
        drop(guard);
        println!("[rustsbi] Kernel entry: 0x80200000");
    }
    unsafe {
        // Set the S-mode entry point and target privilege, then hand over
        // control through RustSBI.
        mepc::write(s_mode_start as usize);
        mstatus::set_mpp(MPP::Supervisor);
        rustsbi::enter_privileged(mhartid::read(), dtb_pa)
    }
}
/// First code executed in S mode: loads the kernel entry address from the
/// inline `.dword 0x80200000` literal and jumps to it.
#[naked]
#[link_section = ".text"] // must add link section for all naked functions
unsafe extern "C" fn s_mode_start() -> ! {
    asm!("
    1: auipc ra, %pcrel_hi(1f)
    ld ra, %pcrel_lo(1b)(ra)
    jr ra
    .align 3
    1: .dword 0x80200000
    ", options(noreturn))
}
/// Counts the harts described by the flattened device tree at `dtb_pa`,
/// falling back to the compile-time maximum when the blob cannot be read.
unsafe fn count_harts(dtb_pa: usize) -> usize {
    use device_tree::{DeviceTree, Node};
    const DEVICE_TREE_MAGIC: u32 = 0xD00DFEED;
    // Walk the "cpu_map" structure: its children are processor clusters,
    // and each cluster's children each represent one processor core.
    fn enumerate_cpu_map(cpu_map_node: &Node) -> usize {
        let mut tot = 0;
        for cluster_node in cpu_map_node.children.iter() {
            let name = &cluster_node.name;
            let count = cluster_node.children.iter().count();
            // Prints e.g.: "Hart count: cluster0 with 2 cores".
            // Adjust via the `threads := "2"` setting in the justfile.
            println!("[rustsbi-dtb] Hart count: {} with {} cores", name, count);
            tot += count;
        }
        tot
    }
    // Minimal FDT header prefix: big-endian magic plus total blob size.
    #[repr(C)]
    struct DtbHeader { magic: u32, size: u32 }
    let header = &*(dtb_pa as *const DtbHeader);
    // from_be converts from big endian (the FDT on-disk byte order).
    let magic = u32::from_be(header.magic);
    if magic == DEVICE_TREE_MAGIC {
        let size = u32::from_be(header.size);
        // Borrow the blob as a byte slice, load it, and walk the tree.
        let data = core::slice::from_raw_parts(dtb_pa as *const u8, size as usize);
        if let Ok(dt) = DeviceTree::load(data) {
            if let Some(cpu_map) = dt.find("/cpus/cpu-map") {
                return enumerate_cpu_map(cpu_map)
            }
        }
    }
    // If the DTB layout is not readable (no /cpus/cpu-map), fall back to the
    // compiled-in default hart count.
    let ans = compiled_max_hartid();
    println!("[rustsbi-dtb] Could not read '/cpus/cpu-map' from 'dtb_pa' device tree root; assuming {} cores", ans);
    ans
}
/// Returns the compile-time maximum hart id by materializing the address
/// value of the linker-provided symbol `_max_hart_id`.
#[inline]
fn compiled_max_hartid() -> usize {
    let ans;
    unsafe { asm!("
    lui {ans}, %hi(_max_hart_id)
    add {ans}, {ans}, %lo(_max_hart_id)
    ", ans = out(reg) ans) };
    ans
}
// Machine trap entry stub (`_start_trap`): swaps sp with mscratch (reusing
// the M-mode sp when mscratch was zero), saves the 16 caller-saved registers
// ra, t0-t6, a0-a7 — in the order matching `TrapFrame` — passes the frame
// pointer in a0 to `_start_trap_rust`, restores everything, and `mret`s.
global_asm!(
    "
    .equ REGBYTES, 8
    .macro STORE reg, offset
    sd \\reg, \\offset*REGBYTES(sp)
    .endm
    .macro LOAD reg, offset
    ld \\reg, \\offset*REGBYTES(sp)
    .endm
    .section .text
    .global _start_trap
    .p2align 2
    _start_trap:
    csrrw sp, mscratch, sp
    bnez sp, 1f
    /* from M level, load sp */
    csrrw sp, mscratch, zero
    1:
    addi sp, sp, -16 * REGBYTES
    STORE ra, 0
    STORE t0, 1
    STORE t1, 2
    STORE t2, 3
    STORE t3, 4
    STORE t4, 5
    STORE t5, 6
    STORE t6, 7
    STORE a0, 8
    STORE a1, 9
    STORE a2, 10
    STORE a3, 11
    STORE a4, 12
    STORE a5, 13
    STORE a6, 14
    STORE a7, 15
    mv a0, sp
    call _start_trap_rust
    LOAD ra, 0
    LOAD t0, 1
    LOAD t1, 2
    LOAD t2, 3
    LOAD t3, 4
    LOAD t4, 5
    LOAD t5, 6
    LOAD t6, 7
    LOAD a0, 8
    LOAD a1, 9
    LOAD a2, 10
    LOAD a3, 11
    LOAD a4, 12
    LOAD a5, 13
    LOAD a6, 14
    LOAD a7, 15
    addi sp, sp, 16 * REGBYTES
    csrrw sp, mscratch, sp
    mret
    "
);
// #[doc(hidden)]
// #[export_name = "_mp_hook"]
// pub extern "Rust" fn _mp_hook() -> bool {
// match mhartid::read() {
// 0 => true,
// _ => loop {
// unsafe { riscv::asm::wfi() }
// },
// }
// }
/// Caller-saved register file as laid out by the `_start_trap` assembly stub.
///
/// Field order must match the STORE/LOAD offsets in the trap assembly:
/// ra at offset 0, then t0-t6, then a0-a7.
#[allow(unused)]
#[derive(Debug)]
struct TrapFrame {
    ra: usize,
    t0: usize,
    t1: usize,
    t2: usize,
    t3: usize,
    t4: usize,
    t5: usize,
    t6: usize,
    a0: usize,
    a1: usize,
    a2: usize,
    a3: usize,
    a4: usize,
    a5: usize,
    a6: usize,
    a7: usize,
}
/// Machine-mode trap handler invoked from the assembly stub `_start_trap`.
///
/// Handles: S-mode `ecall` (dispatched to `rustsbi::ecall`), machine
/// software/timer interrupts (forwarded down to S level), and
/// illegal-instruction traps (emulating `rdtime`). Anything else panics
/// with full trap state.
#[export_name = "_start_trap_rust"]
extern "C" fn start_trap_rust(trap_frame: &mut TrapFrame) {
    let cause = mcause::read().cause();
    match cause {
        Trap::Exception(Exception::SupervisorEnvCall) => {
            let params = [trap_frame.a0, trap_frame.a1, trap_frame.a2, trap_frame.a3];
            // Call RustSBI procedure
            let ans = rustsbi::ecall(trap_frame.a7, trap_frame.a6, params);
            // Return the return value to TrapFrame
            trap_frame.a0 = ans.error;
            trap_frame.a1 = ans.value;
            // Skip ecall instruction
            mepc::write(mepc::read().wrapping_add(4));
        }
        Trap::Interrupt(Interrupt::MachineSoft) => {
            // Forward the machine software interrupt to the S level.
            unsafe {
                mip::set_ssoft();
                mie::clear_msoft();
            }
        }
        Trap::Interrupt(Interrupt::MachineTimer) => {
            // Forward the machine timer interrupt to the S level.
            unsafe {
                mip::set_stimer();
                mie::clear_mtimer();
            }
        }
        Trap::Exception(Exception::IllegalInstruction) => {
            // Reads the 32-bit instruction word at `vaddr`. Sets mstatus bit
            // 17 (MPRV) around the load so the access is translated as in the
            // previous privilege mode, then restores the old mstatus value.
            #[inline]
            unsafe fn get_vaddr_u32(vaddr: usize) -> u32 {
                let mut ans: u32;
                llvm_asm!("
                li t0, (1 << 17)
                mv t1, $1
                csrrs t0, mstatus, t0
                lwu t1, 0(t1)
                csrw mstatus, t0
                mv $0, t1
                "
                :"=r"(ans)
                :"r"(vaddr)
                :"t0", "t1");
                ans
            }
            let vaddr = mepc::read();
            let ins = unsafe { get_vaddr_u32(vaddr) };
            // Match `rdtime rd` (fixed opcode/CSR bits with rd masked out).
            if ins & 0xFFFFF07F == 0xC0102073 {
                // rdtime
                let rd = ((ins >> 7) & 0b1_1111) as u8;
                // todo: one instance only
                let clint = hal::Clint::new(0x2000000 as *mut u8);
                let time_usize = clint.get_mtime() as usize;
                // Write the mtime value into whichever saved register the
                // trapped instruction named as its destination.
                match rd {
                    10 => trap_frame.a0 = time_usize,
                    11 => trap_frame.a1 = time_usize,
                    12 => trap_frame.a2 = time_usize,
                    13 => trap_frame.a3 = time_usize,
                    14 => trap_frame.a4 = time_usize,
                    15 => trap_frame.a5 = time_usize,
                    16 => trap_frame.a6 = time_usize,
                    17 => trap_frame.a7 = time_usize,
                    5 => trap_frame.t0 = time_usize,
                    6 => trap_frame.t1 = time_usize,
                    7 => trap_frame.t2 = time_usize,
                    28 => trap_frame.t3 = time_usize,
                    29 => trap_frame.t4 = time_usize,
                    30 => trap_frame.t5 = time_usize,
                    31 => trap_frame.t6 = time_usize,
                    _ => panic!("invalid target"),
                }
                mepc::write(mepc::read().wrapping_add(4)); // skip the emulated instruction
            } else {
                #[cfg(target_pointer_width = "64")]
                panic!("invalid instruction, mepc: {:016x?}, instruction: {:016x?}", mepc::read(), ins);
                #[cfg(target_pointer_width = "32")]
                panic!("invalid instruction, mepc: {:08x?}, instruction: {:08x?}", mepc::read(), ins);
            }
        }
        #[cfg(target_pointer_width = "64")]
        cause => panic!(
            "Unhandled exception! mcause: {:?}, mepc: {:016x?}, mtval: {:016x?}, trap frame: {:p}, {:x?}",
            cause,
            mepc::read(),
            mtval::read(),
            &trap_frame as *const _,
            trap_frame
        ),
        #[cfg(target_pointer_width = "32")]
        cause => panic!(
            "Unhandled exception! mcause: {:?}, mepc: {:08x?}, mtval: {:08x?}, trap frame: {:x?}",
            cause,
            mepc::read(),
            mtval::read(),
            trap_frame
        ),
    }
}
|
# Echo each command as it is executed.
set -v
sudo apt-get update
# Kernel extras for the running kernel (storage drivers needed by old Docker).
sudo apt-get install -y git linux-image-extra-`uname -r`
# Trust the legacy get.docker.io APT repository signing key.
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 36A1D7869245C8950F966E92D8576A8BA88D21E9
# Register the repository and install the (legacy) lxc-docker package.
sudo sh -c "echo deb http://get.docker.io/ubuntu docker main > /etc/apt/sources.list.d/docker.list"
sudo apt-get update
# NOTE(review): get.docker.io / lxc-docker are long deprecated — confirm this
# script still targets an environment where they are required.
sudo apt-get install -y lxc-docker
|
//$ class TopWindow {
// NOTE(review): the '//$' lines appear to be U++-style documentation
// extraction markers around a partial class declaration — confirm before
// restructuring this fragment.
public:
// Window state-change notification; 'reason' identifies the transition.
virtual void State(int reason);
private:
// Frame decoration object; presumably kept consistent via SyncRect /
// SyncFrameRect and torn down by DestroyFrame — confirm in the .cpp.
TopWindowFrame *frame;
void SyncRect();
void SyncFrameRect(const Rect& r);
void DestroyFrame();
friend class Ctrl;
public:
void GripResize();
//$ };
|
package provider
import (
"context"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
"os"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)
// testAccProvider is the shared provider instance configured by testAccPreCheck.
var testAccProvider *schema.Provider

// testAccProviderFactories maps provider names to factory functions for
// acceptance tests.
// NOTE(review): this initializer is overwritten with an identical value in
// init() below — one of the two could likely be removed; confirm.
var testAccProviderFactories = map[string]func() (*schema.Provider, error){
	"runscope": func() (*schema.Provider, error) {
		return Provider(), nil
	},
}

// testAccBucketNamePrefix is the common prefix for test-created bucket names.
const testAccBucketNamePrefix = "terraform-runscope-testacc"
// init constructs the shared provider and rebuilds the factory map.
// NOTE(review): the factory map assignment duplicates the package-level
// initializer above — confirm whether both are needed.
func init() {
	testAccProvider = Provider()
	testAccProviderFactories = map[string]func() (*schema.Provider, error){
		"runscope": func() (*schema.Provider, error) {
			return Provider(), nil
		},
	}
}
// TestMain delegates to resource.TestMain so the plugin SDK can manage
// acceptance-test lifecycle (e.g. sweepers).
func TestMain(m *testing.M) {
	resource.TestMain(m)
}
// TestProvider ensures the provider schema passes the SDK's internal validation.
func TestProvider(t *testing.T) {
	if err := Provider().InternalValidate(); err != nil {
		t.Fatalf("err: %s", err)
	}
}
// TestProviderImpl verifies that Provider() can be constructed at all
// (a compile-and-run shape check; the value is deliberately discarded).
func TestProviderImpl(t *testing.T) {
	var _ = Provider()
}
// testAccPreCheck validates required environment variables and configures the
// shared provider before an acceptance test runs. It fails the test when
// RUNSCOPE_ACCESS_TOKEN or RUNSCOPE_TEAM_ID is unset, or when provider
// configuration reports an error diagnostic.
func testAccPreCheck(t *testing.T) {
	ctx := context.TODO()
	if v := os.Getenv("RUNSCOPE_ACCESS_TOKEN"); v == "" {
		t.Fatal("RUNSCOPE_ACCESS_TOKEN must be set for acceptance tests")
	}
	if v := os.Getenv("RUNSCOPE_TEAM_ID"); v == "" {
		t.Fatal("RUNSCOPE_TEAM_ID must be set for acceptance tests")
	}
	diags := testAccProvider.Configure(ctx, terraform.NewResourceConfigRaw(nil))
	if diags.HasError() {
		// Report the first diagnostic's summary; good enough for test setup.
		t.Fatal(diags[0].Summary)
	}
}
// testAccRandomBucketName returns a randomized bucket name carrying the shared
// acceptance-test prefix, so test-created resources are easy to identify.
// Uses the package-level testAccBucketNamePrefix constant instead of
// repeating the string literal.
func testAccRandomBucketName() string {
	return acctest.RandomWithPrefix(testAccBucketNamePrefix)
}
|
package me.aleiv.core.paper.tablist;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import lombok.Getter;
import me.aleiv.core.paper.Core;
import me.aleiv.core.paper.teams.objects.Team;
import me.aleiv.core.paper.utilities.PlayerDBUtil;
import net.md_5.bungee.api.ChatColor;
/**
 * Tablist generator for the Dedsafio event. Renders either an FFA scoreboard
 * (one row per single-member team) or team blocks (team header followed by
 * member rows), backed by a Redis-populated uuid-to-name cache.
 */
public class DedsafioTablistGenerator extends TablistGenerator {
    // uuid -> last known player name; the literal "null" marks an entry that
    // was looked up but had no usable name.
    private @Getter static ConcurrentHashMap<UUID, String> cachedNames = new ConcurrentHashMap<>();
    private static final Gson gson = new Gson();
    // Private-use glyph; presumably rendered as a star by the resource pack.
    private static final String STAR = Character.toString('\uEAA6');
    private static final String teamTag = ChatColor.of("#59e4fc") + "Team %s " + ChatColor.WHITE + "%d"
            + ChatColor.RESET + STAR;
    private static final String teamMemberTag = ChatColor.of("#fef1aa") + "%s";
    private static final String ffaTag = "%d" + STAR + " " + ChatColor.of("#fef1aa") + "%s";

    /** Orders teams by points, highest first. */
    class SortByPoints implements Comparator<Team> {
        @Override
        public int compare(Team a, Team b) {
            return b.getPoints() - a.getPoints();
        }
    }

    public DedsafioTablistGenerator(Core plugin) {
        super(plugin);
        // Warm the name cache from Redis once at construction (shares the
        // entry-parsing logic with recacheNames via cacheRedisEntry).
        plugin.getTeamManager().getRedisSyncConnection().hgetall("uuids:names").entrySet()
                .forEach(entry -> cacheRedisEntry(entry.getKey(), entry.getValue()));
    }

    /** Re-reads the whole "uuids:names" Redis hash into the local cache. */
    public static void recacheNames() {
        Core.getInstance().getTeamManager().getRedisSyncConnection().hgetall("uuids:names").entrySet()
                .forEach(entry -> cacheRedisEntry(entry.getKey(), entry.getValue()));
    }

    /**
     * Parses one Redis hash entry (uuid key, JSON object value with a "name"
     * field) and stores the contained name, or the literal "null" when the
     * JSON is missing or has no usable name.
     */
    private static void cacheRedisEntry(String key, String value) {
        var name = gson.fromJson(value, JsonObject.class);
        if (name != null) {
            var actualName = name.get("name");
            if (actualName != null && !actualName.isJsonNull()) {
                cachedNames.put(UUID.fromString(key), actualName.getAsString());
                return;
            }
        }
        cachedNames.put(UUID.fromString(key), "null");
    }

    @Override
    public String[] generateHeaderFooter(Player paramPlayer) {
        return List.of("§7§lDEDSAFIO", "§7§lDEDSAFIO").toArray(new String[0]);
    }

    /**
     * Builds the 80 tab slots: FFA mode emits one scored row per team's first
     * member; team mode emits a team header row followed by member rows.
     * Remaining slots are filled with blanks.
     */
    @Override
    public TabEntry[] generateBars(Player paramPlayer) {
        var array = new TabEntry[80];
        int i = 0;
        // Obtain all the entries
        var entries = new ArrayList<Team>(plugin.getTeamManager().getTeamsMap().values());
        // Sort them by points
        Collections.sort(entries, new SortByPoints());
        // Handle the ffa case
        if (plugin.getTeamManager().getDataset().equalsIgnoreCase("ffa")) {
            var iter = entries.iterator();
            while (iter.hasNext() && i < 80) {
                var team = iter.next();
                var id = team.getMembers().get(0);
                var name = getNameForId(id);
                // Truncate names to at most 12 characters to fit the column.
                var shortName = name.substring(0, Math.min(12, name.length()));
                var entry = new TabEntry(String.format(ffaTag, team.getPoints(), shortName));
                array[i] = entry;
                i++;
            }
        } else { // Handle team case
            var iter = entries.iterator();
            while (iter.hasNext() && i < 80) {
                var next = iter.next();
                array[i] = new TabEntry(String.format(teamTag, next.getTeamName(), next.getPoints()));
                for (var member : next.getMembers()) {
                    i++;
                    if (i < 80) {
                        array[i] = new TabEntry(String.format(teamMemberTag, getNameForId(member)));
                    } else {
                        break;
                    }
                }
                i++;
            }
        }
        // Fill empty slots with nothing.
        for (; i < 80; i++) {
            array[i] = new TabEntry(" ");
        }
        return array;
    }

    /**
     * Resolves a display name for a uuid: live player name first, then the
     * local cache; otherwise kicks off an async lookup and returns "null".
     */
    private String getNameForId(UUID id) {
        var player = Bukkit.getOfflinePlayer(id);
        if (player.getName() != null) {
            return player.getName();
        }
        var cachedName = cachedNames.get(id);
        if (cachedName != null) {
            return cachedName;
        }
        // If the player is not cached, we need to get it from playerdb.co
        cacheName(id);
        return "null";
    }

    /** Asynchronously resolves a name via PlayerDB and caches it on success. */
    private static void cacheName(UUID uuid) {
        PlayerDBUtil.getNameFromIdAsync(uuid).thenAccept(name -> {
            if (name != null) {
                cachedNames.put(uuid, name);
                writeNameOntoRedisCache(uuid, name);
            }
        });
    }

    /** Persists a uuid/name pair (with timestamp) into the Redis hash. */
    private static void writeNameOntoRedisCache(UUID uuid, String name) {
        var json = new JsonObject();
        json.addProperty("name", name);
        json.addProperty("timeStamp", System.currentTimeMillis());
        Core.getInstance().getTeamManager().getRedisSyncConnection().hset("uuids:names", uuid.toString(),
                gson.toJson(json));
    }

    /** Writes the player's current name to Redis unless already cached locally. */
    public static void writeIfNotPresent(Player player) {
        if (!cachedNames.containsKey(player.getUniqueId())) {
            writeNameOntoRedisCache(player.getUniqueId(), player.getName());
        }
    }
}
|
package com.emaginalabs.wecodeproperties
import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks
import org.scalatest.{FlatSpec, Matchers}
import scala.util.{Failure, Success, Try}
// Exploratory property-based tests demonstrating ScalaCheck/ScalaTest features:
// default generation, execution counts, shrinking, and custom generators.
class PlayingWithLibrarySpec
    extends FlatSpec
    with PropertyChecks
    with Matchers {

  // Default forAll usage; the println shows which generated values each run gets.
  "Playing with the library" should "add logs to know how it executes" in {
    forAll() { (a: Int, b: Int) =>
      println(s"Just executed the test with values [a: $a, b: $b]")
      a + b shouldBe b + a
    }
  }

  it should "allow us to modify the number of executions" in {
    var numOfExecutions = 0;
    forAll(minSuccessful(500)) { (a: Int) =>
      numOfExecutions += 1
      succeed
    }
    numOfExecutions shouldBe 500
  }

  // Deliberately failing property so the shrinking output can be observed;
  // the Try/match asserts that the property did indeed fail.
  it should "make a test fail in order to see how shrink works" in {
    val result = Try {
      forAll() { (a: Int, b: Int) =>
        println(s"Just executed the test with values [a: $a, b: $b]")
        if (a > 5 || b > 7) {
          fail("Just fail")
        }
      }
    }
    result match {
      case Failure(exception) => print(exception.getMessage)
      case Success(_) => fail("The property should fail but it didn't")
    }
  }

  // A filter that accepts only a = 4 discards almost every generated value,
  // so ScalaCheck gives up — the Try/match asserts that failure occurs.
  it should "fail when a generator is too restrictive" in {
    val result = Try {
      forAll(
        Gen
          .choose(Int.MinValue, Int.MaxValue)
          .filter(a => {
            a > 3 && a < 5
          })) { (a: Int) =>
        succeed
      }
    }
    result match {
      case Failure(exception) => print(exception.getMessage)
      case Success(_) =>
        fail("The property should fail because generator is to restritive")
    }
  }

  // Maps an arbitrary Int into [|a|, |b|) via modulo arithmetic.
  // NOTE(review): assumes |b| > |a| (otherwise the modulo divides by zero)
  // and discards the sign of both inputs — confirm intent.
  // NOTE(review): the binding `x = number` is unused and could be removed.
  def numericRangeGenerator(a: Int, b: Int): Gen[Int] =
    for {
      generated <- Gen.chooseNum(Int.MinValue, Int.MaxValue)
      absA = Math.abs(a)
      absB = Math.abs(b)
      i = Math.abs(generated) % (absB - absA)
      number = i + absA
      x = number
    } yield (number)

  // NOTE(review): despite its name, this property passes — the custom
  // generator always yields in-range values; confirm the description string.
  it should "fail when a generator is limited but not restrictive" in {
    forAll(numericRangeGenerator(3, 5)) { (a: Int) =>
      a should be >= 3
      a should be <= 5
    }
  }
}
|
//
// IRILaunchRouterName.h
// IRiskSDK
//
// Created by owen on 2020/8/13.
// Copyright © 2020 owen. All rights reserved.
//
#import <Foundation/Foundation.h>
/// Router name for the "inspect" launch flow; value is defined in the
/// corresponding implementation file.
/// NOTE(review): declared outside the NS_ASSUME_NONNULL region and explicitly
/// _Nullable — confirm the constant is really allowed to be nil.
FOUNDATION_EXTERN NSString * _Nullable const IRROUTERNAME_INSPECT;
NS_ASSUME_NONNULL_BEGIN
/// Namespace-style holder for launch router name constants; declares no API.
@interface IRILaunchRouterName : NSObject
@end
NS_ASSUME_NONNULL_END
|
package com.tencent.bk.devops.plugin.utils
import java.util.Locale
object MachineEnvUtils {
    /**
     * Detects the host operating-system family from the "os.name" system
     * property (lower-cased with an English locale) and returns the matching
     * [OSType] constant, or [OSType.OTHER] when nothing matches.
     */
    fun getOS(): String {
        val osName = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH)
        return when {
            osName.contains("mac") || osName.contains("darwin") -> OSType.MAC_OS
            osName.contains("win") -> OSType.WINDOWS
            osName.contains("nux") -> OSType.LINUX
            else -> OSType.OTHER
        }
    }

    /** String constants naming the supported OS families. */
    object OSType {
        const val WINDOWS = "WINDOWS"
        const val LINUX = "LINUX"
        const val MAC_OS = "MAC_OS"
        const val OTHER = "OTHER"
    }
}
|
<?php
/**
* [PHPFOX_HEADER]
*/
defined('PHPFOX') or exit('NO DICE!');
/**
*
*
* @copyright [PHPFOX_COPYRIGHT]
* @author Raymond Benc
* @package Module_Rss
* @version $Id: ajax.class.php 704 2009-06-21 18:50:42Z Raymond_Benc $
*/
class Rss_Component_Ajax_Ajax extends Phpfox_Ajax
{
	/**
	 * Toggles a feed's active flag via the rss.process service.
	 * The service's return value is deliberately ignored — TODO confirm no
	 * client-side response is expected on success.
	 */
	public function updateFeedActivity()
	{
		if (Phpfox::getService('rss.process')->updateActivity($this->get('id'), $this->get('active')))
		{
		}
	}

	/**
	 * Toggles a feed's site-wide flag; result intentionally unused (see above).
	 */
	public function updateSiteWide()
	{
		if (Phpfox::getService('rss.process')->updateSiteWide($this->get('id'), $this->get('active')))
		{
		}
	}

	/**
	 * Persists a new feed ordering from the posted 'val' payload.
	 */
	public function ordering()
	{
		if (Phpfox::getService('rss.process')->updateOrder($this->get('val')))
		{
		}
	}

	/**
	 * Persists a new feed-group ordering from the posted 'val' payload.
	 */
	public function groupOrdering()
	{
		if (Phpfox::getService('rss.group.process')->updateOrder($this->get('val')))
		{
		}
	}

	/**
	 * Renders the current user's RSS activity log block; requires login.
	 */
	public function log()
	{
		Phpfox::isUser(true);
		Phpfox::getBlock('rss.log', array(
				'rss' => array(
					'table' => 'rss_log_user',
					'field' => 'user_id',
					'key' => Phpfox::getUserId()
				)
			)
		);
	}
}
?>
|
Один из красивейших портов на сегодняшний день. Поддерживает 3D Модели, текстуры высокого разрешения, прыжки, обзор с помощью мыши, навороченные спецэффекты, игру через интернет или по локальной сети и многое другое.
В общем, рекомендуется всем, кто хочет совместить динамичный геймплей классического Doomа и достаточно современную графику.
Скачанные файлы с расширением **pk3** не пытаться распаковывать, а кидать непосредственно в папку **addons** внутри папки **snowberry**
ZIP-файлы с дополнительными текстурами - **распаковывать** туда же.
[Линукс-версии.](http://dengine.net/linux)
|
/**
* @file btree.h
* Definition of a B-tree class which can be used as a generic dictionary
* (insert-only). Designed to take advantage of caching to be faster than
* standard balanced binary search trees.
*/
#pragma once
#include <algorithm>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
/**
* BTree class. Provides interfaces for inserting and finding elements in
* B-tree.
*
* @author Matt Joras
* @date Winter 2013
*/
template <class K, class V>
class BTree
{
private:
/**
* A fancy key-value pair which acts as elements in the BTree.
* Can be compared with <, >, ==. Additionally they can be compared against
* a K with <, > and == based on its key.
* */
struct DataPair {
K key;
V value;
/**
* Constructs a DataPair from the given key and value.
* @param key The key of the pair.
* @param value The value of the pair.
*/
DataPair(K key, V value) : key(key), value(value)
{
}
/**
* Less than operator for a DataPair. The object is less than another
* if its key is less than the other's key.
* @param rhs The right hand of the < operator.
* @return true if the object's key is less than rhs' key, false
* otherwise.
*/
inline bool operator<(const DataPair& rhs) const
{
return this->key < rhs.key;
}
/**
* Less than operator for a DataPair and a K.
* @param rhs The right hand side (K) of the < operator.
* @return true if the object's key is less than rhs, false otherwise.
*/
inline bool operator<(const K& rhs) const
{
return this->key < rhs;
}
/**
* Less than operator for a K and a DataPair.
* @param lhs The left hand side (K) of the < operator.
* @param rhs The right hand side (DataPair) of the < operator.
* @return true if lhs is less than rhs's key, false otherwise.
*/
inline friend bool operator<(const K& lhs, const DataPair& rhs)
{
return lhs < rhs.key;
}
/**
* Greater than operator for a DataPair. DataPair is greater than another
* if its key is greater than the other's key.
* @param rhs The right hand of the > operator.
* @return true if the object's key is greater than rhs's key, false otherwise.
*/
inline bool operator>(const DataPair& rhs) const
{
return this->key > rhs.key;
}
/**
* Greater than operator for a K and a DataPair.
* @param lhs The left hand side (K) of the > operator.
* @param rhs The right hand side (DataPair) of the > operator.
* @return true if lhs is greater than rhs's key, false otherwise.
*/
inline friend bool operator>(const K& lhs, const DataPair& rhs)
{
return lhs > rhs.key;
}
/**
* Greater than operator for a DataPair and a K.
* @param rhs The right hand side (K) of the > operator.
* @return true if the object's key is greater than rhs, false otherwise.
*/
inline bool operator>(const K& rhs) const
{
return this->key > rhs;
}
/**
* Equality operator for a DataPair. One is equal to another
* if its key is equal to the other's key.
* @param rhs The right hand of the == operator.
* @return true if the object's key is greater than rhs's key, false otherwise.
*/
inline bool operator==(const DataPair& rhs) const
{
return this->key == rhs.key;
}
/**
* Equality operator for a DataPair and a K.
* @param rhs The right hand side (K) of the == operator.
* @return true if the object's key is equal to rhs, false otherwise.
*/
inline bool operator==(const K& rhs) const
{
return this->key == rhs;
}
/**
* Equality operator for a K and a DataPair.
* @param lhs The left hand side (K) of the == operator.
* @param rhs The right hand side (DataPair) of the == operator.
* @return true if lhs is equal to rhs's key, false otherwise.
*/
inline friend bool operator==(const K& lhs, const DataPair& rhs)
{
return lhs == rhs.key;
}
};
/**
* A class for the basic node structure of the BTree. A node contains
* two vectors, one with DataPairs representing the data, and one of
* BTreeNode*s, representing the node's children.
*/
struct BTreeNode {
bool is_leaf;
std::vector<DataPair> elements;
std::vector<BTreeNode*> children;
/**
* Constructs a BTreeNode. The vectors will reserve to avoid
* reallocations.
*/
BTreeNode(bool is_leaf, unsigned int order) : is_leaf(is_leaf)
{
elements.reserve(order + 1);
children.reserve(order + 2);
}
/**
* Constructs a BTreeNode based on another. Only copies over
* the elements and is_leaf information.
*/
BTreeNode(const BTreeNode& other)
: is_leaf(other.is_leaf), elements(other.elements)
{
}
/**
* Printing operator for a BTreeNode. E.g. a node containing 4, 5, 6
* would look like:
* <pre>
* | 4 | 5 | 6 |
* * * * *
* </pre>
* The stars below the bars represent non-null child pointers. Null
* child pointers are represented by an "N". If there are no children
* then "no children" is displayed instead.
* @param out The ostream to be written to.
* @param n The node to be printed.
* @return The modified ostream.
*/
inline friend std::ostream& operator<<(std::ostream& out,
const BTreeNode& n)
{
std::string node_str;
node_str.reserve(2 * (4 * n.elements.size() + 1));
for (auto& elem : n.elements) {
std::stringstream temp;
temp << elem.key;
node_str += "| ";
node_str += temp.str();
node_str += " ";
}
if (!n.elements.empty()) {
node_str += "|";
}
node_str += "\n";
for (auto& child : n.children) {
if (child == nullptr) {
node_str += "N ";
} else {
node_str += "* ";
}
}
if (n.children.empty()) {
node_str += "no children";
}
out << node_str;
return out;
}
};
/* Maximum number of children each BTreeNode may have (branching factor). */
unsigned int order;
/* Pointer to the root node; the tree owns all nodes reachable from it. */
BTreeNode* root;
public:
/**
* Constructs a default, order 64 BTree.
*/
BTree();
/**
* Constructs a BTree with the specified order. The minimum order allowed
* is order 3.
* @param order The order of the constructed BTree.
*/
BTree(unsigned int order);
/**
* Constructs a BTree as a deep copy of another.
* @param other The BTree to copy.
*/
BTree(const BTree& other);
/**
* Performs checks to make sure the BTree is valid. Specifically
* it will check to make sure that an in-order traversal of the tree
* will result in a sorted sequence of keys. Also verifies that each
* BTree node doesn't have more nodes than its order.
* @return true if it satisfies the conditions, false otherwise.
*/
bool is_valid(unsigned int order = 64) const;
/**
* Destroys a BTree.
*/
~BTree();
/**
* Assignment operator for a BTree.
* @param rhs The BTree to assign into this one.
* @return The copied BTree.
*/
const BTree& operator=(const BTree& rhs);
/**
* Clears the BTree of all data.
*/
void clear();
/**
* Inserts a key and value into the BTree. If the key is already in the
* tree do nothing.
* @param key The key to insert.
* @param value The value to insert.
*/
void insert(const K& key, const V& value);
/**
* Finds the value associated with a given key.
* @param key The key to look up.
* @return The value (if found), the default V if not.
*/
V find(const K& key) const;
private:
/**
* Private recursive version of the insert function.
* @param subroot A reference of a pointer to the current BTreeNode.
* @param pair The DataPair to be inserted.
*/
void insert(BTreeNode* subroot, const DataPair& pair);
/**
* Private recursive version of the find function.
* @param subroot A reference of a pointer to the current BTreeNode.
* @param key The key we are looking up.
* @return The value (if found), the default V if not.
*/
V find(const BTreeNode* subroot, const K& key) const;
/**
* Splits a child node of a BTreeNode. Called if the child became too
* large. Modifies the parent such that children[child_idx] contains
* half as many elements as before, and similarly for
* children[child_idx + 1] (which is a new BTreeNode*).
* @param parent The parent whose child we are trying to split.
* @param child_idx The index of the child in its parent's children
* vector.
*/
void split_child(BTreeNode* parent, size_t child_idx);
/**
* Private recursive version of the clear function.
* @param subroot A pointer to the current node being cleared.
*/
void clear(BTreeNode* subroot);
/**
* Private recursive version of the copy function.
* @param subroot A pointer to the current node being copied.
*/
BTreeNode* copy(const BTreeNode* subroot);
/**
* Private recursive version of the is_valid function.
* @param subroot A pointer to the current node being checked for
* validity.
* @return true if the node's subtree is valid, false otherwise.
*/
bool is_valid(const BTreeNode* subroot, std::vector<DataPair>& data,
unsigned int order) const;
};
/**
* Generalized function for finding the insertion index of a given element
* into a given sorted vector.
* @param elements A sorted vector of some type.
* @param val A value which represents something to be inserted into the vector.
* Must either be the same type as T, or one that can compare to it. E.g. for
* the elements of a BTreeNode we might pass in either a DataPair value or a
* K value (the key).
* @return The index at which val could be inserted into elements to maintain
* the sorted order of elements. If val occurs in elements, then this returns
* the index of val in elements.
*/
template <class T, class C>
size_t insertion_idx(const std::vector<T>& elements, const C& val)
{
    /* Binary search for the first element that is >= val.
     * Because `elements` is sorted, that index is both the insertion point
     * which keeps the vector sorted and, when val occurs (possibly with
     * duplicates), the index of its first occurrence — matching the
     * documented contract above. Only operator> and operator== between T
     * and C are required, the same operators the previous linear scan used,
     * so DataPair/K comparisons keep working. Runs in O(log n) instead of
     * the former O(n).
     */
    size_t lo = 0;
    size_t hi = elements.size();
    while (lo < hi) {
        size_t mid = lo + (hi - lo) / 2;
        if (elements[mid] > val || elements[mid] == val) {
            hi = mid;       // elements[mid] is a valid answer; look left
        } else {
            lo = mid + 1;   // elements[mid] < val; answer lies to the right
        }
    }
    return lo;
}
#include "btree_given.cpp"
#include "btree.cpp"
|
# encoding: UTF-8
# Copyright (c) 2015 VMware, Inc. All Rights Reserved.
require 'spec_helper'
require 'vagrant-guests-photon/guest'
# Spec for the Photon guest plugin's OS detection.
describe VagrantPlugins::GuestPhoton::Guest do
  # Shared context supplying the `machine`, `communicate`, and `guest` doubles.
  include_context 'machine'

  it 'should be detected with Photon' do
    # detect? is expected to probe the guest through the communicator by
    # grepping /etc/photon-release for the "VMware Photon" marker string.
    expect(communicate).to receive(:test).with("grep 'VMware Photon' /etc/photon-release")
    guest.detect?(machine)
  end
end
|
package world.gregs.game.playground.spatial.quadtree
import java.awt.Point
import java.awt.Rectangle
interface QuadTree {
    /**
     * Maximum number of points a leaf may hold before it is divided
     * into child quadrants.
     */
    val capacity: Int

    /**
     * Inserts a point into the tree.
     *
     * @return whether the point was stored (presumably false when the point
     *         lies outside the tree's boundary — confirm with implementations)
     */
    fun insert(point: Point): Boolean

    /**
     * Queries an [area] for points, accumulating matches into [results].
     *
     * @return the points found (implementations appear expected to return
     *         the populated [results] list — verify against call sites)
     */
    fun query(area: Rectangle, results: MutableList<Point>): List<Point>
}
|
import { Component, Input } from "@angular/core";
import { CompassForm } from "../compass-form";
import { CompassControl } from "../compass-control";
@Component({
  selector: "compass-form",
  templateUrl: "./compass-form.component.html",
  styleUrls: ["./compass-form.component.scss"]
})
export class CompassFormComponent<T> {
  /** The form model whose controls this component renders. */
  @Input()
  compassForm: CompassForm<T>;

  /** Keys of controls that should be excluded from rendering. */
  @Input()
  ignoreControls: string[] = [];

  /** Returns the form's controls, minus any whose key is ignored. */
  getControls(): CompassControl<T, any>[] {
    const controls = this.compassForm.controlsArray;
    return controls.filter(control => !this.ignoreControls.includes(control.key));
  }

  /** Builds the inline style object for a single control. */
  getStyle(control: CompassControl<T, any>) {
    const { width, display } = control.snapshot;
    return {
      width: width,
      "flex-basis": width,
      display: display ? "block" : "none"
    };
  }
}
|
import routes from '@/modules/iam/iam-routes';
import store from '@/modules/iam/iam-store';
// Module descriptor for the IAM feature: bundles its routes and store
// module so the application shell can register them in one place.
export default {
  routes,
  store,
};
|
import ValueComponent from './ValueComponent';
import CheckboxInput from './CheckboxInput';
/**
 * Renders a boolean model value as a checkbox. When the surrounding context
 * is not editable the element is tagged read-only and toggling is ignored.
 */
export default class BooleanComponent extends ValueComponent {
  /** Maps action names to handlers invoked by the framework. */
  getActionHandlers() {
    return {
      toggleValue: this._toggleValue,
    };
  }

  /**
   * Renders the checkbox wrapper element.
   * @param $$ element factory supplied by the rendering framework
   */
  render($$) {
    const model = this.props.model;
    const value = model.getValue();
    let el = $$('div').addClass('sc-boolean');
    if (!this.context.editable) {
      // FIX: was `el.addclass(...)` (lowercase c), which is not a method and
      // would throw a TypeError whenever rendering in read-only mode.
      el.addClass('sm-readonly');
    }
    el.append($$(CheckboxInput, { value, disabled: this.props.disabled }));
    return el;
  }

  /** Flips the model value; no-op when the context is not editable. */
  _toggleValue() {
    if (this.context.editable) {
      const model = this.props.model;
      this.props.model.setValue(!model.getValue());
    }
  }
}
|
<?php
/**
* Created by PhpStorm.
* User: jon
* Date: 2018/10/6
* Time: 下午4:44
*/
namespace app\common\model;
class Complete extends BaseModel
{
    /**
     * Relation: each Complete row is linked to one Theraise row,
     * joined on Theraise.id = Complete.theraise_id.
     */
    public function Theraise()
    {
        return $this->hasOne('Theraise', 'id', 'theraise_id');
    }

    /**
     * Insert a new Complete row.
     *
     * @param array $data column => value map for the new row
     * @return static|false the created model (framework-dependent on failure)
     */
    public static function PostByAdd($data)
    {
        $res = self::create($data);
        return $res;
    }

    /**
     * Update the row matching $data['theraise_id'] with the given data.
     *
     * @param array $data must contain 'theraise_id' plus the columns to set
     * @return int|false number of affected rows (framework-dependent on failure)
     */
    public static function PostByUpdate($data)
    {
        $res = self::where('theraise_id', $data['theraise_id'])->data($data)->update();
        return $res;
    }

    /**
     * Paginated listing with the Theraise relation eager-loaded.
     *
     * @param array $data expects 'limit' (page size) and 'page'
     *                    (passed through as the paginator's query config)
     * @return \think\Paginator
     */
    public static function GetByList($data)
    {
        $res = self::with('Theraise')->paginate($data['limit'], false, ['query' => $data['page']]);
        return $res;
    }

    /**
     * Fetch a single row by primary key, with the Theraise relation loaded.
     *
     * @param int $id primary key
     * @return static|null
     */
    public static function GetByFind($id)
    {
        $res = self::with('Theraise')->where('id', $id)->find();
        return $res;
    }
}
|
import Document, {
Head,
Main,
NextScript,
DocumentContext,
DocumentInitialProps
} from "next/document";
import React from "react";
import { ServerStyleSheets } from "@material-ui/core";
import {
RenderPage,
NextComponentType,
AppContextType,
AppInitialProps,
AppPropsType
} from "next/dist/next-server/lib/utils";
import { NextRouter } from "next/router";
/**
 * Custom Next.js Document: defines the static HTML shell (lang attribute,
 * head, body) shared by every server-rendered page.
 */
export default class MyDocument extends Document {
  render() {
    return (
      <html lang="en">
        <Head />
        <body>
          <Main />
          <NextScript />
        </body>
      </html>
    );
  }
}
/**
 * Server-side style collection for Material-UI: wraps the page render so
 * MUI's ServerStyleSheets captures every style used, then appends the
 * collected <style> element to the document's initial props so the
 * server-rendered markup arrives fully styled.
 */
MyDocument.getInitialProps = async (
  ctx: DocumentContext
): Promise<DocumentInitialProps> => {
  const sheets: ServerStyleSheets = new ServerStyleSheets();
  const originalRenderPage: RenderPage = ctx.renderPage;

  // Replace renderPage so the App tree renders inside sheets.collect(),
  // which records every MUI style rule used during this render pass.
  ctx.renderPage = () => {
    return originalRenderPage({
      enhanceApp: (
        App: NextComponentType<
          AppContextType<NextRouter>,
          AppInitialProps,
          AppPropsType<NextRouter, {}>
        >
      ) => (props: React.PropsWithChildren<AppPropsType<NextRouter, {}>>) => {
        return sheets.collect(<App {...props} />);
      }
    });
  };

  // Must run AFTER ctx.renderPage is swapped — Document.getInitialProps
  // triggers the actual render.
  const initialProps: DocumentInitialProps = await Document.getInitialProps(
    ctx
  );
  return {
    ...initialProps,
    // Keep Next's own styles and append the collected MUI style element.
    styles: [
      ...React.Children.toArray(initialProps.styles),
      sheets.getStyleElement()
    ]
  };
};
|
<!-- section start -->
<!-- attr: { id:'', class:'slide-title', showInPresentation:true, hasScriptWrapper:true } -->
# Defensive Programming, Assertions and Exceptions
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic01.png" style="top:60%; left:62%; width:38.41%; z-index:-1; border: 1px solid white; border-radius: 5px;" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic02.png" style="top:15%; left:2%; width:17.08%; z-index:-1" /> -->
<article class="signature">
<p class="signature-course">High-Quality Code - Part II</p>
<p class="signature-initiative">Telerik Software Academy</p>
<a href="http://academy.telerik.com " class="signature-link">http://academy.telerik.com </a>
</article>
<!-- section start -->
# Defensive programming
“Programming today is a race between software engineers striving to build bigger and better idiot-proof programs, and the Universe trying to produce bigger and better idiots. So far, the Universe is winning.”
\- Rick Cook, The Wizardry Compiled
<!-- section start -->
<!-- attr: { id:'', showInPresentation:true, hasScriptWrapper:true } -->
# Table of Contents
- What is Defensive Programming?
- Assertions and **Debug.Assert(…)**
- Exceptions Handling Principles
- Error Handling Strategies
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic03.png" style="top:39.67%; left:67.37%; width:36.14%; z-index:-1" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic04.png" style="top:49.92%; left:22.72%; width:21.23%; z-index:-1" /> -->
<!-- section start -->
<!-- attr: { id:'', class:'slide-section', showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Defensive Programming -->
<!-- ## Using Assertions and Exceptions Correctly -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic05.png" style="top:42.96%; left:8.07%; width:39%; z-index:-1; border: 1px solid white; border-radius: 5px;" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic06.png" style="top:43.20%; left:55.79%; width:45%; z-index:-1; border: 1px solid white; border-radius: 5px;" /> -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# What is Defensive Programming?
- Similar to defensive driving – you are never sure what other drivers will do
- **Expect incorrect input** and handle it correctly
- Think not only about the usual execution flow, but consider also **unusual** situations!
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic07.png" style="top:64%; left:30%; width:40%; z-index:-1; border: 1px solid white; border-radius: 5px;" /> -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Protecting from <br/> Invalid Input
- “Garbage in → garbage out” – **Wrong!**
- Garbage in → nothing out / exception out / error message out / no garbage allowed in
- Check the values of all data from external sources (from user, file, internet, DB, etc.)
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic08.png" style="top:63%; left:2%; width:52.77%; z-index:-1; border: 1px solid black; border-radius: 5px;" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic09.png" style="top:76%; left:50%; width:50.37%; z-index:-1; border: 1px solid black; border-radius: 5px;" /> -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Protecting from <br/> Invalid Input -->
- Check the values of all **routine input parameters**
- Decide how to handle **bad inputs**
- Return neutral value
- Substitute with valid data
- Throw an exception
- Display error message, log it, etc.
- The best form of defensive coding is not inserting error at first place
<!-- section start -->
<!-- attr: { id:'', class:'slide-section', showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Assertions -->
<!-- ## Checking Preconditions and Postconditions -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic10.png" style="top:45%; left:20%; width:30.85%; z-index:-1" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic11.png" style="top:45%; left:60%; width:25.56%; z-index:-1" /> -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Assertions
- **Assertion** – a statement placed in the code that **must always be true** at that moment
- Assertions are used during development
- Removed in release builds
- Assertions check for bugs in code
```cs
public double GetAverageStudentGrade()
{
Debug.Assert(studentGrades.Count > 0,
"Student grades are not initialized!");
return studentGrades.Average();
}
```
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic12.png" style="top:30%; left:85%; width:24.13%; z-index:-1; border: 1px solid white; border-radius:5px;" /> -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Assertions -->
- Use assertions for conditions that **should never occur** in practice
- Failed assertion indicates a **fatal error** in the program (usually unrecoverable)
- Use assertions to **document assumptions** made in code (preconditions & postconditions)
```cs
private Student GetRegisteredStudent(int id)
{
Debug.Assert(id > 0);
Student student = registeredStudents[id];
Debug.Assert(student.IsRegistered);
}
```
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Assertions -->
- Failed assertion indicates a **fatal error** in the program (usually unrecoverable)
- Avoid putting executable code in assertions
```cs
Debug.Assert(PerformAction(), "Could not perform action");
```
- Won’t be compiled in production. Better use:
```cs
bool actionPerformed = PerformAction();
Debug.Assert(actionPerformed, "Could not perform action");
```
- Assertions should fail loud
- It is a fatal error → total crash
<!-- attr: { class:'slide-section demo', showInPresentation:true, hasScriptWrapper:true } -->
# Assertions
## [Demo]()
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic13.png" style="top:18%; left:0%; width:40%; z-index:-1" /> -->
<!-- section start -->
<!-- attr: { id:'', class:'slide-section', showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
## Best Practices for Exception Handling
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic14.png" style="top:55%; left:30%; width:40%; z-index:-1" /> -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Exceptions
- **Exceptions** provide a way to inform the caller about an error or exceptional events
- Can be caught and processed by the callers
- Methods can **throw** exceptions:
```cs
public void ReadInput(string input)
{
if (input == null)
{
throw new ArgumentNullException("input"); }
…
}
```
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
- Use **try-catch** statement to handle exceptions:
- You can use multiple **catch** blocks to specify handlers for different exceptions
- Not handled exceptions propagate to the caller
```cs
void PlayNextTurn()
{
try
{
readInput(input);
…
}
catch (ArgumentException e)
{
Console.WriteLine("Invalid argument!");
}
}
```
<div class="fragment balloon" style="top:55%; left:40.55%; width:35.26%">Exception thrown here</div>
<div class="fragment balloon" style="top:70%; left:43.20%; width:52.01%">The code here will not be executed</div>
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
- Use **finally** block to execute code even if exception occurs (not supported in C++):
- Perfect place to perform cleanup for any resources allocated in the **try** block
```cs
void PlayNextTurn()
{
try
{
… }
finally
{
Console.WriteLine("Hello from finally!");
}
}
```
<div class="fragment balloon" style="top:60%; left:22.04%; width:37.91%">Exceptions can be eventually thrown here</div>
<div class="fragment balloon" style="top:78%; left:27.33%; width:49.37%">The code here is always executed</div>
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
- Use exceptions to notify the other parts of the program about errors
- Errors that should not be ignored
- Throw an exception only for conditions that are **truly exceptional**
- Should I throw an exception when I check for user name and password? → better return false
- Don’t use exceptions as control flow mechanisms
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
- Throw exceptions at the right **level of abstraction**
```cs
class Employee
{
// Bad
…
public TaxId
{ get { throw new NullReferenceException(…); }
}
```
```cs
class Employee
{
// Better
…
public TaxId
{ get { throw new EmployeeDataNotAvailable(…); }
}
```
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
- Use **descriptive error messages**
- Incorrect example:
```cs
throw new Exception("Error!");
```
- _Example_:
```cs
throw new ArgumentException("The speed should be a number " +
"between " + MIN_SPEED + " and " + MAX_SPEED + ".");
```
- Avoid **empty catch blocks**
```cs
try
{
…
}
catch (Exception ex)
{
}
```
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
- Always include the exception **cause** when throwing a new exception
```cs
try
{
WithdrawMoney(account, amount);
}
catch (DatabaseException dbex)
{
throw new WithdrawException(String.Format(
"Can not withdraw the amount {0} from acoount {1}",
amount, account), dbex);
}
```
<div class="fragment balloon" style="top:70%; left:45.84%; width:47.60%">We chain the original exception (the source of the problem)</div>
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
- Catch only exceptions that you are capable to process correctly
- Do not catch all exceptions!
- Incorrect example:
```cs
try
{
ReadSomeFile();
}
catch
{
Console.WriteLine("File not found!");
}
```
- What about **OutOfMemoryException**?
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Exceptions -->
- Have an exception handling strategy for all unexpected / unhandled exceptions:
- Consider logging (e.g. Log4Net)
- Display to the end users only messages that they could understand
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic15.png" style="top:55%; left:3.74%; width:48.48%; z-index:-1; border: 1px solid black; border-radius:5px;" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic16.png" style="top:55%; left:61.26%; width:44.55%; z-index:-1; border: 1px solid black; border-radius:5px;" /> -->
<!-- attr: { class:'slide-section demo', showInPresentation:true, hasScriptWrapper:true } -->
# Exceptions
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic17.png" style="top:42%; left:33%; width:34.38%; z-index:-1; border: 1px solid black; border-radius:5px;" /> -->
<!-- section start -->
<!-- attr: { id:'', class:'slide-section', showInPresentation:true, hasScriptWrapper:true } -->
<!-- # Error Handling Strategies -->
## Assertions vs. Exceptions vs. Other Techniques
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic18.png" style="top:57%; left:72.02%; width:23.20%; z-index:-1" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic19.png" style="top:57%; left:46.09%; width:17.63%; z-index:-1" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic20.png" style="top:57%; left:15.21%; width:23.25%; z-index:-1" /> -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Error Handling Techniques
- How to handle **errors that you expect** to occur?
- Depends on the situation:
- Throw an **exception** (in OOP)
- The most typical action you can do
- Return a neutral value, e.g. **-1** in **IndexOf(…)**
- Substitute the next piece of valid data (e.g. file)
- Return the same answer as the previous time
- Substitute the closest legal value
- Return an error code (in old languages / APIs)
- Display an error message in the UI
- Call method / Log a warning message to a file
- Crash / shutdown / reboot
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Assertions vs. Exceptions
- **Exceptions** are announcements about error condition or unusual event
- Inform the caller about error or exceptional event
- Can be caught and application can continue working
- **Assertions** are fatal errors
- Assertions always indicate bugs in the code
- Can not be caught and processed
- Application can’t continue in case of failed assertion
- When in doubt → throw an exception
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Assertions in C#
- Assertions in C# are rarely used
- In C# prefer throwing an **exception** when the input data / internal object state are invalid
- Exceptions are used in C# and Java instead of **preconditions checking**
- Prefer using **unit testing** for testing the code instead of **postconditions checking**
- Assertions are popular in C / C++
- Where exceptions & unit testing are not popular
- In JS there are no built-in assertion mechanism
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Error Handling Strategy
- Choose your **error handling strategy** and follow it consistently
- Assertions / exceptions / error codes / other
- In C#, .NET and OOP prefer using **exceptions**
- Assertions are rarely used, only as additional checks for fatal error
- Throw an exception for incorrect input / incorrect object state / invalid operation
- In JavaScript use exceptions: **try-catch-finally**
- In non-OOP languages use error codes
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Robustness vs. Correctness
- How will you handle error while calculating single pixel color in a computer game?
- How will you handle error in financial software? Can you afford to lose money?
- **Correctness** == never returning wrong result
- Try to achieve correctness as a primary goal
- **Robustness** == always trying to do something that will allow the software to keep running
- Use as last resort, for non-critical errors
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Assertions vs. Exceptions
```cs
public string Substring(string str, int startIndex, int length)
{
if (str == null)
{
throw new NullReferenceException("Str is null.");
}
if (startIndex >= str.Length)
{
throw new ArgumentException(
"Invalid startIndex:" + startIndex);
}
if (startIndex + length > str.Length)
{
throw new ArgumentException("Invalid length:" + length);
}
…
Debug.Assert(result.Length == length);
}
```
<div class="fragment balloon" style="top:34.62%; left:66.12%; width:29.09%">Check the input and preconditions</div>
<div class="fragment balloon" style="top:75%; left:20.28%; width:46.72%">Perform the method main logic</div>
<div class="fragment balloon" style="top:85%; left:55%; width:24.68%">Check the postconditions</div>
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Error Barricades
- Barricade your program to stop the damage caused by incorrect data
- Consider same approach for class design
- Public methods → validate the data
- Private methods → assume the data is safe
- Consider using exceptions for public methods and assertions for private
- **public methods / functions**
- **private methods / functions**
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic21.png" style="top:20%; left:100%; width:13.46%; z-index:-1" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic22.png" style="top:40%; left:100%; width:10.50%; z-index:-1" /> -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Being Defensive About Defensive Programming
- Too much defensive programming is not good
- Strive for balance
- How much defensive programming to leave in production code?
- Remove the code that results in hard crashes
- Leave in code that checks for important errors
- Log errors for your technical support personnel
- See that the error messages you show are user-friendly
<!-- attr: { class:'slide-section', showInPresentation:true, hasScriptWrapper:true } -->
<!-- # HQC-Part 2: Defensive Programming
## Questions? -->
<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Free Trainings @ Telerik Academy
- C# Programming @ Telerik Academy
- [HQC-Part II course](http://academy.telerik.com/student-courses/programming/high-quality-code-part-2/about)
- Telerik Software Academy
- [telerikacademy.com](https://telerikacademy.com)
- Telerik Academy @ Facebook
- [facebook.com/TelerikAcademy](facebook.com/TelerikAcademy)
- Telerik Software Academy Forums
- [forums.academy.telerik.com](forums.academy.telerik.com)
|
# Create a channel, preload it with the integers 0..10, then close it so
# the readers can detect when it has been drained.
my $channel = Channel.new();
$channel.send($_) for 0..10;
$channel.close;

my @readers;
# Start three concurrent reader tasks that drain the channel.
for 1..3 {
    push @readers, start {
        while 1 {
            # poll is non-blocking; once the closed channel is empty the
            # scalar holds no defined value, so the === Any check exits.
            my $value = $channel.poll;
            last if $value === Any;
            say "$value² = " ~ $value * $value;
        }
    };
}
# Wait for all three readers to finish before the script exits.
await @readers;
|
import 'database.dart';
const DEFAULT_DURATION_MINUTES=30;
/// A visit record persisted in the `visit` table.
///
/// Instances are immutable apart from [id], which is assigned by the
/// database on first insert (0 means "not yet persisted").
class Visit {
  /// Placeholder instance for contexts that need a non-null Visit.
  static final empty = Visit(code: '', name: '', address: '', startDate: DateTime.now());

  /// Backing database table name.
  static final table = 'visit';

  // Row id; 0 until the row has been inserted.
  int id;
  final DateTime createDate;
  final String code;
  final String name;
  final String address;
  final String? latitude;
  final String? longitude;
  final DateTime startDate;
  final DateTime endDate;

  /// Creates a visit; [endDate] defaults to [startDate] plus
  /// [DEFAULT_DURATION_MINUTES], and [created] defaults to now.
  Visit(
      {int? id,
      required this.code,
      required this.name,
      required this.address,
      this.latitude,
      this.longitude,
      required this.startDate,
      DateTime? endDate,
      DateTime? created})
      : this.id = id ?? 0,
        this.createDate = created ?? DateTime.now(),
        this.endDate = endDate ?? startDate.add(Duration(minutes: DEFAULT_DURATION_MINUTES));

  /// Create a visit instance from a database field map.
  Visit.fromDb(Map<String, dynamic> db)
      : this.id = db['id'] as int,
        this.code = db['code'] as String,
        this.name = db['name'] as String,
        this.address = db['address'] as String,
        this.latitude = db['latitude'] as String?,
        this.longitude = db['longitude'] as String?,
        this.createDate = DateTime.fromMillisecondsSinceEpoch(db['create_date'] as int),
        this.startDate = DateTime.fromMillisecondsSinceEpoch(db['start_date'] as int),
        this.endDate = DateTime.fromMillisecondsSinceEpoch(db['end_date'] as int);

  /// Create a new immutable Visit, overriding only the permitted fields.
  Visit from({DateTime? endDate}) {
    return Visit(
        id: this.id,
        code: this.code,
        name: this.name,
        address: this.address,
        longitude: this.longitude,
        latitude: this.latitude,
        startDate: this.startDate,
        endDate: endDate ?? this.endDate,
        created: this.createDate);
  }

  /// Create a database field map from this instance.
  /// A zero id maps to null so the database assigns one on insert.
  Map<String, Object?> toDb() {
    return {
      'id': this.id == 0 ? null : this.id,
      'code': this.code,
      'name': this.name,
      'address': this.address,
      'latitude': this.latitude,
      'longitude': this.longitude,
      'start_date': this.startDate.millisecondsSinceEpoch,
      'end_date': this.endDate.millisecondsSinceEpoch,
      'create_date': this.createDate.millisecondsSinceEpoch
    };
  }

  /// Create or update a visit row:
  /// - If the id is 0, insert and record the new id; otherwise
  /// - Update the existing row in place.
  createOrUpdate() async {
    var database = await DbManager().database;
    if (this.id == 0) {
      this.id = await database.insert(table, this.toDb());
    } else {
      // FIX: the update previously had no `where` clause, which would have
      // rewritten EVERY row in the table. Constrain it to this row's id.
      await database.update(table, this.toDb(), where: 'id=?', whereArgs: [this.id]);
    }
  }

  /// Delete the current visit from the database. Dispose of this instance
  /// afterwards.
  delete() async {
    var database = await DbManager().database;
    await database.delete(table, where: 'id=?', whereArgs: [this.id]);
  }

  /// Get a page of visits ordered by most recent start date.
  static Future<List<Visit>> list(int start, {int limit = 20}) async {
    var database = await DbManager().database;
    var rows = await database.query(table,
        columns: [
          'id',
          'code',
          'name',
          'address',
          'latitude',
          'longitude',
          'start_date',
          'end_date',
          'create_date'
        ],
        orderBy: 'start_date desc',
        offset: start,
        limit: limit);
    return rows.map((r) => Visit.fromDb(r)).toList();
  }
}
|
package leetcode
/**
* https://leetcode.com/problems/check-if-numbers-are-ascending-in-a-sentence/
*/
/**
 * https://leetcode.com/problems/check-if-numbers-are-ascending-in-a-sentence/
 *
 * Scans the space-separated tokens of [s]; every token that parses as an
 * integer must be strictly greater than the previous numeric token.
 */
class Problem2042 {
    fun areNumbersAscending(s: String): Boolean {
        var previous = 0
        for (token in s.splitToSequence(" ")) {
            val parsed = token.toIntOrNull() ?: continue
            if (parsed <= previous) {
                return false
            }
            previous = parsed
        }
        return true
    }
}
|
# Linux Server Configuration
- IP: 34.235.63.160
- SSH Port: 2200
- App URL 34.235.63.160/catalog
### Installed software
- psycopg2
- psycopg2-binary
- python
- apache2
- postgresql
- libapache2-mod-wsgi
### Configuration
https://github.com/ladytrell/LinuxServerConfig
1. Created user grader
a. create ssh keypair
b. add to sudo list
2. Configured ssh to port 2200
a. Changed port in /etc/ssh/sshd_config
3. Configured ufw
a. Blocked all incoming traffic
b. Allow port 2200, 80, and 123
4. Installed and configured Apache2
a. Configured to run WSGI script
5. Install and config WSGI for python app
a. Wrote script to call catalog app
6. Installed and configured PostgreSQL
a. Created Users ubuntu and catalog
b. Created catalog app
### App Location
1. /usr/local/www/catalog/
a. catalog.db
b. catalog.wsgi
c. lotofitems.py
d. catalogDB_Model.py
e. client_secrets.google.json
f. catalogDB_Model.pyc
g. fb_client_secrets.json
h. catalog.py
i. static
i. responsive.css
ii. styles.css
j. templates
i. catalog.html
ii. item.html
iii. category.html
2. /etc/apache2/sites-available/
a. 000-default.conf
### Referenced Sites
https://modwsgi.readthedocs.io/en/develop/configuration-directives/WSGIScriptAlias.html
https://modwsgi.readthedocs.io/en/develop/user-guides/quick-configuration-guide.html
http://flask.pocoo.org/docs/1.0/deploying/mod_wsgi/
https://realpython.com/flask-by-example-part-2-postgres-sqlalchemy-and-alembic/
https://www.digitalocean.com/community/tutorials/how-to-deploy-a-flask-application-on-an-ubuntu-vps
https://docs.sqlalchemy.org/en/latest/core/connections.html
https://www.postgresql.org/docs/9.5/database-roles.html
https://overiq.com/sqlalchemy-101/installing-sqlalchemy-and-connecting-to-database/
https://tutorials.ubuntu.com/tutorial/install-and-configure-apache#2
https://serverfault.com/questions/265410/ubuntu-server-message-says-packages-can-be-updated-but-apt-get-does-not-update
License
----
MIT
**Free Software**
|
import mongoose, { Schema } from "mongoose";
import Bot from "../types/Bot";
// Mongoose schema for a trading bot document.
const schema: Schema<Bot> = new Schema({
  // Optional link to the exchange-connection document the bot trades through.
  exchangeConnectionId: {
    type: mongoose.Schema.Types.ObjectId
  },
  // Balance when the bot was started.
  startBalance: {
    type: Number,
    required: true
  },
  // Most recently recorded balance.
  currentBalance: {
    type: Number,
    required: true
  },
  startDate: {
    type: Date,
    required: true
  },
  // Unset while the bot is still running.
  endDate: {
    type: Date
  },
  status: {
    type: String,
    enum: ['online', 'offline', 'ended'],
    required: true
  },
  // Free-form strategy configuration (shape not enforced by the schema).
  strategy: {
    type: mongoose.Schema.Types.Mixed,
    required: true
  },
  // Paper trading vs. real money.
  type: {
    type: String,
    enum: ['TEST', 'LIVE'],
    required: true
  },
  // Owning user's document id.
  userId: {
    type: mongoose.Schema.Types.ObjectId,
    required: true
  },
  // Currency balances are denominated in (e.g. a quote asset symbol).
  quoteCurrency: {
    type: String,
    required: true
  }
});

// Serialize with a string `id` instead of Mongo's `_id`, and drop the
// version key from JSON output.
schema.set('toJSON', {
  virtuals: true,
  versionKey: false,
  transform: (doc, ret) => {
    ret.id = ret._id.toString();
    delete ret._id;
  }
});

const model = mongoose.model('bot', schema);

export default model;
|
---
featuredpath: "/book2/main/page01.jpg"
featured: ""
preview: "/book2/preview/page01.jpg"
title: "Book 2, Page 1"
categories: ["book2"]
type: "post"
linktitle: ""
date: "2018-03-23T22:01:03-05:00"
author: "Maria Rice"
featuredalt: ""
description2: []
---
# First colored Morphic page ever!
Welcome back from the intermission! Hope you like the new character and the new coloring style.
I managed to finish coloring page 2 and now I'm coloring pages 3 and 4 (got all the other drawing
for those pages done and only the coloring is left).
I'm pleased with my progress so far, but now Spring Break is over and I have to buckle down for
the second half of the semester. I expect the school/work pace will pick up from here on out.
No worries, though! I expect I'll keep up with the update schedule. I only need four more pages
to ensure that the schedule is covered until classes are over for the summer. I think I can push that.
**In the meantime---enjoy the colors!** And thanks for reading! Return next week to find out who
Vix is talking to.
|
import 'package:anvil/src/build/build_data.dart';
import 'package:anvil/src/config.dart';
import 'package:anvil/src/content/page.dart';
import 'package:anvil/src/content/section.dart';
import 'build_page.dart';
/// Builds a content [section]: renders its index page (if any) and then
/// recursively builds every child page and subsection.
///
/// When an index page exists it receives extra template data describing the
/// section's contents: `children` (all child maps), plus `pages` and
/// `sections` (the same children partitioned by type).
void buildSection(Config config, BuildData buildData, Section section) {
  if (section.index != null) {
    final children = section.children.map((e) => e.toMap()).toList();
    final pages = section.children
        .whereType<Page>()
        .map((content) => content.toMap())
        .toList();
    final sections = section.children
        .whereType<Section>()
        .map((content) => content.toMap())
        .toList();
    buildPage(
      config,
      buildData,
      section.index!,
      extraData: <String, Object?>{
        'children': children,
        'pages': pages,
        'sections': sections,
      },
    );
  }

  // Recurse into children. (The previous `try { ... } catch (e) { rethrow; }`
  // wrapper was a no-op — catching only to rethrow changes nothing — so it
  // has been removed.)
  for (final child in section.children) {
    child.when(
      config,
      buildData,
      section: buildSection,
      page: buildPage,
    );
  }
}
|
#![allow(non_snake_case, non_upper_case_globals)]
#![allow(non_camel_case_types)]
//! MCU debug component
//!
//! Used by: stm32l412, stm32l4x1, stm32l4x2, stm32l4x3
use crate::{RORegister, RWRegister};
#[cfg(not(feature = "nosync"))]
use core::marker::PhantomData;
/// DBGMCU_IDCODE — MCU device ID code register
pub mod IDCODE {
    /// Device identifier
    pub mod DEV_ID {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (12 bits: 0xfff << 0)
        pub const mask: u32 = 0xfff << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Revision identifier
    pub mod REV_ID {
        /// Offset (16 bits)
        pub const offset: u32 = 16;
        /// Mask (16 bits: 0xffff << 16)
        pub const mask: u32 = 0xffff << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}
/// Debug MCU configuration register
pub mod CR {
    /// Debug Sleep mode
    pub mod DBG_SLEEP {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (1 bit: 1 << 0)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Debug Stop mode
    pub mod DBG_STOP {
        /// Offset (1 bits)
        pub const offset: u32 = 1;
        /// Mask (1 bit: 1 << 1)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Debug Standby mode
    pub mod DBG_STANDBY {
        /// Offset (2 bits)
        pub const offset: u32 = 2;
        /// Mask (1 bit: 1 << 2)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Trace pin assignment control (field name suggests this is the trace
    /// I/O enable bit — confirm against the reference manual)
    pub mod TRACE_IOEN {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Trace pin assignment control (2-bit trace mode selector)
    pub mod TRACE_MODE {
        /// Offset (6 bits)
        pub const offset: u32 = 6;
        /// Mask (2 bits: 0b11 << 6)
        pub const mask: u32 = 0b11 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}
/// Debug MCU APB1 freeze register1
pub mod APB1FZR1 {
    // Each DBG_*_STOP bit, when set, freezes the corresponding peripheral's
    // counter/state machine while the core is halted in debug mode.
    /// TIM2 counter stopped when core is halted
    pub mod DBG_TIM2_STOP {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (1 bit: 1 << 0)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// TIM6 counter stopped when core is halted
    pub mod DBG_TIM6_STOP {
        /// Offset (4 bits)
        pub const offset: u32 = 4;
        /// Mask (1 bit: 1 << 4)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// TIM7 counter stopped when core is halted
    pub mod DBG_TIM7_STOP {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// RTC counter stopped when core is halted
    pub mod DBG_RTC_STOP {
        /// Offset (10 bits)
        pub const offset: u32 = 10;
        /// Mask (1 bit: 1 << 10)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// Window watchdog counter stopped when core is halted
    pub mod DBG_WWDG_STOP {
        /// Offset (11 bits)
        pub const offset: u32 = 11;
        /// Mask (1 bit: 1 << 11)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// Independent watchdog counter stopped when core is halted
    pub mod DBG_IWDG_STOP {
        /// Offset (12 bits)
        pub const offset: u32 = 12;
        /// Mask (1 bit: 1 << 12)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// I2C1 SMBUS timeout counter stopped when core is halted
    pub mod DBG_I2C1_STOP {
        /// Offset (21 bits)
        pub const offset: u32 = 21;
        /// Mask (1 bit: 1 << 21)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// I2C2 SMBUS timeout counter stopped when core is halted
    pub mod DBG_I2C2_STOP {
        /// Offset (22 bits)
        pub const offset: u32 = 22;
        /// Mask (1 bit: 1 << 22)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// I2C3 SMBUS timeout counter stopped when core is halted
    pub mod DBG_I2C3_STOP {
        /// Offset (23 bits)
        pub const offset: u32 = 23;
        /// Mask (1 bit: 1 << 23)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// bxCAN stopped when core is halted
    pub mod DBG_CAN_STOP {
        /// Offset (25 bits)
        pub const offset: u32 = 25;
        /// Mask (1 bit: 1 << 25)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// LPTIM1 counter stopped when core is halted
    pub mod DBG_LPTIM1_STOP {
        /// Offset (31 bits)
        pub const offset: u32 = 31;
        /// Mask (1 bit: 1 << 31)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}
/// Debug MCU APB1 freeze register 2
pub mod APB1FZR2 {
    // Single freeze bit defined for this register: LPTIM2.
    /// LPTIM2 counter stopped when core is halted
    pub mod DBG_LPTIM2_STOP {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}
/// Debug MCU APB2 freeze register
pub mod APB2FZR {
    // Freeze bits for the APB2 timers defined here: TIM1, TIM15, TIM16.
    /// TIM1 counter stopped when core is halted
    pub mod DBG_TIM1_STOP {
        /// Offset (11 bits)
        pub const offset: u32 = 11;
        /// Mask (1 bit: 1 << 11)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// TIM15 counter stopped when core is halted
    pub mod DBG_TIM15_STOP {
        /// Offset (16 bits)
        pub const offset: u32 = 16;
        /// Mask (1 bit: 1 << 16)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
    /// TIM16 counter stopped when core is halted
    pub mod DBG_TIM16_STOP {
        /// Offset (17 bits)
        pub const offset: u32 = 17;
        /// Mask (1 bit: 1 << 17)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}
/// Memory layout of the peripheral's registers. `#[repr(C)]` with
/// consecutive 32-bit fields, so each field sits 4 bytes after the previous.
#[repr(C)]
pub struct RegisterBlock {
    /// DBGMCU_IDCODE (offset 0x00)
    pub IDCODE: RORegister<u32>,
    /// Debug MCU configuration register (offset 0x04)
    pub CR: RWRegister<u32>,
    /// Debug MCU APB1 freeze register1 (offset 0x08)
    pub APB1FZR1: RWRegister<u32>,
    /// Debug MCU APB1 freeze register 2 (offset 0x0C)
    pub APB1FZR2: RWRegister<u32>,
    /// Debug MCU APB2 freeze register (offset 0x10)
    pub APB2FZR: RWRegister<u32>,
}
/// Reset values for each register in `RegisterBlock`, one `u32` per register.
pub struct ResetValues {
    pub IDCODE: u32,
    pub CR: u32,
    pub APB1FZR1: u32,
    pub APB1FZR2: u32,
    pub APB2FZR: u32,
}
/// Handle to the peripheral's register block at a fixed base address.
#[cfg(not(feature = "nosync"))]
pub struct Instance {
    /// Base address the `Deref` impl casts to a `RegisterBlock` pointer.
    pub(crate) addr: u32,
    /// Associates the instance with `RegisterBlock` without storing one.
    pub(crate) _marker: PhantomData<*const RegisterBlock>,
}
#[cfg(not(feature = "nosync"))]
impl ::core::ops::Deref for Instance {
    type Target = RegisterBlock;
    #[inline(always)]
    fn deref(&self) -> &RegisterBlock {
        // SAFETY: assumes `addr` is the base address of a correctly
        // memory-mapped register block for the `Instance`'s lifetime —
        // TODO(review): confirm this invariant wherever `Instance` is built.
        unsafe { &*(self.addr as *const _) }
    }
}
// SAFETY(review): `Instance` only carries a base address into memory-mapped
// hardware; sending it across threads is presumably sound under the crate's
// register-access model — confirm against the crate's synchronization rules.
#[cfg(feature = "rtic")]
unsafe impl Send for Instance {}
|
#!/bin/bash
# Regression check: run RooUnfoldExample, normalize its output with the
# cleanup script, then diff against the stored reference in ref/.
# Exit non-zero as soon as any step fails (previously failures were ignored
# and the diff ran against a stale/empty file).
set -e
outfile=RooUnfoldExample.cxx.ref
RooUnfoldExample > "$outfile"
bash ref/cleanup.sh "$outfile"
diff "$outfile" "ref/$outfile"
|
using System.Threading;
using MediatR;
using NetCoreKit.Samples.TodoAPI.Domain;
namespace NetCoreKit.Samples.TodoAPI.v1.Services
{
public class EventSubscriber : INotificationHandler<ProjectCreated>
{
public async System.Threading.Tasks.Task Handle(ProjectCreated @event, CancellationToken cancellationToken)
{
// do something with @event
//...
await System.Threading.Tasks.Task.FromResult(@event);
}
}
}
|
## 内存信息收集
从Node v. 12开始,可以收集Appium的内存使用信息来分析问题。 这对于分析内存泄漏问题非常有帮助。
### 创建dump文件
为了在任意时间创建dump文件,执行`node`进程时增加如下命令行参数,这会执行appium.js脚本:
```
--heapsnapshot-signal=<signal>
```
这里的 `signal` 可以是一个有效的自定义信号,例如 `SIGUSR2`。然后你就可以
```
kill -SIGUSR2 <nodePID>
```
dump文件会被存放在Appium主脚本执行路径下。文件扩展名为 `.heapsnapshot`,文件可以在Chrome Inspector中加载来进行分析。
### dump文件分析
详细信息请查看[Rising Stack article](https://blog.risingstack.com/finding-a-memory-leak-in-node-js/)。
|
<?php
/**
* InterKassa driver for the Omnipay PHP payment processing library
*
* @link https://github.com/hiqdev/omnipay-interkassa
* @package omnipay-interkassa
* @license MIT
* @copyright Copyright (c) 2015-2017, HiQDev (http://hiqdev.com/)
*/
namespace Omnipay\InterKassa\Message;
/**
 * InterKassa Abstract Request.
 *
 * Shared parameter accessors and signature calculation for all
 * InterKassa request/response messages.
 */
abstract class AbstractRequest extends \Omnipay\Common\Message\AbstractRequest
{
    /**
     * {@inheritdoc}
     */
    protected $zeroAmountAllowed = false;
    /**
     * @var string InterKassa SCI endpoint URL
     */
    protected $endpoint = 'https://sci.interkassa.com/';
    /**
     * Get the unified purse.
     *
     * Alias of {@see getCheckoutId()}.
     *
     * @return string merchant purse
     */
    public function getPurse()
    {
        return $this->getCheckoutId();
    }
    /**
     * Set the unified purse.
     *
     * Alias of {@see setCheckoutId()}.
     *
     * @param $value
     * @return self
     */
    public function setPurse($value)
    {
        return $this->setCheckoutId($value);
    }
    /**
     * Get the merchant purse.
     *
     * @return string merchant purse
     */
    public function getCheckoutId()
    {
        return $this->getParameter('checkoutId');
    }
    /**
     * Set the merchant purse.
     *
     * @param string $purse merchant purse
     *
     * @return self
     */
    public function setCheckoutId($purse)
    {
        return $this->setParameter('checkoutId', $purse);
    }
    /**
     * Get the sign algorithm.
     *
     * Always returned lower-cased so it can be fed to hash() directly.
     *
     * @return string sign algorithm
     */
    public function getSignAlgorithm()
    {
        return strtolower($this->getParameter('signAlgorithm'));
    }
    /**
     * Set the sign algorithm.
     *
     * @param string $value sign algorithm
     *
     * @return self
     */
    public function setSignAlgorithm($value)
    {
        return $this->setParameter('signAlgorithm', $value);
    }
    /**
     * Get the sign key.
     *
     * @return string sign key
     */
    public function getSignKey()
    {
        return $this->getParameter('signKey');
    }
    /**
     * Set the sign key.
     *
     * @param string $value sign key
     *
     * @return self
     */
    public function setSignKey($value)
    {
        return $this->setParameter('signKey', $value);
    }
    /**
     * Get the test key.
     *
     * @return string test key
     */
    public function getTestKey()
    {
        return $this->getParameter('testKey');
    }
    /**
     * Set the test key.
     *
     * @param string $value test key
     *
     * @return self
     */
    public function setTestKey($value)
    {
        return $this->setParameter('testKey', $value);
    }
    /**
     * Get the method for success return.
     *
     * @return mixed
     */
    public function getReturnMethod()
    {
        return $this->getParameter('returnMethod');
    }
    /**
     * Sets the method for success return.
     *
     * @param $returnMethod
     * @return \Omnipay\Common\Message\AbstractRequest
     */
    public function setReturnMethod($returnMethod)
    {
        return $this->setParameter('returnMethod', $returnMethod);
    }
    /**
     * Get the method for canceled payment return.
     *
     * @return mixed
     */
    public function getCancelMethod()
    {
        return $this->getParameter('cancelMethod');
    }
    /**
     * Sets the method for canceled payment return.
     *
     * @param $cancelMethod
     * @return \Omnipay\Common\Message\AbstractRequest
     */
    public function setCancelMethod($cancelMethod)
    {
        return $this->setParameter('cancelMethod', $cancelMethod);
    }
    /**
     * Get the method for request notify.
     *
     * @return mixed
     */
    public function getNotifyMethod()
    {
        return $this->getParameter('notifyMethod');
    }
    /**
     * Sets the method for request notify.
     *
     * @param $notifyMethod
     * @return \Omnipay\Common\Message\AbstractRequest
     */
    public function setNotifyMethod($notifyMethod)
    {
        return $this->setParameter('notifyMethod', $notifyMethod);
    }
    /**
     * Calculates sign for the $data.
     *
     * Values are sorted by field name, the sign key is appended as the last
     * value, everything is joined with ':', hashed with the configured
     * algorithm and base64-encoded.
     *
     * @param array $data request/notification fields (any existing ik_sign is ignored)
     * @param string $signKey merchant sign key (or test key)
     * @return string base64-encoded signature
     */
    public function calculateSign($data, $signKey)
    {
        // A previously present signature must not take part in the calculation.
        unset($data['ik_sign']);
        // Sort by field name, comparing names as strings.
        ksort($data, SORT_STRING);
        // The sign key is appended as the final value.
        array_push($data, $signKey);
        $signAlgorithm = $this->getSignAlgorithm();
        $signString = implode(':', $data);
        return base64_encode(hash($signAlgorithm, $signString, true));
    }
}
|
#!/usr/bin/env ruby
# frozen_string_literal: true
require File.expand_path("../config/boot.rb", __dir__)
require File.expand_path("../config/environment.rb", __dir__)
require File.expand_path("../app/extensions/extensions.rb", __dir__)
# Render and print one Symbiota-format report for the given date range
# (a [start, end] pair of year strings). The first line of the report is
# stripped unless +do_labels+ is truthy. Sleeps afterwards to throttle
# consecutive queries.
def do_report(year, do_labels = false)
  warn("Doing #{year.inspect}...")
  query = Query.lookup(:Observation, :all, date: year)
  output = ObservationReport::Symbiota.new(query: query).render
  output = output.sub(/^[^\n]*\n/, "") unless do_labels
  puts output
  warn("  #{query.num_results} observations\n")
  sleep 60
end
# One combined report (with labels) for everything before 2000, then a
# separate report per year from 2000 through 2019.
do_report(%w[1000 1999], :do_labels)
2000.upto(2019) { |year| do_report([year.to_s, year.to_s]) }
exit 0
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
// TODO: Decide whether PageTemplateModels should also be generated into this file.
*/
/**
* @class
* Initializes a new instance of the Sku class.
* @constructor
* SKU details
*
* @member {string} name SKU name to specify whether the key vault is a
* standard vault or a premium vault. Possible values include: 'standard',
* 'premium'
*
*/
export interface Sku {
  /** SKU name: 'standard' or 'premium'. */
  name: string;
}
/**
* @class
* Initializes a new instance of the AccessPolicyEntry class.
* @constructor
 * An identity that has access to the key vault. All identities in the array
* must use the same tenant ID as the key vault's tenant ID.
*
* @member {uuid} tenantId The Azure Active Directory tenant ID that should be
* used for authenticating requests to the key vault.
*
* @member {uuid} objectId The object ID of a user, service principal or
* security group in the Azure Active Directory tenant for the vault. The
* object ID must be unique for the list of access policies.
*
* @member {uuid} [applicationId] Application ID of the client making request
* on behalf of a principal
*
* @member {object} permissions Permissions the identity has for keys, secrets
* and certificates.
*
* @member {array} [permissions.keys] Permissions to keys
*
* @member {array} [permissions.secrets] Permissions to secrets
*
* @member {array} [permissions.certificates] Permissions to certificates
*
*/
export interface AccessPolicyEntry {
  /** Azure AD tenant ID used for authenticating requests to the key vault. */
  tenantId: string;
  /** Object ID of a user, service principal or security group; unique within the access-policy list. */
  objectId: string;
  /** Application ID of the client making requests on behalf of a principal. */
  applicationId?: string;
  /** Permissions the identity has for keys, secrets and certificates. */
  permissions: Permissions;
}
/**
* @class
* Initializes a new instance of the Permissions class.
* @constructor
* Permissions the identity has for keys, secrets and certificates.
*
* @member {array} [keys] Permissions to keys
*
* @member {array} [secrets] Permissions to secrets
*
* @member {array} [certificates] Permissions to certificates
*
*/
export interface Permissions {
  /** Permissions to keys. */
  keys?: string[];
  /** Permissions to secrets. */
  secrets?: string[];
  /** Permissions to certificates. */
  certificates?: string[];
}
/**
* @class
* Initializes a new instance of the VaultProperties class.
* @constructor
* Properties of the vault
*
* @member {string} [vaultUri] The URI of the vault for performing operations
* on keys and secrets.
*
* @member {uuid} tenantId The Azure Active Directory tenant ID that should be
* used for authenticating requests to the key vault.
*
* @member {object} sku SKU details
*
* @member {string} [sku.name] SKU name to specify whether the key vault is a
* standard vault or a premium vault. Possible values include: 'standard',
* 'premium'
*
* @member {array} accessPolicies An array of 0 to 16 identities that have
* access to the key vault. All identities in the array must use the same
* tenant ID as the key vault's tenant ID.
*
* @member {boolean} [enabledForDeployment] Property to specify whether Azure
* Virtual Machines are permitted to retrieve certificates stored as secrets
* from the key vault.
*
* @member {boolean} [enabledForDiskEncryption] Property to specify whether
* Azure Disk Encryption is permitted to retrieve secrets from the vault and
* unwrap keys.
*
* @member {boolean} [enabledForTemplateDeployment] Property to specify
* whether Azure Resource Manager is permitted to retrieve secrets from the
* key vault.
*
*/
export interface VaultProperties {
  /** URI of the vault for performing operations on keys and secrets. */
  vaultUri?: string;
  /** Azure AD tenant ID used for authenticating requests to the key vault. */
  tenantId: string;
  /** SKU details. */
  sku: Sku;
  /** 0 to 16 identities that have access to the key vault (all in the vault's tenant). */
  accessPolicies: AccessPolicyEntry[];
  /** Whether Azure VMs may retrieve certificates stored as secrets. */
  enabledForDeployment?: boolean;
  /** Whether Azure Disk Encryption may retrieve secrets and unwrap keys. */
  enabledForDiskEncryption?: boolean;
  /** Whether Azure Resource Manager may retrieve secrets. */
  enabledForTemplateDeployment?: boolean;
}
/**
* @class
* Initializes a new instance of the VaultCreateOrUpdateParameters class.
* @constructor
* Parameters for creating or updating a vault
*
* @member {string} location The supported Azure location where the key vault
* should be created.
*
* @member {object} [tags] The tags that will be assigned to the key vault.
*
* @member {object} properties Properties of the vault
*
* @member {string} [properties.vaultUri] The URI of the vault for performing
* operations on keys and secrets.
*
* @member {uuid} [properties.tenantId] The Azure Active Directory tenant ID
* that should be used for authenticating requests to the key vault.
*
* @member {object} [properties.sku] SKU details
*
* @member {string} [properties.sku.name] SKU name to specify whether the key
* vault is a standard vault or a premium vault. Possible values include:
* 'standard', 'premium'
*
* @member {array} [properties.accessPolicies] An array of 0 to 16 identities
* that have access to the key vault. All identities in the array must use
* the same tenant ID as the key vault's tenant ID.
*
* @member {boolean} [properties.enabledForDeployment] Property to specify
* whether Azure Virtual Machines are permitted to retrieve certificates
* stored as secrets from the key vault.
*
* @member {boolean} [properties.enabledForDiskEncryption] Property to specify
* whether Azure Disk Encryption is permitted to retrieve secrets from the
* vault and unwrap keys.
*
* @member {boolean} [properties.enabledForTemplateDeployment] Property to
* specify whether Azure Resource Manager is permitted to retrieve secrets
* from the key vault.
*
*/
export interface VaultCreateOrUpdateParameters extends BaseResource {
  /** Supported Azure location where the key vault should be created. */
  location: string;
  /** Tags assigned to the key vault. */
  tags?: { [propertyName: string]: string };
  /** Properties of the vault. */
  properties: VaultProperties;
}
/**
* @class
* Initializes a new instance of the Resource class.
* @constructor
* Key Vault resource
*
* @member {string} [id] The Azure Resource Manager resource ID for the key
* vault.
*
* @member {string} name The name of the key vault.
*
* @member {string} [type] The resource type of the key vault.
*
* @member {string} location The supported Azure location where the key vault
* should be created.
*
* @member {object} [tags] The tags that will be assigned to the key vault.
*
*/
export interface Resource extends BaseResource {
  /** Azure Resource Manager resource ID for the key vault. */
  id?: string;
  /** Name of the key vault. */
  name: string;
  /** Resource type of the key vault. */
  type?: string;
  /** Supported Azure location where the key vault should be created. */
  location: string;
  /** Tags assigned to the key vault. */
  tags?: { [propertyName: string]: string };
}
/**
* @class
* Initializes a new instance of the Vault class.
* @constructor
* Resource information with extended details.
*
* @member {object} properties Properties of the vault
*
* @member {string} [properties.vaultUri] The URI of the vault for performing
* operations on keys and secrets.
*
* @member {uuid} [properties.tenantId] The Azure Active Directory tenant ID
* that should be used for authenticating requests to the key vault.
*
* @member {object} [properties.sku] SKU details
*
* @member {string} [properties.sku.name] SKU name to specify whether the key
* vault is a standard vault or a premium vault. Possible values include:
* 'standard', 'premium'
*
* @member {array} [properties.accessPolicies] An array of 0 to 16 identities
* that have access to the key vault. All identities in the array must use
* the same tenant ID as the key vault's tenant ID.
*
* @member {boolean} [properties.enabledForDeployment] Property to specify
* whether Azure Virtual Machines are permitted to retrieve certificates
* stored as secrets from the key vault.
*
* @member {boolean} [properties.enabledForDiskEncryption] Property to specify
* whether Azure Disk Encryption is permitted to retrieve secrets from the
* vault and unwrap keys.
*
* @member {boolean} [properties.enabledForTemplateDeployment] Property to
* specify whether Azure Resource Manager is permitted to retrieve secrets
* from the key vault.
*
*/
export interface Vault extends Resource {
  /** Properties of the vault. */
  properties: VaultProperties;
}
|
/// A named category used to group articles.
class ArticleCategory {
  /// Display name of the category.
  String name;
  ArticleCategory({required this.name});
}
|
import {ICache} from './ICache';
/**
 * In-memory LRU cache. `list` tracks recency (index 0 = most recently used);
 * `hash` provides O(1) value lookup by key.
 */
export class LRUMemCache<T> implements ICache<T> {
    list: { key: string, value: T }[] = [];
    hash: { [key: string]: T } = {};

    constructor(private size: number) {
    }

    /**
     * Returns the cached value for `key` (undefined when absent) and marks
     * the entry as most recently used.
     */
    get(key: string): Promise<T> {
        // Check key presence (not value truthiness) so falsy cached values
        // (0, '', false, null) still get their recency refreshed.
        if (key in this.hash) {
            const index = this.list.findIndex(i => i.key === key);
            const item = this.list.splice(index, 1)[0];
            this.list.unshift(item);
        }
        return Promise.resolve(this.hash[key]);
    }

    /**
     * Stores `key` -> `value`, evicting the least recently used entry when
     * the cache is full. Overwriting an existing key updates it in place
     * instead of leaving a duplicate recency entry behind.
     */
    set(key: string, value: T): Promise<void> {
        if (key in this.hash) {
            // Remove the stale recency entry; previously a duplicate was
            // unshifted, corrupting the list and evicting the wrong keys.
            const index = this.list.findIndex(i => i.key === key);
            this.list.splice(index, 1);
        } else if (this.list.length >= this.size) {
            // Evict the least recently used entry (tail of the list).
            const evicted = this.list.pop();
            if (evicted) {
                delete this.hash[evicted.key];
            }
        }
        this.list.unshift({key: key, value: value});
        this.hash[key] = value;
        return Promise.resolve();
    }
}
|
<?php
namespace Traits;
Trait Errors{
    /**
     * Render a minimal error page for a supported HTTP status code.
     * Unsupported or non-integer codes are silently ignored, exactly as the
     * original if/elseif chain behaved (it compared with ===).
     *
     * @param int $status HTTP status code
     */
    public function error($status){
        // Supported status codes mapped to their reason phrases.
        $messages = [
            404 => 'Not Found',
            403 => 'Forbidden',
            401 => 'Unauthorized',
            400 => 'Bad Request',
            408 => 'Request Timeout',
            501 => 'Not Implemented',
            502 => 'Bad Gateway',
            503 => 'Service Unavailable',
        ];
        // is_int() preserves the strict (===) matching of the original chain,
        // so numeric strings like "404" still produce no output.
        if (is_int($status) && isset($messages[$status])) {
            $this->errorFormat($status, $messages[$status]);
        }
    }
    /**
     * Echo the status code and message as a centered HTML paragraph.
     */
    private function errorFormat($status , $message){
        echo '<p style="text-align: center; margin-top: 350px; font-size: larger">'.$status.'<br>'.$message.'</p>';
    }
}
|
#!/bin/bash
# Based on
# https://github.com/docker-32bit/debian/blob/i386/build-image.sh
# and
# https://github.com/docker/docker/blob/master/contrib/mkimage.sh
# Other resources:
# https://l3net.wordpress.com/2013/09/21/how-to-build-a-debian-livecd/
# https://www.opengeeks.me/2015/04/build-your-hybrid-debian-distro-with-xorriso/
# https://www.reversengineered.com/2014/05/17/building-and-booting-debian-live-over-the-network/
if [ "$(id -u)" != "0" ]; then
   echo "This script must be run as root" 1>&2
   exit 1
fi
T_START=$(date +'%s')
# Make functions in the files below available for use
. chroot/chroot_functions.sh
. chroot/image_functions.sh
# Positional parameters
OWNER=$1
DISTRO=$2
SUITE=$3
TGZ1=$4
TGZ2=$5
UNAME=$6
# Settings
ARCH=i386
DIR_CHROOT="/var/chroot/$SUITE/min"
APT_MIRROR='http://httpredir.debian.org/debian'
DOCKER_IMAGE="$OWNER/32bit-$DISTRO-$SUITE-min"
echo '-----------------'
echo 'Build parameters:'
echo "Architecture: $ARCH"
echo "Suite: $SUITE"
echo "Chroot directory: $DIR_CHROOT"
echo "Apt-get mirror: $APT_MIRROR"
echo "Docker image: $DOCKER_IMAGE"
echo '---------------------------'
# Print minutes/seconds elapsed since T_START. Replaces three copy-pasted
# timing blocks from the original script; output is identical.
print_elapsed () {
    local t_end=$(date +'%s')
    local t_elapsed=$(($t_end-$T_START))
    echo '-------------'
    echo 'Time elapsed:'
    echo "$(($t_elapsed / 60)) minutes and $(($t_elapsed % 60)) seconds"
}
# CHROOT OPERATIONS
create_debian $OWNER $SUITE $DIR_CHROOT
DIR_ROOT=$(dirname $PWD)
DIR_USR_LOCAL_BIN=$DIR_ROOT/usr_local_bin
# Copy the helper scripts into the chroot's /usr/local/bin and mark the named
# one executable. NOTE(review): the cp deliberately copies every script on
# each call (only the chmod is per-file); kept to preserve original behavior.
cp_user_local_bin () {
    SCRIPT_TO_COPY=$1
    DIR_ROOT=$(dirname $PWD)
    cp "$DIR_ROOT"/min/usr_local_bin/* "$DIR_CHROOT/usr/local/bin"
    chmod a+x "$DIR_CHROOT/usr/local/bin/$SCRIPT_TO_COPY"
}
cp_user_local_bin 'aptget'
cp_user_local_bin 'finalize-root'
cp_user_local_bin 'finalize-user'
cp_user_local_bin 'min-root'
cp_user_local_bin 'min-user'
cp_user_local_bin 'check-min'
exec_chroot $DIR_CHROOT /usr/local/bin/min-root
print_elapsed
# CHROOT -> TGZ
TGZ_SHORT=$TGZ1
TGZ_LONG=$TGZ2
create_tgz $TGZ_LONG $DIR_CHROOT
# Refresh the short-named copy of the archive (-f: don't fail if it is absent).
rm -f "$TGZ_SHORT"
cp "$TGZ_LONG" "$TGZ_SHORT"
# OUTPUT FILES: change ownership to user
chown "$UNAME":users "$TGZ_SHORT"
chown "$UNAME":users "$TGZ_LONG"
chown "$UNAME":users "$TGZ_LONG.md5sum"
print_elapsed
# TGZ -> IMAGE
import_local_image $TGZ_LONG $DOCKER_IMAGE
print_elapsed
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace viva{
/// Keeps a Cloth's sphere/capsule collider arrays in sync with colliders
/// that enter and leave this object's trigger volume.
public class itemSphereClothInteraction : MonoBehaviour
{
    public Cloth cloth;
    [Range(1,4)]
    [SerializeField]
    private int maxColliders = 3;
    // Spheres with a radius at or below this are ignored.
    [SerializeField]
    private float minimumRadius = 0.04f;

    private Set<SphereCollider> sphereColliders = new Set<SphereCollider>();
    private Set<CapsuleCollider> capsuleColliders = new Set<CapsuleCollider>();

    private void OnTriggerEnter( Collider collider ){
        var newSphere = collider.GetComponentInChildren<SphereCollider>();
        // Fix: compare against the serialized minimumRadius field instead of
        // the hard-coded 0.04f, so the inspector value actually takes effect.
        if( newSphere && !newSphere.isTrigger && newSphere.radius > minimumRadius && sphereColliders.Count < maxColliders ){
            sphereColliders.Add( newSphere );
            UpdateSphereArray();
        }
        var newCapsule = collider.GetComponentInChildren<CapsuleCollider>();
        if( newCapsule && capsuleColliders.Count < maxColliders ){
            capsuleColliders.Add( newCapsule );
            UpdateCapsuleArray();
        }
    }
    private void OnTriggerExit( Collider collider ){
        var newSphere = collider.GetComponentInChildren<SphereCollider>();
        if( newSphere && !newSphere.isTrigger ){
            sphereColliders.Remove( newSphere );
            UpdateSphereArray();
        }
        var newCapsule = collider.GetComponentInChildren<CapsuleCollider>();
        if( newCapsule ){
            capsuleColliders.Remove( newCapsule );
            UpdateCapsuleArray();
        }
    }
    private void UpdateSphereArray(){
        // Prune destroyed colliders first (mirrors UpdateCapsuleArray) so a
        // dead SphereCollider is never handed to the cloth.
        for( int i=sphereColliders.Count; i-->0; ){
            if( sphereColliders.objects[i] == null ){
                sphereColliders.objects.RemoveAt(i);
            }
        }
        var list = new List<ClothSphereColliderPair>();
        foreach( var sphere in sphereColliders.objects ){
            list.Add( new ClothSphereColliderPair( sphere ) );
        }
        cloth.sphereColliders = list.ToArray();
    }
    private void UpdateCapsuleArray(){
        for( int i=capsuleColliders.Count; i-->0; ){
            if( capsuleColliders.objects[i] == null ){
                capsuleColliders.objects.RemoveAt(i);
            }
        }
        cloth.capsuleColliders = capsuleColliders.objects.ToArray();
    }
}
}
|
#ifndef __DHT11_H__
#define __DHT11_H__
#include "stm32f10x_gpio.h"
/* Pin binding for one DHT11 sensor: the GPIO port and pin of its data line. */
typedef struct
{
    GPIO_TypeDef*DATA_GPIO;  /* GPIO port the data line is attached to */
    uint16_t DATA_Pin;       /* GPIO pin mask of the data line */
}DHT11;
/* One decoded DHT11 reading: humidity, temperature and check byte. */
typedef struct
{
    uint8_t HumidityInteger;     /* integer part of the humidity reading */
    uint8_t HumidityDecimal;     /* fractional part of the humidity reading */
    uint8_t TemperatureInteger;  /* integer part of the temperature reading */
    uint8_t TemperatureDecimal;  /* fractional part of the temperature reading */
    uint8_t Check;               /* check byte (presumably a checksum of the other four — confirm against the DHT11 datasheet) */
}DHT11_Data;
/* Initialize the GPIO used by the given sensor. */
void DHT11_Init(DHT11*dht11);
/* Read one measurement from the sensor into *data. */
void DHT11_Get(DHT11*dht11,DHT11_Data*data);
#endif
|
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
import 'package:flutter_news_app/EventsTabs.dart';
import 'package:flutter_news_app/NewsTabs.dart';
import 'package:flutter_news_app/PodcastPage.dart';
import 'package:flutter_news_app/page_view.dart';
import 'package:flutter_news_app/util.dart';
/// Podcast browser screen: one tab per podcast category plus bottom navigation.
class PodcastTabs extends StatefulWidget {
  @override
  PodcastPageState createState() => new PodcastPageState();
}
class PodcastPageState extends State<PodcastTabs> {
  Util newUtil = new Util();
  // API key for the Digital Podcast search service; loaded in initState.
  static String _podCastApi;
  // Base search URL; the API key and the per-tab keyword are appended to it.
  String _urlStringPodCast = "http://api.digitalpodcast.com/v2r/search/?format=json&appid=";
  String _keyword = "&keywords=";
  // NOTE(review): _search is never read or written — candidate for removal.
  String _search;
  // Active bottom-navigation index; 3 selects this Podcast page.
  int _currentIndex = 3;
  @override
  void initState() {
    super.initState();
    _podCastApi = newUtil.podCastApi;
  }
  @override
  Widget build(BuildContext context) {
    timeDilation = 1.0;
    // 13 category tabs, each backed by a HomePage that queries the podcast
    // search API with the matching keyword.
    return MaterialApp(
        debugShowCheckedModeBanner: false,
        home: DefaultTabController(
          length: 13,
          child: Scaffold(
              appBar: AppBar(
                  backgroundColor: Color.fromRGBO(128, 0, 128, 50.0),
                  leading: new IconButton(
                      icon: new Icon(Icons.arrow_back),
                      onPressed: () {
                        Navigator.pop(context, true);
                      }),
                  bottom: new TabBar(
                    isScrollable: true,
                    indicatorColor: Color.fromRGBO(128, 0, 128, 50.0),
                    tabs: <Widget>[
                      new Tab(text: "Music"),
                      new Tab(text: "Business"),
                      new Tab(text: "Educational"),
                      new Tab(text: "Comedy"),
                      new Tab(text: "News & Politics",),
                      new Tab(text: "Science & Medicine"),
                      new Tab(text: "Sports"),
                      new Tab(text: "Technology & Gadgets",),
                      new Tab(text: "Television",),
                      new Tab(text: "Film & Entertainment",),
                      new Tab(text: "Charity & Causes",),
                      new Tab(text: "Religion & Spirituality",),
                      new Tab(text: "Arts",)
                    ],
                    labelStyle: TextStyle(
                      fontSize: 20.0, fontFamily: 'RobotoMono',),
                  ),
                  title: Text("Podcast", style: new TextStyle(
                    fontWeight: FontWeight.bold,
                    fontFamily: 'Raleway',
                    fontSize: 22.0,
                    color: Colors.white,
                  ),)),
              body: TabBarView(
                children: [
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword + "music"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "business"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "educational"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword + "comedy"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "news & politics"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "science & medicine"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword + "sports"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "technology & gadgets"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "television"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "film & entertainment"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "charity & causes"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword +
                          "religion & spirituality"),
                  new HomePage(
                      url: _urlStringPodCast + _podCastApi + _keyword + "arts")
                ],
              ),
              bottomNavigationBar: BottomNavigationBar(
                currentIndex: _currentIndex,
                // Navigate to the page matching the tapped item; index 3
                // (Podcast) is this page, so it needs no navigation case.
                onTap: (newIndex) =>
                    setState(() {
                      _currentIndex = newIndex;
                      switch (_currentIndex) {
                        case 0:
                          print("In the intropage");
                          Navigator.push(
                            context,
                            MaterialPageRoute(
                                builder: (context) => IntroPageView()),
                          );
                          break;
                        case 1:
                          print("In the newstabs");
                          Navigator.of(context, rootNavigator: true).push(
                            new CupertinoPageRoute<bool>(
                                fullscreenDialog: false,
                                builder: (BuildContext context) =>
                                new NewsTabs(country: 'us',)),
                          );
                          break;
                        case 2:
                          print("In the eventstabs");
                          Navigator.of(context, rootNavigator: true).push(
                            new CupertinoPageRoute<bool>(
                                fullscreenDialog: false,
                                builder: (
                                    BuildContext context) => new EventsTabs()),
                          );
                          break;
                      }
                      print(_currentIndex);
                    }),
                items: [
                  BottomNavigationBarItem(
                      icon: new Icon(Icons.home),
                      title: new Text('Home'),
                      backgroundColor: Color.fromRGBO(128, 0, 128, 50.0)
                  ),
                  BottomNavigationBarItem(
                      icon: new Icon(Icons.book),
                      title: new Text('News'),
                      backgroundColor: Color.fromRGBO(128, 0, 128, 50.0)
                  ),
                  BottomNavigationBarItem(
                      icon: new Icon(Icons.event),
                      title: new Text('Events'),
                      backgroundColor: Color.fromRGBO(128, 0, 128, 50.0)
                  ),
                  BottomNavigationBarItem(
                      icon: Icon(Icons.headset),
                      title: Text('Podcast'),
                      backgroundColor: Color.fromRGBO(128, 0, 128, 50.0)
                  ),
                ],
              ),
          ),
        )
    );
  }
}
/// Maps a category tab title (e.g. 'News & Politics') to the lowercase
/// keyword sent to the podcast search API.
///
/// Unknown titles fall back to the lowercased input, so the function always
/// returns a usable String. The original switch could fall through without
/// assigning, returning an uninitialized value for unrecognized input.
String searchKeyword(String searchCode) {
  const keywords = {
    'Music': 'music',
    'Business': 'business',
    'Educational': 'educational',
    'Comedy': 'comedy',
    'News & Politics': 'news & politics',
    'Science & Medicine': 'science & medicine',
    'Sports': 'sports',
    'Technology & Gadgets': 'technology & gadgets',
    'Television': 'television',
    'Film & Entertainment': 'film & entertainment',
    'Charity & Causes': 'charity & causes',
    'Religion & Spirituality': 'religion & spirituality',
    'Arts': 'arts',
  };
  return keywords[searchCode] ?? searchCode.toLowerCase();
}
|
//
// RunsNetworkMonitor.h
// OU_iPad
//
// Created by runs on 2017/10/12.
// Copyright © 2017年 Olacio. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Reachability.h"
FOUNDATION_EXTERN NSString * const RunsNetworkMonitorDidChangeMessage; //object NSNumber(NetworkStatus)
typedef void(^RunsNetworkChangeCallback)(NetworkStatus status);
/// Thin wrapper around Reachability for one-shot reachability queries and
/// change monitoring.
@interface RunsNetworkMonitor : NSObject
/// YES when the network is currently reachable by any route.
+ (BOOL)isReachable;
/// YES when reachable via cellular (WWAN).
+ (BOOL)isReachableViaWWAN;
/// YES when reachable via Wi-Fi.
+ (BOOL)isReachableViaWiFi;
/// Reachability check that optionally surfaces a user-facing tip when offline.
+ (BOOL)NetworkIsReachableWithShowTips:(BOOL)isShow;
/// Starts monitoring; the matching block is invoked on reachability changes.
+ (void)NetWorkMonitorWithReachableBlock:(NetworkReachable)reachable unreachableBlock:(NetworkUnreachable)unreachable;
@end
|
import {
all,
fork,
call,
delay,
takeLatest,
put,
actionChannel,
throttle,
} from 'redux-saga/effects';
import { http } from './httpHelper';
import { actionTypes } from '../reducers/actionTypes';
import { Dictionary } from '../typings/Dictionary';
import { AxiosResponse } from 'axios';
import {
JsonResult,
ListResult,
PostModel,
CategoryModel,
TagModel,
ImageModel,
} from '../typings/dto';
import { BaseAction } from '../typings/BaseAction';
// GET the current user's posts, paginated and filtered by keyword.
function loadMyPostsApi(query) {
    const { page, limit, keyword } = query;
    const search = encodeURIComponent(keyword);
    return http().get(`/me/posts?page=${page}&limit=${limit}&keyword=${search}`);
}
// Saga: load the current user's posts and dispatch DONE/FAIL accordingly.
function* loadMyPosts(action: BaseAction) {
    try {
        const { page, limit, keyword } = action.data;
        // Default the query the same way loadCategories does. In particular,
        // keyword must default to '' — otherwise an undefined keyword is
        // interpolated as the literal string "undefined" in the request URL.
        const result = yield call(loadMyPostsApi, {
            page: page || 1,
            limit: limit || 10,
            keyword: keyword || '',
        });
        const resultData = result.data as JsonResult<ListResult<PostModel>>;
        const { success, data, message } = resultData;
        if (!success) {
            throw new Error(message);
        }
        yield put<BaseAction>({
            type: actionTypes.LOAD_MY_POSTS_DONE,
            data: {
                ...data,
                page: page || 1,
            },
        });
    } catch (e) {
        yield put<BaseAction>({
            type: actionTypes.LOAD_MY_POSTS_FAIL,
            error: e,
            message: e.message,
        });
    }
}
// Process only the latest LOAD_MY_POSTS_CALL; earlier in-flight loads are cancelled.
function* watchLoadMyPosts() {
    yield takeLatest(actionTypes.LOAD_MY_POSTS_CALL, loadMyPosts);
}
// POST the form data for a new post.
function writePostApi(formData) {
    return http().post('/me/post', formData);
}
// Saga: create a post and dispatch DONE/FAIL. An unsuccessful JSON result is
// converted into a thrown Error, so both server-side and transport failures
// flow through the same catch branch with identical action payloads.
function* writePost(action) {
    try {
        const result = yield call(writePostApi, action.data);
        const { success, data, message } = result.data as JsonResult<PostModel>;
        if (!success) {
            throw new Error(message);
        }
        yield put<BaseAction>({
            type: actionTypes.WRITE_POST_DONE,
            data: data,
        });
    } catch (e) {
        yield put<BaseAction>({
            type: actionTypes.WRITE_POST_FAIL,
            error: e,
            message: e.message,
        });
    }
}
function* watchWritePost() {
yield takeLatest(actionTypes.WRITE_POST_CALL, writePost);
}
// GET /me/categories — paged, keyword-filtered category list.
function loadCategoriesApi(query) {
  const { limit, keyword, page } = query;
  return http().get(
    `/me/categories?page=${page}&limit=${limit}&keyword=${encodeURIComponent(
      keyword,
    )}`,
  );
}

// Worker saga: loads the user's categories; server-side failure is thrown so
// both failure kinds funnel through the single catch below.
function* loadCategories(action: BaseAction) {
  try {
    const { limit, keyword, page } = action.data;
    // console.debug('[DEBUG]: category ==> ', action.data);
    const result: AxiosResponse<
      JsonResult<ListResult<CategoryModel>>
    > = yield call(loadCategoriesApi, {
      page: page || 1,
      limit: limit || 10,
      keyword: keyword || '',
    });
    const { success, data, message } = result.data;
    // console.debug('[DEBUG]: categories ==> ', data);
    if (!success) {
      throw new Error(message);
    }
    yield put<BaseAction>({
      type: actionTypes.LOAD_MY_CATEGORIES_DONE,
      data: {
        ...data,
        // Echo the requested page so reducers can keep pagination state.
        page: page || 1,
      },
    });
  } catch (e) {
    console.error(e);
    yield put<BaseAction>({
      type: actionTypes.LOAD_MY_CATEGORIES_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest LOAD_MY_CATEGORIES_CALL is processed.
function* watchLoadCategories() {
  yield takeLatest(actionTypes.LOAD_MY_CATEGORIES_CALL, loadCategories);
}
// GET /me/tags — every tag belonging to the current user (no paging).
function loadTagsApi() {
  return http().get('/me/tags');
}

// Worker saga: fetches the user's tags. An application-level failure
// (success === false) is converted into a thrown Error so both transport
// and server failures take the single catch path; the FAIL payload is
// identical either way (error.message === server message).
function* loadTags(action) {
  try {
    const result: AxiosResponse<
      JsonResult<ListResult<TagModel>>
    > = yield call(loadTagsApi);
    const { success, data, message } = result.data;
    if (!success) {
      throw new Error(message);
    }
    yield put<BaseAction>({
      type: actionTypes.LOAD_MY_TAGS_DONE,
      data: data,
    });
  } catch (e) {
    yield put<BaseAction>({
      type: actionTypes.LOAD_MY_TAGS_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest LOAD_MY_TAGS_CALL is processed.
function* watchLoadTags() {
  yield takeLatest(actionTypes.LOAD_MY_TAGS_CALL, loadTags);
}
// PATCH /me/post/:id — partially updates an existing post.
function editPostApi(id, data) {
  return http().patch(`/me/post/${id}`, data);
}

// Worker saga: updates a post; dispatches DONE with the updated model, or
// FAIL carrying the server message or thrown error.
function* editPost(action) {
  try {
    const result = yield call(editPostApi, action.id, action.data);
    const resultData = result.data as JsonResult<PostModel>;
    const { success, data, message } = resultData;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.EDIT_POST_DONE,
        data: data,
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.EDIT_POST_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    yield put<BaseAction>({
      type: actionTypes.EDIT_POST_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest EDIT_POST_CALL is processed.
function* watchEditPost() {
  yield takeLatest(actionTypes.EDIT_POST_CALL, editPost);
}
/**
 * Deletes a post.
 *
 * @param {number} id post identifier (Post.Id)
 *
 */
function deletePostApi(id) {
  return http().delete(`/me/post/${id}`);
}

// Worker saga: deletes the post whose id is in action.data; the API returns
// the deleted id, which DONE forwards so reducers can drop the row.
function* deletePost(action) {
  try {
    const result = yield call(deletePostApi, action.data);
    const resultData = result.data as JsonResult<number>;
    const { success, data, message } = resultData;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.DELETE_POST_DONE,
        data: { id: data },
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.DELETE_POST_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    // console.error(e);
    yield put<BaseAction>({
      type: actionTypes.DELETE_POST_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest DELETE_POST_CALL is processed.
function* watchDeletePost() {
  yield takeLatest(actionTypes.DELETE_POST_CALL, deletePost);
}
// GET /me/post/:id — loads a single post owned by the current user.
function loadMyPostApi(query) {
  const { id } = query;
  return http().get(`/me/post/${id}`);
}

/**
 * Worker saga: loads one post and dispatches DONE, or FAIL on any error.
 *
 * Bug fix: the failure branch previously fell through after dispatching
 * FAIL and ALSO dispatched DONE with an undefined `post`. The early return
 * ensures exactly one of DONE/FAIL is emitted.
 */
function* loadMyPost(action) {
  try {
    const { id } = action.data;
    const result: AxiosResponse<JsonResult<PostModel>> = yield call(
      loadMyPostApi,
      { id },
    );
    const { success, data, message } = result.data;
    if (!success) {
      yield put<BaseAction>({
        type: actionTypes.LOAD_MY_POST_FAIL,
        error: new Error(message),
        message: message,
      });
      return;
    }
    yield put<BaseAction>({
      type: actionTypes.LOAD_MY_POST_DONE,
      data: {
        post: data,
      },
    });
  } catch (e) {
    yield put<BaseAction>({
      type: actionTypes.LOAD_MY_POST_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest LOAD_MY_POST_CALL is processed.
function* watchLoadMyPost() {
  yield takeLatest(actionTypes.LOAD_MY_POST_CALL, loadMyPost);
}
// Worker saga: signals reducers to reset editor state for a brand-new post.
// No server call is involved; `action` is currently unused.
function* writeNewPost(action) {
  try {
    yield put<BaseAction>({
      type: actionTypes.WRITE_NEW_POST_DONE,
    });
  } catch (e) {
    yield put<BaseAction>({
      type: actionTypes.WRITE_NEW_POST_FAIL,
      error: e,
    });
  }
}

// Watcher: only the latest WRITE_NEW_POST_CALL is processed.
function* watchWriteNewPost() {
  yield takeLatest(actionTypes.WRITE_NEW_POST_CALL, writeNewPost);
}
// POST /me/media — uploads media files (multipart form data expected by the
// callers — confirm against the upload component).
function uploadMyMediaFilesApi(data) {
  return http().post('/me/media', data);
}

// Worker saga: uploads media files; DONE carries the uploaded image list.
function* uploadMyMediaFiles(action) {
  try {
    // console.log('==========> form data:', action.data);
    const result = yield call(uploadMyMediaFilesApi, action.data);
    const resultData = result.data as JsonResult<ListResult<ImageModel>>;
    const { success, data, message } = resultData;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.UPLOAD_MY_MEDIA_FILES_DONE,
        data: {
          ...data,
        },
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.UPLOAD_MY_MEDIA_FILES_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    // console.error(e);
    yield put<BaseAction>({
      type: actionTypes.UPLOAD_MY_MEDIA_FILES_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest UPLOAD_MY_MEDIA_FILES_CALL is processed.
function* watchUploadMyMediaFiles() {
  yield takeLatest(
    actionTypes.UPLOAD_MY_MEDIA_FILES_CALL,
    uploadMyMediaFiles,
  );
}
// GET /me/media — paged, keyword-filtered list of uploaded media files.
function loadMediaFilesApi(query) {
  const { page, limit, keyword } = query;
  return http().get(
    `/me/media/?page=${page}&limit=${limit}&keyword=${encodeURIComponent(
      keyword,
    )}`,
  );
}

// Worker saga: loads the user's media library page.
function* loadMediaFiles(action) {
  try {
    const { page, limit, keyword } = action.data;
    const result = yield call(loadMediaFilesApi, {
      page: page || 1,
      limit: limit || 10,
      keyword: keyword || '',
    });
    const resultData = result.data as JsonResult<ListResult<ImageModel>>;
    const { success, data, message } = resultData;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.LOAD_MY_MEDIA_FILES_DONE,
        data: {
          ...data,
          // Echo the requested page for pagination state in reducers.
          page: page || 1,
        },
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.LOAD_MY_MEDIA_FILES_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    console.error(e);
    yield put<BaseAction>({
      type: actionTypes.LOAD_MY_MEDIA_FILES_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest LOAD_MY_MEDIA_FILES_CALL is processed.
function* watchLoadMediaFiles() {
  yield takeLatest(actionTypes.LOAD_MY_MEDIA_FILES_CALL, loadMediaFiles);
}
// DELETE /me/media/:id — removes one media file.
function deleteMediaFileApi(id) {
  return http().delete(`/me/media/${id}`);
}

// Worker saga: deletes a media file; the API returns the deleted id, which
// DONE forwards so reducers can drop the entry.
function* deleteMediaFile(action) {
  try {
    const { id } = action.data;
    const result: AxiosResponse<JsonResult<number>> = yield call(
      deleteMediaFileApi,
      id,
    );
    const { success, data, message } = result.data;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.DELETE_MY_MEDIA_FILES_DONE,
        data: {
          id: data,
        },
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.DELETE_MY_MEDIA_FILES_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    console.error(e);
    yield put<BaseAction>({
      type: actionTypes.DELETE_MY_MEDIA_FILES_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest DELETE_MY_MEDIA_FILES_CALL is processed.
function* watchDeleteMediaFile() {
  yield takeLatest(actionTypes.DELETE_MY_MEDIA_FILES_CALL, deleteMediaFile);
}
// Upsert: PATCH /me/category/:id when the form carries an id, otherwise
// POST /me/category to create a new one.
function editCategoryApi(formData) {
  if (!!formData.id) {
    return http().patch(`/me/category/${formData.id}`, formData);
  } else {
    return http().post('/me/category', formData);
  }
}

// Worker saga: creates or updates a category; DONE wraps the saved model.
function* editCategory(action) {
  try {
    const result = yield call(editCategoryApi, action.data);
    const resultData = result.data as JsonResult<CategoryModel>;
    const { success, data, message } = resultData;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.EDIT_MY_CATEGORY_DONE,
        data: {
          category: data,
        },
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.EDIT_MY_CATEGORY_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    // console.error(e);
    yield put<BaseAction>({
      type: actionTypes.EDIT_MY_CATEGORY_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher for EDIT_MY_CATEGORY_CALL.
// NOTE(review): the name is misspelled ("wacth"); renaming requires touching
// the postSaga registration below as well, so it is kept as-is here.
function* wacthEditCategory() {
  yield takeLatest(actionTypes.EDIT_MY_CATEGORY_CALL, editCategory);
}
// DELETE /me/category/:id — removes one category.
function deleteCategoryApi(id) {
  return http().delete(`/me/category/${id}`);
}

// Worker saga: deletes a category; a server-reported failure is thrown so
// both failure kinds funnel through the single catch below.
function* deleteCategory(action) {
  try {
    const { id } = action.data;
    const result: AxiosResponse<JsonResult<number>> = yield call(
      deleteCategoryApi,
      id,
    );
    const { success, data, message } = result.data;
    if (!success) {
      throw new Error(message);
    }
    yield put<BaseAction>({
      type: actionTypes.DELETE_MY_CATEGORY_DONE,
      data: {
        id: data,
      },
    });
  } catch (e) {
    // console.error(e);
    yield put<BaseAction>({
      type: actionTypes.DELETE_MY_CATEGORY_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest DELETE_MY_CATEGORY_CALL is processed.
function* watchDeleteCategory() {
  yield takeLatest(actionTypes.DELETE_MY_CATEGORY_CALL, deleteCategory);
}
// GET /me/liked — posts the current user has liked, paged and filtered.
// Bug fix: the query string previously began "?&page=", producing an empty
// first query parameter.
function loadLikedPostsApi(query) {
  const { limit, keyword, page } = query;
  return http().get(
    `/me/liked?page=${page}&limit=${limit}&keyword=${encodeURIComponent(
      keyword,
    )}`,
  );
}

// Worker saga: loads liked posts; DONE echoes the keyword and page so
// reducers can keep search/pagination state.
function* loadLikedPosts(action) {
  try {
    const { limit, keyword, page } = action.data;
    const result = yield call(loadLikedPostsApi, {
      page: page || 1,
      limit: limit || 10,
      keyword: keyword || '',
    });
    const resultData = result.data as JsonResult<ListResult<PostModel>>;
    const { success, data, message } = resultData;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.LOAD_LIKED_POSTS_DONE,
        data: {
          ...data,
          keyword: keyword,
          page: page || 1,
        },
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.LOAD_LIKED_POSTS_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    // console.error(e);
    yield put<BaseAction>({
      type: actionTypes.LOAD_LIKED_POSTS_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest LOAD_LIKED_POSTS_CALL is processed.
function* watchLoadLikedPosts() {
  yield takeLatest(actionTypes.LOAD_LIKED_POSTS_CALL, loadLikedPosts);
}
// GET /me/stat/general — overall account statistics. `query` is currently
// unused; kept for signature parity with the other API helpers.
function loadStatGeneralApi(query) {
  return http().get('/me/stat/general');
}

// Worker saga: loads general statistics (free-form dictionary payload).
function* loadStatGeneral(action) {
  try {
    const result = yield call(loadStatGeneralApi, action.data);
    const resultData = result.data as JsonResult<Dictionary<any>>;
    const { success, data, message } = resultData;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.LOAD_STAT_GENERAL_DONE,
        data: data,
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.LOAD_STAT_GENERAL_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    // console.error(e);
    yield put<BaseAction>({
      type: actionTypes.LOAD_STAT_GENERAL_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest LOAD_STAT_GENERAL_CALL is processed.
function* watchLoadStatGeneral() {
  yield takeLatest(actionTypes.LOAD_STAT_GENERAL_CALL, loadStatGeneral);
}
// GET /me/stat/postread — post read-count statistics. `query` is currently
// unused; kept for signature parity with the other API helpers.
function loadStatReadApi(query) {
  return http().get('/me/stat/postread');
}

// Worker saga: loads read statistics (free-form dictionary payload).
function* loadStatRead(action) {
  try {
    const result = yield call(loadStatReadApi, action.data);
    const resultData = result.data as JsonResult<Dictionary<any>>;
    const { success, data, message } = resultData;
    if (success) {
      yield put<BaseAction>({
        type: actionTypes.LOAD_STAT_READ_DONE,
        data: data,
      });
    } else {
      yield put<BaseAction>({
        type: actionTypes.LOAD_STAT_READ_FAIL,
        error: new Error(message),
        message: message,
      });
    }
  } catch (e) {
    // console.error(e);
    yield put<BaseAction>({
      type: actionTypes.LOAD_STAT_READ_FAIL,
      error: e,
      message: e.message,
    });
  }
}

// Watcher: only the latest LOAD_STAT_READ_CALL is processed.
function* watchLoadStatRead() {
  yield takeLatest(actionTypes.LOAD_STAT_READ_CALL, loadStatRead);
}
// Root saga for the post domain: forks every watcher so each action channel
// runs concurrently. Registered with the saga middleware by the store setup.
export default function* postSaga() {
  yield all([
    fork(watchLoadMyPosts),
    fork(watchLoadMyPost),
    fork(watchWritePost),
    fork(watchEditPost),
    fork(watchDeletePost),
    fork(watchLoadCategories),
    fork(watchLoadTags),
    fork(watchWriteNewPost),
    fork(watchUploadMyMediaFiles),
    fork(watchLoadMediaFiles),
    fork(watchDeleteMediaFile),
    fork(wacthEditCategory),
    fork(watchDeleteCategory),
    fork(watchLoadLikedPosts),
    fork(watchLoadStatGeneral),
    fork(watchLoadStatRead),
  ]);
}
|
# Omnipay: Instamojo
**[Instamojo](https://www.instamojo.com/) driver for the Omnipay PHP payment processing library**
[Omnipay](https://github.com/thephpleague/omnipay) is a framework agnostic, multi-gateway payment
processing library for PHP 5.3+.
This package implements [Instamojo Payments API v1.1](https://docs.instamojo.com/docs/payments-api).
## Installation
Omnipay is installed via [Composer](http://getcomposer.org/). To install, simply run:
```
composer require gentor/omnipay-instamojo
```
## Purchase
```php
use Omnipay\Omnipay;
// Setup payment gateway
$gateway = Omnipay::create('Instamojo');
$gateway->setApiKey('abc123');
$gateway->setAuthToken('abc123');
// Send purchase request
$response = $gateway->purchase(
[
'amount' => '10.00',
'purpose' => 'Instamojo Payment'
]
)->send();
// Process response
if ($response->isSuccessful() && $response->isRedirect()) {
// Redirect to offsite payment gateway
// print_r($response->getData());
// echo $response->getTransactionStatus();
$response->redirect();
} else {
// Request failed
echo $response->getMessage();
}
```
## Complete Purchase
```php
// Send complete purchase request
$response = $gateway->completePurchase(
[
'transactionReference' => $_GET['payment_id'],
]
)->send();
// Process response
if ($response->isSuccessful()) {
// Request was successful
print_r($response->getData());
echo $response->getTransactionStatus();
} else {
// Request failed
echo $response->getMessage();
}
```
## Refund
```php
// Send refund request
$response = $gateway->refund(
[
'transactionReference' => $payment_id,
]
)->send();
// Process response
if ($response->isSuccessful()) {
// Request was successful
print_r($response->getData());
echo $response->getTransactionStatus();
} else {
// Request failed
echo $response->getMessage();
}
```
## Fetch Payment Request
```php
// Send fetch payment request
$response = $gateway->fetchPaymentRequest(
[
'transactionReference' => $payment_request_id,
]
)->send();
// Process response
if ($response->isSuccessful()) {
// Request was successful
print_r($response->getData());
echo $response->getTransactionStatus();
} else {
// Request failed
echo $response->getMessage();
}
```
## Webhook
```php
use Omnipay\Omnipay;
// Setup payment gateway
$gateway = Omnipay::create('Instamojo');
$gateway->setSalt('abc123');
// Payment notification request
$response = $gateway->acceptNotification()->send();
// Process response
if ($response->isSuccessful()) {
// Request was successful
print_r($response->getData());
echo $response->getTransactionReference();
echo $response->getTransactionStatus();
} else {
// Request failed
echo $response->getMessage();
}
```
## [Instamojo API v1.1 Documentation](https://docs.instamojo.com/docs/payments-api)
|
using Microsoft.Extensions.Configuration;
namespace Kubernetes.Configuration.Extensions.Configmap
{
    /// <summary>
    /// <see cref="IConfigurationSource"/> that builds a provider reading
    /// configuration values from Kubernetes configmaps.
    /// </summary>
    public class ConfigmapConfigurationSource : IConfigurationSource
    {
        /// <summary>Kubernetes namespace to read configmaps from (semantics of null defined by the provider — confirm).</summary>
        public string? Namespace { get; set; }

        /// <summary>Label selector used to pick which configmaps are loaded.</summary>
        public string? LabelSelector { get; set; }

        /// <summary>Separator used when mapping configmap keys to configuration paths (provider-defined — confirm).</summary>
        public string? Separator { get; set; }

        /// <summary>Whether the provider reloads configuration when the configmap changes.</summary>
        public bool ReloadOnChange { get; set; }

        /// <summary>Builds the <see cref="ConfigmapConfigurationProvider"/> with this source's settings.</summary>
        public IConfigurationProvider Build(IConfigurationBuilder builder)
        {
            return new ConfigmapConfigurationProvider(Namespace, LabelSelector, Separator, ReloadOnChange);
        }
    }
}
|
# frozen_string_literal: true

# Application user, authenticated via Google OAuth (Devise + omniauth) and
# authorized through rolify roles. Roles are either global (no resource) or
# scoped to an Organization.
class User < ApplicationRecord
  has_many :authentication_tokens, dependent: :destroy
  # before_add_role validates every role before it is attached (see below).
  rolify before_add: :before_add_role, strict: true
  validates :email, presence: true
  validates :email, uniqueness: true, allow_blank: true
  devise :trackable, :token_authenticatable, :omniauthable, omniauth_providers: [:google_oauth2]

  # Highest role level the user holds in +organization+, considering both
  # global roles and the explicit (local) role in that organization.
  def role_level_in(organization)
    levels = roles.global.map(&:level)
    levels << local_role_level_in(organization)
    levels.max
  end

  def role_in(organization)
    # Returns only an explicit role in the passed organization, not including global roles
    roles.find_by resource_id: organization.id
  end

  # True when the user is an admin of +organization+ or a global admin.
  def administrator?(organization = nil)
    is_admin_of?(organization) || global_administrator?
  end

  # Whether the user holds any global (organization-independent) role.
  def global_role?
    roles.global.present?
  end

  # First global role, if any.
  def global_role
    roles.global.first
  end

  # Organizations visible to this user: everything for global-role holders,
  # otherwise only organizations where an explicit role exists.
  def organizations
    return Organization.all if global_role?

    membership_organizations
  end

  def global_administrator?
    is_global_admin? || is_super_admin?
  end

  def membership_organizations
    # All organizations in which this user has an explicit role, not including global roles
    Organization.where(id: roles.pluck(:resource_id))
  end

  # Whether the user has an explicit role in +organization+.
  def member_of?(organization)
    roles.pluck(:resource_id).include?(organization.id)
  end

  # True when every role the user holds is a read-only role.
  def read_only?
    (roles.pluck(:name).map(&:to_sym) - Role::READ_ONLY_ROLES).empty?
  end

  private

  # Rejects unknown role names and duplicate roles for the same resource.
  def before_add_role(role)
    raise ActiveRecord::Rollback if Role::LOCAL_ROLES[role.symbol].nil? && Role::GLOBAL_ROLES[role.symbol].nil?
    raise ActiveRecord::Rollback if roles.pluck(:resource_id).include?(role.resource_id)
  end

  def local_role_level_in(organization)
    # Role level in explicit organization, excluding global roles
    role = role_in organization
    return Role::MINIMAL_ROLE_LEVEL if role.nil?

    role.level
  end

  class << self
    # Resolves a user from an omniauth callback: updates an existing user,
    # bootstraps the very first user as super_admin, otherwise returns an
    # unsaved empty user (treated as sign-in rejection by callers — confirm).
    def from_omniauth(auth)
      user = get_user_from_auth auth
      return update_user_from_omniauth user, auth if user
      return create_first_user auth if first_user?

      empty_user
    end

    # Resolves a user from a Google ID token by asking Google's tokeninfo
    # endpoint for the associated email.
    def from_id_token(id_token)
      client = OAuth2::Client.new(Rails.configuration.google_client_id, Rails.configuration.google_client_secret)
      response = client.request(:get, Rails.configuration.google_token_info_url, params: { id_token: id_token }).parsed
      User.find_by(email: response['email'])
    end

    private

    def update_user_from_omniauth(user, auth)
      user.update auth_params auth
      user
    end

    def empty_user
      User.new
    end

    # The first user ever created is granted super_admin automatically.
    def create_first_user(auth)
      user = User.new auth_params auth
      user.save
      user.add_role :super_admin
      user
    end

    def get_user_from_auth(auth)
      User.find_by email: auth['info']['email']
    end

    def first_user?
      User.count.zero?
    end

    # Attributes extracted from the omniauth hash.
    def auth_params(auth)
      {
        uid: auth['uid'],
        name: auth['info']['name'],
        email: auth['info']['email'],
        provider: auth.provider,
        image: auth['info']['image']
      }
    end
  end
end
|
import React from 'react';
import classes from './Spinner.module.css';
const Spinner = (props) => {
const style = {
backgroundColor: `var(--${props.variant})`,
};
return (
<div className={classes.Spinner}>
<div className={classes.Bounce1} style={style}></div>
<div className={classes.Bounce2} style={style}></div>
</div>
);
};
export default Spinner;
|
<?php
/**
 * Info popup panel partial: renders the hoverable "Info" menu with per-store
 * actions (add note, view profile, dashboard, email, disconnect).
 *
 * Created by PhpStorm.
 * User: KustovVA
 * Date: 25.06.2015
 * Time: 18:40
 */

/** @var \common\models\Store $store */
?>
<div class="info-panel f-right">
    <span class="info-link" title="Info"></span>

    <div class="info-popup">
        <div class="info-item font-edit-write">Add Note</div>
        <a href="#" class="info-item font-user">View Profile</a>
        <div class="info-item font-bar-chart">View Dashboard</div>
        <div class="info-item font-letter-mail">Email</div>
        <div class="info-item font-link-broken">Disconnect</div>
    </div>
</div>
|
# Verifies that interface variables receive their declared defaults whether
# the default is given as a literal, a method reference, or a proc.
describe Coactive::Interface do
  context 'default' do
    # Fixture interface class declaring the three default styles (defined in
    # the spec support files).
    let :interface_class do
      Variables::DefaultInterface
    end

    it 'sets default value' do
      interface = interface_class.new
      expect(interface.context.in).to eq('default value')
    end

    it 'sets default value by method' do
      interface = interface_class.new
      expect(interface.context.in_method).to eq('default value')
    end

    it 'sets default value by proc' do
      interface = interface_class.new
      expect(interface.context.in_proc).to eq('default value')
    end
  end
end
|
## v0.1.6
* Further Opal 1.4 compatibility
## v0.1.5
* Opal 1.4 compatibility
|
/** Michał Wójcik 2021 */
/**
* L-System zaimplementowany w języku javascript z wykorzystaniem
* HTML5 Canvas i turtle-graphics-js [https://www.npmjs.com/package/turtle-graphics-js]
*
* Program przyjmuje parametry przez pola tekstowe na stronie
* a następnie rysuje po wciśnięciu przycisku "rysuj"
*
* Składnia reguł:
* Znak:Wartosc_do_zamiany;Znak:Wartosc_do_zamiany;
* Dowolna liczba reguł
*
* Operacje:
* - F - idź do przodu i rysuj
* - f - idź do przodu (nie rysuj)
* - + - obrót w prawo
* - - - obrót w lewo
* - [ - odłóż pozycję i rotację na stos
* - ] - zdejmij pozycję i rotację ze stosu
* - C - losuj nowy kolor
* - L - zwiększ długość rysowanej linii
*/
// Saved turtle positions for '[' / ']' branch push/pop operations.
var stack = [];

var turtle = new Turtle(document.getElementById("canvas"));
turtle.pen.color = "#000";
turtle.pen.width = 2;
turtle.moveTo(400, 300);
// Deep copy of the starting location (JSON round-trip detaches it from
// turtle.loc, which is mutated while drawing).
var initLoc = JSON.parse(JSON.stringify(turtle.loc));

/** Attributes — defaults, overwritten from the form on each draw */
var lineLength = 1;
var rotation = 90;
var axiom = "";
var rules = "";
var iterations = 5;

/** DOM references */
var lengthInput = document.getElementById("length");
var rotationInput = document.getElementById("rotation");
var iterationsInput = document.getElementById("iterations");
var axiomInput = document.getElementById("axiom");
var rulesInput = document.getElementById("rules");
var drawButton = document.getElementById("drawButton");

// Handle of the running setInterval drawing loop (null when idle).
var inter = null;

// "Draw" button: reset the canvas/turtle state, then start a fresh render.
drawButton.addEventListener("click", () => {
  clearAll();
  drawLSystem();
});
/**Functions */
/**
 * Reads the form, expands the axiom through the rewrite rules, then draws
 * one symbol every 6 ms via setInterval (animated rendering). The interval
 * handle is kept in the module-level `inter` so clearAll can cancel it.
 */
const drawLSystem = function () {
  getAttributes();
  let ruleObjectsArray = interpretRules(rules);
  axiom = applyRules(axiom, ruleObjectsArray, iterations);
  var i = 0;
  inter = setInterval(() => {
    if (i > axiom.length) {
      clearInterval(inter);
      return;
    }
    drawSign(axiom.charAt(i));
    i++;
  }, 6);
};
/**
 * Pulls the current values out of the form controls into the module-level
 * attributes, keeping the previous value when a field is empty.
 *
 * Fixes: an <input>'s .value is always a string and never undefined, so the
 * old `!== undefined` checks could never fall back to the defaults (and the
 * rotation branch referenced a misspelled `rotaion` variable). Numeric
 * fields are now coerced with Number() instead of being kept as strings.
 */
const getAttributes = function () {
  lineLength =
    lengthInput.value !== "" ? Number(lengthInput.value) : lineLength;
  rotation =
    rotationInput.value !== "" ? Number(rotationInput.value) : rotation;
  iterations =
    iterationsInput.value !== "" ? Number(iterationsInput.value) : iterations;
  axiom = axiomInput.value.length > 0 ? axiomInput.value : axiom;
  rules = rulesInput.value.length > 0 ? rulesInput.value : rules;
};
/**
 * Parses the rules textarea into rule objects.
 *
 * Input format: "Sign:Replacement;Sign:Replacement" — pairs separated by
 * ';', sign and replacement separated by ':'. A trailing ';' yields a rule
 * with an empty sign and undefined value, exactly as before.
 *
 * Rule structure:
 * {
 *   sign: string;
 *   value: string;
 * }
 */
const interpretRules = function (rules) {
  return rules.split(";").map((pair) => {
    const [sign, value] = pair.split(":");
    return { sign, value };
  });
};
/**
 * Expands the axiom by running every rewrite rule, `iterations` times.
 *
 * Rules are applied sequentially within one iteration, so a later rule may
 * rewrite the output of an earlier one — this matches the original behavior
 * and is part of the grammar's semantics here.
 */
const applyRules = function (axiom, ruleObjectsArray, iterations) {
  let current = axiom;
  let step = 0;
  while (step < iterations) {
    for (const { sign, value } of ruleObjectsArray) {
      current = current.replaceAll(sign, value);
    }
    step++;
  }
  return current;
};
/**
 * Executes one L-system symbol on the turtle:
 *  F — pen down, move forward, pen up (draws)
 *  f — move forward without drawing
 *  + / - — rotate right / left by `rotation` degrees
 *  [ / ] — push / pop turtle position+rotation on the stack
 *  C — pick a random pen color
 *  L — grow the line length by one
 * Unknown symbols are ignored.
 */
const drawSign = function (sign) {
  switch (sign) {
    case "F":
      turtle.penDown();
    // intentional fall-through: "F" is "f" with the pen down
    case "f":
      turtle.forward(lineLength);
      turtle.penUp();
      break;
    case "+":
      turtle.right(rotation);
      break;
    case "-":
      turtle.left(rotation);
      break;
    case "[":
      // Deep copy so later turtle movement can't mutate the saved state.
      stack.push(JSON.parse(JSON.stringify(turtle.loc)));
      break;
    case "]":
      let loc = stack.pop();
      turtle.loc = loc;
      break;
    case "C":
      turtle.pen.color = getRandomColor();
      break;
    case "L":
      lineLength++;
      break;
    default:
      break;
  }
  return;
};
/**
 * Returns a random CSS hex color string like "#3fa2c1".
 *
 * Bug fix: values below 0x100000 used to serialize to fewer than six hex
 * digits (e.g. "#abc1"), which is not a valid 6-digit color; padStart
 * guarantees the full triplet.
 */
const getRandomColor = function () {
  return `#${Math.floor(Math.random() * 16777215)
    .toString(16)
    .padStart(6, "0")}`;
};
// Stops any in-flight animated render and resets drawing state: branch
// stack, turtle pose/color, and the canvas pixels. Relies on the global
// `canvas` binding created by the element's id.
function clearAll() {
  clearInterval(inter);
  stack = [];
  turtle.moveTo(400, 300);
  turtle.angle = 0;
  turtle.pen.color = "#000";
  turtle.ctx.clearRect(
    0,
    0,
    canvas.width || canvas.style.width,
    canvas.height || canvas.style.height
  );
}
|
package output
import (
"encoding/json"
"time"
"github.com/shopspring/decimal"
)
// ReportInput pairs a parsed report Root with arbitrary metadata captured at
// load time (e.g. the source it was read from).
type ReportInput struct {
	Metadata map[string]string
	Root     Root
}

// Load decodes previously generated JSON report bytes into a Root. On
// decode failure the returned Root may be partially populated, so callers
// must check err before using it.
func Load(data []byte) (Root, error) {
	var out Root
	err := json.Unmarshal(data, &out)
	return out, err
}
func Combine(currency string, inputs []ReportInput, opts Options) Root {
var combined Root
var totalHourlyCost *decimal.Decimal
var totalMonthlyCost *decimal.Decimal
projects := make([]Project, 0)
summaries := make([]*Summary, 0, len(inputs))
for _, input := range inputs {
projects = append(projects, input.Root.Projects...)
summaries = append(summaries, input.Root.Summary)
if input.Root.TotalHourlyCost != nil {
if totalHourlyCost == nil {
totalHourlyCost = decimalPtr(decimal.Zero)
}
totalHourlyCost = decimalPtr(totalHourlyCost.Add(*input.Root.TotalHourlyCost))
}
if input.Root.TotalMonthlyCost != nil {
if totalMonthlyCost == nil {
totalMonthlyCost = decimalPtr(decimal.Zero)
}
totalMonthlyCost = decimalPtr(totalMonthlyCost.Add(*input.Root.TotalMonthlyCost))
}
}
combined.Version = outputVersion
combined.Currency = currency
combined.Projects = projects
combined.TotalHourlyCost = totalHourlyCost
combined.TotalMonthlyCost = totalMonthlyCost
combined.TimeGenerated = time.Now()
combined.Summary = MergeSummaries(summaries)
return combined
}
|
import produce from 'immer';
import {
categoriesActionTypes,
categoryState,
SELECT_CATEGORY,
} from './types';
// Initial slice state: no category selected.
const INITIAL_STATE: categoryState = {
  category: '',
};

// Category-selection reducer. Uses immer's produce, so `draft` may be
// mutated directly; unknown action types fall through the empty default and
// return the state unchanged.
export default function optionReducer (
  state = INITIAL_STATE,
  action: categoriesActionTypes,
): categoryState {
  return produce(state, draft => {
    switch (action.type) {
      case SELECT_CATEGORY: {
        draft.category = action.payload.category
        break;
      }
      default:
    }
  });
};
|
#ifndef _IOTEX_ABI_READ_CONTRACT_H_
#define _IOTEX_ABI_READ_CONTRACT_H_

#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

/*
 * Field accessors for an ABI-encoded "order" payload. Each takes the raw
 * input buffer and its length. NOTE(review): exact field semantics (units,
 * encoding) and ownership/lifetime of the returned strings are defined in
 * the implementation — confirm before relying on them.
 */

/* Order start (presumably a unix timestamp — confirm). */
uint64_t abi_get_order_start(const char *, size_t);
/* Order duration (presumably seconds — confirm). */
uint32_t abi_get_order_duration(const char *, size_t);
/* Endpoint string decoded from the order payload. */
const char *abi_get_order_endpoint(const char *input, size_t);
/* Token string decoded from the order payload. */
const char *abi_get_order_token(const char *input, size_t);

#ifdef __cplusplus
}
#endif

#endif /* _IOTEX_ABI_READ_CONTRACT_H_ */
|
<?php
namespace App\Http\Controllers;
use App\Models\request_status;
use Illuminate\Support\Facades\DB;
use Illuminate\Http\Request;
class request_statusController extends Controller
{
    /**
     * List every friend-request row.
     * (The column name "reciever" is misspelled in the schema and therefore
     * kept throughout this controller.)
     */
    public function index(){
        $requestor = request_status::all();
        return response()->json([
            'success' => true,
            'message' => 'Data Request',
            'data' => $requestor
        ], 200);
    }

    /**
     * Requests addressed TO the given user (incoming requests).
     *
     * Bug fix: get() returns a Collection, which is always truthy, so the
     * old `if($reciever)` check could never reach the "not found" branch.
     */
    public function show_requests(Request $request){
        $reciever = DB::table('request_status')
            ->where('reciever', $request->reciever)
            ->get();
        if($reciever->isNotEmpty()){
            return response()->json([
                'success' => true,
                'message' => 'Hasil penelusuran',
                'data' => $reciever
            ], 200);
        }
        return response()->json([
            'success' => false,
            'message' => 'Gagal, tidak ada penelusuran',
            'data' => $reciever
        ], 400);
    }

    /**
     * Requests sent BY the given user (outgoing requests / friends).
     *
     * Same truthy-Collection bug fix as show_requests().
     */
    public function show_friends(Request $request){
        $requestor = DB::table('request_status')
            ->where('requestor', $request->requestor)
            ->get();
        if($requestor->isNotEmpty()){
            return response()->json([
                'success' => true,
                'message' => 'Hasil penelusuran',
                'data' => $requestor
            ], 200);
        }
        return response()->json([
            'success' => false,
            'message' => 'Gagal, tidak ada penelusuran',
            'data' => $requestor
        ], 400);
    }

    /**
     * Create a friend request, unless one already exists for the same
     * requestor/reciever pair (any status).
     */
    public function created(Request $request){
        $cek = DB::table('request_status')
            ->where('requestor', $request->requestor)
            ->where('reciever', $request->reciever)
            ->count();
        $requestor = new request_status;
        $requestor->requestor = $request->requestor;
        $requestor->reciever = $request->reciever;
        $requestor->status = $request->status;
        if($cek >= 1){
            // Duplicate pair: reject without saving.
            return response()->json([
                'success' => false,
                'message' => 'Data Gagal Ditambahkan',
                'data' => $requestor
            ], 400);
        }
        $requestor->save();
        return response()->json([
            'success' => true,
            'message' => 'Data Berhasil Ditambahkan',
            'data' => $requestor
        ], 200);
    }

    /**
     * Update the status of a pending request for the given reciever.
     * Only rows currently in 'pending' state can be transitioned.
     */
    public function update(Request $request, $reciever){
        $cek = DB::table('request_status')
            ->where('requestor', $request->requestor)
            ->where('reciever', $reciever)
            ->where('status', '=', 'pending')
            ->count();
        if($cek >= 1){
            DB::table('request_status')
                ->where('requestor', $request->requestor)
                ->where('reciever', $reciever)
                ->update(['status' => $request->status]);
            return response()->json([
                'success' => true,
                'message' => 'Data Berhasil Diubah'
            ], 200);
        }
        return response()->json([
            'success' => false,
            'message' => 'Data Gagal Diubah'
        ], 400);
    }

    /**
     * Delete a request by primary key.
     *
     * Bug fix: find() returns null for a missing id, and the old code then
     * fataled calling delete() on null; now a 404 is returned instead.
     */
    public function delete($id){
        $requestor = request_status::find($id);
        if ($requestor === null) {
            return response()->json([
                'success' => false,
                'message' => 'Post Not Found',
            ], 404);
        }
        $requestor->delete();
        return response()->json([
            'success' => true,
            'message' => 'Post Deleted',
        ], 200);
    }
}
|
/// Provides data structures for storing component data.
library component_data;
import 'dart:async';
import 'dart:collection';
import 'package:observable/observable.dart';
import 'package:quiver/core.dart';
part 'src/component_data/linked_hash_map_store.dart';
/// Registers [ComponentTypeStore]s for component types.
class TypeStoreRegistry {
  final Map<Type, ComponentTypeStore> _typesStores = {};

  final ChangeNotifier<TypeStoreRegistryChangeRecord> _changeNotifier =
      new ChangeNotifier();

  /// A synchronous stream of the changes made to this [TypeStoreRegistry].
  ///
  /// A change is triggered when a [ComponentTypeStore] is added, removed or
  /// changed.
  Stream<List<TypeStoreRegistryChangeRecord>> get changes =>
      _changeNotifier.changes;

  /// The [ComponentTypeStore]s registered with this [TypeStoreRegistry].
  Iterable<ComponentTypeStore> get stores => _typesStores.values;

  /// The types for which [ComponentTypeStore]s are registered with this
  /// [TypeStoreRegistry].
  Iterable<Type> get types => _typesStores.keys;

  /// Whether or not this [TypeStoreRegistry] contains a [ComponentTypeStore]
  /// for the [type].
  bool hasStore(Type type) => _typesStores.containsKey(type);

  /// Returns the [ComponentTypeStore] registered for the [type] or `null` if
  /// no [ComponentTypeStore] is currently registered for the [type].
  ///
  /// NOTE(review): the cast assumes the registered store's value type matches
  /// [T]; a mismatched call site would fail the cast — confirm callers.
  ComponentTypeStore<T> getStore<T>(Type type) =>
      _typesStores[type] as ComponentTypeStore<T>;

  /// Registers the [store] for type [type].
  ///
  /// If another [ComponentTypeStore] was already registered for the [type],
  /// then this other store is replaced with the [store] and a replacement
  /// change record (rather than an insert record) is emitted.
  void add<T>(Type type, ComponentTypeStore<T> store) {
    final oldStore = _typesStores[type] as ComponentTypeStore<T>;

    _typesStores[type] = store;

    if (oldStore == null) {
      _changeNotifier
        ..notifyChange(new TypeStoreRegistryChangeRecord<T>.insert(type, store))
        ..deliverChanges();
    } else {
      _changeNotifier
        ..notifyChange(
            new TypeStoreRegistryChangeRecord<T>(type, oldStore, store))
        ..deliverChanges();
    }
  }

  /// Removes the [ComponentTypeStore] associated with the [type] from this
  /// [TypeStoreRegistry].
  ///
  /// Returns the removed store, or `null` when none was registered.
  ComponentTypeStore<T> remove<T>(Type type) {
    // Single Map.remove call instead of the previous lookup-then-remove
    // pair; behavior (including the null return for a missing type) is
    // unchanged.
    final store = _typesStores.remove(type) as ComponentTypeStore<T>;

    if (store != null) {
      _changeNotifier
        ..notifyChange(new TypeStoreRegistryChangeRecord<T>.remove(type, store))
        ..deliverChanges();
    }

    return store;
  }
}
/// Stores component values of type [T] and associates them with entity IDs.
abstract class ComponentTypeStore<T> {
  /// Instantiates a new [ComponentTypeStore] using the default implementation,
  /// [LinkedHashMapStore].
  factory ComponentTypeStore() = LinkedHashMapStore<T>;

  /// A synchronous stream of the changes made to this [ComponentTypeStore].
  ///
  /// A change is triggered when a component value is added, when a component
  /// value is removed, or when a component value is updated.
  ///
  /// See also [ComponentTypeStoreChangeRecord].
  Stream<List<ComponentTypeStoreChangeRecord<T>>> get changes;

  /// The number of component values currently stored in this
  /// [ComponentTypeStore].
  int get length;

  /// Whether this [ComponentTypeStore] is currently empty.
  bool get isEmpty;

  /// Whether there is currently at least 1 component value in this
  /// [ComponentTypeStore].
  bool get isNotEmpty;

  /// The component values currently stored in this [ComponentTypeStore].
  Iterable<T> get components;

  /// The entity IDs for which a component value is currently stored in this
  /// [ComponentTypeStore].
  Iterable<int> get entityIds;

  /// Returns a [ComponentStoreIterator] over this [ComponentTypeStore].
  ComponentStoreIterator<T> get iterator;

  /// Executes the given function [f] for each ([entityId], [component]) pair
  /// stored in this [ComponentTypeStore].
  void forEach(void f(int entityId, T component));

  /// Whether or not this [ComponentTypeStore] contains a component value for
  /// the [entityId].
  bool containsComponentFor(int entityId);

  /// Removes the component value associated with the [entityId] from this
  /// [ComponentTypeStore].
  ///
  /// Does nothing if this [ComponentTypeStore] does not contain a component
  /// value for the [entityId].
  ///
  /// Returns the component value if this [ComponentTypeStore] did contain a
  /// component value for the [entityId], or `null` otherwise.
  T remove(int entityId);

  /// Returns the value associated with the [entityId] or `null` if this
  /// [ComponentTypeStore] does not currently contain a value for the
  /// [entityId].
  T operator [](int entityId);

  /// Associated the given [component] value with the [entityId] and stores it
  /// in this [ComponentTypeStore].
  ///
  /// Overwrites any value previously associated with the [entityId];
  /// implementations emit an insert or update change record accordingly.
  void operator []=(int entityId, T component);
}
/// An iterator over a [ComponentTypeStore].
///
/// Extends an ordinary [Iterator] by also exposing the [currentEntityId] that
/// is associated with the [current] component value.
abstract class ComponentStoreIterator<T> extends Iterator<T> {
  /// The entity ID associated with the [current] component value.
  ///
  /// NOTE(review): behavior before the first call to `moveNext` (or after it
  /// returns `false`) is not specified here — confirm with implementations.
  int get currentEntityId;
}
/// A [ChangeRecord] that describes an insertion into, removal from, or
/// update of a [ComponentTypeStore].
class ComponentTypeStoreChangeRecord<T> implements ChangeRecord {
  /// The entity ID whose component value changed.
  final int entityId;

  /// The component value that was previously associated with the [entityId].
  ///
  /// Always `null` when [isInsert] is `true`.
  final T oldValue;

  /// The component value that is now associated with the [entityId].
  ///
  /// Always `null` when [isRemove] is `true`.
  final T newValue;

  /// Whether this record describes an insertion.
  final bool isInsert;

  /// Whether this record describes a removal.
  final bool isRemove;

  /// Creates a record describing an update of [entityId] from [oldValue] to
  /// [newValue].
  const ComponentTypeStoreChangeRecord(
      this.entityId, this.oldValue, this.newValue)
      : isInsert = false,
        isRemove = false;

  /// Creates a record describing the insertion of [newValue] for [entityId].
  const ComponentTypeStoreChangeRecord.insert(this.entityId, this.newValue)
      : isInsert = true,
        isRemove = false,
        oldValue = null;

  /// Creates a record describing the removal of the former [oldValue] of
  /// [entityId].
  const ComponentTypeStoreChangeRecord.remove(this.entityId, this.oldValue)
      : isInsert = false,
        isRemove = true,
        newValue = null;

  /// Replays this change on the given [componentStore].
  void apply(ComponentTypeStore<T> componentStore) {
    if (isRemove) {
      componentStore.remove(entityId);
      return;
    }

    // Inserts and updates are both plain assignments on the store.
    componentStore[entityId] = newValue;
  }

  bool operator ==(Object other) =>
      identical(this, other) ||
      other is ComponentTypeStoreChangeRecord<T> &&
          entityId == other.entityId &&
          oldValue == other.oldValue &&
          newValue == other.newValue &&
          isInsert == other.isInsert &&
          isRemove == other.isRemove;

  int get hashCode => hashObjects([
        entityId,
        oldValue,
        newValue,
        isInsert,
        isRemove,
      ]);
}
/// A [ChangeRecord] that denotes adding, removing, or updating a
/// [ComponentTypeStore] in a [TypeStoreRegistry].
///
/// NOTE(review): this class `extends ChangeRecord` while the sibling
/// [ComponentTypeStoreChangeRecord] uses `implements` — confirm whether that
/// difference is intentional.
class TypeStoreRegistryChangeRecord<T> extends ChangeRecord {
  /// The component type for which the store changed.
  final Type type;

  /// The previous store associated with the [type].
  ///
  /// Is always `null` if [isInsert].
  final ComponentTypeStore<T> oldValue;

  /// The new value associated with the [type].
  ///
  /// Is always `null` if [isRemove].
  final ComponentTypeStore<T> newValue;

  /// Whether or not this change concerns an insertion.
  final bool isInsert;

  /// Whether or not this change concerns a removal.
  final bool isRemove;

  /// Create an update record for [type] from [oldValue] to [newValue].
  const TypeStoreRegistryChangeRecord(this.type, this.oldValue, this.newValue)
      : isInsert = false,
        isRemove = false;

  /// Create an insert record for [type] and [newValue].
  const TypeStoreRegistryChangeRecord.insert(this.type, this.newValue)
      : isInsert = true,
        isRemove = false,
        oldValue = null;

  /// Create a remove record for [type] with a former [oldValue].
  const TypeStoreRegistryChangeRecord.remove(this.type, this.oldValue)
      : isInsert = false,
        isRemove = true,
        newValue = null;

  /// Apply this change record to the [typeStoreRegistry].
  void apply(TypeStoreRegistry typeStoreRegistry) {
    if (isRemove) {
      typeStoreRegistry.remove(type);
    } else {
      // Inserts and updates both go through `add`.
      typeStoreRegistry.add(type, newValue);
    }
  }

  bool operator ==(Object other) =>
      identical(this, other) ||
      other is TypeStoreRegistryChangeRecord<T> &&
          type == other.type &&
          oldValue == other.oldValue &&
          newValue == other.newValue &&
          isInsert == other.isInsert &&
          isRemove == other.isRemove;

  int get hashCode => hashObjects([
        type,
        oldValue,
        newValue,
        isInsert,
        isRemove,
      ]);
}
|
package Monitoring::GLPlugin::TableItem;
our @ISA = qw(Monitoring::GLPlugin::Item);

use strict;

# Generic table-row item. The constructor copies every named parameter into
# the blessed hash and then lets a subclass post-process them through an
# optional finish() hook.
sub new {
  my ($class, %params) = @_;
  my $self = bless {}, $class;
  # Copy all named parameters into the object in one go via a hash slice.
  @{$self}{keys %params} = values %params;
  # Subclasses may define finish() to derive additional attributes.
  $self->finish(%params) if $self->can("finish");
  return $self;
}

# Intentionally a no-op.
sub check {
  my ($self) = @_;
  # some tableitems are not checkable, they are only used to enhance other
  # items (e.g. sensorthresholds enhance sensors)
  # normal tableitems should have their own check-method
}

1;

__END__
|
require 'rails_helper'
require 'email_spec/rspec'
require 'timecop'
require 'shared_context/stub_email_rendering'
RSpec.describe EmailAlert, type: :model do
  # Doubles and fixtures shared by the examples below.
  let(:mock_log) { instance_double("ActivityLogger") }

  # set subject appropriately since it's a Singleton
  let(:subject) { described_class.instance }

  let(:user) { create(:user) }
  let(:config) { { days: [2, 5, 10] } }
  let(:condition) { create(:condition, config: { days: [2, 5, 10] }) }
  let(:timing) { :on }

  # Fixed "today" used with Timecop so day-based alert logic is deterministic.
  let(:dec_1) { Time.zone.local(2018, 12, 1) }

  let(:users) do
    [create(:user, first_name: 'u1'),
     create(:user, first_name: 'u2')]
  end
describe '.condition_response' do
it 'gets the config from the condition' do
# stubbed methods:
allow(subject).to receive(:entities_to_check)
.and_return([])
allow(subject).to receive(:send_alert_this_day?)
.and_return(true)
allow(subject).to receive(:send_email)
.with(anything, mock_log)
# expected results:
expect(described_class).to receive(:get_config)
# actual test:
Timecop.freeze(dec_1) do
subject.condition_response(condition, mock_log)
end
end
it 'gets the timing from the condition' do
# stubbed methods:
allow(subject).to receive(:entities_to_check)
.and_return([])
allow(subject).to receive(:send_alert_this_day?)
.and_return(true)
allow(subject).to receive(:send_email)
.with(anything, mock_log)
# expected results:
expect(described_class).to receive(:get_timing)
# actual test:
Timecop.freeze(dec_1) do
subject.condition_response(condition, mock_log)
end
end
it 'calls process_entities' do
# stubbed methods:
allow(subject).to receive(:entities_to_check)
.and_return(users)
# expected results:
expect(subject).to receive(:process_entities)
.and_return(true)
# actual test:
Timecop.freeze(dec_1) do
subject.condition_response(condition, mock_log)
end
end
end
describe 'process_entities' do
it 'loops through entities_to_check and calls take_action on each' do
# stub this method
allow(subject).to receive(:take_action).and_return(true)
expect(subject).to receive(:take_action).exactly(users.size).times
# actual test:
Timecop.freeze(dec_1) do
subject.process_entities(users, mock_log)
end
end
end
describe 'take_action' do
let(:entity) { create(:member_with_membership_app) }
it 'calls send_email for the entity and log if send_alert_this_day? is true' do
# stubbed methods:
allow(subject).to receive(:send_alert_this_day?)
.with(timing, config, anything)
.and_return(true)
# expected results:
expect(subject).to receive(:send_alert_this_day?)
.with(timing, config, anything)
.once
expect(subject).to receive(:send_email)
.with(anything, mock_log)
.once
# actual test:
Timecop.freeze(dec_1) do
subject.timing = timing
subject.config = config
subject.take_action(entity, mock_log)
end
end
it 'does nothing when send_alert_this_day? is false for a user' do
# stubbed methods:
allow(subject).to receive(:send_alert_this_day?)
.with(anything, config, user)
.and_return(false)
# expected results:
expect(subject).to receive(:send_alert_this_day?)
.with(anything, config, anything)
.once
expect(subject).to receive(:send_email).never
# actual test:
Timecop.freeze(dec_1) do
subject.timing = timing
subject.config = config
subject.take_action(entity, mock_log)
end # Timecop
end # it 'does nothing when send_alert_this_day? is false for a user'
end
it '.entities_to_check raises NoMethodError (subclasses should implement)' do
expect {subject.entities_to_check }.to raise_exception NoMethodError
end
it '.mailer_class raises NoMethodError (subclasses should implement)' do
expect {subject.mailer_class }.to raise_exception NoMethodError
end
it '.mailer_args raises NoMethodError (subclasses should implement)' do
expect {subject.mailer_args(create(:user)) }.to raise_exception NoMethodError
end
describe '.send_email' do
include_context 'stub email rendering'
before(:all) do
# define a method for MemberMailer just for this test
MemberMailer.class_eval do
def fake_mailer_method(_user)
nil
end
end
end
after(:all) do
# remove the method we added
MemberMailer.undef_method(:fake_mailer_method)
end
before(:each) do
Rails.configuration.action_mailer.delivery_method = :mailgun
ApplicationMailer.mailgun_client.enable_test_mode!
allow(Memberships::MembershipActions).to receive(:for_user)
.and_return(true)
end
after(:each) { ApplicationMailer.mailgun_client.disable_test_mode! }
let(:entity) { build(:member) }
it 'sends alert email to user and logs a message' do
expect(MemberMailer.fake_mailer_method(user)).to be_truthy
# stubbed methods:
allow(subject).to receive(:mailer_class)
.and_return(MemberMailer)
allow(subject).to receive(:mailer_args)
.and_return([entity])
allow(subject).to receive(:mailer_method).and_return(:test_email)
allow(subject).to receive(:success_str).with(entity)
.and_return('succeeded with entity')
# expected results:
expect(MemberMailer).to receive(:test_email).with(entity)
.and_call_original
expect(subject).to receive(:log_mail_response)
Timecop.freeze(dec_1)
subject.send_email(entity, mock_log)
Timecop.return
email = ActionMailer::Base.deliveries.last
expect(email).to deliver_to(entity.email)
end
it 'does not send email if an error is raised or mail has errors' do
subject.create_alert_logger(mock_log)
expect(MemberMailer.fake_mailer_method(user)).to be_truthy
# stubbed methods:
allow(subject).to receive(:mailer_class)
.and_return(MemberMailer)
allow(subject).to receive(:mailer_args)
.and_return([entity])
allow(subject).to receive(:mailer_method).and_return(:test_email)
allow(subject).to receive(:failure_str).with(entity)
.and_return('failed with entity')
allow_any_instance_of(Mail::Message).to receive(:deliver)
.and_raise(Net::ProtocolError)
# expected results:
expect(MemberMailer).to receive(:test_email).with(entity)
.and_call_original
expect(mock_log).to receive(:error).with(/EmailAlert email ATTEMPT FAILED failed with entity\. Net::ProtocolError Also see for possible info/)
Timecop.freeze(dec_1)
subject.send_email(entity, mock_log)
Timecop.return
expect(ActionMailer::Base.deliveries.size).to eq 0
end
end
  describe '.mail_message' do
    let(:entity) { create(:company) }

    it 'calls mailer_args to get the arguments' do
      # stubbed methods:
      allow(subject).to receive(:mailer_method).and_return(:test_email)
      allow(subject).to receive(:mailer_class).and_return(MemberMailer)

      expect(subject).to receive(:mailer_args).with(entity)

      subject.mail_message(entity)
    end

    it 'calls mailer_class to get the mailer class' do
      # stubbed methods:
      allow(subject).to receive(:mailer_method).and_return(:test_email)
      allow(subject).to receive(:mailer_class).and_return(MemberMailer)
      allow(subject).to receive(:mailer_args).and_return([entity])

      expect(subject).to receive(:mailer_class)

      subject.mail_message(entity)
    end

    it 'sends the mailer_method to the mailer_class with the arguments' do
      # stubbed methods:
      allow(subject).to receive(:mailer_method).and_return(:test_email)
      allow(subject).to receive(:mailer_class).and_return(MemberMailer)
      allow(subject).to receive(:mailer_args).and_return([entity])

      expect(MemberMailer).to receive(:test_email).with(entity)

      subject.mail_message(entity)
    end
  end

  # Pure day-number predicate; the only behavior implemented in the base class.
  describe '.send_on_day_number?' do
    let(:config) { { days: [1, 3, 5] } }

    it 'true if config[:days].include? day_number' do
      expect(subject.send_on_day_number?(3, config)).to be_truthy
    end

    it 'false if day_number is not in config[:days]' do
      expect(subject.send_on_day_number?(0, config)).to be_falsey
    end

    it 'false if config does not have :days as a key' do
      expect(subject.send_on_day_number?(3, { blorf: 'blorf' })).to be_falsey
    end
  end

  describe '.log_mail_response' do
    let(:entity) { create(:user) }

    context 'no mail_response errors (successful)' do
      it 'sends log_success to the alert logger' do
        subject.create_alert_logger(mock_log)

        mail_response_dbl = double("Mail::Message")
        allow(mail_response_dbl).to receive(:errors).and_return([])

        expect_any_instance_of(AlertLogger).to receive(:log_success)

        subject.log_mail_response(mock_log, mail_response_dbl, entity)
      end
    end

    context 'with mail_response_errors (failure)' do
      before(:all) do
        # define a method for MemberMailer just for this test
        MemberMailer.class_eval do
          def fake_mailer_method(_user)
            nil
          end
        end
      end

      after(:all) do
        # remove the method we added
        MemberMailer.undef_method(:fake_mailer_method)
      end

      it 'sends log_failure' do
        subject.create_alert_logger(mock_log)

        mail_response_dbl = double("Mail::Message")
        allow(mail_response_dbl).to receive(:errors).and_return([3])

        expect_any_instance_of(AlertLogger).to receive(:log_failure)

        subject.log_mail_response(mock_log, mail_response_dbl, entity)
      end
    end
  end

  # More template-method hooks that must come from subclasses.
  it '.success_str raises NoMethodError (should be defined by subclasses)' do
    expect { subject.success_str([]) }.to raise_exception NoMethodError
  end

  it '.failure_str raises NoMethodError (should be defined by subclasses)' do
    expect { subject.failure_str([]) }.to raise_exception NoMethodError
  end

  it '.send_alert_this_day?(timing, config, user) raises NoMethodError (should be defined by subclasses)' do
    config = {}
    timing = 'blorf' # doesn't matter what this is
    expect { subject.send_alert_this_day?(timing, config, user) }.to raise_exception NoMethodError
  end

  it '.mailer_method raises NoMethodError (should be defined by subclasses)' do
    expect { subject.mailer_method }.to raise_exception NoMethodError
  end
end
|
// Registers the Windows-runtime command proxy for the "EchoPlugin" Cordova
// plugin.
cordova.commandProxy.add("EchoPlugin", {
    // Bridges the JS `echo` action to the native WinRT component. The native
    // side signals failure by returning a string that starts with "Error".
    echo: function (successCallback, errorCallback, strInput) {
        var result = EchoRuntimeComponent.EchoPluginRT.echo(strInput);
        var isError = result.indexOf("Error") === 0;
        if (isError) {
            errorCallback(result);
        } else {
            successCallback(result);
        }
    }
});
|
package org.leveloneproject.central.kms.domain.keys
import java.util.UUID
import org.leveloneproject.central.kms.domain._
import scala.concurrent.Future
/** Asynchronous persistence abstraction for [[Key]] entities. */
trait KeyStore {

  /** Persists the given key; yields the stored key or a [[KmsError]] on failure. */
  def create(key: Key): Future[Either[KmsError, Key]]

  /** Looks up a key by its id; `None` when no such key exists. */
  def getById(id: UUID): Future[Option[Key]]
}
|
package services
import (
"fmt"
"io"
"log"
"net/http"
"os"
utils "github.com/kuruvi-bits/transform/utils"
)
// Resize downloads a resized rendition of the photo described by message and
// stores it under the resized-images volume, mirroring the album layout
// (RESIZED_VOL/<album>/<photo>).
//
// Consistent with the rest of this package, unrecoverable errors terminate
// the process via log.Fatal.
func Resize(message utils.Message) {
	dirPath := fmt.Sprintf("%s/%s", utils.RESIZED_VOL, message.AlbumName)
	filePath := fmt.Sprintf("%s/%s", dirPath, message.PhotoName)
	utils.CreateDirIfNotExist(dirPath)

	url := utils.GetResizeURL(message)
	response, e := http.Get(url)
	if e != nil {
		log.Fatal(e)
	}
	defer response.Body.Close()

	// Fix: previously any response body was written to disk, so a 404/5xx
	// error page would be saved as if it were image data.
	if response.StatusCode != http.StatusOK {
		log.Fatalf("resize request for %s failed: %s", url, response.Status)
	}

	// Open the destination file for writing.
	file, err := os.Create(filePath)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()

	// io.Copy streams the body straight to disk, so huge files are handled
	// without buffering them in memory.
	_, err = io.Copy(file, response.Body)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println("Success!")
}
|
/**
* Copyright 2014 Yahoo! Inc. Licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.yahoo.sql4d.indexeragent.meta;
import com.google.common.collect.ImmutableMap;
import static com.yahoo.sql4d.indexeragent.Agent.*;
import static com.yahoo.sql4d.indexeragent.sql.SqlMeta.*;
import com.yahoo.sql4d.indexeragent.meta.beans.DataSource;
import com.yahoo.sql4d.indexeragent.meta.beans.StatusTrail;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Handles database interaction specifically for Indexer Agent.
 *
 * Wraps a JPA {@link EntityManagerFactory} configured at construction time
 * for either MySQL or Derby, and exposes CRUD helpers for the
 * {@link DataSource} and {@link StatusTrail} entities.
 *
 * @author srikalyan
 */
public class DBHandler {

    private static final Logger log = LoggerFactory.getLogger(DBHandler.class);

    private final EntityManagerFactory emFactory;

    /** Kind of persistence operation performed by {@link #addUpdateDeleteEntity}. */
    enum Action { ADD, UPDATE, DELETE }

    /**
     * Builds the JDBC settings from the agent configuration and creates the
     * {@code indexerAgent} persistence unit. Supported database types are
     * "mysql" and "derby" (the default).
     */
    public DBHandler() {
        String host = getHost();
        int port = getPort();
        String id = getId();
        String password = getPassword();
        String dbName = getDbName();
        String dbType = getDbType();
        String connectUrl, dialect, driver;
        switch (dbType) {
            case "mysql":
                connectUrl = String.format("jdbc:mysql://%s:%d/%s?autoReconnectForPools=true", host, port, dbName);
                driver = "com.mysql.jdbc.Driver";
                dialect = "org.hibernate.dialect.MySQLDialect";
                break;
            case "derby":
            default:
                connectUrl = String.format("jdbc:derby://%s:%d/%s;create=true", host, port, dbName);
                driver = "org.apache.derby.jdbc.ClientDriver";
                dialect = "org.hibernate.dialect.DerbyDialect";
        }
        Map<String, String> configOverride = ImmutableMap.of(
                "javax.persistence.jdbc.url", connectUrl,
                "javax.persistence.jdbc.user", id,
                "javax.persistence.jdbc.password", password,
                "hibernate.dialect", dialect,
                "javax.persistence.jdbc.driver", driver);
        log.info("Overriding database configuration : {}", configOverride);
        emFactory = Persistence.createEntityManagerFactory("indexerAgent", configOverride);
    }

    private EntityManager getEntityManager() {
        return emFactory.createEntityManager();
    }

    /**
     * Persists, merges, or removes the given entity inside a single
     * transaction.
     *
     * Fix over the previous version: the commit now happens inside the try
     * block. Committing from the finally block meant that a commit failure
     * would throw before {@code em.close()}, leaking the EntityManager; the
     * rollback is now also guarded by {@code isActive()} so a failure in
     * {@code begin()} cannot trigger a second exception in the catch block.
     */
    private void addUpdateDeleteEntity(Object entity, Action action) {
        EntityManager em = getEntityManager();
        try {
            em.getTransaction().begin();
            switch (action) {
                case ADD:
                    em.persist(entity);
                    break;
                case UPDATE:
                    em.merge(entity);
                    break;
                case DELETE:
                    em.remove(entity);
                    break;
            }
            em.getTransaction().commit();
        } catch (RuntimeException e) {
            log.error("Something wrong persisting/merging/removing entity {}, so rolling back . Exception is {}", entity, ExceptionUtils.getStackTrace(e));
            if (em.getTransaction().isActive()) {
                em.getTransaction().rollback();
            }
        } finally {
            // Always release the EntityManager, whatever happened above.
            em.close();
        }
    }

    public void addDataSource(DataSource ds) {
        addUpdateDeleteEntity(ds, Action.ADD);
    }

    public void updateDataSource(DataSource ds) {
        addUpdateDeleteEntity(ds, Action.UPDATE);
    }

    public void removeDataSource(DataSource ds) {
        addUpdateDeleteEntity(ds, Action.DELETE);
    }

    public void addStatusTrail(StatusTrail st) {
        addUpdateDeleteEntity(st, Action.ADD);
    }

    public void updateStatusTrail(StatusTrail st) {
        addUpdateDeleteEntity(st, Action.UPDATE);
    }

    public void removeStatusTrail(StatusTrail st) {
        addUpdateDeleteEntity(st, Action.DELETE);
    }

    /** @return all registered data sources. */
    public List<DataSource> getAllDataSources() {
        EntityManager em = getEntityManager();
        try {
            return em.createQuery("SELECT ds FROM DataSource ds", DataSource.class).getResultList();
        } finally {
            em.close();
        }
    }

    /** @return the data source with the given table name, or null if absent. */
    public DataSource getDataSource(String tableName) {
        EntityManager em = getEntityManager();
        try {
            List<DataSource> resultList = em.createQuery("SELECT ds FROM DataSource ds WHERE ds.name = :name", DataSource.class).setParameter("name", tableName).getResultList();
            return resultList.isEmpty() ? null : resultList.get(0);
        } finally {
            em.close();
        }
    }

    /** @return the data source with the given primary key, or null if absent. */
    public DataSource getDataSource(int id) {
        EntityManager em = getEntityManager();
        try {
            return em.find(DataSource.class, id);
        } finally {
            em.close();
        }
    }

    /**
     * Tasks whose status:not_done and givenUp:zero
     * @param ds data source whose pending tasks are wanted
     * @return pending tasks, newest first
     */
    public List<StatusTrail> getIncompleteTasks(DataSource ds) {
        EntityManager em = getEntityManager();
        try {
            return em.createQuery("SELECT st FROM StatusTrail st WHERE st.dataSourceId = :dataSourceId "
                            + "AND st.status = 'not_done' AND st.givenUp = 0 ORDER BY st.id DESC",
                    StatusTrail.class).
                    setParameter("dataSourceId", ds.getId()).getResultList();
        } finally {
            em.close();
        }
    }

    /**
     * @return all tasks (across data sources) that are not done and not given
     *         up, newest first.
     */
    public List<StatusTrail> getAllIncompleteTasks() {
        EntityManager em = getEntityManager();
        try {
            return em.createQuery("SELECT st FROM StatusTrail st WHERE "
                            + " st.status = 'not_done' AND st.givenUp = 0 ORDER BY st.id DESC",
                    StatusTrail.class).getResultList();
        } finally {
            em.close();
        }
    }

    /**
     * @return all tasks currently marked in_progress and not given up.
     */
    public List<StatusTrail> getAllInprogressTasks() {
        EntityManager em = getEntityManager();
        try {
            return em.createQuery("SELECT st FROM StatusTrail st WHERE "
                            + " st.status = 'in_progress' AND st.givenUp = 0",
                    StatusTrail.class).getResultList();
        } finally {
            em.close();
        }
    }

    /**
     * @return number of tasks currently marked in_progress and not given up.
     */
    public long getInprogressTasksCount() {
        EntityManager em = getEntityManager();
        try {
            return (long) em.createQuery("SELECT COUNT(st.id) FROM StatusTrail st WHERE "
                    + " st.status = 'in_progress' AND st.givenUp = 0").getSingleResult();
        } finally {
            em.close();
        }
    }

    /**
     * Change the status of a task.
     * Increments the attempt counter and flags the task as given up once the
     * configured maximum number of attempts is reached.
     * @param st      task to update
     * @param success whether the last attempt succeeded
     */
    public void markTask(StatusTrail st, boolean success) {
        st.setStatus(success ? JobStatus.done : JobStatus.not_done);
        st.setAttemptsDone(st.getAttemptsDone() + 1);
        st.setGivenUp(st.getAttemptsDone() >= getMaxTaskAttempts() ? 1 : 0);
        updateStatusTrail(st);
    }

    /** Closes the underlying {@link EntityManagerFactory}. */
    public void shutdown() {
        log.info("Shutting down and cleaning up database connections..");
        emFactory.close();
    }
}
|
<?php

require "config.php";

use Illuminate\Database\Capsule\Manager as Capsule;

// Recreate the price_intervals table from scratch.
// Fix: dropIfExists() (instead of drop()) keeps this migration idempotent —
// plain drop() throws on a fresh database where the table was never created.
Capsule::schema()->dropIfExists('price_intervals');

Capsule::schema()->create('price_intervals', function ($table) {
    $table->increments('id');
    $table->date('start_date'); // first day the price applies
    $table->date('end_date');   // last day the price applies
    $table->double('price');
    $table->timestamps();
});
|
import ecdsa
import json
import redis
from typing import NamedTuple, Union
import binascii
from binascii import unhexlify
from luracoin import errors
from luracoin.exceptions import TransactionNotValid
from luracoin.wallet import pubkey_to_address
from luracoin.config import Config
from luracoin.helpers import (
mining_reward,
sha256d,
bytes_to_signing_key,
little_endian_to_int,
)
class Transaction:
    """A Luracoin transaction.

    Serialized layout (integers little-endian, unsigned):
    ``[chain:1][nonce:4][fee:4][value:8][to_address:34][unlock_sig:128?]``
    The unlocking signature is appended only when present and not serializing
    for signing.
    """

    def __init__(
        self,
        chain: int = 0,
        nonce: int = 0,
        fee: int = 0,
        value: int = 0,
        to_address: str = None,
        unlock_sig: bytes = None,
    ) -> None:
        self.chain = chain            # chain identifier (fits in 1 byte)
        self.nonce = nonce            # sender nonce (4 bytes)
        self.fee = fee                # transaction fee (4 bytes)
        self.value = value            # transferred amount (8 bytes)
        self.to_address = to_address  # 34-character destination address
        self.unlock_sig = unlock_sig  # 128-byte pubkey+signature blob

    @property
    def is_coinbase(self) -> bool:
        """Whether this is a coinbase (block-reward) transaction."""
        return self.unlock_sig == Config.COINBASE_UNLOCK_SIGNATURE

    def sign(self, private_key) -> "Transaction":
        """Sign the signature-less serialization and store the unlock_sig.

        Returns self so calls can be chained.
        """
        signature = sign_transaction(
            private_key=private_key,
            transaction_serialized=self.serialize(to_sign=True).hex(),
        )
        self.unlock_sig = signature
        return self

    def json(self) -> dict:
        """Return a JSON-serializable dict representation (sig hex-encoded)."""
        result = {
            "id": self.id,
            "chain": self.chain,
            "nonce": self.nonce,
            "fee": self.fee,
            "value": self.value,
            "to_address": self.to_address,
            "unlock_sig": None,
        }
        if self.unlock_sig:
            result["unlock_sig"] = self.unlock_sig.hex()
        return result

    def serialize(self, to_sign: bool = False) -> bytes:
        """Serialize to bytes; when ``to_sign`` is True the signature is omitted."""
        serialized = (
            self.chain.to_bytes(1, byteorder="little", signed=False)
            + self.nonce.to_bytes(4, byteorder="little", signed=False)
            + self.fee.to_bytes(4, byteorder="little", signed=False)
            + self.value.to_bytes(8, byteorder="little", signed=False)
            + str.encode(self.to_address)
        )

        if not to_sign and self.unlock_sig:
            serialized += self.unlock_sig

        return serialized

    def deserialize(self, serialized_bytes: bytes):
        """Populate this instance from bytes produced by :meth:`serialize`."""
        self.chain = int.from_bytes(serialized_bytes[0:1], byteorder="little")
        self.nonce = int.from_bytes(serialized_bytes[1:5], byteorder="little")
        self.fee = int.from_bytes(serialized_bytes[5:9], byteorder="little")
        self.value = int.from_bytes(serialized_bytes[9:17], byteorder="little")
        self.to_address = serialized_bytes[17:51].decode("utf-8")
        if len(serialized_bytes) > 51:
            self.unlock_sig = serialized_bytes[51:]

    @property
    def id(self) -> str:
        """
        The ID will be the hash SHA256 of all the txins and txouts.
        """
        msg = self.serialize().hex().encode()
        tx_id = sha256d(msg)
        return tx_id

    def make_msg(self) -> str:
        """
        TODO: Improve the message.
        bitcoin.stackexchange.com/questions/37093/what-goes-in-to-the-message-of-a-transaction-signature
        """
        return self.id

    def validate_fields(self, raise_exception=False) -> bool:
        """
        Checks that the transaction has the correct fields.

        Returns False (or raises TransactionNotValid when requested) on the
        first field that is out of range.
        """
        # Fix: chain is serialized into a single byte, so the maximum valid
        # value is 255 (the previous check accepted 256, which made
        # serialize() raise OverflowError after validation passed).
        if self.chain < 0 or self.chain > 255:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_CHAIN)
            return False

        # nonce/fee are 4-byte fields: max 2**32 - 1.
        if self.nonce < 0 or self.nonce > 4_294_967_295:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_NONCE)
            return False

        if self.fee < 0 or self.fee > 4_294_967_295:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_FEE)
            return False

        # value is an 8-byte field and must be strictly positive.
        if self.value <= 0 or self.value > 18_446_744_073_709_551_615:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_VALUE)
            return False

        if not self.to_address or len(self.to_address) != 34:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_TO_ADDRESS)
            return False

        if not self.unlock_sig or len(self.unlock_sig) != 128:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_SIGNATURE)
            return False

        # Coinbase transactions must not pay to the staking address.
        if (
            self.unlock_sig == Config.COINBASE_UNLOCK_SIGNATURE
            and self.to_address == Config.STAKING_ADDRESS
        ):
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_INVALID_STAKING)
            return False

        return True

    def validate(self, raise_exception=False) -> bool:
        """
        Validate a transaction. For a transaction to be valid it has to follow
        these conditions: all fields in range, and (unless coinbase) the
        unlocking script must verify against the signature-less serialization.
        """
        if not self.validate_fields(raise_exception=raise_exception):
            return False

        if (
            self.unlock_sig != Config.COINBASE_UNLOCK_SIGNATURE
            and not is_valid_unlocking_script(
                unlocking_script=self.unlock_sig,
                transaction_serialized=self.serialize(to_sign=True).hex(),
            )
        ):
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_INVALID_SIGNATURE)
            return False

        return True

    def to_transaction_pool(self) -> None:
        """Publish this transaction to the Redis-backed transaction pool."""
        redis_client = redis.Redis(
            host=Config.REDIS_HOST, port=Config.REDIS_PORT, db=Config.REDIS_DB
        )
        redis_client.set(self.id, self.serialize())

    def save(self, block_height: int) -> None:
        """
        Add a transaction to the chainstate. Inside the chainstate database,
        the following key/value pairs are stored:

        'c' + 32-byte transaction hash -> unspent transaction output record for
        that transaction. These records are only present for transactions that
        have at least one unspent output left.

        Each record stores:
            The version of the transaction.
            Whether the transaction was a coinbase or not.
            Which height block contains the transaction.
            Which outputs of that transaction are unspent.
            The scriptPubKey and amount for those unspent outputs.

        [TX VERSION][COINBASE][HEIGHT][NUM OUTPUTS][∞][OUTPUT_LEN][OUTPUT]
            ^           ^         ^        ^              ^
          4 bytes     1 byte   4 bytes   VARINT         VARINT

        'B' -> 32-byte block hash: the block hash up to which the database
        represents the unspent transaction outputs

        NOTE: not implemented yet.
        """
        pass
def build_message(outpoint, pub_key: str) -> str:
    """Double-SHA256 of the outpoint reference concatenated with the pubkey.

    TODO: https://bitcoin.stackexchange.com/questions/37093/what-goes-in-to-the-message-of-a-transaction-signature
    """
    parts = (str(outpoint.txid), str(outpoint.txout_idx), pub_key)
    return sha256d("".join(parts))
def build_script_sig(signature: str, public_key: str) -> str:
    """Concatenate the signature and public key into an unlocking script.

    NOTE(review): despite the original "<VARINT>SIGNATURE<VARINT>PUBLIC_KEY"
    sketch, no varints are emitted, and deserialize_unlocking_script reads the
    public key *first* — confirm the intended field order before relying on
    this helper.
    """
    return f"{signature}{public_key}"
def verify_signature(message: str, public_key: str, signature: str) -> bool:
    """Verify ``signature`` over ``message`` against a SECP256k1 public key.

    NOTE(review): ecdsa's ``verify`` raises BadSignatureError on mismatch
    rather than returning False — callers are expected to catch it.
    """
    verifying_key = ecdsa.VerifyingKey.from_string(
        public_key, curve=ecdsa.SECP256k1
    )
    return verifying_key.verify(signature, message)
def deserialize_unlocking_script(unlocking_script: bytes) -> dict:
    """Split a raw unlocking script into its public-key and signature parts.

    The script is hex-encoded first; the leading 128 hex characters (64 bytes)
    are treated as the public key and the remainder as the signature.
    """
    hex_script = unlocking_script.hex()
    public_key = hex_script[:128]
    signature = hex_script[128:]

    return {
        "signature": signature,
        "public_key": public_key,
        "address": pubkey_to_address(public_key.encode()),
    }
def is_valid_unlocking_script(
    unlocking_script: str, transaction_serialized: str
) -> bool:
    """Verify that an unlocking script's signature matches the transaction.

    Returns False (never raises) for malformed hex, bad signatures, or
    malformed key/signature material.
    """
    # TODO: This functions allows to spend all outpoints since we are
    # verifying the signature not the signature + matching public key.
    try:
        unlocking_script = deserialize_unlocking_script(unlocking_script)
    except binascii.Error:
        # The script was not valid hex-decodable material.
        return False

    message = transaction_serialized.encode()

    try:
        is_valid = verify_signature(
            message=message,
            public_key=bytes.fromhex(unlocking_script["public_key"]),
            signature=bytes.fromhex(unlocking_script["signature"]),
        )
    except ecdsa.keys.BadSignatureError:
        # Signature does not verify against the embedded public key.
        is_valid = False
    except AssertionError:
        # Presumably raised inside ecdsa for malformed key/signature
        # lengths — TODO confirm.
        is_valid = False
    return is_valid
def sign_transaction(private_key: bytes, transaction_serialized: str) -> bytes:
    """Sign a serialized transaction with the given private key.

    Returns the raw verifying key concatenated with the signature (this pair
    is what the rest of the code treats as the 128-byte unlock_sig).
    """
    signing_key = bytes_to_signing_key(private_key=private_key)
    public_key = signing_key.get_verifying_key().to_string()
    return public_key + signing_key.sign(transaction_serialized.encode())
|
# NOTE(review): this script targets an old Julia release — `reload` was
# removed after Julia 0.6 (use Revise.jl on modern Julia). Confirm the
# intended Julia version before running.
reload("Persa")

using Base.Test
using DecisionTree
using DatasetsCF

# write your own tests here
#@test 1 == 2

###

reload("COFILS")

# Load MovieLens and hold out 10% of the ratings for evaluation.
dataset = DatasetsCF.MovieLens()
holdout = Persa.HoldOut(dataset, 0.9)
(ds_train, ds_test) = Persa.get(holdout)

# Build a COFILS model with 10 latent features, train it, and print the
# evaluation result on the held-out ratings.
model = COFILS.Cofils(ds_train, 10)
Persa.train!(model, ds_train)
print(Persa.aval(model, ds_test))
|
/// Tera template source defining the `asset_url` macro, which expands a file
/// name into a quoted `/assets/...` URL.
///
/// `'static` is implied for references in `static` items, so the explicit
/// lifetime is dropped (Clippy: `redundant_static_lifetimes`).
pub static TEXT: &str = "{% macro asset_url(filename) %}
\"/assets/{{ filename }}\"
{% endmacro asset_url %}";
|
using System;
using System.Runtime.Serialization;
namespace DomainBlocks.Persistence
{
    /// <summary>
    /// Thrown when an operation targets an event stream that has been deleted.
    /// </summary>
    [Serializable]
    public class StreamDeletedException : Exception
    {
        /// <summary>The name of the deleted stream.</summary>
        public string StreamName { get; }

        public StreamDeletedException(string streamName)
        {
            StreamName = streamName;
        }

        public StreamDeletedException(string streamName, string message) : base(message)
        {
            StreamName = streamName;
        }

        public StreamDeletedException(string streamName, string message, Exception inner) : base(message, inner)
        {
            StreamName = streamName;
        }

        /// <summary>
        /// Deserialization constructor. Fix: the previous implementation
        /// mistakenly *wrote* to <paramref name="info"/> here (AddValue +
        /// base.GetObjectData), leaving <see cref="StreamName"/> null after
        /// deserialization; it must instead read the value back.
        /// </summary>
        protected StreamDeletedException(
            SerializationInfo info,
            StreamingContext context) : base(info, context)
        {
            if (info == null) throw new ArgumentNullException(nameof(info));
            StreamName = info.GetString(nameof(StreamName));
        }

        /// <summary>
        /// Serializes the exception state, including <see cref="StreamName"/>.
        /// </summary>
        public override void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            if (info == null) throw new ArgumentNullException(nameof(info));
            base.GetObjectData(info, context);
            info.AddValue(nameof(StreamName), StreamName);
        }
    }
}
|
import {bindable} from 'aurelia-framework';
import {inject} from 'aurelia-framework';
import moment from 'moment';
import {GameService} from '../services/gameService';
// Aurelia custom element rendering one game entry in the game list.
@inject(GameService)
export class GameListItemCustomElement {
  // GameService is provided by Aurelia DI and kept for use by the view/VM.
  constructor(GameService) {
    this.gameService = GameService;
  }

  // The game this list item displays; bound by the parent view.
  @bindable game;

  // Human-readable date of the bound game, e.g. "Dec 1st 18".
  get gameDate() {
    //TODO Localization
    // NOTE(review): the template literal coerces game.date to a string
    // before moment parses it — confirm game.date is already a string/ISO
    // date, since a Date object would be stringified and reparsed here.
    return moment(`${this.game.date}`).format("MMM Do YY");
  }
}
|
#!/usr/bin/env ruby

# Print every line of the markdown file that mentions "代码块" (code block).
IO.foreach("2.2 Ruby Day 2.md") do |line|
  puts line if line.match?(/(.*)代码块(.*)/)
end
|
package net.jp2p.jxse.services;
import net.jp2p.jxta.factory.IJxtaComponents.JxtaComponents;
import net.jxta.impl.loader.JxtaLoaderModuleManager;
import net.jxta.impl.modulemanager.JxtaModuleBuilder;
import net.jxta.module.IModuleBuilder;
import net.jxta.peergroup.core.Module;
/**
 * Declarative-services component that bridges {@link IModuleBuilder}
 * registrations into the shared JXTA root module manager.
 */
public class Component {

    // NOTE(review): a static field assigned in the instance constructor —
    // every new Component re-fetches/overwrites the shared root manager;
    // confirm this is intended.
    private static JxtaLoaderModuleManager<Module> manager;

    private boolean canBuild;

    public Component() {
        manager = JxtaLoaderModuleManager.getRoot( Component.class, true );
        this.canBuild = false;
    }

    public void activate(){ /* DO NOTHING */ }

    public void deactivate(){ /* DO NOTHING */ }

    /** Whether a JxtaModuleBuilder has been registered through this component. */
    protected final boolean canBuild() {
        return canBuild;
    }

    /**
     * Registers the builder with the shared manager; a JxtaModuleBuilder
     * additionally marks this component as able to build.
     */
    public void registerBuilder(IModuleBuilder<Module> builder) {
        manager.registerBuilder( builder);
        if( builder instanceof JxtaModuleBuilder )
            this.canBuild = true;
    }

    public void unregisterBuilder( IModuleBuilder<Module> builder ) {
        manager.unregisterBuilder( builder );
    }

    /**
     * NOTE(review): currently always returns true — the descriptor-based
     * check is commented out; confirm whether it should be restored.
     */
    public static final boolean canBuild( JxtaComponents jxtaComponent ) {
        //PlatformDescriptor descriptor = new PlatformDescriptor();
        return true;//manager.canBuild(descriptor);
    }
}
%%%-------------------------------------------------------------------
%%% @author Michal Stanisz
%%% @copyright (C) 2021 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% Module responsible for managing QoS status persistent model.
%%% For more details consult `qos_status` module doc.
%%% @end
%%%-------------------------------------------------------------------
-module(qos_status_model).
-author("Michal Stanisz").
-include("modules/datastore/qos.hrl").
-include("modules/datastore/datastore_models.hrl").
-include("modules/datastore/datastore_runner.hrl").
-include_lib("ctool/include/errors.hrl").
-include_lib("ctool/include/logging.hrl").
%% API
-export([create/4, update/3, get/2, delete/2]).
%% datastore_model callbacks
-export([get_record_struct/1, get_record_version/0]).
-type doc() :: datastore_doc:doc(record()).
-type diff() :: datastore_doc:diff(record()).
-type id() :: datastore_doc:key().
-type record() :: #qos_status{}.
-type dir_type() :: ?QOS_STATUS_TRAVERSE_CHILD_DIR | ?QOS_STATUS_TRAVERSE_START_DIR.
-export_type([diff/0]).
-define(CTX, (qos_status:get_ctx())).
%%%===================================================================
%%% API
%%%===================================================================
-spec create(od_space:id(), traverse:id(), file_meta:uuid(), dir_type()) ->
    {ok, doc()}.
create(SpaceId, TraverseId, DirUuid, DirType) ->
    %% The doc id is derived from the traverse/directory pair so later
    %% lookups can regenerate it without storing a mapping.
    DocId = generate_status_doc_id(TraverseId, DirUuid),
    Record = #qos_status{is_start_dir = DirType == ?QOS_STATUS_TRAVERSE_START_DIR},
    datastore_model:create(?CTX, #document{key = DocId, scope = SpaceId, value = Record}).
-spec update(traverse:id(), file_meta:uuid(), diff()) -> {ok, doc()} | {error, term()}.
update(TraverseId, Uuid, Diff) ->
    %% Apply the given diff to the status doc identified by this traverse/file pair.
    datastore_model:update(?CTX, generate_status_doc_id(TraverseId, Uuid), Diff).
-spec get(traverse:id(), file_meta:uuid()) -> {ok, doc()} | {error, term()}.
get(TraverseId, Uuid) ->
    %% Fetch the status doc for this traverse/file pair.
    datastore_model:get(?CTX, generate_status_doc_id(TraverseId, Uuid)).
-spec delete(traverse:id(), file_meta:uuid()) -> ok | {error, term()}.
delete(TraverseId, Uuid) ->
    %% Remove the status doc for this traverse/file pair.
    datastore_model:delete(?CTX, generate_status_doc_id(TraverseId, Uuid)).
%%%===================================================================
%%% datastore_model callbacks
%%%===================================================================
%% Returns the current version of the #qos_status{} record
%% (bump together with get_record_struct/1 on any structure change).
-spec get_record_version() -> datastore_model:record_version().
get_record_version() ->
1.
%% Describes the persisted structure of the #qos_status{} record for the
%% given version; field order mirrors the record definition (see qos.hrl).
-spec get_record_struct(datastore_model:record_version()) ->
datastore_model:record_struct().
get_record_struct(1) ->
{record, [
{previous_batch_last_filename, binary},
{current_batch_last_filename, binary},
{files_list, [string]},
{child_dirs_count, integer},
{is_last_batch, boolean},
{is_start_dir, boolean}
]}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @private
%% Builds a deterministic datastore key from the traverse/directory pair,
%% generated adjacent to DirUuid — presumably so the status doc is routed
%% alongside the directory's documents (confirm against datastore_key docs).
-spec generate_status_doc_id(traverse:id(), file_meta:uuid()) -> id().
generate_status_doc_id(TraverseId, DirUuid) ->
datastore_key:adjacent_from_digest([DirUuid, TraverseId], DirUuid).
|
---
author: mikeparker104
ms.author: miparker
ms.date: 06/02/2020
ms.service: notification-hubs
ms.topic: include
ms.openlocfilehash: 5e75c5d5510f596eb7911cae0310e60b6bef67bf
ms.sourcegitcommit: 5cace04239f5efef4c1eed78144191a8b7d7fee8
ms.translationtype: MT
ms.contentlocale: pl-PL
ms.lasthandoff: 07/08/2020
ms.locfileid: "86146452"
---
### <a name="send-a-test-notification"></a>Wysyłanie powiadomienia testowego
1. Otwórz nową kartę w programie [Poster](https://www.postman.com/downloads/).
1. Ustaw żądanie na **wpis**, a następnie wprowadź następujący adres:
```xml
https://<app_name>.azurewebsites.net/api/notifications/requests
```
1. Jeśli wybrano opcję ukończenia [uwierzytelniania klientów przy użyciu klucza interfejsu API](#authenticate-clients-using-an-api-key-optional) , należy skonfigurować nagłówki żądania, aby zawierały wartość **apikey** .
| Klucz | Wartość |
| ------------------------------ | ------------------------------ |
| apikey | <your_api_key> |
1. Wybierz opcję **RAW** dla **treści**, a następnie wybierz pozycję **JSON** z listy opcji formatu i dołącz następującą zawartość **JSON**:
```json
{
"text": "Message from Postman!",
"action": "action_a"
}
```
1. Wybierz przycisk **kod** , który znajduje się poniżej przycisku **Zapisz** w prawym górnym rogu okna. Żądanie powinno wyglądać podobnie do poniższego przykładu w przypadku wyświetlania **kodu HTML** (w zależności od tego, czy został dołączony nagłówek **apikey** ):
```html
POST /api/notifications/requests HTTP/1.1
Host: https://<app_name>.azurewebsites.net
apikey: <your_api_key>
Content-Type: application/json
{
"text": "Message from backend service",
"action": "action_a"
}
```
1. Uruchom aplikację **PushDemo** na jednej lub obu platformach docelowych (**Android** i **iOS**).
> [!NOTE]
> W przypadku testowania w systemie **Android** upewnij się, że nie uruchomiono **debugowania**, lub — jeśli aplikacja została wdrożona przez uruchomienie — wymuś zamknięcie aplikacji i uruchom ją ponownie przy użyciu programu uruchamiającego.
1. W aplikacji **PushDemo** naciśnij przycisk **zarejestruj** .
1. Z powrotem w programie **[Poster](https://www.postman.com/downloads)** Zamknij okno **Generuj fragmenty kodu** (jeśli jeszcze tego nie zrobiono), a następnie kliknij przycisk **Wyślij** .
1. Sprawdź, czy w programie **[Poster](https://www.postman.com/downloads)** znajduje się odpowiedź **200 OK** i czy alert pojawia się w aplikacji z **odebraną akcją Action**.
1. Zamknij aplikację **PushDemo** , a następnie ponownie kliknij przycisk **Wyślij** **[.](https://www.postman.com/downloads)**
1. Sprawdź, czy ponownie otrzymujesz odpowiedź **200 OK** w **[ogłoszeniu](https://www.postman.com/downloads)** . Sprawdź, czy w obszarze powiadomień dla aplikacji **PushDemo** jest wyświetlana informacja o poprawnym komunikacie.
1. Naciśnij pozycję powiadomienie, aby upewnić się, że aplikacja zostanie otwarta i zostanie wyświetlona **Akcja akcja akcji odebrana** .
1. Z powrotem w programie **[Poster](https://www.postman.com/downloads)** zmodyfikuj poprzednią treść żądania, aby wysłać powiadomienie dyskretne, określając *action_b* zamiast *action_a* dla wartości **akcji** .
```json
{
"action": "action_b",
"silent": true
}
```
1. Gdy aplikacja jest nadal otwarta, kliknij przycisk **Wyślij** w programie **[Poster](https://www.postman.com/downloads)**.
1. Sprawdź, czy otrzymujesz odpowiedź na **200 OK** w programie **[Poster](https://www.postman.com/downloads)** i czy alert pojawia się w aplikacji pokazującej **odebraną akcję ActionB** zamiast akcji **Action**.
1. Zamknij aplikację **PushDemo** , a następnie ponownie kliknij przycisk **Wyślij** **[.](https://www.postman.com/downloads)**
1. Sprawdź, czy otrzymujesz odpowiedź na **200 OK** w programie **[Poster](https://www.postman.com/downloads)** i czy powiadomienie dyskretne nie jest wyświetlane w obszarze powiadomień.
|
/**
* System Extensions
*
* Copyright (C) 2014-2017 Peter "SaberUK" Powell <petpow@saberuk.com>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0.html
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
using System;
using SystemExt.Log;
using SystemExt.Terminal;
namespace SystemExt.Demo
{
/// <summary>
/// Demo for <see cref="SystemExt.Log"/>.
/// </summary>
public static class Log
{
    /// <summary>
    /// Entry point for the <see cref="SystemExt.Log"/> demo.
    /// </summary>
    /// <param name="args">
    /// Command line arguments.
    /// </param>
    /// <returns>
    /// The code to terminate the application with on exit.
    /// </returns>
    public static int EntryPoint(string[] args)
    {
        return new ApplicationChooser()
            .AddEntryPoint(LogComponents, "Iterate through various component filters and write a message")
            .AddEntryPoint(LogLevels, "Iterate through all log levels and write a message")
            .Run(args);
    }

    /// <summary>
    /// A demo which iterates over a series of tokens and logs them to a stream.
    /// </summary>
    /// <param name="arg">
    /// Command line arguments.
    /// </param>
    /// <returns>
    /// The code to terminate the application with on exit.
    /// </returns>
    private static int LogComponents(string[] arg)
    {
        // Set up the manager with a single logger writing to STDOUT.
        var logManager = new LogManager();
        var stdoutLogger = new StreamLogger(Console.OpenStandardOutput());

        // Exercise a range of component filter expressions, valid and invalid.
        var componentFilters = new[] { "*", "INVALID DEMO2", "DEMO1 -DEMO2", "* -DEMO2 -* INVALID" };
        foreach (var componentFilter in componentFilters)
        {
            logManager.AddLogger(componentFilter, LogLevel.Verbose, stdoutLogger);
            Console.WriteLine("Component filter set to {0}", componentFilter);

            logManager.Write(LogLevel.Verbose, "DEMO1", "Logging with the DEMO1 component!");
            logManager.Write(LogLevel.Verbose, "DEMO2", "Logging with the DEMO2 component!");
            logManager.Write(LogLevel.Verbose, logManager, "Logging with the LogManager component!");
        }

        Console.WriteLine("Press any key to exit.");
        Console.ReadKey();
        return 0;
    }

    /// <summary>
    /// A demo which iterates over log levels and writes messages at each.
    /// </summary>
    /// <param name="arg">
    /// Command line arguments.
    /// </param>
    /// <returns>
    /// The code to terminate the application with on exit.
    /// </returns>
    private static int LogLevels(string[] arg)
    {
        // Set up the manager with a single logger writing to STDOUT.
        var logManager = new LogManager();
        var stdoutLogger = new StreamLogger(Console.OpenStandardOutput());

        // Walk every level from None up to Verbose.
        for (var logLevel = LogLevel.None; logLevel <= LogLevel.Verbose; logLevel++)
        {
            Console.WriteLine("Setting log level to {0:G}.", logLevel);
            logManager.AddLogger("DEMO", logLevel, stdoutLogger);

            // Emit one message at every severity to show which ones pass the filter.
            logManager.Write(LogLevel.Verbose, "DEMO", "Verbose!");
            logManager.Write(LogLevel.Information, "DEMO", "Information!");
            logManager.Write(LogLevel.Warning, "DEMO", "Warning!");
            logManager.Write(LogLevel.Error, "DEMO", "Error!");
            logManager.Write(LogLevel.Critical, "DEMO", "Critical!");
        }

        Console.WriteLine("Press any key to exit.");
        Console.ReadKey();
        return 0;
    }
}
}
|
# Gamification
> Climbing is demanding, let's make it more fun with game mechanics!
See:
- p21 for the self-assessment
- p91 for the technical clues
<!---------------------------------------------------------------------------->
# Table of Contents
- [Physical Skills](#physical-skills)
- [Technical Skills](#technical-skills)
- [Mental Skills](#mental-skills)
- [Organizational Skills](#organizational-skills)
- [Overall Skills](#overall-skills)
|
---
layout: post
title: Distributed software testing
author: Daniel Mewes
author_github: danielmewes
---
# About me
A word about me first: My name is Daniel Mewes, and I just came over to
California to work at RethinkDB as an intern for the oncoming months. After
having been an undergraduate student of computer science at Saarland
University, Germany for the last two years, I am excited to work on an
influential real-world project at RethinkDB now. Why RethinkDB? Not only does
RethinkDB develop an exciting and novel piece of database technology, RethinkDB
also provides the great "startup kind" of work experience.
# Software testing
In complex software systems like database management systems, different
components have to work together. These components can interact in complex
ways, yielding a virtually infinite number of possible states that the overall
system can reach. This has consequences for software testing. As bugs in the
code might only show up in a small fraction of the possible states,
comprehensive testing of the system is essential. Encapsulation of code and
data into objects can reduce the number of states that must be considered for
any single piece of code. However an extremely large number of states can still
remain, especially when considering parallel systems. Reliability requirements
for database management systems on the other hand are stringent. Losing or
corrupting data due to bugs in the program cannot be tolerated here.
<!--more-->
Among other measures, we at RethinkDB ensure the reliability of our software by
running extensive tests on a daily basis. The problem with these tests is that
they take a lot of time to complete. We recently reached time requirements of
more than 24 hours on a decent machine for a single test run. So clearly a
single machine is not enough anymore to run the tests. For our daily test runs,
we want to get results quickly. Buying more machines is pricey, especially as
those machines would be idle during the times at which no tests are run. It
also is not very flexible.
# Tapping into the cloud
Cloud computing provides a more flexible and less pricey way to circumvent the
limitations of limited local hardware resources. We decided to use Amazon's
Elastic Compute Cloud ([Amazon EC2][]). If you need the computing power of ten
systems, you can get that from EC2 in a matter of minutes. If you need the
power of a hundred machines, you can get that in a matter of minutes, too.
Basically, Amazon's EC2 provides you with as much computing power as you need,
at just the time that you need it. EC2 allows to dynamically allocate and
deallocate virtual compute nodes, which are billed on an hourly basis. Each
node can be used like a normal computer. The nodes run Linux (Windows nodes are
also available) and are accessible through SSH. So EC2 looked like a promising
platform to make our tests finish faster.
[Amazon EC2]: http://aws.amazon.com/ec2/

_EC2 console showing a few nodes_
Our existing test suite already split up the work into independent test
scripts. What was missing for utilizing EC2 was an automated mechanism to start
and setup a number of EC2 nodes and dispatch the individual tests to these
nodes to run in parallel. Setting up a node especially involves the step of
installing a current build of RethinkDB together with a number of dependencies
on the node's file system. I wrote a Python script to fulfill exactly these
tasks. Our main concern was to improve the overall performance of the testing
process as much as possible.
In more detail, our new distributed testing tool works in the following steps:
* Allocate a number of nodes in Amazon's EC2.
* Once all nodes are up and booted, install the current build of RethinkDB on
each of them. As the bandwidth of the Internet connection in our office is
much lower than what is available to the EC2 nodes, we use SFTP to install
RethinkDB on only one of the nodes and then let that node distribute it to
all remaining ones.
* We can now start running tests on the nodes:
* Pick a test from the list of all individual tests to be run.
* Find a node which is not currently busy running another test. If no node
is available, wait until a node becomes free.
* Initiate the test on the free node. To do this, we use a wrapper script
which we invoke and immediately background on the remote node. The
wrapper script takes care of running the actual test and redirecting its
output and result into specific files, which we can later retrieve
asynchronously.
* After repeating step 3 for all tests in the list, wait for all nodes to
finish their current work.
* Collect the results of all tests from the different nodes. This works by
reading from the files in which our wrapper script has stored the tests'
results.
* Finally, terminate the allocated nodes in EC2.
To communicate with the compute nodes, I opted for the use of [Paramiko][], an
implementation of SSH2 for Python. Having direct access to the SSH2 protocol
from a Python script makes running commands remotely as well as fetching and
installing files from/into the remote systems very convenient. For allocating
and terminating EC2 nodes, we use [Boto][], which provides an interface for
accessing Amazon's AWS API from within Python programs.
[Paramiko]: http://www.lag.net/paramiko/
[Boto]: http://boto.s3.amazonaws.com/index.html
The results are convincing: Instead of 26 hours on a (fast) local machine,
running all of our tests takes only 4 hours when distributed across ten nodes
in EC2. By using still more nodes, the time for testing can be lowered even
further. This is very useful. Say we just made an important change to our code
and want to verify that everything works as it is supposed to. With local test
runs, this would mean waiting at least a day, even longer if our testing
machine is occupied with an earlier test run. If one of the test detects a
problem with the change and we fix it, it takes another day at least until we
can see if the fix even worked and had no other side effects. Thanks to cloud
computing and our distributed testing system, we can now initiate an arbitrary
number of test runs on demand, each of which finishes in a matter of mere
hours.
|
# AWS User Group Kochi
Official Website of AWS User Group Kochi community
### Powered by
- GitHub
- Gatsby
- Netlify
|
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import { HttpClient} from '@angular/common/http'
/** A charm as returned by the mhw-db.com API. */
export interface Charm {
id: number,
slug: string,
name: string,
ranks: CharmRank[]
}
/** A single upgrade level of a charm, with its skills and crafting recipe. */
export interface CharmRank {
name: string,
level: number,
rarity: number,
skills: SkillRank[],
crafting: CharmRankCrafting
}
/** One level of a skill granted by a charm rank. */
export interface SkillRank {
id: number,
slug: string,
level: number,
description: string,
skill: number,
skillName: string,
modifiers: SkillRankModifiers
}
/** Stat modifiers granted by a skill rank — presumably 0 means "no effect" (TODO confirm against API docs). */
export interface SkillRankModifiers {
affinity: number,
attack: number,
damageFire: number,
damageWater: number,
damageIce: number,
damageThunder: number,
damageDragon: number,
defense: number,
health: number,
sharpnessBonus: number,
resistAll: number,
resistFire: number,
resistWater: number,
resistIce: number,
resistThunder: number,
resistDragon: number
}
/** Crafting recipe for a charm rank. */
export interface CharmRankCrafting {
craftable: boolean,
materials: CraftingCost[]
}
/** One material line of a crafting recipe. */
export interface CraftingCost {
quantity: number,
item: Item
}
/** An inventory item used as crafting material. */
export interface Item {
id: number,
name: string,
description: string,
rarity: number,
carryLimit: number,
value: number
}
@Injectable({
  providedIn: 'root'
})
export class CharmService {
  /** Local cache of charms. NOTE(review): never written by this service — confirm callers populate it. */
  charms: Charm[] = [];

  constructor(private http: HttpClient) { }

  /** Fetches a single charm by its numeric id. */
  getCharm(id: number): Observable<Charm> {
    return this.http.get<Charm>('https://mhw-db.com/charms/' + id);
  }

  /** Fetches the full list of charms. */
  getAllCharm(): Observable<Charm[]> {
    // FIX: removed leftover console.log debugging — logging the Observable
    // itself printed an unsubscribed stream object, not the response data.
    return this.http.get<Charm[]>('https://mhw-db.com/charms');
  }
}
|
<?php
declare(strict_types=1);
namespace Linio\SellerCenter\Factory\Xml\Order;
use DateTimeImmutable;
use Linio\SellerCenter\Exception\InvalidXmlStructureException;
use Linio\SellerCenter\Model\Order\Order;
use SimpleXMLElement;
class OrderFactory
{
    /**
     * Top-level elements that must exist on the <Order> XML node.
     * Order matters: the first missing field is the one reported, matching
     * the previous hand-written guard sequence exactly.
     */
    private const REQUIRED_FIELDS = [
        'OrderId',
        'CustomerFirstName',
        'CustomerLastName',
        'OrderNumber',
        'PaymentMethod',
        'Remarks',
        'DeliveryInfo',
        'Price',
        'GiftOption',
        'GiftMessage',
        'VoucherCode',
        'CreatedAt',
        'UpdatedAt',
        'AddressUpdatedAt',
        'AddressBilling',
        'AddressShipping',
        'NationalRegistrationNumber',
        'ItemsCount',
        'PromisedShippingTime',
        'ExtraAttributes',
        'Statuses',
    ];

    /**
     * Builds an Order model from its XML representation.
     *
     * @throws InvalidXmlStructureException when a required element is missing
     */
    public static function make(SimpleXMLElement $element): Order
    {
        // Refactor: replaces 21 copy-pasted property_exists() guards.
        foreach (self::REQUIRED_FIELDS as $field) {
            if (!property_exists($element, $field)) {
                throw new InvalidXmlStructureException('Order', $field);
            }
        }

        $statuses = [];
        foreach ($element->Statuses->Status as $status) {
            $statuses[] = (string) $status;
        }

        return Order::fromData(
            (int) $element->OrderId,
            (int) $element->OrderNumber,
            (string) $element->CustomerFirstName,
            (string) $element->CustomerLastName,
            (string) $element->PaymentMethod,
            (string) $element->Remarks,
            (string) $element->DeliveryInfo,
            (float) $element->Price,
            !empty($element->GiftOption),
            (string) $element->GiftMessage,
            (string) $element->VoucherCode,
            self::parseDate((string) $element->CreatedAt),
            self::parseDate((string) $element->UpdatedAt),
            self::parseDate((string) $element->AddressUpdatedAt),
            AddressFactory::make($element->AddressBilling),
            AddressFactory::make($element->AddressShipping),
            (string) $element->NationalRegistrationNumber,
            (int) $element->ItemsCount,
            self::parseDate((string) $element->PromisedShippingTime),
            (string) $element->ExtraAttributes,
            $statuses
        );
    }

    /**
     * Parses a 'Y-m-d H:i:s' timestamp, returning null when the value is
     * empty or malformed (createFromFormat() returns false on failure —
     * the same outcome as the previous !empty() checks).
     */
    private static function parseDate(string $value): ?DateTimeImmutable
    {
        $dateTime = DateTimeImmutable::createFromFormat('Y-m-d H:i:s', $value);

        return $dateTime instanceof DateTimeImmutable ? $dateTime : null;
    }
}
|
require 'rails_helper'
describe 'GET /locations/:location_id/contacts' do
  context 'when location has contacts' do
    before(:all) do
      @loc = create(:location)
      @first_contact =
        @loc.contacts.create!(attributes_for(:contact_with_extra_whitespace))
    end

    before(:each) do
      get api_location_contacts_url(@loc, subdomain: ENV['API_SUBDOMAIN'])
    end

    after(:all) do
      Organization.find_each(&:destroy)
    end

    it 'returns a 200 status' do
      expect(response).to have_http_status(200)
    end

    it 'includes the id attribute in the serialization' do
      expect(json.first['id']).to eq(@first_contact.id)
    end

    # One example per serialized string attribute; descriptions match the
    # previous hand-written examples verbatim.
    %w[name title email fax phone extension].each do |attribute|
      it "includes the #{attribute} attribute in the serialization" do
        expect(json.first[attribute]).to eq(@first_contact.public_send(attribute))
      end
    end
  end

  context "when location doesn't have contacts" do
    before(:all) do
      @loc = create(:location)
    end

    before(:each) do
      get api_location_contacts_url(@loc, subdomain: ENV['API_SUBDOMAIN'])
    end

    after(:all) do
      Organization.find_each(&:destroy)
    end

    it 'returns an empty array' do
      expect(json).to eq([])
    end

    it 'returns a 200 status' do
      expect(response).to have_http_status(200)
    end
  end
end
|
<?php
declare(strict_types=1);
/*
* This file is part of the Runroom package.
*
* (c) Runroom <runroom@runroom.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Runroom\UserBundle\Repository;
use Doctrine\ORM\EntityManagerInterface;
use Doctrine\ORM\EntityRepository;
use Runroom\UserBundle\Model\UserInterface;
final class UserRepository implements UserRepositoryInterface
{
// Doctrine entity manager used for all persistence operations.
private EntityManagerInterface $entityManager;
/** @phpstan-var class-string<UserInterface> */
private string $class;
/**
 * @param string $class Concrete user entity class managed by this repository.
 *
 * @phpstan-param class-string<UserInterface> $class
 */
public function __construct(EntityManagerInterface $entityManager, string $class)
{
$this->entityManager = $entityManager;
$this->class = $class;
}
/**
 * Finds a user whose `email` column equals the given identifier,
 * or returns null when none exists.
 */
public function loadUserByIdentifier(string $identifier): ?UserInterface
{
return $this->getRepository()->findOneBy(['email' => $identifier]);
}
/** Instantiates a new, not-yet-persisted user of the configured class. */
public function create(): UserInterface
{
return new $this->class();
}
/** Persists the given user and flushes the unit of work immediately. */
public function save(UserInterface $user): void
{
$this->entityManager->persist($user);
$this->entityManager->flush();
}
/** @phpstan-return EntityRepository<UserInterface> */
private function getRepository(): EntityRepository
{
return $this->entityManager->getRepository($this->class);
}
}
|
# Serializes a feed entry for the JSON API.
json.id entry.id
# Prefer the user's custom feed title (@titles) over the feed's own title.
json.feed format_text(@titles[entry.feed_id] || entry.feed.title)
json.title format_text(entry.title)
json.author format_text(entry.author)
# ISO 8601 keeps the timestamp unambiguous for API clients.
json.published entry.published.iso8601
# NOTE(review): `text_format` breaks the `format_text` pattern used above —
# confirm it is a distinct helper and not a transposition typo.
json.content text_format(entry.content)
|
package api
import (
"path"
"time"
)
// Experiment describes an experiment and its tasks.
type Experiment struct {
// Identity
ID string `json:"id"`
Name string `json:"name,omitempty"` // optional human-friendly name; see DisplayID
// Ownership
Owner Identity `json:"owner"`
Author Identity `json:"author"`
User Identity `json:"user"` // TODO: Deprecated.
Description string `json:"description,omitempty"`
// Nodes lists each task in the experiment along with its dependency links.
Nodes []ExperimentNode `json:"nodes"`
Created time.Time `json:"created"`
}
// DisplayID returns the most human-friendly name available for an experiment
// while guaranteeing that it's unique and non-empty.
func (e *Experiment) DisplayID() string {
	// Unnamed experiments fall back to the unique ID.
	if e.Name == "" {
		return e.ID
	}
	return path.Join(e.User.Name, e.Name)
}
// ExperimentSpec describes a set of tasks with optional dependencies.
// This set represents a (potentially disconnected) directed acyclic graph.
// It is the request payload used to create an experiment.
type ExperimentSpec struct {
// (optional) Organization on behalf of whom this resource is created. The
// user issuing the request must be a member of the organization. If omitted,
// the resource will be owned by the requestor.
Organization string `json:"org,omitempty"`
// (optional) Text description of the experiment.
Description string `json:"description,omitempty"`
// (required) Tasks to create. Tasks may be defined in any order, though all
// dependencies must be internally resolvable within the experiment.
Tasks []ExperimentTaskSpec `json:"tasks"`
// (optional) A token representing the user to which the object should be attributed.
// If omitted attribution will be given to the user issuing the request.
AuthorToken string `json:"author_token,omitempty"`
// (optional) Settings for the Comet.ml integration, if it should be used for this experiment.
Comet *ExperimentCometSpec `json:"comet,omitempty"`
}
// ExperimentNode describes a task along with its links within an experiment.
type ExperimentNode struct {
Name string `json:"name,omitempty"` // optional node name; see DisplayID
TaskID string `json:"task_id"`
ResultID string `json:"result_id"`
Status TaskStatus `json:"status"`
// CometURL links to the Comet.ml page for this node — presumably set only
// when the Comet integration is enabled (TODO confirm).
CometURL string `json:"cometUrl,omitempty"`
// Identifiers of tasks dependent on this node within the containing experiment.
ChildTasks []string `json:"child_task_ids"`
// Identifiers of task on which this node depends within the containing experiment.
ParentTasks []string `json:"parent_task_ids"`
}
// DisplayID returns the most human-friendly name available for an experiment
// node while guaranteeing that it's unique within the context of its experiment.
func (n *ExperimentNode) DisplayID() string {
	// Unnamed nodes fall back to the task ID.
	if n.Name == "" {
		return n.TaskID
	}
	return n.Name
}
// ExperimentTaskSpec describes a task spec with optional dependencies on other
// tasks within an experiment. Tasks refer to each other by the Name field.
type ExperimentTaskSpec struct {
// (optional) Name of the task node, which need only be defined if
// dependencies reference it.
Name string `json:"name,omitempty"`
// (required) Specification describing the task to run.
Spec TaskSpec `json:"spec"`
// (optional) Tasks on which this task depends. Mounts will be applied, in
// the order defined here, after existing mounts in the task spec.
// Each dependency references its parent by that task's Name.
DependsOn []TaskDependency `json:"depends_on,omitempty"`
}
// TaskDependency describes a single "edge" in a task dependency graph.
type TaskDependency struct {
// (required) Name of the task on which the referencing task depends.
ParentName string `json:"parent_name"`
// (optional) Path in the child task to which parent results will be mounted.
// If absent, this is treated as an order-only dependency (ordering without
// a result mount).
ContainerPath string `json:"container_path,omitempty"`
}
// ExperimentCometSpec configures the optional Comet.ml integration for an
// experiment.
type ExperimentCometSpec struct {
// (required) Whether or not to enable the integration for this experiment.
Enable bool `json:"enable"`
// (optional) The name of the experiment (shown in the Comet.ml interface)
ExperimentName string `json:"experiment,omitempty"`
// (optional) The name of the Comet.ml project for this experiment.
ProjectName string `json:"project,omitempty"`
// (optional) The name of the Comet.ml workspace for this experiment.
WorkspaceName string `json:"workspace,omitempty"`
}
// ExperimentPatchSpec describes a patch to apply to an experiment's editable
// fields. Only one field may be set in a single request.
// Pointer fields distinguish "leave unchanged" (nil) from "set to empty".
type ExperimentPatchSpec struct {
// (optional) Unqualified name to assign to the experiment. It is considered
// a collision error if another experiment has the same creator and name.
Name *string `json:"name,omitempty"`
// (optional) Description to assign to the experiment or empty string to
// delete an existing description.
Description *string `json:"description,omitempty"`
}
|
#!/bin/bash
# ftrc.sh
# Simple wrapper to use kernel ftrace facility.
# Prompts for a tracer (and, for function_graph, an optional function list),
# traces for the requested number of seconds and saves the log to
# /tmp/ftrace_log.txt.

# On interrupt: stop tracing and restore the caller's working directory.
trap 'echo 0 > ${PFX}/tracing_on ; popd > /dev/null' INT QUIT

name=$(basename "$0")
PFX=/sys/kernel/debug/tracing
TRACE_INTERVAL=5

if [ "$(id -u)" -ne 0 ]; then
  echo "$name: need to be root."
  exit 1
fi

if [ $# -ne 1 ]; then
  echo "Usage: $name ftrace-interval-in-sec"
  exit 1
fi
# Robustness fix: reject non-numeric intervals instead of passing garbage to sleep.
case "$1" in
  ''|*[!0-9]*)
    echo "$name: interval must be a whole number of seconds."
    exit 1
    ;;
esac
TRACE_INTERVAL=$1

# Robustness fix: fail early when the ftrace debugfs interface is unavailable.
if [ ! -d "${PFX}" ]; then
  echo "$name: ${PFX} not found (is debugfs mounted and ftrace enabled?)."
  exit 1
fi

pushd . >/dev/null
cd ${PFX}

echo "Select tracer from the list:"
cat ${PFX}/available_tracers
read -r tracer
echo "tracer = $tracer"
# Resolves the old TODO: validate the chosen tracer against the kernel's list.
if ! grep -qw -- "${tracer}" ${PFX}/available_tracers; then
  echo "$name: \"${tracer}\" is not an available tracer."
  popd > /dev/null
  exit 1
fi
echo "${tracer}" > ${PFX}/current_tracer

echo -n "[current_tracer] Current Tracer is: "
cat ${PFX}/current_tracer
echo "[trace_options] Current Trace Options are: "
cat ${PFX}/trace_options
echo

# Quoting fix: the original unquoted test broke if the tracer name was empty.
if [ "${tracer}" = "function_graph" ]; then
  echo "[set_graph_function] Current function(s) traced are: "
  cat /sys/kernel/debug/tracing/set_graph_function
  echo "Type in your own functions (space-separated); [Enter] keeps default: "
  read -r graph_funcs
  if [ -n "${graph_funcs}" ]; then
    # Unquoted expansion is deliberate: split the input on spaces.
    for func in ${graph_funcs}
    do
      echo "function: $func"
      echo "$func" >> /sys/kernel/debug/tracing/set_graph_function
    done
    echo
    echo "New graph-traced functions are:"
    cat /sys/kernel/debug/tracing/set_graph_function
  fi
fi

echo -n "Confirm Trace options above and START trace? [Y/n]: "
read -r reply
if [[ $reply == "n" ]] || [[ $reply == "N" ]]; then
  echo "$name: aborting now..."
  exit 1
fi

echo
echo "Will now ftrace for $TRACE_INTERVAL seconds..."
echo "To manually Stop, ^C"
echo
echo "Starting trace now..."
echo 1 > ${PFX}/tracing_on
sleep $TRACE_INTERVAL
echo 0 > ${PFX}/tracing_on

# Snapshot the trace buffer for offline inspection.
cat ${PFX}/trace > /tmp/ftrace_log.txt
popd > /dev/null
|
# `Faker().breakingBad`
[Dictionary file](../src/main/resources/locales/en/breaking_bad.yml)
Available Functions:
```kotlin
Faker().breakingBad.character() // => Walter White
Faker().breakingBad.episode() // => Pilot
```
|
(function (window) {
    // 'use strict'; // Strict mode not enabled yet; revisit when the code is ready for it.
    // Your starting point. Enjoy the ride!
    // Plain XMLHttpRequest equivalent of what axios does under the hood:
    // var xhr = new XMLHttpRequest()
    // xhr.open('get','http://localhost:8080/todos/getDataAll')
    // xhr.send()
    // xhr.onreadystatechange = function(){
    //     if(xhr.readyState === 4 && xhr.status === 200){
    //         console.log(xhr.responseText)
    //     }
    // }
    // The template engine overwrites the target markup with rendered template
    // content: it solves "what to render"; where to mount it is up to us.
    axios.defaults.baseURL = "http://localhost:8080/todos/"
    // Initial render on page load.
    getListDetail()
function getListDetail() {
axios({
url: 'getDataAll'
}).then(res => {
const { data, meta } = res.data
if (meta.code === 200) {
//渲染页面
// console.log(data)
//判断hash变化在渲染页面之前控制数据
//通过window.location可以获取有关网页url相关的信息
const url = window.location.hash
const active = data.filter(item => { return item.isFinish === '0' })
const completed = data.filter(item => { return item.isFinish === '1' })
switch (url) {
case "":
case '#/': renderPage(data, total = data, url)
break
case '#/active': renderPage(active, data, url)
break
case '#/completed': renderPage(completed, data, url)
break
}
}
})
}
//使用total判断footer的隐藏,但是#/的情况下也需要穿入参数判断,所以给其默认值为temporarily。
function renderPage(temporarily, total, url) {
const todos = document.querySelector('.todoapp');
const noFinish = total.filter(item => { return item.isFinish === '0' }).length
const isFinish = total.filter(item => { return item.isFinish === '1' }).length
const html = template('tpl-todos', { list: temporarily, total, noFinish, isFinish })
todos.innerHTML = html
//在页面加载之后运行添加一个todo函数是不行的因为它获取的是页面原有的元素,而并不是由模版引擎渲染之后的页面元素。
addTodo()
delTodo()
modify()
showEdit(temporarily)
delCompleted(temporarily)
selectAll(temporarily)
// changeAll(data)
footerChange(url)
}
//添加一个todo
function addTodo() {
//js中事件是元素的一个属性
// document.querySelector('.new-todo').onkeyup= function (e){
// console.log(e)
// }
const addTodo = document.querySelector('.new-todo')
addTodo.addEventListener('keyup', (e) => {
//此处因为使用箭头函数所以this指向window,我们用元素本身代替也没问题的
if (e.keyCode === 13 && addTodo.value.trim() !== '') {
let data = {
content: addTodo.value.trim(), // 必须携带,新增 todo 的内容
isFinish: 0 // 必须携带,新增 todo 的状态
}
axios.post('addTodo', data).then(res => {
const { meta } = res.data
if (meta.code === 201) {
getListDetail()
}
})
}
})
}
//footer角标切换
function footerChange(url) {
const arr = document.querySelectorAll('.filters li > a ')
if (arr.length === 0) return
arr.forEach(item => { item.classList.remove('selected') })
switch (url) {
case '':
case '#/': arr[0].classList.add('selected')
break
case '#/active': arr[1].classList.add('selected')
break
case "#/completed": arr[2].classList.add('selected')
}
}
//删除一个todo
function delTodo() {
const delTodo = document.querySelectorAll('.destroy')
// console.log(delTodo)
delTodo.forEach(function (item) {
item.addEventListener('click', function (e) {
const id = this.dataset.id
if (confirm('确定要删除?')) {
axios.delete(`delTodo?id=${id}`).then(res => {
const { meta } = res.data
if (meta.code === 202) {
getListDetail()
}
})
}
})
})
}
//修改单条状态
function modify() {
const toggle = document.querySelectorAll('.toggle')
toggle.forEach(item => {
item.addEventListener('change', function () {
const data = {
id: this.dataset.id,
isFinish: this.checked ? '1' : '0'
}
axios.put('changeStatu', data).then(res => {
const { meta } = res.data
if (meta.code === 203) {
getListDetail()
}
})
})
})
}
//编辑一条todo
function showEdit(data) {
const lis = document.querySelectorAll('.todo-list li')
//显示编辑栏
lis.forEach((item, index) => {
item.addEventListener('dblclick', function () {
lis.forEach(item => {
item.classList.remove('editing')
})
this.classList.add('editing')
edit(index, data[index], item)
})
})
}
//为了拿到删除的todo我们通过回调函数的方式传递参数
//编辑操作
function edit(index, data, todo) {
const edit = document.querySelectorAll('.edit')[index]
edit.focus()
edit.value = data.content
const id = todo.dataset.id
edit.addEventListener('keyup', function (e) {
if (e.keyCode === 13) {
const value = {
content: this.value,
id
}
//如果修改后为空,删除此todo
if (!this.value) {
axios.delete(`delTodo?id=${id}`).then(res => {
const { meta } = res.data
if (meta.code === 202) {
getListDetail()
}
})
return
}
//修改后数据相同取消编辑样式
if (this.value === data.content) {
todo.classList.remove('editing')
return
}
//和原数据不同时发起修改请求
axios.put('changeContent', value).then(res => {
const { meta } = res.data
if (meta.code === 203) {
getListDetail()
}
})
}
})
}
//删除所有已经完成的todo
function delCompleted(data) {
const completed = document.querySelector('.clear-completed')
if (!completed) return
const arr = []
data.filter(item => {
if (item.isFinish === '1') {
arr.push(item.id)
}
})
completed.addEventListener('click', function (e) {
axios.delete(`/delAll?id=${arr.toString()}`).then(res => {
const { meta } = res.data
if (meta.code === 202) {
getListDetail()
}
})
})
}
//全选按钮
function selectAll(data) {
const toggle_all = document.querySelector('.toggle-all')
toggle_all.addEventListener('click', function (e) {
// console.log(this.checked)
const noFinish = data.filter(item => { return item.isFinish === "0" }).length
const isFinish = data.filter(item => { return item.isFinish === "1" }).length
if (isFinish === data.length) {
getSelAll(false)
return
}
if (noFinish <= data.length) {
getSelAll(true)
return
}
})
}
//请求函数
function getSelAll(bool) {
axios.get(`changeStatusAll?isFinish=${bool}`).then(res => {
const { meta } = res.data
if (meta.code === 203) {
getListDetail()
}
})
}
    // (A change-event based toggle-all variant was prototyped here and
    // removed; selectAll() above covers that behavior.)
    // Re-render whenever the hash changes (filter navigation).
    window.addEventListener('hashchange', (e) => {
        getListDetail()
    })
})(window);
|
package com.github.antonpopoff.colorwheel.extensions
import android.os.Build
import android.os.Parcel
/**
 * Writes a boolean to this [Parcel] on every supported API level.
 *
 * [Parcel.writeBoolean] only exists from API 29 (Q); older platforms store the
 * value as an int (1 = true, 0 = false), matching [readBooleanCompat].
 */
internal fun Parcel.writeBooleanCompat(value: Boolean) {
    when {
        Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q -> writeBoolean(value)
        value -> writeInt(1)
        else -> writeInt(0)
    }
}
/**
 * Reads a boolean previously written with [writeBooleanCompat], handling both
 * the native (API 29+) and the int-backed (pre-Q) encodings.
 */
internal fun Parcel.readBooleanCompat(): Boolean =
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) readBoolean() else readInt() == 1
|
/*!
* CanJS - 2.3.27
* http://canjs.com/
* Copyright (c) 2016 Bitovi
* Thu, 15 Sep 2016 21:14:18 GMT
* Licensed MIT
*/
/*can@2.3.27#construct/super/super*/
// Vendored CanJS module: adds `this._super` support to can.Construct so an
// overriding method can call the base class's implementation of the same name.
steal('can/util', 'can/construct', function (can, Construct) {
    // fnTest checks whether Function.prototype.toString exposes source code.
    // If it does, only functions that actually mention `_super` get wrapped
    // (/\b_super\b/); otherwise every function is wrapped to be safe (/.*/).
    var isFunction = can.isFunction, fnTest = /xyz/.test(function () {
            return this.xyz;
        }) ? /\b_super\b/ : /.*/, getset = [
            'get',
            'set'
        ], getSuper = function (base, name, fn) {
            // Wrap fn so `this._super` points at base[name] for the duration
            // of the call, restoring the previous value afterwards.
            return function () {
                var tmp = this._super, ret;
                this._super = base[name];
                ret = fn.apply(this, arguments);
                this._super = tmp;
                return ret;
            };
        };
    // Define `name` on addTo, wiring get/set accessors so they can reach the
    // base class's accessor via `this._super`; missing accessors fall back to
    // the base descriptor's.
    can.Construct._defineProperty = function (addTo, base, name, descriptor) {
        var _super = Object.getOwnPropertyDescriptor(base, name);
        if (_super) {
            can.each(getset, function (method) {
                if (isFunction(_super[method]) && isFunction(descriptor[method])) {
                    descriptor[method] = getSuper(_super, method, descriptor[method]);
                } else if (!isFunction(descriptor[method])) {
                    descriptor[method] = _super[method];
                }
            });
        }
        Object.defineProperty(addTo, name, descriptor);
    };
    // Assign addTo[name] = val, wrapping val for _super access only when both
    // the new and base values are functions and val references _super.
    can.Construct._overwrite = function (addTo, base, name, val) {
        addTo[name] = isFunction(val) && isFunction(base[name]) && fnTest.test(val) ? getSuper(base, name, val) : val;
    };
    return can;
});
|
from django.conf import settings
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.shortcuts import get_object_or_404
from django.views.generic import DetailView
from django_filters.views import FilterView
from django_tables2.views import SingleTableView
from sidekick.filters import (
LogicalSystemFilterSet, RoutingTypeFilterSet,
NetworkServiceTypeFilterSet, NetworkServiceFilterSet,
NetworkServiceGroupFilterSet,
)
from sidekick.tables import (
IPPrefixTable,
LogicalSystemTable, RoutingTypeTable,
NetworkServiceTypeTable, NetworkServiceTable,
NetworkServiceGroupTable,
)
from sidekick.models import (
LogicalSystem, RoutingType,
NetworkServiceType,
NetworkService,
NetworkServiceGroup,
)
from sidekick.utils import (
get_all_ip_prefixes,
get_graphite_service_graph,
)
# IP Prefix Index
class IPPrefixIndexView(PermissionRequiredMixin, SingleTableView):
    """Table of every known IP prefix together with its owning member."""

    permission_required = 'sidekick.view_ipprefix'
    model = NetworkService
    context_object_name = 'ns'
    template_name = 'sidekick/networkservice/ipprefix_index.html'

    def get_context_data(self, **kwargs):
        """Flatten the member -> prefixes mapping into one table row per prefix."""
        context = super().get_context_data(**kwargs)
        # The member-id key was unused; each value already carries the member
        # object alongside its prefixes, so iterate over values() directly.
        prefixes = [
            {'prefix': prefix, 'member': data['member']}
            for data in get_all_ip_prefixes().values()
            for prefix in data['prefixes']
        ]
        context['table'] = IPPrefixTable(prefixes)
        return context
# Logical System Index
class LogicalSystemIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable, sortable table listing all logical systems."""
    permission_required = 'sidekick.view_logicalsystem'
    model = LogicalSystem
    table_class = LogicalSystemTable
    filterset_class = LogicalSystemFilterSet
    template_name = 'sidekick/networkservice/logicalsystem_index.html'
# Logical System Details
class LogicalSystemDetailView(PermissionRequiredMixin, DetailView):
    """Detail page for one logical system plus the services that use it."""
    permission_required = 'sidekick.view_logicalsystem'
    model = LogicalSystem
    template_name = 'sidekick/networkservice/logicalsystem.html'

    def get_context_data(self, **kwargs):
        """Add the logical system and a table of its network services."""
        context = super().get_context_data(**kwargs)
        logical_system = get_object_or_404(LogicalSystem, slug=self.kwargs['slug'])
        context['logical_system'] = logical_system
        # Services whose L3 configuration references this logical system.
        table = NetworkServiceTable(NetworkService.objects.filter(
            network_service_devices__network_service_l3__logical_system=logical_system.id))
        context['table'] = table
        return context
# Routing Type Index
class RoutingTypeIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable, sortable table listing all routing types."""
    permission_required = 'sidekick.view_routingtype'
    model = RoutingType
    table_class = RoutingTypeTable
    filterset_class = RoutingTypeFilterSet
    template_name = 'sidekick/networkservice/routingtype_index.html'
# Routing Type Details
class RoutingTypeDetailView(PermissionRequiredMixin, DetailView):
    """Detail page for one routing type plus the services that use it."""
    permission_required = 'sidekick.view_routingtype'
    model = RoutingType
    template_name = 'sidekick/networkservice/routingtype.html'

    def get_context_data(self, **kwargs):
        """Add the routing type and a table of its network services."""
        context = super().get_context_data(**kwargs)
        routing_type = get_object_or_404(RoutingType, slug=self.kwargs['slug'])
        context['routing_type'] = routing_type
        # Services whose L3 configuration references this routing type.
        table = NetworkServiceTable(NetworkService.objects.filter(
            network_service_devices__network_service_l3__routing_type=routing_type.id))
        context['table'] = table
        return context
# Network Service Type Index
class NetworkServiceTypeIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable, sortable table listing all network service types."""
    permission_required = 'sidekick.view_networkservicetype'
    model = NetworkServiceType
    table_class = NetworkServiceTypeTable
    filterset_class = NetworkServiceTypeFilterSet
    template_name = 'sidekick/networkservice/networkservicetype_index.html'
# Network Service Type Details
class NetworkServiceTypeDetailView(PermissionRequiredMixin, DetailView):
    """Detail page for one service type plus the services of that type."""
    permission_required = 'sidekick.view_networkservicetype'
    model = NetworkServiceType
    template_name = 'sidekick/networkservice/networkservicetype.html'

    def get_context_data(self, **kwargs):
        """Add the service type and a table of its network services."""
        context = super().get_context_data(**kwargs)
        nst = get_object_or_404(NetworkServiceType, slug=self.kwargs['slug'])
        context['nst'] = nst
        table = NetworkServiceTable(NetworkService.objects.filter(
            network_service_type=nst.id))
        context['table'] = table
        return context
# Network Service Index
class NetworkServiceIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable, sortable table listing all network services."""
    permission_required = 'sidekick.view_networkservice'
    model = NetworkService
    table_class = NetworkServiceTable
    filterset_class = NetworkServiceFilterSet
    template_name = 'sidekick/networkservice/networkservice_index.html'
# Network Service Details
class NetworkServiceDetailView(PermissionRequiredMixin, DetailView):
    """Detail page for one network service, including its usage graph."""
    permission_required = 'sidekick.view_networkservice'
    model = NetworkService
    context_object_name = 'ns'
    template_name = 'sidekick/networkservice/networkservice.html'

    def get_context_data(self, **kwargs):
        """Add Graphite graph data for this service to the context."""
        context = super().get_context_data(**kwargs)
        # Use get_object_or_404 (consistent with the other detail views) so a
        # bad pk produces a 404 instead of an unhandled DoesNotExist (500).
        ns = get_object_or_404(NetworkService, pk=self.kwargs['pk'])
        graphite_render_host = settings.PLUGINS_CONFIG['sidekick'].get('graphite_render_host', None)
        context['graph_data'] = get_graphite_service_graph(ns, graphite_render_host)
        return context
# Network Service Group Index
class NetworkServiceGroupIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable, sortable table listing all network service groups."""
    permission_required = 'sidekick.view_networkservicegroup'
    model = NetworkServiceGroup
    table_class = NetworkServiceGroupTable
    filterset_class = NetworkServiceGroupFilterSet
    template_name = 'sidekick/networkservice/networkservicegroup_index.html'
# Network Service Group Details
class NetworkServiceGroupDetailView(PermissionRequiredMixin, DetailView):
    """Detail page for one service group plus its member services."""
    permission_required = 'sidekick.view_networkservicegroup'
    model = NetworkServiceGroup
    context_object_name = 'nsg'
    template_name = 'sidekick/networkservice/networkservicegroup.html'

    def get_context_data(self, **kwargs):
        """Add the group and a table of the services it contains."""
        context = super().get_context_data(**kwargs)
        nsg = get_object_or_404(NetworkServiceGroup, pk=self.kwargs['pk'])
        context['nsg'] = nsg
        table = NetworkServiceTable(NetworkService.objects.filter(
            pk__in=nsg.network_services.all()))
        context['table'] = table
        return context
|
# zergtel-android
Port of ZTVDC to android
Deprecated — practically no features are implemented at the moment, and probably none ever will be.
See [https://github.com/s-zeng/ZTVDC](https://github.com/s-zeng/ZTVDC) instead
|
import { Component, OnInit } from '@angular/core';
import { Product } from '../../../model/beans/product/product.model';
import { ProductService } from '../../../model/services/product/product.service';
@Component({
    selector: 'bp-landing-page-jewelery-component',
    templateUrl: './jewelery.component.html'
})
export class LandingPageJeweleryComponent implements OnInit {

    /** Translation key prefix used by this component's template. */
    public pretrad: string;
    /** REST base URL (API_URL with its trailing slash stripped). */
    public urlRest: string;
    /** Featured products, loaded by reference on init. */
    public p1: Product;
    public p2: Product;

    constructor (
        private productService: ProductService
    ) {
        this.pretrad = 'MODULES.LANDING-PAGE.JEWELERY.';
        this.urlRest = process.env.API_URL.slice(0, -1);
        this.p1 = new Product();
        this.p2 = new Product();
    }

    /** Loads the two featured products shown on the landing page. */
    public ngOnInit(): void {
        this.loadProduct('P1', (p) => this.p1 = p);
        this.loadProduct('P2', (p) => this.p2 = p);
    }

    /**
     * Fetches one product by reference and hands it to `assign` on success;
     * failures are logged. Factored out of ngOnInit to remove the duplicated
     * then/error blocks.
     */
    private loadProduct(reference: string, assign: (p: Product) => void): void {
        this.productService.getByReference(reference, Product).then(
            (response) => assign(response),
            (error) => console.error(error)
        );
    }
}
|
//=========================================================================
// Copyright (C) 2012 The Elastos Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//=========================================================================
#include "elastos/droid/server/pm/CLauncherAppsImpl.h"
#include "Elastos.Droid.Net.h"
#include "Elastos.Droid.Provider.h"
#include "elastos/droid/os/Binder.h"
#include "elastos/droid/os/UserHandle.h"
#include "elastos/droid/app/AppGlobals.h"
#include <elastos/utility/logging/Logger.h>
#include <elastos/core/AutoLock.h>
using Elastos::Core::AutoLock;
using Elastos::Droid::App::AppGlobals;
using Elastos::Droid::Content::CIntent;
using Elastos::Droid::Content::Pm::IPackageInfo;
using Elastos::Droid::Content::Pm::IIPackageManager;
using Elastos::Droid::Content::Pm::IApplicationInfo;
using Elastos::Droid::Content::Pm::IActivityInfo;
using Elastos::Droid::Content::Pm::IUserInfo;
using Elastos::Droid::Content::Pm::EIID_IILauncherApps;
using Elastos::Droid::Content::Pm::IPackageItemInfo;
using Elastos::Droid::Content::Pm::IComponentInfo;
using Elastos::Droid::Net::IUriHelper;
using Elastos::Droid::Net::CUriHelper;
using Elastos::Droid::Net::IUri;
using Elastos::Droid::Os::Binder;
using Elastos::Droid::Os::UserHandle;
using Elastos::Droid::Os::CUserHandle;
using Elastos::Droid::Os::EIID_IBinder;
using Elastos::Droid::Provider::ISettings;
using Elastos::Utility::Logging::Logger;
using Elastos::Utility::IArrayList;
using Elastos::Utility::CArrayList;
using Elastos::Utility::IIterator;
namespace Elastos {
namespace Droid {
namespace Server {
namespace Pm {
//==============================================================================
// CLauncherAppsImpl::MyPackageMonitor
//==============================================================================

// Returns TRUE when a package-change message originating from `user` should be
// delivered to a listener registered under `listeningUser`: either they are
// the same user, or they belong to the same profile group and the originating
// user is enabled. `debugMsg` names the callback for the DEBUG logs.
Boolean CLauncherAppsImpl::MyPackageMonitor::IsEnabledProfileOf(
    /* [in] */ IUserHandle* user,
    /* [in] */ IUserHandle* listeningUser,
    /* [in] */ const String& debugMsg)
{
    Int32 id, lisId;
    user->GetIdentifier(&id);
    listeningUser->GetIdentifier(&lisId);
    if (id == lisId) {
        if (DEBUG) Logger::D(TAG, "Delivering msg to same user %s", debugMsg.string());
        return TRUE;
    }
    // Query user info with the caller's identity cleared; every exit path
    // below restores it (the original Java version used try/finally).
    Int64 ident = Binder::ClearCallingIdentity();
    AutoPtr<IUserInfo> userInfo, listeningUserInfo;
    if (FAILED(mHost->mUm->GetUserInfo(id, (IUserInfo**)&userInfo))) {
        Binder::RestoreCallingIdentity(ident);
        return FALSE;
    }
    if (FAILED(mHost->mUm->GetUserInfo(lisId, (IUserInfo**)&listeningUserInfo))) {
        Binder::RestoreCallingIdentity(ident);
        return FALSE;
    }
    Int32 groupId, lisGroupId;
    Boolean isEnabled;
    // Deliver only when both users exist, share a real profile group, and the
    // originating user is enabled.
    if (userInfo == NULL || listeningUserInfo == NULL
        || (userInfo->GetProfileGroupId(&groupId), groupId == IUserInfo::NO_PROFILE_GROUP_ID)
        || (listeningUserInfo->GetProfileGroupId(&lisGroupId), groupId != lisGroupId)
        || (userInfo->IsEnabled(&isEnabled), !isEnabled)) {
        if (DEBUG) {
            Logger::D(TAG, "Not delivering msg from %p to %p:%s", user, listeningUser, debugMsg.string());
        }
        Binder::RestoreCallingIdentity(ident);
        return FALSE;
    }
    else {
        if (DEBUG) {
            Logger::D(TAG, "Delivering msg from %p to %p:%s", user, listeningUser, debugMsg.string());
        }
        Binder::RestoreCallingIdentity(ident);
        return TRUE;
    }
}
// Fans out a package-added event to every registered listener whose user
// profile is related to (and enabled for) the changing user.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackageAdded(
    /* [in] */ const String& packageName,
    /* [in] */ Int32 uid)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        // The broadcast cookie is the user handle the listener registered under.
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackageAdded"))) continue;
        // A failed (remote) callback is logged and skipped, never propagated.
        if (FAILED(listener->OnPackageAdded(user, packageName))) {
            Logger::D(TAG, "Callback failed ");
        }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackageAdded(packageName, uid);
}
// Fans out a package-removed event; same delivery rules as OnPackageAdded.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackageRemoved(
    /* [in] */ const String& packageName,
    /* [in] */ Int32 uid)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackageRemoved"))) continue;
        // Log-and-continue on remote callback failure.
        if (FAILED(listener->OnPackageRemoved(user, packageName))) {
            Logger::D(TAG, "Callback failed ");
        }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackageRemoved(packageName, uid);
}
// Fans out a package-modified event; listeners receive it as OnPackageChanged.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackageModified(
    /* [in] */ const String& packageName)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackageModified"))) continue;
        // Log-and-continue on remote callback failure.
        if (FAILED(listener->OnPackageChanged(user, packageName))) {
            Logger::D(TAG, "Callback failed ");
        }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackageModified(packageName);
}
// Fans out a packages-available event (e.g. external storage mounted),
// forwarding whether the packages are being replaced.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackagesAvailable(
    /* [in] */ ArrayOf<String>* packages)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackagesAvailable"))) continue;
        // Log-and-continue on remote callback failure.
        Boolean isReplacing;
        IsReplacing(&isReplacing);
        if (FAILED(listener->OnPackagesAvailable(user, packages, isReplacing))) {
            Logger::D(TAG, "Callback failed ");
        }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackagesAvailable(packages);
}
// Fans out a packages-unavailable event; mirror image of OnPackagesAvailable.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackagesUnavailable(
    /* [in] */ ArrayOf<String>* packages)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackagesUnavailable"))) continue;
        // Log-and-continue on remote callback failure.
        Boolean isReplacing;
        IsReplacing(&isReplacing);
        if (FAILED(listener->OnPackagesUnavailable(user, packages, isReplacing))) {
            Logger::D(TAG, "Callback failed ");
        }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackagesUnavailable(packages);
}
//==============================================================================
// CLauncherAppsImpl::PackageCallbackList
//==============================================================================

// Invoked when a registered listener's process dies; prunes the package
// watcher if no listeners remain.
ECode CLauncherAppsImpl::PackageCallbackList::OnCallbackDied(
    /* [in] */ IInterface* callback,
    /* [in] */ IInterface* cookie)
{
    mHost->CheckCallbackCount();
    return NOERROR;
}
//==============================================================================
// CLauncherAppsImpl
//==============================================================================

const Boolean CLauncherAppsImpl::DEBUG;
const String CLauncherAppsImpl::TAG("CLauncherAppsImpl");

CLauncherAppsImpl::CLauncherAppsImpl()
{
    // Both helper objects hold a back-reference to this service.
    mListeners = new PackageCallbackList(this);
    mPackageMonitor = new MyPackageMonitor(this);
}

CAR_INTERFACE_IMPL_2(CLauncherAppsImpl, Object, IILauncherApps, IBinder)
CAR_OBJECT_IMPL(CLauncherAppsImpl)
// Second-phase constructor: caches the package manager and the user manager
// system service from the supplied context.
ECode CLauncherAppsImpl::constructor(
    /* [in] */ IContext* ctx)
{
    mContext = ctx;
    mContext->GetPackageManager((IPackageManager**)&mPm);
    AutoPtr<IInterface> service;
    mContext->GetSystemService(IContext::USER_SERVICE, (IInterface**)&service);
    mUm = IUserManager::Probe(service);
    return NOERROR;
}
// Registers a listener for app-change callbacks, keyed (via the broadcast
// cookie) by the calling user. Package broadcast monitoring starts lazily
// when the first listener is added.
ECode CLauncherAppsImpl::AddOnAppsChangedListener(
    /* [in] */ IOnAppsChangedListener* listener)
{
    { AutoLock syncLock(mListenersLock);
        if (DEBUG) {
            Logger::D(TAG, "Adding listener from %p", Binder::GetCallingUserHandle().Get());
        }
        Int32 count;
        if (mListeners->GetRegisteredCallbackCount(&count), count == 0) {
            if (DEBUG) {
                Logger::D(TAG, "Starting package monitoring");
            }
            StartWatchingPackageBroadcasts();
        }
        Boolean result;
        // Unregister first so re-adding the same listener cannot duplicate it.
        FAIL_RETURN(mListeners->Unregister(listener, &result))
        AutoPtr<IUserHandle> handle = Binder::GetCallingUserHandle();
        FAIL_RETURN(mListeners->Register(listener, handle, &result))
    }
    return NOERROR;
}
// Unregisters a listener; package broadcast monitoring stops once the last
// listener is gone.
ECode CLauncherAppsImpl::RemoveOnAppsChangedListener(
    /* [in] */ IOnAppsChangedListener* listener)
{
    { AutoLock syncLock(mListenersLock);
        if (DEBUG) {
            Logger::D(TAG, "Removing listener from %p", Binder::GetCallingUserHandle().Get());
        }
        Boolean result;
        FAIL_RETURN(mListeners->Unregister(listener, &result))
        Int32 count;
        if (mListeners->GetRegisteredCallbackCount(&count), count == 0) {
            StopWatchingPackageBroadcasts();
        }
    }
    return NOERROR;
}
// Registers the package monitor for all users, with externally-visible
// changes only (TRUE).
void CLauncherAppsImpl::StartWatchingPackageBroadcasts()
{
    mPackageMonitor->Register(mContext, NULL, UserHandle::ALL, TRUE);
}
// Unregisters the package monitor (no listeners remain).
void CLauncherAppsImpl::StopWatchingPackageBroadcasts()
{
    if (DEBUG) {
        Logger::D(TAG, "Stopped watching for packages");
    }
    mPackageMonitor->Unregister();
}
// Re-counts registered listeners (e.g. after one dies) and tears down the
// package watcher when the count reaches zero.
void CLauncherAppsImpl::CheckCallbackCount()
{
    { AutoLock syncLock(mListenersLock);
        Int32 count;
        mListeners->GetRegisteredCallbackCount(&count);
        if (DEBUG) {
            Logger::D(TAG, "Callback count = %d", count);
        }
        if (count == 0) {
            StopWatchingPackageBroadcasts();
        }
    }
}
// Verifies the calling user may act on behalf of `userToCheck`: same user, or
// both members of the same profile group. Returns E_SECURITY_EXCEPTION
// otherwise. NOTE(review): `message` is currently unused here (the Java
// original passed it to the SecurityException).
ECode CLauncherAppsImpl::EnsureInUserProfiles(
    /* [in] */ IUserHandle* userToCheck,
    /* [in] */ const String& message)
{
    Int32 callingUserId = UserHandle::GetCallingUserId();
    Int32 targetUserId;
    userToCheck->GetIdentifier(&targetUserId);
    if (targetUserId == callingUserId) return NOERROR;
    // Look up user info with the caller's identity cleared; both exit paths
    // restore it (the Java original used try/finally).
    Int64 ident = Binder::ClearCallingIdentity();
    AutoPtr<IUserInfo> callingUserInfo;
    mUm->GetUserInfo(callingUserId, (IUserInfo**)&callingUserInfo);
    AutoPtr<IUserInfo> targetUserInfo;
    mUm->GetUserInfo(targetUserId, (IUserInfo**)&targetUserInfo);
    Int32 targetId, callingId;
    if (targetUserInfo == NULL
        || (targetUserInfo->GetProfileGroupId(&targetId), targetId == IUserInfo::NO_PROFILE_GROUP_ID)
        || (callingUserInfo->GetProfileGroupId(&callingId), targetId != callingId)) {
        Binder::RestoreCallingIdentity(ident);
        return E_SECURITY_EXCEPTION;
    }
    Binder::RestoreCallingIdentity(ident);
    return NOERROR;
}
// Returns whether `user` exists and is currently enabled, querying with the
// caller's identity cleared.
Boolean CLauncherAppsImpl::IsUserEnabled(
    /* [in] */ IUserHandle* user)
{
    Int64 ident = Binder::ClearCallingIdentity();
    Int32 id;
    user->GetIdentifier(&id);
    AutoPtr<IUserInfo> targetUserInfo;
    mUm->GetUserInfo(id, (IUserInfo**)&targetUserInfo);
    Binder::RestoreCallingIdentity(ident);
    Boolean isEnabled;
    return targetUserInfo != NULL && (targetUserInfo->IsEnabled(&isEnabled), isEnabled);
}
// Returns the MAIN/LAUNCHER activities of `packageName` for `user`.
// Fails with a security error for unrelated profiles; a disabled profile
// yields an empty list rather than an error.
ECode CLauncherAppsImpl::GetLauncherActivities(
    /* [in] */ const String& packageName,
    /* [in] */ IUserHandle* user,
    /* [out] */ IList** list)
{
    VALIDATE_NOT_NULL(list)
    *list = NULL;
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user,
        String("Cannot retrieve activities for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        return CArrayList::New(list);
    }
    AutoPtr<IIntent> mainIntent;
    CIntent::New(IIntent::ACTION_MAIN, NULL, (IIntent**)&mainIntent);
    mainIntent->AddCategory(IIntent::CATEGORY_LAUNCHER);
    mainIntent->SetPackage(packageName);
    // Query as the target user with the caller's identity cleared; identity
    // is restored before returning (Java original used try/finally).
    Int64 ident = Binder::ClearCallingIdentity();
    Int32 id;
    user->GetIdentifier(&id);
    ECode ec = mPm->QueryIntentActivitiesAsUser(mainIntent, 0 /* flags */, id, list);
    Binder::RestoreCallingIdentity(ident);
    return ec;
}
// Resolves `intent` to an activity on behalf of `user`. A disabled profile
// resolves to NULL; an unrelated profile is a security error.
ECode CLauncherAppsImpl::ResolveActivity(
    /* [in] */ IIntent* intent,
    /* [in] */ IUserHandle* user,
    /* [out] */ IResolveInfo** info)
{
    VALIDATE_NOT_NULL(info)
    *info = NULL;
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user,
        String("Cannot resolve activity for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        return NOERROR;
    }
    // Resolve with the caller's identity cleared; restored before returning.
    Int64 ident = Binder::ClearCallingIdentity();
    Int32 id;
    user->GetIdentifier(&id);
    ECode ec = mPm->ResolveActivityAsUser(intent, 0, id, info);
    Binder::RestoreCallingIdentity(ident);
    return ec;
}
// Reports whether `packageName` exists and its application is enabled for
// `user`. A disabled profile yields FALSE; an unrelated profile is a
// security error.
ECode CLauncherAppsImpl::IsPackageEnabled(
    /* [in] */ const String& packageName,
    /* [in] */ IUserHandle* user,
    /* [out] */ Boolean* result)
{
    VALIDATE_NOT_NULL(result)
    *result = FALSE;
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user,
        String("Cannot check package for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        return NOERROR;
    }
    // Query the package manager with the caller's identity cleared; every
    // exit path restores it.
    Int64 ident = Binder::ClearCallingIdentity();
    AutoPtr<IIPackageManager> pm = AppGlobals::GetPackageManager();
    Int32 id;
    user->GetIdentifier(&id);
    AutoPtr<IPackageInfo> info;
    ECode ec = pm->GetPackageInfo(packageName, 0, id, (IPackageInfo**)&info);
    if (FAILED(ec)) {
        Binder::RestoreCallingIdentity(ident);
        return ec;
    }
    if (info != NULL) {
        AutoPtr<IApplicationInfo> ai;
        info->GetApplicationInfo((IApplicationInfo**)&ai);
        ai->GetEnabled(result);
    }
    Binder::RestoreCallingIdentity(ident);
    return NOERROR;
}
// Reports whether the activity named by `component` exists for `user`
// (activity info lookup succeeds and is non-NULL). A disabled profile yields
// FALSE; an unrelated profile is a security error.
ECode CLauncherAppsImpl::IsActivityEnabled(
    /* [in] */ IComponentName* component,
    /* [in] */ IUserHandle* user,
    /* [out] */ Boolean* result)
{
    VALIDATE_NOT_NULL(result)
    *result = FALSE;
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user,
        String("Cannot check component for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        return NOERROR;
    }
    // Query the package manager with the caller's identity cleared; every
    // exit path restores it.
    Int64 ident = Binder::ClearCallingIdentity();
    AutoPtr<IIPackageManager> pm = AppGlobals::GetPackageManager();
    Int32 id;
    user->GetIdentifier(&id);
    AutoPtr<IActivityInfo> info;
    ECode ec = pm->GetActivityInfo(component, 0, id, (IActivityInfo**)&info);
    if (FAILED(ec)) {
        Binder::RestoreCallingIdentity(ident);
        return ec;
    }
    *result = info != NULL;
    Binder::RestoreCallingIdentity(ident);
    return NOERROR;
}
// Launches the given component as a launcher activity for the given user
// profile. The component must be exported AND must actually resolve for a
// MAIN/LAUNCHER intent in its package — otherwise E_SECURITY_EXCEPTION is
// returned so callers cannot launch arbitrary (non-launcher) activities.
ECode CLauncherAppsImpl::StartActivityAsUser(
    /* [in] */ IComponentName* component,
    /* [in] */ IRect* sourceBounds,
    /* [in] */ IBundle* opts,
    /* [in] */ IUserHandle* user)
{
    // Callers may only act on profiles related to their own user; a
    // disabled profile is a hard error here (unlike the query methods).
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user,
        String("Cannot start activity for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        Logger::E(TAG, "Cannot start activity for disabled profile %s", str.string());
        return E_ILLEGAL_STATE_EXCEPTION;
    }
    // Build a MAIN/LAUNCHER probe intent restricted to the component's
    // package; it is used below to verify the component is a real launcher
    // entry, and finally (with the component set) to perform the launch.
    AutoPtr<IIntent> launchIntent;
    CIntent::New(IIntent::ACTION_MAIN, (IIntent**)&launchIntent);
    launchIntent->AddCategory(IIntent::CATEGORY_LAUNCHER);
    launchIntent->SetSourceBounds(sourceBounds);
    launchIntent->AddFlags(IIntent::FLAG_ACTIVITY_NEW_TASK);
    String pkgName;
    component->GetPackageName(&pkgName);
    launchIntent->SetPackage(pkgName);
    // Run the package-manager checks with the caller's identity cleared;
    // the identity is restored on every exit path (manual expansion of the
    // original Java try/finally kept in the comments below).
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    AutoPtr<IIPackageManager> pm = AppGlobals::GetPackageManager();
    Int32 id;
    user->GetIdentifier(&id);
    AutoPtr<IActivityInfo> info;
    ECode ec = pm->GetActivityInfo(component, 0, id, (IActivityInfo**)&info);
    if (FAILED(ec)) {
        Binder::RestoreCallingIdentity(ident);
        return ec;
    }
    // Refuse to launch components not exported by their package.
    Boolean exported;
    if (IComponentInfo::Probe(info)->GetExported(&exported), !exported) {
        Logger::E(TAG, "Cannot launch non-exported components %p", component);
        Binder::RestoreCallingIdentity(ident);
        return E_SECURITY_EXCEPTION;
    }
    // Check that the component actually has Intent.CATEGORY_LAUCNCHER
    // as calling startActivityAsUser ignores the category and just
    // resolves based on the component if present.
    AutoPtr<IList> apps;
    ec = mPm->QueryIntentActivitiesAsUser(launchIntent, 0 /* flags */, id, (IList**)&apps);
    if (FAILED(ec)) {
        Binder::RestoreCallingIdentity(ident);
        return ec;
    }
    // Scan the resolved launcher activities for an exact package+class
    // match with the requested component.
    AutoPtr<IIterator> it;
    apps->GetIterator((IIterator**)&it);
    Boolean hasNext;
    String aiPkgName, aiClsName, className;
    component->GetClassName(&className);
    while (it->HasNext(&hasNext), hasNext) {
        AutoPtr<IInterface> value;
        it->GetNext((IInterface**)&value);
        AutoPtr<IResolveInfo> ri = IResolveInfo::Probe(value);
        AutoPtr<IActivityInfo> activityInfo;
        ri->GetActivityInfo((IActivityInfo**)&activityInfo);
        IPackageItemInfo::Probe(activityInfo)->GetPackageName(&aiPkgName);
        if (aiPkgName.Equals(pkgName)) {
            IPackageItemInfo::Probe(activityInfo)->GetName(&aiClsName);
            if (aiClsName.Equals(className)) {
                // Found an activity with category launcher that matches
                // this component so ok to launch.
                launchIntent->SetComponent(component);
                ec = mContext->StartActivityAsUser(launchIntent, opts, user);
                Binder::RestoreCallingIdentity(ident);
                if (FAILED(ec)) {
                    Logger::E(TAG, "Failed to launch activity [%s], ec=%08x.", TO_CSTR(component), ec);
                }
                return ec;
            }
        }
    }
    // No launcher-category match: treat the request as a security violation.
    Logger::E(TAG, "Attempt to launch activity [%s] without category Intent.CATEGORY_LAUNCHER", TO_CSTR(component));
    Binder::RestoreCallingIdentity(ident);
    return E_SECURITY_EXCEPTION;
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}
// Opens the system Settings "application details" screen for the given
// component's package, on behalf of the given user profile.
ECode CLauncherAppsImpl::ShowAppDetailsAsUser(
    /* [in] */ IComponentName* component,
    /* [in] */ IRect* sourceBounds,
    /* [in] */ IBundle* opts,
    /* [in] */ IUserHandle* user)
{
    // Callers may only act on profiles related to their own user.
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user,
        String("Cannot show app details for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        Logger::E(TAG, "Cannot show app details for disabled profile %s", str.string());
        // Bug fix: execution previously fell through after logging the
        // error and showed the details screen anyway. Mirror the sibling
        // StartActivityAsUser and abort for a disabled profile.
        return E_ILLEGAL_STATE_EXCEPTION;
    }
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    // Build a "package:<name>" URI targeting the Settings details screen.
    String packageName;
    component->GetPackageName(&packageName);
    AutoPtr<IUriHelper> helper;
    CUriHelper::AcquireSingleton((IUriHelper**)&helper);
    AutoPtr<IUri> uri;
    helper->FromParts(String("package"), packageName, String(NULL), (IUri**)&uri);
    AutoPtr<IIntent> intent;
    CIntent::New(ISettings::ACTION_APPLICATION_DETAILS_SETTINGS, uri, (IIntent**)&intent);
    intent->SetFlags(IIntent::FLAG_ACTIVITY_NEW_TASK | IIntent::FLAG_ACTIVITY_CLEAR_TASK |
            IIntent::FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
    intent->SetSourceBounds(sourceBounds);
    // NOTE(review): the identity is restored *before* the launch, so the
    // launch runs under the caller's identity — confirm this ordering is
    // intentional (the original Java source used try/finally around both).
    Binder::RestoreCallingIdentity(ident);
    ECode ec = mContext->StartActivityAsUser(intent, opts, user);
    return ec;
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}
// Returns this object's string representation by delegating to the base
// Object::ToString(String*) implementation.
ECode CLauncherAppsImpl::ToString(
    /* [out] */ String* str)
{
    VALIDATE_NOT_NULL(str)
    return Object::ToString(str);
}
} // namespace Pm
} // namespace Server
} // namespace Droid
} // namespace Elastos
|
using FluentValidation;
using FluentValidation.TestHelper;
using Survi.Prevention.ApiClient.DataTransferObjects;
using Survi.Prevention.ServiceLayer.Import.Lane;
using Xunit;
namespace Survi.Prevention.ServiceLayer.Tests.Import.LaneImportation
{
public class LaneGenericCodeImportValidatorTests: AbstractValidator<LaneGenericCode>
{
private readonly LaneGenericCodeValidator validator;
public LaneGenericCodeImportValidatorTests()
{
validator = new LaneGenericCodeValidator();
}
[Fact]
public void IdIsValidWhenNotEmpty()
{
validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Id, "IdGenericCode");
}
[Theory]
[InlineData("")]
[InlineData(" ")]
[InlineData(null)]
public void IdIsNotValidWhenEmpty(string id)
{
validator.ShouldHaveValidationErrorFor(genCode => genCode.Id, id);
}
[Fact]
public void CodeIsValidWhenNotEmpty()
{
validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Code, "1");
}
[Theory]
[InlineData("")]
[InlineData(" ")]
[InlineData(null)]
[InlineData("CodeTooLong")]
public void CodeIsInvalidWhenNullEmptyOrTooLong(string code)
{
validator.ShouldHaveValidationErrorFor(genCode => genCode.Code, code);
}
[Fact]
public void DescriptionIsValidWhenNotEmpty()
{
validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Description, "Generic code");
}
[Theory]
[InlineData(null)]
[InlineData("TooLongDescriptionToValidate")]
public void DescriptionIsInvalidWhenNullEmptyOrTooLong(string description)
{
validator.ShouldHaveValidationErrorFor(genCode => genCode.Description, description);
}
}
}
|
# AWS Upload & Transcribe Local Files
###### Uploads local audio files to Amazon AWS bucket and starts the transcription job
### _Future Features_
```
1) Save file locally after transcription is completed
2) Format and save the file as a .docx format
3) Identify and split multiple speakers and format in the response
4) Accept audio and video files
```
#### Setup Environment
###### Create a .env file in the src directory and add the following keys
###### The language in which to transcribe the audio; defaults to English
LANG=en-US
###### The AWS access key ID
AWS_ACCESS_KEY_ID=
###### The AWS secret access token
AWS_SECRET_ACCESS_KEY=
###### The storage bucket name
AWS_STORAGE_BUCKET=
###### The region name that the bucket is located in
AWS_STORAGE_REGION=
### Setup
Clone the repository and install the dependencies.
```
Step 1 - git clone https://github.com/BradleySeymourSAE/transcribe-audio-file.git
Step 2 - Get AWS authentication credentials and create a .env file with the above keys
Step 3 - npm install
Step 4 - npm start
```
#### Version
__node@12.18.3__ <br>
__npm@6.14.6__
## License
MIT
|
namespace WebCore.API.Models
{
    /// <summary>
    /// Lightweight data-transfer object representing a stored note.
    /// </summary>
    public class Note
    {
        /// <summary>Key identifying the note.</summary>
        public string Key { get; set; }

        /// <summary>Subject line of the note.</summary>
        public string Subject { get; set; }

        /// <summary>Body text of the note.</summary>
        public string Body { get; set; }
    }
}
|
package com.entimer.coronatracker.view.splash
import android.content.Context
import android.content.Intent
import android.net.ConnectivityManager
import android.os.Bundle
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.entimer.coronatracker.R
import com.entimer.coronatracker.view.main.MainActivity
class SplashActivity: AppCompatActivity(), SplashContract.View {
    private lateinit var presenter: SplashPresenter

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Only bootstrap the presenter when a usable connection exists;
        // otherwise inform the user and close the whole task.
        if (checkNetwork()) {
            presenter = SplashPresenter(this)
            presenter.initCountryList(applicationContext)
        }
        else {
            Toast.makeText(applicationContext, getString(R.string.splashNetworkFailed), Toast.LENGTH_LONG).show()
            finishAffinity()
        }
    }

    // True when an active mobile-data or Wi-Fi connection is reported.
    private fun checkNetwork(): Boolean {
        val manager = getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
        val networkInfo = manager.activeNetworkInfo ?: return false
        return networkInfo.type == ConnectivityManager.TYPE_MOBILE ||
                networkInfo.type == ConnectivityManager.TYPE_WIFI
    }

    override fun onInitFinished() {
        // Country list ready: hand off to the main screen.
        startActivity(Intent(applicationContext, MainActivity::class.java))
        finish()
    }

    override fun onInitFailed() {
        Toast.makeText(applicationContext, getString(R.string.splashInitFailed), Toast.LENGTH_LONG).show()
        finishAffinity()
    }
}
|
package Agua::Ops::Sge;
use Moose::Role;
use Method::Signatures::Simple;
#### SUN GRID ENGINE METHODS
# Find the SGE process listening on the given port via netstat and kill it.
# Silently returns if no matching process is found.
method stopSgeProcess ($port) {
	$self->logDebug("Ops::stopSgeProcess(port)");
	$self->logDebug("port", $port);

	#### INPUT FORMAT: netstat -ntulp | grep sge_*
	#### tcp        0      0 0.0.0.0:36472      0.0.0.0:*       LISTEN      9855/sge_exec
	my $netstat = qq{netstat -ntulp | grep sge | grep $port};
	$self->logDebug("netstat", $netstat);
	my $output = $self->runCommand($netstat);
	# Extract the pid from the "PID/Program name" column of the netstat line
	# (see the sample format above).
	my ($pid) = $output =~ /^\s*\S+\s+\S+\s+\S+\s+[^:]+:\d+\s+\S+\s+\S+\s+(\d+)\/\S+\s*/;
	$self->logDebug("pid", $pid) if defined $pid;
	$self->logDebug("pid NOT DEFINED. No running SGE port") if not defined $pid;
	return if not defined $pid;

	$self->killProcess($pid);
}
# Forcibly terminate (SIGKILL) the process with the given pid.
# Robustness fix: the old guard only checked for an empty string, so an
# undefined or non-numeric pid slipped through and produced a malformed
# (and potentially dangerous) shell command.
method killProcess ($pid) {
	$self->logError("pid is empty or not numeric") and exit
		if not defined $pid or $pid !~ /^\d+$/;

	my $command = "kill -9 $pid";
	$self->logDebug("command", $command);
	$self->runCommand($command);
}
# True if an sge_qmaster daemon is listening on the given port.
method qmasterRunning ($port) {
	$self->logDebug("port", $port);
	my $daemon = "sge_qmaster";
	return $self->sgeProcessListening($port, $daemon);
}
# True if an sge_execd daemon is listening on the given port.
method execdRunning ($port) {
	$self->logDebug("port", $port);
	my $daemon = "sge_execd";
	return $self->sgeProcessListening($port, $daemon);
}
# Generic listener check: greps netstat output for the given port and/or
# process-name pattern. Returns the matching netstat line (truthy) when a
# listener is found, 0 otherwise. At least one of $port/$pattern must be
# defined.
method sgeProcessListening ($port, $pattern) {
	#### LISTENER VERIFIER. LATER: REDO WITH REGEX
	$self->logDebug("port", $port);
	$self->logDebug("pattern", $pattern) if defined $pattern;
	$self->logError("Neither port nor pattern are defined") and exit if not defined $port and not defined $pattern;

	# Build the pipeline incrementally so either filter may be omitted.
	my $command = "netstat -ntulp ";
	$command .= "| grep $port " if defined $port;
	$command .= "| grep $pattern " if defined $pattern;

	#### EXPECTED OUTPUT FORMAT:
	####tcp        0      0 0.0.0.0:36361      0.0.0.0:*       LISTEN      5920/sge_qmaster
	####tcp        0      0 0.0.0.0:36362      0.0.0.0:*       LISTEN      4780/sge_execd
	my ($result) = $self->runCommand($command);
	# Trim trailing whitespace so an all-whitespace result is falsy.
	$result =~ s/\s+$//;
	$self->logDebug("result", $result);

	return $result if defined $result and $result;
	return 0;
}
1;
|
#!/usr/bin/env zsh
# Key-binding setup: emacs-style editing plus terminfo-aware arrow and
# delete keys.
bindkey -e

# Black magic to set terminal modes properly
# See: https://github.com/robbyrussell/oh-my-zsh/blob/3705d47bb3f3229234cba992320eadc97a221caf/lib/key-bindings.zsh#L5
# Switch the terminal into application mode (smkx) while the line editor is
# active, and back (rmkx) when it finishes, so the ${terminfo[...]} key
# sequences bound below match what the terminal actually sends.
if (( ${+terminfo[smkx]} )) && (( ${+terminfo[rmkx]} )); then
  function zle-line-init() {
    echoti smkx
  }
  function zle-line-finish() {
    echoti rmkx
  }
  zle -N zle-line-init
  zle -N zle-line-finish
fi

# Up/Down arrows: history search constrained to the already-typed prefix.
autoload -U up-line-or-beginning-search
zle -N up-line-or-beginning-search
bindkey "${terminfo[kcuu1]}" up-line-or-beginning-search

autoload -U down-line-or-beginning-search
zle -N down-line-or-beginning-search
bindkey "${terminfo[kcud1]}" down-line-or-beginning-search

# Backspace always deletes backwards.
bindkey '^?' backward-delete-char

# Forward delete: prefer the terminfo entry; fall back to the common raw
# escape sequences when terminfo lacks kdch1.
if [[ "${terminfo[kdch1]}" != "" ]]; then
  bindkey "${terminfo[kdch1]}" delete-char
else
  bindkey "^[[3~" delete-char
  bindkey "^[3;5~" delete-char
  bindkey "\e[3~" delete-char
fi
|
class SurveyTaker < ActiveRecord::Base
  # Returns the survey takers whose +number+ matches the given search term.
  # Bug fix: removed a stray debug statement (`puts search.class`) left over
  # from development, which wrote to stdout on every search.
  def self.search(search)
    where(number: search)
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.