text
stringlengths 27
775k
|
|---|
module PipelineAggregator
  # Base pipeline class. Subclasses are expected to populate the instance
  # variable @config_data with a hash holding "datadir" and "apidir" entries.
  class Base
    @config_data = nil

    # Root directory for pipeline data, from the configuration hash.
    def data_directory
      @config_data["datadir"]
    end

    # Directory for API artifacts, from the configuration hash.
    def api_directory
      @config_data["apidir"]
    end

    # Absolute path of the jobs subdirectory under the data directory.
    def jobs_directory
      File.expand_path(File.join(data_directory, "jobs"))
    end

    # Absolute path of the builds subdirectory under the data directory.
    def builds_directory
      File.expand_path(File.join(data_directory, "builds"))
    end

    # Absolute path of the graphs subdirectory under the data directory.
    def graphs_directory
      File.expand_path(File.join(data_directory, "graphs"))
    end

    # Emit a log line to stdout.
    def log(message)
      puts message
    end
  end
end
|
include("poweriteration.jl")
# This code generates a dictionnary where index -> page_name
# Build a Dict mapping index -> page_name from a tab-separated dump file.
#
# Each non-comment (`#`-prefixed), non-empty line contributes one entry: the
# text before the first tab is the index, the text after it (with any further
# tab characters dropped, matching the original char-by-char parser) is the
# page name. Lines without a tab map the whole line to an empty page name.
function get_index_to_page(filename)
    dictionary = Dict{String,String}()
    println("Construction index to page dictionnary")
    # `open ... do` guarantees the file handle is closed even on error
    # (the original opened the file and never closed it).
    open(filename) do io
        lines = readlines(io)
        for line in ProgressBar(lines)
            if !startswith(line, "#") && line != ""
                # Splitting and joining replicates the original behavior
                # (all tabs removed from the page part) without the O(n^2)
                # per-character string concatenation.
                parts = split(line, '\t')
                dictionary[first(parts)] = join(parts[2:end])
            end
        end
    end
    return dictionary
end
|
# Code of Conduct
This project is governed by [e-Rum2020 Code of Conduct](https://2020.erum.io/about/code-of-conduct).
By participating, you are expected to uphold this code.
This code of conduct is adapted from the ESO workshop & conference code of conduct, which was derived from original Creative Commons documents by PyCon and Geek Feminism.
It is released under a CC-0 license for reuse.
|
#!/usr/bin/env python3.5
from raspi_io import I2C
import raspi_io.utility as utility


if __name__ == '__main__':
    # Attach to the first raspi_io server discovered on the network,
    # bus /dev/i2c-1, device address 0x56.
    i2c = I2C(utility.scan_server()[0], '/dev/i2c-1', 0x56)

    # Write the byte pattern 0..255 starting at register 0x0.
    written = i2c.write(0x0, list(range(256)))
    print("Write {} bytes".format(written))

    # Read the same 256 bytes back.
    payload = i2c.read(0x0, 256)
    print("Read {} bytes".format(len(payload)))

    # Dump as hex, 16 bytes per row; first/last bytes should be 0 and 255.
    i2c.print_binary(payload, 16)
    print(payload[0], payload[255])
|
package pl.umk.mat.fastSDA.image;
/** Supported grayscale bit depths for image pixel data. */
public enum BitScale {
/** 8 bits per pixel. */
GRAY_8,
/** 16 bits per pixel. */
GRAY_16
}
|
class Fate
  module Output
    # Resolves a named output handler, falling back to a "default" handler
    # factory and finally to the service's logger.
    class Handlers
      def initialize(service, handlers)
        @service = service
        @handlers = handlers
      end

      # Look up the handler registered under +name+. Resolution order:
      #   1. an explicitly registered handler (not re-cached; it is already
      #      present in the hash),
      #   2. a handler built from the "default" entry (cached),
      #   3. a logger-backed handler from the service (cached).
      def [](name)
        if handler = @handlers[name]
          handler
        elsif @handlers["default"]
          @handlers[name] = @handlers["default"][name]
        else
          @handlers[name] = @service.logger[name]
        end
      end
    end

    # Wraps the shared IO owned by an IOMux, tagging everything written
    # through it with the stream's name (via the mux's +format+).
    class IOFilter
      def initialize(master, name)
        @master = master
        @io = @master.io
        @name = name
      end

      # duck typing for IO: write the tagged string, flush immediately, and
      # return the byte count reported by the underlying IO.
      def write(string)
        num = @io.write(@master.format(@name, string))
        @io.flush
        num
      end

      # Delegate any other IO messages straight to the underlying IO object.
      def method_missing(method, *args, &block)
        if @io.respond_to?(method)
          @io.send(method, *args, &block)
        else
          super
        end
      end

      # Fix: keep respond_to? consistent with the delegation above — without
      # this, respond_to?(:sync) etc. returned false for delegated methods.
      def respond_to_missing?(method, include_private = false)
        @io.respond_to?(method, include_private) || super
      end
    end

    # Multiplexes several named output streams onto one IO, printing a
    # "==> name <==" banner whenever the writing stream changes.
    class IOMux
      attr_reader :io
      attr_accessor :last_identifier

      # Accepts either :file (opened in append mode) or :io (used as-is).
      def initialize(options)
        @last_identifier = nil
        if file = options[:file]
          @io = File.new(file, "a")
        elsif io = options[:io]
          @io = io
        end
        @handlers = {}
      end

      # Return (and cache) the IOFilter for +name+.
      def [](name)
        @handlers[name] ||= IOFilter.new(self, name)
      end

      # Prefix +string+ with a banner when the writer differs from the
      # previous one; consecutive writes from the same stream stay unprefixed.
      def format(name, string)
        if name == @last_identifier
          string
        else
          @last_identifier = name
          "==> #{name} <==\n#{string}"
        end
      end
    end
  end
end
|
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
class UsuarioModel extends CI_Model {

    function __construct(){
        parent::__construct();
    }

    /**
     * Persist a new user taken from the raw POST payload.
     *
     * NOTE(review): md5() is a weak, unsalted hash for passwords; it is kept
     * here only because stored rows already use it. Migrating to
     * password_hash()/password_verify() requires rehashing existing data.
     *
     * @return bool result of the insert.
     */
    public function salvar()
    {
        $this->nome = $_POST['nome'];
        $this->email = $_POST['email'];
        $this->senha = md5($_POST['senha']);
        return $this->db->insert('usuarios', $this);
    }

    /**
     * Check credentials and, on success, store the user's data in the session.
     *
     * @return bool true when email/password match a row in `usuarios`.
     */
    public function verificar_login()
    {
        $parametros = [
            $this->input->post('usuario'),
            md5($this->input->post('senha'))
        ];
        $resultado = $this->db->query('SELECT * FROM usuarios WHERE email = ? AND senha = ?', $parametros);
        if ($resultado->num_rows()==0) {
            return false;
        } else {
            // Store the authenticated user's data in the session.
            $dados = $resultado->row();
            $this->session->set_userdata('id', $dados->id);
            // Bug fix: 'nome' and 'email' previously stored $dados->id.
            $this->session->set_userdata('nome', $dados->nome);
            $this->session->set_userdata('email', $dados->email);
            return true;
        }
    }
}
?>
|
'use strict';
const circular = '<<circular reference>>',
maxDepthMsg = '<<max depth reached>>',
_ = require('lodash'),
fixCircular = function (obj, maxDepth) {
let references = new Set();
const visitor = (x, depth) => {
if (depth >= maxDepth) {
return maxDepthMsg;
} else if (typeof x !== 'object') {
return x;
} else if (references.has(x)) {
return circular;
}
references.add(obj);
const pairs = _.toPairs(x)
.map(pair => {
return [pair[0], visitor(pair[1], depth + 1)];
});
return _.fromPairs(pairs);
};
return visitor(obj, 0);
};
module.exports = function (obj, maxDepth) {
return fixCircular(obj, maxDepth);
};
|
## 100DaysCodeChallenge
The repository is dedicated to my 100 days of code starting from Saturday 26/06/2021
**Pacheko**
## Timeline
|**Day:pushpin:**|**Date :calendar:**|**Lessons/Tasks Done :alarm_clock:**| **Reference Links :link:**|
|------|-----------------|--------------------|---------------------|
|1 | 28/06/2021| Read on Kotlin functions. Built a simple rock-paper-scissors game | |
|2 | 30/06/2021| Finished on Kotlin functions and Began kotlin objects and classes| |
|3 | 04/07/2021| Completed Kotlin objects and classes| |
|4 | 05/07/2021| Began Inheritance - class design| |
|5 | 07/07/2021|Completed Classes and Objects - Inheritance| |
|6 | 10/08/2021|Built web blocker using python| |
|7 | 11/08/2021|Built web map using python| |
|
## Creative Commons CC0
This book is released under the terms of the [CC0 License][0], and
therefore dedicated to the Public Domain.
[0]: http://creativecommons.org/publicdomain/zero/1.0/
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using PropertyHook;
using UnityEngine;
/// <summary>
/// Wraps a native DS2 GX point light, reading and writing its fields
/// directly through a process-memory hook.
/// </summary>
public class DS2GXPointLight : DS2GXLightBase
{
    // Expected vtable address at offset 0x0; used to sanity-check that the
    // pointer really refers to a point-light object.
    private const ulong ExpectedVtable = 0x1411DA3D0;

    public DS2GXPointLight(PHook hook, PHPointer b, int index)
    {
        Hook = hook;
        BasePointer = b;
        Index = index;
    }

    /// <summary>
    /// Valid only when the hook is attached, the base pointer exists, and
    /// the object's vtable matches the expected point-light vtable.
    /// </summary>
    public override bool IsValid()
    {
        return Hook.Hooked
            && BasePointer != null
            && BasePointer.ReadUInt64(0x0) == ExpectedVtable;
    }

    /// <summary>World position (floats at 0x50/0x54/0x58); zero vector and
    /// no-op write when the light is not valid.</summary>
    public override Vector3 Position
    {
        get
        {
            if (!IsValid())
            {
                return new Vector3(0.0f, 0.0f, 0.0f);
            }
            return new Vector3(
                BasePointer.ReadSingle(0x50),
                BasePointer.ReadSingle(0x54),
                BasePointer.ReadSingle(0x58));
        }
        set
        {
            if (!IsValid())
            {
                return;
            }
            BasePointer.WriteSingle(0x50, value.x);
            BasePointer.WriteSingle(0x54, value.y);
            BasePointer.WriteSingle(0x58, value.z);
        }
    }

    /// <summary>Light radius at offset 0x5C.</summary>
    public override float Radius
    {
        get => BasePointer.ReadSingle(0x5C);
        set => BasePointer.WriteSingle(0x5C, value);
    }

    /// <summary>Flicker minimum at offset 0x74.</summary>
    public override float FlickerMin
    {
        get => BasePointer.ReadSingle(0x74);
        set => BasePointer.WriteSingle(0x74, value);
    }

    /// <summary>Flicker maximum at offset 0x78.</summary>
    public override float FlickerMax
    {
        get => BasePointer.ReadSingle(0x78);
        set => BasePointer.WriteSingle(0x78, value);
    }

    /// <summary>Flicker multiplier at offset 0x7C.</summary>
    public override float FlickerMult
    {
        get => BasePointer.ReadSingle(0x7C);
        set => BasePointer.WriteSingle(0x7C, value);
    }
}
|
import { useRouter } from 'next/router';
import React, { ReactNode, useEffect, useState } from 'react';
import { useKeyboard } from '../../lib/useKeyboard';
import { ConfirmCondition, ConfirmScreen } from './ConfirmScreen';
// Shape of the confirm-dialog state shared through React context.
// `render` mounts/unmounts the dialog while `visible` toggles its visibility,
// so the two can be controlled independently (e.g. for exit animations).
type ConfirmContext = {
confirmScreen: {
render: boolean;
setRender: (visible: boolean) => void;
visible: boolean;
setVisible: (visible: boolean) => void;
// Label for the confirm button shown in the dialog.
confirmButtonText: string;
setConfirmButtonText: (confirmButtonText: string) => void;
// Optional extra condition the user must satisfy before confirming.
condition: ConfirmCondition;
setCondition: (condition: ConfirmCondition) => void;
// Async callback run when the user confirms.
onConfirm: () => Promise<void>;
setOnConfirm: (onConfirm: () => Promise<void>) => void;
title: React.ReactNode | string;
setTitle: (message: React.ReactNode | string) => void;
message: React.ReactNode | string;
setMessage: (message: React.ReactNode | string) => void;
};
};
// Default context value: no-op setters so consumers render safely when no
// ConfirmContextProvider is mounted above them.
export const ConfirmContext = React.createContext<ConfirmContext>({
confirmScreen: {
render: false,
setRender: () => undefined,
visible: false,
setVisible: () => undefined,
confirmButtonText: undefined,
setConfirmButtonText: () => undefined,
onConfirm: () => undefined,
setOnConfirm: () => undefined,
condition: undefined,
setCondition: () => undefined,
title: undefined,
setTitle: () => undefined,
message: undefined,
setMessage: () => undefined,
},
});
// Props for ConfirmContextProvider: the subtree that gains access to the
// confirm dialog.
interface ConfirmContextProviderProps {
children: ReactNode;
}
// Provides confirm-dialog state to the subtree and renders the ConfirmScreen
// overlay when `render` is true. The dialog is dismissed on Escape and on
// route changes.
//
// NOTE(review): `onConfirm` stores a function inside useState. React treats a
// function passed to a state setter as a functional updater, so consumers
// must call setOnConfirm(() => handler) rather than setOnConfirm(handler) —
// confirm that existing callers do this.
export const ConfirmContextProvider: React.FC<ConfirmContextProviderProps> = ({
children,
}: ConfirmContextProviderProps) => {
const [render, setRender] = useState<boolean>(false);
const [visible, setVisible] = useState<boolean>(false);
const [confirmButtonText, setConfirmButtonText] = useState<string>();
const [onConfirm, setOnConfirm] = useState<() => Promise<void>>();
const [title, setTitle] = useState<React.ReactNode | string>('');
const [message, setMessage] = useState<React.ReactNode | string>('');
const router = useRouter();
const [condition, setCondition] = useState<ConfirmCondition>();
// Escape key closes and resets the dialog.
useKeyboard(() => {
if (render && visible) {
setVisible(false);
setRender(false);
setOnConfirm(undefined);
}
}, ['Escape']);
// Navigating away closes and resets the dialog.
useEffect(() => {
const handleRouteChange = () => {
setVisible(false);
setRender(false);
setOnConfirm(undefined);
};
router?.events.on('routeChangeStart', handleRouteChange);
return () => {
router?.events.off('routeChangeStart', handleRouteChange);
};
}, [router?.events]);
return (
<ConfirmContext.Provider
value={{
confirmScreen: {
render,
setRender,
visible,
setVisible,
confirmButtonText,
setConfirmButtonText,
onConfirm,
setOnConfirm,
title,
setTitle,
message,
setMessage,
condition,
setCondition,
},
}}
>
{children}
{render && (
<ConfirmScreen
visible={visible}
message={message}
onConfirm={async () => {
if (typeof onConfirm === 'function') {
await onConfirm();
}
setVisible(false);
setRender(false);
setOnConfirm(undefined);
}}
onCancel={() => {
setVisible(false);
setRender(false);
setOnConfirm(undefined);
}}
title={title}
confirmButtonText={confirmButtonText}
condition={condition}
/>
)}
</ConfirmContext.Provider>
);
};
|
require Rails.root.join('spec/support/probate_fees_switchover_helper.rb')
# Fast-forward past the probate-fees switchover date so probate fees are
# disabled, with the external address lookup stubbed out.
def probate_disabled
  disable_address_lookup
  travel_to probate_fees_release_date + 1.day
  log "probate is disabled: #{ProbateFeesSwitch.disable_probate_fees?}"
end

# Travel to the day before the switchover so probate fees are still enabled,
# with the external address lookup stubbed out.
def probate_enabled
  disable_address_lookup
  travel_to a_day_before_disable_probate_fees
  # Consistency fix: use `log` like probate_disabled (was a bare `print`).
  log "probate is disabled: #{ProbateFeesSwitch.disable_probate_fees?}"
end
# Memoized page-object accessors: each helper lazily builds its page object
# once per example and reuses it for subsequent calls.
def checklist_page
@checklist_page ||= ChecklistPage.new
end
def form_name_page
@form_name_page ||= FormNamePage.new
end
def fee_page
@fee_page ||= FeePage.new
end
def marital_status_page
@marital_status_page ||= MaritalStatusPage.new
end
def savings_investment_page
@savings_investment_page ||= SavingsInvestmentPage.new
end
def savings_investment_extra_page
@savings_investment_extra_page ||= SavingsInvestmentExtraPage.new
end
def benefit_page
@benefit_page ||= BenefitPage.new
end
def dependent_page
@dependent_page ||= DependentPage.new
end
def income_kind_page
@income_kind_page ||= IncomeKindPage.new
end
def income_range_page
@income_range_page ||= IncomeRangePage.new
end
def income_amount_page
@income_amount_page ||= IncomeAmountPage.new
end
def probate_page
@probate_page ||= ProbatePage.new
end
def claim_page
@claim_page ||= ClaimPage.new
end
def national_insurance_page
@national_insurance_page ||= NationalInsurancePage.new
end
def national_insurance_presence_page
@national_insurance_presence_page ||= NationalInsurancePresencePage.new
end
def home_office_page
@home_office_page ||= HomeOfficePage.new
end
def dob_page
@dob_page ||= DobPage.new
end
def personal_details_page
@personal_details_page ||= PersonalDetailsPage.new
end
def address_page
@address_page ||= AddressPage.new
end
def contact_page
@contact_page ||= ContactPage.new
end
def summary_page
@summary_page ||= SummaryPage.new
end
def confirmation_page
@confirmation_page ||= ConfirmationPage.new
end
def confirmation_done_page
@confirmation_done_page ||= ConfirmationDonePage.new
end
def base_page
@base_page ||= BasePage.new
end
def saucelabs_page
@saucelabs_page ||= SaucelabsPage.new
end
def footer_page
@footer_page ||= FooterPage.new
end
def cookie_page
@cookie_page ||= CookiePage.new
end
# Click the continue button specific to the checklist screen.
def checklist_continue
base_page.content.checklist_continue_button.click
end
# Click the generic continue button shared by most screens.
def continue
base_page.content.continue_button.click
end
# Stub the OS Places OAuth token endpoint so specs never hit the real
# address-lookup service (returns an empty 200 response).
def disable_address_lookup
stub_request(:post, "https://api.os.uk/oauth2/token/v1").
with(
body: { "grant_type" => "client_credentials" },
headers: {
'Accept' => '*/*',
'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
'Authorization' => 'Basic YXBpX2tleTphcGlfc2VjcmV0',
'Content-Type' => 'application/x-www-form-urlencoded',
'User-Agent' => 'Ruby'
}
).
to_return(status: 200, body: "", headers: {})
end
|
<?hh // strict
namespace Zynga\Framework\Service\V2\Test;
use Zynga\Framework\Service\V2\Response\Base;
// Minimal concrete response used as a test double for service V2 tests.
class Response extends Base {}
|
### CollectionSchema
| Methods | Descriptions | 参数描述 | 返回值 |
| -------------------- | -------------------------------------- | -------------------- | -------------------- |
| CollectionSchema(fields, description="", **kwargs) | 构造一个CollectionSchema对象 | 参数fields是一个 list-like的对象,每个元素是FieldSchema对象<br />description 类型 string 自定义描述 | CollectionSchema对象或者Raise Exception |
| CollectionSchema.fields | 返回所有的列 | / | list,每个元素是一个 FieldSchema 对象 |
| CollectionSchema.description | 返回自定义描述 | / | string 自定义描述 |
| CollectionSchema.primary_field | 返回主键列的FieldSchema | / | None 或 FieldSchema 对象 |
| CollectionSchema.auto_id | 是否自动生成主键 | / | bool |
| | | | |
### FieldSchema
| Methods | Descriptions | 参数描述 | 返回值 |
| --------------------------------------------------- | ----------------------- | ------------------------------------------------------------ | ------------------------------------------ |
| FieldSchema(name, dtype, description="", **kwargs) | 构造一个FieldSchema对象 | name 参数类型是string<br />dtype参数类型是 名为 DataType 的 python enum<br />description 类型是 string,自定义描述 | FieldSchema对象或者Raise Exception |
| | | | |
| FieldSchema.name | 列名 | / | string |
| FieldSchema.dtype | 返回数据类型 | / | DataType |
| FieldSchema.description | 返回自定义描述 | / | string, 自定义描述 |
| FieldSchema.xxx | 其他属性 | / | None 或者确定的值<br />比如ndim, str_len等 |
#### DataType
| DataType Enum |
| ----------------------- |
| DataType.BOOL |
| DataType.INT8 |
| DataType.INT16 |
| DataType.INT32 |
| DataType.INT64 |
| DataType.FLOAT |
| DataType.DOUBLE |
| DataType.BINARY_VECTOR |
| DataType.FLOAT_VECTOR |
### 例子
```python
fields = [
FieldSchema("A", DataType.INT32, True),
FieldSchema("B", DataType.INT64),
FieldSchema("C", DataType.FLOAT),
FieldSchema("Vec", DataType.FLOAT_VECTOR)]
schema = CollectionSchema(fields, description = "This is a test collection.")
assert len(schema.fields) == len(fields)
```
|
# ida-xtensa
This is a processor plugin for IDA 7.x, to support the Xtensa core found in
Espressif ESP8266 and ESP32.
It does not support other configurations of the Xtensa architecture, but that is probably
(hopefully) easy to implement.
This is a beta release, for your hacking pleasure.
## Usage
Copy the file to the `procs/` directory in your IDA install. Ta-daa!
## License
GPLv2!
|
<h1 align="center">
<img src="./doc/lilac.jpg" alt="Lilac-breated Roller, by David Clode" width="300px" /><br />
inline-c
</h1>
[](https://crates.io/crates/inline-c)
[](https://docs.rs/inline-c)
`inline-c` is a small crate that allows a user to write C (including
C++) code inside Rust. Both environments are strictly sandboxed: it is
non-obvious for a value to cross the boundary. The C code is
transformed into a string which is written in a temporary file. This
file is then compiled into an object file, that is finally
executed. It is possible to run assertions about the execution of the
C program.
The primary goal of `inline-c` is to ease the testing of a C API of a
Rust program (generated with
[`cbindgen`](https://github.com/eqrion/cbindgen/) for example). Note
that it's not tied to a Rust program exclusively, it's just its
initial reason to live.
## Install
Add the following lines to your `Cargo.toml` file:
```toml
[dev-dependencies]
inline-c = "0.1"
```
## Documentation
The `assert_c` and `assert_cxx` macros live in the `inline-c-macro`
crate, but are re-exported in this crate for the sake of simplicity.
Being able to write C code directly in Rust offers nice opportunities,
like having C examples inside the Rust documentation that are
executable and thus tested (with `cargo test --doc`). Let's dig into
some examples.
### Basic usage
The following example is super basic: C prints `Hello, World!` on the
standard output, and Rust asserts that.
```rust
use inline_c::assert_c;
fn test_stdout() {
(assert_c! {
#include <stdio.h>
int main() {
printf("Hello, World!");
return 0;
}
})
.success()
.stdout("Hello, World!");
}
```
Or with a C++ program:
```rust
use inline_c::assert_cxx;
fn test_cxx() {
(assert_cxx! {
#include <iostream>
using namespace std;
int main() {
cout << "Hello, World!";
return 0;
}
})
.success()
.stdout("Hello, World!");
}
```
The `assert_c` and `assert_cxx` macros return a `Result<Assert,
Box<dyn Error>>`. See `Assert` to learn more about the possible
assertions.
The following example tests the returned value:
```rust
use inline_c::assert_c;
fn test_result() {
(assert_c! {
int main() {
int x = 1;
int y = 2;
return x + y;
}
})
.failure()
.code(3);
}
```
### Environment variables
It is possible to define environment variables for the execution of
the given C program. The syntax is using the special `#inline_c_rs` C
directive with the following syntax:
```c
#inline_c_rs <variable_name>: "<variable_value>"
```
Please note the double quotes around the variable value.
```rust
use inline_c::assert_c;
fn test_environment_variable() {
(assert_c! {
#inline_c_rs FOO: "bar baz qux"
#include <stdio.h>
#include <stdlib.h>
int main() {
const char* foo = getenv("FOO");
if (NULL == foo) {
return 1;
}
printf("FOO is set to `%s`", foo);
return 0;
}
})
.success()
.stdout("FOO is set to `bar baz qux`");
}
```
#### Meta environment variables
Using the `#inline_c_rs` C directive can be repetitive if one needs to
define the same environment variable again and again. That's why meta
environment variables exist. They have the following syntax:
```sh
INLINE_C_RS_<variable_name>=<variable_value>
```
It is usually best to define them in [a `build.rs`
script](https://doc.rust-lang.org/cargo/reference/build-scripts.html)
for example. Let's see it in action with a tiny example:
```rust
use inline_c::assert_c;
use std::env::{set_var, remove_var};
fn test_meta_environment_variable() {
set_var("INLINE_C_RS_FOO", "bar baz qux");
(assert_c! {
#include <stdio.h>
#include <stdlib.h>
int main() {
const char* foo = getenv("FOO");
if (NULL == foo) {
return 1;
}
printf("FOO is set to `%s`", foo);
return 0;
}
})
.success()
.stdout("FOO is set to `bar baz qux`");
remove_var("INLINE_C_RS_FOO");
}
```
#### `CFLAGS`, `CPPFLAGS`, `CXXFLAGS` and `LDFLAGS`
Some classical `Makefile` variables like `CFLAGS`, `CPPFLAGS`,
`CXXFLAGS` and `LDFLAGS` are understood by `inline-c` and consequently
have a special treatment. Their values are added to the appropriate
compilers when the C code is compiled and linked into an object file.
Pro tip: Let's say we have a Rust crate named `foo`, and it exports a
C API. It is possible to define `CFLAGS` and `LDFLAGS` as follow to
correctly compile and link all the C codes to the Rust `libfoo` shared
object by writing this in a `build.rs` script (it is assumed that
`libfoo` lands in the `target/<profile>/` directory, and that `foo.h`
lands in the root directory):
```rust
use std::{env, path::PathBuf};
fn main() {
let include_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let mut shared_object_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
shared_object_dir.push("target");
shared_object_dir.push(env::var("PROFILE").unwrap());
let shared_object_dir = shared_object_dir.as_path().to_string_lossy();
// The following options mean:
//
// * `-I`, add `include_dir` to include search path,
// * `-L`, add `shared_object_dir` to library search path,
// * `-D_DEBUG`, enable debug mode to enable `assert.h`.
println!(
"cargo:rustc-env=INLINE_C_RS_CFLAGS=-I{I} -L{L} -D_DEBUG",
I = include_dir,
L = shared_object_dir.clone(),
);
// Here, we pass the fullpath to the shared object with
// `LDFLAGS`.
println!(
"cargo:rustc-env=INLINE_C_RS_LDFLAGS={shared_object_dir}/{lib}",
shared_object_dir = shared_object_dir,
lib = if cfg!(target_os = "windows") {
"foo.dll".to_string()
} else if cfg!(target_os = "macos") {
"libfoo.dylib".to_string()
} else {
"libfoo.so".to_string()
}
);
}
```
_Et voilà !_ Now run `cargo build --release` (to generate the
shared objects) and then `cargo test --release` to see it in
action.
### Using `inline-c` inside Rust documentation
Since it is now possible to write C code inside Rust, it is
consequently possible to write C examples, that are:
1. Part of the Rust documentation with `cargo doc`, and
2. Tested with all the other Rust examples with `cargo test --doc`.
Yes. Testing C code with `cargo test --doc`. How _fun_ is that? No
trick needed. One can write:
```rust
/// Blah blah blah.
///
/// # Example
///
/// ```rust
/// # use inline_c::assert_c;
/// #
/// # fn main() {
/// # (assert_c! {
/// #include <stdio.h>
///
/// int main() {
/// printf("Hello, World!");
///
/// return 0;
/// }
/// # })
/// # .success()
/// # .stdout("Hello, World!");
/// # }
/// ```
pub extern "C" fn some_function() {}
```
which will compile down into something like this:
```c
int main() {
printf("Hello, World!");
return 0;
}
```
Notice that this example above is actually Rust code, with C code
inside. Only the C code is printed, due to the `#` hack of `rustdoc`,
but this example is a valid Rust example, and is fully tested!
There is one minor caveat though: the highlighting. The Rust set of
rules are applied, rather than the C ruleset. [See this issue on
`rustdoc` to follow the
fix](https://github.com/rust-lang/rust/issues/78917).
### C macros
C macros with the `#define` directive is supported only with Rust
nightly. One can write:
```rust,ignore
use inline_c::assert_c;
fn test_c_macro() {
(assert_c! {
#define sum(a, b) ((a) + (b))
int main() {
return !(sum(1, 2) == 3);
}
})
.success();
}
```
Note that multi-lines macros don't work! That's because the `\` symbol
is consumed by the Rust lexer. The best workaround is to define the
macro in another `.h` file, and to include it with the `#include`
directive.
## Who is using it?
* [Wasmer](https://github.com/wasmerio/wasmer), the leading
WebAssembly runtime,
* [Cargo C](https://github.com/lu-zero/cargo-c), to build and install
C-compatible libraries; it configures `inline-c` for you when using
`cargo ctest`!
* [Biscuit](https://github.com/CleverCloud/biscuit-rust), an
  authorization token for microservices architectures.
## License
`BSD-3-Clause`, see `LICENSE.md`.
|
class PeopleController < ApplicationController
  # List everyone, eager-loading each person's person_positions (and the
  # associated position records) to avoid N+1 queries in the view.
  def index
    @people = Person.includes(person_positions: :position).all
  end
end
|
<!-- Three -->
<section id="three" class="wrapper style2 special">
<header class="major">
<h2>Magna leo sapien gravida</h2>
<p>Gravida at leo elementum elit fusce accumsan dui libero, quis vehicula<br />
lectus ultricies eu. In convallis amet leo sapien iaculis efficitur.</p>
</header>
<ul class="actions">
<li><a href="#" class="button special icon fa-download">Download</a></li>
<li><a href="#" class="button">Learn More</a></li>
</ul>
</section>
|
<?php
namespace Database\Seeders;
use Illuminate\Database\Seeder;
use App\Models\User;
use App\Models\Schedule;
use Illuminate\Support\Facades\Hash;
class DatabaseSeeder extends Seeder
{
/**
* Seed the application's database.
*
* Creates one Dev user and one example schedule row.
* NOTE(review): credentials and personal data are hard-coded here and end
* up in source control — consider reading them from env() or using model
* factories instead.
*
* @return void
*/
public function run()
{
// \App\Models\User::factory(10)->create();
// Single development/administrator account.
$user = new User;
$user->name = 'EPW Dev';
$user->email = 'dev@epwits.com';
$user->password = Hash::make('DipaksaRaihanASU');
$user->role = 'Dev';
$user->save();
// Example schedule entry; dates/times are stored as plain strings.
$schedule = new Schedule;
$schedule->name = 'Daffa Kurnia Fatah';
$schedule->nrp = '02311940000068';
$schedule->date = 'Saturday, 18 September 2021';
$schedule->time = '09.00 WIB';
$schedule->breakout = '5';
$schedule->save();
}
}
|
#!/bin/bash
# Build the "pese-nlev60" make target from the project root (three levels
# up), then return to the directory we started in.
# Abort immediately if any command (including make) fails.
set -euo pipefail

cwd=$(pwd)
cd ../../..
echo "make pese-nlev60"
make -j4 pese-nlev60
cd "$cwd"
|
<?php
namespace Application\Model;
use Zend\Db\TableGateway\Feature\GlobalAdapterFeature;
/**
 * Authentication model: authenticates staff against the password table and
 * holds the collection of related table models injected by the framework.
 */
class AuthModel extends AuthBasic
{
    public $staffEmail;
    public $password;
    public $staffModel;
    public $passwordModel;
    public $branchModel;
    public $shopModel;
    public $vcModel;
    public $vendorModel;
    public $tmpFmtHeaderModel;
    public $tmpInputDataModel;
    public $inputSetListModel;
    // Declared explicitly so the setters below do not create dynamic
    // properties (deprecated since PHP 8.2).
    public $tmpInputHeaderModel;
    public $tmpFmtDataModel;

    public function setStaffModel(StaffModel $staffModel)
    {
        $this->staffModel = $staffModel;
    }

    public function getStaffModel()
    {
        return $this->staffModel;
    }

    public function setPasswordModel(PasswordModel $passwordModel)
    {
        $this->passwordModel = $passwordModel;
    }

    public function getPasswordModel()
    {
        return $this->passwordModel;
    }

    public function setBranchModel(BranchModel $branchModel)
    {
        $this->branchModel = $branchModel;
    }

    public function getBranchModel()
    {
        return $this->branchModel;
    }

    public function setInputSetListModel(InputSetListModel $inputSetListModel)
    {
        $this->inputSetListModel = $inputSetListModel;
    }

    public function getInputSetListModel()
    {
        // Bug fix: previously returned $this->branchModel (copy/paste error).
        return $this->inputSetListModel;
    }

    public function setTmpInputDataModel(TmpInputDataModel $tmpInputDataModel)
    {
        $this->tmpInputDataModel = $tmpInputDataModel;
    }

    public function getTmpInputDataModel()
    {
        return $this->tmpInputDataModel;
    }

    public function setTmpFmtHeaderModel(TmpFmtHeaderModel $tmpFmtHeaderModel)
    {
        $this->tmpFmtHeaderModel = $tmpFmtHeaderModel;
    }

    public function getTmpFmtHeaderModel()
    {
        return $this->tmpFmtHeaderModel;
    }

    public function setTmpInputHeaderModel(TmpInputHeaderModel $tmpInputHeaderModel)
    {
        $this->tmpInputHeaderModel = $tmpInputHeaderModel;
    }

    public function getTmpInputHeaderModel()
    {
        return $this->tmpInputHeaderModel;
    }

    public function setTmpFmtDataModel(TmpFmtDataModel $tmpFmtDataModel)
    {
        $this->tmpFmtDataModel = $tmpFmtDataModel;
    }

    public function getTmpFmtDataModel()
    {
        return $this->tmpFmtDataModel;
    }

    public function setVendorModel(VendorModel $vendorModel)
    {
        $this->vendorModel = $vendorModel;
    }

    public function getVendorModel()
    {
        return $this->vendorModel;
    }

    public function setShopModel(ShopModel $shopModel)
    {
        $this->shopModel = $shopModel;
    }

    public function getShopModel()
    {
        return $this->shopModel;
    }

    public function setVcModel(VcModel $vcModel)
    {
        $this->vcModel = $vcModel;
    }

    public function getVcModel()
    {
        return $this->vcModel;
    }

    /**
     * Populate the credential fields from a request array, defaulting each
     * missing/empty entry to null.
     */
    public function exchangeArray($data)
    {
        $this->staffEmail = (!empty($data['staff_email'])) ? $data['staff_email'] : null;
        $this->password = (!empty($data['password'])) ? $data['password'] : null;
    }

    /**
     * Authenticate using $data['staff_email'] / $data['password']: look up
     * the staff row, fetch its salt, then delegate to the parent login with
     * the salted hash.
     *
     * @throws \Exception when the e-mail or its password row is not found.
     */
    public function login($data)
    {
        //TODO: repair this authentication after all
        $row = null;
        $this->exchangeArray($data);
        //search staffId by staffEmail passed from front
        $this->staffModel->staffEmail = $data['staff_email'];
        $row = $this->staffModel->searchWithWhere();
        if(!$row){
            throw new \Exception('該当するEメールがみつかりませんでした。');
        }
        $this->staffModel->exchangeArrayFromTable($row);
        //search salt by staffId
        $this->passwordModel->staffId = $this->staffModel->id;
        $row = $this->passwordModel->searchWithWhere();
        if(!$row){
            throw new \Exception('該当するEメールが登録されていません。');
        }
        $this->passwordModel->exchangeArrayFromTable($row);
        $salt = $this->passwordModel->salt;
        // NOTE(review): md5 is a weak password hash; kept only because the
        // stored hashes depend on it — migrating to password_hash() needs a
        // data migration.
        $realPassword = md5($this->password.$salt);
        $this->passwordModel->password = $realPassword;
        parent::login($this->passwordModel, GlobalAdapterFeature::getStaticAdapter());
    }

    /**
     * Clear the stored identity and session storage.
     */
    public function logout()
    {
        $this->auth->getStorage()->clear();
        $this->auth->clearIdentity();
    }

    /**
     * Return the logged-in staff's display data.
     * Currently stubbed: the early return below short-circuits the real
     * lookup (everything after it is intentionally dead until the TODO is
     * resolved).
     */
    public function getLoginUser(){
        //TODO: repair this authentication after all
        return array(
            'staff_id' => '1',
            'staff_cd' => 'STAFF001',
            'staff_name' => 'TESTSTAFF',
            'branch_name' => 'TESTBRANCH',
        );
        $data = array();
        if($this->auth->hasIdentity()){
            $this->passwordModel = $this->auth->getIdentity();
            $arr = $this->staffModel->get($this->passwordModel->staff_id);
            $this->staffModel->exchangeArrayFromTable($arr);
            $data['staff_id'] = $this->passwordModel->staff_id;
            $data['staff_cd'] = $this->staffModel->staffCd;
            $data['staff_name'] = $this->staffModel->staffName;
            $arr = $this->branchModel->get($this->staffModel->branchId);
            $this->branchModel->exchangeArrayFromTable($arr);
            $data['branch_name'] = $this->branchModel->branchName;
        }
        return $data;
    }

    public function isLogin()
    {
        //TODO: repair this authentication after all
        // return true;
        return $this->auth->hasIdentity();
    }
}
|
/* analyze_immed_sess_p.sql
**
** Disable/enable immediate analyze of user tables from within a session.
**
** This is useful from within a PLpgSQL function or SQL script.
** If you are running batch jobs from outside the database, it
** is probably more useful to use analyze_immed_user_p which
** enables/disables the property for the user.
**
** This is for use with YBDW 4.x where ANALYZE HLL is now executed
** for every instance of a backend INSERT/UPDATE/DELETE or bulk-load.
** Not just when there has been a change of x% in data.
**
** Usage:
** See COMMENT ON FUNCTION statement after CREATE PROCEDURE.
**
** Prerequisites:
** The CREATE PROCEDURE must be run as a superuser.
**
** (c) 2020 Yellowbrick Data Corporation.
** . This script is provided free of charge by Yellowbrick Data Corporation as a
** convenience to its customers.
** . This script is provided "AS-IS" with no warranty whatsoever.
** . The customer accepts all risk in connection with the use of this script, and
** Yellowbrick Data Corporation shall have no liability whatsoever.
**
** Revision History:
** . 2021.12.09 - ybCliUtils inclusion.
** . 2020.07.31 - Yellowbrick Technical Support
*/
CREATE OR REPLACE PROCEDURE analyze_immed_sess_p( _off_or_on VARCHAR )
RETURNS VOID
LANGUAGE 'plpgsql'
VOLATILE
SECURITY DEFINER
AS
$proc$
DECLARE
   _setting VARCHAR := UPPER( TRIM( _off_or_on ) );
BEGIN
   -- Whitelist the argument before interpolating it into dynamic SQL: this
   -- procedure runs SECURITY DEFINER (as a superuser), so a free-form value
   -- concatenated into EXECUTE would be a SQL injection vector.
   IF _setting NOT IN ( 'OFF', 'ON' ) THEN
      RAISE EXCEPTION 'analyze_immed_sess_p: _off_or_on must be OFF or ON, got "%"', _off_or_on;
   END IF;
   EXECUTE 'SET ybd_analyze_after_writes TO ' || _setting ;
END;
$proc$
;
COMMENT ON FUNCTION analyze_immed_sess_p( VARCHAR ) IS
'Description:
SETs ybd_analyze_after_writes TO [OFF|ON]` for session as a superuser.
The property can only be set by a superuser. This procedure sets the property
only within the current session. If you need it to span sessions, set it at the
user level using analyze_immed_user_p().
Examples:
SELECT * FROM analyze_immed_sess_p( ''OFF'' );
CALL analyze_immed_sess_p( $$on$$ );
Arguments:
. _off_or_on - The literal text ''OFF'' or ''ON''.
Case insensitive.
Revision:
. 2021.12.09 - Yellowbrick Technical Support
'
;
|
SUBROUTINE BR3COL(IMETHD)
C
C ------------------------------------------------
C ROUTINE NO. ( 42) VERSION (A8.1) 02:JUL:86
C ------------------------------------------------
C
C THIS ROUTINE SETS UP THE METHOD OF COLOURING USED
C BY THE THREE DIMENSIONAL BARCHART ROUTINE.
C
C THE PARAMETER IS :
C
C <IMETHD> IS AN INTEGER REFERING TO THE METHOD OF
C COLOURING :
C 0 : NO COLOURING TO BE DONE.
C 1 : EACH X BAR DIFFERENT COLOUR.
C 2 : EACH Y BAR DIFFERENT COLOUR.
C 3 : EACH SURFACE OF BAR DIFFERENT
C COLOUR.
C 4 : EACH BAR DIFFERENT COLOUR.
C
C <T0BKLM> HOLDS THE COLOURING METHOD SHARED WITH THE
C BARCHART ROUTINE; <T0TRAC>/<T0TRAI> HOLD TRACE SETTINGS.
COMMON /T0BKLM/ KMTHOD
COMMON /T0TRAC/ IPRINT
COMMON /T0TRAI/ ITRAC1,ITRAC2,ITRAC3,ITRAC4
C
C
C G3INIT(2) : PACKAGE INITIALISATION (SEE ROUTINE G3INIT).
C THE ARGUMENT IS RECORDED FOR TRACING BEFORE VALIDATION.
CALL G3INIT(2)
ITRAC1= IMETHD
IF (IPRINT.EQ.1) CALL G0MESG(172,5)
C AN OUT-OF-RANGE METHOD IS IGNORED : KMTHOD KEEPS ITS
C PREVIOUS VALUE AND THE ROUTINE RETURNS SILENTLY.
IF (IMETHD.LT.0.OR.IMETHD.GT.4) RETURN
C
KMTHOD= IMETHD
C
RETURN
END
|
package com.acornui.component
import com.acornui.assertionsEnabled
import com.acornui.string.toRadix
import org.junit.Before
import org.junit.Test
import kotlin.test.assertEquals
import kotlin.test.assertFailsWith
import kotlin.test.assertFalse
class ValidationGraphTest {
private val ONE: Int = 1 shl 0
private val TWO: Int = 1 shl 1
private val THREE: Int = 1 shl 2
private val FOUR: Int = 1 shl 3
private val FIVE: Int = 1 shl 4
private val SIX: Int = 1 shl 5
private val SEVEN: Int = 1 shl 6
private val EIGHT: Int = 1 shl 7
private val NINE: Int = 1 shl 8
private lateinit var n: ValidationGraph
@Before fun before() {
// Internal graph assertions must be active for the failure tests below.
assertionsEnabled = true
// Dependency chains: ONE <- TWO <- {THREE <- FOUR, FIVE <- SIX, SEVEN}.
n = validationGraph {
addNode(ONE, {})
addNode(TWO, ONE, {})
addNode(THREE, TWO, {})
addNode(FOUR, THREE, {})
addNode(FIVE, TWO, {})
addNode(SIX, FIVE, {})
addNode(SEVEN, TWO, {})
}
}
@Test fun invalidate() {
n.validate()
val f = n.invalidate(FIVE)
assertEquals(FIVE or SIX, f)
n.assertIsValid(ONE, TWO, THREE, FOUR, SEVEN)
n.assertIsNotValid(FIVE, SIX)
n.validate()
val f2 = n.invalidate(TWO)
assertEquals(TWO or THREE or FOUR or FIVE or SIX or SEVEN, f2)
n.assertIsValid(ONE)
n.assertIsNotValid(TWO, THREE, FOUR, FIVE, SIX, SEVEN)
}
@Test fun validate() {
val f = n.validate(SEVEN or THREE)
assertEquals(ONE or TWO or THREE or SEVEN, f)
n.assertIsValid(ONE, TWO, THREE, SEVEN)
n.assertIsNotValid(FIVE, SIX, FOUR)
val iF = n.invalidate(FOUR)
assertEquals(0, iF)
// No change
n.assertIsValid(ONE, TWO, THREE, SEVEN)
n.assertIsNotValid(FIVE, SIX, FOUR)
val iF2 = n.invalidate(THREE)
assertEquals(THREE, iF2)
n.assertIsValid(ONE, TWO, SEVEN)
n.assertIsNotValid(THREE, FOUR, FIVE, SIX)
val f2 = n.validate(THREE)
assertEquals(THREE, f2)
n.assertIsValid(ONE, TWO, THREE, SEVEN)
n.assertIsNotValid(FOUR, FIVE, SIX)
val f3 = n.validate()
assertEquals(FOUR or FIVE or SIX, f3)
n.assertIsValid(ONE, TWO, THREE, FOUR, FIVE, SIX, SEVEN)
}
@Test fun dependents() {
n.addNode(EIGHT, dependencies = FOUR, dependents = FIVE) {}
n.validate(FIVE)
n.assertIsValid(FIVE, EIGHT, FOUR, TWO)
n.invalidate(FIVE)
n.assertIsValid(TWO, FOUR, EIGHT)
n.assertIsNotValid(FIVE)
n.invalidate(FOUR)
n.assertIsValid(TWO)
n.assertIsNotValid(FOUR, FIVE, EIGHT)
n.validate(EIGHT)
n.assertIsValid(TWO, FOUR, EIGHT)
n.assertIsNotValid(FIVE)
n.validate(FIVE)
n.assertIsValid(TWO, FOUR, FIVE, EIGHT)
n.invalidate(EIGHT)
n.assertIsNotValid(FIVE, EIGHT)
n.assertIsValid(TWO, FOUR)
}
@Test fun dependencyAssertion() {
assertFailsWith(Exception::class) {
n.addNode(EIGHT, NINE, {})
}
}
@Test fun powerOfTwoAssertion() {
assertFailsWith(IllegalArgumentException::class) {
n.addNode(3, {})
}
}
@Test fun textComponentsBug() {
val validation = validationGraph {
ValidationFlags.apply {
addNode(STYLES, {})
addNode(PROPERTIES, STYLES, {})
addNode(SIZE_CONSTRAINTS, PROPERTIES, {})
addNode(LAYOUT, PROPERTIES or SIZE_CONSTRAINTS, {})
addNode(TRANSFORM, {})
addNode(CONCATENATED_TRANSFORM, TRANSFORM, {})
addNode(COLOR_TRANSFORM, {})
addNode(CONCATENATED_COLOR_TRANSFORM, COLOR_TRANSFORM, {})
addNode(INTERACTIVITY_MODE, {})
addNode(HIERARCHY_ASCENDING, PROPERTIES, {})
addNode(HIERARCHY_DESCENDING, PROPERTIES, {})
}
}
validation.addNode(ValidationFlags.RESERVED_1, 0, ValidationFlags.STYLES or ValidationFlags.LAYOUT, {})
validation.validate()
validation.invalidate(ValidationFlags.RESERVED_1)
assertFalse(validation.isValid(ValidationFlags.STYLES))
}
@Test fun dependenciesCanBeValidated() {
val t = validationGraph {
addNode(ONE, { validate(TWO) })
addNode(TWO, 0, ONE, {})
}
t.validate()
}
private fun ValidationGraph.assertIsValid(vararg flags: Int) {
for (flag in flags) {
assertEquals(true, isValid(flag), "flag ${flag.toRadix(2)} is not valid")
}
}
private fun ValidationGraph.assertIsNotValid(vararg flags: Int) {
for (flag in flags) {
assertEquals(false, isValid(flag), "flag ${flag.toRadix(2)} is valid")
}
}
}
|
package send
import (
"bytes"
"context"
"errors"
"fmt"
"io/ioutil"
"math/rand"
"os"
"path/filepath"
"runtime"
"strings"
"testing"
"time"
"cdr.dev/grip/level"
"cdr.dev/grip/message"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
// SenderSuite exercises the shared Sender contract against every Sender
// implementation registered in SetupTest.
type SenderSuite struct {
	// senders maps a short fixture name to the Sender under test.
	senders map[string]Sender
	// rand supplies randomized fixture data (names, payloads).
	rand *rand.Rand
	// tempDir holds file-backed sender output; recreated per test.
	tempDir string
	suite.Suite
}
func TestSenderSuite(t *testing.T) {
suite.Run(t, new(SenderSuite))
}
// SetupSuite seeds the fixture RNG and creates the scratch directory
// shared by all tests in the suite.
func (s *SenderSuite) SetupSuite() {
	s.rand = rand.New(rand.NewSource(time.Now().Unix()))

	dir, err := ioutil.TempDir("", "sender-test-")
	s.Require().NoError(err)
	s.tempDir = dir
}
// SetupTest (re)creates the scratch directory and registers one fixture
// instance of every Sender implementation under test, keyed by a short
// descriptive name in s.senders.
func (s *SenderSuite) SetupTest() {
	s.Require().NoError(os.MkdirAll(s.tempDir, 0766))
	// Default send/threshold levels used by most fixtures below.
	l := LevelInfo{level.Info, level.Notice}
	// Service-backed senders are constructed directly (not via their
	// networked constructors) so no external connection is attempted.
	s.senders = map[string]Sender{
		"slack": &slackJournal{Base: NewBase("slack")},
		"xmpp":  &xmppLogger{Base: NewBase("xmpp")},
		"buildlogger": &buildlogger{
			Base: NewBase("buildlogger"),
			conf: &BuildloggerConfig{Local: MakeNative()},
		},
	}

	internal := MakeInternalLogger()
	internal.name = "internal"
	internal.output = make(chan *InternalMessage)
	s.senders["internal"] = internal

	native, err := NewNativeLogger("native", l)
	s.Require().NoError(err)
	s.senders["native"] = native
	// The writer sender wraps the native sender created just above.
	s.senders["writer"] = NewWriterSender(native)

	var plain, plainerr, plainfile Sender
	plain, err = NewPlainLogger("plain", l)
	s.Require().NoError(err)
	s.senders["plain"] = plain

	plainerr, err = NewPlainErrorLogger("plain.err", l)
	s.Require().NoError(err)
	s.senders["plain.err"] = plainerr

	plainfile, err = NewPlainFileLogger("plain.file", filepath.Join(s.tempDir, "plain.file"), l)
	s.Require().NoError(err)
	s.senders["plain.file"] = plainfile

	var asyncOne, asyncTwo Sender
	asyncOne, err = NewNativeLogger("async-one", l)
	s.Require().NoError(err)
	asyncTwo, err = NewNativeLogger("async-two", l)
	s.Require().NoError(err)
	s.senders["async"] = NewAsyncGroupSender(context.Background(), 16, asyncOne, asyncTwo)

	nativeErr, err := NewErrorLogger("error", l)
	s.Require().NoError(err)
	s.senders["error"] = nativeErr

	nativeFile, err := NewFileLogger("native-file", filepath.Join(s.tempDir, "file"), l)
	s.Require().NoError(err)
	s.senders["native-file"] = nativeFile

	callsite, err := NewCallSiteConsoleLogger("callsite", 1, l)
	s.Require().NoError(err)
	s.senders["callsite"] = callsite

	callsiteFile, err := NewCallSiteFileLogger("callsite", filepath.Join(s.tempDir, "cs"), 1, l)
	s.Require().NoError(err)
	s.senders["callsite-file"] = callsiteFile

	stream, err := NewStreamLogger("stream", &bytes.Buffer{}, l)
	s.Require().NoError(err)
	s.senders["stream"] = stream

	jsons, err := NewJSONConsoleLogger("json", LevelInfo{level.Info, level.Notice})
	s.Require().NoError(err)
	s.senders["json"] = jsons

	jsonf, err := NewJSONFileLogger("json", filepath.Join(s.tempDir, "js"), l)
	s.Require().NoError(err)
	// NOTE(review): this reuses the "json" key and therefore replaces the
	// console JSON logger registered just above, so the console variant is
	// never exercised by the suite. Possibly intended to be a distinct key
	// such as "json-file" -- confirm before changing (TearDownTest closes
	// the "json" entry on Windows, expecting the file-backed logger).
	s.senders["json"] = jsonf

	var sender Sender
	multiSenders := []Sender{}
	for i := 0; i < 4; i++ {
		sender, err = NewNativeLogger(fmt.Sprintf("native-%d", i), l)
		s.Require().NoError(err)
		multiSenders = append(multiSenders, sender)
	}

	multi, err := NewMultiSender("multi", l, multiSenders)
	s.Require().NoError(err)
	s.senders["multi"] = multi

	// Slack/XMPP fixtures with mocked clients so Send() can be called
	// without network access.
	slackMocked, err := NewSlackLogger(&SlackOptions{
		client:   &slackClientMock{},
		Hostname: "testhost",
		Channel:  "#test",
		Name:     "smoke",
	}, "slack", LevelInfo{level.Info, level.Notice})
	s.Require().NoError(err)
	s.senders["slack-mocked"] = slackMocked

	xmppMocked, err := NewXMPPLogger("xmpp", "target",
		XMPPConnectionInfo{client: &xmppClientMock{}},
		LevelInfo{level.Info, level.Notice})
	s.Require().NoError(err)
	s.senders["xmpp-mocked"] = xmppMocked

	bufferedInternal, err := NewNativeLogger("buffered", l)
	s.Require().NoError(err)
	s.senders["buffered"] = NewBufferedSender(bufferedInternal, minInterval, 1)

	annotatingBase, err := NewNativeLogger("async-one", l)
	s.Require().NoError(err)
	s.senders["annotating"] = NewAnnotatingSender(annotatingBase, map[string]interface{}{
		"one":    1,
		"true":   true,
		"string": "string",
	})

	// In-memory senders at several ring-buffer capacities.
	for _, size := range []int{1, 100, 10000, 1000000} {
		name := fmt.Sprintf("inmemory-%d", size)
		s.senders[name], err = NewInMemorySender(name, l, size)
		s.Require().NoError(err)
		s.NoError(s.senders[name].SetFormatter(MakeDefaultFormatter()))
	}
}
// TearDownTest removes the scratch directory. On Windows the file-backed
// senders must be closed first so their open handles do not block removal;
// close errors are deliberately ignored (best effort).
func (s *SenderSuite) TearDownTest() {
	if runtime.GOOS == "windows" {
		for _, name := range []string{"native-file", "callsite-file", "json", "plain.file"} {
			_ = s.senders[name].Close()
		}
	}

	s.Require().NoError(os.RemoveAll(s.tempDir))
}
// functionalMockSenders returns the registered senders that can actually
// deliver messages in tests, excluding the service-backed fixtures that
// were constructed without working clients.
func (s *SenderSuite) functionalMockSenders() map[string]Sender {
	excluded := map[string]bool{
		"slack":       true,
		"internal":    true,
		"xmpp":        true,
		"buildlogger": true,
	}

	out := map[string]Sender{}
	for name, sender := range s.senders {
		if excluded[name] || strings.HasPrefix(name, "github") {
			continue
		}
		out[name] = sender
	}
	return out
}
// TearDownSuite closes the "internal" sender, which is not closed by the
// per-test teardown.
func (s *SenderSuite) TearDownSuite() {
	s.NoError(s.senders["internal"].Close())
}
// TestSenderImplementsInterface asserts, via testify, that every registered
// fixture satisfies the Sender interface.
func (s *SenderSuite) TestSenderImplementsInterface() {
	// this actually won't catch the error; the compiler will in
	// the fixtures, but either way we need to make sure that the
	// tests actually enforce this.
	for name, sender := range s.senders {
		s.Implements((*Sender)(nil), sender, name)
	}
}
const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()"
func randomString(n int, r *rand.Rand) string {
b := make([]byte, n)
for i := range b {
b[i] = letters[r.Int63()%int64(len(letters))]
}
return string(b)
}
// TestNameSetterRoundTrip verifies that SetName/Name round-trip for every
// sender, using randomized names that cannot collide with the current one.
func (s *SenderSuite) TestNameSetterRoundTrip() {
	for fixture, sender := range s.senders {
		for attempt := 0; attempt < 100; attempt++ {
			next := randomString(12, s.rand)
			s.NotEqual(sender.Name(), next, fixture)
			sender.SetName(next)
			s.Equal(sender.Name(), next, fixture)
		}
	}
}
// TestLevelSetterRejectsInvalidSettings verifies that SetLevel rejects
// invalid LevelInfo values and leaves the sender's previous (valid) level
// untouched.
func (s *SenderSuite) TestLevelSetterRejectsInvalidSettings() {
	// Every entry below is an invalid LevelInfo (Valid() reports false).
	levels := []LevelInfo{
		{level.Invalid, level.Invalid},
		{level.Priority(-10), level.Priority(-1)},
		{level.Debug, level.Priority(-1)},
		{level.Priority(800), level.Priority(-2)},
	}

	for n, sender := range s.senders {
		if n == "async" {
			// the async sender doesn't meaningfully have
			// its own level because it passes this down
			// to its constituent senders.
			continue
		}

		// Start from a known-good level, then confirm each invalid
		// candidate is rejected without clobbering it.
		s.NoError(sender.SetLevel(LevelInfo{level.Debug, level.Alert}))
		for _, l := range levels {
			s.True(sender.Level().Valid(), n)
			s.False(l.Valid(), n)

			s.Error(sender.SetLevel(l), n)

			s.True(sender.Level().Valid(), n)
			s.NotEqual(sender.Level(), l, n)
		}
	}
}
// TestCloserShouldUsuallyNoop checks that Close is safe to call on every
// fixture and returns nil in the common case.
func (s *SenderSuite) TestCloserShouldUsuallyNoop() {
	for fixture, sender := range s.senders {
		s.NoError(sender.Close(), fixture)
	}
}
// TestBasicNoopSendTest pushes messages across the full priority range
// (including out-of-range values) through each functional sender; Send
// must tolerate all of them without error or panic.
func (s *SenderSuite) TestBasicNoopSendTest() {
	for _, sender := range s.functionalMockSenders() {
		for priority := -10; priority <= 110; priority += 5 {
			text := "hello world! " + randomString(10, s.rand)
			sender.Send(message.NewDefaultMessage(level.Priority(priority), text))
		}
	}
}
// TestBaseConstructor checks that MakeBase and NewBase produce equivalent
// Base values, and that error handlers wired to a sink actually receive the
// errors handed to them.
func TestBaseConstructor(t *testing.T) {
	assert := assert.New(t)

	// The sink collects everything the error handler forwards.
	sink, err := NewInternalLogger("sink", LevelInfo{level.Debug, level.Debug})
	assert.NoError(err)
	handler := ErrorHandlerFromSender(sink)
	assert.Equal(0, sink.Len())
	assert.False(sink.HasMessage())

	for _, n := range []string{"logger", "grip", "sender"} {
		made := MakeBase(n, func() {}, func() error { return nil })
		newed := NewBase(n)
		assert.Equal(made.name, newed.name)
		assert.Equal(made.level, newed.level)
		assert.Equal(made.closer(), newed.closer())

		for _, s := range []*Base{made, newed} {
			// nil formatters/handlers must be rejected; a real
			// handler is accepted and then invoked once.
			assert.Error(s.SetFormatter(nil))
			assert.Error(s.SetErrorHandler(nil))
			assert.NoError(s.SetErrorHandler(handler))
			s.ErrorHandler()(errors.New("failed"), message.NewString("fated"))
		}
	}

	// Two Base values per iteration, three iterations: six errors total.
	assert.Equal(6, sink.Len())
	assert.True(sink.HasMessage())
}
|
# frozen_string_literal: true
require "rails_helper"
# System spec for the instrument "Relay" admin tab: creating a relay,
# editing an existing one, validation errors, and switching between the
# three control mechanisms (reservation only / timer / timer with relay).
RSpec.describe "Instrument Relay Tab" do
  let(:facility) { FactoryBot.create(:setup_facility) }
  let(:user) { FactoryBot.create(:user, :administrator) }

  before do
    login_as user
    visit facility_instrument_relays_path(facility, instrument)
  end

  context "the instrument has no relay" do
    let(:instrument) { FactoryBot.create(:instrument, no_relay: true, facility: facility) }

    it "renders the page" do
      expect(page).to have_content("Reservation only")
    end

    context "creating a new relay" do
      before do
        click_link "Edit"
      end

      it "renders the page" do
        expect(page.current_path).to eq new_facility_instrument_relay_path(facility, instrument, instrument.relay)
      end

      it "saves a new relay" do
        select "Timer with relay", from: "Control mechanism"
        fill_in "relay_ip", with: "123.456.789"
        fill_in "relay_ip_port", with: "1234"
        fill_in "relay_outlet", with: "1"
        fill_in "relay_username", with: "root"
        fill_in "relay_password", with: "root"
        fill_in "relay_mac_address", with: "123abc456"
        fill_in "relay_building_room_number", with: "1a"
        fill_in "relay_circuit_number", with: "1"
        fill_in "relay_ethernet_port_number", with: "2000"
        click_button "Save"

        instrument.reload
        expect(instrument.relay).to be_present
        expect(instrument.relay.ip).to eq("123.456.789")
        expect(instrument.relay.ip_port).to eq(1234)
        expect(instrument.relay.outlet).to eq(1)
        expect(instrument.relay.username).to eq("root")
        expect(instrument.relay.password).to eq("root")
        expect(instrument.relay.mac_address).to eq("123abc456")
        expect(instrument.relay.building_room_number).to eq("1a")
        expect(instrument.relay.circuit_number).to eq("1")
        expect(instrument.relay.ethernet_port_number).to eq(2000)
      end

      it "raises an error if there's no outlet entered" do
        select "Timer with relay", from: "Control mechanism"
        fill_in "relay_ip", with: "123.456.789"
        fill_in "relay_ip_port", with: "1234"
        fill_in "relay_username", with: "root"
        fill_in "relay_password", with: "root"
        click_button "Save"

        expect(page).to have_content("Outlet may not be blank")
        expect(page).to have_content("Outlet is not a valid number")
      end

      it "raises an error if there's no ip entered" do
        select "Timer with relay", from: "Control mechanism"
        fill_in "relay_outlet", with: "1"
        fill_in "relay_ip_port", with: "1234"
        fill_in "relay_username", with: "root"
        fill_in "relay_password", with: "root"
        click_button "Save"

        expect(page).to have_content("IP Address may not be blank")
      end

      it "raises an error if there's no username or password entered" do
        select "Timer with relay", from: "Control mechanism"
        fill_in "relay_ip", with: "123.456.789"
        fill_in "relay_ip_port", with: "1234"
        fill_in "relay_outlet", with: "1"
        click_button "Save"

        expect(page).to have_content("Username may not be blank")
        expect(page).to have_content("Password may not be blank")
      end

      # Facility directors (not just administrators) may manage relays.
      context "a director can create a relay" do
        let(:user) { FactoryBot.create(:user, :facility_director, facility: facility) }

        it "saves a new relay" do
          select "Timer with relay", from: "Control mechanism"
          fill_in "relay_ip", with: "123.456.789"
          fill_in "relay_ip_port", with: "1234"
          fill_in "relay_outlet", with: "1"
          fill_in "relay_username", with: "root"
          fill_in "relay_password", with: "root"
          fill_in "relay_mac_address", with: "123abc456"
          fill_in "relay_building_room_number", with: "1a"
          fill_in "relay_circuit_number", with: "1"
          fill_in "relay_ethernet_port_number", with: "2000"
          click_button "Save"

          instrument.reload
          expect(instrument.relay).to be_present
          expect(instrument.relay.ip).to eq("123.456.789")
          expect(instrument.relay.ip_port).to eq(1234)
          expect(instrument.relay.outlet).to eq(1)
          expect(instrument.relay.username).to eq("root")
          expect(instrument.relay.password).to eq("root")
          expect(instrument.relay.mac_address).to eq("123abc456")
          expect(instrument.relay.building_room_number).to eq("1a")
          expect(instrument.relay.circuit_number).to eq("1")
          expect(instrument.relay.ethernet_port_number).to eq(2000)
        end
      end

      # Relay ip/port/outlet must be unique across instruments, unless the
      # instruments share a schedule.
      context "the relay has already been taken by a different instrument" do
        let!(:instrument2) { create(:instrument, facility: facility, no_relay: true) }
        let!(:existing_relay) { create(:relay_synb, instrument: instrument2) }

        it "raises an error" do
          select "Timer with relay", from: "Control mechanism"
          fill_in "relay_ip", with: existing_relay.ip
          fill_in "relay_ip_port", with: existing_relay.ip_port
          fill_in "relay_outlet", with: existing_relay.outlet
          fill_in "relay_username", with: "root"
          fill_in "relay_password", with: "root"
          click_button "Save"

          expect(page).to have_content("Outlet has already been taken")
        end

        context "both instruments have the same schedule" do
          let!(:instrument2) { create(:instrument, facility: facility, no_relay: true, schedule: instrument.schedule) }
          let!(:existing_relay) { create(:relay_syna, instrument: instrument2) }

          it "saves the relay" do
            select "Timer with relay", from: "Control mechanism"
            select "Synaccess Revision A", from: "Relay Type"
            fill_in "relay_ip", with: existing_relay.ip
            fill_in "relay_ip_port", with: existing_relay.ip_port
            fill_in "relay_outlet", with: existing_relay.outlet
            fill_in "relay_username", with: "root"
            fill_in "relay_password", with: "root"
            click_button "Save"

            expect(page).to have_content("Relay was successfully updated.")
          end
        end
      end
    end
  end

  context "editing an existing relay" do
    let(:instrument) { FactoryBot.create(:setup_instrument, facility: facility, relay: build(:relay)) }

    before do
      click_link "Edit"
    end

    it "renders the page" do
      expect(page.current_path).to eq edit_facility_instrument_relay_path(facility, instrument, instrument.relay)
    end

    it "can be saved" do
      select "Timer with relay", from: "Control mechanism"
      fill_in "relay_ip", with: "123.456.789"
      fill_in "relay_ip_port", with: "1234"
      fill_in "relay_outlet", with: "1"
      fill_in "relay_username", with: "root"
      fill_in "relay_password", with: "root"
      fill_in "relay_mac_address", with: "123abc456"
      fill_in "relay_building_room_number", with: "1a"
      fill_in "relay_circuit_number", with: "1"
      fill_in "relay_ethernet_port_number", with: "2000"
      click_button "Save"

      instrument.reload
      expect(instrument.relay.ip).to eq("123.456.789")
      expect(instrument.relay.ip_port).to eq(1234)
      expect(instrument.relay.outlet).to eq(1)
      expect(instrument.relay.username).to eq("root")
      expect(instrument.relay.password).to eq("root")
      expect(instrument.relay.mac_address).to eq("123abc456")
      expect(instrument.relay.building_room_number).to eq("1a")
      expect(instrument.relay.circuit_number).to eq("1")
      expect(instrument.relay.ethernet_port_number).to eq(2000)
    end
  end

  # Switching the control mechanism converts between a real relay record,
  # a RelayDummy (timer without relay), and no relay at all.
  context "switching relay types" do
    let(:instrument) { FactoryBot.create(:setup_instrument, facility: facility, relay: build(:relay)) }

    before do
      click_link "Edit"
    end

    context "from relay to timer" do
      it "it deletes the relay and assigns the instrument a RelayDummy" do
        select "Timer without relay", from: "Control mechanism"
        click_button "Save"

        instrument.reload
        expect(instrument.relay).to be_a(RelayDummy)
      end
    end

    context "from relay to reservation only" do
      it "deletes the relay" do
        select "Reservation only", from: "Control mechanism"
        click_button "Save"

        instrument.reload
        expect(instrument.relay).not_to be_present
      end
    end

    context "from reservation only to timer" do
      let(:instrument) { FactoryBot.create(:instrument, facility: facility, no_relay: true) }

      it "sets the instrument relay to a RelayDummy" do
        select "Timer without relay", from: "Control mechanism"
        click_button "Save"

        instrument.reload
        expect(instrument.relay).to be_a(RelayDummy)
      end
    end

    context "from timer to reservation only" do
      let(:instrument) { FactoryBot.create(:setup_instrument, facility: facility) }

      it "deletes the relay dummy" do
        select "Reservation only", from: "Control mechanism"
        click_button "Save"

        instrument.reload
        expect(instrument.relay).not_to be_present
      end
    end
  end
end
|
import React from 'react'
import PropTypes from 'prop-types'
import cx from 'classnames'
import TetherContent from '../../addons/BootTether'
import { getTetherAttachments, mapToCssModules, omit } from '../../lib/'
// All Tether attachment positions accepted by the `placement` prop.
// NOTE(review): the name is misspelled ("Attachements") but it is exported
// and therefore part of the public API -- renaming would break importers.
export const tetherAttachements = [
  'top',
  'bottom',
  'left',
  'right',
  'top left',
  'top center',
  'top right',
  'right top',
  'right middle',
  'right bottom',
  'bottom right',
  'bottom center',
  'bottom left',
  'left top',
  'left middle',
  'left bottom',
]

// Prop contract for the Popover component below.
const propTypes = {
  placement: PropTypes.oneOf(tetherAttachements),
  // id of the DOM element the popover attaches to (used as `#${target}`).
  target: PropTypes.string.isRequired,
  isOpen: PropTypes.bool,
  // Extra Tether options merged over the defaults.
  tether: PropTypes.object,
  tetherRef: PropTypes.func,
  className: PropTypes.string,
  cssModule: PropTypes.object,
  toggle: PropTypes.func,
}

const defaultProps = {
  isOpen: false,
  placement: 'bottom',
  toggle: () => {},
}

// Base Tether configuration; per-instance props are spread over this.
const defaultTetherConfig = {
  classPrefix: 'bs-tether',
  classes: {
    element: false,
    enabled: 'show',
  },
  constraints: [
    { to: 'scrollParent', attachment: 'together none' },
    { to: 'window', attachment: 'together none' },
  ],
}
class Popover extends React.Component {
constructor(props) {
super(props)
this.getTetherConfig = this.getTetherConfig.bind(this)
}
getTetherConfig() {
const attachments = getTetherAttachments(this.props.placement)
return {
...defaultTetherConfig,
...attachments,
target: `#${this.props.target}`,
...this.props.tether,
}
}
render() {
if (!this.props.isOpen) {
return null
}
const tetherConfig = this.getTetherConfig()
const classes = mapToCssModules(cx(
'popover-inner',
this.props.className,
), this.props.cssModule)
const attributes = omit(this.props, Object.keys(propTypes))
return (
<TetherContent
className={mapToCssModules('popover', this.props.cssModule)}
tether={tetherConfig}
tetherRef={this.props.tetherRef}
isOpen={this.props.isOpen}
toggle={this.props.toggle}
>
<div {...attributes} className={classes} />
</TetherContent>
)
}
}
Popover.propTypes = propTypes
Popover.defaultProps = defaultProps
export default Popover
|
## 获取验证码的 button 计时器
---
# LYButtonTimer
-----------
> Sunshine_ly
### 简单用法
```objc
- (void)buttonAction:(UIButton *)button
{
button.time = 10;
button.format = @"%ld秒后重试";
[button startTimer];
}
```
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implements the interface of the results_processor module.
Provides functions to parse command line arguments and process options.
"""
import argparse
import datetime
import logging
import os
import re
import sys
from py_utils import cloud_storage
from core.results_processor import formatters
from core.results_processor import util
def ArgumentParser(standalone=False):
  """Create an ArgumentParser defining options required by the processor.

  Args:
    standalone: If True, build the full command line interface of the
      standalone results processor (output options are required). Otherwise
      build a parser that only contributes a "Result processor options"
      group, intended to be composed into Telemetry's own parser.

  Returns:
    An argparse.ArgumentParser instance.
  """
  all_output_formats = list(formatters.FORMATTERS.keys())
  if not standalone:
    # 'none' is only meaningful when Telemetry drives output selection.
    all_output_formats.append('none')
  parser, group = _CreateTopLevelParser(standalone)
  parser.add_argument(
      '-v', '--verbose', action='count', dest='verbosity', default=0,
      help='Increase verbosity level (repeat as needed)')
  group.add_argument(
      '--output-format', action='append', dest='output_formats',
      metavar='FORMAT', choices=all_output_formats, required=standalone,
      help=Sentences(
          'Output format to produce.',
          'May be used multiple times to produce multiple outputs.',
          # Fixed typo: was 'Avaliable'.
          'Available formats: %(choices)s.',
          '' if standalone else 'Defaults to: html.'))
  group.add_argument(
      '--intermediate-dir', metavar='DIR_PATH', required=standalone,
      help=Sentences(
          'Path to a directory where intermediate results are stored.',
          '' if standalone else 'If not provided, the default is to create a '
          'new directory within "{output_dir}/artifacts/".'))
  group.add_argument(
      '--output-dir', default=_DefaultOutputDir(), metavar='DIR_PATH',
      help=Sentences(
          'Path to a directory where to write final results.',
          'Default: %(default)s.'))
  group.add_argument(
      '--max-values-per-test-case', type=int, metavar='NUM',
      # Fixed: these two sentences were adjacent string literals with no
      # separator, producing "...values.This includes..." in --help output.
      help=Sentences(
          'Fail a test run if it produces more than this number of values.',
          'This includes both ad hoc and metric generated measurements.'))
  group.add_argument(
      '--reset-results', action='store_true',
      help=Sentences(
          'Overwrite any previous output files in the output directory.',
          'The default is to append to existing results.'))
  group.add_argument(
      '--results-label', metavar='LABEL',
      help='Label to identify the results generated by this run.')
  group.add_argument(
      '--test-path-format', metavar='FORMAT',
      choices=[util.TELEMETRY_TEST_PATH_FORMAT, util.GTEST_TEST_PATH_FORMAT],
      default=util.TELEMETRY_TEST_PATH_FORMAT,
      help=Sentences(
          'How to interpret the testPath attribute.',
          'Available options: %(choices)s. Default: %(default)s.'))
  group.add_argument(
      '--trace-processor-path',
      help=Sentences('Path to trace processor shell.',
                     'Default: download a pre-built version from the cloud.'))
  group.add_argument(
      '--upload-results', action='store_true',
      help='Upload generated artifacts to cloud storage.')
  group.add_argument(
      '--upload-bucket', default='output', metavar='BUCKET',
      help=Sentences(
          'Storage bucket to use for uploading artifacts.',
          'Supported values are: %s; or a valid cloud storage bucket name.'
          % ', '.join(sorted(cloud_storage.BUCKET_ALIASES)),
          'Defaults to: %(default)s.'))
  group.add_argument(
      '--experimental-tbmv3-metrics', action='store_true',
      help='Enable running experimental TBMv3 metrics.')
  group.add_argument(
      '--fetch-power-profile',
      action='store_true',
      help=('Specify this if you want to run proxy power metrics that use '
            'device power profiles.'))
  group.add_argument(
      '--extra-metric', action='append', dest='extra_metrics', metavar='METRIC',
      help=('Compute an extra metric on the test results. Metric should have '
            'the form "version:name", e.g. "tbmv3:power_rails_metric". '
            'Can be used multiple times.'))
  group.add_argument(
      '--is-unittest',
      action='store_true',
      help='Is running inside a unittest.')
  return parser
def ProcessOptions(options):
  """Adjust result processing options as needed before running benchmarks.

  Note: The intended scope of this function is limited to only adjust options
  defined by the ArgumentParser above. One should not attempt to read or
  modify any other attributes that the options object may have.

  Currently the main job of this function is to tease out and separate output
  formats to be handled by the results processor, from those that should fall
  back to the legacy output formatters in Telemetry.

  Args:
    options: An options object with values parsed from the command line.
  """
  # Map verbosity count to a root-logger level.
  if options.verbosity >= 2:
    log_level = logging.DEBUG
  elif options.verbosity == 1:
    log_level = logging.INFO
  else:
    log_level = logging.WARNING
  logging.getLogger().setLevel(log_level)

  # The output_dir option is None or missing if the selected Telemetry
  # command does not involve output generation, e.g. "run_benchmark list",
  # and the argument parser defined above was not invoked.
  if getattr(options, 'output_dir', None) is None:
    return

  def _resolve(path):
    return os.path.realpath(os.path.expanduser(path))

  options.output_dir = _resolve(options.output_dir)

  if options.intermediate_dir:
    options.intermediate_dir = _resolve(options.intermediate_dir)
  else:
    # Derive a filesystem-safe directory name from the label (or 'run').
    if options.results_label:
      label = re.sub(r'\W+', '_', options.results_label)
    else:
      label = 'run'
    timestamp = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
    options.intermediate_dir = os.path.join(
        options.output_dir, 'artifacts', '%s_%s' % (label, timestamp))

  if options.upload_results:
    # Expand well-known bucket aliases; pass unknown names through.
    options.upload_bucket = cloud_storage.BUCKET_ALIASES.get(
        options.upload_bucket, options.upload_bucket)
  else:
    options.upload_bucket = None

  if not options.output_formats:
    options.output_formats = ['html']
  else:
    formats = sorted(set(options.output_formats))
    if 'none' in formats:
      formats.remove('none')
    options.output_formats = formats
def _CreateTopLevelParser(standalone):
"""Create top level parser, and group for result options."""
if standalone:
parser = argparse.ArgumentParser(
description='Standalone command line interface to results_processor.')
# In standalone mode, both the parser and group are the same thing.
return parser, parser
else:
parser = argparse.ArgumentParser(add_help=False)
group = parser.add_argument_group(title='Result processor options')
return parser, group
def _DefaultOutputDir():
"""Default output directory.
Points to the directory of the benchmark runner script, if found, or the
current working directory otherwise.
"""
main_module = sys.modules['__main__']
if hasattr(main_module, '__file__'):
return os.path.realpath(os.path.dirname(main_module.__file__))
else:
return os.getcwd()
def Sentences(*args):
  """Join the non-empty string arguments with single spaces."""
  return ' '.join(filter(None, args))
|
// Express router exposing CRUD endpoints for food items; all database work
// is delegated to the shared crud middleware with FoodModel.
var express = require('express');
var router = express.Router();

const FoodModel = require('../models/food_items');
var users = require('../controllers/users');
var crud = require('../middlewares/crud');

// Health-check endpoint.
router.get('/test', (req, res) => {
  res.send('Food Route is Up and Running.');
});

// List every food item.
router.get('/allFoods', (req, res) => crud.getAllEntriesv2(req, res, FoodModel));

// Fetch a single food item by id.
router.get('/FoodByID', (req, res) => crud.getEntryByID(req, res, FoodModel));

// Create a new food item.
router.post('/saveFood', (req, res) => crud.createEntry(req, res, FoodModel));

// Update an existing food item.
router.put('/editFood', (req, res) => crud.updateEntryByID(req, res, FoodModel));

// Remove a food item.
router.delete('/deleteFood', (req, res) => crud.deleteEntryByID(req, res, FoodModel));

module.exports = router;
|
import request from "@/utils/request";
/**
 * Fetch the material list, optionally scoped to a parent folder.
 *
 * (The previous JSDoc documented pageSize/currentPage parameters that do
 * not exist on this function.)
 *
 * @param {number} [ownMaterial=1] - Flag forwarded as the `ownMaterial`
 *   query parameter; semantics are server-defined.
 * @param {?(string|number)} [parentId=null] - Folder id to list; omitted
 *   from the query when falsy.
 * @returns {Promise} Promise returned by the shared request helper.
 */
export function getList(ownMaterial = 1, parentId = null) {
  var url = `/material?ownMaterial=${ownMaterial}`;
  if (parentId) url += `&parentId=${parentId}`;
  return request({
    url,
    method: "get"
  });
}
/** Upload a file to the material endpoint as multipart form data. */
export function uploadFile(data) {
  return request({
    url: "/material",
    method: "post",
    data,
    headers: {
      "Content-Type": "multipart/form-data"
    }
  });
}

/** Download the material with the given id. */
export function downloadMaterial(id) {
  return request({ url: `/material/downloadMaterial/${id}`, method: "get" });
}

/** Rename a material entry (the server stores the name in `source`). */
export function ReName(id, name) {
  return request({
    url: `/material/${id}`,
    method: "put",
    data: { source: name }
  });
}

/** Create a folder; a folderId of 0 is normalized to null (root). */
export function createFolder(name = "", folderId = null) {
  if (folderId === 0) folderId = null;
  return request({
    url: "/material",
    method: "post",
    data: {
      type: 2,
      folder_id: folderId,
      source: name
    }
  });
}

/** Delete the material with the given id. */
export function destroy(id) {
  return request({ url: `/material/${id}`, method: "delete" });
}
// Shared POST helper for the auth/de-auth endpoints, all of which send the
// same { id, targetId } payload.
function postAuth(url, id, targetId) {
  return request({
    url,
    data: { id, targetId },
    method: "post"
  });
}

/** Grant a unit access to a material. */
export function authUnit(id, unitId) {
  return postAuth(`/material/authUnit`, id, unitId);
}

/** Grant a user access to a material. */
export function authUser(id, userId) {
  return postAuth(`/material/authUser`, id, userId);
}

/** List the units authorized on a material. */
export function getAuthUnits(id) {
  return request({ url: `/material/getAuthUnits/${id}`, method: "get" });
}

/** List the users authorized on a material. */
export function getAuthUsers(id) {
  return request({ url: `/material/getAuthUsers/${id}`, method: "get" });
}

/** Revoke a unit's access to a material. */
export function deAuthUnit(id, unitId) {
  return postAuth(`/material/deAuthUnit`, id, unitId);
}

/** Revoke a user's access to a material. */
export function deAuthUser(id, userId) {
  return postAuth(`/material/deAuthUser`, id, userId);
}
|
%%%=============================================================================
%% Copyright 2013 Klarna AB
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @copyright 2013 Klarna AB
%% @author Alexander Dergachev <alexander.dergachev@klarna.com>
%%
%% @doc JESSE (JSon Schema Erlang)
%%
%% This is an interface module which provides an access to the main
%% functionality of jesse, such as 1) updating of the schema definitions cache;
%% 2) validation json data against a schema.
%% @end
%%%=============================================================================
-module(jesse).
%% API
-export([ add_schema/2
, add_schema/3
, del_schema/1
, load_schemas/2
, load_schemas/4
, validate/2
, validate/3
, validate_with_schema/2
, validate_with_schema/3
, validate_with_accumulator/2
, validate_with_accumulator/3
, validate_with_accumulator/4
, validate_with_accumulator/5
, explain_errors/1
, explain_errors/2
]).
-export_type([ json_term/0
]).
-type accumulator() :: fun(( jesse_json_path:path()
, jesse_schema_validator:error()
, term() ) -> term()).
-type parser() :: fun((binary()) -> json_term()).
-type encoder() :: fun((json_term()) -> iolist()).
-type json_term() :: term().
-type error() :: {error, jesse_schema_validator:error()}.
%%% API
%% @doc Stores the schema definition `Schema' in the in-memory storage under
%% the key `Key', replacing any previously stored schema with that key.
-spec add_schema(Key :: any(), Schema :: json_term()) -> ok | error().
add_schema(Key, Schema) ->
  jesse_database:add( Schema
                    , fun jesse_schema_validator:is_json_object/1
                    , fun(_Schema) -> Key end
                    ).
%% @doc Equivalent to `add_schema/2', but `Schema' is a binary string, and
%% the third argument is a parse function to convert the binary string to
%% a supported internal representation of json. Parse failures are returned
%% as `{error, {schema_error, {parse_error, Reason}}}'.
-spec add_schema( Key      :: any()
                , Schema   :: binary()
                , ParseFun :: parser()
                ) -> ok | error().
add_schema(Key, Schema, ParseFun) ->
  case try_parse(ParseFun, Schema) of
    {parse_error, _} = SError -> {error, {schema_error, SError}};
    ParsedSchema              -> add_schema(Key, ParsedSchema)
  end.
%% @doc Deletes a schema definition from in-memory storage associated with
%% the key `Key'. The actual removal is delegated to `jesse_database:delete/1'.
-spec del_schema(Key :: any()) -> ok.
del_schema(Key) ->
  jesse_database:delete(Key).
%% @doc Loads schema definitions from filesystem to in-memory storage.
%%
%% Equivalent to `load_schemas(Path, ParseFun, ValidationFun, MakeKeyFun)'
%% where `ValidationFun' is `fun jesse_schema_validator:is_json_object/1' and
%% `MakeKeyFun' is `fun jesse_schema_validator:get_schema_id/1'. In this case
%% the key will be the value of `id' attribute from the given schemas.
-spec load_schemas( Path     :: string()
                  , ParseFun :: parser()
                  ) -> jesse_database:update_result().
load_schemas(Path, ParseFun) ->
  load_schemas( Path
              , ParseFun
              , fun jesse_schema_validator:is_json_object/1
              , fun jesse_schema_validator:get_schema_id/1
              ).
%% @doc Loads schema definitions from filesystem to in-memory storage.
%% The function loads all the files from directory `Path', then each schema
%% entry will be checked for validity by function `ValidationFun', and
%% will be stored in in-memory storage with a key returned by `MakeKeyFun'
%% function.
%%
%% In addition to a schema definition, a timestamp of the schema file will be
%% stored, so, during the next update timestamps will be compared to avoid
%% unnecessary updates.
%%
%% Schema definitions are stored in the format which json parsing function
%% `ParseFun' returns.
%%
%% NOTE: it's impossible to automatically update schema definitions added by
%%       add_schema/2, the only way to update them is to use add_schema/2
%%       again with the new definition.
-spec load_schemas( Path          :: string()
                  , ParseFun      :: parser()
                  , ValidationFun :: fun((any()) -> boolean())
                  , MakeKeyFun    :: fun((json_term()) -> any())
                  ) -> jesse_database:update_result().
load_schemas(Path, ParseFun, ValidationFun, MakeKeyFun) ->
  jesse_database:update(Path, ParseFun, ValidationFun, MakeKeyFun).
%% @doc Validates json `Data' against a schema with the same key as `Schema'
%% in the internal storage. If the given json is valid, then it is returned
%% to the caller, otherwise an error with an appropriate error reason
%% is returned. Validation stops at the first error (failfast accumulator);
%% errors thrown by the validator are caught and wrapped in `{error, _}'.
-spec validate(Schema :: any(), Data :: json_term()) -> {ok, json_term()}
                                                      | error().
validate(Schema, Data) ->
  try
    JsonSchema = jesse_database:read(Schema),
    jesse_schema_validator:validate(JsonSchema, Data,
                                    {fun jesse_utils:failfast/3, undefined})
  catch
    throw:Error ->
      {error, Error}
  end.
%% @doc Equivalent to `validate/2', but `Data' is a binary string, and
%% the third argument is a parse function to convert the binary string to
%% a supported internal representation of json. Parse failures are returned
%% as `{error, {data_error, {parse_error, Reason}}}'.
-spec validate( Schema   :: any()
              , Data     :: binary()
              , ParseFun :: parser()
              ) -> {ok, json_term()}
                 | error().
validate(Schema, Data, ParseFun) ->
  case try_parse(ParseFun, Data) of
    {parse_error, _} = DError -> {error, {data_error, DError}};
    ParsedJson                -> validate(Schema, ParsedJson)
  end.
%% @doc Validates json `Data' against the given schema `Schema'. If the given
%% json is valid, then it is returned to the caller, otherwise an error with
%% an appropriate error reason is returned. Unlike `validate/2', the schema
%% is passed directly instead of being read from the internal storage.
-spec validate_with_schema( Schema :: json_term()
                          , Data   :: json_term()
                          ) -> {ok, json_term()}
                             | error().
validate_with_schema(Schema, Data) ->
  try
    jesse_schema_validator:validate(Schema, Data,
                                    {fun jesse_utils:failfast/3, undefined})
  catch
    throw:Error ->
      {error, Error}
  end.
%% @doc Equivalent to `validate_with_schema/2', but both `Schema' and
%% `Data' are binary strings, and the third argument is a parse function
%% to convert the binary string to a supported internal representation of json.
%% The schema is parsed first; a schema parse failure short-circuits before
%% the data is parsed.
-spec validate_with_schema( Schema   :: binary()
                          , Data     :: binary()
                          , ParseFun :: parser()
                          ) -> {ok, json_term()}
                             | error().
validate_with_schema(Schema, Data, ParseFun) ->
  case try_parse(ParseFun, Schema) of
    {parse_error, _} = SError ->
      {error, {schema_error, SError}};
    ParsedSchema ->
      case try_parse(ParseFun, Data) of
        {parse_error, _} = DError ->
          {error, {data_error, DError}};
        ParsedData ->
          validate_with_schema(ParsedSchema, ParsedData)
      end
  end.
%% @doc Equivalent to {@link validate_with_accumulator/4} where both
%% <code>Schema</code> and <code>Data</code> are parsed json terms, using the
%% default error collector <code>jesse_utils:collect/3</code> with an empty
%% initial accumulator.
-spec validate_with_accumulator( Schema :: json_term(),
                                 Data :: json_term()
                               ) ->
                                  {ok, json_term()} | {error, [error()]}.
validate_with_accumulator(Schema, Data) ->
  validate_with_accumulator(Schema, Data, fun jesse_utils:collect/3, []).
%% @doc Equivalent to {@link validate_with_accumulator/2} but both
%% <code>Schema</code> and <code>Data</code> are binary strings parsed with
%% <code>ParseFun</code>. This function will return the original JSON in case
%% it fully corresponds to the schema, or a list of all collected errors, if
%% they are not critical.
-spec validate_with_accumulator( Schema :: binary(),
                                 Data :: binary(),
                                 ParseFun :: parser()
                               ) ->
                                  {ok, json_term()} | {error, [error()]}.
validate_with_accumulator(Schema, Data, ParseFun) ->
  case try_parse(ParseFun, Schema) of
    {parse_error, _} = SError ->
      {error, {schema_error, SError}};
    ParsedSchema ->
      case try_parse(ParseFun, Data) of
        {parse_error, _} = DError ->
          {error, {data_error, DError}};
        ParsedData ->
          validate_with_accumulator(ParsedSchema, ParsedData)
      end
  end.
%% @doc Validates parsed json term <code>Data</code> against the parsed
%% schema <code>Schema</code>, folding every validation error into
%% <code>Accumulator</code> starting from <code>Initial</code>.
%% Errors thrown by the validator are caught and wrapped in
%% <code>{error, _}</code>.
-spec validate_with_accumulator( Schema :: json_term(),
                                 Data :: json_term(),
                                 Accumulator :: accumulator(),
                                 Initial :: term()
                               ) ->
                                  {ok, json_term()} | {error, term()}.
validate_with_accumulator(Schema, Data, Accumulator, Initial) ->
  try
    jesse_schema_validator:validate(Schema, Data,
                                    {Accumulator, Initial})
  catch
    throw:Error ->
      {error, Error}
  end.
%% @doc Equivalent to {@link validate_with_accumulator/4}, but both
%% <code>Schema</code> and <code>Data</code> are binary strings which are
%% parsed with <code>ParseFun</code> before validation.
-spec validate_with_accumulator( Schema :: json_term(),
                                 Data :: json_term(),
                                 ParseFun :: parser(),
                                 Accumulator :: accumulator(),
                                 Initial :: term()
                               ) ->
                                  {ok, json_term()} | {error, term()}.
validate_with_accumulator(Schema, Data, ParseFun, Accumulator, Initial) ->
  case try_parse(ParseFun, Schema) of
    {parse_error, _} = SError ->
      {error, {schema_error, SError}};
    ParsedSchema ->
      case try_parse(ParseFun, Data) of
        {parse_error, _} = DError ->
          {error, {data_error, DError}};
        ParsedData ->
          validate_with_accumulator(ParsedSchema, ParsedData,
                                    Accumulator, Initial)
      end
  end.
%% @doc Explain list of errors in the internal format and return
%% <code>iolist()</code> in human-readable format.
%% This function accepts the list of errors, collected with the default
%% accumulator.
%% @see validate_with_accumulator/2.
%% NOTE: the spec previously referenced non-existent modules
%% `json_schema_path'/`json_schema_validator'; corrected to the jesse
%% modules used throughout this file (see the accumulator() type above).
-spec explain_errors( Errors :: [ { jesse_json_path:path()
                                  , jesse_schema_validator:error()} ]
                    ) ->
                      iolist().
explain_errors(Errors) ->
  jesse_utils:explain(Errors).
%% @doc A variant of {@link explain_errors/1} that takes an additional
%% argument as a <code>fun</code> of encoder.
%% This function accepts the list of errors, collected with the default
%% accumulator.
%% @see validate_with_accumulator/2.
%% NOTE: the spec previously referenced non-existent modules
%% `json_schema_path'/`json_schema_validator'; corrected to the jesse
%% modules used throughout this file (see the accumulator() type above).
-spec explain_errors( Errors :: [ { jesse_json_path:path()
                                  , jesse_schema_validator:error()} ],
                      Encoder :: encoder()
                    ) ->
                      iolist().
explain_errors(Errors, Encoder) ->
  jesse_utils:explain(Errors, Encoder).
%%%-----------------------------------------------------------------------------
%%% Internal functions
%%%-----------------------------------------------------------------------------
%% @doc Wraps up calls to a third party json parser, converting any raised
%% exception into a `{parse_error, Reason}' tuple.
%% @private
try_parse(ParseFun, JsonBin) ->
  try ParseFun(JsonBin)
  catch
    _Class:Reason -> {parse_error, Reason}
  end.
%%% Local Variables:
%%% erlang-indent-level: 2
%%% End:
|
# This file is part of the MapProxy project.
# Copyright (C) 2010 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Retrieve tiles from different tile servers (TMS/TileCache/etc.).
"""
import sys
from mapproxy.image.opts import ImageOptions
from mapproxy.source import SourceError
from mapproxy.client.http import HTTPClientError
from mapproxy.source import InvalidSourceQuery
from mapproxy.layer import BlankImage, map_extent_from_grid, CacheMapLayer, MapLayer
from mapproxy.util.py import reraise_exception
import logging
log = logging.getLogger('mapproxy.source.tile')
log_config = logging.getLogger('mapproxy.config')
class TiledSource(MapLayer):
    """
    Map layer that retrieves single tiles from a tile server (TMS/TileCache/etc.).

    A query is only valid when it matches exactly one tile of ``grid`` with the
    grid's tile size and SRS; anything else raises ``InvalidSourceQuery``.
    """
    def __init__(self, grid, client, coverage=None, image_opts=None, error_handler=None,
        res_range=None):
        # grid: tile grid of the remote source
        # client: tile client exposing get_tile(tile_coord, format=...)
        # coverage: optional coverage; queries outside it raise BlankImage
        # error_handler: optional handler that may map HTTP error responses
        #                to a substitute image
        # res_range: optional resolution range; queries outside it raise BlankImage
        MapLayer.__init__(self, image_opts=image_opts)
        self.grid = grid
        self.client = client
        self.image_opts = image_opts or ImageOptions()
        self.coverage = coverage
        self.extent = coverage.extent if coverage else map_extent_from_grid(grid)
        self.res_range = res_range
        self.error_handler = error_handler
    def get_map(self, query):
        """Return the single source tile that matches ``query``.

        Raises ``InvalidSourceQuery`` on tile-size/SRS/alignment mismatch,
        ``BlankImage`` outside ``res_range``/``coverage``, and ``SourceError``
        when the HTTP request fails and no error-handler response applies.
        """
        # tile size and SRS must match the source grid exactly
        if self.grid.tile_size != query.size:
            ex = InvalidSourceQuery(
                'tile size of cache and tile source do not match: %s != %s'
                % (self.grid.tile_size, query.size)
            )
            log_config.error(ex)
            raise ex
        if self.grid.srs != query.srs:
            ex = InvalidSourceQuery(
                'SRS of cache and tile source do not match: %r != %r'
                % (self.grid.srs, query.srs)
            )
            log_config.error(ex)
            raise ex
        if self.res_range and not self.res_range.contains(query.bbox, query.size,
                                                          query.srs):
            raise BlankImage()
        if self.coverage and not self.coverage.intersects(query.bbox, query.srs):
            raise BlankImage()
        # the query BBOX must cover exactly one tile (a 1x1 affected-tile grid)
        _bbox, grid, tiles = self.grid.get_affected_tiles(query.bbox, query.size)
        if grid != (1, 1):
            raise InvalidSourceQuery('BBOX does not align to tile')
        tile_coord = next(tiles)
        try:
            return self.client.get_tile(tile_coord, format=query.format)
        except HTTPClientError as e:
            # give the error handler a chance to return a substitute image
            if self.error_handler:
                resp = self.error_handler.handle(e.response_code, query)
                if resp:
                    return resp
            log.warning('could not retrieve tile: %s', e)
            reraise_exception(SourceError(e.args[0]), sys.exc_info())
class CacheSource(CacheMapLayer):
    """
    Map layer that serves maps from an internal tile cache (``tile_manager``).

    With ``tiled_only=True`` the source answers only tile-aligned requests and
    disables meta-tiling.
    """
    def __init__(self, tile_manager, extent=None, image_opts=None,
        max_tile_limit=None, tiled_only=False):
        CacheMapLayer.__init__(self, tile_manager, extent=extent, image_opts=image_opts,
            max_tile_limit=max_tile_limit)
        # meta tiles only make sense when arbitrary (non tile-aligned)
        # requests are supported
        self.supports_meta_tiles = not tiled_only
        self.tiled_only = tiled_only
    def get_map(self, query):
        # force tile-aligned handling in the underlying cache layer
        if self.tiled_only:
            query.tiled_only = True
        return CacheMapLayer.get_map(self, query)
|
from typing import Optional, Set, Union
import logging
from overrides import overrides
from allennlp.common.file_utils import cached_path
from allennlp.data.dataset_readers.dataset_reader import DatasetReader
from contexteval.contextualizers import Contextualizer
from contexteval.data.dataset_readers import TaggingDatasetReader
logger = logging.getLogger(__name__)
@DatasetReader.register("billion_word_benchmark_language_modeling")
class LanguageModelingDatasetReader(TaggingDatasetReader):
    """
    Reads a file with a sentence per line (billion-word benchmark format), and
    returns instances for language modeling. Each instance is a line in the dataset,
    and they are predicted independently of each other.
    Parameters
    ----------
    max_length: int, optional (default=50)
        The maximum length of the sequences to use in the LM task. Any sequences that are
        longer than this value will be discarded.
    backward: bool, optional (default=False)
        If so, generate instances suitable for evaluating a backward language model.
        For example, if the sentence is [a, b, c, d], the forward instance has tokens of
        [a, b, c, d] and labels of [b, c, d, </S>], whereas the backward instance has
        tokens of [a, b, c, d] and labels of [<S>, a, b, c].
    vocabulary_path: str, optional (default=None)
        If provided, words in the input files that are not in this vocabulary are set to "<UNK>".
    contextualizer: Contextualizer, optional (default=``None``)
        If provided, it is used to produce contextualized representations of the text.
    max_instances: int or float, optional (default=``None``)
        The number of instances to use during training. If int, this value is taken
        to be the absolute amount of instances to use. If float, this value indicates
        that we should use that proportion of the total training data. If ``None``,
        all instances are used.
    seed: int, optional (default=``0``)
        The random seed to use.
    lazy : ``bool``, optional (default=``False``)
        If this is true, ``instances()`` will return an object whose ``__iter__`` method
        reloads the dataset each time it's called. Otherwise, ``instances()`` returns a list.
    """
    def __init__(self,
                 max_length: int = 50,
                 backward: bool = False,
                 vocabulary_path: Optional[str] = None,
                 contextualizer: Optional[Contextualizer] = None,
                 max_instances: Optional[Union[int, float]] = None,
                 seed: int = 0,
                 lazy: bool = False) -> None:
        super().__init__(
            contextualizer=contextualizer,
            max_instances=max_instances,
            seed=seed,
            lazy=lazy)
        self._max_length = max_length
        self._vocabulary_path = vocabulary_path
        self._vocabulary: Set[str] = set()
        if vocabulary_path:
            # Load the vocabulary (one token per line)
            cached_vocabulary_path = cached_path(vocabulary_path)
            with open(cached_vocabulary_path) as cached_vocabulary_file:
                for line in cached_vocabulary_file:
                    token = line.rstrip("\n")
                    self._vocabulary.add(token)
        self._backward = backward
    @overrides
    def _read_dataset(self,
                      file_path: str,
                      count_only: bool = False,
                      keep_idx: Optional[Set[int]] = None):
        """
        Yield instances from the file_path.
        Parameters
        ----------
        file_path: str, required
            The path to the data file.
        count_only: bool, optional (default=``False``)
            If True, no instances are returned and instead a dummy object is
            returned. This is useful for quickly counting the number of instances
            in the data file, since creating instances is relatively expensive.
        keep_idx: Set[int], optional (default=``None``)
            If not None, only yield instances whose index is in this set.
        """
        # if `file_path` is a URL, redirect to the cache
        file_path = cached_path(file_path)
        if count_only:
            logger.info("Counting instances (backward: %s) in LM file at: %s",
                        self._backward, file_path)
        else:
            logger.info("Reading instances (backward: %s) from lines in LM file at: %s",
                        self._backward, file_path)
        index = 0
        with open(file_path) as input_file:
            for line in input_file:
                clean_line = line.rstrip("\n")
                # skip comment lines
                if line.startswith("#"):
                    continue
                # Get tokens and the labels of the instance
                # NOTE(review): a blank line splits to [""] (length 1), so the
                # `not tokenized_line` guard never fires for empty lines;
                # confirm whether blank lines can occur in the input.
                tokenized_line = clean_line.split(" ")
                if not tokenized_line or len(tokenized_line) > self._max_length:
                    continue
                if count_only:
                    yield 1
                    continue
                if keep_idx is not None and index not in keep_idx:
                    index += 1
                    continue
                # Replace OOV tokens in tokenized_line
                if self._vocabulary:
                    tokenized_line = [word if word in self._vocabulary else "<UNK>" for
                                      word in tokenized_line]
                if self._backward:
                    # Tokens are all tokens, labels are a BOS indicator + all except last token
                    labels = ["<S>"] + tokenized_line[:-1]
                else:
                    # Tokens are all tokens, and labels
                    # are all except first token + a EOS indicator
                    labels = tokenized_line[1:] + ["</S>"]
                # Contextualize the tokens if a Contextualizer was provided.
                if self._contextualizer:
                    token_representations = self._contextualizer([tokenized_line])[0]
                else:
                    token_representations = None
                yield self.text_to_instance(tokenized_line,
                                            token_representations,
                                            labels)
                index += 1
|
package cn.woyeshi.client.activity
import android.os.Bundle
import android.os.Handler
import cn.woyeshi.base.activities.BaseActivity
import cn.woyeshi.client.R
import cn.woyeshi.client.utils.Navigation
/**
 * Launch screen: shows the splash layout for two seconds, then navigates to
 * the main activity and finishes itself.
 */
class SplashActivity : BaseActivity() {
    /** Layout resource used for the splash screen. */
    override fun getContentLayoutID(): Int {
        return R.layout.activity_splash
    }
    /** The splash screen has no title bar. */
    override fun isHaveTitleBar(): Boolean {
        return false
    }
    override fun onActivityCreated(savedInstanceState: Bundle?) {
        // NOTE(review): the delayed callback is never cancelled; if the user
        // backs out during the 2 s delay it still fires — confirm acceptable.
        Handler().postDelayed({
            Navigation.toMainActivity(this@SplashActivity)
            finish()
        }, 2000)
    }
}
|
/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#include <fstream>
#include "itkFEMRegistrationFilter.h"
#include "itkImageFileWriter.h"
// Typedefs used for registration: 3-D unsigned char images registered with
// linear hexahedron membrane finite elements.
const unsigned int ImageDimension = 3;
typedef unsigned char PixelType;
typedef itk::Image<PixelType, ImageDimension> ImageType;
typedef itk::fem::Element3DC0LinearHexahedronMembrane ElementType;
// Template function to fill in an image with a value
template <class TImage>
void
FillImage(
TImage * image,
typename TImage::PixelType value )
{
typedef itk::ImageRegionIteratorWithIndex<TImage> Iterator;
Iterator it( image, image->GetBufferedRegion() );
for( it.GoToBegin(); !it.IsAtEnd(); ++it )
{
it.Set( value );
}
}
// Template function to fill in an image with a circle.
// Pixels whose squared distance from `center` is <= radius^2 are set to
// `foregnd`; all other pixels are set to `backgnd`.
template <class TImage>
void
FillWithCircle(
  TImage * image,
  double * center,
  double radius,
  typename TImage::PixelType foregnd,
  typename TImage::PixelType backgnd )
{
  typedef itk::ImageRegionIteratorWithIndex<TImage> Iterator;
  Iterator it( image, image->GetBufferedRegion() );
  typename TImage::IndexType index;
  double r2 = vnl_math_sqr( radius );
  for( it.GoToBegin(); !it.IsAtEnd(); ++it )
    {
    index = it.GetIndex();
    // squared Euclidean distance from the circle center
    double distance = 0;
    for( unsigned int j = 0; j < TImage::ImageDimension; j++ )
      {
      distance += vnl_math_sqr( (double) index[j] - center[j]);
      }
    if( distance <= r2 )
      {
      it.Set( foregnd );
      }
    else
      {
      it.Set( backgnd );
      }
    }
}
// Regression test for itk::fem::FEMRegistrationFilter.
// Builds two synthetic sphere images (moving r=5, fixed r=8), then runs FEM
// registration once for each of the four metric choices. Optional argv[1] and
// argv[2] give base filenames for writing the displacement field and warped
// image (one file per metric, suffixed by the metric index).
int itkFEMRegistrationFilterTest(int argc, char *argv[] )
{
  typedef itk::Vector<float, ImageDimension> VectorType;
  typedef itk::Image<VectorType, ImageDimension> FieldType;
  typedef ImageType::IndexType IndexType;
  typedef ImageType::SizeType SizeType;
  typedef ImageType::RegionType RegionType;
  //--------------------------------------------------------
  std::cout << "Generate input images and initial deformation field";
  std::cout << std::endl;
  // 32^3 voxel images
  ImageType::SizeValueType sizeArray[ImageDimension];
  for (unsigned int i=0;i<ImageDimension;i++)
    {
    sizeArray[i] = 32;
    }
  SizeType size;
  size.SetSize( sizeArray );
  IndexType index;
  index.Fill( 0 );
  RegionType region;
  region.SetSize( size );
  region.SetIndex( index );
  ImageType::Pointer moving = ImageType::New();
  ImageType::Pointer fixed = ImageType::New();
  FieldType::Pointer initField = FieldType::New();
  moving->SetLargestPossibleRegion( region );
  moving->SetBufferedRegion( region );
  moving->Allocate();
  fixed->SetLargestPossibleRegion( region );
  fixed->SetBufferedRegion( region );
  fixed->Allocate();
  initField->SetLargestPossibleRegion( region );
  initField->SetBufferedRegion( region );
  initField->Allocate();
  double center[ImageDimension];
  double radius;
  PixelType fgnd = 250;
  PixelType bgnd = 15;
  // Set the circle center to the middle of the 32^3 volume
  for (unsigned int i=0;i<ImageDimension;i++)
    {
    center[i] = 16;
    }
  // fill moving with a small circle
  radius = 5;
  FillWithCircle<ImageType>( moving, center, radius, fgnd, bgnd );
  // fill fixed with circle
  radius = 8;
  FillWithCircle<ImageType>( fixed, center, radius, fgnd, bgnd );
  // fill initial deformation with zero vectors
  VectorType zeroVec;
  zeroVec.Fill( 0.0 );
  FillImage<FieldType>( initField, zeroVec );
  // -------------------------------------------------------------
  typedef itk::fem::FEMObject<ImageDimension> FEMObjectType;
  typedef itk::fem::FEMRegistrationFilter<ImageType, ImageType, FEMObjectType> RegistrationType;
  std::cout << "Run registration and warp moving" << std::endl;
  // run once per metric (0..3); metric 0 and 3 use a zero-width metric
  // region and metric 0 additionally uses line search + regridding
  for( unsigned int met = 0; met < 4; met++ )
    {
    RegistrationType::Pointer registrator = RegistrationType::New();
    registrator->SetFixedImage( fixed );
    registrator->SetMovingImage( moving );
    registrator->SetMaxLevel(1);
    registrator->SetUseNormalizedGradient( true );
    registrator->ChooseMetric( met );
    unsigned int maxiters = 5;
    float e = 10;
    float p = 1;
    registrator->SetElasticity(e, 0);
    registrator->SetRho(p, 0);
    registrator->SetGamma(1., 0);
    registrator->SetAlpha(1.);
    registrator->SetMaximumIterations( maxiters, 0 );
    registrator->SetMeshPixelsPerElementAtEachResolution(4, 0);
    registrator->SetWidthOfMetricRegion(0, 0);
    if( met == 0 || met == 3 )
      {
      registrator->SetWidthOfMetricRegion(0, 0);
      }
    else
      {
      registrator->SetWidthOfMetricRegion(1, 0);
      }
    registrator->SetNumberOfIntegrationPoints(2, 0);
    registrator->SetDoLineSearchOnImageEnergy( 0 );
    registrator->SetTimeStep(1.);
    if( met == 0 )
      {
      registrator->SetDoLineSearchOnImageEnergy( (int)2);
      registrator->SetEmployRegridding(true);
      }
    else
      {
      registrator->SetDoLineSearchOnImageEnergy( (int)0);
      registrator->SetEmployRegridding(false);
      }
    registrator->SetUseLandmarks(false);
    // Linear elastic material shared by all elements of the FEM mesh
    itk::fem::MaterialLinearElasticity::Pointer m;
    m = itk::fem::MaterialLinearElasticity::New();
    m->SetGlobalNumber(0);                                  // Global number of the material ///
    m->SetYoungsModulus(registrator->GetElasticity() );     // Young modulus -- used in the membrane ///
    m->SetCrossSectionalArea(1.0);                          // Crossection area ///
    m->SetThickness(1.0);                                   // Crossection area ///
    m->SetMomentOfInertia(1.0);                             // Moment of inertia ///
    m->SetPoissonsRatio(0.);                                // .0;  // poissons -- DONT CHOOSE 1.0!!///
    m->SetDensityHeatProduct(1.0);
    // Create the element type
    ElementType::Pointer e1 = ElementType::New();
    e1->SetMaterial(dynamic_cast<itk::fem::MaterialLinearElasticity *>( &*m ) );
    registrator->SetElement(&*e1);
    registrator->SetMaterial(m);
    registrator->Print( std::cout );
    try
      {
      // Register the images
      registrator->RunRegistration();
      }
    catch( ::itk::ExceptionObject & err )
      {
      std::cerr << "ITK exception detected: " << err;
      std::cout << "Test FAILED" << std::endl;
      return EXIT_FAILURE;
      }
    catch( ... )
      {
      // fixme - changes to femparray cause it to fail : old version works
      std::cout << "Caught an exception: " << std::endl;
      return EXIT_FAILURE;
      //    std::cout << err << std::endl;
      // throw err;
      }
    if (argc > 1)
      {
      // write the displacement field as <argv[1]><met>.mhd
      std::cout << "Write out deformation field" << argv[1] << std::endl;
      std::string outFileName = argv[1];
      std::stringstream ss;
      ss << met;
      outFileName += ss.str();
      outFileName += ".mhd";
      typedef itk::ImageFileWriter<RegistrationType::FieldType> ImageWriterType;
      ImageWriterType::Pointer writer = ImageWriterType::New();
      writer->SetFileName( outFileName );
      writer->SetInput( registrator->GetDisplacementField() );
      writer->Update();
      }
    if (argc > 2)
      {
      // write the warped moving image as <argv[2]><met>.mhd
      std::cout << "Write out deformed image" << argv[2] << std::endl;
      std::string outFileName = argv[2];
      std::stringstream ss;
      ss << met;
      outFileName += ss.str();
      outFileName += ".mhd";
      typedef itk::ImageFileWriter<ImageType> ImageWriterType;
      ImageWriterType::Pointer writer = ImageWriterType::New();
      writer->SetFileName( outFileName );
      writer->SetInput( registrator->GetWarpedImage() );
      writer->Update();
      }
    }
  /*
  // get warped reference image
  // ---------------------------------------------------------
  std::cout << "Compare warped moving and fixed." << std::endl;
  // compare the warp and fixed images
  itk::ImageRegionIterator<ImageType> fixedIter( fixed,
      fixed->GetBufferedRegion() );
  itk::ImageRegionIterator<ImageType> warpedIter( registrator->GetWarpedImage(),
      fixed->GetBufferedRegion() );
  unsigned int numPixelsDifferent = 0;
  while( !fixedIter.IsAtEnd() )
    {
    if( fixedIter.Get() != warpedIter.Get() )
      {
      numPixelsDifferent++;
      }
    ++fixedIter;
    ++warpedIter;
    }
  std::cout << "Number of pixels different: " << numPixelsDifferent;
  std::cout << std::endl;
  if( numPixelsDifferent > 400 )
    {
    std::cout << "Test failed - too many pixels different." << std::endl;
    return EXIT_FAILURE;
    }
  std::cout << "Test passed" << std::endl;
  */
  return EXIT_SUCCESS;
}
|
package com.klex.extensions.network
import com.klex.extensions.KlexException
/**
 * [KlexException] raised for network-level failures.
 *
 * @property serverMessage optional human-readable message returned by the server.
 * @property errorCode optional server-side error code accompanying the failure.
 */
class NetworkKlexException(
    val serverMessage: String? = null,
    val errorCode: String? = null
) : KlexException()
|
module RubyDesignPatternsInPractice
  module Composite
    # Leaf task in the Composite pattern: a single monster encounter worth a
    # reward. The reward was previously hard-coded to 100; it is now a
    # constructor parameter that defaults to 100, so existing callers are
    # unaffected while new callers can configure it.
    class MonsterTask
      # Default reward kept for backward compatibility.
      DEFAULT_REWARD = 100

      # Reward granted for completing this task.
      attr_reader :reward

      # @param reward [Integer] reward value (defaults to 100).
      def initialize(reward = DEFAULT_REWARD)
        @reward = reward
      end
    end
  end
end
|
<?php
declare(strict_types=1);
namespace Omexon\Filesystem;
/**
 * Static helpers that build filesystem paths relative to the site root,
 * the vendor directory, and individual composer packages.
 *
 * NOTE(review): several docblocks previously claimed "Dot notation is
 * supported in string" for $segments, but addSegmentsToPath() never splits
 * on '.' — segments are joined exactly as given. Confirm whether dot
 * splitting should be implemented or the claim dropped for good.
 */
class Path
{
    /**
     * Get path to root of site.
     *
     * @param string|string[] $segments Extra path segments appended to the root. Default null.
     * @return string
     */
    public static function root($segments = null): string
    {
        $path = __DIR__;

        // Step back 4 steps since this is a package in vendor path.
        for ($c1 = 0; $c1 < 4; $c1++) {
            $path = dirname($path);
        }
        // Normalize Windows separators to forward slashes.
        $path = str_replace('\\', '/', $path);

        $path = self::addSegmentsToPath($path, $segments);

        return $path;
    }

    /**
     * Get path to current package.
     *
     * @param string|string[] $segments Extra path segments appended to the package path. Default null.
     * @return string
     */
    public static function packageCurrent($segments = null): string
    {
        return self::package(null, null, $segments);
    }

    /**
     * Get path to package.
     * Note: if both $vendor and $package is null, current package is returned.
     *
     * @param string $vendor Default null which means current.
     * @param string $package Default null which means current.
     * @param string|string[] $segments Extra path segments appended to the package path. Default null.
     * @return string
     */
    public static function package(?string $vendor = null, ?string $package = null, $segments = null): string
    {
        // Two levels up from the package path = the vendor root directory.
        $path = dirname(dirname(static::packagePath()));
        if ($package === null) {
            $package = static::packageName();
        }
        if ($vendor === null) {
            $vendor = static::vendorName();
        }
        $path .= '/' . $vendor . '/' . $package;
        $path = self::addSegmentsToPath($path, $segments);

        return $path;
    }

    /**
     * Get vendor name (parent directory of the package path).
     *
     * @return string
     */
    public static function vendorName(): string
    {
        $path = static::packagePath();

        return basename(dirname($path));
    }

    /**
     * Get package name (base name of the package path).
     *
     * @return string
     */
    public static function packageName(): string
    {
        $path = static::packagePath();

        return basename($path);
    }

    /**
     * Get package path.
     * Note: if this class is extended, this method has to be overridden to
     * give the base path for the parent package.
     *
     * @return string
     */
    protected static function packagePath(): string
    {
        return dirname(__DIR__);
    }

    /**
     * Add segments to path. Segments are joined with '/' exactly as given
     * (no dot-notation expansion — see class-level note).
     *
     * @param string $path
     * @param string|string[] $segments
     * @return string
     */
    protected static function addSegmentsToPath(string $path, $segments): string
    {
        if ($segments === null) {
            return $path;
        }

        // Make sure it is an array.
        if (!is_array($segments)) {
            $segments = [$segments];
        }

        // Add segments.
        if (count($segments) > 0) {
            $path = rtrim($path, '/') . '/' . implode('/', $segments);
        }

        return $path;
    }
}
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace QuickSearch {
	/// <summary>
	/// Base class for search indexers. Subclasses supply the searchable
	/// elements; this class forwards lifecycle notifications and copies the
	/// elements out under a lock on the element list.
	/// </summary>
	public abstract class SearchIndexerBase {
		/// <summary>Forwards the startup notification to OnStartup().</summary>
		public void NotifyOnStartup () {
			OnStartup();
		}
		/// <summary>Forwards the open notification to OnOpen().</summary>
		public void NotifyOnOpen () {
			OnOpen();
		}
		/// <summary>Forwards the query text to OnQuery().</summary>
		public void NotifyOnQuery (string query) {
			OnQuery(query);
		}
		/// <summary>
		/// Clears <paramref name="outResult"/> and fills it with this
		/// indexer's elements. Leaves the list empty when the indexer is
		/// inactive. The copy is made while holding a lock on the element
		/// list returned by GetElements().
		/// </summary>
		public void RequestElements (List<ISearchableElement> outResult) {
			outResult.Clear();
			if (!IsActive())
				return;
			var elements = GetElements();
			lock (elements) {
				outResult.AddRange(elements);
			}
		}
		/// <summary>Hook invoked once on startup; default does nothing.</summary>
		protected virtual void OnStartup () {
		}
		/// <summary>Hook invoked when the search window opens; default does nothing.</summary>
		protected virtual void OnOpen () {
		}
		/// <summary>Hook invoked for each query; default does nothing.</summary>
		protected virtual void OnQuery (string query) {
		}
		/// <summary>Returns the backing list of searchable elements.</summary>
		protected abstract List<ISearchableElement> GetElements ();
		/// <summary>Whether this indexer currently contributes elements; default true.</summary>
		protected virtual bool IsActive () {
			return true;
		}
	}
}
|
-- Add a unique index on email so each student row has a distinct address
CREATE UNIQUE INDEX email_UNIQUE ON students (email ASC);

-- Check whether the table is compatible with the current MySQL server version
CHECK TABLE students FOR UPGRADE;
|
!
! Date_Utility
!
! Module containing date conversion routines
!
!
! CREATION HISTORY:
! Written by: Paul van Delst, 03-Apr-2000
! paul.vandelst@noaa.gov
MODULE Date_Utility
! -----------------
! Environment setup
! -----------------
IMPLICIT NONE
! ------------
! Visibilities
! ------------
PRIVATE
! Parameters
PUBLIC :: N_MONTHS
PUBLIC :: DAYS_PER_MONTH_IN_NONLEAP
PUBLIC :: MONTH_NAME
PUBLIC :: N_DAYS
PUBLIC :: DAY_NAME
! Procedures
PUBLIC :: IsLeapYear
PUBLIC :: DayOfYear
PUBLIC :: DaysInMonth
PUBLIC :: NameOfMonth
PUBLIC :: DayOfWeek
! ----------
! Parameters
! ----------
! String length for character functions
INTEGER, PARAMETER :: NL = 20
! Number of Months in a Year
INTEGER, PARAMETER :: N_MONTHS = 12
! Days per Month in a non leap Year
INTEGER, PARAMETER :: DAYS_PER_MONTH_IN_NONLEAP(N_MONTHS) = &
(/ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 /)
! Month names
CHARACTER(*), PARAMETER :: MONTH_NAME(N_MONTHS) = &
(/'January ','February ','March ','April ','May ','June ', &
'July ','August ','September','October ','November ','December ' /)
! Number of Days in a Week
INTEGER, PARAMETER :: N_DAYS = 7
! Day names
CHARACTER(*), PARAMETER :: DAY_NAME(N_DAYS) = &
(/'Sunday ','Monday ','Tuesday ','Wednesday','Thursday ','Friday ','Saturday '/)
CONTAINS
!##############################################################################
!##############################################################################
!## ##
!## ## PUBLIC MODULE ROUTINES ## ##
!## ##
!##############################################################################
!##############################################################################
!------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
! IsLeapYear
!
! PURPOSE:
! Elemental function to determine if a specified year is a leap year.
!
! CALLING SEQUENCE:
! Result = IsLeapYear( Year )
!
! INPUT ARGUMENTS:
! Year: The year in 4-digit format, e.g. 1997.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Scalar
! ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
! Result: The return value is a logical value indicating whether
! the specified year is a leap year.
! If .TRUE. the specified year IS a leap year.
! .FALSE. the specified year is NOT a leap year.
! UNITS: N/A
! TYPE: LOGICAL
! DIMENSION: Scalar
!:sdoc-:
!------------------------------------------------------------------------------
ELEMENTAL FUNCTION IsLeapYear( Year )
INTEGER, INTENT(IN) :: Year
LOGICAL :: IsLeapYear
IsLeapYear = ( (MOD(Year,4) == 0) .AND. (MOD(Year,100) /= 0) ) .OR. &
(MOD(Year,400) == 0)
END FUNCTION IsLeapYear
!------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
! DayOfYear
!
! PURPOSE:
! Elemental function to convert input numeric (e.g. DD,MM,YYYY) date
! information to a sequential day of year.
!
! CALLING SEQUENCE:
! DoY = DayOfYear ( Day, Month, Year )
!
! INPUTS:
! Day: The day-of-month.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Scalar or any rank
! ATTRIBUTES: INTENT(IN)
!
! Month: The month-of-year.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
! ATTRIBUTES: INTENT(IN)
!
! Year: The year in 4-digit format, e.g. 1997.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
! ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
! DoY: Integer defining the day-of-year.
! Return value is 0 for invalid input.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
!:sdoc-:
!------------------------------------------------------------------------------
  ELEMENTAL FUNCTION DayOfYear( &
    Day  , & ! Input
    Month, & ! Input
    Year ) & ! Input
  RESULT( DoY )
    ! Arguments
    INTEGER, INTENT(IN) :: Day
    INTEGER, INTENT(IN) :: Month
    INTEGER, INTENT(IN) :: Year
    ! Function result
    INTEGER :: DoY
    ! Local variables
    INTEGER :: Days_per_Month( N_MONTHS )

    ! Set up (0 is the invalid-input return value)
    DoY = 0
    ! ...Check year and month input
    IF ( Year  < 1        .OR. &
         Month < 1        .OR. &
         Month > N_MONTHS ) RETURN
    ! ...Check the day of month; February gains a day in leap years
    Days_per_Month = DAYS_PER_MONTH_IN_NONLEAP
    IF ( IsLeapYear(Year) ) Days_per_Month(2) = 29
    IF ( Day > Days_per_Month(Month) ) RETURN

    ! Compute the day of year: days of all fully elapsed months plus Day
    DoY = SUM(Days_per_Month(1:Month-1)) + Day
  END FUNCTION DayOfYear
!------------------------------------------------------------------------------
!:sdoc+:
! NAME:
! DaysInMonth
!
! PURPOSE:
! Elemental function to return the number of days in a given
! month and year.
!
! CALLING SEQUENCE:
! n_Days = DaysInMonth( Month, Year )
!
! INPUTS:
! Month: The month of the year (1-12).
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Scalar or any rank
! ATTRIBUTES: INTENT(IN)
!
! Year: The year in 4-digit format, e.g. 1997.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Month input
! ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
! n_Days: The number of days in the month.
! Return value is 0 for invalid input.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as input
!:sdoc-:
!------------------------------------------------------------------------------
  ELEMENTAL FUNCTION DaysInMonth(Month, Year) RESULT(n_Days)
    ! Arguments
    INTEGER, INTENT(IN) :: Month
    INTEGER, INTENT(IN) :: Year
    ! Function result
    INTEGER :: n_Days
    ! Local variables
    INTEGER :: Days_per_Month(N_MONTHS)

    ! Set up (0 is the invalid-input return value)
    n_Days = 0
    ! ...Check year and month input
    IF ( Year  < 1        .OR. &
         Month < 1        .OR. &
         Month > N_MONTHS ) RETURN

    ! Assemble the days of month; February gains a day in leap years
    Days_per_Month = DAYS_PER_MONTH_IN_NONLEAP
    IF ( IsLeapYear(Year=Year) ) Days_per_Month(2) = 29

    ! Set the number of days
    n_Days = Days_per_Month(Month)
  END FUNCTION DaysInMonth
!------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
! NameOfMonth
!
! PURPOSE:
! Elemental function to return the name of the month.
!
! CALLING SEQUENCE:
! name = NameOfMonth( Month )
!
! INPUT ARGUMENTS:
! Month: The month of the year (1-12).
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Scalar or any rank.
! ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
! name: The return value is a character string containing the
! name of the month.
! UNITS: N/A
! TYPE: CHARACTER
! DIMENSION: Conformable with input Month arugment
!:sdoc-:
!------------------------------------------------------------------------------
ELEMENTAL FUNCTION NameOfMonth( Month )
INTEGER, INTENT(IN) :: Month
CHARACTER(NL) :: NameOfMonth
NameOfMonth = 'Invalid'
IF ( Month < 1 .OR. Month > N_MONTHS ) RETURN
NameOfMonth = MONTH_NAME( Month )
END FUNCTION NameOfMonth
!------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
! DayOfWeek
!
! PURPOSE:
! Elemental function to return the name of the day of week.
!
! NOTE:
! - Only valid for Gregorian calendar.
! - Since different places switched to the Gregorian calendar at
! different times, this routine will only output day of week names
! for dates AFTER 1918 (the year Russia adopted the Gregorian
! calendar).
!
! CALLING SEQUENCE:
! name = DayOfWeek ( Day, Month, Year )
!
! INPUTS:
! Day: The day of the month.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Scalar or any rank
! ATTRIBUTES: INTENT(IN)
!
! Month: The month of the year (1-12).
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Conformable with input Day argument.
! ATTRIBUTES: INTENT(IN)
!
! Year: The year in 4-digit format, e.g. 1997.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Conformable with input Day argument.
! ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
! name: The return value is a character string containing the
! name of the day-of-week.
! UNITS: N/A
! TYPE: CHARACTER
! DIMENSION: Conformable with input Day argument.
!
!:sdoc-:
!------------------------------------------------------------------------------
ELEMENTAL FUNCTION DayOfWeek( Day, Month, Year )
INTEGER, INTENT(IN) :: Day
INTEGER, INTENT(IN) :: Month
INTEGER, INTENT(IN) :: Year
CHARACTER(NL) :: DayOfWeek
INTEGER :: i
DayOfWeek = 'Invalid'
i = iDayOfWeek( Day, Month, Year )
IF ( i == 0 ) RETURN
DayOfWeek = DAY_NAME(i)
END FUNCTION DayOfWeek
!##############################################################################
!##############################################################################
!## ##
!## ## PRIVATE MODULE ROUTINES ## ##
!## ##
!##############################################################################
!##############################################################################
!------------------------------------------------------------------------------
!
! NAME:
! iDayOfWeek
!
! PURPOSE:
! Elemental function to convert input numeric (e.g. DD,MM,YYYY) date
! information to a day of week index, 1-7.
!
! NOTE:
! - Only valid for Gregorian calendar.
! - Since different places switched to the Gregorian calendar at
! different times, this routine will only output valid day of week
! indices for dates AFTER 1918 (the year Russia adopted the Gregorian
! calendar).
!
! CALLING SEQUENCE:
! iDoW = iDayOfWeek ( Day, Month, Year )
!
! INPUTS:
! Day: The day of the month.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Scalar or any rank
! ATTRIBUTES: INTENT(IN)
!
! Month: The month of the year (1-12).
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
! ATTRIBUTES: INTENT(IN)
!
! Year: The year in 4-digit format, e.g. 1997.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
! ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
! iDoW: Integer defining the day-of-week (1-7).
! Return value is 0 for invalid input.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
!
!------------------------------------------------------------------------------
  ELEMENTAL FUNCTION iDayOfWeek( Day, Month, Year ) RESULT( iDoW )
    ! Arguments
    INTEGER, INTENT(IN) :: Day
    INTEGER, INTENT(IN) :: Month
    INTEGER, INTENT(IN) :: Year
    ! Function result
    INTEGER :: iDoW
    ! Local variables
    INTEGER :: jdn
    iDoW = 0
    jdn = JulianDay( Day, Month, Year )
    IF ( jdn < 0 ) RETURN   ! invalid date or pre-1919 input
    ! Julian Day Number 0 fell on a Monday, so MOD(jdn+1,7) yields 0 for
    ! Sunday; +1 maps the result onto the 1-based DAY_NAME index.
    iDoW = MOD(jdn+1, 7) + 1
    ! Defensive range check; cannot trigger for a valid jdn.
    IF ( iDoW < 1 .OR. iDoW > 7 ) iDow = 0
  END FUNCTION iDayOfWeek
!------------------------------------------------------------------------------
!
! NAME:
! JulianDay
!
! PURPOSE:
! Elemental function to convert input numeric (e.g. DD,MM,YYYY) date
! information to a Julian Day Number, which is defined as the number of
! days since noon January 1, 4713 BCE.
!
! NOTE:
! - Only valid for Gregorian calendar.
! - Since different places switched to the Gregorian calendar at different
! times, this routine will only output valid Julian day numbers for dates
! AFTER 1918 (the year Russia adopted the Gregorian calendar).
!
! CALLING SEQUENCE:
! jdn = JulianDay( Day, Month, Year )
!
! INPUTS:
! Day: The day of the month.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Scalar or any rank
! ATTRIBUTES: INTENT(IN)
!
! Month: The month of the year (1-12).
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
! ATTRIBUTES: INTENT(IN)
!
! Year: The year in 4-digit format, e.g. 1997.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
! ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
! jdn: Julian Day Number.
! Return value is negative for invalid input.
! UNITS: N/A
! TYPE: INTEGER
! DIMENSION: Same as Day input
!
! REFERENCES:
! - http://en.wikipedia.org/wiki/Julian_day
! - http://www.cs.utsa.edu/~cs1063/projects/Spring2011/Project1/jdn-explanation.html
!
!------------------------------------------------------------------------------
  ELEMENTAL FUNCTION JulianDay( Day, Month, Year ) RESULT( jdn )
    ! Arguments
    INTEGER, INTENT(IN) :: Day
    INTEGER, INTENT(IN) :: Month
    INTEGER, INTENT(IN) :: Year
    ! Function result
    INTEGER :: jdn
    ! Local variables
    INTEGER :: m, y, a
    jdn = -1              ! negative signals invalid/unsupported input
    ! Gregorian-only guard: reject years before 1919 (see header note)
    IF ( year <= 1918 ) RETURN
    ! Compute the number of years and months since March 1, 4801 BCE.
    ! All divisions below are integer divisions; term order matters.
    a = (14 - month)/12    ! 1 for Jan, 2 for Feb, 0 for other months.
    y = year + 4800 - a    ! Add 4800 to start counting from -4800.
    m = month + (12*a) - 3 ! Pretend the year begins in March and ends in Feb.
    jdn = day + &
          (153*m + 2)/5 + &             ! Number of days in the previous months
          (365*y) + &                   ! Duh.
          (y/4) - (y/100) + (y/400) - & ! Number of leap years since -4800
          32045                         ! Ensure result is 0 for Jan 1, 4713 BCE.
  END FUNCTION JulianDay
END MODULE Date_Utility
|
<?php
namespace App\Models;
use Illuminate\Support\Facades\DB;
class Image extends AppModel {
    protected $table = 'images';

    protected $fillable = [
        'image',
        'object_id',
        'object_type',
        'status',
        'deleted'
    ];

    public $timestamps = false;

    /**
     * Polymorphic relation to the model that owns this image.
     */
    public function image()
    {
        return $this->morphTo();
    }

    /**
     * Persist a new image record and return its id.
     *
     * @param array $image  expects keys: image, object_id, object_type
     * @return int|string
     */
    public function createImage($image) {
        $record = new Image();
        $record->image       = $image['image'];
        $record->object_id   = $image['object_id'];
        $record->object_type = $image['object_type'];
        $record->save();
        return $record->id;
    }
}
|
import createRequest from 'utils/createRequest'
import { uploadProfilePicture } from 'graphql/mutations/profileMutations'
import { RootState } from 'redux/store'
import { createAsyncThunk, createSlice, PayloadAction } from '@reduxjs/toolkit'
import { Props as AlertProps } from 'components/Alerts/Alert'
// A file read as base64 (e.g. FileReader.result): string or ArrayBuffer,
// null when no file is selected.
export type Base64 = ArrayBuffer | string | null

// Redux state for the profile-picture upload feature.
export interface InitialState {
	uploadModal: boolean // upload dialog visibility
	previewLink: string // link shown in the preview dialog
	previewModal: boolean // preview dialog visibility
	file: Base64 // selected file contents awaiting upload
	uploading: boolean // upload request in flight
	successful: boolean // last upload resolved
	failed: boolean // last upload rejected
	alertProps: AlertProps | {} // props for the status alert banner
}

const initialState: InitialState = {
	uploadModal: false,
	previewLink: '',
	previewModal: false,
	file: null,
	uploading: false,
	successful: false,
	failed: false,
	alertProps: {
		severity: 'info',
		message: '',
		checked: false,
	},
}
// Thunk that sends the currently selected (base64) file to the
// uploadProfilePicture GraphQL mutation.
export const uploadFile = createAsyncThunk(
	'profilePictureUpload/uploadFileStatus',
	async (_, thunkApi) => {
		const { profilePictureUpload } = thunkApi.getState() as RootState
		return createRequest({
			key: uploadProfilePicture,
			values: { image: profilePictureUpload.file },
		})
	}
)
// Slice driving the profile-picture upload flow: modal visibility, the
// selected file, and upload progress / alert-banner state.
const profilePictureUploadSlice = createSlice({
	name: 'profilePictureUpload',
	initialState,
	reducers: {
		openUploadModal: state => {
			state.uploadModal = true
		},
		closeUploadModal: state => {
			state.uploadModal = false
		},
		// Open the preview dialog for the given link.
		openPreviewModal: (
			state,
			{ payload: previewLink }: PayloadAction<string>
		) => {
			state.previewLink = previewLink
			state.previewModal = true
		},
		// Closing the preview also discards the selected file.
		closePreviewModal: state => {
			state.previewLink = ''
			state.previewModal = false
			state.file = null
		},
		// Store the file contents (base64/ArrayBuffer) for the next upload.
		makeBase64Image: (state, { payload: file }: PayloadAction<Base64>) => {
			state.file = file
		},
		resetState: () => initialState,
	},
	extraReducers: builder => {
		builder
			.addCase(uploadFile.pending, state => {
				state.uploading = true
				state.alertProps = {
					severity: 'info',
					message: 'Profile picture is uploading',
					checked: true,
				}
			})
			.addCase(uploadFile.fulfilled, (state, { payload }) => {
				state.uploading = false
				state.successful = true
				// The API reports both success and failure inside the payload.
				// NOTE(review): `successful` is set even when the payload
				// carries errorMessage — confirm this is intended.
				const data = payload.uploadProfilePicture
				if (data.message) {
					state.alertProps = {
						severity: 'success',
						message: data.message,
						checked: true,
					}
				}
				if (data.errorMessage) {
					state.alertProps = {
						severity: 'error',
						message: data.errorMessage,
						checked: true,
					}
				}
			})
			.addCase(uploadFile.rejected, state => {
				state.uploading = false
				state.failed = true
				state.alertProps = {
					severity: 'error',
					message: 'Something went wrong. Please try again',
					checked: true,
				}
			})
	},
})

export const {
	openUploadModal,
	closeUploadModal,
	openPreviewModal,
	closePreviewModal,
	makeBase64Image,
	resetState,
} = profilePictureUploadSlice.actions

export default profilePictureUploadSlice.reducer
|
package com.truelayer.java;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.nio.charset.StandardCharsets;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for the {@code SigningOptions} builder: successful construction
 * and validation errors for the two mandatory fields.
 */
public class SigningOptionsBuilderTests {

    public static final String A_KEY_ID = "a-key-id";
    public static final byte[] A_PRIVATE_KEY = "a-private-key".getBytes(StandardCharsets.UTF_8);

    @Test
    @DisplayName("It should build signing options")
    public void itShouldBuildASigningOptionsInstance() {
        SigningOptions signingOptions = SigningOptions.builder()
                .keyId(A_KEY_ID)
                .privateKey(A_PRIVATE_KEY)
                .build();

        // Accessors must echo back exactly what the builder received.
        assertEquals(A_KEY_ID, signingOptions.keyId());
        assertEquals(A_PRIVATE_KEY, signingOptions.privateKey());
    }

    @Test
    @DisplayName("It should throw an exception if key id is not set")
    public void itShouldThrowExceptionIfKeyIdNotSet() {
        Throwable thrown = assertThrows(
                TrueLayerException.class, () -> SigningOptions.builder().build());

        assertEquals("key id must be set", thrown.getMessage());
    }

    @Test
    @DisplayName("It should throw an exception if private key is not set")
    public void itShouldThrowExceptionIfPrivateKeyNotSet() {
        Throwable thrown = assertThrows(
                TrueLayerException.class,
                () -> SigningOptions.builder().keyId(A_KEY_ID).build());

        assertEquals("private key must be set", thrown.getMessage());
    }
}
|
<?php
// ACF fields for the left-hand article section (title, body and image).
// NOTE(review): $title and $content are echoed unescaped — fine for trusted
// admin-entered ACF content, but confirm no untrusted input reaches these fields.
$title = get_field('article_left_title');
$content = get_field('article_left_content');
$image_ID = get_field('article_left_img_ID');
?>
<section id="article">
    <div class="container p0 article">
        <div class="cell-6 pd-h-sm cell-m-12 article__txt">
            <h2 class="article__txt--title article__txt--title-border"> <?= $title ;?></h2>
            <div class="article__txt--content wp-content"> <?= $content ;?></div>
        </div>
        <div class="cell-6 pd-h-sm cell-m-12 article__video">
            <?= wp_get_attachment_image( $image_ID, 'article-img' ); ?>
        </div>
    </div>
</section>
|
using Godot;
using Drawing;
namespace Examples.Chapter0
{
    /// <summary>
    /// Exercise 0.4 - Paint splatter.
    /// </summary>
    /// Mix of DrawCanvas and RandomNumberGenerator to simulate paint.
    public class C0Exercise4 : Node2D, IExample
    {
        public string GetSummary()
        {
            return "Exercise I.4:\n"
                + "Paint splatter";
        }

        private RandomNumberGenerator generator;

        public override void _Ready()
        {
            generator = new RandomNumberGenerator();
            generator.Randomize();

            var canvas = new DrawCanvas((pen) =>
            {
                var size = GetViewportRect().Size; // NOTE(review): unused
                // Follow mouse for fun
                var mousePosition = GetViewport().GetMousePosition();
                // Gaussian offsets cluster the droplets around the cursor.
                float xNum = generator.Randfn(0, 1); // Gaussian distribution
                float yNum = generator.Randfn(0, 1); // Gaussian distribution
                // NOTE(review): Randfn(0,1) is centred on 0, so * 255 yields
                // mostly small or negative values before the byte cast —
                // confirm the resulting colour distribution is intentional.
                var colNumR = (byte)(generator.Randfn(0, 1) * 255);
                var colNumG = (byte)(generator.Randfn(0, 1) * 255);
                var colNumB = (byte)(generator.Randfn(0, 1) * 255);
                float x = (20 * xNum) + mousePosition.x;
                float y = (20 * yNum) + mousePosition.y;
                pen.DrawCircle(new Vector2(x, y), 8, Color.Color8(colNumR, colNumG, colNumB, 64));
            });
            canvas.QueueClearDrawing(Color.Color8(45, 45, 45));
            AddChild(canvas);
        }
    }
}
|
using System;
using System.IO;
namespace DataBoss.DataPackage
{
public sealed class CsvWriter : IDisposable
{
enum WriterState : byte {
BeginRecord = 0,
InRecord = 1
}
static readonly char[] QuotableChars = new[] { '"', '\n', };
public const string DefaultDelimiter = ";";
const string RecordDelimiter = "\r\n";
WriterState state;
readonly bool leaveOpen;
public readonly string Delimiter;
public TextWriter Writer { get; }
public CsvWriter(TextWriter output, string delimiter = null, bool leaveOpen = false) {
this.Writer = output;
this.leaveOpen = leaveOpen;
this.Delimiter = delimiter ?? DefaultDelimiter;
}
public void WriteField(string value) {
NextField();
if (ShouldQuote(value)) {
Writer.Write('"');
for(var i = 0; i != value.Length; ++i) {
var c = value[i];
Writer.Write(c);
if(c == '"')
Writer.Write('"');
}
Writer.Write('"');
}
else
Writer.Write(value);
}
public void NextField() {
if (state == WriterState.InRecord)
Writer.Write(Delimiter);
else
state = WriterState.InRecord;
}
bool ShouldQuote(string value) =>
value.IndexOfAny(QuotableChars) != -1
|| value.Contains(Delimiter);
public void NextRecord() {
Writer.Write(RecordDelimiter);
state = WriterState.BeginRecord;
}
public void Flush() => Writer.Flush();
public void Dispose() {
Flush();
if(!leaveOpen)
Writer.Close();
}
}
}
|
module Juno.Types.Event
( Event(..)
) where
import Juno.Types.Message
-- | Events consumed by the consensus event loop.
data Event = ERPC RPC                 -- ^ an incoming RPC message
           | AERs AlotOfAERs          -- ^ a batch of AppendEntries responses (see "Juno.Types.Message")
           | ElectionTimeout String   -- ^ election timer fired; the String carries diagnostic text
           | HeartbeatTimeout String  -- ^ heartbeat timer fired; the String carries diagnostic text
  deriving (Show)
|
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Game-over overlay: pauses the game while visible, shows the winner,
/// and offers return-to-menu / quit actions.
/// </summary>
public class GameoverMenu : MonoBehaviour
{
    #region Fields
    private float timeScale;                     // time scale captured on open, restored on return
    private Text winnerText;                     // label announcing the winner
    private string winnerTextEnding = "wins!";
    public FighterType winner = FighterType.Player;
    #endregion

    #region Properties
    public FighterType Winner
    {
        get { return winner; }
        set { winner = value; }
    }
    #endregion

    #region Unity API
    // Pause the game while the menu is shown and cache the winner label.
    private void Start()
    {
        timeScale = Time.timeScale;
        Time.timeScale = 0;
        winnerText = gameObject.transform.Find("Canvas/WinnerText").gameObject.GetComponent<Text>();
    }

    // Refresh the label every frame so later changes to Winner are reflected.
    private void Update()
    {
        winnerText.text = winner + " " + winnerTextEnding;
    }
    #endregion

    #region Events Handlers
    // Return to the main menu, restoring the time scale captured in Start().
    public void HandleReturnButtonOnClickEvent()
    {
        MenuManager.GoToMenu(MenuName.Main);
        Time.timeScale = timeScale;
        MenuManager.RemoveTemporalMenu(gameObject);
    }

    public void HandleQuitButtonOnClickEvent()
    {
        Application.Quit();
    }
    #endregion
}
|
/*
* Copyright (c) 2006-2011, AIOTrade Computing Co. and Contributors
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* o Neither the name of AIOTrade Computing Co. nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.aiotrade.lib.neuralnetwork.machine.mlp.learner
import org.aiotrade.lib.math.vector.DefaultVec
import org.aiotrade.lib.math.vector.Vec
import org.aiotrade.lib.neuralnetwork.machine.mlp.neuron.PerceptronNeuron
/**
*
* @author Caoyuan Deng
*/
class MomentumBpLearner(_neuron: PerceptronNeuron) extends AbstractBpLearner(_neuron, Mode.Serial) {
  /** weight updated value vector: deltaW_ij(t) */
  private lazy val deltaWeight: Vec = new DefaultVec(_neuron.inputDimension)

  /** learning parameters; set via adapt()/setOpts() */
  private var momentumRate: Double = _
  private var learningRate: Double = _

  /**
   * Adapt the neuron weights with the momentum back-propagation rule:
   *   deltaW(t) = eta * gradient + alpha * deltaW(t-1)
   *   w(t+1)    = w(t) + deltaW(t)
   *
   * args(0) = learning rate (eta), args(1) = momentum rate (alpha).
   */
  def adapt(args: Double*) {
    learningRate = args(0)
    momentumRate = args(1)

    // Adapt the weight using the delta rule.
    val weight = _neuron.weight
    val gradient = sumGradient
    val n = _neuron.inputDimension
    var i = 0
    while (i < n) {
      val gradientTerm = gradient(i) * learningRate
      val prevDeltaWeightTerm = deltaWeight(i) * momentumRate
      deltaWeight(i) = gradientTerm + prevDeltaWeightTerm
      // BUGFIX: was `weight(i) + weight(i) + deltaWeight(i)` — a side-effect
      // free expression, so the weights were never actually updated.
      weight(i) = weight(i) + deltaWeight(i)
      i += 1
    }

    /** this learner should reset gradient to 0 after adapt() is called each time */
    reset
  }

  override
  val learnerName = "Momentum Leaner" // NOTE(review): likely a typo for "Learner"; kept as-is in case it is matched elsewhere

  override
  def setOpts(opts: Double*) {
    learningRate = opts(0)
    momentumRate = opts(1)
  }
}
|
'use babel';
import { BufferedNodeProcess } from 'atom'
import { getExportsForFiles } from '../ast/AstProvider.js'
const finder = require('findit');
const nodePath = require('path');
const _ = require('lodash');
/**
 * Maintains an in-memory index of module exports across the project,
 * rebuilt on demand and incrementally refreshed when files are saved.
 */
export default class ExportIndex {
  constructor() {
    this.activeObserveChange = null; // disposable for the current onDidSave subscription
    this.index = [];                 // export records; each record carries a `file` property
  }

  // Re-index a file every time the given editor saves it.
  observeChanges(editor) {
    this.disposeIfNecessary();
    this.activeObserveChange = editor.onDidSave((ev) => {
      const fileName = ev.path;
      getExportsForFiles([fileName])
        .then((indexIncrement) => {
          // Drop stale records for this file before merging the fresh ones.
          this.index = _.filter(this.index, (record) => record.file !== fileName);
          this.index = this.index.concat(indexIncrement);
        });
    });
  }

  disposeIfNecessary() {
    if (this.activeObserveChange) {
      this.activeObserveChange.dispose();
    }
  }

  getIndex() {
    return this.index;
  }

  // Walk every project root, collect files with allowed suffixes while
  // skipping ignored folders, then extract their exports into the index.
  buildIndex() {
    const startTime = new Date().getTime();
    const files = [];
    atom.project.getPaths().forEach((path) => {
      finder(path)
        .on('directory', (dir, stat, stop) => {
          const base = nodePath.basename(dir);
          if (path !== dir && _.contains(atom.config.get('js-autoimport.ignoredFolders'), base)) {
            stop();
          }
        })
        .on('file', (file) => {
          const extname = nodePath.extname(file);
          if (_.contains(atom.config.get('js-autoimport.allowedSuffixes'), extname)) {
            files.push(file);
          }
        })
        .on('end', () => {
          console.debug(`Traversed ${files.length} files in ${new Date().getTime() - startTime}ms`, files);
          const startIndexTime = new Date().getTime();
          getExportsForFiles(files)
            .then((index) => {
              this.index = index;
              console.debug(`Index has been obtained in ${new Date().getTime() - startIndexTime}ms`);
            });
        });
    });
  }
}
|
## Helm deployment of Dex
### Preparation to installation
You have to set all the values specified in this section to properly run this chart
``` accessKey: <minio_access_key>
secretKey: <minio_secret_key>
```
### Installation
To install this chart after preparation phase use:
```helm install .```
### MANUAL STEPS REQUIRED TO SECURE MINIO WITH TLS (SELF-SIGNED CERTIFICATES)
It is assumed that the self-signed certificate and key are placed in the current directory. The certificate and key file names matter: they must be `private.key` and `public.crt`.
Create kubernetes secret.
```
kubectl create secret generic tls-ssl-minio --from-file=./private.key --from-file=./public.crt
kubectl edit deployment minioplatform
```
Follow instructions from https://github.com/minio/minio/tree/master/docs/tls/kubernetes#3-update-deployment-yaml-file
|
#!/bin/bash
# Prepare the MySQL test database for egg-orm.

# On GitHub Actions the MySQL service starts with a root password; reset it to
# empty so the unauthenticated client call below can connect.
if [ ${GITHUB_ACTIONS:-false} = true ]; then
  mysqladmin -h127.0.0.1 -P${MYSQL_PORT:-3306} -uroot -p${MYSQL_ROOT_PASSWORD} password '';
fi

##
# MySQL
# Create the "egg-orm" database (ANSI mode enables double-quoted identifiers)
# and load the test fixture dump.
cat <<EOF | mysql -h127.0.0.1 -P${MYSQL_PORT:-3306} -uroot
SET SESSION SQL_MODE='ANSI';
CREATE DATABASE IF NOT EXISTS "egg-orm";
USE "egg-orm";
source test/dumpfile.sql;
EOF
|
package main
import (
"fmt"
"github.com/fwhezfwhez/errorx"
"github.com/fwhezfwhez/tcpx"
//"tcpx"
"time"
)
// Example: start a tcpx server on :8080 and stop it gracefully after 10s.
func main() {
	srv := tcpx.NewTcpX(nil)
	// start server
	go func() {
		fmt.Println("tcp listen on :8080")
		srv.ListenAndServe("tcp", ":8080")
	}()

	// after 10 seconds and stop it
	go func() {
		time.Sleep(10 * time.Second)
		// Stop(false) requests a graceful stop; the commented variant below
		// shows Stop(true) for an immediate stop.
		if e := srv.Stop(false); e != nil {
			fmt.Println(errorx.Wrap(e).Error())
			return
		}
		//
		//if e:=srv.Stop(true); e!=nil {
		//	fmt.Println(errorx.Wrap(e).Error())
		//	return
		//}
	}()

	// Block forever; the process keeps running even after the server stops.
	select {}
}
|
<?php
// DataTable translations from: https://github.com/DataTables/Plugins/tree/master/i18n
// Czech (cs) locale strings for the jQuery DataTables UI.
return [
    'sEmptyTable' => 'Žádná dostupná data v tabulce',
    'sInfo' => 'Zobrazuji _START_ - _END_ z _TOTAL_ záznamů',
    'sInfoEmpty' => 'Zobrazuji 0 - 0 z 0 záznamů',
    'sInfoFiltered' => '(filtrováno z _MAX_ všech záznamů)',
    'sInfoPostFix' => '',
    'sInfoThousands' => ',',
    'sLengthMenu' => 'Zobrazit _MENU_ záznamů',
    'sLoadingRecords' => 'Načítání...',
    'sProcessing' => 'Zpracovávání...',
    'sSearch' => 'Vyhledat:',
    'sZeroRecords' => 'Žádné záznamy nenalezeny',
    'oPaginate' => [
        'sFirst' => 'První',
        'sLast' => 'Poslední',
        'sNext' => 'Další',
        'sPrevious' => 'Předchozí',
    ],
    'oAria' => [
        'sSortAscending' => ': třídění sloupce vzestupně',
        'sSortDescending' => ': třídění sloupce sestupně',
    ],
];
|
import torch
import torch.nn as nn
import torch.nn.functional as f
import numpy as np
from math import log
def one_hot_encode(board):
    """One-hot encode a 4x4 2048 board into a (1, 16, 4, 4) tensor.

    Channel 0 marks empty cells; channel k (1 <= k <= 15) marks cells
    holding the tile value 2**k.
    """
    encoded = torch.zeros(1, 16, 4, 4)
    for row in range(4):
        for col in range(4):
            tile = board[row, col]
            channel = 0 if tile == 0 else int(log(tile, 2))
            encoded[0, channel, row, col] = 1
    return encoded
class Net(nn.Module):
    """Policy network for 2048.

    Applies seven convolutions of different receptive fields to a one-hot
    (batch, 16, 4, 4) board encoding, concatenates the flattened feature
    maps, and maps them through three fully connected layers to logits for
    the 4 move directions.
    """

    def __init__(self):
        super(Net, self).__init__()
        # Receptive fields over the 4x4 board (output spatial size in comments).
        self.conv1 = nn.Conv2d(16, 128, (1, 4))  # full rows        -> 4x1
        self.conv2 = nn.Conv2d(16, 128, (4, 1))  # full columns     -> 1x4
        self.conv3 = nn.Conv2d(16, 128, (1, 2))  # horizontal pairs -> 4x3
        self.conv4 = nn.Conv2d(16, 128, (2, 1))  # vertical pairs   -> 3x4
        self.conv5 = nn.Conv2d(16, 128, 2)       # 2x2 patches      -> 3x3
        self.conv6 = nn.Conv2d(16, 128, 3)       # 3x3 patches      -> 2x2
        self.conv7 = nn.Conv2d(16, 128, 4)       # whole board      -> 1x1
        # 128 * (4 + 4 + 12 + 12 + 9 + 4 + 1) = 5888 concatenated features.
        self.fc1 = nn.Linear(5888, 1024)
        self.fc2 = nn.Linear(1024, 128)
        self.fc3 = nn.Linear(128, 4)
        self.dropout = nn.Dropout(0.5)

    def forward(self, x):
        convs = (self.conv1, self.conv2, self.conv3, self.conv4,
                 self.conv5, self.conv6, self.conv7)
        features = [f.relu(conv(x)) for conv in convs]
        # BUGFIX: the original flattened x6 and x7 with x5.size()[0]; each
        # feature map is now flattened with its own batch dimension.
        flat = [t.view(t.size(0), -1) for t in features]
        x = torch.cat(flat, dim=1)
        # LayerNorm without affine params normalizes each feature vector.
        x = f.relu(nn.LayerNorm(x.size()[1:], elementwise_affine=False)(x))
        x = self.dropout(self.fc1(x))
        x = f.relu(nn.LayerNorm(x.size()[1:], elementwise_affine=False)(x))
        x = self.dropout(self.fc2(x))
        x = f.relu(nn.LayerNorm(x.size()[1:], elementwise_affine=False)(x))
        x = self.fc3(x)
        return x

    def find_direction(self, board):
        """Return the argmax move direction (0-3) for a raw board array."""
        inputs = one_hot_encode(board)
        output = self(inputs)
        _, predict = torch.max(output.data, -1)
        return int(predict)
|
<?php
namespace Boekkooi\CS;
/**
 * Extends the Symfony CS configuration contract with checker support.
 */
interface ConfigInterface extends \Symfony\CS\ConfigInterface
{
    /**
     * Returns the checkers to run.
     *
     * @return CheckerInterface[]
     */
    public function getCheckers();

    /**
     * Returns the path to the checker cache file.
     *
     * @return string
     */
    public function getCheckerCacheFile();
}
|
package service
import (
"context"
"github.com/ibuildthecloud/k3v/pkg/translate"
v1 "github.com/rancher/wrangler-api/pkg/generated/controllers/core/v1"
"github.com/rancher/wrangler/pkg/apply"
corev1 "k8s.io/api/core/v1"
)
const (
	// vServiceNames indexes virtual services by their translated physical name.
	vServiceNames = "vServiceNames"
)

// handler syncs virtual-cluster Service objects into the target (physical)
// namespace and back.
type handler struct {
	targetNamespace string
	apply           apply.Apply
	vServiceCache   v1.ServiceCache
	vService        v1.ServiceClient
	pService        v1.ServiceClient
}
// Register wires up service syncing: virtual Service changes are populated
// into the target namespace and physical Service changes are propagated back.
func Register(
	ctx context.Context,
	targetNamespace string,
	apply apply.Apply,
	pService v1.ServiceController,
	vService v1.ServiceController,
) {
	h := &handler{
		targetNamespace: targetNamespace,
		apply:           apply,
		vServiceCache:   vService.Cache(),
		vService:        vService,
		pService:        pService,
	}

	// Index virtual services by their translated physical object name so
	// back-population can locate the owning virtual service quickly.
	vService.Cache().AddIndexer(vServiceNames, func(obj *corev1.Service) (strings []string, e error) {
		return []string{translate.ObjectToPName(obj)}, nil
	})

	vService.OnRemove(ctx, "service-populate", h.Remove)
	vService.OnChange(ctx, "service-populate", h.Populate)
	pService.OnChange(ctx, "service-backpopulate", h.BackPopulate)
}
|
# encoding: UTF-8
# Open Directory category attached to a search result.
class DirectoryCategory < ActionWebService::Struct
  member :fullViewableName, :string
  member :specialEncoding, :string
end
# A single search hit in a GoogleSearchResult.
class ResultElement < ActionWebService::Struct
  member :summary, :string
  member :URL, :string
  member :snippet, :string
  member :title, :string
  member :cachedSize, :string
  member :relatedInformationPresent, :bool
  member :hostName, :string
  member :directoryCategory, DirectoryCategory
  member :directoryTitle, :string
end
# Top-level response structure for a doGoogleSearch call.
class GoogleSearchResult < ActionWebService::Struct
  member :documentFiltering, :bool
  member :searchComments, :string
  member :estimatedTotalResultsCount, :int
  member :estimateIsExact, :bool
  member :resultElements, [ResultElement]
  member :searchQuery, :string
  member :startIndex, :int
  member :endIndex, :int
  member :searchTips, :string
  member :directoryCategories, [DirectoryCategory]
  member :searchTime, :float
end
# SOAP API definition mirroring the Google Search service method names
# (inflect_names false keeps the camelCase method names intact).
class GoogleSearchAPI < ActionWebService::API::Base
  inflect_names false

  api_method :doGetCachedPage, :returns => [:string], :expects => [{:key=>:string}, {:url=>:string}]
  api_method :doGetSpellingSuggestion, :returns => [:string], :expects => [{:key=>:string}, {:phrase=>:string}]

  api_method :doGoogleSearch, :returns => [GoogleSearchResult], :expects => [
    {:key=>:string},
    {:q=>:string},
    {:start=>:int},
    {:maxResults=>:int},
    {:filter=>:bool},
    {:restrict=>:string},
    {:safeSearch=>:bool},
    {:lr=>:string},
    {:ie=>:string},
    {:oe=>:string}
  ]
end
|
/**
* @Author: thomasvanhoutte
* @Date: 2017-01-08T19:30:48+01:00
* @Last modified by: thomasvanhoutte
* @Last modified time: 2017-01-08T19:47:13+01:00
*/
import { Component, OnInit } from '@angular/core';
import { Waarneming } from '../models/waarneming.model';
import { WaarnemingService } from '../services/waarneming.service';
/**
 * Lists bird observations ("waarnemingen").
 * Loads the data once from WaarnemingService on init.
 */
@Component({
  selector: 'waarneming',
  //template: require('telling.component.view.html')
  //templateUrl: './telling.component.view.html'
  template: `
  <div class="waarnemingen">
    <h3>Waarnemingen</h3>
      <div *ngFor="let waarneming of waarnemingen; let i=index" class="waarneming">
        <p class="vogelsoort">{{waarneming.vogelsoort}}</p>
        <p class="aantal">{{waarneming.aantal}}</p>
      </div>
  </div>
  `
})
export class WaarnemingComponent implements OnInit {
  // Last error message. NOTE(review): declared but never assigned in this class.
  errorMsg: string;
  // Observations rendered by the template.
  waarnemingen: Waarneming[];
  constructor(public waarnemingService: WaarnemingService) { }
  // Returns an observable that, when subscribed, fetches the observations
  // and stores them on this component as a side effect of `map`.
  getWaarnemingen(waarnemingService: WaarnemingService) {
    return waarnemingService.getWaarnemingService().map((waarnemingen) => {
      this.waarnemingen = waarnemingen;
    });
    // users => this.users = users,
    // error => this.errorMsg = <any>error);
  }
  // Trigger the initial load; the subscription callback is intentionally empty
  // because `getWaarnemingen` already assigns the data.
  ngOnInit() {
    this.getWaarnemingen(this.waarnemingService).subscribe(_ => {;
    });
  }
}
|
from unittest.mock import Mock
import pytest
from argo_workflows.model.pod_security_context import PodSecurityContext
from argo_workflows.models import HostAlias as ArgoHostAlias
from hera.host_alias import HostAlias
from hera.operator import Operator
from hera.resources import Resources
from hera.security_context import WorkflowSecurityContext
from hera.task import Task
from hera.template_ref import TemplateRef
from hera.ttl_strategy import TTLStrategy
from hera.volume_claim_gc import VolumeClaimGCStrategy
from hera.volumes import (
ConfigMapVolume,
EmptyDirVolume,
ExistingVolume,
SecretVolume,
Volume,
)
from hera.workflow import Workflow
from hera.workflow_status import WorkflowStatus
def test_wf_contains_specified_service_account(ws):
    """Service account name given to Workflow lands on both spec and template."""
    w = Workflow('w', service=ws, service_account_name='w-sa')
    expected_sa = 'w-sa'
    assert w.spec.service_account_name == expected_sa
    assert w.spec.templates[0].service_account_name == expected_sa
def test_wf_does_not_contain_sa_if_one_is_not_specified(ws):
    """No service_account_name attribute appears when none is passed."""
    w = Workflow('w', service=ws)
    assert not hasattr(w.spec, 'service_account_name')
@pytest.fixture
def workflow_security_context_kwargs():
    """Keyword arguments shared by the security-context tests below."""
    sc_kwargs = {
        "run_as_user": 1000,
        "run_as_group": 1001,
        "fs_group": 1002,
        "run_as_non_root": False,
    }
    return sc_kwargs
def test_wf_contains_specified_security_context(ws, workflow_security_context_kwargs):
    """WorkflowSecurityContext is translated into an Argo PodSecurityContext."""
    wsc = WorkflowSecurityContext(**workflow_security_context_kwargs)
    w = Workflow('w', service=ws, security_context=wsc)
    expected_security_context = PodSecurityContext(**workflow_security_context_kwargs)
    assert w.spec.security_context == expected_security_context
@pytest.mark.parametrize("set_only", ["run_as_user", "run_as_group", "fs_group", "run_as_non_root"])
def test_wf_specified_partial_security_context(ws, set_only, workflow_security_context_kwargs):
    """Each security-context field can be set independently of the others."""
    one_param_kwargs = {set_only: workflow_security_context_kwargs[set_only]}
    wsc = WorkflowSecurityContext(**one_param_kwargs)
    w = Workflow('w', service=ws, security_context=wsc)
    expected_security_context = PodSecurityContext(**one_param_kwargs)
    assert w.spec.security_context == expected_security_context
def test_wf_does_not_contain_specified_security_context(ws):
    """No security_context key is present unless one is supplied."""
    w = Workflow('w', service=ws)
    assert "security_context" not in w.spec
def test_wf_does_not_add_empty_task(w):
    """Adding None as a task is a no-op."""
    t = None
    w.add_task(t)
    assert not w.dag_template.tasks
def test_wf_adds_specified_tasks(w, no_op):
    """All tasks passed to add_tasks appear in the DAG, in order."""
    n = 3
    ts = [Task(f't{i}', no_op) for i in range(n)]
    w.add_tasks(*ts)
    assert len(w.dag_template.tasks) == n
    for i, t in enumerate(w.dag_template.tasks):
        assert ts[i].name == t.name
def test_wf_adds_task_volume(w, no_op):
    """A task Volume becomes a volume claim template with matching settings."""
    t = Task(
        't',
        no_op,
        resources=Resources(volumes=[Volume(name='v', size='1Gi', mount_path='/', storage_class_name='custom')]),
    )
    w.add_task(t)
    claim = w.spec.volume_claim_templates[0]
    assert claim.spec.access_modes == ['ReadWriteOnce']
    assert claim.spec.resources.requests['storage'] == '1Gi'
    assert claim.spec.storage_class_name == 'custom'
    assert claim.metadata.name == 'v'
def test_wf_adds_task_secret_volume(w, no_op):
    """A SecretVolume on a task is added to the workflow spec volumes."""
    t = Task('t', no_op, resources=Resources(volumes=[SecretVolume(name='s', secret_name='sn', mount_path='/')]))
    w.add_task(t)
    vol = w.spec.volumes[0]
    assert vol.name == 's'
    assert vol.secret.secret_name == 'sn'
def test_wf_adds_task_config_map_volume(w):
    """A ConfigMapVolume is added with the given config map name."""
    t = Task('t', resources=Resources(volumes=[ConfigMapVolume(config_map_name='cmn', mount_path='/')]))
    w.add_task(t)
    assert w.spec.volumes[0].name
    assert w.spec.volumes[0].config_map.name == "cmn"
def test_wf_adds_task_existing_checkpoints_staging_volume(w, no_op):
    """An ExistingVolume maps to a PVC claim with the same name."""
    t = Task('t', no_op, resources=Resources(volumes=[ExistingVolume(name='v', mount_path='/')]))
    w.add_task(t)
    vol = w.spec.volumes[0]
    assert vol.name == 'v'
    assert vol.persistent_volume_claim.claim_name == 'v'
def test_wf_adds_task_existing_checkpoints_prod_volume(w, no_op):
    """Same as the staging case: ExistingVolume reuses its name as claim name."""
    t = Task(
        't',
        no_op,
        resources=Resources(volumes=[ExistingVolume(name='vol', mount_path='/')]),
    )
    w.add_task(t)
    vol = w.spec.volumes[0]
    assert vol.name == 'vol'
    assert vol.persistent_volume_claim.claim_name == 'vol'
def test_wf_adds_task_empty_dir_volume(w, no_op):
    """An EmptyDirVolume defaults to the in-memory medium with no size limit."""
    t = Task('t', no_op, resources=Resources(volumes=[EmptyDirVolume(name='v')]))
    w.add_task(t)
    vol = w.spec.volumes[0]
    assert vol.name == 'v'
    assert not vol.empty_dir.size_limit
    assert vol.empty_dir.medium == 'Memory'
def test_wf_adds_head(w, no_op):
    """add_head makes every existing task depend on the new head task."""
    t1 = Task('t1', no_op)
    t2 = Task('t2', no_op)
    t1 >> t2
    w.add_tasks(t1, t2)
    h = Task('head', no_op)
    w.add_head(h)
    assert t1.argo_task.dependencies == ['head']
    assert t2.argo_task.dependencies == ['t1', 'head']
def test_wf_adds_tail(w, no_op):
    """add_tail appends a task that depends only on the current leaf task."""
    t1 = Task('t1', no_op)
    t2 = Task('t2', no_op)
    t1 >> t2
    w.add_tasks(t1, t2)
    t = Task('tail', no_op)
    w.add_tail(t)
    assert not hasattr(t1.argo_task, 'dependencies')
    assert t2.argo_task.dependencies == ['t1']
    assert t.argo_task.dependencies == ['t2']
def test_wf_overwrites_head_and_tail(w, no_op):
    """Calling add_head again chains heads: the newest head runs first."""
    t1 = Task('t1', no_op)
    t2 = Task('t2', no_op)
    t1 >> t2
    w.add_tasks(t1, t2)
    h2 = Task('head2', no_op)
    w.add_head(h2)
    assert t1.argo_task.dependencies == ['head2']
    assert t2.argo_task.dependencies == ['t1', 'head2']
    h1 = Task('head1', no_op)
    w.add_head(h1)
    assert h2.argo_task.dependencies == ['head1']
    assert t1.argo_task.dependencies == ['head2', 'head1']
    assert t2.argo_task.dependencies == ['t1', 'head2', 'head1']
def test_wf_contains_specified_labels(ws):
    """Labels given to Workflow appear on the workflow metadata."""
    w = Workflow('w', service=ws, labels={'foo': 'bar'})
    expected_labels = {'foo': 'bar'}
    assert w.metadata.labels == expected_labels
def test_wf_contains_specified_annotations(ws):
    """Annotations given to Workflow appear on the workflow metadata."""
    w = Workflow('w', service=ws, annotations={'foo': 'bar'})
    expected_annotations = {'foo': 'bar'}
    assert w.metadata.annotations == expected_annotations
def test_wf_submit_with_default(ws):
    """create() forwards the built workflow and namespace to the service."""
    w = Workflow('w', service=ws, labels={'foo': 'bar'}, namespace="test")
    w.service = Mock()
    w.create()
    w.service.create.assert_called_with(w.workflow, w.namespace)
def test_wf_adds_image_pull_secrets(ws):
    """Image pull secrets are emitted as name dicts, preserving order."""
    w = Workflow('w', service=ws, image_pull_secrets=['secret0', 'secret1'])
    secrets = [{'name': secret.name} for secret in w.spec.get('image_pull_secrets')]
    assert secrets[0] == {'name': 'secret0'}
    assert secrets[1] == {'name': 'secret1'}
def test_wf_adds_ttl_strategy(ws):
    """TTLStrategy fields are copied verbatim into the workflow spec."""
    w = Workflow(
        'w',
        service=ws,
        ttl_strategy=TTLStrategy(seconds_after_completion=5, seconds_after_failure=10, seconds_after_success=15),
    )
    expected_ttl_strategy = {
        'seconds_after_completion': 5,
        'seconds_after_failure': 10,
        'seconds_after_success': 15,
    }
    assert w.spec.ttl_strategy._data_store == expected_ttl_strategy
def test_wf_adds_volume_claim_gc_strategy_on_workflow_completion(ws):
    """The OnWorkflowCompletion GC strategy is serialized by name."""
    w = Workflow('w', service=ws, volume_claim_gc_strategy=VolumeClaimGCStrategy.OnWorkflowCompletion)
    expected_volume_claim_gc = {"strategy": "OnWorkflowCompletion"}
    assert w.spec.volume_claim_gc._data_store == expected_volume_claim_gc
def test_wf_adds_volume_claim_gc_strategy_on_workflow_success(ws):
    """The OnWorkflowSuccess GC strategy is serialized by name."""
    w = Workflow('w', service=ws, volume_claim_gc_strategy=VolumeClaimGCStrategy.OnWorkflowSuccess)
    expected_volume_claim_gc = {"strategy": "OnWorkflowSuccess"}
    assert w.spec.volume_claim_gc._data_store == expected_volume_claim_gc
def test_wf_adds_host_aliases(ws):
    """Host aliases are converted to Argo HostAlias objects, in order."""
    w = Workflow(
        'w',
        service=ws,
        host_aliases=[
            HostAlias(hostnames=["host1", "host2"], ip="0.0.0.0"),
            HostAlias(hostnames=["host3"], ip="1.1.1.1"),
        ],
    )
    assert w.spec.host_aliases[0] == ArgoHostAlias(hostnames=["host1", "host2"], ip="0.0.0.0")
    assert w.spec.host_aliases[1] == ArgoHostAlias(hostnames=["host3"], ip="1.1.1.1")
def test_wf_add_task_with_template_ref(w):
    """A TemplateRef-backed task joins the DAG but adds no spec template."""
    t = Task("t", template_ref=TemplateRef(name="name", template="template"))
    w.add_task(t)
    assert w.dag_template.tasks[0] == t.argo_task
    # Not add a Task with TemplateRef to w.spec.templates
    # Note: w.spec.templates[0] is a template of dag
    assert len(w.spec.templates) == 1
def test_wf_adds_exit_tasks(w, no_op):
    """Exit tasks land in the exit DAG and contribute templates and volumes."""
    t1 = Task('t1', no_op)
    w.add_task(t1)
    t2 = Task(
        't2',
        no_op,
        resources=Resources(volumes=[SecretVolume(name='my-vol', mount_path='/mnt/my-vol', secret_name='my-secret')]),
    ).on_workflow_status(Operator.equals, WorkflowStatus.Succeeded)
    w.on_exit(t2)
    t3 = Task(
        't3', no_op, resources=Resources(volumes=[Volume(name='my-vol', mount_path='/mnt/my-vol', size='5Gi')])
    ).on_workflow_status(Operator.equals, WorkflowStatus.Failed)
    w.on_exit(t3)
    assert len(w.exit_template.dag.tasks) == 2
    assert len(w.spec.templates) == 5
    assert len(w.spec.volume_claim_templates) == 1
    assert len(w.spec.volumes) == 1
def test_wf_catches_tasks_without_exit_status_conditions(w, no_op):
    """on_exit rejects tasks that lack a workflow status condition."""
    t1 = Task('t1', no_op)
    w.add_task(t1)
    t2 = Task('t2', no_op)
    with pytest.raises(AssertionError) as e:
        w.on_exit(t2)
    assert (
        str(e.value)
        == 'Each exit task must contain a workflow status condition. Use `task.on_workflow_status(...)` to set it'
    )
def test_wf_catches_exit_tasks_without_parent_workflow_tasks(w, no_op):
    """on_exit rejects exit tasks when the workflow has no tasks yet."""
    t1 = Task('t1', no_op)
    with pytest.raises(AssertionError) as e:
        w.on_exit(t1)
    assert str(e.value) == 'Cannot add an exit condition to empty workflows'
def test_wf_contains_expected_default_exit_template(w):
    """A fresh workflow carries an empty default exit template."""
    assert w.exit_template
    assert w.exit_template.name == 'exit-template'
    assert w.exit_template.dag.tasks == []
|
import { Injectable } from '@nestjs/common';
import { GameServer } from '@/game-servers/models/game-server';
import { Rcon } from 'rcon-client';
/**
 * Factory for RCON connections to game servers.
 */
@Injectable()
export class RconFactoryService {
  /**
   * Opens an RCON connection to the given game server.
   * Resolves once connected; rejects on the first client error
   * or connection failure. Uses a 30-second timeout.
   */
  async createRcon(gameServer: GameServer): Promise<Rcon> {
    return new Promise((resolve, reject) => {
      const rcon = new Rcon({
        host: gameServer.internalIpAddress,
        port: parseInt(gameServer.port, 10),
        password: gameServer.rconPassword,
        timeout: 30000,
      });
      // Surface transport errors emitted by the client as a rejection.
      rcon.on('error', (error) => {
        return reject(error);
      });
      rcon.connect().then(resolve).catch(reject);
    });
  }
}
|
#!/usr/bin/env bash
# Alternative to `make check`
# Script for running all the executable scripts in the folder
# Success/Failure is reported
# Pass -v as the first argument to also print each test's captured output.
cd "${BASH_SOURCE[0]%/*}"
echo "Working Directory:" $(pwd)
#echo "Which bash" $(which bash)
fails=0
# Run every *.sh file except those starting with "_" (helper scripts).
for test in [^_]*.sh;
do
    if [[ -x "$test" ]]; then
        echo ">$test"
        out=$(./"$test")
        # $? is the exit status of the test script captured just above.
        [[ $? != 0 ]] && fails=$((fails + 1))
        [ "$1" == "-v" ] && echo "$out"
    fi
done
[[ $fails == 0 ]] && echo "Pass" || echo "Fails: $fails"
# Exit status equals the number of failing tests (0 = all passed).
exit $fails
|
package bits
// Slice is a fixed-capacity (64-bit) bit string.
type Slice struct {
	length int    // number of valid bits currently stored (0..64)
	data   uint64 // right-aligned bits: the low `length` bits hold the value
}
// NewSlice returns a bit slice holding the low `length` bits of data.
// It panics when length exceeds the 64-bit capacity.
func NewSlice(length int, data uint64) (slice *Slice) {
	if length > 64 {
		panic("bits.Slice too big")
	}
	slice = &Slice{length: length, data: data}
	return
}
// AppendSlice appends all bits of rhs after the bits already stored.
// It panics when the combined length would exceed 64 bits.
func (slice *Slice) AppendSlice(rhs *Slice) {
	total := slice.length + rhs.length
	if total > 64 {
		panic("bits.Slice too big")
	}
	slice.data = slice.data<<uint(rhs.length) | rhs.data
	slice.length = total
}
// AppendBit appends a single bit (true == 1).
// It panics when the resulting length would exceed 64 bits.
func (slice *Slice) AppendBit(bit bool) {
	if slice.length+1 > 64 {
		panic("bits.Slice too big")
	}
	slice.data <<= 1
	if bit {
		slice.data |= 1
	}
	slice.length++
}
// AppendPadding appends zero bits at the least significant end until the
// slice length is a multiple of 8. No-op when it already is.
func (slice *Slice) AppendPadding() {
	rem := slice.length % 8
	if rem == 0 {
		return
	}
	pad := 8 - rem
	slice.data <<= uint(pad)
	slice.length += pad
}
// PopLeadingBytes removes and returns the leading (most significant) whole
// bytes of the slice. Remaining bits (fewer than 8) stay in the slice; an
// empty result means less than one full byte was stored.
// NOTE(review): the popped bits are not cleared from slice.data, but they
// sit above slice.length and are masked off by the byte() truncation (and
// shifted out by later appends), so they never become visible.
func (slice *Slice) PopLeadingBytes() (bytes []byte) {
	bytes = []byte{}
	for slice.length >= 8 {
		b := byte(slice.data >> uint(slice.length-8))
		bytes = append(bytes, b)
		slice.length -= 8
	}
	return
}
|
# BOOST_PP_DEDUCE_D
`BOOST_PP_DEDUCE_D` マクロは `BOOST_PP_WHILE` の構築状態を手動で推論する。
## Usage
```cpp
BOOST_PP_DEDUCE_D()
```
## Remarks
このマクロは深い展開における*自動再帰*の使用を避けるためにある。
いくつかのプリプロセッサでは、そのような深さでの*自動再帰*は非効率的となり得る。
これは接尾辞 `_D` を持ったマクロの実行に直接使用されるためのものではない。例えば:
```cpp
BOOST_PP_ADD_D(BOOST_PP_DEDUCE_D(), x, y)
```
もしこのような文脈でこのマクロが使われた場合、`_D` マクロは失敗するだろう。
`_D` マクロは渡されたパラメータ `d` を直接、`BOOST_PP_DEDUCE_D()` が展開されるのを邪魔して、結合する。
さらに言えば、このマクロをさきの例のような状況で使用するのは無意味である。
効率を得るにはすでに遅すぎるからだ。
## See Also
- [`BOOST_PP_WHILE`](while.md)
## Requirements
Header: <boost/preprocessor/control/deduce_d.hpp>
## Sample Code
```cpp
#include <boost/preprocessor/arithmetic/add.hpp>
#include <boost/preprocessor/arithmetic/inc.hpp>
#include <boost/preprocessor/arithmetic/sub.hpp>
#include <boost/preprocessor/control/deduce_d.hpp>
#include <boost/preprocessor/punctuation/comma_if.hpp>
#include <boost/preprocessor/repetition/repeat.hpp>
#include <boost/preprocessor/tuple/elem.hpp>
#define RANGE(first, last) \
    BOOST_PP_REPEAT( \
        BOOST_PP_INC( \
            BOOST_PP_SUB(last, first) \
        ), \
        RANGE_M, \
        (first, BOOST_PP_DEDUCE_D()) \
    ) \
    /**/
#define RANGE_M(z, n, data) \
RANGE_M_2( \
n, \
BOOST_PP_TUPLE_ELEM(2, 0, data), \
BOOST_PP_TUPLE_ELEM(2, 1, data) \
) \
/**/
#define RANGE_M_2(n, first, d) \
BOOST_PP_COMMA_IF(n) BOOST_PP_ADD_D(d, n, first) \
/**/
RANGE(5, 10) // 5, 6, 7, 8, 9, 10 に展開される
```
* BOOST_PP_REPEAT[link repeat.md]
* BOOST_PP_INC[link inc.md]
* BOOST_PP_SUB[link sub.md]
* BOOST_PP_DEDUCE_D[link deduce_d.md]
* BOOST_PP_TUPLE_ELEM[link tuple_elem.md]
* BOOST_PP_TUPLE_ELEM[link tuple_elem.md]
* BOOST_PP_COMMA_IF[link comma_if.md]
* BOOST_PP_ADD_D[link add_d.md]
|
from math import factorial


def central_binomial(n: int = 20) -> int:
    """Return the central binomial coefficient C(2n, n).

    This equals the number of monotonic lattice paths through an n x n grid
    (Project Euler problem 15 uses n = 20).
    """
    # Equivalent to math.comb(2 * n, n); kept in factorial form to match
    # the file's existing import, using exact integer division.
    return factorial(2 * n) // factorial(n) ** 2


if __name__ == "__main__":
    print(central_binomial(20))
|
# Validate CPF and CNPJ, remove and include mask.
## Installation / Configuration
Navigate to your project folder, for example:
```
cd /etc/www/projeto
```
And then run:
```
composer require rcngo/cpfcnpj:1.0.* --no-scripts
```
Or add it to your composer.json file under the "require" section, for example:
```php
{
"require": {
"rcngo/cpfcnpj": "1.0.*"
}
}
```
Run the composer update --no-scripts command.
## Use
```php
cnpfcnpj::removeMaskCpfOrCnpj('000.000.000-00');
// return cpf string 00000000000
cnpfcnpj::removeMaskCpfOrCnpj('00.000.000/0000-00');
// return cnpj string 00000000000000
cnpfcnpj::maskCpfOrCnpj('00000000000');
// return cpf string 000.000.000-00
cnpfcnpj::maskCpfOrCnpj('00000000000000');
// return cnpj string 00.000.000/0000-00
cnpfcnpj::cpfValidate('000.000.000-00');
// return true or false
cnpfcnpj::cnpjValidate('00.000.000/0000-00');
// return true or false
```
|
package com.phicdy.mycuration.domain.util
import timber.log.Timber
import java.text.ParseException
import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.Date
import java.util.Locale
/**
 * Parses feed publication dates in the formats commonly seen in RSS/Atom
 * feeds (RFC 1123, plain "yyyy-MM-dd HH:mm:ss", and W3C-DTF variants) and
 * converts them to epoch milliseconds.
 */
object DateParser {
    /**
     * Tries each supported format in turn and returns the first successful
     * parse, or null when none of them matches [pubDate].
     */
    private fun parseDate(pubDate: String): Date? {
        // RFC 1123 style, e.g. "Mon, 07 Jan 2019 12:00:00 +0900"
        val input = SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z", Locale.US)
        val formatWithPubDate: Date?
        try {
            formatWithPubDate = input.parse(pubDate)
            return formatWithPubDate
        } catch (e: ParseException) {
            // fall through to the next format
        }
        //2014-06-25 17:24:07
        // TODO: set device locale
        val noTimezone = SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.JAPAN)
        val formatWithNoTimeZone: Date?
        try {
            formatWithNoTimeZone = noTimezone.parse(pubDate)
            return formatWithNoTimeZone
        } catch (e2: ParseException) {
            // fall through to the W3C-DTF handling below
        }
        //2014-07-27T14:38:34+0900
        if (!pubDate.contains("T")) {
            Timber.d("Invalid format, return null")
            return null
        }
        var replaced = pubDate.replace("T", " ")
        if (pubDate.contains("Z")) {
            //2014-07-27T14:38:34Z
            // NOTE(review): "Z" denotes UTC but is rewritten to +0900 (JST),
            // shifting such timestamps by nine hours — confirm this is intended.
            replaced = replaced.replace("Z", "+0900")
        }
        // Delete millisecond, 2018-04-12T22:38:00.000+09:00 to 2018-04-12T22:38:00+09:00
        val regexMillisecond = "([0-9][0-9]:[0-9][0-9]:[0-9][0-9])\\.[0-9][0-9][0-9]".toRegex()
        if (regexMillisecond.containsMatchIn(replaced)) {
            replaced = replaced.replace(regexMillisecond, "$1")
        }
        // Delete timezone with colon, 2014-07-27T14:38:34+09:00 to 2014-07-27T14:38:34+0900
        val regexTimezoneWithColon = "\\+([0-9][0-9]):([0-9][0-9])".toRegex()
        if (regexTimezoneWithColon.containsMatchIn(replaced)) {
            replaced = replaced.replace(regexTimezoneWithColon, "+$1$2")
        }
        val w3cdtf = SimpleDateFormat("yyyy-MM-dd HH:mm:ssZ", Locale.US)
        val formatWithW3cdtf: Date?
        try {
            formatWithW3cdtf = w3cdtf.parse(replaced)
            return formatWithW3cdtf
        } catch (e: ParseException) {
            // none of the supported formats matched
        }
        Timber.d("Contains T, but failed to parse")
        return null
    }
    /**
     * Converts [dateBeforeChange] to epoch milliseconds using [parseDate];
     * returns 0 when the string cannot be parsed.
     */
    fun changeToJapaneseDate(dateBeforeChange: String): Long {
        val cal = Calendar.getInstance()
        val date = parseDate(dateBeforeChange) ?: return 0
        cal.time = date
        return cal.timeInMillis
    }
}
|
package com.yazan98.domain.models
import androidx.lifecycle.viewModelScope
import com.yazan98.data.ApplicationPrefs
import com.yazan98.data.ReposComponentImpl
import com.yazan98.data.models.internal.LoginInfo
import com.yazan98.data.models.ProfileResponse
import com.yazan98.data.repos.HomeRepository
import com.yazan98.domain.actions.ProfileAction
import com.yazan98.domain.state.ProfileState
import io.vortex.android.reducer.VortexViewModel
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
/**
 * ViewModel for the profile screen. Fetches the profile, repositories and
 * organizations via [HomeRepository] and publishes the aggregated
 * [ProfileResponse] through [ProfileState].
 */
class ProfileViewModel: VortexViewModel<ProfileState, ProfileAction>() {
    // Aggregate response, filled field-by-field as the individual calls finish.
    private val profileResponse = ProfileResponse()
    private val homeRepository: HomeRepository by lazy {
        ReposComponentImpl().getHomeRepository()
    }
    /**
     * Dispatches [newAction]. Requests only run when no state has been emitted
     * yet or the last attempt failed, so successful results are effectively
     * cached for the lifetime of this ViewModel.
     */
    override suspend fun execute(newAction: ProfileAction) {
        withContext(Dispatchers.IO) {
            if (getStateHandler().value == null || getStateHandler().value is ProfileState.ErrorResponse) {
                when (newAction) {
                    is ProfileAction.GetProfileInfoAction -> getProfileInfo()
                    is ProfileAction.GetRepositoriesAction -> getRepositories()
                    is ProfileAction.GetOrganizationsAction -> getOrgs()
                    is ProfileAction.LoginAccountInfoAction -> loginAccount(newAction.get())
                }
            }
        }
    }
    // Loads the repository list into profileResponse.
    // NOTE(review): the success path only stores the result; no state/loading
    // update is emitted here — presumably getOrgs() completes the flow with
    // SuccessState. Confirm the intended call sequence.
    private suspend fun getRepositories() {
        withContext(Dispatchers.IO) {
            addRxRequest(homeRepository.getServiceProvider().getRepositories().subscribe({
                viewModelScope.launch {
                    profileResponse.repositories = it
                }
            }, {
                it.message?.let {
                    viewModelScope.launch {
                        acceptLoadingState(false)
                        acceptNewState(ProfileState.ErrorResponse(it))
                    }
                }
            }))
        }
    }
    // Fetches the profile and stores it; emits an error state on failure.
    private suspend fun getProfileInfo() {
        withContext(Dispatchers.IO) {
            acceptLoadingState(true)
            addRxRequest(homeRepository.getServiceProvider().getProfileInfo().subscribe({
                profileResponse.profile = it
            }, {
                it.message?.let {
                    viewModelScope.launch {
                        acceptLoadingState(false)
                        acceptNewState(ProfileState.ErrorResponse(it))
                    }
                }
            }))
        }
    }
    // Persists the credentials, then fetches the profile like getProfileInfo().
    private suspend fun loginAccount(loginInfo: LoginInfo) {
        withContext(Dispatchers.IO) {
            ApplicationPrefs.saveUsername(loginInfo.username)
            ApplicationPrefs.savePassword(loginInfo.password)
            acceptLoadingState(true)
            addRxRequest(homeRepository.getServiceProvider().getProfileInfo().subscribe({
                profileResponse.profile = it
            }, {
                it.message?.let {
                    viewModelScope.launch {
                        acceptLoadingState(false)
                        acceptNewState(ProfileState.ErrorResponse(it))
                    }
                }
            }))
        }
    }
    // Loads organizations and, on success, emits the completed aggregate state.
    private suspend fun getOrgs() {
        withContext(Dispatchers.IO) {
            addRxRequest(homeRepository.getOrgs().subscribe({
                viewModelScope.launch {
                    profileResponse.organizations = it
                    handleStateWithLoading(ProfileState.SuccessState(profileResponse))
                }
            }, {
                it.message?.let {
                    viewModelScope.launch {
                        acceptLoadingState(false)
                        acceptNewState(ProfileState.ErrorResponse(it))
                    }
                }
            }))
        }
    }
    /** Initial state before any action has produced data. */
    override suspend fun getInitialState(): ProfileState {
        return ProfileState.EmptyState()
    }
}
|
<?php
namespace Deliverist\Builder\Commands;
use Deliverist\Builder\Builder;
use Deliverist\Builder\InvalidArgumentException;
use Deliverist\Builder\ICommand;
use Nette\Utils\FileSystem;
class Copy implements ICommand
{
	/**
	 * Copies one or more source paths to their destinations inside the build.
	 *
	 * @param Builder
	 * @param string|string[] either a single source path, or a map of source => destination
	 * @param string|NULL destination path (required when $source is a single string)
	 */
	public function run(Builder $builder, $source = NULL, $destination = NULL)
	{
		if (!isset($source)) {
			throw new InvalidArgumentException("Missing parameter 'source'.");
		}

		if (is_array($source)) {
			$pairs = $source;
		} elseif ($destination === NULL) {
			throw new InvalidArgumentException("Missing parameter 'destination'.");
		} else {
			$pairs = array($source => $destination);
		}

		foreach ($pairs as $sourcePath => $destinationPath) {
			$builder->log("Copying '$sourcePath' destination '$destinationPath'.");
			FileSystem::copy($builder->getPath($sourcePath), $builder->getPath($destinationPath), FALSE);
		}
	}
}
|
#!/bin/bash
# For each image argument, write a drop-shadowed copy named
# shadow_<name> next to the original file.
while (( $# > 0 ))
do
    case $1 in
        *.png | *.jpg | *.jpeg)
            file_path=$(dirname "$1")
            old_name=$(basename "$1")
            new_name=shadow_"$old_name"
            # Read the source image directly instead of copying it into the
            # current directory first (the old `cp` left a stray file behind).
            # ImageMagick: clone, render a black shadow, merge layers.
            convert "$1" \
                \( +clone -background black -shadow 10x10+0+0 \) \
                +swap -background none -layers merge +repage \
                "${file_path}/${new_name}"
            ;;
    esac
    shift
done
|
require 'spec_helper'
# Model and callback specs for OrderRequest. Outgoing HTTP callbacks are
# stubbed with WebMock; 404 responses are expected to raise.
describe OrderRequest do
  include WebMock::API
  # Factory sanity check.
  it "has a valid factory" do
    expect(FactoryGirl.build(:order_request)).to be_valid
  end
  # Presence validations.
  it "fails without order" do
    expect(FactoryGirl.build(:order_request, :order => nil)).not_to be_valid
  end
  it "fails without order_status" do
    expect(FactoryGirl.build(:order_request, :order_status =>
                             nil)).not_to be_valid
  end
  it "fails without external_system" do
    expect(FactoryGirl.build(:order_request, :external_system =>
                             nil)).not_to be_valid
  end
  # A 404 from the cancel callback endpoint must surface as an error.
  it "callback fails" do
    stub_request(:get, "http://localhost/callback?status=cancel&supplier_order_id=1").
      to_return(:status => 404, :body => "", :headers => {})
    FactoryGirl.create(:order_status, code: 'cancel')
    order_request = FactoryGirl.build(:order_request)
    assert_raise StandardError do
      order_request.cancel
    end
  end
  # Likewise for the confirm callback.
  it "callback unsuccesfull" do
    stub_request(:get, "http://localhost/callback?status=confirm").
      to_return(:status => 404, :body => "", :headers => {})
    FactoryGirl.create(:order_status, code: 'confirm')
    order_request = FactoryGirl.build(:order_request)
    assert_raise StandardError do
      order_request.confirm
    end
  end
end
|
# A string passed for a text MIME type other than text/plain is assumed to be
# raw, pre-rendered data; only text/plain strings go through `show`.
israwtext(::MIME"text/plain", ::AbstractString) = false
israwtext(::MIME, ::AbstractString) = true
israwtext(::MIME, x) = false
# Wrap `io` in an IOContext carrying the inline-display defaults
# (:limit, :color, :jupyter), plus any caller-supplied key/value pairs.
InlineIOContext(io, KVs::Pair...) =
    IOContext(io, :limit=>true, :color=>true, :jupyter=>true, KVs...)
"""
    limitstringmime(mime::MIME, x)

Render `x` as a `String` in format `mime`, using an inline `IOContext` that
tells the underlying `show` method to limit output size. Binary MIME types
are base64-encoded; a `Vector{UInt8}` payload is encoded as-is, everything
else goes through `show`. Raw text payloads are returned unchanged.
"""
function limitstringmime(mime::MIME, x)
    buf = IOBuffer()
    if !istextmime(mime)
        pipe = Base64.Base64EncodePipe(buf)
        if x isa Vector{UInt8}
            write(pipe, x)  # x is raw binary data
        else
            show(InlineIOContext(pipe), mime, x)
        end
        close(pipe)
    elseif israwtext(mime, x)
        return String(x)
    else
        show(InlineIOContext(buf), mime, x)
    end
    return String(take!(buf))
end
|
use serde::{Deserialize, Serialize};
use yew::prelude::*;
use yew_router::prelude::*;
use crate::Route;
/// Horizontal-ellipsis character shown in place of skipped page ranges.
const ELLIPSIS: &str = "\u{02026}";
/// Query-string payload carrying the requested page number.
#[derive(Serialize, Deserialize, PartialEq, Clone, Debug)]
pub struct PageQuery {
    pub page: u64,
}
/// Properties for the pagination component: current page, total number of
/// pages, and the route used when building page links.
#[derive(Clone, Debug, PartialEq, Properties)]
pub struct Props {
    pub page: u64,
    pub total_pages: u64,
    pub route_to_page: Route,
}
/// Stateless component rendering a Bulma-styled pagination bar.
pub struct Pagination;
impl Component for Pagination {
    type Message = ();
    type Properties = Props;
    fn create(_ctx: &Context<Self>) -> Self {
        Self
    }
    // Renders the previous/next buttons followed by the numbered page links.
    fn view(&self, ctx: &Context<Self>) -> Html {
        html! {
            <nav class="pagination is-right" role="navigation" aria-label="pagination">
                { self.view_relnav_buttons(ctx.props()) }
                <ul class="pagination-list">
                    { self.view_links(ctx.props()) }
                </ul>
            </nav>
        }
    }
}
impl Pagination {
    /// Renders a single page link; the current page gets the `is-current` class.
    fn render_link(&self, to_page: u64, props: &Props) -> Html {
        let Props {
            page,
            route_to_page,
            ..
        } = props.clone();
        let is_current_class = if to_page == page { "is-current" } else { "" };
        html! {
            <li>
                <Link<Route, PageQuery>
                    classes={classes!("pagination-link", is_current_class)}
                    to={route_to_page}
                    query={Some(PageQuery{page: to_page})}
                >
                    { to_page }
                </Link<Route, PageQuery>>
            </li>
        }
    }
    /// Renders up to `max_links` links for `pages` (whose total count is
    /// `len`); when there are more, the surplus middle pages collapse into an
    /// ellipsis, and the final page is always shown.
    fn render_links<P>(&self, mut pages: P, len: usize, max_links: usize, props: &Props) -> Html
    where
        P: Iterator<Item = u64> + DoubleEndedIterator,
    {
        if len > max_links {
            let last_link = self.render_link(pages.next_back().unwrap(), props);
            // remove 1 for the ellipsis and 1 for the last link
            let links = pages
                .take(max_links - 2)
                .map(|page| self.render_link(page, props));
            html! {
                <>
                    { for links }
                    <li><span class="pagination-ellipsis">{ ELLIPSIS }</span></li>
                    { last_link }
                </>
            }
        } else {
            html! { for pages.map(|page| self.render_link(page, props)) }
        }
    }
    /// Lays out the numbered links around the current page, balancing up to
    /// `LINKS_PER_SIDE` links on each side (unused budget on one side is
    /// transferred to the other).
    fn view_links(&self, props: &Props) -> Html {
        const LINKS_PER_SIDE: usize = 3;
        let Props {
            page, total_pages, ..
        } = *props;
        let pages_prev = page.checked_sub(1).unwrap_or_default() as usize;
        let pages_next = (total_pages - page) as usize;
        let links_left = LINKS_PER_SIDE.min(pages_prev)
            // if there are less than `LINKS_PER_SIDE` to the right, we add some more on the left.
            + LINKS_PER_SIDE.checked_sub(pages_next).unwrap_or_default();
        let links_right = 2 * LINKS_PER_SIDE - links_left;
        html! {
            <>
                { self.render_links(1..page, pages_prev, links_left, props) }
                <li>{ self.render_link(page, props) }</li>
                { self.render_links(page + 1..=total_pages, pages_next, links_right, props) }
            </>
        }
    }
    /// Renders the "Previous"/"Next page" buttons, disabled at either end.
    fn view_relnav_buttons(&self, props: &Props) -> Html {
        let Props {
            page,
            total_pages,
            route_to_page: to,
        } = props.clone();
        html! {
            <>
                <Link<Route, PageQuery>
                    classes={classes!("pagination-previous")}
                    disabled={page==1}
                    query={Some(PageQuery{page: page - 1})}
                    to={to.clone()}
                >
                    { "Previous" }
                </Link<Route, PageQuery>>
                <Link<Route, PageQuery>
                    classes={classes!("pagination-next")}
                    disabled={page==total_pages}
                    query={Some(PageQuery{page: page + 1})}
                    {to}
                >
                    { "Next page" }
                </Link<Route, PageQuery>>
            </>
        }
    }
}
|
<?php
declare(strict_types=1);
namespace app\common\lib;
/**
* 记录和数字相关的类库方法啊
* Class Num
* @package app\common\lib
*/
class Num
{
    /**
     * Generate a random numeric verification code.
     *
     * Generalized from the original 4/6-digit switch: any digit count works
     * now, and the 4- and 6-digit ranges are identical to the old behavior.
     *
     * @param int $length desired number of digits (>= 1; invalid input falls back to 4)
     * @return int a random integer with exactly $length digits
     */
    public static function getCode($length = 4) :int
    {
        if (!is_int($length) || $length < 1) {
            // Preserve the historical default for unusable input.
            $length = 4;
        }
        $min = (int) (10 ** ($length - 1)); // smallest value with $length digits, e.g. 1000
        $max = (int) (10 ** $length) - 1;   // largest value with $length digits, e.g. 9999
        return mt_rand($min, $max);
    }
}
|
-- Enforce that CountryOfOrigin is exactly two letters (ISO 3166-1 alpha-2
-- style). T-SQL LIKE without wildcards matches the full value; whether
-- lowercase letters also pass depends on the column's collation.
ALTER TABLE [dbo].[Cap]
    ADD CONSTRAINT [CHK_Cap_CountryOfOrigin] CHECK ([CountryOfOrigin] like '[A-Z][A-Z]');
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace OfficialPlugins.Compiler.Models
{
    /// <summary>
    /// Persisted settings for the compiler plugin.
    /// </summary>
    public class CompilerSettingsModel
    {
        /// <summary>Whether to generate the GlueControlManager code.</summary>
        public bool GenerateGlueControlManagerCode { get; set; }
        /// <summary>TCP port used to talk to the running game.</summary>
        public int PortNumber { get; set; } = 8021;
        /// <summary>Whether screen bounds are drawn while viewing entities.</summary>
        public bool ShowScreenBoundsWhenViewingEntities { get; set; } = true;
        /// <summary>Editor grid cell size, in pixels.</summary>
        public decimal GridSize { get; set; } = 32;
    }
}
|
#!/bin/sh
# Launches a headless, low-quality CARLA server configured for Town04 at a
# fixed 0.05 s simulation step, then waits on the server process.
# Kill the whole process group on exit so the server does not outlive us.
trap "trap - TERM && kill -- -$$" INT TERM EXIT
CWD=`pwd`
cd ${Carla_DIST}
# Start Carla server without display
echo "Start CARLA server (-opengl -quality-level=Low)."
# NOTE(review): clearing DISPLAY makes the server run off-screen; the empty
# value only reaches the child if DISPLAY was already exported — confirm.
DISPLAY=
./CarlaUE4.sh -opengl -quality-level=Low &
#./CarlaUE4.sh -opengl -quality-level=Epic&
SERVER_PID=$!
sleep 5s
# Change the map
echo "Change map to Town04."
cd PythonAPI/util
python config.py --map Town04
sleep 3s
# Disable rendering
echo "Disable rendering of the CARLA server (GPU related data won't work)."
python config.py --no-rendering
sleep 3s
# Set the simulation step
echo "Simulation step is set to 0.05s (20Hz)."
python config.py --delta-seconds 0.05
# Do not exit until the server does
echo "CARLA server initialization finishes."
echo "Ctrl+c to terminate the server."
wait ${SERVER_PID}
cd ${CWD}
|
<?php
// Thai translations for the bonus module.
// Fix: removed the duplicated 'view_bonus_list' entry (the second occurrence
// silently overwrote the first with the identical value).
return [
    'bonus_setting' => 'การตั้งค่าโบนัส',
    'festival_name' => 'ชื่อเทศกาล',
    'percentage_of_bonus' => 'ร้อยละของโบนัส',
    'add_new_bonus' => 'เพิ่มโบนัสใหม่',
    'edit_bonus' => 'แก้ไขโบนัส',
    'view_bonus' => 'ดูโบนัส',
    'bonus_type' => 'ประเภทโบนัส',
    'tax' => 'ภาษี',
    'bonus_amount' => 'จำนวนโบนัส',
    'generate_bonus' => 'สร้างโบนัส',
    'view_bonus_list' => 'ดูรายการโบนัส',
    // newly added
    'bonus_list' => 'รายการโบนัส',
    'net_bonus' => 'โบนัสสุทธิ',
    'total_bonus' => 'โบนัสทั้งหมด',
];
|
Address(Rockwell Drive) is a residential street in the Wildhorse section of East Davis.
Intersecting Streets
Moore Boulevard
Audubon Circle
Audubon Circle again and Hartley Street
Wyeth Court
Sloan Street
Wildhorse Golf Club
Moore Boulevard again
Duchamp Street
|
use serde::{Deserialize, Serialize};
use crate::{errors::*, http::client::SHClient};
/// API endpoint for the tags collection.
const PATH: &str = "/v2/tags";
/// Metadata attached to a tag (display color and optional dimension).
#[derive(Serialize, Deserialize, Debug)]
pub struct TagsMeta {
    pub color: String,
    pub dimension: Option<String>,
}
/// A tag as returned by the /v2/tags endpoint.
#[derive(Serialize, Deserialize, Debug)]
pub struct Tags {
    pub meta: TagsMeta,
    pub name: String,
    pub priority: u32,
    pub updated_at: String,
    pub uuid: String,
}
/// Fetches all tags from the API using the given client and access token.
pub fn request<C: SHClient>(client: C, access_token: &String) -> Result<Vec<Tags>> {
    // The previous `String::from(access_token)` clone was unnecessary:
    // the borrowed token can be passed straight through.
    client.get_request::<Vec<Tags>>(PATH, access_token, None)
}
|
# APEX-Advent-Calendar

This project is a Region Plug-in for Oracle APEX that lets you easily use a nice advent calendar for Christmas time.
You can also use this code without APEX (index.html).
The Images are made by myself and are free to use. But you can also use other images.
The configuration of the Plug-in is really easy:

You can set:
- To be able to open everytime all doors (Yes/No Item)
- In which month the doors can be opened (Number Field)
- Link to the images (Text Field)
If you don't know how to install this Plug-in in APEX, please take a look at the Oracle APEX documentation.
For a working demo, just click:
https://APEX.oracle.com/pls/APEX/f?p=103428
Login is: user-demo / 123456@
If you like my stuff, donate me a coffee
[](https://www.paypal.me/RonnyW1)
|
/**
 * One electricity-consumption sample: the date, the hour of day,
 * and the consumption measured in that hour.
 */
export type VecRecord = {
  date: Date;
  hour: number;
  consumption: number;
};
|
package org.nypl.simplified.reader.bookmarks.api
/**
 * The result of attempting to enable/disable syncing (assuming that the attempt didn't
 * outright fail with an exception).
 */
enum class ReaderBookmarkSyncEnableResult {
  /** The server/account does not support bookmark syncing. */
  SYNC_ENABLE_NOT_SUPPORTED,
  /** Syncing was successfully enabled. */
  SYNC_ENABLED,
  /** Syncing was successfully disabled. */
  SYNC_DISABLED
}
|
using System;
namespace test5.c
{
    /// <summary>
    /// Demonstrates how struct arguments behave when passed by value,
    /// by ref, and by out.
    /// </summary>
    class Program
    {
        public struct Dress
        {
            public string Color;
            public double Size;
            public Dress(string color, double size)
            {
                Color = color;
                Size = size;
            }
        }
        // Pass by value: mutations affect only the local copy.
        static void F1(Dress d)
        {
            d.Color = "红色";
            d.Size = 17.213d;
        }
        // Pass by ref: mutations are visible to the caller.
        static void F2(ref Dress d)
        {
            d.Color = "紫色";
            d.Size = 19.5d;
        }
        // Pass by out: the callee must assign a fresh value.
        static void F3(out Dress d)
        {
            d = new Dress("浅灰", 18.37d);
        }
        static void Main(string[] args)
        {
            Dress d1 = new Dress("绿色", 19.2);
            // Fixed typo: "d1..Color" -> "d1.Color".
            Console.WriteLine("调用F1前:d1.Color:{0},Size:{1}", d1.Color, d1.Size);
            F1(d1);
            Console.WriteLine("调用F1后,Color:{0},Size:{1}", d1.Color, d1.Size);
            Dress d2 = new Dress("绿色", 8.5777d);
            Console.WriteLine("调用F2方法前:d2.Color:{0},Size:{1}", d2.Color, d2.Size);
            F2(ref d2);
            Console.WriteLine("调用F2方法后:d2.Color:{0},Size:{1}", d2.Color, d2.Size);
            Dress d3 = new Dress("红色", 22.2);
            Console.WriteLine("调用F3方法前:d3.Color:{0},Size:{1}", d3.Color, d3.Size);
            F3(out d3);
            // Fixed wrong label: this line reports d3, not d2.
            Console.WriteLine("调用F3方法后:d3.Color:{0},Size:{1}", d3.Color, d3.Size);
            Console.Read();
        }
    }
}
|
#include "mesh_qc_private.h"
/* Hodge values for the top-dimensional case p == m->dim (so q = 0):
 * every top cell contributes, for each of its 2^dim vertices j,
 * the value (1 / 2^dim) * m_coeff_q[j] / m_inner_q[j]. */
static void mesh_qc_hodge_p_values_d(
  double * m_hodge_p_values, const mesh * m,
  const double * m_inner_q, const double * m_coeff_q)
{
  int d_exp, i, ind, j, j_loc, m_cn_d;
  jagged1 m_cf_d_0_i;
  jagged2 m_cf_d_0;
  d_exp = 1 << m->dim;   /* 2^dim: number of vertices of a quasi-cube */
  m_cn_d = m->cn[m->dim]; /* number of top-dimensional cells */
  mesh_cf_part2(&m_cf_d_0, m, m->dim, 0); /* top cells -> their 0-faces */
  ind = 0;
  for (i = 0; i < m_cn_d; ++i)
  {
    jagged2_part1(&m_cf_d_0_i, &m_cf_d_0, i);
    for (j_loc = 0; j_loc < d_exp; ++j_loc)
    {
      j = m_cf_d_0_i.a1[j_loc]; /* global vertex index */
      m_hodge_p_values[ind] = (1. / d_exp) * (m_coeff_q[j] / m_inner_q[j]);
      ++ind;
    }
  }
}
/* Hodge values for p == 0 (so q = m->dim): every vertex contributes, for
 * each incident top cell k, the value (1 / 2^dim) * m_coeff_q[k] / m_inner_q[k]. */
static void mesh_qc_hodge_p_values_0(
  double * m_hodge_p_values, const mesh * m,
  const double * m_inner_q, const double * m_coeff_q)
{
  int d_exp, i, ind, k, k_loc, m_cn_0;
  jagged1 m_fc_0_d_i;
  jagged2 m_fc_0_d;
  d_exp = 1 << m->dim; /* 2^dim */
  m_cn_0 = m->cn[0];   /* number of vertices */
  mesh_fc_part2(&m_fc_0_d, m, 0, m->dim); /* vertices -> incident top cells */
  ind = 0;
  for (i = 0; i < m_cn_0; ++i)
  {
    jagged2_part1(&m_fc_0_d_i, &m_fc_0_d, i);
    for (k_loc = 0; k_loc < m_fc_0_d_i.a0; ++k_loc)
    {
      k = m_fc_0_d_i.a1[k_loc]; /* incident top-cell index */
      m_hodge_p_values[ind] = (1. / d_exp) * (m_coeff_q[k] / m_inner_q[k]);
      ++ind;
    }
  }
}
/*
 * General case 0 < p < dim.  For every p-face i and every top cell k
 * containing it, enumerate the (dim - p)-faces of k perpendicular to i and
 * combine the cup-product sign with the quadrature ratio coeff_q/inner_q.
 * NOTE(review): `nodes`/`perp` are fixed-size 8 arrays (2^3 corners), so
 * dim <= 3 appears to be assumed -- confirm the supported dimensions.
 */
static void mesh_qc_hodge_p_values_nontrivial(
  double * m_hodge_p_values, const mesh * m, matrix_sparse ** m_bd, int p,
  const double * m_inner_q, const double * m_coeff_q)
{
  int d_exp, i, ind, j, j_loc, k, k_loc, m_cn_p, m_dim, node, p_exp, q;
  int nodes[8], perp[8]; /* #nodes(quasi_cube) = 2^3 = 8 */
  double sign;
  jagged1 m_fc_p_d_i;
  jagged2 m_cf_d_q, m_cf_p_0, m_cf_q_0, m_fc_p_d;

  m_dim = m->dim;
  d_exp = 1 << m_dim;             /* 2^dim */
  m_cn_p = m->cn[p];              /* number of p-dimensional cells */
  p_exp = 1 << p;                 /* 2^p perpendicular faces per coface */
  q = m_dim - p;                  /* complementary degree */
  /* incidence maps needed by the perpendicular-face search below */
  mesh_fc_part2(&m_fc_p_d, m, p, m_dim);
  mesh_cf_part2(&m_cf_d_q, m, m_dim, q);
  mesh_cf_part2(&m_cf_p_0, m, p, 0);
  mesh_cf_part2(&m_cf_q_0, m, q, 0);
  ind = 0;                        /* running output index */
  for (i = 0; i < m_cn_p; ++i)
  {
    jagged2_part1(&m_fc_p_d_i, &m_fc_p_d, i);
    for (k_loc = 0; k_loc < m_fc_p_d_i.a0; ++k_loc)
    {
      k = m_fc_p_d_i.a1[k_loc];
      /* fills `perp` with the q-faces of k perpendicular to i, and `nodes`
         with the corresponding base nodes */
      mesh_qc_perpendicular(nodes, perp, &m_cf_d_q, &m_cf_p_0, &m_cf_q_0, i, k);
      for (j_loc = 0; j_loc < p_exp; ++j_loc)
      {
        j = perp[j_loc];
        node = nodes[j_loc];
        /* orientation sign of the cup product at this incidence */
        sign = mesh_qc_cup_product_sign(m_bd, node, p, i, q, j, m_dim, k);
        m_hodge_p_values[ind] = (sign / d_exp) * (m_coeff_q[j] / m_inner_q[j]);
        ++ind;
      }
    }
  }
}
/* Dispatch on the cochain degree p: the extreme degrees (p == dim, p == 0)
   have simplified kernels; every intermediate degree goes through the
   general cup-product based routine.  Branch order matches the original:
   p == m->dim is tested first. */
void mesh_qc_hodge_p_values(
  double * m_hodge_p_values, const mesh * m, matrix_sparse ** m_bd, int p,
  const double * m_inner_q, const double * m_coeff_q)
{
  if (p == m->dim)
  {
    mesh_qc_hodge_p_values_d(m_hodge_p_values, m, m_inner_q, m_coeff_q);
    return;
  }
  if (p == 0)
  {
    mesh_qc_hodge_p_values_0(m_hodge_p_values, m, m_inner_q, m_coeff_q);
    return;
  }
  mesh_qc_hodge_p_values_nontrivial(
    m_hodge_p_values, m, m_bd, p, m_inner_q, m_coeff_q);
}
|
# Application routing table.  Route order matters in Rails (first match wins),
# so the specific funnels come first and the legacy catch-all redirects last.
Rails.application.routes.draw do
  resources :articles, only: %i[index show]
  root 'pages#home'

  # Eligibility-check funnel: recipient lookup, then a funding proposal.
  get '/check/:slug', to: 'recipients#new', as: 'new_recipient'
  post '/check/:slug', to: 'recipients#create'
  get '/check/:slug/proposal/:hashid', to: 'proposals#new', as: 'new_proposal'
  post '/check/:slug/proposal/:hashid', to: 'proposals#create'

  # Reports and paid upgrades.  The '/upgrade' routes must precede the
  # '/reports/:proposal_id' show route so they are not shadowed by it.
  get '/reports/:proposal_id/upgrade', to: 'charges#new', as: 'new_charge'
  post '/reports/:proposal_id/upgrade', to: 'charges#create'
  get '/reports/:proposal_id', to: 'reports#show', as: 'report'
  get '/reports/:proposal_id/make-private', to: 'reports#make_private', as: 'make_report_private'
  get '/reports', to: 'reports#index', as: 'reports'

  # Funding opportunities and assessments.
  get '/opportunities', to: 'opportunities#index', as: 'opportunities'
  get '/opportunities/:slug/reports', to: 'opportunities#show', as: 'opportunity'
  get '/assessments/:id/vote', to: 'votes#new', as: 'new_assessment_vote'
  post '/assessments/:id/vote', to: 'votes#create'
  get '/assessments/:id/answers', to: 'answers#show', as: 'answers'

  # Sign-in flow: account lookup, then password auth, reset, or token set.
  namespace :sign_in, path: 'sign-in' do
    get '/', to: 'lookup#new', as: 'lookup'
    post '/', to: 'lookup#create'
    get '/auth', to: 'auth#new', as: 'auth'
    post '/auth', to: 'auth#create'
    get '/reset', to: 'reset#new', as: 'reset'
    post '/reset', to: 'reset#create'
    get '/set/:token', to: 'set#new', as: 'set'
    post '/set/:token', to: 'set#create'
  end
  get '/sign-out', to: 'sign_in/auth#destroy', as: 'sign_out'

  # JSON API (currently only district lookup for address forms).
  namespace :api do
    namespace :v1 do
      get '/districts/:country_id', to: 'districts#index', as: 'districts'
    end
  end

  # Errors
  match '/404', to: 'errors#not_found', via: :all
  match '/410', to: 'errors#gone', via: :all
  match '/500', to: 'errors#internal_server_error', via: :all

  # Admin
  devise_for :admin_users, ActiveAdmin::Devise.config
  ActiveAdmin.routes(self)

  # Pages
  get '/about', to: redirect('/articles/about'), as: 'about'
  get '/add-an-opportunity', to: 'pages#add_opportunity', as: 'add_opportunity'
  get '/faq', to: redirect('/articles/faq'), as: 'faq'
  get '/opportunity-providers', to: redirect('/articles/opportunity-providers'), as: 'opportunity_providers'
  get '/pricing', to: 'pages#pricing', as: 'pricing'
  get '/privacy', to: 'pages#privacy', as: 'privacy'
  get '/terms', to: 'pages#terms', as: 'terms'

  # Misc.
  post '/agree-to-terms/:id', to: 'users#terms_version', as: 'terms_version'
  post '/acknowledge-update/:id', to: 'users#update_version', as: 'update_version'
  get '/cookies/update', to: 'cookies#update', as: 'update_cookies'

  # Legacy — permanent redirects for URLs from earlier site versions.
  get '/:slug/funds', to: redirect('/opportunities')
  get '/for-funders', to: redirect('/faq')
  get '/fund/:slug', to: redirect('/opportunities')
  get '/funds', to: redirect('/opportunities')
  get '/funds/:slug', to: redirect('/opportunities')
  get '/funds/theme/:slug', to: redirect('/opportunities')
  get '/password_resets/new', to: redirect('/sign-in')
  get '/welcome', to: redirect('/')
end
|
using Csla;
using System;
using System.Linq;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace ProjectTracker.Library
{
  /// <summary>
  /// Read-only list of resource summaries (<see cref="ResourceInfo"/>),
  /// loaded through the CSLA data portal.
  /// </summary>
  [Serializable()]
  public class ResourceList : ReadOnlyListBase<ResourceList, ResourceInfo>
  {
    /// <summary>
    /// Removes the item with the given resource id, temporarily lifting the
    /// read-only guard.  The previous guard state is restored even if the
    /// removal throws.
    /// </summary>
    public void RemoveChild(int resourceId)
    {
      var iro = IsReadOnly;
      IsReadOnly = false;
      try
      {
        var item = this.Where(r => r.Id == resourceId).FirstOrDefault();
        if (item != null)
        {
          // Removed the unused local `index` (IndexOf result was never used).
          Remove(item);
        }
      }
      finally
      {
        IsReadOnly = iro;
      }
    }

    /// <summary>Creates an empty list locally (no data access).</summary>
    public static ResourceList GetEmptyList()
    {
      return DataPortal.Create<ResourceList>();
    }

    /// <summary>Asynchronously fetches the full resource list.</summary>
    public static async Task<ResourceList> GetResourceListAsync()
    {
      return await DataPortal.FetchAsync<ResourceList>();
    }

    /// <summary>Synchronously fetches the full resource list.</summary>
    public static ResourceList GetResourceList()
    {
      return DataPortal.Fetch<ResourceList>();
    }

    [Create]
    [RunLocal]
    private void Create()
    { }

    /// <summary>
    /// Loads every resource DTO from the DAL into the list.  List-changed
    /// events and the read-only guard are suspended during the bulk load
    /// and restored afterwards.
    /// </summary>
    [Fetch]
    private void DataPortal_Fetch()
    {
      var rlce = RaiseListChangedEvents;
      RaiseListChangedEvents = false;
      IsReadOnly = false;
      using (var ctx = ProjectTracker.Dal.DalFactory.GetManager())
      {
        var dal = ctx.GetProvider<ProjectTracker.Dal.IResourceDal>();
        List<ProjectTracker.Dal.ResourceDto> list = null;
        list = dal.Fetch();
        foreach (var item in list)
          Add(DataPortal.FetchChild<ResourceInfo>(item));
      }
      IsReadOnly = true;
      RaiseListChangedEvents = rlce;
    }
  }
}
|
# Copyright (c) 2009-2012 VMware, Inc.

module Bosh::Director
  module ProblemHandlers
    # Problem handler for a VM whose agent reports a deployment/job/index
    # that differs from what the director's database expects.
    class OutOfSyncVm < Base

      register_as :out_of_sync_vm
      auto_resolution :ignore

      # @param vm_id [Integer] primary key of the Models::Vm row
      # @param data [Hash] agent-reported state ("deployment", "job", "index")
      def initialize(vm_id, data)
        super
        @vm = Models::Vm[vm_id]
        @data = data

        if @vm.nil?
          handler_error("VM `#{vm_id}' is no longer in the database")
        end

        @deployment = @vm.deployment
        @instance = @vm.instance

        if @deployment.nil?
          handler_error("VM `#{@vm.cid}' doesn't belong to any deployment")
        end
      end

      # Human-readable summary of the mismatch; falls back to "unknown ..."
      # for any field the agent did not report.
      def description
        actual_deployment = @data["deployment"] || "unknown deployment"
        actual_job = @data["job"] || "unknown job"
        actual_index = @data["index"] || "unknown index"

        expected = "#{@deployment.name}: #{instance_name(@vm)}"
        actual = "#{actual_deployment}: #{actual_job}/#{actual_index}"

        "VM `#{@vm.cid}' is out of sync: expected `#{expected}', got `#{actual}'"
      end

      # Default resolution: leave the VM untouched.
      resolution :ignore do
        plan { "Ignore problem" }
        action { }
      end

      # Destructive resolution: re-validate, then delete the VM.
      resolution :delete_vm do
        plan { "Delete VM (unless it has persistent disk)"}
        action { validate; delete_vm(@vm) }
      end

      # Re-query the agent and abort the resolution (via handler_error)
      # if the VM has meanwhile come back in sync with the database.
      def validate
        state = agent_timeout_guard(@vm) { |agent | agent.get_state }

        return if state["deployment"] != @deployment.name

        # VM is no longer out of sync if no instance is referencing it,
        # as this situation can actually be handled by regular deployment
        if @instance.nil? ||
          state["job"] && state["job"]["name"] == @instance.job &&
          state["index"] == @instance.index
          handler_error("VM is now back in sync")
        end
      end
    end
  end
end
|
## Checker Examples
| Filename | Description |
| :------- | :---------- |
| duplicate_requests.py | 检查在某段时间内是否有重复的网络请求 |
| img_size.py | 检查图片大小是否超出限制 |
| add_request_param.py | 修改请求参数 |
| add_response_header.py | 修改请求返回数据 |
## Debug Config
调试检查器的 VS Code 调试配置如下
```json
{
"version": "0.2.0",
"configurations": [
{
"name": "checker",
"type": "pythonExperimental",
"request": "launch",
"program": "${workspaceFolder}/venv/bin/lyrebird",
"args": [
"--script",
"${file}"
],
"console": "integratedTerminal"
}
]
}
```
|
// Unit tests for the RowGroup `dataSrc` option: its default value, object
// and numeric column sources, Ajax-loaded data, a function source, and an
// array source (multi-level grouping).
describe('dataSrc', function() {
	var table;

	dt.libs({
		js: ['jquery', 'datatables', 'rowgroup'],
		css: ['datatables', 'rowgroup']
	});

	dt.html('basic');

	it('Default is 0', function() {
		expect($.fn.dataTable.RowGroup.defaults.dataSrc).toBe(0);
	});

	it('Is indeed 0 when run', function() {
		table = $('#example').DataTable({
			rowGroup: true
		});

		// Grouping on column 0 makes the first body row the first group label.
		expect($('#example tbody tr:eq(0)').text()).toBe('Airi Satou');
	});

	dt.html('basic');

	it('Can be used with object data', function() {
		table = $('#example').DataTable({
			order: [[2, 'asc']],
			columns: [
				{ data: 'name' },
				{ data: 'position' },
				{ data: 'office' },
				{ data: 'age' },
				{ data: 'startDate' },
				{ data: 'salary' }
			],
			rowGroup: {
				dataSrc: 'office'
			}
		});

		// First row is the group header, second is the first data row;
		// 12 = 10 data rows (page length) + 2 group header rows.
		expect($('#example tbody tr:eq(0) td:eq(0)').html()).toBe('Edinburgh');
		expect($('#example tbody tr:eq(1) td:eq(0)').html()).toBe('Tiger Nixon');
		expect($('#example tbody tr').length).toBe(12);
	});

	dt.html('basic');

	it('Can be column number', function() {
		table = $('#example').DataTable({
			order: [[2, 'asc']],
			rowGroup: {
				dataSrc: 2
			}
		});

		expect($('#example tbody tr:eq(0) td:eq(0)').html()).toBe('Edinburgh');
		expect($('#example tbody tr:eq(1) td:eq(0)').html()).toBe('Tiger Nixon');
		expect($('#example tbody tr').length).toBe(12);
	});

	dt.html('empty');

	it('Works with Ajax loaded data', function(done) {
		table = $('#example').DataTable({
			ajax: '/base/test/data/data.txt',
			deferRender: true,
			columns: [
				{ data: 'name' },
				{ data: 'position' },
				{ data: 'office' },
				{ data: 'age' },
				{ data: 'start_date' },
				{ data: 'salary' }
			],
			order: [[2, 'asc']],
			rowGroup: {
				dataSrc: 'office'
			},
			// Assertions must wait for the async load to complete.
			initComplete: function(settings, json) {
				expect($('#example tbody tr:eq(0) td:eq(0)').html()).toBe('Edinburgh');
				expect($('#example tbody tr:eq(1) td:eq(0)').html()).toBe('Tiger Nixon');
				expect($('#example tbody tr').length).toBe(12);
				done();
			}
		});
	});

	dt.html('basic');

	it('Can be function', function() {
		table = $('#example').DataTable({
			order: [[3, 'asc']],
			rowGroup: {
				// Group rows into age decades, e.g. "20 - 29".
				dataSrc: function(row) {
					var base = Math.floor(row[3] / 10);
					return '' + base + '0 - ' + base + '9';
				}
			}
		});

		expect($('#example tbody tr:eq(0)').text()).toBe('10 - 19');
		expect($('#example tbody tr:eq(1) td:eq(0)').text()).toBe('Tatyana Fitzpatrick');
		expect($('#example tbody tr').length).toBe(12);
	});

	dt.html('basic');

	it('Can be an array', function() {
		table = $('#example').DataTable({
			order: [[2, 'asc'], [3, 'asc']],
			rowGroup: {
				// Two-level grouping: office, then age within office.
				dataSrc: [2, 3]
			}
		});

		// 21 rows = 10 data rows plus outer and inner group header rows.
		expect($('#example tbody tr:eq(0)').text()).toBe('Edinburgh');
		expect($('#example tbody tr:eq(1) td:eq(0)').text()).toBe('22');
		expect($('#example tbody tr:eq(2) td:eq(0)').text()).toBe('Cedric Kelly');
		expect($('#example tbody tr').length).toBe(21);
	});
});
|
# Homebrew cask for Amazon's bundled AWS CLI installer zip.  Upstream
# publishes an unversioned archive, so the cask tracks :latest and cannot
# pin a checksum.
cask 'awscli-bundled' do
  version :latest
  sha256 :no_check # unversioned upstream zip has no stable checksum

  url 'https://s3.amazonaws.com/aws-cli/awscli-bundle.zip'
  appcast 'https://github.com/aws/aws-cli/releases.atom'
  name 'AWS CLI (Bundled)'
  homepage 'https://aws.amazon.com/cli/'

  # Run Amazon's own install script: libraries go under the caskroom path,
  # and the `aws` entry point is linked into Homebrew's bin.
  installer script: {
    executable: "#{staged_path}/awscli-bundle/install",
    args: ['-i', "#{caskroom_path}/lib", '-b', "#{HOMEBREW_PREFIX}/bin/aws"]
  }
  uninstall delete: ["#{caskroom_path}/lib", "#{HOMEBREW_PREFIX}/bin/aws"]
end
|
#include "../tesis/glwidget.h"
#include <QtGui>
#include <QtOpenGL>
#include <math.h>
// Construct the mesh-viewer widget: zero all interaction state, pick the
// default render colours, run the 3-D scan and load the resulting
// "delaunay.off" mesh from disk.
GLWidget::GLWidget(QWidget *parent)
    : QGLWidget(parent)
{
    // Rotation angles (stored in eighth-of-a-degree units) and scale factor.
    xRot = 0;
    yRot = 0;
    zRot = 0;
    xEsc = 0;
    // Negated bounding-box centre, used to centre the model at the origin.
    medX = 0;
    medY = 0;
    medZ = 0;
    // Initial camera translation: pull the model 7 units away from the eye.
    xTras = 0.0;
    yTras = 0.0;
    zTras = -7.0;
    // Start in rotate mode with filled rendering and the wireframe overlay.
    rotar = fill = true;
    escalar = trasladar = zbuf = fog = plano = edge = box = false;
    mesh = true;
    // Default colours for each render pass.
    mallaColor = QColor::fromRgb(255,0,0);
    fillColor = QColor::fromRgb(0,255,0);
    edgeColor = QColor::fromRgb(255,255,255);
    boxColor = QColor::fromRgb(255,0,0);
    fogColor = QColor::fromRgb(25,25,25);
    // `ventana` controls whether onAbrir() prompts with a file dialog.
    ventana = false;
    // Acquire the scan, then load the triangulated mesh.
    p = new Manager();
    p->scaner3D();
    fileName = "delaunay.off";
    onAbrir();
}
// Make the GL context current before Qt tears down the widget's resources.
GLWidget::~GLWidget()
{
    makeCurrent();
}

// Minimum size the layout system may shrink this widget to.
QSize GLWidget::minimumSizeHint() const
{
    return QSize(50,50);
}

// Preferred default size of the GL viewport.
QSize GLWidget::sizeHint() const
{
    return QSize(400,400);
}
// Set the rotation around the X axis (angle in eighth-of-a-degree units).
// NOTE(review): the brace-less `if` guards only the assignment, so
// updateGL() runs on every call even when the angle is unchanged --
// confirm whether the repaint was meant to be conditional.
void GLWidget::setXRotation(int angle)
{
    normalizeAngle(&angle);
    if (angle != xRot)
        xRot = angle;
    updateGL();
}

// Set the rotation around the Y axis; same caveat as setXRotation().
void GLWidget::setYRotation(int angle)
{
    normalizeAngle(&angle);
    if (angle != yRot)
        yRot = angle;
    updateGL();
}

// Set the rotation around the Z axis.  Unlike the X/Y setters this one
// does not repaint; callers (mouseMoveEvent) call updateGL() themselves.
void GLWidget::setZRotation(int angle)
{
    normalizeAngle(&angle);
    if (angle != zRot)
        zRot = angle;
}
// One-time GL state: depth testing, plus a polygon offset so the wireframe
// drawn over the filled mesh does not z-fight with it.
void GLWidget::initializeGL()
{
    glDepthFunc(GL_LEQUAL);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_POLYGON_OFFSET_FILL);
    glPolygonOffset(1.0,1.0);
}
// Render one frame: apply translation/rotation/scale, optionally clip
// against a plane, then draw the filled mesh, vertex cloud, wireframe and
// bounding box according to the current view flags.
void GLWidget::paintGL()
{
    glMatrixMode(GL_MODELVIEW);
    glClearColor(0.0, 0.0, 0.0, 0.0); // Background colour (black)
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();
    // Camera transform: translate, then rotate about each axis.  Rotations
    // are stored in eighth-of-a-degree units, hence the / 8.
    glTranslatef(xTras, yTras, zTras);
    glRotatef(xRot / 8, 1.0, 0.0, 0.0);
    glRotatef(yRot / 8, 0.0, 1.0, 0.0);
    glRotatef(zRot / 8, 0.0, 0.0, 1.0);
    // Uniform scaling: in scale mode only positive factors are applied;
    // outside scale mode any previously accumulated non-zero factor is kept.
    if (escalar && xEsc>0 ){
        glScalef(xEsc,xEsc,xEsc);
    }
    else if(!escalar && xEsc!=0)
    {
        glScalef(xEsc,xEsc,xEsc);
    }
    if (plano)
    {
        // Clipping plane coefficients; only the x component is driven by
        // medX.  NOTE(review): confirm the intended plane orientation.
        thePlane[0] = medX; // For the cut plane
        thePlane[1] = 0.0;
        thePlane[2] = 0.0;
        thePlane[3] = 0.0;
        glClipPlane(GL_CLIP_PLANE0, thePlane);
    }
    // Centre the model at the origin (medX/Y/Z hold the negated centre).
    glTranslatef(medX, medY, medZ);
    if (fill)
    {
        // Filled pass: draw every triangle in the fill colour.
        glColor3f((float)fillColor.red()/255,(float)fillColor.green()/255,(float)fillColor.blue()/255);
        glBegin(GL_TRIANGLES);
        for (int i = 0; i < poly.size(); i++)
        {
            int v1 = poly[i][0];
            int v2 = poly[i][1];
            int v3 = poly[i][2];
            glVertex3f(vertex[v1][0],vertex[v1][1],vertex[v1][2]);
            glVertex3f(vertex[v2][0],vertex[v2][1],vertex[v2][2]);
            glVertex3f(vertex[v3][0],vertex[v3][1],vertex[v3][2]);
        }
        glEnd();
    }
    // Vertex cloud: when requested, or as a fallback when neither the fill
    // nor the wireframe is visible.
    if (edge || (!fill && !mesh))
        vertices();
    // Wireframe and bounding box are drawn in line mode.
    glPolygonMode(GL_FRONT_AND_BACK,GL_LINE);
    if(mesh)
        malla();
    if (box)
        boundingBox();
    glPolygonMode(GL_FRONT_AND_BACK,GL_FILL);
}
// Keep the GL viewport in sync with the widget size and rebuild the
// projection matrix.  Removed the unused local `x` (width/height ratio was
// computed but never used) and the stale commented-out glOrtho call.
// NOTE(review): the perspective aspect ratio is hard-coded to 1.333 rather
// than width/height -- confirm whether a resizable window should use the
// real ratio before changing it.
void GLWidget::resizeGL(int width, int height)
{
    glViewport(0,0,width,height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(45.0f, 1.333f, .0001f, 30.0f);
}
// Remember the press position as the reference point for drag deltas.
void GLWidget::mousePressEvent(QMouseEvent *evento)
{
    lastPos = evento->pos();
}

// Drag handling.  Depending on the active mode, a left drag rotates
// (X/Y axes), scales, or pans in the view plane; a right drag rotates
// (X/Z axes) or moves the model along the viewing axis.
void GLWidget::mouseMoveEvent(QMouseEvent *evento)
{
    int dx = evento->x() - lastPos.x();
    int dy = evento->y() - lastPos.y();
    if (evento->buttons() & Qt::LeftButton) {
        if (rotar){
            setXRotation(xRot + 16 * dy);
            setYRotation(yRot + 16 * dx);
        }
        if (escalar){
            // Horizontal drag adjusts the uniform scale factor.
            xEsc += (float)dx/8;
        }
        if (trasladar){
            // Screen-space pan; 1/500 converts pixels to GL units.
            xTras += ((float)dx/500);
            yTras -= ((float)dy/500);
        }
    } else if (evento->buttons() & Qt::RightButton) {
        if (rotar){
            setXRotation(xRot + 16 * dy);
            setZRotation(zRot + 16 * dx);
        }
        if (trasladar){
            // Vertical drag moves the model along the viewing axis.
            zTras += ((float)dy/500);
        }
    }
    lastPos = evento->pos();
    updateGL();
}
// Wrap *angle into the inclusive range [0, 360 * 8]; angles are stored in
// eighth-of-a-degree units throughout this widget.
void GLWidget::normalizeAngle(int *angle)
{
    const int fullCircle = 360 * 8;
    while (*angle < 0)
        *angle += fullCircle;
    while (*angle > fullCircle)
        *angle -= fullCircle;
}
// Slot: make the next onAbrir() call prompt for a file instead of
// reloading the default one.
void GLWidget::onVentanaAbrir()
{
    ventana = true;
}

// Load an OFF mesh file into `vertex`/`poly` and centre the view on it.
// NOTE(review): the whole parse runs inside `while (!f.atEnd())`, so a file
// with trailing lines would restart the header parse -- confirm the
// expected OFF layout before restructuring.
void GLWidget::onAbrir()
{
    // Prompt only when requested via onVentanaAbrir(); otherwise reload
    // the current `fileName` (initially "delaunay.off").
    if (ventana)
        fileName = QFileDialog::getOpenFileName(this, QObject::tr("Open File"),"/",tr("*.off"));
    QFile file( fileName );
    if ( file.open( QIODevice::ReadOnly) ) {
        QTextStream f( &file );
        // A valid OFF file starts with the literal "OFF".
        QString off = f.readLine();
        if (off!="OFF") return;
        while (!f.atEnd()){
            // Header line: vertex count and polygon count.
            QString s = f.readLine();
            QStringList list1 = s.split(" ");
            int numV = list1[0].toInt();
            int numP = list1[1].toInt();
            vertex.resize(numV);
            for (int i=0; i < numV; i++)
                vertex[i].resize(3);
            poly.resize(numP);
            // One "x y z" line per vertex.
            for (int i=0; i < vertex.size(); i++)
            {
                s = f.readLine();
                QStringList num = s.split(" ");
                for (int j = 0; j < 3; j++)
                    vertex[i][j] = num[j].toFloat();
            }
            // Recompute the bounding box and the negated centre used to
            // place the model at the origin while rendering.
            extremos();
            medX = (float)(minX + maxX)/2;
            medX *= -1;
            medY = (float)(minY + maxY)/2;
            medY *= -1;
            medZ = (float)(minZ + maxZ)/2;
            medZ *= -1;
            // One face per line; indices are read starting at field 2.
            // NOTE(review): standard OFF has indices from field 1 -- confirm
            // the format written by the scanner.
            for (int i=0; i< poly.size(); i++)
            {
                s = f.readLine();
                QStringList num = s.split(" ");
                poly[i].resize(num.size()-2);
                for (int j = 2; j < num.size(); j++)
                    poly[i][j-2] = num[j].toInt();
            }
        }
    }
    file.close();
    updateGL();
}
// Interaction-mode slots: exactly one of {escalar, trasladar, rotar} is
// active at a time; each slot clears the other modes and selects its own.

void GLWidget::onEscalar()
{
    rotar = false;
    trasladar = false;
    escalar = true;
}

void GLWidget::onTrasladar()
{
    rotar = false;
    escalar = false;
    trasladar = true;
}

void GLWidget::onRotar()
{
    escalar = false;
    trasladar = false;
    rotar = true;
}
// Recompute the axis-aligned bounding box (minX..maxZ) of the loaded mesh
// by scanning every vertex.  INT_MAX/INT_MIN act as sentinels that any
// real coordinate replaces on the first comparison.  Removed a large block
// of dead commented-out code that scanned the raw scanner point cloud.
void GLWidget::extremos()
{
    minX = INT_MAX;
    minY = INT_MAX;
    minZ = INT_MAX;
    maxX = INT_MIN;
    maxY = INT_MIN;
    maxZ = INT_MIN;
    for (int i=0; i < vertex.size(); i++)
    {
        if (vertex[i][0]<minX) minX = vertex[i][0];
        if (vertex[i][0]>maxX) maxX = vertex[i][0];
        if (vertex[i][1]<minY) minY = vertex[i][1];
        if (vertex[i][1]>maxY) maxY = vertex[i][1];
        if (vertex[i][2]<minZ) minZ = vertex[i][2];
        if (vertex[i][2]>maxZ) maxZ = vertex[i][2];
    }
}
// Draw the axis-aligned bounding box as four quads (the two z-faces and
// the two x-faces); together their edges cover all twelve box edges, so
// the y-faces need no quads of their own.  The caller has already switched
// glPolygonMode to GL_LINE, so these render as wireframe rectangles.
void GLWidget::boundingBox()
{
    glColor3f((float)boxColor.red()/255, (float)boxColor.green()/255,(float)boxColor.blue()/255);
    // Face z = minZ
    glBegin(GL_POLYGON);
    glVertex3f(minX,minY,minZ);
    glVertex3f(minX,maxY,minZ);
    glVertex3f(maxX,maxY,minZ);
    glVertex3f(maxX,minY,minZ);
    glEnd();
    // Face z = maxZ
    glBegin(GL_POLYGON);
    glVertex3f(minX,minY,maxZ);
    glVertex3f(minX,maxY,maxZ);
    glVertex3f(maxX,maxY,maxZ);
    glVertex3f(maxX,minY,maxZ);
    glEnd();
    // Face x = minX
    glBegin(GL_POLYGON);
    glVertex3f(minX,maxY,minZ);
    glVertex3f(minX,maxY,maxZ);
    glVertex3f(minX,minY,maxZ);
    glVertex3f(minX,minY,minZ);
    glEnd();
    // Face x = maxX
    glBegin(GL_POLYGON);
    glVertex3f(maxX,maxY,minZ);
    glVertex3f(maxX,maxY,maxZ);
    glVertex3f(maxX,minY,maxZ);
    glVertex3f(maxX,minY,minZ);
    glEnd();
}
// Draw the triangle mesh in the wireframe colour.  The caller has already
// selected GL_LINE polygon mode, so the triangles render as a wireframe.
void GLWidget::malla()
{
    glColor3f((float)mallaColor.red()/255,(float)mallaColor.green()/255,(float)mallaColor.blue()/255);
    glBegin(GL_TRIANGLES);
    const int triangleCount = poly.size();
    for (int t = 0; t < triangleCount; t++)
    {
        // Emit the three corners of triangle t.
        for (int c = 0; c < 3; c++)
        {
            const int v = poly[t][c];
            glVertex3f(vertex[v][0], vertex[v][1], vertex[v][2]);
        }
    }
    glEnd();
}
// Draw every mesh vertex as a GL point in the edge colour.  Removed the
// dead commented-out loop that drew the raw scanner point cloud instead.
void GLWidget::vertices()
{
    glColor3f((float)edgeColor.red()/255,(float)edgeColor.green()/255,(float)edgeColor.blue()/255);
    glBegin(GL_POINTS);
    for (int i = 0; i < vertex.size(); i++)
    {
        glVertex3f(vertex[i][0],vertex[i][1],vertex[i][2]);
    }
    glEnd();
}
// Toggle depth testing (z-buffer) on or off.
void GLWidget::onZBuffer()
{
    zbuf = !zbuf;
    if (!zbuf)
        glDisable(GL_DEPTH_TEST);
    else
    {
        glDepthFunc(GL_LEQUAL);
        glEnable(GL_DEPTH_TEST);
        glClearDepth(1.0);
    }
}

// Toggle linear fog between depths 0 and 15 using the configured colour.
void GLWidget::onFog()
{
    fog = !fog;
    if (!fog)
        glDisable(GL_FOG);
    else
    {
        GLfloat fog_color[4] = { (float)fogColor.red()/255, (float)fogColor.green()/255, (float)fogColor.blue()/255, 0.0};
        glFogi(GL_FOG_MODE, GL_LINEAR);
        glFogf(GL_FOG_START, 0.0);
        glFogf(GL_FOG_END, 15.0);
        glFogfv(GL_FOG_COLOR, fog_color); // fog colour
        glEnable(GL_FOG);
    }
}
// Toggle filled-triangle rendering.
void GLWidget::onRelleno()
{
    fill = !fill;
}

// Toggle the clipping plane (coefficients set per-frame in paintGL()).
void GLWidget::onPlano()
{
    plano = !plano;
    if (!plano)
        glDisable(GL_CLIP_PLANE0);
    else
        glEnable(GL_CLIP_PLANE0);
}

// Toggle the wireframe overlay.
void GLWidget::onMalla()
{
    mesh = !mesh;
}

// Toggle the vertex-cloud overlay.
void GLWidget::onVertices()
{
    edge = !edge;
}

// Toggle the bounding-box overlay.
void GLWidget::onBox()
{
    box = !box;
}
// Colour-picker slots: each opens a QColorDialog and stores the chosen
// colour for the corresponding render pass.
// NOTE(review): QColorDialog::getColor() returns an invalid colour when
// the user cancels, and the result is stored unchecked -- confirm that
// this is acceptable.
void GLWidget::onColorMalla()
{
    mallaColor = QColorDialog::getColor();
}

void GLWidget::onColorBox()
{
    boxColor = QColorDialog::getColor();
}

void GLWidget::onColorRelleno()
{
    fillColor = QColorDialog::getColor();
}

void GLWidget::onColorVertices()
{
    edgeColor = QColorDialog::getColor();
}

void GLWidget::onColorFog()
{
    fogColor = QColorDialog::getColor();
}
|
from .errors import *
# Trailing bytes of the IEND chunk that terminate every PNG stream; any
# payload appended after this marker is ignored by image viewers.
end_hex = b"\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82"

class PNG:
    """Hide and reveal payloads appended after a PNG file's IEND chunk.

    Parameters
    ----------
    image_path : str
        Path of the PNG file to modify.

    Raises
    ------
    CannotNone
        If ``image_path`` is None.
    """
    def __init__(self, image_path: str):
        # `is None` instead of `== None`: identity is the correct idiom and
        # is immune to custom __eq__ implementations.
        if image_path is None:
            raise CannotNone("image_path")
        self.image_path = image_path

    def hide_message(self, message: str):
        """Append ``message`` (UTF-8 encoded) after the PNG IEND chunk."""
        if message is None:
            raise CannotNone("message")
        encoded_msg = message.encode()
        with open(self.image_path, 'ab') as f:
            f.write(encoded_msg)
        print("Success!")

    def hide_program(self, program_path: str):
        """Append the bytes of an executable after the PNG IEND chunk.

        Parameters
        ----------
        program_path : str
            Path of the ``.exe`` to embed; ``.exe`` is appended if missing.
        """
        if program_path is None:
            raise CannotNone("program_path")
        # Bug fix: use endswith() rather than a substring test, which
        # wrongly accepted names such as "tool.exe.bak".
        if not program_path.endswith(".exe"):
            program_path = program_path + ".exe"
        with open(self.image_path, 'ab') as f, open(program_path, 'rb') as p:
            f.write(p.read())
        print("Success!")

    def reveal_message(self, encoding: str = "UTF-8"):
        """Return the hidden text stored after the IEND chunk.

        Raises ValueError if the file contains no IEND marker.
        """
        with open(self.image_path, 'rb') as f:
            content = f.read()
            # Locate the end-of-image marker, then re-read only the tail.
            offset = content.index(end_hex)
            f.seek(offset + len(end_hex))
            return f.read().decode(encoding)

    def reveal_program(self, new_name: str):
        """Extract the hidden executable into a new file named ``new_name``.

        ``.exe`` is appended to ``new_name`` if missing.
        """
        if new_name is None:
            raise CannotNone("new_name")
        if not new_name.endswith(".exe"):
            new_name = new_name + ".exe"
        with open(self.image_path, 'rb') as f:
            content = f.read()
            offset = content.index(end_hex)
            f.seek(offset + len(end_hex))
            with open(new_name, 'wb') as p:
                p.write(f.read())
        print("Success!")
class JPG:
    """Hide and reveal payloads appended after a JPG/JPEG end-of-image marker.

    Parameters
    ----------
    image_path : str
        Path of the JPG/JPEG file to modify.

    Raises
    ------
    CannotNone
        If ``image_path`` is None.
    """
    # JPEG end-of-image (EOI) marker, 0xFFD9; data appended after it is
    # ignored by image viewers.
    _EOI = bytes.fromhex('FFD9')

    def __init__(self, image_path: str):
        # `is None` instead of `== None`: identity is the correct idiom.
        if image_path is None:
            raise CannotNone("image_path")
        self.image_path = image_path

    def hide_message(self, message: str):
        """Append ``message`` (UTF-8 encoded) after the JPEG EOI marker."""
        if message is None:
            raise CannotNone("message")
        encoded_msg = message.encode()
        with open(self.image_path, 'ab') as f:
            f.write(encoded_msg)
        print("Success!")

    def hide_program(self, program_path: str):
        """Append the bytes of an executable after the JPEG EOI marker.

        Parameters
        ----------
        program_path : str
            Path of the ``.exe`` to embed; ``.exe`` is appended if missing.
        """
        if program_path is None:
            raise CannotNone("program_path")
        # Bug fix: use endswith() rather than a substring test, which
        # wrongly accepted names such as "tool.exe.bak".
        if not program_path.endswith(".exe"):
            program_path = program_path + ".exe"
        with open(self.image_path, 'ab') as f, open(program_path, 'rb') as p:
            f.write(p.read())
        print("Success!")

    def reveal_message(self, encoding: str = "UTF-8"):
        """Return the hidden text stored after the EOI marker.

        Raises ValueError if the file contains no EOI marker.
        """
        with open(self.image_path, 'rb') as f:
            content = f.read()
            # Locate the first EOI marker, then re-read only the tail.
            offset = content.index(self._EOI)
            f.seek(offset + 2)
            return f.read().decode(encoding)

    def reveal_program(self, new_name: str):
        """Extract the hidden executable into a new file named ``new_name``.

        ``.exe`` is appended to ``new_name`` if missing.
        """
        if new_name is None:
            raise CannotNone("new_name")
        if not new_name.endswith(".exe"):
            new_name = new_name + ".exe"
        with open(self.image_path, 'rb') as f:
            content = f.read()
            offset = content.index(self._EOI)
            f.seek(offset + 2)
            with open(new_name, 'wb') as p:
                p.write(f.read())
        print("Success!")
|
import numpy as np
import matplotlib.pyplot as plt
# Source matrix: two deterministic signals for the ICA demo.
# `a` is a sawtooth ramp over [-0.99, 1.0], repeated five times (1000 samples).
a = ((np.arange(200)+1)-100)/100
a = np.concatenate((a,a,a,a,a), axis=0)
# `b` is a slow sine wave over the same 1000 samples.
b = np.sin((np.arange(1000)+1)/20)
# Stack the sources as columns: S_test has shape (1000, 2).
S_test= np.vstack((b,a)).T

# Mixing matrix (2x2), applied on the right.
A = np.array([0.291, 0.6557, -0.5439, 0.5572]).reshape((2, 2))

# test data: mixed observations, shape (1000, 2).
X_test = S_test @ A
def test(ic):
    """Plot the two recovered independent components side by side.

    Parameters
    ----------
    ic : array_like of shape (1000, 2)
        Matrix whose columns are the estimated independent components.
    """
    # Seed the global RNG so any downstream randomness is reproducible.
    np.random.seed(1)
    plt.subplot(121)
    plt.plot(np.arange(1000)+1, ic[:,0])
    plt.title("IC 1")
    plt.subplot(122)
    plt.plot(np.arange(1000)+1, ic[:,1])
    plt.title("IC 2")
    # Removed a dead trailing `pass` statement; the function returns None.
|
#!/usr/bin/env bash
# Build the mdvl source/wheel distributions and upload them to PyPI.
# -e/-u/pipefail abort on the first failure so a broken build is never
# uploaded (previously a failed build still ran `twine upload`).
echo "Uploading to pip"
set -euo pipefail
set -x

# Sanity check: must be run from the repository root.
test -e "./tests/test_mdvl.py" || exit 1

# Remove every build artefact so stale files cannot leak into the upload.
clean () {
    rm -rf ./dist
    rm -rf ./mdvl.egg-info
    rm -rf ./__pycache__
    rm -rf ./build
}

clean
# setup.py expects README.rst; generate it from the markdown source.
pandoc ./README.md -o README.rst
python setup.py clean sdist bdist_wheel
twine upload ./dist/*
clean
|
# wakeworddetection
Models for wake word detection.
This is the very first step of a virtual assistant: wake word detection (also known as trigger word detection).
# TODO:
* Add real sound augmentations
* Convert models to ONNX
* Convert models to TorchScript
* Refactor serving code
* Add bash script for training models
|
---
title: "Compatibilité et problèmes de conformité en Visual C++ | Microsoft Docs"
ms.custom: ""
ms.date: "11/03/2016"
ms.prod: "visual-studio-dev14"
ms.reviewer: ""
ms.suite: ""
ms.technology:
- "devlang-csharp"
ms.tgt_pltfrm: ""
ms.topic: "article"
dev_langs:
- "C++"
helpviewer_keywords:
- "problèmes de conformité standard dans Visual C++"
- "compilateur CL.exe, problèmes de conformité standard C++"
- "Visual C++, les problèmes de conformité C++ standard"
- "Visual C++, problèmes de conformité standard"
ms.assetid: 7d715eb4-b409-4720-91ab-324a202b8068
caps.latest.revision: 10
caps.handback.revision: 10
author: "mikeblome"
ms.author: "mblome"
manager: "douge"
---
# Compatibilité et problèmes de conformité en Visual C++
Ce contenu a été supprimé.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.