text
stringlengths 27
775k
|
|---|
from django.contrib import admin
from db.admin.utils import DiffModelAdmin
from db.models.sso import SSOIdentity
class SSOIdentityAdmin(DiffModelAdmin):
    """Admin configuration for the SSOIdentity model.

    Inherits all behavior from the project-level DiffModelAdmin base class;
    no customization is applied here.
    """
    pass
# Expose SSOIdentity in the Django admin site.
admin.site.register(SSOIdentity, SSOIdentityAdmin)
|
import { combineReducers } from 'redux';
import authReducer from './authReducer';
import storeReducer from './storeReducer';
// Root reducer: each slice is stored under a key of the same name
// ("authReducer" / "storeReducer"), matching the original shape.
const rootReducer = combineReducers({
  authReducer,
  storeReducer,
});

export default rootReducer;
|
use pyo3::prelude::*;
mod dual;
mod greedy;
mod meta;
mod primal;
mod regularized;
pub fn submodule(py: Python, m: &PyModule) -> PyResult<()> {
let dual = PyModule::new(py, "dual")?;
dual::submodule(py, dual)?;
m.add_submodule(dual)?;
let greedy = PyModule::new(py, "greedy")?;
greedy::submodule(py, greedy)?;
m.add_submodule(greedy)?;
let meta = PyModule::new(py, "meta")?;
meta::submodule(py, meta)?;
m.add_submodule(meta)?;
let primal = PyModule::new(py, "primal")?;
primal::submodule(py, primal)?;
m.add_submodule(primal)?;
let regularized = PyModule::new(py, "regularized")?;
regularized::submodule(py, regularized)?;
m.add_submodule(regularized)?;
Ok(())
}
|
#pragma once
#include <algorithm>
#include <functional>
#include <iterator>
#include <map>
#include <memory>
#include <optional>
#include <string>
#include <tuple>
#include <utility>
#include <robin_hood.h>
#include "abstract_syntax_tree.hpp"
#include "model.hpp"
namespace pyqubo {
// Expand an expression tree to polynomial form (visitor over expression nodes).
// TODO (translated from Japanese): consider whether it would be better to sum
// partway through the traversal rather than only at the end; if we summed
// mid-traversal, having a single return value would be nicer.
class expand final {
  // Named sub-Hamiltonians collected while visiting.
  robin_hood::unordered_map<std::string, polynomial> _sub_hamiltonians;
  // Constraints collected while visiting: name -> (polynomial, acceptance predicate).
  robin_hood::unordered_map<std::string, std::pair<polynomial, std::function<bool(double)>>> _constraints;
  // Variable-name -> index registry; not owned, set on each top-level call.
  variables* _variables;

public:
  // Entry point. Expands `expression` and returns the combined polynomial
  // (objective + penalty) together with the collected sub-Hamiltonians and
  // constraints. Internal state is reset, so one instance can be reused.
  auto operator()(const std::shared_ptr<const expression>& expression, variables* variables) noexcept {
    _sub_hamiltonians = {};
    _constraints = {};
    _variables = variables;

    auto [polynomial, penalty] = visit<std::tuple<pyqubo::polynomial, pyqubo::polynomial>>(*this, expression);

    return std::tuple{polynomial + penalty, _sub_hamiltonians, _constraints};
  }

  // Addition node: merge the children's (polynomial, penalty) pairs term by term.
  auto operator()(const std::shared_ptr<const add_operator>& add_operator) noexcept {
    // (translated from Japanese) Sorry for the messy code -- it is written
    // this way for performance; please bear with it.
    const auto& merge = [](auto& polyominal, const auto& other) {
      for (const auto& [product, coefficient] : other) {
        const auto [it, emplaced] = polyominal.emplace(product, coefficient);
        if (!emplaced) {
          // Term already present: accumulate the coefficient.
          it->second = it->second + coefficient;
        }
      }
    };

    auto polynomial = pyqubo::polynomial{};
    auto penalty = pyqubo::polynomial{};

    for (const auto& child: add_operator->children()) {
      const auto [child_polynomial, child_penalty] = visit<std::tuple<pyqubo::polynomial, pyqubo::polynomial>>(*this, child);
      merge(polynomial, child_polynomial);
      merge(penalty, child_penalty);
    }

    return std::tuple{polynomial, penalty};
  }

  // Multiplication node: polynomials multiply; penalties add.
  auto operator()(const std::shared_ptr<const mul_operator>& mul_operator) noexcept {
    const auto [l_polynomial, l_penalty] = visit<std::tuple<polynomial, polynomial>>(*this, mul_operator->lhs());
    const auto [r_polynomial, r_penalty] = visit<std::tuple<polynomial, polynomial>>(*this, mul_operator->rhs());

    return std::tuple{l_polynomial * r_polynomial, l_penalty + r_penalty};
  }

  // Binary variable x in {0, 1}: the monomial 1 * x, no penalty.
  auto operator()(const std::shared_ptr<const binary_variable>& binary_variable) noexcept {
    return std::tuple{polynomial{{{_variables->index(binary_variable->name())}, std::make_shared<numeric_literal>(1)}}, polynomial{}};
  }

  // Spin variable s in {-1, +1}, encoded over a binary x as s = 2x - 1
  // (coefficients 2 and -1 below).
  auto operator()(const std::shared_ptr<const spin_variable>& spin_variable) noexcept {
    return std::tuple{polynomial{{{_variables->index(spin_variable->name())}, std::make_shared<numeric_literal>(2)}, {{}, std::make_shared<numeric_literal>(-1)}}, polynomial{}};
  }

  // Placeholder: a symbolic coefficient on the empty product (constant term).
  auto operator()(const std::shared_ptr<const placeholder_variable>& place_holder_variable) noexcept {
    return std::tuple{polynomial{{{}, place_holder_variable}}, polynomial{}};
  }

  // Named sub-Hamiltonian: record its polynomial by name, then pass it through.
  auto operator()(const std::shared_ptr<const sub_hamiltonian>& sub_hamiltonian) noexcept {
    const auto [polynomial, penalty] = visit<std::tuple<pyqubo::polynomial, pyqubo::polynomial>>(*this, sub_hamiltonian->expression());

    _sub_hamiltonians.emplace(sub_hamiltonian->name(), polynomial);

    return std::tuple{polynomial, penalty};
  }

  // Constraint: record (polynomial, condition) by name, then pass through.
  auto operator()(const std::shared_ptr<const constraint>& constraint) noexcept {
    const auto [polynomial, penalty] = visit<std::tuple<pyqubo::polynomial, pyqubo::polynomial>>(*this, constraint->expression());

    _constraints.emplace(constraint->name(), std::pair{polynomial, constraint->condition()});

    return std::tuple{polynomial, penalty};
  }

  // with_penalty node: the penalty expression's polynomial (and both nested
  // penalties) flow into the penalty channel; only the main expression's
  // polynomial continues as the objective.
  auto operator()(const std::shared_ptr<const with_penalty>& with_penalty) noexcept {
    const auto [e_polynomial, e_penalty] = visit<std::tuple<polynomial, polynomial>>(*this, with_penalty->expression());
    const auto [p_polynomial, p_penalty] = visit<std::tuple<polynomial, polynomial>>(*this, with_penalty->penalty());

    return std::tuple{e_polynomial, e_penalty + p_penalty + p_polynomial};
  }

  // User-defined expression: expand its inner expression transparently.
  auto operator()(const std::shared_ptr<const user_defined_expression>& user_defined_expression) noexcept {
    return visit<std::tuple<polynomial, polynomial>>(*this, user_defined_expression->expression());
  }

  // Numeric literal: a constant term (coefficient on the empty product).
  auto operator()(const std::shared_ptr<const numeric_literal>& numeric_literal) noexcept {
    return std::tuple{polynomial{{{}, numeric_literal}}, polynomial{}};
  }
};
// Convert to quadratic polynomial.

// Finds the pair of variable indexes that co-occurs most often among terms of
// degree three or higher -- the best candidate to substitute with a single
// product variable. Returns std::nullopt when every term is already at most
// quadratic (nothing left to reduce).
inline std::optional<std::pair<int, int>> find_replacing_pair(const pyqubo::polynomial& polynomial) noexcept {
  // Count co-occurrences of every unordered index pair inside higher-degree terms.
  auto counts = [&] {
    auto result = std::map<std::pair<int, int>, int>{};

    for (const auto& [product, _] : polynomial) {
      if (std::size(product.indexes()) <= 2) {
        continue; // degree <= 2: already quadratic, skip
      }

      // All pairs (it_1, it_2) with it_1 before it_2.
      for (auto it_1 = std::begin(product.indexes()); it_1 != std::prev(std::end(product.indexes())); ++it_1) {
        for (auto it_2 = std::next(it_1); it_2 != std::end(product.indexes()); ++it_2) {
          const auto [it, emplaced] = result.emplace(std::pair{*it_1, *it_2}, 1);
          if (!emplaced) {
            it->second++;
          }
        }
      }
    }

    return result;
  }();

  if (std::size(counts) == 0) {
    return std::nullopt;
  }

  // Pick the most frequent pair so one substitution shrinks as many terms as possible.
  const auto it = std::max_element(std::begin(counts), std::end(counts), [](const auto& count_1, const auto& count_2) {
    return count_1.second < count_2.second;
  });

  return it->first;
}
// Repeatedly substitutes the most frequent index pair (x, y) with a fresh
// product variable z until no term has degree > 2, adding the penalty
// strength * (3z - 2xz - 2yz + xy) -- the coefficients emplaced below --
// which is minimized exactly when z == x*y.
inline auto convert_to_quadratic(const pyqubo::polynomial& polynomial, double strength, variables* variables) noexcept {
  auto result = polynomial;

  for (;;) {
    const auto replacing_pair = find_replacing_pair(result);
    if (!replacing_pair) {
      break; // already quadratic
    }

    // Register the product variable under a readable synthetic name "a * b".
    const auto replacing_pair_index = variables->index(variables->name(replacing_pair->first) + " * " + variables->name(replacing_pair->second));

    // replace: rewrite every term containing both x and y to use z instead.
    for (;;) {
      // binary_search requires product.indexes() to be sorted.
      auto it = std::find_if(std::begin(result), std::end(result), [&](const auto& term) {
        return std::binary_search(std::begin(term.first.indexes()), std::end(term.first.indexes()), replacing_pair->first) && std::binary_search(std::begin(term.first.indexes()), std::end(term.first.indexes()), replacing_pair->second);
      });
      if (it == std::end(result)) {
        break; // no remaining term contains the pair
      }

      // Build the new index list: drop x and y, append z.
      // NOTE(review): z is appended at the end; this stays sorted only if
      // newly created indexes are always the largest -- confirm against
      // variables::index.
      const auto indexes = [&] {
        auto result = pyqubo::indexes{};

        std::copy_if(std::begin(it->first.indexes()), std::end(it->first.indexes()), std::back_inserter(result), [&](const auto& index) {
          return index != replacing_pair->first && index != replacing_pair->second;
        });
        result.emplace_back(replacing_pair_index);

        return result;
      }();
      const auto expression = it->second;

      result.erase(it);
      result.emplace(product(indexes), expression);
    }

    // insert: add the penalty terms enforcing z == x*y.
    const auto emplace_term = [](pyqubo::polynomial& polynomial, const pyqubo::product& product, const std::shared_ptr<const expression>& coefficient) {
      const auto [it, emplaced] = polynomial.emplace(product, coefficient);
      if (!emplaced) {
        it->second = it->second + coefficient; // accumulate into an existing term
      }
    };

    // clang-format off
    emplace_term(result, product{replacing_pair_index                        }, std::make_shared<numeric_literal>(strength *  3));
    emplace_term(result, product{replacing_pair->first,  replacing_pair_index}, std::make_shared<numeric_literal>(strength * -2));
    emplace_term(result, product{replacing_pair->second, replacing_pair_index}, std::make_shared<numeric_literal>(strength * -2));
    emplace_term(result, product{replacing_pair->first,  replacing_pair->second}, std::make_shared<numeric_literal>(strength *  1));
    // clang-format on
  }

  return result;
}
// Compile.

// Expands `expression` into polynomial form, reduces it to a quadratic
// polynomial (degree-reduction penalties scaled by `strength`), and packages
// everything into a model.
inline auto compile(const std::shared_ptr<const expression>& expression, double strength) noexcept {
  auto variables = pyqubo::variables();

  const auto [polynomial, sub_hamiltonians, constraints] = expand()(expression, &variables);
  const auto quadratic_polynomial = convert_to_quadratic(polynomial, strength, &variables);

  return model(quadratic_polynomial, sub_hamiltonians, constraints, variables);
}
}
|
-- Bibliographic references (citation key, title, authors, source, license)
-- that media rows can point at.
-- NOTE: REFERENCES is a reserved word in MySQL, so the backtick quoting of
-- the table name is required everywhere it is used.
CREATE TABLE `references` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `name` varchar(250) NOT NULL DEFAULT '' COMMENT 'i.e. foo2018',
  `title` varchar(250) DEFAULT '',
  `authors` varchar(250) DEFAULT NULL,
  `source` varchar(250) DEFAULT NULL,
  `license` varchar(50) DEFAULT NULL COMMENT 'SPDX license identifier',
  `changes` varchar(250) DEFAULT NULL,
  `created` datetime NOT NULL,
  `modified` datetime NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB;

-- Link media rows to a reference; nullable so existing media stay valid.
-- NOTE(review): no FOREIGN KEY constraint is declared here -- confirm whether
-- referential integrity is enforced at the application layer.
ALTER TABLE `media` ADD `reference_id` INT(11) UNSIGNED NULL DEFAULT NULL AFTER `source`;
|
/* Copyright (c) 2012-2016 Tresys Technology, LLC. All rights reserved.
*
* Developed by: Tresys Technology, LLC
* http://www.tresys.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal with
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimers.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimers in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the names of Tresys Technology, nor the names of its contributors
* may be used to endorse or promote products derived from this Software
* without specific prior written permission.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
* SOFTWARE.
*/
package edu.illinois.ncsa.daffodil.dpath
import edu.illinois.ncsa.daffodil.exceptions.Assert
import NodeInfo._
object NodeInfoUtils {

  /**
   * For a comparison operator, compute type to which the args should be converted.
   *
   * The match cases run from the widest numeric kind (Decimal) down to the
   * narrowest (Byte), with each kind listed in both operand positions so the
   * order of the operands does not matter; the first case that matches wins.
   */
  def generalizeArgTypesForComparisonOp(op: String,
    inherent1: Numeric.Kind,
    inherent2: Numeric.Kind): Numeric.Kind = {
    val argType: Numeric.Kind = (inherent1, inherent2) match {
      // Identical kinds need no conversion.
      case (x, y) if (x eq y) => x
      case (_, Decimal) => Decimal
      case (Decimal, _) => Decimal
      case (_, Double) => Double
      case (Double, _) => Double
      // Float comparisons are carried out in Double.
      case (_, Float) => Double
      case (Float, _) => Double
      case (_, Integer) => Integer
      case (Integer, _) => Integer
      case (_, NonNegativeInteger) => Integer
      case (NonNegativeInteger, _) => Integer
      // Two kinds within the UnsignedLong family stay UnsignedLong; mixing
      // UnsignedLong with anything else widens to the unbounded Integer kind.
      case (_: UnsignedLong.Kind, UnsignedLong) => UnsignedLong
      case (UnsignedLong, _: UnsignedLong.Kind) => UnsignedLong
      case (_, UnsignedLong) => Integer
      case (UnsignedLong, _) => Integer
      case (_, ArrayIndex) => ArrayIndex
      case (ArrayIndex, _) => ArrayIndex
      case (_, Long) => Long
      case (Long, _) => Long
      case (_, UnsignedInt) => Long
      case (UnsignedInt, _) => Long
      case (_, Int) => Int
      case (Int, _) => Int
      case (_, UnsignedShort) => Int
      case (UnsignedShort, _) => Int
      case (_, Short) => Short
      case (Short, _) => Short
      case (_, UnsignedByte) => Short
      case (UnsignedByte, _) => Short
      case (_, Byte) => Byte
      case (Byte, _) => Byte
      case _ => Assert.usageError(
        "Unsupported types for comparison op '%s' were %s and %s.".format(op, inherent1, inherent2))
    }
    argType
  }

  /**
   * For a numeric operation, compute types the args should be converted to, and the resulting type
   * from the operation on them.
   */
  def generalizeArgAndResultTypesForNumericOp(op: String,
    leftArgType: Numeric.Kind,
    rightArgType: Numeric.Kind): ( //
    Numeric.Kind, // first result is generalized arg type
    Numeric.Kind // second result is generalized result type
    ) = {
    /*
     * Adjust for the Decimal result type when div/idiv is used
     */
    // "div" maps exact kinds (Decimal/Integer) to Decimal and keeps binary
    // floating kinds floating (Double/Long -> Double, Float/Int -> Float).
    def divResult(resultType: NodeInfo.Numeric.Kind) = resultType match {
      case Decimal => Decimal
      case Integer => Decimal
      case Double => Double
      case Long => Double
      case Float => Float
      case Int => Float
      case ArrayIndex => ArrayIndex
      case _ => Assert.usageError("Unsupported return type: %s".format(resultType))
    }
    // "idiv" (integer division) always produces an integral kind.
    def idivResult(resultType: NodeInfo.Numeric.Kind) = resultType match {
      case Decimal => Integer
      case Integer => Integer
      case Double => Long
      case Long => Long
      case Float => Int
      case Int => Int
      case ArrayIndex => ArrayIndex
      case _ => Assert.usageError("Unsupported return type: %s".format(resultType))
    }
    // Widest kind first, each listed in both operand positions (mirrors
    // generalizeArgTypesForComparisonOp, but also yields a result kind).
    val (argType: Numeric.Kind, resultType: Numeric.Kind) = (leftArgType, rightArgType) match {
      case (_, Decimal) => (Decimal, Decimal)
      case (Decimal, _) => (Decimal, Decimal)
      case (_, Double) => (Double, Double)
      case (Double, _) => (Double, Double)
      case (_, Float) => (Double, Double)
      case (Float, _) => (Double, Double)
      case (_, Integer) => (Integer, Integer)
      case (Integer, _) => (Integer, Integer)
      case (_, NonNegativeInteger) => (NonNegativeInteger, Integer)
      case (NonNegativeInteger, _) => (NonNegativeInteger, Integer)
      case (_, UnsignedLong) => (UnsignedLong, Integer)
      case (UnsignedLong, _) => (UnsignedLong, Integer)
      case (_, ArrayIndex) => (ArrayIndex, ArrayIndex)
      case (ArrayIndex, _) => (ArrayIndex, ArrayIndex)
      case (_, Long) => (Long, Long)
      case (Long, _) => (Long, Long)
      case (_, UnsignedInt) => (UnsignedInt, Long)
      case (UnsignedInt, _) => (UnsignedInt, Long)
      case (_, Int) => (Int, Int)
      case (Int, _) => (Int, Int)
      case (_, UnsignedShort) => (UnsignedShort, Int)
      case (UnsignedShort, _) => (UnsignedShort, Int)
      case (_, Short) => (Short, Int)
      case (Short, _) => (Short, Int)
      case (_, UnsignedByte) => (UnsignedByte, Int)
      case (UnsignedByte, _) => (UnsignedByte, Int)
      case (_, Byte) => (Byte, Int)
      case (Byte, _) => (Byte, Int)
      case _ => Assert.usageError(
        "Unsupported types for numeric op '%s' were %s and %s.".format(op, leftArgType, rightArgType))
    }
    // div/idiv override the result kind computed above.
    val res = op match {
      case "div" => (argType, divResult(resultType))
      case "idiv" => (argType, idivResult(resultType))
      case _ => (argType, resultType)
    }
    res
  }
}
|
<div style="text-align: center">
    {{-- Render each school name on its own line --}}
    @foreach ($skoly as $skola)
        {{$skola->nazev}}
        <br>
    @endforeach
    {{-- Pagination controls ($skoly is a paginator: it exposes links()) --}}
    {{ $skoly->links() }}
</div>
|
<?php
namespace MauroMoreno\DataFactory\Tests\Entity;
use MauroMoreno\DataFactory\Entity\Error;
use PHPUnit\Framework\TestCase;
/**
 * Covers the Error entity's value accessor pair.
 */
class ErrorTest extends TestCase
{
    public function test_getters_and_setters_ok()
    {
        $value = 'error_value';
        $error = new Error();

        // setValue() is fluent: it returns the same instance it was called on.
        $fluentResult = $error->setValue($value);

        $this->assertEquals($error, $fluentResult);
        $this->assertEquals($value, $error->getValue());
    }
}
|
import React from 'react';
import RouteHandler from 'app/App/RouteHandler';
import { actions } from 'app/BasicReducer';
import { UserManagement } from 'app/Users/UserManagement';
import UsersAPI from './UsersAPI';
/**
 * Route handler for the user-management screen.
 */
export class Users extends RouteHandler {
  /**
   * Fetches the users list and returns the store action that caches it
   * under the 'users' key.
   * @param requestParams forwarded unchanged to UsersAPI.get
   * @returns array of BasicReducer actions to dispatch
   */
  static async requestState(requestParams) {
    const users = await UsersAPI.get(requestParams);
    return [actions.set('users', users)];
  }

  // Rendering is fully delegated; UserManagement reads its data from the store.
  render() {
    return <UserManagement />;
  }
}

export default Users;
|
# Audio Visualiser 3D

## Demo
[The live demo is available in my website](https://ahabram.fr/audio-visualiser/)
## Description
This is a website that offers an auditory experience.
Based on the user's music, the website adapts its animations to the drums of the track.
I advise you to choose music with prominent drums and bass. The algorithm is much more effective with
electro, trance, dubstep, or even rap music.
I used Web Audio API to do all audio processing.
## Dependencies
- Three.js - used for rendering the 3D scene
- SimplexNoise - used to animate the ground plane
## Technical Description
Before playing music:
- Receiving the song from input
- Cloning the song to apply some filters on one & play the original song with the other one
- Applying a "LowShelf" filter to lower the intensity of the bass and keep the high frequencies, in order to
detect drums correctly
- Applying a "LowPass" filter to lower unwanted high frequencies (voices for example) and keep the bass
- Applying a gain filter (without it, the frequencies are too low to detect drums & bass)
- Playing the original song & live-analysing the filtered song
During the music:
- Getting the current frequencies in byte format
- Detecting the highest frequencies and parsing the next 20/50 bytes
- If the next x frequencies are still high, it's probably a drum or a bass
- If it's a drum, I send the information to the 3D scene to increase animation speed, apply some shaders, ...
## Author
- Anas Habib ABRAM
> Contact: anas-habib.abram@hotmail.com
> Personal Achievement: "I know how to make coffee"
## License
MIT
|
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { HistoryEntry, NewHistoryEntry } from '../models/history';
import { environment } from 'src/environments/environment';
import { zip, forkJoin } from 'rxjs';
import { switchMap } from 'rxjs/operators';
@Injectable({
  providedIn: 'root'
})
/**
 * HTTP client for the `${apiUrl}/history` endpoints.
 */
export class HistoryService {
  constructor(private httpClient: HttpClient) { }

  /** Fetches all history entries. */
  all() {
    return this.httpClient.get<HistoryEntry[]>(`${environment.apiUrl}/history`);
  }

  /** Creates a new history entry; the server returns the stored entity. */
  append(historyEntry: NewHistoryEntry) {
    return this.httpClient.post<HistoryEntry>(`${environment.apiUrl}/history`, historyEntry);
  }

  /** Deletes a single entry by id. */
  remove(historyEntryId: number) {
    return this.httpClient
      .delete<HistoryEntry>(`${environment.apiUrl}/history/${encodeURIComponent(historyEntryId)}`);
  }

  /**
   * Deletes every entry: first fetch the full list, then fan out one DELETE
   * per entry and wait for all of them (forkJoin).
   * NOTE(review): `zip` is imported at the top of the file but not used here.
   */
  removeAll() {
    return this.httpClient
      .get<HistoryEntry[]>(`${environment.apiUrl}/history`)
      .pipe(
        switchMap(history => forkJoin(history.map(entry => this.remove(entry.id))))
      );
  }
}
|
! RUN: %python %S/test_errors.py %s %flang_fc1 -fopenmp
! OpenMP Atomic construct
! section 2.17.7
! Intrinsic procedure name is one of MAX, MIN, IAND, IOR, or IEOR.
! Exercises the semantic checks for ATOMIC (UPDATE) statements whose RHS is an
! intrinsic call: only MAX/MIN/IAND/IOR/IEOR are allowed, and the updated
! variable must appear in the intrinsic's argument list. The !ERROR lines are
! directives consumed by test_errors.py and must precede the flagged statement.
program OmpAtomic
    use omp_lib
    real x
    integer :: y, z, a, b, c, d
    x = 5.73
    y = 3
    z = 1
    ! Valid: each permitted intrinsic updating a variable that appears in its own arguments.
    !$omp atomic
    y = IAND(y, 4)
    !$omp atomic
    y = IOR(y, 5)
    !$omp atomic
    y = IEOR(y, 6)
    !$omp atomic
    y = MAX(y, 7)
    !$omp atomic
    y = MIN(y, 8)
    ! Invalid: the assigned variable z is absent from the argument lists.
    !$omp atomic
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = IAND(y, 4)
    !$omp atomic
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = IOR(y, 5)
    !$omp atomic
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = IEOR(y, 6)
    !$omp atomic
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = MAX(y, 7, b, c)
    !$omp atomic
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = MIN(y, 8, a, d)
    ! Invalid: intrinsics outside the permitted MAX/MIN/IAND/IOR/IEOR set.
    !$omp atomic
    !ERROR: Invalid intrinsic procedure name in OpenMP ATOMIC (UPDATE) statement
    y = FRACTION(x)
    !$omp atomic
    !ERROR: Invalid intrinsic procedure name in OpenMP ATOMIC (UPDATE) statement
    y = REAL(x)
    ! Same checks with the explicit UPDATE clause spelled out.
    !$omp atomic update
    y = IAND(y, 4)
    !$omp atomic update
    y = IOR(y, 5)
    !$omp atomic update
    y = IEOR(y, 6)
    !$omp atomic update
    y = MAX(y, 7)
    !$omp atomic update
    y = MIN(y, 8)
    !$omp atomic update
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = IAND(y, 4)
    !$omp atomic update
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = IOR(y, 5)
    !$omp atomic update
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = IEOR(y, 6)
    !$omp atomic update
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = MAX(y, 7)
    !$omp atomic update
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = MIN(y, 8)
    !$omp atomic update
    !ERROR: Invalid intrinsic procedure name in OpenMP ATOMIC (UPDATE) statement
    y = MOD(y, 9)
    !$omp atomic update
    !ERROR: Invalid intrinsic procedure name in OpenMP ATOMIC (UPDATE) statement
    x = ABS(x)
end program OmpAtomic
! Checks that the argument-list search compares whole designators, not bare
! names: the component s%z shares the name 'z' with the updated variable but
! is a different object, so the error must still fire.
subroutine conflicting_types()
    type simple
        integer :: z
    end type
    real x
    integer :: y, z
    type(simple) ::s
    z = 1
    !$omp atomic
    !ERROR: Atomic update variable 'z' not found in the argument list of intrinsic procedure
    z = IAND(s%z, 4)
end subroutine
|
package com.demo.developer.deraesw.demomoviewes.core.data.entity
import androidx.room.Entity
import androidx.room.PrimaryKey
import com.google.gson.annotations.SerializedName
/**
 * Room entity backing the `people` table; also deserialized from JSON via Gson.
 *
 * NOTE(review): only `insertDate` carries a @SerializedName mapping. If the
 * upstream API delivers snake_case keys (e.g. `profile_path`), `profilePath`
 * will not be populated -- confirm against the actual API response.
 */
@Entity(tableName = "people")
data class People(
    // Upstream identifier, reused as the primary key (not auto-generated).
    @PrimaryKey
    var id: Int = 0,
    var name: String = "",
    var gender: Int = 0,
    var profilePath: String = "",
    @SerializedName(value = "insert_date")
    var insertDate: String = ""
)
|
๏ปฟ//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by Cake.
// </auto-generated>
//------------------------------------------------------------------------------
using System.Reflection;
// NOTE: this file is generated by Cake (see header) -- change the build
// script rather than hand-editing these attributes.
// The three version attributes are kept in lock-step by the build.
[assembly: AssemblyDescription("Opinionated wrapper for System.Net.Http.HttpClient")]
[assembly: AssemblyProduct("Restify")]
[assembly: AssemblyVersion("0.1.0")]
[assembly: AssemblyFileVersion("0.1.0")]
[assembly: AssemblyInformationalVersion("0.1.0")]
[assembly: AssemblyCopyright("Copyright (c) Lotpath 2015")]
|
import { TypeCreator } from '@src/creator/types';
import { AllowedTypes, typeCollection as T } from '@constant/dataType';
import { typeOf } from '@utils/utils';
import { transformData } from './transform';
/**
 * Recursively normalizes `currentData` against a type descriptor, returning
 * data that conforms to the described shape (falling back to the descriptor's
 * default value when the input's shape does not match).
 */
const format = (
  currentData: AllowedTypes.AllDataType,
  types: TypeCreator.MixTypeValue | TypeCreator.AllType
): unknown => {
  // A descriptor arrives either as the raw tuple itself, or wrapped in an
  // object whose `value` field holds the tuple; unwrap to the tuple form.
  const innerTypes = (typeOf(types) === T.array
    ? types
    : (types as TypeCreator.AllType).value) as TypeCreator.MixTypeValue;
  // The first tuple element is the type tag.
  const currentType = innerTypes[0];
  switch (currentType) {
    case T.string:
    case T.number:
    case T.boolean: {
      // Primitive descriptor: [tag, fallbackValue].
      const [dataType, normalValue] = innerTypes;
      return transformData(
        currentData as AllowedTypes.PrimitiveType,
        dataType,
        normalValue as AllowedTypes.PrimitiveType
      );
    }
    case T.array: {
      // Array descriptor: [tag, itemDescriptor, fallbackValue].
      const [_, items, normalValue] = innerTypes as [symbol, TypeCreator.MixTypeValue, any];
      const itemsType = typeOf(items) === T.object ? T.object : ((items as unknown) as [symbol])[0];
      if (typeOf(currentData) !== T.array) {
        // Wrong shape: replace the whole value with the fallback.
        return normalValue;
      }
      // Object-typed items need a synthesized [tag, descriptor, default]
      // tuple; its default is built by formatting an empty object.
      const itemsTypeValue = (itemsType === T.object
        ? [T.object, items, format({}, [T.object, items, {}])]
        : items) as TypeCreator.MixTypeValue;
      return (currentData as AllowedTypes.ArrayType).map((v) => {
        return format(v, itemsTypeValue);
      });
    }
    case T.object: {
      // Object descriptor: [tag, propertyDescriptors, fallbackValue].
      const [_, properties, normalValue] = innerTypes;
      // Fall back only when a non-empty fallback object was supplied.
      if (typeOf(currentData) !== T.object && Object.keys(normalValue as object).length) {
        return normalValue;
      }
      // Recurse per declared property; input keys not in the descriptor are dropped.
      return Object.entries(properties).reduce((preItem, [key, value]) => {
        return {
          ...preItem,
          [key]: format((currentData as any)?.[key], value)
        };
      }, {});
    }
    default:
      // Unknown type tag.
      return null;
  }
};

// Type alias so consumers can reference the formatter's signature.
export type Format = typeof format;
export default format;
|
# What was the faithfulness of Ephraim and Judah like?
Their faithfulness was like a morning cloud, like the dew that goes away early.
|
<?php
namespace Bixev\Migrations\Updater;
/**
 * Runs MySQL migration files. Query execution is delegated to a
 * caller-supplied callable so the class stays connection-library agnostic.
 */
class MysqlUpdater extends AbstractUpdater
{
    /**
     * @var array string replacements intended for query strings
     *
     * NOTE(review): stored here but never applied inside doUpdate();
     * presumably consumed elsewhere (parent/subclass) -- confirm, otherwise
     * setReplacements() is effectively a no-op.
     */
    protected $_replacements = [];

    /**
     * @var callable invoked once per non-empty query string
     */
    protected $_queryExecutor;

    /**
     * @param array $replacements array of replacements to replace in query strings
     */
    public function setReplacements(array $replacements = [])
    {
        $this->_replacements = $replacements;
    }

    /**
     * @param callable $callable receives each SQL query string to execute
     */
    public function setQueryExecutor(callable $callable)
    {
        $this->_queryExecutor = $callable;
    }

    /**
     * Reads the migration file at $path and executes it query by query.
     *
     * Queries are split on the literal ";\n" sequence, so a statement that
     * contains that sequence inside a string literal would be split wrongly.
     *
     * @param string $path path of the SQL file to run
     * @throws \Exception when no query executor has been configured
     */
    protected function doUpdate($path)
    {
        $content = file_get_contents($path);
        $queries = explode(";\n", $content);
        foreach ($queries as $query) {
            if (trim($query) != '') {
                if ($this->_queryExecutor === null) {
                    throw new \Exception('query executor is not defined');
                }
                call_user_func($this->_queryExecutor, $query);
            }
        }
    }
}
|
๏ปฟusing Songhay.Extensions;
using Songhay.Models;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
using System.Xml.Serialization;
namespace Songhay.Xml
{
    /// <summary>
    /// Static members for XHTML Documents.
    /// </summary>
    public static partial class XhtmlDocumentUtility
    {
        /// <summary>
        /// Writes the index of XHTML documents.
        /// </summary>
        /// <param name="indexFileName">Name of the index file.</param>
        /// <param name="indexTitle">The index title.</param>
        /// <param name="publicRoot">The public root.</param>
        /// <param name="pathToDirectory">The path to the specified directory.</param>
        /// <param name="pathToOutput">The path to output.</param>
        /// <remarks>
        /// Only files directly inside <paramref name="pathToDirectory"/> are
        /// indexed (no recursion). The output is serialized as UTF-8 XML,
        /// with documents ordered by title. Note that the output path is
        /// built by raw string concatenation, so <paramref name="pathToOutput"/>
        /// must already end with a directory separator.
        /// </remarks>
        public static void WriteDocumentIndex(string indexFileName,
            string indexTitle, string publicRoot,
            string pathToDirectory, string pathToOutput)
        {
            var directory = new DirectoryInfo(pathToDirectory);
            var list = new List<XhtmlDocument>();
            directory.GetFiles()
                .ForEachInEnumerable(f =>
                {
                    // Public URI of the document = publicRoot + file name (raw concatenation).
                    var uri = string.Concat(publicRoot, f.Name);
                    list.Add(XhtmlDocumentUtility.LoadDocument(f.FullName, uri));
                });
            var serializer = new XmlSerializer(typeof(XhtmlDocuments));
            using(var writer = new XmlTextWriter(string.Concat(pathToOutput, indexFileName), Encoding.UTF8))
            {
                var documents = new XhtmlDocuments
                {
                    Documents = list.OrderBy(d => d.Title).ToArray(),
                    Title = indexTitle
                };
                serializer.Serialize(writer, documents);
            }
        }
    }
}
|
๏ปฟusing UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Runtime.InteropServices;
using System.IO;
using netty;
using UnityEngine.UI;
using System;
/// <summary>
/// Bridges Unity to platform-native WeChat login/share code: iOS via
/// DllImport("__Internal"), Android via AndroidJavaClass calls into the
/// current activity. Also hosts the callbacks the native side invokes.
/// </summary>
public class ThirdParty : MonoBehaviour
{
    public HomeController controller = null;

    // Fallback player id used by the plain (non-WeChat) login path.
    private int playerId = 1001;

#if UNITY_IPHONE
    [DllImport("__Internal")]
    private static extern void weixinLoginByIos();

    /// <summary>
    /// Shares an image link via WeChat.
    /// Note: the share thumbnail is fixed to the application icon.
    /// </summary>
    /// <param name="url"> link address </param>
    /// <param name="title"> title </param>
    /// <param name="des"> description </param>
    /// <param name="sharetype"> share type: 0 = share to Moments, 1 = share to a friend </param>
    [DllImport("__Internal")]
    private static extern void WXShareToFriend(string url, string title, string des, string sharetype);

    /// <summary>
    /// Shares a screenshot via WeChat.
    /// </summary>
    /// <param name="imagepath">local path of the image to share</param>
    [DllImport("__Internal")]
    private static extern void WXShareScreenshot(string imagepath);
#elif UNITY_ANDROID
    // Android equivalents of the iOS externs above; each call is forwarded
    // to a method on the current Unity player activity.
    private void AndroidLogin()
    {
        AndroidJavaClass jc = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        AndroidJavaObject jo = jc.GetStatic<AndroidJavaObject>("currentActivity");
        jo.Call("weiLogin");
    }

    private void WXShareToFriend(string url, string title, string des, string sharetype)
    {
        AndroidJavaClass jc = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        AndroidJavaObject jo = jc.GetStatic<AndroidJavaObject>("currentActivity");
        // Arguments
        string[] mObject = new string[4];
        mObject[0] = url;
        mObject[1] = title;
        mObject[2] = des;
        mObject[3] = sharetype;
        jo.Call("WXShareToFriend", mObject);
    }

    private void WXShareScreenshot(string imagepath)
    {
        AndroidJavaClass jc = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        AndroidJavaObject jo = jc.GetStatic<AndroidJavaObject>("currentActivity");
        // Arguments
        string[] mObject = new string[1];
        mObject[0] = imagepath;
        jo.Call("WXShareScreenshot", mObject);
    }
#endif

    /// <summary>
    /// Plain login: connects the socket and sends a LoginReq, either with a
    /// WeChat token (when provided) or with the fixed playerId.
    /// </summary>
    public void NomalLogin(string token = "")
    {
        if (PPSocket.GetInstance().Connect(controller))
        {
            MessageInfo req = new MessageInfo();
            LoginReq login = new LoginReq();
            req.messageId = MESSAGE_ID.msg_LoginReq;
            login.code = "";
            login.clientinfos = "Win32_version_" + Application.version;
            Debug.Log(login.clientinfos);
            if (token.Length > 0)
            {
                login.code = token;
            }
            else
            {
                login.playerid = playerId;
            }
            req.loginReq = login;
            PPSocket.GetInstance().SendMessage(req);
            // Open the main window
            controller.OpenWindow(WINDOW_ID.WINDOW_ID_HOME);
            // Close the login window
            controller.CloseWindow(WINDOW_ID.WINDOW_ID_LOGIN);
            // Show the loading indicator
            controller.LoadingStart();
        }
        else
        {
            controller.ShowTips(Strings.SS_CONNECT_FAILS);
            Debug.LogError("้พๆฅๆๅกๅจๅคฑ่ดฅ๏ผ");
        }
    }

    /// <summary>
    /// Starts the platform-appropriate login flow: WeChat authorization on
    /// iOS/Android, plain login in the Windows editor/standalone.
    /// </summary>
    public void ThirdPartyLogin()
    {
#if UNITY_IPHONE
        Debug.logger.Log("IOSๅๅคๆๅๅพฎไฟกๆๆ็ปๅฝ๏ผ");
        weixinLoginByIos();
#elif UNITY_ANDROID
        Debug.logger.Log("Androidๅๅคๆๅๅพฎไฟกๆๆ็ปๅฝ๏ผ");
        AndroidLogin();
#elif UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
        NomalLogin();
#endif
    }

    /// <summary>
    /// Shares the controller's URL via WeChat on mobile platforms; no-op elsewhere.
    /// </summary>
    public void ThirdPartyShare(string title, string des, int sharetype)
    {
#if UNITY_IPHONE
        Debug.logger.Log("IOSๅๅคๅไบซ๏ผ");
        WXShareToFriend(controller.ShareUrl, title, des, sharetype.ToString());
#elif UNITY_ANDROID
        Debug.logger.Log("Androidๅๅคๅไบซ๏ผ");
        WXShareToFriend(controller.ShareUrl, title, des, sharetype.ToString());
#endif
    }

    /// <summary>
    /// Shares a screenshot image file via WeChat on mobile platforms; no-op elsewhere.
    /// </summary>
    public void ShareScreenshot(string imgPath)
    {
#if UNITY_IPHONE
        Debug.logger.Log("IOSๅๅคๅไบซๆชๅฑ๏ผ");
        WXShareScreenshot(imgPath);
#elif UNITY_ANDROID
        Debug.logger.Log("Androidๅๅคๅไบซๆชๅฑ๏ผ");
        WXShareScreenshot(imgPath);
#endif
    }

    /// <summary>
    /// Callback invoked by the native WeChat login code.
    /// Expected format of <paramref name="param"/>: "code token" separated by
    /// a space; code 0 means authorization succeeded.
    /// </summary>
    public void WXCallBack(string param)
    {
        Debug.LogError("ๅพฎไฟกๅ่ฐ่ๆฌๆๅ๏ผ๏ผ param=" + param);
        string[] result = param.Split(' ');
        int code = 0;
        int.TryParse(result[0], out code);
        if (code == 0)
        {
            // Login succeeded
            if (PPSocket.GetInstance().IsConnected() || PPSocket.GetInstance().Connect(controller))
            {
                MessageInfo req = new MessageInfo();
                LoginReq login = new LoginReq();
                PlayerBaseInfo playerBaseInfo = new PlayerBaseInfo();
                req.messageId = MESSAGE_ID.msg_LoginReq;
                login.code = result[1];
                req.loginReq = login;
#if UNITY_IPHONE
                login.clientinfos = "iPhone_version_" + Application.version;
#elif UNITY_ANDROID
                login.clientinfos = "Android_version_" + Application.version;
#endif
                Debug.Log(login.clientinfos);
                PPSocket.GetInstance().SendMessage(req);
                // Open the main window
                controller.OpenWindow(WINDOW_ID.WINDOW_ID_HOME);
                // Close the login window
                controller.CloseWindow(WINDOW_ID.WINDOW_ID_LOGIN);
                // Show the loading indicator
                controller.LoadingStart();
            }
            else
            {
                controller.ShowTips(Strings.SS_CONNECT_FAILS);
            }
        }
        else
        {
            // Login failed
            controller.ShowTips(Strings.SS_WXEMPOWER_FAILS + result[0]);
        }
    }

    /// <summary>
    /// Callback invoked by the native WeChat share code; "0" means success.
    /// </summary>
    public void WXShareCallBack(string param)
    {
        int code = 0;
        int.TryParse(param, out code);
        if (code == 0)
        {
            // Share succeeded
            controller.ShowTips(Strings.SS_SHARE_SUCCESS);
        }
    }

    /// <summary>
    /// UI hook: parses the typed player id used by the plain login path.
    /// </summary>
    public void InputPlayerId(InputField input)
    {
        int.TryParse(input.text, out playerId);
    }
}
|
๏ปฟusing System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace MpdBaileyTechnology.GenericApp.Model
{
    /// <summary>
    /// Produces <c>Reading</c> instances; where the readings come from
    /// (sensor, file, generator, ...) is implementation-defined.
    /// </summary>
    public interface IReadingSource
    {
        /// <summary>Returns a reading from the underlying source.</summary>
        Reading GetReading();
    }
}
|
#include "NetworkWrapper.h"
#include "curl/curl.h"
extern "C"
{
#include "md5.h"
}
using namespace std;
#define invokeLib(LibFn,arg1,args...) _p->lasterr=LibFn(arg1,##args)
static int _cnt_native_lib=0;
// Reference-counted global libcurl initialization.
// Returns 0 on success; -1 if curl_global_init fails on first use
// (in which case the reference count is left untouched).
int InitNativeLib()
{
    if(_cnt_native_lib==0 && curl_global_init(CURL_GLOBAL_ALL)!=0)
    {
        return -1;
    }
    _cnt_native_lib++;
    return 0;
}
// Reference-counted global libcurl cleanup; the real curl_global_cleanup()
// runs only when the last reference is released. Always returns 0.
//
// Fix: the original decremented unconditionally, so an unbalanced call
// (cleanup without a matching init) drove the counter negative and broke
// all subsequent init/cleanup pairing. Now guarded.
int CleanUpNativeLib()
{
    if(_cnt_native_lib<=0)
    {
        // Unbalanced call: nothing to clean up.
        return 0;
    }
    if(_cnt_native_lib==1)
    {
        curl_global_cleanup();
    }
    _cnt_native_lib--;
    return 0;
}
// Private implementation (pimpl) for HTTPConnection.
class HTTPConnection::_impl
{
public:
    CURL* c;                // easy handle; NULL if curl_easy_init failed
    CURLcode lasterr;       // last libcurl result recorded via invokeLib
    FILE* delegated_fp;     // file opened by setOutputFile; closed in the destructor
};
// Allocates the pimpl and acquires a curl easy handle.
// curl_easy_init may return NULL; isReady() reports that condition.
HTTPConnection::HTTPConnection() : _p(new _impl)
{
    _p->lasterr = CURLE_OK;
    _p->delegated_fp = NULL;
    _p->c = curl_easy_init();
}
// Releases the curl handle, closes any file delegated by setOutputFile,
// and frees the pimpl.
HTTPConnection::~HTTPConnection()
{
    if(_p)
    {
        curl_easy_cleanup(_p->c); // safe on NULL handles
        if(_p->delegated_fp)
        {
            fclose(_p->delegated_fp);
        }
        delete _p;
    }
}
bool HTTPConnection::isReady() const
{
return _p&&_p->c;
}
// Sets the request URL. Returns the CURLcode from curl_easy_setopt
// (also recorded in lasterr via the invokeLib macro); 0/CURLE_OK on success.
int HTTPConnection::setURL(const string& URL)
{
    return invokeLib(curl_easy_setopt,_p->c,CURLOPT_URL,URL.c_str());
}
// Trampoline passed to CURLOPT_WRITEFUNCTION: forwards each received chunk to
// the std::function stored via CURLOPT_WRITEDATA (see setWriter).
// NOTE(review): `int sum = sz*n` truncates for chunks larger than INT_MAX; in
// practice libcurl chunks are bounded by CURL_MAX_WRITE_SIZE, but size_t
// would be the safer type.
static size_t _general_writer(char* ptr,size_t sz,size_t n,void* userfn)
{
    int sum=sz*n;
    return (*reinterpret_cast<function<int(char*,int)>*>(userfn))(ptr,sum);
}
// Installs a write callback invoked with each chunk of response data.
// Always returns 0; individual setopt results are recorded in lasterr.
//
// WARNING(review): only the ADDRESS of `fn` is handed to curl, so the
// caller's function object must outlive every later perform() call.
// Passing a temporary here leaves curl holding a dangling pointer —
// confirm call sites keep the callback alive.
int HTTPConnection::setWriter(const function<int(char*,int)>& fn)
{
    invokeLib(curl_easy_setopt,_p->c,CURLOPT_WRITEFUNCTION,_general_writer);
    invokeLib(curl_easy_setopt,_p->c,CURLOPT_WRITEDATA,&fn);
    return 0;
}
// Directs the response body into the named file. Returns 0 on success,
// -2 if the file could not be opened. The FILE* is owned by this object
// and closed in the destructor (or when replaced by a later call).
int HTTPConnection::setOutputFile(const string& filename)
{
    // Open in binary mode: response bodies are arbitrary bytes, and text
    // mode would corrupt them on platforms that translate line endings.
    FILE* fp=fopen(filename.c_str(),"wb");
    if(!fp) return -2;
    // Close the file from any previous call so the handle is not leaked.
    if(_p->delegated_fp)
    {
        fclose(_p->delegated_fp);
    }
    invokeLib(curl_easy_setopt,_p->c,CURLOPT_WRITEFUNCTION,fwrite);
    invokeLib(curl_easy_setopt,_p->c,CURLOPT_WRITEDATA,fp);
    _p->delegated_fp=fp;
    return 0;
}
int HTTPConnection::setTimeout(int second)
{
return curl_easy_setopt(_p->c,CURLOPT_TIMEOUT,second);
}
// Enables or disables curl's verbose diagnostics. Returns the curl status
// code, which is also recorded in lasterr.
int HTTPConnection::setVerbos(bool v)
{
    // CURLOPT_VERBOSE takes a long parameter; pass 1L/0L explicitly because
    // an int literal through curl's varargs interface is undefined on
    // platforms where long is wider than int.
    return invokeLib(curl_easy_setopt,_p->c,CURLOPT_VERBOSE,v?1L:0L);
}
int HTTPConnection::perform()
{
return curl_easy_perform(_p->c);
}
// Numeric curl status of the most recent recorded operation (CURLcode as int).
int HTTPConnection::getLastErrCode()
{
    return _p->lasterr;
}
// Human-readable description of the most recent recorded curl status.
string HTTPConnection::getLastError()
{
    return curl_easy_strerror(_p->lasterr);
}
// Computes the MD5 digest of a file, returned as a 32-character uppercase
// hex string. Returns an empty string if the file cannot be opened.
string getMD5(const string& filename)
{
    MD5_CTX c;
    md5_init(&c);
    FILE* fp=fopen(filename.c_str(),"rb");
    // Previously a failed fopen() led to fread(...,NULL) — undefined behavior.
    if(!fp) return "";
    char buff[128];
    memset(buff,0,128);
    int len;
    while((len=fread(buff,1,128,fp))>0)
    {
        md5_update(&c,(unsigned char*)buff,len);
        memset(buff,0,128);
    }
    // Previously the FILE handle was leaked on every call.
    fclose(fp);
    // buff is reused to receive the 16-byte digest.
    md5_final(&c,(unsigned char*)buff);
    string result;
    for(int i=0;i<16;i++)
    {
        int v=(unsigned char)buff[i];
        // Emit high then low nibble as uppercase hex.
        result.push_back(v/16<10?v/16+'0':v/16-10+'A');
        result.push_back(v%16<10?v%16+'0':v%16-10+'A');
    }
    return result;
}
|
#!/usr/bin/env bash
# Compile every file in a source directory with ./wmc.
# Usage: <script> <src-dir> <output-dir>
src=$1
output=$2
# Iterate with a glob instead of parsing `ls` output, and quote every
# expansion so paths containing whitespace or glob characters work.
for path in "$src"/*
do
    filename=${path##*/}
    echo "processing: $filename"
    ./wmc -c "$src/$filename" "$output/$filename"
    echo "--------------------------------"
done
|
"""
qpvc : example from Bazinga.jl
Quadratic program with vanishing constraints, from [KPB11].
Original formulations:
minimize 1/2 x' Q x + x' q
subject to x[i] โฅ 0 โ i โ [1:nvc]
x[i] (G[i,:] x - g[i]) โฅ 0 โ i โ [1:nvc]
Reformulation as a constrained structured problem in the form
minimize f(x)
subject to c(x) in S
where
f(x) = 1/2 x' Q x + x' q
c(x) = [ x[1:nvc] ]
[ G x - g ]
S = { (a,b) | โi โ [1:nvc] a_i = 0 or a_i, b_i โฅ 0 }
References:
[KPB11] Kirches, Potschka, Bock, Sager, "A parametric active set method
for quadratic programs with vanishing constraints" (2011).
Pacific Journal of Optimization
"""
using Bazinga, OptiMo
using Random, LinearAlgebra
using DataFrames, Query, CSV
using Printf, Plots
###################################################################################
# problem definition
###################################################################################
# Data for one QPVC instance (see file docstring): minimize 1/2 x'Qx + q'x
# subject to nonnegativity and vanishing constraints on the first nvc slots.
mutable struct QPVC <: AbstractOptiModel
    meta::OptiModelMeta # problem metadata (dimensions, start point, name)
    Q::Matrix           # objective Hessian
    q::Vector           # objective linear term
    G::Matrix           # vanishing-constraint matrix (nvc x nx)
    g::Vector           # vanishing-constraint offsets
    nvc::Int            # number of vanishing constraints
end
# Builds a random QPVC instance with nx variables and nvc vanishing
# constraints (requires nx >= 2*nvc). Total constraint count is 2*nvc:
# nvc nonnegativity rows stacked over nvc linear rows.
function QPVC(; nx::Int = 100, nvc::Int = 20, x0::Vector = zeros(Float64, nx))
    @assert nx >= 2 * nvc
    name = "qpvc"
    ncon = 2 * nvc
    R = Float64
    Q = randn(R, nx, nx)
    # Symmetrize as Q'Q, making the Hessian positive semidefinite (convex QP).
    Q .= (Q' * Q)
    q = randn(R, nx)
    G = randn(R, nvc, nx)
    g = randn(R, nvc)
    meta = OptiModelMeta(nx, ncon, x0 = x0, name = name)
    return QPVC(meta, Q, q, G, g, nvc)
end
# necessary methods:
# obj, grad!: cons!, jprod!, jtprod!, proj!, prox!, objprox!
# Objective value f(x) = 1/2 x'Qx + q'x.
function OptiMo.obj(prob::QPVC, x::AbstractVector)
    OptiMo.@lencheck prob.meta.nvar x
    return 0.5 * x' * prob.Q * x + x' * prob.q
end
# In-place gradient: dfx = Qx + q.
function OptiMo.grad!(prob::QPVC, x::AbstractVector, dfx::AbstractVector)
    OptiMo.@lencheck prob.meta.nvar x
    dfx .= prob.Q * x .+ prob.q
    return nothing
end
# In-place constraint evaluation: cx = [x[1:nvc]; G*x - g].
function OptiMo.cons!(prob::QPVC, x::AbstractVector, cx::AbstractVector)
    OptiMo.@lencheck prob.meta.nvar x
    OptiMo.@lencheck prob.meta.ncon cx
    cx .= [
        x[1:prob.nvc]
        prob.G * x .- prob.g
    ]
    return nothing
end
# Jacobian-vector product Jv = J(x)*v; J is constant: [I_nvc 0; G].
function OptiMo.jprod!(prob::QPVC, x::AbstractVector, v::AbstractVector, Jv::AbstractVector)
    OptiMo.@lencheck prob.meta.nvar x v
    OptiMo.@lencheck prob.meta.ncon Jv
    Jv .= [
        v[1:prob.nvc]
        prob.G * v
    ]
    return nothing
end
# Transposed Jacobian-vector product Jtv = J(x)'*v, split into the G' part
# (from the linear rows) plus the identity part (from the first nvc rows).
function OptiMo.jtprod!(
    prob::QPVC,
    x::AbstractVector,
    v::AbstractVector,
    Jtv::AbstractVector,
)
    OptiMo.@lencheck prob.meta.nvar x Jtv
    OptiMo.@lencheck prob.meta.ncon v
    Jtv .= prob.G' * v[prob.nvc+1:prob.meta.ncon]
    Jtv[1:prob.nvc] .+= v[1:prob.nvc]
    return nothing
end
# Prox of the indicator of { x : x[1:nvc] >= 0 }: project the first nvc
# components onto the nonnegative orthant (the step size `a` is irrelevant
# for an indicator function, hence unused).
function OptiMo.prox!(prob::QPVC, x::AbstractVector, a::Real, z::AbstractVector)
    OptiMo.@lencheck prob.meta.nvar x z
    z .= x
    z[1:prob.nvc] .= max.(x[1:prob.nvc], 0)
    return nothing
end
# Same projection as prox!, additionally returning the (zero) value of the
# nonsmooth term at the projected point.
function OptiMo.objprox!(prob::QPVC, x::AbstractVector, a::Real, z::AbstractVector)
    OptiMo.@lencheck prob.meta.nvar x z
    z .= x
    z[1:prob.nvc] .= max.(x[1:prob.nvc], 0)
    return 0.0
end
# Componentwise projection of (a, b) = (cx[i], cx[i+nvc]) onto the
# vanishing-constraint set S = { (a,b) : a >= 0 and (a = 0 or b >= 0) }:
# first clamp a to be nonnegative, then either zero a (when a+b < 0, i.e.
# zeroing a is the cheaper move) or clamp b to be nonnegative.
function OptiMo.proj!(prob::QPVC, cx::AbstractVector, px::AbstractVector)
    OptiMo.@lencheck prob.meta.ncon cx px
    for i = 1:prob.nvc
        a = cx[i]
        b = cx[i+prob.nvc]
        a = max(a, 0)
        if a + b < 0
            # Cheaper to satisfy "a = 0" than to lift b up to 0.
            a = 0
        else
            b = max(0, b)
        end
        px[i] = a
        px[i+prob.nvc] = b
    end
    return nothing
end
foldername = "/home/alberto/Documents/Bazinga.jl/demo/data/"
filename = "qpvc"
# problem build
problem = QPVC()
# solver build
solver = Bazinga.ALPX(max_sub_iter = 1000, verbose = false)
# solver warm-up: a first solve so JIT compilation time is excluded from the
# timings collected in the benchmark loop below
out = solver(problem)
data = DataFrame()
ntests = 1000
# Benchmark loop: solve ntests random instances of varying size and record
# one row of statistics per instance.
for i = 1:ntests
    local p_nx = rand(10:250)
    local p_nvc = Int(ceil(p_nx / 5))
    local problem = QPVC(nx = p_nx, nvc = p_nvc)
    local out = solver(problem)
    #print(out)
    push!(
        data,
        (
            id = i,
            nx = p_nx,
            nvc = p_nvc,
            time = out.time,
            iters = out.iterations,
            subiters = out.solver[:sub_iterations],
            cviol = out.cviolation,
            optim = out.optimality,
            cslack = out.solver[:cslackness],
            solved = out.status == :first_order ? 1 : 0,
        ),
    )
    # Progress indicator: one dot per solve, newline every 50.
    @printf "."
    if mod(i, 50) == 0
        @printf "\n"
    end
end
@printf "\n"
# write raw results to CSV (no header row)
CSV.write(
    foldername * filename * ".csv",
    data,
    header = false,
)
# Summary statistics over all runs (columns 5-7 = cviol, optim, cslack).
max_cviol = maximum(data[!, 5])
max_optim = maximum(data[!, 6])
max_cslack = maximum(data[!, 7])
datatmp = data |> @filter(_.solved == 1) |> DataFrame
n_first_order = size(datatmp, 1)
# Log-log scatter of runtime against problem size, saved as PDF.
pyplot()
scatter( data[!,2], data[!,4],
    color = :blue,
    marker = :circle,
    markerstrokewidth = 0,
    legend = false,
    yaxis= :log, xaxis=:log,
    xlabel="Problem size nx",
    ylabel="Run time [s]"
)
savefig(foldername * filename * ".pdf")
|
---
layout: post
title: "Hello world"
date: 2015-02-24
summary: my first post...
---
<p>This is my first attempt at creating my own blog... Hopefully this will not be my last post. We will see :-)</p>
|
pub mod color;
pub mod escape_parser;
/// Convert a NUL-terminated C string to a Rust string slice.
///
/// # Safety
///
/// `s` must be non-null and point to a NUL-terminated byte sequence that is
/// valid UTF-8; the memory must remain valid and unmodified for the
/// `'static` lifetime of the returned slice.
///
/// Panics if the bytes before the terminator are not valid UTF-8.
pub unsafe fn from_cstr(s: *const u8) -> &'static str {
    use core::{str, slice};
    // Scan forward for the terminating NUL byte to determine the length.
    let len = (0usize..).find(|&i| *s.add(i) == 0).unwrap();
    str::from_utf8(slice::from_raw_parts(s, len)).unwrap()
}
/// Write a Rust string into a C buffer, appending a NUL terminator.
///
/// # Safety
///
/// `ptr` must point to writable memory of at least `s.len() + 1` bytes,
/// and the source and destination ranges must not overlap.
pub unsafe fn write_cstr(ptr: *mut u8, s: &str) {
    ptr.copy_from(s.as_ptr(), s.len());
    // Terminating NUL goes one past the copied bytes.
    ptr.add(s.len()).write(0);
}
|
require_relative '../../lib/google_static_map/middleware'
module GoogleStaticMap
  # Spec for the rack middleware: non-map requests pass through untouched,
  # while /google-static-map requests get a 301 redirect to a signed URL.
  describe Middleware do
    it 'conforms to the rack middleware api' do
      expect{Middleware.new(double)}.not_to raise_error
    end
    describe :call do
      context 'for non static map requests' do
        it 'calls the next middleware' do
          app = double('app')
          expect(app).to receive(:call).with({})
          Middleware.new(app).call({})
        end
      end
      context 'with static map requests' do
        let(:app) { double('app') }
        let(:env) { {"REQUEST_PATH" => '/google-static-map'} }
        let(:result) { Middleware.new(app).call(env) }
        let(:status_code) { result[0] }
        let(:header) { result[1] }
        let(:body) { result[2] }
        before do
          allow(SignedURL).to receive(:new).and_return('a url')
        end
        it 'should not call next middleware app' do
          expect(app).not_to receive(:call)
          result
        end
        it 'should return a 301 status code' do
          expect(status_code).to eq 301
        end
        it 'should return correct location in header' do
          expect(header['Location']).not_to eq nil
          expect(header['Location']).to eq 'a url'
        end
        it 'should return correct content type in header' do
          expect(header['Content-Type']).to eq 'text/html'
        end
        # Renamed: this example previously duplicated the content-type
        # description although it asserts on Content-Length.
        it 'should return correct content length in header' do
          expect(header['Content-Length']).to eq '0'
        end
        it 'should return nothing for the body' do
          expect(body).to be_empty
        end
      end
    end
  end
end
|
/*
* FXGL - JavaFX Game Library. The MIT License (MIT).
* Copyright (c) AlmasB (almaslvl@gmail.com).
* See LICENSE for details.
*/
package intermediate;
import com.almasb.fxgl.animation.Interpolators;
import com.almasb.fxgl.app.GameApplication;
import com.almasb.fxgl.app.GameSettings;
import com.almasb.fxgl.app.scene.FXGLMenu;
import com.almasb.fxgl.app.scene.MenuType;
import com.almasb.fxgl.app.scene.SceneFactory;
import javafx.beans.binding.StringBinding;
import javafx.geometry.Point2D;
import javafx.geometry.Pos;
import javafx.scene.CacheHint;
import javafx.scene.Group;
import javafx.scene.Node;
import javafx.scene.effect.BoxBlur;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle;
import javafx.util.Duration;
import java.util.ArrayList;
import java.util.List;
import static com.almasb.fxgl.dsl.FXGL.*;
import static javafx.beans.binding.Bindings.createStringBinding;
import static javafx.beans.binding.Bindings.when;
/**
* @author Almas Baimagambetov (almaslvl@gmail.com)
*/
public class CustomGameMenuSample2 extends GameApplication {
    @Override
    protected void initSettings(GameSettings settings) {
        // 1066-wide window with a 16:9 aspect ratio; a custom SceneFactory
        // substitutes our radial game menu for the default one.
        settings.setWidth(1066);
        settings.setHeightFromRatio(16/9.0);
        settings.setSceneFactory(new SceneFactory() {
            @Override
            public FXGLMenu newGameMenu() {
                return new MyGameMenu();
            }
        });
    }
public static class MyGameMenu extends FXGLMenu {
        // Menu buttons in creation order; replayed with a staggered
        // slide-in/fade animation each time the menu opens (see onCreate).
        private List<Node> buttons = new ArrayList<>();
        // Stagger index for the entrance animations, reset on every onCreate.
        private int animIndex = 0;
        // Builds the static menu scene: oversized background texture
        // (offset so edges never show), centered title, and the button body.
        public MyGameMenu() {
            super(MenuType.GAME_MENU);
            var bg = texture("background.png", getAppWidth() + 450, getAppHeight() + 200);
            bg.setTranslateY(-85);
            bg.setTranslateX(-450);
            var titleView = getUIFactoryService().newText(getSettings().getTitle(), 48);
            centerTextBind(titleView, getAppWidth() / 2.0, 100);
            var body = createBody();
            body.setTranslateY(-25);
            getContentRoot().getChildren().addAll(bg, titleView, body);
        }
        // Runs each time the menu is shown: every button starts invisible,
        // then slides in from the left and fades in, staggered by 0.1s each.
        @Override
        public void onCreate() {
            animIndex = 0;
            buttons.forEach(btn -> {
                btn.setOpacity(0);
                animationBuilder(this)
                        .delay(Duration.seconds(animIndex * 0.1))
                        .interpolator(Interpolators.BACK.EASE_OUT())
                        .translate(btn)
                        .from(new Point2D(-200, 0))
                        .to(new Point2D(0, 0))
                        .buildAndPlay();
                animationBuilder(this)
                        .delay(Duration.seconds(animIndex * 0.1))
                        .fadeIn(btn)
                        .buildAndPlay();
                animIndex++;
            });
        }
        // Lays out the eight menu buttons on a semicircle centered on the
        // left screen edge (a radial menu), sweeping from +90° downwards.
        private Node createBody() {
            double midY = getAppHeight() / 2.0;
            double distance = midY - 25;
            var btnContinue = createActionButton(localizedStringProperty("menu.continue"), this::fireContinue);
            var btn1 = createActionButton(localizedStringProperty("menu.newGame"), this::fireNewGame);
            var btn2 = createActionButton(createStringBinding(() -> "PLACEHOLDER 1"), this::fireNewGame);
            var btn3 = createActionButton(createStringBinding(() -> "PLACEHOLDER 2"), this::fireNewGame);
            var btn4 = createActionButton(createStringBinding(() -> "PLACEHOLDER 3"), this::fireNewGame);
            var btn5 = createActionButton(createStringBinding(() -> "PLACEHOLDER 4"), this::fireNewGame);
            var btn6 = createActionButton(createStringBinding(() -> "PLACEHOLDER 5"), this::fireNewGame);
            var btn7 = createActionButton(localizedStringProperty("menu.exit"), this::fireExit);
            Group group = new Group(btnContinue, btn1, btn2, btn3, btn4, btn5, btn6, btn7);
            double dtheta = Math.PI / (group.getChildren().size() - 1);
            double angle = Math.PI / 2;
            int i = 0;
            for (Node n : group.getChildren()) {
                // Point on the semicircle; first and last buttons sit flush,
                // the middle ones are pushed 100px left to hug the arc.
                Point2D vector = new Point2D(Math.cos(angle), -Math.sin(angle))
                        .normalize()
                        .multiply(distance)
                        .add(0, midY);
                n.setLayoutX(vector.getX() - (i == 0 || i == 7 ? 0 : 100));
                n.setLayoutY(vector.getY());
                angle -= dtheta;
                // slightly hacky way to get a nice looking radial menu
                // we assume that there are 8 items
                if (i == 0 || i == group.getChildren().size() - 2) {
                    angle -= dtheta / 2;
                } else if (i == 2 || i == 4) {
                    angle += dtheta / 4;
                } else if (i == 3) {
                    angle += dtheta / 2;
                }
                i++;
            }
            return group;
        }
        /**
         * Creates a new button with given name that performs given action on click/press.
         * The button's fill reacts to hover, and a counter-translated clip
         * produces a "reveal" effect while the button slides in from x=-200.
         *
         * @param name button name (with binding)
         * @param action button action
         * @return new button
         */
        private Node createActionButton(StringBinding name, Runnable action) {
            var bg = new Rectangle(200, 50);
            bg.setEffect(new BoxBlur());
            var text = getUIFactoryService().newText(name);
            text.setTranslateX(15);
            text.setFill(Color.BLACK);
            var btn = new StackPane(bg, text);
            // Hover highlight via a fill binding (no CSS needed).
            bg.fillProperty().bind(when(btn.hoverProperty())
                    .then(Color.LIGHTGREEN)
                    .otherwise(Color.DARKGRAY)
            );
            btn.setAlignment(Pos.CENTER_LEFT);
            btn.setOnMouseClicked(e -> action.run());
            // clipping: the clip tracks the inverse of the translation so the
            // button appears to be revealed as it slides into place.
            buttons.add(btn);
            Rectangle clip = new Rectangle(200, 50);
            clip.translateXProperty().bind(btn.translateXProperty().negate());
            btn.setTranslateX(-200);
            btn.setClip(clip);
            btn.setCache(true);
            btn.setCacheHint(CacheHint.SPEED);
            return btn;
        }
}
    // Standard FXGL entry point.
    public static void main(String[] args) {
        launch(args);
    }
}
|
require "google/cloud/bigquery"
class ExportTablesToBigQuery
include Google::Cloud
BIGQUERY_DATASET = ENV["BIG_QUERY_DATASET"]
# This is to allow us to load new tables to the production dataset without disturbing the existing ones.
BIGQUERY_TABLE_PREFIX = "feb20".freeze
# How many rows to process at one time, and how many threads at the same time
# Deliberately low to prevent memory starvation
BATCH_SIZE = 100
THREADS = 1
# Attributes with postgres schema types that do not exist in BigQuery.
CONVERT_THESE_TYPES = { point: :float, text: :string, uuid: :string, json: :string, jsonb: :string }.freeze
# Skip attributes that cannot be queried, we do not report on or that frequently break the import.
# Drop gias_data because it is aliased to data. This alias allows all records to be handled the same way and dropping
# gias_data removes duplication of data.
DROP_THESE_ATTRIBUTES = %w[
benefits
description
education
experience
frequency
geolocation
gias_data
job_summary
legacy_job_roles
qualifications
readable_phases
].freeze
# This is to deal with a gem that automatically maps an integer column to a look up table of strings.
ENUM_ATTRIBUTES = {
"candidate_hired_from" => :string,
"category" => :string,
"contract_type" => :string,
"end_listing_reason" => :string,
"frequency" => :string,
"hired_status" => :string,
"job_roles" => :string,
"job_location" => :string,
"listed_elsewhere" => :string,
"phase" => :string,
"reason" => :string,
"status" => :string,
"user_participation_response" => :string,
"visit_purpose" => :string,
"working_patterns" => :string,
}.freeze
EXCLUDE_TABLES = %w[
alert_runs
ar_internal_metadata
feedbacks
friendly_id_slugs
location_polygons
schema_migrations
sessions
spatial_ref_sys
].freeze
TYPES_TO_CONVERT_FROM_DATE = %i[datetime date].freeze
TYPES_TO_CONVERT_FROM_JSON = %i[json jsonb].freeze
attr_reader :dataset, :runtime, :tables
def initialize(bigquery: Bigquery.new)
@dataset = bigquery.dataset(BIGQUERY_DATASET)
@runtime = Time.current.to_s(:db).parameterize
# This ensures that new tables are automatically added to the BigQuery dataset.
@tables = ApplicationRecord.connection.tables
.reject { |table| EXCLUDE_TABLES.include?(table) }
.sort
.map { |table| table.camelize.singularize }
.freeze
end
def run!
Rails.logger.info({ bigquery_export: "started" }.to_json)
tables.each do |table|
bigquery_load(table.constantize)
rescue StandardError => e
# If any table causes an uncaught error, no data from any later table is sent.
# Catch errors and skip the failing tables
Rollbar.error("The '#{table}' table failed during the BigQuery export job.", details: e)
Rails.logger.error({ bigquery_export: "error", status: "handled", table: table, message: e })
end
Rails.logger.info({ bigquery_export: "finished" }.to_json)
end
private
def bigquery_data(record, table)
@bigquery_data = {}
table.columns.map do |c|
next if DROP_THESE_ATTRIBUTES.include?(c.name)
data = record.send(c.name)
# Another bloody enum gem edge case. Only in vacancies and causes that whole table to fail despite the column
# being nullable.
data = "" if c.name == "hired_status" && data.nil?
data = data.to_s if data.is_a?(Array)
data = data.to_s(:db) if !data.nil? && TYPES_TO_CONVERT_FROM_DATE.include?(c.type)
data = data.to_s if TYPES_TO_CONVERT_FROM_JSON.include?(c.type)
@bigquery_data[c.name] = data
end
if table.column_names.include?("geolocation")
@bigquery_data["geolocation_x"] = record.geolocation&.x
@bigquery_data["geolocation_y"] = record.geolocation&.y
end
json_record = record.data if record.respond_to?(:data)
return @bigquery_data if json_record.nil?
json_record.map do |key, value|
data = value.presence
data = Date.parse(data).to_s(:db) if data.is_a?(String) && data.match?(%r{^\d{2}[-/]\d{2}[-/]\d{4}})
data = data.to_i if data.is_a?(String) && data.match?(/^\d+$/)
@bigquery_data[data_key_name(key)] = data
end
@bigquery_data
end
def bigquery_load(db_table)
started_at = Time.current.to_s(:db)
table_name = [BIGQUERY_TABLE_PREFIX, db_table.to_s.downcase].join("_")
dataset.table(table_name)&.delete
bq_table = dataset.table(table_name) || dataset.create_table(table_name) do |schema|
bigquery_schema(db_table).each do |column_name, column_type|
schema.send(column_type, column_name)
end
end
record_count = total = db_table.count
error_count = 0
inserter = bq_table.insert_async(
ignore_unknown: true,
skip_invalid: true,
max_rows: BATCH_SIZE,
threads: THREADS,
) do |result|
if result.error?
Rails.logger.error({
table: table_name,
error: result.error,
}.to_json)
else
Rails.logger.info({
table: table_name,
inserted: result.insert_count,
remaining: total,
error_count: result.error_count,
}.to_json)
if result.error_count.positive?
Rollbar.warning(result.insert_errors)
Rails.logger.error(result.insert_errors)
error_count += result.error_count
record_count -= result.error_count
end
total -= result.insert_count
end
end
db_table.find_in_batches(batch_size: inserter.max_rows) do |batch|
inserter.insert(batch.map { |record| bigquery_data(record, db_table) })
end
monitoring({
error_count: error_count,
finished_at: Time.current.to_s(:db),
records_processed: record_count,
started_at: started_at,
table: table_name,
})
inserter.stop.wait!
end
def bigquery_schema(table)
@bigquery_schema = {}
table.columns.map { |c|
next if DROP_THESE_ATTRIBUTES.include?(c.name)
@bigquery_schema[c.name] = ENUM_ATTRIBUTES[c.name] || CONVERT_THESE_TYPES[c.type] || c.type
}.compact
if table.column_names.include?("geolocation")
@bigquery_schema["geolocation_x"] = :float
@bigquery_schema["geolocation_y"] = :float
end
return @bigquery_schema unless table == Organisation
json_template = School.where.not(data: nil).first.data.merge(SchoolGroup.where.not(data: nil).first.data)
json_template.sort_by { |k, _| k }.map do |key, value|
data_type = :string
data_type = :date if key.match?(/date/i)
data_type = :integer if value.match(/^\d+$/) && !key.match(/diocese/i)
@bigquery_schema[data_key_name(key)] = data_type
end
@bigquery_schema
end
def data_key_name(key)
"data_#{key.chomp(')').gsub(/\W+/, '_').downcase}"
end
def monitoring(data)
Rails.logger.info(data.to_json)
table = dataset.table("monitoring") || dataset.create_table("monitoring") do |schema|
schema.integer "error_count"
schema.timestamp "finished_at"
schema.integer "records_processed"
schema.timestamp "started_at"
schema.string "table"
end
table.insert(data)
end
end
|
{-|
Description : Game ends when network detects SDL_QuitEvent or Escape key press.
Example : 002
Copyright : (c) Archibald Neil MacDonald, 2018
License : BSD3
Maintainer : FortOyer@hotmail.co.uk
This is the second example that shows how quitting can be done via the event
pump.
In this example we have made heavy use of our manager and convenience
functions, as well as our custom SDL looper. This example is more typical of
a user's workflow than example 001.
Basic test of our event system using reactive banana. This test does the
following:
1. Sets up our SDL init with the start function, followed by some managed
methods to retrieve other SDL resources.
2. Sets up our network graph which hooks into our SDL events. Filters for
quit events.
3. Loops until the quit callback has been invoked.
-}
{-# LANGUAGE OverloadedStrings #-}
module DynamicQuit (main) where
import Examples.Common
import Reactive.Banana
import Reactive.Banana.Frameworks
import qualified Reactive.Banana.SDL.Events as BSDL
import qualified Reactive.Banana.SDL.Managed as BSDL
import qualified Reactive.Banana.SDL as BSDL
import qualified SDL
-- | Entry point: start SDL with a window, install the event watcher, compile
-- and actuate the banana network, then loop until the quit handle fires.
main :: IO ()
main = BSDL.start "Dynamic quit" SDL.defaultWindow $
  \game ->
    -- Create our event watcher
    BSDL.withEventWatchHandler $ \eventHandle ->
      -- Compile and run our network
      do network <- compile $ networkDesc (BSDL.quit game) eventHandle
         actuate network
         BSDL.run game
-- | Network description: logs every SDL event and invokes the quit handle
-- on either an SDL_QuitEvent or an Escape key press.
networkDesc :: BSDL.QuitHandle -> BSDL.EventHandler -> MomentIO ()
networkDesc quit eventHandler = do
  -- Listen for all SDL events.
  eSDL <- fromAddHandler eventHandler
  -- Quit event triggers whenever escape key is pressed or an SDL_QuitEvent
  -- appears in stream. Filter eSDL for these cases.
  let eQuit = unionWith const (() <$ filterE isQuitEvent eSDL)
                              (() <$ filterE isEscKey eSDL)
  -- Print out all events we have detected.
  reactimate $ print <$> eSDL
  -- Call quit handler when a quit is detected.
  reactimate $ quit <$> eQuit
|
#!/usr/bin/env bash
# Applies DoD STIG hardening (plus FIPS mode) to a RHEL7 image at build
# time, then reboots so kernel args and the regenerated initramfs apply.
# Requires HARDENING_FLAG to be set (nounset aborts otherwise).
set -o pipefail
set -o nounset
set -o errexit
# enable dod stig
if [ "${HARDENING_FLAG}" = "stig" ]; then
    # install dependencies
    yum install -y dracut-fips-aesni dracut-fips openscap openscap-scanner scap-security-guide
    # we will configure FIPS ourselves as the generated STIG locks the OS
    # configure dracut-fips
    dracut -f
    # update the kernel settings
    grubby --update-kernel=ALL --args="fips=1"
    # configure this to meet the stig checker
    sed -i "/^GRUB_CMDLINE_LINUX/ s/\"$/ fips=1\"/" /etc/default/grub
    # set the ssh ciphers
    sed -i 's/^Cipher.*/Ciphers aes128-ctr,aes192-ctr,aes256-ctr/' /etc/ssh/sshd_config
    sed -i 's/^MACs.*/MACs hmac-sha2-256,hmac-sha2-512/' /etc/ssh/sshd_config
    # run stig hardening without FIPS as it breaks EC2 booting because /boot isn't on its
    # own partition
    oscap xccdf generate fix \
        --output /etc/packer/stig.sh \
        --tailoring-file /etc/packer/files/ssg-rhel7-ds-tailoring.xml \
        --profile xccdf_org.ssgproject.content_profile_stig_aws \
        --fetch-remote-resources \
        /usr/share/xml/scap/ssg/content/ssg-rhel7-ds.xml
    /etc/packer/stig.sh
    reboot
fi
|
# Serialize any Granularity for the Druid JSON API by dropping `nothing` fields.
JSON.lower(g::Granularity) = non_nothing_dict(g)
"""
SimpleGranularity(name::String)
One of the simple predefined granularities of Druid.
"""
struct SimpleGranularity <: Granularity
name::String
function SimpleGranularity(name)
name = lowercase(name)
name โ [
"all", "none", "second", "minute", "fifteen_minute", "thirty_minute",
"hour", "day", "week", "month", "quarter", "year"
] || error("Unknown type of simple granularity")
new(name)
end
end
JSON.lower(sg::SimpleGranularity) = sg.name
"""
DurationGranularity(duration::UInt64; origin=nothing)
Duration type granularity where the duration is specified as milliseconds since
origin.
Specifying origin is not required (defaults to Druid's default), but should be a
ISO8601 datetime string if you do specify it.
"""
struct DurationGranularity <: Granularity
type::String
duration::UInt64
origin
function DurationGranularity(duration; origin=nothing)
nothing_or_type(origin, String)
new("duration", duration, origin)
end
end
"""
PeriodGranularity(period::String; origin=nothing, timezone=nothing)
Period type granularity where the period is specified as an ISO8601 period
string. Period starts on origin in the timezone.
Specifying origin and timezone is not required (defaults to Druid's default).
But if you do specify them, origin should be a ISO8601 datetime string and
timezone should be one of those [supported by
Druid](https://druid.apache.org/docs/latest/querying/granularities.html#supported-time-zones).
"""
struct PeriodGranularity <: Granularity
type::String
period::String
origin
timeZone
function PeriodGranularity(period; origin=nothing, timezone=nothing)
nothing_or_type(origin, String)
nothing_or_type(timezone, String)
new("period", period, origin, timezone)
end
end
|
require 'securerandom'
module CoinAPI
class XRP < BaseAPI
def initialize(*)
super
@json_rpc_call_id = 0
@json_rpc_endpoint = URI.parse(currency.json_rpc_endpoint)
end
def endpoint
@json_rpc_endpoint
end
def to_address(tx)
normalize_address(tx['Destination'])
end
def from_address(tx)
normalize_address(tx['Account'])
end
def load_balance!(address, currency)
json_rpc(:account_info, [account: normalize_address(address), ledger_index: 'validated', strict: true])
.fetch('result')
.fetch('account_data')
.fetch('Balance')
.to_d
.yield_self { |amount| convert_from_base_unit(amount, currency) }
rescue => e
report_exception_to_screen(e)
0.0
end
def build_transaction(tx:, currency:)
{
id: tx.fetch('hash'),
entries: build_entries(tx, currency)
}
end
def build_entries(tx, currency)
[
{
amount: convert_from_base_unit(tx.fetch('Amount'), currency)
}
]
end
def inspect_address!(address)
{
address: normalize_address(address),
is_valid: valid_address?(normalize_address(address))
}
end
def fetch_transactions(ledger_index)
json_rpc(
:ledger,
[{
"ledger_index": ledger_index || 'validated',
"transactions": true,
"expand": true
}]
).dig('result', 'ledger', 'transactions') || []
end
def latest_block_number
Rails.cache.fetch :latest_ripple_ledger, expires_in: 5.seconds do
response = json_rpc(:ledger, [{"ledger_index": 'validated'}])
response.dig('result', 'ledger_index').to_i
end
end
def create_raw_address!(options = {})
secret = options.fetch(:secret) {Passgen.generate(length: 64, symbols: true)}
json_rpc(:wallet_propose, {passphrase: secret}).fetch('result')
.yield_self do |result|
result.slice('key_type', 'master_seed', 'master_seed_hex',
'master_key', 'public_key', 'public_key_hex')
.merge(address: normalize_address(result.fetch('account_id')), secret: secret)
.symbolize_keys
end
end
def destination_tag_from(address)
address =~ /\?dt=(\d*)\Z/
$1.to_i
end
def load_deposit!(txid)
json_rpc(:tx, [transaction: txid]).fetch('result').yield_self do |tx|
next unless tx['status'].to_s == 'success'
#next unless tx['validated']
next unless valid_address?(normalize_address(tx['Destination'].to_s))
next unless tx['TransactionType'].to_s == 'Payment'
#next unless tx.dig('meta', 'TransactionResult').to_s == 'tesSUCCESS'
#next if tx['DestinationTag'].present?
next unless String === tx['Amount']
xrp_val = {id: tx.fetch('hash'),
confirmations: calculate_confirmations(tx),
entries: [{amount: convert_from_base_unit(tx.fetch('Amount'), @currency),
address: normalize_address(tx['Destination'])}]}
return xrp_val
end
end
def each_deposit!
each_batch_of_deposits do |deposits|
deposits.each {|deposit| yield deposit if block_given?}
end
end
def each_deposit
each_batch_of_deposits false do |deposits|
deposits.each {|deposit| yield deposit if block_given?}
end
end
# Returns fee in drops that is enough to process transaction in current ledger
def calculate_current_fee
json_rpc(:fee, {}).fetch('result').yield_self do |result|
result.dig('drops', 'open_ledger_fee')
end
end
#Admin Method
def xrp_info
json_rpc(:fetch_info,{clear:'false'}).fetch('result')
end
protected
def connection
Faraday.new(@json_rpc_endpoint).tap do |connection|
unless @json_rpc_endpoint.user.blank?
connection.basic_auth(@json_rpc_endpoint.user, @json_rpc_endpoint.password)
end
end
end
memoize :connection
def json_rpc(method, params = [])
body = {
jsonrpc: '1.0',
id: @json_rpc_call_id += 1,
method: method,
params: [params].flatten
}.to_json
headers = {
'Accept' => 'application/json',
'Content-Type' => 'application/json'
}
connection.post('/', body, headers).yield_self do |response|
response.assert_success!.yield_self do |response|
JSON.parse(response.body).tap do |response|
response.dig('result', 'error').tap do |error|
raise Error, error.inspect if error.present?
end
end
end
end
end
def normalize_address(address)
address.gsub(/\?dt=\d*\Z/, '')
end
# def normalize_txid(txid)
# txid.downcase
# end
def valid_address?(address)
/\Ar[0-9a-zA-Z]{33}(:?\?dt=[1-9]\d*)?\z/.match?(address)
end
def each_batch_of_deposits(raise = true)
offset = 0
collected = []
loop do
begin
# Nullify variables before running dangerous code.
response = nil
batch_deposits = nil
response = json_rpc(:tx_history, [start: offset])
batch_deposits = build_deposit_collection(response.fetch('result').fetch('txs'))
offset += batch_deposits.count
rescue => e
report_exception(e)
raise e if raise
end
collected += batch_deposits if batch_deposits
yield batch_deposits if batch_deposits
break if response.blank? || !more_deposits_available?(response)
end
collected
end
def build_deposit_collection(txs)
txs.map do |tx|
next unless tx['TransactionType'].to_s == 'Payment'
next unless address?(normalize_address(tx['Destination'].to_s))
next if tx['DestinationTag'].present?
next unless String === tx['Amount']
{id: tx.fetch('hash'),
confirmations: calculate_confirmations(tx),
entries: [{amount: convert_from_base_unit(tx.fetch('Amount'), @currency),
address: normalize_address(tx['Destination'])}]}
end.compact
end
def more_deposits_available?(response)
response.fetch('result').fetch('txs').present?
end
# def calculate_confirmations(tx)
# tx.fetch('LastLedgerSequence') { tx.fetch('ledger_index') } - tx.fetch('inLedger')
# end
def calculate_confirmations(tx, ledger_index = nil)
ledger_index ||= tx.fetch('ledger_index')
latest_block_number - ledger_index
end
private
def generate_destination_tag
begin
# Reserve destination 1 for system purpose
tag = SecureRandom.random_number(10**9) + 2
end while PaymentAddress.where(currency: :xrp)
.where('address LIKE ?', "%dt=#{tag}")
.any?
tag
end
end
end
|
#ifndef GOALKEEPER_ENUMS_H_
#define GOALKEEPER_ENUMS_H_
/* Direction choice for a goalkeeper dive (or no dive at all). */
typedef enum JumpSide_e {
    JUMP_LEFT_SIDE, JUMP_RIGHT_SIDE, JUMP_MIDDLE_SIDE, DONT_JUMP
} JumpSide_e;
/* Direction choice for a sidestep without diving. */
typedef enum StepSide_e {
    MOVE_LEFT_STEP, MOVE_RIGHT_STEP, DONT_MOVE
} StepSide_e;
#endif
|
---
layout: default
title: Projects
---
<h2>{{ page.title }}</h2>
<div class="container">
{% for project in site.data.projects %}
<div class="project">
<h3>{{ project.title }} </h3>
<p> {{ project.description }} </p>
{% if project.img %}
<img src="{{ project.img }}"/>
{% endif %}
</div>
{% endfor %}
</div>
|
๏ปฟnamespace MonetDB.Driver.Handlers
{
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
public delegate void RowsCopiedEventHandler(object sender, RowsCopiedEventArgs e);
}
|
// Auto-generated register-access code (svd2rust style); regenerate rather
// than hand-edit.
#[doc = r" Value read from the register"]
pub struct R {
    bits: u32,
}
// Auto-generated writer proxy for the register (svd2rust style).
#[doc = r" Value to write to the register"]
pub struct W {
    bits: u32,
}
// Standard svd2rust accessor set: read-modify-write, read, write, reset.
impl super::CLOCKEN2STAT {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read once, let the closure edit a writer seeded with the current
        // bits, then write the result back.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike modify(), write() starts from the reset value.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
// One variant per clock-enable status bit; values are the bit masks (see
// bits()/ _from() below). Unknown bit patterns land in _Reserved.
#[doc = "Possible values of the field `CLOCKEN2STAT`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CLOCKEN2STATR {
    #[doc = "Clock enable for the IO MASTER 1 IFC INTERFACE value."]
    IOMSTRIFC1_CLKEN,
    #[doc = "Clock enable for the IO MASTER 2 IFC INTERFACE value."]
    IOMSTRIFC2_CLKEN,
    #[doc = "Clock enable for the IO MASTER 3 IFC INTERFACE value."]
    IOMSTRIFC3_CLKEN,
    #[doc = "Clock enable for the IO MASTER 4 IFC INTERFACE value."]
    IOMSTRIFC4_CLKEN,
    #[doc = "Clock enable for the IO MASTER 5 IFC INTERFACE value."]
    IOMSTRIFC5_CLKEN,
    #[doc = "Clock enable for the PDM value."]
    PDM_CLKEN,
    #[doc = "Clock enable for the PDM INTERFACE value."]
    PDMIFC_CLKEN,
    #[doc = "Clock enable for the PWRCTRL value."]
    PWRCTRL_CLKEN,
    #[doc = "Clock enable for the PWRCTRL counter value."]
    PWRCTRL_COUNT_CLKEN,
    #[doc = "Clock enable for the RSTGEN value."]
    RSTGEN_CLKEN,
    #[doc = "Clock enable for the SCARD value."]
    SCARD_CLKEN,
    #[doc = "Clock enable for the SCARD ALTAPB value."]
    SCARD_ALTAPB_CLKEN,
    #[doc = "Clock enable for the STIMER_CNT_CLKEN value."]
    STIMER_CNT_CLKEN,
    #[doc = "Clock enable for the TPIU_CLKEN value."]
    TPIU_CLKEN,
    #[doc = "Clock enable for the UART0 HF value."]
    UART0HF_CLKEN,
    #[doc = "Clock enable for the UART1 HF value."]
    UART1HF_CLKEN,
    #[doc = "Clock enable for the XT 32KHZ value."]
    XT_32KHZ_EN,
    #[doc = "HFRC is forced on Status. value."]
    FORCEHFRC,
    #[doc = r" Reserved"]
    _Reserved(u32),
}
impl CLOCKEN2STATR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        // One-hot masks: bits 0-15 cover the peripheral clock enables,
        // bit 30 is XT_32KHZ_EN and bit 31 is FORCEHFRC. Bits 16-29 have
        // no named variant and are only representable via `_Reserved`.
        match *self {
            CLOCKEN2STATR::IOMSTRIFC1_CLKEN => 1,
            CLOCKEN2STATR::IOMSTRIFC2_CLKEN => 2,
            CLOCKEN2STATR::IOMSTRIFC3_CLKEN => 4,
            CLOCKEN2STATR::IOMSTRIFC4_CLKEN => 8,
            CLOCKEN2STATR::IOMSTRIFC5_CLKEN => 16,
            CLOCKEN2STATR::PDM_CLKEN => 32,
            CLOCKEN2STATR::PDMIFC_CLKEN => 64,
            CLOCKEN2STATR::PWRCTRL_CLKEN => 128,
            CLOCKEN2STATR::PWRCTRL_COUNT_CLKEN => 256,
            CLOCKEN2STATR::RSTGEN_CLKEN => 512,
            CLOCKEN2STATR::SCARD_CLKEN => 1024,
            CLOCKEN2STATR::SCARD_ALTAPB_CLKEN => 2048,
            CLOCKEN2STATR::STIMER_CNT_CLKEN => 4096,
            CLOCKEN2STATR::TPIU_CLKEN => 8192,
            CLOCKEN2STATR::UART0HF_CLKEN => 16384,
            CLOCKEN2STATR::UART1HF_CLKEN => 32768,
            CLOCKEN2STATR::XT_32KHZ_EN => 1073741824,
            CLOCKEN2STATR::FORCEHFRC => 2147483648,
            CLOCKEN2STATR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u32) -> CLOCKEN2STATR {
        // Inverse of `bits()`. Unrecognised values (including multi-bit
        // combinations) are preserved verbatim in `_Reserved`, so the
        // conversion is lossless in both directions.
        match value {
            1 => CLOCKEN2STATR::IOMSTRIFC1_CLKEN,
            2 => CLOCKEN2STATR::IOMSTRIFC2_CLKEN,
            4 => CLOCKEN2STATR::IOMSTRIFC3_CLKEN,
            8 => CLOCKEN2STATR::IOMSTRIFC4_CLKEN,
            16 => CLOCKEN2STATR::IOMSTRIFC5_CLKEN,
            32 => CLOCKEN2STATR::PDM_CLKEN,
            64 => CLOCKEN2STATR::PDMIFC_CLKEN,
            128 => CLOCKEN2STATR::PWRCTRL_CLKEN,
            256 => CLOCKEN2STATR::PWRCTRL_COUNT_CLKEN,
            512 => CLOCKEN2STATR::RSTGEN_CLKEN,
            1024 => CLOCKEN2STATR::SCARD_CLKEN,
            2048 => CLOCKEN2STATR::SCARD_ALTAPB_CLKEN,
            4096 => CLOCKEN2STATR::STIMER_CNT_CLKEN,
            8192 => CLOCKEN2STATR::TPIU_CLKEN,
            16384 => CLOCKEN2STATR::UART0HF_CLKEN,
            32768 => CLOCKEN2STATR::UART1HF_CLKEN,
            1073741824 => CLOCKEN2STATR::XT_32KHZ_EN,
            2147483648 => CLOCKEN2STATR::FORCEHFRC,
            i => CLOCKEN2STATR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `IOMSTRIFC1_CLKEN`"]
    #[inline]
    pub fn is_iomstrifc1_clken(&self) -> bool {
        *self == CLOCKEN2STATR::IOMSTRIFC1_CLKEN
    }
    #[doc = "Checks if the value of the field is `IOMSTRIFC2_CLKEN`"]
    #[inline]
    pub fn is_iomstrifc2_clken(&self) -> bool {
        *self == CLOCKEN2STATR::IOMSTRIFC2_CLKEN
    }
    #[doc = "Checks if the value of the field is `IOMSTRIFC3_CLKEN`"]
    #[inline]
    pub fn is_iomstrifc3_clken(&self) -> bool {
        *self == CLOCKEN2STATR::IOMSTRIFC3_CLKEN
    }
    #[doc = "Checks if the value of the field is `IOMSTRIFC4_CLKEN`"]
    #[inline]
    pub fn is_iomstrifc4_clken(&self) -> bool {
        *self == CLOCKEN2STATR::IOMSTRIFC4_CLKEN
    }
    #[doc = "Checks if the value of the field is `IOMSTRIFC5_CLKEN`"]
    #[inline]
    pub fn is_iomstrifc5_clken(&self) -> bool {
        *self == CLOCKEN2STATR::IOMSTRIFC5_CLKEN
    }
    #[doc = "Checks if the value of the field is `PDM_CLKEN`"]
    #[inline]
    pub fn is_pdm_clken(&self) -> bool {
        *self == CLOCKEN2STATR::PDM_CLKEN
    }
    #[doc = "Checks if the value of the field is `PDMIFC_CLKEN`"]
    #[inline]
    pub fn is_pdmifc_clken(&self) -> bool {
        *self == CLOCKEN2STATR::PDMIFC_CLKEN
    }
    #[doc = "Checks if the value of the field is `PWRCTRL_CLKEN`"]
    #[inline]
    pub fn is_pwrctrl_clken(&self) -> bool {
        *self == CLOCKEN2STATR::PWRCTRL_CLKEN
    }
    #[doc = "Checks if the value of the field is `PWRCTRL_COUNT_CLKEN`"]
    #[inline]
    pub fn is_pwrctrl_count_clken(&self) -> bool {
        *self == CLOCKEN2STATR::PWRCTRL_COUNT_CLKEN
    }
    #[doc = "Checks if the value of the field is `RSTGEN_CLKEN`"]
    #[inline]
    pub fn is_rstgen_clken(&self) -> bool {
        *self == CLOCKEN2STATR::RSTGEN_CLKEN
    }
    #[doc = "Checks if the value of the field is `SCARD_CLKEN`"]
    #[inline]
    pub fn is_scard_clken(&self) -> bool {
        *self == CLOCKEN2STATR::SCARD_CLKEN
    }
    #[doc = "Checks if the value of the field is `SCARD_ALTAPB_CLKEN`"]
    #[inline]
    pub fn is_scard_altapb_clken(&self) -> bool {
        *self == CLOCKEN2STATR::SCARD_ALTAPB_CLKEN
    }
    #[doc = "Checks if the value of the field is `STIMER_CNT_CLKEN`"]
    #[inline]
    pub fn is_stimer_cnt_clken(&self) -> bool {
        *self == CLOCKEN2STATR::STIMER_CNT_CLKEN
    }
    #[doc = "Checks if the value of the field is `TPIU_CLKEN`"]
    #[inline]
    pub fn is_tpiu_clken(&self) -> bool {
        *self == CLOCKEN2STATR::TPIU_CLKEN
    }
    #[doc = "Checks if the value of the field is `UART0HF_CLKEN`"]
    #[inline]
    pub fn is_uart0hf_clken(&self) -> bool {
        *self == CLOCKEN2STATR::UART0HF_CLKEN
    }
    #[doc = "Checks if the value of the field is `UART1HF_CLKEN`"]
    #[inline]
    pub fn is_uart1hf_clken(&self) -> bool {
        *self == CLOCKEN2STATR::UART1HF_CLKEN
    }
    #[doc = "Checks if the value of the field is `XT_32KHZ_EN`"]
    #[inline]
    pub fn is_xt_32khz_en(&self) -> bool {
        *self == CLOCKEN2STATR::XT_32KHZ_EN
    }
    #[doc = "Checks if the value of the field is `FORCEHFRC`"]
    #[inline]
    pub fn is_forcehfrc(&self) -> bool {
        *self == CLOCKEN2STATR::FORCEHFRC
    }
}
#[doc = "Values that can be written to the field `CLOCKEN2STAT`"]
// Mirrors `CLOCKEN2STATR` minus the `_Reserved` catch-all: only the
// defined one-hot flag values may be written through this enum.
pub enum CLOCKEN2STATW {
    #[doc = "Clock enable for the IO MASTER 1 IFC INTERFACE value."]
    IOMSTRIFC1_CLKEN,
    #[doc = "Clock enable for the IO MASTER 2 IFC INTERFACE value."]
    IOMSTRIFC2_CLKEN,
    #[doc = "Clock enable for the IO MASTER 3 IFC INTERFACE value."]
    IOMSTRIFC3_CLKEN,
    #[doc = "Clock enable for the IO MASTER 4 IFC INTERFACE value."]
    IOMSTRIFC4_CLKEN,
    #[doc = "Clock enable for the IO MASTER 5 IFC INTERFACE value."]
    IOMSTRIFC5_CLKEN,
    #[doc = "Clock enable for the PDM value."]
    PDM_CLKEN,
    #[doc = "Clock enable for the PDM INTERFACE value."]
    PDMIFC_CLKEN,
    #[doc = "Clock enable for the PWRCTRL value."]
    PWRCTRL_CLKEN,
    #[doc = "Clock enable for the PWRCTRL counter value."]
    PWRCTRL_COUNT_CLKEN,
    #[doc = "Clock enable for the RSTGEN value."]
    RSTGEN_CLKEN,
    #[doc = "Clock enable for the SCARD value."]
    SCARD_CLKEN,
    #[doc = "Clock enable for the SCARD ALTAPB value."]
    SCARD_ALTAPB_CLKEN,
    #[doc = "Clock enable for the STIMER_CNT_CLKEN value."]
    STIMER_CNT_CLKEN,
    #[doc = "Clock enable for the TPIU_CLKEN value."]
    TPIU_CLKEN,
    #[doc = "Clock enable for the UART0 HF value."]
    UART0HF_CLKEN,
    #[doc = "Clock enable for the UART1 HF value."]
    UART1HF_CLKEN,
    #[doc = "Clock enable for the XT 32KHZ value."]
    XT_32KHZ_EN,
    #[doc = "HFRC is forced on Status. value."]
    FORCEHFRC,
}
impl CLOCKEN2STATW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u32 {
        // Same one-hot mapping as `CLOCKEN2STATR::bits()`: bits 0-15,
        // then bit 30 (XT_32KHZ_EN) and bit 31 (FORCEHFRC).
        match *self {
            CLOCKEN2STATW::IOMSTRIFC1_CLKEN => 1,
            CLOCKEN2STATW::IOMSTRIFC2_CLKEN => 2,
            CLOCKEN2STATW::IOMSTRIFC3_CLKEN => 4,
            CLOCKEN2STATW::IOMSTRIFC4_CLKEN => 8,
            CLOCKEN2STATW::IOMSTRIFC5_CLKEN => 16,
            CLOCKEN2STATW::PDM_CLKEN => 32,
            CLOCKEN2STATW::PDMIFC_CLKEN => 64,
            CLOCKEN2STATW::PWRCTRL_CLKEN => 128,
            CLOCKEN2STATW::PWRCTRL_COUNT_CLKEN => 256,
            CLOCKEN2STATW::RSTGEN_CLKEN => 512,
            CLOCKEN2STATW::SCARD_CLKEN => 1024,
            CLOCKEN2STATW::SCARD_ALTAPB_CLKEN => 2048,
            CLOCKEN2STATW::STIMER_CNT_CLKEN => 4096,
            CLOCKEN2STATW::TPIU_CLKEN => 8192,
            CLOCKEN2STATW::UART0HF_CLKEN => 16384,
            CLOCKEN2STATW::UART1HF_CLKEN => 32768,
            CLOCKEN2STATW::XT_32KHZ_EN => 1073741824,
            CLOCKEN2STATW::FORCEHFRC => 2147483648,
        }
    }
}
#[doc = r" Proxy"]
pub struct _CLOCKEN2STATW<'a> {
    // Mutable borrow of the register writer; the field setter methods on
    // this proxy funnel their result back into `w.bits`.
    w: &'a mut W,
}
impl<'a> _CLOCKEN2STATW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: CLOCKEN2STATW) -> &'a mut W {
        // SAFETY (of the unsafe call): every enum variant maps to a value
        // within the 32-bit field, so `bits` cannot write out of range.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "Clock enable for the IO MASTER 1 IFC INTERFACE value."]
    #[inline]
    pub fn iomstrifc1_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::IOMSTRIFC1_CLKEN)
    }
    #[doc = "Clock enable for the IO MASTER 2 IFC INTERFACE value."]
    #[inline]
    pub fn iomstrifc2_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::IOMSTRIFC2_CLKEN)
    }
    #[doc = "Clock enable for the IO MASTER 3 IFC INTERFACE value."]
    #[inline]
    pub fn iomstrifc3_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::IOMSTRIFC3_CLKEN)
    }
    #[doc = "Clock enable for the IO MASTER 4 IFC INTERFACE value."]
    #[inline]
    pub fn iomstrifc4_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::IOMSTRIFC4_CLKEN)
    }
    #[doc = "Clock enable for the IO MASTER 5 IFC INTERFACE value."]
    #[inline]
    pub fn iomstrifc5_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::IOMSTRIFC5_CLKEN)
    }
    #[doc = "Clock enable for the PDM value."]
    #[inline]
    pub fn pdm_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::PDM_CLKEN)
    }
    #[doc = "Clock enable for the PDM INTERFACE value."]
    #[inline]
    pub fn pdmifc_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::PDMIFC_CLKEN)
    }
    #[doc = "Clock enable for the PWRCTRL value."]
    #[inline]
    pub fn pwrctrl_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::PWRCTRL_CLKEN)
    }
    #[doc = "Clock enable for the PWRCTRL counter value."]
    #[inline]
    pub fn pwrctrl_count_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::PWRCTRL_COUNT_CLKEN)
    }
    #[doc = "Clock enable for the RSTGEN value."]
    #[inline]
    pub fn rstgen_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::RSTGEN_CLKEN)
    }
    #[doc = "Clock enable for the SCARD value."]
    #[inline]
    pub fn scard_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::SCARD_CLKEN)
    }
    #[doc = "Clock enable for the SCARD ALTAPB value."]
    #[inline]
    pub fn scard_altapb_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::SCARD_ALTAPB_CLKEN)
    }
    #[doc = "Clock enable for the STIMER_CNT_CLKEN value."]
    #[inline]
    pub fn stimer_cnt_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::STIMER_CNT_CLKEN)
    }
    #[doc = "Clock enable for the TPIU_CLKEN value."]
    #[inline]
    pub fn tpiu_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::TPIU_CLKEN)
    }
    #[doc = "Clock enable for the UART0 HF value."]
    #[inline]
    pub fn uart0hf_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::UART0HF_CLKEN)
    }
    #[doc = "Clock enable for the UART1 HF value."]
    #[inline]
    pub fn uart1hf_clken(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::UART1HF_CLKEN)
    }
    #[doc = "Clock enable for the XT 32KHZ value."]
    #[inline]
    pub fn xt_32khz_en(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::XT_32KHZ_EN)
    }
    #[doc = "HFRC is forced on Status. value."]
    #[inline]
    pub fn forcehfrc(self) -> &'a mut W {
        self.variant(CLOCKEN2STATW::FORCEHFRC)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // The field spans the whole register (MASK = 0xFFFF_FFFF, OFFSET 0),
        // so the clear step zeroes all bits before OR-ing in the new value.
        const MASK: u32 = 4294967295;
        const OFFSET: u8 = 0;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:31 - Clock enable status 2"]
    #[inline]
    pub fn clocken2stat(&self) -> CLOCKEN2STATR {
        // Field occupies the full 32-bit register, so the mask/shift below
        // is effectively an identity transform before decoding.
        CLOCKEN2STATR::_from({
            const MASK: u32 = 4294967295;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u32
        })
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // Register resets to all-zeros (all clock enables cleared).
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:31 - Clock enable status 2"]
    #[inline]
    pub fn clocken2stat(&mut self) -> _CLOCKEN2STATW {
        // Hand out a field proxy borrowing this writer.
        _CLOCKEN2STATW { w: self }
    }
}
|
package pub.devrel.easypermissionsx.helper
import android.app.Activity
import android.content.Context
import android.os.Build
import android.support.annotation.StyleRes
import androidx.appcompat.app.AppCompatActivity
import androidx.fragment.app.Fragment
/**
* Delegate class to make permission calls based on the 'host' (Fragment, Activity, etc).
*/
abstract class PermissionHelper<T>(val host: T) {

    abstract val context: Context

    // ========================================================================
    // Public concrete methods
    // ========================================================================

    /** True when the platform recommends showing a rationale for any of [perms]. */
    private fun shouldShowRationale(vararg perms: String): Boolean =
            perms.any { shouldShowRequestPermissionRationale(it) }

    /**
     * Request [perms], first showing a rationale dialog when the platform
     * indicates one is warranted for at least one permission.
     */
    fun requestPermissions(rationale: String,
                           positiveButton: String,
                           negativeButton: String,
                           @StyleRes theme: Int,
                           requestCode: Int,
                           vararg perms: String) {
        if (!shouldShowRationale(*perms)) {
            directRequestPermissions(requestCode, *perms)
        } else {
            showRequestPermissionRationale(
                    rationale, positiveButton, negativeButton, theme, requestCode, *perms)
        }
    }

    /** True when any of the already-denied [perms] was permanently denied. */
    fun somePermissionPermanentlyDenied(perms: List<String>): Boolean =
            perms.any { permissionPermanentlyDenied(it) }

    /** A denied permission counts as permanent once no rationale should be shown. */
    fun permissionPermanentlyDenied(perms: String): Boolean =
            !shouldShowRequestPermissionRationale(perms)

    /** True when at least one of [perms] is currently denied. */
    fun somePermissionDenied(vararg perms: String): Boolean =
            shouldShowRationale(*perms)

    // ========================================================================
    // Public abstract methods
    // ========================================================================

    abstract fun directRequestPermissions(requestCode: Int, vararg perms: String)

    abstract fun shouldShowRequestPermissionRationale(perm: String): Boolean

    abstract fun showRequestPermissionRationale(rationale: String,
                                                positiveButton: String,
                                                negativeButton: String,
                                                @StyleRes theme: Int,
                                                requestCode: Int,
                                                vararg perms: String)

    companion object {
        /** Choose the helper implementation matching [host]'s API level. */
        fun newInstance(host: AppCompatActivity): PermissionHelper<out AppCompatActivity> =
                if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
                    LowApiPermissionsHelper(host)
                } else {
                    AppCompatActivityPermissionHelper(host)
                }

        /** Choose the helper implementation matching [host]'s API level. */
        fun newInstance(host: Fragment): PermissionHelper<Fragment> =
                if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
                    LowApiPermissionsHelper(host)
                } else {
                    SupportFragmentPermissionHelper(host)
                }
    }
}
|
$:.unshift File.expand_path '../../lib', __FILE__
gem 'minitest'
require 'minitest/autorun'
require 'mocha/setup'
require 'nutcracker'
require 'tempfile'
require 'fileutils'
module Nutcracker
  module Unit
    # Base class for the unit-test suite: fixture lookup plus an
    # output-redirection helper.
    class TestCase < ::Minitest::Test
      # Absolute path of a fixture file shipped next to this helper.
      def fixture(name)
        File.expand_path("../fixtures/#{name}", __FILE__)
      end

      # Parse a YAML fixture and return the resulting object.
      def load_fixture(name)
        YAML.load_file(fixture(name))
      end

      # Run the block with STDOUT/STDERR redirected into +file+, restoring
      # the original streams afterwards even if the block raises.
      def redirect_output(file, &block)
        file.sync = true
        saved_err = STDERR.clone
        saved_out = STDOUT.clone
        STDERR.reopen(file)
        STDOUT.reopen(file)
        block.call
      ensure
        STDERR.reopen(saved_err)
        STDOUT.reopen(saved_out)
      end
    end
  end
end
|
Shopping list
===========
This is a shopping list manager hosted on Google App Engine composed of two App Engine modules:
- A frontend written using [Polymer][1]
- A backend written in [Go][2]
## Running locally
To run this application locally, install the [Go App Engine SDK][3] and then execute:
```
$ goapp serve dispatch.yaml frontend/app.yaml backend/app.yaml
```
[1]: https://www.polymer-project.org
[2]: https://golang.org
[3]: https://cloud.google.com/appengine/downloads
|
# Command class
This class represents the various operations available.
```csharp
public static class Command
```
## Public Members
| name | description |
| --- | --- |
| staticย [Collect](Command/Collect-apidoc)(โฆ) | Process coverage |
| staticย [FormattedVersion](Command/FormattedVersion-apidoc)() | Indicate the current version |
| staticย [ImportModule](Command/ImportModule-apidoc)() | Indicate how to consume for PowerShell |
| staticย [Prepare](Command/Prepare-apidoc)(โฆ) | Instrument assemblies |
| staticย [Summary](Command/Summary-apidoc)() | Return the last computed coverage summary |
| staticย [Version](Command/Version-apidoc)() | Indicate the current version |
## See Also
* namespaceย [AltCover](../AltCover.Engine-apidoc)
<!-- DO NOT EDIT: generated by xmldocmd for AltCover.Engine.dll -->
|
---
title: Repository of GeoGebra Apps
date: 2020-04-13T03:30:20.742Z
subtitle: Dynamic Illustrations made by me.
summary: My collection of GeoGebra apps.
draft: false
featured: false
image:
filename: screen-shot-2020-04-13-at-1.46.03-pm.png
focal_point: Smart
preview_only: false
---
## Statistics
* [Moving Mean](https://www.geogebra.org/m/whxwwvzf)
* [Moving Median](https://www.geogebra.org/m/p9zqn7nc)
* [Least Squares Regression WalkThrough](https://www.geogebra.org/m/r35gcn6u)
## Calculus
* [Instantaneous Slope from First Principles](https://www.geogebra.org/m/dvxbcmud)
* [Rectangle Integral Approximation](https://www.geogebra.org/m/rgtjuqn7) (under construction)
* [Graph of the Derivative](https://www.geogebra.org/m/zv6fkbgb)
## Geometry and Trigonometry
* [Trigonometric Ratios from the Unit Circle](https://www.geogebra.org/m/wb3vmqvg) (under construction)
Feel free to leave comments with feedback/suggestions/recommendations.
|
require 'sshmenu'
require 'gconf2'
##############################################################################
# = License
#
# Copyright 2002-2009 Grant McLean <grant@mclean.net.nz>
#
# This package is free software; you can redistribute it and/or modify it
# under the terms of the License.txt file (a BSD-style license) distributed
# with the software:
#
# http://sshmenu.git.sourceforge.net/git/gitweb.cgi?p=sshmenu;a=blob;f=License.txt;hb=HEAD
#
# = Description
#
# Classes in the GnomeSSHMenu module inherit from classes in the SSHMenu module
# and override methods to provide GNOME-specific functionality. The main
# differences are:
# * the use of gnome-terminal rather than xterm
# * support for gnome-terminal profiles
# * support for opening multiple host connections as tabs within a single
# terminal window.
module GnomeSSHMenu
# The GnomeSSHMenu::Factory class overrides the SSHMenu::Factory class to
# provide alternate class mappings.
class Factory <SSHMenu::Factory
  # Default options: 'app.model' resolves to GnomeSSHMenu::Config.
  def self.option_defaults
    defaults = { :model_class => GnomeSSHMenu::Config }
    super.merge(defaults)
  end

  # Map 'app' to GnomeSSHMenu::App before applying the inherited defaults.
  def self.inject_defaults
    mapper.inject('app' => GnomeSSHMenu::App)
    super
  end
end
############################################################################
# The GnomeSSHMenu::App class builds on SSHMenu::App to add support for
# gnome-terminal and tabbed windows.
class App <SSHMenu::App
  # Sets default class mappings to refer to GnomeSSHMenu dialog classes.
  def inject_defaults
    mapper.inject(
      'app.dialog.prefs' => GnomeSSHMenu::PrefsDialog,
      'app.dialog.host' => GnomeSSHMenu::HostDialog,
      'app.dialog.menu' => GnomeSSHMenu::MenuDialog
    )
    super
  end

  # Given a GnomeSSHMenu::HostItem, constructs a command line to open a
  # gnome-terminal window containing an SSH connection to the host.
  def build_window_command(host)
    command = 'gnome-terminal'
    if host.env_settings and host.env_settings.length > 0
      # NOTE(review): env settings are prepended as "VAR=val ... gnome-terminal";
      # --disable-factory presumably forces a fresh terminal process so those
      # overrides take effect -- confirm against the gnome-terminal man page.
      command = host.env_settings + command + ' --disable-factory'
    end
    if host.geometry and host.geometry.length > 0
      command += " --geometry=#{host.geometry}"
    end
    if host.profile and host.profile.length > 0
      command += ' --window-with-profile=' + shell_quote(host.profile)
    end
    command += ' --title=' + shell_quote(host.title)
    ssh_cmnd = "#{ssh_command(host)} #{host.sshparams_noenv}"
    # Quoted twice: once for the inner "sh -c <ssh ...>" command string and
    # once for the -e argument itself.
    command += ' -e ' + shell_quote("sh -c #{shell_quote(ssh_cmnd)}")
    # Trailing '&' backgrounds the terminal so the menu stays responsive.
    return command + ' &';
  end

  # Called when the user selects 'Open all as tabs' from a sub menu. Calls
  # build_tabbed_window_command to open a multi-tabbed terminal window.
  def open_tabs(menu)
    return unless menu.items.length > 0
    add_key
    system(build_tabbed_window_command(menu))
  end

  # Given a GnomeSSHMenu::MenuItem, constructs a command line to open a
  # gnome-terminal window containing multiple tabs - each with an SSH
  # connection to a host.
  def build_tabbed_window_command(menu)
    command = 'gnome-terminal'
    first_host = true
    menu.items.each do |i|
      if i.host?
        # Env settings and geometry are taken from the FIRST host only;
        # later hosts contribute just a tab (profile + title + command).
        if first_host
          if i.env_settings and i.env_settings.length > 0
            command = i.env_settings + command + ' --disable-factory'
          end
          if i.geometry and i.geometry.length > 0
            command += " --geometry=#{i.geometry}"
          end
        end
        if i.profile and i.profile.length > 0
          command += ' --tab-with-profile=' + shell_quote(i.profile)
        else
          command += " --tab-with-profile=Default"
        end
        command += ' --title=' + shell_quote(i.title)
        ssh_cmnd = "#{ssh_command(i)} #{i.sshparams_noenv}"
        command += ' -e ' + shell_quote("sh -c #{shell_quote(ssh_cmnd)}")
        first_host = false
      end
    end
    return command + ' &'
  end

  # Helper routine for SSHMenu::App#show_hosts_menu. Adds the 'Open all as
  # tabs' menu item if the option is enabled. Returns true when a separator
  # is needed after the added items.
  def menu_add_menu_options(mif, parents, item)
    return unless item.has_children?
    need_sep = super(mif, parents, item)
    if @config.menus_open_tabs?
      mif.create_item(
        item_path(parents, item) + '/Open all as tabs', "<Item>"
      ) { open_tabs(item) }
      need_sep = true
    end
    return need_sep
  end

  # Debian's 'popcon' (Popularity Contest) normally reports the sshmenu-gnome
  # package as 'installed but not used' since the panel applet does not
  # access /usr/bin/sshmenu-gnome. This routine updates the atime on that
  # file each time the applet starts. This functionality is completely
  # non-essential and can be safely disabled in the unlikely event that it
  # causes some problem.
  def appease_popcon # :nodoc:
    # update access time on a file the Debian popcon is looking at :-)
    # Best-effort by design: missing file or permission errors are ignored.
    begin
      open('/usr/bin/sshmenu-gnome') { |f| f.readline }
    rescue Exception
    end
    super
  end
end
############################################################################
# The GnomeSSHMenu::Config class builds on SSHMenu::Config to add support for
# gnome-terminal with configurable profiles and tabbed windows.
class Config <SSHMenu::Config
  # GConf key for retrieving a list of terminal profiles.
  GnomeTermProfiles = '/apps/gnome-terminal/global/profile_list'
  # GConf key template to get visible name for a profile.
  GnomeTermProfName = '/apps/gnome-terminal/profiles/%s/visible_name'

  # Map the model item classes to their GnomeSSHMenu implementations.
  def inject_defaults
    mappings = {
      'app.model.item'     => GnomeSSHMenu::Item,
      'app.model.hostitem' => GnomeSSHMenu::HostItem,
      'app.model.menuitem' => GnomeSSHMenu::MenuItem
    }
    mapper.inject(mappings)
    super
  end

  # Fetch the visible names of all gnome-terminal profiles from GConf,
  # sorted case-insensitively. Returns an empty list when none are defined.
  def list_profiles
    client = GConf::Client.default
    names  = client[GnomeTermProfiles]
    return [] unless names
    titles = []
    names.each do |prof_name|
      title = client[sprintf(GnomeTermProfName, prof_name)]
      titles << title unless title.nil?
    end
    titles.sort { |a, b| a.upcase <=> b.upcase }
  end

  # True when the 'Open all as tabs' global option is enabled (stored as 1/0).
  def menus_open_tabs?
    opt = get('menus_open_tabs')
    opt ? opt != 0 : false
  end

  # Persist the 'Open all as tabs' global option as 1 or 0.
  def menus_open_tabs=(val)
    flag = val ? 1 : 0
    set('menus_open_tabs', flag)
  end
end
############################################################################
# The GnomeSSHMenu::PrefsDialog class builds on SSHMenu::PrefsDialog to add
# support for tabbed gnome-terminal windows.
class PrefsDialog <SSHMenu::PrefsDialog
  # Persist the tabs checkbox state, then let the base class save the rest.
  def save_options
    @config.menus_open_tabs = @chk_open_tabs.active?
    super
  end

  # Append an "Open all as tabs" checkbox to the global options pane built
  # by SSHMenu::PrefsDialog#make_options_pane.
  def make_options_pane
    table = super
    row = table.get_property('n-rows')
    @chk_open_tabs = Gtk::CheckButton.new(
      'include "Open all as _tabs" selection', true
    )
    @chk_open_tabs.active = @config.menus_open_tabs?
    table.attach(@chk_open_tabs, 0, 1, row, row + 1,
                 Gtk::EXPAND | Gtk::FILL, Gtk::FILL, 0, 0)
    table
  end
end
############################################################################
# The GnomeSSHMenu::HostDialog class builds on SSHMenu::HostDialog to add
# support for gnome-terminal profiles.
class HostDialog <SSHMenu::HostDialog
  # Add a drop-down for selecting a terminal profile, then delegate to
  # SSHMenu::HostDialog#add_other_inputs. Entry 0 is the '< None >' choice.
  def add_other_inputs
    @profiles = @config.list_profiles
    prof_menu = Gtk::ComboBox.new(true)
    prof_menu.append_text('< None >')
    prof_menu.active = 0
    @profiles.each_with_index do |name, i|
      prof_menu.append_text(name)
      prof_menu.active = i + 1 if name == @host.profile
    end
    @profile_menu = add_input('Profile', @host.profile, prof_menu)
    super()
  end

  # Name of the profile currently selected in the drop-down menu, or ''
  # when '< None >' is selected.
  def selected_profile
    i = @profile_menu.active
    i > 0 ? @profiles[i - 1] : ''
  end

  # Delegates to SSHMenu::HostDialog#dialog_to_host and adds support for
  # saving the selected profile name onto the host item.
  def dialog_to_host(host=nil)
    result = super(host)
    result.profile = selected_profile
    result
  end
end
############################################################################
  # An empty class that inherits from SSHMenu::MenuDialog
class MenuDialog <SSHMenu::MenuDialog
  # Intentionally empty: exists so App#inject_defaults can map
  # 'app.dialog.menu' to a class in this module.
end
############################################################################
  # An empty class that inherits from SSHMenu::Item
class Item <SSHMenu::Item
  # Intentionally empty: exists so Config#inject_defaults can map
  # 'app.model.item' to a class in this module.
end
############################################################################
# Inherits from SSHMenu::HostItem and adds support for storing the selected
# gnome-terminal profile name.
class HostItem <SSHMenu::HostItem
  # Extend the inherited attribute list with the gnome-terminal profile
  # name selected for this host.
  def self.attributes
    super + [:profile]
  end
  make_accessors
end
############################################################################
  # An empty class that inherits from SSHMenu::MenuItem
class MenuItem <SSHMenu::MenuItem
  # Intentionally empty: exists so Config#inject_defaults can map
  # 'app.model.menuitem' to a class in this module.
end
end
|
from toolz import memoize
import numpy as np
from datashader.glyphs.line import _build_map_onto_pixel_for_line
from datashader.glyphs.points import _GeometryLike
from datashader.utils import ngjit
class PolygonGeom(_GeometryLike):
    """Glyph for rasterizing spatialpandas polygon / multi-polygon columns."""

    @property
    def geom_dtypes(self):
        """Geometry dtypes accepted by this glyph (polygon and multi-polygon)."""
        from spatialpandas.geometry import PolygonDtype, MultiPolygonDtype
        return PolygonDtype, MultiPolygonDtype

    @memoize
    def _build_extend(self, x_mapper, y_mapper, info, append):
        """Compile and return an ``extend(aggs, df, vt, bounds)`` function
        that rasterizes the geometry column of ``df`` into the aggregate
        buffers, using the supplied axis mappers and the ``append`` reducer.
        Memoized so repeated canvases reuse the compiled kernels.
        """
        expand_aggs_and_cols = self.expand_aggs_and_cols(append)
        map_onto_pixel = _build_map_onto_pixel_for_line(x_mapper, y_mapper)
        draw_polygon = _build_draw_polygon(
            append, map_onto_pixel, x_mapper, y_mapper, expand_aggs_and_cols
        )
        perform_extend_cpu = _build_extend_polygon_geometry(
            draw_polygon, expand_aggs_and_cols
        )
        geom_name = self.geometry

        def extend(aggs, df, vt, bounds, plot_start=True):
            # vt is the (scale_x, translate_x, scale_y, translate_y) view
            # transform; bounds is the data-space viewport.
            sx, tx, sy, ty = vt
            xmin, xmax, ymin, ymax = bounds
            aggs_and_cols = aggs + info(df)
            geom_array = df[geom_name].array
            perform_extend_cpu(
                sx, tx, sy, ty,
                xmin, xmax, ymin, ymax,
                geom_array, *aggs_and_cols
            )

        return extend
def _build_draw_polygon(append, map_onto_pixel, x_mapper, y_mapper, expand_aggs_and_cols):
    """Build the numba-compiled ``draw_polygon`` kernel that rasterizes a
    single (multi-ring) polygon via a winding-number scan-line algorithm.
    ``xs``/``ys``/``yincreasing``/``eligible`` are caller-provided scratch
    buffers reused across polygons to avoid per-polygon allocation.
    """
    @ngjit
    @expand_aggs_and_cols
    def draw_polygon(
            i, sx, tx, sy, ty, xmin, xmax, ymin, ymax,
            offsets, values, xs, ys, yincreasing, eligible,
            *aggs_and_cols
    ):
        """Draw a polygon using a winding-number scan-line algorithm
        """
        # Initialize values of pre-allocated buffers
        xs.fill(np.nan)
        ys.fill(np.nan)
        yincreasing.fill(0)
        eligible.fill(1)

        # First pass, compute bounding box of polygon vertices in data coordinates.
        # `values` interleaves x/y, so even indices are x and odd indices are y.
        start_index = offsets[0]
        stop_index = offsets[-1]
        # num_edges = stop_index - start_index - 2
        poly_xmin = np.min(values[start_index:stop_index:2])
        poly_ymin = np.min(values[start_index + 1:stop_index:2])
        poly_xmax = np.max(values[start_index:stop_index:2])
        poly_ymax = np.max(values[start_index + 1:stop_index:2])

        # skip polygon if outside viewport
        if (poly_xmax < xmin or poly_xmin > xmax
                or poly_ymax < ymin or poly_ymin > ymax):
            return

        # Compute pixel bounds for polygon, clipped to the viewport
        startxi, startyi = map_onto_pixel(
            sx, tx, sy, ty, xmin, xmax, ymin, ymax,
            max(poly_xmin, xmin), max(poly_ymin, ymin)
        )
        stopxi, stopyi = map_onto_pixel(
            sx, tx, sy, ty, xmin, xmax, ymin, ymax,
            min(poly_xmax, xmax), min(poly_ymax, ymax)
        )
        stopxi += 1
        stopyi += 1

        # Handle subpixel polygons (pixel width and/or height of polygon is 1)
        if (stopxi - startxi) == 1 and (stopyi - startyi) == 1:
            append(i, startxi, startyi, *aggs_and_cols)
            return
        elif (stopxi - startxi) == 1:
            for yi in range(min(startyi, stopyi) + 1, max(startyi, stopyi)):
                append(i, startxi, yi, *aggs_and_cols)
            return
        elif (stopyi - startyi) == 1:
            for xi in range(min(startxi, stopxi) + 1, max(startxi, stopxi)):
                append(i, xi, startyi, *aggs_and_cols)
            return

        # Build arrays of edges in canvas coordinates. Each edge is stored
        # with its lower-y vertex in column 0 and higher-y vertex in column 1;
        # `yincreasing` records the original orientation (+1 up, -1 down).
        ei = 0
        for j in range(len(offsets) - 1):
            start = offsets[j]
            stop = offsets[j + 1]
            for k in range(start, stop - 2, 2):
                x0 = values[k]
                y0 = values[k + 1]
                x1 = values[k + 2]
                y1 = values[k + 3]

                # Map to canvas coordinates without rounding
                x0c = x_mapper(x0) * sx + tx - 0.5
                y0c = y_mapper(y0) * sy + ty - 0.5
                x1c = x_mapper(x1) * sx + tx - 0.5
                y1c = y_mapper(y1) * sy + ty - 0.5

                if y1c > y0c:
                    xs[ei, 0] = x0c
                    ys[ei, 0] = y0c
                    xs[ei, 1] = x1c
                    ys[ei, 1] = y1c
                    yincreasing[ei] = 1
                elif y1c < y0c:
                    xs[ei, 1] = x0c
                    ys[ei, 1] = y0c
                    xs[ei, 0] = x1c
                    ys[ei, 0] = y1c
                    yincreasing[ei] = -1
                else:
                    # Skip horizontal edges
                    continue

                ei += 1

        # Perform scan-line algorithm
        num_edges = ei
        for yi in range(startyi, stopyi):
            # All edges eligible at start of new row
            eligible.fill(1)
            for xi in range(startxi, stopxi):
                # Init winding number
                winding_number = 0
                for ei in range(num_edges):
                    if eligible[ei] == 0:
                        # We've already determined that edge is above, below, or left
                        # of edge for the current pixel
                        continue

                    # Get edge coordinates.
                    # Note: y1c > y0c due to how xs/ys were populated
                    x0c = xs[ei, 0]
                    x1c = xs[ei, 1]
                    y0c = ys[ei, 0]
                    y1c = ys[ei, 1]

                    # Reject edges that are above, below, or left of current pixel.
                    # Note: Edge skipped if lower vertex overlaps,
                    # but is kept if upper vertex overlaps
                    if (y0c >= yi or y1c < yi
                            or (x0c < xi and x1c < xi)
                    ):
                        # Edge not eligible for any remaining pixel in this row
                        eligible[ei] = 0
                        continue

                    if xi <= x0c and xi <= x1c:
                        # Edge is fully to the right of the pixel, so we know ray to the
                        # the right of pixel intersects edge.
                        winding_number += yincreasing[ei]
                    else:
                        # Now check if edge is to the right of pixel using cross product
                        # A is vector from pixel to first vertex
                        ax = x0c - xi
                        ay = y0c - yi

                        # B is vector from pixel to second vertex
                        bx = x1c - xi
                        by = y1c - yi

                        # Compute cross product of B and A
                        bxa = (bx * ay - by * ax)

                        # bxa == 0 means the pixel lies exactly on the edge;
                        # the yincreasing test breaks the tie consistently.
                        if bxa < 0 or (bxa == 0 and yincreasing[ei]):
                            # Edge to the right
                            winding_number += yincreasing[ei]
                        else:
                            # Edge to left, not eligible for any remaining pixel in row
                            eligible[ei] = 0
                            continue

                if winding_number != 0:
                    # If winding number is not zero, point
                    # is inside polygon
                    append(i, xi, yi, *aggs_and_cols)

    return draw_polygon
def _build_extend_polygon_geometry(
        draw_polygon, expand_aggs_and_cols
):
    """Build the CPU driver that walks a (Multi)PolygonArray and calls
    ``draw_polygon`` once per geometry, pre-filtering with the spatial index.
    """
    def extend_cpu(
            sx, tx, sy, ty, xmin, xmax, ymin, ymax, geometry, *aggs_and_cols
    ):
        values = geometry.buffer_values
        missing = geometry.isna()
        offsets = geometry.buffer_offsets

        # Compute indices of potentially intersecting polygons using
        # geometry's R-tree
        eligible_inds = geometry.sindex.intersects((xmin, ymin, xmax, ymax))

        if len(offsets) == 3:
            # MultiPolygonArray
            offsets0, offsets1, offsets2 = offsets
        else:
            # PolygonArray: synthesize a trivial outer level so both cases
            # share the same three-level offset structure below.
            offsets1, offsets2 = offsets
            offsets0 = np.arange(len(offsets1))

        extend_cpu_numba(
            sx, tx, sy, ty, xmin, xmax, ymin, ymax,
            values, missing, offsets0, offsets1, offsets2, eligible_inds, *aggs_and_cols
        )

    @ngjit
    @expand_aggs_and_cols
    def extend_cpu_numba(
            sx, tx, sy, ty, xmin, xmax, ymin, ymax,
            values, missing, offsets0, offsets1, offsets2,
            eligible_inds, *aggs_and_cols
    ):
        # Pre-allocate temp arrays sized for the largest polygon so the
        # scratch buffers can be reused for every geometry.
        max_edges = 0
        if len(offsets0) > 1:
            for i in eligible_inds:
                if missing[i]:
                    continue

                polygon_inds = offsets1[offsets0[i]:offsets0[i + 1] + 1]
                for j in range(len(polygon_inds) - 1):
                    start = offsets2[polygon_inds[j]]
                    stop = offsets2[polygon_inds[j + 1]]
                    max_edges = max(max_edges, (stop - start - 2) // 2)

        xs = np.full((max_edges, 2), np.nan, dtype=np.float32)
        ys = np.full((max_edges, 2), np.nan, dtype=np.float32)
        yincreasing = np.zeros(max_edges, dtype=np.int8)

        # Initialize array indicating which edges are still eligible for processing
        eligible = np.ones(max_edges, dtype=np.int8)

        for i in eligible_inds:
            if missing[i]:
                continue

            polygon_inds = offsets1[offsets0[i]:offsets0[i + 1] + 1]
            for j in range(len(polygon_inds) - 1):
                start = polygon_inds[j]
                stop = polygon_inds[j + 1]

                draw_polygon(i, sx, tx, sy, ty, xmin, xmax, ymin, ymax,
                             offsets2[start:stop + 1], values,
                             xs, ys, yincreasing, eligible, *aggs_and_cols)

    return extend_cpu
|
# frozen_string_literal: true

require 'data_classification/migration'

# Rake tasks for maintaining data-classification metadata that is stored in
# database column comments.
namespace :data_classification do
  include DataClassification

  desc 'Classify your table/columns in bulk'
  task bulk_classify: :environment do
    classifications = []
    unclassified = []

    # Collect every column without a comment; a present comment is treated
    # as "already classified".
    ActiveRecord::Base.connection.tables.each do |table_name|
      ActiveRecord::Base.connection.columns(table_name).each do |column|
        next if column.comment.present?
        unclassified << [table_name, column.name]
      end
    end

    # Interactively prompt for each unclassified column:
    #   'q' stops prompting, 'n' skips the column, any other invalid input
    #   re-prompts for the same column via `redo`.
    unclassified.each do |table_name, column_name|
      puts "Table: #{table_name}, Column: #{column_name}"
      puts "Data classification (#{DATA_CLASSIFICATIONS.join(',')}, q(uit), n(ext) >"
      selection = STDIN.gets.chomp
      if selection == 'q'
        break
      elsif selection == 'n'
        next
      elsif DATA_CLASSIFICATIONS.map(&:to_s).include? selection
        classifications << [table_name, column_name, selection]
      else
        puts 'Invalid option, try again.'
        redo
      end
    end

    # Hand the collected "table:column:classification" triples to the
    # migration generator.
    if classifications.any?
      arguments = classifications.map { |classification| classification.join(':') }
      Rails::Generators.invoke 'data_classification:create', arguments
    else
      puts 'No classifications made'
    end
  end

  # Print "table,column,tags" (CSV-ish) for every column that already
  # carries a JSON classification comment.
  task list: :environment do
    ActiveRecord::Base.connection.tables.each do |table_name|
      ActiveRecord::Base.connection.columns(table_name).each do |column|
        next if column.comment.nil?
        data_classification = JSON.parse(column.comment)
        puts [table_name, column.name, data_classification['tags']].join(',')
      end
    end
  end
end
|
package com.abhrp.daily.cache.dao.feed
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import com.abhrp.daily.cache.constants.CacheSQLConstants
import com.abhrp.daily.cache.model.feed.CachedTimeItem
import io.reactivex.Maybe
/**
 * Room DAO for per-page cache timestamps (presumably used to judge the
 * staleness of cached feed pages — confirm against callers).
 */
@Dao
interface CacheTimeDao {

    /** Emits the stored cache time for [pageNo], or completes empty when none exists. */
    @Query(CacheSQLConstants.SELECT_CACHE_TIME)
    fun getLastCacheTime(pageNo: Int): Maybe<CachedTimeItem>

    /** Deletes the cache-time row for a single page. */
    @Query(CacheSQLConstants.DELETE_CACHE_ITEM)
    fun clearCacheTime(pageNo: Int)

    /** Deletes every stored cache-time row. */
    @Query(CacheSQLConstants.DELETE_ALL_CACHE)
    fun clearAllCacheTime()

    /** Inserts [cachedTimeItem], replacing any existing row with the same key. */
    @Insert(onConflict = OnConflictStrategy.REPLACE)
    fun saveCacheTime(cachedTimeItem: CachedTimeItem)
}
|
/// Base class for all item-related events; subclasses are immutable
/// (const constructors only).
abstract class ItemEvent {
  const ItemEvent();
}

/// Signals that the initial item load should start. [loadAll] presumably
/// switches between loading everything versus a first page — confirm
/// against the handler that consumes this event.
class ItemLoadStarted extends ItemEvent {
  final bool loadAll;

  const ItemLoadStarted({this.loadAll = false});
}

/// Signals that more items should be loaded (name suggests pagination).
class ItemLoadMoreStarted extends ItemEvent {}

/// Signals a selection change for the item identified by [itemId].
class ItemSelectChanged extends ItemEvent {
  final String itemId;

  const ItemSelectChanged({required this.itemId});
}
|
<?php
declare(strict_types=1);
namespace Edudobay\DoctrineSerializable;
use Edudobay\DoctrineSerializable\Attributes\Serializable;
use ReflectionProperty;
/**
 * Accumulates the serializable field mappings of one class and produces
 * its ClassMetadata.
 */
class ClassMetadataBuilder
{
    /** @var FieldMapping[] */
    private array $fields = [];

    /**
     * Register a domain property together with the persisted backing
     * property its serialized form is stored in.
     *
     * @param ReflectionProperty $property        Domain-level property.
     * @param Serializable       $serializable    Attribute configuring the (de)serialization.
     * @param ReflectionProperty $backingProperty Property that stores the serialized representation.
     *
     * @return self Fluent interface.
     */
    public function addProperty(
        ReflectionProperty $property,
        Serializable $serializable,
        ReflectionProperty $backingProperty,
    ): self {
        $this->fields[] = new FieldMapping(
            domainProperty: $property,
            backingProperty: $backingProperty,
            serializable: $serializable
        );
        return $this;
    }

    /** Build the metadata object from all fields registered so far. */
    public function build(): ClassMetadata
    {
        return new ClassMetadata($this->fields);
    }
}
|
package nagoya.kuu.miolife.ui.main
import android.content.Context
import android.view.ViewGroup
import androidx.recyclerview.widget.DiffUtil
import androidx.recyclerview.widget.ListAdapter
import nagoya.kuu.miolife.ui.main.viewentity.ContractViewEntity
/**
 * RecyclerView list adapter for contract (SIM) entries; uses DiffUtil with
 * [ContractViewEntity.id] as item identity.
 */
internal class ContractAdapter(
    private val context: Context
) : ListAdapter<ContractViewEntity, ContractViewHolder>(DIFF_UTIL) {

    companion object {
        // Item identity is the entity id; content equality delegates to
        // ContractViewEntity.equals().
        private val DIFF_UTIL = object : DiffUtil.ItemCallback<ContractViewEntity>() {
            override fun areItemsTheSame(
                oldItem: ContractViewEntity,
                newItem: ContractViewEntity
            ): Boolean {
                return oldItem.id == newItem.id
            }

            override fun areContentsTheSame(
                oldItem: ContractViewEntity,
                newItem: ContractViewEntity
            ): Boolean {
                return oldItem == newItem
            }
        }
    }

    // Click listener forwarded to every bound ViewHolder.
    // NOTE(review): "Oncliked" is a typo for "OnClicked", but both the
    // property and its type are part of this module's public surface, so a
    // rename must be coordinated with all usages.
    var simOnclikedListener: SimOnclikedListener? = null

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ContractViewHolder {
        return ContractViewHolder.create(context, parent)
    }

    override fun onBindViewHolder(holder: ContractViewHolder, position: Int) {
        holder.bindTo(getItem(position), simOnclikedListener, context)
    }
}
|
#!/bin/bash
# Exercise ./example's fscanf handling with a spread of inputs: an
# in-range number, a larger number, digits followed by trailing garbage,
# and a purely non-numeric string.
for input in 100 10000 100h 10000h abcd; do
    echo "input $input to fscanf"
    echo "$input" | ./example
done
|
# Load the dummy application's database schema (dummy/db/schema.rb),
# resolved relative to this file's parent directory.
dummy_db_dir = File.join(File.dirname(__FILE__), '..', 'dummy', 'db')
load File.join(dummy_db_dir, 'schema.rb')
|
package writer
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
/**
* Created by nnguyen on 09/11/16.
*/
object WriterTExample {
  import scalaz._
  import Scalaz._

  /** Basic Writer usage: the List[String] log accumulates via its monoid
    * while the values are summed; returns (log, 33).
    */
  def test(): (List[String], Int) = {
    def calc1 = Writer(List("doing calc"), 11)
    def calc2 = Writer(List("doing other"), 22)

    val r = for {
      a <- calc1
      b <- calc2
    } yield {
      a + b
    }
    r.run
  }

  /** WriterT over Future with a Map[String, Double] log: logs from each step
    * merge key-wise with the hand-rolled monoids below; the value is 33.
    */
  def test1(): WriterT[Future, Map[String, Double], Int] = {
    // Custom additive monoid on Double.
    val doubleMonoid: Monoid[Double] = new Monoid[Double] {
      def zero = 0.0
      def append(a: Double, b: => Double) = a + b
    }

    // Point-wise map monoid: values under equal keys are combined with V's monoid.
    def immutableMapMonoid[K, V](implicit V: Monoid[V]): Monoid[Map[K, V]] = new Monoid[Map[K, V]] {
      def zero = Map.empty
      def append(m1: Map[K, V], m2: => Map[K, V]): Map[K, V] = {
        m2.foldLeft(m1) {
          case (m, (k, v)) =>
            m + (k -> V.append(m.getOrElse(k, V.zero), v))
        }
      }
    }

    // Log monoid required implicitly by WriterT's flatMap.
    implicit val monoid = immutableMapMonoid[String, Double](doubleMonoid)

    def calc1 = WriterT(Future.successful(Map("doing calc" -> 1.0) -> 11))
    def calc2 = WriterT(Future.successful(Map("doing other" -> 2.0) -> 22))

    for {
      a <- calc1
      b <- calc2
    } yield {
      a + b
    }
  }

  // Mutually recursive pair: f and g call each other, so instances are only
  // usable when a subtype overrides at least one of them.
  trait Test {
    def f(): Unit = g()
    def g(): Unit = f()
  }

  // Overrides f, so both f() and g() -> f() terminate by printing "F".
  object ObjectTest extends Test {
    override def f() = println("F")
  }

  def main(args: Array[String]): Unit = {
    println(test())
    println(Await.result(test1().run, Duration.Inf))
    ObjectTest.f()
    ObjectTest.g()
  }
}
|
import { IPictureModel } from './products.models';
/**
 * View model for one accessory attached to a product / furniture unit.
 * Field semantics below are inferred from names — confirm at call sites.
 */
export interface IProductsAccessoriesViewModel {
  /** Id of the furniture unit this accessory belongs to. */
  furnitureUnitId?: string;
  /** Server-side identifier; absent for not-yet-persisted entries. */
  id?: number;
  /** Display name of the accessory. */
  name: string;
  /** Quantity of this accessory. */
  amount: number;
  materialCode?: string;
  materialId?: string;
  /** Picture metadata; `src` presumably carries a resolved image URL. */
  picture?: IPictureModel;
  src?: string;
  // size: ISizeModel;
  // edging?: IEdgingModel;
}
|
import os
import random


def make_validation_split(train_dir="data/train", valid_dir="data/valid",
                          valid_pct=0.25):
    """Move a random ~``valid_pct`` fraction of the files in ``train_dir``
    into ``valid_dir`` to form a validation set.

    Parameters
    ----------
    train_dir : str
        Directory containing the training files.
    valid_dir : str
        Destination directory; created if it does not exist.
    valid_pct : float
        Expected fraction of files to move, in [0.0, 1.0].
    """
    if os.path.isdir(valid_dir):
        # Resolves the old TODO: warn instead of silently mixing a fresh
        # split into an existing validation set.
        print("warning: %s already exists; new files will be added to it"
              % valid_dir)
    else:
        os.mkdir(valid_dir)
    for name in os.listdir(train_dir):
        # Move each file independently with probability valid_pct.  This
        # replaces random.randint(1, 1/valid_pct), which passed a float
        # upper bound (only valid when 1/pct is integral, and rejected by
        # modern Python) while giving the same expected fraction.
        if random.random() < valid_pct:
            os.rename(os.path.join(train_dir, name),
                      os.path.join(valid_dir, name))


if __name__ == "__main__":
    make_validation_split()
|
#!/usr/bin/env python2
# Inject the Google Analytics (gtag.js) snippet into index.html right after
# the opening <head> tag.  Safe to run repeatedly: the snippet is only
# inserted when the tracking id is not already present.

ga_tracking_code = '''
<!-- Global site tag (gtag.js) - Google Analytics -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-114302089-1"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'UA-114302089-1');
</script>
'''.strip()


def inject_tracking(html):
    """Return ``html`` with the GA snippet inserted after the first
    ``<head>`` tag.

    No-op when the tracking id is already present (idempotence) or when
    there is no ``<head>`` tag at all.
    """
    if 'UA-114302089-1' in html:
        return html
    # count=1: patch only the first <head>.  The original replaced every
    # occurrence, which would duplicate the snippet in pathological files.
    return html.replace("<head>", "<head>\n" + ga_tracking_code, 1)


if __name__ == "__main__":
    with open('index.html', 'r') as f:
        content = f.read()
    with open('index.html', 'w') as f:
        f.write(inject_tracking(content))
|
๏ปฟnamespace SeleniumScript.Interfaces
{
  using static SeleniumScript.Grammar.SeleniumScriptParser;

  /// <summary>
  /// Executes a parsed Selenium script (ANTLR-style parser context).
  /// </summary>
  public interface ISeleniumScriptInterpreter
  {
    /// <summary>
    /// Raised when the interpreted script requests a callback into the host.
    /// </summary>
    event CallBackEventHandler OnCallback;

    /// <summary>
    /// Runs the script represented by the given <see cref="ExecutionUnitContext"/> parse tree.
    /// </summary>
    void Run(ExecutionUnitContext context);
  }
}
|
// Copyright (C) 2017 Dmitry Yakimenko (detunized@gmail.com).
// Licensed under the terms of the MIT license. See LICENCE for details.
using System;
using NUnit.Framework;
namespace TrueKey.Test
{
    // Verifies that every exception type in the hierarchy forwards its
    // constructor arguments (message, inner exception and — for
    // FetchException — the failure reason) into the public properties.
    [TestFixture]
    class ExceptionsTest
    {
        [Test]
        public void BaseException_properties_are_set()
        {
            VerifyException(new BaseException(Message, InnerException));
        }

        [Test]
        public void CryptoException_properties_are_set()
        {
            VerifyException(new CryptoException(Message, InnerException));
        }

        [Test]
        public void JTokenAccessException_properties_are_set()
        {
            VerifyException(new JTokenAccessException(Message, InnerException));
        }

        [Test]
        public void FetchException_properties_are_set()
        {
            VerifyExceptionWithReason(new FetchException(Reason, Message, InnerException));
        }

        //
        // Data
        //

        private const string Message = "message";
        private static readonly Exception InnerException = new Exception();
        private const FetchException.FailureReason Reason =
            FetchException.FailureReason.UnknownError;

        //
        // Helpers
        //

        // Asserts the message / inner-exception round-trip shared by all cases.
        private static void VerifyException(BaseException e)
        {
            Assert.That(e.Message, Is.EqualTo(Message));
            Assert.That(e.InnerException, Is.SameAs(InnerException));
        }

        // Additionally asserts the Reason property; checked by name so the
        // helper can stay typed to BaseException.
        private static void VerifyExceptionWithReason(BaseException e)
        {
            VerifyException(e);
            Assert.That(e, Has.Property("Reason").EqualTo(Reason));
        }
    }
}
|
<p align="left">
<a href="https://250.cn">
<img src="https://www.250.cn/static/250/images/logo.png" alt="Build Status"></a>
<a href="https://packagist.org/packages/laravel/framework">
<img src="https://poser.pugx.org/laravel/framework/license.svg" alt="License">
</a>
</p>
## About ๆตๅ
ๆ้
ๆตๅ
ๆ้
๏ผๆนๅ๏ผ็งๆๆ้ๅ
ฌๅธ๏ผๆ็ซไบ2019ๅนด๏ผๆไปฌๅ
ฌๅธ่ดๅไบไบ่็ฝๆๆฏ็ๅผๅไธ่ฟ็จ๏ผไป่ๅฎ็ฐๆๅฐไธไบ่็ฝ็ไบบๆบไบคไบ๏ผไธบไบบไปฌ็ๆดป็ๆนๆน้ข้ขๅธฆๆฅไพฟๅฉใๆไปฌไปฅๅ่ถ็ๅ่ดจใไธไธ็ๆๆฏๆๅกๅฎๅ๏ผไธบไธๅ็พคไฝ็็จๆทๆไพๆดไผ่ดจ็ๆๅกใ
- [ๆไปฌ็ๅฎ็ฝ](https://250.cn).
## About ๆฌ้กน็ฎ
ๆฌ้กน็ฎๆฏโๅ
จๆฐๅธฎๅธฎโ้กน็ฎ็ฎก็ๅๅฐๅ็ซฏไปฃ็ ใๅบไบvue-element-adminๆจกๆฟๅผๅ
## ็ฏๅข้ๆฑ
1. ๆไฝ็ณป็ป windows/linux
2. Nodejs 6.11.5ๅไปฅไธ
## ่ฏดๆ
ๆฌ้กน็ฎๆฏโๅ
จๆฐๅธฎๅธฎโ้กน็ฎ็็ฎก็ๅๅฐๅ็ซฏไปฃ็ ๏ผโๅ
จๆฐๅธฎๅธฎโๆฏๅๅ็ซฏๅไบซๆถๆ๏ผๆ
ๆฌ้กน็ฎ้่ฆ[ๅ็ซฏไปๅบ](https://github.com/lgxj/SuperCooperationAPI)ๆฏๆ
## ๅ่ฝ
```
- ็ปๅฝ / ๆณจ้
- ๆ้้ช่ฏ
- ้กต้ขๆ้
- ๆไปคๆ้
- ๆ้็ฎก็
- ่ง่ฒๅ่กจ
- ็ฎก็ๅๅ่กจ
- ๆไฝๆฅๅฟ
- ๅ
ๅฎน็ฎก็
- ๆ็ซ ๅ็ฑป
- ๆ็ซ ๅ่กจ
- ็ณป็ป็ฎก็
- ้
็ฝฎ็ฎก็
- APPๆดๆฐ็ฎก็
- ็จๆท็ฎก็
- ็จๆทๅ่กจ
- ็จๆทๅ้ฆ
- ๅฎขๆๅ่กจ
- ็จๆทๅฎไฝ
- ไปปๅก็ฎก็
- ๅๅๅ่กจ
- ๆฅๅๅ่กจ
- ไปปๅกๅ็ฑป
- ่ดฆๅก็ฎก็
- ๆ็ฐๅ่กจ
- ๆฏไปๅ่กจ
- ้ๆฌพๅ่กจ
- ๆตๆฐด่ฎฐๅฝ
- ๆถ่ดน็ฎก็
- ่ดฆๆท็ฎก็
- ็ป่ฎก็ฎก็
- ไปปๅก็ป่ฎก
```
## ๅผๅ
```bash
# clone the project
git clone https://github.com/lgxj/SuperCooperationAdmin.git
# enter the project directory
cd SuperCooperationAdmin
# install dependency
npm install
# develop
npm run dev
```
ๆต่งๅจ่ฎฟ้ฎ๏ผhttp://localhost:9527
## ้
็ฝฎ
ไฟฎๆน`.env.development`ๅ`.env.production`ไธญ`VUE_APP_BASE_API`ไธบๆต่ฏๆๆญฃๅผๆฅๅฃๅฐๅ
ไฟฎๆน`src/settings.js`ไธญ`appId`ใ`appSecret`ไธบๆญฃ็กฎๅผ(ๅจๅ็ซฏ้กน็ฎ้
็ฝฎๅน้
)
## ๅๅธ
```bash
# build for test environment
npm run build:stage
# build for production environment
npm run build:prod
```
[ไฝ้ชๅฐๅ](http://sc-admin.250.cn/)
## ่็ณปๆไปฌ

|
#!/bin/bash
# Manually fire the "ManualEmailTrigger" function on a locally running
# functions host (port 7071 and the /admin/functions/ route match the
# Azure Functions local runtime — NOTE(review): confirm the runtime).
#
# Usage: ./<script> <document-id>
# $1 is embedded (with escaped quotes) as the "id" field of the nested
# JSON trigger input.
echo Manual trigger for document id: $1
curl --request POST \
  -i \
  -H "Content-Type: application/json" \
  -H "Accept: application/json" \
  --data "{\"input\":\"{\\\"id\\\": \\\"$1\\\"}\"}" \
  http://localhost:7071/admin/functions/ManualEmailTrigger
|
package epy0n0ff.com.rx_okhttp_sample.net.feign.codec;
import epy0n0ff.com.rx_okhttp_sample.net.feign.exception.HttpErrorException;
import feign.Response;
/**
 * ErrorDecoder
 * Created by epy0n0ff on 2015/08/07.
 *
 * Feign error decoder that maps every failed response to an
 * {@link HttpErrorException} carrying the HTTP status code.
 */
public class ErrorDecoder implements feign.codec.ErrorDecoder {
  /**
   * @param methodKey key of the Feign method that produced the response (unused here)
   * @param response  the failed HTTP response
   * @return the exception Feign should throw for this response
   */
  @Override public Exception decode(String methodKey, Response response) {
    return new HttpErrorException(response.status());
  }
}
|
/*
* Copyright 2018-2019 Scala Steward contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalasteward.core.coursier
import cats.effect._
import cats.implicits._
import cats.{Applicative, Parallel}
import coursier.interop.cats._
import coursier.util.StringInterpolators.SafeIvyRepository
import coursier.{Info, Module, ModuleName, Organization}
import io.chrisdavenport.log4cats.Logger
import org.http4s.Uri
import org.scalasteward.core.application.Config
import org.scalasteward.core.data.{Dependency, Version}
/** An interface to [[https://get-coursier.io Coursier]] used for
  * fetching dependency versions and metadata.
  */
trait CoursierAlg[F[_]] {
  /** Resolves a project URL for the artifact: its SCM URL when present,
    * otherwise its homepage (see `getScmUrlOrHomePage` in the companion).
    */
  def getArtifactUrl(dependency: Dependency): F[Option[Uri]]

  /** Lists all available versions strictly greater than the dependency's
    * current version, sorted ascending.
    */
  def getNewerVersions(dependency: Dependency): F[List[Version]]

  /** Maps artifactId names to their URLs, silently dropping dependencies
    * for which no URL could be resolved.
    */
  final def getArtifactIdUrlMapping(dependencies: List[Dependency])(
      implicit F: Applicative[F]
  ): F[Map[String, Uri]] =
    dependencies
      .traverseFilter(dep => getArtifactUrl(dep).map(_.map(dep.artifactId.name -> _)))
      .map(_.toMap)
}
object CoursierAlg {
  /** Creates the production implementation backed by a TTL'd file cache,
    * with the sbt-plugin-releases Ivy repository added so sbt plugins can
    * be resolved as well.
    */
  def create[F[_]](
      implicit
      config: Config,
      contextShift: ContextShift[F],
      logger: Logger[F],
      F: Sync[F]
  ): CoursierAlg[F] = {
    // Coursier's cats interop requires a Parallel instance; use the
    // identity instance so everything runs sequentially in F.
    implicit val parallel: Parallel.Aux[F, F] = Parallel.identity[F]
    val cache = coursier.cache.FileCache[F]().withTtl(config.cacheTtl)
    val sbtPluginReleases =
      ivy"https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/[defaultPattern]"
    val fetch = coursier.Fetch[F](cache).addRepositories(sbtPluginReleases)
    val versions = coursier.Versions[F](cache).addRepositories(sbtPluginReleases)
    new CoursierAlg[F] {
      override def getArtifactUrl(dependency: Dependency): F[Option[Uri]] = {
        val coursierDependency = toCoursierDependency(dependency)
        for {
          // Fetch only the metadata artifacts (pom/ivy).  Fetch failures
          // are logged at debug level and degrade to "no URL".
          maybeFetchResult <- fetch
            .addDependencies(coursierDependency)
            .addArtifactTypes(coursier.Type.pom, coursier.Type.ivy)
            .ioResult
            .map(Option.apply)
            .handleErrorWith { throwable =>
              logger.debug(throwable)(s"Failed to fetch artifacts of $coursierDependency").as(None)
            }
        } yield {
          // Look the project up in the resolution cache and pull its SCM
          // URL or homepage out of the POM/Ivy info.
          for {
            result <- maybeFetchResult
            moduleVersion = (coursierDependency.module, coursierDependency.version)
            (_, project) <- result.resolution.projectCache.get(moduleVersion)
            url <- getScmUrlOrHomePage(project.info)
          } yield url
        }
      }

      override def getNewerVersions(dependency: Dependency): F[List[Version]] = {
        val module = toCoursierModule(dependency)
        val version = Version(dependency.version)
        // Errors degrade to an empty list (logged), so a single bad module
        // never fails a whole update check.
        versions
          .withModule(module)
          .versions()
          .map(_.available.map(Version.apply).filter(_ > version).sorted)
          .handleErrorWith { throwable =>
            logger.error(throwable)(s"Failed to get newer versions of $module").as(List.empty)
          }
      }
    }
  }

  // Non-transitive on purpose: only this artifact's own metadata is needed.
  private def toCoursierDependency(dependency: Dependency): coursier.Dependency =
    coursier.Dependency(toCoursierModule(dependency), dependency.version).withTransitive(false)

  private def toCoursierModule(dependency: Dependency): Module =
    Module(
      Organization(dependency.groupId.value),
      ModuleName(dependency.artifactId.crossName),
      dependency.attributes
    )

  // Prefer the SCM URL over the homepage; skip empty strings, "git@" SSH
  // URLs, and anything that does not parse as a Uri.
  private def getScmUrlOrHomePage(info: Info): Option[Uri] =
    (info.scm.flatMap(_.url).toList :+ info.homePage)
      .filterNot(url => url.isEmpty || url.startsWith("git@"))
      .flatMap(Uri.fromString(_).toList)
      .headOption
}
|
extern crate iron_slog;
extern crate iron;
extern crate router;
#[macro_use]
extern crate slog;
extern crate slog_term;
extern crate slog_async;
use slog::{Drain, Logger};
use iron::{Iron, Request, Response, IronResult, status};
use router::Router;
use iron_slog::{LoggerMiddleware, DefaultLogFormatter};
/// Handler for the root route: always responds `200 OK` with the
/// plain-text body "Hello".
fn hello(_req: &mut Request) -> IronResult<Response> {
    let payload = (status::Ok, "Hello");
    Ok(Response::with(payload))
}
fn main() {
    // Terminal-backed slog logger: compact output made non-blocking by the
    // async drain; `fuse()` converts drain errors into panics.
    let decorator = slog_term::TermDecorator::new().build();
    let drain = slog_term::CompactFormat::new(decorator).build().fuse();
    let drain = slog_async::Async::new(drain).build().fuse();
    let logger = Logger::root(drain, o!());
    let formatter = DefaultLogFormatter;

    // To use custom format:
    //
    // use iron_slog::LogContext;
    // fn formatter(f: &mut std::fmt::Formatter, context: &LogContext) -> std::fmt::Result {
    //     write!(f, "{:?}", context)
    // }

    let mut router = Router::new();
    router.get("/", hello, "hello");

    // Wrap the router so requests are logged, then serve on all interfaces,
    // port 3000 (blocks the thread; panics if binding fails).
    let handler = LoggerMiddleware::new(router, logger, formatter);
    Iron::new(handler).http("0.0.0.0:3000").unwrap();
}
|
{-# LANGUAGE Arrows, TupleSections, LambdaCase #-}
{-
After LambdaCase
Missing:
TypeSynonymInstances
Needs work.
-}
import Prelude hiding ((.), id)
import Control.Category
import Control.Monad((<=<), join)
import Control.Comonad
import Control.Arrow hiding (Kleisli(..))
import Control.Applicative((<$>))
-- Kleisli arrows (a -> m b) and co-Kleisli arrows (w a -> b) as Categories.
newtype Kleisli m a b = K { runKleisli :: a -> m b }
newtype CoKleisli w a b = CK { runCoKleisli :: w a -> b }

-- Composition is monadic composition (<=<); identity is 'return'.
instance Monad m => Category (Kleisli m) where
  id = K return
  (K f) . (K g) = K (f <=< g)

-- Dually: comonadic composition (=<=); identity is 'extract'.
instance Comonad w => Category (CoKleisli w) where
  id = CK extract
  (CK f) . (CK g) = CK (f =<= g)

-- Arrow instance: 'arr' lifts pure functions; first/second thread the
-- untouched tuple component through the functor via (,d) / (d,)
-- sections (hence the TupleSections extension).
instance (Monad m, Functor m) => Arrow (Kleisli m) where
  arr f = K $ return . f
  first (K f) = K (\(b, d) -> (,d) <$> f b)
  second (K f) = K (\(d, b) -> (d,) <$> f b)

-- Runs both arrows on the same input and sums their results
-- (written in arrow 'proc' notation).
addA :: Arrow a => a b Int -> a b Int -> a b Int
addA f g = proc x ->
  do
    y <- f -< x
    z <- g -< x
    returnA -< y + z

-- Nothing for the empty string, otherwise Just the length.
f :: Kleisli Maybe String Int
f = K $ \case
  "" -> Nothing
  xs -> Just $ length xs

-- Nothing for the string "butt", otherwise Just the length.
g :: Kleisli Maybe String Int
g = K $ \case
  "butt" -> Nothing
  xs -> Just $ length xs

-- Nothing when either f or g fails; otherwise twice the input's length.
something :: String -> Maybe Int
something = runKleisli (addA f g)
-- Unrelated but cool bits
-- RankNTypes
--newtype Nat f g = N { runNat :: forall a. f a -> g a }
--instance Category Nat where
-- id = N id
-- (N f) . (N g) = N (f . g)
|
'use strict';

// Wrap everything in an anonymous function to avoid polluting the global namespace
(function () {
  $(document).ready(function () {
    // Initialize the Tableau Extensions API, render an initial bar chart,
    // then enable the two selectors and re-render on every change.
    tableau.extensions.initializeAsync().then(function () {
      addVizImage(tableau.MarkType.Bar, 'tableau20_10_0');
      let markSelector = $('#mark-select');
      let colorSelector = $('#color-select');
      markSelector.prop('disabled', false);
      colorSelector.prop('disabled', false);

      // updating viz images with new values upon a selector change.
      markSelector.change(function () {
        addVizImage(markSelector.val(), colorSelector.val());
      });
      colorSelector.change(function () {
        addVizImage(markSelector.val(), colorSelector.val());
      });
    });
  });

  // This function creates and displays a viz image.
  // markType: a tableau.MarkType value; palette: a palette id string.
  // Either may be null (jQuery .val() with no selection), in which case the
  // defaults below are applied.
  function addVizImage (markType, palette) {
    // Building the input specification object that is used to create the viz image.
    // Data values used in the viz image are prefilled.
    const vizInputSpec = {
      description: 'A sample viz', // optional parameter.
      size: {width: 400, height: 300},
      data: {
        values: [
          {'Product': 'Paper', 'Sales': 28, 'Region': 'Central'},
          {'Product': 'Pens', 'Sales': 45, 'Region': 'East'},
          {'Product': 'Rulers', 'Sales': 35, 'Region': 'East'},
          {'Product': 'Rulers', 'Sales': 43, 'Region': 'South'},
          {'Product': 'Paper', 'Sales': 50, 'Region': 'West'},
          {'Product': 'Pens', 'Sales': 56, 'Region': 'West'}
        ]
      },
      mark: markType,
      markcolor: '#FFED5F', // may not get used in viz if color is encoded in viz.
      encoding: {
        columns: {field: 'Region', type: tableau.VizImageEncodingType.Discrete},
        rows: {field: 'Sales', type: tableau.VizImageEncodingType.Continuous},
        color: {field: 'Product', type: tableau.VizImageEncodingType.Discrete, palette}
      }
    };

    // defaulting values if null.
    if (markType === null) {
      vizInputSpec.mark = tableau.MarkType.Bar;
    }
    if (palette === null) {
      vizInputSpec.encoding.color.palette = 'tableau20_10_0';
    }

    // making call to create viz image from the input specifications.
    // The returned SVG markup is wrapped in a Blob / object URL so a plain
    // <img> can display it; the URL is revoked once the image has loaded.
    tableau.extensions.createVizImageAsync(vizInputSpec).then(function (svg) {
      var blob = new Blob([svg], { type: 'image/svg+xml' });
      var url = URL.createObjectURL(blob);
      var image = document.createElement('img');
      image.src = url;
      image.style.maxWidth = '100%';
      image.style.maxHeight = '100%';
      image.className = 'center-block';
      var vizApiElement = document.getElementById('viz-container');
      // clearing UI and adding in new viz.
      vizApiElement.innerHTML = '';
      vizApiElement.appendChild(image);
      image.addEventListener('load', function () { return URL.revokeObjectURL(url); }, { once: true });
    }, function (err) {
      console.log(err);
    });
  }
})();
|
# GET /incident query builder.  Every modifier appends one query-string
# parameter — GetDelimiter() supplies the separator (presumably '?' or '&')
# — and returns $this so calls chain fluently.
class SFxGetIncident : SFxClientAPI {
    SFxGetIncident() : base('incident', 'GET') { }

    # Also include incidents that have already been resolved.
    [SFxGetIncident] IncludeResolved() {
        $delimiter = $this.GetDelimiter()
        # Parentheses make the (already correct) precedence explicit:
        # -f binds tighter than +, so only the literal is formatted.
        $this.Uri = $this.Uri + ('{0}includeResolved=true' -f $delimiter)
        return $this
    }

    # Skip the first $offset incidents (paging).
    [SFxGetIncident] Offset([int]$offset) {
        $delimiter = $this.GetDelimiter()
        $this.Uri = $this.Uri + ('{0}offset={1}' -f $delimiter, $offset)
        return $this
    }

    # Return at most $limit incidents (paging).
    [SFxGetIncident] Limit([int]$limit) {
        $delimiter = $this.GetDelimiter()
        $this.Uri = $this.Uri + ('{0}limit={1}' -f $delimiter, $limit)
        return $this
    }
}

# PUT /incident/<id>/clear — clears a single incident by id.
class SFxClearIncident : SFxClientAPI {
    SFxClearIncident([string]$Id) : base('incident', 'PUT') {
        $this.Uri = $this.Uri + ('/{0}/clear' -f $Id)
    }
}
|
/*
* Copyright 2014-2021 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.server.util
import io.ktor.util.*
import kotlinx.coroutines.*
import java.util.concurrent.*
import kotlin.coroutines.*
/**
* Specialized dispatcher useful for graceful shutdown
*/
@InternalAPI
public class DispatcherWithShutdown(delegate: CoroutineDispatcher) : CoroutineDispatcher() {
    // Nulled out by prepareShutdown() so no new work reaches the wrapped dispatcher.
    private var delegate: CoroutineDispatcher? = delegate

    @Volatile
    private var shutdownPhase = ShutdownPhase.None

    // Fallback executor used only during graceful shutdown; created lazily
    // so it costs nothing when shutdown never happens.
    private val shutdownPool = lazy { Executors.newCachedThreadPool() }

    /**
     * Prepare for shutdown so we will not dispatch on [delegate] anymore. It is still possible to
     * dispatch coroutines.
     */
    public fun prepareShutdown() {
        // Order matters: advance the phase before dropping the delegate so a
        // concurrent dispatch() that observes a null delegate retries and
        // sees the Graceful phase.
        shutdownPhase = ShutdownPhase.Graceful
        delegate = null
    }

    /**
     * Complete shutdown. Any further attempts to dispatch anything will fail with [RejectedExecutionException]
     */
    public fun completeShutdown() {
        shutdownPhase = ShutdownPhase.Completed
        if (shutdownPool.isInitialized()) shutdownPool.value.shutdown()
    }

    override fun isDispatchNeeded(context: CoroutineContext): Boolean {
        return when (shutdownPhase) {
            ShutdownPhase.None -> delegate?.isDispatchNeeded(context) ?: true
            else -> true
        }
    }

    override fun dispatch(context: CoroutineContext, block: Runnable) {
        when (shutdownPhase) {
            ShutdownPhase.None -> {
                try {
                    // Delegate vanished => shutdown raced us; retry so the
                    // new phase is observed.
                    delegate?.dispatch(context, block) ?: return dispatch(context, block)
                } catch (rejected: RejectedExecutionException) {
                    // Only swallow the rejection when it was caused by an
                    // in-flight shutdown; otherwise propagate.
                    if (shutdownPhase != ShutdownPhase.None) return dispatch(context, block)
                    throw rejected
                }
            }
            ShutdownPhase.Graceful -> {
                try {
                    shutdownPool.value.submit(block)
                } catch (rejected: RejectedExecutionException) {
                    // Pool already shut down: fall through to Completed behavior.
                    shutdownPhase = ShutdownPhase.Completed
                    return dispatch(context, block)
                }
            }
            ShutdownPhase.Completed -> {
                // Last resort: run inline on the caller's thread.
                block.run()
            }
        }
    }

    private enum class ShutdownPhase {
        None, Graceful, Completed
    }
}
|
const express = require("express");
const router = express.Router();
const { isParamIdValid } = require("../../utils");
const { url } = require("../../hitomi-chan-utility");
const response = require("../../response");
const dbClient = require("../../dbClient");
// GET /:id — looks up the gallery by id and pipes its anime-file image
// back to the client.
router.get("/:id", (req, res) => {
  // Explicit radix 10: without it, ids like "0x12" would silently be
  // parsed as hexadecimal and resolve a different gallery.
  const id = parseInt(req.params.id, 10);
  if (!isParamIdValid(id)) {
    res.send(response.getInvalidGalleryIdResponse());
    return;
  }
  dbClient.findGalleryById(id, (data) => {
    // Callback contract (from usage): undefined => query failed,
    // null => no such gallery, otherwise an array of rows.
    if (data === undefined) {
      res.send(response.getQueryErrorResponse());
      return;
    } else if (data === null) {
      response.sendNotFoundResponse(res);
      return;
    }
    // First row's "n" column carries the gallery name used to build the
    // upstream file URL.
    const galleryName = data[0].n;
    response.pipeImage(
      url.getAnimeFileUrl(galleryName),
      res
    );
  });
});

module.exports = router;
|
package cps
import org.junit.{Test,Ignore}
import org.junit.Assert._
import scala.quoted._
import scala.util.Success
// Tests of while-loops inside the async[ComputationBound] CPS transform.
// The variants cover combinations of synchronous vs awaited loop condition
// and body; every loop sums 0 until 10, so each must yield Success(45).
class TestBS1While:

  // Baseline: condition and body are fully synchronous.
  @Test def tWhileC1_00(): Unit =
    val c = async[ComputationBound]{
      val n = 10
      var s = 0
      var i = 0
      while(i < n)
        s += i
        i += 1
      s
    }
    assert(c.run() == Success(45))

  // Awaited condition, synchronous body.
  @Test def tWhileC1_10(): Unit =
    val c = async[ComputationBound]{
      val n = 10
      var s = 0
      var i = 0
      while(await( T1.cbBool(i < n) )) {
        s += i
        i += 1
      }
      s
    }
    assert(c.run() == Success(45))

  // Synchronous condition, awaited body.
  @Test def tWhileC1_01(): Unit =
    val c = async[ComputationBound]{
      val n = 10
      var s = 0
      var i = 0
      while(i < n) {
        val q = await(T1.cbi(i))
        s += q
        i += 1
      }
      s
    }
    assert(c.run() == Success(45))

  // Dotty crash.
  // TODO: minimize and submit bug.
  // only this test : branch dotty-break-while-00,
  // submitted to dotty
  // https://github.com/lampepfl/dotty/issues/8029
  // pull request with fix also submitted:
  // https://github.com/lampepfl/dotty/pull/8057
  //
  // Awaited condition AND awaited body; invokes the transform macro directly.
  @Test def tWhileC1_11(): Unit =
    val c = macros.Async.transform[ComputationBound,Int]{
      val n = 10
      var s = 0
      var i = 0
      while(await(T1.cbBool(i < n))) {
        val q = await(T1.cbi(i))
        s += q
        i += 1
      }
      s
    }
    assert(c.run() == Success(45))
|
# sacloud/iaas-api-go
- URL: https://github.com/sacloud/iaas-api-go/pull/2
- Parent: https://github.com/sacloud/iaas-service-go/pull/1
- Author: @yamamoto-febc
## ๆฆ่ฆ
[iaas-service-goใฎๅบๆฌๆน้](https://github.com/sacloud/iaas-service-go/pull/1)ใซๅพใใsacloud/libsacloud v2ใใIaaS้จๅใๅใๅบใใ
## ใใใใจ/ใใใชใใใจ
### ใใใใจ
- libsacloudใใใฎIaaS้จๅใฎๅใๅบใ
- iaas-api-go v1ใจใใฆใชใชใผใน
- libsacloudใฎ`sacloud`ใใใฑใผใธ้
ไธใฎๆด็
- ~typesใostypeใจใใฃใใใใฑใผใธๆงๆใฎๅ่/ๆด็~
### ใใใชใใใจ
- libsacloud v2ใฎ็ฌ่ชDSLใๅซใlibsacloudใฎๅฎ่ฃ
ใฎๆนๅ
ๅบๆฌ็ใซlibsacloud v2ใใใฎใพใพ็งปๆคใใใใใ ใใๅ่ฟฐใฎ`sacloud`ใใใฑใผใธ้
ไธใฎๆด็ใชใฉใฎใชใใกใฏใฟใฌใใซใฎไฟฎๆญฃใฏ่กใใ
ๅพๆฅ[libsacloud v3ใจใใฆๆค่จใใใฆใใๅ
ๅฎน](https://github.com/sacloud/libsacloud/issues/791)ใฏiaas-service-goใiaas-api-go v2ใงๅฎ็พใใใ
## ๅฎ่ฃ
### libsacloudใจiaas-api-goใฎไธฆๅ้็บ
ๅฝ้ขใฏlibsacloudใฎไฟฎๆญฃใ็ถ็ถใใใlibsacloudใซๅฏพใใฆ่กใใใไฟฎๆญฃใฏๆไฝๆฅญใงiaas-api-goใซๅใ่พผใใ
iaas-api-goใธใฎ็งปๆคใฏ[libsacloud v2.32.2](https://github.com/sacloud/libsacloud/tree/v2.32.2)ใๅ
ใซใใใ
### ๆน้
`sacloud`ใใใฑใผใธใซใคใใฆใlibsacloudใฎใฏใฉใคใขใณใๅดใงใฎไฟฎๆญฃใๅฎนๆใซ่กใใ็จๅบฆใฎๆนไฟฎใใใคใค็งปๆคใใใ
(ๅฎนๆ == ๆฉๆขฐ็ใซ็ฝฎใๆใใงใใใใจใใ็จๅบฆ)
### ็งปๆคๅฏพ่ฑก/ๅฏพๅฟ
#### ใชใใธใใช้็จ
[libsacloud v2.32.2](https://github.com/sacloud/libsacloud/tree/v2.32.2)ใๅบ็นใซใฝใผในใณใผใ้กใใณใใผใใฆ็งป่กใใใ
libsacloudใใใฎforkใฏ่กใใๆฐใใชใชใใธใใชใง้็บใใฆใใใ
#### libsacloudใฎใใใฑใผใธๆงๆ/็งป่กๅฏพ่ฑก
```console
- examples: otelๅฉ็จไพ
- helper: ้ซใฌใใซAPI็พค(ไธ้จใiaas-service-goใธ)
- internal: ็ฌ่ชDSL
- pkg: libsacloudใซไพๅญใใชใใฆใผใใฃใชใใฃใชใฉ => packages-goใธ
- sacloud
- accessor
- fake
- naked
- ostype
- pointer => packages-goใธ
- profile => api-client-goใธ
- search
- stub
- test
- testutil => ไธ้จใpackages-goใธ
- trace
- types
- sacloud็ดไธ
```
- `profile`ใฏapi-client-goใงๅฎ่ฃ
ใใ
- testutilใฏๆด็ใใฆใใๅใๅบใ/ๅๅฒใชใฉใฎๅฏพๅฟใๅฟ
่ฆ
- ~typesใฏๆด็ใใฆใใsacloud็ดไธใธ็งปๅใชใฉใฎๅฏพๅฟใๅฟ
่ฆ~
#### iaas-api-goใฎใใใฑใผใธๆงๆ
ๅพๆฅใฏsacloudใใใฑใผใธ้
ไธใ ใฃใใใฎใiaas-api-goใฎ้
ไธใซใใใ
ใใใฑใผใธๅใฏ`iaas`ใจใใใ
```console
- accessor
- defaults => libsacloudใฎhelper/defaultsใฎ็งปๆค
- fake
- helper
- cleanup
- plans
- power
- query
- wait
- internal => libsacloudใฎ็ฌ่ชDSLๅฎ่ฃ
ใชใฉ
- naked
- ostype
- search
- stub
- test
- testutil
- trace
- types
- sacloud็ดไธ
```
## ๆน่จๅฑฅๆญด
- 2022/3/4: ๅ็ไฝๆ
- 2022/3/7: libsacloud/v2็ดไธใฎใใใฑใผใธใซใคใใฆ่ฟฝ่จ
- 2022/3/10: typesใฎๆนไฟฎใๅฝ้ขๅปถๆ
- 2022/3/29: sacloud-goใฎๅๅฒ/ใชใใผใ ใๅๆ
|
using System;
using System.Collections;
using System.Collections.Generic;
using System.Configuration;
using System.Data;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.HtmlControls;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using Activizr.Logic.Structure;
using Telerik.Web.UI;
/// <summary>
/// User control rendering the organization hierarchy as a Telerik
/// RadTreeView, rooted at <see cref="Root"/> (defaults to Organization.Root).
/// </summary>
public partial class Controls_v4_OrganizationTree : System.Web.UI.UserControl
{
    protected void Page_Load(object sender, EventArgs e)
    {
        if (!Page.IsPostBack)
        {
            if (root == null)
            {
                root = Organization.Root;
            }

            Organizations orgs = root.GetTree();

            // GetTree() comes back as a flat list; build a
            // parent-id -> children lookup so it can be rendered as a tree.
            Dictionary<int, Organizations> lookup = new Dictionary<int, Organizations>();

            foreach (Organization org in orgs)
            {
                if (!lookup.ContainsKey(org.ParentIdentity))
                {
                    lookup[org.ParentIdentity] = new Organizations();
                }

                lookup[org.ParentIdentity].Add(org);
            }

            //Tree.NodeClick += new RadTreeViewEventHandler (Tree_NodeClick);

            // The first element's parent id is treated as the (virtual) root
            // of the subtree; recursion starts there and the top node is
            // expanded.  NOTE(review): assumes orgs is non-empty.
            Tree.Nodes.Add(RecursiveAdd(lookup, orgs[0].ParentIdentity)[0]);
            Tree.Nodes[0].Expanded = true;

            if (!String.IsNullOrEmpty(this.onClientNodeClicking))
            {
                Tree.OnClientNodeClicking = this.onClientNodeClicking;
            }
        }
    }

    public void Tree_NodeClick(object sender, RadTreeNodeEventArgs e)
    {
        // Fire SelectedNodeChanged. This is a bit of a cheat since we don't really know that
        // the node has changed when the user clicks on a node - but the same behavior needs
        // to be triggered anyway.
        if (this.SelectedNodeChanged != null)
        {
            SelectedNodeChanged (this, new EventArgs());
        }
    }

    /// <summary>
    /// Recursively converts the children of <paramref name="parentIdentity"/>
    /// (taken from the lookup) into a RadTreeNode collection.
    /// </summary>
    private RadTreeNodeCollection RecursiveAdd (Dictionary<int,Organizations> lookup, int parentIdentity)
    {
        RadTreeNodeCollection result = new RadTreeNodeCollection(this.Tree);

        foreach (Organization org in lookup[parentIdentity])
        {
            // Node text = short name, node value = identity (parsed back in
            // SelectedOrganization).
            RadTreeNode node = new RadTreeNode(org.NameShort, org.Identity.ToString());

            if (lookup.ContainsKey(org.Identity))
            {
                RadTreeNodeCollection collection = RecursiveAdd(lookup, org.Identity);

                foreach (RadTreeNode subnode in collection)
                {
                    node.Nodes.Add(subnode);
                }
            }

            result.Add(node);
        }

        return result;
    }

    /// <summary>Raised from Tree_NodeClick when a tree node is clicked.</summary>
    public event EventHandler SelectedNodeChanged;

    /// <summary>Root organization of the rendered tree (write-only).</summary>
    public Organization Root
    {
        set
        {
            this.root = value;
        }
    }

    /// <summary>Organization behind the currently selected node, or null when none is selected.</summary>
    public Organization SelectedOrganization
    {
        get
        {
            if (Tree.SelectedNode != null)
            {
                return Organization.FromIdentity(Int32.Parse(Tree.SelectedNode.Value));
            }
            else
            {
                return null;
            }
        }
    }

    /// <summary>Client-side handler name assigned to the tree's OnClientNodeClicking.</summary>
    public string OnClientNodeClicking
    {
        set { this.onClientNodeClicking = value; }
        get { return this.onClientNodeClicking; }
    }

    // NOTE(review): stored but never read inside this control — presumably
    // consumed by markup or client script; confirm before removing.
    public string ParentClientID
    {
        set { this.parentClientID = value; }
        get { return this.parentClientID; }
    }

    private string parentClientID;
    private string onClientNodeClicking;
    private Organization root;
}
|
// Build step: convert data/entities.json5 into dist/entities.json so
// consumers get plain JSON and don't need a JSON5 parser.
const fs = require('fs')
const path = require('path')
const JSON5 = require('json5')

const dataFile = path.join(__dirname, '../data', 'entities.json5')
const distDir = path.join(__dirname, '../dist')
const outFile = path.join(distDir, 'entities.json')

// Parse first so a malformed source fails before anything is written.
const entities = JSON5.parse(fs.readFileSync(dataFile, 'utf8'))

if (!fs.existsSync(distDir)) {
  fs.mkdirSync(distDir)
}
fs.writeFileSync(outFile, JSON.stringify(entities))
|
package customizations_test
import (
"context"
"github.com/aws/aws-sdk-go-v2/aws"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/internal/awstesting/unit"
"github.com/aws/aws-sdk-go-v2/service/eventbridge"
"github.com/aws/aws-sdk-go-v2/service/eventbridge/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
"strings"
"testing"
)
// TestPutEventsUpdateEndpoint verifies how the PutEvents endpoint, signing
// region/name, and auth scheme (SigV4 vs SigV4a) are resolved for the various
// combinations of FIPS/dualstack options, custom endpoints, and the
// multi-region EndpointId parameter.
func TestPutEventsUpdateEndpoint(t *testing.T) {
	tests := map[string]struct {
		DisableHTTPS   bool
		CustomEndpoint *aws.Endpoint
		UseDualStack   aws.DualStackEndpointState
		UseFIPS        aws.FIPSEndpointState
		// EndpointId, when set, triggers the multi-region endpoint and SigV4a.
		EndpointId *string
		Region     string
		WantErr    bool
		// Expected request URL after endpoint resolution.
		WantEndpoint     string
		WantSignedRegion string
		WantSignedName   string
		// True when the request must be signed with SigV4a (ECDSA) instead of SigV4.
		WantV4a bool
	}{
		"standard aws endpoint": {
			Region:           "us-mock-1",
			WantEndpoint:     "https://events.us-mock-1.amazonaws.com/",
			WantSignedName:   "events",
			WantSignedRegion: "us-mock-1",
		},
		"dualstack aws endpoint": {
			Region:           "us-mock-1",
			UseDualStack:     aws.DualStackEndpointStateEnabled,
			WantEndpoint:     "https://events.us-mock-1.api.aws/",
			WantSignedName:   "events",
			WantSignedRegion: "us-mock-1",
		},
		"fips aws endpoint": {
			Region:           "us-mock-1",
			UseFIPS:          aws.FIPSEndpointStateEnabled,
			WantEndpoint:     "https://events-fips.us-mock-1.amazonaws.com/",
			WantSignedName:   "events",
			WantSignedRegion: "us-mock-1",
		},
		"dualstack & fips aws endpoint": {
			Region:           "us-mock-1",
			UseDualStack:     aws.DualStackEndpointStateEnabled,
			UseFIPS:          aws.FIPSEndpointStateEnabled,
			WantEndpoint:     "https://events-fips.us-mock-1.api.aws/",
			WantSignedName:   "events",
			WantSignedRegion: "us-mock-1",
		},
		"custom endpoint": {
			Region: "us-mock-1",
			CustomEndpoint: &aws.Endpoint{
				URL:           "https://custom.amazonaws.com",
				SigningRegion: "us-mock-1",
				Source:        aws.EndpointSourceCustom,
			},
			WantEndpoint:     "https://custom.amazonaws.com/",
			WantSignedName:   "events",
			WantSignedRegion: "us-mock-1",
		},
		// Multi-region cases: EndpointId becomes a host-name prefix and the
		// request is signed with SigV4a over the wildcard region "*".
		"multi-region aws endpoint": {
			Region:           "us-mock-1",
			EndpointId:       aws.String("abc123.456def"),
			WantEndpoint:     "https://abc123.456def.endpoint.events.amazonaws.com/",
			WantSignedName:   "events",
			WantSignedRegion: "*",
			WantV4a:          true,
		},
		"multi-region dualstack aws endpoint": {
			Region:           "us-mock-1",
			EndpointId:       aws.String("abc123.456def"),
			UseDualStack:     aws.DualStackEndpointStateEnabled,
			WantEndpoint:     "https://abc123.456def.endpoint.events.api.aws/",
			WantSignedName:   "events",
			WantSignedRegion: "*",
			WantV4a:          true,
		},
		// FIPS is not supported with multi-region endpoints.
		"multi-region fips aws endpoint": {
			Region:     "us-mock-1",
			EndpointId: aws.String("abc123.456def"),
			UseFIPS:    aws.FIPSEndpointStateEnabled,
			WantErr:    true,
		},
		"multi-region dualstack & fips aws endpoint": {
			Region:       "us-mock-1",
			EndpointId:   aws.String("abc123.456def"),
			UseDualStack: aws.DualStackEndpointStateEnabled,
			UseFIPS:      aws.FIPSEndpointStateEnabled,
			WantErr:      true,
		},
		// A custom (non-service-metadata) endpoint is used verbatim, but the
		// request is still signed with SigV4a for multi-region calls.
		"multi-region custom endpoint not service source": {
			Region:     "us-mock-1",
			EndpointId: aws.String("abc123.456def"),
			CustomEndpoint: &aws.Endpoint{
				URL:           "https://custom.amazonaws.com",
				SigningRegion: "us-mock-1",
				Source:        aws.EndpointSourceCustom,
			},
			WantEndpoint:     "https://custom.amazonaws.com/",
			WantSignedName:   "events",
			WantSignedRegion: "*",
			WantV4a:          true,
		},
		// A service-metadata endpoint is rewritten to the multi-region host.
		"multi-region custom endpoint service source": {
			Region:     "us-mock-1",
			EndpointId: aws.String("abc123.456def"),
			CustomEndpoint: &aws.Endpoint{
				URL:           "https://custom.amazonaws.com",
				SigningRegion: "us-mock-1",
				Source:        aws.EndpointSourceServiceMetadata,
			},
			WantEndpoint:     "https://abc123.456def.endpoint.events.amazonaws.com/",
			WantSignedName:   "events",
			WantSignedRegion: "*",
			WantV4a:          true,
		},
		// The signing region determines the partition used for the rewritten host.
		"multi-region custom endpoint service source alt signing region for alt partition": {
			Region:     "us-mock-1",
			EndpointId: aws.String("abc123.456def"),
			CustomEndpoint: &aws.Endpoint{
				URL:           "https://custom.amazonaws.com",
				SigningRegion: "us-iso-mock-1",
				Source:        aws.EndpointSourceServiceMetadata,
			},
			WantEndpoint:     "https://abc123.456def.endpoint.events.c2s.ic.gov/",
			WantSignedName:   "events",
			WantSignedRegion: "*",
			WantV4a:          true,
		},
		"multi-region aws endpoint no ssl": {
			Region:           "us-mock-1",
			DisableHTTPS:     true,
			EndpointId:       aws.String("abc123.456def"),
			WantEndpoint:     "http://abc123.456def.endpoint.events.amazonaws.com/",
			WantSignedName:   "events",
			WantSignedRegion: "*",
			WantV4a:          true,
		},
		// EndpointId must be a non-empty, valid host label.
		"multi-region aws endpoint empty endpoint id": {
			Region:     "us-mock-1",
			EndpointId: aws.String(""),
			WantErr:    true,
		},
		"multi-region aws endpoint bad host label": {
			Region:     "us-mock-1",
			EndpointId: aws.String("badactor.com?foo=bar"),
			WantErr:    true,
		},
		"multi-region us-iso-mock-1": {
			Region:           "us-iso-mock-1",
			EndpointId:       aws.String("abc123.456def"),
			WantEndpoint:     "https://abc123.456def.endpoint.events.c2s.ic.gov/",
			WantSignedName:   "events",
			WantSignedRegion: "*",
			WantV4a:          true,
		},
	}
	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			options := eventbridge.Options{
				Credentials: unit.StubCredentialsProvider{},
				Retryer:     aws.NopRetryer{},
				Region:      tt.Region,
				HTTPClient:  smithyhttp.NopClient{},
				EndpointOptions: eventbridge.EndpointResolverOptions{
					DisableHTTPS:         tt.DisableHTTPS,
					UseDualStackEndpoint: tt.UseDualStack,
					UseFIPSEndpoint:      tt.UseFIPS,
				},
			}
			if tt.CustomEndpoint != nil {
				options.EndpointResolver = eventbridge.EndpointResolverFunc(
					func(region string, options eventbridge.EndpointResolverOptions) (aws.Endpoint, error) {
						return *tt.CustomEndpoint, nil
					})
			}
			client := eventbridge.New(options)
			var (
				request       *smithyhttp.Request
				signingRegion string
				signingName   string
			)
			// The deserialize middleware below short-circuits the request so no
			// network I/O happens; it captures the fully built request and the
			// signing parameters chosen by the customization under test.
			_, err := client.PutEvents(context.TODO(), &eventbridge.PutEventsInput{
				Entries: []types.PutEventsRequestEntry{{
					Detail: aws.String("{}"),
				}},
				EndpointId: tt.EndpointId,
			}, func(o *eventbridge.Options) {
				o.APIOptions = append(o.APIOptions, func(stack *middleware.Stack) error {
					return stack.Deserialize.Add(middleware.DeserializeMiddlewareFunc("loopback", func(
						ctx context.Context, input middleware.DeserializeInput, handler middleware.DeserializeHandler,
					) (
						out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
					) {
						request = input.Request.(*smithyhttp.Request)
						signingName = awsmiddleware.GetSigningName(ctx)
						signingRegion = awsmiddleware.GetSigningRegion(ctx)
						out.Result = &eventbridge.PutEventsOutput{}
						return out, metadata, nil
					}), middleware.Before)
				})
			})
			if e, a := tt.WantErr, err != nil; e != a {
				t.Fatalf("WantErr(%v) got %v", e, err)
			}
			if tt.WantErr {
				return
			}
			req := request.Build(context.Background())
			if e, a := tt.WantEndpoint, req.URL.String(); e != a {
				t.Errorf("expect url %s, got %s", e, a)
			}
			if e, a := tt.WantSignedRegion, signingRegion; e != a {
				t.Errorf("expect %s, got %s", e, a)
			}
			if e, a := tt.WantSignedName, signingName; e != a {
				t.Errorf("expect %s, got %s", e, a)
			}
			// The Authorization header's algorithm token tells SigV4 from SigV4a apart.
			authValue := strings.SplitN(req.Header.Get("Authorization"), " ", 2)
			wantAuth := "AWS4-HMAC-SHA256"
			if tt.WantV4a {
				wantAuth = "AWS4-ECDSA-P256-SHA256"
			}
			if e, a := wantAuth, authValue[0]; e != a {
				t.Errorf("expect %v, got %v", e, a)
			}
		})
	}
}
|
namespace MassTransit.KafkaIntegration.Specifications
{
    using GreenPipes;
    using MassTransit.Registration;
    using Transport;

    /// <summary>
    /// Specification for a Kafka producer. Participates in configuration
    /// validation (via <see cref="ISpecification"/>) and creates the factory
    /// that produces messages for a particular bus instance.
    /// </summary>
    public interface IKafkaProducerSpecification :
        ISpecification
    {
        /// <summary>
        /// Creates the producer factory bound to the given bus instance.
        /// </summary>
        IKafkaProducerFactory CreateProducerFactory(IBusInstance busInstance);
    }
}
|
# frozen_string_literal: true

# Load ActiveRecord and the relation internals that this gem decorates.
require "active_record"
require "active_record/relation"
require "active_record/relation/merger"
require "active_record/relation/query_methods"

# Gem-provided decorators and query-method extensions.
require "active_record_extended/predicate_builder/array_handler_decorator"

require "active_record_extended/active_record/relation_patch"

require "active_record_extended/query_methods/where_chain"
require "active_record_extended/query_methods/with_cte"
require "active_record_extended/query_methods/unionize"
require "active_record_extended/query_methods/any_of"
require "active_record_extended/query_methods/either"
require "active_record_extended/query_methods/inet"
require "active_record_extended/query_methods/json"
require "active_record_extended/query_methods/select"

# WhereClause internals changed between Rails 5.1 and 5.2, so pick the
# matching patch for the loaded ActiveRecord version.
# NOTE(review): any version outside ~> 5.1.0 (including pre-5.1) falls
# through to the 5.2 patch — confirm the supported version range.
if Gem::Requirement.new("~> 5.1.0").satisfied_by?(ActiveRecord.gem_version)
  require "active_record_extended/patch/5_1/where_clause"
else
  require "active_record_extended/patch/5_2/where_clause"
end
|
import bootstrap from "./bootstrap";

// Expose webpack's internal require on window (and log it) so the bundler's
// module registry can be inspected from the browser console.
// NOTE(review): this leaks bundler internals — confirm it is intended to
// ship outside of debugging builds.
window.__webpack_require__ = __webpack_require__
console.dir(window.__webpack_require__)

bootstrap(() => {});
|
/* eslint-disable no-undef */
/* eslint-disable indent */

// Hour labels for the 24h clock ring: '00' followed by '13' through '23'
// (the 1–12 labels come from the 12h ring below).
const getNumberOfHours24: Array<string> = [
  '00',
  ...Array.from({ length: 11 }, (_, index) => String(13 + index)),
];

// Hour labels for the 12h clock face, starting at the top with '12'.
const getNumberOfHours12: Array<string> = [
  '12',
  ...Array.from({ length: 11 }, (_, index) => String(index + 1)),
];

// Minute labels in 5-minute steps, zero-padded to two digits.
const getNumberOfMinutes: Array<string> = Array.from({ length: 12 }, (_, index) =>
  String(index * 5).padStart(2, '0'),
);
/**
 * Builds the HTML for the desktop timepicker modal.
 *
 * The clock face, AM/PM switch (12h mode only), optional keyboard-switch
 * icon, and footer buttons are all rendered from the given options. The
 * returned string is injected into the DOM as-is, so the template's
 * whitespace is part of the output.
 */
const getModalTemplate = (options: {
iconTemplate?: string;
selectTimeLabel?: string;
amLabel?: string;
pmLabel?: string;
cancelLabel?: string;
okLabel?: string;
enableSwitchIcon?: boolean;
animation?: boolean;
editable?: boolean;
clockType?: string;
}): string => {
const {
iconTemplate,
selectTimeLabel,
amLabel,
pmLabel,
cancelLabel,
okLabel,
enableSwitchIcon,
animation,
editable,
clockType,
} = options;
// hour/minute fields are contenteditable only when `editable` is set;
// the AM/PM block is omitted entirely in 24h mode.
return `
<div class="timepicker-ui-modal normalize" role="dialog" style='transition:${
animation ? 'opacity 0.15s linear' : 'none'
}'>
<div class="timepicker-ui-wrapper ">
<div class="timepicker-ui-header">
<div class="timepicker-ui-select-time">${selectTimeLabel}</div>
<div class="timepicker-ui-wrapper-time ${
clockType === '24h' ? 'timepicker-ui-wrapper-time-24h' : ''
}">
<div class="timepicker-ui-hour" role="button" contenteditable="${
editable ? true : false
}">05</div>
<div class="timepicker-ui-dots">:</div>
<div class="timepicker-ui-minutes" role="button" contenteditable="${
editable ? true : false
}">00</div>
</div>
${
clockType !== '24h'
? `
<div class="timepicker-ui-wrapper-type-time">
<div class="timepicker-ui-type-mode timepicker-ui-am" role="button" data-type="AM">${amLabel}</div>
<div class="timepicker-ui-type-mode timepicker-ui-pm" role="button" data-type="PM">${pmLabel}</div>
</div>
`
: ''
}
</div>
<div class="timepicker-ui-wrapper-landspace">
<div class="timepicker-ui-body">
<div class="timepicker-ui-clock-face">
<div class="timepicker-ui-dot"></div>
<div class="timepicker-ui-clock-hand">
<div class="timepicker-ui-circle-hand"></div>
</div>
<div class="timepicker-ui-tips-wrapper"></div>
${clockType === '24h' ? '<div class="timepicker-ui-tips-wrapper-24h"></div>' : ''}
</div>
</div>
<div class="timepicker-ui-footer">
${
enableSwitchIcon
? `
<div class="timepicker-ui-keyboard-icon-wrapper" role="button" aria-pressed="false" data-view="desktop">
${iconTemplate}
</div>`
: ''
}
<div class="timepicker-ui-wrapper-btn">
<div class="timepicker-ui-cancel-btn" role="button" aria-pressed="false">${cancelLabel}</div>
<div class="timepicker-ui-ok-btn" role="button" aria-pressed="false">${okLabel}</div>
</div>
</div>
</div>
</div>
</div>`;
};
/**
 * Builds the HTML for the mobile (keyboard input) timepicker modal.
 *
 * Unlike the desktop template there is no clock face: hours and minutes
 * are typed directly. The AM/PM switch is only rendered in 12h mode. The
 * returned string is injected into the DOM as-is.
 */
const getMobileModalTemplate = (options: {
enterTimeLabel?: string;
amLabel?: string;
pmLabel?: string;
cancelLabel?: string;
okLabel?: string;
iconTemplateMobile?: string;
minuteMobileLabel?: string;
hourMobileLabel?: string;
enableSwitchIcon?: boolean;
animation?: boolean;
clockType?: string;
}): string => {
const {
enterTimeLabel,
amLabel,
pmLabel,
cancelLabel,
okLabel,
iconTemplateMobile,
minuteMobileLabel,
hourMobileLabel,
enableSwitchIcon,
animation,
clockType,
} = options;
// contenteditable starts as "false"; presumably toggled on by the widget
// when the field gains focus — TODO confirm against the event handlers.
return `
<div class="timepicker-ui-modal normalize mobile" role="dialog" style='transition:${
animation ? 'opacity 0.15s linear' : 'none'
}'>
<div class="timepicker-ui-wrapper mobile">
<div class="timepicker-ui-header mobile">
<div class="timepicker-ui-select-time mobile">${enterTimeLabel}</div>
<div class="timepicker-ui-wrapper-time mobile">
<div class="timepicker-ui-hour mobile" contenteditable="false">12</div>
<div class="timepicker-ui-hour-text mobile">${hourMobileLabel}</div>
<div class="timepicker-ui-dots mobile">:</div>
<div class="timepicker-ui-minute-text mobile">${minuteMobileLabel}</div>
<div class="timepicker-ui-minutes mobile" contenteditable="false">00</div>
</div>
${
clockType !== '24h'
? `<div class="timepicker-ui-wrapper-type-time mobile">
<div class="timepicker-ui-type-mode timepicker-ui-am mobile" data-type="AM">${amLabel}</div>
<div class="timepicker-ui-type-mode timepicker-ui-pm mobile" data-type="PM">${pmLabel}</div>
</div>`
: ''
}
</div>
<div class="timepicker-ui-footer mobile" data-view="mobile">
${
enableSwitchIcon
? `
<div class="timepicker-ui-keyboard-icon-wrapper mobile" role="button" aria-pressed="false" data-view="desktop">
${iconTemplateMobile}
</div>`
: ''
}
<div class="timepicker-ui-wrapper-btn mobile">
<div class="timepicker-ui-cancel-btn mobile" role="button" aria-pressed="false">${cancelLabel}</div>
<div class="timepicker-ui-ok-btn mobile" role="button" aria-pressed="false">${okLabel}</div>
</div>
</div>
</div>
</div>`;
};
export {
getMobileModalTemplate,
getModalTemplate,
getNumberOfHours12,
getNumberOfHours24,
getNumberOfMinutes,
};
|
An MLDVerifierForVm is a verifier that runs the verification on the currently executing host VM.
Instance Variables
|
// Copyright 2020 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::borrow::BorrowMut;
use std::fs::File;
use std::io::BufReader;
use std::net::SocketAddr;
use std::path::Path;
use std::sync::Arc;
use axum::handler::get;
use axum::routing::BoxRoute;
use axum::AddExtensionLayer;
use axum::Router;
use axum_server;
use axum_server::tls::TlsLoader;
use axum_server::Handle;
use common_base::tokio;
use common_base::tokio::task::JoinHandle;
use common_exception::ErrorCode;
use common_exception::Result;
use tokio_rustls::rustls::internal::pemfile::certs;
use tokio_rustls::rustls::internal::pemfile::pkcs8_private_keys;
use tokio_rustls::rustls::AllowAnyAuthenticatedClient;
use tokio_rustls::rustls::Certificate;
use tokio_rustls::rustls::NoClientAuth;
use tokio_rustls::rustls::PrivateKey;
use tokio_rustls::rustls::RootCertStore;
use tokio_rustls::rustls::ServerConfig;
use crate::configs::Config;
use crate::servers::Server;
use crate::sessions::SessionManagerRef;
/// HTTP API server for a query node.
///
/// Serves health/config/log/cluster and debug endpoints over plain HTTP or,
/// when TLS key and certificate are configured, over HTTPS (optionally with
/// mTLS client authentication).
pub struct HttpService {
    // Shared session manager, exposed to handlers through an extension layer.
    sessions: SessionManagerRef,
    // Handle of the spawned server task; set once `start` has been called.
    join_handle: Option<JoinHandle<std::io::Result<()>>>,
    // axum-server handle used to query bound addresses and trigger shutdown.
    abort_handler: Handle,
}
impl HttpService {
    /// Creates a boxed, not-yet-started HTTP service.
    pub fn create(sessions: SessionManagerRef) -> Box<HttpService> {
        Box::new(HttpService {
            sessions,
            join_handle: None,
            abort_handler: axum_server::Handle::new(),
        })
    }

    /// Builds the rustls server configuration from the configured key and
    /// certificate files, then optionally enables mTLS client authentication.
    fn build_tls(config: &Config) -> Result<ServerConfig> {
        let tls_key = Path::new(config.query.api_tls_server_key.as_str());
        let tls_cert = Path::new(config.query.api_tls_server_cert.as_str());

        // Guard against an empty key file instead of panicking on remove(0);
        // only the first PKCS#8 key in the file is used.
        let mut keys = HttpService::load_keys(tls_key)?;
        if keys.is_empty() {
            return Err(ErrorCode::TLSConfigurationFailure(
                "No PKCS8 private key found for http service",
            ));
        }
        let key = keys.remove(0);
        let certs = HttpService::load_certs(tls_cert)?;

        let mut tls_config = ServerConfig::new(NoClientAuth::new());
        if let Err(cause) = tls_config.set_single_cert(certs, key) {
            return Err(ErrorCode::TLSConfigurationFailure(format!(
                "Cannot build TLS config for http service, cause {}",
                cause
            )));
        }

        HttpService::add_tls_pem_files(config, tls_config)
    }

    /// Enables client-certificate verification (mTLS) when a root CA PEM is
    /// configured; otherwise returns the configuration unchanged.
    fn add_tls_pem_files(config: &Config, mut tls_config: ServerConfig) -> Result<ServerConfig> {
        let pem_path = &config.query.api_tls_server_root_ca_cert;
        if let Some(pem_path) = HttpService::load_ca(pem_path) {
            log::info!("Client Authentication for http service.");

            let pem_file = File::open(pem_path.as_str())?;
            let mut root_cert_store = RootCertStore::empty();

            if root_cert_store
                .add_pem_file(BufReader::new(pem_file).borrow_mut())
                .is_err()
            {
                return Err(ErrorCode::TLSConfigurationFailure(
                    "Cannot add client ca in for http service",
                ));
            }

            let authenticated_client = AllowAnyAuthenticatedClient::new(root_cert_store);
            tls_config.set_client_certificate_verifier(authenticated_client);
        }

        Ok(tls_config)
    }

    /// Loads all certificates from a PEM file.
    fn load_certs(path: &Path) -> Result<Vec<Certificate>> {
        match certs(&mut BufReader::new(File::open(path)?)) {
            Ok(certs) => Ok(certs),
            Err(_) => Err(ErrorCode::TLSConfigurationFailure("invalid cert")),
        }
    }

    // currently only PKCS8 key supports for TLS setup
    fn load_keys(path: &Path) -> Result<Vec<PrivateKey>> {
        match pkcs8_private_keys(&mut BufReader::new(File::open(path)?)) {
            Ok(keys) => Ok(keys),
            Err(_) => Err(ErrorCode::TLSConfigurationFailure("invalid key")),
        }
    }

    // Client Auth(mTLS) CA certificate configuration
    fn load_ca(ca_path: &str) -> Option<String> {
        match Path::new(ca_path).exists() {
            false => None,
            true => Some(ca_path.to_string()),
        }
    }

    /// Assembles the API/debug routes and attaches the shared session manager.
    fn build_router(&self) -> Router<BoxRoute> {
        Router::new()
            .route("/v1/health", get(super::http::v1::health::health_handler))
            .route("/v1/config", get(super::http::v1::config::config_handler))
            .route("/v1/logs", get(super::http::v1::logs::logs_handler))
            .route(
                "/v1/cluster/list",
                get(super::http::v1::cluster::cluster_list_handler),
            )
            .route(
                "/debug/home",
                get(super::http::debug::home::debug_home_handler),
            )
            .route(
                "/debug/pprof/profile",
                get(super::http::debug::pprof::debug_pprof_handler),
            )
            .layer(AddExtensionLayer::new(self.sessions.clone()))
            .boxed()
    }

    /// Resolves the single bound address from what the axum-server handle
    /// reports. When binding a wildcard address (0.0.0.0) one socket per
    /// network interface may be reported; they must all share one port.
    ///
    /// Extracted so `start_with_tls` and `start_without_tls` share one
    /// implementation instead of duplicating this logic.
    fn first_listening_address(addresses: Option<Vec<SocketAddr>>) -> Result<SocketAddr> {
        match addresses {
            None => Err(ErrorCode::CannotListenerPort("")),
            Some(addresses) if addresses.is_empty() => Err(ErrorCode::CannotListenerPort("")),
            Some(addresses) => {
                let first_address = addresses[0];
                for address in addresses {
                    if address.port() != first_address.port() {
                        return Err(ErrorCode::CannotListenerPort(""));
                    }
                }
                Ok(first_address)
            }
        }
    }

    /// Spawns the server with TLS enabled and returns the bound address.
    async fn start_with_tls(&mut self, listening: SocketAddr) -> Result<SocketAddr> {
        log::info!("Http API TLS enabled");

        let loader = Self::tls_loader(self.sessions.get_conf());
        let server = axum_server::bind_rustls(listening.to_string())
            .handle(self.abort_handler.clone())
            .loader(loader.await?)
            .serve(self.build_router());

        self.join_handle = Some(tokio::spawn(server));
        // Wait until the sockets are actually bound before reporting the address.
        self.abort_handler.listening().await;
        Self::first_listening_address(self.abort_handler.listening_addrs())
    }

    /// Builds and eagerly loads the TLS configuration for axum-server.
    async fn tls_loader(config: &Config) -> Result<TlsLoader> {
        let mut tls_loader = TlsLoader::new();
        tls_loader.config(Arc::new(Self::build_tls(config)?));

        match tls_loader.load().await {
            Ok(_) => Ok(tls_loader),
            Err(cause) => Err(ErrorCode::TLSConfigurationFailure(format!(
                "Cannot load tls config, cause {}",
                cause
            ))),
        }
    }

    /// Spawns the server without TLS and returns the bound address.
    async fn start_without_tls(&mut self, listening: SocketAddr) -> Result<SocketAddr> {
        log::warn!("Http API TLS not set");

        let server = axum_server::bind(listening.to_string())
            .handle(self.abort_handler.clone())
            .serve(self.build_router());

        self.join_handle = Some(tokio::spawn(server));
        // Wait until the sockets are actually bound before reporting the address.
        self.abort_handler.listening().await;
        Self::first_listening_address(self.abort_handler.listening_addrs())
    }
}
#[async_trait::async_trait]
impl Server for HttpService {
    /// Requests a graceful shutdown and waits for the server task to finish,
    /// logging (but not propagating) any join error.
    async fn shutdown(&mut self) {
        self.abort_handler.graceful_shutdown();

        if let Some(join_handle) = self.join_handle.take() {
            if let Err(error) = join_handle.await {
                log::error!(
                    "Unexpected error during shutdown Http API handler. cause {}",
                    error
                );
            }
        }
    }

    /// Starts the service, enabling TLS only when both the server key and
    /// certificate are configured; returns the actually bound address.
    async fn start(&mut self, listening: SocketAddr) -> Result<SocketAddr> {
        let config = &self.sessions.get_conf().query;
        // if/else instead of matching on a bool (clippy::match_bool).
        if config.api_tls_server_key.is_empty() || config.api_tls_server_cert.is_empty() {
            self.start_without_tls(listening).await
        } else {
            self.start_with_tls(listening).await
        }
    }
}
|
/*
* *
* * Created by Marcin Wasilewski on 26/07/20 12:03
* * Copyright (c) 2020 . All rights reserved.
* * Last modified 26/07/20 11:47
*
*/
import 'package:dartz/dartz.dart';
import 'package:flutter/material.dart';
import 'package:rotashiftcleanarch/core/error/failures.dart';
import 'package:rotashiftcleanarch/domain/entities/day_off.dart';
import 'package:rotashiftcleanarch/domain/usecases/Params/param.dart';
import 'package:rotashiftcleanarch/domain/value_objects/rota_year.dart';
// Future<Either<Failure, Map<DateTime,List<String>>>>
// getAnnualDaysOff(RotaYear rotaYear);
/// Repository for reading and writing annual days off.
abstract class AnnualDaysOffRepository {
  //Future<Either<Failure, List<DayOff>>> getAnnualDaysOff({int rota, int year});
  // The reason not to use entities as transfer objects (clean architecture):
  // 1. Entities are immutable, and a DayOff entity cannot be created here
  //    because the firebase_key is not known yet.
  // That is why a dedicated DTO (InsertParam) is used instead.

  /// Persists a single day off in the cloud store. Returns the stored
  /// record's key on success or a [Failure] on error.
  Future<Either<Failure, String>> insertDayOffIntoCloud(
      {@required InsertParam insertParam});

  /// Loads all days off of the given rota year, grouped by date.
  Future<Either<Failure, Map<DateTime, List<DayOff>>>> getAnnualDaysOff({
    @required RotaYear rotaYear,
  });
}
|
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { TypiCodePost } from '@app/shared/model';
import { Observable, BehaviorSubject } from 'rxjs';
@Injectable({
  providedIn: 'root'
})
export class PostsService {
  // NOTE(review): plain http — confirm whether https should be used.
  resourceUrl = 'http://jsonplaceholder.typicode.com/posts';

  constructor(private httpClient: HttpClient) {
  }

  /** Fetches all posts. */
  getAllPosts(): Observable<Array<TypiCodePost>> {
    const headers = new HttpHeaders({
      'Accept': 'application/json'
    });
    return this.httpClient.get<Array<TypiCodePost>>(this.resourceUrl, { headers });
    // const mockPosts = new Array<TypiCodePost>();
    // for(let i = 0; i< 101; i++) {
    //   const item = new TypiCodePost('' + i, 'Name abc ' + i);
    //   mockPosts.push(item);
    // }
    // const mockPostsSubject: BehaviorSubject<Array<TypiCodePost>> = new BehaviorSubject(mockPosts);
    // return mockPostsSubject.asObservable();
  }

  /** Creates a new post. */
  create(post: TypiCodePost): Observable<TypiCodePost> {
    const headers = new HttpHeaders({
      'Content-type': 'application/json; charset=utf-8',
      'Accept': 'application/json'
    });
    // Fix: HttpClient.post takes (url, body, options) — previously the
    // options object was sent as the request body and `post` was dropped.
    return this.httpClient.post<TypiCodePost>(this.resourceUrl, post, { headers });
  }

  /** Partially updates an existing post. */
  update(post: TypiCodePost): Observable<TypiCodePost> {
    const headers = new HttpHeaders({
      'Content-type': 'application/json; charset=utf-8',
      'Accept': 'application/json'
    });
    // Fix: HttpClient.patch takes (url, body, options) — previously the
    // options object was sent as the request body and `post` was dropped.
    return this.httpClient.patch<TypiCodePost>(this.resourceUrl + '/' + post.id, post, { headers });
  }

  /** Deletes a post by id (delete takes no request body). */
  delete(post: TypiCodePost): Observable<TypiCodePost> {
    const headers = new HttpHeaders({
      'Content-type': 'application/json; charset=utf-8',
      'Accept': 'application/json'
    });
    return this.httpClient.delete<TypiCodePost>(this.resourceUrl + '/' + post.id, { headers });
  }
}
|
(ns anaphorae.partial-test
(:refer-clojure :exclude [partial])
(:require [clojure.test :refer :all]
[anaphorae.partial :refer :all]))
;; Exercises the anaphoric `partial`: without placeholders it behaves like
;; clojure.core/partial; `%` placeholders mark positions that are filled by
;; the call-time arguments in order, and `%&` splices in all remaining args.
(deftest test-partial
  (testing "single argument"
    (let [x2 (partial * 2)]
      (is (= 4 (x2 2)))))

  (testing "multiple arguments"
    (let [part (partial str "a")]
      (is (= "abc" (part "b" "c")))))

  ;; `%` is bound to the first call-time argument.
  (testing "single %"
    (let [part (partial str % "b" "c")]
      (is (= "abc" (part "a")))))

  ;; Multiple `%` placeholders consume call-time arguments left to right.
  (testing "multiple %s"
    (let [part (partial str % "b" % "d")]
      (is (= "abcd" (part "a" "c")))))

  (testing "%& captures the rest of the args"
    (let [part (partial str "a" %& "d")]
      (is (= "abcd" (part "b" "c"))))))
|
package models
import (
"time"
)
// Categroy models an article/content category.
//
// NOTE(review): the type name looks like a typo of "Category"; renaming it
// would break existing callers, so it is only flagged here.
type Categroy struct{
	Id      int64
	Title   string
	Created time.Time
	// NOTE(review): `views` is unexported, so reflection-based ORMs and
	// encoders will ignore it — confirm whether it should be `Views`.
	views int64
}
|
-- Report descriptions together with their category names,
-- ordered alphabetically by description, then category.
SELECT rep.Description,
       cat.Name AS CategoryName
  FROM Reports rep
       INNER JOIN Categories cat
               ON cat.Id = rep.CategoryId
 ORDER BY rep.Description,
          cat.Name
|
๏ปฟusing ICSharpCode.AvalonEdit.Highlighting;
using ICSharpCode.AvalonEdit.Highlighting.Xshd;
using PS2Disassembler.ViewModel;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Text;
using System.Windows;
using System.Xml;
using Microsoft.Win32;
using PS2Disassembler.Core;
namespace PS2Disassembler
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
public partial class MainWindow : Window
{
    private readonly MainViewVM _vm;

    public MainWindow()
    {
        InitializeComponent();

        // Load the MIPS syntax-highlighting definition embedded in the assembly.
        var assembly = this.GetType().Assembly;
        using (var stream = assembly.GetManifestResourceStream(assembly.GetName().Name + ".MIPSSyntax.xshd"))
        {
            using (var reader = new XmlTextReader(stream))
            {
                AssemblyEditor.SyntaxHighlighting = HighlightingLoader.Load(reader, HighlightingManager.Instance);
            }
        }

        _vm = new MainViewVM();
        this.DataContext = _vm;
    }

    protected override void OnInitialized(EventArgs e)
    {
        base.OnInitialized(e);
    }

    // Lets the user pick an input file, converts it into a raw instruction
    // byte stream, disassembles it, and shows the result plus elapsed time.
    //
    // NOTE(review): the input appears to be a fixed-width text dump: 41-byte
    // records, each containing four 10-byte groups from which 8 bytes are
    // extracted in a swapped order (offsets 7,8,5,6,2,3,0,1) — confirm the
    // exact dump format against the producer of these files.
    private void ButtonBase_OnClick(object sender, RoutedEventArgs e)
    {
        var openFileDialog = new OpenFileDialog();
        var parsedBuffers = new List<byte[]>();
        var disassembler = new Disassembler();
        var outputBuilder = new StringBuilder();
        var stopWatch = new Stopwatch();

        if (openFileDialog.ShowDialog() == true)
        {
            stopWatch.Start();
            var fileName = openFileDialog.FileName;
            var fInfo = new FileInfo(openFileDialog.FileName);
            using var fileStream = fInfo.OpenRead();

            //bool success = GC.TryStartNoGCRegion(110000000);

            // Read in multiples of the 41-byte record size; the parsed buffer
            // holds the corresponding 32 extracted bytes per 41-byte record.
            var buffer = new byte[41 * 2048];
            var parsedBuffer = new byte[32 * 2048];
            do
            {
                var inputLength = 0;
                var bufferDataLength = FillBuffer(fileStream, buffer);
                if (bufferDataLength == 0)
                    break;

                // Walk each 41-byte record (i) and each 10-byte group (j),
                // copying 8 reordered bytes per group into parsedBuffer.
                for (int i = 0, pI = 0; i < bufferDataLength; i += 41)
                {
                    for (int j = i; j < i + 40; j += 10, pI += 8)
                    {
                        if (j + 8 < buffer.Length)
                        {
                            parsedBuffer[pI] = buffer[j + 7];
                            parsedBuffer[pI + 1] = buffer[j + 8];
                            parsedBuffer[pI + 2] = buffer[j + 5];
                            parsedBuffer[pI + 3] = buffer[j + 6];
                            parsedBuffer[pI + 4] = buffer[j + 2];
                            parsedBuffer[pI + 5] = buffer[j + 3];
                            parsedBuffer[pI + 6] = buffer[j];
                            parsedBuffer[pI + 7] = buffer[j + 1];
                            inputLength += 8;
                        }
                    }
                }
                disassembler.DisassembleByteArray(parsedBuffer, inputLength, ref outputBuilder);
                //parsedBuffers.Add(parsedBuffer);
            } while (true);
        }
        // NOTE(review): the status/output updates below run even when the
        // dialog was cancelled — confirm whether that is intended.
        stopWatch.Stop();
        _vm.StatusText = stopWatch.Elapsed.ToString();

        //GC.EndNoGCRegion();

        _vm.Output.BeginUpdate();
        _vm.Output.Replace(0, _vm.Output.TextLength, outputBuilder.ToString());
        _vm.Output.EndUpdate();
        //_vm.DisassembleByteArrays(parsedBuffers);
    }

    // Reads from the stream until the buffer is full or EOF is reached;
    // returns the number of bytes actually read.
    private static int FillBuffer(Stream stream, byte[] buffer)
    {
        int read;
        int totalRead = 0;
        do
        {
            read = stream.Read(buffer, totalRead, buffer.Length - totalRead);
            totalRead += read;
        } while (read > 0 && totalRead < buffer.Length);

        return totalRead;
    }
}
}
|
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_samples/apps/music_app/bloc/music_player_bloc.dart';
import 'package:flutter_samples/apps/music_app/models/music.dart';
import 'package:provider/provider.dart';
/// Displays the current track's title and artist, cross-fading whenever the
/// player's current track changes.
class NameMusicAndArtist extends StatelessWidget {
  const NameMusicAndArtist({Key key}) : super(key: key);

  @override
  Widget build(BuildContext context) {
    // listen: false — rebuilds are driven by the ValueListenableBuilder below,
    // not by Provider.
    final musicPlayerBloc = Provider.of<MusicPlayerBLoC>(context, listen: false);

    return Padding(
      padding: const EdgeInsets.only(top: 10),
      child: ValueListenableBuilder(
        valueListenable: musicPlayerBloc.currentTrack,
        builder: (_, track, __) {
          final music = musics[track];
          return AnimatedSwitcher(
            duration: const Duration(milliseconds: 300),
            child: AspectRatio(
              // Keyed by artist so AnimatedSwitcher treats each track's
              // subtree as a new child and animates the transition.
              key: Key(music.artist),
              aspectRatio: 16 / 2,
              child: Column(
                children: [
                  Text(music.name, style: Theme.of(context).textTheme.headline6),
                  const SizedBox(height: 10),
                  Text(music.artist,
                      style: Theme.of(context)
                          .textTheme
                          .bodyText1
                          .copyWith(color: Colors.white54, fontWeight: FontWeight.w400)),
                ],
              ),
            ),
          );
        },
      ),
    );
  }
}
|
#!/bin/sh
#
# Copyright (c) 2016, Linaro Limited
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
#
# This test is intended to exercise the pkt_mmap_vlan_insert() feature of the
# linux-generic packet mmap pktio: one process replays a pcap file onto a
# veth pair while a second captures the traffic into an output pcap.
#

# directory where platform test sources are, including scripts
TEST_SRC_DIR=$(dirname $0)

# exit codes expected by automake for skipped tests
TEST_SKIPPED=77

# directories where binary can be found:
# -in the validation dir when running make check (intree or out of tree)
# -in the script directory, when running after 'make install', or
# -in the validation when running standalone intree.
# -in the current directory.
# running stand alone out of tree requires setting PATH
PATH=${TEST_DIR}/../mmap_vlan_ins:$PATH
PATH=`pwd`/mmap_vlan_ins:$PATH
PATH=$(dirname $0):$PATH
PATH=.:$PATH

bin_path=$(which plat_mmap_vlan_ins${EXEEXT})
if [ -x "$bin_path" ] ; then
	echo "running with plat_mmap_vlan_ins: $bin_path"
else
	echo "cannot find plat_mmap_vlan_ins: please set you PATH for it."
	pwd
	exit 1
fi

# Use installed pktio env or for make check take it from platform directory
if [ -f "./pktio_env" ]; then
	. ./pktio_env
elif [ -f ${TEST_SRC_DIR}/pktio_env ]; then
	. ${TEST_SRC_DIR}/pktio_env
else
	echo "BUG: unable to find pktio_env!"
	echo "pktio_env has to be in current directory or"
	echo " in platform/\$ODP_PLATFORM/test."
	echo "ODP_PLATFORM=\"$ODP_PLATFORM\""
	exit 1
fi

setup_pktio_env
if [ $? -ne 0 ]; then
	# Fix: 'return' is only valid inside a function or a sourced script;
	# use 'exit' with the automake skip code defined above.
	exit ${TEST_SKIPPED}
fi

PCAP_IN=`find . ${TEST_DIR} $(dirname $0) -name vlan.pcap -print -quit`
echo "using PCAP_IN = ${PCAP_IN}"
PCAP_OUT=vlan_out.pcap

# Listen on veth pipe and write to pcap Send pcap
plat_mmap_vlan_ins${EXEEXT} pktiop0p1 pcap:out=${PCAP_OUT} \
	00:02:03:04:05:06 00:08:09:0a:0b:0c &
P1=$!
# Send pcap file to veth interface
plat_mmap_vlan_ins${EXEEXT} pcap:in=${PCAP_IN} pktiop0p1 \
	01:02:03:04:05:06 01:08:09:0a:0b:0c &
P2=$!

sleep 1
kill -s INT ${P1}
kill -s INT ${P2}
# NOTE(review): this captures the exit status of the 'kill' above, not of the
# test binaries — confirm whether 'wait ${P1}; wait ${P2}' was intended.
ret=$?

rm -f ${PCAP_OUT}
cleanup_pktio_env

exit $ret
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
-- | Interfacing with the wallet (for making payments)
module Plutus.Trace.Effects.EmulatedWalletAPI(
EmulatedWalletAPI(..)
, liftWallet
, payToWallet
, handleEmulatedWalletAPI
) where
import Control.Monad.Freer (Eff, Member, subsume, type (~>))
import Control.Monad.Freer.Error (Error)
import Control.Monad.Freer.Extras (raiseEnd)
import Control.Monad.Freer.TH (makeEffect)
import Ledger.Tx (txId)
import Ledger.TxId (TxId)
import Ledger.Value (Value)
import Wallet.API (WalletAPIError, defaultSlotRange, payToPublicKeyHash)
import Wallet.Effects (WalletEffect)
import Wallet.Emulator qualified as EM
import Wallet.Emulator.MultiAgent (MultiAgentEffect, walletAction)
import Wallet.Emulator.Wallet (Wallet)
-- | Effect for running a wallet action ('WalletEffect', with
-- 'WalletAPIError' errors) on behalf of a specific emulated wallet.
data EmulatedWalletAPI r where
    LiftWallet :: Wallet -> Eff '[WalletEffect, Error WalletAPIError] a -> EmulatedWalletAPI a

-- Generates the 'liftWallet' smart constructor for the effect above.
makeEffect ''EmulatedWalletAPI
-- | Make a payment from one wallet to another.
--
-- Submits a 'payToPublicKeyHash' with the default slot range on behalf of
-- the source wallet and returns the id of the resulting transaction.
payToWallet ::
    forall effs.
    Member EmulatedWalletAPI effs
    => Wallet -- ^ Source wallet (pays the amount)
    -> Wallet -- ^ Target wallet (receives the amount)
    -> Value -- ^ Amount to transfer
    -> Eff effs TxId
payToWallet source target amount = do
    ctx <- liftWallet source
        $ payToPublicKeyHash defaultSlotRange amount (EM.walletPubKeyHash target)
    -- Only emulated (mock) transactions are supported here.
    case ctx of
        Left _   -> error "Plutus.Trace.EmulatedWalletAPI.payToWallet: Expecting a mock tx, not an Alonzo tx"
        Right tx -> pure $ txId tx
-- | Handle the 'EmulatedWalletAPI' effect using the emulator's
-- 'MultiAgent' effect.
handleEmulatedWalletAPI ::
    ( Member MultiAgentEffect effs
    )
    => EmulatedWalletAPI
    ~> Eff effs
handleEmulatedWalletAPI = \case
    LiftWallet w action ->
        -- Run the wallet's action inside the multi-agent emulator:
        -- 'raiseEnd' re-targets the action's effect row, and the two
        -- 'subsume' calls fold WalletEffect and Error into the agent's row.
        walletAction w
            $ subsume
            $ subsume
            $ raiseEnd action
|
{{-- Teachers ("Docentes") page: extends the base layout and sets the HTML title. --}}
@extends('app')
@section('html_title')
Docentes
@endsection
|
PoCoWeb is a web based post-correction system for OCRed historical
documents. It is based on
[PoCoTo](https://github.com/cisocrgroup/PoCoTo). PoCoWeb consists of
a backend that offers a REST API for the post-correction and a
frontend that facilitates the post-correction of historical documents
using a web-browser.
Users can upload their OCRed documents and correct them in the web
front end. The corrected documents can then be downloaded and further
processed. PoCoWeb understands various OCR formats that can be used.
Any corrections are written back into the original format of the
documents. This makes it possible to integrate the manual
post-correction of OCRed documents into other digitalisation
work flows.
As PoCoTo, PoCoWeb is connected with a language
[profiler](https://github.com/cisocrgroup/Profiler) that enables
semi-automatic correction of historical documents. The profiler is
able to differentiate between historical spelling variation and real
OCR errors in the documents and generates correction suggestions for
suspicious words.
<p align="center">
<img width="50%" src="assets/images/doc/welcome.png" alt="Main page"/>
</p>
PoCoWeb offers:
* Simple user management to enable parallel correction of documents
packages.
* The possibility to split documents into packages to parallel the
manual post-correction.
* A connection to the language profiler that:
* generates correction suggestions.
* lists suspicious words and error patterns.
* adapts to the manual correction on the document.
* An interface to an automatic correction system that:
* uses the language profiler.
* automatically generates lexicon extensions for the profiler.
* corrects the document
* allows for interactive manual inspection of the correction.
* The possibility to download a global pool of corrected lines in the
whole corpus to be used for OCR-training etc.
* An extensive REST API to enable the automation of different aspects
of the post-correction.
- - -
## Table of contents
* [Login](#user-content-login)
* [Users](#user-content-users)
* [Account settings](#user-content-account-settings)
* [User management](#user-content-user-management)
* [Projects](#user-content-projects)
* [Creating new projects](#user-content-upload-new-project)
* [Project archives](#user-content-project-archives)
* [Project overview](#user-content-project-table)
* [Deleting of projects and packages](#user-content-delete-projects)
* [Splitting and reclaiming of projects and packages](#user-content-split-projects)
* [Downloading projects](#user-content-download-project)
* [Profiling projects](#user-content-profile-project)
* [Post correction](#user-content-post-correction)
* [Page correction](#user-content-page-correction)
* [Navigation bar](#user-content-navigation-bar)
* [Concordance view](#user-content-concordance-view)
* [Installation](#user-content-installation)
* [Ground-truth pool](#user-content-pool)
* [Services](#user-content-overview-services)
- - -
<a id='login'></a>
## Login
In order to use PoCoWeb, each user has to login with an appropriate
user account using the login link at the right top of the page. Users
can logout again using the logout link at the same position.
<p align="center">
<img width="50%" src="assets/images/doc/login.png" alt="PoCoWeb login"/>
</p>
- - -
<a id='users'></a>
## Users
In general there are two kinds of user accounts in PoCoWeb. The two
types have different permissions in the system:
1. Administrator accounts
* Create new administrator and normal users accounts
* Delete user accounts
* Upload new documents and create new
[projects](#user-content-projects)
* Split projects into [packages](#user-content-projects)
* [Assign](#user-content-split-projects) packages to different user
accounts
* [Profile](#user-content-profile-project) projects.
* [Post correct](#user-content-post-correction) projects and
packages.
* [Delete](#user-content-delete-projects) documents and packages.
* [Download](#user-content-download-project) corrected projects.
* Download a [pool](#user-content-pool) of corrected projects.
2. Normal user accounts
* Post correct [packages](#user-content-projects) that
where assigned to them
* [Profile](#user-content-profile-project) projects.
* [Reassign](#user-content-split-projects) packages back to their
original owner.
- - -
<a id='account-settings'></a>
### Account settings
Users can manage their account settings using the Account tab on the page.
Each user can only manage their own user's settings. It is not possible to change
the settings of another user account even if you are logged in with an
administrator account.
<p align="center">
<img width="50%" src="assets/images/doc/user_account.png" alt="Account settings"/>
</p>
In order to update user settings or to change the user's password,
fill in or change the appropriate fields and click on the 
button.
If you want to delete your account click on the  button. This
will delete the user and all its projects and packages. Use with
caution.
Note: Using the account settings is the only way to delete an
administrator account (see [User
management](#user-content-user-management) below). So in order to
delete an administrator account one has to login with the account and
delete it using the account settings page.
- - -
<a id='user-management'></a>
### User management
User management is done via the User tab. You have to be logged in
with an administrator account in order to access and use the user
management page.
<p align="center">
<img width="50%" src="assets/images/doc/users_list.png" alt="User management"/>
</p>
The user management page consists of an overview list of all users in
the system. In order to remove a user account (the user cannot be an
administrator), click on the
 button in the
according user entry in the user table.
To delete an administrator account log in with this account, go to the
[Account tab](#user-content-account-settings) and click to the  button.
To create new user accounts Click to the  button and fill
out the input fields accordingly. If the account should be an
administrator account, do not forget to check the  check box.
<p align="center">
<img width="50%" src="assets/images/doc/user_new_modal.png" alt="Create a user"/>
</p>
- - -
<a id='projects'></a>
## Projects
Project management is done via the project tile or the project tab.
This page lists all available projects and packages (see below) that
your user owns. On this page you can upload new project archives (see
below). In order to process a project or package simply select the
appropriate entry in the project table.
<p align="center">
<img width="50%" src="assets/images/doc/project_list.png" alt="Project list"/>
</p>
In the list you can see some basic information about the available
projects and packages. Projects and packages are marked with either
the project symbol  or the package symbol
.
From this page you can also [create new
projects](#user-content-upload-new-project) and download a
[pool](#user-content-pool) of all you user's projects or all globally
pooled projects.
PoCoWeb uses two different kinds of correctable documents. *Projects*
on the one hand represent whole documents or books that should be
corrected. *Packages* on the other hand are smaller subsets of
projects that contain a subset of the pages of their parent project.
Two packages with the same parent never contain overlapping pages.
This makes it possible for two different users to correct different
packages of the same project in parallel.
Only administrators can upload new projects, split them into a number
of packages and assign those packages to different users. It is not
possible to assign an existing project to a different user.
<div class='doc-note'> Note: Since it is possible to correct a whole
project as well, one should never correct a project while other users
could be correcting an associated package. This could lead to cases
where one user accidentally overrides an other users work. </div>
<div class='doc-note'> Note: For historical reasons the api uses the
term ยซbooksยป to refer to projects and the term ยซprojectsยป to refer to
packages. So for example in the REST API `projectId` refers to the id
of a package and `bookId` refers to the id of a project. Hopefully
this issue can be solved soon. </div>
<a id='upload-new-project'></a>
### Creating new projects
If you are logged in with an administrator account, you can create a
new project. Fill in the meta data fields in the mask, select the
according [project archive](#user-content-project-archives) on your
computer and click on the
 button. Depending on
the size of the project archive, the uploading of the project can take
a while.
Do not forget to set the right language for the new project if you are
planning to use the profiler. The language field lists all available
languages for the chosen profiler.
<p align="center">
<img width="50%" src="assets/images/doc/create_new_project.png" alt="Project list"/>
</p>
The [PoCoWeb back-end](#user-content-PoCoWeb-backend) analyzes the
project archive, processes its OCR and image files and publishes the
project. If the uploading was successful, you should see the new
project in the project table.
The project's author, title and year can be filled out as it seems
fit. These values are not used internally and are there for easy
reference.
A project's profiler and its language are settings specific for the
language profiler. If you plan to use a profiler for the post
correction of your project, you have to set them accordingly. The
normal installation of PoCoWeb comes with a local configured profiler,
you can use `default` or `local` as value for your profiler. If not
or if you want to use another profiler, you have to specify its URL.
<a id='project-archives'></a>
### Project archives
A project archive is a zipped directory structure that contains the
OCR and image files for the pages of a document. Image files should
be encoded as PNG files if possible, but JPEG or TIFF encoded
images are fine, too. The OCR files should be either [ABBYY-XML](),
[ALTO-XML](), [PAGE-XML]() or [hOCR]() encoded files.
The back-end tries its best to automatically search and order the
pages in project archives. Therefore the matching OCR and image files
should have the same name (without the extension). It does not matter
in which directory structure the different image and OCR files reside.
If the matching OCR and image files do not have the same filenames for any
reason, you can add a [METS/MOTS]() meta data file to the archive, in
which the page ordering and the association between image and OCR
files is specified.
Example structure of a project archive:
```
archive
โโโ img
โย ย โโโ page-0001.png
โย ย โโโ page-0002.png
โย ย โโโ page-0003.png
โโโ ocr
ย ย โโโ page-0001.xml
ย ย โโโ page-0002.xml
ย ย โโโ page-0003.xml
```
There is no need to separate the image and OCR files with different directories.
You can also use a flat directory structure:
```
archive
โโโ page-0001.png
โโโ page-0001.xml
โโโ page-0002.png
โโโ page-0002.xml
โโโ page-0003.png
โโโ page-0003.xml
```
It is also possible to have the files in a page directory structure like this:
```
archive
โโโ page-0001
โย ย โโโ page.xml
โย ย โโโ page.png
โโโ page-0003
โย ย โโโ page.xml
โย ย โโโ page.png
โโโ page-0002
ย ย โโโ page.xml
ย ย โโโ page.png
```
If you use [ocropy](https://github.com/tmbdev/ocropy),
you can also use its project structure directly to upload a project archive:
```
archive
โโโ book
โโโ 0001.png
โโโ 0001
โย ย โโโ 010001.bin.png
โย ย โโโ 010001.txt
โ โโ ...
โโโ 0002.png
โโโ 0002
โย ย โโโ 010001.bin.png
โย ย โโโ 010001.txt
โ โโ ...
โโโ 0003.png
โโโ 0003
ย ย โโโ 010001.bin.png
ย ย โโโ 010001.txt
โโ ...
```
<a id='project-table'></a>
### Project overview
If you select a project or package the system opens the project
overview. Depending if you are handling a project or package and if
you are logged as normal user or as administrator you will see a
different page with different edit options.
<p align="center">
<img width="50%" src="assets/images/doc/project_overview.png" alt="Project overview"/>
</p>
On the top the system presents you some basic information about the
project. Below you see the correction status of the project, listing
the percentage of corrected lines and tokens and the (estimated) word
error rate of the project.
<div class='doc-note'> Note: The word error rate is based only on the
manually corrected tokens. As long as not all tokens are manually
corrected, the number is only a rough estimate.</div>
Below is a list of tiles that offer access to different edit options
(the actual list may be missing some of the tiles):
1. Click on the __Order Profile__ tile to
[profile](#user-content-profile-project) the project.
2. Click on the __Automatic Postcorrection__ tile to start the
automatic post-correction.
3. Click on the __Manual Postcorrection__ tile to start to [manually
correct](#user-content-post-correction) the document.
4. Click on the __Adaptive tokens__ tile to get a list of the
adaptive tokens (manually corrected tokens) in the project.
5. Click on the __Split__ tile to
[split](#user-content-split-projects) a project into packages.
6. Click on the __Edit__ tile to edit the meta information about
the project.
7. Click on the __Download__ tile to
[download](#user-content-download-project) the project.
8. Click on the __Delete__ tile to
[delete](#user-content-delete-projects) this project.
9. Click on the __Reclaim package__ tile to
[reclaim](#user-content-split-projects) all packages for the
project.
<a id='delete-projects'></a>
### Deleting projects and packages
Both projects and packages can be deleted. If you delete a package
only the according package is deleted. No content of the parent
project is touched. On the other hand, if you delete a project all
content is deleted and is irretrievable lost.
You cannot delete a project that contains packages. So in order to
delete a project one has to [reclaim](#user-content-split-projects)
all packages and delete them beforehand.
<a id='split-projects'></a>
### Splitting and reclaiming of projects and packages
You can split a project into different packages and assign each
package to a different user (both normal and administrators).
Packages are generated by distributing the pages of the project as
evenly as possible among the packages. You can select to randomly
distribute the pages instead of sequentially assigning them.
<p align="center">
<img width="50%" src="assets/images/doc/split_project_modal.png" alt="Split project"/>
</p>
If you are the owner of a project, you can forcefully reclaim all
packages. If you are assigned a package you can reassign it to the
project's owner.
<a id='download-project'></a>
### Downloading projects
You can download an archive of a project. It is not possible to
download archives of packages. The PoCoWeb back-end will then
generate a project archive and you will be asked a location to store
the archive.
The project archive will retain the same structure as the project archive,
that was used to create the project. All corrections are written back into
the original OCR files.
Additionally the archive contains a list of all adaptive tokens of the last
profiler run over the document and a page and line segmented directory structure,
that can directly be used with ocropy.
<a id='profile-project'></a>
### Profiling projects
You can order a document profile for packages and projects. If you
order a profile for a sub package, the profiler will be run over the
package's parent project. There can always be only one profiling
process for a document. More generally there can always be only one
background job (profiling, lexicon extension or automatic post
correction) for any project.
The profiling is done in the background. If the profiling is done,
various helpers are available for the post correction of the profiled project
and its associated packages:
* You will see a list of suspicious words in the manual correction.
* You will see a list of frequent error patterns in the manual correction.
* You will be able to inspect the list of adaptive tokens.
After the profiling is done, you can inspect the list of *adaptive
tokens*. Adaptive tokens are generated by the profiler. Whenever a
manually corrected token is found during the profiling, the profiler
tries to match the correction to a known dictionary entry. If no such
match can be generated, a new adaptive token is generated and inserted
into a temporary dictionary for this document. For this reason
Adaptive tokens are possible candidates for new dictionary entries.
For more information about the profiling and adaptive tokens
see the [profiler paper]() and [profiler paper 2]()
- - -
<a id='post-correction'></a>
## Post correction
After clicking on the __Manual Postcorrection__ tile you will be
directed to the project's or package's first page and you can start to
manually post-correct the first page. In general the post
correction is the same for projects and packages. Every statement
about the correction of projects also applies to the correction of
packages.
<p align="center">
<img width="50%" src="assets/images/doc/post_correct_project.png" alt="Page view"/>
</p>
Other than PoCoTo the post correction with PoCoWeb is line based.
Generally you correct whole lines of the document and not single tokens.
The page correction view presents the lines of the document and lets
you correct each line individually.
<a id='page-correction'></a>
### Page correction
The page correction view shows the lines of a page of the project.
You can use the forward and backward buttons of the navigation bar to
go to the next, previous, first and last pages of the project. It is
also possible to directly select specific pages of the project.
<p align="left">
<img width="25%" src="assets/images/doc/page_navigation.png" alt="Page navigation"/>
For each line the image of the line and the according OCR text are
shown. Lines and tokens are marked with green if they have been
manually corrected or in orange if the according lines or tokens have
been automatically corrected (either by a script or by the automatic
post-correction system).
If you click in the text you can edit the whole line. After you have
corrected a line you can click on the
 button to correct the
line and send the correction to the back-end. After the whole line has
been corrected, the line is shown with a green background to mark
it as manually corrected.
<p align="center">
<img width="75%" src="assets/images/doc/select_token.png" alt="Token selection"/>
</p>
In general you can insert every character into the text field. There
are no special characters or escape sequences that get interpreted by
the input field. The only exceptions are the special escape
sequences of the form `\\u017f` or `\\u{10FFFF}` that can be used to
enter arbitrary Unicode points into the text field. E.g
`Wa\\u017f\\u017fer` gets interpreted as `Waลฟลฟer`. In the unlikely
case that you have to literally insert something like `\\u017f`, you
can use an additional leading `\\` to escape the special
interpretation of the sequence: `\\\\u017f`.
If you are editing a line, you can hit the _Enter_ key to correct the
current line. You can use the _Up_ and _Down_ arrow keys to navigate
to the previous or next line.
You can click on the __Show line image__ button to see an overview
over the whole line's page image. On the bottom of the view there is
a button __Set whole page as corrected__ to correct all lines on the
page in one go.
It is also possible to select single tokens in a line, by marking them
with the mouse. If a token is selected, you can choose a correction
candidate for it or list all occurrences of the token in the
[concordance view](#user-content-concordance-view).
<a id='navigation-bar'></a>
### Navigation bar
The navigation bar lets you navigate the pages of your project. It
stays on the left side of your browser's screen even while you
navigate down the browser page.
Besides the navigation buttons, the navigation bar shows a tab to list
the assumed error patterns (_OCR error patterns_) and error tokens
(_Suspicious words_) of the project. If the project was profiled
these list assumed errors in the document by the number of their
occurrence or by their common error patterns. Click one of the
entries to open the [concordance view](#user-content-concordance-view)
of the according token or error patterns.
<img width="25%" src="assets/images/doc/navigation_bar.png" alt="Navigation bar"/>
There is also a list of _special characters_ available. If you click
on one of the special characters the according Unicode point is
inserted into your clip board (you can use CTRL+v to paste the
character into a text field). Special characters are all non ASCII
characters encountered in the document.
In the _display settings_ tab you can set options for the displaying
of pages. These settings are user specific and are only saved in your
browser's local storage and not in the back end.
<a id='concordance-view'></a>
### Concordance view
The concordance view lists tokens in their line context. These tokens
can be corrected individually or all at once. The concordance view
never shows manual corrected tokens, since its purpose is the manual
correction of tokens in the project. If a token gets manually
corrected, it can never be found in the concordance view.
Depending on how you opened the concordance view, you will see a list
of similar words (using suspicious words or the concordance button for
a selected token) or words where the profiler assumes a similar OCR
error pattern for its best interpretation.
<p align="center">
<img width="50%" src="assets/images/doc/concordance_view.png" alt="Concordance view"/>
</p>
Each matching token is shown in a blue outline. All matched tokens can
be individually corrected using profiler suggestions or manual
editing. It is also possible to globally batch-correct the shown
tokens using the correction bar at the top of the concordance
view. All inputs allow the insertion of Unicode escape codes.
After one or more tokens have been corrected, a new batch of tokens is
shown (if there are more to show). It is possible to set the number
of tokens per page in the _display settings_ tab of the [page
correction](#user-content-page-correction).
- - -
<a id='installation'></a>
## Installation
PoCoWeb is open source software and is distributed under the
[Apache 2.0](LICENSE) license.
You can download the source code from its
[github repository](http://github.com/cisocrgroup/pocoweb).
Installation instructions can be found in the project's
[README](https://github.com/cisocrgroup/pocoweb/blob/master/README.md)
file.
PoCoWeb can be deployed using
[docker-compose](https://docs.docker.com/compose/). The according
Docker files can be found in the projects's source directory.
- - -
<a id='pool'></a>
## Ground-truth pool
It is possible to download an archive of all corrected lines with
their respective image files. This pool contains line segmented
ground-truth data and is suitable to be used as training material for
OCR.
Administrators can download a pool containing all their [owned
projects](#user-content-api-get-user-pool) or a [global
pool](#user-content-api-get-global-pool) containing data of all
projects in PoCoWeb. If for some reason a project should not be part
of this global pool it is possible to set the variable `pooled` to
`false` in the projects's settings.
The pool is a zipped archive that contains the line segmented
correction data in a directory structure that is ordered by
project, page ids and line ids:
```
corpus
โโโ year-author_name1_book_title1.json
โโโ year-author_name1_book_title1
โ โโโ pageid1
โ โ โโโ lineid1.gt.txt
โ โ โโโ lineid1.txt
โ โ โโโ lineid1.png
โ โ โโโ lineid2.gt.txt
โ โ โโโ lineid2.txt
โ โ โโโ lineid2.png
โ โโโ pageid2
โ โโโ lineid1.gt.txt
โ โโโ lineid1.txt
โ โโโ lineid1.png
โ โโโ lineid2.gt.txt
โ โโโ lineid2.txt
โ โโโ lineid2.png
โโโ year-author_name2_book_title2.json
โโโ year-author_name2_book_title2
โโโ pageid1
โ โโโ lineid1.gt.txt
โ โโโ lineid1.txt
โ โโโ lineid1.png
โ โโโ lineid2.gt.txt
โ โโโ lineid2.txt
โ โโโ lineid2.png
โโโ pageid2
โโโ lineid1.gt.txt
โโโ lineid1.txt
โโโ lineid1.png
โโโ lineid2.gt.txt
โโโ lineid2.txt
โโโ lineid2.png
```
For each project directory, a small JSON-formatted info file is
included. The file has the following layout:
```json
{
"Author":"author's name",
"Title":"book's title",
"Description":"book's description",
"OwnerEmail":"owner's email",
"Language": "book's profiler language",
"ID":3,
"Year":1900,
"OCRTXTFiles": [
"..."
],
"GTIMGFiles": [
"..."
],
"GTTXTFiles": [
"..."
],
"Pooled":true
}
```
- - -
<a id='overview-services'></a>
## Services
PoCoWeb is composed of a number of interdependent services:
* nginx serves images, web-content and redirects API calls
* pcwauth authenticates API requests and forwards them to the various other services
* mysql holds all database tables
* PoCoWeb manages project, packages and the up- and download
* www-data is a directory (volume) that holds the frontend
* project-data is a directory (volume) that holds the project files
* db-data is a directory (volume) that holds the database files
* ocr-data is a directory (volume) that holds the global ocr models
* pcwuser handles user management
* pcwprofiler handles profiling of projects
* pcwpostcorrection handles the automatic postcorrection
* pcwpkg handles splitting and assignment of packages
* pcwpool handles download of [pool](#user-content-pool) data

- - -
|
import React from 'react';
import { Card, Col, Row } from 'reactstrap';
import { STATE_LOGIN } from '../components/Auth/AuthForm.js';
import Signup from '../components/Auth/Signup';
class AuthPage extends React.Component {
handleAuthState = authState => {
if (authState === STATE_LOGIN) {
this.props.history.push('/login');
} else {
this.props.history.push('/signup');
}
};
handleLogoClick = () => {
this.props.history.push('/');
};
render() {
return (
<Row
style={{
height: '100vh',
justifyContent: 'center',
alignItems: 'center',
}}>
<Col md={6} lg={4}>
<Signup/>
</Col>
</Row>
);
}
}
export default AuthPage;
|
<?php declare(strict_types=1);
namespace Kiboko\Component\ExpressionLanguage\Akeneo;
use Symfony\Component\ExpressionLanguage\ExpressionFunction;
/**
 * Expression-language function that keeps only the attribute values whose
 * 'scope' key is in the given scope list and orders them by the list's
 * priority (the first scope given sorts first; unknown scopes sort last).
 */
final class Coalesce extends ExpressionFunction
{
    /**
     * @param string $name Name under which the function is registered in
     *                     the expression language.
     */
    public function __construct($name)
    {
        parent::__construct(
            $name,
            // Bind the private helpers below as the function's compiler
            // and evaluator callables.
            \Closure::fromCallable([$this, 'compile'])->bindTo($this),
            \Closure::fromCallable([$this, 'evaluate'])->bindTo($this)
        );
    }

    /**
     * Returns PHP source code for a closure implementing the same
     * filter-and-sort logic as evaluate(). The %1$s placeholder is filled
     * with the comma-separated scope list via sprintf().
     *
     * @param string ...$scopes Scope codes, highest priority first.
     */
    private function compile(string ...$scopes)
    {
        $pattern =<<<"PATTERN"
function (array \$input): array {
    \$output = array_filter(\$input, function(array \$item) {
        return in_array(\$item['scope'], [%1\$s]);
    });
    usort(\$output, function(array \$left, array \$right) {
        \$leftIndex = array_search(\$left['scope'], [%1\$s]);
        \$rightIndex = array_search(\$right['scope'], [%1\$s]);
        if (\$leftIndex === \$rightIndex) {
            return 0;
        }
        if (false === \$rightIndex) {
            return -1;
        }
        if (false === \$leftIndex) {
            return 1;
        }
        return \$leftIndex <=> \$rightIndex;
    });
    return \$output;
}
PATTERN;
        return sprintf($pattern, implode(', ', $scopes));
    }

    /**
     * Returns a closure that filters the input items to those whose scope
     * is in $scopes and sorts them by the position of their scope in
     * $scopes (ascending, i.e. first-listed scope first).
     *
     * @param array  $context   Expression-language context (unused here).
     * @param string ...$scopes Scope codes, highest priority first.
     */
    private function evaluate(array $context, string ...$scopes)
    {
        return function (array $input) use($scopes): array {
            $output = array_filter($input, function(array $item) use($scopes) {
                return in_array($item['scope'], $scopes);
            });
            usort($output, function(array $left, array $right) use($scopes) {
                $leftIndex = array_search($left['scope'], $scopes);
                $rightIndex = array_search($right['scope'], $scopes);
                if ($leftIndex === $rightIndex) {
                    return 0;
                }
                // Defensive: after the filter above, false (scope not
                // found) should not occur; if it does, unknowns sort last.
                if (false === $rightIndex) {
                    return -1;
                }
                if (false === $leftIndex) {
                    return 1;
                }
                return $leftIndex <=> $rightIndex;
            });
            return $output;
        };
    }
}
|
// Import the base part class.
import ArakinPart from './arakin_part.js';
/**
 * Rectangle part: fills a translated/rotated/scaled rectangle on the
 * target canvas using the fill style configured in the profile.
 */
export default class ArakinPartRect extends ArakinPart {
  constructor(params) {
    super(params);
    // Fill colour comes from the profile; black is the fallback.
    this.fillStyle = this.profile.getProfileData('fillStyle', 'rgb(0,0,0)');
  }

  static getPropertyList() {
    const props = super.getPropertyList();
    props['fillStyle'] = 'rgb(0,0,0)';
    return props;
  }

  draw() {
    const canvas = document.getElementById(this.canvasId);
    if (!canvas || !canvas.getContext) return false;

    const ctx = canvas.getContext('2d');
    const radians = this.getCalcValue('angle') * Math.PI / 180;

    ctx.save();
    // Move the origin to the part's centre point, then rotate and scale
    // around it before drawing.
    ctx.translate(
      this.getCalcValue('centerx') + this.getCalcValue('x'),
      this.getCalcValue('centery') + this.getCalcValue('y')
    );
    ctx.rotate(radians);
    ctx.scale(this.getCalcValue('scalex'), this.getCalcValue('scaley'));
    ctx.globalAlpha = this.getCalcValue('a');
    ctx.fillStyle = this.fillStyle;
    // Draw relative to the shifted origin so the rectangle rotates about
    // its centre.
    ctx.fillRect(
      -this.getCalcValue('centerx'),
      -this.getCalcValue('centery'),
      this.getCalcValue('w'),
      this.getCalcValue('h')
    );
    ctx.restore();
  }
}
|
// Redirect to the "no-sidebar" page when button1 is clicked.
document.getElementById("button1").onclick = function(e) {
    window.location.href="no-sidebar.html";
}
|
<?php
namespace Oro\Bundle\ApiBundle\Tests\Unit\Fixtures\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Test fixture entity representing a product with a lazily-built
 * ProductPrice value object backed by two raw columns.
 *
 * @ORM\Entity
 * @ORM\Table(name="product_table")
 */
class Product
{
    /**
     * @ORM\Id
     * @ORM\Column(type="integer", name="id")
     * @ORM\GeneratedValue(strategy="AUTO")
     */
    protected $id;

    /**
     * @ORM\Column(name="name", type="string", length=50)
     */
    protected $name;

    /**
     * @ORM\ManyToOne(targetEntity="Category")
     * @ORM\JoinColumn(name="category_name", referencedColumnName="name")
     */
    protected $category;

    /**
     * @ORM\ManyToOne(targetEntity="User", inversedBy="products")
     * @ORM\JoinColumn(name="owner_id", referencedColumnName="id")
     */
    protected $owner;

    /**
     * @ORM\Column(name="updated_at", type="datetime", nullable=true)
     */
    protected $updatedAt;

    /**
     * Value object built on demand from priceValue/priceCurrency.
     *
     * @var ProductPrice
     */
    protected $price;

    /**
     * @ORM\Column(name="price_value", type="string")
     */
    protected $priceValue;

    /**
     * @ORM\Column(name="price_currency", type="string")
     */
    protected $priceCurrency;

    /**
     * @return int
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * @param int $id
     */
    public function setId($id)
    {
        $this->id = $id;
    }

    /**
     * @return string
     */
    public function getName()
    {
        return $this->name;
    }

    /**
     * @param string $name
     */
    public function setName($name)
    {
        $this->name = $name;
    }

    /**
     * @return Category
     */
    public function getCategory()
    {
        return $this->category;
    }

    /**
     * @param Category $category
     */
    public function setCategory($category)
    {
        $this->category = $category;
    }

    /**
     * @return User
     */
    public function getOwner()
    {
        return $this->owner;
    }

    /**
     * @param User|null $owner
     */
    public function setOwner($owner)
    {
        $this->owner = $owner;
    }

    /**
     * @return \DateTime
     */
    public function getUpdatedAt()
    {
        return $this->updatedAt;
    }

    public function setUpdatedAt(\DateTime $updatedAt)
    {
        $this->updatedAt = $updatedAt;
    }

    /**
     * Returns the price value object, constructing it from the raw price
     * columns on first access.
     *
     * @return ProductPrice
     */
    public function getPrice()
    {
        if (null === $this->price) {
            $this->price = new ProductPrice($this->priceValue, $this->priceCurrency);
        }

        return $this->price;
    }

    /**
     * Stores the price object and mirrors its parts into the raw columns.
     *
     * @param ProductPrice $price
     *
     * @return self
     */
    public function setPrice(ProductPrice $price)
    {
        $this->price = $price;
        $this->priceValue = $price->getValue();
        $this->priceCurrency = $price->getCurrency();

        return $this;
    }

    /**
     * Returns the price object without triggering lazy construction.
     *
     * @return ProductPrice|null
     */
    public function getNullablePrice()
    {
        return $this->price;
    }

    /**
     * Like setPrice(), but also accepts null to clear the raw columns.
     *
     * @param ProductPrice|null $price
     *
     * @return self
     */
    public function setNullablePrice(?ProductPrice $price)
    {
        $this->price = $price;
        if (null === $price) {
            $this->priceValue = null;
            $this->priceCurrency = null;
        } else {
            $this->priceValue = $price->getValue();
            $this->priceCurrency = $price->getCurrency();
        }

        return $this;
    }

    /**
     * @return string
     */
    public function __toString()
    {
        return (string)$this->name;
    }
}
|
USE didactinaut_dev;
-- AddVideo: attach a video to a lesson. Any existing video rows for the
-- lesson are removed first, so a lesson ends up with exactly one video.
-- Returns the id of the newly inserted row via LAST_INSERT_ID().
DELIMITER $$
DROP PROCEDURE IF EXISTS AddVideo $$
CREATE PROCEDURE AddVideo (
    IN _address VARCHAR(255), -- URL/path of the video
    IN _duration INT,         -- video duration (unit not stated here; TODO confirm)
    IN _lesson_id INT         -- lesson the video belongs to
)
BEGIN
    -- Enforce the one-video-per-lesson rule by clearing previous entries.
    DELETE FROM Videos
    WHERE lesson_id = _lesson_id;
    INSERT INTO Videos (
        video_address,
        video_duration,
        lesson_id
    )
    VALUES (
        _address,
        _duration,
        _lesson_id
    );
    -- Hand the auto-generated video_id back to the caller.
    SELECT LAST_INSERT_ID();
END $$
DELIMITER ;
-- EditVideo: update the address and duration of the video with the given id.
DELIMITER $$
DROP PROCEDURE IF EXISTS EditVideo $$
CREATE PROCEDURE EditVideo (
    IN _id INT,
    IN _address VARCHAR(255),
    IN _duration INT
)
BEGIN
    UPDATE Videos AS V
    SET
        V.video_address = _address,
        V.video_duration = _duration
    WHERE
        V.video_id = _id;
END $$
DELIMITER ;
-- DeleteVideo: remove the video with the given id.
DELIMITER $$
DROP PROCEDURE IF EXISTS DeleteVideo $$
CREATE PROCEDURE DeleteVideo (
    IN _id INT
)
BEGIN
    DELETE FROM Videos
    WHERE
        video_id = _id;
END $$
DELIMITER ;
-- GetVideo: fetch a single video row by its id.
DELIMITER $$
DROP PROCEDURE IF EXISTS GetVideo $$
CREATE PROCEDURE GetVideo (
    IN _id INT
)
BEGIN
    SELECT
        V.video_id,
        V.video_address,
        V.video_duration,
        V.lesson_id
    FROM
        Videos as V
    WHERE
        V.video_id = _id;
END $$
DELIMITER ;
-- GetLessonVideo: fetch the video row(s) attached to the given lesson.
DELIMITER $$
DROP PROCEDURE IF EXISTS GetLessonVideo $$
CREATE PROCEDURE GetLessonVideo (
    IN _lesson_id INT
)
BEGIN
    SELECT
        V.video_id,
        V.video_address,
        V.video_duration,
        V.lesson_id
    FROM
        Videos as V
    WHERE
        V.lesson_id = _lesson_id;
END $$
DELIMITER ;
|
# frozen_string_literal: true
require "base64"
require "forwardable"
require "websocket/driver"
# have to roll our own, as the default client bundles its own
# HTTP client handshake logic
# Hand-rolled client handshake driver. websocket-driver's stock client
# bundles its own HTTP handshake logic, so this subclasses the frame-level
# Hybi driver and fills in the HTTP upgrade headers itself.
class WSDriver < WebSocket::Driver::Hybi
  include WebSocket

  # opts must carry a :headers hash, which is removed before the rest of
  # the options are passed up to the Hybi driver.
  def initialize(*, opts)
    h = opts.delete(:headers)
    super
    @headers = h
    # RFC 6455 opening handshake: random 16-byte nonce, base64-encoded.
    @key = Base64.strict_encode64(SecureRandom.random_bytes(16))
    @headers["upgrade"] = "websocket"
    @headers["connection"] = "Upgrade"
    @headers["sec-websocket-key"] = @key
    @headers["sec-websocket-version"] = VERSION
    @headers["Sec-WebSocket-Protocol"] = @protocols * ", " if @protocols.size.positive?
    extensions = @extensions.generate_offer
    @headers["Sec-WebSocket-Extensions"] = extensions if extensions
  end

  # Marks the connection open and parses the server's first bytes.
  def start(bytes)
    open
    parse(bytes)
  end

  # Validates the server's 101 response headers: accept key, negotiated
  # subprotocol, and extension activation. Returns true on success,
  # false (after emitting :error/:close) on failure.
  def validate(headers)
    accept = headers["sec-websocket-accept"]
    protocol = headers["sec-websocket-protocol"]
    return fail_handshake("Sec-WebSocket-Accept mismatch") unless accept == Driver::Hybi.generate_accept(@key)
    if protocol && !protocol.empty?
      return fail_handshake("Sec-WebSocket-Protocol mismatch") unless @protocols.include?(protocol)
      @protocol = protocol
    end
    begin
      # NOTE(review): this activates against our *request* headers'
      # extension offer, not the server's response header — confirm this
      # is intentional for the test setup.
      @extensions.activate(@headers["Sec-WebSocket-Extensions"])
    rescue ::WebSocket::Extensions::ExtensionError => e
      return fail_handshake(e.message)
    end
    true
  end

  # Moves the driver into the closed state (ready_state 3) and surfaces
  # the failure through the :error and :close events. Always returns false
  # so callers can `return fail_handshake(...)`.
  def fail_handshake(message)
    message = "Error during WebSocket handshake: #{message}"
    @ready_state = 3
    emit(:error, message)
    emit(:close, Driver::CloseEvent.new(Driver::Hybi::ERRORS[:protocol_error], message))
    false
  end
end
# Minimal WebSocket client used by the test plugin: feeds bytes from a
# hijacked IO into the driver and records every received text message.
class WSCLient
  extend Forwardable

  # Delegate `headers` to the underlying driver.
  # Fix: the original `def_delegator :@driver, :headers, :close` treated
  # :close as the *alias* for :headers (Forwardable's signature is
  # def_delegator(accessor, method, ali = method)), so `headers` was never
  # delegated and a bogus close-calls-headers method was defined (then
  # shadowed by the explicit #close below).
  def_delegator :@driver, :headers

  # All text frames received so far, in arrival order.
  attr_reader :messages

  def initialize(io, headers)
    @io = io
    @closed = false
    @messages = []
    @driver = WSDriver.new(self, masking: true, headers: headers)
    @driver.on(:open) { |_event| send("handshake") }
    @driver.on(:message) { |event| @messages << event.data }
    @driver.on(:error) { |error| warn("ws error: #{error}") }
    @driver.on(:close) { |event| finalize(event) }
  end

  # Starts the driver with the server's initial bytes, then spawns a
  # reader thread that pumps the socket into the driver one byte at a
  # time until the connection is closed.
  def start(bytes)
    @driver.start(bytes)
    @thread = Thread.new do
      until @closed
        bytes = @io.read(1)
        @driver.parse(bytes)
      end
    end
  end

  # Forwards handshake-response validation to the driver.
  def validate(*args)
    @driver.validate(*args)
  end

  # Sends a text frame (intentionally shadows Kernel#send).
  def send(message)
    @driver.text(message)
  end

  # Called back by the driver to write raw bytes to the socket.
  def write(data)
    @io.write(data)
  end

  def close
    @driver.close
  end

  # :close event callback; stops the reader loop.
  def finalize(_event)
    @closed = true
  end
end
# httpx test plugin wiring the WSDriver/WSCLient pair into the client's
# "upgrade" mechanism so tests can speak WebSocket over a hijacked socket.
module WSTestPlugin
  class << self
    def configure(klass)
      klass.plugin(:upgrade)
      klass.default_options.upgrade_handlers.register("websocket", self)
    end

    # Invoked when the server accepts the upgrade: validates the handshake
    # response, hijacks the socket away from the HTTP client, and hands any
    # already-buffered body bytes to the WebSocket client.
    def call(connection, request, response)
      return unless (ws = request.websocket)
      return unless ws.validate(response.headers)
      connection.hijack_io
      response.websocket = ws
      ws.start(response.body.to_s)
    end

    # A WebSocket needs exclusive use of its connection.
    def extra_options(options)
      options.merge(max_concurrent_requests: 1)
    end
  end

  module InstanceMethods
    # Lazily attaches a websocket client to the connection chosen for the
    # request, unless one is already attached or the connection has
    # already switched protocols.
    def find_connection(request, *)
      return super if request.websocket
      conn = super
      return conn unless conn && !conn.upgrade_protocol
      request.init_websocket(conn)
      conn
    end
  end

  module RequestMethods
    attr_reader :websocket

    # Builds the websocket client on top of the request's connection IO,
    # reusing the request's headers for the handshake.
    def init_websocket(connection)
      socket = connection.to_io
      @websocket = WSCLient.new(socket, @headers)
    end
  end

  module ResponseMethods
    # Set by WSTestPlugin.call once the upgrade succeeds.
    attr_accessor :websocket
  end
end
|
package org.jetbrains.kotlinx.jupyter.magics
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.arguments.optional
import com.github.ajalt.clikt.parameters.types.choice
import org.jetbrains.kotlinx.jupyter.common.ReplLineMagic
import org.jetbrains.kotlinx.jupyter.exceptions.ReplPreprocessingException
/**
 * Base implementation of [MagicsHandler]: parses "keyword[ argument]" line
 * magic text, resolves the keyword to a [ReplLineMagic], and dispatches to
 * the matching open `handle*` method via a lookup table.
 */
abstract class AbstractMagicsHandler : MagicsHandler {
    // State of the magic currently being dispatched; assigned by
    // handle(magic, ...) immediately before the callback runs.
    protected var arg: String? = null
    protected var tryIgnoreErrors: Boolean = false
    protected var parseOnly: Boolean = false

    // Splits the raw argument text on single spaces; blank/absent -> empty list.
    protected fun argumentsList() = arg?.trim()?.takeIf { it.isNotEmpty() }?.split(" ") ?: emptyList()

    // Parses the current arguments as a single optional on/off flag and
    // passes the result (true / false / null when absent) to [action].
    protected fun handleSingleOptionalFlag(action: (Boolean?) -> Unit) {
        object : CliktCommand() {
            val arg by nullableFlag()
            override fun run() {
                action(arg)
            }
        }.parse(argumentsList())
    }

    // Dispatch table: magic -> handler method reference.
    private val callbackMap: Map<ReplLineMagic, () -> Unit> = mapOf(
        ReplLineMagic.USE to ::handleUse,
        ReplLineMagic.TRACK_CLASSPATH to ::handleTrackClasspath,
        ReplLineMagic.TRACK_EXECUTION to ::handleTrackExecution,
        ReplLineMagic.DUMP_CLASSES_FOR_SPARK to ::handleDumpClassesForSpark,
        ReplLineMagic.USE_LATEST_DESCRIPTORS to ::handleUseLatestDescriptors,
        ReplLineMagic.OUTPUT to ::handleOutput,
        ReplLineMagic.LOG_LEVEL to ::handleLogLevel,
        ReplLineMagic.LOG_HANDLER to ::handleLogHandler,
    )

    /**
     * Parses [magicText] ("keyword[ argument]") and dispatches it.
     * Unknown keywords throw [ReplPreprocessingException] unless
     * [parseOnly] or [tryIgnoreErrors] is set.
     */
    override fun handle(magicText: String, tryIgnoreErrors: Boolean, parseOnly: Boolean) {
        try {
            val parts = magicText.split(' ', limit = 2)
            val keyword = parts[0]
            val arg = if (parts.count() > 1) parts[1] else null
            // In parse-only mode the keyword is never resolved, so no
            // handler runs (and unknown keywords are not an error).
            val magic = if (parseOnly) null else ReplLineMagic.valueOfOrNull(keyword)?.value
            if (magic == null && !parseOnly && !tryIgnoreErrors) {
                throw ReplPreprocessingException("Unknown line magic keyword: '$keyword'")
            }
            if (magic != null) {
                handle(magic, arg, tryIgnoreErrors, parseOnly)
            }
        } catch (e: Exception) {
            // NOTE(review): a ReplPreprocessingException thrown above is
            // re-wrapped here too — presumably intentional, to prefix the
            // failing command text; confirm.
            throw ReplPreprocessingException("Failed to process '%$magicText' command. " + e.message, e)
        }
    }

    /** Dispatches an already-resolved magic; throws [UnhandledMagicException] when unmapped. */
    fun handle(magic: ReplLineMagic, arg: String?, tryIgnoreErrors: Boolean, parseOnly: Boolean) {
        val callback = callbackMap[magic] ?: throw UnhandledMagicException(magic, this)
        this.arg = arg
        this.tryIgnoreErrors = tryIgnoreErrors
        this.parseOnly = parseOnly
        callback()
    }

    // Default no-op handlers; subclasses override the magics they support.
    open fun handleUse() {}
    open fun handleTrackClasspath() {}
    open fun handleTrackExecution() {}
    open fun handleDumpClassesForSpark() {}
    open fun handleUseLatestDescriptors() {}
    open fun handleOutput() {}
    open fun handleLogLevel() {}
    open fun handleLogHandler() {}

    companion object {
        // on/off choice argument used by handleSingleOptionalFlag.
        fun CliktCommand.nullableFlag() = argument().choice(mapOf("on" to true, "off" to false)).optional()
    }
}
|
# frozen_string_literal: true
module JapanETC
module EntranceOrExit
ENTRANCE = 'ๅ
ฅๅฃ'
EXIT = 'ๅบๅฃ'
def self.from(text)
case text
when /ๅ
ฅๅฃ/, /๏ผๅ
ฅ๏ผ/, 'ๅ
ฅ'
ENTRANCE
when /ๅบๅฃ/, /๏ผๅบ๏ผ/, 'ๅบ'
EXIT
end
end
end
end
|
<?php
/*
This file is a part of Phun Project
The MIT License (MIT)
Copyright (c) 2015 Pierre Ruyter and Xavier Van de Woestyne
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
declare (strict_types=1);
/**
* Provide a Javascript Sandbox for Dom manipulation
* @author Van de Woestyne Xavier <xaviervdw@gmail.com>
*/
namespace phun\javascript;
// Variables where object are stored
const elements = '$PHUN_INTERNAL_ELEMENTS';
/**
 * JavaScript sandbox support: tracks which nodes a component references
 * from generated JavaScript ("colored" nodes) and collects them,
 * transitively, for later emission.
 */
trait Sandbox
{
    /** @var array Nodes referenced from JavaScript, keyed by UID. */
    protected $referenced_nodes;

    /** @var bool True once this node is used from JavaScript. */
    protected $colored;

    /**
     * Resets the sandbox to its pristine state.
     */
    protected function init_sandbox()
    {
        $this->referenced_nodes = [];
        $this->colored = false;
    }

    /**
     * Tells whether this node is used in JavaScript.
     */
    protected function is_colored()
    {
        return $this->colored;
    }

    /**
     * Returns every referenced node, keyed by UID.
     */
    public function referenced()
    {
        return $this->referenced_nodes;
    }

    /**
     * Marks this node as used from JavaScript.
     */
    public function colorize()
    {
        $this->colored = true;
    }

    /**
     * Registers the given nodes — and, recursively, everything they
     * reference — in this sandbox. Plain strings and non-referenceable
     * nodes are skipped.
     */
    protected function reference(...$nodes)
    {
        foreach ($nodes as $node) {
            if (is_string($node) || !$node->isReferenceable()) {
                continue;
            }
            $this->referenced_nodes[$node->getUID()] = $node;
            $this->reference(...array_values($node->referenced()));
        }
    }
}
/**
 * Key/value storage for properties that are mirrored to JavaScript.
 * The using class must declare a $props array.
 */
trait Props
{
    /**
     * Stores a prop usable in JavaScript.
     *
     * @return $this fluent interface
     */
    public function set_props(string $key, $value)
    {
        $this->props[$key] = $value;
        return $this;
    }

    /**
     * Retrieves a single prop by key.
     */
    public function get_props(string $key)
    {
        return $this->props[$key];
    }

    /**
     * Retrieves the whole prop map.
     */
    public function get_all_props()
    {
        return $this->props;
    }
}
/**
 * Client-side (JavaScript) handle on a DOM element, addressed through
 * the global element store by its UID.
 */
class Element
{
    // Prop get/set shared with other JS-visible objects.
    use Props;

    /** @var array Props mirrored to JavaScript. */
    protected $props;

    /** @var mixed Unique identifier of the element. */
    protected $uid;

    /**
     * Builds a client-side element handle.
     *
     * @param mixed $uid   unique identifier of the element
     * @param array $props initial props
     */
    public function __construct($uid, $props = [])
    {
        $this->uid = $uid;
        $this->props = $props;
    }

    /**
     * Returns the element's unique identifier.
     */
    public function getUID()
    {
        return $this->uid;
    }

    /**
     * JavaScript expression referencing this element in the global
     * element store.
     *
     * @return string
     */
    public function __toString() : string
    {
        return elements . '["' . $this->uid . '"][0]';
    }

    /**
     * Magic overloading for easy attribute access.
     */
    public function __get($attribute)
    {
        return $this->get_props($attribute);
    }

    /**
     * Magic overloading for easy attribute modification.
     *
     * Fix: the original called the undefined method set_propos() (typo
     * for set_props), so every magic property assignment was a fatal
     * "call to undefined method" error.
     */
    public function __set($name, $value)
    {
        $this->set_props($name, $value);
    }
}
|
๏ปฟusing System.Collections.Generic;
using System.Linq;
using Antlr4.Runtime.Tree;
using Omnium.Core.ast.statements;
namespace Omnium.Core.ast.declarations
{
    /// <summary>
    /// AST node representing a class constructor: modifiers, parameters
    /// (variable-declaration children) and a single block-statement body.
    /// </summary>
    public class ConstructorDeclaration : Node
    {
        /// <summary>Member modifiers applied to the constructor.</summary>
        public readonly List<MemberModifier> Modifiers = new List<MemberModifier>();

        /// <summary>Constructor parameters, drawn from this node's children.</summary>
        public IEnumerable<VariableDeclaration> Parameters => Children.OfType<VariableDeclaration>();

        /// <summary>The constructor body; exactly one BlockStatement child is expected (Single() throws otherwise).</summary>
        public BlockStatement Body => Children.OfType<BlockStatement>().Single();

        public ConstructorDeclaration(IParseTree context, IEnumerable<INode> children) : base(context, children)
        {
        }
    }
}
# Run the engine's multi-group analysis for configuration 0-0-0:
# seed file, farthest-point CSV, and the solution CSV.
../engine --analy-multi-group ../seed/0-0-0.txt ../farthest/farthest_0-0-0.csv ../solution/solution_0-0-0.csv
# Drop a marker file so the surrounding batch can tell this step completed.
touch 0-0_OK
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.