text
stringlengths 27
775k
|
|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.phaserule;
import org.apache.axis2.AbstractTestCase;
import org.apache.axis2.description.HandlerDescription;
import org.apache.axis2.description.PhaseRule;
import org.apache.axis2.engine.AxisConfiguration;
import org.apache.axis2.engine.DispatchPhase;
import org.apache.axis2.engine.Handler;
import org.apache.axis2.engine.Phase;
import org.apache.axis2.phaseresolver.PhaseHolder;
import java.util.ArrayList;
public class PreDispatchPhaseRuleTest extends AbstractTestCase {

    PreDispatchPhaseRuleTest phaserul;
    AxisConfiguration axisSytem;

    public PreDispatchPhaseRuleTest(String testName) {
        super(testName);
    }

    /**
     * Creates a HandlerDescription named {@code name} that wraps a
     * {@link PhaseRuleHandler} and carries the given phase rule.
     * Mirrors the repeated construction sequence used by the original test:
     * class name, handler init, handler, name, rules.
     */
    private HandlerDescription createHandler(String name, PhaseRule rule) {
        HandlerDescription description = new HandlerDescription();
        description.setClassName("org.apache.axis2.handlers.AbstractHandler");
        Handler handler = new PhaseRuleHandler();
        handler.init(description);
        description.setHandler(handler);
        description.setName(name);
        description.setRules(rule);
        return description;
    }

    /** Builds a rule that places a handler into the named phase. */
    private PhaseRule newRule(String phaseName) {
        PhaseRule rule = new PhaseRule();
        rule.setPhaseName(phaseName);
        return rule;
    }

    /**
     * Registers handlers with various phase rules (plain, phaseFirst,
     * after, and after+before constraints) against the in-flow phases.
     */
    public void testPhaseRule() throws Exception {
        //TODO Fix me
        phaserul = new PreDispatchPhaseRuleTest("");
        axisSytem = new AxisConfiguration();

        // Set up the three in-flow phases the handlers will be placed into.
        ArrayList inPhase = axisSytem.getInFlowPhases();
        Phase transportIN = new Phase("TransportIn");
        Phase preDispatch = new Phase("PreDispatch");
        DispatchPhase dispatchPhase = new DispatchPhase();
        dispatchPhase.setName("Dispatch");
        inPhase.add(transportIN);
        inPhase.add(preDispatch);
        inPhase.add(dispatchPhase);
        PhaseHolder ph = new PhaseHolder(inPhase);

        // pre-H1: no ordering constraints within PreDispatch.
        ph.addHandler(createHandler("pre-H1", newRule("PreDispatch")));

        // dispatch: placed in the Dispatch phase.
        ph.addHandler(createHandler("dispatch", newRule("Dispatch")));

        // pre-H2: must be the first handler of PreDispatch.
        PhaseRule firstRule = newRule("PreDispatch");
        firstRule.setPhaseFirst(true);
        ph.addHandler(createHandler("pre-H2", firstRule));

        // pre-H3: must come after pre-H2.
        PhaseRule afterRule = newRule("PreDispatch");
        afterRule.setAfter("pre-H2");
        ph.addHandler(createHandler("pre-H3", afterRule));

        // H3: constrained between pre-H2 and pre-H3.
        PhaseRule betweenRule = newRule("PreDispatch");
        betweenRule.setAfter("pre-H2");
        betweenRule.setBefore("pre-H3");
        ph.addHandler(createHandler("H3", betweenRule));

        /*ArrayList oh = ph.getOrderHandler();
        for (int i = 0; i < oh.size(); i++) {
            HandlerDescription metadata = (HandlerDescription) oh.get(i);
            System.out.println("Name:" + metadata.getName().getLocalPart());
        }*/
    }
}
|
# Report Suite Tools listed in Product Profile permissions
Use this page to understand each permission item under Report Suite Tools.
|
package list
import (
"fmt"
"reflect"
"strings"
"text/template"
"github.com/mantidtech/tplr/functions/helper"
)
// Functions operate on collections of items.
// It returns the template FuncMap exposing this package's list helpers.
func Functions() template.FuncMap {
	return template.FuncMap{
		"list":     List,
		"first":    First,
		"last":     Last,
		"rest":     Rest,
		"pop":      Pop,
		"push":     Push,
		"shift":    Rest, // "shift" is an alias for "rest"
		"unshift":  Unshift,
		"contains": Contains,
		"filter":   Filter,
		"join":     Join,
		"joinWith": JoinWith,
		"slice":    Slice,
	}
}
// List returns a new list comprised of the given elements.
// The variadic arguments are returned as-is as a []interface{}.
func List(items ...interface{}) (interface{}, error) {
	return items, nil
}
// First returns the head of a list.
// It returns nil (with no error) for an empty list.
func First(list interface{}) (interface{}, error) {
	v, n, err := helper.ListInfo(list)
	if err != nil {
		return nil, err
	}
	if n == 0 {
		return nil, nil
	}
	return v.Index(0).Interface(), nil
}
// Rest / Shift returns the tail of a list.
// Lists with fewer than two elements yield nil.
func Rest(list interface{}) (interface{}, error) {
	v, n, err := helper.ListInfo(list)
	if err != nil {
		return nil, err
	}
	if n < 2 {
		return nil, nil
	}
	return v.Slice(1, n).Interface(), nil
}
// Last returns the last item of a list, or nil for an empty list.
func Last(list interface{}) (interface{}, error) {
	v, n, err := helper.ListInfo(list)
	if err != nil {
		return nil, err
	}
	if n == 0 {
		return nil, nil
	}
	return v.Index(n - 1).Interface(), nil
}
// Pop removes the last element of the list, returning the list.
// Lists with fewer than two elements yield nil.
func Pop(list interface{}) (interface{}, error) {
	v, n, err := helper.ListInfo(list)
	if err != nil {
		return nil, err
	}
	if n < 2 {
		return nil, nil
	}
	return v.Slice(0, n-1).Interface(), nil
}
// Slice returns a slice of a list,
// where i is the lower index (inclusive) and j is the upper index (exclusive) to extract.
// An error is returned when the indexes are out of bounds or out of order.
func Slice(i, j int, list interface{}) (interface{}, error) {
	a, l, err := helper.ListInfo(list)
	if err != nil {
		return list, err
	}
	if i < 0 {
		return nil, fmt.Errorf("index '%d' out of bounds (min 0)", i)
	}
	if j > l {
		return nil, fmt.Errorf("index '%d' out of bounds (max %d)", j, l)
	}
	if i > j {
		// reflect.Value.Slice panics when the lower bound exceeds the upper,
		// so reject the inverted range explicitly instead of panicking.
		return nil, fmt.Errorf("lower index '%d' exceeds upper index '%d'", i, j)
	}
	return a.Slice(i, j).Interface(), nil
}
// Contains returns true if the item is present in the list.
// Elements are compared with interface equality.
func Contains(list interface{}, item interface{}) (bool, error) {
	v, n, err := helper.ListInfo(list)
	if err != nil {
		return false, err
	}
	for idx := 0; idx < n; idx++ {
		if v.Index(idx).Interface() == item {
			return true, nil
		}
	}
	return false, nil
}
// Filter returns list with all instances of item removed.
// Empty lists (and errors) return the input unchanged.
func Filter(list interface{}, item interface{}) (interface{}, error) {
	v, n, err := helper.ListInfo(list)
	if err != nil || n == 0 {
		return list, err
	}
	kept := reflect.MakeSlice(v.Type(), 0, n)
	for idx := 0; idx < n; idx++ {
		elem := v.Index(idx)
		if elem.Interface() != item {
			kept = reflect.Append(kept, elem)
		}
	}
	return kept.Interface(), nil
}
// Push returns the list with item appended.
func Push(list interface{}, item interface{}) (interface{}, error) {
	v, _, err := helper.ListInfo(list)
	if err != nil {
		return nil, err
	}
	return reflect.Append(v, helper.ItemForList(item)).Interface(), nil
}
// Unshift returns the list with item prepended.
func Unshift(list interface{}, item interface{}) (interface{}, error) {
	v, n, err := helper.ListInfo(list)
	if err != nil {
		return nil, err
	}
	// Build a one-element slice holding the new head, then append the
	// original elements after it.
	head := reflect.MakeSlice(v.Type(), 1, n+1)
	head.Index(0).Set(helper.ItemForList(item))
	return reflect.AppendSlice(head, v).Interface(), nil
}
// Join joins the given strings together with no separator.
func Join(list interface{}) (string, error) {
	return JoinWith("", list)
}

// JoinWith joins the given strings together using the given string as glue.
// The list is converted via helper.AsStringList; any conversion error is
// returned alongside the joined result of whatever was converted.
func JoinWith(glue string, list interface{}) (string, error) {
	s, err := helper.AsStringList(list)
	return strings.Join(s, glue), err
}
|
import 'dart:async';
import 'package:seal_note/data/appstate/GlobalState.dart';
import 'dart:io' show Platform;
import 'package:seal_note/util/tcb/TcbSystemInfoHandler.dart';
/// Possible outcomes when checking whether the app needs an update.
enum UpdateAppOption {
  /// No update prompt is needed.
  NoUpdate,

  /// A newer version exists, but updating is not required.
  OptionalUpdate,

  /// The installed version is below the minimum supported version.
  CompulsoryUpdate,

  /// The version information could not be retrieved.
  HasError,
}
/// Decides which update prompt (if any) the app should show, based on the
/// system info fetched from TCB.
class AppUpdateHandler {
  /// Compares [GlobalState.appVersion] with the minimum supported and the
  /// latest released version and returns the matching [UpdateAppOption].
  static Future<UpdateAppOption> getUpdateAppOption({
    bool forceToGetUpdateAppOptionFromTCB = true,
    bool shouldTriggerExecuteSyncMethodByTheWay = false,
    bool updateSystemInfoBasicDataWhenDataVersionIsDifferent = false,
  }) async {
    // Currently, only iOS and Android need to show the update dialog.
    if (!Platform.isIOS && !Platform.isAndroid) {
      return UpdateAppOption.NoUpdate;
    }

    final response = await TcbSystemInfoHandler.getSystemInfo(
      forceToGetSystemInfoFromTcb: forceToGetUpdateAppOptionFromTCB,
      shouldTriggerExecuteSyncMethodByTheWay:
          shouldTriggerExecuteSyncMethodByTheWay,
      updateSystemInfoBasicDataWhenDataVersionIsDifferent:
          updateSystemInfoBasicDataWhenDataVersionIsDifferent,
    );

    if (response.code != 0) {
      // Failed to get the latest app version released.
      return UpdateAppOption.HasError;
    }

    final systemInfo = response.result;
    if (GlobalState.appVersion < systemInfo.minSupportedAppVersion) {
      // Below the minimum supported version: the update is mandatory.
      return UpdateAppOption.CompulsoryUpdate;
    }
    if (GlobalState.appVersion < systemInfo.latestAppVersionReleased) {
      // Supported, but a newer release is available.
      return UpdateAppOption.OptionalUpdate;
    }
    return UpdateAppOption.NoUpdate;
  }
}
|
<?php declare(strict_types=1);
use ast\Node;
use Phan\Issue;
use Phan\Language\Element\Func;
use Phan\Language\Element\FunctionInterface;
use Phan\Language\Element\Method;
use Phan\PluginV3;
use Phan\PluginV3\PluginAwarePostAnalysisVisitor;
use Phan\PluginV3\PostAnalyzeNodeCapability;
/**
 * Plugin which looks for empty methods/functions.
 *
 * This Plugin hooks into one event:
 *
 * - getPostAnalyzeNodeVisitorClassName
 *   This method returns a class that is called on every AST node from every
 *   file being analyzed.
 */
final class EmptyMethodAndFunctionPlugin extends PluginV3 implements PostAnalyzeNodeCapability
{
    /** Returns the visitor class Phan should run on every post-analysis node. */
    public static function getPostAnalyzeNodeVisitorClassName(): string
    {
        return EmptyMethodAndFunctionVisitor::class;
    }
}
/**
 * Visits method, function, and closure declarations and emits an issue for
 * each one whose statement list is empty.
 */
final class EmptyMethodAndFunctionVisitor extends PluginAwarePostAnalysisVisitor
{
    /**
     * Reports a method whose body is empty, unless the method is overridden
     * by another, is itself an override, or is deprecated.
     */
    public function visitMethod(Node $node) : void
    {
        // A missing 'stmts' entry yields null and is skipped; a stmts node
        // with no children is an empty body.
        $stmts_node = $node->children['stmts'] ?? null;
        if ($stmts_node && !$stmts_node->children) {
            $method = $this->context->getFunctionLikeInScope($this->code_base);
            if (!($method instanceof Method)) {
                throw new AssertionError("Expected $method to be a method");
            }
            if (!$method->isOverriddenByAnother()
                && !$method->isOverride()
                && !$method->isDeprecated()
            ) {
                $this->emitIssue(
                    $this->getIssueTypeForEmptyMethod($method),
                    $node->lineno,
                    $method->getName()
                );
            }
        }
    }

    /** Delegates free function declarations to the shared analysis. */
    public function visitFuncDecl(Node $node) : void
    {
        $this->analyzeFunction($node);
    }

    /** Delegates closures to the shared analysis. */
    public function visitClosure(Node $node) : void
    {
        $this->analyzeFunction($node);
    }

    // No need for visitArrowFunc.
    // By design, `fn($args) => expr` can't have an empty statement list because it must have an expression.
    // It's always equivalent to `return expr;`

    /**
     * Emits EmptyFunction (named functions) or EmptyClosure (closures) when
     * the function-like body is empty and the function is not deprecated.
     */
    private function analyzeFunction(Node $node): void
    {
        $stmts_node = $node->children['stmts'] ?? null;
        if ($stmts_node && !$stmts_node->children) {
            $function = $this->context->getFunctionLikeInScope($this->code_base);
            if (!($function instanceof Func)) {
                throw new AssertionError("Expected $function to be Func\n");
            }
            if (! $function->isDeprecated()) {
                if (!$function->isClosure()) {
                    $this->emitIssue(
                        Issue::EmptyFunction,
                        $node->lineno,
                        $function->getName()
                    );
                } else {
                    // Closures have no useful name to report.
                    $this->emitIssue(
                        Issue::EmptyClosure,
                        $node->lineno
                    );
                }
            }
        }
    }

    /** Maps the method's visibility to the matching empty-method issue type. */
    private function getIssueTypeForEmptyMethod(FunctionInterface $method) : string
    {
        if (!$method instanceof Method) {
            throw new \InvalidArgumentException("\$method is not an instance of Method");
        }
        if ($method->isPrivate()) {
            return Issue::EmptyPrivateMethod;
        }
        if ($method->isProtected()) {
            return Issue::EmptyProtectedMethod;
        }
        return Issue::EmptyPublicMethod;
    }
}
// Every plugin needs to return an instance of itself at the
// end of the file in which it's defined.
return new EmptyMethodAndFunctionPlugin();
|
#!/bin/sh
#
# Bourne shell script to create Xalan-C documentation pages
#
# SET THE LIBRARY PATH FOR YOUR OPERATING SYSTEM, REQUIRED BY "Xalan" PROGRAM
#
# SOLARIS AND LINUX
# export LD_LIBRARY_PATH=/usr/local/lib
#
# AIX AND BSD
# export LIBPATH=/usr/local/lib
#
# HPUX
# export SHLIB_PATH=/usr/local/lib
#
# MAC OS/X
# export DYLD_LIBRARY_PATH=/usr/local/lib
#
# CYGWIN AND MINGW
# export PATH=$PATH;/usr/local/lib
#
umask 0002

mkdir -p ../../build/docs/xalan-c/resources

# Build each documentation page in order.
# Pages intentionally not built: build_instruct, history, test-faqs.
for page in usagepatterns samples resources install download overview \
    charter whatsnew extensionslib commandline readme releasenotes \
    programming getstarted index extensions faq license buildlibs \
    secureweb builddocs
do
    make-xalan.sh $page
done

# Copy static artwork and stylesheet resources into the output tree.
cp xalan-graphic/*.* ../../build/docs/xalan-c
cp xslt-resources/*.* ../../build/docs/xalan-c/resources
|
import 'package:faker_dart/faker_dart.dart';
import 'package:faker_dart/src/utils/random_utils.dart';
/// {@macro internet}
class Internet {
  /// {@macro internet}
  Internet(this._faker);

  final Faker _faker;

  /// returns a random URL
  ///
  /// e.g. `faker.internet.url() // https://optical-sensor.com`
  String url() {
    final protocols = ['http', 'https'];
    final randomProtocol = RandomUtils.arrayElement(protocols);
    final domain = '${domainName()}.${domainSuffix()}';
    return '$randomProtocol://$domain';
  }

  /// returns a random domain name
  ///
  /// e.g. `faker.internet.domainName() // neural-monitor`
  String domainName() {
    final randomAdjective =
        RandomUtils.arrayElement(_faker.locale.hacker.adjective);
    final randomNoun = RandomUtils.arrayElement(_faker.locale.hacker.noun);
    // Strip characters unsuitable for a host name. The previous pattern
    // lacked a character class ([...]), so it only matched whole literal
    // sequences (e.g. `\~#&*{}/:<>?`) instead of the individual characters.
    return '$randomAdjective-$randomNoun'
        .replaceAll(RegExp(r'[\\~#&*{}/:<>?|"\s]'), '')
        .toLowerCase();
  }

  /// returns a random domain suffix
  ///
  /// e.g. `faker.internet.domainSuffix() // com`
  String domainSuffix() {
    return RandomUtils.arrayElement(_faker.locale.internet.domainSuffix);
  }

  /// returns a random HTTP method
  ///
  /// e.g. `faker.internet.httpMethod() // POST`
  String httpMethod() {
    final httpMethods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'];
    return RandomUtils.arrayElement(httpMethods);
  }

  /// returns a random IPv4 Address
  ///
  /// e.g. `faker.internet.ip() // 67.185.47.31`
  String ip() {
    // Four random octets in [0, 255], joined with dots.
    final result = List<int>.filled(4, 0, growable: false);
    for (var i = 0; i < 4; i++) {
      result[i] = _faker.datatype.number(max: 255);
    }
    return result.join('.');
  }
}
|
# VisualEditor
VisualEditor provides a visual editor for wiki pages. It is written in
JavaScript and runs in a web browser.
It uses the Parsoid parser to convert wikitext documents to annotated HTML
which the VisualEditor is able to load, modify and emit back to Parsoid at
which point it is converted back into wikitext.
For more information about these projects, check out the [VisualEditor][]
and [Parsoid][] pages on mediawiki.
## Developing and installing
For information on installing VisualEditor on a local wiki, please
see https://www.mediawiki.org/wiki/Extension:VisualEditor
For information about running tests and contributing code to VisualEditor,
see [CONTRIBUTING.md](./CONTRIBUTING.md). Patch submissions are reviewed and managed with
[Gerrit][]. There is also [API documentation][] available for the
VisualEditor.
[VisualEditor]: https://www.mediawiki.org/wiki/VisualEditor
[Parsoid]: https://www.mediawiki.org/wiki/Parsoid
[API documentation]: https://doc.wikimedia.org/VisualEditor/master/
[Gerrit]: https://www.mediawiki.org/wiki/Gerrit
|
---
layout: post
title: MNIST For ML Beginners
date: 2018-02-09 20:52:43
categories: Deep Learning
tags: [PDF,Deep Learning,A.I.,Data Science]
---
本篇從Ubuntu建構Tensorflow實驗環境開始描述,並導入Google以Tensorflow建構DNN分類模型的介紹作為實驗方法。MNIST為阿拉伯數字0-9手寫的資料集,本文章為介紹使用DNN分類MNIST資料集。
<!--more-->
[PDF Download](/assets/2018-02-09-MNIST-For-ML-Begineers/MNIST-For-ML-Begineers.pdf )
|
<?php
/**
 * Opens a PDO connection to the bd_app MySQL database.
 *
 * @return PDO|null the open connection, or null when it could not be
 *                  opened (the error is echoed to the page).
 */
function getConexao(){
    $dsn = "mysql:host=localhost;dbname=bd_app;charset=utf8";
    $user = "root";
    $pass = "";
    try {
        $pdo = new PDO($dsn, $user, $pass);
        return $pdo;
    } catch (PDOException $e) {
        echo "erro banco: ".$e->getMessage();
    } catch (Exception $e) {
        echo "erro generico: ".$e->getMessage();
    }
    // Make the failure value explicit instead of falling off the end.
    return null;
}
/**
 * Inserts an application record into tb_aplicativo using a prepared
 * statement and echoes a success/failure message.
 */
function insertDB($nome_aplicativo, $tamanho, $data_instalacao, $versao){
    $conn = getConexao();
    if ($conn === null) {
        // Connection failed; getConexao() already echoed the cause.
        // Guard here to avoid a fatal call on a null connection.
        echo "DEU RUIM! =/";
        return;
    }
    $sql = "INSERT INTO tb_aplicativo(nome_aplicativo, tamanho, data_instalacao, versao)
    VALUES (:nome_aplicativo,:tamanho,:data_instalacao,:versao)";
    $stmt = $conn->prepare($sql);
    $stmt->bindParam(":nome_aplicativo",$nome_aplicativo);
    $stmt->bindParam(":tamanho",$tamanho);
    $stmt->bindParam(":data_instalacao",$data_instalacao);
    $stmt->bindParam(":versao",$versao);
    if ($stmt->execute()) {
        echo "<center><strong>SALVO COM SUCESSO</strong></center>";
    }
    else {
        echo "DEU RUIM! =/";
    }
}
// Only attempt the insert when every expected form field was posted.
// (The original call also carried a trailing comma in the argument list,
// which is a parse error on PHP < 7.3.)
if (isset($_POST['nome_aplicativo'], $_POST['tamanho'], $_POST['data_instalacao'], $_POST['versao'])) {
    insertDB($_POST['nome_aplicativo'], $_POST['tamanho'], $_POST['data_instalacao'], $_POST['versao']);
}
?>
|
import pytest
from mxnet.util import use_np
from mxnet.gluon.data import DataLoader
import numpy as np
import numpy.testing as npt
import tempfile
import pickle
import os
from sklearn.model_selection import train_test_split
from autogluon.core.utils.loaders import load_pd
from autogluon.text.text_prediction.mx.preprocessing import MultiModalTextFeatureProcessor,\
base_preprocess_cfg, MultiModalTextBatchify, get_cls_sep_id, auto_shrink_max_length
from autogluon.text.text_prediction.infer_types import infer_column_problem_types
TEST_CASES = [
['melbourne_airbnb_sample',
'https://autogluon-text-data.s3.amazonaws.com/test_cases/melbourne_airbnb_sample_1000.pq',
'price_label'],
['women_clothing_rating',
'https://autogluon-text-data.s3.amazonaws.com/test_cases/women_clothing_sample.pq',
'Rating']
]
def assert_dataset_match(lhs_dataset, rhs_dataset, threshold=1E-4):
    """Assert that two datasets have the same length and element-wise
    close fields.

    Parameters
    ----------
    lhs_dataset, rhs_dataset
        Sequences of samples; each sample is a sequence of array-like fields.
    threshold
        Used as both rtol and atol for the allclose comparison.
    """
    assert len(lhs_dataset) == len(rhs_dataset)
    for sample_idx in range(len(lhs_dataset)):
        lhs_sample = lhs_dataset[sample_idx]
        rhs_sample = rhs_dataset[sample_idx]
        for field_idx in range(len(lhs_dataset[0])):
            npt.assert_allclose(lhs_sample[field_idx], rhs_sample[field_idx],
                                threshold, threshold)
@use_np
@pytest.mark.parametrize('dataset_name,url,label_column', TEST_CASES)
@pytest.mark.parametrize('backbone_name', ['google_electra_small',
                                           'google_albert_base_v2'])
@pytest.mark.parametrize('all_to_text', [False, True])
def test_preprocessor(dataset_name, url, label_column,
                      backbone_name, all_to_text):
    """End-to-end check of MultiModalTextFeatureProcessor.

    Verifies that fit_transform and a subsequent transform on the same data
    produce identical features, that transform works with and without
    labels, and that the fitted processor survives a pickle round-trip.
    """
    all_df = load_pd.load(url)
    feature_columns = [col for col in all_df.columns if col != label_column]
    train_df, valid_df = train_test_split(all_df, test_size=0.1,
                                          random_state=np.random.RandomState(100))
    # Infer per-column modality and the overall problem type from the data.
    column_types, problem_type = infer_column_problem_types(train_df, valid_df,
                                                            label_columns=label_column)
    cfg = base_preprocess_cfg()
    if all_to_text:
        # Exercise the configuration path where every modality is
        # serialized as text.
        cfg.defrost()
        cfg.categorical.convert_to_text = True
        cfg.numerical.convert_to_text = True
        cfg.freeze()
    preprocessor = MultiModalTextFeatureProcessor(column_types=column_types,
                                                  label_column=label_column,
                                                  tokenizer_name=backbone_name,
                                                  cfg=cfg)
    train_dataset = preprocessor.fit_transform(train_df[feature_columns], train_df[label_column])
    # transform() after fit_transform() on the same frame must match exactly.
    train_dataset_after_transform = preprocessor.transform(train_df[feature_columns], train_df[label_column])
    for i in range(len(train_dataset)):
        for j in range(len(train_dataset[0])):
            npt.assert_allclose(train_dataset[i][j],
                                train_dataset_after_transform[i][j],
                                1E-4, 1E-4)
    valid_dataset = preprocessor.transform(valid_df[feature_columns], valid_df[label_column])
    # Transforming without labels must yield the same feature values.
    test_dataset = preprocessor.transform(valid_df[feature_columns])
    assert_dataset_match(train_dataset, train_dataset_after_transform)
    for i in range(len(test_dataset)):
        for j in range(len(test_dataset[0])):
            npt.assert_allclose(valid_dataset[i][j],
                                test_dataset[i][j],
                                1E-4, 1E-4)
    # Test for pickle dump and load
    with tempfile.TemporaryDirectory() as tmp_dir_name:
        with open(os.path.join(tmp_dir_name, 'preprocessor.pkl'), 'wb') as out_f:
            pickle.dump(preprocessor, out_f)
        with open(os.path.join(tmp_dir_name, 'preprocessor.pkl'), 'rb') as in_f:
            preprocessor_loaded = pickle.load(in_f)
        valid_dataset_loaded = preprocessor_loaded.transform(valid_df[feature_columns],
                                                             valid_df[label_column])
        assert_dataset_match(valid_dataset_loaded, valid_dataset)
        test_dataset_loaded = preprocessor_loaded.transform(valid_df[feature_columns])
        assert_dataset_match(test_dataset_loaded, test_dataset)
@use_np
@pytest.mark.parametrize('dataset_name,url,label_column', TEST_CASES)
@pytest.mark.parametrize('backbone_name', ['google_electra_small',
                                           'google_albert_base_v2',
                                           'fairseq_roberta_base'])
@pytest.mark.parametrize('all_to_text', [False, True])
@pytest.mark.parametrize('insert_sep', [False, True])
@pytest.mark.parametrize('stochastic_chunk', [False, True])
def test_multimodal_batchify(dataset_name, url, label_column,
                             backbone_name, all_to_text, insert_sep,
                             stochastic_chunk):
    """Check MultiModalTextBatchify in both 'train' and 'test' modes.

    Builds dataloaders on top of a fitted preprocessor and verifies that
    every emitted batch respects the auto-shrunk maximum sequence length
    and the expected number of outputs.
    """
    # Test for multimodal batchify
    all_df = load_pd.load(url)
    feature_columns = [col for col in all_df.columns if col != label_column]
    train_df, valid_df = train_test_split(all_df, test_size=0.1,
                                          random_state=np.random.RandomState(100))
    column_types, problem_type = infer_column_problem_types(train_df, valid_df,
                                                            label_columns=label_column)
    cfg = base_preprocess_cfg()
    if all_to_text:
        # Exercise the configuration path where every modality becomes text.
        cfg.defrost()
        cfg.categorical.convert_to_text = True
        cfg.numerical.convert_to_text = True
        cfg.freeze()
    preprocessor = MultiModalTextFeatureProcessor(column_types=column_types,
                                                  label_column=label_column,
                                                  tokenizer_name=backbone_name,
                                                  cfg=cfg)
    cls_id, sep_id = get_cls_sep_id(preprocessor.tokenizer)
    train_dataset = preprocessor.fit_transform(train_df[feature_columns], train_df[label_column])
    test_dataset = preprocessor.transform(valid_df[feature_columns])
    # Shrink the maximum length to the 0.9 quantile of observed lengths,
    # rounded to a multiple of 32 and capped at 512, to limit padding.
    auto_max_length = auto_shrink_max_length(train_dataset=train_dataset,
                                             insert_sep=insert_sep,
                                             num_text_features=len(preprocessor.text_feature_names),
                                             auto_max_length_quantile=0.9,
                                             round_to=32,
                                             max_length=512)
    train_batchify_fn = MultiModalTextBatchify(num_text_inputs=len(preprocessor.text_feature_names),
                                               num_categorical_inputs=len(preprocessor.categorical_feature_names),
                                               num_numerical_inputs=len(preprocessor.numerical_feature_names) > 0,
                                               cls_token_id=cls_id,
                                               sep_token_id=sep_id,
                                               max_length=auto_max_length,
                                               mode='train',
                                               stochastic_chunk=stochastic_chunk,
                                               insert_sep=insert_sep)
    test_batchify_fn = MultiModalTextBatchify(num_text_inputs=len(preprocessor.text_feature_names),
                                              num_categorical_inputs=len(preprocessor.categorical_feature_names),
                                              num_numerical_inputs=len(preprocessor.numerical_feature_names) > 0,
                                              cls_token_id=cls_id,
                                              sep_token_id=sep_id,
                                              max_length=auto_max_length,
                                              mode='test',
                                              stochastic_chunk=stochastic_chunk,
                                              insert_sep=insert_sep)
    train_dataloader = DataLoader(train_dataset, batch_size=4,
                                  batchify_fn=train_batchify_fn, shuffle=True)
    test_dataloader = DataLoader(test_dataset, batch_size=4,
                                 batchify_fn=test_batchify_fn, shuffle=False)
    # Train-mode batches come as (features, label); check arity and that
    # tokenized text never exceeds the shrunk maximum length.
    for sample in train_dataloader:
        features, label = sample[0], sample[1]
        assert len(features) == train_batchify_fn.num_text_outputs + \
            train_batchify_fn.num_categorical_outputs + train_batchify_fn.num_numerical_outputs
        text_token_ids, text_valid_length, text_segment_ids = features[0]
        assert text_token_ids.shape[1] <= auto_max_length
        assert text_segment_ids.shape[1] <= auto_max_length
        assert text_token_ids.shape == text_segment_ids.shape
    # Test-mode batches carry features only (no label).
    for sample in test_dataloader:
        assert len(sample) == test_batchify_fn.num_text_outputs + \
            test_batchify_fn.num_categorical_outputs + test_batchify_fn.num_numerical_outputs
        text_token_ids, text_valid_length, text_segment_ids = sample[0]
        assert text_token_ids.shape[1] <= auto_max_length
        assert text_segment_ids.shape[1] <= auto_max_length
        assert text_token_ids.shape == text_segment_ids.shape
|
package com.jama.mpesa_biz_no_detector.states
import android.graphics.Bitmap
/**
 * States the camera flow transitions through.
 */
sealed class CameraFlowState {
    /** Detection is in progress. */
    object Detecting : CameraFlowState()

    /** Something was detected and is awaiting confirmation. */
    object Detected : CameraFlowState()

    /** The user is confirming the detection result. */
    object Confirming : CameraFlowState()

    /** The result was confirmed, carrying the captured [bitmap]. */
    data class Confirmed(val bitmap: Bitmap) : CameraFlowState()
}
|
plugins {
    kotlin("jvm") version "1.3.60"
}

repositories {
    // JCenter was sunset in 2021 and no longer serves new artifacts;
    // Maven Central hosts the dependencies used here.
    mavenCentral()
}

dependencies {
    implementation(kotlin("stdlib"))
    implementation(kotlin("reflect"))
    implementation(kotlin("stdlib-jdk8"))
    testImplementation("org.junit.jupiter:junit-jupiter-engine:5.5.2")
}

tasks.withType<Test> {
    // JUnit 5 (Jupiter) tests only run when the JUnit Platform is enabled.
    useJUnitPlatform()
}
|
#!/bin/bash
# Tools for working with bash.

# Clears the screen and re-sources the user's bashrc into the current shell.
reload_shell() {
    clear
    # Quote the expansion so paths containing spaces still work.
    # shellcheck disable=SC1090
    source "$HOME/.bashrc"
}

SETUP_SCRIPT=https://raw.githubusercontent.com/matthewbradshaw-io/monorepo/main/shell/setup_local.sh

# Downloads and runs the remote setup script, then reloads the shell.
revolve_shell() {
    bash <(curl -s "$SETUP_SCRIPT")
    reload_shell
}
|
package com.powerincode.questionnaire_app.di.modules.app
import com.jakewharton.retrofit2.adapter.kotlin.coroutines.CoroutineCallAdapterFactory
import com.powerincode.questionnaire_app.data.api.TempService.TempService
import dagger.Module
import dagger.Provides
import retrofit2.Retrofit
import retrofit2.converter.moshi.MoshiConverterFactory
import javax.inject.Singleton
/**
* Created by powerman23rus on 21/02/2019.
*/
/**
 * Dagger module that provides the Retrofit stack used for network access.
 */
@Module
object NetworkModule {
    // Base endpoint for the REST API.
    const val BASE_URL = "https://jsonplaceholder.typicode.com"

    // NOTE(review): the name looks like a typo for "provideTempService";
    // left as-is because renaming a @Provides method should be verified
    // against generated/injection call sites first.
    @Singleton
    @JvmStatic
    @Provides
    fun provideTemService(retrofit : Retrofit): TempService {
        return retrofit.create(TempService::class.java)
    }

    /**
     * Builds the shared Retrofit instance with Moshi conversion and
     * coroutine call-adapter support.
     */
    @Singleton
    @JvmStatic
    @Provides
    fun provideRetrofit(): Retrofit {
        return Retrofit.Builder()
            .baseUrl(BASE_URL)
            .addConverterFactory(MoshiConverterFactory.create())
            .addCallAdapterFactory(CoroutineCallAdapterFactory())
            .build()
    }
}
|
package com.saraha.paws.View.AccountViews.Profile
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.google.firebase.auth.ktx.auth
import com.google.firebase.ktx.Firebase
import com.saraha.paws.Model.User
import com.saraha.paws.Repository.UserRepository
import com.saraha.paws.Util.AppSharedPreference
/**
 * ViewModel exposing the current user's profile data.
 */
class ProfileViewModel: ViewModel() {
    //Shared preference helper class object
    val sharedPref = AppSharedPreference

    // Builds a User from the Firebase auth uid plus values stored in shared
    // preferences.
    // NOTE(review): "pName" is read without !! while every other key uses !!
    // — presumably the corresponding User field is nullable; confirm against
    // the User model. The meaning of each *Name key is not visible here.
    fun getUserInfo() = User(Firebase.auth.currentUser?.uid, sharedPref.read("pName",""),
        sharedPref.read("eName","")!!, null, sharedPref.read("uName","")!!,
        sharedPref.read("mName","")!!, sharedPref.read("gName","")!!,
        sharedPref.read("tName","")!!
    )
}
|
import * as React from "react";
import PropTypes from "prop-types";
import { DataGrid } from "@mui/x-data-grid";
const columns = [
{ field: "id", headerName: "ID", width: 160 },
{ field: "name", headerName: "Name", width: 160 },
{ field: "email", headerName: "Email", width: 300 },
];
export default function UsersTable({
height,
users,
canSelect,
onSelectionModelChange,
selectionModel,
}) {
const rows = users;
if (!users) {
return null;
}
return (
<div style={{ height: height, width: "100%" }}>
<DataGrid
rows={rows}
columns={columns}
pageSize={10}
rowsPerPageOptions={[15]}
checkboxSelection={canSelect}
onSelectionModelChange={onSelectionModelChange}
selectionModel={selectionModel}
/>
</div>
);
}
// Defaults keep the table renderable with no props supplied.
UsersTable.defaultProps = {
  height: 300,
  canSelect: false,
  onSelectionModelChange: () => {},
  selectionModel: [],
  users: [],
};

UsersTable.propTypes = {
  height: PropTypes.number,
  canSelect: PropTypes.bool,
  onSelectionModelChange: PropTypes.func,
  selectionModel: PropTypes.array,
  users: PropTypes.array,
};
|
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
*/
package opengl.templates
import org.lwjgl.generator.*
import opengl.*
// Generator template producing the LWJGL bindings class for the
// AMD_gpu_shader_int64 OpenGL extension.
val AMD_gpu_shader_int64 = "AMDGPUShaderInt64".nativeClassGL("AMD_gpu_shader_int64", postfix = NV) {
    documentation =
        """
        Native bindings to the ${registryLink("AMD", "gpu_shader_int64")} extension.
        This extension was developed based on the NV_gpu_shader5 extension to allow implementations supporting 64-bit integers to expose the feature without
        the additional requirements that are present in NV_gpu_shader5.
        The extension introduces the following features for all shader types:
        ${ul(
            """
            support for 64-bit scalar and vector integer data types, including uniform API, uniform buffer object, transform feedback, and shader input and
            output support;
            """,
            "new built-in functions to pack and unpack 64-bit integer types into a two-component 32-bit integer vector;",
            "new built-in functions to convert double-precision floating-point values to or from their 64-bit integer bit encodings;",
            "vector relational functions supporting comparisons of vectors of 64-bit integer types; and",
            "common functions abs, sign, min, max, clamp, and mix supporting arguments of 64-bit integer types."
        )}
        This extension is designed to be a functional superset of the 64-bit integer support introduced by NV_gpu_shader5 and to be source code compatible with
        that, thus the new procedures, functions, and tokens are identical to those found in that extension.
        Requires ${GL40.core} and GLSL 4.00.
        """

    // GL enum token values shared with the NV extensions.
    IntConstant(
        "Returned by the {@code type} parameter of GetActiveAttrib, GetActiveUniform, and GetTransformFeedbackVarying.",
        "INT64_NV"..0x140E,
        "UNSIGNED_INT64_NV"..0x140F,
        "INT8_NV"..0x8FE0,
        "INT8_VEC2_NV"..0x8FE1,
        "INT8_VEC3_NV"..0x8FE2,
        "INT8_VEC4_NV"..0x8FE3,
        "INT16_NV"..0x8FE4,
        "INT16_VEC2_NV"..0x8FE5,
        "INT16_VEC3_NV"..0x8FE6,
        "INT16_VEC4_NV"..0x8FE7,
        "INT64_VEC2_NV"..0x8FE9,
        "INT64_VEC3_NV"..0x8FEA,
        "INT64_VEC4_NV"..0x8FEB,
        "UNSIGNED_INT8_NV"..0x8FEC,
        "UNSIGNED_INT8_VEC2_NV"..0x8FED,
        "UNSIGNED_INT8_VEC3_NV"..0x8FEE,
        "UNSIGNED_INT8_VEC4_NV"..0x8FEF,
        "UNSIGNED_INT16_NV"..0x8FF0,
        "UNSIGNED_INT16_VEC2_NV"..0x8FF1,
        "UNSIGNED_INT16_VEC3_NV"..0x8FF2,
        "UNSIGNED_INT16_VEC4_NV"..0x8FF3,
        "UNSIGNED_INT64_VEC2_NV"..0x8FF5,
        "UNSIGNED_INT64_VEC3_NV"..0x8FF6,
        "UNSIGNED_INT64_VEC4_NV"..0x8FF7,
        "FLOAT16_NV"..0x8FF8,
        "FLOAT16_VEC2_NV"..0x8FF9,
        "FLOAT16_VEC3_NV"..0x8FFA,
        "FLOAT16_VEC4_NV"..0x8FFB
    )

    // Functions are identical to those of NV_gpu_shader5 /
    // NV_shader_buffer_load, so reuse their definitions instead of
    // redeclaring them.
    NV_gpu_shader5 reuse "Uniform1i64NV"
    NV_gpu_shader5 reuse "Uniform2i64NV"
    NV_gpu_shader5 reuse "Uniform3i64NV"
    NV_gpu_shader5 reuse "Uniform4i64NV"
    NV_gpu_shader5 reuse "Uniform1i64vNV"
    NV_gpu_shader5 reuse "Uniform2i64vNV"
    NV_gpu_shader5 reuse "Uniform3i64vNV"
    NV_gpu_shader5 reuse "Uniform4i64vNV"
    NV_gpu_shader5 reuse "Uniform1ui64NV"
    NV_gpu_shader5 reuse "Uniform2ui64NV"
    NV_gpu_shader5 reuse "Uniform3ui64NV"
    NV_gpu_shader5 reuse "Uniform4ui64NV"
    NV_gpu_shader5 reuse "Uniform1ui64vNV"
    NV_gpu_shader5 reuse "Uniform2ui64vNV"
    NV_gpu_shader5 reuse "Uniform3ui64vNV"
    NV_gpu_shader5 reuse "Uniform4ui64vNV"
    NV_gpu_shader5 reuse "GetUniformi64vNV"
    NV_shader_buffer_load reuse "GetUniformui64vNV"
    NV_gpu_shader5 reuse "ProgramUniform1i64NV"
    NV_gpu_shader5 reuse "ProgramUniform2i64NV"
    NV_gpu_shader5 reuse "ProgramUniform3i64NV"
    NV_gpu_shader5 reuse "ProgramUniform4i64NV"
    NV_gpu_shader5 reuse "ProgramUniform1i64vNV"
    NV_gpu_shader5 reuse "ProgramUniform2i64vNV"
    NV_gpu_shader5 reuse "ProgramUniform3i64vNV"
    NV_gpu_shader5 reuse "ProgramUniform4i64vNV"
    NV_gpu_shader5 reuse "ProgramUniform1ui64NV"
    NV_gpu_shader5 reuse "ProgramUniform2ui64NV"
    NV_gpu_shader5 reuse "ProgramUniform3ui64NV"
    NV_gpu_shader5 reuse "ProgramUniform4ui64NV"
    NV_gpu_shader5 reuse "ProgramUniform1ui64vNV"
    NV_gpu_shader5 reuse "ProgramUniform2ui64vNV"
    NV_gpu_shader5 reuse "ProgramUniform3ui64vNV"
    NV_gpu_shader5 reuse "ProgramUniform4ui64vNV"
}
|
; ::: data1.txt
0001_1000
; ::: data2.txt
101
; ::: data3.txt
0101
1010
1 1 1 1
0101
1010
; ::: data4.txt
0b1101
; :::
#d incbinstr("data1.txt") ; = 0x18
; :::
#d incbinstr("data2.txt") ; = 0b101
; :::
#d incbinstr("data3.txt") ; = 0x5af5a
; :::
#d incbinstr("data4.txt") ; error: invalid character
; :::
#d incbinstr("unk") ; error: not found
; :::
#d incbinstr("unk" @ 0xffff) ; error: not found
; :::
#d x
#d incbinstr(x) ; error: not found
x = "data1.txt"
; :::
#d incbinstr() ; error: wrong
; :::
#d incbinstr("data1.txt", "data2.txt") ; error: wrong
|
package com.waes.diffservice.dto;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Immutable DTO describing a single difference between two JSON documents
 * as an (operation, path, value) triple.
 */
@Getter
@AllArgsConstructor
public class JsonDifference {
    /** The kind of change at {@link #path}. */
    private final String operation;
    /** Location of the difference within the document. */
    private final String path;
    /** The value associated with the change. */
    private final String value;
}
|
// Greater-than / less-than comparison demo.
const dinero = 500, totalAPagar = 300

// Report whether the available money covers the total.
const mensaje = dinero > totalAPagar ? 'Si podemos pagar' : 'Fondos insuficientes'
console.log(mensaje)
|
var Sequence = require('./Sequence');
var Util = require('util');
var Packets = require('../packets');

module.exports = Quit;
Util.inherits(Quit, Sequence);
// Protocol sequence wrapping COM_QUIT; delegates lifecycle/callback handling
// to the Sequence base class.
function Quit(callback) {
  Sequence.call(this, callback);
}

// Emits the COM_QUIT packet to be written to the server.
// NOTE(review): no response handling is defined here — presumably the server
// simply closes the connection after COM_QUIT; confirm in Sequence/protocol.
Quit.prototype.start = function() {
  this.emit('packet', new Packets.ComQuitPacket);
};
|
// GraphQL resolver map for the top-level Query type.
export default {
  Query: {
    // Static greeting, useful as a schema smoke test.
    hello: () => 'Hello world!',
    // Node.js runtime version string (e.g. "v14.17.0").
    nodeVersion: () => process.version
  }
};
|
#!/bin/sh
# Pushes install.sh to the device's temp dir and executes it as root.
adb wait-for-device
# -e targets the (single) emulator instance.
adb -e push install.sh /data/local/tmp
# Run the pushed script via su, passing its own directory as argument.
adb -e shell "su -c 'sh /data/local/tmp/install.sh /data/local/tmp'"
|
<?php
/**
* Created by PhpStorm.
* User: Rubén
* Date: 25/06/2019
* Time: 20:58
*/
namespace AppBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* @ORM\Entity
* @ORM\Table(name="replic")
*/
class Replic
{
    /**
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="AUTO")
     * @ORM\Column(type="integer")
     */
    private $id;

    /**
     * Owning user; nullable, so a replic may be unowned.
     *
     * @ORM\ManyToOne(targetEntity="AppBundle\Entity\User", inversedBy="replics")
     * @ORM\JoinColumn(nullable=true)
     * @var User
     */
    private $owner;

    /**
     * Mercenary this replic is a copy of.
     *
     * @ORM\ManyToOne(targetEntity="AppBundle\Entity\Mercenary", inversedBy="copys")
     * @var Mercenary
     */
    private $mercenary;

    /**
     * Accumulated experience points.
     *
     * @ORM\Column(type="bigint")
     * @var int
     */
    private $exp;

    /**
     * @return mixed
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * @return User
     */
    public function getOwner()
    {
        return $this->owner;
    }

    /**
     * @param User $owner
     * @return Replic
     */
    public function setOwner($owner)
    {
        $this->owner = $owner;
        return $this;
    }

    /**
     * @return Mercenary
     */
    public function getMercenary()
    {
        return $this->mercenary;
    }

    /**
     * @param Mercenary $mercenary
     * @return Replic
     */
    public function setMercenary($mercenary)
    {
        $this->mercenary = $mercenary;
        return $this;
    }

    /**
     * @return int
     */
    public function getExp()
    {
        return $this->exp;
    }

    /**
     * @param int $exp
     * @return Replic
     */
    public function setExp($exp)
    {
        $this->exp = $exp;
        return $this;
    }

    /**
     * Experience-based level: one level per 50 exp, exact halves rounded down.
     *
     * @return float the computed level
     */
    public function getLevel()
    {
        // Bug fix: round() RETURNS the rounded value, it does not modify its
        // argument in place. The previous code discarded round()'s result and
        // returned the raw quotient (e.g. 2.4 instead of 2).
        return round($this->getExp() / 50, 0, PHP_ROUND_HALF_DOWN);
    }
}
|
using Snouthill.Game;
namespace Snouthill.Net.Packets.Catalog;
/// <summary>
/// Client request to purchase a catalog item ("GRPC", opcode 100).
/// Gift fields are null when <c>IsGift</c> is false.
/// </summary>
[RequestOpcode(100)] // "GRPC"
public record BuyItemRequest(
    string PageName,
    string SaleCode,
    string ExtraData,
    bool IsGift,
    string? GiftRecipient,
    string? GiftMessage) : IRequest;
public class BuyItemRequestDeserializer : IRequestDeserializer<BuyItemRequest>
{
    /// <summary>
    /// Parses the CR-separated catalog purchase payload. Field order:
    /// stage, page name, language, sale code, extra data, gift flag, then
    /// (only when the gift flag is "1") recipient and message.
    /// stage and language are parsed but intentionally unused; they are kept
    /// to document the wire format.
    /// </summary>
    public BuyItemRequest Deserialize(PacketReader reader)
    {
        var data = reader.ReadRawString().Split("\r");
        var stage = data[0]; // "production"
        var pageName = data[1]; // "Gothic"
        var language = data[2]; // "en"
        var saleCode = data[3]; // "gothic_chair*3"
        var extraData = data[4]; // "-"
        var isGift = data[5] == "1";
        // Gift fields are only present on the wire when gifting.
        var giftRecipient = isGift ? data[6] : null;
        var giftMessage = isGift ? data[7] : null;
        return new BuyItemRequest(pageName, saleCode, extraData, isGift, giftRecipient, giftMessage);
    }
}
public class BuyItemRequestHandler : IRequestHandler<BuyItemRequest>
{
    /// <summary>
    /// Handles a catalog purchase. Currently always responds with
    /// NoCreditsResponse — purchasing is not implemented yet.
    /// </summary>
    public void Handle(BuyItemRequest request, Player player)
    {
        player.Send(new NoCreditsResponse());
    }
}
|
require 'matrix'
require 'mb-math'
require 'mb-util'
require_relative 'geometry/version'
module MB
# Inefficient algorithms for some basic geometric operations.
module Geometry
class << self
# Finds the line intersection, if any, between two lines given coordinates
# in the form used by rubyvor (either [a, b, c] or [:l, a, b, c], using
# the formula ax + by = c). Returns an array of [x, y] if a single
# intersection exists. Returns nil if the lines are coincident or there is
# no intersection.
def line_intersection(line1, line2)
  a1, b1, c1 = line1
  a2, b2, c2 = line2

  # Cramer-style determinant; zero means the lines are parallel or coincident.
  det = (b1 * a2 - a1 * b2).to_f
  return nil if det == 0

  [
    (b1 * c2 - c1 * b2) / det,
    (c1 * a2 - a1 * c2) / det,
  ]
end
# Returns an array of [x, y] if the two segments (given by arrays of [x1,
# y1, x2, y2]) intersect. Returns nil if the segments are parallel or do
# not intersect.
def segment_intersection(seg1, seg2)
  x1, y1, x2, y2 = seg1
  x3, y3, x4, y4 = seg2

  # Extend both segments to general-form lines for the intersection test.
  line1 = segment_to_line(*seg1)
  line2 = segment_to_line(*seg2)

  # Overlap of the two segments' axis-aligned bounding boxes; a point on
  # both segments must lie within this overlap.
  xmin = [[x1, x2].min, [x3, x4].min].max
  xmax = [[x1, x2].max, [x3, x4].max].min
  ymin = [[y1, y2].min, [y3, y4].min].max
  ymax = [[y1, y2].max, [y3, y4].max].min

  x, y = line_intersection(line1, line2)
  return nil unless x && y

  # Only accept the line intersection if it falls within both segments.
  if x >= xmin && x <= xmax && y >= ymin && y <= ymax
    return [x, y]
  end

  nil
end
# Generates an arbitrary segment for the given line a * x + b * y = c.
# Possibly useful for working with vertical or horizontal lines via the dot
# product. Returns [x1, y1, x2, y2].
def line_to_segment(a, b, c)
  raise 'Invalid line (a or b must be nonzero)' if a == 0 && b == 0

  if a == 0
    # Horizontal line y = c / b: any two points with distinct x work.
    y = c.to_f / b
    [0.0, y, 1.0, y]
  elsif b == 0
    # Vertical line x = c / a: any two points with distinct y work.
    x = c.to_f / a
    [x, 0.0, x, 1.0]
  else
    # Oblique line: take the points at x = 0 and x = 1
    # (at x = 1, y = (c - a) / b from a*x + b*y = c).
    [0.0, c.to_f / b, 1.0, (c - a).to_f / b]
  end
end
# Finds the general form of a line intersecting the given points. Returns
# [a, b, c] where a * x + b * y = c.
def segment_to_line(x1, y1, x2, y2)
  if x1 == x2 && y1 == y2
    raise 'Need two distinct points to define a segment'
  end

  return [0.0, 1.0, y1] if y1 == y2 # horizontal: y = y1
  return [1.0, 0.0, x1] if x1 == x2 # vertical: x = x1

  # Oblique: (y1 - y2) * x + (x2 - x1) * y = x2 * y1 - x1 * y2
  [y1 - y2, x2 - x1, x2 * y1 - x1 * y2]
end
# Returns the area of a 2D polygon with the given +vertices+ in order of
# connection, each of which must be a 2D coordinate (an array of two
# numbers). If vertices are given clockwise, the area will be negative.
#
# Uses the formula from http://mathworld.wolfram.com/PolygonArea.html
def polygon_area(vertices)
  count = vertices.length
  raise "A polygon must have 3 or more vertices, not #{count}" unless count >= 3

  # Shoelace formula; vertices[i - 1] wraps to the last vertex at i == 0
  # thanks to Ruby's negative array indexing.
  total = vertices.each_index.sum do |i|
    ax, ay = vertices[i - 1]
    bx, by = vertices[i]
    ax * by - bx * ay
  end

  total * 0.5
end
# Dot product of two vectors (x1, y1) and (x2, y2).
#
# Using Ruby's Vector class (from require 'matrix') is probably a better
# option, when possible.
def dot(x1, y1, x2, y2)
  # Sum of componentwise products.
  (x1 * x2) + (y1 * y2)
end
# Computes a bounding box for the given 2D +points+ (an array of
# two-element arrays), returned as [xmin, ymin, xmax, ymax]. If +expand+
# is given and greater than 0.0, then the bounding box dimensions will be
# multiplied by (1.0 + +expand+).
def bounding_box(points, expand = nil)
  raise ArgumentError, 'No points were given' if points.empty?

  # Extremes along each axis.
  xmin, xmax = points.map { |p| p[0] }.minmax
  ymin, ymax = points.map { |p| p[1] }.minmax

  if expand && expand > 0.0
    # Grow each dimension by +expand+ overall (half on each side).
    half_w = 0.5 * expand * (xmax - xmin)
    half_h = 0.5 * expand * (ymax - ymin)
    xmin -= half_w
    xmax += half_w
    ymin -= half_h
    ymax += half_h
  end

  [xmin, ymin, xmax, ymax]
end
# Clips a segment to a bounding box. Returns the clipped segment as an
# array with [x1, y1, x2, y2].
#
# TODO: Delete this if it is never used and no tests are written.
# Clips a segment to a bounding box; returns [x1, y1, x2, y2].
# Raises if fewer than two endpoints can be determined.
def clip_segment(segment, box)
  xmin, ymin, xmax, ymax = box
  x1, y1, x2, y2 = segment

  new_segment = []

  # Keep any endpoint that already lies inside the box.
  if x1 >= xmin && x1 <= xmax && y1 >= ymin && y1 <= ymax
    new_segment += [x1, y1]
  end
  if x2 >= xmin && x2 <= xmax && y2 >= ymin && y2 <= ymax
    new_segment += [x2, y2]
  end

  # Both endpoints inside: nothing to clip.
  return new_segment if new_segment.size == 4

  # Otherwise intersect with each box edge until two points are found.
  # (Debug puts-to-stdout removed: a library method must not print.)
  bounds = {
    top: [xmin, ymax, xmax, ymax],
    left: [xmin, ymin, xmin, ymax],
    bottom: [xmin, ymin, xmax, ymin],
    right: [xmax, ymin, xmax, ymax],
  }

  bounds.each_value do |edge_seg|
    if intersection = segment_intersection(segment, edge_seg)
      new_segment += intersection
      return new_segment if new_segment.size == 4
    end
  end

  raise 'No segment could be formed'
end
# Returns the distance from the line described by a*x + b*y = c to the
# point (x, y).
#
# Based on the formula from
# http://mathworld.wolfram.com/Point-LineDistance2-Dimensional.html
def distance_to_line(a, b, c, x, y)
  # Absolute offset of the point from the line (c moved across the equals
  # sign, hence the subtraction), normalized by the length of the line's
  # normal vector [a, b].
  offset = (a * x + b * y - c).abs.to_f
  offset / Math.hypot(a, b)
end
# Returns a line that is the perpendicular bisector of the given segment as
# [a, b, c], where a * x + b * y = c.
#
# Based on the derivation from https://math.stackexchange.com/a/2079662
def perpendicular_bisector(x1, y1, x2, y2)
  # The bisector is the locus of points equidistant from both endpoints:
  # expanding |p - p1|^2 == |p - p2|^2 yields
  # (x2 - x1) * x + (y2 - y1) * y = (x2^2 - x1^2 + y2^2 - y1^2) / 2.
  [
    x2 - x1,
    y2 - y1,
    0.5 * (x2 * x2 - x1 * x1 + y2 * y2 - y1 * y1)
  ]
end
# Returns the circumcenter of the triangle defined by the given three
# points as [x, y]. Returns nil if the points are collinear.
#
# The circumcenter of a polygon is the center of the circle that passes
# through all the points of the polygon. See also #circumcircle.
def circumcenter(x1, y1, x2, y2, x3, y3)
  # The circumcenter lies on the perpendicular bisector of every side, so
  # intersecting two of them suffices; nil intersection means collinearity.
  bisector_a = perpendicular_bisector(x1, y1, x2, y2)
  bisector_b = perpendicular_bisector(x2, y2, x3, y3)

  center = line_intersection(bisector_a, bisector_b)
  return nil if center.nil? || center.any?(&:nil?)

  center
end
# Returns the circumcircle of the triangle defined by the given three
# points as [x, y, rsquared]. Returns nil if the points are collinear.
def circumcircle(x1, y1, x2, y2, x3, y3)
  center = circumcenter(x1, y1, x2, y2, x3, y3)
  return nil if center.nil?

  cx, cy = center

  # Squared radius: squared distance from the center to any vertex
  # (the first one is used here).
  [cx, cy, (cx - x1) ** 2 + (cy - y1) ** 2]
end
# Returns the average of all of the given points. Each point should have
# the same number of dimensions. Returns nil if no points were given.
def centroid(points)
  return nil if points.empty?

  # Transposing groups matching coordinates together (one array per
  # dimension); the centroid is the per-dimension mean.
  points.transpose.map { |coords| coords.sum.to_f / points.size }
end
# Returns a Matrix that will rotate augmented 2D vectors by +:radians+
# around the point (+:xcenter+, +:ycenter+).
def rotation_matrix(radians:, xcenter: 0, ycenter: 0)
  # Translate the rotation center to the origin...
  pre_translate = Matrix[
    [1, 0, -xcenter],
    [0, 1, -ycenter],
    [0, 0, 1]
  ]

  # ...rotate (Numeric#rotation comes from the mb-math gem — presumably a
  # 2x2 rotation matrix for this angle; confirm in mb-math), widened to a
  # 3x3 augmented matrix...
  r = radians.rotation
  rotation = Matrix[[*r.row(0), 0], [*r.row(1), 0], [0, 0, 1]]

  # ...then translate the center back.
  post_translate = Matrix[
    [1, 0, xcenter],
    [0, 1, ycenter],
    [0, 0, 1]
  ]

  post_translate * rotation * pre_translate
end
# Returns a Matrix that will scale 2D vectors by +:xscale+/+:yscale+
# (each defaults to copying the other, but at least one must be
# specified) centered around the point (+:xcenter+, +:ycenter+).
# Multiply with an augmented Vector to apply the transformation.
#
# Example:
# v = Vector[2, 2, 1] # Third/augmented element (w) must be 1
# m = MB::Geometry.scale_matrix(xscale: 3, yscale: 2, xcenter: 1, ycenter: 1)
# m * v
# # => Vector[4, 3, 1] # x, y, w
# Fix: :xscale was a required keyword, contradicting the documented contract
# that "at least one" of :xscale/:yscale may be given. Making it optional
# (default nil) is backward compatible and lets yscale-only calls work.
def scale_matrix(xscale: nil, yscale: nil, xcenter: 0, ycenter: 0)
  raise "Specify at least one of :xscale and :yscale" if !(xscale || yscale)

  # Each scale defaults to the other when only one is given.
  xscale ||= yscale
  yscale ||= xscale

  # Augmented 2D scale about (xcenter, ycenter): the translation column
  # keeps the center point fixed.
  Matrix[
    [xscale, 0, -xcenter * (xscale - 1)],
    [0, yscale, -ycenter * (yscale - 1)],
    [0, 0, 1]
  ]
end
end
end
Geo = Geometry
end
require_relative 'geometry/generators'
require_relative 'geometry/delaunay'
require_relative 'geometry/voronoi'
require_relative 'geometry/voronoi_animator'
require_relative 'geometry/correction'
|
# CHANGELOG - istio
1.0.0 / 2018-03-23
==================
### Changes
* [FEATURE] Adds Istio Integration
|
package org.usfirst.frc.team2083.commands.groups;
import org.usfirst.frc.team2083.commands.ArmCommandPos;
import org.usfirst.frc.team2083.commands.GripperCommand;
import org.usfirst.frc.team2083.commands.WristCommandPos;
import edu.wpi.first.wpilibj.command.CommandGroup;
/**
 * Command group that catches a power cube: close the gripper, zero the
 * wrist and arm, then re-open the gripper.
 */
public class PowerCubeCatch extends CommandGroup
{
	public PowerCubeCatch()
	{
		// Second argument is addSequential's timeout. NOTE(review): WPILib
		// timeouts are in seconds — 1500 looks like milliseconds; confirm units.
		addSequential(new GripperCommand(GripperCommand.Action.CLOSE), 1500); // call stop or fix code
		addSequential(new WristCommandPos(0.0));
		addSequential(new ArmCommandPos(0.0));
		// NOTE(review): the wrist is commanded to 0.0 both before and after the
		// arm move — presumably to re-assert position after the arm settles;
		// confirm intent.
		addSequential(new WristCommandPos(0.0));
		addSequential(new GripperCommand(GripperCommand.Action.OPEN), 1500); // call stop or fix code
	}
}
|
from mixcoatl.resource import Resource
from mixcoatl.decorators.validations import required_attrs
from mixcoatl.decorators.lazy import lazy_property
from mixcoatl.utils import camelize, camel_keys, uncamel_keys
import json
class MachineImage(Resource):
""" A machine image is the baseline image or template from which virtual machines may be
provisioned. Some clouds allow machine image/template sharing. In those clouds, Dell Cloud
Manager creates multiple machine image records referencing the shared machine image object
to enable users to maintain separate meta-data over those shared images. """
PATH = 'infrastructure/MachineImage'
COLLECTION_NAME = 'images'
PRIMARY_KEY = 'machine_image_id'
def __init__(self, machine_image_id=None, endpoint=None, *args, **kwargs):
"""A machine image is the baseline image or template from which
virtual machines may be provisioned.
"""
Resource.__init__(self, request_details='basic', endpoint=endpoint)
self.__machine_image_id = machine_image_id
@property
def machine_image_id(self):
"""`int` - The unique DCM id for this machine image"""
return self.__machine_image_id
@lazy_property
def architecture(self):
"""`str` - The underlying CPU architecture of the virtual machine in question"""
return self.__architecture
@lazy_property
def legacy_owner_id(self):
return self.__legacy_owner_id
@lazy_property
def cloud(self):
"""`dict` - The cloud in which this machine image is installed"""
return self.__cloud
@lazy_property
def creation_timestamp(self):
"""`str` - The date and time this machine image was first created
.. .note::
Some clouds do not report his value and it may therefore be `00:00 UTC January 1, 1970
"""
return self.__creation_timestamp
@lazy_property
def customer(self):
"""`dict` - The customer in whose library this machine image record is being managed"""
return self.__customer
@lazy_property
def budget(self):
    """`int` - The id of the billing code against which costs associated are
    billed for this machine image

    NOTE(review): this property is re-defined later in the class (with a
    setter); at class-creation time the later definition wins, making this
    one dead code. Consider removing one of the two.
    """
    return self.__budget
@lazy_property
def name(self):
"""`str` - The user friendly name for the machine image"""
return self.__name
@name.setter
def name(self, n):
self.__name = n
@lazy_property
def description(self):
"""`str` - The description of the machine image established in DCM"""
return self.__description
@description.setter
def description(self, d):
self.__description = d
@lazy_property
def owning_account(self):
"""`dict` - The DCM cloud account that is the account under which
the machine image is registered
.. .note::
This value may be empty if the machine image belongs to an account
not using DCM
"""
return self.__owning_account
@lazy_property
def owning_cloud_account_number(self):
"""`str` - The DCM cloud account that is the account under which
the machine image is registered.
.. .note::
This value is empty for public images and in rare circumstances
when DCM is unable to determine the ownership
"""
return self.__owning_cloud_account_number
@lazy_property
def owning_user(self):
"""`dict` or `None` - The user who is the owner of record of this machine image.
.. .note::
The owner may be null in cases of auto-discovery or certain automated scenarios
"""
return self.__owning_user
@lazy_property
def owning_groups(self):
"""`list` - The groups who have ownership over this machine image"""
return self.__owning_groups
@owning_groups.setter
def owning_groups(self, d):
self.__owning_groups = d
@lazy_property
def platform(self):
"""`str` - The operating system bundled into the machine image/template"""
return self.__platform
@lazy_property
def provider_id(self):
"""`str` - The cloud provider's unique id for the machine image"""
return self.__provider_id
@lazy_property
def region(self):
"""`dict` - The region in which this machine image is available"""
return self.__region
@lazy_property
def removable(self):
"""`bool` - Whether or not this machine image can be deleted"""
return self.__removable
@lazy_property
def sharable(self):
"""`bool` - Whether or not this image can be shared"""
return self.__sharable
@lazy_property
def status(self):
"""`str` - The current status of this machine image"""
return self.__status
@lazy_property
def label(self):
"""`str` - A color label assigned to this machine image"""
return self.__label
@lazy_property
def products(self):
"""`list` - The server products that can be used to provision a virtual
machine based on this machine image/template
"""
return self.__products
@lazy_property
def agent_version(self):
"""`int` - The version of the DCM agent if installed on the machine image"""
return self.__agent_version
@lazy_property
def public(self):
"""`bool` - Indicates whether or not this image is publicly shared.
This value may be modified only for machine images that belong to your account. """
return self.__public
@public.setter
def public(self, p):
self.__public = p
@lazy_property
def budget(self):
return self.__budget
@budget.setter
def budget(self, b):
self.__budget = b
@required_attrs(['machine_image_id'])
def destroy(self, reason='no reason provided'):
"""Deletes machine image with reason :attr:`reason`
:param reason: The reason for removing the image
:type reason: str.
:returns: bool -- Result of API call
"""
p = self.PATH + "/" + str(self.machine_image_id)
qopts = {'reason': reason}
return self.delete(p, params=qopts)
@required_attrs(['server_id', 'name', 'budget', 'description', 'owning_groups'])
def create(self, callback=None):
    """Creates a machine image from server_id

    >>> def cb(j): print(j)
    >>> m = MachineImage()
    >>> m.server_id = 12345
    >>> m.name = 'image-1-test'
    >>> m.budget = 12345
    >>> m.create(callback=cb)

    :param callback: optional callable invoked with the job on success;
        when given, the job id is passed to it and NOT returned.
    :returns: int -- The job id of the create request
    :raises: :class:`MachineImageException` if the API reported an error
    """
    payload = {'imageServer':
               [{
                   'budget': int(self.budget),
                   'description': self.description,
                   'name': self.name,
                   'server': {"serverId": int(self.server_id)},
                   # NOTE(review): owning_groups (a list, per its setter) is
                   # embedded whole as a single groupId value here — confirm
                   # the API expects this rather than one dict per group.
                   'owningGroups': [{"groupId": self.owning_groups}],
               }]}
    self.post(data=json.dumps(payload))
    if self.last_error is None:
        if callback is not None:
            callback(self.current_job)
        else:
            return self.current_job
    else:
        raise MachineImageException(self.last_error)
@required_attrs(['machine_image_id'])
def update(self, **kwargs):
"""Updates meta-data for an image.
:param description: The description of an image.
:type description: str.
:param name: The name of an image.
:type name: str.
:param label: The label of an image.
:type label: str.
:returns: True if successful or an error message if fails.
"""
payload = {'describeImage': [{}]}
if 'description' in kwargs:
payload['describeImage'][0]['description'] = kwargs['description']
if 'name' in kwargs:
payload['describeImage'][0]['name'] = kwargs['name']
if 'label' in kwargs:
payload['describeImage'][0]['label'] = kwargs['label']
p = self.PATH + "/" + str(self.machine_image_id)
return self.put(p, data=json.dumps(payload))
@required_attrs(['machine_image_id'])
def register_agent(self, agent_version, **kwargs):
"""Registers an agent with the machine image.
:param agent_version: The agent protocol version to be registered.
:type name: int.
:returns: True if successful or an error message if fails.
"""
payload = {'registerAgent': [{"agentVersion": agent_version}]}
p = self.PATH + "/" + str(self.machine_image_id)
return self.put(p, data=json.dumps(payload))
@required_attrs(['machine_image_id'])
def unregister_agent(self, **kwargs):
"""Unregisters an agent from the machine image.
:returns: True if successful or an error message if fails.
"""
payload = {'unRegisterAgent': [{}]}
p = self.PATH + "/" + str(self.machine_image_id)
return self.put(p, data=json.dumps(payload))
@classmethod
def all(cls, endpoint=None, **kwargs):
    """Return all machine images

    :param machine_image_id: The id of the machine image
    :type machine_image_id: int.
    :param region_id: The region to search for machine images
    :type region_id: int.
    :param keys_only: Return :attr:`machine_image_id` instead of :class:`MachineImage`
    :type keys_only: bool.
    :param available: Return only available images. Default is `true`
    :type available: str.
    :param registered: Return only images with the DCM agent installed. Default is `false`
    :type registered: str.
    :param detail: The level of detail to return - `basic` or `extended`
    :type detail: str.
    :returns: `list` of :class:`MachineImage` or :attr:`machine_image_id`
    :raises: :class:`MachineImageException`
    """
    # A specific id narrows the request to that single image's path.
    if 'machine_image_id' in kwargs:
        r = Resource(cls.PATH + "/" + str(kwargs['machine_image_id']), endpoint=endpoint)
    else:
        r = Resource(cls.PATH, endpoint=endpoint)
    params = {}
    if 'keys_only' in kwargs:
        keys_only = kwargs['keys_only']
    else:
        keys_only = False
    if 'region_id' in kwargs:
        params['regionId'] = kwargs['region_id']
    if 'available' in kwargs:
        # NOTE(review): the 'available' kwarg maps to the 'active' query
        # parameter — looks intentional, but confirm against the DCM API.
        params['active'] = kwargs['available']
    if 'registered' in kwargs:
        params['registered'] = kwargs['registered']
    x = r.get(params=params)
    if r.last_error is None:
        if keys_only is True:
            # Bare ids, key name camelized to match the API payload.
            return [i[camelize(cls.PRIMARY_KEY)]
                    for i in x[cls.COLLECTION_NAME]]
        else:
            # Ad-hoc objects built from the uncamelized payload
            # (note: these are not MachineImage instances).
            return [type(cls.__name__, (object,), i)
                    for i in uncamel_keys(x)[cls.COLLECTION_NAME]]
    else:
        raise MachineImageException(r.last_error)
class MachineImageException(Exception):
    """Raised when a MachineImage API call reports an error.

    Inherits from Exception (not BaseException) so generic
    ``except Exception`` handlers can catch it; BaseException is reserved
    for system-exiting exceptions like KeyboardInterrupt/SystemExit.
    """
    pass
|
/// <reference path="../node_modules/pxt-core/built/pxteditor.d.ts" />
namespace pxt.editor {
initExtensionsAsync = function(opts: pxt.editor.ExtensionOptions): Promise<pxt.editor.ExtensionResult> {
pxt.debug('loading microbit target extensions...')
const res: pxt.editor.ExtensionResult = {
hexFileImporters: [{
id: "blockly",
canImport: data => data.meta.cloudId == "microbit.co.uk" && data.meta.editor == "blockly",
importAsync: (project, data) => project.createProjectAsync({
filesOverride: {
"main.blocks": data.source
}, name: data.meta.name
})
}, {
id: "td",
canImport: data => data.meta.cloudId == "microbit.co.uk" && data.meta.editor == "touchdevelop",
importAsync: (project, data) =>
project.createProjectAsync({
filesOverride: { "main.blocks": "", "main.ts": " " },
name: data.meta.name
})
.then(() => project.convertTouchDevelopToTypeScriptAsync(data.source))
.then(text => project.overrideTypescriptFile(text))
}]
};
return Promise.resolve<pxt.editor.ExtensionResult>(res);
}
}
|
require 'paysecure/api'
require 'paysecure/configuration'
require 'paysecure/logger'
require 'paysecure/request'
require 'savon'
# Monkeypatch: forces every HTTPI Net::HTTP request onto SSL with TLSv1 and
# certificate verification disabled before dispatching.
module HTTPI
  module Adapter
    class NetHTTP < Base
      alias request_old request

      # Wraps the original #request to apply SSL settings, then delegates.
      def request(method)
        @request.ssl = true
        # SECURITY(review): :TLSv1 is obsolete, and verify_mode :none disables
        # certificate verification entirely, exposing traffic to
        # man-in-the-middle attacks. Confirm whether the gateway still
        # requires these settings.
        @request.auth.ssl.ssl_version = :TLSv1
        @request.auth.ssl.verify_mode = :none
        request_old(method)
      end
    end
  end
end
module Paysecure
  # Paysecure API client; mixes in request plumbing (Request), endpoint
  # wrappers (Api) and logging (Logger).
  class Client
    include Request
    include Api
    include Logger

    # @private
    attr_accessor *Configuration::VALID_OPTIONS_KEYS

    # Merges per-instance options over the module-level defaults and assigns
    # each recognized option through its accessor.
    def initialize(options={})
      options = Paysecure.options.merge(options)
      Configuration::VALID_OPTIONS_KEYS.each do |key|
        send("#{key}=", options[key])
      end
    end

    # Memoized Savon SOAP client bound to the configured WSDL endpoint,
    # with credentials supplied via the SOAP header.
    # SECURITY(review): :SSLv3 is broken (POODLE) and widely disabled
    # server-side — confirm whether this can move to TLS.
    def paysecure_client
      @client ||= Savon::Client.new(:ssl_version => :SSLv3, soap_header: requestor_credentials_header ) do
        # binding.pry
        wsdl Paysecure.options[:endpoint]
        convert_request_keys_to :camelcase
      end
    end
  end
end
|
package global.msnthrp.scanner.utils
import android.view.View
/**
 * Toggles this view between VISIBLE (via [show]) and GONE (via [hide]).
 */
fun View.setVisible(visible: Boolean) {
    when {
        visible -> show()
        else -> hide()
    }
}
// Makes this view visible.
fun View.show() {
    visibility = View.VISIBLE
}
// Hides this view and removes it from layout (GONE, not INVISIBLE).
fun View.hide() {
    visibility = View.GONE
}
|
# D-fine
A webapp to define words and phrases
## How to use
* Install [Redis](http://redis.io/) (2.0+)
* Setup virtualenv
<pre>
virtualenv --no-site-packages ./
. bin/activate
pip install -r requirements.txt
</pre>
* Run the Webapp
<pre>
python webserver.py
</pre>
Note: D-fine *will* work without Redis, but all the data will be lost on each restart.
## Why I built this
Two reasons:
1. To scratch an itch I had for an easy to use definition site.
1. To play around more with Flask and virtualenv.
### TODO
* Switch back to using cdnjs.com for the javascript files
* Fix the date/time
* Maybe add a fall back to a standard dictionary site
|
# -*- coding: utf-8 -*-
=begin rdoc
Blob file methods for accessing a blob via the filesystem.
Exclusively for the Blob class.
=end
class Blob
  # Get the blob file's dir name e.g. "/my/photos".
  #
  # This impl calls #dir which is typically fine.
  #
  # Override this if the local storage file dir
  # name is different than the generic base name.
  #
  # @return [String] the blob file's dir name
  #
  def file_dir
    dir
  end

  # Get the blob file's base name e.g. "photo.jpg".
  #
  # This impl calls #base which is typically fine.
  #
  # Override this when the local storage file base
  # name is different than the generic base name.
  #
  # @return [String] the blob file's base name
  #
  def file_base
    base
  end

  # Get the blob file's path e.g. "/my/photos/photo.jpg"
  #
  # This impl calls #file_pathname and stringifies it.
  # Subclasses can likely use this as-is.
  #
  # @return [String] the blob file's path
  #
  def file_path
    file_pathname.to_s
  end

  # Get the blob file's pathname e.g. Pathname("/my/photos/photo.jpg")
  #
  # This impl calls #file_dir and #file_base.
  # Subclasses can likely use this as-is.
  #
  # @return [Pathname] the blob file's pathname
  #
  def file_pathname
    Pathname(file_dir) + file_base
  end

  # Does the file exist on the local filesystem?
  #
  # @return [boolean] iff the file exists
  #
  def file_exist?
    FileTest.exist? file_path
  end
end
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
-- | Unit Tests
module Main where
import Test.Tasty.QuickCheck
import Test.Tasty.HUnit
import Test.Tasty
import Types -- Arbitrary instances
import Ray
import Ray.Imports
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "Panther Tests" [
testGroup "sphere" [
testProperty "intersect sphere at origin with radius 1" $
\dir -> dir /= 0 ==>
intersect (Sphere 0 1) (mkRay 0 (normalize dir) 0) ^? _Wrapped . _Just . tHit =~ Just 1,
testProperty "sphere has fixed r" $
\(getSphere -> s@(Sphere c r), dir) -> dir /= 0 && r > 0 ==>
intersect s (mkRay c (normalize dir) 0) ^? _Wrapped . _Just . tHit =~ Just r
],
testGroup "plane" [
testProperty "all rays intersect a plane" $
\(getPlane -> plane, ray) -> isJust (getOption $ intersect (plane) ray)
|| isJust (getOption $ intersect plane $ ray & rayDir *~ -1)
],
testGroup "mesh" [
testCase "a ray intersects a triangle" $ let
mesh = oneTriangleMesh $ V3 0 (p3 1 0 0) (p3 0 1 0)
ray = mkRay (p3 0.33 0.33 1) (V3 0 0 (-1)) 0
in intersect mesh ray ^? _Wrapped . _Just . tHit @?= Just 1,
testProperty "arbitrary direction, no false negatives, correct tHit" $
\(tri, bary, v) -> let
ray = mkRay o v 0
o = barycentric (firstTriangle tri) bary .-^ v
in intersect (getTri tri) ray ^? _Wrapped . _Just . tHit =~ Just 1,
testProperty "arbitrary direction, no false positives" $
\(tri, bary, v) -> let
ray = mkRay o v 0
o = barycentric (firstTriangle tri) bary .+^ v
in intersect (getTri tri) ray ^? _Wrapped . _Just . tHit =~ Nothing
-- testProperty "arbitrary rays, no false positives" $
-- \(tri, bary, v) -> let
]
]
------------------------------------------------------------
-- Approximate Comparison for Doubles, Points
ε :: Floating a => a
ε = 0.0001
class Approx a where
(=~) :: a -> a -> Bool
infix 4 =~
instance Approx Double where
(=~) a b = abs (a - b) < ε
-- instance Epsilon a => Approx a where
-- a =~ b = nearZero $ a - b
-- instance (Metric f, Floating a, Ord a) => Approx (f a) where
-- u =~ v = distance u v < ε
instance (Floating a, Ord a) => Approx (Point V3 a) where
u =~ v = distance u v < ε
instance Approx a => Approx (Maybe a) where
Nothing =~ Nothing = True
Just a =~ Just b = a =~ b
_ =~ _ = False
|
# frozen_string_literal: true
# ProjectContext join table model
# Many-to-many bridge row linking a Project to a Context.
class ProjectContext < ActiveRecord::Base
  # Both ends of the association must be present for a valid row.
  validates_presence_of :project, :context

  belongs_to :project
  belongs_to :context
end
|
package com.example.babyweather.mode;
import java.util.List;
import com.example.babyweather.R;
import com.example.babyweather.mode.MainListAdapter.ViewHolder;
import com.zcw.togglebutton.ToggleButton;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
public class GuanxinAdapter extends ArrayAdapter<GuanxinPerson> {
private Context context;
private int resourceId;
private List<GuanxinPerson> persons;
public GuanxinAdapter(Context context, int resource,
List<GuanxinPerson> list) {
super(context, resource, list);
this.context = context;
this.resourceId = resource;
this.persons = list;
}
@Override
public int getCount() {
// TODO Auto-generated method stub
return persons.size();
}
@Override
public GuanxinPerson getItem(int position) {
// TODO Auto-generated method stub
return super.getItem(position);
}
@Override
public long getItemId(int position) {
// TODO Auto-generated method stub
return super.getItemId(position);
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
	// Inflate once per recycled row; the inner ViewHolder constructor stores
	// itself on the view via setTag, so the getTag() below always succeeds.
	// NOTE(review): "ViewHolder" here resolves to the inner class defined
	// below, which shadows the imported MainListAdapter.ViewHolder — that
	// import appears unused; confirm and remove it to avoid confusion.
	if (convertView == null) {
		// NOTE(review): inflating with a null parent skips the list's
		// LayoutParams; the conventional form is
		// inflate(resourceId, parent, false) — confirm layout is unaffected.
		convertView = LayoutInflater.from(getContext()).inflate(resourceId,
				null);
		new ViewHolder(convertView);
	}
	ViewHolder holder = (ViewHolder) convertView.getTag();
	GuanxinPerson person = getItem(position);
	// Bind the person's fields to the cached row widgets.
	holder.name.setText(person.getName());
	holder.num.setText(person.getPhnum());
	holder.city.setText(person.getCity());
	holder.weather.setText(person.getWeather());
	holder.time.setText(person.getTime());
	holder.content.setText(person.getContent());
	return convertView;
}
class ViewHolder {
TextView name;
TextView num;
TextView city;
TextView weather;
TextView time;
TextView content;
public ViewHolder(View view) {
name = (TextView) view.findViewById(R.id.guanxin_name);
num = (TextView) view.findViewById(R.id.guanxin_num);
city = (TextView) view.findViewById(R.id.guanxin_chengshi);
weather = (TextView) view.findViewById(R.id.guanxin_tianqi);
time = (TextView) view.findViewById(R.id.guanxin_shijian);
content = (TextView) view.findViewById(R.id.guanxin_content);
view.setTag(this);
}
}
}
|
/* for development mode */
const argv = require('yargs/yargs')(process.argv.slice(2)).parse()
const execa = require('execa')
// Builds and spawns the dev server command with env/theme injected via
// cross-env; output is piped straight through to this terminal.
async function run() {
  const { theme = 'default', env = 'development' } = argv
  // NODE_ENV is pinned to development (this is the dev script);
  // NUXT_APP_ENV tracks the --env flag independently.
  const cmdStr = `cross-env NODE_ENV=development NUXT_APP_ENV=${env} NUXT_APP_THEME_NAME=${theme} node server/index.js`
  await execa.command(cmdStr, {
    stdio: 'inherit',
  })
}

// Fix: surface failures and set a nonzero exit code instead of dying with
// an unhandled-rejection warning.
run().catch((err) => {
  console.error(err)
  process.exit(1)
})
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using Azure.Core;
namespace Azure.AI.MetricsAdvisor.Models
{
/// <summary>
/// Used as part of a <see cref="MetricBoundaryCondition"/>. Specifies which measure type should be
/// used when checking boundaries. Defaults to <see cref="BoundaryMeasureType.Value"/>.
/// </summary>
[CodeGenModel("ValueType")]
public readonly partial struct BoundaryMeasureType
{
/// <summary>
/// The value of the metric is used as it is.
/// </summary>
public static BoundaryMeasureType Value { get; } = new BoundaryMeasureType(ValueValue);
/// <summary>
/// The mean of the latest metric values in the time series is used.
/// </summary>
public static BoundaryMeasureType Mean { get; } = new BoundaryMeasureType(MeanValue);
}
}
|
// Load up the angular formly module
import index from './index.common'
// Bring in the test suites
import './providers/formlyApiCheck.test'
import './providers/formlyConfig.test'
import './services/formlyUtil.test'
import './directives/formly-custom-validation.test'
import './directives/formly-field.test'
import './directives/formly-focus.test'
import './directives/formly-form.test'
import './directives/formly-form.controller.test'
import './run/formlyCustomTags.test'
import './run/formlyNgModelAttrsManipulator.test'
import './other/utils.test'
export default index
|
import { SubtitleModel, GetSubtitleRepository } from './db-get-subtitle-protocols'
import { DbGetSubtitle } from './db-get-subtitle'
const makeFakeSubtitle = (): SubtitleModel => ({
id: 'valid_id',
language: 'valid_language',
external_id: 'valid_external_id',
sent_to_creation: true,
file_id: 'valid_file_id'
})
const makeGetSubtitleRepository = (): GetSubtitleRepository => {
class GetSubtitleRepositoryStub implements GetSubtitleRepository {
async get (fileId: string): Promise<SubtitleModel> {
return await new Promise((resolve) => resolve(makeFakeSubtitle()))
}
}
return new GetSubtitleRepositoryStub()
}
interface SutTypes {
sut: DbGetSubtitle
getSubtitleRepositoryStub: GetSubtitleRepository
}
// Wires the use case to a fresh repository stub and exposes both.
const makeSut = (): SutTypes => {
  const getSubtitleRepositoryStub = makeGetSubtitleRepository()
  return {
    sut: new DbGetSubtitle(getSubtitleRepositoryStub),
    getSubtitleRepositoryStub
  }
}
describe('DbGetSubtitle Usecase', () => {
  test('Should call GetSubtitleRepository with correct value', async () => {
    const { sut, getSubtitleRepositoryStub } = makeSut()
    const getSubtitleSpy = jest.spyOn(getSubtitleRepositoryStub, 'get')
    const id = 'valid_id'
    await sut.get(id)
    // The use case must forward the id to the repository unchanged.
    expect(getSubtitleSpy).toHaveBeenCalledWith(id)
  })
  test('Should throw if GetSubtitleRepository throws', async () => {
    const { sut, getSubtitleRepositoryStub } = makeSut()
    // Repository failures must propagate, not be swallowed.
    jest.spyOn(getSubtitleRepositoryStub, 'get').mockImplementationOnce(async () => {
      throw new Error()
    })
    const promise = sut.get('valid_id')
    await expect(promise).rejects.toThrow()
  })
  test('Should return falsy if subtitle is not found', async () => {
    const { sut, getSubtitleRepositoryStub } = makeSut()
    // A null from the repository is passed through as-is.
    jest.spyOn(getSubtitleRepositoryStub, 'get').mockReturnValueOnce(new Promise((resolve) => resolve(null)))
    const subtitle = await sut.get('valid_id')
    expect(subtitle).toBeFalsy()
  })
  test('Should return a subtitle on success', async () => {
    const { sut } = makeSut()
    const subtitle = await sut.get('valid_id')
    expect(subtitle).toEqual(makeFakeSubtitle())
  })
})
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Caching.Memory;
using p1_2.Data;
using p1_2.Models;
namespace p1_2.Controllers
{
    /// <summary>
    /// Store listing pages; relies on an in-memory cache entry "UserName"
    /// written by <see cref="LoadCustomer"/> to identify the current user.
    /// </summary>
    public class StoreController : Controller
    {
        private readonly BookopolisDbContext _db;
        private IMemoryCache _cache; // must be registered for DI in Startup.cs

        public StoreController(BookopolisDbContext db, IMemoryCache cache)
        {
            _db = db;
            _cache = cache;
        }

        /// <summary>Returns the cached user name as JSON for the client-side local store.</summary>
        public IActionResult LoadLocalStorage()
        {
            // send username to local store GET request here!
            // NOTE(review): _cache.Get("UserName") returns null when the key
            // was never set, so ToString() would throw -- confirm LoadCustomer
            // always runs before this action.
            string s = _cache.Get("UserName").ToString();
            return new JsonResult(s);
        }

        /// <summary>Lists all stores.</summary>
        public IActionResult Index()
        {
            // NOTE(review): redirecting to the Login page when IsLoggedIn(...)
            // is TRUE looks inverted -- verify Util.Util.IsLoggedIn's semantics.
            if (Util.Util.IsLoggedIn(_cache))
            {
                return RedirectToAction("Login", "Customer");
            }
            IEnumerable<Store> storeList = _db.Stores;
            return View(storeList);
        }

        /// <summary>Caches the customer's user name, then shows the store list.</summary>
        public IActionResult LoadCustomer(Customer c)
        {
            _cache.Set("UserName", c.UserName);
            return RedirectToAction("Index");
        }

        /// <summary>Validates the store id, then forwards to that store's product list.</summary>
        public IActionResult StoreProducts(int? id)
        {
            if (id == null)
            {
                return NotFound();
            }
            Store store = _db.Stores.FirstOrDefault(s => s.StoreId == id);
            if (store == null)
            {
                return NotFound();
            }
            return RedirectToAction("Index", "Product", new { id });
        }
    }
}
|
//go:build unit
// +build unit
//
// Code generated by MockGen. DO NOT EDIT.
// Source: psql_psql_upsert.go
// Package mock_models is a generated GoMock package.
package mock_models
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.voapropertylinking.http
import basespecs.BaseUnitSpec
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.{ArgumentCaptor, ArgumentMatchers}
import uk.gov.hmrc.http.HttpReads.Implicits._
import uk.gov.hmrc.http._
import uk.gov.hmrc.play.bootstrap.http.DefaultHttpClient
import uk.gov.hmrc.voapropertylinking.auth.Principal
// Verifies that VoaHttpClient enriches every outgoing request's
// HeaderCarrier with the Government Gateway (GG) identity headers before
// delegating each HTTP verb to the wrapped DefaultHttpClient.
class VoaHttpClientSpec extends BaseUnitSpec {

  // Sample carrier components. The implicit `hc` and `principal` used in
  // the examples below are presumably supplied by BaseUnitSpec -- confirm.
  val authorization = Authorization("authorization")
  val forwarded = ForwardedFor("ipAdress")
  val sessionId = SessionId("1234567890")
  val requestId = RequestId("0987654321")
  val deviceId = "testDeviceId"
  val akamaiReputation = AkamaiReputation("foo")

  // Fresh mock client, captor and system-under-test for each example.
  trait Setup {
    val mockUrl = "http://mock-url"
    val mockQueryParams = Seq("key" -> "value")
    val mockHeaders = Seq("key" -> "value")
    val mockHttpClient: DefaultHttpClient = mock[DefaultHttpClient]
    // Captures the HeaderCarrier actually handed to the mocked client.
    val headerCaptor: ArgumentCaptor[HeaderCarrier] = ArgumentCaptor.forClass(classOf[HeaderCarrier])
    val voaHttpClient = new VoaHttpClient(mockHttpClient)
  }

  "using the VOA HTTP Client" should {

    // Shared assertion: the captured carrier holds exactly the two GG headers.
    def checkGovernmentGatewayHeaders(headerCaptor: ArgumentCaptor[HeaderCarrier])(implicit principal: Principal) =
      headerCaptor.getValue.extraHeaders shouldBe Seq(
        "GG-EXTERNAL-ID" -> principal.externalId,
        "GG-GROUP-ID" -> principal.groupId
      )

    "preserve the existing headers when adding the extra GG headers" in new Setup {
      val voaHc: HeaderCarrier = voaHttpClient.buildHeaderCarrier(hc, principal)
      voaHc shouldBe hc.withExtraHeaders(
        Seq(
          "GG-EXTERNAL-ID" -> principal.externalId,
          "GG-GROUP-ID" -> principal.groupId
        ): _*)
    }

    "enrich the GG headers when calling a GET" in new Setup {
      voaHttpClient.GET[HttpResponse](mockUrl)
      verify(mockHttpClient)
        .GET(ArgumentMatchers.eq(mockUrl), any(), any())(any(), headerCaptor.capture(), any())
      checkGovernmentGatewayHeaders(headerCaptor)
    }

    "enrich the GG headers when calling a GET with query params" in new Setup {
      voaHttpClient.GET[HttpResponse](mockUrl, mockQueryParams)
      verify(mockHttpClient)
        .GET(ArgumentMatchers.eq(mockUrl), ArgumentMatchers.eq(mockQueryParams), any())(
          any(),
          headerCaptor.capture(),
          any())
      checkGovernmentGatewayHeaders(headerCaptor)
    }

    "enrich the GG headers when calling a DELETE" in new Setup {
      voaHttpClient.DELETE[HttpResponse](mockUrl)
      verify(mockHttpClient)
        .DELETE(ArgumentMatchers.eq(mockUrl), any())(any(), headerCaptor.capture(), any())
      checkGovernmentGatewayHeaders(headerCaptor)
    }

    "enrich the GG headers when calling a PUT" in new Setup {
      voaHttpClient.PUT[String, HttpResponse](mockUrl, "")
      verify(mockHttpClient)
        .PUT(ArgumentMatchers.eq(mockUrl), ArgumentMatchers.eq(""), any())(any(), any(), headerCaptor.capture(), any())
      checkGovernmentGatewayHeaders(headerCaptor)
    }

    "enrich the GG headers when calling a POST" in new Setup {
      voaHttpClient.POST[String, HttpResponse](mockUrl, "", mockHeaders)
      verify(mockHttpClient)
        .POST(ArgumentMatchers.eq(mockUrl), ArgumentMatchers.eq(""), ArgumentMatchers.eq(mockHeaders))(
          any(),
          any(),
          headerCaptor.capture(),
          any())
      checkGovernmentGatewayHeaders(headerCaptor)
    }
  }
}
|
// Obtain the shared environment object registered under the name 'parent'.
var env = require('../..')('parent');
// Mutate shared state before the child module loads...
env.foo = 10;
// ...so that loading the child here can observe the assignment above.
require('./parent.child');
|
import css from 'styled-jsx/css'

// Scoped styles for the fixed left side-navigation bar (#side-nav) and
// its items. Exported as a styled-jsx tagged template.
export default css`
  /* A single clickable nav row, content centered both ways. */
  .side-item {
    align-items: center;
    justify-content: center;
    display: flex;
    cursor: pointer;
  }
  /* Variant that stretches to fill available space. */
  .side-item-expand {
    display: flex;
    flex: 1 1 auto;
  }
  /* Fixed 72px icon column; the .expand variant keeps the same width. */
  .side-icon-expand.expand {
    width: 72px;
  }
  .side-icon-expand {
    width: 72px;
    padding: 1% 0%;
    justify-content: center;
    display: flex;
  }
  /* Label next to an icon; muted by default, highlighted on hover. */
  .side-text {
    display: flex;
    justify-content: center;
    align-items: center;
    padding-left: 8px;
    color: #9593a0;
    font-weight: 500;
  }
  .side-text.hover {
    color: #a880f7;
    font-weight: bold;
  }
  /* The bar itself: fixed full-height column pinned to the left edge. */
  #side-nav {
    width: 5%;
    background: white;
    display: flex;
    flex-flow: column;
    padding-top: 1%;
    z-index:25;
    position: fixed;
    top:0;
    left:0;
    height:100vh;
  }
  .side-icon.expand {
    width: 72px;
  }
  .side-icon {
    width: 72px;
    justify-content: center;
    align-items: center;
    display: flex;
    cursor: pointer;
  }
  /* Vertical list of entries, spaced evenly over the full height. */
  .side-nav-list {
    display: flex;
    flex-flow: column;
    height: 100%;
    justify-content: space-evenly;
  }
`
|
namespace BlazorBrowserStorage
{
    /// <summary>
    /// Marker interface for the browser's sessionStorage: adds no members of
    /// its own, inheriting every operation from <see cref="IStorage"/>, so
    /// session storage can be referenced as its own distinct service type.
    /// </summary>
    public interface ISessionStorage : IStorage { }
}
|
using System.IO;

namespace Microsoft.Extensions.Logging.Terminal
{
    /// <summary>
    /// Writes a single <see cref="TerminalLoggerEntry"/> to the given
    /// <see cref="TerminalWriter"/> using the supplied options. The entry is
    /// passed by readonly reference (<c>in</c>) to avoid copying it.
    /// </summary>
    public delegate void TerminalLoggerWriter(TerminalLoggerOptions options, TerminalWriter writer,
        in TerminalLoggerEntry entry);
}
|
/*!
* @file jvmutil.c
*
* @brief Utilities for operating the JVM on this
* real machine implementation.
*
*
* @section Control
*
* \$URL$
*
* \$Id$
*
* Copyright 2005 The Apache Software Foundation
* or its licensors, as applicable.
*
* Licensed under the Apache License, Version 2.0 ("the License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied.
*
* See the License for the specific language governing permissions
* and limitations under the License.
*
* @version \$LastChangedRevision$
*
* @date \$LastChangedDate$
*
* @author \$LastChangedBy$
*
* Original code contributed by Daniel Lydick on 09/28/2005.
*
* @section Reference
*
*/
#include "arch.h"
ARCH_SOURCE_COPYRIGHT_APACHE(jvmutil, c,
"$URL$",
"$Id$");
#include "jvmcfg.h"
#include "cfmacros.h"
#include "classfile.h"
#include "attribute.h"
#include "jvm.h"
#include "linkage.h"
#include "util.h"
/*!
* @name Debug message verbosity utilities for sysDbgMsg().
*
* @brief Set and get an integer value that determines the number of
* debug messages that get displayed by sysDbgMsg().
*
* When sysDbgMsg() is inserted into the code of a function,
* a verbosity level is its first parameter. The higher the
* number, the more verbose the debug output becomes, up to
* level @link #DML10 DMLMAX@endlink. The lower
* the number, the less it is displayed at run time, down to
* @link #DML1 DMLMIN@endlink. At level @link #DML0 DMLOFF@endlink,
* only @e vital diagnostic messages are displayed. The rest are
* suppressed.
*
* This value is heuristically applied by the developer as to the
* importance of that information in various development and
* testing situations.
*
* The importance of the situation defaults to
* @link #DMLDEFAULT DMLDEFAULT@endlink at compile time and may
* be changed at run time with the
* @link #JVMCFG_DEBUGMSGLEVEL_FULL_PARM -Xdebug_level@endlink
* command line parameter.
*
* @see jvm_debug_level_enum
*
* @see sysDbgMsg()
*
*/
/*@{ */ /* Begin grouped definitions */
/*!
* @brief Set current debug message level
*
*
* @param level New level to set.
*
*
* @returns @link #rvoid rvoid@endlink
*
*/
rvoid jvmutil_set_dml(jvm_debug_level_enum level)
{
    ARCH_FUNCTION_NAME(jvmutil_set_dml);

    /* Store the new verbosity on the global JVM state used by sysDbgMsg() */
    pjvm->debug_message_level = level;

} /* END of jvmutil_set_dml() */
/*!
* @brief Get current debug message level
*
*
* @b Parameters: @link #rvoid rvoid@endlink
*
*
* @returns current debug message verbosity
*
*/
jvm_debug_level_enum jvmutil_get_dml()
{
    ARCH_FUNCTION_NAME(jvmutil_get_dml);

    /* Read back the verbosity stored by jvmutil_set_dml() */
    return(pjvm->debug_message_level);

} /* END of jvmutil_get_dml() */
/*@} */ /* End of grouped definitions */
/*!
* @brief Show program version message to standard output.
*
*
* @b Parameters: @link #rvoid rvoid@endlink
*
*
* @returns @link #rvoid rvoid@endlink
*
*/
rvoid jvmutil_versionmsg(rvoid)
{
    ARCH_FUNCTION_NAME(jvmutil_versionmsg);

    /* Release level only, one line, e.g. for a '-version' style option */
    fprintfLocalStdout("%s\n", CONFIG_RELEASE_LEVEL);

    return;

} /* END of jvmutil_versionmsg() */
/*!
* @brief Show program copyright message to standard output.
*
*
* @b Parameters: @link #rvoid rvoid@endlink
*
*
* @returns @link #rvoid rvoid@endlink
*
*/
rvoid jvmutil_copyrightmsg(rvoid)
{
    ARCH_FUNCTION_NAME(jvmutil_copyrightmsg);

    /* Program name, description, release level, then copyright banner */
    fprintfLocalStdout("\n%s: %s, version %s\n%s\n\n",
                       CONFIG_PROGRAM_NAME,
                       CONFIG_PROGRAM_DESCRIPTION,
                       CONFIG_RELEASE_LEVEL,
                       ARCH_COPYRIGHT_TEXT_APACHE);

    return;

} /* END of jvmutil_copyrightmsg() */
/*!
* @brief Show program software license message to standard output.
*
*
* @b Parameters: @link #rvoid rvoid@endlink
*
*
* @returns @link #rvoid rvoid@endlink
*
*/
rvoid jvmutil_licensemsg(rvoid)
{
    ARCH_FUNCTION_NAME(jvmutil_licensemsg);

    /* The license text is always preceded by the copyright banner */
    jvmutil_copyrightmsg();

    fprintfLocalStdout("%s\n\n", ARCH_LICENSE_TEXT_APACHE);

    return;

} /* END of jvmutil_licensemsg() */
/*!
* @name Stack dump utilities.
*
* @brief Print contents of a thread's stack to standard error.
*
* Several forms are available that provide various amounts of stack
* frame detail. The most verbose also shows local variables
* in the stack frame.
*
* @attention These routines are @e not intended as a replacement
* for the normal routine
* <b><code>java.lang.Throwable.printStackTrace()</code></b> !!!
*
* @param thridx Thread table index of thread to show
*
* @param pheader Null-terminated header string. If no header is
* desired, pass a @link #rnull rnull@endlink
* pointer here.
*
* @param showdetails If @link #rtrue rtrue@endlink, show frame
* details, else less verbose.
*
* @param showlocals If @link #rtrue rtrue@endlink, show local
* variables also, but only if @b showdetails is
* also @link #rtrue rtrue@endlink.
*
*
* @returns @link #rvoid rvoid@endlink
*
*
* @todo HARMONY-6-jvm-jvmutil.c-1 This function needs unit testing.
*
* @todo HARMONY-6-jvm-jvmutil.c-2 Add line numbers to output. Sample
* output might look like this (when line numbers are added):
*
* @verbatim
* Exception in thread "main" java.lang.NullPointerException
at Testit.sub1(Testit.java:9)
at Testit.main(Testit.java:23)
@endverbatim
*
*/
/*@{ */ /* Begin grouped definitions */
/*!
* @brief Common function to perform final output from all
* stack print utilities.
*
*/
static rvoid jvmutil_print_stack_common(jvm_thread_index thridx,
                                        rchar *pheader,
                                        rboolean showdetails,
                                        rboolean showlocals)
{
    ARCH_FUNCTION_NAME(jvmutil_print_stack_common);

    /* Print header if one is passed in, else skip */
    if (rnull != pheader)
    {
        fprintfLocalStderr("%s\n", pheader);
    }

    /*
     * Read down through all frames until bottom of stack.
     * The very last stack frame holds a null FP.
     */
    jvm_sp fp = FIRST_STACK_FRAME(thridx);

    while (!CHECK_FINAL_STACK_FRAME_GENERIC(thridx, fp))
    {
        /*
         * Class and method indices of the PC saved in the frame.
         *
         * NOTE(review): these lookups use GET_FP(thridx) -- the thread's
         * current frame pointer -- rather than the iterated 'fp', so every
         * pass appears to read the same frame. Confirm whether GET_FP()
         * tracks the iteration or this should be 'fp'.
         */
        jvm_class_index clsidx =
            STACK(thridx,
                  GET_FP(thridx) + JVMREG_STACK_PC_CLSIDX_OFFSET);

        ClassFile *pcfs = CLASS_OBJECT_LINKAGE(clsidx)->pcfs;

        jvm_method_index mthidx =
            STACK(thridx,
                  GET_FP(thridx) + JVMREG_STACK_PC_MTHIDX_OFFSET);

        /* Field width for the class name string */
        rint star_len_cls = CP1_NAME_STRLEN(CONSTANT_Class_info,
                                            pcfs,
                                            pcfs->this_class,
                                            name_index);

        /*
         * NOTE(review): a method index is passed here where the class
         * lookup above passes a constant-pool class index, yet the macro
         * is still parameterized with CONSTANT_Class_info -- verify this
         * combination resolves to the method's name string.
         */
        rint star_len_mth = CP1_NAME_STRLEN(CONSTANT_Class_info,
                                            pcfs,
                                            mthidx,
                                            name_index);

        /* Locate the SourceFile attribute, if the class has one */
        jvm_attribute_index atridx =
            attribute_find_in_class_by_enum(clsidx,
                                            LOCAL_SOURCEFILE_ATTRIBUTE);

        jvm_constant_pool_index cpidx;
        rint star_len_src;
        rchar *srcname;

        if (jvm_attribute_index_bad == atridx)
        {
            /* No SourceFile attribute: substitute a placeholder name */
            cpidx = jvm_constant_pool_index_null;
            star_len_src = 7; /* Length of "unknown" */
            srcname = "unknown";
        }
        else
        {
            cpidx = ((SourceFile_attribute *)
                     &pcfs->attributes[atridx]->ai)->sourcefile_index;
            star_len_src = CP_THIS_STRLEN(pcfs, cpidx);
            srcname = PTR_CP_THIS_STRNAME(pcfs, cpidx);
        }

        /* Least verbosity, called from jvmutil_print_stack() */
        fprintfLocalStderr("    at %*.*s%c%*.*s(%*.*s:%d)\n",
                           star_len_cls, star_len_cls,
                           PTR_CP1_NAME_STRNAME(CONSTANT_Class_info,
                                                pcfs,
                                                pcfs->this_class,
                                                name_index),
                           CLASSNAME_EXTERNAL_DELIMITER_CHAR,
                           star_len_mth, star_len_mth,
                           PTR_CP1_NAME_STRNAME(CONSTANT_Class_info,
                                                pcfs,
                                                mthidx,
                                                name_index),
                           star_len_src, star_len_src,
                           srcname,
                           0 /*! @todo HARMONY-6-jvm-jvmutil.c-3 Get line
                                 numbers */);

        /*
         * Fill in frame details and local variables
         */
        if (rtrue == showdetails)
        {
            /*! @todo HARMONY-6-jvm-jvmutil.c-4 Show details
                of stack frame */

            if (rtrue == showlocals)
            {
                /*! @todo HARMONY-6-jvm-jvmutil.c-5 Show local
                    variables in stack frame */
            }
        }

        /* Look at next stack frame */
        fp = NEXT_STACK_FRAME_GENERIC(thridx, fp);
    }

} /* END of jvmutil_print_stack_common() */
/*!
* @brief Print basic stack frame summary only.
*
*/
rvoid jvmutil_print_stack(jvm_thread_index thridx, rchar *pheader)
{
    ARCH_FUNCTION_NAME(jvmutil_print_stack);

    /* No frame details, no locals: summary lines only */
    jvmutil_print_stack_common(thridx, pheader, rfalse, rfalse);

} /* END of jvmutil_print_stack() */
/*!
* @brief Print stack frame with some details.
*
*/
rvoid jvmutil_print_stack_details(jvm_thread_index thridx,
                                  rchar *pheader)
{
    ARCH_FUNCTION_NAME(jvmutil_print_stack_details);

    /* Frame details on, locals off */
    jvmutil_print_stack_common(thridx, pheader, rtrue, rfalse);

} /* END of jvmutil_print_stack_details() */
/*!
* @brief Print stack frame with details and local variables.
*
*/
rvoid jvmutil_print_stack_locals(jvm_thread_index thridx,
                                 rchar *pheader)
{
    ARCH_FUNCTION_NAME(jvmutil_print_stack_locals);

    /* Most verbose: frame details plus local variables */
    jvmutil_print_stack_common(thridx, pheader, rtrue, rtrue);

} /* END of jvmutil_print_stack_locals() */
/*!
* @brief Common print basic stack frame summary showing error type.
*
*/
/*
 * Build a header line of the form
 *
 *     <errtype> in thread "<thread name>" <class name>
 *
 * in a heap buffer, print the stack below it, then free the buffer.
 */
static rvoid jvmutil_print_errtype_stack(jvm_thread_index thridx,
                                         rchar *errtype)
{
    ARCH_FUNCTION_NAME(jvmutil_print_errtype_stack);

    rchar *pheader = HEAP_GET_DATA(JVMCFG_STDIO_BFR, rfalse);

    /* Class whose PC is stored in the thread's current frame */
    jvm_class_index clsidx =
        STACK(thridx, GET_FP(thridx) + JVMREG_STACK_PC_CLSIDX_OFFSET);

    ClassFile *pcfs = CLASS_OBJECT_LINKAGE(clsidx)->pcfs;

    /* Field width for the (non-NUL-terminated) class name string */
    rint star_len = CP1_NAME_STRLEN(CONSTANT_Class_info,
                                    pcfs,
                                    pcfs->this_class,
                                    name_index);

    /*
     * Fix: the previous format string, "%s in thread \"s\" *.*%s", did
     * not consume the thread name or the width pair correctly ('%' was
     * missing before 's' and misplaced in '%*.*s').  Intended output is
     * e.g.:  Exception in thread "main" java.lang.NullPointerException
     */
    sprintfLocal(pheader,
                 "%s in thread \"%s\" %*.*s",
                 errtype,
                 THREAD(thridx).name,
                 star_len, star_len,
                 PTR_CP1_NAME_STRNAME(CONSTANT_Class_info,
                                      pcfs,
                                      pcfs->this_class,
                                      name_index));

    jvmutil_print_stack(thridx, pheader);

    HEAP_FREE_DATA(pheader);

    return;

} /* END of jvmutil_print_errtype_stack() */
/*!
* @brief Print basic stack frame summary reporting an error versus
* an exception.
*
*/
rvoid jvmutil_print_error_stack(jvm_thread_index thridx)
{
    ARCH_FUNCTION_NAME(jvmutil_print_error_stack);

    /* Tag the header line as an "Error" */
    jvmutil_print_errtype_stack(thridx, "Error");

} /* END of jvmutil_print_error_stack() */
/*!
* @brief Print basic stack frame summary reporting an exception versus
* an error.
*
*/
rvoid jvmutil_print_exception_stack(jvm_thread_index thridx)
{
    ARCH_FUNCTION_NAME(jvmutil_print_exception_stack);

    /* Tag the header line as an "Exception" */
    jvmutil_print_errtype_stack(thridx, "Exception");

} /* END of jvmutil_print_exception_stack() */
/*@} */ /* End of grouped definitions */
/* EOF */
|
#!/bin/bash
#
# Clean and rebuild the project's front-end dependencies.
#
# Fix: abort on the first failed command so a failed npm/bower install
# cannot be silently masked by the steps that follow it.
set -e

# remove previously generated directories for a clean slate
rm -rf bower_components node_modules build

# install node modules (e.g., bower, grunt-cli)
npm install

# install bower dependencies
bower install

# copy bower dependencies into source directory
grunt bowercopy
|
module.exports = ctx=>({
dest:'dist',
title: "K-Sword",
description: "Study well and make progress every day. ",
locales: {
'/': {
lang: 'zh-CN',
title: 'K-Sword',
description: '🌈 一个 JavaScript/TypeScript 实用程序库。'
}
},
head: [
['link', { rel: 'icon', href: `/favicon.ico` }],
['meta', { name: 'theme-color', content: '#4569d4' }],
['link', { rel: 'mask-icon', href: '/icons/safari-pinned-tab.svg', color: '#4569d4' }],
],
themeConfig:{
markdown: {
lineNumbers: true
},
locales: {
'/': {
label: '中文',
selectText: 'Languages',
ariaLabel: 'Select language',
editLinkText: 'Edit this page on GitHub',
lastUpdated: 'Last Updated',
nav: require('./nav/zh'),
sidebar: {
'/config/': getApiSidebar('配置'),
'/guide/': getGuideSidebar('API说明', 'Advanced'),
}
},
}
}
});
function getApiSidebar (group) {
return ['']
// return [
// {
// title: group,
// collapsable: false,
// children: [
// '',
// ]
// },
// ]
}
function getGuideSidebar (groupA, groupB) {
return [
{
title: groupA,
collapsable: false,
children: [
'',
'ajax',
'conf',
'type',
'device',
'storage',
]
},
]
}
|
import { HeatmapShape2d, HeatmapShape3d, GridHeatmapLayerStyleOptions, HeatmapLayerConfig } from '../../types';
import { GridAggregation, ISourceCFG, Source } from '../../types';
/**
 * Source (data) configuration for the grid heatmap layer.
 */
export interface GridLayerSourceOptions extends Pick<ISourceCFG, 'parser' | 'transforms'> {
  /** Raw input data for the layer. */
  data: any;
  /**
   * Aggregation applied to the values that fall into each grid cell.
   */
  aggregation: GridAggregation;
}
/** Full option set for a grid heatmap layer. */
export interface GridLayerOptions extends HeatmapLayerConfig {
  /**
   * The layer's data: either raw source options or an existing Source.
   */
  source: GridLayerSourceOptions | Source;
  /**
   * Shape used to render each cell (2D or 3D heatmap shape).
   */
  shape?: HeatmapShape2d | HeatmapShape3d;
  /**
   * Layer style options.
   */
  style?: GridHeatmapLayerStyleOptions;
}
|
-- Employees with their bonus (if any): the left join keeps employees that
-- have no Bonus row, whose bonus is then NULL. Qualifying rows are those
-- with a bonus under 1000 or with no bonus at all.
SELECT name, bonus
FROM Employee
         LEFT JOIN Bonus ON Employee.empId = Bonus.empId
WHERE bonus < 1000
   OR bonus IS NULL;
|
# -*-coding:utf-8 -*-
"""
Created on 2016-8-16
@author: Danny<manyunkai@hotmail.com>
DannyWork Project
"""
from __future__ import unicode_literals
import uuid
from hashlib import md5
from bs4 import BeautifulSoup
def gen_sign(params, key):
    """Build the WeChat-Pay style MD5 signature for ``params``.

    Keys are joined in sorted order as ``k=v`` pairs, pairs whose value is
    empty/falsy are skipped, ``key=<api key>`` is appended last, and the
    ``&``-joined string is MD5-hashed.

    :param params: dict of request parameters
    :param key: API secret appended as the final ``key=`` pair
    :return: lowercase hex digest string
    """
    pairs = ['{0}={1}'.format(k, params[k]) for k in sorted(params) if params[k]]
    pairs.append('key={}'.format(key))
    return md5('&'.join(pairs).encode('utf8')).hexdigest()
def gen_nonce_str():
    """Return a random 32-character nonce (hex digits a-f0-9, no dashes)."""
    # uuid4().hex is the dash-free hex form of a random UUID -- identical
    # to joining the dash-split pieces of str(uuid4()).
    return uuid.uuid4().hex
def trans_xml_to_dict(xml):
    """Convert the XML payload returned by the WeChat-Pay API to a dict.

    :param xml: raw XML string/bytes with a top-level ``<xml>`` element
    :return: dict mapping each child tag name to its text, or ``{}`` when
        no ``<xml>`` element is present
    """
    soup = BeautifulSoup(xml, features='xml')
    xml = soup.find('xml')
    if not xml:
        # No <xml> root: nothing to extract
        return {}

    # Flatten every child element into a tag-name -> text mapping
    data = dict([(item.name, item.text) for item in xml.find_all()])
    return data
def trans_dict_to_xml(data):
    """Serialize ``data`` into the XML format the WeChat-Pay API expects.

    Keys are emitted in sorted order; the ``detail`` field is wrapped in a
    CDATA section unless it already is one.

    :param data: dict of payload fields
    :return: ``<xml>...</xml>`` string
    """
    parts = []
    for key in sorted(data):
        value = data[key]
        if key == 'detail' and not value.startswith('<![CDATA['):
            value = '<![CDATA[{}]]>'.format(value)
        parts.append('<{0}>{1}</{0}>'.format(key, value))
    return '<xml>{}</xml>'.format(''.join(parts))
|
      SUBROUTINE ATMO62(H,RHO,ASOUND,TEMP,PRES)
C
C     'ATMO62' IS THE ATMOSPHERIC MODEL BASED ON THE U.S.
C     STANDARD ATMOSPHERE OF 1962
C
C     INPUT   H      - ALTITUDE (SCALED BY CON1=0.3048 BELOW, I.E.
C                      FEET CONVERTED TO METERS)
C     OUTPUT  RHO    - DENSITY RATIO RELATIVE TO THE SEA-LEVEL LAYER R(2)
C             ASOUND - SPEED OF SOUND (FROM CON4*SQRT(TEMP/WTM))
C             TEMP   - TEMPERATURE (SCALED BY CON2=1.8)
C             PRES   - PRESSURE (SCALED BY CON3)
C
C     NOTE(REVIEW): EXACT OUTPUT UNITS FOLLOW THE CON1..CON4 CONVERSION
C     CONSTANTS OF THE ORIGINAL TABLES -- CONFIRM AGAINST THE CALLERS.
C
      IMPLICIT REAL*8 (A-H,O-Z)
C
C     LAYER TABLES FOR THE 26 LAYERS OF THE 1962 MODEL: BASE TEMPERATURE
C     TB, BASE ALTITUDE HB, TEMPERATURE GRADIENT BM, BASE DENSITY R,
C     BASE PRESSURE PB, AND MOLECULAR WEIGHT TWTM.
      DIMENSION TB(26),HB(26),BM(26),R(26),PB(26),TWTM(26)
      DATA TB /
     1 320.65D0, 288.15D0, 216.65D0, 216.65D0, 228.65D0, 270.65D0,
     X 270.65D0,
     2 252.65D0, 180.65D0, 180.65D0, 210.65D0, 235.65D0, 260.65D0,
     X 360.65D0,
     3 560.65D0, 960.65D0, 1110.65D0, 1210.65D0, 1350.65D0,
     X 1550.65D0,
     4 1830.65D0, 2160.65D0, 2290.65D0, 2420.65D0, 2590.65D0,
     X 2700.65D0/
      DATA HB /
     1 -5000.0D0, 0.0D0, 11000.0D0, 20000.0D0, 32000.0D0,
     X 47000.0D0, 52000.0D0,
     2 61000.0D0, 79000.0D0, 88743.0D0, 98451.0D0, 103294.0D0,
     X 108129.0D0,
     3 117776.0D0, 127394.0D0, 146541.0D0, 156071.0D0, 165571.0D0,
     X 184485.0D0,
     4 221967.0D0, 286476.0D0, 376312.0D0, 420240.0D0, 463526.0D0,
     X 548230.0D0, 630530.0D0/
      DATA BM /
     1 -0.0065D0, -0.0065D0, 0.0D0, 0.001D0, 0.0028D0, 0.0D0,
     X -0.0020D0,
     2 -0.0040D0, 0.0D0, 0.0030902D0, 0.005162090D0, 0.005170631D0,
     3 0.01036591D0, 0.02079434D0, 0.020891D0, 0.015739D0,
     X 0.0105263D0,
     4 0.00740193D0, 0.00533590D0, 0.00434048D0, 0.00367336D0,
     X 0.002959388D0,
     5 0.00300328D0, 0.00200699D0, 0.00133657D0, 0.0D0/
      DATA R /
     1 0.193049997D+1, 0.122500263D+1, 0.363918856D-0, 0.880350657D-1,
     2 0.132250458D-1, 0.142753743D-2, 0.759434283D-3, 0.251091339D-3,
     3 0.200114310D-4, 0.317015171D-5, 0.497409776D-6, 0.211674012D-6,
     4 0.982986927D-7, 0.243629359D-7, 0.759159245D-8, 0.183655895D-8,
     5 0.115938455D-8, 0.804025792D-9, 0.434910767D-9, 0.156481698D-9,
     6 .358881794D-10, .650950819D-11, .312789872D-11, .157970712D-11,
     7 0.464882739D-12, 0.154055260D-12/
      DATA PB /
     1 0.120676249D+4, 0.472681269D+3, 0.105578616D+3, 0.255403660D+2,
     2 0.404931098D+1, 0.517378375D+0, 0.275239632D-0, 0.849500768D-1,
     3 0.484093063D-2, 0.766885906D-3, 0.140309744D-3, 0.667954773D-4,
     4 0.343097486D-4, 0.117659643D-4, 0.569949880D-5, 0.236255541D-5,
     5 0.172430381D-5, 0.130345970D-5, 0.786596574D-6, 0.324927509D-6,
     6 0.879763417D-7, 0.188339643D-7, 0.959446043D-8, 0.512056269D-8,
     7 0.161272861D-8, 0.557126634D-9/
      DATA TWTM /
     1 28.9644D0, 28.9644D0, 28.9644D0, 28.9644D0, 28.9644D0,
     X 28.9644D0,
     2 28.9644D0, 28.9644D0, 28.9644D0, 28.9644D0, 28.88D0,
     X 28.75D0, 28.56D0,
     3 28.07D0, 27.58D0, 26.92D0, 26.66D0, 26.40D0, 25.85D0,
     X 24.70D0, 22.66D0,
     4 19.94D0, 18.82D0, 17.94D0, 16.84D0, 16.17D0/
C
C     PHYSICAL CONSTANTS: GRAVITY AG, SEA-LEVEL MOLECULAR WEIGHT AM0,
C     GAS CONSTANT ARR, EARTH RADIUS AR, AND UNIT CONVERSIONS CON1..CON4.
      DATA AG /9.80665D0/, AM0 /28.9644D0/, ARR /8.31432D+3/,
     . AR /6356766.0D0/
      DATA CON1 /0.3048D0/, CON2 /1.8D0/, CON3 /0.0021156D0/,
     . CON4 /263.83052D0/
C
C     ABOVE THE MODEL CEILING, RETURN FIXED EXOSPHERIC VALUES.
      IF((H-2275000.0D0).LE.0.0D0) GO TO 2
      RHO=3.3D-15
      ASOUND=3413.42933D0
      TEMP=2713.95661D0
      PRES=0.126068033D-11
      GO TO 105
C
C     CONVERT TO GEOPOTENTIAL ALTITUDE AH, THEN FIND THE LAYER K
C     CONTAINING AH (ARITHMETIC-IF LAYER SEARCH OVER HB).
    2 ZZ=H*CON1
      AH=AR*ZZ/(AR+ZZ)
      DO 7 I=1,26
      IF(AH-HB(I)) 7,8,9
    9 IF(AH-HB(I+1)) 8,7,7
    8 K=I
      GO TO 10
    7 CONTINUE
C
C     MOLECULAR-SCALE TEMPERATURE IN LAYER K, PLUS TABLE-LOOKUP OF THE
C     MOLECULAR WEIGHT WTM VIA ARTLU1.
   10 WS4=(AH-HB(K))
      TM=TB(K)+(BM(K)*WS4)
      CALL ARTLU1(1,AH,HB,WTM,TWTM)
      TEMP=TM*CON2*(WTM/AM0)
C
C     GRADIENT LAYER (BM.NE.0): POWER-LAW FORM; ISOTHERMAL LAYER
C     (BM.EQ.0): EXPONENTIAL FORM.
   12 IF(BM(K)-0.0D0) 15,16,15
   15 WS5=(TB(K)/TM)
      WS6=AG*AM0/(ARR*BM(K))
      RHO=(R(K)*WS5**(1.0D0+WS6))/R(2)
      PRES=(WS5**WS6)*CON3*PB(K)
      GO TO 17
   16 WS7=DEXP(-(AG*AM0*WS4)/(ARR*TB(K)))
      RHO=R(K)*WS7/R(2)
      PRES=PB(K)*WS7*CON3
   17 CONTINUE
      ASOUND=CON4*DSQRT(TEMP/WTM)
  105 RETURN
      END
|
# Getting Started
We're glad to have you joining us, taking a few minutes to read the
following pages will help you be a better member of our community:
- Our [Code of Conduct](../code-of-conduct.md) is important to us, and helps us maintain a healthy community.
- We also have a [guide to help you learn where to get help](../index.md#communication) that you should look over.
This guide assumes a basic familiarity with using the command line, git, and Python.
No matter how experienced you are, it is a good idea to read through this section before diving into Open States' code.
No worries if you aren't an expert though, we'll walk you through the
steps. And as for Python, if you've written other languages like
Javascript or Ruby you'll probably be just fine.
Don't be afraid to [ask for help](../index.md#communication) either!
## Project Overview
Open States is a fairly large and somewhat complex project, composed of many moving parts, with a long history.
As you look to contribute, it may be beneficial to understand a little bit about the various components.
These repositories make up the core of the project, if you're looking to contribute there's a 95% chance one of these is what you want.
- [openstates-scrapers](https://github.com/openstates/openstates-scrapers) - Open States' scrapers.
- [people](https://github.com/openstates/people) - Open States people & committee data, maintained as editable YAML files.
- [openstates-core](https://github.com/openstates/openstates-core) - Open States data model & scraper backend.
- [openstates.org](https://github.com/openstates/openstates.org) - Powers [OpenStates.org](https://openstates.org/) website & GraphQL API.
- [api-v3](https://github.com/openstates/api-v3) - Powers [API v3](https://v3.openstates.org).
- [documentation](https://github.com/openstates/documentation) - [you're reading it now](https://docs.openstates.org/).
## Installing Prerequisites
### poetry
If you're working on the `people` repo, `api-v3`, or want to work on scrapers without Docker, you'll need `poetry` to build your Python virtual environment.
!!! note
If you haven't used `poetry` before, it is similar to `pipenv`, `pip`, and `conda` in that it manages a Python virtualenv on your behalf.
**Installing Poetry**
The [official poetry docs](https://python-poetry.org/docs/master/#installation) recommend installing with:
curl -sSL https://install.python-poetry.org | python3 -
Then within each repo you check out, be sure to run:
poetry install
Which will fetch the correct version of dependencies.
### docker & docker-compose
When working on scrapers or openstates.org, you have the option to use Docker.
The first thing you will need to do is get a working Docker environment
on your local machine. No worries if you aren't familiar with Docker —
you'll barely have to touch it beyond what this guide explains.
Install Docker and docker-compose (if not already installed on your local system):
**(a)** Installing Docker:
- On OSX: [Docker for Mac](https://docs.docker.com/docker-for-mac/)
- On Windows: [Docker for Windows](https://docs.docker.com/docker-for-windows/)
- On Linux: Use your package manager of choice or [follow Docker's instructions](https://docs.docker.com/engine/installation/linux/).
(*Docker Compose is probably already installed by step 1(a) if not, proceed to step 1(b)*)
**(b)** Installing docker-compose:
- For easy installation on [macOS, Windows, and 64-bit Linux.](https://docs.docker.com/compose/install/#prerequisites)
Ensure that Docker and docker-compose are installed locally:
$ docker --version
Docker version 19.03.4, build 9013bf5
$ docker-compose --version
docker-compose version 1.24.1, build 4667896b
Of course, your versions will differ, but ensure they are relatively
recent to avoid strange issues.
### pre-commit
To help keep the code as manageable as possible, we **strongly recommend**
you use pre-commit to make sure all commits adhere to our preferred
style.
- See [pre-commit's installation instructions](https://pre-commit.com/#installation)
- Within each repo you check out, run `pre-commit install` after checking out. It should look something like:
$ pre-commit install
pre-commit installed at .git/hooks/pre-commit
!!! note
If you're running `flake8` and `black` yourself via your editor or
similar this isn't strictly necessary, but we find it helps ensure
commits don't fail linting. **We require all PRs to pass linting!**
## Recent Major Work
To give a sense of recent priorities, here are major milestones from the
past few years:
- [Federal Data & Committee Data](https://blog.openstates.org/open-states-2021-q2/) - 2021
- [API v3](https://blog.openstates.org/open-states-api-v3/) - Q3 2020
- [Legislation Tracking](https://blog.openstates.org/tracking-legislation-on-open-states/) - Q1 2020
- **Restoration of Historical Legislator Data** - Q4 2019
- [Full Text Search](https://blog.openstates.org/adding-full-text-search-to-open-states-14b665c1fe30/) - Q4 2019
- **2019 Legislative Session Updates** - Q1 2019
- [OpenStates.org 2019 rewrite](https://blog.openstates.org/introducing-the-new-openstates-org-64bcbd765f58/) - Q1 2019
- [OpenStates GraphQL API](https://blog.openstates.org/more-ways-to-get-state-legislative-data-d9aece2245f0/) - Q4 2018
- **Scraper Overhaul** - Throughout much of 2017 we reworked our
scrapers to be more resilient and to use an updated tech stack,
replacing the one that powered the site from 2011-2016.
|
require 'spec_helper'
# Access-control regression test: a supplier-role user must be denied
# access to the admin orders screen.
describe 'Admin - Orders' do
  it 'Supplier should not be authorized' do
    # Sign in as a factory-built supplier user, then hit the admin page.
    login_as create(:supplier_user)
    visit spree.admin_orders_path
    # The app is expected to respond with this denial message.
    expect(page).to have_content('Authorization Failure')
  end
end
|
<?php
namespace App\Http\Controllers;
use App\Models\Task;
use App\Models\User;
use App\Models\Branche;
use App\Models\TaskStatus;
use App\Models\Administration;
use App\Models\TasksType;
use App\Models\Section;
use App\Models\employee;
use Validator;
use Illuminate\Support\Facades\Session;
use Illuminate\Http\Request;
class TaskController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{ $tasks = Task::orderBy('id' , 'asc')->get();
return view('admin.tasks.index')
->with('tasks' , $tasks);
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create()
{
$users = User::get();
$branches = Branche::get();
$taskstatus = TaskStatus::get();
$taskstype = TasksType::get();
$administrations = Administration::get();
$sections = Section::get();
$employees = employee::get();
return view('admin.tasks.create')
->with('users' , $users)
->with('branches' , $branches)
->with('taskstatus' , $taskstatus)
->with('taskstype' , $taskstype)
->with('administrations' , $administrations)
->with('sections' , $sections)
->with('employees' , $employees);
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(Request $request)
{
$rules = [
'user_id' => 'required',
'type_id' => 'required',
'statue_id' => 'required',
'sdate' => 'required',
/// 'administration_id' => 'required',
];
$customMessages = [
'user_id.required' => 'يرجي إدخال الإسم بالكامل ',
'type_id.required' => 'يرجي إدخال رقم الهوية ',
'statue_id.required' => 'يرجي إدخال الحالة ',
'phone.required' => 'يرجي إدخال رقم الجوال ',
'sdate.required' => 'يرجي إدخال تاريخ بداية المهمة ',
];
$validater = Validator::make($request->all(), $rules , $customMessages);
if($request->edate == null && $request->worktime == null)
{
$validater->after(function($validater){
$validater->errors()->add('feild' , '(يرجي إدخال نهاية المهمة أو ساعات العمل)' );
});
}
if($validater->fails()){
return redirect()->back()
->withErrors($validater)
->withInput();
}
Task::create($request->all());
Session::flash("msg", "تم إضافة المهمة بنجاح");
return redirect()->route('tasks.index');
}
/**
 * Display the specified resource.
 *
 * @param int $id Task primary key; view receives null if not found.
 * @return \Illuminate\Http\Response
 */
public function show($id)
{
    // The detail view renders the task plus every lookup collection.
    $viewData = [
        'users'           => User::get(),
        'branches'        => Branche::get(),
        'taskstatus'      => TaskStatus::get(),
        'taskstype'       => TasksType::get(),
        'administrations' => Administration::get(),
        'sections'        => Section::get(),
        'employees'       => employee::get(),
        'tasks'           => Task::find($id),
    ];
    return view('admin.tasks.show', $viewData);
}
/**
 * Show the form for editing the specified resource.
 *
 * @param \Illuminate\Http\Request $request
 * @param int $id Task primary key; view receives null if not found.
 * @return \Illuminate\Http\Response
 */
public function edit(Request $request , $id)
{
    // Same lookup collections as create(), plus the task being edited.
    $viewData = [
        'users'           => User::get(),
        'branches'        => Branche::get(),
        'taskstatus'      => TaskStatus::get(),
        'taskstype'       => TasksType::get(),
        'administrations' => Administration::get(),
        'sections'        => Section::get(),
        'employees'       => employee::get(),
        'tasks'           => Task::find($id),
    ];
    return view('admin.tasks.edit', $viewData);
}
/**
 * Update the specified resource in storage.
 *
 * @param \Illuminate\Http\Request $request
 * @param int $id
 * @return \Illuminate\Http\Response
 */
public function update(Request $request, $id)
{
    // Mass-assign the whole request payload onto the existing task.
    $task = Task::find($id);
    $task->update($request->all());
    Session::flash("msg", "تم تعديل المهمة بنجاح");
    return redirect()->route('tasks.index');
}
/**
 * Remove the specified resource from storage.
 *
 * @param int $id
 * @return \Illuminate\Http\Response
 */
public function destroy($id)
{
    // findOrFail aborts with a 404 when the id is unknown.
    Task::findOrFail($id)->delete();
    // Fixed: removed the stray "w: " debug prefix from the user-facing
    // message, and use the Session facade for consistency with the other
    // actions in this controller.
    Session::flash("msg", "تم الحذف بنجاح");
    return redirect()->route('tasks.index');
}
}
|
//
// Copyright 2020 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
/**
* Provides colors for dark and light mode. @c GSCXScanner is meant to contrast with the application
* so it's more explicit what is part of the original application and what is part of the scanner.
* In light mode (the only mode before iOS 12), this is light text on a dark background. However, in
* dark mode, this is dark text on a light background, since the app itself will have light text on
* a dark background.
*/
@interface UIViewController (GSCXAppearance)
/**
 * @return The appropriate color for the current view controller's appearance, maximizing contrast.
 * Pre iOS 12, defaults to black, because dark mode doesn't exist.
 */
- (UIColor *)gscx_textColorForCurrentAppearance;
/**
 * @return The appropriate color for the current view controller's appearance, maximizing contrast.
 * Pre iOS 12, defaults to white, because dark mode doesn't exist.
 */
- (UIColor *)gscx_backgroundColorForCurrentAppearance;
/**
 * @return The appropriate @c UIBlurEffectStyle value for the current view controller's appearance,
 * maximizing contrast. Pre iOS 12, defaults to @c UIBlurEffectStyleDark, because dark mode doesn't
 * exist.
 */
- (UIBlurEffectStyle)gscx_blurEffectStyleForCurrentAppearance;
/**
 * Sets the value of @c overrideUserInterfaceStyle for overlay view controllers for the current
 * appearance. Before iOS 13, appearance doesn't exist, so this is a no-op.
 *
 * NOTE(review): the selector spelling "Apperance" is a typo, but it is part of the public API of
 * this category; renaming it would break existing callers, so it is left unchanged.
 */
- (void)gscx_setOverrideUserInterfaceStyleForCurrentApperance;
@end
NS_ASSUME_NONNULL_END
|
package run.halo.app.model.entity;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
/**
* Sheet comment.
*
* @author johnniang
* @date 19-4-24
*/
@Entity(name = "SheetComment")
@DiscriminatorValue("1")
public class SheetComment extends BaseComment {
    // Marker subclass: all behavior lives in BaseComment. The discriminator
    // value "1" distinguishes sheet comments from the other comment types
    // stored in the same single-table inheritance hierarchy.
}
|
### A Pluto.jl notebook ###
# v0.14.0
using Markdown
using InteractiveUtils
# ╔═╡ b2d786ec-7f73-11ea-1a0c-f38d7b6bbc1e
md"""
# The Basel problem
_Leonard Euler_ proved in 1741 that the series
```math
\frac{1}{1} + \frac{1}{4} + \frac{1}{9} + \cdots
```
converges to
```math
\frac{\pi^2}{6}.
```
"""
# ╔═╡ b2d79330-7f73-11ea-0d1c-a9aad1efaae1
# Index range for the partial sum of the Basel series.
n = 1:100000
# ╔═╡ b2d79376-7f73-11ea-2dce-cb9c449eece6
# Terms 1/k^2 for each k in n (broadcast power over the range).
seq = n .^ -2
# ╔═╡ b2d792c2-7f73-11ea-0c65-a5042701e9f3
# Since sum(seq) ≈ π²/6, this should approximate π.
sqrt(sum(seq) * 6.0)
# ╔═╡ Cell order:
# ╟─b2d786ec-7f73-11ea-1a0c-f38d7b6bbc1e
# ╠═b2d792c2-7f73-11ea-0c65-a5042701e9f3
# ╠═b2d79330-7f73-11ea-0d1c-a9aad1efaae1
# ╠═b2d79376-7f73-11ea-2dce-cb9c449eece6
|
import DropOffType from "../../../../enums/DropOffType";
import PickupType from "../../../../enums/PickupType";
import TravelMode from "../../../../enums/TravelMode";
// Test fixture: three train legs over stops A-D, each wrapped in an
// IteratorResult-like shape ({ value, done }) so a mocked connection
// iterator can yield them directly in this order.
const connections = [
    {
        value: {
            "id": "2a",
            "travelMode": TravelMode.Train,
            "arrivalStop": "http://irail.be/stations/NMBS/008891702", // B
            "departureStop": "http://irail.be/stations/NMBS/008892007", // C
            "departureTime": new Date("2017-12-19T16:22:00.000Z"),
            "arrivalTime": new Date("2017-12-19T16:30:00.000Z"),
            "tripId": "A",
            "gtfs:pickupType": PickupType.Regular,
            "gtfs:dropOffType": DropOffType.Regular,
        },
        done: false,
    },
    {
        value: {
            "id": "2b",
            "travelMode": TravelMode.Train,
            "arrivalStop": "http://irail.be/stations/NMBS/008891702", // B
            "departureStop": "http://irail.be/stations/NMBS/008812005", // D
            "departureTime": new Date("2017-12-19T16:23:00.000Z"),
            "arrivalTime": new Date("2017-12-19T16:34:00.000Z"),
            "tripId": "B",
            // Connection "1" below continues this journey.
            "nextConnection": ["1"],
            "gtfs:pickupType": PickupType.Regular,
            "gtfs:dropOffType": DropOffType.Regular,
        },
        done: false,
    },
    {
        value: {
            "id": "1",
            "travelMode": TravelMode.Train,
            "arrivalStop": "http://irail.be/stations/NMBS/008821006", // A
            "departureStop": "http://irail.be/stations/NMBS/008891702", // B
            "departureTime": new Date("2017-12-19T16:35:00.000Z"),
            "arrivalTime": new Date("2017-12-19T16:50:00.000Z"),
            "tripId": "A",
            "gtfs:pickupType": PickupType.Regular,
            "gtfs:dropOffType": DropOffType.Regular,
        },
        done: false,
    },
];
export default connections;
|
require "test_helper"
# Tests for User creation/lookup from an OmniAuth-style auth hash, and for
# the permitted-email-hostname validation (configured via Books).
describe User do
  # Minimal Google OmniAuth payload used by the creation tests below.
  let(:auth_hash) do
    {
      provider: "google",
      uid: "12345",
      info: {
        name: "Stub User",
        email: "stub.user@example.org",
        image: "https://example.org/image.jpg",
      },
    }
  end
  it "can be created from an auth hash" do
    user = User.find_or_create_from_auth_hash!(auth_hash)
    assert_equal "Stub User", user.name
    assert_equal "stub.user@example.org", user.email
    assert_equal "google", user.provider
    assert_equal "12345", user.provider_uid
    assert_equal "https://example.org/image.jpg", user.image_url
  end
  # Lookup is keyed on email, not on the provider uid.
  it "can be found from a matching email" do
    existing_user = create(:user, email: auth_hash[:info][:email])
    user = User.find_or_create_from_auth_hash!(auth_hash)
    assert_equal existing_user.id, user.id
  end
  it "updates the user details on sign in" do
    existing_user = create(:user, email: auth_hash[:info][:email],
                                  name: "Another Name")
    User.find_or_create_from_auth_hash!(auth_hash)
    existing_user.reload
    assert_equal "Stub User", existing_user.name
  end
  # An empty permitted list means any hostname is allowed.
  it "does not restrict user emails to a hostname by default" do
    Books.stubs(:permitted_email_hostnames).returns([])
    user = build(:user, email: "stub.user@something.org")
    assert user.valid?
  end
  it "restricts user emails to a specified hostname" do
    Books.stubs(:permitted_email_hostnames).returns(["example.org"])
    user = build(:user, email: "stub.user@foo.org")
    assert_not user.valid?
    assert user.errors.key?(:email)
    user = build(:user, email: "stub.user@example.org")
    assert user.valid?
  end
  it "restricts user emails to multiple specified hostnames" do
    Books.stubs(:permitted_email_hostnames).returns(["example.org", "foo.org"])
    user = build(:user, email: "stub.user@foo.org")
    assert user.valid?
    user = build(:user, email: "stub.user@example.org")
    assert user.valid?
    user = build(:user, email: "stub.user@bar.org")
    assert_not user.valid?
    assert user.errors.key?(:email)
  end
  # Changing the permitted list later must not invalidate existing records.
  it "only enforces the email hostname restriction on create" do
    Books.stubs(:permitted_email_hostnames).returns(["banned.org"])
    user = create(:user, email: "stub.user@banned.org")
    Books.stubs(:permitted_email_hostnames).returns(["allowed.org"])
    user.reload
    assert user.valid?
  end
end
|
#!/bin/bash
# Bootstrap for a coordinator-only Elasticsearch node: sources the cloud
# provider environment, disables the master/data/ingest roles, and starts
# the service.
# NOTE(review): `set +e` explicitly turns OFF exit-on-error, so failures in
# the sourced/invoked scripts below are tolerated — confirm this is intended.
set +e
. /opt/cloud-deploy-scripts/common/env.sh
. /opt/cloud-deploy-scripts/$cloud_provider/env.sh
# It is required to bind to all interfaces for load balancer on GCP to work
if [ "$cloud_provider" == "gcp" ]; then
    export BIND_TO_ALL="true"
fi
/opt/cloud-deploy-scripts/common/config-es.sh
/opt/cloud-deploy-scripts/common/config-beats.sh
/opt/cloud-deploy-scripts/$cloud_provider/config-es.sh
/opt/cloud-deploy-scripts/$cloud_provider/config-es-discovery.sh
# Coordinator-only: this node holds no data and handles no ingest pipelines.
cat <<'EOF' >>/etc/elasticsearch/elasticsearch.yml
node.master: false
node.data: false
node.ingest: false
EOF
# Start Elasticsearch
systemctl daemon-reload
systemctl enable elasticsearch.service
systemctl start elasticsearch.service
/opt/cloud-deploy-scripts/common/config-clients.sh
|
package com.appframework;
import com.tengmoney.gui.AppPage;
import com.tmoney.foundation.utils.Configuration;
import io.appium.java_client.AppiumDriver;
import io.appium.java_client.MobileElement;
import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.By;
import static com.tmoney.foundation.utils.Configuration.Parameter.*;
@Slf4j
public class Wework extends AppPage {
    // Text of the "workbench" (工作台) tab; used to detect the app main page.
    private final String workSpace = "工作台";
    // "Messages" tab.
    private By messageTab = byText("消息");
    // "Workbench" tab.
    private By workbench = byText("工作台");
    // Phone number input field.
    private By phoneInput = By.id("com.tencent.wework:id/fow");
    // "Next step" button.
    private By nextStep = By.id("com.tencent.wework:id/di");
    // One-time login guard; static, so shared across all Wework instances.
    private static boolean flag = false;
    // Verification code field (currently unused).
    // private By varifyCode = By.id("com.tencent.wework:id/dq2");
    // H5 application entry, name taken from configuration.
    private By h5app = byText(Configuration.get(H5APPLICATIONNAME));
    // "Log in with phone number" entry point.
    private By loginWithNum= By.id("com.tencent.wework:id/fop");
    private By companyName = byText(Configuration.get(COMPANY));
    private By miniproName = byText(Configuration.get(MINIPRONAME));
    // Hard-coded test account phone number.
    private static String phoneNum = "13242424028";
    public Wework() {
        super();
    }
    public Wework(AppiumDriver<MobileElement> driver) {
        super(driver);
    }
    // One-time login/initialization flow: logs in with the phone number,
    // accepts permission dialogs, then waits until the workbench is visible.
    // NOTE(review): this method is private and not invoked from any method
    // visible in this file — confirm it is called from AppPage or elsewhere.
    private final void initWework() {
        if(flag==true){
            return ;
        }else {
            log.info("wework init");
            //TODO enter the company configured in Configuration
            if(hasElement(loginWithNum)){
                click(loginWithNum);
                // click(phoneInput);
                sendKeys(phoneInput,phoneNum);
                click(nextStep);
                try {
                    Thread.sleep(5000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                // Enter the verification code.
                log.info("输入验证码");
                click(nextStep);
                // "Agree" button.
                driver.findElementById("com.tencent.wework:id/gg5").click();
                click(companyName);
                // "Agree" button.
                driver.findElementById("com.tencent.wework:id/cey").click();
                // Permission dialog "allow" button (appears three times).
                driver.findElementById("com.android.packageinstaller:id/permission_allow_button").click();
                // Permission dialog "allow" button.
                driver.findElementById("com.android.packageinstaller:id/permission_allow_button").click();
                // Permission dialog "allow" button.
                driver.findElementById("com.android.packageinstaller:id/permission_allow_button").click();
            }
            // Poll the page source until the workbench text appears.
            wait.until(
                    x ->
                    {
                        log.info(String.valueOf(System.currentTimeMillis()));
                        String source = driver.getPageSource();
                        Boolean exist = source.contains(workSpace);
                        log.info("寻找工作台/腾银信息" + exist);
                        return exist;
                    }
            );
            flag = true;
        }
    }
    /**
     * Jumps to the configured mini-program page.
     * Note: Appium cannot operate inside the mini-program itself.
     */
    public final MiniproPage jumpToMiniproPage() {
        log.info("start jump to minipro");
        click(workbench);
        click(miniproName);
        return new MiniproPage(driver);
    }
    public final MessagePage jumpToMessage() {
        click(messageTab);
        // The driver must be passed here, otherwise a new window is opened.
        return new MessagePage(driver);
    }
    // Opens the "schedule" (日程) tab.
    public final 日程Page 日程() {
        click(By.xpath("//*[@text='日程']"));
        return new 日程Page();
    }
    public Boolean isWeworkMainPage() {
        return hasElement(messageTab);
    }
}
|
package de.metzgore.beansplan.util.di.modules
import dagger.Module
import dagger.Provides
import de.metzgore.beansplan.dailyschedule.DailyScheduleFragment
import de.metzgore.beansplan.weeklyschedule.WeeklyScheduleFragment
/**
 * Dagger module exposing the schedule fragments for injection.
 * Each provider simply hands back the fragment instance it receives.
 */
@Module
class FragmentModule {

    /** Provides the daily schedule fragment unchanged. */
    @Provides
    fun provideDailyScheduleFragment(fragment: DailyScheduleFragment): DailyScheduleFragment = fragment

    /** Provides the weekly schedule fragment unchanged. */
    @Provides
    fun provideWeeklyScheduleFragment(fragment: WeeklyScheduleFragment): WeeklyScheduleFragment = fragment
}
|
<?php
namespace app\shop\controller;
// use think\View;
use think\Controller;
class Index extends Controller
{
    /**
     * Render the default template for the shop index page.
     *
     * @return string Rendered view content.
     */
    public function Index(){
        return $this->fetch();
    }
}
|
import Model from "ember-data/model";
import { belongsTo } from "ember-data/relationships";
// Kraken channel-search result model: wraps a single twitchChannel record.
export default Model.extend({
	// Synchronous relationship: the channel payload is embedded in the response.
	channel: belongsTo( "twitchChannel", { async: false } )
}).reopenClass({
	// Model name used by the adapter to build the API request path.
	toString() { return "kraken/search/channels"; }
});
|
<?php
namespace App\Listeners;
use App\Events\UsersubscripedToStack;
use Illuminate\Queue\InteractsWithQueue;
use Illuminate\Contracts\Queue\ShouldQueue;
use App\SheetResponse;
class CreateUserSheetsRecord
{
    /**
     * Create the event listener.
     *
     * @return void
     */
    public function __construct()
    {
        //
    }

    /**
     * Handle the event: seed an empty SheetResponse row for every sheet in
     * the stack the user just subscribed to, so later answers have a record
     * to attach to.
     *
     * @param UsersubscripedToStack $event
     * @return void
     */
    public function handle(UsersubscripedToStack $event)
    {
        foreach ($event->stack->sheets as $sheet) {
            $sheet_response = new SheetResponse;
            $sheet_response->user_id = $event->user->id;
            // Fixed: removed the stray double semicolon after this assignment.
            $sheet_response->sheet_id = $sheet->id;
            $sheet_response->save();
        }
    }
}
|
---
layout: page
title: On Turing machines with syntactic restrictions
---
On Turing machines with syntactic restrictions
======
### by Dr. Keisuke Nakano
- When: Friday, 22/10/2021, between 9am and 10am EDT (1pm-2pm UTC)
- Where: Zoom; Outside guests please RSVP by emailing <a href="mailto:harley.eades@gmail.com">Harley Eades</a>
- YouTube Stream/Recording: <https://youtu.be/lCJ2Kq4heTI>
#### Abstract
A Turing machine is a computational model that can represent all
computable functions. By imposing syntactic restrictions on the Turing
machine, it is possible to construct a computational model that
defines only computable functions satisfying a certain property
P. However, it is not obvious whether the restricted computational
model covers all computable functions satisfying the property P. In
this talk, I will introduce computational models corresponding to the
three properties of functions: injectivity, involutoriness, and
idempotence. Although all of these properties are undecidable, the
corresponding computational models are given as Turing machines with
decidable syntactic restrictions.
|
// Package dmc provides application with APIs for obtaining Delegated Machine Credentials (DMC)
//
// Run go tests
//
// You need to do the followings to have a successful run of go unit tests:
// 1. Install Centrify Client (version 21.5 or later) on the system.
// 2. Enroll Centrify Client to a PAS tenant:
// - enable DMC feature by specifying "-F all" or "-F dmc" in cenroll command line.
// - specify the DMC scope "testsdk" by specifying "-d testsdk:security/whoami" in cenroll command line.
// 3. Run the unit test as root
//
//
// Sample Program
//
// A sample program can be found in https://github.com/centrify/platform-go-sdk/examples/dmc
package dmc
import (
"fmt"
"strings"
"github.com/centrify/platform-go-sdk/internal/lrpc"
"github.com/centrify/platform-go-sdk/utils"
)
// getDMCEndPoint returns the local IPC endpoint used to reach the Centrify
// Client's DMC service (delegates to the utils package).
func getDMCEndPoint() string {
	return utils.GetDMCEndPoint()
}
// GetDMCToken returns an oauth token for the requested scope that has the
// identity of the current machine account.
//
// Possible error returns:
//
//	ErrCannotGetToken        - other errors in getting the token
//	ErrCannotSetupConnection - Cannot setup connection to Centrify Client
//	ErrClientNotInstalled    - Centrify Client is not installed in system
//	ErrCommunicationError    - Communication error with Centrify Client
func GetDMCToken(scope string) (string, error) {
	installed, err := utils.IsCClientInstalled()
	if err != nil {
		// Include the underlying error detail instead of discarding it;
		// errors.Is(err, utils.ErrClientNotInstalled) still matches via %w.
		return "", fmt.Errorf("Cannot get Centrify Client installation status: %v: %w", err, utils.ErrClientNotInstalled)
	}
	if !installed {
		return "", utils.ErrClientNotInstalled
	}

	// Create a LRPC2 client session and connect to the Centrify Client.
	cl := lrpc.NewLrpc2ClientSession(getDMCEndPoint())
	if cl == nil {
		return "", utils.ErrCannotSetupConnection
	}
	if err = cl.Connect(); err != nil {
		return "", utils.ErrCannotSetupConnection
	}
	defer cl.Close()

	// Send the token request; the only argument is the requested scope.
	args := []interface{}{scope}
	results, err := lrpc.DoRequest(cl, lrpc.Lrpc2MsgIDAdminClientGetToken, args)
	if err != nil {
		return "", utils.ErrCommunicationError
	}

	// The reply must carry exactly three values:
	//   results[0] status (int32, 0 on success)
	//   results[1] error message (string)
	//   results[2] access token (string)
	if len(results) != 3 {
		return "", utils.ErrCommunicationError
	}
	status, ok := results[0].(int32)
	if !ok {
		return "", utils.ErrCommunicationError
	}
	if status != 0 {
		// Error message is returned in the second value.
		if errmsg, ok := results[1].(string); ok {
			return "", fmt.Errorf("%w: %s", utils.ErrCannotGetToken, errmsg)
		}
		return "", utils.ErrCommunicationError
	}
	token, ok := results[2].(string)
	if !ok {
		return "", utils.ErrCommunicationError
	}
	return token, nil
}
// GetEnrollmentInfo returns the tenant URL and OAuth client ID that the
// installed Centrify Client is enrolled with.
func GetEnrollmentInfo() (string, string, error) {
	installed, err := utils.IsCClientInstalled()
	if err != nil {
		return "", "", fmt.Errorf("Cannot get Centrify Client installation status: %w", utils.ErrClientNotInstalled)
	}
	if !installed {
		return "", "", utils.ErrClientNotInstalled
	}

	// Open a LRPC2 session to the local Centrify Client.
	session := lrpc.NewLrpc2ClientSession(getDMCEndPoint())
	if session == nil {
		return "", "", utils.ErrCannotSetupConnection
	}
	if err = session.Connect(); err != nil {
		return "", "", utils.ErrCannotSetupConnection
	}
	defer session.Close()

	// Ask the client for its enrollment information.
	reply, err := lrpc.DoRequest(session, lrpc.LrpcMsgIDClientInfo, nil)
	if err != nil {
		return "", "", utils.ErrCommunicationError
	}
	if len(reply) < 1 {
		return "", "", utils.ErrCommunicationError
	}
	info, ok := reply[0].(map[string]string)
	if !ok {
		return "", "", utils.ErrCommunicationError
	}
	tenantURL, haveURL := info["ServiceURI"]
	oAuthClientID, haveID := info["OauthClientID"]
	if !haveURL || !haveID {
		return "", "", utils.ErrCommunicationError
	}

	// Normalize: strip the scheme prefix and any trailing slash.
	tenantURL = strings.TrimPrefix(tenantURL, "https://")
	tenantURL = strings.TrimPrefix(tenantURL, "http://")
	tenantURL = strings.TrimSuffix(tenantURL, "/")
	return tenantURL, oAuthClientID, nil
}
|
---
title: Dynamic Script Refresh
parent: Developer Tools
has_children: false
nav_order: 50
---
## {{page.title}}
A concept of the MDI apps framework is to allow the framework
and app code to be viewed and updated in an app
while it is running.
### Re-sourcing app scripts without page reload
One part of this is that a **refresh link** is placed
in the top menu bar when working in single-user
developer modes, e.g. `mode = 'local', developer = TRUE`.
Clicking the link allows a developer
to re-`source()` many of the session scripts used
by an app without reloading the web page.
{% include figure.html file="developer-tools/refresh-icon.png" width="250px" %}
Not all scripts can be re-loaded in this fashion.
A complete description is beyond the scope here,
but in general, appStep modules cannot be dynamically
updated, whereas utility scripts can.
Let trial and error be your guide. One suggestion
is to make liberal use of module utility scripts
that can be re-sourced.
This feature can save a lot of time reloading
apps when adding app features,
especially if they are doing slower work
on each reload.
### Page reload via auto-saved bookmarks
For scripts that cannot be dynamically refreshed,
you can click on the upper left page label
(default value "MDI") to force a hard reload
of the page where the auto-saved bookmark will
take you back to the same app step and state, but
now having reloaded the framework and all appStep modules.
This feature is always available in all apps
but is most useful to developers, which is
why it is not advertised to users in a more obvious way.
|
module CBR
  class Similarity
    # Similarity measure over numeric attributes. Border points are parsed
    # into BigDecimal (4 significant digits) so score lookups avoid
    # floating-point noise.
    class NumericSimilarity < Similarity
      def initialize(opts={})
        # Kernel#BigDecimal() replaces BigDecimal.new, which was deprecated
        # and removed in Ruby 2.7+ — behavior is otherwise identical.
        opts[:borderpoints] = Hash[opts[:borderpoints].map {|k, v| [BigDecimal(k, 4), v]}]
        super(opts)
      end

      # Scores +real_value+ against the configured border points (score is
      # defined on the Similarity base class).
      def compare(real_value)
        real_value = BigDecimal(real_value, 4)
        score(real_value)
      end
    end
  end
end
|
# AsteroidOSLinux
AsteroidOS Linux control application
## Prerequisites
- pydbus
- python-mpd2
- pyowm
Get the necessary modules with:
```
pip3 install pydbus python-mpd2 pyowm
```
## Setup
Get the files:
```
git clone https://github.com/atx/AsteroidOSLinux.git
```
Switch to the directory:
```
cd AsteroidOSLinux/
```
Make sure your watch is already connected via bluetooth and run the example script:
```
./example.py
```
|
# Factory for Player records: a unique nickname per build plus an
# associated member record.
FactoryGirl.define do
  factory :player do
    # Sequence guarantees nickname uniqueness across created players.
    sequence(:nickname) { |n| "nick_name_cater_#{n}" }
    association :member
  end
end
|
# Default attributes for the delayed_job worker recipe.
default[:delayed_job][:timeout] = 60            # seconds to wait for a graceful stop
default[:delayed_job][:bin] = "bin/delayed_job" # daemon script, relative to the app root
default[:delayed_job][:suffix] = ""             # optional name suffix for running multiple workers
default[:delayed_job][:options] = ""            # extra command-line options passed to the daemon
|
# Step definitions that run the membership background jobs synchronously,
# so scenarios can assert on their effects immediately.
When(/^the membership count job runs$/) do
  MembershipCount.perform
end
When(/^the membership coverage job runs$/) do
  MembershipCoverage.perform
end
When(/^the membership renewals job runs$/) do
  MembershipRenewals.perform
end
|
using UnityEngine;
using System.Collections;
/// <summary>
/// Each physics step, checks the point under this object; when a collider
/// on the "Interactable" layer overlaps it and carries a Gear component,
/// attaches the gear at this position and deactivates this holder.
/// </summary>
public class GearHold : MonoBehaviour
{
    Collider2D hit;
    int mask;

    void Awake()
    {
        // Only test against colliders on the "Interactable" layer.
        mask = 1 << LayerMask.NameToLayer("Interactable");
    }

    void FixedUpdate()
    {
        hit = Physics2D.OverlapPoint(transform.position, mask);
        if (hit == null)
            return;

        Gear gear = hit.GetComponent<Gear>();
        if (gear == null)
            return;

        gear.Attach(transform.position);
        gameObject.SetActive(false);
    }
}
|
<?php
// Error-code => user-facing (Chinese) message map.
return [
    1010001 => '参数错误', // parameter error
    1010025 => '操作失败', // operation failed
];
|
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
// Babel helper: attach collected prototype and static members to a
// constructor using spec-like property descriptors (non-enumerable,
// configurable, and writable for data properties).
var _createClass = (function() {
  function defineProperties(target, props) {
    props.forEach(function(descriptor) {
      descriptor.enumerable = descriptor.enumerable || false;
      descriptor.configurable = true;
      if ('value' in descriptor) descriptor.writable = true;
      Object.defineProperty(target, descriptor.key, descriptor);
    });
  }
  return function(Constructor, protoProps, staticProps) {
    if (protoProps) defineProperties(Constructor.prototype, protoProps);
    if (staticProps) defineProperties(Constructor, staticProps);
    return Constructor;
  };
})();
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _reactDom = require('react-dom');
var _isEqual = require('lodash/isEqual');
var _isEqual2 = _interopRequireDefault(_isEqual);
var _clone = require('lodash/clone');
var _clone2 = _interopRequireDefault(_clone);
var _uuid = require('../helpers/uuid');
var _uuid2 = _interopRequireDefault(_uuid);
var _ucFirst = require('../helpers/ucFirst');
var _ucFirst2 = _interopRequireDefault(_ucFirst);
// Babel helper: normalize module interop — ES modules pass through
// untouched, CommonJS exports are wrapped as { default: exports }.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
// Babel helper: guard against invoking a class constructor without `new`.
function _classCallCheck(instance, Constructor) {
  if (instance instanceof Constructor) {
    return;
  }
  throw new TypeError('Cannot call a class as a function');
}
// Babel helper: choose the super() return value when it is an object or a
// function, otherwise fall back to `this`; throws if super() never ran.
function _possibleConstructorReturn(self, call) {
  if (!self) {
    throw new ReferenceError(
      "this hasn't been initialised - super() hasn't been called"
    );
  }
  var callIsUsable = call && (typeof call === 'object' || typeof call === 'function');
  return callIsUsable ? call : self;
}
// Babel helper: wire classical inheritance — prototype chain, constructor
// back-reference, and static inheritance via the constructor's prototype.
function _inherits(subClass, superClass) {
  var superIsValid = typeof superClass === 'function' || superClass === null;
  if (!superIsValid) {
    throw new TypeError(
      'Super expression must either be null or a function, not ' +
        typeof superClass
    );
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: {
      value: subClass,
      enumerable: false,
      writable: true,
      configurable: true
    }
  });
  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
}
// Every TinyMCE editor event that is forwarded to React props.
// HANDLER_NAMES (below) is derived from this array by index, so the
// ordering here must stay in sync with it.
var EVENTS = [
  'focusin',
  'focusout',
  'click',
  'dblclick',
  'mousedown',
  'mouseup',
  'mousemove',
  'mouseover',
  'beforepaste',
  'paste',
  'cut',
  'copy',
  'selectionchange',
  'mouseout',
  'mouseenter',
  'mouseleave',
  'keydown',
  'keypress',
  'keyup',
  'contextmenu',
  'dragend',
  'dragover',
  'draggesture',
  'dragdrop',
  'drop',
  'drag',
  'BeforeRenderUI',
  'SetAttrib',
  'PreInit',
  'PostRender',
  'init',
  'deactivate',
  'activate',
  'NodeChange',
  'BeforeExecCommand',
  'ExecCommand',
  'show',
  'hide',
  'ProgressState',
  'LoadContent',
  'SaveContent',
  'BeforeSetContent',
  'SetContent',
  'BeforeGetContent',
  'GetContent',
  'VisualAid',
  'remove',
  'submit',
  'reset',
  'BeforeAddUndo',
  'AddUndo',
  'change',
  'undo',
  'redo',
  'ClearUndos',
  'ObjectSelected',
  'ObjectResizeStart',
  'ObjectResized',
  'PreProcess',
  'PostProcess',
  'focus',
  'blur',
  'dirty'
];
// Prop names matching EVENTS by index: each event maps to an
// `on<UcFirstEvent>` prop (e.g. 'change' -> 'onChange').
var HANDLER_NAMES = EVENTS.map(function(event) {
  return 'on' + (0, _ucFirst2.default)(event);
});
// React wrapper around the global `tinymce` editor (transpiled output).
// Renders either an inline div or a textarea, initializes TinyMCE against
// it, and forwards every editor event to the matching `onXxx` prop.
var TinyMCE = (function(_React$Component) {
  _inherits(TinyMCE, _React$Component);

  function TinyMCE(props) {
    _classCallCheck(this, TinyMCE);

    var _this = _possibleConstructorReturn(
      this,
      (TinyMCE.__proto__ || Object.getPrototypeOf(TinyMCE)).call(this, props)
    );

    // NOTE(review): `config` in state appears unused in this component —
    // confirm before removing.
    _this.state = {
      config: {},
      content: props.content
    };
    return _this;
  }

  _createClass(TinyMCE, [
    {
      key: 'componentWillMount',
      value: function componentWillMount() {
        // Stable DOM id: an explicit prop wins, otherwise a generated uuid.
        this.id = this.id || this.props.id || (0, _uuid2.default)();
      }
    },
    {
      key: 'componentDidMount',
      value: function componentDidMount() {
        // Clone so the tinymce init call cannot mutate the caller's config.
        var config = (0, _clone2.default)(this.props.config);
        this._init(config, this.props.content);
      }
    },
    {
      key: 'componentWillReceiveProps',
      value: function componentWillReceiveProps(nextProps) {
        // Re-initialize the editor only when config or id actually change.
        if (
          !(0, _isEqual2.default)(this.props.config, nextProps.config) ||
          !(0, _isEqual2.default)(this.props.id, nextProps.id)
        ) {
          this.id = nextProps.id;
          this._init((0, _clone2.default)(nextProps.config), nextProps.content);
          return;
        }
      }
    },
    {
      key: 'shouldComponentUpdate',
      value: function shouldComponentUpdate(nextProps) {
        return (
          !(0, _isEqual2.default)(this.props.config, nextProps.config) ||
          !(0, _isEqual2.default)(this.props.entityId, nextProps.entityId)
        );
      }
    },
    {
      key: 'componentWillUnmount',
      value: function componentWillUnmount() {
        this._remove();
      }
    },
    {
      key: 'render',
      value: function render() {
        // Inline mode edits a div in place; otherwise TinyMCE replaces a
        // textarea seeded with the initial content.
        return this.props.config.inline
          ? _react2.default.createElement('div', {
              id: this.id,
              className: this.props.className,
              dangerouslySetInnerHTML: { __html: this.props.content }
            })
          : _react2.default.createElement('textarea', {
              id: this.id,
              className: this.props.className,
              name: this.props.name,
              defaultValue: this.props.content
            });
      }
    },
    {
      key: '_init',
      value: function _init(config, content) {
        var _this2 = this;

        // Tear down any previous editor instance before re-initializing.
        if (this._isInit) {
          this._remove();
        }

        // hide the textarea that is me so that no one sees it
        (0, _reactDom.findDOMNode)(this).style.hidden = 'hidden';

        var setupCallback = config.setup;
        var hasSetupCallback = typeof setupCallback === 'function';

        config.selector = '#' + this.id;
        config.setup = function(editor) {
          // Bridge every TinyMCE event to its `onXxx` prop, if provided.
          EVENTS.forEach(function(eventType, index) {
            editor.on(eventType, function(e) {
              var handler = _this2.props[HANDLER_NAMES[index]];
              if (typeof handler === 'function') {
                // native DOM events don't have access to the editor so we pass it here
                handler(e, editor);
              }
            });
          });
          // need to set content here because the textarea will still have the
          // old `this.props.content`
          if (typeof content !== 'undefined') {
            editor.on('init', function() {
              editor.setContent(content);
            });
          }
          // Preserve the caller's own setup hook, running it last.
          if (hasSetupCallback) {
            setupCallback(editor);
          }
        };

        tinymce.init(config);

        (0, _reactDom.findDOMNode)(this).style.hidden = '';

        this._isInit = true;
      }
    },
    {
      key: '_remove',
      value: function _remove() {
        tinymce.EditorManager.execCommand('mceRemoveEditor', true, this.id);
        this._isInit = false;
      }
    }
  ]);

  return TinyMCE;
})(_react2.default.Component);
exports.default = TinyMCE;
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef SERVICES_WINDOW_MANAGER_FOCUS_RULES_H_
#define SERVICES_WINDOW_MANAGER_FOCUS_RULES_H_
#include "mojo/services/view_manager/public/cpp/types.h"
#include "mojo/services/view_manager/public/cpp/view.h"
namespace window_manager {
// Implemented by an object that establishes the rules about what can be
// focused or activated.
class FocusRules {
 public:
  virtual ~FocusRules() {}

  // All methods below are const queries; implementations supply the policy.

  // Returns true if the children of |window| can be activated.
  virtual bool SupportsChildActivation(mojo::View* window) const = 0;

  // Returns true if |view| is a toplevel view. Whether or not a view
  // is considered toplevel is determined by a similar set of rules that
  // govern activation and focus. Not all toplevel views are activatable,
  // call CanActivateView() to determine if a view can be activated.
  virtual bool IsToplevelView(mojo::View* view) const = 0;
  // Returns true if |view| can be activated or focused.
  virtual bool CanActivateView(mojo::View* view) const = 0;
  // For CanFocusView(), null is supported, because null is a valid focusable
  // view (in the case of clearing focus).
  virtual bool CanFocusView(mojo::View* view) const = 0;

  // Returns the toplevel view containing |view|. Not all toplevel views
  // are activatable, call GetActivatableView() instead to return the
  // activatable view, which might be in a different hierarchy.
  // Will return null if |view| is not contained by a view considered to be
  // a toplevel view.
  virtual mojo::View* GetToplevelView(mojo::View* view) const = 0;
  // Returns the activatable or focusable view given an attempt to activate or
  // focus |view|. Some possible scenarios (not intended to be exhaustive):
  // - |view| is a child of a non-focusable view and so focus must be set
  //   according to rules defined by the delegate, e.g. to a parent.
  // - |view| is an activatable view that is the transient parent of a modal
  //   view, so attempts to activate |view| should result in the modal
  //   transient being activated instead.
  // These methods may return null if they are unable to find an activatable
  // or focusable view given |view|.
  virtual mojo::View* GetActivatableView(mojo::View* view) const = 0;
  virtual mojo::View* GetFocusableView(mojo::View* view) const = 0;

  // Returns the next view to activate in the event that |ignore| is no longer
  // activatable. This function is called when something is happening to
  // |ignore| that means it can no longer have focus or activation, including
  // but not limited to:
  // - it or its parent hierarchy is being hidden, or removed from the
  //   RootView.
  // - it is being destroyed.
  // - it is being explicitly deactivated.
  // |ignore| cannot be null.
  virtual mojo::View* GetNextActivatableView(mojo::View* ignore) const = 0;
};
} // namespace window_manager
#endif // SERVICES_WINDOW_MANAGER_FOCUS_RULES_H_
|
#include "urg3d_sensor.h"
#include <string.h>
#if defined(URG3D_WINDOWS_OS)
#else
#include <unistd.h>
#endif
#include <stdio.h>
#include <stdlib.h>
/* Receive buffers shared with the main loop below. This example is
 * single-threaded, so file-scope storage is acceptable here. */
urg3d_data_range_intensity_t data_range_intensity;
urg3d_data_ax_t ax_data;
urg3d_vssp_header_t header;
urg3d_range_header_t range_header;
urg3d_range_index_t range_index;
urg3d_ax_header_t ax_header;
char data[URG3D_MAX_RX_LENGTH];
int length_data;
/*
 * Example: connect to a URG3D sensor, stream range+intensity and
 * auxiliary data until CAPTURE_TIMES records arrive, then stop cleanly.
 */
int main(int argc, char *argv[])
{
    enum {
        CAPTURE_TIMES = 100 /*!< number of record to collect */
    };
    /* NOTE(review): the loop below breaks only when n > CAPTURE_TIMES, so
     * it actually collects CAPTURE_TIMES + 1 records — confirm intent. */
    int n = 0; /* records count */
    int ret = 0; /* operation return */
    const char* const device = "192.168.0.10"; /* device ip address */
    const long port = 10940; /* device port number. It is a fixed value */
    urg3d_t urg;
    /*
     * open the connection to the sensor
     */
    if((ret = urg3d_open(&urg, device, port)) < 0) {
        printf("error urg3d_open %d\n", ret);
#if defined(URG3D_MSC)
        getchar();
#endif
        return -1;
    } else {
        printf("open ok\n");
    }
    /*
     * request to start the range and intensity data
     */
    if((ret = urg3d_low_request_command(&urg, "DAT:ri=1\n")) < 0) {
        printf("error urg3d_low_request_command %d\n", ret);
        ret = urg3d_close(&urg);
#if defined(URG3D_MSC)
        getchar();
#endif
        return -1;
    } else {
        printf("send ok -> DAT:ri=1\n");
    }
    /*
     * request to start the auxiliary data
     */
    if((ret = urg3d_low_request_command(&urg, "DAT:ax=1\n")) < 0) {
        printf("error urg3d_low_request_command %d\n", ret);
        ret = urg3d_close(&urg);
#if defined(URG3D_MSC)
        getchar();
#endif
        return -1;
    } else {
        printf("send ok -> DAT:ax=1\n");
    }
    /*
     * start main loop
     */
    printf("start loop\n");
    while(1) {
        /*
         * check received data
         */
        if(urg3d_next_receive_ready(&urg)) {
            /*
             * pick up the data (non-blocking); each getter consumes one
             * pending record if its type matches
             */
            if(urg3d_low_get_ri(&urg, &header, &range_header, &range_index, &data_range_intensity) > 0) {
                printf("ri %d\n",n);
                n++;
            } else if(urg3d_low_get_ax(&urg, &header, &ax_header, &ax_data) > 0) {
                printf("ax %d\n",n);
                n++;
            } else if(urg3d_low_get_binary(&urg, &header, data, &length_data) > 0) {
                /*
                 * check error data; a leading non-'0' status aborts the loop
                 */
                if(strncmp(header.type, "ERR", 3) == 0 || strncmp(header.type, "_er", 3) == 0) {
                    printf("error %c%c%c %s", header.status[0], header.status[1], header.status[2], data);
                    if(header.status[0] != '0'){
                        break;
                    }
                }
            }
        } else {
            /* nothing pending: yield ~10ms before polling again */
#ifdef URG3D_WINDOWS_OS
            Sleep(10);
#else
            usleep(10000);
#endif
        }
        /*
         * check capture times
         */
        if(n > CAPTURE_TIMES) {
            break;
        }
    }
    /*
     * request to stop the range and intensity data
     */
    if((ret = urg3d_low_request_command(&urg, "DAT:ri=0\n")) < 0) {
        printf("error urg3d_low_request_command %d\n", ret);
        ret = urg3d_close(&urg);
#if defined(URG3D_MSC)
        getchar();
#endif
        return -1;
    } else {
        printf("send ok -> DAT:ri=0\n");
    }
    /*
     * request to stop the auxiliary data
     */
    if((ret = urg3d_low_request_command(&urg, "DAT:ax=0\n")) < 0) {
        printf("error urg3d_low_request_command %d\n", ret);
        ret = urg3d_close(&urg);
#if defined(URG3D_MSC)
        getchar();
#endif
        return -1;
    } else {
        printf("send ok -> DAT:ax=0\n");
    }
    /*
     * close the connection to the sensor
     */
    if((ret = urg3d_close(&urg)) < 0) {
        printf("error urg3d_close %d\n", ret);
#if defined(URG3D_MSC)
        getchar();
#endif
        return -1;
    } else {
        printf("close ok\n");
    }
#if defined(URG3D_MSC)
    getchar();
#endif
    return 0;
}
|
// Copyright (c) 2022 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml
package http.perf
import dbutils.ConnectionPool
import http.dbbackend.{DbStartupMode, JdbcConfig}
import testing.oracle.OracleAround
import scala.util.{Success, Try}
private[this] final class OracleRunner {

  type St = OracleAround.RichOracleUser

  // Behaviour toggles, read once from the process environment.
  private val defaultUser = "ORACLE_USER"
  private val retainData = sys.env.get("RETAIN_DATA").exists(_ equalsIgnoreCase "true")
  private val useDefaultUser = sys.env.get("USE_DEFAULT_USER").exists(_ equalsIgnoreCase "true")

  /** Provisions the Oracle user the benchmark runs against: either the fixed
    * default user (reused across runs) or a fresh randomly-named one.
    */
  def start(): Try[St] = Try {
    if (useDefaultUser) OracleAround.createOrReuseUser(defaultUser)
    else OracleAround.createNewUniqueRandomUser()
  }

  /** Builds the JSON-API JDBC configuration for the given provisioned user.
    * When data is retained across runs the schema is only created if missing.
    */
  def jdbcConfig(user: St): JdbcConfig = {
    import DbStartupMode._
    val startup: DbStartupMode =
      if (retainData) CreateIfNeededAndStart else CreateAndStart
    val underlying = dbutils.JdbcConfig(
      "oracle.jdbc.OracleDriver",
      user.jdbcUrlWithoutCredentials,
      user.oracleUser.name,
      user.oracleUser.pwd,
      ConnectionPool.PoolSize.Production,
    )
    JdbcConfig(underlying, startMode = startup)
  }

  /** Drops the user unless data retention was requested via RETAIN_DATA. */
  def stop(user: St): Try[Unit] =
    if (retainData) Success(()) else Try(user.drop())
}
|
---
title: "Project: SAM-REST-API"
date: 2020-06-21
tags: [awsSAM]
header:
image: "/images/Toronto.jpg"
excerpt: "SAM, REST-API"
---
[REST-API created using AWS SAM and python](https://github.com/cheonu/REST-APIv1)
Created a REST API using AWS SAM and Python. Tooling used includes SAM Build, SAM Deploy, and Python.
|
<?php
// Front-page view: registers the frontend asset bundle and renders the
// site settings (title, tag line, contact details) plus an item list.
use frontend\assets\FrontendAsset;
use common\models\Settings;
$bundle = FrontendAsset::register($this);
/* @var $this yii\web\View */
$this->title = Yii::$app->name;
// NOTE(review): assumes the Settings row with ID 1 always exists; a missing
// row would make the property accesses below fail on null -- verify.
$site_setting=Settings::findOne(1);
?>
<script type="text/javascript">
// Asynchronously loads the CanvasFlip visual-feedback plugin, appending a
// timestamp query string so the browser never serves a cached copy.
(function (g) {
var s = document.createElement('script'),
t = document.getElementsByTagName('script')[0];
s.async = true;
s.src = g + '?v=' + (new Date()).getTime();
s.charset = 'UTF-8';
s.setAttribute('crossorigin', '*');
t.parentNode.insertBefore(s, t);
})('https://www.canvasflip.com/plugins/vi/vi.min.js');
</script>
<div class="row">
<div class="col-md-2"></div>
<div class="col-md-10">
<div class="isth">
<b><?php echo $site_setting->title; ?></b><br>
<?php echo $site_setting->tag_line; ?><br>
<b><!-- <div class="barh1"> -->LANDON SINGAPORE<!-- </div> --></b><br>
<b><!-- <div class="barh1"> -->STOCKHOLM<!-- </div> --></b></div>
<?php echo $site_setting->short_description; ?>
<div class="cc3"><b><hr></b>
<b><?php echo $site_setting->email; ?> <br><?php echo $site_setting->phone; ?></b>
<b><hr></b>
<div> <?php echo $site_setting->address; ?></div>
<b><hr></b></div>
</div></div>
<br>
<?php // Render the item list; the pager is hidden when everything fits one page. ?>
<?php echo \yii\widgets\ListView::widget([
'dataProvider' => $dataProvider,
'pager' => [
'hideOnSinglePage' => true,
],
'summary'=>'',
'itemView' => '_item'
])?>
|
package othello;
import othello.exception.CantPutException;
import othello.utils.Arrow;
import othello.utils.Coordinate;
/**
 * An 8x8 Othello (Reversi) board. Squares are held in a Disk[8][8] grid
 * where {@code null} means the square is empty. The board starts with the
 * standard four-disk centre configuration.
 */
public class Board {
    // board[x][y]; null entries are empty squares.
    private Disk[][] board = new Disk[8][8];
    // Back-reference to the owning game, set via setOthello().
    private Othello othello = null;

    public Board() {
        init();
    }

    /** Clears the grid and places the four standard starting disks. */
    private void init() {
        this.board = new Disk[8][8];
        this.board[3][3] = new Disk(new Coordinate(3, 3), Disk.WHITE, this);
        this.board[4][4] = new Disk(new Coordinate(4, 4), Disk.WHITE, this);
        this.board[3][4] = new Disk(new Coordinate(3, 4), Disk.BLACK, this);
        this.board[4][3] = new Disk(new Coordinate(4, 3), Disk.BLACK, this);
    }

    public void setOthello(Othello othello) {
        this.othello = othello;
    }

    /**
     * Places a disk of the given colour at {@code coordinate} and asks each
     * differently-coloured neighbour to flip along its direction.
     *
     * @param coordinate target square
     * @param turn       colour of the disk to place (Disk.BLACK / Disk.WHITE)
     * @return the newly placed disk
     * @throws CantPutException if the move is not legal for that colour
     */
    public Disk setDisk(Coordinate coordinate, int turn) throws CantPutException {
        Disk newDisk = new Disk(coordinate, turn, this);
        if (!canPut(coordinate, turn))
            throw new CantPutException();
        board[coordinate.x][coordinate.y] = newDisk;
        for (Arrow arrow : Arrow.values()) {
            try {
                Coordinate shiftCoordinate = coordinate.shift(arrow);
                Disk arrowDisk = this.getDisk(shiftCoordinate);
                if (arrowDisk.state == newDisk.state) continue;
                arrowDisk.maybeTurn(arrow);
            } catch (Exception e) {
                // Off-board neighbours and empty squares (null) end up here;
                // both simply mean "nothing to flip in this direction".
                continue;
            }
        }
        return newDisk;
    }

    /** Returns the disk at {@code coordinate}, or null if the square is empty. */
    public Disk getDisk(Coordinate coordinate) {
        return board[coordinate.x][coordinate.y];
    }

    public Disk[][] getBoard() {
        return board;
    }

    /** Prints the board to stdout, one row per line (黒 = black, 白 = white). */
    public void printBoard() {
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                if (board[j][i] == null)
                    System.out.print("  ");
                else if (board[j][i].state == Disk.BLACK)
                    System.out.print("黒");
                else
                    System.out.print("白");
            }
            System.out.println();
        }
    }

    /**
     * Counts the disks of the given colour currently on the board.
     * Empty squares are skipped with an explicit null check rather than the
     * previous catch of NullPointerException (same result, no exception-based
     * control flow).
     */
    public int count(int color) {
        int total = 0;
        for (Disk[] disks : board) {
            for (Disk disk : disks) {
                if (disk != null && disk.state == color)
                    total++;
            }
        }
        return total;
    }

    /** Returns true if the given colour has at least one legal move. */
    public boolean canPut(int color) {
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                Coordinate coordinate = new Coordinate(i, j);
                if (canPut(coordinate, color))
                    return true;
            }
        }
        return false;
    }

    /**
     * Returns true if placing {@code color} at {@code coordinate} is legal:
     * the square is empty and, in some direction, a run of opposing disks is
     * terminated by a disk of the placing colour.
     *
     * NOTE(review): walking off the board or onto an empty square raises an
     * exception (array bounds / null dereference) which the catch treats as
     * "no capture in this direction" -- intentional, but fragile; verify
     * before restructuring.
     */
    public boolean canPut(Coordinate coordinate, int color) {
        if (getDisk(coordinate) != null)
            return false;
        for (Arrow arrow : Arrow.values()) {
            try {
                Coordinate shiftCoordinate = coordinate.shift(arrow);
                Disk disk = this.getDisk(shiftCoordinate);
                if (disk.state != color)
                    while (true) {
                        shiftCoordinate = shiftCoordinate.shift(arrow);
                        disk = this.getDisk(shiftCoordinate);
                        if (disk.state == color)
                            return true;
                    }
            } catch (Exception e) {
                continue;
            }
        }
        return false;
    }

    /** Restores the starting position. */
    public void reset() {
        init();
    }
}
|
#!/bin/bash
#
# Copyright (c) University of Luxembourg 2021.
# Created by Oscar Eduardo CORNEJO OLIVARES, oscar.cornejo@uni.lu, SnT, 2021.
#
# Compiles every mutant source file found under SRC_MUTANTS against the
# project, recording a SHA-512 hash of each successfully built binary
# (hashes.csv) and listing mutants that fail to compile (notcompiled.csv).
# Equivalent mutants can later be detected by comparing hashes against the
# original build's hash (original_hash).
#
# Positional arguments:
#   $1 PROJ                 project root directory
#   $2 PROJ_SRC             project source directory
#   $3 PROJ_BUILD           directory containing the build output
#   $4 PROJ_ORIGINAL_BUILD  path to the unmutated reference binary
#   $5 FLAG                 compile flag substituted into the makefile template
#   $6 SRC_MUTANTS          root directory of mutant source files
#   $7 COMPILED             name of the compiled artifact inside PROJ_BUILD
#   $8 EXEC_DIR             output directory for logs and CSV files
#
# NOTE(review): ORIGINAL_MAKEFILE and TCE_COMPILE_CMD are used but never set
# here -- they are presumably exported by the calling environment; verify.
PROJ=$1
PROJ_SRC=$2
PROJ_BUILD=$3
PROJ_ORIGINAL_BUILD=$4
FLAG=$5
SRC_MUTANTS=$6
COMPILED=$7
EXEC_DIR=$8
LOGFILE=$EXEC_DIR/main.log
mkdir -p $EXEC_DIR
touch $LOGFILE
# Build the reference (unmutated) binary once if it does not exist yet.
if [ ! -f "$PROJ_ORIGINAL_BUILD" ]; then
    sed "s/TCE/$FLAG/g" ${ORIGINAL_MAKEFILE}.template > ${ORIGINAL_MAKEFILE}
    cat $ORIGINAL_MAKEFILE
    cd $PROJ
    eval "${TCE_COMPILE_CMD[*]}" 2>&1 | tee -a $LOGFILE
    mkdir -p original_build
    cp $PROJ_BUILD/$COMPILED $PROJ/original_build
fi
# Record the hash of the reference binary for later equivalence checks.
ORIGINAL_HASH=$EXEC_DIR/original_hash
touch $ORIGINAL_HASH
original_hash=$(sha512sum -b $PROJ_ORIGINAL_BUILD | awk -F' ' '{print $1}')
echo original hash is $original_hash
echo "${original_hash}" > $ORIGINAL_HASH
HASHES=$EXEC_DIR/hashes.csv
NOT_COMPILED=$EXEC_DIR/notcompiled.csv
touch $HASHES $NOT_COMPILED
shopt -s extglob
# Allow Ctrl-C to abort the whole loop, not just the current command.
trap "exit" INT
count=0
for i in $(find $SRC_MUTANTS \( -name '*.c' -or -name '*.cpp' -or -name '*.cc' \));do
    start_time=$(($(date +%s%N)/1000000))
    # Derive the mutant's name and the path of the original file it replaces
    # from the mutant's location under SRC_MUTANTS.
    file_wo_opt=${i//$SRC_MUTANTS/}
    file_extension="${i##*.}"
    mutant_name="$(basename -- ${file_wo_opt%.*})"
    file_wo_mut_end=.${file_wo_opt%%.*}.${file_extension}
    filename="$(basename -- ${file_wo_mut_end%.*})"
    filename_orig=$(echo $file_wo_mut_end | sed -e "s/\(.*\)$filename\//\1/g" | sed "s:./::")
    echo "------------------------------------" 2>&1 | tee -a $LOGFILE
    echo "Mutant: "$i 2>&1 | tee -a $LOGFILE
    cd $PROJ
    # replacing mutant by original source
    echo cp $filename_orig $filename_orig.orig 2>&1 | tee -a $LOGFILE
    cp $filename_orig $filename_orig.orig
    echo cp $i $filename_orig 2>&1 | tee -a $LOGFILE
    cp $i $filename_orig
    eval "${TCE_COMPILE_CMD[*]}" 2>&1 | tee -a $LOGFILE
    # PIPESTATUS[0] is the compiler's exit code (tee would otherwise mask it).
    RET_CODE=${PIPESTATUS[0]}
    if [ $RET_CODE -gt 0 ]; then
        echo "Error: mutant could not be compiled"
        echo $mutant_name" not compiled" 2>&1 | tee -a $LOGFILE
        echo "${mutant_name};${filename_orig}" >> $NOT_COMPILED
    else
        echo "Success: mutant compiled"
        echo $mutant_name" compiled" 2>&1 | tee -a $LOGFILE
        hash=$(sha512sum -b $PROJ_BUILD/$COMPILED | awk -F' ' '{print $1}')
        echo "${mutant_name};${filename_orig};${hash}" >> $HASHES
    fi
    echo "Replacing original source "$i 2>&1 | tee -a $LOGFILE
    cd $PROJ
    # Restore the pristine source; touch forces a rebuild next iteration.
    mv $filename_orig.orig $filename_orig
    touch $filename_orig
    # Safety cap on iterations (effectively unlimited at 10,000,000).
    if [ $count -eq 10000000 ];then
        break
    else
        count=$((count+1))
    fi
    end_time=$(($(date +%s%N)/1000000))
    elapsed="$(($end_time-$start_time))"
    echo "elapsed time $elapsed [ms]" | tee -a $LOGFILE
done
|
package model
import java.time.ZoneOffset
import implicits.Dates.jodaToJavaInstant
import com.gu.contentapi.client.model.v1.{
Asset,
AssetFields,
AssetType,
ElementType,
Content => ApiContent,
Element => ApiElement,
}
import com.gu.contentapi.client.utils.CapiModelEnrichment.RichOffsetDateTime
import contentapi.FixtureTemplates
import org.joda.time.DateTime
import org.scalatest.{FlatSpec, Matchers}
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
/** Tests for picture selection logic on content Elements: choosing the main
  * picture's largest crop, and trail-picture aspect-ratio filtering.
  */
class ElementsTest extends FlatSpec with Matchers with GuiceOneAppPerSuite {
  "Elements" should "find the biggest crop of the main picture" in {
    val offsetDate = jodaToJavaInstant(new DateTime()).atOffset(ZoneOffset.UTC)
    val images: Elements = Content(
      ApiContent(
        id = "foo/2012/jan/07/bar",
        sectionId = None,
        sectionName = None,
        webPublicationDate = Some(offsetDate.toCapiDateTime),
        webTitle = "Some article",
        webUrl = "http://www.guardian.co.uk/foo/2012/jan/07/bar",
        apiUrl = "http://content.guardianapis.com/foo/2012/jan/07/bar",
        elements = Some(
          List(
            image("test-image-0", "main", 0, List(asset("smaller picture 1", 50), asset("biggest picture 1", 100))),
            image("test-image-1", "main", 1, "a single picture 2", 200),
          ),
        ),
        fields = None,
      ),
    ).elements
    // The largest crop (width 100) carries the caption we assert on.
    images.mainPicture.flatMap(_.images.largestImage.flatMap(_.caption)) should be(Some("biggest picture 1"))
  }

  /** Builds a "thumbnail" image element with one asset per (width, height). */
  def thumbnailFixture(crops: (Int, Int)*): ApiElement =
    FixtureTemplates.emptyElement.copy(
      `type` = ElementType.Image,
      relation = "thumbnail",
      assets = crops.toList map {
        case (width, height) =>
          FixtureTemplates.emptyAsset.copy(
            `type` = AssetType.Image,
            mimeType = Some("image/jpeg"),
            typeData = Some(
              AssetFields(
                width = Some(width),
                height = Some(height),
              ),
            ),
          )
      },
    )

  "trailPicture" should "find an asset with an aspect ratio within 1% of the desired aspect ratio" in {
    // 504x300 is within 1% of the 5:3 target ratio.
    val theImage = thumbnailFixture((504, 300))
    Content(
      FixtureTemplates.emptyApiContent.copy(
        elements = Some(List(theImage)),
      ),
    ).trail.trailPicture
      .map(_.allImages.headOption.exists(image => image.width == 504 && image.height == 300)) shouldBe Some(true)
  }

  it should "reject images more than 1% from the desired aspect ratio" in {
    // 506x300 deviates more than 1% from 5:3, so no trail picture is found.
    val theImage = thumbnailFixture((506, 300))
    Content(
      FixtureTemplates.emptyApiContent.copy(
        elements = Some(List(theImage)),
      ),
    ).trail.trailPicture shouldEqual None
  }

  it should "not die if an image has 0 height or width" in {
    Content(
      FixtureTemplates.emptyApiContent.copy(
        elements = Some(
          List(
            thumbnailFixture((0, 300), (500, 0), (500, 300)),
          ),
        ),
      ),
    ).trail.trailPicture shouldBe defined
  }

  // NOTE(review): both overloads ignore their `index` parameter and pass a
  // hard-coded Some(0) to ApiElement -- possibly intentional for these
  // fixtures, but verify against the ApiElement field it populates.
  private def image(id: String, relation: String, index: Int, caption: String, width: Int): ApiElement = {
    ApiElement(id, relation, ElementType.Image, Some(0), List(asset(caption, width)))
  }

  private def image(id: String, relation: String, index: Int, assets: List[Asset]): ApiElement = {
    ApiElement(id, relation, ElementType.Image, Some(0), assets)
  }

  /** Builds a minimal image asset with the given caption and width. */
  private def asset(caption: String, width: Int): Asset = {
    Asset(
      AssetType.Image,
      Some("image/jpeg"),
      Some("http://www.foo.com/bar"),
      Some(AssetFields(caption = Some(caption), width = Some(width))),
    )
  }
}
|
// The MIT License (MIT)
// Copyright (c) 2015 Y. T. Chung <zonyitoo@gmail.com>
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//! BSON, short for Binary JSON, is a binary-encoded serialization of JSON-like documents.
//! Like JSON, BSON supports the embedding of documents and arrays within other documents
//! and arrays. BSON also contains extensions that allow representation of data types that
//! are not part of the JSON spec. For example, BSON has a datetime type and a binary data type.
//!
//! ```text
//! // JSON equivalent
//! {"hello": "world"}
//!
//! // BSON encoding
//! \x16\x00\x00\x00 // total document size
//! \x02 // 0x02 = type String
//! hello\x00 // field name
//! \x06\x00\x00\x00world\x00 // field value
//! \x00 // 0x00 = type EOO ('end of object')
//! ```
//!
//! BSON is the primary data representation for [MongoDB](https://www.mongodb.com/), and this crate is used in the
//! [`mongodb`](https://docs.rs/mongodb/0.10.0/mongodb/) driver crate in its API and implementation.
//!
//! For more information about BSON itself, see [bsonspec.org](http://bsonspec.org).
//!
//! ## BSON values
//!
//! Many different types can be represented as a BSON value, including 32-bit and 64-bit signed
//! integers, 64 bit floating point numbers, strings, datetimes, embedded documents, and more. To
//! see a full list of possible BSON values, see the [BSON specification](http://bsonspec.org/spec.html). The various
//! possible BSON values are modeled in this crate by the [`Bson`](enum.Bson.html) enum.
//!
//! ### Creating [`Bson`](enum.Bson.html) instances
//!
//! [`Bson`](enum.Bson.html) values can be instantiated directly or via the
//! [`bson!`](macro.bson.html) macro:
//!
//! ```rust
//! # use bson::{bson, Bson};
//! let string = Bson::String("hello world".to_string());
//! let int = Bson::Int32(5);
//! let array = Bson::Array(vec![Bson::Int32(5), Bson::Boolean(false)]);
//!
//! let string: Bson = "hello world".into();
//! let int: Bson = 5i32.into();
//!
//! let string = bson!("hello world");
//! let int = bson!(5);
//! let array = bson!([5, false]);
//! ```
//! [`bson!`](macro.bson.html) supports both array and object literals, and it automatically
//! converts any values specified to [`Bson`](enum.Bson.html), provided they are `Into<Bson>`.
//!
//! ### [`Bson`](enum.Bson.html) value unwrapping
//!
//! [`Bson`](enum.Bson.html) has a number of helper methods for accessing the underlying native Rust
//! types. These helpers can be useful in circumstances in which the specific type of a BSON value
//! is known ahead of time.
//!
//! e.g.:
//! ```rust
//! # use bson::{bson, Bson};
//! let value = Bson::Int32(5);
//! let int = value.as_i32(); // Some(5)
//! let bool = value.as_bool(); // None
//!
//! let value = bson!([true]);
//! let array = value.as_array(); // Some(&Vec<Bson>)
//! ```
//!
//! ## BSON documents
//!
//! BSON documents are ordered maps of UTF-8 encoded strings to BSON values. They are logically
//! similar to JSON objects in that they can contain subdocuments, arrays, and values of several
//! different types. This crate models BSON documents via the
//! [`Document`](document/struct.Document.html) struct.
//!
//! ### Creating [`Document`](document/struct.Document.html)s
//!
//! [`Document`](document/struct.Document.html)s can be created directly either from a byte
//! reader containing BSON data or via the `doc!` macro:
//! ```rust
//! # use bson::{doc, Document};
//! # use std::io::Read;
//! let mut bytes = hex::decode("0C0000001069000100000000").unwrap();
//! let doc = Document::from_reader(&mut bytes.as_slice()).unwrap(); // { "i": 1 }
//!
//! let doc = doc! {
//! "hello": "world",
//! "int": 5,
//! "subdoc": { "cat": true },
//! };
//! ```
//! [`doc!`](macro.doc.html) works similarly to [`bson!`](macro.bson.html), except that it always
//! returns a [`Document`](document/struct.Document.html) rather than a [`Bson`](enum.Bson.html).
//!
//! ### [`Document`](document/struct.Document.html) member access
//!
//! [`Document`](document/struct.Document.html) has a number of methods on it to facilitate member
//! access:
//!
//! ```rust
//! # use bson::doc;
//! let doc = doc! {
//! "string": "string",
//! "bool": true,
//! "i32": 5,
//! "doc": { "x": true },
//! };
//!
//! // attempt get values as untyped Bson
//! let none = doc.get("asdfadsf"); // None
//! let value = doc.get("string"); // Some(&Bson::String("string"))
//!
//! // attempt to get values with explicit typing
//! let string = doc.get_str("string"); // Ok("string")
//! let subdoc = doc.get_document("doc"); // Ok(Document({ "x": true }))
//! let error = doc.get_i64("i32"); // Err(...)
//! ```
//!
//! ## Modeling BSON with strongly typed data structures
//!
//! While it is possible to work with documents and BSON values directly, it will often introduce a
//! lot of boilerplate for verifying the necessary keys are present and their values are the correct
//! types. [`serde`](https://serde.rs/) provides a powerful way of mapping BSON data into Rust data structures largely
//! automatically, removing the need for all that boilerplate.
//!
//! e.g.:
//! ```rust
//! # use serde::{Deserialize, Serialize};
//! # use bson::{bson, Bson};
//! #[derive(Serialize, Deserialize)]
//! struct Person {
//! name: String,
//! age: i32,
//! phones: Vec<String>,
//! }
//!
//! // Some BSON input data as a `Bson`.
//! let bson_data: Bson = bson!({
//! "name": "John Doe",
//! "age": 43,
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! });
//!
//! // Deserialize the Person struct from the BSON data, automatically
//! // verifying that the necessary keys are present and that they are of
//! // the correct types.
//! let mut person: Person = bson::from_bson(bson_data).unwrap();
//!
//! // Do things just like with any other Rust data structure.
//! println!("Redacting {}'s record.", person.name);
//! person.name = "REDACTED".to_string();
//!
//! // Get a serialized version of the input data as a `Bson`.
//! let redacted_bson = bson::to_bson(&person).unwrap();
//! ```
//!
//! Any types that implement `Serialize` and `Deserialize` can be used in this way. Doing so helps
//! separate the "business logic" that operates over the data from the (de)serialization logic that
//! translates the data to/from its serialized form. This can lead to more clear and concise code
//! that is also less error prone.
#![allow(clippy::cognitive_complexity)]
// Re-export the crate's primary API at the root so users can write
// `bson::Bson`, `bson::Document`, `bson::from_bson`, etc. directly.
pub use self::{
    bson::{
        Array,
        Binary,
        Bson,
        DateTime,
        DbPointer,
        Document,
        JavaScriptCodeWithScope,
        Regex,
        Timestamp,
    },
    de::{from_bson, from_document, Deserializer},
    decimal128::Decimal128,
    ser::{to_bson, to_document, Serializer},
};
// `macros` must come first so `bson!`/`doc!` are visible to sibling modules.
#[macro_use]
mod macros;
mod bson;
pub mod compat;
pub mod de;
pub mod decimal128;
pub mod document;
pub mod extjson;
pub mod oid;
pub mod ser;
pub mod spec;
#[cfg(test)]
mod tests;
|
module Fibon.Analyse.Analysis (
Analysis(..)
, runAnalysis
, computeRows
, Normalize(..)
, NormMethod
)
where
import Data.List
import Data.Maybe
import qualified Data.Map as M
import Control.Monad.Error
import Fibon.Result
import Fibon.Analyse.AnalysisRoutines
import Fibon.Analyse.Parse
import Fibon.Analyse.Result
import Fibon.Analyse.Metrics
import Fibon.Analyse.Statistics
import Fibon.Analyse.Tables
import qualified Data.Vector.Unboxed as V
-- | Parse a results file and build one 'ResultColumn' per result label
-- found in it. Returns 'Nothing' when the file cannot be parsed.
runAnalysis :: Analysis a -> FilePath -> IO (Maybe [ResultColumn a])
runAnalysis analysis file = do
  fibonResults <- parse file
  case fibonResults of
    Nothing -> return Nothing
    Just rs -> do x <- createResultColumns analysis rs
                  return (Just x)
  where
    -- Files ending in ".SHOW" hold Show-format results; anything else is
    -- assumed to be in the binary result format.
    parse f | ".SHOW" `isSuffixOf` f = parseShowFibonResults f
            | otherwise = parseBinaryFibonResults f
-- | Analyse every (label, results) pair in the map, one column per label.
createResultColumns :: Analysis a
                    -> M.Map ResultLabel [FibonResult]
                    -> IO [ResultColumn a]
createResultColumns analysis fibonResults =
  mapM (analyseResults analysis) (M.toList fibonResults)
-- | Analyse all results for a single label, keyed by benchmark name
-- (the portion of the result's name before the first '-').
analyseResults :: Analysis a
               -> (ResultLabel, [FibonResult])
               -> IO (ResultColumn a)
analyseResults analysis (resultName, fibonResults) = do
  ars <- mapM (analyseResult analysis) fibonResults
  return $ ResultColumn resultName (resMap ars)
  where
    resMap ars = foldr create M.empty (zip ars fibonResults)
    create (ar,fr) m = M.insert (benchNameOnly fr) ar m
    benchNameOnly fr = takeWhile (/= '-') (benchName fr)
-- | Run the main analysis on one result, plus the extra analysis when every
-- run detail carries parsable extra stats (partial extras yield 'Nothing').
analyseResult ::
     Analysis a
  -> FibonResult
  -> IO (AnalyseResult a) -- ^ final result
analyseResult analysis fibonR = do
  fibonS <- (fibonAnalysis analysis) fibonR
  extraS <- case mbExtras of
    Nothing -> return Nothing
    Just [] -> return Nothing
    Just es -> return . Just =<< (extraAnalysis analysis) es
  return (AnalyseResult fibonS extraS)
  where
    mbExtras = sequence $ map (extraP.runStats) (details.runData $ fibonR)
    extraP = extraParser analysis
-- | Functions for normalizing and computing summaries of results
--
--
-- A table row: its label plus one performance datum per column.
type RowData = (RowName, [PerfData])
type RowName = String
type TableError = String
-- Row/column computations may fail with a textual error.
type PerfMonad = Either TableError
-- Chooses, for a given result column, how it should be normalized.
type NormMethod a = ResultColumn a -> Normalize a
-- | How to normalize a column: as a percent of a base column, as a ratio
-- against a base column, or not at all (raw values).
data Normalize a =
    NormPercent (ResultColumn a)
  | NormRatio (ResultColumn a)
  | NormNone
-- | Compute one data row per benchmark plus summary rows (min, geometric
-- mean, arithmetic mean, max) computed column-wise over the data rows.
computeRows :: [(Normalize a, ResultColumn a)]
            -> [BenchName]
            -> TableSpec a
            -> Either TableError ([RowData], [RowData])
computeRows resultColumns benchs colSpecs = do
  rows <- mapM (computeOneRow resultColumns colSpecs) benchs
  let colData = transpose $ map snd rows
      doSumm how = mapM (summarize how) colData
  minRow <- doSumm Min
  meanRow <- doSumm GeoMean
  arithRow <- doSumm ArithMean
  maxRow <- doSumm Max
  let sumRows = [
          ("min", minRow)
        , ("geomean", meanRow)
        , ("arithmean", arithRow)
        , ("max", maxRow)
        ]
  return (rows, sumRows)
-- | Compute one benchmark's row: every column spec crossed with every
-- result column, concatenated in spec-major order.
computeOneRow :: [(Normalize a, ResultColumn a)]
              -> [ColSpec a]
              -> BenchName
              -> PerfMonad RowData
computeOneRow resultColumns colSpecs bench = do
  row <- mapM (\spec ->
                mapM (computeOneColumn bench spec) resultColumns
              ) colSpecs
  return (bench, concat row)
-- | Extract one metric for one benchmark from one result column, applying
-- the column's normalization. Missing data yields 'NoResult' rather than
-- an error.
computeOneColumn :: BenchName
                 -> ColSpec a
                 -> (Normalize a, ResultColumn a)
                 -> PerfMonad PerfData
computeOneColumn bench (ColSpec _ metric) (normType, resultColumn) =
  maybe (return NoResult) doNormalize (getRawPerf resultColumn)
  where
    doNormalize peak =
      case normType of
        NormPercent base -> normToBase base normalizePercent
        NormRatio base -> normToBase base normalizeRatio
        NormNone -> return (mkRaw peak)
      where
        mkRaw = Basic . Raw
        mkNorm = Basic . Norm
        normToBase base normFun = maybe (return NoResult)
                                        (\b -> mkNorm `liftM` normFun b peak)
                                        (getRawPerf base)
    getRawPerf rc = perf $ fmap metric ((M.lookup bench . results) rc)
-- Shape of a normalization: a conversion to Double plus base and peak values.
type NormFun a =(a -> Double) -> a -> a -> NormPerf
-- | Normalize peak as a percentage of base: (peak / base) * 100.
normalizePercent :: RawPerf -> RawPerf -> PerfMonad NormPerf
normalizePercent = normalize normP
-- | Normalize peak as a speedup-style ratio: base / peak.
normalizeRatio :: RawPerf -> RawPerf -> PerfMonad NormPerf
normalizeRatio = normalize normR
-- | Apply a normalization function, requiring base and peak to be the same
-- kind of measurement (both times or both sizes); mixing them is an error.
normalize :: NormFun RawPerf
          -> RawPerf
          -> RawPerf
          -> PerfMonad NormPerf
normalize n base@(RawTime _) peak@(RawTime _) =
  return(n rawPerfToDouble base peak)
normalize n base@(RawSize _) peak@(RawSize _) =
  return(n rawPerfToDouble base peak)
normalize _ _ _ = throwError "Can not normalize a size by time"
normP :: NormFun a
normP = norm Percent (\base peak -> (peak / base) * 100)
normR :: NormFun a
normR = norm Ratio (\base peak -> (base / peak))
-- TODO: use the intervals to compute the resulting interval
-- | Build a 'NormPerf' from base and peak via the given combining function;
-- the result is a point estimate (stddev fixed at zero, see TODO above).
norm :: (Estimate Double -> NormPerf) -- ^ NormPerf constructor
     -> (Double -> Double -> Double) -- ^ Normalizing function
     -> (a -> Double) -- ^ Conversion to double
     -> a -> a -- ^ Values to normalize
     -> NormPerf
norm c f toDouble base peak =
  c (mkPointEstimate mkStddev (f (toDouble base) (toDouble peak)))
  where mkStddev = fromIntegral :: Int -> Double
-- | Summarize a column of performance data. The column must be homogeneous:
-- all raw values or all normalized values, never a mix.
summarize :: Summary -> [PerfData] -> PerfMonad PerfData
summarize how perfData =
  case (normData, rawData) of
    ([], []) -> return NoResult
    (nd, []) -> summarizeNorm how nd
    ([], rd) -> summarizeRaw how rd
    _ -> throwError "Mixed raw and norm results in column"
  where
    normData = getData (\p -> case p of Basic (Norm n) -> Just n ; _ -> Nothing)
    rawData = getData (\p -> case p of Basic (Raw r) -> Just r ; _ -> Nothing)
    getData f = (catMaybes . map f) perfData
-- | Summarize raw values, which must be all times or all sizes.
summarizeRaw :: Summary -> [RawPerf] -> PerfMonad PerfData
summarizeRaw how rawPerfs =
  case (isTime rawPerfs, isSize rawPerfs) of
    (True, _) -> summarizeRaw' how ExecTime RawTime rawPerfs
    (_, True) -> summarizeRaw' how (MemSize . round) RawSize rawPerfs
    _ -> throwError "Can only summarize column with time or size"
  where
    isTime = all (\r -> case r of RawTime _ -> True; RawSize _ -> False)
    isSize = all (\r -> case r of RawSize _ -> True; RawTime _ -> False)
-- | Shared worker for raw summaries: convert to doubles, compute the
-- statistic, then rebuild a 'RawPerf' via the supplied constructors.
summarizeRaw' :: Summary -- ^ what kind of summary
              -> (Double -> a) -- ^ rounding function
              -> (Estimate a -> RawPerf) -- ^ RawPerf constructor
              -> [RawPerf] -- ^ Performance numbers to summary
              -> PerfMonad PerfData
summarizeRaw' how roundFun makeRaw rawPerfs =
  return $ Summary how (Raw (makeRaw (fmap roundFun (computeSummary how vec))))
  where
    vec = V.fromList (map rawPerfToDouble rawPerfs)
-- | Summarize normalized values, which must be all percents or all ratios.
summarizeNorm :: Summary -> [NormPerf] -> PerfMonad PerfData
summarizeNorm how normPerfs =
  case (isPercent normPerfs, isRatio normPerfs) of
    (True, _) -> summarizeNorm' how Percent normPerfs
    (_, True) -> summarizeNorm' how Ratio normPerfs
    _ -> throwError "Can only summarize column with percent or ratio"
  where
    isPercent = all (\r -> case r of Percent _ -> True; Ratio _ -> False)
    isRatio = all (\r -> case r of Percent _ -> False; Ratio _ -> True)
-- | Shared worker for normalized summaries.
summarizeNorm' :: Summary
               -> (Estimate Double -> NormPerf)
               -> [NormPerf]
               -> PerfMonad PerfData
summarizeNorm' how makeNorm normPerfs =
  return $ Summary how (Norm (makeNorm (computeSummary how vec)))
  where
    vec = V.fromList (map normPerfToDouble normPerfs)
|
---
layout: post
title: 《广告算法》学习笔记
subtitle: 搜索广告
date: 2019-05-09
author: Ann
header-img: img/post-bg-coffee.jpeg
catalog: true
tags:
- 广告算法
- 学习笔记
- 机器学习
---
> 本章主要对搜索广告的业务进行介绍,搜索广告模块可以分为**广告检索、广告排序、流量分配**三大子模块。
<div class="mermaid">
graph LR;
A(搜索广告)-.->B[广告检索]
A-.->C[广告排序]
A-.->D[流量分配]
B -.-> E1[广告索引]
B -.-> E2[广告匹配]
C -.-> H[ML方法预估点击率]
E2 -.-> G[分词,查询纠错,查询扩张等...]
E1 -.-> F[键值对索引]
classDef className fill:#f9f,stroke:#333,stroke-width:4px;
class B,C,D className;
style A fill:#ccf,stroke:#f66,stroke-width:2px,stroke-dasharray: 5, 5
</div>
## 广告检索
>一般来说,广告检索需要经过三个步骤,**广告分析,关键字分析和相关性匹配**。
### 广告分析
- 生成倒排索引
广告的倒排索引指<竞价词,广告ID列表>这样的键值对。竞价词通常有广告商自行定义和广告平台生成两种方式。
- 抽取广告特征
### 查询分析(关键字分析)
+ 对于用户输入内容,根据其输入字符串的长短,有不同的处理方式。
+ 对于长串来说,重点在于**分词,提取关键词汇**。
+ 对于短串来说,重点在于**消除歧义**。常根据用户的行为分析和上下文搜索信息进行消歧处理。
+ 查询分析的重点:查询扩展(**<font color="red">扩大召回率</font>**)
+ 用来做查询扩展的技术包括,topic model、语意字典扩充、事先构造本体扩充以及通过query-ad链接分析查询相似度做关键字聚类的方法等等。
+ 相关性匹配:精确匹配/模糊匹配
### 广告排序
>通常,广告系统会按照ECPM(Effective Cost Per Mille,指的是每一千次展现可以获得的广告收入)降序排列广告候选集,然后将排序靠前的广告展现出来。
其中ECPM正比于点击率CTR
如何预测CTR?----逻辑回归LR
将广告特征处理为0,1向量作为输入,通过逻辑回归函数对其进行CTR预测。由于单个向量维度过高,为了防止过拟合,需要加入正则项。<font color="red">常用L1正则</font>
+ L1 VS L2
+ L2正则,可以防止得到单维绝对值过高的w,从而防止模型过拟合。L1则更加激进,不仅可以防止出现单维绝对值过高,还能使得大量特征的权重为0,从而起到特征选择的作用
+ 逻辑回归
+ 之前对极大似然法理解一直不深,贴个链接以后有需要的话推一下子~嘻嘻 [逻辑回归推导](https://www.jianshu.com/p/894bda167422)
相似度预估CTR方式
- 广告和查询字相似度
- 广告和查询关键字的共现词个数
- 广告和查询关键字的cosine距离、KL距离、编辑距离等
- 语义相似度
- 广告本身的特征,如广告的历史CTR、历史展现、广告对于商品的价格、广告的字数、广告所包含的词语等。
- 查询本身的特征,如查询关键字历史的广告CTR、历史展现、查询所包含的词语等。
- 相似广告的特征,比如与当前广告相似的广告的历史CTR等特征,以及其与当前广告的相似度。
- 相似查询的特征,比如与当前查询相似的查询的历史CTR等特征,以及其与当前查询的相似度。
对于LR来说,需要把连续的特征转化为0/1离散值,常用的方式有<font color="red">等距划分和等频划分</font>
### 评估标准
评估标准-->AUC
若正样本数为M个,负样本数为N个,计算AUC的开销为M*N。
样本集过多的情况下,可以通过排序的方式减少AUC计算复杂度。(这块没看懂 呜呜呜)
### 延伸
知乎上看到一个[广告计算中特征工程](https://zhuanlan.zhihu.com/p/27033340)的帖子,先mark一下~
## 广告主推荐工具
对于广告主来说,他们关注的重点就是**竞价词**,竞价词作为被推荐的关键词汇,不仅要保证其能准确定位,还要关注“推荐被采用以后的效果”。
关于竞价词的推荐,有**主动推荐**,和**被动推荐**两种方式。主动推荐指广告平台直接替广告商生成竞价词,被动推荐指广告商参与指定部分竞价词。
关于竞价词的匹配,一种可以叫做精确匹配,一种可以叫做模糊匹配。
如果要找到推荐给广告主的所有候选词,需要计算广告主跟候选词全集的相关度,然后排序,即找到<广告主,候选词>的所有打分,然后找到最高的N个。
1. 广告主到中间节点的边的归一化权重,中间节点到候选词的边的归一化权重。主要是文本相关性,计算相似度的方式如Cosine距离、欧式距离、KL距离、Jaccard系数、Dice系数、Pearson系数……等等。
2. 根据中间节点出度、入度等信息,计算中间节点的调整系数,结合第一步的相关性,计算出:广告主1,通过一个中间节点,到一个候选词的分数;即<广告主1,中间节点,候选词>的分数。
3. 根据<广告主1,中间节点,候选词>的数据,固定一个候选词(比如是候选词1),综合所有中间节点,计算所有的<广告主1,候选词1>分数。循环计算,直至算完所有的候选词集合,获得<广告主1,候选词>打分列表。
4. 根据分数排序,获得最前面的N个词。
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.events
import org.scalatest._
import SharedHelpers._
import refspec.RefSpec
/** Verifies that method-based suites report a TopOfMethod location in the
  * events they fire. The nested TestLocationSpec declares the expected
  * (class name, encoded method name) pairs and fixture test methods; the
  * inherited checkFun/allChecked machinery compares events against them.
  */
class LocationMethodSuiteProp extends MethodSuiteProp {
  test("Method suites should have correct TopOfMethod location in test events.") {
    forAll(examples) { suite =>
      val reporter = new EventRecordingReporter
      suite.run(None, Args(reporter, Stopper.default, Filter(), ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty))
      val eventList = reporter.eventsReceived
      eventList.foreach { event => suite.checkFun(event) }
      suite.allChecked
    }
  }

  type FixtureServices = TestLocationMethodServices

  def spec = new TestLocationSpec

  class TestLocationSpec extends RefSpec with FixtureServices {
    // Names below use Scala's encoded form for spaces ($u0020) because they
    // must match the JVM-level class/method names reported in the events.
    val suiteTypeName = "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec"
    val expectedStartingList = List(TestStartingPair("A Spec test succeed", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020succeed()"),
                                    TestStartingPair("A Spec test pending", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020pending()"),
                                    TestStartingPair("A Spec test cancel", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020cancel()"))
    val expectedResultList = List(TestResultPair(classOf[TestSucceeded], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020succeed()"),
                                  TestResultPair(classOf[TestPending], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020pending()"),
                                  TestResultPair(classOf[TestCanceled], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020cancel()"),
                                  TestResultPair(classOf[TestIgnored], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020ignore()"))
    val expectedScopeOpenedList = List(ScopeOpenedPair("A Spec", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$"))
    val expectedScopeClosedList = List(ScopeClosedPair("A Spec", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$"))
    // Fixture tests: one per outcome kind (success, pending, canceled, ignored).
    object `A Spec` {
      def `test succeed`: Unit = {
      }
      def `test pending`: Unit = {
        pending
      }
      def `test cancel`: Unit = {
        cancel()
      }
      @Ignore
      def `test ignore`: Unit = {
      }
    }
  }

  def junit3Suite = new TestLocationMethodJUnit3Suite

  def junitSuite = new TestLocationMethodJUnitSuite

  def testngSuite = new TestLocationMethodTestNGSuite
}
|
# frozen_string_literal: true

# Entry point for the simple_policy gem: pulls in the gem version constant
# and the Entity component that the rest of the gem builds on.
require_relative 'simple_policy/version'
require_relative 'simple_policy/entity'

# Top-level namespace for the SimplePolicy gem.
module SimplePolicy
end
|
/*! \brief Calculates the velocity of the vertices of a Voronoi face
  \author Almog Yalinewich
  \file calc_face_vertex_velocity.hpp
 */

#ifndef CALC_FACE_VERTEX_VELOCITY_HPP
#define CALC_FACE_VERTEX_VELOCITY_HPP 1

#include "geometry.hpp"

/*! \brief Calculates the velocity of the vertices of a Voronoi face
  \param p1_pos Position of the first mesh generating point
  \param p1_vel Velocity of the first mesh generating point
  \param p2_pos Position of the second mesh generating point
  \param p2_vel Velocity of the second mesh generating point
  \param vertex Vertex
  \return Velocity at vertex
 */
Vector2D calc_face_vertex_velocity(const Vector2D& p1_pos, const Vector2D& p1_vel,
				   const Vector2D& p2_pos, const Vector2D& p2_vel,
				   const Vector2D& vertex);

#endif // CALC_FACE_VERTEX_VELOCITY_HPP
|
-- Core user table: one row per Twitch user known to the system.
CREATE TABLE IF NOT EXISTS `wb_core`.`user` (
	-- Surrogate key used for foreign-key references elsewhere.
	`ID` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,
	-- Twitch user name; enforced unique by the `Name` index below.
	`Name` VARCHAR(50) NOT NULL COLLATE 'utf8_general_ci',
	-- Numeric Twitch account id; enforced unique by the `Twitch_ID` index below.
	-- NOTE(review): newer Twitch ids may exceed INT UNSIGNED's max (4294967295);
	-- consider widening to BIGINT UNSIGNED — confirm against observed id values.
	`Twitch_ID` INT(16) UNSIGNED NOT NULL,
	`First_Seen` DATETIME NOT NULL DEFAULT current_timestamp() COMMENT 'When user was first added to the database',
	PRIMARY KEY (`ID`) USING BTREE,
	UNIQUE INDEX `Name` (`Name`) USING BTREE,
	UNIQUE INDEX `Twitch_ID` (`Twitch_ID`) USING BTREE
)
COMMENT='Users stored in the system'
COLLATE='utf8_general_ci'
ENGINE=InnoDB
;
|
package com.sonic.agent.maps;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author ZhouYiXun
* @des 本地自己维护一下设备状态
* @date 2021/08/16 10:26
*/
public class IOSDeviceManagerMap {
private static Map<String, String> deviceStatusMap = new ConcurrentHashMap<String, String>();
public static Map<String, String> getMap() {
return deviceStatusMap;
}
}
|
;;; Package for the base "parent-dependent" Gaussian-process classes used by
;;; the explicit-DAG machinery.  Exports the three GP classes defined below
;;; together with the accessors for their observation, distance, variational,
;;; and covariance slots.
(uiop:define-package #:dag-gp/explicit-dag/parent-dependent-base/base
  (:use #:cl
        #:dag-gp/utils/distance
        #:dag-gp/kernel
        #:dag-gp/output)
  (:export #:parent-dependent-gp
           #:variational-parent-dependent-gp
           #:variational-combined-output-parent-dependent-gp
           #:input-dim
           #:obs-locs
           #:obs
           #:obs-mat
           #:obs-mat-copy
           #:n-obs
           #:param-vec
           #:n-gp-params
           #:output
           #:n-latent
           #:u-locs
           #:q-mean-params
           #:q-mean
           #:q-mean-copy
           #:q-chol
           #:q-chol-mat
           #:q-cov
           #:middle
           #:qf-mean
           #:reshaped-qf-mean
           #:dLL/dqf-mu
           #:dLL/dqf-var
           #:parent-gps
           #:parent-outputs
           #:parent-dist-fns
           #:parent-squared-dist
           #:parent-abs-dist
           #:child-obs-locs
           #:child-obs
           #:dist-fns
           #:ff-squared-dist
           #:ff-abs-dist
           #:fu-squared-dist
           #:fu-abs-dist
           #:uu-squared-dist
           #:uu-abs-dist
           #:pred-parent-squared-dist
           #:pred-parent-abs-dist
           #:pred-obs-parent-squared-dist
           #:pred-obs-parent-abs-dist
           #:pred-ff-squared-dist
           #:pred-ff-abs-dist
           #:pred-obs-ff-squared-dist
           #:pred-obs-ff-abs-dist
           #:pred-fu-squared-dist
           #:pred-fu-abs-dist
           #:kernel
           #:Kff
           #:reshaped-Kff
           #:Kff-inv
           #:Kfu
           #:KfuM
           #:Kuu
           #:Kuu-chol
           #:Kuu-inv
           #:pred-Kff
           #:pred-obs-Kff
           #:pred-Kfu
           #:default-parent-param
           #:n-combined
           #:closed-downwards-p
           #:true-obs-locs
           #:true-obs
           #:n-true-obs
           #:update-parameter-vector
           #:count-params
           #:initialize-specialized-parameters))

(in-package #:dag-gp/explicit-dag/parent-dependent-base/base)
;;; Base class for a Gaussian process whose behaviour may depend on the
;;; outputs of parent GPs in the DAG.  Caches observations, pairwise
;;; distances, kernel covariances, and a flat parameter vector.
(defclass parent-dependent-gp ()
  ;; Parents
  ((parent-gps
    :initform nil
    :initarg :parent-gps
    :accessor parent-gps)
   ;; Filled from parent-gps by the initialize-instance :after method below.
   (parent-outputs
    :initform nil
    :accessor parent-outputs)
   ;; Raw outputs
   (observation-locations
    :initform nil
    :accessor obs-locs)
   (observations
    :initform nil
    :accessor obs)
   (observations-mat
    :initform nil
    :accessor obs-mat)
   (observations-mat-copy
    :initform nil
    :accessor obs-mat-copy)
   (n-observations
    :initform 0
    :accessor n-obs)
   ;; Output
   (output
    :initarg :output
    :initform nil
    :accessor output)
   ;; Distances between inputs
   ;; Defaults to squared distance only; callers may supply more via :dist-fns.
   (distance-functions
    :initarg :dist-fns
    :accessor dist-fns
    :initform (list #'squared-distance))
   (ff-squared-distances
    :accessor ff-squared-dist
    :initform nil)
   (ff-absolute-distances
    :accessor ff-abs-dist
    :initform nil)
   ;; Predictive distances
   (predictive-ff-squared-distances
    :accessor pred-ff-squared-dist
    :initform nil)
   (predictive-ff-absolute-distances
    :accessor pred-ff-abs-dist
    :initform nil)
   (predictive-observed-ff-squared-distances
    :accessor pred-obs-ff-squared-dist
    :initform nil)
   (predictive-observed-ff-absolute-distances
    :accessor pred-obs-ff-abs-dist
    :initform nil)
   ;; Kernels and covariances
   (kernel
    :initarg :kernel
    :accessor kernel)
   (ff-covariance
    :initform nil
    :accessor Kff
    :documentation "The covariance Kff.")
   (ff-covariance-inverse
    :initform nil
    :accessor Kff-inv
    :documentation "The matrix Kff^-1.")
   ;; Gradient of the log-likelihood w.r.t. the posterior f variance.
   (likelihood-var-deriv
    :initform nil
    :accessor dLL/dqf-var)
   ;; Predictive covariances
   (predictive-ff-covariance
    :initform nil
    :accessor pred-Kff
    :documentation "The predictive covariance Kff.")
   (predictive-observed-ff-covariance
    :initform nil
    :accessor pred-obs-Kff
    :documentation "Covariance between predicted and observed locations.")
   (default-parent-param
    :initarg :default-parent-param
    :initform nil
    :accessor default-parent-param)
   ;; Parameter vectors
   (parameter-indices
    :accessor param-indices
    :documentation "Indices of various parameters in the parameter vector.")
   (n-gp-parameters
    :accessor n-gp-params
    :documentation "Number of total parameters in the Gaussian Process.")
   (parameter-vector
    :accessor param-vec
    :documentation "A single vector of all parameters.")
   (closed-downwards-p
    :accessor closed-downwards-p
    :initarg :closed-downwards-p
    :initform nil)
   ;; "True" observations kept separately from the working observation slots.
   ;; NOTE(review): presumably the held-out/ground-truth subset — confirm with callers.
   (true-observation-locations
    :initform nil
    :accessor true-obs-locs)
   (true-observations
    :initform nil
    :accessor true-obs)
   (n-true-observations
    :initform 0
    :accessor n-true-obs)))
;;; Sparse/variational extension of parent-dependent-gp: adds inducing
;;; ("latent") locations u, the variational posterior q(u) parameterised by a
;;; mean and a Cholesky factor, and the extra f-u / u-u distance and
;;; covariance caches those require.
(defclass variational-parent-dependent-gp (parent-dependent-gp)
  ((input-dimension
    :initarg :input-dim
    :accessor input-dim)
   ;; Latent variables
   (n-latent
    :initarg :n-latent
    :accessor n-latent
    :documentation "The number of latent locations u.")
   (latent-locations
    :initform nil
    :accessor u-locs
    :documentation "The latent variable locations.")
   ;; Variational posterior q(u): raw parameters, mean, and Cholesky factor.
   (posterior-mean-params
    :initform nil
    :accessor q-mean-params)
   (posterior-mean
    :initform nil
    :accessor q-mean)
   (posterior-mean-copy
    :initform nil
    :accessor q-mean-copy)
   (posterior-cholesky
    :initform nil
    :accessor q-chol)
   (posterior-cholesky-mat
    :initform nil
    :accessor q-chol-mat)
   (posterior-covariances
    :initform nil
    :accessor q-cov)
   (middle
    :initform nil
    :accessor middle
    :documentation "The middle matrices Su - Kuu.")
   (posterior-f-mean
    :initform nil
    :accessor qf-mean)
   ;; Gradient of the log-likelihood w.r.t. the posterior f mean.
   (likelihood-mean-deriv
    :initform nil
    :accessor dLL/dqf-mu)
   ;; Distances between inputs
   ;; (distance-functions
   ;;  :initarg :dist-fns
   ;;  :accessor dist-fns
   ;;  :initform (list #'squared-distance))
   (fu-squared-distances
    :accessor fu-squared-dist
    :initform nil)
   (fu-absolute-distances
    :accessor fu-abs-dist
    :initform nil)
   (uu-squared-distances
    :accessor uu-squared-dist
    :initform nil)
   (uu-absolute-distances
    :accessor uu-abs-dist
    :initform nil)
   ;; Predictive distances
   (predictive-fu-squared-distances
    :accessor pred-fu-squared-dist
    :initform nil)
   (predictive-fu-absolute-distances
    :accessor pred-fu-abs-dist
    :initform nil)
   ;; Covariances
   (fu-covariance
    :initform nil
    :accessor Kfu
    :documentation "The covariance Kfu.")
   (fu-covariance-mid
    :accessor KfuM
    :documentation "The product Kfu Kuu^-1 (q-cov - Kuu).")
   (uu-covariance
    :initform nil
    :accessor Kuu
    :documentation "The covariance Kuu.")
   (uu-covariance-cholesky
    :initform nil
    :accessor Kuu-chol
    :documentation "chol(Kuu)")
   (uu-covariance-inverse
    :initform nil
    :accessor Kuu-inv
    :documentation "Kuu^-1")
   ;; Predictive covariances
   (predictive-fu-covariance
    :initform nil
    :accessor pred-Kfu
    :documentation "The predictive covariance Kfu.")))
;;; Variant whose output is produced by several independent component GPs;
;;; stores that count plus per-location reshapes of the posterior mean and Kff.
(defclass variational-combined-output-parent-dependent-gp
    (variational-parent-dependent-gp)
  ((n-combined
    :initarg :n-combined
    :accessor n-combined
    :documentation "The number of independent GPs used to make the output.")
   (reshaped-posterior-f-mean
    :initform nil
    :accessor reshaped-qf-mean
    :documentation "Posterior f mean reshaped to be ((loc0ind0 loc0ind1 ...) (loc1ind0 loc1ind1 ...)).")
   (reshaped-ff-covariance
    :initform nil
    :accessor reshaped-Kff
    :documentation "Kff reshaped to be ((loc0ind0 loc0ind1 ...) (loc1ind0 loc1ind1 ...)).")))
;; After construction: when parent GPs were supplied, cache each parent's
;; output object so later code can use them without re-querying the parents.
(defmethod initialize-instance :after ((gp parent-dependent-gp) &key)
  (let ((parents (parent-gps gp)))
    (when parents
      (setf (parent-outputs gp) (mapcar #'output parents)))))
;; After construction: derive the component count from the output object.
;; Note: this runs unconditionally, so it overwrites any value passed via
;; the :n-combined initarg.
(defmethod initialize-instance :after
    ((gp variational-combined-output-parent-dependent-gp) &key)
  (setf (n-combined gp) (n-gps (output gp))))
(defgeneric count-params (gp)
  (:documentation "Gives the number of parameters in the GP.")
  ;; Base class: kernel hyperparameters plus output/likelihood parameters.
  (:method ((gp parent-dependent-gp))
    (+ (n-kern-params (kernel gp))
       (n-output-params (output gp))))
  ;; Variational: base count plus n-latent mean parameters plus the
  ;; n(n+1)/2 entries of the triangular Cholesky factor of q(u)'s covariance.
  (:method ((gp variational-parent-dependent-gp))
    (+ (call-next-method)
       (n-latent gp)
       (/ (* (n-latent gp) (1+ (n-latent gp))) 2)))
  ;; Combined output: (kernel gp) holds one kernel per component GP here, so
  ;; kernel parameters are summed, and the variational parameters are
  ;; replicated once per component.
  (:method ((gp variational-combined-output-parent-dependent-gp))
    (+ (loop for kern in (kernel gp)
             sum (n-kern-params kern))
       (n-output-params (output gp))
       (* (n-combined gp)
          (+ (n-latent gp)
             (/ (* (n-latent gp) (1+ (n-latent gp))) 2))))))
(defgeneric initialize-specialized-parameters (gp)
  (:documentation "Initializes any parameters that are only for this GP subclass.")
  ;; Base class has no subclass-specific parameters; subclasses override.
  (:method ((gp parent-dependent-gp))
    nil))
(defgeneric update-parameter-vector (gp new-params)
  (:documentation "Updates parameters in the GP.")
  ;; Re-points the GP's (adjustable) parameter vector at NEW-PARAMS via a
  ;; displaced array, so reads through param-vec see the new storage without
  ;; copying element-by-element.
  ;; NOTE(review): assumes param-vec is adjustable and NEW-PARAMS holds at
  ;; least n-gp-params elements — confirm at the call sites.
  (:method ((gp parent-dependent-gp) new-params)
    (adjust-array (param-vec gp) (n-gp-params gp) :displaced-to new-params)))
|
# Generate the Ring FreeGLUT binding: run the parsec code generator on
# freeglut.cf to emit the C glue (ring_freeglut.c) and Ring header (ring_freeglut.rh).
./../../bin/ring ../codegen/parsec.ring freeglut.cf ring_freeglut.c ring_freeglut.rh
|
#!/bin/bash
#
# Uninstalls KubePlus: deletes its deployment, mutating webhook, services,
# CRDs, RBAC objects, TLS secret, and kubeconfig config maps.  The namespace
# is auto-detected from the running kubeplus-deployment and falls back to
# "default" when it cannot be found.

# Detect the namespace hosting kubeplus-deployment.  `head -n 1` guards
# against multiple grep matches producing a multi-line value, and the
# variable is quoted below to avoid word splitting.
KUBEPLUS_NS=$(kubectl get deployments -A | grep kubeplus-deployment | head -n 1 | awk '{print $1}')
if [[ -z "$KUBEPLUS_NS" ]]; then
   KUBEPLUS_NS=default
fi

kubectl delete deployments kubeplus-deployment -n "$KUBEPLUS_NS"
kubectl delete mutatingwebhookconfigurations platform-as-code.crd-binding
kubectl delete sa kubeplus -n "$KUBEPLUS_NS"
kubectl delete svc crd-hook-service -n "$KUBEPLUS_NS"
kubectl delete svc kubeplus -n "$KUBEPLUS_NS"
kubectl delete svc kubeplus-consumerui -n "$KUBEPLUS_NS"

# Workflow CRDs installed by KubePlus.
kubectl delete crds resourcecompositions.workflows.kubeplus
kubectl delete crds resourcepolicies.workflows.kubeplus
kubectl delete crds resourceevents.workflows.kubeplus
kubectl delete crds resourcemonitors.workflows.kubeplus

kubectl delete secret webhook-tls-certificates -n "$KUBEPLUS_NS"
kubectl delete clusterrolebinding kubeplus:cluster-admin

# SaaS provider/consumer access artifacts.
kubectl delete configmaps kubeplus-saas-consumer-kubeconfig kubeplus-saas-provider-kubeconfig -n "$KUBEPLUS_NS"
kubectl delete sa kubeplus-saas-consumer kubeplus-saas-provider -n "$KUBEPLUS_NS"
kubectl delete clusterroles kubeplus-saas-consumer kubeplus-saas-provider
kubectl delete clusterrolebindings kubeplus-saas-consumer kubeplus-saas-provider

echo "If you had installed KubePlus using Helm, delete the kubeplus helm release."
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.