text
stringlengths 27
775k
|
|---|
using Auk.CsharpBootstrapper.Extensions;
using WcfWsdlExample.Base.Interface.DataLayer;
using WcfWsdlExample.DataLayer.NorthwindDataSet;
namespace WcfWsdlExample.DataLayer.DataModel
{
/// <summary>
/// Flat data model mirroring the columns of a Northwind customer row.
/// </summary>
public class NorthwindCustomerModel : ISqlDataModel
{
    /// <summary>
    /// Copies the row's column values onto the same-named properties below.
    /// Mismatched or missing properties are logged as warnings instead of throwing.
    /// </summary>
    public NorthwindCustomerModel(northwind.CustomerRow customerRow) =>
        this.SafeInjectPropertiesValuesWithSameNames(
            customerRow,
            isLogWarningIfPropertyMismatchOrNotFound: true);

    public string CustomerID { get; set; }
    public string CompanyName { get; set; }
    public string ContactName { get; set; }
    public string ContactTitle { get; set; }
    public string Address { get; set; }
    public string City { get; set; }
    public string Region { get; set; }
    public string PostalCode { get; set; }
    public string Country { get; set; }
    public string Phone { get; set; }
    public string Fax { get; set; }
}
}
|
using System.Threading.Tasks;
namespace Lykke.Service.LiquidityEngine.Domain.Services
{
/// <summary>
/// Holds order books received from B2C2 and exposes quotes derived from them.
/// NOTE(review): description inferred from member names — confirm against the implementation.
/// </summary>
public interface IB2C2OrderBookService
{
    /// <summary>
    /// Stores the given order book (presumably replacing any previous book for the same pair — verify).
    /// </summary>
    Task SetAsync(OrderBook orderBook);

    /// <summary>
    /// Returns the quotes for the given asset pair.
    /// </summary>
    Quote[] GetQuotes(string assetPairId);
}
}
|
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
namespace DomainCQRS
{
/// <summary>
/// Provides a proxy for Aggregate Root types. The proxy is used to cache delegate calls to methods.
/// </summary>
/// <summary>
/// Provides a proxy for Aggregate Root types. The proxy is used to cache delegate calls to methods.
/// </summary>
public interface IAggregateRootProxy
{
    /// <summary>
    /// The aggregate root type to proxy.
    /// </summary>
    Type Type { get; }

    /// <summary>
    /// Create an object using the empty constructor.
    /// </summary>
    /// <returns>A new instance of <see cref="Type"/> created via its parameterless constructor.</returns>
    object Create();

    /// <summary>
    /// Applies a command to the aggregate root.
    /// </summary>
    /// <param name="aggregateRoot">The aggregate root to apply the command to.</param>
    /// <param name="command">The command to apply.</param>
    /// <returns>The events generated from the aggregate root applying the command.</returns>
    IEnumerable ApplyCommand(object aggregateRoot, object command);

    /// <summary>
    /// Applies a historical event to the aggregate root.
    /// </summary>
    /// <param name="aggregateRoot">The aggregate root.</param>
    /// <param name="event">The event to apply.</param>
    void ApplyEvent(object aggregateRoot, object @event);

    /// <summary>
    /// Register a command (message) that can be applied to the aggregate root type.
    /// </summary>
    /// <param name="messageProxy">The <see cref="IMessageProxy"/> for the command type.</param>
    /// <param name="aggregateRootApplyMethod">The name of the method that will apply the message type.</param>
    /// <returns>The <see cref="IAggregateRootProxy"/>, allowing registration calls to be chained.</returns>
    IAggregateRootProxy Register(IMessageProxy messageProxy, string aggregateRootApplyMethod);
}
}
|
using System.Threading.Tasks;
namespace Cumulocity.SDK.Client.Rest.Model.Buffering
{
/// <summary>
/// Buffers requests and later completes them when a matching response arrives.
/// NOTE(review): semantics inferred from member names — confirm against the implementation.
/// </summary>
public interface IBufferRequestService
{
    // Queues the request; presumably the returned task completes when a
    // response is added for it via addResponse — verify.
    Task<object> Create(BufferedRequest request);

    // NOTE(review): name violates C# PascalCase convention, but renaming
    // would break every implementer and caller, so it is left as-is.
    void addResponse(long requestId, object result);
}
}
|
part of '../../../main.dart';
/// Labeled dropdown used to pick one value out of a fixed list of string
/// options (a "mode").
class ModeSelectorWidget extends StatelessWidget {
  // Label rendered above the dropdown.
  final String caption;
  // The values offered by the dropdown.
  final List<String> options;
  // Currently selected value; when null the hint text is shown instead.
  final String value;
  final double captionFontSize;
  final double valueFontSize;
  // Callback invoked with the newly selected option.
  // NOTE(review): left untyped in the original; usage below suggests
  // void Function(String) — confirm before tightening the type.
  final onChange;
  final EdgeInsets padding;

  ModeSelectorWidget({
    Key key,
    @required this.caption,
    @required this.options,
    this.value,
    @required this.onChange,
    this.captionFontSize,
    this.valueFontSize,
    this.padding: const EdgeInsets.fromLTRB(Sizes.leftWidgetPadding, Sizes.rowPadding, Sizes.rightWidgetPadding, 0.0),
  }) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return Padding(
      padding: padding,
      child: Column(
        crossAxisAlignment: CrossAxisAlignment.start,
        children: <Widget>[
          // Caption; falls back to the app-wide state font size.
          Text("$caption", style: TextStyle(
            fontSize: captionFontSize ?? Sizes.stateFontSize
          )),
          Row(
            children: <Widget>[
              Expanded(
                child: ButtonTheme(
                  // Align the dropdown content with the caption.
                  alignedDropdown: true,
                  child: DropdownButton<String>(
                    value: value,
                    iconSize: 30.0,
                    isExpanded: true,
                    style: TextStyle(
                      fontSize: valueFontSize ?? Sizes.largeFontSize,
                      color: Colors.black,
                    ),
                    // Shown while value is null.
                    hint: Text("Select ${caption.toLowerCase()}"),
                    items: options.map((String value) {
                      return new DropdownMenuItem<String>(
                        value: value,
                        child: Text(value),
                      );
                    }).toList(),
                    onChanged: (mode) => onChange(mode),
                  ),
                ),
              )
            ],
          )
        ],
      ),
    );
  }
}
|
-- Recreate the scalar helper zutil.DateDay.
IF OBJECT_ID('zutil.DateDay') IS NOT NULL
	DROP FUNCTION zutil.DateDay
GO
-- Returns the calendar date of @dt with the time portion discarded
-- (a NULL input yields NULL, per CONVERT semantics).
CREATE FUNCTION zutil.DateDay(@dt datetime2(0))
RETURNS date
BEGIN
	RETURN CONVERT(date, @dt)
END
GO
-- Permissions are lost by the drop/create cycle above, so re-grant execute.
GRANT EXEC ON zutil.DateDay TO zzp_server
GO
|
<?php
namespace AlibabaCloud\Iot;
/**
* @method string getVoice()
* @method string getProjectCode()
* @method string getIotInstanceId()
* @method string getText()
* @method string getSpeechType()
* @method string getVolume()
* @method string getBizCode()
* @method string getApiProduct()
* @method string getApiRevision()
* @method string getSpeechRate()
*/
class CreateSpeech extends Rpc
{
    /**
     * Record a request parameter both in the data map and in the POST
     * form parameters, the pattern shared by every setter below.
     *
     * @param string $name
     * @param string $value
     *
     * @return $this
     */
    private function putFormParam($name, $value)
    {
        $this->data[$name] = $value;
        $this->options['form_params'][$name] = $value;
        return $this;
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withVoice($value)
    {
        return $this->putFormParam('Voice', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withProjectCode($value)
    {
        return $this->putFormParam('ProjectCode', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withIotInstanceId($value)
    {
        return $this->putFormParam('IotInstanceId', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withText($value)
    {
        return $this->putFormParam('Text', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withSpeechType($value)
    {
        return $this->putFormParam('SpeechType', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withVolume($value)
    {
        return $this->putFormParam('Volume', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withBizCode($value)
    {
        return $this->putFormParam('BizCode', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withApiProduct($value)
    {
        return $this->putFormParam('ApiProduct', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withApiRevision($value)
    {
        return $this->putFormParam('ApiRevision', $value);
    }

    /**
     * @param string $value
     *
     * @return $this
     */
    public function withSpeechRate($value)
    {
        return $this->putFormParam('SpeechRate', $value);
    }
}
|
package com.example.hakatonapplication.socket
// Wrapper around the optional response payload.  Declared as a data class
// so it gains value equality/toString, consistent with ResponseData below.
data class ResponseMessage(
    val data: ResponseData?
)

// Four optional integer slots of a socket response.
data class ResponseData(val first: Int?, val second: Int?, val third: Int?, val fourth: Int?)
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Snowdrift.CI.Parser where
import Control.Applicative
import Control.Monad
import Data.Aeson
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import Snowdrift.CI.Type (Command (..), MergeRequest (MergeRequest))
import qualified Snowdrift.CI.Type as CI
-- | Strip @p@ from the front of @t@, returning @t@ unchanged when @p@ is
-- not a prefix.  Rewritten with the total 'fromMaybe' instead of the
-- redundant prefix test plus the partial 'fromJust'.
stripPrefix :: Text -> Text -> Text
stripPrefix p t = fromMaybe t (T.stripPrefix p t)
-- | Drop a leading @ssh://@ scheme from a URL, if present.
stripSSH :: Text -> Text
stripSSH = stripPrefix "ssh://"
-- | Decode a GitLab merge-request webhook payload.  Only requests whose
-- state is "opened" or "reopened" are accepted; any other state — or a
-- non-object payload — fails the parse via 'mzero'.
instance FromJSON MergeRequest where
    parseJSON (Object o) = do
        -- All interesting fields live under "object_attributes".
        attrs <- o .: "object_attributes"
        mergeRequestId <- attrs .: "id"
        targetId <- attrs .: "target_project_id"
        targetBranch <- attrs .: "target_branch"
        sourceBranch <- attrs .: "source_branch"
        state <- attrs .: "state"
        source <- attrs .: "source"
        sourceUrl <- source .: "ssh_url"
        target <- attrs .: "target"
        targetUrl <- target .: "ssh_url"
        commit <- attrs .: "last_commit"
        commitId <- commit .: "id"
        commitMessage <- commit .: "message"
        author <- commit .: "author"
        authorName <- author .: "name"
        authorEmail <- author .: "email"
        -- NOTE(review): the comparison is done on 'Value's (hence the
        -- annotation); confirm "state" is always a JSON string.
        if (state `elem` ["opened", "reopened" :: Value])
            then return $! MergeRequest
                -- SSH URLs are normalized by stripping the "ssh://" scheme.
                { CI.mergeRequestId = CI.MergeRequestId mergeRequestId
                , CI.targetId = CI.TargetId targetId
                , CI.targetBranch = CI.Branch targetBranch
                , CI.targetUrl = CI.Url $ stripSSH targetUrl
                , CI.sourceBranch = CI.Branch sourceBranch
                , CI.sourceUrl = CI.Url $ stripSSH sourceUrl
                , CI.commitId = CI.CommitId commitId
                , CI.commitMessage = CI.CommitMessage commitMessage
                , CI.authorName = CI.AuthorName authorName
                , CI.authorEmail = CI.AuthorEmail authorEmail
                }
            else mzero
    parseJSON _ = mzero
-- | Parse one 'Command' per input line: the first word is the command
-- name, the remaining words its arguments.  Blank lines produce nothing.
parseCommands :: Text -> [Command]
parseCommands = mapMaybe (toCommand . T.words) . T.lines
  where
    toCommand []     = Nothing
    toCommand (w:ws) = Just (Command w ws)
|
<?php
/**
* Created by IntelliJ IDEA.
* User: tcook
* Date: 7/22/19
* Time: 8:02 PM
*/
namespace User\Model\User;
/**
 * Write-side operations for the User model.
 */
interface CommandInterface
{
    /** Add the given user. */
    public function addUser(User $user);

    /** Update the given user. */
    public function updateUser(User $user);

    /** Delete the given user. */
    public function deleteUser(User $user);
}
|
2 14
0 0 0 0 12 7
0 0 0 1 10 1
0 0 0 12 0 10
0 0 0 7 1 12
0 0 0 7 10 1
0 0 1 0 0 9
0 0 1 1 5 12
0 0 1 7 5 12
0 0 10 1 0 9
0 0 10 12 0 5
0 0 12 1 1 1
0 0 12 12 1 1
0 0 4 0 9 12
0 0 7 0 1 12
0 0 7 7 1 1
0 0 9 4 12 1
0 1 0 0 3 4
0 1 1 0 1 0
0 1 1 3 1 2
0 1 1 9 0 5
0 1 12 3 0 9
0 1 12 4 1 1
0 1 3 0 0 9
0 1 3 2 1 2
0 1 3 3 3 2
0 1 3 4 2 2
0 1 3 9 0 5
0 1 4 0 3 0
0 1 4 2 1 1
0 1 7 4 12 1
0 12 0 0 4 4
0 12 0 4 3 4
0 12 1 3 12 4
0 12 12 4 1 2
0 12 4 0 1 3
0 12 4 0 2 3
0 12 4 12 5 3
0 13 0 0 0 4
0 13 4 0 4 3
0 2 0 0 2 2
0 2 0 0 4 4
0 2 3 0 3 2
0 2 3 2 2 2
0 2 3 3 3 2
0 2 3 4 3 2
0 2 4 0 2 2
0 2 4 0 3 2
0 3 0 0 4 4
0 3 3 0 0 9
0 3 4 0 0 9
0 3 4 0 2 3
0 3 4 0 3 3
0 3 4 0 4 3
0 3 4 0 9 3
0 4 0 0 2 2
0 4 12 0 3 3
0 4 7 0 3 3
0 7 0 0 3 4
0 7 0 0 4 4
0 7 4 7 3 3
0 7 7 0 2 3
0 9 0 0 0 4
1 0 0 1 0 1
1 0 5 1 0 1
1 0 5 12 1 1
1 0 9 1 1 1
1 1 1 3 0 3
1 1 1 5 0 4
1 1 1 5 1 5
1 1 12 3 3 1
1 1 4 3 0 3
1 1 5 1 1 1
1 1 5 3 0 3
1 1 6 12 1 1
1 10 12 3 0 1
1 5 1 3 0 3
10 0 0 1 0 1
11 5 3 3 12 0
12 1 1 3 0 3
12 1 3 0 1 6
12 1 5 3 0 3
12 5 3 4 12 0
13 1 9 3 0 3
13 3 9 0 0 3
2 1 3 3 12 5
2 1 5 3 3 1
3 0 13 12 9 11
3 0 3 3 3 1
3 1 0 0 3 13
3 1 1 12 0 5
3 1 1 5 0 5
3 1 1 9 0 5
3 1 3 0 1 0
3 1 3 0 3 0
3 1 5 3 0 3
3 12 0 0 0 3
3 12 11 3 3 3
3 13 12 4 3 3
3 3 0 0 0 3
3 3 0 13 1 12
3 3 0 3 12 13
3 3 0 9 12 13
3 3 1 13 3 5
3 3 13 1 9 3
3 3 5 13 3 3
3 5 1 3 0 3
3 5 12 3 0 1
3 5 13 4 3 3
3 5 5 1 0 3
3 5 5 3 0 3
3 6 12 11 5 3
4 0 0 1 0 1
4 0 9 0 3 12
4 1 5 3 0 3
4 12 0 0 0 3
4 3 0 0 0 3
4 3 4 0 0 3
4 3 4 0 3 3
4 3 9 0 0 3
5 0 0 1 0 1
5 0 0 12 0 1
6 1 1 5 0 5
9 0 0 0 0 12
9 3 0 0 0 3
|
import React from "react";
import { FormFields } from "@clutch-sh/experimentation";
import { shallow } from "enzyme";
import { ExperimentDetails } from "../start-experiment";
// Keep the real core package but stub navigation, which the workflow
// triggers when an experiment is started.
jest.mock("@clutch-sh/core", () => {
  return {
    ...(jest.requireActual("@clutch-sh/core") as any),
    useNavigate: jest.fn(),
  };
});

describe("Start Experiment workflow", () => {
  it("renders correctly", () => {
    const component = shallow(<ExperimentDetails environments={[]} onStart={() => {}} />);
    expect(component.find(FormFields).dive().debug()).toMatchSnapshot();
  });

  // NOTE(review): this case is byte-identical to the one above — no prop
  // here actually enables upstream cluster type selection.  Confirm
  // whether a prop (e.g. a template/flag) is missing.
  it("renders correctly with upstream cluster type selection enabled", () => {
    const component = shallow(<ExperimentDetails environments={[]} onStart={() => {}} />);
    expect(component.find(FormFields).dive().debug()).toMatchSnapshot();
  });

  it("renders correctly with environments selection enabled", () => {
    const component = shallow(
      <ExperimentDetails
        downstreamClusterTemplate=""
        environments={[{ value: "staging" }]}
        onStart={() => {}}
      />
    );
    expect(component.find(FormFields).dive().debug()).toMatchSnapshot();
  });
});
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Eventstore.PostgreSQL.Internal.Watch where
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.RWS.Strict (ask, get, gets, put)
import Data.ByteString.Char8 (readInt)
import Data.ByteString.Lazy as L
import Data.ByteString.Builder
import Data.ByteString.Builder.Extra
import Data.Set as Set
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.Notification
import Database.PostgreSQL.Simple.SqlQQ
import Eventstore.PostgreSQL.Internal.Types
-- | Start watching the given streams for update notifications.
-- LISTEN on the shared channel is issued only when the current watch set
-- is empty (first watcher); the streams are then added to the watch set.
watch :: [Int] -> PgStore ()
watch streams = PgStore $ do
    conn <- ask
    s <- get
    when (Set.null (sWatches s)) $ liftIO $ void $ execute_ conn [sql|listen event_stream_updated|]
    put s { sWatches = Set.union (Set.fromList streams) (sWatches s) }
-- | Stop watching the given streams.
--
-- The new watch set is the current watches minus the given streams.  (The
-- original computed @streams \\ watches@, which threw away every remaining
-- watch instead of removing the requested ones.)  UNLISTEN is issued once
-- the watch set becomes empty, mirroring 'watch'.
unwatch :: [Int] -> PgStore ()
unwatch streams = PgStore $ do
    conn <- ask
    s <- get
    let w' = Set.difference (sWatches s) (Set.fromList streams)
    when (Set.null w') $ liftIO $ void $ execute_ conn [sql|unlisten event_stream_updated|]
    put s { sWatches = w' }
-- | Block until one of the watched streams receives an update and return
-- its stream id.  Fails immediately when the watch set is empty, since
-- waiting would otherwise block forever.
wait :: PgStore Int
wait = do
    (conn, watches) <- PgStore $ (,) <$> ask <*> gets sWatches
    when (Set.null watches) $ throwError "Watch set is empty"
    nextNotification conn watches
  where
    -- Loop until a notification on our channel carries the id of a watched
    -- stream; other channels, unparsable payloads, and unwatched stream ids
    -- are all skipped.
    nextNotification conn watches = do
        n <- liftIO $ getNotification conn
        if notificationChannel n == "event_stream_updated"
            then
                case readInt (notificationData n) of
                    -- The payload must be exactly an integer (no trailing bytes).
                    Just (streamid, "") -> if Set.member streamid watches
                        then return streamid
                        else nextNotification conn watches
                    _ -> nextNotification conn watches
            else
                nextNotification conn watches
-- | Publish an update notification for the given stream id on the shared
-- channel.  The payload is the decimal rendering of the id, built with a
-- small (32/128-byte) untrimmed builder strategy since payloads are tiny.
notify :: Int -> PgStore ()
notify stream = PgStore $ do
    conn <- ask
    let payload = toSmall $ intDec stream
    liftIO $ void $ execute conn [sql|notify event_stream_updated, ?|] (Only payload)
  where
    toSmall = toStrict . toSmallLazy
    toSmallLazy = toLazyByteStringWith (untrimmedStrategy 32 128) L.empty
|
/**
* @file src/config/storage.cpp
* @brief Decompilation configuration manipulation: storage.
* @copyright (c) 2017 Avast Software, licensed under the MIT license
*/
#include <algorithm>
#include <cassert>
#include <vector>
#include "retdec/config/storage.h"
namespace {

// JSON object keys used by Storage (de)serialization.
const std::string JSON_type = "type";
const std::string JSON_value = "value";
const std::string JSON_regNum = "registerNumber";

// String names for Storage::eType.  Indexed by the numeric enum value in
// readJsonValue()/getJsonValue(), so the order here must match the eType
// declaration.
const std::vector<std::string> typeStrings =
{
	"undefined",
	"global",
	"register",
	"stack"
};

} // anonymous namespace
namespace retdec {
namespace config {
/**
 * Default-constructed storage has the UNDEFINED type.
 */
Storage::Storage() :
		type(eType::UNDEFINED)
{
}

/**
 * @return Storage of the UNDEFINED type.
 */
Storage Storage::undefined()
{
	return Storage();
}

/**
 * Create an on-stack storage at the given offset.
 */
Storage Storage::onStack(int offset)
{
	Storage ret;
	ret.type = eType::STACK;
	ret._stackOffset = offset;
	return ret;
}

/**
 * Create an on-stack storage associated with the provided register number.
 */
Storage Storage::onStack(int offset, unsigned registerNumber)
{
	Storage ret;
	ret.type = eType::STACK;
	ret._stackOffset = offset;
	ret._registerNumber = registerNumber;
	return ret;
}
/**
 * Create a global (memory) storage at the given address.
 * The address must be defined -- asserted below.
 */
Storage Storage::inMemory(const retdec::utils::Address& address)
{
	assert(address.isDefined());
	Storage ret;
	ret.type = eType::GLOBAL;
	ret._globalAddress = address;
	return ret;
}

/**
 * There are two possible register flavours: named and numbered registers.
 * This creates register storage using register name.
 */
Storage Storage::inRegister(const std::string& registerName)
{
	Storage ret;
	ret.type = eType::REGISTER;
	ret._registerName = registerName;
	return ret;
}

/**
 * There are two possible register flavours: named and numbered registers.
 * This creates register storage using register number.
 */
Storage Storage::inRegister(unsigned registerNumber)
{
	Storage ret;
	ret.type = eType::REGISTER;
	ret._registerNumber = registerNumber;
	return ret;
}

/**
 * There are two possible register flavours: named and numbered registers.
 * This creates register storage using register name, which also contains
 * information about register's number and class.
 */
Storage Storage::inRegister(
		const std::string& registerName,
		unsigned registerNumber)
{
	Storage ret;
	ret.type = eType::REGISTER;
	ret._registerName = registerName;
	ret._registerNumber = registerNumber;
	return ret;
}
/**
 * Construct a storage from its JSON representation.
 */
Storage Storage::fromJsonValue(const Json::Value& val)
{
	Storage ret;
	ret.readJsonValue(val);
	return ret;
}

/**
 * Populate this storage from JSON: the "type" string selects the storage
 * kind, "value" carries the kind-specific payload (address, register name,
 * or stack offset), and an optional "registerNumber" is read for any kind.
 */
void Storage::readJsonValue(const Json::Value& val)
{
	std::string enumStr = safeGetString(val, JSON_type);
	auto it = std::find(typeStrings.begin(), typeStrings.end(), enumStr);
	if (it == typeStrings.end())
	{
		// Unknown type string -> fall back to UNDEFINED.
		type = eType::UNDEFINED;
	}
	else
	{
		// typeStrings is ordered like eType, so the index is the enum value.
		type = static_cast<eType>( std::distance(typeStrings.begin(), it) );
	}
	if (isMemory())
	{
		_globalAddress = safeGetAddress(val, JSON_value);
	}
	else if (isRegister())
	{
		_registerName = safeGetString(val, JSON_value);
	}
	else if (isStack())
	{
		_stackOffset = safeGetInt(val, JSON_value);
	}
	else
	{
		assert(isUndefined());
	}
	if (val.isMember(JSON_regNum))
	{
		_registerNumber = safeGetUint(val, JSON_regNum);
	}
}
/**
 * Serialize this storage to JSON: inverse of readJsonValue().  Writes the
 * "type" string, the kind-specific "value", and "registerNumber" whenever
 * a register number is set (for any storage kind).
 */
Json::Value Storage::getJsonValue() const
{
	Json::Value obj;
	if (isMemory())
	{
		obj[JSON_type] = typeStrings[ static_cast<size_t>(eType::GLOBAL) ];
		obj[JSON_value] = toJsonValue(getAddress());
	}
	else if (isRegister())
	{
		obj[JSON_type] = typeStrings[ static_cast<size_t>(eType::REGISTER) ];
		obj[JSON_value] = getRegisterName();
	}
	else if (isStack())
	{
		obj[JSON_type] = typeStrings[ static_cast<size_t>(eType::STACK) ];
		obj[JSON_value] = getStackOffset();
	}
	else
	{
		// UNDEFINED storages have a type but no value.
		assert(isUndefined());
		obj[JSON_type] = typeStrings[ static_cast<size_t>(eType::UNDEFINED) ];
	}
	auto registerNumber = getRegisterNumber();
	if (registerNumber.isDefined())
	{
		obj[JSON_regNum] = registerNumber.getValue();
	}
	return obj;
}
/**
 * @return @c true for any concrete storage type (i.e. not UNDEFINED).
 */
bool Storage::isDefined() const
{
	return !isUndefined();
}

bool Storage::isUndefined() const
{
	return type == eType::UNDEFINED;
}

// Note: "memory" storage is the GLOBAL type (a fixed global address).
bool Storage::isMemory() const
{
	return type == eType::GLOBAL;
}

bool Storage::isRegister() const
{
	return type == eType::REGISTER;
}

bool Storage::isStack() const
{
	return type == eType::STACK;
}

/**
 * @param[out] globalAddress If storage is global this is set to its address.
 *                           Otherwise it is set to an undefined value.
 */
bool Storage::isMemory(retdec::utils::Address& globalAddress) const
{
	globalAddress = _globalAddress;
	return isMemory();
}

/**
 * @param[out] registerName If storage is register this is set to its name.
 *                          Otherwise it is set to an undefined value.
 *                          If register number was set, but name was not,
 *                          this may be empty even if storage is a register.
 */
bool Storage::isRegister(std::string& registerName) const
{
	registerName = _registerName;
	return isRegister();
}

/**
 * @param[out] registerNumber If register number is set, return it in parameter.
 *                            Otherwise parameter is left unchanged.
 * @return If register number is set, return @c true.
 *         Otherwise return @c false.
 *
 * NOTE(review): the out-parameter is @c int while the stored number is
 * unsigned; large values would wrap -- confirm whether this is intended.
 */
bool Storage::isRegister(int& registerNumber) const
{
	if (_registerNumber.isDefined())
	{
		registerNumber = _registerNumber;
		return true;
	}
	else
	{
		return false;
	}
}

/**
 * @param[out] stackOffset If storage is stack this is set to its offset.
 *                         Otherwise it is set to an undefined value.
 */
bool Storage::isStack(int& stackOffset) const
{
	stackOffset = _stackOffset;
	return isStack();
}
/**
* @return If storage is global return its address.
* Otherwise return an undefined value.
*/
retdec::utils::Address Storage::getAddress() const
{
return _globalAddress;
}
/**
* @return If storage is register return its name.
* Otherwise return an undefined value.
*/
std::string Storage::getRegisterName() const
{
return _registerName;
}
/**
* @return If storage is stack return its offset.
* Otherwise return an undefined value.
*/
int Storage::getStackOffset() const
{
return _stackOffset;
}
/**
* This method can be used on any storage type, which might contain register number.
* Right now, it is either register or stack storage.
* @return If register number is set, return defined value.
* Otherwise return undefined value.
*/
retdec::utils::Maybe<unsigned> Storage::getRegisterNumber() const
{
return _registerNumber;
}
} // namespace config
} // namespace retdec
|
class ApplicationController < ActionController::Base
  # NOTE(review): `not_used_variable` is never read.  The telltale name
  # suggests this file is a linter/static-analysis fixture, so the dead
  # assignment is deliberately left in place — confirm before cleaning it
  # up.  The method returns "" (the value of the assignment expression).
  def test
    not_used_variable = ""
  end
end
|
module Frodo
  # Authentication middleware for the OAuth2 resource-owner password grant,
  # used when the :username and :password options are set.  (The previous
  # comment described the client_credentials flow, which does not match the
  # grant_type below.)
  class Middleware::Authentication::Password < Frodo::Middleware::Authentication
    # Form parameters sent to the token endpoint.
    def params
      { grant_type: 'password',
        client_id: @options[:client_id],
        username: @options[:username],
        password: @options[:password],
        resource: @options[:instance_url],
      }
    end
  end
end
|
## monte-carlo、cesaro-test 简述
书中 [3.1.2](./monte_carlo.scm) 使用了两个函数,一个是 monte-carlo,一个是 cesaro-test。
### 蒙特卡罗方法
蒙特卡罗(Monte Carlo)是摩纳哥的一座城市,有很多赌场。
蒙特卡罗方法也称统计模拟方法,生成大量随机数去测试系统,通过概率统计得到问题的解。
此方法诞生于上个世纪40年代美国的"曼哈顿计划",主要由斯塔尼斯拉夫·乌拉姆(Stanislaw Marcin Ulam) 和 冯·诺伊曼(John von Neumann) 提出。
乌拉姆的叔叔,经常在摩纳哥的蒙特卡洛的赌场输钱。这种方法基于概率,于是以蒙特卡罗(Monte Carlo)命名。
### 切萨罗
Ernesto Cesàro 是位意大利数学家。cesaro-test 求 pi 实现如下
``` Scheme
(define (estimate-pi trials)
(sqrt (/ 6 (monte-carlo trials cesaro-test))))
(define (cesaro-test)
(= (gcd (rand) (rand)) 1))
```
cesaro-test 求 pi 的数学原理是:随机选择两个数字,他们互质的概率是 6/(pi^2)。[初步证明参考这里](http://www.pi314.net/eng/cesaro.php)
假如两个数字互质。
那么他们并非都是 2 的倍数。都是 2 的倍数的概率为 (1/2) * (1/2), 并非都是 2 的倍数,概率就为 [1 - (1/2)^2]
同理
* 他们并非都是 3 的倍数,概率就为 [1 - (1/3)^2]。
* 他们并非都是 5 的倍数,概率就为 [1 - (1/5)^2]。
* 他们并非都是 7 的倍数,概率就为 [1 - (1/7)^2]。
* ....
两数互质,他们并非都是任何素数的倍数,并且同时成立。于是其概率为
```
P = [1 - (1/2)^2] * [1 - (1/3)^2] * [1 - (1/5)^2] * [1 - (1/7)^2] ....
```
根据[欧拉乘积公式](https://zh.wikipedia.org/wiki/欧拉乘积),将素数乘法和自然数加法关联起来。(额外参考[黎曼ζ函数](https://zh.wikipedia.org/wiki/黎曼ζ函數))。
<img src="euler_product.svg"/>
于是随机两数互质概率 P 的倒数为
```
1/P = 1 + (1/2)^2 + (1/3)^2 + (1/4)^2 .....
```
这要求所有自然数平方的倒数和,是著名的[巴塞尔问题](https://zh.wikipedia.org/wiki/巴塞尔问题)。第一个给出上式精确值的也是欧拉,为 (pi^2)/6。
|
namespace Micser.Plugins.Main.Widgets
{
    /// <summary>
    /// Code-behind for the device-input widget view.  The UI lives in the
    /// associated XAML; this partial class only runs component initialization.
    /// </summary>
    public partial class DeviceInputWidget
    {
        public DeviceInputWidget()
        {
            InitializeComponent();
        }
    }
}
|
// Simple class with an immutable `b` and a mutable `c`.
class simple_class_2_MyClass(val b: Int, var c: Int)

// Builds an instance with both fields set to `i`.
fun simple_class_2_genMyClass(i: Int): simple_class_2_MyClass {
    return simple_class_2_MyClass(i, i)
}

// NOTE(review): the repeated dead stores to `y.c` appear intentional — the
// naming pattern suggests a compiler/analysis test fixture — so they are
// left untouched.  Net effect: returns x + 1.
fun simple_class_2_change(x: Int): Int {
    val y = simple_class_2_MyClass(x, x)
    y.c = x
    y.c = x
    y.c = 1
    y.c = x + 1
    return y.c
}

// Returns the immutable field of a freshly generated instance, i.e. `i`.
fun simple_class_2_testGen(i: Int): Int {
    val j = simple_class_2_genMyClass(i)
    return j.b
}
|
import { run } from '..'
import { github } from '../plugins/plugin-github'
import { typescript } from '../plugins/plugin-typescript'
// The API layer is mocked so the plugins resolve against fixture data.
jest.mock('../core/api')

describe('runner', () => {
  beforeEach(() => {
    // Silence report logging during the tests.
    console.log = () => {}
  })

  it('can generate report without goals specified', async () => {
    const output = await run({
      organization: 'monito',
      // A repo passes once it achieves more than 75% of the checks.
      checkAchieved: (percentage: number) => ({
        status: percentage > 75 ? 'pass' : 'fail',
      }),
      plugins: {
        github,
        typescript,
      },
      goals: [],
    })
    // generatedAt is a timestamp, so match it loosely in the snapshot.
    expect(output).toMatchSnapshot({
      generatedAt: expect.any(String),
    })
  })

  it('can generate report with goals applied', async () => {
    const output = await run({
      organization: 'monito',
      checkAchieved: (percentage: number) => ({
        status: percentage > 75 ? 'pass' : 'fail',
      }),
      plugins: {
        github,
        typescript,
      },
      goals: [
        {
          name: 'main branch',
          description: 'the project is using main branch',
          // Custom goal: pass only when the default branch is "main".
          check: async ({ defaultBranchName }) => {
            const status = defaultBranchName === 'main' ? 'pass' : 'fail'
            return { status, value: defaultBranchName }
          },
        },
      ],
    })
    expect(output).toMatchSnapshot({
      generatedAt: expect.any(String),
    })
  })
})
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.inference.aggs;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InvalidAggregationPathException;
import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Aggregation result wrapping a single {@link InferenceResults} value.
 * Instances are terminal: they cannot be reduced across shards (see
 * {@link #reduce}).
 */
public class InternalInferenceAggregation extends InternalAggregation {

    private final InferenceResults inferenceResult;

    protected InternalInferenceAggregation(String name, Map<String, Object> metadata,
                                           InferenceResults inferenceResult) {
        super(name, metadata);
        this.inferenceResult = inferenceResult;
    }

    /** Stream constructor: the result is read as a named writeable. */
    public InternalInferenceAggregation(StreamInput in) throws IOException {
        super(in);
        inferenceResult = in.readNamedWriteable(InferenceResults.class);
    }

    public InferenceResults getInferenceResult() {
        return inferenceResult;
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeNamedWriteable(inferenceResult);
    }

    /** Always throws: inference results cannot be merged across shards. */
    @Override
    public InternalAggregation reduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
        throw new UnsupportedOperationException("Reducing an inference aggregation is not supported");
    }

    /**
     * Supports only the empty path (this aggregation itself) and the
     * single-element "value" path (the predicted value); anything else is
     * an invalid aggregation path.
     */
    @Override
    public Object getProperty(List<String> path) {
        Object propertyValue;
        if (path.isEmpty()) {
            propertyValue = this;
        } else if (path.size() == 1) {
            if (CommonFields.VALUE.getPreferredName().equals(path.get(0))) {
                propertyValue = inferenceResult.predictedValue();
            } else {
                throw invalidPathException(path);
            }
        } else {
            throw invalidPathException(path);
        }
        return propertyValue;
    }

    private InvalidAggregationPathException invalidPathException(List<String> path) {
        return new InvalidAggregationPathException("unknown property " + path + " for " +
            InferencePipelineAggregationBuilder.NAME + " aggregation [" + getName() + "]");
    }

    /** XContent body is delegated entirely to the wrapped result. */
    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        return inferenceResult.toXContent(builder, params);
    }

    @Override
    public String getWriteableName() {
        return "inference";
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), inferenceResult);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null || getClass() != obj.getClass()) return false;
        if (super.equals(obj) == false) return false;
        InternalInferenceAggregation other = (InternalInferenceAggregation) obj;
        return Objects.equals(inferenceResult, other.inferenceResult);
    }
}
|
#![cfg(feature = "url-impl")]
#![allow(unused)]
use ts_rs::TS;
use url::Url;
/// A `Url` field must be exported to TypeScript as a plain `string`.
#[test]
fn contains_url() {
    #[derive(TS)]
    pub struct StructWithUrl {
        some_url: Url,
    }
    assert_eq!(
        StructWithUrl::decl(),
        "interface StructWithUrl { some_url: string, }"
    )
}

/// A newtype over `Url` becomes a TypeScript `type` alias to `string`.
#[test]
fn newtype_url() {
    #[derive(TS)]
    pub struct SomeUrl(Url);
    assert_eq!(
        SomeUrl::decl(),
        "type SomeUrl = string;"
    )
}
|
using Aquality.Selenium.Elements.Interfaces;
using Aquality.Selenium.Forms;
using OpenQA.Selenium;
namespace Aquality.Selenium.Tests.Integration.TestApp.AutomationPractice.Forms
{
/// <summary>
/// Page object for the "Authentication" form.  Form presence is detected
/// via the "email_create" input used as the locator in the base call.
/// </summary>
internal class AuthenticationForm : Form
{
    public AuthenticationForm() : base(By.Id("email_create"), "Authentication")
    {
    }

    // Button that submits the "create account" step.
    private IButton CreateAccountButton => ElementFactory.GetButton(By.Id("SubmitCreate"), "Submit Create");

    // Email input for account creation.
    private ITextBox EmailTextBox => ElementFactory.GetTextBox(By.Id("email_create"), "Email Create");

    /// <summary>Clicks the create-account button and waits.</summary>
    public void ClickCreateAccountButton()
    {
        CreateAccountButton.ClickAndWait();
    }

    /// <summary>Clears the email field and types <paramref name="email"/>.</summary>
    public void SetEmail(string email)
    {
        EmailTextBox.ClearAndType(email);
    }
}
|
package org.webpieces.http2client.api.dto;
import org.webpieces.data.api.DataWrapper;
import com.webpieces.http2.api.dto.highlevel.Http2Request;
import com.webpieces.http2.api.dto.highlevel.Http2Trailers;
/**
 * A complete HTTP/2 request: the request headers plus the fully aggregated
 * body and optional trailing headers held by the {@code Http2Message} base.
 */
public class FullRequest extends Http2Message {

	protected Http2Request headers;

	public FullRequest() {
	}

	public FullRequest(Http2Request request, DataWrapper fullData, Http2Trailers trailingHeaders) {
		super(fullData, trailingHeaders);
		this.headers = request;
	}

	public Http2Request getHeaders() {
		return headers;
	}

	public void setHeaders(Http2Request headers) {
		this.headers = headers;
	}

	@Override
	public String toString() {
		// Fix: the original string was missing the closing "]".
		return "FullRequest[headers=" + headers + " body size=" + payload.getReadableSize()
				+ " trailers=" + trailingHeaders + "]";
	}
}
|
package Graph;
import java.util.*;
/**
* Given a source word, target word and an English dictionary, transform the source word to target
* by changing/adding/removing 1 character at a time, while all intermediate words being valid
* English words. Return the transformation chain which has the smallest number of intermediate words.
*/
public class TransformWord {
    /**
     * Demo driver: builds the transformation graph for a small dictionary,
     * prints every word's adjacency set, then prints transformation
     * chain(s) from "cat" to "bed".
     */
    public static void main(String[] args) {
        Set<String> dict = new HashSet<>(Arrays.asList("cat", "bat", "at", "bed", "bet", "ed", "ad"));
        Map<String, Set<String>> graph = constructGraph(dict);
        for (String s : graph.keySet()) {
            System.out.println(s + ": " + graph.get(s));
        }
        String start = "cat";
        String end = "bed";
        transformWord(graph, start, end);
    }
//bfs search
private static void transformWord(Map<String, Set<String>> graph, String start, String end) {
ArrayList<String> path = new ArrayList<>();
path.add(start);
Queue<ArrayList<String>> queue = new LinkedList<>();
queue.add(path);
while(!queue.isEmpty()){
ArrayList<String> currentPath = queue.poll();
String word = currentPath.get(currentPath.size()-1);
if(word.equals(end)) {
System.out.println(currentPath);
}
Set<String> transformations = graph.get(word);
for(String t : transformations){
if(!currentPath.contains(t)){
ArrayList<String> newPath = new ArrayList<>(currentPath);
newPath.add(t);
queue.add(newPath);
}
}
}
}
    /**
     * Builds the adjacency map: for every dictionary word, the set of
     * dictionary words reachable by removing, changing, or adding exactly
     * one character.  Edges are only added from the word being processed;
     * symmetry comes from every word being processed in turn.  Words with
     * no neighbors get no map entry at all.
     */
    private static Map<String, Set<String>> constructGraph(Set<String> dict) {
        Map<String, Set<String>> map = new HashMap<>();
        for (String word : dict) {
            char[] arr = word.toCharArray();
            for (int i = 0; i < arr.length; i++) {
                // Remove the character at position i.
                String remove = word.substring(0, i) + word.substring(i + 1, word.length());
                if (dict.contains(remove)) {
                    populate(map, word, remove);
                }
                // Change the character at position i to each letter a-z.
                for (char c = 'a'; c <= 'z'; c++) {
                    char temp = arr[i];
                    arr[i] = c;
                    String change = new String(arr);
                    if (!word.equals(change) && dict.contains(change)) {
                        populate(map, word, change);
                    }
                    // Restore the original character for the next iteration.
                    arr[i] = temp;
                }
            }
            // Insert one character (a-z) at each position, including the end.
            for (int i = 0; i < arr.length + 1; i++) {
                for (char c = 'a'; c <= 'z'; c++) {
                    String add = word.substring(0, i) + Character.toString(c) + word.substring(i, word.length());
                    if (dict.contains(add)) {
                        populate(map, word, add);
                    }
                }
            }
        }
        return map;
    }
private static void populate(Map<String, Set<String>> map, String word, String add) {
Set<String> strings = map.get(word);
if(strings == null) {
strings = new HashSet<>();
map.put(word, strings);
}
strings.add(add);
}
}
|
import {Component, OnInit} from '@angular/core';
import {NgxSmartModalComponent, NgxSmartModalService} from 'ngx-smart-modal';
import {Subject} from 'rxjs';
import {FormControl, FormGroup} from '@angular/forms';
import {VideoControlService} from '../../../service/video-control.service';
import {InterpolationService} from '../../../service/interpolation.service';
import {InterpolationType} from '../../../model/InterpolationType';
@Component({
  selector: 'app-video-setting-modal',
  templateUrl: './video-setting-modal.component.html',
  styleUrls: ['./video-setting-modal.component.css']
})
export class VideoSettingModalComponent implements OnInit {
  /**
   * This id should not be changed.
   * It's also hard coded in the template file.
   * It's also used in the taskListComponent to select this modal component.
   */
  public readonly modalId: string = 'VideoSettingModal';

  // Acts as a reset without destroying the original subject
  private unsubscribe: Subject<void> = new Subject<void>();

  // Form backing the playback-rate and interpolation-type selectors.
  public readonly formGroup = new FormGroup({
    'playbackRate': new FormControl(),
    'interpolationType': new FormControl(),
  });

  public hasOptions: boolean = true;

  // Playback-rate options; populated from the video control service.
  public readonly playbackRates: {id: number, label: string}[]= [];

  /**
   * All possible InterpolationTypes are listed here.
   */
  public readonly interpolationTypes: string[] = InterpolationType.values();

  constructor(
    private modalService: NgxSmartModalService,
    private videoControl: VideoControlService,
    private interpolationService: InterpolationService
  ) {
    this.playbackRates = videoControl.playbackRates;
  }

  ngOnInit(): void {
  }

  // NOTE(review): the class declares only `implements OnInit` although it
  // defines ngOnDestroy; consider adding OnDestroy to the implements list
  // and confirm the hook actually fires.
  ngOnDestroy(): void {
    this.unsubscribe.next();
    this.unsubscribe.complete();
  }

  /**
   * Opens the video control menu via cog.
   */
  onOpenMenu() {
    const modal: NgxSmartModalComponent = this.modalService.get(this.modalId);
    modal.setData(true);
    if (modal.hasData()) {
      this.preselectElements();
      modal.open();
    }
  }

  // Clears the modal's data and closes it.
  onClose() {
    const modal: NgxSmartModalComponent = this.modalService.get(this.modalId);
    modal.removeData();
    modal.close();
  }

  onCancel() {
    this.onClose();
  }

  // Applies the selected playback rate; the interpolation type is forced
  // to LINEAR (see comment below) until the backend supports other types.
  onSave() {
    this.videoControl.playbackRate = this.formGroup.controls['playbackRate'].value as number;
    // Interpolation change is disabled until the BE supports changes (per geometry?).
    // this.formGroup.controls['interpolationType'].value as InterpolationType;
    this.interpolationService.interpolationType = InterpolationType.LINEAR;
    this.onClose();
  }

  /**
   * Preselect the settings.
   *
   * @private
   */
  private preselectElements(): void {
    /*
     * Find the index of the current speed on the fly. So we don't
     * depend on the number / order of options.
     * Not found means index -1 and is not allowed
     */
    const playbackRate = this.videoControl.playbackRate;
    const playbackRateIndex = this.playbackRates.findIndex(option => option.id === playbackRate);
    if (playbackRateIndex !== -1) {
      this.formGroup.controls['playbackRate'].reset(this.playbackRates[playbackRateIndex].id);
    }
    // Preselect LINEAR as it's the default value and the only one supported
    // until the BE is ready for different types.
    // const interpolationType = this.interpolationService.interpolationType;
    const interpolationType = InterpolationType.LINEAR;
    if (!!interpolationType) {
      this.formGroup.controls['interpolationType'].reset(interpolationType);
      this.formGroup.controls['interpolationType'].disable();
    }
  }
}
|
import * as path from "path";
import * as punycode from "punycode";
import { RepositoryError } from "../errors";
import { IBaseJson, ITypeJson } from "../lib/interfaces";
import { createDirectory, readFile, walk, writeFile } from "../lib/platform";
import { Post, Type} from "../models";
type Constructor<T = {}> = new (...args: any[]) => T;
// File-system backed repository for Post and Type models.
// Posts are sharded into subdirectories by the first 6 characters of their id;
// Types are stored flat in the types directory, one JSON file per type name.
export default class Repository {
// Factory: constructs a Repository and ensures its directories exist.
public static createRepository = async (repositoryDir: string): Promise<Repository> => {
const repo = new Repository(repositoryDir);
await repo.initializeRepository();
return repo;
};
public readonly postsDir: string;
public readonly typesDir: string;
private repositorydir: string;
constructor(repositoryDir: string) {
this.repositorydir = repositoryDir;
this.postsDir = path.join(this.repositorydir, "posts");
this.typesDir = path.join(this.repositorydir, "types");
}
// Creates the posts/ and types/ directories under the repository root.
public initializeRepository = async () => {
await createDirectory(this.postsDir);
await createDirectory(this.typesDir);
};
// Fetches a single model by key: post id for Post, type name for Type.
// NOTE(review): dispatch is on classParam.name, which breaks under
// minification/renaming — confirm this code is never minified.
public async getOne<TModel>(classParam: Constructor<TModel>, arg: string): Promise<Post | Type> {
switch(classParam.name) {
case "Post":
return await this.getPostById(arg);
case "Type":
return await this.getTypeByName(arg);
}
throw new RepositoryError(`No models of type ${classParam.name} in the repository!`);
}
// Fetches all models of the given class; `query` is currently ignored by
// the underlying query helpers (see TODOs below).
public async get<TModel>(classParam: Constructor<TModel>, query?: IBaseJson): Promise<Post[] | Type[]> {
if (query === undefined) {
query = {};
}
switch(classParam.name) {
case "Post":
return await this.queryPost(query);
case "Type":
return await this.queryType(query);
}
throw new RepositoryError(`No models of type ${classParam.name} in the repository!`);
}
// Persists a Post or Type; throws for any other model class.
public async save<TModel>(model: TModel): Promise<void> {
if (model instanceof Post) {
await this.savePost(model);
return;
}
if (model instanceof Type) {
await this.saveType(model);
return;
}
throw new RepositoryError(`No models of type ${model.constructor.name} in the repository!`);
}
// Returns the on-disk path for a model, or "" for unsupported model classes.
public getPath<TModel>(model: TModel): string {
if (model instanceof Post) {
return this.getPostPath(model.id);
} else if (model instanceof Type) {
return this.getTypeFilePath(model.name);
}
return "";
}
private getPostById = async (postId: string): Promise<Post> => {
const postPath = this.getPostPath(postId);
return this.getPostByPath(postPath);
};
// Reads and parses a post JSON file into a Post instance.
private getPostByPath = async (filePath: string): Promise<Post> => {
const postText = await readFile(filePath);
return new Post(JSON.parse(postText));
};
// Shard directory: posts/<id[0:3]>/<id[3:6]>.
// NOTE(review): String.prototype.substr is deprecated; slice(0, 3)/slice(3, 6)
// would be the modern equivalent.
private getPostDirectory = (postId: string): string => {
const dir1 = postId.substr(0, 3);
const dir2 = postId.substr(3, 3);
return path.join(this.postsDir, dir1, dir2);
};
private getPostPath = (postId: string): string => {
const postDir = this.getPostDirectory(postId);
return path.join(postDir, `${postId}.json`);
};
// Looks up a Type by (validated) name.
private getTypeByName = async (name: string): Promise<Type> => {
const typeName = Type.validateName(name);
const typeFilePath = this.getTypeFilePath(typeName);
return await this.getTypeByPath(typeFilePath);
};
private getTypeByPath = async (filePath: string): Promise<Type> => {
const typeText = await readFile(filePath);
const typeData: ITypeJson = JSON.parse(typeText);
return new Type(typeData);
};
// NOTE(review): Node's built-in `punycode` module is deprecated; consider the
// userland `punycode` package if this is ever updated.
private getTypeFilePath = (typeName: string): string => {
// Convert any Unicode characters to ASCII for the filename
const typeFileName = punycode.toASCII(typeName) + ".json";
const typeFilePath = path.join(this.typesDir, typeFileName);
return typeFilePath;
};
// Walks the posts directory and loads every post (no filtering yet).
private queryPost = async (query: IBaseJson): Promise<Post[]> => {
const posts: Post[] = [];
await walk(this.postsDir, async (filename: string) => {
// TODO: Actually filter stuff based on the query!
const post = await this.getPostByPath(filename);
posts.push(post);
});
return posts;
};
// Walks the types directory and loads every type (no filtering yet).
private queryType = async (query: IBaseJson): Promise<Type[]> => {
const types: Type[] = [];
await walk(this.typesDir, async (filename: string) => {
// TODO: Actually filter stuff based on the query!
const type = await this.getTypeByPath(filename);
types.push(type);
});
return types;
};
// Writes a post as compact JSON, creating its shard directory first.
private savePost = async (post: Post): Promise<void> => {
const postId = post.id;
const postPath = this.getPostPath(postId);
await createDirectory(this.getPostDirectory(postId));
await writeFile(postPath, JSON.stringify(post));
};
// Writes a type as pretty-printed (4-space) JSON.
private saveType = async (type: Type) => {
const typeFilePath = this.getTypeFilePath(type.name);
await writeFile(typeFilePath, JSON.stringify(type, null, 4));
};
}
|
using FluentAssertions;
using Highway.Data.Contexts;
using Highway.Data.Tests.InMemory.Domain;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Highway.Data.Tests.InMemory
{
/// <summary>
/// Tests for <c>IntegerIdentityStrategy</c>: verifies that identities are
/// generated and assigned as sequential integers starting from a seed.
/// </summary>
[TestClass]
public class IntegerIdentityStrategyTests
{
private readonly int seedNumber = 500;
private IntegerIdentityStrategy<Post> target;
[TestInitialize]
public void Setup()
{
// LastValue is static shared state; reset it before each test so
// tests remain independent of execution order.
IntegerIdentityStrategy<Post>.LastValue = seedNumber;
target = new IntegerIdentityStrategy<Post>(x => x.Id);
}
[TestMethod]
public void Next_ShouldReturnNextValue()
{
// Arrange
// Act
int result = target.Next();
// Assert
result.Should().Be(seedNumber + 1);
}
[TestMethod]
public void Assign_ShouldAssignIdOfPost()
{
// Arrange
var post = new Post {Id = 0};
// Act
target.Assign(post);
// Assert
post.Id.Should().Be(seedNumber + 1);
}
}
}
|
namespace RedditVideoRotationBot.Interfaces
{
/// <summary>
/// Abstraction over an FFmpeg process runner, allowing the command
/// execution to be mocked in tests.
/// </summary>
public interface IFfmpegExecutor
{
/// <summary>
/// Runs FFmpeg with the given raw command-line argument string.
/// </summary>
/// <param name="argString">The complete argument string passed to the FFmpeg executable.</param>
void ExecuteFfmpegCommandWithArgString(string argString);
}
}
|
package me.oriharel.seriesmanager.config
import me.oriharel.seriesmanager.interceptor.SameUserCheckInterceptor
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.annotation.Configuration
import org.springframework.web.servlet.config.annotation.InterceptorRegistry
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer
@Configuration
class InterceptorConfig : WebMvcConfigurer {
    @Autowired
    private lateinit var sameUserCheckInterceptor: SameUserCheckInterceptor

    /**
     * Registers [SameUserCheckInterceptor] for all `/api/v1/**` endpoints
     * except the auth endpoints.
     *
     * Fix: the previous `.excludePathPatterns("/**")` call was removed.
     * Spring MVC evaluates exclude patterns with precedence over include
     * patterns (order of registration calls is irrelevant), so excluding
     * `"/**"` meant the interceptor never ran for any request.
     */
    override fun addInterceptors(registry: InterceptorRegistry) {
        registry.addInterceptor(sameUserCheckInterceptor)
                .addPathPatterns("/api/v1/**")
                .excludePathPatterns("/api/v1/auth/**")
    }
}
|
; =============================================================================
; Pure64 -- a 64-bit OS/software loader written in Assembly for x86-64 systems
; Copyright (C) 2008-2017 Return Infinity -- see LICENSE.TXT
;
; INIT ACPI
; =============================================================================
; Locate the ACPI RSDP in the BIOS area, validate it, then walk the
; RSDT (ACPI v1) or XSDT (v2+) entries looking for the MADT ("APIC") and
; HPET tables. Hangs at noACPI/novalidacpi on failure.
; NOTE(review): only 0xE0000-0xFFFFF is scanned; the ACPI spec also allows
; the RSDP in the first KiB of the EBDA — confirm that case cannot occur here.
init_acpi:
mov rsi, 0x00000000000E0000 ; Start looking for the Root System Description Pointer Structure
mov rbx, 'RSD PTR ' ; This in the Signature for the ACPI Structure Table (0x2052545020445352)
searchingforACPI:
lodsq ; Load a quad word from RSI and store in RAX, then increment RSI by 8
cmp rax, rbx
je foundACPI
cmp rsi, 0x00000000000FFFFF ; Keep looking until we get here
jge noACPI ; ACPI tables couldn't be found, Fail.
jmp searchingforACPI
foundACPI: ; Found a Pointer Structure, verify the checksum
push rsi
xor ebx, ebx
mov ecx, 20 ; As per the spec only the first 20 bytes matter
sub rsi, 8 ; Bytes 0 thru 19 must sum to zero
nextchecksum:
lodsb ; Get a byte
add bl, al ; Add it to the running total
sub cl, 1
cmp cl, 0
jne nextchecksum
pop rsi
cmp bl, 0
jne searchingforACPI ; Checksum didn't check out? Then keep looking.
lodsb ; Checksum
lodsd ; OEMID (First 4 bytes)
lodsw ; OEMID (Last 2 bytes)
lodsb ; Grab the Revision value (0 is v1.0, 1 is v2.0, 2 is v3.0, etc)
cmp al, 0
je foundACPIv1 ; If AL is 0 then the system is using ACPI v1.0
jmp foundACPIv2 ; Otherwise it is v2.0 or higher
foundACPIv1:
xor eax, eax
lodsd ; Grab the 32 bit physical address of the RSDT (Offset 16).
mov rsi, rax ; RSI now points to the RSDT
lodsd ; Grab the Signature
cmp eax, 'RSDT' ; Make sure the signature is valid
jne novalidacpi ; Not the same? Bail out
sub rsi, 4
mov [os_ACPITableAddress], rsi ; Save the RSDT Table Address
add rsi, 4
xor eax, eax
lodsd ; Length
add rsi, 28 ; Skip to the Entry offset
sub eax, 36 ; EAX holds the table size. Subtract the preamble
shr eax, 2 ; Divide by 4 (v1 entries are 32-bit pointers)
mov rdx, rax ; RDX is the entry count
xor ecx, ecx
; Push every entry pointer onto the stack; they are popped (in reverse
; order) by findACPITables below.
foundACPIv1_nextentry:
lodsd
push rax
add ecx, 1
cmp ecx, edx
je findACPITables
jmp foundACPIv1_nextentry
foundACPIv2:
lodsd ; RSDT Address
lodsd ; Length
lodsq ; Grab the 64 bit physical address of the XSDT (Offset 24).
mov rsi, rax ; RSI now points to the XSDT
lodsd ; Grab the Signature
cmp eax, 'XSDT' ; Make sure the signature is valid
jne novalidacpi ; Not the same? Bail out
sub rsi, 4
mov [os_ACPITableAddress], rsi ; Save the XSDT Table Address
add rsi, 4
xor eax, eax
lodsd ; Length
add rsi, 28 ; Skip to the start of the Entries (offset 36)
sub eax, 36 ; EAX holds the table size. Subtract the preamble
shr eax, 3 ; Divide by 8 (v2 entries are 64-bit pointers)
mov rdx, rax ; RDX is the entry count
xor ecx, ecx
foundACPIv2_nextentry:
lodsq
push rax
add ecx, 1
cmp ecx, edx
jne foundACPIv2_nextentry
; Pop each pushed table pointer and dispatch on its signature.
findACPITables:
xor ecx, ecx
nextACPITable:
; NOTE(review): after a matched table is parsed (foundAPICTable/foundHPETTable)
; control jumps back here and pops unconditionally, without first re-checking
; ECX against EDX. If the matched table is the LAST entry, this pops a value
; that was never pushed — confirm this cannot happen on real firmware.
pop rsi
lodsd
add ecx, 1
mov ebx, 'APIC' ; Signature for the Multiple APIC Description Table
cmp eax, ebx
je foundAPICTable
mov ebx, 'HPET' ; Signature for the HPET Description Table
cmp eax, ebx
je foundHPETTable
cmp ecx, edx
jne nextACPITable
jmp init_smp_acpi_done ;noACPIAPIC
foundAPICTable:
call parseAPICTable
jmp nextACPITable
foundHPETTable:
call parseHPETTable
jmp nextACPITable
init_smp_acpi_done:
ret
noACPI:
novalidacpi:
jmp $ ; Fatal: hang forever; no ACPI means we cannot continue.
; -----------------------------------------------------------------------------
; parseAPICTable -- Walk the MADT (Multiple APIC Description Table).
;  IN: RSI points just past the 4-byte "APIC" signature.
; Records the Local APIC address, each usable CPU's APIC ID (to 0x5100),
; the detected CPU count, and every I/O APIC address/vector base.
parseAPICTable:
push rcx
push rdx
lodsd ; Length of MADT in bytes
mov ecx, eax ; Store the length in ECX
xor ebx, ebx ; EBX is the counter
lodsb ; Revision
lodsb ; Checksum
lodsd ; OEMID (First 4 bytes)
lodsw ; OEMID (Last 2 bytes)
lodsq ; OEM Table ID
lodsd ; OEM Revision
lodsd ; Creator ID
lodsd ; Creator Revision
xor eax, eax
lodsd ; Local APIC Address
mov [os_LocalAPICAddress], rax ; Save the Address of the Local APIC (upper RAX cleared by the XOR above)
lodsd ; Flags
add ebx, 44 ; 44 header bytes consumed so far
mov rdi, 0x0000000000005100 ; Valid CPU IDs
; Iterate the variable-length APIC structures until EBX reaches the table length.
readAPICstructures:
cmp ebx, ecx
jge parseAPICTable_done
lodsb ; APIC Structure Type
cmp al, 0x00 ; Processor Local APIC
je APICapic
cmp al, 0x01 ; I/O APIC
je APICioapic
cmp al, 0x02 ; Interrupt Source Override
je APICinterruptsourceoverride
; cmp al, 0x03 ; Non-maskable Interrupt Source (NMI)
; je APICnmi
; cmp al, 0x04 ; Local APIC NMI
; je APIClocalapicnmi
; cmp al, 0x05 ; Local APIC Address Override
; je APICaddressoverride
cmp al, 0x09 ; Processor Local x2APIC
je APICx2apic
; cmp al, 0x0A ; Local x2APIC NMI
; je APICx2nmi
jmp APICignore
APICapic: ; Type 0: Processor Local APIC (8 bytes total)
xor eax, eax
xor edx, edx
lodsb ; Length (will be set to 8)
add ebx, eax
lodsb ; ACPI Processor ID
lodsb ; APIC ID
xchg eax, edx ; Save the APIC ID to EDX
lodsd ; Flags (Bit 0 set if enabled/usable)
bt eax, 0 ; Test to see if usable
jnc readAPICstructures ; Read the next structure if CPU not usable
inc word [cpu_detected]
xchg eax, edx ; Restore the APIC ID back to EAX
stosb ; Append the APIC ID to the valid-CPU list at RDI
jmp readAPICstructures ; Read the next structure
APICioapic: ; Type 1: I/O APIC (12 bytes total)
xor eax, eax
lodsb ; Length (will be set to 12)
add ebx, eax
lodsb ; IO APIC ID
lodsb ; Reserved
xor eax, eax
lodsd ; IO APIC Address
push rdi
push rcx
; Each IO APIC gets an 8-byte slot (address + vector base) in os_IOAPICAddress.
mov rdi, os_IOAPICAddress
xor ecx, ecx
mov cl, [os_IOAPICCount]
shl cx, 3 ; Quick multiply by 8
add rdi, rcx
pop rcx
stosd ; Store the IO APIC Address
lodsd ; System Vector Base
stosd ; Store the IO APIC Vector Base
pop rdi
inc byte [os_IOAPICCount]
jmp readAPICstructures ; Read the next structure
APICinterruptsourceoverride: ; Type 2: consumed but not stored
xor eax, eax
lodsb ; Length (will be set to 10)
add ebx, eax
lodsb ; Bus
lodsb ; Source
lodsd ; Global System Interrupt
lodsw ; Flags
jmp readAPICstructures ; Read the next structure
APICx2apic: ; Type 9: Processor Local x2APIC (16 bytes total)
xor eax, eax
xor edx, edx
lodsb ; Length (will be set to 16)
add ebx, eax
lodsw ; Reserved; Must be Zero
lodsd ; x2APIC ID
xchg eax, edx ; Save the x2APIC ID to EDX
lodsd ; Flags (Bit 0 set if enabled/usable)
bt eax, 0 ; Test to see if usable
jnc APICx2apicEnd ; Read the next structure if CPU not usable
xchg eax, edx ; Restore the x2APIC ID back to EAX
; TODO - Save the ID's somewhere
APICx2apicEnd:
lodsd ; ACPI Processor UID
jmp readAPICstructures ; Read the next structure
APICignore: ; Unknown type: skip Length-2 remaining bytes
xor eax, eax
lodsb ; We have a type that we ignore, read the next byte
add ebx, eax
add rsi, rax
sub rsi, 2 ; For the two bytes just read
jmp readAPICstructures ; Read the next structure
parseAPICTable_done:
pop rdx
pop rcx
ret
; -----------------------------------------------------------------------------
; -----------------------------------------------------------------------------
; parseHPETTable -- Walk the HPET Description Table.
;  IN: RSI points just past the 4-byte "HPET" signature.
; Saves the HPET base address to os_HPETAddress; all other fields are
; consumed and discarded.
parseHPETTable:
lodsd ; Length of HPET in bytes
lodsb ; Revision
lodsb ; Checksum
lodsd ; OEMID (First 4 bytes)
lodsw ; OEMID (Last 2 bytes)
lodsq ; OEM Table ID
lodsd ; OEM Revision
lodsd ; Creator ID
lodsd ; Creator Revision
lodsd ; Event Timer Block ID
lodsd ; Base Address Settings
lodsq ; Base Address Value
mov [os_HPETAddress], rax ; Save the Address of the HPET
lodsb ; HPET Number
lodsw ; Main Counter Minimum
lodsw ; Page Protection And OEM Attribute
ret
; -----------------------------------------------------------------------------
; =============================================================================
; EOF
|
import * as React from 'react';
import { baseLayers, overlayLayers, IAttributionDef } from 'fm3/mapDefinitions';
import { Translator } from 'fm3/l10nInjector';
// Props for the Attribution list component.
interface IProps {
t: Translator;
imhd: boolean;
mapType: string; // TODO enum
overlays: string[]; // TODO enum
}
// Renders the map attribution list: one <li> per attribution type, built from
// the active base layer and any active overlay layers. Optionally appends an
// imhd.sk attribution entry for public-transport routing.
const Attribution: React.FC<IProps> = ({ t, mapType, overlays, imhd }) => {
return (
<ul style={{ padding: '10px 0 0 20px' }}>
{categorize(
[
...baseLayers.filter(({ type }) => mapType === type),
...overlayLayers.filter(({ type }) => overlays.includes(type)),
].reduce((a, b) => [...a, ...b.attribution], [] as IAttributionDef[]),
).map(({ type, attributions }) => (
<li key={type}>
{t(`mapLayers.type.${type}`)}{' '}
{attributions.map((a, j) => [
j > 0 ? ', ' : '',
a.url ? (
<a
key={a.type}
href={a.url}
target="_blank"
rel="noopener noreferrer"
>
{a.name || (a.nameKey && t(a.nameKey))}
</a>
) : (
a.name || (a.nameKey && t(a.nameKey))
),
])}
</li>
))}
{imhd && (
<li>
{'; '}
{t('routePlanner.imhdAttribution')}
{' ©\xa0'}
<a href="https://imhd.sk" target="_blank" rel="noopener noreferrer">
imhd.sk
</a>
</li>
)}
</ul>
);
};
/**
 * Groups attribution definitions by their `type`, preserving first-seen
 * order of the types and dropping duplicate (reference-equal) entries
 * within a group.
 */
function categorize(
  attributions: IAttributionDef[],
): Array<{ type: string; attributions: IAttributionDef[] }> {
  const grouped = new Map<string, IAttributionDef[]>();
  for (const attribution of attributions) {
    const bucket = grouped.get(attribution.type);
    if (bucket === undefined) {
      grouped.set(attribution.type, [attribution]);
    } else if (!bucket.includes(attribution)) {
      bucket.push(attribution);
    }
  }
  return [...grouped.entries()].map(([type, attrs]) => ({
    type,
    attributions: attrs,
  }));
}
export default Attribution;
|
import {Page, ViewController, Events} from 'ionic-framework/ionic'
@Page({
templateUrl: 'build/pages/home/language.html'
})
// Language selection page: publishes the chosen language on the global
// event bus ('app:language') and dismisses itself.
export class LanguagePage {
constructor(viewCtrl: ViewController,
events: Events) {
this.viewCtrl = viewCtrl;
this.events = events;
// Supported UI languages shown in the template.
this.languages = [
{id: 'pl', name: 'Polski'},
{id: 'en', name: 'English'}
]
}
// Broadcast the selected language, then close the page.
changeLanguage(language: any) {
this.events.publish('app:language', language);
this.dismiss();
}
// Close this view via its ViewController.
dismiss() {
this.viewCtrl.dismiss();
}
}
|
/* Created by Ya Lin. 2019/8/6 15:16:11 */
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.Text;
using Thunder.Blazor.Libs;
namespace ThunderBlazorTest.Libs
{
/// <summary>
/// Tests for the <c>CssBuild</c> fluent CSS class-list builder.
/// </summary>
class Cssbuildtest
{
[Test]
public void cssbuildtest()
{
var s1 = "alert alert-secondary mt-4";
var css = CssBuild.New.Add(s1);
var s2 = css.Build().CssString;
// Basic build round-trip
Assert.IsTrue(s1 == s2);
// Add a single class
Assert.IsTrue("alert alert-secondary mt-4 btn" == css.Add("btn").Build().CssString);
// Remove a single class
Assert.IsTrue("alert mt-4 btn" == css.Remove("alert-secondary").Build().CssString);
// Add a space-separated list of classes
Assert.IsTrue("alert mt-4 btn top-row px-4" == css.Add("top-row px-4").Build().CssString);
// Adding duplicates (or an empty string) is a no-op
Assert.IsTrue("alert mt-4 btn top-row px-4" == css.Add("top-row px-4").Build().CssString);
Assert.IsTrue("alert mt-4 btn top-row px-4" == css.Add("").Build().CssString);
// Removing an already-removed value (or an empty string) is a no-op
Assert.IsTrue("alert mt-4 btn top-row px-4" == css.Remove("alert-secondary").Build().CssString);
Assert.IsTrue("alert mt-4 btn top-row px-4" == css.Remove("").Build().CssString);
// Removing a value that was never present is a no-op
Assert.IsTrue("alert mt-4 btn top-row px-4" == css.Remove("notthis").Build().CssString);
// Remove a space-separated list of classes
Assert.IsTrue("alert mt-4 btn" == css.Remove("top-row px-4").Build().CssString);
// Element-by-element comparison of the internal add/remove/result lists
var listadd = new List<string>
{
"alert",
"alert-secondary",
"mt-4",
"btn",
"top-row",
"px-4"
};
var listremove = new List<string>
{
"alert-secondary",
"notthis",
"top-row",
"px-4"
};
var listCss = new List<string>
{
"alert",
"mt-4",
"btn",
};
css.CssAdd.ForEach(x => Assert.IsTrue(listadd.Contains(x)));
css.CssRemove.ForEach(x => Assert.IsTrue(listremove.Contains(x)));
css.CssList.ForEach(x => Assert.IsTrue(listCss.Contains(x)));
// AddNoList / AddOnHasList conditional behaviour, plus Reset
Assert.IsTrue(css.AddNoList("cssok").Build().CssString == "alert mt-4 btn");
Assert.IsTrue(css.AddOnHasList("notalone").Build().CssString == "alert mt-4 btn notalone");
Assert.IsTrue(css.Reset().Build().CssString == "");
Assert.IsTrue(css.AddOnHasList("notalone").Build().CssString == "");
Assert.IsTrue(css.AddNoList("cssok").Build().CssString == "cssok");
// Null input is ignored
string c = null;
Assert.IsTrue(css.Add(c).Build().CssString == "cssok");
}
}
}
|
<?php
declare(strict_types=1);
namespace GreenLinks\Psr16Adapter\Exception;
use Psr\SimpleCache\CacheException;
use Exception;
/**
 * Generic adapter exception; implements the PSR-16 CacheException marker
 * interface so callers can catch it via the PSR contract.
 */
class GeneralException extends Exception implements CacheException
{
}
|
#include "Schema.h"
#include <IPC/Managed/detail/Interop/TransportImpl.h>
namespace IPC
{
namespace Managed
{
namespace detail
{
namespace Interop
{
// Explicit instantiation of the interop Transport for the Calc
// request/response message pair, so the template code is emitted
// into this translation unit.
// NOTE(review): the standard explicit-instantiation syntax for a class
// template is `template class Transport<...>;`. The form without `class`
// appears to rely on an MSVC extension — confirm it builds on the
// intended compilers.
template Transport<Calc::Request, Calc::Response>;
} // Interop
} // detail
} // Managed
} // IPC
|
#!/bin/sh
# Launch the moOde OLED status display (luma.examples) using the
# ws15old display configuration file.
python /home/pi/oled/luma.examples/examples/moode.py -f /home/pi/oled/luma.examples/examples/ws15old.cfg
|
#!/bin/bash
# Rebuild the dimred source distribution and reinstall it locally.

# Remove stale build artifacts; -f avoids a spurious error (and non-zero
# exit) when dist/ does not exist yet.
rm -rf dist

# Build a fresh source distribution into dist/.
python3 setup.py sdist

# -y skips the interactive confirmation prompt, which would otherwise
# hang this script when run non-interactively (CI, cron, etc.).
pip3 uninstall -y dimred
pip3 install dist/dimred-*.gz
|
package com.netcai.admin.service;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.annotations.Param;
import com.netcai.admin.entity.OrderInfo;
import com.netcai.admin.utils.PageUtil;
import com.netcai.admin.vo.OrderTaskVo;
import com.netcai.admin.vo.PresentOrderVo;
public interface OrderInfoService {
/**
 * Find a single order by its id.
 */
public OrderInfo getOrderInfoById(Long id);
/**
 * Insert a new order.
 */
public int insertOrderInfo(OrderInfo orderInfo);
/**
 * Query orders by criteria, paged.
 */
public PageUtil getPageResult(OrderInfo orderInfo, int currentPageNum, int currentPageSize);
/**
 * Query orders by criteria.
 */
public List<OrderInfo> getResult(OrderInfo orderInfo, Integer currentPageNum, Integer currentPageSize);
/**
 * Total order count and spend amount, grouped by region.
 */
public Map<String, Object> getCountAndAmount();
/**
 * Today's order count and total amount.
 */
public Map<String, Object> getOrderCountAndAmountByToday();
/**
 * Order details for the current day.
 */
public List<OrderInfo> getOrderInfoByDate(OrderInfo orderInfo);
/**
 * Order details for the current day, showing only one row per buyer.
 */
public List<OrderInfo> getOrderInfoByDateByBuyer(OrderInfo orderInfo);
/**
 * Number of buyers who placed an order today.
 */
public int getBuyerCountByTodayOrder();
/**
 * Total number of buyers purchasing that day, across all regions.
 */
public int getBuyerNumByThatDay();
/**
 * Order counts over one period (daily or monthly, per timeType), grouped by region.
 */
public Map<String, List<Integer>> getCountByTimeType(Integer timeType);
/**
 * Order amounts over one period (daily or monthly, per timeType), grouped by region.
 */
public Map<String, List<BigDecimal>> getAmountByTimeType(Integer timeType);
/**
 * Top ten buyers by spend within the last week.
 */
public List<Map<String, Object>> getTopTenAmount();
/**
 * Batch-update the trade status of the given orders.
 */
public int batchUpdateStatus(List<Long> ids, Integer tradeStatus);
/**
 * Update the preferred delivery time of an order.
 */
public int updateBestTime(Long id, Date bestTime);
/**
 * Delivery orders for the current day.
 */
public ArrayList<OrderTaskVo> getByThatDay();
/**
 * Orders for a seller on a given date (year-month-day).
 */
public List<PresentOrderVo> getOrdersBySellerIdAndDate(@Param(value = "sellerId") Long sellerId,
@Param(value = "time") String time);
/**
 * Delete a single order and cancel all of its order items.
 */
public void deleteOrderInfo(Long buyerId, Long orderId);
/**
 * Today's revenue from 12:00 noon to 24:00.
 */
public List<OrderInfo> getRealTimeRevenue(OrderInfo o);
/**
 * Totals for today's revenue from 12:00 noon to 24:00.
 */
public Map<String, Object> getRealTimeRevenueCount();
/**
 * Insert offline payment-collection data.
 */
public int insertBuyerReceipt();
}
|
def attack(iv, c, pos, p, p_):
    """
    Replaces the original plaintext with a new plaintext at a position in the ciphertext.
    :param iv: the initialization vector
    :param c: the ciphertext
    :param pos: the position to modify at
    :param p: the original plaintext
    :param p_: the new plaintext
    :return: a tuple containing the modified initialization vector and the modified ciphertext
    """
    forged_iv = bytearray(iv)
    forged_c = bytearray(c)
    for i in range(len(p)):
        # XOR mask flips the decrypted byte from p[i] to p_[i].
        mask = p[i] ^ p_[i]
        target = pos + i
        if target < 16:
            # Position falls in the first block: flip bits in the IV.
            forged_iv[target] = iv[target] ^ mask
        else:
            # Later blocks: flip bits in the preceding ciphertext block.
            forged_c[target - 16] = c[target - 16] ^ mask
    return forged_iv, forged_c
|
import { prepareTheme, Theme, ThemeMode } from "./themes"
import { createReducer } from "@reduxjs/toolkit"
import { changeTheme } from "./action"
// Theme slice of the store: the resolved Theme values plus the active mode name.
export interface ThemeStore extends Theme {
name: ThemeMode
}
// Reducer: starts from the default prepared theme and rebuilds the theme state
// whenever `changeTheme` is dispatched. The `cached` flag (default false) is
// forwarded to prepareTheme — its exact semantics live in the themes module.
export const theme = createReducer(prepareTheme(), builder =>
builder.addCase(changeTheme, (state, { payload: { name, cached = false } }) => ({
...state,
...prepareTheme(name, cached),
})),
)
|
<?php
/**
* MIT License
* For full license information, please view the LICENSE file that was distributed with this source code.
*/
namespace SprykerTest\Zed\Install\Business\Stage\Section;
use Codeception\Test\Unit;
use Spryker\Zed\Install\Business\Stage\Section\Command\Command;
use Spryker\Zed\Install\Business\Stage\Section\Command\Exception\CommandExistsException;
use Spryker\Zed\Install\Business\Stage\Section\Section;
/**
* Auto-generated group annotations
*
* @group SprykerTest
* @group Zed
* @group Install
* @group Business
* @group Stage
* @group Section
* @group SectionTest
* Add your own group annotations below this line
*/
class SectionTest extends Unit
{
    /**
     * Name used for the Section under test.
     *
     * @var string
     */
    public const SECTION_NAME = 'section';
    /**
     * The section exposes the name it was constructed with.
     *
     * @return void
     */
    public function testGetName()
    {
        $section = new Section(static::SECTION_NAME);
        $this->assertSame(static::SECTION_NAME, $section->getName());
    }
    /**
     * A single command can be added and is reported by getCommands().
     *
     * @return void
     */
    public function testAddCommandToSection()
    {
        $section = new Section(static::SECTION_NAME);
        $section->addCommand(new Command('command'));
        $this->assertCount(1, $section->getCommands());
    }
    /**
     * addCommand() is fluent, so multiple commands can be chained.
     *
     * @return void
     */
    public function testAddCommandsToSection()
    {
        $section = new Section(static::SECTION_NAME);
        $section
            ->addCommand(new Command('commandA'))
            ->addCommand(new Command('commandB'));
        $this->assertCount(2, $section->getCommands());
    }
    /**
     * Adding two commands with the same name raises CommandExistsException.
     *
     * @return void
     */
    public function testAddCommandsWithSameNameThrowsException()
    {
        $this->expectException(CommandExistsException::class);
        $section = new Section(static::SECTION_NAME);
        $section
            ->addCommand(new Command('commandA'))
            ->addCommand(new Command('commandA'));
    }
}
|
// Demo application exercising various window border styles, docking,
// sizing and color forms.
// NOTE(review): in Delphi the FIRST Application.CreateForm call determines
// the MainForm — here that is TFWidowColor, while the MainAlfa unit's
// Falfa is created last. Confirm that is intentional.
// NOTE(review): "Widow" in FWidowSize/FWidowColor looks like a typo for
// "Window"; renaming would touch the corresponding units, so it is only
// flagged here.
program Project1;
uses
Vcl.Forms,
MainAlfa in 'MainAlfa.pas' {Falfa},
bsdialog in 'bsdialog.pas' {Fbsdialog},
bsNone in 'bsNone.pas' {FbsNone},
bsSigle in 'bsSigle.pas' {FbsSigle},
bsSizeble in 'bsSizeble.pas' {FbsSizeble},
bsSizeToolWin in 'bsSizeToolWin.pas' {FbsSizeToolWin},
bsToolWindow in 'bsToolWindow.pas' {FbsToolWindow},
enable in 'enable.pas' {FEnable},
Dock in 'Dock.pas' {FDock},
BorderWidth in 'BorderWidth.pas' {FBorderWidth},
WindowSize in 'WindowSize.pas' {FWidowSize},
WidowColor in 'WidowColor.pas' {FWidowColor};
{$R *.res}
begin
Application.Initialize;
Application.MainFormOnTaskbar := True;
Application.CreateForm(TFWidowColor, FWidowColor);
Application.CreateForm(TFWidowSize, FWidowSize);
Application.CreateForm(TFBorderWidth, FBorderWidth);
Application.CreateForm(TFDock, FDock);
Application.CreateForm(TFEnable, FEnable);
Application.CreateForm(TFbsToolWindow, FbsToolWindow);
Application.CreateForm(TFbsSizeToolWin, FbsSizeToolWin);
Application.CreateForm(TFbsSizeble, FbsSizeble);
Application.CreateForm(TFbsSigle, FbsSigle);
Application.CreateForm(TFbsNone, FbsNone);
Application.CreateForm(TFbsdialog, Fbsdialog);
Application.CreateForm(TFalfa, Falfa);
Application.Run;
end.
|
# FactoryBot factory for Address records, populated with Faker data.
FactoryBot.define do
factory :address do
# Cell phone number with dashes stripped.
phone_number { Faker::PhoneNumber.cell_phone.gsub(/-/, '') }
# Random Japanese prefecture id (1..47).
prefecture_id { rand(1..47) }
postal_code { Faker::Address.postcode }
city { Faker::Address.city }
house_number { Faker::Address.building_number }
building_name { Faker::Address.street_name }
## Adding `association :user` here causes an infinite loop when creating a user
end
end
|
package ch.epfl.bluebrain.nexus.delta.sdk
import ch.epfl.bluebrain.nexus.delta.kernel.syntax.{ClassTagSyntax, InstantSyntax, KamonSyntax, TaskSyntax}
import ch.epfl.bluebrain.nexus.delta.rdf.instances.{SecretInstances, TripleInstances, UriInstances}
import ch.epfl.bluebrain.nexus.delta.rdf.syntax.{IriSyntax, IterableSyntax, JsonLdEncoderSyntax, JsonSyntax, PathSyntax, UriSyntax}
import ch.epfl.bluebrain.nexus.delta.sdk.instances.{CredentialsInstances, OffsetJsonLdInstances}
import ch.epfl.bluebrain.nexus.delta.sdk.syntax.{HttpRequestSyntax, HttpResponseFieldsSyntax, IOSyntax}
import ch.epfl.bluebrain.nexus.delta.sourcing.projections.stream.CancelableStreamSyntax
import ch.epfl.bluebrain.nexus.delta.sourcing.syntax.{OffsetSyntax, ProjectionStreamSyntax}
/**
 * Aggregate instances and syntax from rdf plus the current sdk instances and syntax to avoid importing multiple
 * instances and syntax.
 *
 * Importing `implicits._` brings every mixed-in instance/syntax trait below
 * into scope in one statement.
 */
package object implicits
    extends TripleInstances
    with UriInstances
    with SecretInstances
    with OffsetJsonLdInstances
    with CredentialsInstances
    with JsonSyntax
    with IriSyntax
    with JsonLdEncoderSyntax
    with UriSyntax
    with PathSyntax
    with IterableSyntax
    with KamonSyntax
    with IOSyntax
    with HttpRequestSyntax
    with HttpResponseFieldsSyntax
    with TaskSyntax
    with ClassTagSyntax
    with OffsetSyntax
    with ProjectionStreamSyntax
    with CancelableStreamSyntax
    with InstantSyntax
|
using Core.Data.Dtos;
using Core.Data.Interfaces.Audit;
using Core.Data.Interfaces.Entitys;
namespace Core.Data.Entitys
{
/// <summary>
/// Base class for document-style entities that carry a code and a name,
/// on top of the common <see cref="BaseEntity"/> fields.
/// </summary>
public abstract class BaseDocument : BaseEntity, IBaseDocument, IAuditable
{
    /// <summary>
    /// Document code. Replaces a redundant hand-written backing field
    /// (the manual property/field pair added nothing over an auto-property).
    /// </summary>
    public string Code { get; set; }

    /// <summary>Display name of the document.</summary>
    public string Name { get; set; }

    /// <summary>
    /// Sets <see cref="Code"/>. Kept for backward compatibility with callers
    /// that use the method form instead of the property setter.
    /// </summary>
    public void SetCode(string code)
    {
        Code = code;
    }

    /// <summary>
    /// Copies the shared fields (base DTO fields plus Code and Name)
    /// from the DTO onto this entity.
    /// </summary>
    protected void SetDto(BaseDocumentDto dto)
    {
        SetDto((BaseDto)dto);
        Code = dto.Code;
        Name = dto.Name;
    }
}
}
|
#!/bin/bash
# Open an interactive bash shell inside the container named/ID'd by $1.
sudo docker exec -t -i "$1" /bin/bash
|
import styled, { css } from "styled-components";
export default styled.img`
width: 500px;
height: 285px;
border-radius: 10px;
object-fit: cover;
margin-top: 0.5rem;
${(props) =>
props.newtweet &&
css`
margin-bottom: 1.5rem;
`}
@media screen and (max-width: 530px) {
width: 350px;
height: 260px;
}
@media screen and (max-width: 430px) {
width: 100%;
height: 220px;
}
`;
|
#tool nuget:?package=GitVersion.CommandLine
#tool nuget:?package=vswhere
// Build script for SGTabbedPager: paths to the solution, projects,
// Release output folders, nuspecs and the artifacts directory.
var sln = new FilePath("SGTabbedPager.sln");
var project = new FilePath("SGTabbedPager/SGTabbedPager.csproj");
var projectMvx = new FilePath("SGTabbedPagerMvx/SGTabbedPagerMvx.csproj");
var binDir = new DirectoryPath("SGTabbedPager/bin/Release");
var binDirMvx = new DirectoryPath("SGTabbedPagerMvx/bin/Release");
var nuspec = new FilePath("SGTabbedPager.nuspec");
var nuspecMvx = new FilePath("SGTabbedPagerMvx.nuspec");
var outputDir = new DirectoryPath("artifacts");
var target = Argument("target", "Default");
var isRunningOnAppVeyor = AppVeyor.IsRunningOnAppVeyor;
var isPullRequest = AppVeyor.Environment.PullRequest.IsPullRequest;
var releaseNotes = ParseReleaseNotes("./releasenotes.md").Notes.ToArray();
// Remove all bin/obj folders and the artifacts directory.
Task("Clean").Does(() =>
{
CleanDirectories("./**/bin");
CleanDirectories("./**/obj");
CleanDirectories(outputDir.FullPath);
});
FilePath msBuildPath;
// Locate MSBuild 15 via vswhere (null if no Visual Studio is installed).
Task("ResolveBuildTools")
.Does(() =>
{
var vsLatest = VSWhereLatest();
msBuildPath = (vsLatest == null)
? null
: vsLatest.CombineWithFilePath("./MSBuild/15.0/Bin/MSBuild.exe");
});
GitVersion versionInfo = null;
// Compute the semantic version with GitVersion: first run updates
// AssemblyInfo and publishes to the build server, second run captures
// the JSON output for use in packaging.
Task("Version").Does(() => {
var branchName = AppVeyor.Environment.Repository.Branch;
GitVersion(new GitVersionSettings {
UpdateAssemblyInfo = true,
OutputType = GitVersionOutput.BuildServer,
Branch = branchName
});
versionInfo = GitVersion(new GitVersionSettings {
OutputType = GitVersionOutput.Json,
Branch = branchName
});
Information("VI:\t{0}", versionInfo.FullSemVer);
});
Task("Restore").Does(() => {
NuGetRestore(sln);
});
// Release build of the solution for the iPhone platform.
Task("Build")
.IsDependentOn("Clean")
.IsDependentOn("Version")
.IsDependentOn("Restore")
.IsDependentOn("ResolveBuildTools")
.Does(() => {
var settings = new MSBuildSettings
{
Configuration = "Release",
ToolPath = msBuildPath
};
settings.Properties.Add("Platform", new [] {"iPhone"});
MSBuild(sln, settings);
});
// Collects the Release assemblies matching `pattern` under `assemblyDir`
// and packs them with `nuspecFile`. Shared by the Package and PackageMvx
// tasks below, which previously duplicated this logic line-for-line and
// only differed in paths and tags.
void PackNuGet(DirectoryPath assemblyDir, FilePath nuspecFile, string[] tags, string pattern)
{
    EnsureDirectoryExists(outputDir);

    var dllDir = assemblyDir + "/" + pattern;
    Information("Dll Dir: {0}", dllDir);

    var nugetContent = new List<NuSpecContent>();
    foreach (var dll in GetFiles(dllDir))
    {
        var dllString = dll.ToString();
        // Skip Mono debug symbol files; only ship assemblies/docs.
        if (dllString.EndsWith(".mdb"))
            continue;
        Information("File: {0}", dllString);
        nugetContent.Add(new NuSpecContent
        {
            Target = "lib/Xamarin.iOS10",
            Source = dllString
        });
    }

    Information("File Count {0}", nugetContent.Count);

    NuGetPack(nuspecFile, new NuGetPackSettings
    {
        Authors = new [] { "Tomasz Cielecki" },
        Owners = new [] { "Tomasz Cielecki" },
        IconUrl = new Uri("http://i.imgur.com/V3983YY.png"),
        ProjectUrl = new Uri("https://github.com/Cheesebaron/SGTabbedPager"),
        LicenseUrl = new Uri("https://github.com/Cheesebaron/SGTabbedPager/blob/master/LICENSE"),
        Copyright = "Copyright (c) Tomasz Cielecki",
        RequireLicenseAcceptance = false,
        Tags = tags,
        Version = versionInfo.NuGetVersion,
        ReleaseNotes = releaseNotes,
        Symbols = false,
        NoPackageAnalysis = true,
        OutputDirectory = outputDir,
        Verbosity = NuGetVerbosity.Detailed,
        Files = nugetContent,
        BasePath = "/."
    });
}

// Pack the plain SGTabbedPager NuGet package.
Task("Package")
    .IsDependentOn("Build")
    .Does(() => PackNuGet(
        binDir,
        nuspec,
        new [] {"monotouch", "ui", "pager", "xamarin", "ios"},
        "SGTabbedPager.*"));

// Pack the MvvmCross flavour of the package.
Task("PackageMvx")
    .IsDependentOn("Build")
    .Does(() => PackNuGet(
        binDirMvx,
        nuspecMvx,
        new [] {"monotouch", "mvvmcross", "ui", "pager", "xamarin", "ios"},
        "SGTabbedPagerMvx.*"));
// Upload every produced artifact to AppVeyor, but only for CI builds of
// non-pull-request branches.
Task("UploadAppVeyorArtifact")
.IsDependentOn("Package")
.IsDependentOn("PackageMvx")
.WithCriteria(() => !isPullRequest)
.WithCriteria(() => isRunningOnAppVeyor)
.Does(() => {
Information("Artifacts Dir: {0}", outputDir.FullPath);
foreach(var file in GetFiles(outputDir.FullPath + "/*")) {
Information("Uploading {0}", file.FullPath);
AppVeyor.UploadArtifact(file.FullPath);
}
});
// Default entry point: build, pack and (on CI) upload.
Task("Default")
.IsDependentOn("UploadAppVeyorArtifact");
RunTarget(target);
|
<?php declare(strict_types=1);
namespace Respector\Test\Conceptions;
use Respector\Analyze;
abstract class RespectDDDTest extends BaseConceptionTest
{
    /**
     * The domain layer must stay free of infrastructure and framework coupling.
     */
    public function testNotInfrastructureReferencesInDomain(): void
    {
        $domainFiles = Analyze::create('composer.json')->inNamespace('Domain\\');
        $forbiddenNamespaces = ['Infrastructure\\', 'Symfony\\'];
        foreach ($domainFiles as $file) {
            foreach ($forbiddenNamespaces as $namespace) {
                $this->assertHasNoDependencyTo($namespace, $file);
            }
        }
    }

    /**
     * Guards the expected number of Doctrine migration classes.
     */
    public function testCountMigrations(): void
    {
        $migrations = Analyze::create('composer.json')
            ->inNamespace('Infrastructure\\Doctrine\\Migrations');
        $this->assertCount(2, $migrations);
    }
}
|
var http = require('../../utils/http.js');
/**
 * Generic product list page.
 *
 * `sts` selects the data source:
 *   0 = products of a tag group (title comes from options.title)
 *   1 = new arrivals           2 = limited-time discount
 *   3 = daily hot sellers      4 = coupon activity products
 *   5 = my collected products
 */
Page({
  data: {
    sts: 0,        // list type, see table above (arrives as a string)
    prodList: [],  // accumulated product records across pages
    title: "",     // navigation title used for sts == 0 / unknown sts
    current: 1,    // current page number (1-based)
    size: 10,      // page size
    pages: 0,      // total page count reported by the server
    tagid: 0       // tag id (sts == 0) or coupon id (sts == 4)
  },

  /**
   * Page load: remember the query options in `data`, set the navigation
   * bar title and fetch the first page.
   */
  onLoad: function (options) {
    this.setData({
      current: 1,
      pages: 0,
      sts: options.sts,
      title: options.title ? options.title : ""
    });
    if (options.tagid) {
      // Stored so pagination can re-use it (tag id or coupon id).
      this.setData({
        tagid: options.tagid
      });
    }
    // sts arrives as a string; object-key lookup handles that fine.
    var fixedTitles = {
      1: '新品推荐',
      2: '限时特惠',
      3: '每日疯抢',
      4: '优惠券活动商品',
      5: '我的收藏商品'
    };
    wx.setNavigationBarTitle({
      title: fixedTitles[this.data.sts] || this.data.title
    });
    this.loadProdData();
  },

  /**
   * Loads one page of products for the current list type. Reads all of
   * its parameters from `this.data`, so it can be called again for
   * pagination (see onReachBottom) without the original onLoad options.
   */
  loadProdData: function () {
    var sts = this.data.sts;
    if (sts == 0) {
      // products belonging to a tag group
      this.getTagProd();
    } else if (sts == 1) {
      // new arrivals
      this.getActProd("/prod/lastedProdPage");
    } else if (sts == 2) {
      // limited-time discount
      this.getActProd("/prod/discountProdList");
    } else if (sts == 3) {
      // daily hot sellers
      this.getActProd("/prod/moreBuyProdList");
    } else if (sts == 4) {
      // Bug fix: read the coupon id from data (saved in onLoad) instead of
      // the onLoad `options` object, which is undefined when this method is
      // re-invoked from onReachBottom and previously caused a TypeError.
      this.getProdByCouponId(this.data.tagid);
    } else if (sts == 5) {
      // my collection
      this.getCollectionProd();
    }
  },

  /**
   * Shared result handler: replaces the list on the first page, appends on
   * later pages, records the page count and hides the loading spinner.
   */
  appendPage: function (res) {
    var list = res.current == 1 ? res.records : this.data.prodList.concat(res.records);
    this.setData({
      prodList: list,
      pages: res.pages
    });
    wx.hideLoading();
  },

  /** Fetches one page of an "activity" style list from the given endpoint. */
  getActProd: function (url) {
    var ths = this;
    wx.showLoading();
    http.request({
      url: url,
      method: "GET",
      data: {
        current: ths.data.current,
        size: ths.data.size
      },
      callBack: function (res) {
        ths.appendPage(res);
      }
    });
  },

  /** Fetches one page of the user's collected products. */
  getCollectionProd: function () {
    // Same request/paging contract as the activity lists; only the
    // endpoint differs, so reuse getActProd.
    this.getActProd("/p/user/collection/prods");
  },

  /** Fetches one page of products for the current tag (`data.tagid`). */
  getTagProd: function (id) {
    var ths = this;
    wx.showLoading();
    http.request({
      url: "/prod/prodListByTagId",
      method: "GET",
      data: {
        tagId: ths.data.tagid,
        current: ths.data.current,
        size: ths.data.size
      },
      callBack: function (res) {
        ths.appendPage(res);
      }
    });
  },

  /** Fetches one page of products attached to the given coupon id. */
  getProdByCouponId: function (id) {
    var ths = this;
    wx.showLoading();
    http.request({
      url: "/coupon/prodListByCouponId",
      method: "GET",
      data: {
        couponId: id,
        current: ths.data.current,
        size: ths.data.size
      },
      callBack: function (res) {
        ths.appendPage(res);
      }
    });
  },

  onReady: function () {
  },

  onShow: function () {
  },

  onHide: function () {
  },

  onUnload: function () {
  },

  onPullDownRefresh: function () {
  },

  /**
   * Infinite scroll: fetch the next page while more pages remain.
   */
  onReachBottom: function () {
    if (this.data.current < this.data.pages) {
      this.setData({
        current: this.data.current + 1
      });
      this.loadProdData();
    }
  },

  onShareAppMessage: function () {
  }
})
|
create or replace
PROCEDURE SAPC_MXP_ATTR_DELETE (Pattrid number)
IS
-- Return code populated by MXP_ATTR_DELETE; currently not inspected.
rc number;
BEGIN
/****** This stored procedure enables RT to call MC_REPOSITORY_DELETE ******/
-- Delegate to the operational delete procedure (second argument 0).
MXP_ATTR_DELETE (Pattrid, 0, rc);
END;
-- Expose the procedure to the runtime schema and grant execution rights.
create or replace synonym MXMC_RT.SAPC_MXP_ATTR_DELETE for MXMC_OPER.SAPC_MXP_ATTR_DELETE;
grant execute on MXMC_OPER.SAPC_MXP_ATTR_DELETE to mxmc_rt_role;
commit;
|
import java.io.IOException;
import java.util.Set;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import redis.clients.jedis.Jedis;
import com.cedarsoftware.util.io.JsonWriter;
/**
* Save data to redis
*/
/**
 * Imports movie JSON records into Redis: builds autocomplete prefixes and
 * stores the geocoded, serialized movie object keyed by title.
 */
public class DataImporter {

    /** Saves a movie to Redis, skipping records without location info. */
    public static void saveMovieToRedis(JSONObject movie, Jedis jedis) {
        Set<String> keyset = movie.keySet();
        // Only index movies that carry a filming location — they are shown on a map.
        if (keyset.contains("locations") && movie.get("locations") != null) {
            addToAutoComplete(jedis, (String) movie.get("title"), "title");
            processFilmData(movie, jedis);
        }
    }

    /**
     * Adds every prefix of {@code title} to the sorted set
     * {@code <category>_compl}, plus the full title suffixed with '*' to mark
     * a complete word.
     *
     * @return true when the title was indexed, false for null/blank titles
     */
    public static boolean addToAutoComplete(Jedis jedis, String title,
            String category) {
        // Robustness: also reject whitespace-only titles, which previously
        // produced a lone "*" entry in the completion set.
        if (title == null || title.trim().isEmpty()) {
            return false;
        }
        title = title.trim();
        String setName = category + "_compl"; // name of set
        for (int i = 1; i <= title.length(); i++) {
            jedis.zadd(setName, 0, title.substring(0, i));
        }
        jedis.zadd(setName, 0, title + "*");
        return true;
    }

    /**
     * Geocodes the movie's location via the Google Geocoding API, attaches
     * the first result as "geocode", then stores the serialized movie in a
     * Redis set keyed by its title.
     *
     * @return always true; failures are logged and swallowed (best effort)
     */
    public static boolean processFilmData(JSONObject movie, Jedis jedis) {
        try {
            String locations = (String) movie.get("locations");
            JSONObject ob = (JSONObject) DataFetcher
                    .readJsonFromUrl(getGoogleApiUrl(locations));
            JSONArray arr = (JSONArray) ob.get("results");
            if (arr.size() > 0) {
                movie.put("geocode", arr.get(0));
            }
            String json = JsonWriter.objectToJson(movie);
            jedis.sadd((String) movie.get("title"), json);
            System.out.println("added movie " + movie);
        } catch (Exception e) {
            // Best effort: one failed geocode/serialization must not stop the
            // import. (Collapsed the redundant IOException/Exception catches.)
            e.printStackTrace();
        }
        return true;
    }

    /**
     * Builds the Google Geocoding API request URL for a San Francisco
     * street location.
     * SECURITY: the API key is hard coded; move it to configuration.
     */
    public static String getGoogleApiUrl(String locations) {
        locations = locations + ", San Francisco, CA";
        locations = locations.replace(" ", "%20");
        // Bug fix: the key parameter was joined with '+' instead of '&',
        // so it was never sent as a separate query parameter.
        return "https://maps.googleapis.com/maps/api/geocode/json?address="
                + locations
                + "&sensor=false&key=AIzaSyBujpw4p4cnB-JgFFMBGEuwvAGFYllkGps";
    }
}
|
# k3d
- [rancher/k3d](https://github.com/rancher/k3d)
```bash
brew install k3d
# 创建集群
k3d cluster create k3d
# 删除集群
k3d cluster delete k3d
```
|
package org.exthmui.updater.model;
import android.util.Base64;
/**
 * A notice shown by the updater. Title and body arrive Base64 encoded and
 * are decoded once into plain-text fields by {@link #init()}.
 */
public class Notice implements NoticeInfo {
    private String mBase64Title;
    private String mBase64Texts;
    private String mTitle;    // decoded title, null until init()
    private String mTexts;    // decoded body text, null until init()
    private String mImageUrl;
    private String mId;

    /** Bean constructor; populate via setters and then call {@link #init()}. */
    public Notice() {
    }

    public Notice(String base64Title, String base64Texts, String id, String imageUrl) {
        mBase64Title = base64Title;
        mBase64Texts = base64Texts;
        mId = id;
        mImageUrl = imageUrl;
        init();
    }

    /** Copy constructor from any {@link NoticeInfo} implementation. */
    public Notice(NoticeInfo noticeInfo) {
        mBase64Title = noticeInfo.getBase64Title();
        mBase64Texts = noticeInfo.getBase64Texts();
        mId = noticeInfo.getId();
        mImageUrl = noticeInfo.getImageUrl();
        init();
    }

    /**
     * Decodes the Base64 payloads into the plain-text fields.
     * Bug fix: tolerates missing payloads instead of throwing a
     * NullPointerException when built via the default constructor.
     */
    public void init() {
        mTitle = decodeBase64(mBase64Title);
        mTexts = decodeBase64(mBase64Texts);
    }

    /** Decodes a Base64 string, passing null through unchanged. */
    private static String decodeBase64(String encoded) {
        if (encoded == null) {
            return null;
        }
        return new String(Base64.decode(encoded.getBytes(), Base64.DEFAULT));
    }

    public void setBase64Title(String base64Title) {
        mBase64Title = base64Title;
    }

    public void setBase64Texts(String base64Texts) {
        mBase64Texts = base64Texts;
    }

    public void setTitle(String title) {
        mTitle = title;
    }

    public void setTexts(String texts) {
        mTexts = texts;
    }

    public void setId(String id) {
        mId = id;
    }

    public void setImageUrl(String imageUrl) {
        mImageUrl = imageUrl;
    }

    @Override
    public String getBase64Title() {
        return mBase64Title;
    }

    @Override
    public String getBase64Texts() {
        return mBase64Texts;
    }

    @Override
    public String getTitle() {
        return mTitle;
    }

    @Override
    public String getTexts() {
        return mTexts;
    }

    @Override
    public String getImageUrl() {
        return mImageUrl;
    }

    @Override
    public String getId() {
        return mId;
    }
}
|
var EventEmitter = require('events').EventEmitter;
var async = require('async');
var defaultData = require('./default');
var secret = require('./secret');
var defaults = require('defaults');
var httpify = require('httpify');
var assign = require('object-assign');
/**
 * In-memory data store for the conference app, backed by localStorage and a
 * serial API request queue. Emits 'update' and 'update-<key>' events when
 * fresh data arrives.
 */
function DataStore () {
  EventEmitter.call(this);
  // assign localStorage to our internal cache
  // NOTE(review): for..in over localStorage may also visit non-data members
  // in some browsers, and JSON.parse throws on non-JSON values — confirm.
  var storage = this.cache = {};
  for (var key in window.localStorage) {
    storage[key] = JSON.parse(window.localStorage[key]);
  }
  // default our internal cache to pre-loaded data
  defaults(storage, defaultData);
  this.cache.settings = {
    aboutLogo: './img/thinkmill-logo_white.svg'
  };
  // Derived collections filled in by __preprocess below.
  this.cache.starred = {};
  this.cache.speakers = [];
  this.cache.sponsors = [];
  this.cache.organisers = [];
  this.__preprocess(storage);
  this.cache = Object.assign(this.cache, storage);
  // TODO maybe window.addEventListener('online', this.updateOnlineStatus);
  // TODO maybe window.addEventListener('offline', this.updateOnlineStatus);
  // generic API queue
  var self = this;
  var url = 'http://api.eventlama.com';
  // Serial queue (concurrency 1): each job performs one HTTP request,
  // writes the response keys back to localStorage and emits update events.
  this.apiQueue = async.queue(function (opts, callback) {
    var { authToken } = storage; // NOTE(review): authToken is never used below
    var { endpoint, data, method } = opts;
    httpify({
      method: method || 'POST',
      url: url + endpoint,
      headers: { 'Content-Type': 'application/json' },
      timeout: 20000
    }, (err, res) => {
      if (err) return callback(err);
      if (res.statusCode !== 200) return callback(new Error('Error ' + res.statusCode));
      var body = res.body || {};
      var data = body || {};
      self.__preprocess(data);
      // emit all the events
      for (var key in data) {
        var updated = data[key];
        storage[key] = updated;
        window.localStorage[key] = JSON.stringify(updated);
        self.emit('update-' + key, updated);
      }
      self.emit('update', data);
      callback(null, data);
    });
  }, 1)
  var context = this;
  // Initial fetch of the attendee list for the main check-in list.
  this.apiQueue.push({
    method: 'GET',
    endpoint: '/checkin/events/28/checkinlists/' + secret.main.id + '/' + secret.main.token
  }, function (err, data) {
    // TODO Proper error handling
    if (!err) {
      context.cache.people = data.attendees;
    }
  });
  // every 30s, attempt synchronize, queues incase it takes a while
  self.synchronize();
  setInterval(function () {
    self.synchronize();
  }, 30000);
}
// mutates data
DataStore.prototype.__preprocess = function (data) {
if (data.attendees) {
var starred = this.cache.starred;
var talks = data.Proposals || this.cache.Proposals;
data.attendees.forEach(person => {
// Set properties of attendees
person.bio = person.bio || '';
person.github = person.github || '';
person.picture = person.picture || '';
person.starred = starred[person.id];
person.twitter = person.twitter || '';
person.name = person.purchase.attendee_first_name + ' ' + person.purchase.attendee_last_name;
// Filter for talks associated with a person
person.talks = person.talks || talks
.filter(talk => {
if (talk.speakers) {
var isSpeaker = false;
// Map over the speakers of a talk
talk.speakers.map((speaker) => {
// If the email of the speaker is the same as the attendee email, they're giving this talk!
if (speaker.email === person.purchase.attendee_email) {
isSpeaker = true;
person.bio = speaker.bio;
person.twitter = speaker.twitter;
person.url = speaker.url;
person.github = speaker.github;
person.picture = speaker.avatar_url;
}
});
return isSpeaker;
}
});
});
this.cache.people = data.attendees;
}
if (data.Proposals && this.cache.speakers.length === 0) {
var feedback = this.cache.feedback;
var speakers = this.cache.speakers;
var talks = this.cache.Proposals;
if (!feedback) {
feedback = this.cache.feedback = {};
}
data.Proposals.forEach(talk => {
talk.endTime = talk.start_date + talk.length;
if (!feedback[talk.id]) {
feedback[talk.id] = {};
}
talk.feedback = feedback[talk.id];
if (talk.speakers) {
talk.speakers.forEach(speaker => {
var duplicate = false;
this.cache.speakers.forEach((cachedSpeaker) => {
if (cachedSpeaker.email === speaker.email) {
duplicate = true;
}
});
if (!duplicate) {
speaker.talks = talks.filter(talk => {
var isSpeaker = false;
talk.speakers && talk.speakers.filter((talkSpeaker) => {
if (speaker.email === talkSpeaker.email) {
isSpeaker = true;
}
});
return isSpeaker;
});
speakers.push(speaker);
}
});
}
});
}
if (data.Organizer && this.cache.organisers.length === 0) {
var organisers = this.cache.organisers;
organisers.push(data.Organizer);
data.Collaborators.forEach(function (collaborator) {
organisers.push(collaborator);
});
this.cache.organisers = organisers;
}
if (data.DiamondSponsors && this.cache.sponsors.length === 0) {
var sponsors = this.cache.sponsors;
if (data.DiamondSponsors) {
data.DiamondSponsors.map(sponsor => {
sponsor.tier = 'diamond';
sponsors.push(sponsor);
});
}
if (data.PlatinumSponsors) {
data.PlatinumSponsors.map(sponsor => {
sponsor.tier = 'platinum';
sponsors.push(sponsor);
});
}
if (data.GoldSponsors) {
data.GoldSponsors.map(sponsor => {
sponsor.tier = 'gold';
sponsors.push(sponsor);
});
}
if (data.BronzeSponsors) {
data.BronzeSponsors.map(sponsor => {
sponsor.tier = 'bronze';
sponsors.push(sponsor);
});
}
if (data.BasicSponsors) {
data.BasicSponsors.map(sponsor => {
sponsor.tier = 'basic';
sponsors.push(sponsor);
});
}
if (data.PartnersSponsors) {
data.PartnersSponsors.map(sponsor => {
sponsor.tier = 'partner';
sponsors.push(sponsor);
});
}
this.cache.sponsors = sponsors;
}
};
Object.assign(DataStore.prototype, EventEmitter.prototype);
// Synchronized, external API functions
/**
 * Activates the app with a main-conference ticket code: fetches the
 * attendee record, merges it into cache.me and copies selected
 * questionnaire answers (bio, twitter) onto the profile.
 */
DataStore.prototype.activate = function (ticketCode, callback) {
  this.cache.ticketCode = ticketCode;
  var context = this;
  this.apiQueue.push({
    method: 'GET',
    endpoint: '/checkin/events/28/checkinlists/' + secret.main.id + '/' + secret.main.token + '/attendee/' + ticketCode
  }, function (err, data) {
    if (err) return callback(err);
    context.cache.me = assign({}, context.cache.me, data);
    // Save answer to questions to user data
    // NOTE(review): assumes data.Questions is always present — a response
    // without it would throw here; confirm against the API.
    for (var i = 0; i < data.Questions.length; i++) {
      switch (data.Questions[i].question) {
        case "A one line bio about you or what you do":
          context.cache.me.bio = data.Questions[i].answer;
          break;
        case "Twitter":
          context.cache.me.twitter = data.Questions[i].answer;
          break;
        default:
          break;
      }
    }
    callback();
  });
};
/**
 * Validates a hackathon ticket code against the check-in API and, on
 * success, records it on the cached user profile.
 */
DataStore.prototype.activateHackathon = function (hackathonTicketCode, callback) {
  var self = this;
  var endpoint = '/checkin/events/28/checkinlists/' +
    secret.hackathon.id + '/' + secret.hackathon.token +
    '/attendee/' + hackathonTicketCode;
  this.apiQueue.push({ method: 'GET', endpoint: endpoint }, function (err, data) {
    if (err) return callback(err);
    if (!self.cache.me) self.cache.me = {};
    self.cache.me.hackathonTicketCode = hackathonTicketCode;
    callback();
  });
};
/**
 * Validates a workshop ticket code against the check-in API and, on
 * success, records it on the cached user profile.
 */
DataStore.prototype.activateWorkshop = function (workshopTicketCode, callback) {
  var self = this;
  var endpoint = '/checkin/events/28/checkinlists/' +
    secret.workshop.id + '/' + secret.workshop.token +
    '/attendee/' + workshopTicketCode;
  this.apiQueue.push({ method: 'GET', endpoint: endpoint }, function (err, data) {
    if (err) return callback(err);
    if (!self.cache.me) self.cache.me = {};
    self.cache.me.workshopTicketCode = workshopTicketCode;
    callback();
  });
};
/**
 * Replaces the cached user profile and queues the update for the API.
 */
DataStore.prototype.editMe = function (newMe, callback) {
  this.cache.me = newMe;
  var done = callback || function () {};
  this.apiQueue.push({ endpoint: '/me/update', data: { me: newMe } }, done);
};
/**
 * Records feedback for a talk, mirrors it onto the cached talk entry and
 * queues it for upload.
 * Bug fix: `this.cache.schedule` is never assigned anywhere in this file
 * (talks are cached under `Proposals`), so the previous unguarded forEach
 * threw a TypeError; fall back to Proposals / an empty list.
 */
DataStore.prototype.feedback = function (id, feedback, callback) {
  this.cache.feedback[id] = feedback;
  // update feedback cache
  var talks = this.cache.schedule || this.cache.Proposals || [];
  talks.forEach(function (talk) {
    if (talk.id !== id) return;
    talk.feedback = feedback;
  });
  this.apiQueue.push({
    endpoint: '/me/feedback',
    data: Object.assign({ id: id }, feedback)
  }, callback || function () {})
};
/**
 * Asks the API to resend the ticket email for the given address.
 */
DataStore.prototype.resend = function (email, callback) {
  var done = callback || function () {};
  this.apiQueue.push({ endpoint: '/resend', data: { email: email } }, done);
};
/**
 * Queues a pull of the full conference dataset. Persistence and event
 * emission happen inside the queue worker, which is why the completion
 * handler below is intentionally empty.
 * NOTE(review): the `callback` parameter is accepted but never invoked.
 */
DataStore.prototype.synchronize = function (callback) {
  var context = this;
  this.apiQueue.push({
    method: 'GET',
    endpoint: '/data/reacteurope-2016/all.json'
  }, function (err, data) {
  });
};
// Unsynchronized, non external API functions
/**
 * Toggles the starred flag for a person (speaker first, organiser as a
 * fallback) and persists the starred map to localStorage.
 */
DataStore.prototype.star = function (id, starred) {
  this.cache.starred[id] = starred;
  var matched = false;
  var markIfTarget = function (person) {
    if (person.id !== id || matched) return;
    matched = true;
    person.starred = starred;
  };
  this.cache.speakers.forEach(markIfTarget);
  if (!matched) {
    this.cache.organisers.forEach(markIfTarget);
  }
  window.localStorage.starred = JSON.stringify(this.cache.starred);
};
// Simple synchronous read accessors over the in-memory cache.
DataStore.prototype.amRegistered = function () { return !!this.cache.me };
DataStore.prototype.getAttendees = function () { return [] };
DataStore.prototype.getMe = function () { return this.cache.me };
DataStore.prototype.getOrganisers = function () { return this.cache.organisers };
DataStore.prototype.getPerson = function (id) { return this.cache.speakers.filter(person => person.id === id).pop() };
DataStore.prototype.getPeople = function () { return this.cache.people };
// NOTE(review): Array#sort mutates cache.Proposals in place on every call.
DataStore.prototype.getSchedule = function () { return this.cache.Proposals.sort((a, b) => new Date(a.start_date) - new Date(b.start_date)) };
DataStore.prototype.getSettings = function () { return this.cache.settings };
DataStore.prototype.getSpeakers = function () { return this.cache.speakers };
DataStore.prototype.getSponsors = function () { return this.cache.sponsors };
DataStore.prototype.getTicketCode = function () { return this.cache.ticketCode };
DataStore.prototype.getHackathonTicketCode = function () { return this.cache.me && this.cache.me.hackathonTicketCode };
DataStore.prototype.getWorkshopTicketCode = function () { return this.cache.me && this.cache.me.workshopTicketCode };

module.exports = DataStore;
|
#!/bin/bash
# Code hygiene helper: autoformat D sources, spell-check, clean up backups.

# Autoformat the code.
dub run --verror autoformat --skip-registry=all -- `find source -name '*.d'`

# Check we have no typos (only when the misspell tool is installed).
# `command -v` is the POSIX-recommended replacement for `which`.
if command -v misspell >/dev/null 2>&1; then
    misspell -error `find source -name '*.d'`
fi

# Remove the .orig backup files the formatter leaves behind.
# -delete avoids spawning rm per file via xargs.
find . -name '*.d.orig' -delete
|
#
# Generate a file of filtered messages based on the "creativeNumber" attribute
# of each message and the array of IDs below.
#
Clear

# Replace the file root (example file job.JSON.txt - "job" is the root)
$fileRoot = "job"

# Replace the array contents below with the missing job IDs to be extracted. (no commas needed)
$creativeIds = @(
392721
398744
415244
420452
411788
448396
457595
457596
473038
480620
491080
493630
491343
488550
502987
504776
504774
)

#========================= DO NOT EDIT BELOW THIS LINE ===========================================
$messageFile = ".\$fileRoot.JSON.txt"
$filteredMessageFile = ".\$fileRoot.filtered.JSONarray.txt"

Write-Output "Reading $messageFile ..."
# Each row is a valid JSON message (the entire file is NOT valid JSON)
$allMessages = Get-Content -Path $messageFile

Write-Output "Filtering messages ..."
# Collect matching rows first so they can be comma-joined afterwards.
# Bug fix: the old version appended a comma after every row, leaving a
# trailing comma that made the output file invalid JSON.
$matchingMessages = foreach ($message in $allMessages)
{
    # Convert each row into a valid JSON object so it can be tested against the filter IDs.
    $jsonMessage = $message | ConvertFrom-Json
    if ($jsonMessage.creativeNumber -in $creativeIds) {
        # Progress echo to the console (Write-Host keeps it out of the
        # foreach expression's output stream).
        Write-Host $message
        $message
    }
}

# Emit a valid JSON array: "[", comma-separated rows, "]".
Set-Content -Path $filteredMessageFile -Value "["
Add-Content -Path $filteredMessageFile -Value ($matchingMessages -join ",`n")
Add-Content -Path $filteredMessageFile -Value "]"
|
import pyspark.sql.functions as fn
from pyspark.sql import DataFrame
from pyspark.ml import Transformer
class RemoveDuplicates(Transformer):
    """Drops duplicate records.

    Removes rows that are exact duplicates, then (when an ID column is
    configured) rows that are duplicates in every column except the ID,
    and finally regenerates the ID column so it is unique again.
    """

    def __init__(self, id_col: str = None):
        super(RemoveDuplicates, self).__init__()
        # Name of the ID column; '' or None means "no ID column".
        self._id_col = id_col

    def _transform(self, df: DataFrame) -> DataFrame:
        # Drop exact duplicates first.
        df = df.dropDuplicates()
        if not self._id_col:
            # No ID column configured: deduplicating on "all columns except
            # the ID" would just repeat the full-row dedup, so stop here.
            return df
        # Drop rows that only differ in their ID value.
        df = df.dropDuplicates(subset=[c for c in df.columns if c != self._id_col])
        # Reassign a monotonically increasing, unique ID (in case of duplicate ID).
        return df.withColumn(self._id_col, fn.monotonically_increasing_id())
|
export default class {
  /**
   * Marks the document as JavaScript-enabled by adding the `js` class to
   * <body>, so stylesheets can target scripted environments.
   */
  constructor() {
    const bodyClasses = document.body.classList;
    bodyClasses.add('js');
  }
}
|
/// Returns every letter combination the phone keypad can spell for `digits`.
///
/// Behaves exactly like the recursive original (which is inlined here):
/// combinations vary the suffix first, i.e. for "23" the order is
/// ad, bd, cd, ae, be, ce, af, bf, cf.
fn letter_combinations(digits: &str) -> Vec<String> {
    if digits.is_empty() {
        return Vec::new();
    }
    // Build combinations from the rightmost digit towards the left,
    // prepending each letter of the current digit to every suffix so far.
    let mut combos: Vec<String> = vec![String::new()];
    for digit in digits.chars().rev() {
        let letters = get_letters(digit);
        let mut extended = Vec::with_capacity(combos.len() * letters.len());
        for suffix in &combos {
            for &letter in &letters {
                let mut combo = String::with_capacity(suffix.len() + 1);
                combo.push(letter);
                combo.push_str(suffix);
                extended.push(combo);
            }
        }
        combos = extended;
    }
    combos
}

/// Maps a keypad digit ('2'..='9') to its letters; panics on anything else.
fn get_letters(number_char: char) -> Vec<char> {
    match number_char.to_digit(10).unwrap() {
        2 => vec!['a', 'b', 'c'],
        3 => vec!['d', 'e', 'f'],
        4 => vec!['g', 'h', 'i'],
        5 => vec!['j', 'k', 'l'],
        6 => vec!['m', 'n', 'o'],
        7 => vec!['p', 'q', 'r', 's'],
        8 => vec!['t', 'u', 'v'],
        9 => vec!['w', 'x', 'y', 'z'],
        _ => unreachable!(),
    }
}
impl Solution {
    /// LeetCode 17 entry point: delegates to the free function above.
    #[must_use]
    #[allow(clippy::needless_pass_by_value)]
    pub fn letter_combinations(digits: String) -> Vec<String> {
        letter_combinations(&digits)
    }
}

/// Marker type required by the LeetCode-style `Solution::` interface.
pub struct Solution;
#[cfg(test)]
mod tests {
    use crate::problem_0017::letter_combinations;

    /// Empty input produces no combinations.
    #[test]
    fn test_2() {
        assert_eq!(letter_combinations(""), Vec::<String>::new());
    }

    /// "23" yields the 3x3 cross product; sorted before comparing so the
    /// assertion is independent of generation order.
    #[test]
    fn test_3() {
        let mut result = letter_combinations("23");
        result.sort_unstable();
        assert_eq!(
            result,
            vec!["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"]
        );
    }
}
|
# MAT.2017
Data analysis scripts for 1989 Toolik MAT Warming and Fertilization experiment, sampled in Summer 2017
The master version includes only the analyses presented in the manuscript.
|
/*
* Copyright 2000-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jetspeed.services.webpage;
// javax.servlet
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
// java.io
import java.io.IOException;
// java.util
import java.util.Collection;
/**
* <P>This is a commodity static accessor class around the
* <code>WebPageService</code> interface</P>
*
* @see org.apache.jetspeed.services.webpage.WebPageService
*
* @author <a href="mailto:taylor@apache.org">David Sean Taylor</a>
* @version $Id: WebPageManager.java,v 1.2 2004/02/23 03:46:26 jford Exp $
*/
public class WebPageManager
{
    // the singleton service reference; guarded by the class lock in getService()
    private static WebPageService service = null;

    /**
     * Commodity method for getting a reference to the service singleton.
     * Synchronized so that concurrent first calls cannot observe a
     * half-initialized reference or create two instances (the original
     * lazy initialization was not thread safe).
     */
    private static synchronized WebPageService getService()
    {
        if (service == null)
        {
            // TODO: load from configuration
            service = new JetspeedWebPageService();
        }
        return service;
    }

    /**
     * @see WebPageService#isInit
     */
    public static boolean isInit()
    {
        return getService().isInit();
    }

    /**
     * @see WebPageService#get
     */
    public static void get(HttpServlet servlet,
                           HttpServletRequest request,
                           HttpServletResponse response)
        throws ServletException, IOException
    {
        getService().get(servlet, request, response);
    }

    /**
     * @see WebPageService#post
     */
    public static void post(HttpServlet servlet,
                            HttpServletRequest request,
                            HttpServletResponse response)
        throws ServletException, IOException
    {
        getService().post(servlet, request, response);
    }

    /**
     * @see WebPageService#init
     */
    public static void init(ServletConfig config)
        throws ServletException, IOException
    {
        getService().init(config);
    }

    /**
     * @see WebPageService#destroy
     */
    public static void destroy()
    {
        getService().destroy();
    }

    /**
     * @see WebPageService#getSessions
     */
    public static Collection getSessions()
    {
        return getService().getSessions();
    }

    /**
     * @see WebPageService#getSession
     */
    public static SessionMap getSession(String id)
    {
        return getService().getSession(id);
    }

    /**
     * Returns the sites known to the service.
     * (The original @see pointed at getNetworkElements; this delegates to
     * WebPageService#getSites.)
     *
     * @see WebPageService#getSites
     */
    public static Collection getSites()
    {
        return getService().getSites();
    }

    /**
     * @see WebPageService#getErrorString
     */
    public static String getErrorString()
    {
        return getService().getErrorString();
    }
}
|
# python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements the Orchestrate API Service."""
import functools

from google.cloud import error_reporting

from orchestrateapi import orchestrate_pb2_grpc
from orchestrateapi.commands import images
from orchestrateapi.commands import instances
from orchestrateapi.commands import projects
from orchestrateapi.commands import templates
error_client = error_reporting.Client()
def log_errors(function):
  """Explicitly log unhandled exceptions to Stackdriver's Error reporting.

  This happens automatically for Cloud Functions but not for code running
  in Kubernetes.

  Args:
    function: Decorated function.

  Returns:
    Decorated function with the original name and docstring preserved
    (via functools.wraps), so gRPC handler introspection keeps working.
  """
  @functools.wraps(function)
  def wrapper(*arguments, **kwargs):
    try:
      return function(*arguments, **kwargs)
    except:  # noqa: E722 - deliberately report *everything*, then re-raise
      error_client.report_exception()
      raise
  return wrapper
class Orchestrate(orchestrate_pb2_grpc.OrchestrateServicer):
  """Implements the Orchestrate API Service endpoints.

  Each RPC handler is a thin delegate to the corresponding command module
  (images/templates/instances/projects); unhandled exceptions are reported
  to Stackdriver by the @log_errors decorator before propagating to gRPC.
  """

  @log_errors
  def CreateImage(self, request, context):
    """Creates an image asynchronously.

    Args:
      request (orchestrate_pb2.CreateImageRequest): Request payload.
      context: Context.

    Returns:
      A orchestrate_pb2.CreateImageResponse with the status of the request.
    """
    return images.create.run(request, context)

  @log_errors
  def CreateTemplate(self, request, context):
    """Creates a template.

    Args:
      request (orchestrate_pb2.CreateTemplateRequest): Request payload.
      context: Context.

    Returns:
      A orchestrate_pb2.CreateTemplateResponse with the status of the request.
    """
    return templates.create.run(request, context)

  @log_errors
  def DeleteTemplate(self, request, context):
    """Deletes a template.

    Args:
      request (orchestrate_pb2.DeleteTemplateRequest): Request payload.
      context: Context.

    Returns:
      A orchestrate_pb2.DeleteTemplateResponse with the status of the request.
    """
    return templates.delete.run(request, context)

  @log_errors
  def CreateInstance(self, request, context):
    """Creates an instance.

    Args:
      request (orchestrate_pb2.CreateInstanceRequest): Request payload.
      context: Context.

    Returns:
      A orchestrate_pb2.CreateInstanceResponse with the status of the request.
    """
    return instances.create.run(request, context)

  @log_errors
  def RegisterProject(self, request, context):
    """Registers a project.

    Args:
      request (orchestrate_pb2.RegisterProjectRequest): Request payload.
      context: Context.

    Returns:
      A orchestrate_pb2.RegisterProjectResponse with the status of the request.
    """
    return projects.register.run(request, context)

  @log_errors
  def DeregisterProject(self, request, context):
    """Deregisters a project.

    Args:
      request (orchestrate_pb2.DeregisterProjectRequest): Request payload.
      context: Context.

    Returns:
      A orchestrate_pb2.DeregisterProjectResponse with the status of the request.
    """
    return projects.deregister.run(request, context)
|
package org.cuieney.videolife.kotlin.presenter.contract
import org.cuieney.videolife.entity.VeerListBean
/**
 * MVP contract for the VR ("veer") category screen.
 *
 * Created by cuieney on 2017/6/8.
 */
interface VeerCategoryContract{
    interface View{
        /** Renders a successfully fetched page of category items. */
        fun showContent(veerListBean: VeerListBean)
        /** Surfaces a load failure to the UI. */
        fun error(throwable: Throwable)
    }
    interface Presenter{
        /**
         * Loads one page of category data.
         * `id` selects the category, `order` the sort order and
         * `pageNo` the (paging) page number to fetch.
         */
        fun getVrCategoryData(view:View,id:Int,order:String,pageNo:Int)
    }
}
|
var userdict = {};
// dict.key1 = "value1";
// dict.key2 = "value2";
var time_stamp = function(author){
var time_stamp = Date.now() / 1000 | 0;
userdict[author]=time_stamp;
}
var show_all_stamps = function(author){
var ts_string = "";
for (var key in userdict) {
var value = userdict[key];
var ts = new Date(value*1000).toISOString().slice(0, -5);
ts_string = ts_string + key+" was "+ ts + "\n";
// Use `key` and `value`
}
return ts_string;
}
module.exports = {
time_stamp : time_stamp,
show_all_stamps : show_all_stamps,
userdict:userdict
}
|
import { mount, Wrapper } from '@vue/test-utils';
import Vue from 'vue';
import { addMessages } from '../../../tests/helpers/lang';
import I18nPlugin from '../../components/i18n/i18n';
import FileSizeFilterPlugin from './filesize';
describe(`f-m-filesize`, () => {
    beforeEach(() => {
        // Register the i18n and filesize filter plugins on Vue, plus the
        // English messages the filter uses for its unit labels.
        Vue.use(I18nPlugin);
        Vue.use(FileSizeFilterPlugin);
        addMessages(Vue, ['filters/filesize/filesize.lang.en.json']);
    });

    it(`the result should be a string with the file size formatted`, () => {
        // Mount a minimal component that pipes a raw byte count through the filter.
        const element: Wrapper<Vue> = mount(
            {
                template: `<span>{{ 1024 | f-m-filesize }}</span>`
            },
            { localVue: Vue }
        );
        expect(element.vm.$el.textContent).toEqual('1 Kb');
    });
});
|
package cat.pantsu.nyaapantsu.ui.fragment
import android.Manifest
import android.app.Activity
import android.app.ProgressDialog
import android.content.*
import android.content.pm.PackageManager
import android.graphics.Bitmap
import android.graphics.Point
import android.graphics.drawable.BitmapDrawable
import android.graphics.drawable.LevelListDrawable
import android.net.Uri
import android.os.Build
import android.os.Bundle
import android.support.v4.app.ActivityCompat
import android.support.v4.app.Fragment
import android.support.v4.content.ContextCompat
import android.text.Html
import android.text.Spanned
import android.text.TextUtils
import android.util.Log
import android.view.LayoutInflater
import android.view.MenuItem
import android.view.View
import android.view.ViewGroup
import cat.pantsu.nyaapantsu.R
import cat.pantsu.nyaapantsu.helper.TorrentStreamHelper
import cat.pantsu.nyaapantsu.helper.addTorrentToRecentPlaylist
import cat.pantsu.nyaapantsu.model.Torrent
import cat.pantsu.nyaapantsu.util.Utils
import com.facebook.common.executors.CallerThreadExecutor
import com.facebook.common.references.CloseableReference
import com.facebook.datasource.DataSource
import com.facebook.drawee.backends.pipeline.Fresco
import com.facebook.imagepipeline.datasource.BaseBitmapDataSubscriber
import com.facebook.imagepipeline.image.CloseableImage
import com.facebook.imagepipeline.request.ImageRequestBuilder
import com.github.kittinunf.fuel.android.core.Json
import com.github.kittinunf.fuel.android.extension.responseJson
import com.github.kittinunf.fuel.httpGet
import com.github.kittinunf.result.Result
import com.github.kittinunf.result.getAs
import com.github.se_bastiaan.torrentstream.StreamStatus
import com.github.se_bastiaan.torrentstream.listeners.TorrentListener
import kotlinx.android.synthetic.main.fragment_torrent_view.*
import org.jetbrains.anko.backgroundColor
import org.jetbrains.anko.support.v4.toast
import org.json.JSONObject
import java.lang.Exception
/**
* Created by ltype on 2017/7/9.
*/
class TorrentViewFragment : Fragment(), TorrentListener {
    // Torrent being displayed; starts empty until arguments/network fill it in.
    var torrent = Torrent(JSONObject())
    // NOTE(review): not referenced in the visible code — presumably toggles
    // the details section; confirm against the rest of the class.
    var showDet = false
    // Progress spinner; created in onViewCreated.
    var progressdialog: ProgressDialog? = null
companion object {
fun newInstance(torrent: String): TorrentViewFragment {
val fragment = TorrentViewFragment()
val args = Bundle()
args.putString("torrent", torrent)
fragment.arguments = args
return fragment
}
fun newInstance(id: Int): TorrentViewFragment {
val fragment = TorrentViewFragment()
val args = Bundle()
args.putInt("id", id)
fragment.arguments = args
return fragment
}
}
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View {
return inflater.inflate(R.layout.fragment_torrent_view, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
if (arguments!!.getString("torrent") !== null) {
torrent = Torrent(JSONObject(arguments!!.getString("torrent")))
genView()
} else if (arguments!!.containsKey("id")) {
torrent.id = arguments!!.getInt("id", 0)
getData()
} else {
toast("No data provided")
activity!!.finish()
}
progressdialog = ProgressDialog(context)
}
fun getData() {
("/view/" + torrent.id).httpGet().responseJson { request, response, result ->
when (result) {
is Result.Failure -> {
Log.d("Network", "Big Fail :/")
Log.d("Network", response.toString())
Log.d("Network", request.toString())
}
is Result.Success -> {
Log.d("Network", result.toString())
Log.d("Network", request.toString())
Log.d("Network", response.toString())
val json = result.getAs<Json>()
if (json !== null) {
torrent = Torrent(json.obj())
genView()
}
}
}
}
}
fun genView() {
if (!isAdded) return
torrentName.text = torrent.name
torrentCategory.text = resources.getStringArray(R.array.cat_array)[torrent.category]
torrentUser.text = torrent.username
torrentHash.text = torrent.hash
torrentDate.text = torrent.date
torrentSize.text = torrent.size
val spanned: Spanned
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
spanned = Html.fromHtml(torrent.description, Html.FROM_HTML_MODE_COMPACT, imageGetter(), null)
} else {
spanned = Html.fromHtml(torrent.description, imageGetter(), null)
}
torrentDescription.text = spanned
torrentDownloads.text = torrent.completed.toString()
torrentWebsite.text = torrent.website
torrentSeeders.text = "S: ${torrent.seeders}"
torrentLeechers.text = "L: ${torrent.leechers}"
torrentLastScraped.text = torrent.last_scrape
torrentFiles.text = torrent.fileList.length().toString()
when (torrent.status) {
2 -> torrentName.backgroundColor = ContextCompat.getColor(this.context!!, R.color.colorRemake)
3 -> torrentName.backgroundColor = ContextCompat.getColor(this.context!!, R.color.colorTrusted)
4 -> torrentName.backgroundColor = ContextCompat.getColor(this.context!!, R.color.colorAPlus)
}
torrentProgress.max = (torrent.seeders + torrent.leechers)
torrentProgress.progress = torrent.seeders
torrentDetails.visibility = View.VISIBLE
downloadButton.setOnClickListener { _ ->
if (!TextUtils.isEmpty(torrent.download)) {
Utils.download(this.activity!!, downloadButton, torrent.download, torrent.name)
} else {
toast(getString(R.string.torrent_not_available))
}
}
copyButton.setOnClickListener { _ ->
val clipboard = activity!!.getSystemService(Context.CLIPBOARD_SERVICE) as ClipboardManager
val clipData = ClipData.newPlainText(torrent.name, torrent.magnet)
clipboard.primaryClip = clipData
toast(getString(R.string.magnet_copied))
}
streamButton.setOnClickListener { _ ->
val magnet = torrent.magnet
if (magnet != "") {
if (ContextCompat.checkSelfPermission(context!!,
Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED) {
if (Utils.isExternalStorageWritable()) {
Log.d("stream", "Magnet: " + magnet)
if (!TorrentStreamHelper.instance.isStreaming()) {
TorrentStreamHelper.instance.start(magnet)
TorrentStreamHelper.torrent = torrent
addTorrentToRecentPlaylist(torrent)
}
// show current stream status
TorrentStreamHelper.instance.setListener(this)
displayProgress()
} else {
toast(getString(R.string.external_storage_not_available))
}
} else {
ActivityCompat.requestPermissions(activity!!, arrayOf(Manifest.permission.WRITE_EXTERNAL_STORAGE), 10)
}
} else {
toast(getString(R.string.torrent_not_available))
}
}
showMoreToggle.setOnClickListener { _ ->
if (showDet) {
moreDetails.visibility = View.GONE
showDet = false
} else {
moreDetails.visibility = View.VISIBLE
showDet = true
}
}
}
fun imageGetter(): Html.ImageGetter {
return Html.ImageGetter { source ->
val ld = LevelListDrawable()
val empty = ContextCompat.getDrawable(activity!!, R.drawable.abc_btn_check_material)
ld.addLevel(0, 0, empty)
ld.setBounds(0, 0, empty!!.intrinsicWidth, empty.intrinsicHeight)
val imageRequestBuilder: ImageRequestBuilder = ImageRequestBuilder.newBuilderWithSource(Uri.parse(source))
val imagePipeline = Fresco.getImagePipeline()
val dataSource = imagePipeline.fetchDecodedImage(imageRequestBuilder.build(), this)
dataSource.subscribe(object : BaseBitmapDataSubscriber() {
override fun onFailureImpl(ds: DataSource<CloseableReference<CloseableImage>>?) {
TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
}
override fun onNewResultImpl(bitmap: Bitmap?) {
if (bitmap == null) return
val d = BitmapDrawable(context!!.resources, bitmap)
val size = Point()
(context as Activity).windowManager.defaultDisplay.getSize(size)
val multiplier = size.x / bitmap.width
ld.addLevel(1, 1, d)
ld.setBounds(0, 0, bitmap.width * multiplier, bitmap.height * multiplier)
ld.level = 1
}
}, CallerThreadExecutor.getInstance())
return@ImageGetter ld
}
}
fun displayProgress() {
progressdialog!!.setTitle(TorrentStreamHelper.torrent!!.name)
progressdialog!!.setMessage(getString(R.string.preparing))
progressdialog!!.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL)
progressdialog!!.isIndeterminate = false
progressdialog!!.setCanceledOnTouchOutside(true)
progressdialog!!.setCancelable(true)
progressdialog!!.setButton(DialogInterface.BUTTON_POSITIVE, getString(R.string.ok), { _, _ ->
progressdialog!!.dismiss()
})
progressdialog!!.setButton(DialogInterface.BUTTON_NEGATIVE, getString(R.string.cancel), { _, _ ->
//FIXME cancel need already starting
if (TorrentStreamHelper.instance.isStreaming()) TorrentStreamHelper.instance.stop()
progressdialog!!.dismiss()
})
progressdialog!!.progress = 0
progressdialog!!.max = 100
progressdialog!!.show()
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
// handle arrow click here
if (item.itemId == android.R.id.home) {
activity!!.finish() // close this activity and return to preview activity (if there is any)
}
return super.onOptionsItemSelected(item)
}
override fun onStreamPrepared(t: com.github.se_bastiaan.torrentstream.Torrent) {
Log.d(javaClass.simpleName, "OnStreamPrepared")
t.startDownload()
if (!isAdded) return
progressdialog!!.setMessage(getString(R.string.downloading))
}
override fun onStreamStarted(t: com.github.se_bastiaan.torrentstream.Torrent?) {
Log.d(javaClass.simpleName, "onStreamStarted")
}
override fun onStreamProgress(t: com.github.se_bastiaan.torrentstream.Torrent?, s: StreamStatus?) {
Log.d(javaClass.simpleName, "Progress: ${s?.progress}, ${s?.bufferProgress}")
if (isAdded && s?.bufferProgress!! <= 100 && progressdialog!!.progress < 100 && progressdialog!!.progress != s.bufferProgress) {
progressdialog!!.progress = s.bufferProgress
}
}
override fun onStreamReady(t: com.github.se_bastiaan.torrentstream.Torrent) {
Log.d(javaClass.simpleName, "onStreamReady: " + t.videoFile)
if (!isAdded) return
progressdialog!!.progress = 100
progressdialog!!.dismiss()
//FIXME check file type
val uri = Uri.parse(t.videoFile.toString())
val intent = Intent(Intent.ACTION_VIEW, uri)
intent.setDataAndType(uri, "video/*")
startActivity(intent)
}
override fun onStreamStopped() {
TorrentStreamHelper.instance.setListener(null)
Log.d(javaClass.simpleName, "onStreamStopped")
}
override fun onStreamError(t: com.github.se_bastiaan.torrentstream.Torrent?, e: Exception?) {
Log.e(javaClass.simpleName, "onStreamError", e)
toast("Stream error: " + e)
}
}
|
package console_calculator.calculator.exception;
/**
 * Signals that a division by zero was attempted.
 */
public class DivisionByZeroException extends Exception {

    public DivisionByZeroException(double a, double b) {
        super(describe(a, b));
    }

    /** Builds the human-readable message for the failed division. */
    private static String describe(double a, double b) {
        return a + "/" + b + " Incorrect! Division by zero!";
    }
}
|
-- left outer join
-- COALESCE on a join columns: both sides
-- Joins "test" to "j2" on c_integer, mapping NULL to the sentinel 10000 on
-- both sides so that NULL == NULL rows pair up (a plain equality join would
-- drop them). The WHERE clause then keeps exactly those NULL-matched rows.
select
test.c_integer,
test.c_bigint,
test.c_float,
test.c_double,
test.d9,
test.d18,
test.d28,
test.d38,
test.c_date,
test.c_time,
test.c_timestamp,
test.c_boolean,
test.c_varchar,
j2.c_bigint,
j2.c_float,
j2.c_double,
j2.d9,
j2.d18,
j2.d28,
j2.d38,
j2.c_date,
j2.c_time,
j2.c_timestamp,
j2.c_boolean,
j2.c_varchar
from test
left outer join
j2
on (COALESCE(test.c_integer, 10000) = COALESCE(j2.c_integer, 10000))
where
test.c_integer IS NULL
and j2.c_integer IS NULL
-- Deterministic ordering so the result set can be diffed against a baseline.
order by
test.c_integer,
test.c_bigint,
test.c_float,
test.c_double,
test.d9,
test.d18,
test.d28,
test.d38,
test.c_date,
test.c_time,
test.c_timestamp,
test.c_boolean,
test.c_varchar
;
|
package org.burningokr.mapper.structure;
import java.util.ArrayList;
import java.util.Collection;
import org.burningokr.dto.structure.CompanyDto;
import org.burningokr.mapper.interfaces.DataMapper;
import org.burningokr.model.cycles.CompanyHistory;
import org.burningokr.model.cycles.Cycle;
import org.burningokr.model.okr.Objective;
import org.burningokr.model.structures.Company;
import org.burningokr.model.structures.Department;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
@Service
public class CompanyMapper implements DataMapper<Company, CompanyDto> {

  private final Logger logger = LoggerFactory.getLogger(CompanyMapper.class);

  /**
   * Maps a {@link CompanyDto} to a {@link Company} entity.
   *
   * <p>Cycle and history are mapped as id-only stubs when the dto carries their
   * ids, and left {@code null} otherwise. Departments and objectives are always
   * initialized to empty lists.
   *
   * @param companyDto the dto to map, must not be null
   * @return the mapped entity
   */
  @Override
  public Company mapDtoToEntity(CompanyDto companyDto) {
    Company company = new Company();
    company.setId(companyDto.getStructureId());
    company.setName(companyDto.getStructureName());
    company.setLabel(companyDto.getLabel());
    company.setDepartments(new ArrayList<>());
    company.setObjectives(new ArrayList<>());
    Cycle cycle = null;
    if (companyDto.getCycleId() != null) {
      cycle = new Cycle();
      cycle.setId(companyDto.getCycleId());
    }
    company.setCycle(cycle);
    CompanyHistory history = null;
    if (companyDto.getHistoryId() != null) {
      history = new CompanyHistory();
      history.setId(companyDto.getHistoryId());
    }
    company.setHistory(history);
    logger.info("Mapped CompanyDto (id:{}) to Company.", companyDto.getStructureId());
    return company;
  }

  /**
   * Maps a {@link Company} entity to a {@link CompanyDto}.
   *
   * <p>Fix: cycle and history ids are only read when the associations are
   * present. The previous implementation dereferenced them unconditionally and
   * threw a NullPointerException for companies without a cycle or history,
   * although {@link #mapDtoToEntity(CompanyDto)} explicitly allows both to be
   * null.
   *
   * @param company the entity to map, must not be null
   * @return the mapped dto
   */
  @Override
  public CompanyDto mapEntityToDto(Company company) {
    CompanyDto companyDto = new CompanyDto();
    companyDto.setStructureId(company.getId());
    companyDto.setStructureName(company.getName());
    companyDto.setLabel(company.getLabel());
    companyDto.setCycleId(company.getCycle() != null ? company.getCycle().getId() : null);
    companyDto.setHistoryId(company.getHistory() != null ? company.getHistory().getId() : null);
    Collection<Long> departmentIds = new ArrayList<>();
    for (Department department : company.getDepartments()) {
      departmentIds.add(department.getId());
    }
    companyDto.setDepartmentIds(departmentIds);
    Collection<Long> objectiveIds = new ArrayList<>();
    for (Objective objective : company.getObjectives()) {
      objectiveIds.add(objective.getId());
    }
    companyDto.setObjectiveIds(objectiveIds);
    logger.info("Mapped Company (id:{}) to CompanyDto.", company.getId());
    return companyDto;
  }

  /** Maps a collection of dtos; the result preserves iteration order. */
  @Override
  public Collection<Company> mapDtosToEntities(Collection<CompanyDto> input) {
    Collection<Company> entities = new ArrayList<>();
    input.forEach(companyDto -> entities.add(mapDtoToEntity(companyDto)));
    return entities;
  }

  /** Maps a collection of entities; the result preserves iteration order. */
  @Override
  public Collection<CompanyDto> mapEntitiesToDtos(Collection<Company> companies) {
    Collection<CompanyDto> companyDtos = new ArrayList<>();
    for (Company company : companies) {
      companyDtos.add(mapEntityToDto(company));
    }
    return companyDtos;
  }
}
|
import { getAPIData } from '../../ApiHttp';
// Fetches the "events" resource from the API and wraps it together with a
// response timestamp.
export default async function getEvents(req, params) {
  const events = await getAPIData('events', params);

  return {
    message: events,
    time: Date.now(),
  };
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.data.tables.models;
import com.azure.core.annotation.Fluent;
import com.azure.data.tables.implementation.models.RetentionPolicy;
/**
 * A model representing configurable Azure Analytics Logging settings of the Table service.
 */
@Fluent
public final class TableServiceLogging {
    /*
     * The version of Analytics to configure.
     */
    private String analyticsVersion;

    /*
     * Indicates whether all delete requests should be logged.
     */
    private boolean deleteLogged;

    /*
     * Indicates whether all read requests should be logged.
     */
    private boolean readLogged;

    /*
     * Indicates whether all write requests should be logged.
     */
    private boolean writeLogged;

    /*
     * The retention policy.
     */
    private TableServiceRetentionPolicy retentionPolicy;

    /**
     * Get the version of Analytics to configure.
     *
     * @return The {@code analyticsVersion}.
     */
    public String getAnalyticsVersion() {
        return this.analyticsVersion;
    }

    /**
     * Set the version of Analytics to configure.
     *
     * @param analyticsVersion The {@code analyticsVersion} to set.
     *
     * @return The updated {@link TableServiceLogging} object.
     */
    public TableServiceLogging setAnalyticsVersion(String analyticsVersion) {
        this.analyticsVersion = analyticsVersion;
        return this;
    }

    /**
     * Get a value that indicates whether all delete requests should be logged.
     *
     * @return The {@code deleteLogged} value.
     */
    public boolean isDeleteLogged() {
        return this.deleteLogged;
    }

    /**
     * Set a value that indicates whether all delete requests should be logged.
     *
     * @param delete The {@code deleteLogged} value to set.
     *
     * @return The updated {@link TableServiceLogging} object.
     */
    public TableServiceLogging setDeleteLogged(boolean delete) {
        this.deleteLogged = delete;
        return this;
    }

    /**
     * Get a value that indicates whether all read requests should be logged.
     *
     * @return The {@code readLogged} value.
     */
    public boolean isReadLogged() {
        return this.readLogged;
    }

    /**
     * Set a value that indicates whether all read requests should be logged.
     *
     * @param read The {@code readLogged} value to set.
     *
     * @return The updated {@link TableServiceLogging} object.
     */
    public TableServiceLogging setReadLogged(boolean read) {
        this.readLogged = read;
        return this;
    }

    /**
     * Get a value that indicates whether all write requests should be logged.
     *
     * @return The {@code writeLogged} value.
     */
    public boolean isWriteLogged() {
        return this.writeLogged;
    }

    /**
     * Set a value that indicates whether all write requests should be logged.
     *
     * @param writeLogged The {@code writeLogged} value to set.
     *
     * @return The updated {@link TableServiceLogging} object.
     */
    public TableServiceLogging setWriteLogged(boolean writeLogged) {
        this.writeLogged = writeLogged;
        return this;
    }

    /**
     * Get the {@link TableServiceRetentionPolicy}.
     *
     * @return The {@link TableServiceRetentionPolicy}.
     */
    public TableServiceRetentionPolicy getRetentionPolicy() {
        return this.retentionPolicy;
    }

    /**
     * Set the {@link TableServiceRetentionPolicy}.
     *
     * @param retentionPolicy The {@link TableServiceRetentionPolicy} to set.
     *
     * @return The updated {@link TableServiceLogging} object.
     */
    public TableServiceLogging setRetentionPolicy(TableServiceRetentionPolicy retentionPolicy) {
        this.retentionPolicy = retentionPolicy;
        return this;
    }
}
|
package de.lv1871.oss.tester.test.dmnassert.model;
import java.util.List;
import java.util.Map;
public class DecisionSimulationResponse {
private final String message;
private final List<Map<String, Object>> result;
private final List<String> resultRuleIds;
private final Map<String, List<String>> resultTableRuleIds;
private DecisionSimulationResponse(String message, List<Map<String, Object>> result, List<String> resultRuleIds,
Map<String, List<String>> resultTableRuleIds) {
this.message = message;
this.result = result;
this.resultRuleIds = resultRuleIds;
this.resultTableRuleIds = resultTableRuleIds;
}
public Map<String, List<String>> getResultTableRuleIds() {
return resultTableRuleIds;
}
public List<Map<String, Object>> getResult() {
return result;
}
public String getMessage() {
return message;
}
public List<String> getResultRuleIds() {
return resultRuleIds;
}
public static class DecisionTestCaseResponseBuilder {
private String message = null;
private List<Map<String, Object>> result = null;
private List<String> resultRuleIds = null;
private Map<String, List<String>> resultTableRuleIds = null;
public static DecisionTestCaseResponseBuilder create() {
return new DecisionTestCaseResponseBuilder();
}
public DecisionTestCaseResponseBuilder withMessage(String message) {
this.message = message;
return this;
}
public DecisionTestCaseResponseBuilder withResult(List<Map<String, Object>> result) {
this.result = result;
return this;
}
public DecisionSimulationResponse build() {
return new DecisionSimulationResponse(message, result, resultRuleIds, resultTableRuleIds);
}
public DecisionTestCaseResponseBuilder withResultRuleIds(List<String> resultRuleIds) {
this.resultRuleIds = resultRuleIds;
return this;
}
public DecisionTestCaseResponseBuilder withResultTableRuleIds(Map<String, List<String>> resultTableRuleIds) {
this.resultTableRuleIds = resultTableRuleIds;
return this;
}
}
}
|
# References a notes.md
class Note < ApplicationRecord
  belongs_to :user

  validates :content, :project_name, presence: true
  # Fix: scope uniqueness by the foreign-key column. The previous
  # `scope: :user` read a non-existent :user attribute, so the check was
  # effectively not scoped per user as intended.
  validates :project_name, uniqueness: { scope: :user_id }
end
|
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml;
using Microsoft.Msagl.Core.Geometry;
using Microsoft.Msagl.Core.Geometry.Curves;
using Microsoft.Msagl.Core.Layout;
using Microsoft.Msagl.DebugHelpers.Persistence;
using Microsoft.Msagl.Drawing;
using Microsoft.Msagl.Routing;
using Node = Microsoft.Msagl.Drawing.Node;
namespace TestGraphmaps {
    /// <summary>
    /// Pull-parser for GEXF graph files. Builds an MSAGL <see cref="Graph"/> from
    /// the "nodes" and "edges" sections and, when every node carries viz geometry
    /// (size/position), creates the geometry graph and routes straight-line edges.
    /// NOTE(review): the hand-rolled Read()/ReadEndElement() sequencing below is
    /// tightly coupled to the expected element order — edit with care.
    /// </summary>
    internal class GexfParser {
        XmlReader xmlReader;
        XmlTextReader xmlTextReader;
        Graph graph;
        // Per-node viz attributes (size, position, attvalues) keyed by node id.
        Dictionary<string,GexfNodeAttr> idsToGexfNodeAttr=new Dictionary<string,GexfNodeAttr>();

        GexfParser(Stream stream) {
            var settings = new XmlReaderSettings {IgnoreComments = false, IgnoreWhitespace = true};
            xmlTextReader = new XmlTextReader(stream);
            xmlReader = XmlReader.Create(xmlTextReader, settings);
            graph = new Graph();
        }

        /// <summary>
        /// Parses the GEXF file at <paramref name="fileName"/>.
        /// NOTE(review): line/column/msg are always returned as 0/0/"" — parse
        /// errors are not actually reported through them.
        /// </summary>
        public static Graph Parse(string fileName, out int line, out int column, out string msg) {
            using (Stream stream = File.OpenRead(fileName)) {
                line = 0;
                column = 0;
                msg = "";
                var gexfReader = new GexfParser(stream);
                return gexfReader.Run();
            }
        }

        // Top-level parse loop; dispatches on the current element name, then
        // builds geometry if every node supplied size/position data.
        Graph Run() {
            xmlReader.MoveToContent();
            while (IsStartElement()) {
                switch (Name) {
                    case "edges":
                        ReadEdges();
                        break;
                    case "nodes":
                        ReadNodes();
                        break;
                    case "gexf":
                        Read();
                        break;
                    case "graph":
                        Read();
                        break;
                    default:
                        xmlReader.Skip();
                        break;
                }
            }
            if (!GeometryPresent(graph.Nodes))
                return graph;
            graph.CreateGeometryGraph();
            foreach (var n in graph.Nodes) {
                var geomNode = n.GeometryNode;
                GexfNodeAttr nodeData;
                if (idsToGexfNodeAttr.TryGetValue(n.Id, out nodeData)) {
                    n.Label.FontSize *= nodeData.Size;
                    geomNode.BoundaryCurve = CurveFactory.CreateCircle(nodeData.Size, nodeData.Position);
                }
            }
            foreach (var e in graph.Edges) {
                // Self-edges get a small offset so they remain visible.
                if (e.GeometryEdge.Source.BoundaryCurve != null && e.GeometryEdge.Target.BoundaryCurve != null)
                    StraightLineEdges.RouteEdge(e.GeometryEdge, e.Source==e.Target?graph.LayoutAlgorithmSettings.NodeSeparation/4: 0);
            }
            return graph;
        }

        // Geometry is considered present only when every node has a viz entry
        // with a non-zero size and at least one node has a non-origin position.
        bool GeometryPresent(IEnumerable<Node> nodes) {
            return nodes.Count() == idsToGexfNodeAttr.Count && idsToGexfNodeAttr.Values.All(v => v.Size != 0) &&
                   idsToGexfNodeAttr.Values.Any(v => v.Position != new Point(0, 0));
        }

        void ReadEdges() {
            Read();
            while (IsStartElement() && Name == "edge")
                ReadEdge();
            xmlReader.ReadEndElement();
        }

        // Reads one <edge source=".." target=".."/> element; skips any children.
        void ReadEdge() {
            graph.AddEdge(GetAttr("source"), GetAttr("target"));
            do {
                Read();
            } while (Name != "edge" && Name != "edges");
            if(Name=="edge"&& xmlReader.NodeType==XmlNodeType.EndElement)
                xmlReader.ReadEndElement();
        }

        void ReadNodes() {
            xmlReader.Read();
            while (IsStartElement() && Name == "node")
                ReadNode();
            if(Name=="nodes")
                xmlReader.ReadEndElement();
        }

        string Name {
            get { return xmlReader.Name; }
        }

        bool IsStartElement() {
            return xmlReader.IsStartElement();
        }

        void ReadNode() {
            string id=xmlReader.GetAttribute("id");
            var node = graph.AddNode(id);
            ReadNodeContent(node);
            if (IsStartElement() && Name == "node")
                return;
            Read();
        }

        void ReadNodeContent(Node node) {
            var label = xmlReader.GetAttribute("label");
            if (label != null)
                node.LabelText = label;
            xmlReader.Read();
            // An immediately following <node> (or the </nodes> end) means this
            // node element was empty — no viz/attvalue children to read.
            if (IsStartElement() && Name == "node"
                || Name=="nodes") return;
            ReadNodeFeatures(node);
        }

        // Joins all attvalue values into a space-separated label, overriding
        // the label attribute when attvalues are present.
        private void SetLabelFromAttrValues(Node node, GexfNodeAttr gexfNodeAttr)
        {
            if (gexfNodeAttr.Attvalues.Count > 0)
            {
                var first = true;
                String labelText = "";
                foreach (var attVal in gexfNodeAttr.Attvalues.Values)
                {
                    if (!first) labelText += " ";
                    labelText += attVal;
                    first = false;
                }
                node.LabelText = labelText;
            }
        }

        // Reads the viz:color / viz:position / viz:size / attvalues children of
        // the current node and records them in idsToGexfNodeAttr.
        void ReadNodeFeatures(Node node) {
            GexfNodeAttr gexfNodeAttr;
            idsToGexfNodeAttr[node.Id] = gexfNodeAttr = new GexfNodeAttr();
            while (xmlReader.IsStartElement()) {
                switch (xmlReader.Name) {
                    case "viz:color":
                        ReadColor(node);
                        xmlReader.Read();
                        xmlReader.ReadEndElement();
                        break;
                    case "viz:position":
                        ReadPosition(gexfNodeAttr);
                        xmlReader.Read();
                        xmlReader.ReadEndElement();
                        break;
                    case "viz:size":
                        ReadSize(gexfNodeAttr);
                        xmlReader.Read();
                        xmlReader.ReadEndElement();
                        break;
                    case "attvalues":
                        ReadAttvalues(gexfNodeAttr);
                        break;
                    default:
                        xmlReader.Skip();
                        break;
                }
                //xmlReader.Read();
            }
            SetLabelFromAttrValues(node, gexfNodeAttr);
        }

        private void ReadAttvalues(GexfNodeAttr gexfNodeAttr)
        {
            xmlReader.Read();
            while (IsStartElement() && Name == "attvalue")
            {
                ReadAttvalue(gexfNodeAttr);
                if(!xmlReader.IsEmptyElement)
                    xmlReader.Read();
                xmlReader.ReadEndElement();
            }
            if(Name=="attvalues")
                xmlReader.ReadEndElement();
        }

        private void ReadAttvalue(GexfNodeAttr gexfNodeAttr)
        {
            var attFor = xmlReader.GetAttribute("for");
            var attVal = xmlReader.GetAttribute("value");
            if (attFor != null && attVal != null)
                gexfNodeAttr.Attvalues[attFor] = attVal;
        }

        // NOTE(review): double.Parse here is culture-sensitive (CA1305); GEXF
        // always uses '.' decimals, so this breaks under comma-decimal locales —
        // consider CultureInfo.InvariantCulture.
        void ReadSize(GexfNodeAttr gexfNodeAttr) {
            var sizeVal = xmlReader.GetAttribute("value");
            if (sizeVal != null)
                gexfNodeAttr.Size = double.Parse(sizeVal);
        }

        // Position is only recorded when both x and y parse successfully.
        // NOTE(review): TryParse is culture-sensitive here as well.
        void ReadPosition(GexfNodeAttr gexfNodeAttr) {
            var xStr = GetAttr("x");
            if (xStr != null) {
                double x;
                if (double.TryParse(xStr, out x)) {
                    var yStr = GetAttr("y");
                    if(yStr!=null) {
                        double y;
                        if (double.TryParse(yStr, out y)) {
                            gexfNodeAttr.Position = new Point(x, y);
                        }
                    }
                }
            }
        }

        void Read() {
            xmlReader.Read();
        }

        string GetAttr(string a) {
            return xmlReader.GetAttribute(a);
        }

        // Reads a viz:color element's r/g/b byte attributes into the node style.
        void ReadColor(Node node) {
            var r = byte.Parse(GetAttr("r"));
            var g = byte.Parse(GetAttr("g"));
            var b = byte.Parse(GetAttr("b"));
            node.Attr.Color = new Color(r, g, b);
        }
    }
}
|
// Minimized reproduction of a type-inference discrepancy: Encoder resolution
// works when the Placeholder's second type argument is written explicitly but
// fails when it comes from the dependent result type of Placeholder.apply.
// Intentionally left as-is — the failing call is the point of the snippet.
trait Type[T]:
  type Out

type varchar

given Type[varchar] with
  type Out = String

class Placeholder[T, U]

object Placeholder:
  // Result type depends on the given Type instance: Placeholder[T, t.Out].
  def apply[T](using t: Type[T]): Placeholder[T, t.Out] = new Placeholder

trait Encoder[P, X]:
  def encode(x: X): String

object Encoder:
  def apply[P, X](placeholder: P)(using e: Encoder[P, X]): X => String = e.encode

  given [T, X]: Encoder[Placeholder[T, X], X] with
    def encode(x: X): String = ???

def Test =
  // the following compiles just fine
  Encoder(new Placeholder[varchar, String])("hello")
  // the following fails
  Encoder(Placeholder[varchar])("hello")
|
// Read-only API v1.0 page exposing income statement report lines, backed by the
// temporary "Acc. Schedule Line Entity" table. Data is (re)generated on open.
page 20035 "APIV1 - Income Statement"
{
    APIVersion = 'v1.0';
    Caption = 'incomeStatement', Locked = true;
    DelayedInsert = true;
    DeleteAllowed = false;
    Editable = false;
    EntityName = 'incomeStatement';
    EntitySetName = 'incomeStatement';
    InsertAllowed = false;
    ModifyAllowed = false;
    PageType = API;
    SourceTable = "Acc. Schedule Line Entity";
    SourceTableTemporary = true;
    Extensible = false;

    layout
    {
        area(content)
        {
            repeater(Group)
            {
                field(lineNumber; "Line No.")
                {
                    Caption = 'lineNumber', Locked = true;
                }
                // NOTE(review): field name "display" vs caption 'description' —
                // confirm this mismatch is intentional (API field names are
                // contract-breaking to change).
                field(display; Description)
                {
                    Caption = 'description', Locked = true;
                }
                field(netChange; "Net Change")
                {
                    AutoFormatType = 0;
                    BlankZero = true;
                    Caption = 'netChange', Locked = true;
                }
                field(lineType; "Line Type")
                {
                    Caption = 'lineType', Locked = true;
                }
                field(indentation; Indentation)
                {
                    Caption = 'indentation', Locked = true;
                }
                field(dateFilter; "Date Filter")
                {
                    Caption = 'dateFilter', Locked = true;
                }
            }
        }
    }

    actions
    {
    }

    // Populates the temporary source table with income statement data.
    trigger OnOpenPage()
    var
        GraphMgtReports: Codeunit "Graph Mgt - Reports";
        RecVariant: Variant;
        ReportAPIType: Option "Balance Sheet","Income Statement","Trial Balance","CashFlow Statement","Aged Accounts Payable","Aged Accounts Receivable","Retained Earnings";
    begin
        RecVariant := Rec;
        GraphMgtReports.SetUpAccountScheduleBaseAPIDataWrapper(RecVariant, ReportAPIType::"Income Statement");
    end;
}
|
/**
 * Formats a duration given in seconds.
 *
 * type 1: Vietnamese text — "HH giờ MM phút", or just "MM phút" under an hour.
 * type 2: "MM:SS" (NOTE: assumes durations under an hour — hours are dropped).
 * Any other type returns undefined (unchanged behavior).
 */
export function displayDuration(sec: number, type = 1) {
  const pad2 = (value: number) => `0${Math.floor(value)}`.slice(-2)

  const h = pad2(sec / 3600)
  const m = pad2((sec % 3600) / 60)
  const s = pad2(sec % 60)

  if (type === 1) {
    return h !== '00' ? `${h} giờ ${m} phút` : `${m} phút`
  }
  if (type === 2) {
    return `${m}:${s}`
  }
}
|
module Lib
( someFunc
) where
import Prelude ()
import Prelude.Compat
import Control.Monad.Except
import Control.Monad.Reader
import Data.Aeson
import Data.Aeson.Types
import Data.Attoparsec.ByteString
import Data.ByteString (ByteString)
import Data.List
import Data.Maybe
import Data.String.Conversions
import Data.Time.Calendar
import GHC.Generics
import Lucid
import Network.HTTP.Media ((//), (/:))
import Network.Wai
import Network.Wai.Handler.Warp
import System.Directory
import Text.Blaze
import Text.Blaze.Html.Renderer.Utf8
import Servant.Types.SourceT (source)
import qualified Data.Aeson.Parser
import qualified Text.Blaze.Html
import Data.Swagger
import Servant
import Servant.Swagger
import Servant.Swagger.UI
import System.IO
import Control.Lens hiding ((.=))
import Persistence
-- REST surface: user list plus two fixed sample users.
type UserAPI = "users" :> Get '[JSON] [User]
          :<|> "albert" :> Get '[JSON] User
          :<|> "isaac" :> Get '[JSON] User

instance ToJSON User
instance ToSchema User

-- Fixed sample users returned by the /isaac and /albert endpoints.
isaac :: User
isaac = User "Isaac Newton" 372 "isaac@newton.co.uk" (fromGregorian 1683 3 1)

albert :: User
albert = User "Albert Einstein" 136 "ae@mc2.org" (fromGregorian 1905 12 1)

-- /users is backed by the persistence layer.
users :: Handler [User]
users = liftIO getPersons

-- NOTE(review): despite its name this is a proxy for the full API (swagger UI
-- included), not just UserAPI — consider renaming to apiProxy.
userAPI :: Proxy API
userAPI = Proxy

-- Full API: swagger UI + schema at /swagger-ui and /swagger.json, then UserAPI.
type API = SwaggerSchemaUI "swagger-ui" "swagger.json"
      :<|> UserAPI

-- Handler order must match the :<|> structure of API / UserAPI.
server :: Server API
server = swaggerSchemaUIServer swaggerDoc :<|> users :<|> pure albert :<|> pure isaac

app :: Application
app = serve userAPI server

-- Swagger document is derived from UserAPI only (the UI routes themselves are
-- not documented) and then annotated with title/version/description.
swaggerDoc :: Swagger
swaggerDoc = toSwagger (Proxy :: Proxy UserAPI)
    & info.title       .~ "Operden API"
    & info.version     .~ "1.0.0"
    & info.description ?~ "This is an API that perform some operen actions"

-- Entry point: log startup markers and serve on port 8081.
-- NOTE(review): `foo` is not defined here — presumably exported by Persistence;
-- confirm.
someFunc :: IO ()
someFunc = do
    print $ foo "Running"
    --_ <- m
    print $ foo "Inserted"
    hFlush stdout
    run 8081 app
|
"use strict";

const MINTEMP = 10;

// A simple thermostat. Temperature starts at 20, is bounded below by MINTEMP
// and above by a cap that depends on power-saving mode (On: 25, Off: 32).
// Power-saving mode is enabled on construction.
function Thermostat() {
  this.temp = 20;
  this.minTemp = MINTEMP;
  this.powerSavingMode("On");
}

// Current temperature reading.
Thermostat.prototype.getCurrentTemp = function() {
  return this.temp;
};

// Raise by one degree; refuses to exceed the current maximum.
Thermostat.prototype.up = function() {
  if (this.isMaxTemp()) {
    throw new Error("Over Max Temp");
  }
  this.temp += 1;
};

// Lower by one degree; refuses to go below the minimum.
Thermostat.prototype.down = function() {
  if (this.isMinTemp()) {
    throw new Error("Min Temp is reached");
  }
  this.temp -= 1;
};

Thermostat.prototype.isMinTemp = function() {
  return this.temp === this.minTemp;
};

Thermostat.prototype.isMaxTemp = function() {
  return this.temp === this.maxTemp;
};

// Switch power-saving mode; "On" caps at 25, "Off" at 32. Any other status
// leaves the cap untouched. Returns the status that was passed in.
Thermostat.prototype.powerSavingMode = function(status = "On") {
  if (status === "On") {
    this._setMaxTemp(25);
  }
  if (status === "Off") {
    this._setMaxTemp(32);
  }
  return status;
};

// Restore the default temperature (mode and cap are unchanged).
Thermostat.prototype.reset = function() {
  return (this.temp = 20);
};

// Coarse energy classification of the current temperature.
Thermostat.prototype.currentEnergyUsage = function() {
  if (this.temp < 18) {
    return "low-usage";
  }
  return this.temp < 25 ? "medium-usage" : "high-usage";
};

// Internal helper: set the upper temperature bound.
Thermostat.prototype._setMaxTemp = function(degrees) {
  this.maxTemp = degrees;
};
|
package de.codeshelf.consoleui.prompt.builder;
import de.codeshelf.consoleui.elements.items.impl.ChoiceItem;
/**
* Created by andy on 22.01.16.
*/
/**
 * Fluent builder for a single {@link ChoiceItem} that is appended to its
 * parent {@link ExpandableChoicePromptBuilder} via {@link #add()}.
 */
public class ExpandableChoiceItemBuilder {
  private final ExpandableChoicePromptBuilder choicePromptBuilder;
  private String name;
  private String message;
  private Character key;
  private boolean asDefault;

  public ExpandableChoiceItemBuilder(ExpandableChoicePromptBuilder choicePromptBuilder) {
    this.choicePromptBuilder = choicePromptBuilder;
  }

  /** Sets the value reported when this item is chosen. */
  public ExpandableChoiceItemBuilder name(String name) {
    this.name = name;
    return this;
  }

  /** Sets the text displayed for this item. */
  public ExpandableChoiceItemBuilder message(String message) {
    this.message = message;
    return this;
  }

  /** Sets the keyboard shortcut that selects this item. */
  public ExpandableChoiceItemBuilder key(char key) {
    this.key = key;
    return this;
  }

  /** Marks this item as the pre-selected default. */
  public ExpandableChoiceItemBuilder asDefault() {
    this.asDefault = true;
    return this;
  }

  /** Builds the item, registers it with the parent and returns the parent. */
  public ExpandableChoicePromptBuilder add() {
    choicePromptBuilder.addItem(new ChoiceItem(key, name, message, asDefault));
    return choicePromptBuilder;
  }
}
|
import React from 'react';
import { storiesOf } from '@kadira/storybook'; // eslint-disable-line
import HotelCard from 'components/HotelSearchResult/HotelCard';
import { fromJS } from 'immutable';
// Fixture hotels for the HotelCard stories below, covering the different
// review-score bands and star ratings the card renders.
// NOTE(review): variable named fiveStarHotel carries star: 4.5 — confirm the
// name/data mismatch is intentional.
const fiveStarHotel = fromJS({
  address: '44B LY THUONG KIET St',
  cheapestRate: {
    price: {
      amount: 574,
      currencyCode: 'SGD',
    },
  },
  cityName: 'Hanoi',
  districtName: 'Hoan Kiem District - Consulates/Government',
  id: '111799',
  imageUrl: 'http://res.cloudinary.com/wego/image/upload/v1395103767/hotels/111799/15711099.jpg',
  name: 'Melia Hanoi',
  reviewCount: '1000',
  reviewDescription: 'Very Good',
  reviewScore: '80',
  star: 4.5,
});

// Low review score (71, "Poor") — exercises the orange score styling.
const threeStarHotel = fromJS({
  address: '48 Tran Nhan Tong street n',
  cheapestRate: {
    price: {
      amount: 906,
      currencyCode: 'SGD',
    },
  },
  cityName: 'Hanoi',
  districtName: 'Pho Hue / Hom Market - Thong Nhat Park',
  id: '111797',
  imageUrl: 'http://res.cloudinary.com/wego/image/upload/v1395103123/hotels/111797/15713422.jpg',
  name: 'Ocean Hotel I',
  reviewCount: '5',
  reviewDescription: 'Poor',
  reviewScore: '71',
  star: 3,
});

// Same hotel data as above but with a high score (85, "Very Good").
const fourStarHotel = fromJS({
  address: '48 Tran Nhan Tong street n',
  cheapestRate: {
    price: {
      amount: 906,
      currencyCode: 'SGD',
    },
  },
  cityName: 'Hanoi',
  districtName: 'Pho Hue / Hom Market - Thong Nhat Park',
  id: '111797',
  imageUrl: 'http://res.cloudinary.com/wego/image/upload/v1395103123/hotels/111797/15713422.jpg',
  name: 'Ocean Hotel I',
  reviewCount: '500',
  reviewDescription: 'Very Good',
  reviewScore: '85',
  star: 4,
});

// Lowest score in the set (63) — exercises the red score styling.
const twoStarHotel = fromJS({
  address: '1 Cam Chi St (Hang Bong) Hoan Kiem District, Hanoi',
  cheapestRate: {
    price: {
      amount: 800,
      currencyCode: 'SGD',
    },
  },
  cityName: 'Hanoi',
  districtName: 'Ba Dinh',
  id: '111798',
  imageUrl: 'http://res.cloudinary.com/wego/image/upload/v1397026892/hotels/111798/7678197.jpg',
  name: 'Hanoi Street Hotel',
  reviewCount: '35',
  reviewDescription: 'Poor',
  reviewScore: '63',
  star: 2,
});

// One story per score band.
storiesOf('HotelCard', module)
  .add('Excellent', () => (
    <div>
      <HotelCard hotel={fiveStarHotel} />
    </div>
  ))
  .add('Very Good', () => (
    <HotelCard hotel={fourStarHotel} />
  ))
  .add('Fair (Orange)', () => (
    <HotelCard hotel={threeStarHotel} />
  ))
  .add('Fair (Red)', () => (
    <HotelCard hotel={twoStarHotel} />
  ));
|
# Normative references
```{bibliography}
:filter: keywords % "nr"
:labelprefix: NR_MIS_
:keyprefix: mis-
```
|
Status:
Tags:
Links: [[Obsidian Community Plugins]]
___
# Obsidian Checklist Plugin
- Can be used with Kanban Boards
## Possible Uses
- Daily tasks
- Use the hashtag on the 1 3 5 daily note for a quick reference
- Use it to pull in tasks from specific, chosen tags
- Use it to switch between projects to see all that needs to be done
___
# Backlinks
```dataview
list from [[Obsidian Checklist Plugin]]
```
___
References:
Created:: 2021-07-29 19:15
|
using Microsoft.Xna.Framework;
using Terraria;
using Terraria.ModLoader;
using TerrariaOverhaul.Core.Systems.Debugging;
using TerrariaOverhaul.Utilities;
using TerrariaOverhaul.Utilities.DataStructures;
using TerrariaOverhaul.Utilities.Extensions;
namespace TerrariaOverhaul.Common.ModEntities.Players
{
	/// <summary>
	/// Estimates how "enclosed" the local player is by counting reachable
	/// background-wall tiles (or sub-surface air) in an 11x11 flood-fill around
	/// the player, exposing the result as <see cref="OcclusionFactor"/> in [0, 1].
	/// </summary>
	public sealed class PlayerWallOcclusion : ModPlayer
	{
		// 0 = fully open, 1 = at least 40% of the flood-filled area is walled.
		public float OcclusionFactor { get; private set; }

		public override void PostUpdate()
		{
			// Only computed for the locally-controlled player.
			if(!Player.IsLocal()) {
				return;
			}

			// 11x11 tile window centered on the player.
			Vector2Int areaCenter = Player.Center.ToTileCoordinates();
			Vector2Int halfSize = new Vector2Int(5, 5);
			Vector2Int size = halfSize * 2;
			Vector2Int start = areaCenter - halfSize;
			Vector2Int end = areaCenter + halfSize;

			const float RequiredWallRatio = 0.4f;

			int maxTiles = size.X * size.Y;
			int requiredWallTiles = (int)(maxTiles * RequiredWallRatio);
			int numWalls = 0;

			// Flood-fill from the player; solid tiles block the fill, every
			// reached non-solid tile with a wall (or below the world surface)
			// counts toward the occlusion total.
			GeometryUtils.FloodFill(
				areaCenter - start,
				size,
				(Vector2Int p, out bool occupied, ref bool stop) => {
					int x = p.X + start.X;
					int y = p.Y + start.Y;
					Tile tile = Main.tile[x, y];

					occupied = tile.IsActive && Main.tileSolid[tile.type] && !Main.tileSolidTop[tile.type] && tile.BlockType == Terraria.ID.BlockType.Solid;

					if(!occupied && (tile.wall > 0 || y >= Main.worldSurface)) {
						numWalls++;

						if(DebugSystem.EnableDebugRendering) {
							DebugSystem.DrawRectangle(new Rectangle(x * 16, y * 16, 16, 16), Color.Red, 1);
						}

						// Early-out once the threshold is met, unless debug
						// rendering wants to visit (and draw) every tile.
						if(numWalls >= requiredWallTiles && !DebugSystem.EnableDebugRendering) {
							stop = true;
						}
					}
				}
			);

			OcclusionFactor = MathHelper.Clamp(numWalls / (float)requiredWallTiles, 0f, 1f);
		}
	}
}
|
# ----------------------------------------------------------------------------------
#
# Copyright Microsoft Corporation
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------------
<#
.SYNOPSIS
Tests getting index recommendations at server, database and individual
recommendation scope. Each call is expected to return a single "Active"
recommendation; ValidateResponse performs the shared shape assertions.
#>
function Test-GetIndexRecommendations
{
    # Get all recommended indexes for the server
    $response = Get-AzSqlDatabaseIndexRecommendation -ResourceGroup Group-6 -ServerName witest-eus
    ValidateResponse($response)
    Assert-AreEqual "Active" $response[0].State

    # Get all recommended indexes for a single database
    $response = Get-AzSqlDatabaseIndexRecommendation -ResourceGroup Group-6 -ServerName witest-eus -DatabaseName witestdb-eus
    ValidateResponse($response)
    Assert-AreEqual "Active" $response[0].State

    # Get one recommendation by its name
    $response = Get-AzSqlDatabaseIndexRecommendation -ResourceGroup Group-6 -ServerName witest-eus -DatabaseName witestdb-eus -IndexRecommendationName nci_wi_Clusters_034590D0-0378-4AB9-96D5-C144B14F6A9B
    ValidateResponse($response)
    Assert-AreEqual "Active" $response[0].State
}
<#
.SYNOPSIS
Tests starting and canceling index operation
#>
function Test-CreateIndex
{
    # Start executing the recommended index; a freshly started operation reports "Pending"
    $response = Start-AzSqlDatabaseExecuteIndexRecommendation -ResourceGroup Group-6 -ServerName witest-eus -DatabaseName witestdb-eus -IndexRecommendationName nci_wi_Clusters_034590D0-0378-4AB9-96D5-C144B14F6A9B
    Assert-AreEqual "Pending" $response[0].State

    # Cancel the index operation; the recommendation reverts to "Active"
    $response = Stop-AzSqlDatabaseExecuteIndexRecommendation -ResourceGroup Group-6 -ServerName witest-eus -DatabaseName witestdb-eus -IndexRecommendationName nci_wi_Clusters_034590D0-0378-4AB9-96D5-C144B14F6A9B
    Assert-AreEqual "Active" $response[0].State
}
# Shared assertions used by the tests above: the response contains exactly one
# recommendation with the expected name, action, timestamps, type, schema and table.
function ValidateResponse($response)
{
    Assert-NotNull $response
    Assert-AreEqual 1 $response.Count
    Assert-AreEqual "nci_wi_Clusters_034590D0-0378-4AB9-96D5-C144B14F6A9B" $response[0].Name
    Assert-AreEqual "Create" $response[0].Action
    Assert-AreEqual '07/21/2015 17:12:32' $response[0].Created
    Assert-AreEqual "NONCLUSTERED" $response[0].IndexType
    Assert-AreEqual '07/21/2015 17:12:32' $response[0].LastModified
    Assert-AreEqual "dbo" $response[0].Schema
    Assert-AreEqual "Clusters" $response[0].Table
}
|
---
title: Filters API | ldapjs
markdown2extras: wiki-tables
logo-color: green
logo-font-family: google:Aldrich, Verdana, sans-serif
header-font-family: google:Aldrich, Verdana, sans-serif
---
# ldapjs Filters API
This document covers the ldapjs filters API and assumes that you are familiar
with LDAP. If you're not, read the [guide](http://ldapjs.org/guide.html) first.
LDAP search filters are really the backbone of LDAP search operations, and
ldapjs tries to get you in "easy" with them if your dataset is small, and also
lets you introspect them if you want to write a "query planner". For reference,
make sure to read over [RFC2254](http://www.ietf.org/rfc/rfc2254.txt), as this
explains the LDAPv3 text filter representation.
ldapjs gives you a distinct object type mapping to each filter that is
context-sensitive. However, _all_ filters have a `matches()` method on them, if
that's all you need. Most filters will have an `attribute` property on them,
since "simple" filters all operate on an attribute/value assertion. The
"complex" filters are really aggregations of other filters (i.e. 'and'), and so
these don't provide that property.
All Filters in the ldapjs framework extend from `Filter`, which will have the
property `type` available; this will return a string name for the filter, and
will be one of:
||equal||an `EqualityFilter`||
||present||a `PresenceFilter`||
||substring||a `SubstringFilter`||
||ge||a `GreaterThanEqualsFilter`||
||le||a `LessThanEqualsFilter`||
||and||an `AndFilter`||
||or||an `OrFilter`||
||not||a `NotFilter`||
||approx||an `ApproximateMatchFilter` (quasi-supported in ldapjs)||
||ext||an `ExtensibleMatchFilter` (not supported in ldapjs)||
# parseFilter(filterString)
Parses an [RFC2254](http://www.ietf.org/rfc/rfc2254.txt) filter string into an
ldapjs object(s). If the filter is "complex", it will be a "tree" of objects.
For example:
var parseFilter = require('ldapjs').parseFilter;
var f = parseFilter('(objectclass=*)');
Is a "simple" filter, and would just return a `PresenceFilter` object. However,
var f = parseFilter('(&(employeeType=manager)(l=Seattle))');
Would return an `AndFilter`, which would have a `filters` array of two
`EqualityFilter` objects.
`parseFilter` will throw if an invalid string is passed in (that is, a
syntactically invalid string). All filter objects in the ldapjs framework are described below.
# EqualityFilter
The equality filter is used to check exact matching of attribute/value
assertions. This object will have an `attribute` and `value` property, and the
`name` property will be `equal`.
The string syntax for an equality filter is `(attr=value)`.
The `matches()` method will return true IFF the passed in object has a
key matching `attribute` and a value matching `value`.
var f = new EqualityFilter({
attribute: 'cn',
value: 'foo'
});
f.matches({cn: 'foo'}); => true
f.matches({cn: 'bar'}); => false
Equality matching uses "strict" type JavaScript comparison, and by default
everything in ldapjs (and LDAP) is a UTF-8 string. If you want comparison
of numbers, or something else, you'll need to use a middleware interceptor
that transforms values of objects.
# PresenceFilter
The presence filter is used to check if an object has an attribute at all, with
any value. This object will have an `attribute` property, and the `name`
property will be `present`.
The string syntax for a presence filter is `(attr=*)`.
The `matches()` method will return true IFF the passed in object has a
key matching `attribute`.
var f = new PresenceFilter({
attribute: 'cn'
});
f.matches({cn: 'foo'}); => true
f.matches({sn: 'foo'}); => false
# SubstringFilter
The substring filter is used to do wildcard matching of a string value. This
object will have an `attribute` property and then it will have an `initial`
property, which is the prefix match, an `any` which will be an array of strings
that are to be found _somewhere_ in the target string, and a `final` property,
which will be the suffix match of the string. `any` and `final` are both
optional. The `name` property will be `substring`.
The string syntax for a substring filter is `(attr=foo*bar*cat*dog)`, which would
map to:
{
initial: 'foo',
any: ['bar', 'cat'],
final: 'dog'
}
The `matches()` method will return true IFF the passed in object has a
key matching `attribute` and the "regex" matches the value
var f = new SubstringFilter({
attribute: 'cn',
initial: 'foo',
any: ['bar'],
final: 'baz'
});
f.matches({cn: 'foobigbardogbaz'}); => true
f.matches({sn: 'fobigbardogbaz'}); => false
# GreaterThanEqualsFilter
The ge filter is used to do comparisons and ordering based on the value type. As
mentioned elsewhere, by default everything in LDAP and ldapjs is a string, so
this filter's `matches()` would be using lexicographical ordering of strings.
If you wanted `>=` semantics over numeric values, you would need to add some
middleware to convert values before comparison (and the value of the filter).
Note that the ldapjs schema middleware will do this.
The GreaterThanEqualsFilter will have an `attribute` property, a `value`
property and the `name` property will be `ge`.
The string syntax for a ge filter is:
(cn>=foo)
The `matches()` method will return true IFF the passed in object has a
key matching `attribute` and the value is `>=` this filter's `value`.
var f = new GreaterThanEqualsFilter({
attribute: 'cn',
value: 'foo',
});
f.matches({cn: 'foobar'}); => true
f.matches({cn: 'abc'}); => false
# LessThanEqualsFilter
The le filter is used to do comparisons and ordering based on the value type. As
mentioned elsewhere, by default everything in LDAP and ldapjs is a string, so
this filter's `matches()` would be using lexicographical ordering of strings.
If you wanted `<=` semantics over numeric values, you would need to add some
middleware to convert values before comparison (and the value of the filter).
Note that the ldapjs schema middleware will do this.
The string syntax for a le filter is:
(cn<=foo)
The LessThanEqualsFilter will have an `attribute` property, a `value`
property and the `name` property will be `le`.
The `matches()` method will return true IFF the passed in object has a
key matching `attribute` and the value is `<=` this filter's `value`.
var f = new LessThanEqualsFilter({
attribute: 'cn',
value: 'foo',
});
f.matches({cn: 'abc'}); => true
f.matches({cn: 'foobar'}); => false
# AndFilter
The and filter is a complex filter that simply contains "child" filters. The
object will have a `filters` property which is an array of `Filter` objects. The
`name` property will be `and`.
The string syntax for an and filter is (assuming below we're and'ing two
equality filters):
(&(cn=foo)(sn=bar))
The `matches()` method will return true IFF the passed in object matches all
the filters in the `filters` array.
var f = new AndFilter({
filters: [
new EqualityFilter({
attribute: 'cn',
value: 'foo'
}),
new EqualityFilter({
attribute: 'sn',
value: 'bar'
})
]
});
f.matches({cn: 'foo', sn: 'bar'}); => true
f.matches({cn: 'foo', sn: 'baz'}); => false
# OrFilter
The or filter is a complex filter that simply contains "child" filters. The
object will have a `filters` property which is an array of `Filter` objects. The
`name` property will be `or`.
The string syntax for an or filter is (assuming below we're or'ing two
equality filters):
(|(cn=foo)(sn=bar))
The `matches()` method will return true IFF the passed in object matches *any*
of the filters in the `filters` array.
var f = new OrFilter({
filters: [
new EqualityFilter({
attribute: 'cn',
value: 'foo'
}),
new EqualityFilter({
attribute: 'sn',
value: 'bar'
})
]
});
f.matches({cn: 'foo', sn: 'baz'}); => true
f.matches({cn: 'bar', sn: 'baz'}); => false
# NotFilter
The not filter is a complex filter that contains a single "child" filter. The
object will have a `filter` property which is an instance of a `Filter` object.
The `name` property will be `not`.
The string syntax for a not filter is (assuming below we're not'ing an
equality filter):
(!(cn=foo))
The `matches()` method will return true IFF the passed in object does not match
the filter in the `filter` property.
var f = new NotFilter({
filter: new EqualityFilter({
attribute: 'cn',
value: 'foo'
})
});
f.matches({cn: 'bar'}); => true
f.matches({cn: 'foo'}); => false
# ApproximateFilter
The approximate filter is used to check "approximate" matching of
attribute/value assertions. This object will have an `attribute` and
`value` property, and the `name` property will be `approx`.
As a side point, this is a useless filter. It's really only here if you have
some whacky client that's sending this. It just does an exact match (which
is what ActiveDirectory does too).
The string syntax for an equality filter is `(attr~=value)`.
The `matches()` method will return true IFF the passed in object has a
key matching `attribute` and a value exactly matching `value`.
var f = new ApproximateFilter({
attribute: 'cn',
value: 'foo'
});
f.matches({cn: 'foo'}); => true
f.matches({cn: 'bar'}); => false
|
# frozen_string_literal: true

$LOAD_PATH.push File.expand_path('lib', __dir__)

# Maintain your gem's version:
require 'open_invoice/version'

# Describe your gem and declare its dependencies:
Gem::Specification.new do |spec|
  spec.name        = 'open_invoice'
  spec.version     = OpenInvoice::VERSION
  spec.authors     = ['moduloTech']
  spec.email       = ['philib_j@modulotech.fr']
  spec.homepage    = 'https://open-invoice.io'
  spec.summary     = 'OpenInvoice Rails engine provides easy way to manage invoices.'
  spec.description = 'Description of OpenInvoice.'
  spec.license     = 'MIT'

  # Packaged files: engine code plus licensing, docs and JS packaging assets.
  spec.files = Dir['{app,config,lib}/**/*', 'MIT-LICENSE', 'Rakefile', 'README.md',
                   'package.json', 'yarn.lock', 'r.sh']

  # Runtime dependencies of the engine.
  spec.add_dependency 'rails', '~> 6.0'
  spec.add_dependency 'carrierwave-aws'
  spec.add_dependency 'jbuilder', '~> 2.9'
  spec.add_dependency 'orm_adapter', '~> 0.5.0'
end
|
---
title: Release Inferno v5.0.0 · infernojs/inferno
author: azu
layout: post
itemUrl: 'https://github.com/infernojs/inferno/releases/tag/v5.0.0'
editJSONPath: 'https://github.com/jser/jser.info/edit/gh-pages/data/2018/03/index.json'
date: '2018-03-18T05:00:56Z'
tags:
- JavaScript
- React
- ReleaseNote
- library
---
Inferno v5.0.0 リリース。 `.mjs`で配布していたのを`.js`に変更、 IE9のサポート終了、TypeScriptのサポート改善など
|
<?php
class M_sale extends CI_Model {

    // Look up a single product record by its barcode.
    // Returns the matching row as an associative array, or null when no
    // product with that barcode exists.
    function get_product( $barcode )
    {
        $query = $this->db->get_where('product', array('barcode' => $barcode));

        return $query->row_array();
    }

}
|
-- Inserts or updates a quest row keyed by its unique internal_name.
-- Every human-readable text is localized: each value is resolved (or created)
-- through static.get_localized_text_id_or_insert for p_language, and the quest
-- row stores only the resulting text ids. Returns the quest_id of the
-- inserted/updated row. SECURITY DEFINER so callers need no direct table grants.
create or replace function static.quest_upsert
( p_internal_name static.quest.internal_name%TYPE
, p_name static.localized_text_detail.value%TYPE
, p_start_text static.localized_text_detail.value%TYPE
, p_success_text static.localized_text_detail.value%TYPE
, p_fail_text static.localized_text_detail.value%TYPE
, p_summary_text static.localized_text_detail.value%TYPE
, p_reward_id static.quest.reward_id%TYPE
, p_language static.language
)
returns integer AS $$
declare
v_name_id static.localized_text.text_id%TYPE;
v_start_id static.quest.start_text_id%TYPE;
v_success_id static.quest.success_text_id%TYPE;
v_fail_id static.quest.fail_text_id%TYPE;
v_summary_id static.quest.summary_text_id%TYPE;
v_quest_id static.quest.quest_id%TYPE;
begin
-- Resolve (or lazily create) a localized text id for each text parameter.
select static.get_localized_text_id_or_insert(p_language,p_name) into v_name_id;
select static.get_localized_text_id_or_insert(p_language,p_start_text) into v_start_id;
select static.get_localized_text_id_or_insert(p_language,p_success_text) into v_success_id;
select static.get_localized_text_id_or_insert(p_language,p_fail_text) into v_fail_id;
select static.get_localized_text_id_or_insert(p_language,p_summary_text) into v_summary_id;
-- Upsert on the internal_name unique key. The WHERE on the UPDATE branch
-- restates the conflict key (always true for the conflicting row).
INSERT INTO static.quest(
quest_id, internal_name, name_id, start_text_id, success_text_id,
fail_text_id, summary_text_id, reward_id)
VALUES (DEFAULT, p_internal_name, v_name_id, v_start_id, v_success_id, v_fail_id, v_summary_id, p_reward_id)
ON CONFLICT (internal_name)
DO UPDATE
SET name_id=v_name_id
, start_text_id=v_start_id
, success_text_id = v_success_id
, fail_text_id = v_fail_id
, summary_text_id = v_summary_id
, reward_id=p_reward_id
where static.quest.internal_name = p_internal_name
returning quest_id into v_quest_id;
return v_quest_id;
end; $$
language PLPGSQL security definer;
;
|
fun main() {
    // Demonstrates arithmetic precedence with explicit parentheses.
    val product = (2 + 3) * 4
    println(product) // expect: 20

    val quotient = 100 / ((4 + 1) * 2)
    println(quotient) // expect: 10
}
|
#!/bin/bash
#
# Build Figicons
#
# Optimize and build icons:
# ./build.sh publish
#
# Optimize, build icons and create Figicons:
# ./build.sh
# Reset the dist/ output directory to an empty state.
function DELETE_TEMP {
    rm -rf ./dist
    mkdir dist
}
# Run the project's yarn "build" script.
function BUILD {
    yarn build
}
# Run the project's yarn "package" script (expects to run from the repo root).
function PUBLISH {
    yarn package
}
# Flatten the build output: move everything from dist/src up into dist/,
# then remove the now-redundant dist/src directory.
#
# The cd/mv pair runs in a subshell so the caller's working directory is
# unchanged. The previous version left the shell inside dist/, which made
# `rm -rf dist/src` a no-op (it targeted dist/dist/src) and caused the later
# PUBLISH and CLEAN steps to run from the wrong directory.
function COPY {
    ( cd dist/src && mv * ../ )
    rm -rf dist/src
}
# Remove the dist/ directory entirely.
function CLEAN {
    rm -rf dist
}
# Entry point: "./build.sh publish" builds, flattens and packages;
# "./build.sh" with no arguments performs a plain build.
# Any other argument falls through and does nothing.
if [[ $1 == "publish" ]]; then
    DELETE_TEMP
    BUILD
    COPY
    PUBLISH
    CLEAN
    echo -e "Build & publish completed."
elif [ $# -eq 0 ]; then
    DELETE_TEMP
    BUILD
    echo -e "Build completed."
fi
|
# pylint: disable=no-self-use,invalid-name
from __future__ import division
from __future__ import absolute_import
import pytest
from allennlp.data.dataset_readers import Seq2SeqDatasetReader
from allennlp.common.util import ensure_list
from allennlp.common.testing import AllenNlpTestCase
class TestSeq2SeqDatasetReader(object):
    """Tests for Seq2SeqDatasetReader on the seq2seq_copy.tsv fixture.

    The fixture contains three source/target pairs whose targets copy the
    sources; the reader wraps token sequences with @start@/@end@ markers.
    Written in Python 2/3-compatible style (python-future), hence the
    ``unicode`` calls and ``u""`` literals.
    """

    @pytest.mark.parametrize(u"lazy", (True, False))
    def test_default_format(self, lazy):
        # Lazy and eager reading should produce identical instances.
        reader = Seq2SeqDatasetReader(lazy=lazy)
        instances = reader.read(unicode(AllenNlpTestCase.FIXTURES_ROOT / u'data' / u'seq2seq_copy.tsv'))
        instances = ensure_list(instances)

        assert len(instances) == 3
        fields = instances[0].fields
        assert [t.text for t in fields[u"source_tokens"].tokens] == [u"@start@", u"this", u"is",
                                                                     u"a", u"sentence", u"@end@"]
        assert [t.text for t in fields[u"target_tokens"].tokens] == [u"@start@", u"this", u"is",
                                                                     u"a", u"sentence", u"@end@"]
        fields = instances[1].fields
        assert [t.text for t in fields[u"source_tokens"].tokens] == [u"@start@", u"this", u"is",
                                                                     u"another", u"@end@"]
        assert [t.text for t in fields[u"target_tokens"].tokens] == [u"@start@", u"this", u"is",
                                                                     u"another", u"@end@"]
        fields = instances[2].fields
        assert [t.text for t in fields[u"source_tokens"].tokens] == [u"@start@", u"all", u"these", u"sentences",
                                                                     u"should", u"get", u"copied", u"@end@"]
        assert [t.text for t in fields[u"target_tokens"].tokens] == [u"@start@", u"all", u"these", u"sentences",
                                                                     u"should", u"get", u"copied", u"@end@"]

    def test_source_add_start_token(self):
        # With source_add_start_token=False only the @end@ marker is added
        # to source sequences; targets keep both markers.
        reader = Seq2SeqDatasetReader(source_add_start_token=False)
        instances = reader.read(unicode(AllenNlpTestCase.FIXTURES_ROOT / u'data' / u'seq2seq_copy.tsv'))
        instances = ensure_list(instances)

        assert len(instances) == 3
        fields = instances[0].fields
        assert [t.text for t in fields[u"source_tokens"].tokens] == [u"this", u"is", u"a", u"sentence", u"@end@"]
        assert [t.text for t in fields[u"target_tokens"].tokens] == [u"@start@", u"this", u"is",
                                                                     u"a", u"sentence", u"@end@"]
|
# frozen_string_literal: true
module ClaimsApi
  module PoaPdfConstructor
    # A base64-encoded signature image and its placement (x, y, height)
    # for stamping onto a power-of-attorney PDF.
    class Signature
      attr_reader :data, :x, :y, :height

      def initialize(data:, x:, y:, height: 20)
        @data = data
        @x = x
        @y = y
        @height = height
      end

      # Decode the base64 payload to a temporary PNG on first call and
      # memoize the file path for subsequent calls.
      def path
        @path ||= begin
          file_path = "#{::Common::FileHelpers.random_file_path}.png"
          File.binwrite(file_path, Base64.decode64(@data))
          file_path
        end
      end
    end
  end
end
|
# ANSI colour codes for terminal output.
titulo = '\033[01;32m'
txt = '\033[32m'
nada = '\033[m'

print(f'\n{titulo:-<11} Validação de Expressões Matemática {nada:->6}')
expressão = str(input(f'\n{txt}Digite uma expressão: {nada}'))

# Validate parentheses by tracking the running balance. The previous check
# only tested whether the TOTAL number of parentheses was even, which wrongly
# accepted inputs like ')(' or '))((' and wrongly rejected nothing else
# meaningful — it never verified order or matching counts.
saldo = 0
valida = True
for c in expressão:
    if c == '(':
        saldo += 1
    elif c == ')':
        saldo -= 1
        if saldo < 0:  # a ')' appeared before any matching '('
            valida = False
            break

if valida and saldo == 0:
    print(f'\n{titulo}Expressão Aceita{nada}')
else:
    print(f'\n{titulo}Expressão Não Aceita{nada}')
|
def parse_string(input, vars={}):
    """Return *input* with every ``$name$`` placeholder replaced.

    Each key in *vars* is looked up as a ``$key$`` token in the string and
    substituted with its value; text without placeholders is returned as-is.
    (The default dict is never mutated, so sharing it across calls is safe.)
    """
    result = input
    for name, value in vars.items():
        result = result.replace(f"${name}$", value)
    return result
|
CREATE VIEW [AppDbo].[QueueManagement_View]
AS
-- Queue configuration rows that have not been soft-deleted
-- (soft deletion is indicated by a non-null [_deletedDate]).
SELECT qm.[QueueId]
      ,qm.[QueueName]
      ,qm.[CurrentSizeLimit]
      ,qm.[CurrentRetryLimit]
      ,qm.[LockValidForSec]
      ,qm.[Disable]
  FROM [AppDbo].[QueueManagement] qm
 WHERE qm.[_deletedDate] IS NULL;
|
const assert = require('assert')
const fs = require('fs')
// Sanity check that assertions behave as expected.
assert.equal(1 + 2, 3)

// Read file.txt and report its newline-separated line count through a
// node-style (err, result) callback.
const countLines = (cb) => {
  fs.readFile('file.txt', 'utf8', (err, src) => {
    if (err) {
      cb(err)
      return
    }
    cb(null, src.split('\n').length)
  })
}

// file.txt is expected to contain exactly three lines.
countLines((err, n) => {
  assert.ifError(err)
  assert.equal(n, 3)
})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.