text
stringlengths 27
775k
|
|---|
# encoding: utf-8
module Bunny
=begin rdoc
=== DESCRIPTION:
Asks the server to start a "consumer", which is a transient request for messages from a specific
queue. Consumers last as long as the channel they were created on, or until the client cancels them
with an _unsubscribe_. Every time a message reaches the queue it is passed to the _blk_ for
processing. If an error occurs, _Bunny_::_ProtocolError_ is raised.
==== OPTIONS:
* <tt>:consumer_tag => '_tag_'</tt> - Specifies the identifier for the consumer. The consumer tag is
local to a connection, so two clients can use the same consumer tags. If this option is not
specified a server generated name is used.
* <tt>:ack => false (_default_) or true</tt> - If set to _false_, the server does not expect an
acknowledgement message from the client. If set to _true_, the server expects an acknowledgement
message from the client and will re-queue the message if it does not receive one within a time specified
by the server.
* <tt>:exclusive => true or false (_default_)</tt> - Request exclusive consumer access, meaning
only this consumer can access the queue.
* <tt>:nowait => true or false (_default_)</tt> - Ignored by Bunny, always _false_.
* <tt>:timeout => number of seconds</tt> - The subscribe loop will continue to wait for
messages until terminated (Ctrl-C or kill command) or this timeout interval is reached.
* <tt>:message_max => max number messages to process</tt> - When the required number of messages
is processed subscribe loop is exited.
==== OPERATION:
Passes a hash of message information to the block, if one has been supplied. The hash contains
:header, :payload and :delivery_details. The structure of the data is as follows -
:header has instance variables -
@klass
@size
@weight
@properties is a hash containing -
:content_type
:delivery_mode
:priority
:payload contains the message contents
:delivery_details is a hash containing -
:consumer_tag
:delivery_tag
:redelivered
:exchange
:routing_key
If the :timeout option is specified then the subscription will
automatically cease if the given number of seconds passes with no
message arriving.
==== EXAMPLES
my_queue.subscribe(:timeout => 5) {|msg| puts msg[:payload]}
my_queue.subscribe(:message_max => 10, :ack => true) {|msg| puts msg[:payload]}
=end
class Subscription < Bunny::Consumer
  # Issues a Basic.Consume for this subscription's queue, waits for the
  # broker's ConsumeOk, and records the (possibly server-generated)
  # consumer tag it returns.
  def setup_consumer
    defaults = {
      :queue => queue.name,
      :consumer_tag => consumer_tag,
      :no_ack => !ack,
      :exclusive => exclusive,
      :nowait => false
    }
    client.send_frame(Qrack::Protocol::Basic::Consume.new(defaults.merge(@opts)))
    response = client.next_method
    client.check_response(response, Qrack::Protocol::Basic::ConsumeOk, "Error subscribing to queue #{queue.name}")
    @consumer_tag = response.consumer_tag
  end
end
end
|
import * as React from "react";
import { withRouter, RouteComponentProps } from "react-router";
import Button, { ButtonProps } from "@material-ui/core/Button";
import { onLinkClick } from "./utils";
export interface ButtonLinkProps
extends Omit<ButtonProps, "href" | "component"> {
to: string;
}
type Props = ButtonLinkProps & RouteComponentProps;
class ButtonLink extends React.Component<Props> {
private _onClick = onLinkClick.bind(this);
render() {
const {
children,
history,
location,
to,
staticContext,
...props
} = this.props;
return (
<Button
{...props}
component="a"
href={history.createHref({ pathname: to })}
onClick={this._onClick}
>
{children}
</Button>
);
}
}
export default withRouter(ButtonLink);
|
using System.Collections.Generic;
using System.ComponentModel;
using System.Threading.Tasks;
using Inedo.Documentation;
using Inedo.Extensibility;
using Inedo.Extensibility.Operations;
using Inedo.Web;
namespace Inedo.Extensions.Docker.Operations.Compose
{
[DisplayName("Run command in Docker Compose")]
[Description("Runs an arbitrary docker-compose command.")]
[ScriptAlias("Compose-Command")]
public sealed class DockerComposeCommandOperation : ComposeOperationBase
{
    /// <summary>Extra arguments appended to the docker-compose invocation.</summary>
    [Category]
    [ScriptAlias("Args")]
    [DisplayName("Command arguments")]
    [FieldEditMode(FieldEditMode.Multiline)]
    public override IEnumerable<string> AddArgs { get; set; }

    // Delegates the actual execution to the shared compose runner in the base class.
    public override Task ExecuteAsync(IOperationExecutionContext context) =>
        this.RunDockerComposeAsync(context);

    // Human-readable summary shown in the plan editor / execution log.
    protected override ExtendedRichDescription GetDescription(IOperationConfiguration config) =>
        new ExtendedRichDescription(
            new RichDescription("Run docker-compose for ", new Hilite(config[nameof(ProjectName)])),
            new RichDescription("with arguments ", new ListHilite(this.AddArgs)));
}
}
|
package p2102
import "testing"
// TestSample1 drives the tracker through a fixed Add/Get sequence and checks
// each Get result against the expected name (expected values taken from the
// original test — they encode the tracker's tie-breaking behavior).
func TestSample1(t *testing.T) {
	tracker := Constructor()
	tracker.Add("bradford", 2)
	tracker.Add("branford", 3)
	res := tracker.Get()
	if res != "branford" {
		t.Fatalf("should get branford at 1st Get(), but got %s", res)
	}
	tracker.Add("alps", 2)
	res = tracker.Get()
	if res != "alps" {
		t.Fatalf("should get alps at 2nd Get() but got %s", res)
	}
	tracker.Add("orland", 2)
	res = tracker.Get()
	if res != "bradford" {
		t.Fatalf("should get bradford at 3rd Get(), but got %s", res)
	}
	tracker.Add("orlando", 3)
	res = tracker.Get()
	if res != "bradford" {
		t.Fatalf("should get bradford at 4th Get(), but got %s", res)
	}
	tracker.Add("alpine", 2)
	res = tracker.Get()
	if res != "bradford" {
		t.Fatalf("should get bradford at 5th Get(), but got %s", res)
	}
	res = tracker.Get()
	if res != "orland" {
		t.Fatalf("should get orland at 6th Get(), but got %s", res)
	}
}
|
namespace DevRocks.Ocelot.FileServiceDiscovery
{
    /// <summary>
    /// Static downstream-service entry used by the file-based Ocelot service discovery.
    /// </summary>
    public class ServiceConfig
    {
        /// <summary>
        /// URI scheme of the service, e.g. "http" or "https".
        /// NOTE(review): property is spelled "Schema" — presumably bound by name from
        /// configuration, so confirm the config key before renaming to "Scheme".
        /// </summary>
        public string Schema { get; set; }

        /// <summary>Host name or IP address of the downstream service.</summary>
        public string Host { get; set; }

        /// <summary>Port the downstream service listens on.</summary>
        public int Port { get; set; }

        /// <summary>True when the downstream service speaks gRPC.</summary>
        public bool IsGrpcHost { get; set; }
    }
}
|
package typingsSlinky.locatePath
import typingsSlinky.locatePath.locatePathStrings.directory
import typingsSlinky.locatePath.locatePathStrings.file
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
object mod {

  /**
  Get the first path that exists on disk of multiple paths.
  @param paths - Paths to check.
  @returns The first path that exists or `undefined` if none exists.
  @example
  ```
  import locatePath = require('locate-path');
  const files = [
  	'unicorn.png',
  	'rainbow.png', // Only this one actually exists on disk
  	'pony.png'
  ];
  (async () => {
  	console.log(await locatePath(files));
  	//=> 'rainbow.png'
  })();
  ```
  */
  @JSImport("locate-path", JSImport.Namespace)
  @js.native
  def apply(paths: js.Iterable[String]): js.Promise[js.UndefOr[String]] = js.native
  // Overload accepting async-specific options (concurrency, order preservation).
  @JSImport("locate-path", JSImport.Namespace)
  @js.native
  def apply(paths: js.Iterable[String], options: AsyncOptions): js.Promise[js.UndefOr[String]] = js.native

  /**
  Synchronously get the first path that exists on disk of multiple paths.
  @param paths - Paths to check.
  @returns The first path that exists or `undefined` if none exists.
  */
  @JSImport("locate-path", "sync")
  @js.native
  def sync(paths: js.Iterable[String]): js.UndefOr[String] = js.native
  // Overload accepting common options (cwd, type, allowSymlinks).
  @JSImport("locate-path", "sync")
  @js.native
  def sync(paths: js.Iterable[String], options: Options): js.UndefOr[String] = js.native

  /** Options accepted only by the asynchronous `locatePath`. */
  @js.native
  trait AsyncOptions extends Options {

    /**
    Number of concurrently pending promises. Minimum: `1`.
    @default Infinity
    */
    val concurrency: js.UndefOr[Double] = js.native

    /**
    Preserve `paths` order when searching.
    Disable this to improve performance if you don't care about the order.
    @default true
    */
    val preserveOrder: js.UndefOr[Boolean] = js.native
  }

  object AsyncOptions {

    @scala.inline
    def apply(): AsyncOptions = {
      val __obj = js.Dynamic.literal()
      __obj.asInstanceOf[AsyncOptions]
    }

    // Fluent builder-style setters generated for JS option objects.
    @scala.inline
    implicit class AsyncOptionsMutableBuilder[Self <: AsyncOptions] (val x: Self) extends AnyVal {

      @scala.inline
      def setConcurrency(value: Double): Self = StObject.set(x, "concurrency", value.asInstanceOf[js.Any])

      @scala.inline
      def setConcurrencyUndefined: Self = StObject.set(x, "concurrency", js.undefined)

      @scala.inline
      def setPreserveOrder(value: Boolean): Self = StObject.set(x, "preserveOrder", value.asInstanceOf[js.Any])

      @scala.inline
      def setPreserveOrderUndefined: Self = StObject.set(x, "preserveOrder", js.undefined)
    }
  }

  /** Options shared by the sync and async variants. */
  @js.native
  trait Options extends StObject {

    /**
    Allow symbolic links to match if they point to the requested path type.
    @default true
    */
    val allowSymlinks: js.UndefOr[Boolean] = js.native

    /**
    Current working directory.
    @default process.cwd()
    */
    val cwd: js.UndefOr[String] = js.native

    /**
    Type of path to match.
    @default 'file'
    */
    val `type`: js.UndefOr[file | directory] = js.native
  }

  object Options {

    @scala.inline
    def apply(): Options = {
      val __obj = js.Dynamic.literal()
      __obj.asInstanceOf[Options]
    }

    // Fluent builder-style setters generated for JS option objects.
    @scala.inline
    implicit class OptionsMutableBuilder[Self <: Options] (val x: Self) extends AnyVal {

      @scala.inline
      def setAllowSymlinks(value: Boolean): Self = StObject.set(x, "allowSymlinks", value.asInstanceOf[js.Any])

      @scala.inline
      def setAllowSymlinksUndefined: Self = StObject.set(x, "allowSymlinks", js.undefined)

      @scala.inline
      def setCwd(value: String): Self = StObject.set(x, "cwd", value.asInstanceOf[js.Any])

      @scala.inline
      def setCwdUndefined: Self = StObject.set(x, "cwd", js.undefined)

      @scala.inline
      def setType(value: file | directory): Self = StObject.set(x, "type", value.asInstanceOf[js.Any])

      @scala.inline
      def setTypeUndefined: Self = StObject.set(x, "type", js.undefined)
    }
  }
}
|
package initialize
import (
"gin-vue-admin/global"
"gin-vue-admin/service"
)
// Data runs every built-in data initializer in order.
//
// The original implementation reassigned err after each call and only checked
// the final one, silently discarding all earlier failures and logging success
// regardless; now the first failure is logged and aborts the sequence, so the
// success message is emitted only when every step completed.
func Data() {
	initializers := []func() error{
		service.InitSysApi,
		service.InitSysUser,
		service.InitExaCustomer,
		service.InitCasbinModel,
		service.InitSysAuthority,
		service.InitSysBaseMenus,
		service.InitAuthorityMenu,
		service.InitSysDictionary,
		service.InitSysAuthorityMenus,
		service.InitSysDataAuthorityId,
		service.InitSysDictionaryDetail,
		service.InitExaFileUploadAndDownload,
	}
	for _, initialize := range initializers {
		if err := initialize(); err != nil {
			global.GVA_LOG.Error("initialize data failed", err)
			return
		}
	}
	global.GVA_LOG.Debug("initialize data success")
}
|
package com.fengyongge.wanandroidclient.mvp.contract
import com.fengyongge.baseframework.mvp.IBaseView
import com.fengyongge.rxhttp.bean.BaseResponse
import com.fengyongge.rxhttp.exception.ResponseException
import io.reactivex.Observable
/**
 * MVP contract for the settings screen: logging out and clearing the local cache.
 */
class SettingContract {
    interface Presenter{
        // Triggers the logout flow.
        fun getLogout()
        // Triggers clearing of locally cached data.
        fun clearCache()
    }
    interface Model{
        // Performs the logout request; emits the server response.
        fun getLogout(): Observable<BaseResponse<String>>
    }
    interface View : IBaseView{
        // Called when logout succeeds with the server response.
        fun getLogoutSuccess(data: BaseResponse<String>)
        // Called when logout fails with the wrapped error.
        fun getLogoutFail(e: ResponseException)
        // Called after the cache is cleared; size is the reclaimed-space text to display.
        fun clearCacheSuccess(size: String)
    }
}
|
-- One row per token withdrawn from a Balancer pool (BPool LOG_EXIT event),
-- with the amount normalized by token decimals and priced in USD via the
-- per-minute price table (usd_amount is NULL when no price row matches).
CREATE OR REPLACE VIEW balancer.view_remove_liquidity AS
SELECT
    a.caller AS liquidity_provider,
    a.contract_address AS exchange_address,
    a."tokenAmountOut" / 10 ^ t.decimals AS token_amount,
    (a."tokenAmountOut" / 10 ^ t.decimals) * p.price AS usd_amount,
    t.symbol AS token_symbol,
    a.evt_tx_hash AS tx_hash,
    a.evt_block_time AS block_time
FROM balancer."BPool_evt_LOG_EXIT" a
LEFT JOIN erc20.tokens t
    ON t.contract_address = a."tokenOut"
LEFT JOIN prices.usd p
    ON date_trunc('minute', a.evt_block_time) = p.minute
    AND p.contract_address = t.contract_address
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.translator.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.lang.common.statement.CreateViewStatement;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.utils.KeyFieldTypeUtil;
import org.apache.asterix.om.typecomputer.impl.TypeComputeUtils;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.RecordUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.exceptions.SourceLocation;
/**
* A util that can verify if a filter field, a list of partitioning expressions,
* or a list of key fields are valid in a record type.
*/
public class ValidateUtil {

    private static final String PRIMARY = "primary";

    private ValidateUtil() {
    }

    /**
     * Validates the field that will be used as filter for the components of an LSM index.
     *
     * @param recordType
     *            the record type
     * @param metaType
     *            the meta record type
     * @param filterSourceIndicator
     *            indicates where the filter attribute comes from, 0 for record, 1 for meta record.
     *            since this method is called only when a filter field presents, filterSourceIndicator will not be null
     *
     * @param filterField
     *            the full name of the field
     * @param sourceLoc
     * @throws AlgebricksException
     *             if field is not found in record.
     *             if field type can't be a filter type.
     *             if field type is nullable.
     */
    public static void validateFilterField(ARecordType recordType, ARecordType metaType, Integer filterSourceIndicator,
            List<String> filterField, SourceLocation sourceLoc) throws AlgebricksException {
        ARecordType itemType = filterSourceIndicator == 0 ? recordType : metaType;
        IAType fieldType = itemType.getSubFieldType(filterField);
        if (fieldType == null) {
            throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, sourceLoc,
                    RecordUtil.toFullyQualifiedName(filterField));
        }
        switch (fieldType.getTypeTag()) {
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
            case FLOAT:
            case DOUBLE:
            case STRING:
            case BINARY:
            case DATE:
            case TIME:
            case DATETIME:
            case UUID:
            case YEARMONTHDURATION:
            case DAYTIMEDURATION:
                break;
            case UNION:
                // Pass sourceLoc so the error points at the offending statement,
                // consistent with every other throw in this class.
                throw new CompilationException(ErrorCode.COMPILATION_FILTER_CANNOT_BE_NULLABLE, sourceLoc,
                        RecordUtil.toFullyQualifiedName(filterField));
            default:
                throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_FILTER_TYPE, sourceLoc,
                        fieldType.getTypeTag().name());
        }
    }

    /**
     * Validates the partitioning expression that will be used to partition a dataset and returns expression type.
     *
     * @param recType
     *            the record type
     * @param metaRecType
     *            the meta record type
     * @param partitioningExprs
     *            a list of partitioning expressions that will be validated
     * @param keySourceIndicators
     *            the key sources (record vs. meta)
     * @param autogenerated
     *            true if auto generated, false otherwise
     * @param sourceLoc
     * @return a list of partitioning expressions types
     * @throws AlgebricksException
     *             if composite key is autogenerated.
     *             if autogenerated and of a type that can't be autogenerated.
     *             if a field could not be found in its record type.
     *             if partitioning key is nullable.
     *             if the field type can't be a primary key.
     */
    public static List<IAType> validatePartitioningExpressions(ARecordType recType, ARecordType metaRecType,
            List<List<String>> partitioningExprs, List<Integer> keySourceIndicators, boolean autogenerated,
            SourceLocation sourceLoc) throws AlgebricksException {
        return validatePartitioningExpressionsImpl(recType, metaRecType, partitioningExprs, keySourceIndicators,
                autogenerated, true, sourceLoc);
    }

    // Shared implementation for dataset primary keys (forPrimaryKey=true) and
    // view primary/foreign keys (forPrimaryKey=false allows nullable fields).
    private static List<IAType> validatePartitioningExpressionsImpl(ARecordType recType, ARecordType metaRecType,
            List<List<String>> partitioningExprs, List<Integer> keySourceIndicators, boolean autogenerated,
            boolean forPrimaryKey, SourceLocation sourceLoc) throws AlgebricksException {
        String keyKindDisplayName = forPrimaryKey ? PRIMARY : "";
        List<IAType> partitioningExprTypes = new ArrayList<>(partitioningExprs.size());
        if (autogenerated) {
            // Autogenerated keys must be a single field of type UUID.
            if (partitioningExprs.size() > 1) {
                throw new CompilationException(ErrorCode.COMPILATION_CANNOT_AUTOGENERATE_COMPOSITE_KEY, sourceLoc,
                        keyKindDisplayName);
            }
            List<String> fieldName = partitioningExprs.get(0);
            IAType fieldType = recType.getSubFieldType(fieldName);
            if (fieldType == null) {
                String unTypeField = fieldName.get(0) == null ? "" : fieldName.get(0);
                throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, sourceLoc, unTypeField);
            }
            partitioningExprTypes.add(fieldType);
            ATypeTag pkTypeTag = fieldType.getTypeTag();
            if (pkTypeTag != ATypeTag.UUID) {
                throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_AUTOGENERATED_TYPE, sourceLoc,
                        keyKindDisplayName, pkTypeTag.name(), ATypeTag.UUID.name());
            }
        } else {
            partitioningExprTypes =
                    KeyFieldTypeUtil.getKeyTypes(recType, metaRecType, partitioningExprs, keySourceIndicators);
            for (int i = 0; i < partitioningExprs.size(); i++) {
                List<String> partitioningExpr = partitioningExprs.get(i);
                IAType fieldType = partitioningExprTypes.get(i);
                if (fieldType == null) {
                    throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, sourceLoc,
                            RecordUtil.toFullyQualifiedName(partitioningExpr));
                }
                if (forPrimaryKey) {
                    boolean nullable = KeyFieldTypeUtil.chooseSource(keySourceIndicators, i, recType, metaRecType)
                            .isSubFieldNullable(partitioningExpr);
                    if (nullable) {
                        // key field is nullable
                        throw new CompilationException(ErrorCode.COMPILATION_KEY_CANNOT_BE_NULLABLE, sourceLoc,
                                keyKindDisplayName, RecordUtil.toFullyQualifiedName(partitioningExpr));
                    }
                } else {
                    // Non-primary (e.g. view foreign) keys may be nullable; validate the actual type.
                    fieldType = TypeComputeUtils.getActualType(fieldType);
                }
                switch (fieldType.getTypeTag()) {
                    case TINYINT:
                    case SMALLINT:
                    case INTEGER:
                    case BIGINT:
                    case FLOAT:
                    case DOUBLE:
                    case STRING:
                    case BINARY:
                    case DATE:
                    case TIME:
                    case UUID:
                    case DATETIME:
                    case YEARMONTHDURATION:
                    case DAYTIMEDURATION:
                        break;
                    case UNION:
                        throw new CompilationException(ErrorCode.COMPILATION_KEY_CANNOT_BE_NULLABLE, sourceLoc,
                                keyKindDisplayName, RecordUtil.toFullyQualifiedName(partitioningExpr));
                    default:
                        throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_KEY_TYPE, sourceLoc,
                                fieldType.getTypeTag(), keyKindDisplayName);
                }
            }
        }
        return partitioningExprTypes;
    }

    /**
     * Validates the key fields that will be used as keys of an index.
     *
     * @param indexType
     *            the type of the index that its key fields is being validated
     * @param fieldType
     *            a key field type
     * @param displayFieldName
     *            a field name to use for error reporting
     * @param sourceLoc
     *            the source location
     * @throws AlgebricksException
     */
    public static void validateIndexFieldType(IndexType indexType, IAType fieldType, List<String> displayFieldName,
            SourceLocation sourceLoc) throws AlgebricksException {
        switch (indexType) {
            case ARRAY:
            case BTREE:
                switch (fieldType.getTypeTag()) {
                    case TINYINT:
                    case SMALLINT:
                    case INTEGER:
                    case BIGINT:
                    case FLOAT:
                    case DOUBLE:
                    case STRING:
                    case BINARY:
                    case DATE:
                    case TIME:
                    case DATETIME:
                    case UNION:
                    case UUID:
                    case YEARMONTHDURATION:
                    case DAYTIMEDURATION:
                        break;
                    default:
                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                                "The field \"" + displayFieldName + "\" which is of type " + fieldType.getTypeTag()
                                        + " cannot be indexed using the BTree index.");
                }
                break;
            case RTREE:
                switch (fieldType.getTypeTag()) {
                    case POINT:
                    case LINE:
                    case RECTANGLE:
                    case CIRCLE:
                    case POLYGON:
                    case GEOMETRY:
                    case UNION:
                        break;
                    default:
                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                                "The field \"" + displayFieldName + "\" which is of type " + fieldType.getTypeTag()
                                        + " cannot be indexed using the RTree index.");
                }
                break;
            case LENGTH_PARTITIONED_NGRAM_INVIX:
                switch (fieldType.getTypeTag()) {
                    case STRING:
                    case UNION:
                        break;
                    default:
                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                                "The field \"" + displayFieldName + "\" which is of type " + fieldType.getTypeTag()
                                        + " cannot be indexed using the Length Partitioned N-Gram index.");
                }
                break;
            case LENGTH_PARTITIONED_WORD_INVIX:
                switch (fieldType.getTypeTag()) {
                    case STRING:
                    case MULTISET:
                    case ARRAY:
                    case UNION:
                        break;
                    default:
                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                                "The field \"" + displayFieldName + "\" which is of type " + fieldType.getTypeTag()
                                        + " cannot be indexed using the Length Partitioned Keyword index.");
                }
                break;
            case SINGLE_PARTITION_NGRAM_INVIX:
                switch (fieldType.getTypeTag()) {
                    case STRING:
                    case UNION:
                        break;
                    default:
                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                                "The field \"" + displayFieldName + "\" which is of type " + fieldType.getTypeTag()
                                        + " cannot be indexed using the N-Gram index.");
                }
                break;
            case SINGLE_PARTITION_WORD_INVIX:
                switch (fieldType.getTypeTag()) {
                    case STRING:
                    case MULTISET:
                    case ARRAY:
                    case UNION:
                        break;
                    default:
                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                                "The field \"" + displayFieldName + "\" which is of type " + fieldType.getTypeTag()
                                        + " cannot be indexed using the Keyword index.");
                }
                break;
            default:
                throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE, sourceLoc,
                        String.valueOf(indexType));
        }
    }

    /**
     * Validates the key fields that will be used as either primary or foreign keys of a view.
     */
    public static List<String> validateViewKeyFields(CreateViewStatement.KeyDecl keyDecl, ARecordType itemType,
            boolean isForeignKey, SourceLocation sourceLoc) throws AlgebricksException {
        List<Integer> sourceIndicators = keyDecl.getSourceIndicators();
        List<List<String>> fields = keyDecl.getFields();
        int n = fields.size();
        List<String> keyFields = new ArrayList<>(n);
        for (int i = 0; i < n; i++) {
            // View keys must come from the record itself and be top-level (non-nested) fields.
            if (sourceIndicators.get(i) != Index.RECORD_INDICATOR) {
                throw new CompilationException(isForeignKey ? ErrorCode.INVALID_FOREIGN_KEY_DEFINITION
                        : ErrorCode.INVALID_PRIMARY_KEY_DEFINITION, sourceLoc);
            }
            List<String> nestedField = fields.get(i);
            if (nestedField.size() != 1) {
                throw new CompilationException(isForeignKey ? ErrorCode.INVALID_FOREIGN_KEY_DEFINITION
                        : ErrorCode.INVALID_PRIMARY_KEY_DEFINITION, sourceLoc);
            }
            keyFields.add(nestedField.get(0));
        }
        validatePartitioningExpressionsImpl(itemType, null,
                keyFields.stream().map(Collections::singletonList).collect(Collectors.toList()),
                Collections.nCopies(keyFields.size(), Index.RECORD_INDICATOR), false, !isForeignKey, sourceLoc);
        return keyFields;
    }
}
|
This code represents the build process for [wordfreq][], among other things.
I've made it public because it's good to know where the data in wordfreq comes
from. However, I make no promises that you'll be able to run it if you don't
work at Luminoso.
[wordfreq]: https://github.com/LuminosoInsight/wordfreq
## Dependencies
Exquisite Corpus makes use of various libraries and command-line tools to
process data correctly and efficiently. As something that is run on a
development machine, it uses the best, fastest libraries it can, though this
leads to somewhat complex system requirements.
You will need these programming environments installed:
- Python 3.4 or later
- Haskell, installed with `haskell-stack`, used to compile and run `wikiparsec`
You also need certain tools to be available:
- The C library for `mecab` (apt install libmecab-dev)
- The ICU Unicode libraries (apt install libicu-dev)
- The JSON processor `jq` (apt install jq)
- The XML processor `xml2` (apt install xml2)
- The HTTP downloader `curl` (apt install curl)
- wikiparsec (https://github.com/LuminosoInsight/wikiparsec)
## Installation
Some steps here probably need to be filled in better.
- Install system-level dependencies:
```sh
apt install python3-dev haskell-stack libmecab-dev libicu-dev jq xml2 curl
```
- Clone, build, and install `wikiparsec`:
```sh
git clone https://github.com/LuminosoInsight/wikiparsec
cd wikiparsec
stack install
```
- If building _alignment_ files to get alignments for parallel corpus:
- Compile `fast_align` by following the instructions at
https://github.com/clab/fast_align
- Create a symbolic link to executable `fast_align` inside this directory
(executable `fast_align` is found in the directory where `fast_align` was compiled)
- Finally, return to this directory and install `exquisite-corpus` itself,
along with the Python dependencies it manages:
```sh
pip install -e .
```
## Getting data
Most of the data in Exquisite Corpus will be downloaded from places where it
can be found on the Web. However, one input must be downloaded separately:
Twitter data cannot be distributed due to the Twitter API's terms of use.
If you have a collection of tweets, put their text in
`data/raw/twitter-2015.txt`, one tweet per line. Or just put an empty file
there.
## Building
Make sure you have lots of disk space available in the `data` directory, which
may have to be a symbolic link to an external hard disk.
Run:
```sh
snakemake -j 8
```
...and wait a day or two for results, or a crash that may tell you what you need to fix.
To build the _parallel_ corpus, run `./build.sh parallel`. If you want _alignment_ files for
an already-built parallel corpus, or want to build the parallel corpus and alignments together, run
`./build.sh alignment`.
|
from asyncio import sleep
from telethon.events import NewMessage, StopPropagation
from telethon.tl.functions.channels import LeaveChannelRequest
from telethon.tl.functions.messages import DeleteChatUserRequest
from telethon.tl.types import Channel
from telethon.errors.rpcerrorlist import UserAlreadyParticipantError
from common.logging import get_logger
from common.telegram import contains_joinchat_link, join_link, get_monitored_chat_name
from common.protocol import g_resolver_response_error_prefix, g_resolver_separator
class ResolveHandler:
    """Resolves a Telegram joinchat (invite) link into a chat id/title.

    On each command message of the form "<cmd> <joinchat-link>" it joins the
    linked chat (retrying up to ``join_tries`` times), replies with
    "chat_id<sep>title<sep>link" joined by ``g_resolver_separator``, and then
    leaves the chat again if this handler was the one that joined it.
    Errors are reported back to the sender prefixed with
    ``g_resolver_response_error_prefix``. Always raises StopPropagation so no
    later handler processes the same message.
    """

    def __init__(self, join_tries: int):
        # Maximum number of join attempts before giving up on the link.
        self.join_tries = join_tries

    async def __call__(self, event: NewMessage.Event):
        get_logger().info(msg=f"resolve handler called: chat_id={event.chat_id} msg={event.message.message}")
        # Everything after the command word; exactly one argument (the link) is expected.
        split_args = event.message.message.split(' ')[1:]
        if len(split_args) != 1:
            get_logger().warning(msg=f"chat id={event.chat_id} sent message with invalid args={split_args}")
            await event.message.reply(f"{g_resolver_response_error_prefix}: message with invalid args={split_args}")
            raise StopPropagation
        joinchat_arg = split_args[0]
        if not contains_joinchat_link(arg=joinchat_arg):
            get_logger().warning(msg=f"chat id={event.chat_id} sent message without joinchat link")
            await event.message.reply(f"{g_resolver_response_error_prefix}: message without joinchat link")
            raise StopPropagation
        # join that chat
        joined_chats = []
        # Tracks whether WE joined the chat here; if we were already a member we
        # must not leave it afterwards.
        did_join = True
        tries = 1
        while self.join_tries >= tries:
            try:
                join_updates = await join_link(client=event.client, link=joinchat_arg)
                joined_chats = join_updates.chats
                break
            except UserAlreadyParticipantError as user_participant_exc:
                # eg if we didn't leave it for some reason OR concurrent task joined it
                get_logger().debug(f"Already participant: {str(user_participant_exc)}")
                try:
                    # Fall back to resolving the entity directly since we are already inside.
                    entity = await event.client.get_entity(joinchat_arg)
                    joined_chats = [entity]
                    did_join = False
                    break
                except Exception as e:
                    # Entity resolution failed too; back off briefly and retry the join.
                    get_logger().warning(f"Failed to join link & resolve entity: {str(e)}")
                    await sleep(1)
                    tries += 1
            except Exception as exc:
                # Any other join error is fatal for this request: report and stop.
                get_logger().error(f"Unexpected join error: {str(exc)}")
                await event.message.reply(f"{g_resolver_response_error_prefix}: {str(exc)}")
                raise StopPropagation
        if len(joined_chats) != 1:
            # Either all retries failed (empty list) or the update touched
            # multiple chats, which we cannot attribute to the link.
            err_msg = f"invalid amount of chats joined/updated: #={len(joined_chats)}"
            get_logger().warning(msg=err_msg)
            await event.message.reply(f"{g_resolver_response_error_prefix}: {err_msg}")
            raise StopPropagation
        resolved_chat = joined_chats[0]
        resolved_chat_id = await event.client.get_peer_id(resolved_chat)
        resolved_title = resolved_chat.title
        resolved_joiner = joinchat_arg
        # respond with resolved stuff
        get_logger().debug(f"joinchat={joinchat_arg} resolved into "
                           f"{get_monitored_chat_name(title=resolved_title, chat_id=resolved_chat_id)}")
        await event.message.reply(
            g_resolver_separator.join([str(resolved_chat_id), str(resolved_title), str(resolved_joiner)]))
        # leave that chat
        if did_join:
            # Channels/supergroups and legacy chats use different leave APIs.
            if isinstance(resolved_chat, Channel):
                leave_updates = await event.client(LeaveChannelRequest(resolved_chat))
                if len(leave_updates.chats) != 1:
                    # do not reply with error to sender, because it already received resolved results
                    get_logger().warning(msg=f"invalid amount of channels leaved/updated: "
                                             f"#={len(leave_updates.chats)}, {str(leave_updates)}")
                    raise StopPropagation
            else:
                leave_updates = await event.client(DeleteChatUserRequest(chat_id=resolved_chat_id, user_id='me'))
                if len(leave_updates.chats) != 1:
                    # do not reply with error to sender, because it already received resolved results
                    get_logger().warning(msg=f"invalid amount of chats leaved/updated: "
                                             f"#={len(leave_updates.chats)}, {str(leave_updates)}")
                    raise StopPropagation
        raise StopPropagation
|
using System;
using System.IO;
using KeyboardSwitch.Core.Services.Startup;
using KeyboardSwitch.Core.Settings;
using Microsoft.Extensions.Logging;
using Microsoft.Win32;
namespace KeyboardSwitch.Windows.Services
{
internal class RegistryStartupService : IStartupService
{
    private const string StartupRegistryKey = @"Software\Microsoft\Windows\CurrentVersion\Run";
    private const string StartupRegistryName = "Keyboard Switch";
    private const string ExecutableExtension = ".exe";

    private readonly ILogger<RegistryStartupService> logger;

    public RegistryStartupService(ILogger<RegistryStartupService> logger) =>
        this.logger = logger;

    /// <summary>
    /// Returns true when a startup value for the app exists under the
    /// current user's Run key.
    /// </summary>
    public bool IsStartupConfigured(AppSettings settings)
    {
        this.logger.LogDebug("Checking if the KeyboardSwitch service is configured to run on startup");
        using var key = Registry.CurrentUser.OpenSubKey(StartupRegistryKey);
        bool isConfigured = key?.GetValue(StartupRegistryName) != null;
        this.logger.LogDebug($"KeyboardSwitch {(isConfigured ? "is" : "is not")} configured to run on startup");
        return isConfigured;
    }

    /// <summary>
    /// Adds (startup == true) or removes (startup == false) the app's value
    /// under the current user's Run key.
    /// </summary>
    public void ConfigureStartup(AppSettings settings, bool startup)
    {
        this.logger.LogDebug(
            $"Configuring to {(startup ? "start" : "stop")} running the KeyboardSwitch service on startup");
        using var startupKey = Registry.CurrentUser.OpenSubKey(StartupRegistryKey, true);
        if (startup)
        {
            startupKey?.SetValue(StartupRegistryName, this.GetServicePath(settings), RegistryValueKind.String);
        }
        else
        {
            // Pass throwOnMissingValue: false — the parameterless DeleteValue throws
            // ArgumentException when the value is already absent (e.g. disabling twice
            // in a row, or the value was removed externally).
            startupKey?.DeleteValue(StartupRegistryName, false);
        }
        this.logger.LogDebug(
            $"Configured to {(startup ? "start" : "stop")} running the KeyboardSwitch service on startup");
    }

    // Builds the quoted absolute path of the service executable, appending
    // ".exe" when the configured path lacks it.
    private string GetServicePath(AppSettings settings)
    {
        var path = settings.ServicePath.EndsWith(ExecutableExtension, StringComparison.InvariantCultureIgnoreCase)
            ? settings.ServicePath
            : settings.ServicePath + ExecutableExtension;
        return $"\"{Path.GetFullPath(path)}\"";
    }
}
}
|
require File.dirname(__FILE__) + '/../../spec_helper'
describe "Range#step" do
  # Integer, String (via #succ) and Float ranges all honor the step size;
  # note the Float case: stepping 0.5..2.4 by 0.5 yields whole Integers for
  # the 1.0 and 2.0 steps in this expectation.
  it "passes each nth element to the block" do
    a = []
    (-5..5).step(2) { |x| a << x }
    a.should == [-5, -3, -1, 1, 3, 5]
    a = []
    ("A".."F").step(2) { |x| a << x }
    a.should == ["A", "C", "E"]
    a = []
    # Exclusive range: "G" itself is never yielded.
    ("A"..."G").step(2) { |x| a << x }
    a.should == ["A", "C", "E"]
    a = []
    (0.5..2.4).step(0.5) { |x| a << x }
    a.should == [0.5, 1, 1.5, 2]
  end

  it "raises an ArgumentError if stepsize is 0 or negative" do
    should_raise(ArgumentError, "step can't be 0") do
      (-5..5).step(0) { |x| x }
    end
    should_raise(ArgumentError, "step can't be 0") do
      (-5.5..5.7).step(0.0) { |x| x }
    end
    should_raise(ArgumentError, "step can't be negative") do
      (-5..5).step(-2) { |x| x }
    end
  end

  it "raises a TypeError if the first element does not respond to #succ" do
    b = Object.new
    # Stub <=> so range construction succeeds; only #succ is missing.
    (a = Object.new).should_receive(:method_missing, :with => [:<=>, b], :returning => 1)
    should_raise(TypeError, "can't iterate from Object") do
      (a..b).step(1) { |i| i }
    end
  end

  it "returns self" do
    (1..10).step(1) {}.should == (1..10)
  end
end
|
# DbConnectorPool.Get method
Creates a connector that delegates to a connector in the pool (or a new connector if the pool is empty).
```csharp
public DbConnector Get()
```
## Remarks
Dispose the returned connector to return the actual connector to the pool.
## See Also
* class [DbConnector](../DbConnector.md)
* class [DbConnectorPool](../DbConnectorPool.md)
* namespace [Faithlife.Data](../../Faithlife.Data.md)
<!-- DO NOT EDIT: generated by xmldocmd for Faithlife.Data.dll -->
|
/*
* Copyright (c) 2018 Machine Zone Inc. All rights reserved.
*/
package org.apache.spark.metrics.mz
import com.codahale.metrics._
import org.apache.spark.SparkEnv
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.metrics.source.Source
import org.slf4j.LoggerFactory
import scala.util.{Failure, Success, Try}
/**
 * CustomMetrics
 *
 * Provide access to Spark's internal [[MetricRegistry]].
 *
 * @author belbis
 * @since 1.0.0
 */
class CustomMetrics(override val sourceName: String) extends Source {

  private val _log = LoggerFactory.getLogger(getClass)

  // Backing registry for every metric created through this source.
  val registry = new MetricRegistry()

  override def metricRegistry: MetricRegistry = registry

  /**
   * metrics
   *
   * This is our reference to the Spark internal [[MetricsSystem]].
   * If loading the metrics system from [[SparkEnv]] is unsuccessful
   * the application will fail.
   *
   */
  @transient lazy val metrics: MetricsSystem = {
    // SparkEnv.get returns null when no SparkEnv is active; both that case
    // and a thrown exception are treated as fatal.
    Try(SparkEnv.get) match {
      case Success(null) =>
        throw new Exception("Unable to find metrics system.")
      case Success(env) => env.metricsSystem
      case Failure(f) =>
        _log.error("Unable to access metrics system.")
        throw f
    }
  }

  /**
   * register
   *
   * Ensure that this as a source is registered with
   * the Spark [[MetricsSystem]]. The cycle parameter is used to ensure the
   * [[Source]] is registered with updated metrics.
   *
   * @param cycle
   */
  def register(cycle: Boolean = false): Unit = {
    if (metrics.getSourcesByName(sourceName).isEmpty) {
      metrics.registerSource(this)
    } else if (cycle) {
      // Remove and re-add so Spark picks up metrics added since the last
      // registration.
      metrics.removeSource(this)
      metrics.registerSource(this)
      // NOTE(review): this success message is only logged on the cycle path,
      // not on first registration — confirm that is intentional.
      _log.info("Successfully ensured CustomMetrics registered with Spark's MetricsSystem")
    }
  }

  /**
   * register
   *
   * register a named metric in the [[MetricRegistry]].
   *
   * @param name [[String]] name of [[Metric]] to register
   * @param metric [[Metric]] to register
   */
  def register[T <: Metric](name: String, metric: T): Unit = {
    // NOTE(review): MetricRegistry.register throws if the name already
    // exists — callers are expected to pass fresh names.
    metricRegistry.register(name, metric)
    register(cycle = true)
  }

  /**
   * counter
   *
   * Get [[Counter]] registered with specified name.
   * If one doesn't exist it will be created.
   *
   * @param name the name of the counter
   * @return [[Counter]]
   *
   */
  def counter(name: String): Counter = {
    // NOTE(review): the fast-path containsKey check happens outside the
    // synchronized block (check-then-act); `cycle` is re-derived inside the
    // lock before creating — confirm this is sufficient for your use.
    if (metricRegistry.getCounters.containsKey(name)) {
      metricRegistry.getCounters.get(name)
    } else synchronized {
      val cycle = !metricRegistry.getCounters.containsKey(name)
      val counter = metricRegistry.counter(name)
      register(cycle)
      counter
    }
  }

  /**
   * gauge
   *
   * Register a [[Gauge]] with specified name.
   * If one exists under the same name it will be overwritten.
   *
   * @param name the name of the [[Gauge]]
   * @param f [[Function]] to return value from [[Gauge]]
   * @tparam T return type of [[Function]] for [[Gauge]]
   * @return [[Gauge]]
   *
   */
  def gauge[T](name: String, f: () => T): Gauge[T] = {
    // NOTE(review): the remove happens outside the synchronized block below;
    // two concurrent callers could race between remove and register.
    if (metricRegistry.getGauges.containsKey(name)) {
      metricRegistry.remove(name)
    }
    synchronized {
      val gauge = new Gauge[T] {
        override def getValue: T = f()
      }
      metricRegistry.register(name, gauge)
      register(cycle = true)
      gauge
    }
  }

  /**
   * gauge
   *
   * Get [[Gauge]] registered with specified name. If one doesn't exist null is returned.
   * This behavior differs from the other [[Metric]] register implementations as
   *
   * @param name [[String]] name of [[Gauge]]
   * @return [[Gauge]]
   *
   */
  def gauge(name: String): Gauge[_] = {
    if (metricRegistry.getGauges().containsKey(name)) {
      metricRegistry.getGauges().get(name)
    } else {
      // Unlike counter/histogram/meter/timer, gauges cannot be created
      // without a value function, so a missing name yields null.
      null.asInstanceOf[Gauge[_]]
    }
  }

  /**
   * histogram
   *
   * Get an [[Histogram]] registered with specified name.
   * If one doesn't exist it will be created.
   *
   * @param name [[String]] name of [[Histogram]]
   * @return [[Histogram]]
   *
   */
  def histogram(name: String): Histogram = {
    if (metricRegistry.getHistograms.containsKey(name)) {
      // NOTE(review): the hit path calls metricRegistry.histogram(name)
      // rather than getHistograms.get(name) like the other accessors —
      // presumably equivalent, but inconsistent; confirm and align.
      metricRegistry.histogram(name)
    } else synchronized {
      val cycle = !metricRegistry.getHistograms.containsKey(name)
      val histogram = metricRegistry.histogram(name)
      register(cycle)
      histogram
    }
  }

  /**
   * meter
   *
   * Get a [[Meter]] registered with specified name.
   * If one doesn't exist it will be created.
   *
   * @param name [[String]] name of [[Meter]]
   * @return [[Meter]]
   *
   */
  def meter(name: String): Meter = {
    if (metricRegistry.getMeters.containsKey(name)) {
      metricRegistry.getMeters.get(name)
    } else synchronized {
      val cycle = !metricRegistry.getMeters.containsKey(name)
      val meter = metricRegistry.meter(name)
      register(cycle)
      meter
    }
  }

  /**
   * timer
   *
   * Get a [[Timer]] registered with specified name.
   * If one doesn't exist it will be created.
   *
   * @param name [[String]] name of [[Timer]]
   * @return [[Timer]]
   *
   */
  def timer(name: String): Timer = {
    if (metricRegistry.getTimers.containsKey(name)) {
      metricRegistry.getTimers.get(name)
    } else synchronized {
      val cycle = !metricRegistry.getTimers.containsKey(name)
      val timer = metricRegistry.timer(name)
      register(cycle)
      timer
    }
  }

  /**
   * remove
   *
   * Remove a metric from the registry. Returns [[Boolean]] true if successful, false otherwise.
   * NOTE: any [[Metric]]s with a name that matches will be removed.
   *
   * @param name [[String]] name of [[Metric]] to remove.
   * @return [[Boolean]]
   *
   */
  def remove(name: String): Boolean = {
    val removed = metricRegistry.remove(name)
    // Re-register so Spark no longer reports the removed metric.
    register(cycle = true)
    removed
  }
}
|
package node
// OutOfBoundsErr is an error type carrying a plain message; its name suggests
// it signals an out-of-range index — confirm at the call sites that create it.
// NOTE(review): Go convention names error types with an "Error" suffix
// (OutOfBoundsError); renaming would change the package API, so only flagged.
type OutOfBoundsErr struct {
	msg string // human-readable description
}

// Error implements the built-in error interface.
func (o *OutOfBoundsErr) Error() string {
	return o.msg
}
|
package vault
import (
"encoding/json"
"fmt"
"log"
"os"
"time"
"github.com/ragul28/svault/cipher"
)
// bucket is the BoltDB bucket that holds ordinary key/value secrets.
const bucket = "kvStore"
// WriteVault encrypts secret with encryptKey and stores the ciphertext in the
// vault bucket under Key. Any failure terminates the process via log.Fatal.
func WriteVault(encryptKey []byte, Key, secret string) {
	// The encryption error was previously discarded; a failed encryption
	// must not be persisted.
	ciphertext, err := cipher.Encrypt(encryptKey, secret)
	if err != nil {
		log.Fatal(err)
	}

	boltdb := open(getVautlPath())

	vd, err := json.Marshal(VaultData{time.Now().Unix(), "kv", ciphertext, 0})
	if err != nil {
		log.Fatal(err)
	}

	if err := writeDB(boltdb, bucket, Key, vd); err != nil {
		log.Fatal(err)
	}

	// Report success only after the value has actually been persisted
	// (previously this was printed before the write happened).
	fmt.Printf("%s saved in svault!\n", Key)
}
// ReadVault looks up Key in the vault bucket, decrypts the stored value with
// encryptKey and prints the plaintext to stdout.
func ReadVault(encryptKey []byte, Key string) {
	boltdb := open(getVautlPath())

	val, err := readDB(boltdb, bucket, Key)
	if err != nil {
		log.Fatal(err)
	}

	var vd VaultData
	if err := json.Unmarshal(val, &vd); err != nil {
		log.Fatal(err)
	}

	// An empty ciphertext means the key was never stored.
	if len(vd.EnctyptData) < 1 {
		fmt.Printf("%s not found in svault!\n", Key)
		os.Exit(0)
	}

	// The decryption error was previously discarded; a wrong key would have
	// silently printed an empty/garbage string instead of failing.
	plaintext, err := cipher.Decrypt(encryptKey, vd.EnctyptData)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s\n", plaintext)
}
// DeleteVault removes the entry stored under Key from the vault bucket.
// encryptKey is accepted for interface symmetry with the other commands.
func DeleteVault(encryptKey []byte, Key string) {
	db := open(getVautlPath())
	if err := deleteDB(db, bucket, Key); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s deleted from svault!\n", Key)
}
// ListVault iterates over the vault bucket, printing the stored entries
// (the iteration result itself is not used here).
func ListVault() {
	db := open(getVautlPath())
	if _, err := iterateDB(db, bucket); err != nil {
		log.Fatal(err)
	}
}
// StatusVault reports that the vault is initialized and prints when the
// master key record was created.
func StatusVault() {
	boltdb := open(getVautlPath())

	val, err := readDB(boltdb, master_bucket, "master_key")
	if err != nil {
		log.Fatal(err)
	}

	var vd VaultData
	if err := json.Unmarshal(val, &vd); err != nil {
		log.Fatal(err)
	}

	// Go's reference time is 2006-01-02 15:04:05 (01 = month, 02 = day);
	// the previous layout "2006-02-01" swapped the month and day fields.
	fmt.Println("Vault Status: initialized\nInit Time: ", time.Unix(vd.CreatedTime, 0).Format("2006-01-02 15:04:05"))
}
|
-- | API sketch for the GitHub organization-members endpoints.
--
-- NOTE(review): this module reads as a design sketch rather than compilable
-- Haskell: the signatures have no bindings, types such as @Org@, @Username@,
-- @Member@, @Membership@, @Page@, @Filter@ and the monad @m@ are neither
-- defined nor imported, @membership@ is declared twice, and the @Admin@ /
-- @Member@ constructors appear in both 'RoleFilter' and 'Role'.
module GitHub.Organizations.Members where

-- Filter for listing members by two-factor-auth status.
data MembersFilter = TwoFactorAuthDisabled | AllMembers

-- Filter for listing members by organization role.
data RoleFilter = AllRoles | Admin | Member

-- List members of an organization, optionally filtered.
members
  :: Org
  -> Maybe Filter
  -> Maybe Role
  -> m (Page Member)

-- TODO look into beta org permission api

-- Whether the given user is a member of the organization.
checkMembership
  :: Org
  -> Username
  -> m Bool

-- Remove a user from the organization.
removeMember
  :: Org
  -> Username
  -> m Bool

-- Members whose membership is publicly visible.
publicMembers
  :: m (Page Member)

checkPublicMembership
  :: Org
  -> Username
  -> m Bool

-- Make the user's membership publicly visible.
publicizeMembership
  :: Org
  -> Username
  -> m ()

-- Hide the user's membership from the public.
concealMembership
  :: Org
  -> Username
  -> m ()

-- Membership record for a specific user.
membership
  :: Org
  -> Username
  -> m Membership

data Role = Admin | Member

-- Create or update a user's membership with the given role.
setMembership
  :: Org
  -> Username
  -> Role
  -> m Membership

deleteMembership
  :: Org
  -> Username
  -> m ()

data MembershipState = Active | Pending

-- Memberships of the authenticated user, optionally filtered by state.
memberships
  :: Maybe MembershipState
  -> m (Page Membership)

-- NOTE(review): duplicate name — clashes with the two-argument @membership@
-- above if compiled as-is.
membership
  :: Org
  -> m Membership

data EditMembership = SetActive

editMembership
  :: Org
  -> EditMembership
  -> m Membership
|
{-# LANGUAGE DeriveFunctor #-}
module HydraSim.DelayedComp
( DelayedComp (..),
delayedComp, promptComp,
runComp
) where
import Control.Monad.Class.MonadTimer
import Data.Time.Clock (DiffTime)
-- | A computation that might take a non-negligible amount of time.
data DelayedComp a = DelayedComp
  { -- | The value produced by the computation
    unComp :: a,
    -- | In simulations, we allow for an explicit delay to replace an actual
    -- computation. This allows us to mock things like scripts and cryptographic
    -- primitives. 'Nothing' means the computation is treated as instantaneous.
    compDelay :: Maybe DiffTime
  }
  deriving (Show, Functor)
-- Applying a delayed function to a delayed value accumulates both delays.
instance Applicative DelayedComp where
  pure = promptComp
  (DelayedComp f d) <*> (DelayedComp x d') = DelayedComp (f x) (addMaybes d d')
-- Sequencing delayed computations accumulates their delays; the value of the
-- first computation is available to choose the second.
instance Monad DelayedComp where
  (DelayedComp x d) >>= f =
    let (DelayedComp x' d') = f x
     in DelayedComp x' (addMaybes d d')
  (DelayedComp _x d) >> (DelayedComp x' d') = DelayedComp x' (addMaybes d d')
-- | A computation that produces a given value after a given time.
delayedComp :: a -> DiffTime -> DelayedComp a
delayedComp x d = DelayedComp x (Just d)
-- | A computation that produces a given value immediately (no delay).
promptComp :: a -> DelayedComp a
promptComp x = DelayedComp x Nothing
-- | Run a computation (i.e., wait for the proper time, and yield the result).
-- Without a delay, the result is yielded without any call to 'threadDelay'.
runComp :: MonadTimer m => DelayedComp a -> m a
runComp (DelayedComp x (Just d)) = threadDelay d >> pure x
runComp (DelayedComp x Nothing) = pure x
-- Combine two optional delays: two present values are summed, a single
-- present value is kept, and two absent values stay absent.
addMaybes :: Num a => Maybe a -> Maybe a -> Maybe a
addMaybes mx my =
  case (mx, my) of
    (Just x, Just y) -> Just (x + y)
    (Just x, Nothing) -> Just x
    (Nothing, Just y) -> Just y
    (Nothing, Nothing) -> Nothing
|
/** Valid token types in expressions. */
export enum TokenType {
    // Declaration order assigns the numeric enum values (0..8); reordering
    // members would change any serialized token types.
    /** An operator. */
    Operator,
    /** An identifier. */
    Identifier,
    /** A string literal. */
    String,
    /**
     * The start of a template until its first expression.
     *
     * See https://tc39.github.io/ecma262/#sec-template-literal-lexical-components for documentation on the
     * ECMAScript lexical components for templates, upon which this is based.
     */
    TemplateHead,
    /** The end of a previous template expression until the next template expression. */
    TemplateMiddle,
    /** The end of a previous template expression until the end of the template. */
    TemplateTail,
    /** A template with no substitutions. */
    NoSubstitutionTemplate,
    /** A number literal. */
    Number,
}
/** A token that the expression lexer scanned in an expression. */
export interface Token {
    /** The type of this token. */
    type: TokenType
    /**
     * The token's value.
     *
     * For string and template literals, this is the parsed string value (after accounting for escape sequences but
     * not template expressions). For number literals, this is the (unparsed) string representation.
     * For operators and identifiers, this is the scanned text itself.
     */
    value: any
    /** The start character position of this token. */
    start: number
    /** The end character position of this token (exclusive). */
    end: number
}
/**
 * All valid operators in expressions. The values are the operator precedence
 * (or 0 for punctuation tokens that are not true operators). This
 * must be kept in sync with OPERATOR_CHARS.
 *
 * Exported for testing only.
 */
export const OPERATORS = {
    '(': 0,
    ')': 0,
    '}': 0,
    ',': 0,
    '=': 0,
    '||': 1,
    '&&': 2,
    '^': 4,
    '==': 6,
    '!=': 6,
    '===': 6,
    '!==': 6,
    '<': 7,
    '>': 7,
    '<=': 7,
    '>=': 7,
    '+': 9,
    '-': 9,
    '*': 10,
    '/': 10,
    '%': 10,
    '!': 11,
}

/** All valid operators. */
export type Operator = keyof typeof OPERATORS

/** Either `true` (a complete operator) or a map of valid continuation characters. */
export type OperatorTree = boolean | { [ch: string]: OperatorTree }
/**
 * A tree with the next valid operator characters for multi-character operators. This must be kept in sync with
 * OPERATORS.
 *
 * NOTE(review): the '\u0000' entries appear to mark prefixes that are complete
 * operators on their own ('\u0000' is also what peekNextChar() yields at end
 * of input) — confirm before relying on that interpretation.
 *
 * Exported for testing only.
 */
export const OPERATOR_CHARS: { [ch: string]: OperatorTree } = {
    '&': { '&': true },
    '|': { '|': true },
    '=': {
        '\u0000': true,
        '=': {
            '\u0000': true,
            '=': true,
        },
    },
    '!': {
        '\u0000': true,
        '=': {
            '\u0000': true,
            '=': true,
        },
    },
    '<': { '\u0000': true, '=': true },
    '>': { '\u0000': true, '=': true },
    '^': true,
    '}': true,
    '(': true,
    ')': true,
    ',': true,
    '+': true,
    '-': true,
    '*': true,
    '/': true,
    '%': true,
}
/** Whether the character is a space, tab, or non-breaking space (newlines are not whitespace here). */
function isWhiteSpace(character: string): boolean {
    return character === ' ' || character === '\u0009' || character === '\u00A0'
}
/** Whether the character is an ASCII letter (a-z or A-Z). */
function isLetter(character: string): boolean {
    return (character >= 'A' && character <= 'Z') || (character >= 'a' && character <= 'z')
}
/** Whether the character is an ASCII decimal digit (0-9). */
function isDecimalDigit(character: string): boolean {
    return !(character < '0' || character > '9')
}
/** Whether the character may begin an identifier: a letter or an underscore. */
function isIdentifierStart(character: string): boolean {
    return isLetter(character) || character === '_'
}
/** Whether the character may continue an identifier: a start character, a digit, or a dot. */
function isIdentifierPart(character: string): boolean {
    return character === '.' || isDecimalDigit(character) || isIdentifierStart(character)
}
/** Scans an expression. */
export class Lexer {
    private expression = ''
    private length = 0
    // Cursor into `expression`; shared with subclasses (see TemplateLexer).
    protected _index = 0
    // Start position of the token currently being scanned.
    private marker = 0
    // Number of currently open template substitutions (`${` without a matching `}`).
    protected curlyStack = 0

    /** The current character position of the lexer's cursor. */
    public get index(): number {
        return this._index
    }

    /** Resets the lexer to scan the given expression from the beginning. */
    public reset(string: string): void {
        this.expression = string
        this.length = string.length
        this._index = 0
        this.curlyStack = 0
    }

    /** Returns the next token, undefined at end of input, or throws on an unrecognized character. */
    public next(): Token | undefined {
        this.skipSpaces()
        if (this._index >= this.length) {
            return undefined
        }
        this.marker = this._index
        const token = this.scanNext()
        if (token !== undefined) {
            return token
        }
        throw new SyntaxError(`Unexpected character ${JSON.stringify(this.peekNextChar())} (at ${this.index})`)
    }

    /** Returns the next token without consuming it (undefined on error or end of input). */
    public peek(): Omit<Token, 'start' | 'end'> | undefined {
        // Save and restore all mutable scanner state so peeking has no side effects.
        const savedIndex = this._index
        const savedCurlyStack = this.curlyStack
        let token: Token | undefined
        try {
            token = this.next()
        } catch {
            token = undefined
        }
        this._index = savedIndex
        this.curlyStack = savedCurlyStack
        if (!token) {
            return undefined
        }
        return { type: token.type, value: token.value }
    }

    // Tries each scanner in order; the first one that recognizes the input wins.
    protected scanNext(): Token | undefined {
        let token = this.scanString()
        if (token !== undefined) {
            return token
        }
        token = this.scanTemplate()
        if (token !== undefined) {
            return token
        }
        token = this.scanNumber()
        if (token !== undefined) {
            return token
        }
        token = this.scanOperator()
        if (token !== undefined) {
            return token
        }
        token = this.scanIdentifier()
        if (token !== undefined) {
            return token
        }
        return undefined
    }

    // Returns the character `advance` positions ahead, or '\u0000' past the end.
    private peekNextChar(advance = 0): string {
        const index = this._index + advance
        return index < this.length ? this.expression.charAt(index) : '\u0000'
    }

    // Consumes and returns the next character, or '\u0000' at end of input.
    private getNextChar(): string {
        let character = '\u0000'
        const index = this._index
        if (index < this.length) {
            character = this.expression.charAt(index)
            this._index += 1
        }
        return character
    }

    // Builds a token spanning from `marker` (set in next()) to the cursor.
    private createToken(type: TokenType, value: any): Token {
        return {
            type,
            value,
            start: this.marker,
            end: this._index,
        }
    }

    private skipSpaces(): void {
        while (this._index < this.length) {
            const character = this.peekNextChar()
            if (!isWhiteSpace(character)) {
                break
            }
            this.getNextChar()
        }
    }

    // Greedily walks OPERATOR_CHARS to consume the longest operator available.
    private scanOperator(): Token | undefined {
        let searchTree: OperatorTree | boolean = OPERATOR_CHARS
        let value = ''
        while (searchTree && searchTree !== true) {
            const character = this.peekNextChar()
            searchTree = searchTree[character]
            if (searchTree) {
                value += character
                this.getNextChar()
            }
        }
        if (value === '}') {
            // '}' closes a template substitution opened by '${'.
            this.curlyStack--
        }
        if (value === '') {
            return undefined
        }
        return this.createToken(TokenType.Operator, value)
    }

    private scanIdentifier(): Token | undefined {
        let character = this.peekNextChar()
        if (!isIdentifierStart(character)) {
            return undefined
        }
        let id = this.getNextChar()
        while (true) {
            character = this.peekNextChar()
            if (!isIdentifierPart(character)) {
                break
            }
            id += this.getNextChar()
        }
        return this.createToken(TokenType.Identifier, id)
    }

    // Scans a single- or double-quoted string literal, resolving backslash escapes.
    private scanString(): Token | undefined {
        const quote = this.peekNextChar()
        if (quote !== "'" && quote !== '"') {
            return undefined
        }
        this.getNextChar()
        let terminated = false
        let string = ''
        while (this._index < this.length) {
            const character = this.getNextChar()
            if (character === quote) {
                terminated = true
                break
            }
            if (character === '\\') {
                string += backslashEscapeCodeString(this.getNextChar())
            } else {
                string += character
            }
        }
        if (!terminated) {
            throw new Error(`Unterminated string literal (at ${this.index})`)
        }
        return this.createToken(TokenType.String, string)
    }

    // A template starts at a backtick, or at '}' when it closes a substitution.
    private scanTemplate(): Token | undefined {
        const character = this.peekNextChar()
        if (!(character === '`' || (character === '}' && this.curlyStack > 0))) {
            return undefined
        }
        this.getNextChar()
        const head = character === '`'
        return this.doScanTemplate(head)
    }

    // Whether templates are delimited by backticks; overridden by TemplateLexer.
    protected backtick(): boolean {
        return true
    }

    // Scans template text until a backtick (tail) or '${' (substitution start).
    protected doScanTemplate(head: boolean): Token {
        let tail = false
        let terminated = false
        let hasSubstitution = false
        let string = ''
        while (this._index < this.length) {
            const character = this.getNextChar()
            if (character === '`' && this.backtick()) {
                tail = true
                terminated = true
                break
            }
            if (character === '\\') {
                string += backslashEscapeCodeString(this.getNextChar())
            } else {
                if (character === '$') {
                    const character2 = this.peekNextChar()
                    if (character2 === '{') {
                        this.curlyStack++
                        this.getNextChar()
                        terminated = true
                        hasSubstitution = true
                        break
                    }
                }
                string += character
            }
        }
        if (!head) {
            // This segment was entered via '}', which closed a substitution.
            this.curlyStack--
        }
        if (this.backtick()) {
            if (!terminated) {
                throw new Error(`Unterminated template literal (at ${this.index})`)
            }
        } else if (this._index === this.length) {
            // Backtick-less root templates (TemplateLexer) end at end of input.
            tail = true
        }
        let type: TokenType
        if (head && terminated && !hasSubstitution) {
            type = TokenType.NoSubstitutionTemplate
        } else if (head) {
            type = TokenType.TemplateHead
        } else if (tail) {
            type = TokenType.TemplateTail
        } else {
            type = TokenType.TemplateMiddle
        }
        return this.createToken(type, string)
    }

    // Scans integer, decimal, and exponent forms; the value stays unparsed text.
    private scanNumber(): Token | undefined {
        let character = this.peekNextChar()
        if (!isDecimalDigit(character) && character !== '.') {
            return undefined
        }
        let number = ''
        if (character !== '.') {
            number = this.getNextChar()
            while (true) {
                character = this.peekNextChar()
                if (!isDecimalDigit(character)) {
                    break
                }
                number += this.getNextChar()
            }
        }
        if (character === '.') {
            number += this.getNextChar()
            while (true) {
                character = this.peekNextChar()
                if (!isDecimalDigit(character)) {
                    break
                }
                number += this.getNextChar()
            }
        }
        if (character === 'e' || character === 'E') {
            number += this.getNextChar()
            character = this.peekNextChar()
            if (character === '+' || character === '-' || isDecimalDigit(character)) {
                number += this.getNextChar()
                while (true) {
                    character = this.peekNextChar()
                    if (!isDecimalDigit(character)) {
                        break
                    }
                    number += this.getNextChar()
                }
            } else {
                // Reuse `character` to build the error description.
                character = `character ${JSON.stringify(character)}`
                if (this._index >= this.length) {
                    character = '<end>'
                }
                throw new SyntaxError(`Unexpected ${character} after the exponent sign (at ${this.index})`)
            }
        }
        if (number === '.') {
            throw new SyntaxError(`Expected decimal digits after the dot sign (at ${this.index})`)
        }
        return this.createToken(TokenType.Number, number)
    }
}
/** Scans a template. */
export class TemplateLexer extends Lexer {
    public next(): Token | undefined {
        // The whole input is a template, so the very first token is scanned
        // as template text rather than as an expression.
        if (this._index === 0) {
            return this.doScanTemplate(true)
        }
        return super.next()
    }
    protected backtick(): boolean {
        // The root is not surrounded with backticks.
        return this.curlyStack !== 0
    }
}
/** Resolves a one-character backslash escape code; unknown codes map to themselves. */
function backslashEscapeCodeString(character: string): string {
    const escapes = new Map<string, string>([
        ['n', '\n'],
        ['r', '\r'],
        ['t', '\t'],
    ])
    const escaped = escapes.get(character)
    return escaped === undefined ? character : escaped
}
|
; An interface to prove$ that indicates whether a step limit was reached.
;
; Copyright (C) 2022 Kestrel Institute
;
; License: A 3-clause BSD license. See the file books/3BSD-mod.txt.
;
; Author: Eric Smith (eric.smith@kestrel.edu)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package "ACL2")
;; TODO: Add a way to return the failed subgoals
;; TODO: Add a version that returns the runes used (see get-event-data)
;(include-book "tools/prove-dollar" :dir :system)
(include-book "prove-dollar-nice")
;; Returns (mv erp provedp failure-info state), where failure-info may be
;; :step-limit-reached or :unknown.
;; Wraps prove$-nice, printing a summary line and classifying failures by
;; inspecting the prover-step count recorded in state.
(defun prove$+-fn (term ; untranslated (todo: optimize if known to be translated?)
                   hints
                   instructions
                   otf-flg
                   step-limit ; don't support time-limit because that's not portable
                   state)
  (declare (xargs :guard (and (booleanp otf-flg)
                              (or (natp step-limit)
                                  (null step-limit)))
                  :mode :program ; because this (ultimately) calls the prover
                  :stobjs state))
  (mv-let (erp val state)
    (prove$-nice term
                 :hints hints
                 :instructions instructions
                 :otf-flg otf-flg
                 ;; :ignore-ok t ; okay to have ignored let-vars
                 :step-limit step-limit)
    (if erp
        (mv erp nil nil state)
      ;; no error (but may have failed to prove):
      (let* ((prover-steps (last-prover-steps state))
             ;; replace nil, which can happen for very trivial theorems, with 0:
             (prover-steps (or prover-steps 0)))
        (if val
            ;; proved:
            (progn$ (cw "Proved it in ~x0 steps.~%" prover-steps)
                    (mv nil t nil state))
          ;; failed to prove:
          (if (not (natp prover-steps))
              ;; negative prover-steps means reached the step limit
              (progn$ (cw "Failed to prove (step limit of ~x0 reached).~%" step-limit)
                      (mv nil nil :step-limit-reached state))
            ;; NOTE(review): prover-steps is passed to cw here but the format
            ;; string contains no directive for it -- confirm intent.
            (progn$ (cw "Failed to prove (unknown reason).~%" prover-steps)
                    (mv nil nil :unknown state))))))))
;; Returns (mv erp provedp failure-info state), where failure-info may be
;; :step-limit-reached or :unknown.
;; Thin macro front end for prove$+-fn; passes state implicitly.
(defmacro prove$+ (term
                   &key
                   (hints 'nil)
                   (instructions 'nil)
                   (otf-flg 'nil)
                   (step-limit 'nil))
  `(prove$+-fn ,term ,hints ,instructions ,otf-flg ,step-limit state))
;; Tests:
;; (prove$+ '(equal (car (cons x y)) x))
;; (prove$+ '(equal (car (cons x y)) x) :step-limit 6)
|
using System;
using Citolab.Repository.Helpers;
using Newtonsoft.Json;
namespace Citolab.QConstruction.Model
{
    /// <summary>
    /// Count of items per status
    /// </summary>
    public class ItemStatusCount : Citolab.Repository.Model
    {
        /// <summary>
        /// Empty constructor
        /// </summary>
        public ItemStatusCount()
        {
        }

        /// <summary>
        /// Constructor to init with values
        /// </summary>
        /// <param name="wishListId">Id of the wishlist the count belongs to</param>
        /// <param name="state">Item status being counted</param>
        /// <param name="count">Number of items in that status</param>
        public ItemStatusCount(Guid wishListId, ItemStatus state, int count)
        {
            WishlistId = wishListId;
            ItemStatus = state;
            Count = count;
        }

        /// <summary>
        /// Status of the item (settable only via the constructor)
        /// </summary>
        [JsonProperty("itemStatus")]
        public ItemStatus ItemStatus { get; private set; }

        /// <summary>
        /// Number of items
        /// </summary>
        public int Count { get; set; }

        /// <summary>
        /// Wishlist id
        /// </summary>
        [EnsureIndex]
        public Guid WishlistId { get; set; }
    }
}
|
#!/usr/bin/env perl
use strict;
use warnings;
use lib ($ENV{EUK_MODULES});
use Fasta_reader;
my $usage = "usage: $0 transcripts.cdna.fasta\n\n";

# First (and only) argument: the transcript FASTA file to process.
my $transcripts_fasta_file = $ARGV[0] or die $usage;

# Number of per-gene outputs to place in each output subdirectory.
my $graphs_per_dir = 100;
main: {
    # Stream the FASTA, grouping sequences by gene.  Accessions are expected
    # to look like "transcript;gene" (split on the first ';').
    my $fasta_reader = new Fasta_reader($transcripts_fasta_file) or die $!;

    my $count = 0;
    my %gene_to_seq;
    while (my $seq_obj = $fasta_reader->next()) {
        my $accession = $seq_obj->get_accession();
        my $sequence = $seq_obj->get_sequence();
        my ($trans, $gene) = split(/;/, $accession);
        $gene_to_seq{$gene}->{$trans} = $sequence;
    }

    foreach my $gene (keys %gene_to_seq) {
        my $trans_href = $gene_to_seq{$gene};
        my @trans = keys %$trans_href;
        unless (scalar @trans > 1) {
            # want just alt-splice ones.
            next;
        }

        # Sanitize the gene name for use as a filename.
        $gene =~ s/\W/_/g;

        # Bucket output files into subdirectories of $graphs_per_dir genes each.
        my $dir_no = int($count/$graphs_per_dir);
        my $outdir = "gene_altSplice_graphs/g_$dir_no";
        if (! -d $outdir) {
            &process_cmd("mkdir -p $outdir");
        }

        # Write all of this gene's transcripts to a per-gene FASTA file.
        my $fa_file = "$outdir/$gene.fa";
        open (my $ofh, ">$fa_file") or die $!;
        foreach my $trans_acc (keys %$trans_href) {
            my $seq = $trans_href->{$trans_acc};
            print $ofh ">$trans_acc\n$seq\n";
        }
        close $ofh;

        # Build the splice graph with Monarch.
        # NOTE(review): hard-coded per-user path; consider making configurable.
        my $cmd = "~/SVN/trinityrnaseq/trunk/util/misc/Monarch --misc_seqs $fa_file --graph $fa_file.dot";
        &process_cmd($cmd);

        $count++;
    }

    print STDERR "\n\nDone.\n\n";

    exit(0);
}
####
# Run a shell command, echoing it to STDERR first; dies on non-zero exit.
sub process_cmd {
    my ($cmd) = @_;

    print STDERR "CMD: $cmd\n";

    my $exit_status = system($cmd);
    die "Error, cmd: $cmd died with ret $exit_status" if $exit_status;

    return;
}
|
// Copyright Carl Philipp Reh 2009 - 2016.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef FCPPT_OPTIONAL_FILTER_HPP_INCLUDED
#define FCPPT_OPTIONAL_FILTER_HPP_INCLUDED
#include <fcppt/config/external_begin.hpp>
#include <type_traits>
#include <utility>
#include <fcppt/config/external_end.hpp>
namespace fcppt
{
namespace optional
{
/**
\brief Filters an optional

\ingroup fcpptoptional

If \a _source is set to <code>x</code> and <code>_function(x)</code> returns
true, \a _source is returned. Otherwise, the empty optional is returned.

\tparam Function A function callable as <code>bool (Optional::value_type)</code>.

\return The forwarded \a _source when the predicate holds, otherwise a
default-constructed (empty) optional of the same decayed type.
*/
template<
	typename Optional,
	typename Function
>
typename
std::decay<
	Optional
>::type
filter(
	Optional &&_source,
	Function const &_function
)
{
	// Short-circuit: the predicate is only applied when a value is present.
	return
		_source.has_value()
		&&
		_function(
			_source.get_unsafe()
		)
		?
			std::forward<
				Optional
			>(
				_source
			)
		:
			typename
			std::decay<
				Optional
			>::type{}
		;
}
}
}
#endif
|
(ns differentiae.scheduling)
;; NOTE(review): the protocol's semantics are not documented in this
;; namespace; the docstrings below are intentionally minimal — confirm
;; against the implementations.
(defprotocol Schedule
  "Protocol for schedulable things."
  (path [x]
    "Return the path associated with x.")
  (schedule! [x]
    "Schedule x; the trailing ! indicates a side-effecting operation."))
|
{*******************************************************}
{ }
{ Firebird Database Converter }
{ }
{*******************************************************}
// -----------------------------------------------------------------------------
// Note:
// - Firebird Raw Access Utility Unit
// -----------------------------------------------------------------------------
unit uFBRawAccess;
interface
uses
System.Classes, System.SysUtils, ZPlainFirebirdInterbaseConstants, ZPlainLoader;
type
  // Raw handle and status-vector types for the Firebird client API.
  TISC_SVC_HANDLE = PVoid;
  PISC_SVC_HANDLE = ^TISC_SVC_HANDLE;
  TISC_DB_HANDLE = LongWord;
  PISC_DB_HANDLE = ^TISC_DB_HANDLE;
  ISC_STATUS_VECTOR = array [0..19] of ISC_STATUS;
  PSTATUS_VECTOR = ^ISC_STATUS_VECTOR;
  PPSTATUS_VECTOR = ^PSTATUS_VECTOR;

  // Function-pointer types matching the fbclient entry points (resolved
  // dynamically, e.g. via ZPlainLoader).
  Tisc_service_attach = function (status_vector: PSTATUS_VECTOR;
                                  service_length: UShort;
                                  service: PByte;
                                  svc_handle: PISC_SVC_HANDLE;
                                  spb_length: UShort;
                                  spb: PByte): ISC_STATUS; stdcall;
  Tisc_service_detach = function (status_vector: PSTATUS_VECTOR;
                                  service_handle: PISC_SVC_HANDLE): ISC_STATUS; stdcall;
  Tisc_service_query = function (status_vector: PSTATUS_VECTOR;
                                 svc_handle: PISC_SVC_HANDLE;
                                 recv_handle: PISC_SVC_HANDLE; // Reserved
                                 send_spb_length: UShort;
                                 send_spb: PByte;
                                 recv_spb_length: UShort;
                                 recv_spb: PByte;
                                 buffer_length: UShort;
                                 buffer: PByte): ISC_STATUS; stdcall;
  Tisc_attach_database = function (status_vector: PSTATUS_VECTOR;
                                   db_name_length: UShort;
                                   db_name: PByte;
                                   db_handle: PISC_DB_HANDLE;
                                   parm_buffer_length: UShort;
                                   parm_buffer: PByte): ISC_STATUS; stdcall;
  Tisc_detach_database = function (status_vector: PSTATUS_VECTOR;
                                   db_handle: PISC_DB_HANDLE): ISC_STATUS; stdcall;
  Tisc_database_info = function (status_vector: PSTATUS_VECTOR;
                                 db_handle: PISC_DB_HANDLE;
                                 item_list_buffer_length: UShort;
                                 item_list_buffer: PByte;
                                 result_buffer_length: UShort;
                                 result_buffer: PByte): ISC_STATUS; stdcall;
  Tisc_interprete = function (buffer: PByte; status_vector_ptr: PPSTATUS_VECTOR): ISC_STATUS; stdcall;
  Tisc_vax_integer = function (buffer: PByte; length: Short): ISC_LONG; stdcall;
  Tisc_get_client_version = procedure (buffer: PByte); stdcall;

const
  // Service parameter block (SPB) item codes.
  isc_spb_user_name = 1;
  isc_spb_sys_user_name = 2;
  isc_spb_sys_user_name_enc = 3;
  isc_spb_password = 4;
  isc_spb_password_enc = 5;
  isc_spb_command_line = 6;
  isc_spb_dbname = 7;
  isc_spb_verbose = 8;
  isc_spb_options = 9;
  isc_spb_connect_timeout = 10;
  isc_spb_dummy_packet_interval = 11;
  isc_spb_sql_role_name = 12;
  isc_spb_instance_name = 13;
  isc_spb_user_dbname = 14;
  isc_spb_auth_dbname = 15;
  isc_spb_last_spb_constant = isc_spb_auth_dbname;

  // SPB version tags.
  isc_spb_version1 = 1;
  isc_spb_current_version = 2;
  isc_spb_version = isc_spb_current_version;

  // SPB item codes as mapped to the server side.
  isc_spb_user_name_mapped_to_server = isc_dpb_user_name;
  isc_spb_sys_user_name_mapped_to_server = isc_dpb_sys_user_name;
  isc_spb_sys_user_name_enc_mapped_to_server = isc_dpb_sys_user_name_enc;
  isc_spb_password_mapped_to_server = isc_dpb_password;
  isc_spb_password_enc_mapped_to_server = isc_dpb_password_enc;
  isc_spb_command_line_mapped_to_server = 105;
  isc_spb_dbname_mapped_to_server = 106;
  isc_spb_verbose_mapped_to_server = 107;
  isc_spb_options_mapped_to_server = 108;
  isc_spb_user_dbname_mapped_to_server = 109;
  isc_spb_auth_dbname_mapped_to_server = isc_spb_user_dbname_mapped_to_server;
  isc_spb_connect_timeout_mapped_to_server = isc_dpb_connect_timeout;
  isc_spb_dummy_packet_interval_mapped_to_server = isc_dpb_dummy_packet_interval;
  isc_spb_sql_role_name_mapped_to_server = isc_dpb_sql_role_name;
  isc_spb_instance_name_mapped_to_server = 75;

  // Service query (isc_service_query) information items.
  isc_info_svc_svr_db_info = 50;
  isc_info_svc_get_license = 51;
  isc_info_svc_get_license_mask = 52;
  isc_info_svc_get_config = 53;
  isc_info_svc_version = 54;
  isc_info_svc_server_version = 55;
  isc_info_svc_implementation = 56;
  isc_info_svc_capabilities = 57;
  isc_info_svc_user_dbpath = 58;
  isc_info_svc_get_env = 59;
  isc_info_svc_get_env_lock = 60;
  isc_info_svc_get_env_msg = 61;
  isc_info_svc_line = 62;
  isc_info_svc_to_eof = 63;
  isc_info_svc_timeout = 64;
  isc_info_svc_get_licensed_users = 65;
  isc_info_svc_limbo_trans = 66;
  isc_info_svc_running = 67;
  isc_info_svc_get_users = 68;
  isc_info_svc_get_db_alias = 69;

  // Generic info-buffer delimiters / status codes.
  isc_info_end = 1;
  isc_info_truncated = 2;
  isc_info_error = 3;
  isc_info_data_not_ready = 4;
  isc_info_flag_end = 127;

  // Database information (isc_database_info) items.
  isc_info_db_id = 4;
  isc_info_reads = 5;
  isc_info_writes = 6;
  isc_info_fetches = 7;
  isc_info_marks = 8;
  isc_info_implementation = 11;
  isc_info_version = 12;
  isc_info_base_level = 13;
  isc_info_page_size = 14;
  isc_info_num_buffers = 15;
  isc_info_limbo = 16;
  isc_info_current_memory = 17;
  isc_info_max_memory = 18;
  isc_info_window_turns = 19;
  isc_info_license = 20;
  isc_info_allocation = 21;
  isc_info_attachment_id = 22;
  isc_info_read_seq_count = 23;
  isc_info_read_idx_count = 24;
  isc_info_insert_count = 25;
  isc_info_update_count = 26;
  isc_info_delete_count = 27;
  isc_info_backout_count = 28;
  isc_info_purge_count = 29;
  isc_info_expunge_count = 30;
  isc_info_sweep_interval = 31;
  isc_info_ods_version = 32;
  isc_info_ods_minor_version = 33;
  isc_info_no_reserve = 34;
  isc_info_logfile = 35;
  isc_info_cur_logfile_name = 36;
  isc_info_cur_log_part_offset = 37;
  isc_info_num_wal_buffers = 38;
  isc_info_wal_buffer_size = 39;
  isc_info_wal_ckpt_length = 40;
  isc_info_wal_cur_ckpt_interval = 41;
  isc_info_wal_prv_ckpt_fname = 42;
  isc_info_wal_prv_ckpt_poffset = 43;
  isc_info_wal_recv_ckpt_fname = 44;
  isc_info_wal_recv_ckpt_poffset = 45;
  isc_info_wal_grpc_wait_usecs = 47;
  isc_info_wal_num_io = 48;
  isc_info_wal_avg_io_size = 49;
  isc_info_wal_num_commits = 50;
  isc_info_wal_avg_grpc_size = 51;
  isc_info_forced_writes = 52;
  isc_info_user_names = 53;
  isc_info_page_errors = 54;
  isc_info_record_errors = 55;
  isc_info_bpage_errors = 56;
  isc_info_dpage_errors = 57;
  isc_info_ipage_errors = 58;
  isc_info_ppage_errors = 59;
  isc_info_tpage_errors = 60;
  isc_info_set_page_buffers = 61;
  isc_info_db_SQL_dialect = 62;
  isc_info_db_read_only = 63;
  isc_info_db_size_in_pages = 64;

// Append a single parameter-buffer item (tag byte).
procedure Build_PBItem(var PBBuf: TBytesStream; Item: Byte);
// Append an item followed by a length-prefixed ANSI string.
procedure Build_PBString(var PBBuf: TBytesStream; Item: Byte; Contents: string);
// Build a DPB carrying user name and password.
function Build_PB(aUserName, aPassword: string): TBytes;
// Compose a connection filespec: [host[/port]:]database.
function Build_FileSpec(aHostName: string; aPort: uInt16; aDatabaseName: string): string;
// Build the connection string as ANSI bytes.
function Build_DatabaseName(aHostName: string; aPort: uInt16; aDatabaseName: string): TBytes;
implementation
procedure Build_PBItem(var PBBuf: TBytesStream; Item: Byte);
// Append a single parameter-buffer item (tag byte) to PBBuf.
begin
  // Item
  PBBuf.Write(Item, SizeOf(Item));
end;
procedure Build_PBString(var PBBuf: TBytesStream; Item: Byte; Contents: string);
// Append a parameter-buffer item followed by a one-byte-length-prefixed
// ANSI string to PBBuf.
var
  B: Byte;
  Buf: TBytes;
begin
  // Item tag
  Build_PBItem(PBBuf, Item);

  // Contents, converted to ANSI bytes
  Buf := TEncoding.Convert(TEncoding.Default, TEncoding.ANSI, BytesOf(Contents));

  // The length prefix is a single byte, so the payload cannot exceed 255
  // bytes. Previously, a longer string silently wrapped the length byte
  // while still writing all the bytes, corrupting the buffer; clamp instead.
  if Length(Buf) > High(Byte) then
    SetLength(Buf, High(Byte));

  B := Length(Buf);
  PBBuf.Write(B, SizeOf(B));
  PBBuf.WriteBuffer(Buf, Length(Buf));
end;
function Build_PB(aUserName, aPassword: string): TBytes;
// Build a DPB (database parameter buffer) carrying the given credentials.
var
  BytesStream: TBytesStream;
begin
  BytesStream := TBytesStream.Create;
  try
    // Version tag; must be the first item in the buffer.
    Build_PBItem(BytesStream, isc_dpb_version1);
    // User Name
    Build_PBString(BytesStream, isc_dpb_user_name, aUserName);
    // Password
    Build_PBString(BytesStream, isc_dpb_password, aPassword);
    // Return only the bytes actually written (Bytes exposes the full
    // internal capacity, so trim to Position).
    Result := BytesStream.Bytes;
    SetLength(Result, BytesStream.Position);
  finally
    BytesStream.Free;
  end;
end;
function Build_FileSpec(aHostName: string; aPort: uInt16; aDatabaseName: string): string;
// Builds a connection filespec of the form "[host[/port]:]database".
// The "/port" suffix is only added for a non-empty host with a port that
// is neither 0 (unspecified) nor 3050 (the default); the ":" separator is
// only added when a host is present.
var
  Host: string;
begin
  Host := Trim(aHostName);
  if Host <> '' then
  begin
    if (aPort <> 0) and (aPort <> 3050) then
      Host := Host + Format('/%d', [aPort]);
    Host := Host + ':';
  end;
  Result := Host + Trim(aDatabaseName);
end;
function Build_DatabaseName(aHostName: string; aPort: uInt16; aDatabaseName: string): TBytes;
// Builds the database connection string ("host[/port]:database" via
// Build_FileSpec) and returns it as ANSI-encoded bytes.
var
s: string;
begin
s := Build_FileSpec(aHostName, aPort, aDatabaseName);
Result := TEncoding.Convert(TEncoding.Default, TEncoding.ANSI, BytesOf(s));
end;
end.
|
## `HelloWorld.java`
Source code used by many of the examples.
### Build
```bash
javac HelloWorld.java
```
### Package
```bash
jar -cvf HelloWorld.jar HelloWorld.class
```
### Run
```bash
java HelloWorld
java -jar HelloWorld.jar
```
|
package com.aitorgf.threekt.core
import com.aitorgf.threekt.math.Vector2
import com.aitorgf.threekt.math.Vector3
/**
 * Result of a ray intersection test against an [Object3].
 *
 * Bundles the hit distance, hit point, the intersected face and object,
 * UV coordinates at the hit point, and an instance identifier.
 */
@ExperimentalJsExport
@JsExport
class Intersection3(
    // Distance from the ray origin to the intersection point.
    val distance: Double,
    // Position of the intersection point.
    val point: Vector3,
    val face: Any, // TODO: Define type
    // Index of the intersected face.
    val faceIndex: Int,
    // The object that was intersected.
    val obj: Object3,
    // UV coordinates at the intersection point.
    val uv: Vector2,
    // Second set of UV coordinates at the intersection point
    // (presumably a second UV channel -- confirm against usage).
    val uv2: Vector2,
    val instanceId: Int // TODO: What is this?
) {
}
|
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};
use super::{Service, ServiceFactory};
/// Service for the `map_err` combinator, changing the type of a service's
/// error.
///
/// This is created by the `ServiceExt::map_err` method.
pub struct MapErr<A, F, E> {
    /// The wrapped inner service.
    service: A,
    /// Closure applied to each error produced by `service`.
    f: F,
    /// Marks the mapped error type `E`; not stored at runtime.
    _t: PhantomData<E>,
}

impl<A, F, E> MapErr<A, F, E> {
    /// Create new `MapErr` combinator, wrapping `service` so that every
    /// error it produces is converted through `f`.
    pub(crate) fn new(service: A, f: F) -> Self
    where
        A: Service,
        F: Fn(A::Error) -> E,
    {
        Self {
            service,
            f,
            _t: PhantomData,
        }
    }
}
impl<A, F, E> Clone for MapErr<A, F, E>
where
    A: Clone,
    F: Clone,
{
    /// Duplicate the combinator by cloning the wrapped service and the
    /// error-mapping closure (`PhantomData` is zero-sized).
    fn clone(&self) -> Self {
        let service = self.service.clone();
        let f = self.f.clone();
        MapErr { service, f, _t: PhantomData }
    }
}
impl<A, F, E> Service for MapErr<A, F, E>
where
    A: Service,
    F: Fn(A::Error) -> E + Clone,
{
    type Request = A::Request;
    type Response = A::Response;
    /// Errors surface as the mapped type `E` instead of `A::Error`.
    type Error = E;
    type Future = MapErrFuture<A, F, E>;

    fn poll_ready(&mut self, ctx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Readiness errors are mapped eagerly, borrowing the closure.
        self.service.poll_ready(ctx).map_err(&self.f)
    }

    fn call(&mut self, req: A::Request) -> Self::Future {
        // Call errors are mapped lazily: the returned future owns a clone
        // of `f` and applies it when the inner future resolves.
        MapErrFuture::new(self.service.call(req), self.f.clone())
    }
}
/// Future returned by [`MapErr::call`]; drives the inner service's future
/// and converts its error, if any, with the stored closure.
#[pin_project::pin_project]
pub struct MapErrFuture<A, F, E>
where
    A: Service,
    F: Fn(A::Error) -> E,
{
    /// Error-mapping closure, applied when the inner future completes.
    f: F,
    /// Inner service future; structurally pinned.
    #[pin]
    fut: A::Future,
}

impl<A, F, E> MapErrFuture<A, F, E>
where
    A: Service,
    F: Fn(A::Error) -> E,
{
    /// Pair the pending inner future with the error-mapping closure.
    fn new(fut: A::Future, f: F) -> Self {
        MapErrFuture { f, fut }
    }
}
impl<A, F, E> Future for MapErrFuture<A, F, E>
where
    A: Service,
    F: Fn(A::Error) -> E,
{
    type Output = Result<A::Response, E>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Project through the pin, drive the inner future, and rewrite a
        // ready `Err` with the closure; `Ok` and `Pending` pass through.
        let this = self.project();
        this.fut.poll(cx).map_err(this.f)
    }
}
/// Factory for the `map_err` combinator, changing the type of a new
/// service's error.
///
/// This is created by the `NewServiceExt::map_err` method.
pub struct MapErrServiceFactory<A, F, E>
where
    A: ServiceFactory,
    F: Fn(A::Error) -> E + Clone,
{
    /// Inner service factory.
    a: A,
    /// Closure installed into every service this factory builds.
    f: F,
    /// Marks the mapped error type `E`; not stored at runtime.
    e: PhantomData<E>,
}

impl<A, F, E> MapErrServiceFactory<A, F, E>
where
    A: ServiceFactory,
    F: Fn(A::Error) -> E + Clone,
{
    /// Create new `MapErr` new service instance
    pub(crate) fn new(a: A, f: F) -> Self {
        Self {
            a,
            f,
            e: PhantomData,
        }
    }
}
impl<A, F, E> Clone for MapErrServiceFactory<A, F, E>
where
    A: ServiceFactory + Clone,
    F: Fn(A::Error) -> E + Clone,
{
    /// Duplicate the factory by cloning the inner factory and closure.
    fn clone(&self) -> Self {
        let a = self.a.clone();
        let f = self.f.clone();
        Self { a, f, e: PhantomData }
    }
}
impl<A, F, E> ServiceFactory for MapErrServiceFactory<A, F, E>
where
    A: ServiceFactory,
    F: Fn(A::Error) -> E + Clone,
{
    type Request = A::Request;
    type Response = A::Response;
    /// Built services report errors as the mapped type `E`.
    type Error = E;
    type Config = A::Config;
    type Service = MapErr<A::Service, F, E>;
    type InitError = A::InitError;
    type Future = MapErrServiceFuture<A, F, E>;

    fn new_service(&self, cfg: A::Config) -> Self::Future {
        // Start building the inner service; the returned future wraps the
        // built service in `MapErr` once construction completes.
        MapErrServiceFuture::new(self.a.new_service(cfg), self.f.clone())
    }
}
/// Future returned by [`MapErrServiceFactory::new_service`]; resolves the
/// inner factory's future and wraps the built service in [`MapErr`].
#[pin_project::pin_project]
pub struct MapErrServiceFuture<A, F, E>
where
    A: ServiceFactory,
    F: Fn(A::Error) -> E,
{
    /// Inner factory future; structurally pinned.
    #[pin]
    fut: A::Future,
    /// Closure to install into the built `MapErr` service.
    f: F,
}

impl<A, F, E> MapErrServiceFuture<A, F, E>
where
    A: ServiceFactory,
    F: Fn(A::Error) -> E,
{
    /// Pair the pending factory future with the error-mapping closure.
    fn new(fut: A::Future, f: F) -> Self {
        MapErrServiceFuture { f, fut }
    }
}
impl<A, F, E> Future for MapErrServiceFuture<A, F, E>
where
    A: ServiceFactory,
    F: Fn(A::Error) -> E + Clone,
{
    type Output = Result<MapErr<A::Service, F, E>, A::InitError>;

    /// Drive the inner factory future; on success, wrap the freshly built
    /// service in `MapErr` with a clone of the stored closure.
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let this = self.project();
        match this.fut.poll(cx)? {
            Poll::Ready(svc) => Poll::Ready(Ok(MapErr::new(svc, this.f.clone()))),
            Poll::Pending => Poll::Pending,
        }
    }
}
#[cfg(test)]
mod tests {
    use futures_util::future::{err, lazy, ok, Ready};

    use super::*;
    use crate::{IntoServiceFactory, Service, ServiceFactory};

    /// Minimal test service: never ready, and every call fails with `()`.
    struct Srv;

    impl Service for Srv {
        type Request = ();
        type Response = ();
        type Error = ();
        type Future = Ready<Result<(), ()>>;

        fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
            Poll::Ready(Err(()))
        }

        fn call(&mut self, _: ()) -> Self::Future {
            err(())
        }
    }

    // `poll_ready` errors must pass through the mapping closure.
    #[actori_rt::test]
    async fn test_poll_ready() {
        let mut srv = Srv.map_err(|_| "error");
        let res = lazy(|cx| srv.poll_ready(cx)).await;
        assert_eq!(res, Poll::Ready(Err("error")));
    }

    // `call` errors must pass through the mapping closure.
    #[actori_rt::test]
    async fn test_call() {
        let mut srv = Srv.map_err(|_| "error");
        let res = srv.call(()).await;
        assert!(res.is_err());
        assert_eq!(res.err().unwrap(), "error");
    }

    // Services built by a `map_err`-wrapped factory must map errors too.
    #[actori_rt::test]
    async fn test_new_service() {
        let new_srv = (|| ok::<_, ()>(Srv)).into_factory().map_err(|_| "error");
        let mut srv = new_srv.new_service(&()).await.unwrap();
        let res = srv.call(()).await;
        assert!(res.is_err());
        assert_eq!(res.err().unwrap(), "error");
    }
}
|
{-# LANGUAGE TypeFamilies,
MultiParamTypeClasses,
DeriveFunctor
#-}
{-|
Module : Data.List.AtLeast2
Copyright : (c) 2015 Maciej Piróg
License : MIT
Maintainer : maciej.adam.pirog@gmail.com
Stability : experimental
Lists with at least 2 elements. They do not form a monad (no way to
define @'return'@), but they form a @'Bind'@. Moreover, they are
the ideal that idealises non-empty lists.
-}
module Data.List.AtLeast2
(
AtLeast2(..),
toList,
toNonEmpty,
fromNonEmpty
)
where
import Prelude hiding (foldr)
import Control.Applicative (Applicative(..), (<$>))
import Data.List.NonEmpty (NonEmpty(..))
import Data.Foldable (Foldable(..))
import Data.Traversable (Traversable(..))
import Data.Functor.Apply (Apply(..))
import Data.Functor.Bind (Bind(..))
{- | List with at least two elements: a head element attached with ':|:'
to a 'NonEmpty' tail, so the type statically guarantees length >= 2. -}
data AtLeast2 a = a :|: NonEmpty a
  deriving (Eq, Ord, Show, Read, Functor)

infixr 4 :|:
-- | Flatten to an ordinary list; the result always has length >= 2.
toList :: AtLeast2 a -> [a]
toList (x :|: y :| xs) = x : y : xs
-- | Folds delegate to the flattened list representation.
instance Foldable AtLeast2 where
  foldr f a = foldr f a . toList
-- | Traverse the head, then the 'NonEmpty' tail, rebuilding with ':|:'.
instance Traversable AtLeast2 where
  traverse h (x :|: xs) = (:|:) <$> h x <*> traverse h xs
-- | Cartesian-style application: the first function is applied to every
-- argument, then the remaining functions (@g : fs@) are applied via the
-- list @<.>@ on the full argument list. NOTE(review): this appears to
-- match the plain list 'Apply' semantics on the flattened lists --
-- confirm against the Apply/Bind compatibility laws.
instance Apply AtLeast2 where
  (f :|: g :| fs) <.> (x :|: y :| xs) =
    f x :|: f y :| fmap f xs ++ (g : fs <.> x : y : xs)
-- | @m >>- h@: apply @h@ to the head to obtain the first results, then
-- append the binds of the remaining elements, delegating to the list
-- 'Bind' via 'toList'.
instance Bind AtLeast2 where
  x :|: y :| z >>- h = t :|: v :| ts ++ ((y : z) >>- (toList . h))
    where
      -- First two results (and the rest) come from binding the head.
      t :|: v :| ts = h x
-- | Convert a 'NonEmpty' list into an 'AtLeast2' list.
--
-- Partial: a singleton 'NonEmpty' cannot be represented. The original
-- definition failed with an opaque incomplete-pattern error in that
-- case; report it with an explicit message instead.
fromNonEmpty :: NonEmpty a -> AtLeast2 a
fromNonEmpty (x :| y : xs) = x :|: y :| xs
fromNonEmpty (_ :| []) =
  error "Data.List.AtLeast2.fromNonEmpty: NonEmpty list has only one element"
-- | Forget the length-2 guarantee, producing a 'NonEmpty'. Total.
toNonEmpty :: AtLeast2 a -> NonEmpty a
toNonEmpty (x :|: y :| xs) = x :| y : xs
|
require "blogger/engine"
module Blogger
# Name of the host application's user class (configured as a String,
# resolved on demand by +author_class+ below).
mattr_accessor :user_class
# Resolve the configured class name to the actual class constant.
# NOTE(review): despite the original comment ("override the getter for
# user_class"), nothing is overridden here -- this is an additional,
# differently-named reader. Confirm the intended name with callers.
def self.author_class
@@user_class.constantize
end
end
|
module Mod_sldup_ComputeSubscales
use typre
use Mod_sld_BaseElmope
use Mod_sldup_SubgridSpaceResidual
implicit none
private
public SetPointersComputeSubscales
!SubgridScales
integer(ip), allocatable :: kfl_IsSet, kfl_IsSetGetSubscales
contains
!----------------------------------------------------------------------------
!Setting Pointers
!Registers the subgrid-scale computation in the element procedure hooks.
!itask = 0: allocate the one-time guard flag; 1: register hooks (once);
!100: deallocate the guard flag.
subroutine SetPointersComputeSubscales(itask)
   implicit none
   integer(ip) :: itask

   select case (itask)

   case(0)
      allocate(kfl_IsSet)
      call a%Memor%allocObj(0,'kfl_IsSet','InitProcedurePointer',1)
      kfl_IsSet = -1

   case(1)
      !Guard: only wire the hooks the first time this is called.
      if (kfl_IsSet == -1) then
         kfl_IsSet = 1

         !We need to compute the residual and project it
         !to the subgrid scale space
         call SetPointersComputeSubgridSpaceResidual(1)

         !kfl_tacsg selects quasi-static (0) vs dynamically tracked (1)
         !subscales; the latter also needs its time integrator set up.
         if (up%kfl_tacsg == 0) then
            call ConcatenateProcedures(ProcHook%InGaussElmats,ComputeSubgridScaleQSS)
         elseif ( up%kfl_tacsg == 1) then
            call ConcatenateProcedures(ProcHook%Initializations,InitSGSTimeIntegrator)
            call ConcatenateProcedures(ProcHook%InGaussElmats,ComputeSubgridScaleDSS)
         endif
      endif

   case(100)
      deallocate(kfl_IsSet)
      call a%Memor%deallocObj(0,'kfl_IsSet','InitProcedurePointer',1)

   end select
end subroutine
!------------------------------------------------------------------
!Tracking of Subscales
!Dynamic subscales
!Sets up the time integrator used to advance the dynamic subscales:
!installs the scheme, its Newmark coefficients when the second-order
!scheme is 'NEWMA', and queries the scheme's derivative order.
subroutine InitSGSTimeIntegrator
   call php_SetTimeIntegrator(a,IntegratorSGS,LHSdtinv2SGS,nsteps)
   if (a%kfl_tsche_2nd_current == 'NEWMA') then
      call php_SetNewmarkCoefficients(a,IntegratorSGS,a%beta,a%gamma)
   endif
   call php_GetTimeSchemeDerivative(a,IntegratorSGS,tsch_deriv)
end subroutine
!Computes the subgrid scales with dynamic (time-tracked) subscales: the
!subscale history enters through the time-integrator right-hand side.
subroutine ComputeSubgridScaleDSS
   implicit none
   integer(ip) :: u1,uf,s1,sf,p1,bc
   real(rp)    :: tau_u,tau_s,tau_p
   real(rp)    :: gprhs(e%ndime)
   !aux holds the density*dtinv2-weighted integrator RHS. It must be
   !real: the original integer(ip) declaration silently truncated the
   !real-valued product gprhs*a%densi*a%dtinv2 on assignment.
   real(rp)    :: aux(e%ndime)

   call up%GetMatrixOrganization(u1,uf,s1,sf,p1,bc)
   call ProcPointer%getTauParameters(tau_u,tau_s,tau_p)

   !Time-integrator RHS built from the stored subscale history.
   call IntegratorSGS%GetRHS(e%ndime,up%u_sgs(ielem)%a(:,:,e%igaus),gprhs)
   aux = gprhs*a%densi*a%dtinv2

   !-------Tau_u------------
   up%u_sgs(ielem)%a(:,1,e%igaus) = tau_u*(gpSubscaleSpaceResidual(u1:uf) + aux)
   !-------Tau_p------------
   up%p_sgs(ielem)%a(e%igaus) = tau_p*gpSubscaleSpaceResidual(p1)
end subroutine
!Static subscales
!Quasi-static subscales: the subscale is simply the stabilization
!parameter times the subgrid-space residual (no time tracking).
subroutine ComputeSubgridScaleQSS
   implicit none
   !Unused locals (nd, tn, and the mistyped integer aux array) removed.
   integer(ip) :: u1,uf,s1,sf,p1,bc
   real(rp)    :: tau_u,tau_s,tau_p

   call up%GetMatrixOrganization(u1,uf,s1,sf,p1,bc)
   call ProcPointer%getTauParameters(tau_u,tau_s,tau_p)

   !-------Tau_u------------
   up%u_sgs(ielem)%a(:,1,e%igaus) = tau_u*gpSubscaleSpaceResidual(u1:uf)
   !-------Tau_p------------
   up%p_sgs(ielem)%a(e%igaus) = tau_p*gpSubscaleSpaceResidual(p1)
end subroutine
end module
|
package protocols.membership.partial.messages;
import babel.generic.ProtoMessage;
import io.netty.buffer.ByteBuf;
import network.ISerializer;
import network.data.Host;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
/**
 * Protocol message carrying a sample of the sender's active view and a
 * sample of its passive view, together with a TTL hop counter that the
 * receiver may update via {@link #setTtl(int)}.
 */
public class ShuffleMessage extends ProtoMessage {
    public final static short MSG_ID = 9007;

    // NOTE(review): both sets are stored and returned by reference, so a
    // caller mutating a returned set mutates this message -- confirm this
    // sharing is intended.
    private Set<Host> activeViewSample;
    private Set<Host> passiveViewSample;
    // Remaining hops; mutable so the protocol can decrement on forward.
    private int ttl;

    public ShuffleMessage(Set<Host> activeViewSample, Set<Host> passiveViewSample, int ttl) {
        super(MSG_ID);
        this.activeViewSample = activeViewSample;
        this.passiveViewSample = passiveViewSample;
        this.ttl = ttl;
    }

    /** Sample of the sender's active view (returned by reference). */
    public Set<Host> getActiveViewSample() {
        return activeViewSample;
    }

    /** Sample of the sender's passive view (returned by reference). */
    public Set<Host> getPassiveViewSample() {
        return passiveViewSample;
    }

    public int getTtl() {
        return ttl;
    }

    public void setTtl(int ttl) {
        this.ttl = ttl;
    }

    @Override
    public String toString() {
        return "ShuffleMessage{" +
                "activeViewSample=" + activeViewSample +
                " | passiveViewSample=" + passiveViewSample +
                " | TTL=" + ttl +
                '}';
    }

    /**
     * Wire format: [int activeCount][hosts...][int passiveCount][hosts...][int ttl].
     */
    public static ISerializer<ShuffleMessage> serializer = new ISerializer<>() {
        @Override
        public void serialize(ShuffleMessage shuffleMessage, ByteBuf out) throws IOException {
            Set<Host> activeViewSample = shuffleMessage.getActiveViewSample();
            Set<Host> passiveViewSample = shuffleMessage.getPassiveViewSample();
            int ttl = shuffleMessage.getTtl();
            out.writeInt(activeViewSample.size());
            for (Host h : activeViewSample)
                Host.serializer.serialize(h, out);
            out.writeInt(passiveViewSample.size());
            for (Host h : passiveViewSample)
                Host.serializer.serialize(h, out);
            out.writeInt(ttl);
        }

        @Override
        public ShuffleMessage deserialize(ByteBuf in) throws IOException {
            Set<Host> activeViewSample = getSetFromByteBuffer(in);
            Set<Host> passiveViewSample = getSetFromByteBuffer(in);
            int ttl = in.readInt();
            return new ShuffleMessage(activeViewSample, passiveViewSample, ttl);
        }
    };

    /** Reads an int count followed by that many serialized hosts. */
    private static Set<Host> getSetFromByteBuffer(ByteBuf in) throws IOException {
        int viewSampleSize = in.readInt();
        Set<Host> viewSample = new HashSet<>(viewSampleSize, 1);
        for (int i = 0; i < viewSampleSize; i++)
            viewSample.add(Host.serializer.deserialize(in));
        return viewSample;
    }
}
|
namespace GadzhiApplicationCommon.Models.Enums.StampCollections
{
    /// <summary>
    /// Stamp container type (тип контейнера штампов)
    /// </summary>
    public enum StampContainerType
    {
        /// <summary>Separate container</summary>
        Separate,
        /// <summary>United container</summary>
        United,
    }
}
|
# Snapshot report for `SectionRenderer/CardSectionRenderer/index.test.js`
The actual snapshot is saved in `index.test.js.snap`.
Generated by [AVA](https://ava.li).
## preact: renders a card section
> Snapshot 1
'<root><card-type payload="[object Object]"></card-type></root>'
## react: renders a card section
> Snapshot 1
'<card-type payload="[object Object]" data-reactroot=""></card-type>'
## ultradom: renders a card section
> Snapshot 1
'<card-type payload="[object Object]"></card-type>'
|
;;;; ASDF system definition: sb-simd backend support for the Typo library.
;;;; Components are compiled in order (:serial t), so later files may
;;;; depend on earlier ones.
(defsystem "typo.sb-simd"
  :description "sb-simd support for Typo."
  :author "Marco Heisig <marco.heisig@fau.de>"
  :license "MIT"
  :depends-on
  ("alexandria"
   "closer-mop"
   "sb-simd"
   "trivia"
   "typo")
  :serial t
  :components
  ((:module "sb-simd"
    :components
    ((:file "packages")
     (:file "associatives")
     (:file "casts")
     (:file "comparisons")
     (:file "ifs")
     (:file "instructions")
     (:file "reffers")
     (:file "reducers")
     (:file "unequals")
     (:file "cl")))))
|
#!/bin/bash
# Update, install prerequisites, build nheqminer (with the xenoncat CPU
# solver), and run it against the flypool us1 Zcash pool.
set -euo pipefail

# Install build prerequisites non-interactively (the original omitted -y
# and would stall waiting for confirmation in unattended runs).
sudo apt-get update
sudo apt-get install -y cmake build-essential libboost-all-dev

git clone -b Linux https://github.com/daogster/nheqminer.git
cd nheqminer

# Assemble the xenoncat CPU solver before the CMake build, without
# changing the current directory (the original stayed inside
# cpu_xenoncat/Linux/asm, so its later "cmake ../nheqminer" pointed at a
# nonexistent path).
(cd cpu_xenoncat/Linux/asm && sh assemble.sh)

# Out-of-source build from the repository root; the CMake project lives
# in the nheqminer/ subdirectory of the repository.
mkdir -p build && cd build
cmake ../nheqminer
make -j "$(nproc)"

# Run the resulting native executable directly (the original invoked it
# via "bash", which cannot execute an ELF binary).
./nheqminer -l us1-zcash.flypool.org:3333 -u t1Un5ELCTVgHZ6s2P4WaSUna4rXcsVnQdNm.p0t1
|
---
date: 2021-04-24
photo:
- url: /img/photos/20210424-1.jpg
alt: A white windmill behind some green bushes and blue skies.
- url: /img/photos/20210424-2.jpg
alt: A close up shot of some bluebells with a field of bluebells in the out of focus background.
- url: /img/photos/20210424-3.jpg
alt: A close up shot of some bluebells with a field of bluebells in the out of focus background.
- url: /img/photos/20210424-4.jpg
alt: A chaffinch sat on a metal trough with a field in the background.
- url: /img/photos/20210424-5.jpg
alt: My parents and Bruno posing for a photo in front of Knepp castle
---
The red walking route around Knepp.
|
// Creature Creator - https://github.com/daniellochner/SPORE-Creature-Creator
// Version: 1.0.0
// Author: Daniel Lochner
using System;
using UnityEngine;
using UnityEngine.Events;
namespace DanielLochner.Assets.CreatureCreator
{
    /// <summary>
    /// Lets the user drag this object with the mouse. Movement is projected
    /// onto a plane (aligned with the camera, a local direction, or a world
    /// direction), optionally restricted to local axes and clamped to world
    /// bounds and a maximum distance from the press position.
    /// </summary>
    public class Drag : MonoBehaviour
    {
        #region Fields
        // How the drag plane is oriented (see UpdatePlane()).
        [SerializeField] private MousePlaneAlignment mousePlaneAlignment = MousePlaneAlignment.ToLocalDirection;
        [SerializeField] private Vector3 localDirection = new Vector3(1, 0, 0);
        [SerializeField] private Vector3 worldDirection = new Vector3(1, 0, 0);
        [Space]
        // Maximum distance the object may travel from its press position.
        [SerializeField] private float maxDistance = Mathf.Infinity;
        // Which LOCAL axes the object is allowed to move along.
        [SerializeField] private EnabledAxes localMovement = new EnabledAxes()
        {
            x = true,
            y = true,
            z = true
        };
        // WORLD-space clamp region for the dragged position.
        // NOTE(review): the clamp below uses center +/- extents/2 and the
        // gizmo draws a cube of size == extents, which are consistent with
        // each other but half/quarter of the conventional Bounds semantics
        // (extents is already half of size) -- confirm intended region.
        [SerializeField] private Bounds worldBounds = new Bounds(Vector3.zero, Mathf.Infinity * Vector3.one);
        [Space]
        [SerializeField] private float smoothing = 0f;
        // Snap back to the press position when the mouse is released.
        [SerializeField] private bool resetOnRelease = false;
        [SerializeField] private bool useOffsetPosition = true;
        [SerializeField] private bool draggable = true;
        // Rebuild the drag plane at the object's position on each press.
        [SerializeField] private bool dragFromPosition = false;

        private Vector3 startWorldPosition, targetWorldPosition, offsetPosition;
        private Camera mainCamera;  // cached in Awake(); Camera.main is slow per-frame
        private Plane plane;
        #endregion

        #region Properties
        public UnityEvent OnPress { get; set; } = new UnityEvent();
        public UnityEvent OnRelease { get; set; } = new UnityEvent();
        public UnityEvent OnDrag { get; set; } = new UnityEvent();

        public bool IsPressing { get; set; }
        public Vector3 TargetWorldPosition { get { return targetWorldPosition; } }
        public bool UseOffsetPosition { get { return useOffsetPosition; } set { useOffsetPosition = value; } }
        public Vector3 TargetMousePosition { get; private set; }
        public Plane Plane { get { return plane; } set { plane = value; } }
        public bool Draggable { get { return draggable; } set { draggable = value; } }
        public Bounds WorldBounds { get { return worldBounds; } set { worldBounds = value; } }
        #endregion

        #region Methods
        private void Awake()
        {
            mainCamera = Camera.main;
            UpdatePlane();
        }

        private void Update()
        {
            if (Input.GetMouseButtonUp(0) && IsPressing) // "OnMouseUp()" is unreliable.
            {
                if (resetOnRelease)
                {
                    transform.position = startWorldPosition;
                }
                OnRelease.Invoke();
                IsPressing = false;
            }
        }

        private void FixedUpdate()
        {
            if (IsPressing)
            {
                // Cast the mouse ray against the drag plane to get the
                // desired world position for this frame.
                Ray ray = mainCamera.ScreenPointToRay(Input.mousePosition);
                if (plane.Raycast(ray, out float distance))
                {
                    // Restrict movement along the specified LOCAL axes, then
                    // clamp the targeted position based on the specified WORLD
                    // bounds and maximum distance from starting position.
                    Vector3 targetLocalPosition = transform.InverseTransformPoint(TargetMousePosition = ray.GetPoint(distance)) - offsetPosition;
                    targetLocalPosition.x = localMovement.x ? targetLocalPosition.x : 0;
                    targetLocalPosition.y = localMovement.y ? targetLocalPosition.y : 0;
                    targetLocalPosition.z = localMovement.z ? targetLocalPosition.z : 0;

                    targetWorldPosition = startWorldPosition + Vector3.ClampMagnitude(transform.TransformPoint(targetLocalPosition) - startWorldPosition, maxDistance);
                    targetWorldPosition.x = Mathf.Clamp(targetWorldPosition.x, worldBounds.center.x - worldBounds.extents.x / 2f, worldBounds.center.x + worldBounds.extents.x / 2f);
                    targetWorldPosition.y = Mathf.Clamp(targetWorldPosition.y, worldBounds.center.y - worldBounds.extents.y / 2f, worldBounds.center.y + worldBounds.extents.y / 2f);
                    targetWorldPosition.z = Mathf.Clamp(targetWorldPosition.z, worldBounds.center.z - worldBounds.extents.z / 2f, worldBounds.center.z + worldBounds.extents.z / 2f);

                    if (draggable) { transform.position = targetWorldPosition; }

                    OnDrag.Invoke();
                }
            }
        }

        public void OnMouseDown()
        {
            // Optionally re-anchor the drag plane at the current position,
            // then record the press position and grab offset.
            if (dragFromPosition) UpdatePlane();

            Ray ray = mainCamera.ScreenPointToRay(Input.mousePosition);
            if (plane.Raycast(ray, out float distance))
            {
                startWorldPosition = transform.position;
                if (UseOffsetPosition) { offsetPosition = transform.InverseTransformPoint(ray.GetPoint(distance)); }
            }

            OnPress.Invoke();
            IsPressing = true;
        }

        private void OnDrawGizmos()
        {
#if UNITY_EDITOR
            // NOTE(review): DrawWireCube expects a full size; extents is
            // half of Bounds.size. This matches the clamp above but not
            // standard Bounds usage -- confirm before "fixing" either.
            if (UnityEditor.Selection.activeTransform == transform)
            {
                Gizmos.DrawWireCube(worldBounds.center, worldBounds.extents);
            }
#endif
        }

        // Rebuilds the drag plane through the object's current position,
        // oriented per mousePlaneAlignment.
        private void UpdatePlane()
        {
            if (mousePlaneAlignment == MousePlaneAlignment.WithCamera)
            {
                plane = new Plane(mainCamera.transform.forward, transform.position);
            }
            else if (mousePlaneAlignment == MousePlaneAlignment.ToLocalDirection)
            {
                plane = new Plane(transform.TransformDirection(localDirection), transform.position);
            }
            else if (mousePlaneAlignment == MousePlaneAlignment.ToWorldDirection)
            {
                plane = new Plane(worldDirection, transform.position);
            }
        }
        #endregion

        #region Enumerators
        public enum MousePlaneAlignment
        {
            ToLocalDirection,
            ToWorldDirection,
            WithCamera
        }
        #endregion

        #region Inner Classes
        [Serializable] public class EnabledAxes
        {
            public bool x, y, z;
        }
        #endregion
    }
}
|
<?php
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
declare(strict_types=1);
namespace Elastic\Apm\Impl\Log;
use Elastic\Apm\Impl\Util\ArrayUtil;
use Elastic\Apm\Impl\Util\DbgUtil;
use Elastic\Apm\Impl\Util\StaticClassTrait;
use Elastic\Apm\Impl\Util\TextUtil;
use ReflectionClass;
use ReflectionException;
use Throwable;
/**
* Code in this file is part of implementation internals and thus it is not covered by the backward compatibility.
*
* @internal
*/
final class LoggableToJsonEncodable
{
    use StaticClassTrait;

    // Cache size watermarks: when the cache reaches the high mark it is
    // trimmed back down to the low mark (oldest entries dropped first).
    private const IS_DTO_OBJECT_CACHE_MAX_COUNT_LOW_WATER_MARK = 10000;
    private const IS_DTO_OBJECT_CACHE_MAX_COUNT_HIGH_WATER_MARK
        = 2 * self::IS_DTO_OBJECT_CACHE_MAX_COUNT_LOW_WATER_MARK;

    /** @var array<string, bool> */
    private static $isDtoObjectCache = [];

    /**
     * Recursively converts an arbitrary value into data safe to pass to
     * json_encode (scalars, lists, and string-keyed maps).
     *
     * @param mixed $value
     *
     * @return mixed
     */
    public static function convert($value)
    {
        if (is_null($value)) {
            return null;
        }

        // Scalar variables are those containing an int, float, string or bool.
        // Types array, object and resource are not scalar.
        if (is_scalar($value)) {
            return $value;
        }

        if (is_array($value)) {
            return self::convertArray($value);
        }

        if (is_resource($value)) {
            return self::convertOpenResource($value);
        }

        if (is_object($value)) {
            return self::convertObject($value);
        }

        // Fallback for anything else: record the type and string form.
        return [LogConsts::TYPE_KEY => DbgUtil::getType($value), LogConsts::VALUE_AS_STRING_KEY => strval($value)];
    }

    /**
     * @param array<mixed, mixed> $array
     *
     * @return array<mixed, mixed>
     */
    private static function convertArray(array $array)
    {
        return self::convertArrayImpl($array, self::isListArray($array));
    }

    /**
     * True when the keys are exactly 0..count-1 in order (a "list" array).
     *
     * @param array<mixed, mixed> $array
     *
     * @return bool
     */
    private static function isListArray(array $array): bool
    {
        $expectedKey = 0;
        foreach ($array as $key => $_) {
            if ($key !== $expectedKey++) {
                return false;
            }
        }
        return true;
    }

    /**
     * Converts an array; large arrays are summarized by their count plus
     * the first and last half-window of elements.
     *
     * @param array<mixed, mixed> $array
     * @param bool                $isListArray
     *
     * @return array<mixed, mixed>
     */
    private static function convertArrayImpl(array $array, bool $isListArray): array
    {
        $arrayCount = count($array);
        $smallArrayMaxCount = $isListArray
            ? LogConsts::SMALL_LIST_ARRAY_MAX_COUNT
            : LogConsts::SMALL_MAP_ARRAY_MAX_COUNT;
        if ($arrayCount <= $smallArrayMaxCount) {
            return self::convertSmallArray($array, $isListArray);
        }

        // NOTE(review): the type tag is LIST_ARRAY_TYPE_VALUE even when
        // $isListArray is false -- this looks like it should be a
        // map-array type constant instead; confirm against LogConsts.
        $result = [LogConsts::TYPE_KEY => LogConsts::LIST_ARRAY_TYPE_VALUE];
        $result[LogConsts::ARRAY_COUNT_KEY] = $arrayCount;

        // Keep the first and last halfOfSmallArrayMaxCount elements,
        // keyed by their index ranges.
        $halfOfSmallArrayMaxCount = intdiv($smallArrayMaxCount, 2);
        $firstElements = array_slice($array, 0, $halfOfSmallArrayMaxCount);
        $result['0-' . intdiv($smallArrayMaxCount, 2)]
            = self::convertSmallArray($firstElements, $isListArray);

        $result[($arrayCount - $halfOfSmallArrayMaxCount) . '-' . $arrayCount]
            = self::convertSmallArray(array_slice($array, -$halfOfSmallArrayMaxCount), $isListArray);

        return $result;
    }

    /**
     * @param array<mixed, mixed> $array
     * @param bool                $isListArray
     *
     * @return array<mixed, mixed>
     */
    private static function convertSmallArray(array $array, bool $isListArray): array
    {
        return $isListArray ? self::convertSmallListArray($array) : self::convertSmallMapArray($array);
    }

    /**
     * @param array<mixed> $listArray
     *
     * @return array<mixed>
     */
    private static function convertSmallListArray(array $listArray): array
    {
        $result = [];
        foreach ($listArray as $value) {
            $result[] = self::convert($value);
        }
        return $result;
    }

    /**
     * @param array<mixed, mixed> $mapArrayValue
     *
     * @return array<mixed, mixed>
     */
    private static function convertSmallMapArray(array $mapArrayValue): array
    {
        return self::isStringKeysMapArray($mapArrayValue)
            ? self::convertSmallStringKeysMapArray($mapArrayValue)
            : self::convertSmallMixedKeysMapArray($mapArrayValue);
    }

    /**
     * @param array<mixed, mixed> $mapArrayValue
     *
     * @return bool
     */
    private static function isStringKeysMapArray(array $mapArrayValue): bool
    {
        foreach ($mapArrayValue as $key => $_) {
            if (!is_string($key)) {
                return false;
            }
        }
        return true;
    }

    /**
     * @param array<mixed, mixed> $mapArrayValue
     *
     * @return array<mixed, mixed>
     */
    private static function convertSmallStringKeysMapArray(array $mapArrayValue): array
    {
        $result = [];
        foreach ($mapArrayValue as $key => $value) {
            $result[$key] = self::convert($value);
        }
        return $result;
    }

    /**
     * Non-string keys cannot be JSON object keys, so each entry becomes a
     * [convertedKey, convertedValue] pair in a list.
     *
     * @param array<mixed, mixed> $mapArrayValue
     *
     * @return array<mixed, mixed>
     */
    private static function convertSmallMixedKeysMapArray(array $mapArrayValue): array
    {
        $result = [];
        foreach ($mapArrayValue as $key => $value) {
            $result[] = [self::convert($key), self::convert($value)];
        }
        return $result;
    }

    /**
     * @param resource $resource
     *
     * @return mixed
     */
    private static function convertOpenResource($resource)
    {
        return [
            LogConsts::TYPE_KEY          => LogConsts::RESOURCE_TYPE_VALUE,
            LogConsts::RESOURCE_TYPE_KEY => get_resource_type($resource),
            LogConsts::RESOURCE_ID_KEY   => intval($resource),
        ];
    }

    /**
     * Converts an object, preferring (in order): its LoggableInterface
     * implementation, Throwable rendering, DTO property dump (for
     * all-public-property classes under the Elastic\Apm namespace),
     * __debugInfo(), __toString(), and finally an identity-only record.
     *
     * @param object $object
     *
     * @return mixed
     */
    private static function convertObject(object $object)
    {
        if ($object instanceof LoggableInterface) {
            return self::convertLoggable($object);
        }

        if ($object instanceof Throwable) {
            return self::convertThrowable($object);
        }

        if (TextUtil::isPrefixOf('Elastic\\Apm\\', get_class($object)) && self::isDtoObject($object)) {
            return self::convertDtoObject($object);
        }

        if (method_exists($object, '__debugInfo')) {
            return [
                LogConsts::TYPE_KEY                => get_class($object),
                LogConsts::VALUE_AS_DEBUG_INFO_KEY => self::convert($object->__debugInfo()),
            ];
        }

        if (method_exists($object, '__toString')) {
            return [
                LogConsts::TYPE_KEY            => get_class($object),
                LogConsts::VALUE_AS_STRING_KEY => self::convert($object->__toString()),
            ];
        }

        return [
            LogConsts::TYPE_KEY        => get_class($object),
            LogConsts::OBJECT_ID_KEY   => spl_object_id($object),
            LogConsts::OBJECT_HASH_KEY => spl_object_hash($object),
        ];
    }

    /**
     * @param LoggableInterface $loggable
     *
     * @return mixed
     */
    private static function convertLoggable(LoggableInterface $loggable)
    {
        $logStream = new LogStream();
        $loggable->toLog($logStream);
        return self::convert($logStream->value);
    }

    /**
     * @param Throwable $throwable
     *
     * @return mixed
     */
    private static function convertThrowable(Throwable $throwable)
    {
        return [
            LogConsts::TYPE_KEY            => get_class($throwable),
            LogConsts::VALUE_AS_STRING_KEY => self::convert($throwable->__toString()),
        ];
    }

    /**
     * Dumps all non-static properties of the object, walking up the class
     * hierarchy. Only called for classes isDtoObject() verified to have
     * exclusively public instance properties.
     *
     * @param object $object
     *
     * @return mixed
     */
    private static function convertDtoObject(object $object)
    {
        $class = get_class($object);
        try {
            $currentClass = new ReflectionClass($class);
        } catch (ReflectionException $ex) {
            return LoggingSubsystem::onInternalFailure('Failed to reflect', ['class' => $class], $ex);
        }

        $nameToValue = [];
        while (true) {
            foreach ($currentClass->getProperties() as $reflectionProperty) {
                if ($reflectionProperty->isStatic()) {
                    continue;
                }
                $propName = $reflectionProperty->name;
                $propValue = $reflectionProperty->getValue($object);
                $nameToValue[$propName] = $propValue;
            }
            $currentClass = $currentClass->getParentClass();
            if ($currentClass === false) {
                break;
            }
        }
        return $nameToValue;
    }

    // Cached wrapper around detectIfDtoObject(), keyed by class name.
    private static function isDtoObject(object $object): bool
    {
        $class = get_class($object);
        $valueInCache = ArrayUtil::getValueIfKeyExistsElse($class, self::$isDtoObjectCache, null);
        if (!is_null($valueInCache)) {
            return $valueInCache;
        }

        $value = self::detectIfDtoObject($class);

        self::addToIsDtoObjectCache($class, $value);

        return $value;
    }

    /**
     * A class counts as a DTO when every non-static property in it and all
     * of its ancestors is public.
     *
     * @param string $className
     * @phpstan-param class-string<mixed> $className
     *
     * @return bool
     */
    private static function detectIfDtoObject(string $className): bool
    {
        try {
            $currentClass = new ReflectionClass($className);
        } catch (ReflectionException $ex) {
            LoggingSubsystem::onInternalFailure('Failed to reflect', ['className' => $className], $ex);
            return false;
        }

        while (true) {
            foreach ($currentClass->getProperties() as $reflectionProperty) {
                if ($reflectionProperty->isStatic()) {
                    continue;
                }
                if (!$reflectionProperty->isPublic()) {
                    return false;
                }
            }
            $currentClass = $currentClass->getParentClass();
            if ($currentClass === false) {
                break;
            }
        }

        return true;
    }

    // Insert into the cache, trimming the oldest entries when the high
    // water mark is reached so the cache cannot grow without bound.
    private static function addToIsDtoObjectCache(string $class, bool $value): void
    {
        $isDtoObjectCacheCount = count(self::$isDtoObjectCache);
        if ($isDtoObjectCacheCount >= self::IS_DTO_OBJECT_CACHE_MAX_COUNT_HIGH_WATER_MARK) {
            self::$isDtoObjectCache = array_slice(
                self::$isDtoObjectCache,
                $isDtoObjectCacheCount - self::IS_DTO_OBJECT_CACHE_MAX_COUNT_LOW_WATER_MARK
            );
        }

        self::$isDtoObjectCache[$class] = $value;
    }
}
|
# -*- coding: utf-8 -*-
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import logging
# Python 3 exposes urlparse in urllib.parse; fall back to the Python 2
# location. Catch only ImportError -- the original bare "except:" would
# also have swallowed SystemExit/KeyboardInterrupt.
try:
    from urllib.parse import urlparse
except ImportError:  # pragma: no cover - Python 2 fallback
    from urlparse import urlparse
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
from resilient_lib import validate_fields, ResultPayload
PACKAGE_NAME = "fn_url_to_dns"
LOG = logging.getLogger(__name__)
class FunctionComponent(ResilientComponent):
    """Component that implements Resilient function 'url_to_dns'."""

    def __init__(self, opts):
        """Constructor provides access to the configuration options."""
        super(FunctionComponent, self).__init__(opts)
        self.options = opts.get(PACKAGE_NAME, {})

    @handler("reload")
    def _reload(self, event, opts):
        """Configuration options have changed, save new values."""
        self.options = opts.get(PACKAGE_NAME, {})

    @function("url_to_dns")
    def _url_to_dns_function(self, event, *args, **kwargs):
        """Function: url_to_dns parses a URL string and returns a string
        containing the DNS (network location) value.

        Input: urltodns_url (text) -- the URL to parse.
        Result content: {"dns": <host[:port]>}.
        """
        try:
            # Initialize the results payload
            rp = ResultPayload(PACKAGE_NAME, **kwargs)

            # Validate fields
            validate_fields(['urltodns_url'], kwargs)

            # Get the function parameters:
            urltodns_url = kwargs.get("urltodns_url")  # text

            LOG.info("urltodns_url: %s", urltodns_url)

            yield StatusMessage("starting...")

            dns = urlparse(urltodns_url).netloc
            if not dns:
                # urlparse() only populates netloc when the URL carries a
                # scheme (or starts with "//"); retry as a scheme-relative
                # URL so bare inputs like "example.com/path" still yield
                # their host instead of an empty string.
                dns = urlparse("//{0}".format(urltodns_url)).netloc

            yield StatusMessage("Integration Complete")

            content = {
                "dns": dns
            }
            results = rp.done(True, content)

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception as err:
            LOG.error(err)
            yield FunctionError(err)
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Language.Exalog.SrcLoc
( SrcLoc(..)
, InputSource(..)
, SrcSpan(..)
, transSpan
, listSpan
, prettySpan
, Spannable(..)
) where
import Protolude hiding ((<>), empty, SrcLoc)
import Data.Text (lines, justifyLeft, pack, unpack)
import Text.PrettyPrint
import Language.Exalog.Pretty.Helper
-- | A point in an input source: line and column.
data SrcLoc =
  SrcLoc
    { _line :: !Int
    , _col :: !Int
    }
  deriving (Eq, Ord, Show)

-- | Where the input came from: a file, standard input, or nowhere.
data InputSource = File !FilePath | Stdin | None deriving (Eq, Ord, Show)

-- | A region of an input source (start and end locations), or no region.
data SrcSpan =
    Span !InputSource !SrcLoc !SrcLoc
  | NoSpan
  deriving (Eq, Ord, Show)
-- | Whether the first location occurs strictly earlier: on an earlier
-- line, or on the same line at an earlier column.
isBefore :: SrcLoc -> SrcLoc -> Bool
isBefore (SrcLoc line1 col1) (SrcLoc line2 col2) =
  (line1, col1) < (line2, col2)
-- | Merge two consecutive spans into a single covering span.
--
-- 'NoSpan' acts as an identity on either side. Both spans must come from
-- the same input source and the first must end before the second starts;
-- violating either invariant panics. Note that the second equation
-- pattern-matches on its second argument, so that argument is forced.
transSpan :: SrcSpan -> SrcSpan -> SrcSpan
transSpan NoSpan sp = sp
transSpan sp NoSpan = sp
transSpan (Span file1 loc1 loc2) (Span file2 loc2' loc3)
  | file1 /= file2 = panic "Trying to compute transitive span of two different files."
  | loc2 `isBefore` loc2' = Span file1 loc1 loc3
  | otherwise = panic "The first span is not before the second."
-- | Combine a list of spans into one covering span. Panics on the empty
-- list, for which no span is defined.
--
-- The original definition, @foldr transSpan (panic ...)@, panicked on
-- /every/ input: 'transSpan' pattern-matches on its second argument, so
-- the panicking base case was forced even for non-empty lists. Handle
-- the empty list explicitly and fold the rest without a base case.
listSpan :: [ SrcSpan ] -> SrcSpan
listSpan [] = panic "A span of an empty list of spans is undefined."
listSpan (sp : sps) = foldl' transSpan sp sps
--------------------------------------------------------------------------------
-- Spans of various nodes
--------------------------------------------------------------------------------
-- | Things that carry a source span.
class Spannable a where
  span :: a -> SrcSpan

-- Any record with a @_span@ field of type 'SrcSpan' is trivially 'Spannable'.
instance {-# OVERLAPPABLE #-} HasField "_span" r SrcSpan => Spannable r where
  span = getField @"_span"

-- |Unsafe: combines the component spans with 'transSpan', which panics if
-- they come from different sources or are not in order.
instance {-# OVERLAPPING #-} (Spannable a, Spannable b) => Spannable (a,b) where
  span (a,b) = transSpan (span a) (span b)

-- |Unsafe: 'listSpan' panics on an empty list.
instance {-# OVERLAPPING #-} Spannable a => Spannable [ a ] where
  span as = listSpan (map span as)

-- 'Void' has no inhabitants, so this instance can never actually run.
instance Spannable Void where
  span = absurd
-- | Render the source context covered by a span: the spanned source lines,
-- each prefixed with a left-justified line number, followed by a caret line
-- (@^^^@) under the offending columns. The caret line is only emitted for
-- single-line spans. Produces 'mempty' for 'NoSpan'.
prettySpan :: Text -> SrcSpan -> Doc
prettySpan _ NoSpan = mempty
prettySpan src (Span _ loc1 loc2) = vcat
  [ "Context:"
  , vcat $ map (uncurry contextLine) contextLines
  , if nOfLines == 1
      then hcat
         $ replicate 6 " " -- ^ Line number gap
        ++ replicate (_col loc1 - 1) " " -- ^ Up to the beginning of the error
        ++ replicate nOfCols "^" -- ^ Highlight
      else mempty
  ]
  where
  -- All source lines paired with their 1-based line numbers.
  contents = zip [(1 :: Int)..] . lines $ src
  -- Just the lines the span covers.
  contextLines = take nOfLines $ drop (_line loc1 - 1) contents
  -- One rendered context line: 6-column line number then the source text.
  contextLine ix line =
    (text . unpack . justifyLeft 6 ' ' . pack . show) ix <> (text . unpack) line
  nOfLines = _line loc2 - _line loc1 + 1
  nOfCols = _col loc2 - _col loc1 + 1
--------------------------------------------------------------------------------
-- Pretty instances
--------------------------------------------------------------------------------
-- Renders a location as "line:col".
instance Pretty SrcLoc where
  pretty SrcLoc{..} = int _line <> colon <> int _col

-- |This really ought to be better.
-- Renders a span as "source: start-end"; 'NoSpan' renders as nothing.
instance Pretty SrcSpan where
  pretty (Span file loc1 loc2) =
    (pretty file <?> colon) <+> pretty loc1 <> "-" <> pretty loc2
  pretty NoSpan = mempty

instance Pretty (Maybe SrcSpan) where
  pretty Nothing = empty
  pretty (Just s) = pretty s

instance Pretty InputSource where
  pretty (File file) = text file
  pretty Stdin = "STDIN"
  pretty None = mempty
|
--- lib/vmCheck/vmcheck.c.orig 2020-10-16 23:15:58 UTC
+++ lib/vmCheck/vmcheck.c
@@ -144,6 +144,7 @@ VmCheckSafe(SafeCheckFn checkFn)
#else
do {
int signals[] = {
+ SIGBUS,
SIGILL,
SIGSEGV,
};
|
<?php
namespace App\Jobs;
use Illuminate\Bus\Queueable;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Foundation\Bus\Dispatchable;
use Illuminate\Queue\InteractsWithQueue;
use Illuminate\Queue\SerializesModels;
use Illuminate\Support\Facades\DB;
use App\post;
use Illuminate\Database\Eloquent\Model;
/**
 * Queued job that releases a rented post back to the available pool.
 *
 * NOTE(review): status values are magic numbers here — presumably
 * 1 = available, 2 = rented, 3 = rental finished. Confirm against the
 * domain model before relying on this reading.
 */
class changeStatus implements ShouldQueue
{
    use Dispatchable, InteractsWithQueue, Queueable, SerializesModels;

    /** ID of the post whose status should be changed. */
    protected $idPost;

    /** ID of the renting user whose pivot row is updated. */
    protected $idUser;

    /**
     * Create a new job instance.
     *
     * @param mixed $id     Post ID.
     * @param mixed $userID User ID.
     * @return void
     */
    public function __construct($id, $userID)
    {
        $this->idPost = $id;
        $this->idUser = $userID;
    }

    /**
     * Execute the job.
     *
     * If the post is currently rented (status 2), mark the user's rental
     * pivot row as finished (status 3) and make the post available again
     * (status 1). Otherwise do nothing.
     *
     * @return void
     */
    public function handle()
    {
        $post = post::find($this->idPost);

        // Robustness fix: the post may have been deleted between dispatch and
        // execution of this queued job; the original code dereferenced a null
        // model here and crashed the worker.
        if ($post === null) {
            return;
        }

        if ($post->status == 2) {
            $post->userRents()->updateExistingPivot($this->idUser, ['status' => 3]);
            $post->status = 1;
            $post->save();
        }
    }
}
|
# gostackamount
Golang で作ったプログラムの,各 goroutine が消費するスタックサイズを見積もるためのツールです.
pprof で取得できる goroutine のスタックフレーム情報を,バイナリを解析して得られた関数のスタック消費量に照らし合わせて算出します.
## 用途
Golang 製プログラムのスタック消費量を見積もりたい場合に使えます.
Golang 標準の [pprof](https://golang.org/pkg/net/http/pprof/) パッケージではヒープメモリの詳細なプロファイルを取ることが可能ですが,スタックの使用状況に関する情報は得られません.
本ツールにより各 goroutine により何バイトのスタックが消費されているかを見積もることができます.
[MemStats](https://golang.org/pkg/runtime/#MemStats) により得られる StackInuse 値が大きい場合,このツールの解析結果が役立つはずです.
MemStats は [expvar](https://golang.org/pkg/expvar/) パッケージを利用すると簡単に取得できます.
## 使い方
まず,対象とする Golang プログラムを逆アセンブルして関数毎のスタック消費量を見積もります.
$ objdump -d -M intel your-binary | ./stack_amount.py > stack_amount.tsv
`your-binary` は対象とする Golang 製プログラムへのパスを指定します.`objdump -d` は対象のプログラムを逆アセンブルします.
`stack_amount.py` コマンドは,現状では x86-64 向けバイナリ(の逆アセンブル結果)にのみ対応しています.
出力された `stack_amount.tsv` には,関数名,その関数のアドレス範囲,その関数が消費するスタック量が記載されています.
つぎに,pprof を仕込んだ対象プログラムを起動し,goroutine のダンプを取得します.
一瞬で終了してしまうプログラムではこの手法は使えません.
$ curl -s http://localhost:6060/debug/pprof/goroutine?debug=1 > goroutine.txt
最後に,2 つの情報を組み合わせてスタックフレーム毎のスタック消費量を算出します.
$ ./goroutine_stack_amount.py stack_amount.tsv goroutine.txt
## 出力の読み方
`goroutine_stack_amount.py` コマンドの出力は次のようになります.
1 @ 0x42e01a 0x42e0ce 0x449b96 0x6d7e88 0x42dbc2 0x45aa41
# 0x42e01a runtime.gopark stack:32
# 0x42e0ce runtime.goparkunlock stack:64
# 0x449b96 time.Sleep stack:96
# 0x6d7e88 main.main stack:32
# 0x42dbc2 runtime.main stack:88
# 0x45aa41 runtime.goexit stack:8
total stack (estimated): 4096
1 @ ...
空行によりスタックフレームが区切られています.1 つのスタックフレームについて説明します.
1 行目の形式は `N @ 関数アドレス群` です.N は,同じスタックフレームを持つ goroutine の数を表します.
多くの goroutine は複数起動され,同じ場所でブロックされることが多いので,このようにまとめることに意味があります.
中間行(`#` から始まる行)はスタックフレームの本体です.
最も上の行が現在実行中(またはブロックされている)関数です.
下に行くほど上位の関数となります.
`stack:M` のように,各関数が消費するスタック量をバイト単位で出力します.
最終行は推定されたスタック消費量をバイト単位で出力します.
## スタック消費量の見積もり
### 関数単位のスタック消費量の見積もり
Golang の x86-64 用処理系は,ほとんどの関数について必要なスタックフレームを関数の先頭で確保するようにしているようです.
典型的には,次のように `sub` 命令を使ってスタックフレームを確保します.
sub rsp, 0x10
関数の先頭で 1 度だけ `rsp` から値を引くことでスタックフレームを確保します.
`stack_amount.py` コマンドの基本戦略は,その関数内で行われる `sub rsp` を見つけ出し,その第 2 オペランドの値を取得することです.
ほとんどのスタック消費は `sub rsp` により行われますが,それだけではありません.
関数は `call` 命令によって呼び出されるので,`call` 命令によるスタック消費(8 バイト)も考慮する必要があります.
まれに,`sub rsp` ではなく `push` 命令を使ってスタックを消費することもあるようです.
念のため,`stack_amount.py` は `push` が 8 バイトを消費すると仮定してスタック消費量としてカウントします.
以上をまとめると,`stack_amount.py` コマンドは次の値の和をその関数のスタック消費量として推定します.
- `sub rsp` の第 2 オペランド
- `push` が消費する 8 バイト
- `call` のための 8 バイト
### goroutine 単位のスタック消費量の見積もり
goroutine はデフォルトで 2KiB のスタックを持ちます.
プログラムの実行に従ってスタックが消費され,足りなくなると 2 倍ずつ増えていきます.
Golang ではスタック領域はヒープ領域から取得されます.
goroutine が終了すると,一部のスタックはヒープ領域に戻されます.
スタック消費量の推定は,次のロジックで行います.
- ある goroutine のスタックフレームの各関数が消費するスタック(M)の総和が 2KiB 未満なら,goroutine 1 つが消費するスタック量は 2KiB とする.
- 各関数が消費するスタックの総和が 2KiB 以上であれば,2 の冪数に切り上げた数値をスタック量とする.
これで得られた 1 つのスタックフレームのスタック消費量に,そのスタックフレームを持つ goroutine の数(N)を掛けた数値を `total stack` として表示します.
|
import React from 'react';
import { ErrorContent } from '@daniel.neuweiler/react-lib-module';
import { Box } from '@mui/material';
/** Props for the ErrorPage component; both values fall back to 'Unknown'. */
interface ILocalProps {
  /** Name of the component/module that raised the error. */
  sourceName?: string;
  /** Human-readable error description to display. */
  errorMessage?: string;
}
type Props = ILocalProps;
/**
 * Full-size page that centers an ErrorContent panel both horizontally
 * and vertically.
 */
const ErrorPage: React.FC<Props> = (props) => {

  // Fall back to 'Unknown' for props the caller left out. Destructuring
  // defaults fire only on `undefined`, matching the original explicit
  // `!== undefined` checks.
  const { sourceName = 'Unknown', errorMessage = 'Unknown' } = props;

  // Flex-box styling that fills the parent and centers the content.
  const containerSx = {
    height: '100%',
    width: '100%',
    display: 'flex',
    flexDirection: 'column',
    alignItems: 'center',
    alignContent: 'center',
    justifyItems: 'center',
    justifyContent: 'center'
  };

  return (
    <Box sx={containerSx}>
      <ErrorContent
        sourceName={sourceName}
        errorMessage={errorMessage} />
    </Box>
  );
}

export default ErrorPage;
|
import { IEthEventsClient } from './eth.events.client.interface';
import { IEthTestnetEventsClient } from './eth.testnet.events.client.interface';
/**
 * Mainnet Ethereum events client, extended with access to the testnet
 * events client via the `testnet` property.
 */
export interface IEthEvents extends IEthEventsClient {
  /** Events client bound to the Ethereum testnet. */
  testnet: IEthTestnetEventsClient;
}
|
/*
* Copyright 2003-2016 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marklogic.io;
import java.io.DataInput;
import java.io.EOFException;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Decoder used to decode compressed tree data.
*
* @author jchen
*/
public class Decoder {
    public static final Log LOG = LogFactory.getLog(Decoder.class);

    // Source of big-endian 32-bit words that are folded into the bit register.
    DataInput in;
    // Number of unconsumed bits currently buffered in reg.
    int numBitsInReg = 0;
    // Bit register; codes are consumed from the least-significant end.
    public long reg = 0;

    /**
     * Creates a decoder reading compressed tree data from the given input.
     *
     * @param in input supplying 32-bit words of encoded data
     */
    public Decoder(DataInput in) {
        this.in = in;
    }

    /**
     * Refills the bit register with the next 32 bits from the input,
     * appending them above the bits already buffered.
     * On EOF the register is deliberately left unchanged (best-effort);
     * callers that then consume more bits than are buffered see zeros.
     */
    private void load() throws IOException {
        try {
            long bits = in.readInt() & 0xffffffffL;
            reg |= bits << numBitsInReg;
            numBitsInReg += 32;
        } catch (EOFException e) {
            return;
        }
    }

    /**
     * Reads one raw 32-bit word directly into array[index], bypassing the
     * bit register.
     *
     * @return true on success, false on EOF or I/O error (the error is
     *         swallowed — callers only learn that the read failed)
     */
    private boolean load32(int[] array, int index) {
        try {
            array[index] = in.readInt();
            return true;
        } catch (EOFException e) {
            return false;
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Decodes a 64-bit value as two variable-length unsigned codes:
     * low 32 bits first, then high 32 bits.
     */
    public long decodeUnsignedLong() throws IOException {
        long lobits = decodeUnsigned() & 0xffffffffL;
        long hibits = decodeUnsigned() & 0xffffffffL;
        return (hibits<<32)|lobits;
    }

    /** Decodes 64 raw bits as two 32-bit words: low word first, then high. */
    public long decode64bits() throws IOException {
        long lobits = decode32bits() & 0xffffffffL;
        long hibits = decode32bits() & 0xffffffffL;
        return (hibits << 32) | lobits;
    }

    /**
     * Decodes 32 raw bits. Reads straight from the input when the register
     * is empty; otherwise consumes (refilling first if fewer than 32 bits
     * are buffered) from the low end of the register.
     */
    public int decode32bits() throws IOException {
        if (numBitsInReg == 0)
            return in.readInt();
        if (32 > numBitsInReg)
            load();
        int val = (int)(reg & 0xffffffffL);
        reg >>>= 32;
        numBitsInReg -= 32;
        return val;
    }

    /** Decodes a double stored as two raw 32-bit words (low word first). */
    public double decodeDouble() throws IOException {
        long lobits = decode32bits() & 0xffffffffL;
        long hibits = decode32bits() & 0xffffffffL;
        long bits = (hibits << 32) | lobits;
        return Double.longBitsToDouble(bits);
    }

    /**
     * Re-aligns the register to a 32-bit word boundary: fewer than 32
     * buffered bits are discarded entirely; otherwise the buffered bits are
     * truncated down to 32.
     * NOTE(review): uses the sign-propagating {@code >>} here while every
     * other consumer uses {@code >>>} — harmless only if the top bits can
     * never be set at this point; confirm.
     */
    public void realign()
    {
        if (numBitsInReg < 32) {
            reg = 0;
            numBitsInReg = 0;
        } else {
            reg >>= (numBitsInReg - 32);
            numBitsInReg = 32;
        }
    }

    /**
     * Decodes raw 32-bit words into array[i..count-1].
     * Note that {@code count} behaves as an exclusive end index relative to
     * {@code i}, not a length (see the loop conditions).
     */
    public void decode(int[] array, int i, int count) throws IOException {
        if (count <= 4) {
            // Small runs go through the bit register.
            for (; i < count; i++) {
                array[i] = decode32bits();
            }
        } else {
            // Larger runs: flush the register to a word boundary, emit any
            // remaining buffered word, then read directly from the input.
            realign();
            if (numBitsInReg==32) {
                array[i++] = (int)reg;
                reg = 0;
                numBitsInReg = 0;
            }
            for (; i<count; ++i) {
                // NOTE(review): this breaks after the FIRST *successful*
                // read, leaving array[i+1..count-1] unfilled; a break on
                // failure (if (!load32(array, i)) break;) looks more
                // plausible — verify against upstream before changing.
                if (load32(array, i)) break;
            }
        }
    }

    /**
     * Decodes one unary code (v one-bits followed by a zero bit, LSB first)
     * and returns v. Codes up to 8 bits are decoded via the unary1 table,
     * up to 16 bits via unary2, and longer codes fall back to the
     * bit-by-bit loop.
     */
    public int decodeUnary() throws IOException {
        if (numBitsInReg < 16)
            load();
        int e, n, v;
        if ((n = (e = unary1[((int)reg & 0xff)]&0xff) + 1) <= 8)
            v = e;
        else if ((n = (e = unary2[((int)reg & 0xffff)]&0xff) + 1) <= 16)
            v = e;
        else
            return _decodeUnary();
        reg >>>= n;
        numBitsInReg -= n;
        return v;
    }

    /**
     * Slow path for {@link #decodeUnary()}: scans bit by bit for the
     * terminating zero, growing the window until the low i bits are
     * (i-1) ones followed by a zero.
     */
    int _decodeUnary() throws IOException {
        for (int i = 1;; ++i) {
            if (i > numBitsInReg)
                load();
            long msk = (1L << i) - 1;
            if ((reg & msk) == (msk >>> 1)) {
                reg >>>= i;
                numBitsInReg -= i;
                return i - 1;
            }
        }
    }

    /**
     * Decodes one variable-length unsigned code: a unary prefix selects the
     * number of 4-bit payload groups, followed by the payload itself.
     * Short codes (up to 6/11/16 bits) are decoded in a single step via
     * the unsigned1/2/3 lookup tables; longer codes fall back to
     * {@link #_decodeUnsigned()}.
     */
    public int decodeUnsigned() throws IOException {
        if (numBitsInReg < 16)
            load();
        int e, n, v;
        if ((n = (e = unsigned1[(int)(reg & 0x3fL)]) >>> 5) <= 6)
            v = e & 0x1f;
        else if ((n = (e = unsigned2[(int)(reg & 0x7ffL)]) >>> 12) <= 11)
            v = e & 0xfff;
        else if ((n = (e = unsigned3[(int)(reg & 0xffffL)]) >>> 16) <= 16)
            v = e & 0xffff;
        else
            return _decodeUnsigned();
        reg >>>= n;
        numBitsInReg -= n;
        return v;
    }

    /**
     * Slow path for {@link #decodeUnsigned()}: reads the unary nibble count,
     * then the payload bits, then adds the cumulative bias (one per nibble)
     * so that shorter codes do not overlap longer ones.
     */
    int _decodeUnsigned() throws IOException {
        int nbits = decodeUnary() * 4;
        if (nbits > numBitsInReg)
            load();
        long val = reg & ((nbits < 64) ? ((1L << nbits) - 1) : -1L);
        for (int i = 0; i < nbits; i += 4)
            val += (1L << i);
        reg >>>= nbits;
        numBitsInReg -= nbits;
        return (int)(val & 0xffffffffL);
    }

    /** Decodes n raw bits (LSB first) from the register. */
    public int decodeUnsigned(int n) throws IOException {
        if (n > numBitsInReg)
            load();
        long v = reg & ((n < 64) ? ((1L << n) - 1) : -1L);
        reg >>>= n;
        numBitsInReg -= n;
        return (int)(v & 0xffffffffL);
    }

    /**
     * Builds the 8-bit unary lookup table: table[i] = run length of low one
     * bits before the first zero, or 0xff when no terminator is found
     * within 8 bits.
     */
    private static final byte[] unary1Make() {
        byte table[] = new byte[256];
        for (int i = 0; i < 256; i++) {
            table[i] = (byte)0xff;
            for (int j = 0; j < 8; j++) {
                if ((i & ((1 << (j + 1)) - 1)) != ((1 << j) - 1))
                    continue;
                table[i] = (byte)j;
                break;
            }
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("unary1 ");
            for (int i=0; i<256; i++) {
                LOG.trace(String.format("%02x \n", table[i]));
            }
        }
        return table;
    }

    static final byte unary1[] = unary1Make();

    /** 16-bit version of {@link #unary1Make()}. */
    private static final byte[] unary2Make() {
        byte table[] = new byte[65536];
        for (int i = 0; i < 65536; i++) {
            table[i] = (byte)0xff;
            for (int j = 0; j < 16; j++) {
                if ((i & ((1 << (j + 1)) - 1)) != ((1 << j) - 1))
                    continue;
                table[i] = (byte)j;
                break;
            }
        }
        return table;
    }

    static final byte unary2[] = unary2Make();

    /**
     * Builds the 6-bit unsigned-code table. Each entry packs the total code
     * length (unary prefix + payload) in the top 3 bits and the decoded,
     * bias-corrected value in the low 5 bits; 0xe0 marks codes longer than
     * 6 bits (fall through to a wider table).
     */
    private static final byte[] unsigned1Make() {
        byte table[] = new byte[64];
        for (int i = 0; i < 64; i++) {
            int entry = unary1[i];
            int bbits = entry * 4;
            int ubits = entry + 1;
            if (bbits + ubits > 6)
                table[i] = (byte)0xe0;
            else {
                int val = (i >>> ubits) & ((1 << bbits) - 1);
                for (int j = 0; j < bbits; j += 4)
                    val += (1 << j);
                table[i] = (byte)(((bbits + ubits) << 5) | val);
            }
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("unsigned1 ");
            for (int i=0; i<64; i++) {
                LOG.trace(String.format("%02x \n", table[i]));
            }
        }
        return table;
    }

    static final byte unsigned1[] = unsigned1Make();

    /** 11-bit version of {@link #unsigned1Make()}: length in the top 4 bits. */
    private static final short[] unsigned2Make() {
        short table[] = new short[2048];
        for (int i = 0; i < 2048; i++) {
            int entry = unary2[i];
            int bbits = entry * 4;
            int ubits = entry + 1;
            if (bbits + ubits > 11)
                table[i] = (short)0xf000;
            else {
                int val = (i >>> ubits) & ((1 << bbits) - 1);
                for (int j = 0; j < bbits; j += 4)
                    val += (1 << j);
                table[i] = (short)(((bbits + ubits) << 12) | val);
            }
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("unsigned2 ");
            for (int i=0; i<64; i++) {
                LOG.trace(String.format("%04x \n", table[i]));
            }
        }
        return table;
    }

    static final short unsigned2[] = unsigned2Make();

    /** 16-bit version of {@link #unsigned1Make()}: length in the top 16 bits. */
    private static final int[] unsigned3Make() {
        int table[] = new int[65536];
        for (int i = 0; i < 65536; i++) {
            int entry = unary2[i]&0xff;
            int bbits = entry * 4;
            int ubits = entry + 1;
            if (bbits + ubits > 16)
                table[i] = 0xffff0000;
            else {
                int val = (i >>> ubits) & ((1 << bbits) - 1);
                for (int j = 0; j < bbits; j += 4)
                    val += (1 << j);
                table[i] = (((bbits + ubits) << 16) | val);
            }
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("unsigned3 ");
            for (int i=0; i<64; i++) {
                LOG.trace(String.format("%08x \n", table[i]));
            }
        }
        return table;
    }

    static final int unsigned3[] = unsigned3Make();
}
|
@extends('layouts.bootstrap.container')
@section('page')
{{-- Assets for the jQuery Steps wizard demo (styles first, then scripts). --}}
<link rel="stylesheet" href="{{asset('plugins/steps/jquery.steps.css')}}">
<link rel="stylesheet" href="{{asset('plugins/iCheck/all.css')}}">
<link rel="stylesheet" href="{{asset('plugins/country-select/build/css/countrySelect.css')}}" media="screen">
<link rel="stylesheet" href="{{asset('plugins/intlTelInput/build/css/intlTelInput.css')}}" media="screen">
<link rel="stylesheet" href="{{asset('css/steps.custom.css')}}" media="screen">
<script src="{{asset('plugins/steps/jquery.steps.min.js')}}" charset="utf-8"></script>
<script src="{{asset('plugins/validate/jquery.validate.min.js')}}" charset="utf-8"></script>
<script src="{{asset('plugins/iCheck/icheck.min.js')}}" charset="utf-8"></script>
<script src="{{asset('plugins/country-select/build/js/countrySelect.min.js')}}" charset="utf-8"></script>
<script src="{{asset('plugins/intlTelInput/build/js/intlTelInput.min.js')}}" charset="utf-8"></script>
<script src="{{asset('plugins/intlTelInput/build/js/utils.js')}}" charset="utf-8"></script>
{{-- Wizard step indicator: .current marks the active step, .end the last one. --}}
<div class="steps">
    <ul class="blue 5steps">
        <li class="current"><a href="#"><em>Step 1</em><span>Description 1</span></a></li>
        <li class=""><a href="#"><em>Step 2</em><span>Description 2</span></a></li>
        {{-- Consistency fix: this label was the Spanish "Descripción del paso 1",
             breaking the English "Description N" sequence used by the other steps. --}}
        <li class=""><a href="#"><em>Step 3</em><span>Description 3</span></a></li>
        <li class=""><a href="#"><em>Step 4</em><span></span></a></li>
        <li class="end"><a href="#"><em>Step 5</em><span></span></a></li>
    </ul>
</div>
<span style="clear:both; display:block;"> </span>
{{-- Manual navigation controls driving the steps plugin. --}}
<input class="btn btn-danger" type="button" value="Start over" onclick="$('.steps').steps('start');"/>
<input class="btn btn-warning" type="button" value="<< Prev" onclick="$('.steps').steps('prev');"/>
<input class="btn btn-primary" type="button" value="Next >>" onclick="$('.steps').steps('next');"/>
<input class="btn btn-success" type="button" value="Finish" onclick="$('.steps').steps('finish');"/>
@endsection
|
(ns contrib-dom.core
(:require [cljs.core.async :as a]
[clojure.string :as s]
[contrib.ext.async :as async :refer-macros [loop-chan-js]]
[contrib.ext.core :as ext]
[contrib.talk.emitter :as emitter]
[reagent.core :as r]
[reagent.ratom :refer-macros [reaction]]))
(defn mount
  "Render the react component `main` into the HTML DOM element
  with the given node-id."
  [main node-id]
  (r/render-component main (js/document.getElementById node-id)))
(defn str->node
  "Join the given strings with dots and return the result as a keyword,
  e.g. (str->node \"div\" \"icon\") => :div.icon"
  [& combs]
  (keyword (s/join "." combs)))
(defn get-node
  "Pass the node to f, when the component is mounted. Ensure that the
  node is not mutated as that might cause some problems with react"
  [render f]
  ;; Form-3 reagent component: render stays untouched; f receives the real
  ;; DOM node once after mounting.
  (r/create-class {:reagent-render render
                   :component-did-mount #(f (r/dom-node %))}))
(defn valid-form?
  "True when no field in the form state's :errors map holds a truthy error."
  [{errors :errors}]
  (not-any? identity (vals errors)))
(defn add-class
  "Append a new class to the pre-existing :class-name in props, if any."
  [props cls]
  (update props :class-name
          (fn [old]
            (if (seq old)
              (str old " " cls)
              cls))))
(defn toggle-class
  "When on? is truthy, add cls to props' class list; otherwise return
  props unchanged."
  [props on? cls]
  (cond-> props
    on? (add-class cls)))
(defn on-change
  "Emit :update events whenever a change happens"
  [emitter name]
  (letfn [(notify [value]
            (emitter/emit emitter [:update name value]))]
    ;; DOM change handler: pull event.target.value and forward it.
    #(-> % (aget "target") (aget "value") notify)))

(defn on-click
  "Emit the given action when a button is clicked"
  [emitter action]
  ;; NOTE(review): declared with zero params but installed as a DOM click
  ;; handler, which is invoked with the event object — works under JS call
  ;; semantics, but an explicit [_event] arg would be cleaner; confirm.
  #(emitter/emit emitter [action]))

(defn icon
  "Font-awesome icon; :size (e.g. \"small\") maps to an `is-<size>` class."
  [{:keys [name size]}]
  [(str->node "div" "icon" (if size (str "is-" size) ""))
   [(str->node "i" "fa" (str "fa-" name))]])

(defn header
  "Page header with a brand link; the second arity also renders the given
  right-hand menu items."
  ([{:keys [brand href] :or {href "/"}}]
   [:header.nav.has-shadow.header
    [:div.container
     [:div.nav-left
      [:a.nav-item {:href href}
       [:h1.title.is-brand brand]]]]])
  ([{:keys [brand href] :or {href "/"}} & right]
   [:header.nav.has-shadow.header
    [:div.container
     [:div.nav-left
      [:a.nav-item {:href href}
       [:h1.title.is-brand brand]]]
     ;; Hamburger toggle: three empty spans styled by CSS.
     (into [:div.nav-toggle] (repeat 3 [:span]))
     ;; Wrap each right-hand element in a nav-item (transducer form of into).
     (into [:div.nav-right.nav-menu]
           (map #(-> [:div.nav-item %]))
           right)]]))

(defn button
  "Plain button that emits the :name key of props (via on-click) when clicked."
  [emitter {:keys [name] :as props} & children]
  (into [:button.button (merge props
                               {:type "button"
                                :on-click (on-click emitter name)})]
        children))

(defn async-button
  "Button that shows a loading spinner while (:loading? @state) is truthy."
  [state emitter props & children]
  (let [loading? (reaction (:loading? @state))]
    (fn []
      (into [button emitter (toggle-class props @loading? "is-loading")]
            children))))
(defn update$
  "Event loop: write every [:update name value] event into [:values name]."
  [state emitter]
  (loop-chan-js [[_ n v] (emitter/on-event emitter :update)]
    (swap! state assoc-in [:values n] v)))

(defn validate$
  "Event loop: debounce :update events (200ms pipeline) and run HTML5
  constraint validation on the corresponding DOM node, storing either false
  (valid) or the browser's validation message under [:errors name]."
  [state emitter]
  (let [sub (async/fixed-chan 10 (emitter/by-event :update))
        _ (emitter/on emitter sub)
        out (async/pipeline sub (a/chan 10) 200)
        valid? #(.checkValidity %)
        message #(aget % "validationMessage")]
    (loop-chan-js [[_ n v] out]
      (let [node (get-in @state [:nodes n])]
        (when node
          (if (valid? node)
            (swap! state assoc-in [:errors n] false)
            (swap! state assoc-in [:errors n] (message node))))))))

(defn text
  "Controlled text input bound to [:values name]; turns red (is-danger)
  while [:errors name] is truthy."
  [state emitter {:keys [name type] :or {type "text"} :as props}]
  (let [value (reaction (get-in @state [:values name]))
        invalid? (reaction (get-in @state [:errors name]))]
    (fn []
      [:input.input (-> props
                        (toggle-class @invalid? "is-danger")
                        (merge {:type type
                                :name name
                                :on-change (on-change emitter name)
                                :value @value}))])))

(defn v-text
  "Validated text input: like `text`, but also registers the mounted DOM
  node under [:nodes name] so validate$ can run constraint checks on it."
  [state emitter {name :name :as props}]
  (get-node #(-> [text state emitter props])
            #(swap! state assoc-in [:nodes name] %)))
(defn auto-options
  "Suggestion drop-down for the auto-complete field `name`: renders each
  entry of [:suggestions name] as a clickable option that emits
  [:select name suggestion]; hidden when there are no suggestions and a
  value is already selected.

  BUG FIX: the original read (get-in state ...) on the state atom without
  dereferencing it (every sibling uses @state), tested (nil? selection) on
  the reaction object instead of its value, and tested (not visible?) on the
  always-truthy reaction — so visibility never toggled. All three now deref."
  [state emitter {name :name} render]
  (let [suggestions (reaction (get-in @state [:suggestions name]))
        selection (reaction (get-in @state [:values name]))
        visible? (reaction (or (not-empty @suggestions)
                               (nil? @selection)))]
    (fn []
      (into [:ul.options.box {:class-name (if-not @visible? "is-hidden")}]
            (map (fn [[i s]]
                   [:li.option {:key i
                                :on-click #(emitter/emit emitter [:select name s])}
                    [render s]]))
            (ext/vector-map @suggestions)))))
(defn suggest$
  "Event loop: on every [:update name value] event, recompute the
  suggestions for that field via (search name value) and store them under
  [:suggestions name].

  FIX: removed a leftover js/console.log debug statement that dumped the
  whole suggestions map on every keystroke."
  [state emitter search]
  (loop-chan-js [[_ n v] (emitter/on-event emitter :update)]
    (swap! state assoc-in [:suggestions n] (search n v))))
(defn select$
  "Event loop: on [:select name value], remember the raw selection under
  [:selections name] and store its rendered form under [:values name]."
  [state emitter render]
  (loop-chan-js [[_ n v] (emitter/on-event emitter :select)]
    (swap! state assoc-in [:selections n] v)
    (swap! state assoc-in [:values n] (render n v))))

(defn textarea
  "Controlled textarea bound to [:values name]; mirrors `text` above."
  [state emitter {:keys [name] :as props}]
  (let [value (reaction (get-in @state [:values name]))
        invalid? (reaction (get-in @state [:errors name]))]
    (fn []
      [:textarea.textarea (-> props
                              (toggle-class @invalid? "is-danger")
                              (merge {:name name
                                      :on-change (on-change emitter name)
                                      :value @value}))])))

(defn submit
  "Primary submit button (emits :submit); shows a spinner via async-button."
  ;; NOTE(review): `children` is passed as a single hiccup child (a seq),
  ;; not spliced with `into` — reagent renders seqs, but this nesting looks
  ;; unintentional; confirm before changing.
  [state emitter {:keys [disabled]} & children]
  [async-button state emitter (add-class {:name :submit
                                          :disabled disabled}
                                         "is-primary")
   children])

(defn error
  "Shows the error text stored under [:errors name]; hidden while falsey."
  ;; NOTE(review): the local is named valid? but actually holds the error
  ;; value (false when valid, message when invalid) — rename candidate.
  [state {:keys [name]}]
  (let [valid? (reaction (get-in @state [:errors name]))]
    [:p.help.is-danger (toggle-class {} (not @valid?) "is-hidden") @valid?]))

(defn submit-deck
  "Right-aligned submit row (clearfix wrapper around `submit`)."
  [state emitter props & children]
  [:div.is-clearfix
   [:div.is-pulled-right
    (into [submit state emitter props] children)]])

(defn open-modal$
  "Event loop: set :open? on every :open event."
  [state emitter]
  (loop-chan-js [_ (emitter/on-event emitter :open)]
    (swap! state assoc :open? true)))

(defn close-modal$
  "Event loop: clear :open? on every :close event."
  [state emitter]
  (loop-chan-js [_ (emitter/on-event emitter :close)]
    (swap! state assoc :open? false)))

(defn modal-trigger
  "Button that opens the modal by emitting :open."
  [emitter props & children]
  (into [button emitter (assoc props :name :open)] children))

(defn modal
  "Modal dialog (sizes: small by default); becomes active while
  (:open? @state) is truthy. Clicking the background or the close button
  emits :close."
  [state emitter {:keys [size] :or {size "small"}} & children]
  (let [active? (reaction (:open? @state))]
    #(into [(str->node "div" "modal" (str "is-" size)) {:class-name (if @active? "is-active")}
            [:div.modal-background {:on-click (on-click emitter :close)}]
            (into [:div.modal-content.box] children)
            [button emitter {:name :close
                             :class-name "modal-close"
                             :on-click (on-click emitter :close)}]])))
|
---
title: ExtensionServices
description: documentation for extended functionality in MRTK
author: davidkline-ms
ms.author: davidkl
ms.date: 01/12/2021
keywords: Unity,HoloLens, HoloLens 2, Mixed Reality, development, MRTK,
---
# Extension services
Extension services are components that extend the functionality of the Mixed Reality Toolkit. These services may be provided
by the MRTK or by other parties.
## Creating an extension service
The most efficient way to create an extension service is to use the [extension service creation wizard](../tools/extension-service-creation-wizard.md).
To start the extension service creation wizard, select **Mixed Reality Toolkit > Utilities > Create Extension Service**.

The wizard automates the creation of the service components and ensures the proper interface inheritance.

> [!Note]
> In MRTK version 2.0.0, the extension service wizard has an issue that makes generating the service inspector
and service profile mandatory. Please see issue [5654](https://github.com/microsoft/MixedRealityToolkit-Unity/issues/5654) for more information.
When the wizard completes, the service functionality can be implemented.
## Registering an extension service
To be accessible by an application, the new extension service needs to be registered with the Mixed Reality Toolkit.
The extension service creation wizard can be used to register the service.

The service can also be manually registered using the Mixed Reality Toolkit configuration inspector.

If the extension service uses a profile, please ensure that it is specified in the inspector.

The component name and priority can also be adjusted.
## Accessing an extension service
Extension services are accessed, in code, using the [`MixedRealityServiceRegistry`](xref:Microsoft.MixedReality.Toolkit.MixedRealityServiceRegistry)
as shown in the example below.
```c#
INewService service = null;
if (MixedRealityServiceRegistry.TryGetService<INewService>(out service))
{
// Succeeded in getting the service, perform any desired tasks.
}
```
## See also
- [Systems, extension services and data providers](../../architecture/systems-extensions-providers.md)
- [Extension service creation wizard](../tools/extension-service-creation-wizard.md)
- [IMixedRealityExtensionService](xref:Microsoft.MixedReality.Toolkit.IMixedRealityExtensionService)
- [MixedRealityServiceRegistry](xref:Microsoft.MixedReality.Toolkit.MixedRealityServiceRegistry)
|
using System;
namespace MicroApi.Server
{
    /// <summary>
    /// Fluent builder for configuring and creating an <see cref="IHttpServer"/>.
    /// </summary>
    public interface IHttpServerBuilder
    {
        /// <summary>Sets the URL the server will listen on.</summary>
        IHttpServerBuilder SetHostUrl(string hostUrl);

        /// <summary>
        /// Sets the request processor that maps request text to response text.
        /// NOTE(review): the exact string passed in (body vs. path) is defined
        /// by the implementation — confirm against the server implementation.
        /// </summary>
        IHttpServerBuilder SetProcessor(Func<string, string> expression);

        /// <summary>Creates the configured server instance.</summary>
        IHttpServer Build();
    }
}
|
import functools
import sys
import time

from tornado.testing import AsyncTestCase

import tornadoredis
def get_callable(obj):
    """Return True if obj is callable.

    Uses the builtin ``callable()`` instead of ``hasattr(obj, '__call__')``:
    ``callable()`` checks the type, which matches how Python actually
    dispatches calls (an instance-level ``__call__`` attribute does not make
    an object callable).
    """
    return callable(obj)
def async_test_ex(timeout=5):
    """Decorator factory for async Tornado test methods.

    The decorated method is run, then ``self.wait(timeout=timeout)`` blocks
    until the IOLoop test completes. If the method itself raises, the IOLoop
    is stopped first so the failure propagates instead of hanging the test.

    Improvement over the original: ``functools.wraps`` preserves the wrapped
    method's name/docstring so test runners report the real test name.
    """
    def _inner(func):
        @functools.wraps(func)
        def _runner(self, *args, **kwargs):
            try:
                func(self, *args, **kwargs)
            except:
                # Stop the IOLoop so wait() doesn't hang, then re-raise.
                self.stop()
                raise
            return self.wait(timeout=timeout)
        return _runner
    return _inner
def async_test(func):
    """Shorthand for ``async_test_ex()`` with the default 5-second timeout."""
    return async_test_ex()(func)
class TestRedisClient(tornadoredis.Client):
    """tornadoredis Client that invokes an optional callback on destruction.

    Used by the test suite to detect when a client instance is garbage
    collected (e.g. to verify connection-pool cleanup).
    """
    def __init__(self, *args, **kwargs):
        # Pop the test-only 'on_destroy' kwarg before delegating, so the
        # base Client never sees it.
        self._on_destroy = kwargs.get('on_destroy', None)
        if 'on_destroy' in kwargs:
            del kwargs['on_destroy']
        super(TestRedisClient, self).__init__(*args, **kwargs)

    def __del__(self):
        # NOTE(review): assumes the base tornadoredis.Client defines
        # __del__; if it does not, this raises AttributeError during GC —
        # confirm against the tornadoredis version in use.
        super(TestRedisClient, self).__del__()
        if self._on_destroy:
            self._on_destroy()
# Compatibility shim for Python < 2.7, which lacks unittest's safe_repr
# helper; mirrors the stdlib behavior of truncating very long reprs.
if sys.version_info < (2, 7):
    _MAX_LENGTH = 80  # truncation threshold for 'short' reprs

    def safe_repr(obj, short=False):
        """repr(obj) that never raises and optionally truncates long output."""
        try:
            result = repr(obj)
        except Exception:
            # Fall back to the default object repr if __repr__ itself fails.
            result = object.__repr__(obj)
        if not short or len(result) < _MAX_LENGTH:
            return result
        return result[:_MAX_LENGTH] + ' [truncated]...'
class RedisTestCase(AsyncTestCase):
    """Base class for tornadoredis tests.

    Creates a fresh client against a dedicated test database (db 9) before
    each test and flushes it, so every test starts from an empty keyspace.
    """
    # Dedicated Redis database for tests (flushed on every setUp).
    test_db = 9
    # Default Redis port.
    test_port = 6379

    # Backports of unittest assertions that first appeared in Python 2.7.
    if sys.version_info < (2, 7):
        def assertIn(self, test_value, expected_set):
            msg = "%s did not occur in %s" % (test_value, expected_set)
            self.assert_(test_value in expected_set, msg)

        def assertIsInstance(self, obj, cls, msg=None):
            """Same as self.assertTrue(isinstance(obj, cls)), with a nicer
            default message."""
            if not isinstance(obj, cls):
                standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
                self.fail(self._formatMessage(msg, standardMsg))

        def assertGreater(self, a, b, msg=None):
            """Just like self.assertTrue(a > b), but with a nicer default message."""
            if not a > b:
                standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
                self.fail(self._formatMessage(msg, standardMsg))

        def assertGreaterEqual(self, a, b, msg=None):
            if not a >= b:
                standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
                self.fail(self._formatMessage(msg, standardMsg))

        def assertLessEqual(self, a, b, msg=None):
            if not a <= b:
                standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
                self.fail(self._formatMessage(msg, standardMsg))

    def setUp(self):
        super(RedisTestCase, self).setUp()
        self.client = self._new_client()
        # Start every test from an empty test database.
        self.client.flushdb()

    def tearDown(self):
        try:
            self.client.connection.disconnect()
            del self.client
        except AttributeError:
            # Client/connection may already be gone; tearing down is best-effort.
            pass
        super(RedisTestCase, self).tearDown()

    def _new_client(self, pool=None, on_destroy=None, selected_db=None):
        """Create a TestRedisClient bound to this test's IOLoop and test db."""
        if selected_db is None:
            selected_db = self.test_db
        client = TestRedisClient(io_loop=self.io_loop,
                                 port=self.test_port,
                                 selected_db=selected_db,
                                 connection_pool=pool,
                                 on_destroy=on_destroy)
        return client

    def delayed(self, timeout, cb):
        """Run cb on the IOLoop after `timeout` seconds."""
        self.io_loop.add_timeout(time.time() + timeout, cb)

    def pause(self, timeout=0.1, callback=None):
        """Invoke callback after a short delay (default 100ms)."""
        self.io_loop.add_timeout(time.time() + timeout, callback)
|
# -----------------------------------------------------------------------------
#
# Package	       : xalan-j
# Version	       : xalan-j_2_7_2
# Source repo	   : https://github.com/apache/xalan-j
# Tested on	       : UBI 8.4
# Script License   : Apache License, Version 2 or later
# Maintainer	   : Atharv Phadnis <Atharv.Phadnis@ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ==========  platform using the mentioned version of the package.
#             It may not work as expected with newer versions of the
#             package and/or distribution. In such case, please
#             contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------

PACKAGE_NAME=xalan-j
PACKAGE_VERSION=xalan-j_2_7_2
PACKAGE_URL=https://github.com/apache/xalan-j

# Build prerequisites: JDK 8, git, wget.
yum install -y java-1.8.0-openjdk-devel git wget

# Install ANT
wget https://downloads.apache.org/ant/binaries/apache-ant-1.10.12-bin.tar.gz
tar -xf apache-ant-1.10.12-bin.tar.gz

# Set ANT_HOME variable.
# BUG FIX: the original used ${pwd}, which expands the (empty) shell variable
# "pwd" rather than running the pwd command, so ANT_HOME pointed at the
# non-existent "/apache-ant-1.10.12". $(pwd) is the command substitution.
export ANT_HOME=$(pwd)/apache-ant-1.10.12
# update the path env. variable
export PATH=${PATH}:${ANT_HOME}/bin

HOME_DIR=$(pwd)
OS_NAME=$(grep ^PRETTY_NAME /etc/os-release | cut -d= -f2)

# Clone the pinned release tag; report and exit cleanly on failure so CI can
# parse the pipe-delimited status line.
if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
	echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | Fail | Clone_Fails"
	exit 0
fi

cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION

cd $HOME_DIR/$PACKAGE_NAME
if ! ant; then
	echo "------------------$PACKAGE_NAME:build_fails---------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME "
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | Fail | Build_Fails"
	exit 0
else
	echo "------------------$PACKAGE_NAME:build_success-------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME "
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | Pass | Build_Success"
	exit 0
fi
|
// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE.md file.
library testing.discover;
import 'dart:io' show
Directory,
FileSystemEntity,
Platform,
Process;
import 'dart:async' show
Future,
Stream,
StreamController,
StreamSubscription;
import '../testing.dart' show
TestDescription;
/// Package configuration passed to spawned Dart programs (see dartArguments).
final Uri packageConfig = computePackageConfig();

/// Root of the Dart SDK.
/// NOTE(review): not referenced elsewhere in this file's visible code —
/// presumably consumed by other parts of the library.
final Uri dartSdk = computeDartSdk();

/// Common arguments when running a dart program. Returns a copy that can
/// safely be modified by caller.
List<String> get dartArguments => <String>["-c", "--packages=$packageConfig"];
/// Recursively discovers tests under each root in [testRoots], emitting a
/// [TestDescription] for every matching file. Roots that don't exist produce
/// an error on the stream. The stream closes once every root has finished
/// listing.
Stream<TestDescription> listTests(List<Uri> testRoots, {Pattern pattern}) {
  StreamController<TestDescription> controller =
      new StreamController<TestDescription>();
  // One entry per root; a null value marks a root whose directory listing
  // has not started yet. An entry is removed when its root finishes (or
  // turns out not to exist); the controller closes when the map is empty.
  Map<Uri, StreamSubscription> subscriptions = <Uri, StreamSubscription>{};
  for (Uri testRootUri in testRoots) {
    // Placeholders for all roots are inserted synchronously, before any of
    // the async exists() callbacks below can run, so an early-finishing
    // root cannot close the controller prematurely.
    subscriptions[testRootUri] = null;
    Directory testRoot = new Directory.fromUri(testRootUri);
    testRoot.exists().then((bool exists) {
      if (exists) {
        Stream<FileSystemEntity> stream =
            testRoot.list(recursive: true, followLinks: false);
        var subscription = stream.listen((FileSystemEntity entity) {
          // TestDescription.from returns null for entities that are not
          // tests (or don't match [pattern]).
          TestDescription description =
              TestDescription.from(testRootUri, entity, pattern: pattern);
          if (description != null) {
            controller.add(description);
          }
        }, onError: (error, StackTrace trace) {
          controller.addError(error, trace);
        }, onDone: () {
          subscriptions.remove(testRootUri);
          if (subscriptions.isEmpty) {
            controller.close(); // TODO(ahe): catchError???
          }
        });
        subscriptions[testRootUri] = subscription;
      } else {
        controller.addError("$testRootUri isn't a directory");
        subscriptions.remove(testRootUri);
      }
      // Covers the case where the last pending root doesn't exist.
      if (subscriptions.isEmpty) {
        controller.close(); // TODO(ahe): catchError???
      }
    });
  }
  return controller.stream;
}
/// Resolves the package configuration: the VM-provided location when set,
/// otherwise ".packages" relative to the current working directory.
Uri computePackageConfig() {
  String path = Platform.packageConfig;
  return path == null
      ? Uri.base.resolve(".packages")
      : Uri.base.resolve(path);
}
/// Locates the Dart SDK: the DART_SDK environment variable (or compile-time
/// define) when present, otherwise the parent directory of the running
/// executable.
Uri computeDartSdk() {
  String dartSdkPath = Platform.environment["DART_SDK"]
      ?? const String.fromEnvironment("DART_SDK");
  if (dartSdkPath == null) {
    return Uri.base.resolve(Platform.resolvedExecutable).resolve("../");
  }
  return Uri.base.resolveUri(new Uri.file(dartSdkPath));
}
/// Starts [program] in a new Dart VM process. [vmArguments] (defaulting to
/// [dartArguments]) precede the program path; [arguments] follow it.
Future<Process> startDart(
    Uri program,
    [List<String> arguments,
    List<String> vmArguments]) {
  List<String> allArguments = <String>[]
    ..addAll(vmArguments ?? dartArguments)
    ..add(program.toFilePath());
  if (arguments != null) {
    allArguments.addAll(arguments);
  }
  return Process.start(Platform.resolvedExecutable, allArguments);
}
|
from django.shortcuts import render
from .forms import RegisterForm
from .models import UserModel
from django.http import HttpResponse
# Create your views here.
def hello(request):
    """Render the registration page with an unbound RegisterForm."""
    form = RegisterForm()
    return render(request, "hello.html", {"form" : form})
def saveUser(request):
    """Validate the submitted registration form and persist a new UserModel.

    Returns a plain-text HttpResponse describing the outcome:
    'Saved Successfully', 'Form invalid', or 'Invalid request' for non-POST.
    """
    if request.method == "POST":
        form = RegisterForm(data=request.POST)
        if form.is_valid():
            # Use the validated, normalized form data rather than raw POST
            # values — the original read request.POST directly, bypassing
            # the cleaning the form just performed.
            data = form.cleaned_data
            new_user = UserModel(
                firstName=data.get("firstName", ""),
                lastName=data.get("lastName", ""),
                email=data.get("email", ""),
                # NOTE(review): password appears to be stored as-is; confirm
                # whether hashing is handled by UserModel before relying on this.
                password=data.get("password", ""),
            )
            new_user.save()
            resp = "Saved Successfully"
        else:
            resp = "Form invalid"
    else:
        resp = "Invalid request"
    return HttpResponse(resp)
|
-- Test fixtures: Exasol R UDF scripts that each expose one field of the
-- exa$meta script-metadata structure (database, script, session, node,
-- input and output column information).
CREATE R SCALAR SCRIPT
get_database_name() returns varchar(300) AS
run <- function(ctx)
exa$meta$database_name
/
create R scalar script
get_database_version() returns varchar(20) as
run <- function(ctx)
exa$meta$database_version
/
create R scalar script
get_script_language() emits (s1 varchar(300), s2 varchar(300)) as
run <- function(ctx)
ctx$emit(exa$meta$script_language, "R")
/
create R scalar script
get_script_name() returns varchar(200) as
run <- function(ctx)
exa$meta$script_name
/
create R scalar script
get_script_schema() returns varchar(200) as
run <- function(ctx)
exa$meta$script_schema
/
create R scalar script
get_current_user() returns varchar(200) as
run <- function(ctx)
exa$meta$current_user
/
create R scalar script
get_scope_user() returns varchar(200) as
run <- function(ctx)
exa$meta$scope_user
/
create R scalar script
get_current_schema() returns varchar(200) as
run <- function(ctx)
exa$meta$current_schema
/
create R scalar script
get_script_code() returns varchar(2000) as
run <- function(ctx)
exa$meta$script_code
/
-- Session / statement / cluster-node metadata.
create R scalar script
get_session_id() returns varchar(200) as
run <- function(ctx)
exa$meta$session_id
/
create R scalar script
get_statement_id() returns number as
run <- function(ctx)
exa$meta$statement_id
/
create R scalar script
get_node_count() returns number as
run <- function(ctx)
exa$meta$node_count
/
create R scalar script
get_node_id() returns number as
run <- function(ctx)
exa$meta$node_id
/
create R scalar script
get_vm_id() returns varchar(200) as
run <- function(ctx)
exa$meta$vm_id
/
-- Input metadata: same fields queried from both SCALAR and SET scripts.
create R scalar script
get_input_type_scalar() returns varchar(200) as
run <- function(ctx)
exa$meta$input_type
/
create R set script
get_input_type_set(a double) returns varchar(200) as
run <- function(ctx)
exa$meta$input_type
/
create R scalar script
get_input_column_count_scalar(c1 double, c2 varchar(100))
returns number as
run <- function(ctx)
exa$meta$input_column_count
/
create R set script
get_input_column_count_set(c1 double, c2 varchar(100))
returns number as
run <- function(ctx)
exa$meta$input_column_count
/
create R scalar script
get_input_columns(c1 double, c2 varchar(200))
emits (column_id number, column_name varchar(200), column_type varchar(20),
column_sql_type varchar(20), column_precision number, column_scale number,
column_length number) as
run <- function(ctx) {
# Emit one row per declared input column, replacing absent metadata
# fields with sentinel values so the emit never fails on NULL/NA.
cols <- exa$meta$input_columns
for (i in 1:length(cols)) {
name <- cols[[i]]$name
precision <- cols[[i]]$precision
thetype <- cols[[i]]$type
sql_type <- cols[[i]]$sql_type
scale <- cols[[i]]$scale
length <- cols[[i]]$length
if (is.null(name) || is.na(name)) name <- 'no-name'
if (is.null(thetype) || is.na(thetype)) thetype <- 'no-type'
if (is.null(sql_type) || is.na(sql_type)) sql_type <- 'no-sql-type'
if (is.null(precision) || is.na(precision)) precision <- 0
if (is.null(scale) || is.na(scale)) scale <- 0
if (is.null(length) || is.na(length)) length <- 0
ctx$emit(i, name, thetype, sql_type, precision, scale, length)
}
}
/
-- Output metadata: RETURNS vs EMITS variants of each query.
create R scalar script
get_output_type_return()
returns varchar(200) as
run <- function(ctx)
exa$meta$output_type
/
create R scalar script
get_output_type_emit()
emits (t varchar(200)) as
run <- function(ctx)
ctx$emit(exa$meta$output_type)
/
create R scalar script
get_output_column_count_return()
returns number as
run <- function(ctx)
exa$meta$output_column_count
/
create R scalar script
get_output_column_count_emit()
emits (x number, y number, z number) as
run <- function(ctx)
ctx$emit(exa$meta$output_column_count,exa$meta$output_column_count,exa$meta$output_column_count)
/
create R scalar script
get_output_columns()
emits (column_id number, column_name varchar(200), column_type varchar(20),
column_sql_type varchar(20), column_precision number, column_scale number,
column_length number) as
run <- function(ctx) {
# Mirror of get_input_columns, but for the declared output columns.
cols <- exa$meta$output_columns
for (i in 1:length(cols)) {
name <- cols[[i]]$name
precision <- cols[[i]]$precision
thetype <- cols[[i]]$type
sql_type <- cols[[i]]$sql_type
scale <- cols[[i]]$scale
length <- cols[[i]]$length
if (is.null(name) || is.na(name)) name <- 'no-name'
if (is.null(thetype) || is.na(thetype)) thetype <- 'no-type'
if (is.null(sql_type) || is.na(sql_type)) sql_type <- 'no-sql-type'
if (is.null(precision) || is.na(precision)) precision <- 0
if (is.null(scale) || is.na(scale)) scale <- 0
if (is.null(length) || is.na(length)) length <- 0
ctx$emit(i, name, thetype, sql_type, precision, scale, length)
}
}
/
-- Precision/scale/length of specific column declarations.
create R scalar script
get_precision_scale_length(n decimal(6,3), v varchar(10))
emits (precision1 number, scale1 number, length1 number, precision2 number, scale2 number, length2 number) as
run <- function(ctx) {
v <- exa$meta$input_columns[[1]]
precision1 <- v$precision
scale1 <- v$scale
length1 <- v$length
w <- exa$meta$input_columns[[2]]
precision2 <- w$precision
scale2 <- w$scale
length2 <- w$length
if (is.null(precision1) || is.na(precision1)) precision1 <- 0
if (is.null(scale1) || is.na(scale1)) scale1 <- 0
if (is.null(length1) || is.na(length1)) length1 <- 0
if (is.null(precision2) || is.na(precision2)) precision2 <- 0
if (is.null(scale2) || is.na(scale2)) scale2 <- 0
if (is.null(length2) || is.na(length2)) length2 <- 0
ctx$emit(precision1, scale1, length1, precision2, scale2, length2)
}
/
create r scalar script
get_char_length(text char(10))
emits(len1 number, len2 number, dummy char(20))
as
run <- function(ctx) {
v <- exa$meta$input_columns[[1]]
w <- exa$meta$output_columns[[3]]
ctx$emit(v$length,w$length,'9876543210')
}
/
|
package com.apurebase.kgraphql.schema
/**
 * Minimal subscription contract in reactive-streams style: a consumer
 * requests more items via [request] and terminates the flow with [cancel].
 */
interface Subscription {
    /** Request [n] additional elements from the producer. */
    fun request(n: Long)

    /** Stop receiving elements. */
    fun cancel()
}
|
# Application/contact form backed by MailForm (no database persistence);
# submitting a valid form delivers an email using +headers+ below.
#
# Each +attributes+ entry declares a form field. +:validate => true+ means
# presence validation; a regex value means the field must match it.
# +:captcha => true+ marks a honeypot field that must stay empty (spam guard).
class Apply < MailForm::Base
  attributes :first, :validate => true
  attributes :last, :validate => true
  attributes :email, :validate => /\A([\w\.%\+\-]+)@([\w\-]+\.)+([\w]{2,})\z/i
  attributes :category, :validate => true
  attributes :description, :validate => true
  attributes :nickname, :captcha => true

  # Mail headers for the delivered message; the sender is built from the
  # submitted name and email.
  def headers
    {
      :subject => "Apply",
      :to => "janethamrani12@gmail.com",
      :from => %("#{first} #{last}" <#{email}>)
    }
  end
end
|
{-# LANGUAGE FlexibleInstances, OverloadedStrings, BangPatterns #-}
module Language.Nano.TypeCheck where
import Language.Nano.Types
import Language.Nano.Parser
import qualified Data.List as L
import Text.Printf (printf)
import Control.Exception (throw)
--------------------------------------------------------------------------------
-- | Parse a file and infer the type of the resulting expression.
typeOfFile :: FilePath -> IO Type
typeOfFile f = do
  e <- parseFile f
  typeOfExpr e

-- | Parse a string and infer the type of the resulting expression.
typeOfString :: String -> IO Type
typeOfString = typeOfExpr . parseString
-- | Infer the type of an expression under the prelude environment,
-- sanity-checking the final inference state.
typeOfExpr :: Expr -> IO Type
typeOfExpr e = do
  let (!st, t) = infer initInferState preludeTypes e
  -- The original guard was @length (stSub st) < 0@, which is always False
  -- (list lengths are non-negative), so the check never fired. The error
  -- message refers to the fresh-variable counter, so test that instead.
  if stCnt st < 0 then throw (Error ("count Negative: " ++ show (stCnt st)))
    else return t
--------------------------------------------------------------------------------
-- Problem 1: Warm-up
--------------------------------------------------------------------------------
-- | Things that have free type variables
class HasTVars a where
  -- | All type variables occurring free in the value.
  freeTVars :: a -> [TVar]

-- | Type variables of a type
instance HasTVars Type where
  freeTVars t = error "TBD: type freeTVars"

-- | Free type variables of a poly-type (remove forall-bound vars)
instance HasTVars Poly where
  freeTVars s = error "TBD: poly freeTVars"
-- | Free type variables of a type environment: the concatenation of the
-- free variables of every bound scheme (duplicates are preserved).
instance HasTVars TypeEnv where
  freeTVars gamma = concatMap (freeTVars . snd) gamma
-- | Look up the type scheme bound to a variable in the environment
-- (first binding wins); throws an 'Error' when the variable is unbound.
lookupVarType :: Id -> TypeEnv -> Poly
lookupVarType x gamma =
  case lookup x gamma of
    Just s  -> s
    Nothing -> throw (Error ("unbound variable: " ++ x))
-- | Extend the type environment with a new binding.
-- The new binding shadows older ones because lookup scans left-to-right.
extendTypeEnv :: Id -> Poly -> TypeEnv -> TypeEnv
extendTypeEnv x s gamma = (x,s) : gamma
-- | Lookup a type variable in a substitution;
-- if not present, return the variable unchanged
lookupTVar :: TVar -> Subst -> Type
lookupTVar a sub = error "TBD: lookupTVar"

-- | Remove a type variable from a substitution
removeTVar :: TVar -> Subst -> Subst
removeTVar a sub = error "TBD: removeTVar"

-- | Things to which type substitutions can be applied
class Substitutable a where
  apply :: Subst -> a -> a

-- | Apply substitution to type
instance Substitutable Type where
  apply sub t = error "TBD: type apply"

-- | Apply substitution to poly-type
instance Substitutable Poly where
  apply sub s = error "TBD: poly apply"
-- | Apply substitution to (all poly-types in) another substitution,
-- keeping each mapping's key and rewriting only its value.
instance Substitutable Subst where
  apply sub to = [(a, apply sub t) | (a, t) <- to]

-- | Apply substitution to every scheme bound in a type environment.
instance Substitutable TypeEnv where
  apply sub gamma = [(x, apply sub s) | (x, s) <- gamma]
-- | Extend substitution with a new type assignment
-- (maps type variable @a@ to type @t@).
extendSubst :: Subst -> TVar -> Type -> Subst
extendSubst sub a t = error "TBD: extendSubst"
--------------------------------------------------------------------------------
-- Problem 2: Unification
--------------------------------------------------------------------------------
-- | State of the type inference algorithm
data InferState = InferState {
    stSub :: Subst -- ^ current substitution
  , stCnt :: Int   -- ^ number of fresh type variables generated so far
  } deriving (Eq,Show)

-- | Initial state: empty substitution; 0 type variables
initInferState = InferState [] 0

-- | Fresh type variable number n (named "a<n>")
freshTV n = TVar $ "a" ++ show n

-- | Extend the current substitution of a state with a new type assignment.
-- The fresh-variable counter is carried over unchanged.
extendState :: InferState -> TVar -> Type -> InferState
extendState (InferState sub n) a t = InferState (extendSubst sub a t) n

-- | Unify a type variable with a type;
-- if successful return an updated state, otherwise throw an error
unifyTVar :: InferState -> TVar -> Type -> InferState
unifyTVar st a t = error "TBD: unifyTVar"

-- | Unify two types;
-- if successful return an updated state, otherwise throw an error
unify :: InferState -> Type -> Type -> InferState
unify st t1 t2 = error "TBD: unify"
--------------------------------------------------------------------------------
-- Problem 3: Type Inference
--------------------------------------------------------------------------------
-- | Infer the type of an expression in an environment, threading the
-- inference state (substitution + fresh-variable counter).
infer :: InferState -> TypeEnv -> Expr -> (InferState, Type)
infer st _ (EInt _) = error "TBD: infer EInt"
infer st _ (EBool _) = error "TBD: infer EBool"
infer st gamma (EVar x) = error "TBD: infer EVar"
infer st gamma (ELam x body) = error "TBD: infer ELam"
infer st gamma (EApp e1 e2) = error "TBD: infer EApp"
infer st gamma (ELet x e1 e2) = error "TBD: infer ELet"
-- Binary operators are desugared into nested applications of the
-- operator's variable, whose type comes from 'preludeTypes'.
infer st gamma (EBin op e1 e2) = infer st gamma asApp
  where
    asApp = EApp (EApp opVar e1) e2
    opVar = EVar (show op)
-- Conditionals are desugared the same way via the prelude's "if".
infer st gamma (EIf c e1 e2) = infer st gamma asApp
  where
    asApp = EApp (EApp (EApp ifVar c) e1) e2
    ifVar = EVar "if"
-- The empty list takes its type from the prelude's "[]" entry.
infer st gamma ENil = infer st gamma (EVar "[]")
-- | Generalize type variables inside a type
-- (quantify variables that are free in the type but not in the environment).
generalize :: TypeEnv -> Type -> Poly
generalize gamma t = error "TBD: generalize"

-- | Instantiate a polymorphic type into a mono-type with fresh type variables,
-- starting numbering at @n@; returns the advanced counter and the mono-type.
instantiate :: Int -> Poly -> (Int, Type)
instantiate n s = error "TBD: instantiate"

-- | Types of built-in operators and functions.
-- Only "+" is filled in; the remaining entries are still to be implemented.
preludeTypes :: TypeEnv
preludeTypes =
  [ ("+", Mono $ TInt :=> TInt :=> TInt)
  , ("-", error "TBD: -")
  , ("*", error "TBD: *")
  , ("/", error "TBD: /")
  , ("==", error "TBD: ==")
  , ("!=", error "TBD: !=")
  , ("<", error "TBD: <")
  , ("<=", error "TBD: <=")
  , ("&&", error "TBD: &&")
  , ("||", error "TBD: ||")
  , ("if", error "TBD: if")
  -- lists:
  , ("[]", error "TBD: []")
  , (":", error "TBD: :")
  , ("head", error "TBD: head")
  , ("tail", error "TBD: tail")
  ]
|
package edu.ubb.micro.nowaste.usermanager.controller
import org.springframework.http.ResponseEntity
import org.springframework.web.bind.annotation.RequestMapping
import org.springframework.web.bind.annotation.RestController
/**
 * Health-check endpoint for the user manager service.
 */
@RestController
class ServiceController {

    /** GET /users/health — reports that the service is alive. */
    @RequestMapping("/users/health")
    fun getServiceHealth(): ResponseEntity<String> {
        return ResponseEntity.ok("User manager up and running.")
    }
}
|
from mock import call, patch
import pytest
from merfi.backends import gpg
class TestGpg(object):
    """Tests for the gpg signing backend (gpg invocations are mocked)."""

    # Shared backend under test; each test sets `path` before calling sign().
    backend = gpg.Gpg([])

    # args to merfi.backends.gpg.util's run()
    detached = ['gpg', '--batch', '--yes', '--armor', '--detach-sig', '--output', 'Release.gpg', 'Release']
    clearsign = ['gpg', '--batch', '--yes', '--clearsign', '--output', 'InRelease', 'Release']

    @patch("merfi.backends.gpg.util")
    def test_sign_no_files(self, m_util, tmpdir):
        """sign() on an empty directory tree must not invoke gpg at all."""
        self.backend.path = str(tmpdir)
        self.backend.sign()
        assert not m_util.run.called

    @patch("merfi.backends.gpg.util")
    def test_sign_two_files(self, m_util, deb_repotree):
        """sign() runs a detached-sign and clearsign pass per Release file."""
        self.backend.path = deb_repotree
        self.backend.sign()
        # Our deb_repotree fixture has two "Release" files.
        # Each one gets detached-signed and clearsign'd.
        calls = [
            call(self.detached),
            call(self.clearsign),
            call(self.detached),
            call(self.clearsign),
        ]
        m_util.run.assert_has_calls(calls)
|
/* Add your widget's module(s) here */
export {DemoWidgetModule} from "./demo-widget/demo-widget.module";
|
using NLog;
using PaletteInsightAgent.Helpers;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace PaletteInsightAgent.Output.OutputDrivers
{
public class WebserviceConfiguration
{
    /// <summary>
    /// The http/s endpoint to connect to.
    /// Example: https://user:pass@localhost:9000
    /// </summary>
    public string Endpoint;

    /// <summary>
    /// The authentication token, which used to be same as the license key. But the licenses
    /// are gone since this project became open-source.
    /// </summary>
    public string AuthToken;

    /// <summary>
    /// Should the output use multiple-file-upload (true) or single-file-upload(false)
    /// </summary>
    public bool UseMultifile = false;

    /// <summary>
    /// Use HTTP Proxy or not
    /// </summary>
    public bool UseProxy = false;

    /// <summary>
    /// HTTP Proxy address
    /// </summary>
    public string ProxyAddress = "";

    /// <summary>
    /// HTTP Proxy username
    /// </summary>
    public string ProxyUsername = "";

    /// <summary>
    /// HTTP Proxy password
    /// </summary>
    public string ProxyPassword = "";

    /// <summary>
    /// Returns true if the webservice configuration is valid: a non-empty
    /// auth token and an endpoint of at least 4 characters starting with "http".
    /// TODO: do a proper check of the Endpoint
    /// </summary>
    /// <returns></returns>
    public bool IsValid
    {
        get
        {
            // Guard against unset (null) fields: the original expression
            // dereferenced Endpoint/AuthToken unconditionally and threw
            // NullReferenceException for configurations missing them.
            return !string.IsNullOrEmpty(AuthToken)
                && Endpoint != null
                && Endpoint.Length >= 4
                && Endpoint.StartsWith("http");
        }
    }
}
/// <summary>
/// Output class that writes to a web service
/// </summary>
public class WebserviceOutput
{
    /// <summary>
    /// Factory method for WebserviceOutput.
    /// Validation happens here instead of in a constructor so that bad
    /// configuration surfaces as a regular exception, not a construction failure.
    /// </summary>
    /// <param name="config">webservice settings to validate and bind</param>
    /// <returns>a single-file upload backend using the given configuration</returns>
    public static IOutput MakeWebservice(WebserviceConfiguration config)
    {
        if (config.IsValid)
        {
            return new SinglefileBackend { config = config };
        }
        throw new ArgumentException("Invalid webservice configuration provided!");
    }
}
}
|
# Controller for browsing and maintaining machine-learning author records.
class MlAuthorsController < ApplicationController
  layout "main"

  # GET /ml_authors — search-filtered, paginated list (12 per page),
  # ordered by appearance count descending.
  def index
    @authors = MlAuthor.search(params['user_input']) \
               .paginate(:page => params[:page], :per_page => 12) \
               .order('n_appeared desc')
    @users = User.all
  end

  def show
    #TODO: show ml author page
    @ml_author = MlAuthor.find(params[:id])
    @users = User.all
  end

  def edit
    #TODO: edit author details
    @ml_author = MlAuthor.find(params[:id])
  end

  # PATCH/PUT /ml_authors/:id — apply the permitted params, re-rendering
  # the edit form on validation failure.
  def update
    @ml_author = MlAuthor.find(params[:id])
    if @ml_author.update(ml_author_params)
      redirect_to @ml_author
    else
      render 'edit'
    end
  end

  def pull
    # TODO: fetch profile from scholar.google.com
    @ml_author = MlAuthor.find(params[:id])
    # ml_author_gsprofile = gs_pull(@ml_author.name)
    # @ml_author.affiliation = ml_author_gsprofile
    # @ml_author.save
    # after retrieving all info of the author, refresh the show view
    @users = User.all
    render 'show', object: [@ml_author, @users]
  end

  # def sort
  #   @authors = MlAuthor.search(params['user_input']) \
  #              .paginate(:page => params[:page], :per_page => 12) \
  #              .order(params[:by]+' '+params[:order])
  #   @users = User.all
  #   redirect_to 'index'
  # end

  private

  # Strong-parameter whitelist for update.
  def ml_author_params
    params.require(:ml_author).permit(:affiliation, :country, :email)
  end

  def gs_pull(author_name)
    # TODO: retrieve gs profile
  end
end
|
/*
* This work is released into the Public Domain under the
* terms of the Creative Commons CC0 1.0 Universal license.
* https://creativecommons.org/publicdomain/zero/1.0/
*/
package pityoulish.sockets.server;
/**
* Represents a request to the Message Board Server.
*
* Because the protocol is simple and has only a few possible data elements
* in a request, it is feasible to define a single interface that represents
* any kind of request. Depending on the request type, some of the getters
* return values, while others return <code>null</code>.
*/
public interface MsgBoardRequest
{
  /**
   * The available request types.
   * They correspond to a subset of the
   * {@link pityoulish.sockets.tlv.MsgBoardType TLV types}.
   */
  public enum ReqType {
    LIST_MESSAGES, PUT_MESSAGE,
    OBTAIN_TICKET, RETURN_TICKET, REPLACE_TICKET;
  };

  /**
   * Obtains the request type.
   * This is the only attribute guaranteed to be present.
   *
   * @return the request type, never <code>null</code>
   */
  public ReqType getReqType()
    ;

  /**
   * Obtains the limit element of the request.
   *
   * @return the limit value, or <code>null</code>
   *         if not present in this request
   */
  public Integer getLimit()
    ;

  /**
   * Obtains the marker element of the request.
   *
   * @return the marker value, or <code>null</code>
   *         if not present in this request
   */
  public String getMarker()
    ;

  /**
   * Obtains the ticket element of the request.
   *
   * @return the ticket value, or <code>null</code>
   *         if not present in this request
   */
  public String getTicket()
    ;

  /**
   * Obtains the text element of the request.
   *
   * @return the text value, or <code>null</code>
   *         if not present in this request
   */
  public String getText()
    ;

  /**
   * Obtains the originator element of the request.
   *
   * @return the originator value, or <code>null</code>
   *         if not present in this request
   */
  public String getOriginator()
    ;
}
|
# Pick a value from a JSON string
## Installation
```
npm install -g pluckjson
```
## Usage
```
pluckjson --key=hello.galaxy < test-file.json
```
## Options
```
--key: dotted key path of the value to print (e.g. `hello.galaxy`)
--copy: copy value to clipboard
--noemit: do not print value
```
# Change Logs
## 0.0.1
- initial release
|
package dev.cancio.start.endpoint.service
import dev.cancio.core.model.Debt
import dev.cancio.core.repository.DebtRepository
import lombok.extern.slf4j.Slf4j
import org.springframework.data.domain.Pageable
import org.springframework.stereotype.Service
/**
 * Read-side service for [Debt] entities.
 */
@Service
// NOTE(review): Lombok's @Slf4j generates a logger only for Java classes;
// it has no effect on Kotlin — confirm whether it can be dropped.
@Slf4j
class DebtService(
    val debtRepository: DebtRepository
) {
    /** Returns one page of debts as described by [pageable]. */
    fun list(pageable: Pageable): Iterable<Debt> = debtRepository.findAll(pageable)
}
|
package dev.trotrohailer.shared.util
import android.content.Intent
import android.content.res.Resources
import android.location.Location
import android.os.Bundle
import android.os.Parcel
import android.view.View
import android.widget.Toast
import androidx.annotation.DimenRes
import androidx.core.content.res.ResourcesCompat
import androidx.core.os.ParcelCompat
import androidx.core.os.bundleOf
import androidx.core.text.trimmedLength
import androidx.fragment.app.FragmentActivity
import com.google.android.gms.maps.model.LatLng
import com.google.firebase.auth.FirebaseUser
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.GeoPoint
import dev.trotrohailer.shared.data.Driver
import dev.trotrohailer.shared.data.Passenger
import dev.trotrohailer.shared.util.Constants.AVAILABLE_DRIVERS
import dev.trotrohailer.shared.util.Constants.DRIVERS
import dev.trotrohailer.shared.util.Constants.PASSENGERS
import dev.trotrohailer.shared.util.Constants.PASSENGER_REQUESTS
fun debugger(msg: Any?) = println("TroTro ==> ${msg.toString()}")
// Firestore accessors for the app's top-level collections
// (collection names are defined in [Constants]).
fun FirebaseFirestore.passengerDocument(userId: String) = collection(PASSENGERS).document(userId)
fun FirebaseFirestore.passengers() = collection(PASSENGERS)
fun FirebaseFirestore.driverDocument(userId: String) = collection(DRIVERS).document(userId)
fun FirebaseFirestore.drivers() = collection(DRIVERS)
fun FirebaseFirestore.availableDrivers() = collection(AVAILABLE_DRIVERS)
fun FirebaseFirestore.passengerRequests() = collection(PASSENGER_REQUESTS)
/**
 * Starts [target] with [bundle] as its extras; when [finished] is true the
 * entire current task is finished after launching.
 */
fun FragmentActivity.intentTo(
    target: Class<out FragmentActivity>,
    finished: Boolean = false,
    bundle: Bundle = bundleOf()
) {
    startActivity(Intent(applicationContext, target).apply { putExtras(bundle) })
    if (finished) finishAffinity()
}
/** Linear interpolation from [a] to [b] by fraction [t]. */
fun lerp(a: Float, b: Float, t: Float): Float = a + (b - a) * t
/**
 * Alternative to Resources.getDimension() for values that are TYPE_FLOAT.
 * Delegates to ResourcesCompat for backwards-compatible float resources.
 */
fun Resources.getFloatUsingCompat(@DimenRes resId: Int): Float {
    return ResourcesCompat.getFloat(this, resId)
}
/** Write a boolean to a Parcel (API-level-safe via ParcelCompat). */
fun Parcel.writeBooleanUsingCompat(value: Boolean) = ParcelCompat.writeBoolean(this, value)

/** Read a boolean from a Parcel (API-level-safe via ParcelCompat). */
fun Parcel.readBooleanUsingCompat() = ParcelCompat.readBoolean(this)
/**
 * Converts a [FirebaseUser] into the app's [Passenger] model.
 * Photo URLs whose trimmed length exceeds 80 characters are dropped (null).
 */
fun FirebaseUser.mapToPassenger(): Passenger {
    val photo = photoUrl?.toString()?.takeIf { it.trimmedLength() <= 80 }
    return Passenger(uid, displayName ?: "No username", photo, phoneNumber)
}
/**
 * Converts a [FirebaseUser] into the app's [Driver] model with empty
 * extra fields. Photo URLs whose trimmed length exceeds 80 are dropped.
 */
fun FirebaseUser.mapToDriver(): Driver {
    val photo = photoUrl?.toString()?.takeIf { it.trimmedLength() <= 80 }
    return Driver(uid, displayName ?: "No username", "", "", photo)
}
fun View.toast(msg: Any?) = Toast.makeText(context, msg.toString(), Toast.LENGTH_SHORT).show()
/** Firestore collection names shared across the app. */
object Constants {
    const val PASSENGERS = "passengers"
    const val DRIVERS = "drivers"
    const val TRIPS = "trips"
    const val AVAILABLE_DRIVERS = "available_drivers"
    const val PASSENGER_REQUESTS = "passenger_requests"
}
/** Converts an Android [Location] to a Maps [LatLng]. */
fun Location.toLatLng(): LatLng = LatLng(latitude, longitude)

/** Converts an Android [Location] to a Firestore [GeoPoint]. */
fun Location.toGeoPoint(): GeoPoint = GeoPoint(latitude, longitude)
|
// ReSharper disable All
using Lumina.Text;
using Lumina.Data;
using Lumina.Data.Structs.Excel;
namespace Lumina.Excel.GeneratedSheets
{
// Generated sheet wrapper: the column read order in PopulateData must
// match the game data layout, so keep indices in sync with the sheet.
[Sheet( "GatheringItem", columnHash: 0x032ca4ae )]
public partial class GatheringItem : ExcelRow
{
    // Column 0 (int32).
    public int Item { get; set; }
    // Column 1 (ushort), resolved lazily against GatheringItemLevelConvertTable.
    public LazyRow< GatheringItemLevelConvertTable > GatheringItemLevel { get; set; }
    // Column 2 (bool); meaning unknown in the generated schema.
    public bool Unknown2 { get; set; }
    // Column 3 (uint), resolved lazily against the Quest sheet.
    public LazyRow< Quest > Quest { get; set; }
    // Column 4 (bool).
    public bool IsHidden { get; set; }

    /// <summary>Reads this row's columns from the given parser.</summary>
    public override void PopulateData( RowParser parser, GameData gameData, Language language )
    {
        base.PopulateData( parser, gameData, language );

        Item = parser.ReadColumn< int >( 0 );
        GatheringItemLevel = new LazyRow< GatheringItemLevelConvertTable >( gameData, parser.ReadColumn< ushort >( 1 ), language );
        Unknown2 = parser.ReadColumn< bool >( 2 );
        Quest = new LazyRow< Quest >( gameData, parser.ReadColumn< uint >( 3 ), language );
        IsHidden = parser.ReadColumn< bool >( 4 );
    }
}
|
// // Library imports
// import mongoose from 'mongoose';
// // Project imports
// import { logger } from './logger';
// const mongo_host = process.env.MONGO_HOST;
// const mongo_port = process.env.MONGO_PORT;
// const mongo_db_name = process.env.MONGO_DATABASE;
// const mongo_url = `mongodb://${mongo_host}:${mongo_port}/${mongo_db_name}`;
// const options = {
// useNewUrlParser: true,
// useUnifiedTopology: true
// }
// mongoose.connect(mongo_url, options);
// try {
// mongoose.connect(mongo_url, options);
// } catch (e) {
// logger.error({message: e});
// process.exit();
// }
// const mongo_client = mongoose.connection;
// mongo_client.on('error', () => {
// logger.error('Connection to database has been lost');
// });
// mongo_client.once('open', () => {
// logger.info('Successfully connected to MongoDB database');
// });
// export {
// mongo_client
// }
|
package code
/** Demo entry point: exercises [AddFunction] and prints the results. */
fun main(args: Array<String>) {
    println(AddFunction(3, 6))
    println("3 + 6 + 9 = ${AddFunction(3, 6) + 9}")
}
/**
 * Adds [number1] and [number2], echoing the operands to stdout first.
 */
fun AddFunction(number1: Int, number2: Int): Int {
    println("number1: $number1 number2: $number2 ")
    val sum = number1 + number2
    return sum
}
|
module Dec05 where
import Common
import Data.Char
import Data.Maybe
import Data.Function
import Data.List
-- | Part 1: fully react the polymer in the input file and print its length.
dec05P1 :: IO ()
dec05P1 = readFile "data/dec05.txt" >>= reactFull .> print

-- | One left-to-right reaction pass: removes adjacent pairs of the same
-- letter in opposite cases (e.g. "aA" or "Bb"), backing up one scanned
-- character after each removal so newly-adjacent pairs are also caught.
reaction :: String -> String
reaction = go []
  where
    -- xs is the (reversed) already-scanned prefix; the second list is
    -- the unprocessed remainder.
    go xs [] = reverse xs
    go xs (c:cs) = case cs of
      [] -> go (c:xs) []
      c':cs' -> if (isLower c && c' == toUpper c) || (isUpper c && c' == toLower c)
        then (case xs of x:xs' -> go xs' (x:cs'); [] -> go [] cs')
        else go (c:xs) cs

-- Iterate 'reaction' to a fixed point and return the final length.
reactFull s = let s' = reaction s in if s' == s then length s' else reactFull s'

-- | Part 2: for each unit type, remove both cases of it, fully react,
-- and print the resulting (length, unit) pairs sorted smallest-first.
dec05P2 :: IO ()
dec05P2 = readFile "data/dec05.txt" >>= go .> print
  where
    go s =
      sort [ (n, c)
           | c <- ['a' .. 'z']
           , let n = reactFull $ filter (\c' -> c' /= c && c' /= toUpper c) s
           ]
|
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=thumbv8.1m.main-none-none-eabi -verify-machineinstrs -mattr=+mve %s -o - | FileCheck %s
@var_36 = hidden local_unnamed_addr global i8 0, align 1
@arr_61 = hidden local_unnamed_addr global [1 x i32] zeroinitializer, align 4
define i32 @test(i8 zeroext %var_2, i16 signext %var_15, [18 x [22 x i8]]* %arr_60) {
; CHECK-LABEL: test:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: .save {r4, r5, r6, r7, r8, r9, r10, r11, lr}
; CHECK-NEXT: push.w {r4, r5, r6, r7, r8, r9, r10, r11, lr}
; CHECK-NEXT: cmp r0, #0
; CHECK-NEXT: beq.w .LBB0_10
; CHECK-NEXT: @ %bb.1: @ %for.cond1.preheader
; CHECK-NEXT: cmp r2, #0
; CHECK-NEXT: beq.w .LBB0_11
; CHECK-NEXT: @ %bb.2: @ %for.cond1.preheader1
; CHECK-NEXT: movw r8, :lower16:var_36
; CHECK-NEXT: movw r0, #27476
; CHECK-NEXT: addw r10, r2, #397
; CHECK-NEXT: mov.w r9, #11
; CHECK-NEXT: movt r8, :upper16:var_36
; CHECK-NEXT: sdiv r1, r0, r1
; CHECK-NEXT: mov.w r11, #0
; CHECK-NEXT: .LBB0_3: @ %for.cond6.preheader
; CHECK-NEXT: @ =>This Loop Header: Depth=1
; CHECK-NEXT: @ Child Loop BB0_4 Depth 2
; CHECK-NEXT: @ Child Loop BB0_6 Depth 2
; CHECK-NEXT: @ Child Loop BB0_8 Depth 2
; CHECK-NEXT: movs r0, #22
; CHECK-NEXT: dls lr, r9
; CHECK-NEXT: mla r7, r11, r0, r10
; CHECK-NEXT: movw r0, :lower16:arr_61
; CHECK-NEXT: movt r0, :upper16:arr_61
; CHECK-NEXT: adds r0, #4
; CHECK-NEXT: mov r3, r2
; CHECK-NEXT: mov r6, r0
; CHECK-NEXT: .LBB0_4: @ %for.body10
; CHECK-NEXT: @ Parent Loop BB0_3 Depth=1
; CHECK-NEXT: @ => This Inner Loop Header: Depth=2
; CHECK-NEXT: str r3, [r6, #-4]
; CHECK-NEXT: add.w r12, r3, #396
; CHECK-NEXT: ldrb r5, [r7, #-1]
; CHECK-NEXT: add.w r3, r3, #792
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: ite ne
; CHECK-NEXT: sxthne r5, r1
; CHECK-NEXT: moveq r5, #0
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: str.w r12, [r6]
; CHECK-NEXT: cset r5, ne
; CHECK-NEXT: adds r6, #8
; CHECK-NEXT: strb.w r5, [r8]
; CHECK-NEXT: ldrb r5, [r7]
; CHECK-NEXT: adds r7, #2
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: ite ne
; CHECK-NEXT: sxthne r5, r1
; CHECK-NEXT: moveq r5, #0
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: cset r5, ne
; CHECK-NEXT: strb.w r5, [r8]
; CHECK-NEXT: le lr, .LBB0_4
; CHECK-NEXT: @ %bb.5: @ %for.cond.cleanup9
; CHECK-NEXT: @ in Loop: Header=BB0_3 Depth=1
; CHECK-NEXT: add.w r3, r11, #1
; CHECK-NEXT: movs r7, #22
; CHECK-NEXT: dls lr, r9
; CHECK-NEXT: mov r6, r0
; CHECK-NEXT: uxtb r3, r3
; CHECK-NEXT: smlabb r7, r3, r7, r10
; CHECK-NEXT: mov r3, r2
; CHECK-NEXT: .LBB0_6: @ %for.body10.1
; CHECK-NEXT: @ Parent Loop BB0_3 Depth=1
; CHECK-NEXT: @ => This Inner Loop Header: Depth=2
; CHECK-NEXT: str r3, [r6, #-4]
; CHECK-NEXT: add.w r4, r3, #396
; CHECK-NEXT: ldrb r5, [r7, #-1]
; CHECK-NEXT: add.w r3, r3, #792
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: ite ne
; CHECK-NEXT: sxthne r5, r1
; CHECK-NEXT: moveq r5, #0
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: str r4, [r6]
; CHECK-NEXT: cset r5, ne
; CHECK-NEXT: adds r6, #8
; CHECK-NEXT: strb.w r5, [r8]
; CHECK-NEXT: ldrb r5, [r7]
; CHECK-NEXT: adds r7, #2
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: ite ne
; CHECK-NEXT: sxthne r5, r1
; CHECK-NEXT: moveq r5, #0
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: cset r5, ne
; CHECK-NEXT: strb.w r5, [r8]
; CHECK-NEXT: le lr, .LBB0_6
; CHECK-NEXT: @ %bb.7: @ %for.cond.cleanup9.1
; CHECK-NEXT: @ in Loop: Header=BB0_3 Depth=1
; CHECK-NEXT: add.w r3, r11, #2
; CHECK-NEXT: movs r7, #22
; CHECK-NEXT: dls lr, r9
; CHECK-NEXT: uxtb r3, r3
; CHECK-NEXT: smlabb r7, r3, r7, r10
; CHECK-NEXT: mov r3, r2
; CHECK-NEXT: .LBB0_8: @ %for.body10.2
; CHECK-NEXT: @ Parent Loop BB0_3 Depth=1
; CHECK-NEXT: @ => This Inner Loop Header: Depth=2
; CHECK-NEXT: str r3, [r0, #-4]
; CHECK-NEXT: ldrb r6, [r7, #-1]
; CHECK-NEXT: cmp r6, #0
; CHECK-NEXT: ite ne
; CHECK-NEXT: sxthne r5, r1
; CHECK-NEXT: moveq r5, #0
; CHECK-NEXT: add.w r6, r3, #396
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: str r6, [r0]
; CHECK-NEXT: cset r6, ne
; CHECK-NEXT: strb.w r6, [r8]
; CHECK-NEXT: add.w r3, r3, #792
; CHECK-NEXT: ldrb r6, [r7]
; CHECK-NEXT: adds r0, #8
; CHECK-NEXT: adds r7, #2
; CHECK-NEXT: cmp r6, #0
; CHECK-NEXT: ite ne
; CHECK-NEXT: sxthne r5, r1
; CHECK-NEXT: moveq r5, #0
; CHECK-NEXT: cmp r5, #0
; CHECK-NEXT: cset r6, ne
; CHECK-NEXT: strb.w r6, [r8]
; CHECK-NEXT: le lr, .LBB0_8
; CHECK-NEXT: @ %bb.9: @ %for.cond.cleanup9.2
; CHECK-NEXT: @ in Loop: Header=BB0_3 Depth=1
; CHECK-NEXT: add.w r0, r11, #3
; CHECK-NEXT: uxtb.w r11, r0
; CHECK-NEXT: cmp.w r11, #18
; CHECK-NEXT: it hs
; CHECK-NEXT: movhs.w r11, #0
; CHECK-NEXT: b .LBB0_3
; CHECK-NEXT: .LBB0_10: @ %for.cond.cleanup
; CHECK-NEXT: pop.w {r4, r5, r6, r7, r8, r9, r10, r11, pc}
; CHECK-NEXT: .LBB0_11: @ %for.cond1.us.preheader
; CHECK-NEXT: movw r0, :lower16:arr_61
; CHECK-NEXT: movs r1, #0
; CHECK-NEXT: movt r0, :upper16:arr_61
; CHECK-NEXT: str r1, [r0, #84]
; CHECK-NEXT: .inst.n 0xdefe
entry:
%tobool.not = icmp eq i8 %var_2, 0
br i1 %tobool.not, label %for.cond.cleanup, label %for.cond1.preheader
for.cond1.preheader: ; preds = %entry
%cmp11.not = icmp eq [18 x [22 x i8]]* %arr_60, null
br i1 %cmp11.not, label %for.cond1.us.preheader, label %for.cond1
for.cond1.us.preheader: ; preds = %for.cond1.preheader
store i32 0, i32* getelementptr ([1 x i32], [1 x i32]* @arr_61, i32 21, i32 0), align 4
call void @llvm.trap()
unreachable
for.cond.cleanup: ; preds = %entry
ret i32 undef
for.cond1: ; preds = %for.cond.cleanup9.2, %for.cond1.preheader
br label %for.cond6.preheader
for.cond6.preheader: ; preds = %for.cond.cleanup9.2, %for.cond1
%conv45 = phi i32 [ 0, %for.cond1 ], [ %conv.2, %for.cond.cleanup9.2 ]
br label %for.body10
for.cond.cleanup9: ; preds = %cond.end22.1
%add27 = add nuw nsw i32 %conv45, 1
%conv = and i32 %add27, 255
br label %for.body10.1
for.body10: ; preds = %cond.end22.1, %for.cond6.preheader
%i_15.044 = phi i32 [ 0, %for.cond6.preheader ], [ %add.1, %cond.end22.1 ]
%arraydecay = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 %i_15.044, i32 0
%0 = ptrtoint [22 x i8]* %arraydecay to i32
%arrayidx13 = getelementptr inbounds [1 x i32], [1 x i32]* @arr_61, i32 0, i32 %i_15.044
store i32 %0, i32* %arrayidx13, align 4
%arrayidx16 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 1, i32 %conv45, i32 %i_15.044
%1 = load i8, i8* %arrayidx16, align 1
%tobool18.not = icmp eq i8 %1, 0
br i1 %tobool18.not, label %cond.end22, label %cond.true19
cond.true19: ; preds = %for.body10
%div43 = sdiv i16 27476, %var_15
%div.sext = sext i16 %div43 to i32
br label %cond.end22
cond.end22: ; preds = %for.body10, %cond.true19
%cond23 = phi i32 [ %div.sext, %cond.true19 ], [ 0, %for.body10 ]
%tobool24 = icmp ne i32 %cond23, 0
%frombool = zext i1 %tobool24 to i8
store i8 %frombool, i8* @var_36, align 1
%add = or i32 %i_15.044, 1
%arraydecay.1 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 %add, i32 0
%2 = ptrtoint [22 x i8]* %arraydecay.1 to i32
%arrayidx13.1 = getelementptr inbounds [1 x i32], [1 x i32]* @arr_61, i32 0, i32 %add
store i32 %2, i32* %arrayidx13.1, align 4
%arrayidx16.1 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 1, i32 %conv45, i32 %add
%3 = load i8, i8* %arrayidx16.1, align 1
%tobool18.not.1 = icmp eq i8 %3, 0
br i1 %tobool18.not.1, label %cond.end22.1, label %cond.true19.1
cond.true19.1: ; preds = %cond.end22
%div43.1 = sdiv i16 27476, %var_15
%div.sext.1 = sext i16 %div43.1 to i32
br label %cond.end22.1
cond.end22.1: ; preds = %cond.true19.1, %cond.end22
%cond23.1 = phi i32 [ %div.sext.1, %cond.true19.1 ], [ 0, %cond.end22 ]
%tobool24.1 = icmp ne i32 %cond23.1, 0
%frombool.1 = zext i1 %tobool24.1 to i8
store i8 %frombool.1, i8* @var_36, align 1
%add.1 = add nuw nsw i32 %i_15.044, 2
%exitcond105.not.1 = icmp eq i32 %add.1, 22
br i1 %exitcond105.not.1, label %for.cond.cleanup9, label %for.body10
for.body10.1: ; preds = %cond.end22.1.1, %for.cond.cleanup9
%i_15.044.1 = phi i32 [ 0, %for.cond.cleanup9 ], [ %add.1.1, %cond.end22.1.1 ]
%arraydecay.1108 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 %i_15.044.1, i32 0
%4 = ptrtoint [22 x i8]* %arraydecay.1108 to i32
%arrayidx13.1109 = getelementptr inbounds [1 x i32], [1 x i32]* @arr_61, i32 0, i32 %i_15.044.1
store i32 %4, i32* %arrayidx13.1109, align 4
%arrayidx16.1110 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 1, i32 %conv, i32 %i_15.044.1
%5 = load i8, i8* %arrayidx16.1110, align 1
%tobool18.not.1111 = icmp eq i8 %5, 0
br i1 %tobool18.not.1111, label %cond.end22.1119, label %cond.true19.1114
cond.true19.1114: ; preds = %for.body10.1
%div43.1112 = sdiv i16 27476, %var_15
%div.sext.1113 = sext i16 %div43.1112 to i32
br label %cond.end22.1119
cond.end22.1119: ; preds = %cond.true19.1114, %for.body10.1
%cond23.1115 = phi i32 [ %div.sext.1113, %cond.true19.1114 ], [ 0, %for.body10.1 ]
%tobool24.1116 = icmp ne i32 %cond23.1115, 0
%frombool.1117 = zext i1 %tobool24.1116 to i8
store i8 %frombool.1117, i8* @var_36, align 1
%add.1118 = or i32 %i_15.044.1, 1
%arraydecay.1.1 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 %add.1118, i32 0
%6 = ptrtoint [22 x i8]* %arraydecay.1.1 to i32
%arrayidx13.1.1 = getelementptr inbounds [1 x i32], [1 x i32]* @arr_61, i32 0, i32 %add.1118
store i32 %6, i32* %arrayidx13.1.1, align 4
%arrayidx16.1.1 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 1, i32 %conv, i32 %add.1118
%7 = load i8, i8* %arrayidx16.1.1, align 1
%tobool18.not.1.1 = icmp eq i8 %7, 0
br i1 %tobool18.not.1.1, label %cond.end22.1.1, label %cond.true19.1.1
cond.true19.1.1: ; preds = %cond.end22.1119
%div43.1.1 = sdiv i16 27476, %var_15
%div.sext.1.1 = sext i16 %div43.1.1 to i32
br label %cond.end22.1.1
cond.end22.1.1: ; preds = %cond.true19.1.1, %cond.end22.1119
%cond23.1.1 = phi i32 [ %div.sext.1.1, %cond.true19.1.1 ], [ 0, %cond.end22.1119 ]
%tobool24.1.1 = icmp ne i32 %cond23.1.1, 0
%frombool.1.1 = zext i1 %tobool24.1.1 to i8
store i8 %frombool.1.1, i8* @var_36, align 1
%add.1.1 = add nuw nsw i32 %i_15.044.1, 2
%exitcond105.not.1.1 = icmp eq i32 %add.1.1, 22
br i1 %exitcond105.not.1.1, label %for.cond.cleanup9.1, label %for.body10.1
for.cond.cleanup9.1: ; preds = %cond.end22.1.1
%add27.1 = add nuw nsw i32 %conv45, 2
%conv.1 = and i32 %add27.1, 255
br label %for.body10.2
for.body10.2: ; preds = %cond.end22.1.2, %for.cond.cleanup9.1
%i_15.044.2 = phi i32 [ 0, %for.cond.cleanup9.1 ], [ %add.1.2, %cond.end22.1.2 ]
%arraydecay.2 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 %i_15.044.2, i32 0
%8 = ptrtoint [22 x i8]* %arraydecay.2 to i32
%arrayidx13.2 = getelementptr inbounds [1 x i32], [1 x i32]* @arr_61, i32 0, i32 %i_15.044.2
store i32 %8, i32* %arrayidx13.2, align 4
%arrayidx16.2 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 1, i32 %conv.1, i32 %i_15.044.2
%9 = load i8, i8* %arrayidx16.2, align 1
%tobool18.not.2 = icmp eq i8 %9, 0
br i1 %tobool18.not.2, label %cond.end22.2, label %cond.true19.2
cond.true19.2: ; preds = %for.body10.2
%div43.2 = sdiv i16 27476, %var_15
%div.sext.2 = sext i16 %div43.2 to i32
br label %cond.end22.2
cond.end22.2: ; preds = %cond.true19.2, %for.body10.2
%cond23.2 = phi i32 [ %div.sext.2, %cond.true19.2 ], [ 0, %for.body10.2 ]
%tobool24.2 = icmp ne i32 %cond23.2, 0
%frombool.2 = zext i1 %tobool24.2 to i8
store i8 %frombool.2, i8* @var_36, align 1
%add.2 = or i32 %i_15.044.2, 1
%arraydecay.1.2 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 %add.2, i32 0
%10 = ptrtoint [22 x i8]* %arraydecay.1.2 to i32
%arrayidx13.1.2 = getelementptr inbounds [1 x i32], [1 x i32]* @arr_61, i32 0, i32 %add.2
store i32 %10, i32* %arrayidx13.1.2, align 4
%arrayidx16.1.2 = getelementptr inbounds [18 x [22 x i8]], [18 x [22 x i8]]* %arr_60, i32 1, i32 %conv.1, i32 %add.2
%11 = load i8, i8* %arrayidx16.1.2, align 1
%tobool18.not.1.2 = icmp eq i8 %11, 0
br i1 %tobool18.not.1.2, label %cond.end22.1.2, label %cond.true19.1.2
cond.true19.1.2: ; preds = %cond.end22.2
%div43.1.2 = sdiv i16 27476, %var_15
%div.sext.1.2 = sext i16 %div43.1.2 to i32
br label %cond.end22.1.2
cond.end22.1.2: ; preds = %cond.true19.1.2, %cond.end22.2
%cond23.1.2 = phi i32 [ %div.sext.1.2, %cond.true19.1.2 ], [ 0, %cond.end22.2 ]
%tobool24.1.2 = icmp ne i32 %cond23.1.2, 0
%frombool.1.2 = zext i1 %tobool24.1.2 to i8
store i8 %frombool.1.2, i8* @var_36, align 1
%add.1.2 = add nuw nsw i32 %i_15.044.2, 2
%exitcond105.not.1.2 = icmp eq i32 %add.1.2, 22
br i1 %exitcond105.not.1.2, label %for.cond.cleanup9.2, label %for.body10.2
for.cond.cleanup9.2: ; preds = %cond.end22.1.2
%add27.2 = add nuw nsw i32 %conv45, 3
%conv.2 = and i32 %add27.2, 255
%cmp.2 = icmp ult i32 %conv.2, 18
br i1 %cmp.2, label %for.cond6.preheader, label %for.cond1
}
declare void @llvm.trap() #1
@b = hidden local_unnamed_addr global i32 0, align 4
@a = hidden local_unnamed_addr global i32 0, align 4
@c = hidden local_unnamed_addr global [1 x i32] zeroinitializer, align 4
; FileCheck-annotated codegen test: the CHECK lines below pin the exact
; Thumb2/MVE lowering of @d, including the two tail-predicated hardware loops
; (dlstp.32 / letp) that implement the masked scatter loops in the IR body.
; NOTE(review): these CHECK lines look autogenerated (update_llc_test_checks
; style); regenerate rather than hand-edit if codegen changes.
define i32 @d(i64 %e, i32 %f, i64 %g, i32 %h) {
; CHECK-LABEL: d:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: .save {r4, r5, r6, r7, r8, r9, r10, r11, lr}
; CHECK-NEXT: push.w {r4, r5, r6, r7, r8, r9, r10, r11, lr}
; CHECK-NEXT: .pad #4
; CHECK-NEXT: sub sp, #4
; CHECK-NEXT: .vsave {d8, d9, d10, d11, d12, d13}
; CHECK-NEXT: vpush {d8, d9, d10, d11, d12, d13}
; CHECK-NEXT: .pad #16
; CHECK-NEXT: sub sp, #16
; CHECK-NEXT: mov r12, r1
; CHECK-NEXT: subs r1, r0, #1
; CHECK-NEXT: sbcs r1, r12, #0
; CHECK-NEXT: blt.w .LBB1_28
; CHECK-NEXT: @ %bb.1: @ %for.cond2.preheader.lr.ph
; CHECK-NEXT: movs r7, #1
; CHECK-NEXT: cmp r2, #1
; CHECK-NEXT: csel r3, r2, r7, lt
; CHECK-NEXT: movw r6, #43691
; CHECK-NEXT: mov r1, r3
; CHECK-NEXT: cmp r3, #3
; CHECK-NEXT: it ls
; CHECK-NEXT: movls r1, #3
; CHECK-NEXT: movt r6, #43690
; CHECK-NEXT: subs r1, r1, r3
; CHECK-NEXT: ldr r4, [sp, #112]
; CHECK-NEXT: adds r1, #2
; CHECK-NEXT: movw r10, :lower16:c
; CHECK-NEXT: movt r10, :upper16:c
; CHECK-NEXT: vmov.i32 q5, #0xc
; CHECK-NEXT: umull r1, r5, r1, r6
; CHECK-NEXT: vmov.i32 q6, #0xc
; CHECK-NEXT: @ implicit-def: $r11
; CHECK-NEXT: @ implicit-def: $r9
; CHECK-NEXT: str r3, [sp, #12] @ 4-byte Spill
; CHECK-NEXT: str r0, [sp, #8] @ 4-byte Spill
; CHECK-NEXT: strd r2, r12, [sp] @ 8-byte Folded Spill
; CHECK-NEXT: add.w r6, r7, r5, lsr #1
; CHECK-NEXT: @ implicit-def: $r5
; CHECK-NEXT: adr r1, .LCPI1_0
; CHECK-NEXT: vldrw.u32 q0, [r1]
; CHECK-NEXT: vadd.i32 q4, q0, r3
; CHECK-NEXT: b .LBB1_4
; CHECK-NEXT: .LBB1_2: @ %for.body6.preheader
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: mov r0, r9
; CHECK-NEXT: cmn.w r9, #4
; CHECK-NEXT: it le
; CHECK-NEXT: mvnle r0, #3
; CHECK-NEXT: movw r2, #18725
; CHECK-NEXT: adds r0, #6
; CHECK-NEXT: movt r2, #9362
; CHECK-NEXT: sub.w r1, r0, r9
; CHECK-NEXT: movs r5, #0
; CHECK-NEXT: umull r2, r3, r1, r2
; CHECK-NEXT: subs r2, r1, r3
; CHECK-NEXT: add.w r2, r3, r2, lsr #1
; CHECK-NEXT: lsrs r3, r2, #2
; CHECK-NEXT: lsls r3, r3, #3
; CHECK-NEXT: sub.w r2, r3, r2, lsr #2
; CHECK-NEXT: subs r1, r2, r1
; CHECK-NEXT: add r0, r1
; CHECK-NEXT: add.w r9, r0, #7
; CHECK-NEXT: ldrd r12, r0, [sp, #4] @ 8-byte Folded Reload
; CHECK-NEXT: .LBB1_3: @ %for.cond.cleanup5
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: add.w r11, r11, #2
; CHECK-NEXT: subs.w r1, r11, r0
; CHECK-NEXT: asr.w r7, r11, #31
; CHECK-NEXT: sbcs.w r1, r7, r12
; CHECK-NEXT: bge.w .LBB1_28
; CHECK-NEXT: .LBB1_4: @ %for.cond2.preheader
; CHECK-NEXT: @ =>This Loop Header: Depth=1
; CHECK-NEXT: @ Child Loop BB1_17 Depth 2
; CHECK-NEXT: @ Child Loop BB1_8 Depth 2
; CHECK-NEXT: @ Child Loop BB1_10 Depth 3
; CHECK-NEXT: @ Child Loop BB1_12 Depth 3
; CHECK-NEXT: cmp.w r9, #2
; CHECK-NEXT: bgt .LBB1_3
; CHECK-NEXT: @ %bb.5: @ %for.body6.lr.ph
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: ldr r0, [sp, #12] @ 4-byte Reload
; CHECK-NEXT: cmp r0, #5
; CHECK-NEXT: bhi .LBB1_15
; CHECK-NEXT: @ %bb.6: @ %for.body6.us.preheader
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: ldrd r2, r3, [sp, #104]
; CHECK-NEXT: movs r0, #32
; CHECK-NEXT: movs r1, #0
; CHECK-NEXT: bl __aeabi_ldivmod
; CHECK-NEXT: vdup.32 q0, r2
; CHECK-NEXT: ldr r0, [sp, #8] @ 4-byte Reload
; CHECK-NEXT: ldrd r2, r12, [sp] @ 8-byte Folded Reload
; CHECK-NEXT: mov r7, r9
; CHECK-NEXT: b .LBB1_8
; CHECK-NEXT: .LBB1_7: @ %for.cond.cleanup17.us
; CHECK-NEXT: @ in Loop: Header=BB1_8 Depth=2
; CHECK-NEXT: add.w r9, r7, #7
; CHECK-NEXT: cmn.w r7, #4
; CHECK-NEXT: mov.w r5, #0
; CHECK-NEXT: mov r7, r9
; CHECK-NEXT: bge .LBB1_3
; CHECK-NEXT: .LBB1_8: @ %for.body6.us
; CHECK-NEXT: @ Parent Loop BB1_4 Depth=1
; CHECK-NEXT: @ => This Loop Header: Depth=2
; CHECK-NEXT: @ Child Loop BB1_10 Depth 3
; CHECK-NEXT: @ Child Loop BB1_12 Depth 3
; CHECK-NEXT: cbz r2, .LBB1_11
; CHECK-NEXT: @ %bb.9: @ %for.body13.us51.preheader
; CHECK-NEXT: @ in Loop: Header=BB1_8 Depth=2
; CHECK-NEXT: movw r3, :lower16:a
; CHECK-NEXT: movs r1, #0
; CHECK-NEXT: movt r3, :upper16:a
; CHECK-NEXT: vmov q1, q4
; CHECK-NEXT: str r1, [r3]
; CHECK-NEXT: movw r3, :lower16:b
; CHECK-NEXT: movt r3, :upper16:b
; CHECK-NEXT: str r1, [r3]
; CHECK-NEXT: dlstp.32 lr, r6
; CHECK-NEXT: .LBB1_10: @ %vector.body111
; CHECK-NEXT: @ Parent Loop BB1_4 Depth=1
; CHECK-NEXT: @ Parent Loop BB1_8 Depth=2
; CHECK-NEXT: @ => This Inner Loop Header: Depth=3
; CHECK-NEXT: vshl.i32 q2, q1, #2
; CHECK-NEXT: vadd.i32 q2, q2, r10
; CHECK-NEXT: vadd.i32 q1, q1, q6
; CHECK-NEXT: vstrw.32 q0, [q2]
; CHECK-NEXT: letp lr, .LBB1_10
; CHECK-NEXT: b .LBB1_13
; CHECK-NEXT: .LBB1_11: @ %vector.body.preheader
; CHECK-NEXT: @ in Loop: Header=BB1_8 Depth=2
; CHECK-NEXT: vmov q1, q4
; CHECK-NEXT: dlstp.32 lr, r6
; CHECK-NEXT: .LBB1_12: @ %vector.body
; CHECK-NEXT: @ Parent Loop BB1_4 Depth=1
; CHECK-NEXT: @ Parent Loop BB1_8 Depth=2
; CHECK-NEXT: @ => This Inner Loop Header: Depth=3
; CHECK-NEXT: vshl.i32 q2, q1, #2
; CHECK-NEXT: vadd.i32 q2, q2, r10
; CHECK-NEXT: vadd.i32 q1, q1, q5
; CHECK-NEXT: vstrw.32 q0, [q2]
; CHECK-NEXT: letp lr, .LBB1_12
; CHECK-NEXT: .LBB1_13: @ %for.cond9.for.cond15.preheader_crit_edge.us
; CHECK-NEXT: @ in Loop: Header=BB1_8 Depth=2
; CHECK-NEXT: cmp r4, #0
; CHECK-NEXT: beq .LBB1_7
; CHECK-NEXT: @ %bb.14: @ %for.cond9.for.cond15.preheader_crit_edge.us
; CHECK-NEXT: @ in Loop: Header=BB1_8 Depth=2
; CHECK-NEXT: eor r1, r5, #1
; CHECK-NEXT: lsls r1, r1, #31
; CHECK-NEXT: bne .LBB1_7
; CHECK-NEXT: b .LBB1_26
; CHECK-NEXT: .LBB1_15: @ %for.body6.lr.ph.split
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: cmp r4, #0
; CHECK-NEXT: beq.w .LBB1_2
; CHECK-NEXT: @ %bb.16: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: ldrd r12, r0, [sp, #4] @ 8-byte Folded Reload
; CHECK-NEXT: mov r7, r9
; CHECK-NEXT: .LBB1_17: @ %for.body6.us60
; CHECK-NEXT: @ Parent Loop BB1_4 Depth=1
; CHECK-NEXT: @ => This Inner Loop Header: Depth=2
; CHECK-NEXT: lsls r1, r5, #31
; CHECK-NEXT: bne .LBB1_27
; CHECK-NEXT: @ %bb.18: @ %for.cond.cleanup17.us63
; CHECK-NEXT: @ in Loop: Header=BB1_17 Depth=2
; CHECK-NEXT: cmn.w r7, #4
; CHECK-NEXT: bge .LBB1_22
; CHECK-NEXT: @ %bb.19: @ %for.cond.cleanup17.us63.1
; CHECK-NEXT: @ in Loop: Header=BB1_17 Depth=2
; CHECK-NEXT: cmn.w r7, #12
; CHECK-NEXT: bgt .LBB1_23
; CHECK-NEXT: @ %bb.20: @ %for.cond.cleanup17.us63.2
; CHECK-NEXT: @ in Loop: Header=BB1_17 Depth=2
; CHECK-NEXT: cmn.w r7, #19
; CHECK-NEXT: bgt .LBB1_24
; CHECK-NEXT: @ %bb.21: @ %for.cond.cleanup17.us63.3
; CHECK-NEXT: @ in Loop: Header=BB1_17 Depth=2
; CHECK-NEXT: add.w r9, r7, #28
; CHECK-NEXT: cmn.w r7, #25
; CHECK-NEXT: mov.w r5, #0
; CHECK-NEXT: mov r7, r9
; CHECK-NEXT: blt .LBB1_17
; CHECK-NEXT: b .LBB1_3
; CHECK-NEXT: .LBB1_22: @ %for.cond.cleanup5.loopexit134.split.loop.exit139
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: add.w r9, r7, #7
; CHECK-NEXT: b .LBB1_25
; CHECK-NEXT: .LBB1_23: @ %for.cond.cleanup5.loopexit134.split.loop.exit137
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: add.w r9, r7, #14
; CHECK-NEXT: b .LBB1_25
; CHECK-NEXT: .LBB1_24: @ %for.cond.cleanup5.loopexit134.split.loop.exit135
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: add.w r9, r7, #21
; CHECK-NEXT: .LBB1_25: @ %for.cond.cleanup5
; CHECK-NEXT: @ in Loop: Header=BB1_4 Depth=1
; CHECK-NEXT: movs r5, #0
; CHECK-NEXT: b .LBB1_3
; CHECK-NEXT: .LBB1_26: @ %for.inc19.us
; CHECK-NEXT: @ =>This Inner Loop Header: Depth=1
; CHECK-NEXT: b .LBB1_26
; CHECK-NEXT: .LBB1_27: @ %for.inc19.us66
; CHECK-NEXT: @ =>This Inner Loop Header: Depth=1
; CHECK-NEXT: b .LBB1_27
; CHECK-NEXT: .LBB1_28: @ %for.cond.cleanup
; CHECK-NEXT: add sp, #16
; CHECK-NEXT: vpop {d8, d9, d10, d11, d12, d13}
; CHECK-NEXT: add sp, #4
; CHECK-NEXT: pop.w {r4, r5, r6, r7, r8, r9, r10, r11, pc}
; CHECK-NEXT: .p2align 4
; CHECK-NEXT: @ %bb.29:
; CHECK-NEXT: .LCPI1_0:
; CHECK-NEXT: .long 0 @ 0x0
; CHECK-NEXT: .long 3 @ 0x3
; CHECK-NEXT: .long 6 @ 0x6
; CHECK-NEXT: .long 9 @ 0x9
; Outer loop only runs when %e > 0; otherwise fall through to for.cond.cleanup
; and return undef.
entry:
%cmp47 = icmp sgt i64 %e, 0
br i1 %cmp47, label %for.cond2.preheader.lr.ph, label %for.cond.cleanup
; Precompute the (identical) trip counts %4 and %9 and the <0,3,6,9> + %spec.select
; step vectors used by the two tail-folded scatter loops below.
for.cond2.preheader.lr.ph: ; preds = %entry
%cmp7.inv = icmp slt i32 %f, 1
%spec.select = select i1 %cmp7.inv, i32 %f, i32 1
%cmp1041 = icmp ult i32 %spec.select, 6
%tobool.not = icmp eq i32 %f, 0
%tobool20.not97 = icmp eq i32 %h, 0
%0 = icmp ugt i32 %spec.select, 3
%umax = select i1 %0, i32 %spec.select, i32 3
%1 = add i32 %umax, 2
%2 = sub i32 %1, %spec.select
%3 = udiv i32 %2, 3
%4 = add nuw nsw i32 %3, 1
%5 = icmp ugt i32 %spec.select, 3
%umax112 = select i1 %5, i32 %spec.select, i32 3
%6 = add i32 %umax112, 2
%7 = sub i32 %6, %spec.select
%8 = udiv i32 %7, 3
%9 = add nuw nsw i32 %8, 1
%n.rnd.up114 = add nuw nsw i32 %8, 4
%n.vec116 = and i32 %n.rnd.up114, 2147483644
%.splatinsert121 = insertelement <4 x i32> poison, i32 %spec.select, i32 0
%.splat122 = shufflevector <4 x i32> %.splatinsert121, <4 x i32> poison, <4 x i32> zeroinitializer
%induction123 = add <4 x i32> %.splat122, <i32 0, i32 3, i32 6, i32 9>
%n.rnd.up = add nuw nsw i32 %3, 4
%n.vec = and i32 %n.rnd.up, 2147483644
%.splatinsert = insertelement <4 x i32> poison, i32 %spec.select, i32 0
%.splat = shufflevector <4 x i32> %.splatinsert, <4 x i32> poison, <4 x i32> zeroinitializer
%induction = add <4 x i32> %.splat, <i32 0, i32 3, i32 6, i32 9>
br label %for.cond2.preheader
for.cond2.preheader: ; preds = %for.cond2.preheader.lr.ph, %for.cond.cleanup5
%l.0.off050 = phi i1 [ undef, %for.cond2.preheader.lr.ph ], [ %l.1.off0.lcssa, %for.cond.cleanup5 ]
%i.049 = phi i32 [ undef, %for.cond2.preheader.lr.ph ], [ %add26, %for.cond.cleanup5 ]
%j.048 = phi i32 [ undef, %for.cond2.preheader.lr.ph ], [ %j.1.lcssa, %for.cond.cleanup5 ]
%cmp343 = icmp slt i32 %j.048, 3
br i1 %cmp343, label %for.body6.lr.ph, label %for.cond.cleanup5
for.body6.lr.ph: ; preds = %for.cond2.preheader
br i1 %cmp1041, label %for.body6.us.preheader, label %for.body6.lr.ph.split
for.body6.us.preheader: ; preds = %for.body6.lr.ph
%rem.us = srem i64 32, %g
%conv14.us = trunc i64 %rem.us to i32
%broadcast.splatinsert131 = insertelement <4 x i32> poison, i32 %conv14.us, i32 0
%broadcast.splat132 = shufflevector <4 x i32> %broadcast.splatinsert131, <4 x i32> poison, <4 x i32> zeroinitializer
%broadcast.splatinsert107 = insertelement <4 x i32> poison, i32 %conv14.us, i32 0
%broadcast.splat108 = shufflevector <4 x i32> %broadcast.splatinsert107, <4 x i32> poison, <4 x i32> zeroinitializer
br label %for.body6.us
for.body6.us: ; preds = %for.body6.us.preheader, %for.cond.cleanup17.us
%l.1.off045.us = phi i1 [ false, %for.cond.cleanup17.us ], [ %l.0.off050, %for.body6.us.preheader ]
%j.144.us = phi i32 [ %add23.us, %for.cond.cleanup17.us ], [ %j.048, %for.body6.us.preheader ]
br i1 %tobool.not, label %vector.body, label %for.body13.us51.preheader
; Tail-folded masked scatter into @c, predicated by get.active.lane.mask;
; lowered to the dlstp.32/letp loop checked as .LBB1_12 above.
vector.body: ; preds = %for.body6.us, %vector.body
%index = phi i32 [ %index.next, %vector.body ], [ 0, %for.body6.us ]
%vec.ind = phi <4 x i32> [ %vec.ind.next, %vector.body ], [ %induction, %for.body6.us ]
%active.lane.mask = call <4 x i1> @llvm.get.active.lane.mask.v4i1.i32(i32 %index, i32 %4)
%10 = getelementptr inbounds [1 x i32], [1 x i32]* @c, i32 0, <4 x i32> %vec.ind
call void @llvm.masked.scatter.v4i32.v4p0i32(<4 x i32> %broadcast.splat108, <4 x i32*> %10, i32 4, <4 x i1> %active.lane.mask)
%index.next = add i32 %index, 4
%vec.ind.next = add <4 x i32> %vec.ind, <i32 12, i32 12, i32 12, i32 12>
%11 = icmp eq i32 %index.next, %n.vec
br i1 %11, label %for.cond9.for.cond15.preheader_crit_edge.us, label %vector.body
for.body13.us51.preheader: ; preds = %for.body6.us
store i32 0, i32* @b, align 4
store i32 0, i32* @a, align 4
br label %vector.body111
; Second scatter loop (same shape as vector.body, trip count %9); lowered to
; the dlstp.32/letp loop checked as .LBB1_10 above.
vector.body111: ; preds = %vector.body111, %for.body13.us51.preheader
%index117 = phi i32 [ 0, %for.body13.us51.preheader ], [ %index.next118, %vector.body111 ]
%vec.ind124 = phi <4 x i32> [ %induction123, %for.body13.us51.preheader ], [ %vec.ind.next125, %vector.body111 ]
%active.lane.mask130 = call <4 x i1> @llvm.get.active.lane.mask.v4i1.i32(i32 %index117, i32 %9)
%12 = getelementptr inbounds [1 x i32], [1 x i32]* @c, i32 0, <4 x i32> %vec.ind124
call void @llvm.masked.scatter.v4i32.v4p0i32(<4 x i32> %broadcast.splat132, <4 x i32*> %12, i32 4, <4 x i1> %active.lane.mask130)
%index.next118 = add i32 %index117, 4
%vec.ind.next125 = add <4 x i32> %vec.ind124, <i32 12, i32 12, i32 12, i32 12>
%13 = icmp eq i32 %index.next118, %n.vec116
br i1 %13, label %for.cond9.for.cond15.preheader_crit_edge.us, label %vector.body111
for.cond.cleanup17.us: ; preds = %for.cond9.for.cond15.preheader_crit_edge.us
%add23.us = add nsw i32 %j.144.us, 7
%cmp3.us = icmp slt i32 %j.144.us, -4
br i1 %cmp3.us, label %for.body6.us, label %for.cond.cleanup5
; Deliberate infinite loop (self branch) reached when %h != 0 and the flag is
; still set; kept so the .LBB1_26 self-branch in the CHECK lines is exercised.
for.inc19.us: ; preds = %for.cond9.for.cond15.preheader_crit_edge.us, %for.inc19.us
br label %for.inc19.us
for.cond9.for.cond15.preheader_crit_edge.us: ; preds = %vector.body111, %vector.body
%l.1.off045.us.not = xor i1 %l.1.off045.us, true
%brmerge = or i1 %tobool20.not97, %l.1.off045.us.not
br i1 %brmerge, label %for.cond.cleanup17.us, label %for.inc19.us
for.body6.lr.ph.split: ; preds = %for.body6.lr.ph
br i1 %tobool20.not97, label %for.body6.preheader, label %for.body6.us60
; Closed-form computation of the final j value for the scalar (no-store) inner
; loop, replacing an actual loop: %20 = j + 7 * ceil-style trip count.
for.body6.preheader: ; preds = %for.body6.lr.ph.split
%14 = icmp sgt i32 %j.048, -4
%smax = select i1 %14, i32 %j.048, i32 -4
%15 = add nsw i32 %smax, 6
%16 = sub i32 %15, %j.048
%17 = urem i32 %16, 7
%18 = sub i32 %16, %17
%19 = add nsw i32 %j.048, 7
%20 = add i32 %19, %18
br label %for.cond.cleanup5
for.body6.us60: ; preds = %for.body6.lr.ph.split, %for.cond.cleanup17.us63.3
%l.1.off045.us61 = phi i1 [ false, %for.cond.cleanup17.us63.3 ], [ %l.0.off050, %for.body6.lr.ph.split ]
%j.144.us62 = phi i32 [ %add23.us64.3, %for.cond.cleanup17.us63.3 ], [ %j.048, %for.body6.lr.ph.split ]
br i1 %l.1.off045.us61, label %for.inc19.us66, label %for.cond.cleanup17.us63
for.cond.cleanup17.us63: ; preds = %for.body6.us60
%cmp3.us65 = icmp slt i32 %j.144.us62, -4
br i1 %cmp3.us65, label %for.cond.cleanup17.us63.1, label %for.cond.cleanup5.loopexit134.split.loop.exit139
; Second deliberate infinite self-loop (.LBB1_27 in the CHECK lines).
for.inc19.us66: ; preds = %for.body6.us60, %for.inc19.us66
br label %for.inc19.us66
; Function result is unused by the test: returns undef.
for.cond.cleanup: ; preds = %for.cond.cleanup5, %entry
ret i32 undef
for.cond.cleanup5.loopexit134.split.loop.exit135: ; preds = %for.cond.cleanup17.us63.2
%add23.us64.2.le = add nsw i32 %j.144.us62, 21
br label %for.cond.cleanup5
for.cond.cleanup5.loopexit134.split.loop.exit137: ; preds = %for.cond.cleanup17.us63.1
%add23.us64.1.le = add nsw i32 %j.144.us62, 14
br label %for.cond.cleanup5
for.cond.cleanup5.loopexit134.split.loop.exit139: ; preds = %for.cond.cleanup17.us63
%add23.us64.le = add nsw i32 %j.144.us62, 7
br label %for.cond.cleanup5
for.cond.cleanup5: ; preds = %for.cond.cleanup5.loopexit134.split.loop.exit135, %for.cond.cleanup5.loopexit134.split.loop.exit137, %for.cond.cleanup5.loopexit134.split.loop.exit139, %for.cond.cleanup17.us63.3, %for.cond.cleanup17.us, %for.body6.preheader, %for.cond2.preheader
%j.1.lcssa = phi i32 [ %j.048, %for.cond2.preheader ], [ %20, %for.body6.preheader ], [ %add23.us, %for.cond.cleanup17.us ], [ %add23.us64.2.le, %for.cond.cleanup5.loopexit134.split.loop.exit135 ], [ %add23.us64.1.le, %for.cond.cleanup5.loopexit134.split.loop.exit137 ], [ %add23.us64.le, %for.cond.cleanup5.loopexit134.split.loop.exit139 ], [ %add23.us64.3, %for.cond.cleanup17.us63.3 ]
%l.1.off0.lcssa = phi i1 [ %l.0.off050, %for.cond2.preheader ], [ false, %for.body6.preheader ], [ false, %for.cond.cleanup17.us ], [ false, %for.cond.cleanup17.us63.3 ], [ false, %for.cond.cleanup5.loopexit134.split.loop.exit139 ], [ false, %for.cond.cleanup5.loopexit134.split.loop.exit137 ], [ false, %for.cond.cleanup5.loopexit134.split.loop.exit135 ]
%add26 = add nsw i32 %i.049, 2
%conv = sext i32 %add26 to i64
%cmp = icmp slt i64 %conv, %e
br i1 %cmp, label %for.cond2.preheader, label %for.cond.cleanup
; Inner loop manually unrolled by 4 (j advances 7 per step, 28 per unrolled
; iteration), matching the .LBB1_17..21 chain in the CHECK lines.
for.cond.cleanup17.us63.1: ; preds = %for.cond.cleanup17.us63
%cmp3.us65.1 = icmp slt i32 %j.144.us62, -11
br i1 %cmp3.us65.1, label %for.cond.cleanup17.us63.2, label %for.cond.cleanup5.loopexit134.split.loop.exit137
for.cond.cleanup17.us63.2: ; preds = %for.cond.cleanup17.us63.1
%cmp3.us65.2 = icmp slt i32 %j.144.us62, -18
br i1 %cmp3.us65.2, label %for.cond.cleanup17.us63.3, label %for.cond.cleanup5.loopexit134.split.loop.exit135
for.cond.cleanup17.us63.3: ; preds = %for.cond.cleanup17.us63.2
%add23.us64.3 = add nsw i32 %j.144.us62, 28
%cmp3.us65.3 = icmp slt i32 %j.144.us62, -25
br i1 %cmp3.us65.3, label %for.body6.us60, label %for.cond.cleanup5
}
declare <4 x i1> @llvm.get.active.lane.mask.v4i1.i32(i32, i32) #1
declare void @llvm.masked.scatter.v4i32.v4p0i32(<4 x i32>, <4 x i32*>, i32 immarg, <4 x i1>) #2
|
using System;
using System.Collections.Generic;
using System.Text;
namespace Cake.LibMan
{
    /// <summary>
    /// The log verbosity levels understood by libman.
    /// Numeric values are explicit so they remain stable if members are reordered.
    /// </summary>
    public enum LibManVerbosity
    {
        /// <summary>
        /// The default verbosity level (no explicit level requested).
        /// </summary>
        Default = 0,
        /// <summary>
        /// Normal output.
        /// </summary>
        Normal = 1,
        /// <summary>
        /// Minimal (quiet) output.
        /// </summary>
        Quiet = 2,
        /// <summary>
        /// Detailed (verbose) output.
        /// </summary>
        Detailed = 3
    }
}
|
(ns lan-van.views
(:require [re-frame.core :refer [subscribe dispatch]]
[lan-van.subs :as subs]
[lan-van.events :as events]
[reagent.core :as r]
[clojure.string :as str]))
(defn dispatch-resize-event
  "Forwards browser window-resize events into re-frame."
  []
  (dispatch [::events/window-resize]))
;; Registered with defonce so hot reloads do not stack duplicate "resize"
;; listeners on the window.
(defonce resize-event
  (.addEventListener js/window
                     "resize"
                     dispatch-resize-event))
(defn dispatch-location-poll-event
  "Fires the three fast-poll events: van location, van status and the
  currently active dropup."
  []
  (doseq [event [[::events/get-van-location]
                 [::events/get-van-status]
                 [::events/get-current-dropup]]]
    (dispatch event)))
;; Poll location/status/current dropup once per second; defonce keeps a single
;; interval alive across hot reloads.
(defonce location-poll
  (js/setInterval dispatch-location-poll-event 1000))
(defn dispatch-dropups-poll-event
  "Refreshes the full dropup list."
  []
  (dispatch [::events/get-dropups]))
;; Refresh the dropup list once per minute (60 * 1000 ms); defonce keeps a
;; single interval alive across hot reloads.
(defonce dropups-poll
  (js/setInterval dispatch-dropups-poll-event (* 1000 60)))
(defn gmap-inner []
  ;; Inner Google Maps component. The map/marker/canvas JS objects live in a
  ;; local atom so they survive re-renders; `update` repositions the marker
  ;; and pans the map whenever the props (:location/:height) change.
  (let [gmap (atom nil)
        ;; Static, fully locked-down map: no UI, no user pan/zoom.
        options (clj->js {:zoom 18
                          :clickableIcons false
                          :disableDefaultUI true
                          :disableDoubleClickZoom true
                          :draggable false
                          :keyboardShortcuts false
                          :scrollwheel false})
        ;; NOTE: `update` shadows clojure.core/update inside this let.
        update (fn [comp]
                 (let [props (r/props comp)
                       {:keys [latitude longitude]} (:location props)
                       height (:height props)
                       latlng (js/google.maps.LatLng. latitude longitude)]
                   ;; Resize the canvas to the prop height, tell Maps the
                   ;; viewport changed, then move the marker and recenter.
                   (set! (.-height (.-style (:canvas @gmap)))
                         (str height "px"))
                   (.trigger js/google.maps.event
                             (:map @gmap)
                             "resize")
                   (.setPosition (:marker @gmap) latlng)
                   (.panTo (:map @gmap) latlng)))]
    (r/create-class
      {:reagent-render
       (fn []
         [:div#map-canvas])
       :component-did-mount
       (fn [comp]
         ;; Build the map and van marker once the canvas div exists, then run
         ;; the same `update` used on prop changes for initial positioning.
         (let [canvas (.getElementById js/document "map-canvas")
               gm (js/google.maps.Map. canvas options)
               marker-opts {:clickable false
                            :icon "/images/NyanVan.gif"
                            :map gm
                            :optimized false
                            :title "Lan Van"}
               marker (js/google.maps.Marker. (clj->js marker-opts))]
           (reset! gmap {:map gm
                         :marker marker
                         :canvas canvas}))
         (update comp))
       :component-did-update update
       :display-name "gmap-inner"})))
(defn gmap-outer [opts]
  ;; Wraps gmap-inner with live location/height subscriptions and overlays
  ;; "OFFLINE" whenever the van status is anything other than :tracking.
  (let [location (subscribe [::subs/location])
        height (subscribe [::subs/height])
        van-status (subscribe [::subs/van-status])]
    (fn []
      [:div opts
       (when-not (= @van-status :tracking)
         [:div.overlay
          [:div
           "OFFLINE"]])
       [gmap-inner {:location @location
                    :height @height}]])))
(defn dropups
  [opts]
  ;; Table of dropups (name + time); the row whose :id matches the current
  ;; dropup subscription is highlighted with the "current" CSS class.
  (let [dropups (subscribe [::subs/dropups])
        current (subscribe [::subs/current])
        height (subscribe [::subs/height])]
    (fn []
      [:div#dropouts (merge opts {:style {:height (str @height "px")}})
       [:table
        [:thead
         [:tr
          [:th "Name"]
          [:th "Time"]]]
        [:tbody
         (doall
          (for [d @dropups]
            (let [names (map :name (:people d))
                  current? (= @current (:id d))]
              ;; React key becomes the literal "current" for the active row —
              ;; presumably to force a remount when the highlight moves.
              ;; NOTE(review): this can collide with a row whose id is
              ;; "current"; confirm ids can never take that value.
              ^{:key (if current? "current" (:id d))}
              [:tr {:class (when current? "current")}
               [:td (str/join ", " names)]
               [:td (:time d)]])))]]])))
(defn play-button
  "Floating music toggle; its CSS class mirrors the playback state."
  []
  (let [playing? (subscribe [::subs/playing?])]
    (fn []
      (let [state-class (if @playing? "playing" "paused")]
        [:div#play-button
         {:on-click #(dispatch [::events/toggle-music])
          :class state-class}]))))
(defn main-panel
  []
  ;; Root view: optional seasonal snow layer, then the map (10 cols), the
  ;; dropup table (2 cols) and the floating music toggle.
  (let [christmas? (subscribe [::subs/christmas?])]
    (fn []
      [:div
       (when @christmas?
         [:div#snow])
       [:div.container-fluid
        [:div.row.row-no-padding
         [gmap-outer {:class "col-sm-10 col-xs-12"}]
         [dropups {:class "col-sm-2 col-xs-12"}]
         [play-button]]]])))
|
---
layout: page
title: About
permalink: /about/
---
Hey there, I'm Lizzy! Welcome to my blog, I
guess. Here are some links around the internet
that relate to me.
- **Twitter:** [@LizzyReborn](https://twitter.com/LizzyReborn)
- **GitHub:** [@LizAinslie](https://github.com/LizAinslie)
- **Instagram:** [@railrunner166](https://instagram.com/railrunner166)
- **Twitch:** [@railrunner166](https://www.twitch.tv/railrunner166)
I also own a company called Synapse Technologies:
- **Website:** [synapsetech.dev](https://synapsetech.dev)
- **Twitter:** [@syntech_llc](https://twitter.com/syntech_llc)
- **GitHub:** [@SynapseTech](https://github.com/SynapseTech)
- **Support Email:** [support@synapsetech.dev](mailto:support@synapsetech.dev)
## Content
I figure it might be helpful to outline what you will and **won't** find on this blog.
### What you WILL find:
- **Tech Tutorials.** A lot of what I write has
good instructional value.
- **My Opinions.** I write from my own point of
view. Don't be surprised if something is
opinionated.
- **Rants.** I often write when I'm frustrated
with something. It often helps me solve the
problem, and it can be informational to
someone else with the same problem.
- **Swearing.** I have some specific views on
language, and long story short, I don't
often filter my language. This does not
include slurs; I don't use slurs. **Make sure
you are okay with strong language.**
### What you WON'T find:
- **18+/NSFW Content.** Not here. No thanks.
- **Hate Speech.** Everyone is different, in
every way except one. The one we share is
the one that should unite us: **We are
humans.** Hate speech is counterproductive
to our society, and you won't see any of it
here.
|
---
title: "Hello Github from Neel"
date: 2021-02-08
---
**This is Neel.**
*How is it going?*
|
package infrastructure.tester
import infrastructure.IT
/**
 * Mixes requester and responder test connections into an integration-test
 * suite (self-typed to [[IT]]) so a test can exercise a full round trip.
 */
trait RequesterAndResponder extends TestRequester with TestResponder{
  self:IT =>
  /**
   * Starts a responder and then a requester against the same broker and runs
   * `action` with both handlers. The requester block is nested inside the
   * responder block, so the responder is available for the requester's whole
   * lifetime (teardown order is presumably requester first — see
   * `withResponder`/`withRequester` in the mixed-in traits).
   * @param proto broker protocol: http (default) or https
   * @param host broker host
   * @param port broker port
   * @param responderName responder id (used in the connection path)
   * @param requesterName requester id (used in the connection path)
   * @param action actions to run with the connected responder and requester
   */
  def withRequesterAndResponder(proto:String = "http",
                                host:String = "localhost",
                                port:Int = 9000,
                                responderName:String = "scala-test-responder",
                                requesterName:String = "scala-test-requester"
                               )(action: (TestResponderHandler,
                                          TestRequesterHandler)=>Unit) = {
    withResponder(proto, host, port, responderName){responder =>
      withRequester(proto, host, port, requesterName){ requester =>
        action(responder, requester)
      }
    }
  }
}
|
import { useEffect, useState } from 'react';
import WikipediaApi from './api/wikipedia';
const useStateInfo = (state) => {
  // Holds the Wikipedia summary for the given state, or the fallback error
  // string when the lookup fails.
  // NOTE(review): the `summery` key looks like a typo for `summary`, and the
  // error path stores a plain string instead of an object — both kept as-is
  // because consumers may depend on them; confirm before renaming.
  const [stateInfo, setStateInfo] = useState(null);

  const fetchStateInfo = (stateToFetch) => {
    WikipediaApi.summary(stateToFetch)
      .then((data) => setStateInfo({ summery: data }))
      .catch(() => setStateInfo('Unable to fetch state info'));
  };

  // Refetch whenever the requested state changes.
  useEffect(() => {
    fetchStateInfo(state);
  }, [state]);

  return [stateInfo, fetchStateInfo];
};
export default useStateInfo;
|
---
title: White Label
layout: category
category: "white-label"
permalink: /en/category/white-label
lang: en
---
|
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.web.util.matcher;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.http.MediaType;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.web.HttpMediaTypeNotAcceptableException;
import org.springframework.web.accept.ContentNegotiationStrategy;
import org.springframework.web.context.request.NativeWebRequest;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
/**
* @author Rob Winch
* @author Dan Zheng
*/
@RunWith(MockitoJUnitRunner.class)
public class MediaTypeRequestMatcherTests {
private MediaTypeRequestMatcher matcher;
private MockHttpServletRequest request;
@Mock
private ContentNegotiationStrategy negotiationStrategy;
	@Before
	public void setup() {
		// Fresh request per test so header state cannot leak between tests.
		this.request = new MockHttpServletRequest();
	}
	@Test
	public void constructorWhenNullCNSThenIAE() {
		// A null ContentNegotiationStrategy must be rejected eagerly.
		ContentNegotiationStrategy c = null;
		assertThatIllegalArgumentException().isThrownBy(() -> new MediaTypeRequestMatcher(c, MediaType.ALL));
	}
	@Test
	public void constructorNullCNSSet() {
		// Same null-strategy check for the Collection-based overload.
		assertThatIllegalArgumentException()
				.isThrownBy(() -> new MediaTypeRequestMatcher(null, Collections.singleton(MediaType.ALL)));
	}
	@Test
	public void constructorNoVarargs() {
		// A strategy with no media types at all is invalid.
		assertThatIllegalArgumentException().isThrownBy(() -> new MediaTypeRequestMatcher(this.negotiationStrategy));
	}
	@Test
	public void constructorNullMediaTypes() {
		// A null media-type collection must be rejected.
		Collection<MediaType> mediaTypes = null;
		assertThatIllegalArgumentException()
				.isThrownBy(() -> new MediaTypeRequestMatcher(this.negotiationStrategy, mediaTypes));
	}
	@Test
	public void constructorEmtpyMediaTypes() {
		// An empty media-type collection must be rejected.
		// NOTE(review): method name has a typo ("Emtpy"); left unchanged to
		// keep the test's identity stable in reports/history.
		assertThatIllegalArgumentException().isThrownBy(
				() -> new MediaTypeRequestMatcher(this.negotiationStrategy, Collections.<MediaType>emptyList()));
	}
	@Test
	public void constructorWhenEmptyMediaTypeThenIAE() {
		// The no-strategy varargs constructor also needs at least one media type.
		assertThatIllegalArgumentException().isThrownBy(() -> new MediaTypeRequestMatcher());
	}
	@Test
	public void constructorWhenEmptyMediaTypeCollectionThenIAE() {
		// The no-strategy collection constructor rejects an empty collection too.
		assertThatIllegalArgumentException()
				.isThrownBy(() -> new MediaTypeRequestMatcher(Collections.<MediaType>emptyList()));
	}
	@Test
	public void negotiationStrategyThrowsHMTNAE() throws HttpMediaTypeNotAcceptableException {
		// If content negotiation itself fails, the matcher reports "no match"
		// rather than propagating the exception.
		given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
				.willThrow(new HttpMediaTypeNotAcceptableException("oops"));
		this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.ALL);
		assertThat(this.matcher.matches(this.request)).isFalse();
	}
@Test
public void mediaAllMatches() throws Exception {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(MediaType.ALL));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.TEXT_HTML);
assertThat(this.matcher.matches(this.request)).isTrue();
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.APPLICATION_XHTML_XML);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenAcceptHeaderAsteriskThenAll() {
this.request.addHeader("Accept", "*/*");
this.matcher = new MediaTypeRequestMatcher(MediaType.ALL);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenAcceptHeaderAsteriskThenAnyone() {
this.request.addHeader("Accept", "*/*");
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_HTML);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenAcceptHeaderAsteriskThenAllInCollection() {
this.request.addHeader("Accept", "*/*");
this.matcher = new MediaTypeRequestMatcher(Collections.singleton(MediaType.ALL));
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenAcceptHeaderAsteriskThenAnyoneInCollection() {
this.request.addHeader("Accept", "*/*");
this.matcher = new MediaTypeRequestMatcher(Collections.singleton(MediaType.TEXT_HTML));
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenNoAcceptHeaderThenAll() {
this.request.removeHeader("Accept");
// if not set Accept, it is match all
this.matcher = new MediaTypeRequestMatcher(MediaType.ALL);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenNoAcceptHeaderThenAnyone() {
this.request.removeHeader("Accept");
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_HTML);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenSingleAcceptHeaderThenOne() {
this.request.addHeader("Accept", "text/html");
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_HTML);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenSingleAcceptHeaderThenOneWithCollection() {
this.request.addHeader("Accept", "text/html");
this.matcher = new MediaTypeRequestMatcher(Collections.singleton(MediaType.TEXT_HTML));
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenMultipleAcceptHeaderThenMatchMultiple() {
this.request.addHeader("Accept", "text/html, application/xhtml+xml, application/xml;q=0.9");
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_HTML, MediaType.APPLICATION_XHTML_XML,
MediaType.APPLICATION_XML);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenMultipleAcceptHeaderThenAnyoneInCollection() {
this.request.addHeader("Accept", "text/html, application/xhtml+xml, application/xml;q=0.9");
this.matcher = new MediaTypeRequestMatcher(Arrays.asList(MediaType.APPLICATION_XHTML_XML));
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void multipleMediaType() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(MediaType.TEXT_PLAIN, MediaType.APPLICATION_XHTML_XML, MediaType.TEXT_HTML));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.APPLICATION_ATOM_XML,
MediaType.TEXT_HTML);
assertThat(this.matcher.matches(this.request)).isTrue();
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.APPLICATION_XHTML_XML,
MediaType.APPLICATION_JSON);
assertThat(this.matcher.matches(this.request)).isTrue();
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.APPLICATION_FORM_URLENCODED,
MediaType.APPLICATION_JSON);
assertThat(this.matcher.matches(this.request)).isFalse();
}
@Test
public void resolveTextPlainMatchesTextAll() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(MediaType.TEXT_PLAIN));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, new MediaType("text", "*"));
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenAcceptHeaderIsTextThenMediaTypeAllIsMatched() {
this.request.addHeader("Accept", MediaType.TEXT_PLAIN_VALUE);
this.matcher = new MediaTypeRequestMatcher(new MediaType("text", "*"));
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void resolveTextAllMatchesTextPlain() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(new MediaType("text", "*")));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.TEXT_PLAIN);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void matchWhenAcceptHeaderIsTextWildcardThenMediaTypeTextIsMatched() {
this.request.addHeader("Accept", "text/*");
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_PLAIN);
assertThat(this.matcher.matches(this.request)).isTrue();
}
// useEquals
@Test
public void useEqualsResolveTextAllMatchesTextPlain() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(new MediaType("text", "*")));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.TEXT_PLAIN);
this.matcher.setUseEquals(true);
assertThat(this.matcher.matches(this.request)).isFalse();
}
@Test
public void useEqualsWhenTrueThenMediaTypeTextIsNotMatched() {
this.request.addHeader("Accept", "text/*");
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_PLAIN);
this.matcher.setUseEquals(true);
assertThat(this.matcher.matches(this.request)).isFalse();
}
@Test
public void useEqualsResolveTextPlainMatchesTextAll() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(MediaType.TEXT_PLAIN));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, new MediaType("text", "*"));
this.matcher.setUseEquals(true);
assertThat(this.matcher.matches(this.request)).isFalse();
}
@Test
public void useEqualsWhenTrueThenMediaTypeTextAllIsNotMatched() {
this.request.addHeader("Accept", MediaType.TEXT_PLAIN_VALUE);
this.matcher = new MediaTypeRequestMatcher(new MediaType("text", "*"));
this.matcher.setUseEquals(true);
assertThat(this.matcher.matches(this.request)).isFalse();
}
@Test
public void useEqualsSame() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(MediaType.TEXT_PLAIN));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.TEXT_PLAIN);
this.matcher.setUseEquals(true);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void useEqualsWhenTrueThenMediaTypeIsMatchedWithEqualString() {
this.request.addHeader("Accept", MediaType.TEXT_PLAIN_VALUE);
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_PLAIN);
this.matcher.setUseEquals(true);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void useEqualsWithCustomMediaType() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(new MediaType("text", "unique")));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, new MediaType("text", "unique"));
this.matcher.setUseEquals(true);
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void useEqualsWhenTrueThenCustomMediaTypeIsMatched() {
this.request.addHeader("Accept", "text/unique");
this.matcher = new MediaTypeRequestMatcher(new MediaType("text", "unique"));
this.matcher.setUseEquals(true);
assertThat(this.matcher.matches(this.request)).isTrue();
}
// ignoreMediaTypeAll
@Test
public void mediaAllIgnoreMediaTypeAll() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(MediaType.ALL));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.TEXT_HTML);
this.matcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL));
assertThat(this.matcher.matches(this.request)).isFalse();
}
@Test
public void ignoredMediaTypesWhenAllThenAnyoneIsNotMatched() {
this.request.addHeader("Accept", MediaType.ALL_VALUE);
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_HTML);
this.matcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL));
assertThat(this.matcher.matches(this.request)).isFalse();
}
@Test
public void mediaAllAndTextHtmlIgnoreMediaTypeAll() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(MediaType.ALL, MediaType.TEXT_HTML));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.TEXT_HTML);
this.matcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL));
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void ignoredMediaTypesWhenAllAndTextThenTextCanBeMatched() {
this.request.addHeader("Accept", MediaType.ALL_VALUE + ", " + MediaType.TEXT_HTML_VALUE);
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_HTML);
this.matcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL));
assertThat(this.matcher.matches(this.request)).isTrue();
}
@Test
public void mediaAllQ08AndTextPlainIgnoreMediaTypeAll() throws HttpMediaTypeNotAcceptableException {
given(this.negotiationStrategy.resolveMediaTypes(any(NativeWebRequest.class)))
.willReturn(Arrays.asList(MediaType.TEXT_PLAIN, MediaType.parseMediaType("*/*;q=0.8")));
this.matcher = new MediaTypeRequestMatcher(this.negotiationStrategy, MediaType.TEXT_HTML);
this.matcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL));
assertThat(this.matcher.matches(this.request)).isFalse();
}
@Test
public void ignoredMediaTypesWhenAllThenQ08WithTextIsNotMatched() {
this.request.addHeader("Accept", MediaType.TEXT_PLAIN + ", */*;q=0.8");
this.matcher = new MediaTypeRequestMatcher(MediaType.TEXT_HTML);
this.matcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL));
assertThat(this.matcher.matches(this.request)).isFalse();
}
}
|
<?php
/*
* // +----------------------------------------------------------------------
* // | erp
* // +----------------------------------------------------------------------
* // | Copyright (c) 2006~2020 erp All rights reserved.
* // +----------------------------------------------------------------------
* // | Licensed ( LICENSE-1.0.0 )
* // +----------------------------------------------------------------------
* // | Author: yxx <1365831278@qq.com>
* // +----------------------------------------------------------------------
*/
namespace App\Observers;
use App\Models\MakeProductItemModel;
use App\Models\MakeProductOrderModel;
use App\Models\PositionModel;
use App\Models\TaskModel;
use Dcat\Admin\Admin;
class TaskObserver
{
    /**
     * Stamp the task with the currently logged-in admin user before it is inserted.
     */
    public function creating(TaskModel $taskModel)
    {
        $taskModel->user_id = Admin::user()->id;
    }

    /**
     * When the task's status transitions to STATUS_DRAW, push the averaged
     * cost price (sum_cost_price / actual_num, 2 decimal places) onto the
     * related make-product order item.
     */
    public function saving(TaskModel $taskModel)
    {
        if ($taskModel->isDirty('status') && $taskModel->status === TaskModel::STATUS_DRAW) {
            // NOTE(review): `items` is used below like a single model
            // (->actual_num, ->cost_price, ->saveOrFail()) — confirm the
            // relation returns one item, and that actual_num cannot be zero
            // (bcdiv with a zero divisor would fail).
            $item = $taskModel->make_product_order->items;
            $avgCostPrice = bcdiv($taskModel->sum_cost_price, $item->actual_num, 2);
            $item->cost_price = $avgCostPrice;
            $item->saveOrFail();
        }
    }

    /**
     * After every save, ensure a make-product order and one order item exist
     * for this task. firstOrCreate keys on with_id / order_id, so repeated
     * saves do not create duplicates.
     */
    public function saved(TaskModel $taskModel)
    {
        $makeProductOrder = MakeProductOrderModel::query()->firstOrCreate(
            ['with_id' => $taskModel->id],
            [
                'order_no' => build_order_no('SCRK'),
                'user_id' => Admin::user()->id,
                'apply_id' => Admin::user()->id,
                'other' => '',
            ]
        );
        // The item is looked up by order_id only, so each order carries at
        // most one auto-created item; subsequent saves leave it untouched.
        MakeProductItemModel::query()->firstOrCreate(
            [
                'order_id' => $makeProductOrder->id,
            ],
            [
                'should_num' => $taskModel->plan_num,
                'actual_num' => $taskModel->plan_num,
                'batch_no' => "PC".date('Ymd'),
                'percent' => $taskModel->percent,
                'standard' => $taskModel->standard,
                'sku_id' => $taskModel->sku_id,
                // Falls back to 0 when no position rows exist yet.
                'position_id' => PositionModel::query()->value('id') ?? 0,
            ]
        );
    }
}
|
# Client that reconciles CloudWatch Logs resources (log groups, streams and
# metric filters) against a local DSL description.
class Meteorlog::Client
  include Meteorlog::Logger::Helper
  include Meteorlog::Utils

  # @param options [Hash] runtime options (:include, :exclude,
  #   :skip_delete_group, exporter/DSL options, ...)
  def initialize(options = {})
    @options = options
    @cloud_watch_logs = Aws::CloudWatchLogs::Client.new
  end

  # Exports the current AWS state and renders it back as DSL source.
  def export(opts = {})
    exported = Meteorlog::Exporter.export(@cloud_watch_logs, @options.merge(opts))
    Meteorlog::DSL.convert(exported, @options.merge(opts))
  end

  # Applies a DSL file (path or IO) to AWS; returns whether anything changed.
  def apply(file)
    walk(file)
  end

  private

  # Reconciles DSL-declared log groups with the groups that exist in AWS:
  # creates missing groups, descends into each, then deletes groups that are
  # no longer declared (unless :skip_delete_group is set).
  def walk(file)
    dsl = load_file(file)

    dsl_log_groups = collect_to_hash(dsl.log_groups, :log_group_name)
    aws = Meteorlog::Wrapper.wrap(@cloud_watch_logs, @options)
    aws_log_groups = collect_to_hash(aws.log_groups, :log_group_name)

    # First pass: make sure every declared (and included) group exists.
    dsl_log_groups.each do |log_group_name, dsl_log_group|
      next unless Meteorlog::Utils.matched?(log_group_name, @options[:include], @options[:exclude])

      aws_log_group = aws_log_groups[log_group_name]

      unless aws_log_group
        aws_log_group = aws.log_groups.create(log_group_name)
        aws_log_groups[log_group_name] = aws_log_group
      end
    end

    # Second pass: reconcile streams/filters; deleting from the hash leaves
    # only the AWS-side groups that are not declared in the DSL.
    dsl_log_groups.each do |log_group_name, dsl_log_group|
      next unless Meteorlog::Utils.matched?(log_group_name, @options[:include], @options[:exclude])

      aws_log_group = aws_log_groups.delete(log_group_name)
      walk_log_group(dsl_log_group, aws_log_group)
    end

    unless @options[:skip_delete_group]
      aws_log_groups.each do |log_group_name, aws_log_group|
        next unless Meteorlog::Utils.matched?(log_group_name, @options[:include], @options[:exclude])
        aws_log_group.delete
      end
    end

    aws.modified?
  end

  # Reconciles one log group's streams (unless the DSL allows any streams)
  # and its metric filters.
  def walk_log_group(dsl_log_group, aws_log_group)
    unless dsl_log_group.any_log_streams
      walk_log_streams(dsl_log_group.log_streams, aws_log_group.log_streams)
    end

    walk_metric_filters(dsl_log_group.metric_filters, aws_log_group.metric_filters)
  end

  # Creates declared-but-missing streams and deletes undeclared ones.
  def walk_log_streams(dsl_log_streams, aws_log_streams)
    collection_api = aws_log_streams
    dsl_log_streams = collect_to_hash(dsl_log_streams, :log_stream_name)
    aws_log_streams = collect_to_hash(aws_log_streams, :log_stream_name)

    dsl_log_streams.each do |log_stream_name, dsl_log_stream|
      aws_log_stream = aws_log_streams.delete(log_stream_name)

      unless aws_log_stream
        collection_api.create(log_stream_name)
      end
    end

    # Whatever remains exists only on the AWS side.
    aws_log_streams.each do |log_stream_name, aws_log_stream|
      aws_log_stream.delete
    end
  end

  # Creates, updates or deletes metric filters so AWS matches the DSL.
  def walk_metric_filters(dsl_metric_filters, aws_metric_filters)
    collection_api = aws_metric_filters
    dsl_metric_filters = collect_to_hash(dsl_metric_filters, :filter_name)
    aws_metric_filters = collect_to_hash(aws_metric_filters, :filter_name)

    dsl_metric_filters.each do |filter_name, dsl_metric_filter|
      aws_metric_filter = aws_metric_filters.delete(filter_name)

      if aws_metric_filter
        unless aws_metric_filter.eql?(dsl_metric_filter)
          aws_metric_filter.update(dsl_metric_filter)
        end
      else
        collection_api.create(filter_name, dsl_metric_filter)
      end
    end

    aws_metric_filters.each do |filter_name, aws_metric_filter|
      aws_metric_filter.delete
    end
  end

  # Parses the DSL from a path or an IO-like object.
  def load_file(file)
    if file.kind_of?(String)
      # File.open instead of Kernel#open: Kernel#open spawns a subprocess when
      # the path begins with "|", which is a command-injection hazard for
      # attacker-influenced file names.
      File.open(file) do |f|
        Meteorlog::DSL.parse(f.read, file)
      end
    elsif file.respond_to?(:read)
      Meteorlog::DSL.parse(file.read, file.path)
    else
      raise TypeError, "can't convert #{file} into File"
    end
  end
end
|
import React from 'react'
import renderer from 'react-test-renderer'
import sinon from 'sinon'
import { configure, shallow, mount, render } from 'enzyme'
import { ActionLink } from '../ActionLink'
import Adapter from 'enzyme-adapter-react-16'
configure({ adapter: new Adapter() })
describe('<ActionLink />', () => {
it('should render without crashing', () => {
const component = renderer.create(<ActionLink />)
expect(component.toJSON()).toMatchSnapshot()
})
it('should render empty component', () => {
const component = renderer.create(<ActionLink />)
expect(component.toJSON()).toMatchSnapshot()
})
it('should react to events', () => {
const onButtonClick = sinon.spy()
const wrapper = shallow(<ActionLink onButtonClick={onButtonClick} />)
wrapper.find('button').simulate('click')
expect(onButtonClick.calledOnce).to.equal(true)
})
})
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace SafetyAnalysis.Framework.Graphs
{
[Serializable]
public abstract class HeapGraphBase
{
    /// <summary>All vertices currently in the graph.</summary>
    public abstract IEnumerable<HeapVertexBase> Vertices { get; }

    /// <summary>All edges currently in the graph.</summary>
    public abstract IEnumerable<HeapEdgeBase> Edges { get; }

    public abstract int VertexCount { get; }
    public abstract int EdgeCount { get; }
    public abstract bool IsVerticesEmpty { get; }

    public abstract void AddEdge(HeapEdgeBase edge);
    public abstract void AddVertex(HeapVertexBase v);

    public abstract void RemoveVertex(HeapVertexBase v);
    public abstract void RemoveEdge(HeapEdgeBase e);
    public abstract void RemoveVertices(IEnumerable<HeapVertexBase> vrs);
    public abstract void RemoveEdges(IEnumerable<HeapEdgeBase> edges);

    public abstract bool ContainsVertex(HeapVertexBase v);
    public abstract bool ContainsHeapEdge(HeapEdgeBase edge);

    public abstract IEnumerable<HeapEdgeBase> OutEdges(HeapVertexBase v);
    public abstract IEnumerable<HeapEdgeBase> InEdges(HeapVertexBase v);

    //this is used in strong updates
    public abstract void RemoveAllOutEdges(HeapVertexBase vertex);

    /// <summary>
    /// Will not copy vertex state
    /// </summary>
    /// <returns></returns>
    public abstract HeapGraphBase Copy();

    public abstract void Dump();

    /// <summary>
    /// Checks whether this graph is a subgraph of <paramref name="hg"/>:
    /// every vertex and every edge of this graph must be present in it.
    /// </summary>
    public virtual bool ContainedIn(HeapGraphBase hg)
    {
        // LINQ All short-circuits on the first missing vertex/edge, matching
        // the early-return behavior of the previous hand-written loops.
        return this.Vertices.All(hg.ContainsVertex)
            && this.Edges.All(hg.ContainsHeapEdge);
    }

    /// <summary>
    /// Adds every vertex and edge of <paramref name="graph"/> that is not
    /// already present. Vertices are added before edges so edge insertion
    /// never references a missing endpoint.
    /// </summary>
    public virtual void Union(HeapGraphBase graph)
    {
        foreach (HeapVertexBase vertex in graph.Vertices)
        {
            if (!this.ContainsVertex(vertex))
                this.AddVertex(vertex);
        }
        foreach (HeapEdgeBase edge in graph.Edges)
        {
            if (!this.ContainsHeapEdge(edge))
                this.AddEdge(edge);
        }
    }

    public abstract bool IsOutEdgesEmpty(HeapVertexBase v);
}
}
|
using IOTConnect.Application.Values;
using IOTConnect.Domain.IO;
using IOTConnect.Domain.System.Logging;
using System;
namespace IOTConnect.Persistence.IO.Adapters
{
/// <summary>
/// Adapts an arbitrary input (via its <c>ToString()</c> JSON representation)
/// into a <see cref="ValueState"/>-derived object.
/// </summary>
public class JsonToValueStateAdapter : IAdaptable
{
    public JsonToValueStateAdapter()
    {
    }

    /// <summary>
    /// Deserializes <paramref name="input"/>'s string form into a
    /// <typeparamref name="Tout"/>. Returns a default-constructed
    /// <typeparamref name="Tout"/> (after logging) when the target type is
    /// not a <see cref="ValueState"/> or the input is null.
    /// </summary>
    public Tout Adapt<Tout, Tin>(Tin input) where Tout : new()
    {
        // The original code also tested typeof(object).IsAssignableFrom(inType),
        // which is true for every possible Tin and therefore dead; only the
        // ValueState check on the output type is meaningful.
        Type outType = typeof(Tout);
        if (typeof(ValueState).IsAssignableFrom(outType) == false)
        {
            Log.Error($"The casting assumes the type '{typeof(ValueState).Name}' but '{outType.Name}' was found.");
            return new Tout();
        }

        // Guard against null input: input.ToString() previously threw a
        // NullReferenceException here.
        if (input == null)
        {
            Log.Error($"Cannot adapt a null input to '{outType.Name}'.");
            return new Tout();
        }

        // convert input, process and return output
        string inputString = input.ToString();
        Tout output = JsonIO.FromJsonString<Tout>(inputString);
        return output;
    }
}
}
|
package Memoize::SDBM_File;

=head1 NAME

Memoize::SDBM_File - glue to provide EXISTS for SDBM_File for Storable use

=head1 DESCRIPTION

See L<Memoize>.

=cut

use SDBM_File;
@ISA = qw(SDBM_File);
$VERSION = '1.03';

$Verbose = 0;

sub AUTOLOAD {
  warn "Nonexistent function $AUTOLOAD invoked in Memoize::SDBM_File\n";
}

sub import {
  warn "Importing Memoize::SDBM_File\n" if $Verbose;
}

# Per-tie cache of known keys, built lazily because SDBM_File itself
# implements no EXISTS; keyed by the stringified tie object.
my %keylist;

# This is so ridiculous...
# Walk FIRSTKEY/NEXTKEY once to build an in-memory key set for this tie.
sub _backhash {
  my $self = shift;
  my %fakehash;
  my $k;
  for ($k = $self->FIRSTKEY(); defined $k; $k = $self->NEXTKEY($k)) {
    $fakehash{$k} = undef;
  }
  $keylist{$self} = \%fakehash;
}

sub EXISTS {
  warn "Memoize::SDBM_File EXISTS (@_)\n" if $Verbose;
  my $self = shift;
  _backhash($self)  unless exists $keylist{$self};
  my $r = exists $keylist{$self}{$_[0]};
  warn "Memoize::SDBM_File EXISTS (@_) ==> $r\n" if $Verbose;
  $r;
}

sub DEFINED {
  warn "Memoize::SDBM_File DEFINED (@_)\n" if $Verbose;
  my $self = shift;
  _backhash($self)  unless exists $keylist{$self};
  defined $keylist{$self}{$_[0]};
}

sub DESTROY {
  warn "Memoize::SDBM_File DESTROY (@_)\n" if $Verbose;
  my $self = shift;
  delete $keylist{$self};   # So much for reference counting...
  $self->SUPER::DESTROY(@_);
}

# Maybe establish the keylist at TIEHASH time instead?
sub STORE {
  # Fixed: this previously tested $VERBOSE (never set anywhere) instead of
  # $Verbose, so STORE tracing could never be enabled.
  warn "Memoize::SDBM_File STORE (@_)\n" if $Verbose;
  my $self = shift;
  $keylist{$self}{$_[0]} = undef;
  $self->SUPER::STORE(@_);
}

# Inherit FETCH and TIEHASH

1;
|
<?php
namespace org\camunda\php\sdk\service;
use Exception;
use org\camunda\php\sdk\entity\request\AuthorizationRequest;
use org\camunda\php\sdk\entity\response\Authorization;
use org\camunda\php\sdk\entity\response\ResourceOption;
class AuthorizationService extends RequestService
{
    /**
     * Removes an authorization by id.
     * @Link http://docs.camunda.org/latest/api-references/rest/#authorization-delete-authorization
     *
     * @param string $id authorization ID
     * @throws \Exception
     */
    function deleteAuthorization($id)
    {
        $this->setRequestUrl('/authorization/' . $id);
        $this->setRequestObject(null);
        $this->setRequestMethod('DELETE');
        $this->execute();
    }

    /**
     * Retrieves a single authorization by id.
     * @Link http://docs.camunda.org/latest/api-references/rest/#authorization-get-single-authorization
     *
     * @param string $id authorization ID
     * @throws \Exception
     * @return \org\camunda\php\sdk\entity\response\Authorization the requested authorization
     */
    function getAuthorization($id)
    {
        $this->setRequestUrl('/authorization/' . $id);
        $this->setRequestObject(null);
        return Authorization::cast($this->execute());
    }

    /**
     * Performs an authorization check for the currently authenticated user.
     * Validates that all four required request fields are populated before
     * issuing the request.
     * @Link http://docs.camunda.org/latest/api-references/rest/#authorization-perform-an-authorization-check
     *
     * @param AuthorizationRequest $request
     * @throws \Exception if a required parameter is missing
     * @return mixed
     */
    function checkAuthorization(AuthorizationRequest $request)
    {
        $checkerArray = [
            0 => 'permissionName',
            1 => 'permissionValue',
            2 => 'resourceName',
            3 => 'resourceType',
        ];
        foreach ($checkerArray as $value) {
            // NOTE(review): $request is accessed with array syntax here —
            // this only works if AuthorizationRequest implements ArrayAccess;
            // confirm, otherwise these checks should use the entity's getters.
            if (empty($request[$value])) {
                throw new Exception("Missing $value parameter");
            }
        }
        $this->setRequestUrl('/authorization/check');
        $this->setRequestObject($request);
        return Authorization::cast($this->execute());
    }

    /**
     * Query for a list of authorizations using a list of parameters. The size of the result set can be retrieved by
     * using the get authorization count method.
     * @Link http://docs.camunda.org/latest/api-references/rest/#authorization-get-authorizations
     *
     * @param AuthorizationRequest $request
     * @throws \Exception
     * @return Authorization[]
     */
    function getAuthorizations(AuthorizationRequest $request)
    {
        $this->setRequestUrl('/authorization');
        $this->setRequestObject($request);
        return Authorization::castList($this->execute());
    }

    /**
     * Query for authorizations using a list of parameters and retrieves the count.
     * @Link http://docs.camunda.org/latest/api-references/rest/#authorization-get-authorizations-count
     *
     * @param AuthorizationRequest $request
     * @throws \Exception
     * @return int count of matching authorizations
     */
    function getCount(AuthorizationRequest $request)
    {
        $this->setRequestUrl('/authorization/count');
        $this->setRequestObject($request);
        return $this->execute()->count;
    }

    /**
     * Allows checking for the set of available operations that the currently authenticated user can perform
     * on the authorization resource (HTTP OPTIONS).
     * @Link http://docs.camunda.org/latest/api-references/rest/#authorization-authorization-resource-options
     *
     * @throws \Exception
     * @return \org\camunda\php\sdk\entity\response\ResourceOption
     */
    function getResourceOption()
    {
        $this->setRequestUrl('/authorization');
        $this->setRequestObject(null);
        $this->setRequestMethod('OPTIONS');
        return ResourceOption::cast($this->execute());
    }

    /**
     * Allows checking for the set of available operations that the currently authenticated user can perform
     * on a single authorization instance (HTTP OPTIONS).
     * @Link http://docs.camunda.org/latest/api-references/rest/#authorization-authorization-resource-options
     *
     * @param string $id authorization ID
     * @throws \Exception
     * @return \org\camunda\php\sdk\entity\response\ResourceOption
     */
    function getResourceInstanceOption($id)
    {
        $this->setRequestUrl('/authorization/' . $id);
        $this->setRequestObject(null);
        $this->setRequestMethod('OPTIONS');
        return ResourceOption::cast($this->execute());
    }

    /**
     * Creates a new authorization.
     * @Link http://docs.camunda.org/latest/api-references/rest/#authorization-create-a-new-authorization
     *
     * @param AuthorizationRequest $request
     * @throws \Exception
     * @return \org\camunda\php\sdk\entity\response\Authorization
     */
    function createAuthorization(AuthorizationRequest $request)
    {
        $this->setRequestUrl('/authorization/create');
        $this->setRequestObject($request);
        $this->setRequestMethod('POST');
        return Authorization::cast($this->execute());
    }

    /**
     * Updates a single authorization.
     *
     * @link http://docs.camunda.org/latest/api-references/rest/#authorization-update-a-single-authorization
     *
     * @param string $id authorization ID
     * @param AuthorizationRequest $request
     * @throws \Exception
     */
    function updateAuthorization($id, AuthorizationRequest $request)
    {
        $this->setRequestUrl('/authorization/' . $id);
        $this->setRequestObject($request);
        $this->setRequestMethod('PUT');
        $this->execute();
    }
}
|
import { CanActivate, ExecutionContext } from '@nestjs/common';
export class AdminGuard implements CanActivate {
  /**
   * Grants access only when the request carries an authenticated user whose
   * admin flag is truthy; unauthenticated requests are rejected outright.
   */
  canActivate(ctx: ExecutionContext) {
    const { currentUser } = ctx.switchToHttp().getRequest();
    return currentUser ? currentUser.admin : false;
  }
}
|
using System.Collections;
using System.Collections.Generic;
using System;
using UnityEngine;
using UnityEditor;
using System.Reflection;
using System.Linq;
using Object = UnityEngine.Object;
namespace OnionCollections.DataEditor.Editor
{
/// <summary>
/// Central access point for Onion Data Editor resources: package path
/// resolution, the shared <see cref="OnionSetting"/> asset, bookmarks,
/// editor icons, and the "Open with Onion Data Editor" entry points.
/// </summary>
public static class OnionDataEditor
{
    // Folder (under Assets/) where auto-created editor assets are placed.
    const string ResourcePath = "OnionDataEditorResource";

    // Root of this package: an Assets/ install if detected, otherwise the
    // Packages/ path used when installed via the package manager.
    public static string Path
    {
        get
        {
            string rootPath = GetRootPath();

            return rootPath ?? "Packages/com.macacagames.oniondataeditor";
        }
    }

    // Locates the package root by finding the editor asmdef and stepping up
    // one directory; returns null when the package is not under Assets/.
    static string GetRootPath()
    {
        const string AsmdefQuery = "t:ASMDEF OnionDataEditor.Editor";

        // NOTE(review): assumes the asmdef query always yields at least one
        // hit — [0] throws IndexOutOfRangeException if the asmdef was
        // renamed or removed; confirm this is acceptable in-editor.
        string asmdefGUID = AssetDatabase.FindAssets(AsmdefQuery)[0];
        string asmdefPath = AssetDatabase.GUIDToAssetPath(asmdefGUID);

        DirectoryVisitor directoryVisitor = new DirectoryVisitor(asmdefPath);
        directoryVisitor.Back();

        string rootPath = directoryVisitor.GetPathWithoutSplitChar();
        if (rootPath.StartsWith("Assets/"))
        {
            return rootPath;
        }

        return null;
    }

    // Cached singleton; resolved (or auto-created) on first access.
    static OnionSetting _setting;
    internal static OnionSetting Setting
    {
        get
        {
            const string assetName = "Setting";

            if (_setting == null)
                _setting = AutoCreateLoad<OnionSetting>(assetName);

            return _setting;
        }
    }

    // True when the node points at the shared Setting asset itself
    // (pseudo nodes never count).
    internal static bool IsSetting(TreeNode node)
    {
        return node.IsPseudo == false && node.Target == Setting;
    }

    // Root node of the user's bookmarks, stored on the Setting asset.
    internal static TreeNode Bookmarks => Setting.BookmarksNode;

    internal static bool IsBookmark(TreeNode node)
    {
        return node.tags.Contains("Bookmarks");
    }

    #region Icon

    // Lazily-loaded icon group asset shipped with the package.
    static OnionEditorIconGroup iconGroup = null;
    internal static Texture2D GetIconTexture(string iconKey)
    {
        if (string.IsNullOrEmpty(iconKey))
            return null;

        if(iconGroup == null)
        {
            iconGroup = AssetDatabase.LoadAssetAtPath<OnionEditorIconGroup>($"{Path}/Editor/IconGroup.asset");
        }

        Texture2D result = iconGroup.GetIcon(iconKey);

        return result;
    }

    // Built-in Unity console icons, exposed for convenience.
    public static Texture2D SmallErrorIcon => EditorGUIUtility.FindTexture("console.erroricon.sml");
    public static Texture2D ErrorIcon => EditorGUIUtility.FindTexture("console.erroricon");
    public static Texture2D SmallInfoIcon => EditorGUIUtility.FindTexture("console.infoicon.sml");
    public static Texture2D InfoIcon => EditorGUIUtility.FindTexture("console.infoicon");
    public static Texture2D SmallWarningIcon => EditorGUIUtility.FindTexture("console.warnicon.sml");
    public static Texture2D WarningIcon => EditorGUIUtility.FindTexture("console.warnicon");

    #endregion

    // Loads the first asset of type T found in the project, or creates one
    // under Assets/OnionDataEditorResource/ when none exists yet.
    static T AutoCreateLoad<T>(string assetName) where T : ScriptableObject
    {
        string[] guids = AssetDatabase.FindAssets($"t:{typeof(T).Name}");
        if (guids.Length > 0)
        {
            string path = AssetDatabase.GUIDToAssetPath(guids[0]);
            var result = AssetDatabase.LoadAssetAtPath<T>(path);
            return result;
        }
        else
        {
            DirectoryVisitor directoryVisitor = new DirectoryVisitor("Assets/")
                .CreateFolderIfNotExist(ResourcePath)
                .Enter(ResourcePath);

            var assetIns = ScriptableObject.CreateInstance<T>();
            string path = $"{directoryVisitor}{assetName}.asset";
            Debug.Log($"Auto create asset : {path}");
            AssetDatabase.CreateAsset(assetIns, path);
            var result = AssetDatabase.LoadAssetAtPath<T>(path);
            return result;
        }
    }

    // Context-menu entry: opens the currently selected asset in the editor window.
    [MenuItem("Assets/Open with Onion Data Editor")]
    public static void OpenWithOnionDataEditor()
    {
        UnityEngine.Object selectObj = Selection.activeObject;
        if (selectObj != null)
        {
            TreeNode targetNode = new TreeNode(selectObj);
            OnionDataEditorWindow.ShowWindow(targetNode);
        }
    }

    // Programmatic variant of the menu entry above.
    public static void OpenWithOnionDataEditor(Object selectObj)
    {
        if (selectObj != null)
        {
            TreeNode targetNode = new TreeNode(selectObj);
            OnionDataEditorWindow.ShowWindow(targetNode);
        }
    }

    // Per-type cache of the OpenWithOnionDataEditorAttribute decision;
    // null means the type carries no attribute.
    static readonly Dictionary<Type, bool?> openWithDataEditorQuery = new Dictionary<Type, bool?>();

    // Asset-open hook: decide whether double-clicking an asset should open
    // the Onion Data Editor. Attribute opt-in/opt-out wins; otherwise any
    // IQueryableData asset opens here. Returning false lets Unity's default
    // opener handle the asset.
    [UnityEditor.Callbacks.OnOpenAsset(1)]
    public static bool OnOpenAsset(int instanceID, int line)
    {
        UnityEngine.Object target = EditorUtility.InstanceIDToObject(instanceID);
        var t = target.GetType();

        if (openWithDataEditorQuery.TryGetValue(t, out bool? queryResult) == false)
        {
            var openAttr = t.GetCustomAttribute<OpenWithOnionDataEditorAttribute>(true);
            queryResult = openAttr?.openWithDataEditor;
            openWithDataEditorQuery.Add(t, queryResult);
        }

        bool openResult;
        if (queryResult.HasValue)
        {
            openResult = openWithDataEditorQuery[t].Value;
        }
        else
        {
            openResult = target is IQueryableData;
        }

        if (openResult == true)
        {
            OpenWithOnionDataEditor();
            return true;
        }
        else
        {
            return false;
        }
    }
}
}
|
package org.apache.gora.examples.generated;
import java.util.Set;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.persistency.StateManager;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBAttribute;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBHashKey;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBRangeKey;
import com.amazonaws.services.dynamodb.datamodeling.DynamoDBTable;
// Generated mapping class for the DynamoDB "person" table. The Persistent
// interface is implemented as a stub: all state-tracking methods are no-ops
// or return fixed values (see notes below); only the DynamoDB-annotated
// accessors carry real data.
@DynamoDBTable(tableName = "person")
public class person implements Persistent {

  // Composite primary key: ssn is the hash key, date the range key.
  private String ssn;
  private String date;

  @DynamoDBHashKey(attributeName="ssn")
  public String getHashKey() {  return ssn; }
  public void setHashKey(String pSsn){  this.ssn = pSsn; }

  @DynamoDBRangeKey(attributeName="date")
  public String getRangeKey() { return date; }
  public void setRangeKey(String pDate){  this.date = pDate; }

  private String lastName;
  @DynamoDBAttribute(attributeName = "LastName")
  public String getLastName() { return lastName; }
  public void setLastName(String pLastName) {  this.lastName = pLastName; }

  private Set<String> visitedplaces;
  @DynamoDBAttribute(attributeName = "Visitedplaces")
  public Set<String> getVisitedplaces() { return visitedplaces; }
  public void setVisitedplaces(Set<String> pVisitedplaces) { this.visitedplaces = pVisitedplaces; }

  private double salary;
  @DynamoDBAttribute(attributeName = "Salary")
  public double getSalary() { return salary; }
  public void setSalary(double pSalary) { this.salary = pSalary; }

  private String firstName;
  @DynamoDBAttribute(attributeName = "FirstName")
  public String getFirstName() { return firstName; }
  public void setFirstName(String pFirstName) { this.firstName = pFirstName; }

  // --- Persistent interface: stub implementation, no state tracking ---

  // No-op overloads; dirty/new flags are not tracked by this class.
  public void setNew(boolean pNew){}
  public void setDirty(boolean pDirty){}

  @Override
  public StateManager getStateManager() { return null; }

  @Override
  public Persistent newInstance(StateManager stateManager) { return null; }

  @Override
  public String[] getFields() { return null; }

  @Override
  public String getField(int index) { return null; }

  @Override
  public int getFieldIndex(String field) { return 0; }

  @Override
  public void clear() { }

  // NOTE: returns null rather than a copy — callers must not rely on clone().
  @Override
  public person clone() { return null; }

  @Override
  public boolean isNew() { return false; }

  @Override
  public void setNew() { }

  @Override
  public void clearNew() { }

  @Override
  public boolean isDirty() { return false; }

  @Override
  public boolean isDirty(int fieldIndex) { return false; }

  @Override
  public boolean isDirty(String field) { return false; }

  @Override
  public void setDirty() { }

  @Override
  public void setDirty(int fieldIndex) { }

  @Override
  public void setDirty(String field) { }

  @Override
  public void clearDirty(int fieldIndex) { }

  @Override
  public void clearDirty(String field) { }

  @Override
  public void clearDirty() { }

  @Override
  public boolean isReadable(int fieldIndex) { return false; }

  @Override
  public boolean isReadable(String field) { return false; }

  @Override
  public void setReadable(int fieldIndex) { }

  @Override
  public void setReadable(String field) { }

  @Override
  public void clearReadable(int fieldIndex) { }

  @Override
  public void clearReadable(String field) { }

  @Override
  public void clearReadable() { }
}
|
import { MoyskladAdapter } from '@warehouse/moysklad-adapter'
import { IWarehouseService } from './warehouse.interfaces'
/**
 * Warehouse facade backed by the MoySklad vendor adapter.
 * Implements IWarehouseService by delegating every call straight through
 * to the adapter; no extra logic lives here.
 */
export class WarehouseService implements IWarehouseService {
  // Concrete vendor integration, constructed eagerly per service instance.
  private readonly vendorAdapter = new MoyskladAdapter()

  /** Forwards product creation to the vendor adapter. */
  async createProduct(product) {
    return this.vendorAdapter.createProduct(product)
  }

  /** Fetches current stock levels from the vendor adapter. */
  async getStocks() {
    return this.vendorAdapter.getStocks()
  }
}
|
namespace EA.Weee.Web.Areas.Admin.ViewModels.Scheme.Overview.PcsDetails
{
/// <summary>
/// View model for the "PCS details" tab of the admin scheme overview
/// screen. Selects the PcsDetails display option via the base constructor.
/// </summary>
public class PcsDetailsOverviewViewModel : OverviewViewModel
{
/// <summary>Scheme approval number.</summary>
public string ApprovalNumber { get; set; }
/// <summary>Billing reference for the scheme.</summary>
public string BillingReference { get; set; }
/// <summary>Obligation type, rendered as display text.</summary>
public string ObligationType { get; set; }
/// <summary>The appropriate authority for the scheme.</summary>
public string AppropriateAuthority { get; set; }
/// <summary>Current scheme status, rendered as display text.</summary>
public string Status { get; set; }
/// <summary>True when the scheme has been rejected.</summary>
public bool IsRejected { get; set; }
/// <summary>True when the current user may edit the PCS.</summary>
public bool CanEditPcs { get; set; }
public PcsDetailsOverviewViewModel()
: base(OverviewDisplayOption.PcsDetails)
{
}
}
}
|
use std::vec;
/// Dense 2-D array stored row-major in one owned vector.
/// (Pre-1.0 Rust: `~[T]` owned vector, `uint` indices.)
pub struct StorageByInts2<T> {
// Flat backing store of length size_0 * size_1.
data: ~[T],
// Extent of the first (slowest-varying) dimension.
size_0: uint,
// Extent of the second (fastest-varying) dimension.
size_1: uint,
}
impl <T:Clone> StorageByInts2<T> {
/// Builds a size_0 x size_1 storage with every cell set to a clone of `elem`.
pub fn from_elem(size_0: uint, size_1: uint, elem: T) -> StorageByInts2<T> {
let sz = size_0 * size_1;
let data = vec::from_elem(sz, elem);
StorageByInts2 {
data: data,
size_0: size_0,
size_1: size_1,
}
}
/// Builds a storage by evaluating `f(i0, i1)` for every index pair,
/// filling in row-major order (i1 varies fastest).
pub fn from_fn(size_0: uint, size_1: uint,
f: |i0:uint, i1:uint| -> T) -> StorageByInts2<T> {
let mut data = vec::with_capacity(size_0 * size_1);
for i0 in range(0, size_0) {
for i1 in range(0, size_1) {
data.push(f(i0, i1));
}
}
StorageByInts2 {
data: data,
size_0: size_0,
size_1: size_1,
}
}
/// Returns a clone of the element at (i0, i1).
/// No explicit bounds check beyond the vector's own indexing.
#[inline]
pub fn get(&self, i0: uint, i1: uint) -> T {
self.data[i0 * self.size_1 + i1].clone()
}
/// Overwrites the element at (i0, i1).
#[inline]
pub fn set(&mut self, i0: uint, i1: uint, val: T) {
self.data[i0 * self.size_1 + i1] = val;
}
}
/// Dense 3-D array stored row-major in one owned vector.
pub struct StorageByInts3<T> {
// Flat backing store of length size_0 * size_1 * size_2.
data: ~[T],
size_0: uint,
size_1: uint,
size_2: uint,
}
impl <T:Clone> StorageByInts3<T> {
/// Builds a storage by evaluating `f(i0, i1, i2)` for every index triple,
/// filling in row-major order (i2 varies fastest).
pub fn from_fn(size_0: uint, size_1: uint, size_2: uint,
f: |i0:uint, i1:uint, i2:uint| -> T) -> StorageByInts3<T> {
let mut data = vec::with_capacity(size_0 * size_1 * size_2);
for i0 in range(0, size_0) {
for i1 in range(0, size_1) {
for i2 in range(0, size_2) {
data.push(f(i0, i1, i2));
}
}
}
StorageByInts3 {
data: data,
size_0: size_0,
size_1: size_1,
size_2: size_2,
}
}
/// Returns a clone of the element at (i0, i1, i2).
/// Note: unlike StorageByInts2, no `set` is provided here.
#[inline]
pub fn get(&self, i0: uint, i1: uint, i2: uint) -> T {
self.data[(i0 * self.size_1 + i1)*self.size_2 + i2].clone()
}
}
/// Dense 4-D array stored row-major in one owned vector.
pub struct StorageByInts4<T> {
// Flat backing store of length size_0 * size_1 * size_2 * size_3.
data: ~[T],
size_0: uint,
size_1: uint,
size_2: uint,
size_3: uint,
}
impl <T:Clone> StorageByInts4<T> {
/// Builds a storage with every cell set to a clone of `elem`.
pub fn from_elem(size_0: uint, size_1: uint, size_2: uint, size_3: uint, elem: T) -> StorageByInts4<T> {
let sz = size_0 * size_1 * size_2 * size_3;
let data = vec::from_elem(sz, elem);
StorageByInts4 {
data: data,
size_0: size_0,
size_1: size_1,
size_2: size_2,
size_3: size_3,
}
}
/// Builds a storage by evaluating `f(i0, i1, i2, i3)` for every index
/// tuple, filling in row-major order (i3 varies fastest).
pub fn from_fn(size_0: uint, size_1: uint, size_2: uint, size_3: uint,
f: |i0:uint, i1:uint, i2:uint, i3: uint| -> T) -> StorageByInts4<T> {
let sz = size_0 * size_1 * size_2 * size_3;
let mut data = vec::with_capacity(sz);
for i0 in range(0, size_0) {
for i1 in range(0, size_1) {
for i2 in range(0, size_2) {
for i3 in range(0, size_3) {
data.push(f(i0, i1, i2, i3));
}
}
}
}
StorageByInts4 {
data: data,
size_0: size_0,
size_1: size_1,
size_2: size_2,
size_3: size_3,
}
}
/// Returns a clone of the element at (i0, i1, i2, i3).
#[inline]
pub fn get(&self, i0: uint, i1: uint, i2: uint, i3: uint) -> T {
self.data[((i0 * self.size_1 + i1) * self.size_2 + i2) * self.size_3 + i3].clone()
}
}
/// Dense 5-D array stored row-major in one owned vector.
pub struct StorageByInts5<T> {
// Flat backing store of length size_0 * size_1 * size_2 * size_3 * size_4.
data: ~[T],
size_0: uint,
size_1: uint,
size_2: uint,
size_3: uint,
size_4: uint,
}
impl <T:Clone> StorageByInts5<T> {
/// Builds a storage by evaluating `f(i0, i1, i2, i3, i4)` for every index
/// tuple, filling in row-major order (i4 varies fastest).
pub fn from_fn(size_0: uint, size_1: uint, size_2: uint, size_3: uint, size_4: uint,
f: |i0:uint, i1:uint, i2:uint, i3: uint, i4: uint| -> T) -> StorageByInts5<T> {
let sz = size_0 * size_1 * size_2 * size_3 * size_4;
let mut data = vec::with_capacity(sz);
for i0 in range(0, size_0) {
for i1 in range(0, size_1) {
for i2 in range(0, size_2) {
for i3 in range(0, size_3) {
for i4 in range(0, size_4) {
data.push(f(i0, i1, i2, i3, i4));
}
}
}
}
}
StorageByInts5 {
data: data,
size_0: size_0,
size_1: size_1,
size_2: size_2,
size_3: size_3,
size_4: size_4,
}
}
/// Returns a clone of the element at (i0, i1, i2, i3, i4).
#[inline]
pub fn get(&self, i0: uint, i1: uint, i2: uint, i3: uint, i4: uint) -> T {
self.data[(((i0 * self.size_1 + i1) * self.size_2 + i2) * self.size_3 + i3) * self.size_4 + i4].clone()
}
}
|
// Copyright (c) The Perspex Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Linq;
using Perspex.Controls.Generators;
using Perspex.Controls.Primitives;
using Perspex.Controls.Shapes;
using Perspex.Controls.Templates;
using Perspex.Input;
using Perspex.Layout;
using Perspex.Media;
using Perspex.VisualTree;
namespace Perspex.Controls
{
/// <summary>
/// A drop-down (combo-box style) selector: a SelectingItemsControl whose
/// current selection is shown in a content area and whose items appear in
/// a templated popup named "PART_Popup".
/// </summary>
public class DropDown : SelectingItemsControl, IContentControl
{
/// <summary>Defines the <see cref="Content"/> property (shared with ContentControl).</summary>
public static readonly PerspexProperty<object> ContentProperty =
ContentControl.ContentProperty.AddOwner<DropDown>();
/// <summary>Defines the <see cref="HorizontalContentAlignment"/> property.</summary>
public static readonly PerspexProperty<HorizontalAlignment> HorizontalContentAlignmentProperty =
ContentControl.HorizontalContentAlignmentProperty.AddOwner<DropDown>();
/// <summary>Defines the <see cref="VerticalContentAlignment"/> property.</summary>
public static readonly PerspexProperty<VerticalAlignment> VerticalContentAlignmentProperty =
ContentControl.VerticalContentAlignmentProperty.AddOwner<DropDown>();
/// <summary>Direct property backing <see cref="IsDropDownOpen"/>.</summary>
public static readonly PerspexProperty<bool> IsDropDownOpenProperty =
PerspexProperty.RegisterDirect<DropDown, bool>(
nameof(IsDropDownOpen),
o => o.IsDropDownOpen,
(o, v) => o.IsDropDownOpen = v);
/// <summary>Defines the <see cref="SelectionBoxItem"/> property.</summary>
public static readonly PerspexProperty<object> SelectionBoxItemProperty =
PerspexProperty.Register<DropDown, object>("SelectionBoxItem");
// Backing field for IsDropDownOpen (direct property).
private bool _isDropDownOpen;
// Popup from the control template; rewired on each template application.
private Popup _popup;
static DropDown()
{
// Drop-downs are focusable by default, and selection changes feed the
// selection-box presentation.
FocusableProperty.OverrideDefaultValue<DropDown>(true);
SelectedItemProperty.Changed.AddClassHandler<DropDown>(x => x.SelectedItemChanged);
}
public DropDown()
{
// Mirror the selected item into Content so the closed drop-down shows it.
Bind(ContentProperty, GetObservable(SelectedItemProperty));
}
/// <summary>Gets or sets the content shown in the selection box.</summary>
public object Content
{
get { return GetValue(ContentProperty); }
set { SetValue(ContentProperty, value); }
}
/// <summary>Gets or sets the horizontal alignment of the content.</summary>
public HorizontalAlignment HorizontalContentAlignment
{
get { return GetValue(HorizontalContentAlignmentProperty); }
set { SetValue(HorizontalContentAlignmentProperty, value); }
}
/// <summary>Gets or sets the vertical alignment of the content.</summary>
public VerticalAlignment VerticalContentAlignment
{
get { return GetValue(VerticalContentAlignmentProperty); }
set { SetValue(VerticalContentAlignmentProperty, value); }
}
/// <summary>Gets or sets whether the item popup is currently open.</summary>
public bool IsDropDownOpen
{
get { return _isDropDownOpen; }
set { SetAndRaise(IsDropDownOpenProperty, ref _isDropDownOpen, value); }
}
/// <summary>
/// Gets or sets the item rendered in the closed selection box; set from
/// <see cref="SelectedItemChanged"/> (a visual-brush snapshot for controls,
/// the raw value otherwise).
/// </summary>
public object SelectionBoxItem
{
get { return GetValue(SelectionBoxItemProperty); }
set { SetValue(SelectionBoxItemProperty, value); }
}
/// <summary>Items are hosted in ListBoxItem containers.</summary>
protected override IItemContainerGenerator CreateItemContainerGenerator()
{
return new ItemContainerGenerator<ListBoxItem>(this, ListBoxItem.ContentProperty);
}
/// <summary>
/// F4 or Alt+Down toggles the popup; Escape or Enter closes it when open.
/// </summary>
protected override void OnKeyDown(KeyEventArgs e)
{
base.OnKeyDown(e);
if (!e.Handled)
{
if (e.Key == Key.F4 ||
(e.Key == Key.Down && ((e.Modifiers & InputModifiers.Alt) != 0)))
{
IsDropDownOpen = !IsDropDownOpen;
e.Handled = true;
}
else if (IsDropDownOpen && (e.Key == Key.Escape || e.Key == Key.Enter))
{
IsDropDownOpen = false;
e.Handled = true;
}
}
}
/// <summary>
/// Opens the popup when the control body is pressed (presses originating
/// inside the popup -- whose visual root is a PopupRoot -- are excluded),
/// otherwise lets the press update the selection from the event source.
/// </summary>
protected override void OnPointerPressed(PointerPressEventArgs e)
{
if (!IsDropDownOpen)
{
// NOTE(review): Last() throws if the source has no visual ancestors --
// presumably impossible for a routed pointer event; confirm.
if (((IVisual)e.Source).GetVisualAncestors().Last().GetType() != typeof(PopupRoot))
{
IsDropDownOpen = true;
e.Handled = true;
}
}
if (!e.Handled)
{
if (UpdateSelectionFromEventSource(e.Source))
{
e.Handled = true;
}
}
base.OnPointerPressed(e);
}
/// <summary>
/// Finds "PART_Popup" in the new template and (re)wires its Opened event,
/// detaching from the popup of any previous template first.
/// </summary>
protected override void OnTemplateApplied()
{
if (_popup != null)
{
_popup.Opened -= PopupOpened;
}
_popup = this.GetTemplateChild<Popup>("PART_Popup");
_popup.Opened += PopupOpened;
}
// Focus the container of the selected item when the popup opens, so
// keyboard navigation starts from the current selection.
private void PopupOpened(object sender, EventArgs e)
{
var selectedIndex = SelectedIndex;
if (selectedIndex != -1)
{
var container = ItemContainerGenerator.ContainerFromIndex(selectedIndex);
container?.Focus();
}
}
// When the selected item is itself a control, present a VisualBrush
// snapshot of it (sized to its desired size) -- presumably because the
// control cannot appear in two places in the visual tree at once; confirm.
// Non-control items are presented directly.
private void SelectedItemChanged(PerspexPropertyChangedEventArgs e)
{
var control = e.NewValue as IControl;
if (control != null)
{
control.Measure(Size.Infinity);
SelectionBoxItem = new Rectangle
{
Width = control.DesiredSize.Width,
Height = control.DesiredSize.Height,
Fill = new VisualBrush
{
Visual = control,
Stretch = Stretch.None,
AlignmentX = AlignmentX.Left,
}
};
}
else
{
SelectionBoxItem = e.NewValue;
}
}
}
}
|
import getHTMLText from "./getHTMLText";
/**
 * Returns the number of visible characters in an HTML string: tags are
 * stripped via getHTMLText, surrounding whitespace is trimmed, and all
 * newline characters are excluded from the count.
 *
 * @param html the html which length should be determined
 */
export default function getHTMLCharacterLength(html: string | undefined): number {
  if (!html) return 0;
  const visibleText = getHTMLText(html).trim();
  return visibleText.split("\n").join("").length;
}
|
#!/bin/bash
# Provision a Jenkins build node: grant the jenkins user passwordless sudo,
# relax sudo's tty requirement, set the requested FQDN, and stop services
# that collide with Apache on ports 80/443.
#
# Usage: ./script <fqdn>
hostname=$1

echo 'Updating /etc/sudoers'
# Passwordless sudo for the jenkins user (idempotent: only on first run).
if [[ ! -e '/etc/sudoers.d/jenkins' ]]; then
    sudo /bin/bash --login -c 'echo "jenkins ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/jenkins'
fi

# Comment out any "Defaults requiretty" line so sudo works without a tty.
sudo sed -i -e 's/^Defaults.*requiretty/# Defaults requiretty/g' /etc/sudoers

# Exempt the admin user from requiretty. Guard the append so repeated runs
# do not stack duplicate lines (the original appended unconditionally).
sudo grep -q '^Defaults:admin !requiretty' /etc/sudoers || \
    sudo /bin/bash --login -c 'echo "Defaults:admin !requiretty" >> /etc/sudoers'

# Make the %sudo group passwordless.
# BUG FIX: the original used 's/^%sudo.+ALL=(ALL:ALL) ALL/%sudo.+ALL=...NOPASSWD:ALL/g'
# -- in basic-regex mode "+" is literal so the pattern never matched, and the
# replacement would have written the literal text "%sudo.+ALL=..." into
# /etc/sudoers. Use extended regexes with a capture group instead.
sudo sed -i -E 's/^(%sudo[[:space:]]+ALL=\(ALL:ALL\)) ALL$/\1 NOPASSWD:ALL/' /etc/sudoers

echo "Updating FQDN: ${hostname}"
# Map the FQDN to loopback unless it is already present in /etc/hosts
# (dots escaped and line anchored so only the real 127.0.0.1 entry matches).
grep -q "${hostname}" /etc/hosts || sudo sed -i "s/^127\.0\.0\.1/127.0.0.1 ${hostname}/" /etc/hosts
hostname | grep -q "${hostname}" || sudo hostname "${hostname}"

# These services listen on TCP 80 & 443 by default which collides w/ Apache.
# TCP 80 collision:
sudo systemctl disable nginx
sudo systemctl stop nginx
# TCP 443 collision:
sudo systemctl disable inetsim
sudo systemctl stop inetsim
|
// Exported control hooks for driving Safari from the deVbug tweak.
// Each function presumably returns YES on success -- confirm at the
// definitions; only the declarations are visible here.

// --- Scrolling (int/float arguments are step counts / zoom factors --
// TODO confirm units at the definitions). ---
extern BOOL goTopBydeVbugForSafari();
extern BOOL goBottomBydeVbugForSafari();
extern BOOL goLeftBydeVbugForSafari(int);
extern BOOL goRightBydeVbugForSafari(int);
extern BOOL goUpBydeVbugForSafari(int);
extern BOOL goDownBydeVbugForSafari(int);
// --- Tab management. ---
extern BOOL prevTabBydeVbugForSafari();
extern BOOL nextTabBydeVbugForSafari();
extern BOOL reloadBydeVbugForSafari();
extern BOOL openNewTabBydeVbugForSafari();
extern BOOL closeTabBydeVbugForSafari();
// --- Address bar focus. ---
extern BOOL focusAddressViewBydeVbugForSafari();
extern BOOL resignFocusToMainViewBydeVbugForSafari();
// --- Find-on-page. ---
extern BOOL showSearchOnThisPageBydeVbugForSafari();
extern BOOL nextSearchOnPageBydeVbugForSafari();
extern BOOL prevSearchOnPageBydeVbugForSafari();
// --- History navigation. ---
extern BOOL goBackBydeVbugForSafari();
extern BOOL goForwardBydeVbugForSafari();
// --- Sharing / panels. ---
extern BOOL showPrintPanelBydeVbugForSafari();
extern BOOL showTweetControllerBydeVbugForSafari();
extern BOOL showAddToHomeBydeVbugForSafari();
extern BOOL showAddBookmarkBydeVbugForSafari();
extern BOOL showBookmarksBydeVbugForSafari();
extern BOOL showActionsBydeVbugForSafari();
extern BOOL addToReadingListBydeVbugForSafari();
// --- Zoom. ---
extern BOOL zoomInBydeVbugForSafari(float);
extern BOOL zoomOutBydeVbugForSafari(float);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.